code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
module FRP.Chimera.Agent.Stream
(
dataFlowS
) where
import Control.Monad.State
import FRP.BearRiver
import FRP.Chimera.Agent.Interface
import FRP.Chimera.Agent.Monad
-- | Signal function that feeds each incoming 'AgentData' sample into the
-- monadic data-flow action, using the 'AgentOut' carried by the
-- underlying state monad.
dataFlowS :: MonadState (AgentOut m o d e) m
          => SF m (AgentData d) ()
dataFlowS = proc d -> do
  -- NOTE(review): lambda is eta-reducible to 'arrM dataFlowM'; kept as-is.
  _ <- arrM (\d -> dataFlowM d) -< d
returnA -< () | thalerjonathan/phd | coding/libraries/chimera/src/FRP/Chimera/Agent/Stream.hs | gpl-3.0 | 411 | 1 | 13 | 98 | 128 | 70 | 58 | 14 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module QuickPlot.IPC.QQ (
json
) where
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.Quote
import qualified Data.Vector as V
import qualified Data.Text as T
import Data.Aeson hiding (json)
import QuickPlot.IPC.QQParser
-- | QuasiQuoter for embedding JSON literals as Haskell expressions.
-- Only the expression context is implemented; using the quoter in a
-- pattern, type or declaration position aborts compilation.
json :: QuasiQuoter
json = QuasiQuoter { quoteExp  = jsonExp
                   , quotePat  = const $ error "No quotePat defined for jsonQQ"
                   , quoteType = const $ error "No quoteType defined for jsonQQ"
                   , quoteDec  = const $ error "No quoteDec defined for jsonQQ"
                   }
-- | Parse a JSON string at compile time and lift the parsed value into
-- a Template Haskell expression.  Invalid JSON aborts compilation with
-- the parser's error message.
jsonExp :: String -> ExpQ
jsonExp input = either invalid liftValue (parseTHJSON input)
  where
    invalid err   = error ("JSON is invalid: " ++ show err)
    liftValue val = [| val |]
-- | Compile-time lifting of parsed 'JSONValue's into expressions that
-- build the corresponding aeson 'Value's at runtime.
instance Lift JSONValue where
    lift (JSONString string) = [| String (T.pack string) |]
    lift JSONNull            = [| Null |]
    -- Objects become an aeson 'object' built from the key/value pairs.
    lift (JSONObject objects)     = [| object $jsonList |]
        where
            jsonList :: ExpQ
            jsonList = ListE <$> mapM objs2list objects
            objs2list :: (HashKey, JSONValue) -> ExpQ
            objs2list (key, value) = case key of
                HashStringKey k -> [|(T.pack k, $(lift value))|]
                -- Variable keys splice in the in-scope Haskell variable @k@.
                HashVarKey k    -> [|(T.pack $(dyn k), $(lift value))|]
    lift (JSONArray arr)     = [| Array $ V.fromList $(ListE <$> mapM lift arr) |]
    -- Numbers round-trip through Rational so the literal is lifted exactly.
    lift (JSONNumber n)      = [| Number (fromRational $(return $ LitE $ RationalL (toRational n))) |]
    lift (JSONBool b)        = [| Bool b |]
    -- Embedded Haskell expressions are spliced and converted via 'toJSON'.
    lift (JSONCode e)        = [| toJSON $(return e) |]
| tepf/QuickPlot | src/QuickPlot/IPC/QQ.hs | gpl-3.0 | 1,699 | 0 | 11 | 575 | 378 | 221 | 157 | 34 | 2 |
module Pappy.Basic where
-- This module contains basic definitions needed for a pure pappy generated
-- parser
import Pappy.Pos
-- BEGIN CODE
---------- Data types used for parsing
-- | What went wrong at a point in the input: either a description of
-- what the parser expected to see, or a free-form diagnostic message.
data ErrorDescriptor =
      Expected String
    | Message String
    deriving(Eq)

-- | A parse error: the position it occurred at plus the descriptors
-- accumulated for that position.
data ParseError = ParseError {
            errorPos :: Pos,
            errorDescrs :: [ErrorDescriptor]
        }

-- | Outcome of a parse: a value, the remaining derivation @d@ and the
-- error information collected so far — or a failure.
data Result d v =
      Parsed v d ParseError
    | NoParse ParseError
-- Join two ParseErrors, giving preference to the one farthest right,
-- or merging their descriptor sets if they are at the same position.
joinErrors :: ParseError -> ParseError -> ParseError
joinErrors (e @ (ParseError p m)) (e' @ (ParseError p' m')) =
    if p' > p || null m then e'
    else if p > p' || null m' then e
    else ParseError p (m `union` m') where
        -- Order-preserving set union of the two descriptor lists.
        -- FIX: the previous version matched @union xs (y:ys)@ but then
        -- recursed only on @ys@, silently dropping the first descriptor
        -- of the right-hand list (and it had no case for @[]@).
        union xs ys = f (reverse xs) ys where
            f acc (y:ys) = if y `elem` acc then f acc ys else f (y:acc) ys
            f acc [] = reverse acc
-- | Construct a 'ParseError' carrying a single 'Message' descriptor.
-- (Type signature added for consistency with the rest of the module.)
msgError :: Pos -> String -> ParseError
msgError pos msg = ParseError pos [Message msg]
-- Comparison operators for ParseError just compare relative positions.
-- Equality/ordering look only at the positions; the descriptor lists
-- are deliberately ignored (wildcards make that explicit).
instance Eq ParseError where
    ParseError p1 _ == ParseError p2 _ = p1 == p2
    ParseError p1 _ /= ParseError p2 _ = p1 /= p2
instance Ord ParseError where
    ParseError p1 _ < ParseError p2 _ = p1 < p2
    ParseError p1 _ > ParseError p2 _ = p1 > p2
    ParseError p1 _ <= ParseError p2 _ = p1 <= p2
    ParseError p1 _ >= ParseError p2 _ = p1 >= p2
    -- Special behavior: "max" joins two errors
    max p1 p2 = joinErrors p1 p2
    -- FIX: 'min' was 'undefined', so any generic code calling 'min' or
    -- 'minimum' on ParseErrors crashed.  Return the earlier-position
    -- error, the natural dual of 'max' preferring the later one.
    min p1 p2 = if p1 <= p2 then p1 else p2
-- Show function for error messages
instance Show ParseError where
    -- No descriptors: fall back to a generic message.
    show (ParseError pos []) =
        show pos ++ ": parse error"
    show (ParseError pos msgs) = expectmsg expects ++ messages msgs
        where
            -- All 'Expected' descriptors, in order of occurrence.
            expects = getExpects msgs
            getExpects [] = []
            getExpects (Expected exp : rest) = exp : getExpects rest
            getExpects (Message msg : rest) = getExpects rest
            -- Render expectations; wording depends on how many
            -- alternatives there are (none / one / two / many).
            expectmsg [] = ""
            expectmsg [exp] = show pos ++ ": expecting " ++ exp ++ "\n"
            expectmsg [e1, e2] = show pos ++ ": expecting either "
                        ++ e1 ++ " or " ++ e2 ++ "\n"
            expectmsg (first : rest) = show pos ++ ": expecting one of: "
                        ++ first ++ expectlist rest
                        ++ "\n"
            -- Only ever called with a non-empty list: the many-element
            -- 'expectmsg' case guarantees at least two alternatives.
            expectlist [last] = ", or " ++ last
            expectlist (mid : rest) = ", " ++ mid ++ expectlist rest
            -- Render the free-form 'Message' descriptors, one per line.
            messages [] = []
            messages (Expected exp : rest) = messages rest
            messages (Message msg : rest) =
                show pos ++ ": " ++ msg ++ "\n" ++ messages rest
-- | Attach an "expected @desc@" annotation to the error in a parse
-- result.  In non-strict mode an error that progressed past @pos@ is
-- kept as-is (it is more specific); otherwise the error is replaced by
-- a single expectation at @pos@.
errorAnnotate :: Bool -> String -> Pos -> Result d v -> Result d v
errorAnnotate isStrict desc pos = munge where
    munge (Parsed v rem err) = Parsed v rem (fix err)
    munge (NoParse err) = NoParse (fix err)
    fix (err @ (ParseError p ms)) =
        if p > pos && not isStrict
        then err else expError pos desc
    -- Replacement error: one expectation at the annotation point.
    expError pos desc = ParseError pos [Expected desc]
| lagleki/tersmu-0.2 | Pappy/Basic.hs | gpl-3.0 | 2,822 | 39 | 14 | 710 | 1,055 | 526 | 529 | 60 | 5 |
module Scene where
import Data.Vect.Float
import qualified Data.Vector as V
-- | A 2D point in scene coordinates.
type Point = Vec2

-- | A scene is (currently) just the collection of its points.
data Scene
  = Scene
  { scenePoints :: V.Vector Point
  }
-- | Initial scene used by the editor.  NOTE(review): despite the name
-- it contains two sample points, presumably as demo content.
-- (Type signature added; it was the only unsignatured top-level here.)
emptyScene :: Scene
emptyScene
  = Scene
  { scenePoints = V.fromList
    [ Vec2 50 100
    , Vec2 200 300
    ]
  }
| mkovacs/yami | editor/src/main/Scene.hs | gpl-3.0 | 274 | 0 | 10 | 89 | 78 | 46 | 32 | 12 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Lamdu.GUI.TaggedList
( Item(..), iTag, iValue, iEventMap, iAddAfter
, Keys(..), kAdd, kOrderBefore, kOrderAfter
, make, makeBody, itemId, delEventMap, addNextEventMap
) where
import qualified Control.Lens as Lens
import Data.List.Extended (withPrevNext)
import GUI.Momentu (ModKey)
import qualified GUI.Momentu.EventMap as E
import GUI.Momentu.EventMap (EventMap)
import qualified GUI.Momentu.I18N as MomentuTexts
import qualified GUI.Momentu.State as GuiState
import qualified GUI.Momentu.Widget as Widget
import qualified GUI.Momentu.Widgets.Menu.Search as SearchMenu
import qualified Lamdu.Config as Config
import qualified Lamdu.GUI.Expr.TagEdit as TagEdit
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import qualified Lamdu.I18N.CodeUI as Texts
import qualified Lamdu.Sugar.Types as Sugar
import Lamdu.Prelude
-- | One row of a tagged list: the tag, the row's payload, the key
-- bindings attached to the row, and an action producing the tag choice
-- for adding a new item after this one.
data Item name i o a = Item
    { _iTag :: Sugar.TagRef name i o
    , _iValue :: a
    , _iEventMap :: EventMap (o GuiState.Update)
    , _iAddAfter :: i (Sugar.TagChoice name o)
    }
Lens.makeLenses ''Item

-- | Key bindings for adding an item and reordering existing ones.
data Keys a = Keys
    { _kAdd :: a
    , _kOrderBefore :: a
    , _kOrderAfter :: a
    } deriving (Functor, Foldable, Traversable)
Lens.makeLenses ''Keys
-- | Build the "add first item" event map together with the rendered
-- items of a 'Sugar.TaggedList'.  @cat@ selects the category text used
-- in key-binding documentation; @prevId@/@nextId@ are the cursor
-- destinations for deletions at either end of the list.
make ::
    _ =>
    Lens.ALens' env Text ->
    Keys [ModKey] ->
    Widget.Id -> Widget.Id ->
    Sugar.TaggedList name i o a ->
    m (EventMap (o GuiState.Update), [Item name i o a])
make cat keys prevId nextId tl =
    (,)
    <$> addNextEventMap cat (keys ^. kAdd) prevId
    <*> foldMap (makeBody cat keys prevId nextId) (tl ^. Sugar.tlItems)
-- | Render the non-empty body of a tagged list: the head item plus the
-- swappable tail items, decorated with "move after" and delete key
-- bindings.
makeBody ::
    _ =>
    Lens.ALens' env Text ->
    Keys [ModKey] ->
    Widget.Id -> Widget.Id ->
    Sugar.TaggedListBody name i o a ->
    m [Item name i o a]
makeBody cat keys prevId nextId items =
    do
        env <- Lens.view id
        -- Bind "move after" on every item that has a successor
        -- (the last item gets 'Nothing' and is left unchanged).
        let addOrderAfter Nothing = id
            addOrderAfter (Just orderAfter) =
                iEventMap <>~
                E.keysEventMap (keys ^. kOrderAfter)
                (E.toDoc env [has . MomentuTexts.edit, cat, has . Texts.moveAfter])
                orderAfter
        -- Attach the delete binding (with prev/next cursor targets) and
        -- strip the delete action out of the item's value pair.
        let addDel (p, n, item) =
                item
                & iEventMap <>~ delEventMap cat (void (item ^. iValue . _1)) p n env
                & iValue %~ (^. _2)
        (:) <$> makeItem cat (keys ^. kAdd) (items ^. Sugar.tlHead)
            <*> traverse (makeSwappableItem cat keys) (items ^. Sugar.tlTail)
            <&> zipWith addOrderAfter orderAfters
            <&> withPrevNext prevId nextId (itemId . (^. iTag))
            <&> Lens.mapped %~ addDel
    where
        -- Swap-with-next actions, aligned with the item list: each
        -- item's "order after" is the successor's swap-with-previous.
        orderAfters =
            (items ^.. Sugar.tlTail . traverse . Sugar.tsiSwapWithPrevious <&> Just) <>
            [Nothing]
-- | Event map for deleting an item: backward deletion moves the cursor
-- to @prevId@, forward deletion to @nextId@; both run @fpDel@.
delEventMap ::
    _ => Lens.ALens' env Text -> o () -> Widget.Id -> Widget.Id -> m (EventMap (o GuiState.Update))
delEventMap cat fpDel prevId nextId =
    Lens.view id <&>
    \env ->
    let dir keys delText dstPosId =
            E.keyPresses (env ^. has . keys)
            (E.toDoc env [has . MomentuTexts.edit, cat, has . delText])
            (GuiState.updateCursor dstPosId <$ fpDel)
    in
    -- TODO: Imports SearchMenu just for deleteBackwards text?
    dir Config.delBackwardKeys SearchMenu.textDeleteBackwards prevId <>
    dir Config.delForwardKeys MomentuTexts.delete nextId

-- | Event map that moves the cursor to @myId@'s "add item" widget when
-- one of @addKeys@ is pressed.
addNextEventMap :: _ => Lens.ALens' env Text -> [ModKey] -> Widget.Id -> m _
addNextEventMap cat addKeys myId =
    Lens.view id <&>
    \env ->
    E.keysEventMapMovesCursor addKeys
    (E.toDoc env [has . MomentuTexts.edit, cat, has . Texts.add])
    (pure (TagEdit.addItemId myId))
-- | Build an 'Item' from a tagged item, attaching the "add next item"
-- key binding.  The item's value is paired with its delete action.
makeItem ::
    _ =>
    Lens.ALens' env Text -> [ModKey] ->
    Sugar.TaggedItem name i o a -> m (Item name i o (o (), a))
makeItem cat addKeys item =
    addNextEventMap cat addKeys (itemId (item ^. Sugar.tiTag)) <&>
    \x ->
    Item
    { _iTag = item ^. Sugar.tiTag
    , _iValue = (item ^. Sugar.tiDelete, item ^. Sugar.tiValue)
    , _iAddAfter = item ^. Sugar.tiAddAfter
    , _iEventMap = x
    }

-- | Like 'makeItem', additionally binding "move before" to swap the
-- item with its predecessor.
makeSwappableItem ::
    _ =>
    Lens.ALens' env Text -> Keys [ModKey] ->
    Sugar.TaggedSwappableItem name i o a -> m (Item name i o (o (), a))
makeSwappableItem cat keys item =
    do
        env <- Lens.view id
        let eventMap =
                E.keysEventMap (keys ^. kOrderBefore)
                (E.toDoc env
                 [has . MomentuTexts.edit, has . Texts.moveBefore])
                (item ^. Sugar.tsiSwapWithPrevious)
        makeItem cat (keys ^. kAdd) (item ^. Sugar.tsiItem)
            <&> iEventMap <>~ eventMap
-- | Widget id of a tagged-list item, derived from its tag instance.
itemId :: Sugar.TagRef name i o -> Widget.Id
itemId item =
    WidgetIds.fromEntityId (item ^. Sugar.tagRefTag . Sugar.tagInstance)
| lamdu/lamdu | src/Lamdu/GUI/TaggedList.hs | gpl-3.0 | 4,727 | 0 | 18 | 1,258 | 1,586 | 849 | 737 | -1 | -1 |
{-
Copyright 2011 Alexander Midgley
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Sqlite.Types where
import qualified Data.ByteString as BS
import Data.List
import Database.HDBC
import Database.HDBC.Sqlite3
-- | A dynamically-typed SQLite value.
data SqliteValue = SqliteNull
                 | SqliteInt {getSqliteInt :: Int}
                 | SqliteReal {getSqliteReal :: Double}
                 | SqliteText {getSqliteText :: String}
                 | SqliteBlob {getSqliteBlob :: BS.ByteString}
                 deriving (Eq, Show)

-- | The declared (non-null) SQLite storage classes.
data SqliteValType = SqliteIntType
                   | SqliteRealType
                   | SqliteTextType
                   | SqliteBlobType
                   deriving (Eq, Show)

-- | Description of a single table column.
data ColumnDesc = ColumnDesc
    {
      colName :: String,
      colType :: SqliteValType,
      colKey :: Bool,       -- ^ True when the column is part of the key
      colNullable :: Bool   -- ^ True when the column may hold NULL
    }
    deriving (Eq, Show)

type TableDesc = (String,        -- Table name
                  [ColumnDesc])  -- Columns

-- | A database handle: either closed, or open with its path,
-- connection and cached table descriptions.
data SqliteDb = SqliteDbClosed
              | SqliteDb
                {
                  dbPath :: FilePath,
                  dbConn :: Connection,
                  dbTables :: [TableDesc]
                }
-- Hand-written Show — presumably because 'Connection' has no Show
-- instance; only the database path is displayed for open handles.
instance Show SqliteDb where
    show SqliteDbClosed = "SqliteDbClosed"
    show db = "SqliteDb " ++ show (dbPath db)
-- | Locate the description of the column with the given name, if any.
findCol :: String -> [ColumnDesc] -> Maybe ColumnDesc
findCol name = find ((name ==) . colName)
-- | Convert an HDBC 'SqlValue' into our tagged representation, using
-- the column's declared type to pick the constructor.  SQL NULL maps
-- to 'SqliteNull' regardless of the declared type.
convertSqlValue :: SqliteValType -> SqlValue -> SqliteValue
convertSqlValue _ SqlNull = SqliteNull
convertSqlValue SqliteIntType val = SqliteInt . fromSql $ val
convertSqlValue SqliteRealType val = SqliteReal . fromSql $ val
convertSqlValue SqliteTextType val = SqliteText . fromSql $ val
convertSqlValue SqliteBlobType val = SqliteBlob . fromSql $ val
| shadwstalkr/sqlite-admin | Sqlite/Types.hs | gpl-3.0 | 2,443 | 0 | 9 | 763 | 393 | 225 | 168 | 40 | 1 |
{-# LANGUAGE ScopedTypeVariables, NoMonomorphismRestriction, FlexibleContexts, ViewPatterns, MultiParamTypeClasses, TypeFamilies, FunctionalDependencies #-}
{-# OPTIONS -Wall #-}
module OrderableFace(
module Math.Group,
module FaceClasses,
OrderableFace(..),
defaultRightActionForOrderedFace,
defaultVerticesForOrderedFace,
forgetVertexOrder,
getVertexOrder,
toOrderedFace,
defaultPackOrderedFaceI,
defaultUnpackOrderedFaceI,
polyprop_OrderableFace
) where
import Math.Group
import FaceClasses
import Data.Monoid
import QuickCheckUtil
import Test.QuickCheck
import Data.Proxy
import TIndex
import Control.Arrow
-- | Parameters:
--
-- * @t@: Face with unordered vertices (also represents faces with vertices ordered canonically)
--
-- * @ot@: Face with ordered vertices
--
-- CONTRACT:
--
-- * @vertices (packOrderedFace x g) == vertices x *. g@
--
-- * @packOrderedFace x (g1 .*. g2) == (packOrderedFace x g1) *. g2@
--
-- * @id == uncurry packOrderedFace . unpackOrderedFace@
--
-- * 'packOrderedFace' and 'unpackOrderedFace' must be inverses of each other (in other words, @ot@ is isomorphic to @(VertexSymGroup t, t)@, but I leave open the possibility to use a more efficient representation in the future)
class ( Group (VertexSymGroup t)
      , RightAction (VertexSymGroup t) (Verts t)
      , RightAction (VertexSymGroup t) ot
      , Vertices t
      , Vertices ot
      , Verts t ~ Verts ot
      )
      => OrderableFace
            t
            ot
    | t -> ot, ot -> t where
    -- | The group of vertex permutations acting on @t@'s vertices.
    type VertexSymGroup t
    -- | Split an ordered face into the unordered face and the
    -- permutation applied to its canonical vertex order.
    unpackOrderedFace :: ot -> (t,VertexSymGroup t)
    -- | Attach a vertex order (permutation) to an unordered face.
    packOrderedFace :: t -> (VertexSymGroup t) -> ot
-- | Canonical 'vertices' implementation for ordered faces: act on the
-- underlying face's vertices with the stored permutation.
defaultVerticesForOrderedFace :: OrderableFace t ot => ot -> Verts t
defaultVerticesForOrderedFace (unpackOrderedFace -> (x,g)) = vertices x *. g

-- | Canonical right action for ordered faces: compose the stored
-- permutation with the acting one.
defaultRightActionForOrderedFace :: OrderableFace t ot => ot -> VertexSymGroup t -> ot
defaultRightActionForOrderedFace (unpackOrderedFace -> (x,g1)) g2 = packOrderedFace x (g1 .*. g2)
-- | QuickCheck property bundle checking the three 'OrderableFace'
-- contract laws for a concrete instance (selected via the 'Proxy').
polyprop_OrderableFace :: forall t ot.
    (Show t, Show ot, Show (VertexSymGroup t), Show (Verts t),
    OrderableFace t ot,
    Arbitrary t, Arbitrary ot, Arbitrary (VertexSymGroup t),
    Eq (Verts t), Eq ot) =>
    Proxy t -> Property
polyprop_OrderableFace _ =
    p1 .&. p2 .&. p3
    where
        -- vertices (pack x g) == vertices x *. g
        p1 (x :: t) g =
            vertices (packOrderedFace x g) .=. vertices x *. g
        -- packing is compatible with group multiplication
        p2 (x :: t) g1 g2 =
            packOrderedFace x (g1 .*. g2) .=. packOrderedFace x g1 *. g2
        -- pack and unpack are inverses
        p3 (ox :: ot) =
            ox .=. uncurry packOrderedFace (unpackOrderedFace ox)
-- | Drop the vertex ordering, keeping the underlying unordered face.
forgetVertexOrder :: OrderableFace t ot => ot -> t
forgetVertexOrder ot = fst (unpackOrderedFace ot)

-- | Extract only the vertex-order permutation of an ordered face.
getVertexOrder :: OrderableFace t ot => ot -> VertexSymGroup t
getVertexOrder ot = snd (unpackOrderedFace ot)

-- | Equivalent to @packOrderedFace x mempty@ (canonical vertex order).
toOrderedFace :: OrderableFace t ot => t -> ot
toOrderedFace x = packOrderedFace x mempty
-- | 'unpackOrderedFace' lifted through an index-carrying face
-- ('HasTIndex'): unpack the underlying face, re-attach the index.
defaultUnpackOrderedFaceI
    :: (HasTIndex ia a, HasTIndex ib b, OrderableFace b a) =>
       ia -> (ib, VertexSymGroup b)
defaultUnpackOrderedFaceI = traverseI first unpackOrderedFace

-- | 'packOrderedFace' lifted through an index-carrying face.
defaultPackOrderedFaceI
    :: (HasTIndex ia a, HasTIndex ib b, OrderableFace a b) =>
       ia -> VertexSymGroup a -> ib
defaultPackOrderedFaceI = traverseI fmap packOrderedFace
| DanielSchuessler/hstri | OrderableFace.hs | gpl-3.0 | 3,378 | 0 | 11 | 776 | 777 | 419 | 358 | 67 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Organizations.DeidentifyTemplates.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a DeidentifyTemplate. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-templates-deid to learn
-- more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.organizations.deidentifyTemplates.get@.
module Network.Google.Resource.DLP.Organizations.DeidentifyTemplates.Get
(
-- * REST Resource
OrganizationsDeidentifyTemplatesGetResource
-- * Creating a Request
, organizationsDeidentifyTemplatesGet
, OrganizationsDeidentifyTemplatesGet
-- * Request Lenses
, odtgXgafv
, odtgUploadProtocol
, odtgAccessToken
, odtgUploadType
, odtgName
, odtgCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- | A resource alias for @dlp.organizations.deidentifyTemplates.get@ method which the
-- 'OrganizationsDeidentifyTemplatesGet' request conforms to.
type OrganizationsDeidentifyTemplatesGetResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] GooglePrivacyDlpV2DeidentifyTemplate
-- | Gets a DeidentifyTemplate. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-templates-deid to learn
-- more.
--
-- /See:/ 'organizationsDeidentifyTemplatesGet' smart constructor.
data OrganizationsDeidentifyTemplatesGet =
  OrganizationsDeidentifyTemplatesGet'
    { _odtgXgafv :: !(Maybe Xgafv)            -- ^ V1 error format
    , _odtgUploadProtocol :: !(Maybe Text)    -- ^ e.g. \"raw\", \"multipart\"
    , _odtgAccessToken :: !(Maybe Text)       -- ^ OAuth access token
    , _odtgUploadType :: !(Maybe Text)        -- ^ legacy upload protocol
    , _odtgName :: !Text                      -- ^ required resource name
    , _odtgCallback :: !(Maybe Text)          -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsDeidentifyTemplatesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'odtgXgafv'
--
-- * 'odtgUploadProtocol'
--
-- * 'odtgAccessToken'
--
-- * 'odtgUploadType'
--
-- * 'odtgName'
--
-- * 'odtgCallback'
organizationsDeidentifyTemplatesGet
    :: Text -- ^ 'odtgName'
    -> OrganizationsDeidentifyTemplatesGet
-- Only the resource name is required; every optional query parameter
-- starts out unset and can be filled in via the lenses below.
organizationsDeidentifyTemplatesGet pOdtgName_ =
  OrganizationsDeidentifyTemplatesGet'
    { _odtgXgafv = Nothing
    , _odtgUploadProtocol = Nothing
    , _odtgAccessToken = Nothing
    , _odtgUploadType = Nothing
    , _odtgName = pOdtgName_
    , _odtgCallback = Nothing
    }
-- | V1 error format.
odtgXgafv :: Lens' OrganizationsDeidentifyTemplatesGet (Maybe Xgafv)
odtgXgafv =
    lens _odtgXgafv (\record value -> record {_odtgXgafv = value})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
odtgUploadProtocol :: Lens' OrganizationsDeidentifyTemplatesGet (Maybe Text)
odtgUploadProtocol =
    lens _odtgUploadProtocol
        (\record value -> record {_odtgUploadProtocol = value})

-- | OAuth access token.
odtgAccessToken :: Lens' OrganizationsDeidentifyTemplatesGet (Maybe Text)
odtgAccessToken =
    lens _odtgAccessToken
        (\record value -> record {_odtgAccessToken = value})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
odtgUploadType :: Lens' OrganizationsDeidentifyTemplatesGet (Maybe Text)
odtgUploadType =
    lens _odtgUploadType
        (\record value -> record {_odtgUploadType = value})

-- | Required. Resource name of the organization and deidentify template to
-- be read, for example
-- \`organizations\/433245324\/deidentifyTemplates\/432452342\` or
-- projects\/project-id\/deidentifyTemplates\/432452342.
odtgName :: Lens' OrganizationsDeidentifyTemplatesGet Text
odtgName =
    lens _odtgName (\record value -> record {_odtgName = value})

-- | JSONP
odtgCallback :: Lens' OrganizationsDeidentifyTemplatesGet (Maybe Text)
odtgCallback =
    lens _odtgCallback (\record value -> record {_odtgCallback = value})
-- Wires the request record onto the REST route: path capture first,
-- then the optional query parameters, always requesting JSON.
instance GoogleRequest
           OrganizationsDeidentifyTemplatesGet
         where
        type Rs OrganizationsDeidentifyTemplatesGet =
             GooglePrivacyDlpV2DeidentifyTemplate
        type Scopes OrganizationsDeidentifyTemplatesGet =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient
          OrganizationsDeidentifyTemplatesGet'{..}
          = go _odtgName _odtgXgafv _odtgUploadProtocol
              _odtgAccessToken
              _odtgUploadType
              _odtgCallback
              (Just AltJSON)
              dLPService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy OrganizationsDeidentifyTemplatesGetResource)
                      mempty
| brendanhay/gogol | gogol-dlp/gen/Network/Google/Resource/DLP/Organizations/DeidentifyTemplates/Get.hs | mpl-2.0 | 5,441 | 0 | 15 | 1,140 | 702 | 413 | 289 | 106 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.Consents.ListRevisions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the revisions of the specified Consent in reverse chronological
-- order.
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.consentStores.consents.listRevisions@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.Consents.ListRevisions
(
-- * REST Resource
ProjectsLocationsDataSetsConsentStoresConsentsListRevisionsResource
-- * Creating a Request
, projectsLocationsDataSetsConsentStoresConsentsListRevisions
, ProjectsLocationsDataSetsConsentStoresConsentsListRevisions
-- * Request Lenses
, pldscsclrXgafv
, pldscsclrUploadProtocol
, pldscsclrAccessToken
, pldscsclrUploadType
, pldscsclrName
, pldscsclrFilter
, pldscsclrPageToken
, pldscsclrPageSize
, pldscsclrCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.consentStores.consents.listRevisions@ method which the
-- 'ProjectsLocationsDataSetsConsentStoresConsentsListRevisions' request conforms to.
type ProjectsLocationsDataSetsConsentStoresConsentsListRevisionsResource
=
"v1" :>
CaptureMode "name" "listRevisions" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListConsentRevisionsResponse
-- | Lists the revisions of the specified Consent in reverse chronological
-- order.
--
-- /See:/ 'projectsLocationsDataSetsConsentStoresConsentsListRevisions' smart constructor.
data ProjectsLocationsDataSetsConsentStoresConsentsListRevisions =
  ProjectsLocationsDataSetsConsentStoresConsentsListRevisions'
    { _pldscsclrXgafv :: !(Maybe Xgafv)                -- ^ V1 error format
    , _pldscsclrUploadProtocol :: !(Maybe Text)        -- ^ e.g. \"raw\"
    , _pldscsclrAccessToken :: !(Maybe Text)           -- ^ OAuth access token
    , _pldscsclrUploadType :: !(Maybe Text)            -- ^ legacy upload protocol
    , _pldscsclrName :: !Text                          -- ^ required Consent name
    , _pldscsclrFilter :: !(Maybe Text)                -- ^ revision filter
    , _pldscsclrPageToken :: !(Maybe Text)             -- ^ pagination token
    , _pldscsclrPageSize :: !(Maybe (Textual Int32))   -- ^ page size limit
    , _pldscsclrCallback :: !(Maybe Text)              -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSetsConsentStoresConsentsListRevisions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldscsclrXgafv'
--
-- * 'pldscsclrUploadProtocol'
--
-- * 'pldscsclrAccessToken'
--
-- * 'pldscsclrUploadType'
--
-- * 'pldscsclrName'
--
-- * 'pldscsclrFilter'
--
-- * 'pldscsclrPageToken'
--
-- * 'pldscsclrPageSize'
--
-- * 'pldscsclrCallback'
projectsLocationsDataSetsConsentStoresConsentsListRevisions
    :: Text -- ^ 'pldscsclrName'
    -> ProjectsLocationsDataSetsConsentStoresConsentsListRevisions
-- Only the Consent resource name is required; the remaining query
-- parameters default to unset and are settable via the lenses below.
projectsLocationsDataSetsConsentStoresConsentsListRevisions pPldscsclrName_ =
  ProjectsLocationsDataSetsConsentStoresConsentsListRevisions'
    { _pldscsclrXgafv = Nothing
    , _pldscsclrUploadProtocol = Nothing
    , _pldscsclrAccessToken = Nothing
    , _pldscsclrUploadType = Nothing
    , _pldscsclrName = pPldscsclrName_
    , _pldscsclrFilter = Nothing
    , _pldscsclrPageToken = Nothing
    , _pldscsclrPageSize = Nothing
    , _pldscsclrCallback = Nothing
    }
-- | V1 error format.
pldscsclrXgafv :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Xgafv)
pldscsclrXgafv =
    lens _pldscsclrXgafv
        (\record value -> record {_pldscsclrXgafv = value})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldscsclrUploadProtocol :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Text)
pldscsclrUploadProtocol =
    lens _pldscsclrUploadProtocol
        (\record value -> record {_pldscsclrUploadProtocol = value})

-- | OAuth access token.
pldscsclrAccessToken :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Text)
pldscsclrAccessToken =
    lens _pldscsclrAccessToken
        (\record value -> record {_pldscsclrAccessToken = value})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldscsclrUploadType :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Text)
pldscsclrUploadType =
    lens _pldscsclrUploadType
        (\record value -> record {_pldscsclrUploadType = value})

-- | Required. The resource name of the Consent to retrieve revisions for.
pldscsclrName :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions Text
pldscsclrName =
    lens _pldscsclrName
        (\record value -> record {_pldscsclrName = value})
-- | Optional. Restricts the revisions returned to those matching a filter.
-- The following syntax is available: * A string field value can be written
-- as text inside quotation marks, for example \`\"query text\"\`. The only
-- valid relational operation for text fields is equality (\`=\`), where
-- text is searched within the field, rather than having the field be equal
-- to the text. For example, \`\"Comment = great\"\` returns messages with
-- \`great\` in the comment field. * A number field value can be written as
-- an integer, a decimal, or an exponential. The valid relational operators
-- for number fields are the equality operator (\`=\`), along with the less
-- than\/greater than operators (\`\<\`, \`\<=\`, \`>\`, \`>=\`). Note that
-- there is no inequality (\`!=\`) operator. You can prepend the \`NOT\`
-- operator to an expression to negate it. * A date field value must be
-- written in \`yyyy-mm-dd\` form. Fields with date and time use the
-- RFC3339 time format. Leading zeros are required for one-digit months and
-- days. The valid relational operators for date fields are the equality
-- operator (\`=\`) , along with the less than\/greater than operators
-- (\`\<\`, \`\<=\`, \`>\`, \`>=\`). Note that there is no inequality
-- (\`!=\`) operator. You can prepend the \`NOT\` operator to an expression
-- to negate it. * Multiple field query expressions can be combined in one
-- query by adding \`AND\` or \`OR\` operators between the expressions. If
-- a boolean operator appears within a quoted string, it is not treated as
-- special, it\'s just another part of the character string to be matched.
-- You can prepend the \`NOT\` operator to an expression to negate it.
-- Fields available for filtering are: - user_id. For example,
-- \`filter=\'user_id=\"user123\"\'\`. - consent_artifact - state -
-- revision_create_time - metadata. For example,
-- \`filter=Metadata(\\\"testkey\\\")=\\\"value\\\"\` or
-- \`filter=HasMetadata(\\\"testkey\\\")\`.
pldscsclrFilter :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Text)
pldscsclrFilter =
    lens _pldscsclrFilter
        (\record value -> record {_pldscsclrFilter = value})

-- | Optional. Token to retrieve the next page of results or empty if there
-- are no more results in the list.
pldscsclrPageToken :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Text)
pldscsclrPageToken =
    lens _pldscsclrPageToken
        (\record value -> record {_pldscsclrPageToken = value})

-- | Optional. Limit on the number of revisions to return in a single
-- response. If not specified, 100 is used. May not be larger than 1000.
pldscsclrPageSize :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Int32)
pldscsclrPageSize =
    lens _pldscsclrPageSize
        (\record value -> record {_pldscsclrPageSize = value})
        . mapping _Coerce

-- | JSONP
pldscsclrCallback :: Lens' ProjectsLocationsDataSetsConsentStoresConsentsListRevisions (Maybe Text)
pldscsclrCallback =
    lens _pldscsclrCallback
        (\record value -> record {_pldscsclrCallback = value})
-- Wires the request record onto the REST route: path capture first,
-- then the optional query parameters, always requesting JSON.
instance GoogleRequest
           ProjectsLocationsDataSetsConsentStoresConsentsListRevisions
         where
        type Rs
               ProjectsLocationsDataSetsConsentStoresConsentsListRevisions
             = ListConsentRevisionsResponse
        type Scopes
               ProjectsLocationsDataSetsConsentStoresConsentsListRevisions
             = '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient
          ProjectsLocationsDataSetsConsentStoresConsentsListRevisions'{..}
          = go _pldscsclrName _pldscsclrXgafv
              _pldscsclrUploadProtocol
              _pldscsclrAccessToken
              _pldscsclrUploadType
              _pldscsclrFilter
              _pldscsclrPageToken
              _pldscsclrPageSize
              _pldscsclrCallback
              (Just AltJSON)
              healthcareService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsLocationsDataSetsConsentStoresConsentsListRevisionsResource)
                      mempty
| brendanhay/gogol | gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/ConsentStores/Consents/ListRevisions.hs | mpl-2.0 | 9,819 | 0 | 18 | 1,898 | 990 | 585 | 405 | 148 | 1 |
{-
Habit of Fate, a game to incentivize habit formation.
Copyright (C) 2017 Gregory Crosswhite
This program is free software: you can redistribute it and/or modify
it under version 3 of the terms of the GNU Affero General Public License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE UnicodeSyntax #-}
module HabitOfFate.Server.Requests.Api.GetMarks (handler) where
import HabitOfFate.Prelude
import Network.HTTP.Types.Status (ok200)
import Web.Scotty (ScottyM)
import qualified Web.Scotty as Scotty
import HabitOfFate.Data.Account
import HabitOfFate.Server.Common
import HabitOfFate.Server.Transaction
handler ∷ Environment → ScottyM ()
handler environment =
Scotty.get "/api/marks" <<< apiTransaction environment $ do
log "Requested marks."
use marks_ <&> jsonResult ok200
| gcross/habit-of-fate | sources/library/HabitOfFate/Server/Requests/Api/GetMarks.hs | agpl-3.0 | 1,292 | 0 | 9 | 225 | 129 | 75 | 54 | 17 | 1 |
import Data.Array
import Text.Printf
import Text.Regex.PCRE
-- | Reformat a phone number of the form "CC LAC NUMBER" (fields
-- separated by a space or dash) into the labelled output format.
-- NOTE(review): assumes the line matches the regex; on a non-matching
-- line the MatchArray is empty and indexing it would raise an error.
parse :: String -> String
parse s = printf str (sub 1) (sub 2) (sub 3)
    where str = "CountryCode=%s,LocalAreaCode=%s,Number=%s"
          m = s =~ "^(\\d{1,3})[ -](\\d{1,3})[ -](\\d{4,10})$" :: MatchArray
          -- Capture group @i@ is stored as an (offset, length) pair;
          -- slice it back out of the original input string.
          sub = (\(i, l) -> take l (drop i s)) . (!) m
-- | Read a count @n@ from the first line, then reformat the next @n@
-- lines of input as labelled phone numbers.
main = do
    n <- readLn
    c <- getContents
    let xs = take n $ lines c
    putStr $ unlines $ map parse xs
| itsbruce/hackerrank | alg/regex/splitPhoneNumbers.hs | unlicense | 438 | 15 | 9 | 114 | 174 | 90 | 84 | 13 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Parse
( loadER
)
where
import Prelude hiding (null)
import Control.Monad (liftM2, when, void)
import Data.Char (isAlphaNum, isSpace)
import Data.List (find)
import qualified Data.Map as M
import Data.Maybe
import Data.Text.Lazy hiding (find, map, reverse)
import Data.Text.Lazy.IO
import System.IO (Handle)
import Text.Parsec
import Text.Parsec.Text.Lazy
import Text.Printf (printf)
import ER
-- | A single top-level syntactic element of an ER description.
data AST = E Entity
         | A Attribute
         | R Relation
         deriving Show

-- | Formatting options declared globally, grouped by the kind of
-- element they apply to: title, header, entity and relationship.
data GlobalOptions = GlobalOptions { gtoptions :: Options
                                   , ghoptions :: Options
                                   , geoptions :: Options
                                   , groptions :: Options
                                   }
                     deriving Show

-- | No global options set for any element kind.
emptyGlobalOptions :: GlobalOptions
emptyGlobalOptions = GlobalOptions M.empty M.empty M.empty M.empty
-- | Parse an ER description from the given handle.  @fpath@ is used
-- only for error reporting.  Returns either a parse/validation error
-- rendered as a string, or the resulting 'ER'.
loadER :: String -> Handle -> IO (Either String ER)
loadER fpath f = do
  s <- hGetContents f
  case parse (do { (opts, ast) <- document; return $ toER opts ast}) fpath s of
    Left err -> return $ Left $ show err
    -- Parsing succeeded but semantic conversion failed: pass it through.
    Right err@(Left _) -> return err
    Right (Right er) -> return $ Right er
-- | Converts a list of syntactic categories in an entity-relationship
-- description to an ER representation. If there was a problem with the
-- conversion, an error is reported. This includes checking that each
-- relationship contains only valid entity names.
--
-- This preserves the ordering of the syntactic elements in the original
-- description.
toER :: GlobalOptions -> [AST] -> Either String ER
toER gopts = toER' (ER [] [] title)
  where title = gtoptions gopts `mergeOpts` defaultTitleOpts

        -- Fold the syntactic elements into the ER, prepending as we go
        -- (order is restored by 'reversed' at the end).
        toER' :: ER -> [AST] -> Either String ER
        toER' er [] = Right (reversed er) >>= validRels
        toER' (ER { entities = [] }) (A a:_) =
          let name = show (field a)
          in Left $ printf "Attribute '%s' comes before first entity." name
        -- An attribute belongs to the most recently declared entity.
        toER' er@(ER { entities = e':es }) (A a:xs) = do
          let e = e' { attribs = a:attribs e' }
          toER' (er { entities = e:es }) xs
        -- Entities: layer local options over globals over defaults.
        toER' er@(ER { entities = es }) (E e:xs) = do
          let opts = eoptions e
                     `mergeOpts` geoptions gopts
                     `mergeOpts` defaultEntityOpts
          let hopts = eoptions e
                      `mergeOpts` ghoptions gopts
                      `mergeOpts` defaultHeaderOpts
          toER' (er { entities = e { eoptions = opts, hoptions = hopts }:es}) xs
        toER' er@(ER { rels = rs }) (R r:xs) = do
          let opts = roptions r
                     `mergeOpts` groptions gopts
                     `mergeOpts` defaultRelOpts
          toER' (er { rels = r { roptions = opts }:rs }) xs

        -- Restore the original source order (elements were prepended).
        reversed :: ER -> ER
        reversed er@(ER { entities = es, rels = rs }) =
          let es' = map (\e -> e { attribs = reverse (attribs e) }) es
          in er { entities = reverse es', rels = reverse rs }

        validRels :: ER -> Either String ER
        validRels er = validRels' (rels er) er

        -- Check that every relationship references known entity names.
        validRels' :: [Relation] -> ER -> Either String ER
        validRels' [] er = return er
        validRels' (r:rs) er = do
          let r1 = find (\e -> name e == entity1 r) (entities er)
          let r2 = find (\e -> name e == entity2 r) (entities er)
          let err getter = Left
                           $ printf "Unknown entity '%s' in relationship."
                           $ unpack $ getter r
          when (isNothing r1) (err entity1)
          when (isNothing r2) (err entity2)
          -- FIX: previously only the head of the list was validated
          -- (the pattern was @(r:_)@ and the function returned without
          -- recursing), so bad entity names in later relationships were
          -- silently accepted.  Validate the remainder too.
          validRels' rs er
-- | The top-level parser: leading comments/blank lines, then the global
-- option directives, then every entity/attribute/relationship
-- declaration until end of input. Comment and blank lines yield
-- 'Nothing' and are dropped by 'catMaybes'.
document :: Parser (GlobalOptions, [AST])
document = do skipMany (comment <|> blanks)
              opts <- globalOptions emptyGlobalOptions
              ast <- fmap catMaybes $ manyTill top eof
              return (opts, ast)
  where top = (entity <?> "entity declaration")
              <|> (try rel <?> "relationship") -- must come before attr
              <|> (try attr <?> "attribute")
              <|> (comment <?> "comment")
              <|> blanks
        blanks = many1 (space <?> "whitespace") >> return Nothing
-- | Parses an entity declaration: a name in square brackets, optionally
-- followed by an options block. The parsed options are used for both
-- the header ('hoptions') and the body ('eoptions') of the entity.
entity :: Parser (Maybe AST)
entity = do n <- between (char '[') (char ']') ident
            spacesNoNew
            opts <- options
            eolComment
            return $ Just $ E Entity { name = n, attribs = [],
                                       hoptions = opts, eoptions = opts }
-- | Parses an attribute line. A leading @*@ marks a primary key and a
-- leading @+@ a foreign key; both markers may appear, interleaved with
-- inline whitespace, in any order.
attr :: Parser (Maybe AST)
attr = do
  keys <- many $ oneOf "*+ \t"
  let (ispk, isfk) = ('*' `elem` keys, '+' `elem` keys)
  n <- ident
  opts <- options
  eolComment
  return
    $ Just
    $ A Attribute { field = n, pk = ispk, fk = isfk, aoptions = opts }
-- | Parses a relationship line of the form @E1 c1--c2 E2@, where each
-- cardinality marker is one of @?@, @1@, @*@ or @+@, optionally
-- followed by an options block.
rel :: Parser (Maybe AST)
rel = do
  let ops = "?1*+"
  e1 <- ident
  op1 <- oneOf ops
  string "--"
  op2 <- oneOf ops
  e2 <- ident
  opts <- options
  -- Translate a marker character to a cardinality; oneOf already
  -- restricts the input, so the Nothing branch guards cardByName.
  let getCard op =
        case cardByName op of
          Just t -> return t
          Nothing -> unexpected (printf "Cardinality '%s' does not exist." op)
  t1 <- getCard op1
  t2 <- getCard op2
  return $ Just $ R Relation { entity1 = e1, entity2 = e2
                             , card1 = t1, card2 = t2, roptions = opts }
-- | Parses the optional global directives (@title@, @header@, @entity@,
-- @relationship@) at the top of the file, accumulating their option
-- maps. Succeeds with the accumulated value at the first line that is
-- not a directive (the 'try' backtracks so that line can be re-parsed).
globalOptions :: GlobalOptions -> Parser GlobalOptions
globalOptions gopts =
  option gopts $ try $ do
    n <- ident
    opts <- options
    case n of
      "title" -> emptiness >> globalOptions (gopts { gtoptions = opts})
      "header" -> emptiness >> globalOptions (gopts { ghoptions = opts})
      "entity" -> emptiness >> globalOptions (gopts { geoptions = opts})
      "relationship" -> emptiness >> globalOptions (gopts { groptions = opts})
      _ -> fail "not a valid directive"
-- | Parses an optional, comma-separated list of options enclosed in
-- curly braces. Yields an empty map when no option block is present.
options :: Parser (M.Map String Option)
options =
  option M.empty
  $ fmap M.fromList
  $ try
  $ between (char '{' >> emptiness) (emptiness >> char '}')
  $ opt `sepEndBy` (emptiness >> char ',' >> emptiness)
-- | Parses a single @name: "value"@ option. The name starts with a
-- letter and may contain letters and dashes; the value is validated and
-- converted by 'optionByName', whose error aborts the parse.
opt :: Parser (String, Option)
opt = do
  name <- liftM2 (:) letter (manyTill (letter <|> char '-') (char ':'))
          <?> "option name"
  emptiness
  value <- between (char '"') (char '"') (many $ noneOf "\"")
           <?> "option value"
  case optionByName name value of
    Left err -> fail err
    Right o' -> emptiness >> return (name, o')
-- | Skips a line comment: everything from @#@ to the end of the line.
-- Always yields 'Nothing' so comments drop out of the AST.
comment :: Parser (Maybe AST)
comment = char '#' >> manyTill anyChar (try eol) >> return Nothing
-- | Parses an identifier: one or more letters, digits or underscores,
-- consuming surrounding inline (non-newline) whitespace.
ident :: Parser Text
ident = do
  spacesNoNew
  let p = satisfy (\c -> c == '_' || isAlphaNum c)
          <?> "letter, digit or underscore"
  n <- fmap pack (many1 p)
  spacesNoNew
  return n
-- | Skips any run of whitespace (including newlines) and line comments.
emptiness :: Parser ()
emptiness = skipMany (blank <|> eolComment)
  where blank = many1 space >> return ()
-- | Consumes trailing inline whitespace followed by either a line end
-- or a @#@ comment.
eolComment :: Parser ()
eolComment = spacesNoNew >> (eol <|> void comment)
-- | Skips whitespace on the current line only: any space character
-- except '\n' and '\r'.
spacesNoNew :: Parser ()
spacesNoNew = skipMany (satisfy inlineSpace)
  where inlineSpace c = isSpace c && c /= '\n' && c /= '\r'
-- | Parses a line ending: end of input, '\n', '\r', or "\r\n".
eol :: Parser ()
eol = eof <|> do
  c <- oneOf "\n\r"
  -- Swallow the '\n' of a Windows "\r\n" pair, if present.
  when (c == '\r') $ optional $ char '\n'
{-# LANGUAGE OverloadedStrings, UnicodeSyntax #-}
module Network.HTTP.Link.ParserSpec where
import Test.Hspec
import Test.Hspec.Attoparsec
import Data.Text
import Data.Maybe (fromJust)
import Network.HTTP.Link (lnk)
import Network.HTTP.Link.Types
import Network.HTTP.Link.Parser
import Network.URI (URI)
import Data.Attoparsec.Text (Parser)
spec ∷ Spec
spec = do
describe "linkHeader" $ do
let l u r = fromJust $ lnk u r
it "parses a single link" $ do
("<http://example.com>; rel=\"example\"" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Rel, "example")] ]
it "parses empty attributes" $ do
("<http://example.com>; title=\"\"" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Title, "")] ]
it "parses custom attributes" $ do
("<http://example.com>; weirdThingy=\"something\"" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Other "weirdThingy", "something")] ]
it "parses backslash escaped attributes" $ do
("<http://example.com>; title=\"some \\\" thing \\\"\"" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Title, "some \" thing \"")] ]
it "parses escaped attributes" $ do
("<http://example.com>; title=\"some %22 thing %22\"" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Title, "some \" thing \"")] ]
it "parses multiple attributes" $ do
("<http://example.com>; rel=\"example\"; title=\"example dot com\"" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Rel, "example"), (Title, "example dot com")] ]
it "parses custom attributes named similarly to standard ones" $ do
-- this was caught by QuickCheck! <3
("<http://example.com>; rel=hello; relAtion=\"something\"; rev=next" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Rel, "hello"), (Other "relAtion", "something"), (Rev, "next")] ]
it "parses unquoted rel, rev attributes" $ do
("<http://example.com>; rel=next; rev=prev" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Rel, "next"), (Rev, "prev")] ]
it "does not blow up on title*" $ do
("<http://example.com>; title*=UTF-8'de'n%c3%a4chstes%20Kapitel" ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Title', "UTF-8'de'n%c3%a4chstes%20Kapitel")] ]
it "parses weird whitespace all over the place" $ do
("\n\t < http://example.com\t>;rel=\t\"example\"; \ttitle =\"example dot com\" \n " ∷ Text) ~> linkHeaderURI
`shouldParse` [ l "http://example.com" [(Rel, "example"), (Title, "example dot com")] ]
where
linkHeaderURI = linkHeader :: Parser [Link URI]
| myfreeweb/http-link-header | test-suite/Network/HTTP/Link/ParserSpec.hs | unlicense | 2,858 | 0 | 18 | 618 | 655 | 366 | 289 | 46 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- A number of utilities related to data sets and dataframes.
module Spark.Core.Internal.FunctionsInternals(
DynColPackable,
StaticColPackable2,
NameTuple(..),
TupleEquivalence(..),
asCol,
asCol',
pack1,
pack,
pack',
struct',
struct,
-- Developer tools
checkOrigin,
projectColFunction,
projectColFunction',
projectColFunction2',
colOpNoBroadcast
) where
import qualified Data.Vector as V
import qualified Data.Map.Strict as M
import qualified Data.List.NonEmpty as N
import qualified Data.Text as T
import Control.Arrow
import Formatting
import Spark.Core.Internal.ColumnStructures
import Spark.Core.Internal.ColumnFunctions
import Spark.Core.Internal.DatasetFunctions
import Spark.Core.Internal.DatasetStd(broadcastPair)
import Spark.Core.Internal.DatasetStructures
import Spark.Core.Internal.Utilities
import Spark.Core.Internal.TypesFunctions
import Spark.Core.Internal.LocalDataFunctions
import Spark.Core.Internal.TypesStructures
import Spark.Core.Internal.Projections
import Spark.Core.Internal.OpStructures
import Spark.Core.Internal.TypesGenerics(SQLTypeable, buildType)
import Spark.Core.StructuresInternal
import Spark.Core.Try
{-| The class of types whose values can be packed into a single dynamic
(untyped) column.
This class is only inhabited by some internal types: lists, tuples, etc.
-}
class DynColPackable a where
-- Returns (possibly) some form of the type a packed into a single column.
-- This implementation must make sure that the final column is either a
-- failure or is well-formed (no name duplicates, etc.)
_packAsColumn :: a -> Column'
{-| The class of pairs of types that express the fact that some type a can
be converted to a dataset of type b.
This class is meant to be extended by users to create converters associated
to their data types.
-}
class StaticColPackable2 ref a b | a -> ref where
_staticPackAsColumn2 :: a -> Column ref b
data NameTuple to = NameTuple [String]
{-| A class that expresses the fact that a certain type (that is well-formed)
is equivalent to a tuple of points.
Useful for auto conversions between tuples of columns and data structures.
-}
class TupleEquivalence to tup | to -> tup where
tupleFieldNames :: NameTuple to
-- Here is the basic algorithm:
-- - datasets can only contain rows of things
-- - columns and observables contain cells (which may be empty)
-- - a strict struct cell is equivalent to a row
-- - a non-strict or non-struct cell is equivalent to a row with a single item
-- - as a consequence, there is no "row with a unique field". This is equivalent
-- to the element inside the field
-- Invariants to respect in terms of types (not in terms of values)
-- untypedCol . asCol == asCol'
-- pack1 . asCol == asCol . pack1
-- for single columns, pack = Right . pack1
-- The typed function
-- This only works for inner types that are known to the Haskell type system
-- fun :: (SQLTypeable a, HasCallStack) => Column a -> Column a -> Column a
-- fun = undefined
-- The untyped equivalent
-- Each of the inputs can be either a column or a try, and the final outcome is always a try
-- When both types are known to the type system, the 2 calls are equivalent
-- fun' :: (ColumnLike a1, ColumnLike a2, HasCallStack) => a1 -> a2 -> Try Column'
-- fun' = undefined
-- | Represents a dataframe as a single column.
asCol :: Dataset a -> Column a a
asCol ds =
-- Simply recast the dataset as a column.
-- The empty path indicates that we are wrapping the whole thing.
iEmptyCol ds (unsafeCastType $ nodeType ds) (FieldPath V.empty)
-- | Untyped variant of 'asCol': wraps a whole dataframe as one column.
asCol' :: DataFrame -> Column'
asCol' df = column' (fmap (iUntypedColData . asCol) df)
-- | Packs a single column into a dataframe.
pack1 :: Column ref a -> Dataset a
pack1 = _pack1
{-| Packs a number of columns into a single dataframe.
This operation is checked for same origin and no duplication of columns.
This function accepts columns, list of columns and tuples of columns (both
typed and untyped).
-}
pack' :: (DynColPackable a) => a -> DataFrame
-- Pack the columns and check that they have the same origin.
pack' z = pack1 <$> (unColumn' . _packAsColumn $ z)
{-| Packs a number of columns with the same references into a single dataset.
The type of the dataset must be provided in order to have proper type inference.
TODO: example.
-}
pack :: forall ref a b. (StaticColPackable2 ref a b) => a -> Dataset b
pack z =
let c = _staticPackAsColumn2 z :: ColumnData ref b
in pack1 c
{-| Packs a number of columns into a single column (the struct construct).
Columns must have different names, or an error is returned.
-}
struct' :: [Column'] -> Column'
struct' cols = column' $ do
  l <- sequence (unColumn' <$> cols)
  -- Pair each column with its field name; _buildStruct then rejects
  -- mixed origins (duplicate names surface through the struct type).
  let fields = (colFieldName &&& id) <$> l
  _buildStruct fields
{-| Packs a number of columns into a single structure, given a return type.
The field names of the columns are discarded, and replaced by the field names
of the structure.
-}
struct :: forall ref a b. (StaticColPackable2 ref a b) => a -> Column ref b
struct = _staticPackAsColumn2
-- | Resolves a list of dynamic columns and checks that they all share
-- a single origin dataset.
checkOrigin :: [Column'] -> Try [UntypedColumnData]
checkOrigin cols = mapM unColumn' cols >>= _checkOrigin
{-| Takes a typed function that operates on columns and projects this function
onto a similar operation for type observables.
This function is not very smart and may throw an error for complex cases such
as broadcasting, joins, etc.
-}
-- TODO: we do not need technically the typeable constraint.
-- It is an additional check.
projectColFunction :: forall x y.
(HasCallStack, SQLTypeable y, SQLTypeable x) =>
(forall ref. Column ref x -> Column ref y) -> LocalData x -> LocalData y
projectColFunction f o =
let o' = untypedLocalData o
sqltx = buildType :: SQLType x
sqlty = buildType :: SQLType y
f' :: UntypedColumnData -> Column'
f' x = column' $ dropColType . f <$> castTypeCol sqltx x
f'' :: Column' -> Column'
f'' x = tryCol' $ f' <$> unColumn' x
o2 = unObservable' $ projectColFunctionUntyped f'' o'
o3 = castType sqlty =<< o2
in forceRight o3
projectColFunctionUntyped ::
(Column' -> Column') -> UntypedLocalData -> LocalFrame
projectColFunctionUntyped f obs = Observable' $ do
-- Create a placeholder dataset and a corresponding column.
let dt = unSQLType (nodeType obs)
-- Pass them to the function.
let no = NodeDistributedLit dt V.empty
let ds = emptyDataset no (SQLType dt)
let c = asCol ds
colRes <- unColumn' $ f (untypedCol (dropColType c))
let dtOut = unSQLType $ colType colRes
-- This will fail if there is a broadcast.
co <- _replaceObservables M.empty (colOp colRes)
let op = NodeStructuredTransform co
return $ emptyLocalData op (SQLType dtOut)
`parents` [untyped obs]
{-| Takes a function that operates on columns, and projects this
function onto the same operations for observables.
This is not very smart at the moment and will miss the more
complex operations such as broadcasting, etc.
-}
-- TODO: use for the numerical transforms instead of special stuff.
projectColFunction' ::
(Column' -> Column') ->
LocalFrame -> LocalFrame
projectColFunction' f obs = Observable' $ do
cd <- unObservable' obs
let x = projectColFunctionUntyped f cd
unObservable' x
projectColFunction2' ::
(Column' -> Column' -> Column') ->
LocalFrame ->
LocalFrame ->
LocalFrame
projectColFunction2' f o1' o2' = obsTry $ do
let f2 :: Column' -> Column'
f2 dc = f (dc /- "_1") (dc /- "_2")
o1 <- unObservable' o1'
o2 <- unObservable' o2'
let o = iPackTupleObs $ o1 N.:| [o2]
return $ projectColFunctionUntyped f2 o
-- | Converts a generalized column operation to a plain 'ColOp',
-- failing if it still contains broadcast (observable) references
-- (the empty substitution map makes any 'BroadcastColOp' an error).
colOpNoBroadcast :: GeneralizedColOp -> Try ColOp
colOpNoBroadcast = _replaceObservables M.empty
-- {-| Low-level operator that takes an observable and propagates it along the
-- content of an existing dataset.
--
-- Users are advised to use the Column-based `broadcast` function instead.
-- -}
-- broadcastPair :: Dataset a -> LocalData b -> Dataset (a, b)
-- broadcastPair ds ld = n `parents` [untyped ds, untyped ld]
-- where n = emptyNodeStandard (nodeLocality ds) sqlt name
-- sqlt = tupleType (nodeType ds) (nodeType ld)
-- name = "org.spark.BroadcastPair"
-- | Ensures all given columns come from a single origin dataset.
-- The empty list is trivially valid; otherwise exactly one distinct
-- origin is required.
_checkOrigin :: [UntypedColumnData] -> Try [UntypedColumnData]
_checkOrigin [] = pure []
_checkOrigin l =
  case _columnOrigin l of
    [_] -> pure l
    l' -> tryError $ sformat ("Too many distinct origins: "%sh) l'
instance forall x. (DynColPackable x) => DynColPackable [x] where
_packAsColumn = struct' . (_packAsColumn <$>)
instance DynColPackable Column' where
_packAsColumn = id
instance forall ref a. DynColPackable (Column ref a) where
_packAsColumn = untypedCol . iUntypedColData
instance forall z1 z2. (DynColPackable z1, DynColPackable z2) => DynColPackable (z1, z2) where
_packAsColumn (c1, c2) = struct' [_packAsColumn c1, _packAsColumn c2]
-- ******** Experimental ************
instance forall ref a. StaticColPackable2 ref (Column ref a) a where
_staticPackAsColumn2 = id
-- Tuples are equivalent to tuples
instance forall a1 a2. TupleEquivalence (a1, a2) (a1, a2) where
tupleFieldNames = NameTuple ["_1", "_2"]
-- The equations that bind column packable stuff through their tuple equivalents
instance forall ref b a1 a2 z1 z2. (
TupleEquivalence b (a1, a2),
StaticColPackable2 ref z1 a1,
StaticColPackable2 ref z2 a2) =>
StaticColPackable2 ref (z1, z2) b where
_staticPackAsColumn2 (c1, c2) =
let
x1 = iUntypedColData (_staticPackAsColumn2 c1 :: Column ref a1)
x2 = iUntypedColData (_staticPackAsColumn2 c2 :: Column ref a2)
names = tupleFieldNames :: NameTuple b
in _unsafeBuildStruct [x1, x2] names
instance forall ref b a1 a2 a3 z1 z2 z3. (
TupleEquivalence b (a1, a2, a3),
StaticColPackable2 ref z1 a1,
StaticColPackable2 ref z2 a2,
StaticColPackable2 ref z3 a3) =>
StaticColPackable2 ref (z1, z2, z3) b where
_staticPackAsColumn2 (c1, c2, c3) =
let
x1 = iUntypedColData (_staticPackAsColumn2 c1 :: Column ref a1)
x2 = iUntypedColData (_staticPackAsColumn2 c2 :: Column ref a2)
x3 = iUntypedColData (_staticPackAsColumn2 c3 :: Column ref a3)
names = tupleFieldNames :: NameTuple b
in _unsafeBuildStruct [x1, x2, x3] names
_unsafeBuildStruct :: [UntypedColumnData] -> NameTuple x -> Column ref x
_unsafeBuildStruct cols (NameTuple names) =
if length cols /= length names
then failure $ sformat ("The number of columns and names differs:"%sh%" and "%sh) cols names
else
let fnames = unsafeFieldName . T.pack <$> names
uc = _buildStruct (fnames `zip` cols)
z = forceRight uc
in z { _cOp = _cOp z }
-- | Packs a list of columns as a single tuple-like struct column, with
-- fields named \"_1\", \"_2\", ... following the tuple-field convention
-- used elsewhere in this module (see 'tupleFieldNames' and
-- 'structTypeTuple').
_buildTuple :: [UntypedColumnData] -> Try UntypedColumnData
_buildTuple l = _buildStruct (names `zip` l) where
  -- BUG FIX: the previous version built a single field name out of the
  -- rendered index list ("_[0,1,...]"), so the zip truncated the struct
  -- to its first column; it also started numbering at 0 instead of 1.
  -- Generate one "_i" name per column, starting at 1.
  names = unsafeFieldName . ("_" <>) . show' <$> [1..length l]
_buildStruct :: [(FieldName, UntypedColumnData)] -> Try UntypedColumnData
_buildStruct cols = do
let fields = GenColStruct $ (uncurry GeneralizedTransField . (fst &&& colOp . snd)) <$> V.fromList cols
st <- structTypeFromFields $ (fst &&& unSQLType . colType . snd) <$> cols
let name = structName st
case _columnOrigin (snd <$> cols) of
[ds] ->
pure ColumnData {
_cOrigin = ds,
_cType = StrictType (Struct st),
_cOp = fields,
_cReferingPath = Just $ unsafeFieldName name
}
l -> tryError $ sformat ("_buildStruct: Too many distinct origins: "%sh) l
-- | The distinct origin datasets of a list of columns: columns are
-- grouped by the node id of their origin and one representative origin
-- is returned per group.
_columnOrigin :: [UntypedColumnData] -> [UntypedDataset]
_columnOrigin l =
  let
    groups = myGroupBy' (nodeId . colOrigin) l
  in (colOrigin . N.head . snd) <$> groups
-- The packing algorithm
-- It eliminates the broadcast variables into joins and then wraps the
-- remaining transform into structured transform.
-- TODO: the data structure and the algorithms use unsafe operations
-- It should be transfromed to safe operations eventually.
_pack1 :: (HasCallStack) => Column ref a -> Dataset a
_pack1 ucd =
let gco = colOp ucd
ulds = _collectObs gco
in case ulds of
[] -> let co = forceRight $ colOpNoBroadcast gco in
_packCol1 ucd co
(h : t) -> forceRight $ _packCol1WithObs ucd (h N.:| t)
_packCol1WithObs :: Column ref a -> N.NonEmpty UntypedLocalData -> Try (Dataset a)
_packCol1WithObs c ulds = do
let packedObs = iPackTupleObs ulds
-- Retrieve the field names in the pack structure.
let st = structTypeTuple (unSQLType . nodeType <$> ulds)
let names = V.toList $ structFieldName <$> structFields st
let paths = FieldPath . V.fromList . (unsafeFieldName "_2" : ) . (:[]) <$> names
let m = M.fromList ((nodeId <$> N.toList ulds) `zip` paths)
let joined = broadcastPair (colOrigin c) packedObs
co <- _replaceObservables m (colOp c)
let no = NodeStructuredTransform co
let f = emptyDataset no (colType c) `parents` [untyped joined]
return f
_replaceObservables :: M.Map NodeId FieldPath -> GeneralizedColOp -> Try ColOp
-- Special case for when there is nothing in the dictionary
_replaceObservables m (GenColExtraction fp) | M.null m = pure $ ColExtraction fp
_replaceObservables _ (GenColExtraction (FieldPath v)) =
-- It is a normal extraction, prepend the suffix of the data structure.
pure (ColExtraction (FieldPath v')) where
v' = V.cons (unsafeFieldName "_1") v
_replaceObservables _ (GenColLit dt c) = pure (ColLit dt c)
_replaceObservables m (GenColFunction n v) =
(\x -> ColFunction n x Nothing) <$> sequence (_replaceObservables m <$> v)
_replaceObservables m (GenColStruct v) = ColStruct <$> sequence (_replaceField m <$> v)
_replaceObservables m (BroadcastColOp uld) =
case M.lookup (nodeId uld) m of
Just p -> pure $ ColExtraction p
Nothing -> tryError $ "_replaceObservables: error: missing key " <> show' uld <> " in " <> show' m
-- | Rewrites a struct field's value with '_replaceObservables',
-- keeping the field name unchanged.
_replaceField :: M.Map NodeId FieldPath -> GeneralizedTransField -> Try TransformField
_replaceField m (GeneralizedTransField n v) = TransformField n <$> _replaceObservables m v
-- Unconditionally packs the column into a dataset.
_packCol1 :: Column ref a -> ColOp -> Dataset a
-- Special case for column operations that are no-ops: return the dataset itself.
_packCol1 c (ColExtraction (FieldPath v)) | V.null v =
-- TODO: we should not need to force this operation.
forceRight $ castType (colType c) (colOrigin c)
_packCol1 c op =
emptyDataset (NodeStructuredTransform op) (colType c)
`parents` [untyped (colOrigin c)]
-- | Collects every observable referenced through a broadcast operation
-- anywhere in a generalized column-operation tree.
_collectObs :: GeneralizedColOp -> [UntypedLocalData]
_collectObs (GenColFunction _ v) = concat (_collectObs <$> V.toList v)
_collectObs (BroadcastColOp uld) = [uld]
_collectObs (GenColStruct v) = concat (_collectObs . gtfValue <$> V.toList v)
_collectObs _ = [] -- Anything else has no broadcast info.
| tjhunter/karps | haskell/src/Spark/Core/Internal/FunctionsInternals.hs | apache-2.0 | 15,175 | 0 | 16 | 2,995 | 3,472 | 1,836 | 1,636 | -1 | -1 |
----------------------------------------------------------------------------
-- |
-- Module : Web.Skroutz.Endoints
-- Copyright : (c) 2016 Remous-Aris Koutsiamanis
-- License : Apache License 2.0
-- Maintainer : Remous-Aris Koutsiamanis <ariskou@gmail.com>
-- Stability : alpha
-- Portability : non-portable
--
-- Provides the Skroutz API endpoints/methods.
----------------------------------------------------------------------------
module Web.Skroutz.Endpoints
(
module X
)
where
import Web.Skroutz.Endpoints.Auth as X
import Web.Skroutz.Endpoints.Compat as X
import Web.Skroutz.Endpoints.Model as X
| ariskou/skroutz-haskell-api | src/Web/Skroutz/Endpoints.hs | apache-2.0 | 659 | 0 | 4 | 117 | 51 | 41 | 10 | 6 | 0 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QCompleter.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QCompleter (
CompletionMode, ePopupCompletion, eUnfilteredPopupCompletion, eInlineCompletion
, ModelSorting, eUnsortedModel, eCaseSensitivelySortedModel, eCaseInsensitivelySortedModel
)
where
import Foreign.C.Types
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CCompletionMode a = CCompletionMode a
type CompletionMode = QEnum(CCompletionMode Int)
ieCompletionMode :: Int -> CompletionMode
ieCompletionMode x = QEnum (CCompletionMode x)
instance QEnumC (CCompletionMode Int) where
qEnum_toInt (QEnum (CCompletionMode x)) = x
qEnum_fromInt x = QEnum (CCompletionMode x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> CompletionMode -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
ePopupCompletion :: CompletionMode
ePopupCompletion
= ieCompletionMode $ 0
eUnfilteredPopupCompletion :: CompletionMode
eUnfilteredPopupCompletion
= ieCompletionMode $ 1
eInlineCompletion :: CompletionMode
eInlineCompletion
= ieCompletionMode $ 2
data CModelSorting a = CModelSorting a
type ModelSorting = QEnum(CModelSorting Int)
ieModelSorting :: Int -> ModelSorting
ieModelSorting x = QEnum (CModelSorting x)
instance QEnumC (CModelSorting Int) where
qEnum_toInt (QEnum (CModelSorting x)) = x
qEnum_fromInt x = QEnum (CModelSorting x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> ModelSorting -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eUnsortedModel :: ModelSorting
eUnsortedModel
= ieModelSorting $ 0
eCaseSensitivelySortedModel :: ModelSorting
eCaseSensitivelySortedModel
= ieModelSorting $ 1
eCaseInsensitivelySortedModel :: ModelSorting
eCaseInsensitivelySortedModel
= ieModelSorting $ 2
| keera-studios/hsQt | Qtc/Enums/Gui/QCompleter.hs | bsd-2-clause | 4,492 | 0 | 18 | 963 | 1,148 | 574 | 574 | 102 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QMenuBar.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:17
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QMenuBar (
QqMenuBar(..)
,isDefaultUp
,setDefaultUp
,qMenuBar_delete
,qMenuBar_deleteLater
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Gui.QPaintDevice
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
instance QuserMethod (QMenuBar ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QMenuBar_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QMenuBar_userMethod" qtc_QMenuBar_userMethod :: Ptr (TQMenuBar a) -> CInt -> IO ()
instance QuserMethod (QMenuBarSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QMenuBar_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QMenuBar ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QMenuBar_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QMenuBar_userMethodVariant" qtc_QMenuBar_userMethodVariant :: Ptr (TQMenuBar a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QMenuBarSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QMenuBar_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
-- Overloaded constructor for QMenuBar, mirroring the C++ overloads:
-- qMenuBar ()        -> QMenuBar()            (no parent)
-- qMenuBar widget    -> QMenuBar(parent)      (parented to the given QWidget)
class QqMenuBar x1 where
 qMenuBar :: x1 -> IO (QMenuBar ())
instance QqMenuBar (()) where
 qMenuBar ()
  = withQMenuBarResult $
    qtc_QMenuBar
foreign import ccall "qtc_QMenuBar" qtc_QMenuBar :: IO (Ptr (TQMenuBar ()))
instance QqMenuBar ((QWidget t1)) where
 qMenuBar (x1)
  = withQMenuBarResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar1 cobj_x1
foreign import ccall "qtc_QMenuBar1" qtc_QMenuBar1 :: Ptr (TQWidget t1) -> IO (Ptr (TQMenuBar ()))
-- Action queries: hit-test a point to the QAction under it (plain Point or
-- QPoint object), fetch an action's bounding rectangle (as a managed QRect
-- or an unpacked Rect via out-parameters), and read the active action.
-- actionEvent is the QWidget::actionEvent handler; the "_h" C wrappers
-- appear to be the overridable event-handler entry points — NOTE(review):
-- confirm the _h convention against the qtHaskell C shim sources.
instance QactionAt (QMenuBar a) ((Point)) where
 actionAt x0 (x1)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
    qtc_QMenuBar_actionAt_qth cobj_x0 cpoint_x1_x cpoint_x1_y
foreign import ccall "qtc_QMenuBar_actionAt_qth" qtc_QMenuBar_actionAt_qth :: Ptr (TQMenuBar a) -> CInt -> CInt -> IO (Ptr (TQAction ()))
instance QqactionAt (QMenuBar a) ((QPoint t1)) where
 qactionAt x0 (x1)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_actionAt cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_actionAt" qtc_QMenuBar_actionAt :: Ptr (TQMenuBar a) -> Ptr (TQPoint t1) -> IO (Ptr (TQAction ()))
instance QactionEvent (QMenuBar ()) ((QActionEvent t1)) where
 actionEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_actionEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_actionEvent_h" qtc_QMenuBar_actionEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQActionEvent t1) -> IO ()
instance QactionEvent (QMenuBarSc a) ((QActionEvent t1)) where
 actionEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_actionEvent_h cobj_x0 cobj_x1
-- qactionGeometry returns the rect as a managed QRect object ...
instance QqactionGeometry (QMenuBar a) ((QAction t1)) where
 qactionGeometry x0 (x1)
  = withQRectResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_actionGeometry cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_actionGeometry" qtc_QMenuBar_actionGeometry :: Ptr (TQMenuBar a) -> Ptr (TQAction t1) -> IO (Ptr (TQRect ()))
-- ... while actionGeometry unpacks x/y/w/h through C out-parameters.
instance QactionGeometry (QMenuBar a) ((QAction t1)) where
 actionGeometry x0 (x1)
  = withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_actionGeometry_qth cobj_x0 cobj_x1 crect_ret_x crect_ret_y crect_ret_w crect_ret_h
foreign import ccall "qtc_QMenuBar_actionGeometry_qth" qtc_QMenuBar_actionGeometry_qth :: Ptr (TQMenuBar a) -> Ptr (TQAction t1) -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance QactiveAction (QMenuBar a) (()) where
 activeAction x0 ()
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_activeAction cobj_x0
foreign import ccall "qtc_QMenuBar_activeAction" qtc_QMenuBar_activeAction :: Ptr (TQMenuBar a) -> IO (Ptr (TQAction ()))
-- Population API, mirroring the C++ overloads:
--   addAction action            -> ()          (takes an existing QAction)
--   addAction text              -> new QAction
--   addAction (text, recv, sig) -> new QAction connected to recv/signal
--   addMenu menu | (icon, text) | text
--   addSeparator
-- Strings cross the FFI as CWString (wide chars) via withCWString.
instance QaddAction (QMenuBar ()) ((QAction t1)) (IO ()) where
 addAction x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_addAction1 cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_addAction1" qtc_QMenuBar_addAction1 :: Ptr (TQMenuBar a) -> Ptr (TQAction t1) -> IO ()
instance QaddAction (QMenuBarSc a) ((QAction t1)) (IO ()) where
 addAction x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_addAction1 cobj_x0 cobj_x1
instance QaddAction (QMenuBar ()) ((String)) (IO (QAction ())) where
 addAction x0 (x1)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QMenuBar_addAction cobj_x0 cstr_x1
foreign import ccall "qtc_QMenuBar_addAction" qtc_QMenuBar_addAction :: Ptr (TQMenuBar a) -> CWString -> IO (Ptr (TQAction ()))
instance QaddAction (QMenuBarSc a) ((String)) (IO (QAction ())) where
 addAction x0 (x1)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QMenuBar_addAction cobj_x0 cstr_x1
instance QaddAction (QMenuBar ()) ((String, QObject t2, String)) (IO (QAction ())) where
 addAction x0 (x1, x2, x3)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    withCWString x3 $ \cstr_x3 ->
    qtc_QMenuBar_addAction2 cobj_x0 cstr_x1 cobj_x2 cstr_x3
foreign import ccall "qtc_QMenuBar_addAction2" qtc_QMenuBar_addAction2 :: Ptr (TQMenuBar a) -> CWString -> Ptr (TQObject t2) -> CWString -> IO (Ptr (TQAction ()))
instance QaddAction (QMenuBarSc a) ((String, QObject t2, String)) (IO (QAction ())) where
 addAction x0 (x1, x2, x3)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    withCWString x3 $ \cstr_x3 ->
    qtc_QMenuBar_addAction2 cobj_x0 cstr_x1 cobj_x2 cstr_x3
instance QaddMenu (QMenuBar a) ((QMenu t1)) (IO (QAction ())) where
 addMenu x0 (x1)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_addMenu cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_addMenu" qtc_QMenuBar_addMenu :: Ptr (TQMenuBar a) -> Ptr (TQMenu t1) -> IO (Ptr (TQAction ()))
instance QaddMenu (QMenuBar a) ((QIcon t1, String)) (IO (QMenu ())) where
 addMenu x0 (x1, x2)
  = withQMenuResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withCWString x2 $ \cstr_x2 ->
    qtc_QMenuBar_addMenu2 cobj_x0 cobj_x1 cstr_x2
foreign import ccall "qtc_QMenuBar_addMenu2" qtc_QMenuBar_addMenu2 :: Ptr (TQMenuBar a) -> Ptr (TQIcon t1) -> CWString -> IO (Ptr (TQMenu ()))
instance QaddMenu (QMenuBar a) ((String)) (IO (QMenu ())) where
 addMenu x0 (x1)
  = withQMenuResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QMenuBar_addMenu1 cobj_x0 cstr_x1
foreign import ccall "qtc_QMenuBar_addMenu1" qtc_QMenuBar_addMenu1 :: Ptr (TQMenuBar a) -> CWString -> IO (Ptr (TQMenu ()))
instance QaddSeparator (QMenuBar a) (()) where
 addSeparator x0 ()
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_addSeparator cobj_x0
foreign import ccall "qtc_QMenuBar_addSeparator" qtc_QMenuBar_addSeparator :: Ptr (TQMenuBar a) -> IO (Ptr (TQAction ()))
-- changeEvent handler, clear (remove all actions), and cornerWidget
-- accessors. cornerWidget () uses the C++ default corner; the (Corner)
-- overload passes the enum through as a CLong via qEnum_toInt.
instance QchangeEvent (QMenuBar ()) ((QEvent t1)) where
 changeEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_changeEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_changeEvent_h" qtc_QMenuBar_changeEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQEvent t1) -> IO ()
instance QchangeEvent (QMenuBarSc a) ((QEvent t1)) where
 changeEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_changeEvent_h cobj_x0 cobj_x1
instance Qclear (QMenuBar a) (()) where
 clear x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_clear cobj_x0
foreign import ccall "qtc_QMenuBar_clear" qtc_QMenuBar_clear :: Ptr (TQMenuBar a) -> IO ()
instance QcornerWidget (QMenuBar a) (()) where
 cornerWidget x0 ()
  = withQWidgetResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_cornerWidget cobj_x0
foreign import ccall "qtc_QMenuBar_cornerWidget" qtc_QMenuBar_cornerWidget :: Ptr (TQMenuBar a) -> IO (Ptr (TQWidget ()))
instance QcornerWidget (QMenuBar a) ((Corner)) where
 cornerWidget x0 (x1)
  = withQWidgetResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_cornerWidget1 cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QMenuBar_cornerWidget1" qtc_QMenuBar_cornerWidget1 :: Ptr (TQMenuBar a) -> CLong -> IO (Ptr (TQWidget ()))
-- Core event plumbing: event/eventFilter return a CBool converted to Bool
-- (True = event handled/filtered), plus focus in/out handlers.
instance Qevent (QMenuBar ()) ((QEvent t1)) where
 event x0 (x1)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_event_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_event_h" qtc_QMenuBar_event_h :: Ptr (TQMenuBar a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent (QMenuBarSc a) ((QEvent t1)) where
 event x0 (x1)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_event_h cobj_x0 cobj_x1
instance QeventFilter (QMenuBar ()) ((QObject t1, QEvent t2)) where
 eventFilter x0 (x1, x2)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QMenuBar_eventFilter cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QMenuBar_eventFilter" qtc_QMenuBar_eventFilter :: Ptr (TQMenuBar a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter (QMenuBarSc a) ((QObject t1, QEvent t2)) where
 eventFilter x0 (x1, x2)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QMenuBar_eventFilter cobj_x0 cobj_x1 cobj_x2
instance QfocusInEvent (QMenuBar ()) ((QFocusEvent t1)) where
 focusInEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_focusInEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_focusInEvent_h" qtc_QMenuBar_focusInEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusInEvent (QMenuBarSc a) ((QFocusEvent t1)) where
 focusInEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_focusInEvent_h cobj_x0 cobj_x1
instance QfocusOutEvent (QMenuBar ()) ((QFocusEvent t1)) where
 focusOutEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_focusOutEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_focusOutEvent_h" qtc_QMenuBar_focusOutEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusOutEvent (QMenuBarSc a) ((QFocusEvent t1)) where
 focusOutEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_focusOutEvent_h cobj_x0 cobj_x1
-- Layout/insert helpers plus the isDefaultUp property getter.
-- heightForWidth: layout negotiation; initStyleOption: fills a
-- QStyleOptionMenuItem for a given action; insertMenu/insertSeparator
-- insert before an existing action and return the created QAction.
instance QheightForWidth (QMenuBar ()) ((Int)) where
 heightForWidth x0 (x1)
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_heightForWidth_h cobj_x0 (toCInt x1)
foreign import ccall "qtc_QMenuBar_heightForWidth_h" qtc_QMenuBar_heightForWidth_h :: Ptr (TQMenuBar a) -> CInt -> IO CInt
instance QheightForWidth (QMenuBarSc a) ((Int)) where
 heightForWidth x0 (x1)
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_heightForWidth_h cobj_x0 (toCInt x1)
instance QinitStyleOption (QMenuBar ()) ((QStyleOptionMenuItem t1, QAction t2)) where
 initStyleOption x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QMenuBar_initStyleOption cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QMenuBar_initStyleOption" qtc_QMenuBar_initStyleOption :: Ptr (TQMenuBar a) -> Ptr (TQStyleOptionMenuItem t1) -> Ptr (TQAction t2) -> IO ()
instance QinitStyleOption (QMenuBarSc a) ((QStyleOptionMenuItem t1, QAction t2)) where
 initStyleOption x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QMenuBar_initStyleOption cobj_x0 cobj_x1 cobj_x2
instance QinsertMenu (QMenuBar a) ((QAction t1, QMenu t2)) where
 insertMenu x0 (x1, x2)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QMenuBar_insertMenu cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QMenuBar_insertMenu" qtc_QMenuBar_insertMenu :: Ptr (TQMenuBar a) -> Ptr (TQAction t1) -> Ptr (TQMenu t2) -> IO (Ptr (TQAction ()))
instance QinsertSeparator (QMenuBar a) ((QAction t1)) where
 insertSeparator x0 (x1)
  = withQActionResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_insertSeparator cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_insertSeparator" qtc_QMenuBar_insertSeparator :: Ptr (TQMenuBar a) -> Ptr (TQAction t1) -> IO (Ptr (TQAction ()))
-- Plain exported function (not a class method); reads the defaultUp
-- property. Exported from this module's header list.
isDefaultUp :: QMenuBar a -> (()) -> IO (Bool)
isDefaultUp x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_isDefaultUp cobj_x0
foreign import ccall "qtc_QMenuBar_isDefaultUp" qtc_QMenuBar_isDefaultUp :: Ptr (TQMenuBar a) -> IO CBool
-- Keyboard/leave handlers, minimumSizeHint (both as managed QSize and as
-- unpacked w/h out-parameters), and the mouse move/press/release handlers.
instance QkeyPressEvent (QMenuBar ()) ((QKeyEvent t1)) where
 keyPressEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_keyPressEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_keyPressEvent_h" qtc_QMenuBar_keyPressEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyPressEvent (QMenuBarSc a) ((QKeyEvent t1)) where
 keyPressEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_keyPressEvent_h cobj_x0 cobj_x1
instance QleaveEvent (QMenuBar ()) ((QEvent t1)) where
 leaveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_leaveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_leaveEvent_h" qtc_QMenuBar_leaveEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQEvent t1) -> IO ()
instance QleaveEvent (QMenuBarSc a) ((QEvent t1)) where
 leaveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_leaveEvent_h cobj_x0 cobj_x1
instance QqminimumSizeHint (QMenuBar ()) (()) where
 qminimumSizeHint x0 ()
  = withQSizeResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_minimumSizeHint_h cobj_x0
foreign import ccall "qtc_QMenuBar_minimumSizeHint_h" qtc_QMenuBar_minimumSizeHint_h :: Ptr (TQMenuBar a) -> IO (Ptr (TQSize ()))
instance QqminimumSizeHint (QMenuBarSc a) (()) where
 qminimumSizeHint x0 ()
  = withQSizeResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_minimumSizeHint_h cobj_x0
instance QminimumSizeHint (QMenuBar ()) (()) where
 minimumSizeHint x0 ()
  = withSizeResult $ \csize_ret_w csize_ret_h ->
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_minimumSizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QMenuBar_minimumSizeHint_qth_h" qtc_QMenuBar_minimumSizeHint_qth_h :: Ptr (TQMenuBar a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QminimumSizeHint (QMenuBarSc a) (()) where
 minimumSizeHint x0 ()
  = withSizeResult $ \csize_ret_w csize_ret_h ->
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_minimumSizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
instance QmouseMoveEvent (QMenuBar ()) ((QMouseEvent t1)) where
 mouseMoveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mouseMoveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_mouseMoveEvent_h" qtc_QMenuBar_mouseMoveEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseMoveEvent (QMenuBarSc a) ((QMouseEvent t1)) where
 mouseMoveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mouseMoveEvent_h cobj_x0 cobj_x1
instance QmousePressEvent (QMenuBar ()) ((QMouseEvent t1)) where
 mousePressEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mousePressEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_mousePressEvent_h" qtc_QMenuBar_mousePressEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmousePressEvent (QMenuBarSc a) ((QMouseEvent t1)) where
 mousePressEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mousePressEvent_h cobj_x0 cobj_x1
instance QmouseReleaseEvent (QMenuBar ()) ((QMouseEvent t1)) where
 mouseReleaseEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mouseReleaseEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_mouseReleaseEvent_h" qtc_QMenuBar_mouseReleaseEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseReleaseEvent (QMenuBarSc a) ((QMouseEvent t1)) where
 mouseReleaseEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mouseReleaseEvent_h cobj_x0 cobj_x1
-- Paint/resize handlers and setters: setActiveAction, setCornerWidget
-- (with and without an explicit Corner enum), and the exported
-- setDefaultUp property setter (Bool marshalled as CBool).
instance QpaintEvent (QMenuBar ()) ((QPaintEvent t1)) where
 paintEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_paintEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_paintEvent_h" qtc_QMenuBar_paintEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQPaintEvent t1) -> IO ()
instance QpaintEvent (QMenuBarSc a) ((QPaintEvent t1)) where
 paintEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_paintEvent_h cobj_x0 cobj_x1
instance QresizeEvent (QMenuBar ()) ((QResizeEvent t1)) where
 resizeEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_resizeEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_resizeEvent_h" qtc_QMenuBar_resizeEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQResizeEvent t1) -> IO ()
instance QresizeEvent (QMenuBarSc a) ((QResizeEvent t1)) where
 resizeEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_resizeEvent_h cobj_x0 cobj_x1
instance QsetActiveAction (QMenuBar a) ((QAction t1)) where
 setActiveAction x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_setActiveAction cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_setActiveAction" qtc_QMenuBar_setActiveAction :: Ptr (TQMenuBar a) -> Ptr (TQAction t1) -> IO ()
instance QsetCornerWidget (QMenuBar a) ((QWidget t1)) where
 setCornerWidget x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_setCornerWidget cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_setCornerWidget" qtc_QMenuBar_setCornerWidget :: Ptr (TQMenuBar a) -> Ptr (TQWidget t1) -> IO ()
instance QsetCornerWidget (QMenuBar a) ((QWidget t1, Corner)) where
 setCornerWidget x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_setCornerWidget1 cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QMenuBar_setCornerWidget1" qtc_QMenuBar_setCornerWidget1 :: Ptr (TQMenuBar a) -> Ptr (TQWidget t1) -> CLong -> IO ()
-- Plain exported function: writes the defaultUp property.
setDefaultUp :: QMenuBar a -> ((Bool)) -> IO ()
setDefaultUp x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_setDefaultUp cobj_x0 (toCBool x1)
foreign import ccall "qtc_QMenuBar_setDefaultUp" qtc_QMenuBar_setDefaultUp :: Ptr (TQMenuBar a) -> CBool -> IO ()
-- setVisible override, sizeHint (managed-QSize and unpacked w/h forms),
-- and the exported lifetime helpers: qMenuBar_delete destroys the C++
-- object immediately; qMenuBar_deleteLater defers via the event loop.
instance QsetVisible (QMenuBar ()) ((Bool)) where
 setVisible x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_setVisible_h cobj_x0 (toCBool x1)
foreign import ccall "qtc_QMenuBar_setVisible_h" qtc_QMenuBar_setVisible_h :: Ptr (TQMenuBar a) -> CBool -> IO ()
instance QsetVisible (QMenuBarSc a) ((Bool)) where
 setVisible x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_setVisible_h cobj_x0 (toCBool x1)
instance QqsizeHint (QMenuBar ()) (()) where
 qsizeHint x0 ()
  = withQSizeResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_sizeHint_h cobj_x0
foreign import ccall "qtc_QMenuBar_sizeHint_h" qtc_QMenuBar_sizeHint_h :: Ptr (TQMenuBar a) -> IO (Ptr (TQSize ()))
instance QqsizeHint (QMenuBarSc a) (()) where
 qsizeHint x0 ()
  = withQSizeResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_sizeHint_h cobj_x0
instance QsizeHint (QMenuBar ()) (()) where
 sizeHint x0 ()
  = withSizeResult $ \csize_ret_w csize_ret_h ->
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_sizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QMenuBar_sizeHint_qth_h" qtc_QMenuBar_sizeHint_qth_h :: Ptr (TQMenuBar a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QsizeHint (QMenuBarSc a) (()) where
 sizeHint x0 ()
  = withSizeResult $ \csize_ret_w csize_ret_h ->
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_sizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
qMenuBar_delete :: QMenuBar a -> IO ()
qMenuBar_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_delete cobj_x0
foreign import ccall "qtc_QMenuBar_delete" qtc_QMenuBar_delete :: Ptr (TQMenuBar a) -> IO ()
qMenuBar_deleteLater :: QMenuBar a -> IO ()
qMenuBar_deleteLater x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_deleteLater cobj_x0
foreign import ccall "qtc_QMenuBar_deleteLater" qtc_QMenuBar_deleteLater :: Ptr (TQMenuBar a) -> IO ()
-- close/contextMenu handlers plus the inherited QWidget create overloads
-- (raw window-system handle as QVoid, with optional initializeWindow /
-- destroyOldWindow flags passed as CBool).
instance QcloseEvent (QMenuBar ()) ((QCloseEvent t1)) where
 closeEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_closeEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_closeEvent_h" qtc_QMenuBar_closeEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQCloseEvent t1) -> IO ()
instance QcloseEvent (QMenuBarSc a) ((QCloseEvent t1)) where
 closeEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_closeEvent_h cobj_x0 cobj_x1
instance QcontextMenuEvent (QMenuBar ()) ((QContextMenuEvent t1)) where
 contextMenuEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_contextMenuEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_contextMenuEvent_h" qtc_QMenuBar_contextMenuEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQContextMenuEvent t1) -> IO ()
instance QcontextMenuEvent (QMenuBarSc a) ((QContextMenuEvent t1)) where
 contextMenuEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_contextMenuEvent_h cobj_x0 cobj_x1
instance Qcreate (QMenuBar ()) (()) (IO ()) where
 create x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_create cobj_x0
foreign import ccall "qtc_QMenuBar_create" qtc_QMenuBar_create :: Ptr (TQMenuBar a) -> IO ()
instance Qcreate (QMenuBarSc a) (()) (IO ()) where
 create x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_create cobj_x0
instance Qcreate (QMenuBar ()) ((QVoid t1)) (IO ()) where
 create x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_create1 cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_create1" qtc_QMenuBar_create1 :: Ptr (TQMenuBar a) -> Ptr (TQVoid t1) -> IO ()
instance Qcreate (QMenuBarSc a) ((QVoid t1)) (IO ()) where
 create x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_create1 cobj_x0 cobj_x1
instance Qcreate (QMenuBar ()) ((QVoid t1, Bool)) (IO ()) where
 create x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_create2 cobj_x0 cobj_x1 (toCBool x2)
foreign import ccall "qtc_QMenuBar_create2" qtc_QMenuBar_create2 :: Ptr (TQMenuBar a) -> Ptr (TQVoid t1) -> CBool -> IO ()
instance Qcreate (QMenuBarSc a) ((QVoid t1, Bool)) (IO ()) where
 create x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_create2 cobj_x0 cobj_x1 (toCBool x2)
instance Qcreate (QMenuBar ()) ((QVoid t1, Bool, Bool)) (IO ()) where
 create x0 (x1, x2, x3)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_create3 cobj_x0 cobj_x1 (toCBool x2) (toCBool x3)
foreign import ccall "qtc_QMenuBar_create3" qtc_QMenuBar_create3 :: Ptr (TQMenuBar a) -> Ptr (TQVoid t1) -> CBool -> CBool -> IO ()
instance Qcreate (QMenuBarSc a) ((QVoid t1, Bool, Bool)) (IO ()) where
 create x0 (x1, x2, x3)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_create3 cobj_x0 cobj_x1 (toCBool x2) (toCBool x3)
-- Inherited QWidget destroy overloads (optional destroyWindow /
-- destroySubWindows flags) and devType (paint-device type code as Int).
instance Qdestroy (QMenuBar ()) (()) where
 destroy x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_destroy cobj_x0
foreign import ccall "qtc_QMenuBar_destroy" qtc_QMenuBar_destroy :: Ptr (TQMenuBar a) -> IO ()
instance Qdestroy (QMenuBarSc a) (()) where
 destroy x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_destroy cobj_x0
instance Qdestroy (QMenuBar ()) ((Bool)) where
 destroy x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_destroy1 cobj_x0 (toCBool x1)
foreign import ccall "qtc_QMenuBar_destroy1" qtc_QMenuBar_destroy1 :: Ptr (TQMenuBar a) -> CBool -> IO ()
instance Qdestroy (QMenuBarSc a) ((Bool)) where
 destroy x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_destroy1 cobj_x0 (toCBool x1)
instance Qdestroy (QMenuBar ()) ((Bool, Bool)) where
 destroy x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_destroy2 cobj_x0 (toCBool x1) (toCBool x2)
foreign import ccall "qtc_QMenuBar_destroy2" qtc_QMenuBar_destroy2 :: Ptr (TQMenuBar a) -> CBool -> CBool -> IO ()
instance Qdestroy (QMenuBarSc a) ((Bool, Bool)) where
 destroy x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_destroy2 cobj_x0 (toCBool x1) (toCBool x2)
instance QdevType (QMenuBar ()) (()) where
 devType x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_devType_h cobj_x0
foreign import ccall "qtc_QMenuBar_devType_h" qtc_QMenuBar_devType_h :: Ptr (TQMenuBar a) -> IO CInt
instance QdevType (QMenuBarSc a) (()) where
 devType x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_devType_h cobj_x0
-- Drag-and-drop event handlers and the (Qt3-era) enabledChange hook.
instance QdragEnterEvent (QMenuBar ()) ((QDragEnterEvent t1)) where
 dragEnterEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dragEnterEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_dragEnterEvent_h" qtc_QMenuBar_dragEnterEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQDragEnterEvent t1) -> IO ()
instance QdragEnterEvent (QMenuBarSc a) ((QDragEnterEvent t1)) where
 dragEnterEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dragEnterEvent_h cobj_x0 cobj_x1
instance QdragLeaveEvent (QMenuBar ()) ((QDragLeaveEvent t1)) where
 dragLeaveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dragLeaveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_dragLeaveEvent_h" qtc_QMenuBar_dragLeaveEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQDragLeaveEvent t1) -> IO ()
instance QdragLeaveEvent (QMenuBarSc a) ((QDragLeaveEvent t1)) where
 dragLeaveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dragLeaveEvent_h cobj_x0 cobj_x1
instance QdragMoveEvent (QMenuBar ()) ((QDragMoveEvent t1)) where
 dragMoveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dragMoveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_dragMoveEvent_h" qtc_QMenuBar_dragMoveEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQDragMoveEvent t1) -> IO ()
instance QdragMoveEvent (QMenuBarSc a) ((QDragMoveEvent t1)) where
 dragMoveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dragMoveEvent_h cobj_x0 cobj_x1
instance QdropEvent (QMenuBar ()) ((QDropEvent t1)) where
 dropEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dropEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_dropEvent_h" qtc_QMenuBar_dropEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQDropEvent t1) -> IO ()
instance QdropEvent (QMenuBarSc a) ((QDropEvent t1)) where
 dropEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_dropEvent_h cobj_x0 cobj_x1
instance QenabledChange (QMenuBar ()) ((Bool)) where
 enabledChange x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_enabledChange cobj_x0 (toCBool x1)
foreign import ccall "qtc_QMenuBar_enabledChange" qtc_QMenuBar_enabledChange :: Ptr (TQMenuBar a) -> CBool -> IO ()
instance QenabledChange (QMenuBarSc a) ((Bool)) where
 enabledChange x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_enabledChange cobj_x0 (toCBool x1)
-- enterEvent handler, focus-chain navigation helpers (each returns Bool:
-- whether a widget accepted focus), and the fontChange hook.
instance QenterEvent (QMenuBar ()) ((QEvent t1)) where
 enterEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_enterEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_enterEvent_h" qtc_QMenuBar_enterEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQEvent t1) -> IO ()
instance QenterEvent (QMenuBarSc a) ((QEvent t1)) where
 enterEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_enterEvent_h cobj_x0 cobj_x1
instance QfocusNextChild (QMenuBar ()) (()) where
 focusNextChild x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_focusNextChild cobj_x0
foreign import ccall "qtc_QMenuBar_focusNextChild" qtc_QMenuBar_focusNextChild :: Ptr (TQMenuBar a) -> IO CBool
instance QfocusNextChild (QMenuBarSc a) (()) where
 focusNextChild x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_focusNextChild cobj_x0
instance QfocusNextPrevChild (QMenuBar ()) ((Bool)) where
 focusNextPrevChild x0 (x1)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_focusNextPrevChild cobj_x0 (toCBool x1)
foreign import ccall "qtc_QMenuBar_focusNextPrevChild" qtc_QMenuBar_focusNextPrevChild :: Ptr (TQMenuBar a) -> CBool -> IO CBool
instance QfocusNextPrevChild (QMenuBarSc a) ((Bool)) where
 focusNextPrevChild x0 (x1)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_focusNextPrevChild cobj_x0 (toCBool x1)
instance QfocusPreviousChild (QMenuBar ()) (()) where
 focusPreviousChild x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_focusPreviousChild cobj_x0
foreign import ccall "qtc_QMenuBar_focusPreviousChild" qtc_QMenuBar_focusPreviousChild :: Ptr (TQMenuBar a) -> IO CBool
instance QfocusPreviousChild (QMenuBarSc a) (()) where
 focusPreviousChild x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_focusPreviousChild cobj_x0
instance QfontChange (QMenuBar ()) ((QFont t1)) where
 fontChange x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_fontChange cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_fontChange" qtc_QMenuBar_fontChange :: Ptr (TQMenuBar a) -> Ptr (TQFont t1) -> IO ()
instance QfontChange (QMenuBarSc a) ((QFont t1)) where
 fontChange x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_fontChange cobj_x0 cobj_x1
-- hide/input-method/key-release handlers, the languageChange hook, and
-- metric (paint-device metric lookup; enum passed as CLong, Int result).
-- inputMethodQuery returns a managed QVariant.
instance QhideEvent (QMenuBar ()) ((QHideEvent t1)) where
 hideEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_hideEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_hideEvent_h" qtc_QMenuBar_hideEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQHideEvent t1) -> IO ()
instance QhideEvent (QMenuBarSc a) ((QHideEvent t1)) where
 hideEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_hideEvent_h cobj_x0 cobj_x1
instance QinputMethodEvent (QMenuBar ()) ((QInputMethodEvent t1)) where
 inputMethodEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_inputMethodEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_inputMethodEvent" qtc_QMenuBar_inputMethodEvent :: Ptr (TQMenuBar a) -> Ptr (TQInputMethodEvent t1) -> IO ()
instance QinputMethodEvent (QMenuBarSc a) ((QInputMethodEvent t1)) where
 inputMethodEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_inputMethodEvent cobj_x0 cobj_x1
instance QinputMethodQuery (QMenuBar ()) ((InputMethodQuery)) where
 inputMethodQuery x0 (x1)
  = withQVariantResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_inputMethodQuery_h cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QMenuBar_inputMethodQuery_h" qtc_QMenuBar_inputMethodQuery_h :: Ptr (TQMenuBar a) -> CLong -> IO (Ptr (TQVariant ()))
instance QinputMethodQuery (QMenuBarSc a) ((InputMethodQuery)) where
 inputMethodQuery x0 (x1)
  = withQVariantResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_inputMethodQuery_h cobj_x0 (toCLong $ qEnum_toInt x1)
instance QkeyReleaseEvent (QMenuBar ()) ((QKeyEvent t1)) where
 keyReleaseEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_keyReleaseEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_keyReleaseEvent_h" qtc_QMenuBar_keyReleaseEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyReleaseEvent (QMenuBarSc a) ((QKeyEvent t1)) where
 keyReleaseEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_keyReleaseEvent_h cobj_x0 cobj_x1
instance QlanguageChange (QMenuBar ()) (()) where
 languageChange x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_languageChange cobj_x0
foreign import ccall "qtc_QMenuBar_languageChange" qtc_QMenuBar_languageChange :: Ptr (TQMenuBar a) -> IO ()
instance QlanguageChange (QMenuBarSc a) (()) where
 languageChange x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_languageChange cobj_x0
instance Qmetric (QMenuBar ()) ((PaintDeviceMetric)) where
 metric x0 (x1)
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_metric cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QMenuBar_metric" qtc_QMenuBar_metric :: Ptr (TQMenuBar a) -> CLong -> IO CInt
instance Qmetric (QMenuBarSc a) ((PaintDeviceMetric)) where
 metric x0 (x1)
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_metric cobj_x0 (toCLong $ qEnum_toInt x1)
-- mouseDoubleClick handler, the move overloads ((Int,Int) coordinates,
-- plain Point, or managed QPoint), and the moveEvent handler for the
-- base QMenuBar type. (The QMenuBarSc moveEvent instance continues past
-- this point in the file.)
instance QmouseDoubleClickEvent (QMenuBar ()) ((QMouseEvent t1)) where
 mouseDoubleClickEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mouseDoubleClickEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_mouseDoubleClickEvent_h" qtc_QMenuBar_mouseDoubleClickEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseDoubleClickEvent (QMenuBarSc a) ((QMouseEvent t1)) where
 mouseDoubleClickEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_mouseDoubleClickEvent_h cobj_x0 cobj_x1
instance Qmove (QMenuBar ()) ((Int, Int)) where
 move x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_move1 cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QMenuBar_move1" qtc_QMenuBar_move1 :: Ptr (TQMenuBar a) -> CInt -> CInt -> IO ()
instance Qmove (QMenuBarSc a) ((Int, Int)) where
 move x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMenuBar_move1 cobj_x0 (toCInt x1) (toCInt x2)
instance Qmove (QMenuBar ()) ((Point)) where
 move x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
    qtc_QMenuBar_move_qth cobj_x0 cpoint_x1_x cpoint_x1_y
foreign import ccall "qtc_QMenuBar_move_qth" qtc_QMenuBar_move_qth :: Ptr (TQMenuBar a) -> CInt -> CInt -> IO ()
instance Qmove (QMenuBarSc a) ((Point)) where
 move x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
    qtc_QMenuBar_move_qth cobj_x0 cpoint_x1_x cpoint_x1_y
instance Qqmove (QMenuBar ()) ((QPoint t1)) where
 qmove x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_move cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_move" qtc_QMenuBar_move :: Ptr (TQMenuBar a) -> Ptr (TQPoint t1) -> IO ()
instance Qqmove (QMenuBarSc a) ((QPoint t1)) where
 qmove x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_move cobj_x0 cobj_x1
instance QmoveEvent (QMenuBar ()) ((QMoveEvent t1)) where
 moveEvent x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QMenuBar_moveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_moveEvent_h" qtc_QMenuBar_moveEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQMoveEvent t1) -> IO ()
instance QmoveEvent (QMenuBarSc a) ((QMoveEvent t1)) where
moveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_moveEvent_h cobj_x0 cobj_x1
instance QpaintEngine (QMenuBar ()) (()) where
paintEngine x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_paintEngine_h cobj_x0
foreign import ccall "qtc_QMenuBar_paintEngine_h" qtc_QMenuBar_paintEngine_h :: Ptr (TQMenuBar a) -> IO (Ptr (TQPaintEngine ()))
instance QpaintEngine (QMenuBarSc a) (()) where
paintEngine x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_paintEngine_h cobj_x0
instance QpaletteChange (QMenuBar ()) ((QPalette t1)) where
paletteChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_paletteChange cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_paletteChange" qtc_QMenuBar_paletteChange :: Ptr (TQMenuBar a) -> Ptr (TQPalette t1) -> IO ()
instance QpaletteChange (QMenuBarSc a) ((QPalette t1)) where
paletteChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_paletteChange cobj_x0 cobj_x1
instance Qrepaint (QMenuBar ()) (()) where
repaint x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_repaint cobj_x0
foreign import ccall "qtc_QMenuBar_repaint" qtc_QMenuBar_repaint :: Ptr (TQMenuBar a) -> IO ()
instance Qrepaint (QMenuBarSc a) (()) where
repaint x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_repaint cobj_x0
instance Qrepaint (QMenuBar ()) ((Int, Int, Int, Int)) where
repaint x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_repaint2 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
foreign import ccall "qtc_QMenuBar_repaint2" qtc_QMenuBar_repaint2 :: Ptr (TQMenuBar a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance Qrepaint (QMenuBarSc a) ((Int, Int, Int, Int)) where
repaint x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_repaint2 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
instance Qrepaint (QMenuBar ()) ((QRegion t1)) where
repaint x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_repaint1 cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_repaint1" qtc_QMenuBar_repaint1 :: Ptr (TQMenuBar a) -> Ptr (TQRegion t1) -> IO ()
instance Qrepaint (QMenuBarSc a) ((QRegion t1)) where
repaint x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_repaint1 cobj_x0 cobj_x1
instance QresetInputContext (QMenuBar ()) (()) where
resetInputContext x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_resetInputContext cobj_x0
foreign import ccall "qtc_QMenuBar_resetInputContext" qtc_QMenuBar_resetInputContext :: Ptr (TQMenuBar a) -> IO ()
instance QresetInputContext (QMenuBarSc a) (()) where
resetInputContext x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_resetInputContext cobj_x0
instance Qresize (QMenuBar ()) ((Int, Int)) (IO ()) where
resize x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_resize1 cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QMenuBar_resize1" qtc_QMenuBar_resize1 :: Ptr (TQMenuBar a) -> CInt -> CInt -> IO ()
instance Qresize (QMenuBarSc a) ((Int, Int)) (IO ()) where
resize x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_resize1 cobj_x0 (toCInt x1) (toCInt x2)
instance Qqresize (QMenuBar ()) ((QSize t1)) where
qresize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_resize cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_resize" qtc_QMenuBar_resize :: Ptr (TQMenuBar a) -> Ptr (TQSize t1) -> IO ()
instance Qqresize (QMenuBarSc a) ((QSize t1)) where
qresize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_resize cobj_x0 cobj_x1
instance Qresize (QMenuBar ()) ((Size)) (IO ()) where
resize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QMenuBar_resize_qth cobj_x0 csize_x1_w csize_x1_h
foreign import ccall "qtc_QMenuBar_resize_qth" qtc_QMenuBar_resize_qth :: Ptr (TQMenuBar a) -> CInt -> CInt -> IO ()
instance Qresize (QMenuBarSc a) ((Size)) (IO ()) where
resize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QMenuBar_resize_qth cobj_x0 csize_x1_w csize_x1_h
instance QsetGeometry (QMenuBar ()) ((Int, Int, Int, Int)) where
setGeometry x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_setGeometry1 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
foreign import ccall "qtc_QMenuBar_setGeometry1" qtc_QMenuBar_setGeometry1 :: Ptr (TQMenuBar a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance QsetGeometry (QMenuBarSc a) ((Int, Int, Int, Int)) where
setGeometry x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_setGeometry1 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
instance QqsetGeometry (QMenuBar ()) ((QRect t1)) where
qsetGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_setGeometry cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_setGeometry" qtc_QMenuBar_setGeometry :: Ptr (TQMenuBar a) -> Ptr (TQRect t1) -> IO ()
instance QqsetGeometry (QMenuBarSc a) ((QRect t1)) where
qsetGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_setGeometry cobj_x0 cobj_x1
instance QsetGeometry (QMenuBar ()) ((Rect)) where
setGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QMenuBar_setGeometry_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
foreign import ccall "qtc_QMenuBar_setGeometry_qth" qtc_QMenuBar_setGeometry_qth :: Ptr (TQMenuBar a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance QsetGeometry (QMenuBarSc a) ((Rect)) where
setGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QMenuBar_setGeometry_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
instance QsetMouseTracking (QMenuBar ()) ((Bool)) where
setMouseTracking x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_setMouseTracking cobj_x0 (toCBool x1)
foreign import ccall "qtc_QMenuBar_setMouseTracking" qtc_QMenuBar_setMouseTracking :: Ptr (TQMenuBar a) -> CBool -> IO ()
instance QsetMouseTracking (QMenuBarSc a) ((Bool)) where
setMouseTracking x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_setMouseTracking cobj_x0 (toCBool x1)
instance QshowEvent (QMenuBar ()) ((QShowEvent t1)) where
showEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_showEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_showEvent_h" qtc_QMenuBar_showEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQShowEvent t1) -> IO ()
instance QshowEvent (QMenuBarSc a) ((QShowEvent t1)) where
showEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_showEvent_h cobj_x0 cobj_x1
instance QtabletEvent (QMenuBar ()) ((QTabletEvent t1)) where
tabletEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_tabletEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_tabletEvent_h" qtc_QMenuBar_tabletEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQTabletEvent t1) -> IO ()
instance QtabletEvent (QMenuBarSc a) ((QTabletEvent t1)) where
tabletEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_tabletEvent_h cobj_x0 cobj_x1
instance QupdateMicroFocus (QMenuBar ()) (()) where
updateMicroFocus x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_updateMicroFocus cobj_x0
foreign import ccall "qtc_QMenuBar_updateMicroFocus" qtc_QMenuBar_updateMicroFocus :: Ptr (TQMenuBar a) -> IO ()
instance QupdateMicroFocus (QMenuBarSc a) (()) where
updateMicroFocus x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_updateMicroFocus cobj_x0
instance QwheelEvent (QMenuBar ()) ((QWheelEvent t1)) where
wheelEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_wheelEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_wheelEvent_h" qtc_QMenuBar_wheelEvent_h :: Ptr (TQMenuBar a) -> Ptr (TQWheelEvent t1) -> IO ()
instance QwheelEvent (QMenuBarSc a) ((QWheelEvent t1)) where
wheelEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_wheelEvent_h cobj_x0 cobj_x1
instance QwindowActivationChange (QMenuBar ()) ((Bool)) where
windowActivationChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_windowActivationChange cobj_x0 (toCBool x1)
foreign import ccall "qtc_QMenuBar_windowActivationChange" qtc_QMenuBar_windowActivationChange :: Ptr (TQMenuBar a) -> CBool -> IO ()
instance QwindowActivationChange (QMenuBarSc a) ((Bool)) where
windowActivationChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_windowActivationChange cobj_x0 (toCBool x1)
instance QchildEvent (QMenuBar ()) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_childEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_childEvent" qtc_QMenuBar_childEvent :: Ptr (TQMenuBar a) -> Ptr (TQChildEvent t1) -> IO ()
instance QchildEvent (QMenuBarSc a) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_childEvent cobj_x0 cobj_x1
instance QconnectNotify (QMenuBar ()) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMenuBar_connectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QMenuBar_connectNotify" qtc_QMenuBar_connectNotify :: Ptr (TQMenuBar a) -> CWString -> IO ()
instance QconnectNotify (QMenuBarSc a) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMenuBar_connectNotify cobj_x0 cstr_x1
instance QcustomEvent (QMenuBar ()) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_customEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_customEvent" qtc_QMenuBar_customEvent :: Ptr (TQMenuBar a) -> Ptr (TQEvent t1) -> IO ()
instance QcustomEvent (QMenuBarSc a) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_customEvent cobj_x0 cobj_x1
instance QdisconnectNotify (QMenuBar ()) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMenuBar_disconnectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QMenuBar_disconnectNotify" qtc_QMenuBar_disconnectNotify :: Ptr (TQMenuBar a) -> CWString -> IO ()
instance QdisconnectNotify (QMenuBarSc a) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMenuBar_disconnectNotify cobj_x0 cstr_x1
instance Qreceivers (QMenuBar ()) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMenuBar_receivers cobj_x0 cstr_x1
foreign import ccall "qtc_QMenuBar_receivers" qtc_QMenuBar_receivers :: Ptr (TQMenuBar a) -> CWString -> IO CInt
instance Qreceivers (QMenuBarSc a) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMenuBar_receivers cobj_x0 cstr_x1
instance Qsender (QMenuBar ()) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_sender cobj_x0
foreign import ccall "qtc_QMenuBar_sender" qtc_QMenuBar_sender :: Ptr (TQMenuBar a) -> IO (Ptr (TQObject ()))
instance Qsender (QMenuBarSc a) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMenuBar_sender cobj_x0
instance QtimerEvent (QMenuBar ()) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_timerEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QMenuBar_timerEvent" qtc_QMenuBar_timerEvent :: Ptr (TQMenuBar a) -> Ptr (TQTimerEvent t1) -> IO ()
instance QtimerEvent (QMenuBarSc a) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMenuBar_timerEvent cobj_x0 cobj_x1
| uduki/hsQt | Qtc/Gui/QMenuBar.hs | bsd-2-clause | 50,410 | 0 | 15 | 8,488 | 17,148 | 8,687 | 8,461 | -1 | -1 |
{-# LANGUAGE ExistentialQuantification, ParallelListComp, TemplateHaskell #-}
{-| TemplateHaskell helper for Ganeti Haskell code.
As TemplateHaskell requires that splices be defined in a separate
module, we combine all the TemplateHaskell functionality that HTools
needs in this module (except the one for unittests).
-}
{-
Copyright (C) 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.THH ( declareSADT
, declareLADT
, declareILADT
, declareIADT
, makeJSONInstance
, deCamelCase
, genOpID
, genAllConstr
, genAllOpIDs
, PyValue(..)
, PyValueEx(..)
, OpCodeDescriptor
, genOpCode
, genStrOfOp
, genStrOfKey
, genLuxiOp
, Field (..)
, simpleField
, andRestArguments
, withDoc
, defaultField
, optionalField
, optionalNullSerField
, renameField
, customField
, timeStampFields
, uuidFields
, serialFields
, tagsFields
, TagSet
, buildObject
, buildObjectSerialisation
, buildParam
, DictObject(..)
, genException
, excErrMsg
) where
import Control.Monad (liftM)
import Data.Char
import Data.List
import qualified Data.Map as M
import qualified Data.Set as Set
import Language.Haskell.TH
import qualified Text.JSON as JSON
import Text.JSON.Pretty (pp_value)
import Ganeti.JSON
import Data.Maybe
import Data.Functor ((<$>))
-- * Exported types

-- | Class of objects that can be converted to a list-of-pairs
-- (\"dict\"-like) JSON representation, i.e. an association list of
-- field name to JSON value.
class DictObject a where
  toDict :: a -> [(String, JSON.JSValue)]
-- | Optional field information.
--
-- Describes how (and whether) a field's absence or null value is
-- handled during (de)serialisation; see 'actualFieldType' and
-- 'checkNonOptDef' for how each case is treated.
data OptionalType
  = NotOptional           -- ^ Field is not optional
  | OptionalOmitNull      -- ^ Field is optional, null is not serialised
  | OptionalSerializeNull -- ^ Field is optional, null is serialised
  | AndRestArguments      -- ^ Special field capturing all the remaining fields
                          -- as plain JSON values
  deriving (Show, Eq)
-- | Serialised field data type.
data Field = Field { fieldName :: String
                     -- ^ Name of the field in the JSON serialisation
                   , fieldType :: Q Type
                     -- ^ Base Haskell type of the field (before any
                     -- 'Maybe' wrapping for optional fields)
                   , fieldRead :: Maybe (Q Exp)
                     -- ^ Custom deserialisation function, if any
                     -- (set via 'customField')
                   , fieldShow :: Maybe (Q Exp)
                     -- ^ Custom serialisation function, if any
                     -- (set via 'customField')
                   , fieldExtraKeys :: [String]
                     -- ^ Extra JSON keys associated with the custom
                     -- (de)serialisation functions (see 'customField')
                   , fieldDefault :: Maybe (Q Exp)
                     -- ^ Default value expression, if the field has one
                   , fieldConstr :: Maybe String
                     -- ^ Optional override for the derived Haskell
                     -- record-field name (see 'fieldRecordName')
                   , fieldIsOptional :: OptionalType
                     -- ^ Optionality of the field
                   , fieldDoc :: String
                     -- ^ Documentation string attached to the field
                   }
-- | Generates a simple field: required, with no custom
-- (de)serialisation, no default value and no documentation.
simpleField :: String -> Q Type -> Field
simpleField fname ftype =
  Field { fieldName = fname
        , fieldType = ftype
        , fieldRead = Nothing
        , fieldShow = Nothing
        , fieldExtraKeys = []
        , fieldDefault = Nothing
        , fieldConstr = Nothing
        , fieldIsOptional = NotOptional
        , fieldDoc = ""
        }
-- | Generate an AndRestArguments catch-all field.
--
-- Such a field collects all JSON keys not consumed by the other
-- fields into a @Map String JSValue@. Defined in terms of
-- 'simpleField' so the default record values are not duplicated.
andRestArguments :: String -> Field
andRestArguments fname =
  (simpleField fname [t| M.Map String JSON.JSValue |])
    { fieldIsOptional = AndRestArguments }
-- | Attaches a documentation string to a field.
withDoc :: String -> Field -> Field
withDoc text fld = fld { fieldDoc = text }
-- | Overrides the Haskell record-constructor name for a field.
renameField :: String -> Field -> Field
renameField cname fld = fld { fieldConstr = Just cname }
-- | Attaches a default value to a field (which makes the field
-- optional, with the given default used when the key is absent).
defaultField :: Q Exp -> Field -> Field
defaultField dflt fld = fld { fieldDefault = Just dflt }
-- | Marks a field optional (its base type becomes a Maybe); a
-- 'Nothing' value is omitted from the serialisation entirely.
optionalField :: Field -> Field
optionalField fld = fld { fieldIsOptional = OptionalOmitNull }
-- | Marks a field optional (its base type becomes a Maybe), with
-- 'Nothing' serialised explicitly as /null/ rather than omitted.
optionalNullSerField :: Field -> Field
optionalNullSerField fld = fld { fieldIsOptional = OptionalSerializeNull }
-- | Installs custom (de)serialisation functions on a field.
customField :: Name     -- ^ Name of the custom read function
            -> Name     -- ^ Name of the custom show function
            -> [String] -- ^ Extra JSON keys used by those functions
            -> Field    -- ^ Field to update
            -> Field    -- ^ Updated field
customField rdName shName extraKeys fld =
  fld { fieldRead      = Just (varE rdName)
      , fieldShow      = Just (varE shName)
      , fieldExtraKeys = extraKeys
      }
-- | Computes the Haskell record-field name for a field: the explicit
-- 'fieldConstr' alias if present, otherwise the camel-cased JSON name.
fieldRecordName :: Field -> String
fieldRecordName fld =
  fromMaybe (camelCase (fieldName fld)) (fieldConstr fld)
-- | Computes the preferred variable name for a field's value: a
-- first-letter-lowercased version of the constructor alias if one is
-- set, otherwise the JSON field name with dashes turned into
-- underscores. See also 'fieldRecordName'.
fieldVariable :: Field -> String
fieldVariable fld =
  maybe (map dashToUnderscore (fieldName fld)) ensureLower (fieldConstr fld)
  where
    dashToUnderscore '-' = '_'
    dashToUnderscore c   = c
-- | Computes the actual field type: optional fields are wrapped in
-- 'Maybe', required and catch-all fields keep their base type.
actualFieldType :: Field -> Q Type
actualFieldType fld =
  case fieldIsOptional fld of
    OptionalOmitNull      -> [t| Maybe $base |]
    OptionalSerializeNull -> [t| Maybe $base |]
    _                     -> base
  where
    base = fieldType fld
-- | Checks that a given field is neither optional nor defaulted,
-- failing in the given monad otherwise (used for object types or
-- fields which should not allow those cases).
checkNonOptDef :: (Monad m) => Field -> m ()
checkNonOptDef fld
  | fieldIsOptional fld `elem` [OptionalOmitNull, OptionalSerializeNull] =
      fail $ "Optional field " ++ fieldName fld ++
             " used in parameter declaration"
  | isJust (fieldDefault fld) =
      fail $ "Default field " ++ fieldName fld ++
             " used in parameter declaration"
  | otherwise = return ()
-- | Produces the expression that de-serialises a given field. When a
-- custom read function is installed, it is applied to the entire JSON
-- object as well, since some parsers need access to sibling keys;
-- otherwise the raw value expression is returned unchanged.
loadFn :: Field -- ^ The field definition
       -> Q Exp -- ^ The value of the field as existing in the JSON message
       -> Q Exp -- ^ The entire object in JSON object format
       -> Q Exp -- ^ Resulting expression
loadFn fld expr o =
  case fieldRead fld of
    Just readfn -> [| $expr >>= $readfn $o |]
    Nothing     -> expr
-- * Common field declarations
-- | Timestamp fields description: creation and modification times,
-- both doubles defaulting to 0.
timeStampFields :: [Field]
timeStampFields =
  [ defaultField [| 0::Double |] (simpleField key [t| Double |])
  | key <- ["ctime", "mtime"] ]
-- | Serial number fields description (JSON key @serial_no@, exposed
-- as the @Serial@ record field).
serialFields :: [Field]
serialFields =
  [ renameField "Serial" $ simpleField "serial_no" [t| Int |] ]
-- | UUID fields description.
uuidFields :: [Field]
uuidFields = [ simpleField "uuid" [t| String |] ]
-- | Tag set type alias.
type TagSet = Set.Set String
-- | Tag field description: a set of strings, defaulting to empty.
tagsFields :: [Field]
tagsFields = [ defaultField [| Set.empty |] $
               simpleField "tags" [t| TagSet |] ]
-- * Internal types
-- | A simple field, in contrast to the customisable 'Field' type.
type SimpleField = (String, Q Type)
-- | A definition for a single constructor for a simple object.
type SimpleConstructor = (String, [SimpleField])
-- | A definition for ADTs with simple fields.
type SimpleObject = [SimpleConstructor]
-- | A type alias for an opcode constructor of a regular object.
-- (Presumably name, result type, doc strings and fields; the consumer
-- genOpCode is not visible in this chunk -- confirm there.)
type OpCodeConstructor = (String, Q Type, String, [Field], String)
-- | A type alias for a Luxi constructor of a regular object:
-- constructor name and its fields.
type LuxiConstructor = (String, [Field])
-- * Helper functions
-- | Lowercases the first character of a string, leaving the rest
-- untouched.
--
-- Used to derive a function prefix from a type name, e.g. in
-- @data Aa -> aaToRaw@.
ensureLower :: String -> String
ensureLower s = case s of
  []     -> []
  c : cs -> toLower c : cs
-- | Uppercases the first character of a string, leaving the rest
-- untouched.
--
-- Used to convert a constructor name to a CamelCase component.
ensureUpper :: String -> String
ensureUpper s = case s of
  []     -> []
  c : cs -> toUpper c : cs
-- | Builds a variable expression from a plain string name; helper
-- for quoted expressions.
varNameE :: String -> Q Exp
varNameE name = varE (mkName name)
-- | 'JSON.showJSON' as a quoted expression, for reuse in generated
-- splices.
showJSONE :: Q Exp
showJSONE = varE 'JSON.showJSON
-- | 'JSON.makeObj' as a quoted expression, for reuse in generated
-- splices.
makeObjE :: Q Exp
makeObjE = varE 'JSON.makeObj
-- | 'fromObj' (Ganeti specific, from Ganeti.JSON) as a quoted
-- expression, for reuse in generated splices.
fromObjE :: Q Exp
fromObjE = varE 'fromObj
-- | Name of the generated ToRaw conversion function for a type,
-- e.g. @\"Foo\" -> fooToRaw@.
toRawName :: String -> Name
toRawName sname = mkName (ensureLower sname ++ "ToRaw")
-- | Name of the generated FromRaw conversion function for a type,
-- e.g. @\"Foo\" -> fooFromRaw@.
fromRawName :: String -> Name
fromRawName sname = mkName (ensureLower sname ++ "FromRaw")
-- | Converts a raw representation to its quoted form: a plain string
-- becomes a string literal, a name becomes a variable expression.
reprE :: Either String Name -> Q Exp
reprE (Left s)  = stringE s
reprE (Right n) = varE n
-- | Smarter function application.
--
-- Behaves as plain application, except that applying 'id' is elided
-- entirely, producing more readable code under -ddump-splices.
appFn :: Exp -> Exp -> Exp
appFn f x = if f == VarE 'id then x else AppE f x
-- | Builds a (strictness, type) pair for a normal-constructor field;
-- fields are always non-strict.
buildConsField :: Q Type -> StrictTypeQ
buildConsField ftype = liftM ((,) NotStrict) ftype
-- | Builds a data declaration from a simple (non-'Field'-based)
-- object definition, deriving Show and Eq.
buildSimpleCons :: Name -> SimpleObject -> Q Dec
buildSimpleCons tname cons = do
  decls <- mapM mkCons cons
  return $ DataD [] tname [] decls [''Show, ''Eq]
  where
    -- one normal constructor per simple-constructor definition
    mkCons (cname, fields) = do
      fields' <- mapM (buildConsField . snd) fields
      return $ NormalC (mkName cname) fields'
-- | Generates the save (to-JSON) function for a simple object type:
-- a signature @tname -> JSValue@ and one clause per constructor,
-- produced by the supplied clause builder.
genSaveSimpleObj :: Name                          -- ^ Object type
                 -> String                        -- ^ Function name
                 -> SimpleObject                  -- ^ Object definition
                 -> (SimpleConstructor -> Q Clause) -- ^ Constructor save fn
                 -> Q (Dec, Dec)
genSaveSimpleObj tname sname opdefs fn = do
  cclauses <- mapM fn opdefs
  let fname = mkName sname
      sigt  = AppT (AppT ArrowT (ConT tname)) (ConT ''JSON.JSValue)
  return (SigD fname sigt, FunD fname cclauses)
-- * Template code for simple raw type-equivalent ADTs
-- | Generates a data type declaration with nullary constructors and
-- a fixed list of derived instances (Show, Eq, Enum, Bounded, Ord).
strADTDecl :: Name -> [String] -> Dec
strADTDecl name constructors =
  DataD [] name [] cons derived
  where
    cons    = [ NormalC (mkName c) [] | c <- constructors ]
    derived = [''Show, ''Eq, ''Enum, ''Bounded, ''Ord]
-- | Generates a toRaw function.
--
-- The result is a simple function of the form:
--
-- @
-- nameToRaw :: Name -> /traw/
-- nameToRaw Cons1 = var1
-- nameToRaw Cons2 = \"value2\"
-- @
genToRaw :: Name -> Name -> Name -> [(String, Either String Name)] -> Q [Dec]
genToRaw traw fname tname constructors = do
  let sigt = AppT (AppT ArrowT (ConT tname)) (ConT traw)
      -- one clause per constructor: match on it (ignoring any
      -- arguments via a record pattern) and return the raw value
      mkClause (c, v) = clause [recP (mkName c) []] (normalB (reprE v)) []
  clauses <- mapM mkClause constructors
  return [SigD fname sigt, FunD fname clauses]
-- | Generates a fromRaw function.
--
-- The function generated is monadic and can fail parsing the
-- raw value. It is of the form:
--
-- @
-- nameFromRaw :: (Monad m) => /traw/ -> m Name
-- nameFromRaw s | s == var1 = Cons1
--               | s == \"value2\" = Cons2
--               | otherwise = fail /.../
-- @
genFromRaw :: Name -> Name -> Name -> [(String, Either String Name)] -> Q [Dec]
genFromRaw traw fname tname constructors = do
  -- signature of form (Monad m) => String -> m $name
  sigt <- [t| (Monad m) => $(conT traw) -> m $(conT tname) |]
  -- the single function argument, scrutinised by every guard below
  let varp = mkName "s"
      varpe = varE varp
  -- one guarded alternative per constructor
  clauses <- mapM (\(c, v) -> do
                     -- the clause match condition (raw value equality)
                     g <- normalG [| $varpe == $(reprE v) |]
                     -- the clause result (the constructor itself)
                     r <- [| return $(conE (mkName c)) |]
                     return (g, r)) constructors
  -- the otherwise clause (fallback), failing with a descriptive message
  oth_clause <- do
    g <- normalG [| otherwise |]
    r <- [|fail ("Invalid string value for type " ++
                 $(litE (stringL (nameBase tname))) ++ ": " ++ show $varpe) |]
    return (g, r)
  let fun = FunD fname [Clause [VarP varp]
                        (GuardedB (clauses++[oth_clause])) []]
  return [SigD fname sigt, fun]
-- | Generates a data type plus raw-conversion functions.
--
-- Given a list of constructor names with their raw representations,
-- this generates:
--
-- * the data type declaration itself (via 'strADTDecl')
--
-- * /name/ToRaw, converting the type to the raw type
--
-- * /name/FromRaw, (monadically) converting from the raw type
--
-- This is essentially a custom show\/read instance pair, nothing
-- more.
declareADT
  :: (a -> Either String Name) -> Name -> String -> [(String, a)] -> Q [Dec]
declareADT fn traw sname cons = do
  let name  = mkName sname
      ddecl = strADTDecl name (map fst cons)
      -- pair every constructor with its raw value, in the shape
      -- expected by genToRaw/genFromRaw
      cons' = [ (cname, fn v) | (cname, v) <- cons ]
  toraw   <- genToRaw traw (toRawName sname) name cons'
  fromraw <- genFromRaw traw (fromRawName sname) name cons'
  return $ ddecl : toraw ++ fromraw
-- | Declares an ADT whose raw representation is a string-like
-- literal of the given raw type.
declareLADT :: Name -> String -> [(String, String)] -> Q [Dec]
declareLADT = declareADT Left
-- | Declares an Int-based ADT where each constructor's raw value is
-- bound to a fresh top-level Int constant (named @_Cons@), and the
-- conversion functions refer to those constants.
declareILADT :: String -> [(String, Int)] -> Q [Dec]
declareILADT sname cons = do
  consNames <- sequence [ newName ('_':n) | (n, _) <- cons ]
  consFns <- concat <$> sequence
             [ do sig <- sigD n [t| Int |]
                  let expr = litE (IntegerL (toInteger i))
                  fn <- funD n [clause [] (normalB expr) []]
                  return [sig, fn]
             | n <- consNames
             | (_, i) <- cons ]
  let cons' = [ (n, n') | (n, _) <- cons | n' <- consNames ]
  (consFns ++) <$> declareADT Right ''Int sname cons'
-- | Declares an ADT whose raw representation is an Int-valued name.
declareIADT :: String -> [(String, Name)] -> Q [Dec]
declareIADT = declareADT Right ''Int
-- | Declares an ADT whose raw representation is a String-valued name.
declareSADT :: String -> [(String, Name)] -> Q [Dec]
declareSADT = declareADT Right ''String
-- | Creates the showJSON member of a JSON instance declaration.
--
-- The generated member is equivalent to:
--
-- @
-- showJSON = showJSON . /name/ToRaw
-- @
--
-- inside an @instance JSON /name/@ declaration.
genShowJSON :: String -> Q Dec
genShowJSON name =
  liftM (\body -> FunD 'JSON.showJSON [Clause [] (NormalB body) []])
        [| JSON.showJSON . $(varE (toRawName name)) |]
-- | Creates the readJSON member of a JSON instance declaration.
--
-- The generated member is equivalent to:
--
-- @
-- readJSON s = case readJSON s of
--                Ok s' -> /name/FromRaw s'
--                Error e -> Error /description/
-- @
--
-- inside an @instance JSON /name/@ declaration.
genReadJSON :: String -> Q Dec
genReadJSON name = do
  -- the function's argument, referenced inside the quoted body
  let s = mkName "s"
  body <- [| case JSON.readJSON $(varE s) of
               JSON.Ok s' -> $(varE (fromRawName name)) s'
               JSON.Error e ->
                 JSON.Error $ "Can't parse raw value for type " ++
                              $(stringE name) ++ ": " ++ e ++ " from " ++
                              show $(varE s)
           |]
  return $ FunD 'JSON.readJSON [Clause [VarP s] (NormalB body) []]
-- | Generates a JSON instance for a given type.
--
-- Assumes that the /name/ToRaw and /name/FromRaw functions have
-- already been defined, e.g. by 'declareSADT'.
makeJSONInstance :: Name -> Q [Dec]
makeJSONInstance name = do
  showJ <- genShowJSON base
  readJ <- genReadJSON base
  let instHead = AppT (ConT ''JSON.JSON) (ConT name)
  return [InstanceD [] instHead [readJ, showJ]]
  where
    base = nameBase name
-- * Template code for opcodes
-- | Transforms a CamelCase string into AN_UNDERSCORE_BASED_ONE:
-- groups start at each uppercase letter, then everything is
-- uppercased and joined with underscores.
deCamelCase :: String -> String
deCamelCase str =
  intercalate "_" [ map toUpper grp
                  | grp <- groupBy (\_ b -> not (isUpper b)) str ]
-- | Transforms an underscore_name (or dash-name) into a CamelCase
-- one: a separator is prepended so that every group starts with a
-- separator, which is then dropped and the following letter
-- uppercased.
camelCase :: String -> String
camelCase s = concatMap capitalise (groupBy notSep ('_' : s))
  where
    notSep _ b = b /= '_' && b /= '-'
    -- drop the leading separator and uppercase the first remaining char
    capitalise grp = case drop 1 grp of
      []     -> []
      c : cs -> toUpper c : cs
-- | Extracts the name of a constructor, failing on constructor forms
-- other than normal and record constructors.
constructorName :: Con -> Q Name
constructorName con =
  case con of
    NormalC name _ -> return name
    RecC name _    -> return name
    other          -> fail $ "Unhandled constructor " ++ show other
-- | Extracts all constructor names from a given type, via 'reify'.
-- Fails if the name does not resolve to a plain data declaration.
reifyConsNames :: Name -> Q [String]
reifyConsNames name = do
  reify_result <- reify name
  case reify_result of
    TyConI (DataD _ _ _ cons _) -> mapM (liftM nameBase . constructorName) cons
    o -> fail $ "Unhandled name passed to reifyConsNames, expected\
                \ type constructor but got '" ++ show o ++ "'"
-- | Builds the generic constructor-to-string function.
--
-- The generated function has the form:
--
-- @
-- fname (ConStructorOne {}) = trans_fun("ConStructorOne")
-- fname (ConStructorTwo {}) = trans_fun("ConStructorTwo")
-- @
--
-- The constructor names are reified, translated, and handed to
-- 'genToRaw' to produce the actual declarations.
genConstrToStr :: (String -> String) -> Name -> String -> Q [Dec]
genConstrToStr trans_fun name fname = do
  cnames <- reifyConsNames name
  let pairs = [ (cname, Left (trans_fun cname)) | cname <- cnames ]
  genToRaw ''String (mkName fname) name pairs
-- | Constructor-to-string for OpCode: translates constructor names
-- with 'deCamelCase' (e.g. @OpTestDelay -> OP_TEST_DELAY@).
genOpID :: Name -> String -> Q [Dec]
genOpID = genConstrToStr deCamelCase
-- | Builds a top-level list of all defined constructor names for a
-- type:
--
-- @
-- vstr :: String
-- vstr = [...]
-- @
--
-- where the strings are the constructor names mapped via @trans_fun@
-- and sorted.
genAllConstr :: (String -> String) -> Name -> String -> Q [Dec]
genAllConstr trans_fun name vstr = do
  cnames <- reifyConsNames name
  let vname   = mkName vstr
      svalues = sort (map trans_fun cnames)
      sig     = SigD vname (AppT ListT (ConT ''String))
      body    = NormalB (ListE [ LitE (StringL v) | v <- svalues ])
  return [sig, ValD (VarP vname) body []]
-- | Generates a list of all defined opcode IDs.
genAllOpIDs :: Name -> String -> Q [Dec]
genAllOpIDs = genAllConstr deCamelCase
-- | OpCode parameter (field) type.
type OpParam = (String, Q Type, Q Exp)
-- * Python code generation
-- | Converts Haskell values into Python values
--
-- This is necessary for the default values of opcode parameters and
-- return values. For example, if a default value or return type is a
-- Data.Map, then it must be shown as a Python dictionary.
class PyValue a where
  showValue :: a -> String
-- | Encapsulates Python default values.
--
-- The existential hides the concrete type, so heterogeneous default
-- values can be kept in a single list.
data PyValueEx = forall a. PyValue a => PyValueEx a
instance PyValue PyValueEx where
  showValue (PyValueEx x) = showValue x
-- | Transfers opcode data between the opcode description (through
-- @genOpCode@) and the Python code generation functions.
type OpCodeDescriptor =
  (String, String, String, [String],
   [String], [Maybe PyValueEx], [String], String)
-- | Strips out the module name, keeping only the final component.
--
-- @
-- pyBaseName "Data.Map" = "Map"
-- @
pyBaseName :: String -> String
pyBaseName str
  -- keep discarding components up to the first dot
  | '.' `elem` str = pyBaseName . drop 1 $ dropWhile (/= '.') str
  | otherwise = str
-- | Converts a Haskell type name into a Python type name.
--
-- @
-- pyTypename "Bool" = "ht.TBool"
-- @
pyTypeName :: Show a => a -> String
pyTypeName name = "ht.T" ++ translate (pyBaseName (show name))
  where
    -- special cases where the Python-side name differs from Haskell's
    translate "()" = "None"
    translate "Map" = "DictOf"
    translate "Set" = "SetOf"
    translate "ListSet" = "SetOf"
    translate "Either" = "Or"
    translate "GenericContainer" = "DictOf"
    translate "JSValue" = "Any"
    translate "JSObject" = "Object"
    translate str = str
-- | Converts a Haskell type into a Python type.
--
-- @
-- pyType [Int] = "ht.TListOf(ht.TInt)"
-- @
pyType :: Type -> Q String
-- type applications are rendered via 'pyCall', which leaves the
-- closing parenthesis to be added here
pyType (AppT typ1 typ2) =
  do t <- pyCall typ1 typ2
     return $ t ++ ")"
pyType (ConT name) = return (pyTypeName name)
pyType ListT = return "ht.TListOf"
pyType (TupleT 0) = return "ht.TNone"
pyType (TupleT _) = return "ht.TTupleOf"
pyType typ = error $ "unhandled case for type " ++ show typ
-- | Converts a Haskell type application into a Python type.
--
-- @
-- Maybe Int = "ht.TMaybe(ht.TInt)"
-- @
pyCall :: Type -> Type -> Q String
-- nested applications accumulate comma-separated arguments
pyCall (AppT typ1 typ2) arg =
  do t <- pyCall typ1 typ2
     targ <- pyType arg
     return $ t ++ ", " ++ targ
-- base case: emit the head type and the opening parenthesis
pyCall typ1 typ2 =
  do t1 <- pyType typ1
     t2 <- pyType typ2
     return $ t1 ++ "(" ++ t2
-- | @pyType opt typ@ converts Haskell type @typ@ into a Python type,
-- where @opt@ determines if the converted type is optional (i.e.,
-- Maybe).
--
-- @
-- pyType False [Int] = "ht.TListOf(ht.TInt)" (mandatory)
-- pyType True [Int] = "ht.TMaybe(ht.TListOf(ht.TInt))" (optional)
-- @
pyOptionalType :: Bool -> Type -> Q String
pyOptionalType opt typ
  | opt = do t <- pyType typ
             return $ "ht.TMaybe(" ++ t ++ ")"
  | otherwise = pyType typ
-- | Optionally encapsulates default values in @PyValueEx@.
--
-- @maybeApp exp typ@ returns a quoted expression that encapsulates
-- the default value @exp@ of an opcode parameter cast to @typ@ in a
-- @PyValueEx@, if @exp@ is @Just@. Otherwise, it returns a quoted
-- expression with @Nothing@.
maybeApp :: Maybe (Q Exp) -> Q Type -> Q Exp
maybeApp Nothing _ =
  [| Nothing |]
maybeApp (Just expr) typ =
  [| Just ($(conE (mkName "PyValueEx")) ($expr :: $typ)) |]
-- | Generates a Python type according to whether the field is
-- optional (anything other than 'NotOptional' is wrapped in TMaybe).
genPyType :: OptionalType -> Q Type -> Q ExpQ
genPyType opt typ =
  do t <- typ
     stringE <$> pyOptionalType (opt /= NotOptional) t
-- | Generates Python types from opcode parameters.
genPyTypes :: [Field] -> Q ExpQ
genPyTypes fs =
  listE <$> mapM (\f -> genPyType (fieldIsOptional f) (fieldType f)) fs
-- | Generates Python default values from opcode parameters.
genPyDefaults :: [Field] -> ExpQ
genPyDefaults fs =
  listE $ map (\f -> maybeApp (fieldDefault f) (fieldType f)) fs
-- | Generates a Haskell function call to "showPyClass" with the
-- necessary information on how to build the Python class string.
--
-- The quoted tuple produced here must match the layout of
-- 'OpCodeDescriptor'.
pyClass :: OpCodeConstructor -> ExpQ
pyClass (consName, consType, consDoc, consFields, consDscField) =
  -- note: the previously-declared @pyClassVar = varNameE "showPyClass"@
  -- binding was unused and has been removed
  do consType' <- genPyType NotOptional consType
     let consName' = stringE consName
         consDoc' = stringE consDoc
         consFieldNames = listE $ map (stringE . fieldName) consFields
         consFieldDocs = listE $ map (stringE . fieldDoc) consFields
     consFieldTypes <- genPyTypes consFields
     let consFieldDefaults = genPyDefaults consFields
     [| ($consName',
         $consType',
         $consDoc',
         $consFieldNames,
         $consFieldTypes,
         $consFieldDefaults,
         $consFieldDocs,
         consDscField) |]
-- | Generates a function called "pyClasses" that holds the list of
-- all the opcode descriptors necessary for generating the Python
-- opcodes.
pyClasses :: [OpCodeConstructor] -> Q [Dec]
pyClasses cons =
  do let name = mkName "pyClasses"
         sig = SigD name (AppT ListT (ConT ''OpCodeDescriptor))
     fn <- FunD name <$> (:[]) <$> declClause cons
     return [sig, fn]
  -- a single clause whose body is the literal list of descriptors
  where declClause c =
          clause [] (normalB (ListE <$> mapM pyClass c)) []
-- | Converts from an opcode constructor to a Luxi constructor,
-- keeping only the constructor name and its field list.
opcodeConsToLuxiCons :: (a, b, c, d, e) -> (a, d)
opcodeConsToLuxiCons (name, _, _, fields, _) = (name, fields)
-- | Generates the OpCode data type.
--
-- This takes an opcode logical definition, and builds both the
-- datatype and the JSON serialisation out of it. We can't use a
-- generic serialisation since we need to be compatible with Ganeti's
-- own, so we have a few quirks to work around.
genOpCode :: String                -- ^ Type name to use
          -> [OpCodeConstructor]   -- ^ Constructor name and parameters
          -> Q [Dec]
genOpCode name cons = do
  let tname = mkName name
  decl_d <- mapM (\(cname, _, _, fields, _) -> do
                    -- we only need the type of the field, without Q
                    fields' <- mapM (fieldTypeInfo "op") fields
                    return $ RecC (mkName cname) fields')
            cons
  let declD = DataD [] tname [] decl_d [''Show, ''Eq]
  let (allfsig, allffn) = genAllOpFields "allOpFields" cons
  -- save/load serialisation functions plus Python descriptors
  save_decs <- genSaveOpCode tname "saveOpCode" "toDictOpCode"
               (map opcodeConsToLuxiCons cons) saveConstructor True
  (loadsig, loadfn) <- genLoadOpCode cons
  pyDecls <- pyClasses cons
  return $ [declD, allfsig, allffn, loadsig, loadfn] ++ save_decs ++ pyDecls
-- | Generates the function pattern returning the list of fields for a
-- given constructor.
genOpConsFields :: OpCodeConstructor -> Clause
genOpConsFields (cname, _, _, fields, _) =
  let op_id = deCamelCase cname
      -- include any extra serialisation keys, deduplicated and sorted
      fvals = map (LitE . StringL) . sort . nub $
              concatMap (\f -> fieldName f:fieldExtraKeys f) fields
  in Clause [LitP (StringL op_id)] (NormalB $ ListE fvals) []
-- | Generates a list of all fields of an opcode constructor.
genAllOpFields  :: String              -- ^ Function name
                -> [OpCodeConstructor] -- ^ Object definition
                -> (Dec, Dec)
genAllOpFields sname opdefs =
  let cclauses = map genOpConsFields opdefs
      -- unknown opcode ids fall through to an empty field list
      other = Clause [WildP] (NormalB (ListE [])) []
      fname = mkName sname
      sigt = AppT (AppT ArrowT (ConT ''String)) (AppT ListT (ConT ''String))
  in (SigD fname sigt, FunD fname (cclauses++[other]))
-- | Generates the \"save\" clause for an entire opcode constructor.
--
-- This matches the opcode with variables named the same as the
-- constructor fields (just so that the spliced in code looks nicer),
-- and passes those name plus the parameter definition to 'saveObjectField'.
saveConstructor :: LuxiConstructor -- ^ The constructor
                -> Q Clause        -- ^ Resulting clause
saveConstructor (sname, fields) = do
  let cname = mkName sname
  fnames <- mapM (newName . fieldVariable) fields
  let pat = conP cname (map varP fnames)
  let felems = map (uncurry saveObjectField) (zip fnames fields)
      -- now build the OP_ID serialisation
      opid = [| [( $(stringE "OP_ID"),
                   JSON.showJSON $(stringE . deCamelCase $ sname) )] |]
      flist = listE (opid:felems)
      -- and finally convert all this to a json object
      flist' = [| concat $flist |]
  clause [pat] (normalB flist') []
-- | Generates the main save opcode function.
--
-- This builds a per-constructor match clause that contains the
-- respective constructor-serialisation code.
genSaveOpCode :: Name                          -- ^ Object type
              -> String                        -- ^ To 'JSValue' function name
              -> String                        -- ^ To 'JSObject' function name
              -> [LuxiConstructor]             -- ^ Object definition
              -> (LuxiConstructor -> Q Clause) -- ^ Constructor save fn
              -> Bool                          -- ^ Whether to generate
                                               -- obj or just a
                                               -- list\/tuple of values
              -> Q [Dec]
genSaveOpCode tname jvalstr tdstr opdefs fn gen_object = do
  tdclauses <- mapM fn opdefs
  let typecon = ConT tname
      jvalname = mkName jvalstr
      jvalsig = AppT (AppT ArrowT typecon) (ConT ''JSON.JSValue)
      tdname = mkName tdstr
  tdsig <- [t| $(return typecon) -> [(String, JSON.JSValue)] |]
  -- the JSValue function is defined in terms of the dict function
  jvalclause <- if gen_object
                  then [| $makeObjE . $(varE tdname) |]
                  else [| JSON.showJSON . map snd . $(varE tdname) |]
  return [ SigD tdname tdsig
         , FunD tdname tdclauses
         , SigD jvalname jvalsig
         , ValD (VarP jvalname) (NormalB jvalclause) []]
-- | Generates load code for a single constructor of the opcode data type.
loadConstructor :: OpCodeConstructor -> Q Exp
loadConstructor (sname, _, _, fields, _) = do
  let name = mkName sname
  fbinds <- mapM (loadObjectField fields) fields
  let (fnames, fstmts) = unzip fbinds
  -- apply the constructor to all loaded field variables
  let cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) fnames
      fstmts' = fstmts ++ [NoBindS (AppE (VarE 'return) cval)]
  return $ DoE fstmts'
-- | Generates the loadOpCode function.
genLoadOpCode :: [OpCodeConstructor] -> Q (Dec, Dec)
genLoadOpCode opdefs = do
  let fname = mkName "loadOpCode"
      arg1 = mkName "v"
      objname = mkName "o"
      opid = mkName "op_id"
  st1 <- bindS (varP objname) [| liftM JSON.fromJSObject
                                 (JSON.readJSON $(varE arg1)) |]
  st2 <- bindS (varP opid) [| $fromObjE $(varE objname) $(stringE "OP_ID") |]
  -- the match results (per-constructor blocks)
  mexps <- mapM loadConstructor opdefs
  fails <- [| fail $ "Unknown opcode " ++ $(varE opid) |]
  -- dispatch on the (de-camel-cased) OP_ID value
  let mpats = map (\(me, (consName, _, _, _, _)) ->
                     let mp = LitP . StringL . deCamelCase $ consName
                     in Match mp (NormalB me) []
                  ) $ zip mexps opdefs
      defmatch = Match WildP (NormalB fails) []
      cst = NoBindS $ CaseE (VarE opid) $ mpats++[defmatch]
      body = DoE [st1, st2, cst]
  sigt <- [t| JSON.JSValue -> JSON.Result $(conT (mkName "OpCode")) |]
  return $ (SigD fname sigt, FunD fname [Clause [VarP arg1] (NormalB body) []])
-- * Template code for luxi
-- | Constructor-to-string for LuxiOp (names are used verbatim).
genStrOfOp :: Name -> String -> Q [Dec]
genStrOfOp = genConstrToStr id
-- | Constructor-to-string for MsgKeys (first letter lower-cased).
genStrOfKey :: Name -> String -> Q [Dec]
genStrOfKey = genConstrToStr ensureLower
-- | Generates the LuxiOp data type.
--
-- This takes a Luxi operation definition and builds both the
-- datatype and the function transforming the arguments to JSON.
-- We can't use anything less generic, because the way different
-- operations are serialized differs on both parameter- and top-level.
--
-- There are two things to be defined for each parameter:
--
-- * name
--
-- * type
--
genLuxiOp :: String -> [LuxiConstructor] -> Q [Dec]
genLuxiOp name cons = do
  let tname = mkName name
  decl_d <- mapM (\(cname, fields) -> do
                    -- we only need the type of the field, without Q
                    fields' <- mapM actualFieldType fields
                    let fields'' = zip (repeat NotStrict) fields'
                    return $ NormalC (mkName cname) fields'')
            cons
  let declD = DataD [] (mkName name) [] decl_d [''Show, ''Eq]
  -- Luxi serialises positionally (False), unlike opcodes
  save_decs <- genSaveOpCode tname "opToArgs" "opToDict"
               cons saveLuxiConstructor False
  req_defs <- declareSADT "LuxiReq" .
              map (\(str, _) -> ("Req" ++ str, mkName ("luxiReq" ++ str))) $
              cons
  return $ declD:save_decs ++ req_defs
-- | Generates the \"save\" clause for entire LuxiOp constructor.
--
-- Unlike 'saveConstructor', no OP_ID entry is prepended.
saveLuxiConstructor :: LuxiConstructor -> Q Clause
saveLuxiConstructor (sname, fields) = do
  let cname = mkName sname
  fnames <- mapM (newName . fieldVariable) fields
  let pat = conP cname (map varP fnames)
  let felems = map (uncurry saveObjectField) (zip fnames fields)
      flist = [| concat $(listE felems) |]
  clause [pat] (normalB flist) []
-- * "Objects" functionality
-- | Extract the field's declaration from a Field structure.
fieldTypeInfo :: String -> Field -> Q (Name, Strict, Type)
fieldTypeInfo field_pfx fd = do
  t <- actualFieldType fd
  -- the record selector is prefixed to avoid name clashes
  let n = mkName . (field_pfx ++) . fieldRecordName $ fd
  return (n, NotStrict, t)
-- | Build an object declaration.
buildObject :: String -> String -> [Field] -> Q [Dec]
buildObject sname field_pfx fields = do
  let name = mkName sname
  fields_d <- mapM (fieldTypeInfo field_pfx) fields
  let decl_d = RecC name fields_d
  let declD = DataD [] name [] [decl_d] [''Show, ''Eq]
  ser_decls <- buildObjectSerialisation sname fields
  return $ declD:ser_decls
-- | Generates an object definition: data type and its JSON instance.
buildObjectSerialisation :: String -> [Field] -> Q [Dec]
buildObjectSerialisation sname fields = do
  let name = mkName sname
  savedecls <- genSaveObject saveObjectField sname fields
  (loadsig, loadfn) <- genLoadObject (loadObjectField fields) sname fields
  shjson <- objectShowJSON sname
  rdjson <- objectReadJSON sname
  -- tie save/load together into a JSON.JSON instance
  let instdecl = InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name))
                 [rdjson, shjson]
  return $ savedecls ++ [loadsig, loadfn, instdecl]
-- | The toDict function name for a given type.
toDictName :: String -> Name
toDictName = mkName . ("toDict" ++)
-- | Generates the save object functionality.
--
-- Produces both a @toDictX@ function (object as key\/value list) and
-- a @saveX@ function (object as 'JSON.JSValue').
genSaveObject :: (Name -> Field -> Q Exp)
              -> String -> [Field] -> Q [Dec]
genSaveObject save_fn sname fields = do
  let name = mkName sname
  fnames <- mapM (newName . fieldVariable) fields
  let pat = conP name (map varP fnames)
  let tdname = toDictName sname
  tdsigt <- [t| $(conT name) -> [(String, JSON.JSValue)] |]
  -- note: the unused @iname = mkName "i"@ binding was removed
  let felems = zipWith save_fn fnames fields
      flist = listE felems
      -- and finally convert all this to a json object
      tdlist = [| concat $flist |]
  tclause <- clause [pat] (normalB tdlist) []
  cclause <- [| $makeObjE . $(varE tdname) |]
  let fname = mkName ("save" ++ sname)
  sigt <- [t| $(conT name) -> JSON.JSValue |]
  return [SigD tdname tdsigt, FunD tdname [tclause],
          SigD fname sigt, ValD (VarP fname) (NormalB cclause) []]
-- | Generates the code for saving an object's field, handling the
-- various types of fields that we have.
saveObjectField :: Name -> Field -> Q Exp
saveObjectField fvar field =
  case fieldIsOptional field of
    -- optional-omit: a Nothing value produces no key at all
    OptionalOmitNull -> [| case $(varE fvar) of
                             Nothing -> []
                             Just v -> [( $nameE, JSON.showJSON v )]
                         |]
    -- optional-serialise: a Nothing value is emitted as JSON null
    OptionalSerializeNull -> [| case $(varE fvar) of
                                  Nothing -> [( $nameE, JSON.JSNull )]
                                  Just v -> [( $nameE, JSON.showJSON v )]
                              |]
    NotOptional ->
      case fieldShow field of
        -- Note: the order of actual:extra is important, since for
        -- some serialisation types (e.g. Luxi), we use tuples
        -- (positional info) rather than object (name info)
        Nothing -> [| [( $nameE, JSON.showJSON $fvarE)] |]
        Just fn -> [| let (actual, extra) = $fn $fvarE
                      in ($nameE, JSON.showJSON actual):extra
                    |]
    AndRestArguments -> [| M.toList $(varE fvar) |]
  where nameE = stringE (fieldName field)
        fvarE = varE fvar
-- | Generates the showJSON clause for a given object name.
--
-- Delegates to the generated @saveX@ function for the type.
objectShowJSON :: String -> Q Dec
objectShowJSON name = do
  body <- [| JSON.showJSON . $(varE . mkName $ "save" ++ name) |]
  return $ FunD 'JSON.showJSON [Clause [] (NormalB body) []]
-- | Generates the load object functionality.
--
-- Builds a @loadX@ function parsing a 'JSON.JSValue' into the object
-- type, using @load_fn@ for each individual field.
genLoadObject :: (Field -> Q (Name, Stmt))
              -> String -> [Field] -> Q (Dec, Dec)
genLoadObject load_fn sname fields = do
  let name = mkName sname
      funname = mkName $ "load" ++ sname
      -- avoid an unused-argument warning in the generated code
      arg1 = mkName $ if null fields then "_" else "v"
      objname = mkName "o"
      -- note: the unused @opid = mkName "op_id"@ binding was removed
  st1 <- bindS (varP objname) [| liftM JSON.fromJSObject
                                 (JSON.readJSON $(varE arg1)) |]
  fbinds <- mapM load_fn fields
  let (fnames, fstmts) = unzip fbinds
  let cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) fnames
      retstmt = [NoBindS (AppE (VarE 'return) cval)]
      -- FIXME: should we require an empty dict for an empty type?
      -- this allows any JSValue right now
      fstmts' = if null fields
                  then retstmt
                  else st1:fstmts ++ retstmt
  sigt <- [t| JSON.JSValue -> JSON.Result $(conT name) |]
  return $ (SigD funname sigt,
            FunD funname [Clause [VarP arg1] (NormalB (DoE fstmts')) []])
-- | Generates code for loading an object's field.
loadObjectField :: [Field] -> Field -> Q (Name, Stmt)
loadObjectField allFields field = do
  let name = fieldVariable field
      names = map fieldVariable allFields
      -- the other fields' names, used by the rest-arguments case
      otherNames = listE . map stringE $ names \\ [name]
  fvar <- newName name
  -- these are used in all patterns below
  let objvar = varNameE "o"
      objfield = stringE (fieldName field)
      loadexp =
        case fieldIsOptional field of
          NotOptional ->
            case fieldDefault field of
              Just defv ->
                [| $(varE 'fromObjWithDefault) $objvar
                   $objfield $defv |]
              Nothing -> [| $fromObjE $objvar $objfield |]
          AndRestArguments -> [| return . M.fromList
                                   $ filter (not . (`elem` $otherNames) . fst)
                                   $objvar |]
          _ -> [| $(varE 'maybeFromObj) $objvar $objfield |]
          -- we treat both optional types the same, since
          -- 'maybeFromObj' can deal with both missing and null values
          -- appropriately (the same)
  bexp <- loadFn field loadexp objvar
  return (fvar, BindS (VarP fvar) bexp)
-- | Builds the readJSON instance for a given object name.
--
-- Wraps the generated @loadX@ function and decorates its errors with
-- the type name.
objectReadJSON :: String -> Q Dec
objectReadJSON name = do
  let s = mkName "s"
  body <- [| case JSON.readJSON $(varE s) of
               JSON.Ok s' -> $(varE .mkName $ "load" ++ name) s'
               JSON.Error e ->
                 JSON.Error $ "Can't parse value for type " ++
                       $(stringE name) ++ ": " ++ e
             |]
  return $ FunD 'JSON.readJSON [Clause [VarP s] (NormalB body) []]
-- * Inheritable parameter tables implementation
-- | Compute parameter type names: the /filled/ and /partial/
-- variants for a given parameter root name.
paramTypeNames :: String -> (String, String)
paramTypeNames root =
  ( "Filled" ++ root ++ "Params"
  , "Partial" ++ root ++ "Params" )
-- | Compute information about the type of a parameter field.
--
-- Partial-parameter fields get a \"P\" suffix and are wrapped in
-- 'Maybe', since every field of a partial parameter is optional.
paramFieldTypeInfo :: String -> Field -> Q (Name, Strict, Type)
paramFieldTypeInfo field_pfx fd = do
  t <- actualFieldType fd
  let n = mkName . (++ "P") . (field_pfx ++) .
          fieldRecordName $ fd
  return (n, NotStrict, AppT (ConT ''Maybe) t)
-- | Build a parameter declaration.
--
-- This function builds two different data structures: a /filled/ one,
-- in which all fields are required, and a /partial/ one, in which all
-- fields are optional. Due to the current record syntax issues, the
-- fields need to be named differently for the two structures, so the
-- partial ones get a /P/ suffix.
buildParam :: String -> String -> [Field] -> Q [Dec]
buildParam sname field_pfx fields = do
  let (sname_f, sname_p) = paramTypeNames sname
      name_f = mkName sname_f
      name_p = mkName sname_p
  fields_f <- mapM (fieldTypeInfo field_pfx) fields
  fields_p <- mapM (paramFieldTypeInfo field_pfx) fields
  let decl_f = RecC name_f fields_f
      decl_p = RecC name_p fields_p
  let declF = DataD [] name_f [] [decl_f] [''Show, ''Eq]
      declP = DataD [] name_p [] [decl_p] [''Show, ''Eq]
  ser_decls_f <- buildObjectSerialisation sname_f fields
  ser_decls_p <- buildPParamSerialisation sname_p fields
  fill_decls <- fillParam sname field_pfx fields
  return $ [declF, declP] ++ ser_decls_f ++ ser_decls_p ++ fill_decls ++
           buildParamAllFields sname fields ++
           buildDictObjectInst name_f sname_f
-- | Builds a list of all fields of a parameter.
--
-- Generates a constant @allXParamFields :: [String]@.
buildParamAllFields :: String -> [Field] -> [Dec]
buildParamAllFields sname fields =
  let vname = mkName ("all" ++ sname ++ "ParamFields")
      sig = SigD vname (AppT ListT (ConT ''String))
      val = ListE $ map (LitE . StringL . fieldName) fields
  in [sig, ValD (VarP vname) (NormalB val) []]
-- | Builds the 'DictObject' instance for a filled parameter.
buildDictObjectInst :: Name -> String -> [Dec]
buildDictObjectInst name sname =
  [InstanceD [] (AppT (ConT ''DictObject) (ConT name))
   [ValD (VarP 'toDict) (NormalB (VarE (toDictName sname))) []]]
-- | Generates the serialisation for a partial parameter.
--
-- Analogous to 'buildObjectSerialisation' but using the
-- partial-field save/load helpers.
buildPParamSerialisation :: String -> [Field] -> Q [Dec]
buildPParamSerialisation sname fields = do
  let name = mkName sname
  savedecls <- genSaveObject savePParamField sname fields
  (loadsig, loadfn) <- genLoadObject loadPParamField sname fields
  shjson <- objectShowJSON sname
  rdjson <- objectReadJSON sname
  let instdecl = InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name))
                 [rdjson, shjson]
  return $ savedecls ++ [loadsig, loadfn, instdecl]
-- | Generates code to save an optional parameter field.
savePParamField :: Name -> Field -> Q Exp
savePParamField fvar field = do
  checkNonOptDef field
  let actualVal = mkName "v"
  normalexpr <- saveObjectField actualVal field
  -- we have to construct the block here manually, because we can't
  -- splice-in-splice
  return $ CaseE (VarE fvar) [ Match (ConP 'Nothing [])
                                       (NormalB (ConE '[])) []
                             , Match (ConP 'Just [VarP actualVal])
                                       (NormalB normalexpr) []
                             ]
-- | Generates code to load an optional parameter field.
loadPParamField :: Field -> Q (Name, Stmt)
loadPParamField field = do
  checkNonOptDef field
  let name = fieldName field
  fvar <- newName name
  -- these are used in all patterns below
  let objvar = varNameE "o"
      objfield = stringE name
      -- a missing key simply yields Nothing for the partial field
      loadexp = [| $(varE 'maybeFromObj) $objvar $objfield |]
  bexp <- loadFn field loadexp objvar
  return (fvar, BindS (VarP fvar) bexp)
-- | Builds a simple declaration of type @n_x = fromMaybe f_x p_x@.
--
-- Used by 'fillParam': the partial value wins when present, the
-- filled value is the fallback.
buildFromMaybe :: String -> Q Dec
buildFromMaybe fname =
  valD (varP (mkName $ "n_" ++ fname))
         (normalB [| $(varE 'fromMaybe)
                       $(varNameE $ "f_" ++ fname)
                       $(varNameE $ "p_" ++ fname) |]) []
-- | Builds a function that executes the filling of partial parameter
-- from a full copy (similar to Python's fillDict).
fillParam :: String -> String -> [Field] -> Q [Dec]
fillParam sname field_pfx fields = do
  let fnames = map (\fd -> field_pfx ++ fieldRecordName fd) fields
      (sname_f, sname_p) = paramTypeNames sname
      oname_f = "fobj"
      oname_p = "pobj"
      name_f = mkName sname_f
      name_p = mkName sname_p
      fun_name = mkName $ "fill" ++ sname ++ "Params"
      -- destructure both records via let-bound pattern matches
      le_full = ValD (ConP name_f (map (VarP . mkName . ("f_" ++)) fnames))
                (NormalB . VarE . mkName $ oname_f) []
      le_part = ValD (ConP name_p (map (VarP . mkName . ("p_" ++)) fnames))
                (NormalB . VarE . mkName $ oname_p) []
      -- rebuild a filled record from the merged n_* values
      obj_new = foldl (\accu vname -> AppE accu (VarE vname)) (ConE name_f)
                $ map (mkName . ("n_" ++)) fnames
  le_new <- mapM buildFromMaybe fnames
  funt <- [t| $(conT name_f) -> $(conT name_p) -> $(conT name_f) |]
  let sig = SigD fun_name funt
      fclause = Clause [VarP (mkName oname_f), VarP (mkName oname_p)]
                (NormalB $ LetE (le_full:le_part:le_new) obj_new) []
      fun = FunD fun_name [fclause]
  return [sig, fun]
-- * Template code for exceptions
-- | Exception simple error message field.
excErrMsg :: (String, Q Type)
excErrMsg = ("errMsg", [t| String |])
-- | Builds an exception type definition.
genException :: String                  -- ^ Name of new type
             -> SimpleObject -- ^ Constructor name and parameters
             -> Q [Dec]
genException name cons = do
  let tname = mkName name
  declD <- buildSimpleCons tname cons
  (savesig, savefn) <- genSaveSimpleObj tname ("save" ++ name) cons $
                       uncurry saveExcCons
  (loadsig, loadfn) <- genLoadExc tname ("load" ++ name) cons
  return [declD, loadsig, loadfn, savesig, savefn]
-- | Generates the \"save\" clause for an entire exception constructor.
--
-- This matches the exception with variables named the same as the
-- constructor fields (just so that the spliced in code looks nicer),
-- and calls showJSON on it.
saveExcCons :: String        -- ^ The constructor name
            -> [SimpleField] -- ^ The parameter definitions for this
                             -- constructor
            -> Q Clause      -- ^ Resulting clause
saveExcCons sname fields = do
  let cname = mkName sname
  fnames <- mapM (newName . fst) fields
  let pat = conP cname (map varP fnames)
      felems = if null fnames
                 then conE '() -- otherwise, empty list has no type
                 else listE $ map (\f -> [| JSON.showJSON $(varE f) |]) fnames
  -- serialised form is a (name, arguments) pair
  let tup = tupE [ litE (stringL sname), felems ]
  clause [pat] (normalB [| JSON.showJSON $tup |]) []
-- | Generates load code for a single constructor of an exception.
--
-- Generates the code (if there's only one argument, we will use a
-- list, not a tuple:
--
-- @
-- do
--  (x1, x2, ...) <- readJSON args
--  return $ Cons x1 x2 ...
-- @
loadExcConstructor :: Name -> String -> [SimpleField] -> Q Exp
loadExcConstructor inname sname fields = do
  let name = mkName sname
  f_names <- mapM (newName . fst) fields
  let read_args = AppE (VarE 'JSON.readJSON) (VarE inname)
  -- a single argument arrives as a one-element list, not a tuple
  let binds = case f_names of
                [x] -> BindS (ListP [VarP x])
                _ -> BindS (TupP (map VarP f_names))
      cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) f_names
  return $ DoE [binds read_args, NoBindS (AppE (VarE 'return) cval)]
{-| Generates the loadException function.
This generates a quite complicated function, along the lines of:
@
loadFn (JSArray [JSString name, args]) = case name of
   "A1" -> do
     (x1, x2, ...) <- readJSON args
     return $ A1 x1 x2 ...
   "a2" -> ...
   s -> fail $ "Unknown exception" ++ s
loadFn v = fail $ "Expected array but got " ++ show v
@
-}
genLoadExc :: Name -> String -> SimpleObject -> Q (Dec, Dec)
genLoadExc tname sname opdefs = do
  let fname = mkName sname
  exc_name <- newName "name"
  exc_args <- newName "args"
  exc_else <- newName "s"
  arg_else <- newName "v"
  fails <- [| fail $ "Unknown exception '" ++ $(varE exc_else) ++ "'" |]
  -- default match for unknown exception name
  let defmatch = Match (VarP exc_else) (NormalB fails) []
  -- the match results (per-constructor blocks)
  str_matches <-
    mapM (\(s, params) -> do
            body_exp <- loadExcConstructor exc_args s params
            return $ Match (LitP (StringL s)) (NormalB body_exp) [])
    opdefs
  -- the first function clause; we can't use [| |] due to TH
  -- limitations, so we have to build the AST by hand
  let clause1 = Clause [ConP 'JSON.JSArray
                         [ListP [ConP 'JSON.JSString [VarP exc_name],
                                 VarP exc_args]]]
                (NormalB (CaseE (AppE (VarE 'JSON.fromJSString)
                                        (VarE exc_name))
                          (str_matches ++ [defmatch]))) []
  -- the fail expression for the second function clause
  -- NOTE(review): the concatenated message contains an unmatched
  -- trailing quote ("... show v ++ \"'\"") -- confirm intended
  fail_type <- [| fail $ "Invalid exception: expected '(string, [args])' " ++
                  " but got " ++ show (pp_value $(varE arg_else)) ++ "'"
               |]
  -- the second function clause
  let clause2 = Clause [VarP arg_else] (NormalB fail_type) []
  sigt <- [t| JSON.JSValue -> JSON.Result $(conT tname) |]
  return $ (SigD fname sigt, FunD fname [clause1, clause2])
| apyrgio/snf-ganeti | src/Ganeti/THH.hs | bsd-2-clause | 49,664 | 4 | 20 | 13,259 | 11,417 | 6,067 | 5,350 | -1 | -1 |
{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}
-- Copyright (c) 2012 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
-- | This module defines a class for compiler error messages.
module Data.Message.Class(
Message(..),
Severity(..)
) where
import Data.Pos
import Text.Format
-- | A type indicating the severity of a reported error
--
-- The derived 'Ord' follows declaration order, so 'Internal'
-- compares lowest and 'Info' highest.
data Severity =
    -- | Indicates a bug in the compiler itself
    Internal
    -- | An error (ie syntax error, type error)
  | Error
    -- | A warning
  | Warning
    -- | Information, not indicating a warning
  | Info
  deriving (Ord, Eq)
-- | A class representing a compiler message
--
-- Instances must also be comparable, formattable, and carry a source
-- position (the 'Ord', 'Format' and 'Position' superclasses).
class (Ord m, Format m, Position m) => Message m where
  -- | Get the message's severity
  severity :: m -> Severity
  -- | Format the message's payload, do not include position or
  -- severity.
  describe :: m -> Doc
instance Format Severity where
format Internal = text "internal error"
format Error = text "error"
format Warning = text "warning"
format Info = text "info" | emc2/proglang-util | Data/Message/Class.hs | bsd-3-clause | 2,514 | 0 | 7 | 486 | 197 | 124 | 73 | 20 | 0 |
{-# LANGUAGE ViewPatterns, TupleSections, RecordWildCards, ScopedTypeVariables, PatternGuards #-}
module Action.Generate(actionGenerate) where
import Data.List.Extra
import System.FilePath
import System.Directory.Extra
import System.IO.Extra
import Data.Tuple.Extra
import Control.Exception.Extra
import Data.IORef
import Data.Maybe
import qualified Data.Set as Set
import qualified Data.Map.Strict as Map
import Control.Monad.Extra
import Data.Monoid
import Data.Ord
import System.Console.CmdArgs.Verbosity
import Prelude
import Output.Items
import Output.Tags
import Output.Names
import Output.Types
import Input.Cabal
import Input.Haddock
import Input.Download
import Input.Reorder
import Input.Set
import Input.Settings
import Input.Item
import General.Util
import General.Store
import General.Timing
import General.Str
import Action.CmdLine
import General.Conduit
import Control.DeepSeq
{-
data GenList
= GenList_Package String -- a literally named package
| GenList_GhcPkg String -- command to run, or "" for @ghc-pkg list@
| GenList_Stackage String -- URL of stackage file, defaults to @http://www.stackage.org/lts/cabal.config@
| GenList_Dependencies String -- dependencies in a named .cabal file
| GenList_Sort String -- URL of file to sort by, defaults to @http://packdeps.haskellers.com/reverse@
data GenTags
= GenTags_GhcPkg String -- command to run, or "" for @ghc-pkg dump@
| GenTags_Diff FilePath -- a diff to apply to previous metadata
| GenTags_Tarball String -- tarball of Cabal files, defaults to http://hackage.haskell.org/packages/index.tar.gz
| GetTags_Cabal FilePath -- tarball to get tag information from
data GenData
= GenData_File FilePath -- a file containing package data
| GenData_Tarball String -- URL where a tarball of data files resides
* `hoogle generate` - generate for all things in Stackage based on Hackage information.
* `hoogle generate --source=file1.txt --source=local --source=stackage --source=hackage --source=tarball.tar.gz`
Which files you want to index. Currently the list on stackage, could be those locally installed, those in a .cabal file etc. A `--list` flag, defaults to `stackage=url`. Can also be `ghc-pkg`, `ghc-pkg=user` `ghc-pkg=global`. `name=p1`.
Extra metadata you want to apply. Could be a file. `+shake author:Neil-Mitchell`, `-shake author:Neil-Mitchel`. Can be sucked out of .cabal files. A `--tags` flag, defaults to `tarball=url` and `diff=renamings.txt`.
Where the haddock files are. Defaults to `tarball=hackage-url`. Can also be `file=p1.txt`. Use `--data` flag.
Defaults to: `hoogle generate --list=ghc-pkg --list=constrain=stackage-url`.
Three pieces of data:
* Which packages to index, in order.
* Metadata.
generate :: Maybe Int -> [GenList] -> [GenTags] -> [GenData] -> IO ()
-- how often to redownload, where to put the files
generate :: FilePath -> [(String, [(String, String)])] -> [(String, LBS.ByteString)] -> IO ()
generate output metadata = ...
-}
-- -- generate all
-- @tagsoup -- generate tagsoup
-- @tagsoup filter -- search the tagsoup package
-- filter -- search all
-- | Fetch a named resource: given a local cache-file name and the URL to
-- retrieve it from, download (or reuse a cached copy of) the file and
-- return its local path.
type Download = String -> URL -> IO FilePath
-- | Read package information from Hackage\/Stackage over the network.
-- Returns the cabal metadata for every package, the set of packages to
-- index (Stackage LTS + nightly, haskell-platform, GHC) and a conduit
-- source yielding @(package, URL, hoogle file contents)@.
readHaskellOnline :: Timing -> Settings -> Download -> IO (Map.Map PkgName Package, Set.Set PkgName, ConduitT () (PkgName, URL, LBStr) IO ())
readHaskellOnline timing settings download = do
    -- BUG FIX: the two Stackage URLs were swapped relative to their cache
    -- file names ("haskell-stackage-lts.txt" fetched /nightly and vice
    -- versa), which poisons the named download cache.  Pair each cache
    -- file with its matching URL.
    stackageLts <- download "haskell-stackage-lts.txt" "https://www.stackage.org/lts/cabal.config"
    stackageNightly <- download "haskell-stackage-nightly.txt" "https://www.stackage.org/nightly/cabal.config"
    platform <- download "haskell-platform.txt" "https://raw.githubusercontent.com/haskell/haskell-platform/master/hptool/src/Releases2015.hs"
    cabals <- download "haskell-cabal.tar.gz" "https://hackage.haskell.org/packages/index.tar.gz"
    hoogles <- download "haskell-hoogle.tar.gz" "https://hackage.haskell.org/packages/hoogle.tar.gz"
    -- peakMegabytesAllocated = 2
    setStackage <- Set.map strPack <$> (Set.union <$> setStackage stackageLts <*> setStackage stackageNightly)
    setPlatform <- Set.map strPack <$> setPlatform platform
    setGHC <- Set.map strPack <$> setGHC platform
    cbl <- timed timing "Reading Cabal" $ parseCabalTarball settings cabals
    -- "ghc" itself never appears in the snapshot files but must be indexed
    let want = Set.insert (strPack "ghc") $ Set.unions [setStackage, setPlatform, setGHC]
    -- tag each package with the package sets it belongs to
    cbl <- pure $ flip Map.mapWithKey cbl $ \name p ->
        p{packageTags =
            [(strPack "set",strPack "included-with-ghc") | name `Set.member` setGHC] ++
            [(strPack "set",strPack "haskell-platform") | name `Set.member` setPlatform] ++
            [(strPack "set",strPack "stackage") | name `Set.member` setStackage] ++
            packageTags p}
    let source = do
            tar <- liftIO $ tarballReadFiles hoogles
            forM_ tar $ \(strPack . takeBaseName -> name, src) ->
                yield (name, hackagePackageURL name, src)
    pure (cbl, want, source)
-- | Read hoogle databases from local directories.  Every @.txt@ file is a
-- hoogle file; every @.cabal@ file supplies package metadata.  Packages
-- without a cabal file get a bare entry tagged with the directory names
-- that contain them.
readHaskellDirs :: Timing -> Settings -> [FilePath] -> IO (Map.Map PkgName Package, Set.Set PkgName, ConduitT () (PkgName, URL, LBStr) IO ())
readHaskellDirs timing settings dirs = do
    files <- concatMapM listFilesRecursive dirs
    -- We reverse/sort the list because of #206
    -- Two identical package names with different versions might be foo-2.0 and foo-1.0
    -- We never distinguish on versions, so they are considered equal when reordering
    -- So put 2.0 first in the list and rely on stable sorting. A bit of a hack.
    let order a = second Down $ parseTrailingVersion a
    let packages = map (strPack . takeBaseName &&& id) $ sortOn (map order . splitDirectories) $ filter ((==) ".txt" . takeExtension) files
    cabals <- mapM parseCabal $ filter ((==) ".cabal" . takeExtension) files
    -- yield each hoogle file with a file:// URL pointing at its directory
    let source = forM_ packages $ \(name, file) -> do
            src <- liftIO $ bstrReadFile file
            dir <- liftIO $ canonicalizePath $ takeDirectory file
            let url = "file://" ++ ['/' | not $ "/" `isPrefixOf` dir] ++ replace "\\" "/" dir ++ "/"
            yield (name, url, lbstrFromChunks [src])
    -- cabal-derived metadata wins over the synthesised bare packages
    pure (Map.union
        (Map.fromList cabals)
        (Map.fromListWith (<>) $ map generateBarePackage packages)
        ,Set.fromList $ map fst packages, source)
    where
        -- read one .cabal file into package metadata
        parseCabal fp = do
            src <- readFileUTF8' fp
            let pkg = readCabal settings src
            pure (strPack $ takeBaseName fp, pkg)
        -- minimal metadata for a package that only has a hoogle file
        generateBarePackage (name, file) =
            (name, mempty{packageTags = (strPack "set", strPack "all") : sets})
            where
                sets = map setFromDir $ filter (`isPrefixOf` file) dirs
                setFromDir dir = (strPack "set", strPack $ takeFileName $ dropTrailingPathSeparator dir)
-- | Read the Frege hoogle database from the network.  There is no cabal
-- metadata for Frege, so the package map is empty and only the single
-- "frege" pseudo-package is wanted.
readFregeOnline :: Timing -> Download -> IO (Map.Map PkgName Package, Set.Set PkgName, ConduitT () (PkgName, URL, LBStr) IO ())
readFregeOnline timing download = do
    fregeFile <- download "frege-frege.txt" "http://try.frege-lang.org/hoogle-frege.txt"
    let feed = do
            contents <- liftIO (bstrReadFile fregeFile)
            yield (strPack "frege", "http://google.com/", lbstrFromChunks [contents])
    pure (Map.empty, Set.singleton (strPack "frege"), feed)
-- | Read the locally installed packages via @ghc-pkg@.  Each package's
-- hoogle file is looked up inside its haddock directory; packages found
-- this way are tagged as stackage/installed.
readHaskellGhcpkg :: Timing -> Settings -> IO (Map.Map PkgName Package, Set.Set PkgName, ConduitT () (PkgName, URL, LBStr) IO ())
readHaskellGhcpkg timing settings = do
    cbl <- timed timing "Reading ghc-pkg" $ readGhcPkg settings
    let source =
            forM_ (Map.toList cbl) $ \(name,Package{..}) -> whenJust packageDocs $ \docs -> do
                let file = docs </> strUnpack name <.> "txt"
                whenM (liftIO $ doesFileExist file) $ do
                    src <- liftIO $ bstrReadFile file
                    docs <- liftIO $ canonicalizePath docs
                    -- absolute file:// URL; the guard adds the leading '/'
                    -- that Windows drive paths lack
                    let url = "file://" ++ ['/' | not $ all isPathSeparator $ take 1 docs] ++
                              replace "\\" "/" (addTrailingPathSeparator docs)
                    yield (name, url, lbstrFromChunks [src])
    cbl <- pure $ let ts = map (both strPack) [("set","stackage"),("set","installed")]
                  in Map.map (\p -> p{packageTags = ts ++ packageTags p}) cbl
    pure (cbl, Map.keysSet cbl, source)
-- | Like 'readHaskellGhcpkg', but documentation lives under an explicit
-- base directory laid out as @name-version/@, and the yielded URLs are
-- relative to that base rather than file:// absolute.
readHaskellHaddock :: Timing -> Settings -> FilePath -> IO (Map.Map PkgName Package, Set.Set PkgName, ConduitT () (PkgName, URL, LBStr) IO ())
readHaskellHaddock timing settings docBaseDir = do
    cbl <- timed timing "Reading ghc-pkg" $ readGhcPkg settings
    let source =
            forM_ (Map.toList cbl) $ \(name, p@Package{..}) -> do
                let docs = docDir (strUnpack name) p
                    file = docBaseDir </> docs </> (strUnpack name) <.> "txt"
                whenM (liftIO $ doesFileExist file) $ do
                    src <- liftIO $ bstrReadFile file
                    let url = ['/' | not $ all isPathSeparator $ take 1 docs] ++
                              replace "\\" "/" (addTrailingPathSeparator docs)
                    yield (name, url, lbstrFromChunks [src])
    cbl <- pure $ let ts = map (both strPack) [("set","stackage"),("set","installed")]
                  in Map.map (\p -> p{packageTags = ts ++ packageTags p}) cbl
    pure (cbl, Map.keysSet cbl, source)
    where docDir name Package{..} = name ++ "-" ++ strUnpack packageVersion
-- | Entry point for @hoogle generate@: read the input data for the chosen
-- language, parse each wanted package's hoogle file through a conduit
-- pipeline, and write the items/tags/names/types sections of the database.
actionGenerate :: CmdLine -> IO ()
actionGenerate g@Generate{..} = withTiming (if debug then Just $ replaceExtension database "timing" else Nothing) $ \timing -> do
    putStrLn "Starting generate"
    createDirectoryIfMissing True $ takeDirectory database
    whenLoud $ putStrLn $ "Generating files to " ++ takeDirectory database
    -- shadow the command-line flag with a partially applied downloader
    download <- pure $ downloadInput timing insecure download (takeDirectory database)
    settings <- loadSettings
    -- choose the input reader from the language and --haddock/--local flags
    (cbl, want, source) <- case language of
        Haskell | Just dir <- haddock -> readHaskellHaddock timing settings dir
                | [""] <- local_ -> readHaskellGhcpkg timing settings
                | [] <- local_ -> readHaskellOnline timing settings download
                | otherwise -> readHaskellDirs timing settings local_
        Frege | [] <- local_ -> readFregeOnline timing download
              | otherwise -> errorIO "No support for local Frege databases"
    (cblErrs, popularity) <- evaluate $ packagePopularity cbl
    cbl <- evaluate $ Map.map (\p -> p{packageDepends=[]}) cbl -- clear the memory, since the information is no longer used
    evaluate popularity
    -- mtl is more popular than transformers, despite having dodgy docs, which is a shame, so we hack it
    popularity <- evaluate $ Map.adjust (max $ 1 + Map.findWithDefault 0 (strPack "mtl") popularity) (strPack "transformers") popularity
    -- --include overrides the wanted set; --count truncates it
    want <- pure $ if include /= [] then Set.fromList $ map strPack include else want
    want <- pure $ case count of Nothing -> want; Just count -> Set.fromList $ take count $ Set.toList want
    (stats, _) <- storeWriteFile database $ \store -> do
        xs <- withBinaryFile (database `replaceExtension` "warn") WriteMode $ \warnings -> do
            hSetEncoding warnings utf8
            hPutStr warnings $ unlines cblErrs
            nCblErrs <- evaluate $ length cblErrs
            -- running count of per-item warnings written to the .warn file
            itemWarn <- newIORef 0
            let warning msg = do modifyIORef itemWarn succ; hPutStrLn warnings msg
            -- parse one (index, package) at a time, reporting progress
            let consume :: ConduitM (Int, (PkgName, URL, LBStr)) (Maybe Target, [Item]) IO ()
                consume = awaitForever $ \(i, (strUnpack -> pkg, url, body)) -> do
                    timedOverwrite timing ("[" ++ show i ++ "/" ++ show (Set.size want) ++ "] " ++ pkg) $
                        parseHoogle (\msg -> warning $ pkg ++ ":" ++ msg) url body
            writeItems store $ \items -> do
                -- fan the filtered source into the parser and, in parallel,
                -- a pass that records which packages were actually seen
                xs <- runConduit $
                    source .|
                    filterC (flip Set.member want . fst3) .|
                    void ((|$|)
                        (zipFromC 1 .| consume)
                        (do seen <- fmap Set.fromList $ mapMC (evaluate . force . strCopy . fst3) .| sinkList
                            let missing = [x | x <- Set.toList $ want `Set.difference` seen
                                             , fmap packageLibrary (Map.lookup x cbl) /= Just False]
                            liftIO $ putStrLn ""
                            liftIO $ whenNormal $ when (missing /= []) $ do
                                putStrLn $ "Packages missing documentation: " ++ unwords (sortOn lower $ map strUnpack missing)
                            liftIO $ when (Set.null seen) $
                                exitFail "No packages were found, aborting (use no arguments to index all of Stackage)"
                            -- synthesise things for Cabal packages that are not documented
                            forM_ (Map.toList cbl) $ \(name, Package{..}) -> when (name `Set.notMember` seen) $ do
                                let ret prefix = yield $ fakePackage name $ prefix ++ trim (strUnpack packageSynopsis)
                                if name `Set.member` want then
                                    (if packageLibrary
                                        then ret "Documentation not found, so not searched.\n"
                                        else ret "Executable only. ")
                                else if null include then
                                    ret "Not on Stackage, so not searched.\n"
                                else
                                    pure ()
                            ))
                    .| pipelineC 10 (items .| sinkList)
                itemWarn <- readIORef itemWarn
                when (itemWarn > 0) $
                    putStrLn $ "Found " ++ show itemWarn ++ " warnings when processing items"
                pure [(a,b) | (a,bs) <- xs, b <- bs]
        itemsMemory <- getStatsCurrentLiveBytes
        xs <- timed timing "Reordering items" $ pure $! reorderItems settings (\s -> maybe 1 negate $ Map.lookup s popularity) xs
        timed timing "Writing tags" $ writeTags store (`Set.member` want) (\x -> maybe [] (map (both strUnpack) . packageTags) $ Map.lookup x cbl) xs
        timed timing "Writing names" $ writeNames store xs
        timed timing "Writing types" $ writeTypes store (if debug then Just $ dropExtension database else Nothing) xs
        x <- getVerbosity
        when (x >= Loud) $
            whenJustM getStatsDebug print
        when (x >= Normal) $ do
            whenJustM getStatsPeakAllocBytes $ \x ->
                putStrLn $ "Peak of " ++ x ++ ", " ++ fromMaybe "unknown" itemsMemory ++ " for items"
    when debug $
        writeFile (database `replaceExtension` "store") $ unlines stats
| ndmitchell/hoogle | src/Action/Generate.hs | bsd-3-clause | 14,622 | 0 | 43 | 3,899 | 3,754 | 1,876 | 1,878 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Numeral.JA.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Numeral.JA.Corpus
import Duckling.Testing.Asserts
-- | Run the Japanese numeral corpus through the standard corpus-based
-- assertion helper.
tests :: TestTree
tests = testGroup "JA Tests"
  [ makeCorpusTest [Seal Numeral] corpus
  ]
| facebookincubator/duckling | tests/Duckling/Numeral/JA/Tests.hs | bsd-3-clause | 504 | 0 | 9 | 78 | 79 | 50 | 29 | 11 | 1 |
{-# LANGUAGE RecordWildCards, TypeFamilies, FlexibleInstances, MultiParamTypeClasses, OverloadedStrings, TupleSections #-}
module Aws.Sqs.Commands.SetQueueAttributes where
import Aws.Response
import Aws.Sqs.Info
import Aws.Sqs.Metadata
import qualified Aws.Sqs.Model as M
import Aws.Sqs.Query
import Aws.Sqs.Response
import Aws.Signature
import Aws.Transaction
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
-- | Request to set one attribute to a new value on a named SQS queue.
data SetQueueAttributes = SetQueueAttributes{
  sqaAttribute :: M.QueueAttribute, -- ^ attribute to modify
  sqaValue :: T.Text, -- ^ new value for the attribute
  sqaQueueName :: M.QueueName -- ^ target queue
}deriving (Show)
-- | SetQueueAttributes returns no payload, only response metadata.
data SetQueueAttributesResponse = SetQueueAttributesResponse{
} deriving (Show)
-- | The service returns an empty body for SetQueueAttributes, so the XML
-- parser unconditionally succeeds with the empty response record.
instance ResponseConsumer r SetQueueAttributesResponse where
    type ResponseMetadata SetQueueAttributesResponse = SqsMetadata
    responseConsumer _ = sqsXmlResponseConsumer parse
      where
        parse _ = return SetQueueAttributesResponse {}
-- | Render the request as a signed SQS query: the action name plus the
-- UTF-8 encoded attribute name/value pair, addressed to the queue.
instance SignQuery SetQueueAttributes where
    type Info SetQueueAttributes = SqsInfo
    signQuery SetQueueAttributes {..} = sqsSignQuery SqsQuery {
        sqsQueueName = Just sqaQueueName,
        sqsQuery = [("Action", Just "SetQueueAttributes"),
                    ("Attribute.Name", Just $ TE.encodeUtf8 $ M.printQueueAttribute sqaAttribute),
                    ("Attribute.Value", Just $ TE.encodeUtf8 sqaValue)]}
-- | Tie the request and response types together for the transaction runner.
instance Transaction SetQueueAttributes SetQueueAttributesResponse
| jgm/aws | Aws/Sqs/Commands/SetQueueAttributes.hs | bsd-3-clause | 1,689 | 1 | 13 | 499 | 296 | 172 | 124 | 32 | 0 |
{-# LANGUAGE PackageImports #-}
-- | Drop-in shim that re-exports "Data.Typeable" from the @base@ package,
-- letting this package provide the module name without changing behaviour.
module Data.Typeable (module M) where
import "base" Data.Typeable as M
| silkapp/base-noprelude | src/Data/Typeable.hs | bsd-3-clause | 108 | 0 | 4 | 18 | 21 | 15 | 6 | 3 | 0 |
-- boilerplate {{{
{-# LANGUAGE FlexibleContexts, NoMonomorphismRestriction #-}
module Data.SGF.Parse.Raw (
collection,
Property(..),
enum
) where
import Control.Applicative hiding (many, (<|>))
import Control.Monad
import Data.Char
import Data.Tree
import Data.Word
import Prelude hiding (lex)
import Text.Parsec (SourcePos(..), incSourceColumn)
import Text.Parsec.Prim
import Text.Parsec.Combinator
-- }}}
data Property = Property {
position :: SourcePos, -- ^
-- Currently, this is pretty lame: it doesn't track
-- line number and character number, only byte
-- offset from the beginning of the file. This is
-- because I don't really understand how to
-- correctly track line number and character number
-- properly in the face of dynamically changing
-- encodings, whereas byte number is a totally
-- braindead statistic to track.
name :: String, -- ^
-- The literal name of the property. This is
-- guaranteed to be a non-empty string of
-- upper-case ASCII characters.
values :: [[Word8]] -- ^ The arguments to the property.
} deriving (Eq, Ord, Show)
-- |
-- Convert between any two 'Enum' types via their 'Int' representation.
-- Mostly used to treat known-ASCII 'Word8's as 'Char's (and back).
enum :: (Enum a, Enum b) => a -> b
enum x = toEnum (fromEnum x)
-- | Keep a value only when it satisfies the predicate, failing in the
-- surrounding 'Alternative' otherwise.
ensure p x = x <$ guard (p x)
-- | Match a single byte satisfying the predicate.  Failures are shown as
-- the (assumed-ASCII) character and the source column advances by one.
satisfy p = tokenPrim
    ((\x -> ['\'', x, '\'']) . enum)
    (\pos _ _ -> incSourceColumn pos 1)
    (ensure p)
satisfyChar = satisfy . (. enum) -- byte matching a Char predicate
anyWord = satisfy (const True) -- any single byte
exactWord = satisfy . (==) . enum -- exactly this byte/char
someWord = satisfy . flip elem . map enum -- any byte from the given list
noWord = satisfy . flip notElem . map enum -- any byte not in the given list
whitespace = many (satisfyChar isSpace) -- zero or more whitespace bytes
-- assumed: the current byte is literally ASCII '\\' iff the current byte is
-- the last byte of the encoding of the actual character '\\' and neither of
-- the bytes that are literally ASCII ']' and ASCII ':' occur after the first
-- byte of any multi-byte encoded character
-- (in particular, UTF-8, ASCII, and ISO 8859-1 satisfy this property)
-- a backslash followed by any byte, both kept verbatim
escapedChar = liftM2 (\x y -> [x, y]) (exactWord '\\') anyWord
-- a single byte outside the given stop set
unescapedExcept ws = fmap return (noWord ws)
-- raw SGF property text: escaped or plain bytes up to a stop byte
literalTextExcept ws = fmap concat $ many (escapedChar <|> unescapedExcept ws)
-- | One SGF property: its source position, an upper-case ASCII name, and
-- one or more bracketed raw values separated by optional whitespace.
property = liftM3 ((. map enum) . Property)
    (getPosition)
    (many1 (satisfyChar (liftM2 (&&) isUpper (< '\128'))))
    (sepEndBy1 (exactWord '[' >> literalTextExcept "]" <* exactWord ']') whitespace)
-- | One SGF node: a semicolon followed by zero or more properties,
-- separated (and optionally terminated) by whitespace.
node = exactWord ';' *> whitespace *> sepEndBy property whitespace
-- | A parenthesised SGF game tree: one or more nodes, then any subtrees.
-- The linear run of nodes is rebuilt as a right-nested chain of
-- single-child 'Node's whose innermost children are the branching subtrees.
gameTree = do
    exactWord '('
    whitespace
    (node:nodes) <- sepEndBy1 node whitespace
    trees <- sepEndBy gameTree whitespace
    exactWord ')'
    return (Node node (foldr ((return .) . Node) trees nodes))
-- |
-- Parse the tree-structure of an SGF file, but without any knowledge of the
-- semantics of the properties, etc.  Requires at least one game tree and
-- consumes the entire input, including surrounding whitespace.
collection :: Stream s m Word8 => ParsecT s u m [Tree [Property]]
collection = whitespace >> sepEndBy1 gameTree whitespace <* whitespace <* eof
| tonicebrian/sgf | Data/SGF/Parse/Raw.hs | bsd-3-clause | 3,277 | 0 | 14 | 911 | 720 | 398 | 322 | 52 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import qualified GRApi
import Options.Applicative
import Data.Semigroup ((<>))
import Types
import qualified Paths_gr as Meta (version)
import Data.Version (showVersion)
import qualified Data.ByteString.Lazy.Char8 as L8
import Control.Exception (SomeException, try)
-- | Global options shared by every subcommand: an optional API key
-- (-k/--with-key) and an optional result limit (-l/--limit).
appOptions :: Parser AppOptions
appOptions = AppOptions
    <$> optional (strOption
        ( short 'k' <> long "with-key"
        <> metavar "APIKEY"
        <> help "Supply the API key as a command line argument."))
    <*> optional (option auto
        ( short 'l' <> long "limit"
        <> metavar "LIMIT"
        <> help "Limit the number of responses to LIMIT" ))
-- | Wrap a parser with the standard @--help@ handler and a description.
withInfo :: Parser a -> String -> ParserInfo a
withInfo p description = info (helper <*> p) (progDesc description)
-- | Full command-line parser: global options, the hidden version flag,
-- then the subcommand.
parseOptions :: Parser Options
parseOptions = Options <$> appOptions <* version <*> parseCommand
-- | @-v@/@--version@ prints the cabal package version and exits.
version :: Parser (a -> a)
version = infoOption (Data.Version.showVersion Meta.version)
    ( short 'v'
    <> long "version"
    <> help "Print version information" )
-- Commands
-- | Top-level subcommand parser mapping each command name to its argument
-- parser and help text.
parseCommand :: Parser Command
parseCommand = subparser $
    command "find" (parseFindBook `withInfo` "Find a book") <>
    command "findAuthor" (parseFindAuthor `withInfo` "Find an author") <>
    command "showFollowers" (parseShowFollowers `withInfo` "Show followers of user with id") <>
    -- BUG FIX: help text previously read "e.GRApi. to-read" -- the result
    -- of a module rename (G -> GRApi) applied inside a string literal.
    command "show" (parseShowShelf `withInfo` "Show a shelf, e.g. to-read") <>
    command "book" (parseShowBook `withInfo` "Show information about a book.") <>
    command "add" (parseAddBook `withInfo` "Add a book to a shelf.")
-- | Arguments for @book@.  NOTE(review): 'auto' parses via Read, so a
-- plain-text title only parses if quoted -- confirm the intended type.
parseShowBook :: Parser Command
parseShowBook = ShowBook
    <$> argument auto (metavar "BOOK_ID_OR_TITLE")
-- | Arguments for @add@: target shelf name and numeric book id.
parseAddBook :: Parser Command
parseAddBook = AddBook
    <$> argument str (metavar "SHELFNAME")
    <*> argument auto (metavar "BOOK_ID")
-- | Arguments for @find@: free-form title string.
parseFindBook :: Parser Command
parseFindBook = FindBook
    <$> argument str (metavar "TITLE")
-- | Arguments for @findAuthor@: free-form author name.
parseFindAuthor :: Parser Command
parseFindAuthor = FindAuthor
    <$> argument str (metavar "AUTHORNAME")
-- | Arguments for @showFollowers@: numeric user id, defaulting to 0.
parseShowFollowers :: Parser Command
parseShowFollowers = ShowFollowers
    <$> argument auto (metavar "GOODREADS_USER_ID" <> value 0) -- FIXME arguments to commands?
-- | Arguments for @show@: shelf name plus numeric user id (default 0).
parseShowShelf :: Parser Command
parseShowShelf = ShowShelf
    <$> argument str (metavar "SHELFNAME")
    <*> argument auto (metavar "GOODREADS_USER_ID" <> value 0) -- if default-user-id is defined in config, use it as default.
-- | Parse the command line and dispatch the chosen subcommand to 'run'.
main :: IO ()
main = execParser parserSpec >>= run
  where
    parserSpec = parseOptions `withInfo` "Interact with the Goodreads API. See --help for options."
-- | Dispatch a parsed subcommand to its implementation in "GRApi".
run :: Options -> IO ()
run (Options app cmd) =
    case cmd of
      FindBook bookTitle -> GRApi.doFindBook app bookTitle
      FindAuthor authorName -> GRApi.doFindAuthor app authorName
      -- NOTE(review): only echoes the id; no API call is implemented here
      ShowFollowers uID -> print uID
      ShowShelf shelfName uID -> GRApi.doShowShelf app shelfName uID
      AddBook shelfName bookID -> GRApi.doAddBook app shelfName bookID
      ShowBook bookID -> GRApi.doShowBook app bookID
| jmn/goodreads | app/Main.hs | bsd-3-clause | 3,121 | 0 | 14 | 666 | 797 | 410 | 387 | 70 | 6 |
--------------------------------------------------------------------
-- |
-- Module : RSS
-- Copyright : (c) Don Stewart, 2008
-- License : BSD3
--
-- Maintainer: Don Stewart <dons@galois.com>
-- Stability : provisional
-- Portability:
--
--------------------------------------------------------------------
module RSS where
import Types
import API
import Utils.URL
import Utils.Misc
import Text.RSS
import Network.URI
import System.Locale
import Data.Time
import System.IO
-- Partial top-level pattern: 'parseURI' is known to succeed on this fixed
-- feed-icon URL, but editing it into something unparseable would crash at
-- the first use of 'logo'.
Just logo = parseURI "http://upload.wikimedia.org/wikipedia/commons/thumb/4/43/Feed-icon.svg/48px-Feed-icon.svg.png"
-- | Write an RSS 2.0 feed of recent pastes to @path@.  @url@ is the site's
-- base address, used both as the channel link and to build item links.
-- Partial: crashes if @url@ does not parse as a URI.
outputRSS :: [Paste] -> String -> FilePath -> IO ()
outputRSS pastes url path = do
    let Just homepage = parseURI url
    time <- getClockTime >>= toCalendarTime
    writeFile path . showXML . rssToXML $
        RSS "hpaste"
            homepage
            "hpaste: recent pastes"
            [Language "en"
            ,Copyright "(c) Don Stewart 2008"
            ,ManagingEditor "dons@galois.com (Don Stewart)"
            ,WebMaster "dons@galois.com (Don Stewart)"
            ,ChannelPubDate time
            ,LastBuildDate time
            ,Generator "Haskell RSS"
            ,TTL (30) -- minutes
            ,Image logo
                "hpaste.org"
                homepage
                Nothing Nothing Nothing
            ]
            (map (ppr time url homepage) pastes)
-- | Render one paste as an RSS item.  Falls back to the channel homepage
-- when the item link fails to parse, and to the feed-generation time when
-- the paste's timestamp is absent or cannot be round-tripped through the
-- RFC 822 format.
ppr :: CalendarTime -> String -> URI -> Paste -> Item
ppr time baseurl home p =
    [ Title (paste_title p)
    , case parseURI (baseurl ++ exportURL (methodURL mView (paste_id p))) of
        Just uri -> Link uri
        Nothing -> Link home
    , PubDate $ case paste_timestamp p of
        Nothing -> time
        Just utc ->
            -- UTCTime -> CalendarTime via formatting and re-parsing RFC 822
            case parseCalendarTime defaultTimeLocale rfc822_named_format_str
                    (formatTime defaultTimeLocale rfc822_named_format_str utc)
            of Nothing -> time
               Just t -> t
    , Guid True $ show $
        case parseURI (baseurl ++ exportURL (methodURL mView (paste_id p))) of
            Just uri -> uri
            Nothing -> home
    , Author (paste_author p)
    , Description (paste_content p)
    , Category Nothing (paste_language p)
    ]
| glguy/hpaste | src/RSS.hs | bsd-3-clause | 2,222 | 0 | 15 | 644 | 513 | 261 | 252 | 52 | 5 |
{-# LANGUAGE RankNTypes, GADTs #-}
-- |
-- Module: Data.Concurrent.Reactive
-- Copyright: Andy Gill
-- License: BSD3
--
-- Maintainer: Andy Gill <andygill@ku.edu>
-- Stability: unstable
-- Portability: GHC
--
-- An API for generating reactive objects, as used in the TIMBER programming language.
--
module Control.Concurrent.Reactive
( Action
, Request
, reactiveObjectIO
, Sink
, pauseIO
, reactiveIO
) where
import Control.Concurrent.Chan
import Control.Concurrent
import Control.Exception as Ex
-- An action is an IO-based change to an explicit state
-- | Asynchronous, fire-and-forget update of the object's state.
type Action s = s -> IO s -- only state change
-- | Synchronous update that also produces an answer for the caller.
type Request s a = s -> IO (s,a) -- state change + reply to be passed back to caller
-- This is the 'forkIO' of the O'Haskell Object sub-system.
-- To consider; how do we handle proper exceptions?
-- we need to bullet-proof this for exception!
-- Choices:
-- * do the Requests see the failure
-- * Actions do not see anything
-- *
-- | Mailbox messages understood by a reactive object's dispatch loop.
data Msg s = Act (Action s) -- ^ asynchronous state change
           | forall a . Req (Request s a) (MVar a) -- ^ synchronous request; reply delivered via the MVar
           | Done (MVar ()) -- ^ shut the loop down; acknowledged via the MVar
-- | The 'forkIO' of the O'Haskell object sub-system: spawn a dispatch
-- thread that owns @state@ and serialises all access through a 'Chan',
-- then hand the caller an object built from a synchronous request
-- function, an asynchronous action function, and a shutdown function.
reactiveObjectIO
 :: state
 -> (
 ThreadId
 -> (forall r. Request state r -> IO r) -- requests
 -> (Action state -> IO ()) -- actions
 -> IO () -- done
 -> object
 )
 -> IO object
reactiveObjectIO state mkObject = do
 chan <- newChan
 -- the state is passed as the argument, watch for strictness issues.
 let dispatch state = do
 action <- readChan chan
 case action of
 Act act -> do state1 <- act state
 -- force the new state before looping to avoid thunk build-up
 dispatch $! state1
 Req req box -> do (state1,ret) <- req state
 putMVar box ret
 dispatch $! state1
 Done box -> do putMVar box ()
 return () -- no looping; we are done
 -- We return the pid, so you can build a hard-abort function
 -- we need to think about this; how do you abort an object
 pid <- forkIO $ dispatch state
 -- This trick of using a return MVar is straight from Johan's PhD.
 let requestit fun = do
 ret <- newEmptyMVar
 writeChan chan $ Req fun ret
 takeMVar ret -- wait for the object to react
 let actionit act = writeChan chan $ Act act
 let doneit = do
 ret <- newEmptyMVar
 writeChan chan $ Done ret
 takeMVar ret -- wait for the object to *finish*
 return (mkObject pid requestit actionit doneit)
-- From Conal; a Sink is a object into which things are thrown.
type Sink a = a -> IO ()
-- | Turn a reactive (callback) style call into a blocking IO call: run the
-- computation on a fresh thread and wait on an MVar until its sink fires.
pauseIO :: (a -> Sink b -> IO ()) -> a -> IO b
pauseIO fn a = do
    resultVar <- newEmptyMVar
    forkIO (fn a (putMVar resultVar))
    takeMVar resultVar
-- | Turn a blocking IO call into reactive (callback) style: run it on a
-- fresh thread and hand the result to the sink when it completes.
reactiveIO :: (a -> IO b) -> a -> Sink b -> IO ()
reactiveIO fn a sink = do
    _ <- forkIO (fn a >>= sink)
    return ()
| andygill/io-reactive | Control/Concurrent/Reactive.hs | bsd-3-clause | 2,962 | 12 | 18 | 891 | 706 | 357 | 349 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Blockchain.SigningTools (
signTransaction,
whoSignedThisTransaction
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as B16
import Data.Binary
import Data.ByteString.Internal
import Network.Haskoin.Internals hiding (Address)
import Numeric
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import Blockchain.ExtendedECDSA
import Blockchain.Data.Address
import Blockchain.Data.Transaction
import qualified Blockchain.Colors as CL
import Blockchain.Format
import Blockchain.Data.RLP
import Blockchain.SHA
import Blockchain.Data.SignedTransaction
import Blockchain.Util
--import Debug.Trace
-- | Left-pad a hex string with '0' characters to a width of 64; inputs
-- already 64 characters or longer are returned unchanged.
addLeadingZerosTo64::String->String
addLeadingZerosTo64 s = padding ++ s
  where padding = replicate (64 - length s) '0'
-- | Sign a transaction with the given private key using extended ECDSA
-- over the hash of its RLP encoding.  @v@ encodes the recovery parity
-- (0x1c when y is odd, 0x1b otherwise); @r@ and @s@ are decoded from
-- their zero-padded 64-digit hex renderings.
signTransaction::Monad m=>PrvKey->Transaction->SecretT m SignedTransaction
signTransaction privKey t = do
  ExtendedSignature signature yIsOdd <- extSignMsg theHash privKey
  return SignedTransaction{
    unsignedTransaction = t,
    v = if yIsOdd then 0x1c else 0x1b,
    r =
      case B16.decode $ B.pack $ map c2w $ addLeadingZerosTo64 $ showHex (sigR signature) "" of
        (val, "") -> byteString2Integer val
        _ -> error ("error: sigR is: " ++ showHex (sigR signature) ""),
    s =
      case B16.decode $ B.pack $ map c2w $ addLeadingZerosTo64 $ showHex (sigS signature) "" of
        (val, "") -> byteString2Integer val
        _ -> error ("error: sigS is: " ++ showHex (sigS signature) "")
    }
  where
    SHA theHash = hash $ rlpSerialize $ rlpEncode t
-- | Recover the signer's address: rebuild the extended signature (taking
-- the y-parity from @v == 0x1c@), recover the public key against the hash
-- of the RLP-encoded unsigned transaction, and derive its address.
whoSignedThisTransaction::SignedTransaction->Address
whoSignedThisTransaction SignedTransaction{unsignedTransaction=ut, v=v', r=r', s=s'} =
    pubKey2Address (getPubKeyFromSignature xSignature theHash)
  where
    xSignature = ExtendedSignature (Signature (fromInteger r') (fromInteger s')) (0x1c == v')
    SHA theHash = hash (rlpSerialize $ rlpEncode ut)
| kejace/ethereum-client-haskell | src/Blockchain/SigningTools.hs | bsd-3-clause | 2,070 | 0 | 18 | 472 | 552 | 301 | 251 | 42 | 4 |
-- | Re-exports all interaction related modules.
module Language.Java.Paragon.Interaction
( -- * Exported modules
module Language.Java.Paragon.Interaction.Debugging
, module Language.Java.Paragon.Interaction.Flags
, module Language.Java.Paragon.Interaction.Headers
, module Language.Java.Paragon.Interaction.Panic
, module Language.Java.Paragon.Interaction.Pretty
, module Language.Java.Paragon.Interaction.Unparse
) where
import Language.Java.Paragon.Interaction.Debugging
import Language.Java.Paragon.Interaction.Flags
import Language.Java.Paragon.Interaction.Headers
import Language.Java.Paragon.Interaction.Panic
import Language.Java.Paragon.Interaction.Pretty
import Language.Java.Paragon.Interaction.Unparse
| bvdelft/paragon | src/Language/Java/Paragon/Interaction.hs | bsd-3-clause | 733 | 0 | 5 | 64 | 114 | 87 | 27 | 14 | 0 |
{-# LANGUAGE RecordWildCards, OverloadedStrings #-}
module UI.Player (
musicPlayer
) where
import qualified Brick.Main as UI
import qualified Brick.Types as UI
import qualified Brick.Widgets.Center as UI
import qualified Brick.Widgets.Core as UI
import qualified Graphics.Vty as UI
import qualified UI.Extra as UI
import Control.Concurrent.Chan (writeChan, newChan)
import Control.Concurrent.STM.TVar
import Control.Monad (void, when)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Cont (ContT (..))
import Control.Monad.STM (atomically)
import Data.Default.Class
import Data.Foldable (toList)
import Data.List (intercalate)
import Data.Maybe (isJust, fromJust)
import qualified Data.Sequence as S
import Text.Printf (printf)
import System.Random (randomRIO)
import System.Process (callProcess)
import qualified FM.FM as FM
import qualified FM.Song as Song
import qualified FM.Cache as Cache
import qualified FM.NetEase as NetEase
import UI.Menu
import Types
-- | Complete player/UI state threaded through the brick event loop.
data State = State {
    session :: SomeSession -- ^ active service session (NetEase or local cache)
  , player :: Player -- ^ playback engine handle (holds volume/mute/state)
  , cache :: Cache -- ^ local song cache handle
  , source :: MusicSource -- ^ where songs are fetched from
  , playMode :: PlayMode -- ^ Stream / LoopOne / LoopAll / Shuffle
  , playSequence :: S.Seq Song.Song -- ^ songs fetched so far, in play order
  , stopped :: Bool -- ^ True when nothing is playing
  , currentIndex :: Int -- ^ 1-based index of the playing song; 0 = none
  , focusedIndex :: Int -- ^ 1-based index of the UI-focused song; 0 = none
  , progress :: (Double, Double) -- ^ (total length, elapsed) in seconds
  , currentLyrics :: String -- ^ lyrics line currently displayed
  , postEvent :: Event -> IO () -- ^ push a custom event into the UI loop
  , autoProceed :: Bool -- ^ advance to the next song when one finishes
  }
-- | Events handled by the player widget: raw terminal input plus custom
-- notifications posted from the playback callbacks.
data Event = VtyEvent UI.Event -- ^ terminal event from vty
           | UserEventFetchMore -- ^ extend the play sequence from the source
           | UserEventPending Bool -- ^ song ended; True forces advancing even when autoProceed is off
           | UserEventUpdateProgress (Double, Double) -- ^ new (length, elapsed) pair
           | UserEventUpdateLyrics String -- ^ new lyrics line to display
-- Run a cache / session / player action of the given state inside any MonadIO.
liftCache st m = liftIO (runCache (cache st) m)
liftSession st m = liftIO (runSession (session st) m)
liftPlayer st m = liftIO (runPlayer (player st) m)
-- | Fetch the next batch of songs from the current music source.
fetch :: (MonadIO m) => State -> m [Song.Song]
fetch state@State {..} = case source of
  NetEaseFM -> liftSession state NetEase.fetchFM
  NetEaseDailyRecommendation -> liftSession state NetEase.fetchRecommend
  -- NOTE(review): crashes if ever reached -- presumably unreachable from
  -- the player screen; confirm against the menu flow.
  NetEasePlayLists -> undefined
  NetEasePlayList id _ -> liftSession state (NetEase.fetchPlayList id)
  LocalCache -> liftSession state Cache.fetchCache
-- | Resolve a playable URL/path for a song.  Local sources resolve through
-- the cache session; remote sources prefer an already-cached copy and fall
-- back to asking NetEase.
fetchUrl :: (MonadIO m) => State -> Song.Song -> m (Maybe String)
fetchUrl state@State {..} song = if isLocal source
  then liftSession state (Cache.fetchUrl song)
  else do
    localPath <- liftCache state (Cache.lookupCache (Song.uid song))
    case localPath of
      Just path -> return $ Just path
      Nothing -> liftSession state (NetEase.fetchUrl song)
-- | Load lyrics for a song: from the local cache for local sources,
-- otherwise from the NetEase service.
fetchLyrics :: (MonadIO m) => State -> Song.Song -> m Song.Lyrics
fetchLyrics state@State {..} song
  | isLocal source = liftSession state (Cache.fetchLyrics song)
  | otherwise      = liftSession state (NetEase.fetchLyrics song)
-- | Pull another batch of songs from the source and append it to the
-- existing play sequence.
fetchMore :: (MonadIO m) => State -> m State
fetchMore state@State {..} = do
  batch <- fetch state
  return state { playSequence = playSequence S.>< S.fromList batch }
-- | Start playing the song at 'currentIndex' (1-based); index 0 means no
-- selection and is a no-op.  Wires the player callbacks back into the UI
-- event loop and marks the state as actively playing.
play :: (MonadIO m) => State -> m State
play state@State {..}
  | currentIndex == 0 = return state
  | otherwise = do
      -- desktop notification (title / artists / album) when playback begins
      let onBegin () = let Song.Song {..} = playSequence `S.index` (currentIndex - 1)
                       in callProcess "notify-send" [ title ++ "\n\n" ++ intercalate " / " artists ++ "\n\n" ++ album]
      -- on termination, post a non-forced pending event so the event loop
      -- decides whether to advance (flag semantics from FM.play -- assumed
      -- True means "may proceed"; TODO confirm)
      let onTerminate e = when e (postEvent (UserEventPending False))
      let onProgress p = postEvent (UserEventUpdateProgress p)
      let onLyrics l = postEvent (UserEventUpdateLyrics l)
      liftPlayer state $
        FM.play (playSequence `S.index` (currentIndex - 1))
                (fetchUrl state)
                (fetchLyrics state)
                onBegin
                onTerminate
                onProgress
                onLyrics
      -- focus follows the playing song; reset progress/lyrics display
      return state { focusedIndex = currentIndex
                   , stopped = False
                   , progress = (0, 0)
                   , currentLyrics = []
                   , autoProceed = True }
-- | Pause playback and disable automatic advancing.
pause :: (MonadIO m) => State -> m State
pause state = liftPlayer state FM.pause >> return state { autoProceed = False }
-- | Resume playback and re-enable automatic advancing.
resume :: (MonadIO m) => State -> m State
resume state = liftPlayer state FM.resume >> return state { autoProceed = True }
-- | Stop playback and reset every play-related display field.
stop :: (MonadIO m) => State -> m State
stop state = do
  liftPlayer state FM.stop
  return state { stopped = True, progress = (0, 0), currentLyrics = [], autoProceed = False }
-- | Adjust the player volume by @d@ percentage points, clamped to
-- [0, 100], and push the new volume down to the playback engine.
setVolume :: (MonadIO m) => State -> Int -> m State
setVolume state@State {..} d = do
  let newVolume = max 0 $ min 100 $ FM.playerVolume player + d
  let newState = state { player = player { FM.playerVolume = newVolume } }
  liftPlayer newState FM.updateVolume
  return newState
-- | Flip the player's mute flag and apply it to the playback engine.
toggleMute :: (MonadIO m) => State -> m State
toggleMute state@State {..} = do
  let flipped = state { player = player { FM.playerMuted = not (FM.playerMuted player) } }
  liftPlayer flipped FM.updateVolume
  return flipped
-- | Download the focused song into the local cache.  Only applies when a
-- song is focused and the source is remote; a song whose URL cannot be
-- resolved is silently skipped.  The state itself is returned unchanged.
cacheSong :: (MonadIO m) => State -> m State
cacheSong state@State {..} = do
  when (focusedIndex /= 0 && not (isLocal source)) $ do
    let song = playSequence `S.index` (focusedIndex - 1)
    url <- fetchUrl state song
    -- Pattern match instead of isJust/fromJust: no partial functions.
    case url of
      Just u  -> liftCache state $ FM.cacheSong song u
      Nothing -> return ()
  return state
-- | Delete the focused song from the local cache (local sources only).
-- Playback is stopped first, and the play sequence/focus are adjusted so
-- the focus stays on a valid entry.
deleteSong :: (MonadIO m) => State -> m State
deleteSong state@State {..} = do
  state <- stop state
  if (focusedIndex /= 0 && isLocal source)
    then liftCache state $ do
      FM.deleteSong $ playSequence `S.index` (focusedIndex - 1)
      -- drop the focused element out of the sequence
      let (heads, tails) = S.splitAt (focusedIndex - 1) playSequence
      let newSequence = heads `mappend` S.drop 1 tails
      -- if the deleted song was the last one, move focus back by one
      let newIndex = if S.length tails == 1 then focusedIndex - 1 else focusedIndex
      return state { playSequence = newSequence, focusedIndex = newIndex }
    else return state
-- | Render the whole player screen: song title, progress bar, mode/volume
-- bar, current lyrics line, and the scrollable play list.
musicPlayerDraw :: State -> [UI.Widget]
musicPlayerDraw State {..} = [ui]
  where
    -- "title - artist1 / artist2 - album"
    formatSong Song.Song {..} =
      printf "%s - %s - %s" title (intercalate " / " artists) album :: String
    -- seconds -> "MM:SS"
    formatTime time = printf "%02d:%02d" minute second :: String
      where
        (minute, second) = floor time `quotRem` 60 :: (Int, Int)
    ui = UI.vBox [ UI.separator
                 , title
                 , UI.separator
                 , bar1
                 , UI.separator
                 , bar2
                 , UI.separator
                 , lyrics
                 , UI.separator
                 , playList
                 ]
    -- current song line (or a "stopped" marker)
    title = UI.mkYellow $ UI.hCenter $ UI.str body
      where
        body | stopped = "[停止]"
             | otherwise = formatSong $ playSequence `S.index` (currentIndex - 1)
    -- progress bar "[>>>   ] (MM:SS/MM:SS)"; hidden while stopped
    bar1 | stopped = UI.separator
         | otherwise = UI.mkGreen $ UI.hCenter $ UI.str $
             printf "[%s] (%s/%s)" (make '>' total occupied)
                 (formatTime cur)
                 (formatTime len)
      where
        (len, cur) = progress
        ratio = if len == 0 then 0 else cur / len
        total = 35 :: Int
        occupied = ceiling $ fromIntegral total * ratio
        make c total occupied = replicate occupied c ++ replicate (total - occupied) ' '
    -- play-mode and volume/mute indicators
    bar2 = UI.mkCyan $ UI.hCenter $ UI.str $ unwords [playModeBar, volumeBar]
      where
        playModeBar = printf "[播放模式: %s]" (show1 playMode)
        volumeBar | FM.playerMuted player = "[静音]"
                  | otherwise = printf "[音量: %d%%]" (FM.playerVolume player)
    lyrics = UI.mkRed $ UI.hCenter $ UI.str $ if null currentLyrics then " " else currentLyrics
    -- numbered song list; the focused row is highlighted and kept visible
    playList = UI.viewport "vp" UI.Vertical $ UI.hCenter $ UI.vBox $ do
      (song, index) <- zip (toList playSequence) [1 .. ]
      let mkItem | index == focusedIndex = UI.visible . UI.mkCyan . UI.str . UI.mkFocused
                 | otherwise = UI.mkWhite . UI.str . UI.mkUnfocused
      return $ mkItem (show index ++ ". " ++ formatSong song)
-- | Brick event handler.  Dispatches application events (fetching more songs,
-- auto-advance ticks, progress/lyrics updates) and Vty key events (playback
-- control, list navigation, volume, play-mode selection, caching).
musicPlayerEvent :: State -> Event -> UI.EventM (UI.Next State)
musicPlayerEvent state@State {..} event = case event of
  UserEventFetchMore -> UI.continue =<< fetchMore state
  -- Advance to the next song once the player has stopped; fired either
  -- periodically (auto-proceed) or explicitly with 'forceProceed'.
  UserEventPending forceProceed -> do
    pState <- liftIO $ atomically $ readTVar (FM.playerState player)
    if (forceProceed || autoProceed) && (isStopped pState)
      then do
        -- In stream mode, fetch more songs when the end of the list is hit.
        let needMore = currentIndex == S.length playSequence && playMode == Stream
        state@State {..} <- if needMore then fetchMore state else return state
        -- Pick the next 1-based index according to the play mode.
        nextIndex <- case playMode of
          Stream -> return $ min (S.length playSequence) (currentIndex + 1)
          LoopOne -> return currentIndex
          LoopAll -> return $
            if currentIndex + 1 > S.length playSequence
              then 1
              else currentIndex + 1
          Shuffle -> liftIO $ randomRIO (1, S.length playSequence)
        UI.continue =<< play state { currentIndex = nextIndex }
      else UI.continue state
  UserEventUpdateProgress p -> UI.continue state { progress = p }
  UserEventUpdateLyrics l -> UI.continue state { currentLyrics = l }
  -- Esc: stop playback first; quit only when already stopped.
  VtyEvent (UI.EvKey UI.KEsc []) ->
    if stopped
      then UI.halt state
      else UI.continue =<< stop state
  -- Space is an alias for Enter.
  VtyEvent (UI.EvKey (UI.KChar ' ') []) ->
    musicPlayerEvent state (VtyEvent $ UI.EvKey UI.KEnter [])
  -- Enter: toggle pause/resume on the current song, or switch playback to
  -- the focused song.
  VtyEvent (UI.EvKey UI.KEnter []) -> do
    pState <- liftIO $ atomically $ readTVar (FM.playerState player)
    UI.continue =<< case pState of
      Playing _ -> if currentIndex == focusedIndex
                     then pause state
                     else do
                       state@State {..} <- stop state
                       play state { currentIndex = focusedIndex }
      Paused _ -> if currentIndex == focusedIndex
                    then resume state
                    else do
                      state@State {..} <- stop state
                      play state { currentIndex = focusedIndex }
      Stopped -> play state { currentIndex = focusedIndex }
  -- Up/Down move the focus; Down may trigger fetching in stream mode.
  VtyEvent (UI.EvKey UI.KUp []) ->
    UI.continue state { focusedIndex = max 0 (focusedIndex - 1) }
  VtyEvent (UI.EvKey UI.KDown []) -> do
    let needMore = focusedIndex == S.length playSequence && playMode == Stream
    state@State {..} <-
      if needMore
        then liftIO (fetchMore state)
        else return state
    UI.continue state { focusedIndex = min (S.length playSequence) (focusedIndex + 1) }
  VtyEvent (UI.EvKey (UI.KChar '-') []) -> UI.continue =<< setVolume state (-10)
  VtyEvent (UI.EvKey (UI.KChar '=') []) -> UI.continue =<< setVolume state 10
  VtyEvent (UI.EvKey (UI.KChar 'm') []) -> UI.continue =<< toggleMute state
  -- 'n': skip to the next song by stopping and forcing an advance.
  VtyEvent (UI.EvKey (UI.KChar 'n') []) -> do
    state <- stop state
    liftIO $ postEvent (UserEventPending True)
    UI.continue state
  -- 'o': suspend the UI and show the play-mode selection menu.
  VtyEvent (UI.EvKey (UI.KChar 'o') []) -> UI.suspendAndResume $ do
    newPlayMode <- menuSelection_ [minBound .. maxBound] (Just playMode) "播放模式"
    return $ case newPlayMode of
      Just newPlayMode -> state { playMode = newPlayMode }
      Nothing -> state
  VtyEvent (UI.EvKey (UI.KChar 'c') []) -> UI.continue =<< cacheSong state
  VtyEvent (UI.EvKey (UI.KChar 'C') []) -> UI.continue =<< deleteSong state
  _ -> UI.continue state
-- | Brick application record wiring the draw and event handlers together.
-- The cursor is never shown and default attributes are used throughout.
musicPlayerApp :: UI.App State Event
musicPlayerApp = UI.App { UI.appDraw = musicPlayerDraw
                        , UI.appStartEvent = return
                        , UI.appHandleEvent = musicPlayerEvent
                        , UI.appAttrMap = const UI.defaultAttributeMap
                        , UI.appLiftVtyEvent = VtyEvent
                        , UI.appChooseCursor = UI.neverShowCursor
                        }
-- | Run the music player UI until the user quits.
--
-- Initializes the audio player and an event channel, builds the initial
-- (empty, stopped) state, queues an initial fetch, and hands control to
-- brick's event loop.
musicPlayer_ :: MusicSource -> SomeSession -> Cache -> IO ()
musicPlayer_ source session cache = void $ do
  player <- FM.initPlayer
  chan <- newChan
  -- Events posted here are consumed by brick via 'customMain' below.
  let postEvent = writeChan chan
  let state = State { session = session
                    , player = player
                    , cache = cache
                    , source = source
                    , playMode = defaultPlayMode source
                    , playSequence = S.empty
                    , stopped = True
                    -- 0 means "no song"; song indices are 1-based.
                    , currentIndex = 0
                    , focusedIndex = 0
                    , progress = (0, 0)
                    , currentLyrics = []
                    , postEvent = postEvent
                    , autoProceed = False
                    }
  -- Start loading the first batch of songs immediately.
  postEvent UserEventFetchMore
  UI.customMain (UI.mkVty def) chan musicPlayerApp state
-- | Continuation-style wrapper around 'musicPlayer_': the supplied
-- continuation is ignored, because the player runs to completion on its own.
musicPlayer :: MusicSource -> SomeSession -> Cache -> ContT () IO ()
musicPlayer src sess c = ContT $ \_k -> musicPlayer_ src sess c
| foreverbell/fm-client | exe/UI/Player.hs | bsd-3-clause | 12,517 | 0 | 21 | 3,578 | 4,147 | 2,136 | 2,011 | 274 | 30 |
{-# LANGUAGE OverloadedStrings #-}
module YahooAPI where
import qualified Control.Exception as E
import Control.Lens
import Control.Monad (mzero)
import Data.ByteString.Lazy
import Data.Csv
import Data.Text
import qualified Data.Text as T
import Data.Time
import Data.Time.Calendar
import Data.Typeable
import qualified Data.Vector as V
import qualified Network.HTTP.Client as HC
import Network.Wreq
-- | One row of Yahoo Finance's historical-quote CSV download.  Every field is
-- kept as the raw text of its CSV cell; no numeric parsing happens here.
data YahooQuote = YahooQuote { yQDate :: T.Text,
                               yQOpen :: T.Text,
                               yQHigh :: T.Text,
                               yQLow :: T.Text,
                               yQClose :: T.Text,
                               yQVolume :: T.Text,
                               yQAdjClose :: T.Text } deriving (Show, Eq)
-- | A ticker symbol, e.g. @\"GOOG\"@.
type Symbol = String

-- | Sampling interval for historical quotes.
data Interval = Daily | Weekly | Monthly deriving (Eq, Ord)

-- | Rendered to the single-letter codes the Yahoo CSV endpoint expects in
-- its @g@ query parameter (see 'getCSV').
instance Show Interval where
  show Daily = "d"
  show Weekly = "w"
  show Monthly = "m"
--data NetworkException = InvalidSymbol String deriving (Typeable)
--instance E.Exception NetworkException
{-}
instance Show NetworkException where
show (InvalidSymbol s) = "EXCEPTION: Invalid URL or ticker symbol \'" ++ s ++ "\'"
-}
-- | CSV decoding for one quote row.  A valid record has exactly the seven
-- columns Date, Open, High, Low, Close, Volume, Adj Close — in that order;
-- anything else fails the parse ('mzero').
instance FromRecord YahooQuote where
    parseRecord v
        | V.length v == 7 = YahooQuote <$> v .! 0
                                       <*> v .! 1
                                       <*> v .! 2
                                       <*> v .! 3
                                       <*> v .! 4
                                       <*> v .! 5
                                       <*> v .! 6
        | otherwise = mzero
-- | Decode a historical-quote CSV payload (with a header row) into a list of
-- 'YahooQuote's.
--
-- Decoding is best effort: a malformed payload yields the empty list rather
-- than an error.  (Previously the parse error was bound to an unused name and
-- discarded via an extra @let@; this form makes the intent explicit.)
toQuotes :: ByteString -> [YahooQuote]
toQuotes qbs =
  case decode HasHeader qbs :: Either String (V.Vector YahooQuote) of
    Left _  -> []
    Right v -> V.toList v
-- | Fetch the raw historical-quote CSV for @sym@ covering the @numdays@ days
-- ending at @endday@, sampled at @intvl@.
--
-- Any exception raised by the HTTP request is swallowed and reported as
-- 'Nothing'; on success the raw response body is returned.
getCSV :: Symbol -> Interval -> Day -> Integer -> IO (Maybe ByteString)
getCSV sym intvl endday numdays = do
  r <- E.try (getWith opts baseUrl) :: IO (Either E.SomeException (Response ByteString)) -- `E.catch` handler
  case r of
    Left ex -> return Nothing
    Right rbs -> do
      return $ Just $ rbs ^. responseBody
  where
    -- NOTE(review): 'handler' is currently dead code — the `E.catch` above is
    -- commented out.  It was meant to map a 404 to 'Nothing' and rethrow
    -- everything else.
    handler e@(HC.StatusCodeException s _ _)
      | s ^. statusCode == 404 = return Nothing :: IO (Maybe ByteString) --E.throwIO (InvalidSymbol sym)
      | otherwise = E.throwIO e
    baseUrl = "http://real-chart.finance.yahoo.com/table.csv"
    -- First day of the requested window (inclusive).
    stday = addDays (-(numdays-1)) endday
    -- Yahoo's query parameters: a/b/c = start month-1/day/year and
    -- d/e/f = end month-1/day/year (months are zero-based in this API).
    (f,d,e) = toGregorian endday
    (c,a,b) = toGregorian stday
    opts = defaults &
           param "a" .~ [T.pack . show $ a-1] &
           param "b" .~ [T.pack $ show b] &
           param "c" .~ [T.pack $ show c] &
           param "d" .~ [T.pack . show $ d-1] &
           param "e" .~ [T.pack $ show e] &
           param "f" .~ [T.pack $ show f] &
           param "ignore" .~ [".csv"] &
           param "s" .~ [T.pack sym] &
           param "g" .~ [T.pack $ show intvl]
| tjroth/hgetquotes | src/YahooAPI.hs | bsd-3-clause | 3,060 | 0 | 28 | 1,153 | 882 | 463 | 419 | 69 | 2 |
module Main where
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.Framework.Providers.HUnit
import Test.QuickCheck
import qualified AbListTests (tests)
import qualified AbneListTests (tests)
main :: IO ()
main = defaultMain [tests]

-- | Top-level test group aggregating every test module.
-- (A type signature was added; the binding previously triggered a
-- missing-signature warning under -Wall.)
tests :: Test
tests =
  testGroup "the"
    [ AbListTests.tests
    , AbneListTests.tests
    ]
| techtangents/ablist | test/Tests.hs | bsd-3-clause | 342 | 0 | 7 | 48 | 87 | 53 | 34 | 13 | 1 |
{-# LANGUAGE EmptyDataDecls, GADTs, StandaloneDeriving, OverloadedStrings, MultiParamTypeClasses, ScopedTypeVariables, TypeFamilies, FlexibleContexts, TemplateHaskell, GeneralizedNewtypeDeriving, DeriveFoldable, OverloadedLists #-}
-- | Implements the Authorize.NET JSON API. Types generally correspond to those defined in the XSD.
-- | XSD location: https://api.authorize.net/xml/v1/schema/AnetApiSchema.xsd
module Network.AuthorizeNet.Types (
module Network.AuthorizeNet.Types
) where
import Control.Applicative
import Control.Lens ((^.))
import Control.Monad
import Control.Monad.Except
import Control.Monad.IO.Class
import Control.Monad.Trans.Either
-- import Data.Aeson
-- import Data.Aeson.TH
-- import Data.Aeson.Types hiding (Parser)
import Data.Int
import Data.Maybe
import Data.Monoid
import Data.Proxy
import Data.String
import GHC.Exts
import Network.Wreq hiding (Proxy)
import Text.XML.HaXml.Schema.Schema (SchemaType(..), SimpleType(..), Extension(..), Restricts(..))
import Text.XML.HaXml.Schema.Schema as Schema
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Read as T
-- | These let you control exactly what shows in the XML header information:
-- no namespace declarations, just the XSD namespace, or the full set.
data XmlNamespaceLevel = Namespace_none
                       | Namespace_xsd
                       | Namespace_full
                       deriving (Eq, Show)
-- | Types that correspond to a named XML element in the Authorize.NET schema.
class SchemaType a => XmlParsable a where
  -- | The XML element name used when serializing this type.
  xmlParsableName :: a -> String
  -- | How much namespace information the element's header should carry.
  xmlNamespaceLevel :: a -> XmlNamespaceLevel

-- | An API request type, associated with the type of response it produces.
class XmlParsable a => ApiRequest a where
  type ResponseType a

-- | An API response type; every response exposes the common 'ANetApiResponse'
-- envelope (refId, status messages, session token).
class XmlParsable a => ApiResponse a where
  aNetApiResponse :: a -> ANetApiResponse
-- | There are 5 types of hosted forms: http://developer.authorize.net/api/reference/features/customer_profiles.html
data CimHostedProfileForm = CimHosted_Manage
| CimHosted_AddPayment
| CimHosted_EditPayment
| CimHosted_AddShipping
| CimHosted_EditShipping
deriving (Eq, Show)
-- | Information about the Authorize.NET API's endpoint. See 'API Endpoints' at http://developer.authorize.net/api/reference/index.html
-- | If you had a mock of their API set up somewhere for unit tests, you would use it by creating a value of this type.
-- | This and 'MerchantAuthentication' are required for every request
data ApiConfig = ApiConfig {
  -- | Base URL requests are sent to.
  apiConfig_baseUrl :: String,
  -- | Base URL used when constructing hosted-profile form URLs.
  apiConfig_hostedProfileUrlBase :: T.Text,
  -- | URL that SIM forms POST to — presumably Authorize.NET's Server
  -- Integration Method endpoint; confirm against callers.
  apiConfig_simPostUrl :: T.Text
  } deriving (Show)
-- | An integer that Authorize.NET transports as a string.
newtype NumericString = NumericString { unNumericString :: Integer } deriving (Eq, Ord, Show, Num)
--newtype Decimal = Decimal T.Text deriving (Eq, Show, IsString, ToJSON, FromJSON)
-- | A decimal amount kept in its textual form (e.g. \"8.45\") so no
-- precision is lost in transport.
newtype Decimal = Decimal T.Text deriving (Eq, Show, IsString)
-- | Creates a Decimal from a number of USD cents.
--
-- The cents part is always rendered with two digits (805 -> \"8.05\") and
-- negative amounts keep a single leading sign (-805 -> \"-8.05\").  The
-- previous implementation dropped the leading zero on cents (yielding
-- \"8.5\") and mangled negative amounts, because 'div'/'mod' round toward
-- negative infinity.
mkDecimal :: Int -> Decimal
mkDecimal priceCents =
  let sign = if priceCents < 0 then "-" else ""
      (dollars, cents) = abs priceCents `divMod` 100
      -- Zero-pad single-digit cents so the fractional part is always 2 wide.
      centsStr = (if cents < 10 then "0" else "") ++ show cents
  in Decimal $ T.pack $ sign ++ show dollars ++ "." ++ centsStr
-- | Some Authorize.NET services in their JSON represent a single element as a single-element list, and others use an object. This type normalizes them into a list.
data ArrayOf a = ArrayOf [a] deriving (Eq, Show, Foldable)
type CustomerAddressId = NumericString
type CustomerProfileId = NumericString
type CustomerPaymentProfileId = NumericString
type CustomerShippingAddressId = NumericString
type ShippingProfileId = NumericString
type SubscriptionId = NumericString
type TransactionId = NumericString
type TaxId = T.Text
type MerchantCustomerId = NumericString
type AuthCode = T.Text
type AvsCode = T.Text
type InvoiceNumber = Int
data ArrayOfString = ArrayOfString {
arrayOfString_string :: ArrayOf T.Text
} deriving (Eq, Show)
data ArrayOfNumericString = ArrayOfNumericString {
arrayOfNumericString_numericString :: ArrayOf NumericString
} deriving (Eq, Show)
data SubscriptionIdList = SubscriptionIdList {
subscriptionIdList_subscriptionId :: ArrayOf NumericString
} deriving (Eq, Show)
type CardCode = NumericString
-- | Holds API credentials for Authorize.NET. You should get these when you sign up for a sandbox or production account.
-- | This and 'ApiConfig' are required for every request.
data MerchantAuthentication = MerchantAuthentication {
  merchantAuthentication_name :: T.Text,
  merchantAuthentication_transactionKey :: T.Text
  } deriving (Eq)

-- | The 'Show' instance deliberately redacts both fields so credentials can
-- never leak into logs or debug output.  (The argument is intentionally
-- unused; it was previously bound to a name, triggering an unused-binding
-- warning.)
instance Show MerchantAuthentication where
  show _ = "MerchantAuthentication { merchantAuthentication_name = \"REDACTED\", merchantAuthentication_transactionKey = \"REDACTED\" }"
-- | anet:customerTypeEnum
data CustomerType = CustomerType_individual
| CustomerType_business
deriving (Eq, Show)
-- | anet:nameAndAddressType
data NameAndAddress = NameAndAddress {
nameAddress_firstName :: T.Text,
nameAddress_lastName :: T.Text,
nameAddress_company :: T.Text,
nameAddress_address :: T.Text,
nameAddress_city :: T.Text,
nameAddress_state :: T.Text,
nameAddress_zip :: T.Text,
nameAddress_country :: T.Text
} deriving (Eq, Show)
-- | anet:cardArt
data CardArt = CardArt {
cardArt_cardBrand :: Maybe T.Text,
cardArt_cardImageHeight :: Maybe T.Text,
cardArt_cardImageUrl :: Maybe T.Text,
cardArt_cardImageWidth :: Maybe T.Text,
cardArt_cardType :: Maybe T.Text
} deriving (Eq, Show)
data CreditCard = CreditCard {
-- Extension fields from anet:creditCardSimpleType
creditCard_cardNumber :: T.Text,
creditCard_expirationDate :: T.Text,
creditCard_cardCode :: Maybe NumericString,
creditCard_isPaymentToken :: Maybe Bool,
creditCard_cryptogram :: Maybe T.Text
} deriving (Eq, Show)
-- | Build a 'CreditCard' from card number, expiration date and optional card
-- code, leaving the payment-token fields unset.
mkCreditCard :: T.Text -> T.Text -> Maybe CardCode -> CreditCard
mkCreditCard number expiration code = CreditCard
  { creditCard_cardNumber     = number
  , creditCard_expirationDate = expiration
  , creditCard_cardCode       = code
  , creditCard_isPaymentToken = Nothing
  , creditCard_cryptogram     = Nothing
  }
data CreditCardMasked = CreditCardMasked {
creditCardMasked_cardNumber :: T.Text,
creditCardMasked_expirationDate :: T.Text,
creditCardMasked_cardType :: Maybe T.Text,
creditCardMasked_cardArt :: Maybe CardArt
} deriving (Eq, Show)
-- | Build a 'CreditCardMasked' with just the card number and expiration
-- date; card type and card art stay unset.
mkCreditCardMasked :: T.Text -> T.Text -> CreditCardMasked
mkCreditCardMasked number expiration = CreditCardMasked
  { creditCardMasked_cardNumber     = number
  , creditCardMasked_expirationDate = expiration
  , creditCardMasked_cardType       = Nothing
  , creditCardMasked_cardArt        = Nothing
  }
data CustomerAddress = CustomerAddress {
customerAddress_firstName :: Maybe T.Text,
customerAddress_lastName :: Maybe T.Text,
customerAddress_company :: Maybe T.Text,
customerAddress_address :: Maybe T.Text,
customerAddress_city :: Maybe T.Text,
customerAddress_state :: Maybe T.Text,
customerAddress_zip :: Maybe T.Text,
customerAddress_country :: Maybe T.Text,
customerAddress_phoneNumber :: Maybe T.Text,
customerAddress_faxNumber :: Maybe T.Text,
customerAddress_email :: Maybe T.Text
} deriving (Eq, Show)
-- | A 'CustomerAddress' with every field unset; fill in what you need with
-- record update syntax.
mkCustomerAddress :: CustomerAddress
mkCustomerAddress = CustomerAddress
  { customerAddress_firstName   = Nothing
  , customerAddress_lastName    = Nothing
  , customerAddress_company     = Nothing
  , customerAddress_address     = Nothing
  , customerAddress_city        = Nothing
  , customerAddress_state       = Nothing
  , customerAddress_zip         = Nothing
  , customerAddress_country     = Nothing
  , customerAddress_phoneNumber = Nothing
  , customerAddress_faxNumber   = Nothing
  , customerAddress_email       = Nothing
  }
-- | anet:driversLicenseType
data DriversLicense = DriversLicense {
driversLicense_number :: T.Text,
driversLicense_state :: T.Text,
driversLicense_dateOfBirth :: T.Text
} deriving (Eq, Show)
data BankAccountType = BankAccountType_checking
| BankAccountType_savings
| BankAccountType_businessChecking
deriving (Eq, Show)
-- | anet:echeckTypeEnum
data EcheckType = Echeck_PPD
| Echeck_WEB
| Echeck_CCD
| Echeck_TEL
| Echeck_ARC
| Echeck_BOC
deriving (Eq, Show)
-- | anet:transactionTypeEnum
data TransactionType = Transaction_authOnlyTransaction
| Transaction_authCaptureTransaction
| Transaction_captureOnlyTransaction
| Transaction_refundTransaction
| Transaction_priorAuthCaptureTransaction
| Transaction_voidTransaction
| Transaction_getDetailsTransaction
| Transaction_authOnlyContinueTransaction
| Transaction_authCaptureContinueTransaction
deriving (Eq, Show)
-- | anet:bankAccountType
data BankAccount = BankAccount {
bankAccount_accountType :: Maybe BankAccountType,
bankAccount_routingNumber :: T.Text,
bankAccount_accountNumber :: T.Text,
bankAccount_nameOnAccount :: T.Text,
bankAccount_echeckType :: Maybe EcheckType,
bankAccount_bankName :: Maybe T.Text,
bankAccount_checkNumber :: Maybe T.Text
} deriving (Eq, Show)
-- | anet:bankAccountMaskedType
data BankAccountMasked = BankAccountMasked {
bankAccountMasked_accountType :: Maybe BankAccountType,
bankAccountMasked_routingNumber :: T.Text,
bankAccountMasked_accountNumber :: T.Text,
bankAccountMasked_nameOnAccount :: T.Text,
bankAccountMasked_echeckType :: Maybe EcheckType,
bankAccountMasked_bankName :: Maybe T.Text
} deriving (Eq, Show)
-- | anet:creditCardTrackType
data CreditCardTrack = CreditCardTrack {
creditCardTrack_track1 :: Maybe T.Text,
creditCardTrack_track2 :: Maybe T.Text,
creditCardTrack_cardCode :: Maybe CardCode
} deriving (Eq, Show)
-- | anet:encryptedTrackDataType
data EncryptedTrackData = EncryptedTrackData_dummy deriving (Eq, Show)
-- | anet:payPalType
data PayPal = PayPal_dummy deriving (Eq, Show)
-- | anet:opaqueDataType
data OpaqueData = OpaqueData_dummy deriving (Eq, Show)
-- | anet:paymentEmvType
data PaymentEmv = PaymentEmv_dummy deriving (Eq, Show)
-- | anet:paymentType
data Payment = Payment_creditCard CreditCard
| Payment_bankAccount BankAccount
| Payment_trackData CreditCardTrack
| Payment_encryptedTrackData EncryptedTrackData
| Payment_payPal PayPal
| Payment_opaqueData OpaqueData
| Payment_emv PaymentEmv
deriving (Eq, Show)
-- | anet:tokenMaskedType
data TokenMasked = TokenMasked {
tokenMasked_tokenSource :: Maybe T.Text,
tokenMasked_tokenNumber :: T.Text,
tokenMasked_expirationDate :: T.Text
} deriving (Eq, Show)
-- | anet:paymentMaskedType
data PaymentMasked = PaymentMasked_creditCard CreditCardMasked
| PaymentMasked_bankAccount BankAccountMasked
| PaymentMasked_tokenInformation TokenMasked
deriving (Eq, Show)
-- | anet:paymentProfile
data PaymentProfile = PaymentProfile {
paymentProfile_paymentProfileId :: NumericString,
paymentProfile_cardCode :: Maybe CardCode
} deriving (Eq, Show)
-- | anet:customerPaymentProfileType
data CustomerPaymentProfile = CustomerPaymentProfile {
customerPaymentProfile_customerType :: Maybe CustomerType,
customerPaymentProfile_billTo :: Maybe CustomerAddress,
customerPaymentProfile_payment :: Maybe Payment,
customerPaymentProfile_driversLicense :: Maybe DriversLicense,
customerPaymentProfile_taxId :: Maybe TaxId
} deriving (Eq, Show)
-- | A 'CustomerPaymentProfile' with every field unset.
mkCustomerPaymentProfile :: CustomerPaymentProfile
mkCustomerPaymentProfile = CustomerPaymentProfile
  { customerPaymentProfile_customerType   = Nothing
  , customerPaymentProfile_billTo         = Nothing
  , customerPaymentProfile_payment        = Nothing
  , customerPaymentProfile_driversLicense = Nothing
  , customerPaymentProfile_taxId          = Nothing
  }
-- | anet:customerPaymentProfileTypeEx
data CustomerPaymentProfileEx = CustomerPaymentProfileEx {
customerPaymentProfileEx_customerType :: Maybe CustomerType,
customerPaymentProfileEx_billTo :: Maybe CustomerAddress,
customerPaymentProfileEx_payment :: Maybe Payment,
customerPaymentProfileEx_driversLicense :: Maybe DriversLicense,
customerPaymentProfileEx_taxId :: Maybe TaxId,
customerPaymentProfileEx_customerPaymentProfileId :: Maybe CustomerPaymentProfileId
} deriving (Eq, Show)
-- | anet:customerPaymentProfileMaskedType
data CustomerPaymentProfileMasked = CustomerPaymentProfileMasked {
customerPaymentProfileMasked_customerType :: Maybe CustomerType,
customerPaymentProfileMasked_billTo :: Maybe CustomerAddress,
customerPaymentProfileMasked_customerProfileId :: Maybe CustomerProfileId,
customerPaymentProfileMasked_customerPaymentProfileId :: CustomerPaymentProfileId,
customerPaymentProfileMasked_payment :: Maybe PaymentMasked,
customerPaymentProfileMasked_driversLicense :: Maybe DriversLicense,
customerPaymentProfileMasked_taxId :: Maybe TaxId,
customerPaymentProfileMasked_subscriptionIds :: Maybe SubscriptionIdList
} deriving (Eq, Show)
-- | A 'CustomerPaymentProfileMasked' carrying only the given payment-profile
-- id; every other field starts out unset.
mkCustomerPaymentProfileMasked :: CustomerPaymentProfileId -> CustomerPaymentProfileMasked
mkCustomerPaymentProfileMasked profileId = CustomerPaymentProfileMasked
  { customerPaymentProfileMasked_customerType             = Nothing
  , customerPaymentProfileMasked_billTo                   = Nothing
  , customerPaymentProfileMasked_customerProfileId        = Nothing
  , customerPaymentProfileMasked_customerPaymentProfileId = profileId
  , customerPaymentProfileMasked_payment                  = Nothing
  , customerPaymentProfileMasked_driversLicense           = Nothing
  , customerPaymentProfileMasked_taxId                    = Nothing
  , customerPaymentProfileMasked_subscriptionIds          = Nothing
  }
-- | anet:CustomerPaymentProfileSearchTypeEnum
data CustomerPaymentProfileSearchType = SearchType_cardsExpiringInMonth deriving (Eq, Show)
data CustomerPaymentProfileOrderFieldEnum = OrderField_id deriving (Eq, Show)
-- | anet:CustomerPaymentProfileSorting
data CustomerPaymentProfileSorting = CustomerPaymentProfileSorting {
customerPaymentProfileSorting_orderBy :: CustomerPaymentProfileOrderFieldEnum,
customerPaymentProfileSorting_orderDescending :: Bool
} deriving (Eq, Show)
-- | anet:Paging
data Paging = Paging {
paging_limit :: NumericString,
paging_offset :: NumericString
} deriving (Eq, Show)
-- | anet:customerPaymentProfileListItemType
data CustomerPaymentProfileListItem = CustomerPaymentProfileListItem {
customerPaymentProfileListItem_customerPaymentProfileId :: CustomerPaymentProfileId,
customerPaymentProfileListItem_customerProfileId :: CustomerProfileId,
customerPaymentProfileListItem_billTo :: CustomerAddress,
customerPaymentProfileListItem_payment :: PaymentMasked
} deriving (Eq, Show)
-- | anet:arrayOfCustomerPaymentProfileListItemType
data ArrayOfCustomerPaymentProfileListItem = ArrayOfCustomerPaymentProfileListItem {
arrayOfCustomerPaymentProfileListIitem_paymentProfile :: ArrayOf CustomerPaymentProfileListItem
} deriving (Eq, Show)
-- | anet:customerProfileBaseType
data CustomerProfileBase = CustomerProfileBase {
customerProfileBase_merchantCustomerId :: Maybe T.Text,
customerProfileBase_description :: Maybe T.Text,
customerProfileBase_email :: Maybe T.Text
} deriving (Eq, Show)
-- | anet:customerProfileType
-- | Contains a 'Maybe' 'PaymentProfile' and 'Maybe' 'CustomerAddress' instead of an unbounded list of these due to JSON not supporting duplicate keys.
data CustomerProfile = CustomerProfile {
customerProfile_merchantCustomerId :: T.Text,
customerProfile_description :: T.Text,
customerProfile_email :: T.Text,
customerProfile_paymentProfiles :: Maybe CustomerPaymentProfile,
customerProfile_shipTos :: Maybe CustomerAddress
} deriving (Eq, Show)
-- | anet:customerProfileExType
data CustomerProfileEx = CustomerProfileEx {
customerProfileEx_merchantCustomerId :: T.Text,
customerProfileEx_description :: T.Text,
customerProfileEx_email :: T.Text,
customerProfileEx_customerProfileId :: Maybe CustomerProfileId
} deriving (Eq, Show)
-- | anet:customerAddressExType
data CustomerAddressEx = CustomerAddressEx {
customerAddressEx_firstName :: Maybe T.Text,
customerAddressEx_lastName :: Maybe T.Text,
customerAddressEx_company :: Maybe T.Text,
customerAddressEx_address :: Maybe T.Text,
customerAddressEx_city :: Maybe T.Text,
customerAddressEx_state :: Maybe T.Text,
customerAddressEx_zip :: Maybe T.Text,
customerAddressEx_country :: Maybe T.Text,
customerAddressEx_phoneNumber :: Maybe T.Text,
customerAddressEx_faxNumber :: Maybe T.Text,
customerAddressEx_email :: Maybe T.Text,
customerAddressEx_customerAddressId :: Maybe CustomerAddressId
} deriving (Eq, Show)
-- | anet:customerProfileMaskedType
data CustomerProfileMasked = CustomerProfileMasked {
customerProfileMasked_merchantCustomerId :: Maybe T.Text,
customerProfileMasked_description :: Maybe T.Text,
customerProfileMasked_email :: Maybe T.Text,
customerProfileMasked_customerProfileId :: Maybe NumericString,
customerProfileMasked_paymentProfiles :: ArrayOf CustomerPaymentProfileMasked,
customerProfileMasked_shipToList :: ArrayOf CustomerAddressEx
} deriving (Eq, Show)
data ValidationMode = Validation_none
| Validation_testMode
| Validation_liveMode
-- | Per Authorize.NET: "NOT RECOMMENDED. Use of this option can result in fines from your processor."
| Validation_oldLiveMode
deriving (Eq, Show)
-- | anet:customerProfilePaymentType
data CustomerProfilePayment = CustomerProfilePayment {
customerProfilePayment_createProfile :: Maybe Bool,
customerProfilePayment_customerProfileId :: Maybe CustomerProfileId,
customerProfilePayment_paymentProfile :: Maybe PaymentProfile,
customerProfilePayment_shippingProfileId :: Maybe ShippingProfileId
} deriving (Eq, Show)
-- | A 'CustomerProfilePayment' with every field unset.
mkCustomerProfilePayment :: CustomerProfilePayment
mkCustomerProfilePayment = CustomerProfilePayment
  { customerProfilePayment_createProfile     = Nothing
  , customerProfilePayment_customerProfileId = Nothing
  , customerProfilePayment_paymentProfile    = Nothing
  , customerProfilePayment_shippingProfileId = Nothing
  }
-- | anet:solutionType
data Solution = Solution {
solution_id :: T.Text,
solution_name :: Maybe T.Text,
solution_vendorName :: Maybe T.Text
} deriving (Eq, Show)
-- | anet:orderType
data Order = Order {
order_invoiceNumber :: Maybe T.Text,
order_description :: Maybe T.Text
} deriving (Eq, Show)
-- | anet:lineItemType
data LineItem = LineItem {
lineItem_itemId :: T.Text,
lineItem_name :: T.Text,
lineItem_description :: Maybe T.Text,
lineItem_quantity :: Decimal,
lineItem_unitPrice :: Decimal,
lineItem_taxable :: Maybe Bool
} deriving (Eq, Show)
-- | anet:ArrayOfLineItem
data LineItems = LineItems {
lineItems_lineItem :: ArrayOf LineItem
} deriving (Eq, Show)
-- | anet:extendedAmountType
data ExtendedAmount = ExtendedAmount {
extendedAmount_amount :: Decimal,
extendedAmount_name :: Maybe T.Text,
extendedAmount_description :: Maybe T.Text
} deriving (Eq, Show)
-- | anet:customerDataType
data CustomerData = CustomerData {
customerData_type :: Maybe CustomerType,
customerData_id :: Maybe T.Text,
customerData_email :: Maybe T.Text,
customerData_driverseLicense :: Maybe DriversLicense,
customerData_taxId :: Maybe TaxId
} deriving (Eq, Show)
-- | A 'CustomerData' with every field unset.
mkCustomerData :: CustomerData
mkCustomerData = CustomerData
  { customerData_type            = Nothing
  , customerData_id              = Nothing
  , customerData_email           = Nothing
  , customerData_driverseLicense = Nothing  -- (sic) field name matches the record definition
  , customerData_taxId           = Nothing
  }
-- | anet:ccAuthenticationType
data CcAuthentication = CcAuthentication {
ccAuthentication_authenticationIndicator :: T.Text,
ccAuthentication_cardholderAuthenticationValue :: T.Text
} deriving (Eq, Show)
-- | anet:transRetailInfoType
data TransRetailInfo = TransRetailInfo {
transRetailInfo_marketType :: Maybe T.Text,
transRetailInfo_deviceType :: Maybe T.Text,
transRetailInfo_customerSignature :: Maybe T.Text
} deriving (Eq, Show)
data SettingName = SettingName_emailCustomer
-- | Sends an email to the merchant email address on your Authorize.NET after every purchase
| SettingName_merchantEmail
| SettingName_allowPartialAuth
| SettingName_headerEmailReceipt
| SettingName_footerEmailReceipt
| SettingName_recurringBilling
| SettingName_duplicateWindow
| SettingName_testRequest
| SettingName_hostedProfileReturnUrlText
| SettingName_hostedProfileReturnUrl
| SettingName_hostedProfilePageBorderVisible
| SettingName_hostedProfileIFrameCommunicatorUrl
| SettingName_hostedProfileHeadingBgColor
| SettingName_hostedProfileValidationMode
| SettingName_hostedProfileBillingAddressRequired
| SettingName_hostedProfileCardCodeRequired
deriving (Eq, Show)
-- | anet:settingType
data Setting = Setting {
setting_settingName :: SettingName,
setting_settingValue :: T.Text
} deriving (Eq, Show)
-- anet:ArrayOfSetting
data ArrayOfSetting = ArrayOfSetting {
arrayOfSetting_setting :: ArrayOf Setting
} deriving (Eq, Show)
-- | anet:userField
data UserField = UserField {
userField_name :: T.Text,
userField_value :: T.Text
} deriving (Eq, Show)
data ArrayOfUserField = ArrayOfUserField {
arrayOfUserField_userField :: ArrayOf UserField
} deriving (Eq, Show)
data SecureAcceptance = SecureAcceptance {
secureAcceptance_SecureAcceptanceUrl :: T.Text,
secureAcceptance_PayerID :: T.Text
} deriving (Eq, Show)
data EmvResponse = EmvResponse {
emvResponse_tsvData :: Maybe T.Text,
emvResponse_tag :: Maybe T.Text
} deriving (Eq, Show)
-- | anet:transactionRequestType
data TransactionRequest = TransactionRequest {
transactionRequest_transactionType :: TransactionType,
-- | Total amount(including taxes, duty, shipping, etc.) to charge the card. A decimal number like "8.45".
transactionRequest_amount :: Decimal,
-- | Currency code. A common one is "USD".
transactionRequest_currencyCode :: Maybe T.Text,
transactionRequest_payment :: Maybe Payment,
transactionRequest_profile :: Maybe CustomerProfilePayment,
transactionRequest_solution :: Maybe Solution,
transactionRequest_callId :: Maybe T.Text,
-- | An identification number assigned to each POS (Point of Sale) device by a merchant's processor. This number allows the processor to identify the source of a transaction.
transactionRequest_terminalNumber :: Maybe T.Text,
-- | Authorization code. This may have been obtained from a verbal authorization or through another channel.
transactionRequest_authCode :: Maybe T.Text,
-- | Transaction ID of the original partial authorization transaction.
-- | Required only for refundTransaction, priorAuthCaptureTransaction, and voidTransaction. Do not include this field if you are providing splitTenderId
transactionRequest_refTransId :: Maybe T.Text,
transactionRequest_splitTenderId :: Maybe T.Text,
transactionRequest_order :: Maybe Order,
transactionRequest_lineItems :: Maybe LineItems,
transactionRequest_tax :: Maybe ExtendedAmount,
transactionRequest_duty :: Maybe ExtendedAmount,
transactionRequest_shipping :: Maybe ExtendedAmount,
transactionRequest_taxExempt :: Maybe Bool,
transactionRequest_poNumber :: Maybe T.Text,
transactionRequest_customer :: Maybe CustomerData,
transactionRequest_billTo :: Maybe CustomerAddress,
transactionRequest_shipTo :: Maybe CustomerAddress,
transactionRequest_customerIP :: Maybe T.Text,
transactionRequest_cardholderAuthentication :: Maybe CcAuthentication,
transactionRequest_retail :: Maybe TransRetailInfo,
transactionRequest_employeeId :: Maybe T.Text,
transactionRequest_transactionSettings :: Maybe ArrayOfSetting,
transactionRequest_userFields :: Maybe UserField
} deriving (Eq, Show)
-- | The TransactionRequest type has a lot of Maybe fields, so use this to get a bare-bones default.
--
-- Only the transaction type and amount are set; every optional field starts
-- out as 'Nothing'.  Record syntax is used so the construction cannot drift
-- out of sync with the field order.
mkTransactionRequest :: TransactionType -> Decimal -> TransactionRequest
mkTransactionRequest txType txAmount = TransactionRequest
  { transactionRequest_transactionType          = txType
  , transactionRequest_amount                   = txAmount
  , transactionRequest_currencyCode             = Nothing
  , transactionRequest_payment                  = Nothing
  , transactionRequest_profile                  = Nothing
  , transactionRequest_solution                 = Nothing
  , transactionRequest_callId                   = Nothing
  , transactionRequest_terminalNumber           = Nothing
  , transactionRequest_authCode                 = Nothing
  , transactionRequest_refTransId               = Nothing
  , transactionRequest_splitTenderId            = Nothing
  , transactionRequest_order                    = Nothing
  , transactionRequest_lineItems                = Nothing
  , transactionRequest_tax                      = Nothing
  , transactionRequest_duty                     = Nothing
  , transactionRequest_shipping                 = Nothing
  , transactionRequest_taxExempt                = Nothing
  , transactionRequest_poNumber                 = Nothing
  , transactionRequest_customer                 = Nothing
  , transactionRequest_billTo                   = Nothing
  , transactionRequest_shipTo                   = Nothing
  , transactionRequest_customerIP               = Nothing
  , transactionRequest_cardholderAuthentication = Nothing
  , transactionRequest_retail                   = Nothing
  , transactionRequest_employeeId               = Nothing
  , transactionRequest_transactionSettings      = Nothing
  , transactionRequest_userFields               = Nothing
  }
-- | anet:messageTypeEnum
data MessageType = Message_Ok
| Message_Error
deriving (Eq, Show)
-- | The possible message codes are documented at http://developer.authorize.net/api/reference/dist/json/responseCodes.json
data Message = Message {
message_code :: T.Text,
message_text :: T.Text
} deriving (Eq, Show)
-- | anet:messagesType
data Messages = Messages {
messages_resultCode :: MessageType,
messages_message :: ArrayOf Message
} deriving (Eq, Show)
-- | anet:transactionResponse has an element called 'prePaidCard' with an anonymous type
data PrePaidCard = PrePaidCard {
prePaidCard_requestedAmount :: Maybe T.Text,
prePaidCard_approvedAmount :: Maybe T.Text,
prePaidCard_balanceOnCard :: Maybe T.Text
} deriving (Eq, Show)
data TransactionResponse_message = TransactionResponse_message {
transactionResponseMessage_code :: Maybe T.Text,
transactionResponseMessage_description :: Maybe T.Text
} deriving (Eq, Show)
data ArrayOfTransactionResponseMessage = ArrayOfTransactionResponseMessage {
arrayOfTransactionResponseMessage_message :: ArrayOf TransactionResponse_message
} deriving (Eq, Show)
data TransactionResponse_error = TransactionResponse_error {
transactionResponseError_errorCode :: Maybe T.Text,
transactionResponseError_errorText :: Maybe T.Text
} deriving (Eq, Show)
data ArrayOfTransactionResponseError = ArrayOfTransactionResponseError {
arrayOfTransactionResponseMessage_error :: ArrayOf TransactionResponse_error
} deriving (Eq, Show)
data TransactionResponse_splitTenderPayment = TransactionResponse_splitTenderPayment {
transactionResponseSplitTenderPayment_transId :: Maybe T.Text,
transactionResponseSplitTenderPayment_responseCode :: Maybe T.Text,
transactionResponseSplitTenderPayment_responseToCustomer :: Maybe T.Text,
transactionResponseSplitTenderPayment_authCode :: Maybe T.Text,
transactionResponseSplitTenderPayment_accountNumber :: Maybe T.Text,
transactionResponseSplitTenderPayment_accountType :: Maybe T.Text,
transactionResponseSplitTenderPayment_requestedAmount :: Maybe T.Text,
transactionResponseSplitTenderPayment_approvedAmount :: Maybe T.Text,
transactionResponseSplitTenderPayment_balanceOnCard :: Maybe T.Text
} deriving (Eq, Show)
data ArrayOfTransactionResponseSplitTenderPayment = ArrayOfTransactionResponseSplitTenderPayment {
arrayOfTransactionResponseSplitTenderPayment_splitTenderPayment :: ArrayOf TransactionResponse_splitTenderPayment
} deriving (Eq, Show)
-- | anet:ANetApiResponse
data ANetApiResponse = ANetApiResponse {
aNetApiResponse_refId :: Maybe T.Text,
aNetApiResponse_messages :: Messages,
aNetApiResponse_sessionToken :: Maybe T.Text
} deriving (Eq, Show)
| MichaelBurge/haskell-authorize-net | src/Network/AuthorizeNet/Types.hs | bsd-3-clause | 27,139 | 0 | 11 | 5,555 | 4,414 | 2,534 | 1,880 | 467 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Revisions
( Revisable
, revisionsFor
, saveRevision
, logDeletion
) where
import Base
import Models (runDB)
import Pager (pageJSON)
import Data.Aeson
import Database.Persist
import Database.Persist.Sql
-- | Entities whose changes are recorded as 'Revision' rows.  Each instance
-- supplies the class-name tag stored alongside a revision (see
-- 'revisionsFor').
class (PersistEntity a, ToBackendKey SqlBackend a) => Revisable a where
  revisionClassName :: Key a -> Text
-- These tags are persisted in the revisions table and matched on by
-- 'revisionsFor', so existing rows depend on them staying stable.
instance Revisable Property where
  revisionClassName _ = "Property"
instance Revisable Space where
  revisionClassName _ = "Space"
instance Revisable Theorem where
  revisionClassName _ = "Theorem"
instance Revisable Trait where
  revisionClassName _ = "Trait"
-- | Serialize a page of revisions under the @\"revisions\"@ key, exposing
-- each revision's id, author, body and creation time.
instance ToJSON (Page Revision) where
  toJSON = pageJSON "revisions" $
    \(Entity _id Revision{..}) -> object
      [ "id" .= _id
      , "user_id" .= revisionUserId
      , "body" .= revisionBody
      , "created_at" .= revisionCreatedAt
      ]
-- | Deliberately unimplemented stub — presumably only present to satisfy a
-- constraint; serialization appears to go through the paged instance.
-- Calling this directly raises an error.  TODO confirm it is never reached.
instance ToJSON Revision where
  toJSON = error "ToJSON Revision"
-- | Filters selecting every revision recorded for the given entity: both the
-- numeric key and the entity's class tag must match.
revisionsFor :: Revisable a => Key a -> [Filter Revision]
revisionsFor key =
  [ RevisionItemId ==. fromSqlKey key
  , RevisionItemClass ==. revisionClassName key
  ]
-- | Record a new revision row for the given entity, attributed to the given
-- user.  The row stores the entity's numeric key plus its class tag (see
-- 'revisionClassName') so 'revisionsFor' can find it later; the inserted
-- key is discarded.  (The previously unused @obj\@@ binding was removed.)
saveRevision :: Revisable a => Entity User -> Entity a -> Action ()
saveRevision (Entity userId _) (Entity _id _) =
  void . runDB . insert $ Revision
    { revisionItemId = fromSqlKey _id
    , revisionItemClass = revisionClassName _id
    -- FIXME: the ToJSON instances for these objects have changed,
    -- but the revision bodies need to be backwards compatible
    , revisionBody = ""
    , revisionUserId = userId
    , revisionCreatedAt = Nothing -- presumably defaulted elsewhere; TODO confirm
    , revisionDeletes = False
    }
-- | Record the deletion of an entity in the revision history.
-- Not yet implemented; calling it is a runtime error.
logDeletion :: Revisable a => Entity User -> Entity a -> Action ()
logDeletion = error "logDeletion"
| jamesdabbs/pi-base-2 | src/Revisions.hs | bsd-3-clause | 1,934 | 0 | 11 | 409 | 463 | 247 | 216 | 49 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Reflex.Dom.Internal.Foreign where
import Control.Concurrent
import Control.Exception (bracket)
import Control.Lens hiding (set)
import Control.Monad
import Control.Monad.State.Strict hiding (forM, forM_, get, mapM, mapM_, sequence, sequence_)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Foreign.Marshal hiding (void)
import Foreign.Ptr
import GHCJS.DOM hiding (runWebGUI)
import GHCJS.DOM.Navigator
import GHCJS.DOM.Window
import Graphics.UI.Gtk hiding (Widget)
import Graphics.UI.Gtk.WebKit.JavaScriptCore.JSBase
import Graphics.UI.Gtk.WebKit.JavaScriptCore.JSObjectRef
import Graphics.UI.Gtk.WebKit.JavaScriptCore.JSStringRef
import Graphics.UI.Gtk.WebKit.JavaScriptCore.JSValueRef
import Graphics.UI.Gtk.WebKit.JavaScriptCore.WebFrame
import Graphics.UI.Gtk.WebKit.Types hiding (Event, Text, Widget)
import Graphics.UI.Gtk.WebKit.WebFrame
import Graphics.UI.Gtk.WebKit.WebInspector
import Graphics.UI.Gtk.WebKit.WebSettings
import Graphics.UI.Gtk.WebKit.WebView
import System.Directory
import System.Glib.FFI hiding (void)
#ifndef mingw32_HOST_OS
import System.Posix.Signals
#endif
-- | Destroy the top-level window containing the given web view, scheduled
-- on the GTK main loop so it is safe to call from any thread.
quitWebView :: WebView -> IO ()
quitWebView wv = postGUIAsync $ do w <- widgetGetToplevel wv --TODO: Shouldn't this be postGUISync?
                                   widgetDestroy w
-- | Arrange for Ctrl-C (SIGINT) to close the web view cleanly.
-- No handler is installed on Windows.
installQuitHandler :: WebView -> IO ()
#ifdef mingw32_HOST_OS
installQuitHandler wv = return () -- TODO: Maybe figure something out here for Windows users.
#else
installQuitHandler wv = void $ installHandler keyboardSignal (Catch (quitWebView wv)) Nothing
#endif
-- | Create a GTK window hosting a WebKit web view, tag the user agent with
-- @userAgentKey@ (so pages can detect they are running in this native
-- wrapper), wire up a web inspector, load a blank page rooted at the current
-- directory, and run @main@ once loading finishes. Blocks in the GTK main
-- loop until the window is destroyed.
makeDefaultWebView :: Text -> (WebView -> IO ()) -> IO ()
makeDefaultWebView userAgentKey main = do
  _ <- initGUI
  window <- windowNew
  -- Poll at high priority so GHC's scheduler gets a chance to run other
  -- Haskell threads while GTK owns the main loop.
  _ <- timeoutAddFull (yield >> return True) priorityHigh 10
  windowSetDefaultSize window 900 600
  windowSetPosition window WinPosCenter
  scrollWin <- scrolledWindowNew Nothing Nothing
  webView <- webViewNew
  settings <- webViewGetWebSettings webView
  userAgent <- settings `get` webSettingsUserAgent
  settings `set` [ webSettingsUserAgent := userAgent <> " " <> userAgentKey
                 , webSettingsEnableUniversalAccessFromFileUris := True
                 , webSettingsEnableDeveloperExtras := True
                 ]
  webViewSetWebSettings webView settings
  window `containerAdd` scrollWin
  scrollWin `containerAdd` webView
  _ <- on window objectDestroy . liftIO $ mainQuit
  widgetShowAll window
  _ <- webView `on` loadFinished $ \_ -> do
    main webView --TODO: Should probably only do this once
  -- Open the developer-tools inspector in its own window on demand.
  inspector <- webViewGetInspector webView
  _ <- inspector `on` inspectWebView $ \_ -> do
    inspectorWindow <- windowNew
    windowSetDefaultSize inspectorWindow 900 600
    inspectorScrollWin <- scrolledWindowNew Nothing Nothing
    inspectorWebView <- webViewNew
    inspectorWindow `containerAdd` inspectorScrollWin
    inspectorScrollWin `containerAdd` inspectorWebView
    widgetShowAll inspectorWindow
    return inspectorWebView
  wf <- webViewGetMainFrame webView
  pwd <- getCurrentDirectory
  -- Blank document whose base URI is the working directory, so relative
  -- file:// resources resolve.
  webFrameLoadString wf "" Nothing $ "file://" <> pwd <> "/"
  installQuitHandler webView
  mainGUI
-- | Run a GUI app with the default \"GHCJS\" user-agent marker.
runWebGUI :: (WebView -> IO ()) -> IO ()
runWebGUI = runWebGUI' "GHCJS"
-- | Run a GUI app. If we are already inside a browser window whose user
-- agent does NOT carry @userAgentKey@, run @main@ there; otherwise (or when
-- there is no window at all) spin up our own WebKit window.
runWebGUI' :: Text -> (WebView -> IO ()) -> IO ()
runWebGUI' userAgentKey main = do
  -- Are we in a java script inside some kind of browser
  mbWindow <- currentWindow
  case mbWindow of
    Just window -> do
      -- Check if we are running in javascript inside the native version
      Just n <- getNavigator window
      agent <- getUserAgent n
      unless ((" " <> userAgentKey) `T.isSuffixOf` agent) $ main (castToWebView window)
    Nothing -> do
      makeDefaultWebView userAgentKey main
-- | Wrap a Haskell callback so JavaScriptCore can invoke it as a JS function.
foreign import ccall "wrapper"
  wrapper :: JSObjectCallAsFunctionCallback' -> IO JSObjectCallAsFunctionCallback
-- | Build a JS object whose properties @\"0\"@, @\"1\"@, ... hold the given
-- values — an arguments-array stand-in for scripts evaluated with @this@.
toJSObject :: JSContextRef -> [Ptr OpaqueJSValue] -> IO JSObjectRef
toJSObject ctx args = do
  o <- jsobjectmake ctx nullPtr nullPtr
  iforM_ args $ \n a -> do
    prop <- jsstringcreatewithutf8cstring $ show n
    jsobjectsetproperty ctx o prop a 1 nullPtr
  return o
-- | Convert a JS value to 'Text', returning 'Nothing' for JS @null@.
-- Copies the string out through a temporary UTF-8 buffer.
fromJSStringMaybe :: JSContextRef -> JSValueRef -> IO (Maybe Text)
fromJSStringMaybe c t = do
  isNull <- jsvalueisnull c t
  if isNull then return Nothing else do
    j <- jsvaluetostringcopy c t nullPtr
    l <- jsstringgetmaximumutf8cstringsize j
    s <- allocaBytes (fromIntegral l) $ \ps -> do
      _ <- jsstringgetutf8cstring'_ j ps (fromIntegral l)
      peekCString ps
    return $ Just $ T.pack s
-- | Evaluate @location.host@ in the view's JS context; empty text when null.
getLocationHost :: WebView -> IO Text
getLocationHost wv = withWebViewContext wv $ \c -> do
  script <- jsstringcreatewithutf8cstring "location.host"
  lh <- jsevaluatescript c script nullPtr nullPtr 1 nullPtr
  lh' <- fromJSStringMaybe c lh
  return $ fromMaybe "" lh'
-- | Evaluate @location.protocol@ in the view's JS context; empty text when null.
getLocationProtocol :: WebView -> IO Text
getLocationProtocol wv = withWebViewContext wv $ \c -> do
  script <- jsstringcreatewithutf8cstring "location.protocol"
  lp <- jsevaluatescript c script nullPtr nullPtr 1 nullPtr
  lp' <- fromJSStringMaybe c lp
  return $ fromMaybe "" lp'
-- | Marshal a 'ByteString' into a JS @Uint8Array@: build a JS array of
-- byte-valued numbers, then wrap it via @new Uint8Array(this)@.
bsToArrayBuffer :: JSContextRef -> ByteString -> IO JSValueRef
bsToArrayBuffer c bs = do
  elems <- forM (BS.unpack bs) $ \x -> jsvaluemakenumber c $ fromIntegral x
  let numElems = length elems
  -- bracket guarantees the temporary C array is freed even on exception.
  bracket (mallocArray numElems) free $ \elemsArr -> do
    pokeArray elemsArr elems
    a <- jsobjectmakearray c (fromIntegral numElems) elemsArr nullPtr
    newUint8Array <- jsstringcreatewithutf8cstring "new Uint8Array(this)"
    jsevaluatescript c newUint8Array a nullPtr 1 nullPtr
-- | Read a JS ArrayBuffer back into a 'ByteString' by viewing it as a
-- @Uint8Array@ and fetching each byte individually.
-- NOTE(review): this performs one JS evaluation per byte, so it is O(n)
-- round-trips and slow for large buffers.
bsFromArrayBuffer :: JSContextRef -> JSValueRef -> IO ByteString
bsFromArrayBuffer c a = do
  let getIntegral = fmap round . (\x -> jsvaluetonumber c x nullPtr)
  getByteLength <- jsstringcreatewithutf8cstring "this.byteLength"
  byteLength <- getIntegral =<< jsevaluatescript c getByteLength a nullPtr 1 nullPtr
  toUint8Array <- jsstringcreatewithutf8cstring "new Uint8Array(this)"
  uint8Array <- jsevaluatescript c toUint8Array a nullPtr 1 nullPtr
  getIx <- jsstringcreatewithutf8cstring "this[0][this[1]]"
  -- Look up index i via a two-element arguments object [array, i].
  let arrayLookup i = do
        i' <- jsvaluemakenumber c (fromIntegral i)
        args <- toJSObject c [uint8Array, i']
        getIntegral =<< jsevaluatescript c getIx args nullPtr 1 nullPtr
  BS.pack <$> forM [0..byteLength-1] arrayLookup
-- | Run an action with the JavaScript context of the view's main frame.
withWebViewContext :: WebView -> (JSContextRef -> IO a) -> IO a
withWebViewContext wv f = f =<< webFrameGetGlobalContext =<< webViewGetMainFrame wv
| manyoo/reflex-dom | src-ghc/Reflex/Dom/Internal/Foreign.hs | bsd-3-clause | 6,719 | 0 | 18 | 1,160 | 1,806 | 908 | 898 | 144 | 2 |
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
{-# OPTIONS_NHC98 -cpp #-}
{-# OPTIONS_JHC -fcpp -fffi #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Utils
-- Copyright : Isaac Jones, Simon Marlow 2003-2004
-- portions Copyright (c) 2007, Galois Inc.
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- A large and somewhat miscellaneous collection of utility functions used
-- throughout the rest of the Cabal lib and in other tools that use the Cabal
-- lib like @cabal-install@. It has a very simple set of logging actions. It
-- has low level functions for running programs, a bunch of wrappers for
-- various directory and file functions that do extra logging.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Utils (
cabalVersion,
-- * logging and errors
die,
dieWithLocation,
topHandler,
warn, notice, setupMessage, info, debug,
chattyTry,
-- * running programs
rawSystemExit,
rawSystemExitCode,
rawSystemExitWithEnv,
rawSystemStdout,
rawSystemStdInOut,
maybeExit,
xargs,
findProgramLocation,
findProgramVersion,
-- * copying files
smartCopySources,
createDirectoryIfMissingVerbose,
copyFileVerbose,
copyDirectoryRecursiveVerbose,
copyFiles,
-- * installing files
installOrdinaryFile,
installExecutableFile,
installOrdinaryFiles,
installDirectoryContents,
-- * File permissions
setFileOrdinary,
setFileExecutable,
-- * file names
currentDir,
-- * finding files
findFile,
findFirstFile,
findFileWithExtension,
findFileWithExtension',
findModuleFile,
findModuleFiles,
getDirectoryContentsRecursive,
-- * simple file globbing
matchFileGlob,
matchDirFileGlob,
parseFileGlob,
FileGlob(..),
-- * temp files and dirs
withTempFile,
withTempDirectory,
-- * .cabal and .buildinfo files
defaultPackageDesc,
findPackageDesc,
defaultHookedPackageDesc,
findHookedPackageDesc,
-- * reading and writing files safely
withFileContents,
writeFileAtomic,
rewriteFile,
-- * Unicode
fromUTF8,
toUTF8,
readUTF8File,
withUTF8FileContents,
writeUTF8File,
normaliseLineEndings,
-- * generic utils
equating,
comparing,
isInfixOf,
intercalate,
lowercase,
wrapText,
wrapLine,
) where
import Control.Monad
( when, unless, filterM )
#ifdef __GLASGOW_HASKELL__
import Control.Concurrent.MVar
( newEmptyMVar, putMVar, takeMVar )
#endif
import Data.List
( nub, unfoldr, isPrefixOf, tails, intersperse )
import Data.Char as Char
( toLower, chr, ord )
import Data.Bits
( Bits((.|.), (.&.), shiftL, shiftR) )
import System.Directory
( getDirectoryContents, doesDirectoryExist, doesFileExist, removeFile
, findExecutable )
import System.Environment
( getProgName )
import System.Cmd
( rawSystem )
import System.Exit
( exitWith, ExitCode(..) )
import System.FilePath
( normalise, (</>), (<.>), takeDirectory, splitFileName
, splitExtension, splitExtensions, splitDirectories )
import System.Directory
( createDirectory, renameFile, removeDirectoryRecursive )
import System.IO
( Handle, openFile, openBinaryFile, IOMode(ReadMode), hSetBinaryMode
, hGetContents, stderr, stdout, hPutStr, hFlush, hClose )
import System.IO.Error as IO.Error
( isDoesNotExistError, isAlreadyExistsError
, ioeSetFileName, ioeGetFileName, ioeGetErrorString )
#if !(defined(__HUGS__) || (defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 608))
import System.IO.Error
( ioeSetLocation, ioeGetLocation )
#endif
import System.IO.Unsafe
( unsafeInterleaveIO )
import qualified Control.Exception as Exception
import Distribution.Text
( display, simpleParse )
import Distribution.Package
( PackageIdentifier )
import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as ModuleName
import Distribution.Version
(Version(..))
import Control.Exception (evaluate)
import System.Process (runProcess)
#ifdef __GLASGOW_HASKELL__
import Control.Concurrent (forkIO)
import System.Process (runInteractiveProcess, waitForProcess)
#else
import System.Cmd (system)
import System.Directory (getTemporaryDirectory)
#endif
import Distribution.Compat.CopyFile
( copyFile, copyOrdinaryFile, copyExecutableFile
, setFileOrdinary, setFileExecutable, setDirOrdinary )
import Distribution.Compat.TempFile
( openTempFile, openNewBinaryFile, createTempDirectory )
import Distribution.Compat.Exception
( IOException, throwIOIO, tryIO, catchIO, catchExit, onException )
import Distribution.Verbosity
#ifdef VERSION_base
import qualified Paths_Cabal (version)
#endif
-- | The version of the Cabal library itself.
-- We only get our own version number when we're building with ourselves
-- (via the generated Paths_Cabal module); otherwise fall back to the
-- CABAL_VERSION macro or a bootstrap placeholder.
-- NOTE(review): the guard tests VERSION_base (matching the Paths_Cabal
-- import guard above) rather than a Cabal-specific macro — confirm intended.
cabalVersion :: Version
#if defined(VERSION_base)
cabalVersion = Paths_Cabal.version
#elif defined(CABAL_VERSION)
cabalVersion = Version [CABAL_VERSION] []
#else
cabalVersion = Version [1,9999] [] --used when bootstrapping
#endif
-- ----------------------------------------------------------------------------
-- Exception and logging utils
-- | Fail with an error message attributed to a particular file and
-- (optionally) line number; both are recorded on the thrown 'IOError'.
dieWithLocation :: FilePath -> Maybe Int -> String -> IO a
dieWithLocation filename lineno msg =
  ioError . setLocation lineno
          . flip ioeSetFileName (normalise filename)
          $ userError msg
  where
#if defined(__HUGS__) || (defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 608)
    -- Old implementations cannot attach a location to an IOError.
    setLocation _ err = err
#else
    setLocation Nothing  err = err
    setLocation (Just n) err = ioeSetLocation err (show n)
#endif
-- | Fail by throwing an 'IOError' carrying the given message; it is
-- rendered by 'topHandler' at the top level.
die :: String -> IO a
die = ioError . userError
-- | Top-level exception handler: run the program, and on any IO exception
-- print it as @prog: file:line: message@ (wrapped) to stderr and exit
-- with status 1.
topHandler :: IO a -> IO a
topHandler prog = catchIO prog handle
  where
    handle ioe = do
      hFlush stdout
      pname <- getProgName
      hPutStr stderr (mesage pname)
      exitWith (ExitFailure 1)
      where
        mesage pname = wrapText (pname ++ ": " ++ file ++ detail)
        file = case ioeGetFileName ioe of
                 Nothing   -> ""
                 Just path -> path ++ location ++ ": "
#if defined(__HUGS__) || (defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 608)
        location = ""
#else
        -- Only show the location when it looks like a line number.
        location = case ioeGetLocation ioe of
                     l@(n:_) | n >= '0' && n <= '9' -> ':' : l
                     _                              -> ""
#endif
        detail = ioeGetErrorString ioe
-- | Non fatal conditions that may be indicative of an error or problem.
--
-- Shown at 'normal' verbosity and above. Output goes to stderr; stdout is
-- flushed first so interleaved output stays ordered.
warn :: Verbosity -> String -> IO ()
warn verbosity msg =
  unless (verbosity < normal) $
    hFlush stdout >> hPutStr stderr (wrapText ("Warning: " ++ msg))
-- | Useful status messages.
--
-- Shown at 'normal' verbosity and above: the ordinary helpful status
-- messages that users see — just enough information to know that things
-- are working, without floods of detail.
notice :: Verbosity -> String -> IO ()
notice verbosity = when (verbosity >= normal) . putStr . wrapText
-- | Announce a setup phase for a package, e.g. @Configuring foo-1.0...@.
setupMessage :: Verbosity -> String -> PackageIdentifier -> IO ()
setupMessage verbosity msg pkgid =
  notice verbosity $ msg ++ (' ' : display pkgid) ++ "..."
-- | More detail on the operation of some action.
--
-- Shown when the verbosity level is 'verbose' or above.
info :: Verbosity -> String -> IO ()
info verbosity = when (verbosity >= verbose) . putStr . wrapText
-- | Detailed internal debugging information.
--
-- Shown only at the 'deafening' verbosity level; flushed immediately so it
-- interleaves correctly with subprocess output.
debug :: Verbosity -> String -> IO ()
debug verbosity msg =
  when (verbosity >= deafening) (putStr (wrapText msg) >> hFlush stdout)
-- | Perform an IO action, catching any IO exceptions and printing an error
-- if one occurs.
chattyTry :: String  -- ^ a description of the action we were attempting
          -> IO ()   -- ^ the action itself
          -> IO ()
chattyTry desc action =
  action `catchIO` \exception ->
    putStrLn ("Error while " ++ desc ++ ": " ++ show exception)
-- -----------------------------------------------------------------------------
-- Helper functions
-- | Wrap text to the default line width (79 columns). Existing newlines are
-- preserved: each input line is re-flowed independently.
wrapText :: String -> String
wrapText str = unlines
  [ unwords lineWords
  | inputLine <- lines str
  , lineWords <- wrapLine 79 (words inputLine) ]
-- | Break a list of words into lines no wider than @width@ columns when
-- joined with single spaces. A word longer than the width still gets a
-- line of its own.
wrapLine :: Int -> [String] -> [[String]]
wrapLine width = go 0 []
  where
    go :: Int -> [String] -> [String] -> [[String]]
    -- An overlong word at the start of a line is taken anyway.
    go 0 [] (w:ws)
      | length w + 1 > width        = go (length w) [w] ws
    go used acc (w:ws)
      | used + length w + 1 > width = reverse acc : go 0 [] (w:ws)
      | otherwise                   = go (used + length w + 1) (w:acc) ws
    go _ []  []                     = []
    go _ acc []                     = [reverse acc]
-- -----------------------------------------------------------------------------
-- rawSystem variants
-- | Run an action yielding an 'ExitCode'; terminate the whole program with
-- that code if it was not 'ExitSuccess'.
maybeExit :: IO ExitCode -> IO ()
maybeExit cmd = do
  res <- cmd
  when (res /= ExitSuccess) $ exitWith res
-- | Trace an external command invocation according to verbosity: at
-- 'deafening' show the raw @(path, args)@ pair, at 'verbose' the rendered
-- command line, otherwise nothing.
printRawCommandAndArgs :: Verbosity -> FilePath -> [String] -> IO ()
printRawCommandAndArgs verbosity path args =
  if verbosity >= deafening
    then print (path, args)
    else when (verbosity >= verbose) $ putStrLn (unwords (path : args))
-- | Like 'printRawCommandAndArgs' but additionally dumps the environment
-- at 'deafening' verbosity.
printRawCommandAndArgsAndEnv :: Verbosity
                             -> FilePath
                             -> [String]
                             -> [(String, String)]
                             -> IO ()
printRawCommandAndArgsAndEnv verbosity path args env
 | verbosity >= deafening = do putStrLn ("Environment: " ++ show env)
                               print (path, args)
 | verbosity >= verbose   = putStrLn $ unwords (path : args)
 | otherwise              = return ()
-- | Run an external command; exit this process with the same exit code if
-- the subcommand fails.
rawSystemExit :: Verbosity -> FilePath -> [String] -> IO ()
rawSystemExit verbosity path args = do
  printRawCommandAndArgs verbosity path args
  -- Flush so our output appears before the child's.
  hFlush stdout
  exitcode <- rawSystem path args
  unless (exitcode == ExitSuccess) $ do
    debug verbosity $ path ++ " returned " ++ show exitcode
    exitWith exitcode
-- | Run an external command and return its exit code; a failure is logged
-- at debug level but (unlike 'rawSystemExit') never exits this process.
rawSystemExitCode :: Verbosity -> FilePath -> [String] -> IO ExitCode
rawSystemExitCode verbosity path args = do
  printRawCommandAndArgs verbosity path args
  hFlush stdout
  exitcode <- rawSystem path args
  unless (exitcode == ExitSuccess) $ do
    debug verbosity $ path ++ " returned " ++ show exitcode
  return exitcode
-- | Like 'rawSystemExit', but the subprocess runs with exactly the given
-- environment instead of inheriting ours.
rawSystemExitWithEnv :: Verbosity
                     -> FilePath
                     -> [String]
                     -> [(String, String)]
                     -> IO ()
rawSystemExitWithEnv verbosity path args env = do
  printRawCommandAndArgsAndEnv verbosity path args env
  hFlush stdout
  ph <- runProcess path args Nothing (Just env) Nothing Nothing Nothing
  exitcode <- waitForProcess ph
  unless (exitcode == ExitSuccess) $ do
    debug verbosity $ path ++ " returned " ++ show exitcode
    exitWith exitcode
-- | Run a command and return its output.
--
-- The output is assumed to be text in the locale encoding.
-- Dies with the command's stderr text if it exits unsuccessfully.
rawSystemStdout :: Verbosity -> FilePath -> [String] -> IO String
rawSystemStdout verbosity path args = do
  (output, errors, exitCode) <- rawSystemStdInOut verbosity path args
                                                  Nothing False
  when (exitCode /= ExitSuccess) $
    die errors
  return output
-- | Run a command and return its output, errors and exit status. Optionally
-- also supply some input. Also provides control over whether the binary/text
-- mode of the input and output.
rawSystemStdInOut :: Verbosity
                  -> FilePath -> [String]
                  -> Maybe (String, Bool) -- ^ input text and binary mode
                  -> Bool                 -- ^ output in binary mode
                  -> IO (String, String, ExitCode) -- ^ output, errors, exit
rawSystemStdInOut verbosity path args input outputBinary = do
  printRawCommandAndArgs verbosity path args
#ifdef __GLASGOW_HASKELL__
  -- GHC: talk to the process over pipes; bracket guarantees handles close.
  Exception.bracket
     (runInteractiveProcess path args Nothing Nothing)
     (\(inh,outh,errh,_) -> hClose inh >> hClose outh >> hClose errh)
    $ \(inh,outh,errh,pid) -> do
      -- output mode depends on what the caller wants
      hSetBinaryMode outh outputBinary
      -- but the errors are always assumed to be text (in the current locale)
      hSetBinaryMode errh False
      -- fork off a couple threads to pull on the stderr and stdout
      -- so if the process writes to stderr we do not block.
      err <- hGetContents errh
      out <- hGetContents outh
      mv <- newEmptyMVar
      -- Fully force a stream, then signal completion on the shared MVar.
      let force str = (evaluate (length str) >> return ())
            `Exception.finally` putMVar mv ()
          --TODO: handle exceptions like text decoding.
      _ <- forkIO $ force out
      _ <- forkIO $ force err
      -- push all the input, if any
      case input of
        Nothing -> return ()
        Just (inputStr, inputBinary) -> do
          -- input mode depends on what the caller wants
          hSetBinaryMode inh inputBinary
          hPutStr inh inputStr
          hClose inh
          --TODO: this probably fails if the process refuses to consume
          -- or if it closes stdin (eg if it exits)
      -- wait for both to finish, in either order
      takeMVar mv
      takeMVar mv
      -- wait for the program to terminate
      exitcode <- waitForProcess pid
      unless (exitcode == ExitSuccess) $
        debug verbosity $ path ++ " returned " ++ show exitcode
                       ++ if null err then "" else
                          " with error message:\n" ++ err
      return (out, err, exitcode)
#else
  -- Non-GHC fallback: shell out with temp files standing in for the pipes.
  tmpDir <- getTemporaryDirectory
  withTempFile tmpDir ".cmd.stdout" $ \outName outHandle ->
   withTempFile tmpDir ".cmd.stdin" $ \inName inHandle -> do
    hClose outHandle
    case input of
      Nothing -> return ()
      Just (inputStr, inputBinary) -> do
        hSetBinaryMode inHandle inputBinary
        hPutStr inHandle inputStr
        hClose inHandle
    let quote name = "'" ++ name ++ "'"
        cmd = unwords (map quote (path:args))
           ++ " <" ++ quote inName
           ++ " >" ++ quote outName
    exitcode <- system cmd
    unless (exitcode == ExitSuccess) $
      debug verbosity $ path ++ " returned " ++ show exitcode
    Exception.bracket (openFile outName ReadMode) hClose $ \hnd -> do
      hSetBinaryMode hnd outputBinary
      output <- hGetContents hnd
      -- Force the output before the handle is closed by the bracket.
      length output `seq` return (output, "", exitcode)
#endif
-- | Look for a program on the system search path, logging the outcome at
-- debug level.
findProgramLocation :: Verbosity -> FilePath -> IO (Maybe FilePath)
findProgramLocation verbosity prog = do
  debug verbosity $ "searching for " ++ prog ++ " in path."
  res <- findExecutable prog
  debug verbosity $ case res of
    Nothing   -> "Cannot find " ++ prog ++ " on the path"
    Just path -> "found " ++ prog ++ " at " ++ path
  return res
-- | Look for a program and try to find it's version number. It can accept
-- either an absolute path or the name of a program binary, in which case we
-- will look for the program on the path.
findProgramVersion :: String             -- ^ version args
                   -> (String -> String) -- ^ function to select version
                                         --   number from program output
                   -> Verbosity
                   -> FilePath           -- ^ location
                   -> IO (Maybe Version)
findProgramVersion versionArg selectVersion verbosity path = do
  -- Any IO failure or abnormal exit is treated as empty output, which then
  -- simply fails to parse below and yields Nothing with a warning.
  str <- rawSystemStdout verbosity path [versionArg]
         `catchIO`   (\_ -> return "")
         `catchExit` (\_ -> return "")
  let version :: Maybe Version
      version = simpleParse (selectVersion str)
  case version of
    Nothing -> warn verbosity $ "cannot determine version of " ++ path
                             ++ " :\n" ++ show str
    Just v  -> debug verbosity $ path ++ " is version " ++ display v
  return version
-- | Like the unix xargs program. Useful for when we've got very long command
-- lines that might overflow an OS limit on command line length and so you
-- need to invoke a command multiple times to get all the args in.
--
-- Use it with either of the rawSystem variants above. For example:
--
-- > xargs (32*1024) (rawSystemExit verbosity) prog fixedArgs bigArgs
--
xargs :: Int -> ([String] -> IO ())
      -> [String] -> [String] -> IO ()
xargs maxSize rawSystemFun fixedArgs bigArgs =
    mapM_ (rawSystemFun . (fixedArgs ++)) (chunks budget bigArgs)
  where
    -- Space left for the big args once the fixed args (and the spaces
    -- joining them) are accounted for.
    budget = maxSize - (sum (map length fixedArgs) + length fixedArgs)

    -- Split the remaining args into maximal runs fitting the budget.
    chunks room = unfoldr $ \args ->
      case args of
        [] -> Nothing
        _  -> Just (takeChunk [] room args)

    takeChunk acc _ [] = (reverse acc, [])
    takeChunk acc room (a:as)
      | length a < room = takeChunk (a:acc) (room - length a - 1) as
      | otherwise       = (reverse acc, a:as)
-- ------------------------------------------------------------
-- * File Utilities
-- ------------------------------------------------------------
----------------
-- Finding files
-- | Find a file by looking in a search path. The file path must match exactly.
-- Dies if the file is found in none of the locations.
findFile :: [FilePath] -- ^ search locations
         -> FilePath   -- ^ file name
         -> IO FilePath
findFile searchPath fileName = do
  found <- findFirstFile id [ dir </> fileName | dir <- nub searchPath ]
  case found of
    Just path -> return path
    Nothing   -> die (fileName ++ " doesn't exist")
-- | Find a file by looking in a search path with one of a list of possible
-- file extensions. The file base name should be given and it will be tried
-- with each of the extensions in each element of the search path.
findFileWithExtension :: [String]
                      -> [FilePath]
                      -> FilePath
                      -> IO (Maybe FilePath)
findFileWithExtension extensions searchPath baseName =
    findFirstFile id candidates
  where
    candidates = [ dir </> baseName <.> ext
                 | dir <- nub searchPath
                 , ext <- nub extensions ]
-- | Like 'findFileWithExtension' but returns which element of the search path
-- the file was found in, and the file path relative to that base directory.
findFileWithExtension' :: [String]
                       -> [FilePath]
                       -> FilePath
                       -> IO (Maybe (FilePath, FilePath))
findFileWithExtension' extensions searchPath baseName =
    findFirstFile (uncurry (</>)) candidates
  where
    candidates = [ (dir, baseName <.> ext)
                 | dir <- nub searchPath
                 , ext <- nub extensions ]
-- | Return the first candidate whose rendered file path exists on disk,
-- checking candidates lazily in order.
findFirstFile :: (a -> FilePath) -> [a] -> IO (Maybe a)
findFirstFile file = go
  where
    go []     = return Nothing
    go (c:cs) = do
      exists <- doesFileExist (file c)
      if exists then return (Just c) else go cs
-- | Finds the files corresponding to a list of Haskell module names.
--
-- As 'findModuleFile' but for a list of module names.
findModuleFiles :: [FilePath]   -- ^ build prefix (location of objects)
                -> [String]     -- ^ search suffixes
                -> [ModuleName] -- ^ modules
                -> IO [(FilePath, FilePath)]
findModuleFiles searchPath extensions = mapM (findModuleFile searchPath extensions)
-- | Find the file corresponding to a Haskell module name.
--
-- This is similar to 'findFileWithExtension'' but specialised to a module
-- name. The function fails if the file corresponding to the module is missing.
findModuleFile :: [FilePath] -- ^ build prefix (location of objects)
               -> [String]   -- ^ search suffixes
               -> ModuleName -- ^ module
               -> IO (FilePath, FilePath)
findModuleFile searchPath extensions moduleName =
    maybe notFound return
  =<< findFileWithExtension' extensions searchPath
                             (ModuleName.toFilePath moduleName)
  where
    notFound = die $ "Error: Could not find module: " ++ display moduleName
                  ++ " with any suffix: " ++ show extensions
                  ++ " in the search path: " ++ show searchPath
-- | List all the files in a directory and all subdirectories.
--
-- The order places files in sub-directories after all the files in their
-- parent directories. The list is generated lazily so is not well defined if
-- the source directory structure changes before the list is used.
getDirectoryContentsRecursive :: FilePath -> IO [FilePath]
getDirectoryContentsRecursive topdir = recurseDirectories [""]
  where
    -- Emit this directory's files, then recurse into the accumulated
    -- subdirectories; unsafeInterleaveIO makes the whole listing lazy.
    recurseDirectories :: [FilePath] -> IO [FilePath]
    recurseDirectories []         = return []
    recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do
      (files, dirs') <- collect [] [] =<< getDirectoryContents (topdir </> dir)
      files' <- recurseDirectories (dirs' ++ dirs)
      return (files ++ files')
      where
        -- Partition entries into files and directories (both reversed back
        -- to original order at the end).
        collect files dirs' []              = return (reverse files, reverse dirs')
        collect files dirs' (entry:entries) | ignore entry
                                            = collect files dirs' entries
        collect files dirs' (entry:entries) = do
          let dirEntry = dir </> entry
          isDirectory <- doesDirectoryExist (topdir </> dirEntry)
          if isDirectory
            then collect files (dirEntry:dirs') entries
            else collect (dirEntry:files) dirs' entries

        -- Skip the special "." and ".." entries.
        ignore ['.']      = True
        ignore ['.', '.'] = True
        ignore _          = False
----------------
-- File globbing
-- | The restricted glob syntax Cabal supports in file lists.
data FileGlob
  -- | No glob at all, just an ordinary file
  = NoGlob FilePath
  -- | dir prefix and extension, like @\"foo\/bar\/\*.baz\"@ corresponds to
  --   @FileGlob \"foo\/bar\" \".baz\"@
  | FileGlob FilePath String
-- | Recognise the restricted glob syntax: a @*@ wildcard may appear only in
-- place of the file name (with a mandatory extension), e.g. @foo\/bar\/*.baz@.
-- Any other use of @*@ yields 'Nothing'.
parseFileGlob :: FilePath -> Maybe FileGlob
parseFileGlob filepath =
  case splitFileName prefix of
    (dir, "*")
      | '*' `elem` dir || '*' `elem` ext || null ext -> Nothing
      | null dir  -> Just (FileGlob "." ext)
      | otherwise -> Just (FileGlob dir ext)
    _ | '*' `elem` filepath -> Nothing
      | otherwise           -> Just (NoGlob filepath)
  where
    (prefix, ext) = splitExtensions filepath
-- | Expand a glob relative to the current directory; see 'matchDirFileGlob'.
matchFileGlob :: FilePath -> IO [FilePath]
matchFileGlob glob = matchDirFileGlob "." glob
-- | Expand a (restricted) glob relative to @dir@. Dies if the glob is
-- malformed or matches no files; plain non-glob paths pass through unchecked.
matchDirFileGlob :: FilePath -> FilePath -> IO [FilePath]
matchDirFileGlob dir filepath = case parseFileGlob filepath of
  Nothing -> die $ "invalid file glob '" ++ filepath
              ++ "'. Wildcards '*' are only allowed in place of the file"
              ++ " name, not in the directory name or file extension."
              ++ " If a wildcard is used it must be with an file extension."
  Just (NoGlob filepath') -> return [filepath']
  Just (FileGlob dir' ext) -> do
    files <- getDirectoryContents (dir </> dir')
    -- Keep entries with a non-empty base name and exactly the glob's
    -- (possibly multi-part) extension.
    case [ dir' </> file
         | file <- files
         , let (name, ext') = splitExtensions file
         , not (null name) && ext' == ext ] of
      []      -> die $ "filepath wildcard '" ++ filepath
                    ++ "' does not match any files."
      matches -> return matches
----------------------------------------
-- Copying and installing files and dirs
-- | Same as 'createDirectoryIfMissing' but logs at higher verbosity levels.
createDirectoryIfMissingVerbose :: Verbosity
                                -> Bool     -- ^ Create its parents too?
                                -> FilePath
                                -> IO ()
createDirectoryIfMissingVerbose verbosity create_parents path0
  | create_parents = createDirs (parents path0)
  | otherwise      = createDirs (take 1 (parents path0))
  where
    -- Ancestor directories, innermost first (hence the reverse).
    parents = reverse . scanl1 (</>) . splitDirectories . normalise

    createDirs []         = return ()
    createDirs (dir:[])   = createDir dir throwIOIO
    createDirs (dir:dirs) =
      -- Optimistically create the innermost dir; if its parent is missing,
      -- create the parents first and then retry (second attempt rethrows).
      createDir dir $ \_ -> do
        createDirs dirs
        createDir dir throwIOIO

    createDir :: FilePath -> (IOException -> IO ()) -> IO ()
    createDir dir notExistHandler = do
      r <- tryIO $ createDirectoryVerbose verbosity dir
      case (r :: Either IOException ()) of
        Right ()                   -> return ()
        Left  e
          | isDoesNotExistError  e -> notExistHandler e
          -- createDirectory (and indeed POSIX mkdir) does not distinguish
          -- between a dir already existing and a file already existing. So we
          -- check for it here. Unfortunately there is a slight race condition
          -- here, but we think it is benign. It could report an exeption in
          -- the case that the dir did exist but another process deletes the
          -- directory and creates a file in its place before we can check
          -- that the directory did indeed exist.
          | isAlreadyExistsError e -> (do
              isDir <- doesDirectoryExist dir
              if isDir then return ()
                       else throwIOIO e
              ) `catchIO` ((\_ -> return ()) :: IOException -> IO ())
          | otherwise              -> throwIOIO e
-- | Create a directory (failing if it already exists), logging at 'verbose'
-- level and applying the standard permissions for installed directories.
createDirectoryVerbose :: Verbosity -> FilePath -> IO ()
createDirectoryVerbose verbosity dir = do
  info verbosity $ "creating " ++ dir
  createDirectory dir
  setDirOrdinary dir
-- | Copies a file without copying file permissions. The target file is
-- created with default permissions. Any existing target file is replaced.
--
-- At higher verbosity levels it logs an info message.
copyFileVerbose :: Verbosity -> FilePath -> FilePath -> IO ()
copyFileVerbose verbosity src dest =
  info verbosity ("copy " ++ src ++ " to " ++ dest) >> copyFile src dest
-- | Install an ordinary file. This is like a file copy but the permissions
-- are set appropriately for an installed file. On Unix it is \"-rw-r--r--\"
-- while on Windows it uses the default permissions for the target directory.
installOrdinaryFile :: Verbosity -> FilePath -> FilePath -> IO ()
installOrdinaryFile verbosity src dest =
  info verbosity ("Installing " ++ src ++ " to " ++ dest)
    >> copyOrdinaryFile src dest
-- | Install an executable file. This is like a file copy but the permissions
-- are set appropriately for an installed file. On Unix it is \"-rwxr-xr-x\"
-- while on Windows it uses the default permissions for the target directory.
installExecutableFile :: Verbosity -> FilePath -> FilePath -> IO ()
installExecutableFile verbosity src dest =
  info verbosity ("Installing executable " ++ src ++ " to " ++ dest)
    >> copyExecutableFile src dest
-- | Copies a bunch of files to a target directory, preserving the directory
-- structure in the target location. The target directories are created if
-- they do not exist.
--
-- The files are identified by a pair of base directory and a path relative
-- to that base. It is only the relative part that is preserved in the
-- destination.
--
-- For example:
--
-- > copyFiles normal "dist/src"
-- >    [("", "src/Foo.hs"), ("dist/build/", "src/Bar.hs")]
--
-- This would copy \"src\/Foo.hs\" to \"dist\/src\/src\/Foo.hs\" and
-- copy \"dist\/build\/src\/Bar.hs\" to \"dist\/src\/src\/Bar.hs\".
--
-- This operation is not atomic. Any IO failure during the copy (including
-- any missing source files) leaves the target in an unknown state so it is
-- best to use it with a freshly created directory so that it can be simply
-- deleted if anything goes wrong.
copyFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
copyFiles verbosity targetDir srcFiles = do
  -- Ensure every destination directory exists before copying.
  let destDirs = map (targetDir </>) . nub . map (takeDirectory . snd) $ srcFiles
  mapM_ (createDirectoryIfMissingVerbose verbosity True) destDirs
  -- Copy each file, keeping only the relative part of its path.
  mapM_ (\(srcBase, srcFile) ->
           copyFileVerbose verbosity (srcBase </> srcFile) (targetDir </> srcFile))
        srcFiles
-- | Like 'copyFiles', but installs each file with 'installOrdinaryFile'
-- (i.e. with installed-file permissions) instead of a plain copy.
--
installOrdinaryFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
installOrdinaryFiles verbosity targetDir srcFiles = do
  -- First make sure every parent directory exists in the target.
  let parentDirs = map (targetDir </>) (nub [ takeDirectory rel | (_, rel) <- srcFiles ])
  mapM_ (createDirectoryIfMissingVerbose verbosity True) parentDirs
  -- Then install each file into place.
  mapM_ installOne srcFiles
  where
    installOne (base, rel) =
      installOrdinaryFile verbosity (base </> rel) (targetDir </> rel)
-- | Recursively install every file found under a source directory into a
-- destination directory, preserving the directory layout.  All files are
-- treated as ordinary (non-executable) files.
--
installDirectoryContents :: Verbosity -> FilePath -> FilePath -> IO ()
installDirectoryContents verbosity srcDir destDir = do
  info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
  entries <- getDirectoryContentsRecursive srcDir
  installOrdinaryFiles verbosity destDir (map ((,) srcDir) entries)
---------------------------------
-- Deprecated file copy functions

{-# DEPRECATED smartCopySources
      "Use findModuleFiles and copyFiles or installOrdinaryFiles" #-}
-- | Deprecated: locate the given modules on the search path and copy them
-- into the target directory.
smartCopySources :: Verbosity -> [FilePath] -> FilePath
                 -> [ModuleName] -> [String] -> IO ()
smartCopySources verbosity searchPath targetDir moduleNames extensions = do
  files <- findModuleFiles searchPath extensions moduleNames
  copyFiles verbosity targetDir files
{-# DEPRECATED copyDirectoryRecursiveVerbose
      "You probably want installDirectoryContents instead" #-}
-- | Deprecated: recursively copy the contents of one directory to another.
copyDirectoryRecursiveVerbose :: Verbosity -> FilePath -> FilePath -> IO ()
copyDirectoryRecursiveVerbose verbosity srcDir destDir = do
  info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
  entries <- getDirectoryContentsRecursive srcDir
  copyFiles verbosity destDir (map ((,) srcDir) entries)
---------------------------
-- Temporary files and dirs

-- | Run an action with a freshly created temporary file; the handle is
-- closed and the file removed afterwards, even if the action throws.
--
withTempFile :: FilePath -- ^ Temp dir to create the file in
             -> String   -- ^ File name template. See 'openTempFile'.
             -> (FilePath -> Handle -> IO a) -> IO a
withTempFile tmpDir template action =
  Exception.bracket
    (openTempFile tmpDir template)
    cleanup
    (uncurry action)
  where
    cleanup (path, handle) = hClose handle >> removeFile path
-- | Create and use a temporary directory, deleting it (recursively) after
-- use, even if the action throws.
--
-- A new subdirectory, named from the template, is created inside the given
-- directory.  For example:
--
-- > withTempDirectory verbosity "src" "sdist." $ \tmpDir -> do ...
--
-- makes @tmpDir@ a fresh subdirectory such as @src/sdist.342@.
--
withTempDirectory :: Verbosity -> FilePath -> String -> (FilePath -> IO a) -> IO a
withTempDirectory _verbosity targetDir template action =
  Exception.bracket
    (createTempDirectory targetDir template)
    removeDirectoryRecursive
    action
-----------------------------------
-- Safely reading and writing files

-- | Read a file's contents and hand them to an action, guaranteeing the
-- handle gets closed.  Reading is lazy; whatever the action does not
-- consume is truncated when the handle is closed.
--
withFileContents :: FilePath -> (String -> IO a) -> IO a
withFileContents name action =
  Exception.bracket
    (openFile name ReadMode)
    hClose
    (\handle -> action =<< hGetContents handle)
-- | Writes a file atomically.
--
-- The file is either written successfully or an IO exception is raised and
-- the original file is left unchanged.
--
-- On windows it is not possible to delete a file that is open by a process.
-- This case will give an IO exception but the atomic property is not affected.
--
writeFileAtomic :: FilePath -> String -> IO ()
writeFileAtomic targetFile content = do
  -- Write into a fresh temp file in the *same* directory as the target,
  -- so the final 'renameFile' stays within one directory/filesystem.
  (tmpFile, tmpHandle) <- openNewBinaryFile targetDir template
  do  hPutStr tmpHandle content
      hClose tmpHandle
      renameFile tmpFile targetFile
   -- On any failure above, close and delete the temp file; the original
   -- target file has not been touched at that point.
   `onException` do hClose tmpHandle
                    removeFile tmpFile
  where
    template = targetName <.> "tmp"
    -- splitFileName can give an empty directory part; fall back to ".".
    targetDir | null targetDir_ = currentDir
              | otherwise       = targetDir_
    --TODO: remove this when takeDirectory/splitFileName is fixed
    --      to always return a valid dir
    (targetDir_,targetName) = splitFileName targetFile
-- | Write a file but only if it would have new content. If we would be writing
-- the same as the existing content then leave the file as is so that we do not
-- update the file's modification time.
--
rewriteFile :: FilePath -> String -> IO ()
rewriteFile path newContent =
  flip catchIO mightNotExist $ do
    existingContent <- readFile path
    -- Force the entire (lazily read) contents so the read is complete
    -- before we potentially rewrite the very same path below.
    _ <- evaluate (length existingContent)
    unless (existingContent == newContent) $
      writeFileAtomic path newContent
  where
    -- A non-existent file simply means we must write it out.
    mightNotExist e | isDoesNotExistError e = writeFileAtomic path newContent
                    | otherwise             = ioError e
-- | The path name that represents the current directory.
-- In Unix, it's @\".\"@, but this is system-specific.
-- (E.g. AmigaOS uses the empty string @\"\"@ for the current directory.)
currentDir :: FilePath
currentDir = "."
-- ------------------------------------------------------------
-- * Finding the description file
-- ------------------------------------------------------------

-- | Package description file (/pkgname/@.cabal@), looked up in the
-- current directory via 'findPackageDesc'.
defaultPackageDesc :: Verbosity -> IO FilePath
defaultPackageDesc _verbosity = findPackageDesc currentDir
-- |Find a package description file in the given directory. Looks for
-- @.cabal@ files.  Fails (via 'die') when none or more than one is found.
findPackageDesc :: FilePath -- ^Where to look
                -> IO FilePath -- ^<pkgname>.cabal
findPackageDesc dir = do
  files <- getDirectoryContents dir
  -- Keep only existing *files* with extension ".cabal" and a non-empty
  -- base name, so that a ~/.cabal/ directory is never mistaken for a
  -- <pkgname>.cabal file.
  cabalFiles <- filterM doesFileExist
                  [ dir </> file
                  | file <- files
                  , let (name, ext) = splitExtension file
                  , not (null name) && ext == ".cabal" ]
  case cabalFiles of
    [cabalFile] -> return cabalFile
    []          -> die $ "No cabal file found.\n"
                      ++ "Please create a package description file <pkgname>.cabal"
    multiple    -> die $ "Multiple cabal files found.\n"
                      ++ "Please use only one of: "
                      ++ intercalate ", " multiple
-- |Optional auxiliary package information file (/pkgname/@.buildinfo@),
-- looked up in the current directory via 'findHookedPackageDesc'.
defaultHookedPackageDesc :: IO (Maybe FilePath)
defaultHookedPackageDesc = findHookedPackageDesc currentDir
-- |Find auxiliary package information in the given directory.
-- Looks for @.buildinfo@ files; fails (via 'die') if several are present.
findHookedPackageDesc
    :: FilePath                 -- ^Directory to search
    -> IO (Maybe FilePath)      -- ^/dir/@\/@/pkgname/@.buildinfo@, if present
findHookedPackageDesc dir = do
  entries <- getDirectoryContents dir
  -- Existing files with a non-empty base name and the .buildinfo extension.
  candidates <- filterM doesFileExist
                  [ dir </> entry
                  | entry <- entries
                  , let (base, ext) = splitExtension entry
                  , not (null base) && ext == buildInfoExt ]
  case candidates of
    []  -> return Nothing
    [f] -> return (Just f)
    _   -> die ("Multiple files with extension " ++ buildInfoExt)
-- | Extension (including the dot) of auxiliary build information files.
buildInfoExt  :: String
buildInfoExt = ".buildinfo"
-- ------------------------------------------------------------
-- * Unicode stuff
-- ------------------------------------------------------------

-- This is a modification of the UTF8 code from gtk2hs and the
-- utf8-string package.

-- | Decode a UTF-8 byte stream (one byte per input 'Char') to a 'String'.
-- Decoding never fails: ill-formed bytes, overlong encodings, surrogates
-- and U+FFFE/U+FFFF all decode to U+FFFD (the replacement character).
fromUTF8 :: String -> String
fromUTF8 [] = []
fromUTF8 (c:cs)
  | c <= '\x7F' = c : fromUTF8 cs                  -- single byte (ASCII)
  | c <= '\xBF' = replacementChar : fromUTF8 cs    -- stray continuation byte
  | c <= '\xDF' = twoBytes c cs                    -- 2-byte sequence
  | c <= '\xEF' = moreBytes 3 0x800     cs (ord c .&. 0xF)
  | c <= '\xF7' = moreBytes 4 0x10000   cs (ord c .&. 0x7)
  | c <= '\xFB' = moreBytes 5 0x200000  cs (ord c .&. 0x3)
  | c <= '\xFD' = moreBytes 6 0x4000000 cs (ord c .&. 0x1)
  | otherwise   = replacementChar : fromUTF8 cs    -- 0xFE/0xFF: never valid
  where
    -- 2-byte sequences are decoded directly; d >= 0x80 rejects overlong
    -- encodings of code points that fit in one byte.
    twoBytes c0 (c1:cs')
      | ord c1 .&. 0xC0 == 0x80
      = let d = ((ord c0 .&. 0x1F) `shiftL` 6)
             .|. (ord c1 .&. 0x3F)
         in if d >= 0x80
               then chr d : fromUTF8 cs'
               else replacementChar : fromUTF8 cs'
    twoBytes _ cs' = replacementChar : fromUTF8 cs'

    -- moreBytes n overlong cs acc: consume (n-1) further continuation
    -- bytes, shifting their payload bits into acc.  'overlong' is the
    -- smallest code point that genuinely needs this many bytes.
    moreBytes :: Int -> Int -> [Char] -> Int -> [Char]
    moreBytes 1 overlong cs' acc
      -- Accept only: not overlong, within Unicode range, not a surrogate,
      -- and not the non-characters U+FFFE/U+FFFF.
      | overlong <= acc && acc <= 0x10FFFF
        && (acc < 0xD800 || 0xDFFF < acc)
        && (acc < 0xFFFE || 0xFFFF < acc)
      = chr acc : fromUTF8 cs'
      | otherwise
      = replacementChar : fromUTF8 cs'
    moreBytes byteCount overlong (cn:cs') acc
      | ord cn .&. 0xC0 == 0x80
      = moreBytes (byteCount-1) overlong cs'
          ((acc `shiftL` 6) .|. ord cn .&. 0x3F)
    moreBytes _ _ cs' _
      = replacementChar : fromUTF8 cs'

    -- U+FFFD, the Unicode replacement character.
    replacementChar = '\xfffd'
-- | Encode a 'String' as a UTF-8 byte stream (one byte per output 'Char'),
-- using 1 to 4 bytes per code point depending on its magnitude.
toUTF8 :: String -> String
toUTF8 [] = []
toUTF8 (c:cs)
  | c <= '\x07F' = c                                    -- 1 byte: ASCII as-is
                 : toUTF8 cs
  | c <= '\x7FF' = chr (0xC0 .|. (w `shiftR` 6))        -- 2 bytes
                 : chr (0x80 .|. (w .&. 0x3F))
                 : toUTF8 cs
  | c <= '\xFFFF'= chr (0xE0 .|. (w `shiftR` 12))       -- 3 bytes
                 : chr (0x80 .|. ((w `shiftR` 6)  .&. 0x3F))
                 : chr (0x80 .|. (w .&. 0x3F))
                 : toUTF8 cs
  | otherwise    = chr (0xf0 .|. (w `shiftR` 18))       -- 4 bytes
                 : chr (0x80 .|. ((w `shiftR` 12) .&. 0x3F))
                 : chr (0x80 .|. ((w `shiftR` 6)  .&. 0x3F))
                 : chr (0x80 .|. (w .&. 0x3F))
                 : toUTF8 cs
  where w = ord c
-- | Drop a Unicode byte order mark (BOM), if present, from the start of
-- the input; otherwise return the input unchanged.
--
ignoreBOM :: String -> String
ignoreBOM str = case str of
  '\xFEFF' : rest -> rest
  _               -> str
-- | Reads a UTF8 encoded text file as a Unicode String.
--
-- Reading is lazy ('hGetContents'); any leading byte order mark is dropped.
--
readUTF8File :: FilePath -> IO String
readUTF8File f = do
  hnd <- openBinaryFile f ReadMode
  raw <- hGetContents hnd
  return (ignoreBOM (fromUTF8 raw))
-- | Reads a UTF8 encoded text file as a Unicode String and passes it to
-- the given action.  Same handle-safety behaviour as 'withFileContents'.
--
withUTF8FileContents :: FilePath -> (String -> IO a) -> IO a
withUTF8FileContents name action =
  Exception.bracket
    (openBinaryFile name ReadMode)
    hClose
    (\hnd -> do raw <- hGetContents hnd
                action (ignoreBOM (fromUTF8 raw)))
-- | Writes a Unicode String as a UTF8 encoded text file.
--
-- Uses 'writeFileAtomic', so provides the same guarantees.
--
writeUTF8File :: FilePath -> String -> IO ()
writeUTF8File path text = writeFileAtomic path (toUTF8 text)
-- | Normalise platform line-ending conventions: CRLF (Windows) and lone
-- CR (old Mac OS) both become a single LF.
normaliseLineEndings :: String -> String
normaliseLineEndings input = case input of
  []               -> []
  '\r':'\n':rest   -> '\n' : normaliseLineEndings rest  -- windows
  '\r':rest        -> '\n' : normaliseLineEndings rest  -- old osx
  ch:rest          -> ch   : normaliseLineEndings rest
-- ------------------------------------------------------------
-- * Common utils
-- ------------------------------------------------------------

-- | Test two values for equality after applying a projection.
equating :: Eq a => (b -> a) -> b -> b -> Bool
equating f = \a b -> f a == f b
-- | Compare two values after applying a projection.
comparing :: Ord a => (b -> a) -> b -> b -> Ordering
comparing f a b = compare (f a) (f b)
-- | Does the first string occur anywhere inside the second?
isInfixOf :: String -> String -> Bool
isInfixOf pat str = or [ pat `isPrefixOf` suffix | suffix <- tails str ]
-- | Join lists with a separator between each pair.
intercalate :: [a] -> [[a]] -> [a]
intercalate sep xss = concat (intersperse sep xss)
-- | Lower-case every character of a string.
lowercase :: String -> String
lowercase str = [ Char.toLower ch | ch <- str ]
| alphaHeavy/cabal | Cabal/Distribution/Simple/Utils.hs | bsd-3-clause | 42,064 | 0 | 19 | 11,055 | 8,477 | 4,428 | 4,049 | 650 | 7 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Inspection.Database
( DB(..)
, initialDB
, GetBuildMatrix(..)
, GetEventLog(..)
, AddEventRecord(..)
) where
import Prelude ()
import MyLittlePrelude
import Control.Monad.Reader.Class (asks)
import Control.Monad.State.Class (state)
import Data.Acid (Query, Update, makeAcidic)
import Data.SafeCopy (base, deriveSafeCopy)
import Inspection.BuildMatrix
import Inspection.Event (Event)
import Inspection.EventLog (EventLog, EventRecord(..), EventId(..))
import qualified Inspection.EventLog as EventLog
-- | Persistent application state: the current build matrix together with
-- the append-only log of the events that produced it.
data DB = DB { buildMatrix :: BuildMatrix
             , eventLog :: EventLog Event
             } deriving (Show, Eq, Generic, Typeable)
-- | Empty initial state: an empty build matrix and an empty event log.
initialDB :: DB
initialDB = DB { buildMatrix = mempty, eventLog = EventLog.empty }
-- Derive a SafeCopy instance (version 0, base) so DB can be persisted.
deriveSafeCopy 0 'base ''DB
-- | Record an event: apply its body to the build matrix and append it to
-- the event log.  Returns an 'EventId' equal to the length of the log
-- *before* the append (presumably the new record's index, assuming
-- 'EventLog.add' appends at the end -- TODO confirm).
addEventRecord :: EventRecord Event -> Update DB EventId
addEventRecord eventRecord =
  state $ \db ->
    ( EventId $ length $ eventLog db
    , db { buildMatrix = execute (eventBody eventRecord) (buildMatrix db)
         , eventLog = EventLog.add eventRecord (eventLog db)
         }
    )
-- | Read the full event log.
getEventLog :: Query DB (EventLog Event)
getEventLog = asks eventLog

-- | Read the current build matrix.
getBuildMatrix :: Query DB BuildMatrix
getBuildMatrix = asks buildMatrix
-- Generate the acid-state transaction wrappers for the queries/updates above.
makeAcidic ''DB [ 'getBuildMatrix
                , 'addEventRecord
                , 'getEventLog
                ]
| zudov/purescript-inspection | src/Inspection/Database.hs | bsd-3-clause | 1,430 | 0 | 12 | 329 | 391 | 229 | 162 | 38 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Code generator utilities; mostly monadic
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module GHC.StgToCmm.Utils (
cgLit, mkSimpleLit,
emitRawDataLits, mkRawDataLits,
emitRawRODataLits, mkRawRODataLits,
emitDataCon,
emitRtsCall, emitRtsCallWithResult, emitRtsCallGen,
assignTemp, newTemp,
newUnboxedTupleRegs,
emitMultiAssign, emitCmmLitSwitch, emitSwitch,
tagToClosure, mkTaggedObjectLoad,
callerSaves, callerSaveVolatileRegs, get_GlobalReg_addr,
cmmAndWord, cmmOrWord, cmmNegate, cmmEqWord, cmmNeWord,
cmmUGtWord, cmmSubWord, cmmMulWord, cmmAddWord, cmmUShrWord,
cmmOffsetExprW, cmmOffsetExprB,
cmmRegOffW, cmmRegOffB,
cmmLabelOffW, cmmLabelOffB,
cmmOffsetW, cmmOffsetB,
cmmOffsetLitW, cmmOffsetLitB,
cmmLoadIndexW,
cmmConstrTag1,
cmmUntag, cmmIsTagged,
addToMem, addToMemE, addToMemLblE, addToMemLbl,
mkWordCLit, mkByteStringCLit,
newStringCLit, newByteStringCLit,
blankWord,
-- * Update remembered set operations
whenUpdRemSetEnabled,
emitUpdRemSetPush,
emitUpdRemSetPushThunk,
) where
#include "HsVersions.h"
import GhcPrelude
import GHC.StgToCmm.Monad
import GHC.StgToCmm.Closure
import GHC.Cmm
import GHC.Cmm.BlockId
import GHC.Cmm.Graph as CmmGraph
import GHC.Platform.Regs
import GHC.Cmm.CLabel
import GHC.Cmm.Utils hiding (mkDataLits, mkRODataLits, mkByteStringCLit)
import GHC.Cmm.Switch
import GHC.StgToCmm.CgUtils
import ForeignCall
import IdInfo
import Type
import TyCon
import GHC.Runtime.Layout
import Module
import Literal
import Digraph
import Util
import Unique
import UniqSupply (MonadUnique(..))
import DynFlags
import FastString
import Outputable
import GHC.Types.RepType
import CostCentre
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString as BS
import qualified Data.Map as M
import Data.Char
import Data.List
import Data.Ord
-------------------------------------------------------------------------
--
-- Literals
--
-------------------------------------------------------------------------

-- | Translate an STG literal to a Cmm literal.  String literals get a
-- fresh top-level data block; everything else goes via 'mkSimpleLit'.
cgLit :: Literal -> FCode CmmLit
cgLit (LitString s) = newByteStringCLit s
 -- not unpackFS; we want the UTF-8 byte stream.
cgLit other_lit = do
  dflags <- getDynFlags
  return (mkSimpleLit dflags other_lit)
-- | Translate a non-string STG literal to a Cmm literal.  Word-sized
-- numbers use the target's word width from 'DynFlags'; 64-bit variants
-- are fixed at 'W64'.
mkSimpleLit :: DynFlags -> Literal -> CmmLit
mkSimpleLit dflags (LitChar   c)                = CmmInt (fromIntegral (ord c))
                                                         (wordWidth dflags)
mkSimpleLit dflags LitNullAddr                  = zeroCLit dflags
mkSimpleLit dflags (LitNumber LitNumInt i _)    = CmmInt i (wordWidth dflags)
mkSimpleLit _      (LitNumber LitNumInt64 i _)  = CmmInt i W64
mkSimpleLit dflags (LitNumber LitNumWord i _)   = CmmInt i (wordWidth dflags)
mkSimpleLit _      (LitNumber LitNumWord64 i _) = CmmInt i W64
mkSimpleLit _      (LitFloat r)                 = CmmFloat r W32
mkSimpleLit _      (LitDouble r)                = CmmFloat r W64
mkSimpleLit _      (LitLabel fs ms fod)
        = let -- TODO: Literal labels might not actually be in the current package...
              labelSrc = ForeignLabelInThisPackage
          in CmmLabel (mkForeignLabel fs ms labelSrc fod)
-- NB: LitRubbish should have been lowered in "CoreToStg"
mkSimpleLit _ other = pprPanic "mkSimpleLit" (ppr other)
--------------------------------------------------------------------------
--
-- Incrementing a memory location
--
--------------------------------------------------------------------------

-- | Add a constant to the counter stored at a label.
addToMemLbl :: CmmType -> CLabel -> Int -> CmmAGraph
addToMemLbl rep lbl = addToMem rep (CmmLit (CmmLabel lbl))
-- | Add a word-typed expression to the counter stored at a label.
addToMemLblE :: CmmType -> CLabel -> CmmExpr -> CmmAGraph
addToMemLblE rep lbl expr = addToMemE rep (CmmLit (CmmLabel lbl)) expr
addToMem :: CmmType     -- rep of the counter
         -> CmmExpr     -- Address
         -> Int         -- What to add (a word)
         -> CmmAGraph
addToMem rep ptr n = addToMemE rep ptr amount
  where amount = CmmLit (CmmInt (toInteger n) (typeWidth rep))
addToMemE :: CmmType    -- rep of the counter
          -> CmmExpr    -- Address
          -> CmmExpr    -- What to add (a word-typed expression)
          -> CmmAGraph
-- Load, add, store back: *ptr = *ptr + n.
addToMemE rep ptr n = mkStore ptr incremented
  where
    incremented = CmmMachOp (MO_Add (typeWidth rep)) [CmmLoad ptr rep, n]
-------------------------------------------------------------------------
--
-- Loading a field from an object,
-- where the object pointer is itself tagged
--
-------------------------------------------------------------------------

-- | Load a field of a pointer-tagged heap object into a local register.
mkTaggedObjectLoad
  :: DynFlags -> LocalReg -> LocalReg -> ByteOff -> DynTag -> CmmAGraph
-- (loadTaggedObjectField reg base off tag) generates assignment
--      reg = bitsK[ base + off - tag ]
-- where K is fixed by 'reg'
mkTaggedObjectLoad dflags reg base offset tag
  = mkAssign (CmmLocal reg)
             (CmmLoad (cmmOffsetB dflags
                                  (CmmReg (CmmLocal base))
                                  -- subtract the tag: 'base' points 'tag'
                                  -- bytes past the object's actual start
                                  (offset - tag))
                      (localRegType reg))
-------------------------------------------------------------------------
--
-- Converting a closure tag to a closure for enumeration types
-- (this is the implementation of tagToEnum#).
--
-------------------------------------------------------------------------

tagToClosure :: DynFlags -> TyCon -> CmmExpr -> CmmExpr
-- Index the type constructor's closure table with the tag.
tagToClosure dflags tycon tag =
    CmmLoad (cmmOffsetExprW dflags closure_tbl tag) (bWord dflags)
  where
    closure_tbl =
      CmmLit (CmmLabel (mkClosureTableLabel (tyConName tycon) NoCafRefs))
-------------------------------------------------------------------------
--
-- Conditionals and rts calls
--
-------------------------------------------------------------------------

-- | Call an RTS C procedure, discarding any result.
emitRtsCall :: UnitId -> FastString -> [(CmmExpr,ForeignHint)] -> Bool -> FCode ()
emitRtsCall pkg fun args safe =
  emitRtsCallGen [] (mkCmmCodeLabel pkg fun) args safe
-- | Call an RTS C procedure, binding its result to the given local register.
emitRtsCallWithResult :: LocalReg -> ForeignHint -> UnitId -> FastString
                      -> [(CmmExpr,ForeignHint)] -> Bool -> FCode ()
emitRtsCallWithResult res hint pkg fun args safe =
  emitRtsCallGen [(res, hint)] (mkCmmCodeLabel pkg fun) args safe
-- Make a call to an RTS C procedure
emitRtsCallGen
   :: [(LocalReg,ForeignHint)]  -- results (with hints)
   -> CLabel                    -- the RTS entry point
   -> [(CmmExpr,ForeignHint)]   -- arguments (with hints)
   -> Bool -- True <=> CmmSafe call
   -> FCode ()
emitRtsCallGen res lbl args safe
  = do { dflags <- getDynFlags
       ; updfr_off <- getUpdFrameOff
       -- Spill caller-saved global registers around the call.
       ; let (caller_save, caller_load) = callerSaveVolatileRegs dflags
       ; emit caller_save
       ; call updfr_off
       ; emit caller_load }
  where
    -- Safe calls go through the full Cmm call machinery; unsafe calls are
    -- plain C calls with the CCallConv convention.
    call updfr_off =
      if safe then
        emit =<< mkCmmCall fun_expr res' args' updfr_off
      else do
        let conv = ForeignConvention CCallConv arg_hints res_hints CmmMayReturn
        emit $ mkUnsafeCall (ForeignTarget fun_expr conv) res' args'
    (args', arg_hints) = unzip args
    (res',  res_hints) = unzip res
    fun_expr = mkLblExpr lbl
-----------------------------------------------------------------------------
--
-- Caller-Save Registers
--
-----------------------------------------------------------------------------
-- Here we generate the sequence of saves/restores required around a
-- foreign call instruction.
-- TODO: reconcile with includes/Regs.h
-- * Regs.h claims that BaseReg should be saved last and loaded first
-- * This might not have been tickled before since BaseReg is callee save
-- * Regs.h saves SparkHd, ParkT1, SparkBase and SparkLim
--
-- This code isn't actually used right now, because callerSaves
-- only ever returns true in the current universe for registers NOT in
-- system_regs (just do a grep for CALLER_SAVES in
-- includes/stg/MachRegs.h). It's all one giant no-op, and for
-- good reason: having to save system registers on every foreign call
-- would be very expensive, so we avoid assigning them to those
-- registers when we add support for an architecture.
--
-- Note that the old code generator actually does more work here: it
-- also saves other global registers. We can't (nor want) to do that
-- here, as we don't have liveness information. And really, we
-- shouldn't be doing the workaround at this point in the pipeline, see
-- Note [Register parameter passing] and the ToDo on CmmCall in
-- cmm/CmmNode.hs. Right now the workaround is to avoid inlining across
-- unsafe foreign calls in rewriteAssignments, but this is strictly
-- temporary.
-- | Build the (save, restore) instruction sequences needed around a
-- foreign call: each caller-saved system register is stored to the
-- address given by 'get_GlobalReg_addr' before the call and reloaded
-- from there afterwards.
callerSaveVolatileRegs :: DynFlags -> (CmmAGraph, CmmAGraph)
callerSaveVolatileRegs dflags = (caller_save, caller_load)
  where
    platform = targetPlatform dflags

    caller_save = catAGraphs (map callerSaveGlobalReg    regs_to_save)
    caller_load = catAGraphs (map callerRestoreGlobalReg regs_to_save)

    system_regs = [ Sp,SpLim,Hp,HpLim,CCCS,CurrentTSO,CurrentNursery
                    {- ,SparkHd,SparkTl,SparkBase,SparkLim -}
                  , BaseReg ]

    -- Only registers the platform marks caller-saves need spilling.
    regs_to_save = filter (callerSaves platform) system_regs

    callerSaveGlobalReg reg
        = mkStore (get_GlobalReg_addr dflags reg) (CmmReg (CmmGlobal reg))

    callerRestoreGlobalReg reg
        = mkAssign (CmmGlobal reg)
                   (CmmLoad (get_GlobalReg_addr dflags reg) (globalRegType dflags reg))
-------------------------------------------------------------------------
--
-- Strings generate a top-level data block
--
-------------------------------------------------------------------------

mkRawDataLits :: Section -> CLabel -> [CmmLit] -> GenCmmDecl CmmStatics info stmt
-- Build a data-segment data block
mkRawDataLits section lbl lits = CmmData section (CmmStaticsRaw lbl statics)
  where
    statics = map CmmStaticLit lits
mkRawRODataLits :: CLabel -> [CmmLit] -> GenCmmDecl CmmStatics info stmt
-- Build a read-only data block; label-containing literals need a
-- relocatable read-only section.
mkRawRODataLits lbl lits = mkRawDataLits section lbl lits
  where
    section
      | any needsRelocation lits = Section RelocatableReadOnlyData lbl
      | otherwise                = Section ReadOnlyData lbl

    needsRelocation lit = case lit of
      CmmLabel _      -> True
      CmmLabelOff _ _ -> True
      _               -> False
mkByteStringCLit
  :: CLabel -> ByteString -> (CmmLit, GenCmmDecl CmmStatics info stmt)
-- We have to make a top-level decl for the string,
-- and return a literal pointing to it
mkByteStringCLit lbl bytes = (CmmLabel lbl, decl)
  where
    decl = CmmData (Section sec lbl) (CmmStaticsRaw lbl [CmmString bytes])
    -- A NUL byte cannot happen for String literals (there \NUL is replaced
    -- by C0 80), but it can for Addr# literals; those must not go in the
    -- NUL-terminated CString section.
    sec | 0 `BS.elem` bytes = ReadOnlyData
        | otherwise         = CString
emitRawDataLits :: CLabel -> [CmmLit] -> FCode ()
-- Emit a data-segment data block
emitRawDataLits label lits =
  emitDecl (mkRawDataLits (Section Data label) label lits)
emitRawRODataLits :: CLabel -> [CmmLit] -> FCode ()
-- Emit a read-only data block
emitRawRODataLits label lits = emitDecl (mkRawRODataLits label lits)
-- | Emit a static data constructor closure with its info table and CCS.
emitDataCon :: CLabel -> CmmInfoTable -> CostCentreStack -> [CmmLit] -> FCode ()
emitDataCon label itbl ccs payload =
  emitDecl (CmmData (Section Data label) (CmmStatics label itbl ccs payload))
newStringCLit :: String -> FCode CmmLit
-- Make a global definition for the string,
-- and return its label.  The String is packed to a UTF-8-ish ByteString
-- via Char8 packing.
newStringCLit str = newByteStringCLit (BS8.pack str)
-- | Make a global definition for the byte string under a fresh label and
-- return a literal pointing at it.
newByteStringCLit :: ByteString -> FCode CmmLit
newByteStringCLit bytes = do
  uniq <- newUnique
  let (lit, decl) = mkByteStringCLit (mkStringLitLabel uniq) bytes
  emitDecl decl
  return lit
-------------------------------------------------------------------------
--
-- Assigning expressions to temporaries
--
-------------------------------------------------------------------------

assignTemp :: CmmExpr -> FCode LocalReg
-- Make sure the argument is in a local register.
-- We don't bother being particularly aggressive with avoiding
-- unnecessary local registers, since we can rely on a later
-- optimization pass to inline as necessary (and skipping out
-- on things like global registers can be a little dangerous
-- due to them being trashed on foreign calls--though it means
-- the optimization pass doesn't have to do as much work)
assignTemp (CmmReg (CmmLocal reg)) = return reg
assignTemp e = do
  dflags <- getDynFlags
  uniq   <- newUnique
  let tmp = LocalReg uniq (cmmExprType dflags e)
  emitAssign (CmmLocal tmp) e
  return tmp
-- | Allocate a fresh local register of the given type.
newTemp :: MonadUnique m => CmmType -> m LocalReg
newTemp rep = do
  uniq <- getUniqueM
  return (LocalReg uniq rep)
newUnboxedTupleRegs :: Type -> FCode ([LocalReg], [ForeignHint])
-- Choose suitable local regs to use for the components
-- of an unboxed tuple that we are about to return to
-- the Sequel.  If the Sequel is a join point, using the
-- regs it wants will save later assignments.
newUnboxedTupleRegs res_ty
  = ASSERT( isUnboxedTupleType res_ty )
    do  { dflags <- getDynFlags
        ; sequel <- getSequel
        ; regs <- choose_regs dflags sequel
        ; ASSERT( regs `equalLength` reps )
          return (regs, map primRepForeignHint reps) }
  where
    reps = typePrimRep res_ty
    -- A join-point Sequel dictates the registers; otherwise allocate
    -- fresh temporaries, one per component rep.
    choose_regs _ (AssignTo regs _) = return regs
    choose_regs dflags _            = mapM (newTemp . primRepCmmType dflags) reps
-------------------------------------------------------------------------
-- emitMultiAssign
-------------------------------------------------------------------------

emitMultiAssign :: [LocalReg] -> [CmmExpr] -> FCode ()
-- Emit code to perform the assignments in the
-- input simultaneously, using temporary variables when necessary.

type Key  = Int
type Vrtx = (Key, Stmt) -- Give each vertex a unique number,
                        -- for fast comparison
type Stmt = (LocalReg, CmmExpr) -- r := e

-- We use the strongly-connected component algorithm, in which
--      * the vertices are the statements
--      * an edge goes from s1 to s2 iff
--              s1 assigns to something s2 uses
--        that is, if s1 should *follow* s2 in the final order

emitMultiAssign []    []    = return ()
emitMultiAssign [reg] [rhs] = emitAssign (CmmLocal reg) rhs
emitMultiAssign regs rhss = do
  dflags <- getDynFlags
  ASSERT2( equalLength regs rhss, ppr regs $$ ppr rhss )
    unscramble dflags ([1..] `zip` (regs `zip` rhss))

-- | Order (and where necessary, break cycles between) the assignments so
-- that no statement overwrites a register another still needs to read.
unscramble :: DynFlags -> [Vrtx] -> FCode ()
unscramble dflags vertices = mapM_ do_component components
  where
        edges :: [ Node Key Vrtx ]
        edges = [ DigraphNode vertex key1 (edges_from stmt1)
                | vertex@(key1, stmt1) <- vertices ]

        edges_from :: Stmt -> [Key]
        edges_from stmt1 = [ key2 | (key2, stmt2) <- vertices,
                                    stmt1 `mustFollow` stmt2 ]

        components :: [SCC Vrtx]
        components = stronglyConnCompFromEdgedVerticesUniq edges

        -- do_components deal with one strongly-connected component
        -- Not cyclic, or singleton?  Just do it
        do_component :: SCC Vrtx -> FCode ()
        do_component (AcyclicSCC (_,stmt))  = mk_graph stmt
        do_component (CyclicSCC [])         = panic "do_component"
        do_component (CyclicSCC [(_,stmt)]) = mk_graph stmt

                -- Cyclic?  Then go via temporaries.  Pick one to
                -- break the loop and try again with the rest.
        do_component (CyclicSCC ((_,first_stmt) : rest)) = do
            dflags <- getDynFlags
            u <- newUnique
            let (to_tmp, from_tmp) = split dflags u first_stmt
            mk_graph to_tmp
            unscramble dflags rest
            mk_graph from_tmp

        -- Split "reg := rhs" into "tmp := rhs" (done first) and
        -- "reg := tmp" (done after the rest of the component).
        split :: DynFlags -> Unique -> Stmt -> (Stmt, Stmt)
        split dflags uniq (reg, rhs)
          = ((tmp, rhs), (reg, CmmReg (CmmLocal tmp)))
          where
            rep = cmmExprType dflags rhs
            tmp = LocalReg uniq rep

        mk_graph :: Stmt -> FCode ()
        mk_graph (reg, rhs) = emitAssign (CmmLocal reg) rhs

        -- s1 must follow s2 when s1's target register is read by s2's rhs.
        mustFollow :: Stmt -> Stmt -> Bool
        (reg, _) `mustFollow` (_, rhs) = regUsedIn dflags (CmmLocal reg) rhs
-------------------------------------------------------------------------
-- mkSwitch
-------------------------------------------------------------------------

-- | Emit a multi-way branch on a zero-based constructor tag.
emitSwitch :: CmmExpr                      -- Tag to switch on
           -> [(ConTagZ, CmmAGraphScoped)] -- Tagged branches
           -> Maybe CmmAGraphScoped        -- Default branch (if any)
           -> ConTagZ -> ConTagZ           -- Min and Max possible values;
                                           -- behaviour outside this range is
                                           -- undefined
           -> FCode ()
-- First, two rather common cases in which there is no work to do:
-- no branches at all, or a single branch with no default.
emitSwitch _ []         (Just code) _ _ = emit (fst code)
emitSwitch _ [(_,code)] Nothing     _ _ = emit (fst code)
-- Right, off we go
emitSwitch tag_expr branches mb_deflt lo_tag hi_tag = do
    -- Label each branch (and the default) so they all jump to a common
    -- join label afterwards.
    join_lbl      <- newBlockId
    mb_deflt_lbl  <- label_default join_lbl mb_deflt
    branches_lbls <- label_branches join_lbl branches
    -- Evaluate the scrutinee once, into a temporary if necessary.
    tag_expr'     <- assignTemp' tag_expr
    -- Sort the branches before calling mk_discrete_switch
    let branches_lbls' = [ (fromIntegral i, l) | (i,l) <- sortBy (comparing fst) branches_lbls ]
    let range = (fromIntegral lo_tag, fromIntegral hi_tag)
    emit $ mk_discrete_switch False tag_expr' branches_lbls' mb_deflt_lbl range
    emitLabel join_lbl
-- | Build the Cmm for a switch over integral values, after branches have
-- been labelled and sorted.
mk_discrete_switch :: Bool -- ^ Use signed comparisons
                   -> CmmExpr
                   -> [(Integer, BlockId)]
                   -> Maybe BlockId
                   -> (Integer, Integer)
                   -> CmmAGraph
-- SINGLETON TAG RANGE: no case analysis to do
mk_discrete_switch _ _tag_expr [(tag, lbl)] _ (lo_tag, hi_tag)
  | lo_tag == hi_tag
  = ASSERT( tag == lo_tag )
    mkBranch lbl
-- SINGLETON BRANCH, NO DEFAULT: no case analysis to do
mk_discrete_switch _ _tag_expr [(_tag,lbl)] Nothing _
  = mkBranch lbl
        -- The simplifier might have eliminated a case
        --       so we may have e.g. case xs of
        --                               [] -> e
        -- In that situation we can be sure the (:) case
        -- can't happen, so no need to test
-- SOMETHING MORE COMPLICATED: defer to GHC.Cmm.Switch.Implement
-- See Note [Cmm Switches, the general plan] in GHC.Cmm.Switch
mk_discrete_switch signed tag_expr branches mb_deflt range
  = mkSwitch tag_expr $ mkSwitchTargets signed range mb_deflt (M.fromList branches)
-- | Split a (sorted) branch list around the key of its middle element:
-- returns the branches strictly below that key, the key itself, and the
-- remaining branches.  Callers supply at least two branches.
divideBranches :: Ord a => [(a,b)] -> ([(a,b)], a, [(a,b)])
divideBranches branches = (below, pivot, rest)
  where
    -- 2 branches => length `div` 2 = 1, i.e. the *second* key; with >=2
    -- branches the index is always in range.
    (pivot, _)    = branches !! (length branches `div` 2)
    (below, rest) = span (\(key, _) -> key < pivot) branches
--------------
-- | Emit a switch whose alternatives are literals: integral literals go
-- through a discrete switch, floating literals through a comparison tree.
emitCmmLitSwitch :: CmmExpr                      -- Tag to switch on
               -> [(Literal, CmmAGraphScoped)]   -- Tagged branches
               -> CmmAGraphScoped                -- Default branch (always)
               -> FCode ()                       -- Emit the code
emitCmmLitSwitch _scrut  []       deflt = emit $ fst deflt
emitCmmLitSwitch scrut  branches deflt = do
    scrut' <- assignTemp' scrut
    join_lbl <- newBlockId
    deflt_lbl <- label_code join_lbl deflt
    branches_lbls <- label_branches join_lbl branches
    dflags <- getDynFlags
    let cmm_ty = cmmExprType dflags scrut
        rep = typeWidth cmm_ty
    -- We find the necessary type information in the literals in the branches.
    -- ('head' is safe here: the empty-branches case is handled above.)
    let signed = case head branches of
                   (LitNumber nt _ _, _) -> litNumIsSigned nt
                   _ -> False
    let range | signed    = (tARGET_MIN_INT dflags, tARGET_MAX_INT dflags)
              | otherwise = (0, tARGET_MAX_WORD dflags)
    if isFloatType cmm_ty
    then emit =<< mk_float_switch rep scrut' deflt_lbl noBound branches_lbls
    else emit $ mk_discrete_switch
        signed
        scrut'
        [(litValue lit,l) | (lit,l) <- branches_lbls]
        (Just deflt_lbl)
        range
    emitLabel join_lbl
-- | lower bound (inclusive), upper bound (exclusive)
type LitBound = (Maybe Literal, Maybe Literal)

-- | Completely unconstrained bounds.
noBound :: LitBound
noBound = (Nothing, Nothing)
-- | Compile a switch on floating-point literals as a binary tree of
-- comparisons with equality tests at the leaves.
mk_float_switch :: Width -> CmmExpr -> BlockId
              -> LitBound
              -> [(Literal,BlockId)]
              -> FCode CmmAGraph
-- Leaf: one branch left.  Test inequality against its literal; unequal
-- goes to the default block.
mk_float_switch rep scrut deflt _bounds [(lit,blk)]
  = do dflags <- getDynFlags
       return $ mkCbranch (cond dflags) deflt blk Nothing
  where
    cond dflags = CmmMachOp ne [scrut, CmmLit cmm_lit]
      where
        cmm_lit = mkSimpleLit dflags lit
        ne      = MO_F_Ne rep
-- Node: split the (sorted) branches around a middle literal, recurse on
-- each half with correspondingly narrowed bounds, and branch on "< mid".
mk_float_switch rep scrut deflt_blk_id (lo_bound, hi_bound) branches
  = do dflags <- getDynFlags
       lo_blk <- mk_float_switch rep scrut deflt_blk_id bounds_lo lo_branches
       hi_blk <- mk_float_switch rep scrut deflt_blk_id bounds_hi hi_branches
       mkCmmIfThenElse (cond dflags) lo_blk hi_blk
  where
    (lo_branches, mid_lit, hi_branches) = divideBranches branches
    bounds_lo = (lo_bound, Just mid_lit)
    bounds_hi = (Just mid_lit, hi_bound)
    cond dflags = CmmMachOp lt [scrut, CmmLit cmm_lit]
      where
        cmm_lit = mkSimpleLit dflags mid_lit
        lt      = MO_F_Lt rep
--------------
-- | Label an optional default branch, if there is one.
label_default :: BlockId -> Maybe CmmAGraphScoped -> FCode (Maybe BlockId)
label_default _join_lbl Nothing = return Nothing
label_default join_lbl (Just code) = do
  lbl <- label_code join_lbl code
  return (Just lbl)
--------------
-- | Label each tagged branch, keeping its tag paired with the new label.
label_branches :: BlockId -> [(a,CmmAGraphScoped)] -> FCode [(a,BlockId)]
label_branches join_lbl = mapM labelOne
  where
    labelOne (tag, code) = do
      lbl <- label_code join_lbl code
      return (tag, lbl)
--------------
label_code :: BlockId -> CmmAGraphScoped -> FCode BlockId
-- label_code J code
--      generates
--  [L: code; goto J]
-- and returns L
label_code join_lbl (code, tsc) = do
  blk_lbl <- newBlockId
  emitOutOfLine blk_lbl (code CmmGraph.<*> mkBranch join_lbl, tsc)
  return blk_lbl
--------------
-- | Like 'assignTemp' but at the expression level: trivial expressions
-- are returned as-is, anything else is bound to a fresh local register.
assignTemp' :: CmmExpr -> FCode CmmExpr
assignTemp' e
  | isTrivialCmmExpr e = return e
  | otherwise = do
      dflags <- getDynFlags
      tmp <- newTemp (cmmExprType dflags e)
      let tmpReg = CmmLocal tmp
      emitAssign tmpReg e
      return (CmmReg tmpReg)
---------------------------------------------------------------------------
-- Pushing to the update remembered set
---------------------------------------------------------------------------
-- | Wrap the given code in a runtime test of the nonmoving collector's
-- write-barrier flag, so it only executes when the barrier is enabled.
whenUpdRemSetEnabled :: DynFlags -> FCode a -> FCode ()
whenUpdRemSetEnabled dflags code = do
    body <- getCode code
    guarded <- mkCmmIfThenElse' is_enabled body mkNop (Just False)
    emit guarded
  where
    barrier_flag = CmmLoad (CmmLit $ CmmLabel mkNonmovingWriteBarrierEnabledLabel) (bWord dflags)
    is_enabled = cmmNeWord dflags barrier_flag (zeroExpr dflags)
-- | Emit code to add an entry to a now-overwritten pointer to the update
-- remembered set.
emitUpdRemSetPush :: CmmExpr -- ^ value of pointer which was overwritten
                  -> FCode ()
emitUpdRemSetPush ptr = do
  emitRtsCall
    rtsUnitId
    (fsLit "updateRemembSetPushClosure_")
    [(CmmReg (CmmGlobal BaseReg), AddrHint),
     (ptr, AddrHint)]
    False  -- NOTE(review): presumably emitRtsCall's "update frame" / safety flag — confirm
-- | Emit code to add a to-be-overwritten thunk to the update remembered
-- set, via the thunk-specific RTS entry point.
emitUpdRemSetPushThunk :: CmmExpr -- ^ the thunk
                       -> FCode ()
emitUpdRemSetPushThunk ptr = do
  emitRtsCall
    rtsUnitId
    (fsLit "updateRemembSetPushThunk_")
    [(CmmReg (CmmGlobal BaseReg), AddrHint),
     (ptr, AddrHint)]
    False  -- NOTE(review): same flag as in emitUpdRemSetPush — confirm
| sdiehl/ghc | compiler/GHC/StgToCmm/Utils.hs | bsd-3-clause | 23,970 | 0 | 15 | 5,670 | 5,205 | 2,777 | 2,428 | -1 | -1 |
module Data.Bioparser
( decodeFasta -- * fasta -> vector of fasta records
, encodeFasta -- * vector -> fasta format
, decodeFastq
, encodeFastq
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Attoparsec.ByteString
import Data.Vector (Vector)
import qualified Data.Vector as V
import Data.Monoid ((<>))
import Data.Bioparser.Combinators
import Data.Bioparser.Types
-- | Decode a FASTA-formatted 'ByteString' into a vector of records.
-- A final "\n" is fed to the incremental parser so that input lacking a
-- trailing newline still terminates its last record.  Dies with 'error'
-- on malformed input (Fail) or if the parser still wants more (Partial).
-- NOTE(review): the "\n" literal must be a ByteString here, which relies
-- on OverloadedStrings being enabled via the build configuration — confirm.
decodeFasta :: ByteString -> Vector FastaRecord
decodeFasta x = case feed (parse parseFasta x) "\n" of
    Done _ r -> r
    _ -> error "parse error"
-- | Decode a FASTQ-formatted 'ByteString' into a vector of records.
-- Feeding 'B.empty' signals end-of-input to the incremental parser.
-- Dies with 'error' on malformed input.
decodeFastq :: ByteString -> Vector FastqRecord
decodeFastq x = case feed (parse parseFastq x) B.empty of
    Done _ r -> r
    _ -> error "parse error"
-- Round-trips with decoding, a property worth testing more extensively:
--   decodeFasta . encodeFasta . decodeFasta = decodeFasta
-- (encodeFasta always terminates the output with "\n", so the encoded
-- bytes may differ from the original input by that final newline, i.e.
-- fmap (== f) (encodeFasta $ decodeFasta f) may not return True.)
encodeFasta :: Vector FastaRecord -> ByteString
encodeFasta = foldMap encodeRecord
  where
    encodeRecord (FastaRecord (defline, sq)) =
      ">" <> defline <> "\n" <> sq <> "\n"
-- | Encode FASTQ records back to text: four newline-terminated fields
-- per record ("@" defline, sequence, "+" separator line, quality score).
encodeFastq :: Vector FastqRecord -> ByteString
encodeFastq = foldMap encodeRecord
  where
    encodeRecord (FastqRecord (defline, sq, plus, score)) =
      "@" <> defline <> "\n" <> sq <> "\n"
        <> "+" <> plus <> "\n" <> score <> "\n"
| fushitarazu/bioparser | src/Data/Bioparser.hs | bsd-3-clause | 1,578 | 0 | 15 | 433 | 370 | 204 | 166 | 28 | 2 |
{-# LANGUAGE DeriveGeneric #-}
module Ray.KdTree
( -- * Introduction
-- $intro
-- * Usage
-- $usage
-- * Variants
-- ** Dynamic /k/-d trees
-- $dkdtrees
-- ** /k/-d maps
-- $kdmaps
-- * Advanced
-- ** Custom distance functions
-- $customdistancefunctions
-- ** Axis value types
-- $axisvaluetypes
-- * Reference
-- ** Types
PointAsListFn
, SquaredDistanceFn
, KdTree
-- ** /k/-d tree construction
, empty
, emptyWithDist
, singleton
, singletonWithDist
, build
, buildWithDist
, insertUnbalanced
, batchInsertUnbalanced
-- ** Query
, nearest
, inRadius
, kNearest
, inRange
, toList
, null
, size
-- ** Utilities
, defaultSqrDist
) where
import Control.DeepSeq
import Control.DeepSeq.Generics (genericRnf)
import GHC.Generics
--import qualified Data.Foldable as F
import Prelude hiding (null)
--import qualified Data.Vector.Unboxed as V
import qualified Data.Vector as V
import qualified Ray.KdMap as KDM
import Ray.KdMap (PointAsListFn, SquaredDistanceFn, defaultSqrDist)
import Ray.Optics
-- $intro
--
-- Let's say you have a large set of 3D points called /data points/,
-- and you'd like to be able to quickly perform /point queries/ on the
-- data points. One example of a point query is the /nearest neighbor/
-- query: given a set of data points @points@ and a query point @p@,
-- which point in @points@ is closest to @p@?
--
-- We can efficiently solve the nearest neighbor query (along with
-- many other types of point queries) if we appropriately organize the
-- data points. One such method of organization is called the /k/-d
-- tree algorithm, which is implemented in this module.
-- $usage
--
-- Let's say you have a list of 3D data points, and each point is of
-- type @Point3d@:
--
-- @
-- data Point3d = Point3d { x :: Double
-- , y :: Double
-- , z :: Double
-- } deriving Show
-- @
--
-- We call a point's individual values /axis values/ (i.e., @x@, @y@,
-- and @z@ in the case of @Point3d@).
--
-- In order to generate a /k/-d tree of @Point3d@'s, we need to define
-- a 'PointAsListFn' that expresses the point's axis values as a list:
--
-- @
-- point3dAsList :: Point3d -> [Double]
-- point3dAsList (Point3d x y z) = [x, y, z]
-- @
--
-- Now we can build a 'KdTree' structure from a list of data points
-- and perform a nearest neighbor query as follows:
--
-- @
-- >>> let dataPoints = [(Point3d 0.0 0.0 0.0), (Point3d 1.0 1.0 1.0)]
--
-- >>> let kdt = 'build' point3dAsList dataPoints
--
-- >>> let queryPoint = Point3d 0.1 0.1 0.1
--
-- >>> 'nearest' kdt queryPoint
-- Point3d {x = 0.0, y = 0.0, z = 0.0}
-- @
-- $dkdtrees
--
-- The 'KdTree' structure is meant for static sets of data points. If
-- you need to insert points into an existing /k/-d tree, check out
-- @Data.KdTree.Dynamic.@'Data.KdTree.Dynamic.KdTree'.
-- $kdmaps
--
-- If you need to associate additional data with each point in the
-- tree (i.e., points are /keys/ associated with /values/), check out
-- @Data.KdMap.Static.@'Data.KdMap.Static.KdMap' and
-- @Data.KdMap.Dynamic.@'Data.KdMap.Dynamic.KdMap' for static and dynamic
-- variants of this functionality. Please /do not/ try to fake this
-- functionality with a 'KdTree' by augmenting your point type with
-- the extra data; you're gonna have a bad time.
-- $customdistancefunctions
--
-- You may have noticed in the previous use case that we never
-- specified what "nearest" means for our points. By default,
-- 'build' uses a Euclidean distance function that is sufficient
-- in most cases. However, point queries are typically faster on a
-- 'KdTree' built with a user-specified custom distance
-- function. Let's generate a 'KdTree' using a custom distance
-- function.
--
-- One idiosyncrasy about 'KdTree' is that custom distance functions
-- are actually specified as /squared distance/ functions
-- ('SquaredDistanceFn'). This means that your custom distance
-- function must return the /square/ of the actual distance between
-- two points. This is for efficiency: regular distance functions
-- often require expensive square root computations, whereas in our
-- case, the squared distance works fine and doesn't require computing
-- any square roots. Here's an example of a squared distance function
-- for @Point3d@:
--
-- @
-- point3dSquaredDistance :: Point3d -> Point3d -> Double
-- point3dSquaredDistance (Point3d x1 y1 z1) (Point3d x2 y2 z2) =
-- let dx = x1 - x2
-- dy = y1 - y2
-- dz = z1 - z2
-- in dx * dx + dy * dy + dz * dz
-- @
--
-- We can build a 'KdTree' using our custom distance function as follows:
--
-- @
-- >>> let kdt = 'buildWithDist' point3dAsList point3dSquaredDistance points
-- @
-- $axisvaluetypes
--
-- In the above examples, we used a point type with axis values of
-- type 'Double'. We can in fact use axis values of any type that is
-- an instance of the 'Real' typeclass. This means you can use points
-- that are composed of 'Double's, 'Int's, 'Float's, and so on:
--
-- @
-- data Point2i = Point2i Int Int
--
-- point2iAsList :: Point2i -> [Int]
-- point2iAsList (Point2i x y) = [x, y]
--
-- kdt :: [Point2i] -> KdTree Int Point2i
-- kdt dataPoints = 'build' point2iAsList dataPoints
-- @
-- | A /k/-d tree structure specialised to store 'Photon's, implemented
-- as a thin wrapper around a 'KDM.KdMap' whose values are all unit.
newtype KdTree = KdTree KDM.KdMap deriving Generic
instance NFData KdTree where rnf = genericRnf
instance Show KdTree where
  show (KdTree kdm) = "KdTree " ++ show kdm
{-
instance F.Foldable KdTree where
  foldr f z (KdTree kdMap) = KDM.foldrWithKey (f . fst) z kdMap
-}
-- | An empty 'KdTree'.
empty :: PointAsListFn -> KdTree
empty p2l = KdTree (KDM.empty p2l)
-- | An empty 'KdTree' with a caller-supplied squared distance
-- function.
emptyWithDist :: PointAsListFn
              -> SquaredDistanceFn
              -> KdTree
emptyWithDist p2l d2 = KdTree (KDM.emptyWithDist p2l d2)
-- | A 'KdTree' containing exactly one photon.
singleton :: PointAsListFn -> Photon -> KdTree
singleton p2l p = KdTree (KDM.singleton p2l (p, ()))
-- | A 'KdTree' containing exactly one photon, with a caller-supplied
-- squared distance function.
singletonWithDist :: PointAsListFn
                  -> SquaredDistanceFn
                  -> Photon
                  -> KdTree
singletonWithDist p2l d2 p = KdTree (KDM.singletonWithDist p2l d2 (p, ()))
-- | True when the tree holds no photons.
null :: KdTree -> Bool
null (KdTree kdm) = KDM.null kdm
-- | Builds a 'KdTree' from a list of data points using a default
-- squared distance function 'defaultSqrDist'.
--
-- Average complexity: /O(n * log(n))/ for /n/ data points.
--
-- Worst case time complexity: /O(n^2)/ for /n/ data points.
--
-- Worst case space complexity: /O(n)/ for /n/ data points.
build :: PointAsListFn
      -> [Photon] -- ^ non-empty list of data points to be stored in the /k/-d tree
      -> KdTree
build pointAsList ps = KdTree (KDM.build pointAsList [(p, ()) | p <- ps])
-- | Builds a 'KdTree' from a list of data points using a
-- user-specified squared distance function.  Complexity bounds are the
-- same as for 'build'.
buildWithDist :: PointAsListFn
              -> SquaredDistanceFn
              -> [Photon]
              -> KdTree
buildWithDist pointAsList distSqr ps =
  KdTree (KDM.buildWithDist pointAsList distSqr [(p, ()) | p <- ps])
-- | Inserts a point into a 'KdTree' without rebalancing.  Repeated
-- insertions can leave the internal tree unbalanced, making point
-- queries very inefficient; for insertion-heavy workloads use
-- @Data.KdTree.Dynamic.@'Data.KdTree.Dynamic.KdTree' instead.
--
-- Average complexity: /O(log(n))/ for /n/ data points.
--
-- Worst case time complexity: /O(n)/ for /n/ data points.
insertUnbalanced :: KdTree -> Photon -> KdTree
insertUnbalanced (KdTree kdm) p = KdTree (KDM.insertUnbalanced kdm p ())
-- | Inserts a list of points without rebalancing; see
-- 'insertUnbalanced' for the caveats.
--
-- Average complexity: /O(n * log(n))/ for /n/ data points.
--
-- Worst case time complexity: /O(n^2)/ for /n/ data points.
batchInsertUnbalanced :: KdTree -> [Photon] -> KdTree
batchInsertUnbalanced (KdTree kdm) ps =
  KdTree (KDM.batchInsertUnbalanced kdm [(p, ()) | p <- ps])
-- | The point in the 'KdTree' closest to the query point.
--
-- Average time complexity: /O(log(n))/ for /n/ data points.
--
-- Worst case time complexity: /O(n)/ for /n/ data points.
--
-- Throws an error if called on an empty 'KdTree'.
nearest :: KdTree -> Photon -> Photon
nearest (KdTree t) query =
  if KDM.null t
    then error "Attempted to call nearest on an empty KdTree."
    else fst (KDM.nearest t query)
-- | All points in the 'KdTree' within the given radius of the query
-- point, in no particular order.
--
-- Worst case time complexity: /O(n)/ for /n/ data points and a radius
-- that subsumes all points in the structure.
inRadius :: KdTree
         -> Double          -- ^ radius
         -> Photon          -- ^ query point
         -> V.Vector Photon -- ^ points within the given radius of the query point
inRadius (KdTree t) radius query =
  fmap fst (KDM.inRadius t radius query)
-- | The @k@ points in the 'KdTree' nearest to the query point, in
-- order of increasing distance.
--
-- Average time complexity: /log(k) * log(n)/ for /k/ nearest neighbors
-- on a structure with /n/ data points.
--
-- Worst case time complexity: /n * log(k)/ for /k/ nearest neighbors
-- on a structure with /n/ data points.
kNearest :: KdTree -> Int -> Photon -> [Photon]
kNearest (KdTree t) k query = [p | (p, _) <- KDM.kNearest t k query]
-- | All points falling inside the axis-aligned box described by the
-- lower- and upper-bound points, in no particular order.
--
-- Worst case time complexity: /O(n)/ for /n/ data points and a range
-- that spans all the points.
inRange :: KdTree
        -> Photon   -- ^ lower bounds of range
        -> Photon   -- ^ upper bounds of range
        -> [Photon] -- ^ all points within given range
inRange (KdTree t) lower upper =
  [p | (p, _) <- KDM.inRange t lower upper]
-- | Returns a list of all the points in the 'KdTree'.
--
-- Time complexity: /O(n)/ for /n/ data points.
toList :: KdTree -> [Photon]
toList (KdTree t) = KDM.keys t
-- | Returns the number of elements in the 'KdTree'.
--
-- Time complexity: /O(1)/
size :: KdTree -> Int
size (KdTree t) = KDM.size t
| eijian/raytracer | src/Ray/KdTree.hs | bsd-3-clause | 11,253 | 0 | 9 | 2,546 | 1,084 | 675 | 409 | 85 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Main where
import qualified Data.ByteString.Lazy.Char8 as B
import Data.Char (toUpper)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import System.Environment (getArgs)
import Text.Read (readMaybe)
import Text.Shakespeare.Text (st)
import Text.StrToHex
-- | Output encodings supported by the converter.
data Encoding
    = UTF8
    | UTF16LE
    | UTF16BE
    | UTF32LE
    | UTF32BE
    deriving (Show, Read)

-- | Parse an encoding name case-insensitively (\"utf8\", \"Utf16LE\", ...).
--
-- The previous implementation used the partial 'read', which fails with
-- the cryptic \"Prelude.read: no parse\" on unrecognised input; using
-- 'readMaybe' lets us report the offending name instead.
encodingFromString :: String -> Encoding
encodingFromString s =
    case readMaybe (map toUpper s) of
        Just enc -> enc
        Nothing  -> error ("unknown encoding: " ++ s)
-- | Hex-dump the given text in the requested encoding and print the
-- result (a lazy ByteString line) to stdout.
withText :: Encoding -> T.Text -> IO ()
withText enc t = B.putStrLn $ case enc of
    UTF8 -> strToHexUtf8 t
    UTF16LE -> strToHexUtf16LE t
    UTF16BE -> strToHexUtf16BE t
    UTF32LE -> strToHexUtf32LE t
    UTF32BE -> strToHexUtf32BE t
-- | Read all of stdin as text and hex-dump it in the given encoding.
withStdin :: Encoding -> IO ()
withStdin enc = do
    contents <- TIO.getContents
    withText enc contents
-- | Print usage information; shown when the command-line arguments do
-- not match any accepted form.
displayHelp :: IO ()
displayHelp = TIO.putStrLn [st|Usage: str2hex ENCODING [TEXT | -]
Available encodings:
* utf8
* utf16le
* utf16be
* utf32le
* utf32be
|]
-- | Dispatch on the command line: a single ENCODING argument (or "-"
-- as the second argument) reads stdin; ENCODING plus TEXT converts the
-- text directly; anything else prints usage.
main :: IO ()
main = do
    args <- getArgs
    case args of
        [enc]       -> withStdin (encodingFromString enc)
        [enc, "-"]  -> withStdin (encodingFromString enc)
        [enc, text] -> withText (encodingFromString enc) (T.pack text)
        _           -> displayHelp
| siphilia/str2hex | app/Main.hs | bsd-3-clause | 1,351 | 0 | 10 | 366 | 359 | 196 | 163 | 35 | 5 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Utility functions on @Core@ syntax
-}
{-# LANGUAGE CPP #-}
module CoreSubst (
-- * Main data types
Subst(..), -- Implementation exported for supercompiler's Renaming.hs only
TvSubstEnv, IdSubstEnv, InScopeSet,
-- ** Substituting into expressions and related types
deShadowBinds, substSpec, substRulesForImportedIds,
substTy, substCo, substExpr, substExprSC, substBind, substBindSC,
substUnfolding, substUnfoldingSC,
lookupIdSubst, lookupTvSubst, lookupCvSubst, substIdOcc,
substTickish, substVarSet,
-- ** Operations on substitutions
emptySubst, mkEmptySubst, mkSubst, mkOpenSubst, substInScope, isEmptySubst,
extendIdSubst, extendIdSubstList, extendTvSubst, extendTvSubstList,
extendCvSubst, extendCvSubstList,
extendSubst, extendSubstList, extendSubstWithVar, zapSubstEnv,
addInScopeSet, extendInScope, extendInScopeList, extendInScopeIds,
isInScope, setInScope,
delBndr, delBndrs,
-- ** Substituting and cloning binders
substBndr, substBndrs, substRecBndrs,
cloneBndr, cloneBndrs, cloneIdBndr, cloneIdBndrs, cloneRecIdBndrs,
-- ** Simple expression optimiser
simpleOptPgm, simpleOptExpr, simpleOptExprWith,
exprIsConApp_maybe, exprIsLiteral_maybe, exprIsLambda_maybe,
) where
#include "HsVersions.h"
import CoreSyn
import CoreFVs
import CoreUtils
import Literal ( Literal(MachStr) )
import qualified Data.ByteString as BS
import OccurAnal( occurAnalyseExpr, occurAnalysePgm )
import qualified Type
import qualified Coercion
-- We are defining local versions
import Type hiding ( substTy, extendTvSubst, extendTvSubstList
, isInScope, substTyVarBndr, cloneTyVarBndr )
import Coercion hiding ( substTy, substCo, extendTvSubst, substTyVarBndr, substCoVarBndr )
import TyCon ( tyConArity )
import DataCon
import PrelNames ( eqBoxDataConKey, coercibleDataConKey, unpackCStringIdKey
, unpackCStringUtf8IdKey )
import OptCoercion ( optCoercion )
import PprCore ( pprCoreBindings, pprRules )
import Module ( Module )
import VarSet
import VarEnv
import Id
import Name ( Name )
import Var
import IdInfo
import Unique
import UniqSupply
import Maybes
import ErrUtils
import DynFlags
import BasicTypes ( isAlwaysActive )
import Util
import Pair
import Outputable
import PprCore () -- Instances
import FastString
import Data.List
import TysWiredIn
{-
************************************************************************
* *
\subsection{Substitutions}
* *
************************************************************************
-}
-- | A substitution environment, containing both 'Id' and 'TyVar' substitutions.
--
-- Some invariants apply to how you use the substitution:
--
-- 1. #in_scope_invariant# The in-scope set contains at least those 'Id's and 'TyVar's that will be in scope /after/
-- applying the substitution to a term. Precisely, the in-scope set must be a superset of the free vars of the
-- substitution range that might possibly clash with locally-bound variables in the thing being substituted in.
--
-- 2. #apply_once# You may apply the substitution only /once/
--
-- There are various ways of setting up the in-scope set such that the first of these invariants holds:
--
-- * Arrange that the in-scope set really is all the things in scope
--
-- * Arrange that it's the free vars of the range of the substitution
--
-- * Make it empty, if you know that all the free vars of the substitution are fresh, and hence can't possibly clash
data Subst
  = Subst InScopeSet  -- Variables in scope (both Ids and TyVars) /after/
                      -- applying the substitution
          IdSubstEnv  -- Substitution for Ids
          TvSubstEnv  -- Substitution from TyVars to Types
          CvSubstEnv  -- Substitution from CoVars to Coercions

        -- INVARIANT 1: See #in_scope_invariant#
        -- This is what lets us deal with name capture properly
        -- It's a hard invariant to check...
        --
        -- INVARIANT 2: The substitution is apply-once; see Note [Apply once] with
        --              Types.TvSubstEnv
        --
        -- INVARIANT 3: See Note [Extending the Subst]
{-
Note [Extending the Subst]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For a core Subst, which binds Ids as well, we make a different choice for Ids
than we do for TyVars.
For TyVars, see Note [Extending the TvSubst] with Type.TvSubstEnv
For Ids, we have a different invariant
The IdSubstEnv is extended *only* when the Unique on an Id changes
Otherwise, we just extend the InScopeSet
In consequence:
* If the TvSubstEnv and IdSubstEnv are both empty, substExpr would be a
no-op, so substExprSC ("short cut") does nothing.
However, substExpr still goes ahead and substitutes. Reason: we may
want to replace existing Ids with new ones from the in-scope set, to
avoid space leaks.
* In substIdBndr, we extend the IdSubstEnv only when the unique changes
* If the CvSubstEnv, TvSubstEnv and IdSubstEnv are all empty,
substExpr does nothing (Note that the above rule for substIdBndr
maintains this property. If the incoming envts are both empty, then
substituting the type and IdInfo can't change anything.)
* In lookupIdSubst, we *must* look up the Id in the in-scope set, because
it may contain non-trivial changes. Example:
(/\a. \x:a. ...x...) Int
We extend the TvSubstEnv with [a |-> Int]; but x's unique does not change
so we only extend the in-scope set. Then we must look up in the in-scope
set when we find the occurrence of x.
* The requirement to look up the Id in the in-scope set means that we
must NOT take no-op short cut when the IdSubst is empty.
We must still look up every Id in the in-scope set.
* (However, we don't need to do so for expressions found in the IdSubst
itself, whose range is assumed to be correct wrt the in-scope set.)
Why do we make a different choice for the IdSubstEnv than the
TvSubstEnv and CvSubstEnv?
* For Ids, we change the IdInfo all the time (e.g. deleting the
unfolding), and adding it back later, so using the TyVar convention
would entail extending the substitution almost all the time
* The simplifier wants to look up in the in-scope set anyway, in case it
can see a better unfolding from an enclosing case expression
* For TyVars, only coercion variables can possibly change, and they are
easy to spot
-}
-- | An environment for substituting for 'Id's
type IdSubstEnv = IdEnv CoreExpr

----------------------------
-- | True when no Id, TyVar, or CoVar substitution is pending.
-- (An "empty" substitution may still have a non-trivial in-scope set.)
isEmptySubst :: Subst -> Bool
isEmptySubst (Subst _ id_env tv_env cv_env)
  = isEmptyVarEnv id_env && isEmptyVarEnv tv_env && isEmptyVarEnv cv_env

-- | The substitution with nothing in scope and nothing substituted.
emptySubst :: Subst
emptySubst = Subst emptyInScopeSet emptyVarEnv emptyVarEnv emptyVarEnv

-- | An empty substitution over a caller-supplied in-scope set.
mkEmptySubst :: InScopeSet -> Subst
mkEmptySubst in_scope = Subst in_scope emptyVarEnv emptyVarEnv emptyVarEnv

-- | Assemble a 'Subst' from its components.  Note that the argument
-- order differs from the 'Subst' constructor (ids come last here).
mkSubst :: InScopeSet -> TvSubstEnv -> CvSubstEnv -> IdSubstEnv -> Subst
mkSubst in_scope tvs cvs ids = Subst in_scope ids tvs cvs
-- | Find the in-scope set: see "CoreSubst#in_scope_invariant"
substInScope :: Subst -> InScopeSet
substInScope (Subst in_scope _ _ _) = in_scope

-- | Remove all substitutions for 'Id's and 'Var's that might have been built up
-- while preserving the in-scope set
zapSubstEnv :: Subst -> Subst
zapSubstEnv (Subst in_scope _ _ _) = Subst in_scope emptyVarEnv emptyVarEnv emptyVarEnv
-- | Add a substitution for an 'Id' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendIdSubst :: Subst -> Id -> CoreExpr -> Subst
-- ToDo: add an ASSERT that fvs(subst-result) is already in the in-scope set
extendIdSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope (extendVarEnv ids v r) tvs cvs

-- | Adds multiple 'Id' substitutions to the 'Subst': see also 'extendIdSubst'
extendIdSubstList :: Subst -> [(Id, CoreExpr)] -> Subst
extendIdSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope (extendVarEnvList ids prs) tvs cvs

-- | Add a substitution for a 'TyVar' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendTvSubst :: Subst -> TyVar -> Type -> Subst
extendTvSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope ids (extendVarEnv tvs v r) cvs

-- | Adds multiple 'TyVar' substitutions to the 'Subst': see also 'extendTvSubst'
extendTvSubstList :: Subst -> [(TyVar,Type)] -> Subst
extendTvSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope ids (extendVarEnvList tvs prs) cvs

-- | Add a substitution from a 'CoVar' to a 'Coercion' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendCvSubst :: Subst -> CoVar -> Coercion -> Subst
extendCvSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope ids tvs (extendVarEnv cvs v r)

-- | Adds multiple 'CoVar' -> 'Coercion' substitutions to the
-- 'Subst': see also 'extendCvSubst'
extendCvSubstList :: Subst -> [(CoVar,Coercion)] -> Subst
extendCvSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope ids tvs (extendVarEnvList cvs prs)
-- | Add a substitution appropriate to the thing being substituted
--   (whether an expression, type, or coercion). See also
--   'extendIdSubst', 'extendTvSubst', and 'extendCvSubst'.
-- The kind of the binder and the kind of the argument must agree;
-- the ASSERTs (debug builds only) check exactly that.
extendSubst :: Subst -> Var -> CoreArg -> Subst
extendSubst subst var arg
  = case arg of
      Type ty     -> ASSERT( isTyVar var ) extendTvSubst subst var ty
      Coercion co -> ASSERT( isCoVar var ) extendCvSubst subst var co
      _           -> ASSERT( isId    var ) extendIdSubst subst var arg

-- | Extend the substitution to map one variable to another, dispatching
-- on the flavour of the binder (type, coercion, or term variable).
extendSubstWithVar :: Subst -> Var -> Var -> Subst
extendSubstWithVar subst v1 v2
  | isTyVar v1 = ASSERT( isTyVar v2 ) extendTvSubst subst v1 (mkTyVarTy v2)
  | isCoVar v1 = ASSERT( isCoVar v2 ) extendCvSubst subst v1 (mkCoVarCo v2)
  | otherwise  = ASSERT( isId    v2 ) extendIdSubst subst v1 (Var v2)
-- | Add a substitution as appropriate to each of the terms being
-- substituted (whether expressions, types, or coercions). See also
-- 'extendSubst'.
extendSubstList :: Subst -> [(Var,CoreArg)] -> Subst
extendSubstList = foldl' (\subst (var, rhs) -> extendSubst subst var rhs)
-- | Find the substitution for an 'Id' in the 'Subst'
-- Global Ids are never substituted; local Ids are looked up first in
-- the IdSubstEnv and then in the in-scope set (which may hold a
-- replacement with the same unique).  The WARN fires (debug builds)
-- when a local Id is in neither, i.e. the invariant is broken.
lookupIdSubst :: SDoc -> Subst -> Id -> CoreExpr
lookupIdSubst doc (Subst in_scope ids _ _) v
  | not (isLocalId v) = Var v
  | Just e  <- lookupVarEnv ids       v = e
  | Just v' <- lookupInScope in_scope v = Var v'
        -- Vital! See Note [Extending the Subst]
  | otherwise = WARN( True, ptext (sLit "CoreSubst.lookupIdSubst") <+> doc <+> ppr v
                            $$ ppr in_scope)
                Var v
-- | Find the substitution for a 'TyVar' in the 'Subst'
-- Falls back to the variable itself when no substitution is recorded.
lookupTvSubst :: Subst -> TyVar -> Type
lookupTvSubst (Subst _ _ tvs _) v = ASSERT( isTyVar v) lookupVarEnv tvs v `orElse` Type.mkTyVarTy v

-- | Find the coercion substitution for a 'CoVar' in the 'Subst'
-- Falls back to the variable itself when no substitution is recorded.
lookupCvSubst :: Subst -> CoVar -> Coercion
lookupCvSubst (Subst _ _ _ cvs) v = ASSERT( isCoVar v ) lookupVarEnv cvs v `orElse` mkCoVarCo v
-- | Remove any substitution recorded for the given binder, dispatching
-- on its flavour.  The isCoVar guard comes first so coercion variables
-- hit the CvSubstEnv rather than a later case.
delBndr :: Subst -> Var -> Subst
delBndr (Subst in_scope ids tvs cvs) v
  | isCoVar v = Subst in_scope ids tvs (delVarEnv cvs v)
  | isTyVar v = Subst in_scope ids (delVarEnv tvs v) cvs
  | otherwise = Subst in_scope (delVarEnv ids v) tvs cvs

-- | Remove any substitutions recorded for the given binders.
delBndrs :: Subst -> [Var] -> Subst
delBndrs (Subst in_scope ids tvs cvs) vs
  = Subst in_scope (delVarEnvList ids vs) (delVarEnvList tvs vs) (delVarEnvList cvs vs)
    -- Easiest thing is just delete all from all!
-- | Simultaneously substitute for a bunch of variables
--   No left-right shadowing
--   ie the substitution for   (\x \y. e) a1 a2
--      so neither x nor y scope over a1 a2
-- Each pair is routed to the Id, TyVar, or CoVar environment according
-- to the shape of its payload.
mkOpenSubst :: InScopeSet -> [(Var,CoreArg)] -> Subst
mkOpenSubst in_scope pairs = Subst in_scope
                                   (mkVarEnv [(id,e)  | (id, e) <- pairs, isId id])
                                   (mkVarEnv [(tv,ty) | (tv, Type ty) <- pairs])
                                   (mkVarEnv [(v,co)  | (v, Coercion co) <- pairs])
------------------------------
-- | Is the variable a member of the substitution's in-scope set?
isInScope :: Var -> Subst -> Bool
isInScope v (Subst in_scope _ _ _) = v `elemInScopeSet` in_scope

-- | Add the 'Var's to the in-scope set, but do not remove
-- any existing substitutions for them
addInScopeSet :: Subst -> VarSet -> Subst
addInScopeSet (Subst in_scope ids tvs cvs) vs
  = Subst (in_scope `extendInScopeSetSet` vs) ids tvs cvs

-- | Add the 'Var' to the in-scope set and, as a side effect,
-- remove any existing substitutions for it
extendInScope :: Subst -> Var -> Subst
extendInScope (Subst in_scope ids tvs cvs) v
  = Subst (in_scope `extendInScopeSet` v)
          (ids `delVarEnv` v) (tvs `delVarEnv` v) (cvs `delVarEnv` v)

-- | Add the 'Var's to the in-scope set: see also 'extendInScope'
extendInScopeList :: Subst -> [Var] -> Subst
extendInScopeList (Subst in_scope ids tvs cvs) vs
  = Subst (in_scope `extendInScopeSetList` vs)
          (ids `delVarEnvList` vs) (tvs `delVarEnvList` vs) (cvs `delVarEnvList` vs)

-- | Optimized version of 'extendInScopeList' that can be used if you are certain
-- all the things being added are 'Id's and hence none are 'TyVar's or 'CoVar's
extendInScopeIds :: Subst -> [Id] -> Subst
extendInScopeIds (Subst in_scope ids tvs cvs) vs
  = Subst (in_scope `extendInScopeSetList` vs)
          (ids `delVarEnvList` vs) tvs cvs

-- | Replace the in-scope set wholesale, keeping the substitution
-- environments untouched.
setInScope :: Subst -> InScopeSet -> Subst
setInScope (Subst _ ids tvs cvs) in_scope = Subst in_scope ids tvs cvs
-- Pretty printing, for debugging only
instance Outputable Subst where
  ppr (Subst in_scope ids tvs cvs)
        =  ptext (sLit "<InScope =") <+> braces (fsep (map ppr (varEnvElts (getInScopeVars in_scope))))
        $$ ptext (sLit " IdSubst   =") <+> ppr ids
        $$ ptext (sLit " TvSubst   =") <+> ppr tvs
        $$ ptext (sLit " CvSubst   =") <+> ppr cvs
         <> char '>'
{-
************************************************************************
* *
Substituting expressions
* *
************************************************************************
-}
-- | Apply a substitution to an entire 'CoreExpr'. Remember, you may only
-- apply the substitution /once/: see "CoreSubst#apply_once"
--
-- Do *not* attempt to short-cut in the case of an empty substitution!
-- See Note [Extending the Subst]
-- (The short-cut below is only safe because isEmptySubst checks the
-- substitution environments, not the in-scope set.)
substExprSC :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExprSC _doc subst orig_expr
  | isEmptySubst subst = orig_expr
  | otherwise          = -- pprTrace "enter subst-expr" (doc $$ ppr orig_expr) $
                         subst_expr subst orig_expr

substExpr :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExpr _doc subst orig_expr = subst_expr subst orig_expr
-- Worker for substExpr/substExprSC: a straightforward structural
-- traversal, threading the substitution through each binder.
subst_expr :: Subst -> CoreExpr -> CoreExpr
subst_expr subst expr
  = go expr
  where
    go (Var v)         = lookupIdSubst (text "subst_expr") subst v
    go (Type ty)       = Type (substTy subst ty)
    go (Coercion co)   = Coercion (substCo subst co)
    go (Lit lit)       = Lit lit
    go (App fun arg)   = App (go fun) (go arg)
    go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
    go (Cast e co)     = Cast (go e) (substCo subst co)
       -- Do not optimise even identity coercions
       -- Reason: substitution applies to the LHS of RULES, and
       --         if you "optimise" an identity coercion, you may
       --         lose a binder. We optimise the LHS of rules at
       --         construction time
    go (Lam bndr body) = Lam bndr' (subst_expr subst' body)
      where
        (subst', bndr') = substBndr subst bndr
    go (Let bind body) = Let bind' (subst_expr subst' body)
      where
        (subst', bind') = substBind subst bind
    go (Case scrut bndr ty alts) = Case (go scrut) bndr' (substTy subst ty) (map (go_alt subst') alts)
      where
        (subst', bndr') = substBndr subst bndr
    -- NB: go_alt takes the substitution already extended with the case
    -- binder; the alternative's own binders extend it further.
    go_alt subst (con, bndrs, rhs) = (con, bndrs', subst_expr subst' rhs)
      where
        (subst', bndrs') = substBndrs subst bndrs
-- | Apply a substitution to an entire 'CoreBind', additionally returning an updated 'Subst'
-- that should be used by subsequent substitutions.
substBind, substBindSC :: Subst -> CoreBind -> (Subst, CoreBind)

substBindSC subst bind    -- Short-cut if the substitution is empty
  | not (isEmptySubst subst)
  = substBind subst bind
  | otherwise
  = case bind of
       NonRec bndr rhs -> (subst', NonRec bndr' rhs)
          where
            (subst', bndr') = substBndr subst bndr
       Rec pairs -> (subst', Rec (bndrs' `zip` rhss'))
          where
            (bndrs, rhss)    = unzip pairs
            (subst', bndrs') = substRecBndrs subst bndrs
            -- Even with an empty incoming subst, cloning the binders may
            -- have produced a non-empty subst' for the RHSs.
            rhss' | isEmptySubst subst' = rhss
                  | otherwise           = map (subst_expr subst') rhss

substBind subst (NonRec bndr rhs) = (subst', NonRec bndr' (subst_expr subst rhs))
  where
    (subst', bndr') = substBndr subst bndr

substBind subst (Rec pairs) = (subst', Rec (bndrs' `zip` rhss'))
  where
    (bndrs, rhss)    = unzip pairs
    (subst', bndrs') = substRecBndrs subst bndrs
    -- The RHSs see subst', in which all the recursive binders are
    -- already substituted.
    rhss' = map (subst_expr subst') rhss
-- | De-shadowing the program is sometimes a useful pre-pass. It can be done simply
-- by running over the bindings with an empty substitution, because substitution
-- returns a result that has no-shadowing guaranteed.
--
-- (Actually, within a single /type/ there might still be shadowing, because
-- 'substTy' is a no-op for the empty substitution, but that's probably OK.)
--
-- [Aug 09] This function is not used in GHC at the moment, but seems so
--          short and simple that I'm going to leave it here
deShadowBinds :: CoreProgram -> CoreProgram
deShadowBinds binds = snd (mapAccumL substBind emptySubst binds)
{-
************************************************************************
* *
Substituting binders
* *
************************************************************************
Remember that substBndr and friends are used when doing expression
substitution only. Their only business is substitution, so they
preserve all IdInfo (suitably substituted). For example, we *want* to
preserve occ info in rules.
-}
-- | Substitute a binder according to the 'Subst' given, returning the new
-- binder and an updated 'Subst' to be used by subsequent substitutions.
-- 'IdInfo' is preserved by this process, although it is substituted into
-- appropriately.
substBndr :: Subst -> Var -> (Subst, Var)
substBndr subst v
  | isTyVar v = substTyVarBndr subst v
  | isCoVar v = substCoVarBndr subst v
  | otherwise = substIdBndr (text "var-bndr") subst subst v
-- | 'substBndr' applied to a list of 'Var's, accumulating a new 'Subst'
-- from left to right
substBndrs :: Subst -> [Var] -> (Subst, [Var])
substBndrs = mapAccumL substBndr
-- | Substitute in a mutually recursive group of 'Id's
substRecBndrs :: Subst -> [Id] -> (Subst, [Id])
substRecBndrs subst bndrs
  = (new_subst, new_bndrs)
  where         -- Here's the reason we need to pass rec_subst to subst_id
        -- NB: knot-tied — new_subst is defined in terms of itself, so
        -- substIdBndr must use its rec_subst argument lazily (it does:
        -- only for the IdInfo, via maybeModifyIdInfo's lazy-set).
        (new_subst, new_bndrs) = mapAccumL (substIdBndr (text "rec-bndr") new_subst) subst bndrs
-- Substitute over an Id binder: clone it if it would shadow something in
-- scope, substitute its type if needed, substitute its IdInfo using
-- rec_subst (lazily, so that recursive groups can pass the final subst).
substIdBndr :: SDoc
            -> Subst            -- ^ Substitution to use for the IdInfo
            -> Subst -> Id      -- ^ Substitution and Id to transform
            -> (Subst, Id)      -- ^ Transformed pair
                                -- NB: unfolding may be zapped
substIdBndr _doc rec_subst subst@(Subst in_scope env tvs cvs) old_id
  = -- pprTrace "substIdBndr" (doc $$ ppr old_id $$ ppr in_scope) $
    (Subst (in_scope `extendInScopeSet` new_id) new_env tvs cvs, new_id)
  where
    id1 = uniqAway in_scope old_id      -- id1 is cloned if necessary
    id2 | no_type_change = id1
        | otherwise      = setIdType id1 (substTy subst old_ty)

    old_ty = idType old_id
    no_type_change = isEmptyVarEnv tvs ||
                     isEmptyVarSet (Type.tyVarsOfType old_ty)

        -- new_id has the right IdInfo
        -- The lazy-set is because we're in a loop here, with
        -- rec_subst, when dealing with a mutually-recursive group
    new_id = maybeModifyIdInfo mb_new_info id2
    mb_new_info = substIdInfo rec_subst id2 (idInfo id2)
        -- NB: unfolding info may be zapped

        -- Extend the substitution if the unique has changed
        -- See the notes with substTyVarBndr for the delVarEnv
    new_env | no_change = delVarEnv env old_id
            | otherwise = extendVarEnv env old_id (Var new_id)

    no_change = id1 == old_id
        -- See Note [Extending the Subst]
        -- it's /not/ necessary to check mb_new_info and no_type_change
{-
Now a variant that unconditionally allocates a new unique.
It also unconditionally zaps the OccInfo.
-}
-- | Very similar to 'substBndr', but unconditionally allocates a fresh
-- 'Unique' for the output variable. The IdInfo is still substituted.
cloneIdBndr :: Subst -> UniqSupply -> Id -> (Subst, Id)
cloneIdBndr subst us old_id = clone_id subst subst (old_id, new_uniq)
  where
    new_uniq = uniqFromSupply us
-- | 'cloneIdBndr' over a list of 'Id's, accumulating a final substitution
-- from left to right
cloneIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneIdBndrs subst us ids = mapAccumL (clone_id subst) subst pairs
  where
    pairs = zip ids (uniqsFromSupply us)
cloneBndrs :: Subst -> UniqSupply -> [Var] -> (Subst, [Var])
-- Works for all kinds of variables (typically case binders), not just Ids
cloneBndrs subst us vs = mapAccumL one subst (zip vs (uniqsFromSupply us))
  where
    one s (v, u) = cloneBndr s u v
cloneBndr :: Subst -> Unique -> Var -> (Subst, Var)
-- Clone one binder of any kind, using the supplied Unique
cloneBndr subst uniq var
  | isTyVar var = cloneTyVarBndr subst var uniq
  | otherwise   = clone_id subst subst (var, uniq)  -- Works for coercion variables too
-- | Clone a mutually recursive group of 'Id's
cloneRecIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneRecIdBndrs subst us ids
  = (subst', ids')
  where
    -- NB: knot-tied — the final subst' is passed to clone_id for the
    -- IdInfo substitution, which must therefore be used lazily
    (subst', ids') = mapAccumL (clone_id subst') subst
                               (ids `zip` uniqsFromSupply us)
-- Just like substIdBndr, except that it always makes a new unique
-- It is given the unique to use
clone_id :: Subst                   -- Substitution for the IdInfo
         -> Subst -> (Id, Unique)   -- Substitution and Id to transform
         -> (Subst, Id)             -- Transformed pair
clone_id rec_subst subst@(Subst in_scope idvs tvs cvs) (old_id, uniq)
  = (Subst (in_scope `extendInScopeSet` new_id) new_idvs tvs new_cvs, new_id)
  where
    id1      = setVarUnique old_id uniq
    id2      = substIdType subst id1
    new_id   = maybeModifyIdInfo (substIdInfo rec_subst id2 (idInfo old_id)) id2
    -- Coercion variables are recorded in the coercion env (as a CoVarCo);
    -- ordinary Ids go in the id env (as a Var)
    (new_idvs, new_cvs) | isCoVar old_id = (idvs, extendVarEnv cvs old_id (mkCoVarCo new_id))
                        | otherwise      = (extendVarEnv idvs old_id (Var new_id), cvs)
{-
************************************************************************
* *
Types and Coercions
* *
************************************************************************
For types and coercions we just call the corresponding functions in
Type and Coercion, but we have to repackage the substitution, from a
Subst to a TvSubst.
-}
-- Delegate to Type.substTyVarBndr, repackaging the Subst as a TvSubst
-- on the way in and back again on the way out
substTyVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substTyVarBndr (Subst in_scope id_env tv_env cv_env) tv
  = (Subst in_scope' id_env tv_env' cv_env, tv')
  where
    (TvSubst in_scope' tv_env', tv')
      = Type.substTyVarBndr (TvSubst in_scope tv_env) tv
-- Delegate to Type.cloneTyVarBndr, repackaging the Subst as a TvSubst
cloneTyVarBndr :: Subst -> TyVar -> Unique -> (Subst, TyVar)
cloneTyVarBndr (Subst in_scope id_env tv_env cv_env) tv uniq
  = (Subst in_scope' id_env tv_env' cv_env, tv')
  where
    (TvSubst in_scope' tv_env', tv')
      = Type.cloneTyVarBndr (TvSubst in_scope tv_env) tv uniq
-- Delegate to Coercion.substCoVarBndr, repackaging the Subst as a CvSubst
substCoVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substCoVarBndr (Subst in_scope id_env tv_env cv_env) cv
  = (Subst in_scope' id_env tv_env' cv_env', cv')
  where
    (CvSubst in_scope' tv_env' cv_env', cv')
      = Coercion.substCoVarBndr (CvSubst in_scope tv_env cv_env) cv
-- | See 'Type.substTy'
substTy :: Subst -> Type -> Type
substTy subst = Type.substTy (getTvSubst subst)
-- Project out the type-substitution part of a Subst
getTvSubst :: Subst -> TvSubst
getTvSubst (Subst in_scope _ tv_env _) = TvSubst in_scope tv_env
-- Project out the coercion-substitution part of a Subst
getCvSubst :: Subst -> CvSubst
getCvSubst (Subst in_scope _ tv_env cv_env) = CvSubst in_scope tv_env cv_env
-- | See 'Coercion.substCo'
substCo :: Subst -> Coercion -> Coercion
substCo subst = Coercion.substCo (getCvSubst subst)
{-
************************************************************************
* *
\section{IdInfo substitution}
* *
************************************************************************
-}
-- Substitute an Id's type; a no-op when the type/coercion envs are empty
-- or the type has no free type variables
substIdType :: Subst -> Id -> Id
substIdType subst@(Subst _ _ tv_env cv_env) id
  | no_work   = id
  | otherwise = setIdType id (substTy subst old_ty)
  where
    old_ty  = idType id
    -- The tyVarsOfType is cheaper than it looks, because we cache the
    -- free tyvars of the type in a Note in the id's type itself
    no_work = (isEmptyVarEnv tv_env && isEmptyVarEnv cv_env)
              || isEmptyVarSet (Type.tyVarsOfType old_ty)
------------------
-- | Substitute into some 'IdInfo' with regard to the supplied new 'Id'.
-- Returns Nothing when there are no rules and no unfolding to substitute.
substIdInfo :: Subst -> Id -> IdInfo -> Maybe IdInfo
substIdInfo subst new_id info
  | isEmptySpecInfo old_rules && isClosedUnfolding old_unf = Nothing
  | otherwise = Just new_info
  where
    old_rules = specInfo info
    old_unf   = unfoldingInfo info
    new_info  = info `setSpecInfo`      substSpec subst new_id old_rules
                     `setUnfoldingInfo` substUnfolding subst old_unf
------------------
-- | Substitutes for the 'Id's within an unfolding
substUnfolding, substUnfoldingSC :: Subst -> Unfolding -> Unfolding
-- Seq'ing on the returned Unfolding is enough to cause
-- all the substitutions to happen completely

-- Short-cut: an empty substitution leaves the unfolding untouched
substUnfoldingSC subst unf       -- Short-cut version
  | isEmptySubst subst = unf
  | otherwise          = substUnfolding subst unf

-- DFun unfoldings: substitute the binders, then the arguments
substUnfolding subst df@(DFunUnfolding { df_bndrs = bndrs, df_args = args })
  = df { df_bndrs = bndrs', df_args = args' }
  where
    (subst',bndrs') = substBndrs subst bndrs
    args'           = map (substExpr (text "subst-unf:dfun") subst') args

substUnfolding subst unf@(CoreUnfolding { uf_tmpl = tmpl, uf_src = src })
        -- Retain an InlineRule!
  | not (isStableSource src)  -- Zap an unstable unfolding, to save substitution work
  = NoUnfolding
  | otherwise                 -- But keep a stable one!
  = seqExpr new_tmpl `seq`    -- Force the substituted template eagerly
    unf { uf_tmpl = new_tmpl }
  where
    new_tmpl = substExpr (text "subst-unf") subst tmpl

substUnfolding _ unf = unf    -- NoUnfolding, OtherCon
------------------
substIdOcc :: Subst -> Id -> Id
-- These Ids should not be substituted to non-Ids; panic otherwise
substIdOcc subst v
  | Var v' <- result = v'
  | otherwise        = pprPanic "substIdOcc" (vcat [ppr v <+> ppr result, ppr subst])
  where
    result = lookupIdSubst (text "substIdOcc") subst v
------------------
-- | Substitutes for the 'Id's within the 'WorkerInfo' given the new function 'Id'
substSpec :: Subst -> Id -> SpecInfo -> SpecInfo
substSpec subst new_id (SpecInfo rules rhs_fvs)
  = seqSpecInfo new_spec `seq` new_spec    -- Force the result eagerly
  where
    -- Local rules always get the new Id's name as their ru_fn
    subst_ru_fn = const (idName new_id)
    new_spec = SpecInfo (map (substRule subst subst_ru_fn) rules)
                        (substVarSet subst rhs_fvs)
------------------
substRulesForImportedIds :: Subst -> [CoreRule] -> [CoreRule]
-- Imported Ids never have their ru_fn renamed, so the renaming function
-- is a panic that should be unreachable
substRulesForImportedIds subst = map (substRule subst no_fn)
  where
    no_fn name = pprPanic "substRulesForImportedIds" (ppr name)
------------------
-- Substitute within a CoreRule: freshen the rule binders, substitute the
-- LHS args, and simple-optimise the RHS.
substRule :: Subst -> (Name -> Name) -> CoreRule -> CoreRule

-- The subst_ru_fn argument is applied to substitute the ru_fn field
-- of the rule:
--    - Rules for *imported* Ids never change ru_fn
--    - Rules for *local* Ids are in the IdInfo for that Id,
--      and the ru_fn field is simply replaced by the new name
--      of the Id
substRule _ _ rule@(BuiltinRule {}) = rule
substRule subst subst_ru_fn rule@(Rule { ru_bndrs = bndrs, ru_args = args
                                       , ru_fn = fn_name, ru_rhs = rhs
                                       , ru_local = is_local })
  = rule { ru_bndrs = bndrs',
           ru_fn    = if is_local
                        then subst_ru_fn fn_name
                        else fn_name,
           ru_args  = map (substExpr (text "subst-rule" <+> ppr fn_name) subst') args,
           ru_rhs   = simpleOptExprWith subst' rhs }
           -- Do simple optimisation on RHS, in case substitution lets
           -- you improve it.  The real simplifier never gets to look at it.
  where
    (subst', bndrs') = substBndrs subst bndrs
------------------
-- Substitute over a list of vectorisation declarations
substVects :: Subst -> [CoreVect] -> [CoreVect]
substVects subst vects = map (substVect subst) vects
------------------
-- Substitute over one vectorisation declaration; only Vect carries an
-- expression, the other forms pass through unchanged
substVect :: Subst -> CoreVect -> CoreVect
substVect subst vect = case vect of
  Vect v rhs   -> Vect v (simpleOptExprWith subst rhs)
  NoVect {}    -> vect
  VectType {}  -> vect
  VectClass {} -> vect
  VectInst {}  -> vect
------------------
-- Apply the substitution to every variable in the set, unioning the
-- free variables of the results
substVarSet :: Subst -> VarSet -> VarSet
substVarSet subst fvs
  = foldVarSet (unionVarSet . subst_fv subst) emptyVarSet fvs
  where
    subst_fv subst fv
        | isId fv   = exprFreeVars (lookupIdSubst (text "substVarSet") subst fv)
        | otherwise = Type.tyVarsOfType (lookupTvSubst subst fv)
------------------
-- Substitute over a Tickish; only Breakpoints carry Ids.
-- See Note [substTickish] below.
substTickish :: Subst -> Tickish Id -> Tickish Id
substTickish subst (Breakpoint n ids)
  = Breakpoint n [ getIdFromTrivialExpr (lookupIdSubst (text "subst_tickish") subst id)
                 | id <- ids ]
substTickish _ tickish = tickish
{- Note [substTickish]
A Breakpoint contains a list of Ids. What happens if we ever want to
substitute an expression for one of these Ids?
First, we ensure that we only ever substitute trivial expressions for
these Ids, by marking them as NoOccInfo in the occurrence analyser.
Then, when substituting for the Id, we unwrap any type applications
and abstractions to get back to an Id, with getIdFromTrivialExpr.
Second, we have to ensure that we never try to substitute a literal
for an Id in a breakpoint. We ensure this by never storing an Id with
an unlifted type in a Breakpoint - see Coverage.mkTickish.
Breakpoints can't handle free variables with unlifted types anyway.
-}
{-
Note [Worker inlining]
~~~~~~~~~~~~~~~~~~~~~~
A worker can get substituted away entirely.
- it might be trivial
- it might simply be very small
We do not treat an InlWrapper as an 'occurrence' in the occurrence
analyser, so it's possible that the worker is not even in scope any more.
In all these cases we simply drop the special case, returning to
InlVanilla. The WARN is just so I can see if it happens a lot.
************************************************************************
* *
The Very Simple Optimiser
* *
************************************************************************
Note [Optimise coercion boxes aggressively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The simple expression optimiser needs to deal with Eq# boxes as follows:
1. If the result of optimising the RHS of a non-recursive binding is an
Eq# box, that box is substituted rather than turned into a let, just as
if it were trivial.
let eqv = Eq# co in e ==> e[Eq# co/eqv]
2. If the result of optimising a case scrutinee is a Eq# box and the case
deconstructs it in a trivial way, we evaluate the case then and there.
case Eq# co of Eq# cov -> e ==> e[co/cov]
We do this for two reasons:
1. Bindings/case scrutinisation of this form is often created by the
evidence-binding mechanism and we need them to be inlined to be able
desugar RULE LHSes that involve equalities (see e.g. T2291)
2. The test T4356 fails Lint because it creates a coercion between types
of kind (* -> * -> *) and (?? -> ? -> *), which differ. If we do this
inlining aggressively we can collapse away the intermediate coercion between
these two types and hence pass Lint again. (This is a sort of a hack.)
In fact, our implementation uses slightly liberalised versions of the second rule
rule so that the optimisations are a bit more generally applicable. Precisely:
2a. We reduce any situation where we can spot a case-of-known-constructor
As a result, the only time we should get residual coercion boxes in the code is
when the type checker generates something like:
\eqv -> let eqv' = Eq# (case eqv of Eq# cov -> ... cov ...)
However, the case of lambda-bound equality evidence is fairly rare, so these two
rules should suffice for solving the rule LHS problem for now.
Annoyingly, we cannot use this modified rule 1a instead of 1:
1a. If we come across a let-bound constructor application with trivial arguments,
add an appropriate unfolding to the let binder. We spot constructor applications
by using exprIsConApp_maybe, so this would actually let rule 2a reduce more.
The reason is that we REALLY NEED coercion boxes to be substituted away. With rule 1a
we wouldn't simplify this expression at all:
let eqv = Eq# co
in foo eqv (bar eqv)
The rule LHS desugarer can't deal with Let at all, so we need to push that box into
the use sites.
-}
simpleOptExpr :: CoreExpr -> CoreExpr
-- Do simple optimisation on an expression: inline non-recursive bindings
-- that are used only once or whose RHS is trivial, and inline bindings
-- that bind an Eq# box; see Note [Optimise coercion boxes aggressively].
--
-- The result is NOT guaranteed occurrence-analysed, because in
--   (let x = y in ....) we substitute for x; so y's occ-info
-- may change radically
simpleOptExpr expr = simpleOptExprWith init_subst expr
  where
    -- It's potentially important to make a proper in-scope set.  Consider
    --   let x = ..y.. in \y. ...x...
    -- We should remember to clone y before substituting for x.  Very
    -- unlikely in practice (we probably won't *be* substituting for x if
    -- it occurs inside a lambda), but cheap insurance.
    --
    -- It's a bit painful to call exprFreeVars, because it makes three
    -- passes instead of two (occ-anal, and go)
    init_subst = mkEmptySubst (mkInScopeSet (exprFreeVars expr))
simpleOptExprWith :: Subst -> InExpr -> OutExpr
-- Occurrence-analyse the input, then run the simple optimiser over it
simpleOptExprWith subst = simple_opt_expr subst . occurAnalyseExpr
----------------------
-- Run the simple optimiser over a whole program: occurrence-analyse,
-- then simple-optimise each binding, threading the substitution through.
-- Also substitutes the rules and vectorisation declarations.
simpleOptPgm :: DynFlags -> Module
             -> CoreProgram -> [CoreRule] -> [CoreVect]
             -> IO (CoreProgram, [CoreRule], [CoreVect])
simpleOptPgm dflags this_mod binds rules vects
  = do { dumpIfSet_dyn dflags Opt_D_dump_occur_anal "Occurrence analysis"
                       (pprCoreBindings occ_anald_binds $$ pprRules rules );

       -- binds' is accumulated in reverse order by do_one, hence the reverse
       ; return (reverse binds', substRulesForImportedIds subst' rules, substVects subst' vects) }
  where
    occ_anald_binds  = occurAnalysePgm this_mod (\_ -> False) {- No rules active -}
                                       rules vects emptyVarEnv binds
    (subst', binds') = foldl do_one (emptySubst, []) occ_anald_binds
    -- A binding substituted away entirely yields Nothing and is dropped
    do_one (subst, binds') bind
      = case simple_opt_bind subst bind of
          (subst', Nothing)    -> (subst', binds')
          (subst', Just bind') -> (subst', bind':binds')
----------------------
-- Naming convention for the simple optimiser: 'In' things are prior to
-- substitution, 'Out' things are the result of it
type InVar   = Var
type OutVar  = Var
type InId    = Id
type OutId   = Id
type InExpr  = CoreExpr
type OutExpr = CoreExpr

-- In these functions the substitution maps InVar -> OutExpr
----------------------
-- The heart of the simple optimiser: apply the substitution while doing
-- case-of-known-constructor, eta reduction (in go_lam), and beta
-- reduction (via simple_app).
simple_opt_expr :: Subst -> InExpr -> OutExpr
simple_opt_expr subst expr
  = go expr
  where
    in_scope_env = (substInScope subst, simpleUnfoldingFun)

    go (Var v)          = lookupIdSubst (text "simpleOptExpr") subst v
    go (App e1 e2)      = simple_app subst e1 [go e2]
    go (Type ty)        = Type (substTy subst ty)
    go (Coercion co)    = Coercion (optCoercion (getCvSubst subst) co)
    go (Lit lit)        = Lit lit
    go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
    go (Cast e co)      | isReflCo co' = go e     -- Drop redundant casts
                        | otherwise    = Cast (go e) co'
                        where
                          co' = optCoercion (getCvSubst subst) co

    go (Let bind body)  = case simple_opt_bind subst bind of
                            (subst', Nothing)   -> simple_opt_expr subst' body
                            (subst', Just bind) -> Let bind (simple_opt_expr subst' body)

    go lam@(Lam {})     = go_lam [] subst lam
    go (Case e b ty as)
       -- See Note [Optimise coercion boxes aggressively]
      | isDeadBinder b
      , Just (con, _tys, es) <- exprIsConApp_maybe in_scope_env e'
      , Just (altcon, bs, rhs) <- findAlt (DataAlt con) as
      = case altcon of
          DEFAULT -> go rhs
          _       -> mkLets (catMaybes mb_binds) $ simple_opt_expr subst' rhs
            where (subst', mb_binds) = mapAccumL simple_opt_out_bind subst
                                                 (zipEqual "simpleOptExpr" bs es)

      | otherwise
      = Case e' b' (substTy subst ty)
                   (map (go_alt subst') as)
      where
        e' = go e
        (subst', b') = subst_opt_bndr subst b

    ----------------------
    go_alt subst (con, bndrs, rhs)
      = (con, bndrs', simple_opt_expr subst' rhs)
      where
        (subst', bndrs') = subst_opt_bndrs subst bndrs

    ----------------------
    -- go_lam tries eta reduction
    -- Accumulates the (reversed) binders in bs' while descending
    go_lam bs' subst (Lam b e)
      = go_lam (b':bs') subst' e
      where
        (subst', b') = subst_opt_bndr subst b
    go_lam bs' subst e
      | Just etad_e <- tryEtaReduce bs e' = etad_e
      | otherwise                         = mkLams bs e'
      where
        bs = reverse bs'
        e' = simple_opt_expr subst e
----------------------
-- simple_app collects arguments for beta reduction
-- The as argument holds already-optimised arguments, innermost first
simple_app :: Subst -> InExpr -> [OutExpr] -> CoreExpr
simple_app subst (App e1 e2) as
  = simple_app subst e1 (simple_opt_expr subst e2 : as)
simple_app subst (Lam b e) (a:as)
  = case maybe_substitute subst b a of
      Just ext_subst -> simple_app ext_subst e as        -- Beta-reduce by substitution
      Nothing        -> Let (NonRec b2 a) (simple_app subst' e as)
  where
    (subst', b') = subst_opt_bndr subst b
    b2 = add_info subst' b b'
simple_app subst (Var v) as
  | isCompulsoryUnfolding (idUnfolding v)
  , isAlwaysActive (idInlineActivation v)
  -- See Note [Unfold compulsory unfoldings in LHSs]
  = simple_app subst (unfoldingTemplate (idUnfolding v)) as
simple_app subst (Tick t e) as
  -- Okay to do "(Tick t e) x ==> Tick t (e x)"?
  | t `tickishScopesLike` SoftScope
  = mkTick t $ simple_app subst e as
simple_app subst e as
  = foldl App (simple_opt_expr subst e) as
----------------------
-- Simple-optimise a binding; returns Nothing when the binding has been
-- substituted away entirely
simple_opt_bind,simple_opt_bind' :: Subst -> CoreBind -> (Subst, Maybe CoreBind)
simple_opt_bind s b    -- Can add trace stuff here
  = simple_opt_bind' s b

simple_opt_bind' subst (Rec prs)
  = (subst'', res_bind)
  where
    res_bind            = Just (Rec (reverse rev_prs'))   -- Pairs accumulated in reverse
    (subst', bndrs')    = subst_opt_bndrs subst (map fst prs)
    (subst'', rev_prs') = foldl do_pr (subst', []) (prs `zip` bndrs')
    do_pr (subst, prs) ((b,r), b')
       = case maybe_substitute subst b r2 of
           Just subst' -> (subst', prs)          -- Binding substituted away
           Nothing     -> (subst, (b2,r2):prs)
       where
         b2 = add_info subst b b'
         r2 = simple_opt_expr subst r

simple_opt_bind' subst (NonRec b r)
  = simple_opt_out_bind subst (b, simple_opt_expr subst r)
----------------------
simple_opt_out_bind :: Subst -> (InVar, OutExpr) -> (Subst, Maybe CoreBind)
-- The RHS has already been optimised.  Either substitute the binding
-- away (returning Nothing) or keep it as a NonRec with refreshed IdInfo.
simple_opt_out_bind subst (b, r') =
  case maybe_substitute subst b r' of
    Just ext_subst -> (ext_subst, Nothing)
    Nothing        -> (subst', Just (NonRec b2 r'))
      where
        (subst', b') = subst_opt_bndr subst b
        b2           = add_info subst' b b'
----------------------
maybe_substitute :: Subst -> InVar -> OutExpr -> Maybe Subst
-- (maybe_substitute subst in_var out_rhs)
--   either extends subst with (in_var -> out_rhs)
--   or returns Nothing
maybe_substitute subst b r
  | Type ty <- r        -- let a::* = TYPE ty in <body>
  = ASSERT( isTyVar b )
    Just (extendTvSubst subst b ty)

  | Coercion co <- r
  = ASSERT( isCoVar b )
    Just (extendCvSubst subst b co)

  | isId b              -- let x = e in <body>
  , not (isCoVar b)     -- See Note [Do not inline CoVars unconditionally]
                        -- in SimplUtils
  , safe_to_inline (idOccInfo b)
  , isAlwaysActive (idInlineActivation b)       -- Note [Inline prag in simplOpt]
  , not (isStableUnfolding (idUnfolding b))
  , not (isExportedId b)
  , not (isUnLiftedType (idType b)) || exprOkForSpeculation r
  = Just (extendIdSubst subst b r)

  | otherwise
  = Nothing
  where
        -- Unconditionally safe to inline
    safe_to_inline :: OccInfo -> Bool
    safe_to_inline (IAmALoopBreaker {})     = False
    safe_to_inline IAmDead                  = True
    safe_to_inline (OneOcc in_lam one_br _) = (not in_lam && one_br) || trivial
    safe_to_inline NoOccInfo                = trivial

    -- Trivial RHSs, plus saturated Eq#/Coercible boxes of trivial args
    trivial | exprIsTrivial r = True
            | (Var fun, args) <- collectArgs r
            , Just dc <- isDataConWorkId_maybe fun
            , dc `hasKey` eqBoxDataConKey || dc `hasKey` coercibleDataConKey
            , all exprIsTrivial args = True -- See Note [Optimise coercion boxes aggressively]
            | otherwise = False
----------------------
subst_opt_bndr :: Subst -> InVar -> (Subst, OutVar)
-- Dispatch on the kind of binder, like substBndr but zapping fragile
-- IdInfo on Ids (see subst_opt_id_bndr)
subst_opt_bndr subst v
  | isTyVar v = substTyVarBndr subst v
  | isCoVar v = substCoVarBndr subst v
  | otherwise = subst_opt_id_bndr subst v
subst_opt_id_bndr :: Subst -> InId -> (Subst, OutId)
-- Nuke all fragile IdInfo, unfolding, and RULES;
--    it gets added back later by add_info
-- Rather like SimplEnv.substIdBndr
--
-- It's important to zap fragile OccInfo (which CoreSubst.substIdBndr
-- carefully does not do) because simplOptExpr invalidates it
subst_opt_id_bndr subst@(Subst in_scope id_subst tv_subst cv_subst) old_id
  = (Subst new_in_scope new_id_subst tv_subst cv_subst, new_id)
  where
    id1    = uniqAway in_scope old_id             -- Clone if it would shadow
    id2    = setIdType id1 (substTy subst (idType old_id))
    new_id = zapFragileIdInfo id2       -- Zaps rules, worker-info, unfolding
                                        -- and fragile OccInfo
    new_in_scope = in_scope `extendInScopeSet` new_id

    -- Extend the substitution if the unique has changed,
    -- or there's some useful occurrence information
    -- See the notes with substTyVarBndr for the delSubstEnv
    new_id_subst | new_id /= old_id
                 = extendVarEnv id_subst old_id (Var new_id)
                 | otherwise
                 = delVarEnv id_subst old_id
----------------------
-- | 'subst_opt_bndr' over a list, threading the substitution left-to-right
subst_opt_bndrs :: Subst -> [InVar] -> (Subst, [OutVar])
subst_opt_bndrs = mapAccumL subst_opt_bndr
----------------------
add_info :: Subst -> InVar -> OutVar -> OutVar
-- Restore (substituted) IdInfo from the old binder onto the new one;
-- type variables carry no IdInfo and pass through unchanged
add_info subst old_bndr new_bndr
  | isTyVar old_bndr = new_bndr
  | otherwise        = maybeModifyIdInfo (substIdInfo subst new_bndr (idInfo old_bndr)) new_bndr
-- Only expose an Id's unfolding when its inline activation is
-- unconditional; see Note [Inline prag in simplOpt]
simpleUnfoldingFun :: IdUnfoldingFun
simpleUnfoldingFun id =
  if isAlwaysActive (idInlineActivation id)
    then idUnfolding id
    else noUnfolding
{-
Note [Inline prag in simplOpt]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If there's an INLINE/NOINLINE pragma that restricts the phase in
which the binder can be inlined, we don't inline here; after all,
we don't know what phase we're in. Here's an example
foo :: Int -> Int -> Int
{-# INLINE foo #-}
foo m n = inner m
where
{-# INLINE [1] inner #-}
inner m = m+n
bar :: Int -> Int
bar n = foo n 1
When inlining 'foo' in 'bar' we want the let-binding for 'inner'
to remain visible until Phase 1
Note [Unfold compulsory unfoldings in LHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the user writes `RULES map coerce = coerce` as a rule, the rule
will only ever match if simpleOptExpr replaces coerce by its unfolding
on the LHS, because that is the core that the rule matching engine
will find. So do that for everything that has a compulsory
unfolding. Also see Note [Desugaring coerce as cast] in Desugar.
However, we don't want to inline 'seq', which happens to also have a
compulsory unfolding, so we only do this unfolding for things
that are always-active. See Note [User-defined RULES for seq] in MkId.
************************************************************************
* *
exprIsConApp_maybe
* *
************************************************************************
Note [exprIsConApp_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsConApp_maybe is a very important function. There are two principal
uses:
* case e of { .... }
* cls_op e, where cls_op is a class operation
In both cases you want to know if e is of form (C e1..en) where C is
a data constructor.
However e might not *look* as if
Note [exprIsConApp_maybe on literal strings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #9400.
Conceptually, a string literal "abc" is just ('a':'b':'c':[]), but in Core
they are represented as unpackCString# "abc"# by MkCore.mkStringExprFS, or
unpackCStringUtf8# when the literal contains multi-byte UTF8 characters.
For optimizations we want to be able to treat it as a list, so they can be
decomposed when used in a case-statement. exprIsConApp_maybe detects those
calls to unpackCString# and returns:
Just (':', [Char], ['a', unpackCString# "bc"]).
We need to be careful about UTF8 strings here. ""# contains a ByteString, so
we must parse it back into a FastString to split off the first character.
That way we can treat unpackCString# and unpackCStringUtf8# in the same way.
-}
-- Accumulated context while exprIsConApp_maybe walks down an application
-- chain: the value arguments seen so far, plus a coercion wrapping the
-- whole application
data ConCont = CC [CoreExpr] Coercion
                  -- Substitution already applied
-- | Returns @Just (dc, [t1..tk], [x1..xn])@ if the argument expression is
-- a *saturated* constructor application of the form @dc t1..tk x1 .. xn@,
-- where t1..tk are the *universally-quantified* type args of 'dc'
exprIsConApp_maybe :: InScopeEnv -> CoreExpr -> Maybe (DataCon, [Type], [CoreExpr])
exprIsConApp_maybe (in_scope, id_unf) expr
  = go (Left in_scope) expr (CC [] (mkReflCo Representational (exprType expr)))
  where
    go :: Either InScopeSet Subst
       -> CoreExpr -> ConCont
       -> Maybe (DataCon, [Type], [CoreExpr])
    go subst (Tick t expr) cont
       | not (tickishIsCode t) = go subst expr cont
    go subst (Cast expr co1) (CC [] co2)
       = go subst expr (CC [] (subst_co subst co1 `mkTransCo` co2))
    go subst (App fun arg) (CC args co)
       = go subst fun (CC (subst_arg subst arg : args) co)
    go subst (Lam var body) (CC (arg:args) co)
       | exprIsTrivial arg          -- Don't duplicate stuff!
       = go (extend subst var arg) body (CC args co)
    go (Right sub) (Var v) cont
       = go (Left (substInScope sub))
            (lookupIdSubst (text "exprIsConApp" <+> ppr expr) sub v)
            cont

    go (Left in_scope) (Var fun) cont@(CC args co)
        | Just con <- isDataConWorkId_maybe fun
        , count isValArg args == idArity fun
        = dealWithCoercion co con args

        -- Look through dictionary functions; see Note [Unfolding DFuns]
        | DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = dfun_args } <- unfolding
        , bndrs `equalLength` args    -- See Note [DFun arity check]
        , let subst = mkOpenSubst in_scope (bndrs `zip` args)
        = dealWithCoercion co con (map (substExpr (text "exprIsConApp1") subst) dfun_args)

        -- Look through unfoldings, but only arity-zero one;
        -- if arity > 0 we are effectively inlining a function call,
        -- and that is the business of callSiteInline.
        -- In practice, without this test, most of the "hits" were
        -- CPR'd workers getting inlined back into their wrappers,
        | idArity fun == 0
        , Just rhs <- expandUnfolding_maybe unfolding
        , let in_scope' = extendInScopeSetSet in_scope (exprFreeVars rhs)
        = go (Left in_scope') rhs cont

        -- See Note [exprIsConApp_maybe on literal strings]
        | (fun `hasKey` unpackCStringIdKey)
         || (fun `hasKey` unpackCStringUtf8IdKey)
        , [Lit (MachStr str)] <- args
        = dealWithStringLiteral fun str co
        where
          unfolding = id_unf fun

    go _ _ _ = Nothing

    ----------------------------
    -- Operations on the (Either InScopeSet CoreSubst)
    -- The Left case is wildly dominant
    subst_co (Left {}) co = co
    subst_co (Right s) co = CoreSubst.substCo s co

    subst_arg (Left {}) e = e
    subst_arg (Right s) e = substExpr (text "exprIsConApp2") s e

    extend (Left in_scope) v e = Right (extendSubst (mkEmptySubst in_scope) v e)
    extend (Right s)       v e = Right (extendSubst s v e)
-- See Note [exprIsConApp_maybe on literal strings]
dealWithStringLiteral :: Var -> BS.ByteString -> Coercion
                      -> Maybe (DataCon, [Type], [CoreExpr])
-- Split an unpackCString#-style literal into (:) applied to its first
-- character and the rest of the string.  An empty literal is not possible
-- with user-supplied strings (MkCore.mkStringExprFS turns those into []
-- automatically), but handle it just in case something else in GHC
-- generates a string literal directly.
dealWithStringLiteral fun str co
  | BS.null str
  = dealWithCoercion co nilDataCon [Type charTy]
  | otherwise
  = dealWithCoercion co consDataCon [Type charTy, char, rest]
  where
    -- Careful with UTF8: parse the ByteString back into a FastString so
    -- that unpackCString# and unpackCStringUtf8# are treated uniformly
    strFS    = mkFastStringByteString str
    char     = mkConApp charDataCon [mkCharLit (headFS strFS)]
    charTail = fastStringToByteString (tailFS strFS)
    -- In singleton strings, just add [] instead of unpackCString# ""#
    rest | BS.null charTail = mkConApp nilDataCon [Type charTy]
         | otherwise        = App (Var fun) (Lit (MachStr charTail))
-- Push a coercion wrapped around a constructor application into the
-- arguments (the KPush rule), so the result is again a plain saturated
-- constructor application
dealWithCoercion :: Coercion -> DataCon -> [CoreExpr]
                 -> Maybe (DataCon, [Type], [CoreExpr])
dealWithCoercion co dc dc_args
  | isReflCo co
  , let (univ_ty_args, rest_args) = splitAtList (dataConUnivTyVars dc) dc_args
  = Just (dc, stripTypeArgs univ_ty_args, rest_args)

  | Pair _from_ty to_ty <- coercionKind co
  , Just (to_tc, to_tc_arg_tys) <- splitTyConApp_maybe to_ty
  , to_tc == dataConTyCon dc
        -- These two tests can fail; we might see
        --      (C x y) `cast` (g :: T a ~ S [a]),
        -- where S is a type function.  In fact, exprIsConApp
        -- will probably not be called in such circumstances,
        -- but there's nothing wrong with it

  =     -- Here we do the KPush reduction rule as described in the FC paper
        -- The transformation applies iff we have
        --      (C e1 ... en) `cast` co
        -- where co :: (T t1 .. tn) ~ to_ty
        -- The left-hand one must be a T, because exprIsConApp returned True
        -- but the right-hand one might not be.  (Though it usually will.)
    let
        tc_arity       = tyConArity to_tc
        dc_univ_tyvars = dataConUnivTyVars dc
        dc_ex_tyvars   = dataConExTyVars dc
        arg_tys        = dataConRepArgTys dc

        non_univ_args  = dropList dc_univ_tyvars dc_args
        (ex_args, val_args) = splitAtList dc_ex_tyvars non_univ_args

        -- Make the "theta" from Fig 3 of the paper
        gammas = decomposeCo tc_arity co
        theta_subst = liftCoSubstWith Representational
                         (dc_univ_tyvars ++ dc_ex_tyvars)
                                                -- existentials are at role N
                         (gammas ++ map (mkReflCo Nominal)
                                        (stripTypeArgs ex_args))

          -- Cast the value arguments (which include dictionaries)
        new_val_args = zipWith cast_arg arg_tys val_args
        cast_arg arg_ty arg = mkCast arg (theta_subst arg_ty)

        dump_doc = vcat [ppr dc,      ppr dc_univ_tyvars, ppr dc_ex_tyvars,
                         ppr arg_tys, ppr dc_args,
                         ppr ex_args, ppr val_args, ppr co, ppr _from_ty, ppr to_ty, ppr to_tc ]
    in
    ASSERT2( eqType _from_ty (mkTyConApp to_tc (stripTypeArgs $ takeList dc_univ_tyvars dc_args))
           , dump_doc )
    ASSERT2( all isTypeArg ex_args, dump_doc )
    ASSERT2( equalLength val_args arg_tys, dump_doc )
    Just (dc, to_tc_arg_tys, ex_args ++ new_val_args)

  | otherwise
  = Nothing
-- Extract the types from a list of arguments that are all (asserted to
-- be) Type arguments
stripTypeArgs :: [CoreExpr] -> [Type]
stripTypeArgs args = ASSERT2( all isTypeArg args, ppr args )
                     [ty | Type ty <- args]
  -- We really do want isTypeArg here, not isTyCoArg!
{-
Note [Unfolding DFuns]
~~~~~~~~~~~~~~~~~~~~~~
DFuns look like
df :: forall a b. (Eq a, Eq b) -> Eq (a,b)
df a b d_a d_b = MkEqD (a,b) ($c1 a b d_a d_b)
($c2 a b d_a d_b)
So to split it up we just need to apply the ops $c1, $c2 etc
to the very same args as the dfun. It takes a little more work
to compute the type arguments to the dictionary constructor.
Note [DFun arity check]
~~~~~~~~~~~~~~~~~~~~~~~
Here we check that the total number of supplied arguments (including
type args) matches what the dfun is expecting. This may be *less*
than the ordinary arity of the dfun: see Note [DFun unfoldings] in CoreSyn
-}
-- | Is this expression a 'Literal', possibly after looking through
-- ticks and expandable unfoldings?
exprIsLiteral_maybe :: InScopeEnv -> CoreExpr -> Maybe Literal
-- Same deal as exprIsConApp_maybe, but much simpler
-- Nevertheless we do need to look through unfoldings for
-- Integer literals, which are vigorously hoisted to top level
-- and not subsequently inlined
exprIsLiteral_maybe env@(_, id_unf) e
  = case e of
      Lit l     -> Just l
      Tick _ e' -> exprIsLiteral_maybe env e' -- dubious?
      Var v     | Just rhs <- expandUnfolding_maybe (id_unf v)
                -> exprIsLiteral_maybe env rhs
      _         -> Nothing
{-
Note [exprIsLambda_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsLambda_maybe will, given an expression `e`, try to turn it into the form
`Lam v e'` (returned as `Just (v,e')`). Besides using lambdas, it looks through
casts (using the Push rule), and it unfolds function calls if the unfolding
has a greater arity than arguments are present.
Currently, it is used in Rules.match, and is required to make
"map coerce = coerce" match.
-}
exprIsLambda_maybe :: InScopeEnv -> CoreExpr
                   -> Maybe (Var, CoreExpr, [Tickish Id])
    -- See Note [exprIsLambda_maybe]
    -- The third component collects the floatable ticks that were moved
    -- outwards past the returned binder.

-- The simple case: It is a lambda already
exprIsLambda_maybe _ (Lam x e)
    = Just (x, e, [])

-- Still straightforward: Ticks that we can float out of the way
exprIsLambda_maybe (in_scope_set, id_unf) (Tick t e)
    | tickishFloatable t
    , Just (x, e, ts) <- exprIsLambda_maybe (in_scope_set, id_unf) e
    = Just (x, e, t:ts)

-- Also possible: A casted lambda. Push the coercion inside
exprIsLambda_maybe (in_scope_set, id_unf) (Cast casted_e co)
    | Just (x, e,ts) <- exprIsLambda_maybe (in_scope_set, id_unf) casted_e
    -- Only do value lambdas.
    -- this implies that x is not in scope in gamma (makes this code simpler)
    , not (isTyVar x) && not (isCoVar x)
    , ASSERT( not $ x `elemVarSet` tyCoVarsOfCo co) True
    , Just (x',e') <- pushCoercionIntoLambda in_scope_set x e co
    , let res = Just (x',e',ts)
    = --pprTrace "exprIsLambda_maybe:Cast" (vcat [ppr casted_e,ppr co,ppr res)])
      res

-- Another attempt: See if we find a partial unfolding
exprIsLambda_maybe (in_scope_set, id_unf) e
    | (Var f, as, ts) <- collectArgsTicks tickishFloatable e
    , idArity f > length (filter isValArg as)
    -- Make sure there is hope to get a lambda
    , Just rhs <- expandUnfolding_maybe (id_unf f)
    -- Optimize, for beta-reduction
    , let e' = simpleOptExprWith (mkEmptySubst in_scope_set) (rhs `mkApps` as)
    -- Recurse, because of possible casts
    , Just (x', e'', ts') <- exprIsLambda_maybe (in_scope_set, id_unf) e'
    , let res = Just (x', e'', ts++ts')
    = -- pprTrace "exprIsLambda_maybe:Unfold" (vcat [ppr e, ppr (x',e'')])
      res

-- Nothing matched: not (reducible to) a lambda
exprIsLambda_maybe _ _e
    = -- pprTrace "exprIsLambda_maybe:Fail" (vcat [ppr _e])
      Nothing
pushCoercionIntoLambda
    :: InScopeSet -> Var -> CoreExpr -> Coercion -> Maybe (Var, CoreExpr)
pushCoercionIntoLambda in_scope x e co
    -- This implements the Push rule from the paper on coercions
    -- Compare with simplCast in Simplify
    | ASSERT(not (isTyVar x) && not (isCoVar x)) True
    , Pair s1s2 t1t2 <- coercionKind co
    , Just (_s1,_s2) <- splitFunTy_maybe s1s2
    , Just (t1,_t2) <- splitFunTy_maybe t1t2
    = let [co1, co2] = decomposeCo 2 co
          -- Should we optimize the coercions here?
          -- Otherwise they might not match too well
          -- x' is the new binder, at the result-side argument type t1;
          -- occurrences of x in the body become (Var x' `mkCast` co1)
          x' = x `setIdType` t1
          in_scope' = in_scope `extendInScopeSet` x'
          subst = extendIdSubst (mkEmptySubst in_scope')
                                x
                                (mkCast (Var x') co1)
      in Just (x', subst_expr subst e `mkCast` co2)
    | otherwise
    = pprTrace "exprIsLambda_maybe: Unexpected lambda in case" (ppr (Lam x e))
      Nothing
| fmthoma/ghc | compiler/coreSyn/CoreSubst.hs | bsd-3-clause | 58,910 | 1 | 22 | 14,995 | 11,371 | 5,977 | 5,394 | 665 | 13 |
module TVM.Disk where
import Control.Monad (when)
import System.Process
import Data.Char (toLower)
import System.Exit
-- | Supported qemu-img disk formats; the lower-cased 'Show' output is
-- used as the qemu-img format name in 'createDisk' (qcow2, qcow, raw).
data DiskType = Qcow2 | Qcow | Raw
    deriving (Show,Eq)
-- | Create a disk image by shelling out to @qemu-img create@.
createDisk :: Maybe FilePath -- ^ optional backing file
           -> FilePath -- ^ disk path
           -> Int -- ^ size in megabytes
           -> DiskType -- ^ disk type
           -> IO ()
createDisk backingFile filepath sizeMb diskType = do
    ec <- rawSystem "qemu-img" ("create" : cli)
    when (ec /= ExitSuccess) $ do
        -- BUG FIX: this previously evaluated 'error "creating disk: "',
        -- which throws instead of printing the diagnostic.
        putStrLn ("error creating disk: " ++ show ec)
        exitFailure -- shouldn't exit, but just return error
  where
    cli =
        maybe [] (\b -> ["-b", b]) backingFile ++
        [ "-f", map toLower (show diskType), filepath ] ++
        -- BUG FIX: the size was previously passed only when a backing
        -- file was given, but qemu-img *requires* the size exactly when
        -- there is no backing file.  Passing it unconditionally fixes
        -- the no-backing-file case and keeps the old behaviour when a
        -- backing file is present.
        [ show sizeMb ++ "M" ]
| vincenthz/tvm | TVM/Disk.hs | bsd-3-clause | 820 | 0 | 13 | 236 | 249 | 132 | 117 | 21 | 1 |
{-|
Module: Data.Astro.Time.Sidereal
Description: Sidereal Time
Copyright: Alexander Ignatyev, 2016
According to the Sidereal Clock any observed star returns to the same position
in the sky every 24 hours.
Each sidereal day is shorter than the solar day, 24 hours of sidereal time
corresponding to 23:56:04.0916 of solar time.
-}
module Data.Astro.Time.Sidereal
(
GreenwichSiderealTime
, LocalSiderealTime
, dhToGST
, dhToLST
, gstToDH
, lstToDH
, hmsToGST
, hmsToLST
, utToGST
, gstToUT
, gstToLST
, lstToGST
, lstToGSTwDC
)
where
import Data.Astro.Types (DecimalHours(..), fromHMS)
import Data.Astro.Time.JulianDate (JulianDate(..), TimeBaseType, numberOfCenturies, splitToDayAndTime)
import Data.Astro.Time.Epoch (j2000)
import Data.Astro.Utils (reduceToZeroRange)
import qualified Data.Astro.Types as C
-- | Greenwich Sidereal Time
-- GST can be in range [-12h, 36h] carrying out a day correction.
-- The wrapped value is measured in decimal hours (see 'dhToGST').
newtype GreenwichSiderealTime = GST TimeBaseType deriving (Show, Eq)

-- | Local Sidereal Time; the wrapped value is measured in decimal hours.
newtype LocalSiderealTime = LST TimeBaseType deriving (Show, Eq)
-- | Convert Decimal Hours to Greenwich Sidereal Time
dhToGST :: DecimalHours -> GreenwichSiderealTime
dhToGST (DH t) = GST t

-- | Convert Decimal Hours to Local Sidereal Time
dhToLST :: DecimalHours -> LocalSiderealTime
dhToLST (DH t) = LST t

-- | Convert Greenwich Sidereal Time to Decimal Hours
gstToDH :: GreenwichSiderealTime -> DecimalHours
gstToDH (GST t) = DH t

-- | Convert Local Sidereal Time to Decimal Hours
lstToDH :: LocalSiderealTime -> DecimalHours
lstToDH (LST t) = DH t

-- | Convert Hours, Minutes, Seconds to Greenwich Sidereal Time
hmsToGST :: Int -> Int -> TimeBaseType -> GreenwichSiderealTime
hmsToGST h m s = dhToGST $ fromHMS h m s

-- | Convert Hours, Minutes, Seconds to Local Sidereal Time
hmsToLST :: Int -> Int -> TimeBaseType -> LocalSiderealTime
hmsToLST h m s = dhToLST $ fromHMS h m s
-- | Convert from Universal Time (UT) to Greenwich Sidereal Time (GST).
utToGST :: JulianDate -> GreenwichSiderealTime
utToGST jd = GST siderealHours
  where
    (JD day, JD dayFrac) = splitToDayAndTime jd
    correction = solarSiderealTimesDiff day
    -- scale the solar day-fraction to sidereal hours, apply the
    -- solar/sidereal offset and normalise into [0, 24)
    siderealHours = reduceToZeroRange 24 $ dayFrac*24/siderealDayLength + correction
-- | Convert from Greenwich Sidereal Time (GST) to Universal Time (UT).
-- It takes GST and Greenwich Date, returns JulianDate.
-- Because the sidereal day is shorter than the solar day (see comment to the module).
-- In case of such ambiguity the early time will be returned.
-- You can easily check the ambiguity: if time is equal or less 00:03:56
-- you can get the second time by adding 23:56:04
gstToUT :: JulianDate -> GreenwichSiderealTime -> JulianDate
gstToUT jd gst =
  let (day, time) = dayTime jd gst
      t = solarSiderealTimesDiff day
      time' = (reduceToZeroRange 24 (time-t)) * siderealDayLength
  in JD $ day + time'/24
  -- dayTime normalises the GST into [0, 24) by borrowing from or
  -- carrying into the day number, so day+time stay consistent
  where dayTime jd (GST gst)
          | gst < 0 = (day-1, gst+24)
          | gst >= 24 = (day+1, gst-24)
          | otherwise = (day, gst)
          where (JD day, _) = splitToDayAndTime jd
-- | Convert Greenwich Sidereal Time to Local Sidereal Time.
-- It takes GST and the longitude in decimal degrees.
gstToLST :: C.DecimalDegrees -> GreenwichSiderealTime -> LocalSiderealTime
gstToLST longitude (GST gst) = LST localHours
  where
    C.DH dhours = C.toDecimalHours longitude
    -- east longitudes advance local sidereal time
    localHours = reduceToZeroRange 24 $ gst + dhours
-- | Convert Local Sidereal Time to Greenwich Sidereal Time.
-- It takes LST and the longitude in decimal degrees.
lstToGST :: C.DecimalDegrees -> LocalSiderealTime -> GreenwichSiderealTime
lstToGST longitude (LST lst) = GST greenwichHours
  where
    C.DH dhours = C.toDecimalHours longitude
    -- reverse of 'gstToLST': subtract the longitude correction
    greenwichHours = reduceToZeroRange 24 $ lst - dhours
-- | Convert Local Sidereal Time to Greenwich Sidereal Time with Day
-- Correction: unlike 'lstToGST' the result is NOT reduced into [0, 24),
-- so it may carry a day over/underflow.
lstToGSTwDC :: C.DecimalDegrees -> LocalSiderealTime -> GreenwichSiderealTime
lstToGSTwDC longitude (LST lst) = GST greenwichHours
  where
    C.DH dhours = C.toDecimalHours longitude
    greenwichHours = lst - dhours
-- Sidereal time internal functions

-- | Length of the sidereal day as a fraction of the solar day:
-- sidereal 24h correspond to 23:56:04.0916 of solar time.
siderealDayLength :: TimeBaseType
siderealDayLength = hours/24
  where C.DH hours = fromHMS 23 56 04.0916
-- | Offset (in hours) between solar and sidereal time for the given
-- Julian day number, evaluated as a polynomial in centuries since J2000.
solarSiderealTimesDiff :: TimeBaseType -> TimeBaseType
solarSiderealTimesDiff d =
  reduceToZeroRange 24 $ 6.697374558 + 2400.051336*t + 0.000025862*t*t
  where t = numberOfCenturies j2000 (JD d)
| Alexander-Ignatyev/astro | src/Data/Astro/Time/Sidereal.hs | bsd-3-clause | 4,444 | 0 | 13 | 836 | 954 | 509 | 445 | 73 | 1 |
module MeshGenerator where
import Data.Sequence as S
import Data.List
import Data.Foldable
import System.IO
import Prelude as P
import qualified Data.Text as T
import Types
-- | Four corners of a unit square lying in the XZ plane (y = 0).
testVerts :: Seq (Vertex Double)
testVerts = fromList [Vertex 0.0 0.0 0.0, Vertex 0.0 0.0 1.0, Vertex 1.0 0.0 1.0, Vertex 1.0 0.0 0.0]

-- | The two triangles (1-based vertex indices) covering the square.
testTris :: Seq Triangle
testTris = fromList [Triangle 1 2 3, Triangle 1 3 4]

-- | A minimal two-triangle test mesh.
testSquare :: Mesh Double
testSquare = Mesh testVerts testTris
-- | A square grid mesh in the XZ plane (y = 0) with @axisResolution@
-- vertices per axis, spanning [-1, 1] in both x and z.
squareGrid :: Int -> Mesh Double
squareGrid axisResolution = Mesh verts tris
  where
    axisRange :: [Double]
    -- map the indices 0 .. n-1 linearly onto [-1, 1]
    axisRange = map (\t -> 2.0 * (t - 0.5 * fromIntegral (axisResolution - 1)) / (fromIntegral (axisResolution - 1))) (P.take axisResolution [0.0, 1.0 ..])
    -- two triangles per grid cell, generated from row/column offsets
    tris = fromList $ P.concatMap takeOffSetGiveTris $ allPairs axisResolution
    verts = fromList [Vertex vx 0.0 vz | vx <- axisRange, vz <- axisRange]
-- | A disc-shaped grid mesh in the XZ plane: rings of vertices at
-- increasing radii plus one centre vertex, triangulated like the
-- square grid with an extra fan of triangles around the centre.
radialGrid :: Int -> Mesh Double
radialGrid axisResolution = Mesh verts tris
  where
    radiusRange :: [Double]
    -- ring radii 1/(n-1), 2/(n-1), ..., n/(n-1)
    radiusRange = map (\t -> t / (fromIntegral (axisResolution - 1))) (P.take axisResolution [1.0, 2.0 ..])
    thetaRange :: [Double]
    -- angles from 0 to 2*pi in n steps
    thetaRange = map (\t -> 2.0 * pi * t / (fromIntegral (axisResolution - 1))) (P.take axisResolution [0.0, 1.0 ..])
    tris = (fromList $ P.concatMap takeOffSetGiveTris $ allPairs axisResolution) >< --all but center
           fromList [Triangle (S.length verts) r1 r0 | (r0, r1) <- firstRowPairs axisResolution] --center tris
    verts = (fromList [Vertex (radius * cos theta) 0.0 (radius * sin theta) | radius <- radiusRange, theta <- thetaRange]) |> Vertex 0.0 0.0 0.0
-- | Split a sequence into consecutive chunks of at most @chunkSize@
-- elements (the final chunk may be shorter).  The parameter was renamed
-- from @splitAt@, which shadowed 'Prelude.splitAt'.  A non-positive
-- chunk size previously recursed forever on non-empty input; it now
-- returns the whole input as a single chunk.
recursiveSplitAt :: Int -> Seq a -> [Seq a]
recursiveSplitAt chunkSize inSeq
  | chunkSize <= 0 || S.length inSeq <= chunkSize = [inSeq]
  | otherwise = firstChunk : recursiveSplitAt chunkSize rest
  where
    (firstChunk, rest) = S.splitAt chunkSize inSeq
-- | Split a mesh into several meshes that all share the same vertex
-- list, partitioning the triangle list into chunks of the given size.
splitMeshAt :: Int -> Mesh a -> [Mesh a]
splitMeshAt chunkSize (Mesh verts trisLong) =
  map (Mesh verts) (recursiveSplitAt chunkSize trisLong)
-- | Build the two triangles of one grid cell from a pair of row
-- offsets and a pair of column offsets.
takeOffSetGiveTris :: ((Int, Int), (Int, Int)) -> [Triangle]
takeOffSetGiveTris ((rowA, rowB), (colA, colB)) =
  [ Triangle (rowA + colA) (rowB + colA) (rowA + colB)
  , Triangle (rowA + colB) (rowB + colA) (rowB + colB)
  ]
-- | 1-based (index, index+1) pairs along one grid row.
firstRowPairs :: Int -> [(Int, Int)]
firstRowPairs resolution = pairs $ P.take resolution [1, 2 ..]

-- | (offset, offset+resolution) pairs addressing consecutive columns.
colPairs :: Int -> [(Int, Int)]
colPairs resolution = pairs $ P.take resolution [0, resolution ..]

-- | All (row-pair, column-pair) combinations, one per grid cell.
allPairs :: Int -> [((Int, Int), (Int, Int))]
allPairs resolution = [(rowOffset, colOffset) | colOffset <- colPairs resolution, rowOffset <- firstRowPairs resolution]
-- | Adjacent pairs of a list: @pairs [a,b,c] == [(a,b),(b,c)]@; empty
-- and singleton lists yield @[]@.  The previous definition bound an
-- unused variable (a -Wall warning) and needed a redundant second
-- equation; zipping the list against its own tail covers all cases.
pairs :: [a] -> [(a, a)]
pairs xs = P.zip xs (P.drop 1 xs)
-- | Write a mesh to a file, followed by a trailing newline.  The text
-- comes from the mesh's 'Show' instance (defined in Types) -- presumably
-- Wavefront OBJ, given the name; verify against that instance.
meshToObj :: (Show a) => Mesh a -> String -> IO ()
meshToObj mesh filename = writeFile filename (show mesh ++ "\n")
-- | Number of vertices in a mesh.  (Unused pattern bindings replaced
-- by wildcards to silence -Wall warnings.)
meshVertLength :: Mesh a -> Int
meshVertLength (Mesh verts _) = S.length verts

-- | Number of triangles in a mesh.
meshTrisLength :: Mesh a -> Int
meshTrisLength (Mesh _ tris) = S.length tris
| zobot/MeshGenerator | src/MeshGenerator.hs | bsd-3-clause | 3,067 | 0 | 16 | 643 | 1,245 | 664 | 581 | 54 | 1 |
module Main where
import Console.Driver
-- | Program entry point: hand control to the console driver.
-- (The previous @main = do start@ had a redundant 'do'.)
main :: IO ()
main = start
| rewinfrey/haskell-tic-tac-toe | app/Main.hs | bsd-3-clause | 72 | 0 | 6 | 15 | 27 | 15 | 12 | 4 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Futhark.Pass.Simplify
( simplify,
simplifySOACS,
simplifySeq,
simplifyMC,
simplifyGPU,
simplifyGPUMem,
simplifySeqMem,
simplifyMCMem,
)
where
import qualified Futhark.IR.GPU.Simplify as GPU
import qualified Futhark.IR.GPUMem as GPUMem
import qualified Futhark.IR.MC as MC
import qualified Futhark.IR.MCMem as MCMem
import qualified Futhark.IR.SOACS.Simplify as SOACS
import qualified Futhark.IR.Seq as Seq
import qualified Futhark.IR.SeqMem as SeqMem
import Futhark.IR.Syntax
import Futhark.Pass
-- | Build a simplification 'Pass' for some representation, given that
-- representation's program-level simplification function.
simplify ::
  (Prog rep -> PassM (Prog rep)) ->
  Pass rep rep
simplify = Pass "simplify" "Perform simple enabling optimisations."
-- | Simplify 'SOACS.SOACS' programs.
simplifySOACS :: Pass SOACS.SOACS SOACS.SOACS
simplifySOACS = simplify SOACS.simplifySOACS

-- | Simplify 'GPU.GPU' programs.
simplifyGPU :: Pass GPU.GPU GPU.GPU
simplifyGPU = simplify GPU.simplifyGPU

-- | Simplify 'Seq.Seq' programs.
simplifySeq :: Pass Seq.Seq Seq.Seq
simplifySeq = simplify Seq.simplifyProg

-- | Simplify 'MC.MC' programs.
simplifyMC :: Pass MC.MC MC.MC
simplifyMC = simplify MC.simplifyProg

-- | Simplify 'GPUMem.GPUMem' programs.
simplifyGPUMem :: Pass GPUMem.GPUMem GPUMem.GPUMem
simplifyGPUMem = simplify GPUMem.simplifyProg

-- | Simplify 'SeqMem.SeqMem' programs.
simplifySeqMem :: Pass SeqMem.SeqMem SeqMem.SeqMem
simplifySeqMem = simplify SeqMem.simplifyProg

-- | Simplify 'MCMem.MCMem' programs.
simplifyMCMem :: Pass MCMem.MCMem MCMem.MCMem
simplifyMCMem = simplify MCMem.simplifyProg
| diku-dk/futhark | src/Futhark/Pass/Simplify.hs | isc | 1,305 | 0 | 10 | 185 | 322 | 187 | 135 | 37 | 1 |
{-|
Description : Mock out Cabal path abstraction
This is a mock copy of something Cabal generates during installation.
-}
module Paths_uroboro
(
getDataFileName
) where
-- |Adjust path for Cabal moving things around.
-- The mock simply yields the given path unchanged; the real generated
-- module resolves data files relative to the install location.
getDataFileName :: FilePath -> IO FilePath
getDataFileName = return
| tewe/uroboro | Paths_uroboro.hs | mit | 302 | 0 | 6 | 59 | 28 | 17 | 11 | 5 | 1 |
{-| Implementation of command-line functions.
This module holds the common command-line related functions for the
binaries, separated into this module since "Ganeti.Utils" is
used in many other places and this is more IO oriented.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.CLI
( Options(..)
, OptType
, defaultOptions
, Ganeti.HTools.CLI.parseOpts
, parseOptsInner
, parseYesNo
, parseISpecString
, shTemplate
, maybePrintNodes
, maybePrintInsts
, maybeShowWarnings
, printKeys
, printFinal
, setNodeStatus
-- * The options
, oDataFile
, oDiskMoves
, oDiskTemplate
, oSpindleUse
, oDynuFile
, oEvacMode
, oExInst
, oExTags
, oExecJobs
, oGroup
, oIAllocSrc
, oInstMoves
, genOLuxiSocket
, oLuxiSocket
, oMachineReadable
, oMaxCpu
, oMaxSolLength
, oMinDisk
, oMinGain
, oMinGainLim
, oMinScore
, oNoHeaders
, oNoSimulation
, oNodeSim
, oOfflineNode
, oOutputDir
, oPrintCommands
, oPrintInsts
, oPrintNodes
, oQuiet
, oRapiMaster
, oSaveCluster
, oSelInst
, oShowHelp
, oShowVer
, oShowComp
, oStdSpec
, oTieredSpec
, oVerbose
, oPriority
, genericOpts
) where
import Control.Monad
import Data.Char (toUpper)
import Data.Maybe (fromMaybe)
import System.Console.GetOpt
import System.IO
import Text.Printf (printf)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.Path as Path
import Ganeti.HTools.Types
import Ganeti.BasicTypes
import Ganeti.Common as Common
import Ganeti.Types
import Ganeti.Utils
-- * Data types
-- | Command line options structure.
-- Each field corresponds to one command-line flag; see the @o*@ option
-- definitions below for the flags that set them.
data Options = Options
  { optDataFile        :: Maybe FilePath -- ^ Path to the cluster data file
  , optDiskMoves       :: Bool           -- ^ Allow disk moves
  , optInstMoves       :: Bool           -- ^ Allow instance moves
  , optDiskTemplate    :: Maybe DiskTemplate -- ^ Override for the disk template
  , optSpindleUse      :: Maybe Int      -- ^ Override for the spindle usage
  , optDynuFile        :: Maybe FilePath -- ^ Optional file with dynamic use data
  , optEvacMode        :: Bool           -- ^ Enable evacuation mode
  , optExInst          :: [String]       -- ^ Instances to be excluded
  , optExTags          :: Maybe [String] -- ^ Tags to use for exclusion
  , optExecJobs        :: Bool           -- ^ Execute the commands via Luxi
  , optGroup           :: Maybe GroupID  -- ^ The UUID of the group to process
  , optIAllocSrc       :: Maybe FilePath -- ^ The iallocation spec
  , optSelInst         :: [String]       -- ^ Instances to be excluded
  , optLuxi            :: Maybe FilePath -- ^ Collect data from Luxi
  , optMachineReadable :: Bool           -- ^ Output machine-readable format
  , optMaster          :: String         -- ^ Collect data from RAPI
  , optMaxLength       :: Int            -- ^ Stop after this many steps
  , optMcpu            :: Maybe Double   -- ^ Override max cpu ratio for nodes
  , optMdsk            :: Double         -- ^ Max disk usage ratio for nodes
  , optMinGain         :: Score          -- ^ Min gain we aim for in a step
  , optMinGainLim      :: Score          -- ^ Limit below which we apply mingain
  , optMinScore        :: Score          -- ^ The minimum score we aim for
  , optNoHeaders       :: Bool           -- ^ Do not show a header line
  , optNoSimulation    :: Bool           -- ^ Skip the rebalancing dry-run
  , optNodeSim         :: [String]       -- ^ Cluster simulation mode
  , optOffline         :: [String]       -- ^ Names of offline nodes
  , optOutPath         :: FilePath       -- ^ Path to the output directory
  , optSaveCluster     :: Maybe FilePath -- ^ Save cluster state to this file
  , optShowCmds        :: Maybe FilePath -- ^ Whether to show the command list
  , optShowHelp        :: Bool           -- ^ Just show the help
  , optShowComp        :: Bool           -- ^ Just show the completion info
  , optShowInsts       :: Bool           -- ^ Whether to show the instance map
  , optShowNodes       :: Maybe [String] -- ^ Whether to show node status
  , optShowVer         :: Bool           -- ^ Just show the program version
  , optStdSpec         :: Maybe RSpec    -- ^ Requested standard specs
  , optTestCount       :: Maybe Int      -- ^ Optional test count override
  , optTieredSpec      :: Maybe RSpec    -- ^ Requested specs for tiered mode
  , optReplay          :: Maybe String   -- ^ Unittests: RNG state
  , optVerbose         :: Int            -- ^ Verbosity level
  , optPriority        :: Maybe OpSubmitPriority -- ^ OpCode submit priority
  } deriving Show
-- | Default values for the command line options.
-- These are the values in effect before option parsing; each is
-- overridden by the corresponding flag defined below.
defaultOptions :: Options
defaultOptions  = Options
  { optDataFile        = Nothing
  , optDiskMoves       = True
  , optInstMoves       = True
  , optDiskTemplate    = Nothing
  , optSpindleUse      = Nothing
  , optDynuFile        = Nothing
  , optEvacMode        = False
  , optExInst          = []
  , optExTags          = Nothing
  , optExecJobs        = False
  , optGroup           = Nothing
  , optIAllocSrc       = Nothing
  , optSelInst         = []
  , optLuxi            = Nothing
  , optMachineReadable = False
  , optMaster          = ""
  , optMaxLength       = -1
  , optMcpu            = Nothing
  , optMdsk            = defReservedDiskRatio
  , optMinGain         = 1e-2
  , optMinGainLim      = 1e-1
  , optMinScore        = 1e-9
  , optNoHeaders       = False
  , optNoSimulation    = False
  , optNodeSim         = []
  , optOffline         = []
  , optOutPath         = "."
  , optSaveCluster     = Nothing
  , optShowCmds        = Nothing
  , optShowHelp        = False
  , optShowComp        = False
  , optShowInsts       = False
  , optShowNodes       = Nothing
  , optShowVer         = False
  , optStdSpec         = Nothing
  , optTestCount       = Nothing
  , optTieredSpec      = Nothing
  , optReplay          = Nothing
  , optVerbose         = 1
  , optPriority        = Nothing
  }
-- | Abbreviation for the option type.
type OptType = GenericOptType Options

-- Wire the generic help/version/completion handling to our record fields.
instance StandardOptions Options where
  helpRequested = optShowHelp
  verRequested  = optShowVer
  compRequested = optShowComp
  requestHelp o = o { optShowHelp = True }
  requestVer  o = o { optShowVer  = True }
  requestComp o = o { optShowComp = True }
-- * Helper functions
-- | Parse a @disk,ram,cpu@ instance-spec string into an 'RSpec'.
-- The @descr@ argument is only used to label error messages.
parseISpecString :: String -> String -> Result RSpec
parseISpecString descr inp =
  case sepSplit ',' inp of
    [dskS, memS, cpuS] -> do
      -- parse in the same order as before: disk, memory, cpus
      dsk <- annotateResult (descr ++ " specs disk") (parseUnit dskS)
      mem <- annotateResult (descr ++ " specs memory") (parseUnit memS)
      cpu <- tryRead (descr ++ " specs cpus") cpuS
      return $ RSpec cpu mem dsk
    _ -> Bad ("Invalid " ++ descr ++ " specification: '" ++ inp ++
              "', expected disk,ram,cpu")
-- | Disk template choices.
-- Completion values: the raw (serialised) names of every disk template.
optComplDiskTemplate :: OptCompletion
optComplDiskTemplate = OptComplChoices $
                       map diskTemplateToRaw [minBound..maxBound]
-- * Command line options

-- | Read the cluster data from a text FILE.
oDataFile :: OptType
oDataFile =
  (Option "t" ["text-data"]
   (ReqArg (\ f o -> Ok o { optDataFile = Just f }) "FILE")
   "the cluster data FILE",
   OptComplFile)

-- | Forbid disk moves during balancing.
oDiskMoves :: OptType
oDiskMoves =
  (Option "" ["no-disk-moves"]
   (NoArg (\ opts -> Ok opts { optDiskMoves = False}))
   "disallow disk moves from the list of allowed instance changes,\
   \ thus allowing only the 'cheap' failover/migrate operations",
   OptComplNone)

-- | Override the disk template.
oDiskTemplate :: OptType
oDiskTemplate =
  (Option "" ["disk-template"]
   (reqWithConversion diskTemplateFromRaw
    (\dt opts -> Ok opts { optDiskTemplate = Just dt })
    "TEMPLATE") "select the desired disk template",
   optComplDiskTemplate)

-- | Override the per-instance spindle usage (must be non-negative).
oSpindleUse :: OptType
oSpindleUse =
  (Option "" ["spindle-use"]
   (reqWithConversion (tryRead "parsing spindle-use")
    (\su opts -> do
       when (su < 0) $
         fail "Invalid value of the spindle-use (expected >= 0)"
       return $ opts { optSpindleUse = Just su })
    "SPINDLES") "select how many virtual spindle instances use\
                \ [default read from cluster]",
   OptComplFloat)

-- | Restrict moves to the given comma-separated instance list.
oSelInst :: OptType
oSelInst =
  (Option "" ["select-instances"]
   (ReqArg (\ f opts -> Ok opts { optSelInst = sepSplit ',' f }) "INSTS")
   "only select given instances for any moves",
   OptComplManyInstances)

-- | Forbid instance (primary node) moves.
oInstMoves :: OptType
oInstMoves =
  (Option "" ["no-instance-moves"]
   (NoArg (\ opts -> Ok opts { optInstMoves = False}))
   "disallow instance (primary node) moves from the list of allowed,\
   \ instance changes, thus allowing only slower, but sometimes\
   \ safer, drbd secondary changes",
   OptComplNone)

-- | Load dynamic utilisation data from a FILE.
oDynuFile :: OptType
oDynuFile =
  (Option "U" ["dynu-file"]
   (ReqArg (\ f opts -> Ok opts { optDynuFile = Just f }) "FILE")
   "Import dynamic utilisation data from the given FILE",
   OptComplFile)

-- | Enable evacuation mode.
oEvacMode :: OptType
oEvacMode =
  (Option "E" ["evac-mode"]
   (NoArg (\opts -> Ok opts { optEvacMode = True }))
   "enable evacuation mode, where the algorithm only moves\
   \ instances away from offline and drained nodes",
   OptComplNone)

-- | Exclude the given comma-separated instance list from moves.
oExInst :: OptType
oExInst =
  (Option "" ["exclude-instances"]
   (ReqArg (\ f opts -> Ok opts { optExInst = sepSplit ',' f }) "INSTS")
   "exclude given instances from any moves",
   OptComplManyInstances)

-- | Enable tag-prefix-based instance exclusion.
oExTags :: OptType
oExTags =
  (Option "" ["exclusion-tags"]
   (ReqArg (\ f opts -> Ok opts { optExTags = Just $ sepSplit ',' f })
    "TAG,...") "Enable instance exclusion based on given tag prefix",
   OptComplString)

-- | Execute the computed moves via Luxi.
oExecJobs :: OptType
oExecJobs =
  (Option "X" ["exec"]
   (NoArg (\ opts -> Ok opts { optExecJobs = True}))
   "execute the suggested moves via Luxi (only available when using\
   \ it for data gathering)",
   OptComplNone)
-- | Select the target node group (name or UUID).
oGroup :: OptType
oGroup =
  (Option "G" ["group"]
   (ReqArg (\ f o -> Ok o { optGroup = Just f }) "ID")
   "the target node group (name or UUID)",
   OptComplOneGroup)

-- | Use an iallocator spec FILE as the cluster data source.
oIAllocSrc :: OptType
oIAllocSrc =
  (Option "I" ["ialloc-src"]
   (ReqArg (\ f opts -> Ok opts { optIAllocSrc = Just f }) "FILE")
   "Specify an iallocator spec as the cluster data source",
   OptComplFile)

-- | Build the Luxi option around a given default socket path.
genOLuxiSocket :: String -> OptType
genOLuxiSocket defSocket =
  (Option "L" ["luxi"]
   (OptArg ((\ f opts -> Ok opts { optLuxi = Just f }) .
            fromMaybe defSocket) "SOCKET")
   ("collect data via Luxi, optionally using the given SOCKET path [" ++
    defSocket ++ "]"),
   OptComplFile)

-- | The Luxi option instantiated with the system default socket path.
oLuxiSocket :: IO OptType
oLuxiSocket = liftM genOLuxiSocket Path.defaultLuxiSocket

-- | Enable (or explicitly control) machine-readable output.
oMachineReadable :: OptType
oMachineReadable =
  (Option "" ["machine-readable"]
   (OptArg (\ f opts -> do
              flag <- parseYesNo True f
              return $ opts { optMachineReadable = flag }) "CHOICE")
   "enable machine readable output (pass either 'yes' or 'no' to\
   \ explicitly control the flag, or without an argument defaults to\
   \ yes",
   optComplYesNo)

-- | Override the maximum virtual-to-physical CPU ratio (must be > 0).
oMaxCpu :: OptType
oMaxCpu =
  (Option "" ["max-cpu"]
   (reqWithConversion (tryRead "parsing max-cpu")
    (\mcpu opts -> do
       when (mcpu <= 0) $
         fail "Invalid value of the max-cpu ratio, expected >0"
       return $ opts { optMcpu = Just mcpu }) "RATIO")
   "maximum virtual-to-physical cpu ratio for nodes (from 0\
   \ upwards) [default read from cluster]",
   OptComplFloat)

-- | Cap the number of balancing/allocation rounds.
oMaxSolLength :: OptType
oMaxSolLength =
  (Option "l" ["max-length"]
   (reqWithConversion (tryRead "max solution length")
    (\i opts -> Ok opts { optMaxLength = i }) "N")
   "cap the solution at this many balancing or allocation\
   \ rounds (useful for very unbalanced clusters or empty\
   \ clusters)",
   OptComplInteger)

-- | Set the minimum free-disk ratio for nodes.
oMinDisk :: OptType
oMinDisk =
  (Option "" ["min-disk"]
   (reqWithConversion (tryRead "min free disk space")
    (\n opts -> Ok opts { optMdsk = n }) "RATIO")
   "minimum free disk space for nodes (between 0 and 1) [0]",
   OptComplFloat)

-- | Set the minimum gain per balancing step.
oMinGain :: OptType
oMinGain =
  (Option "g" ["min-gain"]
   (reqWithConversion (tryRead "min gain")
    (\g opts -> Ok opts { optMinGain = g }) "DELTA")
   "minimum gain to aim for in a balancing step before giving up",
   OptComplFloat)

-- | Set the score limit below which min-gain checking kicks in.
oMinGainLim :: OptType
oMinGainLim =
  (Option "" ["min-gain-limit"]
   (reqWithConversion (tryRead "min gain limit")
    (\g opts -> Ok opts { optMinGainLim = g }) "SCORE")
   "minimum cluster score for which we start checking the min-gain",
   OptComplFloat)
-- | Set the minimum cluster score to aim for.
-- (Help-text typo fixed: "mininum" -> "minimum".)
oMinScore :: OptType
oMinScore =
  (Option "e" ["min-score"]
   (reqWithConversion (tryRead "min score")
    (\e opts -> Ok opts { optMinScore = e }) "EPSILON")
   "minimum score to aim for",
   OptComplFloat)
-- | Suppress the header line in listings.
oNoHeaders :: OptType
oNoHeaders =
  (Option "" ["no-headers"]
   (NoArg (\ opts -> Ok opts { optNoHeaders = True }))
   "do not show a header line",
   OptComplNone)

-- | Skip the rebalancing simulation (dry-run).
oNoSimulation :: OptType
oNoSimulation =
  (Option "" ["no-simulation"]
   (NoArg (\opts -> Ok opts {optNoSimulation = True}))
   "do not perform rebalancing simulation",
   OptComplNone)

-- | Add a cluster-simulation SPEC (may be given multiple times).
oNodeSim :: OptType
oNodeSim =
  (Option "" ["simulate"]
   (ReqArg (\ f o -> Ok o { optNodeSim = f:optNodeSim o }) "SPEC")
   "simulate an empty cluster, given as\
   \ 'alloc_policy,num_nodes,disk,ram,cpu'",
   OptComplString)

-- | Mark a NODE as offline (may be given multiple times).
oOfflineNode :: OptType
oOfflineNode =
  (Option "O" ["offline"]
   (ReqArg (\ n o -> Ok o { optOffline = n:optOffline o }) "NODE")
   "set node as offline",
   OptComplOneNode)

-- | Select the directory for output files.
oOutputDir :: OptType
oOutputDir =
  (Option "d" ["output-dir"]
   (ReqArg (\ d opts -> Ok opts { optOutPath = d }) "PATH")
   "directory in which to write output files",
   OptComplDir)

-- | Print (or write to FILE) the generated ganeti command list.
oPrintCommands :: OptType
oPrintCommands =
  (Option "C" ["print-commands"]
   (OptArg ((\ f opts -> Ok opts { optShowCmds = Just f }) .
            fromMaybe "-")
    "FILE")
   "print the ganeti command list for reaching the solution,\
   \ if an argument is passed then write the commands to a\
   \ file named as such",
   OptComplNone)

-- | Print the final instance map.
oPrintInsts :: OptType
oPrintInsts =
  (Option "" ["print-instances"]
   (NoArg (\ opts -> Ok opts { optShowInsts = True }))
   "print the final instance map",
   OptComplNone)

-- | Print the final node list; a leading '+' in FIELDS extends the
-- default field set instead of replacing it.
oPrintNodes :: OptType
oPrintNodes =
  (Option "p" ["print-nodes"]
   (OptArg ((\ f opts ->
               let (prefix, realf) = case f of
                                       '+':rest -> (["+"], rest)
                                       _ -> ([], f)
                   splitted = prefix ++ sepSplit ',' realf
               in Ok opts { optShowNodes = Just splitted }) .
            fromMaybe []) "FIELDS")
   "print the final node list",
   OptComplNone)

-- | Decrease verbosity by one level.
oQuiet :: OptType
oQuiet =
  (Option "q" ["quiet"]
   (NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts - 1 }))
   "decrease the verbosity level",
   OptComplNone)
-- | Collect cluster data from RAPI at the given ADDRESS.
oRapiMaster :: OptType
oRapiMaster =
  (Option "m" ["master"]
   (ReqArg (\ m opts -> Ok opts { optMaster = m }) "ADDRESS")
   "collect data via RAPI at the given ADDRESS",
   OptComplHost)

-- | Save the cluster state to FILE after processing.
oSaveCluster :: OptType
oSaveCluster =
  (Option "S" ["save"]
   (ReqArg (\ f opts -> Ok opts { optSaveCluster = Just f }) "FILE")
   "Save cluster state at the end of the processing to FILE",
   OptComplNone)

-- | Enable standard-specs allocation ('disk,ram,cpu' triple).
oStdSpec :: OptType
oStdSpec =
  (Option "" ["standard-alloc"]
   (ReqArg (\ inp opts -> do
              tspec <- parseISpecString "standard" inp
              return $ opts { optStdSpec = Just tspec } )
    "STDSPEC")
   "enable standard specs allocation, given as 'disk,ram,cpu'",
   OptComplString)

-- | Enable tiered-specs allocation ('disk,ram,cpu' triple).
oTieredSpec :: OptType
oTieredSpec =
  (Option "" ["tiered-alloc"]
   (ReqArg (\ inp opts -> do
              tspec <- parseISpecString "tiered" inp
              return $ opts { optTieredSpec = Just tspec } )
    "TSPEC")
   "enable tiered specs allocation, given as 'disk,ram,cpu'",
   OptComplString)

-- | Increase verbosity by one level.
oVerbose :: OptType
oVerbose =
  (Option "v" ["verbose"]
   (NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts + 1 }))
   "increase the verbosity level",
   OptComplNone)

-- | Set the submit priority for generated jobs.
oPriority :: OptType
oPriority =
  (Option "" ["priority"]
   (ReqArg (\ inp opts -> do
              prio <- parseSubmitPriority inp
              Ok opts { optPriority = Just prio }) "PRIO")
   "set the priority of submitted jobs",
   OptComplChoices (map fmtSubmitPriority [minBound..maxBound]))
-- | Generic options: the version, help and shell-completion flags.
genericOpts :: [GenericOptType Options]
genericOpts = [ oShowVer
              , oShowHelp
              , oShowComp
              ]
-- * Functions

-- | Wrapper over 'Common.parseOpts' with our custom options.
-- Partially applies 'defaultOptions' so parsing starts from the defaults.
parseOpts :: [String]              -- ^ The command line arguments
          -> String                -- ^ The program name
          -> [OptType]             -- ^ The supported command line options
          -> [ArgCompletion]       -- ^ The supported command line arguments
          -> IO (Options, [String]) -- ^ The resulting options and leftover
                                    -- arguments
parseOpts = Common.parseOpts defaultOptions
-- | A shell script template for autogenerated scripts.
-- The emitted script aborts when the sentinel file /tmp/stop-htools
-- exists (the generated commands call @check@ between steps).
shTemplate :: String
shTemplate =
  printf "#!/bin/sh\n\n\
         \# Auto-generated script for executing cluster rebalancing\n\n\
         \# To stop, touch the file /tmp/stop-htools\n\n\
         \set -e\n\n\
         \check() {\n\
         \  if [ -f /tmp/stop-htools ]; then\n\
         \    echo 'Stop requested, exiting'\n\
         \    exit 0\n\
         \  fi\n\
         \}\n\n"
-- | Optionally print the node list to stderr.
maybePrintNodes :: Maybe [String]       -- ^ The field list
                -> String               -- ^ Informational message
                -> ([String] -> String) -- ^ Function to generate the listing
                -> IO ()
maybePrintNodes mfields msg fn =
  case mfields of
    Nothing -> return ()
    Just fields ->
      -- same three writes, in the same order, as before
      mapM_ (hPutStrLn stderr) ["", msg ++ " status:", fn fields]
-- | Optionally print the instance map to stderr.
maybePrintInsts :: Bool   -- ^ Whether to print the instance list
                -> String -- ^ Type of the instance map (e.g. initial)
                -> String -- ^ The instance data
                -> IO ()
maybePrintInsts False _ _ = return ()
maybePrintInsts True mapType instData = do
  hPutStrLn stderr ""
  hPutStrLn stderr (mapType ++ " instance map:")
  hPutStr stderr instData
-- | Display warning messages produced while parsing the cluster state
-- (a no-op for an empty list).
maybeShowWarnings :: [String] -- ^ The warning messages
                  -> IO ()
maybeShowWarnings []   = return ()
maybeShowWarnings msgs = do
  hPutStrLn stderr "Warning: cluster has inconsistent data:"
  -- 'unlines' keeps the original trailing-newline behaviour
  hPutStrLn stderr (unlines [printf "  - %s" m | m <- msgs])
-- | Format a list of (key, value) pairs as shell variable assignments.
printKeys :: String             -- ^ Prefix to printed variables
          -> [(String, String)] -- ^ List of (key, value) pairs to be printed
          -> IO ()
printKeys prefix kvs = mapM_ emit kvs
  where
    -- keys are upper-cased and prefixed; values are shell-quoted
    emit (key, val) =
      printf "%s_%s=%s\n" prefix (map toUpper key) (ensureQuoted val)
-- | Prints the final @OK@ marker in machine readable output.
printFinal :: String -- ^ Prefix to printed variable
           -> Bool   -- ^ Whether output should be machine readable;
                     -- note: if not, there is nothing to print
           -> IO ()
printFinal prefix machineReadable
  | machineReadable = printKeys prefix [("OK", "1")] -- must be the final entry
  | otherwise       = return ()
-- | Potentially set the node as offline based on the passed offline
-- index list; nodes not in the list are returned unchanged.
setNodeOffline :: [Ndx] -> Node.Node -> Node.Node
setNodeOffline offline_indices node
  | Node.idx node `elem` offline_indices = Node.setOffline node True
  | otherwise                            = node
-- | Set node properties based on command line options:
-- marks requested nodes offline and applies the mcpu/mdsk overrides.
-- Exits the program if an offline node name does not resolve.
setNodeStatus :: Options -> Node.List -> IO Node.List
setNodeStatus opts fixed_nl = do
  let offline_passed = optOffline opts
      all_nodes = Container.elems fixed_nl
      -- resolve each user-supplied name against the known node names
      offline_lkp = map (lookupName (map Node.name all_nodes)) offline_passed
      offline_wrong = filter (not . goodLookupResult) offline_lkp
      offline_names = map lrContent offline_lkp
      offline_indices = map Node.idx $
                        filter (\n -> Node.name n `elem` offline_names)
                               all_nodes
      m_cpu = optMcpu opts
      m_dsk = optMdsk opts

  -- abort early if any requested offline node does not exist
  unless (null offline_wrong) .
    exitErr $ printf "wrong node name(s) set as offline: %s\n"
              (commaJoin (map lrContent offline_wrong))
  let setMCpuFn = case m_cpu of
                    Nothing -> id
                    Just new_mcpu -> flip Node.setMcpu new_mcpu
  -- apply mcpu override, mdsk override and offline flag to every node
  let nm = Container.map (setNodeOffline offline_indices .
                          flip Node.setMdsk m_dsk .
                          setMCpuFn) fixed_nl
  return nm
| dblia/nosql-ganeti | src/Ganeti/HTools/CLI.hs | gpl-2.0 | 21,112 | 0 | 21 | 5,540 | 4,491 | 2,525 | 1,966 | 507 | 2 |
--
-- Copyright (c) 2015 Assured Information Security, Inc. <lejosnej@ainfosec.com>
-- Copyright (c) 2014 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE PatternGuards #-}
-- description: add default USB policy
-- date: 07/30/2015
module Migrations.M_26 (migration) where
import UpgradeEngine
-- | Migration record: upgrades the database from version 26 to 27 by
-- installing the default USB policy (see 'act').
migration = Migration {
      sourceVersion = 26
    , targetVersion = 27
    , actions = act
    }
-- | Install the default USB policy into the primary JSON tree.  The
-- settings below target independent paths; 'foldr (.) id' composes
-- them exactly like the original chain of '.', so the last list entry
-- is applied first.
act :: IO ()
act = xformPrimaryJSON (foldr (.) id settings)
  where
    settings =
      [ jsSet "/usb-rules/9999/command" (jsBoxString "allow")
      , jsSet "/usb-rules/9999/description" (jsBoxString "Allow everything else")
      , jsSet "/usb-rules/9900/device/keyboard" (jsBoxString "1")
      , jsSet "/usb-rules/9900/command" (jsBoxString "deny")
      , jsSet "/usb-rules/9900/description" (jsBoxString "Deny keyboard passthrough to all VMs")
      ]
| crogers1/manager | upgrade-db/Migrations/M_26.hs | gpl-2.0 | 1,684 | 0 | 13 | 381 | 158 | 94 | 64 | 14 | 1 |
module HplAssets.BPM.Parsers.XML.XmlBusinessProcess where
import Data.Maybe
import Text.XML.HXT.Core
import Text.XML.HXT.RelaxNG
import HplAssets.BPM.Types
import HplAssets.BPM.Parsers.XML.JPDLXmlBinding
import BasicTypes
bpSchema :: String
bpSchema = "schema-bpm.rng"
-- | Parse a business-process file and wrap the resulting model in
-- 'Success'.  The schema argument is accepted but currently unused.
parseBusinessProcessFile _schema fileName =
  fmap Success (parseBusinessProcessFile' fileName)
-- | Run the HXT unpickler over the given jPDL document and translate
-- the unpickled process definition into the internal BPM model.
-- NOTE(review): the single-element pattern match on the runX result is
-- partial -- it fails at runtime if validation/parsing does not yield
-- exactly one document.
parseBusinessProcessFile' fileName =
  do
    [x] <- runX ( xunpickleDocument xpXMLProcessDefinition [ withValidate yes
                                                           , withTrace 1
                                                           , withRemoveWS yes
                                                           , withPreserveComment yes
                                                           ] (createURI fileName) )
    let bpmodel = processDefinitionToBPM x
    return (BPM [bpmodel])
{-Returns the actual String or blank String-}
-- Idiom: this is exactly 'fromMaybe ""' from Data.Maybe (already
-- imported unqualified at the top of this module).
getMaybeString :: Maybe String -> String
getMaybeString = fromMaybe ""
{-Removes ocurrences of Nothing in a given list-}
-- Idiom: the hand-rolled recursion is exactly 'catMaybes' from
-- Data.Maybe (already imported), which keeps every 'Just' payload and
-- drops every 'Nothing', preserving order.
discardNothingFromList :: [Maybe a] -> [a]
discardNothingFromList = catMaybes
-- | Find the flow object with the given name, falling back to
-- @currentFlow@ when no named object matches.  'Start' and 'End'
-- markers carry no name and therefore never match.
--
-- Fixes: the original shadowed Prelude's 'filter' with a local helper,
-- tested emptiness via '== []' (needs Eq; 'null'/case is total and
-- constraint-free) and used partial 'head'.
findFlowObject :: [FlowObject] -> String -> FlowObject -> FlowObject
findFlowObject flows name currentFlow =
  case matches of
    []         -> currentFlow
    (flow : _) -> flow
  where
    -- The comprehension pattern silently skips Start/End constructors.
    matches = [ flow | flow@(FlowObject objName _ _ _) <- flows
                     , objName == name ]
{-Convert from JPDL BPM to Hepheastus BPM Model-}
-- | Translate an HXT-unpickled 'XMLProcessDefinition' into the internal
-- 'BusinessProcess': each jPDL node kind (state, decision, node,
-- task-node, fork, join) is mapped to a 'FlowObject' paired with its
-- outgoing XML transitions, and those pairs are then flattened into
-- 'Transition' values via 'mkTransition'.  Fields of the XML record
-- that are not consumed here (actions, timers, swimlanes, ...) are
-- pattern-bound but unused.
processDefinitionToBPM :: XMLProcessDefinition -> BusinessProcess
processDefinitionToBPM (XMLProcessDefinition pdActions
                                             pdCancelTimers
                                             pdCreateTimers
                                             pdDecicisions
                                             pdEndStates
                                             pdEvents
                                             pdExceptionHandlers
                                             pdForks
                                             pdJoins
                                             pdName
                                             pdNodes
                                             pdProcessStates
                                             pdScripts
                                             pdStartState
                                             pdStates
                                             pdSuperStates
                                             pdSwimlanes
                                             pdTaskNodes
                                             pdTasks) = (BusinessProcess (getMaybeString pdName) (BasicProcess) buildFlowObjects buildTransitions)
  where
    {-Flow objects functions-}
    buildFlowObjects = (map fst concatFlowObjectsTuples) --[Start] ++ (map fst concatFlowObjectsTuples) ++ [End]
    concatFlowObjectsTuples = convertStates ++ convertDecisions ++ convertNodes ++ convertTaskNodes ++ convertForks ++ convertJoins
    {-Flow objects functions - States conversion-}
    convertStates = [(startState pdStartState) ,(endState $ head pdEndStates)] ++ map convertState pdStates
    convertState (XMLState stName stAction stScript stCreateTimer stCancelTimer stTransitions stEvents stTimers stExceptionHandlers) = ((FlowObject (getMaybeString stName) Activity [] []), stTransitions)
    startState (XMLStartState ssName ssSwimLane ssTask ssTransitions ssEvents ssExceptionHandlers) = (Start, []) --((FlowObject (getMaybeString ssName) Activity [] []), ssTransitions)
    endState (XMLEndState esName esEvents esExceptionHandlers) = (End, []) --((FlowObject (getMaybeString esName) Activity [] []), [])
    {-Flow objects functions - Decision conversion-}
    convertDecisions = map convertDecision pdDecicisions
    convertDecision (XMLDecision dName dHandler dTransitions dEvents dTimers dExceptionHandlers) = ((FlowObject dName Gateway [] []), dTransitions)
    {-Flow objects functions - Node conversion-}
    convertNodes = map convertNode pdNodes
    convertNode (XMLNode nName nAction nScript nCreateTimer nCancelTimer nTransitions nEvents nTimers nExceptionHandlers) = ((FlowObject nName Activity [] []), nTransitions)
    {-Flow objects functions - Task-Node conversion-}
    convertTaskNodes = map convertTaskNode pdTaskNodes
    convertTaskNode (XMLTaskNode tnName tnSignal tnCreateTasks tnEndTasks tnTasks tnTransitions tnEvents tnTimers tnExceptionHandlers) = ((FlowObject (getMaybeString tnName) Activity [] []), tnTransitions)
    {-Flow objects functions - Fork conversion-}
    convertForks = map convertFork pdForks
    convertFork (XMLFork fName fScript fTransitions fEvents fTimers fExceptionHandlers) = ((FlowObject (getMaybeString fName) Gateway [] []), fTransitions)
    {-Flow objects functions - Join conversion-}
    convertJoins = map convertJoin pdJoins
    convertJoin (XMLJoin jName jTransitions jEvents jTimers jExceptionHandlers) = ((FlowObject jName Join [] []), jTransitions)
    {-Transitions related functions-}
    buildTransitions = discardNothingFromList $ [(mkTransition Start (fst $ startState pdStartState) ""),(mkTransition (fst $ endState $ head pdEndStates) End "")] ++ (map convertTransition $ concat (map createTransitionTuple $ concatFlowObjectsTuples))
      where
        createTransitionTuple (flowObject, transitions) = map buildTuple transitions
          where
            buildTuple transition = (transition, flowObject)
        convertTransition ((XMLTransition trName trTo trActions trScripts trCreateTimers trCancelTimers trExceptionHandlers), flowObject)= mkTransition flowObject (findFlowObject buildFlowObjects (getMaybeString trTo) flowObject) (getMaybeString trName)
| alessandroleite/hephaestus-pl | src/meta-hephaestus/HplAssets/BPM/Parsers/XML/XmlBusinessProcess.hs | lgpl-3.0 | 7,908 | 2 | 14 | 3,713 | 1,205 | 628 | 577 | 78 | 3 |
import System.Environment
import System.Exit
import System.IO
import System.Timeout
import Control.Monad
import Data.List
import Jana.Parser
import Jana.Eval (runProgram)
import Jana.Types (defaultOptions, EvalOptions(..))
import Jana.Invert
-- | Runtime configuration assembled from the command-line flags.
data Options = Options
  { timeOut :: Int              -- ^ timeout in seconds; negative disables it
  , invert :: Bool              -- ^ print the inverted program instead of running
  , evalOpts :: EvalOptions }   -- ^ options passed through to the evaluator
-- | Baseline options: no timeout, run (do not invert), default
-- evaluator settings.
defaults = Options
  { timeOut = -1
  , invert = False
  , evalOpts = defaultOptions }
usage = "usage: jana [options] <file>\n\
\options:\n\
\ -m use 32-bit modular arithmetic\n\
\ -tN timeout after N seconds\n\
\ -i print inverted program"
-- | Read the command line, splitting it into flags and file arguments;
-- on an invalid flag the error is printed and 'Nothing' is returned.
parseArgs :: IO (Maybe ([String], Options))
parseArgs = do
  (flags, files) <- fmap splitArgs getArgs
  case checkFlags flags of
    Left err -> do
      putStrLn err
      return Nothing
    Right opts ->
      return (Just (files, opts))
-- | Separate flag arguments (a leading @-@ followed by at least one
-- more character) from file arguments, preserving relative order.
--
-- Fixed: the original tested @head arg == '-'@, which crashes on an
-- empty argument string; 'isPrefixOf' is total.
splitArgs :: [String] -> ([String], [String])
splitArgs = partition isFlag
  where
    isFlag arg = "-" `isPrefixOf` arg && length arg > 1
-- | Validate and fold all flags left-to-right into an 'Options' value,
-- starting from 'defaults'; fails with the first offending flag's
-- error message.
checkFlags :: [String] -> Either String Options
checkFlags = foldM addOption defaults
-- | Interpret a single flag string, updating the accumulated options
-- or failing with a descriptive message.
addOption :: Options -> String -> Either String Options
addOption opts flag =
  case flag of
    "-m" -> Right opts { evalOpts = (evalOpts opts) { modInt = True } }
    "-i" -> Right opts { invert = True }
    '-' : 't' : time ->
      case reads time of
        [(timeVal, "")] -> Right opts { timeOut = timeVal }
        _               -> Left "non-number given to -t option"
    _ -> Left ("invalid option: " ++ flag)
-- | Read program text from the named file, or from standard input when
-- the name is @"-"@.
loadFile :: String -> IO String
loadFile name
  | name == "-" = getContents
  | otherwise   = readFile name
-- | Parse the given file and print its inversion; a parse error is
-- printed and terminates the process with exit code 1.
printInverted :: String -> IO ()
printInverted filename = do
  text <- loadFile filename
  case parseProgram filename text of
    Left err -> do
      print err
      exitWith (ExitFailure 1)
    Right prog ->
      print (invertProgram prog)
-- | Parse the given file and evaluate the program with the supplied
-- evaluator options; a parse error is printed and terminates the
-- process with exit code 1.
parseAndRun :: String -> EvalOptions -> IO ()
parseAndRun filename evalOptions = do
  text <- loadFile filename
  case parseProgram filename text of
    Left err -> do
      print err
      exitWith (ExitFailure 1)
    Right prog ->
      runProgram filename prog evalOptions
-- | Entry point: parse the command line, then either print the
-- inverted program or run the interpreter, optionally under a timeout.
main :: IO ()
main = do args <- parseArgs
          case args of
            Just ([file], Options { invert = True }) -> printInverted file
            Just ([file], opts) ->
              -- timeout takes microseconds; a negative timeOut never fires.
              do res <- timeout (timeOut opts * 1000000)
                        (parseAndRun file (evalOpts opts))
                 case res of
                   -- NOTE(review): 124 matches the coreutils timeout(1)
                   -- exit-code convention -- confirm intended.
                   Nothing -> exitWith $ ExitFailure 124
                   _ -> return ()
            _ -> putStrLn usage
| mbudde/jana | src/Main.hs | bsd-3-clause | 2,619 | 0 | 17 | 725 | 846 | 436 | 410 | 65 | 4 |
{-# LANGUAGE DeriveFunctor #-}
module Distribution.Client.Dependency.Modular.Package
(module Distribution.Client.Dependency.Modular.Package,
module Distribution.Package) where
import Data.List as L
import Distribution.Package -- from Cabal
import Distribution.Text -- from Cabal
import Distribution.Client.Dependency.Modular.Version
-- | A package name.
type PN = PackageName
-- | Unpacking a package name.
unPN :: PN -> String
unPN (PackageName pn) = pn
-- | Package version. A package name plus a version number.
type PV = PackageId
-- | Qualified package version.
type QPV = Q PV
-- | Package id. Currently just a black-box string.
type PId = InstalledPackageId
-- | Location. Info about whether a package is installed or not, and where
-- exactly it is located. For installed packages, uniquely identifies the
-- package instance via its 'PId'.
--
-- TODO: More information is needed about the repo.
data Loc = Inst PId | InRepo
deriving (Eq, Ord, Show)
-- | Instance. A version number and a location.
data I = I Ver Loc
deriving (Eq, Ord, Show)
-- | String representation of an instance.
showI :: I -> String
showI (I v InRepo) = showVer v
showI (I v (Inst (InstalledPackageId i))) = showVer v ++ "/installed" ++ shortId i
  where
    -- A hack to extract the beginning of the package ABI hash
    -- Composition runs right-to-left: first keep only the part of the
    -- id after the last '-' (re-prefixed with '-'), then truncate it to
    -- four characters, appending "..." when something was cut off.
    shortId = snip (splitAt 4) (++ "...") .
              snip ((\ (x, y) -> (reverse x, y)) . break (=='-') . reverse) ('-':)
    -- snip p f: split with p; apply f to the first half only when the
    -- second half is non-empty, and return the (possibly decorated)
    -- first half.
    snip p f xs = case p xs of
                    (ys, zs) -> (if L.null zs then id else f) ys
-- | Package instance. A package name and an instance.
data PI qpn = PI qpn I
deriving (Eq, Ord, Show, Functor)
-- | String representation of a package instance.
showPI :: PI QPN -> String
showPI (PI qpn i) = showQPN qpn ++ "-" ++ showI i
-- | Checks if a package instance corresponds to an installed package.
instPI :: PI qpn -> Bool
instPI (PI _ i) = instI i
-- | Checks if an instance is installed (as opposed to in a repository).
instI :: I -> Bool
instI (I _ loc) = case loc of
                    Inst _ -> True
                    InRepo -> False
-- | Package path.
--
-- Stored in reverse order
data PP =
-- User-specified independent goal
Independent Int PP
-- Setup dependencies are always considered independent from their package
| Setup PN PP
-- Any dependency on base is considered independent (allows for base shims)
| Base PN PP
-- Unqualified
| None
deriving (Eq, Ord, Show)
-- | Strip any 'Base' qualifiers from a PP
--
-- (the Base qualifier does not get inherited)
stripBase :: PP -> PP
stripBase path = case path of
  Independent i rest -> Independent i (stripBase rest)
  Setup pn rest      -> Setup pn (stripBase rest)
  Base _ rest        -> stripBase rest
  None               -> None
-- | String representation of a package path.
--
-- NOTE: This always ends in a period
showPP :: PP -> String
showPP path = case path of
  Independent i rest -> show i ++ "." ++ showPP rest
  Setup pn rest      -> display pn ++ ".setup." ++ showPP rest
  Base pn rest       -> display pn ++ "." ++ showPP rest
  None               -> ""
-- | A qualified entity. Pairs a package path with the entity.
data Q a = Q PP a
deriving (Eq, Ord, Show)
-- | Standard string representation of a qualified entity: the package
-- path (empty for unqualified entities) followed by the entity itself.
showQ :: (a -> String) -> (Q a -> String)
showQ showa (Q pp x) = qualifier ++ showa x
  where
    qualifier = case pp of
                  None -> ""
                  _    -> showPP pp
-- | Qualified package name.
type QPN = Q PN
-- | String representation of a qualified package path.
showQPN :: QPN -> String
showQPN = showQ display
-- | Create artificial parents for each of the package names, making
-- them all independent.
makeIndependent :: [PN] -> [QPN]
makeIndependent = zipWith qualify [0 :: Int ..]
  where
    qualify i pn = Q (Independent i None) pn
unQualify :: Q a -> a
unQualify (Q _ x) = x
| Helkafen/cabal | cabal-install/Distribution/Client/Dependency/Modular/Package.hs | bsd-3-clause | 3,824 | 0 | 15 | 941 | 1,018 | 555 | 463 | 64 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PolyKinds #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Type.Coercion
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : not portable
--
-- Definition of representational equality ('Coercion').
--
-- @since 4.7.0.0
-----------------------------------------------------------------------------
module Data.Type.Coercion
( Coercion(..)
, coerceWith
, sym
, trans
, repr
, TestCoercion(..)
) where
import qualified Data.Type.Equality as Eq
import Data.Maybe
import GHC.Enum
import GHC.Show
import GHC.Read
import GHC.Base
-- | Representational equality. If @Coercion a b@ is inhabited by some terminating
-- value, then the type @a@ has the same underlying representation as the type @b@.
--
-- To use this equality in practice, pattern-match on the @Coercion a b@ to get out
-- the @Coercible a b@ instance, and then use 'coerce' to apply it.
--
-- @since 4.7.0.0
data Coercion a b where
Coercion :: Coercible a b => Coercion a b
-- with credit to Conal Elliott for 'ty', Erik Hesselink & Martijn van
-- Steenbergen for 'type-equality', Edward Kmett for 'eq', and Gabor Greif
-- for 'type-eq'
-- | Type-safe cast, using representational equality
--
-- Matching on the 'Coercion' constructor brings the @Coercible a b@
-- dictionary into scope, which is what licenses the call to 'coerce'.
coerceWith :: Coercion a b -> a -> b
coerceWith Coercion x = coerce x
-- | Symmetry of representational equality
sym :: Coercion a b -> Coercion b a
sym Coercion = Coercion
-- | Transitivity of representational equality
trans :: Coercion a b -> Coercion b c -> Coercion a c
trans Coercion Coercion = Coercion
-- | Convert propositional (nominal) equality to representational equality
repr :: (a Eq.:~: b) -> Coercion a b
repr Eq.Refl = Coercion
deriving instance Eq (Coercion a b)
deriving instance Show (Coercion a b)
deriving instance Ord (Coercion a b)
-- | @since 4.7.0.0
instance Coercible a b => Read (Coercion a b) where
readsPrec d = readParen (d > 10) (\r -> [(Coercion, s) | ("Coercion",s) <- lex r ])
-- | @since 4.7.0.0
instance Coercible a b => Enum (Coercion a b) where
toEnum 0 = Coercion
toEnum _ = errorWithoutStackTrace "Data.Type.Coercion.toEnum: bad argument"
fromEnum Coercion = 0
-- | @since 4.7.0.0
deriving instance Coercible a b => Bounded (Coercion a b)
-- | This class contains types where you can learn the equality of two types
-- from information contained in /terms/. Typically, only singleton types should
-- inhabit this class.
class TestCoercion f where
-- | Conditionally prove the representational equality of @a@ and @b@.
testCoercion :: f a -> f b -> Maybe (Coercion a b)
-- | @since 4.7.0.0
instance TestCoercion ((Eq.:~:) a) where
testCoercion Eq.Refl Eq.Refl = Just Coercion
-- | @since 4.7.0.0
instance TestCoercion (Coercion a) where
testCoercion Coercion Coercion = Just Coercion
| vTurbine/ghc | libraries/base/Data/Type/Coercion.hs | bsd-3-clause | 3,215 | 0 | 12 | 597 | 573 | 320 | 253 | 48 | 1 |
{-# LANGUAGE Haskell2010, OverloadedStrings #-}
{-# LINE 1 "Network/Wai/Middleware/ForceDomain.hs" #-}
-- |
--
-- @since 3.0.14
module Network.Wai.Middleware.ForceDomain where
import Data.ByteString (ByteString)
import Data.Monoid ((<>), mempty)
import Network.HTTP.Types (hLocation, methodGet, status301, status307)
import Prelude
import Network.Wai
import Network.Wai.Request
-- | Force a domain by redirecting.
-- The `checkDomain` function takes the current domain and checks whether it is correct.
-- It should return `Nothing` if the domain is correct, or `Just "domain.com"` if it is incorrect.
--
-- @since 3.0.14
forceDomain :: (ByteString -> Maybe ByteString) -> Middleware
forceDomain checkDomain app req sendResponse =
    case requestHeaderHost req >>= checkDomain of
        Nothing ->
            app req sendResponse
        Just domain ->
            sendResponse $ redirectResponse domain
    where
        -- From: Network.Wai.Middleware.ForceSSL
        redirectResponse domain =
            responseBuilder status [(hLocation, location domain)] mempty
        -- Rebuild the full URL on the corrected host, preserving the
        -- request's scheme, path and query string.
        location h =
            let p = if appearsSecure req then "https://" else "http://" in
            p <> h <> rawPathInfo req <> rawQueryString req
        -- 301 for GET; 307 for other methods so the method and body are
        -- preserved across the redirect.
        status
            | requestMethod req == methodGet = status301
            | otherwise = status307
| phischu/fragnix | tests/packages/scotty/Network.Wai.Middleware.ForceDomain.hs | bsd-3-clause | 1,343 | 0 | 12 | 306 | 253 | 139 | 114 | 24 | 3 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Network/HPACK.hs" #-}
{-# LANGUAGE CPP #-}
-- | HPACK(<https://tools.ietf.org/html/rfc7541>) encoding and decoding a header list.
module Network.HPACK (
-- * Encoding and decoding
encodeHeader
, decodeHeader
-- * Encoding and decoding with token
, encodeTokenHeader
, decodeTokenHeader
-- * DynamicTable
, DynamicTable
, defaultDynamicTableSize
, newDynamicTableForEncoding
, newDynamicTableForDecoding
, clearDynamicTable
, withDynamicTableForEncoding
, withDynamicTableForDecoding
, setLimitForEncoding
-- * Strategy for encoding
, CompressionAlgo(..)
, EncodeStrategy(..)
, defaultEncodeStrategy
-- * Errors
, DecodeError(..)
, BufferOverrun(..)
-- * Headers
, HeaderList
, Header
, HeaderName
, HeaderValue
, TokenHeaderList
, TokenHeader
-- * Value table
, ValueTable
, getHeaderValue
, toHeaderTable
-- * Basic types
, Size
, Index
, Buffer
, BufferSize
) where
import Network.HPACK.HeaderBlock
import Network.HPACK.Table
import Network.HPACK.Types
-- | Default dynamic table size.
-- The value is 4,096 bytes: an array has 128 entries.
--
-- >>> defaultDynamicTableSize
-- 4096
defaultDynamicTableSize :: Int
defaultDynamicTableSize = 4096
| phischu/fragnix | tests/packages/scotty/Network.HPACK.hs | bsd-3-clause | 1,340 | 0 | 5 | 307 | 159 | 112 | 47 | 39 | 1 |
{-# OPTIONS_GHC -Wno-missing-export-lists #-}
{-# OPTIONS_GHC -F -pgmF hspec-discover #-}
| pbrisbin/yesod-paginator | test/Spec.hs | mit | 90 | 0 | 2 | 10 | 4 | 3 | 1 | 2 | 0 |
{-# LANGUAGE CPP #-}
module Distribution.Client.Dependency.Modular.Preference where
-- Reordering or pruning the tree in order to prefer or make certain choices.
import qualified Data.List as L
import qualified Data.Map as M
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
import Control.Applicative
#endif
import qualified Data.Set as S
import Prelude hiding (sequence)
import Control.Monad.Reader hiding (sequence)
import Data.Ord
import Data.Map (Map)
import Data.Traversable (sequence)
import Distribution.Client.Dependency.Types
( PackageConstraint(..), PackagePreferences(..), InstalledPreference(..) )
import Distribution.Client.Types
( OptionalStanza(..) )
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.PSQ as P
import Distribution.Client.Dependency.Modular.Tree
import Distribution.Client.Dependency.Modular.Version
-- | Generic abstraction for strategies that just rearrange the package order.
-- Only packages that match the given predicate are reordered.
packageOrderFor :: (PN -> Bool) -> (PN -> I -> I -> Ordering) -> Tree a -> Tree a
packageOrderFor p cmp' = trav go
where
go (PChoiceF v@(Q _ pn) r cs)
| p pn = PChoiceF v r (P.sortByKeys (flip (cmp pn)) cs)
| otherwise = PChoiceF v r cs
go x = x
cmp :: PN -> POption -> POption -> Ordering
cmp pn (POption i _) (POption i' _) = cmp' pn i i'
-- | Prefer to link packages whenever possible
preferLinked :: Tree a -> Tree a
preferLinked = trav go
  where
    -- Re-sort the options of every package-choice node.
    go (PChoiceF qn a cs) = PChoiceF qn a (P.sortByKeys cmp cs)
    go x = x
    -- Options that are linked to another instance compare as smaller,
    -- so they come first.  NOTE(review): assumes P.sortByKeys sorts
    -- ascending by this comparator -- confirm against PSQ.
    cmp (POption _ linkedTo) (POption _ linkedTo') = cmpL linkedTo linkedTo'
    cmpL Nothing Nothing = EQ
    cmpL Nothing (Just _) = GT
    cmpL (Just _) Nothing = LT
    cmpL (Just _) (Just _) = EQ
-- | Ordering that treats preferred versions as greater than non-preferred
-- versions ('comparing' on whether the version satisfies the range).
preferredVersionsOrdering :: VR -> Ver -> Ver -> Ordering
preferredVersionsOrdering vr = comparing (checkVR vr)
-- | Traversal that tries to establish package preferences (not constraints).
-- Works by reordering choice nodes.
preferPackagePreferences :: (PN -> PackagePreferences) -> Tree a -> Tree a
preferPackagePreferences pcs = packageOrderFor (const True) preference
where
preference pn i1@(I v1 _) i2@(I v2 _) =
let PackagePreferences vr ipref = pcs pn
in preferredVersionsOrdering vr v1 v2 `mappend` -- combines lexically
locationsOrdering ipref i1 i2
-- Note that we always rank installed before uninstalled, and later
-- versions before earlier, but we can change the priority of the
-- two orderings.
locationsOrdering PreferInstalled v1 v2 =
preferInstalledOrdering v1 v2 `mappend` preferLatestOrdering v1 v2
locationsOrdering PreferLatest v1 v2 =
preferLatestOrdering v1 v2 `mappend` preferInstalledOrdering v1 v2
-- | Ordering that treats installed instances as greater than uninstalled
-- ones (True > False under 'comparing').
preferInstalledOrdering :: I -> I -> Ordering
preferInstalledOrdering = comparing isInstalled
  where
    isInstalled (I _ (Inst _)) = True
    isInstalled _              = False
-- | Compare instances by their version numbers.
preferLatestOrdering :: I -> I -> Ordering
preferLatestOrdering = comparing version
  where
    version (I v _) = v
-- | Helper function that tries to enforce a single package constraint on a
-- given instance for a P-node. Translates the constraint into a
-- tree-transformer that either leaves the subtree untouched, or replaces it
-- with an appropriate failure node.
processPackageConstraintP :: ConflictSet QPN -> I -> PackageConstraint -> Tree a -> Tree a
processPackageConstraintP c (I v _) (PackageConstraintVersion _ vr) r
| checkVR vr v = r
| otherwise = Fail c (GlobalConstraintVersion vr)
processPackageConstraintP c i (PackageConstraintInstalled _) r
| instI i = r
| otherwise = Fail c GlobalConstraintInstalled
processPackageConstraintP c i (PackageConstraintSource _) r
| not (instI i) = r
| otherwise = Fail c GlobalConstraintSource
processPackageConstraintP _ _ _ r = r
-- | Helper function that tries to enforce a single package constraint on a
-- given flag setting for an F-node. Translates the constraint into a
-- tree-transformer that either leaves the subtree untouched, or replaces it
-- with an appropriate failure node.
processPackageConstraintF :: Flag -> ConflictSet QPN -> Bool -> PackageConstraint -> Tree a -> Tree a
processPackageConstraintF f c b' (PackageConstraintFlags _ fa) r =
case L.lookup f fa of
Nothing -> r
Just b | b == b' -> r
| otherwise -> Fail c GlobalConstraintFlag
processPackageConstraintF _ _ _ _ r = r
-- | Helper function that tries to enforce a single package constraint on a
-- given stanza setting for an S-node.  Translates the constraint into a
-- tree-transformer that either leaves the subtree untouched, or replaces it
-- with an appropriate failure node.
processPackageConstraintS :: OptionalStanza -> ConflictSet QPN -> Bool -> PackageConstraint -> Tree a -> Tree a
processPackageConstraintS s c b' (PackageConstraintStanzas _ ss) r
  | s `elem` ss && not b' = Fail c GlobalConstraintFlag
  | otherwise             = r
processPackageConstraintS _ _ _ _ r = r
-- | Traversal that tries to establish various kinds of user constraints. Works
-- by selectively disabling choices that have been ruled out by global user
-- constraints.
enforcePackageConstraints :: M.Map PN [PackageConstraint] -> Tree QGoalReasonChain -> Tree QGoalReasonChain
enforcePackageConstraints pcs = trav go
where
go (PChoiceF qpn@(Q _ pn) gr ts) =
let c = toConflictSet (Goal (P qpn) gr)
-- compose the transformation functions for each of the relevant constraint
g = \ (POption i _) -> foldl (\ h pc -> h . processPackageConstraintP c i pc) id
(M.findWithDefault [] pn pcs)
in PChoiceF qpn gr (P.mapWithKey g ts)
go (FChoiceF qfn@(FN (PI (Q _ pn) _) f) gr tr m ts) =
let c = toConflictSet (Goal (F qfn) gr)
-- compose the transformation functions for each of the relevant constraint
g = \ b -> foldl (\ h pc -> h . processPackageConstraintF f c b pc) id
(M.findWithDefault [] pn pcs)
in FChoiceF qfn gr tr m (P.mapWithKey g ts)
go (SChoiceF qsn@(SN (PI (Q _ pn) _) f) gr tr ts) =
let c = toConflictSet (Goal (S qsn) gr)
-- compose the transformation functions for each of the relevant constraint
g = \ b -> foldl (\ h pc -> h . processPackageConstraintS f c b pc) id
(M.findWithDefault [] pn pcs)
in SChoiceF qsn gr tr (P.mapWithKey g ts)
go x = x
-- | Transformation that tries to enforce manual flags. Manual flags
-- can only be re-set explicitly by the user. This transformation should
-- be run after user preferences have been enforced. For manual flags,
-- it checks if a user choice has been made. If not, it disables all but
-- the first choice.
enforceManualFlags :: Tree QGoalReasonChain -> Tree QGoalReasonChain
enforceManualFlags = trav go
where
go (FChoiceF qfn gr tr True ts) = FChoiceF qfn gr tr True $
let c = toConflictSet (Goal (F qfn) gr)
in case span isDisabled (P.toList ts) of
([], y : ys) -> P.fromList (y : L.map (\ (b, _) -> (b, Fail c ManualFlag)) ys)
_ -> ts -- something has been manually selected, leave things alone
where
isDisabled (_, Fail _ GlobalConstraintFlag) = True
isDisabled _ = False
go x = x
-- | Prefer installed packages over non-installed packages, generally.
-- All installed packages or non-installed packages are treated as
-- equivalent.
preferInstalled :: Tree a -> Tree a
preferInstalled = packageOrderFor (const True) (const preferInstalledOrdering)
-- | Prefer packages with higher version numbers over packages with
-- lower version numbers, for certain packages.
preferLatestFor :: (PN -> Bool) -> Tree a -> Tree a
preferLatestFor p = packageOrderFor p (const preferLatestOrdering)
-- | Prefer packages with higher version numbers over packages with
-- lower version numbers, for all packages.
preferLatest :: Tree a -> Tree a
preferLatest = preferLatestFor (const True)
-- | Require installed packages.
requireInstalled :: (PN -> Bool) -> Tree QGoalReasonChain -> Tree QGoalReasonChain
requireInstalled p = trav go
where
go (PChoiceF v@(Q _ pn) gr cs)
| p pn = PChoiceF v gr (P.mapWithKey installed cs)
| otherwise = PChoiceF v gr cs
where
installed (POption (I _ (Inst _)) _) x = x
installed _ _ = Fail (toConflictSet (Goal (P v) gr)) CannotInstall
go x = x
-- | Avoid reinstalls.
--
-- This is a tricky strategy. If a package version is installed already and the
-- same version is available from a repo, the repo version will never be chosen.
-- This would result in a reinstall (either destructively, or potentially,
-- shadowing). The old instance won't be visible or even present anymore, but
-- other packages might have depended on it.
--
-- TODO: It would be better to actually check the reverse dependencies of installed
-- packages. If they're not depended on, then reinstalling should be fine. Even if
-- they are, perhaps this should just result in trying to reinstall those other
-- packages as well. However, doing this all neatly in one pass would require to
-- change the builder, or at least to change the goal set after building.
avoidReinstalls :: (PN -> Bool) -> Tree QGoalReasonChain -> Tree QGoalReasonChain
avoidReinstalls p = trav go
where
go (PChoiceF qpn@(Q _ pn) gr cs)
| p pn = PChoiceF qpn gr disableReinstalls
| otherwise = PChoiceF qpn gr cs
where
disableReinstalls =
let installed = [ v | (POption (I v (Inst _)) _, _) <- toList cs ]
in P.mapWithKey (notReinstall installed) cs
notReinstall vs (POption (I v InRepo) _) _ | v `elem` vs =
Fail (toConflictSet (Goal (P qpn) gr)) CannotReinstall
notReinstall _ _ x =
x
go x = x
-- | Always choose the first goal in the list next, abandoning all
-- other choices.
--
-- This is unnecessary for the default search strategy, because
-- it descends only into the first goal choice anyway,
-- but may still make sense to just reduce the tree size a bit.
firstGoal :: Tree a -> Tree a
firstGoal = trav go
where
go (GoalChoiceF xs) = -- casePSQ xs (GoalChoiceF xs) (\ _ t _ -> out t) -- more space efficient, but removes valuable debug info
casePSQ xs (GoalChoiceF (fromList [])) (\ g t _ -> GoalChoiceF (fromList [(g, t)]))
go x = x
-- Note that we keep empty choice nodes, because they mean success.
-- | Transformation that tries to make a decision on base as early as
-- possible. In nearly all cases, there's a single choice for the base
-- package. Also, fixing base early should lead to better error messages.
preferBaseGoalChoice :: Tree a -> Tree a
preferBaseGoalChoice = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortByKeys preferBase xs)
go x = x
preferBase :: OpenGoal comp -> OpenGoal comp -> Ordering
preferBase (OpenGoal (Simple (Dep (Q _pp pn) _) _) _) _ | unPN pn == "base" = LT
preferBase _ (OpenGoal (Simple (Dep (Q _pp pn) _) _) _) | unPN pn == "base" = GT
preferBase _ _ = EQ
-- | Deal with setup dependencies after regular dependencies, so that we can
-- will link setup depencencies against package dependencies when possible
deferSetupChoices :: Tree a -> Tree a
deferSetupChoices = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortByKeys deferSetup xs)
go x = x
deferSetup :: OpenGoal comp -> OpenGoal comp -> Ordering
deferSetup (OpenGoal (Simple (Dep (Q (Setup _ _) _) _) _) _) _ = GT
deferSetup _ (OpenGoal (Simple (Dep (Q (Setup _ _) _) _) _) _) = LT
deferSetup _ _ = EQ
-- | Transformation that sorts choice nodes so that
-- child nodes with a small branching degree are preferred. As a
-- special case, choices with 0 branches will be preferred (as they
-- are immediately considered inconsistent), and choices with 1
-- branch will also be preferred (as they don't involve choice).
preferEasyGoalChoices :: Tree a -> Tree a
preferEasyGoalChoices = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortBy (comparing choices) xs)
go x = x
-- | Transformation that tries to avoid making weak flag choices early.
-- Weak flags are trivial flags (not influencing dependencies) or such
-- flags that are explicitly declared to be weak in the index.
deferWeakFlagChoices :: Tree a -> Tree a
deferWeakFlagChoices = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortBy defer xs)
go x = x
defer :: Tree a -> Tree a -> Ordering
defer (FChoice _ _ True _ _) _ = GT
defer _ (FChoice _ _ True _ _) = LT
defer _ _ = EQ
-- | Variant of 'preferEasyGoalChoices'.
--
-- Only approximates the number of choices in the branches. Less accurate,
-- more efficient.
lpreferEasyGoalChoices :: Tree a -> Tree a
lpreferEasyGoalChoices = trav go
where
go (GoalChoiceF xs) = GoalChoiceF (P.sortBy (comparing lchoices) xs)
go x = x
-- | Variant of 'preferEasyGoalChoices'.
--
-- I first thought that using a paramorphism might be faster here,
-- but it doesn't seem to make any difference.
preferEasyGoalChoices' :: Tree a -> Tree a
preferEasyGoalChoices' = para (inn . go)
where
go (GoalChoiceF xs) = GoalChoiceF (P.map fst (P.sortBy (comparing (choices . snd)) xs))
go x = fmap fst x
-- | Monad used internally in enforceSingleInstanceRestriction
--
-- A reader over the map from already-claimed package instances to the
-- qualified goal that claimed them.
type EnforceSIR = Reader (Map (PI PN) QPN)
-- | Enforce ghc's single instance restriction
--
-- From the solver's perspective, this means that for any package instance
-- (that is, package name + package version) there can be at most one qualified
-- goal resolving to that instance (there may be other goals _linking_ to that
-- instance however).  Violations are turned into 'Fail' nodes.
enforceSingleInstanceRestriction :: Tree QGoalReasonChain -> Tree QGoalReasonChain
enforceSingleInstanceRestriction = (`runReader` M.empty) . cata go
  where
    go :: TreeF QGoalReasonChain (EnforceSIR (Tree QGoalReasonChain)) -> EnforceSIR (Tree QGoalReasonChain)
    -- We just verify package choices.
    go (PChoiceF qpn gr cs) =
      PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn) cs)
    go _otherwise =
      innM _otherwise
    -- The check proper: record (or reject) the instance chosen for this goal.
    goP :: QPN -> POption -> EnforceSIR (Tree QGoalReasonChain) -> EnforceSIR (Tree QGoalReasonChain)
    goP qpn@(Q _ pn) (POption i linkedTo) r = do
      let inst = PI pn i
      env <- ask
      case (linkedTo, M.lookup inst env) of
        (Just _, _) ->
          -- For linked nodes we don't check anything
          r
        (Nothing, Nothing) ->
          -- Not linked, not already used
          local (M.insert inst qpn) r
        (Nothing, Just qpn') -> do
          -- Not linked, already used. This is an error
          return $ Fail (S.fromList [P qpn, P qpn']) MultipleInstances
| corngood/cabal | cabal-install/Distribution/Client/Dependency/Modular/Preference.hs | bsd-3-clause | 15,938 | 0 | 21 | 4,026 | 3,899 | 2,002 | 1,897 | 197 | 5 |
-- Copyright 2006-2008, Galois, Inc.
-- This software is distributed under a standard, three-clause BSD license.
-- Please see the file LICENSE, distributed with this software, for specific
-- terms and conditions.
-- An example showing how programs can interact with the Xenstore.
import Control.Concurrent
import Control.Exception
import Control.Monad
import Hypervisor.Console
import Hypervisor.Debug
import Hypervisor.ErrorCodes
import Hypervisor.XenStore
import System.FilePath
import Prelude hiding (getLine)
-- | Entry point: connect to the Xen console and XenStore, look up this
-- domain's id and its XenStore path, print a greeting, and enter the
-- interactive command loop rooted at that path.
main :: IO ()
main = do
  con <- initXenConsole
  xs <- initXenStore
  me <- xsGetDomId xs
  here <- xsGetDomainPath xs me
  writeConsole con ("Hello! This is an interactive XenStore thing for " ++
                    show me ++ "\n")
  writeConsole con ("Valid commands: quit, ls, cd\n\n")
  writeDebugConsole "Starting interaction loop!\n"
  runPrompt con xs here
-- | Interactive command loop. Shows the current XenStore path as the
-- prompt and understands three commands: @quit@ (exit the loop), @ls@
-- (list keys and values under the current path, columnated), and
-- @cd dir@ / @cd ..@ (move around the tree). Anything else reports an
-- error and re-prompts.
runPrompt :: Console -> XenStore -> FilePath -> IO ()
runPrompt con xs here = do
  writeConsole con (here ++ "> ")
  inquery <- getLine con
  case words inquery of
    ("quit":_) -> return ()
    ("ls"  :_) -> do
      -- Drop empty entries, then pair each key with its value, padded
      -- to fixed column widths for display.
      contents <- filter (/= "") `fmap` xsDirectory xs here
      values <- mapM (getValue xs) (map (here </>) contents)
      let contents' = map (forceSize 25) contents
          values' = map (forceSize 40) values
      forM_ (zip contents' values') $ \ (key, value) ->
        writeConsole con (key ++ " ==> " ++ value ++ "\n")
      runPrompt con xs here
    ("cd"  :x:_) -> do
      case x of
        ".." -> runPrompt con xs (takeDirectory here)
        d -> runPrompt con xs (here </> d)
    _ -> do writeConsole con "Unrecognized command.\n"
            runPrompt con xs here
-- | Read the value stored at @key@, rendering failures as
-- \"\<read error\>\" and empty values as \"\<empty\>\" so the listing
-- always has something to show.
getValue :: XenStore -> String -> IO String
getValue store key = handle onError (fmap describe (xsRead store key))
  where
    -- Any XenStore error collapses to a placeholder string.
    onError :: ErrorCode -> IO String
    onError _ = return "<read error>"
    -- Make empty values visible in the listing.
    describe v = if null v then "<empty>" else v
-- | Force a string to exactly @n@ characters: longer strings keep their
-- tail and gain a \"...\" prefix, shorter strings are right-padded with
-- spaces, and strings of exactly @n@ characters pass through unchanged.
forceSize :: Int -> String -> String
forceSize n str =
  case compare (length str) n of
    GT -> "..." ++ drop (length str - (n - 3)) str
    LT -> str ++ replicate (n - length str) ' '
    EQ -> str
-- | Read one line from the Xen console, echoing each character as it is
-- typed. A carriage return ends the line (echoed as a newline) and is
-- not included in the result. Shadows 'Prelude.getLine' (hidden above).
getLine :: Console -> IO String
getLine con = do
  nextC <- readConsole con 1
  writeConsole con nextC
  case nextC of
    "\r" -> writeConsole con "\n" >> return ""
    [x] -> (x:) `fmap` getLine con
    -- readConsole was asked for exactly one character; anything else is
    -- a protocol violation.
    _ -> fail "More than one character back?"
| GaloisInc/HaLVM | examples/Core/Xenstore/Xenstore.hs | bsd-3-clause | 2,406 | 2 | 18 | 577 | 802 | 388 | 414 | 59 | 5 |
-- | Type aliases for binary comparison operators, with helpers that
-- lift a projection function into each kind of comparison.
module OpTypes where

-- | Binary equality predicate over @a@.
type EqOp a = a -> a -> Bool

-- | Binary ordering predicate (less-than-or-equal) over @a@.
type OrdOp a = a -> a -> Bool

-- | Three-way comparison over @a@.
type CmpOp a = a -> a -> Ordering

-- | Equality test that compares values after projecting through @f@.
eqBy :: Eq a => (b -> a) -> EqOp b
eqBy f = \x y -> f x == f y

-- | Ordering test that compares values after projecting through @f@.
ordBy :: Ord a => (b -> a) -> OrdOp b
ordBy f = \x y -> f x <= f y

-- | Three-way comparison of values after projecting through @f@.
cmpBy :: Ord a => (b -> a) -> CmpOp b
cmpBy f = \x y -> compare (f x) (f y)
| forste/haReFork | tools/base/lib/OpTypes.hs | bsd-3-clause | 336 | 0 | 8 | 123 | 195 | 101 | 94 | 10 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser}
-}
{-# LANGUAGE CPP #-}
module WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs
, deepSplitProductType_maybe, findTypeShape
) where
#include "HsVersions.h"
import CoreSyn
import CoreUtils ( exprType, mkCast )
import Id ( Id, idType, mkSysLocal, idDemandInfo, setIdDemandInfo,
setIdUnfolding,
setIdInfo, idOneShotInfo, setIdOneShotInfo
)
import IdInfo ( vanillaIdInfo )
import DataCon
import Demand
import MkCore ( mkRuntimeErrorApp, aBSENT_ERROR_ID )
import MkId ( voidArgId, voidPrimId )
import TysPrim ( voidPrimTy )
import TysWiredIn ( tupleCon )
import Type
import Coercion hiding ( substTy, substTyVarBndr )
import FamInstEnv
import BasicTypes ( TupleSort(..), OneShotInfo(..), worstOneShot )
import Literal ( absentLiteralOf )
import TyCon
import UniqSupply
import Unique
import Maybes
import Util
import Outputable
import DynFlags
import FastString
{-
************************************************************************
* *
\subsection[mkWrapperAndWorker]{@mkWrapperAndWorker@}
* *
************************************************************************
Here's an example. The original function is:
\begin{verbatim}
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
From this, we want to produce:
\begin{verbatim}
-- wrapper (an unfolding)
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
I# x# -> $wg a x# ys
-- call the worker; don't forget the type args!
-- worker
$wg :: forall a . Int# -> [a] -> a
$wg = \/\ a -> \ x# ys ->
let
x = I# x#
in
case x of -- note: body of g moved intact
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
Something we have to be careful about: Here's an example:
\begin{verbatim}
-- "f" strictness: U(P)U(P)
f (I# a) (I# b) = a +# b
g = f -- "g" strictness same as "f"
\end{verbatim}
\tr{f} will get a worker all nice and friendly-like; that's good.
{\em But we don't want a worker for \tr{g}}, even though it has the
same strictness as \tr{f}. Doing so could break laziness, at best.
Consequently, we insist that the number of strictness-info items is
exactly the same as the number of lambda-bound arguments. (This is
probably slightly paranoid, but OK in practice.) If it isn't the
same, we ``revise'' the strictness info, so that we won't propagate
the unusable strictness-info into the interfaces.
************************************************************************
* *
\subsection{The worker wrapper core}
* *
************************************************************************
@mkWwBodies@ is called when doing the worker\/wrapper split inside a module.
-}
-- | Compute the pieces of a worker/wrapper split for one function:
-- the worker's argument demands, a function producing the wrapper body
-- from the worker Id, and a function producing the worker body from the
-- original RHS.  Returns Nothing when the split would not be useful.
mkWwBodies :: DynFlags
           -> FamInstEnvs
           -> Type                         -- Type of original function
           -> [Demand]                     -- Strictness of original function
           -> DmdResult                    -- Info about function result
           -> [OneShotInfo]                -- One-shot-ness of the function, value args only
           -> UniqSM (Maybe ([Demand],     -- Demands for worker (value) args
                             Id -> CoreExpr,          -- Wrapper body, lacking only the worker Id
                             CoreExpr -> CoreExpr))   -- Worker body, lacking the original function rhs
-- wrap_fn_args E = \x y -> E
-- work_fn_args E = E x y
-- wrap_fn_str E  = case x of { (a,b) ->
--                  case a of { (a1,a2) ->
--                  E a1 a2 b y }}
-- work_fn_str E  = \a1 a2 b y ->
--                  let a = (a1,a2) in
--                  let x = (a,b) in
--                  E
mkWwBodies dflags fam_envs fun_ty demands res_info one_shots
  = do { let arg_info = demands `zip` (one_shots ++ repeat NoOneShotInfo)
             all_one_shots = foldr (worstOneShot . snd) OneShotLam arg_info
       ; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs emptyTvSubst fun_ty arg_info
       ; (useful1, work_args, wrap_fn_str, work_fn_str) <- mkWWstr dflags fam_envs wrap_args
         -- Do CPR w/w.  See Note [Always do CPR w/w]
       ; (useful2, wrap_fn_cpr, work_fn_cpr, cpr_res_ty) <- mkWWcpr fam_envs res_ty res_info
       ; let (work_lam_args, work_call_args) = mkWorkerArgs dflags work_args all_one_shots cpr_res_ty
             worker_args_dmds = [idDemandInfo v | v <- work_call_args, isId v]
             wrapper_body = wrap_fn_args . wrap_fn_cpr . wrap_fn_str . applyToVars work_call_args . Var
             worker_body = mkLams work_lam_args. work_fn_str . work_fn_cpr . work_fn_args
       ; if useful1 && not (only_one_void_argument) || useful2
         then return (Just (worker_args_dmds, wrapper_body, worker_body))
         else return Nothing
       }
  -- We use an INLINE unconditionally, even if the wrapper turns out to be
  -- something trivial like
  --   fw = ...
  --   f = __inline__ (coerce T fw)
  -- The point is to propagate the coerce to f's call sites, so even though
  -- f's RHS is now trivial (size 1) we still want the __inline__ to prevent
  -- fw from being inlined into f's RHS
  where
    -- Note [Do not split void functions]
    only_one_void_argument
      | [d] <- demands
      , Just (arg_ty1, _) <- splitFunTy_maybe fun_ty
      , isAbsDmd d && isVoidTy arg_ty1
      = True
      | otherwise
      = False
{-
Note [Always do CPR w/w]
~~~~~~~~~~~~~~~~~~~~~~~~
At one time we refrained from doing CPR w/w for thunks, on the grounds that
we might duplicate work. But that is already handled by the demand analyser,
which doesn't give the CPR proprety if w/w might waste work: see
Note [CPR for thunks] in DmdAnal.
And if something *has* been given the CPR property and we don't w/w, it's
a disaster, because then the enclosing function might say it has the CPR
property, but now doesn't and there a cascade of disaster. A good example
is Trac #5920.
************************************************************************
* *
\subsection{Making wrapper args}
* *
************************************************************************
During worker-wrapper stuff we may end up with an unlifted thing
which we want to let-bind without losing laziness. So we
add a void argument. E.g.
f = /\a -> \x y z -> E::Int# -- E does not mention x,y,z
==>
fw = /\ a -> \void -> E
f = /\ a -> \x y z -> fw realworld
We use the state-token type which generates no code.
-}
-- | If the worker would end up with no value arguments at all, add a
-- void argument so it stays a function instead of becoming a thunk.
-- See Note [Protecting the last value argument].
mkWorkerArgs :: DynFlags -> [Var]
             -> OneShotInfo  -- Whether all arguments are one-shot
             -> Type         -- Type of body
             -> ([Var],      -- Lambda bound args
                 [Var])      -- Args at call site
mkWorkerArgs dflags args all_one_shot res_ty
    | any isId args || not needsAValueLambda
    = (args, args)
    | otherwise
    = (args ++ [newArg], args ++ [voidPrimId])
    where
      needsAValueLambda =
        isUnLiftedType res_ty
        || not (gopt Opt_FunToThunk dflags)
           -- see Note [Protecting the last value argument]

      -- see Note [All One-Shot Arguments of a Worker]
      newArg = setIdOneShotInfo voidArgId all_one_shot
{-
Note [Protecting the last value argument]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the user writes (\_ -> E), they might be intentionally disallowing
the sharing of E. Since absence analysis and worker-wrapper are keen
to remove such unused arguments, we add in a void argument to prevent
the function from becoming a thunk.
The user can avoid adding the void argument with the -ffun-to-thunk
flag. However, this can create sharing, which may be bad in two ways. 1) It can
create a space leak. 2) It can prevent inlining *under a lambda*. If w/w
removes the last argument from a function f, then f now looks like a thunk, and
so f can't be inlined *under a lambda*.
Note [All One-Shot Arguments of a Worker]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sometimes, derived join-points are just lambda-lifted thunks, whose
only argument is of the unit type and is never used. This might
interfere with the absence analysis, basing on which results these
never-used arguments are eliminated in the worker. The additional
argument `all_one_shot` of `mkWorkerArgs` is to prevent this.
Example. Suppose we have
foo = \p(one-shot) q(one-shot). y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(one-shot). y + 3
But suppose foo didn't have all one-shot args:
foo = \p(not-one-shot) q(one-shot). expensive y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(not-one-shot). y + 3
If we made the void-arg one-shot we might inline an expensive
computation for y, which would be terrible!
************************************************************************
* *
\subsection{Coercion stuff}
* *
************************************************************************
We really want to "look through" coerces.
Reason: I've seen this situation:
let f = coerce T (\s -> E)
in \x -> case x of
p -> coerce T' f
q -> \s -> E2
r -> coerce T' f
If only we w/w'd f, we'd get
let f = coerce T (\s -> fw s)
fw = \s -> E
in ...
Now we'll inline f to get
let fw = \s -> E
in \x -> case x of
p -> fw
q -> \s -> E2
r -> fw
Now we'll see that fw has arity 1, and will arity expand
the \x to get what we want.
-}
-- mkWWargs just does eta expansion; it
-- is driven off the function type and arity.
-- It chomps bites off foralls, arrows, newtypes
-- and keeps repeating that until it's satisfied the supplied arity
mkWWargs :: TvSubst                 -- Freshening substitution to apply to the type
                                    --   See Note [Freshen type variables]
         -> Type                    -- The type of the function
         -> [(Demand,OneShotInfo)]  -- Demands and one-shot info for value arguments
         -> UniqSM ([Var],                -- Wrapper args
                    CoreExpr -> CoreExpr, -- Wrapper fn
                    CoreExpr -> CoreExpr, -- Worker fn
                    Type)                 -- Type of wrapper body

-- Base case: no more demanded arguments, nothing to eta-expand.
mkWWargs subst fun_ty arg_info
  | null arg_info
  = return ([], id, id, substTy subst fun_ty)

  -- Value argument: make a fresh binder carrying the demand/one-shot
  -- info, and recurse on the rest of the function type.
  | ((dmd,one_shot):arg_info') <- arg_info
  , Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
  = do { uniq <- getUniqueM
       ; let arg_ty' = substTy subst arg_ty
             id = mk_wrap_arg uniq arg_ty' dmd one_shot
       ; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
             <- mkWWargs subst fun_ty' arg_info'
       ; return (id : wrap_args,
                 Lam id . wrap_fn_args,
                 work_fn_args . (`App` varToCoreExpr id),
                 res_ty) }

  -- Type argument (forall): bind a (possibly freshened) type variable.
  | Just (tv, fun_ty') <- splitForAllTy_maybe fun_ty
  = do { let (subst', tv') = substTyVarBndr subst tv
             -- This substTyVarBndr clones the type variable when necessary
             -- See Note [Freshen type variables]
       ; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
             <- mkWWargs subst' fun_ty' arg_info
       ; return (tv' : wrap_args,
                 Lam tv' . wrap_fn_args,
                 work_fn_args . (`App` Type (mkTyVarTy tv')),
                 res_ty) }

  | Just (co, rep_ty) <- topNormaliseNewType_maybe fun_ty
        -- The newtype case is for when the function has
        -- a newtype after the arrow (rare)
        --
        -- It's also important when we have a function returning (say) a pair
        -- wrapped in a newtype, at least if CPR analysis can look
        -- through such newtypes, which it probably can since they are
        -- simply coerces.
  = do { (wrap_args, wrap_fn_args, work_fn_args, res_ty)
            <- mkWWargs subst rep_ty arg_info
       ; return (wrap_args,
                 \e -> Cast (wrap_fn_args e) (mkSymCo co),
                 \e -> work_fn_args (Cast e co),
                 res_ty) }

  | otherwise
  = WARN( True, ppr fun_ty )                    -- Should not happen: if there is a demand
    return ([], id, id, substTy subst fun_ty)   -- then there should be a function arrow
-- | Apply an expression to a sequence of variable arguments.
applyToVars :: [Var] -> CoreExpr -> CoreExpr
applyToVars = flip mkVarApps
-- | Make a fresh wrapper binder (named \"w\"), attaching the given
-- demand and one-shot information so later phases can see it.
mk_wrap_arg :: Unique -> Type -> Demand -> OneShotInfo -> Id
mk_wrap_arg uniq ty dmd one_shot
  = mkSysLocal (fsLit "w") uniq ty
        `setIdDemandInfo` dmd
        `setIdOneShotInfo` one_shot
{-
Note [Freshen type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we do a worker/wrapper split, we must not use shadowed names,
else we'll get
f = /\ a /\a. fw a a
which is obviously wrong.  Type variables can in principle shadow,
within a type (e.g. forall a. a -> forall a. a->a). But type
variables *are* mentioned in <blah>, so we must substitute.
That's why we carry the TvSubst through mkWWargs
************************************************************************
* *
\subsection{Strictness stuff}
* *
************************************************************************
-}
-- | Worker/wrapper strictness transformation over a whole argument
-- list: process each argument with 'mkWWstr_one' and compose the
-- resulting unboxing (wrapper) and reboxing (worker) transformations.
mkWWstr :: DynFlags
        -> FamInstEnvs
        -> [Var]                -- Wrapper args; have their demand info on them
                                --  *Includes type variables*
        -> UniqSM (Bool,        -- Is this useful
                   [Var],       -- Worker args
                   CoreExpr -> CoreExpr,    -- Wrapper body, lacking the worker call
                                            -- and without its lambdas
                                            -- This fn adds the unboxing
                   CoreExpr -> CoreExpr)    -- Worker body, lacking the original body of the function,
                                            -- and lacking its lambdas.
                                            -- This fn does the reboxing
mkWWstr _ _ []
  = return (False, [], nop_fn, nop_fn)

mkWWstr dflags fam_envs (arg : args) = do
    (useful1, args1, wrap_fn1, work_fn1) <- mkWWstr_one dflags fam_envs arg
    (useful2, args2, wrap_fn2, work_fn2) <- mkWWstr dflags fam_envs args
    return (useful1 || useful2, args1 ++ args2, wrap_fn1 . wrap_fn2, work_fn1 . work_fn2)
{-
Note [Unpacking arguments with product and polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The argument is unpacked in a case if it has a product type and has a
strict *and* used demand put on it. I.e., arguments, with demands such
as the following ones:
<S,U(U, L)>
<S(L,S),U>
will be unpacked, but
<S,U> or <B,U>
will not, because the pieces aren't used. This is quite important otherwise
we end up unpacking massive tuples passed to the bottoming function. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Does 'main' print "error 1" or "error no"? We don't really want 'f'
to unbox its second argument. This actually happened in GHC's own
source code, in Packages.applyPackageFlag, which ended up un-boxing
the enormous DynFlags tuple, and being strict in the
as-yet-un-filled-in pkgState files.
-}
----------------------
-- mkWWstr_one wrap_arg = (useful, work_args, wrap_fn, work_fn)
--   *  wrap_fn assumes wrap_arg is in scope,
--        brings into scope work_args (via cases)
--   * work_fn assumes work_args are in scope, and
--        brings into scope wrap_arg (via lets)
mkWWstr_one :: DynFlags -> FamInstEnvs -> Var
            -> UniqSM (Bool, [Var], CoreExpr -> CoreExpr, CoreExpr -> CoreExpr)
mkWWstr_one dflags fam_envs arg
  -- Type variables are passed through unchanged.
  | isTyVar arg
  = return (False, [arg], nop_fn, nop_fn)

  -- See Note [Worker-wrapper for bottoming functions]
  | isAbsDmd dmd
  , Just work_fn <- mk_absent_let dflags arg
     -- Absent case.  We can't always handle absence for arbitrary
     -- unlifted types, so we need to choose just the cases we can
     --- (that's what mk_absent_let does)
  = return (True, [], nop_fn, work_fn)

  -- See Note [Worthy functions for Worker-Wrapper split]
  | isSeqDmd dmd  -- `seq` demand; evaluate in wrapper in the hope
                  -- of dropping seqs in the worker
  = let arg_w_unf = arg `setIdUnfolding` evaldUnfolding
        -- Tell the worker arg that it's sure to be evaluated
        -- so that internal seqs can be dropped
    in return (True, [arg_w_unf], mk_seq_case arg, nop_fn)
       -- Pass the arg, anyway, even if it is in theory discarded
       -- Consider
       --   f x y = x `seq` y
       -- x gets a (Eval (Poly Abs)) demand, but if we fail to pass it to the worker
       -- we ABSOLUTELY MUST record that x is evaluated in the wrapper.
       -- Something like:
       --   f x y = x `seq` fw y
       --   fw y = let x{Evald} = error "oops" in (x `seq` y)
       -- If we don't pin on the "Evald" flag, the seq doesn't disappear, and
       -- we end up evaluating the absent thunk.
       -- But the Evald flag is pretty weird, and I worry that it might disappear
       -- during simplification, so for now I've just nuked this whole case

  -- Strict product demand: unbox in the wrapper, rebox in the worker.
  | isStrictDmd dmd
  , Just cs <- splitProdDmd_maybe dmd
      -- See Note [Unpacking arguments with product and polymorphic demands]
  , Just (data_con, inst_tys, inst_con_arg_tys, co)
             <- deepSplitProductType_maybe fam_envs (idType arg)
  , cs `equalLength` inst_con_arg_tys
      -- See Note [mkWWstr and unsafeCoerce]
  = do { (uniq1:uniqs) <- getUniquesM
       ; let unpk_args      = zipWith mk_ww_local uniqs inst_con_arg_tys
             unpk_args_w_ds = zipWithEqual "mkWWstr" set_worker_arg_info unpk_args cs
             unbox_fn       = mkUnpackCase (Var arg) co uniq1
                                           data_con unpk_args
             rebox_fn       = Let (NonRec arg con_app)
             con_app        = mkConApp2 data_con inst_tys unpk_args `mkCast` mkSymCo co
       ; (_, worker_args, wrap_fn, work_fn) <- mkWWstr dflags fam_envs unpk_args_w_ds
       ; return (True, worker_args, unbox_fn . wrap_fn, work_fn . rebox_fn) }
                          -- Don't pass the arg, rebox instead

  | otherwise   -- Other cases
  = return (False, [arg], nop_fn, nop_fn)

  where
    dmd = idDemandInfo arg
    one_shot = idOneShotInfo arg
        -- If the wrapper argument is a one-shot lambda, then
        -- so should (all) the corresponding worker arguments be
        -- This bites when we do w/w on a case join point
    set_worker_arg_info worker_arg demand
      = worker_arg `setIdDemandInfo` demand
                   `setIdOneShotInfo` one_shot
----------------------
-- | The do-nothing expression transformer, used when a stage of the
-- worker/wrapper split has no unboxing or reboxing to insert.
nop_fn :: CoreExpr -> CoreExpr
nop_fn = id
{-
Note [mkWWstr and unsafeCoerce]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By using unsafeCoerce, it is possible to make the number of demands fail to
match the number of constructor arguments; this happened in Trac #8037.
If so, the worker/wrapper split doesn't work right and we get a Core Lint
bug. The fix here is simply to decline to do w/w if that happens.
************************************************************************
* *
                 Type scrutiny that is specific to demand analysis
* *
************************************************************************
Note [Do not unpack class dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
f :: Ord a => [a] -> Int -> a
{-# INLINABLE f #-}
and we worker/wrapper f, we'll get a worker with an INLINABLE pragma
(see Note [Worker-wrapper for INLINABLE functions] in WorkWrap), which
can still be specialised by the type-class specialiser, something like
fw :: Ord a => [a] -> Int# -> a
BUT if f is strict in the Ord dictionary, we might unpack it, to get
fw :: (a->a->Bool) -> [a] -> Int# -> a
and the type-class specialiser can't specialise that. An example is
Trac #6056.
Moreover, dictionaries can have a lot of fields, so unpacking them can
increase closure sizes.
Conclusion: don't unpack dictionaries.
-}
deepSplitProductType_maybe :: FamInstEnvs -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If    deepSplitProductType_maybe ty = Just (dc, tys, arg_tys, co)
-- then  dc @ tys (args::arg_tys) :: rep_ty
--       co :: ty ~ rep_ty
-- Looks through type-family applications (via topNormaliseType_maybe)
-- to find a single-constructor data type; fails for class dictionaries.
deepSplitProductType_maybe fam_envs ty
  | let (co, ty1) = topNormaliseType_maybe fam_envs ty
                    `orElse` (mkReflCo Representational ty, ty)
  , Just (tc, tc_args) <- splitTyConApp_maybe ty1
  , Just con <- isDataProductTyCon_maybe tc
  , not (isClassTyCon tc)    -- See Note [Do not unpack class dictionaries]
  = Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitProductType_maybe _ _ = Nothing
deepSplitCprType_maybe :: FamInstEnvs -> ConTag -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If    deepSplitCprType_maybe n ty = Just (dc, tys, arg_tys, co)
-- then  dc @ tys (args::arg_tys) :: rep_ty
--       co :: ty ~ rep_ty
-- Like deepSplitProductType_maybe, but selects the constructor with the
-- given tag from a (possibly multi-constructor) data type.
deepSplitCprType_maybe fam_envs con_tag ty
  | let (co, ty1) = topNormaliseType_maybe fam_envs ty
                    `orElse` (mkReflCo Representational ty, ty)
  , Just (tc, tc_args) <- splitTyConApp_maybe ty1
  , isDataTyCon tc
  , let cons = tyConDataCons tc
  , cons `lengthAtLeast` con_tag  -- This might not be true if we import the
                                  -- type constructor via a .hs-boot file (#8743)
  , let con = cons !! (con_tag - fIRST_TAG)
  = Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitCprType_maybe _ _ _ = Nothing
findTypeShape :: FamInstEnvs -> Type -> TypeShape
-- Uncover the arrow and product shape of a type
-- The data type TypeShape is defined in Demand
-- See Note [Trimming a demand to a type] in Demand
findTypeShape fam_envs ty
  -- Foralls are transparent to the shape.
  | Just (_, ty') <- splitForAllTy_maybe ty
  = findTypeShape fam_envs ty'

  -- Single-constructor data types give a product shape.
  | Just (tc, tc_args) <- splitTyConApp_maybe ty
  , Just con <- isDataProductTyCon_maybe tc
  = TsProd (map (findTypeShape fam_envs) $ dataConInstArgTys con tc_args)

  -- Function types: recurse into the result.
  | Just (_, res) <- splitFunTy_maybe ty
  = TsFun (findTypeShape fam_envs res)

  -- Look through newtypes and type families.
  | Just (_, ty') <- topNormaliseType_maybe fam_envs ty
  = findTypeShape fam_envs ty'

  | otherwise
  = TsUnk
{-
************************************************************************
* *
\subsection{CPR stuff}
* *
************************************************************************
@mkWWcpr@ takes the worker/wrapper pair produced from the strictness
info and adds in the CPR transformation. The worker returns an
unboxed tuple containing non-CPR components. The wrapper takes this
tuple and re-produces the correct structured output.
The non-CPR results appear ordered in the unboxed tuple as if by a
left-to-right traversal of the result structure.
-}
mkWWcpr :: FamInstEnvs
        -> Type                          -- function body type
        -> DmdResult                     -- CPR analysis results
        -> UniqSM (Bool,                 -- Is w/w'ing useful?
                   CoreExpr -> CoreExpr, -- New wrapper
                   CoreExpr -> CoreExpr, -- New worker
                   Type)                 -- Type of worker's body
mkWWcpr fam_envs body_ty res
  = case returnsCPR_maybe res of
       Nothing      -> return (False, id, id, body_ty)  -- No CPR info
       Just con_tag | Just stuff <- deepSplitCprType_maybe fam_envs con_tag body_ty
                    -> mkWWcpr_help stuff
                    |  otherwise
                       -- See Note [non-algebraic or open body type warning]
                    -> WARN( True, text "mkWWcpr: non-algebraic or open body type" <+> ppr body_ty )
                       return (False, id, id, body_ty)
-- | Build the wrapper/worker transformations for the CPR split, given
-- the constructor, instantiation types, field types and coercion found
-- by 'deepSplitCprType_maybe'.
mkWWcpr_help :: (DataCon, [Type], [Type], Coercion)
             -> UniqSM (Bool, CoreExpr -> CoreExpr, CoreExpr -> CoreExpr, Type)
mkWWcpr_help (data_con, inst_tys, arg_tys, co)
  | [arg_ty1] <- arg_tys
  , isUnLiftedType arg_ty1
        -- Special case when there is a single result of unlifted type
        --
        -- Wrapper:     case (..call worker..) of x -> C x
        -- Worker:      case (   ..body..    ) of C x -> x
  = do { (work_uniq : arg_uniq : _) <- getUniquesM
       ; let arg     = mk_ww_local arg_uniq arg_ty1
             con_app = mkConApp2 data_con inst_tys [arg] `mkCast` mkSymCo co

       ; return ( True
                , \ wkr_call -> Case wkr_call arg (exprType con_app) [(DEFAULT, [], con_app)]
                , \ body     -> mkUnpackCase body co work_uniq data_con [arg] (Var arg)
                , arg_ty1 ) }

  | otherwise   -- The general case
        -- Wrapper: case (..call worker..) of (# a, b #) -> C a b
        -- Worker:  case (   ...body...  ) of C a b -> (# a, b #)
  = do { (work_uniq : uniqs) <- getUniquesM
       ; let (wrap_wild : args) = zipWith mk_ww_local uniqs (ubx_tup_ty : arg_tys)
             ubx_tup_con        = tupleCon UnboxedTuple (length arg_tys)
             ubx_tup_ty         = exprType ubx_tup_app
             ubx_tup_app        = mkConApp2 ubx_tup_con arg_tys args
             con_app            = mkConApp2 data_con    inst_tys args `mkCast` mkSymCo co

       ; return (True
                , \ wkr_call -> Case wkr_call wrap_wild (exprType con_app) [(DataAlt ubx_tup_con, args, con_app)]
                , \ body     -> mkUnpackCase body co work_uniq data_con args ubx_tup_app
                , ubx_tup_ty ) }
mkUnpackCase :: CoreExpr -> Coercion -> Unique -> DataCon -> [Id] -> CoreExpr -> CoreExpr
-- (mkUnpackCase e co uniq Con args body)
--      returns
-- case e |> co of bndr { Con args -> body }
mkUnpackCase (Tick tickish e) co uniq con args body   -- See Note [Profiling and unpacking]
  = Tick tickish (mkUnpackCase e co uniq con args body)
mkUnpackCase scrut co uniq boxing_con unpk_args body
  = Case casted_scrut bndr (exprType body)
         [(DataAlt boxing_con, unpk_args, body)]
  where
    casted_scrut = scrut `mkCast` co
    -- Fresh case binder of the (cast) scrutinee's type.
    bndr = mk_ww_local uniq (exprType casted_scrut)
{-
Note [non-algebraic or open body type warning]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a few cases where the W/W transformation is told that something
returns a constructor, but the type at hand doesn't really match this. One
real-world example involves unsafeCoerce:
  foo :: IO a
foo = unsafeCoerce c_exit
foreign import ccall "c_exit" c_exit :: IO ()
Here CPR will tell you that `foo` returns a () constructor for sure, but trying
to create a worker/wrapper for type `a` obviously fails.
(This was a real example until ee8e792 in libraries/base.)
It does not seem feasible to avoid all such cases already in the analyser (and
after all, the analysis is not really wrong), so we simply do nothing here in
mkWWcpr. But we still want to emit warning with -DDEBUG, to hopefully catch
other cases where something went avoidably wrong.
Note [Profiling and unpacking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the original function looked like
f = \ x -> {-# SCC "foo" #-} E
then we want the CPR'd worker to look like
\ x -> {-# SCC "foo" #-} (case E of I# x -> x)
and definitely not
\ x -> case ({-# SCC "foo" #-} E) of I# x -> x)
This transform doesn't move work or allocation
from one cost centre to another.
Later [SDM]: presumably this is because we want the simplifier to
eliminate the case, and the scc would get in the way? I'm ok with
including the case itself in the cost centre, since it is morally
part of the function (post transformation) anyway.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
Note [Absent errors]
~~~~~~~~~~~~~~~~~~~~
We make a new binding for Ids that are marked absent, thus
let x = absentError "x :: Int"
The idea is that this binding will never be used; but if it
buggily is used we'll get a runtime error message.
Coping with absence for *unlifted* types is important; see, for
example, Trac #4306. For these we find a suitable literal,
using Literal.absentLiteralOf. We don't have literals for
every primitive type, so the function is partial.
[I did try the experiment of using an error thunk for unlifted
things too, relying on the simplifier to drop it as dead code,
by making absentError
(a) *not* be a bottoming Id,
(b) be "ok for speculation"
But that relies on the simplifier finding that it really
is dead code, which is fragile, and indeed failed when
profiling is on, which disables various optimisations. So
using a literal will do.]
-}
-- | Bind an absent argument to a dummy value so that the worker need
-- not receive it: an absentError thunk for lifted types, a suitable
-- literal for unlifted types that have one, and voidPrimId for Void#.
-- Returns Nothing when no absent value is available.
-- See Note [Absent errors].
mk_absent_let :: DynFlags -> Id -> Maybe (CoreExpr -> CoreExpr)
mk_absent_let dflags arg
  | not (isUnLiftedType arg_ty)
  = Just (Let (NonRec arg abs_rhs))
  | Just tc <- tyConAppTyCon_maybe arg_ty
  , Just lit <- absentLiteralOf tc
  = Just (Let (NonRec arg (Lit lit)))
  | arg_ty `eqType` voidPrimTy
  = Just (Let (NonRec arg (Var voidPrimId)))
  | otherwise
  = WARN( True, ptext (sLit "No absent value for") <+> ppr arg_ty )
    Nothing
  where
    arg_ty  = idType arg
    abs_rhs = mkRuntimeErrorApp aBSENT_ERROR_ID arg_ty msg
    msg     = showSDoc dflags (ppr arg <+> ppr (idType arg))
-- | Build @case arg of _ { DEFAULT -> body }@, forcing 'arg' to WHNF
-- before the body runs (a `seq` in Core form).
mk_seq_case :: Id -> CoreExpr -> CoreExpr
mk_seq_case arg body = Case (Var arg) (sanitiseCaseBndr arg) (exprType body) [(DEFAULT, [], body)]
sanitiseCaseBndr :: Id -> Id
-- The argument we are scrutinising has the right type to be
-- a case binder, so it's convenient to re-use it for that purpose.
-- But we *must* throw away all its IdInfo.  In particular, the argument
-- will have demand info on it, and that demand info may be incorrect for
-- the case binder.  e.g.   case ww_arg of ww_arg { I# x -> ... }
-- Quite likely ww_arg isn't used in '...'.  The case may get discarded
-- if the case binder says "I'm demanded".  This happened in a situation
-- like         (x+y) `seq` ....
sanitiseCaseBndr id = id `setIdInfo` vanillaIdInfo
-- | Make a fresh system-generated local Id (named \"ww\") of the given type.
mk_ww_local :: Unique -> Type -> Id
mk_ww_local uniq ty = mkSysLocal (fsLit "ww") uniq ty
| green-haskell/ghc | compiler/stranal/WwLib.hs | bsd-3-clause | 31,719 | 0 | 15 | 9,473 | 3,826 | 2,087 | 1,739 | 264 | 2 |
{-# LANGUAGE GADTs #-}
module CmmSink (
cmmSink
) where
import Cmm
import CmmOpt
import BlockId
import CmmLive
import CmmUtils
import Hoopl
import CodeGen.Platform
import Platform (isARM, platformArch)
import DynFlags
import UniqFM
import PprCmm ()
import Data.List (partition)
import qualified Data.Set as Set
import Data.Maybe
-- -----------------------------------------------------------------------------
-- Sinking and inlining
-- This is an optimisation pass that
-- (a) moves assignments closer to their uses, to reduce register pressure
-- (b) pushes assignments into a single branch of a conditional if possible
-- (c) inlines assignments to registers that are mentioned only once
-- (d) discards dead assignments
--
-- This tightens up lots of register-heavy code. It is particularly
-- helpful in the Cmm generated by the Stg->Cmm code generator, in
-- which every function starts with a copyIn sequence like:
--
-- x1 = R1
-- x2 = Sp[8]
-- x3 = Sp[16]
-- if (Sp - 32 < SpLim) then L1 else L2
--
-- we really want to push the x1..x3 assignments into the L2 branch.
--
-- Algorithm:
--
-- * Start by doing liveness analysis.
--
-- * Keep a list of assignments A; earlier ones may refer to later ones.
-- Currently we only sink assignments to local registers, because we don't
-- have liveness information about global registers.
--
-- * Walk forwards through the graph, look at each node N:
--
-- * If it is a dead assignment, i.e. assignment to a register that is
-- not used after N, discard it.
--
-- * Try to inline based on current list of assignments
-- * If any assignments in A (1) occur only once in N, and (2) are
-- not live after N, inline the assignment and remove it
-- from A.
--
-- * If an assignment in A is cheap (RHS is local register), then
-- inline the assignment and keep it in A in case it is used afterwards.
--
-- * Otherwise don't inline.
--
-- * If N is assignment to a local register pick up the assignment
-- and add it to A.
--
-- * If N is not an assignment to a local register:
-- * remove any assignments from A that conflict with N, and
-- place them before N in the current block. We call this
-- "dropping" the assignments.
--
-- * An assignment conflicts with N if it:
-- - assigns to a register mentioned in N
-- - mentions a register assigned by N
-- - reads from memory written by N
-- * do this recursively, dropping dependent assignments
--
-- * At an exit node:
-- * drop any assignments that are live on more than one successor
-- and are not trivial
-- * if any successor has more than one predecessor (a join-point),
-- drop everything live in that successor. Since we only propagate
-- assignments that are not dead at the successor, we will therefore
-- eliminate all assignments dead at this point. Thus analysis of a
-- join-point will always begin with an empty list of assignments.
--
--
-- As a result of above algorithm, sinking deletes some dead assignments
-- (transitively, even). This isn't as good as removeDeadAssignments,
-- but it's much cheaper.
-- -----------------------------------------------------------------------------
-- things that we aren't optimising very well yet.
--
-- -----------
-- (1) From GHC's FastString.hashStr:
--
-- s2ay:
-- if ((_s2an::I64 == _s2ao::I64) >= 1) goto c2gn; else goto c2gp;
-- c2gn:
-- R1 = _s2au::I64;
-- call (I64[Sp])(R1) args: 8, res: 0, upd: 8;
-- c2gp:
-- _s2cO::I64 = %MO_S_Rem_W64(%MO_UU_Conv_W8_W64(I8[_s2aq::I64 + (_s2an::I64 << 0)]) + _s2au::I64 * 128,
-- 4091);
-- _s2an::I64 = _s2an::I64 + 1;
-- _s2au::I64 = _s2cO::I64;
-- goto s2ay;
--
-- a nice loop, but we didn't eliminate the silly assignment at the end.
-- See Note [dependent assignments], which would probably fix this.
-- This is #8336 on Trac.
--
-- -----------
-- (2) From stg_atomically_frame in PrimOps.cmm
--
-- We have a diamond control flow:
--
-- x = ...
-- |
-- / \
-- A B
-- \ /
-- |
-- use of x
--
-- Now x won't be sunk down to its use, because we won't push it into
-- both branches of the conditional. We certainly do have to check
-- that we can sink it past all the code in both A and B, but having
-- discovered that, we could sink it to its use.
--
-- -----------------------------------------------------------------------------
-- | One pending assignment: target register, its right-hand side, and
-- an abstraction ('AbsMem') of the memory the RHS reads.
type Assignment = (LocalReg, CmmExpr, AbsMem)
  -- Assignment caches AbsMem, an abstraction of the memory read by
  -- the RHS of the assignment, so conflict tests need not recompute it.

type Assignments = [Assignment]
  -- A sequence of assignments; kept in *reverse* order
  -- So the list [ x=e1, y=e2 ] means the sequence of assignments
  --     y = e2
  --     x = e1
-- | The sinking pass proper.  For each block (visited in postorder DFS
-- so predecessors come before successors), pick up assignments to local
-- registers, inline them where profitable, and drop them where they
-- conflict or would be duplicated across branches.
cmmSink :: DynFlags -> CmmGraph -> CmmGraph
cmmSink dflags graph = ofBlockList (g_entry graph) $ sink mapEmpty $ blocks
  where
  liveness = cmmLocalLiveness dflags graph
  getLive l = mapFindWithDefault Set.empty l liveness

  blocks = postorderDfs graph
  join_pts = findJoinPoints blocks

  -- 'sunk' maps a block label to the assignments sunk into that block
  -- from its predecessor.
  sink :: BlockEnv Assignments -> [CmmBlock] -> [CmmBlock]
  sink _ [] = []
  sink sunk (b:bs) =
    -- pprTrace "sink" (ppr lbl) $
    blockJoin first final_middle final_last : sink sunk' bs
    where
      lbl = entryLabel b
      (first, middle, last) = blockSplit b

      succs = successors last

      -- Annotate the middle nodes with the registers live *after*
      -- the node. This will help us decide whether we can inline
      -- an assignment in the current node or not.
      live = Set.unions (map getLive succs)
      live_middle = gen_kill dflags last live
      ann_middles = annotate dflags live_middle (blockToList middle)

      -- Now sink and inline in this block
      (middle', assigs) = walk dflags ann_middles (mapFindWithDefault [] lbl sunk)
      fold_last = constantFoldNode dflags last
      (final_last, assigs') = tryToInline dflags live fold_last assigs

      -- We cannot sink into join points (successors with more than
      -- one predecessor), so identify the join points and the set
      -- of registers live in them.
      (joins, nonjoins) = partition (`mapMember` join_pts) succs
      live_in_joins = Set.unions (map getLive joins)

      -- We do not want to sink an assignment into multiple branches,
      -- so identify the set of registers live in multiple successors.
      -- This is made more complicated because when we sink an assignment
      -- into one branch, this might change the set of registers that are
      -- now live in multiple branches.
      init_live_sets = map getLive nonjoins
      live_in_multi live_sets r =
          case filter (Set.member r) live_sets of
            (_one:_two:_) -> True
            _ -> False

      -- Now, drop any assignments that we will not sink any further.
      (dropped_last, assigs'') = dropAssignments dflags drop_if init_live_sets assigs'

      drop_if a@(r,rhs,_) live_sets = (should_drop, live_sets')
          where
            should_drop = conflicts dflags a final_last
                       || not (isTrivial dflags rhs) && live_in_multi live_sets r
                       || r `Set.member` live_in_joins

            -- Sinking a into one branch makes every register its RHS
            -- mentions live in that branch too; update the live sets.
            live_sets' | should_drop = live_sets
                       | otherwise   = map upd live_sets

            upd set | r `Set.member` set = set `Set.union` live_rhs
                    | otherwise          = set

            live_rhs = foldRegsUsed dflags extendRegSet emptyRegSet rhs

      final_middle = foldl blockSnoc middle' dropped_last

      -- Each successor receives the surviving assignments, filtered to
      -- those still relevant for its live set.
      sunk' = mapUnion sunk $
                 mapFromList [ (l, filterAssignments dflags (getLive l) assigs'')
                             | l <- succs ]
{- TODO: enable this later, when we have some good tests in place to
measure the effect and tune it.
-- small: an expression we don't mind duplicating
isSmall :: CmmExpr -> Bool
isSmall (CmmReg (CmmLocal _)) = True --
isSmall (CmmLit _) = True
isSmall (CmmMachOp (MO_Add _) [x,y]) = isTrivial x && isTrivial y
isSmall (CmmRegOff (CmmLocal _) _) = True
isSmall _ = False
-}
--
-- We allow duplication of trivial expressions: registers (both local and
-- global) and literals.
--
isTrivial :: DynFlags -> CmmExpr -> Bool
isTrivial _ (CmmReg (CmmLocal _)) = True
isTrivial dflags (CmmReg (CmmGlobal r)) = -- see Note [Inline GlobalRegs?]
  -- On ARM, CodeGen.Platform.ARM does not have globalRegMaybe, so treat
  -- every global register as trivial there.  Otherwise a GlobalReg is
  -- trivial only when it lives in a real machine register: GlobalRegs
  -- that are loads from BaseReg are not trivial.
  isARM (platformArch platform) || isJust (globalRegMaybe platform r)
  where platform = targetPlatform dflags
isTrivial _ (CmmLit _) = True
isTrivial _ _ = False
--
-- Annotate each node with the set of registers live *after* the node.
-- We fold from the right: walking backwards from the end of the block,
-- thread the live set through gen_kill and pair every node with the
-- liveness holding just after it.
--
annotate :: DynFlags -> LocalRegSet -> [CmmNode O O] -> [(LocalRegSet, CmmNode O O)]
annotate dflags live_at_end nodes = snd (foldr step (live_at_end, []) nodes)
  where
    step node (live_after, annotated) =
      (gen_kill dflags node live_after, (live_after, node) : annotated)
--
-- Find the join points: blocks that appear as a successor of more than
-- one other block, i.e. blocks with multiple predecessors.
--
findJoinPoints :: [CmmBlock] -> BlockEnv Int
findJoinPoints blocks = mapFilter (> 1) predecessor_counts
  where
    -- Count, for every label, how many times it occurs as a successor.
    predecessor_counts :: BlockEnv Int
    predecessor_counts = foldr bump mapEmpty (concatMap successors blocks)

    bump lbl env = mapInsertWith (+) lbl 1 env
--
-- filter the list of assignments to remove any assignments that
-- are not live in a continuation.
--
-- 'go' walks the (reverse-ordered) assignment list, consing survivors
-- onto 'kept'; the final 'reverse' restores the original ordering.
filterAssignments :: DynFlags -> LocalRegSet -> Assignments -> Assignments
filterAssignments dflags live assigs = reverse (go assigs [])
  where go [] kept = kept
        go (a@(r,_,_):as) kept | needed = go as (a:kept)
                               | otherwise = go as kept
          where
            needed = r `Set.member` live
                     || any (conflicts dflags a) (map toNode kept)
                        -- Note that we must keep assignments that are
                        -- referred to by other assignments we have
                        -- already kept.
-- -----------------------------------------------------------------------------
-- Walk through the nodes of a block, sinking and inlining assignments
-- as we go.
--
-- On input we pass in a:
--    * list of nodes in the block
--    * a list of assignments that appeared *before* this block and
--      that are being sunk.
--
-- On output we get:
--    * a new block
--    * a list of assignments that will be placed *after* that block.
--
walk :: DynFlags
     -> [(LocalRegSet, CmmNode O O)]    -- nodes of the block, annotated with
                                        -- the set of registers live *after*
                                        -- this node.

     -> Assignments                     -- The current list of
                                        -- assignments we are sinking.
                                        -- Earlier assignments may refer
                                        -- to later ones.

     -> ( Block CmmNode O O             -- The new block
        , Assignments                   -- Assignments to sink further
        )

walk dflags nodes assigs = go nodes emptyBlock assigs
 where
   -- go: accumulate the rebuilt block and the assignments still in flight.
   go []               block as = (block, as)
   go ((live,node):ns) block as
    | shouldDiscard node live    = go ns block as
       -- discard dead assignment
    | Just a <- shouldSink dflags node2 = go ns block (a : as1)
    | otherwise                         = go ns block' as'
    where
      -- Constant-fold first, then try inlining pending assignments.
      node1 = constantFoldNode dflags node

      (node2, as1) = tryToInline dflags live node1 as

      -- Assignments that conflict with this node must be emitted before it.
      (dropped, as') = dropAssignmentsSimple dflags
                          (\a -> conflicts dflags a node2) as1

      block' = foldl blockSnoc block dropped `blockSnoc` node2
--
-- Heuristic to decide whether to pick up and sink an assignment
-- Currently we pick up all assignments to local registers. It might
-- be profitable to sink assignments to global regs too, but the
-- liveness analysis doesn't track those (yet) so we can't.
--
-- Returns the 'Assignment' (register, RHS, abstract memory read by the
-- RHS) if the node should be picked up, 'Nothing' otherwise.
shouldSink :: DynFlags -> CmmNode e x -> Maybe Assignment
shouldSink dflags (CmmAssign (CmmLocal r) e) | no_local_regs = Just (r, e, exprMem dflags e)
  where no_local_regs = True -- foldRegsUsed (\_ _ -> False) True e
shouldSink _ _other = Nothing
--
-- Discard dead assignments.  This doesn't do as good a job as
-- removeDeadAssignments, because it would need multiple passes to get
-- all the dead code, but it catches the common case of superfluous
-- reloads from the stack that the stack allocator leaves behind.
--
-- Also we catch "r = r" here.  You might think it would fall out of
-- inlining, but the inliner will see that r is live after the
-- instruction and choose not to inline r in the rhs.
--
shouldDiscard :: CmmNode e x -> LocalRegSet -> Bool
shouldDiscard (CmmAssign r (CmmReg r')) _
  | r == r' = True                            -- no-op move: r = r
shouldDiscard (CmmAssign (CmmLocal r) _) live
  = not (r `Set.member` live)                 -- target register is dead
shouldDiscard _ _ = False
-- | View an 'Assignment' as the Cmm assignment node it stands for.
toNode :: Assignment -> CmmNode O O
toNode (reg, rhs, _mem) = CmmAssign (CmmLocal reg) rhs
-- | Like 'dropAssignments', but for a stateless drop predicate.
dropAssignmentsSimple :: DynFlags -> (Assignment -> Bool) -> Assignments
                      -> ([CmmNode O O], Assignments)
dropAssignmentsSimple dflags should_drop
  = dropAssignments dflags stateless ()
  where stateless assig _ = (should_drop assig, ())
-- | Split the pending assignments into those we must drop here (emitted
-- as nodes, in execution order) and those we can keep sinking.  The
-- caller's predicate threads a state @s@; independently of it, an
-- assignment is also dropped if it conflicts with anything already
-- dropped.  'kept' is re-reversed at the end to preserve the
-- reverse-order invariant of 'Assignments'.
dropAssignments :: DynFlags -> (Assignment -> s -> (Bool, s)) -> s -> Assignments
                -> ([CmmNode O O], Assignments)
dropAssignments dflags should_drop state assigs
 = (dropped, reverse kept)
 where
   (dropped,kept) = go state assigs [] []

   go _     []             dropped kept = (dropped, kept)
   go state (assig : rest) dropped kept
      | conflict  = go state' rest (toNode assig : dropped) kept
      | otherwise = go state' rest dropped (assig:kept)
      where
        (dropit, state') = should_drop assig state
        conflict = dropit || any (conflicts dflags assig) dropped
-- -----------------------------------------------------------------------------
-- Try to inline assignments into a node.
--
-- We scan the pending assignments (reverse program order) and, for each
-- one, decide whether to inline it into the node, discard it as dead,
-- keep it untouched, or both inline and keep it (for trivial RHSs).

tryToInline
   :: DynFlags
   -> LocalRegSet               -- set of registers live after this
                                -- node. We cannot inline anything
                                -- that is live after the node, unless
                                -- it is small enough to duplicate.
   -> CmmNode O x               -- The node to inline into
   -> Assignments               -- Assignments to inline
   -> (
        CmmNode O x             -- New node
      , Assignments             -- Remaining assignments
      )

tryToInline dflags live node assigs = go usages node [] assigs
 where
  usages :: UniqFM Int -- Maps each LocalReg to a count of how often it is used
  usages = foldLocalRegsUsed dflags addUsage emptyUFM node

  go _usages node _skipped [] = (node, [])

  go usages node skipped (a@(l,rhs,_) : rest)
   | cannot_inline           = dont_inline
   | occurs_none             = discard  -- Note [discard during inlining]
   | occurs_once             = inline_and_discard
   | isTrivial dflags rhs    = inline_and_keep
   | otherwise               = dont_inline
   where
        inline_and_discard = go usages' inl_node skipped rest
         where usages' = foldLocalRegsUsed dflags addUsage usages rhs

        discard = go usages node skipped rest

        dont_inline        = keep node  -- don't inline the assignment, keep it
        inline_and_keep    = keep inl_node -- inline the assignment, keep it

        keep node' = (final_node, a : rest')
          where (final_node, rest') = go usages' node' (l:skipped) rest
                usages' = foldLocalRegsUsed dflags (\m r -> addToUFM m r 2)
                                            usages rhs
                -- we must not inline anything that is mentioned in the RHS
                -- of a binding that we have already skipped, so we set the
                -- usages of the regs on the RHS to 2.

        cannot_inline = skipped `regsUsedIn` rhs -- Note [dependent assignments]
                        || l `elem` skipped
                        || not (okToInline dflags rhs node)

        l_usages = lookupUFM usages l
        l_live   = l `elemRegSet` live

        occurs_once = not l_live && l_usages == Just 1
        occurs_none = not l_live && l_usages == Nothing

        inl_node = mapExpDeep inline node
                   -- mapExpDeep is where the inlining actually takes place!
           where inline (CmmReg    (CmmLocal l'))     | l == l' = rhs
                 inline (CmmRegOff (CmmLocal l') off) | l == l'
                    = cmmOffset dflags rhs off
                 -- re-constant fold after inlining
                 inline (CmmMachOp op args) = cmmMachOpFold dflags op args
                 inline other = other
-- Note [dependent assignments]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- If our assignment list looks like
--
-- [ y = e, x = ... y ... ]
--
-- We cannot inline x. Remember this list is really in reverse order,
-- so it means x = ... y ...; y = e
--
-- Hence if we inline x, the outer assignment to y will capture the
-- reference in x's right hand side.
--
-- In this case we should rename the y in x's right-hand side,
-- i.e. change the list to [ y = e, x = ... y1 ..., y1 = y ]
-- Now we can go ahead and inline x.
--
-- For now we do nothing, because this would require putting
-- everything inside UniqSM.
--
-- One more variant of this (#7366):
--
-- [ y = e, y = z ]
--
-- If we don't want to inline y = e, because y is used many times, we
-- might still be tempted to inline y = z (because we always inline
-- trivial rhs's). But of course we can't, because y is equal to e,
-- not z.
-- Note [discard during inlining]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Opportunities to discard assignments sometimes appear after we've
-- done some inlining. Here's an example:
--
-- x = R1;
-- y = P64[x + 7];
-- z = P64[x + 15];
-- /* z is dead */
-- R1 = y & (-8);
--
-- The x assignment is trivial, so we inline it in the RHS of y, and
-- keep both x and y. z gets dropped because it is dead, then we
-- inline y, and we have a dead assignment to x. If we don't notice
-- that x is dead in tryToInline, we end up retaining it.
-- | Bump a register's use-count by one (inserting a count of 1 if the
-- register is not yet present).
addUsage :: UniqFM Int -> LocalReg -> UniqFM Int
addUsage counts reg = addToUFM_C (+) counts reg 1
-- | Does the expression mention any of the given local registers,
-- either directly or via a register-plus-offset form?
regsUsedIn :: [LocalReg] -> CmmExpr -> Bool
regsUsedIn [] _ = False      -- fast path for the common empty case
regsUsedIn regs expr = wrapRecExpf check expr False
  where
    check (CmmReg (CmmLocal r)) _ | r `elem` regs = True
    check (CmmRegOff (CmmLocal r) _) _ | r `elem` regs = True
    check _ found = found
-- We don't inline into CmmUnsafeForeignCall if the expression refers
-- to global registers.  This is a HACK to avoid global registers
-- clashing with C argument-passing registers; really the back-end
-- ought to be able to handle it properly, but currently neither PprC
-- nor the NCG can do it.  See Note [Register parameter passing].
-- See also StgCmmForeign:load_args_into_temps.
okToInline :: DynFlags -> CmmExpr -> CmmNode e x -> Bool
okToInline dflags expr node =
  case node of
    CmmUnsafeForeignCall{} -> not (globalRegistersConflict dflags expr node)
    _                      -> True
-- -----------------------------------------------------------------------------

-- | @conflicts (r,e) stmt@ is @False@ if and only if the assignment
-- @r = e@ can be safely commuted past @stmt@.
--
-- We only sink "r = G" assignments right now, so conflicts is very simple:
-- each guard below names one way the node and the assignment can clash.
--
conflicts :: DynFlags -> Assignment -> CmmNode O x -> Bool
conflicts dflags (r, rhs, addr) node

  -- (1) node defines registers used by rhs of assignment. This catches
  -- assignments and all three kinds of calls. See Note [Sinking and calls]
  | globalRegistersConflict dflags rhs node = True
  | localRegistersConflict dflags rhs node = True

  -- (2) node uses register defined by assignment
  | foldRegsUsed dflags (\b r' -> r == r' || b) False node = True

  -- (3) a store to an address conflicts with a read of the same memory
  | CmmStore addr' e <- node
  , memConflicts addr (loadAddr dflags addr' (cmmExprWidth dflags e)) = True

  -- (4) an assignment to Hp/Sp conflicts with a heap/stack read respectively
  | HeapMem <- addr, CmmAssign (CmmGlobal Hp) _ <- node = True
  | StackMem <- addr, CmmAssign (CmmGlobal Sp) _ <- node = True
  | SpMem{} <- addr, CmmAssign (CmmGlobal Sp) _ <- node = True

  -- (5) foreign calls clobber heap: see Note [Foreign calls clobber heap]
  | CmmUnsafeForeignCall{} <- node, memConflicts addr AnyMem = True

  -- (6) native calls clobber any memory
  | CmmCall{} <- node, memConflicts addr AnyMem = True

  -- (7) otherwise, no conflict
  | otherwise = False
-- True if the node defines any global register that the given Cmm
-- expression uses.
globalRegistersConflict :: DynFlags -> CmmExpr -> CmmNode e x -> Bool
globalRegistersConflict dflags expr node =
  foldRegsDefd dflags clash False node
  where clash found reg = found || CmmGlobal reg `regUsedIn` expr
-- True if the node defines any local register that the given Cmm
-- expression uses.
localRegistersConflict :: DynFlags -> CmmExpr -> CmmNode e x -> Bool
localRegistersConflict dflags expr node =
  foldRegsDefd dflags clash False node
  where clash found reg = found || CmmLocal reg `regUsedIn` expr
-- Note [Sinking and calls]
-- ~~~~~~~~~~~~~~~~~~~~~~~~
--
-- We have three kinds of calls: normal (CmmCall), safe foreign (CmmForeignCall)
-- and unsafe foreign (CmmUnsafeForeignCall). We perform sinking pass after
-- stack layout (see Note [Sinking after stack layout]) which leads to two
-- invariants related to calls:
--
-- a) during stack layout phase all safe foreign calls are turned into
-- unsafe foreign calls (see Note [Lower safe foreign calls]). This
-- means that we will never encounter CmmForeignCall node when running
-- sinking after stack layout
--
-- b) stack layout saves all variables live across a call on the stack
-- just before making a call (remember we are not sinking assignments to
-- stack):
--
-- L1:
-- x = R1
-- P64[Sp - 16] = L2
-- P64[Sp - 8] = x
-- Sp = Sp - 16
-- call f() returns L2
-- L2:
--
-- We will attempt to sink { x = R1 } but we will detect conflict with
-- { P64[Sp - 8] = x } and hence we will drop { x = R1 } without even
-- checking whether it conflicts with { call f() }. In this way we will
-- never need to check any assignment conflicts with CmmCall. Remember
-- that we still need to check for potential memory conflicts.
--
-- So the result is that we only need to worry about CmmUnsafeForeignCall nodes
-- when checking conflicts (see Note [Unsafe foreign calls clobber caller-save registers]).
-- This assumption holds only when we do sinking after stack layout. If we run
-- it before stack layout we need to check for possible conflicts with all three
-- kinds of calls. Our `conflicts` function does that by using a generic
-- foldRegsDefd and foldRegsUsed functions defined in DefinerOfRegs and
-- UserOfRegs typeclasses.
--
-- An abstraction of memory read or written.
data AbsMem
  = NoMem            -- no memory accessed
  | AnyMem           -- arbitrary memory
  | HeapMem          -- definitely heap memory
  | StackMem         -- definitely stack memory
  | SpMem            -- <size>[Sp+n]
       {-# UNPACK #-} !Int    -- n: byte offset from Sp (see regAddr)
       {-# UNPACK #-} !Int    -- size of the access in bytes
                              -- (see memConflicts for overlap tests)
-- Having SpMem is important because it lets us float loads from Sp
-- past stores to Sp as long as they don't overlap, and this helps to
-- unravel some long sequences of
-- x1 = [Sp + 8]
-- x2 = [Sp + 16]
-- ...
-- [Sp + 8] = xi
-- [Sp + 16] = xj
--
-- Note that SpMem is invalidated if Sp is changed, but the definition
-- of 'conflicts' above handles that.
-- ToDo: this won't currently fix the following commonly occurring code:
-- x1 = [R1 + 8]
-- x2 = [R1 + 16]
-- ..
-- [Hp - 8] = x1
-- [Hp - 16] = x2
-- ..
-- because [R1 + 8] and [Hp - 8] are both HeapMem. We know that
-- assignments to [Hp + n] do not conflict with any other heap memory,
-- but this is tricky to nail down. What if we had
--
-- x = Hp + n
-- [x] = ...
--
-- the store to [x] should be "new heap", not "old heap".
-- Furthermore, you could imagine that if we started inlining
-- functions in Cmm then there might well be reads of heap memory
-- that was written in the same basic block. To take advantage of
-- non-aliasing of heap memory we will have to be more clever.
-- Note [Foreign calls clobber heap]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- It is tempting to say that foreign calls clobber only
-- non-heap/stack memory, but unfortunately we break this invariant in
-- the RTS. For example, in stg_catch_retry_frame we call
-- stmCommitNestedTransaction() which modifies the contents of the
-- TRec it is passed (this actually caused incorrect code to be
-- generated).
--
-- Since the invariant is true for the majority of foreign calls,
-- perhaps we ought to have a special annotation for calls that can
-- modify heap/stack memory. For now we just use the conservative
-- definition here.
--
-- Some CallishMachOp imply a memory barrier e.g. AtomicRMW and
-- therefore we should never float any memory operations across one of
-- these calls.
-- | Join two memory abstractions: the smallest 'AbsMem' covering both.
-- 'NoMem' is the identity; distinct Sp slots widen to 'StackMem';
-- anything else mixed collapses to 'AnyMem'.
bothMems :: AbsMem -> AbsMem -> AbsMem
bothMems NoMem x = x
bothMems x NoMem = x
bothMems HeapMem HeapMem = HeapMem
bothMems StackMem StackMem = StackMem
bothMems (SpMem o1 w1) (SpMem o2 w2)
  | o1 == o2 = SpMem o1 (max w1 w2)
  | otherwise = StackMem
bothMems SpMem{} StackMem = StackMem
bothMems StackMem SpMem{} = StackMem
bothMems _ _ = AnyMem
-- | Might an access to the first abstract memory clash with an access
-- to the second?  Heap and stack are treated as disjoint; two Sp slots
-- conflict only when their byte ranges overlap.
memConflicts :: AbsMem -> AbsMem -> Bool
memConflicts NoMem _ = False
memConflicts _ NoMem = False
memConflicts HeapMem StackMem = False
memConflicts StackMem HeapMem = False
memConflicts SpMem{} HeapMem = False
memConflicts HeapMem SpMem{} = False
memConflicts (SpMem o1 w1) (SpMem o2 w2)
  -- Interval overlap test on [o, o+w) byte ranges.
  | o1 < o2 = o1 + w1 > o2
  | otherwise = o2 + w2 > o1
memConflicts _ _ = True
-- | Abstract the memory read by a Cmm expression: a load contributes
-- both the loaded location and whatever its address expression reads;
-- machine ops join the memory of all operands; everything else reads
-- no memory.
exprMem :: DynFlags -> CmmExpr -> AbsMem
exprMem dflags expr =
  case expr of
    CmmLoad addr w -> loadAddr dflags addr (typeWidth w) `bothMems`
                      exprMem dflags addr
    CmmMachOp _ es -> foldr (bothMems . exprMem dflags) NoMem es
    _              -> NoMem
-- | Abstract the memory touched by a load of the given width from the
-- given address expression.
loadAddr :: DynFlags -> CmmExpr -> Width -> AbsMem
loadAddr dflags (CmmReg r)      w = regAddr dflags r 0 w
loadAddr dflags (CmmRegOff r i) w = regAddr dflags r i w
loadAddr _ other _
  -- Any other address that mentions Sp is some stack slot; otherwise we
  -- know nothing and must assume arbitrary memory.
  | CmmGlobal Sp `regUsedIn` other = StackMem
  | otherwise                      = AnyMem
-- | Classify an address of the form @reg + offset@, for a load of the
-- given width, into an 'AbsMem'.
regAddr :: DynFlags -> CmmReg -> Int -> Width -> AbsMem
regAddr _ (CmmGlobal Sp) i w = SpMem i (widthInBytes w)
regAddr _ (CmmGlobal Hp) _ _ = HeapMem
regAddr _ (CmmGlobal CurrentTSO) _ _ = HeapMem -- important for PrimOps
regAddr dflags r _ _ | isGcPtrType (cmmRegType dflags r) = HeapMem -- yay! GCPtr pays for itself
regAddr _ _ _ _ = AnyMem
{-
Note [Inline GlobalRegs?]
Should we freely inline GlobalRegs?
Actually it doesn't make a huge amount of difference either way, so we
*do* currently treat GlobalRegs as "trivial" and inline them
everywhere, but for what it's worth, here is what I discovered when I
(SimonM) looked into this:
Common sense says we should not inline GlobalRegs, because when we
have
x = R1
the register allocator will coalesce this assignment, generating no
code, and simply record the fact that x is bound to $rbx (or
whatever). Furthermore, if we were to sink this assignment, then the
range of code over which R1 is live increases, and the range of code
over which x is live decreases. All things being equal, it is better
for x to be live than R1, because R1 is a fixed register whereas x can
live in any register. So we should neither sink nor inline 'x = R1'.
However, not inlining GlobalRegs can have surprising
consequences. e.g. (cgrun020)
c3EN:
_s3DB::P64 = R1;
_c3ES::P64 = _s3DB::P64 & 7;
if (_c3ES::P64 >= 2) goto c3EU; else goto c3EV;
c3EU:
_s3DD::P64 = P64[_s3DB::P64 + 6];
_s3DE::P64 = P64[_s3DB::P64 + 14];
I64[Sp - 8] = c3F0;
R1 = _s3DE::P64;
P64[Sp] = _s3DD::P64;
inlining the GlobalReg gives:
c3EN:
if (R1 & 7 >= 2) goto c3EU; else goto c3EV;
c3EU:
I64[Sp - 8] = c3F0;
_s3DD::P64 = P64[R1 + 6];
R1 = P64[R1 + 14];
P64[Sp] = _s3DD::P64;
but if we don't inline the GlobalReg, instead we get:
_s3DB::P64 = R1;
if (_s3DB::P64 & 7 >= 2) goto c3EU; else goto c3EV;
c3EU:
I64[Sp - 8] = c3F0;
R1 = P64[_s3DB::P64 + 14];
P64[Sp] = P64[_s3DB::P64 + 6];
This looks better - we managed to inline _s3DD - but in fact it
generates an extra reg-reg move:
.Lc3EU:
movq $c3F0_info,-8(%rbp)
movq %rbx,%rax
movq 14(%rbx),%rbx
movq 6(%rax),%rax
movq %rax,(%rbp)
because _s3DB is now live across the R1 assignment, we lost the
benefit of coalescing.
Who is at fault here? Perhaps if we knew that _s3DB was an alias for
R1, then we would not sink a reference to _s3DB past the R1
assignment. Or perhaps we *should* do that - we might gain by sinking
it, despite losing the coalescing opportunity.
Sometimes not inlining global registers wins by virtue of the rule
about not inlining into arguments of a foreign call, e.g. (T7163) this
is what happens when we inlined F1:
_s3L2::F32 = F1;
_c3O3::F32 = %MO_F_Mul_W32(F1, 10.0 :: W32);
(_s3L7::F32) = call "ccall" arg hints: [] result hints: [] rintFloat(_c3O3::F32);
but if we don't inline F1:
(_s3L7::F32) = call "ccall" arg hints: [] result hints: [] rintFloat(%MO_F_Mul_W32(_s3L2::F32,
10.0 :: W32));
-}
| forked-upstream-packages-for-ghcjs/ghc | compiler/cmm/CmmSink.hs | bsd-3-clause | 29,872 | 0 | 17 | 7,959 | 4,289 | 2,367 | 1,922 | 242 | 5 |
{-# LANGUAGE Trustworthy #-}
module BadImport02_A (
Nat, zero, succ', showNat
) where
-- | Abstract naturals backed by an 'Int'.  The constructor is kept
-- private by the export list, so clients build values only via 'zero'
-- and 'succ''.
data Nat = NatC Int

-- | The natural number 0.
zero :: Nat
zero = NatC 0

-- | Successor: add one.
succ' :: Nat -> Nat
succ' (NatC n) = NatC (n + 1)

-- | Render a natural, e.g. @showNat zero == \"Nat 0\"@.
showNat :: Nat -> String
showNat (NatC n) = concat ["Nat ", show n]
| frantisekfarka/ghc-dsi | testsuite/tests/safeHaskell/unsafeLibs/BadImport02_A.hs | bsd-3-clause | 258 | 0 | 7 | 68 | 102 | 56 | 46 | 10 | 1 |
-- Copyright (c) 2000 Galois Connections, Inc.
-- All rights reserved. This software is distributed as
-- free software under the license in the file "LICENSE",
-- which is included in the distribution.
module Eval where
import Data.Array
import Geometry
import CSG
import Surface
import Data
import Parse (rayParse, rayParseF)
-- | The effects an evaluation monad must provide: primitive dispatch
-- ('doOp'), a per-step accounting hook ('tick'), and failure ('err').
class Monad m => MonadEval m where
  doOp :: PrimOp -> GMLOp -> Stack -> m Stack
  tick :: m ()
  err  :: String -> m a

  -- By default a step costs nothing; the bounded 'Abs' monad overrides
  -- this to burn fuel on every step.
  tick = return ()
-- | The identity monad: evaluation with no effects at all.
newtype Pure a = Pure a deriving Show

-- 'Functor' and 'Applicative' are mandatory superclasses of 'Monad'
-- since GHC 7.10 (the Applicative-Monad Proposal); without these
-- instances the module no longer compiles on current compilers.
instance Functor Pure where
  fmap f (Pure x) = Pure (f x)

instance Applicative Pure where
  pure = Pure
  Pure f <*> Pure x = Pure (f x)

instance Monad Pure where
  Pure x >>= k = k x
  return = Pure
  -- NOTE(review): the original also defined @fail s = error s@, which is
  -- exactly the historical default for 'fail'; it is omitted because
  -- 'fail' is no longer a 'Monad' method in modern @base@ (MonadFail),
  -- so behaviour is unchanged on old compilers and it now compiles on
  -- new ones.
-- | Pure evaluation: only the purely functional primitives are legal;
-- errors are fatal.
instance MonadEval Pure where
  doOp = doPureOp
  err  = error
-- | Evaluation in 'IO': every primitive, including rendering, is allowed.
-- (A trace of each op can be re-enabled here when debugging.)
instance MonadEval IO where
  doOp prim op stk = doAllOp prim op stk
  err s = error s
-- | The abstract-machine configuration: variable bindings, the operand
-- stack, and the instructions still to run.
data State
  = State { env   :: Env    -- ^ current variable bindings
          , stack :: Stack  -- ^ operand stack
          , code  :: Code   -- ^ remaining instructions
          } deriving Show
-- | Re-enter the evaluator from purely functional code (used for
-- procedural-texture callbacks): run @code@ in the 'Pure' monad and
-- return the resulting stack.
callback :: Env -> Code -> Stack -> Stack
callback env code stk =
    case eval (State { env = env, stack = stk, code = code }) of
      Pure stk' -> stk'
{-# SPECIALIZE eval :: State -> Pure Stack #-}
{-# SPECIALIZE eval :: State -> IO Stack #-}
-- | Drive the abstract machine until the code is exhausted and return
-- the final stack.  'tick' runs before each step, so bounded monads
-- (such as 'Abs') catch loops as soon as they begin.
eval :: MonadEval m => State -> m Stack
eval st
  | moreCode st = do tick            -- charge for the step first
                     st' <- step st
                     eval st'
  | otherwise   = return (stack st)
-- | Does the machine still have instructions left to execute?
moreCode :: State -> Bool
moreCode = not . null . code
-- Step has a precondition that there *is* code to run
{-# SPECIALIZE step :: State -> Pure State #-}
{-# SPECIALIZE step :: State -> IO State #-}
step :: MonadEval m => State -> m State
-- Rule 1: Pushing BaseValues
step st@(State{ stack = stack, code = (TBool b):cs })
= return (st { stack = (VBool b):stack, code = cs })
step st@(State{ stack = stack, code = (TInt i):cs })
= return (st { stack = (VInt i):stack, code = cs })
step st@(State{ stack = stack, code = (TReal r):cs })
= return (st { stack = (VReal r):stack, code = cs })
step st@(State{ stack = stack, code = (TString s):cs })
= return (st { stack = (VString s):stack, code = cs })
-- Rule 2: Name binding
step st@(State{ env = env, stack = (v:stack), code = (TBind id):cs }) =
return (State { env = extendEnv env id v, stack = stack, code = cs })
step st@(State{ env = env, stack = [], code = (TBind id):cs }) =
err "Attempt to bind the top of an empty stack"
-- Rule 3: Name lookup
step st@(State{ env = env, stack = stack, code = (TId id):cs }) =
case (lookupEnv env id) of
Just v -> return (st { stack = v:stack, code = cs })
Nothing -> err ("Cannot find value for identifier: " ++ id)
-- Rule 4: Closure creation
step st@(State{ env = env, stack = stack, code = (TBody body):cs }) =
return (st { stack = (VClosure env body):stack, code = cs })
-- Rule 5: Application
step st@(State{ env = env, stack = (VClosure env' code'):stack, code = TApply:cs }) =
do { stk <- eval (State {env = env', stack = stack, code = code'})
; return (st { stack = stk, code = cs })
}
step st@(State{ env = env, stack = [], code = TApply:cs }) =
err "Application with an empty stack"
step st@(State{ env = env, stack = _:_, code = TApply:cs }) =
err "Application of a non-closure"
-- Rule 6: Arrays
step st@(State{ env = env, stack = stack, code = TArray code':cs }) =
do { stk <- eval (State {env = env, stack = [], code = code'})
; let last = length stk-1
; let arr = array (0,last) (zip [last,last-1..] stk)
; return (st { stack = (VArray arr):stack, code = cs })
}
-- Rule 7 & 8: If statement
step st@(State{ env = env, stack = (VClosure e2 c2):(VClosure e1 c1):(VBool True):stack, code = TIf:cs }) =
do { stk <- eval (State {env = e1, stack = stack, code = c1})
; return (st { stack = stk, code = cs })
}
step st@(State{ env = env, stack = (VClosure e2 c2):(VClosure e1 c1):(VBool False):stack, code = TIf:cs }) =
do { stk <- eval (State {env = e2, stack = stack, code = c2})
; return (st { stack = stk, code = cs })
}
step st@(State{ env = env, stack = _, code = TIf:cs }) =
err "Incorrect use of if (bad and/or inappropriate values on the stack)"
-- Rule 9: Operators
step st@(State{ env = env, stack = stack, code = (TOp op):cs }) =
do { stk <- doOp (opFnTable ! op) op stack
; return (st { stack = stk, code = cs })
}
-- Rule Opps
step _ = err "Tripped on sidewalk while stepping."
--------------------------------------------------------------------------
-- Operator code
-- | Dispatch table mapping every GML opcode to its primitive
-- implementation, built once from the 'opcodes' listing.
opFnTable :: Array GMLOp PrimOp
opFnTable = array (minBound, maxBound)
                  [ (op, prim) | (_, TOp op, prim) <- opcodes ]
-- | Primitive dispatch for pure contexts: 'Op_render' performs real I/O
-- and is therefore rejected; every other primitive is purely functional.
doPureOp :: (MonadEval m) => PrimOp -> GMLOp -> Stack -> m Stack
doPureOp _ Op_render _ =
    err "\nAttempting to call render from inside a purely functional callback."
doPureOp primOp op stk = doPrimOp primOp op stk
{-# SPECIALIZE doPrimOp :: PrimOp -> GMLOp -> Stack -> Pure Stack #-}
{-# SPECIALIZE doPrimOp :: PrimOp -> GMLOp -> Stack -> IO Stack #-}
{-# SPECIALIZE doPrimOp :: PrimOp -> GMLOp -> Stack -> Abs Stack #-}
doPrimOp :: (MonadEval m) => PrimOp -> GMLOp -> Stack -> m Stack
-- 1 argument.
doPrimOp (Int_Int fn) _ (VInt i1:stk)
= return ((VInt (fn i1)) : stk)
doPrimOp (Real_Real fn) _ (VReal r1:stk)
= return ((VReal (fn r1)) : stk)
doPrimOp (Point_Real fn) _ (VPoint x y z:stk)
= return ((VReal (fn x y z)) : stk)
-- This is where the callbacks happen from...
doPrimOp (Surface_Obj fn) _ (VClosure env code:stk)
= case absapply env code [VAbsObj AbsFACE,VAbsObj AbsU,VAbsObj AbsV] of
Just [VReal r3,VReal r2,VReal r1,VPoint c1 c2 c3] ->
let
res = prop (color c1 c2 c3) r1 r2 r3
in
return ((VObject (fn (SConst res))) : stk)
_ -> return ((VObject (fn (SFun call))) : stk)
where
-- The most general case
call i r1 r2 =
case callback env code [VReal r2,VReal r1,VInt i] of
[VReal r3,VReal r2,VReal r1,VPoint c1 c2 c3]
-> prop (color c1 c2 c3) r1 r2 r3
stk -> error ("callback failed: incorrectly typed return arguments"
++ show stk)
doPrimOp (Real_Int fn) _ (VReal r1:stk)
= return ((VInt (fn r1)) : stk)
doPrimOp (Int_Real fn) _ (VInt r1:stk)
= return ((VReal (fn r1)) : stk)
doPrimOp (Arr_Int fn) _ (VArray arr:stk)
= return ((VInt (fn arr)) : stk)
-- 2 arguments.
doPrimOp (Int_Int_Int fn) _ (VInt i2:VInt i1:stk)
= return ((VInt (fn i1 i2)) : stk)
doPrimOp (Int_Int_Bool fn) _ (VInt i2:VInt i1:stk)
= return ((VBool (fn i1 i2)) : stk)
doPrimOp (Real_Real_Real fn) _ (VReal r2:VReal r1:stk)
= return ((VReal (fn r1 r2)) : stk)
doPrimOp (Real_Real_Bool fn) _ (VReal r2:VReal r1:stk)
= return ((VBool (fn r1 r2)) : stk)
doPrimOp (Arr_Int_Value fn) _ (VInt i:VArray arr:stk)
= return ((fn arr i) : stk)
-- Many arguments, typically image mangling
doPrimOp (Obj_Obj_Obj fn) _ (VObject o2:VObject o1:stk)
= return ((VObject (fn o1 o2)) : stk)
doPrimOp (Point_Color_Light fn) _ (VPoint r g b:VPoint x y z : stk)
= return (VLight (fn (x,y,z) (color r g b)) : stk)
doPrimOp (Point_Point_Color_Real_Real_Light fn) _
(VReal r2:VReal r1:VPoint r g b:VPoint x2 y2 z2:VPoint x1 y1 z1 : stk)
= return (VLight (fn (x1,y1,z1) (x2,y2,z2) (color r g b) r1 r2) : stk)
doPrimOp (Real_Real_Real_Point fn) _ (VReal r3:VReal r2:VReal r1:stk)
= return ((fn r1 r2 r3) : stk)
doPrimOp (Obj_Real_Obj fn) _ (VReal r:VObject o:stk)
= return (VObject (fn o r) : stk)
doPrimOp (Obj_Real_Real_Real_Obj fn) _ (VReal r3:VReal r2:VReal r1:VObject o:stk)
= return (VObject (fn o r1 r2 r3) : stk)
-- This one is our testing harness
doPrimOp (Value_String_Value fn) _ (VString s:o:stk)
= res `seq` return (res : stk)
where
res = fn o s
doPrimOp primOp op args
= err ("\n\ntype error when attempting to execute builtin primitive \"" ++
show op ++ "\"\n\n| " ++
show op ++ " takes " ++ show (length types) ++ " argument" ++ s
++ " with" ++ the ++ " type" ++ s ++ "\n|\n|" ++
" " ++ unwords [ show ty | ty <- types ] ++ "\n|\n|" ++
" currently, the relevent argument" ++ s ++ " on the stack " ++
are ++ "\n|\n| " ++
unwords [ "(" ++ show arg ++ ")"
| arg <- reverse (take (length types) args) ] ++ "\n|\n| "
++ " (top of stack is on the right hand side)\n\n")
where
len = length types
s = (if len /= 1 then "s" else "")
are = (if len /= 1 then "are" else "is")
the = (if len /= 1 then "" else " the")
types = getPrimOpType primOp
-- | Primitive dispatch for 'IO'.  Render is special: it performs real
-- output and can only be called at top level; every other operation is
-- delegated to the purely functional 'doPrimOp'.
doAllOp :: PrimOp -> GMLOp -> Stack -> IO Stack
doAllOp (Render render) Op_render
        (VString str:VInt ht:VInt wid:VReal fov
         :VInt dep:VObject obj:VArray arr
         :VPoint r g b : stk)
  = do render (color r g b) lights obj dep (fov * (pi / 180.0)) wid ht str
       return stk
  where
    -- Only light values in the scene array are passed to the renderer.
    lights = [ light | (VLight light) <- elems arr ]
doAllOp primOp op stk = doPrimOp primOp op stk
------------------------------------------------------------------------------
{-
- Abstract evaluation.
-
- The idea is you check for constant code that
- (1) does not look at its arguments
- (2) gives a fixed result
-
- We run for 100 steps.
-
-}
-- | Abstract (bounded) evaluation: run @code@ for at most 100 ticks and
-- return the resulting stack, or 'Nothing' on failure or timeout.
absapply :: Env -> Code -> Stack -> Maybe Stack
absapply env code stk =
    case runAbs (eval (State env stk code)) 100 of
      AbsState stk' _ -> Just stk'
      AbsFail _       -> Nothing
-- | A fuel-limited evaluation monad: a computation is a function from
-- the remaining step budget to its outcome.
newtype Abs a = Abs { runAbs :: Int -> AbsState a }

-- | Outcome of an 'Abs' computation: a value plus the remaining fuel,
-- or a failure message.
data AbsState a = AbsState a !Int
                | AbsFail String
-- Bind threads the fuel counter through; any failure short-circuits.
-- Note that 'fail' here is semantically significant (it produces a
-- recoverable 'AbsFail', and 'MonadEval' defines @err = fail@).
instance Monad Abs where
    (Abs fn) >>= k = Abs $ \ s ->
        case fn s of
          AbsState r s' -> runAbs (k r) s'
          AbsFail m     -> AbsFail m
    return x = Abs (\ n -> AbsState x n)
    fail s   = Abs (\ n -> AbsFail s)
-- | Abstract evaluation: failure is recoverable, and every 'tick'
-- burns one unit of fuel, failing once the budget is exhausted.
instance MonadEval Abs where
    doOp = doAbsOp
    err  = fail
    tick = Abs $ \ n ->
        if n <= 0 then AbsFail "run out of time"
                  else AbsState () (n-1)
-- | The only primitive understood abstractly is 'Op_point'; anything
-- else aborts abstract evaluation (the caller then falls back to the
-- general, non-abstract path).
doAbsOp :: PrimOp -> GMLOp -> Stack -> Abs Stack
doAbsOp _ Op_point (VReal r3 : VReal r2 : VReal r1 : stk)
    = return (VPoint r1 r2 r3 : stk)
-- An (AbsPoint :: AbsObj) could be pushed here instead, carrying
-- arbitrary objects in the three coordinate fields.
doAbsOp _ op _ = err ("operator not understood (" ++ show op ++ ")")
------------------------------------------------------------------------------
-- Driver
-- | Top-level driver: evaluate a complete program in 'IO', discarding
-- whatever is left on the stack at termination.
mainEval :: Code -> IO ()
mainEval prog = do
    _ <- eval (State emptyEnv [] prog)
    return ()
{-
* Oops, one of the example actually has something
* on the stack at the end.
* Oh well...
; if null stk
then return ()
else do { putStrLn done
; print stk
}
-}
-- | Message used by the (currently disabled) final stack check above.
done :: String
done = "Items still on stack at (successful) termination of program"
------------------------------------------------------------------------------
-- testing
-- | Parse and evaluate a program purely (testing helper).
test :: String -> Pure Stack
test = eval . State emptyEnv [] . rayParse
-- | Parse (in 'IO', via 'rayParseF') and evaluate a program
-- (testing helper).
testF :: String -> IO Stack
testF is = rayParseF is >>= eval . State emptyEnv []
-- | Abstractly evaluate a program against the symbolic face\/u\/v
-- arguments, returning either a failure message or the final stack
-- together with the fuel left over (out of 100).
testA :: String -> Either String (Stack,Int)
testA is =
    case runAbs (eval (State emptyEnv initialStack (rayParse is))) 100 of
      AbsState a n -> Right (a, n)
      AbsFail m    -> Left m
  where
    initialStack = [VAbsObj AbsFACE, VAbsObj AbsU, VAbsObj AbsV]
abstest1 = "1.0 0.0 0.0 point /red { /v /u /face red 1.0 0.0 1.0 } apply"
-- should be [3:: Int]
et1 = test "1 /x { x } /f 2 /x f apply x addi"
| acowley/ghc | testsuite/tests/programs/galois_raytrace/Eval.hs | bsd-3-clause | 12,223 | 14 | 30 | 3,460 | 4,606 | 2,421 | 2,185 | 221 | 6 |
{-# LANGUAGE CPP #-}
-- -fno-warn-deprecations for use of Map.foldWithKey
{-# OPTIONS_GHC -fno-warn-deprecations #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.PackageDescription.Configuration
-- Copyright : Thomas Schilling, 2007
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is about the cabal configurations feature. It exports
-- 'finalizePackageDescription' and 'flattenPackageDescription' which are
-- functions for converting 'GenericPackageDescription's down to
-- 'PackageDescription's. It has code for working with the tree of conditions
-- and resolving or flattening conditions.
module Distribution.PackageDescription.Configuration (
finalizePackageDescription,
flattenPackageDescription,
-- Utils
parseCondition,
freeVars,
mapCondTree,
mapTreeData,
mapTreeConds,
mapTreeConstrs,
) where
import Distribution.Package
( PackageName, Dependency(..) )
import Distribution.PackageDescription
( GenericPackageDescription(..), PackageDescription(..)
, Library(..), Executable(..), BuildInfo(..)
, Flag(..), FlagName(..), FlagAssignment
, Benchmark(..), CondTree(..), ConfVar(..), Condition(..)
, TestSuite(..) )
import Distribution.PackageDescription.Utils
( cabalBug, userBug )
import Distribution.Version
( VersionRange, anyVersion, intersectVersionRanges, withinRange )
import Distribution.Compiler
( CompilerId(CompilerId) )
import Distribution.System
( Platform(..), OS, Arch )
import Distribution.Simple.Utils
( currentDir, lowercase )
import Distribution.Simple.Compiler
( CompilerInfo(..) )
import Distribution.Text
( Text(parse) )
import Distribution.Compat.ReadP as ReadP hiding ( char )
import Control.Arrow (first)
import qualified Distribution.Compat.ReadP as ReadP ( char )
import Data.Char ( isAlphaNum )
import Data.Maybe ( catMaybes, maybeToList )
import Data.Map ( Map, fromListWith, toList )
import qualified Data.Map as Map
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
#endif
------------------------------------------------------------------------------
-- | Simplify the condition and return its free variables.
simplifyCondition :: Condition c
                  -> (c -> Either d Bool)   -- ^ (partial) variable assignment
                  -> (Condition d, [d])
simplifyCondition cond i = (simplified, remaining simplified)
  where
    simplified = walk cond

    -- Push the assignment through the condition, constant-folding any
    -- sub-condition whose value is already decided.
    walk (Var v)  = either Var Lit (i v)
    walk (Lit b)  = Lit b
    walk (CNot c) = case walk c of
                      Lit True  -> Lit False
                      Lit False -> Lit True
                      c'        -> CNot c'
    walk (COr c d) = case (walk c, walk d) of
                       (Lit False, d') -> d'
                       (Lit True, _)   -> Lit True
                       (c', Lit False) -> c'
                       (_, Lit True)   -> Lit True
                       (c', d')        -> COr c' d'
    walk (CAnd c d) = case (walk c, walk d) of
                        (Lit False, _) -> Lit False
                        (Lit True, d') -> d'
                        (_, Lit False) -> Lit False
                        (c', Lit True) -> c'
                        (c', d')       -> CAnd c' d'

    -- Variables still free after simplification.
    remaining c = case c of
      Var v      -> [v]
      Lit _      -> []
      CNot c'    -> remaining c'
      COr c1 c2  -> remaining c1 ++ remaining c2
      CAnd c1 c2 -> remaining c1 ++ remaining c2
-- | Simplify a configuration condition using the OS and arch names. Returns
-- the names of all the flags occurring in the condition.
-- | Simplify a configuration condition using the known OS, architecture
-- and compiler.  Returns the simplified condition (over flags only)
-- together with the names of all flags occurring in it.
simplifyWithSysParams :: OS -> Arch -> CompilerInfo -> Condition ConfVar
                      -> (Condition FlagName, [FlagName])
simplifyWithSysParams os arch cinfo cond = simplifyCondition cond interp
  where
    interp (OS os')     = Right (os' == os)
    interp (Arch arch') = Right (arch' == arch)
    interp (Impl comp vr)
      | matchImpl (compilerInfoId cinfo) = Right True
      | otherwise = case compilerInfoCompat cinfo of
          -- fixme: treat Nothing as unknown, rather than empty list once we
          -- support partial resolution of system parameters
          Nothing     -> Right False
          Just compat -> Right (any matchImpl compat)
      where
        matchImpl (CompilerId c v) = comp == c && v `withinRange` vr
    interp (Flag f) = Left f
-- TODO: Add instances and check
--
-- prop_sC_idempotent cond a o = cond' == cond''
-- where
-- cond' = simplifyCondition cond a o
-- cond'' = simplifyCondition cond' a o
--
-- prop_sC_noLits cond a o = isLit res || not (hasLits res)
-- where
-- res = simplifyCondition cond a o
-- hasLits (Lit _) = True
-- hasLits (CNot c) = hasLits c
-- hasLits (COr l r) = hasLits l || hasLits r
-- hasLits (CAnd l r) = hasLits l || hasLits r
-- hasLits _ = False
--
-- | Parse a configuration condition from a string.
parseCondition :: ReadP r (Condition ConfVar)
parseCondition = condOr
where
condOr = sepBy1 condAnd (oper "||") >>= return . foldl1 COr
condAnd = sepBy1 cond (oper "&&")>>= return . foldl1 CAnd
cond = sp >> (boolLiteral +++ inparens condOr +++ notCond +++ osCond
+++ archCond +++ flagCond +++ implCond )
inparens = between (ReadP.char '(' >> sp) (sp >> ReadP.char ')' >> sp)
notCond = ReadP.char '!' >> sp >> cond >>= return . CNot
osCond = string "os" >> sp >> inparens osIdent >>= return . Var
archCond = string "arch" >> sp >> inparens archIdent >>= return . Var
flagCond = string "flag" >> sp >> inparens flagIdent >>= return . Var
implCond = string "impl" >> sp >> inparens implIdent >>= return . Var
boolLiteral = fmap Lit parse
archIdent = fmap Arch parse
osIdent = fmap OS parse
flagIdent = fmap (Flag . FlagName . lowercase) (munch1 isIdentChar)
isIdentChar c = isAlphaNum c || c == '_' || c == '-'
oper s = sp >> string s >> sp
sp = skipSpaces
implIdent = do i <- parse
vr <- sp >> option anyVersion parse
return $ Impl i vr
------------------------------------------------------------------------------
-- | Apply three transformations to a 'CondTree' at once: one to the
-- payload, one to the constraints, and one to every condition.
mapCondTree :: (a -> b) -> (c -> d) -> (Condition v -> Condition w)
            -> CondTree v c a -> CondTree w d b
mapCondTree fa fc fcnd (CondNode a c ifs) =
    CondNode (fa a) (fc c)
             [ (fcnd cnd, recurse t, fmap recurse me)
             | (cnd, t, me) <- ifs ]
  where
    recurse = mapCondTree fa fc fcnd
-- | Transform only the constraints of a 'CondTree'.
mapTreeConstrs :: (c -> d) -> CondTree v c a -> CondTree v d a
mapTreeConstrs f tree = mapCondTree id f id tree
-- | Transform only the conditions of a 'CondTree'.
mapTreeConds :: (Condition v -> Condition w) -> CondTree v c a -> CondTree w c a
mapTreeConds f tree = mapCondTree id id f tree
-- | Transform only the payload data of a 'CondTree'.
mapTreeData :: (a -> b) -> CondTree v c a -> CondTree v c b
mapTreeData f tree = mapCondTree f id id tree
-- | Result of dependency test. Isomorphic to @Maybe d@ but renamed for
-- clarity.
data DepTestRslt d = DepOk | MissingDeps d
instance Monoid d => Monoid (DepTestRslt d) where
mempty = DepOk
mappend DepOk x = x
mappend x DepOk = x
mappend (MissingDeps d) (MissingDeps d') = MissingDeps (d `mappend` d')
data BT a = BTN a | BTB (BT a) (BT a) -- very simple binary tree
-- | Try to find a flag assignment that satisfies the constraints of all trees.
--
-- Returns either the missing dependencies, or a tuple containing the
-- resulting data, the associated dependencies, and the chosen flag
-- assignments.
--
-- In case of failure, the _smallest_ number of of missing dependencies is
-- returned. [TODO: Could also be specified with a function argument.]
--
-- TODO: The current algorithm is rather naive. A better approach would be to:
--
-- * Rule out possible paths, by taking a look at the associated dependencies.
--
-- * Infer the required values for the conditions of these paths, and
-- calculate the required domains for the variables used in these
-- conditions. Then picking a flag assignment would be linear (I guess).
--
-- This would require some sort of SAT solving, though, thus it's not
-- implemented unless we really need it.
--
resolveWithFlags ::
[(FlagName,[Bool])]
-- ^ Domain for each flag name, will be tested in order.
-> OS -- ^ OS as returned by Distribution.System.buildOS
-> Arch -- ^ Arch as returned by Distribution.System.buildArch
-> CompilerInfo -- ^ Compiler information
-> [Dependency] -- ^ Additional constraints
-> [CondTree ConfVar [Dependency] PDTagged]
-> ([Dependency] -> DepTestRslt [Dependency]) -- ^ Dependency test function.
-> Either [Dependency] (TargetSet PDTagged, FlagAssignment)
-- ^ Either the missing dependencies (error case), or a pair of
-- (set of build targets with dependencies, chosen flag assignments)
resolveWithFlags dom os arch impl constrs trees checkDeps =
case try dom [] of
Right r -> Right r
Left dbt -> Left $ findShortest dbt
where
extraConstrs = toDepMap constrs
-- simplify trees by (partially) evaluating all conditions and converting
-- dependencies to dependency maps.
simplifiedTrees = map ( mapTreeConstrs toDepMap -- convert to maps
. mapTreeConds (fst . simplifyWithSysParams os arch impl))
trees
-- @try@ recursively tries all possible flag assignments in the domain and
-- either succeeds or returns a binary tree with the missing dependencies
-- encountered in each run. Since the tree is constructed lazily, we
-- avoid some computation overhead in the successful case.
try [] flags =
let targetSet = TargetSet $ flip map simplifiedTrees $
-- apply additional constraints to all dependencies
first (`constrainBy` extraConstrs) .
simplifyCondTree (env flags)
deps = overallDependencies targetSet
in case checkDeps (fromDepMap deps) of
DepOk -> Right (targetSet, flags)
MissingDeps mds -> Left (BTN mds)
try ((n, vals):rest) flags =
tryAll $ map (\v -> try rest ((n, v):flags)) vals
tryAll = foldr mp mz
-- special version of `mplus' for our local purposes
mp (Left xs) (Left ys) = (Left (BTB xs ys))
mp (Left _) m@(Right _) = m
mp m@(Right _) _ = m
-- `mzero'
mz = Left (BTN [])
env flags flag = (maybe (Left flag) Right . lookup flag) flags
-- for the error case we inspect our lazy tree of missing dependencies and
-- pick the shortest list of missing dependencies
findShortest (BTN x) = x
findShortest (BTB lt rt) =
let l = findShortest lt
r = findShortest rt
in case (l,r) of
([], xs) -> xs -- [] is too short
(xs, []) -> xs
([x], _) -> [x] -- single elem is optimum
(_, [x]) -> [x]
(xs, ys) -> if lazyLengthCmp xs ys
then xs else ys
-- lazy variant of @\xs ys -> length xs <= length ys@
lazyLengthCmp [] _ = True
lazyLengthCmp _ [] = False
lazyLengthCmp (_:xs) (_:ys) = lazyLengthCmp xs ys
-- | A map of dependencies. Newtyped since the default monoid instance is not
-- appropriate. The monoid instance uses 'intersectVersionRanges'.
newtype DependencyMap = DependencyMap { unDependencyMap :: Map PackageName VersionRange }
deriving (Show, Read)
instance Monoid DependencyMap where
mempty = DependencyMap Map.empty
(DependencyMap a) `mappend` (DependencyMap b) =
DependencyMap (Map.unionWith intersectVersionRanges a b)
-- | Collapse a dependency list into a map, intersecting the version
-- ranges of duplicate package names.
toDepMap :: [Dependency] -> DependencyMap
toDepMap ds = DependencyMap $
    fromListWith intersectVersionRanges [ (p, vr) | Dependency p vr <- ds ]
-- | Expand a dependency map back into a list of 'Dependency's.
fromDepMap :: DependencyMap -> [Dependency]
fromDepMap = map (uncurry Dependency) . toList . unDependencyMap
-- | Flatten a 'CondTree' under a variable assignment @env@, combining
-- the constraints and payloads of every enabled branch.  @env@ must
-- decide every variable; an undecided variable is a programmer error.
simplifyCondTree :: (Monoid a, Monoid d) =>
                    (v -> Either v Bool)
                 -> CondTree v d a
                 -> (d, a)
simplifyCondTree env (CondNode a d ifs) =
    mconcat ((d, a) : catMaybes (map branch ifs))
  where
    branch (cnd, thenPart, mElsePart) =
      case fst (simplifyCondition cnd env) of
        Lit True  -> Just (simplifyCondTree env thenPart)
        Lit False -> fmap (simplifyCondTree env) mElsePart
        _         -> error $ "Environment not defined for all free vars"
-- | Flatten a 'CondTree' by taking /every/ branch (both the if and the
-- else part) into account.  Since branches represent exclusive choices,
-- the combined result may not be \"sane\".
ignoreConditions :: (Monoid a, Monoid c) => CondTree v c a -> (a, c)
ignoreConditions (CondNode a c ifs) =
    mconcat ((a, c) : concatMap bothSides ifs)
  where
    bothSides (_, t, me) =
      ignoreConditions t : maybeToList (fmap ignoreConditions me)
-- | All flag names mentioned in the conditions of a 'CondTree'
-- (OS/arch/impl variables are discarded).
freeVars :: CondTree ConfVar c a -> [FlagName]
freeVars t = [ f | Flag f <- treeVars t ]
  where
    treeVars (CondNode _ _ ifs) = concatMap componentVars ifs
    componentVars (c, ct, mct) =
      condVars c ++ treeVars ct ++ maybe [] treeVars mct
    condVars c = case c of
      Var v      -> [v]
      Lit _      -> []
      CNot c'    -> condVars c'
      COr c1 c2  -> condVars c1 ++ condVars c2
      CAnd c1 c2 -> condVars c1 ++ condVars c2
------------------------------------------------------------------------------
-- | A set of targets with their package dependencies
newtype TargetSet a = TargetSet [(DependencyMap, a)]
-- | Combine the target-specific dependencies in a 'TargetSet' to give
-- the dependencies for the package as a whole.  Disabled test-suites
-- and benchmarks contribute nothing.
overallDependencies :: TargetSet PDTagged -> DependencyMap
overallDependencies (TargetSet targets) =
    mconcat [ deps | (deps, tag) <- targets, enabled tag ]
  where
    enabled :: PDTagged -> Bool
    enabled (Lib _)     = True
    enabled (Exe _ _)   = True
    enabled (Test _ t)  = testEnabled t
    enabled (Bench _ b) = benchmarkEnabled b
    enabled PDNull      = True
-- | Apply extra constraints to a dependency map.
-- Combines dependencies where the result will only contain keys from the
-- left (first) map.  If a key also exists in the right map, both
-- constraints are intersected.
constrainBy :: DependencyMap  -- ^ Input map
            -> DependencyMap  -- ^ Extra constraints
            -> DependencyMap
constrainBy left extra =
    DependencyMap $
      -- 'Map.foldrWithKey' replaces the deprecated 'Map.foldWithKey'
      -- (the old name is just an alias for the right fold), which is
      -- what the module-wide -fno-warn-deprecations was silencing.
      Map.foldrWithKey tightenConstraint (unDependencyMap left)
                                         (unDependencyMap extra)
  where
    -- Intersect constraint @c@ for package @n@ into @l@, but only if
    -- @n@ is already present there.
    tightenConstraint n c l =
      case Map.lookup n l of
        Nothing -> l
        Just vr -> Map.insert n (intersectVersionRanges vr c) l
-- | Collect up the targets in a TargetSet of tagged targets, storing the
-- dependencies as we go.
flattenTaggedTargets :: TargetSet PDTagged ->
(Maybe Library, [(String, Executable)], [(String, TestSuite)]
, [(String, Benchmark)])
flattenTaggedTargets (TargetSet targets) = foldr untag (Nothing, [], [], []) targets
where
untag (_, Lib _) (Just _, _, _, _) = userBug "Only one library expected"
untag (deps, Lib l) (Nothing, exes, tests, bms) =
(Just l', exes, tests, bms)
where
l' = l {
libBuildInfo = (libBuildInfo l) { targetBuildDepends = fromDepMap deps }
}
untag (deps, Exe n e) (mlib, exes, tests, bms)
| any ((== n) . fst) exes =
userBug $ "There exist several exes with the same name: '" ++ n ++ "'"
| any ((== n) . fst) tests =
userBug $ "There exists a test with the same name as an exe: '" ++ n ++ "'"
| any ((== n) . fst) bms =
userBug $ "There exists a benchmark with the same name as an exe: '" ++ n ++ "'"
| otherwise = (mlib, (n, e'):exes, tests, bms)
where
e' = e {
buildInfo = (buildInfo e) { targetBuildDepends = fromDepMap deps }
}
untag (deps, Test n t) (mlib, exes, tests, bms)
| any ((== n) . fst) tests =
userBug $ "There exist several tests with the same name: '" ++ n ++ "'"
| any ((== n) . fst) exes =
userBug $ "There exists an exe with the same name as the test: '" ++ n ++ "'"
| any ((== n) . fst) bms =
userBug $ "There exists a benchmark with the same name as the test: '" ++ n ++ "'"
| otherwise = (mlib, exes, (n, t'):tests, bms)
where
t' = t {
testBuildInfo = (testBuildInfo t)
{ targetBuildDepends = fromDepMap deps }
}
untag (deps, Bench n b) (mlib, exes, tests, bms)
| any ((== n) . fst) bms =
userBug $ "There exist several benchmarks with the same name: '" ++ n ++ "'"
| any ((== n) . fst) exes =
userBug $ "There exists an exe with the same name as the benchmark: '" ++ n ++ "'"
| any ((== n) . fst) tests =
userBug $ "There exists a test with the same name as the benchmark: '" ++ n ++ "'"
| otherwise = (mlib, exes, tests, (n, b'):bms)
where
b' = b {
benchmarkBuildInfo = (benchmarkBuildInfo b)
{ targetBuildDepends = fromDepMap deps }
}
untag (_, PDNull) x = x -- actually this should not happen, but let's be liberal
------------------------------------------------------------------------------
-- Convert GenericPackageDescription to PackageDescription
--
data PDTagged = Lib Library
| Exe String Executable
| Test String TestSuite
| Bench String Benchmark
| PDNull
deriving Show
instance Monoid PDTagged where
mempty = PDNull
PDNull `mappend` x = x
x `mappend` PDNull = x
Lib l `mappend` Lib l' = Lib (l `mappend` l')
Exe n e `mappend` Exe n' e' | n == n' = Exe n (e `mappend` e')
Test n t `mappend` Test n' t' | n == n' = Test n (t `mappend` t')
Bench n b `mappend` Bench n' b' | n == n' = Bench n (b `mappend` b')
_ `mappend` _ = cabalBug "Cannot combine incompatible tags"
-- | Create a package description with all configurations resolved.
--
-- This function takes a `GenericPackageDescription` and several environment
-- parameters and tries to generate `PackageDescription` by finding a flag
-- assignment that result in satisfiable dependencies.
--
-- It takes as inputs a not necessarily complete specifications of flags
-- assignments, an optional package index as well as platform parameters. If
-- some flags are not assigned explicitly, this function will try to pick an
-- assignment that causes this function to succeed. The package index is
-- optional since on some platforms we cannot determine which packages have
-- been installed before. When no package index is supplied, every dependency
-- is assumed to be satisfiable, therefore all not explicitly assigned flags
-- will get their default values.
--
-- This function will fail if it cannot find a flag assignment that leads to
-- satisfiable dependencies. (It will not try alternative assignments for
-- explicitly specified flags.) In case of failure it will return a /minimum/
-- number of dependencies that could not be satisfied. On success, it will
-- return the package description and the full flag assignment chosen.
--
finalizePackageDescription ::
FlagAssignment -- ^ Explicitly specified flag assignments
-> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of
-- available packages? If this is unknown then use
-- True.
-> Platform -- ^ The 'Arch' and 'OS'
-> CompilerInfo -- ^ Compiler information
-> [Dependency] -- ^ Additional constraints
-> GenericPackageDescription
-> Either [Dependency]
(PackageDescription, FlagAssignment)
-- ^ Either missing dependencies or the resolved package
-- description along with the flag assignments chosen.
finalizePackageDescription userflags satisfyDep
(Platform arch os) impl constraints
(GenericPackageDescription pkg flags mlib0 exes0 tests0 bms0) =
case resolveFlags of
Right ((mlib, exes', tests', bms'), targetSet, flagVals) ->
Right ( pkg { library = mlib
, executables = exes'
, testSuites = tests'
, benchmarks = bms'
, buildDepends = fromDepMap (overallDependencies targetSet)
--TODO: we need to find a way to avoid pulling in deps
-- for non-buildable components. However cannot simply
-- filter at this stage, since if the package were not
-- available we would have failed already.
}
, flagVals )
Left missing -> Left missing
where
-- Combine lib, exes, and tests into one list of @CondTree@s with tagged data
condTrees = maybeToList (fmap (mapTreeData Lib) mlib0 )
++ map (\(name,tree) -> mapTreeData (Exe name) tree) exes0
++ map (\(name,tree) -> mapTreeData (Test name) tree) tests0
++ map (\(name,tree) -> mapTreeData (Bench name) tree) bms0
resolveFlags =
case resolveWithFlags flagChoices os arch impl constraints condTrees check of
Right (targetSet, fs) ->
let (mlib, exes, tests, bms) = flattenTaggedTargets targetSet in
Right ( (fmap libFillInDefaults mlib,
map (\(n,e) -> (exeFillInDefaults e) { exeName = n }) exes,
map (\(n,t) -> (testFillInDefaults t) { testName = n }) tests,
map (\(n,b) -> (benchFillInDefaults b) { benchmarkName = n }) bms),
targetSet, fs)
Left missing -> Left missing
flagChoices = map (\(MkFlag n _ d manual) -> (n, d2c manual n d)) flags
d2c manual n b = case lookup n userflags of
Just val -> [val]
Nothing
| manual -> [b]
| otherwise -> [b, not b]
--flagDefaults = map (\(n,x:_) -> (n,x)) flagChoices
check ds = let missingDeps = filter (not . satisfyDep) ds
in if null missingDeps
then DepOk
else MissingDeps missingDeps
{-
let tst_p = (CondNode [1::Int] [Distribution.Package.Dependency "a" AnyVersion] [])
let tst_p2 = (CondNode [1::Int] [Distribution.Package.Dependency "a" (EarlierVersion (Version [1,0] [])), Distribution.Package.Dependency "a" (LaterVersion (Version [2,0] []))] [])
let p_index = Distribution.Simple.PackageIndex.fromList [Distribution.Package.PackageIdentifier "a" (Version [0,5] []), Distribution.Package.PackageIdentifier "a" (Version [2,5] [])]
let look = not . null . Distribution.Simple.PackageIndex.lookupDependency p_index
let looks ds = mconcat $ map (\d -> if look d then DepOk else MissingDeps [d]) ds
resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p] looks ===> Right ...
resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p2] looks ===> Left ...
-}
-- | Flatten a generic package description by ignoring all conditions and just
-- join the field descriptors into on package description. Note, however,
-- that this may lead to inconsistent field values, since all values are
-- joined into one field, which may not be possible in the original package
-- description, due to the use of exclusive choices (if ... else ...).
--
-- TODO: One particularly tricky case is defaulting. In the original package
-- description, e.g., the source directory might either be the default or a
-- certain, explicitly set path. Since defaults are filled in only after the
-- package has been resolved and when no explicit value has been set, the
-- default path will be missing from the package description returned by this
-- function.
flattenPackageDescription :: GenericPackageDescription -> PackageDescription
flattenPackageDescription (GenericPackageDescription pkg _ mlib0 exes0 tests0 bms0) =
  pkg { library      = mlib
      , executables  = reverse exes
      , testSuites   = reverse tests
      , benchmarks   = reverse bms
      , buildDepends = ldeps ++ reverse edeps ++ reverse tdeps ++ reverse bdeps
      }
  where
    -- The library (if any), with its defaults filled in.
    (mlib, ldeps) = case mlib0 of
        Just lib -> let (l, ds) = ignoreConditions lib
                    in (Just (libFillInDefaults l), ds)
        Nothing  -> (Nothing, [])

    -- Each component list is flattened the same way: collapse the
    -- condition tree, install the component's name, fill in defaults.
    (exes,  edeps) = foldr flattenExe ([], []) exes0
    (tests, tdeps) = foldr flattenTst ([], []) tests0
    (bms,   bdeps) = foldr flattenBm  ([], []) bms0

    flattenExe (n, t) (acc, ds) =
      let (e, ds') = ignoreConditions t
      in ( exeFillInDefaults (e { exeName = n }) : acc, ds' ++ ds )
    flattenTst (n, t) (acc, ds) =
      let (e, ds') = ignoreConditions t
      in ( testFillInDefaults (e { testName = n }) : acc, ds' ++ ds )
    flattenBm (n, t) (acc, ds) =
      let (e, ds') = ignoreConditions t
      in ( benchFillInDefaults (e { benchmarkName = n }) : acc, ds' ++ ds )
-- This is in fact rather a hack. The original version just overrode the
-- default values, however, when adding conditions we had to switch to a
-- modifier-based approach. There, nothing is ever overwritten, but only
-- joined together.
--
-- This is the cleanest way i could think of, that doesn't require
-- changing all field parsing functions to return modifiers instead.
libFillInDefaults :: Library -> Library
libFillInDefaults lib@(Library { libBuildInfo = bi }) =
lib { libBuildInfo = biFillInDefaults bi }
exeFillInDefaults :: Executable -> Executable
exeFillInDefaults exe@(Executable { buildInfo = bi }) =
exe { buildInfo = biFillInDefaults bi }
testFillInDefaults :: TestSuite -> TestSuite
testFillInDefaults tst@(TestSuite { testBuildInfo = bi }) =
tst { testBuildInfo = biFillInDefaults bi }
benchFillInDefaults :: Benchmark -> Benchmark
benchFillInDefaults bm@(Benchmark { benchmarkBuildInfo = bi }) =
bm { benchmarkBuildInfo = biFillInDefaults bi }
biFillInDefaults :: BuildInfo -> BuildInfo
biFillInDefaults bi =
if null (hsSourceDirs bi)
then bi { hsSourceDirs = [currentDir] }
else bi
| DavidAlphaFox/ghc | libraries/Cabal/Cabal/Distribution/PackageDescription/Configuration.hs | bsd-3-clause | 26,553 | 0 | 20 | 7,211 | 6,260 | 3,382 | 2,878 | 371 | 19 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, NoImplicitPrelude
, ExistentialQuantification
, DeriveDataTypeable
#-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Handle.Types
-- Copyright : (c) The University of Glasgow, 1994-2009
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- Basic types for the implementation of IO Handles.
--
-----------------------------------------------------------------------------
module GHC.IO.Handle.Types (
Handle(..), Handle__(..), showHandle,
checkHandleInvariants,
BufferList(..),
HandleType(..),
isReadableHandleType, isWritableHandleType, isReadWriteHandleType,
BufferMode(..),
BufferCodec(..),
NewlineMode(..), Newline(..), nativeNewline,
universalNewlineMode, noNewlineTranslation, nativeNewlineMode
) where
#undef DEBUG
import GHC.Base
import GHC.MVar
import GHC.IO
import GHC.IO.Buffer
import GHC.IO.BufferedIO
import GHC.IO.Encoding.Types
import GHC.IORef
import Data.Maybe
import GHC.Show
import GHC.Read
import GHC.Word
import GHC.IO.Device
import Data.Typeable
#ifdef DEBUG
import Control.Monad
#endif
-- ---------------------------------------------------------------------------
-- Handle type
-- A Handle is represented by (a reference to) a record
-- containing the state of the I/O port/device. We record
-- the following pieces of info:
-- * type (read,write,closed etc.)
-- * the underlying file descriptor
-- * buffering mode
-- * buffer, and spare buffers
-- * user-friendly name (usually the
-- FilePath used when IO.openFile was called)
-- Note: when a Handle is garbage collected, we want to flush its buffer
-- and close the OS file handle, so as to free up a (precious) resource.
-- | Haskell defines operations to read and write characters from and to files,
-- represented by values of type @Handle@. Each value of this type is a
-- /handle/: a record used by the Haskell run-time system to /manage/ I\/O
-- with file system objects. A handle has at least the following properties:
--
-- * whether it manages input or output or both;
--
-- * whether it is /open/, /closed/ or /semi-closed/;
--
-- * whether the object is seekable;
--
-- * whether buffering is disabled, or enabled on a line or block basis;
--
-- * a buffer (whose length may be zero).
--
-- Most handles will also have a current I\/O position indicating where the next
-- input or output operation will occur. A handle is /readable/ if it
-- manages only input or both input and output; likewise, it is /writable/ if
-- it manages only output or both input and output. A handle is /open/ when
-- first allocated.
-- Once it is closed it can no longer be used for either input or output,
-- though an implementation cannot re-use its storage while references
-- remain to it. Handles are in the 'Show' and 'Eq' classes. The string
-- produced by showing a handle is system dependent; it should include
-- enough information to identify the handle for debugging. A handle is
-- equal according to '==' only to itself; no attempt
-- is made to compare the internal state of different handles for equality.
data Handle
= FileHandle -- A normal handle to a file
FilePath -- the file (used for error messages
-- only)
!(MVar Handle__)
| DuplexHandle -- A handle to a read/write stream
FilePath -- file for a FIFO, otherwise some
-- descriptive string (used for error
-- messages only)
!(MVar Handle__) -- The read side
!(MVar Handle__) -- The write side
deriving Typeable
-- NOTES:
-- * A 'FileHandle' is seekable. A 'DuplexHandle' may or may not be
-- seekable.
instance Eq Handle where
(FileHandle _ h1) == (FileHandle _ h2) = h1 == h2
(DuplexHandle _ h1 _) == (DuplexHandle _ h2 _) = h1 == h2
_ == _ = False
data Handle__
= forall dev enc_state dec_state . (IODevice dev, BufferedIO dev, Typeable dev) =>
Handle__ {
haDevice :: !dev,
haType :: HandleType, -- type (read/write/append etc.)
haByteBuffer :: !(IORef (Buffer Word8)),
haBufferMode :: BufferMode,
haLastDecode :: !(IORef (dec_state, Buffer Word8)),
haCharBuffer :: !(IORef (Buffer CharBufElem)), -- the current buffer
haBuffers :: !(IORef (BufferList CharBufElem)), -- spare buffers
haEncoder :: Maybe (TextEncoder enc_state),
haDecoder :: Maybe (TextDecoder dec_state),
haCodec :: Maybe TextEncoding,
haInputNL :: Newline,
haOutputNL :: Newline,
haOtherSide :: Maybe (MVar Handle__) -- ptr to the write side of a
-- duplex handle.
}
deriving Typeable
-- we keep a few spare buffers around in a handle to avoid allocating
-- a new one for each hPutStr. These buffers are *guaranteed* to be the
-- same size as the main buffer.
data BufferList e
= BufferListNil
| BufferListCons (RawBuffer e) (BufferList e)
-- Internally, we classify handles as being one
-- of the following:
data HandleType
= ClosedHandle
| SemiClosedHandle
| ReadHandle
| WriteHandle
| AppendHandle
| ReadWriteHandle
-- | True exactly for the handle types that permit input
-- ('ReadHandle' and 'ReadWriteHandle').
isReadableHandleType :: HandleType -> Bool
isReadableHandleType t = case t of
  ReadHandle      -> True
  ReadWriteHandle -> True
  _               -> False
-- | True exactly for the handle types that permit output
-- ('AppendHandle', 'WriteHandle' and 'ReadWriteHandle').
isWritableHandleType :: HandleType -> Bool
isWritableHandleType t = case t of
  AppendHandle    -> True
  WriteHandle     -> True
  ReadWriteHandle -> True
  _               -> False
-- | True only for 'ReadWriteHandle'.
isReadWriteHandleType :: HandleType -> Bool
isReadWriteHandleType t = case t of
  ReadWriteHandle{} -> True
  _                 -> False
-- INVARIANTS on Handles:
--
-- * A handle *always* has a buffer, even if it is only 1 character long
-- (an unbuffered handle needs a 1 character buffer in order to support
-- hLookAhead and hIsEOF).
-- * In a read Handle, the byte buffer is always empty (we decode when reading)
-- * In a write Handle, the Char buffer is always empty (we encode when writing)
--
checkHandleInvariants :: Handle__ -> IO ()
#ifdef DEBUG
checkHandleInvariants h_ = do
bbuf <- readIORef (haByteBuffer h_)
checkBuffer bbuf
cbuf <- readIORef (haCharBuffer h_)
checkBuffer cbuf
when (isWriteBuffer cbuf && not (isEmptyBuffer cbuf)) $
error ("checkHandleInvariants: char write buffer non-empty: " ++
summaryBuffer bbuf ++ ", " ++ summaryBuffer cbuf)
when (isWriteBuffer bbuf /= isWriteBuffer cbuf) $
error ("checkHandleInvariants: buffer modes differ: " ++
summaryBuffer bbuf ++ ", " ++ summaryBuffer cbuf)
#else
checkHandleInvariants _ = return ()
#endif
-- ---------------------------------------------------------------------------
-- Buffering modes
-- | Three kinds of buffering are supported: line-buffering,
-- block-buffering or no-buffering. These modes have the following
-- effects. For output, items are written out, or /flushed/,
-- from the internal buffer according to the buffer mode:
--
-- * /line-buffering/: the entire output buffer is flushed
-- whenever a newline is output, the buffer overflows,
-- a 'System.IO.hFlush' is issued, or the handle is closed.
--
-- * /block-buffering/: the entire buffer is written out whenever it
-- overflows, a 'System.IO.hFlush' is issued, or the handle is closed.
--
-- * /no-buffering/: output is written immediately, and never stored
-- in the buffer.
--
-- An implementation is free to flush the buffer more frequently,
-- but not less frequently, than specified above.
-- The output buffer is emptied as soon as it has been written out.
--
-- Similarly, input occurs according to the buffer mode for the handle:
--
-- * /line-buffering/: when the buffer for the handle is not empty,
-- the next item is obtained from the buffer; otherwise, when the
-- buffer is empty, characters up to and including the next newline
-- character are read into the buffer. No characters are available
-- until the newline character is available or the buffer is full.
--
-- * /block-buffering/: when the buffer for the handle becomes empty,
-- the next block of data is read into the buffer.
--
-- * /no-buffering/: the next input item is read and returned.
-- The 'System.IO.hLookAhead' operation implies that even a no-buffered
-- handle may require a one-character buffer.
--
-- The default buffering mode when a handle is opened is
-- implementation-dependent and may depend on the file system object
-- which is attached to that handle.
-- For most implementations, physical files will normally be block-buffered
-- and terminals will normally be line-buffered.
data BufferMode
= NoBuffering -- ^ buffering is disabled if possible.
| LineBuffering
-- ^ line-buffering should be enabled if possible.
| BlockBuffering (Maybe Int)
-- ^ block-buffering should be enabled if possible.
-- The size of the buffer is @n@ items if the argument
-- is 'Just' @n@ and is otherwise implementation-dependent.
deriving (Eq, Ord, Read, Show)
{-
[note Buffering Implementation]
Each Handle has two buffers: a byte buffer (haByteBuffer) and a Char
buffer (haCharBuffer).
[note Buffered Reading]
For read Handles, bytes are read into the byte buffer, and immediately
decoded into the Char buffer (see
GHC.IO.Handle.Internals.readTextDevice). The only way there might be
some data left in the byte buffer is if there is a partial multi-byte
character sequence that cannot be decoded into a full character.
Note that the buffering mode (haBufferMode) makes no difference when
reading data into a Handle. When reading, we can always just read all
the data there is available without blocking, decode it into the Char
buffer, and then provide it immediately to the caller.
[note Buffered Writing]
Characters are written into the Char buffer by e.g. hPutStr. At the
end of the operation, or when the char buffer is full, the buffer is
decoded to the byte buffer (see writeCharBuffer). This is so that we
can detect encoding errors at the right point.
Hence, the Char buffer is always empty between Handle operations.
[note Buffer Sizing]
The char buffer is always a default size (dEFAULT_CHAR_BUFFER_SIZE).
The byte buffer size is chosen by the underlying device (via its
IODevice.newBuffer). Hence the size of these buffers is not under
user control.
There are certain minimum sizes for these buffers imposed by the
library (but not checked):
- we must be able to buffer at least one character, so that
hLookAhead can work
- the byte buffer must be able to store at least one encoded
character in the current encoding (6 bytes?)
- when reading, the char buffer must have room for two characters, so
that we can spot the \r\n sequence.
How do we implement hSetBuffering?
For reading, we have never used the user-supplied buffer size, because
there's no point: we always pass all available data to the reader
immediately. Buffering would imply waiting until a certain amount of
data is available, which has no advantages. So hSetBuffering is
essentially a no-op for read handles, except that it turns on/off raw
mode for the underlying device if necessary.
For writing, the buffering mode is handled by the write operations
themselves (hPutChar and hPutStr). Every write ends with
writeCharBuffer, which checks whether the buffer should be flushed
according to the current buffering mode. Additionally, we look for
newlines and flush if the mode is LineBuffering.
[note Buffer Flushing]
** Flushing the Char buffer
We must be able to flush the Char buffer, in order to implement
hSetEncoding, and things like hGetBuf which want to read raw bytes.
Flushing the Char buffer on a write Handle is easy: it is always empty.
Flushing the Char buffer on a read Handle involves rewinding the byte
buffer to the point representing the next Char in the Char buffer.
This is done by
- remembering the state of the byte buffer *before* the last decode
- re-decoding the bytes that represent the chars already read from the
Char buffer. This gives us the point in the byte buffer that
represents the *next* Char to be read.
In order for this to work, after readTextHandle we must NOT MODIFY THE
CONTENTS OF THE BYTE OR CHAR BUFFERS, except to remove characters from
the Char buffer.
** Flushing the byte buffer
The byte buffer can be flushed if the Char buffer has already been
flushed (see above). For a read Handle, flushing the byte buffer
means seeking the device back by the number of bytes in the buffer,
and hence it is only possible on a seekable Handle.
-}
-- ---------------------------------------------------------------------------
-- Newline translation
-- | The representation of a newline in the external file or stream.
data Newline = LF -- ^ '\n'
| CRLF -- ^ '\r\n'
deriving (Eq, Ord, Read, Show)
-- | Specifies the translation, if any, of newline characters between
-- internal Strings and the external file or stream. Haskell Strings
-- are assumed to represent newlines with the '\n' character; the
-- newline mode specifies how to translate '\n' on output, and what to
-- translate into '\n' on input.
data NewlineMode
= NewlineMode { inputNL :: Newline,
-- ^ the representation of newlines on input
outputNL :: Newline
-- ^ the representation of newlines on output
}
deriving (Eq, Ord, Read, Show)
-- | The native newline representation for the current platform: 'LF'
-- on Unix systems, 'CRLF' on Windows.
nativeNewline :: Newline
#ifdef mingw32_HOST_OS
nativeNewline = CRLF
#else
nativeNewline = LF
#endif
-- | Map '\r\n' into '\n' on input, and '\n' to the native newline
-- representation on output. This mode can be used on any platform, and
-- works with text files using any newline convention. The downside is
-- that @readFile >>= writeFile@ might yield a different file.
--
-- > universalNewlineMode = NewlineMode { inputNL = CRLF,
-- > outputNL = nativeNewline }
--
universalNewlineMode :: NewlineMode
universalNewlineMode = NewlineMode { inputNL = CRLF,
outputNL = nativeNewline }
-- | Use the native newline representation on both input and output
--
-- > nativeNewlineMode = NewlineMode { inputNL = nativeNewline
-- > outputNL = nativeNewline }
--
nativeNewlineMode :: NewlineMode
nativeNewlineMode = NewlineMode { inputNL = nativeNewline,
outputNL = nativeNewline }
-- | Do no newline translation at all.
--
-- > noNewlineTranslation = NewlineMode { inputNL = LF, outputNL = LF }
--
noNewlineTranslation :: NewlineMode
noNewlineTranslation = NewlineMode { inputNL = LF, outputNL = LF }
-- ---------------------------------------------------------------------------
-- Show instance for Handles
-- handle types are 'show'n when printing error msgs, so
-- we provide a more user-friendly Show instance for it
-- than the derived one.
instance Show HandleType where
  showsPrec _ = showString . describe
    where
      -- User-friendly names; these appear inside I/O error messages.
      describe ClosedHandle     = "closed"
      describe SemiClosedHandle = "semi-closed"
      describe ReadHandle       = "readable"
      describe WriteHandle      = "writable"
      describe AppendHandle     = "writable (append)"
      describe ReadWriteHandle  = "read-writable"
instance Show Handle where
showsPrec _ (FileHandle file _) = showHandle file
showsPrec _ (DuplexHandle file _ _) = showHandle file
showHandle :: FilePath -> String -> String
showHandle file = showString "{handle: " . showString file . showString "}"
| beni55/haste-compiler | libraries/ghc-7.8/base/GHC/IO/Handle/Types.hs | bsd-3-clause | 16,363 | 0 | 13 | 3,713 | 1,355 | 814 | 541 | 136 | 1 |
{-|
Module: Math.Ftensor.Lib.General
Copyright: (c) 2015 Michael Benfield
License: ISC
Functions and type families useful in implementing tensor types.
Like other modules under @Math.Ftensor.Lib@, casual users of the library should
not need to directly use this module.
-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.Normalise #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE UndecidableInstances #-} -- for AllMultiIndicesInBounds
module Math.Ftensor.Lib.General (
MultiIndexToI,
multiIndexToI',
multiIndexToI,
InBounds,
inBounds,
AllMultiIndicesInBounds,
) where
import Data.Proxy
import GHC.Exts (Constraint)
import GHC.TypeLits
import Math.Ftensor.SizedList
import Math.Ftensor.Lib.TypeList
type family InBounds (dims::[Nat]) (multiIndex::[Nat]) :: Constraint where
InBounds '[] '[] = ()
InBounds (d ': ds) (i ': is) = (i+1 <= d, InBounds ds is)
-- | Runtime check that a multi-index lies inside the given dimensions,
-- i.e. @0 <= i < dim@ holds position-wise.  This is the value-level
-- counterpart of the 'InBounds' constraint family.
inBounds
    :: forall (dims::[Nat])
    . KnownType dims (SizedList (Length dims) Int)
    => Proxy (dims::[Nat])
    -> SizedList (Length dims) Int
    -> Bool
inBounds p = f ((summon p) :: SizedList (Length dims) Int)
  where
    -- Walk the dimension list and the index list in lock step.
    f :: SizedList len Int -> SizedList len Int -> Bool
    f (dim :- dims) (i :- is) = 0 <= i && i < dim && f dims is
    -- Both lists exhausted (same length by type): every index was in range.
    f _ _ = True
type family MultiIndexToI (dims::[Nat]) (multiIndex::[Nat]) :: Nat where
MultiIndexToI '[] '[] = 0
MultiIndexToI (dim ': dims) is = MultiIndexToI_ dims is 0
type family MultiIndexToI_ (dims::[Nat]) (multiIndex::[Nat]) (accum::Nat)
:: Nat where
MultiIndexToI_ '[] '[i] accum = accum + i
MultiIndexToI_ (dim ': dims) (i ': is) accum =
MultiIndexToI_ dims is (dim*(accum + i))
-- | Compile-time version of 'multiIndexToI': the flat offset is computed
-- entirely at the type level by 'MultiIndexToI' and merely demoted to an
-- 'Int' here.
multiIndexToI'
    :: forall (dims::[Nat]) (multiIndex::[Nat]) (result::Nat)
    . (KnownNat result, result ~ MultiIndexToI dims multiIndex)
    => Proxy dims
    -> Proxy multiIndex
    -> Int
multiIndexToI' _ _ = summon (Proxy::Proxy result)
-- | Convert a multi-index into a flat (row-major) offset at runtime.
-- The first dimension's extent is skipped: it never contributes to the
-- offset, only the trailing dimensions scale the accumulator.
multiIndexToI
    :: forall (dims::[Nat])
    . KnownType dims (SizedList (Length dims) Int)
    => Proxy dims
    -> SizedList (Length dims) Int
    -> Int
multiIndexToI p multiIndex =
    case (summon p) :: SizedList (Length dims) Int of
        -- Zero-dimensional tensor: the only valid offset is 0.
        N -> 0
        _ :- dims -> f dims multiIndex 0
  where
    -- Horner-style accumulation: accum' = dim * (accum + i).
    f :: SizedList n Int -> SizedList (n+1) Int -> Int -> Int
    f N (i :- N) accum = accum + i
    f (dim :- dims) (i :- is) accum = f dims is (dim*(accum + i))
    -- Unreachable: the length indices guarantee the lists stay in step.
    f _ _ _ = error "multiIndexToI: can't happen"
type family AllMultiIndicesInBounds (dims::[Nat]) :: [[Nat]] where
AllMultiIndicesInBounds '[] = '[ '[]]
AllMultiIndicesInBounds (d ': ds) =
CartesianProduct (EnumFromTo 0 (d-1)) (AllMultiIndicesInBounds ds)
| mikebenfield/ftensor | src/Math/Ftensor/Lib/General.hs | isc | 2,670 | 0 | 11 | 577 | 962 | 530 | 432 | -1 | -1 |
module ProjectEuler.Problem127
( problem
) where
import Control.Monad
import Data.Bifunctor
import Data.Monoid
import Math.NumberTheory.Euclidean
import Math.NumberTheory.Primes
import qualified Data.DList as DL
import qualified Data.IntMap as IM
import qualified Data.Vector as V
import ProjectEuler.Types
problem :: Problem
problem = pureProblem 127 Solved result
{-
No idea at first, as always. But there are few things that might come in handy:
- note that a + b = c, so we only need to search for two numbers and the third one
can be derived from that.
- given that a, b, c are pair-wise coprimes, therefore
rad(a * b * c) = rad(a) * rad(b) * rad(c), by definition (no shared prime factor).
- c = a + b, if c < 1000, we know a + b < 1000
- c = a + b and a < b, therefore c < 2*b
- b > 2, because:
+ when b = 2, (a,b,c) = (1,2,3), in which rad(a*b*c) < c does not hold.
+ b > 1, otherwise there is no value for a to take.
- rad(c) >= 2, this is simply because c > b > 2.
- rad(c) >= 2 && c < 2*b && rad(a)*rad(b)*rad(c) < c therefore:
rad(a)*rad(b)*rad(c) < c < 2 * b <= rad(c) * b
=> rad(a) * rad(b) < b
TODO: there are some rooms of improvement from this fact:
- precompute rad(_), create a mapping from rad(x) to list of x.
- if we start with searching b, we can constrain a to a very limited search space.
- note that gcd(rad(a), rad(b)) = 1 iff. gcd(a,b) = 1, therefore
we gain more speed by this groupping on rad(_), as there are less coprime tests to do.
Update: now the example (c < 1000) given by the problem is working,
but it is too slow to simply plugging in 120000, we need to do something else.
Update: brute forced the answer: 18407904, but the problem is designed in
a way such that we can do this much more faster, going to investigate on that.
-}
maxN :: Int
maxN = 120000
-- | Memo table of rad(x) (the radical: product of the distinct prime
-- factors of x) for 1 <= x <= 'maxN'.  Index 0 is a placeholder and must
-- never be forced.
radVec :: V.Vector Int
radVec =
    V.fromListN (maxN+1) $
      {-
      laziness in action: the actual computation only happens when that
      position in the vector is accessed for the first time.
      -}
      undefined : fmap radImpl [1..]
  where
    radImpl :: Int -> Int
    radImpl = getProduct . foldMap (Product . unPrime . fst) . factorise
rad :: Int -> Int
rad = (radVec V.!) -- requires that 0 < input <= maxN
-- | All of [1..maxN] grouped by their radical: an ascending association
-- list from rad value to the (sorted) numbers having that radical.
revRadMap :: [] (Int, [Int])
revRadMap =
    (fmap . second) DL.toList
    . IM.toAscList
    -- append it the other way to keep the values sorted.
    . IM.fromListWith (flip (<>))
    . fmap (\x -> (rad x, DL.singleton x))
    $ [1..maxN]
-- | Enumerate all abc-hits (a, b, c) with c = a + b < maxN:
-- a, b coprime (hence pairwise coprime) and rad(a*b*c) < c.
-- Search is organised around the derived bound rad(a)*rad(b) < b
-- (see the discussion comment above), iterating over the radical groups.
searchAbcHits :: [(Int, Int, Int)]
searchAbcHits = do
  (radB, bs) <- revRadMap
  b <- bs
  -- we are searching radA where radA * radB < b
  (radA, as) <- takeWhile (\(radA', _) -> radA' * radB < b) revRadMap
  guard $ coprime radB radA
  let rab = radA * radB
  -- a < b && a + b < maxN => a < min(b,maxN-b)
  a <- takeWhile (< min b (maxN-b)) as
  let c = a + b
  -- rad(a*b*c) = rad(a)*rad(b)*rad(c) by pairwise coprimality.
  guard $ rab * rad c < c
  pure (a,b,c)
-- | The answer: the sum of c over all abc-hits below maxN.
result :: Int
result = sum [c | (_, _, c) <- searchAbcHits]
| Javran/Project-Euler | src/ProjectEuler/Problem127.hs | mit | 3,032 | 0 | 12 | 747 | 518 | 287 | 231 | 43 | 1 |
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
import Misc
-- | English word for a number component.  Partial by design: only defined
-- for 1..20, the round tens, 100 and 1000 (the building blocks used by
-- 'stringify').
toWord :: Int -> String
toWord n = case n of
  1    -> "one"
  2    -> "two"
  3    -> "three"
  4    -> "four"
  5    -> "five"
  6    -> "six"
  7    -> "seven"
  8    -> "eight"
  9    -> "nine"
  10   -> "ten"
  11   -> "eleven"
  12   -> "twelve"
  13   -> "thirteen"
  14   -> "fourteen"
  15   -> "fifteen"
  16   -> "sixteen"
  17   -> "seventeen"
  18   -> "eighteen"
  19   -> "nineteen"
  20   -> "twenty"
  30   -> "thirty"
  40   -> "forty"
  50   -> "fifty"
  60   -> "sixty"
  70   -> "seventy"
  80   -> "eighty"
  90   -> "ninety"
  100  -> "hundred"
  1000 -> "thousand"
-- | Render a number (1..9999) as English words, with the original's loose
-- spacing preserved; 'ans17' strips all whitespace before counting letters.
stringify :: Int -> String
stringify n =
    thousandsPart ++ " " ++ hundredsPart ++ " " ++ andPart ++ " " ++ lastTwoPart
  where
    units     = n `mod` 10
    tens      = n `mod` 100 - n `mod` 10
    hundreds  = n `mod` 1000 - n `mod` 100
    thousands = n `mod` 10000 - n `mod` 1000
    lastTwo   = n `mod` 100
    -- 1..20 have irregular names, so the low two digits are special-cased.
    teenPart  = if lastTwo > 0 && lastTwo <= 20 then toWord lastTwo else ""
    unitPart  = if units /= 0 then toWord units else ""
    tensPart  = if tens /= 0 then toWord tens else ""
    hundredsPart  = if hundreds /= 0 then toWord (hundreds `div` 100) ++ " " ++ toWord 100 else ""
    thousandsPart = if thousands /= 0 then toWord (thousands `div` 1000) ++ " " ++ toWord 1000 else ""
    -- British style: "one hundred and four" needs an "and" between the
    -- high part and any non-empty low part.
    andPart = if (unitPart /= "" || tensPart /= "" || teenPart /= "") && (hundredsPart /= "" || thousandsPart /= "") then "and" else ""
    lastTwoPart = if teenPart /= "" then teenPart else tensPart ++ " " ++ unitPart
-- | Project Euler 17: total number of letters (all whitespace stripped via
-- @concat . words@) used writing out 1..1000 in words.
--
-- The original traversed the list three times (@map f . map g . map h@);
-- the fused comprehension does one pass.
ans17 :: Int
ans17 = sum [length (concat (words (stringify n))) | n <- [1..1000]]
| cbrghostrider/Hacking | Euler/prob17.hs | mit | 2,079 | 0 | 16 | 501 | 662 | 357 | 305 | 48 | 8 |
-------------------------------------------------------------------------------
-- |
-- Module : Dzen
-- Copyright : (c) Patrick Brisbin 2010
-- License : as-is
--
-- Maintainer : pbrisbin@gmail.com
-- Stability : unstable
-- Portability : unportable
--
-- Provides data types and functions to easily define and spawn dzen
-- bars.
--
-------------------------------------------------------------------------------
module Dzen (
-- * Usage
-- $usage
DzenConf(..)
, TextAlign(..)
, defaultDzen
, spawnDzen
, spawnToDzen
-- * API
, dzen
, dzenArgs
) where
import System.IO
import System.Posix.IO
import System.Posix.Process (executeFile, forkProcess, createSession)
import Data.List (intercalate)
-- $usage
--
-- To use, copy the source code for this module into
-- @~\/.xmonad\/lib\/Dzen.hs@ and add the following to your
-- @~\/.xmonad\/xmonad.hs@:
--
-- >
-- > import Dzen
-- > import XMonad.Hooks.DynamicLog
-- >
-- > main :: IO ()
-- > main = do
-- > d <- spawnDzen someDzen
-- >
-- > xmonad $ defaultConfig
-- > { ...
-- > , logHook = myLogHook d
-- > }
-- >
-- > myLogHook h = dynamicLogWithPP $ defaultPP
-- > { ...
-- > , ppOutput = hPutStrLn h
-- > }
--
-- If you want to feed some other process into a dzen you can use the
-- following:
--
-- > spawnToDzen "conky" someDzen
--
-- Where someDzen is a 'DzenConf' (see 'defaultDzen' for an example).
--
-- | A data type to fully describe a spawnable dzen bar, take a look at
-- @\/usr\/share\/doc\/dzen2\/README@ to see what input is acceptable.
-- Options are wrapped in 'Just', so using 'Nothing' will not add that
-- option to the @dzen2@ executable. @exec@ and @addargs@ can be
-- empty.
data DzenConf = DzenConf
{ x_position :: Maybe Int -- ^ x position
, y_position :: Maybe Int -- ^ y position
, width :: Maybe Int -- ^ width
, height :: Maybe Int -- ^ line height
, alignment :: Maybe TextAlign -- ^ alignment of title window
, font :: Maybe String -- ^ font
, fg_color :: Maybe String -- ^ foreground color
, bg_color :: Maybe String -- ^ background color
, exec :: [String] -- ^ exec flags, ex: [\"onstart=lower\", ...]
, addargs :: [String] -- ^ additional arguments, ex: [\"-p\", \"-tw\", \"5\"]
}
-- | A simple data type for the text alignment of the dzen bar
data TextAlign = LeftAlign | RightAlign | Centered
-- | 'show' 'TextAlign' makes it suitable for use as a dzen argument
-- | Rendered as the single-letter value dzen2's @-ta@ option expects.
instance Show TextAlign where
    show a = case a of
        LeftAlign  -> "l"
        RightAlign -> "r"
        Centered   -> "c"
-- | Spawn a dzen by configuraion and return its handle, behaves
-- exactly as spawnPipe but takes a DzenConf argument.
spawnDzen :: DzenConf -> IO Handle
spawnDzen d = do
    (rd, wr) <- createPipe
    -- Don't leak the write end into the exec'd shell/dzen2 process.
    setFdOption wr CloseOnExec True
    h <- fdToHandle wr
    hSetBuffering h LineBuffering
    -- Child: start its own session, wire the pipe's read end to stdin,
    -- then exec dzen2 via the shell so the quoted argument string works.
    forkProcess $ do
        createSession
        dupTo rd stdInput
        -- why does this not work?
        --executeFile "dzen2" True (dzenArgs d) Nothing
        executeFile "/bin/sh" False ["-c", dzen d] Nothing
    -- Parent keeps the write end: hPutStrLn into it feeds the bar.
    return h
-- | Spawn a process sending its stdout to the stdin of the dzen
spawnToDzen :: String -> DzenConf -> IO ()
spawnToDzen x d = do
    (rd, wr) <- createPipe
    setFdOption rd CloseOnExec True
    setFdOption wr CloseOnExec True
    -- NOTE(review): hin/hout are switched to line buffering but never used
    -- again in this function -- presumably to configure the fds before the
    -- children inherit them; confirm the handles are not leaked.
    hin <- fdToHandle rd
    hout <- fdToHandle wr
    hSetBuffering hin LineBuffering
    hSetBuffering hout LineBuffering
    -- the dzen: reads the pipe on stdin
    forkProcess $ do
        createSession
        dupTo rd stdInput
        -- why does this not work?
        --executeFile "dzen2" True (dzenArgs d) Nothing
        executeFile "/bin/sh" False ["-c", dzen d] Nothing
    -- the input process: writes its stdout into the pipe
    forkProcess $ do
        createSession
        dupTo wr stdOutput
        executeFile "/bin/sh" False ["-c", x] Nothing
    return ()
-- | The full dzen command as a string
-- | The full dzen2 invocation rendered as a single shell command string.
dzen :: DzenConf -> String
dzen conf = unwords ("dzen2" : dzenArgs conf)
-- | The right list of arguments for \"dzen2\"
-- | Translate a 'DzenConf' into the argument list for @dzen2@.
-- Options set to 'Nothing' are omitted entirely.
dzenArgs :: DzenConf -> [String]
dzenArgs d =
    concatMap flag
        [ ("-fn", fmap quote (font d))
        , ("-fg", fmap quote (fg_color d))
        , ("-bg", fmap quote (bg_color d))
        , ("-ta", fmap show  (alignment d))
        , ("-x" , fmap show  (x_position d))
        , ("-y" , fmap show  (y_position d))
        , ("-w" , fmap show  (width d))
        , ("-h" , fmap show  (height d))
        ]
    ++ execArg (exec d)
    ++ addargs d
  where
    quote s = "'" ++ s ++ "'"
    -- One flag/value pair per present option.
    flag (_, Nothing)  = []
    flag (f, Just arg) = [f, arg]
    -- All exec actions are joined into a single quoted -e argument.
    execArg [] = []
    execArg es = ["-e", quote (intercalate ";" es)]
-- | A default dzen configuration. Similar colors to default decorations
-- and prompts in other modules. Added options @-p@ and @-e
-- \'onstart=lower\'@ are useful for dzen-as-statusbar.
defaultDzen :: DzenConf
defaultDzen = DzenConf {
x_position = Nothing
, y_position = Nothing
, width = Nothing
, height = Nothing
, alignment = Just LeftAlign
, font = Just "-misc-fixed-*-*-*-*-10-*-*-*-*-*-*-*"
, fg_color = Just "#FFFFFF"
, bg_color = Just "#333333"
, exec = ["onstart=lower"]
, addargs = ["-p"]
}
| sagax/dot_rc | xmonad/station_1/lib/Dzen.hs | mit | 5,477 | 0 | 17 | 1,533 | 1,012 | 562 | 450 | 87 | 3 |
module BlocVoting.Nulldata where
import qualified Data.ByteString as BS
data Nulldata = Nulldata {
ndScript :: BS.ByteString
, ndAddress :: BS.ByteString
, ndTimestamp :: Int
, ndHeight :: Int
}
deriving (Show, Eq)
-- | Replace the script of a 'Nulldata', leaving every other field intact.
--
-- Uses record-update syntax instead of positionally rebuilding the value,
-- so this stays correct if fields are added to or reordered in 'Nulldata'.
modNulldataScript :: BS.ByteString -> Nulldata -> Nulldata
modNulldataScript newScript nd = nd { ndScript = newScript }
| XertroV/blocvoting | src/BlocVoting/Nulldata.hs | mit | 377 | 0 | 9 | 67 | 108 | 62 | 46 | 10 | 1 |
module Sudoku (Puzzle, printSudoku, displayPuzzle, sudoku) where
import Control.Monad
import Data.List (transpose)
import FD
type Puzzle = [Int]
-- | Render a puzzle as nine 'show'n rows of nine cells, one per line.
displayPuzzle :: Puzzle -> String
displayPuzzle puzzle = unlines [show row | row <- chunk 9 puzzle]
-- | Solve the puzzle and print every solution (blank line between them).
printSudoku :: Puzzle -> IO ()
printSudoku puzzle = putStr (unlines (map displayPuzzle (sudoku puzzle)))
-- | Split a list into consecutive pieces of length @n@; the final piece
-- may be shorter.  As before, diverges for @n <= 0@ on non-empty input.
chunk :: Int -> [a] -> [[a]]
chunk n = go
  where
    go [] = []
    go xs = piece : go rest
      where (piece, rest) = splitAt n xs
-- | All solutions of a puzzle (a flat list of 81 cells, 0 meaning blank),
-- solved with the finite-domain constraint solver from the FD module.
sudoku :: Puzzle -> [Puzzle]
sudoku puzzle = runFD $ do
    -- One variable per cell, each ranging over the digits 1..9.
    vars <- newVars 81 (1, 9)
    -- Pin every pre-filled cell (entry > 0) to its given digit.
    -- NOTE(review): assumes `hasValue` fixes the variable's domain to a
    -- single value -- confirm against the FD module.
    zipWithM_ (\x n -> when (n > 0) (x `hasValue` n)) vars puzzle
    -- Standard Sudoku constraints: all-different per row, column and box.
    mapM_ varsAllDifferent (rows vars)
    mapM_ varsAllDifferent (columns vars)
    mapM_ varsAllDifferent (boxes vars)
    varsLabelling vars
-- Views of the flat 81-cell list as the nine rows, nine columns and nine
-- 3x3 boxes of the grid.
rows, columns, boxes :: [a] -> [[a]]
rows = chunk 9
columns = transpose . rows
-- Split into 3-cell segments, group them into row-triples and then bands;
-- transposing inside each band lines up the three segments of each 3x3
-- box so they can be concatenated.
boxes = concat . map (map concat . transpose) . chunk 3 . chunk 3 . chunk 3
| dmoverton/finite-domain | src/Sudoku.hs | mit | 881 | 0 | 13 | 189 | 394 | 207 | 187 | 25 | 1 |
module Ten where
stops = "pbtdkg"
vowels = "aeiou"
-- | Every three-letter stop-vowel-stop combination.
stopVowelStop :: [Char] -> [Char] -> [[Char]]
stopVowelStop ss vs = [[a, b, c] | a <- ss, b <- vs, c <- ss]
-- | Stop-vowel-stop combinations whose first letter is not 'p'
-- (the trailing stop is unconstrained).
noPs :: [Char] -> [Char] -> [[Char]]
noPs ss vs = [[a, b, c] | a <- ss, b <- vs, c <- ss, a /= 'p']
-- | Stop-vowel-stop combinations whose first letter is 'p'
-- (the trailing stop is unconstrained).
onlyPs :: [Char] -> [Char] -> [[Char]]
onlyPs ss vs = [[a, b, c] | a <- ss, b <- vs, c <- ss, a == 'p']
nouns = ["house", "cane", "sugar", "wave"]
verbs = ["blow", "swim", "argue", "fart"]
-- | Every space-separated noun-verb-noun sentence.
nounVerbNoun :: [[Char]] -> [[Char]] -> [[Char]]
nounVerbNoun ns vs = [n1 ++ " " ++ v ++ " " ++ n2 | n1 <- ns, v <- vs, n2 <- ns]
-- | Average word length (characters per word) of a sentence.
avgLetters :: Fractional a => [Char] -> a
avgLetters sentence = totalChars / totalWords
  where
    ws         = words sentence
    totalWords = realToFrac (length ws)
    totalChars = realToFrac (sum (map length ws))
numLetters = realToFrac (sum (map length wordList))
-- | 'or' re-implemented with foldr; lazily short-circuits on the first
-- True.  The original's @if a then True else b@ is just @a || b@.
myOr :: [Bool] -> Bool
myOr = foldr (\x acc -> x || acc) False
-- | Same as 'myOr', written directly with the (||) operator section.
myOr' :: [Bool] -> Bool
myOr' = foldr (||) False
-- | 'any' re-implemented with foldr; short-circuits on the first element
-- satisfying @p@.  The original's @if p a then True else b@ is @p a || b@.
myAny :: (a -> Bool) -> [a] -> Bool
myAny p = foldr (\x acc -> p x || acc) False
-- | 'any' via foldr, composing the predicate straight into (||).
myAny' :: (a -> Bool) -> [a] -> Bool
myAny' p = foldr ((||) . p) False
-- | 'elem' via foldr: True if any element equals the target.
myElem :: Eq a => a -> [a] -> Bool
myElem target = foldr (\x found -> x == target || found) False
-- | Reverse a list in O(n) by consing onto an accumulator.  The original
-- @foldr (\a b -> b ++ [a]) []@ re-traverses the accumulator on every
-- step, making it O(n^2).
myReverse :: [a] -> [a]
myReverse = go []
  where
    go acc []     = acc
    go acc (x:xs) = go (x : acc) xs
-- | 'map' via foldr.  Conses directly instead of the original's
-- @[f a] ++ b@, which allocated a singleton list per element only to
-- append it.
myMap :: (a -> b) -> [a] -> [b]
myMap f = foldr (\x acc -> f x : acc) []
-- | 'filter' via foldr.  Conses kept elements directly instead of the
-- original's @[a] ++ b@ singleton-append.
myFilter :: (a -> Bool) -> [a] -> [a]
myFilter p = foldr (\x acc -> if p x then x : acc else acc) []
-- | 'concat' via foldr.  The original's @\a b -> a ++ b@ is just (++).
squish :: [[a]] -> [a]
squish = foldr (++) []
-- | 'concatMap' via foldr: map each element to a list and flatten.
squishMap :: (a -> [b]) -> [a] -> [b]
squishMap f = foldr ((++) . f) []
-- | Flatten one level of nesting by pushing 'id' through 'squishMap'.
squishAgain :: [[a]] -> [a]
squishAgain = squishMap id
-- | Greatest element under the given comparison.  Partial on the empty
-- list (foldr1).  Ties keep the later element, as before.
myMaximumBy :: (a -> a -> Ordering) -> [a] -> a
myMaximumBy cmp = foldr1 pick
  where
    pick x y = case cmp x y of
      GT -> x
      _  -> y
-- | Least element under the given comparison.  Partial on the empty list
-- (foldr1).  Ties keep the later element, as before.
myMinimumBy :: (a -> a -> Ordering) -> [a] -> a
myMinimumBy cmp = foldr1 pick
  where
    pick x y = case cmp x y of
      LT -> x
      _  -> y
| mudphone/HaskellBook | src/Ten.hs | mit | 2,064 | 0 | 11 | 588 | 1,110 | 613 | 497 | 49 | 2 |
{-# htermination compare :: Int -> Int -> Ordering #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_compare_5.hs | mit | 55 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGFEDiffuseLightingElement
(js_getIn1, getIn1, js_getSurfaceScale, getSurfaceScale,
js_getDiffuseConstant, getDiffuseConstant, js_getKernelUnitLengthX,
getKernelUnitLengthX, js_getKernelUnitLengthY,
getKernelUnitLengthY, SVGFEDiffuseLightingElement,
castToSVGFEDiffuseLightingElement,
gTypeSVGFEDiffuseLightingElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Raw JS accessor for the element's @in1@ property.
foreign import javascript unsafe "$1[\"in1\"]" js_getIn1 ::
        SVGFEDiffuseLightingElement -> IO (Nullable SVGAnimatedString)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDiffuseLightingElement.in1 Mozilla SVGFEDiffuseLightingElement.in1 documentation>
getIn1 ::
       (MonadIO m) =>
         SVGFEDiffuseLightingElement -> m (Maybe SVGAnimatedString)
getIn1 self = liftIO (nullableToMaybe <$> (js_getIn1 (self)))
-- Raw JS accessor for the element's @surfaceScale@ property.
foreign import javascript unsafe "$1[\"surfaceScale\"]"
        js_getSurfaceScale ::
        SVGFEDiffuseLightingElement -> IO (Nullable SVGAnimatedNumber)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDiffuseLightingElement.surfaceScale Mozilla SVGFEDiffuseLightingElement.surfaceScale documentation>
getSurfaceScale ::
                (MonadIO m) =>
                  SVGFEDiffuseLightingElement -> m (Maybe SVGAnimatedNumber)
getSurfaceScale self
  = liftIO (nullableToMaybe <$> (js_getSurfaceScale (self)))
-- Raw JS accessor for the element's @diffuseConstant@ property.
foreign import javascript unsafe "$1[\"diffuseConstant\"]"
        js_getDiffuseConstant ::
        SVGFEDiffuseLightingElement -> IO (Nullable SVGAnimatedNumber)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDiffuseLightingElement.diffuseConstant Mozilla SVGFEDiffuseLightingElement.diffuseConstant documentation>
getDiffuseConstant ::
                   (MonadIO m) =>
                     SVGFEDiffuseLightingElement -> m (Maybe SVGAnimatedNumber)
getDiffuseConstant self
  = liftIO (nullableToMaybe <$> (js_getDiffuseConstant (self)))
-- Raw JS accessor for the element's @kernelUnitLengthX@ property.
foreign import javascript unsafe "$1[\"kernelUnitLengthX\"]"
        js_getKernelUnitLengthX ::
        SVGFEDiffuseLightingElement -> IO (Nullable SVGAnimatedNumber)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDiffuseLightingElement.kernelUnitLengthX Mozilla SVGFEDiffuseLightingElement.kernelUnitLengthX documentation>
getKernelUnitLengthX ::
                     (MonadIO m) =>
                       SVGFEDiffuseLightingElement -> m (Maybe SVGAnimatedNumber)
getKernelUnitLengthX self
  = liftIO (nullableToMaybe <$> (js_getKernelUnitLengthX (self)))
-- Raw JS accessor for the element's @kernelUnitLengthY@ property.
foreign import javascript unsafe "$1[\"kernelUnitLengthY\"]"
        js_getKernelUnitLengthY ::
        SVGFEDiffuseLightingElement -> IO (Nullable SVGAnimatedNumber)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDiffuseLightingElement.kernelUnitLengthY Mozilla SVGFEDiffuseLightingElement.kernelUnitLengthY documentation>
getKernelUnitLengthY ::
                     (MonadIO m) =>
                       SVGFEDiffuseLightingElement -> m (Maybe SVGAnimatedNumber)
getKernelUnitLengthY self
  = liftIO (nullableToMaybe <$> (js_getKernelUnitLengthY (self)))
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module App where
import Control.Monad.Reader (ReaderT)
import Control.Monad.Trans.Either (EitherT)
import qualified Hasql as H
import qualified Hasql.Postgres as HP
import Servant
import Web.Scotty (ActionM)
-- | A Scotty handler parameterised over the application configuration.
type RouteHandler = AppConfig -> ActionM ()
-- | Runs a Hasql Postgres session, yielding either a session error or a result.
type PostgresSession = forall m a. H.Session HP.Postgres m a
                                -> m (Either (H.SessionError HP.Postgres) a)
-- | Servant handler monad: reads 'AppConfig', may fail with 'ServantErr'.
type RouteM = ReaderT AppConfig (EitherT ServantErr IO)
-- | Application-wide configuration; currently just the database session runner.
data AppConfig = AppConfig {
  pgsession :: PostgresSession }
| muhbaasu/pfennig-server | src/Pfennig/App.hs | mit | 714 | 0 | 12 | 220 | 154 | 92 | 62 | 16 | 0 |
module Main (main) where
import Nauva.Server
import Nauva.Product.Nauva.Catalog (catalogApp)
-- | Entry point: serve the Nauva component catalog through the dev server.
main :: IO ()
main = devServer catalogApp
module Numeric.ByteString
( toIntegerBE
, toIntegerLE
) where
import qualified Data.ByteString as BS
import Data.Bits (shiftL, (.|.))

-- | Interpret a ByteString as an unsigned big-endian integer
-- (first byte is most significant). Empty input yields 0.
--
-- Fixes: the module exported 'toIntegerLE' without defining it, and
-- 'toIntegerBE' referred to an undefined @decode@ with no ByteString import.
toIntegerBE :: BS.ByteString -> Integer
toIntegerBE = BS.foldl' (\acc w -> acc `shiftL` 8 .|. fromIntegral w) 0

-- | Interpret a ByteString as an unsigned little-endian integer
-- (first byte is least significant). Empty input yields 0.
toIntegerLE :: BS.ByteString -> Integer
toIntegerLE = BS.foldr (\w acc -> acc `shiftL` 8 .|. fromIntegral w) 0
| nickspinale/crypto | src/Numeric/ByteString.hs | mit | 133 | 0 | 5 | 29 | 28 | 17 | 11 | 5 | 1 |
-- Testing.hs
-- Simplest assertion functions ever ;)
module Utils.Testing (assertEqual, assertTrue, assertFalse) where
import Utils.Console
-- | True when the observed value equals the expected one.
assert :: Eq a => a -> a -> Bool
assert expected value = value == expected
-- | Print a one-line OK/FAIL status for @expected == value@; on failure
-- also print both values (colourised via Utils.Console wrappers).
assertEqual :: (Eq a, Show a) => String -> a -> a -> IO()
assertEqual message expected value = do
  let
    -- "\r[ OK ]" / "\r[FAIL]" tag; \r rewinds to overwrite the current line.
    boolToStr bv = "\r[" ++ (if bv then wrapGreen " OK " else wrapRed "FAIL") ++ "]"
    result = assert expected value
  putStrLn $ " " ++ " " ++ message ++ " " ++ (boolToStr result)
  if not result
    then do
      putStrLn $ "Expected --> " ++ (wrapYellow (show expected))
      putStrLn $ "     Got --> " ++ (wrapRed (show value))
    else return ()
-- | Assert that a boolean is True, reporting under the given label.
assertTrue :: String -> Bool -> IO()
assertTrue message = assertEqual message True
-- | Assert that a boolean is False, reporting under the given label.
assertFalse :: String -> Bool -> IO()
assertFalse message = assertEqual message False
| etrepat/cis194 | src/Utils/Testing.hs | mit | 880 | 0 | 14 | 198 | 318 | 160 | 158 | 19 | 3 |
module Main where
import JSON
import Model
import GLA
import Text.JSON
import System.Environment
-- | Read a JSON configuration file named on the command line, decode it,
-- and print the result of running the GLA over it.
--
-- Fixes: the original guarded on @length args /= 1@ and then used the
-- partial 'head'; a case pattern expresses the same contract totally.
main :: IO ()
main = do
  progName <- getProgName
  args <- getArgs
  case args of
    [filename] -> do
      putStrLn ("Reading from: " ++ filename)
      content <- readFile filename
      -- decode may fail; surface JSON errors via 'fail'.
      case (decode content :: Result Configuration) of
        Error str -> fail str
        Ok config -> putStrLn (show (runGLA config))
    _ -> fail ("Usage: " ++ progName ++ " <configuration file.json>")
| Vetii/SCFDMA | app/Main.hs | mit | 570 | 0 | 17 | 178 | 180 | 87 | 93 | 20 | 3 |
main = do
line <- getLine
let a:b:c:_ = (map read $ words line) :: [Int]
putStrLn $ solve a b c where
solve a b c
| a*a+b*b==c*c || a*a+c*c==b*b || b*b+c*c==a*a = "yes"
| a+b<=c || a+c<=b || b+c<=a = "not a triangle"
| otherwise = "no"
| Voleking/ICPC | references/aoapc-book/BeginningAlgorithmContests/haskell/ch1/ex1-9.hs | mit | 264 | 0 | 24 | 77 | 205 | 99 | 106 | 8 | 1 |
--
--
--
------------------
-- Exercise 12.31.
------------------
--
--
--
module E'12'31 where
| pascal-knodel/haskell-craft | _/links/E'12'31.hs | mit | 106 | 0 | 2 | 24 | 13 | 12 | 1 | 1 | 0 |
{- |
module: $Header$
description: Stream parsers
license: MIT
maintainer: Joe Leslie-Hurd <joe@gilith.com>
stability: provisional
portability: portable
-}
module OpenTheory.Parser.Stream
where
import qualified OpenTheory.Primitive.Natural as Natural
import qualified Test.QuickCheck as QuickCheck
-- | A possibly-infinite stream of tokens that terminates either in
-- successful end-of-input ('Eof') or a parse failure ('Error').
data Stream a =
    Error
  | Eof
  | Cons a (Stream a)
  deriving (Eq, Ord, Show)
-- | Prepend the elements of a list onto the front of a stream.
append :: [a] -> Stream a -> Stream a
append ys xs = foldr Cons xs ys
-- | Turn a list into a stream terminated by 'Eof'.
fromList :: [a] -> Stream a
fromList = foldr Cons Eof
-- | Number of 'Cons' cells in a stream; both terminators count as 0.
-- Diverges on an infinite stream.
lengthStream :: Stream a -> Natural.Natural
lengthStream (Cons _ rest) = 1 + lengthStream rest
lengthStream _ = 0
-- | Apply a function to every element, preserving the terminator.
mapStream :: (a -> b) -> Stream a -> Stream b
mapStream f stream =
  case stream of
    Error     -> Error
    Eof       -> Eof
    Cons x xs -> Cons (f x) (mapStream f xs)
-- | Convert a stream to (elements, errorFlag); the flag is True iff
-- the stream ended in 'Error'.
toList :: Stream a -> ([a], Bool)
toList Error = ([], True)
toList Eof = ([], False)
toList (Cons x rest) =
  case toList rest of
    (l, e) -> (x : l, e)
-- Generate a random finite stream: a random list plus a random choice
-- of terminator ('Error' when the generated Bool is True, else 'Eof').
instance QuickCheck.Arbitrary a => QuickCheck.Arbitrary (Stream a) where
  arbitrary =
      fmap (\(l, b) -> append l (if b then Error else Eof))
        QuickCheck.arbitrary
| gilith/opentheory | data/haskell/opentheory-parser/src/OpenTheory/Parser/Stream.hs | mit | 1,140 | 0 | 11 | 228 | 459 | 244 | 215 | 29 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-}
module World
-- (
-- World, Env, Agents, Agent(..), Genome, Chromosome, GeneStateTable, Locus(..), Gene(..), Tfbs(..), ID, Thres, GeneState
-- , devAg, agent0, groupGeneTfbs, gSTFromGenome, fitnessAgent, showGST, hammDist, targetGST, hammDistAg)
where
import Data.Maybe (fromMaybe)
import Control.Monad
import qualified Data.Map.Strict as Map
import qualified Parameters as P
import MyRandom
import Types
import System.Random.Shuffle (shuffle')
import Control.Monad.State (state)
-- | Develops agents according to 'P.devTime' and 'updateAgent'
-- | Develop an agent to its attractor state; when
-- 'P.resetGeneStatesOnBirth' is set, gene states are reset first.
-- A development that never converges yields 'NoAgent'.
devAg :: Agent -> Agent
devAg a =
  let start = if P.resetGeneStatesOnBirth then setToStart a else a
  in fromMaybe NoAgent (devAg' start)
-- | Iterate 'updateAgent' at most 'P.devTime' steps and stop at the first
-- repeated gene-state table (a point attractor). Returns Nothing if the
-- iteration list is exhausted without... (Note: the single-element case is
-- reached only when exactly devTime steps ran; it is flagged as cyclic.)
devAg' :: Agent -> Maybe Agent
devAg' = takeUntilSame . take P.devTime . iterate updateAgent
  where
    -- takeUntilSame [_,_] = Nothing; takeUntilSame [_] = Nothing; takeUntilSame [] = Nothing
    takeUntilSame [] = Nothing
    takeUntilSame [a] = case a of
      -- Ran out of steps without two equal consecutive states:
      -- treat the agent as sitting on a cyclic attractor.
      Agent {} -> Just $ a { hasCyclicAttractor = True }
      NoAgent -> error "updateAgent returned dead agent"
    takeUntilSame (a:b:rest) =
      if sameGST a b
        then Just $ a { hasCyclicAttractor = False }
        else takeUntilSame $ b:rest
-- | Do two agents share GST
-- | Whether two agents share a gene-state table.
-- Clause order matters: a 'NoAgent' on the left compares equal to
-- anything, while one on the right (only) compares unequal.
sameGST :: Agent -> Agent -> Bool
sameGST NoAgent _ = True
sameGST _ NoAgent = False
sameGST agA agB = geneStateTable agA == geneStateTable agB
-- | The default GeneStateTable where All genes have expression 0
-- | The default GeneStateTable: every gene ID maps to expression 0.
defaultGst :: GST
defaultGst = Map.fromList states
  where states = zip [0 ..] (replicate P.nrGeneTypes' 0) :: [(ID, GeneState)]
-- -- | Check if locus is effected by expressed transcription factors
-- -- Return the effect it has. 0 is no effect.
-- locusEffect :: Locus -> GeneStateTable -> Weight
-- locusEffect (CTfbs (Tfbs i w st)) gst
-- | Map.lookup i gst == Just 1 = w
-- | otherwise = 0
-- locusEffect _ _ = 0
-- | Reset an agent's genome and gene-state table to 'P.startingGST'.
-- Calling this on 'NoAgent' is a programming error.
setToStart :: Agent -> Agent
setToStart NoAgent = error "Cannot set NoAgent to start. World.hs:setToStart"
setToStart ag = ag { genome = updateGenome P.startingGST $ genome ag
                   , geneStateTable = P.startingGST }
-- | Updates the genestates in the genome and the genestatetable with
-- 'updateGenome' and 'gSTFromGenome'
-- Kills the agent if it doesn't have all genes (when length gst /= 'P.nrGeneTypes')
-- | One development step: recompute the genome from the current GST and
-- rebuild the GST from the new genome. The agent dies (becomes 'NoAgent')
-- if the resulting GST no longer covers all 'P.nrGeneTypes'' genes.
updateAgent :: Agent -> Agent
updateAgent NoAgent = NoAgent
updateAgent ag =
    if length newGST == P.nrGeneTypes'
        then ag { genome = newGenome, geneStateTable = newGST}
        else NoAgent
    where
        newGenome = updateGenome (geneStateTable ag) (genome ag)
        newGST = toGST newGenome
-- | Updates every Chromosome in Genome with updateChrom
-- | Apply 'updateChrom' to every chromosome in the genome.
updateGenome :: InferGST gst => gst -> Genome -> Genome
updateGenome = map . updateChrom
-- | Two-argument function composition: @(f ↞ g) x y = f (g x y)@.
(↞) :: (b -> c) -> (a1 -> a -> b) -> a1 -> a -> c
(↞) = (.).(.)
-- | Update a chromosome: refresh tfbs states from the GST, then
-- recompute gene states from the refreshed binding sites.
updateChrom :: InferGST gst => gst -> Chromosome -> Chromosome
updateChrom = updateGenes ↞ updateTfbss
-- | Copy each tfbs's state from the expression level of the gene it binds.
-- With 'P.dosiseffect' the raw level is used; otherwise it is clamped to
-- 0/1 by 'bottom'.
updateTfbss :: InferGST gst => gst -> Chromosome -> Chromosome
updateTfbss !gst' =
    map (onTfbs $ \t -> if P.dosiseffect
            then t {tfbsSt = gst Map.! tfbsID t}
            else t {tfbsSt = bottom $ gst Map.! tfbsID t}
    )
    where bottom gs | gs == GS 0 = GS 0
                    | gs > GS 0 = GS 1
                    | otherwise = error "negative GeneState"
          gst = toGST gst'
-- | Walk the chromosome accumulating weighted tfbs inputs (weight * state);
-- on reaching a gene, set its state from the accumulator and reset to 0.
updateGenes :: Chromosome -> Chromosome
updateGenes = updateGenes' 0
    where updateGenes' :: Integer -> Chromosome -> Chromosome
          updateGenes' _ [] = []
          updateGenes' !a (l:ls) = case l of
              CTfbs t -> l : updateGenes' (a + s * w) ls
                  where w = toInteger $ wt t; s = toInteger $ tfbsSt t
              _    -> onGene (updateGene a) l : updateGenes' 0 ls
-- | Express a gene iff the accumulated input exceeds its threshold.
updateGene :: Integer -> Gene -> Gene
updateGene !a !g = g {genSt = newState} where
    newState | a <= t = GS 0 --Gene never stays the same: fix change <= to < uncomment next line
             -- | a == t = genSt g
             | otherwise = GS 1
        where t = toInteger $ thres g
-- -- | Updates each gene in Chromosome using 'updateLoc'
-- -- Initial argument is an accumulator that counts the effects of binding sites.
-- updateChrom :: Weight -> GeneStateTable -> Chromosome -> Chromosome
-- updateChrom a gst (l:ls) = l' : updateChrom a' gst ls
-- where (a', l') = updateLoc a gst l
-- updateChrom _ _ _ = []
--
-- -- | if Tfbs, increment accumulater with effect
-- -- if Gene, change the expression according to accumulater and set accumulater to 0
-- -- returns a (accumulator, changed Locus) pair
-- updateLoc :: Weight -> GeneStateTable -> Locus -> (Weight, Locus)
-- updateLoc a gst loc@(CTfbs (Tfbs i w))
-- | Map.lookup i gst == Just 1 = (a + w, loc)
-- | otherwise = (a, loc)
-- updateLoc a _ (CGene (Gene i t st)) =
-- (0, CGene (Gene i t newState)) where
-- newState | fromIntegral a <= t = GS 0 --Gene never stays the same: fix change <= to < uncomment next line
-- -- | fromIntegral a == t = st
-- | otherwise = GS 1
-- updateLoc a _ loc = (a, loc)
-- | Groups genes and preceding transcription factors together
-- e.g. [TF, TF, Gene, TF, Gene, TF] -> [[TF, TF, Gene], [TF, Gene], [TF]]
-- | Groups genes and preceding transcription factors together,
-- e.g. [TF, TF, Gene, TF, Gene, TF] -> [[TF, TF, Gene], [TF, Gene], [TF]].
-- A trailing run with no gene becomes a final group on its own.
groupGeneTfbs :: [Locus] -> [[Locus]]
groupGeneTfbs [] = []
groupGeneTfbs loci = h: groupGeneTfbs t
    where (h,t) = takeWhileInclusive isGene loci
-- | takes a predicate and returns a pair of lists with the first ending with
-- the element that satisfies the predicate
-- | Split a list just after the first element that satisfies the
-- predicate: the first component ends with that element, the second is
-- the remainder. If no element satisfies it, the whole list is the
-- first component.
--
-- Fixes: the original accumulated with @acc ++ [x]@ on every step
-- (O(n^2)); 'break' finds the split point in one pass.
takeWhileInclusive :: (a -> Bool) -> [a] -> ([a],[a])
takeWhileInclusive p ls =
  case break p ls of
    (prefix, [])     -> (prefix, [])
    (prefix, x : xs) -> (prefix ++ [x], xs)
--
-- > foldr f z [] = z
-- > foldr f z (x:xs) = x `f` foldr f z xs
-- | Answers the question: Does every gene of this genome have at least one associated transcription factor?
-- | Does every gene of this genome have at least one associated
-- transcription factor? (Each 'groupGeneTfbs' group must hold more
-- than just the gene itself.)
connected :: Genome -> Bool
connected genome = all (\grp -> length grp > 1) (groupGeneTfbs (concat genome))
-- | Generate a random Agent using 'goodRandomGenome'
-- | Generate a random developed agent; retries until development succeeds.
randomAgent :: Rand Agent
randomAgent = do
    randGenome <- goodRandomGenome
    let agent = devAg $ emptyAgent {genome = randGenome, geneStateTable = defaultGst}
    if agent == NoAgent
        then randomAgent
        -- NOTE(review): 'devAg' is applied a second time to an
        -- already-developed agent here — confirm this is intentional.
        else return $ devAg agent
-- | Generate a random genome that is 'connected'
-- loops until it finds one
-- | Keep drawing random genomes until one is 'connected'.
goodRandomGenome :: Rand Genome
goodRandomGenome = do
  candidate <- randomGenome
  if connected candidate
    then return candidate
    else goodRandomGenome
-- | Just a 'randomChromosome'
-- | A genome consisting of a single random (longer) chromosome.
randomGenome :: Rand Genome
randomGenome = fmap (\c -> [c]) randomChromosomeLonger
-- | Generate a random chromosome by shuffling twice as many random Tfbss
-- as random genes. using 'randomTfbss' and 'randomGenes' and 'shuffle''
-- | Random chromosome: twice as many random tfbss as genes, shuffled.
randomChromosome :: Rand Chromosome
randomChromosome = do
    r <- getModifyRand
    randomChrom <- concat <$> sequence [randomTfbss,randomTfbss,randomGenes]
    return $ shuffle' randomChrom (length randomChrom) r
-- | Like 'randomChromosome' but with 5 tfbs sets and 2 gene sets.
randomChromosomeLonger :: Rand Chromosome
randomChromosomeLonger = do
    r <- getModifyRand
    randomChrom <- concat <$>
        sequence (replicate 5 randomTfbss ++ [randomGenes, randomGenes])
    return $ shuffle' randomChrom (length randomChrom) r
-- | Generate all possible Tfbss (0..nrGeneTypes) with each random weight
-- | One tfbs per gene type (IDs 0..n-1), each with a random +/-1 weight
-- and state 0, returned in shuffled order.
randomTfbss :: Rand [Locus]
randomTfbss = do
    randomWeights <- replicateM n' $ state randomW
    r <- getModifyRand
    let shuffled = shuffle' randomWeights n' r
    return $ map CTfbs $ shuffle' (zipWith3 Tfbs [0..n-1] shuffled (repeat 0)) n' r
        where
            n' = P.nrGeneTypes'; n = P.nrGeneTypes
-- | Draw a random weight: -1 or 1 with equal probability.
randomW :: PureMT -> (Weight, PureMT)
randomW g = let (d, g') = randomBool g
            in if d then (-1, g') else (1, g')
-- | Generate all possible genes (0..nrGeneTypes) with each random threshold
-- and state 0
-- | One gene per gene type (IDs 0..n-1), each with a random threshold in
-- [P.minThres, P.maxThres] and expression state 0, shuffled.
randomGenes :: Rand [Locus]
randomGenes = do
    randomThresholds <- replicateM n' $ getRange (P.minThres, P.maxThres)
    r <- getModifyRand
    let shuffled = shuffle' randomThresholds n' r
    return $ map CGene $ shuffle' (zipWith makeGene [0..n-1] shuffled) n' r
        where n' = P.nrGeneTypes'; n = P.nrGeneTypes
              makeGene i t = Gene i t 0
-- | An agent is dead exactly when it is not living.
dead :: Agent -> Bool
dead ag = not (living ag)
-- | An agent is living exactly when it is not 'NoAgent'.
living :: Agent -> Bool
living ag = ag /= NoAgent
| KarimxD/Evolverbetert | src/World.hs | mit | 9,101 | 1 | 14 | 2,410 | 1,944 | 1,044 | 900 | 134 | 5 |
module Philed.Control.Monad (
module Control.Monad,
module Philed.Control.MonadExtras) where
import Control.Monad
import Philed.Control.MonadExtras
| Chattered/PhilEdCommon | Philed/Control/Monad.hs | mit | 153 | 0 | 5 | 17 | 33 | 22 | 11 | 5 | 0 |
{-# LANGUAGE CPP #-}
{- |
Module : Language.Scheme.Variables
Copyright : Justin Ethier
Licence : MIT (see LICENSE in the distribution)
Maintainer : github.com/justinethier
Stability : experimental
Portability : portable
This module contains code for working with Scheme variables,
and the environments that contain them.
-}
module Language.Scheme.Variables
(
-- * Environments
printEnv
, recPrintEnv
, recExportsFromEnv
, exportsFromEnv
, copyEnv
, extendEnv
, importEnv
, topmostEnv
, nullEnvWithParent
, findNamespacedEnv
, macroNamespace
, varNamespace
-- * Getters
, getVar
, getVar'
, getNamespacedVar
, getNamespacedVar'
, getNamespacedRef
-- * Setters
, defineVar
, defineNamespacedVar
, setVar
, setNamespacedVar
, updateObject
, updateNamespacedObject
-- * Predicates
, isBound
, isRecBound
, isNamespacedRecBound
-- * Pointers
, derefPtr
-- , derefPtrs
, recDerefPtrs
, safeRecDerefPtrs
, recDerefToFnc
) where
import Language.Scheme.Types
import Control.Monad.Except
import Data.Array
import Data.IORef
import qualified Data.Map
-- import Debug.Trace
-- |Internal namespace for macros
-- |Internal namespace for macros; binding keys get the @m_@ prefix
-- (see 'getVarName').
macroNamespace :: Char
macroNamespace = 'm'
-- |Internal namespace for variables; binding keys get the @v_@ prefix
-- (see 'getVarName').
varNamespace :: Char
varNamespace = 'v'
-- Experimental code:
-- From: http://rafaelbarreto.com/2011/08/21/comparing-objects-by-memory-location-in-haskell/
--
-- import Foreign
-- isMemoryEquivalent :: a -> a -> IO Bool
-- isMemoryEquivalent obj1 obj2 = do
-- obj1Ptr <- newStablePtr obj1
-- obj2Ptr <- newStablePtr obj2
-- let result = obj1Ptr == obj2Ptr
-- freeStablePtr obj1Ptr
-- freeStablePtr obj2Ptr
-- return result
--
-- -- Using above, search an env for a variable definition, but stop if the upperEnv is
-- -- reached before the variable
-- isNamespacedRecBoundWUpper :: Env -> Env -> String -> String -> IO Bool
-- isNamespacedRecBoundWUpper upperEnvRef envRef namespace var = do
-- areEnvsEqual <- liftIO $ isMemoryEquivalent upperEnvRef envRef
-- if areEnvsEqual
-- then return False
-- else do
-- found <- liftIO $ isNamespacedBound envRef namespace var
-- if found
-- then return True
-- else case parentEnv envRef of
-- (Just par) -> isNamespacedRecBoundWUpper upperEnvRef par namespace var
-- Nothing -> return False -- Var never found
--
-- |Create a variable's name in an environment using given arguments
-- | Build the key under which a variable is stored in an environment:
-- the namespace character, an underscore, then the variable name.
getVarName :: Char -> String -> String
getVarName namespace name = namespace : '_' : name
-- |Show the contents of an environment
printEnv :: Env -- ^Environment
         -> IO String -- ^Contents of the env as a string
printEnv env = do
  binds <- liftIO $ readIORef $ bindings env
  l <- mapM showVar $ Data.Map.toList binds
  return $ unlines l
 where
  showVar (name, val) = do
    v <- liftIO $ readIORef val
    return $ "[" ++ name ++ "]" ++ ": " ++ show v
-- |Recursively print an environment to string, one env per paragraph,
-- walking up the parent chain.
recPrintEnv :: Env -> IO String
recPrintEnv env = do
  envStr <- liftIO $ printEnv env
  case parentEnv env of
    Just par -> do
      parEnvStr <- liftIO $ recPrintEnv par
      return $ envStr ++ "\n" ++ parEnvStr
    Nothing -> return envStr
-- |Recursively find all exports from the given environment
-- and all of its parents.
recExportsFromEnv :: Env -> IO [LispVal]
recExportsFromEnv env = do
  xs <- exportsFromEnv env
  case parentEnv env of
    Just par -> do
      pxs <- liftIO $ recExportsFromEnv par
      return $ xs ++ pxs
    Nothing -> return xs
-- |Return a list of symbols exported from an environment:
-- every macro- or variable-namespace binding key, with its
-- namespace prefix stripped and wrapped as an 'Atom'.
exportsFromEnv :: Env
               -> IO [LispVal]
exportsFromEnv env = do
  binds <- liftIO $ readIORef $ bindings env
  return $ getExports [] $ fst $ unzip $ Data.Map.toList binds
 where
  getExports acc (('m':'_':b) : bs) = getExports (Atom b:acc) bs
  getExports acc (('v':'_':b) : bs) = getExports (Atom b:acc) bs
  getExports acc (_ : bs) = getExports acc bs
  getExports acc [] = acc
-- |Create a deep copy of an environment
-- The parent reference is shared, bindings are copied IORef-by-IORef,
-- and the pointers map is copied as a whole (its IORefs are shared).
copyEnv :: Env -- ^ Source environment
        -> IO Env -- ^ A copy of the source environment
copyEnv env = do
  ptrs <- liftIO $ readIORef $ pointers env
  ptrList <- newIORef ptrs
  binds <- liftIO $ readIORef $ bindings env
  bindingListT <- mapM addBinding $ Data.Map.toList binds
  bindingList <- newIORef $ Data.Map.fromList bindingListT
  return $ Environment (parentEnv env) bindingList ptrList
 where addBinding (name, val) = do
         x <- liftIO $ readIORef val
         ref <- newIORef x
         return (name, ref)
-- |Perform a deep copy of an environment's contents into
-- another environment.
--
-- The destination environment is modified!
--
importEnv
  :: Env -- ^ Destination environment
  -> Env -- ^ Source environment
  -> IO Env
importEnv dEnv sEnv = do
  sPtrs <- liftIO $ readIORef $ pointers sEnv
  dPtrs <- liftIO $ readIORef $ pointers dEnv
  -- Data.Map.union is left-biased: source entries win on key clashes.
  writeIORef (pointers dEnv) $ Data.Map.union sPtrs dPtrs
  sBinds <- liftIO $ readIORef $ bindings sEnv
  dBinds <- liftIO $ readIORef $ bindings dEnv
  writeIORef (bindings dEnv) $ Data.Map.union sBinds dBinds
  -- Also pull in every ancestor of the source environment.
  case parentEnv sEnv of
    Just ps -> importEnv dEnv ps
    Nothing -> return dEnv
-- |Extend given environment by binding a series of values to a new environment.
extendEnv :: Env -- ^ Environment
          -> [((Char, String), LispVal)] -- ^ Extensions to the environment
          -> IO Env -- ^ Extended environment
extendEnv envRef abindings = do
  bindinglistT <- (mapM addBinding abindings)
  bindinglist <- newIORef $ Data.Map.fromList bindinglistT
  nullPointers <- newIORef $ Data.Map.fromList []
  return $ Environment (Just envRef) bindinglist nullPointers
 where addBinding ((namespace, name), val) = do ref <- newIORef val
                                                return (getVarName namespace name, ref)
-- |Find the top-most environment
-- |Find the top-most (root) environment by following parent links.
topmostEnv :: Env -> IO Env
topmostEnv envRef = do
  case parentEnv envRef of
    Just p -> topmostEnv p
    Nothing -> return envRef
-- |Create a null environment with the given environment as its parent.
nullEnvWithParent :: Env -> IO Env
nullEnvWithParent p = do
  Environment _ binds ptrs <- nullEnv
  return $ Environment (Just p) binds ptrs
-- |Recursively search environments to find one that contains the given variable.
findNamespacedEnv
    :: Env -- ^Environment to begin the search;
           -- parent env's will be searched as well.
    -> Char -- ^Namespace
    -> String -- ^Variable
    -> IO (Maybe Env) -- ^Environment, or Nothing if there was no match.
findNamespacedEnv envRef namespace var = do
  found <- liftIO $ isNamespacedBound envRef namespace var
  if found
     then return (Just envRef)
     else case parentEnv envRef of
               (Just par) -> findNamespacedEnv par namespace var
               Nothing -> return Nothing
-- |Determine if a variable is bound in the default namespace
isBound :: Env -- ^ Environment
        -> String -- ^ Variable
        -> IO Bool -- ^ True if the variable is bound
isBound envRef = isNamespacedBound envRef varNamespace
-- |Determine if a variable is bound in the default namespace,
-- in this environment or one of its parents.
isRecBound :: Env -- ^ Environment
           -> String -- ^ Variable
           -> IO Bool -- ^ True if the variable is bound
isRecBound envRef = isNamespacedRecBound envRef varNamespace
-- |Determine if a variable is bound in a given namespace
-- (this environment only; parents are not searched).
isNamespacedBound
  :: Env -- ^ Environment
  -> Char -- ^ Namespace
  -> String -- ^ Variable
  -> IO Bool -- ^ True if the variable is bound
isNamespacedBound envRef namespace var =
  readIORef (bindings envRef) >>= return . Data.Map.member (getVarName namespace var)
-- |Determine if a variable is bound in a given namespace
-- or a parent of the given environment.
isNamespacedRecBound
  :: Env -- ^ Environment
  -> Char -- ^ Namespace
  -> String -- ^ Variable
  -> IO Bool -- ^ True if the variable is bound
isNamespacedRecBound envRef namespace var = do
  env <- findNamespacedEnv envRef namespace var
  case env of
    (Just e) -> isNamespacedBound e namespace var
    Nothing -> return False
-- |Retrieve the value of a variable defined in the default namespace
getVar :: Env -- ^ Environment
       -> String -- ^ Variable
       -> IOThrowsError LispVal -- ^ Contents of the variable
getVar envRef = getNamespacedVar envRef varNamespace
-- |Retrieve the value of a variable defined in the default namespace,
-- or Nothing if it is not defined
getVar' :: Env -- ^ Environment
        -> String -- ^ Variable
        -> IOThrowsError (Maybe LispVal) -- ^ Contents of the variable
getVar' envRef = getNamespacedVar' envRef varNamespace
-- |Retrieve an ioRef defined in a given namespace
-- (the mutable cell itself, not its contents).
getNamespacedRef :: Env -- ^ Environment
                 -> Char -- ^ Namespace
                 -> String -- ^ Variable
                 -> IOThrowsError (IORef LispVal)
getNamespacedRef envRef
                 namespace
                 var = do
  v <- getNamespacedObj' envRef namespace var return
  case v of
    Just a -> return a
    Nothing -> (throwError $ UnboundVar "Getting an unbound variable" var)
-- |Retrieve the value of a variable defined in a given namespace;
-- throws 'UnboundVar' when it is not defined anywhere in the chain.
getNamespacedVar :: Env -- ^ Environment
                 -> Char -- ^ Namespace
                 -> String -- ^ Variable
                 -> IOThrowsError LispVal -- ^ Contents of the variable
getNamespacedVar envRef
                 namespace
                 var = do
  v <- getNamespacedVar' envRef namespace var
  case v of
    Just a -> return a
    Nothing -> (throwError $ UnboundVar "Getting an unbound variable" var)
-- |Retrieve the value of a variable defined in a given namespace,
-- or Nothing if it is not defined
getNamespacedVar' :: Env -- ^ Environment
                  -> Char -- ^ Namespace
                  -> String -- ^ Variable
                  -> IOThrowsError (Maybe LispVal) -- ^ Contents of the variable, if found
getNamespacedVar' envRef
                  namespace
                  var = do
  getNamespacedObj' envRef namespace var readIORef
-- Shared lookup walking the parent chain; 'unpackFnc' decides whether
-- the caller receives the IORef itself or its dereferenced contents.
getNamespacedObj' :: Env -- ^ Environment
                  -> Char -- ^ Namespace
                  -> String -- ^ Variable
                  -> (IORef LispVal -> IO a)
                  -> IOThrowsError (Maybe a) -- ^ Contents of the variable, if found
getNamespacedObj' envRef
                  namespace
                  var
                  unpackFnc = do
  binds <- liftIO $ readIORef $ bindings envRef
  case Data.Map.lookup (getVarName namespace var) binds of
    (Just a) -> do
      v <- liftIO $ unpackFnc a
      return $ Just v
    Nothing -> case parentEnv envRef of
      (Just par) -> getNamespacedObj' par namespace var unpackFnc
      Nothing -> return Nothing
-- |Set a variable in the default namespace
setVar
  :: Env -- ^ Environment
  -> String -- ^ Variable
  -> LispVal -- ^ Value
  -> IOThrowsError LispVal -- ^ Value
setVar envRef = setNamespacedVar envRef varNamespace
-- |Set a variable in a given namespace, keeping pointer bookkeeping
-- in sync ('updatePointers') before performing the actual write.
setNamespacedVar
  :: Env -- ^ Environment
  -> Char -- ^ Namespace
  -> String -- ^ Variable
  -> LispVal -- ^ Value
  -> IOThrowsError LispVal -- ^ Value
setNamespacedVar envRef
                 namespace
                 var value = do
  -- Issue #98 - Need to detect circular references
  --
  -- TODO:
  -- Note this implementation is rather simplistic since
  -- it does not take environments into account. The same
  -- variable name could refer to 2 different variables in
  -- different environments.
  case value of
    -- A self-referential pointer would form a trivial cycle; skip the write.
    Pointer p _ -> do
      if p == var
         then return value
         else next
    _ -> next
 where
  next = do
    _ <- updatePointers envRef namespace var
    _setNamespacedVar envRef namespace var value
-- |An internal function that does the actual setting of a
--  variable, without all the extra code that keeps pointers
--  in sync when a variable is re-binded
--
--  Note this function still binds reverse pointers
--  for purposes of book-keeping.
_setNamespacedVar
  :: Env -- ^ Environment
  -> Char -- ^ Namespace
  -> String -- ^ Variable
  -> LispVal -- ^ Value
  -> IOThrowsError LispVal -- ^ Value
_setNamespacedVar envRef
                  namespace
                  var value = do
  -- Set the variable to its new value
  valueToStore <- getValueToStore namespace var envRef value
  _setNamespacedVarDirect envRef namespace var valueToStore
-- |Do the actual /set/ operation, with NO pointer operations.
-- Only call this if you know what you are doing!
_setNamespacedVarDirect
  :: Env -- ^ Environment
  -> Char -- ^ Namespace
  -> String -- ^ Variable
  -> LispVal -- ^ Value
  -> IOThrowsError LispVal -- ^ Value
_setNamespacedVarDirect envRef
                        namespace
                        var valueToStore = do
  env <- liftIO $ readIORef $ bindings envRef
  case Data.Map.lookup (getVarName namespace var) env of
    (Just a) -> do
      liftIO $ writeIORef a valueToStore
      return valueToStore
    -- Not bound here: walk up the parent chain; error at the root.
    Nothing -> case parentEnv envRef of
      (Just par) -> _setNamespacedVarDirect par namespace var valueToStore
      Nothing -> throwError $ UnboundVar "Setting an unbound variable: " var
-- |This helper function is used to keep pointers in sync when
-- a variable is bound to a different value.
-- |This helper function is used to keep pointers in sync when
-- a variable is bound to a different value.
--
-- When @var@ is about to be rebound and other variables point at it,
-- the first such pointer is promoted to hold var's old value and the
-- remaining pointers are re-aimed at that promoted variable.
updatePointers :: Env -> Char -> String -> IOThrowsError LispVal
updatePointers envRef namespace var = do
  ptrs <- liftIO $ readIORef $ pointers envRef
  case Data.Map.lookup (getVarName namespace var) ptrs of
    (Just valIORef) -> do
      val <- liftIO $ readIORef valIORef
      case val of
        -- If var has any pointers, then we need to
        -- assign the first pointer to the old value
        -- of x, and the rest need to be updated to
        -- point to that first var
        -- This is the first pointer to (the old) var
        (Pointer pVar pEnv : ps) -> do
          -- Since var is now fresh, reset its pointers list
          liftIO $ writeIORef valIORef []
          -- The first pointer now becomes the old var,
          -- so its pointers list should become ps
          _ <- movePointers pEnv namespace pVar ps
          -- Each ps needs to be updated to point to pVar
          -- instead of var
          _ <- pointToNewVar pEnv namespace pVar ps
          -- Set first pointer to existing value of var
          existingValue <- getNamespacedVar envRef namespace var
          _setNamespacedVar pEnv namespace pVar existingValue
        -- No pointers, so nothing to do
        [] -> return $ Nil ""
        _ -> throwError $ InternalError
               "non-pointer value found in updatePointers"
    Nothing -> return $ Nil ""
 where
  -- |Move the given pointers (ptr) to the list of
  -- pointers for variable (var)
  movePointers :: Env -> Char -> String -> [LispVal] -> IOThrowsError LispVal
  movePointers envRef' namespace' var' ptrs = do
    env <- liftIO $ readIORef $ pointers envRef'
    case Data.Map.lookup (getVarName namespace' var') env of
      Just ps' -> do
        -- Append ptrs to existing list of pointers to var
        ps <- liftIO $ readIORef ps'
        liftIO $ writeIORef ps' $ ps ++ ptrs
        return $ Nil ""
      Nothing -> do
        -- var does not have any pointers; create new list
        valueRef <- liftIO $ newIORef ptrs
        -- NOTE(review): the insert key uses the outer 'namespace', not
        -- 'namespace''; confirm they are always equal at this call site.
        liftIO $ writeIORef (pointers envRef') (Data.Map.insert (getVarName namespace var') valueRef env)
        return $ Nil ""
  -- |Update each pointer's source to point to pVar
  pointToNewVar pEnv namespace' pVar' (Pointer v e : ps) = do
    _ <- _setNamespacedVarDirect e namespace' v (Pointer pVar' pEnv)
    pointToNewVar pEnv namespace' pVar' ps
  pointToNewVar _ _ _ [] = return $ Nil ""
  pointToNewVar _ _ _ _ = throwError $ InternalError "pointToNewVar"
-- |A wrapper for updateNamespaceObject that uses the variable namespace.
-- |A wrapper for updateNamespacedObject that uses the variable namespace.
updateObject :: Env -> String -> LispVal -> IOThrowsError LispVal
updateObject env =
  updateNamespacedObject env varNamespace
-- |This function updates the object that the variable refers to. If it is
-- a pointer, that means this function will update that pointer (or the last
-- pointer in the chain) to point to the given /value/ object. If the variable
-- is not a pointer, the result is the same as a setVar (but without updating
-- any pointer references, see below).
--
-- Note this function only updates the object, it does not
-- update any associated pointers. So it should probably only be
-- used internally by husk, unless you really know what you are
-- doing!
updateNamespacedObject :: Env -- ^ Environment
                       -> Char -- ^ Namespace
                       -> String -- ^ Variable
                       -> LispVal -- ^ Value
                       -> IOThrowsError LispVal -- ^ Value
updateNamespacedObject env namespace var value = do
  varContents <- getNamespacedVar env namespace var
  -- Follow the pointer chain to the ultimate target (if any).
  obj <- findPointerTo varContents
  case obj of
    Pointer pVar pEnv -> do
      _setNamespacedVar pEnv namespace pVar value
    _ -> _setNamespacedVar env namespace var value
-- |Bind a variable in the default namespace
defineVar
  :: Env -- ^ Environment
  -> String -- ^ Variable
  -> LispVal -- ^ Value
  -> IOThrowsError LispVal -- ^ Value
defineVar envRef = defineNamespacedVar envRef varNamespace
-- |Bind a variable in the given namespace. If it is already bound,
-- this degenerates to 'setNamespacedVar'; otherwise a fresh IORef is
-- inserted into this environment's bindings.
defineNamespacedVar
  :: Env -- ^ Environment
  -> Char -- ^ Namespace
  -> String -- ^ Variable
  -> LispVal -- ^ Value
  -> IOThrowsError LispVal -- ^ Value
defineNamespacedVar envRef
                    namespace
                    var value = do
  alreadyDefined <- liftIO $ isNamespacedBound envRef namespace var
  if alreadyDefined
    then setNamespacedVar envRef namespace var value >> return value
    else do
      --
      -- Future optimization:
      -- don't change anything if (define) is to existing pointer
      -- (IE, it does not really change anything)
      --
      -- If we are assigning to a pointer, we need a reverse lookup to
      -- note that the pointer @value@ points to @var@
      --
      -- So run through this logic to figure out what exactly to store,
      -- both for bindings and for rev-lookup pointers
      valueToStore <- getValueToStore namespace var envRef value
      liftIO $ do
        -- Write new value binding
        valueRef <- newIORef valueToStore
        env <- readIORef $ bindings envRef
        writeIORef (bindings envRef) (Data.Map.insert (getVarName namespace var) valueRef env)
        return valueToStore
-- | Work out what should actually be written into an environment for the
--   value handed to a define/set. Plain values are stored as-is; a pointer
--   additionally needs reverse-lookup book-keeping, which is delegated to
--   'addReversePointer'.
getValueToStore :: Char -> String -> Env -> LispVal -> IOThrowsError LispVal
getValueToStore namespace var env (Pointer p pEnv) =
  addReversePointer namespace p pEnv namespace var env
getValueToStore _ _ _ value = return value
-- |Accept input for a pointer (ptrVar) and a variable that the pointer is going
-- to be assigned to. If that variable is an object then we setup a reverse lookup
-- for future book-keeping. Otherwise, we just look it up and return it directly,
-- no booking-keeping required.
--
-- If the variable is not bound in this environment the search recurses into
-- the parent environment; an unbound name is reported as 'UnboundVar'.
addReversePointer :: Char -> String -> Env -> Char -> String -> Env -> IOThrowsError LispVal
addReversePointer namespace var envRef ptrNamespace ptrVar ptrEnvRef = do
   env <- liftIO $ readIORef $ bindings envRef
   case Data.Map.lookup (getVarName namespace var) env of
     (Just a) -> do
       v <- liftIO $ readIORef a
       if isObject v
          then do
            -- Store a reverse pointer for book keeping
            ptrs <- liftIO $ readIORef $ pointers envRef
            -- Lookup ptr for var
            case Data.Map.lookup (getVarName namespace var) ptrs of
              -- Append another reverse ptr to this var
              -- FUTURE: make sure ptr is not already there,
              -- before adding it to the list again?
              (Just valueRef) -> liftIO $ do
                value <- readIORef valueRef
                writeIORef valueRef (value ++ [Pointer ptrVar ptrEnvRef])
                return $ Pointer var envRef
              -- No mapping, add the first reverse pointer
              Nothing -> liftIO $ do
                valueRef <- newIORef [Pointer ptrVar ptrEnvRef]
                writeIORef (pointers envRef) (Data.Map.insert (getVarName namespace var) valueRef ptrs)
                return $ Pointer var envRef -- Return non-reverse ptr to caller
          else return v -- Not an object, return value directly
     Nothing -> case parentEnv envRef of
       (Just par) -> addReversePointer namespace var par ptrNamespace ptrVar ptrEnvRef
       Nothing -> throwError $ UnboundVar "Getting an unbound variable: " var
-- | Dereference a value down to whatever it ultimately refers to.
--   Non-pointer values are returned unchanged; pointer chains are followed
--   to their end. husk is believed to only ever create single-level
--   chains, but we recurse defensively anyway to cope with multiple
--   levels of indirection.
derefPtr :: LispVal -> IOThrowsError LispVal
derefPtr (Pointer p env) = getVar env p >>= derefPtr
derefPtr v = return v
-- -- |Return the given list of values, but if any of the
-- -- original values is a pointer it will be dereferenced
-- derefPtrs :: [LispVal] -> IOThrowsError LispVal
-- derefPtrs lvs = mapM (liftThrows $ derefPtr) lvs
-- | Recursively walk the given data structure, dereferencing any pointers
--   encountered along the way. Potentially expensive on large structures
--   because the entire object is traversed. Cycle detection starts with an
--   empty visited-set; see 'safeRecDerefPtrs'.
recDerefPtrs :: LispVal -> IOThrowsError LispVal
recDerefPtrs val = safeRecDerefPtrs [] val
-- |Attempt to dereference pointers safely, without being caught in a cycle
--
-- The first argument accumulates the pointers already visited on the way
-- down; if the same pointer shows up again we stop and return it as-is.
-- The container cases (list, dotted list, vector, hash table) are only
-- compiled when pointer support is enabled via the UsePointers CPP flag.
safeRecDerefPtrs :: [LispVal] -> LispVal -> IOThrowsError LispVal
#ifdef UsePointers
safeRecDerefPtrs ps (List l) = do
    result <- mapM (safeRecDerefPtrs ps) l
    return $ List result
safeRecDerefPtrs ps (DottedList ls l) = do
    ds <- mapM (safeRecDerefPtrs ps) ls
    d <- safeRecDerefPtrs ps l
    return $ DottedList ds d
safeRecDerefPtrs ps (Vector v) = do
    let vs = elems v
    ds <- mapM (safeRecDerefPtrs ps) vs
    return $ Vector $ listArray (0, length vs - 1) ds
safeRecDerefPtrs ps (HashTable ht) = do
    -- Keys and values are dereferenced independently and re-zipped
    ks <- mapM (safeRecDerefPtrs ps)$ map (\ (k, _) -> k) $ Data.Map.toList ht
    vs <- mapM (safeRecDerefPtrs ps)$ map (\ (_, v) -> v) $ Data.Map.toList ht
    return $ HashTable $ Data.Map.fromList $ zip ks vs
#endif
safeRecDerefPtrs ps ptr@(Pointer p env) = do
    if containsPtr ps ptr
       then return ptr -- Avoid cycle
       else do
         result <- getVar env p
         safeRecDerefPtrs (ptr : ps) result
safeRecDerefPtrs _ v = return v
-- | Is the given pointer already present in the list of visited pointers?
--   Two pointers are considered the same when both the variable name and
--   the environment's bindings reference coincide.
containsPtr :: [LispVal] -> LispVal -> Bool
containsPtr (Pointer va ea : rest) p@(Pointer vb eb)
  | va == vb && bindings ea == bindings eb = True
  | otherwise = containsPtr rest p
containsPtr _ _ = False
-- |A helper to recursively dereference all pointers and
-- pass results to a function
--
-- The pattern bind on @List@ cannot fail here: the input is wrapped in a
-- 'List' and 'safeRecDerefPtrs' maps the List case back to a 'List'.
recDerefToFnc :: ([LispVal] -> ThrowsError LispVal) -> [LispVal]
              -> IOThrowsError LispVal
recDerefToFnc fnc lvs = do
    List result <- recDerefPtrs $ List lvs
    liftThrows $ fnc result
-- | Predicate: can this lisp value act as an /object/ that pointers may
--   refer to? (Compound data, strings, and pointers themselves qualify.)
isObject :: LispVal -> Bool
isObject v = case v of
  List {}       -> True
  DottedList {} -> True
  String {}     -> True
  Vector {}     -> True
  HashTable {}  -> True
  ByteVector {} -> True
  Pointer {}    -> True
  _             -> False
-- | Like dereferencing, but stop one step early: return the /last pointer/
--   that refers to an object (when there is one) rather than the object
--   itself. Non-pointer inputs come back unchanged.
findPointerTo :: LispVal -> IOThrowsError LispVal
findPointerTo ptr@(Pointer p env) = do
  next <- getVar env p
  case next of
    Pointer {} -> findPointerTo next
    _ -> return ptr
findPointerTo v = return v
| justinethier/husk-scheme | hs-src/Language/Scheme/Variables.hs | mit | 24,772 | 0 | 25 | 6,674 | 4,914 | 2,491 | 2,423 | 409 | 7 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
module Language.Core.TH where
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Language.Core.Parser
-- | Quasi-quoter for embedding Core-language source in Haskell code.
--
--   Only expression contexts are supported. The original definition left
--   'quotePat', 'quoteType' and 'quoteDec' undefined and had no clause for
--   a failed parse, both of which crashed with opaque non-exhaustive
--   pattern errors; they now report descriptive messages instead.
core :: QuasiQuoter
core = QuasiQuoter { quoteExp  = toExpQ . parseCore
                   , quotePat  = const $ error "core: no quotePat defined"
                   , quoteType = const $ error "core: no quoteType defined"
                   , quoteDec  = const $ error "core: no quoteDec defined"
                   }
  where toExpQ (Right ast) = dataToExpQ (const Nothing) ast
        toExpQ (Left err)  = error ("core: parse error: " ++ show err)
| notcome/ImplFP | src/Language/Core/TH.hs | mit | 321 | 0 | 9 | 49 | 77 | 46 | 31 | 9 | 1 |
-- |
-- Module : $Header$
-- Description : Passes over algorithm language terms to ensure certain invariants
-- Copyright : (c) Justus Adam 2017. All Rights Reserved.
-- License : EPL-1.0
-- Maintainer : sebastian.ertel@gmail.com, dev@justus.science
-- Stability : experimental
-- Portability : portable
--
-- This module implements a set of passes over ALang which perform
-- various tasks. The most important function is `normalize`, which
-- transforms an arbitrary ALang expression either into the normal
-- form of a sequence of let bindings which are invocations of
-- stateful functions on local or environment variables finalised by a
-- local binding as a return value.
-- This source code is licensed under the terms described in the associated LICENSE.TXT file
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Ohua.ALang.Passes where
import Ohua.Prelude
import Control.Comonad (extract)
import Control.Monad.RWS.Lazy (evalRWST)
import Control.Monad.Writer (listen, runWriter, tell)
import Data.Functor.Foldable
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Ohua.ALang.Lang
import Ohua.ALang.PPrint
import Ohua.ALang.Passes.If
import Ohua.ALang.Passes.Seq
import Ohua.ALang.Passes.Smap
import Ohua.ALang.Passes.Unit
import qualified Ohua.ALang.Refs as Refs
import Ohua.Stage
-- | Run the standard pipeline of core ALang passes in order:
--   unit-function explication, smap rewrite, conditional rewrite and seq
--   rewrite. Each intermediate result is recorded via 'stage' so it can be
--   dumped for debugging.
runCorePasses :: MonadOhua m => Expression -> m Expression
runCorePasses expr = do
    let exprE = mkUnitFunctionsExplicit expr
    stage "unit-transformation" exprE
    smapE <- smapRewrite exprE
    -- traceM $ "after 'smap' pass:\n" <> (show $ prettyExpr smapE)
    stage "smap-transformation" smapE
    ifE <- ifRewrite smapE
    -- traceM $ "after 'if' pass:\n" <> (show $ prettyExpr ifE)
    stage "conditionals-transformation" ifE
    seqE <- seqRewrite ifE
    -- traceM $ "after 'seq' pass:\n" <> (show $ prettyExpr seqE)
    stage "seq-transformation" seqE
    return seqE
-- | Inline all references to lambdas.
-- Aka `let f = (\a -> E) in f N` -> `(\a -> E) N`
--
-- Implemented as a paramorphism with a Reader environment mapping each
-- let-bound name to its (already inlined) lambda.
inlineLambdaRefs :: MonadOhua m => Expression -> m Expression
inlineLambdaRefs = flip runReaderT mempty . para go
  where
    go (LetF b (Lambda _ _, l) (_, body)) =
        l >>= \l' -> local (HM.insert b l') body
    go (VarF bnd) = asks (fromMaybe (Var bnd) . HM.lookup bnd)
    go e = embed <$> traverse snd e
-- | Reduce lambdas by simulating application
-- Aka `(\a -> E) N` -> `let a = N in E`
-- Assumes lambda refs have been inlined
inlineLambda :: Expression -> Expression
inlineLambda =
    cata $ \case
        e@(ApplyF func argument) ->
            case func of
                -- Direct beta reduction of an applied lambda
                Lambda assignment body -> Let assignment argument body
                Apply _ _ -> reduceLetCWith f func
                    where f (Lambda assignment body) =
                              Let assignment argument body
                          f v0 = Apply v0 argument
                _ -> embed e
        e -> embed e
-- recursively performs the substitution
--
-- let x = (let y = M in A) in E[x] -> let y = M in let x = A in E[x]
reduceLetA :: Expression -> Expression
reduceLetA =
    \case
        Let assign (Let assign2 val expr3) expr ->
            Let assign2 val $ reduceLetA $ Let assign expr3 expr
        e -> e
-- | Rotate a let out of the function position of an apply, then hand the
--   remaining function expression to the supplied continuation @f@.
reduceLetCWith :: (Expression -> Expression) -> Expression -> Expression
reduceLetCWith f =
    \case
        Apply (Let assign val expr) argument ->
            Let assign val $ reduceLetCWith f $ Apply expr argument
        e -> f e
-- | 'reduceLetCWith' with the identity continuation.
reduceLetC :: Expression -> Expression
reduceLetC = reduceLetCWith id
-- | Rotate a let out of the argument position of an apply.
reduceAppArgument :: Expression -> Expression
reduceAppArgument =
    \case
        Apply function (Let assign val expr) ->
            Let assign val $ reduceApplication $ Apply function expr
        e -> e
-- recursively performs the substitution
--
-- (let x = M in A) N -> let x = M in A N
--
-- and then
--
-- A (let x = M in N) -> let x = M in A N
reduceApplication :: Expression -> Expression
reduceApplication = reduceLetCWith reduceAppArgument
-- | Lift all nested lets to the top level
-- Aka `let x = let y = E in N in M` -> `let y = E in let x = N in M`
-- and `(let x = E in F) a` -> `let x = E in F a`
letLift :: Expression -> Expression
letLift =
    cata $ \e ->
        let f =
                case e of
                    LetF _ _ _ -> reduceLetA
                    ApplyF _ _ -> reduceApplication
                    _ -> id
         in f $ embed e
-- -- | Inline all direct reassignments.
-- -- Aka `let x = E in let y = x in y` -> `let x = E in x`
--
-- A Reader environment carries the substitution: bindings whose bound
-- value is a plain variable or literal are dropped and their occurrences
-- replaced; all other lets are kept.
inlineReassignments :: Expression -> Expression
inlineReassignments = flip runReader HM.empty . cata go
  where
    go (LetF bnd val body) =
        val >>= \v ->
            let requestReplace = local (HM.insert bnd v) body
             in case v of
                    Var {} -> requestReplace
                    Lit {} -> requestReplace
                    _ -> Let bnd v <$> body
    go (VarF val) = asks (fromMaybe (Var val) . HM.lookup val)
    go e = embed <$> sequence e
-- | Transforms the final expression into a let expression with the result variable as body.
-- Aka `let x = E in some/sf a` -> `let x = E in let y = some/sf a in y`
--
-- EDIT: Now also does the same for any residual lambdas
ensureFinalLet :: MonadOhua m => Expression -> m Expression
ensureFinalLet = ensureFinalLetInLambdas >=> ensureFinalLet'
-- | Transforms the final expression into a let expression with the result variable as body.
ensureFinalLet' :: MonadOhua m => Expression -> m Expression
ensureFinalLet' =
    para $ \case
        LetF b (oldV, _) (_, recB) -> Let b oldV <$> recB -- Recurse only into let body, not the bound value
        any
            | isVarOrLambdaF any -> embed <$> traverse snd any -- Don't rebind a lambda or var. Continue or terminate
            | otherwise -> do -- Rebind anything else
                newBnd <- generateBinding
                pure $ Let newBnd (embed $ fmap fst any) (Var newBnd)
  where
    isVarOrLambdaF =
        \case
            VarF _ -> True
            LambdaF {} -> True
            _ -> False
-- | Obsolete, will be removed soon. Replaced by `ensureFinalLet'`
ensureFinalLet'' :: MonadOhua m => Expression -> m Expression
ensureFinalLet'' (Let a e b) = Let a e <$> ensureFinalLet' b
ensureFinalLet'' v@(Var _) = return v
-- I'm not 100% sure about this case, perhaps this ought to be in
-- `ensureFinalLetInLambdas` instead
ensureFinalLet'' (Lambda b body) = Lambda b <$> ensureFinalLet' body
ensureFinalLet'' a = do
    newBnd <- generateBinding
    return $ Let newBnd a (Var newBnd)
-- | Apply 'ensureFinalLet'' to the body of every lambda in the expression.
ensureFinalLetInLambdas :: MonadOhua m => Expression -> m Expression
ensureFinalLetInLambdas =
    cata $ \case
        LambdaF bnd body -> Lambda bnd <$> (ensureFinalLet' =<< body)
        a -> embed <$> sequence a
-- | Guarantee that the expression — and the body of every lambda inside
--   it — contains at least one function call, by wrapping a bare variable
--   result in an application of the built-in @id@ function.
ensureAtLeastOneCall :: (Monad m, MonadGenBnd m) => Expression -> m Expression
ensureAtLeastOneCall e@(Var _) = do
    newBnd <- generateBinding
    pure $ Let newBnd (PureFunction Refs.id Nothing `Apply` e) $ Var newBnd
ensureAtLeastOneCall e = cata f e
  where
    -- Lambdas whose body reduces to a bare variable get the same wrapping
    f (LambdaF bnd body) =
        body >>= \case
            v@(Var _) -> do
                newBnd <- generateBinding
                pure $
                    Lambda bnd $
                    Let newBnd (PureFunction Refs.id Nothing `Apply` v) $
                    Var newBnd
            eInner -> pure $ Lambda bnd eInner
    f eInner = embed <$> sequence eInner
-- | Drop let-bindings whose bound name is never referenced in the body.
--
--   Caution: this is not semantics-preserving in general, because stateful
--   function invocations may have side effects and their bound expression
--   is discarded wholesale. Assumes the input is in SSA form.
removeUnusedBindings :: Expression -> Expression
removeUnusedBindings = fst . runWriter . cata go
  where
    go (VarF v) = do
        -- Record every variable occurrence so enclosing lets can see it.
        tell (HS.singleton v)
        pure (Var v)
    go (LetF b val body) = do
        (inner, used) <- listen body
        if b `HS.member` used
            then do
                val' <- val
                pure (Let b val' inner)
            else pure inner
    go e = embed <$> sequence e
-- | A HashMap whose Semigroup instance combines values of colliding keys
--   with the value type's own '<>' (rather than left-biased replacement).
newtype MonoidCombineHashMap k v =
    MonoidCombineHashMap (HashMap k v)
    deriving (Show, Eq, Ord)
instance (Semigroup v, Eq k, Hashable k) =>
         Semigroup (MonoidCombineHashMap k v) where
    MonoidCombineHashMap m1 <> MonoidCombineHashMap m2 =
        MonoidCombineHashMap $ HM.unionWith (<>) m1 m2
instance (Semigroup v, Eq k, Hashable k) =>
         Monoid (MonoidCombineHashMap k v) where
    mempty = MonoidCombineHashMap mempty
-- | Tracking flag for binding usage; combining takes the maximum, so a
--   single 'Yes' sticks.
data WasTouched
    = No
    | Yes
    deriving (Show, Eq, Ord)
instance Semigroup WasTouched where
    (<>) = max
instance Monoid WasTouched where
    mempty = No
-- | Per binding: (used as a function, used as a value).
type TouchMap = MonoidCombineHashMap Binding (WasTouched, WasTouched)
wasTouchedAsFunction :: Binding -> TouchMap
wasTouchedAsFunction bnd = MonoidCombineHashMap $ HM.singleton bnd (Yes, No)
wasTouchedAsValue :: Binding -> TouchMap
wasTouchedAsValue bnd = MonoidCombineHashMap $ HM.singleton bnd (No, Yes)
lookupTouchState :: Binding -> TouchMap -> (WasTouched, WasTouched)
lookupTouchState bnd (MonoidCombineHashMap m) =
    fromMaybe mempty $ HM.lookup bnd m
-- | Reduce curried expressions. aka `let f = some/sf a in f b`
-- becomes `some/sf a b`. It both inlines the curried function and
-- removes the binding site. Recursively calls it self and therefore
-- handles redefinitions as well. It only substitutes vars in the
-- function positions of apply's hence it may produce an expression
-- with undefined local bindings. It is recommended therefore to
-- check this with 'noUndefinedBindings'. If an undefined binding is
-- left behind which indicates the source expression was not
-- fulfilling all its invariants.
--
-- Runs in RWST: Reader holds candidate substitutions, Writer accumulates
-- a 'TouchMap' recording whether each binding was used as function/value.
removeCurrying ::
       forall m. MonadError Error m
    => Expression
    -> m Expression
removeCurrying e = fst <$> evalRWST (para inlinePartials e) mempty ()
  where
    inlinePartials (LetF bnd (_, val) (_, body)) = do
        val' <- val
        (body', touched) <- listen $ local (HM.insert bnd val') body
        -- A binding used both as a function and as a value cannot be
        -- inlined consistently; that is an error.
        case lookupTouchState bnd touched of
            (Yes, Yes) ->
                throwErrorDebugS $
                "Binding was used as function and value " <> show bnd
            (Yes, _) -> pure body'
            _ -> pure $ Let bnd val' body'
    inlinePartials (ApplyF (Var bnd, _) (_, arg)) = do
        tell $ wasTouchedAsFunction bnd
        val <- asks (HM.lookup bnd)
        Apply <$>
            (maybe
                 (failWith $ "No suitable value found for binding " <> show bnd)
                 pure
                 val) <*>
            arg
    inlinePartials (VarF bnd) = tell (wasTouchedAsValue bnd) >> pure (Var bnd)
    inlinePartials innerExpr = embed <$> traverse snd innerExpr
-- | Ensures the expression is a sequence of let statements terminated
-- with a local variable.
hasFinalLet :: MonadOhua m => Expression -> m ()
hasFinalLet =
    cata $ \case
        LetF _ _ body -> body
        VarF {} -> return ()
        _ -> failWith "Final value is not a var"
-- | Ensures all of the optionally provided stateful function ids are unique.
noDuplicateIds :: MonadError Error m => Expression -> m ()
noDuplicateIds = flip evalStateT mempty . cata go
  where
    -- Ids seen so far are accumulated in a HashSet in State.
    go (PureFunctionF _ (Just funid)) = do
        isMember <- gets (HS.member funid)
        when isMember $ failWith $ "Duplicate id " <> show funid
        modify (HS.insert funid)
    go e = sequence_ e
-- | Checks that no apply to a local variable is performed. This is a
-- simple check and it will pass on complex expressions even if they
-- would reduce to an apply to a local variable.
applyToPureFunction :: MonadOhua m => Expression -> m ()
applyToPureFunction =
    para $ \case
        ApplyF (Var bnd, _) _ ->
            failWith $ "Illegal Apply to local var " <> show bnd
        e -> sequence_ $ fmap snd e
-- | Checks that all local bindings are defined before use.
-- Scoped. Aka bindings are only visible in their respective scopes.
-- Hence the expression does not need to be in SSA form.
noUndefinedBindings :: MonadOhua m => Expression -> m ()
noUndefinedBindings = flip runReaderT mempty . cata go
  where
    go (LetF b val body) = val >> registerBinding b body
    go (VarF bnd) = do
        isDefined <- asks (HS.member bnd)
        unless isDefined $ failWith $ "Not in scope " <> show bnd
    go (LambdaF b body) = registerBinding b body
    go e = sequence_ e
    registerBinding = local . HS.insert
-- | Run all of the above structural checks on a program.
checkProgramValidity :: MonadOhua m => Expression -> m ()
checkProgramValidity e = do
    hasFinalLet e
    noDuplicateIds e
    applyToPureFunction e
    noUndefinedBindings e
-- | Lifts something like @if (f x) a b@ to @let x0 = f x in if x0 a b@
--
-- Performed as a top-down (prewalk) rewrite: any apply whose argument is
-- itself an apply gets that argument bound to a fresh name first.
liftApplyToApply :: MonadOhua m => Expression -> m Expression
liftApplyToApply =
    lrPrewalkExprM $ \case
        Apply fn arg@(Apply _ _) -> do
            bnd <- generateBinding
            return $ Let bnd arg $ Apply fn (Var bnd)
        a -> return a
-- normalizeBind :: (MonadError Error m, MonadGenBnd m) => Expression -> m Expression
-- normalizeBind =
-- rewriteM $ \case
-- BindState e2 e1@(PureFunction _ _) ->
-- case e2 of
-- Var _ -> pure Nothing
-- Lit _ -> pure Nothing
-- _ ->
-- generateBinding >>= \b ->
-- pure $ Just $ Let b e2 (BindState (Var b) e1)
-- BindState _ _ -> throwError "State bind target must be a pure function reference"
-- _ -> pure Nothing
dumpNormalizeDebug = False
-- | Print a line of normalization debug output, but only when
--   'dumpNormalizeDebug' is enabled.
putStrLnND :: (Print str, MonadIO m) => str -> m ()
putStrLnND s
    | dumpNormalizeDebug = putStrLn s
    | otherwise = return ()
-- | 'print' for normalization debugging, gated on 'dumpNormalizeDebug'.
printND :: (Show a, MonadIO m) => a -> m ()
printND x
    | dumpNormalizeDebug = print x
    | otherwise = return ()
-- The canonical composition of the above transformations to create a
-- program with the invariants we expect.
--
-- Pipeline: iterate lambda inlining to a fixed point, inline
-- reassignments, remove currying, let-lift, lift nested applies, ensure a
-- final let, and finally guarantee at least one call. The interleaved
-- lambdas only emit debug output when 'dumpNormalizeDebug' is set.
normalize :: MonadOhua m => Expression -> m Expression
normalize e =
    reduceLambdas (letLift e) >>=
    (\a ->
         putStrLnND ("Reduced lamdas" :: Text) >> printND (pretty a) >> return a) >>=
    return . inlineReassignments >>=
    removeCurrying >>=
    (\a ->
         putStrLnND ("Removed Currying" :: Text) >> printND (pretty a) >>
         return a) >>=
    liftApplyToApply >>=
    (\a -> putStrLnND ("App to App" :: Text) >> printND (pretty a) >> return a) .
    letLift >>=
    ensureFinalLet . inlineReassignments >>=
    ensureAtLeastOneCall
    -- we repeat this step until a fix point is reached.
    -- this is necessary as lambdas may be input to lambdas,
    -- which means after inlining them we may be able again to
    -- inline a ref and then inline the lambda.
    -- I doubt this will ever do more than two or three iterations,
    -- but to make sure it accepts every valid program this is necessary.
  where
    reduceLambdas expr = do
        res <- letLift . inlineLambda <$> inlineLambdaRefs expr
        if res == expr
            then return res
            else reduceLambdas res
-- letLift (Let assign1 (Let assign2 expr2 expr3) expr4) = letLift $ Let assign2 expr2 $ Let assign1 expr3 expr4
-- letLift (Let assign v@(Var _) expr) = Let assign v $ letLift expr
-- letLift (Let assign val expr) =
-- case letLift val of
-- v'@(Let _ _ _) -> letLift $ Let assign v' expr
-- _ -> Let assign v' $ letLift expr
-- letLift e@(Var _) = e
-- letLift (Apply v@(Var _) argument) = Apply v (letLift argument)
-- letLift (Apply (Let assign expr function) argument) = letLift $ Let assign expr $ Apply function argument
-- letLift (Apply function argument) =
-- case letLift argument of
| ohua-dev/ohua-core | core/src/Ohua/ALang/Passes.hs | epl-1.0 | 15,683 | 0 | 22 | 4,205 | 3,497 | 1,788 | 1,709 | -1 | -1 |
{-# LANGUAGE DatatypeContexts #-}
{-# LANGUAGE DatatypeContexts #-}
{-# LANGUAGE DeriveDataTypeable, TemplateHaskell #-}
{-# LANGUAGE DatatypeContexts #-}
module Rewriting.Derive.Instance where
import Autolib.ToDoc
import Autolib.Reporter
import Autolib.Size
import Autolib.Multilingual
import Autolib.Reader
import Control.Monad (when)
import Data.Typeable
-- | TODO: boolean combination
--
-- A restriction on an object: either a concrete expected object, or a
-- size constraint ('LT'/'EQ'/'GT' relative to the given bound).
data Object_Restriction object = Fixed object | Sized Ordering Int
    deriving (Eq, Typeable)
instance ToDoc object => Nice (Object_Restriction object) where
    -- NOTE: the local name @or@ shadows Prelude's 'or'.
    nice or = case or of
        Fixed o -> toDoc o
        Sized ord s -> hsep
            [ multitext [(DE, "ein Objekt der Größe"), (UK, "an object of size")]
            , case ord of LT -> text "<" ; EQ -> empty ; GT -> text ">"
            , toDoc s
            ]
-- | Reject (with a bilingual message) when the object violates the
--   restriction. No top-level type signature in the original —
--   presumably Reporter-based; TODO confirm and add one.
check_object_restriction msg or ob = case or of
    Fixed o -> when (ob /= o) $ reject $ vcat
        [ msg <+> multitext [(DE,"ist nicht"),(UK,"is not")] <+> toDoc o ]
    Sized ord s -> when (compare (size ob) s /= ord) $ reject $ vcat
        [ msg <+> multitext [(DE,"ist nicht"),(UK,"is not")] <+> nice or ]
derives [makeReader, makeToDoc] [''Object_Restriction]
-- | Restriction on the length of a derivation, relative to a bound.
data Derivation_Restriction = Length Ordering Int
    deriving (Eq, Typeable)
instance Nice Derivation_Restriction where
    nice dr = case dr of
        Length ord l -> hsep
            [ multitext [(DE, "mit Länge"), (UK, "of length")]
            , case ord of LT -> text "<" ; EQ -> empty ; GT -> text ">"
            , toDoc l
            ]
-- | Reject (bilingually) when the actual length @l@ does not satisfy the
--   restriction. No top-level type signature in the original — TODO add one.
check_derivation_restriction msg dr l = case dr of
    Length ord ell -> when (compare l ell /= ord) $ reject $ hsep
        [ msg <+> multitext [(DE,"ist nicht"),(UK,"is not")] <+> nice dr ]
derives [makeReader] [''Ordering]
derives [makeReader, makeToDoc] [''Derivation_Restriction ]
-- | A rewriting-derivation exercise instance: the rewrite system, a
--   length restriction on the derivation, and restrictions on the source
--   and target objects.
--
--   NOTE(review): relies on the long-deprecated DatatypeContexts
--   extension for the Reader/ToDoc constraints on the data declaration.
data ( Reader system, ToDoc system
     , Reader object, ToDoc object
     )
     => Instance system object = Instance
        { system :: system
        , derivation_restriction :: Derivation_Restriction
        , from :: Object_Restriction object
        , to :: Object_Restriction object
        }
    deriving ( Typeable
             )
derives [makeReader, makeToDoc] [''Instance]
| marcellussiegburg/autotool | collection/src/Rewriting/Derive/Instance.hs | gpl-2.0 | 2,225 | 2 | 15 | 572 | 718 | 384 | 334 | 49 | 2 |
module Highlight (highlight, getLang) where
import qualified Data.Text as T
import Data.Monoid (mconcat)
import Text.Blaze (toValue, (!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Highlighting.Kate ( defaultFormatOpts
, highlightAs
, languagesByFilename )
import Text.Highlighting.Kate.Types
-- | Render source text as syntax-highlighted HTML, using the Kate
--   highlighter definition for the given language name.
highlight :: String -> T.Text -> H.Html
highlight lang txt =
    mconcat (map sourceLineToHtml (highlightAs lang (T.unpack txt)))
-- | Convert one highlighted source line to HTML, terminated by a newline.
sourceLineToHtml :: SourceLine -> H.Html
sourceLineToHtml tokens =
    mconcat (map (tokenToHtml defaultFormatOpts) tokens ++ [H.toHtml "\n"])
-- | Render a single token: normal tokens become plain text, everything
--   else is wrapped in a span whose class is the token-type shorthand.
--   A title attribute with the full token type is added when requested
--   via the format options.
tokenToHtml :: FormatOptions -> Token -> H.Html
tokenToHtml _ (NormalTok, str) = H.toHtml str
tokenToHtml opts (toktype, str)
    | titleAttributes opts = styled ! A.title (toValue $ show toktype)
    | otherwise = styled
  where
    styled = H.span ! A.class_ (toValue $ short toktype) $ H.toHtml str
-- | CSS class shorthand for each Kate token type (empty for normal text).
short :: TokenType -> String
short tok = case tok of
    KeywordTok      -> "kw"
    DataTypeTok     -> "dt"
    DecValTok       -> "dv"
    BaseNTok        -> "bn"
    FloatTok        -> "fl"
    CharTok         -> "ch"
    StringTok       -> "st"
    CommentTok      -> "co"
    OtherTok        -> "ot"
    AlertTok        -> "al"
    FunctionTok     -> "fu"
    RegionMarkerTok -> "re"
    ErrorTok        -> "er"
    NormalTok       -> ""
-- | Pick the first syntax definition Kate associates with the file name,
--   or the empty string when none matches. The explicit signature was
--   missing, and the non-empty case used the partial 'head' instead of a
--   pattern match.
getLang :: FilePath -> String
getLang path =
    case languagesByFilename path of
        [] -> ""
        (lang : _) -> lang
| cdosborn/lit | src/Highlight.hs | gpl-2.0 | 1,633 | 0 | 12 | 451 | 477 | 256 | 221 | 45 | 2 |
{- |
Module : $Header$
Description : Signatures for DL logics, as extension of CASL signatures
Copyright : (c) Klaus Luettich, Uni Bremen 2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer : luecke@informatik.uni-bremen.de
Stability : provisional
Portability : portable
Signatures for DL logics, as extension of CASL signatures.
-}
module CASL_DL.Sign where
import qualified Data.Map as Map
import Common.Id
import Common.Doc
import Common.DocUtils
import CASL.AS_Basic_CASL
import CASL_DL.AS_CASL_DL
import CASL_DL.Print_AS ()
import Data.List (union, (\\), isPrefixOf)
import Control.Exception
-- | CASL_DL signature extension: annotation/ontology properties plus the
--   recorded annotation applications.
data CASL_DLSign =
    CASL_DLSign { annoProperties :: Map.Map SIMPLE_ID PropertyType
                , annoPropertySens :: [AnnoAppl]
                } deriving (Show, Eq, Ord)
-- | Kind of a property: plain annotation vs. ontology property.
data PropertyType = AnnoProperty
                  | OntoProperty deriving (Show, Eq, Ord)
-- | An application of an annotation property: property, subject, object.
data AnnoAppl = AnnoAppl SIMPLE_ID Id AnnoLiteral
                deriving (Show, Eq, Ord)
-- | The object of an annotation: either a term or an identifier.
data AnnoLiteral = AL_Term (TERM DL_FORMULA)
                 | AL_Id Id
                 deriving (Show, Eq, Ord)
-- | The empty CASL_DL signature extension: no properties, no sentences.
emptyCASL_DLSign :: CASL_DLSign
emptyCASL_DLSign =
    CASL_DLSign { annoProperties = Map.empty, annoPropertySens = [] }
-- | Union of two signature extensions. Property maps are merged with
--   'throwAnnoError', which errors out when the same name is declared with
--   conflicting property kinds; sentence lists are merged with 'union'.
addCASL_DLSign :: CASL_DLSign -> CASL_DLSign -> CASL_DLSign
addCASL_DLSign a b = a
    { annoProperties =
          Map.unionWithKey (throwAnnoError "CASL_DL.Sign.addCASL_DLSign:")
                           (annoProperties a) (annoProperties b)
    , annoPropertySens = union (annoPropertySens a) (annoPropertySens b)
    }
-- | Collision handler used when merging property maps: identical kinds
--   merge trivially, while differing kinds for the same name are a hard
--   error (a name cannot be both an annotation property and an ontology
--   property).
throwAnnoError :: String -> SIMPLE_ID
               -> PropertyType -> PropertyType -> PropertyType
throwAnnoError s k e1 e2
    | e1 == e2 = e1
    | otherwise =
        error $ s ++ " Annotation Properties and Ontology Properties "
                  ++ "must have distinct names! (" ++ show k ++ ")"
-- | Difference of two signature extensions (everything in @a@ that is not
--   in @b@).
diffCASL_DLSign :: CASL_DLSign -> CASL_DLSign -> CASL_DLSign
diffCASL_DLSign a b = a
    { annoProperties = Map.difference (annoProperties a) (annoProperties b)
    , annoPropertySens = annoPropertySens a \\ annoPropertySens b
    }
-- | Sub-signature test. Note that the sentence check uses 'isSublistOf',
--   which tests for a /contiguous/ segment, not a subset.
isSubCASL_DLSign :: CASL_DLSign -> CASL_DLSign -> Bool
isSubCASL_DLSign a b =
    Map.isSubmapOf (annoProperties a) (annoProperties b) &&
    (annoPropertySens a `isSublistOf` annoPropertySens b)
-- Pretty-printing of the signature extension. An empty property map with
-- non-empty sentences is considered impossible (guarded by 'assert').
instance Pretty CASL_DLSign where
    pretty dlSign = if Map.null $ annoProperties dlSign
       then assert (null $ annoPropertySens dlSign) empty
       else printPropertyList AnnoProperty
                              "%OWLAnnoProperties("
            $+$
            printPropertyList OntoProperty
                              "%OWLOntologyProperties("
            $+$
            if null (annoPropertySens dlSign)
               then empty
               else text "%OWLAnnotations(" <+>
                    vcat (punctuate (text "; ") $
                          map pretty
                              (annoPropertySens dlSign)) <+>
                    text ")%"
        where propertyList ty = filter (\ (_, x) -> x == ty) $
                                Map.toList $ annoProperties dlSign
              printPropertyList ty str =
                  case propertyList ty of
                  [] -> empty
                  l -> text str <+>
                       fcat (punctuate comma $
                             map (pretty . fst) l) <+>
                       text ")%"
instance Pretty AnnoAppl where
    pretty (AnnoAppl rel subj obj) = pretty rel <>
        parens (pretty subj <> comma <> pretty obj)
instance Pretty AnnoLiteral where
    pretty annoLit = case annoLit of
      AL_Term t -> pretty t
      AL_Id i -> pretty i
-- | Does the first list occur as a /contiguous/ segment of the second?
--   (An infix test, equivalent to 'Data.List.isInfixOf' — not a subset
--   test.) The empty list is a sublist of every list.
isSublistOf :: (Eq a) => [a] -> [a] -> Bool
isSublistOf needle haystack = case haystack of
    [] -> null needle
    _ : rest -> needle `isPrefixOf` haystack || isSublistOf needle rest
| nevrenato/HetsAlloy | CASL_DL/Sign.hs | gpl-2.0 | 3,971 | 0 | 18 | 1,404 | 917 | 474 | 443 | 80 | 2 |
import Tox.Core
import Control.Concurrent
import Control.Monad
import qualified Data.ByteString.Base16 as BS16
-- | Create a Tox instance, print its address, bootstrap against a known
--   node, and poll the event loop forever, reporting connectivity.
--   Fixes: added the missing @main@ signature and replaced the
--   @if c then ... else return ()@ idiom with 'when'.
main :: IO ()
main = do
    tox <- toxNew False
    address <- toxGetAddress tox
    putStrLn address
    toxBootstrapFromAddress tox "54.199.139.199" False 33445 "7F9C31FE850E97CEFD4C4591DF93FC757C7C12549DDD55F8EEAECC34FE76C029"
    forever $ do
        toxDo tox
        connected <- toxIsconnected tox
        when connected $ putStrLn "Connected"
        -- Poll interval: 1000 microseconds (1 ms)
        threadDelay 1000
| ollieh/haskell-tox-core | Test.hs | gpl-3.0 | 526 | 0 | 12 | 138 | 118 | 55 | 63 | 14 | 2 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE RankNTypes #-}
module Random ( runRandomSIR ) where
import System.IO
import Control.Monad.Random
import Control.Monad.Reader
import Control.Monad.Trans.MSF.Random
import qualified Data.Map as Map
import Data.Traversable as T
import FRP.BearRiver
import SIR
-- Identifier of an agent within the simulation.
type AgentId = Int
-- A piece of data addressed to/from a particular agent.
type AgentData d = (AgentId, d)
-- | Input to an agent for one step: its own id and the data it received.
data AgentIn d = AgentIn
  { aiId :: !AgentId
  , aiData :: ![AgentData d]
  } deriving (Show)
-- | Output of an agent for one step: outgoing data plus an observable.
data AgentOut o d = AgentOut
  { aoData :: ![AgentData d]
  , aoObservable :: !o
  } deriving (Show)
-- An agent is a signal function from its inputs to its outputs.
type Agent m o d = SF m (AgentIn d) (AgentOut o d)
-- The SIR simulation runs in the Rand monad for stochasticity.
type SIRMonad g = Rand g
-- The only message in the SIR protocol: a contact carrying the sender's state.
data SIRMsg = Contact SIRState deriving (Show, Eq)
type SIRAgentIn = AgentIn SIRMsg
type SIRAgentOut = AgentOut SIRState SIRMsg
type SIRAgent g = Agent (SIRMonad g) SIRState SIRMsg
-- Total number of agents in the simulation.
agentCount :: Int
agentCount = 100
-- How many of those agents start out infected.
infectedCount :: Int
infectedCount = 10
-- Fixed RNG seed for reproducible runs.
rngSeed :: Int
rngSeed = 42
-- Integration time step.
dt :: DTime
dt = 0.1
-- Total simulated time.
t :: Time
t = 150
-- | Run the SIR simulation with the module's fixed parameters and write
--   the aggregated S/I/R dynamics to a MATLAB-style output file.
runRandomSIR :: IO ()
runRandomSIR = do
  hSetBuffering stdout NoBuffering
  let g = mkStdGen rngSeed
  let as = initAgents agentCount infectedCount
  let ass = runSimulation g t dt as
  let dyns = aggregateAllStates ass
  let fileName = "STEP_4_BEARRIVER_DYNAMICS_" ++ show agentCount ++ "agents.m"
  writeAggregatesToFile fileName dyns
-- | Drive the simulation for @t / dt@ steps and return, per step, the
--   observable state of every agent. (The parameters @t@ and @dt@ shadow
--   the module-level constants of the same names.)
runSimulation :: RandomGen g
              => g
              -> Time
              -> DTime
              -> [(AgentId, SIRState)]
              -> [[SIRState]]
runSimulation g t dt as = map (map aoObservable) aoss
  where
    steps = floor $ t / dt
    dts = replicate steps ()
    ais = map fst as
    sfs = map (\(_, s) -> sirAgent ais s) as
    ains = map (\(aid, _) -> agentIn aid) as
    -- Layered interpretation: embed the stepped SF, feed dt via Reader,
    -- then run the randomness with the supplied generator.
    aossReader = embed (stepSimulation sfs ains) dts
    aossRand = runReaderT aossReader dt
    aoss = evalRand aossRand g
-- | One simulation layer: run all agent SFs in parallel via 'dpSwitch' and,
--   after every step, redistribute the produced data as the agents' inputs
--   for the next step by recursing into 'stepSimulation'.
stepSimulation :: RandomGen g
               => [SIRAgent g]
               -> [SIRAgentIn]
               -> SF (SIRMonad g) () [SIRAgentOut]
stepSimulation sfs ains =
    dpSwitch
      (\_ sfs' -> (zip ains sfs'))
      sfs
      switchingEvt -- no need for 'notYet' in BearRiver as there is no time = 0 with dt = 0
      stepSimulation
  where
    -- Fires every step, carrying the freshly routed inputs for all agents.
    switchingEvt :: RandomGen g
                 => SF (SIRMonad g) ((), [SIRAgentOut]) (Event [SIRAgentIn])
    switchingEvt = proc (_, aos) -> do
      let ais = map aiId ains
          aios = zip ais aos
          nextAins = distributeData aios
      returnA -< Event nextAins
-- | Build an agent's behaviour from its initial SIR state. Only the
--   susceptible behaviour needs to know the other agents' ids (to pick
--   contact partners).
sirAgent :: RandomGen g => [AgentId] -> SIRState -> SIRAgent g
sirAgent ais state = case state of
  Susceptible -> susceptibleAgent ais
  Infected    -> infectedAgent
  Recovered   -> recoveredAgent
-- | Behaviour while susceptible: react to incoming contacts (possibly
--   becoming infected) and occasionally initiate contact with a random
--   other agent. On infection the SF switches to 'infectedAgent'.
susceptibleAgent :: RandomGen g => [AgentId] -> SIRAgent g
susceptibleAgent ais =
    switch
      susceptible
      (const infectedAgent)
  where
    susceptible :: RandomGen g
                => SF (SIRMonad g) SIRAgentIn (SIRAgentOut, Event ())
    susceptible = proc ain -> do
      -- Check received messages: each Contact Infected may infect us.
      infected <- arrM (lift . gotInfected infectivity) -< ain
      if infected
        then returnA -< (agentOut Infected, Event ())
        else (do
          -- Otherwise, occasionally reach out to a random peer.
          makeContact <- occasionally (1 / contactRate) () -< ()
          contactId <- drawRandomElemS -< ais
          if isEvent makeContact
            then returnA -< (dataFlow (contactId, Contact Susceptible) $ agentOut Susceptible, NoEvent)
            else returnA -< (agentOut Susceptible, NoEvent))
-- | Behaviour while infected: reply to all contacts with Infected until an
--   occasional recovery event fires, then switch to 'recoveredAgent'.
infectedAgent :: RandomGen g => SIRAgent g
infectedAgent =
    switch
      infected
      (const recoveredAgent)
  where
    infected :: RandomGen g => SF (SIRMonad g) SIRAgentIn (SIRAgentOut, Event ())
    infected = proc ain -> do
      recEvt <- occasionally illnessDuration () -< ()
      let a = event Infected (const Recovered) recEvt
      -- note that at the moment of recovery the agent can still infect others
      -- because it will still reply with Infected
      let ao = respondToContactWith Infected ain (agentOut a)
      returnA -< (ao, recEvt)
-- | A recovered agent is inert: it emits 'Recovered' forever and sends no data.
recoveredAgent :: RandomGen g => SIRAgent g
recoveredAgent = arr (\_ -> agentOut Recovered)
-- | Pick a (pseudo-)uniformly random element of the input list.
--
-- NOTE(review): if the random draw can ever yield exactly 1.0, @floor idx@
-- equals the list length and @!!@ goes out of bounds — TODO confirm the
-- range returned by getRandomRS for Double. Also partial on empty lists.
drawRandomElemS :: MonadRandom m => SF m [a] a
drawRandomElemS = proc as -> do
  r <- getRandomRS ((0, 1) :: (Double, Double)) -< ()
  let len = length as
  let idx = fromIntegral len * r
  let a = as !! floor idx
  returnA -< a
-- | Bernoulli trial in the Rand monad: True with probability @p@.
randomBoolM :: RandomGen g => Double -> Rand g Bool
randomBoolM p = fmap (<= p) (getRandomR (0, 1))
-- | Build the initial population: @n@ agents in total, the last @i@ of
-- which start out infected, the rest susceptible. Agent ids are assigned
-- consecutively from 0.
initAgents :: Int -> Int -> [(AgentId, SIRState)]
initAgents n i = susceptibles ++ infecteds
  where
    susceptibles = [ (aid, Susceptible) | aid <- [0 .. n - i - 1] ]
    infecteds    = [ (aid, Infected)    | aid <- [n - i .. n - 1] ]
-- | Queue an outgoing data message on an agent's output (prepended, so
-- messages end up in reverse order of sending).
dataFlow :: AgentData d -> AgentOut o d -> AgentOut o d
dataFlow newDatum out = out { aoData = newDatum : aoData out }
-- | Monadically fold a handler over all incoming data messages of an
-- agent input, starting from the given accumulator.
onDataM :: (Monad m)
        => (acc -> AgentData d -> m acc)
        -> AgentIn d
        -> acc
        -> m acc
onDataM dHdl ai acc = foldM dHdl acc (aiData ai)
-- | Pure right-fold of a handler over all incoming data messages of an
-- agent input.
onData :: (AgentData d -> acc -> acc) -> AgentIn d -> acc -> acc
onData dHdl ai a = foldr dHdl a (aiData ai)
-- | Decide whether the agent got infected this step: every incoming
-- @Contact Infected@ message triggers one Bernoulli trial with
-- probability @p@; once True, the result sticks.
gotInfected :: RandomGen g => Double -> SIRAgentIn -> Rand g Bool
gotInfected p ain = onDataM step ain False
  where
    step :: RandomGen g => Bool -> AgentData SIRMsg -> Rand g Bool
    step alreadyInfected (_, Contact Infected)
      | not alreadyInfected = randomBoolM p
    step alreadyInfected _  = return alreadyInfected
-- | Reply to every incoming Contact message with this agent's own state,
-- accumulating the replies on the given output.
respondToContactWith :: SIRState -> SIRAgentIn -> SIRAgentOut -> SIRAgentOut
respondToContactWith state ain ao = onData reply ain ao
  where
    reply :: AgentData SIRMsg -> SIRAgentOut -> SIRAgentOut
    reply (senderId, Contact _) acc = dataFlow (senderId, Contact state) acc
-- | Build the next round of agent inputs by routing every outgoing data
-- message to its receiver's input.
distributeData :: [(AgentId, AgentOut o d)] -> [AgentIn d]
distributeData aouts = map (distributeDataAux allMsgs) ains -- NOTE: speedup by running in parallel (if +RTS -Nx)
  where
    allMsgs = collectAllData aouts
    ains    = map (agentIn . fst) aouts
-- | Prepend all messages addressed to the given agent to its input.
distributeDataAux :: Map.Map AgentId [AgentData d]
                  -> AgentIn d
                  -> AgentIn d
distributeDataAux allMsgs ain = ain { aiData = msgs' }
  where
    -- the input may already carry messages; keep them and prepend the
    -- newly routed ones
    msgs  = aiData ain
    msgs' = maybe msgs (++ msgs) (Map.lookup (aiId ain) allMsgs)
-- | Gather all outgoing messages of all agents into a map keyed by the
-- receiver's id. Each entry is the list of (sender, payload) pairs
-- addressed to that receiver.
collectAllData :: [(AgentId, AgentOut o d)] -> Map.Map AgentId [AgentData d]
collectAllData aos = foldr collectAllDataAux Map.empty aos
  where
    -- fold one agent's outbox into the accumulator map
    collectAllDataAux :: (AgentId, AgentOut o d)
                      -> Map.Map AgentId [AgentData d]
                      -> Map.Map AgentId [AgentData d]
    collectAllDataAux (senderId, ao) accMsgs
        | not $ null msgs = foldr collectAllDataAuxAux accMsgs msgs
        | otherwise = accMsgs
      where
        msgs = aoData ao
        -- re-address a single outgoing (receiver, payload) pair as an
        -- incoming (sender, payload) pair under the receiver's key
        collectAllDataAuxAux :: AgentData d
                             -> Map.Map AgentId [AgentData d]
                             -> Map.Map AgentId [AgentData d]
        collectAllDataAuxAux (receiverId, m) accMsgs = accMsgs'
          where
            msg = (senderId, m)
            mayReceiverMsgs = Map.lookup receiverId accMsgs
            newMsgs = maybe [msg] (\receiverMsgs -> msg : receiverMsgs) mayReceiverMsgs
            -- NOTE: force evaluation of messages, will reduce memory-overhead EXTREMELY
            accMsgs' = seq newMsgs (Map.insert receiverId newMsgs accMsgs)
-- | An empty agent input (no pending messages) for the given agent id.
agentIn :: AgentId -> AgentIn d
agentIn aid = AgentIn {
    aiId = aid
  , aiData = []
  }
-- | An agent output with the given observable state and an empty outbox.
agentOut :: o -> AgentOut o d
agentOut o = AgentOut {
    aoData = []
  , aoObservable = o
  }
-- NOTE: implemented by myself, not present in Duani/BearRiver so far
-- | Decoupled parallel switch: routes the input to a collection of
-- signal functions, feeds their outputs to a switching-event SF and, if
-- the event fires, switches to the continuation in the *next* step
-- (the current step still returns the old outputs).
dpSwitch :: (Monad m, Traversable col)
         => (forall sf. (a -> col sf -> col (b, sf)))  -- ^ routing function
         -> col (SF m b c)                             -- ^ initial SF collection
         -> SF m (a, col c) (Event d)                  -- ^ switching-event source
         -> (col (SF m b c) -> d -> SF m a (col c))    -- ^ continuation generator
         -> SF m a (col c)
dpSwitch rf sfs sfF sfCs = MSF $ \a -> do
  -- pair every SF with its routed input and step them all
  let bsfs = rf a sfs
  res <- T.mapM (\(b, sf) -> unMSF sf b) bsfs
  let cs = fmap fst res
      sfs' = fmap snd res
  -- step the switching-event SF on the fresh outputs
  (e,sfF') <- unMSF sfF (a, cs)
  -- decoupled: the continuation (or recursion) only takes over next step
  let ct = case e of
            Event d -> sfCs sfs' d
            NoEvent -> dpSwitch rf sfs' sfF' sfCs
  return (cs, ct)
{-
This file is part of pia.
pia is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
pia is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pia. If not, see <http://www.gnu.org/licenses/>.
-}
{- |
Copyright : (c) Simon Woertz 2011-2012
Maintainer : Simon Woertz <simon@woertz.at>
Stability : provisional
-}
module Main where
import Parser.Interpretation (parseInterpretation)
import Parser.TRS (parseTRS)
import TRS (TRS)
import Interpretation (Interpretation, Orientation)
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure, exitSuccess)
import System.Console.GetOpt
import Data.Maybe (isJust)
import Control.Monad (when, unless)
import System.IO (hPutStrLn)
import GHC.IO.Handle.FD (stderr)
import Interpretation (orientate, functions, polynomial, lhs, rhs, compatible, monotone)
import Polynomial (monotone, compatible, subtractPolynomials)
-- | Entry point: parse command line options, read the trs and
-- interpretation files, and print the TRS, the interpretations and the
-- resulting orientations (verbosity controlled by the -v flag).
main::IO()
main = do
  argv <- getArgs
  prg <- getProgName
  -- parse command line arguments; abort with usage hint on any error
  let(actions, _, errors) = getOpt Permute options argv
  unless (null errors) $ do
    printErrorAndExit $ (unlines errors) ++ "Try " ++ prg ++ " --help for more information"
  -- thread the option-modifying actions through the defaults
  opts <- foldl (>>=) (return defaultOptions) actions
  (trsFile, interpretationFile) <- inputFiles opts
  -- NOTE(review): parsing is lazy here - parse errors only surface when
  -- the values are forced by the print functions below; confirm intended.
  let interpretation = parseInterpretation interpretationFile
  let termRewriteSystem = parseTRS trsFile
  let orientation = (orientate termRewriteSystem interpretation)
  printTRS termRewriteSystem
  printInterpretation interpretation (verbose opts)
  putStrLn ""
  printOrientation orientation (verbose opts)
-- | Write the given message to STDERR and terminate the program with a
-- non-zero exit code.
printErrorAndExit :: String -> IO ()
printErrorAndExit msg = hPutStrLn stderr msg >> exitFailure
-- | Print the given term rewrite system under a section header,
-- followed by a blank line.
printTRS :: TRS -> IO ()
printTRS t = mapM_ putStrLn ["=== TRS ===", show t, ""]
-- | 'printInterpretation' prints out the parsed interpretation in normal or verbose-mode.
-- In verbose mode it additionally reports monotonicity of each
-- function's polynomial; in both modes it reports monotonicity of the
-- interpretation as a whole.
printInterpretation :: Interpretation -> Bool -> IO ()
printInterpretation interpretation verboseMode= do
  putStrLn "=== INTERPRETATIONS ==="
  mapM_ (\f -> do
    print f
    let p = polynomial f
    putStrLn ""
    when verboseMode (putStrLn $ "monotone: " ++ (show $ Polynomial.monotone p) ++ "\n")) (functions interpretation)
  putStrLn $ "monotone (whole interpretations): " ++ (show $ Interpretation.monotone interpretation)
-- | 'printOrientation' prints out the given orientation in normal or verbose-mode.
-- In verbose mode it additionally reports compatibility of each rule's
-- lhs-rhs polynomial difference; in both modes it reports compatibility
-- of the whole system.
printOrientation :: [Orientation] -> Bool -> IO ()
printOrientation orientation verboseMode = do
  putStrLn "=== ORIENTATIONS ==="
  mapM_ (\o -> do
    print o
    let p = subtractPolynomials (lhs o) (rhs o)
    putStrLn ""
    when verboseMode (putStrLn $ "compatible: " ++ (show $ Polynomial.compatible p) ++ "\n")) orientation
  putStrLn $ "compatible (whole system): " ++ (show $ Interpretation.compatible orientation)
-- | data structure that holds the options specified via command line arguments
data Options = Options {
    trs :: Maybe FilePath -- ^ if set this contains a filepath to a trs file (required, -t)
  , int :: Maybe FilePath -- ^ if set this contains a filepath to an interpretation file (required, -i)
  , verbose :: Bool -- ^ verbose output (-v)
  } deriving Show
-- | Render the usage string (synopsis plus option descriptions) for the
-- given program name.
usageMessage :: String -> String
usageMessage programName = usageInfo header options
  where
    header = programName ++ " -t FILE -i FILE [OPTIONS]"
-- | 'defaultOptions' of pia: no input files selected, non-verbose.
defaultOptions :: Options
defaultOptions = Options {
    trs = Nothing
  , int = Nothing
  , verbose = False
  }
-- | 'requiredOptions' takes the options and returns 'True' if all
-- required arguments (trs file and interpretation file) are set.
-- (Simplified: the original guard clause returned the literals
-- True/False instead of the boolean expression itself.)
requiredOptions :: Options -> Bool
requiredOptions opts = isJust (trs opts) && isJust (int opts)
-- | 'inputFiles' tries to read the files specified in the options and returns the content of the files
-- as @(trs contents, interpretation contents)@.
-- NOTE(review): a missing option aborts via 'error' (partial); also
-- 'readFile' is lazy, so read errors may surface later - confirm intended.
inputFiles :: Options -> IO (String, String)
inputFiles opts = do
  trsFile <- maybe (error "trs file not found or not specified") readFile (trs opts)
  intFile <- maybe (error "interpretation file not found or not specified") readFile (int opts)
  return(trsFile, intFile)
-- | Command line option descriptors; each option yields an IO action
-- that updates the 'Options' record (--help prints usage and exits).
options :: [OptDescr (Options -> IO Options)]
options =
  [
    Option ['t'] ["trs-file"] (ReqArg (\t opts -> return opts {trs = Just t}) "FILE") "path to trs file "
  , Option ['i'] ["interpretation-file"] (ReqArg (\i opts -> return opts {int = Just i}) "FILE") "path to interpretation file "
  , Option ['v'] ["verbose"] (NoArg (\opts -> return opts {verbose = True})) "be verbose"
  , Option ['h'] ["help"] (NoArg (\_ -> do
      prg <- getProgName
      putStr $ usageMessage prg
      exitSuccess)) ""
  ]
| swoertz/pia | src/PIA.hs | gpl-3.0 | 5,441 | 0 | 19 | 1,241 | 1,209 | 622 | 587 | 87 | 1 |
module STH.Lib.Read.CharSeq (
readCharSeq
) where
import STH.Lib.List (unfoldrMaybe)
import STH.Lib.Text.Esc (bsUnEsc)
--CharSeq.S
-- | One item of a character-sequence expression: either a single
-- character or an inclusive range written @a-b@.
data CharSeq
  = Single Char
  | Range Char Char
  deriving (Show)
-- | Expand a character-sequence expression (after backslash-unescaping)
-- into the full list of characters it denotes; Nothing on a parse error.
readCharSeq :: String -> Maybe String
readCharSeq s = charSeqsToList <$> readCharSeqs (bsUnEsc s)
-- | Flatten parsed sequence items into the characters they stand for.
charSeqsToList :: [CharSeq] -> String
charSeqsToList = foldr expand []
  where
    expand (Single c)    acc = c : acc
    expand (Range lo hi) acc = [lo .. hi] ++ acc
-- | Parse a string into its sequence items. A leading @-@ (no left
-- endpoint for a range) is a parse error; @x-z@ parses as a range;
-- anything else is taken character by character.
readCharSeqs :: String -> Maybe [CharSeq]
readCharSeqs = unfoldrMaybe firstCharSeq
  where
    -- Nothing = parse error; Just Nothing = end of input;
    -- Just (Just (item, rest)) = one parsed item plus leftover.
    firstCharSeq :: String -> Maybe (Maybe (CharSeq, String))
    firstCharSeq ""      = Just Nothing
    firstCharSeq [x]     = Just (Just (Single x, ""))
    firstCharSeq ('-':_) = Nothing
    firstCharSeq [x,y]   = Just (Just (Single x, [y]))
    firstCharSeq (x:y:z:xs) = case y of
      '-' -> Just (Just (Range x z, xs))
      -- fixed: the original wrote 'otherwise' as the pattern here, which
      -- is a fresh variable binding that shadows 'Prelude.otherwise'
      -- (same behaviour, but misleading and warned about by -Wall)
      _   -> Just (Just (Single x, y:z:xs))
--CharSeq.E
| nbloomf/st-haskell | src/STH/Lib/Read/CharSeq.hs | gpl-3.0 | 953 | 0 | 15 | 201 | 373 | 200 | 173 | 24 | 6 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, OverloadedStrings #-}
module Carbon.Data.AcceptanceCondition where
import Control.Applicative
import Control.Monad
import Data.Aeson ((.=), ToJSON(..), object, FromJSON(..), Value(..), (.:))
import Data.Function (on)
import Data.Monoid (Monoid(..))
import qualified Data.Aeson as Aeson
import qualified Data.Text as Text
import Carbon.Data.Common
import Carbon.Data.Id
import Carbon.Data.Logic.Exp
import qualified Carbon.Data.Logic as Logic
-- | An acceptance condition: an optional database id, an optional proof
-- standard and a boolean formula over leaves of type @leaf@.
data AcceptanceCondition leaf = AcceptanceCondition {
    acceptanceConditionId :: Maybe Id
  , proofStandard :: Maybe ProofStandard
  , formula :: Exp leaf
  }
-- | Legal proof standards, ordered from weakest to strongest (the 'Ord'
-- and 'Bounded' instances rely on this declaration order).
data ProofStandard = PSScintillaOfEvidence
                   | PSPreponderanceOfEvidence
                   | PSBeyondResonableDoubt
                   deriving (Show, Read, Eq, Ord, Enum, Bounded)
-- Instances:
-- Structural equality over all three fields.
instance Eq l => Eq (AcceptanceCondition l) where
  a == b = let eqId = (==) `on` acceptanceConditionId
               eqPs = (==) `on` proofStandard
               eqF = (==) `on` formula
           in (eqId a b && eqPs a b && eqF a b)
-- Each field is parsed independently and is optional (msum falls back to
-- the untouched accumulator); an object where nothing parsed (result
-- still mempty) is rejected.
instance FromJSON (AcceptanceCondition String) where
  parseJSON (Object v) = do
    let setI ac i = ac{acceptanceConditionId = Just i}
        setPS ac p = ac{proofStandard = Just p}
        setF ac f = ac{formula = f}
        parseI ac = msum [liftM (setI ac) (v .: "id"), return ac]
        parsePS ac = msum [liftM (setPS ac) (v .: "proofStandard"), return ac]
        parseF ac = msum [liftM (setF ac) (v .: "formula"), return ac]
    ac <- parseF =<< parsePS =<< parseI mempty
    guard $ ac /= mempty
    return ac
  parseJSON _ = mzero
-- Formulas are serialised as strings; reuse the logic parser.
instance FromJSON (Exp String) where
  parseJSON (String s) = let e = Logic.execParser' Logic.parseExp "FromJSON" $ Text.unpack s
                         in case e of
                           (Right e) -> return e
                           (Left f) -> fail f
  parseJSON _ = mzero
-- Proof standards are serialised by constructor name.
instance FromJSON ProofStandard where
  parseJSON (String "PSScintillaOfEvidence") = return PSScintillaOfEvidence
  parseJSON (String "PSPreponderanceOfEvidence") = return PSPreponderanceOfEvidence
  parseJSON (String "PSBeyondResonableDoubt") = return PSBeyondResonableDoubt
  parseJSON _ = mzero
-- Mapping acts on the formula leaves only; id and proof standard stay.
instance Functor AcceptanceCondition where
  fmap f a = a{formula = fmap f (formula a)}
-- Inserting an expression replaces the formula.
instance Insertable (AcceptanceCondition l) (Exp l) where
  c <+ f = c{formula = f}
-- Inserting an id sets/overwrites the condition's id.
instance Insertable (AcceptanceCondition l) Id where
  c <+ i = c{acceptanceConditionId = Just i}
-- Inserting a proof standard sets/overwrites it.
instance Insertable (AcceptanceCondition l) ProofStandard where
  c <+ p = c{proofStandard = Just p}
-- mempty is the always-false condition; mappend is right-biased for the
-- proof standard and formula, and combines ids via the Maybe monoid.
instance Monoid (AcceptanceCondition leaf) where
  mempty = AcceptanceCondition {
      acceptanceConditionId = Nothing
    , proofStandard = Nothing
    , formula = Logic.Const False
    }
  mappend a b = AcceptanceCondition {
      acceptanceConditionId = (mappend `on` acceptanceConditionId) a b
    , proofStandard = proofStandard b
    , formula = formula b
    }
-- The (weakest, max) monoid over proof standards.
instance Monoid ProofStandard where
  mempty = minBound
  mappend = max
-- Lexicographic comparison: id, then proof standard, then formula.
instance Ord l => Ord (AcceptanceCondition l) where
  compare a b = let cId = compare `on` acceptanceConditionId
                    cPs = compare `on` proofStandard
                    cF = compare `on` formula
                    cs = [cId a b, cPs a b, cF a b]
                in head (filter (/= EQ) cs ++ [EQ])
-- Show ids by rendering every leaf id as a string first.
instance Show (AcceptanceCondition Id) where
  show = show . fmap show
-- Record-like rendering; the id is deliberately omitted.
instance Show (AcceptanceCondition String) where
  show a = concat [
      "AcceptanceCondition {proofStandard = "
    , show (proofStandard a)
    , ", formula = "
    , show (formula a)
    , "}"
    ]
-- JSON object mirroring the FromJSON instance above; the formula is
-- serialised via its Show rendering.
instance ToJSON (AcceptanceCondition String) where
  toJSON a = object [
      "id" .= acceptanceConditionId a
    , "proofStandard" .= proofStandard a
    , "formula" .= show (formula a)
    ]
-- Proof standards serialise as their constructor name.
instance ToJSON ProofStandard where
  toJSON = toJSON . show
-- The variables of a condition are those of its formula.
instance VarContainer AcceptanceCondition where
  vars = vars . formula
| runjak/carbon-adf | Carbon/Data/AcceptanceCondition.hs | gpl-3.0 | 4,056 | 0 | 15 | 1,057 | 1,289 | 687 | 602 | 93 | 0 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
module Wiretap.Data.MiniParser where
import GHC.Base
import GHC.Int
import GHC.Word
import Data.Bits
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Unsafe as BU
-- | A minimal parser over a lazy ByteString: yields the parsed value and
-- the remaining (unconsumed) input, or Nothing on failure.
newtype MiniParser a = MiniParser
  { runMiniParser :: BL.ByteString -> Maybe (a, BL.ByteString)
  }
-- | 'mzero' is the failing parser; 'mplus' is left-biased choice,
-- delegating to the 'Alternative' instance below.
--
-- Fixed: the original 'mplus' *sequenced* the two parsers (running the
-- second on the first's leftover input and discarding the first's
-- result). That contradicts the '<|>' of the Alternative instance and
-- violates the MonadPlus left-identity law (mzero `mplus` b == b).
instance MonadPlus MiniParser where
  mzero = parseError
  mplus = (<|>)
-- Left-biased choice on the *same* input: if the first parser fails the
-- second is tried from the original position (the Maybe '<|>').
instance Alternative MiniParser where
  empty = mzero
  a <|> b = MiniParser $ \bs ->
    runMiniParser a bs <|> runMiniParser b bs
-- Map over the parse result, leaving the leftover input untouched.
instance Functor MiniParser where
  fmap f a = MiniParser $ \ bs -> do
    (a', rest) <- runMiniParser a bs
    return (f a', rest)
  {-# INLINE fmap #-}
-- Standard state-threading applicative: run the function parser, then
-- the argument parser on its leftover. Bang patterns keep the input and
-- intermediate leftovers evaluated to avoid thunk build-up.
instance Applicative MiniParser where
  pure a = MiniParser $ \ !bs -> Just (a, bs)
  {-# INLINE pure #-}
  f <*> m =
    MiniParser $ \ !bs -> do
      (f', !rest) <- runMiniParser f bs
      (m', !rest') <- runMiniParser m rest
      return (f' m', rest')
  {-# INLINE (<*>) #-}
-- Sequencing threads the leftover input into the continuation; failure
-- anywhere short-circuits (the Maybe monad underneath).
instance Monad MiniParser where
  m1 >>= m2 = MiniParser $ \ bs -> do
    (a, !rest) <- runMiniParser m1 bs
    runMiniParser (m2 a) rest
  {-# INLINE (>>=) #-}
-- | The parser that fails on every input.
parseError :: MiniParser a
parseError = MiniParser (const Nothing)
{-# INLINABLE parseError #-}
-- | Take the next @n@ bytes of input.
-- NOTE(review): this never fails - with fewer than @n@ bytes left it
-- silently yields a shorter ByteString ('BL.splitAt' semantics), so
-- callers needing exactly @n@ bytes must check the length; confirm this
-- is the intended contract.
drawN :: Int64 -> MiniParser BL.ByteString
drawN n = MiniParser $ Just . BL.splitAt n
{-# INLINABLE drawN #-}
-- | Consume a single byte; fails at end of input.
drawWord8 :: MiniParser Word8
drawWord8 = MiniParser $ BL.uncons
{-# INLINABLE drawWord8 #-}
-- | Parse a big-endian 32-bit signed integer (reinterpreting the
-- unsigned reading).
drawInt32be :: MiniParser Int32
drawInt32be = fmap fromIntegral drawWord32be
{-# INLINABLE drawInt32be #-}
-- | Parse a big-endian 16-bit word.
--
-- Fixed: the original split off *4* bytes (copy-paste from the 32-bit
-- variant), so it consumed two bytes too many and failed when only 2 or
-- 3 bytes remained even though a full Word16 was available.
drawWord16be :: MiniParser Word16
drawWord16be = MiniParser $ \bs ->
  let (bs', rest) = BL.splitAt 2 bs in
  if BL.length bs' == 2 then
    let w1 = BL.index bs' 0
        w2 = BL.index bs' 1
    in
      -- Borrowed from Data.Binary.Get
      Just ( fromIntegral w1 `shiftl_w16` 8
             .|. fromIntegral w2
           , rest)
  else Nothing
{-# INLINABLE drawWord16be #-}
-- | Parse a big-endian 32-bit word; fails with fewer than 4 bytes left.
drawWord32be :: MiniParser Word32
drawWord32be = MiniParser $ \bs ->
  let (bs', rest) = BL.splitAt 4 bs in
  if BL.length bs' == 4 then
    let w1 = BL.index bs' 0
        w2 = BL.index bs' 1
        w3 = BL.index bs' 2
        w4 = BL.index bs' 3 in
    -- assemble most-significant byte first
    Just ( fromIntegral w1 `shiftl_w32` 24
           .|. fromIntegral w2 `shiftl_w32` 16
           .|. fromIntegral w3 `shiftl_w32` 8
           .|. fromIntegral w4
         , rest)
  else
    Nothing
{-# INLINABLE drawWord32be #-}
-- | Parse a big-endian 64-bit word; fails with fewer than 8 bytes left.
drawWord64be :: MiniParser Word64
drawWord64be = MiniParser $ \bs ->
  let (bs', rest) = BL.splitAt 8 bs in
  if BL.length bs' == 8 then
    let w1 = BL.index bs' 0
        w2 = BL.index bs' 1
        w3 = BL.index bs' 2
        w4 = BL.index bs' 3
        w5 = BL.index bs' 4
        w6 = BL.index bs' 5
        w7 = BL.index bs' 6
        w8 = BL.index bs' 7
    in
      -- Borrowed from Data.Binary.Get
      Just ( fromIntegral w1 `shiftl_w64` 56
             .|. fromIntegral w2 `shiftl_w64` 48
             .|. fromIntegral w3 `shiftl_w64` 40
             .|. fromIntegral w4 `shiftl_w64` 32
             .|. fromIntegral w5 `shiftl_w64` 24
             .|. fromIntegral w6 `shiftl_w64` 16
             .|. fromIntegral w7 `shiftl_w64` 8
             .|. fromIntegral w8
           , rest)
  else
    Nothing
{-# INLINABLE drawWord64be #-}
-- | Offset-based parser over a *strict* ByteString: given the buffer and
-- a start offset, yields the offset just past the value and the value.
-- No bounds checks are performed ('unsafeIndex') - callers must ensure
-- enough bytes remain.
type MP a = B.ByteString -> Int -> (Int, a)
-- | Read one byte at offset @i@.
parseWord8 :: MP Word8
parseWord8 bs !i = (i + 1, BU.unsafeIndex bs i)
{-# INLINABLE parseWord8 #-}
-- | Read a big-endian 16-bit word at offset @i@ (consumes 2 bytes).
parseWord16be :: MP Word16
parseWord16be bs !i =
  (i + 2, fromIntegral w1 `shiftl_w16` 8 .|. fromIntegral w2)
  where
    w1 = BU.unsafeIndex bs (i + 0)
    w2 = BU.unsafeIndex bs (i + 1)
{-# INLINABLE parseWord16be #-}
-- | Read a big-endian 32-bit signed integer at offset @i@
-- (reinterpreting the unsigned reading).
parseInt32be :: MP Int32
parseInt32be bs !i =
  let (i', w) = parseWord32be bs i
  in (i', fromIntegral w)
{-# INLINABLE parseInt32be #-}
-- | Read a big-endian 32-bit word at offset @i@ (consumes 4 bytes,
-- no bounds checking).
parseWord32be :: MP Word32
parseWord32be bs !i =
  (i + 4,
    fromIntegral w1 `shiftl_w32` 24
    .|. fromIntegral w2 `shiftl_w32` 16
    .|. fromIntegral w3 `shiftl_w32` 8
    .|. fromIntegral w4
  )
  where
    w1 = BU.unsafeIndex bs (i + 0)
    w2 = BU.unsafeIndex bs (i + 1)
    w3 = BU.unsafeIndex bs (i + 2)
    w4 = BU.unsafeIndex bs (i + 3)
{-# INLINABLE parseWord32be #-}
-- | Read a big-endian 64-bit word at offset @i@ (consumes 8 bytes,
-- no bounds checking).
parseWord64be :: MP Word64
parseWord64be bs !i =
  -- Borrowed from Data.Binary.Get
  ( i + 8
  , fromIntegral w1 `shiftl_w64` 56
    .|. fromIntegral w2 `shiftl_w64` 48
    .|. fromIntegral w3 `shiftl_w64` 40
    .|. fromIntegral w4 `shiftl_w64` 32
    .|. fromIntegral w5 `shiftl_w64` 24
    .|. fromIntegral w6 `shiftl_w64` 16
    .|. fromIntegral w7 `shiftl_w64` 8
    .|. fromIntegral w8
  )
  where
    w1 = BU.unsafeIndex bs (i + 0)
    w2 = BU.unsafeIndex bs (i + 1)
    w3 = BU.unsafeIndex bs (i + 2)
    w4 = BU.unsafeIndex bs (i + 3)
    w5 = BU.unsafeIndex bs (i + 4)
    w6 = BU.unsafeIndex bs (i + 5)
    w7 = BU.unsafeIndex bs (i + 6)
    w8 = BU.unsafeIndex bs (i + 7)
{-# INLINABLE parseWord64be #-}
-- Borrowed from Data.Binary.Get
------------------------------------------------------------------------
-- Unchecked shifts
-- Left shifts via GHC primops that skip the shift-amount range check;
-- safe here because all call sites pass constant amounts < word width.
shiftl_w16 :: Word16 -> Int -> Word16
shiftl_w32 :: Word32 -> Int -> Word32
shiftl_w64 :: Word64 -> Int -> Word64
shiftl_w16 (W16# w) (I# i) = W16# (w `uncheckedShiftL#` i)
shiftl_w32 (W32# w) (I# i) = W32# (w `uncheckedShiftL#` i)
shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL64#` i)
| ucla-pls/wiretap-tools | src/Wiretap/Data/MiniParser.hs | gpl-3.0 | 5,595 | 0 | 28 | 1,521 | 1,858 | 979 | 879 | 144 | 2 |
module Hadolint.Rule.DL3020 (rule) where
import Data.Foldable (toList)
import qualified Data.Text as Text
import Hadolint.Rule
import Language.Docker.Syntax
-- | DL3020: ADD should only be used for archives (auto-extracted) or
-- URLs (downloaded); plain files and folders belong in COPY.
rule :: Rule args
rule = simpleRule code severity message check
  where
    code = "DL3020"
    severity = DLErrorC
    message = "Use COPY instead of ADD for files and folders"
    -- an ADD is acceptable only if *every* SourcePath is an archive or a
    -- URL (non-SourcePath sources are skipped by the pattern filter)
    check (Add (AddArgs srcs _ _ _)) =
      and [isArchive src || isUrl src | SourcePath src <- toList srcs]
    check _ = True
{-# INLINEABLE rule #-}
-- | Does the path end in one of the known archive file extensions?
isArchive :: Text.Text -> Bool
isArchive path =
  any (`Text.isSuffixOf` path) archiveFileFormatExtensions
-- | Does the path look like an http(s) URL?
isUrl :: Text.Text -> Bool
isUrl path = any (`Text.isPrefixOf` path) (map Text.pack ["https://", "http://"])
| lukasmartinelli/hadolint | src/Hadolint/Rule/DL3020.hs | gpl-3.0 | 755 | 0 | 11 | 167 | 239 | 131 | 108 | 20 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.IAMCredentials.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.IAMCredentials.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | V1 error format.
-- | V1 error format.
-- NOTE(review): this module is auto-generated (gogol); do not hand-edit.
data Xgafv
    = X1
      -- ^ @1@
      -- v1 error format
    | X2
      -- ^ @2@
      -- v2 error format
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
-- Query-parameter round-trip: "1"/"2" <-> X1/X2, with a descriptive
-- Left on anything else.
instance FromHttpApiData Xgafv where
    parseQueryParam = \case
        "1" -> Right X1
        "2" -> Right X2
        x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
    toQueryParam = \case
        X1 -> "1"
        X2 -> "2"
-- JSON (de)serialisation reuses the textual query-parameter encoding.
instance FromJSON Xgafv where
    parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
    toJSON = toJSONText
| brendanhay/gogol | gogol-iamcredentials/gen/Network/Google/IAMCredentials/Types/Sum.hs | mpl-2.0 | 1,231 | 0 | 11 | 292 | 197 | 114 | 83 | 26 | 0 |
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
{-# LANGUAGE TypeApplications #-}
-- NOTE(review): this is a Brittany *formatting test fixture* - the exact
-- layout below is the expected output under the config in the header
-- comment, so the code must not be re-formatted. It builds the docAlt
-- alternatives for a pattern binding: one-line, one-line with trailing
-- where, and two-line with multi-line where. The doc* combinators and
-- several names (hasComments, mWhereDocs, patPartInline, ...) come from
-- surrounding code not visible here.
layoutPatternBindFinal alignmentToken binderDoc mPatDoc clauseDocs = do
  docAlt
    $ -- one-line solution
      [ docCols
          (ColBindingLine alignmentToken)
          [ docSeq (patPartInline ++ [guardPart])
          , docSeq
            [ appSep $ return binderDoc
            , docForceSingleline $ return body
            , wherePart
            ]
          ]
      | not hasComments
      , [(guards, body, _bodyRaw)] <- [clauseDocs]
      , let guardPart = singleLineGuardsDoc guards
      , wherePart <- case mWhereDocs of
        Nothing -> return @[] $ docEmpty
        Just [w] -> return @[] $ docSeq
          [ docSeparator
          , appSep $ docLit $ Text.pack "where"
          , docSetIndentLevel $ docForceSingleline $ return w
          ]
        _ -> []
      ]
    ++ -- one-line solution + where in next line(s)
       [ docLines
           $ [ docCols
                 (ColBindingLine alignmentToken)
                 [ docSeq (patPartInline ++ [guardPart])
                 , docSeq
                   [appSep $ return binderDoc, docForceParSpacing $ return body]
                 ]
             ]
           ++ wherePartMultiLine
       | [(guards, body, _bodyRaw)] <- [clauseDocs]
       , let guardPart = singleLineGuardsDoc guards
       , Data.Maybe.isJust mWhereDocs
       ]
    ++ -- two-line solution + where in next line(s)
       [ docLines
           $ [ docForceSingleline
               $ docSeq (patPartInline ++ [guardPart, return binderDoc])
             , docEnsureIndent BrIndentRegular $ docForceSingleline $ return
               body
             ]
           ++ wherePartMultiLine
       | [(guards, body, _bodyRaw)] <- [clauseDocs]
       , let guardPart = singleLineGuardsDoc guards
       ]
| lspitzner/brittany | data/Test536.hs | agpl-3.0 | 1,870 | 0 | 21 | 641 | 436 | 228 | 208 | 41 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.