code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Handler.LibraryItemAdd where
import Data.Maybe (fromJust)
import Import
-- | Process the addition of new Books to the Library.
--
-- On a successful ISBN form submission the Book is created (if needed),
-- removed from the Wishlist and added to the Library; otherwise an error
-- page is rendered.
postLibraryItemAddR :: Handler Html
postLibraryItemAddR = do
    ((result, libraryWidget), libraryEnctype) <- runFormPost libraryItemIsbnForm
    -- "edit" is posted by the "save & continue editing" submit button.
    continueEditing <- runInputPost $ iopt textField "edit"
    case result of
        FormSuccess isbn -> do
            -- NOTE(review): a Nothing from 'createBookFromIsbn' still
            -- aborts the handler via 'error'; consider a proper error
            -- response instead.
            bookId <- fromMaybe (error "create failed") <$>
                      createBookFromIsbn isbn
            mItem <- runDB . getBy $ UniqueLibraryBook bookId
            mItemId <- case mItem of
                Just _ -> setMessage "That Book is already in your Library"
                       >> return Nothing
                Nothing -> setMessage "Added Book to your Library" >>
                           runDB (deleteWhere [WishlistItemBook ==. bookId]) >>
                           Just <$> createLibraryItemFromBook bookId
            -- Extract the item id with a total pattern match instead of
            -- the partial 'isJust'/'fromJust' combination.
            case mItemId of
                Just itemId | isJust continueEditing ->
                    redirect (LibraryItemEditR itemId)
                _ -> redirect LibraryR
        _ -> defaultLayout $ do
            setTitle "An Error Occured While Adding to the Library"
            $(widgetFile "library/libraryItemAddError")
|
prikhi/MyBookList
|
Handler/LibraryItemAdd.hs
|
gpl-3.0
| 1,302
| 0
| 23
| 451
| 261
| 124
| 137
| 24
| 4
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Projects.InspectTemplates.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates an InspectTemplate for re-using frequently used configuration
-- for inspecting content, images, and storage. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-templates to learn more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.projects.inspectTemplates.create@.
module Network.Google.Resource.DLP.Projects.InspectTemplates.Create
(
-- * REST Resource
ProjectsInspectTemplatesCreateResource
-- * Creating a Request
, projectsInspectTemplatesCreate
, ProjectsInspectTemplatesCreate
-- * Request Lenses
, pitcParent
, pitcXgafv
, pitcUploadProtocol
, pitcAccessToken
, pitcUploadType
, pitcPayload
, pitcCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- | A resource alias for @dlp.projects.inspectTemplates.create@ method which the
-- 'ProjectsInspectTemplatesCreate' request conforms to.
type ProjectsInspectTemplatesCreateResource =
"v2" :>
Capture "parent" Text :>
"inspectTemplates" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GooglePrivacyDlpV2CreateInspectTemplateRequest
:> Post '[JSON] GooglePrivacyDlpV2InspectTemplate
-- | Creates an InspectTemplate for re-using frequently used configuration
-- for inspecting content, images, and storage. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-templates to learn more.
--
-- /See:/ 'projectsInspectTemplatesCreate' smart constructor.
data ProjectsInspectTemplatesCreate =
ProjectsInspectTemplatesCreate'
{ _pitcParent :: !Text
, _pitcXgafv :: !(Maybe Xgafv)
, _pitcUploadProtocol :: !(Maybe Text)
, _pitcAccessToken :: !(Maybe Text)
, _pitcUploadType :: !(Maybe Text)
, _pitcPayload :: !GooglePrivacyDlpV2CreateInspectTemplateRequest
, _pitcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsInspectTemplatesCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pitcParent'
--
-- * 'pitcXgafv'
--
-- * 'pitcUploadProtocol'
--
-- * 'pitcAccessToken'
--
-- * 'pitcUploadType'
--
-- * 'pitcPayload'
--
-- * 'pitcCallback'
projectsInspectTemplatesCreate
    :: Text -- ^ 'pitcParent'
    -> GooglePrivacyDlpV2CreateInspectTemplateRequest -- ^ 'pitcPayload'
    -> ProjectsInspectTemplatesCreate
-- All optional query parameters start out unset; only the required
-- parent and payload are taken as arguments.
projectsInspectTemplatesCreate parent payload =
    ProjectsInspectTemplatesCreate'
        { _pitcParent = parent
        , _pitcXgafv = Nothing
        , _pitcUploadProtocol = Nothing
        , _pitcAccessToken = Nothing
        , _pitcUploadType = Nothing
        , _pitcPayload = payload
        , _pitcCallback = Nothing
        }
-- | Required. Parent resource name. The format of this value varies
-- depending on the scope of the request (project or organization) and
-- whether you have [specified a processing
-- location](https:\/\/cloud.google.com\/dlp\/docs\/specifying-location): +
-- Projects scope, location specified:
-- \`projects\/\`PROJECT_ID\`\/locations\/\`LOCATION_ID + Projects scope,
-- no location specified (defaults to global): \`projects\/\`PROJECT_ID +
-- Organizations scope, location specified:
-- \`organizations\/\`ORG_ID\`\/locations\/\`LOCATION_ID + Organizations
-- scope, no location specified (defaults to global):
-- \`organizations\/\`ORG_ID The following example \`parent\` string
-- specifies a parent project with the identifier \`example-project\`, and
-- specifies the \`europe-west3\` location for processing data:
-- parent=projects\/example-project\/locations\/europe-west3
pitcParent :: Lens' ProjectsInspectTemplatesCreate Text
pitcParent = lens _pitcParent (\record new -> record {_pitcParent = new})
-- | V1 error format.
pitcXgafv :: Lens' ProjectsInspectTemplatesCreate (Maybe Xgafv)
pitcXgafv
= lens _pitcXgafv (\ s a -> s{_pitcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pitcUploadProtocol :: Lens' ProjectsInspectTemplatesCreate (Maybe Text)
pitcUploadProtocol
= lens _pitcUploadProtocol
(\ s a -> s{_pitcUploadProtocol = a})
-- | OAuth access token.
pitcAccessToken :: Lens' ProjectsInspectTemplatesCreate (Maybe Text)
pitcAccessToken
= lens _pitcAccessToken
(\ s a -> s{_pitcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pitcUploadType :: Lens' ProjectsInspectTemplatesCreate (Maybe Text)
pitcUploadType
= lens _pitcUploadType
(\ s a -> s{_pitcUploadType = a})
-- | Multipart request metadata.
pitcPayload :: Lens' ProjectsInspectTemplatesCreate GooglePrivacyDlpV2CreateInspectTemplateRequest
pitcPayload
= lens _pitcPayload (\ s a -> s{_pitcPayload = a})
-- | JSONP
pitcCallback :: Lens' ProjectsInspectTemplatesCreate (Maybe Text)
pitcCallback
= lens _pitcCallback (\ s a -> s{_pitcCallback = a})
instance GoogleRequest ProjectsInspectTemplatesCreate
where
type Rs ProjectsInspectTemplatesCreate =
GooglePrivacyDlpV2InspectTemplate
type Scopes ProjectsInspectTemplatesCreate =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsInspectTemplatesCreate'{..}
= go _pitcParent _pitcXgafv _pitcUploadProtocol
_pitcAccessToken
_pitcUploadType
_pitcCallback
(Just AltJSON)
_pitcPayload
dLPService
where go
= buildClient
(Proxy ::
Proxy ProjectsInspectTemplatesCreateResource)
mempty
|
brendanhay/gogol
|
gogol-dlp/gen/Network/Google/Resource/DLP/Projects/InspectTemplates/Create.hs
|
mpl-2.0
| 6,750
| 0
| 17
| 1,372
| 796
| 471
| 325
| 118
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Datafeeds.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Registers a datafeed configuration with your Merchant Center account.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.datafeeds.insert@.
module Network.Google.Resource.Content.Datafeeds.Insert
(
-- * REST Resource
DatafeedsInsertResource
-- * Creating a Request
, datafeedsInsert
, DatafeedsInsert
-- * Request Lenses
, diXgafv
, diMerchantId
, diUploadProtocol
, diAccessToken
, diUploadType
, diPayload
, diCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.datafeeds.insert@ method which the
-- 'DatafeedsInsert' request conforms to.
type DatafeedsInsertResource =
"content" :>
"v2.1" :>
Capture "merchantId" (Textual Word64) :>
"datafeeds" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Datafeed :> Post '[JSON] Datafeed
-- | Registers a datafeed configuration with your Merchant Center account.
--
-- /See:/ 'datafeedsInsert' smart constructor.
data DatafeedsInsert =
DatafeedsInsert'
{ _diXgafv :: !(Maybe Xgafv)
, _diMerchantId :: !(Textual Word64)
, _diUploadProtocol :: !(Maybe Text)
, _diAccessToken :: !(Maybe Text)
, _diUploadType :: !(Maybe Text)
, _diPayload :: !Datafeed
, _diCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DatafeedsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'diXgafv'
--
-- * 'diMerchantId'
--
-- * 'diUploadProtocol'
--
-- * 'diAccessToken'
--
-- * 'diUploadType'
--
-- * 'diPayload'
--
-- * 'diCallback'
datafeedsInsert
    :: Word64 -- ^ 'diMerchantId'
    -> Datafeed -- ^ 'diPayload'
    -> DatafeedsInsert
-- The merchant id is wrapped into its 'Textual' representation via
-- '_Coerce'; every optional query parameter starts out unset.
datafeedsInsert merchantId payload =
    DatafeedsInsert'
        { _diXgafv = Nothing
        , _diMerchantId = _Coerce # merchantId
        , _diUploadProtocol = Nothing
        , _diAccessToken = Nothing
        , _diUploadType = Nothing
        , _diPayload = payload
        , _diCallback = Nothing
        }
-- | V1 error format.
diXgafv :: Lens' DatafeedsInsert (Maybe Xgafv)
diXgafv = lens _diXgafv (\ s a -> s{_diXgafv = a})
-- | The ID of the account that manages the datafeed. This account cannot be
-- a multi-client account.
diMerchantId :: Lens' DatafeedsInsert Word64
diMerchantId
= lens _diMerchantId (\ s a -> s{_diMerchantId = a})
. _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
diUploadProtocol :: Lens' DatafeedsInsert (Maybe Text)
diUploadProtocol
= lens _diUploadProtocol
(\ s a -> s{_diUploadProtocol = a})
-- | OAuth access token.
diAccessToken :: Lens' DatafeedsInsert (Maybe Text)
diAccessToken
= lens _diAccessToken
(\ s a -> s{_diAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
diUploadType :: Lens' DatafeedsInsert (Maybe Text)
diUploadType
= lens _diUploadType (\ s a -> s{_diUploadType = a})
-- | Multipart request metadata.
diPayload :: Lens' DatafeedsInsert Datafeed
diPayload
= lens _diPayload (\ s a -> s{_diPayload = a})
-- | JSONP
diCallback :: Lens' DatafeedsInsert (Maybe Text)
diCallback
= lens _diCallback (\ s a -> s{_diCallback = a})
instance GoogleRequest DatafeedsInsert where
type Rs DatafeedsInsert = Datafeed
type Scopes DatafeedsInsert =
'["https://www.googleapis.com/auth/content"]
requestClient DatafeedsInsert'{..}
= go _diMerchantId _diXgafv _diUploadProtocol
_diAccessToken
_diUploadType
_diCallback
(Just AltJSON)
_diPayload
shoppingContentService
where go
= buildClient
(Proxy :: Proxy DatafeedsInsertResource)
mempty
|
brendanhay/gogol
|
gogol-shopping-content/gen/Network/Google/Resource/Content/Datafeeds/Insert.hs
|
mpl-2.0
| 5,003
| 0
| 18
| 1,202
| 803
| 466
| 337
| 114
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ConsumerSurveys.Surveys.Start
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Begins running a survey.
--
-- /See:/ <https://developers.google.com/surveys/ Consumer Surveys API Reference> for @consumersurveys.surveys.start@.
module Network.Google.Resource.ConsumerSurveys.Surveys.Start
(
-- * REST Resource
SurveysStartResource
-- * Creating a Request
, surveysStart
, SurveysStart
-- * Request Lenses
, ssResourceId
, ssPayload
) where
import Network.Google.ConsumerSurveys.Types
import Network.Google.Prelude
-- | A resource alias for @consumersurveys.surveys.start@ method which the
-- 'SurveysStart' request conforms to.
type SurveysStartResource =
"consumersurveys" :>
"v2" :>
"surveys" :>
Capture "resourceId" Text :>
"start" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] SurveysStartRequest :>
Post '[JSON] SurveysStartResponse
-- | Begins running a survey.
--
-- /See:/ 'surveysStart' smart constructor.
data SurveysStart =
SurveysStart'
{ _ssResourceId :: !Text
, _ssPayload :: !SurveysStartRequest
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SurveysStart' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ssResourceId'
--
-- * 'ssPayload'
surveysStart
    :: Text -- ^ 'ssResourceId'
    -> SurveysStartRequest -- ^ 'ssPayload'
    -> SurveysStart
surveysStart resourceId payload =
    SurveysStart'
        { _ssResourceId = resourceId
        , _ssPayload = payload
        }
ssResourceId :: Lens' SurveysStart Text
ssResourceId
= lens _ssResourceId (\ s a -> s{_ssResourceId = a})
-- | Multipart request metadata.
ssPayload :: Lens' SurveysStart SurveysStartRequest
ssPayload
= lens _ssPayload (\ s a -> s{_ssPayload = a})
instance GoogleRequest SurveysStart where
type Rs SurveysStart = SurveysStartResponse
type Scopes SurveysStart =
'["https://www.googleapis.com/auth/consumersurveys",
"https://www.googleapis.com/auth/userinfo.email"]
requestClient SurveysStart'{..}
= go _ssResourceId (Just AltJSON) _ssPayload
consumerSurveysService
where go
= buildClient (Proxy :: Proxy SurveysStartResource)
mempty
|
brendanhay/gogol
|
gogol-consumersurveys/gen/Network/Google/Resource/ConsumerSurveys/Surveys/Start.hs
|
mpl-2.0
| 3,116
| 0
| 14
| 700
| 388
| 233
| 155
| 62
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Snapshots.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified Snapshot resource. Keep in mind that deleting a
-- single snapshot might not necessarily delete all the data on that
-- snapshot. If any data on the snapshot that is marked for deletion is
-- needed for subsequent snapshots, the data will be moved to the next
-- corresponding snapshot. For more information, see Deleting snaphots.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.snapshots.delete@.
module Network.Google.Resource.Compute.Snapshots.Delete
(
-- * REST Resource
SnapshotsDeleteResource
-- * Creating a Request
, snapshotsDelete
, SnapshotsDelete
-- * Request Lenses
, snaSnapshot
, snaProject
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.snapshots.delete@ method which the
-- 'SnapshotsDelete' request conforms to.
type SnapshotsDeleteResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"snapshots" :>
Capture "snapshot" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes the specified Snapshot resource. Keep in mind that deleting a
-- single snapshot might not necessarily delete all the data on that
-- snapshot. If any data on the snapshot that is marked for deletion is
-- needed for subsequent snapshots, the data will be moved to the next
-- corresponding snapshot. For more information, see Deleting snaphots.
--
-- /See:/ 'snapshotsDelete' smart constructor.
data SnapshotsDelete = SnapshotsDelete'
{ _snaSnapshot :: !Text
, _snaProject :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SnapshotsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'snaSnapshot'
--
-- * 'snaProject'
snapshotsDelete
    :: Text -- ^ 'snaSnapshot'
    -> Text -- ^ 'snaProject'
    -> SnapshotsDelete
snapshotsDelete snapshot project =
    SnapshotsDelete' {_snaSnapshot = snapshot, _snaProject = project}
-- | Name of the Snapshot resource to delete.
snaSnapshot :: Lens' SnapshotsDelete Text
snaSnapshot
= lens _snaSnapshot (\ s a -> s{_snaSnapshot = a})
-- | Project ID for this request.
snaProject :: Lens' SnapshotsDelete Text
snaProject
= lens _snaProject (\ s a -> s{_snaProject = a})
instance GoogleRequest SnapshotsDelete where
type Rs SnapshotsDelete = Operation
type Scopes SnapshotsDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient SnapshotsDelete'{..}
= go _snaProject _snaSnapshot (Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy SnapshotsDeleteResource)
mempty
|
rueshyna/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/Snapshots/Delete.hs
|
mpl-2.0
| 3,836
| 0
| 15
| 873
| 396
| 241
| 155
| 64
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.MapsEngine.Maps.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Create a map asset.
--
-- /See:/ <https://developers.google.com/maps-engine/ Google Maps Engine API Reference> for @mapsengine.maps.create@.
module Network.Google.Resource.MapsEngine.Maps.Create
(
-- * REST Resource
MapsCreateResource
-- * Creating a Request
, mapsCreate
, MapsCreate
-- * Request Lenses
, mcPayload
) where
import Network.Google.MapsEngine.Types
import Network.Google.Prelude
-- | A resource alias for @mapsengine.maps.create@ method which the
-- 'MapsCreate' request conforms to.
type MapsCreateResource =
"mapsengine" :>
"v1" :>
"maps" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Map :> Post '[JSON] Map
-- | Create a map asset.
--
-- /See:/ 'mapsCreate' smart constructor.
newtype MapsCreate = MapsCreate'
{ _mcPayload :: Map
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MapsCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mcPayload'
mapsCreate
    :: Map -- ^ 'mcPayload'
    -> MapsCreate
mapsCreate payload = MapsCreate' {_mcPayload = payload}
-- | Multipart request metadata.
mcPayload :: Lens' MapsCreate Map
mcPayload
= lens _mcPayload (\ s a -> s{_mcPayload = a})
instance GoogleRequest MapsCreate where
type Rs MapsCreate = Map
type Scopes MapsCreate =
'["https://www.googleapis.com/auth/mapsengine"]
requestClient MapsCreate'{..}
= go (Just AltJSON) _mcPayload mapsEngineService
where go
= buildClient (Proxy :: Proxy MapsCreateResource)
mempty
|
rueshyna/gogol
|
gogol-maps-engine/gen/Network/Google/Resource/MapsEngine/Maps/Create.hs
|
mpl-2.0
| 2,506
| 0
| 12
| 585
| 304
| 187
| 117
| 47
| 1
|
module Logic.Propositional
( Proposition(Val, Var, Not, And, Or)
, (&), (<|>), (-->), (<->), (</>)
, eval
, equiv
, depth
, varNames
, satisfiable, satisfying, tautology
, dnf, cnf
, isDnf, isCnf
) where
import Data.List (union)
import Data.Maybe (fromJust)
-- | Data type for propositions.
data Proposition
= Val Bool -- ^ boolean value
| Var String -- ^ boolean variable
| Not Proposition -- ^ negation
| And Proposition Proposition -- ^ conjunction
| Or Proposition Proposition -- ^ disjunction
deriving (Eq)
instance Show Proposition where
    -- Renders a proposition using !, & and |, inserting parentheses
    -- only where precedence requires them: Or under And, And under Or,
    -- and any compound proposition under Not.
    show (Val False) = "0"
    show (Val True) = "1"
    show (Var name) = name
    show (Not p@(_ `And` _)) = "!(" ++ show p ++ ")"
    show (Not p@(_ `Or` _)) = "!(" ++ show p ++ ")"
    show (Not p) = "!" ++ show p
    show (lhs `And` rhs) = wrapOr lhs ++ " & " ++ wrapOr rhs
        where wrapOr q@(_ `Or` _) = "(" ++ show q ++ ")"
              wrapOr q = show q
    show (lhs `Or` rhs) = wrapAnd lhs ++ " | " ++ wrapAnd rhs
        where wrapAnd q@(_ `And` _) = "(" ++ show q ++ ")"
              wrapAnd q = show q
-- Helper operators to make construction of propositions easier.
-- Each gets an explicit type signature (previously inferred), and the
-- misplaced trailing "-- ^" haddock markers become proper "-- |" docs.

-- | Conjunction.
(&) :: Proposition -> Proposition -> Proposition
(&) = And

-- | Disjunction.
(<|>) :: Proposition -> Proposition -> Proposition
(<|>) = Or

-- | Implication: @p --> q@ is @!p | q@.
(-->) :: Proposition -> Proposition -> Proposition
p --> q = Not p `Or` q

-- | Equivalence (biconditional).
(<->) :: Proposition -> Proposition -> Proposition
p <-> q = (p --> q) & (q --> p)

-- | Exclusive or.
(</>) :: Proposition -> Proposition -> Proposition
p </> q = (p & Not q) `Or` (Not p & q)
-- | An interpretation is a mapping from variables to boolean values.
type Interpretation = [(String,Bool)]
-- | Evaluates a proposition under an interpretation.
-- Returns 'Nothing' when the interpretation is missing a variable.
eval :: Proposition -> Interpretation -> Maybe Bool
eval (Val b) _ = Just b
eval (Var name) env = lookup name env
eval (Not p) env = not <$> eval p env
eval (p `And` q) env = (&&) <$> eval p env <*> eval q env
eval (p `Or` q) env = (||) <$> eval p env <*> eval q env
-- | Replaces all occurrences of a variable with a boolean value.
apply :: Proposition -> (String, Bool) -> Proposition
apply prop assignment@(target, b) = case prop of
    Var name | name == target -> Val b
    Not p -> Not (apply p assignment)
    p `And` q -> apply p assignment & apply q assignment
    p `Or` q -> apply p assignment <|> apply q assignment
    -- Values and non-matching variables stay untouched.
    _ -> prop
(!) = apply
-- | Checks if two propositions are equivalent, i.e. agree under every
-- interpretation over their combined variables.
equiv :: Proposition -> Proposition -> Bool
equiv p q = all agree (interps (p & q))
    where agree i = eval p i == eval q i
-- | Determines the depth of a proposition.
-- Atomic propositions (values, variables) have depth 0; every
-- connective adds 1 to the maximum depth of its subpropositions.
depth :: Proposition -> Int
depth prop = case prop of
    Val _ -> 0
    Var _ -> 0
    Not p -> 1 + depth p
    And p q -> 1 + max (depth p) (depth q)
    Or p q -> 1 + max (depth p) (depth q)
-- | Determines the variables in a proposition (each name listed once).
varNames :: Proposition -> [String]
varNames prop = case prop of
    Var name -> [name]
    Not p -> varNames p
    And p q -> varNames p `union` varNames q
    Or p q -> varNames p `union` varNames q
    _ -> []
-- | Checks if a proposition is satisfiable.
-- Variable-free propositions are evaluated directly under the empty
-- interpretation (which always succeeds, so 'fromJust' is safe here).
satisfiable :: Proposition -> Bool
satisfiable p
    | null (varNames p) = fromJust (eval p [])
    | otherwise = any (\i -> eval p i == Just True) (interps p)
-- | Generates all satisfying interpretations for a proposition.
satisfying :: Proposition -> [Interpretation]
satisfying p = [i | i <- interps p, eval p i == Just True]
-- | Checks if a proposition is a tautology.
-- Variable-free propositions are evaluated directly under the empty
-- interpretation (which always succeeds, so 'fromJust' is safe here).
tautology :: Proposition -> Bool
tautology p
    | null (varNames p) = fromJust (eval p [])
    | otherwise = all (\i -> eval p i == Just True) (interps p)
-- | Generates all fitting interpretations, one per combination of
-- truth values for the proposition's variables.
interps :: Proposition -> [Interpretation]
interps p = [zip names values | values <- boolLists (length names)]
    where names = varNames p
-- | Generates all possible lists of n booleans.
--
-- The empty list is the single list of zero booleans, so
-- @boolLists 0 == [[]]@. (Previously this case returned @[]@, which
-- left variable-free propositions with no interpretations at all —
-- making e.g. 'equiv' vacuously true on constants — and made the
-- function diverge for negative arguments.)
boolLists :: Int -> [[Bool]]
boolLists n
    | n <= 0 = [[]]
    | otherwise = map (False :) shorter ++ map (True :) shorter
    where shorter = boolLists (n - 1)
-- | Converts an interpretation into a conjunctive term.
-- The conjunctive term evaluates to true for the given interpretation,
-- false otherwise.
conjunction :: Interpretation -> Proposition
conjunction i = case map toTerm i of
    [] -> Val False
    (t:ts) -> foldl And t ts
  where
    -- A true variable appears positively, a false one negated.
    toTerm (name, True) = Var name
    toTerm (name, False) = Not (Var name)
-- | Converts an interpretation into a disjunctive clause with inverted
-- literals, i.e. a clause that is false for exactly the given
-- interpretation (as used when building a CNF from falsifying
-- assignments).
disjunction :: Interpretation -> Proposition
disjunction i = case map toTerm i of
    [] -> Val False
    (t:ts) -> foldl Or t ts
  where
    toTerm (name, True) = Not (Var name)
    toTerm (name, False) = Var name
-- | Converts a proposition into its disjunctive normal form, built
-- from the satisfying interpretations.
dnf :: Proposition -> Proposition
dnf p
    | null (varNames p) = Val (fromJust (eval p []))
    | otherwise = case map conjunction (satisfying p) of
        [] -> Val False
        (t:ts) -> foldl Or t ts
-- | Converts a proposition into its conjunctive normal form, built
-- from the interpretations satisfying @Not p@ (i.e. falsifying @p@).
cnf :: Proposition -> Proposition
cnf p
    | null (varNames p) = Val (fromJust (eval p []))
    | otherwise = case map disjunction (satisfying (Not p)) of
        [] -> Val True
        (t:ts) -> foldl And t ts
-- | Checks if proposition is in disjunctive normal form: a disjunction
-- of conjunctions of literals. A bare constant also counts.
isDnf :: Proposition -> Bool
isDnf (Val _) = True
isDnf p = go p
    where go (a `Or` b) = go a && go b
          go q = isConjunction q
-- | True if the proposition is a conjunction of literals
-- (variables or negated variables).
isConjunction :: Proposition -> Bool
isConjunction prop = case prop of
    And a b -> isConjunction a && isConjunction b
    Not (Var _) -> True
    Var _ -> True
    _ -> False
-- | Checks if proposition is in conjunctive normal form: a conjunction
-- of disjunctions of literals. A bare constant also counts.
isCnf :: Proposition -> Bool
isCnf (Val _) = True
isCnf p = go p
    where go (a `And` b) = go a && go b
          go q = isDisjunction q
-- | True if the proposition is a disjunction of literals
-- (variables or negated variables).
--
-- Fixed: this previously recursed via 'isConjunction', which wrongly
-- rejected nested disjunctions such as @(a | b) | c@ and wrongly
-- accepted conjunctions nested inside a clause such as @(a & b) | c@.
isDisjunction :: Proposition -> Bool
isDisjunction (a `Or` b) = isDisjunction a && isDisjunction b
isDisjunction (Not (Var _)) = True
isDisjunction (Var _) = True
isDisjunction _ = False
|
LocalToasty/formal-logic
|
Logic/Propositional.hs
|
lgpl-3.0
| 6,551
| 0
| 13
| 1,986
| 2,320
| 1,224
| 1,096
| 132
| 3
|
-- | Sums three integers.
addThree :: Int -> Int -> Int -> Int
addThree a b c = sum [a, b, c]
-- | Computes n! as the product of the range [1..n].
-- For n < 1 the range is empty, so the result is 1.
factorial :: Integer -> Integer
factorial n = product [1 .. n]
-- A recursive definition is also possible (that comes later):
-- factorial 0 = 1
-- factorial n = n * factorial (n - 1)
-- The same recursion at type Integer handles arbitrarily large results.
-- | Circumference of a circle with the given radius, at Float precision.
circumference :: Float -> Float
circumference r = (2 * pi) * r

-- | The same computation at Double precision.
circumference' :: Double -> Double
circumference' r = (2 * pi) * r
|
alexliew/learn_you_a_haskell
|
2_types_and_typeclasses.hs
|
unlicense
| 503
| 0
| 7
| 109
| 120
| 65
| 55
| 8
| 1
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CodeWorld.Requirements.Framework where
import qualified Config as GHCParse
import Control.Applicative
import Control.Concurrent
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.State
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.List (foldl', intercalate, isPrefixOf)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import qualified DynFlags as GHCParse
import qualified FastString as GHCParse
import qualified Fingerprint as GHCParse
import qualified GHC.LanguageExtensions.Type as GHCParse
import qualified HeaderInfo as GHCParse
import qualified HsExtension as GHCParse
import qualified HsSyn as GHCParse
import qualified HscTypes as GHCParse
import Language.Haskell.Exts
import qualified Lexer as GHCParse
import qualified Panic as GHCParse
import qualified Parser as GHCParse
import qualified Platform as GHCParse
import qualified SrcLoc as GHCParse
import qualified StringBuffer as GHCParse
import System.Directory
import System.Exit (ExitCode (..))
import System.FilePath
import System.IO
import System.IO.Temp (withSystemTempDirectory)
import System.Process
data GHCParsedCode = GHCParsed (GHCParse.HsModule GHCParse.GhcPs) | GHCNoParse
-- | All GHC language extensions, keyed by their flag-spec name.
ghcExtensionsByName :: Map String GHCParse.Extension
ghcExtensionsByName =
  M.fromList
    (map (\spec -> (GHCParse.flagSpecName spec, GHCParse.flagSpecFlag spec))
         GHCParse.xFlags)
-- | Enables (or, for a \"No\"-prefixed name, disables) a language
-- extension in the given 'GHCParse.DynFlags'.
--
-- Fixed: an unknown extension name previously crashed with a bare
-- 'fromJust' pattern-match failure; it now fails with a message naming
-- the offending extension.
applyExtensionToFlags :: GHCParse.DynFlags -> String -> GHCParse.DynFlags
applyExtensionToFlags dflags name
  | "No" `isPrefixOf` name =
    GHCParse.xopt_unset dflags (lookupExt (drop 2 name))
  | otherwise =
    GHCParse.xopt_set dflags (lookupExt name)
  where
    lookupExt extName =
      fromMaybe
        (error ("applyExtensionToFlags: unknown extension " ++ extName))
        (M.lookup extName ghcExtensionsByName)
-- | Parses a module with GHC's own parser (extensions applied on top of
-- the given flags), returning 'GHCNoParse' on any parse failure.
ghcParseCode :: GHCParse.DynFlags -> [String] -> Text -> GHCParsedCode
ghcParseCode flags exts src =
  let buffer = GHCParse.stringToStringBuffer (T.unpack src)
      dflags = foldl' applyExtensionToFlags flags exts
      location = GHCParse.mkRealSrcLoc (GHCParse.mkFastString "program.hs") 1 1
      state = GHCParse.mkPState dflags buffer location
   in case GHCParse.unP GHCParse.parseModule state of
        GHCParse.POk _ (GHCParse.L _ parsed) -> GHCParsed parsed
        GHCParse.PFailed _ _ _ -> GHCNoParse
-- | Renders a source span as @file:line:col@, or as a column/line range
-- when the span covers more than one position. 'noSrcSpan' renders as
-- the empty string. End columns are reported end-exclusive, hence the
-- @max 1 (c2 - 1)@ adjustment.
formatLocation :: SrcSpanInfo -> String
formatLocation spn@(SrcSpanInfo (SrcSpan fn l1 c1 l2 c2) _)
  | spn == noSrcSpan = ""
  | l1 /= l2 =
    fn ++ ":(" ++ show l1 ++ "," ++ show c1 ++ ")-(" ++ show l2 ++ ","
      ++ show endCol
      ++ ")"
  | c1 < c2 - 1 =
    fn ++ ":" ++ show l1 ++ ":" ++ show c1 ++ "-" ++ show endCol
  | otherwise = fn ++ ":" ++ show l1 ++ ":" ++ show c1
  where
    endCol = max 1 (c2 - 1)
-- | Computes a 'SrcSpanInfo' covering @len@ characters starting at
-- character offset @off@ in @src@, by translating offsets into
-- line/column positions. Tabs advance to the next multiple-of-8 tab
-- stop; carriage returns do not advance the column.
srcSpanFor :: Text -> Int -> Int -> SrcSpanInfo
srcSpanFor src off len =
  SrcSpanInfo (SrcSpan "program.hs" startLn startCol endLn endCol) []
  where
    (before, rest) = T.splitAt off src
    covered = T.take len rest
    -- Walk the prefix to find the start position, then walk the covered
    -- region from there to find the end position.
    (_, startLn, startCol) = T.foldl' step (off, 1, 1) before
    (_, endLn, endCol) = T.foldl' step (len, startLn, startCol) covered
    step (!n, !ln, !col) '\r' = (n - 1, ln, col)
    step (!n, !ln, !col) '\n' = (n - 1, ln + 1, 1)
    step (!n, !ln, !col) '\t' = (n - 1, ln, col + 8 - (col - 1) `mod` 8)
    step (!n, !ln, !col) _ = (n - 1, ln, col + 1)
|
google/codeworld
|
codeworld-requirements/src/CodeWorld/Requirements/Framework.hs
|
apache-2.0
| 4,228
| 0
| 14
| 764
| 1,160
| 647
| 513
| 92
| 4
|
module Marvin.API.Algorithms.LinearRegressionSpec where
import Test.Hspec
import Marvin.API
import Marvin.API.Algorithms.LinearRegression
import Marvin.Test.TestUtils
-- | Hspec suite for 'LinearRegression': a full fit\/predict\/evaluate round
-- trip against reference values, plus error reporting on empty inputs.
spec :: Spec
spec =
  describe "linear regression" $ do
    it "can be fit and evaluated" $
      trained `shouldSatisfy` \res -> case res of
        Right (x,y,x',y',model,pred,mse) ->
          -- All comparisons use a small absolute tolerance; the reference
          -- MSE and theta were captured from a known-good run.
          (mse `equals` 0.5872961757090328) ((+-) 1e-10) &&
          (y `equals` y') ((+-) 1e-10) &&
          (x `equals` x') ((+-) 1e-10) &&
          -- head of expectedTheta is the intercept, the tail the weights.
          (coefficients model `equals` tail expectedTheta) ((+-) 1e-10) &&
          (intercept model `equals` head expectedTheta) ((+-) 1e-10)
        Left _ -> False
    it "gives error on empty table" $
      fitEmpty `shouldSatisfy` \res -> case res of
        (Left (ColumnSizeMismatch _)) -> True
        _ -> False
    it "gives error on empty table at prediction" $
      predictEmpty `shouldSatisfy` \res -> case res of
        (Left (RowLengthMismatch _)) -> True
        _ -> False
  where
    -- Fitting an empty feature table against a real target column.
    fitEmpty = do
      y <- trainY
      fit linRegParams (emptyTable :: NumericTable, y)
    -- Predicting with a fitted model on an empty table.
    predictEmpty = do
      model <- linRegModel
      predict model (emptyTable :: NumericTable)
    linRegModel = fmap (\(_,_,_,_,m,_,_) -> m) trained
    -- Full pipeline: also re-derives (x', y') by splitting off column 11 of
    -- the whole table, so it can be checked against trainX/trainY.
    trained = do
      wholeTable <- trainData
      let targetCol = byIndex 11
      (x', y') <- selectTargetVariable targetCol wholeTable
      x <- trainX
      y <- trainY
      model <- fit linRegParams (x', y')
      pred <- predict model x
      mse <- evaluate model MeanSquaredError x y
      return (x,y,x',y',model,pred,mse)
    linRegParams = LinearRegression {
      numberOfIterations = 30000,
      learningRate = 0.005,
      lambda = 0,
      addIntercept = True
    }
-- | Feature table: every column of the training matrix except the last.
trainX :: Fallible NumericTable
trainX = fromRows xData
-- | Target column: the last column of the training matrix.
trainY :: Fallible NumericColumn
trainY = fromList yData
-- | The whole training matrix as a table (features + target).
trainData :: Fallible NumericTable
trainData = fromRows trainMatrix
-- NOTE(review): 'init'/'last' are partial; rows of trainMatrix (presumably
-- provided by TestUtils) are assumed non-empty — confirm.
xData = map init trainMatrix
yData = map last trainMatrix
-- Reference parameters from a known-good run; 'spec' treats the head as
-- the intercept and the tail as the coefficients.
expectedTheta =
  [13.781001191549892,
   3.900308511353248e-2,
   -8.360245732382168e-2,
   2.8699265280611834e-2,
   2.7797762929208453e-2,
   0.13913675368346703,
   3.900308511353248e-2,
   0.11340373429004338,
   0.1166082670688832,
   4.446542254295697e-2,
   0.14211000816593214,
   4.446542254295697e-2]
|
gaborhermann/marvin
|
test-suite/Marvin/API/Algorithms/LinearRegressionSpec.hs
|
apache-2.0
| 2,361
| 0
| 19
| 635
| 705
| 385
| 320
| 68
| 4
|
{-# LANGUAGE OverloadedStrings #-}
module Web.Twitter.Enumerator.Utils
( enumLine
, enumJSON
, skipNothing
, debugEE
, fromJSON'
, fromJSONSearch'
, toMaybeByteString
, handleParseError
)
where
import Web.Twitter.Enumerator.Types
import Data.Aeson hiding (Error)
import Data.Aeson.Types (parseMaybe)
import Data.Attoparsec.Enumerator (iterParser)
import Data.Enumerator as E
import qualified Data.Enumerator.List as EL
import qualified Data.Enumerator.Binary as EB
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B8
import Data.Maybe
import Control.Monad.IO.Class (MonadIO (liftIO))
-- | Split the incoming byte stream at line-break bytes; both LF (10) and
-- CR (13) count as terminators.
enumLine :: Monad m => E.Enumeratee ByteString ByteString m a
enumLine = EB.splitWhen isBreak
  where isBreak b = b `elem` [10, 13]
-- | Drop 'Nothing' elements from the stream and unwrap the 'Just's.
-- ('maybeToList' replaces the previous @fromJust@/@isJust@ comprehension,
-- which relied on a partial function.)
skipNothing :: Monad m => E.Enumeratee (Maybe a) a m r
skipNothing = EL.concatMap maybeToList
-- | Pass every stream element through unchanged, printing it to stdout —
-- a debugging aid for inspecting live streams.
debugEE :: (MonadIO m, Show a) => E.Enumeratee a a m r
debugEE = EL.mapM $ \x -> liftIO (print x) >> return x
-- | Convert an aeson 'Value' via 'parseJSON', returning 'Nothing' on failure.
fromJSON' :: FromJSON a => Value -> Maybe a
fromJSON' = parseMaybe parseJSON
-- | Like 'fromJSON'' but pulls the payload out of a @results@ field;
-- non-object values yield 'Nothing'.
fromJSONSearch' :: FromJSON a => Value -> Maybe a
fromJSONSearch' (Object o) = parseMaybe (.: "results") o
fromJSONSearch' _ = Nothing
-- | Turn a byte stream into a stream of parsed JSON 'Value's.
enumJSON :: Monad m => E.Enumeratee ByteString Value m a
enumJSON = E.sequence $ iterParser json
-- | Render any 'Show'-able value as a strict 'ByteString'; always 'Just'.
toMaybeByteString :: Show a => a -> Maybe ByteString
toMaybeByteString = Just . B8.pack . show
-- | Wrap an iteratee so that any error it raises is re-thrown as a
-- 'ParserException' carrying the stream chunks that arrive next — useful
-- for diagnosing which input triggered a parse failure.
handleParseError :: Monad m => Iteratee ByteString m b -> Iteratee ByteString m b
handleParseError iter = iter `catchError` hndl
  where
    -- Fetch the next incoming stream value via the enumerator 'continue'.
    getChunk = continue return
    hndl e = getChunk >>= \x -> case x of
      Chunks xs -> throwError $ ParserException e xs
      -- EOF (or anything else): no chunks to attach.
      _ -> throwError $ ParserException e []
|
himura/twitter-enumerator
|
Web/Twitter/Enumerator/Utils.hs
|
bsd-2-clause
| 1,788
| 0
| 14
| 367
| 587
| 319
| 268
| 43
| 2
|
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Hooks.ScreenCorners
-- Copyright : (c) 2009 Nils Schweinsberg, 2015 Evgeny Kurnevsky
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Nils Schweinsberg <mail@n-sch.de>
-- Stability : unstable
-- Portability : unportable
--
-- Run @X ()@ actions by touching the edge of your screen with your mouse.
--
-----------------------------------------------------------------------------
module XMonad.Hooks.ScreenCorners
(
-- * Usage
-- $usage
-- * Adding screen corners
ScreenCorner (..)
, addScreenCorner
, addScreenCorners
-- * Event hook
, screenCornerEventHook
-- * Layout hook
, screenCornerLayoutHook
) where
import Data.Monoid
import Data.List (find)
import XMonad
import XMonad.Util.XUtils (fi)
import XMonad.Layout.LayoutModifier
import qualified Data.Map as M
import qualified XMonad.Util.ExtensibleState as XS
-- | The four screen corners that can trigger an action.
data ScreenCorner = SCUpperLeft
                  | SCUpperRight
                  | SCLowerLeft
                  | SCLowerRight
                  deriving (Eq, Ord, Show)
--------------------------------------------------------------------------------
-- ExtensibleState modifications
--------------------------------------------------------------------------------
-- | Session state: maps each 1x1 trigger window to its corner and the
-- accumulated @X ()@ action to run when the pointer enters it.
newtype ScreenCornerState = ScreenCornerState (M.Map Window (ScreenCorner, X ()))
    deriving Typeable
instance ExtensionClass ScreenCornerState where
    initialValue = ScreenCornerState M.empty
-- | Add one single @X ()@ action to a screen corner
-- | Add one single @X ()@ action to a screen corner.
--
-- If a trigger window already exists for the corner, the new action is
-- chained after the existing one; otherwise a fresh window is created via
-- 'createWindowAt'.
addScreenCorner :: ScreenCorner -> X () -> X ()
addScreenCorner corner xF = do
  ScreenCornerState m <- XS.get
  (win,xFunc) <- case find (\(_,(sc,_)) -> sc == corner) (M.toList m) of
    Just (w, (_,xF')) -> return (w, xF' >> xF) -- chain X actions
    Nothing -> flip (,) xF `fmap` createWindowAt corner
  XS.modify $ \(ScreenCornerState m') -> ScreenCornerState $ M.insert win (corner,xFunc) m'
-- | Add a list of @(ScreenCorner, X ())@ tuples
-- | Add a list of @(ScreenCorner, X ())@ tuples; each pair is registered
-- with 'addScreenCorner' (uncurry replaces a redundant explicit lambda).
addScreenCorners :: [ (ScreenCorner, X ()) ] -> X ()
addScreenCorners = mapM_ (uncurry addScreenCorner)
--------------------------------------------------------------------------------
-- Xlib functions
--------------------------------------------------------------------------------
-- "Translate" a ScreenCorner to real (x,y) Positions
-- | "Translate" a corner to concrete pixel coordinates on the default
-- screen and create the trigger window there (edges use width/height - 1).
createWindowAt :: ScreenCorner -> X Window
createWindowAt SCUpperLeft = createWindowAt' 0 0
createWindowAt SCUpperRight = withDisplay $ \dpy ->
    let w = displayWidth dpy (defaultScreen dpy) - 1
    in createWindowAt' (fi w) 0
createWindowAt SCLowerLeft = withDisplay $ \dpy ->
    let h = displayHeight dpy (defaultScreen dpy) - 1
    in createWindowAt' 0 (fi h)
createWindowAt SCLowerRight = withDisplay $ \dpy ->
    let w = displayWidth dpy (defaultScreen dpy) - 1
        h = displayHeight dpy (defaultScreen dpy) - 1
    in createWindowAt' (fi w) (fi h)
-- Create a new X window at a (x,y) Position
-- | Create a 1x1, override-redirect, input-only X window at the given
-- (x,y) position on the root window.  It only listens for pointer-enter
-- events and is mapped immediately.
createWindowAt' :: Position -> Position -> X Window
createWindowAt' x y = withDisplay $ \dpy -> io $ do
  rootw <- rootWindow dpy (defaultScreen dpy)
  let
    visual = defaultVisualOfScreen $ defaultScreenOfDisplay dpy
    attrmask = cWOverrideRedirect
  w <- allocaSetWindowAttributes $ \attributes -> do
    -- override-redirect keeps the WM from managing this helper window
    set_override_redirect attributes True
    createWindow dpy -- display
                 rootw -- parent window
                 x -- x
                 y -- y
                 1 -- width
                 1 -- height
                 0 -- border width
                 0 -- depth
                 inputOnly -- class
                 visual -- visual
                 attrmask -- valuemask
                 attributes -- attributes
  -- we only need mouse entry events
  selectInput dpy w enterWindowMask
  mapWindow dpy w
  sync dpy False
  return w
--------------------------------------------------------------------------------
-- Event hook
--------------------------------------------------------------------------------
-- | Handle screen corner events
-- | Handle screen corner events.
--
-- On a pointer-crossing event for one of our trigger windows, run the
-- stored action.  Events are always propagated ('All True') so other
-- hooks still see them.
screenCornerEventHook :: Event -> X All
screenCornerEventHook CrossingEvent { ev_window = win } = do
  ScreenCornerState m <- XS.get
  case M.lookup win m of
    Just (_, xF) -> xF
    Nothing -> return ()
  return (All True)
screenCornerEventHook _ = return (All True)
--------------------------------------------------------------------------------
-- Layout hook
--------------------------------------------------------------------------------
-- | Marker layout modifier whose only job is to raise the corner trigger
-- windows above everything else after each layout pass.
data ScreenCornerLayout a = ScreenCornerLayout
    deriving ( Read, Show )
instance LayoutModifier ScreenCornerLayout a where
    hook ScreenCornerLayout = withDisplay $ \dpy -> do
        ScreenCornerState m <- XS.get
        io $ mapM_ (raiseWindow dpy) $ M.keys m
    -- raising again on unhook is harmless and keeps the windows on top
    unhook = hook
-- | Attach the corner-raising behaviour to an arbitrary layout.
screenCornerLayoutHook :: l a -> ModifiedLayout ScreenCornerLayout l a
screenCornerLayoutHook = ModifiedLayout ScreenCornerLayout
--------------------------------------------------------------------------------
-- $usage
--
-- This extension adds KDE-like screen corners to XMonad. By moving your cursor
-- into one of your screen corners you can trigger an @X ()@ action, for
-- example @"XMonad.Actions.GridSelect".goToSelected@ or
-- @"XMonad.Actions.CycleWS".nextWS@ etc.
--
-- To use it, import it on top of your @xmonad.hs@:
--
-- > import XMonad.Hooks.ScreenCorners
--
-- Then add your screen corners in our startup hook:
--
-- > myStartupHook = do
-- > ...
-- > addScreenCorner SCUpperRight (goToSelected defaultGSConfig { gs_cellwidth = 200})
-- > addScreenCorners [ (SCLowerRight, nextWS)
-- > , (SCLowerLeft, prevWS)
-- > ]
--
-- Then add layout hook:
-- > myLayout = screenCornerLayoutHook $ tiled ||| Mirror tiled ||| Full where
-- > tiled = Tall nmaster delta ratio
-- > nmaster = 1
-- > ratio = 1 / 2
-- > delta = 3 / 100
--
-- And finally wait for screen corner events in your event hook:
--
-- > myEventHook e = do
-- > ...
-- > screenCornerEventHook e
|
pjones/xmonad-test
|
vendor/xmonad-contrib/XMonad/Hooks/ScreenCorners.hs
|
bsd-2-clause
| 6,428
| 0
| 15
| 1,564
| 1,094
| 600
| 494
| 86
| 2
|
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main
( main
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BL8
import qualified Data.HashMap.Strict as HM
import Data.Int
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Vector as V
import Data.Word
import Test.HUnit
import Test.Framework as TF
import Test.Framework.Providers.HUnit as TF
import Test.QuickCheck
import Test.Framework.Providers.QuickCheck2 as TF
import Data.Csv hiding (record)
import qualified Data.Csv.Streaming as S
------------------------------------------------------------------------
-- Parse tests
-- | Assert that @input@ decodes (headerless) to exactly @expected@.
decodesAs :: BL.ByteString -> [[B.ByteString]] -> Assertion
decodesAs input expected = assertResult input expected $ decode NoHeader input
-- | Like 'decodesAs' but with custom 'DecodeOptions'.
decodesWithAs :: DecodeOptions -> BL.ByteString -> [[B.ByteString]] -> Assertion
decodesWithAs opts input expected =
    assertResult input expected $ decodeWith opts NoHeader input
-- | Shared assertion: compare a decode result against the expected rows,
-- failing with the input and parser error on 'Left'.
assertResult :: BL.ByteString -> [[B.ByteString]]
             -> Either String (V.Vector (V.Vector B.ByteString)) -> Assertion
assertResult input expected res = case res of
    Right r  -> V.fromList (map V.fromList expected) @=? r
    Left err -> assertFailure $
                "      input: " ++ show (BL8.unpack input) ++ "\n" ++
                "parse error: " ++ err
-- | Assert that the rows encode to exactly @expected@ bytes.
encodesAs :: [[B.ByteString]] -> BL.ByteString -> Assertion
encodesAs input expected =
    encode (map V.fromList input) @?= expected
-- | Like 'encodesAs' but with custom 'EncodeOptions'.
encodesWithAs :: EncodeOptions -> [[B.ByteString]] -> BL.ByteString -> Assertion
encodesWithAs opts input expected =
    encodeWith opts (map V.fromList input) @?= expected
-- | Assert that name-indexed records encode, under header @hdr@, to
-- exactly @expected@ bytes.
namedEncodesAs :: [B.ByteString] -> [[(B.ByteString, B.ByteString)]]
               -> BL.ByteString -> Assertion
namedEncodesAs hdr input expected =
    encodeByName (V.fromList hdr) (map HM.fromList input) @?= expected
-- | Assert that @input@ decodes by name to header @ehdr@ and the expected
-- field maps.
namedDecodesAs :: BL.ByteString -> [B.ByteString]
               -> [[(B.ByteString, B.ByteString)]] -> Assertion
namedDecodesAs input ehdr expected = case decodeByName input of
    Right r  -> (V.fromList ehdr, expected') @=? r
    Left err -> assertFailure $
                "      input: " ++ show (BL8.unpack input) ++ "\n" ++
                "parse error: " ++ err
  where
    expected' = V.fromList $ map HM.fromList expected
-- | Flatten a streaming 'S.Records' into either the first error
-- encountered or the complete list of records.
recordsToList :: S.Records a -> Either String [a]
recordsToList (S.Nil (Just err) _) = Left err
recordsToList (S.Nil Nothing _) = Right []
recordsToList (S.Cons (Left err) _) = Left err
-- fmap threads a later error through without re-matching it by hand
recordsToList (S.Cons (Right x) rs) = fmap (x :) (recordsToList rs)
-- | Streaming analogue of 'decodesAs': decode via "Data.Csv.Streaming"
-- and compare the collected rows.
decodesStreamingAs :: BL.ByteString -> [[B.ByteString]] -> Assertion
decodesStreamingAs input expected =
    assertResult input expected $ fmap (V.fromList . map V.fromList) $
    recordsToList $ S.decode NoHeader input
-- | Streaming analogue of 'decodesWithAs'.
decodesWithStreamingAs :: DecodeOptions -> BL.ByteString -> [[B.ByteString]]
                       -> Assertion
decodesWithStreamingAs opts input expected =
    assertResult input expected $ fmap (V.fromList . map V.fromList) $
    recordsToList $ S.decodeWith opts NoHeader input
-- | Streaming analogue of 'namedDecodesAs': header parse errors and
-- per-record conversion errors are reported separately.
namedDecodesStreamingAs :: BL.ByteString -> [B.ByteString]
                        -> [[(B.ByteString, B.ByteString)]] -> Assertion
namedDecodesStreamingAs input ehdr expected = case S.decodeByName input of
    Right (hdr, rs) -> case recordsToList rs of
        Right xs -> (V.fromList ehdr, expected') @=? (hdr, xs)
        Left err -> assertFailure $
                    "           input: " ++ show (BL8.unpack input) ++ "\n" ++
                    "conversion error: " ++ err
    Left err -> assertFailure $
                "      input: " ++ show (BL8.unpack input) ++ "\n" ++
                "parse error: " ++ err
  where
    expected' = map HM.fromList expected
-- | Tests for position-based (headerless) encoding and decoding, covering
-- quoting, delimiters, blank lines, and the RFC 4180 example; the same
-- decode tables are reused for the streaming decoder.
positionalTests :: [TF.Test]
positionalTests =
    [ testGroup "encode" $ map encodeTest
      [ ("simple", [["abc"]], "abc\r\n")
      , ("quoted", [["\"abc\""]], "\"\"\"abc\"\"\"\r\n")
      , ("quote", [["a\"b"]], "\"a\"\"b\"\r\n")
      , ("quotedQuote", [["\"a\"b\""]], "\"\"\"a\"\"b\"\"\"\r\n")
      , ("leadingSpace", [[" abc"]], "\" abc\"\r\n")
      , ("comma", [["abc,def"]], "\"abc,def\"\r\n")
      , ("twoFields", [["abc","def"]], "abc,def\r\n")
      , ("twoRecords", [["abc"], ["def"]], "abc\r\ndef\r\n")
      , ("newline", [["abc\ndef"]], "\"abc\ndef\"\r\n")
      ]
    , testGroup "encodeWith"
      [ testCase "tab-delim" $ encodesWithAs (defEnc { encDelimiter = 9 })
        [["1", "2"]] "1\t2\r\n"
      ]
    , testGroup "decode" $ map decodeTest decodeTests
    , testGroup "decodeWith" $ map decodeWithTest decodeWithTests
    , testGroup "streaming"
      [ testGroup "decode" $ map streamingDecodeTest decodeTests
      , testGroup "decodeWith" $ map streamingDecodeWithTest decodeWithTests
      ]
    ]
  where
    -- the canonical example from RFC 4180 (embedded newline, quote, comma)
    rfc4180Input = BL8.pack $
        "#field1,field2,field3\n" ++
        "\"aaa\",\"bb\n" ++
        "b\",\"ccc\"\n" ++
        "\"a,a\",\"b\"\"bb\",\"ccc\"\n" ++
        "zzz,yyy,xxx\n"
    rfc4180Output = [["#field1", "field2", "field3"],
                     ["aaa", "bb\nb", "ccc"],
                     ["a,a", "b\"bb", "ccc"],
                     ["zzz", "yyy", "xxx"]]
    -- (name, input bytes, expected rows) triples shared by both decoders
    decodeTests =
        [ ("simple", "a,b,c\n", [["a", "b", "c"]])
        , ("crlf", "a,b\r\nc,d\r\n", [["a", "b"], ["c", "d"]])
        , ("noEol", "a,b,c", [["a", "b", "c"]])
        , ("blankLine", "a,b,c\n\nd,e,f\n\n",
           [["a", "b", "c"], ["d", "e", "f"]])
        , ("leadingSpace", " a, b, c\n", [[" a", " b", " c"]])
        , ("rfc4180", rfc4180Input, rfc4180Output)
        ]
    decodeWithTests =
        [ ("tab-delim", defDec { decDelimiter = 9 }, "1\t2", [["1", "2"]])
        ]
    encodeTest (name, input, expected) =
        testCase name $ input `encodesAs` expected
    decodeTest (name, input, expected) =
        testCase name $ input `decodesAs` expected
    decodeWithTest (name, opts, input, expected) =
        testCase name $ decodesWithAs opts input expected
    streamingDecodeTest (name, input, expected) =
        testCase name $ input `decodesStreamingAs` expected
    streamingDecodeWithTest (name, opts, input, expected) =
        testCase name $ decodesWithStreamingAs opts input expected
    defEnc = defaultEncodeOptions
    defDec = defaultDecodeOptions
-- | Tests for name-based (header) encoding and decoding; the decode table
-- is shared with the streaming decoder.
nameBasedTests :: [TF.Test]
nameBasedTests =
    [ testGroup "encode" $ map encodeTest
      [ ("simple", ["field"], [[("field", "abc")]], "field\r\nabc\r\n")
      , ("twoFields", ["field1", "field2"],
         [[("field1", "abc"), ("field2", "def")]],
         "field1,field2\r\nabc,def\r\n")
      , ("twoRecords", ["field"], [[("field", "abc")], [("field", "def")]],
         "field\r\nabc\r\ndef\r\n")
      ]
    , testGroup "decode" $ map decodeTest decodeTests
    , testGroup "streaming"
      [ testGroup "decode" $ map streamingDecodeTest decodeTests
      ]
    ]
  where
    -- (name, input bytes, expected header, expected field maps)
    decodeTests =
        [ ("simple", "field\r\nabc\r\n", ["field"], [[("field", "abc")]])
        , ("twoFields", "field1,field2\r\nabc,def\r\n", ["field1", "field2"],
           [[("field1", "abc"), ("field2", "def")]])
        , ("twoRecords", "field\r\nabc\r\ndef\r\n", ["field"],
           [[("field", "abc")], [("field", "def")]])
        ]
    encodeTest (name, hdr, input, expected) =
        testCase name $ namedEncodesAs hdr input expected
    decodeTest (name, input, hdr, expected) =
        testCase name $ namedDecodesAs input hdr expected
    streamingDecodeTest (name, input, hdr, expected) =
        testCase name $ namedDecodesStreamingAs input hdr expected
------------------------------------------------------------------------
-- Conversion tests
-- Orphan Arbitrary instances for the string-like types under test.
instance Arbitrary B.ByteString where
    arbitrary = B.pack `fmap` arbitrary
instance Arbitrary BL.ByteString where
    arbitrary = BL.fromChunks `fmap` arbitrary
instance Arbitrary T.Text where
    arbitrary = T.pack `fmap` arbitrary
instance Arbitrary LT.Text where
    arbitrary = LT.fromChunks `fmap` arbitrary
-- A single column with an empty string is indistinguishable from an
-- empty line (which we will ignore.) We therefore encode at least two
-- columns.
-- | Property: a value survives an encode/decode round trip.
roundTrip :: (Eq a, FromField a, ToField a) => a -> Bool
roundTrip x = Right (V.fromList record) == decode NoHeader (encode record)
  where record = [(x, dummy)]
        dummy = 'a'
-- | Assertion flavour of 'roundTrip' for specific Unicode inputs.
roundTripUnicode :: T.Text -> Assertion
roundTripUnicode x = Right (V.fromList record) @=?
                     decode NoHeader (encode record)
  where record = [(x, dummy)]
        dummy = 'a'
-- | Check 'roundTrip' at both ends of a bounded type's range; the argument
-- only fixes the type.
boundary :: forall a. (Bounded a, Eq a, FromField a, ToField a) => a -> Bool
boundary _dummy = roundTrip (minBound :: a) && roundTrip (maxBound :: a)
-- | Field-conversion properties: round trips for every supported numeric
-- and string type, boundary values for the bounded integrals, and a few
-- fixed Unicode samples.
conversionTests :: [TF.Test]
conversionTests =
    [ testGroup "roundTrip"
      [ testProperty "Char" (roundTrip :: Char -> Bool)
      , testProperty "ByteString" (roundTrip :: B.ByteString -> Bool)
      , testProperty "Int" (roundTrip :: Int -> Bool)
      , testProperty "Integer" (roundTrip :: Integer -> Bool)
      , testProperty "Int8" (roundTrip :: Int8 -> Bool)
      , testProperty "Int16" (roundTrip :: Int16 -> Bool)
      , testProperty "Int32" (roundTrip :: Int32 -> Bool)
      , testProperty "Int64" (roundTrip :: Int64 -> Bool)
      , testProperty "Word" (roundTrip :: Word -> Bool)
      , testProperty "Word8" (roundTrip :: Word8 -> Bool)
      , testProperty "Word16" (roundTrip :: Word16 -> Bool)
      , testProperty "Word32" (roundTrip :: Word32 -> Bool)
      , testProperty "Word64" (roundTrip :: Word64 -> Bool)
      , testProperty "lazy ByteString"
        (roundTrip :: BL.ByteString -> Bool)
      , testProperty "Text" (roundTrip :: T.Text -> Bool)
      , testProperty "lazy Text" (roundTrip :: LT.Text -> Bool)
      ]
    , testGroup "boundary"
      [ testProperty "Int" (boundary (undefined :: Int))
      , testProperty "Int8" (boundary (undefined :: Int8))
      , testProperty "Int16" (boundary (undefined :: Int16))
      , testProperty "Int32" (boundary (undefined :: Int32))
      , testProperty "Int64" (boundary (undefined :: Int64))
      , testProperty "Word" (boundary (undefined :: Word))
      , testProperty "Word8" (boundary (undefined :: Word8))
      , testProperty "Word16" (boundary (undefined :: Word16))
      , testProperty "Word32" (boundary (undefined :: Word32))
      , testProperty "Word64" (boundary (undefined :: Word64))
      ]
    , testGroup "Unicode"
      [ testCase "Chinese" (roundTripUnicode "我能吞下玻璃而不伤身体。")
      , testCase "Icelandic" (roundTripUnicode
                              "Sævör grét áðan því úlpan var ónýt.")
      , testCase "Turkish" (roundTripUnicode
                            "Cam yiyebilirim, bana zararı dokunmaz.")
      ]
    ]
------------------------------------------------------------------------
-- Test harness
-- | All test groups in the suite.
allTests :: [TF.Test]
allTests = [ testGroup "positional" positionalTests
           , testGroup "named" nameBasedTests
           , testGroup "conversion" conversionTests
           ]
-- | Run the whole suite with test-framework's default runner.
main :: IO ()
main = defaultMain allTests
|
mikeizbicki/cassava
|
tests/UnitTests.hs
|
bsd-3-clause
| 11,371
| 0
| 18
| 2,786
| 3,351
| 1,880
| 1,471
| 216
| 3
|
{-# LANGUAGE
DeriveGeneric
#-}
module Vulgr.XML.Types where
import qualified Data.Text as T
import Data.Time
-- | One vulnerability entry from an NVD feed.
data NvdEntry = NvdEntry
    { entryVulnConf :: VulnConf
    , entryVulnSoftList :: VulnSoftList
    , entryCveId :: CveId
    , entryPublishedTime :: LocalTime
    , entryLastModifiedTime :: LocalTime
    , entryCvss :: Cvss
    , entryCwe :: Cwe
    , entryReferences :: References
    , entrySummary :: T.Text
    } deriving (Eq, Show)
-- Represents the vulnerable-configuration tag.
data VulnConf = VulnConf
    { vulnConfId :: T.Text
    , vulnConfLogicalTest :: LogicalTest
    } deriving (Eq, Show)
-- | Boolean combination of fact references inside a configuration.
data LogicalTest = LogicalTest
    { logicalTestOp :: T.Text
    , logicalTestNegate :: Bool
    , logicalTestFactRefs :: [FactRef]
    } deriving (Eq, Show)
data FactRef = FactRef
    { factRefName :: T.Text
    } deriving (Eq, Show)
-- | The vulnerable-software-list tag: affected products.
data VulnSoftList = VulnSoftList
    { vulnSoftList :: [Product]
    } deriving (Eq, Show)
-- NOTE(review): the field name 'product' shadows Prelude's 'product'.
data Product = Product
    { product :: T.Text
    } deriving (Eq, Show)
data CveId = CveId
    { cveId :: T.Text
    } deriving (Eq, Show)
data Cvss = Cvss
    { cvssBaseMetrics :: CvssBaseMetrics
    } deriving (Eq, Show)
-- TODO : Investigate these, can likely type them
data CvssBaseMetrics = CvssBaseMetrics
    { cvssBmScore :: T.Text
    , cvssBmAccessVector :: T.Text
    , cvssBmAccessComplexity :: T.Text
    , cvssBmConfidentiality :: T.Text
    , cvssBmIntegImpact :: T.Text
    , cvssBmAvailImpact :: T.Text
    , cvssBmSource :: T.Text
    , cvssBmGeneratedAt :: T.Text
    } deriving (Eq, Show)
data Cwe = Cwe
    { cweId :: T.Text
    } deriving (Eq, Show)
-- | A group of external references sharing language/type/source.
data References = References
    { refsLang :: T.Text
    , refsRefType :: T.Text
    , refsSource :: T.Text
    , refsRefs :: [Reference]
    } deriving (Eq, Show)
data Reference = Reference
    { refHref :: T.Text
    , refLang :: T.Text
    } deriving (Eq, Show)
|
wayofthepie/vulgr
|
vulgr-lib/src/Vulgr/XML/Types.hs
|
bsd-3-clause
| 1,989
| 0
| 9
| 553
| 516
| 311
| 205
| 63
| 0
|
{-# LANGUAGE DeriveDataTypeable, TemplateHaskell, TypeFamilies #-}
{-# LANGUAGE NamedFieldPuns #-}
module Dice (
DiceRoll(..),
DiceRoller(..),
DiceSpec(..),
Dicetabase(..),
resultText,
withState
) where
import Control.Applicative ((<$), (<$>))
import Control.Category ((.))
import Control.Monad (guard, replicateM)
import Control.Monad.Reader (asks)
import Control.Monad.State (modify)
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as M
import qualified Data.Sequence as Q
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time (UTCTime, getCurrentTime)
import Data.Typeable (Typeable)
import Prelude hiding (id, (.))
import Data.Acid (Query, Update,
closeAcidState, makeAcidic, openLocalState, query, runQuery, update)
import Data.Digest.Pure.SHA (sha256, bytestringDigest)
import Data.Lens.Common (Lens, setL, modL, iso, lens, mapLens)
import Data.SafeCopy (base, deriveSafeCopy)
import System.Random (randomRIO)
-- | A dice request: roll @specTimes@ results, each the sum of @specDice@
-- dice with @specFaces@ faces, plus @specMod@.
data DiceSpec = MkSpec {
  specTimes :: Integer,
  specDice :: Integer,
  specFaces :: Integer,
  specMod :: Integer }
  deriving (Show, Typeable)
-- | One integer per requested roll.
type RollResult = [Integer]
-- | A recorded roll: why, when, what was asked for and what came up.
data DiceRoll = MkRoll {
  rollReason :: T.Text,
  rollTime :: UTCTime,
  rollSpec :: DiceSpec,
  rollResult :: RollResult }
  deriving (Show, Typeable)
-- | A registered user together with their roll history.
data DiceRoller = MkRoller {
  rollerName :: T.Text,
  rollerPW :: L.ByteString, -- hashed
  rollerDate :: UTCTime,
  rollerRolls :: Q.Seq DiceRoll }
  deriving (Show, Typeable)
-- | Whole persisted state: rollers keyed by name.
newtype DiceState = MkDS { unDS :: M.Map T.Text DiceRoller }
  deriving (Show, Typeable)
-- | Capability record handed out by 'withState'; passwords are plain text
-- here and hashed internally.
data Dicetabase = DB {
  fetchRoller :: T.Text -> IO (Maybe DiceRoller),
  registerRoller :: T.Text -> T.Text -> IO Bool,
  rollFor :: T.Text -> T.Text -> T.Text -> DiceSpec -> IO (Maybe RollResult) }
-- | Lens onto a roller (by name) inside the state map.
rollerL :: T.Text -> Lens DiceState (Maybe DiceRoller)
rollerL name = mapLens name . iso unDS MkDS
-- | Lens onto a roller's roll history.
rollerRollsL :: Lens DiceRoller (Q.Seq DiceRoll)
rollerRollsL = lens rollerRolls (\rs r -> r { rollerRolls = rs })
-- This needn't be in IO but in practice that's where it will be used.
-- | Perform the random rolls described by a 'DiceSpec': @specTimes@
-- independent results, each @specMod@ plus the sum of @specDice@ rolls of
-- a @specFaces@-sided die (1-based).
rollDice :: DiceSpec -> IO RollResult
rollDice MkSpec{ specTimes, specDice, specFaces, specMod } =
  replicateM (fromInteger specTimes) roll
  where
    roll = (specMod +) . sum <$>
      replicateM (fromInteger specDice) (randomRIO (1, specFaces))
-- | Render a roll result as space-separated decimal numbers.
-- (Still goes via String/'T.pack'; building the Text directly would avoid
-- the intermediate lists.)
resultText :: RollResult -> T.Text
resultText rolls = T.unwords [T.pack (show r) | r <- rolls]
-- | Look up a roller by name in the persisted state.
fetchRollerQ :: T.Text -> Query DiceState (Maybe DiceRoller)
fetchRollerQ name = asks (M.lookup name . unDS)
-- | Return False if roller already exists
--
-- Otherwise stores a fresh roller (with the already-hashed password and
-- an empty history) and returns True.
registerRollerU :: T.Text -> L.ByteString -> UTCTime -> Update DiceState Bool
registerRollerU name pwhash date =
  maybe (True <$ reg) (const (return False)) =<< runQuery (fetchRollerQ name)
  where
    reg = modify (setL (rollerL name) (Just newRoller))
    newRoller = MkRoller {
      rollerName = name,
      rollerPW = pwhash,
      rollerDate = date,
      rollerRolls = Q.empty }
-- | Return False if authentication failed (including if roller doesn't exist)
--
-- On success the roll is prepended to the roller's history.
rollForU :: T.Text -> L.ByteString -> DiceRoll -> Update DiceState Bool
rollForU name pwhash roll = runQuery (fetchRollerQ name) >>= update
  where
    update Nothing = return False
    update (Just r)
      | pwhash /= rollerPW r = return False
      | otherwise = True <$ modify (setL (rollerL name)
          (Just (modL rollerRollsL (roll Q.<|) r)))
-- | SHA-256 hash of a password's UTF-8 bytes.
enhashen :: T.Text -> L.ByteString
enhashen = bytestringDigest . sha256 . L.fromChunks . (:[]) . T.encodeUtf8
-- TH puts requirements on the order we declare things, this makes me sad :(
-- Derive SafeCopy instances for every persisted type, then generate the
-- acid-state event wrappers for the query/update functions above.
fmap concat . mapM (deriveSafeCopy 1 'base) $
  [''DiceSpec, ''DiceRoll, ''DiceRoller, ''DiceState]
makeAcidic ''DiceState
  ['fetchRollerQ, 'registerRollerU, 'rollForU]
-- | Open the local acid-state store, hand a 'Dicetabase' of operations to
-- the callback, and close the store afterwards.  Passwords are hashed at
-- this boundary so only digests reach the persisted events.
--
-- NOTE(review): not exception-safe — if @fn@ throws, 'closeAcidState' is
-- skipped; 'bracket' would fix that.
withState :: (Dicetabase -> IO a) -> IO a
withState fn = do
  h <- openLocalState (MkDS M.empty)
  res <- fn $ DB {
    fetchRoller = query h . FetchRollerQ,
    registerRoller = \name pw -> do
      now <- getCurrentTime
      update h (RegisterRollerU name (enhashen pw) now),
    rollFor = \name pw reason spec -> do
      now <- getCurrentTime
      res <- rollDice spec
      -- the dice are rolled before auth; the result is only revealed
      -- when RollForU accepts the credentials
      authed <- update h $ RollForU name (enhashen pw) MkRoll{
        rollReason = reason,
        rollTime = now,
        rollSpec = spec,
        rollResult = res }
      return $ res <$ guard authed }
  closeAcidState h
  return res
|
bmillwood/dice
|
src/Dice.hs
|
bsd-3-clause
| 4,392
| 0
| 18
| 825
| 1,441
| 802
| 639
| 107
| 2
|
{-# LANGUAGE MagicHash #-}
module Data.Array.Repa.Repr.Delayed
( D, Array(..)
, fromFunction, toFunction
, delay)
where
import Data.Array.Repa.Eval.Load
import Data.Array.Repa.Eval.Target
import Data.Array.Repa.Eval.Chunked
import Data.Array.Repa.Eval.Cursored
import Data.Array.Repa.Eval.Elt
import Data.Array.Repa.Index
import Data.Array.Repa.Shape
import Data.Array.Repa.Base
import Debug.Trace
import GHC.Exts
-- | Delayed arrays are represented as functions from the index to element value.
--
-- Every time you index into a delayed array the element at that position
-- is recomputed.
-- | Representation tag for delayed arrays.
data D
-- | Compute elements of a delayed array.
instance Source D a where
 -- A delayed array is just its extent plus an index function; indexing
 -- recomputes the element every time.
 data Array D sh a
        = ADelayed
                !sh
                (sh -> a)
 index       (ADelayed _  f) ix  = f ix
 {-# INLINE index #-}
 linearIndex (ADelayed sh f) ix  = f (fromIndex sh ix)
 {-# INLINE linearIndex #-}
 extent (ADelayed sh _)
        = sh
 {-# INLINE extent #-}
 -- forcing the shape and the closure is all there is to force
 deepSeqArray (ADelayed sh f) y
        = sh `deepSeq` f `seq` y
 {-# INLINE deepSeqArray #-}
-- Load -----------------------------------------------------------------------
-- | Compute all elements in an array.
-- | Compute all elements in an array, writing them into the target vector
-- either in parallel chunks (loadP) or sequentially (loadS).  The
-- traceEventIO calls mark the fill for eventlog profiling.
instance Shape sh => Load D sh e where
 loadP (ADelayed sh getElem) mvec
  = mvec `deepSeqMVec`
    do  traceEventIO "Repa.loadP[Delayed]: start"
        fillChunkedP (size sh) (unsafeWriteMVec mvec) (getElem . fromIndex sh)
        touchMVec mvec
        traceEventIO "Repa.loadP[Delayed]: end"
 {-# INLINE [4] loadP #-}
 loadS (ADelayed sh getElem) mvec
  = mvec `deepSeqMVec`
    do  traceEventIO "Repa.loadS[Delayed]: start"
        fillLinearS  (size sh) (unsafeWriteMVec mvec) (getElem . fromIndex sh)
        touchMVec mvec
        traceEventIO "Repa.loadS[Delayed]: end"
 {-# INLINE [4] loadS #-}
-- | Compute a range of elements in a rank-2 array.
-- | Compute a range of elements in a rank-2 array: a (w0 x h0) block with
-- its top-left corner at (x0, y0), filled block-wise in parallel or
-- sequentially.  The unboxed Int# fields come from the MagicHash pattern
-- matches on the extent and range arguments.
instance Elt e => LoadRange D DIM2 e where
 loadRangeP  (ADelayed (Z :. _h :. (I# w)) getElem) mvec
             (Z :. (I# y0) :. (I# x0)) (Z :. (I# h0) :. (I# w0))
  = mvec `deepSeqMVec`
    do  traceEventIO "Repa.loadRangeP[Delayed]: start"
        fillBlock2P (unsafeWriteMVec mvec)
                        getElem
                        w x0 y0 w0 h0
        touchMVec mvec
        traceEventIO "Repa.loadRangeP[Delayed]: end"
 {-# INLINE [1] loadRangeP #-}
 loadRangeS  (ADelayed (Z :. _h :. (I# w)) getElem) mvec
             (Z :. (I# y0) :. (I# x0)) (Z :. (I# h0) :. (I# w0))
  = mvec `deepSeqMVec`
    do  traceEventIO "Repa.loadRangeS[Delayed]: start"
        fillBlock2S (unsafeWriteMVec mvec)
                        getElem
                        w x0 y0 w0 h0
        touchMVec mvec
        traceEventIO "Repa.loadRangeS[Delayed]: end"
 {-# INLINE [1] loadRangeS #-}
-- Conversions ----------------------------------------------------------------
-- | O(1). Wrap a function as a delayed array.
fromFunction :: sh -> (sh -> a) -> Array D sh a
fromFunction sh f
        = ADelayed sh f
{-# INLINE fromFunction #-}
-- | O(1). Produce the extent of an array, and a function to retrieve an
--   arbitrary element.  Works for any representation by going through
--   'delay' first.
toFunction
        :: (Shape sh, Source r1 a)
        => Array r1 sh a -> (sh, sh -> a)
toFunction arr
 = case delay arr of
        ADelayed sh f -> (sh, f)
{-# INLINE toFunction #-}
-- | O(1). Delay an array.
--   This wraps the internal representation to be a function from
--   indices to elements, so consumers don't need to worry about
--   what the previous representation was.
--
delay   :: Shape sh => Source r e
        => Array r sh e -> Array D sh e
delay arr = ADelayed (extent arr) (unsafeIndex arr)
{-# INLINE delay #-}
|
kairne/repa-lts
|
Data/Array/Repa/Repr/Delayed.hs
|
bsd-3-clause
| 3,629
| 0
| 13
| 926
| 899
| 476
| 423
| -1
| -1
|
-----------------------------------------------------------------------------
-- |
-- Module : csvzip
-- Copyright : (c) Keith Sheppard 2009-2010
-- License : BSD3
-- Maintainer : keithshep@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- Joins CSV files by pasting the columns together. Analogous to cbind for
-- those familliar with the R programming language. This utility streams
-- data so it can work on very large files. If the table row lengths don't
-- match then the shorter tables will be padded with empty cells.
--
-----------------------------------------------------------------------------
import System.Environment (getArgs, getProgName)
import Database.TxtSushi.FlatFile (csvFormat, formatTable, parseTable)
import Database.TxtSushi.IOUtil (getContentsFromFileOrStdin, versionStr)
-- | Entry point: each command-line argument is a CSV file (or @-@ for
-- stdin); with two or more inputs their columns are pasted together and
-- written to stdout, otherwise usage is printed.
main :: IO ()
main = do
    fileNames <- getArgs
    case fileNames of
        -- parse all CSV files giving us a list of tables, then zip and print them
        (_ : _ : _) -> do
            tables <- mapM getAndParseTable fileNames
            putStr $ formatTable csvFormat (zipAllColumns tables)
        -- fewer than two inputs: nothing to zip
        _ -> printUsage
-- | Read the contents of the given file name (or stdin for @-@) and parse
-- it as a CSV table of rows of cells.
getAndParseTable :: String -> IO [[String]]
getAndParseTable = fmap (parseTable csvFormat) . getContentsFromFileOrStdin
-- | Zip together the columns of a non-empty list of tables by pasting each
-- table's rows side by side.  When row counts differ, the shorter side is
-- padded with empty cells whose width matches its last seen row.
-- (Partial on an empty list of tables, via 'foldl1'.)
zipAllColumns :: [[[String]]] -> [[String]]
zipAllColumns = foldl1 pasteTwo
  where
    pasteTwo = go [] []
    -- go remembers the most recent row on each side so it knows how wide
    -- to pad once one side runs out of rows.
    go _ _ (l:ls) (r:rs) = (l ++ r) : go l r ls rs
    go _ _ [] [] = []
    go _ lastR ls [] = zipWith (++) ls (emptyRowsLike lastR)
    go lastL _ [] rs = zipWith (++) (emptyRowsLike lastL) rs
    emptyRowsLike row = repeat (replicate (length row) "")
-- | Print the program name, version and invocation synopsis to stdout.
printUsage :: IO ()
printUsage = do
    progName <- getProgName
    putStrLn $ progName ++ " (" ++ versionStr ++ ")"
    putStrLn $ "Usage: " ++ progName ++ " csvfile_or_dash csvfile_or_dash ..."
|
keithshep/txt-sushi
|
csvzip.hs
|
bsd-3-clause
| 2,161
| 0
| 15
| 518
| 439
| 237
| 202
| 25
| 4
|
{-# LANGUAGE GADTs, TypeFamilies, TypeOperators, EmptyDataDecls, FlexibleInstances, MultiParamTypeClasses, RankNTypes, QuasiQuotes, TemplateHaskell, ViewPatterns #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
-------------------------------------------------------------------------
-- lambda lifting for the lambda calculus with top-level declarations
-------------------------------------------------------------------------
module LambdaLifting2 where
import Ctx
import HobbitLibTH
import Data.List
import Control.Monad.Reader
import Control.Monad.Cont (Cont, ContT(..), runCont, cont)
import Control.Monad.Identity
------------------------------------------------------------
-- source terms
------------------------------------------------------------
-- Term datatype: source lambda terms, indexed by their object-language
-- type.  Variables are HOBBIT 'Name's tagged with 'L' to distinguish
-- lambda-bound names from top-level 'D' names introduced later.
data Term :: * -> * where
  Var :: Name (L a) -> Term a
  Lam :: Binding (L b) (Term a) -> Term (b -> a)
  App :: Term (b -> a) -> Term b -> Term a
instance Show (Term a) where show = tpretty
-- helper functions to build terms without explicitly using nu or Var
lam :: (Term a -> Term b) -> Term (a -> b)
lam f = Lam $ nu (f . Var)
-- pretty print terms: bound variables are named "x0", "x1", ... by
-- nesting depth; names not bound in the term render as "(free-var n)"
tpretty :: Term a -> String
tpretty t = pretty' (emptyMb t) emptyMC 0
    where pretty' :: Mb ctx (Term a) -> MapCtx StringF ctx -> Int -> String
          pretty' [nuQQ| Var b |] varnames n =
              case mbNameBoundP b of
                Left pf -> unStringF (ctxLookup pf varnames)
                Right n -> "(free-var " ++ show n ++ ")"
          pretty' [nuQQ| Lam b |] varnames n =
              let x = "x" ++ show n in
              "(\\" ++ x ++ "." ++ pretty' (combineMb b) (varnames :> (StringF x)) (n+1) ++ ")"
          pretty' [nuQQ| App b1 b2 |] varnames n =
              "(" ++ pretty' b1 varnames n ++ " " ++ pretty' b2 varnames n ++ ")"
------------------------------------------------------------
-- target terms
------------------------------------------------------------
-- dummy datatypes for distinguishing Decl names from Lam names
data L a
data D a
-- runtime evidence that a context entry carries an 'L' (lambda-bound) tag
data IsLType a where IsLType :: IsLType (L a)
-- a context containing only lambda-bound entries
type LCtx ctx = MapCtx IsLType ctx
-- a lambda-bound name under a multi-binding ctx; the wrapper fixes the
-- entry's tag to 'L'
data MbLName ctx a where
  MbLName :: Mb ctx (Name (L a)) -> MbLName ctx (L a)
-- terms with top-level names: TVar for lambda-bound variables, TDVar for
-- references to lifted top-level declarations
data DTerm :: * -> * where
  TVar :: Name (L a) -> DTerm a
  TDVar :: Name (D a) -> DTerm a
  TApp :: DTerm (a -> b) -> DTerm a -> DTerm b
instance Show (DTerm a) where show = pretty
-- we use this type for a definiens instead of putting lambdas on the front:
-- DeclCons binds one more parameter and recurses; DeclOne binds the last
data Decl :: * -> * where
  DeclOne :: Binding (L a) (DTerm b) -> Decl (a -> b)
  DeclCons :: Binding (L a) (Decl b) -> Decl (a -> b)
-- top-level declarations with a return value; DeclsCons introduces a 'D'
-- name for the declaration, scoped over the remaining declarations
data Decls :: * -> * where
  DeclsBase :: DTerm a -> Decls a
  DeclsCons :: Decl b -> Binding (D b) (Decls a) -> Decls a
instance Show (Decls a) where show = decls_pretty
------------------------------------------------------------
-- pretty printing
------------------------------------------------------------
-- to make a function for MapCtx (for pretty): a context-indexed wrapper
-- around the display name chosen for each bound variable
newtype StringF x = StringF String
unStringF (StringF str) = str
-- pretty print terms
pretty :: DTerm a -> String
pretty t = mpretty (emptyMb t) emptyMC
-- pretty print a DTerm under a multi-binding, looking bound names up in
-- the supplied table of display names
mpretty :: Mb ctx (DTerm a) -> MapCtx StringF ctx -> String
mpretty [nuQQ| TVar b |] varnames =
    mprettyName (mbNameBoundP b) varnames
mpretty [nuQQ| TDVar b |] varnames =
    mprettyName (mbNameBoundP b) varnames
mpretty [nuQQ| TApp b1 b2 |] varnames =
    "(" ++ mpretty b1 varnames
    ++ " " ++ mpretty b2 varnames ++ ")"
-- bound names render via the table; unbound ones as "(free-var n)"
mprettyName (Left pf) varnames = unStringF (ctxLookup pf varnames)
mprettyName (Right n) varnames = "(free-var " ++ (show n) ++ ")"
-- pretty print decls
decls_pretty :: Decls a -> String
decls_pretty decls =
    "let\n" ++ (mdecls_pretty (emptyMb decls) emptyMC 0)
-- print each declaration on its own line, numbering the top-level names
-- F0, F1, ...; the trailing body prints as "in <term>"
mdecls_pretty :: Mb ctx (Decls a) -> MapCtx StringF ctx -> Int -> String
mdecls_pretty [nuQQ| DeclsBase t |] varnames n =
    "in " ++ (mpretty t varnames)
mdecls_pretty [nuQQ| DeclsCons decl rest |] varnames n =
    let fname = "F" ++ show n in
    fname ++ " " ++ (mdecl_pretty decl varnames 0) ++ "\n"
    ++ mdecls_pretty (combineMb rest) (varnames :> (StringF fname)) (n+1)
-- print a single definiens, naming its bound parameters x0, x1, ...
mdecl_pretty :: Mb ctx (Decl a) -> MapCtx StringF ctx -> Int -> String
mdecl_pretty [nuQQ| DeclOne t|] varnames n =
    let vname = "x" ++ show n in
    vname ++ " = " ++ mpretty (combineMb t) (varnames :> StringF vname)
mdecl_pretty [nuQQ| DeclCons d|] varnames n =
    let vname = "x" ++ show n in
    vname ++ " " ++ mdecl_pretty (combineMb d) (varnames :> StringF vname) (n+1)
------------------------------------------------------------
-- "peeling" lambdas off of a term
------------------------------------------------------------
-- AddArrows lctx b prepends an arrow for each (L a) entry of lctx onto b,
-- outermost binder first
type family AddArrows ctx b
type instance AddArrows CtxNil b = b
type instance AddArrows (CtxCons ctx (L a)) b = AddArrows ctx (a -> b)
-- the result of stripping a non-empty prefix of lambdas off a term: the
-- stripped binders (lctx) plus the lambda-free body under ctx :++: lctx
data PeelRet ctx a where
  PeelRet :: lctx ~ CtxCons lctx0 b => LCtx lctx -> Mb (ctx :++: lctx) (Term a) ->
             PeelRet ctx (AddArrows lctx a)
-- strip all leading lambdas (at least one) off a term-in-binding
peelLambdas :: Mb ctx (Binding (L b) (Term a)) ->
               PeelRet ctx (b -> a)
peelLambdas b =
    peelLambdasH EmptyMC IsLType (combineMb b)
-- worker: lctx0 accumulates already-stripped binders; isl is evidence for
-- the binder currently being stripped
peelLambdasH :: lctx ~ CtxCons lctx0 b =>
                LCtx lctx0 -> IsLType b -> Mb (ctx :++: lctx) (Term a) ->
                PeelRet ctx (AddArrows lctx a)
peelLambdasH lctx0 isl [nuQQ| Lam b |] =
    peelLambdasH (lctx0 :> isl) IsLType (combineMb b)
peelLambdasH lctx0 ilt t = PeelRet (lctx0 :> ilt) t
-- rebuild the binders of a definiens: all but the last parameter become
-- DeclCons binders (via freeParams); the last becomes the DeclOne binder
boundParams :: lctx ~ CtxCons lctx0 b =>
               LCtx lctx ->
               (MapCtx Name lctx -> DTerm a) ->
               Decl (AddArrows lctx a)
boundParams (lctx0 :> IsLType) k = -- flagged as non-exhaustive, but is because of type
    freeParams lctx0 (\ns -> DeclOne $ nu $ \n -> k (ns :> n))
-- wrap one DeclCons binder around the definiens for each entry of lctx
freeParams :: LCtx lctx ->
              (MapCtx Name lctx -> Decl a) ->
              Decl (AddArrows lctx a)
freeParams EmptyMC k = k emptyMC
freeParams (lctx :> IsLType) k =
    freeParams lctx (\names -> DeclCons $ nu $ \x -> k (names :> x))
------------------------------------------------------------
-- sub-contexts
------------------------------------------------------------
-- FIXME: use this type in place of functions
-- a witness that ctx' is a sub-context of ctx, as a projection on name maps
type SubCtx ctx' ctx = MapCtx Name ctx -> MapCtx Name ctx'
------------------------------------------------------------
-- operations on contexts of free variables
------------------------------------------------------------
-- a list of free (lambda-bound) variables, each under binding ctx
type FVList ctx fvs = MapCtx (MbLName ctx) fvs
-- unioning free variable contexts: the data structure records the
-- duplicate-free union plus projections from the union back to each operand
data FVUnionRet ctx fvs1 fvs2 where
  FVUnionRet :: FVList ctx fvs -> SubCtx fvs1 fvs -> SubCtx fvs2 fvs ->
                FVUnionRet ctx fvs1 fvs2
fvUnion :: FVList ctx fvs1 -> FVList ctx fvs2 ->
           FVUnionRet ctx fvs1 fvs2
fvUnion EmptyMC EmptyMC =
    FVUnionRet EmptyMC (\_ -> EmptyMC) (\_ -> EmptyMC)
fvUnion EmptyMC (fvs2 :> fv2) =
    case fvUnion EmptyMC fvs2 of
      FVUnionRet fvs f1 f2 ->
          case elemMC fv2 fvs of
            -- new name: extend the union; duplicate: reuse its index
            Nothing -> FVUnionRet (fvs :> fv2) (\(xs :> x) -> f1 xs) (\(xs :> x) -> f2 xs :> x)
            Just idx -> FVUnionRet fvs f1 (\xs -> f2 xs :> ctxLookup idx xs)
fvUnion (fvs1 :> fv1) fvs2 =
    case fvUnion fvs1 fvs2 of
      FVUnionRet fvs f1 f2 ->
          case elemMC fv1 fvs of
            Nothing -> FVUnionRet (fvs :> fv1) (\(xs :> x) -> f1 xs :> x) (\(xs :> x) -> f2 xs)
            Just idx -> FVUnionRet fvs (\xs -> f1 xs :> ctxLookup idx xs) f2
-- find a name in a free-variable list, returning its index proof
elemMC :: MbLName ctx a -> FVList ctx fvs -> Maybe (InCtx fvs a)
elemMC _ EmptyMC = Nothing
elemMC mbLN@(MbLName n) (mc :> MbLName n') =
    case mbCmpName n n' of
      Just Refl -> Just InCtxBase
      Nothing -> fmap InCtxStep (elemMC mbLN mc)
------------------------------------------------------------
-- deBruijn terms, i.e., closed terms
------------------------------------------------------------
-- forget the names in a free-variable list, keeping only the L-tags
fvsToLCtx :: FVList ctx lctx -> LCtx lctx
fvsToLCtx = ctxMap mbLNameToProof where
    mbLNameToProof :: MbLName ctx a -> IsLType a
    mbLNameToProof (MbLName _) = IsLType
-- term "skeletons": lambda variables are context indices (deBruijn-style),
-- so a skeleton is closed and can be instantiated with any matching name map
data STerm ctx a where
  SWeaken :: SubCtx ctx1 ctx -> STerm ctx1 a -> STerm ctx a
  SVar :: InCtx ctx (L a) -> STerm ctx a
  SDVar :: Name (D a) -> STerm ctx a
  SApp :: STerm ctx (a -> b) -> STerm ctx a -> STerm ctx b
-- substitute concrete names for the indices of a skeleton
skelSubst :: STerm ctx a -> MapCtx Name ctx -> DTerm a
skelSubst (SWeaken f db) names = skelSubst db $ f names
skelSubst (SVar inCtx) names = TVar $ ctxLookup inCtx names
skelSubst (SDVar dTVar) _ = TDVar dTVar
skelSubst (SApp db1 db2) names =
    TApp (skelSubst db1 names) (skelSubst db2 names)
-- applying a STerm to a context of names
skelAppMultiNames :: STerm fvs (AddArrows fvs a) -> FVList ctx fvs ->
                     STerm fvs a
skelAppMultiNames db args = skelAppMultiNamesH db args (ctxToInCtxs args)
skelAppMultiNamesH :: STerm fvs (AddArrows args a) ->
                      FVList ctx args -> MapCtx (InCtx fvs) args ->
                      STerm fvs a
skelAppMultiNamesH fun EmptyMC _ = fun
skelAppMultiNamesH fun (args :> MbLName _) (inCtxs :> inCtx) = -- flagged as non-exhaustive, but is because of type
    SApp (skelAppMultiNamesH fun args inCtxs) (SVar inCtx)
-- enumerate every index of a context
ctxToInCtxs :: MapCtx f ctx -> MapCtx (InCtx ctx) ctx
ctxToInCtxs EmptyMC = EmptyMC
ctxToInCtxs (ctx :> _) = ctxMap InCtxStep (ctxToInCtxs ctx) :> InCtxBase
------------------------------------------------------------
-- STerms combined with their free variables
------------------------------------------------------------
-- a skeleton packaged with the list of free variables it mentions
data FVSTerm ctx lctx a where
  FVSTerm :: FVList ctx fvs -> STerm (fvs :++: lctx) a ->
             FVSTerm ctx lctx a
-- split the free variables of a skeleton into those bound outside lctx
-- and those bound by lctx itself (which move into the skeleton's context)
fvSSepLTVars :: MapCtx f lctx -> FVSTerm (ctx :++: lctx) CtxNil a ->
                FVSTerm ctx lctx a
fvSSepLTVars lctx (FVSTerm fvs db) =
    case fvSSepLTVarsH lctx Tag fvs of
      SepRet fvs' f -> FVSTerm fvs' (SWeaken f db)
-- intermediate result of the separation: the outer free variables plus a
-- projection from (fvs' :++: lctx) back to the original fvs
data SepRet lctx ctx fvs where
  SepRet :: FVList ctx fvs' -> SubCtx fvs (fvs' :++: lctx) ->
            SepRet lctx ctx fvs
fvSSepLTVarsH :: MapCtx f lctx -> Tag ctx -> FVList (ctx :++: lctx) fvs ->
                 SepRet lctx ctx fvs
fvSSepLTVarsH _ _ EmptyMC = SepRet EmptyMC (\_ -> EmptyMC)
fvSSepLTVarsH lctx ctx (fvs :> fv@(MbLName n)) =
    case fvSSepLTVarsH lctx ctx fvs of
      SepRet m f ->
          case raiseAppName (ctxAppendL ctx lctx) n of
            -- Left: the name is one of the lctx binders; Right: it is free
            -- outside lctx and joins the outer free-variable list
            Left idx -> SepRet m (\xs -> f xs :> ctxLookup (weakenInCtxL (ctxTag m) idx) xs)
            Right n ->
                SepRet (m :> MbLName n)
                       (\xs -> case mapCtxSplit (ctxAppendL (ctxConsTag (ctxTag m) fv) lctx) xs of
                                 (fvs' :> fv', lctxs) -> f (ctxAppend fvs' lctxs) :> fv')
-- decide whether a name under an appended binding lives in the suffix
-- (returning its index) or only in the prefix (returning it re-bound)
raiseAppName :: IsAppend ctx1 ctx2 ctx -> Mb ctx (Name a) ->
                Either (InCtx ctx2 a) (Mb ctx1 (Name a))
raiseAppName isTApp n =
    case mbToplevel $(superComb [| mbNameBoundP |]) (separateMb isTApp n) of
      [nuQQ| Left inCtx |] -> Left $ mbInCtx inCtx
      [nuQQ| Right n |] -> Right n
------------------------------------------------------------
-- lambda-lifting, woo hoo!
------------------------------------------------------------
-- lambda-lifting runs in a continuation monad whose final answer is the
-- whole Decls block, so each Lam can emit a top-level declaration
type LLBodyRet b ctx a = Cont (Decls b) (FVSTerm ctx CtxNil a)
llBody :: LCtx ctx -> Mb ctx (Term a) -> LLBodyRet b ctx a
llBody ctx [nuQQ| Var v |] =
    return $ FVSTerm (EmptyMC :> MbLName v) $ SVar InCtxBase
llBody ctx [nuQQ| App t1 t2 |] = do
    FVSTerm fvs1 db1 <- llBody ctx t1
    FVSTerm fvs2 db2 <- llBody ctx t2
    -- union the two free-variable lists and weaken both skeletons into it
    FVUnionRet names sub1 sub2 <- return $ fvUnion fvs1 fvs2
    return $ FVSTerm names $ SApp (SWeaken sub1 db1) (SWeaken sub2 db2)
llBody ctx [nuQQ| Lam b |] = do
    -- strip all leading lambdas, lift the body, then split its free
    -- variables into the lambda's own binders and the truly free ones
    PeelRet lctx body <- return $ peelLambdas b
    llret <- llBody (ctxAppend ctx lctx) body
    FVSTerm fvs db <- return $ fvSSepLTVars lctx llret
    -- emit a top-level declaration closed over fvs, and replace the lambda
    -- by the new decl name applied to its free variables
    cont $ \k ->
        DeclsCons (freeParams (fvsToLCtx fvs) $ \names1 ->
                    boundParams lctx $ \names2 ->
                    skelSubst db (ctxAppend names1 names2))
        $ nu $ \d -> k $ FVSTerm fvs (skelAppMultiNames (SDVar d) fvs)
-- the top-level lambda-lifting function
lambdaLift :: Term a -> Decls a
lambdaLift t =
    runCont (llBody EmptyMC (emptyMb t))
            (\(FVSTerm fvs db) ->
                 let vs = ctxMap (\(MbLName mbn) -> elimEmptyMb mbn) fvs
                 in DeclsBase (skelSubst db vs))
------------------------------------------------------------
-- examples
------------------------------------------------------------
-- simple application under two lambdas
ex1 = lam (\f -> (lam $ \x -> App f x))
res1 = lambdaLift ex1
-- nested lambdas; the inner one has f2 free
ex2 = lam (\f1 -> App f1 (lam (\f2 -> lam (\x -> App f2 x))))
res2 = lambdaLift ex2
-- inner lambda captures x from two binders up
ex3 = lam (\x -> lam (\f1 -> App f1 (lam (\f2 -> lam (\y -> f2 `App` x `App` y)))))
res3 = lambdaLift ex3
-- inner lambda captures both x and f1
ex4 = lam (\x -> lam (\f1 -> App f1 (lam (\f2 -> lam (\y -> f2 `App` (f1 `App` x `App` y))))))
res4 = lambdaLift ex4
ex5 = lam (\f1 -> lam $ \f2 -> App f1 (lam $ \x -> App f2 x))
res5 = lambdaLift ex5
-- lambda-lift with a free variable
ex6 = nu (\f -> App (Var f) (lam $ \x -> x))
res6 = mbToplevel $(superComb [| lambdaLift |]) ex6
-- lambda-lift with a free variable as part of a lambda's environment
ex7 = nu (\f -> lam $ \y -> App y $ App (Var f) (lam $ \x -> x))
res7 = mbToplevel $(superComb [| lambdaLift |]) ex7
-- example from paper's Section 4
exP = lam $ \f -> lam $ \g -> App f $ lam $ \x -> App g $ App g x
resP = lambdaLift exP
|
eddywestbrook/hobbits
|
archival/LambdaLiftingDB2.hs
|
bsd-3-clause
| 13,283
| 0
| 22
| 3,109
| 4,683
| 2,409
| 2,274
| -1
| -1
|
module DimensionalExample where
import Prelude hiding ((/))
import Numeric.Units.Dimensional.Prelude
( (*~)
, (/)
, Quantity, Recip, DTime, Length, Time
, one, minute
)
-- | "Ideal" turnover for steps while running is 180 steps per minute.
-- The dimension is steps per unit time, i.e. the reciprocal of time
-- (a step count is dimensionless, hence @180 *~ one@).
turnover :: Quantity (Recip DTime) Double
turnover = (180 *~ one) / (1 *~ minute)
-- | Stride length needed to cover @distance@ in @goalTime@ at the fixed
-- 'turnover' cadence: @distance / goalTime@ gives the running speed, and
-- dividing speed by steps-per-time yields length per step.
requiredStrideLength
  :: Length Double  -- ^ race distance
  -> Time Double    -- ^ goal finishing time
  -> Length Double
requiredStrideLength distance goalTime = speed / turnover
  where
    speed = distance / goalTime
|
FranklinChen/twenty-four-days2015-of-hackage
|
src/DimensionalExample.hs
|
bsd-3-clause
| 510
| 0
| 7
| 116
| 135
| 80
| 55
| 15
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
module Documentation.Haddocset.Index
( SearchIndex
, ReadWrite
, ReadOnly
, EntryType(..)
, IndexEntry(..)
, withSearchIndex
, withReadWrite
, insert
, sinkEntries
) where
import Data.Text (Text)
import Distribution.Package (PackageId)
import Distribution.Text (display)
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Database.SQLite.Simple as Sql
import qualified Database.SQLite.Simple.ToField as Sql
-- | Phantom tag: the index may be written to.
data ReadWrite
-- | Phantom tag: the index is opened for reading only.
data ReadOnly
-- | A handle to a docset search index.
--
-- This will be tagged with 'ReadWrite' or 'ReadOnly'.
newtype SearchIndex a = SearchIndex Sql.Connection
-- | The kind of an entry as stored in the search index's @type@ column.
data EntryType
    = PackageEntry
    | ModuleEntry
    | TypeEntry
    | ConstructorEntry
    | FunctionEntry
    deriving (Show, Ord, Eq)
-- | Serialise an 'EntryType' as the text tag stored in the @type@ column.
instance Sql.ToField EntryType where
    toField entryType = Sql.SQLText (tag entryType)
      where
        tag PackageEntry     = "Package"
        tag ModuleEntry      = "Module"
        tag TypeEntry        = "Type"
        tag ConstructorEntry = "Constructor"
        tag FunctionEntry    = "Function"
-- | An entry in the search index.
data IndexEntry = IndexEntry
    { entryName :: !Text -- ^ identifier shown in search results
    , entryType :: !EntryType -- ^ kind of entry (package, module, ...)
    , entryPath :: !String -- ^ docset-relative path of the documentation page
    , entryPackage :: !PackageId -- ^ package the entry belongs to
    } deriving (Show, Ord, Eq)
-- | Row layout matches the (name, type, path, package) column order used
-- by 'insertStmt'; the package id is rendered with Cabal's 'display'.
instance Sql.ToRow IndexEntry where
    toRow IndexEntry{..} =
        Sql.toRow (entryName, entryType, entryPath, display entryPackage)
-- | Executes the given operation on the search index at the specified
-- location.
--
-- Creates the @searchIndex@ table and its uniqueness index if they do not
-- exist yet, then hands the caller a read-only handle; the connection is
-- closed when the callback returns.
withSearchIndex :: FilePath -> (SearchIndex ReadOnly -> IO a) -> IO a
withSearchIndex path f =
    Sql.withConnection path $ \conn -> do
        Sql.execute_ conn
            "CREATE TABLE IF NOT EXISTS searchIndex \
            \ ( id INTEGER PRIMARY KEY \
            \ , name TEXT \
            \ , type TEXT \
            \ , path TEXT \
            \ , package TEXT \
            \ )"
        -- the unique index makes duplicate inserts no-ops (INSERT OR IGNORE)
        Sql.execute_ conn
            "CREATE UNIQUE INDEX IF NOT EXISTS \
            \ anchor ON searchIndex (name, type, path, package)"
        f (SearchIndex conn)
-- | Executes an operation on a 'ReadWrite' SearchIndex.
--
-- Opens a database transaction. If the operation fails for any reason, the
-- changes are rolled back.
withReadWrite :: SearchIndex ReadOnly -> (SearchIndex ReadWrite -> IO a) -> IO a
withReadWrite (SearchIndex conn) f =
    Sql.withTransaction conn $
        f (SearchIndex conn)
-- | Inserts an item into a SearchIndex.  Duplicates (same name, type,
-- path, package) are silently ignored thanks to the unique index.
insert :: SearchIndex ReadWrite -> IndexEntry -> IO ()
insert (SearchIndex conn) = Sql.execute conn insertStmt
-- | Insert statement matching the column order of 'Sql.ToRow IndexEntry'.
insertStmt :: Sql.Query
insertStmt =
    "INSERT OR IGNORE INTO searchIndex \
    \ (name, type, path,package) VALUES (?, ?, ?, ?)"
-- | A sink to write index entries: consumes every upstream 'IndexEntry'
-- and inserts it via 'insert'.
sinkEntries :: SearchIndex ReadWrite -> C.Consumer IndexEntry IO ()
sinkEntries searchIndex = CL.mapM_ (insert searchIndex)
|
philopon/haddocset
|
Documentation/Haddocset/Index.hs
|
bsd-3-clause
| 3,085
| 0
| 11
| 791
| 594
| 326
| 268
| -1
| -1
|
{-# LANGUAGE CPP #-}
module UU.Parsing.Derived
( -- * Checking
acceptsepsilon
, mnz
-- * Prelude defs
, (<..>)
, pExcept
, opt
-- * Sequential compositions
, asList
, asList1
, asOpt
, (<+>)
, (<**>)
, (<$$>)
, (<??>)
, (<?>)
, pPacked
-- * Iterating parsers
, pFoldr_ng, pFoldr_gr, pFoldr
, pFoldr1_ng, pFoldr1_gr, pFoldr1
, pFoldrSep_ng, pFoldrSep_gr, pFoldrSep
, pFoldr1Sep_ng, pFoldr1Sep_gr, pFoldr1Sep
, pList_ng, pList_gr, pList
, pList1_ng, pList1_gr, pList1
, pListSep_ng, pListSep_gr, pListSep
, pList1Sep_ng, pList1Sep_gr, pList1Sep
, pChainr_ng, pChainr_gr, pChainr
, pChainl_ng, pChainl_gr, pChainl
-- * Misc
, pAny
, pAnySym
, pToks
, pLocate
)
where
import UU.Parsing.Interface
import Control.Applicative
infixl 2 <?>
-- infixl 4 <**>
infixl 4 <??>, <+>
infixl 2 `opt`
infixl 5 <..>
-- =======================================================================================
-- ===== CHECKING ========================================================================
-- =======================================================================================
-- | Checks if the parser accepts epsilon, i.e. whether it has an
-- empty-string alternative (a "zero" parser).
acceptsepsilon :: (IsParser p s) => p v -> Bool
acceptsepsilon = maybe False (const True) . getzerop
-- | Guard against left recursion: if parser @p@ accepts the empty string,
-- abort with a user error naming the calling combinator @comb@;
-- otherwise return @v@ unchanged.
mnz :: (IsParser p s) => p v -> t -> String -> t
mnz p v comb
  = if acceptsepsilon p
    then usererror ("The combinator <" ++ comb ++ "> from <Derived.hs> is called with a parser that accepts the empty string.\n"
                    ++
                    "The library cannot handle the resulting left recursive formulation (which is ambiguous too).\n"
                   )
    else v
-- =======================================================================================
-- ===== START OF PRELUDE DEFINITIONS ========== =========================================
-- =======================================================================================
-- | Parses the specified range, see also 'pRange'.
--
-- Example:
--
-- > pDig = 'a' <..> 'z'
(<..>) :: (IsParser p s) => s -> s -> p s
a <..> b = pRange a (Range a b)
-- | Parses any symbol between @l@ and @r@ except those listed in @elems@;
-- @err@ is the symbol used for error reporting on the remaining ranges.
-- Fails outright if excluding @elems@ leaves nothing to accept.
pExcept :: (IsParser p s, Symbol s, Ord s, Eq (SymbolR s)) => (s, s, s) -> [s] -> p s
pExcept (l,r,err) elems = let ranges = filter (/= EmptyR) (Range l r `except` elems)
                          in if null ranges then pFail
                             else foldr (<|>) pFail (map (pRange err) ranges)
-- | Optionally recognize parser 'p'.
--
-- If 'p' can be recognized, the return value of 'p' is used. Otherwise,
-- the value 'v' is used. Note that opt is greedy, if you do not want
-- this use @... <|> pSucceed v@ instead. Furthermore, 'p' should not
-- recognise the empty string.
opt :: (IsParser p s) => p a -> a -> p a
p `opt` v = mnz p (p <|> pLow v) "opt"
-- =======================================================================================
-- ===== Special sequential compositions =========================================
-- =======================================================================================
-- | Relabel a parser's expected-symbols message as a possibly-empty
-- repetition: @(exp ...)*@.
asList :: (IsParser p s) => Expecting s -> p v -> p v
asList exp = setfirsts (ESeq [EStr "(", exp, EStr " ...)*"])
-- | Relabel the expected-symbols message as a non-empty repetition:
-- @(exp ...)+@.
asList1 :: (IsParser p s) => Expecting s -> p v -> p v
asList1 exp = setfirsts (ESeq [EStr "(", exp, EStr " ...)+"])
-- | Relabel the expected-symbols message as optional: @( exp ...)?@.
asOpt :: (IsParser p s) => Expecting s -> p v -> p v
asOpt exp = setfirsts (ESeq [EStr "( ", exp, EStr " ...)?"])
-- | Parses the sequence of 'pa' and 'pb', and combines them as a tuple.
(<+>) :: (IsParser p s) => p a -> p b -> p (a, b)
pa <+> pb = (,) <$> pa <*> pb
{-
-- | Suppose we have a parser a with two alternatives that both start
-- with recognizing a non-terminal p, then we will typically rewrite:
--
-- > a = f <$> p <*> q
-- >   <|> g <$> p <*> r
--
-- into:
--
-- > a = p <**> (f <$$> q <|> g <$$> r)
(<**>) :: (IsParser p s) => p a -> p (a -> b) -> p b
p <**> q = (\ x f -> f x) <$> p <*> q
-}
-- | Flipped-application helper for use with '<**>': @f \<$$\> p@ parses
-- @p@ and waits for @f@'s first argument to arrive from the left.
(<$$>) :: (IsParser p s) => (a -> b -> c) -> p b -> p (a -> c)
f <$$> p = pSucceed (flip f) <*> p
-- | Optionally apply a postfix modifier parsed by @q@ to the result of
-- @p@; if @q@ is absent the result of @p@ is returned unchanged.
(<??>) :: (IsParser p s) => p a -> p (a -> a) -> p a
p <??> q = p <**> (q `opt` id)
-- | Replace the expected-symbols message of @p@ with the literal @str@.
(<?>) :: (IsParser p s) => p v -> String -> p v
p <?> str = setfirsts (EStr str) p
-- | This can be used to parse 'x' surrounded by 'l' and 'r'.
--
-- Example:
--
-- > pParens = pPacked pOParen pCParen
pPacked :: (IsParser p s) => p a -> p b1 -> p b -> p b
pPacked l r x = l *>  x <*   r
-- =======================================================================================
-- ===== Iterating ps ===============================================================
-- =======================================================================================
-- Right folds over zero or more occurrences of a parser.  @(op, e)@ is
-- the fold algebra: @op@ combines one parsed value with the fold of the
-- rest, @e@ is the result for zero occurrences.  The @_gr@ variants
-- commit greedily (via 'opt'); the @_ng@ variants keep the non-greedy
-- alternative open.  All guard against empty-accepting parsers via 'mnz'.
pFoldr_ng :: (IsParser p s) => (a -> a1 -> a1, a1) -> p a -> p a1
pFoldr_ng alg@(op,e) p = mnz p (asList (getfirsts p) pfm) "pFoldr_ng"
  where pfm = (op <$> p <*> pfm) <|> pSucceed e
pFoldr_gr :: (IsParser p s) => (a -> b -> b, b) -> p a -> p b
pFoldr_gr alg@(op,e) p = mnz p (asList (getfirsts p) pfm) "pFoldr_gr"
  where pfm = (op <$> p <*> pfm) `opt` e
-- | Default fold: the greedy variant.
pFoldr :: (IsParser p s) =>(a -> b -> b, b) -> p a -> p b
pFoldr alg p = pFoldr_gr alg p
-- one-or-more folds: parse @p@ once, then fold the remainder
pFoldr1_gr :: (IsParser p s) => (v -> b -> b, b) -> p v -> p b
pFoldr1_gr alg@(op,e) p = asList1 (getfirsts p) (op <$> p <*> pFoldr_gr alg p)
pFoldr1_ng :: (IsParser p s) => (v -> b -> b, b) -> p v -> p b
pFoldr1_ng alg@(op,e) p = asList1 (getfirsts p) (op <$> p <*> pFoldr_ng alg p)
pFoldr1 :: (IsParser p s) => (v -> b -> b, b) -> p v -> p b
pFoldr1 alg p = pFoldr1_gr alg p
-- separated folds: occurrences of @p@ separated by @sep@, whose results
-- are discarded
pFoldrSep_gr :: (IsParser p s) => (v -> b -> b, b) -> p a -> p v -> p b
pFoldrSep_gr alg@(op,e) sep p = mnz sepp (asList (getfirsts p)((op <$> p <*> pFoldr_gr alg sepp) `opt` e )) "pFoldrSep_gr (both args)"
  where sepp = sep *> p
pFoldrSep_ng :: (IsParser p s) => (v -> b -> b, b) -> p a -> p v -> p b
pFoldrSep_ng alg@(op,e) sep p = mnz sepp (asList (getfirsts p)((op <$> p <*> pFoldr_ng alg sepp) <|> pSucceed e)) "pFoldrSep_ng (both args)"
  where sepp = sep *> p
pFoldrSep :: (IsParser p s) => (v -> b -> b, b) -> p a -> p v -> p b
pFoldrSep alg sep p = pFoldrSep_gr alg sep p
-- one-or-more separated folds; only guard when the separator can be empty
pFoldr1Sep_gr :: (IsParser p s) => (a -> b -> b, b) -> p a1 -> p a -> p b
pFoldr1Sep_gr alg@(op,e) sep p = if acceptsepsilon sep then mnz p pfm "pFoldr1Sep_gr (both arguments)" else pfm
  where pfm = op <$> p <*> pFoldr_gr alg (sep *> p)
pFoldr1Sep_ng :: (IsParser p s) => (a -> b -> b, b) -> p a1 -> p a -> p b
pFoldr1Sep_ng alg@(op,e) sep p = if acceptsepsilon sep then mnz p pfm "pFoldr1Sep_ng (both arguments)" else pfm
  where pfm = op <$> p <*> pFoldr_ng alg (sep *> p)
pFoldr1Sep :: (IsParser p s) => (a -> b -> b, b) -> p a1 -> p a -> p b
pFoldr1Sep alg sep p = pFoldr1Sep_gr alg sep p
-- | Fold algebra that collects results into a list.
-- NOTE(review): the signature gives the nil element an independent
-- element type ([a1]); usage via the @(op,e)@ pattern unifies the two —
-- presumably intentional, but worth confirming.
list_alg :: (a -> [a] -> [a], [a1])
list_alg = ((:), [])
-- zero-or-more repetitions collected into a list (greedy / non-greedy)
pList_gr :: (IsParser p s) => p a -> p [a]
pList_gr p = pFoldr_gr list_alg p
pList_ng :: (IsParser p s) => p a -> p [a]
pList_ng p = pFoldr_ng list_alg p
pList :: (IsParser p s) => p a -> p [a]
pList p = pList_gr p
-- one-or-more repetitions collected into a list
pList1_gr :: (IsParser p s) => p a -> p [a]
pList1_gr p = pFoldr1_gr list_alg p
pList1_ng :: (IsParser p s) => p a -> p [a]
pList1_ng p = pFoldr1_ng list_alg p
pList1 :: (IsParser p s) => p a -> p [a]
pList1 p = pList1_gr p
-- zero-or-more repetitions separated by @s@ (separator results discarded)
pListSep_gr :: (IsParser p s) => p a1 -> p a -> p [a]
pListSep_gr s p = pFoldrSep_gr list_alg s p
pListSep_ng :: (IsParser p s) => p a1 -> p a -> p [a]
pListSep_ng s p = pFoldrSep_ng list_alg s p
pListSep :: (IsParser p s) => p a -> p a1 -> p [a1]
pListSep s p = pListSep_gr s p
-- one-or-more repetitions separated by @s@
pList1Sep_gr :: (IsParser p s) => p a1 -> p a -> p [a]
pList1Sep_gr s p = pFoldr1Sep_gr list_alg s p
pList1Sep_ng :: (IsParser p s) => p a1 -> p a -> p [a]
pList1Sep_ng s p = pFoldr1Sep_ng list_alg s p
pList1Sep :: (IsParser p s) =>p a -> p a1 -> p [a1]
pList1Sep s p = pList1Sep_gr s p
-- Chains of operands @x@ joined by a binary operator parser @op@.
-- pChainr* associate to the right, pChainl* to the left; each guards
-- (via 'mnz') against an operand that accepts the empty string whenever
-- the operator itself can be empty.
pChainr_gr :: (IsParser p s) => p (c -> c -> c) -> p c -> p c
pChainr_gr op x = if acceptsepsilon op then mnz x r "pChainr_gr (both arguments)" else r
  where r = x <??> (flip <$> op <*> r)
pChainr_ng :: (IsParser p s) => p (a -> a -> a) -> p a -> p a
pChainr_ng op x = if acceptsepsilon op then mnz x r "pChainr_ng (both arguments)" else r
  where r = x <**> ((flip <$> op <*> r) <|> pSucceed id)
pChainr :: (IsParser p s) => p (c -> c -> c) -> p c -> p c
pChainr op x = pChainr_gr op x
pChainl_gr :: (IsParser p s) => p (c -> c -> c) -> p c -> p c
pChainl_gr op x = if acceptsepsilon op then mnz x r "pChainl_gr (both arguments)" else r
  where
   -- collect (operator, operand) continuations, then apply left-to-right
   r = (f <$> x <*> pList_gr (flip <$> op <*> x) )
   f x [] = x
   f x (func:rest) = f (func x) rest
pChainl_ng :: (IsParser p s) => p (c -> c -> c) -> p c -> p c
pChainl_ng op x = if acceptsepsilon op then mnz x r "pChainl_ng (both arguments)" else r
  where
   r = (f <$> x <*> pList_ng (flip <$> op <*> x) )
   f x [] = x
   f x (func:rest) = f (func x) rest
pChainl :: (IsParser p s) => p (c -> c -> c) -> p c -> p c
pChainl op x = pChainl_gr op x
-- | Parses using any of the parsers in the list 'l'.
--
-- Warning: 'l' may not be an empty list.
pAny :: (IsParser p s) =>(a -> p a1) -> [a] -> p a1
pAny f l = if null l then usererror "pAny: argument may not be empty list" else foldr1 (<|>) (map f l)
-- | Parses any of the symbols in 'l'.
pAnySym :: (IsParser p s) =>[s] -> p s
pAnySym l = pAny pSym l
-- | Parse exactly the given token sequence, yielding the recognised
-- tokens in order; the empty sequence succeeds immediately.
pToks :: (IsParser p s) => [s] -> p [s]
pToks = foldr (\tok rest -> (:) <$> pSym tok <*> rest) (pSucceed [])
-- | Parse any one of the given token sequences.
pLocate :: (IsParser p s) => [[s]] -> p [s]
pLocate = pAny pToks
|
guillep19/uulib
|
src/UU/Parsing/Derived.hs
|
bsd-3-clause
| 10,521
| 0
| 12
| 3,086
| 3,747
| 1,978
| 1,769
| -1
| -1
|
module Network.Anarchy.Server.Internal where
import Data.Hashable
import Network.Anarchy
import Network.Anarchy
-- | Clockwise ring distance from @myHp@ to @hp@: each endpoint is hashed,
-- mapped to a non-negative Int, and the difference wraps around maxBound
-- when @hp@ hashes below @myHp@ (so the result is always "forward" on the
-- hash circle).
distance :: HostPort -> HostPort -> Int
distance myHp hp
    | there >= here = there - here
    | otherwise     = (maxBound - here) + there
  where
    here  = nonNeg (hash myHp)
    there = nonNeg (hash hp)
    -- flip negative hashes positive (NB: minBound maps to itself)
    nonNeg x = if x < 0 then (-x) else x
|
davidhinkes/anarchy
|
src/Network/Anarchy/Server/Internal.hs
|
bsd-3-clause
| 383
| 0
| 12
| 85
| 146
| 80
| 66
| 12
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | This module is similar to `Network.Docker.Registry` but uses HUnit
-- assertions to check the returned HTTP status codes.
module Network.Docker.Registry.Checks where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as LB
import Test.HUnit
import Network.Docker.Registry
import Network.Docker.Registry.Internal
import Network.Docker.Registry.Types
import Network.Http.Checks
-- TODO There is plenty of ways to mis-construct the queries, e.g. removing
-- the content-type, having the content-length wrong, or interrupting a
-- transfer-encoding: chunked, ...
----------------------------------------------------------------------
-- Repositories
----------------------------------------------------------------------
-- | Push repository @r@ and assert the HTTP status code equals @n@.
checkPushRepository :: Int -> String -> Repository -> Test
checkPushRepository n title r = TestLabel title $ TestCase $
  pushRepository r >>= expectCode n
-- TODO 200 "" or true
-- TODO 400 Test the payload for something like {"error": "malformed json"}.
-- TODO 200 This returns a token, even if not requested.
-- | Like 'checkPushRepository' but sends @jsonOverride@ as the request
-- body instead of the repository's own payload.
checkPushRepository' :: Int -> String -> Repository -> LB.ByteString -> Test
checkPushRepository' n title Repository{..} jsonOverride = TestLabel title $ TestCase $
  putRepository repositoryCredentials repositoryHost
    repositoryNamespace repositoryName jsonOverride >>= expectCode n
-- | Tag image @i@ in repository @r@ with @tag@ and assert status code @n@.
checkPushRepositoryTag :: Int -> String -> Repository -> Image -> ByteString -> Test
checkPushRepositoryTag n title r i tag = TestLabel title $ TestCase $
  pushRepositoryTag r i tag >>= expectCode n
----------------------------------------------------------------------
-- Images
----------------------------------------------------------------------
-- | Pushing an image is done in three steps: first the JSON meta-data, the
-- layer, then the checksum.
-- Note that we can start pushing an image even if it is not listed in a
-- repository.
checkPushImageJson :: Int -> String -> Repository -> Image -> Test
checkPushImageJson n title r i = TestLabel title $ TestCase $
  pushImageJson r i >>= expectCode n
-- TODO 200 "" or true
-- TODO 400 Test the payload for something like {"error": "malformed json"}.
-- TODO 409 Test the payload for something like {"error": "image already exists"}.
-- | Pull the image meta-data and assert the status code (body ignored).
checkPullImageJson :: Int -> String -> Repository -> Image -> Test
checkPullImageJson n title r i = TestLabel title $ TestCase $ do
  (code, _) <- pullImageJson r i
  expectCode n code
-- | Push the image layer and assert status code @n@.
checkPushImageLayer :: Int -> String -> Repository -> Image -> Test
checkPushImageLayer n title r i = TestLabel title $ TestCase $
  pushImageLayer r i >>= expectCode n
-- | Push the image checksum and assert the HTTP status code equals @n@.
checkPushImageChecksum :: Int -> String -> Repository -> Image -> Test
checkPushImageChecksum n title r i = TestLabel title $ TestCase $
  pushImageChecksum r i >>= expectCode n
-- | Like 'checkPushImageChecksum' but sends @checksumOverride@ as the
-- checksum payload instead of the image's own checksum.
checkPushImageChecksum' :: Int -> String -> Repository -> Image -> LB.ByteString -> Test
checkPushImageChecksum' n title Repository{..} Image{..} checksumOverride =
  TestLabel title $ TestCase $
    putImageChecksum repositoryCredentials repositoryHost imageName checksumOverride
      >>= expectCode n
|
noteed/rescoyl-checks
|
Network/Docker/Registry/Checks.hs
|
bsd-3-clause
| 3,175
| 0
| 10
| 478
| 603
| 315
| 288
| 38
| 1
|
{-|
Module : Numeric.AERN.Basics.PartialOrdering
Description : extension of Prelude.Ordering with non-comparable variant
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Extension of 'Prelude.Ordering' with non-comparable variant.
-}
module Numeric.AERN.Basics.PartialOrdering
(
PartialOrderingPartialInfo(..),
partialOrderingPartialInfoAllNothing,
partialOrderingPartialInfoAllFalse,
partialOrderingPartialInfoAnd,
PartialOrdering(..),
partialOrdering2PartialInfo,
partialInfo2PartialOrdering,
partialOrderingVariants,
partialOrderingVariantsSet,
permittedInReflexiveOrder,
partialOrderingVariantsReflexive,
permittedInLinearOrder,
partialOrderingVariantsLinear,
toPartialOrdering,
fromPartialOrdering,
partialOrderingTranspose,
pickConsistentOrderings,
transitivityConsequences,
partialOrderingVariantsTriples
)
where
import qualified Prelude
import Prelude hiding (EQ, LT, GT)
import Test.QuickCheck
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.List as List
{-| A summary of /partial/ knowledge about the relative position
    of two elements of a partial order.

    This is more flexible than @Maybe PartialOrdering@ because,
    for example, sometimes one can establish that two elements are NOT leq
    (and thus also not EQ and not LESS) but not whether the elements
    are GT or NC.  This happens, in particular, when comparing functions
    and it is established that GT or NC holds in some part of the function's
    domain, such as at one point of the domain.

    For each relation flag, @Just b@ means the relation is known to hold
    (@b = True@) or known not to hold (@b = False@); @Nothing@ means unknown.
-}
data PartialOrderingPartialInfo =
    PartialOrderingPartialInfo
    {
        pOrdInfLT :: Maybe Bool,   -- ^ strictly less than?
        pOrdInfLEQ :: Maybe Bool,  -- ^ less than or equal?
        pOrdInfEQ :: Maybe Bool,   -- ^ equal?
        pOrdInfNC :: Maybe Bool,   -- ^ not comparable?
        pOrdInfGEQ :: Maybe Bool,  -- ^ greater than or equal?
        pOrdInfGT :: Maybe Bool    -- ^ strictly greater than?
    }
-- | Render the six flags as a compact record-like string, with
-- T \/ F \/ ? for Just True \/ Just False \/ Nothing.
instance Show PartialOrderingPartialInfo
    where
    show info =
        "PartialOrderingPartialInfo{"
        ++ List.intercalate ","
            [ label ++ render (accessor info)
            | (label, accessor) <-
                [ ("LT:", pOrdInfLT), ("LEQ:", pOrdInfLEQ), ("EQ:", pOrdInfEQ)
                , ("NC:", pOrdInfNC), ("GEQ:", pOrdInfGEQ), ("GT:", pOrdInfGT) ]
            ]
        ++ "}"
        where
        render (Just True) = "T"
        render (Just False) = "F"
        render _ = "?"
-- | All six relation flags known to be false (a contradiction on its own;
-- meant to be updated with the flags that do hold).
partialOrderingPartialInfoAllFalse :: PartialOrderingPartialInfo
partialOrderingPartialInfoAllFalse =
    PartialOrderingPartialInfo jf jf jf jf jf jf

-- | No knowledge at all: all six flags are 'Nothing'.
partialOrderingPartialInfoAllNothing :: PartialOrderingPartialInfo
partialOrderingPartialInfoAllNothing =
    PartialOrderingPartialInfo Nothing Nothing Nothing Nothing Nothing Nothing

-- Module-local shorthands; the explicit signature avoids the monomorphism
-- restriction surprises and silences -Wmissing-signatures.
jf, jt :: Maybe Bool
jf = Just False
jt = Just True
-- | Field-wise conjunction of two partial-information records;
-- a flag is known only when it is known on both sides.
partialOrderingPartialInfoAnd ::
    PartialOrderingPartialInfo -> PartialOrderingPartialInfo -> PartialOrderingPartialInfo
partialOrderingPartialInfoAnd infoA infoB =
    PartialOrderingPartialInfo
        (conj pOrdInfLT) (conj pOrdInfLEQ) (conj pOrdInfEQ)
        (conj pOrdInfNC) (conj pOrdInfGEQ) (conj pOrdInfGT)
    where
    conj field =
        case (field infoA, field infoB) of
            (Just a, Just b) -> Just (a && b)
            _ -> Nothing
{-| Like 'Prelude.Ordering' but with a non-comparable option.
    The local EQ\/LT\/GT shadow the Prelude names (hidden in the import list).
-}
data PartialOrdering =
    EQ -- equal
    | LT -- less than
    | GT -- greater than
    | NC -- not comparable
    deriving (Eq, Ord, Show, Enum, Bounded)
-- | Expand (possibly missing) exact ordering knowledge into the full
-- six-flag record: a known ordering determines every flag, while
-- 'Nothing' yields a record with no knowledge at all.
partialOrdering2PartialInfo :: Maybe PartialOrdering -> PartialOrderingPartialInfo
partialOrdering2PartialInfo mord =
    case mord of
        Nothing -> partialOrderingPartialInfoAllNothing
        Just EQ -> base { pOrdInfLEQ = jt, pOrdInfEQ = jt, pOrdInfGEQ = jt }
        Just LT -> base { pOrdInfLEQ = jt, pOrdInfLT = jt }
        Just GT -> base { pOrdInfGEQ = jt, pOrdInfGT = jt }
        Just NC -> base { pOrdInfNC = jt }
    where
    base = partialOrderingPartialInfoAllFalse
{-|
   For a given record of partial information about partial ordering
   of two elements, give a list of all relations that are
   not ruled out by the partial information.
   (We assume that the partial information record is consistent.)
-}
partialInfo2PartialOrdering ::
    PartialOrderingPartialInfo ->
    [PartialOrdering]
partialInfo2PartialOrdering info
    -- a positively confirmed relation pins the answer; guard order matters:
    -- exact relations are checked before the weaker non-strict bounds
    | infoEQ == jt = [EQ]
    | infoLT == jt = [LT]
    | infoGT == jt = [GT]
    | infoLEQ == jt = [LT, EQ]
    | infoGEQ == jt = [GT, EQ]
    | infoNC == jt = [NC]
    -- otherwise collect every variant not explicitly ruled out (/= Just False)
    | otherwise = mnc ++ mlt ++ mgt ++ meq
    where
    infoEQ = pOrdInfEQ info
    infoNC = pOrdInfNC info
    infoLT = pOrdInfLT info
    infoLEQ = pOrdInfLEQ info
    infoGT = pOrdInfGT info
    infoGEQ = pOrdInfGEQ info
    -- each candidate survives only if neither it nor an implied weaker
    -- relation has been refuted (e.g. refuted LEQ also refutes LT and EQ)
    mnc | infoNC /= jf = [NC]
        | otherwise = []
    mlt | (infoLT /= jf) && (infoLEQ /= jf) = [LT]
        | otherwise = []
    mgt | (infoGT /= jf) && (infoGEQ /= jf) = [GT]
        | otherwise = []
    meq | (infoEQ /= jf) && (infoLEQ /= jf) && (infoGEQ /= jf) = [EQ]
        | otherwise = []
-- | Uniform choice among all four ordering variants.
instance Arbitrary PartialOrdering
    where
    arbitrary = elements partialOrderingVariants

-- | All constructors of 'PartialOrdering', in declaration order.
partialOrderingVariants :: [PartialOrdering]
partialOrderingVariants = [minBound..maxBound]

-- | The variants as a set, for the intersection-heavy search code below.
partialOrderingVariantsSet :: Set.Set PartialOrdering
partialOrderingVariantsSet = Set.fromList partialOrderingVariants
-- | Which ordering variants may relate elements of a reflexive order.
-- Currently all of them; kept for symmetry with 'permittedInLinearOrder'
-- (the commented clauses refer to variants that were removed).
permittedInReflexiveOrder :: PartialOrdering -> Bool
--permittedInReflexiveOrder GEE = False
--permittedInReflexiveOrder LEE = False
permittedInReflexiveOrder _ = True

-- | The variants permitted in a reflexive order.
-- (Signature added; the binding previously relied on defaulting.)
partialOrderingVariantsReflexive :: [PartialOrdering]
partialOrderingVariantsReflexive = [EQ, LT, GT, NC]

-- | Which ordering variants may relate elements of a linear order:
-- everything except incomparability.
permittedInLinearOrder :: PartialOrdering -> Bool
--permittedInLinearOrder GEE = False
--permittedInLinearOrder LEE = False
permittedInLinearOrder NC = False
permittedInLinearOrder _ = True

-- | The variants permitted in a linear order.
partialOrderingVariantsLinear :: [PartialOrdering]
partialOrderingVariantsLinear = [EQ, LT, GT]
-- | Embed a total 'Prelude.Ordering' into 'PartialOrdering'.
toPartialOrdering :: Ordering -> PartialOrdering
toPartialOrdering ord =
    case ord of
        Prelude.EQ -> EQ
        Prelude.LT -> LT
        Prelude.GT -> GT

-- | Project back to 'Prelude.Ordering'.  'NC' has no counterpart there
-- and raises an error.
fromPartialOrdering :: PartialOrdering -> Ordering
fromPartialOrdering EQ = Prelude.EQ
fromPartialOrdering LT = Prelude.LT
fromPartialOrdering GT = Prelude.GT
fromPartialOrdering rel =
    error ("cannot convert " ++ show rel ++ " to Prelude.Ordering")
{-| flip an ordering relation (swap the two compared elements);
    EQ and NC are symmetric and stay fixed -}
partialOrderingTranspose :: PartialOrdering -> PartialOrdering
partialOrderingTranspose rel =
    case rel of
        LT -> GT
        GT -> LT
        symmetric -> symmetric
{-
   From ambiguous contraints on the ordering of elements of a tuple of some size
   produce all consistent unambiguous orderings of all pairs in such tuples.
-}
pickConsistentOrderings ::
    (Eq ix, Ord ix, Show ix) =>
    (PartialOrdering -> Bool) {-^ filter of permissible orderings -} ->
    [ix] {-^ indices for elements of the resulting tuples -} ->
    [((ix, ix), [PartialOrdering])] {-^ constraints on the ordering of the tuples -} ->
    [[((ix, ix), PartialOrdering)]]
pickConsistentOrderings orderingsFilter indices constraints =
    depthFirst Map.empty [] allPairsAndTriples
    where
    -- every index pair, each with the list of later indices through which
    -- transitivity must be checked once both adjacent pairs are decided
    allPairsAndTriples = -- eg [((3,4),[]),((2,4),[]),((2,3),[4]),((1,4),[]),((1,3),[4]),((1,2),[3,4])]
        reverse $
        [((head postfix1, head postfix2), tail postfix2) |
            postfix1 <- init (List.tails indices), -- eg [[1,2,3,4],[2,3,4],[3,4],[4]]
            postfix2 <- init (List.tails (tail postfix1))]
    -- depth-first assignment of one concrete ordering per pair, pruning any
    -- choice inconsistent with orderings already fixed for earlier pairs
    depthFirst pairLookupMap setPairs [] = [setPairs]
    depthFirst pairLookupMap setPairs ((pair@(i1,i2), i3s) : remainingPairs) =
        concat $ map recurse allowedOrderings
        where
        recurse ordering =
            depthFirst updatedPairLookupMap ((pair, ordering) : setPairs) remainingPairs
            where
            -- record the decision in both directions for later lookups
            updatedPairLookupMap =
                Map.insert (i2,i1) (partialOrderingTranspose ordering) $
                Map.insert (i1,i2) ordering pairLookupMap
        allowedOrderings =
            Set.toList $
            Set.intersection (lkConstraints pair) transitivityConstraints
        -- intersect the transitivity consequences over every intermediate i3
        transitivityConstraints =
            foldl Set.intersection partialOrderingVariantsSet $
            map transitivityConstraint i3s
        transitivityConstraint i3 =
            case (Map.lookup (i1,i3) pairLookupMap, Map.lookup (i3,i2) pairLookupMap) of
                (Just rel13, Just rel32) -> transitivityConsequences rel13 rel32
                _ -> error $
                    "pickConsistentOrderings: transitivityConstraint:"
                    ++ "\n pairLookupMap = " ++ show pairLookupMap
                    ++ "\n i1 = " ++ show i1 ++ "; i2 = " ++ show i2 ++ "; i3 = " ++ show i3
    -- caller constraints, restricted to the permitted ordering variants
    constraintsMap =
        Map.map (Set.fromList . filter orderingsFilter) $ Map.fromList $ constraints
    -- look up the constraint set for a pair, honouring constraints given
    -- in either orientation; unconstrained pairs allow every permitted variant
    lkConstraints pair@(i1, i2) =
        case (Map.lookup pair constraintsMap, Map.lookup (i2,i1) constraintsMap) of
            (Just options, Just optionsT) ->
                Set.intersection options $ Set.map partialOrderingTranspose optionsT
            (Just options, Nothing) ->
                options
            (Nothing, Just optionsT) ->
                Set.map partialOrderingTranspose optionsT
            (Nothing, Nothing) ->
                Set.fromList $ filter orderingsFilter partialOrderingVariants
-- | Given @e1 `rel12` e2@ and @e2 `rel23` e3@, the set of orderings still
-- possible between e1 and e3 (assuming equality and order are transitive).
-- Note: the @rel23@\/@rel12@ names in the first two cases intentionally
-- shadow the outer bindings.
transitivityConsequences :: PartialOrdering -> PartialOrdering -> Set.Set PartialOrdering
transitivityConsequences rel12 rel23 =
    case (rel12, rel23) of
        -- equality on one side transfers the other relation unchanged
        (EQ, rel23) -> Set.singleton rel23
        (rel12, EQ) -> Set.singleton rel12
        -- strict orders compose
        (LT, LT) -> Set.singleton LT
        -- (LEE, LT) -> Set.singleton LT
        -- (LT, LEE) -> Set.singleton LT
        -- (LEE, LEE) -> Set.fromList [LEE, LT]
        (GT, GT) -> Set.singleton GT
        -- (GEE, GT) -> Set.singleton GT
        -- (GT, GEE) -> Set.singleton GT
        -- (GEE, GEE) -> Set.fromList [GEE, GT]
        -- (LEE, GEE) -> noneOf [GT, LT]
        -- (GEE, LEE) -> noneOf [GT, LT]
        -- an incomparable link only rules out relations, it fixes nothing
        (LT, NC) -> noneOf [GT, EQ]
        (GT, NC) -> noneOf [LT, EQ]
        (NC, LT) -> noneOf [GT, EQ]
        (NC, GT) -> noneOf [LT, EQ]
        -- (LT, NC) -> noneOf [GT, GEE, EQ]
        -- (LEE, NC) -> noneOf [GT, GEE, EQ]
        -- (GT, NC) -> noneOf [LT, LEE, EQ]
        -- (GEE, NC) -> noneOf [LT, LEE, EQ]
        -- (NC, LT) -> noneOf [GT, GEE, EQ]
        -- (NC, LEE) -> noneOf [GT, GEE, EQ]
        -- (NC, GT) -> noneOf [LT, LEE, EQ]
        -- (NC, GEE) -> noneOf [LT, LEE, EQ]
        _ -> partialOrderingVariantsSet
    where
    noneOf list = partialOrderingVariantsSet `Set.difference` (Set.fromList list)
{-|
   All 29 triples of Comparison orderings @(r1, r2, r3)@ for which
   there could be elements satisfying
   @e1 `r1` e2 && e2 `r2` e3 && e1 `r3` e3@
   (ie not breaking transitivity).
-}
partialOrderingVariantsTriples :: [(PartialOrdering, PartialOrdering, PartialOrdering)]
partialOrderingVariantsTriples =
    map convertToTriple $
    pickConsistentOrderings (const True) [1,2,3] []
    where
    -- NOTE(review): this pattern is non-exhaustive; it relies on
    -- 'pickConsistentOrderings' returning exactly the pairs
    -- (1,2),(1,3),(2,3) in this order -- confirm before changing
    -- that function's pair enumeration.
    convertToTriple [((1,2),rel12), ((1,3),rel13), ((2,3),rel23)] = (rel12,rel23,rel13)
{- The following is useful for some manual testing -}
--allConsistentTriples = pickConsistentOrderings (const True) [1,2,3] []
--
--allConsistentTriplesReflexiveOrder =
-- pickConsistentOrderings permittedInReflexiveOrder [1,2,3] []
--
--allConsistentTriplesLinearOrder =
-- pickConsistentOrderings permittedInLinearOrder [1,2,3] []
--test =
-- Set.difference
-- (Set.fromList partialOrderingVariantsTriples)
-- (Set.fromList partialOrderingVariantsTriples2)
-- where
-- convertToTriple assocList =
-- (rel12, rel23, rel13)
-- where
-- rel12 = fromJust $ Map.lookup (1,2) orderingMap
-- rel23 = fromJust $ Map.lookup (2,3) orderingMap
-- rel13 = fromJust $ Map.lookup (1,3) orderingMap
-- orderingMap = Map.fromList assocList
--
--{-|
-- All 29 triples of Comparison orderings @(r1, r2, r3)@ for which
-- there could be elements satisfying
-- @e1 `r1` e2 && e2 `r2` e3 && e1 `r3` e3@
-- (ie not breaking transitivity).
---}
--partialOrderingVariantsTriples2 :: [(PartialOrdering, PartialOrdering, PartialOrdering)]
--partialOrderingVariantsTriples2 =
-- [(r1,r2,r3)|
-- r1 <- partialOrderingVariantsReflexive,
-- r2 <- partialOrderingVariantsReflexive,
-- r3 <- partialOrderingVariantsReflexive,
-- respectsTransitivity (r1, r2, r3)]
--
--{-|
-- Are there any elements satisfying
-- @e1 `r1` e2 && e2 `r2` e3 && e1 `r3` e3@
-- assuming equality and order are transitive.
---}
--respectsTransitivity ::
-- (PartialOrdering, PartialOrdering, PartialOrdering) {-^ @(r1,r2,r3)@ -} ->
-- Bool
--respectsTransitivity rels =
-- case rels of
-- -- when a pair is equal:
-- (EQ,r2,r3) -> r2 == r3 -- e1 = e2
-- (r1,EQ,r3) -> r1 == r3 -- e2 = e3
-- (r1,r2,EQ) -> r1 == partialOrderingTranspose r2 -- e1 = e3
-- -- 6 permutations of strict inequalities:
-- (LT,LT,LT) -> True -- e1 < e2 < e3 (1)
-- (LT,LT,_ ) -> False -- but not e1 < e3
-- (LT,GT,LT) -> True -- e1 < e3 < e2 (2)
-- (_ ,GT,LT) -> False -- but not e1 < e2
-- (GT,LT,LT) -> True -- e2 < e1 < e3 (3)
-- (GT,_ ,LT) -> False -- but not e2 < e3
-- (GT,LT,GT) -> True -- e2 < e3 < e1 (4)
-- (_ ,LT,GT) -> False -- but not e2 < e1
-- (LT,GT,GT) -> True -- e3 < e1 < e2 (5)
-- (LT,_ ,GT) -> False -- but not e3 < e2
-- (GT,GT,GT) -> True -- e3 < e2 < e1 (6)
-- (GT,GT,_ ) -> False -- but not e3 < e1
-- --
-- _ -> True -- all else is OK
|
michalkonecny/aern
|
aern-order/src/Numeric/AERN/Basics/PartialOrdering.hs
|
bsd-3-clause
| 14,021
| 0
| 20
| 3,483
| 2,475
| 1,373
| 1,102
| 201
| 9
|
{-|
Module: Data.Astro.Planet.PlanetDetails
Description: Planet Details
Copyright: Alexander Ignatyev, 2016-2017
Planet Details.
-}
module Data.Astro.Planet.PlanetDetails
(
Planet(..)
, PlanetDetails(..)
, j2010PlanetDetails
, isInnerPlanet
)
where
import Data.Astro.Types (DecimalDegrees(..), AstronomicalUnits, fromDMS)
import Data.Astro.Time.JulianDate (JulianDate)
import Data.Astro.Time.Epoch (j2010)
-- | Planets of the Solar System, in order of distance from the Sun.
-- 'Earth' is included because its orbital elements are needed by
-- position calculations for the other planets.
data Planet = Mercury
            | Venus
            | Earth
            | Mars
            | Jupiter
            | Saturn
            | Uranus
            | Neptune
            deriving (Show, Eq)
-- | Details of the planetary orbit at the epoch ('pdEpoch');
-- all elements are valid only relative to that reference date.
data PlanetDetails = PlanetDetails {
  pdPlanet :: Planet
  , pdEpoch :: JulianDate              -- ^ Reference epoch of these elements
  , pdTp :: Double                     -- ^ Orbital period in tropical years
  , pdEpsilon :: DecimalDegrees        -- ^ Longitude at the Epoch
  , pdOmegaBar :: DecimalDegrees       -- ^ Longitude of the perihelion
  , pdE :: Double                      -- ^ Eccentricity of the orbit
  , pdAlpha :: AstronomicalUnits       -- ^ Semi-major axis of the orbit in AU
  , pdI :: DecimalDegrees              -- ^ Orbital inclination
  , pdBigOmega :: DecimalDegrees       -- ^ Longitude of the ascending node
  , pdBigTheta :: DecimalDegrees       -- ^ Angular diameter at 1 AU
  } deriving (Show, Eq)
-- | Return True if the planet is inner (its orbit lies inside the Earth's
-- orbit), i.e. it is Mercury or Venus.
isInnerPlanet :: PlanetDetails -> Bool
isInnerPlanet pd = pdPlanet pd `elem` [Mercury, Venus]
-- | PlanetDetails at the reference Epoch J2010.0
--                                              Epoch Tp         Epsilon    Omega Bar  e        alpha     i        Big Omega  Big Theta
j2010PlanetDetails :: Planet -> PlanetDetails
j2010PlanetDetails Mercury = PlanetDetails Mercury j2010 0.24085 75.5671 77.612 0.205627 0.387098 7.0051 48.449 (arcsecs 6.74)
j2010PlanetDetails Venus = PlanetDetails Venus j2010 0.615207 272.30044 131.54 0.006812 0.723329 3.3947 76.769 (arcsecs 16.92)
j2010PlanetDetails Earth = PlanetDetails Earth j2010 0.999996 99.556772 103.2055 0.016671 0.999985 0 0 (arcsecs 0)
j2010PlanetDetails Mars = PlanetDetails Mars j2010 1.880765 109.09646 336.217 0.093348 1.523689 1.8497 49.632 (arcsecs 9.36)
j2010PlanetDetails Jupiter = PlanetDetails Jupiter j2010 11.857911 337.917132 14.6633 0.048907 5.20278 1.3035 100.595 (arcsecs 196.74)
j2010PlanetDetails Saturn = PlanetDetails Saturn j2010 29.310579 172.398316 89.567 0.053853 9.51134 2.4873 113.752 (arcsecs 165.6)
j2010PlanetDetails Uranus = PlanetDetails Uranus j2010 84.039492 356.135400 172.884833 0.046321 19.21814 0.773059 73.926961 (arcsecs 65.8)
j2010PlanetDetails Neptune = PlanetDetails Neptune j2010 165.845392 326.895127 23.07 0.010483 30.1985 1.7673 131.879 (arcsecs 62.2)

-- | arcseconds to DecimalDegrees (0 degrees, 0 minutes, given seconds);
-- used for the angular-diameter field above.
arcsecs = fromDMS 0 0
|
Alexander-Ignatyev/astro
|
src/Data/Astro/Planet/PlanetDetails.hs
|
bsd-3-clause
| 3,003
| 0
| 9
| 747
| 563
| 312
| 251
| 45
| 1
|
-- | A word frequence count program
--
module Data.Text.Benchmarks.Micro.WordCount
( benchmark
) where
import qualified Data.Text as T
import Data.Map (Map)
import qualified Data.Map as M
import Data.List (foldl')
import Criterion.Main (Benchmark, bench)
import Data.Text.Benchmarks.Micro.Util
-- | Benchmark counting the distinct (case-folded) words of a UTF-8 file.
benchmark :: FilePath -> Benchmark
benchmark filePath = bench "WordCount" (withUtf8File (M.size . wordCount) filePath)
-- | Frequency table of the (lower-cased) words in a text.
wordCount :: T.Text -> Map T.Text Int
wordCount txt = foldl' tally M.empty (map T.toLower (T.words txt))
  where
    tally acc w = M.insertWith (+) w 1 acc
|
JensTimmerman/text-benchmarks
|
src/Data/Text/Benchmarks/Micro/WordCount.hs
|
bsd-3-clause
| 541
| 0
| 11
| 96
| 174
| 102
| 72
| 13
| 1
|
--
-- Parser AST.
--
-- Copyright (C) 2014, Galois, Inc.
-- All rights reserved.
--
module Ivory.Language.Syntax.Concrete.ParseAST where
import Data.List (foldl')
import Prelude ()
import Prelude.Compat hiding (init)
import Ivory.Language.Syntax.Concrete.Location
--------------------------------------------------------------------------------
-- Source-level name categories; all are plain strings at parse time and
-- exist only to make the signatures below self-documenting.
type FnSym = String
type Var = String
type RefVar = String
type IxVar = String
type TypeVar = String
type FieldNm = String
type MacroVar = String

--------------------------------------------------------------------------------

-- Top level symbols.
-- | One top-level declaration of a concrete-syntax Ivory module.
data GlobalSym = GlobalProc ProcDef
               | GlobalInclProc IncludeProc
               | GlobalStruct StructDef
               | GlobalBitData BitDataDef
               | GlobalTypeDef TypeDef
               | GlobalConstDef ConstDef
               | GlobalInclude IncludeDef
               | GlobalExtern Extern
               | GlobalArea AreaDef
               | GlobalAreaImport AreaImportDef
  deriving (Show, Read, Eq, Ord)
--------------------------------------------------------------------------------
-- Mem Areas

-- | A globally allocated memory area defined in this module.
data AreaDef = AreaDef
  { areaConst  :: Bool       -- ^ is the area const-qualified?
  , areaType   :: Type
  , areaInit   :: AllocRef   -- ^ initializer, same form as a local allocation
  , memAreaLoc :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

-- | A memory area imported from an external header file.
data AreaImportDef = AreaImportDef
  { aiSym   :: String        -- ^ imported symbol name
  , aiConst :: Bool
  , aiType  :: Type
  , aiFile  :: String        -- ^ header file the symbol comes from
  , aiLoc   :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

--------------------------------------------------------------------------------
-- Includes

-- | An Ivory module included (depended on) by this module.
data IncludeDef = IncludeDef
  { inclModule :: String
  , inclDefLoc :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

--------------------------------------------------------------------------------
-- Externs

-- | An external symbol with its declaring header and type.
data Extern = Extern
  { externSym  :: String
  , externFile :: String
  , externType :: Type
  , externLoc  :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

--------------------------------------------------------------------------------
-- Constant definition

-- | A named constant; the type annotation is optional in the source.
data ConstDef = ConstDef
  { constSym    :: String
  , constExp    :: Exp
  , constType   :: Maybe Type
  , constDefLoc :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

--------------------------------------------------------------------------------
-- Type definition

-- | A type synonym definition.
data TypeDef = TypeDef
  { tySym    :: String
  , tyDef    :: Type
  , tyDefLoc :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

--------------------------------------------------------------------------------
-- Procs

-- | A procedure defined in this module.
data ProcDef = ProcDef
  { procTy      :: Type           -- ^ Return type
  , procSym     :: FnSym          -- ^ Function name
  , procArgs    :: [(Type,Var)]   -- ^ Argument types
  , procStmt    :: [Stmt]         -- ^ Body
  , procPrePost :: [PrePost]      -- ^ Pre/post conditions
  , procLoc     :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

-- | We distinguish the name used from the name imported so the same symbol can
-- be used twice at different types. (E.g., @printf@).
data IncludeProc = IncludeProc
  { procInclTy   :: Type          -- ^ Return type
  , procInclSym  :: FnSym         -- ^ Function name used
  , procInclArgs :: [(Type,Var)]  -- ^ Argument types
  -- XXX add later
  -- , procInclPrePost :: [PrePost]
  , procIncl     :: (String, FnSym) -- ^ Header to import from and function
                                    -- name imported
  , procInclLoc  :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

-- Pre and post conditions
data PrePost = PreCond Exp
             | PostCond Exp
  deriving (Show, Read, Eq, Ord)
--------------------------------------------------------------------------------
-- Types

-- | Array sizes are either a macro name or a literal length.
type SzType = Either String Integer

-- | Concrete-syntax Ivory types.
data Type
  = TyVoid                   -- ^ Unit type
  | TyInt IntSize            -- ^ Signed ints
  | TyWord WordSize          -- ^ Unsigned ints
  | TyBool                   -- ^ Booleans
  | TyChar                   -- ^ Characters
  | TyFloat                  -- ^ Floats
  | TyDouble                 -- ^ Doubles
  -- XXX
  -- | TyPtr Type            -- ^ Pointers
  | TyIx Integer             -- ^ Index type
  | TyString                 -- ^ Static strings
  | TyStored Type            -- ^ References
  | TyStruct String          -- ^ Structures
  | TyArray Type SzType      -- ^ Arrays of fixed length (can be a macro or integer)
  | TyRef Scope Type         -- ^ References
  | TyConstRef Scope Type    -- ^ Constant References
  | TySynonym String         -- ^ Type synonym
  | LocTy (Located Type)     -- ^ A type wrapped with its source location
  deriving (Show, Read, Eq, Ord)
-- Helper to put array type indexes in the right order.
-- @tyArray ty i [i1..ik]@ left-folds 'TyArray' over @[i1..ik,i]@, so the
-- most recently parsed index ends up outermost.  This is the same fold the
-- original head/tail formulation computed, without the partial functions:
-- @foldl' TyArray (TyArray ty (head xs)) (tail xs) == foldl' TyArray ty xs@.
tyArray :: Type -> SzType -> [SzType] -> Type
tyArray ty idx idxs = foldl' TyArray ty (idxs ++ [idx])
-- | Memory-allocation scope of a reference type.
data Scope =
    Stack (Maybe TypeVar)
    -- ^ Stack allocated. If no type variable is provided, a fresh one is
    -- constructed.
  | Global
    -- ^ Globally allocated
  | PolyMem (Maybe TypeVar)
    -- ^ Either allocation. If no type variable is provided, a fresh one is
    -- constructed.
  deriving (Show, Read, Eq, Ord)

-- | Widths of signed integer types.
data IntSize
  = Int8
  | Int16
  | Int32
  | Int64
  deriving (Show, Read, Eq, Ord)

-- | Widths of unsigned integer types.
data WordSize
  = Word8
  | Word16
  | Word32
  | Word64
  deriving (Show, Read, Eq, Ord)
--------------------------------------------------------------------------------
-- Expressions

-- | Literal values appearing in expressions.
data Literal
  = LitInteger Integer
  | LitFloat Double -- represents floats, too
  | LitString String
  deriving (Show, Read, Eq, Ord)

-- | Concrete-syntax expressions.
data Exp
  = ExpLit Literal
  | ExpVar Var
  | ExpRet -- Used only in post-conditions
  | ExpOp ExpOp [Exp]            -- ^ operator application
  | IvoryMacroExp (String,[Exp]) -- ^ call to a Haskell-side Ivory macro
  | ExpDeref Exp
  | ExpArray Exp Exp             -- ^ array indexing
  | ExpStruct Exp Exp            -- ^ struct field access
  | ExpCall FnSym [Exp]
  | ExpAddrOf Var
  | LocExp (Located Exp)         -- ^ expression wrapped with its location
  deriving (Show, Read, Eq, Ord)

-- | All operators of the expression language.
data ExpOp
  -- comparisons and logic
  = EqOp
  | NeqOp
  | CondOp
  | GtOp Bool
  -- ^ True is >=, False is >
  | LtOp Bool
  -- ^ True is <=, False is <
  | NotOp
  | AndOp
  | OrOp
  -- arithmetic
  | MulOp
  | AddOp
  | SubOp
  | NegateOp
  | AbsOp
  | SignumOp
  | DivOp
  | EucDivOp
  | ModOp
  -- floating-point math
  | FExpOp
  | FSqrtOp
  | FLogOp
  | FPowOp
  | FSinOp
  | FTanOp
  | FCosOp
  | FAsinOp
  | FAtanOp
  | FAtan2Op
  | FAcosOp
  | FSinhOp
  | FTanhOp
  | FCoshOp
  | FAsinhOp
  | FAtanhOp
  | FAcoshOp
  | IsNanOp
  | IsInfOp
  | RoundFOp
  | CeilFOp
  | FloorFOp
  -- bit manipulation
  | BitAndOp
  | BitOrOp
  | BitXorOp
  | BitComplementOp
  | BitShiftLOp
  | BitShiftROp
  -- casts, references and index/array helpers
  | ConstRefOp
  | SafeCast
  | BitCast
  | CastWith
  | TwosCompCast
  | TwosCompRep
  | ToIx
  | FromIx
  | IxSize
  | ArrayLen
  | SizeOf
  | NullPtr
  | RefToPtr
  | ToCArray
  deriving (Show, Read, Eq, Ord)

-- | Initializer forms for a locally allocated struct.
data StructInit
  = Empty
  | MacroInit (String, [Exp])
  | FieldInits [(FieldNm, Exp)]
  deriving (Show, Read, Eq, Ord)

-- | Local allocation forms: scalar (optional initializer), array, struct.
data AllocRef
  = AllocBase RefVar (Maybe Exp)
  | AllocArr RefVar [Exp]
  | AllocStruct RefVar StructInit
  deriving (Show, Read, Eq, Ord)
-- | The reference variable bound by a local allocation, regardless of form.
allocRefVar :: AllocRef -> RefVar
allocRefVar (AllocBase v _) = v
allocRefVar (AllocArr v _) = v
allocRefVar (AllocStruct v _) = v
-- | AST for parsing C-like statements.
data Stmt
  = IfTE Exp [Stmt] [Stmt]
  | Assert Exp
  | Assume Exp
  | Return Exp
  | ReturnVoid
  -- Deref dereferencing is an expression in our language here.
  | Store Exp Exp
  | Assign Var Exp (Maybe Type)     -- ^ binding with optional annotation
  | NoBindCall Var [Exp]            -- ^ call whose result is discarded
  | RefCopy Exp Exp
  -- Local is AllocRef
  | AllocRef AllocRef
  | MapArr IxVar [Stmt]             -- ^ loop over every array index
  | UpTo Exp IxVar [Stmt]
  | UpFromTo Exp Exp IxVar [Stmt]
  | DownFrom Exp IxVar [Stmt]
  | DownFromTo Exp Exp IxVar [Stmt]
  | Forever [Stmt]
  | IvoryMacroStmt (Maybe Var) (String, [Exp])
  | Break
  | LocStmt (Located Stmt)          -- ^ statement wrapped with its location
  deriving (Show, Read, Eq, Ord)

--------------------------------------------------------------------------------
-- Structs

-- | Struct definitions: literal field lists, abstract (defined in a C
-- header), or fixed-capacity string types.
data StructDef
  = StructDef String [Field] SrcLoc
  | AbstractDef String FilePath SrcLoc
  | StringDef String Integer SrcLoc
  deriving (Show, Read, Eq, Ord)
-- | The struct's generated symbol; string definitions are mangled
-- through 'ivoryStringStructName'.
structSym :: StructDef -> String
structSym (StructDef sym _ _) = sym
structSym (AbstractDef sym _ _) = sym
structSym (StringDef sym _ _) = ivoryStringStructName sym
-- | Mangle a user-facing string-type name into its backing struct name.
ivoryStringStructName :: String -> String
ivoryStringStructName name = "ivory_string_" ++ name
-- | One named field of a struct definition.
data Field = Field
  { fieldName :: FieldNm
  , fieldType :: Type
  , fieldLoc  :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

--------------------------------------------------------------------------------
-- Bit-data

-- | A "bitdata" definition.
data BitDataDef = BitDataDef
  { defName    :: String
  , defType    :: BitTy
  , defConstrs :: [Constr]
  , bdLoc      :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

-- | Basic type representation allowed in bit definitions.
data BitTy = Bit
           | Bits Integer
           | BitArray Integer BitTy
           | BitTySynonym String
           | LocBitTy (Located BitTy)
  deriving (Show, Read, Eq, Ord)

-- | A constructor definition within a "bitdata".
data Constr = Constr
  { constrName   :: String
  , constrFields :: [BitField]
  , constrLayout :: [LayoutItem]
  , constrLoc    :: SrcLoc
  } deriving (Show, Read, Eq, Ord)

-- | One element of a bit data constructor layout.
data LayoutItem = LayoutConst BitLiteral
                | LayoutField String
  deriving (Show, Read, Eq, Ord)

-- | A bit integer literal with a known or unknown size.
data BitLiteral =
    BitLitKnown { bitLitLen :: Integer , bitLitVal :: Integer }
  | BitLitUnknown { bitLitVal :: Integer }
  deriving (Show, Read, Eq, Ord)

-- | A record-like field defined within a "bitdata" constructor. If the name is
-- an underscore, we name it with 'Nothing'.
data BitField = BitField
  { bitFieldName :: Maybe String
  , bitFieldType :: BitTy
  , bitFieldLoc  :: SrcLoc
  } deriving (Show, Read, Eq, Ord)
--------------------------------------------------------------------------------
-- Instances
-- Each instance answers 'getLoc' (the node's source span, or mempty when
-- none is tracked) and 'stripLoc' (the same node with all location
-- information normalised to mempty, for location-insensitive comparisons).

instance HasLocation GlobalSym where
  -- getLoc = mempty: top-level symbols delegate location to their payloads.
  getLoc = mempty
  stripLoc g = case g of
    GlobalProc p       -> GlobalProc (stripLoc p)
    GlobalInclProc p   -> GlobalInclProc (stripLoc p)
    GlobalStruct s     -> GlobalStruct (stripLoc s)
    GlobalBitData b    -> GlobalBitData (stripLoc b)
    GlobalTypeDef t    -> GlobalTypeDef (stripLoc t)
    GlobalConstDef c   -> GlobalConstDef (stripLoc c)
    GlobalInclude i    -> GlobalInclude (stripLoc i)
    GlobalExtern e     -> GlobalExtern (stripLoc e)
    GlobalArea a       -> GlobalArea (stripLoc a)
    GlobalAreaImport a -> GlobalAreaImport (stripLoc a)

instance HasLocation IncludeDef where
  getLoc = inclDefLoc
  stripLoc incl = incl { inclDefLoc = mempty }

instance HasLocation IncludeProc where
  getLoc = procInclLoc
  stripLoc incl = incl { procInclLoc = mempty }

instance HasLocation Extern where
  getLoc = externLoc
  stripLoc e = e { externLoc = mempty }

instance HasLocation ConstDef where
  getLoc = constDefLoc
  stripLoc c = c { constDefLoc = mempty }

instance HasLocation TypeDef where
  getLoc = tyDefLoc
  stripLoc td = td { tyDefLoc = mempty }

instance HasLocation ProcDef where
  getLoc = procLoc
  stripLoc p = p { procLoc = mempty }

instance HasLocation PrePost where
  getLoc _ = mempty
  stripLoc pp = case pp of
    PreCond e  -> PreCond (stripLoc e)
    PostCond e -> PostCond (stripLoc e)

instance HasLocation AreaDef where
  -- NOTE(review): getLoc ignores the stored 'memAreaLoc' field even though
  -- 'stripLoc' clears it -- confirm whether returning it was intended.
  getLoc _ = mempty
  stripLoc p = p { memAreaLoc = mempty }

instance HasLocation AreaImportDef where
  -- NOTE(review): same asymmetry as AreaDef -- 'aiLoc' is stored and
  -- stripped but never reported by getLoc.
  getLoc _ = mempty
  stripLoc p = p { aiLoc = mempty }
instance HasLocation Type where
  -- Only the LocTy wrapper carries a location; all other forms report none.
  getLoc ty = case ty of
    LocTy t -> getLoc t
    _       -> mempty
  -- stripLoc unwraps LocTy and recurses into any nested types.
  stripLoc ty = case ty of
    TyVoid{}         -> ty
    TyInt{}          -> ty
    TyWord{}         -> ty
    TyBool{}         -> ty
    TyChar{}         -> ty
    TyFloat{}        -> ty
    TyDouble{}       -> ty
    TyString{}       -> ty
    TyIx{}           -> ty
    TyStored ty0     -> TyStored (stripLoc ty0)
    TyStruct{}       -> ty
    TyArray ty0 i    -> TyArray (stripLoc ty0) i
    TyRef s ty0      -> TyRef s (stripLoc ty0)
    TyConstRef s ty0 -> TyConstRef s (stripLoc ty0)
    TySynonym{}      -> ty
    LocTy ty0        -> unLoc ty0
instance HasLocation Exp where
  -- Only the LocExp wrapper carries a location.
  getLoc e = case e of
    LocExp le -> getLoc le
    _         -> mempty
  -- stripLoc unwraps LocExp and recurses into subexpressions.
  stripLoc e = case e of
    ExpLit{}               -> e
    ExpVar{}               -> e
    ExpRet{}               -> e
    ExpOp op args          -> ExpOp op (stripLoc args)
    IvoryMacroExp (s,args) -> IvoryMacroExp (s, stripLoc args)
    ExpDeref e0            -> ExpDeref (stripLoc e0)
    ExpArray e0 e1         -> ExpArray (stripLoc e0) (stripLoc e1)
    ExpStruct e0 e1        -> ExpStruct (stripLoc e0) (stripLoc e1)
    ExpCall fn args        -> ExpCall fn (stripLoc args)
    ExpAddrOf{}            -> e
    LocExp le              -> unLoc le

instance HasLocation AllocRef where
  getLoc _ = mempty
  stripLoc a = case a of
    AllocBase v e      -> AllocBase v (stripLoc e)
    AllocArr v es      -> AllocArr v (stripLoc es)
    AllocStruct v init -> AllocStruct v (stripLoc init)

instance HasLocation StructInit where
  getLoc _ = mempty
  stripLoc init = case init of
    Empty             -> Empty
    MacroInit (fn,es) -> MacroInit (fn, map stripLoc es)
    FieldInits fs     -> FieldInits (map (\(n,e) -> (n, stripLoc e)) fs)
instance HasLocation Stmt where
  -- Only the LocStmt wrapper carries a location.
  getLoc s = case s of
    LocStmt s0 -> getLoc s0
    _          -> mempty
  -- stripLoc recurses into sub-expressions and sub-statement lists.
  stripLoc s = case s of
    IfTE e s0 s1             -> IfTE (stripLoc e) (stripLoc s0) (stripLoc s1)
    Assert e                 -> Assert (stripLoc e)
    Assume e                 -> Assume (stripLoc e)
    Return e                 -> Return (stripLoc e)
    ReturnVoid               -> ReturnVoid
    Break                    -> Break
    Store e0 e1              -> Store (stripLoc e0) (stripLoc e1)
    Assign v e t             -> Assign v (stripLoc e) (stripLoc t)
    NoBindCall v es          -> NoBindCall v (stripLoc es)
    RefCopy e0 e1            -> RefCopy (stripLoc e0) (stripLoc e1)
    AllocRef ar              -> AllocRef (stripLoc ar)
    MapArr v ss              -> MapArr v (stripLoc ss)
    UpTo e v ss              -> UpTo (stripLoc e) v (stripLoc ss)
    UpFromTo e0 e1 v ss      -> UpFromTo (stripLoc e0) (stripLoc e1) v (stripLoc ss)
    DownFrom e v ss          -> DownFrom (stripLoc e) v (stripLoc ss)
    DownFromTo e0 e1 v ss    -> DownFromTo (stripLoc e0) (stripLoc e1) v (stripLoc ss)
    Forever ss               -> Forever (stripLoc ss)
    IvoryMacroStmt v (s0,es) -> IvoryMacroStmt v (s0, stripLoc es)
    LocStmt s0               -> unLoc s0

instance HasLocation StructDef where
  getLoc s = case s of
    StructDef _ _ srcloc   -> srcloc
    AbstractDef _ _ srcloc -> srcloc
    StringDef _ _ srcloc   -> srcloc
  stripLoc s = case s of
    StructDef s0 fs _   -> StructDef s0 (stripLoc fs) mempty
    AbstractDef s0 fp _ -> AbstractDef s0 fp mempty
    StringDef s0 i _    -> StringDef s0 i mempty

instance HasLocation Field where
  getLoc = fieldLoc
  stripLoc (Field n t _) = Field n (stripLoc t) mempty

instance HasLocation BitDataDef where
  getLoc = bdLoc
  stripLoc (BitDataDef s t cs _) = BitDataDef s (stripLoc t) (stripLoc cs) mempty

instance HasLocation BitTy where
  getLoc bt = case bt of
    LocBitTy bt' -> getLoc bt'
    _            -> mempty
  stripLoc bt = case bt of
    Bit            -> bt
    Bits{}         -> bt
    BitArray i bt0 -> BitArray i (stripLoc bt0)
    BitTySynonym{} -> bt
    LocBitTy bt0   -> unLoc bt0

instance HasLocation Constr where
  getLoc = constrLoc
  stripLoc (Constr n fs l _) = Constr n (stripLoc fs) l mempty

instance HasLocation BitField where
  getLoc = bitFieldLoc
  stripLoc (BitField n t _) = BitField n (stripLoc t) mempty
|
GaloisInc/ivory
|
ivory/src/Ivory/Language/Syntax/Concrete/ParseAST.hs
|
bsd-3-clause
| 16,126
| 0
| 15
| 4,851
| 4,400
| 2,380
| 2,020
| 425
| 3
|
{-# LANGUAGE NoImplicitPrelude #-}
module Mismi.CloudwatchLogs (
) where
|
ambiata/mismi
|
mismi-cloudwatch-logs/src/Mismi/CloudwatchLogs.hs
|
bsd-3-clause
| 76
| 0
| 3
| 12
| 10
| 7
| 3
| 2
| 0
|
{-# OPTIONS_GHC -Wall #-}
module Lazy06 where
import Data.List
-- Exercise 1 -----------------------------------------
-- | Naive doubly-recursive Fibonacci (exponential time);
-- negative inputs collapse to 0, and fib 0 = fib 1 = 1.
fib :: Integer -> Integer
fib n
  | n < 0     = 0
  | n <= 1    = 1
  | otherwise = fib (n - 1) + fib (n - 2)
-- | All Fibonacci numbers, each computed independently with the slow 'fib'.
fibs1 :: [Integer]
fibs1 = [fib i | i <- [0 ..]]
-- Exercise 2 -----------------------------------------
-- | All Fibonacci numbers via the self-referential zip (linear time).
fibs2 :: [Integer]
fibs2 = 1 : 1 : rest
  where rest = zipWith (+) fibs2 (tail fibs2)
-- Exercise 3 -----------------------------------------

-- | An infinite stream: every value has a head and an (infinite) tail.
data Stream a = Cons a (Stream a)

-- Show instance prints the first 10 elements followed by ellipsis
-- (the opening bracket is never matched by a closing one).
instance Show a => Show (Stream a) where
    show s = "[" ++ intercalate ", " (map show $ take 10 $ streamToList s)
                 ++ ",..."

-- | Flatten a stream into the corresponding infinite list.
streamToList :: Stream a -> [a]
streamToList (Cons x xs)= x:streamToList xs
-- Exercise 4 -----------------------------------------

-- | Map a function over every element of the (infinite) stream.
instance Functor Stream where
    fmap f (Cons x xs) = Cons (f x) (fmap f xs)
-- Exercise 5 -----------------------------------------
-- | The stream repeating a single value forever.
sRepeat :: a -> Stream a
sRepeat v = loop
  where loop = Cons v loop

-- | The stream @x, f x, f (f x), ...@.
sIterate :: (a -> a) -> a -> Stream a
sIterate step seed = Cons seed (sIterate step (step seed))

-- | Alternate elements of two streams, starting with the first.
sInterleave :: Stream a -> Stream a -> Stream a
sInterleave (Cons a rest) other = Cons a (sInterleave other rest)

-- | First @n@ elements as a list; non-positive counts give [] without
-- forcing the stream.
sTake :: Int -> Stream a -> [a]
sTake count stream
  | count <= 0 = []
  | otherwise  = case stream of
      Cons a rest -> a : sTake (count - 1) rest
-- Exercise 6 -----------------------------------------

-- | The natural numbers 0,1,2,... as a corecursive stream.
nats :: Stream Integer
nats = Cons 0 (fmap (+1) nats)

-- | Pointwise combination of two streams (both infinite, so no
-- length mismatch can occur).
zipStream :: (a -> b -> c) -> Stream a -> Stream b -> Stream c
zipStream f (Cons x xs) (Cons y ys) = Cons (f x y) (zipStream f xs ys)

-- | The "ruler" sequence 0,1,0,2,0,1,0,3,...: zeros interleaved with
-- one-plus-itself.  Defined by corecursion on 'ruler' itself; relies on
-- laziness to make progress.
ruler :: Stream Integer
ruler = sInterleave (sRepeat 0) (zipStream (+) (sRepeat 1) ruler)
-- Exercise 7 -----------------------------------------
-- | Implementation of C rand: a linear congruential generator.  Emits the
-- seed itself followed by its successors modulo 2^31.
rand :: Int -> Stream Int
rand seed = Cons seed (rand ((1103515245 * seed + 12345) `mod` 2147483648))
-- Exercise 8 -----------------------------------------

{- Total Memory in use: 80 MB -}
-- | Min and max via two independent traversals; the whole list stays
-- alive between them, hence the large residency quoted above.
-- (Kept deliberately naive as the exercise baseline for 'minMax'.)
minMaxSlow :: [Int] -> Maybe (Int, Int)
minMaxSlow [] = Nothing -- no min or max if there are no elements
minMaxSlow xs = Just (minimum xs, maximum xs)
-- Exercise 9 -----------------------------------------
{- Total Memory in use: 3 MB -}
-- | Min and max in a single left-to-right pass, so the list can be
-- consumed as it is produced.
minMax :: [Int] -> Maybe (Int, Int)
minMax [] = Nothing
minMax (first:rest) = Just (foldl' widen (first, first) rest)
  where
    -- widen the running (lo, hi) bounds by one element
    widen bounds@(lo, hi) v
      | v < lo    = (v, hi)
      | v > hi    = (lo, v)
      | otherwise = bounds
-- | Print the min/max of one million pseudo-random ints from a fixed seed
-- (used to measure the memory behaviour of 'minMax').
main :: IO ()
main = print $ minMax $ sTake 1000000 $ rand 7666532
-- Exercise 10 ----------------------------------------

-- | A 2x2 integer matrix, stored row-major: @M a b c d@ is [[a,b],[c,d]].
data Matrix = M Integer Integer Integer Integer deriving (Show)

-- Only (*), fromInteger and negate are defined; the remaining Num
-- methods are left undefined because 'fastFib' never uses them.
instance Num Matrix where
    (M x1 x2 y1 y2) * (M x1' x2' y1' y2') =
        M (x1*x1'+x2*y1') (x1*x2'+x2*y2')
          (y1*x1'+y2*y1') (y1*x2'+y2*y2')
    fromInteger i = M (fromInteger i) 0 0 (fromInteger i)
    negate (M x1 x2 y1 y2)= M (-x1) (-x2) (-y1) (-y2)
    (+) = undefined
    abs = undefined
    signum = undefined
-- | The Fibonacci Q-matrix [[1,1],[1,0]]; its n-th power encodes fib n.
munit :: Matrix
munit = M 1 1 1 0

-- | Fibonacci in O(log n) multiplications via exponentiation-by-squaring
-- of the Q-matrix; non-positive inputs yield 0.
fastFib :: Int -> Integer
fastFib n | n <= 0 = 0
fastFib n = res
    where -- the top-right entry of munit^n is the answer we need
          M _ res _ _ = go n
          -- go a computes munit^a, halving the exponent each step
          go 1 = munit
          go a = case mod a 2 of
                   0 -> (go $ div a 2)^(2::Integer)
                   _ -> (*) munit $ (go $div (a-1) 2)^(2::Integer)
|
Enzo-Liu/cis194
|
src/Lazy06.hs
|
bsd-3-clause
| 3,171
| 0
| 15
| 725
| 1,394
| 717
| 677
| 71
| 3
|
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE PolyKinds #-}
module Queries where
import Control.Arrow (returnA)
import Control.Lens
import Data.ByteString.Lazy (ByteString)
import Data.Int (Int64)
import Data.List (intersperse)
import Data.Maybe (listToMaybe)
import Data.String.Conv (toS)
import Database.PostgreSQL.Simple (Connection)
import Opaleye (runInsertManyReturning, queryTable,
restrict, (.==), Query, Column, runQuery,
runUpdateReturning, runDelete)
import qualified Opaleye.PGTypes as P
import Types
--------------------------------------------------------------------------------
-- | Utils
--------------------------------------------------------------------------------
-- | Collapse a query's result list to its first row, if any
-- (i.e. 'listToMaybe' mapped under the functor).
maybeResult :: Functor f => f [a] -> f (Maybe a)
maybeResult = fmap firstRow
  where
    firstRow []      = Nothing
    firstRow (r : _) = Just r
-- | CRUD operations indexed by entity kind. 'NewData', 'ReadData'
-- and 'BaseData' are type families over 'CrudType'.
-- NOTE(review): assumes these families and 'Sing' come from "Types"
-- — confirm.
class Queryable (k :: CrudType) where
  -- Insert new data, returning the persisted row if any.
  createQuery :: Sing k -> Connection -> NewData k -> IO (Maybe (BaseData k))
  -- Look up by key, returning the row if found.
  readQuery :: Sing k -> Connection -> ReadData k -> IO (Maybe (BaseData k))
  -- Overwrite an existing row, returning the updated row if matched.
  updateQuery :: Sing k -> Connection -> BaseData k -> IO (Maybe (BaseData k))
  -- Delete by key, returning the number of rows removed.
  deleteQuery :: Sing k -> Connection -> ReadData k -> IO Int64
--------------------------------------------------------------------------------
-- | User
--------------------------------------------------------------------------------
-- | Insert a new user row and return the persisted 'User', or
-- 'Nothing' if the insert returned no rows.
createUser :: Connection -> NewUser -> IO (Maybe User)
createUser conn newUser =
  fmap listToMaybe (runInsertManyReturning conn userTable [toPG newUser] id)
-- | Fetch the user with the given id, or 'Nothing' if absent.
readUser :: Connection -> UserId -> IO (Maybe User)
readUser conn uId = maybeResult $ runQuery conn $ readUserQuery uId
  where
    -- Arrow-notation Opaleye query: scan userTable, keep the row
    -- whose id column equals the requested id.
    readUserQuery :: UserId -> Query UserRow
    readUserQuery uId = proc () -> do
      row@(User' uId' _ _) <- queryTable userTable -< ()
      restrict -< pgUId uId .== uId'
      returnA -< row
    -- Lift a Haskell-side UserId to a Postgres int4 column literal.
    pgUId :: UserId -> Column P.PGInt4
    pgUId uId = P.pgInt4 $ uId ^. getUserId
-- | Overwrite the row matching the user's id with the given 'User',
-- returning the updated row, or 'Nothing' if no row matched.
updateUser :: Connection -> User -> IO (Maybe User)
updateUser conn usr =
  maybeResult $ runUpdateReturning conn userTable (const $ toPG usr) sameId id
  where
    sameId u = u ^. userId .== targetId
    targetId = P.pgInt4 $ usr ^. userId ^. getUserId
-- | Delete the user row with the given id; returns the number of
-- rows removed (0 if the id did not exist).
deleteUser :: Connection -> UserId -> IO Int64
deleteUser conn uId =
  runDelete conn userTable (\u -> u ^. userId .== P.pgInt4 (uId ^. getUserId))
-- | Wire the user-specific queries into the kind-indexed CRUD class;
-- the singleton argument is only a type witness and is ignored.
instance Queryable 'CrudUser where
  createQuery _ = createUser
  readQuery _ = readUser
  updateQuery _ = updateUser
  deleteQuery _ = deleteUser
--createUser conn nUsr = maybeResult $ runInsertManyReturning conn userTable [toPG nUsr] id
--
--createUser :: Connection
-- -> NewUser
-- -> IO (Maybe User)
--createUser conn nUsr = maybeResult $ runInsertManyReturning conn userTable [toPG nUsr] id
--
--createMedia :: Connection -> NewMedia -> Handler Media
--createMedia conn nMed = maybeResult (query conn q nMed) !? err500
-- where
-- q = [sql|
--
-- INSERT INTO media (owner, caption, media_ref)
-- VALUES ?
-- RETURNING id, owner, caption, media_ref;
--
-- |]
--readUser :: Connection -> UserId -> Handler User
--readUser conn uId = maybeResult (query conn q (Only uId)) !?
-- err404 {errBody = "user_not_found-" <> toBody uId}
-- where
-- q = [sql|
--
-- SELECT id, username, email
-- FROM users
-- WHERE id = ?;
--
-- |]
--
--updateUser :: Connection -> User -> Handler User
--updateUser conn usr = maybeResult (query conn q usrData) !?
-- err404 {errBody = "user_not_found-" <> (toBody . userId $ usr)}
-- where
-- usrData = (userUsername usr, userEmail usr, userId usr)
-- q = [sql|
--
-- UPDATE users
-- SET username = ?, email = ?
-- WHERE id = ?
-- RETURNING id, username, email;
--
-- |]
--
--deleteUser :: Connection -> UserId -> Handler ()
--deleteUser conn uId = do
-- rowsAffected <- liftIO $ execute conn q (Only uId)
-- if rowsAffected == 1
-- then return ()
-- else throwE $ err404 {errBody = "user_not_found-" <> toBody uId}
-- where
-- q = [sql|
--
-- DELETE FROM users
-- WHERE id = ?;
--
-- |]
--
----------------------------------------------------------------------------------
---- | Media
----------------------------------------------------------------------------------
--
--createMedia :: Connection -> NewMedia -> Handler Media
--createMedia conn nMed = maybeResult (query conn q nMed) !? err500
-- where
-- q = [sql|
--
-- INSERT INTO media (owner, caption, media_ref)
-- VALUES ?
-- RETURNING id, owner, caption, media_ref;
--
-- |]
--
--readMedia :: Connection -> MediaId -> Handler Media
--readMedia conn mId = maybeResult (query conn q (Only mId)) !?
-- err404 {errBody = "media_not_found-" <> toBody mId}
-- where
-- q = [sql|
--
-- SELECT id, owner, caption, media_ref
-- FROM users
-- WHERE id = ?;
--
-- |]
--
--updateMedia :: Connection -> Media -> Handler Media
--updateMedia conn med = maybeResult (query conn q mediaData) !?
-- err404 {errBody = "media_not_found-" <> (toBody . mediaId $ med)}
-- where
-- mediaData = (mediaOwner med, mediaCaption med, mediaRef med, mediaId med)
-- q = [sql|
--
-- UPDATE media
-- SET owner = ?, caption = ?, media_ref = ?
-- WHERE id = ?
-- RETURNING id, owner, caption, media_ref;
--
-- |]
--
--deleteMedia :: Connection -> MediaId -> Handler ()
--deleteMedia conn mId = do
-- rowsAffected <- liftIO $ execute conn q (Only mId)
-- if rowsAffected == 1
-- then return ()
-- else throwE $ err404 {errBody = "media_not_found-" <> toBody mId}
-- where
-- q = [sql|
--
-- DELETE FROM media
-- WHERE id = ?;
--
-- |]
|
martyall/kafaka-test
|
src/Queries.hs
|
bsd-3-clause
| 6,189
| 1
| 14
| 1,632
| 865
| 505
| 360
| 54
| 1
|
-- |A module for managing the collection of links held by the Tor node.
module Tor.State.LinkManager(
LinkManager
, newLinkManager
, newLinkCircuit
, setIncomingLinkHandler
)
where
import Control.Concurrent
import Control.Monad
import Crypto.Random
import Data.Maybe
import Data.Word
import Network.TLS hiding (Credentials)
import Tor.Link
import Tor.NetworkStack
import Tor.Options
import Tor.RNG
import Tor.RouterDesc
import Tor.State.Credentials
import Tor.State.Routers
-- |The LinkManager, as you'd guess, serves as a unique management point for
-- holding all the links the current Tor node is operating on. The goal of this
-- module is to allow maximal re-use of incoming and outgoing links while also
-- maintaining enough links to provide anonymity guarantees.
-- NOTE(review): datatype contexts (the @HasBackend s =>@ below) are
-- deprecated in modern GHC; the constraint could live on the
-- functions instead.
data HasBackend s => LinkManager ls s = LinkManager {
       lmNetworkStack        :: TorNetworkStack ls s
     , lmRouterDB            :: RouterDB
     , lmCredentials         :: Credentials
     , lmIdealLinks          :: Int  -- target number of open links
     , lmMaxLinks            :: Int  -- upper bound on open links
     , lmLog                 :: String -> IO ()
     , lmRNG                 :: MVar TorRNG
     , lmLinks               :: MVar [TorLink]  -- all currently open links
     , lmIncomingLinkHandler :: MVar (TorLink -> IO ())
     }
-- |Create a new link manager with the given options, network stack, router
-- database and credentials.
-- | Build a fresh link manager. If the options mark this node as a
-- relay or an exit, also start a background accept loop on the onion
-- port that wraps each incoming connection in a 'TorLink'.
-- NOTE(review): accepted links are only appended to 'lmLinks'; the
-- installed 'lmIncomingLinkHandler' is never invoked in this code —
-- confirm whether that is intentional.
newLinkManager :: HasBackend s =>
                  TorOptions ->
                  TorNetworkStack ls s ->
                  RouterDB -> Credentials ->
                  IO (LinkManager ls s)
newLinkManager o ns routerDB creds =
  do rngMV <- newMVar =<< drgNew
     linksMV <- newMVar []
     ilHndlrMV <- newMVar (const (return ()))
     let lm = LinkManager {
                lmNetworkStack        = ns
              , lmRouterDB            = routerDB
              , lmCredentials         = creds
              , lmIdealLinks          = idealLinks
              , lmMaxLinks            = maxLinks
              , lmLog                 = torLog o
              , lmRNG                 = rngMV
              , lmLinks               = linksMV
              , lmIncomingLinkHandler = ilHndlrMV
              }
     when (isRelay || isExit) $
       do lsock <- listen ns orPort
          lmLog lm ("Waiting for Tor connections on port " ++ show orPort)
          -- One thread accepts; each accepted socket gets its own
          -- handshake thread so a slow peer cannot block the loop.
          forkIO_ $ forever $
            do (sock, addr) <- accept ns lsock
               forkIO_ $
                 do link <- acceptLink creds routerDB rngMV (torLog o) sock addr
                    modifyMVar_ linksMV (return . (link:))
     return lm
 where
  isRelay    = isJust (torRelayOptions o)
  isExit     = isJust (torExitOptions o)
  -- Defaults: onion port 9374, 3 target links, 3 maximum links.
  orPort     = maybe 9374 torOnionPort (torRelayOptions o)
  idealLinks = maybe 3 torTargetLinks (torEntranceOptions o)
  maxLinks   = maybe 3 torMaximumLinks (torRelayOptions o)
-- |Generate the first link in a new circuit, where the first hop meets the
-- given restrictions. The result is the new link, the router description of
-- that link, and a new circuit id to use when creating the circuit.
-- | Pick (or create) the first link of a new circuit. If we already
-- hold at least 'lmIdealLinks' links, try to reuse one whose router
-- meets the restrictions; otherwise dial a brand-new link.
newLinkCircuit :: HasBackend s =>
                  LinkManager ls s -> [RouterRestriction] ->
                  IO (TorLink, RouterDesc, Word32)
newLinkCircuit lm restricts =
  modifyMVar (lmLinks lm) $ \ curLinks ->
    if length curLinks >= lmIdealLinks lm
      then getExistingLink curLinks []
      else buildNewLink curLinks
 where
  -- Scan for a reusable link; on success the list is rotated
  -- (rest ++ acc) so reuse is spread round-robin across links.
  -- Falls through to building a new link if none qualifies.
  getExistingLink :: [TorLink] -> [TorLink] ->
                     IO ([TorLink], (TorLink, RouterDesc, Word32))
  getExistingLink [] acc = buildNewLink acc
  getExistingLink (link:rest) acc
    | Just rd <- linkRouterDesc link
    , rd `meetsRestrictions` restricts =
         do circId <- modifyMVar (lmRNG lm) (linkNewCircuitId link)
            return (rest ++ acc, (link, rd, circId))
    | otherwise =
         getExistingLink rest (acc ++ [link])
  --
  -- Choose a suitable entrance router and open a fresh link to it,
  -- appending it to the managed link list.
  buildNewLink :: [TorLink] ->
                  IO ([TorLink], (TorLink, RouterDesc, Word32))
  buildNewLink curLinks =
    do entranceDesc <- modifyMVar (lmRNG lm)
                          (getRouter (lmRouterDB lm) restricts)
       link <- initLink (lmNetworkStack lm) (lmCredentials lm)
                        (lmRNG lm) (lmLog lm)
                        entranceDesc
       circId <- modifyMVar (lmRNG lm) (linkNewCircuitId link)
       return (curLinks ++ [link], (link, entranceDesc, circId))
-- |Set a callback that will fire any time a new link is added to the system.
-- | Install the callback to run for newly-established links,
-- discarding any previously installed handler.
setIncomingLinkHandler :: HasBackend s =>
                          LinkManager ls s -> (TorLink -> IO ()) ->
                          IO ()
setIncomingLinkHandler lm handler =
  modifyMVar_ (lmIncomingLinkHandler lm) (\_ -> return handler)
-- | 'forkIO', discarding the returned ThreadId.
forkIO_ :: IO () -> IO ()
forkIO_ act = do
  _ <- forkIO act
  return ()
|
GaloisInc/haskell-tor
|
src/Tor/State/LinkManager.hs
|
bsd-3-clause
| 4,827
| 0
| 19
| 1,593
| 1,157
| 607
| 550
| 96
| 3
|
module Network.UV.Internal.TCP
( TCPWatcher(..)
) where
import Foreign.Ptr
-- | A TCP watcher.
-- Wraps an untyped pointer; presumably the underlying libuv TCP
-- handle (@uv_tcp_t*@) — NOTE(review): confirm against the C side.
newtype TCPWatcher = TCPWatcher (Ptr ())
|
aardvarrk/hlibuv
|
src/Network/UV/Internal/TCP.hs
|
bsd-3-clause
| 148
| 0
| 8
| 31
| 39
| 25
| 14
| 4
| 0
|
module Algebra.Lattice.Lifted (
Lifted(..)
) where
import Algebra.Lattice
--
-- Lifted
--
-- | Graft a distinct bottom onto an otherwise unbounded lattice.
-- As a bonus, the bottom will be an absorbing element for the meet.
data Lifted a = Lift a
              | Bottom
-- Joins act pointwise on lifted values; 'Bottom' is the identity.
instance JoinSemiLattice a => JoinSemiLattice (Lifted a) where
    Lift x `join` Lift y = Lift (x `join` y)
    Bottom `join` lift_y = lift_y
    lift_x `join` Bottom = lift_x
-- Meets act pointwise on lifted values; 'Bottom' absorbs any meet.
instance MeetSemiLattice a => MeetSemiLattice (Lifted a) where
    Lift x `meet` Lift y = Lift (x `meet` y)
    Bottom `meet` _ = Bottom
    _ `meet` Bottom = Bottom
instance Lattice a => Lattice (Lifted a) where
-- The grafted 'Bottom' is the new least element, so only a
-- JoinSemiLattice (not a bounded one) is needed underneath.
instance JoinSemiLattice a => BoundedJoinSemiLattice (Lifted a) where
    bottom = Bottom
-- The top is the lifted top of the underlying lattice.
instance BoundedMeetSemiLattice a => BoundedMeetSemiLattice (Lifted a) where
    top = Lift top
instance BoundedLattice a => BoundedLattice (Lifted a) where
|
batterseapower/lattices
|
Algebra/Lattice/Lifted.hs
|
bsd-3-clause
| 932
| 0
| 8
| 211
| 292
| 154
| 138
| 19
| 0
|
module Lang.LF.Internal.Typecheck where
import qualified Data.Set as Set
import qualified Data.Map.Strict as Map
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import Lang.LF.Internal.Basics
import Lang.LF.Internal.Model
import Lang.LF.Internal.Print
import Lang.LF.Internal.Weak
-- | Check that a kind is well-formed under the hypotheses in ?hyps,
-- composing the weakening w inward as 'Weak' constructors unfold.
validateKindLF :: forall f m γ γ'
                . (LFModel f m, ?hyps::Hyps f γ', ?soln :: LFSoln f)
               => Weakening γ γ'
               -> f γ KIND
               -> m ()
validateKindLF w tm =
  case unfoldLF tm of
    Weak w' x -> validateKind (weakCompose w w') x
    Type -> return ()
    KPi nm a k -> do
      validateType w a
      extendCtx nm QPi (weaken w a) $ do
        validateKind (WeakSkip w) k
      {- subordination check -}
-- | Check that a type is well-formed: Pi types recurse under an
-- extended context, record types must be built from row terms, and
-- atomic types must have kind Type.
validateTypeLF :: forall f m γ γ'
                . (LFModel f m, ?hyps:: Hyps f γ', ?soln :: LFSoln f)
               => Weakening γ γ'
               -> f γ TYPE
               -> m ()
validateTypeLF w tm =
  case unfoldLF tm of
    Weak w' x -> validateType (weakCompose w w') x
    TyPi nm a1 a2 -> do
      validateType w a1
      extendCtx nm QPi (weaken w a1) $
        validateType (WeakSkip w) a2
    TyRow _ -> do
      return ()
    TyRecord row -> do
      checkRow =<< inferType w row
    AType p ->
      checkK =<< inferKind w p
 where
   -- A record's payload must itself have a row type.
   checkRow :: forall γ. f γ TYPE -> m ()
   checkRow t =
     case unfoldLF t of
       Weak _ t' -> checkRow t'
       TyRow _ -> return ()
       TyRecord _ -> fail "invalid row type"
       TyPi _ _ _ -> fail "invalid row type"
       AType _ -> fail "invalid row type"
   -- An atomic type must be fully applied, i.e. of kind Type.
   checkK :: forall γ. f γ KIND -> m ()
   checkK k =
     case unfoldLF k of
       Weak _ k' -> checkK k'
       Type -> return ()
       KPi _ _ _ -> fail "invalid atomic type"
-- | Infer the kind of an atomic type family: constants look up their
-- declared kind; applications check the argument against the Pi
-- domain and substitute it into the codomain.
inferKindLF :: forall f m γ γ'
             . (LFModel f m, ?hyps::Hyps f γ', ?soln :: LFSoln f)
            => Weakening γ γ'
            -> f γ ATYPE
            -> m (f γ' KIND)
inferKindLF w tm =
  case unfoldLF tm of
    Weak w' x -> inferKind (weakCompose w w') x
    TyConst x -> weaken w <$> constKind x
    TyApp p1 m2 -> do
      k <- inferKind w p1
      subK WeakRefl k (weaken w m2)
 where
  -- Check an argument against the head of a KPi kind and perform the
  -- hereditary substitution into its body.
  subK :: forall γ
        . Weakening γ γ'
       -> f γ KIND
       -> f γ' TERM
       -> m (f γ' KIND)
  subK subw k m =
    case unfoldLF k of
      Weak w' x -> subK (weakCompose subw w') x m
      KPi _ a2 k1 -> do
        checkType (weaken w tm) m (weaken subw a2)
        hsubst (SubstApply (SubstWeak subw SubstRefl) m) k1
      _ -> do
        kdoc <- displayLF (weaken subw k)
        fail $ unwords ["invalid atomic type family", kdoc]
-- | Here we check for a very limited form of subtyping. We are really only
-- interested in checking subsumption of row types; that is, that any
-- disjointness conditions we assume about row variables are upheld.
checkSubtype
  :: (LFModel f m, ?soln :: LFSoln f)
  => Weakening γ₁ γ
  -> f γ₁ TYPE -- expected subtype
  -> Weakening γ₂ γ
  -> f γ₂ TYPE -- expected supertype
  -> m Bool
checkSubtype w₁ sub w₂ super =
  case (unfoldLF sub, unfoldLF super) of
    (Weak w' x, _) -> checkSubtype (weakCompose w₁ w') x w₂ super
    (_, Weak w' y) -> checkSubtype w₁ sub (weakCompose w₂ w') y
    (TyPi _ a1 b1, TyPi _ a2 b2) ->
      (&&) <$> checkSubtype w₂ a2 w₁ a1 -- NB: contravariant in arguments!
           <*> checkSubtype (WeakSkip w₁) b1 (WeakSkip w₂) b2
    (TyRecord row1, TyRecord row2) -> checkRowSubtype w₁ row1 w₂ row2
    (TyRow flds1, TyRow flds2) ->
      -- NB: this is really the only interesting case. A row type is a
      -- subtype of another iff the fields it declares are a subset.
      return $ fieldSetSubset flds1 flds2
    (AType r1, AType r2) ->
      -- FIXME: is this too strong?
      return $ alphaEq (weaken w₁ r1) (weaken w₂ r2)
    _ -> return False
-- | Subtyping between row terms. Literal rows are compared field by
-- field; row modifications require alpha-equal base rows, identical
-- deletion sets, and fieldwise-subtyped insertions.
--
-- Fix: the original folded the merged field results with 'minimum',
-- which throws on an empty collection — so two empty rows (which are
-- trivially subtypes) crashed. 'and' agrees with 'minimum' on every
-- non-empty Bool collection and correctly yields True on the empty
-- one.
checkRowSubtype
  :: (LFModel f m, ?soln :: LFSoln f)
  => Weakening γ₁ γ
  -> f γ₁ TERM -- expected row subtype
  -> Weakening γ₂ γ
  -> f γ₂ TERM -- expected row supertype
  -> m Bool
checkRowSubtype w₁ sub w₂ super =
  case (unfoldLF sub, unfoldLF super) of
    (Weak w' x, _) -> checkRowSubtype (weakCompose w₁ w') x w₂ super
    (_, Weak w' y) -> checkRowSubtype w₁ sub (weakCompose w₂ w') y
    (Row flds1, Row flds2) -> and <$> (sequence $ mergeFlds flds1 flds2)
    (RowModify r1 del1 ins1, RowModify r2 del2 ins2) -> do
       let br = alphaEq (weaken w₁ r1) (weaken w₂ r2)
       let bdel = del1 == del2
       bins <- and <$> (sequence $ mergeFlds ins1 ins2)
       return (br && bdel && bins)
    _ -> return False
 where -- Fields present on both sides are checked for subtyping; a
       -- field present on only one side fails the comparison.
       mergeFlds = Map.mergeWithKey
                     (\_k t1 t2 -> Just $ checkSubtype w₁ t1 w₂ t2)
                     (fmap (const $ return False))
                     (fmap (const $ return False))
-- | Check a term against an expected type by inferring its type and
-- testing subsumption; on failure, report the enclosing term, the
-- subterm, and both types.
checkType :: (LFModel f m, ?hyps :: Hyps f γ, ?soln :: LFSoln f)
          => f γ s    -- ^ context of the term
          -> f γ TERM -- ^ term to check
          -> f γ TYPE -- ^ expected type
          -> m ()
checkType z m a = do
  a' <- inferType WeakRefl m
  b <- checkSubtype WeakRefl a' WeakRefl a
  if b then return ()
       else do
         zdoc <- displayLF z
         mdoc <- displayLF m
         adoc <- displayLF a
         adoc' <- displayLF a'
         fail $ unlines ["inferred type did not match expected type"
                        , "  in term: " ++ zdoc
                        , "  subterm: " ++ mdoc
                        , "  expected: " ++ adoc
                        , "  inferred: " ++ adoc'
                        ]
-- | Infer the type of a canonical term. Atomic terms must be eta-long
-- (checked by checkTp); rows get a TyRow of their field names; row
-- and record modifications recompute the resulting field sets.
inferTypeLF :: forall f m γ γ'
             . (LFModel f m, ?hyps :: Hyps f γ', ?soln :: LFSoln f)
            => Weakening γ γ'
            -> f γ TERM
            -> m (f γ' TYPE)
inferTypeLF w m =
  case unfoldLF m of
    Weak w' x -> inferType (weakCompose w w') x
    ATerm r -> do
      a <- inferAType w r
      checkTp WeakRefl a
      return a
    Row flds -> do
      let flds' = Map.keysSet flds
      foldLF (TyRow (PosFieldSet flds'))
    RowModify r delSet insMap -> do
      -- Resulting field set: (base \ deletions) ∪ insertions, with
      -- insertions required to be disjoint from surviving fields.
      rset <- checkRowTy WeakRefl =<< inferAType w r
      let rset' = fieldSetDifference rset (PosFieldSet delSet)
      let insSet = PosFieldSet (Map.keysSet insMap)
      let finalset = fieldSetUnion rset' insSet
      let PosFieldSet conflictset = fieldSetIntersection insSet rset'
      if Set.null conflictset then
        foldLF (TyRow finalset)
      else do
        -- NOTE(review): "confilct" typo below is in a runtime message;
        -- left untouched here.
        let msg = text "Field insertions confilct with possibly existing fields:"
                  <$$>
                  (indent 2 $ vcat $ map pretty $ Set.toList conflictset)
        fail $ show msg
    Record flds -> do
      flds' <- traverse (inferType w) flds
      foldLF . TyRecord =<< foldLF (Row flds')
    RecordModify r delSet insMap -> do
      row <- checkRecordTy WeakRefl =<< inferAType w r
      insMap' <- traverse (inferType w) insMap
      row' <- rowModify row WeakRefl delSet insMap'
      foldLF (TyRecord row')
    Lam nm a2 m -> do
      let a2' = weaken w a2
      extendCtx nm QLam a2' $ do
        a1 <- inferType (WeakSkip w) m
        foldLF (TyPi nm a2' a1)
 where
  -- The scrutinee of a record modification must have a record type.
  checkRecordTy :: forall γ. Weakening γ γ' -> f γ TYPE -> m (f γ' TERM)
  checkRecordTy subw a =
    case unfoldLF a of
      Weak w' x -> checkRecordTy (weakCompose subw w') x
      TyRecord row -> return (weaken subw row)
      TyRow _ -> fail "Expected record type"
      TyPi _ _ _ -> fail "Expected record type"
      AType _ -> fail "Expected record type"
  -- The scrutinee of a row modification must have a row type.
  checkRowTy :: forall γ. Weakening γ γ' -> f γ TYPE -> m (FieldSet f)
  checkRowTy subw a =
    case unfoldLF a of
      Weak w' x -> checkRowTy (weakCompose subw w') x
      TyRow flds -> return flds
      TyRecord _ -> fail "Expected row type"
      TyPi _ _ _ -> fail "Expected row type"
      AType _ -> fail "Expected row type"
  -- Eta-longness: an embedded atomic term may not have a Pi type.
  checkTp :: forall γ. Weakening γ γ' -> f γ TYPE -> m ()
  checkTp subw a =
    case unfoldLF a of
      Weak w' x -> checkTp (weakCompose subw w') x
      AType _ -> return ()
      TyRecord _ -> return ()
      TyRow _ -> return ()
      TyPi _ _ _ -> do
        mdoc <- ppLF TopPrec w m
        adoc <- ppLF TopPrec subw a
        fail $ unlines ["Term fails to be η-long:"
                       , show $ indent 2 $ group $ hang 2 $
                           mdoc <+> text "::" <> line <> adoc
                       ]
-- | Infer the type of an atomic term: variables/uvars/constants look
-- up their declared types; applications check the argument against
-- the Pi domain; projections look the field up in the record's row.
inferATypeLF :: forall m f γ γ'
              . (LFModel f m, ?hyps :: Hyps f γ', ?soln :: LFSoln f)
             => Weakening γ γ'
             -> f γ ATERM
             -> m (f γ' TYPE)
inferATypeLF w r =
  case unfoldLF r of
    Weak w' x -> inferAType (weakCompose w w') x
    Var -> do
      let (_,_,a) = lookupCtx (weakenVar w B)
      return a
    UVar u -> weaken w <$> uvarType u
    Const c -> weaken w <$> constType c
    App r1 m2 -> do
      a <- inferAType w r1
      checkArg (weaken w m2) WeakRefl a
    Project r fld -> do
      a <- inferAType w r
      checkRecordProject a WeakRefl fld
 where
  -- A projection's head must have a record type; then delegate to
  -- the row-level lookup.
  checkRecordProject
     :: forall γ
      . f γ TYPE
     -> Weakening γ γ'
     -> LFRecordIndex f
     -> m (f γ' TYPE)
  checkRecordProject a wsub fld =
    case unfoldLF a of
      Weak w' x -> checkRecordProject x (weakCompose wsub w') fld
      TyRecord row -> checkRowProject row wsub fld
      _ -> do
        adoc <- ppLF TopPrec wsub a
        fail $ unwords ["Expected record type", show adoc]
  -- Find the projected field in a row term. For RowModify only the
  -- inserted fields are certain to exist; anything else is an error.
  checkRowProject :: forall γ
                   . f γ TERM
                  -> Weakening γ γ'
                  -> LFRecordIndex f
                  -> m (f γ' TYPE)
  checkRowProject row wsub fld =
    case unfoldLF row of
      Weak w' x -> checkRowProject x (weakCompose wsub w') fld
      RowModify _ _ insFields ->
        case Map.lookup fld insFields of
          Just ty -> return (weaken wsub ty)
          Nothing -> do
            rowdoc <- ppLF RecordPrec wsub row
            fail $ unwords ["Could not prove field exists", show (pretty fld), show rowdoc]
      Row flds ->
        case Map.lookup fld flds of
          Just ty -> return (weaken wsub ty)
          Nothing -> do
            rowdoc <- ppLF RecordPrec wsub row
            fail $ unwords ["Record missing expected field", show (pretty fld), show rowdoc]
      ATerm _ -> do
        rowdoc <- ppLF RecordPrec wsub row
        fail $ unwords ["Could not prove field exists", show (pretty fld), show rowdoc]
      Lam _ _ _ -> fail "expected row value"
      Record _ -> fail "expected row value"
      RecordModify{} -> fail "expected row value"
  -- Check an application argument against the Pi domain, then
  -- substitute it into the codomain.
  checkArg :: forall γ
            . f γ' TERM
           -> Weakening γ γ'
           -> f γ TYPE
           -> m (f γ' TYPE)
  checkArg m2 wsub a =
    case unfoldLF a of
      Weak w' x -> checkArg m2 (weakCompose wsub w') x
      TyPi _ a2 a1 -> do
        checkType (weaken w r) m2 (weaken wsub a2)
        hsubst (SubstApply (SubstWeak wsub SubstRefl) m2) a1
      _ -> do
        adoc <- displayLF (weaken wsub a)
        fail $ unwords ["Expected function type", adoc]
-- | Check that a goal is well-formed: Sigma binders extend the
-- context; a Goal pairs a well-typed term with a valid constraint.
validateGoalLF :: forall f m γ γ'
                . (LFModel f m, ?hyps:: Hyps f γ', ?soln :: LFSoln f)
               => Weakening γ γ'
               -> f γ GOAL
               -> m ()
validateGoalLF w g =
  case unfoldLF g of
    Weak w' x -> validateGoal (weakCompose w w') x
    Sigma nm a g' -> do
      validateType w a
      extendCtx nm QSigma (weaken w a) $ validateGoal (WeakSkip w) g'
    Goal m c -> do
      _ <- inferType w m
      validateCon w c
-- | Check that a constraint is well-formed: unifications must have
-- typeable sides, quantifiers extend the context with their binder,
-- and conjunctions are checked componentwise.
validateConLF :: forall f m γ γ'
               . (LFModel f m, ?hyps:: Hyps f γ', ?soln :: LFSoln f)
              => Weakening γ γ'
              -> f γ CON
              -> m ()
validateConLF w c =
  case unfoldLF c of
    Weak w' x -> validateCon (weakCompose w w') x
    Fail -> return ()
    Unify r1 r2 -> do
      -- FIXME? directly check for acceptability?
      _ <- inferAType w r1
      _ <- inferAType w r2
      return ()
    Forall nm a c' -> do
      validateType w a
      extendCtx nm QForall (weaken w a) $ validateCon (WeakSkip w) c'
    Exists nm a c' -> do
      validateType w a
      extendCtx nm QExists (weaken w a) $ validateCon (WeakSkip w) c'
    And cs ->
      mapM_ (validateCon w) cs
|
robdockins/canonical-lf
|
src/Lang/LF/Internal/Typecheck.hs
|
bsd-3-clause
| 12,407
| 283
| 16
| 4,314
| 4,230
| 2,168
| 2,062
| -1
| -1
|
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
import Data.Map (fromList)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import ETL (transform)
-- | Run the test suite, stopping at the first failure.
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
-- | Specs for 'transform': inverting a score-to-letters map into a
-- lowercased letter-to-score map (the standard Scrabble table).
specs :: Spec
specs =
  describe "transform" $ do
    it "a single letter" $
      transform (fromList [(1, "A")])
      `shouldBe` fromList [('a', 1)]
    it "single score with multiple letters" $
      transform (fromList [(1, "AEIOU")])
      `shouldBe` fromList [('a', 1), ('e', 1), ('i', 1), ('o', 1), ('u', 1)]
    it "multiple scores with multiple letters" $
      transform (fromList [(1, "AE"), (2, "DG")])
      `shouldBe` fromList [('a', 1), ('e', 1), ('d', 2), ('g', 2)]
    it "multiple scores with differing numbers of letters" $
      transform (fromList fullInput)
      `shouldBe` fromList fullOutput
  where
    fullInput = [ ( 1, "AEIOULNRST")
                , ( 2, "DG"        )
                , ( 3, "BCMP"      )
                , ( 4, "FHVWY"     )
                , ( 5, "K"         )
                , ( 8, "JX"        )
                , (10, "QZ"        ) ]
    fullOutput = [ ('a',  1) , ('b',  3) , ('c',  3) , ('d',  2)
                 , ('e',  1) , ('f',  4) , ('g',  2) , ('h',  4)
                 , ('i',  1) , ('j',  8) , ('k',  5) , ('l',  1)
                 , ('m',  3) , ('n',  1) , ('o',  1) , ('p',  3)
                 , ('q', 10) , ('r',  1) , ('s',  1) , ('t',  1)
                 , ('u',  1) , ('v',  4) , ('w',  4) , ('x',  8)
                 , ('y',  4) , ('z', 10) ]
|
enolive/exercism
|
haskell/etl/test/Tests.hs
|
mit
| 1,634
| 0
| 14
| 598
| 648
| 400
| 248
| 36
| 1
|
module Main where
import Graph.Op
import Expression.Op
import Autolib.Dot ( peng, Layout_Program (..) )
import Autolib.Graph.Graph
import Gateway.CGI
import Inter.Evaluate
import Autolib.ToDoc
import Autolib.Reporter
import Text.XHtml ( Html )
-- | CGI entry point: render a form with a graph-expression textarea
-- and a layouter choice, then evaluate and draw the submission.
main :: IO ()
main = Gateway.CGI.execute "Graph.cgi" $ Gateway.CGI.wrap $ do
    open table
    input <- defaulted_textarea "Graph expression" $ show Graph.Op.example
    -- open row ; submit "submit" ; close -- row
    -- click_choice runs in the list monad internally: one entry per
    -- available graphviz layout engine.
    lout <- click_choice "Layouter" $ do
        lout <- [ Dot , Neato , Twopi , Circo , Fdp ]
        return ( show lout, lout )
    close -- table
    -- NOTE(review): 'res' is bound but never used — confirm whether
    -- the handler's result is intentionally discarded.
    ( res, com :: Html ) <- io $ run $ handler input lout
    html com
-- | Parse the submitted graph expression, evaluate it, report the
-- resulting graph, and render it with the chosen layout engine.
-- (No type signature in the original; the type is inferred from the
-- Reporter/CGI monads it runs in.)
handler input lout = do
    exp :: Exp ( Graph Int ) <- parse_or_complain input
    g <- eval0 exp
    inform $ toDoc g
    -- Tiny fixed-size nodes and a bounded canvas for the rendering.
    peng $ g { layout_program = lout
             , layout_hints = [ "-Nheight=0.1", "-Nwidth=0.1"
                              , "-Nfixedsize=true"
                              , "-Gsize=5,5"
                              ]
             }
|
Erdwolf/autotool-bonn
|
src/Graph/Shell.hs
|
gpl-2.0
| 1,055
| 0
| 14
| 336
| 303
| 159
| 144
| -1
| -1
|
module Grammatik.Produktiv
-- -- $Id$
( produktiv
)
where
-- generalized from CFGs to arbitrary grammars
-- productivity is of course undecidable in general
-- what we compute here is a safe approximation in the sense that
-- after deleting all non-productive variables
-- the generated language is still the same
import Control.Monad (guard)
import Autolib.Util.Fix
import Autolib.Set
import Grammatik.Type
-- We call every symbol of a left-hand side productive
-- when all symbols of the right-hand side are productive.
-- For simplicity, all terminals also count as productive.
--
-- Computed as the least fixpoint starting from the terminal set:
-- each pass adds the left-hand-side symbols of every rule whose
-- right-hand side lies entirely in the current set.
produktiv :: Grammatik -> Set Char
produktiv g = fix ( \ qs -> union qs $ mkSet $ do
        ( lhs , rhs ) <- rules g
        guard $ and $ do
            x <- rhs
            return $ ( x `elementOf` qs )
        lhs
    ) ( terminale g )
|
Erdwolf/autotool-bonn
|
src/Grammatik/Produktiv.hs
|
gpl-2.0
| 796
| 0
| 16
| 169
| 149
| 83
| 66
| 14
| 1
|
module FunIn1 where
--In this example, the sub-expression '(y+1)' in function 'f' is generalised as parameter 'z'
-- NOTE(review): this module is a fixture for a generalise-definition
-- refactoring test; the exact shape of 'f' — in particular the
-- sub-expression (y + 1) — is load-bearing, so do not simplify it.
y=0
f x =x + ( y + 1)
g = f 1
|
kmate/HaRe
|
old/testing/generaliseDef/FunIn1.hs
|
bsd-3-clause
| 148
| 0
| 7
| 34
| 37
| 21
| 16
| 4
| 1
|
module WithRenamingIn1 where
--The application of a function is replaced by the right-hand side of the definition,
--with actual parameters replacing formals.
--In this example, unfold the first 'sq' in 'sumSquares'
--This example aims to test renaming in order to avoid name clash or capture.
-- NOTE(review): fixture for an unfold/fold refactoring test; the
-- deliberate near-clashing names (pow_1 vs pow) exercise the tool's
-- renaming, so the exact spelling must be preserved.
sumSquares x y pow_1=(sq x) + sq y +pow_1
sq x=x^pow
pow=2
|
kmate/HaRe
|
old/testing/foldDef/WithRenamingIn1_TokOut.hs
|
bsd-3-clause
| 361
| 0
| 8
| 65
| 54
| 30
| 24
| 4
| 1
|
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances, UndecidableInstances #-}
module Tc173a where
-- GHC testsuite regression module: the OVERLAPPABLE/OVERLAPPING
-- pragma placement and instance shapes are the point of the test —
-- the code is intentionally minimal and must compile as-is.
class FormValue value where
  isFormValue :: value -> ()
  isFormValue _ = ()
class FormTextField value
instance FormTextField String
instance {-# OVERLAPPABLE #-} FormTextField value => FormTextFieldIO value
class FormTextFieldIO value
instance FormTextFieldIO value => FormValue value
instance {-# OVERLAPPING #-} FormTextFieldIO value => FormTextFieldIO (Maybe value)
|
lukexi/ghc
|
testsuite/tests/typecheck/should_compile/Tc173a.hs
|
bsd-3-clause
| 480
| 0
| 8
| 70
| 106
| 51
| 55
| -1
| -1
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Applicative
-- Copyright : Conor McBride and Ross Paterson 2005
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- This module describes a structure intermediate between a functor and
-- a monad (technically, a strong lax monoidal functor). Compared with
-- monads, this interface lacks the full power of the binding operation
-- '>>=', but
--
-- * it has more instances.
--
-- * it is sufficient for many uses, e.g. context-free parsing, or the
-- 'Data.Traversable.Traversable' class.
--
-- * instances can perform analysis of computations before they are
-- executed, and thus produce shared optimizations.
--
-- This interface was introduced for parsers by Niklas Röjemo, because
-- it admits more sharing than the monadic interface. The names here are
-- mostly based on parsing work by Doaitse Swierstra.
--
-- For more details, see /Applicative Programming with Effects/,
-- by Conor McBride and Ross Paterson, online at
-- <http://www.soi.city.ac.uk/~ross/papers/Applicative.html>.
module Control.Applicative (
-- * Applicative functors
Applicative(..),
-- * Alternatives
Alternative(..),
-- * Instances
Const(..), WrappedMonad(..), WrappedArrow(..), ZipList(..),
-- * Utility functions
(<$>), (<$), (<**>),
liftA, liftA2, liftA3,
optional,
) where
import Prelude hiding (id,(.))
import Control.Category
import Control.Arrow (Arrow(arr, (&&&)), ArrowZero(zeroArrow), ArrowPlus((<+>)))
import Control.Monad (liftM, ap, MonadPlus(..))
import Control.Monad.Instances ()
#ifndef __NHC__
import Control.Monad.ST.Safe (ST)
import qualified Control.Monad.ST.Lazy.Safe as Lazy (ST)
#endif
import Data.Functor ((<$>), (<$))
import Data.Monoid (Monoid(..))
#ifdef __GLASGOW_HASKELL__
import GHC.Conc (STM, retry, orElse)
#endif
infixl 3 <|>
infixl 4 <*>, <*, *>, <**>
-- | A functor with application, providing operations to
--
-- * embed pure expressions ('pure'), and
--
-- * sequence computations and combine their results ('<*>').
--
-- A minimal complete definition must include implementations of these
-- functions satisfying the following laws:
--
-- [/identity/]
-- @'pure' 'id' '<*>' v = v@
--
-- [/composition/]
-- @'pure' (.) '<*>' u '<*>' v '<*>' w = u '<*>' (v '<*>' w)@
--
-- [/homomorphism/]
-- @'pure' f '<*>' 'pure' x = 'pure' (f x)@
--
-- [/interchange/]
-- @u '<*>' 'pure' y = 'pure' ('$' y) '<*>' u@
--
-- The other methods have the following default definitions, which may
-- be overridden with equivalent specialized implementations:
--
-- @
-- u '*>' v = 'pure' ('const' 'id') '<*>' u '<*>' v
-- u '<*' v = 'pure' 'const' '<*>' u '<*>' v
-- @
--
-- As a consequence of these laws, the 'Functor' instance for @f@ will satisfy
--
-- @
-- 'fmap' f x = 'pure' f '<*>' x
-- @
--
-- If @f@ is also a 'Monad', it should satisfy @'pure' = 'return'@ and
-- @('<*>') = 'ap'@ (which implies that 'pure' and '<*>' satisfy the
-- applicative functor laws).
class Functor f => Applicative f where
    -- | Lift a value.
    pure :: a -> f a
    -- | Sequential application.
    (<*>) :: f (a -> b) -> f a -> f b
    -- | Sequence actions, discarding the value of the first argument.
    (*>) :: f a -> f b -> f b
    -- Default: run both, keep the second result.
    (*>) = liftA2 (const id)
    -- | Sequence actions, discarding the value of the second argument.
    (<*) :: f a -> f b -> f a
    -- Default: run both, keep the first result.
    (<*) = liftA2 const
-- | A monoid on applicative functors.
--
-- Minimal complete definition: 'empty' and '<|>'.
--
-- If defined, 'some' and 'many' should be the least solutions
-- of the equations:
--
-- * @some v = (:) '<$>' v '<*>' many v@
--
-- * @many v = some v '<|>' 'pure' []@
class Applicative f => Alternative f where
    -- | The identity of '<|>'
    empty :: f a
    -- | An associative binary operation
    (<|>) :: f a -> f a -> f a
    -- | One or more.
    -- Defined by mutual recursion with many_v; terminates only for
    -- functors (such as parsers) that can eventually fail.
    some :: f a -> f [a]
    some v = some_v
      where
        many_v = some_v <|> pure []
        some_v = (:) <$> v <*> many_v
    -- | Zero or more.
    many :: f a -> f [a]
    many v = many_v
      where
        many_v = some_v <|> pure []
        some_v = (:) <$> v <*> many_v
-- instances for Prelude types
-- All of these reuse the pre-existing Monad instances via 'return'
-- and 'ap' (this module predates Applicative being a superclass of
-- Monad).
instance Applicative Maybe where
    pure = return
    (<*>) = ap
instance Alternative Maybe where
    empty = Nothing
    Nothing <|> p = p
    Just x <|> _ = Just x
instance Applicative [] where
    pure = return
    (<*>) = ap
instance Alternative [] where
    empty = []
    (<|>) = (++)
instance Applicative IO where
    pure = return
    (<*>) = ap
#ifndef __NHC__
instance Applicative (ST s) where
    pure = return
    (<*>) = ap
instance Applicative (Lazy.ST s) where
    pure = return
    (<*>) = ap
#endif
#ifdef __GLASGOW_HASKELL__
instance Applicative STM where
    pure = return
    (<*>) = ap
-- STM's Alternative retries the first action and falls back to the
-- second, mirroring 'orElse'.
instance Alternative STM where
    empty = retry
    (<|>) = orElse
#endif
-- The "reader" applicative: both sides receive the same environment.
instance Applicative ((->) a) where
    pure = const
    (<*>) f g x = f x (g x)
-- The "writer" applicative: first components accumulate monoidally.
instance Monoid a => Applicative ((,) a) where
    pure x = (mempty, x)
    (u, f) <*> (v, x) = (u `mappend` v, f x)
-- Short-circuits on the first Left encountered.
instance Applicative (Either e) where
    pure = Right
    Left  e <*> _ = Left e
    Right f <*> r = fmap f r
-- new instances
-- | A phantom-typed constant functor: holds an @a@, ignores the @b@.
newtype Const a b = Const { getConst :: a }
-- fmap cannot touch the stored value (the b is phantom).
instance Functor (Const m) where
    fmap _ (Const v) = Const v
-- Application accumulates the stored monoid values.
instance Monoid m => Applicative (Const m) where
    pure _ = Const mempty
    Const f <*> Const v = Const (f `mappend` v)
-- | Turn any 'Monad' into a 'Functor'/'Applicative'/'Alternative'
-- by delegating to liftM/ap/mplus.
newtype WrappedMonad m a = WrapMonad { unwrapMonad :: m a }
instance Monad m => Functor (WrappedMonad m) where
    fmap f (WrapMonad v) = WrapMonad (liftM f v)
instance Monad m => Applicative (WrappedMonad m) where
    pure = WrapMonad . return
    WrapMonad f <*> WrapMonad v = WrapMonad (f `ap` v)
instance MonadPlus m => Alternative (WrappedMonad m) where
    empty = WrapMonad mzero
    WrapMonad u <|> WrapMonad v = WrapMonad (u `mplus` v)
-- | Turn any 'Arrow' (from a fixed input @b@) into an 'Applicative';
-- application fans the input to both arrows ('&&&') and applies.
newtype WrappedArrow a b c = WrapArrow { unwrapArrow :: a b c }
instance Arrow a => Functor (WrappedArrow a b) where
    fmap f (WrapArrow a) = WrapArrow (a >>> arr f)
instance Arrow a => Applicative (WrappedArrow a b) where
    pure x = WrapArrow (arr (const x))
    WrapArrow f <*> WrapArrow v = WrapArrow (f &&& v >>> arr (uncurry id))
instance (ArrowZero a, ArrowPlus a) => Alternative (WrappedArrow a b) where
    empty = WrapArrow zeroArrow
    WrapArrow u <|> WrapArrow v = WrapArrow (u <+> v)
-- | Lists, but with an 'Applicative' functor based on zipping, so that
--
-- @f '<$>' 'ZipList' xs1 '<*>' ... '<*>' 'ZipList' xsn = 'ZipList' (zipWithn f xs1 ... xsn)@
--
newtype ZipList a = ZipList { getZipList :: [a] }
instance Functor ZipList where
    fmap f (ZipList xs) = ZipList (map f xs)
-- pure must be an infinite repetition so that it is an identity for
-- the zipping (<*>); the result is as long as the shorter argument.
instance Applicative ZipList where
    pure x = ZipList (repeat x)
    ZipList fs <*> ZipList xs = ZipList (zipWith id fs xs)
-- extra functions
-- | A variant of '<*>' with the arguments reversed: the value comes
-- first, the function-carrying action second (effects still run
-- left to right).
(<**>) :: Applicative f => f a -> f (a -> b) -> f b
(<**>) = liftA2 (\x fn -> fn x)
-- | Lift a function to actions.
-- This function may be used as a value for `fmap` in a `Functor`
-- instance (it is defined via 'pure' and '<*>' alone).
liftA :: Applicative f => (a -> b) -> f a -> f b
liftA g = (<*>) (pure g)
-- | Lift a binary function to actions: map it over the first action,
-- then apply to the second.
liftA2 :: Applicative f => (a -> b -> c) -> f a -> f b -> f c
liftA2 g fa fb = fmap g fa <*> fb
-- | Lift a ternary function to actions, chaining applications left
-- to right.
liftA3 :: Applicative f => (a -> b -> c -> d) -> f a -> f b -> f c -> f d
liftA3 g fa fb fc = fmap g fa <*> fb <*> fc
-- | One or none: try the action, wrapping success in 'Just';
-- fall back to a pure 'Nothing' if it fails.
optional :: Alternative f => f a -> f (Maybe a)
optional act = fmap Just act <|> pure Nothing
|
ssaavedra/liquidhaskell
|
benchmarks/base-4.5.1.0/Control/Applicative.hs
|
bsd-3-clause
| 7,813
| 137
| 23
| 1,812
| 1,956
| 1,117
| 839
| 110
| 1
|
{-# LANGUAGE FlexibleContexts, FlexibleInstances #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
-----------------------------------------------------------------------------
{- |
Module : Numeric.LinearAlgebra.Algorithms
Copyright : (c) Alberto Ruiz 2006-9
License : GPL-style
Maintainer : Alberto Ruiz (aruiz at um dot es)
Stability : provisional
Portability : uses ffi
High level generic interface to common matrix computations.
Specific functions for particular base types can also be explicitly
imported from "Numeric.LinearAlgebra.LAPACK".
-}
-----------------------------------------------------------------------------
module Numeric.LinearAlgebra.Algorithms (
-- * Supported types
Field(),
-- * Linear Systems
linearSolve,
luSolve,
cholSolve,
linearSolveLS,
linearSolveSVD,
inv, pinv,
det, invlndet,
rank, rcond,
-- * Matrix factorizations
-- ** Singular value decomposition
svd,
fullSVD,
thinSVD,
compactSVD,
singularValues,
leftSV, rightSV,
-- ** Eigensystems
eig, eigSH, eigSH',
eigenvalues, eigenvaluesSH, eigenvaluesSH',
geigSH',
-- ** QR
qr, rq,
-- ** Cholesky
chol, cholSH, mbCholSH,
-- ** Hessenberg
hess,
-- ** Schur
schur,
-- ** LU
lu, luPacked,
-- * Matrix functions
expm,
sqrtm,
matFunc,
-- * Nullspace
nullspacePrec,
nullVector,
nullspaceSVD,
orth,
-- * Norms
Normed(..), NormType(..),
relativeError,
-- * Misc
eps, peps, i,
-- * Util
haussholder,
unpackQR, unpackHess,
pinvTol,
ranksv
) where
import Data.Packed.Internal hiding ((//))
import Data.Packed.Matrix
import Numeric.LinearAlgebra.LAPACK as LAPACK
import Data.List(foldl1')
import Data.Array
import Numeric.ContainerBoot
{- | Class used to define generic linear algebra computations for both real and complex matrices. Only double precision is supported in this version (we can
transform single precision objects using 'single' and 'double').
-}
class (Product t,
       Convert t,
       Container Vector t,
       Container Matrix t,
       Normed Matrix t,
       Normed Vector t) => Field t where
    -- Primed names are the per-type implementations that the public
    -- wrappers defined later in this module delegate to.
    svd'            :: Matrix t -> (Matrix t, Vector Double, Matrix t)
    thinSVD'        :: Matrix t -> (Matrix t, Vector Double, Matrix t)
    sv'             :: Matrix t -> Vector Double
    luPacked'       :: Matrix t -> (Matrix t, [Int])
    luSolve'        :: (Matrix t, [Int]) -> Matrix t -> Matrix t
    linearSolve'    :: Matrix t -> Matrix t -> Matrix t
    cholSolve'      :: Matrix t -> Matrix t -> Matrix t
    linearSolveSVD' :: Matrix t -> Matrix t -> Matrix t
    linearSolveLS'  :: Matrix t -> Matrix t -> Matrix t
    eig'            :: Matrix t -> (Vector (Complex Double), Matrix (Complex Double))
    eigSH''         :: Matrix t -> (Vector Double, Matrix t)
    eigOnly         :: Matrix t -> Vector (Complex Double)
    eigOnlySH       :: Matrix t -> Vector Double
    cholSH'         :: Matrix t -> Matrix t
    mbCholSH'       :: Matrix t -> Maybe (Matrix t)
    qr'             :: Matrix t -> (Matrix t, Matrix t)
    hess'           :: Matrix t -> (Matrix t, Matrix t)
    schur'          :: Matrix t -> (Matrix t, Matrix t)

-- Real case: delegates to the double precision LAPACK wrappers.
instance Field Double where
    svd' = svdRd
    thinSVD' = thinSVDRd
    sv' = svR
    luPacked' = luR
    luSolve' (l_u,perm) = lusR l_u perm
    linearSolve' = linearSolveR -- (luSolve . luPacked) ??
    cholSolve' = cholSolveR
    linearSolveLS' = linearSolveLSR
    linearSolveSVD' = linearSolveSVDR Nothing
    eig' = eigR
    eigSH'' = eigS
    eigOnly = eigOnlyR
    eigOnlySH = eigOnlyS
    cholSH' = cholS
    mbCholSH' = mbCholS
    qr' = unpackQR . qrR
    hess' = unpackHess hessR
    schur' = schurR

-- Complex case: same structure, delegating to the complex LAPACK wrappers.
instance Field (Complex Double) where
-- The CPP flag selects between two LAPACK SVD drivers.
#ifdef NOZGESDD
    svd' = svdC
    thinSVD' = thinSVDC
#else
    svd' = svdCd
    thinSVD' = thinSVDCd
#endif
    sv' = svC
    luPacked' = luC
    luSolve' (l_u,perm) = lusC l_u perm
    linearSolve' = linearSolveC
    cholSolve' = cholSolveC
    linearSolveLS' = linearSolveLSC
    linearSolveSVD' = linearSolveSVDC Nothing
    eig' = eigC
    eigOnly = eigOnlyC
    eigSH'' = eigH
    eigOnlySH = eigOnlyH
    cholSH' = cholH
    mbCholSH' = mbCholH
    qr' = unpackQR . qrC
    hess' = unpackHess hessC
    schur' = schurC
--------------------------------------------------------------
-- Small structural predicates used by the checked wrappers below.

square m = rows m == cols m            -- as many rows as columns
vertical m = rows m >= cols m          -- at least as many rows as columns
exactHermitian m = m `equal` ctrans m  -- exactly equal to its conjugate transpose
--------------------------------------------------------------
-- | Full singular value decomposition.
svd :: Field t => Matrix t -> (Matrix t, Vector Double, Matrix t)
svd = {-# SCC "svd" #-} svd'

-- | A version of 'svd' which returns only the @min (rows m) (cols m)@ singular vectors of @m@.
--
-- If @(u,s,v) = thinSVD m@ then @m == u \<> diag s \<> trans v@.
thinSVD :: Field t => Matrix t -> (Matrix t, Vector Double, Matrix t)
thinSVD = {-# SCC "thinSVD" #-} thinSVD'

-- | Singular values only.
singularValues :: Field t => Matrix t -> Vector Double
singularValues = {-# SCC "singularValues" #-} sv'

-- | A version of 'svd' which returns an appropriate diagonal matrix with the singular values.
--
-- If @(u,d,v) = fullSVD m@ then @m == u \<> d \<> trans v@.
fullSVD :: Field t => Matrix t -> (Matrix t, Matrix Double, Matrix t)
fullSVD m = (u,d,v) where
    (u,s,v) = svd m
    d = diagRect 0 s r c  -- singular values embedded in an r x c diagonal matrix
    r = rows m
    c = cols m

-- | Similar to 'thinSVD', returning only the nonzero singular values and the corresponding singular vectors.
compactSVD :: Field t => Matrix t -> (Matrix t, Vector Double, Matrix t)
compactSVD m = (u', subVector 0 d s, v') where
    (u,s,v) = thinSVD m
    d = rankSVD (1*eps) m s `max` 1  -- keep at least one singular triple
    u' = takeColumns d u
    v' = takeColumns d v

-- | Singular values and all right singular vectors.
rightSV :: Field t => Matrix t -> (Vector Double, Matrix t)
rightSV m | vertical m = let (_,s,v) = thinSVD m in (s,v)  -- tall: thin SVD already has all of v
          | otherwise  = let (_,s,v) = svd m in (s,v)

-- | Singular values and all left singular vectors.
leftSV :: Field t => Matrix t -> (Matrix t, Vector Double)
leftSV m | vertical m = let (u,s,_) = svd m in (u,s)
         | otherwise  = let (u,s,_) = thinSVD m in (u,s)

--------------------------------------------------------------

-- | Obtains the LU decomposition of a matrix in a compact data structure suitable for 'luSolve'.
luPacked :: Field t => Matrix t -> (Matrix t, [Int])
luPacked = {-# SCC "luPacked" #-} luPacked'

-- | Solution of a linear system (for several right hand sides) from the precomputed LU factorization obtained by 'luPacked'.
luSolve :: Field t => (Matrix t, [Int]) -> Matrix t -> Matrix t
luSolve = {-# SCC "luSolve" #-} luSolve'

-- | Solve a linear system (for square coefficient matrix and several right-hand sides) using the LU decomposition. For underconstrained or overconstrained systems use 'linearSolveLS' or 'linearSolveSVD'.
-- It is similar to 'luSolve' . 'luPacked', but @linearSolve@ raises an error if called on a singular system.
linearSolve :: Field t => Matrix t -> Matrix t -> Matrix t
linearSolve = {-# SCC "linearSolve" #-} linearSolve'

-- | Solve a symmetric or Hermitian positive definite linear system using a precomputed Cholesky decomposition obtained by 'chol'.
cholSolve :: Field t => Matrix t -> Matrix t -> Matrix t
cholSolve = {-# SCC "cholSolve" #-} cholSolve'

-- | Minimum norm solution of a general linear least squares problem Ax=B using the SVD. Admits rank-deficient systems but it is slower than 'linearSolveLS'. The effective rank of A is determined by treating as zero those singular valures which are less than 'eps' times the largest singular value.
linearSolveSVD :: Field t => Matrix t -> Matrix t -> Matrix t
linearSolveSVD = {-# SCC "linearSolveSVD" #-} linearSolveSVD'

-- | Least squared error solution of an overconstrained linear system, or the minimum norm solution of an underconstrained system. For rank-deficient systems use 'linearSolveSVD'.
linearSolveLS :: Field t => Matrix t -> Matrix t -> Matrix t
linearSolveLS = {-# SCC "linearSolveLS" #-} linearSolveLS'
--------------------------------------------------------------

-- | Eigenvalues and eigenvectors of a general square matrix.
--
-- If @(s,v) = eig m@ then @m \<> v == v \<> diag s@
eig :: Field t => Matrix t -> (Vector (Complex Double), Matrix (Complex Double))
eig = {-# SCC "eig" #-} eig'

-- | Eigenvalues of a general square matrix.
eigenvalues :: Field t => Matrix t -> Vector (Complex Double)
eigenvalues = {-# SCC "eigenvalues" #-} eigOnly

-- | Similar to 'eigSH' without checking that the input matrix is hermitian or symmetric. It works with the upper triangular part.
eigSH' :: Field t => Matrix t -> (Vector Double, Matrix t)
eigSH' = {-# SCC "eigSH'" #-} eigSH''

-- | Similar to 'eigenvaluesSH' without checking that the input matrix is hermitian or symmetric. It works with the upper triangular part.
eigenvaluesSH' :: Field t => Matrix t -> Vector Double
eigenvaluesSH' = {-# SCC "eigenvaluesSH'" #-} eigOnlySH

-- | Eigenvalues and Eigenvectors of a complex hermitian or real symmetric matrix.
--
-- If @(s,v) = eigSH m@ then @m == v \<> diag s \<> ctrans v@
eigSH :: Field t => Matrix t -> (Vector Double, Matrix t)
-- Checked variant: rejects inputs that are not exactly hermitian.
eigSH m | exactHermitian m = eigSH' m
        | otherwise = error "eigSH requires complex hermitian or real symmetric matrix"

-- | Eigenvalues of a complex hermitian or real symmetric matrix.
eigenvaluesSH :: Field t => Matrix t -> Vector Double
eigenvaluesSH m | exactHermitian m = eigenvaluesSH' m
                | otherwise = error "eigenvaluesSH requires complex hermitian or real symmetric matrix"
--------------------------------------------------------------

-- | QR factorization.
--
-- If @(q,r) = qr m@ then @m == q \<> r@, where q is unitary and r is upper triangular.
qr :: Field t => Matrix t -> (Matrix t, Matrix t)
qr = {-# SCC "qr" #-} qr'

-- | RQ factorization.
--
-- If @(r,q) = rq m@ then @m == r \<> q@, where q is unitary and r is upper triangular.
rq :: Field t => Matrix t -> (Matrix t, Matrix t)
-- Computed from 'qr' on the 180-degree-rotated transpose, undoing the
-- rotation on the factors.
rq m = {-# SCC "rq" #-} (r,q) where
    (q',r') = qr $ trans $ rev1 m
    r = rev2 (trans r')
    q = rev2 (trans q')
    rev1 = flipud . fliprl  -- rotate by 180 degrees
    rev2 = fliprl . flipud

-- | Hessenberg factorization.
--
-- If @(p,h) = hess m@ then @m == p \<> h \<> ctrans p@, where p is unitary
-- and h is in upper Hessenberg form (it has zero entries below the first subdiagonal).
hess :: Field t => Matrix t -> (Matrix t, Matrix t)
hess = hess'

-- | Schur factorization.
--
-- If @(u,s) = schur m@ then @m == u \<> s \<> ctrans u@, where u is unitary
-- and s is a Shur matrix. A complex Schur matrix is upper triangular. A real Schur matrix is
-- upper triangular in 2x2 blocks.
--
-- \"Anything that the Jordan decomposition can do, the Schur decomposition
-- can do better!\" (Van Loan)
schur :: Field t => Matrix t -> (Matrix t, Matrix t)
schur = schur'

-- | Similar to 'cholSH', but instead of an error (e.g., caused by a matrix not positive definite) it returns 'Nothing'.
mbCholSH :: Field t => Matrix t -> Maybe (Matrix t)
mbCholSH = {-# SCC "mbCholSH" #-} mbCholSH'

-- | Similar to 'chol', without checking that the input matrix is hermitian or symmetric. It works with the upper triangular part.
cholSH :: Field t => Matrix t -> Matrix t
cholSH = {-# SCC "cholSH" #-} cholSH'

-- | Cholesky factorization of a positive definite hermitian or symmetric matrix.
--
-- If @c = chol m@ then @c@ is upper triangular and @m == ctrans c \<> c@.
chol :: Field t => Matrix t -> Matrix t
-- Checked variant: rejects inputs that are not exactly hermitian.
chol m | exactHermitian m = cholSH m
       | otherwise = error "chol requires positive definite complex hermitian or real symmetric matrix"
-- | Joint computation of inverse and logarithm of determinant of a square matrix.
invlndet :: (Floating t, Field t)
         => Matrix t
         -> (Matrix t, (t, t)) -- ^ (inverse, (log abs det, sign or phase of det))
invlndet m | square m = (im,(ladm,sdm))
           | otherwise = error $ "invlndet of nonsquare "++ shSize m ++ " matrix"
    where
    lp@(lup,perm) = luPacked m
    s = signlp (rows m) perm          -- sign of the row permutation
    dg = toList $ takeDiag $ lup      -- diagonal of the packed LU factor
    ladm = sum $ map (log.abs) dg     -- log |det| as a sum of logs, avoiding overflow
    sdm = s* product (map signum dg)  -- sign (or phase) of the determinant
    im = luSolve lp (ident (rows m))  -- inverse reuses the same LU factorization
-- | Determinant of a square matrix. To avoid possible overflow or underflow use 'invlndet'.
det :: Field t => Matrix t -> t
det m
  | square m  = {-# SCC "det" #-} sign * product (toList (takeDiag lup))
  | otherwise = error $ "det of nonsquare "++ shSize m ++ " matrix"
  where
    -- Product of the LU diagonal, corrected by the permutation sign.
    (lup, perm) = luPacked m
    sign        = signlp (rows m) perm
-- | Explicit LU factorization of a general matrix.
--
-- If @(l,u,p,s) = lu m@ then @m == p \<> l \<> u@, where l is lower triangular,
-- u is upper triangular, p is a permutation matrix and s is the signature of the permutation.
lu :: Field t => Matrix t -> (Matrix t, Matrix t, Matrix t, t)
lu = luFact . luPacked

-- | Inverse of a square matrix. See also 'invlndet'.
inv :: Field t => Matrix t -> Matrix t
-- Solves m x = I rather than forming the inverse element by element.
inv m | square m = m `linearSolve` ident (rows m)
      | otherwise = error $ "inv of nonsquare "++ shSize m ++ " matrix"

-- | Pseudoinverse of a general matrix.
pinv :: Field t => Matrix t -> Matrix t
pinv m = linearSolveSVD m (ident (rows m))
-- | Numeric rank of a matrix from the SVD decomposition.
rankSVD :: Element t
        => Double        -- ^ numeric zero (e.g. 1*'eps')
        -> Matrix t      -- ^ input matrix m
        -> Vector Double -- ^ 'sv' of m
        -> Int           -- ^ rank of m
rankSVD teps m s = ranksv teps maxDim (toList s)
  where
    -- The tolerance in 'ranksv' scales with the larger matrix dimension.
    maxDim = max (rows m) (cols m)
-- | Numeric rank of a matrix from its singular values.
ranksv :: Double   -- ^ numeric zero (e.g. 1*'eps')
       -> Int      -- ^ maximum dimension of the matrix
       -> [Double] -- ^ singular values
       -> Int      -- ^ rank of m
ranksv teps maxdim svs
  | largest > teps = length significant
  | otherwise      = 0
  where
    largest     = maximum svs  -- partial: assumes a non-empty list, as before
    -- Values below maxdim * largest * teps are treated as numerically zero.
    tol         = fromIntegral maxdim * largest * teps
    significant = filter (> tol) svs
-- | The machine precision of a Double: @eps = 2.22044604925031e-16@ (the value used by GNU-Octave).
eps :: Double
eps = 2.22044604925031e-16

-- | 1 + 0.5*peps == 1,  1 + 0.6*peps /= 1
peps :: RealFloat x => x
-- Computed from 'floatDigits', so it adapts to the floating-point type.
peps = x where x = 2.0 ** fromIntegral (1 - floatDigits x)

-- | The imaginary unit: @i = 0.0 :+ 1.0@
i :: Complex Double
i = 0:+1
-----------------------------------------------------------------------
-- | The nullspace of a matrix from its SVD decomposition.
nullspaceSVD :: Field t
             => Either Double Int -- ^ Left \"numeric\" zero (eg. 1*'eps'),
                                  -- or Right \"theoretical\" matrix rank.
             -> Matrix t          -- ^ input matrix m
             -> (Vector Double, Matrix t) -- ^ 'rightSV' of m
             -> [Vector t]        -- ^ list of unitary vectors spanning the nullspace
nullspaceSVD hint a (s,v) = drop k (toRows (ctrans v))
  where
    -- A Left hint supplies the numeric zero; otherwise the default 'eps'.
    tol = case hint of
            Left t  -> t
            Right _ -> eps
    -- A Right hint supplies the rank directly; otherwise estimate it.
    k = case hint of
          Right t -> t
          Left _  -> rankSVD tol a s
-- | The nullspace of a matrix. See also 'nullspaceSVD'.
nullspacePrec :: Field t
              => Double     -- ^ relative tolerance in 'eps' units (e.g., use 3 to get 3*'eps')
              -> Matrix t   -- ^ input matrix
              -> [Vector t] -- ^ list of unitary vectors spanning the nullspace
nullspacePrec t m = nullspaceSVD (Left (t*eps)) m (rightSV m)

-- | The nullspace of a matrix, assumed to be one-dimensional, with machine precision.
nullVector :: Field t => Matrix t -> Vector t
-- 'last' picks the vector associated with the smallest singular value.
nullVector = last . nullspacePrec 1

orth :: Field t => Matrix t -> [Vector t]
-- ^ Return an orthonormal basis of the range space of a matrix
orth m = take r $ toColumns u
    where
    (u,s,_) = compactSVD m
    r = ranksv eps (max (rows m) (cols m)) (toList s)  -- numeric rank bounds the basis size
------------------------------------------------------------------------

{- Pseudoinverse of a real matrix with the desired tolerance, expressed as a
multiplicative factor of the default tolerance used by GNU-Octave (see 'pinv').

@\> let m = 'fromLists' [[1,0, 0]
                    ,[0,1, 0]
                    ,[0,0,1e-10]]
\ --
\> 'pinv' m
1. 0.           0.
0. 1.           0.
0. 0. 10000000000.
\ --
\> pinvTol 1E8 m
1. 0. 0.
0. 1. 0.
0. 0. 1.@

-}

--pinvTol :: Double -> Matrix Double -> Matrix Double
pinvTol t m = v' `mXm` diag s' `mXm` trans u' where
    (u,s,v) = thinSVDRd m
    sl@(g:_) = toList s            -- g is the largest singular value
    s' = fromList . map rec $ sl   -- reciprocals of the significant singular values
    -- NOTE(review): values below the tolerance are mapped to 1 (not 0)
    -- before inversion — confirm this is the intended Octave-compatible
    -- behaviour.
    rec x = if x < g*tol then 1 else 1/x
    tol = (fromIntegral (max r c) * g * t * eps)
    r = rows m
    c = cols m
    d = dim s
    u' = takeColumns d u
    v' = takeColumns d v
---------------------------------------------------------------------
-- many thanks, quickcheck!

-- | Householder reflector: @ident - tau * w * w'@ with @w = asColumn v@.
haussholder :: (Field a) => a -> Vector a -> Matrix a
haussholder tau v = ident (dim v) `sub` (tau `scale` (w `mXm` ctrans w))
    where w = asColumn v

-- Rebuild a Householder vector from a packed column: (k-1) zeros, an
-- implicit leading 1, then the stored tail.
zh k v = fromList $ replicate (k-1) 0 ++ (1:drop k xs)
    where xs = toList v

-- Zero out the last k entries of a vector.
zt 0 v = v
zt k v = join [subVector 0 (dim v - k) v, konst 0 k]

unpackQR :: (Field t) => (Matrix t, Vector t) -> (Matrix t, Matrix t)
-- Expands the packed (reflectors + tau) representation into explicit q and r.
unpackQR (pq, tau) = {-# SCC "unpackQR" #-} (q,r)
    where cs = toColumns pq
          m = rows pq
          n = cols pq
          mn = min m n
          r = fromColumns $ zipWith zt ([m-1, m-2 .. 1] ++ repeat 0) cs  -- upper triangle of the packed form
          vs = zipWith zh [1..mn] cs                                     -- the Householder vectors
          hs = zipWith haussholder (toList tau) vs
          q = foldl1' mXm hs                                             -- q is the product of the reflectors

unpackHess :: (Field t) => (Matrix t -> (Matrix t,Vector t)) -> Matrix t -> (Matrix t, Matrix t)
unpackHess hf m
    | rows m == 1 = ((1><1)[1],m)  -- trivial 1x1 case: p is the identity
    | otherwise = (uH . hf) m

-- Unpack the Hessenberg factorization, analogous to 'unpackQR' but with
-- the reflectors starting one row lower.
uH (pq, tau) = (p,h)
    where cs = toColumns pq
          m = rows pq
          n = cols pq
          mn = min m n
          h = fromColumns $ zipWith zt ([m-2, m-3 .. 1] ++ repeat 0) cs
          vs = zipWith zh [2..mn] cs
          hs = zipWith haussholder (toList tau) vs
          p = foldl1' mXm hs
--------------------------------------------------------------------------

-- | Reciprocal of the 2-norm condition number of a matrix, computed from the singular values.
rcond :: Field t => Matrix t -> Double
-- smallest / largest singular value; partial on an empty spectrum
rcond m = last s / head s
    where s = toList (singularValues m)

-- | Number of linearly independent rows or columns.
rank :: Field t => Matrix t -> Int
rank m = rankSVD eps m (singularValues m)
{-
expm' m = case diagonalize (complex m) of
    Just (l,v) -> v `mXm` diag (exp l) `mXm` inv v
    Nothing -> error "Sorry, expm not yet implemented for non-diagonalizable matrices"
  where exp = vectorMapC Exp
-}

-- Try to diagonalize m: Just (eigenvalues, eigenvector matrix) when the
-- eigenvector matrix has full rank, Nothing otherwise.
diagonalize m = if rank v == n
                then Just (l,v)
                else Nothing
    where n = rows m
          (l,v) = if exactHermitian m
                  then let (l',v') = eigSH m in (real l', v')  -- hermitian: real eigenvalues
                  else eig m

-- | Generic matrix functions for diagonalizable matrices. For instance:
--
-- @logm = matFunc log@
--
matFunc :: (Complex Double -> Complex Double) -> Matrix (Complex Double) -> Matrix (Complex Double)
matFunc f m = case diagonalize m of
    Just (l,v) -> v `mXm` diag (mapVector f l) `mXm` inv v  -- apply f to the spectrum
    Nothing -> error "Sorry, matFunc requires a diagonalizable matrix"
--------------------------------------------------------------

-- Error bound for the degree-(p,q) Pade approximant
-- (Golub & Van Loan, Algorithm 11.3.1).
golubeps :: Integer -> Integer -> Double
golubeps p q = scalefac * fromIntegral numer / fromIntegral denom
  where
    scalefac = 2 ^^ (3 - p - q)
    numer    = fact p * fact q
    denom    = fact (p + q) * fact (p + q + 1)
    fact n   = product [1 .. n]

-- Table of (degree, error bound) pairs for increasing diagonal degrees.
epslist = [ (fromIntegral k, golubeps k k) | k <- [1..] ]

-- Smallest degree whose error bound is below delta.
geps delta = head [ k | (k, g) <- epslist, g < delta ]
{- | Matrix exponential. It uses a direct translation of Algorithm 11.3.1 in Golub & Van Loan,
based on a scaled Pade approximation.
-}
expm :: Field t => Matrix t -> Matrix t
expm = expGolub

expGolub :: ( Fractional t, Element t, Field t
            , Normed Matrix t
            , RealFrac (RealOf t)
            , Floating (RealOf t)
            ) => Matrix t -> Matrix t
expGolub m = iterate msq f !! j
    where
    j = max 0 $ floor $ logBase 2 $ pnorm Infinity m  -- scaling: number of final squarings
    a = m */ fromIntegral ((2::Int)^j)                -- scaled-down matrix
    q = geps eps -- 7 steps
    eye = ident (rows m)
    -- One step of the Pade numerator/denominator recurrence:
    -- (step index, coefficient, current power, numerator, denominator).
    work (k,c,x,n,d) = (k',c',x',n',d')
        where
        k' = k+1
        c' = c * fromIntegral (q-k+1) / fromIntegral ((2*q-k+1)*k)
        x' = a <> x
        n' = n |+| (c' .* x')
        d' = d |+| (((-1)^k * c') .* x')
    (_,_,_,nf,df) = iterate work (1,1,eye,eye,eye) !! q
    f = linearSolve df nf  -- Pade approximant of exp(a)
    msq x = x <> x         -- repeated squaring undoes the scaling
    (<>) = multiply
    v */ x = scale (recip x) v
    (.*) = scale
    (|+|) = add
--------------------------------------------------------------

{- | Matrix square root. Currently it uses a simple iterative algorithm described in Wikipedia.
It only works with invertible matrices that have a real solution. For diagonalizable matrices you can try @matFunc sqrt@.

@m = (2><2) [4,9
           ,0,4] :: Matrix Double@

@\>sqrtm m
(2><2)
 [ 2.0, 2.25
 , 0.0,  2.0 ]@
-}
sqrtm :: Field t => Matrix t -> Matrix t
sqrtm = sqrtmInv

-- Coupled inverse iteration; stops when successive y iterates agree to
-- within 'peps' in the 1-norm.
sqrtmInv x = fst $ fixedPoint $ iterate f (x, ident (rows x))
    where fixedPoint (a:b:rest) | pnorm PNorm1 (fst a |-| fst b) < peps = a
                                | otherwise = fixedPoint (b:rest)
          fixedPoint _ = error "fixedpoint with impossible inputs"
          f (y,z) = (0.5 .* (y |+| inv z),
                     0.5 .* (inv y |+| z))
          (.*) = scale
          (|+|) = add
          (|-|) = sub
------------------------------------------------------------------
-- Sign of the permutation encoded by an LU pivot list: every position
-- that differs from its pivot flips the sign.
signlp r vals = foldl flipOnSwap 1 (zip [0 .. r-1] vals)
  where
    flipOnSwap s (pos, piv)
      | pos /= piv = negate s
      | otherwise  = s
-- One pivot application: exchange columns a and b of the array and flip
-- the accumulated sign when they differ.
swap (arr,s) (a,b) | a /= b = (arr // [(a, arr!b),(b,arr!a)],-s)
                   | otherwise = (arr,s)

-- Build the permutation matrix (and its sign) from an LU pivot list by
-- applying the recorded swaps to the identity's columns.
fixPerm r vals = (fromColumns $ elems res, sign)
    where
    v = [0..r-1]
    s = toColumns (ident r)
    (res,sign) = foldl swap (listArray (0,r-1) s, 1) (zip v vals)

-- r x c mask matrix: entry is v where the column index exceeds the row
-- index by at least h, and 1 - v elsewhere.
triang r c h v = (r><c) [el s t | s<-[0..r-1], t<-[0..c-1]]
    where el p q = if q-p>=h then v else 1 - v

-- Expand the packed LU representation into explicit (l, u, p, s);
-- the two guard branches handle the tall and wide cases.
luFact (l_u,perm) | r <= c = (l ,u ,p, s)
                  | otherwise = (l',u',p, s)
    where
    r = rows l_u
    c = cols l_u
    tu = triang r c 0 1  -- mask selecting the upper triangle
    tl = triang r c 0 0  -- mask selecting the strict lower triangle
    l = takeColumns r (l_u |*| tl) |+| diagRect 0 (konst 1 r) r r  -- unit diagonal added explicitly
    u = l_u |*| tu
    (p,s) = fixPerm r perm
    l' = (l_u |*| tl) |+| diagRect 0 (konst 1 c) r c
    u' = takeRows c (l_u |*| tu)
    (|+|) = add
    (|*|) = mul
---------------------------------------------------------------------------

-- Supported matrix/vector norms.
data NormType = Infinity | PNorm1 | PNorm2 | Frobenius

class (RealFloat (RealOf t)) => Normed c t where
    pnorm :: NormType -> c t -> RealOf t

instance Normed Vector Double where
    pnorm PNorm1 = norm1
    pnorm PNorm2 = norm2
    pnorm Infinity = normInf
    pnorm Frobenius = norm2  -- for vectors the Frobenius norm is the 2-norm

instance Normed Vector (Complex Double) where
    pnorm PNorm1 = norm1
    pnorm PNorm2 = norm2
    pnorm Infinity = normInf
    pnorm Frobenius = pnorm PNorm2

instance Normed Vector Float where
    pnorm PNorm1 = norm1
    pnorm PNorm2 = norm2
    pnorm Infinity = normInf
    pnorm Frobenius = pnorm PNorm2

instance Normed Vector (Complex Float) where
    pnorm PNorm1 = norm1
    pnorm PNorm2 = norm2
    pnorm Infinity = normInf
    pnorm Frobenius = pnorm PNorm2

-- Matrix norms: 1-norm is the maximum column sum, 2-norm the largest
-- singular value, infinity-norm the 1-norm of the transpose, and the
-- Frobenius norm the 2-norm of the flattened matrix.
instance Normed Matrix Double where
    pnorm PNorm1 = maximum . map (pnorm PNorm1) . toColumns
    pnorm PNorm2 = (@>0) . singularValues
    pnorm Infinity = pnorm PNorm1 . trans
    pnorm Frobenius = pnorm PNorm2 . flatten

instance Normed Matrix (Complex Double) where
    pnorm PNorm1 = maximum . map (pnorm PNorm1) . toColumns
    pnorm PNorm2 = (@>0) . singularValues
    pnorm Infinity = pnorm PNorm1 . trans
    pnorm Frobenius = pnorm PNorm2 . flatten

-- Single precision matrices are promoted to double for the 2-norm.
instance Normed Matrix Float where
    pnorm PNorm1 = maximum . map (pnorm PNorm1) . toColumns
    pnorm PNorm2 = realToFrac . (@>0) . singularValues . double
    pnorm Infinity = pnorm PNorm1 . trans
    pnorm Frobenius = pnorm PNorm2 . flatten

instance Normed Matrix (Complex Float) where
    pnorm PNorm1 = maximum . map (pnorm PNorm1) . toColumns
    pnorm PNorm2 = realToFrac . (@>0) . singularValues . double
    pnorm Infinity = pnorm PNorm1 . trans
    pnorm Frobenius = pnorm PNorm2 . flatten

-- | Approximate number of common digits in the maximum element.
relativeError :: (Normed c t, Container c t) => c t -> c t -> Int
relativeError x y = dig (norm (x `sub` y) / norm x)
    where norm = pnorm Infinity
          dig r = round $ -logBase 10 (realToFrac r :: Double)
----------------------------------------------------------------------

-- | Generalized symmetric positive definite eigensystem Av = lBv,
-- for A and B symmetric, B positive definite (conditions not checked).
geigSH' :: Field t
        => Matrix t -- ^ A
        -> Matrix t -- ^ B
        -> (Vector Double, Matrix t)
geigSH' a b = (l,v')
    where
    u = cholSH b              -- Cholesky factor of B
    iu = inv u
    c = ctrans iu <> a <> iu  -- reduce to an ordinary symmetric problem
    (l,v) = eigSH' c
    v' = iu <> v              -- map eigenvectors back to the original problem
    (<>) = mXm
|
mightymoose/liquidhaskell
|
benchmarks/hmatrix-0.15.0.1/lib/Numeric/LinearAlgebra/Algorithms.hs
|
bsd-3-clause
| 25,190
| 25
| 16
| 6,549
| 7,040
| 3,693
| 3,347
| -1
| -1
|
-----------------------------------------------------------------------------
-- |
-- Module : Test
-- Copyright : (c) Simon Marlow 2002
-- License : BSD-style
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- This module illustrates & tests most of the features of Haddock.
-- Testing references from the description: 'T', 'f', 'g', 'Visible.visible'.
--
-----------------------------------------------------------------------------
-- This is plain comment, ignored by Haddock.
module Test (
-- Section headings are introduced with '-- *':
-- * Type declarations
-- Subsection headings are introduced with '-- **' and so on.
-- ** Data types
T(..), T2, T3(..), T4(..), T5(..), T6(..),
N1(..), N2(..), N3(..), N4, N5(..), N6(..), N7(..),
-- ** Records
R(..), R1(..),
-- | test that we can export record selectors on their own:
p, q, u,
-- * Class declarations
C(a,b), D(..), E, F(..),
-- | Test that we can export a class method on its own:
a,
-- * Function types
f, g,
-- * Auxiliary stuff
-- $aux1
-- $aux2
-- $aux3
-- $aux4
-- $aux5
-- $aux6
-- $aux7
-- $aux8
-- $aux9
-- $aux10
-- $aux11
-- $aux12
-- | This is some inline documentation in the export list
--
-- > a code block using bird-tracks
-- > each line must begin with > (which isn't significant unless it
-- > is at the beginning of the line).
-- * A hidden module
module Hidden,
-- * A visible module
module Visible,
{-| nested-style doc comments -}
-- * Existential \/ Universal types
Ex(..),
-- * Type signatures with argument docs
k, l, m, o,
-- * A section
-- and without an intervening comma:
-- ** A subsection
{-|
> a literal line
$ a non /literal/ line $
-}
f',
) where
import Hidden
import Visible
-- | This comment applies to the /following/ declaration
-- and it continues until the next non-comment line
data T a b
= A Int (Maybe Float) -- ^ This comment describes the 'A' constructor
| -- | This comment describes the 'B' constructor
B (T a b, T Int Float) -- ^
-- | An abstract data declaration
data T2 a b = T2 a b
-- | A data declaration with no documentation annotations on the constructors
data T3 a b = A1 a | B1 b
-- A data declaration with no documentation annotations at all
data T4 a b = A2 a | B2 b
-- A data declaration documentation on the constructors only
data T5 a b
= A3 a -- ^ documents 'A3'
| B3 b -- ^ documents 'B3'
-- | Testing alternative comment styles
data T6
-- | This is the doc for 'A4'
= A4
| B4
| -- ^ This is the doc for 'B4'
-- | This is the doc for 'C4'
C4
-- | A newtype
newtype N1 a = N1 a
-- | A newtype with a fieldname
newtype N2 a b = N2 {n :: a b}
-- | A newtype with a fieldname, documentation on the field
newtype N3 a b = N3 {n3 :: a b -- ^ this is the 'n3' field
}
-- | An abstract newtype - we show this one as data rather than newtype because
-- the difference isn\'t visible to the programmer for an abstract type.
newtype N4 a b = N4 a
newtype N5 a b = N5 {n5 :: a b -- ^ no docs on the datatype or the constructor
}
newtype N6 a b = N6 {n6 :: a b
}
-- ^ docs on the constructor only
-- | docs on the newtype and the constructor
newtype N7 a b = N7 {n7 :: a b
}
-- ^ The 'N7' constructor
class (D a) => C a where
-- |this is a description of the 'a' method
a :: IO a
b :: [a]
-- ^ this is a description of the 'b' method
c :: a -- c is hidden in the export list
-- ^ This comment applies to the /previous/ declaration (the 'C' class)
class D a where
d :: T a b
e :: (a,a)
-- ^ This is a class declaration with no separate docs for the methods
instance D Int where
d = undefined
e = undefined
-- instance with a qualified class name
instance Test.D Float where
d = undefined
e = undefined
class E a where
ee :: a
-- ^ This is a class declaration with no methods (or no methods exported)
-- This is a class declaration with no documentation at all
class F a where
ff :: a
-- | This is the documentation for the 'R' record, which has four fields,
-- 'p', 'q', 'r', and 's'.
data R =
-- | This is the 'C1' record constructor, with the following fields:
C1 { p :: Int -- ^ This comment applies to the 'p' field
, q :: forall a . a->a -- ^ This comment applies to the 'q' field
, -- | This comment applies to both 'r' and 's'
r,s :: Int
}
| C2 { t :: T1 -> (T2 Int Int)-> (T3 Bool Bool) -> (T4 Float Float) -> T5 () (),
u,v :: Int
}
-- ^ This is the 'C2' record constructor, also with some fields:
-- | Testing different record commenting styles
data R1
-- | This is the 'C3' record constructor
= C3 {
-- | The 's1' record selector
s1 :: Int
-- | The 's2' record selector
, s2 :: Int
, s3 :: Int -- NOTE: In the original examples/Test.hs in Haddock, there is an extra "," here.
-- Since GHC doesn't allow that, I have removed it in this file.
-- ^ The 's3' record selector
}
-- These section headers are only used when there is no export list to
-- give the structure of the documentation:
-- * This is a section header (level 1)
-- ** This is a section header (level 2)
-- *** This is a section header (level 3)
{-|
In a comment string we can refer to identifiers in scope with
single quotes like this: 'T', and we can refer to modules by
using double quotes: "Foo". We can add emphasis /like this/.
* This is a bulleted list
- This is the next item (different kind of bullet)
(1) This is an ordered list
2. This is the next item (different kind of bullet)
@
This is a block of code, which can include other markup: 'R'
formatting
is
significant
@
> this is another block of code
We can also include URLs in documentation: <http://www.haskell.org/>.
-}
f :: C a => a -> Int
-- | we can export foreign declarations too
foreign import ccall "header.h" g :: Int -> IO CInt
-- | this doc string has a parse error in it: \'
h :: Int
h = 42
-- $aux1 This is some documentation that is attached to a name ($aux1)
-- rather than a source declaration. The documentation may be
-- referred to in the export list using its name.
--
-- @ code block in named doc @
-- $aux2 This is some documentation that is attached to a name ($aux2)
-- $aux3
-- @ code block on its own in named doc @
-- $aux4
--
-- @ code block on its own in named doc (after newline) @
{- $aux5 a nested, named doc comment
with a paragraph,
@ and a code block @
-}
-- some tests for various arrangements of code blocks:
{- $aux6
>test
>test1
@ test2
test3
@
-}
{- $aux7
@
test1
test2
@
-}
{- $aux8
>test3
>test4
-}
{- $aux9
@
test1
test2
@
>test3
>test4
-}
{- $aux10
>test3
>test4
@
test1
test2
@
-}
-- This one is currently wrong (Haddock 0.4). The @...@ part is
-- interpreted as part of the bird-tracked code block.
{- $aux11
aux11:
>test3
>test4
@
test1
test2
@
-}
-- $aux12
-- > foo
--
-- > bar
--
-- | A data-type using existential\/universal types
data Ex a
= forall b . C b => Ex1 b
| forall b . Ex2 b
| forall b . C a => Ex3 b -- NOTE: I have added "forall b" here make GHC accept this file
| Ex4 (forall a . a -> a)
-- | This is a function with documentation for each argument
k :: T () () -- ^ This argument has type 'T'
-> (T2 Int Int) -- ^ This argument has type 'T2 Int Int'
-> (T3 Bool Bool -> T4 Float Float) -- ^ This argument has type @T3 Bool Bool -> T4 Float Float@
-> T5 () () -- ^ This argument has a very long description that should
-- hopefully cause some wrapping to happen when it is finally
-- rendered by Haddock in the generated HTML page.
-> IO () -- ^ This is the result type
-- This function has arg docs but no docs for the function itself
l :: (Int, Int, Float) -- ^ takes a triple
-> Int -- ^ returns an 'Int'
-- | This function has some arg docs
m :: R
-> N1 () -- ^ one of the arguments
-> IO Int -- ^ and the return value
-- | This function has some arg docs but not a return value doc
-- can't use the original name ('n') with GHC
newn :: R -- ^ one of the arguments, an 'R'
-> N1 () -- ^ one of the arguments
-> IO Int
newn = undefined
-- | A foreign import with argument docs
foreign import ccall unsafe "header.h"
o :: Float -- ^ The input float
-> IO Float -- ^ The output float
-- | We should be able to escape this: \#\#\#
-- p :: Int
-- can't use the above original definition with GHC
newp :: Int
newp = undefined
-- | a function with a prime can be referred to as 'f''
-- but f' doesn't get link'd 'f\''
f' :: Int
-- Add some definitions here so that this file can be compiled with GHC
data T1
f = undefined
f' = undefined
type CInt = Int
k = undefined
l = undefined
m = undefined
|
spacekitteh/smcghc
|
testsuite/tests/haddock/haddock_examples/Test.hs
|
bsd-3-clause
| 8,851
| 75
| 10
| 2,221
| 1,184
| 775
| 409
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -ddump-splices -dsuppress-uniques #-}
module Bug where
$([d| type family Foo a b
type instance Foo (Maybe a) b = Either (Maybe a) (Maybe b)
data family Bar a b
data instance Bar (Maybe a) b = BarMaybe (Maybe a) (Maybe b)
|])
{-
type instance Foo (Maybe a) b = Either (Maybe a) (Maybe b)
becomes
[TySynInstD Bug.Foo
(TySynEqn
[AppT
(ConT GHC.Base.Maybe)
(VarT a_6989586621679027317)
,VarT b_6989586621679027318]
(AppT
(AppT
(ConT Data.Either.Either)
(AppT
(ConT GHC.Base.Maybe)
(VarT a_6989586621679027317)
)
)
(AppT (ConT GHC.Base.Maybe) (VarT b_6989586621679027318))
)
)
]
data instance Bar (Maybe a) b = BarMaybe (Maybe a) (Maybe b)
becomes
[DataInstD [] Bug.Bar
[AppT
(ConT GHC.Base.Maybe)
(VarT a_6989586621679027707)
,VarT b_6989586621679027708
]
Nothing
[NormalC
BarMaybe_6989586621679027706
[(Bang
NoSourceUnpackedness
NoSourceStrictness
,AppT
(ConT GHC.Base.Maybe)
(VarT a_6989586621679027707)
)
,(Bang
NoSourceUnpackedness
NoSourceStrictness
,AppT
(ConT GHC.Base.Maybe)
(VarT b_6989586621679027708)
)
]
]
[]]
-}
|
ezyang/ghc
|
testsuite/tests/printer/T13550.hs
|
bsd-3-clause
| 1,356
| 0
| 6
| 400
| 19
| 14
| 5
| 8
| 0
|
{-# LANGUAGE TemplateHaskell #-}
-- | Regression module for a GHC Template Haskell bug: a quoted
-- expression that constructs a record.  Kept deliberately minimal.
module T12130a where
import Language.Haskell.TH
-- | A one-field record; the field is only there so the quote below
-- can use record-construction syntax.
data Block = Block
  { blockSelector :: ()
  }
-- | A quoted expression building a 'Block' via record syntax.
block :: Q Exp
block =
  [| Block {
       -- Using record syntax is necessary to trigger the bug.
       blockSelector = ()
     }
  |]
|
shlevy/ghc
|
testsuite/tests/th/T12130a.hs
|
bsd-3-clause
| 283
| 0
| 9
| 87
| 44
| 28
| 16
| 9
| 1
|
{-# LANGUAGE TemplateHaskell #-}
-- | GHC testsuite module: two top-level Template Haskell splices with
-- an ordinary binding between them.  'genShadow1' / 'genShadow2' are
-- defined in T5434a (not visible here); presumably they generate
-- bindings that interact with the scope of 'v' -- confirm in T5434a.
module T5434 where
import T5434a
$(genShadow1)
-- | Plain binding placed between the two splices.
v :: Bool
v = True
$(genShadow2)
|
wxwxwwxxx/ghc
|
testsuite/tests/th/T5434.hs
|
bsd-3-clause
| 118
| 0
| 6
| 22
| 32
| 18
| 14
| 7
| 1
|
{-# LANGUAGE RecordWildCards #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Acronym (abbreviate)
-- | Test-suite entry point: run the specs with fast-fail so the first
-- failing case stops the run.
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
-- | One hspec example per element of 'cases', each checking
-- 'abbreviate' against the expected acronym.
specs :: Spec
specs = describe "abbreviate" $ for_ cases test
  where
    -- RecordWildCards ({..}) brings description/input/expected into scope.
    test Case {..} = it description $ abbreviate input `shouldBe` expected
-- | A single table-driven test case for 'abbreviate'.
data Case = Case { description :: String  -- ^ shown in the test output
                 , input :: String        -- ^ phrase to abbreviate
                 , expected :: String     -- ^ expected acronym
                 }
cases :: [Case]
cases = [ Case { description = "basic"
, input = "Portable Network Graphics"
, expected = "PNG"
}
, Case { description = "lowercase words"
, input = "Ruby on Rails"
, expected = "ROR"
}
-- Although this case was removed in specification 1.1.0,
-- the Haskell track has chosen to keep it,
-- since it makes the problem more interesting.
, Case { description = "camelcase"
, input = "HyperText Markup Language"
, expected = "HTML"
}
, Case { description = "punctuation"
, input = "First In, First Out"
, expected = "FIFO"
}
, Case { description = "all caps word"
, input = "GNU Image Manipulation Program"
, expected = "GIMP"
}
, Case { description = "punctuation without whitespace"
, input = "Complementary metal-oxide semiconductor"
, expected = "CMOS"
}
]
|
enolive/exercism
|
haskell/acronym/test/Tests.hs
|
mit
| 1,755
| 0
| 9
| 693
| 319
| 195
| 124
| 32
| 1
|
module Rebase.Control.Selective.Rigid.Freer
(
module Control.Selective.Rigid.Freer
)
where
import Control.Selective.Rigid.Freer
|
nikita-volkov/rebase
|
library/Rebase/Control/Selective/Rigid/Freer.hs
|
mit
| 131
| 0
| 5
| 12
| 26
| 19
| 7
| 4
| 0
|
module ARD.SamplerSpec where
import ARD.Randomize
import ARD.Sampler
import ARD.Vector
import Test.Hspec
spec :: Spec
spec = describe "Sampler" $ do
jitteredSamplerSpec
standardSamplerSpec
randomSamplerSpec
regularSamplerSpec
-- | Run a sampler constructor deterministically for testing: the
-- random state is seeded with the fixed int stream [0,1..] plus the
-- supplied doubles 'rs'.  NOTE(review): exact seeding semantics live
-- in 'mkRandomState'' -- confirm there.
randomSampler :: [Double] -> Randomize Sampler -> Sampler
randomSampler rs samplerFunc = fst $ runRandomized samplerFunc (mkRandomState' [0,1..] rs)
jitteredSamplerSpec :: Spec
jitteredSamplerSpec = describe "JitteredSampler" $ do
it "with non-random values matches RegularSampler" $
randomSampler [0.5,0.5..] (mkJittered 2) `shouldBe` randomSampler [0..] (mkRegular 2)
it "uses given list to jitter points" $ do
(head $ unitSquareSamples $ randomSampler [0,0..] (mkJittered 2)) `shouldBe` [Vector2 0 0, Vector2 0.5 0, Vector2 0 0.5, Vector2 0.5 0.5]
(head $ unitSquareSamples $ randomSampler [1,1..] (mkJittered 2)) `shouldBe` [Vector2 0.5 0.5, Vector2 1 0.5, Vector2 0.5 1, Vector2 1 1]
standardSamplerSpec :: Spec
standardSamplerSpec = describe "StandardSampler" $
it "has 1 point in the center" $ do
let
s = randomSampler [0..] mkStandard
numSamples s `shouldBe` 1
head (unitSquareSamples s) `shouldBe` [Vector2 0.5 0.5]
randomSamplerSpec :: Spec
randomSamplerSpec = describe "RandomSampler" $ do
it "has given number of samples" $ do
numSamples (randomSampler [0..] (mkRandom 2)) `shouldBe` 2
numSamples (randomSampler [0..] (mkRandom 3)) `shouldBe` 3
it "uses given list to generate samples" $
(head $ unitSquareSamples (randomSampler [0..4] (mkRandom 2))) `shouldNotBe` (head $ unitSquareSamples (randomSampler [1..5] (mkRandom 2)))
regularSamplerSpec :: Spec
regularSamplerSpec = describe "RegularSampler" $ do
it "has squared number of given argument" $ do
numSamples (randomSampler [0..] $ mkRegular 2) `shouldBe` 4
numSamples (randomSampler [0..] $ mkRegular 3) `shouldBe` 9
it "distributes evenly" $
(head $ unitSquareSamples (randomSampler [0..] $ mkRegular 2)) `shouldBe` [Vector2 0.25 0.25, Vector2 0.75 0.25, Vector2 0.25 0.75, Vector2 0.75 0.75]
|
crazymaik/ard-haskell
|
test/ARD/SamplerSpec.hs
|
mit
| 2,074
| 0
| 16
| 351
| 714
| 363
| 351
| 41
| 1
|
module PscInspect (
getImportableModules,
hasPursExtension
) where
import Control.Applicative
import Control.Arrow
import Data.List
import Data.Maybe
import qualified System.Directory as Directory
import qualified System.FilePath as FilePath
import System.FilePath ((</>))
-- | Directory (relative to the working directory) where Bower
-- installs dependencies.
bowerComponentDir :: FilePath
bowerComponentDir = "bower_components"
-- | Conventional source directory inside each Bower component.
bowerSourceDir :: FilePath
bowerSourceDir = "src"
-- | A PureScript module name; here just the base name of a file or
-- directory, not a dotted hierarchical name.
type ModuleName = String
-- | True exactly when the path carries a @.purs@ extension.
hasPursExtension :: FilePath -> Bool
hasPursExtension path = ".purs" == FilePath.takeExtension path
-- TODO Check if the dir contains any .purs files
isPurescriptModuleDir :: FilePath -> IO Bool
isPurescriptModuleDir = Directory.doesDirectoryExist
-- | Map a filesystem entry to a PureScript module name, if it looks
-- like one: either a @.purs@ file, or a directory (assumed to contain
-- PureScript sources -- see 'isPurescriptModuleDir').  Returns the
-- base name of the entry on success, 'Nothing' otherwise.
toPureScriptModule :: FilePath -> IO (Maybe ModuleName)
toPureScriptModule fp = do
  -- A .purs file qualifies immediately; otherwise fall back to the
  -- directory check.  This removes the duplicated
  -- `return $ Just $ takeBaseName` branches of the original if/else.
  isModule <- if hasPursExtension fp
                then return True
                else isPurescriptModuleDir fp
  return $ if isModule
             then Just (FilePath.takeBaseName fp)
             else Nothing
-- | List the PureScript module names provided by one Bower component.
-- Looks inside the component's @src@ directory; returns @[]@ when
-- that directory does not exist.
getBowerComponentModules :: FilePath -> IO [FilePath]
getBowerComponentModules bowerComponentPath = do
  let srcDir = bowerComponentPath </> bowerSourceDir
  srcDirExists <- Directory.doesDirectoryExist srcDir
  if srcDirExists
    then do
      srcContents <- getEntries srcDir
      pureScriptModules <- mapM toPureScriptModule srcContents
      -- keep only entries recognised as modules
      return $ catMaybes pureScriptModules
    else return []
-- | Drop the special "." and ".." entries from a directory listing.
removeDotDirs :: [FilePath] -> [FilePath]
removeDotDirs entries = filter (`notElem` [".", ".."]) entries
-- | List a directory's entries (minus "." and ".."), each prefixed
-- with the directory path itself so results are usable as paths.
getEntries :: FilePath -> IO [FilePath]
getEntries fp = do
  dirs <- Directory.getDirectoryContents fp
  return $ (fp </>) <$> removeDotDirs dirs
-- | Sort ascending and collapse duplicates, keeping one representative
-- of each run of equal values.
uniqueSort :: Ord a => [a] -> [a]
uniqueSort xs = map head (group (sort xs))
-- | Scan @bower_components@ under the current working directory and
-- return the sorted, de-duplicated union of the module names exported
-- by every installed component.
getImportableModules :: IO [String]
getImportableModules = do
  cwd <- Directory.getCurrentDirectory
  let bowerDir = cwd </> bowerComponentDir
  bowerComponentDirs <- getEntries bowerDir
  importableModules <- mapM getBowerComponentModules bowerComponentDirs
  return $ uniqueSort $ concat importableModules
|
sgronblo/psc-inspect
|
src/PscInspect.hs
|
mit
| 2,106
| 0
| 11
| 425
| 523
| 270
| 253
| 53
| 3
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell, OverloadedStrings, QuasiQuotes, NamedFieldPuns #-}
module Control.OperationalTransformation.Text0.Specs where
import qualified Control.OperationalTransformation as C
import Control.OperationalTransformation.JSON.QuasiQuote
import Control.OperationalTransformation.Text0
import Data.Aeson as A
import qualified Data.Text as T
import qualified Test.Hspec as H
import Test.Hspec hiding (shouldBe)
import Test.Tasty
import Test.Tasty.Hspec hiding (shouldBe)
-- These tests are taken directly from
-- https://github.com/ottypes/json0/blob/master/test/text0.coffee
-- | Total wrapper around 'C.compose' for spec code: a failed
-- composition aborts the test via 'error' instead of returning 'Left'.
compose :: Text0Operation -> Text0Operation -> Text0Operation
compose op1 op2 = case C.compose op1 op2 of
  Left err -> error err
  Right ret -> ret
-- Placeholders for parts of the json0 text0 suite that are not ported
-- yet (see the commented-out describe blocks below); evaluating any of
-- them fails by construction.
normalize = undefined
transformCursor = undefined
transformCursorLeft = undefined
transformCursorRight = undefined
-- | Total wrapper around 'C.transform'; failures abort via 'error'.
transform :: Text0Operation -> Text0Operation -> (Text0Operation, Text0Operation)
transform op1 op2 = case C.transform op1 op2 of
  Left err -> error err
  Right x -> x
-- for ghci
-- | Decode a JSON value into a 'Text0Operation'.  Partial: the
-- irrefutable @Success op@ pattern assumes the decode succeeds --
-- convenient interactively, unsafe elsewhere.
d :: A.Value -> Text0Operation
d jsonValue = op
  where
    Success op = T0 <$> fromJSON jsonValue
-- | Apply an operation to a text, aborting the test on failure.
apply :: T.Text -> Text0Operation -> T.Text
apply input op = case C.apply op input of
  Left err -> error err
  Right x -> x
-- | Transform @a@ against @b@, taking the left-hand result.
transformLeft :: Text0Operation -> Text0Operation -> Text0Operation
transformLeft a b = a'
  where (a', _) = transform a b
-- | Transform @a@ against @b@, taking the right-hand side -- note the
-- swapped argument order in the call to 'transform'.
transformRight :: Text0Operation -> Text0Operation -> Text0Operation
transformRight a b = a'
  where (_, a') = transform b a
-- | Flipped 'H.shouldBe' so specs read @shouldBe expected actual@.
shouldBe :: (Eq a, Show a) => a -> a -> Expectation
shouldBe = flip H.shouldBe
specs :: SpecWith ()
specs = do
describe "compose" $ do
-- Compose is actually pretty easy
it "is sane" $ do
shouldBe (compose [x|[]|] [x|[]|]) [x|[]|]
shouldBe (compose [x|[{"i":"x", "p":0}]|] [x|[]|]) [x|[{"i":"x", "p":0}]|]
shouldBe (compose [x|[]|] [x|[{"i":"x", "p":0}]|]) [x|[{"i":"x", "p":0}]|]
shouldBe (compose [x|[{"i":"y", "p":100}]|] [x|[{"i":"x", "p":0}]|]) [x|[{"i":"y", "p":100}, {"i":"x", "p":0}]|]
describe "transform" $ do
it "is sane" $ do
shouldBe [x|[]|] (transformLeft [x|[]|] [x|[]|])
shouldBe [x|[]|] (transformRight [x|[]|] [x|[]|])
shouldBe [x|[{"i":"y", "p":100}, {"i":"x", "p":0}]|] (transformLeft [x|[{"i":"y", "p":100}, {"i":"x", "p":0}]|] [x|[]|])
shouldBe [x|[]|] (transformRight [x|[]|] [x|[{"i":"y", "p":100}, {"i":"x", "p":0}]|])
it "inserts" $ do
shouldBe ([x|[{"i":"x", "p":10}]|], [x|[{"i":"a", "p":1}]|]) (transform [x|[{"i":"x", "p":9}]|] [x|[{"i":"a", "p":1}]|])
shouldBe ([x|[{"i":"x", "p":10}]|], [x|[{"i":"a", "p":11}]|]) (transform [x|[{"i":"x", "p":10}]|] [x|[{"i":"a", "p":10}]|])
shouldBe ([x|[{"i":"x", "p":10}]|], [x|[{"d":"a", "p":9}]|]) (transform [x|[{"i":"x", "p":11}]|] [x|[{"d":"a", "p":9}]|])
shouldBe ([x|[{"i":"x", "p":10}]|], [x|[{"d":"a", "p":10}]|]) (transform [x|[{"i":"x", "p":11}]|] [x|[{"d":"a", "p":10}]|])
shouldBe ([x|[{"i":"x", "p":11}]|], [x|[{"d":"a", "p":12}]|]) (transform [x|[{"i":"x", "p":11}]|] [x|[{"d":"a", "p":11}]|])
shouldBe [x|[{"i":"x", "p":10}]|] (transformLeft [x|[{"i":"x", "p":10}]|] [x|[{"d":"a", "p":11}]|])
shouldBe [x|[{"i":"x", "p":10}]|] (transformLeft [x|[{"i":"x", "p":10}]|] [x|[{"d":"a", "p":10}]|])
shouldBe ([x|[{"p":7,"i":"ab"}]|], [x|[{"p":0,"i":"abcdef"}]|]) (transform [x|[{"p":1,"i":"ab"}]|] [x|[{"p":0,"i":"abcdef"}]|])
shouldBe [x|[{"i":"x", "p":10}]|] (transformRight [x|[{"i":"x", "p":10}]|] [x|[{"d":"a", "p":10}]|])
it "deletes" $ do
shouldBe ([x|[{"d":"abc", "p":8}]|], [x|[{"d":"xy", "p":4}]|]) (transform [x|[{"d":"abc", "p":10}]|] [x|[{"d":"xy", "p":4}]|])
shouldBe ([x|[{"d":"ac", "p":10}]|], [x|[]|]) (transform [x|[{"d":"abc", "p":10}]|] [x|[{"d":"b", "p":11}]|])
shouldBe ([x|[]|], [x|[{"d":"ac", "p":10}]|]) (transform [x|[{"d":"b", "p":11}]|] [x|[{"d":"abc", "p":10}]|])
shouldBe ([x|[{"d":"a", "p":10}]|], [x|[]|]) (transform [x|[{"d":"abc", "p":10}]|] [x|[{"d":"bc", "p":11}]|])
shouldBe ([x|[{"d":"c", "p":10}]|], [x|[]|]) (transform [x|[{"d":"abc", "p":10}]|] [x|[{"d":"ab", "p":10}]|])
shouldBe ([x|[{"d":"a", "p":10}]|], [x|[{"d":"d", "p":10}]|]) (transform [x|[{"d":"abc", "p":10}]|] [x|[{"d":"bcd", "p":11}]|])
shouldBe ([x|[{"d":"d", "p":10}]|], [x|[{"d":"a", "p":10}]|]) (transform [x|[{"d":"bcd", "p":11}]|] [x|[{"d":"abc", "p":10}]|])
shouldBe ([x|[{"d":"abc", "p":10}]|], [x|[{"d":"xy", "p":10}]|]) (transform [x|[{"d":"abc", "p":10}]|] [x|[{"d":"xy", "p":13}]|])
shouldBe ([x|[{"p":1,"d":"q"},{"p":6,"d":"3"}]|], [x|[{"p":1,"i":"abcde"}]|])
(transform [x|[{"p":1,"d":"q3"}]|] [x|[{"p":2,"i":"abcde"}]|])
-- describe "transformCursor" $ do
-- it "is sane" $ do
-- shouldBe 0 (transformCursorRight 0 [x|[]|])
-- shouldBe 0 (transformCursorLeft 0 [x|[]|])
-- shouldBe 100 (transformCursor 100 [x|[]|])
-- it "works vs insert" $ do
-- shouldBe 0 (transformCursorRight 0 [x|{"i":"asdf", "p":100}|])
-- shouldBe 0 (transformCursorLeft 0 [x|{"i":"asdf", "p":100}|])
-- shouldBe 204 (transformCursorRight 200 [x|[{"i":"asdf", "p":100}]|])
-- shouldBe 204 (transformCursorLeft 200 [x|[{"i":"asdf", "p":100}]|])
-- shouldBe 104 (transformCursorRight 100 [x|[{"i":"asdf", "p":100}]|])
-- shouldBe 100 (transformCursorLeft 100 [x|[{"i":"asdf", "p":100}]|])
-- it "works vs delete" $ do
-- shouldBe 0 (transformCursorRight 0 [x|[{"d":"asdf", "p":100}]|])
-- shouldBe 0 (transformCursorLeft 0 [x|[{"d":"asdf", "p":100}]|])
-- shouldBe 0 (transformCursor 0 [x|[{"d":"asdf", "p":100}]|])
-- shouldBe 196 (transformCursor 200 [x|[{"d":"asdf", "p":100}]|])
-- shouldBe 100 (transformCursor 100 [x|[{"d":"asdf", "p":100}]|])
-- shouldBe 100 (transformCursor 102 [x|[{"d":"asdf", "p":100}]|])
-- shouldBe 100 (transformCursor 104 [x|[{"d":"asdf", "p":100}]|])
-- shouldBe 101 (transformCursor 105 [x|[{"d":"asdf", "p":100}]|])
-- describe "normalize" $ do
-- it "is sane" $ do
-- let testUnchanged = \op -> shouldBe op (normalize op)
-- testUnchanged [x|[]|]
-- testUnchanged [x|[{"i":"asdf", "p":100}]|]
-- testUnchanged [x|[{"i":"asdf", "p":100}, {"d":"fdsa", "p":123}]|]
-- it "adds missing "p":0" $ do
-- shouldBe [x|[{"i":"abc", "p":0}]|] (normalize [x|[{"i":"abc"}]|])
-- shouldBe [x|[{"d":"abc", "p":0}]|] (normalize [x|[{"d":"abc"}]|])
-- shouldBe [x|[{"i":"abc", "p":0}, {"d":"abc", "p":0}]|] (normalize [x|[{"i":"abc"}, {"d":"abc"}]|])
-- it "converts op to an array" $ do
-- shouldBe [x|[{"i":"abc", "p":0}]|] (normalize [x|[{"i":"abc", "p":0}]|])
-- shouldBe [x|[{"d":"abc", "p":0}]|] (normalize [x|[{"d":"abc", "p":0}]|])
-- it "works with a really simple op" $ do
-- shouldBe [x|[{"i":"abc", "p":0}]|] (normalize [x|[{"i":"abc"}]|])
-- it "compress inserts" $ do
-- shouldBe [x|[{"i":"xyzabc", "p":10}]|] (normalize [x|[{"i":"abc", "p":10}]|] [x|[{"i":"xyz", "p":10}]|])
-- shouldBe [x|[{"i":"axyzbc", "p":10}]|] (normalize [x|[{"i":"abc", "p":10}]|] [x|[{"i":"xyz", "p":11}]|])
-- shouldBe [x|[{"i":"abcxyz", "p":10}]|] (normalize [x|[{"i":"abc", "p":10}]|] [x|[{"i":"xyz", "p":13}]|])
-- it "doesnt compress separate inserts" $ do
-- let t = \op -> shouldBe op (normalize op)
-- t [x|[{"i":"abc", "p":10}, {"i":"xyz", "p":9}]|]
-- t [x|[{"i":"abc", "p":10}, {"i":"xyz", "p":14}]|]
-- it "compress deletes" $ do
-- shouldBe [x|[{"d":"xyabc", "p":8}]|] (normalize [x|[{"d":"abc", "p":10}, {"d":"xy", "p":8}]|])
-- shouldBe [x|[{"d":"xabcy", "p":9}]|] (normalize [x|[{"d":"abc", "p":10}, {"d":"xy", "p":9}]|])
-- shouldBe [x|[{"d":"abcxy", "p":10}]|] (normalize [x|[{"d":"abc", "p":10}, {"d":"xy", "p":10}]|])
-- it "doesnt compress separate deletes" $ do
-- let t = \op -> shouldBe op (normalize op)
-- t [x|[{"d":"abc", "p":10}, {"d":"xyz", "p":6}]|]
-- t [x|[{"d":"abc", "p":10}, {"d":"xyz", "p":11}]|]
-- | Run the Text0 spec suite under tasty.
main :: IO ()
main = (testSpec "Text0 specs" specs) >>= defaultMain
|
thomasjm/ot.hs
|
test/Control/OperationalTransformation/Text0/Specs.hs
|
mit
| 8,237
| 0
| 15
| 1,392
| 1,472
| 952
| 520
| 76
| 2
|
{-# LANGUAGE DeriveGeneric #-}
module Data.Schema where
import Utils.Utils
import Data.Types
import Data.Name
import CodeGen.HaskellCode
import qualified Data.UnionFind as UF
import Data.Tuple.HT
import Data.Maybe
import Data.List
import Control.Monad
import qualified Data.Map as Map
import Data.Serialize
import qualified Data.ByteString as BS
import GHC.Generics
data SimplicialComplex = SC [(VertID,Name)] [[VertID]] deriving (Generic)
data Schema = Schema SimplicialComplex [(VertID, HaskellType)] deriving (Generic)
type Simplex = [VertID]
type ConnectedSchema = Schema
schemaVertices (Schema (SC verts _) _) = map fst verts
schemaVertexNames (Schema (SC verts _) _) = verts
schemaSimplices (Schema (SC _ simps) _) = simps
schemaTypes (Schema _ ts) = ts
subSchemaSimplices (SubSchema simps _) = simps
showSubSchema (SubSchema simps sch) = "{" ++ (cim "," (\x -> "{" ++ (cim "," id x) ++ "}") nmedsimps) ++ "}"
where nmedsimps = map (map (fromJust.(flip schemaLookupVertexName sch))) simps
subSchemaVertices ss = nub $ concat $ subSchemaSimplices ss
data SubSchema = SubSchema [Simplex] Schema deriving (Generic)
face :: Int -> Simplex -> Simplex
face = deleteAt
instance Show SimplicialComplex where
show (SC verts simps) =
"schema " ++ " where\n vetices:\n" ++
(concatMap (\v -> " " ++ (show v) ++ "\n") verts) ++ "\n simplices:\n" ++
(concatMap (\vs -> " (" ++ (init $ tail $ show vs) ++ ")\n") simps)
instance Show Schema where
show (Schema (SC _ simps) types) =
"schema where\n vertices:\n" ++
(concatMap (\(v,t) -> " " ++ (show v) ++ " :: " ++ (show t) ++ "\n") types) ++ "\n simplices:\n" ++
(concatMap (\vs -> " (" ++ (init $ tail $ show vs) ++ ")\n") simps)
instance Serialize SimplicialComplex
instance Serialize Schema
instance Serialize SubSchema
instance Show SubSchema where
show (SubSchema simps _) =
"subschema containing\n" ++ (concatMap (\s -> " " ++ (show s) ++ "\n") simps)
instance Named SubSchema where
name (SubSchema simps _) = concatMap (\s -> "s" ++ (name s) ++ "_") simps
emptySimplicialComplex = SC [] []
emptySchema = Schema emptySimplicialComplex []
schemaLookupVertex a sch = fmap fst $ find ((==a).snd) $ schemaVertexNames sch
schemaLookupVertexName i sch = lookup i $ schemaVertexNames sch
-- | The Haskell type attached to a vertex.  Asking for a vertex that
-- is not part of the schema is a programming error and aborts.
typeLookup :: Schema -> VertID -> HaskellType
typeLookup (Schema _ ts) x =
  fromMaybe (error "typeLookup: vertex not in Schema") (lookup x ts)
-- | The types of a simplex' vertices, in vertex order.
univ :: Schema -> Simplex -> [HaskellType]
univ sch = map (typeLookup sch)
-- | Split a sub-schema into its connected components via union-find
-- over its simplices; each component keeps a reference to the full
-- parent schema.  (Cleanup: the original zipped the component list
-- with @[1..]@ and then ignored the index, while the lambda pattern
-- shadowed @simps@ -- both removed, behaviour unchanged.)
connectedComponants :: SubSchema -> [SubSchema]
connectedComponants (SubSchema simps schema) =
    map (\componentSimps -> SubSchema componentSimps schema) (map (map fst) simpGroups)
    where uf0 = UF.fromSets simps
          -- pair each simplex with its union-find representative, then
          -- group the simplices that share a representative
          simpGroups = sortGroupBy snd $ snd
                       $ mapAccumR (\uf s -> let (uf',rep) = UF.find uf (head s) in (uf',(s,rep))) uf0 simps
fullSubSchema sch = SubSchema (schemaSimplices sch) sch
containsSimplex :: SubSchema -> Simplex -> Bool
containsSimplex (SubSchema simps _) simp = any (simp`subset`) simps
containsSubSchema :: SubSchema -> SubSchema -> Bool
containsSubSchema ss (SubSchema simps _) = all (containsSimplex ss) simps
vertexSpan :: Schema -> [VertID] -> SubSchema
vertexSpan schema verts = SubSchema (map (intersect verts) $ schemaSimplices schema) schema
-- possible TODO: make this only add new simplices
insertSimplices :: [Simplex] -> Schema -> Schema
insertSimplices newSimps (Schema (SC verts simps) vs) = Schema (SC verts (newSimps ++ simps)) vs
schemaCoProduct :: [Schema] -> (Schema,[SchemaMap])
schemaCoProduct schs = (resultSchema,inclusions)
where n = length schs
renamed = map (\(i,(Schema (SC verts simps) types))
-> Schema
(SC (map (\(v,nmt) -> (n*v+i,nmt)) verts)
(map (map (\v -> n*v+i)) simps))
(map (\(v,t) -> (n*v+i,t)) types))
$ zip [0..] schs
resultSchema = foldr (\(Schema (SC s1 v1) t1) (Schema (SC s2 v2) t2) -> Schema (SC (s1++s2) (v1++v2)) (t1++t2))
(Schema (SC [] []) []) renamed
inclusions = map (\(i,s@(Schema _ ts)) -> SchemaMap s resultSchema $ map (\(v,t) -> (v,v*n+i,Lit "id")) ts)
$ zip [0..] schs
data SchemaMap = SchemaMap Schema Schema [(VertID,VertID,HaskellCode)] deriving (Generic)
instance Serialize SchemaMap
schemaMapCoDomain (SchemaMap _ trg _) = trg
schemaMapDomain (SchemaMap src _ _) = src
-- | The Haskell function a schema map attaches to a vertex.  Partial:
-- a vertex the map does not mention aborts with a diagnostic that
-- includes the whole map.
mapVertexFunc :: SchemaMap -> VertID -> HaskellCode
mapVertexFunc (SchemaMap _ _ f) v = case find (\(x,_,_) -> x==v) f of
  (Just (_,_,g)) -> g
  Nothing -> error $ "mapVertexFunc called on " ++ (show v) ++ " with map:\n" ++ (show f)
mapApplyVertex :: SchemaMap -> VertID -> VertID
mapApplyVertex (SchemaMap _ _ f) v = snd3 $ fromJust $ find (\(x,_,_) -> x==v) f
simplexImage :: SchemaMap -> Simplex -> Simplex
simplexImage (SchemaMap _ _ f) simp = map (\v -> snd3 $ fromJust $ find (\(x,_,_) -> x==v) f) simp
simplexIncluded :: SchemaMap -> Simplex -> Maybe Simplex
simplexIncluded (SchemaMap src _ f) vs = mapM (\v -> fmap fst3 $ find ((==v).snd3) f) vs
simplexPreimage :: SchemaMap -> Simplex -> SubSchema
simplexPreimage (SchemaMap src _ f) vs = vertexSpan src (map fst3 $ filter (\(_,u,_) -> u `elem` vs) f)
schemaImage :: SubSchema -> SchemaMap -> SubSchema
schemaImage (SubSchema simps _) f@(SchemaMap _ trg _) = SubSchema (map (simplexImage f) simps) trg
schemaPreimage :: SubSchema -> SchemaMap -> SubSchema
schemaPreimage (SubSchema simps _) f@(SchemaMap src _ _) = SubSchema (concatMap (subSchemaSimplices.(simplexPreimage f)) simps) src
schemaFullImage f = schemaImage (fullSubSchema $ schemaMapDomain f) f
schemaFullPreimage f = schemaPreimage (fullSubSchema $ schemaMapCoDomain f) f
|
jvictor0/JoSQL
|
Data/Schema.hs
|
mit
| 5,946
| 2
| 21
| 1,288
| 2,428
| 1,285
| 1,143
| 108
| 2
|
{-# htermination showSigned :: (Float -> (String -> String)) -> Int -> Float -> String -> String #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_showSigned_3.hs
|
mit
| 101
| 0
| 2
| 18
| 3
| 2
| 1
| 1
| 0
|
{-# LANGUAGE OverloadedLists #-}
-- | Miscellaneous utility functions
module Iris.Util where
import qualified Linear as L
import Iris.SceneGraph
import Iris.Visuals
-- | Build a cube mesh (vertices at +/-1 on each axis, see 'cubeVerts')
-- with per-vertex colours from 'cubeColors'.
makeCube :: IO DrawNode
makeCube = meshInit $ meshSpec
  { meshSpecData = Faces cubeVerts cubeIndices
  , meshSpecColors = VectorMeshColor cubeColors
  }
cubeVerts :: MeshFaceVertices
cubeVerts = [-- front
L.V3 (-1) (-1) 1
, L.V3 1 (-1) 1
, L.V3 1 1 1
, L.V3 (-1) 1 1
-- back
, L.V3 (-1) (-1) (-1)
, L.V3 1 (-1) (-1)
, L.V3 1 1 (-1)
, L.V3 (-1) 1 (-1)
]
cubeIndices :: MeshFaceIndices
cubeIndices = [-- front
L.V3 0 1 2
, L.V3 2 3 0
-- top
, L.V3 3 2 6
, L.V3 6 7 3
-- back
, L.V3 7 6 5
, L.V3 5 4 7
-- bottom
, L.V3 4 5 1
, L.V3 1 0 4
-- left
, L.V3 4 0 3
, L.V3 3 7 4
-- right
, L.V3 1 5 6
, L.V3 6 2 1
]
cubeColors :: MeshVectorColor
cubeColors = [-- front colors
L.V3 1 0 0
, L.V3 0 1 0
, L.V3 0 0 1
, L.V3 1 1 1
-- back colors
, L.V3 1 1 0
, L.V3 0 1 1
, L.V3 1 0 1
, L.V3 0 0 0
]
|
jdreaver/iris
|
src/Iris/Util.hs
|
mit
| 1,561
| 0
| 8
| 835
| 499
| 270
| 229
| 43
| 1
|
-- Mathematics/Fundamentals/Filling Jars
module Main where
import qualified HackerRank.Mathematics.FillingJarsVect as M
-- | Entry point: delegate to the vector-based Filling Jars solution.
main :: IO ()
main = M.main
|
4e6/sandbox
|
haskell/hackerrank/FillingJarsVect.hs
|
mit
| 152
| 0
| 6
| 22
| 31
| 20
| 11
| 4
| 1
|
module Test where
import System.IO
import System.Process
import System.Directory
import Control.Monad
import TypeSystem
import GenerateCalculi
import FromLambdaProlog
import ToLambdaProlog
import Library
import ToLatex
test :: IO ()
test = do
mapM_ unitTest (tail preLibrary)
mapM_ typeability (tail preLibrary)
return ()
-- | Gradualize one named type system with the default settings
-- ("All" rules, 'LazyDTwo' dynamics).
unitTest :: String -> IO ()
unitTest systemName = do
  gradualize "All" LazyDTwo systemName
-- | Type-check every generated calculus in @Gradualized/@.
-- NOTE(review): the 'systemName' argument is ignored -- the body
-- always iterates over the whole 'preLibrary'; since 'test' maps this
-- over the library too, the work is repeated once per entry.  Confirm
-- whether that is intentional.
-- NOTE(review): the working directory is restored only on the happy
-- path; an exception in 'unitTypeTest' leaves us in @Gradualized/@
-- (a 'bracket' would be safer).
typeability :: String -> IO ()
typeability systemName = do
  old <- getCurrentDirectory
  setCurrentDirectory "Gradualized/"
  mapM_ unitTypeTest (tail preLibrary)
  setCurrentDirectory old
unitTypeTest :: String -> IO ()
unitTypeTest systemName = do
let source = "gradual_" ++ systemName
callCommand ("tjcc " ++ source ++ " > " ++ systemName ++ ".log")
readFileAndShow :: IO [String]
readFileAndShow = do
mapM readFileAndShow_ (tail preLibrary)
readFileAndShow_ :: String -> IO String
readFileAndShow_ name = do
streamSig <- readFile ("Repo of Static Type Systems/" ++ name ++ ".sig")
streamMod <- readFile ("Repo of Static Type Systems/" ++ name ++ ".mod")
return (":BEGIN SIG:" ++ name ++ "::\n" ++ streamSig ++ "\n\n:BEGIN MOD:" ++ streamMod)
-- let signature = lines streamSig
-- let moduleL = lines streamMod
-- let ts = parseLP (signature ++ moduleL)
-- let fileForTs = "typesystem_" ++ name
-- writeFile ("Gradualized/" ++ fileForTs) (show ts)
-- | Read a type system's @.sig@/@.mod@ pair from the static repo and
-- parse the concatenated lines into a 'TypeSystem' via 'parseLP'.
parseAndSpitTS_ :: String -> IO TypeSystem
parseAndSpitTS_ name = do
  streamSig <- readFile ("Repo of Static Type Systems/" ++ name ++ ".sig")
  streamMod <- readFile ("Repo of Static Type Systems/" ++ name ++ ".mod")
  let signature = lines streamSig
  let moduleL = lines streamMod
  let ts = parseLP (signature ++ moduleL)
  return ts
|
mcimini/Gradualizer
|
Test.hs
|
mit
| 2,052
| 0
| 13
| 619
| 477
| 233
| 244
| 45
| 1
|
{- Duplicate the elements of a list a given number of times. -}
-- | @duplicateX xs n@ repeats each element of @xs@ @n@ times in place.
-- A count of zero (or negative) yields the empty list, matching the
-- original recursive definition's behaviour.  Rewritten from manual
-- recursion (which also carried a redundant @n == 1@ clause) to the
-- idiomatic 'concatMap' / 'replicate' form.
duplicateX :: [a] -> Int -> [a]
duplicateX xs n = concatMap (replicate n) xs
|
andrewaguiar/s99-haskell
|
p15.hs
|
mit
| 216
| 0
| 9
| 48
| 102
| 54
| 48
| 5
| 1
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.PerformanceResourceTiming
(js_getInitiatorType, getInitiatorType, js_getRedirectStart,
getRedirectStart, js_getRedirectEnd, getRedirectEnd,
js_getFetchStart, getFetchStart, js_getDomainLookupStart,
getDomainLookupStart, js_getDomainLookupEnd, getDomainLookupEnd,
js_getConnectStart, getConnectStart, js_getConnectEnd,
getConnectEnd, js_getSecureConnectionStart,
getSecureConnectionStart, js_getRequestStart, getRequestStart,
js_getResponseEnd, getResponseEnd, PerformanceResourceTiming,
castToPerformanceResourceTiming, gTypePerformanceResourceTiming)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"initiatorType\"]"
js_getInitiatorType :: PerformanceResourceTiming -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.initiatorType Mozilla PerformanceResourceTiming.initiatorType documentation>
getInitiatorType ::
(MonadIO m, FromJSString result) =>
PerformanceResourceTiming -> m result
getInitiatorType self
= liftIO (fromJSString <$> (js_getInitiatorType (self)))
foreign import javascript unsafe "$1[\"redirectStart\"]"
js_getRedirectStart :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.redirectStart Mozilla PerformanceResourceTiming.redirectStart documentation>
getRedirectStart ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getRedirectStart self = liftIO (js_getRedirectStart (self))
foreign import javascript unsafe "$1[\"redirectEnd\"]"
js_getRedirectEnd :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.redirectEnd Mozilla PerformanceResourceTiming.redirectEnd documentation>
getRedirectEnd ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getRedirectEnd self = liftIO (js_getRedirectEnd (self))
foreign import javascript unsafe "$1[\"fetchStart\"]"
js_getFetchStart :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.fetchStart Mozilla PerformanceResourceTiming.fetchStart documentation>
getFetchStart ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getFetchStart self = liftIO (js_getFetchStart (self))
foreign import javascript unsafe "$1[\"domainLookupStart\"]"
js_getDomainLookupStart :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.domainLookupStart Mozilla PerformanceResourceTiming.domainLookupStart documentation>
getDomainLookupStart ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getDomainLookupStart self = liftIO (js_getDomainLookupStart (self))
foreign import javascript unsafe "$1[\"domainLookupEnd\"]"
js_getDomainLookupEnd :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.domainLookupEnd Mozilla PerformanceResourceTiming.domainLookupEnd documentation>
getDomainLookupEnd ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getDomainLookupEnd self = liftIO (js_getDomainLookupEnd (self))
foreign import javascript unsafe "$1[\"connectStart\"]"
js_getConnectStart :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.connectStart Mozilla PerformanceResourceTiming.connectStart documentation>
getConnectStart ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getConnectStart self = liftIO (js_getConnectStart (self))
foreign import javascript unsafe "$1[\"connectEnd\"]"
js_getConnectEnd :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.connectEnd Mozilla PerformanceResourceTiming.connectEnd documentation>
getConnectEnd ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getConnectEnd self = liftIO (js_getConnectEnd (self))
foreign import javascript unsafe "$1[\"secureConnectionStart\"]"
js_getSecureConnectionStart ::
PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.secureConnectionStart Mozilla PerformanceResourceTiming.secureConnectionStart documentation>
getSecureConnectionStart ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getSecureConnectionStart self
= liftIO (js_getSecureConnectionStart (self))
foreign import javascript unsafe "$1[\"requestStart\"]"
js_getRequestStart :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.requestStart Mozilla PerformanceResourceTiming.requestStart documentation>
getRequestStart ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getRequestStart self = liftIO (js_getRequestStart (self))
foreign import javascript unsafe "$1[\"responseEnd\"]"
js_getResponseEnd :: PerformanceResourceTiming -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/PerformanceResourceTiming.responseEnd Mozilla PerformanceResourceTiming.responseEnd documentation>
getResponseEnd ::
(MonadIO m) => PerformanceResourceTiming -> m Double
getResponseEnd self = liftIO (js_getResponseEnd (self))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/PerformanceResourceTiming.hs
|
mit
| 6,408
| 66
| 10
| 887
| 1,065
| 603
| 462
| 84
| 1
|
{-
geniserver
Copyright (C) 2011 Eric Kow (on behalf of SRI)
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-}
module NLP.GenI.Server.Instruction (ServerInstruction(..))
where
import Control.Applicative ( (<$>), (<*>) )
import Text.JSON
-- | A generation request received by the server: optional GenI
-- parameter flags plus the input semantics as a single string.
data ServerInstruction = ServerInstruction
      { gParams :: [String]
      , gSemantics :: String
      }
instance JSON ServerInstruction where
readJSON j =
do jo <- fromJSObject `fmap` readJSON j
let fieldOr def x = maybe def readJSON (lookup x jo)
fieldOrNull = fieldOr (return [])
field x = fieldOr (fail $ "Could not find: " ++ x) x
ServerInstruction <$> fieldOrNull "params"
<*> field "semantics"
showJSON x =
JSObject . toJSObject $ [ ("params", showJSONs $ gParams x)
, ("semantics", showJSON $ gSemantics x)
]
|
kowey/GenI
|
geniserver/src/NLP/GenI/Server/Instruction.hs
|
gpl-2.0
| 1,522
| 0
| 14
| 377
| 230
| 125
| 105
| 17
| 0
|
module CSP.Trace where
import CSP.Syntax
import CSP.STS
import CSP.Step
import Autolib.NFA
import qualified Data.Set as S
import Autolib.ToDoc
-- | Build an automaton for the trace language ("Spursprache") of a
-- process: derive its STS, make every reachable state accepting, turn
-- visible steps into transitions and hidden steps into epsilons.
-- NOTE(review): no type signature in the original; the argument is a
-- CSP process and the result an NFA -- left unannotated rather than
-- guess the project's exact type names.
auto p =
    let s = sts p
        q = CSP.STS.states s
        a = NFA
            { nfa_info = text "Spursprache"
            , Autolib.NFA.alphabet = CSP.Syntax.alphabet p
            , Autolib.NFA.states = q
            , starts = S.singleton $ start s
            , finals = q
            , trans = collect $ visible s
            }
    in add_epsilons a $ hidden s
|
marcellussiegburg/autotool
|
collection/src/CSP/Trace.hs
|
gpl-2.0
| 548
| 0
| 12
| 214
| 156
| 88
| 68
| 18
| 1
|
{- |
Module : FMP.Tree
Copyright : (c) 2003-2010 Peter Simons
(c) 2002-2003 Ferenc Wágner
(c) 2002-2003 Meik Hellmund
(c) 1998-2002 Ralf Hinze
(c) 1998-2002 Joachim Korittky
(c) 1998-2002 Marco Kuhlmann
License : GPLv3
Maintainer : simons@cryp.to
Stability : provisional
Portability : portable
-}
{-
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
-}
module FMP.Tree (
number, Tree'(..),
Tree(..), Edge(..), AlignSons(..), Distance(..),
edge, edge', cross, cross', enode, node, stair,
forEachNode, forEachLevelNode, forEachPic, forEachEdge,
defaultAlign, alignLeft, alignRight, alignLeftSon, alignRightSon,
alignOverN, alignAngles, alignConst, alignFunction,
setAlign, getAlign, setDistH, getDistH, setDistV, getDistV,
fit, fitLeft, fitRight,
distCenter, distBorder,
NodeName(..)
) where
import Prelude ( Read(..), Show(..), Eq(..), Num(..), Fractional(..), Int, Maybe(..)
, otherwise, map, length, zip, unzip, init, take, (++), showString, tail
, foldl, foldr, fst, snd, Bool(..), drop, fromIntegral, (.), concat, reverse
, Ord(..)
)
import FMP.Types
import FMP.Color
import FMP.Picture
-- | Dummy 'Read' instance for functions: parsing always fails.  It exists
-- only so that types containing functions can satisfy 'Read' constraints.
instance (Read a,Read b) => Read (a -> b) where
      readsPrec _ _ = []
-- | Dummy 'Eq' instance for functions: no two functions ever compare
-- equal.  NOTE(review): not a lawful 'Eq' (reflexivity fails); present
-- only to satisfy superclass constraints.
instance (Eq a,Eq b) => Eq (a -> b) where
      _ == _ = False
-- | Component-wise arithmetic on pairs, used for 2-D offsets.
-- NOTE(review): 'abs' is the identity and 'signum' is constantly (1, 1),
-- so the law @abs x * signum x == x@ does not hold.
instance (Num a, Num b) => Num (a, b) where
      (a1, b1) + (a2, b2) = (a1 + a2, b1 + b2)
      (a1, b1) - (a2, b2) = (a1 - a2, b1 - b2)
      (a1, b1) * (a2, b2) = (a1 * a2, b1 * b2)
      negate (a, b) = (negate a, negate b)
      abs a = a
      signum _ = (1, 1)
      fromInteger i = (fromInteger i, fromInteger i)
----------------------------------------------------------
-- | A three-segment "staircase" path from @p1@ to @p2@: down (or up) half
-- the vertical distance, across, then to @p2@.
-- NOTE(review): the grouping assumes '.-.' binds less tightly than
-- '+'/'-' (fixities come from FMP.Types) — confirm there.
stair :: Point -> Point -> Path
stair p1 p2 = p1 .-. p1 + vec (0, 0.5*ydist p2 p1)
                 .-. p2 - vec (0, 0.5*ydist p2 p1) .-. p2
-- | Default edge to a subtree: centre of this node to centre of parent.
edge :: Tree -> Edge
edge t = edge' (ref (This <+ C) ... ref (Parent <+ C)) t
-- | Edge to a subtree along an explicit path.
edge' :: Path -> Tree -> Edge
edge' = Edge
-- | Free-standing path from this node's centre to a given point.
cross :: Point -> Edge
cross p = cross' (ref (This <+ C) ... p)
-- | Free-standing path given explicitly.
cross' :: Path -> Edge
cross' = Cross
-- | Convenience: a node wrapped in a default 'edge'.
enode :: IsPicture a => a -> [Edge] -> Edge
enode p ts = edge (node p ts)
-- | Build a tree node with the default layout description.
node :: IsPicture a => a -> [Edge] -> Tree
node p ts = Node (toPicture p) stdNodeDescr ts
-- Smart constructors for the son-alignment strategies.
defaultAlign, alignLeft, alignRight, alignLeftSon, alignRightSon :: AlignSons
defaultAlign = DefaultAlign
alignLeft = AlignLeft
alignRight = AlignRight
alignLeftSon = AlignLeftSon
alignRightSon = AlignRightSon
-- | Align the parent over its @n@-th son.
alignOverN :: Int -> AlignSons
alignOverN = AlignOverN
-- | Place the first sons at the given edge angles.
alignAngles :: [Numeric] -> AlignSons
alignAngles = AlignAngles
-- | Place sons at a constant horizontal spacing.
alignConst :: Numeric -> AlignSons
alignConst = AlignConst
-- | User-supplied placement function (node, son extents, depth -> offsets).
alignFunction :: (NodeDescr -> [Extent] -> Int -> [Numeric]) -> AlignSons
alignFunction = AlignFunction
-- Getters/setters for the layout parameters stored in a node's 'NodeDescr'.
setAlign :: AlignSons -> Tree -> Tree
setAlign align (Node a nd ts) = Node a nd{nAlignSons = align} ts
getAlign :: Tree -> AlignSons
getAlign (Node _ nd _) = nAlignSons nd
setDistH :: Distance -> Tree -> Tree
setDistH sh (Node a nd ts) = Node a nd{nDistH = sh } ts
getDistH :: Tree -> Distance
getDistH (Node _ nd _) = nDistH nd
setDistV :: Distance -> Tree -> Tree
setDistV sh (Node a nd ts) = Node a nd{nDistV = sh } ts
getDistV :: Tree -> Distance
getDistV (Node _ nd _) = nDistV nd
-- | Distance measured between node centres.
distCenter :: Numeric -> Distance
distCenter = DistCenter
-- | Distance measured between node borders.
distBorder :: Numeric -> Distance
distBorder = DistBorder
----------------------------------------------------------
-- The attribute classes below are implemented for 'Edge' by delegating
-- to the underlying 'Path' of either an 'Edge' or a 'Cross'.
-- | Colour of the edge path.
instance HasColor Edge where
      setColor c (Edge e ts) = Edge (setColor c e) ts
      setColor c (Cross e) = Cross (setColor c e)
      setDefaultColor (Edge e ts)
                        = Edge (setDefaultColor e) ts
      setDefaultColor (Cross e)
                        = Cross (setDefaultColor e)
      getColor (Edge e _) = getColor e
      getColor (Cross e) = getColor e
-- | Label attached to the edge path.
instance HasLabel Edge where
      setLabel l i o (Edge e ts)
                        = Edge (setLabel l i o e) ts
      setLabel l i o (Cross e)
                        = Cross (setLabel l i o e)
      removeLabel (Edge e ts) = Edge (removeLabel e) ts
      removeLabel (Cross e) = Cross (removeLabel e)
-- | Line pattern (e.g. dashing) of the edge path.
instance HasPattern Edge where
      setPattern pat (Edge e ts)
                        = Edge (setPattern pat e) ts
      setPattern pat (Cross e)
                        = Cross (setPattern pat e)
      setDefaultPattern (Edge e ts)
                        = Edge (setDefaultPattern e) ts
      setDefaultPattern (Cross e)
                        = Cross (setDefaultPattern e)
      getPattern (Edge e _) = getPattern e
      getPattern (Cross e) = getPattern e
-- | Arrow heads at the end/start of the edge path.
instance HasArrowHead Edge where
      setArrowHead ar (Edge e ts)
                        = Edge (setArrowHead ar e) ts
      setArrowHead ar (Cross e)
                        = Cross (setArrowHead ar e)
      removeArrowHead (Edge e ts)
                        = Edge (removeArrowHead e) ts
      removeArrowHead (Cross e)
                        = Cross (removeArrowHead e)
      getArrowHead (Edge e _) = getArrowHead e
      getArrowHead (Cross e) = getArrowHead e
      setStartArrowHead ar (Edge e ts)
                        = Edge (setStartArrowHead ar e) ts
      setStartArrowHead ar (Cross e)
                        = Cross (setStartArrowHead ar e)
      removeStartArrowHead (Edge e ts)
                        = Edge (removeStartArrowHead e) ts
      removeStartArrowHead (Cross e)
                        = Cross (removeStartArrowHead e)
      getStartArrowHead (Edge e _)
                        = getStartArrowHead e
      getStartArrowHead (Cross e)
                        = getStartArrowHead e
-- | Pen used to draw the edge path.
instance HasPen Edge where
      setPen pen (Edge e ts) = Edge (setPen pen e) ts
      setPen pen (Cross e) = Cross (setPen pen e)
      setDefaultPen (Edge e ts)
                        = Edge (setDefaultPen e) ts
      setDefaultPen (Cross e)
                        = Cross (setDefaultPen e)
      getPen (Edge e _) = getPen e
      getPen (Cross e) = getPen e
-- | Direction constraints at the ends of the edge path.
instance HasStartEndDir Edge where
      setStartAngle a (Edge e ts)
                        = Edge (setStartAngle a e) ts
      setStartAngle a (Cross e)
                        = Cross (setStartAngle a e)
      setEndAngle a (Edge e ts)
                        = Edge (setEndAngle a e) ts
      setEndAngle a (Cross e) = Cross (setEndAngle a e)
      setStartCurl a (Edge e ts)
                        = Edge (setStartCurl a e) ts
      setStartCurl a (Cross e)= Cross (setStartCurl a e)
      setEndCurl a (Edge e ts)= Edge (setEndCurl a e) ts
      setEndCurl a (Cross e) = Cross (setEndCurl a e)
      setStartVector a (Edge e ts)
                        = Edge (setStartVector a e) ts
      setStartVector a (Cross e)
                        = Cross (setStartVector a e)
      setEndVector a (Edge e ts)
                        = Edge (setEndVector a e) ts
      setEndVector a (Cross e)= Cross (setEndVector a e)
      removeStartDir (Edge e ts)
                        = Edge (removeStartDir e) ts
      removeStartDir (Cross e)= Cross (removeStartDir e)
      removeEndDir (Edge e ts)= Edge (removeEndDir e) ts
      removeEndDir (Cross e) = Cross (removeEndDir e)
-- | Hiding an edge hides its path.
instance IsHideable Edge where
      hide (Edge e ts) = Edge (hide e) ts
      hide (Cross e) = Cross (hide e)
-- | Naming a tree names the picture at its root node.
instance HasName Tree where
      setName n (Node p nd es)= Node (setName n p) nd es
      getNames (Node p _ _) = getNames p
-- | Colouring a tree colours the picture at its root node.
instance HasColor Tree where
      setColor c (Node p nd es )
                        = Node (setColor c p) nd es
      setDefaultColor t = setColor default' t
      getColor (Node p _ _) = getColor p
----------------------------------------------------------
-- | A tree node: the node's picture, its layout description and the
-- outgoing connections to subtrees.
data Tree = Node Picture NodeDescr [Edge]
            deriving Show
-- | A connection leaving a node: a proper edge to a subtree, or a
-- free-standing "cross" path with no subtree.
data Edge = Edge Path Tree
          | Cross Path
            deriving Show
-- | Strategies for horizontally aligning a node's sons; see the smart
-- constructors ('defaultAlign', 'alignAngles', ...) for their meaning.
data AlignSons = DefaultAlign
               | AlignLeft
               | AlignRight
               | AlignLeftSon
               | AlignRightSon
               | AlignOverN Int
               | AlignAngles [Numeric]
               | AlignConst Numeric
               | AlignFunction (NodeDescr -> [Extent] -> Int -> [Numeric])
                 deriving Show
-- | Dummy 'Show' for functions so 'AlignSons' can derive 'Show'.
instance Show (a -> b) where -- RH
      showsPrec _ _ = showString "<function>"
-- | A distance, measured either between node centres or node borders.
data Distance = DistCenter Numeric
              | DistBorder Numeric
                deriving (Eq, Show)
-- | Arithmetic on distances.  NOTE(review): when the two operands use
-- different constructors, the left operand is returned unchanged.
instance Num Distance where
      (DistBorder a) + (DistBorder b)
                        = DistBorder (a + b)
      (DistCenter a) + (DistCenter b)
                        = DistCenter (a + b)
      a + _ = a
      (DistBorder a) - (DistBorder b)
                        = DistBorder (a - b)
      (DistCenter a) - (DistCenter b)
                        = DistCenter (a - b)
      a - _ = a
      (DistBorder a) * (DistBorder b)
                        = DistBorder (a * b)
      (DistCenter a) * (DistCenter b)
                        = DistCenter (a * b)
      a * _ = a
      negate (DistBorder a) = DistBorder (-a)
      negate (DistCenter a) = DistCenter (-a)
      abs (DistBorder a) = DistBorder (abs a)
      abs (DistCenter a) = DistCenter (abs a)
      signum a = a
      fromInteger = DistBorder . fromInteger
-- | Division follows the same left-biased convention as 'Num'.
instance Fractional Distance where
      (DistBorder a) / (DistBorder b)
                        = DistBorder (a / b)
      (DistCenter a) / (DistCenter b)
                        = DistCenter (a / b)
      a / _ = a
      recip (DistBorder a) = DistBorder (1 / a)
      recip (DistCenter a) = DistCenter (1 / a)
      fromRational = DistBorder . fromRational
-- | Per-node layout parameters: extra edge paths, son alignment, and the
-- horizontal/vertical distances to neighbours.
data NodeDescr = NodeDescr { nEdges :: [Path],
                             nAlignSons :: AlignSons,
                             nDistH, nDistV :: Distance }
                 deriving Show
-- | Default layout: no extra edges, default alignment, 8/10 border units.
stdNodeDescr :: NodeDescr
stdNodeDescr = NodeDescr { nEdges = [],
                           nAlignSons = DefaultAlign,
                           nDistH = 8,
                           nDistV = 10 }
-- Internal tree structure: node label, layout description, subtrees.
data Tree' a = Node' a NodeDescr [Tree' a]
-- Number the tree's nodes; each label is (parent number, own number,
-- depth).  The root gets parent -1, number 0, depth 0; a node is numbered
-- before its sons.  Cross paths of the incoming edge are accumulated into
-- the node's nEdges.
number :: Tree -> (Tree' (Int, Int, Int))
number t = snd (traverse (-1) 0 0 t [])
  where
    -- traverse parent nextNumber depth node incomingPaths
    --   returns (next free number, numbered subtree)
    traverse j k l (Node _ nd ts) pe
              = (k', Node' (j, k, l) nd{nEdges = edges ts} nts)
      where
        -- cross paths of the sons, appended to the incoming paths
        edges [] = pe
        edges (Edge _ _:es) = edges es
        edges (Cross e:es) = e:edges es
        -- proper sons together with their edge paths
        sons [] = []
        sons (Edge e s:es) = (e,s):sons es
        sons (_:es) = sons es
        (k', nts) = traverses k (k+1) (l+1) (sons ts)
    -- number a list of siblings left to right, threading the counter
    traverses _ k _ [] = (k, [])
    traverses j k l ((e,t): ts)
              = (k'', nt : nts)
      where
        (k', nt) = traverse j k l t [e]
        (k'', nts) = traverses j k' l ts
-- | The pictures of all nodes, collected in preorder.
-- (toDo: could be made more efficient)
extractPics :: Tree -> [Picture]
extractPics (Node pic _ sons) = pic : concatMap fromEdge sons
  where
    -- only proper edges carry subtrees; crosses contribute nothing
    fromEdge (Edge _ sub) = extractPics sub
    fromEdge _            = []
-- Relative placement of the nodes: one equation per node positioning its
-- centre relative to its parent's centre, using the node's horizontal
-- offset variable and either the level's vertical offset (border
-- distance) or a fixed centre distance.
relPlacements :: Tree' (Int, Int, Int) -> [Equation]
relPlacements (Node' (a, b, l) nd ts)
              = [case nDistV nd of
                      DistBorder v -> ref (b <* C) .= ref (a <* C)
                                                      + vec(hoff b, voff l-v)
                      DistCenter v -> ref (b <* C) .= ref (a <* C)
                                                      + vec(hoff b, -v)]
                & map (equations.relPlacements) ts
-- Compute the paths of all tree edges.
-- | Symbolic node references usable inside edge paths; they are resolved
-- to concrete node numbers when the tree is laid out.
data NodeName = Parent | This | Root | Up Int | Son Int
                deriving Show
instance IsName NodeName where
      toName a = toName (show a)
-- | Collect the edge paths of the whole tree, with every symbolic
-- 'NodeName' replaced by the actual node number.  @path@ is the list of
-- ancestor numbers (nearest first) used to resolve 'Up'.
edges :: [Int] -> Tree' (Int, Int, Int) -> [Path]
edges path (Node' (a,b,_) nd ts)
              = [replacePath edge aliases | edge <- nEdges nd]
                ++ concat (map (edges (b:path)) ts)
  where
    aliases = [(toName Parent, toName a),
               (toName This, toName b),
               (toName Root, toName (0::Int))]
              ++ [(toName (Up n), toName u)| (u,n) <- zip (b:path) [0..]]
              ++ [(toName (Son n), toName s)
                 |(Node' (_,s,_) _ _,n) <- zip ts [0..]]
-- | Render a tree as a picture: number the nodes, generate the layout
-- equations (widths, heights, per-level vertical offsets, horizontal
-- offsets from 'design', and relative placements), overlay them on the
-- enumerated node pictures, and draw the edge paths on top.
instance IsPicture Tree where
      toPicture t = draw edgePaths
                         (overlay (widthsL & widthsR
                                   & heightsTop & heightsBot
                                   & voffs & hoffs
                                   & placements)
                                  (enumPics nodePics))
        where
          -- left/right half-widths of every node picture
          widthsL = [ widthL i .= xpart (ref (i <+ W)- ref (i <+ C))
                    | i <- [0..length nodePics-1]]
          widthsR = [ widthR i .= xpart (ref (i <+ E)- ref (i <+ C))
                    | i <- [0..length nodePics-1]]
          -- per-level maximal heights above/below the centre line
          heightsTop = [ heightT l .= maximum' (map heightTop ns)
                       | (ns,l) <- zip (levels nt) [1..]]
          heightsBot = [ heightB l .= maximum' (map heightBot ns)
                       | (ns,l) <- zip (levels nt) [1..]]
          -- vertical offset between consecutive levels
          voffs = [ voff l .= - heightT (l+1) - heightB l
                  | l <- (tail [0..length (levels nt)-1])]
          heightTop n = ypart (ref (n <+ N) - ref (n <+ C ))
          heightBot n = ypart (ref (n <+ C) - ref (n <+ S ))
          nt = number t
          hoffs = design nt
          -- drop the root's placement equation: it has no parent
          placements = tail (relPlacements nt)
          nodePics = extractPics t
          edgePaths = edges [] nt
-- | The node labels of the tree grouped by depth (root level first).
levels :: Tree' (a,b,c) -> [[b]]
levels (Node' (_,a,_) _ []) = [[a]]
levels (Node' (_,a,_) _ ts) = [a] : foldl zipLists [] (map levels ts)
  where
    zipLists [] l = l
    zipLists l [] = l
    zipLists (l:ls) (l':ls')= (l ++ l'):zipLists ls ls'
-- Like zip, except that for two lists of lists the first, second, etc.
-- sublists are concatenated.  Not the same as zipWith (++), which would
-- truncate to the shorter list.
-- | Flatten the equations stored in a @Tree' [Equation]@, sons first.
getHEqs :: Tree' [Equation] -> [Equation]
getHEqs (Node' eqs _ ts) = map (equations.getHEqs) ts & eqs
-- Each node/level index owns six consecutive solver variables:
-- horizontal offset, vertical offset, left/right half-width and
-- top/bottom half-height.
hoff, voff,widthL,widthR,heightT,heightB :: Int -> Numeric
hoff i = var (6*i)
voff i = var (6*i+1)
widthL i = var (6*i+2)
widthR i = var (6*i+3)
heightT i = var (6*i+4)
heightB i = var (6*i+5)
-- Layout of a tree.
-- | Equations fixing the horizontal offset of every node.
design :: Tree' (Int, Int, Int) -> [Equation]
design t = fst (design' t)
-- | Lay out a subtree: returns the offset equations together with the
-- subtree's extent (one (left, right) pair per level).
design' :: Tree' (Int, Int, Int) -> ([Equation], Extent)
design' (Node' (_,m,l) nd ts) = (foldl (&) [] designedTrees & eqs,
                                 topExtent (nDistH nd) : mergedExtent)
  where
    (designedTrees, es) = unzip [ design' t| t <- ts ]
    -- relative son positions according to the node's alignment strategy
    relPositions = calcPos nd es l
    mergedExtent = mergeMany [ moveExtent h e | (h, e) <- zip hoffVars es ]
    eqs = [ h .= rp | (h, rp) <- zip hoffVars relPositions]
    hoffVars = [ hoff m | Node' (_, m, _) _ _ <- ts]
    -- border distances use the node's own width; centre distances don't
    topExtent (DistBorder _)
              = (widthL m, widthR m)
    topExtent _ = (0, 0)
-- Transliterated from Andrew J. Kennedy's "Drawing Trees".
type Position = Numeric
-- | One (leftmost, rightmost) pair per level of a subtree.
type Extent = [(Position, Position)]
-- Position relative to the parent node.
--
-- moveTree :: Position -> PositionedTree a -> PositionedTree a
-- moveTree d (Node' (a,p) as ts)= Node' (a, p ) as ts
-- Absolute positions.
-- | Shift an extent horizontally by @x@.
moveExtent :: Position -> Extent -> Extent
moveExtent x = map (+ (x, x))
-- | Combine the extents of two adjacent subtrees: left borders from the
-- left one, right borders from the right one; the longer tail survives.
merge :: Extent -> Extent -> Extent
merge [] qs = qs
merge ps [] = ps
merge ((l, _):ps) ((_, r):qs) = (l, r) : merge ps qs
mergeMany :: [Extent] -> Extent
mergeMany = foldr merge []
-- Fitting extents together (after Kennedy's "Drawing Trees").
-- | Smallest horizontal offset at which extent @qs@ can sit to the right
-- of extent @ps@ while keeping a gap of @gap@ on every shared level.
fit :: Numeric -> Extent -> Extent -> Position
fit gap ps qs = maximum' [ r - l + gap | ((_, r), (l, _)) <- zip ps qs ]
-- | Fit a list of extents left to right, packing each as far left as
-- possible; one offset per extent.
fitLeft :: Numeric -> [Extent] -> [Position]
fitLeft gap = go []
  where
    go :: Extent -> [Extent] -> [Position]
    go _ [] = []
    go acc (e:es) = pos : go (merge acc (moveExtent pos e)) es
      where pos = fit gap acc e
-- | Mirror image of 'fitLeft': pack each extent as far right as possible.
fitRight :: Numeric -> [Extent] -> [Position]
fitRight gap = reverse . go [] . reverse
  where
    go :: Extent -> [Extent] -> [Position]
    go _ [] = []
    go acc (e:es) = pos : go (merge (moveExtent pos e) acc) es
      where pos = -fit gap e acc
-- | Symmetric fitting: the mean of left-packed and right-packed offsets.
fitMany :: Numeric -> [Extent] -> [Position]
fitMany gap es = [ (l + r) / 2
                 | (l, r) <- zip (fitLeft gap es) (fitRight gap es) ]
-- | The raw horizontal distance of a node, ignoring centre/border kind.
getHDist :: NodeDescr -> Numeric
getHDist nd = case nDistH nd of
                   DistCenter h -> h
                   DistBorder h -> h
-- | The raw vertical distance of a node, ignoring centre/border kind.
getVDist :: NodeDescr -> Numeric
getVDist nd = case nDistV nd of
                   DistCenter v -> v
                   DistBorder v -> v
-- | Son positions relative to the parent, dispatching on the node's
-- alignment strategy.  @l@ is the node's depth (used by 'AlignAngles').
calcPos :: NodeDescr -> [Extent] -> Int -> [Position]
calcPos nd es l = calcPos' (nAlignSons nd) nd es l
-- | One clause per alignment strategy; note that the single-son clauses
-- of AlignLeftSon/AlignRightSon must precede the general fallbacks.
calcPos' :: AlignSons -> NodeDescr -> [Extent] -> Int -> [Position]
calcPos' DefaultAlign nd es _ = fitMany (getHDist nd) es
calcPos' AlignLeft nd es _ = fitLeft (getHDist nd) es
calcPos' AlignRight nd es _ = fitRight (getHDist nd) es
calcPos' AlignLeftSon nd [((l,_ ):_)] _
              = [l-0.5*getHDist nd]
calcPos' AlignLeftSon nd es _ = fitMany (getHDist nd) es
calcPos' AlignRightSon nd [((_,r):_)] _
              = [r+0.5*getHDist nd]
calcPos' AlignRightSon nd es _
              = fitMany (getHDist nd) es
calcPos'(AlignOverN n) nd es _
              = init (fitRight (getHDist nd) (take n es))
                ++ 0:tail (fitLeft (getHDist nd) (drop (n-1) es))
calcPos' (AlignAngles ds) nd es h
              = take (length es) (calcOffsets ds
                                  ++ fitLeft (getHDist nd)
                                             (drop (length ds) es))
  where
    calcOffsets [] = []
    calcOffsets (d:ds) = offset d:calcOffsets ds
    -- horizontal offset so the edge leaves the parent at angle d
    offset d = (voff (h+1)-getVDist nd)*cos d / sin d
calcPos' (AlignConst x) _ es _
              = fitConst (fromIntegral (length es-1)) x
  where
    -- n+1 positions spaced x apart, centred around 0
    fitConst n x = [-n/2*x, -n/2*x+x .. n/2*x]
calcPos' (AlignFunction f) nd es h
              = f nd es h
-- | Apply @f@ to every node of the tree, bottom-up: the sons are
-- rewritten before their parent is passed to @f@.
forEachNode :: (Tree -> Tree) -> Tree -> Tree
forEachNode f (Node pic nd sons) = f (Node pic nd (map descend sons))
  where
    descend (Edge p sub) = Edge p (forEachNode f sub)
    descend other        = other
-- | Apply @f@ only to the nodes exactly @l@ levels below the root (the
-- root itself is level 0); a negative level leaves the tree unchanged.
forEachLevelNode :: Int -> (Tree -> Tree) -> Tree -> Tree
forEachLevelNode l f t@(Node pic nd sons)
  | l < 0     = t
  | l == 0    = f t
  | otherwise = Node pic nd (map descend sons)
  where
    descend (Edge p sub) = Edge p (forEachLevelNode (l-1) f sub)
    descend other        = other
-- | Apply @f@ to the picture stored at every node.
forEachPic :: (Picture -> Picture) -> Tree -> Tree
forEachPic f (Node pic nd sons) = Node (f pic) nd (map descend sons)
  where
    descend (Edge p sub) = Edge p (forEachPic f sub)
    descend other        = other
-- | Apply @f@ to every edge path and every cross path in the tree.
forEachEdge :: (Path -> Path) -> Tree -> Tree
forEachEdge f (Node pic nd sons) = Node pic nd (map descend sons)
  where
    descend (Edge p sub) = Edge (f p) (forEachEdge f sub)
    descend (Cross p)    = Cross (f p)
-----------------------------------------------------------------
-- Name substitution: rewrite every Name occurring in a structure
-- according to an alias list of (old, new) pairs.
-----------------------------------------------------------------
-- | Apply the alias substitution to every name inside a path.
replacePath :: Path -> [(Name, Name)] -> Path
replacePath (PathPoint p) al = PathPoint (replacePoint p al)
replacePath PathCycle _ = PathCycle
replacePath (PathJoin p1 pj p2 ) al
              = PathJoin (replacePath p1 al)
                         (replacePathElemDescr pj al)
                         (replacePath p2 al)
replacePath (PathEndDir p d) al
              = PathEndDir (replacePoint p al) (replaceDir' d al)
replacePath (PathBuildCycle p1 p2) al
              = PathBuildCycle (replacePath p1 al) (replacePath p2 al)
replacePath (PathTransform t p ) al
              = PathTransform t (replacePath p al)
replacePath (PathDefine eqs p) al
              = PathDefine (replaceEquations eqs al)
                           (replacePath p al)
-- | Substitute inside a path element: its cut pictures and directions.
replacePathElemDescr :: PathElemDescr -> [(Name, Name)] -> PathElemDescr
replacePathElemDescr ped al = ped {peStartCut = case peStartCut ped of
                                        Just a -> Just (replaceCutPic a al)
                                        a -> a,
                                   peEndCut = case peEndCut ped of
                                        Just a -> Just (replaceCutPic a al)
                                        a -> a,
                                   peStartDir = replaceDir' (peStartDir ped) al,
                                   peEndDir = replaceDir' (peEndDir ped) al}
-- | Substitute inside a direction specification.
replaceDir' :: Dir' -> [(Name, Name)] -> Dir'
replaceDir' (DirCurl a) al = DirCurl (replaceNumeric a al)
replaceDir' (DirDir a) al = DirDir (replaceNumeric a al)
replaceDir' (DirVector a) al = DirVector (replacePoint a al)
replaceDir' a _ = a
-- | Substitute the name of a cut picture.
replaceCutPic :: CutPic -> [(Name, Name)] -> CutPic
replaceCutPic (CutPic name) al= CutPic (replaceName name al)
replaceCutPic c _ = c
-- | Substitute inside a point expression.
replacePoint :: Point -> [(Name, Name)] -> Point
replacePoint (PointVar name) al
              = PointVar (replaceName name al)
replacePoint (PointPPP c a b) al
              = PointPPP c (replacePoint a al) (replacePoint b al)
replacePoint (PointVec (a, b)) al
              = PointVec (replaceNumeric a al, replaceNumeric b al)
replacePoint (PointMediate a b c) al
              = PointMediate (replaceNumeric a al)
                             (replacePoint b al)
                             (replacePoint c al)
replacePoint (PointDirection a ) al
              = PointDirection (replaceNumeric a al)
replacePoint (PointNeg a) al = PointNeg (replacePoint a al)
replacePoint a _ = a
-- | Substitute a name: hierarchical names are rewritten component-wise,
-- plain names are looked up in the alias list (first match wins).
replaceName :: Name -> [(Name, Name)] -> Name
replaceName (Hier n n') al = Hier (replaceName n al) (replaceName n' al)
replaceName n al = replaceName' n al
  where
    replaceName' n [] = n
    replaceName' n ((n',r):al)
      | n == n' = r
      | otherwise = replaceName' n al
-- | Apply the alias substitution to every name inside a numeric
-- expression; leaves constructors without names untouched.
replaceNumeric :: Numeric -> [(Name, Name)] -> Numeric
replaceNumeric (NumericVar a) al
              = NumericVar (replaceName a al)
replaceNumeric (NumericDist a b) al
              = NumericDist (replacePoint a al) (replacePoint b al)
replaceNumeric (NumericMediate a b c) al
              = NumericMediate (replaceNumeric a al)
                               (replaceNumeric b al)
                               (replaceNumeric c al)
replaceNumeric (NumericPN c a) al
              = NumericPN c (replacePoint a al)
replaceNumeric (NumericNN c a) al
              = NumericNN c (replaceNumeric a al)
replaceNumeric (NumericNNN c a b) al
              = NumericNNN c (replaceNumeric a al) (replaceNumeric b al)
replaceNumeric (NumericNsN c as) al
              = NumericNsN c (map (\a -> replaceNumeric a al) as)
replaceNumeric a _ = a
-- | Substitute inside a list of equations.
replaceEquations :: [Equation] -> [(Name,Name)] -> [Equation]
replaceEquations eqs al = map (\a -> replaceEquation a al) eqs
-- | Substitute inside a single equation.
replaceEquation :: Equation -> [(Name,Name)] -> Equation
replaceEquation (PEquations ps) al
              = PEquations (map (\a -> replacePoint a al) ps)
replaceEquation (NEquations ns) al
              = NEquations (map (\a -> replaceNumeric a al) ns)
replaceEquation (EquationCond b e1 e2) al
              = EquationCond (replaceBoolean b al) (replaceEquation e1 al)
                             (replaceEquation e2 al)
replaceEquation (Equations eqs) al
              = Equations (replaceEquations eqs al)
-- | Substitute inside a boolean expression.
replaceBoolean :: Boolean -> [(Name,Name)] -> Boolean
replaceBoolean (BoolNum a c b) al
              = BoolNum (replaceNumeric a al) c (replaceNumeric b al)
replaceBoolean (BoolPnt a c b) al
              = BoolPnt (replacePoint a al) c (replacePoint b al)
replaceBoolean (BoolOr a b) al= BoolOr (replaceBoolean a al) (replaceBoolean b al)
replaceBoolean (BoolAnd a b) al
              = BoolAnd (replaceBoolean a al) (replaceBoolean b al)
replaceBoolean (BoolNot a) al = BoolNot (replaceBoolean a al)
replaceBoolean a _ = a
|
peti/funcmp
|
FMP/Tree.hs
|
gpl-3.0
| 29,709
| 1
| 17
| 13,315
| 9,324
| 4,828
| 4,496
| 504
| 6
|
{- ============================================================================
| Copyright 2011 Matthew D. Steele <mdsteele@alum.mit.edu> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
module Fallback.Scenario.Triggers.Holmgare
(compileHolmgare)
where
import Fallback.Scenario.Compile
import Fallback.Scenario.Script
import Fallback.Scenario.Triggers.Globals
import Fallback.Scenario.Triggers.Script
import Fallback.State.Creature (MonsterTownAI(..))
import Fallback.State.Simple (FaceDir(..))
import Fallback.State.Tags
import Fallback.State.Tileset (TileTag(..))
-------------------------------------------------------------------------------
-- | Compile the Holmgare village area: its exits, the persistent east
-- gate, and the townspeople with their conversation scripts.  Numeric
-- literals are the scenario's unique trigger/device/variable IDs.
compileHolmgare :: Globals -> CompileScenario ()
compileHolmgare globals = compileArea Holmgare Nothing $ do
  makeExit FrozenPass ["ToFrozenPass1", "ToFrozenPass2"] "FromFrozenPass"
  makeExit SewerCaves ["ToSewerCaves"] "FromSewerCaves"
  makeExit PerilousRoad ["ToPerilousRoad"] "FromPerilousRoad"
  onStartDaily 472927 $ do
    addUnlockedDoors globals
    setAreaCleared Holmgare True
  -- East gate:
  -- The gate stays closed until the mayor opens it (see Jarmir below).
  eastGateOpen <- newPersistentVar 198400 False
  trigger 798711 (varTrue eastGateOpen) $ do
    setTerrain AdobeGateOpenTile =<< lookupTerrainMark "EastGate"
  once 448901 (walkIn "NearEastGate") $ conversation $ convNode $ do
    convText "The townspeople have erected a gated wall in the gap in the\
      \ thick trees here. The construction is rather shoddy--it looks like it\
      \ was put up relatively recently, in a big hurry, and is already\
      \ starting to deteriorate."
    whenP (varFalse eastGateOpen) $ do
      convText " Still, with the gate closed, it's enough to keep you from\
        \ getting through. Looks like you'll need to talk to someone in the\
        \ village about having the gate opened before you'll be able to leave\
        \ here."
    whenP (varTrue eastGateOpen) $ do
      convText "\n\n\
        \Sure enough, Mayor Jarmir has already had the gate opened for you. \
        \ Now, you're off to find this lost Sophia girl."
  -- Smithy:
  uniqueDevice 330389 "SmithySign" signRadius $ \_ _ -> do
    narrate "The sign mounted on the wall reads:\n\n\
      \      {b}GREGOR'S SMITHY{_}"
  simpleTownsperson 217809 TownManApron "Gregor"
                    ImmobileAI $ \_ -> conversation $ do
    let
      aboutSmithy = convNode $ do convText "FIXME"
      whatIsNews = convNode $ do convText "FIXME"
      whatForSale = convNode $ do
        convText "He frowns. \"Not a lot right now, to be honest,\" he says,\
          \ with apparent regret. \"I can do repair work, and I can show you\
          \ what little I've got in stock. But I'm short on raw materials,\
          \ and until I can get more I'm not going to be able to do any\
          \ commission work."
        convChoice canWeHelp "\"You're short on raw materials? Is that\
          \ something we could help with?\""
        convChoice doShop "\"Well, let's see what you have on hand.\""
      doShop = convNode $ do
        startShopping $ map Right $
          [WeaponItemTag Dagger, WeaponItemTag Shortsword,
           WeaponItemTag Voulge, ArmorItemTag IronMail]
        convText "You conclude your business. Gregor grunts and turns back to\
          \ his anvil."
      canWeHelp = convNode $ do convText "FIXME"
    convNode $ do
      convText "The blacksmith wipes his brow and sets down his tongs. \"The\
        \ name's Gregor. What can I do for you?\""
      convChoice (return ()) "\"I think we're all set.\" (Leave.)"
      convChoice whatIsNews "\"What's been going on in this village?\""
      convChoice whatForSale "\"What have you got for sale?\""
      convChoice aboutSmithy "\"Tell us about your smithy.\""
  -- Mushroom patch:
  simpleTownsperson 720981 TownWomanRed "Laci"
                    (DrunkAI "LaciZone") $ \_ -> conversation $ do
    let
      whatsWrong = convNode $ convText "Your question just elicits a new round\
        \ of sobbing from the young woman. She seems to be trying to get\
        \ herself back under control, but isn't ready to answer any questions\
        \ to a stranger just yet."
      waitForHer = convResetNode $ do
        convText "You patiently stand by and give her some quiet company for a\
          \ couple minutes while she gets her tears out. Finally she quiets\
          \ down a bit, blows her nose on her hankerchief, and starts wiping\
          \ her eyes. \"I...I'm sorry abo-, about that. My na-, my name's\
          \ Laci.\" She blows her nose again, which seems to help her voice. \
          \ \"It's just so awful, what's happened, and now Sophia's gone\
          \ missing. That poor little girl. I don't even kn-, I don't even\
          \ know what to wish for--if we don't find her soon, th-, then...but\
          \ if we do, what then? I can't even imagine what her parents must\
          \ be going through.\"\n\n\
          \Finally mostly recovered, Laci looks around at you and seems to\
          \ become more aware of her surroundings. \"Why, visitors! Oh dear,\
          \ I shouldn't be going on like that to you about our village's\
          \ problems. I...what can I do for you?\""
        convChoice (return ()) "\"We'll be going now.\" (Leave.)"
        convChoice whereParents "\"Where are Sophia's parents?\""
        convChoice girlMissing "\"A little girl has gone missing?\""
        convChoice whatYouDoing "\"What are you working on here?\""
      whatYouDoing = convNode $ do
        convText "\"Oh!\" she says, looking around at her mushrooms as if she\
          \ had forgotten all about them. \"I'm just tending the Snow\
          \ Mushrooms here. They're almost they only thing that will grow\
          \ during the winter here, so we eat a lot of them this time of\
          \ year.\"\n\
          \\n\"They're actually pretty tasty. I can sell you some of the\
          \ one's I've picked if you'd like to try them.\""
        convChoice buyMushrooms "\"Sure, we'll buy some.\" (Shop.)"
      buyMushrooms = convNode $ do
        startShopping [Right (PotionItemTag Mushroom)]
        convText "Laci packs the rest of the mushrooms she's collected back\
          \ into her bag, and then wipes a stray tear from her check. She\
          \ seems to be doing a little better now, at least for the moment."
      girlMissing = convNode $ convText "\"I...\" Laci looks around; she\
        \ seems suddenly worried. \"I shouldn't really talk about it, I...\" \
        \ She seems to be trying to make up her mind about something. \"You\
        \ should just talk to the mayor. At the town hall.\" She points to\
        \ the north end of town. \"Mayor Jarmir will tell you what's\
        \ happened. Maybe you can help us?\""
      whereParents = convNode $ convText "\"Dorvan and Eithne,\" she answers. \
        \ \"They live in the house in the northwest corner of the village.\" \
        \ She shakes her head. \"Sophia's their only child. What must they\
        \ be thinking now?\""
    convNode $ do
      convText "A young woman is walking around the mushroom patches here,\
        \ examining each one, and occasionally picking one that seems to be\
        \ ready and placing it carefully into the small cloth sack she's\
        \ carrying over her shoulder, which is already about half full. As\
        \ she goes about her work, she is weeping quietly to herself. As you\
        \ approach, she loses it a bit and has to stop what she is doing in\
        \ order to stand there and sob."
      convChoice (return ()) "\"Er, sorry to interrupt. We'll be going\" \
        \ (Leave.)"
      convChoice waitForHer "(Wait for her to be ready.)"
      convChoice whatsWrong "\"Why are you crying?\""
  -- Sophia's house:
  simpleTownsperson 711833 TownManRed "Dorvan" ImmobileAI $ \_ge -> do
    narrate "TODO"
  simpleTownsperson 092833 TownWomanBlue "Eithne"
                    ImmobileAI $ \_ -> conversation $ do
    narrate "TODO"
  -- Outdoor guards:
  simpleTownsperson 528013 GuardSmallShield "Kolmancok"
                    (GuardAI 0 "Kolmancok" FaceLeft) $ \_ -> conversation $ do
    narrate "TODO"
  simpleTownsperson 209831 GuardWoman "Reta"
                    (GuardAI 5 "Reta" FaceRight) $ \_ -> conversation $ do
    narrate "TODO"
  simpleTownsperson 502809 GuardSmallShield "Pavel"
                    (PatrolAI "Pavel" "PavelPatrol") $ \_ -> conversation $ do
    narrate "TODO"
  -- Town hall:
  uniqueDevice 490018 "TownHallSign" signRadius $ \_ _ -> do
    narrate "The sign mounted on the wall reads:\n\n\
      \      {b}TOWN HALL{_}"
  simpleTownsperson 309815 GuardLargeShield "Ivan"
                    (GuardAI 5 "Ivan" FaceRight) $ \_ -> conversation $ do
    let
      niceArmor = convNode $ do
        convText "Ivan just grunts again, and says nothing.\n\n\
          \It kind of seemed like an appreciative grunt, though. He probably\
          \ appreciated your complement, right? You just hope he'll let you\
          \ side with him in that barfight."
    convNode $ do
      convText "A soldier stands guard by the door here. You do a slight\
        \ double-take as you walk by him; something seems a little off about\
        \ his proportions. But quickly you realize that he is simply\
        \ {i}big{_}. He's about six and a half feet tall, with unbelievibly\
        \ broad shoulders, and biceps about as big around as your thighs. The\
        \ next time you're choosing sides in a barfight, make sure you're on\
        \ this guy's side.\n\n\
        \He carries a large shield and broadsword, and his armor, though not\
        \ fancy-looking, seems to be of unusually high quality.\n\
        \\n\"Ivan,\" he grunts in response to your greeting. Even his voice\
        \ reeks of brawn. \"Talk to th' mayor.\" He gestures towards the\
        \ seat at the back of the room. You wait for him to say any more. \
        \ \"Th' mayor,\" he repeats."
      convChoice (return ()) "\"Okay. We'll do that.\" (Leave.)"
      convChoice niceArmor "\"That's an impressive-looking suit of armor\
        \ you've got there.\""
  simpleTownsperson 290184 TownManBlue "Jarmir"
                    ImmobileAI $ \_ -> conversation $ do
    narrate "TODO"
    -- Talking to the mayor opens the east gate (see the trigger above).
    writeVar eastGateOpen True
  -- Tavern:
  simpleTownsperson 470982 TownWomanApron "Marunda"
                    (DrunkAI "MarundaZone") $ \_ -> conversation $ do
    narrate "TODO"
  simpleTownsperson 309810 TownManYellow "Zivon"
                    (GuardAI 5 "Zivon" FaceRight) $ \_ -> conversation $ do
    narrate "TODO"
  -- Tistra's house:
  simpleTownsperson 509834 TownChildBlue "Tistra"
                    (DrunkAI "TistraZone") $ \_ -> conversation $ do
    narrate "TODO"
  -- Graveyard:
  once 558092 (walkIn "Graveyard") $ do
    narrate "You find the village graveyard tucked away behind the trees\
      \ here. The gravestones, and the little cobblestone pavement at the\
      \ center, are actually quite lovely. Despite the troubles that this\
      \ village must be facing, it seems that they really put effort into\
      \ honoring their dead, and each other."
  uniqueDevice 779109 "Grave1" signRadius $ \_ _ -> do
    narrate "The carving on the gravestone reads:\n\n\
      \      {b}SVELA BARTHOLD{_}\n\
      \      1219-1271\n\n\
      \      {i}The people of Holmgare{_}\n\
      \      {i}will always remember{_}\n\
      \      {i}your kindness.{_}\n\n\
      \It would seem, from the dates, that this particular grave was put here\
      \ only a few years ago."
  uniqueDevice 806981 "Grave2" signRadius $ \_ _ -> do
    narrate "The carving on the gravestone reads:\n\n\
      \      {b}ALBION DORVA{_}\n\
      \      1086-1160\n\n\
      \      {i}Tenth mayor of Holmgare{_}\n\
      \      {i}\"The one who endures to{_}\n\
      \      {i}the end will be saved.\"{_}"
-------------------------------------------------------------------------------
|
mdsteele/fallback
|
src/Fallback/Scenario/Triggers/Holmgare.hs
|
gpl-3.0
| 13,293
| 0
| 23
| 3,868
| 1,312
| 610
| 702
| 117
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CopySnapshot
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Copies a point-in-time snapshot of an Amazon EBS volume and stores it in
-- Amazon S3. You can copy the snapshot within the same region or from one
-- region to another. You can use the snapshot to create Amazon EBS volumes or
-- Amazon Machine Images (AMIs). The snapshot is copied to the regional endpoint
-- that you send the HTTP request to.
--
-- Copies of encrypted Amazon EBS snapshots remain encrypted. Copies of
-- unencrypted snapshots remain unencrypted.
--
-- Copying snapshots that were encrypted with non-default AWS Key Management
-- Service (KMS) master keys is not supported at this time.
--
-- For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-copy-snapshot.html Copying an Amazon EBS Snapshot> in the /AmazonElastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CopySnapshot.html>
module Network.AWS.EC2.CopySnapshot
(
-- * Request
CopySnapshot
-- ** Request constructor
, copySnapshot
-- ** Request lenses
, csDescription
, csDestinationRegion
, csDryRun
, csPresignedUrl
, csSourceRegion
, csSourceSnapshotId
-- * Response
, CopySnapshotResponse
-- ** Response constructor
, copySnapshotResponse
-- ** Response lenses
, csrSnapshotId
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request parameters for the EC2 @CopySnapshot@ action.
--
-- Only the source region and source snapshot ID are mandatory; the other
-- fields default to 'Nothing' (see the 'copySnapshot' smart constructor).
data CopySnapshot = CopySnapshot
    { _csDescription       :: Maybe Text -- ^ Free-form description for the copy.
    , _csDestinationRegion :: Maybe Text -- ^ Target region (required inside 'csPresignedUrl').
    , _csDryRun            :: Maybe Bool -- ^ When true, only validates the request.
    , _csPresignedUrl      :: Maybe Text -- ^ Pre-signed URL, needed for encrypted snapshots.
    , _csSourceRegion      :: Text       -- ^ Region holding the snapshot to copy.
    , _csSourceSnapshotId  :: Text       -- ^ ID of the snapshot to copy.
    } deriving (Eq, Ord, Read, Show)
-- | Build a 'CopySnapshot' request from its two mandatory fields.
--
-- Required:
--
-- * 'csSourceRegion' @::@ 'Text'
--
-- * 'csSourceSnapshotId' @::@ 'Text'
--
-- The optional fields ('csDescription', 'csDestinationRegion', 'csDryRun'
-- and 'csPresignedUrl') start out as 'Nothing' and can be set through
-- their lenses.
copySnapshot :: Text -- ^ 'csSourceRegion'
             -> Text -- ^ 'csSourceSnapshotId'
             -> CopySnapshot
copySnapshot region snapshotId = CopySnapshot
    { _csDescription       = Nothing
    , _csDestinationRegion = Nothing
    , _csDryRun            = Nothing
    , _csPresignedUrl      = Nothing
    , _csSourceRegion      = region
    , _csSourceSnapshotId  = snapshotId
    }
-- | A description for the new Amazon EBS snapshot.
csDescription :: Lens' CopySnapshot (Maybe Text)
csDescription = lens _csDescription (\rq v -> rq { _csDescription = v })

-- | The destination region of the copy operation.  This parameter is
-- required inside the 'csPresignedUrl'.
csDestinationRegion :: Lens' CopySnapshot (Maybe Text)
csDestinationRegion = lens _csDestinationRegion (\rq v -> rq { _csDestinationRegion = v })

-- | When set, the request is validated but not executed.
csDryRun :: Lens' CopySnapshot (Maybe Bool)
csDryRun = lens _csDryRun (\rq v -> rq { _csDryRun = v })

-- | The pre-signed URL that facilitates copying an /encrypted/ snapshot;
-- optional in all other cases.  It must target the source endpoint, name
-- the 'CopySnapshot' action, carry the 'SourceRegion', 'SourceSnapshotId'
-- and 'DestinationRegion' parameters, and be signed with AWS Signature
-- Version 4 (same scheme as Amazon S3 query-string authentication).  An
-- invalid or badly signed URL makes the copy fail asynchronously and the
-- snapshot move to an @error@ state.
csPresignedUrl :: Lens' CopySnapshot (Maybe Text)
csPresignedUrl = lens _csPresignedUrl (\rq v -> rq { _csPresignedUrl = v })

-- | The ID of the region that contains the snapshot to be copied.
csSourceRegion :: Lens' CopySnapshot Text
csSourceRegion = lens _csSourceRegion (\rq v -> rq { _csSourceRegion = v })

-- | The ID of the Amazon EBS snapshot to copy.
csSourceSnapshotId :: Lens' CopySnapshot Text
csSourceSnapshotId = lens _csSourceSnapshotId (\rq v -> rq { _csSourceSnapshotId = v })
-- | Response of the @CopySnapshot@ action: just the new snapshot's ID,
-- when the service returned one.
newtype CopySnapshotResponse = CopySnapshotResponse
    { _csrSnapshotId :: Maybe Text -- ^ ID of the newly created snapshot.
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | Build an empty 'CopySnapshotResponse'.
--
-- 'csrSnapshotId' starts out as 'Nothing' and is normally filled in by
-- the XML response parser.
copySnapshotResponse :: CopySnapshotResponse
copySnapshotResponse = CopySnapshotResponse Nothing

-- | The ID of the new snapshot.
csrSnapshotId :: Lens' CopySnapshotResponse (Maybe Text)
csrSnapshotId = lens _csrSnapshotId (\rs v -> rs { _csrSnapshotId = v })
-- | All EC2 Query API calls go to the service root.
instance ToPath CopySnapshot where
    toPath = const "/"

-- | Serialise the request as Query-API parameters; absent optional
-- fields are simply omitted by '=?'.
instance ToQuery CopySnapshot where
    toQuery CopySnapshot{..} = mconcat
        [ "Description"       =? _csDescription
        , "DestinationRegion" =? _csDestinationRegion
        , "DryRun"            =? _csDryRun
        , "PresignedUrl"      =? _csPresignedUrl
        , "SourceRegion"      =? _csSourceRegion
        , "SourceSnapshotId"  =? _csSourceSnapshotId
        ]

-- | No custom headers are required.
instance ToHeaders CopySnapshot

-- | Wires the request to the EC2 service: POSTed as @CopySnapshot@,
-- with an XML response body.
instance AWSRequest CopySnapshot where
    type Sv CopySnapshot = EC2
    type Rs CopySnapshot = CopySnapshotResponse

    request  = post "CopySnapshot"
    response = xmlResponse

-- | Pull the optional @snapshotId@ element out of the response XML.
instance FromXML CopySnapshotResponse where
    parseXML x = CopySnapshotResponse
        <$> x .@? "snapshotId"
|
dysinger/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/CopySnapshot.hs
|
mpl-2.0
| 6,884
| 0
| 9
| 1,443
| 793
| 483
| 310
| 86
| 1
|
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Route53
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Amazon Route 53 is a scalable Domain Name System (DNS) web service. It
-- provides secure and reliable routing to your infrastructure that uses
-- Amazon Web Services (AWS) products, such as Amazon Elastic Compute Cloud
-- (Amazon EC2), Elastic Load Balancing, or Amazon Simple Storage Service
-- (Amazon S3). You can also use Amazon Route 53 to route users to your
-- infrastructure outside of AWS.
--
-- /See:/ <http://docs.aws.amazon.com/Route53/latest/APIReference/Welcome.html AWS API Reference>
module Network.AWS.Route53
(
-- * Service Configuration
route53
-- * Errors
-- $errors
-- ** HealthCheckVersionMismatch
, _HealthCheckVersionMismatch
-- ** InvalidInput
, _InvalidInput
-- ** HostedZoneNotEmpty
, _HostedZoneNotEmpty
-- ** InvalidArgument
, _InvalidArgument
-- ** DelegationSetAlreadyReusable
, _DelegationSetAlreadyReusable
-- ** PriorRequestNotComplete
, _PriorRequestNotComplete
-- ** InvalidChangeBatch
, _InvalidChangeBatch
-- ** DelegationSetNotReusable
, _DelegationSetNotReusable
-- ** InvalidDomainName
, _InvalidDomainName
-- ** HostedZoneNotFound
, _HostedZoneNotFound
-- ** DelegationSetInUse
, _DelegationSetInUse
-- ** NoSuchDelegationSet
, _NoSuchDelegationSet
-- ** HealthCheckAlreadyExists
, _HealthCheckAlreadyExists
-- ** NoSuchGeoLocation
, _NoSuchGeoLocation
-- ** DelegationSetNotAvailable
, _DelegationSetNotAvailable
-- ** VPCAssociationNotFound
, _VPCAssociationNotFound
-- ** ThrottlingException
, _ThrottlingException
-- ** NoSuchChange
, _NoSuchChange
-- ** LimitsExceeded
, _LimitsExceeded
-- ** IncompatibleVersion
, _IncompatibleVersion
-- ** PublicZoneVPCAssociation
, _PublicZoneVPCAssociation
-- ** NoSuchHostedZone
, _NoSuchHostedZone
-- ** TooManyHostedZones
, _TooManyHostedZones
-- ** HealthCheckInUse
, _HealthCheckInUse
-- ** DelegationSetAlreadyCreated
, _DelegationSetAlreadyCreated
-- ** ConflictingDomainExists
, _ConflictingDomainExists
-- ** LastVPCAssociation
, _LastVPCAssociation
-- ** TooManyHealthChecks
, _TooManyHealthChecks
-- ** NoSuchHealthCheck
, _NoSuchHealthCheck
-- ** InvalidVPCId
, _InvalidVPCId
-- ** HostedZoneAlreadyExists
, _HostedZoneAlreadyExists
-- * Waiters
-- $waiters
-- ** ResourceRecordSetsChanged
, resourceRecordSetsChanged
-- * Operations
-- $operations
-- ** AssociateVPCWithHostedZone
, module Network.AWS.Route53.AssociateVPCWithHostedZone
-- ** GetCheckerIPRanges
, module Network.AWS.Route53.GetCheckerIPRanges
-- ** GetHealthCheckLastFailureReason
, module Network.AWS.Route53.GetHealthCheckLastFailureReason
-- ** DeleteReusableDelegationSet
, module Network.AWS.Route53.DeleteReusableDelegationSet
-- ** ListHostedZonesByName
, module Network.AWS.Route53.ListHostedZonesByName
-- ** ListReusableDelegationSets
, module Network.AWS.Route53.ListReusableDelegationSets
-- ** ListTagsForResource
, module Network.AWS.Route53.ListTagsForResource
-- ** GetChange
, module Network.AWS.Route53.GetChange
-- ** ChangeResourceRecordSets
, module Network.AWS.Route53.ChangeResourceRecordSets
-- ** DeleteHealthCheck
, module Network.AWS.Route53.DeleteHealthCheck
-- ** UpdateHealthCheck
, module Network.AWS.Route53.UpdateHealthCheck
-- ** CreateHostedZone
, module Network.AWS.Route53.CreateHostedZone
-- ** DisassociateVPCFromHostedZone
, module Network.AWS.Route53.DisassociateVPCFromHostedZone
-- ** CreateHealthCheck
, module Network.AWS.Route53.CreateHealthCheck
-- ** ChangeTagsForResource
, module Network.AWS.Route53.ChangeTagsForResource
-- ** ListHostedZones (Paginated)
, module Network.AWS.Route53.ListHostedZones
-- ** ListGeoLocations
, module Network.AWS.Route53.ListGeoLocations
-- ** GetHostedZone
, module Network.AWS.Route53.GetHostedZone
-- ** GetHealthCheck
, module Network.AWS.Route53.GetHealthCheck
-- ** ListResourceRecordSets (Paginated)
, module Network.AWS.Route53.ListResourceRecordSets
-- ** CreateReusableDelegationSet
, module Network.AWS.Route53.CreateReusableDelegationSet
-- ** GetHealthCheckCount
, module Network.AWS.Route53.GetHealthCheckCount
-- ** GetHostedZoneCount
, module Network.AWS.Route53.GetHostedZoneCount
-- ** GetReusableDelegationSet
, module Network.AWS.Route53.GetReusableDelegationSet
-- ** UpdateHostedZoneComment
, module Network.AWS.Route53.UpdateHostedZoneComment
-- ** GetHealthCheckStatus
, module Network.AWS.Route53.GetHealthCheckStatus
-- ** ListHealthChecks (Paginated)
, module Network.AWS.Route53.ListHealthChecks
-- ** DeleteHostedZone
, module Network.AWS.Route53.DeleteHostedZone
-- ** GetGeoLocation
, module Network.AWS.Route53.GetGeoLocation
-- ** ListTagsForResources
, module Network.AWS.Route53.ListTagsForResources
-- * Types
-- ** Re-exported Types
, module Network.AWS.Route53.Internal
-- ** ChangeAction
, ChangeAction (..)
-- ** ChangeStatus
, ChangeStatus (..)
-- ** Failover
, Failover (..)
-- ** HealthCheckType
, HealthCheckType (..)
-- ** RecordType
, RecordType (..)
-- ** TagResourceType
, TagResourceType (..)
-- ** VPCRegion
, VPCRegion (..)
-- ** AliasTarget
, AliasTarget
, aliasTarget
, atHostedZoneId
, atDNSName
, atEvaluateTargetHealth
-- ** Change
, Change
, change
, cAction
, cResourceRecordSet
-- ** ChangeBatch
, ChangeBatch
, changeBatch
, cbComment
, cbChanges
-- ** ChangeInfo
, ChangeInfo
, changeInfo
, ciComment
, ciId
, ciStatus
, ciSubmittedAt
-- ** DelegationSet
, DelegationSet
, delegationSet
, dsId
, dsCallerReference
, dsNameServers
-- ** GeoLocation
, GeoLocation
, geoLocation
, glSubdivisionCode
, glCountryCode
, glContinentCode
-- ** GeoLocationDetails
, GeoLocationDetails
, geoLocationDetails
, gldSubdivisionName
, gldSubdivisionCode
, gldCountryName
, gldCountryCode
, gldContinentCode
, gldContinentName
-- ** HealthCheck
, HealthCheck
, healthCheck
, hcId
, hcCallerReference
, hcHealthCheckConfig
, hcHealthCheckVersion
-- ** HealthCheckConfig
, HealthCheckConfig
, healthCheckConfig
, hccFailureThreshold
, hccIPAddress
, hccSearchString
, hccResourcePath
, hccFullyQualifiedDomainName
, hccRequestInterval
, hccPort
, hccType
-- ** HealthCheckObservation
, HealthCheckObservation
, healthCheckObservation
, hcoIPAddress
, hcoStatusReport
-- ** HostedZone
, HostedZone
, hostedZone
, hzConfig
, hzResourceRecordSetCount
, hzId
, hzName
, hzCallerReference
-- ** HostedZoneConfig
, HostedZoneConfig
, hostedZoneConfig
, hzcPrivateZone
, hzcComment
-- ** ResourceRecord
, ResourceRecord
, resourceRecord
, rrValue
-- ** ResourceRecordSet
, ResourceRecordSet
, resourceRecordSet
, rrsTTL
, rrsResourceRecords
, rrsAliasTarget
, rrsWeight
, rrsSetIdentifier
, rrsFailover
, rrsHealthCheckId
, rrsRegion
, rrsGeoLocation
, rrsName
, rrsType
-- ** ResourceTagSet
, ResourceTagSet
, resourceTagSet
, rtsResourceId
, rtsResourceType
, rtsTags
-- ** StatusReport
, StatusReport
, statusReport
, srStatus
, srCheckedTime
-- ** Tag
, Tag
, tag
, tagValue
, tagKey
-- ** VPC
, VPC
, vpc
, vpcVPCRegion
, vpcVPCId
) where
import Network.AWS.Route53.AssociateVPCWithHostedZone
import Network.AWS.Route53.ChangeResourceRecordSets
import Network.AWS.Route53.ChangeTagsForResource
import Network.AWS.Route53.CreateHealthCheck
import Network.AWS.Route53.CreateHostedZone
import Network.AWS.Route53.CreateReusableDelegationSet
import Network.AWS.Route53.DeleteHealthCheck
import Network.AWS.Route53.DeleteHostedZone
import Network.AWS.Route53.DeleteReusableDelegationSet
import Network.AWS.Route53.DisassociateVPCFromHostedZone
import Network.AWS.Route53.GetChange
import Network.AWS.Route53.GetCheckerIPRanges
import Network.AWS.Route53.GetGeoLocation
import Network.AWS.Route53.GetHealthCheck
import Network.AWS.Route53.GetHealthCheckCount
import Network.AWS.Route53.GetHealthCheckLastFailureReason
import Network.AWS.Route53.GetHealthCheckStatus
import Network.AWS.Route53.GetHostedZone
import Network.AWS.Route53.GetHostedZoneCount
import Network.AWS.Route53.GetReusableDelegationSet
import Network.AWS.Route53.Internal
import Network.AWS.Route53.ListGeoLocations
import Network.AWS.Route53.ListHealthChecks
import Network.AWS.Route53.ListHostedZones
import Network.AWS.Route53.ListHostedZonesByName
import Network.AWS.Route53.ListResourceRecordSets
import Network.AWS.Route53.ListReusableDelegationSets
import Network.AWS.Route53.ListTagsForResource
import Network.AWS.Route53.ListTagsForResources
import Network.AWS.Route53.Types
import Network.AWS.Route53.UpdateHealthCheck
import Network.AWS.Route53.UpdateHostedZoneComment
import Network.AWS.Route53.Waiters
{- $errors
Error matchers are designed for use with the functions provided by
<http://hackage.haskell.org/package/lens/docs/Control-Exception-Lens.html Control.Exception.Lens>.
This allows catching (and rethrowing) service specific errors returned
by 'Route53'.
-}
{- $operations
Some AWS operations return results that are incomplete and require subsequent
requests in order to obtain the entire result set. The process of sending
subsequent requests to continue where a previous request left off is called
pagination. For example, the 'ListObjects' operation of Amazon S3 returns up to
1000 objects at a time, and you must send subsequent requests with the
appropriate Marker in order to retrieve the next page of results.
Operations that have an 'AWSPager' instance can transparently perform subsequent
requests, correctly setting Markers and other request facets to iterate through
the entire result set of a truncated API operation. Operations which support
this have an additional note in the documentation.
Many operations have the ability to filter results on the server side. See the
individual operation parameters for details.
-}
{- $waiters
Waiters poll by repeatedly sending a request until some remote success condition
configured by the 'Wait' specification is fulfilled. The 'Wait' specification
determines how many attempts should be made, in addition to delay and retry strategies.
-}
|
fmapfmapfmap/amazonka
|
amazonka-route53/gen/Network/AWS/Route53.hs
|
mpl-2.0
| 11,721
| 0
| 5
| 2,603
| 1,057
| 778
| 279
| 209
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.S3.GetBucketLogging
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the logging status of a bucket and the permissions users have to
-- view and modify that status. To use GET, you must be the bucket owner.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonS3/latest/API/GetBucketLogging.html AWS API Reference> for GetBucketLogging.
module Network.AWS.S3.GetBucketLogging
(
-- * Creating a Request
getBucketLogging
, GetBucketLogging
-- * Request Lenses
, gBucket
-- * Destructuring the Response
, getBucketLoggingResponse
, GetBucketLoggingResponse
-- * Response Lenses
, gblrsLoggingEnabled
, gblrsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.S3.Types
import Network.AWS.S3.Types.Product
-- | Request for the S3 @GET Bucket logging@ operation; carries only the
-- bucket name.
--
-- /See:/ 'getBucketLogging' smart constructor.
newtype GetBucketLogging = GetBucketLogging'
    { _gBucket :: BucketName -- ^ Bucket whose logging status is queried.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Construct a 'GetBucketLogging' request for the given bucket.
--
-- 'gBucket' is the only field; adjust it afterwards with its lens if
-- needed.
getBucketLogging
    :: BucketName -- ^ 'gBucket'
    -> GetBucketLogging
getBucketLogging = GetBucketLogging'
-- | The bucket whose logging status is requested.
gBucket :: Lens' GetBucketLogging BucketName
gBucket = lens _gBucket (\rq b -> rq { _gBucket = b })
-- | Issue the request as an HTTP GET against S3 and decode the XML
-- body: the optional @LoggingEnabled@ element plus the HTTP status code.
instance AWSRequest GetBucketLogging where
        type Rs GetBucketLogging = GetBucketLoggingResponse
        request = get s3
        response
          = receiveXML
              (\ s h x ->
                 GetBucketLoggingResponse' <$>
                   (x .@? "LoggingEnabled") <*> (pure (fromEnum s)))

-- | No extra headers are needed.
instance ToHeaders GetBucketLogging where
        toHeaders = const mempty

-- | The request path is just the bucket: @/<bucket>@.
instance ToPath GetBucketLogging where
        toPath GetBucketLogging'{..}
          = mconcat ["/", toBS _gBucket]

-- | The @?logging@ sub-resource selects the logging configuration.
instance ToQuery GetBucketLogging where
        toQuery = const (mconcat ["logging"])
-- | Response of @GET Bucket logging@: the logging configuration (if one
-- is set) and the HTTP response status.
--
-- /See:/ 'getBucketLoggingResponse' smart constructor.
data GetBucketLoggingResponse = GetBucketLoggingResponse'
    { _gblrsLoggingEnabled :: !(Maybe LoggingEnabled) -- ^ Logging config, when enabled.
    , _gblrsResponseStatus :: !Int                    -- ^ HTTP status code.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Construct a 'GetBucketLoggingResponse' carrying the given HTTP
-- status; 'gblrsLoggingEnabled' starts out as 'Nothing'.
getBucketLoggingResponse
    :: Int -- ^ 'gblrsResponseStatus'
    -> GetBucketLoggingResponse
getBucketLoggingResponse status =
    GetBucketLoggingResponse'
    { _gblrsLoggingEnabled = Nothing
    , _gblrsResponseStatus = status
    }

-- | The bucket's logging configuration, when logging is enabled.
gblrsLoggingEnabled :: Lens' GetBucketLoggingResponse (Maybe LoggingEnabled)
gblrsLoggingEnabled =
    lens _gblrsLoggingEnabled (\rs v -> rs { _gblrsLoggingEnabled = v })

-- | The response status code.
gblrsResponseStatus :: Lens' GetBucketLoggingResponse Int
gblrsResponseStatus =
    lens _gblrsResponseStatus (\rs v -> rs { _gblrsResponseStatus = v })
|
olorin/amazonka
|
amazonka-s3/gen/Network/AWS/S3/GetBucketLogging.hs
|
mpl-2.0
| 3,845
| 0
| 13
| 767
| 530
| 318
| 212
| 67
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQuery.Jobs.Cancel
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Requests that a job be cancelled. This call will return immediately, and
-- the client will need to poll for the job status to see if the cancel
-- completed successfully. Cancelled jobs may still incur costs.
--
-- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.jobs.cancel@.
module Network.Google.Resource.BigQuery.Jobs.Cancel
(
-- * REST Resource
JobsCancelResource
-- * Creating a Request
, jobsCancel
, JobsCancel
-- * Request Lenses
, jcJobId
, jcProjectId
) where
import Network.Google.BigQuery.Types
import Network.Google.Prelude
-- | A resource alias for the @bigquery.jobs.cancel@ method which the
-- 'JobsCancel' request conforms to: a POST to
-- @/bigquery/v2/projects/{projectId}/jobs/{jobId}/cancel@ returning a
-- JSON 'JobCancelResponse'.
type JobsCancelResource =
     "bigquery" :>
       "v2" :>
         "projects" :>
           Capture "projectId" Text :>
             "jobs" :>
               Capture "jobId" Text :>
                 "cancel" :>
                   QueryParam "alt" AltJSON :>
                     Post '[JSON] JobCancelResponse
-- | Requests that a job be cancelled. This call will return immediately, and
-- the client will need to poll for the job status to see if the cancel
-- completed successfully. Cancelled jobs may still incur costs.
--
-- /See:/ 'jobsCancel' smart constructor.
data JobsCancel = JobsCancel'
    { _jcJobId     :: !Text -- ^ Job to cancel (required).
    , _jcProjectId :: !Text -- ^ Project owning the job (required).
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Construct a 'JobsCancel' request.  Both fields are required:
--
-- * 'jcJobId'
--
-- * 'jcProjectId'
jobsCancel
    :: Text -- ^ 'jcJobId'
    -> Text -- ^ 'jcProjectId'
    -> JobsCancel
jobsCancel jobId projectId =
    JobsCancel'
    { _jcJobId = jobId
    , _jcProjectId = projectId
    }

-- | [Required] Job ID of the job to cancel
jcJobId :: Lens' JobsCancel Text
jcJobId = lens _jcJobId (\rq v -> rq { _jcJobId = v })

-- | [Required] Project ID of the job to cancel
jcProjectId :: Lens' JobsCancel Text
jcProjectId = lens _jcProjectId (\rq v -> rq { _jcProjectId = v })
-- | Dispatch the request through the generated 'JobsCancelResource'
-- client against the BigQuery service; either the bigquery or the
-- cloud-platform OAuth scope authorises it.
instance GoogleRequest JobsCancel where
        type Rs JobsCancel = JobCancelResponse
        type Scopes JobsCancel =
             '["https://www.googleapis.com/auth/bigquery",
               "https://www.googleapis.com/auth/cloud-platform"]
        requestClient JobsCancel'{..}
          = go _jcProjectId _jcJobId (Just AltJSON)
              bigQueryService
          where go
                  = buildClient (Proxy :: Proxy JobsCancelResource)
                      mempty
|
rueshyna/gogol
|
gogol-bigquery/gen/Network/Google/Resource/BigQuery/Jobs/Cancel.hs
|
mpl-2.0
| 3,388
| 0
| 15
| 832
| 392
| 237
| 155
| 63
| 1
|
{-# LANGUAGE FlexibleInstances, OverloadedStrings #-}
module Web.SpiraJira.Transition (
decodeTransitions,
transitionJson,
Transition
) where
import Data.Aeson(Value(..), (.:), (.=), FromJSON(..), ToJSON(..), eitherDecode, encode, object)
import Control.Monad
import Control.Applicative
import Data.Text (Text, unpack, pack)
import qualified Data.ByteString.Lazy as LBS
-- | A JIRA workflow transition: its display name plus its id.  The id
-- is kept as a 'String' because it is parsed from (and encoded to) a
-- JSON string in the instances below.
data Transition = Transition {
  name :: Text,
  ident :: String } deriving (Eq)
-- | Render as @\<id\> : \<name\>@.
instance Show Transition where
  show (Transition n i) = concat [i, " : ", unpack n]

-- | Encode as @{\"transition\": {\"id\": \<ident\>}}@; the name is not
-- serialised.
instance ToJSON Transition where
  toJSON (Transition _ i) = object ["transition" .= inner]
    where inner = object ["id" .= i]

-- | Decode from an object carrying @name@ and @id@ keys; anything else
-- fails the parse.
instance FromJSON Transition where
  parseJSON (Object v) = Transition <$> v .: "name" <*> v .: "id"
  parseJSON _ = mzero
-- | Decode the top-level @{\"transitions\": [...]}@ wrapper, yielding
-- the transitions themselves.  (List instance; needs the module's
-- @FlexibleInstances@ pragma.)
--
-- The previous do-block ended in a redundant @return \$ transitions@
-- (hlint: \"redundant return\"); the bind below is equivalent.
instance FromJSON [Transition] where
  parseJSON (Object v) = mapM parseJSON =<< v .: "transitions"
  parseJSON _ = mzero
-- | Decode a response body into a list of transitions; a parse failure
-- is reported as an error message on the 'Left'.
decodeTransitions :: LBS.ByteString -> Either String [Transition]
decodeTransitions = eitherDecode

-- | Serialise a transition request for the given transition id.  The
-- name field is irrelevant to encoding (see 'ToJSON') and is left empty.
transitionJson :: String -> LBS.ByteString
transitionJson transId = encode (Transition (pack "") transId)
|
tobytripp/SpiraJira
|
src/Web/SpiraJira/Transition.hs
|
lgpl-3.0
| 1,175
| 0
| 11
| 218
| 382
| 212
| 170
| 32
| 1
|
{-
Created : 2013 Sep 09 (Mon) 17:41:15 by carr.
Last Modified : 2017 Jul 26 (Wed) 17:42:13 by Harold Carr.
-}
module Main where
import qualified Data.Text as T
import MakeMP3Copies
import Shelly
-- | The directory the traversal starts from: the current directory.
fromRoot :: Shelly.FilePath
fromRoot = fromText (T.pack ".")

-- | Run 'processDir' over the current directory with verbose logging.
main :: IO ()
main = shelly . verbosely $ processDir fromRoot
-- End of file.
|
haroldcarr/make-mp3-copies
|
Main.hs
|
unlicense
| 374
| 0
| 7
| 98
| 67
| 39
| 28
| 9
| 1
|
module Data.P440.Domain.ZSO where
import Data.P440.Domain.SimpleTypes
import Data.P440.Domain.ComplexTypes
import Data.Text (Text)
-- 2.8 Запрос НО об остатках — request from the tax authority (НО) for
-- account balances.  Identifiers are kept in Russian so they line up
-- with the element names of the 440-П exchange format; the English
-- glosses below are literal translations — confirm against the
-- official schema.

-- | Message envelope: electronic-document id, sender metadata and the
-- balance request itself.
data Файл = Файл {
    идЭС :: GUID              -- ^ electronic document id
    ,типИнф :: Text           -- ^ information type
    ,версПрог :: Text         -- ^ software version
    ,телОтпр :: Text          -- ^ sender's phone number
    ,должнОтпр :: Text        -- ^ sender's position/title
    ,фамОтпр :: Text          -- ^ sender's surname
    ,версФорм :: Text         -- ^ form version
    ,запноостат :: ЗАПНООСТАТ -- ^ the request body
    } deriving (Eq, Show)

-- | The request body: number and id, the article of the Tax Code
-- (НК РФ) it is based on, kind and grounds of the request, signing
-- date, the issuing tax authority, the addressee bank (or Bank-of-Russia
-- subdivision), the payer, the account selection, and the signing
-- official.
data ЗАПНООСТАТ = ЗАПНООСТАТ {
    номЗапр :: Text
    ,идЗапр :: ИдЗапр
    ,стНКРФ :: Text
    ,видЗапр :: Text
    ,основЗапр :: Text
    ,датаПодп :: Date
    ,свНО :: СвНО
    ,банкИлиУБР :: СвБанкИлиСвУБР
    ,свПл :: СвПл
    ,поВсемИлиПоУказанным :: ПоВсемИлиПоУказанным
    ,руководитель :: РукНО
    } deriving (Eq, Show)

-- | The payer: a legal entity (ПлЮЛ), a sole proprietor (ПлИП) or a
-- natural person (ПФЛ).
data СвПл = ПлЮЛ' ПлЮЛ
          | ПлИП' ПлИП
          | ПФЛ' ПФЛ
          deriving (Eq, Show)

-- | Addressee: an ordinary bank, or a Bank-of-Russia subdivision (УБР).
data СвБанкИлиСвУБР = СвБанк Банк
                    | СвУБР УБР
                    deriving (Eq, Show)

-- | Balances either as of a single date, or over a period.
data НаДатуИлиВПериод = НаДату Date
                      | ВПериод Date Date
                      deriving (Eq, Show)

-- | \"All accounts\" variant: request type, as-of date, and the
-- date/period selector.
data ПоВсем = ПоВсем {
    типЗапр :: Text
    ,остПоСост :: Date
    ,наДатуИлиЗаПериод :: НаДатуИлиВПериод
    } deriving (Eq, Show)

-- | Account owner: the person named in the request, or another party.
data ВладСч = УказЛицо' Text
            | ИноеЛицо' ИноеЛицо
            deriving (Eq, Show)

-- | \"Another party\" owner: legal entity, sole proprietor or natural
-- person.
data ИноеЛицо = ИноеЛицоПлЮЛ ПлЮЛ
              | ИноеЛицоПлИП ПлИП
              | ИноеЛицоПФЛ ПФЛ
              deriving (Eq, Show)

-- | A single account: its number plus its owner.
data Счет = Счет {
    номСч :: Text
    ,владСч :: ВладСч
    } deriving (Eq, Show)

-- | Scope of the request: all accounts, or an explicit list of them.
data ПоВсемИлиПоУказанным = ПоВсем' ПоВсем
                          | ПоУказанным' [Счет]
                          deriving (Eq, Show)
|
Macil-dev/p440
|
src/Data/P440/Domain/ZSO.hs
|
unlicense
| 2,278
| 42
| 11
| 633
| 1,002
| 531
| 471
| 56
| 0
|
-- |Formatted and colored output to the console.
module Crawling.Hephaestos.CLI.Format (
format,
error,
report,
input,
emphasize,
cliAction,) where
import Prelude hiding (error)
import Control.Concurrent.STM.Utils
import Control.Monad.IO.Class
import System.Console.ANSI
import System.IO.Unsafe (unsafePerformIO)
-- |Applies the given SGR formatting, runs the action, and then resets
-- the console with @setSGR [Reset]@, returning the action's result.
--
-- NOTE(review): the reset is plain sequencing, so if the action throws,
-- the console stays formatted — confirm whether callers rely on that
-- before turning this into a bracket.
format :: MonadIO m => [SGR] -> m a -> m a
format sgr action = do
    liftIO (setSGR sgr)
    result <- action
    liftIO (setSGR [Reset])
    return result
-- |Vivid foreground colour; shared by the formatters below.
vividFg :: Color -> SGR
vividFg = SetColor Foreground Vivid

-- |Formats the console to print an error.
error :: MonadIO m => m a -> m a
error = format [vividFg Red]

-- |Formats the console to print a report of a finished activity (i.e.
-- a finished downloading process).
report :: MonadIO m => m a -> m a
report = format [vividFg Cyan]

-- |Formats the console to print a request for user input.
input :: MonadIO m => m a -> m a
input = format [vividFg White]

-- |Formats the console to print emphasized (bold, vivid white) text.
emphasize :: MonadIO m => m a -> m a
emphasize = format [vividFg White, SetConsoleIntensity BoldIntensity]
-- |Global mutex (a one-task limit) serialising access to the CLI.
--
-- The NOINLINE pragma is essential: without it, GHC is free to inline or
-- duplicate this 'unsafePerformIO' thunk, creating several independent
-- \"mutexes\" and silently breaking mutual exclusion (see the
-- 'System.IO.Unsafe.unsafePerformIO' documentation).
cliMutex :: TaskLimit
cliMutex = unsafePerformIO $ newTaskLimitIO (Just 1)
{-# NOINLINE cliMutex #-}
-- |Performs an IO action while holding 'cliMutex', so that console
-- output is not interleaved in a multi-threaded environment.
--
-- __Note__: calls to 'cliAction' must not nest, directly or through a
-- callee that also uses it — the inner call would wait on the mutex the
-- outer call already holds, deadlocking.
cliAction :: MonadIO m => IO a -> m a
cliAction act = withTaskLimit cliMutex (liftIO act)
|
jtapolczai/Hephaestos
|
Crawling/Hephaestos/CLI/Format.hs
|
apache-2.0
| 1,753
| 0
| 11
| 346
| 387
| 208
| 179
| 27
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Application.HXournal.Type.Coroutine
-- Copyright : (c) 2011, 2012 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-----------------------------------------------------------------------------
module Application.HXournal.Type.Coroutine where
import Data.IORef
import Application.HXournal.Type.Event
import Application.HXournal.Type.XournalState
import Control.Monad.Coroutine
import Control.Monad.Coroutine.SuspensionFunctors
import Data.Functor.Identity (Identity(..))
-- | A coroutine that neither yields nor awaits: plain suspendable steps.
type Trampoline m x = Coroutine Identity m x
-- | A coroutine that produces @a@ values as it runs.
type Generator a m x = Coroutine (Yield a) m x
-- | A coroutine that consumes @a@ values as it runs.
type Iteratee a m x = Coroutine (Await a) m x
-- | A suspended consumer waiting for the next 'MyEvent'.
type SusAwait = Await MyEvent (Iteratee MyEvent XournalStateIO ())
-- | Mutable slot holding the currently suspended event coroutine.
type TRef = IORef SusAwait
-- | Mutable slot holding the application state.
type SRef = IORef HXournalState
-- | The application's main event-processing coroutine.
type MainCoroutine a = Iteratee MyEvent XournalStateIO a
|
wavewave/hxournal
|
lib/Application/HXournal/Type/Coroutine.hs
|
bsd-2-clause
| 999
| 0
| 8
| 137
| 180
| 114
| 66
| 14
| 0
|
module TyClCoRec.DList where
import GhcPlugins
-- | A no-op GHC plugin: 'defaultPlugin' leaves the compilation pipeline
-- unchanged.  (The commented-out record braces suggest custom passes
-- were planned here at some point.)
plugin :: Plugin
plugin = defaultPlugin -- {
                       -- }
|
frantisekfarka/tycl-corec-plu
|
src/TyClCoRec/DList.hs
|
bsd-2-clause
| 101
| 0
| 4
| 19
| 21
| 14
| 7
| 4
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Controller
( withGraphmind
) where
import Graphmind
import Settings
import Yesod.Helpers.Static
import Yesod.Helpers.Auth
import Database.Persist.GenericSql
-- Import all relevant handler modules here.
import Handler.Handlers
-- This line actually creates our YesodSite instance. It is the second half
-- of the call to mkYesodData which occurs in Graphmind.hs. Please see
-- the comments there for more details.
mkYesodDispatch "Graphmind" resourcesGraphmind
-- Some default handlers that ship with the Yesod site template. You will
-- very rarely need to modify this.

-- | Serves the site icon from the local file @favicon.ico@.
getFaviconR :: Handler ()
getFaviconR = sendFile "image/x-icon" "favicon.ico"

-- | Serves a robots.txt that names all crawlers but states no rules.
getRobotsR :: Handler RepPlain
getRobotsR = return $ RepPlain $ toContent "User-agent: *"
-- | Allocates resources (the database connection pool), runs the pending
-- migrations, builds the WAI application and hands it to the given
-- continuation.  This is the place for automatic database migrations.
withGraphmind :: (Application -> IO a) -> IO a
withGraphmind cont = Settings.withConnectionPool $ \pool -> do
    runConnectionPool (runMigration migrateAll) pool
    let site = Graphmind staticSite pool
    toWaiApp site >>= cont
  where
    staticSite = fileLookupDir Settings.staticdir typeByExt
|
shepheb/graphmind
|
Controller.hs
|
bsd-2-clause
| 1,339
| 0
| 12
| 218
| 195
| 106
| 89
| 21
| 1
|
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, RankNTypes,
TupleSections, TypeOperators, UndecidableInstances #-}
module Control.Monad.Generator (
GeneratorT (..), yield
) where
import Control.Arrow (second)
import Control.Applicative (Alternative, Applicative, (<*>), (<|>), pure, empty)
import Control.Monad (MonadPlus, ap, liftM, mzero, mplus)
import Control.Monad.Fix (MonadFix, mfix)
import Control.Monad.Trans (MonadIO, MonadTrans, lift, liftIO)
import Control.Monad.Error (MonadError, catchError, throwError)
import Control.Monad.Reader (MonadReader, ask, local)
import Control.Monad.State (MonadState, get, put)
-- | A generator (coroutine) monad transformer.  Running one step in the
-- underlying monad either finishes with the final result ('Left'), or
-- yields a value together with the rest of the generator ('Right').
newtype GeneratorT y m a = GeneratorT {
  runGeneratorT :: m (Either a (y, GeneratorT y m a))
}
-- | Suspend the generator, handing @y@ to the consumer; the continuation
-- immediately returns @()@.
yield :: Monad m => y -> GeneratorT y m ()
yield y = GeneratorT (return (Right (y, return ())))
-- Pre-AMP style: Functor/Applicative are derived from the Monad
-- instance below ('liftM' / 'return' / 'ap').
instance Monad m => Functor (GeneratorT y m) where
  fmap = liftM

instance Monad m => Applicative (GeneratorT y m) where
  pure = return
  (<*>) = ap

instance MonadPlus m => Alternative (GeneratorT y m) where
  empty = mzero
  (<|>) = mplus

-- | 'return' finishes immediately; '>>=' threads @f@ through the final
-- result and distributes @(>>= f)@ over the stored continuation after a
-- yield.
instance Monad m => Monad (GeneratorT y m) where
  return = GeneratorT . return . Left
  x >>= f = GeneratorT $
    runGeneratorT x >>=
    either (runGeneratorT . f)
           (return . Right . second (>>= f))

instance MonadIO m => MonadIO (GeneratorT y m) where
  liftIO = lift . liftIO

-- | Ties the knot on the final ('Left') result; yields are passed
-- through while the fixpoint is pushed into the continuation.
instance MonadFix m => MonadFix (GeneratorT y m) where
  mfix f = GeneratorT $ mfix mfix'
    where
      mfix' (Left x) = runGeneratorT (f x)
      mfix' (Right (y, n)) = return $
        Right (y, GeneratorT (runGeneratorT n >>= mfix'))

-- | Combines both generators via the underlying monad's 'mplus'.
--
-- NOTE(review): after pulling a step from @x@, the remainder is rebuilt
-- as @mplus y n@ — the /other whole argument/ first — and symmetrically
-- for @y@.  This looks like deliberate interleaving, but it revisits the
-- full other generator at every step; confirm the intended semantics
-- before relying on yield order.
instance MonadPlus m => MonadPlus (GeneratorT y m) where
  mzero = GeneratorT mzero
  mplus x y = GeneratorT $
    liftM (fmap $ second $ mplus y) (runGeneratorT x)
    `mplus`
    liftM (fmap $ second $ mplus x) (runGeneratorT y)

-- | A lifted action runs one step and finishes with its result.
instance MonadTrans (GeneratorT y) where
  lift = GeneratorT . liftM Left

-- | Errors short-circuit; the handler is installed on every subsequent
-- step by distributing @(`catchError` f)@ over the continuation.
instance MonadError e m => MonadError e (GeneratorT y m) where
  throwError = lift . throwError
  catchError x f = GeneratorT $
    liftM (either Left (Right . second (`catchError` f))) $
    catchError (runGeneratorT x) (runGeneratorT . f)

-- | 'local' is applied to the current step and re-applied to the
-- continuation, so the modified environment covers the whole generator.
instance MonadReader r m => MonadReader r (GeneratorT y m) where
  ask = lift ask
  local f x = GeneratorT $
    liftM (either Left (Right . second (local f))) $
    local f (runGeneratorT x)

instance MonadState s m => MonadState s (GeneratorT y m) where
  get = lift get
  put = lift . put
|
YellPika/nregions
|
src/Control/Monad/Generator.hs
|
bsd-3-clause
| 2,547
| 0
| 15
| 601
| 945
| 505
| 440
| 58
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
module Ivory.Language.Array where
import Ivory.Language.IBool
import Ivory.Language.Area
import Ivory.Language.Proxy
import Ivory.Language.Ref
import Ivory.Language.Sint
import Ivory.Language.IIntegral
import Ivory.Language.Type
import Ivory.Language.Cast
import qualified Ivory.Language.Syntax as I
import GHC.TypeLits (Nat)
--------------------------------------------------------------------------------
-- Indexes
-- Note: it is assumed in ivory-opts and the ivory-backend that the associated
-- type is an Sint32, so this should not be changed in the front-end without
-- modifying the other packages.
type IxRep = Sint32
-- | The representation type of a @TyIndex@, this is fixed to @Int32@ for the
-- time being.
ixRep :: I.Type
ixRep = ivoryType (Proxy :: Proxy IxRep)
-- | Values in the range @0 .. n-1@.
newtype Ix (n :: Nat) = Ix { getIx :: I.Expr }
instance (ANat n) => IvoryType (Ix n) where
  -- The AST-level type records the bound @n@ taken from the type level.
  ivoryType _ = I.TyIndex (fromTypeNat (aNat :: NatType n))
instance (ANat n) => IvoryVar (Ix n) where
  wrapVar = wrapVarExpr
  unwrapExpr = getIx
instance (ANat n) => IvoryExpr (Ix n) where
  wrapExpr = Ix
instance (ANat n) => IvoryStore (Ix n)
-- | All arithmetic goes through 'mkIx', which wraps the raw expression
-- in @ExpToIx@ so the result is reduced modulo the bound.
instance (ANat n) => Num (Ix n) where
  (*) = ixBinop (*)
  (-) = ixBinop (-)
  (+) = ixBinop (+)
  abs = ixUnary abs
  signum = ixUnary signum
  fromInteger = mkIx . fromInteger
instance (ANat n) => IvoryEq (Ix n)
instance (ANat n) => IvoryOrd (Ix n)
-- | Convert an index to its representation type ('IxRep').  This only
-- re-tags the underlying expression; no runtime conversion is emitted.
fromIx :: ANat n => Ix n -> IxRep
fromIx = wrapExpr . unwrapExpr
-- | Casting from a bounded Ivory expression to an index. This is safe,
-- although the value may be truncated. Furthermore, indexes are always
-- positive.
toIx :: forall a n. (SafeCast a IxRep, ANat n) => a -> Ix n
toIx = mkIx . unwrapExpr . (safeCast :: a -> IxRep)
-- | The number of elements that an index covers.
ixSize :: forall n. (ANat n) => Ix n -> Integer
ixSize _ = fromTypeNat (aNat :: NatType n)
-- | An index may be cast to any integral type whose maximum size can
-- hold every value in @0 .. n-1@; otherwise code generation fails
-- loudly via 'error'.
instance ( ANat n, IvoryIntegral to, Default to
         ) => SafeCast (Ix n) to where
  safeCast ix | Just s <- toMaxSize (ivoryType (Proxy :: Proxy to))
              , ixSize ix <= s
              = ivoryCast (fromIx ix)
              | otherwise
              = error ixCastError
-- -- It doesn't make sense to case an index downwards dynamically.
-- inBounds _ _ = error ixCastError
ixCastError :: String
ixCastError = "Idx cast : cannot cast index: result type is too small."
-- XXX don't export
-- Wrap a raw expression as an index, recording the bound so the backend
-- can reduce the value modulo @n@.
mkIx :: forall n. (ANat n) => I.Expr -> Ix n
mkIx e = wrapExpr (I.ExpToIx e base)
  where
  base = ixSize (undefined :: Ix n)
-- XXX don't export
-- Lift a binary operator on raw expressions to indexes, re-wrapping the
-- result through 'mkIx'.
ixBinop :: (ANat n)
        => (I.Expr -> I.Expr -> I.Expr)
        -> (Ix n -> Ix n -> Ix n)
ixBinop f x y = mkIx $ f (rawIxVal x) (rawIxVal y)
-- XXX don't export
-- Unary analogue of 'ixBinop'.
ixUnary :: (ANat n) => (I.Expr -> I.Expr) -> (Ix n -> Ix n)
ixUnary f = mkIx . f . rawIxVal
-- XXX don't export
-- Strip the @ExpToIx@ wrapper (bare variables are accepted as-is);
-- anything else violates a front-end invariant and aborts.
rawIxVal :: ANat n => Ix n -> I.Expr
rawIxVal n = case unwrapExpr n of
  I.ExpToIx e _ -> e
  e@(I.ExpVar _) -> e
  e -> error $ "Front-end: can't unwrap ixVal: "
            ++ show e
-- Arrays ----------------------------------------------------------------------
-- | The number of elements in the array, recovered from the type-level
-- length (the reference argument is never inspected).
arrayLen :: forall s len area n ref.
            (Num n, ANat len, IvoryArea area, IvoryRef ref)
         => ref s (Array len area) -> n
arrayLen _ = fromInteger (fromTypeNat (aNat :: NatType len))
-- | Array indexing.
(!) :: forall s len area ref.
       ( ANat len, IvoryArea area, IvoryRef ref
       , IvoryExpr (ref s (Array len area)), IvoryExpr (ref s area))
    => ref s (Array len area) -> Ix len -> ref s area
arr ! ix = wrapExpr (I.ExpIndex ty (unwrapExpr arr) ixRep (getIx ix))
  where
  ty = ivoryArea (Proxy :: Proxy (Array len area))
|
Hodapp87/ivory
|
ivory/src/Ivory/Language/Array.hs
|
bsd-3-clause
| 3,979
| 0
| 14
| 964
| 1,213
| 656
| 557
| 78
| 3
|
module Rachel.Doc (
primitivesReport
, showEntity
, printEnvironment
) where
import Rachel.Types
import Rachel.Primitive
import Rachel.Evaluation
import qualified Data.Map as Map
import Data.Char (isLetter)
-- | One 'showEntity' report per primitive, joined into a single string.
primitivesReport :: String
primitivesReport = unlines (map showEntity allPrimitives)
-- | Render an 'Entity' as three lines: its type signature, its value
-- binding, and its pretty-printed fixity information.
showEntity :: Entity -> String
showEntity (Entity i t f v) = unlines
  [ m i ++ " : " ++ pretty t
  , m i ++ " = " ++ pretty v
  , pretty f
  ]
  where
  -- Wrap operator identifiers in parentheses; identifiers starting with
  -- a letter or underscore are printed verbatim.  Matching on the head
  -- directly avoids the partial 'head' used previously.
  m [] = "#no identifier#"
  m (x:_)
    | isLetter x || x == '_' = i
    | otherwise              = "(" ++ i ++ ")"
-- | Print every entry of the current environment to stdout, one
-- 'showEntity' report per entry.
printEnvironment :: RachelM ()
printEnvironment = do
  e <- getEnv
  -- Rebuild a full 'Entity' from the (identifier, (type, fixity, value))
  -- shape stored in the environment map.
  let pr (i,(t,f,v)) = rachelIO $ putStrLn $ showEntity $ Entity i t f v
  mapM_ pr $ Map.toList e
|
Daniel-Diaz/rachel
|
Rachel/Doc.hs
|
bsd-3-clause
| 848
| 0
| 13
| 262
| 295
| 151
| 144
| 27
| 2
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.CSL.Data
-- Copyright : (c) John MacFarlane
-- License : BSD-style (see LICENSE)
--
-- Maintainer : John MacFarlane <fiddlosopher@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-----------------------------------------------------------------------------
module Text.CSL.Data
( getLocale
, getDefaultCSL
, langBase
) where
import System.FilePath ()
import qualified Data.ByteString.Lazy as L
#ifdef EMBED_DATA_FILES
import Data.Maybe (fromMaybe)
import Text.CSL.Data.Embedded (localeFiles, defaultCSL)
import qualified Data.ByteString as S
#else
import Paths_scholdoc_citeproc (getDataFileName)
import System.Directory (doesFileExist)
#endif
-- | Look up the locale XML for a (possibly region-qualified) language
-- code such as @\"en\"@ or @\"en-US\"@.  The empty string falls back to
-- en-US; 2-letter codes are expanded via 'langBase'; longer codes are
-- retried with their 2-letter base language before giving up.
getLocale :: String -> IO L.ByteString
getLocale s = do
#ifdef EMBED_DATA_FILES
  -- Embedded build: locales come from the compiled-in 'localeFiles' table.
  f <- case length s of
         0 -> maybe (return S.empty) return
                $ lookup "locales-en-US.xml" localeFiles
         2 -> let fn = ("locales-" ++ fromMaybe s (lookup s langBase) ++ ".xml")
              in case lookup fn localeFiles of
                   Just x' -> return x'
                   _ -> error $ "could not find locale data for " ++ s
         _ -> case lookup ("locales-" ++ take 5 s ++ ".xml") localeFiles of
                Just x' -> return x'
                _ -> -- try again with 2-letter locale
                  let s' = take 2 s in
                  case lookup ("locales-" ++ fromMaybe s'
                                 (lookup s' langBase) ++ ".xml") localeFiles of
                    Just x'' -> return x''
                    _ -> error $
                          "could not find locale data for " ++ s
  return $ L.fromChunks [f]
#else
  -- Data-files build: locales are read from the package data directory.
  f <- case length s of
         0 -> return "locales/locales-en-US.xml"
         2 -> getDataFileName ("locales/locales-" ++
                maybe "en-US" id (lookup s langBase) ++ ".xml")
         _ -> getDataFileName ("locales/locales-" ++ take 5 s ++ ".xml")
  exists <- doesFileExist f
  if not exists && length s > 2
     then getLocale $ take 2 s -- try again with base locale
     else L.readFile f
#endif
-- | The default CSL style (chicago-author-date), either embedded in the
-- binary or read from the package data directory, depending on the
-- EMBED_DATA_FILES build flag.
getDefaultCSL :: IO L.ByteString
getDefaultCSL =
#ifdef EMBED_DATA_FILES
  return $ L.fromChunks [defaultCSL]
#else
  getDataFileName "chicago-author-date.csl" >>= L.readFile
#endif
-- | Mapping from 2-letter language codes to the default full locale
-- used when no region is given (e.g. @\"en\"@ -> @\"en-US\"@).
langBase :: [(String, String)]
langBase
  = [("af", "af-ZA")
    ,("ar", "ar-AR")
    ,("bg", "bg-BG")
    ,("ca", "ca-AD")
    ,("cs", "cs-CZ")
    ,("da", "da-DK")
    ,("de", "de-DE")
    ,("el", "el-GR")
    ,("en", "en-US")
    ,("es", "es-ES")
    ,("et", "et-EE")
    ,("fa", "fa-IR")
    ,("fi", "fi-FI")
    ,("fr", "fr-FR")
    ,("he", "he-IL")
    ,("hr", "hr-HR")
    ,("hu", "hu-HU")
    ,("is", "is-IS")
    ,("it", "it-IT")
    ,("ja", "ja-JP")
    ,("km", "km-KH")
    ,("ko", "ko-KR")
    ,("lt", "lt-LT")
    ,("lv", "lv-LV")
    ,("mn", "mn-MN")
    ,("nb", "nb-NO")
    ,("nl", "nl-NL")
    ,("nn", "nn-NO")
    ,("pl", "pl-PL")
    ,("pt", "pt-PT")
    ,("ro", "ro-RO")
    ,("ru", "ru-RU")
    ,("sk", "sk-SK")
    ,("sl", "sl-SI")
    ,("sr", "sr-RS")
    ,("sv", "sv-SE")
    ,("th", "th-TH")
    ,("tr", "tr-TR")
    ,("uk", "uk-UA")
    ,("vi", "vi-VN")
    ,("zh", "zh-CN")
    ]
|
timtylin/scholdoc-citeproc
|
src/Text/CSL/Data.hs
|
bsd-3-clause
| 3,469
| 0
| 23
| 1,098
| 784
| 471
| 313
| 66
| 4
|
{-# LANGUAGE EmptyDataDecls, FlexibleInstances #-}
module System.Linux.Cgroups.Types
( -- * datatypes
Cgroup(..)
, Subsystem(..)
, Hierarchy(..)
, Spec
-- * classes
, HasCgroup(..)
, CgroupValue(..)
, CgroupRead(..)
, CgroupBox(..)
, TextSerial(..)
-- * helper datatypes
, Relative
, Absolute
-- * functions
, isNamed
, mntName
, fromLines
)
where
import BasicPrelude
import Data.Default
import Data.Text (Text)
import Filesystem
import qualified Filesystem.Path.CurrentOS as F
-- Phantom tags distinguishing cgroup paths relative to a mount point
-- from absolute filesystem paths.
data Relative
data Absolute
-- | A @cgroup@ associates a set of tasks with a set of parameters for one
-- or more subsystems
newtype Cgroup a = Cgroup {unCgroup::FilePath} deriving (Eq, Show)
-- | A @subsystem@ is a module that makes use of the task grouping
-- facilities provided by cgroups to treat groups of tasks in
-- particular ways. A subsystem is typically a "resource controller" that
-- schedules a resource or applies per-cgroup limits, but it may be
-- anything that wants to act on a group of processes, e.g. a
-- virtualization subsystem.
data Subsystem = Controller Text
               | Named Text
  deriving (Eq, Show)
-- | The directory name the subsystem is mounted under.
mntName :: Subsystem -> Text
mntName (Controller t) = t
mntName (Named t) = t
-- | True only for named (non-controller) hierarchies.
isNamed :: Subsystem -> Bool
isNamed (Named _) = True
isNamed _ = False
-- | A @hierarchy@ is a set of cgroups arranged in a tree, such that
-- every task in the system is in exactly one of the cgroups in the
-- hierarchy, and a set of subsystems; each subsystem has system-specific
-- state attached to each cgroup in the hierarchy. Each hierarchy has
-- an instance of the cgroup virtual filesystem associated with it.
data Hierarchy a = Hierarchy Subsystem (Cgroup a)
  deriving (Eq, Show)
-- | Things that carry a cgroup path: read it with 'cgroup', replace it
-- with 'acgroup'.
class HasCgroup a where
  cgroup :: a -> FilePath
  acgroup :: a -> FilePath -> a
instance HasCgroup (Cgroup Absolute) where
  cgroup (Cgroup f) = f
  acgroup (Cgroup _) g = Cgroup g
instance HasCgroup (Hierarchy Absolute) where
  cgroup (Hierarchy _ (Cgroup f)) = f
  acgroup (Hierarchy x _) g = Hierarchy x (Cgroup g)
-- | Identifies which subsystem and parameter file a value belongs to.
class CgroupValue a where
  subsystem :: a -> Subsystem
  param :: a -> Text
-- | (De)serialisation to the textual format used in cgroup parameter files.
class TextSerial a where
  tencode :: a -> Text
  tdecode :: Text -> a
-- | Values that can be read from a cgroup parameter file.  The default
-- 'get' derives the file name as @\<subsystem\>.\<param\>@ under the
-- hierarchy's cgroup directory and parses the contents with 'unprint'.
class (CgroupValue a) => CgroupRead a where
  unprint :: Text -> a
  get :: Hierarchy Absolute -> IO a
  -- NOTE(review): 'undefined' serves purely as a type proxy here;
  -- this is only safe while 'subsystem' and 'param' never force
  -- their argument -- confirm for each instance.
  get (Hierarchy _ (Cgroup f)) = inner undefined
    where inner :: (CgroupRead a) => a -> IO a
          inner hack = unprint <$> readTextFile (f </> F.fromText ssys <.> parm)
            where
              ssys = mntName (subsystem hack)
              parm = param hack
              {-# INLINE ssys #-}
              {-# INLINE parm #-}
          {-# INLINE inner #-}
-- | Values that can also be written back to a cgroup parameter file.
-- The default 'set' writes @pprint v@ to @\<subsystem\>.\<param\>@;
-- 'modify' is read-modify-write built from 'get' and 'set' (note: not
-- atomic with respect to concurrent writers).
class (CgroupValue a, CgroupRead a) => CgroupBox a where
  pprint :: a -> Text
  set :: Hierarchy Absolute -> a -> IO ()
  modify :: (a -> a) -> Hierarchy Absolute -> IO ()
  -- default realizations
  set (Hierarchy _ (Cgroup f)) v = writeTextFile (f </> (F.fromText . mntName $! subsystem v) <.> (param v)) (pprint v)
  modify f h = get h >>= set h . f
-- | A parser spec: the first word of a line to match on, paired with a
-- function that folds the remaining words of that line into the value.
type Spec a = (Text, a -> [Text] -> a)
-- | Build a value from a multi-line text: each line is split into
-- words, matched against the specs by its first word, and folded into
-- the accumulator.  Lines with no matching spec and lines with fewer
-- than two words are skipped.  (The original had no clause for empty
-- word-lists, so a blank input line caused a pattern-match failure.)
fromLines :: (Default a) => [Spec a] -> Text -> a
fromLines spec = go def spec . map words . lines
  where
    go x _ [] = x
    go x [] (_:ls) = go x spec ls            -- no spec matched: skip line
    go x _ ([]:ls) = go x spec ls            -- blank line: nothing to match
    go x _ ([_]:ls) = go x spec ls           -- key without values: skip
    go x ((sn,f):sx) l@((ln:lv):ls)
      | sn == ln  = go (f x lv) spec ls
      | otherwise = go x sx l
|
qnikst/cgroups-hs
|
System/Linux/Cgroups/Types.hs
|
bsd-3-clause
| 3,453
| 0
| 15
| 885
| 1,043
| 567
| 476
| -1
| -1
|
{-# LANGUAGE NoMonomorphismRestriction #-}
-- |Includes event handling concerns and mouse, application and keyboard state
-- query mechanisms.
module Tea.Event ( Event (..)
, EventQuery (..)
, (+>)
, eventHandler
, handleEvents
, updateEvents
, (?)
, is
, mouseCoords
, mouseButtons
, currentModKeys
) where
import qualified Graphics.UI.SDL as SDL
import Data.Array ((//),(!))
import Control.Applicative ((<$>))
import Control.Monad.State
import Control.Monad.Trans
import Control.Monad(when)
import Data.Monoid
import Tea.Input
import Tea.Tea
import Tea.TeaState
-- | A data type used for phrasing queries passed to the `?' and `is' functions
data EventQuery = KeyDown KeyCode -- ^ Is the specified key currently held down?
| KeyUp KeyCode -- ^ Opposite of KeyDown
| ModOn Mod -- ^ Is the specified modifier key currently enabled?
| ModOff Mod -- ^ Opposite of ModOn
| MouseIn (Int, Int) (Int, Int) -- ^ Is the mouse between these two coordinates?
| MouseOutside (Int, Int) (Int, Int) -- ^ Opposite of MouseIn
| AnyKeyDown -- ^ Is any key currently held down?
| NoKeyDown -- ^ Opposite of AnyKeyDown
| MouseDown Button -- ^ Is the specified mouse button being clicked?
| MouseUp Button -- ^ Opposite of MouseDown
| AnyMouseDown -- ^ Are any mouse buttons currently being clicked?
| NoMouseDown -- ^ Opposite of AnyMouseDown
| AppVisible -- ^ Is the app window currently non-minimized?
| AppInvisible -- ^ Opposite of AppVisible
deriving (Show, Eq)
-- | A monoidal data type that specifies what to do when an event occurs. Use the
-- default no-op `eventHandler' (or `mempty') constant and override fields rather
-- than specify this directly.
-- You can combine the functionality of many Events using `+>' or `mappend', which
-- will run both handlers in the order they were appended.
data Event s = Event { keyDown :: KeyCode -> [Mod] -> Tea s () -- ^ When a key is pressed
, keyUp :: KeyCode -> [Mod] -> Tea s () -- ^ When a key stops being pressed
, mouseDown :: Button -> (Int, Int) -> Tea s () -- ^ When a mouse button is pressed
, mouseUp :: Button -> (Int, Int) -> Tea s () -- ^ When a mouse button stops being pressed
, mouseMove :: (Int, Int) -> [Button] -> Tea s () -- ^ When the mouse moves
, mouseGained :: Tea s () -- ^ When the application gains mouse focus
, mouseLost :: Tea s () -- ^ When the application loses mouse focus
, keyboardGained :: Tea s () -- ^ When the application gains keyboard focus
, keyboardLost :: Tea s () -- ^ When the application loses keyboard focus
, exit :: Tea s () -- ^ When the application recieves the exit signal
, minimized :: Tea s () -- ^ When the application is minimized
, restored :: Tea s () -- ^ When the application ceases being minimized
}
instance Monoid (Event s) where
  -- Field-wise sequencing: for every event kind, the left operand's
  -- handler runs before the right operand's.
  mappend (Event a1 a2 a3 a4 a5 a7 a8 a9 a10 a11 a12 a13) (Event b1 b2 b3 b4 b5 b7 b8 b9 b10 b11 b12 b13) = Event {
      keyDown = \key mods -> a1 key mods >> b1 key mods,
      keyUp = \key mods -> a2 key mods >> b2 key mods,
      mouseDown = \c b -> a3 c b >> b3 c b,
      mouseUp = \c b -> a4 c b >> b4 c b,
      mouseMove = \c b -> a5 c b >> b5 c b,
      mouseGained = a7 >> b7,
      mouseLost = a8 >> b8,
      keyboardGained = a9 >> b9,
      keyboardLost = a10 >> b10,
      exit = a11 >> b11,
      minimized = a12 >> b12,
      restored = a13 >> b13
    }
  -- Identity element: every handler is a no-op.
  mempty = Event {
      keyDown = \key mods -> return (),
      keyUp = \key mods -> return (),
      mouseDown = \x b -> return (),
      mouseUp = \x b -> return (),
      mouseMove = \x b -> return (),
      mouseGained = return (),
      mouseLost = return (),
      keyboardGained = return (),
      keyboardLost = return (),
      exit = return (),
      minimized = return (),
      restored = return ()
    }
-- No-op action.  NOTE(review): appears unused within this module --
-- candidate for removal.
z :: Tea s ()
z = return ()
-- |Combine two event handlers. Analogous to mappend
(+>) :: Event s -> Event s -> Event s
(+>) = mappend
-- |A default, no-op event handler for overriding. Analogous to mempty.
eventHandler :: Event s
eventHandler = mempty
-- |Flush the event queue and update mouse and keyboard state queries, but
-- do not run any event handlers. Add this to your game loop if you use
-- `?' and `is' but not `handleEvents'.
updateEvents :: Tea s ()
updateEvents = handleEvents eventHandler
-- |Flush the event queue, updating mouse and keyboard state queries, and
-- executing actions defined in the specified event handler.
handleEvents :: Event s -> Tea s ()
handleEvents e = let -- Bookkeeping handlers run first (so `?'/`is' state
                     -- is updated), then the user-supplied handlers.
                     e' = eventHandler {
                        keyDown = \code _-> (setEventQuery (KeyDown code) >> setEventQuery AnyKeyDown),
                        keyUp = \code _-> (setEventQuery (KeyUp code) >> setEventQuery NoKeyDown)
                     } +> e
                     -- Drain SDL's queue one event at a time until it
                     -- reports NoEvent.
                     this = do
                        event <- liftIO SDL.pollEvent
                        buttons <- mouseButtons
                        case event of
                          SDL.GotFocus l -> foldl (>>) (return ()) $ map gotFocus' l
                          SDL.LostFocus l -> foldl (>>) (return ()) $ map lostFocus' l
                          SDL.KeyDown ks -> keyDown e' (sdlKey (SDL.symKey ks)) (map sdlMod (SDL.symModifiers ks))
                          SDL.KeyUp ks -> keyUp e' (sdlKey (SDL.symKey ks)) (map sdlMod (SDL.symModifiers ks))
                          SDL.MouseButtonUp x y b -> mouseUp e' (sdlButton b) (fromIntegral x, fromIntegral y)
                          SDL.MouseButtonDown x y b -> mouseDown e' (sdlButton b) (fromIntegral x, fromIntegral y)
                          SDL.MouseMotion x y _ _ -> mouseMove e' (fromIntegral x, fromIntegral y) buttons
                          SDL.Quit -> exit e'
                          _ -> return ()
                        when (event /= SDL.NoEvent) this
                     -- SDL focus changes map onto the three gained/lost
                     -- handler pairs.
                     gotFocus' SDL.MouseFocus = mouseGained e'
                     gotFocus' SDL.InputFocus = keyboardGained e'
                     gotFocus' SDL.ApplicationFocus = restored e'
                     lostFocus' SDL.MouseFocus = mouseLost e'
                     lostFocus' SDL.InputFocus = keyboardLost e'
                     lostFocus' SDL.ApplicationFocus = minimized e'
                 in this
-- Record the state change implied by a key event into the internal
-- event state.  NOTE(review): only the four key-related queries are
-- handled; any other constructor hits 'undefined' and would crash if
-- ever reached.
setEventQuery (KeyDown code) = keyCodesModify (// [(code, True) ])
setEventQuery (KeyUp code) = keyCodesModify (// [(code, False)])
setEventQuery AnyKeyDown = anyKeyModify (+ 1)
setEventQuery NoKeyDown = anyKeyModify (subtract 1)
setEventQuery _ = undefined
-- Answer an 'EventQuery' against the current input state; each negative
-- query is the negation of its positive counterpart.
getEventQuery :: EventQuery -> Tea s Bool
getEventQuery (KeyDown code) = queryKeyCode code
getEventQuery (KeyUp code) = not <$> queryKeyCode code
getEventQuery (AnyKeyDown) = queryKeyDown
getEventQuery (NoKeyDown) = not <$> queryKeyDown
getEventQuery (ModOn code) = queryModState code
getEventQuery (ModOff code) = not <$> queryModState code
getEventQuery (MouseIn c1 c2) = queryMouseIn c1 c2
getEventQuery (MouseOutside c1 c2) = not <$> queryMouseIn c1 c2
getEventQuery (AppVisible) = queryAppVisible
getEventQuery (AppInvisible) = not <$> queryAppVisible
getEventQuery (AnyMouseDown) = queryMouseDown
getEventQuery (NoMouseDown) = not <$> queryMouseDown
getEventQuery (MouseDown button) = queryMouseButton button
getEventQuery (MouseUp button) = not <$> queryMouseButton button
-- Record-update helpers over the nested event state inside the TeaState.
eventStateModify f = modifyT $ \ts@(TS {_eventState = es}) -> ts {_eventState = f es}
keyCodesModify f = eventStateModify $ \es@(ES { keyCodes = s }) -> es { keyCodes = f s }
anyKeyModify f = eventStateModify $ \es@(ES { keysDown = s }) -> es { keysDown = f s }
-- State-query primitives backing 'getEventQuery'.
queryKeyCode code = (! code) <$> keyCodes <$> _eventState <$> getT
queryKeyDown = (> 0) <$> keysDown <$> _eventState <$> getT
queryMouseDown = (/= []) <$> mouseButtons
queryModState code = elem code <$> currentModKeys
queryMouseButton b = elem b <$> mouseButtons
queryMouseIn c1 c2 = within c1 c2 <$> mouseCoords
queryAppVisible = liftIO $ elem SDL.ApplicationFocus <$> SDL.getAppState
-- |Get the current modifier keys that are enabled
currentModKeys :: Tea s [Mod]
currentModKeys = liftIO $ map sdlMod <$> SDL.getModState
-- |Get the current mouse coordinates
mouseCoords :: Tea s (Int, Int)
mouseCoords = liftIO $ SDL.getMouseState >>= \(x, y, _) -> return (x,y)
-- |Get the currently pressed mouse buttons
mouseButtons :: Tea s [Button]
mouseButtons = liftIO $ SDL.getMouseState >>= \(_, _, l) -> return $ map sdlButton l
-- Strict interior test: true only when the point is strictly between
-- both corners (points on the boundary count as outside).
within (x1,y1) (x2,y2) (x, y) = x > x1 && y > y1 && x < x2 && y < y2
-- | Execute the specified Tea action if the EventQuery specified is true.
(?) :: EventQuery -> Tea s v -> Tea s ()
q ? m = getEventQuery q >>= flip when (m >> return ())
-- | Produces a boolean value based on the specified EventQuery.
is :: EventQuery -> Tea s Bool
is = getEventQuery
|
liamoc/tea-hs
|
Tea/Event.hs
|
bsd-3-clause
| 9,877
| 0
| 19
| 3,360
| 2,484
| 1,344
| 1,140
| -1
| -1
|
module Cube where
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
-- | Render an axis-aligned cube of half-width @w@ centred at the
-- origin, as six quads (24 vertices, 4 per face), in the same vertex
-- order as before.
cube w = renderPrimitive Quads $ mapM_ corner
  [ ( w,  w,  w), ( w,  w, -w), ( w, -w, -w), ( w, -w,  w)  -- +x face
  , ( w,  w,  w), ( w,  w, -w), (-w,  w, -w), (-w,  w,  w)  -- +y face
  , ( w,  w,  w), ( w, -w,  w), (-w, -w,  w), (-w,  w,  w)  -- +z face
  , (-w,  w,  w), (-w,  w, -w), (-w, -w, -w), (-w, -w,  w)  -- -x face
  , ( w, -w,  w), ( w, -w, -w), (-w, -w, -w), (-w, -w,  w)  -- -y face
  , ( w,  w, -w), ( w, -w, -w), (-w, -w, -w), (-w,  w, -w)  -- -z face
  ]
  where
    -- Emit one corner of a quad.
    corner (x, y, z) = vertex (Vertex3 x y z)
|
fnoble/3dconnexion-haskell
|
Cube.hs
|
bsd-3-clause
| 878
| 0
| 13
| 263
| 550
| 271
| 279
| 29
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PatternSynonyms #-}
-------------------------------------------------------------------
-- |
-- Module : Irreverent.Bitbucket.Cli.Commands.SetPipelineSSHKeys
-- Copyright : (C) 2017 - 2018 Irreverent Pixel Feats
-- License : BSD-style (see the file /LICENSE.md)
-- Maintainer : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Cli.Commands.SetPipelineSSHKeys (
-- * Functions
setPipelinesSSHKeys
) where
import Irreverent.Bitbucket.Cli.Error
import Irreverent.Bitbucket.Core (Username(..), RepoName(..))
import Irreverent.Bitbucket.Core.Control (BitbucketT(..))
import Irreverent.Bitbucket.Core.Data.Auth (Auth(..))
import Irreverent.Bitbucket.Core.Data.Common (PublicSSHKey(..), PrivateSSHKey(..))
import Irreverent.Bitbucket.Core.Data.Pipelines.NewSSHKeyPair (NewPipelinesSSHKeyPair(..))
import Irreverent.Bitbucket.Http.Repositories.Pipelines.AddSSHKeyPair (addSSHKeyPair)
import Irreverent.Bitbucket.Json.Pipelines.SSHKeyPair (PipelinesSSHKeyPairJsonV2(..))
import Ultra.Control.Monad.Bracket (MonadBracket)
import Ultra.Control.Monad.Catch (MonadCatch)
import Ultra.Control.Monad.Trans.Either (EitherT, firstEitherT, mapEitherT)
import qualified Ultra.Data.Text as T
import qualified Ultra.Data.Text.Encoding as T
import Ultra.System.IO (IOMode(..), withBinaryFile)
import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Network.Wreq.Session as S
import System.IO (stdout)
import Preamble
-- | Read an SSH key pair from the given file paths and install it as
-- the Bitbucket Pipelines key pair for the repository, pretty-printing
-- the API response to stdout.  File-open failures become
-- 'CliFileOpenError'; API failures become 'BitbucketAPIFail'.
setPipelinesSSHKeys
  :: (MonadBracket m, MonadCatch m, MonadIO m)
  => Auth
  -> Username
  -> RepoName
  -> T.Text -- ^ path to the private key file
  -> T.Text -- ^ path to the public key file
  -> EitherT CliError m ()
setPipelinesSSHKeys auth owner rname privatePath publicPath = do
  privateKey <- PrivateSSHKey <$> readKeyFile privatePath
  publicKey <- PublicSSHKey <$> readKeyFile publicPath
  firstEitherT BitbucketAPIFail . mapEitherT (flip runReaderT auth . runBitbucketT) $ do
    s <- liftIO S.newSession
    addSSHKeyPair s owner rname (NewPipelinesSSHKeyPair privateKey publicKey)
      >>= liftIO . BSL.hPutStr stdout . encodePretty . PipelinesSSHKeyPairJsonV2
  where
    -- Read a whole key file as UTF-8 text, mapping open failures to
    -- 'CliFileOpenError'.  Shared by both keys (was duplicated inline).
    readKeyFile path = firstEitherT CliFileOpenError $
      withBinaryFile path ReadMode $ \h ->
        liftIO (T.decodeUtf8 <$> BS.hGetContents h)
|
irreverent-pixel-feats/bitbucket
|
bitbucket-cli/src/Irreverent/Bitbucket/Cli/Commands/SetPipelineSSHKeys.hs
|
bsd-3-clause
| 2,462
| 0
| 17
| 313
| 560
| 335
| 225
| 43
| 1
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP, DisambiguateRecordFields, RecordWildCards #-}
module Nettle.OpenFlow.Packet (
-- * Sending packets
PacketOut (..)
, bufferedPacketOut
, unbufferedPacketOut
, receivedPacketOut
, BufferID
-- * Packets not handled by a switch
, PacketInfo (..)
, PacketInReason (..)
, NumBytes
, bufferedAtSwitch
) where
import qualified Data.ByteString as B
import Nettle.OpenFlow.Port
import Nettle.OpenFlow.Action
import Nettle.Ethernet.EthernetFrame
import Data.Word
import Data.Maybe (isJust)
-- | A switch can be remotely commanded to send a packet. The packet
-- can either be a packet buffered at the switch, in which case the
-- bufferID is provided, or it can be specified explicitly by giving
-- the packet data.
-- See the module comment above: either a buffered packet (by ID) or an
-- explicit frame, plus the actions the switch should apply to it.
data PacketOut
  = PacketOutRecord {
      bufferIDData :: !(Either BufferID B.ByteString), -- ^either a buffer ID or the data itself
      packetInPort :: !(Maybe PortID), -- ^the port at which the packet received, for the purposes of processing this command
      packetActions :: !ActionSequence -- ^actions to apply to the packet
    } deriving (Eq,Show)
-- |A switch may buffer a packet that it receives.
-- When it does so, the packet is assigned a bufferID
-- which can be used to refer to that packet.
type BufferID = Word32
-- | Constructs a @PacketOut@ value for a packet buffered at a switch,
-- referring to the packet by its buffer ID.
bufferedPacketOut :: BufferID -> Maybe PortID -> ActionSequence -> PacketOut
bufferedPacketOut bufID inPort actions =
  PacketOutRecord (Left bufID) inPort actions
-- | Constructs a @PacketOut@ value for an unbuffered packet, carrying
-- the packet data explicitly.
unbufferedPacketOut :: B.ByteString -> Maybe PortID -> ActionSequence -> PacketOut
unbufferedPacketOut pktData inPort actions =
  PacketOutRecord (Right pktData) inPort actions
-- | Constructs a @PacketOut@ value that processes the packet referred
-- to by the @PacketInfo@ value according to the specified actions: by
-- buffer ID if the switch buffered it, otherwise by its raw data.
receivedPacketOut :: PacketInfo -> ActionSequence -> PacketOut
receivedPacketOut pktInfo actions =
  case bufferID pktInfo of
    Nothing    -> unbufferedPacketOut (packetData pktInfo) inPort actions
    Just bufID -> bufferedPacketOut bufID inPort actions
  where
    inPort = Just (receivedOnPort pktInfo)
-- | A switch receives packets on its ports. If the packet matches
-- some flow rules, the highest priority rule is executed. If no
-- flow rule matches, the packet is sent to the controller. When
-- packet is sent to the controller, the switch sends a message
-- containing the following information.
data PacketInfo
  = PacketInfo {
      bufferID :: !(Maybe BufferID), -- ^buffer ID if packet buffered
      packetLength :: !NumBytes, -- ^full length of frame
      receivedOnPort :: !PortID, -- ^port on which frame was received
      reasonSent :: !PacketInReason, -- ^reason packet is being sent
      packetData :: !B.ByteString, -- ^ethernet frame, includes full packet only if no buffer ID
      enclosedFrame :: !(Either String EthernetFrame) -- ^result of parsing packetData field.
    } deriving (Show,Eq)
-- |A PacketInfo message includes the reason that the message
-- was sent, namely either there was no match, or there was
-- a match, and that match's actions included a Sent-To-Controller
-- action.
data PacketInReason = NotMatched | ExplicitSend deriving (Show,Read,Eq,Ord,Enum)
-- | The number of bytes in a packet.
type NumBytes = Int
-- | True iff the switch buffered this packet (a 'BufferID' is present).
bufferedAtSwitch :: PacketInfo -> Bool
bufferedAtSwitch = isJust . bufferID
|
brownsys/nettle-openflow
|
src/Nettle/OpenFlow/Packet.hs
|
bsd-3-clause
| 3,836
| 0
| 12
| 898
| 510
| 303
| 207
| 71
| 2
|
{-# LANGUAGE RankNTypes
,FlexibleContexts
,ScopedTypeVariables #-}
-- | Enumeratees - pass terminals variant.
--
-- Provides enumeratees that pass terminal markers ('EOF') to the inner
-- 'iteratee'.
--
-- Most enumeratees, upon receipt of @EOF@, will enter a done state and return
-- the inner iteratee without sending @EOF@ to it. This allows for composing
-- enumerators as in:
--
-- > myEnum extraData i = do
-- > nested <- enumFile "file" (mapChunks unpacker i)
-- > inner <- run nested
-- > enumList extraData inner
--
-- if @mapChunks unpacker@ sent 'EOF' to the inner iteratee @i@, there would
-- be no way to submit extra data to it after 'run'ing the result from
-- @enumFile@.
--
-- In certain cases, this is not the desired behavior. Consider:
--
-- > consumer :: Iteratee String IO ()
-- > consumer = icont (go 0)
-- > where
-- > go c (Chunk xs) = liftIO (putStr s) >> icont (go c)
-- > go 10 e = liftIO (putStr "10 loops complete")
-- > >> idone () (Chunk "")
-- > go n e = I.seek 0 >> icont (go (n+1))
--
-- The @consumer@ iteratee does not complete until after it has received
-- 10 @EOF@s. If you attempt to use it in a standard enumeratee, it will
-- never terminate. When the outer enumeratee is terminated, the inner
-- iteratee will remain in a @cont@ state, but in general there is no longer
-- any valid data for the continuation. The enumeratee itself must pass the
-- EOF marker to the inner iteratee and remain in a cont state until the inner
-- iteratee signals its completion.
--
-- All enumeratees in this module will pass 'EOF' terminators to the inner
-- iteratees.
module Data.Iteratee.PTerm (
-- * Nested iteratee combinators
mapChunksPT
,mapChunksMPT
,convStreamPT
,unfoldConvStreamPT
,unfoldConvStreamCheckPT
-- * ListLike analog functions
,breakEPT
,takePT
,takeUpToPT
,takeWhileEPT
,mapStreamPT
,rigidMapStreamPT
,filterPT
)
where
import Prelude hiding (head, drop, dropWhile, take, break, foldl, foldl1, length, filter, sum, product)
import Data.Iteratee.Iteratee
import Data.Iteratee.ListLike (drop,dropWhile)
import qualified Data.ListLike as LL
import Control.Monad.Trans.Class
import Control.Monad
-- ---------------------------------------------------
-- The converters show a different way of composing two iteratees:
-- `vertical' rather than `horizontal'
-- | Convert one stream into another with the supplied mapping function.
--
-- A version of 'mapChunks' that sends 'EOF's to the inner iteratee.
--
mapChunksPT :: (Monad m) => (s -> s') -> Enumeratee s s' m a
mapChunksPT f = eneeCheckIfDonePass (icont . step)
 where
  go = eneeCheckIfDonePass (icont . step)
  -- Chunks are mapped through f; NoData just re-arms the continuation.
  step k (Chunk xs) = doContEtee go k (f xs)
  step k NoData = continue (step k)
  -- The PT contract: EOF is forwarded to the inner iteratee, and its
  -- answer decides whether the enumeratee finishes or continues.
  step k s@(EOF mErr) = k (EOF mErr) >>= \ret -> case ret of
    ContDone a _ -> contDoneM (return a) s
    ContMore i' -> contMoreM $ go i'
    ContErr i' e' -> contErrM (go i') e'
{-# INLINE mapChunksPT #-}
-- | Convert a stream of @s@ to a stream of @s'@ using the supplied function.
--
-- A version of 'mapChunksM' that sends 'EOF's to the inner iteratee.
mapChunksMPT
  :: (Monad m)
  => (s -> m s')
  -> Enumeratee s s' m a
mapChunksMPT f = eneeCheckIfDonePass (icont . step)
 where
  go = eneeCheckIfDonePass (icont . step)
  -- step :: (Stream s' -> m (ContReturn s' m a))-> Stream s -> m (ContReturn s m (Iteratee s' m a))
  -- As mapChunksPT, but the mapping itself runs in the underlying monad.
  step k (Chunk xs) = f xs >>= doContEtee go k
  step k NoData = continue $ step k
  step k s@EOF{} = k (EOF Nothing) >>= \ret -> case ret of
    ContDone a _ -> contDoneM (idone a) s
    ContMore i -> contMoreM (go i)
    ContErr i e -> contErrM (go i) e
{-# INLINE mapChunksMPT #-}
-- |Convert one stream into another, not necessarily in lockstep.
--
-- A version of 'convStream' that sends 'EOF's to the inner iteratee.
convStreamPT
  :: (Monad m, LL.ListLike s el)
  => Iteratee s m s'
  -> Enumeratee s s' m a
convStreamPT fi = eneeCheckIfDonePass check
 where
  go = eneeCheckIfDonePass check
  -- If the outer stream is finished, forward the terminator; otherwise
  -- run the converter iteratee 'fi' to produce the next chunk.
  check k = isStreamFinished >>= maybe (step k) (hndl k)
  hndl k (EOF e) = lift (k (EOF e)) >>= go . wrapCont
  -- isStreamFinished only ever yields an EOF stream here, so any other
  -- value is an internal invariant violation.
  hndl _ _str = error "iteratee: internal error in convStreamPT"
  step k = fi >>= lift . doContIteratee k . Chunk >>= go
{-# INLINE convStreamPT #-}
-- |The most general stream converter.
--
-- A version of 'unfoldConvStream' that sends 'EOF's to the inner iteratee.
unfoldConvStreamPT ::
 (Monad m, LL.ListLike s el) =>
  (acc -> Iteratee s m (acc, s'))
  -> acc
  -> Enumeratee s s' m a
unfoldConvStreamPT fi acc0 = unfoldConvStreamCheckPT eneeCheckIfDonePass fi acc0
{-# INLINE unfoldConvStreamPT #-}
-- | A version of 'unfoldConvStreamCheck' that sends 'EOF's
-- to the inner iteratee.  Like 'convStreamPT', but the converter
-- threads an accumulator through successive chunk productions.
unfoldConvStreamCheckPT
  :: (Monad m, LL.ListLike fromStr elo)
  => ((Cont toStr m a -> Iteratee fromStr m (Iteratee toStr m a))
      -> Enumeratee fromStr toStr m a
     )
  -> (acc -> Iteratee fromStr m (acc, toStr))
  -> acc
  -> Enumeratee fromStr toStr m a
unfoldConvStreamCheckPT checkDone f acc0 = checkDone (check acc0)
  where
    go acc = checkDone (check acc)
    check acc k = isStreamFinished >>= maybe (step acc k) (hndl acc k)
    hndl acc k (EOF e) = lift (k (EOF e)) >>= go acc . wrapCont
    -- isStreamFinished only yields EOF streams; anything else is a bug.
    hndl _ _ _str = error "iteratee: internal error in unfoldConvStreamCheckPT"
    step acc k = do
      (acc',s') <- f acc
      i' <- lift . doContIteratee k $ Chunk s'
      go acc' i'
{-# INLINE unfoldConvStreamCheckPT #-}
-- -------------------------------------
-- ListLike variants
-- | A variant of 'Data.Iteratee.ListLike.breakE' that passes 'EOF's.
-- Feeds elements to the inner iteratee until the predicate holds, then
-- drops the remainder of the failing run from the outer stream.
breakEPT
  :: (LL.ListLike s el, Monad m)
  => (el -> Bool)
  -> Enumeratee s s m a
breakEPT cpred = eneeCheckIfDonePass (icont . step) >=>
  \i' -> dropWhile (not . cpred) >> return i'
 where
  go = eneeCheckIfDonePass (icont . step)
  step k (Chunk s)
    | LL.null s = contMoreM (icont (step k))
    | otherwise = case LL.break cpred s of
        -- If no element satisfied the predicate, pass the whole chunk
        -- on; otherwise feed the prefix and finish, returning the rest.
        (str', tail')
          | LL.null tail' -> doContEtee go k str'
          | otherwise -> k (Chunk str') >>= \ret -> case ret of
              ContDone a _ -> contDoneM (idone a) (Chunk tail')
              ContMore i -> contDoneM i (Chunk tail')
              ContErr i e -> contDoneM (ierr i e) (Chunk tail')
  step k NoData = continue (step k)
  -- PT contract: forward EOF to the inner iteratee.
  step k stream@(EOF{}) = k stream >>= \ret -> case ret of
    ContDone a _ -> contDoneM (idone a) stream
    ContMore i -> contMoreM (go i)
    ContErr i e -> contErrM (go i) e
{-# INLINE breakEPT #-}
-- | A variant of 'Data.Iteratee.ListLike.take' that passes 'EOF's.
-- Feeds exactly @n@ elements to the inner iteratee; if the inner
-- iteratee finishes early, the remaining elements are still dropped
-- from the outer stream.
takePT ::
  (Monad m, LL.ListLike s el)
  => Int   -- ^ number of elements to consume
  -> Enumeratee s s m a
takePT = go
 where
  go n iter
    | n <= 0 = return iter
    | otherwise = runIter iter (onDone n) (onCont n) (onErr n)
  -- Inner iteratee already done: drop the rest of the quota.
  onDone n x = drop n >> idone (idone x)
  onCont n k = icont (step n k)
  onErr n i = ierr (go n i)
  step n k (Chunk str)
    | LL.null str = continue (step n k)
    -- Whole chunk fits inside the remaining quota.
    | LL.length str <= n = k (Chunk str) >>= \ret -> case ret of
        ContDone a _ -> contMoreM (go (n-LL.length str)
                                      (idone a))
        ContMore i -> contMoreM (go (n-LL.length str) i)
        ContErr i e -> contErrM (go (n-LL.length str) i)
                                e
    -- Chunk straddles the quota: feed the first n elements, return the
    -- rest to the outer stream.
    | otherwise = k (Chunk s1) >>= \ret -> case ret of
        ContDone a _ -> contDoneM (idone a) (Chunk s2)
        ContMore i -> contDoneM i (Chunk s2)
        ContErr i e -> contErrM (idone i) e
    where (s1, s2) = LL.splitAt n str
  step n k NoData = continue (step n k)
  -- PT contract: forward the EOF to the inner iteratee.
  step n k stream@EOF{} = k stream >>= \rk -> case rk of
    ContDone a _str' -> contDoneM (idone a) stream
    ContMore inner -> contMoreM (go n inner)
    ContErr inner e -> contErrM (go n inner) e
{-# INLINE takePT #-}
-- | A variant of 'Data.Iteratee.ListLike.takeUpTo' that passes 'EOF's.
-- Unlike 'takePT', never consumes more than the inner iteratee needs:
-- leftover elements are returned to the outer stream, not dropped.
takeUpToPT :: (Monad m, LL.ListLike s el) => Int -> Enumeratee s s m a
takeUpToPT = go
 where
  go count iter
    | count <= 0 = idone iter
    | otherwise = runIter iter onDone (onCont count) (onErr count)
  onDone x = idone (idone x)
  onCont count k = icont (step count k)
  onErr count i' = ierr (go count i')
  step n k (Chunk str)
    | LL.null str = continue (step n k)
    -- Chunk fits strictly inside the quota: pass it through whole.
    | LL.length str < n = k (Chunk str) >>= \ret -> case ret of
        ContDone a str' -> contDoneM (idone a) str'
        ContMore i -> contMoreM (go
                                   (n - LL.length str)
                                   i)
        ContErr i e -> contErrM (go
                                   (n - LL.length str)
                                   i)
                                e
    | otherwise = do
        -- check to see if the inner iteratee has completed, and if so,
        -- grab any remaining stream to put it in the outer iteratee.
        -- the outer iteratee is always complete at this stage, although
        -- the inner may not be.
        let (s1, s2) = LL.splitAt n str
        ret <- k (Chunk s1)
        case ret of
          ContDone a preStr -> case preStr of
            (Chunk pre)
              | LL.null pre -> contDoneM (idone a) $ Chunk s2
              -- Re-attach the inner iteratee's unconsumed prefix.
              | otherwise -> contDoneM (idone a) $ Chunk $ pre `LL.append` s2
            NoData -> contDoneM (idone a) $ Chunk s2
            -- this case shouldn't ever happen, except possibly
            -- with broken iteratees
            EOF{} -> contDoneM (idone a) preStr
          ContMore i -> contDoneM i (Chunk s2)
          ContErr i e -> contErrM (idone i) e
  step n k NoData = continue (step n k)
  -- PT contract: forward the EOF to the inner iteratee.
  step n k stream@EOF{} = k stream >>= \rk -> case rk of
    ContDone a _str' -> contDoneM (idone a) stream
    ContMore inner -> contMoreM (go n inner)
    ContErr inner e -> contErrM (go n inner) e
{-# INLINE takeUpToPT #-}
-- | A variant of 'Data.Iteratee.ListLike.takeWhileE' that passes
-- 'EOF's.
--
-- Defined by inverting the predicate and delegating to 'breakEPT'.
takeWhileEPT
  :: (LL.ListLike s el, Monad m)
  => (el -> Bool)
  -> Enumeratee s s m a
takeWhileEPT p = breakEPT (not . p)
{-# INLINE takeWhileEPT #-}
-- | A variant of 'Data.Iteratee.ListLike.mapStream' that passes
-- 'EOF's.
--
-- Simply lifts the element transformation to whole chunks via 'lMap'
-- and hands it to 'mapChunksPT'.
mapStreamPT
  :: (LL.ListLike (s el) el
     ,LL.ListLike (s el') el'
     ,Monad m
     ,LooseMap s el el')
  => (el -> el')
  -> Enumeratee (s el) (s el') m a
mapStreamPT = mapChunksPT . lMap
{-# INLINE mapStreamPT #-}
-- | A variant of 'Data.Iteratee.ListLike.rigidMapStream' that passes
-- 'EOF's.
--
-- The element type is unchanged, so the chunkwise map is
-- 'LL.rigidMap' composed into 'mapChunksPT'.
rigidMapStreamPT
  :: (LL.ListLike s el, Monad m)
  => (el -> el)
  -> Enumeratee s s m a
rigidMapStreamPT = mapChunksPT . LL.rigidMap
{-# INLINE rigidMapStreamPT #-}
-- | A variant of 'Data.Iteratee.ListLike.filter' that passes 'EOF's.
--
-- Filters each chunk with the predicate before it reaches the inner
-- iteratee.
filterPT
  :: (Monad m, LL.ListLike s el)
  => (el -> Bool)
  -> Enumeratee s s m a
filterPT = mapChunksPT . LL.filter
{-# INLINE filterPT #-}
|
JohnLato/iteratee
|
src/Data/Iteratee/PTerm.hs
|
bsd-3-clause
| 11,664
| 0
| 22
| 3,672
| 3,194
| 1,610
| 1,584
| 196
| 11
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Opaleye.Internal.Distinct where
import Opaleye.QueryArr (Query)
import Opaleye.Column (Column)
import Opaleye.Aggregate (Aggregator, groupBy, aggregate)
import Control.Applicative (Applicative, pure, (<*>))
import qualified Data.Profunctor as P
import qualified Data.Profunctor.Product as PP
import Data.Profunctor.Product.Default (Default, def)
-- We implement distinct simply by grouping by all columns. We could
-- instead implement it as SQL's DISTINCT but implementing it in terms
-- of something else that we already have is easier at this point.
-- | Remove duplicate rows from a query using an explicit
-- 'Distinctspec'.  The spec is just an 'Aggregator', so distinctness
-- is achieved by aggregating the query with it.
distinctExplicit :: Distinctspec columns columns'
                 -> Query columns -> Query columns'
distinctExplicit (Distinctspec agg) query = aggregate agg query
-- | A specification of how to make a query's rows distinct.
-- Internally it is an 'Aggregator'; the default instance groups by
-- each column, which collapses duplicate rows.
data Distinctspec a b = Distinctspec (Aggregator a b)

instance Default Distinctspec (Column a) (Column a) where
  def = Distinctspec groupBy
-- { Boilerplate instances

-- All of these simply unwrap the 'Distinctspec' newtype-style wrapper
-- and delegate to the corresponding 'Aggregator' instance.
instance Functor (Distinctspec a) where
  fmap f (Distinctspec g) = Distinctspec (f <$> g)

instance Applicative (Distinctspec a) where
  pure x = Distinctspec (pure x)
  Distinctspec f <*> Distinctspec x = Distinctspec (f <*> x)

instance P.Profunctor Distinctspec where
  dimap f g (Distinctspec agg) = Distinctspec (P.dimap f g agg)

instance PP.ProductProfunctor Distinctspec where
  empty = PP.defaultEmpty
  (***!) = PP.defaultProfunctorProduct

-- }
|
k0001/haskell-opaleye
|
Opaleye/Internal/Distinct.hs
|
bsd-3-clause
| 1,432
| 0
| 9
| 273
| 351
| 194
| 157
| 25
| 1
|
module TriangleKata.Day2Spec (spec) where
import Test.Hspec
import TriangleKata.Day2 (triangle, TriangleType(..))
-- | Table-driven checks for 'triangle': each case pairs a description
-- with the input sides and the expected classification.
spec :: Spec
spec = mapM_ check cases
  where
    check (description, sides, expected) =
        it description (triangle sides `shouldBe` expected)
    cases =
        [ ("equilateral triangle has all sides equal", (10, 10, 10), Equilateral)
        , ("isosceles triangle with first two sides equal", (4, 4, 6), Isosceles)
        , ("isosceles triangle with last two sides equal", (8, 5, 5), Isosceles)
        , ("isosceles triangle with first and last sides equal", (6, 7, 6), Isosceles)
        , ("scalene triangle has no equal sides", (4, 5, 6), Scalene)
        , ("illegal triangle all sides zero", (0, 0, 0), Illegal)
        , ("illegal triangle has sum of the first two sides less than the third side", (5, 5, 11), Illegal)
        , ("illegal triangle has sum of the last two sides less than the first side", (11, 5, 5), Illegal)
        , ("illegal triangle has sum of first and last sides less than the second side", (5, 11, 5), Illegal)
        , ("illegal triangle has at least one side with negative length", (-1, 5, 7), Illegal)
        ]
|
Alex-Diez/haskell-tdd-kata
|
old-katas/test/TriangleKata/Day2Spec.hs
|
bsd-3-clause
| 1,392
| 0
| 13
| 424
| 355
| 188
| 167
| 25
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Perform a build
module Stack.Build.Execute
( printPlan
, preFetch
, executePlan
-- * Running Setup.hs
, ExecuteEnv
, withExecuteEnv
, withSingleContext
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Concurrent.Execute
import Control.Concurrent.Async (withAsync, wait)
import Control.Concurrent.MVar.Lifted
import Control.Concurrent.STM
import Control.Exception.Enclosed (catchIO, tryIO)
import Control.Exception.Lifted
import Control.Monad (liftM, when, unless, void, join, guard, filterM, (<=<))
import Control.Monad.Catch (MonadCatch, MonadMask)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (liftBaseWith)
import Control.Monad.Trans.Resource
import qualified Data.ByteString as S
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Foldable (forM_)
import Data.Function
import Data.IORef.RunOnce (runOnce)
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Streaming.Process hiding (callProcess, env)
import qualified Data.Streaming.Process as Process
import Data.Traversable (forM)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (getCurrentTime)
import Data.Word8 (_colon)
import Distribution.System (OS (Windows),
Platform (Platform))
import qualified Distribution.Text
import Language.Haskell.TH as TH (location)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import Prelude hiding (FilePath, writeFile)
import Stack.Build.Cache
import Stack.Build.Coverage
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Types.Build
import Stack.Fetch as Fetch
import Stack.GhcPkg
import Stack.Package
import Stack.PackageDump
import Stack.Constants
import Stack.Types
import Stack.Types.StackT
import Stack.Types.Internal
import qualified System.Directory as D
import System.Environment (getExecutablePath)
import System.Exit (ExitCode (ExitSuccess))
import qualified System.FilePath as FP
import System.IO
import System.PosixCompat.Files (createLink)
import System.Process.Read
import System.Process.Run
import System.Process.Log (showProcessArgDebug)
#if !MIN_VERSION_process(1,2,1)
import System.Process.Internals (createProcess_)
#endif
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,HasBuildConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env)
-- | Download the tarballs for every upstream (non-local) package in
-- the plan up front, so individual builds don't block on fetching.
preFetch :: M env m => Plan -> m ()
preFetch plan =
    if Set.null idents
        then $logDebug "Nothing to fetch"
        else do
            $logDebug $ T.pack $
                "Prefetching: " ++
                intercalate ", " (map packageIdentifierString $ Set.toList idents)
            menv <- getMinimalEnvOverride
            fetchPackages menv idents
  where
    -- Only upstream tasks need fetching; local packages are on disk.
    idents = Set.unions $ map toIdent $ Map.toList $ planTasks plan
    toIdent (name, task) =
        case taskType task of
            TTLocal _ -> Set.empty
            TTUpstream package _ -> Set.singleton $ PackageIdentifier
                name
                (packageVersion package)
-- | Log a human-readable summary of what the plan would do
-- (unregister, build, test, benchmark, install executables) without
-- performing any of it.  Used for dry runs.
printPlan :: M env m
          => Plan
          -> m ()
printPlan plan = do
    case Map.elems $ planUnregisterLocal plan of
        [] -> $logInfo "No packages would be unregistered."
        xs -> do
            $logInfo "Would unregister locally:"
            forM_ xs $ \(ident, mreason) -> $logInfo $ T.concat
                [ T.pack $ packageIdentifierString ident
                , case mreason of
                    Nothing -> ""
                    Just reason -> T.concat
                        [ " ("
                        , reason
                        , ")"
                        ]
                ]
    $logInfo ""
    case Map.elems $ planTasks plan of
        [] -> $logInfo "Nothing to build."
        xs -> do
            $logInfo "Would build:"
            mapM_ ($logInfo . displayTask) xs
    -- Final actions (tests/benchmarks) are stored together in
    -- planFinals; split them by which component sets are non-empty.
    let hasTests = not . Set.null . lptbTests
        hasBenches = not . Set.null . lptbBenches
        tests = Map.elems $ fmap fst $ Map.filter (hasTests . snd) $ planFinals plan
        benches = Map.elems $ fmap fst $ Map.filter (hasBenches . snd) $ planFinals plan
    unless (null tests) $ do
        $logInfo ""
        $logInfo "Would test:"
        mapM_ ($logInfo . displayTask) tests
    unless (null benches) $ do
        $logInfo ""
        $logInfo "Would benchmark:"
        mapM_ ($logInfo . displayTask) benches
    $logInfo ""
    case Map.toList $ planInstallExes plan of
        [] -> $logInfo "No executables to be installed."
        xs -> do
            $logInfo "Would install executables:"
            forM_ xs $ \(name, loc) -> $logInfo $ T.concat
                [ name
                , " from "
                , case loc of
                    Snap -> "snapshot"
                    Local -> "local"
                , " database"
                ]
-- | Render a single 'Task' as one line of dry-run output: the package
-- identifier, its target database, its source, and any tasks it must
-- wait for.
displayTask :: Task -> Text
displayTask task = T.pack $ concat
    [ packageIdentifierString (taskProvides task)
    , ": database="
    , database
    , ", source="
    , source
    , after
    ]
  where
    database =
        case taskLocation task of
            Snap -> "snapshot"
            Local -> "local"
    source =
        case taskType task of
            TTLocal lp -> toFilePath (lpDir lp)
            TTUpstream _ _ -> "package index"
    after
        | Set.null missing = ""
        | otherwise =
            ", after: " ++ intercalate "," (map packageIdentifierString (Set.toList missing))
    missing = tcoMissing (taskConfigOpts task)
-- | Shared environment threaded through every action of a single
-- build run.
data ExecuteEnv = ExecuteEnv
    { eeEnvOverride :: !EnvOverride
    , eeConfigureLock :: !(MVar ()) -- ^ serializes Cabal configure steps (see note in 'withExecuteEnv')
    , eeInstallLock :: !(MVar ()) -- ^ serializes ghc-pkg register calls
    , eeBuildOpts :: !BuildOpts
    , eeBaseConfigOpts :: !BaseConfigOpts
    , eeGhcPkgIds :: !(TVar (Map PackageIdentifier Installed))
      -- ^ package ids recorded as packages finish building
    , eeTempDir :: !(Path Abs Dir)
    , eeSetupHs :: !(Path Abs File)
    -- ^ Temporary Setup.hs for simple builds
    , eeSetupExe :: !(Maybe (Path Abs File))
    -- ^ Compiled version of eeSetupHs
    , eeCabalPkgVer :: !Version
    , eeTotalWanted :: !Int -- ^ count of wanted local packages
    , eeWanted :: !(Set PackageName)
    , eeLocals :: ![LocalPackage]
    , eeSourceMap :: !SourceMap
    , eeGlobalDB :: !(Path Abs Dir)
    , eeGlobalPackages :: ![DumpPackage () ()]
    }
-- | Get a compiled Setup exe
--
-- Maintains a cache of compiled @Setup.hs@ executables under the stack
-- root, keyed by Cabal version, platform, and compiler version, so
-- simple-build-type packages don't recompile Setup.hs on every build.
getSetupExe :: M env m
            => Path Abs File -- ^ Setup.hs input file
            -> Path Abs Dir -- ^ temporary directory
            -> m (Maybe (Path Abs File))
getSetupExe setupHs tmpdir = do
    wc <- getWhichCompiler
    econfig <- asks getEnvConfig
    let config = getConfig econfig
        -- Cache key: embeds Cabal version, platform, compiler version.
        baseNameS = concat
            [ "setup-Simple-Cabal-"
            , versionString $ envConfigCabalVersion econfig
            , "-"
            , Distribution.Text.display $ configPlatform config
            , "-"
            , compilerVersionString $ envConfigCompilerVersion econfig
            ]
        exeNameS = baseNameS ++
            case configPlatform config of
                Platform _ Windows -> ".exe"
                _ -> ""
        -- GHCJS produces a .jsexe directory rather than a plain binary.
        outputNameS =
            case wc of
                Ghc -> exeNameS
                Ghcjs -> baseNameS ++ ".jsexe"
        jsExeNameS =
            baseNameS ++ ".jsexe"
        setupDir =
            configStackRoot config </>
            $(mkRelDir "setup-exe-cache")
    exePath <- fmap (setupDir </>) $ parseRelFile exeNameS
    jsExePath <- fmap (setupDir </>) $ parseRelDir jsExeNameS
    exists <- liftIO $ D.doesFileExist $ toFilePath exePath
    if exists
        then return $ Just exePath
        else do
            -- Compile under temporary "tmp-" names, then rename into
            -- place, so an interrupted compile never leaves a
            -- half-written cache entry at the final path.
            tmpExePath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ exeNameS
            tmpOutputPath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ outputNameS
            tmpJsExePath <- fmap (setupDir </>) $ parseRelDir $ "tmp-" ++ jsExeNameS
            liftIO $ D.createDirectoryIfMissing True $ toFilePath setupDir
            menv <- getMinimalEnvOverride
            let args =
                    [ "-clear-package-db"
                    , "-global-package-db"
                    , "-hide-all-packages"
                    , "-package"
                    , "base"
                    , "-package"
                    , "Cabal-" ++ versionString (envConfigCabalVersion econfig)
                    , toFilePath setupHs
                    , "-o"
                    , toFilePath tmpOutputPath
                    ] ++
                    ["-build-runner" | wc == Ghcjs]
            runIn tmpdir (compilerExeName wc) menv args Nothing
            when (wc == Ghcjs) $ renameDir tmpJsExePath jsExePath
            renameFile tmpExePath exePath
            return $ Just exePath
-- | Construct the shared 'ExecuteEnv' for a build run — locks, a temp
-- directory holding a generated simple Setup.hs, the cached compiled
-- Setup executable, and a TVar for package ids — and pass it to the
-- continuation.
withExecuteEnv :: M env m
               => EnvOverride
               -> BuildOpts
               -> BaseConfigOpts
               -> [LocalPackage]
               -> [DumpPackage () ()] -- ^ global packages
               -> SourceMap
               -> (ExecuteEnv -> m a)
               -> m a
withExecuteEnv menv bopts baseConfigOpts locals globals sourceMap inner = do
    withCanonicalizedSystemTempDirectory stackProgName $ \tmpdir -> do
        tmpdir' <- parseAbsDir tmpdir
        configLock <- newMVar ()
        installLock <- newMVar ()
        idMap <- liftIO $ newTVarIO Map.empty
        -- A minimal Setup.hs used for packages with the Simple build type.
        let setupHs = tmpdir' </> $(mkRelFile "Setup.hs")
        liftIO $ writeFile (toFilePath setupHs) "import Distribution.Simple\nmain = defaultMain"
        setupExe <- getSetupExe setupHs tmpdir'
        cabalPkgVer <- asks (envConfigCabalVersion . getEnvConfig)
        globalDB <- getGlobalDB menv =<< getWhichCompiler
        inner ExecuteEnv
            { eeEnvOverride = menv
            , eeBuildOpts = bopts
              -- Uncertain as to why we cannot run configures in parallel. This appears
              -- to be a Cabal library bug. Original issue:
              -- https://github.com/fpco/stack/issues/84. Ideally we'd be able to remove
              -- this.
            , eeConfigureLock = configLock
            , eeInstallLock = installLock
            , eeBaseConfigOpts = baseConfigOpts
            , eeGhcPkgIds = idMap
            , eeTempDir = tmpdir'
            , eeSetupHs = setupHs
            , eeSetupExe = setupExe
            , eeCabalPkgVer = cabalPkgVer
            , eeTotalWanted = length $ filter lpWanted locals
            , eeWanted = wantedLocalPackages locals
            , eeLocals = locals
            , eeSourceMap = sourceMap
            , eeGlobalDB = globalDB
            , eeGlobalPackages = globals
            }
-- | Perform the actual plan
--
-- Runs the build via 'executePlan'', then copies any requested
-- executables into the local bin directory (warning when that
-- directory is not on PATH), and finally runs any @--exec@ commands.
executePlan :: M env m
            => EnvOverride
            -> BuildOpts
            -> BaseConfigOpts
            -> [LocalPackage]
            -> [DumpPackage () ()] -- ^ globals
            -> SourceMap
            -> InstalledMap
            -> Plan
            -> m ()
executePlan menv bopts baseConfigOpts locals globals sourceMap installedMap plan = do
    withExecuteEnv menv bopts baseConfigOpts locals globals sourceMap (executePlan' installedMap plan)
    unless (Map.null $ planInstallExes plan) $ do
        snapBin <- (</> bindirSuffix) `liftM` installationRootDeps
        localBin <- (</> bindirSuffix) `liftM` installationRootLocal
        destDir <- asks $ configLocalBin . getConfig
        createTree destDir
        destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir
        -- Compare canonicalized paths so symlinked PATH entries match.
        isInPATH <- liftIO . fmap (any (FP.equalFilePath destDir')) . (mapM D.canonicalizePath <=< filterM D.doesDirectoryExist) $ (envSearchPath menv)
        unless isInPATH $
            $logWarn $ T.concat
                [ "Installation path "
                , T.pack destDir'
                , " not found in PATH environment variable"
                ]
        platform <- asks getPlatform
        let ext =
                case platform of
                    Platform _ Windows -> ".exe"
                    _ -> ""
        currExe <- liftIO getExecutablePath -- needed for windows, see below
        installed <- forM (Map.toList $ planInstallExes plan) $ \(name, loc) -> do
            let bindir =
                    case loc of
                        Snap -> snapBin
                        Local -> localBin
            mfp <- resolveFileMaybe bindir $ T.unpack name ++ ext
            case mfp of
                Nothing -> do
                    $logWarn $ T.concat
                        [ "Couldn't find executable "
                        , name
                        , " in directory "
                        , T.pack $ toFilePath bindir
                        ]
                    return Nothing
                Just file -> do
                    let destFile = destDir' FP.</> T.unpack name ++ ext
                    $logInfo $ T.concat
                        [ "Copying from "
                        , T.pack $ toFilePath file
                        , " to "
                        , T.pack destFile
                        ]
                    -- Windows cannot overwrite the running executable;
                    -- use the rename-then-copy workaround in that case.
                    liftIO $ case platform of
                        Platform _ Windows | FP.equalFilePath destFile currExe ->
                            windowsRenameCopy (toFilePath file) destFile
                        _ -> D.copyFile (toFilePath file) destFile
                    return $ Just (destDir', [T.append name (T.pack ext)])
        let destToInstalled = Map.fromListWith (++) (catMaybes installed)
        unless (Map.null destToInstalled) $ $logInfo ""
        forM_ (Map.toList destToInstalled) $ \(dest, executables) -> do
            $logInfo $ T.concat
                [ "Copied executables to "
                , T.pack dest
                , ":"]
            forM_ executables $ \exe -> $logInfo $ T.append "- " exe
    -- Run any user-supplied --exec commands with the full local env.
    config <- asks getConfig
    menv' <- liftIO $ configEnvOverride config EnvSettings
        { esIncludeLocals = True
        , esIncludeGhcPackagePath = True
        , esStackExe = True
        , esLocaleUtf8 = False
        }
    forM_ (boptsExec bopts) $ \(cmd, args) -> do
        $logProcessRun cmd args
        callProcess Nothing menv' cmd args
-- | Windows can't write over the current executable.  Work around it:
-- stage the source as @dest.new@, shunt the existing destination aside
-- to @dest.old@, then rename the staged copy into place.  The
-- @dest.old@ file is not cleaned up here.
windowsRenameCopy :: FilePath -> FilePath -> IO ()
windowsRenameCopy src dest = do
    let staged = dest ++ ".new"
        parked = dest ++ ".old"
    D.copyFile src staged
    D.renameFile dest parked
    D.renameFile staged dest
-- | Perform the actual plan (internal)
--
-- Unregisters any stale local packages, converts the plan into
-- 'Action's, runs them concurrently with a progress display, and
-- finally generates Haddock/coverage indexes if requested.  Throws
-- 'ExecutionFailure' if any action failed.
executePlan' :: M env m
             => InstalledMap
             -> Plan
             -> ExecuteEnv
             -> m ()
executePlan' installedMap plan ee@ExecuteEnv {..} = do
    wc <- getWhichCompiler
    cv <- asks $ envConfigCompilerVersion . getEnvConfig
    case Map.toList $ planUnregisterLocal plan of
        [] -> return ()
        ids -> do
            localDB <- packageDatabaseLocal
            forM_ ids $ \(id', (ident, mreason)) -> do
                $logInfo $ T.concat
                    [ T.pack $ packageIdentifierString ident
                    , ": unregistering"
                    , case mreason of
                        Nothing -> ""
                        Just reason -> T.concat
                            [ " ("
                            , reason
                            , ")"
                            ]
                    ]
                unregisterGhcPkgId eeEnvOverride wc cv localDB id' ident
    -- Yes, we're explicitly discarding result values, which in general would
    -- be bad. monad-unlift does this all properly at the type system level,
    -- but I don't want to pull it in for this one use case, when we know that
    -- stack always using transformer stacks that are safe for this use case.
    runInBase <- liftBaseWith $ \run -> return (void . run)
    -- Merge build tasks and final (test/bench) tasks per package, then
    -- translate each pair into executor actions.
    let actions = concatMap (toActions installedMap' runInBase ee) $ Map.elems $ Map.mergeWithKey
            (\_ b f -> Just (Just b, Just f))
            (fmap (\b -> (Just b, Nothing)))
            (fmap (\f -> (Nothing, Just f)))
            (planTasks plan)
            (planFinals plan)
    threads <- asks $ configJobs . getConfig
    concurrentTests <- asks $ configConcurrentTests . getConfig
    let keepGoing =
            case boptsKeepGoing eeBuildOpts of
                Just kg -> kg
                Nothing -> boptsTests eeBuildOpts || boptsBenchmarks eeBuildOpts
        concurrentFinal =
            -- TODO it probably makes more sense to use a lock for test suites
            -- and just have the execution blocked. Turning off all concurrency
            -- on finals based on the --test option doesn't fit in well.
            if boptsTests eeBuildOpts
                then concurrentTests
                else True
    terminal <- asks getTerminal
    errs <- liftIO $ runActions threads keepGoing concurrentFinal actions $ \doneVar -> do
        let total = length actions
            -- Poll the done-counter via STM and keep the sticky
            -- progress line up to date until every action completes.
            loop prev
                | prev == total =
                    runInBase $ $logStickyDone ("Completed all " <> T.pack (show total) <> " actions.")
                | otherwise = do
                    when terminal $ runInBase $
                        $logSticky ("Progress: " <> T.pack (show prev) <> "/" <> T.pack (show total))
                    done <- atomically $ do
                        done <- readTVar doneVar
                        check $ done /= prev
                        return done
                    loop done
        -- No progress display needed for zero or one action.
        when (total > 1) $ loop 0
    unless (null errs) $ throwM $ ExecutionFailure errs
    when (boptsHaddock eeBuildOpts) $ do
        generateLocalHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeLocals
        generateDepsHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeLocals
        generateSnapHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeGlobalDB
        when (toCoverage $ boptsTestOpts eeBuildOpts) generateHpcMarkupIndex
  where
    -- Packages being unregistered must not be treated as installed.
    installedMap' = Map.difference installedMap
                  $ Map.fromList
                  $ map (\(ident, _) -> (packageIdentifierName ident, ()))
                  $ Map.elems
                  $ planUnregisterLocal plan
-- | Translate one package's (build task, final task) pair into the
-- concrete 'Action's handed to the concurrent executor, including the
-- dependency edges between them.
toActions :: M env m
          => InstalledMap
          -> (m () -> IO ())
          -> ExecuteEnv
          -> (Maybe Task, Maybe (Task, LocalPackageTB)) -- build and final
          -> [Action]
toActions installedMap runInBase ee (mbuild, mfinal) =
    abuild ++ afinal
  where
    abuild =
        case mbuild of
            Nothing -> []
            Just task@Task {..} ->
                [ Action
                    { actionId = ActionId taskProvides ATBuild
                      -- Wait for each missing dependency's build action.
                    , actionDeps =
                        (Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
                    , actionDo = \ac -> runInBase $ singleBuild runInBase ac ee task installedMap
                    }
                ]
    afinal =
        case mfinal of
            Nothing -> []
            Just (task@Task {..}, lptb) ->
                [ Action
                    { actionId = ActionId taskProvides ATFinal
                      -- Tests/benchmarks additionally wait for this
                      -- package's own build, when one exists.
                    , actionDeps = addBuild taskProvides $
                        (Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
                    , actionDo = \ac -> runInBase $ do
                        unless (Set.null $ lptbTests lptb) $ do
                            singleTest runInBase topts lptb ac ee task installedMap
                        unless (Set.null $ lptbBenches lptb) $ do
                            singleBench runInBase beopts lptb ac ee task installedMap
                    }
                ]
      where
        addBuild ident =
            case mbuild of
                Nothing -> id
                Just _ -> Set.insert $ ActionId ident ATBuild
    bopts = eeBuildOpts ee
    topts = boptsTestOpts bopts
    beopts = boptsBenchmarkOpts bopts
-- | Generate the ConfigCache
--
-- Resolves the task's missing dependencies against the package ids
-- recorded so far in 'eeGhcPkgIds', and builds the cache value used to
-- decide whether a package must be reconfigured.  Also returns the
-- full dependency map (missing plus already-present).
getConfigCache :: MonadIO m
               => ExecuteEnv -> Task -> [Text]
               -> m (Map PackageIdentifier GhcPkgId, ConfigCache)
getConfigCache ExecuteEnv {..} Task {..} extra = do
    idMap <- liftIO $ readTVarIO eeGhcPkgIds
    let getMissing ident =
            case Map.lookup ident idMap of
                -- Dependencies must have been built (and registered in
                -- the TVar) before this task runs.
                Nothing -> error "singleBuild: invariant violated, missing package ID missing"
                Just (Library ident' x) -> assert (ident == ident') $ Just (ident, x)
                -- Executables don't contribute a library package id.
                Just (Executable _) -> Nothing
        missing' = Map.fromList $ mapMaybe getMissing $ Set.toList missing
        TaskConfigOpts missing mkOpts = taskConfigOpts
        opts = mkOpts missing'
        allDeps = Set.fromList $ Map.elems missing' ++ Map.elems taskPresent
        cache = ConfigCache
            { configCacheOpts = opts
                -- 'extra' flags (e.g. --enable-tests) are folded into
                -- the cached options so flipping them forces reconfigure.
                { coNoDirs = coNoDirs opts ++ map T.unpack extra
                }
            , configCacheDeps = allDeps
            , configCacheComponents =
                case taskType of
                    TTLocal lp -> Set.map renderComponent $ lpComponents lp
                    TTUpstream _ _ -> Set.empty
            , configCacheHaddock =
                shouldHaddockPackage eeBuildOpts eeWanted (packageIdentifierName taskProvides)
            }
        allDepsMap = Map.union missing' taskPresent
    return (allDepsMap, cache)
-- | Ensure that the configuration for the package matches what is given
--
-- Reconfigures when forced by @--reconfigure@, when the cached
-- configuration differs, or when the .cabal file's modification time
-- changed.  Configure runs are serialized through 'eeConfigureLock'.
-- Returns whether a configure was actually performed.
ensureConfig :: M env m
             => ConfigCache -- ^ newConfigCache
             -> Path Abs Dir -- ^ package directory
             -> ExecuteEnv
             -> m () -- ^ announce
             -> (Bool -> [String] -> m ()) -- ^ cabal
             -> Path Abs File -- ^ .cabal file
             -> m Bool
ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp = do
    newCabalMod <- liftIO (fmap modTime (D.getModificationTime (toFilePath cabalfp)))
    needConfig <-
        if boptsReconfigure eeBuildOpts
            then return True
            else do
                -- Determine the old and new configuration in the local directory, to
                -- determine if we need to reconfigure.
                mOldConfigCache <- tryGetConfigCache pkgDir
                mOldCabalMod <- tryGetCabalMod pkgDir
                return $ mOldConfigCache /= Just newConfigCache
                    || mOldCabalMod /= Just newCabalMod
    let ConfigureOpts dirs nodirs = configCacheOpts newConfigCache
    when needConfig $ withMVar eeConfigureLock $ \_ -> do
        -- Stale caches must go before configuring, so a failed
        -- configure can't leave them matching the new state.
        deleteCaches pkgDir
        announce
        cabal False $ "configure" : dirs ++ nodirs
        writeConfigCache pkgDir newConfigCache
        writeCabalMod pkgDir newCabalMod
    return needConfig
-- | Log an info message prefixed with the task's package identifier,
-- e.g. @foo-1.2.3: configure@.
announceTask :: MonadLogger m => Task -> Text -> m ()
announceTask task msg = $logInfo $ T.concat
    [ T.pack (packageIdentifierString (taskProvides task))
    , ": "
    , msg
    ]
-- | Run an action with everything needed to build a single package in
-- scope: the package (unpacking upstream packages on demand), an
-- optional per-package log file, and a @cabal@ runner that invokes
-- Setup.hs with the appropriate package databases.
withSingleContext :: M env m
                  => (m () -> IO ())
                  -> ActionContext
                  -> ExecuteEnv
                  -> Task
                  -> Maybe (Map PackageIdentifier GhcPkgId)
                  -- ^ All dependencies' package ids to provide to Setup.hs. If
                  -- Nothing, just provide global and snapshot package
                  -- databases.
                  -> Maybe String
                  -> (  Package
                     -> Path Abs File
                     -> Path Abs Dir
                     -> (Bool -> [String] -> m ())
                     -> (Text -> m ())
                     -> Bool
                     -> Maybe (Path Abs File, Handle)
                     -> m a)
                  -> m a
withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffix inner0 =
    withPackage $ \package cabalfp pkgDir ->
    withLogFile package $ \mlogFile ->
    withCabal package pkgDir mlogFile $ \cabal ->
    inner0 package cabalfp pkgDir cabal announce console mlogFile
  where
    announce = announceTask task
    wanted =
        case taskType of
            TTLocal lp -> lpWanted lp
            TTUpstream _ _ -> False
    -- Output goes straight to the console (no log file) only when this
    -- is the sole remaining wanted package.
    console = wanted
        && all (\(ActionId ident _) -> ident == taskProvides) (Set.toList acRemaining)
        && eeTotalWanted == 1
    -- Local packages are already on disk; upstream packages are
    -- unpacked into the shared temp dir first.
    withPackage inner =
        case taskType of
            TTLocal lp -> inner (lpPackage lp) (lpCabalFile lp) (lpDir lp)
            TTUpstream package _ -> do
                mdist <- liftM Just distRelativeDir
                m <- unpackPackageIdents eeEnvOverride eeTempDir mdist $ Set.singleton taskProvides
                case Map.toList m of
                    [(ident, dir)]
                        | ident == taskProvides -> do
                            let name = packageIdentifierName taskProvides
                            cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal"
                            let cabalfp = dir </> cabalfpRel
                            inner package cabalfp dir
                    _ -> error $ "withPackage: invariant violated: " ++ show m
    withLogFile package inner
        | console = inner Nothing
        | otherwise = do
            logPath <- buildLogPath package msuffix
            createTree (parent logPath)
            let fp = toFilePath logPath
            bracket
                (liftIO $ openBinaryFile fp WriteMode)
                (liftIO . hClose)
                $ \h -> inner (Just (logPath, h))
    -- Build the "cabal" runner passed to the inner action: a function
    -- that executes Setup.hs (precompiled, via runhaskell, or compiled
    -- with ghcjs) with the correct package databases and arguments.
    withCabal package pkgDir mlogFile inner = do
        config <- asks getConfig
        menv <- liftIO $ configEnvOverride config EnvSettings
            { esIncludeLocals = taskLocation task == Local
            , esIncludeGhcPackagePath = False
            , esStackExe = False
            , esLocaleUtf8 = True
            }
        getRunhaskellPath <- runOnce $ liftIO $ join $ findExecutable menv "runhaskell"
        getGhcjsPath <- runOnce $ liftIO $ join $ findExecutable menv "ghcjs"
        distRelativeDir' <- distRelativeDir
        esetupexehs <-
            -- Avoid broken Setup.hs files causing problems for simple build
            -- types, see:
            -- https://github.com/commercialhaskell/stack/issues/370
            case (packageSimpleType package, eeSetupExe) of
                (True, Just setupExe) -> return $ Left setupExe
                _ -> liftIO $ fmap Right $ getSetupHs pkgDir
        inner $ \stripTHLoading args -> do
            let cabalPackageArg =
                    "-package=" ++ packageIdentifierString
                                       (PackageIdentifier cabalPackageName
                                                          eeCabalPkgVer)
                packageArgs =
                    case mdeps of
                        Just deps ->
                            -- Stack always builds with the global Cabal for various
                            -- reproducibility issues.
                            let depsMinusCabal
                                    = map ghcPkgIdString
                                    $ Set.toList
                                    $ addGlobalPackages deps eeGlobalPackages
                            in
                                "-clear-package-db"
                              : "-global-package-db"
                              : ("-package-db=" ++ toFilePath (bcoSnapDB eeBaseConfigOpts))
                              : ("-package-db=" ++ toFilePath (bcoLocalDB eeBaseConfigOpts))
                              : "-hide-all-packages"
                              : cabalPackageArg
                              : map ("-package-id=" ++) depsMinusCabal
                        -- This branch is debatable. It adds access to the
                        -- snapshot package database for Cabal. There are two
                        -- possible objections:
                        --
                        -- 1. This doesn't isolate the build enough; arbitrary
                        -- other packages available could cause the build to
                        -- succeed or fail.
                        --
                        -- 2. This doesn't provide enough packages: we should also
                        -- include the local database when building local packages.
                        --
                        -- Currently, this branch is only taken via `stack sdist`.
                        Nothing ->
                            [ cabalPackageArg
                            , "-clear-package-db"
                            , "-global-package-db"
                            , "-package-db=" ++ toFilePath (bcoSnapDB eeBaseConfigOpts)
                            ]
                setupArgs = ("--builddir=" ++ toFilePath distRelativeDir') : args
                -- Run a Setup executable, streaming its output either
                -- to the console or the log file, and throw
                -- 'CabalExitedUnsuccessfully' (with captured log
                -- contents) on a nonzero exit.
                runExe exeName fullArgs = do
                    $logProcessRun (toFilePath exeName) fullArgs
                    -- Use createProcess_ to avoid the log file being closed afterwards
                    (Nothing, moutH, merrH, ph) <- liftIO $ createProcess_ "singleBuild" cp
                    let makeAbsolute = stripTHLoading -- If users want control, we should add a config option for this
                    ec <-
                        liftIO $
                        withAsync (runInBase $ maybePrintBuildOutput stripTHLoading makeAbsolute LevelInfo mlogFile moutH) $ \outThreadID ->
                        withAsync (runInBase $ maybePrintBuildOutput False makeAbsolute LevelWarn mlogFile merrH) $ \errThreadID -> do
                            ec <- waitForProcess ph
                            wait errThreadID
                            wait outThreadID
                            return ec
                    case ec of
                        ExitSuccess -> return ()
                        _ -> do
                            bs <- liftIO $
                                case mlogFile of
                                    Nothing -> return ""
                                    Just (logFile, h) -> do
                                        hClose h
                                        S.readFile $ toFilePath logFile
                            throwM $ CabalExitedUnsuccessfully
                                ec
                                taskProvides
                                exeName
                                fullArgs
                                (fmap fst mlogFile)
                                bs
                  where
                    cp0 = proc (toFilePath exeName) fullArgs
                    cp = cp0
                        { cwd = Just $ toFilePath pkgDir
                        , Process.env = envHelper menv
                        -- Ideally we'd create a new pipe here and then close it
                        -- below to avoid the child process from taking from our
                        -- stdin. However, if we do this, the child process won't
                        -- be able to get the codepage on Windows that we want.
                        -- See:
                        -- https://github.com/commercialhaskell/stack/issues/738
                        -- , std_in = CreatePipe
                        , std_out =
                            case mlogFile of
                                Nothing -> CreatePipe
                                Just (_, h) -> UseHandle h
                        , std_err =
                            case mlogFile of
                                Nothing -> CreatePipe
                                Just (_, h) -> UseHandle h
                        }
            wc <- getWhichCompiler
            (exeName, fullArgs) <- case (esetupexehs, wc) of
                (Left setupExe, _) -> return (setupExe, setupArgs)
                (Right setuphs, Ghc) -> do
                    exeName <- getRunhaskellPath
                    let fullArgs = packageArgs ++ (toFilePath setuphs : setupArgs)
                    return (exeName, fullArgs)
                -- GHCJS cannot interpret Setup.hs; compile it first.
                (Right setuphs, Ghcjs) -> do
                    distDir <- distDirFromDir pkgDir
                    let setupDir = distDir </> $(mkRelDir "setup")
                        outputFile = setupDir </> $(mkRelFile "setup")
                    createTree setupDir
                    ghcjsPath <- getGhcjsPath
                    runExe ghcjsPath $
                        [ "--make"
                        , "-odir", toFilePath setupDir
                        , "-hidir", toFilePath setupDir
                        , "-i", "-i."
                        ] ++ packageArgs ++
                        [ toFilePath setuphs
                        , "-o", toFilePath outputFile
                        , "-build-runner"
                        ]
                    return (outputFile, setupArgs)
            runExe exeName $ (if boptsCabalVerbose eeBuildOpts then ("--verbose":) else id) fullArgs
    -- Only stream to the console when no log file is in use (otherwise
    -- the process handles were pointed at the log file directly).
    maybePrintBuildOutput stripTHLoading makeAbsolute level mlogFile mh =
        case mh of
            Just h ->
                case mlogFile of
                    Just{} -> return ()
                    Nothing -> printBuildOutput stripTHLoading makeAbsolute level h
            Nothing -> return ()
singleBuild :: M env m
=> (m () -> IO ())
-> ActionContext
-> ExecuteEnv
-> Task
-> InstalledMap
-> m ()
singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap = do
(allDepsMap, cache) <- getCache
mprecompiled <- getPrecompiled cache
minstalled <-
case mprecompiled of
Just precompiled -> copyPreCompiled precompiled
Nothing -> realConfigAndBuild cache allDepsMap
case minstalled of
Nothing -> return ()
Just installed -> do
writeFlagCache installed cache
liftIO $ atomically $ modifyTVar eeGhcPkgIds $ Map.insert taskProvides installed
where
pname = packageIdentifierName taskProvides
shouldHaddockPackage' = shouldHaddockPackage eeBuildOpts eeWanted pname
doHaddock package = shouldHaddockPackage' &&
-- Works around haddock failing on bytestring-builder since it has no modules
-- when bytestring is new enough.
packageHasExposedModules package
getCache = do
let extra =
-- We enable tests if the test suite dependencies are already
-- installed, so that we avoid unnecessary recompilation based on
-- cabal_macros.h changes when switching between 'stack build' and
-- 'stack test'. See:
-- https://github.com/commercialhaskell/stack/issues/805
case taskType of
TTLocal lp -> concat
[ ["--enable-tests" | depsPresent installedMap $ lpTestDeps lp]
, ["--enable-benchmarks" | depsPresent installedMap $ lpBenchDeps lp]
]
_ -> []
getConfigCache ee task extra
getPrecompiled cache =
case taskLocation task of
Snap | not shouldHaddockPackage' -> do
mpc <- readPrecompiledCache taskProvides $ configCacheOpts cache
case mpc of
Nothing -> return Nothing
Just pc -> do
let allM _ [] = return True
allM f (x:xs) = do
b <- f x
if b then allM f xs else return False
b <- liftIO $ allM D.doesFileExist $ maybe id (:) (pcLibrary pc) $ pcExes pc
return $ if b then Just pc else Nothing
_ -> return Nothing
copyPreCompiled (PrecompiledCache mlib exes) = do
announceTask task "copying precompiled package"
forM_ mlib $ \libpath -> do
menv <- getMinimalEnvOverride
withMVar eeInstallLock $ \() ->
readProcessNull Nothing menv "ghc-pkg"
[ "register"
, "--no-user-package-db"
, "--package-db=" ++ toFilePath (bcoSnapDB eeBaseConfigOpts)
, "--force"
, libpath
]
liftIO $ forM_ exes $ \exe -> do
D.createDirectoryIfMissing True bindir
let dst = bindir FP.</> FP.takeFileName exe
createLink exe dst `catchIO` \_ -> D.copyFile exe dst
-- Find the package in the database
wc <- getWhichCompiler
let pkgDbs = [bcoSnapDB eeBaseConfigOpts]
mpkgid <- findGhcPkgId eeEnvOverride wc pkgDbs pname
return $ Just $
case mpkgid of
Nothing -> Executable taskProvides
Just pkgid -> Library taskProvides pkgid
where
bindir = toFilePath $ bcoSnapInstallRoot eeBaseConfigOpts </> bindirSuffix
realConfigAndBuild cache allDepsMap = withSingleContext runInBase ac ee task (Just allDepsMap) Nothing
$ \package cabalfp pkgDir cabal announce console _mlogFile -> do
_neededConfig <- ensureConfig cache pkgDir ee (announce "configure") cabal cabalfp
if boptsOnlyConfigure eeBuildOpts
then return Nothing
else liftM Just $ realBuild cache package pkgDir cabal announce console
realBuild cache package pkgDir cabal announce console = do
wc <- getWhichCompiler
markExeNotInstalled (taskLocation task) taskProvides
case taskType of
TTLocal lp -> writeBuildCache pkgDir $ lpNewBuildCache lp
TTUpstream _ _ -> return ()
() <- announce "build"
config <- asks getConfig
extraOpts <- extraBuildOptions eeBuildOpts
preBuildTime <- modTime <$> liftIO getCurrentTime
cabal (console && configHideTHLoading config) $
(case taskType of
TTLocal lp -> concat
[ ["build"]
, ["lib:" ++ packageNameString (packageName package)
-- TODO: get this information from target parsing instead,
-- which will allow users to turn off library building if
-- desired
| packageHasLibrary package]
, map (T.unpack . T.append "exe:") $ Set.toList $
case lpExeComponents lp of
Just exes -> exes
-- Build all executables in the event that no
-- specific list is provided (as happens with
-- extra-deps).
Nothing -> packageExes package
]
TTUpstream _ _ -> ["build"]) ++ extraOpts
case taskType of
TTLocal lp -> do
(addBuildCache,warnings) <-
addUnlistedToBuildCache
preBuildTime
(lpPackage lp)
(lpCabalFile lp)
(lpNewBuildCache lp)
mapM_ ($logWarn . ("Warning: " <>) . T.pack . show) warnings
unless (null addBuildCache) $
writeBuildCache pkgDir $
Map.unions (lpNewBuildCache lp : addBuildCache)
TTUpstream _ _ -> return ()
when (doHaddock package) $ do
announce "haddock"
hscolourExists <- doesExecutableExist eeEnvOverride "HsColour"
unless hscolourExists $ $logWarn
("Warning: haddock not generating hyperlinked sources because 'HsColour' not\n" <>
"found on PATH (use 'stack install hscolour' to install).")
cabal False (concat [["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"]
,["--hyperlink-source" | hscolourExists]
,["--ghcjs" | wc == Ghcjs]])
withMVar eeInstallLock $ \() -> do
announce "install"
cabal False ["install"]
let pkgDbs =
case taskLocation task of
Snap -> [bcoSnapDB eeBaseConfigOpts]
Local ->
[ bcoSnapDB eeBaseConfigOpts
, bcoLocalDB eeBaseConfigOpts
]
mpkgid <- findGhcPkgId eeEnvOverride wc pkgDbs (packageName package)
let ident = PackageIdentifier (packageName package) (packageVersion package)
mpkgid' <- case (packageHasLibrary package, mpkgid) of
(False, _) -> assert (isNothing mpkgid) $ do
markExeInstalled (taskLocation task) taskProvides -- TODO unify somehow with writeFlagCache?
return $ Executable ident
(True, Nothing) -> throwM $ Couldn'tFindPkgId $ packageName package
(True, Just pkgid) -> return $ Library ident pkgid
when (doHaddock package && shouldHaddockDeps eeBuildOpts) $
withMVar eeInstallLock $ \() ->
copyDepHaddocks
eeEnvOverride
wc
eeBaseConfigOpts
(pkgDbs ++ [eeGlobalDB])
(PackageIdentifier (packageName package) (packageVersion package))
Set.empty
case taskLocation task of
Snap -> writePrecompiledCache eeBaseConfigOpts taskProvides (configCacheOpts cache) mpkgid (packageExes package)
Local -> return ()
return mpkgid'
-- | Determine if all of the dependencies given are installed
depsPresent :: InstalledMap -> Map PackageName VersionRange -> Bool
depsPresent installedMap deps = all satisfied (Map.toList deps)
  where
    -- A dependency counts as satisfied when some version of it is
    -- installed and that version lies within the required range.
    satisfied (name, range) =
        case Map.lookup name installedMap of
            Just (version, _, _) -> version `withinRange` range
            Nothing -> False
-- | Build (when needed) and run the test suites of a single package,
-- recording built/passed markers on disk and optionally collecting HPC
-- coverage into a report.
singleTest :: M env m
           => (m () -> IO ())
           -> TestOpts
           -> LocalPackageTB
           -> ActionContext
           -> ExecuteEnv
           -> Task
           -> InstalledMap
           -> m ()
singleTest runInBase topts lptb ac ee task installedMap = do
    -- Tests are always enabled here; benchmarks are enabled only when
    -- their dependencies are already installed, keeping the configure
    -- flags stable across build/test invocations.
    (allDepsMap, cache) <- getConfigCache ee task $
        case taskType task of
            TTLocal lp -> concat
                [ ["--enable-tests"]
                , ["--enable-benchmarks" | depsPresent installedMap $ lpBenchDeps lp]
                ]
            _ -> []
    withSingleContext runInBase ac ee task (Just allDepsMap) (Just "test") $ \package cabalfp pkgDir cabal announce console mlogFile -> do
        neededConfig <- ensureConfig cache pkgDir ee (announce "configure (test)") cabal cabalfp
        config <- asks getConfig
        testBuilt <- checkTestBuilt pkgDir
        -- Rebuild if we just reconfigured, the package has dirty files, or
        -- the test components were never built.  Only local packages are
        -- expected on this code path, hence the assertion.
        let needBuild = neededConfig ||
                (case taskType task of
                    TTLocal lp -> lpDirtyFiles lp
                    _ -> assert False True) ||
                not testBuilt
            needHpc = toCoverage topts
            testsToRun = Set.toList $ lptbTests lptb
            components = map (T.unpack . T.append "test:") testsToRun
        when needBuild $ do
            announce "build (test)"
            -- Invalidate the on-disk built/success markers before building.
            unsetTestBuilt pkgDir
            unsetTestSuccess pkgDir
            case taskType task of
                TTLocal lp -> writeBuildCache pkgDir $ lpNewBuildCache lp
                TTUpstream _ _ -> assert False $ return ()
            extraOpts <- extraBuildOptions (eeBuildOpts ee)
            cabal (console && configHideTHLoading config) $
                "build" : (components ++ extraOpts)
            setTestBuilt pkgDir
        -- Decide whether to actually execute the suites: respect
        -- --no-run-tests, and skip suites that already passed unless a
        -- rerun was explicitly requested.
        toRun <-
            if toDisableRun topts
                then do
                    announce "Test running disabled by --no-run-tests flag."
                    return False
                else if toRerunTests topts
                    then return True
                    else do
                        success <- checkTestSuccess pkgDir
                        if success
                            then do
                                unless (null testsToRun) $ announce "skipping already passed test"
                                return False
                            else return True
        when toRun $ do
            bconfig <- asks getBuildConfig
            buildDir <- distDirFromDir pkgDir
            hpcDir <- hpcDirFromDir pkgDir
            when needHpc (createTree hpcDir)
            let exeExtension =
                    case configPlatform $ getConfig bconfig of
                        Platform _ Windows -> ".exe"
                        _ -> ""
            -- Run each suite, accumulating a map of failures:
            -- Just ec  = suite exited nonzero with that code;
            -- Nothing  = suite executable was not found at all.
            errs <- liftM Map.unions $ forM testsToRun $ \testName -> do
                nameDir <- parseRelDir $ T.unpack testName
                nameExe <- parseRelFile $ T.unpack testName ++ exeExtension
                nameTix <- liftM (pkgDir </>) $ parseRelFile $ T.unpack testName ++ ".tix"
                let exeName = buildDir </> $(mkRelDir "build") </> nameDir </> nameExe
                exists <- fileExists exeName
                menv <- liftIO $ configEnvOverride config EnvSettings
                    { esIncludeLocals = taskLocation task == Local
                    , esIncludeGhcPackagePath = True
                    , esStackExe = True
                    , esLocaleUtf8 = False
                    }
                if exists
                    then do
                        -- We clear out the .tix files before doing a run.
                        when needHpc $ do
                            tixexists <- fileExists nameTix
                            when tixexists $
                                $logWarn ("Removing HPC file " <> T.pack (toFilePath nameTix))
                            removeFileIfExists nameTix
                        let args = toAdditionalArgs topts
                            argsDisplay = case args of
                                [] -> ""
                                _ -> ", args: " <> T.intercalate " " (map showProcessArgDebug args)
                        announce $ "test (suite: " <> testName <> argsDisplay <> ")"
                        -- Route the suite's stdout/stderr into the log file
                        -- when one is open; otherwise inherit the console.
                        let cp = (proc (toFilePath exeName) args)
                                { cwd = Just $ toFilePath pkgDir
                                , Process.env = envHelper menv
                                , std_in = CreatePipe
                                , std_out =
                                    case mlogFile of
                                        Nothing -> Inherit
                                        Just (_, h) -> UseHandle h
                                , std_err =
                                    case mlogFile of
                                        Nothing -> Inherit
                                        Just (_, h) -> UseHandle h
                                }
                        -- Use createProcess_ to avoid the log file being closed afterwards
                        (Just inH, Nothing, Nothing, ph) <- liftIO $ createProcess_ "singleBuild.runTests" cp
                        liftIO $ hClose inH
                        ec <- liftIO $ waitForProcess ph
                        -- Move the .tix file out of the package directory
                        -- into the hpc work dir, for tidiness.
                        when needHpc $
                            moveFileIfExists nameTix hpcDir
                        return $ case ec of
                            ExitSuccess -> Map.empty
                            _ -> Map.singleton testName $ Just ec
                    else do
                        $logError $ T.concat
                            [ "Test suite "
                            , testName
                            , " executable not found for "
                            , packageNameText $ packageName package
                            ]
                        return $ Map.singleton testName Nothing
            when needHpc $ do
                wc <- getWhichCompiler
                let pkgDbs =
                        [ bcoSnapDB (eeBaseConfigOpts ee)
                        , bcoLocalDB (eeBaseConfigOpts ee)
                        ]
                generateHpcReport pkgDir package testsToRun (findGhcPkgKey (eeEnvOverride ee) wc pkgDbs)
            -- Read back the captured log (if any) so it can be attached
            -- to the failure exception below.
            bs <- liftIO $
                case mlogFile of
                    Nothing -> return ""
                    Just (logFile, h) -> do
                        hClose h
                        S.readFile $ toFilePath logFile
            unless (Map.null errs) $ throwM $ TestSuiteFailure
                (taskProvides task)
                errs
                (fmap fst mlogFile)
                bs
            -- Only reached when every suite succeeded (throwM above aborts).
            setTestSuccess pkgDir
-- | Build (when needed) and run the benchmarks of a single package.
singleBench :: M env m
            => (m () -> IO ())
            -> BenchmarkOpts
            -> LocalPackageTB
            -> ActionContext
            -> ExecuteEnv
            -> Task
            -> InstalledMap
            -> m ()
singleBench runInBase beopts _lptb ac ee task installedMap = do
    -- Benchmarks are always enabled; tests are enabled only when their
    -- dependencies are already installed, keeping configure flags stable.
    (allDepsMap, cache) <- getConfigCache ee task $
        case taskType task of
            TTLocal lp -> concat
                [ ["--enable-tests" | depsPresent installedMap $ lpTestDeps lp]
                , ["--enable-benchmarks"]
                ]
            _ -> []
    withSingleContext runInBase ac ee task (Just allDepsMap) (Just "bench") $ \_package cabalfp pkgDir cabal announce console _mlogFile -> do
        neededConfig <- ensureConfig cache pkgDir ee (announce "configure (benchmarks)") cabal cabalfp
        benchBuilt <- checkBenchBuilt pkgDir
        -- Rebuild if reconfigured, the package has dirty files, or the
        -- benchmarks were never built.  Only local packages are expected
        -- here, hence the assertion.
        let needBuild = neededConfig ||
                (case taskType task of
                    TTLocal lp -> lpDirtyFiles lp
                    _ -> assert False True) ||
                not benchBuilt
        when needBuild $ do
            announce "build (benchmarks)"
            unsetBenchBuilt pkgDir
            case taskType task of
                TTLocal lp -> writeBuildCache pkgDir $ lpNewBuildCache lp
                TTUpstream _ _ -> assert False $ return ()
            config <- asks getConfig
            extraOpts <- extraBuildOptions (eeBuildOpts ee)
            cabal (console && configHideTHLoading config) ("build" : extraOpts)
            setBenchBuilt pkgDir
        -- Forward any user-supplied arguments via --benchmark-options.
        let args = maybe []
                ((:[]) . ("--benchmark-options=" <>))
                (beoAdditionalArgs beopts)
        toRun <-
            if beoDisableRun beopts
                then do
                    announce "Benchmark running disabled by --no-run-benchmarks flag."
                    return False
                else do
                    return True
        when toRun $ do
            announce "benchmarks"
            cabal False ("bench" : args)
-- | Grab all output from the given @Handle@ and print it to stdout, stripping
-- Template Haskell "Loading package" lines. Does work in a separate thread.
printBuildOutput :: (MonadIO m, MonadBaseControl IO m, MonadLogger m)
                 => Bool -- ^ exclude TH loading?
                 -> Bool -- ^ convert paths to absolute?
                 -> LogLevel
                 -> Handle -> m ()
printBuildOutput excludeTHLoading makeAbsolute level outH = void $
    -- Conduit pipeline: handle -> lines -> strip \r -> drop TH noise ->
    -- absolutize file paths -> emit through the logger at 'level'.
    CB.sourceHandle outH
    $$ CB.lines
    =$ CL.map stripCarriageReturn
    =$ CL.filter (not . isTHLoading)
    =$ CL.mapM toAbsolutePath
    =$ CL.mapM_ (monadLoggerLog $(TH.location >>= liftLoc) "" level)
  where
    -- | Is this line a Template Haskell "Loading package" line
    -- ByteString
    isTHLoading :: S8.ByteString -> Bool
    isTHLoading _ | not excludeTHLoading = False
    isTHLoading bs =
        "Loading package " `S8.isPrefixOf` bs &&
        ("done." `S8.isSuffixOf` bs || "done.\r" `S8.isSuffixOf` bs)
    -- | Convert GHC error lines with file paths to have absolute file paths
    toAbsolutePath bs | not makeAbsolute = return bs
    toAbsolutePath bs = do
        -- Split at the first colon: candidate file path on the left,
        -- ":line:col:" suffix on the right.
        let (x, y) = S.break (== _colon) bs
        mabs <-
            if isValidSuffix y
                then do
                    -- canonicalizePath may fail (e.g. the path does not
                    -- exist); fall back to the original line in that case.
                    efp <- liftIO $ tryIO $ D.canonicalizePath $ S8.unpack x
                    case efp of
                        Left _ -> return Nothing
                        Right fp -> return $ Just $ S8.pack fp
                else return Nothing
        case mabs of
            Nothing -> return bs
            Just fp -> return $ fp `S.append` y
    -- | Match the line:column format at the end of lines,
    -- i.e. a @:<int>:<int>:@ suffix following the file path.
    isValidSuffix bs0 = maybe False (const True) $ do
        guard $ not $ S.null bs0
        guard $ S.head bs0 == _colon
        (_, bs1) <- S8.readInt $ S.drop 1 bs0
        guard $ not $ S.null bs1
        guard $ S.head bs1 == _colon
        (_, bs2) <- S8.readInt $ S.drop 1 bs1
        guard $ bs2 == ":"
    -- | Strip @\r@ characters from the byte vector. Used because Windows.
    stripCarriageReturn :: ByteString -> ByteString
    stripCarriageReturn = S8.filter (not . (=='\r'))
-- | Find the Setup.hs or Setup.lhs in the given directory. If none exists,
-- throw an exception.
getSetupHs :: Path Abs Dir -- ^ project directory
           -> IO (Path Abs File)
getSetupHs dir = search candidates
  where
    -- Candidates in priority order: Setup.hs wins over Setup.lhs.
    candidates =
        [ dir </> $(mkRelFile "Setup.hs")
        , dir </> $(mkRelFile "Setup.lhs")
        ]
    search [] = throwM $ NoSetupHsFound dir
    search (fp:fps) = do
        found <- fileExists fp
        if found then return fp else search fps
-- Do not pass `-hpcdir` as GHC option if the coverage is not enabled.
-- This helps running stack-compiled programs with dynamic interpreters like `hint`.
-- Cfr: https://github.com/commercialhaskell/stack/issues/997
-- | Extra GHC options for every @cabal build@: always dump interface
-- files, and additionally point HPC at its output directory when
-- coverage collection is enabled.
extraBuildOptions :: M env m => BuildOpts -> m [String]
extraBuildOptions bopts = do
    let ddumpOpts = " -ddump-hi -ddump-to-file"
    -- Plain if/then/else instead of case-matching on a Bool.
    if toCoverage (boptsTestOpts bopts)
        then do
            hpcIndexDir <- toFilePath . (</> dotHpc) <$> hpcRelativeDir
            return ["--ghc-options", "-hpcdir " ++ hpcIndexDir ++ ddumpOpts]
        else return ["--ghc-options", ddumpOpts]
-- | Take the given list of package dependencies and the contents of the global
-- package database, and construct a set of installed package IDs that:
--
-- * Excludes the Cabal library (it's added later)
--
-- * Includes all packages depended on by this package
--
-- * Includes all global packages, unless: (1) it's hidden, (2) it's shadowed
-- by a depended-on package, or (3) one of its dependencies is not met.
--
-- See:
--
-- * https://github.com/commercialhaskell/stack/issues/941
--
-- * https://github.com/commercialhaskell/stack/issues/944
--
-- * https://github.com/commercialhaskell/stack/issues/949
addGlobalPackages :: Map PackageIdentifier GhcPkgId -- ^ dependencies of the package
                  -> [DumpPackage () ()] -- ^ global packages
                  -> Set GhcPkgId
addGlobalPackages deps globals0 =
    res
  where
    -- Initial set of packages: the installed IDs of all dependencies
    -- (minus Cabal, which is excluded everywhere in this computation).
    res0 = Map.elems $ Map.filterWithKey (\ident _ -> not $ isCabal ident) deps
    -- First check on globals: it's not shadowed by a dep, it's not Cabal, and
    -- it's exposed
    goodGlobal1 dp = not (isDep dp)
                  && not (isCabal $ dpPackageIdent dp)
                  && dpIsExposed dp
    globals1 = filter goodGlobal1 globals0
    -- Create a Map of unique package names in the global database,
    -- keeping only the best (newest-version) candidate per name.
    globals2 = Map.fromListWith chooseBest
             $ map (packageIdentifierName . dpPackageIdent &&& id) globals1
    -- Final result: add in globals that have their dependencies met
    res = loop id (Map.elems globals2) $ Set.fromList res0
    ----------------------------------
    -- Some auxiliary helper functions
    ----------------------------------
    -- Is the given package identifier for any version of Cabal
    isCabal (PackageIdentifier name _) = name == $(mkPackageName "Cabal")
    -- Is the given package name provided by the package dependencies?
    isDep dp = packageIdentifierName (dpPackageIdent dp) `Set.member` depNames
    depNames = Set.map packageIdentifierName $ Map.keysSet deps
    -- Choose the best of two competing global packages (the newest version)
    chooseBest dp1 dp2
        | getVer dp1 < getVer dp2 = dp2
        | otherwise = dp1
      where
        getVer = packageIdentifierVersion . dpPackageIdent
    -- Are all dependencies of the given package met by the given Set of
    -- installed packages
    depsMet dp gids = all (`Set.member` gids) (dpDepends dp)
    -- Find all globals that have all of their dependencies met.
    -- 'front' is a difference-list accumulator of candidates already
    -- examined in this pass but not yet admitted.
    loop front (dp:dps) gids
        -- This package has its deps met. Add it to the list of dependencies
        -- and then traverse the list from the beginning (this package may have
        -- been a dependency of an earlier one).
        | depsMet dp gids = loop id (front dps) (Set.insert (dpGhcPkgId dp) gids)
        -- Deps are not met, keep going
        | otherwise = loop (front . (dp:)) dps gids
    -- None of the packages we checked can be added, therefore drop them all
    -- and return our results
    loop _ [] gids = gids
|
robstewart57/stack
|
src/Stack/Build/Execute.hs
|
bsd-3-clause
| 58,886
| 0
| 31
| 23,506
| 12,832
| 6,359
| 6,473
| -1
| -1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.HE.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Lang
import Duckling.Ordinal.Types
import Duckling.Resolve
import Duckling.Testing.Types
-- | Hebrew ordinal corpus: the shared test context specialised to
-- Hebrew, paired with the example set below.
corpus :: Corpus
corpus = (hebrewContext, allExamples)
  where
    hebrewContext = testContext {lang = HE}
-- All example utterances, grouped by the ordinal value they parse to.
allExamples :: [Example]
allExamples = concat
  [ examples (OrdinalData 4)
      [ "ארבעה"  -- parses to ordinal 4
      ]
  ]
|
rfranek/duckling
|
Duckling/Ordinal/HE/Corpus.hs
|
bsd-3-clause
| 721
| 0
| 9
| 137
| 107
| 68
| 39
| 15
| 1
|
module Abstract.Impl.Memcache.Counter.Dec (
module Abstract.Interfaces.Counter.Dec,
mkCounter'Memcache'Int'Dec
) where
import Abstract.Interfaces.Counter.Dec
import qualified Abstract.Impl.Memcache.Counter.Internal as MEMCACHE (mkCounter'Memcache'Int)
-- | Build a memcache-backed 'Int' counter and expose only its decrement
-- interface.  Thin wrapper around the internal constructor; the
-- @do v <- m; return (f v)@ pattern is replaced by the idiomatic 'fmap'.
-- (Type signature left inferred, as in the original; the type involves
-- project internals.)
mkCounter'Memcache'Int'Dec s t =
  fmap counterToDec (MEMCACHE.mkCounter'Memcache'Int s t)
|
adarqui/Abstract-Impl-Memcache
|
src/Abstract/Impl/Memcache/Counter/Dec.hs
|
bsd-3-clause
| 360
| 0
| 9
| 37
| 79
| 48
| 31
| 8
| 1
|
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Control.Monad.Skeleton (MonadView(..)
, hoistMV
, iterMV
, Skeleton(..)
, bone
, debone
, deboneBy
, boned
, hoistSkeleton
) where
import Control.Arrow
import Control.Applicative
import Control.Monad
import Control.Category
import Control.Monad.Skeleton.Internal
import Prelude hiding (id, (.))
-- | Re-add a bone. Inverse of 'debone'
boned :: MonadView t (Skeleton t) a -> Skeleton t a
boned (Return a) = ReturnS a
-- A bind view becomes a BindS with a singleton continuation queue.
boned (t :>>= k) = BindS t $ Leaf $ Kleisli k
{-# INLINE boned #-}
-- | Extract the first instruction in 'Skeleton'.
debone :: Skeleton t a -> MonadView t (Skeleton t) a
debone (ReturnS a) = Return a
debone (BindS t c0) = t :>>= go c0 where
  -- Walk the continuation queue: pop the head Kleisli arrow, apply it,
  -- then either keep consuming the rest of the queue (ReturnS case) or
  -- graft the remaining queue onto the new skeleton's queue (BindS case).
  go :: Cat (Kleisli (Skeleton t)) a b -> a -> Skeleton t b
  go c a = viewL c (\(Kleisli k) -> k a) $ \(Kleisli k) c' -> case k a of
    ReturnS b -> go c' b
    BindS t' c'' -> BindS t' (Tree c'' c')
-- | Continuation-passing variant of 'debone': deconstruct the skeleton
-- and hand the resulting 'MonadView' straight to the given function.
-- Reads nicely with @LambdaCase@:
--
-- > interpretM :: Monad m => Skeleton m a -> m a
-- > interpretM = deboneBy $ \case
-- >   Return a -> return a
-- >   x :>>= f -> x >>= interpretM . f
deboneBy :: (MonadView t (Skeleton t) a -> r) -> Skeleton t a -> r
deboneBy f = f . debone
{-# INLINE deboneBy #-}
-- | A skeleton that has only one bone: the instruction followed by a
-- pure 'ReturnS' continuation.
bone :: t a -> Skeleton t a
bone t = BindS t $ Leaf $ Kleisli ReturnS
{-# INLINABLE bone #-}
-- | Lift a transformation between bones into transformation between skeletons.
hoistSkeleton :: forall s t a. (forall x. s x -> t x) -> Skeleton s a -> Skeleton t a
hoistSkeleton f = go where
  go :: forall x. Skeleton s x -> Skeleton t x
  go (ReturnS a) = ReturnS a
  -- Transform the instruction and map 'go' over every queued continuation.
  go (BindS t c) = BindS (f t) $ transCat (transKleisli go) c
{-# INLINE hoistSkeleton #-}
-- | A deconstructed action: either a pure result, or one instruction
-- ('t a') paired with its continuation into @m@.
data MonadView t m x where
  Return :: a -> MonadView t m a
  (:>>=) :: !(t a) -> (a -> m b) -> MonadView t m b
infixl 1 :>>=
instance Functor m => Functor (MonadView t m) where
  fmap f (Return a) = Return (f a)
  -- Push the mapped function into the continuation.
  fmap f (t :>>= k) = t :>>= fmap f . k
  {-# INLINE fmap #-}
-- | Transform the instruction as well as the continuation.
hoistMV :: (forall x. s x -> t x) -> (m a -> n a) -> MonadView s m a -> MonadView t n a
hoistMV _ _ (Return a) = Return a
hoistMV f g (t :>>= k) = f t :>>= g . k
{-# INLINE hoistMV #-}
-- | Join 'MonadView' recursively.
iterMV :: Monad m => (t a -> MonadView m t a) -> t a -> m a
iterMV f = go where
  go t = case f t of
    m :>>= k -> m >>= go . k
    Return a -> return a
{-# INLINE iterMV #-}
-- | @'Skeleton' t@ is a monadic skeleton (operational monad) made out of 't'.
-- Skeletons can be fleshed out by interpreting the instructions.
-- It provides O(1) ('>>=') and 'debone'.
data Skeleton t a where
  ReturnS :: a -> Skeleton t a
  -- An instruction plus a queue ('Cat') of pending Kleisli continuations.
  BindS :: t a -> Cat (Kleisli (Skeleton t)) a b -> Skeleton t b
instance Functor (Skeleton t) where
  fmap = liftM
  {-# INLINE fmap #-}
instance Applicative (Skeleton t) where
  pure = ReturnS
  {-# INLINE pure #-}
  (<*>) = ap
  {-# INLINE (<*>) #-}
  ReturnS _ *> k = k
  -- Discarding the result: enqueue a constant continuation.
  BindS t c *> k = BindS t (c |> Kleisli (const k))
  a <* b = a >>= \x -> b >> return x
instance Monad (Skeleton t) where
  ReturnS a >>= k = k a
  -- O(1) bind: snoc the continuation onto the queue.
  BindS t c >>= k = BindS t (c |> Kleisli k)
|
fumieval/monad-skeleton
|
src/Control/Monad/Skeleton.hs
|
bsd-3-clause
| 3,339
| 0
| 14
| 785
| 1,223
| 630
| 593
| 77
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CognitoIdentity.CreateIdentityPool
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new identity pool. The identity pool is a store of user
-- identity information that is specific to your AWS account. The limit on
-- identity pools is 60 per account. You must use AWS Developer credentials
-- to call this API.
--
-- /See:/ <http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_CreateIdentityPool.html AWS API Reference> for CreateIdentityPool.
module Network.AWS.CognitoIdentity.CreateIdentityPool
(
-- * Creating a Request
createIdentityPool
, CreateIdentityPool
-- * Request Lenses
, cipSupportedLoginProviders
, cipDeveloperProviderName
, cipOpenIdConnectProviderARNs
, cipIdentityPoolName
, cipAllowUnauthenticatedIdentities
-- * Destructuring the Response
, identityPool
, IdentityPool
-- * Response Lenses
, ipSupportedLoginProviders
, ipDeveloperProviderName
, ipOpenIdConnectProviderARNs
, ipIdentityPoolId
, ipIdentityPoolName
, ipAllowUnauthenticatedIdentities
) where
import Network.AWS.CognitoIdentity.Types
import Network.AWS.CognitoIdentity.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Input to the CreateIdentityPool action.
--
-- /See:/ 'createIdentityPool' smart constructor.
data CreateIdentityPool = CreateIdentityPool'
    { _cipSupportedLoginProviders :: !(Maybe (Map Text Text))
      -- provider name -> provider app ID (see 'cipSupportedLoginProviders')
    , _cipDeveloperProviderName :: !(Maybe Text)
      -- developer provider "domain" (see 'cipDeveloperProviderName')
    , _cipOpenIdConnectProviderARNs :: !(Maybe [Text])
      -- OpenID Connect provider ARNs (see 'cipOpenIdConnectProviderARNs')
    , _cipIdentityPoolName :: !Text
      -- required pool name
    , _cipAllowUnauthenticatedIdentities :: !Bool
      -- required flag: allow unauthenticated logins?
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateIdentityPool' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cipSupportedLoginProviders'
--
-- * 'cipDeveloperProviderName'
--
-- * 'cipOpenIdConnectProviderARNs'
--
-- * 'cipIdentityPoolName'
--
-- * 'cipAllowUnauthenticatedIdentities'
createIdentityPool
    :: Text -- ^ 'cipIdentityPoolName'
    -> Bool -- ^ 'cipAllowUnauthenticatedIdentities'
    -> CreateIdentityPool
createIdentityPool pIdentityPoolName_ pAllowUnauthenticatedIdentities_ =
    -- All optional fields start out unset ('Nothing').
    CreateIdentityPool'
    { _cipSupportedLoginProviders = Nothing
    , _cipDeveloperProviderName = Nothing
    , _cipOpenIdConnectProviderARNs = Nothing
    , _cipIdentityPoolName = pIdentityPoolName_
    , _cipAllowUnauthenticatedIdentities = pAllowUnauthenticatedIdentities_
    }
-- NOTE(review): these are machine-generated field lenses (see the
-- "Derived from AWS service descriptions" header).  '_Default' and
-- '_Map'/'_Coerce' presumably adapt optional wire values to the exposed
-- types -- confirm against amazonka core documentation.
-- | Optional key:value pairs mapping provider names to provider app IDs.
cipSupportedLoginProviders :: Lens' CreateIdentityPool (HashMap Text Text)
cipSupportedLoginProviders = lens _cipSupportedLoginProviders (\ s a -> s{_cipSupportedLoginProviders = a}) . _Default . _Map;
-- | The \"domain\" by which Cognito will refer to your users. This name acts
-- as a placeholder that allows your backend and the Cognito service to
-- communicate about the developer provider. For the
-- 'DeveloperProviderName', you can use letters as well as period ('.'),
-- underscore ('_'), and dash ('-').
--
-- Once you have set a developer provider name, you cannot change it.
-- Please take care in setting this parameter.
cipDeveloperProviderName :: Lens' CreateIdentityPool (Maybe Text)
cipDeveloperProviderName = lens _cipDeveloperProviderName (\ s a -> s{_cipDeveloperProviderName = a});
-- | A list of OpenID Connect provider ARNs.
cipOpenIdConnectProviderARNs :: Lens' CreateIdentityPool [Text]
cipOpenIdConnectProviderARNs = lens _cipOpenIdConnectProviderARNs (\ s a -> s{_cipOpenIdConnectProviderARNs = a}) . _Default . _Coerce;
-- | A string that you provide.
cipIdentityPoolName :: Lens' CreateIdentityPool Text
cipIdentityPoolName = lens _cipIdentityPoolName (\ s a -> s{_cipIdentityPoolName = a});
-- | TRUE if the identity pool supports unauthenticated logins.
cipAllowUnauthenticatedIdentities :: Lens' CreateIdentityPool Bool
cipAllowUnauthenticatedIdentities = lens _cipAllowUnauthenticatedIdentities (\ s a -> s{_cipAllowUnauthenticatedIdentities = a});
-- Request/response wiring: POST a JSON body; the whole response body
-- parses directly into an 'IdentityPool'.
instance AWSRequest CreateIdentityPool where
        type Rs CreateIdentityPool = IdentityPool
        request = postJSON cognitoIdentity
        response = receiveJSON (\ s h x -> eitherParseJSON x)
-- Fixed headers: the X-Amz-Target operation selector plus the
-- amz-json-1.1 content type.
instance ToHeaders CreateIdentityPool where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("AWSCognitoIdentityService.CreateIdentityPool" ::
                       ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])
-- JSON body: optional fields are dropped when 'Nothing' (via catMaybes);
-- the two required fields are always present.
instance ToJSON CreateIdentityPool where
        toJSON CreateIdentityPool'{..}
          = object
              (catMaybes
                 [("SupportedLoginProviders" .=) <$>
                    _cipSupportedLoginProviders,
                  ("DeveloperProviderName" .=) <$>
                    _cipDeveloperProviderName,
                  ("OpenIdConnectProviderARNs" .=) <$>
                    _cipOpenIdConnectProviderARNs,
                  Just ("IdentityPoolName" .= _cipIdentityPoolName),
                  Just
                    ("AllowUnauthenticatedIdentities" .=
                       _cipAllowUnauthenticatedIdentities)])
-- The operation is addressed entirely by headers/body; path and query
-- are constant.
instance ToPath CreateIdentityPool where
        toPath = const "/"
instance ToQuery CreateIdentityPool where
        toQuery = const mempty
|
fmapfmapfmap/amazonka
|
amazonka-cognito-identity/gen/Network/AWS/CognitoIdentity/CreateIdentityPool.hs
|
mpl-2.0
| 6,153
| 0
| 13
| 1,297
| 746
| 449
| 297
| 99
| 1
|
import qualified System.Directory as D
-- | Print a greeting, then list the entries of the filesystem root.
main :: IO ()
main = do
  putStrLn "Hello World Haskell!"
  entries <- D.getDirectoryContents "/"
  mapM_ putStrLn entries
|
caiorss/Emacs-Elisp-Hacking
|
codes/myApp.hs
|
unlicense
| 148
| 0
| 9
| 28
| 45
| 22
| 23
| 5
| 1
|
{-# LANGUAGE ExistentialQuantification #-}
import Control.Concurrent.STM
import Benchmark
import Data.STM.Bag.Class as Bag
import Data.STM.Bag.Internal.ListBag
import Data.STM.Bag.Internal.TListBag
import Data.STM.Bag.Internal.PTLB
import Data.STM.Bag.Internal.PTTLB
-- Existential wrapper: hides the concrete bag type so heterogeneous
-- implementations can live together in the 'impls' list.
data Box = forall b. Bag.Bag b => Box (STM (b Int))
-- Name/constructor pairs for every benchmarked bag implementation.
impls :: [(String, Box)]
impls =
    [ ("coarse-list-bag", Box (new :: STM (ListBag Int)))
    , ("fine-list-bag", Box (new :: STM (TListBag Int)))
    , ("per-thread-list-bag", Box (new :: STM (PTLB Int)))
    , ("per-thread-tlist-bag", Box (new :: STM (PTTLB Int)))
    ]
-- | Look up a bag implementation by name; 'Nothing' for unknown names.
-- Uses Prelude's 'lookup' instead of a hand-rolled recursive search
-- (identical semantics: first match in 'impls' wins).
findImpl :: String -> Maybe Box
findImpl name = lookup name impls
-- Run the benchmark suite against one named implementation and shrink
-- the full report to its short form.
benchOne' :: (String, Box) -> IO ShortBenchReport
benchOne' (name, Box bcons) = do
    let cons = atomically bcons
        insOp b item = atomically $ Bag.add b item
        -- 'Bag.take' yields the removed element; discard it here.
        delOp b = (atomically $ Bag.take b) >> return ()
        struct = BenchStruct name cons insOp delOp
        -- NOTE(review): the numbers 1000/50/3/1000 are benchmark
        -- parameters -- confirm their meaning against 'BenchProc'.
        defProc = BenchProc 1000 50 3 1000 False
    report <- execBenchmark struct defProc
    return $ makeShortReport report
-- | Benchmark the implementation registered under the given name;
-- aborts with an error for unknown names.
benchOne :: String -> IO ShortBenchReport
benchOne name =
    maybe (error "Bag Bench: unknown implementation")
          (\impl -> benchOne' (name, impl))
          (findImpl name)
-- | Benchmark every named implementation in order and combine the short
-- reports into one composed report.  ('mapM' replaces the equivalent
-- but non-idiomatic @sequence . map@.)
composedBench :: [String] -> IO ComposedReport
composedBench names = fmap makeComposedReport $ mapM benchOne names
-- | Benchmark every registered implementation and either write the
-- rendered table to a timestamped log file or print it to stdout,
-- as decided by 'printedTable'.
main :: IO ()
main = do
    composedRep <- composedBench (map fst impls)
    let (toFile, stamp, text) = printedTable "bag-bench" composedRep
    if toFile
        then writeFile (stamp ++ ".log") text
        else putStrLn text
|
Alllex/stm-data-collection
|
benchmarks/BagBench.hs
|
bsd-3-clause
| 1,802
| 0
| 14
| 420
| 621
| 327
| 294
| 46
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module Tholos.App where
import Control.Monad.IO.Class (MonadIO)
import Data.Monoid ((<>))
import Network.Wai as Wai
import Network.Wai.Middleware.Cors
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import Servant
import Tholos.API
import Tholos.App.Config
import Tholos.App.Transformer
import Tholos.Server (server)
-- | The WAI application: request logging and CORS middleware wrapped
-- around the Servant server built from the given configuration.
app :: AppConfig -> Wai.Application
app cfg = logStdoutDev . cors (const $ Just corsPolicy) $
          serve api (readerServer cfg)
-- | Hoist the 'AppT'-based handlers into plain Servant handlers using
-- the supplied configuration.
readerServer :: AppConfig -> Server API
readerServer cfg = enter (readerToEither cfg) server
-- | Natural transformation from the application monad into Servant's
-- 'Handler', closing over the configuration.  (Eta-reduced: the
-- original wrapped @runAppT cfg@ in a redundant lambda.)
readerToEither :: AppConfig -> AppT :~> Handler
readerToEither cfg = Nat (runAppT cfg)
-- | CORS policy: the simple policy extended with the mutating verbs,
-- allowing a Content-Type request header.
corsPolicy :: CorsResourcePolicy
corsPolicy = simpleCorsResourcePolicy
    { corsMethods = simpleMethods <> ["DELETE", "PUT", "PATCH", "OPTIONS"]
    , corsRequestHeaders = ["Content-Type"]
    }
|
charlescrain/chainblock
|
src/Tholos/App.hs
|
bsd-3-clause
| 1,255
| 0
| 10
| 398
| 264
| 152
| 112
| 26
| 1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor #-}
-- | CoreSyn holds all the main data types for use by the Glasgow Haskell Compiler midsection
module CoreSyn (
-- * Main data types
Expr(..), Alt, Bind(..), AltCon(..), Arg,
Tickish(..), TickishScoping(..), TickishPlacement(..),
CoreProgram, CoreExpr, CoreAlt, CoreBind, CoreArg, CoreBndr,
TaggedExpr, TaggedAlt, TaggedBind, TaggedArg, TaggedBndr(..), deTagExpr,
-- ** 'Expr' construction
mkLets, mkLams,
mkApps, mkTyApps, mkCoApps, mkVarApps,
mkIntLit, mkIntLitInt,
mkWordLit, mkWordLitWord,
mkWord64LitWord64, mkInt64LitInt64,
mkCharLit, mkStringLit,
mkFloatLit, mkFloatLitFloat,
mkDoubleLit, mkDoubleLitDouble,
mkConApp, mkConApp2, mkTyBind, mkCoBind,
varToCoreExpr, varsToCoreExprs,
isId, cmpAltCon, cmpAlt, ltAlt,
-- ** Simple 'Expr' access functions and predicates
bindersOf, bindersOfBinds, rhssOfBind, rhssOfAlts,
collectBinders, collectTyBinders, collectTyAndValBinders,
collectArgs, collectArgsTicks, flattenBinds,
exprToType, exprToCoercion_maybe,
applyTypeToArg,
isValArg, isTypeArg, isTyCoArg, valArgCount, valBndrCount,
isRuntimeArg, isRuntimeVar,
tickishCounts, tickishScoped, tickishScopesLike, tickishFloatable,
tickishCanSplit, mkNoCount, mkNoScope,
tickishIsCode, tickishPlace,
tickishContains,
-- * Unfolding data types
Unfolding(..), UnfoldingGuidance(..), UnfoldingSource(..),
-- ** Constructing 'Unfolding's
noUnfolding, evaldUnfolding, mkOtherCon,
unSaturatedOk, needSaturated, boringCxtOk, boringCxtNotOk,
-- ** Predicates and deconstruction on 'Unfolding'
unfoldingTemplate, expandUnfolding_maybe,
maybeUnfoldingTemplate, otherCons,
isValueUnfolding, isEvaldUnfolding, isCheapUnfolding,
isExpandableUnfolding, isConLikeUnfolding, isCompulsoryUnfolding,
isStableUnfolding, hasStableCoreUnfolding_maybe,
isClosedUnfolding, hasSomeUnfolding,
canUnfold, neverUnfoldGuidance, isStableSource,
-- * Annotated expression data types
AnnExpr, AnnExpr'(..), AnnBind(..), AnnAlt,
-- ** Operations on annotated expressions
collectAnnArgs, collectAnnArgsTicks,
-- ** Operations on annotations
deAnnotate, deAnnotate', deAnnAlt, collectAnnBndrs,
-- * Orphanhood
IsOrphan(..), isOrphan, notOrphan, chooseOrphanAnchor,
-- * Core rule data types
CoreRule(..), RuleBase,
RuleName, RuleFun, IdUnfoldingFun, InScopeEnv,
RuleEnv(..), mkRuleEnv, emptyRuleEnv,
-- ** Operations on 'CoreRule's
ruleArity, ruleName, ruleIdName, ruleActivation,
setRuleIdName,
isBuiltinRule, isLocalRule, isAutoRule,
-- * Core vectorisation declarations data type
CoreVect(..)
) where
#include "HsVersions.h"
import CostCentre
import VarEnv( InScopeSet )
import Var
import Type
import Coercion
import Name
import NameSet
import NameEnv( NameEnv, emptyNameEnv )
import Literal
import DataCon
import Module
import TyCon
import BasicTypes
import DynFlags
import Outputable
import Util
import UniqFM
import SrcLoc ( RealSrcSpan, containsSpan )
import Binary
import Data.Data hiding (TyCon)
import Data.Int
import Data.Word
-- Left associative, so that we can say (f `mkTyApps` xs `mkVarApps` ys)
infixl 4 `mkApps`, `mkTyApps`, `mkVarApps`, `App`, `mkCoApps`
{-
************************************************************************
* *
\subsection{The main data types}
* *
************************************************************************
These data types are the heart of the compiler
-}
-- | This is the data type that represents GHCs core intermediate language. Currently
-- GHC uses System FC <http://research.microsoft.com/~simonpj/papers/ext-f/> for this purpose,
-- which is closely related to the simpler and better known System F <http://en.wikipedia.org/wiki/System_F>.
--
-- We get from Haskell source to this Core language in a number of stages:
--
-- 1. The source code is parsed into an abstract syntax tree, which is represented
-- by the data type 'HsExpr.HsExpr' with the names being 'RdrName.RdrNames'
--
-- 2. This syntax tree is /renamed/, which attaches a 'Unique.Unique' to every 'RdrName.RdrName'
-- (yielding a 'Name.Name') to disambiguate identifiers which are lexically identical.
-- For example, this program:
--
-- @
-- f x = let f x = x + 1
-- in f (x - 2)
-- @
--
-- Would be renamed by having 'Unique's attached so it looked something like this:
--
-- @
-- f_1 x_2 = let f_3 x_4 = x_4 + 1
-- in f_3 (x_2 - 2)
-- @
-- But see Note [Shadowing] below.
--
-- 3. The resulting syntax tree undergoes type checking (which also deals with instantiating
-- type class arguments) to yield a 'HsExpr.HsExpr' type that has 'Id.Id' as it's names.
--
-- 4. Finally the syntax tree is /desugared/ from the expressive 'HsExpr.HsExpr' type into
-- this 'Expr' type, which has far fewer constructors and hence is easier to perform
-- optimization, analysis and code generation on.
--
-- The type parameter @b@ is for the type of binders in the expression tree.
--
-- The language consists of the following elements:
--
-- * Variables
--
-- * Primitive literals
--
-- * Applications: note that the argument may be a 'Type'.
--
-- See "CoreSyn#let_app_invariant" for another invariant
--
-- * Lambda abstraction
--
-- * Recursive and non recursive @let@s. Operationally
-- this corresponds to allocating a thunk for the things
-- bound and then executing the sub-expression.
--
-- #top_level_invariant#
-- #letrec_invariant#
--
-- The right hand sides of all top-level and recursive @let@s
-- /must/ be of lifted type (see "Type#type_classification" for
-- the meaning of /lifted/ vs. /unlifted/).
--
-- See Note [CoreSyn let/app invariant]
--
-- #type_let#
-- We allow a /non-recursive/ let to bind a type variable, thus:
--
-- > Let (NonRec tv (Type ty)) body
--
-- This can be very convenient for postponing type substitutions until
-- the next run of the simplifier.
--
-- At the moment, the rest of the compiler only deals with type-let
-- in a Let expression, rather than at top level. We may want to revisit
-- this choice.
--
-- * Case split. Operationally this corresponds to evaluating
-- the scrutinee (expression examined) to weak head normal form
-- and then examining at most one level of resulting constructor (i.e. you
-- cannot do nested pattern matching directly with this).
--
-- The binder gets bound to the value of the scrutinee,
-- and the 'Type' must be that of all the case alternatives
--
-- #case_invariants#
-- This is one of the more complicated elements of the Core language,
-- and comes with a number of restrictions:
--
-- 1. The list of alternatives may be empty;
-- See Note [Empty case alternatives]
--
-- 2. The 'DEFAULT' case alternative must be first in the list,
-- if it occurs at all.
--
-- 3. The remaining cases are in order of increasing
-- tag (for 'DataAlts') or
-- lit (for 'LitAlts').
-- This makes finding the relevant constructor easy,
-- and makes comparison easier too.
--
-- 4. The list of alternatives must be exhaustive. An /exhaustive/ case
-- does not necessarily mention all constructors:
--
-- @
-- data Foo = Red | Green | Blue
-- ... case x of
-- Red -> True
-- other -> f (case x of
-- Green -> ...
-- Blue -> ... ) ...
-- @
--
-- The inner case does not need a @Red@ alternative, because @x@
-- can't be @Red@ at that program point.
--
-- 5. Floating-point values must not be scrutinised against literals.
-- See Trac #9238 and Note [Rules for floating-point comparisons]
-- in PrelRules for rationale.
--
-- * Cast an expression to a particular type.
-- This is used to implement @newtype@s (a @newtype@ constructor or
-- destructor just becomes a 'Cast' in Core) and GADTs.
--
-- * Notes. These allow general information to be added to expressions
-- in the syntax tree
--
-- * A type: this should only show up at the top level of an Arg
--
-- * A coercion
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Expr b
  = Var   Id                            -- ^ A term-level variable
  | Lit   Literal                       -- ^ A primitive literal
  | App   (Expr b) (Arg b)              -- ^ Application; note that the argument may be a 'Type'
  | Lam   b (Expr b)                    -- ^ Lambda abstraction
  | Let   (Bind b) (Expr b)             -- ^ Recursive or non-recursive @let@
  | Case  (Expr b) b Type [Alt b]       -- ^ Case split; see #case_invariant#
  | Cast  (Expr b) Coercion             -- ^ Cast an expression to a particular type
  | Tick  (Tickish Id) (Expr b)         -- ^ Attach extra information ('Tickish') to an expression
  | Type  Type                          -- ^ A type: should only show up at the top level of an 'Arg'
  | Coercion Coercion                   -- ^ A coercion
  deriving Data
-- | Type synonym for expressions that occur in function argument positions.
-- Only 'Arg' should contain a 'Type' at top level, general 'Expr' should not
type Arg b = Expr b

-- | A case split alternative. Consists of the constructor leading to the alternative,
-- the variables bound from the constructor, and the expression to be executed given that binding.
-- The default alternative is @(DEFAULT, [], rhs)@
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
type Alt b = (AltCon, [b], Expr b)
-- | A case alternative constructor (i.e. pattern match)
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data AltCon
  = DataAlt DataCon   -- ^ A plain data constructor: @case e of { Foo x -> ... }@.
                      -- Invariant: the 'DataCon' is always from a @data@ type,
                      -- and never from a @newtype@

  | LitAlt  Literal   -- ^ A literal: @case e of { 1 -> ... }@
                      -- Invariant: always an *unlifted* literal
                      -- See Note [Literal alternatives]

  | DEFAULT           -- ^ Trivial alternative: @case e of { _ -> ... }@
  deriving (Eq, Data)
-- | Binding, used for top level bindings in a module and local bindings in a @let@.
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Bind b
  = NonRec b (Expr b)     -- ^ A single non-recursive binding
  | Rec [(b, (Expr b))]   -- ^ A group of mutually recursive bindings
  deriving Data
{-
Note [Shadowing]
~~~~~~~~~~~~~~~~
While various passes attempt to rename on-the-fly in a manner that
avoids "shadowing" (thereby simplifying downstream optimizations),
neither the simplifier nor any other pass GUARANTEES that shadowing is
avoided. Thus, all passes SHOULD work fine even in the presence of
arbitrary shadowing in their inputs.
In particular, scrutinee variables `x` in expressions of the form
`Case e x t` are often renamed to variables with a prefix
"wild_". These "wild" variables may appear in the body of the
case-expression, and further, may be shadowed within the body.
So the Unique in a Var is not really unique at all. Still, it's very
useful to give a constant-time equality/ordering for Vars, and to give
a key that can be used to make sets of Vars (VarSet), or mappings from
Vars to other things (VarEnv). Moreover, if you do want to eliminate
shadowing, you can give a new Unique to an Id without changing its
printable name, which makes debugging easier.
Note [Literal alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Literal alternatives (LitAlt lit) are always for *un-lifted* literals.
We have one literal, a literal Integer, that is lifted, and we don't
allow in a LitAlt, because LitAlt cases don't do any evaluation. Also
(see Trac #5603) if you say
case 3 of
S# x -> ...
J# _ _ -> ...
(where S#, J# are the constructors for Integer) we don't want the
simplifier calling findAlt with argument (LitAlt 3). No no. Integer
literals are an opaque encoding of an algebraic data type, not of
an unlifted literal, like all the others.
Also, we do not permit case analysis with literal patterns on floating-point
types. See Trac #9238 and Note [Rules for floating-point comparisons] in
PrelRules for the rationale for this restriction.
-------------------------- CoreSyn INVARIANTS ---------------------------
Note [CoreSyn top-level invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #toplevel_invariant#
Note [CoreSyn letrec invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #letrec_invariant#
Note [CoreSyn let/app invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The let/app invariant
the right hand side of a non-recursive 'Let', and
the argument of an 'App',
/may/ be of unlifted type, but only if
the expression is ok-for-speculation.
This means that the let can be floated around
without difficulty. For example, this is OK:
y::Int# = x +# 1#
But this is not, as it may affect termination if the
expression is floated out:
y::Int# = fac 4#
In this situation you should use @case@ rather than a @let@. The function
'CoreUtils.needsCaseBinding' can help you determine which to generate, or
alternatively use 'MkCore.mkCoreLet' rather than this constructor directly,
which will generate a @case@ if necessary
The let/app invariant is initially enforced by DsUtils.mkCoreLet and mkCoreApp
Note [CoreSyn case invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #case_invariants#
Note [CoreSyn let goal]
~~~~~~~~~~~~~~~~~~~~~~~
* The simplifier tries to ensure that if the RHS of a let is a constructor
application, its arguments are trivial, so that the constructor can be
inlined vigorously.
Note [Type let]
~~~~~~~~~~~~~~~
See #type_let#
Note [Empty case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The alternatives of a case expression should be exhaustive. But
this exhaustive list can be empty!
* A case expression can have empty alternatives if (and only if) the
scrutinee is bound to raise an exception or diverge. When do we know
this? See Note [Bottoming expressions] in CoreUtils.
* The possibility of empty alternatives is one reason we need a type on
the case expression: if the alternatives are empty we can't get the
type from the alternatives!
* In the case of empty types (see Note [Bottoming expressions]), say
data T
we do NOT want to replace
case (x::T) of Bool {} --> error Bool "Inaccessible case"
because x might raise an exception, and *that*'s what we want to see!
(Trac #6067 is an example.) To preserve semantics we'd have to say
x `seq` error Bool "Inaccessible case"
but the 'seq' is just a case, so we are back to square 1. Or I suppose
we could say
x |> UnsafeCoerce T Bool
but that loses all trace of the fact that this originated with an empty
set of alternatives.
* We can use the empty-alternative construct to coerce error values from
one type to another. For example
f :: Int -> Int
f n = error "urk"
g :: Int -> (# Char, Bool #)
g x = case f x of { 0 -> ..., n -> ... }
Then if we inline f in g's RHS we get
case (error Int "urk") of (# Char, Bool #) { ... }
and we can discard the alternatives since the scrutinee is bottom to give
case (error Int "urk") of (# Char, Bool #) {}
This is nicer than using an unsafe coerce between Int ~ (# Char,Bool #),
if for no other reason that we don't need to instantiate the (~) at an
unboxed type.
* We treat a case expression with empty alternatives as trivial iff
its scrutinee is (see CoreUtils.exprIsTrivial). This is actually
important; see Note [Empty case is trivial] in CoreUtils
* An empty case is replaced by its scrutinee during the CoreToStg
conversion; remember STG is un-typed, so there is no need for
the empty case to do the type conversion.
************************************************************************
* *
Ticks
* *
************************************************************************
-}
-- | Allows attaching extra information to points in expressions
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Tickish id =
    -- | An @{-# SCC #-}@ profiling annotation, either automatically
    -- added by the desugarer as a result of -auto-all, or added by
    -- the user.
    ProfNote {
      profNoteCC    :: CostCentre, -- ^ the cost centre
      profNoteCount :: !Bool,      -- ^ bump the entry count?
      profNoteScope :: !Bool       -- ^ scopes over the enclosed expression
                                   -- (i.e. not just a tick)
    }

    -- | A "tick" used by HPC to track the execution of each
    -- subexpression in the original source code.
  | HpcTick {
      tickModule :: Module,
      tickId     :: !Int
    }

    -- | A breakpoint for the GHCi debugger. This behaves like an HPC
    -- tick, but has a list of free variables which will be available
    -- for inspection in GHCi when the program stops at the breakpoint.
    --
    -- NB. we must take account of these Ids when (a) counting free variables,
    -- and (b) substituting (don't substitute for them)
  | Breakpoint
    { breakpointId  :: !Int
    , breakpointFVs :: [id]  -- ^ the order of this list is important:
                             -- it matches the order of the lists in the
                             -- appropriate entry in HscTypes.ModBreaks.
                             --
                             -- Careful about substitution! See
                             -- Note [substTickish] in CoreSubst.
    }

    -- | A source note.
    --
    -- Source notes are pure annotations: Their presence should neither
    -- influence compilation nor execution. The semantics are given by
    -- causality: The presence of a source note means that a local
    -- change in the referenced source code span will possibly provoke
    -- the generated code to change. On the flip-side, the functionality
    -- of annotated code *must* be invariant against changes to all
    -- source code *except* the spans referenced in the source notes
    -- (see "Causality of optimized Haskell" paper for details).
    --
    -- Therefore extending the scope of any given source note is always
    -- valid. Note that it is still undesirable though, as this reduces
    -- their usefulness for debugging and profiling. Therefore we will
    -- generally try only to make use of this property where it is
    -- necessary to enable optimizations.
  | SourceNote
    { sourceSpan :: RealSrcSpan -- ^ Source covered
    , sourceName :: String      -- ^ Name for source location
                                -- (uses same names as CCs)
    }
  deriving (Eq, Ord, Data)
-- | A "counting tick" (where tickishCounts is True) is one that
-- counts evaluations in some way. We cannot discard a counting tick,
-- and the compiler should preserve the number of counting ticks as
-- far as possible.
--
-- However, we still allow the simplifier to increase or decrease
-- sharing, so in practice the actual number of ticks may vary, except
-- that we never change the value from zero to non-zero or vice versa.
tickishCounts :: Tickish id -> Bool
tickishCounts tick = case tick of
  ProfNote{}   -> profNoteCount tick
  HpcTick{}    -> True
  Breakpoint{} -> True
  _            -> False
-- | Specifies the scoping behaviour of ticks. This governs the
-- behaviour of ticks that care about the covered code and the cost
-- associated with it. Important for ticks relating to profiling.
data TickishScoping =
    -- | No scoping: The tick does not care about what code it
    -- covers. Transformations can freely move code inside as well as
    -- outside without any additional annotation obligations
    NoScope

    -- | Soft scoping: We want all code that is covered to stay
    -- covered. Note that this scope type does not forbid
    -- transformations from happening, as long as all results of
    -- the transformations are still covered by this tick or a copy of
    -- it. For example
    --
    --   let x = tick<...> (let y = foo in bar) in baz
    --     ===>
    --   let x = tick<...> bar; y = tick<...> foo in baz
    --
    -- Is a valid transformation as far as "bar" and "foo" is
    -- concerned, because both still are scoped over by the tick.
    --
    -- Note though that one might object to the "let" not being
    -- covered by the tick any more. However, we are generally lax
    -- with this - constant costs don't matter too much, and given
    -- that the "let" was effectively merged we can view it as having
    -- lost its identity anyway.
    --
    -- Also note that this scoping behaviour allows floating a tick
    -- "upwards" in pretty much any situation. For example:
    --
    --   case foo of x -> tick<...> bar
    --     ==>
    --   tick<...> case foo of x -> bar
    --
    -- While this is always legal, we want to make a best effort to
    -- only make use of this where it exposes transformation
    -- opportunities.
  | SoftScope

    -- | Cost centre scoping: We don't want any costs to move to other
    -- cost-centre stacks. This means we not only want no code or cost
    -- to get moved out of their cost centres, but we also object to
    -- code getting associated with new cost-centre ticks - or
    -- changing the order in which they get applied.
    --
    -- A rule of thumb is that we don't want any code to gain new
    -- annotations. However, there are notable exceptions, for
    -- example:
    --
    --   let f = \y -> foo in tick<...> ... (f x) ...
    --     ==>
    --   tick<...> ... foo[x/y] ...
    --
    -- In-lining lambdas like this is always legal, because inlining a
    -- function does not change the cost-centre stack when the
    -- function is called.
  | CostCentreScope

  deriving (Eq)
-- | The scoping rule intended for a given 'Tickish'.
tickishScoped :: Tickish id -> TickishScoping
tickishScoped tick = case tick of
  ProfNote{}
    | profNoteScope tick -> CostCentreScope
    | otherwise          -> NoScope
  HpcTick{}              -> NoScope
  -- Breakpoints are scoped: eventually we're going to do call
  -- stacks, but also this helps prevent the simplifier from moving
  -- breakpoints around and changing their result type (see #1531).
  Breakpoint{}           -> CostCentreScope
  SourceNote{}           -> SoftScope
-- | Check whether the tick's scoping rule is at least as permissive
-- as the given scoping rule.
tickishScopesLike :: Tickish id -> TickishScoping -> Bool
tickishScopesLike tick wanted = atLeastAsPermissive (tickishScoped tick) wanted
  where
    -- Permissiveness order: NoScope > SoftScope > CostCentreScope
    atLeastAsPermissive NoScope         _         = True
    atLeastAsPermissive _               NoScope   = False
    atLeastAsPermissive SoftScope       _         = True
    atLeastAsPermissive _               SoftScope = False
    atLeastAsPermissive CostCentreScope _         = True
-- | Returns @True@ for ticks that can be floated upwards easily even
-- where it might change execution counts, such as:
--
--   Just (tick<...> foo)
--     ==>
--   tick<...> (Just foo)
--
-- This is a combination of @tickishSoftScope@ and
-- @tickishCounts@. Note that in principle splittable ticks can become
-- floatable using @mkNoTick@ -- even though there's currently no
-- tickish for which that is the case.
tickishFloatable :: Tickish id -> Bool
tickishFloatable tick =
  tickishScopesLike tick SoftScope && not (tickishCounts tick)
-- | Returns @True@ for a tick that is both counting /and/ scoping and
-- can be split into its (tick, scope) parts using 'mkNoScope' and
-- 'mkNoTick' respectively.
tickishCanSplit :: Tickish id -> Bool
tickishCanSplit tick = case tick of
  ProfNote{ profNoteScope = True, profNoteCount = True } -> True
  _                                                      -> False
-- | Strip the counting behaviour from a splittable tick, leaving only
-- its scoping part. Ticks that do not count are returned unchanged;
-- counting ticks that cannot be split cause a panic.
mkNoCount :: Tickish id -> Tickish id
mkNoCount tick
  | not (tickishCounts tick)   = tick
  | not (tickishCanSplit tick) = panic "mkNoCount: Cannot split!"
  | ProfNote{} <- tick         = tick { profNoteCount = False }
  | otherwise                  = panic "mkNoCount: Undefined split!"
-- | Strip the scoping behaviour from a splittable tick, leaving only
-- its counting part. Unscoped ticks are returned unchanged; scoped
-- ticks that cannot be split cause a panic.
mkNoScope :: Tickish id -> Tickish id
mkNoScope tick
  | tickishScoped tick == NoScope = tick
  | not (tickishCanSplit tick)    = panic "mkNoScope: Cannot split!"
  | ProfNote{} <- tick            = tick { profNoteScope = False }
  | otherwise                     = panic "mkNoScope: Undefined split!"
-- | Return @True@ if this source annotation compiles to some backend
-- code. Without this flag, the tickish is seen as a simple annotation
-- that does not have any associated evaluation code.
--
-- What this means that we are allowed to disregard the tick if doing
-- so means that we can skip generating any code in the first place. A
-- typical example is top-level bindings:
--
--   foo = tick<...> \y -> ...
--     ==>
--   foo = \y -> tick<...> ...
--
-- Here there is just no operational difference between the first and
-- the second version. Therefore code generation should simply
-- translate the code as if it found the latter.
tickishIsCode :: Tickish id -> Bool
tickishIsCode tick = case tick of
  SourceNote{} -> False
  _            -> True   -- all the rest for now
-- | Governs the kind of expression that the tick gets placed on when
-- annotating for example using @mkTick@. If we find that we want to
-- put a tickish on an expression ruled out here, we try to float it
-- inwards until we find a suitable expression.
data TickishPlacement =
    -- | Place ticks exactly on run-time expressions. We can still
    -- move the tick through pure compile-time constructs such as
    -- other ticks, casts or type lambdas. This is the most
    -- restrictive placement rule for ticks, as all tickishs have in
    -- common that they want to track runtime processes. The only
    -- legal placement rule for counting ticks.
    PlaceRuntime

    -- | As @PlaceRuntime@, but we float the tick through all
    -- lambdas. This makes sense where there is little difference
    -- between annotating the lambda and annotating the lambda's code.
  | PlaceNonLam

    -- | In addition to floating through lambdas, cost-centre style
    -- tickishs can also be moved from constructors, non-function
    -- variables and literals. For example:
    --
    --   let x = scc<...> C (scc<...> y) (scc<...> 3) in ...
    --
    -- Neither the constructor application, the variable or the
    -- literal are likely to have any cost worth mentioning. And even
    -- if y names a thunk, the call would not care about the
    -- evaluation context. Therefore removing all annotations in the
    -- above example is safe.
  | PlaceCostCentre

  deriving (Eq)
-- | Placement behaviour we want for the ticks
tickishPlace :: Tickish id -> TickishPlacement
tickishPlace tick = case tick of
  ProfNote{}
    | profNoteCount tick -> PlaceRuntime
    | otherwise          -> PlaceCostCentre
  HpcTick{}              -> PlaceRuntime
  Breakpoint{}           -> PlaceRuntime
  SourceNote{}           -> PlaceNonLam
-- | Returns whether one tick "contains" the other one, therefore
-- making the second tick redundant.
tickishContains :: Eq b => Tickish b -> Tickish b -> Bool
tickishContains outer inner = case (outer, inner) of
  (SourceNote sp1 n1, SourceNote sp2 n2) ->
    n1 == n2 && containsSpan sp1 sp2
  _ ->
    outer == inner
{-
************************************************************************
* *
Orphans
* *
************************************************************************
-}
-- | Is this instance an orphan? If it is not an orphan, contains an 'OccName'
-- witnessing the instance's non-orphanhood.
-- See Note [Orphans]
data IsOrphan
  = IsOrphan
  | NotOrphan OccName -- ^ The OccName 'n' witnesses the instance's non-orphanhood
                      -- In that case, the instance is fingerprinted as part
                      -- of the definition of 'n'
  deriving Data
-- | Returns true if 'IsOrphan' is orphan.
isOrphan :: IsOrphan -> Bool
isOrphan orph = case orph of
  IsOrphan -> True
  _        -> False
-- | Returns true if 'IsOrphan' is not an orphan.
notOrphan :: IsOrphan -> Bool
notOrphan orph = case orph of
  NotOrphan{} -> True
  _           -> False
chooseOrphanAnchor :: NameSet -> IsOrphan
-- Something (rule, instance) is related to all the Names in this
-- list. Choose one of them to be an "anchor" for the orphan. We make
-- the choice deterministic to avoid gratuitous changes in the ABI
-- hash (Trac #4012). Specifically, use lexicographic comparison of
-- OccName rather than comparing Uniques
--
-- NB: 'minimum' uses Ord, and (Ord OccName) works lexicographically
--
chooseOrphanAnchor local_names
  | isEmptyNameSet local_names = IsOrphan
  | otherwise                  = NotOrphan anchor
  where
    -- It's OK to use nonDetEltsUFM here, see comments above
    anchor = minimum (map nameOccName (nonDetEltsUFM local_names))
-- Serialisation: tag byte 0 for 'IsOrphan', tag byte 1 followed by the
-- 'OccName' for 'NotOrphan'.
instance Binary IsOrphan where
    put_ bh IsOrphan      = putByte bh 0
    put_ bh (NotOrphan n) = do
        putByte bh 1
        put_ bh n
    get bh = do
        tag <- getByte bh
        if tag == 0
            then return IsOrphan
            else do
                n <- get bh
                return (NotOrphan n)
{-
Note [Orphans]
~~~~~~~~~~~~~~
Class instances, rules, and family instances are divided into orphans
and non-orphans. Roughly speaking, an instance/rule is an orphan if
its left hand side mentions nothing defined in this module. Orphan-hood
has two major consequences
* A module that contains orphans is called an "orphan module". If
the module being compiled depends (transitively) on an orphan
module M, then M.hi is read in regardless of whether M is otherwise
needed. This is to ensure that we don't miss any instance decls in
M. But it's painful, because it means we need to keep track of all
the orphan modules below us.
* A non-orphan is not finger-printed separately. Instead, for
fingerprinting purposes it is treated as part of the entity it
mentions on the LHS. For example
data T = T1 | T2
instance Eq T where ....
The instance (Eq T) is incorporated as part of T's fingerprint.
In contrast, orphans are all fingerprinted together in the
mi_orph_hash field of the ModIface.
See MkIface.addFingerprints.
Orphan-hood is computed
* For class instances:
when we make a ClsInst
(because it is needed during instance lookup)
* For rules and family instances:
when we generate an IfaceRule (MkIface.coreRuleToIfaceRule)
or IfaceFamInst (MkIface.instanceToIfaceInst)
-}
{-
************************************************************************
* *
\subsection{Transformation rules}
* *
************************************************************************
The CoreRule type and its friends are dealt with mainly in CoreRules,
but CoreFVs, Subst, PprCore, CoreTidy also inspect the representation.
-}
-- | Gathers a collection of 'CoreRule's. Maps (the name of) an 'Id' to its rules
type RuleBase = NameEnv [CoreRule]
        -- The rules are unordered;
        -- we sort out any overlaps on lookup
-- | A full rule environment which we can apply rules from. Like a 'RuleBase',
-- but it also includes the set of visible orphans we use to filter out orphan
-- rules which are not visible (even though we can see them...)
data RuleEnv
    = RuleEnv { re_base          :: RuleBase   -- ^ Rules, indexed by the head 'Id'
              , re_visible_orphs :: ModuleSet  -- ^ Modules whose orphan rules are visible
              }
-- | Build a 'RuleEnv' from a rule base and the list of modules whose
-- orphan rules should be visible.
mkRuleEnv :: RuleBase -> [Module] -> RuleEnv
mkRuleEnv rules vis_orphs =
  RuleEnv { re_base = rules, re_visible_orphs = mkModuleSet vis_orphs }
-- | A 'RuleEnv' with no rules and no visible orphan modules.
emptyRuleEnv :: RuleEnv
emptyRuleEnv =
  RuleEnv { re_base = emptyNameEnv, re_visible_orphs = emptyModuleSet }
-- | A 'CoreRule' is:
--
-- * \"Local\" if the function it is a rule for is defined in the
--   same module as the rule itself.
--
-- * \"Orphan\" if nothing on the LHS is defined in the same module
--   as the rule itself
data CoreRule
  = Rule {
        ru_name :: RuleName,            -- ^ Name of the rule, for communication with the user
        ru_act  :: Activation,          -- ^ When the rule is active

        -- Rough-matching stuff
        -- see comments with InstEnv.ClsInst( is_cls, is_rough )
        ru_fn    :: Name,               -- ^ Name of the 'Id.Id' at the head of this rule
        ru_rough :: [Maybe Name],       -- ^ Name at the head of each argument to the left hand side

        -- Proper-matching stuff
        -- see comments with InstEnv.ClsInst( is_tvs, is_tys )
        ru_bndrs :: [CoreBndr],         -- ^ Variables quantified over
        ru_args  :: [CoreExpr],         -- ^ Left hand side arguments

        -- And the right-hand side
        ru_rhs :: CoreExpr,             -- ^ Right hand side of the rule
                                        -- Occurrence info is guaranteed correct
                                        -- See Note [OccInfo in unfoldings and rules]

        -- Locality
        ru_auto :: Bool,        -- ^ @True@  <=> this rule is auto-generated
                                --   (notably by Specialise or SpecConstr)
                                --   @False@ <=> generated at the users behest
                                -- See Note [Trimming auto-rules] in TidyPgm
                                -- for the sole purpose of this field.

        ru_origin :: !Module,   -- ^ 'Module' the rule was defined in, used
                                -- to test if we should see an orphan rule.

        ru_orphan :: !IsOrphan, -- ^ Whether or not the rule is an orphan.

        ru_local :: Bool        -- ^ @True@ iff the fn at the head of the rule is
                                -- defined in the same module as the rule
                                -- and is not an implicit 'Id' (like a record selector,
                                -- class operation, or data constructor). This
                                -- is different from 'ru_orphan', where a rule
                                -- can avoid being an orphan if *any* Name in
                                -- LHS of the rule was defined in the same
                                -- module as the rule.
    }

  -- | Built-in rules are used for constant folding
  -- and suchlike. They have no free variables.
  -- A built-in rule is always visible (there is no such thing as
  -- an orphan built-in rule.)
  | BuiltinRule {
        ru_name  :: RuleName,   -- ^ As above
        ru_fn    :: Name,       -- ^ As above
        ru_nargs :: Int,        -- ^ Number of arguments that 'ru_try' consumes,
                                -- if it fires, including type arguments
        ru_try   :: RuleFun
                -- ^ This function does the rewrite. If it is given too many
                -- arguments, it simply discards them; the returned 'CoreExpr'
                -- is just the rewrite of 'ru_fn' applied to the first 'ru_nargs' args
    }
-- See Note [Extra args in rule matching] in Rules.hs
type RuleFun = DynFlags -> InScopeEnv -> Id -> [CoreExpr] -> Maybe CoreExpr

-- | The in-scope variable set paired with a way to unfold an 'Id'.
type InScopeEnv = (InScopeSet, IdUnfoldingFun)

-- | A function that embodies how to unfold an Id if you need
-- to do that in the Rule. The reason we need to pass this info in
-- is that whether an Id is unfoldable depends on the simplifier phase
type IdUnfoldingFun = Id -> Unfolding
-- | Is this rule one of the compiler's built-in (constant-folding style)
-- rules, as opposed to a user-written or compiler-generated 'Rule'?
isBuiltinRule :: CoreRule -> Bool
isBuiltinRule rule = case rule of
  BuiltinRule {} -> True
  _              -> False

-- | Was this rule generated automatically by the compiler?
-- Built-in rules never count as auto-generated.
isAutoRule :: CoreRule -> Bool
isAutoRule rule = case rule of
  BuiltinRule {}             -> False
  Rule { ru_auto = is_auto } -> is_auto
-- | The number of arguments the 'ru_fn' must be applied
-- to before the rule can match on it
ruleArity :: CoreRule -> Int
ruleArity rule = case rule of
  BuiltinRule { ru_nargs = n } -> n
  Rule { ru_args = args }      -> length args
-- | The user-visible name of the rule (present in both constructors).
ruleName :: CoreRule -> RuleName
ruleName = ru_name

-- | The phases in which the rule may fire; built-in rules are always active.
ruleActivation :: CoreRule -> Activation
ruleActivation (BuiltinRule { })       = AlwaysActive
ruleActivation (Rule { ru_act = act }) = act

-- | The 'Name' of the 'Id.Id' at the head of the rule left hand side
ruleIdName :: CoreRule -> Name
ruleIdName = ru_fn

-- | See the 'ru_local' field: @True@ iff the head function is defined in
-- the same module as the rule and is not an implicit 'Id'.
isLocalRule :: CoreRule -> Bool
isLocalRule = ru_local

-- | Set the 'Name' of the 'Id.Id' at the head of the rule left hand side
setRuleIdName :: Name -> CoreRule -> CoreRule
setRuleIdName nm ru = ru { ru_fn = nm }
{-
************************************************************************
* *
\subsection{Vectorisation declarations}
* *
************************************************************************
Representation of desugared vectorisation declarations that are fed to the vectoriser (via
'ModGuts').
-}
-- | A single desugared vectorisation declaration, fed to the vectoriser
-- via 'ModGuts' (see the section comment above).
data CoreVect = Vect      Id   CoreExpr
              | NoVect    Id
              | VectType  Bool TyCon (Maybe TyCon)
              | VectClass TyCon                     -- class tycon
              | VectInst  Id                        -- instance dfun (always SCALAR)  !!!FIXME: should be superfluous now
{-
************************************************************************
* *
Unfoldings
* *
************************************************************************
The @Unfolding@ type is declared here to avoid numerous loops
-}
-- | Records the /unfolding/ of an identifier, which is approximately the form the
-- identifier would have if we substituted its definition in for the identifier.
-- This type should be treated as abstract everywhere except in "CoreUnfold"
-- | Records the /unfolding/ of an identifier, which is approximately the form the
-- identifier would have if we substituted its definition in for the identifier.
-- This type should be treated as abstract everywhere except in "CoreUnfold"
data Unfolding
  = NoUnfolding        -- ^ We have no information about the unfolding

  | OtherCon [AltCon]  -- ^ It ain't one of these constructors.
                       -- @OtherCon xs@ also indicates that something has been evaluated
                       -- and hence there's no point in re-evaluating it.
                       -- @OtherCon []@ is used even for non-data-type values
                       -- to indicated evaluated-ness.  Notably:
                       --
                       -- > data C = C !(Int -> Int)
                       -- > case x of { C f -> ... }
                       --
                       -- Here, @f@ gets an @OtherCon []@ unfolding.

  | DFunUnfolding {     -- The Unfolding of a DFunId
                        -- See Note [DFun unfoldings]
                        --     df = /\a1..am. \d1..dn. MkD t1 .. tk
                        --                                 (op1 a1..am d1..dn)
                        --                                 (op2 a1..am d1..dn)
        df_bndrs :: [Var],      -- The bound variables [a1..m],[d1..dn]
        df_con   :: DataCon,    -- The dictionary data constructor (never a newtype datacon)
        df_args  :: [CoreExpr]  -- Args of the data con: types, superclasses and methods,
    }                           -- in positional order

  | CoreUnfolding {             -- An unfolding for an Id with no pragma,
                                -- or perhaps a NOINLINE pragma
                                -- (For NOINLINE, the phase, if any, is in the
                                -- InlinePragInfo for this Id.)
        uf_tmpl       :: CoreExpr,          -- Template; occurrence info is correct
        uf_src        :: UnfoldingSource,   -- Where the unfolding came from
        uf_is_top     :: Bool,              -- True <=> top level binding
        uf_is_value   :: Bool,              -- exprIsHNF template (cached); it is ok to discard
                                            --      a `seq` on this variable
        uf_is_conlike :: Bool,              -- True <=> applicn of constructor or CONLIKE function
                                            --      Cached version of exprIsConLike
        uf_is_work_free :: Bool,            -- True <=> doesn't waste (much) work to expand
                                            --          inside an inlining
                                            --      Cached version of exprIsCheap
        uf_expandable :: Bool,              -- True <=> can expand in RULE matching
                                            --      Cached version of exprIsExpandable
        uf_guidance   :: UnfoldingGuidance  -- Tells about the *size* of the template.
    }
  -- ^ An unfolding with redundant cached information. Parameters:
  --
  --  uf_tmpl: Template used to perform unfolding;
  --           NB: Occurrence info is guaranteed correct:
  --               see Note [OccInfo in unfoldings and rules]
  --
  --  uf_is_top: Is this a top level binding?
  --
  --  uf_is_value: 'exprIsHNF' template (cached); it is ok to discard a 'seq' on
  --     this variable
  --
  --  uf_is_work_free:  Does this waste only a little work if we expand it inside an inlining?
  --     Basically this is a cached version of 'exprIsWorkFree'
  --
  --  uf_guidance:  Tells us about the /size/ of the unfolding template
------------------------------------------------
-- | Where an unfolding came from.  This determines whether the template
-- is kept stable or tracks the current RHS; see 'isStableSource'.
data UnfoldingSource
  = -- See also Note [Historical note: unfoldings for wrappers]

    InlineRhs          -- The current rhs of the function
                       -- Replace uf_tmpl each time around

  | InlineStable       -- From an INLINE or INLINABLE pragma
                       --   INLINE     if guidance is UnfWhen
                       --   INLINABLE  if guidance is UnfIfGoodArgs/UnfoldNever
                       -- (well, technically an INLINABLE might be made
                       -- UnfWhen if it was small enough, and then
                       -- it will behave like INLINE outside the current
                       -- module, but that is the way automatic unfoldings
                       -- work so it is consistent with the intended
                       -- meaning of INLINABLE).
                       --
                       -- uf_tmpl may change, but only as a result of
                       -- gentle simplification, it doesn't get updated
                       -- to the current RHS during compilation as with
                       -- InlineRhs.
                       --
                       -- See Note [InlineRules]

  | InlineCompulsory   -- Something that *has* no binding, so you *must* inline it
                       -- Only a few primop-like things have this property
                       -- (see MkId.hs, calls to mkCompulsoryUnfolding).
                       -- Inline absolutely always, however boring the context.
-- | 'UnfoldingGuidance' says when unfolding should take place
data UnfoldingGuidance
  = UnfWhen {   -- Inline without thinking about the *size* of the uf_tmpl
                -- Used (a) for small *and* cheap unfoldings
                --      (b) for INLINE functions
                -- See Note [INLINE for small functions] in CoreUnfold
      ug_arity     :: Arity,    -- Number of value arguments expected
      ug_unsat_ok  :: Bool,     -- True <=> ok to inline even if unsaturated
      ug_boring_ok :: Bool      -- True <=> ok to inline even if the context is boring
                -- So True,True means "always"
    }

  | UnfIfGoodArgs {     -- Arose from a normal Id; the info here is the
                        -- result of a simple analysis of the RHS
      ug_args :: [Int],   -- Discount if the argument is evaluated.
                          -- (i.e., a simplification will definitely
                          -- be possible).  One elt of the list per *value* arg.
      ug_size :: Int,     -- The "size" of the unfolding.
      ug_res  :: Int      -- Scrutinee discount: the discount to subtract if the thing is in
    }                     -- a context (case (thing args) of ...),
                          -- (where there are the right number of arguments.)

  | UnfNever            -- The RHS is big, so don't inline it
  deriving (Eq)
{-
Note [Historical note: unfoldings for wrappers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to have a nice clever scheme in interface files for
wrappers. A wrapper's unfolding can be reconstructed from its worker's
id and its strictness. This decreased .hi file size (sometimes
significantly, for modules like GHC.Classes with many high-arity w/w
splits) and had a slight corresponding effect on compile times.
However, when we added the second demand analysis, this scheme lead to
some Core lint errors. The second analysis could change the strictness
signatures, which sometimes resulted in a wrapper's regenerated
unfolding applying the wrapper to too many arguments.
Instead of repairing the clever .hi scheme, we abandoned it in favor
of simplicity. The .hi sizes are usually insignificant (excluding the
+1M for base libraries), and compile time barely increases (~+1% for
nofib). The nicer upshot is that the UnfoldingSource no longer mentions
an Id, so, eg, substitutions need not traverse them.
Note [DFun unfoldings]
~~~~~~~~~~~~~~~~~~~~~~
The Arity in a DFunUnfolding is total number of args (type and value)
that the DFun needs to produce a dictionary. That's not necessarily
related to the ordinary arity of the dfun Id, esp if the class has
one method, so the dictionary is represented by a newtype. Example
class C a where { op :: a -> Int }
instance C a -> C [a] where op xs = op (head xs)
The instance translates to
$dfCList :: forall a. C a => C [a] -- Arity 2!
$dfCList = /\a.\d. $copList {a} d |> co
$copList :: forall a. C a => [a] -> Int -- Arity 2!
$copList = /\a.\d.\xs. op {a} d (head xs)
Now we might encounter (op (dfCList {ty} d) a1 a2)
and we want the (op (dfList {ty} d)) rule to fire, because $dfCList
has all its arguments, even though its (value) arity is 2. That's
why we record the number of expected arguments in the DFunUnfolding.
Note that although it's an Arity, it's most convenient for it to give
the *total* number of arguments, both type and value. See the use
site in exprIsConApp_maybe.
-}
-- Constants for the UnfWhen constructor

-- | Readable names for the 'ug_unsat_ok' field of 'UnfWhen'.
needSaturated, unSaturatedOk :: Bool
needSaturated = False
unSaturatedOk = True

-- | Readable names for the 'ug_boring_ok' field of 'UnfWhen'.
boringCxtNotOk, boringCxtOk :: Bool
boringCxtOk    = True
boringCxtNotOk = False

------------------------------------------------
noUnfolding :: Unfolding
-- ^ There is no known 'Unfolding'
evaldUnfolding :: Unfolding
-- ^ This unfolding marks the associated thing as being evaluated

noUnfolding    = NoUnfolding
evaldUnfolding = OtherCon []

-- | Build an unfolding recording only which constructors the value
-- certainly is /not/ (and that it has been evaluated).
mkOtherCon :: [AltCon] -> Unfolding
mkOtherCon = OtherCon
-- | Should the unfolding template from this source be kept stable
-- (i.e. not overwritten by the current RHS during simplification)?
isStableSource :: UnfoldingSource -> Bool
-- Keep the unfolding template
isStableSource src = case src of
  InlineCompulsory -> True
  InlineStable     -> True
  InlineRhs        -> False
-- | Retrieves the template of an unfolding: panics if none is known
unfoldingTemplate :: Unfolding -> CoreExpr
unfoldingTemplate = uf_tmpl

-- | Retrieves the template of an unfolding if possible
-- maybeUnfoldingTemplate is used mainly when specialising, and we do
-- want to specialise DFuns, so it's important to return a template
-- for DFunUnfoldings
maybeUnfoldingTemplate :: Unfolding -> Maybe CoreExpr
maybeUnfoldingTemplate (CoreUnfolding { uf_tmpl = expr })
  = Just expr
maybeUnfoldingTemplate (DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args })
  -- Rebuild the dfun RHS:  \bndrs. Con args
  = Just (mkLams bndrs (mkApps (Var (dataConWorkId con)) args))
maybeUnfoldingTemplate _
  = Nothing
-- | The constructors that the unfolding could never be:
-- returns @[]@ if no information is available
otherCons :: Unfolding -> [AltCon]
otherCons (OtherCon cons) = cons
otherCons _               = []

-- | Determines if it is certainly the case that the unfolding will
-- yield a value (something in HNF): returns @False@ if unsure
isValueUnfolding :: Unfolding -> Bool
        -- Returns False for OtherCon
isValueUnfolding (CoreUnfolding { uf_is_value = is_evald }) = is_evald
isValueUnfolding _                                          = False

-- | Determines if it possibly the case that the unfolding will
-- yield a value. Unlike 'isValueUnfolding' it returns @True@
-- for 'OtherCon'
isEvaldUnfolding :: Unfolding -> Bool
        -- Returns True for OtherCon
isEvaldUnfolding (OtherCon _)                               = True
isEvaldUnfolding (CoreUnfolding { uf_is_value = is_evald }) = is_evald
isEvaldUnfolding _                                          = False

-- | @True@ if the unfolding is a constructor application, the application
-- of a CONLIKE function or 'OtherCon'
isConLikeUnfolding :: Unfolding -> Bool
isConLikeUnfolding (OtherCon _)                            = True
isConLikeUnfolding (CoreUnfolding { uf_is_conlike = con }) = con
isConLikeUnfolding _                                       = False

-- | Is the thing we will unfold into certainly cheap?
-- (Reads the cached 'uf_is_work_free' flag.)
isCheapUnfolding :: Unfolding -> Bool
isCheapUnfolding (CoreUnfolding { uf_is_work_free = is_wf }) = is_wf
isCheapUnfolding _                                           = False

-- | Can this unfolding be expanded during RULE matching?
-- (Reads the cached 'uf_expandable' flag.)
isExpandableUnfolding :: Unfolding -> Bool
isExpandableUnfolding (CoreUnfolding { uf_expandable = is_expable }) = is_expable
isExpandableUnfolding _                                              = False

expandUnfolding_maybe :: Unfolding -> Maybe CoreExpr
-- Expand an expandable unfolding; this is used in rule matching
--   See Note [Expanding variables] in Rules.hs
-- The key point here is that CONLIKE things can be expanded
expandUnfolding_maybe (CoreUnfolding { uf_expandable = True, uf_tmpl = rhs }) = Just rhs
expandUnfolding_maybe _                                                       = Nothing
hasStableCoreUnfolding_maybe :: Unfolding -> Maybe Bool
-- Just True  <=> has stable inlining, very keen to inline (eg. INLINE pragma)
-- Just False <=> has stable inlining, open to inlining it (eg. INLINEABLE pragma)
-- Nothing    <=> not stable, or cannot inline it anyway
hasStableCoreUnfolding_maybe (CoreUnfolding { uf_src = src, uf_guidance = guide })
   | isStableSource src
   = case guide of
       UnfWhen {}       -> Just True
       UnfIfGoodArgs {} -> Just False
       UnfNever         -> Nothing
-- NB: when the guard above fails (unstable source) we fall through to
-- this catch-all, as we do for DFunUnfolding/OtherCon/NoUnfolding.
hasStableCoreUnfolding_maybe _ = Nothing

-- | Did this unfolding come from 'InlineCompulsory' (must always inline)?
isCompulsoryUnfolding :: Unfolding -> Bool
isCompulsoryUnfolding (CoreUnfolding { uf_src = InlineCompulsory }) = True
isCompulsoryUnfolding _                                             = False

isStableUnfolding :: Unfolding -> Bool
-- True of unfoldings that should not be overwritten
-- by a CoreUnfolding for the RHS of a let-binding
isStableUnfolding (CoreUnfolding { uf_src = src }) = isStableSource src
isStableUnfolding (DFunUnfolding {})               = True
isStableUnfolding _                                = False

isClosedUnfolding :: Unfolding -> Bool          -- No free variables
isClosedUnfolding (CoreUnfolding {}) = False
isClosedUnfolding (DFunUnfolding {}) = False
isClosedUnfolding _                  = True

-- | Only returns False if there is no unfolding information available at all
hasSomeUnfolding :: Unfolding -> Bool
hasSomeUnfolding NoUnfolding = False
hasSomeUnfolding _           = True

-- | Does the guidance say the unfolding must never be inlined?
neverUnfoldGuidance :: UnfoldingGuidance -> Bool
neverUnfoldGuidance UnfNever = True
neverUnfoldGuidance _        = False

-- | Can this unfolding ever be inlined?  Only a 'CoreUnfolding' whose
-- guidance is not 'UnfNever' qualifies.
canUnfold :: Unfolding -> Bool
canUnfold (CoreUnfolding { uf_guidance = g }) = not (neverUnfoldGuidance g)
canUnfold _                                   = False
{-
Note [InlineRules]
~~~~~~~~~~~~~~~~~
When you say
{-# INLINE f #-}
f x = <rhs>
you intend that calls (f e) are replaced by <rhs>[e/x] So we
should capture (\x.<rhs>) in the Unfolding of 'f', and never meddle
with it. Meanwhile, we can optimise <rhs> to our heart's content,
leaving the original unfolding intact in Unfolding of 'f'. For example
all xs = foldr (&&) True xs
any p = all . map p {-# INLINE any #-}
We optimise any's RHS fully, but leave the InlineRule saying "all . map p",
which deforests well at the call site.
So INLINE pragma gives rise to an InlineRule, which captures the original RHS.
Moreover, it's only used when 'f' is applied to the
specified number of arguments; that is, the number of argument on
the LHS of the '=' sign in the original source definition.
For example, (.) is now defined in the libraries like this
{-# INLINE (.) #-}
(.) f g = \x -> f (g x)
so that it'll inline when applied to two arguments. If 'x' appeared
on the left, thus
(.) f g x = f (g x)
it'd only inline when applied to three arguments. This slightly-experimental
change was requested by Roman, but it seems to make sense.
See also Note [Inlining an InlineRule] in CoreUnfold.
Note [OccInfo in unfoldings and rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In unfoldings and rules, we guarantee that the template is occ-analysed,
so that the occurrence info on the binders is correct. This is important,
because the Simplifier does not re-analyse the template when using it. If
the occurrence info is wrong
  - We may get more simplifier iterations than necessary, because
once-occ info isn't there
- More seriously, we may get an infinite loop if there's a Rec
without a loop breaker marked
************************************************************************
* *
AltCon
* *
************************************************************************
-}
-- The Ord is needed for the FiniteMap used in the lookForConstructor
-- in SimplEnv.  If you declared that lookForConstructor *ignores*
-- constructor-applications with LitArg args, then you could get
-- rid of this Ord.

instance Outputable AltCon where
  ppr (DataAlt dc) = ppr dc
  ppr (LitAlt lit) = ppr lit
  ppr DEFAULT      = text "__DEFAULT"

-- | Compare two alternatives by their 'AltCon' only, ignoring binders
-- and right-hand sides.
cmpAlt :: (AltCon, a, b) -> (AltCon, a, b) -> Ordering
cmpAlt (con1, _, _) (con2, _, _) = con1 `cmpAltCon` con2

ltAlt :: (AltCon, a, b) -> (AltCon, a, b) -> Bool
ltAlt a1 a2 = (a1 `cmpAlt` a2) == LT

cmpAltCon :: AltCon -> AltCon -> Ordering
-- ^ Compares 'AltCon's within a single list of alternatives.
-- DEFAULT sorts first; 'DataAlt's compare by constructor tag and
-- 'LitAlt's by literal.  Comparing a 'DataAlt' with a 'LitAlt' falls
-- into the catch-all, which warns (in a debug build) and answers LT.
cmpAltCon DEFAULT      DEFAULT      = EQ
cmpAltCon DEFAULT      _            = LT
cmpAltCon (DataAlt d1) (DataAlt d2) = dataConTag d1 `compare` dataConTag d2
cmpAltCon (DataAlt _)  DEFAULT      = GT
cmpAltCon (LitAlt  l1) (LitAlt  l2) = l1 `compare` l2
cmpAltCon (LitAlt _)   DEFAULT      = GT
cmpAltCon con1 con2 = WARN( True, text "Comparing incomparable AltCons" <+>
                                  ppr con1 <+> ppr con2 )
                      LT
{-
************************************************************************
* *
\subsection{Useful synonyms}
* *
************************************************************************
Note [CoreProgram]
~~~~~~~~~~~~~~~~~~
The top level bindings of a program, a CoreProgram, are represented as
a list of CoreBind
* Later bindings in the list can refer to earlier ones, but not vice
versa. So this is OK
NonRec { x = 4 }
Rec { p = ...q...x...
; q = ...p...x }
Rec { f = ...p..x..f.. }
NonRec { g = ..f..q...x.. }
But it would NOT be ok for 'f' to refer to 'g'.
* The occurrence analyser does strongly-connected component analysis
on each Rec binding, and splits it into a sequence of smaller
bindings where possible. So the program typically starts life as a
single giant Rec, which is then dependency-analysed into smaller
chunks.
-}
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
-- | The top-level bindings of a program; later binders may refer to
-- earlier ones but not vice versa.
type CoreProgram = [CoreBind]   -- See Note [CoreProgram]

-- | The common case for the type of binders and variables when
-- we are manipulating the Core language within GHC
type CoreBndr = Var
-- | Expressions where binders are 'CoreBndr's
type CoreExpr = Expr CoreBndr
-- | Argument expressions where binders are 'CoreBndr's
type CoreArg  = Arg  CoreBndr
-- | Binding groups where binders are 'CoreBndr's
type CoreBind = Bind CoreBndr
-- | Case alternatives where binders are 'CoreBndr's
type CoreAlt  = Alt  CoreBndr
{-
************************************************************************
* *
\subsection{Tagging}
* *
************************************************************************
-}
-- | Binders are /tagged/ with a t
data TaggedBndr t = TB CoreBndr t       -- TB for "tagged binder"

-- Tagged variants of the Core synonyms above.
type TaggedBind t = Bind (TaggedBndr t)
type TaggedExpr t = Expr (TaggedBndr t)
type TaggedArg  t = Arg  (TaggedBndr t)
type TaggedAlt  t = Alt  (TaggedBndr t)

instance Outputable b => Outputable (TaggedBndr b) where
  ppr (TB b l) = char '<' <> ppr b <> comma <> ppr l <> char '>'

instance Outputable b => OutputableBndr (TaggedBndr b) where
  pprBndr _ b    = ppr b        -- Simple
  pprInfixOcc  b = ppr b
  pprPrefixOcc b = ppr b
-- | Remove the tags from every binder in an expression, recursively.
deTagExpr :: TaggedExpr t -> CoreExpr
deTagExpr expr = case expr of
  Var v                   -> Var v
  Lit l                   -> Lit l
  Type ty                 -> Type ty
  Coercion co             -> Coercion co
  App e1 e2               -> App (deTagExpr e1) (deTagExpr e2)
  Lam (TB b _) e          -> Lam b (deTagExpr e)
  Let bind body           -> Let (deTagBind bind) (deTagExpr body)
  Case e (TB b _) ty alts -> Case (deTagExpr e) b ty (map deTagAlt alts)
  Tick t e                -> Tick t (deTagExpr e)
  Cast e co               -> Cast (deTagExpr e) co

-- | Remove the tags from every binder in a binding group.
deTagBind :: TaggedBind t -> CoreBind
deTagBind (NonRec (TB b _) rhs) = NonRec b (deTagExpr rhs)
deTagBind (Rec prs)             = Rec [ (b, deTagExpr rhs) | (TB b _, rhs) <- prs ]

-- | Remove the tags from the binders of a case alternative.
deTagAlt :: TaggedAlt t -> CoreAlt
deTagAlt (con, bndrs, rhs) = (con, [ b | TB b _ <- bndrs ], deTagExpr rhs)
{-
************************************************************************
* *
\subsection{Core-constructing functions with checking}
* *
************************************************************************
-}
-- | Apply a list of argument expressions to a function expression in a nested fashion. Prefer to
-- use 'MkCore.mkCoreApps' if possible
mkApps    :: Expr b -> [Arg b]    -> Expr b
-- | Apply a list of type argument expressions to a function expression in a nested fashion
mkTyApps  :: Expr b -> [Type]     -> Expr b
-- | Apply a list of coercion argument expressions to a function expression in a nested fashion
mkCoApps  :: Expr b -> [Coercion] -> Expr b
-- | Apply a list of type or value variables to a function expression in a nested fashion
mkVarApps :: Expr b -> [Var]      -> Expr b
-- | Apply a list of argument expressions to a data constructor in a nested fashion. Prefer to
-- use 'MkCore.mkCoreConApps' if possible
mkConApp  :: DataCon -> [Arg b]   -> Expr b

mkApps    f args  = foldl App f args
mkCoApps  f args  = foldl (\ e a -> App e (Coercion a)) f args
mkVarApps f vars  = foldl (\ e a -> App e (varToCoreExpr a)) f vars
mkConApp con args = mkApps (Var (dataConWorkId con)) args

mkTyApps f args = foldl (\ e a -> App e (typeOrCoercion a)) f args
  where
    -- A type argument that is really a coercion must be wrapped in
    -- 'Coercion' rather than 'Type'
    typeOrCoercion ty
      | Just co <- isCoercionTy_maybe ty = Coercion co
      | otherwise                        = Type ty

-- | Saturate a data constructor with the given type arguments and
-- (type or value) variables.
mkConApp2 :: DataCon -> [Type] -> [Var] -> Expr b
mkConApp2 con tys arg_ids = Var (dataConWorkId con)
                            `mkApps` map Type tys
                            `mkApps` map varToCoreExpr arg_ids
-- Smart constructors for machine-level literal expressions.

-- | Create a machine integer literal expression of type @Int#@ from an @Integer@.
-- If you want an expression of type @Int@ use 'MkCore.mkIntExpr'
mkIntLit :: DynFlags -> Integer -> Expr b
-- | Create a machine integer literal expression of type @Int#@ from an @Int@.
-- If you want an expression of type @Int@ use 'MkCore.mkIntExpr'
mkIntLitInt :: DynFlags -> Int -> Expr b

mkIntLit    dflags n = Lit (mkMachInt dflags n)
mkIntLitInt dflags n = Lit (mkMachInt dflags (toInteger n))

-- | Create a machine word literal expression of type  @Word#@ from an @Integer@.
-- If you want an expression of type @Word@ use 'MkCore.mkWordExpr'
mkWordLit :: DynFlags -> Integer -> Expr b
-- | Create a machine word literal expression of type  @Word#@ from a @Word@.
-- If you want an expression of type @Word@ use 'MkCore.mkWordExpr'
mkWordLitWord :: DynFlags -> Word -> Expr b

mkWordLit     dflags w = Lit (mkMachWord dflags w)
mkWordLitWord dflags w = Lit (mkMachWord dflags (toInteger w))

-- | Create a @Word64#@ literal expression from a @Word64@.
mkWord64LitWord64 :: Word64 -> Expr b
mkWord64LitWord64 w = Lit (mkMachWord64 (toInteger w))

-- | Create an @Int64#@ literal expression from an @Int64@.
mkInt64LitInt64 :: Int64 -> Expr b
mkInt64LitInt64 w = Lit (mkMachInt64 (toInteger w))

-- | Create a machine character literal expression of type @Char#@.
-- If you want an expression of type @Char@ use 'MkCore.mkCharExpr'
mkCharLit :: Char -> Expr b
-- | Create a machine string literal expression of type @Addr#@.
-- If you want an expression of type @String@ use 'MkCore.mkStringExpr'
mkStringLit :: String -> Expr b

mkCharLit   c = Lit (mkMachChar c)
mkStringLit s = Lit (mkMachString s)

-- | Create a machine single precision literal expression of type @Float#@ from a @Rational@.
-- If you want an expression of type @Float@ use 'MkCore.mkFloatExpr'
mkFloatLit :: Rational -> Expr b
-- | Create a machine single precision literal expression of type @Float#@ from a @Float@.
-- If you want an expression of type @Float@ use 'MkCore.mkFloatExpr'
mkFloatLitFloat :: Float -> Expr b

mkFloatLit      f = Lit (mkMachFloat f)
mkFloatLitFloat f = Lit (mkMachFloat (toRational f))

-- | Create a machine double precision literal expression of type @Double#@ from a @Rational@.
-- If you want an expression of type @Double@ use 'MkCore.mkDoubleExpr'
mkDoubleLit :: Rational -> Expr b
-- | Create a machine double precision literal expression of type @Double#@ from a @Double@.
-- If you want an expression of type @Double@ use 'MkCore.mkDoubleExpr'
mkDoubleLitDouble :: Double -> Expr b

mkDoubleLit       d = Lit (mkMachDouble d)
mkDoubleLitDouble d = Lit (mkMachDouble (toRational d))
-- | Bind all supplied binding groups over an expression in a nested let expression. Assumes
-- that the rhs satisfies the let/app invariant. Prefer to use 'MkCore.mkCoreLets' if
-- possible, which does guarantee the invariant
mkLets :: [Bind b] -> Expr b -> Expr b
-- | Bind all supplied binders over an expression in a nested lambda expression. Prefer to
-- use 'MkCore.mkCoreLams' if possible
mkLams :: [b] -> Expr b -> Expr b

mkLams binders body = foldr Lam body binders
mkLets binds body   = foldr Let body binds

-- | Create a binding group where a type variable is bound to a type. Per "CoreSyn#type_let",
-- this can only be used to bind something in a non-recursive @let@ expression
mkTyBind :: TyVar -> Type -> CoreBind
mkTyBind tv ty = NonRec tv (Type ty)

-- | Create a binding group where a coercion variable is bound to a coercion.
-- Per "CoreSyn#type_let", this can only be used to bind something in a
-- non-recursive @let@ expression
mkCoBind :: CoVar -> Coercion -> CoreBind
mkCoBind cv co = NonRec cv (Coercion co)
-- | Convert a binder into either a 'Var' or 'Type' 'Expr' appropriately
varToCoreExpr :: CoreBndr -> Expr b
varToCoreExpr v | isTyVar v = Type (mkTyVarTy v)
                | isCoVar v = Coercion (mkCoVarCo v)
                | otherwise = ASSERT( isId v ) Var v

-- | 'varToCoreExpr' over a list of binders.
varsToCoreExprs :: [CoreBndr] -> [Expr b]
varsToCoreExprs vs = map varToCoreExpr vs
{-
************************************************************************
* *
Getting a result type
* *
************************************************************************
These are defined here to avoid a module loop between CoreUtils and CoreFVs
-}
applyTypeToArg :: Type -> CoreExpr -> Type
-- ^ Determines the type resulting from applying an expression with given type
-- to a given argument expression
applyTypeToArg fun_ty arg = piResultTy fun_ty (exprToType arg)

-- | If the expression is a 'Type', converts. Otherwise,
-- panics. NB: This does /not/ convert 'Coercion' to 'CoercionTy'.
exprToType :: CoreExpr -> Type
exprToType (Type ty) = ty
exprToType _bad      = pprPanic "exprToType" empty

-- | If the expression is a 'Coercion', converts.
exprToCoercion_maybe :: CoreExpr -> Maybe Coercion
exprToCoercion_maybe (Coercion co) = Just co
exprToCoercion_maybe _             = Nothing
{-
************************************************************************
* *
\subsection{Simple access functions}
* *
************************************************************************
-}
-- | Extract every variable bound by this binding group
bindersOf :: Bind b -> [b]
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
bindersOf (NonRec binder _) = [binder]
bindersOf (Rec pairs)       = [binder | (binder, _) <- pairs]
-- | 'bindersOf' applied to a list of binding groups
bindersOfBinds :: [Bind b] -> [b]
bindersOfBinds = concatMap bindersOf
-- | The right-hand sides of a binding group, in order.
rhssOfBind :: Bind b -> [Expr b]
rhssOfBind (NonRec _ rhs) = [rhs]
rhssOfBind (Rec pairs)    = [rhs | (_,rhs) <- pairs]

-- | The right-hand sides of a list of case alternatives.
rhssOfAlts :: [Alt b] -> [Expr b]
rhssOfAlts alts = [e | (_,_,e) <- alts]
-- | Collapse all the bindings in the supplied groups into a single
-- list of lhs\/rhs pairs suitable for binding in a 'Rec' binding group
flattenBinds :: [Bind b] -> [(b, Expr b)]
flattenBinds binds = case binds of
  NonRec b r : rest -> (b, r) : flattenBinds rest
  Rec prs    : rest -> prs ++ flattenBinds rest
  []                -> []
-- | We often want to strip off leading lambdas before getting down to
-- business. Variants are 'collectTyBinders', 'collectValBinders',
-- and 'collectTyAndValBinders'
collectBinders         :: Expr b   -> ([b],     Expr b)
collectTyBinders       :: CoreExpr -> ([TyVar], CoreExpr)
collectValBinders      :: CoreExpr -> ([Id],    CoreExpr)
collectTyAndValBinders :: CoreExpr -> ([TyVar], [Id], CoreExpr)

collectBinders expr
  = go [] expr
  where
    go bs (Lam b e) = go (b:bs) e
    go bs e         = (reverse bs, e)

collectTyBinders expr
  = go [] expr
  where
    -- Stops at the first non-type-variable lambda
    go tvs (Lam b e) | isTyVar b = go (b:tvs) e
    go tvs e                     = (reverse tvs, e)

collectValBinders expr
  = go [] expr
  where
    -- Stops at the first non-value lambda
    go ids (Lam b e) | isId b = go (b:ids) e
    go ids body               = (reverse ids, body)

collectTyAndValBinders expr
  = (tvs, ids, body)
  where
    -- First the leading type lambdas, then the value lambdas that follow
    (tvs, body1) = collectTyBinders expr
    (ids, body)  = collectValBinders body1
-- | Takes a nested application expression and returns the function
-- being applied together with the arguments it is applied to.
collectArgs :: Expr b -> (Expr b, [Arg b])
collectArgs e0 = peel e0 []
  where
    -- Walk down the application spine, accumulating arguments
    peel (App fun arg) acc = peel fun (arg : acc)
    peel fun           acc = (fun, acc)
-- | Like 'collectArgs', but also looks through floatable
-- ticks if it means that we can find more arguments.
collectArgsTicks :: (Tickish Id -> Bool) -> Expr b
                 -> (Expr b, [Arg b], [Tickish Id])
collectArgsTicks skipTick expr
  = go expr [] []
  where
    go (App f a)  as ts = go f (a:as) ts
    go (Tick t e) as ts
      | skipTick t      = go e as (t:ts)
      -- NB: if the guard fails we fall through to the catch-all
      -- below, so a non-skippable tick terminates the walk
    go e          as ts = (e, as, reverse ts)
{-
************************************************************************
* *
\subsection{Predicates}
* *
************************************************************************
At one time we optionally carried type arguments through to runtime.
@isRuntimeVar v@ returns if (Lam v _) really becomes a lambda at runtime,
i.e. if type applications are actual lambdas because types are kept around
at runtime. Similarly isRuntimeArg.
-}
-- | Will this variable exist at runtime?
isRuntimeVar :: Var -> Bool
isRuntimeVar = isId

-- | Will this argument expression exist at runtime?
isRuntimeArg :: CoreExpr -> Bool
isRuntimeArg = isValArg

-- | Returns @True@ for value arguments, false for type args
-- NB: coercions are value arguments (zero width, to be sure,
-- like State#, but still value args).
isValArg :: Expr b -> Bool
isValArg e = not (isTypeArg e)

-- | Returns @True@ iff the expression is a 'Type' or 'Coercion'
-- expression at its top level
isTyCoArg :: Expr b -> Bool
isTyCoArg (Type {})     = True
isTyCoArg (Coercion {}) = True
isTyCoArg _             = False

-- | Returns @True@ iff the expression is a 'Type' expression at its
-- top level.  Note this does NOT include 'Coercion's.
isTypeArg :: Expr b -> Bool
isTypeArg (Type {}) = True
isTypeArg _         = False

-- | The number of binders that bind values rather than types
valBndrCount :: [CoreBndr] -> Int
valBndrCount = count isId

-- | The number of argument expressions that are values rather than types at their top level
valArgCount :: [Arg b] -> Int
valArgCount = count isValArg
{-
************************************************************************
* *
\subsection{Annotated core}
* *
************************************************************************
-}
-- | Annotated core: allows annotation at every node in the tree
type AnnExpr bndr annot = (annot, AnnExpr' bndr annot)

-- | A clone of the 'Expr' type but allowing annotation at every tree node
data AnnExpr' bndr annot
  = AnnVar      Id
  | AnnLit      Literal
  | AnnLam      bndr (AnnExpr bndr annot)
  | AnnApp      (AnnExpr bndr annot) (AnnExpr bndr annot)
  | AnnCase     (AnnExpr bndr annot) bndr Type [AnnAlt bndr annot]
  | AnnLet      (AnnBind bndr annot) (AnnExpr bndr annot)
  | AnnCast     (AnnExpr bndr annot) (annot, Coercion)
                   -- Put an annotation on the (root of) the coercion
  | AnnTick     (Tickish Id) (AnnExpr bndr annot)
  | AnnType     Type
  | AnnCoercion Coercion

-- | A clone of the 'Alt' type but allowing annotation at every tree node
type AnnAlt bndr annot = (AltCon, [bndr], AnnExpr bndr annot)

-- | A clone of the 'Bind' type but allowing annotation at every tree node
data AnnBind bndr annot
  = AnnNonRec bndr (AnnExpr bndr annot)
  | AnnRec    [(bndr, AnnExpr bndr annot)]
-- | Takes a nested annotated application and returns the function being
-- applied together with the (annotated) arguments it is applied to.
collectAnnArgs :: AnnExpr b a -> (AnnExpr b a, [AnnExpr b a])
collectAnnArgs e0 = peel e0 []
  where
    -- Walk down the application spine, accumulating arguments
    peel (_, AnnApp fun arg) acc = peel fun (arg : acc)
    peel fun                 acc = (fun, acc)
-- | As 'collectArgsTicks', but for annotated expressions: collect
-- arguments, looking through ticks accepted by the predicate.
collectAnnArgsTicks :: (Tickish Var -> Bool) -> AnnExpr b a
                    -> (AnnExpr b a, [AnnExpr b a], [Tickish Var])
collectAnnArgsTicks tickishOk expr
  = go expr [] []
  where
    go (_, AnnApp f a)  as ts = go f (a:as) ts
    go (_, AnnTick t e) as ts | tickishOk t
                              = go e as (t:ts)
    -- NB: a tick rejected by the predicate falls through to the
    -- catch-all below and terminates the walk
    go e                as ts = (e, as, reverse ts)
-- | Drop the annotation at the root and strip the rest of the tree.
deAnnotate :: AnnExpr bndr annot -> Expr bndr
deAnnotate = deAnnotate' . snd

-- | Recursively strip all annotations, yielding a plain 'Expr'.
deAnnotate' :: AnnExpr' bndr annot -> Expr bndr
deAnnotate' expr = case expr of
  AnnType t              -> Type t
  AnnCoercion co         -> Coercion co
  AnnVar v               -> Var v
  AnnLit lit             -> Lit lit
  AnnLam binder body     -> Lam binder (deAnnotate body)
  AnnApp fun arg         -> App (deAnnotate fun) (deAnnotate arg)
  AnnCast e (_, co)      -> Cast (deAnnotate e) co
  AnnTick tick body      -> Tick tick (deAnnotate body)
  AnnLet bind body       -> Let (deAnnBind bind) (deAnnotate body)
  AnnCase scrut v t alts -> Case (deAnnotate scrut) v t (map deAnnAlt alts)
  where
    -- Strip annotations from a binding group
    deAnnBind (AnnNonRec var rhs) = NonRec var (deAnnotate rhs)
    deAnnBind (AnnRec pairs)      = Rec [ (v, deAnnotate rhs) | (v, rhs) <- pairs ]

-- | Strip annotations from a single case alternative.
deAnnAlt :: AnnAlt bndr annot -> Alt bndr
deAnnAlt (con, args, rhs) = (con, args, deAnnotate rhs)
-- | As 'collectBinders' but for 'AnnExpr' rather than 'Expr'
collectAnnBndrs :: AnnExpr bndr annot -> ([bndr], AnnExpr bndr annot)
collectAnnBndrs = go []
  where
    -- Accumulate leading lambda binders in reverse, then flip at the end
    go acc (_, AnnLam b body) = go (b : acc) body
    go acc body               = (reverse acc, body)
|
vTurbine/ghc
|
compiler/coreSyn/CoreSyn.hs
|
bsd-3-clause
| 73,224
| 0
| 14
| 19,898
| 8,509
| 4,839
| 3,670
| 600
| 5
|
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import qualified GitHub.Endpoints.Users.PublicSSHKeys as PK
import qualified GitHub.Auth as Auth
import Data.List (intercalate)
import Data.Vector (toList)
-- | Demonstrate fetching public SSH keys: first those of an arbitrary
-- user (no authentication), then those of the authenticated user.
main :: IO ()
main = do
  -- Keys of another user.
  eKeys <- PK.publicSSHKeysFor' "github_name"
  render eKeys
  -- Keys of the authenticated user.
  let auth = Auth.OAuth "auth_token"
  eMyKeys <- PK.publicSSHKeys' auth
  render eMyKeys
  where
    -- Print either the error or one key per line.
    render = either
      (\err -> putStrLn $ "Error: " ++ (show err))
      (\keys -> putStrLn $ intercalate "\n" $ map show (toList keys))
|
jwiegley/github
|
samples/Users/PublicSSHKeys/ListPublicSSHKeys.hs
|
bsd-3-clause
| 798
| 0
| 13
| 143
| 237
| 123
| 114
| 17
| 3
|
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
-- | This module implements tools for compiling variable reads and
-- writes.
module IR.FlatIR.LLVMGen.VarAccess(
-- * Types
Location(..),
Index(..),
Access(..),
ValMap,
-- * ValMap functions
getVarLocation,
-- * Indexing instruction generators
genGEP,
genExtractValue,
-- * Access generators
genVarAddr,
genVarRead,
genWrite
) where
import Data.Array.IArray
import Data.Array.Unboxed(UArray)
import Data.Foldable
import Data.Map(Map)
import Data.Maybe
import Data.Traversable
import Data.Word
import IR.FlatIR.Syntax
import IR.FlatIR.LLVMGen.LLVMValue
import IR.FlatIR.LLVMGen.MemAccess
import Prelude hiding (mapM_, mapM, foldr, foldl, sequence)
import qualified Data.Map as Map
import qualified LLVM.Core as LLVM
-- | Locations are stored in 'ValMap's to indicate how a given variable
-- is represented at runtime.
data Location =
  -- | A variable stored in an SSA binding; reads return the value
  -- directly, and writes rebind the map entry (see 'genRawVarWrite').
    BindLoc !LLVM.ValueRef
  -- | A variable stored in a memory location.  The 'Type' and
  -- 'Mutability' describe the pointee and are threaded through
  -- genLoad / genStore.
  | MemLoc Type !Mutability !LLVM.ValueRef
  -- | A structure represented piecewise: maps each 'Fieldname' to the
  -- id of the local variable holding that field.
  | StructLoc !(UArray Fieldname Word)
-- | A type used to store indexes for constructing getelementptr and
-- extractvalue instructions.
data Index =
  -- | A field index.  We keep the field name, so we can index into
  -- both structure types and 'StructLoc' locations.
    FieldInd !Fieldname
  -- | A runtime value index.  These should only exist when indexing
  -- into an array (they cannot appear in extractvalue).
  | ValueInd !LLVM.ValueRef
-- | Accesses represent a slightly more complex value type.  These are
-- essentially the dual of 'Location's, and are paired up with them in
-- genVarWrite to implement writes.
data Access =
  -- | A plain LLVM value.
    DirectAcc !LLVM.ValueRef
  -- | Equivalent to a structure constant: one access per field.
  | StructAcc (Array Fieldname Access)
-- | A map from the 'Word' inside an 'Id' to the 'Location' currently
-- holding that variable, representing the state of the program.
type ValMap = Map Word Location
-- | Generate a getelementptr instruction for the given base value and
-- indexes.  With no indexes the base value is returned untouched.
genGEP :: LLVM.BuilderRef -> LLVM.ValueRef -> [Index] -> IO LLVM.ValueRef
genGEP builder base indexes =
  case indexes of
    [] -> return base
    _  -> LLVM.buildGEP builder base (map toValue indexes) ""
-- | Generate an extractvalue instruction from the necessary information.
--
-- Only 'FieldInd' indexes are legal here; a 'ValueInd' is a hard error.
-- Note the recursion order: the head field is extracted *after* the
-- tail indexes have been applied, i.e. the index list is consumed
-- right-to-left.
genExtractValue :: LLVM.BuilderRef -> Access -> [Index] -> IO Access
genExtractValue _ acc [] = return acc
genExtractValue builder (DirectAcc val) indexes =
  let
    genExtractValue' val' (FieldInd (Fieldname fname) : indexes') =
      do
        inner' <- genExtractValue' val' indexes'
        LLVM.buildExtractValue builder inner' fname ""
    genExtractValue' _ (ValueInd _ : _) =
      error "Value index cannot occur in extractvalue"
    genExtractValue' val' [] = return val'
  in do
    out <- genExtractValue' val indexes
    return (DirectAcc out)
-- A structure access is indexed by picking out the named field and
-- recursing with the remaining indexes.
genExtractValue builder (StructAcc fields) (FieldInd field : indexes) =
  genExtractValue builder (fields ! field) indexes
genExtractValue _ acc ind =
  error ("Mismatched access " ++ show acc ++ " and index " ++ show ind)
-- | Look up a variable in a value map and return its location.
--
-- A missing entry is a compiler-internal invariant violation; we call
-- 'error' with a message naming the offending variable.  (Previously
-- this used 'fromJust', whose failure message gave no clue which
-- variable was missing.)
getVarLocation :: ValMap -> Id -> Location
getVarLocation valmap (Id ind) =
  case Map.lookup ind valmap of
    Just loc -> loc
    Nothing -> error ("getVarLocation: no location recorded for variable "
                      ++ show ind)
-- | Get the address of a variable, together with its mutability.
-- Only variables held in memory ('MemLoc') have an address; anything
-- else is an error.
genVarAddr :: LLVM.BuilderRef -> ValMap -> [Index] -> Id ->
              IO (LLVM.ValueRef, Mutability)
genVarAddr builder valmap indexes var =
  case getVarLocation valmap var of
    MemLoc _ mut addr ->
      do
        elemAddr <- genGEP builder addr indexes
        return (elemAddr, mut)
    _ -> error "Location has no address"
-- | Generate an access reading the given variable at the given indexes.
--
-- The shape of the generated code depends on the variable's 'Location':
-- SSA bindings are returned directly (indexes are illegal), memory
-- locations become a GEP followed by a load, and piecewise structures
-- either recurse into the indexed field or reassemble a 'StructAcc'.
genVarRead :: LLVM.ContextRef -> LLVM.BuilderRef -> ValMap -> [Index] -> Id ->
              IO Access
genVarRead ctx builder valmap indexes var =
  case getVarLocation valmap var of
    -- Straightforward, it's a value.  Make sure we have no indexes
    -- and return the value.
    BindLoc val ->
      case indexes of
        [] -> return (DirectAcc val)
        _ -> error "Indexes in read of non-aggregate variable"
    -- For a memory location, generate a GEP, then load, then build a
    -- direct access.
    MemLoc ty mut mem ->
      do
        addr <- genGEP builder mem indexes
        val <- genLoad ctx builder addr mut ty
        return (DirectAcc val)
    -- For structures, we'll either recurse, or else build a structure
    -- access.
    StructLoc fields ->
      case indexes of
        -- If there's indexes, recurse into the variable holding the
        -- indexed field.
        (FieldInd ind : indexes') ->
          genVarRead ctx builder valmap indexes' (Id (fields ! ind))
        -- Otherwise, read every field variable and build a structure
        -- access out of the results.
        [] ->
          do
            accs <- mapM (genVarRead ctx builder valmap [])
                         (map Id (elems fields))
            return (StructAcc (listArray (bounds fields) accs))
        _ -> error "Wrong kind of index for a structure location"
-- | Handle a write to a variable with no indexes.  SSA-bound variables
-- are simply rebound in the map; any other location is delegated to
-- 'genRawWrite'.
genRawVarWrite :: LLVM.ContextRef -> LLVM.BuilderRef ->
                  ValMap -> Access -> Id -> IO ValMap
genRawVarWrite ctx builder valmap acc var@(Id name) =
  case getVarLocation valmap var of
    BindLoc _ -> return (Map.insert name rebound valmap)
    loc       -> genRawWrite ctx builder valmap acc loc
  where
    rebound = BindLoc (toValue acc)
-- | This function handles writes to non-variable locations without
-- indexes.  Returns the (possibly updated) 'ValMap'; only writes that
-- bottom out in 'genRawVarWrite' on a 'BindLoc' actually change it.
genRawWrite :: LLVM.ContextRef -> LLVM.BuilderRef -> ValMap ->
               Access -> Location -> IO ValMap
-- We've got a value and a memory location.  Generate a store.
genRawWrite ctx builder valmap acc (MemLoc ty mut addr) =
  do
    genStore ctx builder (toValue acc) addr mut ty
    return valmap
-- For structures, we end up recursing.
genRawWrite ctx builder valmap acc (StructLoc fields) =
  case acc of
    -- We've got a value (which ought to have a structure type),
    -- and a local variable that's a structure.  Go through and
    -- generate writes into each field.
    DirectAcc val ->
      let
        -- Extract one field from the LLVM value and write it into the
        -- variable that holds that field.
        foldfun valmap' (Fieldname fname, var) =
          do
            val' <- LLVM.buildExtractValue builder val fname ""
            genRawVarWrite ctx builder valmap' (DirectAcc val') (Id var)
      in do
        foldlM foldfun valmap (assocs fields)
    -- We've got a structure access and a structure location, which
    -- should match up.  Pair up the fields and recurse on each pair
    -- individually.
    StructAcc accfields ->
      let
        foldfun valmap' (acc', var) =
          genRawVarWrite ctx builder valmap' acc' (Id var)
        fieldlist = zip (elems accfields) (elems fields)
      in
        foldlM foldfun valmap fieldlist
-- BindLocs must be handled by genRawVarWrite (they require the Id).
genRawWrite _ _ _ _ (BindLoc _) = error "genRawWrite can't handle BindLocs"
-- | Take an access, a non-variable location, and a list of indexes, and
-- do the work to write to the location.  This involves many possible
-- cases; index-free writes are passed off to 'genRawWrite'.
genWrite :: LLVM.ContextRef -> LLVM.BuilderRef -> ValMap ->
            Access -> [Index] -> Location -> IO ValMap
-- This case should never happen: BindLocs are handled via the
-- variable-aware entry points.
genWrite _ _ _ _ _ (BindLoc _) = error "genWrite can't handle BindLocs"
-- For no index cases, pass off to genRawWrite
genWrite ctx builder valmap acc [] loc =
  genRawWrite ctx builder valmap acc loc
-- We've got a value and a memory location.  Generate a GEP and store
-- the value.
genWrite ctx builder valmap acc indexes (MemLoc ty mut mem) =
  do
    addr <- LLVM.buildGEP builder mem (map toValue indexes) ""
    genStore ctx builder (toValue acc) addr mut ty
    return valmap
-- For structures, we recurse to strip away the fields, moving to the
-- variable that holds the indexed field.
genWrite ctx builder valmap acc (FieldInd field : indexes) (StructLoc fields) =
  genVarWrite ctx builder valmap acc indexes (Id (fields ! field))
-- Any other kind of index is an error condition
genWrite _ _ _ _ _ (StructLoc _) = error "Bad indexes in assignment to variable"
-- | Take an access, a variable name, and a list of indexes, and do the
-- work to write to the location named by that variable.
genVarWrite :: LLVM.ContextRef -> LLVM.BuilderRef -> ValMap ->
               Access -> [Index] -> Id -> IO ValMap
genVarWrite ctx builder valmap acc indexes var =
  case (getVarLocation valmap var, indexes) of
    -- SSA bindings admit no indexes; delegate the rebinding.
    (BindLoc _, []) -> genRawVarWrite ctx builder valmap acc var
    (BindLoc _, _)  -> error "Extra indexes in write to variable"
    -- Everything else goes through the location-based writer.
    (loc, _)        -> genWrite ctx builder valmap acc indexes loc
-- | Indexes lower to LLVM values: field names via their own 'LLVMValue'
-- instance, value indexes as the wrapped value itself.
instance LLVMValue Index where
  toValue (FieldInd fname) = toValue fname
  toValue (ValueInd val) = val
-- | Direct accesses are already LLVM values; structure accesses become
-- an LLVM constant struct of their (recursively converted) fields.
instance LLVMValue Access where
  toValue (DirectAcc val) = val
  toValue (StructAcc arr) = LLVM.constStruct (map toValue (elems arr)) False
-- | Debug-friendly rendering; used in genExtractValue error messages.
instance Show Index where
  show (FieldInd (Fieldname fname)) = "field " ++ show fname
  show (ValueInd _) = "value"
-- | Debug-friendly rendering; used in genExtractValue error messages.
instance Show Access where
  show (DirectAcc _) = "direct"
  show (StructAcc _) = "struct"
|
emc2/chill
|
src/IR/FlatIR/LLVMGen/VarAccess.hs
|
bsd-3-clause
| 10,478
| 1
| 19
| 2,370
| 2,119
| 1,096
| 1,023
| 169
| 6
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- from https://ocharles.org.uk/blog/guest-posts/2014-12-15-deriving.html
import Data.Data
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.State
-- | Base functor of a tiny console-IO free monad: stop, print a line
-- and continue, or read a line and continue.  'Functor' comes from
-- @DeriveFunctor@.
data MiniIoF a = Terminate
               | PrintLine String a
               | ReadLine (String -> a)
               deriving (Functor)
-- Earlier variant kept for reference:
-- data List a = Nil | Cons a (List a)
--        deriving (Eq, Show, Functor, Foldable, Traversable)
-- | Hand-rolled list exercising stock deriving, including 'Typeable'
-- and 'Data'.
data List a = Nil | Cons a (List a)
  deriving ( Eq, Show
           , Functor, Foldable, Traversable
           , Typeable, Data)
-- | Positional two-field configuration record.
data Config = C String String
-- | Application state carrying an 'Int' and a 'Bool'.
data AppState = S Int Bool
-- | Application monad: a Reader over 'Config' stacked on State over
-- 'AppState' over IO, with instances lifted by
-- @GeneralizedNewtypeDeriving@.
newtype App a = App { unApp :: ReaderT Config (StateT AppState IO) a }
  deriving (Monad, MonadReader Config,
            MonadState AppState, MonadIO,
            Applicative,Functor)
|
mpickering/ghc-exactprint
|
tests/examples/ghc710/DerivingOC.hs
|
bsd-3-clause
| 1,060
| 0
| 9
| 318
| 201
| 117
| 84
| 23
| 0
|
module Network.MQTT.Gateway
( module Network.MQTT.Gateway.Core
, module Network.MQTT.Gateway.Socket
) where
import Network.MQTT.Gateway.Core
import Network.MQTT.Gateway.Socket
|
rasendubi/arachne
|
src/Network/MQTT/Gateway.hs
|
bsd-3-clause
| 183
| 0
| 5
| 21
| 39
| 28
| 11
| 5
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-| /proc/stat file parser
This module holds the definition of the parser that extracts information
about the CPU load of the system from the @/proc/stat@ file.
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Cpu.LoadParser (cpustatParser) where
import Prelude ()
import Ganeti.Prelude
import Control.Applicative ((<|>))
import qualified Data.Attoparsec.Text as A
import qualified Data.Attoparsec.Combinator as AC
import Data.Attoparsec.Text (Parser)
import Ganeti.Parsers
import Ganeti.Cpu.Types
-- * Parser implementation
-- | The parser for one line of the CPU status file.
--
-- Fields are consumed in @/proc/stat@ order: name, user, nice, system,
-- idle, iowait, irq, softirq, steal, guest, guest_nice, followed by the
-- end of the line.
oneCPUstatParser :: Parser CPUstat
oneCPUstatParser =
  CPUstat
    <$> stringP   -- cpu name
    <*> integerP  -- user
    <*> integerP  -- nice
    <*> integerP  -- system
    <*> integerP  -- idle
    <*> integerP  -- iowait
    <*> integerP  -- irq
    <*> integerP  -- softirq
    <*> integerP  -- steal
    <*> integerP  -- guest
    <*> integerP  -- guest_nice
    <* A.endOfLine
-- | Succeeds once the CPU-load section is over: the next line begins
-- with \"intr\", \"page\" or \"swap\".
intrFound :: Parser ()
intrFound = markerP "intr" <|> markerP "page" <|> markerP "swap"
  where
    -- Match a literal marker and discard it.
    markerP name = A.string name *> return ()
-- | The parser for the fragment of the CPU status file containing
-- information about the CPU load: one 'CPUstat' per line until the
-- terminating marker (see 'intrFound').
cpustatParser :: Parser [CPUstat]
cpustatParser = AC.manyTill oneCPUstatParser intrFound
|
andir/ganeti
|
src/Ganeti/Cpu/LoadParser.hs
|
bsd-2-clause
| 2,837
| 0
| 18
| 538
| 306
| 177
| 129
| 32
| 1
|
module Data.Char
(chr
,ord
,isAscii
,isLatin1
,toUpper
,toLower
,isAsciiLower
,isAsciiUpper
,isDigit
,isOctDigit
,isHexDigit
,isSpace
) where
import Fay.FFI
-- | Convert a code point to a 'Char' via the JavaScript
-- @String.fromCharCode@ FFI call.
chr :: Int -> Char
chr = ffi "String.fromCharCode(%1)"
-- | Convert a 'Char' to its code point via JavaScript @charCodeAt@.
ord :: Char -> Int
ord = ffi "%1.charCodeAt(0)"
-- | True exactly for code points in the 7-bit ASCII range.
isAscii :: Char -> Bool
isAscii ch = ch <= '\x7f'
-- | True for code points representable in ISO 8859-1 (Latin-1).
isLatin1 :: Char -> Bool
isLatin1 ch = ch < '\x100'
-- | Upper-case a character via JavaScript @toUpperCase@.
toUpper :: Char -> Char
toUpper = ffi "%1.toUpperCase()"
-- | Lower-case a character via JavaScript @toLowerCase@.
toLower :: Char -> Char
toLower = ffi "%1.toLowerCase()"
-- | True for ASCII lower-case letters @'a'..'z'@.
isAsciiLower :: Char -> Bool
isAsciiLower ch = 'a' <= ch && ch <= 'z'
-- | True for ASCII upper-case letters @'A'..'Z'@.
isAsciiUpper :: Char -> Bool
isAsciiUpper ch = 'A' <= ch && ch <= 'Z'
-- | True for decimal digits @'0'..'9'@.
isDigit :: Char -> Bool
isDigit ch = '0' <= ch && ch <= '9'
-- | True for octal digits @'0'..'7'@.
isOctDigit :: Char -> Bool
isOctDigit ch = '0' <= ch && ch <= '7'
-- | True for hexadecimal digits: decimal digits plus @A-F@ / @a-f@.
isHexDigit :: Char -> Bool
isHexDigit ch =
  isDigit ch || ('A' <= ch && ch <= 'F') || ('a' <= ch && ch <= 'f')
-- | True when the character is whitespace: implemented by checking
-- whether removing JavaScript @\\s@ matches changes the string.
isSpace :: Char -> Bool
isSpace = ffi "%1.replace(/\\s/g,'') != %1"
|
fpco/fay-base
|
src/Data/Char.hs
|
bsd-3-clause
| 963
| 0
| 13
| 243
| 344
| 185
| 159
| 39
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.