| code (string, 5..1.03M chars) | repo_name (string, 5..90) | path (string, 4..158) | license (15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
import Data.List
euler029 :: Int -> Int
euler029 n = length . group . sort $ combinations
where
n' = fromIntegral n
combinations = [ a ^ b | a <- [2..n'], b <- [2..n'] ] :: [Integer]
main :: IO ()
main = print $ euler029 100
| marknsikora/euler | euler029/euler029.hs | mit | 237 | 0 | 10 | 60 | 111 | 59 | 52 | 7 | 1 |
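The euler029 solution above counts distinct values of a^b by sorting and grouping. A minimal sketch (not from the repository above) of the same count using Data.Set:

import qualified Data.Set as Set

-- Hypothetical alternative, not part of marknsikora/euler: collect the
-- powers in a Set so duplicates are removed as they are inserted.
euler029Set :: Int -> Int
euler029Set n = Set.size (Set.fromList powers)
  where
    n'     = fromIntegral n
    powers = [ a ^ b | a <- [2 .. n'], b <- [2 .. n'] ] :: [Integer]

-- euler029Set 100 should agree with euler029 100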
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Neural.FeedForward where
--module Data.Neural.FeedForward
-- ( Neuron (..)
-- , NetworkTemplate (..)
-- , Network (..)
-- , Activation
-- , Derivative
--
-- , makeNetwork
-- , e
-- , hyperbolic
-- , sigmoid
-- , sigmoid'
-- ) where
import Prelude as P
import System.Random
import Control.Monad.Identity
import Control.Monad.Random
import Control.Monad.Error
import Data.Text
import Data.Either
import Data.Array.Repa as R
data Neuron = Input
| Bias
| Hidden
| Output
deriving (Show)
data NetworkTemplate = NetworkTemplate [[Neuron]] deriving Show
type Active = Array U DIM1 Double
type Inert = Array U DIM1 Double
data Layer = Explicit Active Inert
| Weight (Array U DIM2 Double)
type Activation = (Double -> Double)
type Derivative = (Double -> Double)
data Network = Network Activation [Layer]
| Empty
e :: Double
e = 2.71828182845904523536028747135266249775724709369995
hyperbolic :: Activation
hyperbolic x = ((e ** (2.0 * x)) - 1.0) / ((e ** (2.0 * x)) + 1.0)
sigmoid :: Activation
sigmoid x = 1.0 / (1.0 + (e ** (0.0 - x)))
sigmoid' :: Derivative
sigmoid' x = (sigmoid x) * (1.0 - (sigmoid x))
instance (RandomGen g, MonadError e m) => MonadError e (RandT g m) where
throwError = lift . throwError
-- catchError = lift . catchError --No auto lift due to GHC 7.6.3 bug.
catchError = catchError
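-- NOTE: the definition above refers to itself, so calling catchError at this
-- instance will loop at runtime; only throwError is effectively usable here.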
makeNetwork :: (RandomGen g) => Activation -> NetworkTemplate -> g -> Either String (Network, g)
makeNetwork act (NetworkTemplate net) gen = runIdentity (runErrorT (runRandT (makeNetwork' act net) gen))
makeNetwork' :: (RandomGen g) => Activation -> [[Neuron]] -> RandT g (ErrorT String Identity) Network
makeNetwork' _ [] = throwError "Network must have an input and output layer."
makeNetwork' _ [l] = throwError "Network must have an input and output layer."
makeNetwork' act (l:ls) = do
input <- validInput l
let (hs, [o]) = notLast ls
hidden <- P.mapM validHidden hs
output <- validOutput o
return Empty
decompose = undefined
notLast [] = ([], [])
notLast [x] = ([], [x])
notLast (x:xs) = notLast' ([x], []) xs
where notLast' (front, _) [x] = (front, [x])
notLast' (front, _) (x:xs) = notLast' (front P.++ [x], []) xs
validInput = P.mapM isValid
where isValid (Input) = return Input
isValid (Bias) = return Bias
isValid (Hidden) = throwError "Hidden neuron found in input layer."
isValid (Output) = throwError "Output neuron found in input layer."
validHidden = P.mapM isValid
where isValid (Input) = throwError "Input neuron found in hidden layer."
isValid (Bias) = return Bias
isValid (Hidden) = return Hidden
isValid (Output) = throwError "Output neuron found in hidden layer."
validOutput = P.mapM isValid
where isValid (Input) = throwError "Input neuron found in output layer."
isValid (Bias) = throwError "Bias neuron found in output layer."
isValid (Hidden) = throwError "Hidden neuron found in output layer."
isValid (Output) = return Output
breakLayer = P.foldl s ([], [])
where s (layer, bias) (Bias) = (layer, bias P.++ [Bias])
s (layer, bias) x = (layer P.++ [x], bias)
| AndrewRademacher/acumen | src/Data/Neural/FeedForward.hs | mit | 3,662 | 0 | 11 | 980 | 1,079 | 594 | 485 | 74 | 4 |
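A minimal usage sketch for the module above (the layer layout and generator seed are invented): makeNetwork validates the template and returns either an error message or a Network plus the advanced generator.

import Data.Neural.FeedForward
import System.Random (StdGen, mkStdGen)

-- Hypothetical template: two inputs, one hidden layer, one output.
demoNetwork :: Either String (Network, StdGen)
demoNetwork = makeNetwork sigmoid template (mkStdGen 42)
  where
    template = NetworkTemplate
      [ [Input, Input, Bias]    -- input layer
      , [Hidden, Hidden, Bias]  -- hidden layer
      , [Output]                -- output layer
      ]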
import Robotics.Thingomatic
import Robotics.Thingomatic.Commands
import Robotics.Thingomatic.Monad
import Robotics.Thingomatic.Points
import Robotics.Thingomatic.Setup
import Control.Monad
points::[Point2]
points = [(25,25),(25,35),(35,35),(35,25)]
start = do
move $ head points
extruderForward
pause 125
layers = withRate 1500 $ mapM_ (const layer) [1..100]
where layer = comment "Layer" >> moveRel ((0,0,0.35)::Point3) >> mapM_ move points
main = driver $ printModel $ raft (20,20) (20,20) >> start >> layers
| matthewSorensen/weft | example.hs | gpl-3.0 | 525 | 0 | 11 | 74 | 215 | 121 | 94 | 15 | 1 |
{-# LANGUAGE Rank2Types, ScopedTypeVariables, TupleSections #-}
module LiName.Sort (
readSortType,
sortPathList
) where
import LiName.Types
import Control.Applicative ((<$>))
import Control.Monad (liftM)
import Control.Monad.Reader (ask)
import Data.Function (on)
import Data.List (sortBy)
import Data.Text (pack, toLower, Text)
import System.Directory (getModificationTime)
import System.FilePath.Posix (takeFileName)
-- FIXME: an IOError should occur for an unknown sort type.
readSortType :: String -> LiNameSortType
readSortType ('i':xs) = InvertedSort $ readSortType xs
readSortType "m" = SortByModTime
readSortType "n" = SortByFileName
readSortType "p" = SortByFilePath
readSortType "N" = SortByFileNameI
readSortType "P" = SortByFilePathI
readSortType "-" = DontSort
readSortType _ = error "Unknown sort type"
sortPathList :: [LiNamePath] -> L [LiNamePath]
sortPathList xs = do
st <- _sortType <$> ask
io $ sortPathList' st xs
sortPathList' :: LiNameSortType -> [LiNamePath] -> IO [LiNamePath]
sortPathList' SortByFilePath = sort' return
sortPathList' SortByFileName = sort' (return . takeFileName)
sortPathList' SortByFilePathI = sort' (return . ignoreCase)
sortPathList' SortByFileNameI = sort' (return . ignoreCase . takeFileName)
sortPathList' SortByModTime = sort' getModificationTime
sortPathList' DontSort = sort' getModificationTime
sortPathList' (InvertedSort st) = liftM reverse . sortPathList' st
ignoreCase :: LiNamePath -> Text
ignoreCase = toLower . pack
sort' :: Ord a => (LiNamePath -> IO a) -> [LiNamePath] -> IO [LiNamePath]
sort' f xs = map fst . sortBy (compare `on` snd) <$> mapM (\x -> (x,) <$> f x) xs
| anekos/liname-hs | src/LiName/Sort.hs | gpl-3.0 | 1,707 | 0 | 10 | 299 | 501 | 266 | 235 | 38 | 1 |
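A minimal sketch of how sort' is driven (paths are invented, and it would have to sit next to the definitions above, since sort' and ignoreCase are not exported): the key function runs in IO per path, here mirroring SortByFileNameI. It assumes LiNamePath is a plain FilePath, as the takeFileName calls above imply.

-- Hypothetical, in-module example: sort by lower-cased file name.
demoSortByNameI :: IO [LiNamePath]
demoSortByNameI = sort' (return . ignoreCase . takeFileName)
                        ["/tmp/B.txt", "/home/a.txt", "/var/C.txt"]
-- expected result: ["/home/a.txt", "/tmp/B.txt", "/var/C.txt"]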
module Database.SQL.SQLSolvent.Types (
FieldType
,FieldName
,TableName
,RelationField
,Field (Regular, Key, Relation)
,Table (Table)
,Scheme
,TableOperations (tName, tBody, description)
,RelationInGraph (RelationInGraph)
,RelWIthId()
,TableId
,EdgeId
,Mark
,EdgeMarkers
,NodeMarkers
,Markers
,dummyTable
,tempty
,LocGraph
,GlbGraph
) where
import qualified Data.Text as T
import qualified Data.Set as S
import Data.Graph.Inductive
type FieldType = T.Text --RegularField|Key|Relation
type DataType = T.Text --data type of the field
type FieldName = T.Text
type TableName = T.Text
type RelationField = T.Text --which field the relation points to
type Description = T.Text --free-form description of anything
data Field = Regular FieldName DataType Description
| Key FieldName DataType Description
| Relation FieldName DataType TableName RelationField Description
deriving (Eq,Ord,Show)
data Table = Table TableName Description (S.Set Field) deriving (Ord,Eq,Show)
class TableOperations a where
tName :: a -> TableName
description :: a -> Description
tBody :: a -> S.Set Field
instance TableOperations Table where
tName (Table name _ _ ) = name
description (Table _ description _ ) = description
tBody (Table _ _ body) = body
tempty :: Table --return an empty table
tempty = Table T.empty T.empty S.empty
type Scheme = S.Set Table --типа база
data RelationInGraph = RelationInGraph ((TableName, FieldName),(TableName, FieldName)) deriving (Eq, Ord, Show)
type RelWIthId = (Int, RelationInGraph)
--defined so that edges can be given a weight; every edge has weight one.
instance Num RelationInGraph where
(+) a b = 2
(*) a b = 1
abs a = 1
signum a = 1
fromInteger a = RelationInGraph ((T.pack "",T.pack ""),(T.pack "",T.pack ""))
negate a = 1
instance Real RelationInGraph where
toRational a = 1
dummyTable = Table (T.pack "dummy") (T.pack "dummy") (S.fromList [])
--highlighting marks
type TableId = Int
type EdgeId = Int
type Mark = Bool --highlighted or not
type EdgeMarkers = [(EdgeId, Mark)]
type NodeMarkers = [(TableId, Mark)]
type Markers = (NodeMarkers, EdgeMarkers)
--convenience aliases for graphs in type signatures
type LocGraph = Gr Table RelWIthId
type GlbGraph = Gr Table RelWIthId
| nixorn/SQL-Solvent | src/Database/SQL/SQLSolvent/Types.hs | gpl-3.0 | 3,356 | 0 | 10 | 1,315 | 695 | 408 | 287 | 76 | 1 |
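A minimal sketch (table contents invented) of constructing a Table from the module above and reading it back through the TableOperations class:

import qualified Data.Set as S
import qualified Data.Text as T
import Database.SQL.SQLSolvent.Types

-- Hypothetical table: a primary key plus one regular field.
demoTable :: Table
demoTable = Table (T.pack "users") (T.pack "account table") $ S.fromList
  [ Key     (T.pack "id")   (T.pack "int")  (T.pack "primary key")
  , Regular (T.pack "name") (T.pack "text") (T.pack "display name")
  ]

demoName :: TableName
demoName = tName demoTable   -- "users"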
import qualified Data.Set as Set
import Test.HUnit
import Test.Framework
import Test.Framework.Providers.HUnit
import Pretzel
import Move
main :: IO ()
main =
defaultMainWithOpts tests mempty
tests =
[
]
| mkovacs/pretzel | src/test/UnitMove.hs | gpl-3.0 | 214 | 0 | 6 | 38 | 59 | 35 | 24 | 11 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Main where
import Test.QuickCheck
import Test.QuickCheck.Property
import Test.QuickCheck.All
import Control.Monad
import Control.Monad.State
import Control.Monad.Identity
import qualified Data.ByteString.Char8 as BS
import qualified Data.Sequence as S
import IcfpcEndo.Endo
import IcfpcEndo.RuntimeSt
import IcfpcEndo.Execution
import IcfpcEndo.Constants
--import Application.Game.Engine.Runtime
prop_basicSubsequence1 = toDna "ICFP" |- (from 0 `to` 2) == toDna "IC"
prop_basicSubsequence2 = toDna "ICFP" |- (from 2 `to` 0) == emptyDna
prop_basicSubsequence3 = toDna "ICFP" |- (from 2 `to` 2) == emptyDna
prop_basicSubsequence4 = toDna "ICFP" |- (from 2 `to` 3) == toDna "F"
prop_basicSubsequence5 = toDna "ICFP" |- only 2 == toDna "F"
prop_basicSubsequence6 = toDna "ICFP" |- (from 2 `to` 6) == toDna "FP"
prop_basicSubsequence7 = toDna "ICFP" |- from 2 == toDna "FP"
prop_basicSubsequence8 = toDna "ICFP" |- only 6 == emptyDna
data TestRuntime = Rt { rtEndo :: Endo }
type TestRuntimeSt = State TestRuntime
instance RuntimeSt TestRuntimeSt where
getData = liftM rtEndo get
putData = put . Rt
testRt1 = Rt $ mkEndo (toDna "CIIC") 0
testRt2 = Rt $ mkEndo (toDna "IIPIPICPIICICIIF") 0
testPattern1, testPattern2 :: Pattern
testPattern1 = evalState pattern testRt1
testPattern2 = evalState pattern testRt2
prop_execution1 = testPattern1 == toPattern [iBasePI]
prop_execution2 = testPattern2 == toPattern [openPI, skipPI 2, closePI, pBasePI]
tests :: IO Bool
tests = $quickCheckAll
runTests = tests >>= \passed -> putStrLn $
if passed then "All tests passed."
else "Some tests failed."
main :: IO ()
main = runTests
| graninas/ICFPC2007 | Endo/Test/EndoDnaTest.hs | gpl-3.0 | 1,784 | 0 | 9 | 298 | 506 | 277 | 229 | 43 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Blogger.Comments.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a comment by blog id, post id and comment id.
--
-- /See:/ <https://developers.google.com/blogger/docs/3.0/getting_started Blogger API v3 Reference> for @blogger.comments.delete@.
module Network.Google.Resource.Blogger.Comments.Delete
(
-- * REST Resource
CommentsDeleteResource
-- * Creating a Request
, commentsDelete
, CommentsDelete
-- * Request Lenses
, cdXgafv
, cdUploadProtocol
, cdAccessToken
, cdUploadType
, cdBlogId
, cdPostId
, cdCommentId
, cdCallback
) where
import Network.Google.Blogger.Types
import Network.Google.Prelude
-- | A resource alias for @blogger.comments.delete@ method which the
-- 'CommentsDelete' request conforms to.
type CommentsDeleteResource =
"v3" :>
"blogs" :>
Capture "blogId" Text :>
"posts" :>
Capture "postId" Text :>
"comments" :>
Capture "commentId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes a comment by blog id, post id and comment id.
--
-- /See:/ 'commentsDelete' smart constructor.
data CommentsDelete =
CommentsDelete'
{ _cdXgafv :: !(Maybe Xgafv)
, _cdUploadProtocol :: !(Maybe Text)
, _cdAccessToken :: !(Maybe Text)
, _cdUploadType :: !(Maybe Text)
, _cdBlogId :: !Text
, _cdPostId :: !Text
, _cdCommentId :: !Text
, _cdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CommentsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cdXgafv'
--
-- * 'cdUploadProtocol'
--
-- * 'cdAccessToken'
--
-- * 'cdUploadType'
--
-- * 'cdBlogId'
--
-- * 'cdPostId'
--
-- * 'cdCommentId'
--
-- * 'cdCallback'
commentsDelete
:: Text -- ^ 'cdBlogId'
-> Text -- ^ 'cdPostId'
-> Text -- ^ 'cdCommentId'
-> CommentsDelete
commentsDelete pCdBlogId_ pCdPostId_ pCdCommentId_ =
CommentsDelete'
{ _cdXgafv = Nothing
, _cdUploadProtocol = Nothing
, _cdAccessToken = Nothing
, _cdUploadType = Nothing
, _cdBlogId = pCdBlogId_
, _cdPostId = pCdPostId_
, _cdCommentId = pCdCommentId_
, _cdCallback = Nothing
}
-- | V1 error format.
cdXgafv :: Lens' CommentsDelete (Maybe Xgafv)
cdXgafv = lens _cdXgafv (\ s a -> s{_cdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cdUploadProtocol :: Lens' CommentsDelete (Maybe Text)
cdUploadProtocol
= lens _cdUploadProtocol
(\ s a -> s{_cdUploadProtocol = a})
-- | OAuth access token.
cdAccessToken :: Lens' CommentsDelete (Maybe Text)
cdAccessToken
= lens _cdAccessToken
(\ s a -> s{_cdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cdUploadType :: Lens' CommentsDelete (Maybe Text)
cdUploadType
= lens _cdUploadType (\ s a -> s{_cdUploadType = a})
cdBlogId :: Lens' CommentsDelete Text
cdBlogId = lens _cdBlogId (\ s a -> s{_cdBlogId = a})
cdPostId :: Lens' CommentsDelete Text
cdPostId = lens _cdPostId (\ s a -> s{_cdPostId = a})
cdCommentId :: Lens' CommentsDelete Text
cdCommentId
= lens _cdCommentId (\ s a -> s{_cdCommentId = a})
-- | JSONP
cdCallback :: Lens' CommentsDelete (Maybe Text)
cdCallback
= lens _cdCallback (\ s a -> s{_cdCallback = a})
instance GoogleRequest CommentsDelete where
type Rs CommentsDelete = ()
type Scopes CommentsDelete =
'["https://www.googleapis.com/auth/blogger"]
requestClient CommentsDelete'{..}
= go _cdBlogId _cdPostId _cdCommentId _cdXgafv
_cdUploadProtocol
_cdAccessToken
_cdUploadType
_cdCallback
(Just AltJSON)
bloggerService
where go
= buildClient (Proxy :: Proxy CommentsDeleteResource)
mempty
| brendanhay/gogol | gogol-blogger/gen/Network/Google/Resource/Blogger/Comments/Delete.hs | mpl-2.0 | 4,980 | 0 | 20 | 1,261 | 857 | 496 | 361 | 121 | 1 |
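A minimal request-building sketch for the generated module above (all ids and the callback value are placeholders); the required fields go through the smart constructor and an optional field is set via its lens, assuming the operators from Control.Lens:

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (.~))
import Network.Google.Resource.Blogger.Comments.Delete

-- Hypothetical ids; only the three positional fields are mandatory.
demoDelete :: CommentsDelete
demoDelete = commentsDelete "blogId123" "postId456" "commentId789"
               & cdCallback .~ Just "myJsonpCallback"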
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DialogFlow.Projects.Agent.EntityTypes.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the specified entity type.
--
-- /See:/ <https://cloud.google.com/dialogflow-enterprise/ Dialogflow API Reference> for @dialogflow.projects.agent.entityTypes.patch@.
module Network.Google.Resource.DialogFlow.Projects.Agent.EntityTypes.Patch
(
-- * REST Resource
ProjectsAgentEntityTypesPatchResource
-- * Creating a Request
, projectsAgentEntityTypesPatch
, ProjectsAgentEntityTypesPatch
-- * Request Lenses
, paetpXgafv
, paetpLanguageCode
, paetpUploadProtocol
, paetpUpdateMask
, paetpAccessToken
, paetpUploadType
, paetpPayload
, paetpName
, paetpCallback
) where
import Network.Google.DialogFlow.Types
import Network.Google.Prelude
-- | A resource alias for @dialogflow.projects.agent.entityTypes.patch@ method which the
-- 'ProjectsAgentEntityTypesPatch' request conforms to.
type ProjectsAgentEntityTypesPatchResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "languageCode" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "updateMask" GFieldMask :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] GoogleCloudDialogflowV2EntityType :>
Patch '[JSON] GoogleCloudDialogflowV2EntityType
-- | Updates the specified entity type.
--
-- /See:/ 'projectsAgentEntityTypesPatch' smart constructor.
data ProjectsAgentEntityTypesPatch =
ProjectsAgentEntityTypesPatch'
{ _paetpXgafv :: !(Maybe Xgafv)
, _paetpLanguageCode :: !(Maybe Text)
, _paetpUploadProtocol :: !(Maybe Text)
, _paetpUpdateMask :: !(Maybe GFieldMask)
, _paetpAccessToken :: !(Maybe Text)
, _paetpUploadType :: !(Maybe Text)
, _paetpPayload :: !GoogleCloudDialogflowV2EntityType
, _paetpName :: !Text
, _paetpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsAgentEntityTypesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'paetpXgafv'
--
-- * 'paetpLanguageCode'
--
-- * 'paetpUploadProtocol'
--
-- * 'paetpUpdateMask'
--
-- * 'paetpAccessToken'
--
-- * 'paetpUploadType'
--
-- * 'paetpPayload'
--
-- * 'paetpName'
--
-- * 'paetpCallback'
projectsAgentEntityTypesPatch
:: GoogleCloudDialogflowV2EntityType -- ^ 'paetpPayload'
-> Text -- ^ 'paetpName'
-> ProjectsAgentEntityTypesPatch
projectsAgentEntityTypesPatch pPaetpPayload_ pPaetpName_ =
ProjectsAgentEntityTypesPatch'
{ _paetpXgafv = Nothing
, _paetpLanguageCode = Nothing
, _paetpUploadProtocol = Nothing
, _paetpUpdateMask = Nothing
, _paetpAccessToken = Nothing
, _paetpUploadType = Nothing
, _paetpPayload = pPaetpPayload_
, _paetpName = pPaetpName_
, _paetpCallback = Nothing
}
-- | V1 error format.
paetpXgafv :: Lens' ProjectsAgentEntityTypesPatch (Maybe Xgafv)
paetpXgafv
= lens _paetpXgafv (\ s a -> s{_paetpXgafv = a})
-- | Optional. The language of entity synonyms defined in \`entity_type\`. If
-- not specified, the agent\'s default language is used. [Many
-- languages](https:\/\/cloud.google.com\/dialogflow-enterprise\/docs\/reference\/language)
-- are supported. Note: languages must be enabled in the agent before they
-- can be used.
paetpLanguageCode :: Lens' ProjectsAgentEntityTypesPatch (Maybe Text)
paetpLanguageCode
= lens _paetpLanguageCode
(\ s a -> s{_paetpLanguageCode = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
paetpUploadProtocol :: Lens' ProjectsAgentEntityTypesPatch (Maybe Text)
paetpUploadProtocol
= lens _paetpUploadProtocol
(\ s a -> s{_paetpUploadProtocol = a})
-- | Optional. The mask to control which fields get updated.
paetpUpdateMask :: Lens' ProjectsAgentEntityTypesPatch (Maybe GFieldMask)
paetpUpdateMask
= lens _paetpUpdateMask
(\ s a -> s{_paetpUpdateMask = a})
-- | OAuth access token.
paetpAccessToken :: Lens' ProjectsAgentEntityTypesPatch (Maybe Text)
paetpAccessToken
= lens _paetpAccessToken
(\ s a -> s{_paetpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
paetpUploadType :: Lens' ProjectsAgentEntityTypesPatch (Maybe Text)
paetpUploadType
= lens _paetpUploadType
(\ s a -> s{_paetpUploadType = a})
-- | Multipart request metadata.
paetpPayload :: Lens' ProjectsAgentEntityTypesPatch GoogleCloudDialogflowV2EntityType
paetpPayload
= lens _paetpPayload (\ s a -> s{_paetpPayload = a})
-- | The unique identifier of the entity type. Required for
-- EntityTypes.UpdateEntityType and EntityTypes.BatchUpdateEntityTypes
-- methods. Format: \`projects\/\/agent\/entityTypes\/\`.
paetpName :: Lens' ProjectsAgentEntityTypesPatch Text
paetpName
= lens _paetpName (\ s a -> s{_paetpName = a})
-- | JSONP
paetpCallback :: Lens' ProjectsAgentEntityTypesPatch (Maybe Text)
paetpCallback
= lens _paetpCallback
(\ s a -> s{_paetpCallback = a})
instance GoogleRequest ProjectsAgentEntityTypesPatch
where
type Rs ProjectsAgentEntityTypesPatch =
GoogleCloudDialogflowV2EntityType
type Scopes ProjectsAgentEntityTypesPatch =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow"]
requestClient ProjectsAgentEntityTypesPatch'{..}
= go _paetpName _paetpXgafv _paetpLanguageCode
_paetpUploadProtocol
_paetpUpdateMask
_paetpAccessToken
_paetpUploadType
_paetpCallback
(Just AltJSON)
_paetpPayload
dialogFlowService
where go
= buildClient
(Proxy ::
Proxy ProjectsAgentEntityTypesPatchResource)
mempty
| brendanhay/gogol | gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/Agent/EntityTypes/Patch.hs | mpl-2.0 | 6,933 | 0 | 18 | 1,541 | 945 | 551 | 394 | 140 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.ChannelSections.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a resource.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.channelSections.delete@.
module Network.Google.Resource.YouTube.ChannelSections.Delete
(
-- * REST Resource
ChannelSectionsDeleteResource
-- * Creating a Request
, channelSectionsDelete
, ChannelSectionsDelete
-- * Request Lenses
, csdXgafv
, csdUploadProtocol
, csdAccessToken
, csdUploadType
, csdOnBehalfOfContentOwner
, csdId
, csdCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.channelSections.delete@ method which the
-- 'ChannelSectionsDelete' request conforms to.
type ChannelSectionsDeleteResource =
"youtube" :>
"v3" :>
"channelSections" :>
QueryParam "id" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "onBehalfOfContentOwner" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes a resource.
--
-- /See:/ 'channelSectionsDelete' smart constructor.
data ChannelSectionsDelete =
ChannelSectionsDelete'
{ _csdXgafv :: !(Maybe Xgafv)
, _csdUploadProtocol :: !(Maybe Text)
, _csdAccessToken :: !(Maybe Text)
, _csdUploadType :: !(Maybe Text)
, _csdOnBehalfOfContentOwner :: !(Maybe Text)
, _csdId :: !Text
, _csdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ChannelSectionsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'csdXgafv'
--
-- * 'csdUploadProtocol'
--
-- * 'csdAccessToken'
--
-- * 'csdUploadType'
--
-- * 'csdOnBehalfOfContentOwner'
--
-- * 'csdId'
--
-- * 'csdCallback'
channelSectionsDelete
:: Text -- ^ 'csdId'
-> ChannelSectionsDelete
channelSectionsDelete pCsdId_ =
ChannelSectionsDelete'
{ _csdXgafv = Nothing
, _csdUploadProtocol = Nothing
, _csdAccessToken = Nothing
, _csdUploadType = Nothing
, _csdOnBehalfOfContentOwner = Nothing
, _csdId = pCsdId_
, _csdCallback = Nothing
}
-- | V1 error format.
csdXgafv :: Lens' ChannelSectionsDelete (Maybe Xgafv)
csdXgafv = lens _csdXgafv (\ s a -> s{_csdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
csdUploadProtocol :: Lens' ChannelSectionsDelete (Maybe Text)
csdUploadProtocol
= lens _csdUploadProtocol
(\ s a -> s{_csdUploadProtocol = a})
-- | OAuth access token.
csdAccessToken :: Lens' ChannelSectionsDelete (Maybe Text)
csdAccessToken
= lens _csdAccessToken
(\ s a -> s{_csdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
csdUploadType :: Lens' ChannelSectionsDelete (Maybe Text)
csdUploadType
= lens _csdUploadType
(\ s a -> s{_csdUploadType = a})
-- | *Note:* This parameter is intended exclusively for YouTube content
-- partners. The *onBehalfOfContentOwner* parameter indicates that the
-- request\'s authorization credentials identify a YouTube CMS user who is
-- acting on behalf of the content owner specified in the parameter value.
-- This parameter is intended for YouTube content partners that own and
-- manage many different YouTube channels. It allows content owners to
-- authenticate once and get access to all their video and channel data,
-- without having to provide authentication credentials for each individual
-- channel. The CMS account that the user authenticates with must be linked
-- to the specified YouTube content owner.
csdOnBehalfOfContentOwner :: Lens' ChannelSectionsDelete (Maybe Text)
csdOnBehalfOfContentOwner
= lens _csdOnBehalfOfContentOwner
(\ s a -> s{_csdOnBehalfOfContentOwner = a})
csdId :: Lens' ChannelSectionsDelete Text
csdId = lens _csdId (\ s a -> s{_csdId = a})
-- | JSONP
csdCallback :: Lens' ChannelSectionsDelete (Maybe Text)
csdCallback
= lens _csdCallback (\ s a -> s{_csdCallback = a})
instance GoogleRequest ChannelSectionsDelete where
type Rs ChannelSectionsDelete = ()
type Scopes ChannelSectionsDelete =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl",
"https://www.googleapis.com/auth/youtubepartner"]
requestClient ChannelSectionsDelete'{..}
= go (Just _csdId) _csdXgafv _csdUploadProtocol
_csdAccessToken
_csdUploadType
_csdOnBehalfOfContentOwner
_csdCallback
(Just AltJSON)
youTubeService
where go
= buildClient
(Proxy :: Proxy ChannelSectionsDeleteResource)
mempty
| brendanhay/gogol | gogol-youtube/gen/Network/Google/Resource/YouTube/ChannelSections/Delete.hs | mpl-2.0 | 5,776 | 0 | 18 | 1,298 | 806 | 471 | 335 | 115 | 1 |
module Lupo.View where
import qualified Data.Text as T
import qualified Heist as H
import qualified Lupo.Entry as E
import Lupo.Import
import qualified Lupo.Navigation as N
import qualified Lupo.URLMapper as U
data View h = View
{ render :: h ()
}
data ViewFactory h = ViewFactory
{ _singleDayView :: E.Page -> N.Navigation (H.HeistT h h) -> E.Comment -> [T.Text] -> [T.Text] -> View h
, _multiDaysView :: N.Navigation (H.HeistT h h) -> [E.Page] -> View h
, _monthView :: N.Navigation (H.HeistT h h) -> [E.Page] -> View h
, _searchResultView :: T.Text -> [E.Saved E.Entry] -> View h
, _loginView :: T.Text -> View h
, _initAccountView :: View h
, _adminView :: [E.Page] -> View h
, _entryEditorView :: E.Saved E.Entry -> T.Text -> Getter U.URLMapper U.Path -> View h
, _entryPreviewView :: E.Saved E.Entry -> T.Text -> Getter U.URLMapper U.Path -> View h
, _entriesFeed :: [E.Saved E.Entry] -> View h
}
singleDayView :: E.Page -> N.Navigation (H.HeistT h h) -> E.Comment -> [T.Text] -> [T.Text] -> Getter (ViewFactory h) (View h)
singleDayView p n c notice errs = to $ \self ->
_singleDayView self p n c notice errs
multiDaysView :: N.Navigation (H.HeistT h h) -> [E.Page] -> Getter (ViewFactory h) (View h)
multiDaysView n ps = to $ \self ->
_multiDaysView self n ps
monthView :: N.Navigation (H.HeistT h h) -> [E.Page] -> Getter (ViewFactory h) (View h)
monthView n ps = to $ \self ->
_monthView self n ps
searchResultView :: T.Text -> [E.Saved E.Entry] -> Getter (ViewFactory h) (View h)
searchResultView t es = to $ \self ->
_searchResultView self t es
loginView :: T.Text -> Getter (ViewFactory h) (View h)
loginView challenge = to $ \self ->
_loginView self challenge
initAccountView :: Getter (ViewFactory h) (View h)
initAccountView = to _initAccountView
adminView :: [E.Page] -> Getter (ViewFactory h) (View h)
adminView ps = to $ \self ->
_adminView self ps
entryEditorView :: E.Saved E.Entry -> T.Text -> Getter U.URLMapper U.Path -> Getter (ViewFactory h) (View h)
entryEditorView e t url = to $ \self ->
_entryEditorView self e t url
entryPreviewView :: E.Saved E.Entry -> T.Text -> Getter U.URLMapper U.Path -> Getter (ViewFactory h) (View h)
entryPreviewView e t url = to $ \self ->
_entryPreviewView self e t url
entriesFeed :: [E.Saved E.Entry] -> Getter (ViewFactory h) (View h)
entriesFeed es = to $ \self ->
_entriesFeed self es
| keitax/lupo | src/Lupo/View.hs | lgpl-3.0 | 2,407 | 0 | 15 | 455 | 1,068 | 551 | 517 | 49 | 1 |
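A minimal sketch of how the Getter-returning helpers above compose (viewFactory is a hypothetical, already-constructed ViewFactory h, and the (^.) operator is assumed to come from the lens package that Lupo.Import appears to re-export):

import Control.Lens ((^.))
import qualified Data.Text as T
import Lupo.View

-- Hypothetical: pick the login view out of a factory and render it.
renderLogin :: ViewFactory h -> h ()
renderLogin viewFactory =
  render (viewFactory ^. loginView (T.pack "challenge-token"))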
{-# LANGUAGE OverloadedStrings, UnicodeSyntax #-}
-- | Provides logging via fast-logger in a Magicbane app context.
module Magicbane.Logging (
module Magicbane.Logging
, module X
) where
import RIO
import System.Log.FastLogger
import System.Log.FastLogger as X (LogType(..), defaultBufSize)
type ModLogger = LogFunc
type Formatter = TimedFastLogger → CallStack → LogSource → LogLevel → Utf8Builder → IO ()
-- | Creates a logger module using a given formatting function.
-- | Also returns the underlying TimedFastLogger for use outside of your Magicbane app (e.g. in some WAI middleware).
newLogger ∷ LogType → Formatter → IO (TimedFastLogger, ModLogger)
newLogger logtype formatter = do
tc ← newTimeCache simpleTimeFormat'
(fl, _) ← newTimedFastLogger tc logtype
-- forget cleanup because the logger will exist for the lifetime of the (OS) process
return (fl, mkLogFunc $ formatter fl)
simpleFormatter ∷ Formatter
simpleFormatter logger cs src level msg =
logger $ \t →
toLogStr t <> " " <>
toLogStr (utf8BuilderToText $ displayCallStack cs) <> " " <>
toLogStr src <> " " <>
toLogStr (showLevel level) <>
toLogStr (utf8BuilderToText msg) <> "\n"
where showLevel LevelDebug = "[DEBUG] "
showLevel LevelInfo = "[ INFO] "
showLevel LevelWarn = "[ WARN] "
showLevel LevelError = "[ERROR] "
showLevel (LevelOther t) = "[" <> t <> "] "
| myfreeweb/magicbane | library/Magicbane/Logging.hs | unlicense | 1,474 | 0 | 17 | 327 | 323 | 172 | 151 | 27 | 5 |
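A minimal wiring sketch for the module above: log to stdout with the default buffer size (LogStdout and defaultBufSize are re-exported from fast-logger) using the simpleFormatter defined above.

import Magicbane.Logging
import System.Log.FastLogger (TimedFastLogger)

-- Hypothetical setup: the ModLogger can go into a RIO environment, while the
-- TimedFastLogger can be reused elsewhere, e.g. in WAI middleware.
demoLogger :: IO (TimedFastLogger, ModLogger)
demoLogger = newLogger (LogStdout defaultBufSize) simpleFormatter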
-----------------------------------------------------------------------------
-- Copyright 2012 Microsoft Corporation.
--
-- This is free software; you can redistribute it and/or modify it under the
-- terms of the Apache License, Version 2.0. A copy of the License can be
-- found in the file "license.txt" at the root of this distribution.
-----------------------------------------------------------------------------
{- |
Finite maps from 'Common.Name.Name's to ...
-}
module Common.QNameMap
( QNameMap, Lookup(..)
, empty
, single
, fromList
, lookup, lookupQ
, insert
, union
, unions
, toAscList
, isEmpty
, filterNames
) where
import Prelude hiding (lookup)
import qualified Prelude
import qualified Data.Map as M
import Common.Name
import Common.Failure
----------------------------------------------------------------
-- Types
----------------------------------------------------------------
-- | A map from names to values
data QNameMap a = QM !(M.Map Name [(Name,a)])
instance Show a => Show (QNameMap a) where
show qm
= show (toAscList qm)
data Lookup a = Found Name a
| Ambiguous [Name]
| NotFound
empty :: QNameMap a
empty = QM M.empty
isEmpty :: QNameMap a -> Bool
isEmpty (QM m)
= M.null m
single :: Name -> a -> QNameMap a
single name x
= QM (M.singleton (unqualify name) [(name,x)])
fromList :: [(Name,a)] -> QNameMap a
fromList xs
= foldl (\qm (name,x) -> insert name x qm) empty xs
-- | Lookup a fully qualified name
lookupQ :: Name -> QNameMap a -> Maybe a
lookupQ name (QM m)
= case M.lookup (unqualify name) m of
Nothing -> Nothing
Just xs -> Prelude.lookup name xs
-- | Lookup a potentially unqualified name within a module context.
-- (The module context is ignored if a qualified name is looked up)
lookup :: Name -> Name -> QNameMap a -> Lookup a
lookup context name (QM m)
= case M.lookup (unqualify name) m of
Nothing -> NotFound
Just [(qname,x)] | not (isQualified name) -> Found qname x
Just xs -> let qname = if isQualified name then name else qualify context name
in case Prelude.filter (\p -> fst p == qname) xs of
[(realname,x)] -> Found realname x
_ -> Ambiguous (map fst xs)
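-- For example (names invented): if two entries share the unqualified stem
-- "map", one qualified by module "core" and one by "std", then looking up the
-- unqualified name in context "core" yields Found for core's entry, a
-- qualified lookup ignores the context entirely, and a context matching
-- neither entry yields Ambiguous with both qualified names.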
filterNames :: (Name -> Bool) -> QNameMap a -> QNameMap a
filterNames pred (QM m)
= QM (M.map belongs m)
where
belongs xs = [(name,x) | (name,x) <- xs, pred name]
insert :: Name -> a -> QNameMap a -> QNameMap a
insert name x (QM m)
= QM (M.insertWith (safeCombine "insert") (unqualify name) [(name,x)] m)
union :: QNameMap a -> QNameMap a -> QNameMap a
union (QM m1) (QM m2)
= QM (M.unionWith (safeCombine "union") m1 m2)
unions :: [QNameMap a] -> QNameMap a
unions qs
= foldl union empty qs
toAscList :: QNameMap a -> [(Name,a)]
toAscList (QM m)
= concatMap snd (M.toAscList m)
safeCombine :: String -> [(Name,a)] -> [(Name,a)] -> [(Name,a)]
safeCombine method xs ys
= let ynames = map fst ys
xnames = map fst xs
in if any (`elem` ynames) xnames
then failure ("Common.QNameMap." ++ method ++ ": overlapping names: " ++ show (xnames,ynames))
else xs ++ ys
| lpeterse/koka | src/Common/QNameMap.hs | apache-2.0 | 3,342 | 0 | 15 | 865 | 1,077 | 570 | 507 | 74 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.IDRange where
import GHC.Generics
import qualified Data.Aeson
-- |
data IDRange = IDRange
{ min :: Maybe Integer -- ^ min value for the range
, max :: Maybe Integer -- ^ max value for the range
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON IDRange
instance Data.Aeson.ToJSON IDRange
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/IDRange.hs | apache-2.0 | 505 | 0 | 9 | 88 | 88 | 53 | 35 | 14 | 0 |
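A minimal round-trip sketch for the record above: the derived Generic instances give aeson encoders and decoders keyed by the field names min and max.

import qualified Data.Aeson
import Kubernetes.V1.IDRange

-- Hypothetical range values; decode . encode should give back Just the input.
demoRoundTrip :: Maybe IDRange
demoRoundTrip = Data.Aeson.decode (Data.Aeson.encode (IDRange (Just 1000) (Just 2000)))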
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE TemplateHaskell #-}
module Polar.Shader.Types where
import qualified Data.Map as M
import Control.Lens.TH (makeFields)
data Token = LetT
| EqualsT
| PlusT
| AsteriskT
| NewLineT
| BracketOpenT
| BracketCloseT
| BraceOpenT
| BraceCloseT
| StatementEndT
| IdentifierT String
| LiteralT Double
deriving (Eq, Show)
data AST = Let String AST
| Assignment AST AST
| Additive AST AST
| Multiplicative AST AST
| Swizzle [AST]
| Literal Double
| Identifier String
| NamePosition
| NameVar String DataType
| NameGlobal String DataType
| NameInput String DataType
| NameOutput String DataType
deriving (Eq, Show)
data ShaderType = ShaderVertex | ShaderPixel
data DataType = DataFloat
| DataFloat2
| DataFloat3
| DataFloat4
| DataMatrix4x4
deriving (Eq, Show)
data Function = Function
{ _functionName :: String
, _functionLets :: [(String, DataType)]
, _functionAsts :: [AST]
} deriving (Eq, Show)
makeFields ''Function
data CompilerEnv = CompilerEnv
{ _compilerEnvFunctions :: M.Map String Function
, _compilerEnvGlobals :: M.Map String DataType
, _compilerEnvInputs :: M.Map String DataType
, _compilerEnvOutputs :: M.Map String DataType
}
makeFields ''CompilerEnv
class Compiler a where generate :: CompilerEnv -> a -> Either String (String, String)
class HasComponents a where numComponents :: a -> Either String Int
instance HasComponents DataType where
numComponents DataFloat = return 1
numComponents DataFloat2 = return 2
numComponents DataFloat3 = return 3
numComponents DataFloat4 = return 4
numComponents DataMatrix4x4 = return 16
instance HasComponents AST where
numComponents (Let _ right) = numComponents right
numComponents (Assignment left right) = do
l <- numComponents left
(l ==) <$> numComponents right >>= \case
True -> return l
False -> Left "number of components on left does not match number of components on right"
numComponents (Additive left right) = do
l <- numComponents left
(l ==) <$> numComponents right >>= \case
True -> return l
False -> Left "number of components on left does not match number of components on right"
numComponents (Multiplicative left right) = do
l <- numComponents left
r <- numComponents right
if l == 16 && r == 4
then return 4
else (l ==) <$> numComponents right >>= \case
True -> return l
False -> Left "number of components on left does not match number of components on right"
numComponents (Swizzle []) = return 0
numComponents (Swizzle (ast : asts)) = (+) <$> numComponents ast <*> numComponents (Swizzle asts)
numComponents (Literal _) = return 1
numComponents NamePosition = return 4
numComponents (NameVar _ ty) = numComponents ty
numComponents (NameGlobal _ ty) = numComponents ty
numComponents (NameInput _ ty) = numComponents ty
numComponents (NameOutput _ ty) = numComponents ty
numComponents (Identifier name) = Left ("numComponents: unresolved identifier (" ++ name ++ ")")
astType :: AST -> Either String DataType
astType ast = numComponents ast >>= \case
1 -> return DataFloat
2 -> return DataFloat2
3 -> return DataFloat3
4 -> return DataFloat4
16 -> return DataMatrix4x4
x -> Left ("number of components (" ++ show x ++ ") does not match any supported data type")
| polar-engine/polar-shader | src/Polar/Shader/Types.hs | apache-2.0 | 3,945 | 0 | 13 | 1,140 | 1,010 | 521 | 489 | 99 | 6 |
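A minimal sketch (AST built by hand) showing the special case in numComponents above: a 4x4 matrix multiplied by a float4 resolves to DataFloat4.

import Polar.Shader.Types

-- The names "projection" and "vertex" are placeholders; only the data types matter.
demoType :: Either String DataType
demoType = astType (Multiplicative (NameGlobal "projection" DataMatrix4x4)
                                   (NameInput  "vertex"     DataFloat4))
-- expected: Right DataFloat4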
{-| Implementation of cluster-wide logic.
This module holds all pure cluster-logic; I\/O related functionality
goes into the /Main/ module for the individual binaries.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Cluster
(
-- * Types
AllocDetails(..)
, Table(..)
, CStats(..)
, AllocNodes
, AllocResult
, AllocMethod
, GenericAllocSolutionList
, AllocSolutionList
-- * Generic functions
, totalResources
, computeAllocationDelta
, hasRequiredNetworks
-- * First phase functions
, computeBadItems
-- * Second phase functions
, printSolutionLine
, formatCmds
, involvedNodes
, getMoves
, splitJobs
-- * Display functions
, printNodes
, printInsts
-- * Balancing functions
, doNextBalance
, tryBalance
, iMoveToJob
-- * IAllocator functions
, genAllocNodes
, tryAlloc
, tryGroupAlloc
, tryMGAlloc
, filterMGResults
, sortMGResults
, tryChangeGroup
, allocList
-- * Allocation functions
, iterateAlloc
, tieredAlloc
-- * Node group functions
, instanceGroup
, findSplitInstances
) where
import Control.Applicative ((<$>), liftA2)
import Control.Arrow ((&&&))
import Control.Monad (unless)
import qualified Data.IntSet as IntSet
import Data.List
import Data.Maybe (fromJust, fromMaybe, isJust, isNothing)
import Data.Ord (comparing)
import Text.Printf (printf)
import Ganeti.BasicTypes
import Ganeti.HTools.AlgorithmParams (AlgorithmOptions(..), defaultOptions)
import qualified Ganeti.HTools.Container as Container
import Ganeti.HTools.Cluster.AllocatePrimitives ( allocateOnSingle
, allocateOnPair)
import Ganeti.HTools.Cluster.AllocationSolution
( GenericAllocSolution(..) , AllocSolution, emptyAllocSolution
, sumAllocs, extractNl, updateIl
, annotateSolution, solutionDescription, collapseFailures
, emptyAllocCollection, concatAllocCollections, collectionToSolution )
import Ganeti.HTools.Cluster.Evacuate ( EvacSolution(..), emptyEvacSolution
, updateEvacSolution, reverseEvacSolution
, nodeEvacInstance)
import Ganeti.HTools.Cluster.Metrics (compCV, compClusterStatistics)
import Ganeti.HTools.Cluster.Moves (applyMoveEx)
import Ganeti.HTools.Cluster.Utils (splitCluster, instancePriGroup
, availableGroupNodes, iMoveToJob)
import Ganeti.HTools.GlobalN1 (allocGlobalN1, redundant)
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Nic as Nic
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import Ganeti.HTools.Types
import Ganeti.Compat
import Ganeti.Utils
import Ganeti.Types (EvacMode(..))
-- * Types
-- | Allocation details for an instance, specifying
-- required number of nodes, and
-- an optional group (name) to allocate to
data AllocDetails = AllocDetails Int (Maybe String)
deriving (Show)
-- | Allocation results, as used in 'iterateAlloc' and 'tieredAlloc'.
type AllocResult = (FailStats, Node.List, Instance.List,
[Instance.Instance], [CStats])
-- | Type alias for easier handling.
type GenericAllocSolutionList a =
[(Instance.Instance, GenericAllocSolution a)]
type AllocSolutionList = GenericAllocSolutionList Score
-- | A type denoting the valid allocation mode/pairs.
--
-- For a one-node allocation, this will be a @Left ['Ndx']@, whereas
-- for a two-node allocation, this will be a @Right [('Ndx',
-- ['Ndx'])]@. In the latter case, the list is basically an
-- association list, grouped by primary node and holding the potential
-- secondary nodes in the sub-list.
type AllocNodes = Either [Ndx] [(Ndx, [Ndx])]
-- | The complete state for the balancing solution.
data Table = Table Node.List Instance.List Score [Placement]
deriving (Show)
-- | Cluster statistics data type.
data CStats = CStats
{ csFmem :: Integer -- ^ Cluster free mem
, csFdsk :: Integer -- ^ Cluster free disk
, csFspn :: Integer -- ^ Cluster free spindles
, csAmem :: Integer -- ^ Cluster allocatable mem
, csAdsk :: Integer -- ^ Cluster allocatable disk
, csAcpu :: Integer -- ^ Cluster allocatable cpus
, csMmem :: Integer -- ^ Max node allocatable mem
, csMdsk :: Integer -- ^ Max node allocatable disk
, csMcpu :: Integer -- ^ Max node allocatable cpu
, csImem :: Integer -- ^ Instance used mem
, csIdsk :: Integer -- ^ Instance used disk
, csIspn :: Integer -- ^ Instance used spindles
, csIcpu :: Integer -- ^ Instance used cpu
, csTmem :: Double -- ^ Cluster total mem
, csTdsk :: Double -- ^ Cluster total disk
, csTspn :: Double -- ^ Cluster total spindles
, csTcpu :: Double -- ^ Cluster total cpus
, csVcpu :: Integer -- ^ Cluster total virtual cpus
, csNcpu :: Double -- ^ Equivalent to 'csIcpu' but in terms of
-- physical CPUs, i.e. normalised used phys CPUs
, csXmem :: Integer -- ^ Unaccounted for mem
, csNmem :: Integer -- ^ Node own memory
, csScore :: Score -- ^ The cluster score
, csNinst :: Int -- ^ The total number of instances
} deriving (Show)
-- | A simple type for allocation functions.
type AllocMethod = Node.List -- ^ Node list
-> Instance.List -- ^ Instance list
-> Maybe Int -- ^ Optional allocation limit
-> Instance.Instance -- ^ Instance spec for allocation
-> AllocNodes -- ^ Which nodes we should allocate on
-> [Instance.Instance] -- ^ Allocated instances
-> [CStats] -- ^ Running cluster stats
-> Result AllocResult -- ^ Allocation result
-- * Utility functions
-- | Verifies the N+1 status and returns the affected nodes.
verifyN1 :: [Node.Node] -> [Node.Node]
verifyN1 = filter Node.failN1
{-| Computes the pair of bad nodes and instances.
The bad node list is computed via a simple 'verifyN1' check, and the
bad instance list is the list of primary and secondary instances of
those nodes.
-}
computeBadItems :: Node.List -> Instance.List ->
([Node.Node], [Instance.Instance])
computeBadItems nl il =
let bad_nodes = verifyN1 $ getOnline nl
bad_instances = map (`Container.find` il) .
sort . nub $
concatMap (\ n -> Node.sList n ++ Node.pList n) bad_nodes
in
(bad_nodes, bad_instances)
-- | Zero-initializer for the CStats type.
emptyCStats :: CStats
emptyCStats = CStats 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
-- | Update stats with data from a new node.
updateCStats :: CStats -> Node.Node -> CStats
updateCStats cs node =
let CStats { csFmem = x_fmem, csFdsk = x_fdsk,
csAmem = x_amem, csAcpu = x_acpu, csAdsk = x_adsk,
csMmem = x_mmem, csMdsk = x_mdsk, csMcpu = x_mcpu,
csImem = x_imem, csIdsk = x_idsk, csIcpu = x_icpu,
csTmem = x_tmem, csTdsk = x_tdsk, csTcpu = x_tcpu,
csVcpu = x_vcpu, csNcpu = x_ncpu,
csXmem = x_xmem, csNmem = x_nmem, csNinst = x_ninst,
csFspn = x_fspn, csIspn = x_ispn, csTspn = x_tspn
}
= cs
inc_amem = Node.fMem node - Node.rMem node
inc_amem' = if inc_amem > 0 then inc_amem else 0
inc_adsk = Node.availDisk node
inc_imem = truncate (Node.tMem node) - Node.nMem node
- Node.xMem node - Node.fMem node
inc_icpu = Node.uCpu node
inc_idsk = truncate (Node.tDsk node) - Node.fDsk node
inc_ispn = Node.tSpindles node - Node.fSpindles node
inc_vcpu = Node.hiCpu node
inc_acpu = Node.availCpu node
inc_ncpu = fromIntegral (Node.uCpu node) /
iPolicyVcpuRatio (Node.iPolicy node)
in cs { csFmem = x_fmem + fromIntegral (Node.fMem node)
, csFdsk = x_fdsk + fromIntegral (Node.fDsk node)
, csFspn = x_fspn + fromIntegral (Node.fSpindles node)
, csAmem = x_amem + fromIntegral inc_amem'
, csAdsk = x_adsk + fromIntegral inc_adsk
, csAcpu = x_acpu + fromIntegral inc_acpu
, csMmem = max x_mmem (fromIntegral inc_amem')
, csMdsk = max x_mdsk (fromIntegral inc_adsk)
, csMcpu = max x_mcpu (fromIntegral inc_acpu)
, csImem = x_imem + fromIntegral inc_imem
, csIdsk = x_idsk + fromIntegral inc_idsk
, csIspn = x_ispn + fromIntegral inc_ispn
, csIcpu = x_icpu + fromIntegral inc_icpu
, csTmem = x_tmem + Node.tMem node
, csTdsk = x_tdsk + Node.tDsk node
, csTspn = x_tspn + fromIntegral (Node.tSpindles node)
, csTcpu = x_tcpu + Node.tCpu node
, csVcpu = x_vcpu + fromIntegral inc_vcpu
, csNcpu = x_ncpu + inc_ncpu
, csXmem = x_xmem + fromIntegral (Node.xMem node)
, csNmem = x_nmem + fromIntegral (Node.nMem node)
, csNinst = x_ninst + length (Node.pList node)
}
-- | Compute the total free disk and memory in the cluster.
totalResources :: Node.List -> CStats
totalResources nl =
let cs = foldl' updateCStats emptyCStats . Container.elems $ nl
in cs { csScore = compCV nl }
-- | Compute the delta between two cluster states.
--
-- This is used when doing allocations, to understand better the
-- available cluster resources. The return value is a triple of the
-- current used values, the delta that was still allocated, and what
-- was left unallocated.
computeAllocationDelta :: CStats -> CStats -> AllocStats
computeAllocationDelta cini cfin =
let CStats {csImem = i_imem, csIdsk = i_idsk, csIcpu = i_icpu,
csNcpu = i_ncpu, csIspn = i_ispn } = cini
CStats {csImem = f_imem, csIdsk = f_idsk, csIcpu = f_icpu,
csTmem = t_mem, csTdsk = t_dsk, csVcpu = f_vcpu,
csNcpu = f_ncpu, csTcpu = f_tcpu,
csIspn = f_ispn, csTspn = t_spn } = cfin
rini = AllocInfo { allocInfoVCpus = fromIntegral i_icpu
, allocInfoNCpus = i_ncpu
, allocInfoMem = fromIntegral i_imem
, allocInfoDisk = fromIntegral i_idsk
, allocInfoSpn = fromIntegral i_ispn
}
rfin = AllocInfo { allocInfoVCpus = fromIntegral (f_icpu - i_icpu)
, allocInfoNCpus = f_ncpu - i_ncpu
, allocInfoMem = fromIntegral (f_imem - i_imem)
, allocInfoDisk = fromIntegral (f_idsk - i_idsk)
, allocInfoSpn = fromIntegral (f_ispn - i_ispn)
}
runa = AllocInfo { allocInfoVCpus = fromIntegral (f_vcpu - f_icpu)
, allocInfoNCpus = f_tcpu - f_ncpu
, allocInfoMem = truncate t_mem - fromIntegral f_imem
, allocInfoDisk = truncate t_dsk - fromIntegral f_idsk
, allocInfoSpn = truncate t_spn - fromIntegral f_ispn
}
in (rini, rfin, runa)
-- | Compute online nodes from a 'Node.List'.
getOnline :: Node.List -> [Node.Node]
getOnline = filter (not . Node.offline) . Container.elems
-- * Balancing functions
-- | Compute best table. Note that the ordering of the arguments is important.
compareTables :: Table -> Table -> Table
compareTables a@(Table _ _ a_cv _) b@(Table _ _ b_cv _ ) =
if a_cv > b_cv then b else a
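-- For example (scores invented): compareTables (Table nl il 0.25 plc)
-- (Table nl' il' 0.20 plc') keeps the second table, since the lower cluster
-- variance score wins; on a tie the first argument is kept.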
-- | Tries to perform an instance move and returns the best table
-- between the original one and the new one.
checkSingleStep :: Bool -- ^ Whether to unconditionally ignore soft errors
-> Table -- ^ The original table
-> Instance.Instance -- ^ The instance to move
-> Table -- ^ The current best table
-> IMove -- ^ The move to apply
-> Table -- ^ The final best table
checkSingleStep force ini_tbl target cur_tbl move =
let Table ini_nl ini_il _ ini_plc = ini_tbl
tmp_resu = applyMoveEx force ini_nl target move
in case tmp_resu of
Bad _ -> cur_tbl
Ok (upd_nl, new_inst, pri_idx, sec_idx) ->
let tgt_idx = Instance.idx target
upd_cvar = compCV upd_nl
upd_il = Container.add tgt_idx new_inst ini_il
upd_plc = (tgt_idx, pri_idx, sec_idx, move, upd_cvar):ini_plc
upd_tbl = Table upd_nl upd_il upd_cvar upd_plc
in compareTables cur_tbl upd_tbl
-- | Generate all possible migration moves of an instance given some
-- additional parameters
migrationMoves :: MirrorType -- ^ The mirroring type of the instance
-> Bool -- ^ Whether the secondary node is active
-> [Ndx] -- ^ Target node candidate list
-> [IMove] -- ^ List of valid result moves
migrationMoves MirrorNone _ _ = []
migrationMoves MirrorInternal False _ = []
migrationMoves MirrorInternal True _ = [Failover]
migrationMoves MirrorExternal _ nodes_idx = map FailoverToAny nodes_idx
-- | Generate all possible disk moves (complex instance moves consist of disk
-- moves and maybe migrations) of an instance given some additional parameters
diskMoves :: MirrorType -- ^ The mirroring type of the instance
-> Bool -- ^ Whether the secondary node is a valid new node
-> Bool -- ^ Whether we can change the primary node
-> (Bool, Bool) -- ^ Whether migration is restricted and whether
-- the instance primary is offline
-> [Ndx] -- ^ Target node candidates list
-> [IMove] -- ^ List of valid result moves
diskMoves MirrorNone _ _ _ _ = []
diskMoves MirrorExternal _ _ _ _ = []
diskMoves MirrorInternal valid_sec inst_moves restr nodes_idx =
concatMap (intMirrSingleDiskMove valid_sec inst_moves restr) nodes_idx
where
intMirrSingleDiskMove _ False _ tdx =
[ReplaceSecondary tdx]
intMirrSingleDiskMove _ _ (True, False) tdx =
[ReplaceSecondary tdx]
intMirrSingleDiskMove True True (False, _) tdx =
[ ReplaceSecondary tdx
, ReplaceAndFailover tdx
, ReplacePrimary tdx
, FailoverAndReplace tdx
]
intMirrSingleDiskMove True True (True, True) tdx =
[ ReplaceSecondary tdx
, ReplaceAndFailover tdx
, FailoverAndReplace tdx
]
intMirrSingleDiskMove False True _ tdx =
[ ReplaceSecondary tdx
, ReplaceAndFailover tdx
]
-- | Compute the best move for a given instance.
checkInstanceMove :: AlgorithmOptions -- ^ Algorithmic options for balancing
-> [Ndx] -- ^ Allowed target node indices
-> Table -- ^ Original table
-> Instance.Instance -- ^ Instance to move
-> (Table, Table) -- ^ Pair of best new tables:
-- migrations only and with disk moves
checkInstanceMove opts nodes_idx ini_tbl@(Table nl _ _ _) target =
let force = algIgnoreSoftErrors opts
inst_moves = algInstanceMoves opts
rest_mig = algRestrictedMigration opts
opdx = Instance.pNode target
osdx = Instance.sNode target
bad_nodes = [opdx, osdx]
nodes = filter (`notElem` bad_nodes) nodes_idx
mir_type = Instance.mirrorType target
use_secondary = elem osdx nodes_idx && inst_moves
primary_drained = Node.offline
. flip Container.find nl
$ Instance.pNode target
migrations = migrationMoves mir_type use_secondary nodes
disk_moves = diskMoves mir_type use_secondary inst_moves
(rest_mig, primary_drained) nodes
-- iterate over the possible nodes and migrations for this instance
best_migr_tbl =
if inst_moves
then foldl' (checkSingleStep force ini_tbl target) ini_tbl migrations
else ini_tbl
-- iterate over the possible moves for this instance
best_tbl =
foldl' (checkSingleStep force ini_tbl target) best_migr_tbl disk_moves
in (best_migr_tbl, best_tbl)
-- | Compute the best next move.
checkMove :: AlgorithmOptions -- ^ Algorithmic options for balancing
-> [Ndx] -- ^ Allowed target node indices
-> Table -- ^ The current solution
-> [Instance.Instance] -- ^ List of instances still to move
-> Table -- ^ The new solution
checkMove opts nodes_idx ini_tbl@(Table _ _ ini_cv _) victims =
let disk_moves = algDiskMoves opts
disk_moves_f = algDiskMovesFactor opts
-- we're using rwhnf from the Control.Parallel.Strategies
-- package; we don't need to use rnf as that would force too
-- much evaluation in single-threaded cases, and in
-- multi-threaded case the weak head normal form is enough to
-- spark the evaluation
table_pairs = parMap rwhnf (checkInstanceMove opts nodes_idx ini_tbl)
victims
-- iterate over all instances, computing the best move
best_migr_tbl@(Table _ _ best_migr_cv _) =
foldl' compareTables ini_tbl $ map fst table_pairs
best_tbl@(Table _ _ best_cv _) =
foldl' compareTables ini_tbl $ map snd table_pairs
in if not disk_moves
|| ini_cv - best_cv <= (ini_cv - best_migr_cv) * disk_moves_f
then best_migr_tbl
else best_tbl -- best including disk moves
-- | Check if we are allowed to go deeper in the balancing.
doNextBalance :: Table -- ^ The starting table
-> Int -- ^ Remaining length
-> Score -- ^ Score at which to stop
-> Bool -- ^ Whether another balancing round is allowed
doNextBalance ini_tbl max_rounds min_score =
let Table _ _ ini_cv ini_plc = ini_tbl
ini_plc_len = length ini_plc
in (max_rounds < 0 || ini_plc_len < max_rounds) && ini_cv > min_score
-- | Run a balance move.
tryBalance :: AlgorithmOptions -- ^ Algorithmic options for balancing
-> Table -- ^ The starting table
-> Maybe Table -- ^ The resulting table and commands
tryBalance opts ini_tbl =
let evac_mode = algEvacMode opts
mg_limit = algMinGainLimit opts
min_gain = algMinGain opts
Table ini_nl ini_il ini_cv _ = ini_tbl
all_inst = Container.elems ini_il
all_nodes = Container.elems ini_nl
(offline_nodes, online_nodes) = partition Node.offline all_nodes
all_inst' = if evac_mode
then let bad_nodes = map Node.idx offline_nodes
in filter (any (`elem` bad_nodes) .
Instance.allNodes) all_inst
else all_inst
reloc_inst = filter (\i -> Instance.movable i &&
Instance.autoBalance i) all_inst'
node_idx = map Node.idx online_nodes
fin_tbl = checkMove opts node_idx ini_tbl reloc_inst
(Table _ _ fin_cv _) = fin_tbl
in
if fin_cv < ini_cv && (ini_cv > mg_limit || ini_cv - fin_cv >= min_gain)
then Just fin_tbl -- this round made success, return the new table
else Nothing
-- * Allocation functions
-- | Generate the valid node allocation singles or pairs for a new instance.
genAllocNodes :: AlgorithmOptions -- ^ algorithmic options to honor
-> Group.List -- ^ Group list
-> Node.List -- ^ The node map
-> Int -- ^ The number of nodes required
-> Bool -- ^ Whether to drop or not
-- unallocable nodes
-> Result AllocNodes -- ^ The (monadic) result
genAllocNodes opts gl nl count drop_unalloc =
let filter_fn = if drop_unalloc
then filter (Group.isAllocable .
flip Container.find gl . Node.group)
else id
restrict_fn = maybe id (\ns -> filter (flip elem ns . Node.name))
$ algRestrictToNodes opts
all_nodes = restrict_fn . filter_fn $ getOnline nl
all_pairs = [(Node.idx p,
[Node.idx s | s <- all_nodes,
Node.idx p /= Node.idx s,
Node.group p == Node.group s]) |
p <- all_nodes]
in case count of
1 -> Ok (Left (map Node.idx all_nodes))
2 -> Ok (Right (filter (not . null . snd) all_pairs))
_ -> Bad "Unsupported number of nodes, only one or two supported"
-- | Try to allocate an instance on the cluster.
tryAlloc :: (Monad m) =>
AlgorithmOptions
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> Instance.Instance -- ^ The instance to allocate
-> AllocNodes -- ^ The allocation targets
-> m AllocSolution -- ^ Possible solution list
tryAlloc _ _ _ _ (Right []) = fail "Not enough online nodes"
tryAlloc opts nl il inst (Right ok_pairs) =
let cstat = compClusterStatistics $ Container.elems nl
n1pred = if algCapacity opts
then allocGlobalN1 nl il
else const True
psols = parMap rwhnf (\(p, ss) ->
collectionToSolution FailN1 n1pred $
foldl (\cstate ->
concatAllocCollections cstate
. allocateOnPair opts cstat nl inst p)
emptyAllocCollection ss) ok_pairs
sols = foldl' sumAllocs emptyAllocSolution psols
in return $ annotateSolution sols
tryAlloc _ _ _ _ (Left []) = fail "No online nodes"
tryAlloc opts nl il inst (Left all_nodes) =
let sols = foldl (\cstate ->
concatAllocCollections cstate
. allocateOnSingle opts nl inst
) emptyAllocCollection all_nodes
n1pred = if algCapacity opts
then allocGlobalN1 nl il
else const True
in return . annotateSolution
$ collectionToSolution FailN1 n1pred sols
-- | From a list of possibly bad and possibly empty solutions, filter
-- only the groups with a valid result. Note that the result will be
-- reversed compared to the original list.
filterMGResults :: [(Group.Group, Result (GenericAllocSolution a))]
-> [(Group.Group, GenericAllocSolution a)]
filterMGResults = foldl' fn []
where unallocable = not . Group.isAllocable
fn accu (grp, rasol) =
case rasol of
Bad _ -> accu
Ok sol | isNothing (asSolution sol) -> accu
| unallocable grp -> accu
| otherwise -> (grp, sol):accu
-- | Sort multigroup results based on policy and score.
sortMGResults :: Ord a
=> [(Group.Group, GenericAllocSolution a)]
-> [(Group.Group, GenericAllocSolution a)]
sortMGResults sols =
let extractScore (_, _, _, x) = x
solScore (grp, sol) = (Group.allocPolicy grp,
(extractScore . fromJust . asSolution) sol)
in sortBy (comparing solScore) sols
-- | Determines if a group is connected to the networks required by the
-- instance.
hasRequiredNetworks :: Group.Group -> Instance.Instance -> Bool
hasRequiredNetworks ng = all hasNetwork . Instance.nics
where hasNetwork = maybe True (`elem` Group.networks ng) . Nic.network
-- | Removes node groups which can't accommodate the instance
filterValidGroups :: [(Group.Group, (Node.List, Instance.List))]
-> Instance.Instance
-> ([(Group.Group, (Node.List, Instance.List))], [String])
filterValidGroups [] _ = ([], [])
filterValidGroups (ng:ngs) inst =
let (valid_ngs, msgs) = filterValidGroups ngs inst
in if hasRequiredNetworks (fst ng) inst
then (ng:valid_ngs, msgs)
else (valid_ngs,
("group " ++ Group.name (fst ng) ++
" is not connected to a network required by instance " ++
Instance.name inst):msgs)
-- | Finds an allocation solution for an instance on a group
findAllocation :: AlgorithmOptions
-> Group.List -- ^ The group list
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> Gdx -- ^ The group to allocate to
-> Instance.Instance -- ^ The instance to allocate
-> Int -- ^ Required number of nodes
-> Result (AllocSolution, [String])
findAllocation opts mggl mgnl mgil gdx inst cnt = do
let belongsTo nl' nidx = nidx `elem` map Node.idx (Container.elems nl')
nl = Container.filter ((== gdx) . Node.group) mgnl
il = Container.filter (belongsTo nl . Instance.pNode) mgil
group' = Container.find gdx mggl
unless (hasRequiredNetworks group' inst) . failError
$ "The group " ++ Group.name group' ++ " is not connected to\
\ a network required by instance " ++ Instance.name inst
solution <- genAllocNodes opts mggl nl cnt False >>= tryAlloc opts nl il inst
return (solution, solutionDescription (group', return solution))
-- | Finds the best group for an instance on a multi-group cluster.
--
-- Only solutions in @preferred@ and @last_resort@ groups will be
-- accepted as valid, and additionally if the allowed groups parameter
-- is not null then allocation will only be run for those group
-- indices.
findBestAllocGroup :: AlgorithmOptions
-> Group.List -- ^ The group list
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> Maybe [Gdx] -- ^ The allowed groups
-> Instance.Instance -- ^ The instance to allocate
-> Int -- ^ Required number of nodes
-> Result (Group.Group, AllocSolution, [String])
findBestAllocGroup opts mggl mgnl mgil allowed_gdxs inst cnt =
let groups_by_idx = splitCluster mgnl mgil
groups = map (\(gid, d) -> (Container.find gid mggl, d)) groups_by_idx
groups' = maybe groups
(\gs -> filter ((`elem` gs) . Group.idx . fst) groups)
allowed_gdxs
(groups'', filter_group_msgs) = filterValidGroups groups' inst
sols = map (\(gr, (nl, il)) ->
(gr, genAllocNodes opts mggl nl cnt False >>=
tryAlloc opts nl il inst))
groups''::[(Group.Group, Result AllocSolution)]
all_msgs = filter_group_msgs ++ concatMap solutionDescription sols
goodSols = filterMGResults sols
sortedSols = sortMGResults goodSols
in case sortedSols of
[] -> Bad $ if null groups'
then "no groups for evacuation: allowed groups was " ++
show allowed_gdxs ++ ", all groups: " ++
show (map fst groups)
else intercalate ", " all_msgs
(final_group, final_sol):_ -> return (final_group, final_sol, all_msgs)
-- | Try to allocate an instance on a multi-group cluster.
tryMGAlloc :: AlgorithmOptions
-> Group.List -- ^ The group list
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> Instance.Instance -- ^ The instance to allocate
-> Int -- ^ Required number of nodes
-> Result AllocSolution -- ^ Possible solution list
tryMGAlloc opts mggl mgnl mgil inst cnt = do
(best_group, solution, all_msgs) <-
findBestAllocGroup opts mggl mgnl mgil Nothing inst cnt
let group_name = Group.name best_group
selmsg = "Selected group: " ++ group_name
return $ solution { asLog = selmsg:all_msgs }
-- | Try to allocate an instance to a group.
tryGroupAlloc :: AlgorithmOptions
-> Group.List -- ^ The group list
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> String -- ^ The allocation group (name)
-> Instance.Instance -- ^ The instance to allocate
-> Int -- ^ Required number of nodes
-> Result AllocSolution -- ^ Solution
tryGroupAlloc opts mggl mgnl ngil gn inst cnt = do
gdx <- Group.idx <$> Container.findByName mggl gn
(solution, msgs) <- findAllocation opts mggl mgnl ngil gdx inst cnt
return $ solution { asLog = msgs }
-- | Try to allocate a list of instances on a multi-group cluster.
allocList :: AlgorithmOptions
-> Group.List -- ^ The group list
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> [(Instance.Instance, AllocDetails)] -- ^ The instance to
-- allocate
-> AllocSolutionList -- ^ Possible solution
-- list
-> Result (Node.List, Instance.List,
AllocSolutionList) -- ^ The final solution
-- list
allocList _ _ nl il [] result = Ok (nl, il, result)
allocList opts gl nl il ((xi, AllocDetails xicnt mgn):xies) result = do
ares <- case mgn of
Nothing -> tryMGAlloc opts gl nl il xi xicnt
Just gn -> tryGroupAlloc opts gl nl il gn xi xicnt
let sol = asSolution ares
nl' = extractNl nl il sol
il' = updateIl il sol
allocList opts gl nl' il' xies ((xi, ares):result)
-- | Change-group IAllocator mode main function.
--
-- This is very similar to 'tryNodeEvac'; the only difference is that
-- we don't choose the current instance group as the target group, but
-- instead:
--
-- 1. at the start of the function, we compute the target groups; either
-- no groups were passed in, in which case we choose all groups out of
-- which we don't evacuate instances, or some groups were passed in, in
-- which case we use those
--
-- 2. for each instance, we use 'findBestAllocGroup' to choose the
-- best group to hold the instance, and then we do what
-- 'tryNodeEvac' does, except for this group instead of the current
-- instance group.
--
-- Note that the correct behaviour of this function relies on the
-- function 'nodeEvacInstance' being able to correctly perform both
-- intra-group and inter-group moves when passed the 'ChangeAll' mode.
tryChangeGroup :: AlgorithmOptions
-> Group.List -- ^ The cluster groups
-> Node.List -- ^ The node list (cluster-wide)
-> Instance.List -- ^ Instance list (cluster-wide)
-> [Gdx] -- ^ Target groups; if empty, any
-- groups not being evacuated
-> [Idx] -- ^ List of instance (indices) to be evacuated
-> Result (Node.List, Instance.List, EvacSolution)
tryChangeGroup opts gl ini_nl ini_il gdxs idxs =
let evac_gdxs = nub $ map (instancePriGroup ini_nl .
flip Container.find ini_il) idxs
target_gdxs = (if null gdxs
then Container.keys gl
else gdxs) \\ evac_gdxs
offline = map Node.idx . filter Node.offline $ Container.elems ini_nl
excl_ndx = foldl' (flip IntSet.insert) IntSet.empty offline
group_ndx = map (\(gdx, (nl, _)) -> (gdx, map Node.idx
(Container.elems nl))) $
splitCluster ini_nl ini_il
(fin_nl, fin_il, esol) =
foldl' (\state@(nl, il, _) inst ->
let solution = do
let ncnt = Instance.requiredNodes $
Instance.diskTemplate inst
(grp, _, _) <- findBestAllocGroup opts gl nl il
(Just target_gdxs) inst ncnt
let gdx = Group.idx grp
av_nodes <- availableGroupNodes group_ndx
excl_ndx gdx
nodeEvacInstance defaultOptions
nl il ChangeAll inst gdx av_nodes
in updateEvacSolution state (Instance.idx inst) solution
)
(ini_nl, ini_il, emptyEvacSolution)
(map (`Container.find` ini_il) idxs)
in return (fin_nl, fin_il, reverseEvacSolution esol)
-- | Standard-sized allocation method.
--
-- This places instances of the same size on the cluster until we're
-- out of space. The result will be a list of identically-sized
-- instances.
iterateAllocSmallStep :: AlgorithmOptions -> AllocMethod
iterateAllocSmallStep opts nl il limit newinst allocnodes ixes cstats =
let depth = length ixes
newname = printf "new-%d" depth::String
newidx = Container.size il
newi2 = Instance.setIdx (Instance.setName newinst newname) newidx
newlimit = fmap (flip (-) 1) limit
opts' = if Instance.diskTemplate newi2 == DTDrbd8
then opts { algCapacity = False }
else opts
in case tryAlloc opts' nl il newi2 allocnodes of
Bad s -> Bad s
Ok (AllocSolution { asFailures = errs, asSolution = sols3 }) ->
let newsol = Ok (collapseFailures errs, nl, il, ixes, cstats) in
case sols3 of
Nothing -> newsol
Just (xnl, xi, _, _) ->
if limit == Just 0
then newsol
else iterateAllocSmallStep opts xnl (Container.add newidx xi il)
newlimit newinst allocnodes (xi:ixes)
(totalResources xnl:cstats)
-- | Guess a number of machines worth trying to put on the cluster in one step.
-- The goal is to guess a number close to the actual capacity of the cluster but
-- preferably not bigger, unless it is quite small (as we don't want to do
-- big steps smaller than 10).
guessBigstepSize :: Node.List -> Instance.Instance -> Int
guessBigstepSize nl inst =
let nodes = Container.elems nl
totalUnusedMemory = sum $ map Node.fMem nodes
reserved = round . maximum $ map Node.tMem nodes
capacity = (totalUnusedMemory - reserved) `div` Instance.mem inst
-- however, at every node we might lose almost an instance if it just
-- doesn't fit by a tiny margin
guess = capacity - Container.size nl
in if guess < 20 then 20 else guess
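-- A worked numeric sketch of the guess above (all numbers are illustrative,
-- not taken from any real cluster): three nodes with 4000 MB of unused memory
-- each, the largest node having 8000 MB in total, and a 128 MB instance give
-- (12000 - 8000) `div` 128 - 3 = 28, which is kept as it is not below the
-- minimum step of 20.
guessBigstepSketch :: Int
guessBigstepSketch =
  let totalUnusedMemory = 3 * 4000  -- sum of free memory over all nodes
      reserved = 8000               -- total memory of the biggest node
      capacity = (totalUnusedMemory - reserved) `div` 128
      guess = capacity - 3          -- we might lose ~one instance per node
  in if guess < 20 then 20 else guess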
-- | A speed-up version of `iterateAllocSmallStep`.
--
-- This function returns precisely the same result as `iterateAllocSmallStep`.
-- However, the computation is sped up by the following heuristic: allocate
-- a group of instances iteratively without considering global N+1 redundancy;
-- if the result of this is globally N+1 redundant, then everything was OK
-- in between and we can continue from there. Only if that fails do we fall
-- back to a step-by-step iterative allocation.
-- In order to further speed up the computation while keeping it robust, we
-- first try (if the first argument is True) a number of steps guessed from
-- the node capacity, then, if that failed, a fixed step size, and only as a
-- last resort a step-by-step iterative allocation.
iterateAlloc' :: Bool -> AlgorithmOptions -> AllocMethod
iterateAlloc' tryHugestep opts nl il limit newinst allocnodes ixes cstats =
if not $ algCapacity opts
then iterateAllocSmallStep opts nl il limit newinst allocnodes ixes cstats
else let bigstepsize = if tryHugestep
then guessBigstepSize nl newinst
else 10
(limit', newlimit) = maybe (Just bigstepsize, Nothing)
(Just . min bigstepsize
&&& Just . max 0 . flip (-) bigstepsize)
limit
opts' = opts { algCapacity = False }
in case iterateAllocSmallStep opts' nl il limit'
newinst allocnodes ixes cstats of
Bad s -> Bad s
Ok res@(_, nl', il', ixes', cstats') | redundant nl' il' ->
if newlimit == Just 0 || length ixes' == length ixes
then return res
else iterateAlloc' tryHugestep opts nl' il' newlimit newinst
allocnodes ixes' cstats'
_ -> if tryHugestep
then iterateAlloc' False opts nl il limit newinst allocnodes
ixes cstats
else iterateAllocSmallStep opts nl il limit newinst
allocnodes ixes cstats
-- | A speed-up version of `iterateAllocSmallStep`.
iterateAlloc :: AlgorithmOptions -> AllocMethod
iterateAlloc = iterateAlloc' True
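-- A minimal, library-agnostic sketch of the "optimistic big step, then fall
-- back" pattern described above (all names below are hypothetical, not part
-- of this module's API): apply up to n cheap steps at once and keep the
-- result only if the expensive invariant still holds; otherwise redo the
-- work one verified step at a time.
bigStepSketch :: (a -> Bool)     -- expensive invariant (think: global N+1)
              -> (a -> Maybe a)  -- one cheap step, without the check
              -> Int             -- number of steps to attempt in one go
              -> a -> a
bigStepSketch ok step n x0
  | ok optimistic = optimistic
  | otherwise     = smallSteps x0
  where
    optimistic = foldl (\x _ -> maybe x id (step x)) x0 [1 .. n]
    smallSteps x = case step x of
                     Just x' | ok x' -> smallSteps x'
                     _               -> x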
-- | Predicate for whether shrinking a single resource can lead to a valid
-- allocation.
sufficesShrinking :: (Instance.Instance -> AllocSolution) -> Instance.Instance
-> FailMode -> Maybe Instance.Instance
sufficesShrinking allocFn inst fm =
case dropWhile (isNothing . asSolution . fst)
. takeWhile (liftA2 (||) (elem fm . asFailures . fst)
(isJust . asSolution . fst))
. map (allocFn &&& id) $
iterateOk (`Instance.shrinkByType` fm) inst
of x:_ -> Just . snd $ x
_ -> Nothing
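-- A library-agnostic shape of the search above (hypothetical names, and a
-- simplification: the real code also tracks the failure mode it is shrinking
-- away from): keep shrinking a spec while it still fails to allocate, and
-- return the first shrunk spec that fits, if any.
shrinkUntilFits :: (spec -> Maybe spec)  -- one shrink step, Nothing when exhausted
                -> (spec -> Bool)        -- does this spec allocate?
                -> spec -> Maybe spec
shrinkUntilFits shrink fits = go
  where
    go s | fits s    = Just s
         | otherwise = shrink s >>= go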
-- | Tiered allocation method.
--
-- This places instances on the cluster, and decreases the spec until
-- we can allocate again. The result will be a list of decreasing
-- instance specs.
tieredAlloc :: AlgorithmOptions -> AllocMethod
tieredAlloc opts nl il limit newinst allocnodes ixes cstats =
case iterateAlloc opts nl il limit newinst allocnodes ixes cstats of
Bad s -> Bad s
Ok (errs, nl', il', ixes', cstats') ->
let newsol = Ok (errs, nl', il', ixes', cstats')
ixes_cnt = length ixes'
(stop, newlimit) = case limit of
Nothing -> (False, Nothing)
Just n -> (n <= ixes_cnt,
Just (n - ixes_cnt))
sortedErrs = map fst $ sortBy (comparing snd) errs
suffShrink = sufficesShrinking
(fromMaybe emptyAllocSolution
. flip (tryAlloc opts nl' il') allocnodes)
newinst
bigSteps = filter isJust . map suffShrink . reverse $ sortedErrs
progress (Ok (_, _, _, newil', _)) (Ok (_, _, _, newil, _)) =
length newil' > length newil
progress _ _ = False
in if stop then newsol else
let newsol' = case Instance.shrinkByType newinst . last
$ sortedErrs of
Bad _ -> newsol
Ok newinst' -> tieredAlloc opts nl' il' newlimit
newinst' allocnodes ixes' cstats'
in if progress newsol' newsol then newsol' else
case bigSteps of
Just newinst':_ -> tieredAlloc opts nl' il' newlimit
newinst' allocnodes ixes' cstats'
_ -> newsol
-- * Formatting functions
-- | Given the original and final nodes, computes the relocation description.
computeMoves :: Instance.Instance -- ^ The instance to be moved
-> String -- ^ The instance name
-> IMove -- ^ The move being performed
-> String -- ^ New primary
-> String -- ^ New secondary
-> (String, [String])
-- ^ Tuple of moves and commands list; the moves string contains
-- either @/f/@ for failover or @/r:name/@ for replace
-- secondary, while the command list holds gnt-instance
-- commands (without that prefix), e.g. \"@failover instance1@\"
computeMoves i inam mv c d =
case mv of
Failover -> ("f", [mig])
FailoverToAny _ -> (printf "fa:%s" c, [mig_any])
FailoverAndReplace _ -> (printf "f r:%s" d, [mig, rep d])
ReplaceSecondary _ -> (printf "r:%s" d, [rep d])
ReplaceAndFailover _ -> (printf "r:%s f" c, [rep c, mig])
ReplacePrimary _ -> (printf "f r:%s f" c, [mig, rep c, mig])
where morf = if Instance.isRunning i then "migrate" else "failover"
mig = printf "%s -f %s" morf inam::String
mig_any = printf "%s -f -n %s %s" morf c inam::String
rep n = printf "replace-disks -n %s %s" n inam::String
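-- A schematic example of the output of 'computeMoves' above (instance and
-- node names are made up): for an instance @inst1@ and a @ReplaceSecondary@
-- move to node @nodeC@, the result is
-- ("r:nodeC", ["replace-disks -n nodeC inst1"]).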
-- | Converts a placement to string format.
printSolutionLine :: Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> Int -- ^ Maximum node name length
-> Int -- ^ Maximum instance name length
-> Placement -- ^ The current placement
-> Int -- ^ The index of the placement in
-- the solution
-> (String, [String])
printSolutionLine nl il nmlen imlen plc pos =
let pmlen = (2*nmlen + 1)
(i, p, s, mv, c) = plc
old_sec = Instance.sNode inst
inst = Container.find i il
inam = Instance.alias inst
npri = Node.alias $ Container.find p nl
nsec = Node.alias $ Container.find s nl
opri = Node.alias $ Container.find (Instance.pNode inst) nl
osec = Node.alias $ Container.find old_sec nl
(moves, cmds) = computeMoves inst inam mv npri nsec
-- FIXME: this should also (or instead) check the disk template
ostr = if old_sec == Node.noSecondary
then printf "%s" opri::String
else printf "%s:%s" opri osec::String
nstr = if s == Node.noSecondary
then printf "%s" npri::String
else printf "%s:%s" npri nsec::String
in (printf " %3d. %-*s %-*s => %-*s %12.8f a=%s"
pos imlen inam pmlen ostr pmlen nstr c moves,
cmds)
-- | Return the instance and involved nodes in an instance move.
--
-- Note that the output list length can vary, and is neither required
-- nor guaranteed to be of any specific length.
involvedNodes :: Instance.List -- ^ Instance list, used for retrieving
-- the instance from its index; note
-- that this /must/ be the original
-- instance list, so that we can
-- retrieve the old nodes
-> Placement -- ^ The placement we're investigating,
-- containing the new nodes and
-- instance index
-> [Ndx] -- ^ Resulting list of node indices
involvedNodes il plc =
let (i, np, ns, _, _) = plc
inst = Container.find i il
in nub . filter (>= 0) $ [np, ns] ++ Instance.allNodes inst
-- | From two adjacent cluster tables, get the list of moves that transitions
-- one into the other.
getMoves :: (Table, Table) -> [MoveJob]
getMoves (Table _ initial_il _ initial_plc, Table final_nl _ _ final_plc) =
let
plctoMoves (plc@(idx, p, s, mv, _)) =
let inst = Container.find idx initial_il
inst_name = Instance.name inst
affected = involvedNodes initial_il plc
np = Node.alias $ Container.find p final_nl
ns = Node.alias $ Container.find s final_nl
(_, cmds) = computeMoves inst inst_name mv np ns
in (affected, idx, mv, cmds)
in map plctoMoves . reverse . drop (length initial_plc) $ reverse final_plc
-- | Inner function for splitJobs, which either appends the next job to
-- the current jobset or starts a new jobset.
mergeJobs :: ([JobSet], [Ndx]) -> MoveJob -> ([JobSet], [Ndx])
mergeJobs ([], _) n@(ndx, _, _, _) = ([[n]], ndx)
mergeJobs (cjs@(j:js), nbuf) n@(ndx, _, _, _)
| null (ndx `intersect` nbuf) = ((n:j):js, ndx ++ nbuf)
| otherwise = ([n]:cjs, ndx)
-- | Break a list of moves into independent groups. Note that this
-- will reverse the order of jobs.
splitJobs :: [MoveJob] -> [JobSet]
splitJobs = fst . foldl mergeJobs ([], [])
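-- A worked sketch of 'splitJobs' above (job payloads elided, node indices
-- made up): given three moves affecting nodes [1,2], [3,4] and [2,5]
-- respectively, the first two are node-disjoint and share a jobset, while the
-- third touches node 2 again and starts a new one, so the (reversed) result
-- has the shape [[job3], [job2, job1]].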
-- | Given a list of commands, prefix them with @gnt-instance@ and
-- also beautify the display a little.
formatJob :: Int -> Int -> (Int, MoveJob) -> [String]
formatJob jsn jsl (sn, (_, _, _, cmds)) =
let out =
printf " echo job %d/%d" jsn sn:
printf " check":
map (" gnt-instance " ++) cmds
in if sn == 1
then ["", printf "echo jobset %d, %d jobs" jsn jsl] ++ out
else out
-- | Given a list of jobsets, render them as a listing of @gnt-instance@
-- commands, slightly beautified for display.
formatCmds :: [JobSet] -> String
formatCmds =
unlines .
concatMap (\(jsn, js) -> concatMap (formatJob jsn (length js))
(zip [1..] js)) .
zip [1..]
-- | Print the node list.
printNodes :: Node.List -> [String] -> String
printNodes nl fs =
let fields = case fs of
[] -> Node.defaultFields
"+":rest -> Node.defaultFields ++ rest
_ -> fs
snl = sortBy (comparing Node.idx) (Container.elems nl)
(header, isnum) = unzip $ map Node.showHeader fields
in printTable "" header (map (Node.list fields) snl) isnum
-- | Print the instance list.
printInsts :: Node.List -> Instance.List -> String
printInsts nl il =
let sil = sortBy (comparing Instance.idx) (Container.elems il)
helper inst = [ if Instance.isRunning inst then "R" else " "
, Instance.name inst
, Container.nameOf nl (Instance.pNode inst)
, let sdx = Instance.sNode inst
in if sdx == Node.noSecondary
then ""
else Container.nameOf nl sdx
, if Instance.autoBalance inst then "Y" else "N"
, printf "%3d" $ Instance.vcpus inst
, printf "%5d" $ Instance.mem inst
, printf "%5d" $ Instance.dsk inst `div` 1024
, printf "%5.3f" lC
, printf "%5.3f" lM
, printf "%5.3f" lD
, printf "%5.3f" lN
]
where DynUtil lC lM lD lN = Instance.util inst
header = [ "F", "Name", "Pri_node", "Sec_node", "Auto_bal"
, "vcpu", "mem" , "dsk", "lCpu", "lMem", "lDsk", "lNet" ]
isnum = False:False:False:False:False:repeat True
in printTable "" header (map helper sil) isnum
-- * Node group functions
-- | Computes the group of an instance.
instanceGroup :: Node.List -> Instance.Instance -> Result Gdx
instanceGroup nl i =
let sidx = Instance.sNode i
pnode = Container.find (Instance.pNode i) nl
snode = if sidx == Node.noSecondary
then pnode
else Container.find sidx nl
pgroup = Node.group pnode
sgroup = Node.group snode
in if pgroup /= sgroup
then fail ("Instance placed accross two node groups, primary " ++
show pgroup ++ ", secondary " ++ show sgroup)
else return pgroup
-- | Compute the list of badly allocated instances (split across node
-- groups).
findSplitInstances :: Node.List -> Instance.List -> [Instance.Instance]
findSplitInstances nl =
filter (not . isOk . instanceGroup nl) . Container.elems
|
grnet/snf-ganeti
|
src/Ganeti/HTools/Cluster.hs
|
bsd-2-clause
| 48,814
| 0
| 23
| 15,251
| 10,256
| 5,592
| 4,664
| -1
| -1
|
module NinetyNine where
myRLE :: Eq a => [a] -> [(Int, a)]
myRLE = foldr rlencoder []
where rlencoder x acc
| null acc = [(1, x)]
| x == (snd (head acc)) = ((succ (fst (head acc))), x):(tail acc)
| otherwise = (1, x):acc
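-- A small usage sketch (the name below is only for illustration): equal
-- adjacent elements are grouped with their run length, e.g.
-- myRLE "aaabcc" == [(3,'a'),(1,'b'),(2,'c')].
myRLEExample :: Bool
myRLEExample = myRLE "aaabcc" == [(3, 'a'), (1, 'b'), (2, 'c')]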
|
naphthalene/haskell-99onthewall
|
ten/10.hs
|
bsd-2-clause
| 265
| 0
| 15
| 89
| 152
| 80
| 72
| 7
| 1
|
module Helpful.Random where
import Control.Monad
import System.Random
randomRsIO :: (Random a) => (a, a) -> IO [a]
randomRsIO range = do
gen <- newStdGen
return $ randomRs range gen
-- Functions to get random values of a bounded, enumerated data type.
beRandomR :: (Enum a, RandomGen g) => (a, a) -> g -> (a, g)
beRandomR (l, u) =
(\(x,y) -> (toEnum x, y)) . randomR (fromEnum l, fromEnum u)
beRandomRs :: (Enum a, RandomGen g) => (a, a) -> g -> [a]
beRandomRs (l, u) = map toEnum . randomRs (fromEnum l, fromEnum u)
beRandomRIO :: (Enum a) => (a, a) -> IO a
beRandomRIO range = getStdRandom (beRandomR range)
beRandomRsIO :: (Enum a) => (a, a) -> IO [a]
beRandomRsIO range = do
gen <- newStdGen
return $ beRandomRs range gen
beRandom :: (Bounded a, Enum a, RandomGen g) => g -> (a, g)
beRandom = beRandomR (minBound, maxBound)
beRandoms :: (Bounded a, Enum a, RandomGen g) => g -> [a]
beRandoms g = (\(x,g') -> x : beRandoms g') (beRandom g)
beRandomIO :: (Bounded a, Enum a) => IO a
beRandomIO = getStdRandom beRandom
-- Functions to pick random values from a given list of values.
oneOf :: (RandomGen g) => [a] -> g -> (a, g)
oneOf domain = (\(x, y) -> (domain!!x, y)) . (randomR (0, (length domain) - 1))
oneOfIO :: [a] -> IO a
oneOfIO = getStdRandom . oneOf
randomsOf :: (RandomGen g) => [a] -> g -> [a]
randomsOf domain = map (domain!!) . (randomRs (0, (length domain) - 1))
randomsOfIO :: [a] -> IO [a]
randomsOfIO domain = do
gen <- newStdGen
return $ randomsOf domain gen
shuffle :: (RandomGen g) => [a] -> g -> ([a], g)
shuffle [] gen = ([], gen)
shuffle lst gen = (shuffle1 ++ x:shuffle2, newGen)
where
(shuffle1, newGen) = shuffle part1 gen'1
(shuffle2, _) = shuffle part2 gen'2
(gen'1, gen'2) = split gen'
(n, gen') = oneOf [0..length lst - 1] gen
(part1, x:part2) = splitAt n lst
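-- Small usage sketches (names below are illustrative only): shuffling a list
-- with a fixed generator gives a reproducible permutation, while the IO
-- helpers draw a fresh generator each time.
shuffleExample :: [Int]
shuffleExample = fst (shuffle [1 .. 5] (mkStdGen 42))

rollDieIO :: IO Int
rollDieIO = oneOfIO [1 .. 6]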
|
spatial-reasoning/zeno
|
src/Helpful/Random.hs
|
bsd-2-clause
| 1,865
| 0
| 11
| 405
| 903
| 489
| 414
| 42
| 1
|
{-# LANGUAGE DeriveGeneric #-}
module Data.AhoCorasick
( ACMachine
, State(..)
, Match(..)
, construct
, constructWithValues
, root
, run
, step
, renderGraph
) where
import Control.Applicative
import Control.Monad
import Data.Hashable (Hashable)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as Map
import Data.List
import Data.Maybe
import Data.Vector (Vector)
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import GHC.Generics (Generic)
data ACMachine a v = ACMachine (Goto a) Failure (Output v)
type Goto a = Vector (HashMap a State)
type Failure = Vector State
type Output v = Vector [(Int, v)]
type GotoMap a = HashMap State (HashMap a State)
newtype State = State Int
deriving (Eq, Generic)
instance Hashable State
data Match v = Match
{ matchPos :: Int
, matchValue :: v
} deriving (Show)
construct :: (Eq a, Hashable a) => [[a]] -> ACMachine a [a]
construct ps = constructWithValues $ zip ps ps
constructWithValues :: (Eq a, Hashable a) => [([a], v)] -> ACMachine a v
constructWithValues pvs = ACMachine g f o
where
(m, gotoMap) = buildGoto ps
n = m + 1
g = toGotoArray n gotoMap
f = buildFailure g
o = buildOutput pvs g f
ps = map fst pvs
toGotoArray :: Int -> GotoMap a -> Goto a
toGotoArray n m = V.generate n (fromMaybe Map.empty . flip Map.lookup m . State)
root :: State
root = State 0
run :: (Eq a, Hashable a) => ACMachine a v -> [a] -> [Match v]
run acm = go root . zip [1..]
where
go _ [] = []
go s ((i, x):ixs) = map toMatch vs ++ go s' ixs
where
toMatch (l, v) = Match { matchPos = i - l + 1, matchValue = v }
(s', vs) = step acm x s
step :: (Eq a, Hashable a) => ACMachine a v -> a -> State -> (State, [(Int, v)])
step (ACMachine g f o) x s = (s', output s')
where
s' = head $ mapMaybe (flip goto x) $ iterate failure s
goto (State 0) x' = Map.lookup x' (g V.! 0) <|> Just root
goto (State i) x' = Map.lookup x' (g V.! i)
failure (State i) = f V.! i
output (State i) = o V.! i
buildOutput :: (Eq a, Hashable a) => [([a], v)] -> Goto a -> Failure -> Output v
buildOutput pvs g f = V.create $ do
o <- MV.replicate (V.length g) []
forM_ (map (fromJust . toKV) pvs) $ \(State i, vs) ->
MV.write o i vs
forM_ (tail $ toBFList g) $ \(State i) -> do
let ts = Map.toList (g V.! i)
forM_ ts $ \(_, s'@(State j)) -> do
let (State k) = failure s'
vs <- MV.read o j
vsf <- MV.read o k
let vs' = vsf `seq` vs ++ vsf
MV.write o j vs'
return o
where
failure (State i) = f V.! i
toKV (p, v) = do
s <- finalState g root p
return (s, [(length p, v)])
finalState :: (Eq a, Hashable a) => Goto a -> State -> [a] -> Maybe State
finalState g = foldM (\(State i) x -> Map.lookup x (g V.! i))
buildGoto :: (Eq a, Hashable a) => [[a]] -> (Int, GotoMap a)
buildGoto = foldl' (flip extend) (0, Map.empty)
extend :: (Eq a, Hashable a) => [a] -> (Int, GotoMap a) -> (Int, GotoMap a)
extend = go root
where
go _ [] nm = nm
go s (x:xs) nm@(n, m) = case Map.lookup x sm of
Nothing -> go s' xs (n', m')
where
s' = State n' -- root is 0
n' = n + 1
sm' = Map.insert x s' sm
m' = Map.insert s sm' m
Just s' -> go s' xs nm
where
sm = fromMaybe Map.empty $ Map.lookup s m
buildFailure :: (Eq a, Hashable a) => Goto a -> Failure
buildFailure g = V.create $ do
f <- MV.new (V.length g)
MV.write f 0 (error "Referencing the failure transition from the root node.")
forM_ (toBFList g) $ \s@(State i) -> do
let ts = Map.toList (g V.! i)
forM_ ts $ \(x, State j) -> do
s' <- failureState f s x
MV.write f j s'
return f
where
failureState _ (State 0) _ = return root
failureState f s x = go =<< failure s
where
go s' = case goto s' x of
Nothing -> go =<< failure s'
Just s'' -> return s''
failure (State i) = MV.read f i
goto (State 0) x = Map.lookup x (g V.! 0) <|> Just root
goto (State i) x = Map.lookup x (g V.! i)
toBFList :: Goto a -> [State]
toBFList g = ss0
where
ss0 = root : go 1 ss0
go 0 _ = []
go n (State i : ss) = children ++ go (n - 1 + Map.size sm) ss
where
sm = g V.! i
children = Map.elems sm
go _ _ = error "toBFList: invalid state"
renderGraph :: ACMachine Char [Char] -> String
renderGraph (ACMachine g f o) =
graph "digraph" $ statements [
attr "graph" [("rankdir", "LR")]
, statements $ map state (toBFList g)
, statements $ map stateWithOutput $ filter (not . null . snd) $ zip (map State [0..]) (V.toList o)
, statements $ map (\s@(State i) -> statements $ map (uncurry $ transEdge s) $ Map.toList $ g V.! i) (toBFList g)
, statements $ map (\s@(State i) -> failEdge s $ f V.! i) (tail $ toBFList g)
]
where
statements = intercalate " "
graph typ body = typ ++ " { " ++ body ++ " }"
attr typ attrList = typ ++ " " ++ "[" ++ intercalate "," (map kvStr attrList) ++ "];"
node nid attrList = nid ++ " " ++ "[" ++ intercalate "," (map kvStr attrList) ++ "];"
kvStr (k, v) = k ++ "=" ++ v
state s@(State 0) = node (stateID s) [("shape", "doublecircle")]
state s = node (stateID s) [("shape", "circle")]
stateWithOutput (s, xs) = node (stateID s) [("label", "<" ++ tableHTML (stateID s) ("{" ++ intercalate "," (map snd xs) ++ "}") ++ ">"), ("shape", "none")]
tableHTML row1 row2 = "<table cellborder=\"0\"><tr><td>" ++ row1 ++ "</td></tr><tr><td>" ++ row2 ++ "</td></tr></table>"
stateID (State 0) = "Root"
stateID (State n) = 'S' : show n
transEdge s x s' = stateID s ++ " -> " ++ stateID s' ++ " [label=\"" ++ [x] ++ "\"];"
failEdge s s' = stateID s ++ " -> " ++ stateID s' ++ " [style=dashed, constraint=false];"
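-- A small usage sketch (the identifier below is illustrative, relying only on
-- the API exported above): building a machine for the classic pattern set and
-- scanning "ushers" matches "she", "he" and "hers".
exampleMatches :: [String]
exampleMatches =
  map matchValue (run (construct ["he", "she", "his", "hers"]) "ushers")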
|
yuttie/ac-machine
|
Data/AhoCorasick.hs
|
bsd-3-clause
| 6,146
| 0
| 20
| 1,865
| 2,840
| 1,466
| 1,374
| 141
| 4
|
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE Rank2Types #-}
module Main where
import Imports
import Utils
import Data.Word
import Data.ByteArray (Bytes, ScrubbedBytes, ByteArray)
import qualified Data.ByteArray as B
import qualified Data.ByteArray.Encoding as B
import qualified Data.ByteArray.Parse as Parse
import qualified SipHash
data Backend = BackendByte | BackendScrubbedBytes
deriving (Show,Eq,Bounded,Enum)
allBackends :: [Backend]
allBackends = enumFrom BackendByte
data ArbitraryBS = forall a . ByteArray a => ArbitraryBS a
arbitraryBS :: Int -> Gen ArbitraryBS
arbitraryBS n = do
backend <- elements allBackends
case backend of
BackendByte -> ArbitraryBS `fmap` ((B.pack `fmap` replicateM n arbitrary) :: Gen Bytes)
BackendScrubbedBytes -> ArbitraryBS `fmap` ((B.pack `fmap` replicateM n arbitrary) :: Gen ScrubbedBytes)
arbitraryBSof :: Int -> Int -> Gen ArbitraryBS
arbitraryBSof minBytes maxBytes = choose (minBytes, maxBytes) >>= arbitraryBS
newtype SmallList a = SmallList [a]
deriving (Show,Eq)
instance Arbitrary a => Arbitrary (SmallList a) where
arbitrary = choose (0,8) >>= \n -> SmallList `fmap` replicateM n arbitrary
instance Arbitrary ArbitraryBS where
arbitrary = arbitraryBSof 0 259
newtype Words8 = Words8 { unWords8 :: [Word8] }
deriving (Show,Eq)
instance Arbitrary Words8 where
arbitrary = choose (0, 259) >>= \n -> Words8 <$> replicateM n arbitrary
testGroupBackends :: String -> (forall ba . (Show ba, Eq ba, ByteArray ba) => (ba -> ba) -> [TestTree]) -> TestTree
testGroupBackends x l =
testGroup x
[ testGroup "Bytes" (l withBytesWitness)
, testGroup "ScrubbedBytes" (l withScrubbedBytesWitness)
]
base64Kats =
[ ("pleasure.", "cGxlYXN1cmUu")
, ("leasure.", "bGVhc3VyZS4=")
, ("easure.", "ZWFzdXJlLg==")
, ("asure.", "YXN1cmUu")
, ("sure.", "c3VyZS4=")
]
base64URLKats =
[ ("pleasure.", "cGxlYXN1cmUu")
, ("leasure.", "bGVhc3VyZS4")
, ("easure.", "ZWFzdXJlLg")
, ("asure.", "YXN1cmUu")
, ("sure.", "c3VyZS4")
, ("\DC4\251\156\ETX\217~", "FPucA9l-") -- From RFC4648
, ("\DC4\251\156\ETX\217\DEL", "FPucA9l_")
, ("", "")
]
base16Kats =
[ ("this is a string", "74686973206973206120737472696e67") ]
base32Kats =
[ ("-pleasure.", "FVYGYZLBON2XEZJO")
, ("pleasure.", "OBWGKYLTOVZGKLQ=")
, ("leasure.", "NRSWC43VOJSS4===")
, ("easure.", "MVQXG5LSMUXA====")
, ("asure.", "MFZXK4TFFY======")
, ("sure.", "ON2XEZJO")
, ("ure.", "OVZGKLQ=")
, ("re.", "OJSS4===")
, ("e.", "MUXA====")
, (".", "FY======")
, ("", "")
]
encodingTests witnessID =
[ testGroup "BASE64"
[ testGroup "encode-KAT" encodeKats64
, testGroup "decode-KAT" decodeKats64
]
, testGroup "BASE64URL"
[ testGroup "encode-KAT" encodeKats64URLUnpadded
, testGroup "decode-KAT" decodeKats64URLUnpadded
]
, testGroup "BASE32"
[ testGroup "encode-KAT" encodeKats32
, testGroup "decode-KAT" decodeKats32
]
, testGroup "BASE16"
[ testGroup "encode-KAT" encodeKats16
, testGroup "decode-KAT" decodeKats16
]
]
where
encodeKats64 = map (toTest B.Base64) $ zip [1..] base64Kats
decodeKats64 = map (toBackTest B.Base64) $ zip [1..] base64Kats
encodeKats32 = map (toTest B.Base32) $ zip [1..] base32Kats
decodeKats32 = map (toBackTest B.Base32) $ zip [1..] base32Kats
encodeKats16 = map (toTest B.Base16) $ zip [1..] base16Kats
decodeKats16 = map (toBackTest B.Base16) $ zip [1..] base16Kats
encodeKats64URLUnpadded = map (toTest B.Base64URLUnpadded) $ zip [1..] base64URLKats
decodeKats64URLUnpadded = map (toBackTest B.Base64URLUnpadded) $ zip [1..] base64URLKats
toTest :: B.Base -> (Int, (String, String)) -> TestTree
toTest base (i, (inp, out)) = testCase (show i) $
let inpbs = witnessID $ B.convertToBase base $ witnessID $ B.pack $ unS inp
outbs = witnessID $ B.pack $ unS out
in outbs @=? inpbs
toBackTest :: B.Base -> (Int, (String, String)) -> TestTree
toBackTest base (i, (inp, out)) = testCase (show i) $
let inpbs = witnessID $ B.pack $ unS inp
outbs = B.convertFromBase base $ witnessID $ B.pack $ unS out
in Right inpbs @=? outbs
parsingTests witnessID =
[ testCase "parse" $
let input = witnessID $ B.pack $ unS "xx abctest"
abc = witnessID $ B.pack $ unS "abc"
est = witnessID $ B.pack $ unS "est"
result = Parse.parse ((,,) <$> Parse.take 2 <*> Parse.byte 0x20 <*> (Parse.bytes abc *> Parse.anyByte)) input
in case result of
Parse.ParseOK remaining (_,_,_) -> est @=? remaining
_ -> assertFailure ""
]
main = defaultMain $ testGroup "memory"
[ localOption (QuickCheckTests 5000) $ testGroupBackends "basic" basicProperties
, testGroupBackends "encoding" encodingTests
, testGroupBackends "parsing" parsingTests
, testGroupBackends "hashing" $ \witnessID ->
[ testGroup "SipHash" $ SipHash.tests witnessID
]
]
where
basicProperties witnessID =
[ testProperty "unpack . pack == id" $ \(Words8 l) -> l == (B.unpack . witnessID . B.pack $ l)
, testProperty "self-eq" $ \(Words8 l) -> let b = witnessID . B.pack $ l in b == b
, testProperty "add-empty-eq" $ \(Words8 l) ->
let b = witnessID $ B.pack l
in B.append b B.empty == b
, testProperty "zero" $ \(Positive n) ->
let expected = witnessID $ B.pack $ replicate n 0
in expected == B.zero n
, testProperty "Ord" $ \(Words8 l1) (Words8 l2) ->
compare l1 l2 == compare (witnessID $ B.pack l1) (B.pack l2)
, testProperty "Monoid(mappend)" $ \(Words8 l1) (Words8 l2) ->
mappend l1 l2 == (B.unpack $ mappend (witnessID $ B.pack l1) (B.pack l2))
, testProperty "Monoid(mconcat)" $ \(SmallList l) ->
mconcat (map unWords8 l) == (B.unpack $ mconcat $ map (witnessID . B.pack . unWords8) l)
, testProperty "append (append a b) c == append a (append b c)" $ \(Words8 la) (Words8 lb) (Words8 lc) ->
let a = witnessID $ B.pack la
b = witnessID $ B.pack lb
c = witnessID $ B.pack lc
in B.append (B.append a b) c == B.append a (B.append b c)
, testProperty "concat l" $ \(SmallList l) ->
let chunks = map (witnessID . B.pack . unWords8) l
expected = concatMap unWords8 l
in B.pack expected == witnessID (B.concat chunks)
, testProperty "cons b bs == reverse (snoc (reverse bs) b)" $ \(Words8 l) b ->
let b1 = witnessID (B.pack l)
b2 = witnessID (B.pack (reverse l))
expected = B.pack (reverse (B.unpack (B.snoc b2 b)))
in B.cons b b1 == expected
, testProperty "all == Prelude.all" $ \(Words8 l) b ->
let b1 = witnessID (B.pack l)
p = (/= b)
in B.all p b1 == all p l
, testProperty "any == Prelude.any" $ \(Words8 l) b ->
let b1 = witnessID (B.pack l)
p = (== b)
in B.any p b1 == any p l
, testProperty "singleton b == pack [b]" $ \b ->
witnessID (B.singleton b) == B.pack [b]
]
|
NicolasDP/hs-memory
|
tests/Tests.hs
|
bsd-3-clause
| 7,675
| 0
| 21
| 2,237
| 2,516
| 1,329
| 1,187
| 153
| 2
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
module Ling.Compile.C where
{-
Programs have parameters; the meaning is that no synchronisation is expected.
Namely, if the protocol can "recv" an int on chan c, then
it means we can read this location; similarly for send.
TODO: so far only a naive approach is selected.
Single read parameter:
Short type (less or equal than a word):
void foo(const int c) { const int x0 = c; }
Large type:
void foo((const (const large_type) *) c) { const large_type x0 = *c; }
Another approach would be to avoid copying the arguments when receiving.
Single write parameter:
void foo(const (int *) c) { *c = 42; }
Read then write parameter:
void foo(const (int *) c) { const int x = *c; *c = 42; }
example:
test1 (c : ?int) (d : !int) (e : {!int, ?int}) (f : [!int, !int]) =
recv c x (e(e0,e1) ...) ...
-}
import Prelude hiding (log)
import qualified Data.Map as Map
import Ling.Fwd (fwdProc')
import Ling.Norm hiding (mkCase)
import Ling.Prelude hiding (q)
import Ling.Print
import Ling.Proc (_Pref, _ArrayCs, _NewPatt)
import Ling.Reduce (reduce, reduced)
--import Ling.Rename (hDec)
import Ling.Session
import Ling.Scoped (Scoped(Scoped), ldefs, scoped)
import Ling.Subst (reduceS)
import qualified MiniC.Abs as C
import qualified MiniC.Print as C
type ATyp = (C.Typ, [C.Arr])
type AQTyp = (C.QTyp, [C.Arr])
qQual :: Lens' C.QTyp C.Qual
qQual f (C.QTyp q t) = (`C.QTyp` t) <$> f q
qTyp :: Lens' C.QTyp C.Typ
qTyp f (C.QTyp q t) = (C.QTyp q) <$> f t
aqATyp :: Lens' AQTyp ATyp
aqATyp = qTyp `alongside` id
voidQ :: C.QTyp
voidQ = C.QTyp C.NoQual C.TVoid
tPtr :: Endom ATyp
tPtr = _1 %~ C.TPtr
tVoidPtr :: ATyp
tVoidPtr = (C.TPtr C.TVoid, [])
tArr :: ATyp -> C.Exp -> ATyp
tArr (ty, arrs) e = (ty, C.AArr e : arrs)
tName :: String -> ATyp
tName s = (C.TName (C.TIdent s), [])
tChar :: ATyp
tChar = tName "char"
tInt :: ATyp
tInt = tName "int"
tDouble :: ATyp
tDouble = tName "double"
-- unused
eFld :: C.Exp -> C.Ident -> C.Exp
eFld (C.UOp C.UPtr l) = C.EArw l
eFld l = C.EFld l
lFld :: C.LVal -> C.Ident -> C.LVal
lFld (C.LPtr e) = C.LArw e
lFld e = C.LFld e
isEmptyTyp :: C.Typ -> Bool
isEmptyTyp C.TVoid = True
isEmptyTyp (C.TStr []) = True
isEmptyTyp _ = False
isEmptyQTyp :: C.QTyp -> Bool
isEmptyQTyp (C.QTyp _ t) = isEmptyTyp t
isEmptyAQTyp :: AQTyp -> Bool
isEmptyAQTyp = isEmptyQTyp . fst
dDec :: AQTyp -> C.Ident -> C.Dec
dDec (qt, arrs) x = C.Dec qt x arrs
sDec :: AQTyp -> C.Ident -> C.Init -> [C.Stm]
sDec aqtyp cid ini
| isEmptyAQTyp aqtyp = []
| otherwise = [C.SDec (dDec aqtyp cid) ini]
dSig :: C.Dec -> [C.Dec] -> C.Def
dSig d [] = C.DDec d
dSig d ds = C.DSig d ds
fldI :: Int -> C.Ident
fldI n = C.Ident ("f" ++ show n)
uniI :: Int -> C.Ident
uniI n = C.Ident ("u" ++ show n)
-- Note that we could define `Unique` as `fix Skip`.
data LocKind
= Normal
| Unique
| Skip LocKind
deriving (Show)
data Loc = Loc
{ _locKind :: LocKind
, _locLVal :: C.LVal
}
deriving (Show)
$(makeLenses ''Loc)
locOp :: Endom C.LVal -> Endom Loc
locOp f loc@(Loc k0 lval0) =
case k0 of
Normal -> Loc Normal $ f lval0
Unique -> loc
Skip k1 -> Loc k1 lval0
locSplit :: Loc -> TraverseKind -> [ChanDec] -> [(Channel, Loc)]
locSplit loc@(Loc k0 lval0) _ cds =
case k0 of
Normal ->
case _cdChan <$> cds of
[d] -> [ (d, loc) ]
ds -> [ (d, Loc Normal (lFld lval0 (fldI n)))
| (d,n) <- zip ds [0..]
]
Unique ->
[ (cd ^. cdChan, loc) | cd <- cds ]
Skip k1 ->
[ (cd ^. cdChan, Loc k1 lval0) | cd <- cds ]
locArr :: Loc -> C.Exp -> Loc
locArr loc = flip locOp loc . flip C.LArr
locPtr :: Loc -> Loc
locPtr = locOp C.LPtr
type EVar = Name
data Env =
Env { _locs :: Map Channel Loc
, _evars :: Map EVar C.Ident
, _edefs :: Defs
, _types :: Set Name
, _farr :: Bool
, _ixids :: [C.Ident]
}
deriving (Show)
$(makeLenses ''Env)
scope :: Getter Env (Scoped ())
scope = to $ \env -> Scoped (env ^. edefs) ø ()
addScope :: Scoped a -> Endom Env
addScope s = edefs <>~ s ^. ldefs
basicTypes :: Map Name ATyp
basicTypes = l2m [ (Name n, t) | (n,t) <-
[("Int", tInt)
,("Double", tDouble)
,("String", tPtr tChar)
,("Char", tChar)] ]
primTypes :: Set Name
primTypes = l2s (Name "Vec" : keys basicTypes)
ixIdents :: [C.Ident]
ixIdents = [C.Ident $ "ix" ++ show i | i <- [0 :: Int ..]]
emptyEnv :: Env
emptyEnv = Env ø ø ø primTypes True ixIdents
addChans :: [(Name, Loc)] -> Endom Env
addChans xys env = env & locs %~ Map.union (l2m xys)
rmChan :: Channel -> Endom Env
rmChan c env = env & locs .\\ c
rmChans :: [Channel] -> Endom Env
rmChans = composeMapOf each rmChan
renChan :: Channel -> Channel -> Endom Env
renChan c c' env = env & locs . at c' .~ (env ^. locs . at c)
& rmChan c
addEVar :: Name -> C.Ident -> Endom Env
addEVar x y env
| x == anonName = env
| env ^. evars . hasKey x = error $ "addEVar/IMPOSSIBLE: " ++ show x ++ " is already bound"
| otherwise = env & evars . at x ?~ y
(!) :: Env -> Name -> Loc
(!) = lookupEnv _Name locs
transCon :: Name -> C.Ident
transCon (Name x) = C.Ident ("con_" ++ x)
transName :: Name -> C.Ident
transName n | n == anonName = error "Compile.C.transName: unexpected `_`"
transName (Name x) = C.Ident (concatMap f x ++ "_lin") where
f '#' = "__"
f '\'' = "__"
f '+' = "_plus_"
f '*' = "_times_"
f '/' = "_div_"
f '-' = "_sub_"
f c = [c]
transBoundName :: Show a => Name -> a -> C.Ident
transBoundName x a
| x == anonName = transName $ internalNameFor a # x
| otherwise = transName x
transIxName :: Env -> Name -> (Env, C.Ident)
transIxName env x
| x == anonName = (env & ixids %~ tail, env ^?! ixids . each)
| otherwise = (env, transName x)
transOp :: EVar -> Maybe (Op2 C.Exp)
transOp (Name v) = (\f x y -> C.EParen (f x y)) <$> case v of
"_+_" -> Just C.Add
"_+D_" -> Just C.Add
"_+CD_" -> Just C.Add
"_*_" -> Just C.Mul
"_*D_" -> Just C.Mul
"_*CD_" -> Just C.Mul
"_/_" -> Just C.Div
"_/D_" -> Just C.Div
"_/CD_" -> Just C.Div
"_%_" -> Just C.Mod
"_%D_" -> Just C.Mod
"_%CD_" -> Just C.Mod
"_-_" -> Just C.Sub
"_-D_" -> Just C.Sub
"_-CD_" -> Just C.Sub
"_==D_" -> Just C.Eq
"_==I_" -> Just C.Eq
"_==C_" -> Just C.Eq
"_==CD_"-> Just C.Eq
"_<=D_" -> Just C.Le
"_<=I_" -> Just C.Le
"_<=C_" -> Just C.Le
"_<=CD_"-> Just C.Le
"_>=D_" -> Just C.Ge
"_>=I_" -> Just C.Ge
"_>=C_" -> Just C.Ge
"_>=CD_"-> Just C.Ge
"_<D_" -> Just C.Lt
"_<I_" -> Just C.Lt
"_<C_" -> Just C.Lt
"_<CD_" -> Just C.Lt
"_>D_" -> Just C.Gt
"_>I_" -> Just C.Gt
"_>C_" -> Just C.Gt
"_>CD_" -> Just C.Gt
_ -> Nothing
transEVar :: Env -> EVar -> C.Exp
transEVar env y = C.EVar (env ^. evars . at y ?| transName y)
mkCase :: C.Ident -> [C.Stm] -> C.Branch
mkCase = C.Case . C.EVar
switch :: C.Exp -> [(C.Ident,[C.Stm])] -> C.Stm
switch e brs = C.SSwi e (uncurry mkCase <$> brs)
isEVar :: C.Exp -> Bool
isEVar C.EVar{} = True
isEVar _ = False
switchE :: C.Exp -> [(C.Ident,C.Exp)] -> C.Exp
switchE e brs
-- If there are fewer than 3 branches then only comparisons are done;
-- if `e` is a variable then it is cheap to compare it multiple times
| length brs < 3 || isEVar e =
case brs of
[] -> e -- dynamically impossible because of `e`, so just return `e`
[(_i0,e0)] -> e0 -- x must be equal to _i0, to go directly to e0
(i0,e0):ies -> C.Cond (C.Eq e (C.EVar i0)) e0 (switchE e ies)
| otherwise =
-- This could be replaced by a warning instead
transErrC "switchE" e
transLiteral :: Literal -> C.Literal
transLiteral l = case l of
LInteger n -> C.LInteger n
LDouble d -> C.LDouble d
LString s -> C.LString s
LChar c -> C.LChar c
eApp :: C.Ident -> [C.Exp] -> C.Exp
eApp x es = C.EApp (C.EVar x) es
transTerm :: Env -> Term -> C.Exp
transTerm env0 tm0 =
let
stm1 = reduce (env0 ^. scope $> tm0) ^. reduced
env1 = env0 & addScope stm1
tm1 = stm1 ^. scoped
in case tm1 of
Def _ (Name "cconst") [_,Lit (LString c)] ->
C.EVar (C.Ident c)
Def _ (Name "ccall") (_:Lit (LString c):es) ->
eApp (C.Ident c) (transTerm env1 <$> es)
Def _ f es0
| env1 ^. types . contains f -> dummyTyp
| otherwise ->
case transTerm env1 <$> es0 of
[] -> transEVar env1 f
[e0,e1] | Just d <- transOp f -> d e0 e1
es -> eApp (transName f) es
Let{} -> error $ "IMPOSSIBLE: Let after reduce (" ++ ppShow tm1 ++ ")"
Lit l -> C.ELit (transLiteral l)
Lam{} -> transErr "transTerm/Lam" tm1
Con n -> C.EVar (transCon n)
Case t brs -> switchE (transTerm env1 t)
(bimap transCon (transTerm env1) <$> brs)
Proc{} -> transErr "transTerm/Proc" tm1
TFun{} -> dummyTyp
TSig{} -> dummyTyp
TProto{} -> dummyTyp
TTyp -> dummyTyp
TSession{} -> dummyTyp -- so far one cannot match on sessions
-- Types are erased to 0
dummyTyp :: C.Exp
dummyTyp = C.ELit (C.LInteger 0)
transProc :: Env -> Proc -> [C.Stm]
transProc env0 proc0 =
let
proc1 = reduce (env0 ^. scope $> proc0) ^. reduced
env1 = env0 & addScope proc1
in
case proc1 ^. scoped of
proc2 `Dot` proc3
| Just pref <- proc2 ^? _Pref -> transPref env1 pref proc3
| otherwise -> transProc env1 proc2 ++ transProc env1 proc3
Act act -> transAct env1 act ^. _2
LetP defs proc2 ->
transProc (env1 & edefs <>~ defs) proc2
Procs (Prll procs) ->
case procs of
[] -> []
[p] -> transErr "transProc/Procs/[_] not a normal form" p
_ -> transErr "transProc/Procs: parallel processes should be in sequence" (Prll procs)
Replicate _k r xi proc2 ->
[stdFor i (transRFactor env1 r) $
transProc env3 proc2]
where
(env2, i) = transIxName env1 xi
slice l = locArr l $ C.EVar i
env3 = env2 & locs . mapped %~ slice
& addEVar xi i
transLVal :: C.LVal -> C.Exp
transLVal (C.LVar x) = C.EVar x
transLVal (C.LFld l f) = eFld (transLVal l) f
transLVal (C.LArw l f) = C.EArw (transLVal l) f
transLVal (C.LArr l i) = C.EArr (transLVal l) i
transLVal (C.LPtr l) = ePtr (transLVal l)
ePtr :: Endom C.Exp
ePtr = C.UOp C.UPtr
transErr :: Print a => String -> a -> b
transErr msg v = error $ msg ++ "\n" ++ pretty v
transErrC :: C.Print a => String -> a -> b
transErrC msg v = error $ msg ++ "\n" ++ C.render (C.prt 0 v)
transNewPatt :: Env -> NewPatt -> (Env, [C.Stm])
transNewPatt env = \case
NewChans _ cds -> (env', sDec typ cid C.NoInit)
where
-- Instead of extractSession we should either have a more general
-- mechanism to extract the underlying (session then) type.
-- Ideally type inference/checking should fill the holes.
cs = cds ^.. each . cdChan
cOSs = cds ^.. each . cdSession
s = log $ extractSession cOSs -- this can be wrong with allocations of
-- the form [:S,[:~S,S:]^n,~S:]
cid = transName (cs ^?! _head)
i = C.LVar cid
typ = transRSession env s
env' = addChans [(cd ^. cdChan, Loc (k cd) i) | cd <- cds] env
k cd | cd ^. cdRepl == ø = Normal
| otherwise = Skip (Skip Normal)
NewChan c ty -> (env', sDec typ cid C.NoInit)
where
cid = transName c
l = Loc Unique $ C.LVar cid
typ = transCTyp env C.NoQual ty
env' = addChans [(c,l)] env
-- Implement the effect of an action over:
-- * an environment
-- * a list of statements
transAct :: Env -> Act -> (Env, [C.Stm])
transAct env act =
case act of
Nu _ann cpatt ->
case cpatt ^? _NewPatt of
Just newpatt -> transNewPatt env newpatt
Nothing -> error "Sequential.transAct: unsupported `new`"
-- Issue #24: the annotation should be used to decide
-- operational choices on channel allocation.
Split c pat ->
case pat ^? _ArrayCs of
Just (k, ds) -> (transSplit c k ds env, [])
Nothing -> error "Sequential.transAct unsupported `split`"
Send c _ expr ->
(env, [C.SPut ((env ! c) ^. locLVal) (transTerm env expr)])
Recv c (Arg x typ) ->
(addEVar x y env, sDec ctyp y cinit)
where
ctyp = transMaybeCTyp (env & farr .~ False) C.QConst typ
y = transBoundName x (c, env) -- PERF (this is serializing the whole env)
cinit = C.SoInit (transLVal ((env ! c) ^. locLVal))
Ax s cs ->
-- TODO this should disappear once Ax becomes a term level primitive.
case fwdProc' (reduceS . (env ^. scope $>)) s cs of
Act (Ax{}) -> transErr "transAct/Ax" act
proc0 -> (rmChans cs env, transProc env proc0)
At{} ->
transErr "transAct/At (should have been reduced before)" act
-- The actions in this prefix are in parallel and thus can be reordered
transPref :: Env -> Pref -> Proc -> [C.Stm]
transPref env (Prll acts0) proc1 =
case acts0 of
[] -> transProc env proc1
act:acts ->
let (env', actStm) = transAct env act in
actStm ++ transPref env' (Prll acts) proc1
{- stdFor i t body ~~~> for (int i = 0; i < t; i = i + 1) { body } -}
stdFor :: C.Ident -> C.Exp -> [C.Stm] -> C.Stm
stdFor i t =
C.SFor (C.SDec (C.Dec (C.QTyp C.NoQual (C.TName (C.TIdent "int"))) i []) (C.SoInit (C.ELit (C.LInteger 0))))
(C.Lt (C.EVar i) t)
(C.SPut (C.LVar i) (C.Add (C.EVar i) (C.ELit (C.LInteger 1))))
{- See protoLoc.locSplit about the special case -}
transSplit :: Name -> TraverseKind -> [ChanDec] -> Endom Env
transSplit c k cds env = rmChan c $ addChans (locSplit (env ! c) k cds) env
unionT :: [ATyp] -> ATyp
unionT ts
| Just t <- theUniq ts = t
| otherwise = (u, [])
where u = C.TUni [ C.FFld t (uniI i) arrs | (i,(t,arrs)) <- zip [0..] ts ]
rwQual :: RW -> C.Qual
rwQual Read = C.QConst
rwQual Write = C.NoQual
unionQual :: Op2 C.Qual
unionQual C.QConst C.QConst = C.QConst
unionQual _ _ = C.NoQual
unionQuals :: [C.Qual] -> C.Qual
unionQuals = foldr unionQual C.QConst
unionQ :: [AQTyp] -> AQTyp
unionQ ts = (_1 %~ C.QTyp (unionQuals [ q | (C.QTyp q _, _) <- ts ]))
(unionT [ (t,a) | (C.QTyp _ t, a) <- ts, not (isEmptyTyp t) ])
{- See protoLoc.locSplit about the special case -}
tupQ :: [AQTyp] -> AQTyp
tupQ [t] = t
tupQ ts = (C.QTyp (unionQuals [ q | (C.QTyp q _, _) <- ts ])
(C.TStr [ C.FFld t (fldI i) arrs | (i,(C.QTyp _ t,arrs)) <- zip [0..] ts ])
,[])
unsupportedTyp :: Typ -> ATyp
unsupportedTyp ty = trace ("[WARNING] Unsupported type " ++ pretty ty) tVoidPtr
transTyp :: Env -> Typ -> ATyp
transTyp env0 ty0 =
let
sty1 = reduce (env0 ^. scope $> ty0) ^. reduced
env1 = env0 & addScope sty1
ty1 = sty1 ^. scoped
in case ty1 of
Def _ x es
| null es, Just t <- Map.lookup x basicTypes -> t
| otherwise ->
case (unName # x, es) of
("ctype", [e])
| Lit (LString s) <- reduce (env1 ^. scope $> e) ^. reduced . scoped ->
tName s
("Vec", [a,e])
| env1 ^. farr, Just i <- reduce (env1 ^. scope $> e) ^? reduced . scoped . _Lit . _LInteger ->
-- Here we could use transTerm if we could still fall back on a
-- pointer type.
tArr (transTyp env1 a) (C.ELit (C.LInteger i))
| otherwise -> tPtr (transTyp env1 a)
_ -> unsupportedTyp ty1
Let{} -> error $ "IMPOSSIBLE: Let after reduce (" ++ ppShow ty1 ++ ")"
TTyp{} -> tInt -- <- types are erased to 0
Case{} -> unsupportedTyp ty1
TProto{} -> unsupportedTyp ty1
TFun{} -> unsupportedTyp ty1
TSig{} -> unsupportedTyp ty1
TSession{} -> unsupportedTyp ty1
Lam{} -> transErr "transTyp: Not a type: Lam" ty1
Lit{} -> transErr "transTyp: Not a type: Lit" ty1
Con{} -> transErr "transTyp: Not a type: Con" ty1
Proc{} -> transErr "transTyp: Not a type: Proc" ty1
transCTyp :: Env -> C.Qual -> Typ -> AQTyp
transCTyp env qual = (_1 %~ C.QTyp qual) . transTyp env
transMaybeCTyp :: Env -> C.Qual -> Maybe Typ -> AQTyp
transMaybeCTyp env qual = \case
Nothing -> error "transMabyeCTyp: Missing type annotation"
Just ty -> transCTyp env qual ty
transSession :: Env -> Session -> AQTyp
transSession env x = case x of
IO rw (Arg n ty) s
| n == anonName -> unionQ [transMaybeCTyp env (rwQual rw) ty, transSession env s]
| otherwise -> transErr "Cannot compile a dependent session (yet): " x
Array _ ss -> tupQ (transSessions env ss)
TermS p t ->
let t' = reduce ((env ^. scope) $> t) ^. reduced in
case t' ^. scoped of
TSession s -> transSession (env & addScope t') (sessionOp p s)
ty -> unsupportedTyp ty & _1 %~ C.QTyp C.NoQual
transRFactor :: Env -> RFactor -> C.Exp
transRFactor env (RFactor t) = transTerm env t
transRSession :: Env -> RSession -> AQTyp
transRSession env (s `Repl` r)
| litR1 `is` r = transSession env s
| otherwise = transSession env s & aqATyp %~ (`tArr` transRFactor env r)
transSessions :: Env -> Sessions -> [AQTyp]
transSessions env = map (transRSession env) . view _Sessions
isPtrTyp :: C.Typ -> Bool
isPtrTyp (C.TPtr _) = True
isPtrTyp _ = False
isPtrQTyp :: AQTyp -> Bool
isPtrQTyp (C.QTyp _ t, []) = isPtrTyp t
isPtrQTyp _ = True
-- Turns a type into a pointer unless it is one already.
mkPtrTyp :: AQTyp -> (AQTyp, Endom Loc)
mkPtrTyp ctyp
| isPtrQTyp ctyp = (ctyp, id)
| otherwise = (ctyp & aqATyp %~ tPtr, locPtr)
transChanDec :: Env -> ChanDec -> (C.Dec , (Channel, Loc))
transChanDec env (ChanDec c _ (Just session)) =
(dDec ctyp d, (c, trloc (Loc Normal (C.LVar d))))
where
d = transName c
(ctyp, trloc) = mkPtrTyp (transRSession env session)
transChanDec _ (ChanDec c _ Nothing)
= transErr "transChanDec: TODO No Session for channel:" c
transMkProc :: Env -> [ChanDec] -> Proc -> ([C.Dec], [C.Stm])
transMkProc env0 cs proc0 = (fst <$> news, transProc env proc0)
where
news = transChanDec env0 <$> cs
env = addChans (snd <$> news) env0
-- Of course this does not properly handle dependent types
transSig :: Env -> Name -> Maybe Typ -> Term -> [C.Def]
transSig env0 f mty0 tm
| Def Undefined f' [] <- tm, f == f' =
case mty0 of
Nothing -> error "IMPOSSIBLE transSig missing type signature"
Just ty0 ->
let
go env1 ty1 l args =
let
sty2 = reduce (env1 ^. scope $> ty1) ^. reduced
env2 = env1 & addScope sty2
in
case sty2 ^. scoped of
TFun (Arg xn ms) t ->
let n = transBoundName xn (f, l) in
go (addEVar xn n env2) t (l + 1)
(dDec (transMaybeCTyp env2 C.QConst ms) n : args)
ty2 ->
[dSig (dDec (transCTyp env2 C.NoQual ty2) (transName f)) (reverse args)]
in go env0 ty0 (0 :: Int) []
| otherwise =
let
stm = reduce (env0 ^. scope $> tm) ^. reduced
env1 = env0 & addScope stm
in
case stm ^. scoped of
Proc cs proc0 -> [uncurry (C.DDef (C.Dec voidQ (transName f) [])) (transMkProc env1 cs proc0)]
_ -> trace ("[WARNING] Skipping compilation of unsupported definition " ++ pretty f) []
transDec :: Env -> Dec -> (Env, [C.Def])
transDec env dec = case {-hDec-} dec of
Sig d ty tm -> (env & edefs . at d ?~ Ann ty tm, transSig env d ty tm)
Dat _d _cs ->
(env, []) -- TODO typedef ? => [C.TEnum (C.EEnm . transCon <$> cs)]
Assert _a ->
(env, []) -- could be an assert.. but why?
transProgram :: Program -> C.Prg
transProgram (Program decs) =
C.PPrg (mapAccumL transDec emptyEnv decs ^.. _2 . each . each)
-- -}
-- -}
-- -}
-- -}
|
np/ling
|
Ling/Compile/C.hs
|
bsd-3-clause
| 20,193
| 0
| 26
| 5,756
| 7,927
| 4,020
| 3,907
| -1
| -1
|
{-
Problem 40
Product of decimals in a long number
Result
210
2.2 s
-}
module Problem40 (solution) where
import CommonFunctions
import Control.Monad
import Control.Monad.RWS
solution = product' $ map (concatInts !!) d_n
where
-- Concatenated decimals
-- = [1,2,3,4,5,6,7,8,9,1,0,1,1,1,2,1,3,1,4,1,5,...]
concatInts = [1..] >>= explodeInt10
-- Indices of d to be considered
-- = [1, 10, 100, 1000, 10000, 100000, 1000000]
d_n = [10^n - 1 | n <- [0..6]] -- -1 because lists are 0-indexed
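-- A self-contained variant of the same idea (the names below are local to
-- this sketch and do not rely on CommonFunctions): the digits of the
-- concatenated integers are obtained via show, and the same indices are
-- multiplied together.
concatDigits :: [Int]
concatDigits =
  concatMap (map (\c -> fromEnum c - fromEnum '0') . show) [1 :: Integer ..]

solution' :: Int
solution' = product [concatDigits !! (10 ^ n - 1) | n <- [0 .. 6 :: Int]]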
|
quchen/HaskellEuler
|
src/Problem40.hs
|
bsd-3-clause
| 615
| 0
| 10
| 205
| 90
| 55
| 35
| 7
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
import ClassyPrelude
import Data.CSV.Conduit.Conversion
import Control.Lens hiding ((.=))
data AggregateRow = AggregateRow
{ _aggLabel :: Text
, _aggNSamples :: Int
, _aggAverage :: Double
, _aggMedian :: Double
, _aggNinetyPercentLine :: Double
, _aggNinetyFifthPercentLine :: Double
, _aggNinetyNinthPercentLine :: Double
, _aggMinVal :: Double
, _aggMaxVal :: Double
, _aggErrors :: Double
, _aggErrorPct :: Double
} deriving (Show, Eq)
makeLenses ''AggregateRow
instance ToNamedRecord AggregateRow where
toNamedRecord agg = namedRecord
[ "Label" .= toField (agg ^. aggLabel)
, "# Samples" .= toField (agg ^. aggNSamples)
, "Average" .= toField (agg ^. aggAverage)
, "Median" .= toField (agg ^. aggMedian)
, "90% Line" .= toField (agg ^. aggNinetyPercentLine)
, "95% Line" .= toField (agg ^. aggNinetyFifthPercentLine)
, "99% Line" .= toField (agg ^. aggNinetyNinthPercentLine)
, "Min" .= toField (agg ^. aggMinVal)
, "Max" .= toField (agg ^. aggMaxVal)
, "Errors" .= toField (agg ^. aggErrors)
, "Error%" .= toField (agg ^. aggErrorPct)
]
instance FromNamedRecord AggregateRow where
parseNamedRecord r = AggregateRow
<$> r .: "Label"
<*> r .: "# Samples"
<*> r .: "Average"
<*> r .: "Median"
<*> r .: "90% Line"
<*> r .: "95% Line"
<*> r .: "99% Line"
<*> r .: "Min"
<*> r .: "Max"
<*> r .: "Errors"
<*> r .: "Error%"
instance ToRecord AggregateRow where
toRecord agg = record
[ toField $ agg ^. aggLabel
, toField $ agg ^. aggNSamples
, toField $ agg ^. aggAverage
, toField $ agg ^. aggMedian
, toField $ agg ^. aggNinetyPercentLine
, toField $ agg ^. aggNinetyFifthPercentLine
, toField $ agg ^. aggNinetyNinthPercentLine
, toField $ agg ^. aggMinVal
, toField $ agg ^. aggMaxVal
, toField $ agg ^. aggErrors
, toField $ agg ^. aggErrorPct
]
instance FromRecord AggregateRow where
parseRecord r = AggregateRow
<$> r .! 0
<*> r .! 1
<*> r .! 2
<*> r .! 3
<*> r .! 4
<*> r .! 5
<*> r .! 6
<*> r .! 7
<*> r .! 8
<*> r .! 9
<*> r .! 10
-- BSS.readFile >>> parsed (row def) >>> S.concat >>> S.map fromList >>> S.map (runParser . parseRecord) >>> S.map (fmap asAgg) >>> S.concat >>> S.print >>> runResourceT $ "/tmp/aggregate_5.csv"
|
limaner2002/EPC-tools
|
stats/src/Stats/ParseAggregateRow.hs
|
bsd-3-clause
| 2,461
| 0
| 27
| 599
| 676
| 365
| 311
| 72
| 0
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[ConFold]{Constant Folder}
Conceptually, constant folding should be parameterized with the kind
of target machine to get identical behaviour during compilation time
and runtime. We cheat a little bit here...
ToDo:
check boundaries before folding, e.g. we can fold the Float addition
(i1 + i2) only if it results in a valid Float.
-}
{-# LANGUAGE CPP, RankNTypes #-}
{-# OPTIONS_GHC -optc-DNON_POSIX_SOURCE #-}
module PrelRules ( primOpRules, builtinRules ) where
#include "HsVersions.h"
#include "../includes/MachDeps.h"
import {-# SOURCE #-} MkId ( mkPrimOpId, magicDictId )
import CoreSyn
import MkCore
import Id
import Literal
import CoreSubst ( exprIsLiteral_maybe )
import PrimOp ( PrimOp(..), tagToEnumKey )
import TysWiredIn
import TysPrim
import TyCon ( tyConDataCons_maybe, isEnumerationTyCon, isNewTyCon, unwrapNewTyCon_maybe )
import DataCon ( dataConTag, dataConTyCon, dataConWorkId )
import CoreUtils ( cheapEqExpr, exprIsHNF )
import CoreUnfold ( exprIsConApp_maybe )
import Type
import TypeRep
import OccName ( occNameFS )
import PrelNames
import Maybes ( orElse )
import Name ( Name, nameOccName )
import Outputable
import FastString
import BasicTypes
import DynFlags
import Platform
import Util
import Coercion (mkUnbranchedAxInstCo,mkSymCo,Role(..))
#if __GLASGOW_HASKELL__ >= 709
import Control.Applicative ( Alternative(..) )
#else
import Control.Applicative ( Applicative(..), Alternative(..) )
#endif
import Control.Monad
import Data.Bits as Bits
import qualified Data.ByteString as BS
import Data.Int
import Data.Ratio
import Data.Word
{-
Note [Constant folding]
~~~~~~~~~~~~~~~~~~~~~~~
primOpRules generates a rewrite rule for each primop
These rules do what is often called "constant folding"
E.g. the rules for +# might say
4 +# 5 = 9
Well, of course you'd need a lot of rules if you did it
like that, so we use a BuiltinRule instead, so that we
can match on any two literal values. So the rule is really
more like
(Lit x) +# (Lit y) = Lit (x+#y)
where the (+#) on the rhs is done at compile time
That is why these rules are built in here.
-}
primOpRules :: Name -> PrimOp -> Maybe CoreRule
-- ToDo: something for integer-shift ops?
-- NotOp
primOpRules nm TagToEnumOp = mkPrimOpRule nm 2 [ tagToEnumRule ]
primOpRules nm DataToTagOp = mkPrimOpRule nm 2 [ dataToTagRule ]
-- Int operations
primOpRules nm IntAddOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 (+))
, identityDynFlags zeroi ]
primOpRules nm IntSubOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 (-))
, rightIdentityDynFlags zeroi
, equalArgs >> retLit zeroi ]
primOpRules nm IntMulOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 (*))
, zeroElem zeroi
, identityDynFlags onei ]
primOpRules nm IntQuotOp = mkPrimOpRule nm 2 [ nonZeroLit 1 >> binaryLit (intOp2 quot)
, leftZero zeroi
, rightIdentityDynFlags onei
, equalArgs >> retLit onei ]
primOpRules nm IntRemOp = mkPrimOpRule nm 2 [ nonZeroLit 1 >> binaryLit (intOp2 rem)
, leftZero zeroi
, do l <- getLiteral 1
dflags <- getDynFlags
guard (l == onei dflags)
retLit zeroi
, equalArgs >> retLit zeroi
, equalArgs >> retLit zeroi ]
primOpRules nm AndIOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 (.&.))
, idempotent
, zeroElem zeroi ]
primOpRules nm OrIOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 (.|.))
, idempotent
, identityDynFlags zeroi ]
primOpRules nm XorIOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 xor)
, identityDynFlags zeroi
, equalArgs >> retLit zeroi ]
primOpRules nm NotIOp = mkPrimOpRule nm 1 [ unaryLit complementOp
, inversePrimOp NotIOp ]
primOpRules nm IntNegOp = mkPrimOpRule nm 1 [ unaryLit negOp
, inversePrimOp IntNegOp ]
primOpRules nm ISllOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 Bits.shiftL)
, rightIdentityDynFlags zeroi ]
primOpRules nm ISraOp = mkPrimOpRule nm 2 [ binaryLit (intOp2 Bits.shiftR)
, rightIdentityDynFlags zeroi ]
primOpRules nm ISrlOp = mkPrimOpRule nm 2 [ binaryLit (intOp2' shiftRightLogical)
, rightIdentityDynFlags zeroi ]
-- Word operations
primOpRules nm WordAddOp = mkPrimOpRule nm 2 [ binaryLit (wordOp2 (+))
, identityDynFlags zerow ]
primOpRules nm WordSubOp = mkPrimOpRule nm 2 [ binaryLit (wordOp2 (-))
, rightIdentityDynFlags zerow
, equalArgs >> retLit zerow ]
primOpRules nm WordMulOp = mkPrimOpRule nm 2 [ binaryLit (wordOp2 (*))
, identityDynFlags onew ]
primOpRules nm WordQuotOp = mkPrimOpRule nm 2 [ nonZeroLit 1 >> binaryLit (wordOp2 quot)
, rightIdentityDynFlags onew ]
primOpRules nm WordRemOp = mkPrimOpRule nm 2 [ nonZeroLit 1 >> binaryLit (wordOp2 rem)
, rightIdentityDynFlags onew ]
primOpRules nm AndOp = mkPrimOpRule nm 2 [ binaryLit (wordOp2 (.&.))
, idempotent
, zeroElem zerow ]
primOpRules nm OrOp = mkPrimOpRule nm 2 [ binaryLit (wordOp2 (.|.))
, idempotent
, identityDynFlags zerow ]
primOpRules nm XorOp = mkPrimOpRule nm 2 [ binaryLit (wordOp2 xor)
, identityDynFlags zerow
, equalArgs >> retLit zerow ]
primOpRules nm NotOp = mkPrimOpRule nm 1 [ unaryLit complementOp
, inversePrimOp NotOp ]
primOpRules nm SllOp = mkPrimOpRule nm 2 [ wordShiftRule (const Bits.shiftL) ]
primOpRules nm SrlOp = mkPrimOpRule nm 2 [ wordShiftRule shiftRightLogical ]
-- coercions
primOpRules nm Word2IntOp = mkPrimOpRule nm 1 [ liftLitDynFlags word2IntLit
, inversePrimOp Int2WordOp ]
primOpRules nm Int2WordOp = mkPrimOpRule nm 1 [ liftLitDynFlags int2WordLit
, inversePrimOp Word2IntOp ]
primOpRules nm Narrow8IntOp = mkPrimOpRule nm 1 [ liftLit narrow8IntLit
, subsumedByPrimOp Narrow8IntOp
, Narrow8IntOp `subsumesPrimOp` Narrow16IntOp
, Narrow8IntOp `subsumesPrimOp` Narrow32IntOp ]
primOpRules nm Narrow16IntOp = mkPrimOpRule nm 1 [ liftLit narrow16IntLit
, subsumedByPrimOp Narrow8IntOp
, subsumedByPrimOp Narrow16IntOp
, Narrow16IntOp `subsumesPrimOp` Narrow32IntOp ]
primOpRules nm Narrow32IntOp = mkPrimOpRule nm 1 [ liftLit narrow32IntLit
, subsumedByPrimOp Narrow8IntOp
, subsumedByPrimOp Narrow16IntOp
, subsumedByPrimOp Narrow32IntOp
, removeOp32 ]
primOpRules nm Narrow8WordOp = mkPrimOpRule nm 1 [ liftLit narrow8WordLit
, subsumedByPrimOp Narrow8WordOp
, Narrow8WordOp `subsumesPrimOp` Narrow16WordOp
, Narrow8WordOp `subsumesPrimOp` Narrow32WordOp ]
primOpRules nm Narrow16WordOp = mkPrimOpRule nm 1 [ liftLit narrow16WordLit
, subsumedByPrimOp Narrow8WordOp
, subsumedByPrimOp Narrow16WordOp
, Narrow16WordOp `subsumesPrimOp` Narrow32WordOp ]
primOpRules nm Narrow32WordOp = mkPrimOpRule nm 1 [ liftLit narrow32WordLit
, subsumedByPrimOp Narrow8WordOp
, subsumedByPrimOp Narrow16WordOp
, subsumedByPrimOp Narrow32WordOp
, removeOp32 ]
primOpRules nm OrdOp = mkPrimOpRule nm 1 [ liftLit char2IntLit
, inversePrimOp ChrOp ]
primOpRules nm ChrOp = mkPrimOpRule nm 1 [ do [Lit lit] <- getArgs
guard (litFitsInChar lit)
liftLit int2CharLit
, inversePrimOp OrdOp ]
primOpRules nm Float2IntOp = mkPrimOpRule nm 1 [ liftLit float2IntLit ]
primOpRules nm Int2FloatOp = mkPrimOpRule nm 1 [ liftLit int2FloatLit ]
primOpRules nm Double2IntOp = mkPrimOpRule nm 1 [ liftLit double2IntLit ]
primOpRules nm Int2DoubleOp = mkPrimOpRule nm 1 [ liftLit int2DoubleLit ]
-- SUP: Not sure what the standard says about precision in the following 2 cases
primOpRules nm Float2DoubleOp = mkPrimOpRule nm 1 [ liftLit float2DoubleLit ]
primOpRules nm Double2FloatOp = mkPrimOpRule nm 1 [ liftLit double2FloatLit ]
-- Float
primOpRules nm FloatAddOp = mkPrimOpRule nm 2 [ binaryLit (floatOp2 (+))
, identity zerof ]
primOpRules nm FloatSubOp = mkPrimOpRule nm 2 [ binaryLit (floatOp2 (-))
, rightIdentity zerof ]
primOpRules nm FloatMulOp = mkPrimOpRule nm 2 [ binaryLit (floatOp2 (*))
, identity onef
, strengthReduction twof FloatAddOp ]
-- zeroElem zerof doesn't hold because of NaN
primOpRules nm FloatDivOp = mkPrimOpRule nm 2 [ guardFloatDiv >> binaryLit (floatOp2 (/))
, rightIdentity onef ]
primOpRules nm FloatNegOp = mkPrimOpRule nm 1 [ unaryLit negOp
, inversePrimOp FloatNegOp ]
-- Double
primOpRules nm DoubleAddOp = mkPrimOpRule nm 2 [ binaryLit (doubleOp2 (+))
, identity zerod ]
primOpRules nm DoubleSubOp = mkPrimOpRule nm 2 [ binaryLit (doubleOp2 (-))
, rightIdentity zerod ]
primOpRules nm DoubleMulOp = mkPrimOpRule nm 2 [ binaryLit (doubleOp2 (*))
, identity oned
, strengthReduction twod DoubleAddOp ]
-- zeroElem zerod doesn't hold because of NaN
primOpRules nm DoubleDivOp = mkPrimOpRule nm 2 [ guardDoubleDiv >> binaryLit (doubleOp2 (/))
, rightIdentity oned ]
primOpRules nm DoubleNegOp = mkPrimOpRule nm 1 [ unaryLit negOp
, inversePrimOp DoubleNegOp ]
-- Relational operators
primOpRules nm IntEqOp = mkRelOpRule nm (==) [ litEq True ]
primOpRules nm IntNeOp = mkRelOpRule nm (/=) [ litEq False ]
primOpRules nm CharEqOp = mkRelOpRule nm (==) [ litEq True ]
primOpRules nm CharNeOp = mkRelOpRule nm (/=) [ litEq False ]
primOpRules nm IntGtOp = mkRelOpRule nm (>) [ boundsCmp Gt ]
primOpRules nm IntGeOp = mkRelOpRule nm (>=) [ boundsCmp Ge ]
primOpRules nm IntLeOp = mkRelOpRule nm (<=) [ boundsCmp Le ]
primOpRules nm IntLtOp = mkRelOpRule nm (<) [ boundsCmp Lt ]
primOpRules nm CharGtOp = mkRelOpRule nm (>) [ boundsCmp Gt ]
primOpRules nm CharGeOp = mkRelOpRule nm (>=) [ boundsCmp Ge ]
primOpRules nm CharLeOp = mkRelOpRule nm (<=) [ boundsCmp Le ]
primOpRules nm CharLtOp = mkRelOpRule nm (<) [ boundsCmp Lt ]
primOpRules nm FloatGtOp = mkFloatingRelOpRule nm (>) []
primOpRules nm FloatGeOp = mkFloatingRelOpRule nm (>=) []
primOpRules nm FloatLeOp = mkFloatingRelOpRule nm (<=) []
primOpRules nm FloatLtOp = mkFloatingRelOpRule nm (<) []
primOpRules nm FloatEqOp = mkFloatingRelOpRule nm (==) [ litEq True ]
primOpRules nm FloatNeOp = mkFloatingRelOpRule nm (/=) [ litEq False ]
primOpRules nm DoubleGtOp = mkFloatingRelOpRule nm (>) []
primOpRules nm DoubleGeOp = mkFloatingRelOpRule nm (>=) []
primOpRules nm DoubleLeOp = mkFloatingRelOpRule nm (<=) []
primOpRules nm DoubleLtOp = mkFloatingRelOpRule nm (<) []
primOpRules nm DoubleEqOp = mkFloatingRelOpRule nm (==) [ litEq True ]
primOpRules nm DoubleNeOp = mkFloatingRelOpRule nm (/=) [ litEq False ]
primOpRules nm WordGtOp = mkRelOpRule nm (>) [ boundsCmp Gt ]
primOpRules nm WordGeOp = mkRelOpRule nm (>=) [ boundsCmp Ge ]
primOpRules nm WordLeOp = mkRelOpRule nm (<=) [ boundsCmp Le ]
primOpRules nm WordLtOp = mkRelOpRule nm (<) [ boundsCmp Lt ]
primOpRules nm WordEqOp = mkRelOpRule nm (==) [ litEq True ]
primOpRules nm WordNeOp = mkRelOpRule nm (/=) [ litEq False ]
primOpRules nm AddrAddOp = mkPrimOpRule nm 2 [ rightIdentityDynFlags zeroi ]
primOpRules nm SeqOp = mkPrimOpRule nm 4 [ seqRule ]
primOpRules nm SparkOp = mkPrimOpRule nm 4 [ sparkRule ]
primOpRules _ _ = Nothing
{-
************************************************************************
* *
\subsection{Doing the business}
* *
************************************************************************
-}
-- useful shorthands
mkPrimOpRule :: Name -> Int -> [RuleM CoreExpr] -> Maybe CoreRule
mkPrimOpRule nm arity rules = Just $ mkBasicRule nm arity (msum rules)
mkRelOpRule :: Name -> (forall a . Ord a => a -> a -> Bool)
-> [RuleM CoreExpr] -> Maybe CoreRule
mkRelOpRule nm cmp extra
= mkPrimOpRule nm 2 $ rules ++ extra
where
rules = [ binaryCmpLit cmp
, do equalArgs
-- x `cmp` x does not depend on x, so
-- compute it for the arbitrary value 'True'
-- and use that result
dflags <- getDynFlags
return (if cmp True True
then trueValInt dflags
else falseValInt dflags) ]
-- Note [Rules for floating-point comparisons]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- We need different rules for floating-point values because for floats
-- it is not true that x = x for every x. The special case where this fails
-- is NaN.
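-- (Added illustration, plain Haskell rather than a rule:
--    let nan = 0/0 :: Double in nan == nan
-- evaluates to False, which is why x ==## x, and its Float analogue, must not
-- be rewritten to True.)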
mkFloatingRelOpRule :: Name -> (forall a . Ord a => a -> a -> Bool)
-> [RuleM CoreExpr] -> Maybe CoreRule
mkFloatingRelOpRule nm cmp extra -- See Note [Rules for floating-point comparisons]
= mkPrimOpRule nm 2 $ binaryCmpLit cmp : extra
-- common constants
zeroi, onei, zerow, onew :: DynFlags -> Literal
zeroi dflags = mkMachInt dflags 0
onei dflags = mkMachInt dflags 1
zerow dflags = mkMachWord dflags 0
onew dflags = mkMachWord dflags 1
zerof, onef, twof, zerod, oned, twod :: Literal
zerof = mkMachFloat 0.0
onef = mkMachFloat 1.0
twof = mkMachFloat 2.0
zerod = mkMachDouble 0.0
oned = mkMachDouble 1.0
twod = mkMachDouble 2.0
cmpOp :: DynFlags -> (forall a . Ord a => a -> a -> Bool)
-> Literal -> Literal -> Maybe CoreExpr
cmpOp dflags cmp = go
where
done True = Just $ trueValInt dflags
done False = Just $ falseValInt dflags
-- These compares are at different types
go (MachChar i1) (MachChar i2) = done (i1 `cmp` i2)
go (MachInt i1) (MachInt i2) = done (i1 `cmp` i2)
go (MachInt64 i1) (MachInt64 i2) = done (i1 `cmp` i2)
go (MachWord i1) (MachWord i2) = done (i1 `cmp` i2)
go (MachWord64 i1) (MachWord64 i2) = done (i1 `cmp` i2)
go (MachFloat i1) (MachFloat i2) = done (i1 `cmp` i2)
go (MachDouble i1) (MachDouble i2) = done (i1 `cmp` i2)
go _ _ = Nothing
--------------------------
negOp :: DynFlags -> Literal -> Maybe CoreExpr -- Negate
negOp _ (MachFloat 0.0) = Nothing -- can't represent -0.0 as a Rational
negOp dflags (MachFloat f) = Just (mkFloatVal dflags (-f))
negOp _ (MachDouble 0.0) = Nothing
negOp dflags (MachDouble d) = Just (mkDoubleVal dflags (-d))
negOp dflags (MachInt i) = intResult dflags (-i)
negOp _ _ = Nothing
complementOp :: DynFlags -> Literal -> Maybe CoreExpr -- Binary complement
complementOp dflags (MachWord i) = wordResult dflags (complement i)
complementOp dflags (MachInt i) = intResult dflags (complement i)
complementOp _ _ = Nothing
--------------------------
intOp2 :: (Integral a, Integral b)
=> (a -> b -> Integer)
-> DynFlags -> Literal -> Literal -> Maybe CoreExpr
intOp2 = intOp2' . const
intOp2' :: (Integral a, Integral b)
=> (DynFlags -> a -> b -> Integer)
-> DynFlags -> Literal -> Literal -> Maybe CoreExpr
intOp2' op dflags (MachInt i1) (MachInt i2) =
let o = op dflags
in intResult dflags (fromInteger i1 `o` fromInteger i2)
intOp2' _ _ _ _ = Nothing -- Could find LitLit
shiftRightLogical :: DynFlags -> Integer -> Int -> Integer
-- Shift right, putting zeros in rather than sign-propagating as Bits.shiftR would do
-- Do this by converting to Word and back. Obviously this won't work for big
-- values, but it's OK for how we use it here
shiftRightLogical dflags x n
| wordSizeInBits dflags == 32 = fromIntegral (fromInteger x `shiftR` n :: Word32)
| wordSizeInBits dflags == 64 = fromIntegral (fromInteger x `shiftR` n :: Word64)
| otherwise = panic "shiftRightLogical: unsupported word size"
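-- (Added illustration) e.g. on a 64-bit target:
--   shiftRightLogical dflags (-1) 1 == 9223372036854775807 (0x7FFFFFFFFFFFFFFF)
-- i.e. zeros are shifted in from the left instead of copies of the sign bit.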
--------------------------
retLit :: (DynFlags -> Literal) -> RuleM CoreExpr
retLit l = do dflags <- getDynFlags
return $ Lit $ l dflags
wordOp2 :: (Integral a, Integral b)
=> (a -> b -> Integer)
-> DynFlags -> Literal -> Literal -> Maybe CoreExpr
wordOp2 op dflags (MachWord w1) (MachWord w2)
= wordResult dflags (fromInteger w1 `op` fromInteger w2)
wordOp2 _ _ _ _ = Nothing -- Could find LitLit
wordShiftRule :: (DynFlags -> Integer -> Int -> Integer) -> RuleM CoreExpr
-- Shifts take an Int; hence third arg of op is Int
-- See Note [Guarding against silly shifts]
wordShiftRule shift_op
= do { dflags <- getDynFlags
; [e1, Lit (MachInt shift_len)] <- getArgs
; case e1 of
_ | shift_len == 0
-> return e1
| shift_len < 0 || wordSizeInBits dflags < shift_len
-> return (mkRuntimeErrorApp rUNTIME_ERROR_ID wordPrimTy
("Bad shift length" ++ show shift_len))
Lit (MachWord x)
-> let op = shift_op dflags
in liftMaybe $ wordResult dflags (x `op` fromInteger shift_len)
-- Do the shift at type Integer, but shift length is Int
_ -> mzero }
wordSizeInBits :: DynFlags -> Integer
wordSizeInBits dflags = toInteger (platformWordSize (targetPlatform dflags) `shiftL` 3)
--------------------------
floatOp2 :: (Rational -> Rational -> Rational)
-> DynFlags -> Literal -> Literal
-> Maybe (Expr CoreBndr)
floatOp2 op dflags (MachFloat f1) (MachFloat f2)
= Just (mkFloatVal dflags (f1 `op` f2))
floatOp2 _ _ _ _ = Nothing
--------------------------
doubleOp2 :: (Rational -> Rational -> Rational)
-> DynFlags -> Literal -> Literal
-> Maybe (Expr CoreBndr)
doubleOp2 op dflags (MachDouble f1) (MachDouble f2)
= Just (mkDoubleVal dflags (f1 `op` f2))
doubleOp2 _ _ _ _ = Nothing
--------------------------
-- This stuff turns
-- n ==# 3#
-- into
-- case n of
-- 3# -> True
-- m -> False
--
-- This is a Good Thing, because it allows case-of case things
-- to happen, and case-default absorption to happen. For
-- example:
--
-- if (n ==# 3#) || (n ==# 4#) then e1 else e2
-- will transform to
-- case n of
-- 3# -> e1
-- 4# -> e1
-- m -> e2
-- (modulo the usual precautions to avoid duplicating e1)
litEq :: Bool -- True <=> equality, False <=> inequality
-> RuleM CoreExpr
litEq is_eq = msum
[ do [Lit lit, expr] <- getArgs
dflags <- getDynFlags
do_lit_eq dflags lit expr
, do [expr, Lit lit] <- getArgs
dflags <- getDynFlags
do_lit_eq dflags lit expr ]
where
do_lit_eq dflags lit expr = do
guard (not (litIsLifted lit))
return (mkWildCase expr (literalType lit) intPrimTy
[(DEFAULT, [], val_if_neq),
(LitAlt lit, [], val_if_eq)])
where
val_if_eq | is_eq = trueValInt dflags
| otherwise = falseValInt dflags
val_if_neq | is_eq = falseValInt dflags
| otherwise = trueValInt dflags
-- | Check whether there is a comparison with minBound or maxBound that is
-- always true or false. For instance, an Int cannot be smaller than its
-- minBound, so we can replace such a comparison with False.
boundsCmp :: Comparison -> RuleM CoreExpr
boundsCmp op = do
dflags <- getDynFlags
[a, b] <- getArgs
liftMaybe $ mkRuleFn dflags op a b
data Comparison = Gt | Ge | Lt | Le
mkRuleFn :: DynFlags -> Comparison -> CoreExpr -> CoreExpr -> Maybe CoreExpr
mkRuleFn dflags Gt (Lit lit) _ | isMinBound dflags lit = Just $ falseValInt dflags
mkRuleFn dflags Le (Lit lit) _ | isMinBound dflags lit = Just $ trueValInt dflags
mkRuleFn dflags Ge _ (Lit lit) | isMinBound dflags lit = Just $ trueValInt dflags
mkRuleFn dflags Lt _ (Lit lit) | isMinBound dflags lit = Just $ falseValInt dflags
mkRuleFn dflags Ge (Lit lit) _ | isMaxBound dflags lit = Just $ trueValInt dflags
mkRuleFn dflags Lt (Lit lit) _ | isMaxBound dflags lit = Just $ falseValInt dflags
mkRuleFn dflags Gt _ (Lit lit) | isMaxBound dflags lit = Just $ falseValInt dflags
mkRuleFn dflags Le _ (Lit lit) | isMaxBound dflags lit = Just $ trueValInt dflags
mkRuleFn _ _ _ _ = Nothing
isMinBound :: DynFlags -> Literal -> Bool
isMinBound _ (MachChar c) = c == minBound
isMinBound dflags (MachInt i) = i == tARGET_MIN_INT dflags
isMinBound _ (MachInt64 i) = i == toInteger (minBound :: Int64)
isMinBound _ (MachWord i) = i == 0
isMinBound _ (MachWord64 i) = i == 0
isMinBound _ _ = False
isMaxBound :: DynFlags -> Literal -> Bool
isMaxBound _ (MachChar c) = c == maxBound
isMaxBound dflags (MachInt i) = i == tARGET_MAX_INT dflags
isMaxBound _ (MachInt64 i) = i == toInteger (maxBound :: Int64)
isMaxBound dflags (MachWord i) = i == tARGET_MAX_WORD dflags
isMaxBound _ (MachWord64 i) = i == toInteger (maxBound :: Word64)
isMaxBound _ _ = False
-- Note that we *don't* warn the user about overflow. It's not done at
-- runtime either, and compilation of completely harmless things like
-- ((124076834 :: Word32) + (2147483647 :: Word32))
-- would yield a warning. Instead we simply squash the value into the
-- *target* Int/Word range.
intResult :: DynFlags -> Integer -> Maybe CoreExpr
intResult dflags result = Just (mkIntVal dflags result')
where result' = case platformWordSize (targetPlatform dflags) of
4 -> toInteger (fromInteger result :: Int32)
8 -> toInteger (fromInteger result :: Int64)
w -> panic ("intResult: Unknown platformWordSize: " ++ show w)
wordResult :: DynFlags -> Integer -> Maybe CoreExpr
wordResult dflags result = Just (mkWordVal dflags result')
where result' = case platformWordSize (targetPlatform dflags) of
4 -> toInteger (fromInteger result :: Word32)
8 -> toInteger (fromInteger result :: Word64)
w -> panic ("wordResult: Unknown platformWordSize: " ++ show w)
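-- (Added illustration) e.g. on a 64-bit target, intResult dflags (2^63)
-- produces the literal -9223372036854775808: the value is squashed into the
-- target Int range, mirroring the wrap-around behaviour at runtime.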
inversePrimOp :: PrimOp -> RuleM CoreExpr
inversePrimOp primop = do
[Var primop_id `App` e] <- getArgs
matchPrimOpId primop primop_id
return e
subsumesPrimOp :: PrimOp -> PrimOp -> RuleM CoreExpr
this `subsumesPrimOp` that = do
[Var primop_id `App` e] <- getArgs
matchPrimOpId that primop_id
return (Var (mkPrimOpId this) `App` e)
subsumedByPrimOp :: PrimOp -> RuleM CoreExpr
subsumedByPrimOp primop = do
[e@(Var primop_id `App` _)] <- getArgs
matchPrimOpId primop primop_id
return e
idempotent :: RuleM CoreExpr
idempotent = do [e1, e2] <- getArgs
guard $ cheapEqExpr e1 e2
return e1
{-
Note [Guarding against silly shifts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this code:
import Data.Bits( (.|.), shiftL )
chunkToBitmap :: [Bool] -> Word32
chunkToBitmap chunk = foldr (.|.) 0 [ 1 `shiftL` n | (True,n) <- zip chunk [0..] ]
This optimises to:
Shift.$wgo = \ (w_sCS :: GHC.Prim.Int#) (w1_sCT :: [GHC.Types.Bool]) ->
case w1_sCT of _ {
[] -> 0##;
: x_aAW xs_aAX ->
case x_aAW of _ {
GHC.Types.False ->
case w_sCS of wild2_Xh {
__DEFAULT -> Shift.$wgo (GHC.Prim.+# wild2_Xh 1) xs_aAX;
9223372036854775807 -> 0## };
GHC.Types.True ->
case GHC.Prim.>=# w_sCS 64 of _ {
GHC.Types.False ->
case w_sCS of wild3_Xh {
__DEFAULT ->
case Shift.$wgo (GHC.Prim.+# wild3_Xh 1) xs_aAX of ww_sCW { __DEFAULT ->
GHC.Prim.or# (GHC.Prim.narrow32Word#
(GHC.Prim.uncheckedShiftL# 1## wild3_Xh))
ww_sCW
};
9223372036854775807 ->
GHC.Prim.narrow32Word#
!!!!--> (GHC.Prim.uncheckedShiftL# 1## 9223372036854775807)
};
GHC.Types.True ->
case w_sCS of wild3_Xh {
__DEFAULT -> Shift.$wgo (GHC.Prim.+# wild3_Xh 1) xs_aAX;
9223372036854775807 -> 0##
} } } }
Note the massive shift on line "!!!!". It can't happen, because we've checked
that w < 64, but the optimiser didn't spot that. We DO NOT want to constant-fold this!
Moreover, if the programmer writes (n `uncheckedShiftL` 9223372036854775807), we
can't constant fold it, but if it gets to the assembler we get
Error: operand type mismatch for `shl'
So the best thing to do is to rewrite the shift with a call to error,
when the second arg is stupid.
************************************************************************
* *
\subsection{Vaguely generic functions}
* *
************************************************************************
-}
mkBasicRule :: Name -> Int -> RuleM CoreExpr -> CoreRule
-- Gives the Rule the same name as the primop itself
mkBasicRule op_name n_args rm
= BuiltinRule { ru_name = occNameFS (nameOccName op_name),
ru_fn = op_name,
ru_nargs = n_args,
ru_try = \ dflags in_scope _ -> runRuleM rm dflags in_scope }
newtype RuleM r = RuleM
{ runRuleM :: DynFlags -> InScopeEnv -> [CoreExpr] -> Maybe r }
instance Functor RuleM where
fmap = liftM
instance Applicative RuleM where
pure = return
(<*>) = ap
instance Monad RuleM where
return x = RuleM $ \_ _ _ -> Just x
RuleM f >>= g = RuleM $ \dflags iu e -> case f dflags iu e of
Nothing -> Nothing
Just r -> runRuleM (g r) dflags iu e
fail _ = mzero
instance Alternative RuleM where
empty = mzero
(<|>) = mplus
instance MonadPlus RuleM where
mzero = RuleM $ \_ _ _ -> Nothing
mplus (RuleM f1) (RuleM f2) = RuleM $ \dflags iu args ->
f1 dflags iu args `mplus` f2 dflags iu args
instance HasDynFlags RuleM where
getDynFlags = RuleM $ \dflags _ _ -> Just dflags
liftMaybe :: Maybe a -> RuleM a
liftMaybe Nothing = mzero
liftMaybe (Just x) = return x
liftLit :: (Literal -> Literal) -> RuleM CoreExpr
liftLit f = liftLitDynFlags (const f)
liftLitDynFlags :: (DynFlags -> Literal -> Literal) -> RuleM CoreExpr
liftLitDynFlags f = do
dflags <- getDynFlags
[Lit lit] <- getArgs
return $ Lit (f dflags lit)
removeOp32 :: RuleM CoreExpr
removeOp32 = do
dflags <- getDynFlags
if wordSizeInBits dflags == 32
then do
[e] <- getArgs
return e
else mzero
getArgs :: RuleM [CoreExpr]
getArgs = RuleM $ \_ _ args -> Just args
getInScopeEnv :: RuleM InScopeEnv
getInScopeEnv = RuleM $ \_ iu _ -> Just iu
-- return the n-th argument of this rule, if it is a literal
-- argument indices start from 0
getLiteral :: Int -> RuleM Literal
getLiteral n = RuleM $ \_ _ exprs -> case drop n exprs of
(Lit l:_) -> Just l
_ -> Nothing
unaryLit :: (DynFlags -> Literal -> Maybe CoreExpr) -> RuleM CoreExpr
unaryLit op = do
dflags <- getDynFlags
[Lit l] <- getArgs
liftMaybe $ op dflags (convFloating dflags l)
binaryLit :: (DynFlags -> Literal -> Literal -> Maybe CoreExpr) -> RuleM CoreExpr
binaryLit op = do
dflags <- getDynFlags
[Lit l1, Lit l2] <- getArgs
liftMaybe $ op dflags (convFloating dflags l1) (convFloating dflags l2)
binaryCmpLit :: (forall a . Ord a => a -> a -> Bool) -> RuleM CoreExpr
binaryCmpLit op = do
dflags <- getDynFlags
binaryLit (\_ -> cmpOp dflags op)
leftIdentity :: Literal -> RuleM CoreExpr
leftIdentity id_lit = leftIdentityDynFlags (const id_lit)
rightIdentity :: Literal -> RuleM CoreExpr
rightIdentity id_lit = rightIdentityDynFlags (const id_lit)
identity :: Literal -> RuleM CoreExpr
identity lit = leftIdentity lit `mplus` rightIdentity lit
leftIdentityDynFlags :: (DynFlags -> Literal) -> RuleM CoreExpr
leftIdentityDynFlags id_lit = do
dflags <- getDynFlags
[Lit l1, e2] <- getArgs
guard $ l1 == id_lit dflags
return e2
rightIdentityDynFlags :: (DynFlags -> Literal) -> RuleM CoreExpr
rightIdentityDynFlags id_lit = do
dflags <- getDynFlags
[e1, Lit l2] <- getArgs
guard $ l2 == id_lit dflags
return e1
identityDynFlags :: (DynFlags -> Literal) -> RuleM CoreExpr
identityDynFlags lit = leftIdentityDynFlags lit `mplus` rightIdentityDynFlags lit
leftZero :: (DynFlags -> Literal) -> RuleM CoreExpr
leftZero zero = do
dflags <- getDynFlags
[Lit l1, _] <- getArgs
guard $ l1 == zero dflags
return $ Lit l1
rightZero :: (DynFlags -> Literal) -> RuleM CoreExpr
rightZero zero = do
dflags <- getDynFlags
[_, Lit l2] <- getArgs
guard $ l2 == zero dflags
return $ Lit l2
zeroElem :: (DynFlags -> Literal) -> RuleM CoreExpr
zeroElem lit = leftZero lit `mplus` rightZero lit
equalArgs :: RuleM ()
equalArgs = do
[e1, e2] <- getArgs
guard $ e1 `cheapEqExpr` e2
nonZeroLit :: Int -> RuleM ()
nonZeroLit n = getLiteral n >>= guard . not . isZeroLit
-- When excess precision is not requested, cut down the precision of the
-- Rational value to that of Float/Double. We confuse host architecture
-- and target architecture here, but it's convenient (and wrong :-).
convFloating :: DynFlags -> Literal -> Literal
convFloating dflags (MachFloat f) | not (gopt Opt_ExcessPrecision dflags) =
MachFloat (toRational (fromRational f :: Float ))
convFloating dflags (MachDouble d) | not (gopt Opt_ExcessPrecision dflags) =
MachDouble (toRational (fromRational d :: Double))
convFloating _ l = l
guardFloatDiv :: RuleM ()
guardFloatDiv = do
[Lit (MachFloat f1), Lit (MachFloat f2)] <- getArgs
guard $ (f1 /=0 || f2 > 0) -- see Note [negative zero]
&& f2 /= 0 -- avoid NaN and Infinity/-Infinity
guardDoubleDiv :: RuleM ()
guardDoubleDiv = do
[Lit (MachDouble d1), Lit (MachDouble d2)] <- getArgs
guard $ (d1 /=0 || d2 > 0) -- see Note [negative zero]
&& d2 /= 0 -- avoid NaN and Infinity/-Infinity
-- Note [negative zero] Avoid (0 / -d), otherwise 0/(-1) reduces to
-- zero, but we might want to preserve the negative zero here which
-- is representable in Float/Double but not in (normalised)
-- Rational. (#3676) Perhaps we should generate (0 :% (-1)) instead?
strengthReduction :: Literal -> PrimOp -> RuleM CoreExpr
strengthReduction two_lit add_op = do -- Note [Strength reduction]
arg <- msum [ do [arg, Lit mult_lit] <- getArgs
guard (mult_lit == two_lit)
return arg
, do [Lit mult_lit, arg] <- getArgs
guard (mult_lit == two_lit)
return arg ]
return $ Var (mkPrimOpId add_op) `App` arg `App` arg
-- Note [Strength reduction]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- This rule turns floating point multiplications of the form 2.0 * x and
-- x * 2.0 into x + x addition, because addition costs less than multiplication.
-- See #7116
-- Note [What's true and false]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- trueValInt and falseValInt represent true and false values returned by
-- comparison primops for Char, Int, Word, Integer, Double, Float and Addr.
-- True is represented as an unboxed 1# literal, while false is represented
-- as 0# literal.
-- We still need Bool data constructors (True and False) to use in a rule
-- for constant folding of equal Strings
trueValInt, falseValInt :: DynFlags -> Expr CoreBndr
trueValInt dflags = Lit $ onei dflags -- see Note [What's true and false]
falseValInt dflags = Lit $ zeroi dflags
trueValBool, falseValBool :: Expr CoreBndr
trueValBool = Var trueDataConId -- see Note [What's true and false]
falseValBool = Var falseDataConId
ltVal, eqVal, gtVal :: Expr CoreBndr
ltVal = Var ltDataConId
eqVal = Var eqDataConId
gtVal = Var gtDataConId
mkIntVal :: DynFlags -> Integer -> Expr CoreBndr
mkIntVal dflags i = Lit (mkMachInt dflags i)
mkWordVal :: DynFlags -> Integer -> Expr CoreBndr
mkWordVal dflags w = Lit (mkMachWord dflags w)
mkFloatVal :: DynFlags -> Rational -> Expr CoreBndr
mkFloatVal dflags f = Lit (convFloating dflags (MachFloat f))
mkDoubleVal :: DynFlags -> Rational -> Expr CoreBndr
mkDoubleVal dflags d = Lit (convFloating dflags (MachDouble d))
matchPrimOpId :: PrimOp -> Id -> RuleM ()
matchPrimOpId op id = do
op' <- liftMaybe $ isPrimOpId_maybe id
guard $ op == op'
{-
************************************************************************
* *
\subsection{Special rules for seq, tagToEnum, dataToTag}
* *
************************************************************************
Note [tagToEnum#]
~~~~~~~~~~~~~~~~~
Nasty check to ensure that tagToEnum# is applied to a type that is an
enumeration TyCon. Unification may refine the type later, but this
check won't see that, alas. It's crude but it works.
Here's are two cases that should fail
f :: forall a. a
f = tagToEnum# 0 -- Can't do tagToEnum# at a type variable
g :: Int
g = tagToEnum# 0 -- Int is not an enumeration
We used to make this check in the type inference engine, but it's quite
ugly to do so, because the delayed constraint solving means that we don't
really know what's going on until the end. It's very much a corner case
because we don't expect the user to call tagToEnum# at all; we merely
generate calls in derived instances of Enum. So we compromise: a
rewrite rule rewrites a bad instance of tagToEnum# to an error call,
and emits a warning.
-}
tagToEnumRule :: RuleM CoreExpr
-- If data T a = A | B | C
-- then tag2Enum# (T ty) 2# --> B ty
tagToEnumRule = do
[Type ty, Lit (MachInt i)] <- getArgs
case splitTyConApp_maybe ty of
Just (tycon, tc_args) | isEnumerationTyCon tycon -> do
let tag = fromInteger i
correct_tag dc = (dataConTag dc - fIRST_TAG) == tag
(dc:rest) <- return $ filter correct_tag (tyConDataCons_maybe tycon `orElse` [])
ASSERT(null rest) return ()
return $ mkTyApps (Var (dataConWorkId dc)) tc_args
-- See Note [tagToEnum#]
_ -> WARN( True, ptext (sLit "tagToEnum# on non-enumeration type") <+> ppr ty )
return $ mkRuntimeErrorApp rUNTIME_ERROR_ID ty "tagToEnum# on non-enumeration type"
{-
For dataToTag#, we can reduce if either
(a) the argument is a constructor
(b) the argument is a variable whose unfolding is a known constructor
-}
dataToTagRule :: RuleM CoreExpr
dataToTagRule = a `mplus` b
where
a = do
[Type ty1, Var tag_to_enum `App` Type ty2 `App` tag] <- getArgs
guard $ tag_to_enum `hasKey` tagToEnumKey
guard $ ty1 `eqType` ty2
return tag -- dataToTag (tagToEnum x) ==> x
b = do
dflags <- getDynFlags
[_, val_arg] <- getArgs
in_scope <- getInScopeEnv
(dc,_,_) <- liftMaybe $ exprIsConApp_maybe in_scope val_arg
ASSERT( not (isNewTyCon (dataConTyCon dc)) ) return ()
return $ mkIntVal dflags (toInteger (dataConTag dc - fIRST_TAG))
{-
************************************************************************
* *
\subsection{Rules for seq# and spark#}
* *
************************************************************************
-}
-- seq# :: forall a s . a -> State# s -> (# State# s, a #)
seqRule :: RuleM CoreExpr
seqRule = do
[ty_a, Type ty_s, a, s] <- getArgs
guard $ exprIsHNF a
return $ mkConApp (tupleDataCon Unboxed 2)
[Type (mkStatePrimTy ty_s), ty_a, s, a]
-- spark# :: forall a s . a -> State# s -> (# State# s, a #)
sparkRule :: RuleM CoreExpr
sparkRule = seqRule -- reduce on HNF, just the same
-- XXX perhaps we shouldn't do this, because a spark eliminated by
-- this rule won't be counted as a dud at runtime?
{-
************************************************************************
* *
\subsection{Built in rules}
* *
************************************************************************
Note [Scoping for Builtin rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When compiling a (base-package) module that defines one of the
functions mentioned in the RHS of a built-in rule, there's a danger
that we'll see
f = ...(eq String x)....
....and lower down...
eqString = ...
Then a rewrite would give
f = ...(eqString x)...
....and lower down...
eqString = ...
and lo, eqString is not in scope. This only really matters when we get to code
generation. With -O we do a GlomBinds step that does a new SCC analysis on the whole
set of bindings, which sorts out the dependency. Without -O we don't do any rule
rewriting so again we are fine.
(This whole thing doesn't show up for non-built-in rules because their dependencies
are explicit.)
-}
builtinRules :: [CoreRule]
-- Rules for non-primops that can't be expressed using a RULE pragma
builtinRules
= [BuiltinRule { ru_name = fsLit "AppendLitString",
ru_fn = unpackCStringFoldrName,
ru_nargs = 4, ru_try = \_ _ _ -> match_append_lit },
BuiltinRule { ru_name = fsLit "EqString", ru_fn = eqStringName,
ru_nargs = 2, ru_try = \dflags _ _ -> match_eq_string dflags },
BuiltinRule { ru_name = fsLit "Inline", ru_fn = inlineIdName,
ru_nargs = 2, ru_try = \_ _ _ -> match_inline },
BuiltinRule { ru_name = fsLit "MagicDict", ru_fn = idName magicDictId,
ru_nargs = 4, ru_try = \_ _ _ -> match_magicDict }
]
++ builtinIntegerRules
builtinIntegerRules :: [CoreRule]
builtinIntegerRules =
[rule_IntToInteger "smallInteger" smallIntegerName,
rule_WordToInteger "wordToInteger" wordToIntegerName,
rule_Int64ToInteger "int64ToInteger" int64ToIntegerName,
rule_Word64ToInteger "word64ToInteger" word64ToIntegerName,
rule_convert "integerToWord" integerToWordName mkWordLitWord,
rule_convert "integerToInt" integerToIntName mkIntLitInt,
rule_convert "integerToWord64" integerToWord64Name (\_ -> mkWord64LitWord64),
rule_convert "integerToInt64" integerToInt64Name (\_ -> mkInt64LitInt64),
rule_binop "plusInteger" plusIntegerName (+),
rule_binop "minusInteger" minusIntegerName (-),
rule_binop "timesInteger" timesIntegerName (*),
rule_unop "negateInteger" negateIntegerName negate,
rule_binop_Prim "eqInteger#" eqIntegerPrimName (==),
rule_binop_Prim "neqInteger#" neqIntegerPrimName (/=),
rule_unop "absInteger" absIntegerName abs,
rule_unop "signumInteger" signumIntegerName signum,
rule_binop_Prim "leInteger#" leIntegerPrimName (<=),
rule_binop_Prim "gtInteger#" gtIntegerPrimName (>),
rule_binop_Prim "ltInteger#" ltIntegerPrimName (<),
rule_binop_Prim "geInteger#" geIntegerPrimName (>=),
rule_binop_Ordering "compareInteger" compareIntegerName compare,
rule_encodeFloat "encodeFloatInteger" encodeFloatIntegerName mkFloatLitFloat,
rule_convert "floatFromInteger" floatFromIntegerName (\_ -> mkFloatLitFloat),
rule_encodeFloat "encodeDoubleInteger" encodeDoubleIntegerName mkDoubleLitDouble,
rule_decodeDouble "decodeDoubleInteger" decodeDoubleIntegerName,
rule_convert "doubleFromInteger" doubleFromIntegerName (\_ -> mkDoubleLitDouble),
rule_rationalTo "rationalToFloat" rationalToFloatName mkFloatExpr,
rule_rationalTo "rationalToDouble" rationalToDoubleName mkDoubleExpr,
rule_binop "gcdInteger" gcdIntegerName gcd,
rule_binop "lcmInteger" lcmIntegerName lcm,
rule_binop "andInteger" andIntegerName (.&.),
rule_binop "orInteger" orIntegerName (.|.),
rule_binop "xorInteger" xorIntegerName xor,
rule_unop "complementInteger" complementIntegerName complement,
rule_Int_binop "shiftLInteger" shiftLIntegerName shiftL,
rule_Int_binop "shiftRInteger" shiftRIntegerName shiftR,
rule_bitInteger "bitInteger" bitIntegerName,
-- See Note [Integer division constant folding] in libraries/base/GHC/Real.hs
rule_divop_one "quotInteger" quotIntegerName quot,
rule_divop_one "remInteger" remIntegerName rem,
rule_divop_one "divInteger" divIntegerName div,
rule_divop_one "modInteger" modIntegerName mod,
rule_divop_both "divModInteger" divModIntegerName divMod,
rule_divop_both "quotRemInteger" quotRemIntegerName quotRem,
-- These rules below don't actually have to be built in, but if we
-- put them in the Haskell source then we'd have to duplicate them
-- between all Integer implementations
rule_XToIntegerToX "smallIntegerToInt" integerToIntName smallIntegerName,
rule_XToIntegerToX "wordToIntegerToWord" integerToWordName wordToIntegerName,
rule_XToIntegerToX "int64ToIntegerToInt64" integerToInt64Name int64ToIntegerName,
rule_XToIntegerToX "word64ToIntegerToWord64" integerToWord64Name word64ToIntegerName,
rule_smallIntegerTo "smallIntegerToWord" integerToWordName Int2WordOp,
rule_smallIntegerTo "smallIntegerToFloat" floatFromIntegerName Int2FloatOp,
rule_smallIntegerTo "smallIntegerToDouble" doubleFromIntegerName Int2DoubleOp
]
where rule_convert str name convert
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_Integer_convert convert }
rule_IntToInteger str name
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_IntToInteger }
rule_WordToInteger str name
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_WordToInteger }
rule_Int64ToInteger str name
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_Int64ToInteger }
rule_Word64ToInteger str name
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_Word64ToInteger }
rule_unop str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_Integer_unop op }
rule_bitInteger str name
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_IntToInteger_unop (bit . fromIntegral) }
rule_binop str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_Integer_binop op }
rule_divop_both str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_Integer_divop_both op }
rule_divop_one str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_Integer_divop_one op }
rule_Int_binop str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_Integer_Int_binop op }
rule_binop_Prim str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_Integer_binop_Prim op }
rule_binop_Ordering str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_Integer_binop_Ordering op }
rule_encodeFloat str name op
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_Integer_Int_encodeFloat op }
rule_decodeDouble str name
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_decodeDouble }
rule_XToIntegerToX str name toIntegerName
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_XToIntegerToX toIntegerName }
rule_smallIntegerTo str name primOp
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 1,
ru_try = match_smallIntegerTo primOp }
rule_rationalTo str name mkLit
= BuiltinRule { ru_name = fsLit str, ru_fn = name, ru_nargs = 2,
ru_try = match_rationalTo mkLit }
---------------------------------------------------
-- The rule is this:
-- unpackFoldrCString# "foo" c (unpackFoldrCString# "baz" c n)
-- = unpackFoldrCString# "foobaz" c n
match_append_lit :: [Expr CoreBndr] -> Maybe (Expr CoreBndr)
match_append_lit [Type ty1,
Lit (MachStr s1),
c1,
Var unpk `App` Type ty2
`App` Lit (MachStr s2)
`App` c2
`App` n
]
| unpk `hasKey` unpackCStringFoldrIdKey &&
c1 `cheapEqExpr` c2
= ASSERT( ty1 `eqType` ty2 )
Just (Var unpk `App` Type ty1
`App` Lit (MachStr (s1 `BS.append` s2))
`App` c1
`App` n)
match_append_lit _ = Nothing
---------------------------------------------------
-- The rule is this:
-- eqString (unpackCString# (Lit s1)) (unpackCString# (Lit s2)) = s1==s2
match_eq_string :: DynFlags -> [Expr CoreBndr] -> Maybe (Expr CoreBndr)
match_eq_string _ [Var unpk1 `App` Lit (MachStr s1),
Var unpk2 `App` Lit (MachStr s2)]
| unpk1 `hasKey` unpackCStringIdKey,
unpk2 `hasKey` unpackCStringIdKey
= Just (if s1 == s2 then trueValBool else falseValBool)
match_eq_string _ _ = Nothing
---------------------------------------------------
-- The rule is this:
-- inline f_ty (f a b c) = <f's unfolding> a b c
-- (if f has an unfolding, EVEN if it's a loop breaker)
--
-- It's important to allow the argument to 'inline' to have args itself
-- (a) because it's more forgiving to allow the programmer to write
-- inline f a b c
-- or inline (f a b c)
-- (b) because a polymorphic f will get a type argument that the
-- programmer can't avoid
--
-- Also, don't forget about 'inline's type argument!
match_inline :: [Expr CoreBndr] -> Maybe (Expr CoreBndr)
match_inline (Type _ : e : _)
| (Var f, args1) <- collectArgs e,
Just unf <- maybeUnfoldingTemplate (realIdUnfolding f)
-- Ignore the IdUnfoldingFun here!
= Just (mkApps unf args1)
match_inline _ = Nothing
-- See Note [magicDictId magic] in `basicTypes/MkId.hs`
-- for a description of what is going on here.
match_magicDict :: [Expr CoreBndr] -> Maybe (Expr CoreBndr)
match_magicDict [Type _, Var wrap `App` Type a `App` Type _ `App` f, x, y ]
| Just (fieldTy, _) <- splitFunTy_maybe $ dropForAlls $ idType wrap
, Just (dictTy, _) <- splitFunTy_maybe fieldTy
, Just dictTc <- tyConAppTyCon_maybe dictTy
, Just (_,_,co) <- unwrapNewTyCon_maybe dictTc
= Just
$ f `App` Cast x (mkSymCo (mkUnbranchedAxInstCo Representational co [a]))
`App` y
match_magicDict _ = Nothing
-------------------------------------------------
-- Integer rules
-- smallInteger (79::Int#) = 79::Integer
-- wordToInteger (79::Word#) = 79::Integer
-- Similarly Int64, Word64
match_IntToInteger :: RuleFun
match_IntToInteger = match_IntToInteger_unop id
match_WordToInteger :: RuleFun
match_WordToInteger _ id_unf id [xl]
| Just (MachWord x) <- exprIsLiteral_maybe id_unf xl
= case idType id of
FunTy _ integerTy ->
Just (Lit (LitInteger x integerTy))
_ ->
panic "match_WordToInteger: Id has the wrong type"
match_WordToInteger _ _ _ _ = Nothing
match_Int64ToInteger :: RuleFun
match_Int64ToInteger _ id_unf id [xl]
| Just (MachInt64 x) <- exprIsLiteral_maybe id_unf xl
= case idType id of
FunTy _ integerTy ->
Just (Lit (LitInteger x integerTy))
_ ->
panic "match_Int64ToInteger: Id has the wrong type"
match_Int64ToInteger _ _ _ _ = Nothing
match_Word64ToInteger :: RuleFun
match_Word64ToInteger _ id_unf id [xl]
| Just (MachWord64 x) <- exprIsLiteral_maybe id_unf xl
= case idType id of
FunTy _ integerTy ->
Just (Lit (LitInteger x integerTy))
_ ->
panic "match_Word64ToInteger: Id has the wrong type"
match_Word64ToInteger _ _ _ _ = Nothing
-------------------------------------------------
match_Integer_convert :: Num a
=> (DynFlags -> a -> Expr CoreBndr)
-> RuleFun
match_Integer_convert convert dflags id_unf _ [xl]
| Just (LitInteger x _) <- exprIsLiteral_maybe id_unf xl
= Just (convert dflags (fromInteger x))
match_Integer_convert _ _ _ _ _ = Nothing
match_Integer_unop :: (Integer -> Integer) -> RuleFun
match_Integer_unop unop _ id_unf _ [xl]
| Just (LitInteger x i) <- exprIsLiteral_maybe id_unf xl
= Just (Lit (LitInteger (unop x) i))
match_Integer_unop _ _ _ _ _ = Nothing
{- Note [Rewriting bitInteger]
For most types the bitInteger operation can be implemented in terms of shifts.
The integer-gmp package, however, can do substantially better than this if
allowed to provide its own implementation. However, in so doing it previously lost
constant-folding (see Trac #8832). The bitInteger rule above provides constant folding
specifically for this function.
There is, however, a bit of trickiness here when it comes to ranges. While the
AST encodes all integers (even MachInts) as Integers, `bit` expects the bit
index to be given as an Int. Hence we coerce to an Int in the rule definition.
This will behave a bit funny for constants larger than the word size, but the user
should expect some funniness given that they will have at the very least ignored a
warning in this case.
-}
match_IntToInteger_unop :: (Integer -> Integer) -> RuleFun
match_IntToInteger_unop unop _ id_unf fn [xl]
| Just (MachInt x) <- exprIsLiteral_maybe id_unf xl
= case idType fn of
FunTy _ integerTy ->
Just (Lit (LitInteger (unop x) integerTy))
_ ->
panic "match_IntToInteger_unop: Id has the wrong type"
match_IntToInteger_unop _ _ _ _ _ = Nothing
match_Integer_binop :: (Integer -> Integer -> Integer) -> RuleFun
match_Integer_binop binop _ id_unf _ [xl,yl]
| Just (LitInteger x i) <- exprIsLiteral_maybe id_unf xl
, Just (LitInteger y _) <- exprIsLiteral_maybe id_unf yl
= Just (Lit (LitInteger (x `binop` y) i))
match_Integer_binop _ _ _ _ _ = Nothing
-- This helper is used for the quotRem and divMod functions
match_Integer_divop_both
:: (Integer -> Integer -> (Integer, Integer)) -> RuleFun
match_Integer_divop_both divop _ id_unf _ [xl,yl]
| Just (LitInteger x t) <- exprIsLiteral_maybe id_unf xl
, Just (LitInteger y _) <- exprIsLiteral_maybe id_unf yl
, y /= 0
, (r,s) <- x `divop` y
= Just $ mkConApp (tupleDataCon Unboxed 2)
[Type t,
Type t,
Lit (LitInteger r t),
Lit (LitInteger s t)]
match_Integer_divop_both _ _ _ _ _ = Nothing
-- This helper is used for the quot and rem functions
match_Integer_divop_one :: (Integer -> Integer -> Integer) -> RuleFun
match_Integer_divop_one divop _ id_unf _ [xl,yl]
| Just (LitInteger x i) <- exprIsLiteral_maybe id_unf xl
, Just (LitInteger y _) <- exprIsLiteral_maybe id_unf yl
, y /= 0
= Just (Lit (LitInteger (x `divop` y) i))
match_Integer_divop_one _ _ _ _ _ = Nothing
match_Integer_Int_binop :: (Integer -> Int -> Integer) -> RuleFun
match_Integer_Int_binop binop _ id_unf _ [xl,yl]
| Just (LitInteger x i) <- exprIsLiteral_maybe id_unf xl
, Just (MachInt y) <- exprIsLiteral_maybe id_unf yl
= Just (Lit (LitInteger (x `binop` fromIntegral y) i))
match_Integer_Int_binop _ _ _ _ _ = Nothing
match_Integer_binop_Prim :: (Integer -> Integer -> Bool) -> RuleFun
match_Integer_binop_Prim binop dflags id_unf _ [xl, yl]
| Just (LitInteger x _) <- exprIsLiteral_maybe id_unf xl
, Just (LitInteger y _) <- exprIsLiteral_maybe id_unf yl
= Just (if x `binop` y then trueValInt dflags else falseValInt dflags)
match_Integer_binop_Prim _ _ _ _ _ = Nothing
match_Integer_binop_Ordering :: (Integer -> Integer -> Ordering) -> RuleFun
match_Integer_binop_Ordering binop _ id_unf _ [xl, yl]
| Just (LitInteger x _) <- exprIsLiteral_maybe id_unf xl
, Just (LitInteger y _) <- exprIsLiteral_maybe id_unf yl
= Just $ case x `binop` y of
LT -> ltVal
EQ -> eqVal
GT -> gtVal
match_Integer_binop_Ordering _ _ _ _ _ = Nothing
match_Integer_Int_encodeFloat :: RealFloat a
=> (a -> Expr CoreBndr)
-> RuleFun
match_Integer_Int_encodeFloat mkLit _ id_unf _ [xl,yl]
| Just (LitInteger x _) <- exprIsLiteral_maybe id_unf xl
, Just (MachInt y) <- exprIsLiteral_maybe id_unf yl
= Just (mkLit $ encodeFloat x (fromInteger y))
match_Integer_Int_encodeFloat _ _ _ _ _ = Nothing
---------------------------------------------------
-- constant folding for Float/Double
--
-- This turns
-- rationalToFloat n d
-- into a literal Float, and similarly for Doubles.
--
-- it's important to not match d == 0, because that may represent a
-- literal "0/0" or similar, and we can't produce a literal value for
-- NaN or +-Inf
match_rationalTo :: RealFloat a
=> (a -> Expr CoreBndr)
-> RuleFun
match_rationalTo mkLit _ id_unf _ [xl, yl]
| Just (LitInteger x _) <- exprIsLiteral_maybe id_unf xl
, Just (LitInteger y _) <- exprIsLiteral_maybe id_unf yl
, y /= 0
= Just (mkLit (fromRational (x % y)))
match_rationalTo _ _ _ _ _ = Nothing
match_decodeDouble :: RuleFun
match_decodeDouble _ id_unf fn [xl]
| Just (MachDouble x) <- exprIsLiteral_maybe id_unf xl
= case idType fn of
FunTy _ (TyConApp _ [integerTy, intHashTy]) ->
case decodeFloat (fromRational x :: Double) of
(y, z) ->
Just $ mkConApp (tupleDataCon Unboxed 2)
[Type integerTy,
Type intHashTy,
Lit (LitInteger y integerTy),
Lit (MachInt (toInteger z))]
_ ->
panic "match_decodeDouble: Id has the wrong type"
match_decodeDouble _ _ _ _ = Nothing
match_XToIntegerToX :: Name -> RuleFun
match_XToIntegerToX n _ _ _ [App (Var x) y]
| idName x == n
= Just y
match_XToIntegerToX _ _ _ _ _ = Nothing
match_smallIntegerTo :: PrimOp -> RuleFun
match_smallIntegerTo primOp _ _ _ [App (Var x) y]
| idName x == smallIntegerName
= Just $ App (Var (mkPrimOpId primOp)) y
match_smallIntegerTo _ _ _ _ _ = Nothing
|
ghc-android/ghc
|
compiler/prelude/PrelRules.hs
|
bsd-3-clause
| 58,648
| 825
| 14
| 17,593
| 11,993
| 6,516
| 5,477
| 869
| 9
|
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Parsing.Attoparsec (
parseQuote
) where
import Parsing.Base hiding (asks, bids)
import Control.Monad
import Control.Applicative
import qualified Data.Time as T
import qualified Data.ByteString.Char8 as BS
import qualified Data.Attoparsec.ByteString.Char8 as AP
import Debug.Trace
parseQuote (hdr, bs) = AP.parseOnly (quote (packetAcceptTimeFromHeader hdr)) bs
nDigitNumber n = AP.take n
>>= either fail return . AP.parseOnly (AP.decimal <* AP.endOfInput)
quote ptime = do
AP.take 42
AP.string quoteHeader
  -- [note] the following could be used instead if the header had variable
  -- locations, which is not the case for the sample input; a better
  -- solution without backtracking would be preferred:
-- AP.manyTill AP.anyChar (AP.string quoteHeader)
issueCode <- AP.take 12
AP.take 12
bs <- bids
AP.take 7
as <- asks
AP.take 50
aToD <- acceptTimeOfDay
case extrapolateAcceptTime ptime aToD of
Nothing -> fail "cannot parse time"
Just t -> return $ Quote t ptime issueCode bs as
-- partly applicative version of `quote`
-- [question] why is this slightly slower than the
-- fully monadic version? is it due to tuple packing?
-- quote ptime = do
-- (issueCode, bs, as) <- (,,)
-- <$ AP.take 42
-- <* AP.string quoteHeader
-- <*> AP.take 12
-- <* AP.take 12
-- <*> bids
-- <* AP.take 7
-- <*> asks
-- <* AP.take 50
-- aToD <- acceptTimeOfDay
-- case extrapolateAcceptTime ptime aToD of
-- Nothing -> fail "cannot parse time"
-- Just t ->
-- return $ Quote t ptime issueCode bs as
bids = AP.count 5 (Bid <$> nDigitNumber 5 <*> nDigitNumber 7)
-- [todo] verify order
asks = reverse <$> bids
acceptTimeOfDay = do
hh <- nDigitNumber 2
mm <- nDigitNumber 2
ss <- nDigitNumber 2
uu <- nDigitNumber 2
let pico = fromRational $ fromIntegral ss + fromIntegral uu / 100
return $ T.TimeOfDay hh mm pico
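-- Hedged sketch (added; checkNDigit is hypothetical and not part of the
-- original module): nDigitNumber in isolation parses exactly n digits and
-- fails on anything else.
checkNDigit :: Either String Int
checkNDigit = AP.parseOnly (nDigitNumber 2) (BS.pack "07") -- expected: Right 7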
|
iteloo/tsuru-sample
|
src/Parsing/Attoparsec.hs
|
bsd-3-clause
| 1,997
| 0
| 13
| 426
| 419
| 218
| 201
| 36
| 2
|
module Handler.ListsSpec (spec) where
import TestImport
spec :: Spec
spec = withApp $ do
describe "getListsR" $ do
error "Spec not implemented: getListsR"
|
Ulrar/cstodo
|
test/Handler/ListsSpec.hs
|
bsd-3-clause
| 171
| 0
| 11
| 39
| 44
| 23
| 21
| 6
| 1
|
module Main (main)
where
-- nice example, using many of the same libraries, for writing specs
-- https://github.com/snoyberg/conduit/blob/master/attoparsec-conduit/test/main.hs
-- import Debug.Trace (trace)
import Test.Hspec (hspec, describe, it, shouldBe, shouldSatisfy, Spec, Expectation)
import Test.HUnit (assertFailure)
import qualified ZMQHS as Z
import qualified Data.Attoparsec.ByteString as AP
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Blaze as BL
import Control.Monad.Trans.Resource (runExceptionT)
import Data.Conduit (($$), ($=), await)
main :: IO ()
main = do
hspec $ (describe "complete frame parsing" $ do
mapM_ completeFrameSpec completeParses)
>>
(describe "messages" $ do
mapM_ messageSpec completeMessages)
>>
(describe "greeting" identitySpec)
--------------------------------
-- CONNECTION SPECS
--------------------------------
--greetingSpec :: Spec
--greetingSpec = do
-- it "greets" $ do
-- let inputs = map B.pack [[2,0,65], [2,1,66]]
-- inputss = CL.sourceList inputs
-- ident = B.pack [65]
-- payload = B.pack [66]
-- ea <- runExceptionT $ inputss $$ await
-- case ea of
-- Right (Just val) -> val `shouldBe` (B.pack [2,0,65])
-- Left _ -> error "should not be seen"
identitySpec :: Spec
identitySpec = do
it "packs up an Anonymous identity and ships it out" $ do
ea <- runExceptionT $ (Z.yieldIdentity Z.Anonymous $= BL.builderToByteString) $$ CL.consume
case ea of
Left _ -> assertFailure "should not fail"
Right list -> list `shouldBe` ([B.pack [1,0]])
it "packs up a named identity and ships it out" $ do
ea <- runExceptionT $ (Z.yieldIdentity (Z.Named (B8.pack "Billy")) $= BL.builderToByteString) $$ CL.consume
case ea of
Left _ -> assertFailure "should not fail"
Right list -> list `shouldBe` ([B.concat [B.pack [6,0], B8.pack "Billy"]])
--------------------------------
-- MESSAGE SPECS
--------------------------------
messageSpec :: (String,[B.ByteString],Z.Message) -> Spec
messageSpec (msg,frames,expected) = it msg (messageExample frames expected)
messageExample :: [B.ByteString] -> Z.Message -> Expectation
messageExample frames expected = case AP.parse Z.getMessage (B.concat frames) of
AP.Done leftover res -> (res `shouldBe` expected) >> (leftover `shouldSatisfy` B.null)
AP.Partial _ -> assertFailure "should not get a partial result"
AP.Fail _ _ _ -> assertFailure "should not get a failure from parsing"
completeMessages :: [(String,[B.ByteString],Z.Message)]
completeMessages = [("one part", [B.pack [2,0,65]], Z.Message [B8.pack "A"]),
("two part", [B.pack [2,1,65],B.pack [2,0,66]], Z.Message [B8.pack "A",B8.pack "B"])]
--------------------------------
-- FRAME SPECS
--------------------------------
completeFrameSpec :: (String,B.ByteString,Z.Frame) -> Spec
completeFrameSpec (msg,frame,expected) = it msg (frameExample frame expected)
frameExample :: B.ByteString -> Z.Frame -> Expectation
frameExample bytes expected = case AP.parse Z.frameParser bytes of
AP.Done leftover res -> (res `shouldBe` expected) >> (leftover `shouldSatisfy` B.null)
AP.Partial _ -> assertFailure "should not get a partial result"
AP.Fail _ _ _ -> assertFailure "should not get a failure from parsing"
completeParses :: [(String, B.ByteString, Z.Frame)]
completeParses = [("complete with payload", B.pack [2,0,65], Z.FinalFrame $ B8.pack "A"),
("incomplete with payload", B.pack [2,1,65], Z.MoreFrame $ B8.pack "A"),
("handshake w/o identity", B.pack [0x01,0x7E], Z.FinalFrame $ B8.pack "" ),
("handshake w/ identity", handshake_w_identity, Z.FinalFrame $ B8.pack "ASDFASDFASDFASDFASDFASDFASDF")]
where handshake_w_identity = B8.concat [B.pack [29,0x7E] , B8.pack "ASDFASDFASDFASDFASDFASDFASDF"]
|
xrl/zmqhs
|
test/unit/MessageTest.hs
|
bsd-3-clause
| 4,124
| 0
| 20
| 853
| 1,119
| 626
| 493
| 55
| 3
|
{-# LANGUAGE NoImplicitPrelude #-}
-- |
-- Module: $HEADER$
-- Description: Concrete proxies for types from Data.Int
-- Copyright: (c) 2014 Peter Trsko
-- License: BSD3
--
-- Maintainer: peter.trsko@gmail.com
-- Stability: experimental
-- Portability: NoImplicitPrelude
--
-- Concrete proxies for types from "Data.Int".
module Data.Proxy.Int
(
int
, int8
, int16
, int32
, int64
)
where
import Data.Int (Int, Int8, Int16, Int32, Int64)
import Data.Proxy (Proxy(Proxy))
-- | Type proxy for 'Int'.
int :: Proxy Int
int = Proxy
{-# INLINE int #-}
-- | Type proxy for 'Int8'.
int8 :: Proxy Int8
int8 = Proxy
{-# INLINE int8 #-}
-- | Type proxy for 'Int16'.
int16 :: Proxy Int16
int16 = Proxy
{-# INLINE int16 #-}
-- | Type proxy for 'Int32'.
int32 :: Proxy Int32
int32 = Proxy
{-# INLINE int32 #-}
-- | Type proxy for 'Int64'.
int64 :: Proxy Int64
int64 = Proxy
{-# INLINE int64 #-}
|
trskop/type-proxies
|
src/Data/Proxy/Int.hs
|
bsd-3-clause
| 939
| 0
| 6
| 213
| 147
| 96
| 51
| 25
| 1
|
module Database.Migrate.Loader where
import Database.Migrate.Data
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Either
import Data.List (sort)
import Data.Text hiding (filter, length)
import System.FilePath
import System.Directory
import System.IO
find :: FilePath -> EitherT String IO Migrations
find b = liftIO (getDirectoryContents b) >>= \fs -> liftM sort (liftIO (migrationids b fs) >>=
mapM (\p ->
do downexists <- liftIO $ doesFileExist (b </> p <.> "down.sql")
unless downexists (left $ "no down.sql for migration [" ++ p ++ "]")
u <- liftIO . readFile $ b </> p <.> "up.sql"
d <- liftIO . readFile $ b </> p <.> "down.sql"
right (Migration (MigrationId . pack $ p) (pack u) (pack d) ))) >>= \ms -> return $ Migrations ms
migrationids :: FilePath -> [FilePath] -> IO [String]
migrationids b ps =
filterM (\p -> doesFileExist (b </> p)) ps >>= \files ->
((return . fmap dropExtensions)
(filter (\p -> takeExtensions p == ".up.sql") files))
readFile' :: FilePath -> IO String
readFile' p = withFile p ReadMode hGetContents' -- use the strict reader: plain hGetContents is lazy, so the handle would close before the contents are forced
hGetContents' :: Handle -> IO String
hGetContents' h = hGetContents h >>= \s -> length s `seq` return s
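-- (Added note) forcing `length s` before returning makes hGetContents' strict:
-- the whole file is read before the handle can be closed, which is what makes
-- it safe to combine with withFile in readFile' above.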
|
markhibberd/database-migrate
|
src/Database/Migrate/Loader.hs
|
bsd-3-clause
| 1,244
| 0
| 21
| 263
| 470
| 245
| 225
| 27
| 1
|
module TestUnionFind where
import Jade.UnionFindST
import Control.Monad
import Control.Monad.ST
import Data.Array.MArray
import Data.Array.ST
import Data.STRef
import Jade.Types hiding (ids)
import qualified Data.List as DL
import qualified Data.Map as DM
testComponents = do
let n1 = Node (0,0) (WireC (Wire (Coord5 0 0 Rot0 0 0) (Just (Signal (Just (SigSimple "in1")) Nothing Nothing))))
n2 = Node (0,0) (TermC (Terminal (Coord3 0 0 Rot0) (SigSimple "A")))
--n3 = Node (0,0) (PortC (Port (Coord3 0 0 Rot0) (Just (Signal (Just (SigSimple "in1")) Nothing Nothing))))
let edges1 = [Edge n1 n2]
edges2 = [Edge n2 n1]
comps1 = components edges1
comps2 = components edges2
print comps1 -- == comps2
print comps2
main = runST $ do
uf <- newUnionFind 10
unite uf 3 4 -- 0, 1, 2, {3, 4}, 5, 6, 7, 8, 9
unite uf 4 9 -- 0, 1, 2, {3, 4, 9}, 5, 6, 7, 8
unite uf 8 0 -- {0, 8}, 1, 2, {3, 4, 9}, 5, 6, 7, 8
unite uf 2 3 -- {0, 8}, 1, {2, 3, 4, 9}, 5, 6, 7
unite uf 5 6 -- {0, 8}, 1, {2, 3, 4, 9}, {5, 6}, 7
unite uf 5 9 -- {0, 8}, 1, {2, 3, 4, 5, 6, 9}, 7
unite uf 7 3 -- {0, 8}, 1, {2, 3, 4, 5, 6, 7, 9}
unite uf 4 8 -- 1, {0, 2, 3, 4, 5, 6, 7, 8, 9}
-- find uf 1 2 -- False
xs <- sequence [filterM (find uf x) [0..9] | x <- [0..9]]
return $ DL.nub xs
|
drhodes/jade2hdl
|
test/TestUnionFind.hs
|
bsd-3-clause
| 1,354
| 0
| 22
| 374
| 415
| 214
| 201
| 31
| 1
|
module Control.Concurrent.STM.TMonoid (TMonoid, writeTMonoid, readTMonoid, newTMonoid) where
import Control.Concurrent.STM
import Data.Monoid
import Control.Monad (when)
-- | a concurrent STM Monoid
data TMonoid m = TMonoid {
writeTMonoid :: m -> STM (), -- ^ mappend the value
readTMonoid :: STM m -- ^ peek the monoid and reset it
}
-- | Create a 'TMonoid' for a comparable 'Monoid'. A read blocks (retries) while the accumulated value is still 'mempty'; see the usage sketch after 'newTMonoid' below.
newTMonoid :: (Monoid m, Eq m)
=> STM (TMonoid m) -- ^ a delayed TMonoid
newTMonoid = do
x <- newTVar mempty -- the monoid
let
    write y = readTVar x >>= writeTVar x . (`mappend` y) -- mappend the new value onto the stored monoid
read' = do
y <- readTVar x
      when (y == mempty) retry -- block (retry) while nothing has been written yet
writeTVar x mempty -- reset the monoid
return y
return $ TMonoid write read'
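-- | A minimal usage sketch (added for illustration; 'exampleTMonoid' is not
-- part of the original module): append a few 'Sum' values, then read, which
-- also resets the accumulator back to 'mempty'.
exampleTMonoid :: IO ()
exampleTMonoid = do
  tm <- atomically (newTMonoid :: STM (TMonoid (Sum Int)))
  mapM_ (atomically . writeTMonoid tm . Sum) [1, 2, 3]
  total <- atomically (readTMonoid tm) -- Sum 6; a read retries while the value is still mempty
  print (getSum total)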
|
paolino/hiernotify
|
Control/Concurrent/STM/TMonoid.hs
|
bsd-3-clause
| 884
| 0
| 14
| 206
| 218
| 118
| 100
| 19
| 1
|
{-# LANGUAGE OverloadedStrings #-}
---------------------------------------------------------------------------
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <rx@a-rx.info>
-- Stability : experimental
-- Portability: non-portable
--
-- the wildcard name (defined as "_") in Pire's flavour of Pi-forall syntax
---------------------------------------------------------------------------
module Pire.Syntax.Wildcard where
import Data.Text as T
wildcardName :: T.Text
wildcardName = "_"
|
reuleaux/pire
|
src/Pire/Syntax/Wildcard.hs
|
bsd-3-clause
| 559
| 0
| 5
| 85
| 37
| 28
| 9
| 5
| 1
|
-- modularforms.hs
module Math.ModularForms where
import Math.MathsPrimitives (deriv, integ, partialSums, partialProducts)
import Math.ArithmeticFunctions
import Math.CombinatoricsCounting (bernoulliNumber)
import Math.QQ
import Math.PowerSeries
-- EISENSTEIN SERIES
-- Eisenstein series as power series in q = e^(2*pi*i*z)
eisensteinE k | even k = PS (1 : [multiplier * fromInteger (sigma (k-1) n) | n <- [1..]])
where multiplier = fromInteger (-2 * toInteger k) / bernoulliNumber k -- -2k/b_k
-- (because it is a series in q, not z, we can't directly test the modular transformation rules)
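-- Sanity check (added note; assumes bernoulliNumber uses the standard convention B_4 = -1/30):
-- for k = 4 the multiplier is -8 / (-1/30) = 240, which gives the classical expansion
-- E4 = 1 + 240*q + 2160*q^2 + 6720*q^3 + ...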
-- Where we have a sequence of power series converging, one coefficient at a time, to a limit,
-- we can construct the limit as the diagonal power series of the sequence
diagonalPS fs = PS (zipWith (\(PS as) i -> as !! i) fs [0..])
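-- The same idea on plain lists (added, hypothetical helper not used elsewhere):
-- if the n-th approximant is already correct in its first n+1 coefficients,
-- taking the n-th element of the n-th list yields the limit.
diagonalList :: [[a]] -> [a]
diagonalList xss = zipWith (!!) xss [0..]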
-- MODULAR DISCRIMINANT (DELTA)
-- the delta function is defined as delta(z) = g2(z)^3 - 27 g3(z)^2 = ((2*pi)^12 / 1728) (E4(z)^3 - E6(z)^2)
-- we omit the (2*pi)^12 factor - delta as power series in q
delta = 1/1728 * ( (eisensteinE 4)^3 - (eisensteinE 6)^2 )
-- Dedekind eta function == q^1/24 * product [1-q^n | n <- [1..] ]
-- we omit the q^1/24 term
dedekindEta =
let iterates = partialProducts [(1-t^n) | n <- [1..] ]
in diagonalPS (1 : iterates)
-- Koblitz p122
-- Alternative expression for delta' is dedekindEta^24 == q * product [(1-q^n)^24 | n <- [1..] ]
delta' = t * dedekindEta ^ 24
-- j function, but without the 1/q term
j = let j' = 1728 * t * (eisensteinE 4)^3 / ( (eisensteinE 4)^3 - (eisensteinE 6)^2 )
in (j' - 1) / t
-- RAMANUJAN TAU FUNCTION
-- delta/(2*pi)^12 is an OGF for the Ramanujan tau function
ramanujanTaus = fromOGF delta -- ignore the first coeff - tau(0) not defined
ramanujanTau n | n > 0 = (fromOGF delta) !! n
-- PARTITION FUNCTION
-- Apostol p94
-- sum p(n) t^n = product [1/(1-t^n) | n <- [1..] ]
partitionOGF =
let iterates = partialProducts [1/(1-t^n) | n <- [1..] ]
in diagonalPS (0 : iterates)
-- Note that this is almost the reciprocal of the Dedekind eta function, without the q^1/24 term. Hence, a faster way to calculate it is:
partitionOGF' = (t / delta) ^% (1/24) - 1
-- LAMBERT SERIES
-- See http://en.wikipedia.org/wiki/Lambert_series
-- Lambert Series
-- sum [a_n * q^n / (1-q^n) | n <- [1..] ] == sum [b_n * q^n | n <- [1..] ], where b_n = sum [a_d | d <- [1..n], d `divides` n]
-- express the Lambert series with coefficients a1,a2,... as a power series
-- note: Lambert series start from a1, not a0
lambertSeries as =
let iterates = partialSums (zipWith (*) as [t^n / (1 - t^n) | n <- [1..] ])
in diagonalPS (0 : iterates)
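-- Illustration (added, hypothetical helper not used elsewhere): the coefficient
-- identity behind the Lambert series, computed directly on a plain list of
-- coefficients a1, a2, ... : b_n = sum [ a_d | d divides n ].
lambertCoeff :: [Integer] -> Int -> Integer
lambertCoeff as n = sum [ as !! (d - 1) | d <- [1 .. n], n `mod` d == 0 ]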
-- Of particular interest is:
-- sum [n^a * q^n / (1-q^n) | n <- [1..] ] == sum [sigma a n * q^n | n <- [1..] ]
-- So this gives us another way to calculate the Eisenstein series
-- See http://en.wikipedia.org/wiki/Eisenstein_series
-- Eisenstein series as Lambert series
eisensteinE' k | even k = 1 + multiplier * lambertSeries [fromInteger (n^(k-1)) | n <- [1..] ]
where multiplier = PS [fromInteger (-2 * toInteger k) / bernoulliNumber k] -- -2k/b_k
-- Note that in the wikipedia page for Eisenstein series, eisensteinE' 2, 4, 6 are referred to as L(q), M(q), N(q) respectively
-- WEIERSTRASS P FUNCTION
-- Experimental!!
-- p'(z)^2 == 4 p(z)^3 - g2 p(z) - g3
-- Put w = p(z)
-- => (dw/dz)^2 = 4 w^3 - g2 w - g3
-- => z == integ dw / sqrt (4 w^3 - g2 w - g3)
-- => w = p(z) = inverse of the above integral
weierstrassP g2 g3 = inversePS (integ (1 / sqrt (4 * t^3 - g2 * t - g3)))
-- !! Only works if g3 == -1
-- !! We could do better by extending PowerSeries.sqrt to cope with a0 being a perfect square
-- !! (Or even further, by using PowerSeries over QQ[i], with sqrt defined appropriately)
|
nfjinjing/bench-euler
|
src/Math/ModularForms.hs
|
bsd-3-clause
| 3,879
| 6
| 16
| 863
| 764
| 416
| 348
| 28
| 1
|
-- | Higher-level functions working with GState DB.
module Pos.GState.GState
( prepareGStateDB
) where
import Universum
import Pos.Chain.Block (HeaderHash)
import Pos.Chain.Genesis as Genesis (Config (..),
configHeavyDelegation, configVssCerts)
import Pos.DB.Block (initGStateBlockExtra, upgradeLastSlotsVersion)
import Pos.DB.Class (MonadDB)
import Pos.DB.Delegation (initGStateDlg)
import Pos.DB.GState.Common (initGStateCommon, isInitialized,
setInitialized)
import Pos.DB.Ssc (initSscDB)
import Pos.DB.Txp (initGStateStakes, initGStateUtxo)
import Pos.DB.Update (initGStateUS)
-- | Put missing initial data into GState DB.
prepareGStateDB ::
forall m.
( MonadIO m
, MonadDB m
)
=> Genesis.Config
-> HeaderHash
-> m ()
prepareGStateDB genesisConfig initialTip =
ifM isInitialized
(upgradeLastSlotsVersion genesisConfig)
initializeGStateDb
where
genesisData = configGenesisData genesisConfig
initializeGStateDb = do
initGStateCommon initialTip
initGStateUtxo genesisData
initSscDB $ configVssCerts genesisConfig
initGStateStakes genesisData
initGStateUS genesisConfig
initGStateDlg $ configHeavyDelegation genesisConfig
initGStateBlockExtra (configGenesisHash genesisConfig) initialTip
setInitialized
-- The following is not used in the project yet. To be added back at a
-- later stage when needed.
{-
usingGStateSnapshot :: (MonadRealDB ctx m, MonadMask m) => m a -> m a
usingGStateSnapshot action = do
db <- _gStateDB <$> getNodeDBs
let readOpts = rocksReadOpts db
usingSnapshot db (\(Snapshot sn) ->
usingReadOptions readOpts {Rocks.useSnapshot = Just sn} gStateDB action)
-}
|
input-output-hk/pos-haskell-prototype
|
lib/src/Pos/GState/GState.hs
|
mit
| 1,900
| 0
| 11
| 498
| 283
| 159
| 124
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Redshift.PurchaseReservedNodeOffering
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Allows you to purchase reserved nodes. Amazon Redshift offers a
-- predefined set of reserved node offerings. You can purchase one or more
-- of the offerings. You can call the DescribeReservedNodeOfferings API to
-- obtain the available reserved node offerings. You can call this API by
-- providing a specific reserved node offering and the number of nodes you
-- want to reserve.
--
-- For more information about reserved node offerings, go to
-- <http://docs.aws.amazon.com/redshift/latest/mgmt/purchase-reserved-node-instance.html Purchasing Reserved Nodes>
-- in the /Amazon Redshift Cluster Management Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/redshift/latest/APIReference/API_PurchaseReservedNodeOffering.html AWS API Reference> for PurchaseReservedNodeOffering.
module Network.AWS.Redshift.PurchaseReservedNodeOffering
(
-- * Creating a Request
purchaseReservedNodeOffering
, PurchaseReservedNodeOffering
-- * Request Lenses
, prnoNodeCount
, prnoReservedNodeOfferingId
-- * Destructuring the Response
, purchaseReservedNodeOfferingResponse
, PurchaseReservedNodeOfferingResponse
-- * Response Lenses
, prnorsReservedNode
, prnorsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Redshift.Types
import Network.AWS.Redshift.Types.Product
import Network.AWS.Request
import Network.AWS.Response
-- |
--
-- /See:/ 'purchaseReservedNodeOffering' smart constructor.
data PurchaseReservedNodeOffering = PurchaseReservedNodeOffering'
{ _prnoNodeCount :: !(Maybe Int)
, _prnoReservedNodeOfferingId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'PurchaseReservedNodeOffering' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prnoNodeCount'
--
-- * 'prnoReservedNodeOfferingId'
purchaseReservedNodeOffering
:: Text -- ^ 'prnoReservedNodeOfferingId'
-> PurchaseReservedNodeOffering
purchaseReservedNodeOffering pReservedNodeOfferingId_ =
PurchaseReservedNodeOffering'
{ _prnoNodeCount = Nothing
, _prnoReservedNodeOfferingId = pReservedNodeOfferingId_
}
-- | The number of reserved nodes you want to purchase.
--
-- Default: '1'
prnoNodeCount :: Lens' PurchaseReservedNodeOffering (Maybe Int)
prnoNodeCount = lens _prnoNodeCount (\ s a -> s{_prnoNodeCount = a});
-- | The unique identifier of the reserved node offering you want to
-- purchase.
prnoReservedNodeOfferingId :: Lens' PurchaseReservedNodeOffering Text
prnoReservedNodeOfferingId = lens _prnoReservedNodeOfferingId (\ s a -> s{_prnoReservedNodeOfferingId = a});
instance AWSRequest PurchaseReservedNodeOffering
where
type Rs PurchaseReservedNodeOffering =
PurchaseReservedNodeOfferingResponse
request = postQuery redshift
response
= receiveXMLWrapper
"PurchaseReservedNodeOfferingResult"
(\ s h x ->
PurchaseReservedNodeOfferingResponse' <$>
(x .@? "ReservedNode") <*> (pure (fromEnum s)))
instance ToHeaders PurchaseReservedNodeOffering where
toHeaders = const mempty
instance ToPath PurchaseReservedNodeOffering where
toPath = const "/"
instance ToQuery PurchaseReservedNodeOffering where
toQuery PurchaseReservedNodeOffering'{..}
= mconcat
["Action" =:
("PurchaseReservedNodeOffering" :: ByteString),
"Version" =: ("2012-12-01" :: ByteString),
"NodeCount" =: _prnoNodeCount,
"ReservedNodeOfferingId" =:
_prnoReservedNodeOfferingId]
-- | /See:/ 'purchaseReservedNodeOfferingResponse' smart constructor.
data PurchaseReservedNodeOfferingResponse = PurchaseReservedNodeOfferingResponse'
{ _prnorsReservedNode :: !(Maybe ReservedNode)
, _prnorsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'PurchaseReservedNodeOfferingResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prnorsReservedNode'
--
-- * 'prnorsResponseStatus'
purchaseReservedNodeOfferingResponse
:: Int -- ^ 'prnorsResponseStatus'
-> PurchaseReservedNodeOfferingResponse
purchaseReservedNodeOfferingResponse pResponseStatus_ =
PurchaseReservedNodeOfferingResponse'
{ _prnorsReservedNode = Nothing
, _prnorsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
prnorsReservedNode :: Lens' PurchaseReservedNodeOfferingResponse (Maybe ReservedNode)
prnorsReservedNode = lens _prnorsReservedNode (\ s a -> s{_prnorsReservedNode = a});
-- | The response status code.
prnorsResponseStatus :: Lens' PurchaseReservedNodeOfferingResponse Int
prnorsResponseStatus = lens _prnorsResponseStatus (\ s a -> s{_prnorsResponseStatus = a});
|
olorin/amazonka
|
amazonka-redshift/gen/Network/AWS/Redshift/PurchaseReservedNodeOffering.hs
|
mpl-2.0
| 5,687
| 0
| 13
| 1,057
| 640
| 387
| 253
| 84
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-| Implementation of the Ganeti data collector types.
-}
{-
Copyright (C) 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.DataCollectors.Types
( addStatus
, DCCategory(..)
, DCKind(..)
, DCReport(..)
, DCStatus(..)
, DCStatusCode(..)
, DCVersion(..)
, CollectorData(..)
, CollectorMap
, buildReport
, mergeStatuses
, getCategoryName
, ReportBuilder(..)
, DataCollector(..)
) where
import Data.Char
import Data.Ratio
import qualified Data.Map as Map
import qualified Data.Sequence as Seq
import System.Time (ClockTime(..))
import Text.JSON
import Ganeti.Constants as C
import Ganeti.Objects (ConfigData)
import Ganeti.THH
import Ganeti.Utils (getCurrentTimeUSec)
-- | The possible classes a data collector can belong to.
data DCCategory = DCInstance | DCStorage | DCDaemon | DCHypervisor
deriving (Show, Eq, Read, Enum, Bounded)
-- | Get the category name and return it as a string.
getCategoryName :: DCCategory -> String
getCategoryName dcc = map toLower . drop 2 . show $ dcc
categoryNames :: Map.Map String DCCategory
categoryNames =
let l = [minBound ..]
in Map.fromList $ zip (map getCategoryName l) l
-- | The JSON instance for DCCategory.
instance JSON DCCategory where
showJSON = showJSON . getCategoryName
readJSON (JSString s) =
let s' = fromJSString s
in case Map.lookup s' categoryNames of
Just category -> Ok category
Nothing -> fail $ "Invalid category name " ++ s' ++ " for type\
\ DCCategory"
readJSON v = fail $ "Invalid JSON value " ++ show v ++ " for type DCCategory"
-- | The possible status codes of a data collector.
data DCStatusCode = DCSCOk -- ^ Everything is OK
| DCSCTempBad -- ^ Bad, but being automatically fixed
| DCSCUnknown -- ^ Unable to determine the status
| DCSCBad -- ^ Bad. External intervention required
deriving (Show, Eq, Ord)
-- | The JSON instance for CollectorStatus.
instance JSON DCStatusCode where
showJSON DCSCOk = showJSON (0 :: Int)
showJSON DCSCTempBad = showJSON (1 :: Int)
showJSON DCSCUnknown = showJSON (2 :: Int)
showJSON DCSCBad = showJSON (4 :: Int)
readJSON = error "JSON read instance not implemented for type DCStatusCode"
-- | The status of a \"status reporting data collector\".
$(buildObject "DCStatus" "dcStatus"
[ simpleField "code" [t| DCStatusCode |]
, simpleField "message" [t| String |]
])
-- | The type representing the kind of the collector.
data DCKind = DCKPerf -- ^ Performance reporting collector
| DCKStatus -- ^ Status reporting collector
deriving (Show, Eq)
-- | The JSON instance for CollectorKind.
instance JSON DCKind where
showJSON DCKPerf = showJSON (0 :: Int)
showJSON DCKStatus = showJSON (1 :: Int)
readJSON (JSRational _ x) =
if denominator x /= 1
then fail $ "Invalid JSON value " ++ show x ++ " for type DCKind"
else
let x' = (fromIntegral . numerator $ x) :: Int
in if x' == 0 then Ok DCKPerf
else if x' == 1 then Ok DCKStatus
else fail $ "Invalid JSON value " ++ show x' ++ " for type DCKind"
readJSON v = fail $ "Invalid JSON value " ++ show v ++ " for type DCKind"
-- | Type representing the version number of a data collector.
data DCVersion = DCVerBuiltin | DCVersion String deriving (Show, Eq)
-- | The JSON instance for DCVersion.
instance JSON DCVersion where
showJSON DCVerBuiltin = showJSON C.builtinDataCollectorVersion
showJSON (DCVersion v) = showJSON v
readJSON (JSString s) =
if fromJSString s == C.builtinDataCollectorVersion
then Ok DCVerBuiltin else Ok . DCVersion $ fromJSString s
readJSON v = fail $ "Invalid JSON value " ++ show v ++ " for type DCVersion"
-- | Type for the value field of the above map.
data CollectorData = CPULoadData (Seq.Seq (ClockTime, [Int]))
-- | Type for the map storing the data of the stateful DataCollectors.
type CollectorMap = Map.Map String CollectorData
-- | This is the format of the report produced by each data collector.
$(buildObject "DCReport" "dcReport"
[ simpleField "name" [t| String |]
, simpleField "version" [t| DCVersion |]
, simpleField "format_version" [t| Int |]
, simpleField "timestamp" [t| Integer |]
, optionalNullSerField $
simpleField "category" [t| DCCategory |]
, simpleField "kind" [t| DCKind |]
, simpleField "data" [t| JSValue |]
])
-- | Add the data collector status information to the JSON representation of
-- the collector data.
addStatus :: DCStatus -> JSValue -> JSValue
addStatus dcStatus (JSObject obj) =
makeObj $ ("status", showJSON dcStatus) : fromJSObject obj
addStatus dcStatus value = makeObj
[ ("status", showJSON dcStatus)
, ("data", value)
]
-- | Helper function for merging statuses.
mergeStatuses :: (DCStatusCode, String) -> (DCStatusCode, [String])
-> (DCStatusCode, [String])
mergeStatuses (newStat, newStr) (storedStat, storedStrs) =
let resStat = max newStat storedStat
resStrs =
if newStr == ""
then storedStrs
else storedStrs ++ [newStr]
in (resStat, resStrs)
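-- Worked example (added note): since the derived 'Ord' follows declaration
-- order (DCSCOk < DCSCTempBad < DCSCUnknown < DCSCBad),
-- mergeStatuses (DCSCBad, "disk down") (DCSCOk, []) == (DCSCBad, ["disk down"]).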
-- | Utility function for building a report automatically adding the current
-- timestamp (rounded up to seconds).
-- If the version is not specified, it will be set to the value indicating
-- a builtin collector.
buildReport :: String -> DCVersion -> Int -> Maybe DCCategory -> DCKind
-> JSValue -> IO DCReport
buildReport name version format_version category kind jsonData = do
usecs <- getCurrentTimeUSec
let timestamp = usecs * 1000 :: Integer
return $
DCReport name version format_version timestamp category kind
jsonData
-- | A report of a data collector might be stateful or stateless.
data ReportBuilder = StatelessR (IO DCReport)
| StatefulR (Maybe CollectorData -> IO DCReport)
type Name = String
-- | Type describing a data collector's basic information
data DataCollector = DataCollector
{ dName :: Name -- ^ Name of the data collector
  , dCategory :: Maybe DCCategory -- ^ Category (storage, instance, etc.)
-- of the collector
, dKind :: DCKind -- ^ Kind (performance or status reporting) of
-- the data collector
, dReport :: ReportBuilder -- ^ Report produced by the collector
, dUpdate :: Maybe (Maybe CollectorData -> IO CollectorData)
-- ^ Update operation for stateful collectors.
, dActive :: Name -> ConfigData -> Bool
-- ^ Checks if the collector applies for the cluster.
, dInterval :: Name -> ConfigData -> Integer
-- ^ Interval between collection in microseconds
}
|
apyrgio/ganeti
|
src/Ganeti/DataCollectors/Types.hs
|
bsd-2-clause
| 8,121
| 0
| 13
| 1,878
| 1,474
| 828
| 646
| 123
| 2
|
module Main where
import Control.Exception
import Control.DeepSeq
import System.Mem
import Text.Show
import Data.Compact
import Data.Compact.Internal
assertFail :: String -> IO ()
assertFail msg = throwIO $ AssertionFailed msg
assertEquals :: (Eq a, Show a) => a -> a -> IO ()
assertEquals expected actual =
if expected == actual then return ()
else assertFail $ "expected " ++ (show expected)
++ ", got " ++ (show actual)
data Tree = Nil | Node Tree Tree Tree
instance Eq Tree where
Nil == Nil = True
Node _ l1 r1 == Node _ l2 r2 = l1 == l2 && r1 == r2
_ == _ = False
instance Show Tree where
showsPrec _ Nil = showString "Nil"
showsPrec _ (Node _ l r) = showString "(Node " . shows l .
showString " " . shows r . showString ")"
instance NFData Tree where
rnf Nil = ()
rnf (Node p l r) = p `seq` rnf l `seq` rnf r `seq` ()
{-# NOINLINE test #-}
test x = do
let a = Node Nil x b
b = Node a Nil Nil
str <- compactSized 4096 True a
-- check the value in the compact
assertEquals a (getCompact str)
performMajorGC
-- check again the value in the compact
assertEquals a (getCompact str)
main = test Nil
|
olsner/ghc
|
libraries/compact/tests/compact_loop.hs
|
bsd-3-clause
| 1,184
| 0
| 10
| 303
| 461
| 230
| 231
| 35
| 2
|
module B1.Program.Chart.VolumeBars
( getVboSpecs
) where
import Graphics.Rendering.OpenGL
import B1.Data.Price
import B1.Data.Range
import B1.Data.Technicals.StockData
import B1.Graphics.Rendering.OpenGL.Box
import B1.Graphics.Rendering.OpenGL.Shapes
import B1.Graphics.Rendering.OpenGL.Utils
import B1.Program.Chart.Animation
import B1.Program.Chart.Colors
import B1.Program.Chart.Dirty
import B1.Program.Chart.FragmentShader
import B1.Program.Chart.Resources
import B1.Program.Chart.Vbo
getVboSpecs :: StockPriceData -> Box -> [VboSpec]
getVboSpecs priceData bounds = [VboSpec Quads size quads]
where
size = getSize priceData
quads = getQuads priceData bounds
getSize :: StockPriceData -> Int
getSize priceData = size
where
numElements = numDailyElements priceData
size = numElements * (4 * (2 + 3))
getQuads :: StockPriceData -> Box -> [GLfloat]
getQuads priceData bounds =
concat $ map (createQuad bounds stockPrices numElements) indices
where
numElements = numDailyElements priceData
stockPrices = take numElements $ prices priceData
indices = [0 .. numElements - 1]
createQuad :: Box -> [Price] -> Int -> Int -> [GLfloat]
createQuad bounds prices numElements index =
[leftX, bottomY] ++ colorList
++ [leftX, topY] ++ colorList
++ [rightX, topY] ++ colorList
++ [rightX, bottomY] ++ colorList
where
colorList = color3ToList $
if getPriceChange prices index >= 0
then green3
else red3
totalWidth = boxWidth bounds
barWidth = realToFrac totalWidth / realToFrac numElements
spacing = barWidth / 3
rightX = boxRight bounds - realToFrac index * barWidth - spacing
leftX = rightX - barWidth + spacing
minVolume = minimum $ map volume prices
maxVolume = maximum $ map volume prices
adjustedMaxVolume = floor $ realToFrac maxVolume * 1.05
totalRange = adjustedMaxVolume - minVolume
currentVolume = volume $ prices !! index
range = currentVolume - minVolume
heightPercentage = realToFrac range / realToFrac totalRange
totalHeight = boxHeight bounds
height = totalHeight * realToFrac heightPercentage
bottomY = boxBottom bounds
topY = bottomY + height
|
madjestic/b1
|
src/B1/Program/Chart/VolumeBars.hs
|
bsd-3-clause
| 2,215
| 0
| 12
| 436
| 611
| 340
| 271
| 55
| 2
|
module Lisp.Evaluator where
import Lisp.Types
import Control.Lens
import qualified Data.Map as M
import qualified Data.Set as S
import Safe (lastMay)
data LispState = Form LispValue | Value LispValue |
StateList [LispState] Int |
Apply LispFunction [LispValue] |
InsideClosure LispState Int |
Special String [LispState]
deriving Show
specialForms :: S.Set String
specialForms = S.fromList ["def", "do", "if", "lambda", "quote"]
specialFormCheck :: LispValue -> Maybe (String, [LispValue])
specialFormCheck (LVList ((LVSymbol str):rest)) =
if str `S.member` specialForms
then Just (str, rest)
else Nothing
specialFormCheck _ = Nothing
macroFormCheck :: LispValue -> Lisp Bool
macroFormCheck (LVList ((LVSymbol str):_)) = (S.member str) `fmap` use macros
macroFormCheck _ = return False
(!!?) :: [a] -> Int -> Maybe a
(!!?) list n =
if n < 0 || n >= (length list)
then Nothing
else Just (list !! n)
resolveSymbol :: String -> [LispFrame] -> LispFrame -> Maybe LispValue
resolveSymbol str [] globals' = M.lookup str globals'
resolveSymbol str (frame:rest) globals' =
case M.lookup str frame of
Just val -> Just val
Nothing -> resolveSymbol str rest globals'
truthy :: LispValue -> Bool
truthy (LVBool False) = False
truthy _ = True
defineSymbol :: String -> LispValue -> Lisp ()
defineSymbol str value =
globals %= M.insert str value
undefineSymbol :: String -> Lisp ()
undefineSymbol str =
globals %= M.delete str
mkFrame :: [String] -> [LispValue] -> [(String, LispValue)]
mkFrame names values =
case lastMay names of
Just ('&':_) ->
let n = length names
tailValues = drop (n - 1) values
in zip names $ (take (n - 1) values) ++ [LVList tailValues]
_ -> zip names values
oneStep :: LispState -> Lisp LispState
oneStep v@(Value _) = return v
oneStep (Apply lispFn lispValues) =
case lispFn of
(LFPrimitive _ apply) ->
case apply lispValues of
Left err -> lispFail err
Right v -> return $ Value v
(LFAction _ action) ->
Value `fmap` action lispValues
s@(LFClosure name stack' params body) -> do
let self = LVFunction s
--TODO: need to handle TCO properly.
newFrame = M.fromList $ mkFrame (name:params) (self:lispValues)
newStack = newFrame:stack'
nFrames <- pushFrames newStack
return $ InsideClosure (Form body) nFrames
oneStep (InsideClosure value@(Value _) nFrames) = do
popFrames nFrames
return $ value
oneStep (InsideClosure state nFrames) = do
state' <- oneStep state
-- TODO: if the closure is a tail call, TCO can be done here.
return $ InsideClosure state' nFrames
oneStep (Form (LVSymbol str)) = do
theStack <- use stack
theGlobals <- use globals
case resolveSymbol str theStack theGlobals of
Just value -> return $ Value value
Nothing -> lispFail $ LispError (LVString $ "Can't resolve symbol: " ++ str)
-- empty list is "self-evaluating"
oneStep (Form (LVList [])) = return $ Value (LVList [])
oneStep (Form form@(LVList list)) =
case specialFormCheck form of
Just (string, rest) ->
return $ Special string (map Form rest)
Nothing -> do
macro <- macroFormCheck form
if macro
then error "Illegal state." -- macros should be expanded before we get here.
else return $ StateList (map Form list) 0
oneStep (Form selfEval) = return $ Value selfEval
oneStep (Special "quote" [(Form val)]) =
return $ Value val
oneStep (Special "quote" _) =
failWithString "quote : requires one form"
oneStep (Special "if" [(Value x), thenForm, elseForm]) =
return $ if truthy x then thenForm else elseForm
oneStep (Special "if" [condState, thenForm, elseForm]) = do
condState1 <- oneStep condState
return $ Special "if" [condState1, thenForm, elseForm]
oneStep (Special "if" _) = failWithString "if : requires 3 forms"
oneStep (Special "def" [(Form (LVSymbol str)), (Value x)]) = do
defineSymbol str x
return $ Value $ LVBool True
oneStep (Special "def" [name, defState]) = do
defState1 <- oneStep defState
return $ Special "def" [name, defState1]
oneStep (Special "def" _) = failWithString "def : requires a name (symbol) and 1 form"
oneStep (Special "do" []) = return $ Value $ LVBool True
oneStep (Special "do" ((Value x):[])) = return $ Value x
oneStep (Special "do" ((Value _):rest)) = return $ Special "do" rest
oneStep (Special "do" (state:rest)) = do
state1 <- oneStep state
return $ Special "do" (state1:rest)
oneStep (Special "lambda" [(Form (LVSymbol name)), (Form params), (Form body)]) = do
closure <- mkClosure name params body
return . Value . LVFunction $ closure
oneStep (Special "lambda" [(Form params), (Form body)]) = do
name <- genStr
closure <- mkClosure name params body
return . Value . LVFunction $ closure
oneStep (Special "lambda" _) = failWithString "lambda : requires 2 or 3 forms"
oneStep (Special name _) = error $ "illegal state : unknown special form " ++ name
oneStep (StateList states n) =
if n >= length states
then case states of
-- safe pattern match because n >= len --> all Value
(Value (LVFunction f)):vals ->
return $ Apply f (map (\(Value x) -> x) vals)
_ -> lispFail $ LispError $ LVString "function required in application position"
else case (states !! n) of
Value _ -> return $ StateList states (n + 1)
state' -> do
state1 <- oneStep state'
return $ StateList ((take n states) ++ [state1] ++ (drop (n+1) states)) n
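-- How the small-step loop plays out for an "if" form (added note): a Form
-- (if c a b) first becomes Special "if" [Form c, Form a, Form b]; the
-- condition is stepped until it is a Value, then the matching branch's Form
-- replaces the whole state and is itself stepped down to a Value.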
oneStepTillValue :: LispState -> Lisp LispValue
oneStepTillValue ls =
loop ls
where loop state' =
case state' of
(Value v) -> return v
_ -> oneStep state' >>= loop
-- eval0 :: Eval without macros (or after macroexpansion).
eval0 :: LispValue -> Lisp LispValue
eval0 lv = oneStepTillValue (Form lv)
evalMacro :: LispValue -> Lisp LispValue
evalMacro lv =
case lv of
LVList ((LVSymbol name):rest) -> do
theStack <- use stack
theGlobals <- use globals
case resolveSymbol name theStack theGlobals of
Just (LVFunction f) -> oneStepTillValue (Apply f rest)
_ -> return lv
_ -> return lv
macroexpand1 :: LispValue -> Lisp LispValue
macroexpand1 lv = do
macro <- macroFormCheck lv
if macro
then evalMacro lv
else return lv
macroexpand :: LispValue -> Lisp LispValue
macroexpand lv = do
macro <- macroFormCheck lv
if macro
then do
lv' <- macroexpand1 lv
macroexpand lv'
else return lv
-- macroexpands a Lisp form with left-most outer-most macroexpansion.
macroexpandAll :: LispValue -> Lisp LispValue
macroexpandAll lv =
case lv of
LVList ((LVSymbol "quote"):_) -> return lv
_ -> do
lv1 <- macroexpand lv
case lv1 of
LVList subforms -> do
expandedForms <- mapM macroexpandAll subforms
return $ LVList expandedForms
_ -> return lv1
eval :: LispValue -> Lisp LispValue
eval value = do
-- FIXME : replace macroexpand with macroexpandAll once completed.
value' <- macroexpandAll value
eval0 value'
|
michaelochurch/summer-2015-haskell-class
|
Lisp/Evaluator.hs
|
mit
| 7,216
| 0
| 18
| 1,745
| 2,580
| 1,283
| 1,297
| 181
| 11
|
{-# LANGUAGE StandaloneDeriving, OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ParserSpec (main, spec) where
import Arbitrary ()
import Unparse
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Network.MPD.Commands.Parse
import Network.MPD.Commands.Types
import Network.MPD.Util hiding (read)
import qualified Data.ByteString.UTF8 as UTF8
import Data.List
import qualified Data.Map as M
import Data.Time
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "parseIso8601" $ do
prop "parses dates in ISO8601 format" prop_parseIso8601
describe "parseCount" $ do
prop "parses counts" prop_parseCount
describe "parseOutputs" $ do
prop "parses outputs" prop_parseOutputs
describe "parseSong" $ do
prop "parses songs" prop_parseSong
describe "parseStats" $ do
prop "parses stats" prop_parseStats
-- This property also ensures that (instance Arbitrary UTCTime) is sound.
-- Indeed, a bug in the instance declaration was the primary motivation to add
-- this property.
prop_parseIso8601 :: UTCTime -> Bool
prop_parseIso8601 t = Just t == (parseIso8601 . UTF8.fromString . formatIso8601) t
prop_parseCount :: Count -> Bool
prop_parseCount c = Right c == (parseCount . map UTF8.fromString . lines . unparse) c
prop_parseOutputs :: [Device] -> Bool
prop_parseOutputs ds =
Right ds == (parseOutputs . map UTF8.fromString . lines . concatMap unparse) ds
deriving instance Ord Value
prop_parseSong :: Song -> Bool
prop_parseSong s = Right (sortTags s) == sortTags `fmap` (parseSong . toAssocList . map UTF8.fromString . lines . unparse) s
where
-- We consider lists of tag values equal if they contain the same elements.
-- To ensure that two lists with the same elements are equal, we bring the
-- elements in a deterministic order.
sortTags song = song { sgTags = M.map sort $ sgTags song }
prop_parseStats :: Stats -> Bool
prop_parseStats s = Right s == (parseStats . map UTF8.fromString . lines . unparse) s
|
matthewleon/libmpd-haskell
|
tests/ParserSpec.hs
|
mit
| 2,131
| 0
| 12
| 476
| 504
| 262
| 242
| 41
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.Wai.Handler.Warp.HTTP2 (isHTTP2, http2) where
import Control.Concurrent (forkIO, killThread)
import qualified Control.Exception as E
import Control.Monad (when, unless, replicateM_)
import Data.ByteString (ByteString)
import Network.HTTP2
import Network.Socket (SockAddr)
import Network.Wai
import Network.Wai.Handler.Warp.HTTP2.EncodeFrame
import Network.Wai.Handler.Warp.HTTP2.Manager
import Network.Wai.Handler.Warp.HTTP2.Receiver
import Network.Wai.Handler.Warp.HTTP2.Request
import Network.Wai.Handler.Warp.HTTP2.Sender
import Network.Wai.Handler.Warp.HTTP2.Types
import Network.Wai.Handler.Warp.HTTP2.Worker
import qualified Network.Wai.Handler.Warp.Settings as S (Settings)
import Network.Wai.Handler.Warp.Types
----------------------------------------------------------------
http2 :: Connection -> InternalInfo -> SockAddr -> Transport -> S.Settings -> (BufSize -> IO ByteString) -> Application -> IO ()
http2 conn ii addr transport settings readN app = do
checkTLS
ok <- checkPreface
when ok $ do
ctx <- newContext
-- Workers & Manager
mgr <- start
let responder = response ctx mgr
action = worker ctx settings tm app responder
setAction mgr action
-- fixme: hard coding: 10
replicateM_ 10 $ spawnAction mgr
-- Receiver
let mkreq = mkRequest settings addr
tid <- forkIO $ frameReceiver ctx mkreq readN
-- Sender
      -- frameSender runs on the main thread because it must ensure that
      -- a GOAWAY frame is sent.
frameSender ctx conn ii settings `E.finally` do
clearContext ctx
stop mgr
killThread tid
where
tm = timeoutManager ii
checkTLS = case transport of
TCP -> return () -- direct
tls -> unless (tls12orLater tls) $ goaway conn InadequateSecurity "Weak TLS"
tls12orLater tls = tlsMajorVersion tls == 3 && tlsMinorVersion tls >= 3
checkPreface = do
preface <- readN connectionPrefaceLength
if connectionPreface /= preface then do
goaway conn ProtocolError "Preface mismatch"
return False
else
return True
-- connClose must not be called here since Run:fork calls it
goaway :: Connection -> ErrorCodeId -> ByteString -> IO ()
goaway Connection{..} etype debugmsg = connSendAll bytestream
where
bytestream = goawayFrame 0 etype debugmsg
|
AndrewRademacher/wai
|
warp/Network/Wai/Handler/Warp/HTTP2.hs
|
mit
| 2,490
| 0
| 13
| 558
| 595
| 322
| 273
| 50
| 3
|
import Test.Framework (defaultMain, testGroup)
import Test.HUnit
import Test.Framework.Providers.HUnit (testCase)
main :: IO ()
main = defaultMain [
testGroup "(default)" [
testCase "isGood" (True @=? True)
]
]
|
telser/riemann-hs
|
test/Unit.hs
|
mit
| 225
| 0
| 11
| 41
| 72
| 40
| 32
| 7
| 1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Common Library</title>
<maps>
<homeID>commonlib</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/commonlib/src/main/javahelp/help_ko_KR/helpset_ko_KR.hs
|
apache-2.0
| 965
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
{-# LANGUAGE RankNTypes, NamedFieldPuns, RecordWildCards #-}
-- | Implements a system to allow users to upvote packages.
--
module Distribution.Server.Features.Votes
( VotesFeature(..)
, initVotesFeature
) where
import Distribution.Server.Features.Votes.State
import qualified Distribution.Server.Features.Votes.Render as Render
import Distribution.Server.Framework
import Distribution.Server.Framework.BackupRestore
import Distribution.Server.Features.Core
import Distribution.Server.Features.Users
import Distribution.Server.Users.Types (UserId(..))
import Distribution.Server.Users.UserIdSet as UserIdSet
import Distribution.Package
import Distribution.Text
import Data.Aeson
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.HashMap.Strict as HashMap
import Control.Arrow (first)
import qualified Text.XHtml.Strict as X
-- | Define the prototype for this feature
data VotesFeature = VotesFeature {
votesFeatureInterface :: HackageFeature
, didUserVote :: forall m. MonadIO m => PackageName -> UserId -> m Bool
, pkgNumVotes :: forall m. MonadIO m => PackageName -> m Int
, renderVotesHtml :: PackageName -> ServerPartE (String, X.Html)
}
-- | Implement the isHackageFeature 'interface'
instance IsHackageFeature VotesFeature where
getFeatureInterface = votesFeatureInterface
-- | Called from Features.hs to initialize this feature
initVotesFeature :: ServerEnv
-> IO ( CoreFeature
-> UserFeature
-> IO VotesFeature)
initVotesFeature env@ServerEnv{serverStateDir} = do
dbVotesState <- votesStateComponent serverStateDir
return $ \coref@CoreFeature{..} userf@UserFeature{..} -> do
let feature = votesFeature env
dbVotesState
coref userf
return feature
-- | Define the backing store (i.e. database component)
votesStateComponent :: FilePath -> IO (StateComponent AcidState VotesState)
votesStateComponent stateDir = do
st <- openLocalStateFrom (stateDir </> "db" </> "Votes") initialVotesState
return StateComponent {
stateDesc = "Backing store for Map PackageName -> Users who voted for it"
, stateHandle = st
, getState = query st GetVotesState
, putState = update st . ReplaceVotesState
, resetState = votesStateComponent
, backupState = \_ _ -> []
, restoreState = RestoreBackup {
restoreEntry = error "Unexpected backup entry"
, restoreFinalize = return $ VotesState Map.empty
}
}
-- | Default constructor for building this feature.
votesFeature :: ServerEnv
-> StateComponent AcidState VotesState
-> CoreFeature -- To get site package list
-> UserFeature -- To authenticate users
-> VotesFeature
votesFeature ServerEnv{..}
votesState
CoreFeature { coreResource = CoreResource{..} }
UserFeature{..}
= VotesFeature{..}
where
votesFeatureInterface = (emptyHackageFeature "votes") {
featureDesc = "Allow users to upvote packages",
featureResources = [ packagesVotesResource
, packageVotesResource
]
, featureState = [abstractAcidStateComponent votesState]
}
-- Define resources for this feature's URIs
packagesVotesResource :: Resource
packagesVotesResource = (resourceAt "/packages/votes.:format") {
resourceDesc = [(GET, "Returns the number of votes for each package")]
, resourceGet = [("json", servePackageVotesGet)]
}
packageVotesResource :: Resource
packageVotesResource = (resourceAt "/package/:package/votes.:format") {
resourceDesc = [ (GET, "Returns the number of votes a package has")
, (PUT, "Adds a vote to this package")
, (DELETE, "Remove a user's vote from this package")
]
, resourceGet = [("json", servePackageNumVotesGet)]
, resourcePut = [("", servePackageVotePut)]
, resourceDelete = [("", servePackageVoteDelete)]
}
    -- Implementations of how the above resources are handled.
    -- Retrieve the entire map (from package names -> # of votes)
servePackageVotesGet :: DynamicPath -> ServerPartE Response
servePackageVotesGet _ = do
cacheControlWithoutETag [Public, maxAgeMinutes 10]
votesMap <- queryState votesState GetAllPackageVoteSets
ok . toResponse $ objectL
[ (display pkgname, toJSON (UserIdSet.size voterset))
| (pkgname, voterset) <- Map.toList votesMap ]
-- Get the number of votes a package has. If the package
-- has never been voted for, returns 0.
servePackageNumVotesGet :: DynamicPath -> ServerPartE Response
servePackageNumVotesGet dpath = do
pkgname <- packageInPath dpath
guardValidPackageName pkgname
cacheControlWithoutETag [Public, maxAgeMinutes 10]
voteCount <- queryState votesState (GetPackageVoteCount pkgname)
let obj = objectL
[ ("packageName", string $ display pkgname)
, ("numVotes", toJSON voteCount)
]
ok . toResponse $ obj
-- Add a vote to :packageName (must match name exactly)
servePackageVotePut :: DynamicPath -> ServerPartE Response
servePackageVotePut dpath = do
uid <- guardAuthorised [AnyKnownUser]
pkgname <- packageInPath dpath
guardValidPackageName pkgname
success <- updateState votesState (AddVote pkgname uid)
if success
then ok . toResponse $ Render.voteConfirmationPage pkgname
"Package voted for successfully"
else ok . toResponse $ Render.alreadyVotedPage pkgname
-- Removes a user's vote from a package. If the user has not voted
-- for this package, does nothing.
servePackageVoteDelete :: DynamicPath -> ServerPartE Response
servePackageVoteDelete dpath = do
uid <- guardAuthorised [AnyKnownUser]
pkgname <- packageInPath dpath
guardValidPackageName pkgname
success <- updateState votesState (RemoveVote pkgname uid)
let responseMsg | success = "Package vote removed successfully."
| otherwise = "User has not voted for this package."
ok . toResponse $ Render.voteConfirmationPage
pkgname responseMsg
-- Helper Functions (Used outside of responses, e.g. by other features.)
-- Returns true if a user has previously voted for the
-- package in question.
didUserVote :: MonadIO m => PackageName -> UserId -> m Bool
didUserVote pkgname uid =
queryState votesState (GetPackageUserVoted pkgname uid)
-- Returns the number of votes a package has.
pkgNumVotes :: MonadIO m => PackageName -> m Int
pkgNumVotes pkgname =
queryState votesState (GetPackageVoteCount pkgname)
-- Renders the HTML for the "Votes:" section on package pages.
renderVotesHtml :: PackageName -> ServerPartE (String, X.Html)
renderVotesHtml pkgname = do
numVotes <- pkgNumVotes pkgname
return $ Render.renderVotesAnon numVotes pkgname
-- Helper functions for constructing JSON responses.
-- Use to construct a list of tuples that can be toJSON'd
objectL :: [(String, Value)] -> Value
objectL = Object . HashMap.fromList . map (first T.pack)
-- Use inside an objectL to transform strings into json values
string :: String -> Value
string = String . T.pack
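    -- Example (added note): objectL [("numVotes", toJSON (3 :: Int))] builds
    -- the aeson Value that encodes as the JSON object {"numVotes":3}.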
|
ocharles/hackage-server
|
Distribution/Server/Features/Votes.hs
|
bsd-3-clause
| 7,670
| 0
| 16
| 1,984
| 1,439
| 784
| 655
| 129
| 2
|
{-# LANGUAGE RecordWildCards, RecursiveDo #-}
{-# OPTIONS_GHC -Wno-unused-record-wildcards #-}
module TT where
data T = T {t1, t2 :: Int}
f :: T -> Int
f d = x
where T {t1 = x, ..} = d
g :: T -> Int
g (T {t1 = x, ..}) = x
-- The fix to this test also affected the dorec checking code, hence this:
h :: Maybe Int
h = do
rec
T {t1 = x, ..} <- Just $ T 1 1
return x
|
sdiehl/ghc
|
testsuite/tests/typecheck/should_compile/T4404.hs
|
bsd-3-clause
| 386
| 0
| 12
| 108
| 143
| 79
| 64
| 14
| 1
|
module A6 where
data T a = T1 a | T2 a
data S = C1 | C2 | C3
over :: S -> (T Int) -> Int
over x (T1 y) = 42
over x (T2 y) = 42
|
kmate/HaRe
|
old/testing/addCon/A6.hs
|
bsd-3-clause
| 129
| 0
| 8
| 42
| 83
| 46
| 37
| 6
| 1
|
-- Glue to the usual Hughes/SimonPJ pretty printer
module NewPrettyPrint(pp,render,quickrender,module PrettyPrint2,module PrettyEnv) where
import PrettyPrint2
import PrettyDoc
import PrettyEnv
import PrettyStd
import qualified Text.PrettyPrint as P
pp x = render (ppi x)
render = P.renderStyle normalstyle . toPretty . runEnv defaultMode
quickrender = P.renderStyle qstyle . toPretty .
runEnv defaultMode{layoutType=PPNoLayout}
qstyle = normalstyle{P.mode=P.OneLineMode}
normalstyle = P.style{P.lineLength=80}
toPretty d =
case d of
Empty -> P.empty
Char c -> P.char c
Text s -> P.text s
Attr _ d -> toPretty d
Nest n d -> P.nest n (toPretty d)
Group l ds -> group l (map toPretty ds)
where
group l =
case l of
Horiz Cat -> P.hcat
Horiz Sep -> P.hsep
Vert -> P.vcat
HorizOrVert Cat -> P.cat
HorizOrVert Sep -> P.sep
Fill Cat -> P.fcat
Fill Sep -> P.fsep
|
forste/haReFork
|
tools/base/pretty/NewPrettyPrint.hs
|
bsd-3-clause
| 949
| 2
| 13
| 228
| 345
| 176
| 169
| 29
| 12
|
module Main
( main
) where
import Test.Tasty
import qualified UnitTests.Distribution.Compat.CreatePipe
import qualified UnitTests.Distribution.Compat.ReadP
import qualified UnitTests.Distribution.Simple.Program.Internal
import qualified UnitTests.Distribution.Utils.NubList
import qualified UnitTests.Distribution.System
import qualified Test.Distribution.Version (versionTests, parseTests)
tests :: TestTree
tests = testGroup "Unit Tests" $
[ testGroup "Distribution.Compat.ReadP"
UnitTests.Distribution.Compat.ReadP.tests
, testGroup "Distribution.Compat.CreatePipe"
UnitTests.Distribution.Compat.CreatePipe.tests
, testGroup "Distribution.Simple.Program.Internal"
UnitTests.Distribution.Simple.Program.Internal.tests
, testGroup "Distribution.Utils.NubList"
UnitTests.Distribution.Utils.NubList.tests
, testGroup "Distribution.System"
UnitTests.Distribution.System.tests
, Test.Distribution.Version.versionTests
, Test.Distribution.Version.parseTests
]
main :: IO ()
main = defaultMain tests
|
randen/cabal
|
Cabal/tests/UnitTests.hs
|
bsd-3-clause
| 1,079
| 0
| 8
| 154
| 183
| 117
| 66
| 25
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Lamdu.Data.FFI (Env(..), table) where
import Control.Lens.Operators
import Data.Binary (Binary(..))
import Data.Derive.Binary (makeBinary)
import Data.DeriveTH (derive)
import Data.Map (Map)
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
import qualified Lamdu.Data.Definition as Definition
import qualified Lamdu.Data.Expression.Utils as ExprUtil
import qualified Lamdu.Data.Expression.Lens as ExprLens
import qualified Lamdu.Data.Expression.IRef as ExprIRef
data Env t = Env
{ trueDef :: ExprIRef.DefI t
, falseDef :: ExprIRef.DefI t
}
derive makeBinary ''Env
class FromExpr a where
fromExpr :: Env t -> ExprIRef.Expression t () -> a
class ToExpr a where
toExpr :: Env t -> a -> [ExprIRef.Expression t ()] -> ExprIRef.Expression t ()
instance FromExpr Integer where
fromExpr _ e =
fromMaybe (error "Expecting normalized Integer expression!") $
e ^? ExprLens.exprLiteralInteger
instance ToExpr Integer where
toExpr _ x [] = ExprUtil.pureLiteralInteger x
toExpr _ _ _ = error "Integer applied as a function"
instance (FromExpr a, ToExpr b) => ToExpr (a -> b) where
toExpr _ _ [] = error "Expecting more arguments"
toExpr env f (x:xs) = (toExpr env . f . fromExpr env) x xs
instance ToExpr Bool where
toExpr env b [] =
ExprLens.pureExpr . ExprLens.bodyDefinitionRef #
(if b then trueDef else falseDef) env
toExpr _ _ _ = error "Bool applied as a function"
instance FromExpr Bool where
fromExpr env expr =
case expr ^? ExprLens.exprDefinitionRef of
Just defRef
| defRef == trueDef env -> True
| defRef == falseDef env -> False
_ -> error "Expected a normalized bool expression!"
table :: Env t -> Map Definition.FFIName ([ExprIRef.Expression t ()] -> ExprIRef.Expression t ())
table env =
Map.fromList
[ prelude "==" ((==) :: Integer -> Integer -> Bool)
, prelude "+" ((+) :: Integer -> Integer -> Integer)
, prelude "-" ((-) :: Integer -> Integer -> Integer)
, prelude "*" ((*) :: Integer -> Integer -> Integer)
]
where
prelude name val =
(Definition.FFIName ["Prelude"] name, toExpr env val)
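-- How the marshalling above unwinds (added note): for, say,
-- toExpr env ((+) :: Integer -> Integer -> Integer) [ex, ey], the (a -> b)
-- instance converts ex back with fromExpr, applies (+), and recurses on [ey],
-- until the Integer instance wraps the final result with pureLiteralInteger.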
|
sinelaw/lamdu
|
Lamdu/Data/FFI.hs
|
gpl-3.0
| 2,148
| 0
| 13
| 414
| 748
| 401
| 347
| -1
| -1
|
-- \section[Hooks]{Low level API hooks}
-- NB: this module is SOURCE-imported by DynFlags, and should primarily
-- refer to *types*, rather than *code*
-- If you import too much here, then the revolting compiler_stage2_dll0_MODULES
-- stuff in compiler/ghc.mk makes DynFlags link to too much stuff
module Hooks ( Hooks
, emptyHooks
, lookupHook
, getHooked
-- the hooks:
, dsForeignsHook
, tcForeignImportsHook
, tcForeignExportsHook
, hscFrontendHook
, hscCompileOneShotHook
, hscCompileCoreExprHook
, ghcPrimIfaceHook
, runPhaseHook
, runMetaHook
, linkHook
, runRnSpliceHook
, getValueSafelyHook
) where
import DynFlags
import Name
import PipelineMonad
import HscTypes
import HsDecls
import HsBinds
import HsExpr
import OrdList
import Id
import TcRnTypes
import Bag
import RdrName
import CoreSyn
import BasicTypes
import Type
import SrcLoc
import Data.Maybe
{-
************************************************************************
* *
\subsection{Hooks}
* *
************************************************************************
-}
-- | Hooks can be used by GHC API clients to replace parts of
-- the compiler pipeline. If a hook is not installed, GHC
-- uses the default built-in behaviour
emptyHooks :: Hooks
emptyHooks = Hooks Nothing Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing Nothing Nothing
Nothing
data Hooks = Hooks
{ dsForeignsHook :: Maybe ([LForeignDecl Id] -> DsM (ForeignStubs, OrdList (Id, CoreExpr)))
, tcForeignImportsHook :: Maybe ([LForeignDecl Name] -> TcM ([Id], [LForeignDecl Id], Bag GlobalRdrElt))
, tcForeignExportsHook :: Maybe ([LForeignDecl Name] -> TcM (LHsBinds TcId, [LForeignDecl TcId], Bag GlobalRdrElt))
, hscFrontendHook :: Maybe (ModSummary -> Hsc TcGblEnv)
, hscCompileOneShotHook :: Maybe (HscEnv -> ModSummary -> SourceModified -> IO HscStatus)
, hscCompileCoreExprHook :: Maybe (HscEnv -> SrcSpan -> CoreExpr -> IO HValue)
, ghcPrimIfaceHook :: Maybe ModIface
, runPhaseHook :: Maybe (PhasePlus -> FilePath -> DynFlags -> CompPipeline (PhasePlus, FilePath))
, runMetaHook :: Maybe (MetaHook TcM)
, linkHook :: Maybe (GhcLink -> DynFlags -> Bool -> HomePackageTable -> IO SuccessFlag)
, runRnSpliceHook :: Maybe (HsSplice Name -> RnM (HsSplice Name))
, getValueSafelyHook :: Maybe (HscEnv -> Name -> Type -> IO (Maybe HValue))
}
getHooked :: (Functor f, HasDynFlags f) => (Hooks -> Maybe a) -> a -> f a
getHooked hook def = fmap (lookupHook hook def) getDynFlags
lookupHook :: (Hooks -> Maybe a) -> a -> DynFlags -> a
lookupHook hook def = fromMaybe def . hook . hooks
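-- Usage sketch (added note; the record update below is an assumption based on
-- the 'hooks' selector used in 'lookupHook', not code from this module): a GHC
-- API client installs a hook by overriding one field, e.g.
--
-- > dflags' = dflags { hooks = (hooks dflags)
-- >                      { hscCompileCoreExprHook = Just myCompileCoreExpr } }
--
-- 'getHooked'/'lookupHook' then pick it up, falling back to the built-in
-- behaviour while the field is 'Nothing'.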
|
christiaanb/ghc
|
compiler/main/Hooks.hs
|
bsd-3-clause
| 3,051
| 0
| 16
| 875
| 627
| 347
| 280
| 54
| 1
|
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
FlexibleInstances, GeneralizedNewtypeDeriving #-}
-- Test deriving of a multi-parameter class for
-- one-argument newtype defined in the same module
module ShouldSucceed where
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
-- library stuff
class Monad m => MonadState s m | m -> s where
get :: m s
put :: s -> m ()
newtype State s a = State {
runState :: (s -> (a, s))
}
instance Functor (State s) where
fmap = liftM
instance Applicative (State s) where
pure = return
(<*>) = ap
instance Monad (State s) where
return a = State $ \s -> (a, s)
m >>= k = State $ \s -> let
(a, s') = runState m s
in runState (k a) s'
instance MonadState s (State s) where
get = State $ \s -> (s, s)
put s = State $ \_ -> ((), s)
-- test code
newtype Foo a = MkFoo (State Int a)
deriving (Functor, Applicative, Monad, MonadState Int)
f :: Foo Int
f = get
|
ezyang/ghc
|
testsuite/tests/deriving/should_compile/drv020.hs
|
bsd-3-clause
| 1,091
| 0
| 12
| 338
| 358
| 200
| 158
| 27
| 1
|
-- print1.hs
module Print1 where
main :: IO ()
main = putStrLn "hello world!"
|
RazvanCosmeanu/Haskellbook
|
ch03/print1.hs
|
mit
| 83
| 0
| 6
| 19
| 23
| 13
| 10
| 3
| 1
|
{-# LANGUAGE DeriveDataTypeable, PatternGuards, MultiParamTypeClasses, OverloadedStrings, TemplateHaskell, ImpredicativeTypes #-}
{-# LANGUAGE TypeOperators, TupleSections, FlexibleInstances, ScopedTypeVariables, FlexibleContexts #-}
module Main where
import Prelude hiding ((.), id) -- fclabels
import Control.Category ((.), id) -- fclabels
import Data.List
import BlazeHtml
import Data.Generics hiding (Data)
import Data.Char hiding (Space)
import Data.Function (on)
import Data.Maybe
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Debug.Trace
import System.Time
import Types
import ObloUtils
import Generics
import WebViewPrim
import WebViewLib
import HtmlLib
import Control.Monad.State hiding (get)
import qualified Control.Monad.State
import Server
import TemplateHaskell
import Data.Label -- fclabels
import Data.Label.Mono ((:~>)) -- fclabels
import WebViewLibExp
import Database
import BorrowItUtils
{-
Doc
View id path does not necessarily correspond to the child order, but is based on construction order in the monad.
Plan:
id stubid in node? instead of in webview?
eq for WebNodes, do we need ViewId? Probably good to add, although mainly left and right operand are looked up based on a viewId,
so the viewId's will be equal.
document computeMove and get rid of Just _ <- matches
search items view
autocomplete
template view with menu bar
TODO IMPORTANT
Current event override mechanism for widgets is broken.
Say we have Button_0 which is presented and has its handlers overwritten by script code in its parents.
If we then change the presentation to a new one that has a button in the same tree position, it will also be Button_0,
and since it hasn't changed, the incrementality will not update it, causing it to have the old button's handlers.
(what happens with widget content?)
TODO: hash paths don't seem to work okay when scripts are present. Reservations example has problems when switching from main to client or restaurant views
TODO: don't prevent submit when no textfield action is present!!!! (instead change script for this or something)
otherwise we cannot override return key for textfields without action
Fix weird <div op="new"> elements appearing before <html> element
Ideas
composeView $ \vid (a,b) -> do bla return (a,b)
instance Present (ComposeView ..)
Can't presentView simply supply a (PresentView )->Html?
mkWebViewInit (wv->wv) (wv->wv) init arg is initial
nice sort buttons with triangles
Home FAQ Profile items history Messages sign up log in
-}
-- Utils
unsafeLookupM tag dbf key = withDb $ \db -> unsafeLookup tag key $ dbf db
-- BorrowIt utils
showName lender = get lenderFirstName lender ++ " " ++ get lenderLastName lender
-- WebViews
data Inline = Inline | Full deriving (Eq, Show)
isInline Inline = True
isInline Full = False
deriveInitial ''Inline
instance MapWebView Database Inline
-- TODO: maybe distance
data ItemView =
ItemView Inline Double Item (Maybe Item) Lender (Widget (Button Database)) (Maybe Lender) [(String,Property Database Item)] [Widget (Button Database)]
deriving (Eq, Show, Typeable)
instance Initial LenderId where
initial = LenderId "_uninitializedId_"
deriveInitial ''Gender
deriveInitial ''Lender
instance MapWebView db Lender
instance Initial ItemId where
initial = ItemId (-1)
deriveInitial ''Category
deriveInitial ''Item
instance MapWebView db Item
deriveInitial ''ItemView
deriveMapWebViewDb ''Database ''ItemView
-- todo: use partial lense here?
mEditedItem :: ItemView :-> Maybe Item
mEditedItem = lens (\(ItemView _ _ _ mItem _ _ _ _ _) -> mItem)
(\fn (ItemView a b c mItem e f g h i) -> (ItemView a b c (fn mItem) e f g h i))
--updateById id update db = let object = unsafeLookup
mkItemView inline item = mkWebView $
\vid oldItemView@(ItemView _ _ _ mEdited _ _ _ _ _) ->
do { owner <- unsafeLookupM "itemView" (get allLenders) (get itemOwner item)
; user <- getUser
; button <- case (get itemBorrowed item, user) of
(Nothing, Nothing) -> mkButton "Leen" False $ return ()
(Nothing, Just (userId,_)) | get itemOwner item == LenderId userId -> mkButton "Borrow" False $ return ()
| otherwise ->
mkButton "Borrow" True $ modifyDb $ \db ->
let items' = Map.update (\i -> Just $ set itemBorrowed (Just $ LenderId userId) item) (get itemId item) (get allItems db)
in set allItems items' db
(Just borrowerId,_) ->
mkButton "Return" True $ modifyDb $ \db ->
let items' = Map.update (\i -> Just $ set itemBorrowed Nothing item) (get itemId item) (get allItems db)
in set allItems items' db
; mBorrower <- maybe (return Nothing) (\borrowerId -> fmap Just $ unsafeLookupM "itemView2" (get allLenders) borrowerId) $ get itemBorrowed item
; distance <- case user of
Just (userId,_) -> do { userLender <- unsafeLookupM "itemView3" (get allLenders) (LenderId userId)
; return $ lenderDistance userLender owner
}
_ -> return $ -1
; props <- (if inline == Inline then getInlineCategoryProps else getFullCategoryProps) vid (isJust mEdited) item $ get itemCategory item
; buttons <- if False {- isInline inline -} then return [] else
do { deleteButton <- mkButton "Remove" True $ modifyDb $ deleteItem item
; editButton <- mkButton (maybe "Edit" (const "Done") mEdited) True $
case mEdited of
Nothing -> viewEdit vid $ set mEditedItem (Just item)
Just updatedItem -> do { modifyDb $ updateItem (get itemId updatedItem) $ \item -> updatedItem
; viewEdit vid $ set mEditedItem Nothing
; liftIO $ putStrLn $ "updating item \n" ++ show updatedItem
}
; buttons <- if not $ isJust mEdited then return [] else
fmap singleton $ mkButton "Cancel" True $ viewEdit vid $ set mEditedItem Nothing
; return $ [ deleteButton, editButton ] ++ buttons
}
; return $ ItemView inline distance item mEdited owner button mBorrower props buttons
}
instance Presentable ItemView where
present (ItemView Full dist item _ owner button mBorrower props buttons) =
vList [ h2 $ toHtml (getItemCategoryName item ++ ": " ++ get itemName item)
, hList [ (div_ (boxedEx 1 1 $ image ("items/" ++ get itemImage item) ! style "height: 200px")) ! style "width: 204px" ! align "top"
, nbsp
, nbsp
, vList $ [ with [style "color: #333; font-size: 16px"] $
presentEditableProperties -- ("Owner: ", Static linkedLenderFullName owner):
props
]
++ maybe [] (\borrower -> [ vSpace 10, with [style "color: red", class_ "BorrowedByLink"] $
"Borrowed by " +++ linkedLenderFullName borrower]) mBorrower
++ [ vSpace 10
, present button ]
]
, vSpace 10
, with [ style "font-weight: bold"] $ "Description:"
, multiLineStringToHtml $ get itemDescr item
, hList $ map present buttons
]
present (ItemView Inline dist item mEdited owner button mBorrower props buttons) =
-- todo present imdb link, present movieOrSeries
{-
vList [
hStretchList
[ E $ linkedItem item $ (div_ (boxedEx 1 1 $ image ("items/" ++ get itemImage item) ! style "height: 130px")) ! style "width: 134px" ! align "top"
, E $ nbsp +++ nbsp
-- TODO: this stretch doesn't work. Until we have good compositional layout combinators, just set the width.
, Stretch $ linkedItem item $
div_ ! style "height: 130px; width: 428px; font-size: 12px" $ sequence_
[ with [style "font-weight: bold; font-size: 15px"] $ toHtml (getItemCategoryName item ++ ": " ++ get itemName item)
, vSpace 2
, with [style "color: #333"] $
presentEditableProperties props -- ++
-- [("Points", toHtml . show $ get itemPrice item)]
, vSpace 3
, with [style "font-weight: bold"] $ "Description:"
, with [class_ "ellipsis multiline", style "height: 30px;"] $
{- 30 : 2 * 14 + 2 -}
multiLineStringToHtml $ get itemDescr item
] ! width "100%"
, E $ nbsp +++ nbsp
, E $ vDivList
([ presentProperties $ [ ("Owner", linkedLenderFullName owner)
, ("Rating", with [style "font-size: 17px; position: relative; top: -6px; height: 12px" ] $ presentRating 5 $ get lenderRating owner)
] ++
(if dist > 0 then [ ("Distance", toHtml $ showDistance dist) ] else [])
--, div_ $ presentPrice (itemPrice item)
] ++
maybe [] (\borrower -> [with [style "color: red; font-size: 12px", class_ "BorrowedByLink"] $ "Borrowed by " +++ linkedLenderFullName borrower]) mBorrower
++ [ vSpace 5
, present button
, vSpace 10
, hList $ map present buttons
]
) ! style "width: 200px; height: 130px; padding: 5px; font-size: 12px"
]
, -} xp $
addStyle "font-size: 12px; margin: 5px;" $
row [ h $ linkedItem item $ (div_ (boxedEx 1 1 $ image ("items/" ++ get itemImage item) ! style "height: 130px")) ! style "width: 134px" ! align "top"
, h $ hSpace 10
                 , vAlign Top $ hStretch $ h $ linkedItem item $ xp $ -- TODO: need the explicit width here, or long names widen the column. Not nice.
col [ h $ with [style "font-weight: bold; font-size: 15px; overflow: hidden; width:416px"] $ toHtml (getItemCategoryName item ++ ": " ++ get itemName item)
, h $ vSpace 2
, h $ with [style "color: #333"] $
presentEditableProperties props -- ++
                                               -- [("Points", toHtml . show $ get itemPrice item)]
, h $ vSpace 3
, h $ with [style "font-weight: bold"] $ "Description:"
, hStretch $ h $ with [class_ "ellipsis multiline", style "height: 30px"] $
{- 30 : 2 * 14 + 2 -}
multiLineStringToHtml $ get itemDescr item
]
, h $ hSpace 10
, vAlign Top $ addStyle "width: 200px" $
col ([ h $ presentProperties $ [ ("Owner", linkedLenderFullName owner)
, ("Rating", with [style "font-size: 17px; position: relative; top: -6px; height: 12px" ] $ presentRating 5 $ get lenderRating owner)
] ++
(if dist > 0 then [ ("Distance", toHtml $ showDistance dist) ] else [])
--, div_ $ presentPrice (itemPrice item)
] ++
maybe [] (\borrower -> [h $ with [style "color: red", class_ "BorrowedByLink"] $ "Borrowed by " +++ linkedLenderFullName borrower]) mBorrower
++ [h $ vSpace 5
, h $ present button
, h $ vSpace 10
, row $ map (h . present) buttons
])
]
-- ]
vDivList elts = div_ $ mapM_ div_ elts
presentProperties :: [(String, Html)] -> Html
presentProperties props =
table $ sequence_ [ tr $ sequence_ [ td $ with [style "font-weight: bold"] $ toHtml propName, td $ nbsp +++ ":" +++ nbsp
, td $ toHtml propVal ]
| (propName, propVal) <- props
]
getInlineCategoryProps vid isEdited item c = do { props <- getAllCategoryProps vid isEdited item c
; return [ inlineProp | Left inlineProp <- props ]
}
getFullCategoryProps vid isEdited item c = fmap (map $ either id id) $ getAllCategoryProps vid isEdited item c
-- Left: shown in both Inline and Full; Right: shown only in Full
getAllCategoryProps :: ViewId -> Bool -> Item -> Category -> WebViewM Database [Either (String,Property Database Item) (String, Property Database Item)]
getAllCategoryProps vid isEdited item c =
let mkStringProp :: ((String, Property Database Item) -> Either (String,Property Database Item) (String, Property Database Item)) -> String -> Category :~> String ->
WebViewM Database (Either (String,Property Database Item) (String, Property Database Item))
mkStringProp leftOrRight name catField = fmap (\p -> leftOrRight (name, p)) $ mkEditableProperty vid isEdited mEditedItem (pLens "getAllCategoryProps" $ catField . itemCategory) id Just toHtml item
mkIntProp :: ((String, Property Database Item) -> Either (String,Property Database Item) (String, Property Database Item)) -> String -> Category :~> Int ->
WebViewM Database (Either (String,Property Database Item) (String, Property Database Item))
mkIntProp leftOrRight name catField = fmap (\p -> leftOrRight (name, p)) $ mkEditableProperty vid isEdited mEditedItem (pLens "getAllCategoryProps" $ catField . itemCategory) (show :: Int -> String) readMaybe toHtml item
in sequence $ case c of
Book{} ->
[ mkStringProp Left "Author" bookAuthor
, mkIntProp Right "Year" bookYear
, mkStringProp Left "Language" bookLanguage
, mkStringProp Left "Genre" bookGenre
, mkIntProp Right "Nr. of pages" bookPages
]
Game{} ->
[ mkStringProp Left "Platform" gamePlatform
, mkIntProp Left "Year" gameYear
, mkStringProp Right "Developer" gameDeveloper
, mkStringProp Left "Genre" gameGenre
]
CD{} ->
[ mkStringProp Left "Artist" cdArtist
, mkIntProp Left "Year" cdYear
, mkStringProp Left "Genre" cdGenre
]
DVD{} ->
[ mkIntProp Right "Season" dvdSeason
, mkStringProp Right "Language" dvdLanguage
, mkIntProp Right "Year" dvdYear
, mkStringProp Left "Genre" dvdGenre
, mkStringProp Left "Director" dvdDirector
, mkIntProp Right "Nr. of episodes" dvdNrOfEpisodes
, mkIntProp Right "Running time" dvdRunningTime
, fmap (\p -> Left ("IMDb", p)) $ mkEditableProperty vid isEdited mEditedItem (pLens "getAllCategoryProps" $ dvdIMDb . itemCategory) id Just
(\str -> if null str then "" else a (toHtml str) ! href (toValue str) ! target "_blank" ! style "color: blue")
item -- this one is special because of the html presentation as a link
]
Tool{} ->
[ mkStringProp Left "Brand" toolBrand
, mkStringProp Left "Type" toolType
]
_ -> []
presentPrice price =
with [style "width:30px; height:28px; padding: 2px 0px 0px 0px; color: white; background-color: black; font-family: arial; font-size:24px; text-align: center"] $
toHtml $ show price
instance Storeable Database ItemView
linkedItemName item = linkedItem item $ toHtml (get itemName item)
linkedItem item html =
a ! (href $ (toValue $ "/#item?item=" ++ (show $ get (itemIdNr . itemId) item))) << html
linkedLenderName lender = linkedLender lender $ toHtml $ get (lenderIdLogin . lenderId) lender
linkedLenderFullName lender = linkedLender lender $ toHtml (get lenderFirstName lender ++ " " ++ get lenderLastName lender)
linkedLender lender html =
a! (href $ (toValue $ "/#lender?lender=" ++ get (lenderIdLogin . lenderId) lender)) << html
rootViewLink :: String -> Html -> Html
rootViewLink rootViewName html = a ! class_ "MenuBarLink" ! (href $ (toValue $ "/#" ++ rootViewName)) << html
data BorrowItLoginOutView = BorrowItLoginOutView (UntypedWebView Database) deriving (Eq, Show, Typeable)
deriveInitial ''BorrowItLoginOutView
deriveMapWebViewDb ''Database ''BorrowItLoginOutView
instance Storeable Database BorrowItLoginOutView
mkBorrowItLoginOutView = mkWebView $
\vid oldItemView@BorrowItLoginOutView{} ->
do { user <- getUser
; loginOutView <- if user == Nothing then mkUntypedWebView . mkLoginView $ \(login, fullName) ->
evalJSEdit [ jsNavigateTo $ "'#lender?lender="++login++"'" ]
else mkUntypedWebView mkLogoutView
; return $ BorrowItLoginOutView loginOutView
}
instance Presentable BorrowItLoginOutView where
present (BorrowItLoginOutView loginOutView) =
present loginOutView
mkItemRootView = mkMaybeView "Unknown item" $
do { args <- getHashArgs
; case lookup "item" args of
mItem | Just item <- mItem, Just i <- readMaybe item ->
do { mItem <- withDb $ \db -> Map.lookup (ItemId i) (get allItems db)
; case mItem of
Nothing -> return Nothing
Just item -> fmap Just $ mkItemView Full item
}
| otherwise -> return Nothing
}
data LenderView =
LenderView Inline User Lender (Maybe Lender) {- [Property Lender] -} [(String,Property Database Lender)] [(String,Property Database Lender)]
--(Maybe (Widget (TextView Database, TextView Database)))
[WV ItemView ] [Widget (Button Database)] [Widget (Button Database)]
deriving (Eq, Show, Typeable)
-- todo: edit button in Inline/Full datatype?
deriveInitial ''LenderView
deriveMapWebViewDb ''Database ''LenderView
mEditedLender :: LenderView :-> Maybe Lender
mEditedLender = lens (\(LenderView _ _ _ mLender _ _ _ _ _) -> mLender)
(\fn (LenderView a b c mLender e f g h i) -> (LenderView a b c (fn mLender) e f g h i))
instance Storeable Database LenderView -- where
-- save (LenderView _ _ _ modifiedLender@Lender{lenderId=lId} _ _ _ _) = updateLender lId $ \lender -> modifiedLender
mkLenderView inline lender = mkWebView $
\vid oldLenderView@(LenderView _ _ _ mEdited _ _ _ _ _) ->
do { mUser <- getUser
; let itemIds = get lenderItems lender
; items <- withDb $ \db -> getOwnedItems (get lenderId lender) db
; fName <- mkTextField (get lenderFirstName lender)
; lName <- mkTextField (get lenderLastName lender)
{-
; prop0 <- mkStaticProperty (lenderIdLogin . lenderId) toHtml lender
; prop1 <- mkEditableProperty vid (isJust mEdited) mEditedLender lenderFirstName id Just toHtml lender
; let testProps = [ prop0, prop1 ]
-}
; props <- (if lenderIsUser lender mUser then getLenderPropsSelf else getLenderPropsEveryone) vid (isJust mEdited) lender
; extraProps <- if lenderIsUser lender mUser then getExtraProps vid (isJust mEdited) lender else return []
; (itemWebViews, buttons, addButtons) <- if isInline inline then return ([],[],[]) else
do { itemWebViews <- mapM (mkItemView Inline) items
; editButton <- mkButton (maybe "Edit" (const "Done") mEdited) (lenderIsUser lender mUser) $
case mEdited of
Nothing -> viewEdit vid $ set mEditedLender (Just lender)
Just updatedLender -> if lender /= updatedLender
then
showDialogEdit ("Save changes?")
[ ("Save", Just $
do { modifyDb $ updateLender (get lenderId updatedLender) $ \lender -> updatedLender
; viewEdit vid $ set mEditedLender Nothing
; liftIO $ putStrLn $ "updating lender\n" ++ show updatedLender
})
, ("Don't save", Just $
do { viewEdit vid $ set mEditedLender Nothing
})
, ("Cancel", Nothing )
]
else
viewEdit vid $ set mEditedLender Nothing
; buttons <- if not $ isJust mEdited then return [] else
fmap singleton $ mkButton "Cancel" True $ viewEdit vid $ set mEditedLender Nothing
; addButtons <- sequence [ mkAddButton (someEmptyItem $ get lenderId lender)
| someEmptyItem <- [emptyBook, emptyGame,emptyCD,emptyDVD,emptyTool] ]
; return (itemWebViews, [ editButton ] ++ buttons,addButtons)
}
; return $ LenderView inline mUser lender mEdited {- testProps -} props extraProps itemWebViews buttons addButtons
}
mkAddButton :: Item -> WebViewM Database (Widget (Button Database))
mkAddButton item = mkButton (getItemCategoryName item) True $ modifyDb $ insertAsNewItem item
lenderIsUser lender Nothing = False
lenderIsUser lender (Just (login,_)) = get (lenderIdLogin . lenderId) lender == login
instance Presentable LenderView where
present (LenderView Full mUser lender _ {- testProps -} props extraProps itemWebViews buttons addButtons) =
vList [ vSpace 20
, hList [ (div_ (boxedEx 1 1 $ image ("lenders/" ++ get lenderImage lender) ! style "height: 200px")) ! style "width: 204px" ! align "top"
, hSpace 20
, vList [ withStyle "font-size: 24px; font-weight: bold; margin-bottom: 19px" $ {- if editing
then hList [ present fName, nbsp, present lName ]
else -} (toHtml $ showName lender) -- +++ (with [style "display: none"] $ concatHtml $ map present [fName,lName]) -- todo: not nice!
, hList [ vList [ presentEditableProperties props
--, vList $ map present testProps
, vSpace 20
, hList $ map present buttons
]
, hSpace 20
, presentEditableProperties extraProps
]
]
]
, vSpace 20
, withStyle "font-size: 24px; font-weight: bold; margin-bottom: 10px" $ (toHtml $ get lenderFirstName lender ++ "'s Items")
, hList $ "Add:" : map present addButtons
, vSpace 5
, vList $ map present itemWebViews
]
present (LenderView Inline mUser lender mEdited {- testProps -} props extraProps itemWebViews buttons _) =
linkedLender lender $
hList [ (div_ (boxedEx 1 1 $ image ("lenders/" ++ get lenderImage lender) ! style "height: 30px")) ! style "width: 34px" ! align "top"
, nbsp
, nbsp
, vList [ toHtml (showName lender)
--, vList $ map present testProps
, span_ (presentRating 5 $ get lenderRating lender) ! style "font-size: 20px"
, hList $ map present buttons
]
-- , with [style "display: none"] $ concatHtml $ map present [fName,lName] ++ [present editButton] -- todo: not nice!
]
getLenderPropsEveryone vid isEdited lender = do { props <- getLenderPropsAll vid isEdited lender
; return [ prop | Left prop <- props ]
}
getLenderPropsSelf vid isEdited lender = do { props <- getLenderPropsAll vid isEdited lender
; return [ either id id eProp | eProp <- props ]
}
-- todo: composed properties? address is street + nr
-- non-string properties?
getLenderPropsAll vid isEdited lender = sequence
[ fmap (\p -> Right ("BorrowIt ID", p)) $ mkStaticProperty (lenderIdLogin . lenderId) toHtml lender
, fmap (\p -> Left ("M/F", p)) $ mkEditableSelectProperty vid isEdited mEditedLender lenderGender show (toHtml . show) [M,F] lender
, fmap (\p -> Left ("E-mail", p)) $ mkEditableProperty vid isEdited mEditedLender lenderMail id Just toHtml lender
  , fmap (\p -> Right ("Address", p)) $ mkEditableProperty vid isEdited mEditedLender lenderStreet id Just toHtml lender
, fmap (\p -> Left ("Zip code", p)) $ mkEditableProperty vid isEdited mEditedLender lenderZipCode id Just toHtml lender
, fmap (\p -> Right ("City", p)) $ mkEditableProperty vid isEdited mEditedLender lenderCity id Just toHtml lender
]
getExtraProps' lender = [ ("Rating", with [style "font-size: 20px; position: relative; top: -5px; height: 17px" ] (presentRating 5 $ get lenderRating lender))
, ("Points", toHtml . show $ get lenderNrOfPoints lender)
, ("Nr. of items", toHtml . show $ length (get lenderItems lender))
]
getExtraProps vid isEdited lender = sequence
[ fmap ("Rating",) $ mkEditableSelectProperty vid isEdited mEditedLender lenderRating show (presentRating 5) [0..5] lender
, fmap ("Points",) $ mkStaticProperty lenderNrOfPoints (toHtml . show) lender
, fmap ("Nr. of items",) $ mkStaticProperty lenderItems (toHtml . show . length ) lender
]
{-
Lender { lenderId :: LenderId, lenderFirstName :: String, lenderLastName :: String, lenderGender :: Gender
, lenderMail :: String
, lenderStreet :: String, lenderStreetNr :: String, lenderCity :: String, lenderZipCode :: String
, lenderCoords :: (Double, Double) -- http://maps.google.com/maps/geo?q=adres&output=xml for lat/long
, lenderImage :: String
, lenderRating :: Int, lenderItems :: [ItemId]
-}
data ItemsRootView =
ItemsRootView (WV (SearchView Database (SortView SortDefaultPresent Database ItemView)))
deriving (Eq, Show, Typeable)
deriveInitial ''ItemsRootView
deriveMapWebViewDb ''Database ''ItemsRootView
instance Storeable Database ItemsRootView
mkItemsRootView :: WebViewM Database (WV ItemsRootView)
mkItemsRootView = mkWebView $
\vid oldLenderView@(ItemsRootView _) ->
do { let namedSortFunctions = [ ("Name", compare `on` get itemName)
, ("Owner", compare `on` get itemDescr)
, ("Category", compare `on` get itemCategory)
]
; searchView <- mkSearchView "Search items:" "q" $ \searchTerm ->
do { results :: [Item] <- withDb $ \db -> searchItems searchTerm db
; mkSortView namedSortFunctions (mkItemView Inline) results
}
; return $ ItemsRootView searchView
}
instance Presentable ItemsRootView where
present (ItemsRootView searchView) =
present searchView
data LendersRootView = LendersRootView (WV (SearchView Database (SortView SortDefaultPresent Database LenderView)))
deriving (Eq, Show, Typeable)
deriveInitial ''LendersRootView
deriveMapWebViewDb ''Database ''LendersRootView
instance Storeable Database LendersRootView
mkLendersRootView :: WebViewM Database (WV LendersRootView)
mkLendersRootView = mkWebView $
\vid oldLenderView@(LendersRootView _) ->
do { let namedSortFunctions = [ ("First name", compare `on` get lenderFirstName)
, ("Last name", compare `on` get lenderLastName)
, ("Rating", compare `on` get lenderRating)
]
; searchView <- mkSearchView "Search lenders: " "q" $ \searchTerm ->
do { results :: [Lender] <- withDb $ \db -> searchLenders searchTerm db
; mkSortView namedSortFunctions (mkLenderView Inline) results
}
; return $ LendersRootView searchView
}
instance Presentable LendersRootView where
present (LendersRootView searchView) = present searchView
mkLenderRootView = mkMaybeView "Unknown lender" $
do { args <- getHashArgs
; case lookup "lender" args of
Just lender -> do { mLender <- withDb $ \db -> Map.lookup (LenderId lender) (get allLenders db)
; case mLender of
Nothing -> return Nothing
Just lender -> fmap Just $ mkLenderView Full lender
}
Nothing -> return Nothing
}
data BorrowedRootView = BorrowedRootView [WV ItemView] [WV ItemView]
deriving (Eq, Show, Typeable)
deriveInitial ''BorrowedRootView
instance Storeable Database BorrowedRootView
mkBorrowedRootView :: WebViewM Database (WebView Database BorrowedRootView)
mkBorrowedRootView = mkWebView $
\vid oldLenderView@(BorrowedRootView _ _) ->
do { mUser <- getUser
; case mUser of
Just (login,_) ->
do {
; borrowedItems <- withDb $ \db -> getBorrowedItems (LenderId login) db
; borrowed <- mapM (mkItemView Inline) borrowedItems
; lendedItems <- withDb $ \db -> getLendedItems (LenderId login) db
; lended <- mapM (mkItemView Inline) lendedItems
; return $ BorrowedRootView borrowed lended
}
Nothing -> -- TODO: instead of showing empties, we should navigate to a different page on logout
return $ BorrowedRootView [] []
}
instance Presentable BorrowedRootView where
present (BorrowedRootView borrowed lended) =
h3 "Lended" +++
vList (map present lended) +++
h3 "Borrowed" +++
vList (map present borrowed)
-- unnecessary at the moment, as the page has no controls of its own
data BorrowItPageView = BorrowItPageView User String (Widget (EditAction Database)) (UntypedWebView Database) deriving (Eq, Show, Typeable)
deriveInitial ''BorrowItPageView
deriveMapWebViewDb ''Database ''BorrowItPageView
instance Storeable Database BorrowItPageView
--updateById id update db = let object = unsafeLookup
mkBorrowItPageView menuItemLabel mWebViewM = mkWebView $
\vid oldItemView@BorrowItPageView{} ->
do { user <- getUser
; wv <- mWebViewM
; logoutAction <- mkEditAction $ logoutEdit
; return $ BorrowItPageView user menuItemLabel logoutAction wv
}
-- TODO: click in padding does not select item
instance Presentable BorrowItPageView where
present (BorrowItPageView user menuItemLabel logoutAction wv) =
-- imdb: background-color: #E3E2DD; background-image: -moz-linear-gradient(50% 0%, #B3B3B0 0px, #E3E2DD 500px);
mkPage [style $ gradientStyle (Just 500) "#404040" {- "#B3B3B0" -} "#E3E2DD" ++ " font-family: arial"] $ xp $
{-vList [ (div_ ! style "float: left; font-size: 50px; color: #ddd" $ "BorrowIt") +++
case user of
Nothing -> noHtml
                  Just (login,_) -> div_ ! style "float: right; margin-top:35px; color: #ddd" $ "Logged in as "+++ (span_ ! style "color: white" $ toHtml login)
--, present loginOutView
, div_ ! thestyle "border: 1px solid black; background-color: #f0f0f0; box-shadow: 0 0 8px rgba(0, 0, 0, 0.7);" $
vList [ hStretchList (map (E . highlightItem) leftMenuItems ++ [space] ++ map (E . highlightItem) rightMenuItems)
! (thestyle $ "color: white; font-size: 17px;"++ gradientStyle Nothing "#707070" "#101010")
, div_ ! thestyle "padding: 10px" $ present wv ] ! width "800px"
]
-}
col [ row [ addStyle "font-size: 50px; color: #ddd" $ text "Borrow"
, addStyle "font-size: 50px; color: #888" $ text "It"
, flexHSpace
, vAlign Bottom $ h $
case user of
Nothing -> noHtml
Just (login,_) -> withStyle "margin-bottom:5px; color: #ddd" $ "Logged in as "+++ (span_ ! style "color: white" $ toHtml login)
]
, addStyle "width: 800px; border: 1px solid black; background-color: #f0f0f0; box-shadow: 0 0 8px rgba(0, 0, 0, 0.7);" $
hStretch $
col [ addStyle ("color: white; font-size: 17px;"++ gradientStyle Nothing "#707070" "#101010") $
row $ map (h . highlightItem) leftMenuItems ++ [flexHSpace] ++ map (h . highlightItem) rightMenuItems
, h $ with [class_ "PageContents" , style "padding: 10px"] $ present wv
]
]
where leftMenuItems = map (\(label,rootView) -> (label, rootViewLink rootView $ toHtml label)) $
[("Home",""), ("Items", "items"), ("Lenders", "lenders")] ++ userMenuItems user
rightMenuItems = [ if user == Nothing then ("Login", rootViewLink "login" "Login")
else ("Logout", withEditAction logoutAction "Logout") ] -- Logout is not a menu, so it will not be highlighted
userMenuItems Nothing = []
userMenuItems (Just (userId, _)) = [("My profile", "lender?lender="++userId), ("Lended", "lended")]
highlightItem (label, e) = with [ onmouseover "this.style.backgroundColor='#666'" -- not nice, but it works and prevents
, onmouseout "this.style.backgroundColor=''" -- the need for a css declaration
, style $ "height: 25px; margin: 0px 20px 0px 20px; " ++
if label == menuItemLabel
then gradientStyle Nothing "#303030" "#101010"
else "" ] $
with [style "padding: 2px 10px 5px 10px;" ] e
mkHomeView :: WebViewM Database (WebView Database HtmlTemplateView)
mkHomeView = mkHtmlTemplateView "BorrowItWelcome.html" []
--- Testing
data TestView =
TestView Int (Widget (RadioView Database)) (Widget (Button Database)) (Widget (TextView Database)) (WV HtmlView) (WV HtmlTemplateView)
(Widget (LabelView Database)) (Widget (TextView Database))
deriving (Eq, Show, Typeable)
deriveInitial ''TestView
deriveMapWebViewDb ''Database ''TestView
mkTestView :: WebViewM Database (WebView Database TestView)
mkTestView = mkWebView $
\vid oldTestView@(TestView _ radioOld _ _ _ _ _ oldTxtArea) ->
do { radio <- mkRadioViewWithChange ["Name", "Points", "Three"] (getSelection radioOld) True $ \sel -> viewEdit vid $ \v -> trace ("selected"++show sel) v :: TestView
; let radioSel = getSelection radioOld
; b <- mkButton "Test button" True $ viewEdit vid $ \(TestView a b c d e f g h) -> TestView 2 b c d e f g h
; tf <- mkTextField "bla"
; liftIO $ putStr $ show oldTestView
; wv1 <- mkHtmlView $ "one"
; wv2 <- mkHtmlTemplateView "test.html" []
; liftIO $ putStrLn $ "radio value " ++ show radioSel
; let (wv1',wv2') = if radioSel == 0 then (wv1,wv2) else (wv1, wv2) -- (wv2,wv1) -- switching no longer possible without using untyped
; lbl <- mkLabelViewWithStyle "label" "color: blue"
; txtArea <- mkTextAreaWithStyleChange (getStrVal oldTxtArea) ("background-color: "++if getStrVal oldTxtArea /= "" then "green" else "red") $ \str -> viewEdit vid $ \v -> trace ("edited "++show str) v :: TestView
; let wv = TestView radioSel radio b tf wv1' wv2' lbl txtArea
--; liftIO $ putStrLn $ "All top-level webnodes "++(show (everythingTopLevel webNodeQ wv :: [WebNode Database]))
; return $ wv
}
instance Presentable TestView where
present (TestView radioSel radio button tf wv1 wv2 lbl txtArea) =
vList [present radio, present button, present tf, toHtml $ show radioSel, present wv1, present wv2
, present lbl
, present txtArea
]
instance Storeable Database TestView
data TestView2 =
TestView2 (Widget (EditAction Database)) (Widget (RadioView Database)) (Widget (TextView Database))
String String
deriving (Eq, Show, Typeable)
deriveInitial ''TestView2
deriveMapWebViewDb ''Database ''TestView2
mkTestView2 :: WebViewM Database (WV TestView2)
mkTestView2 = mkWebView $
\vid oldTestView@(TestView2 ea radioOld text str1 str2) ->
do { ea <- mkEditAction $ viewEdit vid $ \(TestView2 a b c d e) -> TestView2 a b c "clicked" e
; radio <- mkRadioViewWithStyle ["Edit", "View"] (getSelection radioOld) True "background-color: red"
; let radioSel = getSelection radioOld
; text <- mkTextFieldWithStyle "Test" "background-color: red" `withTextViewChange` (\str -> viewEdit vid $ \(TestView2 a b c d e) -> TestView2 a b c d str)
; liftIO $ putStr $ show oldTestView
; liftIO $ putStrLn $ "radio value " ++ show radioSel
-- ; propV2 <- mkPropertyView (radioSel == 0) "Straat" str2 $ \str -> viewEdit vid $ \(TestView2 a b c d) -> TestView2 a b c str
; let wv = TestView2 ea radio text str1 str2
--; liftIO $ putStrLn $ "All top-level webnodes "++(show (everythingTopLevel webNodeQ wv :: [WebNode Database]))
; return $ wv
}
instance Presentable TestView2 where
present (TestView2 ea radio text p1str p2str) =
vList [ present radio
, present text
, toHtml $ "Property strings: " ++ show p1str ++ " and " ++ show p2str
, withEditAction ea "click me"
]
instance Storeable Database TestView2
{-
mkTestView3 msg = mkPresentView (\hs -> hList $ toHtml (msg :: String) : hs) $
  do { wv1 <- mkHtmlView $ "one"
; wv2 <- mkHtmlTemplateView "test.html" []
; return $ [wv1,wv2]
}
-}
-- some webviews for testing with ghci
{-
data AView db = AView (WebView db (BView db)) (Widget (TextView db)) String (Widget (TextView db))
| AAView (WebView db (BView db)) deriving (Show, Eq, Typeable)
data BView db = BView String (AView db) deriving (Show, Eq, Typeable)
instance Presentable (AView db)
instance Storeable db (AView db)
instance Initial (AView db)
instance Presentable (BView db)
instance Storeable db (BView db)
instance Initial (BView db)
instance Typeable db => MapWebView db (AView db) where
mapWebView (AView wv1 wd1 str wd2) =
AView <$> mapWebView wv1 <*> mapWebView wd1 <*> mapWebView str <*> mapWebView wd2
--deriveMapWebView ''AView
instance Typeable db => MapWebView db (BView db) where
mapWebView (BView str a) = BView <$> mapWebView str <*> mapWebView a
-}
--testmkwv :: x -> WebView Database
testmkwv x = WebView (ViewId []) noId noId undefined $ x
testwv :: Int -> WV HtmlTemplateView
testwv i = testmkwv $ HtmlTemplateView (show i)
testwd :: String -> Widget (Button Database)
testwd str = buttonWidget (ViewId []) str True "" "" logoutEdit
testproplist :: [(String, Property Database Item)]
testproplist = [("BorrowIt ID",StaticProperty "martijn"),("M/V",EditableProperty (Right (PropertySelectView (Widget {getWidgetStubId = Id {unId = -1}, getWidgetId = Id {unId = -1}, getWidgetWidget = SelectView {getSelectViewId = ViewId [], getSelectItems = ["M","F"], getSelectSelection = 0, getSelectEnabled = True, getSelectStyle = "", getSelectChange = Just undefined}}))))]
deriveMapWebViewDb ''Database ''BorrowedRootView
---- Main (needs to be below all webviews that use deriveInitial)
main :: IO ()
main = server 8101 "BorrowIt" rootViews ["BorrowIt.css"] "BorrowItDB.txt" mkInitialDatabase users
rootViews :: RootViews Database
rootViews = [ mkRootView "" $ mkBorrowItPageView "Home" $ mkUntypedWebView mkHomeView
, mkRootView "lenders" $ mkBorrowItPageView "Lenders" $ mkUntypedWebView mkLendersRootView
, mkRootView "lender" $ mkBorrowItPageView "Lender" $ mkUntypedWebView mkLenderRootView
, mkRootView "items" $ mkBorrowItPageView "Items" $ mkUntypedWebView mkItemsRootView
, mkRootView "item" $ mkBorrowItPageView "Item" $ mkUntypedWebView mkItemRootView
, mkRootView "lended" $ mkBorrowItPageView "Lended" $ mkUntypedWebView mkBorrowedRootView
, mkRootView "login" $ mkBorrowItPageView "Login" $ mkUntypedWebView mkBorrowItLoginOutView
, mkRootView "test" $ mkTestView, mkRootView "test2" mkTestView2
]
|
Oblosys/webviews
|
src/exec/BorrowIt/Main.hs
|
mit
| 42,110
| 1
| 29
| 13,369
| 9,363
| 4,773
| 4,590
| 483
| 8
|
-- |
-- Module : Network.IRC.Client.Lens
-- Copyright : (c) 2017 Michael Walker
-- License : MIT
-- Maintainer : Michael Walker <mike@barrucadu.co.uk>
-- Stability : experimental
-- Portability : CPP
--
-- 'Lens'es and 'Prism's.
module Network.IRC.Client.Lens where
import Control.Concurrent.STM (TVar)
import Control.Monad.Catch (SomeException)
import Data.ByteString (ByteString)
import Data.Profunctor (Choice(right'),
Profunctor(dimap))
import Data.Text (Text)
import Data.Time (NominalDiffTime)
import Network.IRC.Client.Internal.Lens
import Network.IRC.Client.Internal.Types
{-# ANN module ("HLint: ignore Redundant lambda") #-}
-- CPP seems to dislike the first ' on the RHS…
-- This style of CPP usage doesn't work with clang, which means it won't work on Mac.
{-
#define PRIME() '
#define LENS(S,F,A) \
{-# INLINE F #-}; \
{-| PRIME()Lens' for '_/**/F'. -}; \
F :: Lens' S A; \
F = \ afb s -> (\ b -> s {_/**/F = b}) <$> afb (_/**/F s)
#define GETTER(S,F,A) \
{-# INLINE F #-}; \
{-| PRIME()Getter' for '_/**/F'. -}; \
F :: Getter S A; \
F = \ afb s -> (\ b -> s {_/**/F = b}) <$> afb (_/**/F s)
#define PRISM(S,C,ARG,TUP,A) \
{-| PRIME()Prism' for 'C'. -}; \
{-# INLINE _/**/C #-}; \
_/**/C :: Prism' S A; \
_/**/C = dimap (\ s -> case s of C ARG -> Right TUP; _ -> Left s) \
(either pure $ fmap (\ TUP -> C ARG)) . right'
-}
-------------------------------------------------------------------------------
-- * Lenses for 'IRCState'
{-# INLINE connectionConfig #-}
{-| 'Getter' for '_connectionConfig'. -}
connectionConfig :: Getter (IRCState s) (ConnectionConfig s)
connectionConfig = \ afb s -> (\ b -> s {_connectionConfig = b}) <$> afb (_connectionConfig s)
{-# INLINE userState #-}
{-| 'Lens' for '_userState'. -}
userState :: Lens' (IRCState s) (TVar s)
userState = \ afb s -> (\ b -> s {_userState = b}) <$> afb (_userState s)
{-# INLINE instanceConfig #-}
{-| 'Lens' for '_instanceConfig'. -}
instanceConfig :: Lens' (IRCState s) (TVar (InstanceConfig s))
instanceConfig = \ afb s -> (\ b -> s {_instanceConfig = b}) <$> afb (_instanceConfig s)
{-# INLINE connectionState #-}
{-| 'Lens' for '_connectionState'. -}
connectionState :: Lens' (IRCState s) (TVar ConnectionState)
connectionState = \ afb s -> (\ b -> s {_connectionState = b}) <$> afb (_connectionState s)
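-- A usage sketch, assuming these are the usual van Laarhoven encodings from
-- "Network.IRC.Client.Internal.Lens" and that compatible combinators such as
-- 'view' and 'set' (e.g. from the lens or microlens packages) are in scope;
-- the names @ircState@ and @tvar@ below are illustrative only:
--
-- > view instanceConfig ircState -- the 'TVar' holding the 'InstanceConfig'
-- > set userState tvar ircState  -- replace the user-state 'TVar'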
-------------------------------------------------------------------------------
-- * Lenses for 'ConnectionConfig'
{-# INLINE server #-}
{-| 'Getter' for '_server'. -}
server :: Getter (ConnectionConfig s) ByteString
server = \ afb s -> (\ b -> s {_server = b}) <$> afb (_server s)
{-# INLINE port #-}
{-| 'Getter' for '_port'. -}
port :: Getter (ConnectionConfig s) Int
port = \ afb s -> (\ b -> s {_port = b}) <$> afb (_port s)
{-# INLINE username #-}
{-| 'Lens' for '_username'. -}
username :: Lens' (ConnectionConfig s) Text
username = \ afb s -> (\ b -> s {_username = b}) <$> afb (_username s)
{-# INLINE realname #-}
{-| 'Lens' for '_realname'. -}
realname :: Lens' (ConnectionConfig s) Text
realname = \ afb s -> (\ b -> s {_realname = b}) <$> afb (_realname s)
{-# INLINE password #-}
{-| 'Lens' for '_password'. -}
password :: Lens' (ConnectionConfig s) (Maybe Text)
password = \ afb s -> (\ b -> s {_password = b}) <$> afb (_password s)
{-# INLINE flood #-}
{-| 'Lens' for '_flood'. -}
flood :: Lens' (ConnectionConfig s) NominalDiffTime
flood = \ afb s -> (\ b -> s {_flood = b}) <$> afb (_flood s)
{-# INLINE timeout #-}
{-| 'Lens' for '_timeout'. -}
timeout :: Lens' (ConnectionConfig s) NominalDiffTime
timeout = \ afb s -> (\ b -> s {_timeout = b}) <$> afb (_timeout s)
{-# INLINE onconnect #-}
{-| 'Lens' for '_onconnect'. -}
onconnect :: Lens' (ConnectionConfig s) (IRC s ())
onconnect = \ afb s -> (\ b -> s {_onconnect = b}) <$> afb (_onconnect s)
{-# INLINE ondisconnect #-}
{-| 'Lens' for '_ondisconnect'. -}
ondisconnect :: Lens' (ConnectionConfig s) (Maybe SomeException -> IRC s ())
ondisconnect = \ afb s -> (\ b -> s {_ondisconnect = b}) <$> afb (_ondisconnect s)
{-# INLINE logfunc #-}
{-| 'Lens' for '_logfunc'. -}
logfunc :: Lens' (ConnectionConfig s) (Origin -> ByteString -> IO ())
logfunc = \ afb s -> (\ b -> s {_logfunc = b}) <$> afb (_logfunc s)
-------------------------------------------------------------------------------
-- * Lenses for 'InstanceConfig'
{-# INLINE nick #-}
{-| 'Lens' for '_nick'. -}
nick :: Lens' (InstanceConfig s) Text
nick = \ afb s -> (\ b -> s {_nick = b}) <$> afb (_nick s)
{-# INLINE channels #-}
{-| 'Lens' for '_channels'. -}
channels :: Lens' (InstanceConfig s) [Text]
channels = \ afb s -> (\ b -> s {_channels = b}) <$> afb (_channels s)
{-# INLINE version #-}
{-| 'Lens' for '_version'. -}
version :: Lens' (InstanceConfig s) Text
version = \ afb s -> (\ b -> s {_version = b}) <$> afb (_version s)
{-# INLINE handlers #-}
{-| 'Lens' for '_handlers'. -}
handlers :: Lens' (InstanceConfig s) [EventHandler s]
handlers = \ afb s -> (\ b -> s {_handlers = b}) <$> afb (_handlers s)
{-# INLINE ignore #-}
{-| 'Lens' for '_ignore'. -}
ignore :: Lens' (InstanceConfig s) [(Text, Maybe Text)]
ignore = \ afb s -> (\ b -> s {_ignore = b}) <$> afb (_ignore s)
-------------------------------------------------------------------------------
-- * Prisms for 'ConnectionState'
{-| 'Prism' for 'Connected'. -}
{-# INLINE _Connected #-}
_Connected :: Prism' ConnectionState ()
_Connected = dimap (\ s -> case s of Connected -> Right (); _ -> Left s)
(either pure $ fmap (\ () -> Connected)) . right'
{-| 'Prism' for 'Disconnecting'. -}
{-# INLINE _Disconnecting #-}
_Disconnecting :: Prism' ConnectionState ()
_Disconnecting = dimap (\ s -> case s of Disconnecting -> Right (); _ -> Left s)
(either pure $ fmap (\ () -> Disconnecting)) . right'
{-| 'Prism' for 'Disconnected'. -}
{-# INLINE _Disconnected #-}
_Disconnected :: Prism' ConnectionState ()
_Disconnected = dimap (\ s -> case s of Disconnected -> Right (); _ -> Left s)
(either pure $ fmap (\ () -> Disconnected)) . right'
-------------------------------------------------------------------------------
-- * Prisms for 'Origin'
{-| 'Prism' for 'FromServer'. -}
{-# INLINE _FromServer #-}
_FromServer :: Prism' Origin ()
_FromServer = dimap (\ s -> case s of FromServer -> Right (); _ -> Left s)
(either pure $ fmap (\ () -> FromServer)) . right'
{-| 'Prism' for 'FromClient'. -}
{-# INLINE _FromClient #-}
_FromClient :: Prism' Origin ()
_FromClient = dimap (\ s -> case s of FromClient -> Right (); _ -> Left s)
(either pure $ fmap (\ () -> FromClient)) . right'
|
barrucadu/irc-client
|
Network/IRC/Client/Lens.hs
|
mit
| 6,833
| 0
| 13
| 1,458
| 1,761
| 985
| 776
| 88
| 2
|
module Main (main) where
import qualified Data.List as List
import System.Environment
import Util
main :: IO ()
main = do
[module_] <- getArgs
index module_
index :: String -> IO ()
index module_ = inCabalNewRepl ("check-hs/" ++ module_ ++ ".check.hs") input >>= putStr . normalize
where
input = ":browse! " ++ module_
normalize :: String -> String
normalize = unlines . map removeSignature . removeTypeDefinitions . lines . normalizeLines . unlines . removeClasses . normalizeClasses . lines
removeTypeDefinitions :: [String] -> [String]
removeTypeDefinitions = filter (not . List.isPrefixOf "type ")
removeClasses :: [String] -> [String]
removeClasses = removeOmissionDots . go
where
removeOmissionDots = filter (/= "...")
go input = case input of
x : xs | "class " `List.isPrefixOf` x -> case span (" " `List.isPrefixOf`) xs of
(ys, zs) -> map (drop 2) ys ++ go zs
x : xs -> x : go xs
[] -> []
normalizeClasses :: [String] -> [String]
normalizeClasses input = case input of
x : y : ys | "class " `List.isPrefixOf` x && " " `List.isPrefixOf` y -> normalizeClasses ((x ++ y) : ys)
y : ys -> y : normalizeClasses ys
[] -> []
removeSignature :: String -> String
removeSignature = go
where
go input = case input of
x : xs | " :: " `List.isPrefixOf` xs -> [x]
x : xs -> x : go xs
[] -> []
|
haskell-compat/base-compat
|
check/exes/dumpindex.hs
|
mit
| 1,398
| 0
| 17
| 342
| 538
| 283
| 255
| 34
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-----------------------------------------------------------------------------
-- |
-- Module : Main (HUnit)
-- Copyright : (C) 2013 Yorick Laupa
-- License : (see the file LICENSE)
--
-- Maintainer : Yorick Laupa <yo.eight@gmail.com>
-- Stability : provisional
-- Portability : non-portable
--
----------------------------------------------------------------------------
module Main where
import Control.Monad (forM_)
import Data.Text (Text)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.TH
import Test.HUnit (assertEqual)
import Data.Spellcheck.EditModel
import Data.Spellcheck.Distance
import Data.Spellcheck.HolbrookCorpus
testDataNT :: [(Text, Text, Int)]
testDataNT =
[("sitting", "kitten", 3)
,("an act", "a cat", 3)
,("sunday", "saturday", 3)
]
testDataT :: [(Text, Text, Int)]
testDataT =
[("an act", "a cat", 2)]
case_dist_non_trans = runTestCases False testDataNT
case_dist_trans = runTestCases True testDataT
case_load_corpus = do
c <- loadHolbrook "data/holbrook-tagged-dev.dat"
c `seq` return ()
case_load_edits = do
c <- loadEdits
c `seq` return ()
runTestCases b xs =
forM_ xs $ \(s1,s2,exp) ->
let exp_str = show exp
str = show (s1,s2) ++ "should equal to " ++ exp_str in
assertEqual exp_str exp (editDistance b s1 s2)
main :: IO ()
main = $defaultMainGenerator
|
YoEight/spellcheck
|
tests/HUnit.hs
|
mit
| 1,487
| 0
| 14
| 278
| 350
| 206
| 144
| 35
| 1
|
{-# LANGUAGE DataKinds #-}
module Main where
import Data.Monoid
import System.Environment
import System.IO
import qualified Text.PrettyPrint.ANSI.Leijen as Ppr
import qualified Text.Trifecta as P
import qualified Formura.Parser as P
main :: IO ()
main = do
argv <- getArgs
mapM_ process argv
process :: FilePath -> IO ()
process fn = do
mprog <- P.parseFromFileEx (P.runP $ P.program <* P.eof) fn
case mprog of
P.Success prog -> print $ prog
P.Failure doc -> Ppr.displayIO stdout $ Ppr.renderPretty 0.8 80 $ doc <> Ppr.linebreak
|
nushio3/formura
|
exe-src/formura-parser.hs
|
mit
| 583
| 0
| 14
| 136
| 188
| 100
| 88
| 18
| 2
|
import qualified Data.Map as Map
al = [(1, "a"), (2, "b"), (3, "c"), (4, "d")]
mapFromAL = Map.fromList al
mapFold = foldl (\map (k,v) -> Map.insert k v map) Map.empty al
mapManual =
Map.insert 2 "b" .
Map.insert 4 "d" .
Map.insert 1 "a" .
Map.insert 3 "c" $ Map.empty
|
zhangjiji/real-world-haskell
|
ch13/buildmap.hs
|
mit
| 281
| 0
| 10
| 62
| 150
| 83
| 67
| 9
| 1
|
module Main (main) where
import Text.Printf
import Formatting
import Formatting.Clock
import System.Clock
import Network.HTTP
import System.Environment
import Data.Maybe
import Data.List
import Text.XML.Light
import qualified Control.Monad.Parallel as P
data WeatherReport = Report String Float Float Float String
api_call api_key city =
simpleHTTP (getRequest url) >>= getResponseBody
where
url = "http://api.openweathermap.org/data/2.5/weather?q="++city++"&mode=xml&units=metric&lang=sp&appid="++api_key
print_reports [] = putStrLn ""
print_reports (r:rs) =
do print r
print_reports rs
where print (Report c t max min w) = printf "%-30s max:%5.1f min:%5.2f actual: %5.1f %s\n" c max min t w
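-- Pull the value of attribute @attr@ from the first element named @elem@ in
-- the parsed XML (partial: fails if no such element exists or it lacks the attribute).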
xmlRead elem attr =
head . concatMap (map (fromJust.findAttr (unqual attr)) . filterElementsName (== unqual elem)) . onlyElems . parseXML
make_report x =
do val <- x
return (Report (name val) (temp val) (max val) (min val) (weather val))
where
name val = (xmlRead "city" "name" $ val)
temp val = read (xmlRead "temperature" "value" $ val) :: Float
max val = read (xmlRead "temperature" "max" $ val) :: Float
min val = read (xmlRead "temperature" "min" $ val) :: Float
weather val = (xmlRead "weather" "value" $ val)
cmp_rep :: WeatherReport -> WeatherReport -> Ordering
cmp_rep (Report _ t1 _ _ _) (Report _ t2 _ _ _) = compare t2 t1
process_seq api_key args =
do lreps <- reps
print_reports $ sortBy cmp_rep lreps
where reps = mapM (make_report . (api_call api_key)) args -- mapM => [IO r] -> IO [r]
process_par api_key args =
do lreps <- preps
print_reports $ sortBy cmp_rep lreps
where preps = P.mapM (make_report . (api_call api_key)) args -- mapM => [IO r] -> IO [r]
process_args :: [Char] -> [String] -> IO ()
process_args [] _ = putStrLn "you must set the WEATHER_API_KEY environment variable"
process_args _ [] = putStrLn "you must provide a list of cities"
process_args api_key (p:args)
  | p == "-p" && (null args) = putStrLn "you must provide a list of cities"
| p == "-p" = process_par api_key args
| otherwise = process_seq api_key (p:args)
main = do
  t1 <- getTime Monotonic
  api_key <- getEnv "WEATHER_API_KEY"
  args <- getArgs
  process_args api_key args
  t2 <- getTime Monotonic
  printf "time taken to generate the report: %s\n" (format timeSpecs t1 t2)
|
lnds/9d9l
|
desafio2/haskell/src/Main.hs
|
mit
| 2,441
| 0
| 15
| 527
| 811
| 402
| 409
| 54
| 1
|
module Utils where
concatWith :: a -> [[a]] -> [a]
concatWith l = foldl1 (\a b -> a ++ [l] ++ b)
replace :: a -> [a] -> Int -> [a]
replace val list n =
replace' val list n 0
where replace' :: a -> [a] -> Int -> Int -> [a]
replace' val [] n c = []
replace' val (x:xs) n c
| n == c = val : xs
| otherwise = x : replace' val xs n (c + 1)
replace2D :: a -> [[a]] -> Int -> Int -> [[a]]
replace2D val list row col =
replace (replace val (list !! row) col) list row
|
crockeo/maze
|
src/Utils.hs
|
mit
| 514
| 0
| 11
| 165
| 284
| 150
| 134
| 14
| 2
|
-- Find the last but one element of a list.
myButLast :: [a] -> a
myButLast [x,y] = x
myButLast (_:xs) = myButLast xs
main = print testPasses
where
testPasses = 3 == myButLast [1,2,3,4]
|
butchhoward/xhaskell
|
99questions/p02/p02.hs
|
mit
| 191
| 0
| 9
| 40
| 80
| 44
| 36
| 5
| 1
|
{-# LANGUAGE Safe #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE LambdaCase #-}
-- | Logic primitives. See @LogicT@ paper for details.
--
-- * [@LogicT@] [LogicT - backtracking monad transformer with fair operations and pruning](http://okmij.org/ftp/Computation/monads.html#LogicT)
module Control.Eff.Logic.Core where
import Control.Monad
import Control.Eff
import Control.Eff.Exception
import Data.Function (fix)
-- | The MSplit primitive from LogicT paper.
class MSplit m where
-- | The laws for 'msplit' are:
--
-- > msplit mzero = return Nothing
-- > msplit (return a `mplus` m) = return (Just(a, m))
msplit :: m a -> m (Maybe (a, m a))
-- | Embed a pure value into MSplit
{-# INLINE withMSplit #-}
withMSplit :: MonadPlus m => a -> m a -> m (Maybe (a, m a))
withMSplit a rest = return (Just (a, rest))
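-- For example, directly from the definition:
--
-- > withMSplit 1 (return 2) = return (Just (1, return 2))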
-- The handlers are defined in terms of the specific non-determinism
-- effects (instead of by way of a distinct MSplit handler).
-- | Laws for 'reflect':
--
-- > msplit (lift m >> mzero) >>= reflect = lift m >> mzero
-- > msplit (lift m `mplus` ma) >>= reflect = lift m `mplus` (msplit ma >>= reflect)
{-# INLINE reflect #-}
reflect :: MonadPlus m => Maybe (a, m a) -> m a
reflect Nothing = mzero
reflect (Just (a,m)) = return a `mplus` m
-- Other committed choice primitives can be implemented in terms of msplit
-- The following implementations are directly from the LogicT paper
-- | Soft-cut: non-deterministic if-then-else, aka Prolog's @*->@
-- Declaratively,
--
-- > ifte t th el = (t >>= th) `mplus` ((not t) >> el)
--
-- However, @t@ is evaluated only once. In other words, @ifte t th el@
-- is equivalent to @t >>= th@ if @t@ has at least one solution.
-- If @t@ fails, @ifte t th el@ is the same as @el@. Laws:
--
-- > ifte (return a) th el = th a
-- > ifte mzero th el = el
-- > ifte (return a `mplus` m) th el = th a `mplus` (m >>= th)
ifte :: (MonadPlus m, MSplit m)
=> m t -> (t -> m b) -> m b -> m b
ifte t th el = msplit t >>= check
where check Nothing = el
check (Just (sg1,sg2)) = (th sg1) `mplus` (sg2 >>= th)
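-- A small sketch that follows directly from the laws above; it assumes some
-- concrete 'MonadPlus' with an 'MSplit' instance is in scope (none is defined
-- in this module):
--
-- > ifte (return 1 `mplus` return 2) (\x -> return (10*x)) (return 0)
-- >   -- = return 10 `mplus` return 20
-- > ifte mzero (\x -> return (10*x)) (return 0)
-- >   -- = return 0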
-- | Another pruning operation (ifte is the other). This selects one
-- solution out of possibly many.
once :: (MSplit m, MonadPlus m) => m b -> m b
once m = msplit m >>= check
where check Nothing = mzero
check (Just (sg1,_)) = return sg1
-- | Negation as failure
gnot :: (MonadPlus m, MSplit m) => m b -> m ()
gnot m = ifte (once m) (const mzero) (return ())
-- | Fair (i.e., avoids starvation) disjunction. It obeys the
-- following laws:
--
-- > interleave mzero m = m
-- > interleave (return a `mplus` m1) m2 = return a `mplus` (interleave m2 m1)
--
-- corollary:
--
-- > interleave m mzero = m
interleave :: (MSplit m, MonadPlus m) => m b -> m b -> m b
interleave sg1 sg2 =
do r <- msplit sg1
case r of
Nothing -> sg2
Just (sg11,sg12) ->
(return sg11) `mplus` (interleave sg2 sg12)
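-- A fairness sketch, assuming a stream-like 'MonadPlus' with an 'MSplit'
-- instance; the alternation follows from the laws above:
--
-- > interleave (msum (map return [1,3..])) (msum (map return [2,4..]))
-- >   -- yields answers 1, 2, 3, 4, ... rather than starving the second stream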
-- | Fair (i.e., avoids starvation) conjunction. It obeys the
-- following laws:
--
-- > mzero >>- k = mzero
-- > (return a `mplus` m) >>- k = interleave (k a) (m >>- k)
(>>-) :: (MonadPlus m, MSplit m) => m a -> (a -> m b) -> m b
sg >>- g =
do r <- msplit sg
case r of
Nothing -> mzero
Just (sg1 ,sg2) -> interleave (g sg1) (sg2 >>- g)
-- | Collect all solutions. This is from Hinze's @Backtr@ monad
-- class. Unsurprisingly, this can be implemented in terms of msplit.
sols :: (Monad m, MSplit m) => m a -> m [a]
sols m = (msplit m) >>= (fix step) [] where
step _ jq Nothing = return jq
step next jq (Just(a, ma)) = (msplit ma) >>= next (a:jq)
-- | Non-determinism with control (@cut@).
--
-- For the explanation of cut, see Section 5 of Hinze ICFP 2000 paper:
--
-- * [@Backtr@] [Deriving Backtracking Monad Transformers](https://dl.acm.org/citation.cfm?id=351240.351258)
--
-- Hinze suggests expressing @cut@ in terms of @cutfalse@:
--
-- > = return () `mplus` cutfalse
-- > where
-- > cutfalse :: m a
--
-- satisfies the following laws:
--
-- > cutfalse >>= k = cutfalse (F1)
-- > cutfalse | m = cutfalse (F2)
--
-- (note: @m \``mplus`\` cutfalse@ is different from @cutfalse \``mplus`\` m@).
-- In other words, cutfalse is the left zero of both bind and mplus.
--
-- Hinze also introduces the operation @`call` :: m a -> m a@ that
-- delimits the effect of cut: @`call` m@ executes m. If the cut is
-- invoked in m, it discards only the choices made since m was called.
-- Hinze postulates the axioms of `call`:
--
-- > call false = false (C1)
-- > call (return a | m) = return a | call m (C2)
-- > call (m | cutfalse) = call m (C3)
-- > call (lift m >>= k) = lift m >>= (call . k) (C4)
--
-- @`call` m@ behaves like @m@ except any cut inside @m@ has only a local effect,
-- he says.
--
-- Hinze noted a problem with the \"mechanical\" derivation of a backtracking
-- monad transformer with cut: there is no axiom specifying the interaction of
-- call with bind, and no way to simplify nested invocations of call.
class Call r where
-- | Mapping @Backtr@ interface to 'MonadPlus' and using exceptions for
-- @cutfalse@, every instance should ensure that the following laws hold:
--
-- > cutfalse `mplus` m = cutfalse --(F2)
-- > call mzero = mzero --(C1)
-- > call (return a `mplus` m) = return a `mplus` call m --(C2)
-- > call (m `mplus` cutfalse) = call m --(C3)
-- > call (lift m >>= k) = lift m >>= (call . k) --(C4)
call :: MonadPlus (Eff r) => Eff (Exc CutFalse : r) a -> Eff r a
data CutFalse = CutFalse
-- | We use exceptions for cutfalse
-- Therefore, the law @cutfalse >>= k = cutfalse@
-- is satisfied automatically since all exceptions have the above property.
cutfalse :: Member (Exc CutFalse) r => Eff r a
cutfalse = throwError CutFalse
-- | Prolog @cut@, taken from Hinze 2000 (Deriving backtracking monad
-- transformers).
(!) :: (Member (Exc CutFalse) r, MonadPlus (Eff r)) => Eff r ()
(!) = return () `mplus` cutfalse
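-- A sketch of Hinze's encoding of 'once' via cut, assuming a non-determinism
-- effect with 'MonadPlus' (Eff r) and a 'Call' instance for @r@; by laws
-- (F2), (C2) and (C3) it commits to the first answer of @m@:
--
-- > once' m = call (m >>= \a -> (!) >> return a)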
-- | Case analysis for lists
{-# INLINE list #-}
list :: b -> (a -> [a] -> b)
-> [a] -> b
list z _ [] = z
list _ k (h:t) = k h t
|
suhailshergill/extensible-effects
|
src/Control/Eff/Logic/Core.hs
|
mit
| 6,278
| 0
| 12
| 1,555
| 1,140
| 632
| 508
| -1
| -1
|
(^^^) :: Int -> Int -> Int
m ^^^ 0 = 1
m ^^^ n = m * m ^^^ (n - 1)
(^^^^) :: Int -> Int -> Int
m ^^^^ 0 = 1
m ^^^^ n = m * (^^^^) m (n - 1)
mand [] = True
mand (x:xs) = x && mand xs
nand [] = True
nand (x:xs)
| x = and xs
| otherwise = False
oand [] = True
oand (x:xs)
| x == False = False
| otherwise = oand xs
pand [] = True
pand (x:xs) = pand xs && x
|
anwb/fp-one-on-one
|
lecture-05-hw.hs
|
mit
| 375
| 0
| 8
| 121
| 262
| 130
| 132
| 18
| 1
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE
MultiParamTypeClasses,
FlexibleContexts,
FlexibleInstances
#-}
-- |
-- Module : Data.Random.Distribution.Hypergeometric
-- Copyright : (c) 2015 Sam Rijs,
-- (c) 2005 Robert Kern,
-- (c) 1998 Ivan Frohne
-- License : MIT
--
-- Maintainer : srijs@airpost.net
-- Stability : experimental
--
-- The parameters of the distribution describe /k/ elements chosen
-- from a population of /l/, with /m/ elements of one type, and
-- /l/-/m/ of the other (all are positive integers).
module Data.Random.Distribution.Hypergeometric
( Hypergeometric
-- ** Constructors
, hypergeometric
-- ** Accessors
, getM, getL, getK
-- ** Variate Generation
, hypergeometricVar
, hypergeometricVarT
) where
import Data.Random.RVar
import Data.Random.Distribution
import Data.Random.Distribution.Uniform
import Data.Random.Distribution.Hypergeometric.Impl
data Hypergeometric t = Hypergeometric { getK :: !t, getL :: !t, getM :: !t }
-- | Constructs a hypergeometric distribution from the parameters /k/, /l/ and /m/.
-- Fails if /l/ is negative, /k/ is not in [0,/l/] or /m/ is not in [0,/l/].
hypergeometric :: (Num a, Ord a) => a -> a -> a -> Hypergeometric a
hypergeometric k l m
| l < 0 = error "l must not be negative"
| m < 0 || m > l = error "m must be in [0,l]"
| k < 0 || k > l = error "k must be in [0,l]"
| otherwise = Hypergeometric k l m
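-- A minimal usage sketch (the name @draws@ is illustrative): a variate for
-- /k/ = 10 draws from a population of /l/ = 50 that contains /m/ = 20
-- elements of one type.
--
-- > draws :: RVar Int
-- > draws = hypergeometricVar 10 50 20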
hypergeometricVar :: (Num a, Ord a, Distribution Hypergeometric a) => a -> a -> a -> RVar a
hypergeometricVar = hypergeometricVarT
hypergeometricVarT :: (Num a, Ord a, Distribution Hypergeometric a) => a -> a -> a -> RVarT m a
hypergeometricVarT k l m = rvarT (hypergeometric k l m)
instance (Integral t) => Distribution Hypergeometric t where
rvarT (Hypergeometric k l m) = rhyper (k, l, m)
|
srijs/random-hypergeometric
|
src/Data/Random/Distribution/Hypergeometric.hs
|
mit
| 1,851
| 0
| 10
| 380
| 400
| 222
| 178
| 34
| 1
|
data Tree a = Node
{ node :: a,
forest :: [Tree a]
}
deriving (Show)
dfs :: Tree a -> [a]
dfs (Node x ts) = x : concatMap dfs ts
dfsPostOrder :: Tree a -> [a]
dfsPostOrder (Node x ts) = concatMap dfsPostOrder ts ++ [x]
dfsInOrder :: Tree a -> [a] -- For binary trees only
dfsInOrder (Node x []) = [x]
dfsInOrder (Node x [l]) = dfsInOrder l ++ [x] -- Single branch assumed to be left
dfsInOrder (Node x [l, r]) = dfsInOrder l ++ [x] ++ dfsInOrder r
dfsInOrder _ = error "Not a binary tree"
dfsStack :: Tree a -> [a]
dfsStack t = go [t]
where
go [] = []
go ((Node x ts) : stack) = x : go (ts ++ stack)
bfs :: Tree a -> [a]
bfs (Node x ts) = x : go ts
where
go [] = []
go ts = map node ts ++ go (concatMap forest ts)
createTree :: Int -> Int -> Tree Int
createTree 0 _ = Node 0 []
createTree numRow numChild = Node numRow children
where
children = map (createTree (numRow - 1)) $ replicate numChild numChild
main = do
let testTree = createTree 2 3
showNodes = unwords . map show
putStrLn "[#]\nRecursive DFS:"
putStrLn $ showNodes $ dfs testTree
putStrLn "[#]\nRecursive Postorder DFS:"
putStrLn $ showNodes $ dfsPostOrder testTree
putStrLn "[#]\nStack-based DFS:"
putStrLn $ showNodes $ dfsStack testTree
putStrLn "[#]\nQueue-based BFS:"
putStrLn $ showNodes $ bfs testTree
putStrLn "[#]\nRecursive Inorder DFS for Binary Tree:"
putStrLn $ showNodes $ dfsInOrder $ createTree 3 2
|
leios/algorithm-archive
|
contents/tree_traversal/code/haskell/TreeTraversal.hs
|
mit
| 1,449
| 0
| 12
| 335
| 613
| 301
| 312
| 38
| 2
|
import Day06
import Day11
import Control.Monad
main = helper "hxbxwxba"
where helper i = do
let x = partA i
print x
helper $ increment x
|
edwardwas/adventOfCode
|
src/Main.hs
|
mit
| 180
| 0
| 12
| 69
| 58
| 27
| 31
| 8
| 1
|
module Storyteller.Definition
( Control(..)
, Formatter(..)
, Operator(..)
, Directive(..)
, Inline(..)
, Block(..)
, File(..)
) where
data Control
= Break
| Rule
deriving (Eq, Enum, Show)
data Formatter
= Italic
| Underline
| Bold
| Subscript
| Superscript
| Comment
| Header
deriving (Eq, Enum, Show)
data Operator
= Include
| Character
| Place
| Time
| Code
| Quote
| Math
deriving (Eq, Enum, Show)
data Directive
= Tag
| Mode
| Footnote
deriving (Eq, Enum, Show)
data Inline
= Str String
| Fmt Formatter [Inline]
| Opr Operator [[Inline]]
| Dir Directive [Inline] [Block]
deriving Show
data Block
= Ctl Control
| Par [Inline]
deriving Show
data File = File
{ name :: String
, paragraphs :: [Block]
} deriving Show
|
Soares/Storyteller.hs
|
src/Storyteller/Definition.hs
|
mit
| 900
| 0
| 9
| 307
| 287
| 177
| 110
| 49
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Backends.Ohua (toOhuaAppCodeWrapped, toOhuaCodeWrapped) where
import Backends.Clojure (toFunClj)
import LevelGraphs
import Backend.Language.Clojure
import Backend.Language.Common
import Data.Graph.Inductive (Gr)
import qualified Data.Graph.Inductive as Graph
import Data.Graph.Inductive.Graph as G
import Data.Maybe (fromMaybe)
toOhuaCodeWith :: ((Node -> [Node]) -> [[LNode CodeGraphNodeLabel]] -> Expr) -> String -> NestedCodeGraph -> Serialized
toOhuaCodeWith f testname gr = renderProgram (RenderOpts "defalgo" "algo") $ wrapMain $ toProgram f gr
where
wrapMain = alterMain $ \b -> Form [Sym "defn", Sym (Symbol testname), Vect [], Form [Sym "ohua", b]]
toOhuaAppCodeWrapped :: String -> NestedCodeGraph -> Serialized
toOhuaAppCodeWrapped = toOhuaCodeWith convertLevelsApp
toOhuaCodeWrapped :: String -> NestedCodeGraph -> Serialized
toOhuaCodeWrapped = toOhuaCodeWith convertLevels
convertLevelsWith :: ((LNode CodeGraphNodeLabel -> Expr) -> [(Int, CodeGraphNodeLabel)] -> [(Expr, Expr)])
-> (Node -> [Node])
-> [[(Int, CodeGraphNodeLabel)]] -> Expr
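-- Folds the graph levels into a single nested let: every level except the
-- last contributes bindings (a multi-node level is bound via 'parBind', e.g.
-- as a destructured mvector in the applicative variant), and the single node
-- of the last level becomes the body expression.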
convertLevelsWith parBind getSuc lvls = mkLet assigns [finalExpr]
where
toFun n = toFunOhua n (map (Sym . varName) $ getSuc (fst n))
(assigns, finalExpr) = toAssign [] lvls
toAssign _ [] = error "empty"
toAssign l [x] =
case x of
[x] -> (l, toFun x)
_ -> error "last level must have exactly one node"
toAssign l (x:xs) = toAssign (l ++ e) xs
where
e = case x of
[] -> error "empty assignment"
[n@(id, _)] -> [(Sym $ varName id, toFun n)]
fs -> parBind toFun fs
convertLevels :: (Node -> [Node]) -> [[LNode CodeGraphNodeLabel]] -> Expr
convertLevels = convertLevelsWith $ \toFun -> map $ \n@(id, _) -> (Sym $ varName id, toFun n)
convertLevelsApp :: (Node -> [Node]) -> [[LNode CodeGraphNodeLabel]] -> Expr
convertLevelsApp = convertLevelsWith $ \toFun fs -> [(Vect (map (Sym . varName . fst) fs), Form $ Sym "mvector" : map toFun fs)]
toFunOhua :: LNode CodeGraphNodeLabel -> [Expr] -> Expr
toFunOhua node@(n, CodeGraphNodeLabel _ lab _) children =
case lab of
Custom "map"
| null children -> Nil -- removes empty maps
| otherwise -> Form [ Sym "count"
, Form [ Sym "smap"
, Sym $ fnName n
, Form $ Sym "mvector" : children
]
]
_ -> toFunClj node children
|
goens/rand-code-graph
|
src/Backends/Ohua.hs
|
mit
| 2,746
| 0
| 15
| 803
| 889
| 479
| 410
| 48
| 6
|
module Colors.MonokaiPro where
colorScheme = "monokai-pro"
colorBack = "#2D2A2E"
colorFore = "#FCFCFA"
-- Black
color00 = "#403E41"
color08 = "#727072"
-- Red
color01 = "#FF6188"
color09 = "#FF6188"
-- Green
color02 = "#A9DC76"
color10 = "#A9DC76"
-- Yellow
color03 = "#FFD866"
color11 = "#FFD866"
-- Blue
color04 = "#FC9867"
color12 = "#FC9867"
-- Magenta
color05 = "#AB9DF2"
color13 = "#AB9DF2"
-- Cyan
color06 = "#78DCE8"
color14 = "#78DCE8"
-- White
color07 = "#FCFCFA"
color15 = "#FCFCFA"
colorTrayer :: String
colorTrayer = "--tint 0x2D2A2E"
|
phdenzel/dotfiles
|
.config/xmonad/lib/Colors/MonokaiPro.hs
|
mit
| 552
| 0
| 4
| 87
| 119
| 75
| 44
| 22
| 1
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGFEDisplacementMapElement
(pattern SVG_CHANNEL_UNKNOWN, pattern SVG_CHANNEL_R,
pattern SVG_CHANNEL_G, pattern SVG_CHANNEL_B,
pattern SVG_CHANNEL_A, js_getIn1, getIn1, js_getIn2, getIn2,
js_getScale, getScale, js_getXChannelSelector, getXChannelSelector,
js_getYChannelSelector, getYChannelSelector,
SVGFEDisplacementMapElement, castToSVGFEDisplacementMapElement,
gTypeSVGFEDisplacementMapElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
pattern SVG_CHANNEL_UNKNOWN = 0
pattern SVG_CHANNEL_R = 1
pattern SVG_CHANNEL_G = 2
pattern SVG_CHANNEL_B = 3
pattern SVG_CHANNEL_A = 4
foreign import javascript unsafe "$1[\"in1\"]" js_getIn1 ::
SVGFEDisplacementMapElement -> IO (Nullable SVGAnimatedString)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDisplacementMapElement.in1 Mozilla SVGFEDisplacementMapElement.in1 documentation>
getIn1 ::
(MonadIO m) =>
SVGFEDisplacementMapElement -> m (Maybe SVGAnimatedString)
getIn1 self = liftIO (nullableToMaybe <$> (js_getIn1 (self)))
foreign import javascript unsafe "$1[\"in2\"]" js_getIn2 ::
SVGFEDisplacementMapElement -> IO (Nullable SVGAnimatedString)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDisplacementMapElement.in2 Mozilla SVGFEDisplacementMapElement.in2 documentation>
getIn2 ::
(MonadIO m) =>
SVGFEDisplacementMapElement -> m (Maybe SVGAnimatedString)
getIn2 self = liftIO (nullableToMaybe <$> (js_getIn2 (self)))
foreign import javascript unsafe "$1[\"scale\"]" js_getScale ::
SVGFEDisplacementMapElement -> IO (Nullable SVGAnimatedNumber)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDisplacementMapElement.scale Mozilla SVGFEDisplacementMapElement.scale documentation>
getScale ::
(MonadIO m) =>
SVGFEDisplacementMapElement -> m (Maybe SVGAnimatedNumber)
getScale self = liftIO (nullableToMaybe <$> (js_getScale (self)))
foreign import javascript unsafe "$1[\"xChannelSelector\"]"
js_getXChannelSelector ::
SVGFEDisplacementMapElement -> IO (Nullable SVGAnimatedEnumeration)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDisplacementMapElement.xChannelSelector Mozilla SVGFEDisplacementMapElement.xChannelSelector documentation>
getXChannelSelector ::
(MonadIO m) =>
SVGFEDisplacementMapElement -> m (Maybe SVGAnimatedEnumeration)
getXChannelSelector self
= liftIO (nullableToMaybe <$> (js_getXChannelSelector (self)))
foreign import javascript unsafe "$1[\"yChannelSelector\"]"
js_getYChannelSelector ::
SVGFEDisplacementMapElement -> IO (Nullable SVGAnimatedEnumeration)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGFEDisplacementMapElement.yChannelSelector Mozilla SVGFEDisplacementMapElement.yChannelSelector documentation>
getYChannelSelector ::
(MonadIO m) =>
SVGFEDisplacementMapElement -> m (Maybe SVGAnimatedEnumeration)
getYChannelSelector self
= liftIO (nullableToMaybe <$> (js_getYChannelSelector (self)))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGFEDisplacementMapElement.hs
|
mit
| 3,944
| 30
| 10
| 577
| 792
| 459
| 333
| 62
| 1
|
module FrontEnd.Warning(
Warning(..),
MonadWarn(..),
WarnType(..),
warnIsFatal,
processErrors,
warn,
err,
addWarn,
module FrontEnd.SrcLoc,
-- IO monad
processIOErrors,
printIOErrors
) where
import Control.Monad.Reader
import Control.Monad.Writer
import Data.IORef
import System.IO
import System.IO.Unsafe
import Util.Std
import FrontEnd.SrcLoc(SrcSpan(..),WithSrcLoc(..),MonadSetSrcLoc(..),SrcLoc(..),MonadSrcLoc(..),bogusASrcLoc,SLM(..))
import Name.Name
import Options
import PackedString
import StringTable.Atom
import Util.Gen
data Warning = Warning {
warnSrcLoc :: !SrcLoc,
warnType :: WarnType,
warnMessage :: String
} deriving(Eq,Ord)
class (Applicative m,Monad m) => MonadWarn m where
addWarning :: Warning -> m ()
addWarning w = fail $ show w
addWarn :: (MonadWarn m, MonadSrcLoc m) => WarnType -> String -> m ()
addWarn t m = do
sl <- getSrcLoc
warn sl t m
warn :: MonadWarn m => SrcLoc -> WarnType -> String -> m ()
warn warnSrcLoc warnType warnMessage = addWarning Warning { .. }
err :: MonadWarn m => WarnType -> String -> m ()
err t m = warn bogusASrcLoc t m
pad n s = case length s of
x | x >= n -> s
x -> s ++ replicate (n - x) ' '
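-- e.g. pad 3 "7" == "7  ", while strings of length >= n are returned unchanged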
processIOErrors :: String -> IO ()
processIOErrors s = do
ws <- readIORef ioWarnings
unless (null ws || null s) $ do
hFlush stdout
hFlush stderr
putErrLn $ "\nDiagnostic from " ++ s
processErrors' True ws
writeIORef ioWarnings []
-- | just show IO errors and return whether it would have died
printIOErrors :: IO Bool
printIOErrors = do
ws <- readIORef ioWarnings
b <- processErrors' False ws
writeIORef ioWarnings []
return b
processErrors :: [Warning] -> IO ()
processErrors ws = processErrors' True ws >> return ()
processErrors' :: Bool -> [Warning] -> IO Bool
processErrors' _ [] = return False
processErrors' doDie ws = putErrLn "" >> mapM_ s (snub ws) >> when (die && doDie) exitFailure >> return die where
s Warning { warnSrcLoc = srcLoc@SrcLoc { .. }, .. }
| srcLoc == bogusASrcLoc = putErrLn $ msg warnType warnMessage
| srcLocLine == -1 = putErrLn (unpackPS srcLocFileName ++ ": " ++ msg warnType warnMessage)
| otherwise = putErrLn (unpackPS srcLocFileName ++ ":" ++ pad 3 (show srcLocLine) ++ " - " ++ msg warnType warnMessage)
die = (any warnIsFatal (map warnType ws)) && not (optKeepGoing options)
--ws' = filter ((`notElem` ignore) . warnType ) $ snub ws
data WarnType
= AmbiguousExport Module [Name]
| AmbiguousName Name [Name]
| DuplicateInstances
| InvalidDecl
| InvalidExp
| InvalidFFIType
| LexError
| WarnFailure
| MissingDep String
| MissingModule Module
| MultiplyDefined Name [SrcLoc]
| OccursCheck
| ParseError
| ParseInfo
| PrimitiveBadType
| PrimitiveUnknown Atom
| TypeSynonymPartialAp
| TypeSynonymRecursive
| UndefinedName Name
| UnificationError
| UnknownDeriving [Class]
| UnknownOption
| UnknownPragma PackedString
| UnsupportedFeature
deriving(Eq,Ord)
warnIsFatal w = f w where
f AmbiguousExport {} = True
f AmbiguousName {} = True
f InvalidDecl {} = True
f InvalidExp {} = True
f InvalidFFIType {} = True
f DuplicateInstances {} = True
f MissingDep {} = True
f MissingModule {} = True
f WarnFailure {} = True
f MultiplyDefined {} = True
f OccursCheck {} = True
f TypeSynonymPartialAp {} = True
f TypeSynonymRecursive {} = True
f UndefinedName {} = True
f UnificationError {} = True
f UnknownDeriving {} = True
f UnsupportedFeature {} = True
f ParseError {} = True
f LexError {} = True
f _ = False
instance Show Warning where
show Warning { warnSrcLoc = sl, warnType = t, warnMessage = m }
| sl == bogusASrcLoc = msg t m
show Warning { warnSrcLoc = SrcLoc { srcLocFileName = fn, srcLocLine = l },
warnType = t ,warnMessage = m } =
(unpackPS fn ++ ":" ++ pad 3 (show l) ++ " - " ++ msg t m)
msg t m = (if warnIsFatal t then "Error: " else "Warning: ") ++ m
_warnings = [
("deprecations" ,"warn about uses of functions & types that are deprecated") ,
("duplicate-exports" ,"warn when an entity is exported multiple times") ,
("hi-shadowing" ,"warn when a .hi file in the current directory shadows a library") ,
("incomplete-patterns" ,"warn when a pattern match could fail") ,
("misc" ,"enable miscellaneous warnings") ,
("missing-fields" ,"warn when fields of a record are uninitialised") ,
("missing-methods" ,"warn when class methods are undefined") ,
("missing-signatures" ,"warn about top-level functions without signatures") ,
("name-shadowing" ,"warn when names are shadowed") ,
("overlapping-patterns" ,"warn about overlapping patterns") ,
("simple-patterns" ,"warn about lambda-patterns that can fail") ,
("type-defaults" ,"warn when defaulting happens") ,
("unused-binds" ,"warn about bindings that are unused") ,
("unused-imports" ,"warn about unnecessary imports") ,
("unused-matches" ,"warn about variables in patterns that aren't used")
]
----------------
-- Warning monad
----------------
{-# NOINLINE ioWarnings #-}
ioWarnings :: IORef [Warning]
ioWarnings = unsafePerformIO $ newIORef []
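-- The NOINLINE pragma keeps the unsafePerformIO from being inlined, so this is a
-- single process-global list of warnings, shared by the MonadWarn IO instance below.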
instance MonadWarn m => MonadWarn (SLM m) where
addWarning w = SLM $ addWarning w
instance MonadWarn IO where
addWarning w = modifyIORef ioWarnings (w:)
instance MonadWarn (Writer [Warning]) where
addWarning w = tell [w]
instance MonadWarn Identity
instance MonadWarn m => MonadWarn (ReaderT a m) where
addWarning w = lift $ addWarning w
|
m-alvarez/jhc
|
src/FrontEnd/Warning.hs
|
mit
| 6,180
| 0
| 15
| 1,780
| 1,745
| 927
| 818
| -1
| -1
|
{-# LANGUAGE CPP #-}
module Darcs.UI.Email
( makeEmail
, readEmail
, formatHeader
) where
import Data.Char ( digitToInt, isHexDigit, ord, intToDigit, isPrint, toUpper )
import Data.List ( isInfixOf )
import Darcs.Util.Printer
( Doc, ($$), (<+>), (<>), text, empty, packedString, renderPS
, RenderMode(..)
)
import Darcs.Util.ByteString ( packStringToUTF8, dropSpace, linesPS, betweenLinesPS )
import qualified Data.ByteString as B (ByteString, length, null, tail
,drop, head, concat, singleton
,pack, append, empty, unpack
)
import qualified Data.ByteString.Char8 as BC (index, head, pack)
import Data.ByteString.Internal as B (c2w, createAndTrim)
import System.IO.Unsafe ( unsafePerformIO )
import Foreign.Ptr ( Ptr, plusPtr )
import Foreign.Storable ( poke )
import Data.Word ( Word8 )
import Data.Maybe ( fromMaybe )
-- lineMax is maximum number of characters in an e-mail line excluding the CRLF
-- at the end. qlineMax is the number of characters in a q-encoded or
-- quoted-printable-encoded line.
lineMax, qlineMax :: Int
lineMax = 78
qlineMax = 75
-- | Formats an e-mail header by encoding any non-ascii characters using UTF-8
-- and Q-encoding, and folding lines at appropriate points. It doesn't do
-- more than that, so the header name and header value should be
-- well-formatted give or take line length and encoding. So no non-ASCII
-- characters within quoted-string, quoted-pair, or atom; no semantically
-- meaningful signs in names; no non-ASCII characters in the header name;
-- etcetera.
formatHeader :: String -> String -> B.ByteString
formatHeader headerName headerValue =
B.append nameColon encodedValue
where nameColon = B.pack (map B.c2w (headerName ++ ":")) -- space for folding
encodedValue = foldAndEncode (' ':headerValue)
(B.length nameColon) False False
-- run through a string and encode non-ascii words and fold where appropriate.
-- the integer argument is the current position in the current line.
-- the string in the first argument must begin with whitespace, or be empty.
foldAndEncode :: String -> Int -> Bool -> Bool -> B.ByteString
foldAndEncode [] _ _ _ = B.empty
foldAndEncode s p lastWordEncoded inMidWord =
let newline = B.singleton 10
space = B.singleton 32
s2bs = B.pack . map B.c2w
-- the twelve there is the max number of ASCII chars to encode a single
-- character: 4 * 3, 4 UTF-8 bytes times 3 ASCII chars per byte
safeEncChunkLength = (qlineMax - B.length encodedWordStart
- B.length encodedWordEnd) `div` 12
(curSpace, afterCurSpace) = span (== ' ') s
(curWord, afterCurWord) = break (== ' ') afterCurSpace
qEncWord | lastWordEncoded = qEncode (curSpace ++ curWord)
| otherwise = qEncode curWord
mustEncode = inMidWord
|| any (\c -> not (isPrint c) || ord c > 127) curWord
|| length curWord > lineMax - 1
|| isInfixOf "=?" curWord
mustFold
| mustEncode && lastWordEncoded
= p + 1 + B.length qEncWord > lineMax
| mustEncode
= p + length curSpace + B.length qEncWord > lineMax
| otherwise
= p + length curSpace + length curWord > lineMax
mustSplit = (B.length qEncWord > qlineMax && mustEncode)
|| length curWord > lineMax - 1
spaceToInsert | mustEncode && lastWordEncoded = space
| otherwise = s2bs curSpace
wordToInsert
| mustEncode && mustSplit = qEncode (take safeEncChunkLength curWord)
| mustEncode = qEncWord
| otherwise = s2bs curWord
doneChunk | mustFold = B.concat [newline, spaceToInsert, wordToInsert]
| otherwise = B.concat [spaceToInsert, wordToInsert]
(rest, nextP)
| mustSplit
= (drop safeEncChunkLength curWord ++ afterCurWord, qlineMax + 1)
| mustEncode && mustFold
= (afterCurWord, B.length spaceToInsert + B.length wordToInsert)
| otherwise
= (afterCurWord, p + B.length doneChunk)
in B.append doneChunk (foldAndEncode rest nextP mustEncode mustSplit)
-- | Turns a piece of string into a q-encoded block
-- Applies q-encoding, for use in e-mail header values, as defined in RFC 2047.
-- It just takes a string and builds an encoded-word from it, it does not check
-- length or necessity.
qEncode :: String -> B.ByteString
qEncode s = B.concat [encodedWordStart,
encodedString,
encodedWordEnd]
where encodedString = B.concat (map qEncodeChar s)
encodedWordStart, encodedWordEnd :: B.ByteString
encodedWordStart = B.pack (map B.c2w "=?UTF-8?Q?")
encodedWordEnd = B.pack (map B.c2w "?=")
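-- Illustrative example: qEncode "é" encodes the UTF-8 bytes of 'é' (0xC3 0xA9)
-- and yields "=?UTF-8?Q?=C3=A9?=".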
-- turns a character into its q-encoded bytestring value. For most printable
-- ASCII characters, that's just the singleton bytestring with that char.
qEncodeChar :: Char -> B.ByteString
qEncodeChar c
| c == ' ' = c2bs '_'
| isPrint c
&& c `notElem` "?=_"
&& ord c < 128 = c2bs c
| otherwise = B.concat
(map qbyte
(B.unpack
(packStringToUTF8 [c])))
where c2bs = B.singleton . B.c2w
-- qbyte turns a byte into its q-encoded "=hh" representation
qbyte b = B.pack (map B.c2w ['='
,word8ToUDigit (b `div` 16)
,word8ToUDigit (b `mod` 16)
])
word8ToUDigit :: Word8 -> Char
word8ToUDigit = toUpper . intToDigit . fromIntegral
-- TODO is this doing mime encoding??
qpencode :: B.ByteString -> B.ByteString
qpencode s = unsafePerformIO
-- Really only (3 + 2/75) * length or something in the worst case
$ B.createAndTrim (4 * B.length s) (\buf -> encode s qlineMax buf 0)
encode :: B.ByteString -> Int -> Ptr Word8 -> Int -> IO Int
encode ps _ _ bufi | B.null ps = return bufi
encode ps n buf bufi = case B.head ps of
c | c == newline ->
do poke (buf `plusPtr` bufi) newline
encode ps' qlineMax buf (bufi+1)
| n == 0 && B.length ps > 1 ->
do poke (buf `plusPtr` bufi) equals
poke (buf `plusPtr` (bufi+1)) newline
encode ps qlineMax buf (bufi + 2)
| c == tab || c == space ->
if B.null ps' || B.head ps' == newline
then do poke (buf `plusPtr` bufi) c
poke (buf `plusPtr` (bufi+1)) equals
poke (buf `plusPtr` (bufi+2)) newline
encode ps' qlineMax buf (bufi + 3)
else do poke (buf `plusPtr` bufi) c
encode ps' (n - 1) buf (bufi + 1)
| c >= bang && c /= equals && c /= period && c <= tilde ->
do poke (buf `plusPtr` bufi) c
encode ps' (n - 1) buf (bufi + 1)
| n < 3 ->
encode ps 0 buf bufi
| otherwise ->
do let (x, y) = c `divMod` 16
h1 = intToUDigit x
h2 = intToUDigit y
poke (buf `plusPtr` bufi) equals
poke (buf `plusPtr` (bufi+1)) h1
poke (buf `plusPtr` (bufi+2)) h2
encode ps' (n - 3) buf (bufi + 3)
where ps' = B.tail ps
newline = B.c2w '\n'
tab = B.c2w '\t'
space = B.c2w ' '
bang = B.c2w '!'
tilde = B.c2w '~'
equals = B.c2w '='
period = B.c2w '.'
intToUDigit i
| i >= 0 && i <= 9 = B.c2w '0' + i
| i >= 10 && i <= 15 = B.c2w 'A' + i - 10
| otherwise = error $ "intToUDigit: '"++show i++"'not a digit"
qpdecode :: B.ByteString -> B.ByteString
qpdecode s = unsafePerformIO
-- Add 1 as linesPS "\n" -> ["", ""] -> "\n\n"
$ B.createAndTrim (B.length s + 1) (\buf -> decode (linesPS s) buf 0)
decode :: [B.ByteString] -> Ptr Word8 -> Int -> IO Int
decode [] _ bufi = return bufi
decode (ps:pss) buf bufi
| B.null (dropSpace ps)
= do poke (buf `plusPtr` bufi) newline
decode pss buf (bufi+1)
| is_equals && B.length ps >= 3 && isHexDigit c1 && isHexDigit c2
= do poke (buf `plusPtr` bufi)
(toWord8 $ digitToInt c1 * 16 + digitToInt c2)
decode (B.drop 3 ps:pss) buf (bufi+1)
| is_equals && B.null (dropSpace (B.tail ps)) = decode pss buf bufi
| otherwise = do poke (buf `plusPtr` bufi) (B.head ps)
decode (B.tail ps:pss) buf (bufi+1)
where is_equals = BC.head ps == '='
c1 = BC.index ps 1
c2 = BC.index ps 2
newline = B.c2w '\n'
toWord8 :: Int -> Word8
toWord8 = fromIntegral
makeEmail :: String -> [(String, String)] -> Maybe Doc -> Maybe String -> Doc -> Maybe String -> Doc
makeEmail repodir headers mcontents mcharset bundle mfilename =
text "DarcsURL:" <+> text repodir
$$ foldl (\m (h,v) -> m $$ (text (h ++ ":") <+> text v)) empty headers
$$ text "MIME-Version: 1.0"
$$ text "Content-Type: multipart/mixed; boundary=\"=_\""
$$ text ""
$$ text "--=_"
$$ (case mcontents of
Just contents ->
text ("Content-Type: text/plain; charset=\"" ++
fromMaybe "x-unknown" mcharset ++ "\"")
$$ text "Content-Transfer-Encoding: quoted-printable"
$$ text ""
$$ packedString (qpencode (renderPS Standard contents))
$$ text ""
$$ text "--=_"
Nothing -> empty)
$$ text "Content-Type: text/x-darcs-patch; name=\"patch-preview.txt\""
$$ text "Content-Disposition: inline"
$$ text "Content-Transfer-Encoding: quoted-printable"
$$ text "Content-Description: Patch preview"
$$ text ""
$$ (case betweenLinesPS (BC.pack "New patches:") (BC.pack "Context:") (renderPS Standard bundle) of
Just s -> packedString $ qpencode s
-- this should not happen, but in case it does, keep everything
Nothing -> packedString $ qpencode $ renderPS Standard bundle)
$$ text "--=_"
$$ text "Content-Type: application/x-darcs-patch" <>
(case mfilename of
Just filename -> text "; name=\"" <> text filename <> text "\""
Nothing -> empty)
$$ text "Content-Transfer-Encoding: quoted-printable"
$$ text "Content-Disposition: attachment"
$$ text "Content-Description: A darcs patch for your repository!"
$$ text ""
$$ packedString (qpencode (renderPS Standard bundle))
$$ text "--=_--"
$$ text ""
$$ text "."
$$ text ""
$$ text ""
readEmail :: B.ByteString -> B.ByteString
readEmail s =
case betweenLinesPS
(BC.pack "Content-Description: A darcs patch for your repository!")
(BC.pack "--=_--") s of
Nothing -> s -- if it wasn't an email in the first place, just pass along.
Just s' -> qpdecode s'
|
DavidAlphaFox/darcs
|
src/Darcs/UI/Email.hs
|
gpl-2.0
| 11,103
| 0
| 38
| 3,431
| 3,204
| 1,635
| 1,569
| 211
| 4
|
module Crypto where
import Data.ByteString.Lazy hiding (split)
import Data.Binary
import Control.Monad
import qualified Data.Map as M
import Reactive.Banana.Combinators
import Reactive.Banana.Frameworks
import Reactive.Banana.Switch
import Control.Event.Handler
type KeyHash = Int
type PubKey = Int
type PrivKey = Int
type Signature = Int
type RawData = ByteString
type EventSource a = (AddHandler a, Handler a)
data Payload = Payload { signedPayload :: RawData,
unsignedPayload :: RawData}
type CryptoPacket = Either Introduce DataPacket
data Introduce = Introduce {introKeyID :: KeyHash, introKey :: PubKey, introSig :: Signature, introContent :: Payload}
data DataPacket = DataPacket {datakeyID :: KeyHash, dataSig :: Signature, dataContent :: Payload}
checkSig _ _ _ = True
sign _ _ = 0
computeHashFromKey _ = 0
data CryptoEntry = CryptoEntry {pubKey :: PubKey,
handler :: Handler DataPacket}
data CryptoNewKey = CryptoNewKey {cnkPayload :: Introduce,
cnkKeyID :: KeyHash,
cnkCallback :: AddHandler DataPacket}
type CryptoMap = M.Map KeyHash CryptoEntry
data CryptoOrders = CryptoAdd KeyHash CryptoEntry |
CryptoDelete KeyHash
checkIntroSig (Introduce kH pKey sig pay) = computeHashFromKey pKey == kH &&
checkSig pKey sig (signedPayload pay)
{-| Calls outH every time a new Introduce has been received from an unknown key.
  - The CryptoEntry contains a handler called for each new DataPacket from this source. |-}
buildCrypto :: Frameworks t => EventSource CryptoOrders -> Handler CryptoNewKey -> Event t CryptoPacket -> Moment t ()
buildCrypto (ordSource, fireOrder) outH inE = do orderE <- fromAddHandler ordSource
let cryptoMap = genCryptoMap orderE
(introE, dataE) = split inE
(newIntroE, _) = filterNewIntro (filterE checkIntroSig introE) cryptoMap
reactimate (onNewIntro outH fireOrder <$> newIntroE)
reactimate (apply (onDataPacket <$> cryptoMap) dataE)
onDataPacket :: CryptoMap -> DataPacket -> IO ()
onDataPacket cM d@(DataPacket kH s pay) = case M.lookup kH cM of
Nothing -> pure ()
Just (CryptoEntry pK h) -> when (checkSig pK s (signedPayload pay)) $ h d
genCryptoMap :: Event t CryptoOrders -> Behavior t CryptoMap
genCryptoMap orders = accumB M.empty (onOrder <$> orders)
onOrder :: CryptoOrders -> CryptoMap -> CryptoMap
onOrder (CryptoAdd kH cE) = M.insert kH cE
onOrder (CryptoDelete kH) = M.delete kH
filterNewIntro :: Event t Introduce -> Behavior t CryptoMap -> (Event t Introduce, Event t Introduce)
filterNewIntro introE cMapB = split $ apply (makeEither <$> cMapB) introE
where makeEither :: CryptoMap -> Introduce -> Either Introduce Introduce
makeEither cM i = case M.lookup (introKeyID i) cM of
Nothing -> Left i
Just _ -> Right i
onNewIntro :: Handler CryptoNewKey -> Handler CryptoOrders -> Introduce -> IO ()
onNewIntro outH ordH i@(Introduce kH k _ _) = do (addH, fire) <- newAddHandler
ordH $ CryptoAdd kH (CryptoEntry k fire)
outH $ CryptoNewKey i kH addH
|
JohnRambouilled/WeedNetwork
|
Crypto.hs
|
gpl-2.0
| 3,666
| 0
| 13
| 1,203
| 955
| 500
| 455
| 59
| 2
|
module Rosenfeld
( fmt )
where
import Expression
import WhiteBear ( kT )
import FMT ( n0, n1, n2, n3, sqr_n2v, n1v_dot_n2v )
phi1, phi2, phi3 :: Expression Scalar
phi1 = var "phi1" "\\Phi_1" $ integrate $ -kT*n0*log(1-n3)
phi2 = var "phi2" "\\Phi_2" $ integrate $ kT*(n2*n1 - n1v_dot_n2v)/(1-n3)
phi3 = var "phi3" "\\Phi_3" $ integrate $ kT*(n2**3/3 - sqr_n2v*n2)/(8*pi*(1-n3)**2)
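-- As encoded above, these are the three Rosenfeld fundamental-measure terms,
-- each integrated over space:
--   Phi_1 = -kT n0 ln(1 - n3)
--   Phi_2 =  kT (n1 n2 - n1v . n2v) / (1 - n3)
--   Phi_3 =  kT (n2^3/3 - |n2v|^2 n2) / (8 pi (1 - n3)^2)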
fmt :: Expression Scalar
fmt = var "fmt" "F_{\\text{hard}}" $ (phi1 + phi2 + phi3)
|
droundy/deft
|
src/haskell/Rosenfeld.hs
|
gpl-2.0
| 482
| 0
| 11
| 93
| 237
| 130
| 107
| 11
| 1
|
module Program.InputHandle where
import Control.Applicative ( (<$>) )
import Control.Monad ( unless, forM )
import Data.Maybe ( isNothing, fromJust )
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
import Data.BallBox ( BallBox(..), info, fitness )
import Data.Point ( Point )
import GL.Aliases ( int, float, readInt )
import qualified Program.State as P
-- |Callback for regular character keyboard actions
keyboardChar :: P.State -> Char -> Position -> IO ()
-- Quit application
keyboardChar state 'q' (Position x y) = keyboardChar state 'Q' (Position x y)
keyboardChar state 'Q' _ = P.close state $= True
-- Display population
keyboardChar state 'p' _ = P.drawMode state $= P.Population
-- Rank population
keyboardChar state 'r' _ = P.rankPopulation state
-- Select population
keyboardChar state 's' _ = P.selectPopulation state 10
-- Mate population
keyboardChar state 'm' _ = do
P.drawMode state $= P.Mating
P.matePopulation state
-- Switch to automated display mode
keyboardChar state 'g' _ =
P.drawMode state $= P.Automate 0 P.autoTimestep P.Display
-- Add or remove one member from the population
keyboardChar state '=' _ = P.increasePopulation state 1
keyboardChar state '-' _ = do
bs <- get $ P.boxes state
unless (null bs) (P.boxes state $= tail bs)
-- Display general info about the whole population
keyboardChar state 'd' _ = do
bs <- get $ P.boxes state
putStrLn $ "Average Fitness is "
++ show (sum (map fitness bs) / float (length bs))
P.prompt state $= True
-- Start auto mode
keyboardChar state 'a' _ = do
putStrLn "Enter the number of iterations you want to run"
ln <- readInt <$> filter (/='\n') <$> getLine
if isNothing ln then
putStrLn "You must enter the number of cycles"
else
P.drawMode state $= P.Automate (fromJust ln) P.autoTimestep P.Display
return ()
-- Display help
keyboardChar state 'h' _ = do
putStrLn "------\n\
\Commands:\n\
\ h - Show commands list\n\
\ = - Add boxes to the population\n\
\ - - Remove boxes from the population\n\
\ p - Switch to population mode\n\
\ m - Switch to mate mode\n\
\ r - Rank guys in population\n\
\ d - Show overall stats\n\
\ s - Select first n in the list and drop the rest\n\
\ a - Automate for a given amount of cycles"
P.prompt state $= True
-- Catch all
keyboardChar _ _ _ = return ()
-- |Callback for keyboard key up actions
keyboardCharUp :: P.State -> Char -> Position -> IO ()
keyboardCharUp _ _ _ = return ()
-- |Callback for mouse click events
mouse :: P.State -> MouseButton -> KeyState -> Position -> IO ()
mouse _ _ _ _ = return ()
-- |Callback for mouse motion
mouseMotion :: P.State -> Position -> IO ()
mouseMotion _ _ = return ()
-- |Callback for passive mouse motion
passiveMotion :: P.State -> Position -> IO ()
passiveMotion state mousePos = do
-- Get width, height and boxes with their positions
width <- int <$> get (P.width state)
height <- int <$> get (P.height state)
boxes <- get (P.boxes state)
    -- Find out which box (if any) the mouse is hovering over
bs' <- forM (zip (positions (width,height)) boxes)
(\(boxPosition, box@(BallBox _ boxIsSelected boxSize _) ) ->
-- If mouse over box
if mouseOverBox boxPosition boxSize then
                do  unless boxIsSelected $ do   -- If the box wasn't selected
                      putStrLn (info box)       -- before, then output its status
                      P.prompt state $= True
                    return box { selected = True }  -- Mark the box as selected
            else return box { selected = False } )  -- Otherwise unmark it
-- Update boxes list
P.boxes state $= bs'
where
(x,y) = (\(Position x' y') -> (int x', int y')) mousePos
mouseOverBox (x1,y1) (w,h) = and [x > x1, y > y1, x < x1+w, y < y1+h]
-- |Positions to place boxes at on screen
positions :: (Enum a, Num a) => Point a-> [Point a]
positions (w,h) = [(x,y) | y <- [20,140..h-100], x <- [20,140..w-100] ]
|
szbokhar/genetic-boxes
|
Program/InputHandle.hs
|
gpl-2.0
| 4,198
| 0
| 19
| 1,151
| 1,201
| 616
| 585
| 65
| 2
|
main = do
let animals = dict[ "dog" := int(1),
"cat" := 2 ]
-- total <- animals.get("dog") + animals.get("cat")
-- ^ error: Couldn't match type ‘Maybe Integer’ with ‘Integer’
total <- animals.getdefault("dog", 0) + animals.getdefault("cat", 0)
print(total)
|
cblp/python5
|
examples/dict.hs
|
gpl-3.0
| 311
| 0
| 14
| 84
| 82
| 42
| 40
| 5
| 1
|
module Carbon.Data.Logic.Diamond(
DiamondResult(..)
, ResultType(..)
, Results(..)
, isEmpty
, implications
)where
import Control.Arrow (second)
import Data.Function (on)
import qualified Data.List as List
import Carbon.Common
import Carbon.Data.Id
import Carbon.Data.Logic.Parse
{-|
Datatypes that define the results
|-}
data DiamondResult a = DiamondResult {
inSet :: [a]
, udecSet :: [a]
, outSet :: [a]
}
data ResultType = TwoValued
| Stable
| Grounded
| Complete
| Admissible
| Preferred
deriving (Show, Read, Eq, Enum, Bounded, Ord)
newtype Results a = Results [(ResultType, [DiamondResult a])]
isEmpty :: Results a -> Bool
isEmpty (Results r) = null r
-- | admissible >= complete >= (grounded, pref >= stable >= two-valued)
implications :: ResultType -> [ResultType]
implications Admissible = [Admissible]
implications Complete = Complete : implications Admissible
implications Grounded = Grounded : implications Complete
implications Preferred = Preferred : implications Complete
implications Stable = Stable : implications Preferred
implications TwoValued = TwoValued : implications Stable
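-- e.g. implications Grounded == [Grounded, Complete, Admissible]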
{-| Instance declarations: |-}
instance Ord a => Eq (DiamondResult a) where
d1 == d2 = let c = (==) `on` List.sort
cI = c `on` inSet
cO = c `on` outSet
in (cI d1 d2 && cO d1 d2)
instance Ord a => Eq (Results a) where
(Results r1) == (Results r2) = r1 == r2
instance Ord a => Ord (DiamondResult a) where
compare d1 d2 =
let ci = compare `on` inSet
cu = compare `on` udecSet
co = compare `on` outSet
cs = [ci d1 d2, co d1 d2, cu d1 d2]
in head $ filter (/= EQ) cs ++ [EQ]
instance Functor DiamondResult where
fmap f d =
let i = map f $ inSet d
u = map f $ udecSet d
o = map f $ outSet d
in DiamondResult i u o
instance Functor Results where
fmap f (Results r) = Results $ map (second . map $ fmap f) r
instance Show a => Show (DiamondResult a) where
show d =
let i = map (\e -> "t(" ++ show e ++ ")") $ inSet d
u = map (\e -> "u(" ++ show e ++ ")") $ udecSet d
o = map (\e -> "f(" ++ show e ++ ")") $ outSet d
in unwords $ concat [i,u,o]
instance Show a => Show (Results a) where
show (Results r) = unlines $ concatMap go r
where
go (t, drs) = tell t ++ answers drs ++ end
answers = zipWith (\n d -> show n ++ ":\t" ++ show d) [1..]
tell TwoValued = "two-valued models:":end
tell Stable = "stable models:":end
tell Grounded = "grounded models:":end
tell Complete = "complete models:":end
tell Admissible = "admissible model:":end
tell Preferred = "preferred models:":end
end = ["=============================="]
instance StartState (DiamondResult a) where
startState = DiamondResult [] [] []
instance StartState (Results a) where
startState = Results []
|
runjak/carbon-adf
|
Carbon/Data/Logic/Diamond.hs
|
gpl-3.0
| 3,036
| 0
| 16
| 850
| 1,105
| 582
| 523
| 76
| 1
|
-- Exercise 06: Write a function that computes the additive persistence of a number.
additivePersistence :: Int -> Int
additivePersistence x
|x < 10 = 0
|otherwise = (additivePersistence (sum $ numSum x)) + 1
where
numSum 0 = [0]
numSum x = numSum (x `div` 10) ++ [x `rem` 10]
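-- For example: 1234 -> 1+2+3+4 = 10 -> 1+0 = 1, so additivePersistence 1234 == 2.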
main = do
print (additivePersistence 111)
|
danielgoncalvesti/BIGDATA2017
|
Atividade01/Haskell/Activity1/Exercises2/Ex6.hs
|
gpl-3.0
| 393
| 0
| 11
| 128
| 121
| 62
| 59
| 8
| 2
|
#!/usr/bin/env stack
{- stack exec --verbosity info --stack-yaml stack8.0.yaml
--package base-prelude
--package directory
--package extra
--package pandoc
--package safe
--package shake
--package time
-- ghc -threaded
-}
{-
One of two project script files (Makefile, Shake.hs).
This one provides a stronger programming language and more
platform independence than Make. It will build needed packages (above)
on first run and whenever the resolver in stack.yaml changes.
To minimise such startup delays, and reduce sensitivity to git checkout,
compiling is recommended; run the script in interpreted mode to do that.
It requires stack (https://haskell-lang.org/get-started) and
auto-installs the packages above. Also, some rules require:
- site/hakyll-std/hakyll-std
- runhaskell
- groff
- m4
- makeinfo
- git
- patch
Usage: see below. Also:
$ find hledger-lib hledger | entr ./Shake website # rebuild web files on changes in these dirs
Shake wishes:
just one shake import
wildcards in phony rules
multiple individually accessible wildcards
not having to write :: Action ExitCode after a non-final cmd
-}
{-# LANGUAGE PackageImports, ScopedTypeVariables #-}
import Prelude ()
import "base-prelude" BasePrelude
import "extra" Data.List.Extra
import "safe" Safe
import "shake" Development.Shake
import "shake" Development.Shake.FilePath
import "time" Data.Time
import "directory" System.Directory as S (getDirectoryContents)
usage = unlines
["Usage:"
,"./Shake.hs # compile this script"
,"./Shake # show commands"
,"./Shake manuals # generate the txt/man/info manuals"
,"./Shake website # generate the html manuals and website"
-- ,"./Shake manpages # generate nroff files for man"
-- ,"./Shake txtmanpages # generate text man pages for embedding"
-- ,"./Shake infomanpages # generate info files for info"
-- ,"./Shake webmanpages # generate individual web man pages for hakyll"
-- ,"./Shake webmanall # generate all-in-one web manual for hakyll"
-- ,"./Shake guideall # generate all-in-one web user guide for hakyll"
,"./Shake site/doc/VER/.snapshot # generate and save a versioned web site snapshot"
,"./Shake all # generate everything"
,"./Shake clean # clean generated files"
,"./Shake Clean # clean harder"
,"./Shake --help # show options, eg --color"
]
pandoc = "stack exec -- pandoc" -- pandoc from project's stackage snapshot
hakyllstd = "site/hakyll-std/hakyll-std"
makeinfo = "makeinfo"
-- nroff = "nroff"
groff = "groff"
main = do
pandocFilters <-
map ("tools" </>). nub . sort . map (-<.> "") . filter ("pandoc-" `isPrefixOf`)
<$> S.getDirectoryContents "tools"
shakeArgs
shakeOptions{
shakeVerbosity=Loud
-- ,shakeReport=[".shake.html"]
} $ do
want ["help"]
phony "help" $ liftIO $ putStrLn usage
-- phony "compile" $ need ["Shake"]
--
-- "Shake" %> \out -> do
-- need [out <.> "hs"]
-- unit $ cmd "./Shake.hs" -- running as stack script installs deps and compiles
-- putLoud "You can now run ./Shake instead of ./Shake.hs"
phony "all" $ need ["manuals", "website"]
-- manuals
let
manpageNames = [ -- in suggested reading order
"hledger.1"
,"hledger-ui.1"
,"hledger-web.1"
,"hledger-api.1"
,"hledger_journal.5"
,"hledger_csv.5"
,"hledger_timeclock.5"
,"hledger_timedot.5"
]
-- manuals m4 source, may include other files (hledger/doc/hledger.1.m4.md)
m4manpages = [manpageDir m </> m <.> "m4.md" | m <- manpageNames]
-- manuals rendered to nroff, ready for man (hledger/doc/hledger.1)
nroffmanpages = [manpageDir m </> m | m <- manpageNames]
-- manuals rendered to text, ready for embedding (hledger/doc/hledger.1.txt)
txtmanpages = [manpageDir m </> m <.> "txt" | m <- manpageNames]
-- manuals rendered to info, ready for info (hledger/doc/hledger.1.info)
infomanpages = [manpageDir m </> m <.> "info" | m <- manpageNames]
-- manuals rendered to markdown, ready for web output by hakyll (site/hledger.md)
webmanpages = ["site" </> manpageNameToUri m <.>"md" | m <- manpageNames]
-- manuals rendered to markdown and combined, ready for web output by hakyll
webmanall = "site/manual.md"
-- user guide pages in markdown, ready for web output by hakyll (site/csv-import.md).
-- Keeping these in the main site directory allows hakyll-std to see them (and simpler urls).
-- These should be kept ordered like the links on the docs page, so that the
-- combined guide follows the same order.
-- XXX This, as well as keeping page link, heading, and filename synced, will be a bit tricky.
-- Current policy:
-- filenames are simple and stable as possible, beginning with TOPIC- prefix when appropriate
-- titles are succinct and practical/action/verb-oriented
guidepages = [
"site/start-journal.md"
,"site/version-control.md"
,"site/entries.md"
,"site/csv-import.md"
,"site/account-aliases.md"
,"site/account-separator.md"
,"site/investments.md"
,"site/argfiles.md"
]
-- guide pages combined, ready for web output by hakyll
guideall = "site/guide.md"
-- hledger.1 -> hledger/doc, hledger_journal.5 -> hledger-lib/doc
manpageDir m
| '_' `elem` m = "hledger-lib" </> "doc"
| otherwise = dropExtension m </> "doc"
-- hledger.1 -> hledger, hledger_journal.5 -> journal
manpageNameToUri m | "hledger_" `isPrefixOf` m = dropExtension $ drop 8 m
| otherwise = dropExtension m
-- hledger -> hledger.1, journal -> hledger_journal.5
manpageUriToName u | "hledger" `isPrefixOf` u = u <.> "1"
| otherwise = "hledger_" ++ u <.> "5"
phony "manuals" $ do
need $
nroffmanpages
++ infomanpages
++ txtmanpages
-- compile pandoc helpers
phony "pandocfilters" $ need pandocFilters
pandocFilters |%> \out -> do
need [out <.> "hs"]
cmd ("stack ghc") out
-- man pages
-- use m4 and pandoc to process macros, filter content, and convert to nroff suitable for man output
phony "manpages" $ need nroffmanpages
nroffmanpages |%> \out -> do -- hledger/doc/hledger.1
let src = out <.> "m4.md"
lib = "doc/lib.m4"
dir = takeDirectory out
tmpl = "doc/manpage.nroff"
-- assume all other m4 files in dir are included by this one XXX not true in hledger-lib
deps <- liftIO $ filter (/= src) . filter (".m4.md" `isSuffixOf`) . map (dir </>) <$> S.getDirectoryContents dir
need $ src : lib : tmpl : deps ++ pandocFilters
cmd Shell
"m4 -P -DMAN -I" dir lib src "|"
pandoc "-f markdown -s --template" tmpl
-- "--filter tools/pandoc-drop-web-blocks"
"--filter tools/pandoc-drop-html-blocks"
"--filter tools/pandoc-drop-html-inlines"
"--filter tools/pandoc-drop-links"
"--filter tools/pandoc-drop-notes"
"-o" out
-- render man page nroffs to fixed-width text for embedding in executables, with nroff
phony "txtmanpages" $ need txtmanpages
txtmanpages |%> \out -> do -- hledger/doc/hledger.1.txt
let src = dropExtension out
need [src]
cmd Shell groff "-t -e -mandoc -Tascii" src "| col -bx >" out -- http://www.tldp.org/HOWTO/Man-Page/q10.html
-- use m4 and pandoc to process macros, filter content, and convert to info, suitable for info viewing
phony "infomanpages" $ need infomanpages
infomanpages |%> \out -> do -- hledger/doc/hledger.1.info
let src = out -<.> "m4.md"
lib = "doc/lib.m4"
dir = takeDirectory out
-- assume all other m4 files in dir are included by this one XXX not true in hledger-lib
deps <- liftIO $ filter (/= src) . filter (".m4.md" `isSuffixOf`) . map (dir </>) <$> S.getDirectoryContents dir
need $ src : lib : deps ++ pandocFilters
cmd Shell
"m4 -P -I" dir lib src "|"
pandoc "-f markdown"
-- "--filter tools/pandoc-drop-web-blocks"
"--filter tools/pandoc-drop-html-blocks"
"--filter tools/pandoc-drop-html-inlines"
"--filter tools/pandoc-drop-links"
"--filter tools/pandoc-drop-notes"
"-t texinfo |"
makeinfo "--force --no-split -o" out
-- web site
phony "website" $ do
need $
webmanpages ++
[webmanall
,guideall
,hakyllstd
]
cmd Shell (Cwd "site") "hakyll-std/hakyll-std" "build"
-- website also links to old manuals, which are generated manually
-- with ./Shake websnapshot and committed
-- TODO: when pandoc filters are missing, ./Shake website complains about them before building them
-- ./Shake.hs && ./Shake Clean && (cd site/hakyll-std; ./hakyll-std.hs) && ./Shake website
-- use m4 and pandoc to process macros and filter content, leaving markdown suitable for web output
phony "webmanpages" $ need webmanpages
webmanpages |%> \out -> do -- site/hledger.md
let m = manpageUriToName $ dropExtension $ takeFileName out -- hledger.1
dir = manpageDir m
src = dir </> m <.> "m4.md"
lib = "doc/lib.m4"
heading = let h = dropExtension m
in if "hledger_" `isPrefixOf` h
then drop 8 h ++ " format"
else h
-- assume all other m4 files in dir are included by this one XXX not true in hledger-lib
deps <- liftIO $ filter (/= src) . filter (".m4.md" `isSuffixOf`) . map (dir </>) <$> S.getDirectoryContents dir
need $ src : lib : deps ++ pandocFilters
liftIO $ writeFile out $ "# " ++ heading ++ "\n\n"
cmd Shell
"m4 -P -DMAN -DWEB -I" dir lib src "|"
pandoc "-f markdown -t markdown --atx-headers"
"--filter tools/pandoc-demote-headers"
-- "--filter tools/pandoc-add-toc"
-- "--filter tools/pandoc-drop-man-blocks"
">>" out
-- adjust and combine man page mds for single-page web output, using pandoc
phony "webmanall" $ need [ webmanall ]
webmanall %> \out -> do
need webmanpages
liftIO $ writeFile webmanall "* toc\n\n" -- # Big Manual\n\n -- TOC style is better without main heading,
forM_ webmanpages $ \f -> do -- site/hledger.md, site/journal.md
cmd Shell ("printf '\\n\\n' >>") webmanall :: Action ExitCode
cmd Shell "pandoc" f "-t markdown --atx-headers"
-- "--filter tools/pandoc-drop-man-blocks"
"--filter tools/pandoc-drop-toc"
-- "--filter tools/pandoc-capitalize-headers"
"--filter tools/pandoc-demote-headers"
">>" webmanall :: Action ExitCode
-- adjust and combine recipe mds for single-page web output, using pandoc
phony "guideall" $ need [ guideall ]
guideall %> \out -> do
need guidepages -- XXX seems not to work, not rebuilt when a recipe changes
liftIO $ writeFile guideall "* toc\n\n" -- # User Guide\n\n -- TOC style is better without main heading,
forM_ guidepages $ \f -> do -- site/csv-import.md, site/account-aliases.md, ...
cmd Shell ("printf '\\n\\n' >>") guideall :: Action ExitCode
cmd Shell "pandoc" f "-t markdown --atx-headers"
-- "--filter tools/pandoc-drop-man-blocks"
"--filter tools/pandoc-drop-toc"
-- "--filter tools/pandoc-capitalize-headers"
"--filter tools/pandoc-demote-headers"
">>" guideall :: Action ExitCode
-- build the currently checked out web docs and save as a named snapshot
"site/doc/*/.snapshot" %> \out -> do
need [ webmanall ]
let snapshot = takeDirectory out
cmd Shell "mkdir -p" snapshot :: Action ExitCode
forM_ webmanpages $ \f -> do -- site/hledger.md, site/journal.md
cmd Shell "cp" f (snapshot </> takeFileName f) :: Action ExitCode
cmd Shell "cp" "site/manual.md" snapshot :: Action ExitCode
cmd Shell "cp -r site/images" snapshot :: Action ExitCode
cmd Shell "touch" out -- :: Action ExitCode
-- build standard hakyll script used for site rendering
hakyllstd %> \out -> do
let dir = takeDirectory out
need [out <.> "hs", dir </> "TableOfContents.hs"] -- XXX hard-coded dep
unit $ liftIO $
cmd (Cwd dir) "./hakyll-std.hs"
`catch` (\(e::IOException) -> putStr $ unlines $
["I could not run ./hakyll-std.hs in "++dir++" to install Hakyll."
,"If you see a hakyll-std build error after this, please do it manually:"
,"$ (cd site/hakyll-std; ./hakyll-std.hs)"
,"and try again."
])
-- cleanup
phony "clean" $ do
putNormal "Cleaning generated files"
removeFilesAfter "." webmanpages
removeFilesAfter "." [webmanall, guideall]
phony "Clean" $ do
need ["clean"]
putNormal "Cleaning all hakyll generated files"
removeFilesAfter "site" ["_*"]
putNormal "Cleaning executables"
removeFilesAfter "." $ hakyllstd : pandocFilters
putNormal "Cleaning object files" -- also forces rebuild of executables
removeFilesAfter "tools" ["*.o","*.p_o","*.hi"]
removeFilesAfter "site" ["*.o","*.p_o","*.hi"]
putNormal "Cleaning shake build files"
removeFilesAfter ".shake" ["//*"]
|
ony/hledger
|
Shake.hs
|
gpl-3.0
| 13,712
| 0
| 21
| 3,622
| 1,991
| 1,024
| 967
| 193
| 2
|
module Estuary.Types.Hint where
import Data.Text (Text)
import Data.Maybe (mapMaybe)
import Estuary.Types.Tempo
import Estuary.Utility
import Estuary.Types.Definition
import Estuary.Types.TranslatableText
data Hint =
SampleHint Text |
LogMessage TranslatableText |
SetGlobalDelayTime Double |
SilenceHint |
ZoneHint Int Definition |
ToggleTerminal |
ToggleSidebar |
ToggleStats |
ToggleHeader
deriving (Eq,Show)
justGlobalDelayTime :: [Hint] -> Maybe Double
justGlobalDelayTime = lastOrNothing . mapMaybe f
where f (SetGlobalDelayTime x) = Just x
f _ = Nothing
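-- Illustrative example (assuming lastOrNothing from Estuary.Utility returns the
-- last element of a non-empty list):
--   justGlobalDelayTime [SilenceHint, SetGlobalDelayTime 0.2, SetGlobalDelayTime 0.5] == Just 0.5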
|
d0kt0r0/estuary
|
client/src/Estuary/Types/Hint.hs
|
gpl-3.0
| 596
| 0
| 9
| 102
| 156
| 90
| 66
| 22
| 2
|
{-|
Module : Lipid.Parsers.ClassLevel.Glycerophospholipid
Description :
Copyright : Michael Thomas
License : GPL-3
Maintainer : Michael Thomas <Michaelt293@gmail.com>
Stability : Experimental
-}
{-# LANGUAGE TemplateHaskell #-}
module Lipid.Parsers.ClassLevel.Glycerophospholipid where
import Lipid.Blocks
import Control.Lens
import Data.Monoid ((<>))
import Lipid.ClassLevel.Glycerophospholipid
import Lipid.Parsers.Blocks
import Text.Megaparsec
import Text.Megaparsec.String
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.Lift
paP :: Parser PA
paP = do
_ <- string "PA "
PA <$> classLevelP
pa :: QuasiQuoter
pa = qQuoter paP
peP :: Parser PE
peP = do
_ <- string "PE "
PE <$> classLevelP
pe :: QuasiQuoter
pe = qQuoter peP
pcP :: Parser PC
pcP = do
_ <- string "PC "
PC <$> classLevelP
pc :: QuasiQuoter
pc = qQuoter pcP
pgP :: Parser PG
pgP = do
_ <- string "PG "
PG <$> classLevelP
pg :: QuasiQuoter
pg = qQuoter pgP
pgpP :: Parser PGP
pgpP = do
_ <- string "PGP "
PGP <$> classLevelP
pgp :: QuasiQuoter
pgp = qQuoter pgpP
psP :: Parser PS
psP = do
_ <- string "PS "
PS <$> classLevelP
ps :: QuasiQuoter
ps = qQuoter psP
piP :: Parser PI
piP = do
_ <- string "PI "
PI <$> classLevelP
pi :: QuasiQuoter
pi = qQuoter piP
pipP :: Parser PIP
pipP = do
hg <- phosphatidylinositolMonophosphateP
_ <- char ' '
PIP <$> pure hg <*> classLevelP
pip :: QuasiQuoter
pip = qQuoter pipP
pip2P :: Parser PIP2
pip2P = do
hg <- phosphatidylinositolBisphosphateP
_ <- char ' '
PIP2 <$> pure hg <*> classLevelP
pip2 :: QuasiQuoter
pip2 = qQuoter pip2P
pip3P :: Parser PIP3
pip3P = do
_ <- string "PIP3 "
PIP3 <$> classLevelP
pip3 :: QuasiQuoter
pip3 = qQuoter pip3P
$(deriveLift ''PA)
$(deriveLift ''PE)
$(deriveLift ''PC)
$(deriveLift ''PG)
$(deriveLift ''PGP)
$(deriveLift ''PS)
$(deriveLift ''PI)
$(deriveLift ''PIP)
$(deriveLift ''PIP2)
$(deriveLift ''PIP3)
|
Michaelt293/Lipid-Haskell
|
src/Lipid/Parsers/ClassLevel/Glycerophospholipid.hs
|
gpl-3.0
| 1,984
| 0
| 9
| 392
| 654
| 324
| 330
| 84
| 1
|
{-# Language RecursiveDo #-}
module Dappsys.Weth where
import EVM.Assembly
import qualified Prelude
abiCase :: [(Prelude.Integer, Label)] -> Assembly
abiCase xs = do
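  -- Divide the first 32-byte calldata word by 2^224, leaving the 4-byte ABI
  -- function selector on the stack for the comparisons below.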
push 224; push 2; exp; push 0; calldataload; div
Prelude.mapM_
(\(i, t) -> do dup 1; push i; eq; refer t; jumpi)
xs
contract :: Assembly
contract = mdo
callvalue; iszero; refer dispatch; jumpi -- Skip deposit if no value sent
push 32; not; sload; callvalue; add -- Calculate new total supply
push 32; not; sstore -- Save new total supply to storage
caller; sload; callvalue; add -- Calculate new target balance
caller; sstore -- Save new target balance to storage
-- Emit `Join(address indexed, uint)'
push 0xb4e09949657f21548b58afe74e7b86cd2295da5ff1598ae1e5faecb1cf19ca95
callvalue; push 0; mstore; caller; swap 1; push 32; push 0; log 2
dispatch <- label
abiCase
[ (0x18160ddd, totalSupply)
, (0xdd62ed3e, allowance)
, (0x70a08231, balanceOf)
, (0x095ea7b3, approve)
, (0xa9059cbb, transfer)
, (0x23b872dd, transferFrom)
, (0xd0e30db0, join)
, (0x2e1a7d4d, exit) ]
fail <- label; revert
join <- label; stop
totalSupply <- label
push 32; not; sload -- Load supply from storage
push 0; mstore; push 32; push 0; return -- Return total supply
allowance <- label
push 4; calldataload; push 36; calldataload -- Load owner and spender
push 0; mstore; push 32; mstore -- Write addresses to memory
push 64; push 0; keccak256; sload -- Load allowance from storage
push 0; mstore; push 32; push 0; return -- Return allowance
balanceOf <- label
push 4; calldataload; sload -- Load balance from storage
push 0; mstore; push 32; push 0; return -- Return balance
approve <- label
push 36; calldataload; push 4; calldataload -- Load spender and new allowance
caller; push 0; mstore; dup 2; push 32; mstore
dup 2; push 64; push 0; keccak256; sstore -- Write new allowance to storage
-- Emit `Approval(address indexed, address indexed, uint)'
push 0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925
swap 3; push 0; mstore; caller; swap 1; push 0; push 0; log 3
push 1; push 0; mstore; push 32; push 0; return -- Return true
transfer <- label
push 36; calldataload
push 4; calldataload
caller
refer attemptTransfer; jump
transferFrom <- label
push 68; calldataload
push 36; calldataload
push 4; calldataload
attemptTransfer <- label
push 160; push 2; exp; dup 3; dup 3; or; div; refer fail; jumpi -- Abort if garbage in addresses
dup 2; sload; dup 2; sload -- Load source and target balances
dup 5; dup 2; lt; refer fail; jumpi -- Abort if insufficient balance
dup 3; caller; eq; refer performTransfer; jumpi -- Skip ahead if source is caller
dup 3; push 0; mstore; caller; push 32; mstore
push 32; push 0; keccak256 -- Determine allowance storage slot
dup 1; sload -- Load allowance from storage
push 32; not; dup 2; eq; refer performTransfer; jumpi -- Skip ahead if allowance is max
dup 7; dup 2; lt; refer fail; jumpi -- Abort if allowance is too low
dup 7; swap 2; sub; swap 2; sstore -- Save new allowance to storage
performTransfer <- label
dup 5; swap 1; sub; dup 3; sstore -- Save source balance to storage
dup 4; add; dup 3; sstore -- Save target balance to storage
-- Emit `Transfer(address indexed, address indexed, uint)'
push 0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef
swap 3; push 0; mstore; push 32; push 0; log 3
pop
push 1; push 0; mstore; push 32; push 0; return -- Return true
pop
exit <- label
push 4; calldataload -- Load amount to withdraw
caller; sload -- Load source balance from storage
dup 2; dup 2; sub -- Calculate new source balance
swap 1; dup 2; gt; refer fail; jumpi -- Abort if underflow occurred
caller; sstore -- Save new source balance to storage
push 32; not; sload -- Load total supply from storage
dup 2; swap 1; sub -- Decrement total supply
push 32; not; sstore -- Save new total supply to storage
push 0; push 0; push 0; push 0 -- No return data and no calldata
dup 5; caller -- Send withdrawal amount to caller
gas; call; iszero; refer fail; jumpi -- Make call, aborting on failure
-- Emit `Exit(address indexed, uint)'
push 0x22d324652c93739755cf4581508b60875ebdd78c20c0cff5cf8e23452b299631
swap 1; push 0; mstore; caller; swap 1; push 32; push 0; log 2
push 1; push 0; mstore; push 32; push 0; return -- Return true
|
mbrock/symbex
|
src/Dappsys/Weth.hs
|
agpl-3.0
| 5,757
| 0
| 12
| 2,208
| 1,557
| 719
| 838
| 92
| 1
|
{-# LANGUAGE GADTs #-}
module Gonimo.Server.Effects.Production (
runExceptionServer
, Config(..)
, ServerEffects ) where
import Control.Exception.Base (SomeException, toException, try)
import Control.Monad.Freer.Exception (Exc (..), runError)
import Control.Monad.Freer.Internal (Arrs, Eff (..),
decomp, qApp)
import Control.Monad.Logger (ToLogStr (..))
import Crypto.Random (SystemRandom,
genBytes, newGenIO)
import Data.Bifunctor
import Data.Time.Clock (getCurrentTime)
import Network.Mail.SMTP (sendMail)
import Servant.Subscriber
import Control.Concurrent.STM (atomically)
import Gonimo.Server.Effects.Internal
import Gonimo.Server.Effects.Common
import System.Random (getStdRandom)
runExceptionServer :: Config -> Eff (Exc SomeException ': '[Server]) w -> IO (Either SomeException w)
runExceptionServer c = runServer c . runError
runServer :: forall w . Config -> Eff '[Server] (Either SomeException w) -> IO (Either SomeException w)
runServer _ (Val v) = return v
runServer c (E u' q) = case decomp u' of
Right (Atomically m) -> execIO c q $ atomically m
Right (SendEmail mail) -> execIO c q $ sendMail "localhost" mail
Right (LogMessage loc ls ll msg) -> execIO c q $ doLog loc ls ll (toLogStr msg)
Right (GenRandomBytes l) ->
    -- Throw away the new generator & turn any error that occurred into an exception:
bimap toException fst . genBytes l <$> (newGenIO :: IO SystemRandom)
>>= runServer c . qApp q
Right GetCurrentTime -> execIO c q getCurrentTime
Right GetState -> runServer c . qApp q $ Right (state c)
Right (Notify ev pE cB) -> execIO c q $ atomically (notify (subscriber c) ev pE cB)
Right (RunDb trans) -> runDatabaseServerIO pool trans >>= runServer c . qApp q
Right (RunRandom rand) -> execIO c q (getStdRandom rand)
Left _ -> error impossibleMessage
where
pool = configPool c
doLog = configLog c
-- runLogger loggerT = runLoggingT loggerT doLog
execIO :: Config
-> Arrs '[Server] (Either SomeException b) (Either SomeException w)
-> IO b
-> IO (Either SomeException w)
execIO c q action = try action >>= runServer c . qApp q
|
charringer/gonimo-back
|
src/Gonimo/Server/Effects/Production.hs
|
agpl-3.0
| 2,653
| 0
| 13
| 915
| 769
| 397
| 372
| -1
| -1
|
-- Author:
-- Brent Tubbs <brent.tubbs@gmail.com>
-- | MongoDB GridFS implementation
{-# LANGUAGE OverloadedStrings, RecordWildCards, NamedFieldPuns, TupleSections, FlexibleContexts, FlexibleInstances, UndecidableInstances, MultiParamTypeClasses, GeneralizedNewtypeDeriving, StandaloneDeriving, TypeSynonymInstances, TypeFamilies, CPP, RankNTypes #-}
module Database.MongoDB.GridFS
( Bucket
, files, chunks
, File
, document, bucket
-- ** Setup
, openDefaultBucket
, openBucket
-- ** Query
, findFile
, findOneFile
, fetchFile
-- ** Delete
, deleteFile
-- ** Conduits
, sourceFile
, sinkFile
)
where
import Control.Applicative((<$>))
import Control.Monad(when)
import Control.Monad.IO.Class
import Control.Monad.Trans(MonadTrans, lift)
import Data.Conduit
import Data.Digest.Pure.MD5
import Data.Int
import Data.Tagged(Tagged, untag)
import Data.Text(Text, append)
import Data.Time.Clock(getCurrentTime)
import Database.MongoDB
import Prelude
import qualified Data.Bson as B
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
defaultChunkSize :: Int64
-- ^ The default chunk size is 256 kB
defaultChunkSize = 256 * 1024
-- magic constant for md5Finalize
md5BlockSizeInBytes :: Int
md5BlockSizeInBytes = 64
data Bucket = Bucket {files :: Text, chunks :: Text}
-- ^ Files are stored in "buckets". You open a bucket with openDefaultBucket or openBucket
openDefaultBucket :: (Monad m, MonadIO m) => Action m Bucket
-- ^ Open the default 'Bucket' (named "fs")
openDefaultBucket = openBucket "fs"
openBucket :: (Monad m, MonadIO m) => Text -> Action m Bucket
-- ^ Open a 'Bucket'
openBucket name = do
let filesCollection = name `append` ".files"
let chunksCollection = name `append` ".chunks"
ensureIndex $ (index filesCollection ["filename" =: (1::Int), "uploadDate" =: (1::Int)])
ensureIndex $ (index chunksCollection ["files_id" =: (1::Int), "n" =: (1::Int)]) { iUnique = True, iDropDups = True }
return $ Bucket filesCollection chunksCollection
data File = File {bucket :: Bucket, document :: Document}
getChunk :: (Monad m, MonadIO m) => File -> Int -> Action m (Maybe S.ByteString)
-- ^ Get a chunk of a file
getChunk (File bucket doc) i = do
files_id <- B.look "_id" doc
result <- findOne $ select ["files_id" := files_id, "n" =: i] $ chunks bucket
let content = at "data" <$> result
case content of
Just (Binary b) -> return (Just b)
_ -> return Nothing
findFile :: MonadIO m => Bucket -> Selector -> Action m [File]
-- ^ Find files in the bucket
findFile bucket sel = do
cursor <- find $ select sel $ files bucket
results <- rest cursor
return $ File bucket <$> results
findOneFile :: MonadIO m => Bucket -> Selector -> Action m (Maybe File)
-- ^ Find one file in the bucket
findOneFile bucket sel = do
mdoc <- findOne $ select sel $ files bucket
return $ File bucket <$> mdoc
fetchFile :: MonadIO m => Bucket -> Selector -> Action m File
-- ^ Fetch one file in the bucket
fetchFile bucket sel = do
doc <- fetch $ select sel $ files bucket
return $ File bucket doc
deleteFile :: (MonadIO m) => File -> Action m ()
-- ^ Delete files in the bucket
deleteFile (File bucket doc) = do
files_id <- B.look "_id" doc
delete $ select ["_id" := files_id] $ files bucket
delete $ select ["files_id" := files_id] $ chunks bucket
putChunk :: (Monad m, MonadIO m) => Bucket -> ObjectId -> Int -> L.ByteString -> Action m ()
-- ^ Put a chunk in the bucket
putChunk bucket files_id i chunk = do
insert_ (chunks bucket) ["files_id" =: files_id, "n" =: i, "data" =: Binary (L.toStrict chunk)]
sourceFile :: (Monad m, MonadIO m) => File -> Producer (Action m) S.ByteString
-- ^ A producer for the contents of a file
sourceFile file = yieldChunk 0 where
yieldChunk i = do
mbytes <- lift $ getChunk file i
case mbytes of
Just bytes -> yield bytes >> yieldChunk (i+1)
Nothing -> return ()
-- Used to keep data during writing
data FileWriter = FileWriter
{ fwChunkSize :: Int64
, fwBucket :: Bucket
, fwFilesId :: ObjectId
, fwChunkIndex :: Int
, fwSize :: Int64
, fwAcc :: L.ByteString
, fwMd5Context :: MD5Context
, fwMd5acc :: L.ByteString
}
-- Finalize file, calculating md5 digest, saving the last chunk, and creating the file in the bucket
finalizeFile :: (Monad m, MonadIO m) => Text -> FileWriter -> Action m File
finalizeFile filename (FileWriter chunkSize bucket files_id i size acc md5context md5acc) = do
let md5digest = finalizeMD5 md5context (L.toStrict md5acc)
when (L.length acc > 0) $ putChunk bucket files_id i acc
timestamp <- liftIO $ getCurrentTime
let doc = [ "_id" =: files_id
, "length" =: size
, "uploadDate" =: timestamp
, "md5" =: show (md5digest)
, "chunkSize" =: chunkSize
, "filename" =: filename
]
insert_ (files bucket) doc
return $ File bucket doc
-- finalize the remainder and return the MD5Digest.
finalizeMD5 :: MD5Context -> S.ByteString -> MD5Digest
finalizeMD5 ctx rest =
md5Finalize ctx2 (S.drop lu rest) -- can only handle max md5BlockSizeInBytes length
where
l = S.length rest
r = l `mod` md5BlockSizeInBytes
lu = l - r
ctx2 = md5Update ctx (S.take lu rest)
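    -- lu is the largest multiple of md5BlockSizeInBytes (64) not exceeding the
    -- length of rest: those whole blocks go through md5Update, and md5Finalize
    -- handles the remaining r (< 64) bytes.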
-- Write as many chunks as can be written from the file writer
writeChunks :: (Monad m, MonadIO m) => FileWriter -> L.ByteString -> Action m FileWriter
writeChunks (FileWriter chunkSize bucket files_id i size acc md5context md5acc) chunk = do
-- Update md5 context
let md5BlockLength = fromIntegral $ untag (blockLength :: Tagged MD5Digest Int)
let md5acc_temp = (md5acc `L.append` chunk)
let (md5context', md5acc') =
if (L.length md5acc_temp < md5BlockLength)
then (md5context, md5acc_temp)
else let numBlocks = L.length md5acc_temp `div` md5BlockLength
(current, rest) = L.splitAt (md5BlockLength * numBlocks) md5acc_temp
in (md5Update md5context (L.toStrict current), rest)
-- Update chunks
let size' = (size + L.length chunk)
let acc_temp = (acc `L.append` chunk)
if (L.length acc_temp < chunkSize)
then return (FileWriter chunkSize bucket files_id i size' acc_temp md5context' md5acc')
else do
let (chunk, acc') = L.splitAt chunkSize acc_temp
putChunk bucket files_id i chunk
writeChunks (FileWriter chunkSize bucket files_id (i+1) size' acc' md5context' md5acc') L.empty
sinkFile :: (Monad m, MonadIO m) => Bucket -> Text -> Consumer S.ByteString (Action m) File
-- ^ A consumer that creates a file in the bucket and puts all consumed data in it
sinkFile bucket filename = do
files_id <- liftIO $ genObjectId
awaitChunk $ FileWriter defaultChunkSize bucket files_id 0 0 L.empty md5InitialContext L.empty
where
awaitChunk fw = do
mchunk <- await
case mchunk of
Nothing -> lift (finalizeFile filename fw)
Just chunk -> lift (writeChunks fw (L.fromStrict chunk)) >>= awaitChunk
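-- Illustrative usage sketch (not part of the original module): within an
-- Action, a stored file can be streamed back out or copied through the two
-- conduits above, e.g. given some original :: File and bucket :: Bucket,
--   copy <- runConduit (sourceFile original .| sinkFile bucket "copy.bin")
-- assuming a conduit version that provides runConduit and .| (older versions
-- used the $$ operator instead).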
|
Yuras/mongodb
|
Database/MongoDB/GridFS.hs
|
apache-2.0
| 6,967
| 0
| 18
| 1,416
| 2,108
| 1,102
| 1,006
| 136
| 3
|
#ifndef __ESBMC_HEADERS_STDARG_H_
#define __ESBMC_HEADERS_STDARG_H_
/* Define standard macros; esbmc currently copes with gcc internal forms,
* so we'll just replicate those */
#define va_start(v,l) __builtin_va_start(v,l)
#define va_end(v) __builtin_va_end(v)
#define va_arg(v,l) __builtin_va_arg(v,l)
#define va_copy(d,s) __builtin_va_copy(d,s)
#define __GNUC_VA_LIST
#define __gnuc_va_list __builtin_va_list
#define va_list __builtin_va_list
#endif /* __ESBMC_HEADERS_STDARG_H_ */
|
ssvlab/esbmc-gpu
|
ansi-c/headers/stdarg.hs
|
apache-2.0
| 503
| 7
| 6
| 65
| 74
| 44
| 30
| 1
| 0
|
selectEven :: [Int] -> [Int]
selectEven [] = []
selectEven xs
| even (last xs) = selectEven (init xs) ++ [last xs]
| otherwise = selectEven (init xs)
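-- Worked examples (added for illustration, not in the original exercise):
--   selectEven [1,2,3,4] == [2,4]
--   selectEven []        == []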
|
tonilopezmr/Learning-Haskell
|
Exercises/2/Exercise_8.hs
|
apache-2.0
| 152
| 0
| 10
| 30
| 88
| 42
| 46
| 5
| 1
|
module Calculus.Opra8
( module Calculus.Opra
, Opra8(..)
) where
-- standard modules
-- local modules
import Basics
import Calculus.Opra
data Opra8 = Opra8_0_0 | Opra8_0_1 | Opra8_0_2 | Opra8_0_3
| Opra8_0_4 | Opra8_0_5 | Opra8_0_6 | Opra8_0_7
| Opra8_0_8 | Opra8_0_9 | Opra8_0_10 | Opra8_0_11
| Opra8_0_12 | Opra8_0_13 | Opra8_0_14 | Opra8_0_15
| Opra8_0_16 | Opra8_0_17 | Opra8_0_18 | Opra8_0_19
| Opra8_0_20 | Opra8_0_21 | Opra8_0_22 | Opra8_0_23
| Opra8_0_24 | Opra8_0_25 | Opra8_0_26 | Opra8_0_27
| Opra8_0_28 | Opra8_0_29 | Opra8_0_30 | Opra8_0_31
| Opra8_1_0 | Opra8_1_1 | Opra8_1_2 | Opra8_1_3
| Opra8_1_4 | Opra8_1_5 | Opra8_1_6 | Opra8_1_7
| Opra8_1_8 | Opra8_1_9 | Opra8_1_10 | Opra8_1_11
| Opra8_1_12 | Opra8_1_13 | Opra8_1_14 | Opra8_1_15
| Opra8_1_16 | Opra8_1_17 | Opra8_1_18 | Opra8_1_19
| Opra8_1_20 | Opra8_1_21 | Opra8_1_22 | Opra8_1_23
| Opra8_1_24 | Opra8_1_25 | Opra8_1_26 | Opra8_1_27
| Opra8_1_28 | Opra8_1_29 | Opra8_1_30 | Opra8_1_31
| Opra8_2_0 | Opra8_2_1 | Opra8_2_2 | Opra8_2_3
| Opra8_2_4 | Opra8_2_5 | Opra8_2_6 | Opra8_2_7
| Opra8_2_8 | Opra8_2_9 | Opra8_2_10 | Opra8_2_11
| Opra8_2_12 | Opra8_2_13 | Opra8_2_14 | Opra8_2_15
| Opra8_2_16 | Opra8_2_17 | Opra8_2_18 | Opra8_2_19
| Opra8_2_20 | Opra8_2_21 | Opra8_2_22 | Opra8_2_23
| Opra8_2_24 | Opra8_2_25 | Opra8_2_26 | Opra8_2_27
| Opra8_2_28 | Opra8_2_29 | Opra8_2_30 | Opra8_2_31
| Opra8_3_0 | Opra8_3_1 | Opra8_3_2 | Opra8_3_3
| Opra8_3_4 | Opra8_3_5 | Opra8_3_6 | Opra8_3_7
| Opra8_3_8 | Opra8_3_9 | Opra8_3_10 | Opra8_3_11
| Opra8_3_12 | Opra8_3_13 | Opra8_3_14 | Opra8_3_15
| Opra8_3_16 | Opra8_3_17 | Opra8_3_18 | Opra8_3_19
| Opra8_3_20 | Opra8_3_21 | Opra8_3_22 | Opra8_3_23
| Opra8_3_24 | Opra8_3_25 | Opra8_3_26 | Opra8_3_27
| Opra8_3_28 | Opra8_3_29 | Opra8_3_30 | Opra8_3_31
| Opra8_4_0 | Opra8_4_1 | Opra8_4_2 | Opra8_4_3
| Opra8_4_4 | Opra8_4_5 | Opra8_4_6 | Opra8_4_7
| Opra8_4_8 | Opra8_4_9 | Opra8_4_10 | Opra8_4_11
| Opra8_4_12 | Opra8_4_13 | Opra8_4_14 | Opra8_4_15
| Opra8_4_16 | Opra8_4_17 | Opra8_4_18 | Opra8_4_19
| Opra8_4_20 | Opra8_4_21 | Opra8_4_22 | Opra8_4_23
| Opra8_4_24 | Opra8_4_25 | Opra8_4_26 | Opra8_4_27
| Opra8_4_28 | Opra8_4_29 | Opra8_4_30 | Opra8_4_31
| Opra8_5_0 | Opra8_5_1 | Opra8_5_2 | Opra8_5_3
| Opra8_5_4 | Opra8_5_5 | Opra8_5_6 | Opra8_5_7
| Opra8_5_8 | Opra8_5_9 | Opra8_5_10 | Opra8_5_11
| Opra8_5_12 | Opra8_5_13 | Opra8_5_14 | Opra8_5_15
| Opra8_5_16 | Opra8_5_17 | Opra8_5_18 | Opra8_5_19
| Opra8_5_20 | Opra8_5_21 | Opra8_5_22 | Opra8_5_23
| Opra8_5_24 | Opra8_5_25 | Opra8_5_26 | Opra8_5_27
| Opra8_5_28 | Opra8_5_29 | Opra8_5_30 | Opra8_5_31
| Opra8_6_0 | Opra8_6_1 | Opra8_6_2 | Opra8_6_3
| Opra8_6_4 | Opra8_6_5 | Opra8_6_6 | Opra8_6_7
| Opra8_6_8 | Opra8_6_9 | Opra8_6_10 | Opra8_6_11
| Opra8_6_12 | Opra8_6_13 | Opra8_6_14 | Opra8_6_15
| Opra8_6_16 | Opra8_6_17 | Opra8_6_18 | Opra8_6_19
| Opra8_6_20 | Opra8_6_21 | Opra8_6_22 | Opra8_6_23
| Opra8_6_24 | Opra8_6_25 | Opra8_6_26 | Opra8_6_27
| Opra8_6_28 | Opra8_6_29 | Opra8_6_30 | Opra8_6_31
| Opra8_7_0 | Opra8_7_1 | Opra8_7_2 | Opra8_7_3
| Opra8_7_4 | Opra8_7_5 | Opra8_7_6 | Opra8_7_7
| Opra8_7_8 | Opra8_7_9 | Opra8_7_10 | Opra8_7_11
| Opra8_7_12 | Opra8_7_13 | Opra8_7_14 | Opra8_7_15
| Opra8_7_16 | Opra8_7_17 | Opra8_7_18 | Opra8_7_19
| Opra8_7_20 | Opra8_7_21 | Opra8_7_22 | Opra8_7_23
| Opra8_7_24 | Opra8_7_25 | Opra8_7_26 | Opra8_7_27
| Opra8_7_28 | Opra8_7_29 | Opra8_7_30 | Opra8_7_31
| Opra8_8_0 | Opra8_8_1 | Opra8_8_2 | Opra8_8_3
| Opra8_8_4 | Opra8_8_5 | Opra8_8_6 | Opra8_8_7
| Opra8_8_8 | Opra8_8_9 | Opra8_8_10 | Opra8_8_11
| Opra8_8_12 | Opra8_8_13 | Opra8_8_14 | Opra8_8_15
| Opra8_8_16 | Opra8_8_17 | Opra8_8_18 | Opra8_8_19
| Opra8_8_20 | Opra8_8_21 | Opra8_8_22 | Opra8_8_23
| Opra8_8_24 | Opra8_8_25 | Opra8_8_26 | Opra8_8_27
| Opra8_8_28 | Opra8_8_29 | Opra8_8_30 | Opra8_8_31
| Opra8_9_0 | Opra8_9_1 | Opra8_9_2 | Opra8_9_3
| Opra8_9_4 | Opra8_9_5 | Opra8_9_6 | Opra8_9_7
| Opra8_9_8 | Opra8_9_9 | Opra8_9_10 | Opra8_9_11
| Opra8_9_12 | Opra8_9_13 | Opra8_9_14 | Opra8_9_15
| Opra8_9_16 | Opra8_9_17 | Opra8_9_18 | Opra8_9_19
| Opra8_9_20 | Opra8_9_21 | Opra8_9_22 | Opra8_9_23
| Opra8_9_24 | Opra8_9_25 | Opra8_9_26 | Opra8_9_27
| Opra8_9_28 | Opra8_9_29 | Opra8_9_30 | Opra8_9_31
| Opra8_10_0 | Opra8_10_1 | Opra8_10_2 | Opra8_10_3
| Opra8_10_4 | Opra8_10_5 | Opra8_10_6 | Opra8_10_7
| Opra8_10_8 | Opra8_10_9 | Opra8_10_10 | Opra8_10_11
| Opra8_10_12 | Opra8_10_13 | Opra8_10_14 | Opra8_10_15
| Opra8_10_16 | Opra8_10_17 | Opra8_10_18 | Opra8_10_19
| Opra8_10_20 | Opra8_10_21 | Opra8_10_22 | Opra8_10_23
| Opra8_10_24 | Opra8_10_25 | Opra8_10_26 | Opra8_10_27
| Opra8_10_28 | Opra8_10_29 | Opra8_10_30 | Opra8_10_31
| Opra8_11_0 | Opra8_11_1 | Opra8_11_2 | Opra8_11_3
| Opra8_11_4 | Opra8_11_5 | Opra8_11_6 | Opra8_11_7
| Opra8_11_8 | Opra8_11_9 | Opra8_11_10 | Opra8_11_11
| Opra8_11_12 | Opra8_11_13 | Opra8_11_14 | Opra8_11_15
| Opra8_11_16 | Opra8_11_17 | Opra8_11_18 | Opra8_11_19
| Opra8_11_20 | Opra8_11_21 | Opra8_11_22 | Opra8_11_23
| Opra8_11_24 | Opra8_11_25 | Opra8_11_26 | Opra8_11_27
| Opra8_11_28 | Opra8_11_29 | Opra8_11_30 | Opra8_11_31
| Opra8_12_0 | Opra8_12_1 | Opra8_12_2 | Opra8_12_3
| Opra8_12_4 | Opra8_12_5 | Opra8_12_6 | Opra8_12_7
| Opra8_12_8 | Opra8_12_9 | Opra8_12_10 | Opra8_12_11
| Opra8_12_12 | Opra8_12_13 | Opra8_12_14 | Opra8_12_15
| Opra8_12_16 | Opra8_12_17 | Opra8_12_18 | Opra8_12_19
| Opra8_12_20 | Opra8_12_21 | Opra8_12_22 | Opra8_12_23
| Opra8_12_24 | Opra8_12_25 | Opra8_12_26 | Opra8_12_27
| Opra8_12_28 | Opra8_12_29 | Opra8_12_30 | Opra8_12_31
| Opra8_13_0 | Opra8_13_1 | Opra8_13_2 | Opra8_13_3
| Opra8_13_4 | Opra8_13_5 | Opra8_13_6 | Opra8_13_7
| Opra8_13_8 | Opra8_13_9 | Opra8_13_10 | Opra8_13_11
| Opra8_13_12 | Opra8_13_13 | Opra8_13_14 | Opra8_13_15
| Opra8_13_16 | Opra8_13_17 | Opra8_13_18 | Opra8_13_19
| Opra8_13_20 | Opra8_13_21 | Opra8_13_22 | Opra8_13_23
| Opra8_13_24 | Opra8_13_25 | Opra8_13_26 | Opra8_13_27
| Opra8_13_28 | Opra8_13_29 | Opra8_13_30 | Opra8_13_31
| Opra8_14_0 | Opra8_14_1 | Opra8_14_2 | Opra8_14_3
| Opra8_14_4 | Opra8_14_5 | Opra8_14_6 | Opra8_14_7
| Opra8_14_8 | Opra8_14_9 | Opra8_14_10 | Opra8_14_11
| Opra8_14_12 | Opra8_14_13 | Opra8_14_14 | Opra8_14_15
| Opra8_14_16 | Opra8_14_17 | Opra8_14_18 | Opra8_14_19
| Opra8_14_20 | Opra8_14_21 | Opra8_14_22 | Opra8_14_23
| Opra8_14_24 | Opra8_14_25 | Opra8_14_26 | Opra8_14_27
| Opra8_14_28 | Opra8_14_29 | Opra8_14_30 | Opra8_14_31
| Opra8_15_0 | Opra8_15_1 | Opra8_15_2 | Opra8_15_3
| Opra8_15_4 | Opra8_15_5 | Opra8_15_6 | Opra8_15_7
| Opra8_15_8 | Opra8_15_9 | Opra8_15_10 | Opra8_15_11
| Opra8_15_12 | Opra8_15_13 | Opra8_15_14 | Opra8_15_15
| Opra8_15_16 | Opra8_15_17 | Opra8_15_18 | Opra8_15_19
| Opra8_15_20 | Opra8_15_21 | Opra8_15_22 | Opra8_15_23
| Opra8_15_24 | Opra8_15_25 | Opra8_15_26 | Opra8_15_27
| Opra8_15_28 | Opra8_15_29 | Opra8_15_30 | Opra8_15_31
| Opra8_16_0 | Opra8_16_1 | Opra8_16_2 | Opra8_16_3
| Opra8_16_4 | Opra8_16_5 | Opra8_16_6 | Opra8_16_7
| Opra8_16_8 | Opra8_16_9 | Opra8_16_10 | Opra8_16_11
| Opra8_16_12 | Opra8_16_13 | Opra8_16_14 | Opra8_16_15
| Opra8_16_16 | Opra8_16_17 | Opra8_16_18 | Opra8_16_19
| Opra8_16_20 | Opra8_16_21 | Opra8_16_22 | Opra8_16_23
| Opra8_16_24 | Opra8_16_25 | Opra8_16_26 | Opra8_16_27
| Opra8_16_28 | Opra8_16_29 | Opra8_16_30 | Opra8_16_31
| Opra8_17_0 | Opra8_17_1 | Opra8_17_2 | Opra8_17_3
| Opra8_17_4 | Opra8_17_5 | Opra8_17_6 | Opra8_17_7
| Opra8_17_8 | Opra8_17_9 | Opra8_17_10 | Opra8_17_11
| Opra8_17_12 | Opra8_17_13 | Opra8_17_14 | Opra8_17_15
| Opra8_17_16 | Opra8_17_17 | Opra8_17_18 | Opra8_17_19
| Opra8_17_20 | Opra8_17_21 | Opra8_17_22 | Opra8_17_23
| Opra8_17_24 | Opra8_17_25 | Opra8_17_26 | Opra8_17_27
| Opra8_17_28 | Opra8_17_29 | Opra8_17_30 | Opra8_17_31
| Opra8_18_0 | Opra8_18_1 | Opra8_18_2 | Opra8_18_3
| Opra8_18_4 | Opra8_18_5 | Opra8_18_6 | Opra8_18_7
| Opra8_18_8 | Opra8_18_9 | Opra8_18_10 | Opra8_18_11
| Opra8_18_12 | Opra8_18_13 | Opra8_18_14 | Opra8_18_15
| Opra8_18_16 | Opra8_18_17 | Opra8_18_18 | Opra8_18_19
| Opra8_18_20 | Opra8_18_21 | Opra8_18_22 | Opra8_18_23
| Opra8_18_24 | Opra8_18_25 | Opra8_18_26 | Opra8_18_27
| Opra8_18_28 | Opra8_18_29 | Opra8_18_30 | Opra8_18_31
| Opra8_19_0 | Opra8_19_1 | Opra8_19_2 | Opra8_19_3
| Opra8_19_4 | Opra8_19_5 | Opra8_19_6 | Opra8_19_7
| Opra8_19_8 | Opra8_19_9 | Opra8_19_10 | Opra8_19_11
| Opra8_19_12 | Opra8_19_13 | Opra8_19_14 | Opra8_19_15
| Opra8_19_16 | Opra8_19_17 | Opra8_19_18 | Opra8_19_19
| Opra8_19_20 | Opra8_19_21 | Opra8_19_22 | Opra8_19_23
| Opra8_19_24 | Opra8_19_25 | Opra8_19_26 | Opra8_19_27
| Opra8_19_28 | Opra8_19_29 | Opra8_19_30 | Opra8_19_31
| Opra8_20_0 | Opra8_20_1 | Opra8_20_2 | Opra8_20_3
| Opra8_20_4 | Opra8_20_5 | Opra8_20_6 | Opra8_20_7
| Opra8_20_8 | Opra8_20_9 | Opra8_20_10 | Opra8_20_11
| Opra8_20_12 | Opra8_20_13 | Opra8_20_14 | Opra8_20_15
| Opra8_20_16 | Opra8_20_17 | Opra8_20_18 | Opra8_20_19
| Opra8_20_20 | Opra8_20_21 | Opra8_20_22 | Opra8_20_23
| Opra8_20_24 | Opra8_20_25 | Opra8_20_26 | Opra8_20_27
| Opra8_20_28 | Opra8_20_29 | Opra8_20_30 | Opra8_20_31
| Opra8_21_0 | Opra8_21_1 | Opra8_21_2 | Opra8_21_3
| Opra8_21_4 | Opra8_21_5 | Opra8_21_6 | Opra8_21_7
| Opra8_21_8 | Opra8_21_9 | Opra8_21_10 | Opra8_21_11
| Opra8_21_12 | Opra8_21_13 | Opra8_21_14 | Opra8_21_15
| Opra8_21_16 | Opra8_21_17 | Opra8_21_18 | Opra8_21_19
| Opra8_21_20 | Opra8_21_21 | Opra8_21_22 | Opra8_21_23
| Opra8_21_24 | Opra8_21_25 | Opra8_21_26 | Opra8_21_27
| Opra8_21_28 | Opra8_21_29 | Opra8_21_30 | Opra8_21_31
| Opra8_22_0 | Opra8_22_1 | Opra8_22_2 | Opra8_22_3
| Opra8_22_4 | Opra8_22_5 | Opra8_22_6 | Opra8_22_7
| Opra8_22_8 | Opra8_22_9 | Opra8_22_10 | Opra8_22_11
| Opra8_22_12 | Opra8_22_13 | Opra8_22_14 | Opra8_22_15
| Opra8_22_16 | Opra8_22_17 | Opra8_22_18 | Opra8_22_19
| Opra8_22_20 | Opra8_22_21 | Opra8_22_22 | Opra8_22_23
| Opra8_22_24 | Opra8_22_25 | Opra8_22_26 | Opra8_22_27
| Opra8_22_28 | Opra8_22_29 | Opra8_22_30 | Opra8_22_31
| Opra8_23_0 | Opra8_23_1 | Opra8_23_2 | Opra8_23_3
| Opra8_23_4 | Opra8_23_5 | Opra8_23_6 | Opra8_23_7
| Opra8_23_8 | Opra8_23_9 | Opra8_23_10 | Opra8_23_11
| Opra8_23_12 | Opra8_23_13 | Opra8_23_14 | Opra8_23_15
| Opra8_23_16 | Opra8_23_17 | Opra8_23_18 | Opra8_23_19
| Opra8_23_20 | Opra8_23_21 | Opra8_23_22 | Opra8_23_23
| Opra8_23_24 | Opra8_23_25 | Opra8_23_26 | Opra8_23_27
| Opra8_23_28 | Opra8_23_29 | Opra8_23_30 | Opra8_23_31
| Opra8_24_0 | Opra8_24_1 | Opra8_24_2 | Opra8_24_3
| Opra8_24_4 | Opra8_24_5 | Opra8_24_6 | Opra8_24_7
| Opra8_24_8 | Opra8_24_9 | Opra8_24_10 | Opra8_24_11
| Opra8_24_12 | Opra8_24_13 | Opra8_24_14 | Opra8_24_15
| Opra8_24_16 | Opra8_24_17 | Opra8_24_18 | Opra8_24_19
| Opra8_24_20 | Opra8_24_21 | Opra8_24_22 | Opra8_24_23
| Opra8_24_24 | Opra8_24_25 | Opra8_24_26 | Opra8_24_27
| Opra8_24_28 | Opra8_24_29 | Opra8_24_30 | Opra8_24_31
| Opra8_25_0 | Opra8_25_1 | Opra8_25_2 | Opra8_25_3
| Opra8_25_4 | Opra8_25_5 | Opra8_25_6 | Opra8_25_7
| Opra8_25_8 | Opra8_25_9 | Opra8_25_10 | Opra8_25_11
| Opra8_25_12 | Opra8_25_13 | Opra8_25_14 | Opra8_25_15
| Opra8_25_16 | Opra8_25_17 | Opra8_25_18 | Opra8_25_19
| Opra8_25_20 | Opra8_25_21 | Opra8_25_22 | Opra8_25_23
| Opra8_25_24 | Opra8_25_25 | Opra8_25_26 | Opra8_25_27
| Opra8_25_28 | Opra8_25_29 | Opra8_25_30 | Opra8_25_31
| Opra8_26_0 | Opra8_26_1 | Opra8_26_2 | Opra8_26_3
| Opra8_26_4 | Opra8_26_5 | Opra8_26_6 | Opra8_26_7
| Opra8_26_8 | Opra8_26_9 | Opra8_26_10 | Opra8_26_11
| Opra8_26_12 | Opra8_26_13 | Opra8_26_14 | Opra8_26_15
| Opra8_26_16 | Opra8_26_17 | Opra8_26_18 | Opra8_26_19
| Opra8_26_20 | Opra8_26_21 | Opra8_26_22 | Opra8_26_23
| Opra8_26_24 | Opra8_26_25 | Opra8_26_26 | Opra8_26_27
| Opra8_26_28 | Opra8_26_29 | Opra8_26_30 | Opra8_26_31
| Opra8_27_0 | Opra8_27_1 | Opra8_27_2 | Opra8_27_3
| Opra8_27_4 | Opra8_27_5 | Opra8_27_6 | Opra8_27_7
| Opra8_27_8 | Opra8_27_9 | Opra8_27_10 | Opra8_27_11
| Opra8_27_12 | Opra8_27_13 | Opra8_27_14 | Opra8_27_15
| Opra8_27_16 | Opra8_27_17 | Opra8_27_18 | Opra8_27_19
| Opra8_27_20 | Opra8_27_21 | Opra8_27_22 | Opra8_27_23
| Opra8_27_24 | Opra8_27_25 | Opra8_27_26 | Opra8_27_27
| Opra8_27_28 | Opra8_27_29 | Opra8_27_30 | Opra8_27_31
| Opra8_28_0 | Opra8_28_1 | Opra8_28_2 | Opra8_28_3
| Opra8_28_4 | Opra8_28_5 | Opra8_28_6 | Opra8_28_7
| Opra8_28_8 | Opra8_28_9 | Opra8_28_10 | Opra8_28_11
| Opra8_28_12 | Opra8_28_13 | Opra8_28_14 | Opra8_28_15
| Opra8_28_16 | Opra8_28_17 | Opra8_28_18 | Opra8_28_19
| Opra8_28_20 | Opra8_28_21 | Opra8_28_22 | Opra8_28_23
| Opra8_28_24 | Opra8_28_25 | Opra8_28_26 | Opra8_28_27
| Opra8_28_28 | Opra8_28_29 | Opra8_28_30 | Opra8_28_31
| Opra8_29_0 | Opra8_29_1 | Opra8_29_2 | Opra8_29_3
| Opra8_29_4 | Opra8_29_5 | Opra8_29_6 | Opra8_29_7
| Opra8_29_8 | Opra8_29_9 | Opra8_29_10 | Opra8_29_11
| Opra8_29_12 | Opra8_29_13 | Opra8_29_14 | Opra8_29_15
| Opra8_29_16 | Opra8_29_17 | Opra8_29_18 | Opra8_29_19
| Opra8_29_20 | Opra8_29_21 | Opra8_29_22 | Opra8_29_23
| Opra8_29_24 | Opra8_29_25 | Opra8_29_26 | Opra8_29_27
| Opra8_29_28 | Opra8_29_29 | Opra8_29_30 | Opra8_29_31
| Opra8_30_0 | Opra8_30_1 | Opra8_30_2 | Opra8_30_3
| Opra8_30_4 | Opra8_30_5 | Opra8_30_6 | Opra8_30_7
| Opra8_30_8 | Opra8_30_9 | Opra8_30_10 | Opra8_30_11
| Opra8_30_12 | Opra8_30_13 | Opra8_30_14 | Opra8_30_15
| Opra8_30_16 | Opra8_30_17 | Opra8_30_18 | Opra8_30_19
| Opra8_30_20 | Opra8_30_21 | Opra8_30_22 | Opra8_30_23
| Opra8_30_24 | Opra8_30_25 | Opra8_30_26 | Opra8_30_27
| Opra8_30_28 | Opra8_30_29 | Opra8_30_30 | Opra8_30_31
| Opra8_31_0 | Opra8_31_1 | Opra8_31_2 | Opra8_31_3
| Opra8_31_4 | Opra8_31_5 | Opra8_31_6 | Opra8_31_7
| Opra8_31_8 | Opra8_31_9 | Opra8_31_10 | Opra8_31_11
| Opra8_31_12 | Opra8_31_13 | Opra8_31_14 | Opra8_31_15
| Opra8_31_16 | Opra8_31_17 | Opra8_31_18 | Opra8_31_19
| Opra8_31_20 | Opra8_31_21 | Opra8_31_22 | Opra8_31_23
| Opra8_31_24 | Opra8_31_25 | Opra8_31_26 | Opra8_31_27
| Opra8_31_28 | Opra8_31_29 | Opra8_31_30 | Opra8_31_31
| Opra8_s_0 | Opra8_s_1 | Opra8_s_2 | Opra8_s_3
| Opra8_s_4 | Opra8_s_5 | Opra8_s_6 | Opra8_s_7
| Opra8_s_8 | Opra8_s_9 | Opra8_s_10 | Opra8_s_11
| Opra8_s_12 | Opra8_s_13 | Opra8_s_14 | Opra8_s_15
| Opra8_s_16 | Opra8_s_17 | Opra8_s_18 | Opra8_s_19
| Opra8_s_20 | Opra8_s_21 | Opra8_s_22 | Opra8_s_23
| Opra8_s_24 | Opra8_s_25 | Opra8_s_26 | Opra8_s_27
| Opra8_s_28 | Opra8_s_29 | Opra8_s_30 | Opra8_s_31
deriving (Eq, Ord, Read, Show, Enum, Bounded)
instance Opram Opra8 where
m _ = 8
instance Calculus Opra8 where
rank _ = 2
cName _ = "opra-8"
cNameGqr _ = "opra8"
cReadRel = readOpram
cShowRel = showOpram
cSparqifyRel = sparqifyOpram
cGqrifyRel = sparqifyOpram
cBaserelationsArealList = areal cBaserelationsList
cBaserelationsNonArealList = nonAreal cBaserelationsList
bcConvert = opraConvert 8
|
spatial-reasoning/zeno
|
src/Calculus/Opra8.hs
|
bsd-2-clause
| 18,295
| 0
| 6
| 5,839
| 3,322
| 2,197
| 1,125
| 283
| 0
|
{- |
Module : Src
Description : Abstract syntax and pretty printer for the source language.
Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt)
License : BSD3
Maintainer : Zhiyuan Shi <zhiyuan.shi@gmail.com>
Stability : experimental
Portability : portable
-}
{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -Wall #-}
module Src
( Module(..), ReaderModule
, Kind(..)
, Type(..), ReaderType
, Expr(..), ReaderExpr, CheckedExpr
, Constructor(..), Alt(..)
, Bind(..), ReaderBind
, RecFlag(..), Lit(..), Operator(..), UnitPossibility(..), JCallee(..), JVMType(..), Label
, Name, ReaderId, CheckedId, LReaderId
, TypeValue(..), TypeContext, ValueContext
, DataBind(..)
, groupForall
, expandType
-- Relations between types
, subtype
, compatible
, leastUpperBound
, deThunkOnce
, recordFields
, freeTVars
, fsubstTT
, wrap
, opPrec
, intersperseBar
) where
import Config
import JavaUtils
import PrettyUtils
import Panic
import SrcLoc
import qualified Language.Java.Syntax as J (Op(..))
-- import qualified Language.Java.Pretty as P
import Text.PrettyPrint.ANSI.Leijen
import Control.Arrow (second)
import Data.Data
import Data.List (intersperse)
import qualified Data.Map as Map
import qualified Data.Set as Set
-- Names and identifiers.
type Name = String
type ReaderId = Name
type LReaderId = Located ReaderId
type CheckedId = (ReaderId, Type)
type Label = Name
-- Modules.
data Module id ty = Module id [Bind id ty] deriving (Eq, Show)
type ReaderModule = Located (Module Name Type)
-- Kinds k := * | k -> k
data Kind = Star | KArrow Kind Kind deriving (Eq, Show)
-- Types.
data Type
= TVar Name
| JType JVMType -- JClass ClassName
| Unit
| Fun Type Type
| Forall Name Type
| Product [Type]
-- Extensions
| And Type Type
| RecordType [(Label, Type)]
| Thunk Type
-- Type synonyms
| OpAbs Name Type -- Type-level abstraction: "type T A = t" becomes "type T = \A. t", and "\A. t" is the abstraction.
| OpApp Type Type -- Type-level application: t1 t2
| ListOf Type
| Datatype Name [Type] [Name]
-- Warning: If you ever add a case to this, you MUST also define the binary
-- relations on your new case. Namely, add cases for your data constructor in
-- `compatible` and `subtype` below.
deriving (Eq, Show, Data, Typeable)
type ReaderType = Type
data JVMType = JClass ClassName | JPrim String deriving (Eq, Show, Data, Typeable)
type LExpr id ty = Located (Expr id ty)
-- Expressions.
data Expr id ty
= Var id -- Variable
| Lit Lit -- Literals
| Lam (Name, ty) (LExpr id ty) -- Lambda
| App (LExpr id ty) (LExpr id ty) -- Application
| BLam Name (LExpr id ty) -- Big lambda
| TApp (LExpr id ty) ty -- Type application
| Tuple [LExpr id ty] -- Tuples
| Proj (LExpr id ty) Int -- Tuple projection
| PrimOp (LExpr id ty) Operator (LExpr id ty) -- Primitive operation
| If (LExpr id ty) (LExpr id ty) (LExpr id ty) -- If expression
| Let RecFlag [Bind id ty] (LExpr id ty) -- Let (rec) ... (and) ... in ...
| LetOut -- Post typecheck only
RecFlag
[(Name, Type, LExpr (Name,Type) Type)]
(LExpr (Name,Type) Type)
| Dot (LExpr id ty) Name (Maybe ([LExpr id ty], UnitPossibility))
-- The flag `UnitPossibility` is only used when length of the argument list is
-- 0, to distinguish the different possible interpretations of `e.x ( )` and
-- `e.x ()` -- the latter can be an application (of unit literal to a record
-- elim), while the former cannot.
| JNew ClassName [LExpr id ty]
| JMethod (JCallee (LExpr id ty)) MethodName [LExpr id ty] ClassName
| JField (JCallee (LExpr id ty)) FieldName ClassName
| Seq [LExpr id ty]
| PolyList [LExpr id ty] ty
| Merge (LExpr id ty) (LExpr id ty)
| RecordCon [(Label, LExpr id ty)]
| RecordProj (LExpr id ty) Label
| RecordUpdate (LExpr id ty) [(Label, LExpr id ty)]
| LetModule (Module id ty) (LExpr id ty)
| ModuleAccess Name Name
| Type -- type T A1 .. An = t in e
Name -- T -- Name of type constructor
[Name] -- A1 ... An -- Type parameters
Type -- t -- RHS of the equal sign
(LExpr id ty) -- e -- The rest of the expression
| Data RecFlag [DataBind] (LExpr id ty)
| Case (LExpr id ty) [Alt id ty]
| CaseString (LExpr id ty) [Alt id ty] --pattern match on string
| ConstrTemp Name
| Constr Constructor [LExpr id ty] -- post typecheck only
| JProxyCall (LExpr id ty) ty
| Premise (LExpr id ty) (LExpr id ty)
deriving (Eq, Show)
data DataBind = DataBind Name [Name] [Constructor] deriving (Eq, Show)
data Constructor = Constructor {constrName :: Name, constrParams :: [Type]}
deriving (Eq, Show)
data Alt id ty = ConstrAlt Constructor [Name] (LExpr id ty)
-- | Default (Expr id)
deriving (Eq, Show)
-- type RdrExpr = Expr Name
type ReaderExpr = LExpr Name Type
type CheckedExpr = LExpr CheckedId Type
-- type TcExpr = Expr TcId
-- type TcBinds = [(Name, Type, Expr TcId)] -- f1 : t1 = e1 and ... and fn : tn = en
data Lit -- Data constructor names match Haskell types
= Int Integer
| String String
| Bool Bool
| Char Char
| UnitLit
deriving (Eq, Show)
data Operator = Arith J.Op | Compare J.Op | Logic J.Op deriving (Eq, Show)
data Bind id ty = Bind
{ bindId :: id -- Identifier
, bindTyParams :: [Name] -- Type arguments
, bindParams :: [(Name, Type)] -- Arguments, each annotated with a type
, bindRhs :: LExpr id ty -- RHS to the "="
, bindRhsTyAscription :: Maybe Type -- Type annotation for the RHS
} deriving (Eq, Show)
type ReaderBind = Bind Name Type
data RecFlag = Rec | NonRec deriving (Eq, Show)
data UnitPossibility = UnitPossible | UnitImpossible deriving (Eq, Show)
data JCallee e = Static ClassName | NonStatic e deriving (Eq, Show)
instance Functor JCallee where
fmap _ (Static c) = Static c
fmap f (NonStatic e) = NonStatic (f e)
-- Type and value contexts
-- `TypeValue` is what's put inside a type context.
data TypeValue
= TerminalType -- Terminal types, e.g., the `a` of `forall a. `
| NonTerminalType Type
  -- Non-terminal types, i.e. type synonyms. `Type` holds the RHS to the
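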
-- equal sign of type synonym definitions.
type TypeContext = Map.Map ReaderId (Kind, TypeValue) -- Delta
type ValueContext = Map.Map ReaderId Type -- Gamma
-- | Recursively expand all type synonyms. The given type must be well-kinded.
-- Used in `compatible` and `subtype`.
expandType :: TypeContext -> Type -> Type
-- Interesting cases:
expandType d (TVar a)
= case Map.lookup a d of
Nothing -> prettyPanic "expandType" (pretty (TVar a))
Just (_, TerminalType) -> TVar a
Just (_, NonTerminalType def) -> expandType d def
expandType d (OpAbs x t) = OpAbs x (expandType (Map.insert x (Star, TerminalType) d) t)
expandType d (OpApp t1 t2)
= let t1' = expandType d t1
t2' = expandType d t2
in
case t1' of
OpAbs x t -> fsubstTT (x,t2') t
-- Uninteresting cases:
expandType _ (JType t) = JType t
expandType _ Unit = Unit
expandType d (Fun t1 t2) = Fun (expandType d t1) (expandType d t2)
expandType d (Forall a t) = Forall a (expandType (Map.insert a (Star, TerminalType) d) t)
expandType d (Product ts) = Product (map (expandType d) ts)
expandType d (RecordType fs) = RecordType (map (second (expandType d)) fs)
expandType d (ListOf t) = ListOf (expandType d t)
expandType d (And t1 t2) = And (expandType d t1) (expandType d t2)
expandType d (Thunk t) = Thunk (expandType d t)
expandType d (Datatype n ts ns) = Datatype n (map (expandType d) ts) ns
deThunkOnce :: Type -> Type
deThunkOnce (Thunk t) = t
deThunkOnce t = t
-- Relations between types
-- | Subtyping (<:) is defined only between types of kind *.
subtype :: TypeContext -> Type -> Type -> Bool
subtype d t1 t2 = subtypeS (expandType d t1) (expandType d t2)
-- | Subtyping of two *expanded* types.
subtypeS :: Type -> Type -> Bool
subtypeS t1 (Thunk t2) = subtypeS t1 t2
subtypeS (Thunk t1) t2 = subtypeS t1 t2
subtypeS (TVar a) (TVar b) = a == b
subtypeS (JType c) (JType d) = c == d
-- The subtypeS here shouldn't be aware of the subtyping relations in the Java world.
subtypeS (Fun t1 t2) (Fun t3 t4) = subtypeS t3 t1 && subtypeS t2 t4
subtypeS (Forall a1 t1) (Forall a2 t2) = subtypeS (fsubstTT (a1,TVar a2) t1) t2
subtypeS (Product ts1) (Product ts2) = length ts1 == length ts2 && uncurry subtypeS `all` zip ts1 ts2
subtypeS (RecordType [(l1,t1)]) (RecordType [(l2,t2)]) = l1 == l2 && subtypeS t1 t2
subtypeS (RecordType fs1) (RecordType fs2) = subtypeS (desugarMultiRecordType fs1) (desugarMultiRecordType fs2)
subtypeS (ListOf t1) (ListOf t2) = subtypeS t1 t2 -- List :: * -> * is covariant
-- The order is significant for the two `And` cases below.
subtypeS t1 (And t2 t3) = subtypeS t1 t2 && subtypeS t1 t3
subtypeS (And t1 t2) t3 = subtypeS t1 t3 || subtypeS t2 t3
subtypeS Unit Unit = True
subtypeS (Datatype n1 ts1 m1) (Datatype n2 ts2 m2) =
n1 == n2 && m1 == m2 && length ts1 == length ts2 && uncurry subtypeS `all` zip ts1 ts2
subtypeS t1 t2 = False `panicOnSameDataCons` ("Src.subtypeS", t1, t2)
-- | Two types are called "compatible" iff they are subtype of each other.
compatible :: TypeContext -> Type -> Type -> Bool
compatible d t1 t2 = subtype d t1' t2' && subtype d t2' t1'
where
t1' = expandType d t1
t2' = expandType d t2
-- | Computes the least upper bound of two types.
leastUpperBound :: TypeContext -> Type -> Type -> Maybe Type
leastUpperBound d t1 t2
| subtype d t1 t2 = Just t2
| subtype d t2 t1 = Just t1
| otherwise = Nothing
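-- Illustrative only, with an empty type context:
--   compatible Map.empty Unit Unit                          == True
--   subtype Map.empty (And Unit (JType (JClass "X"))) Unit  == True
--   leastUpperBound Map.empty Unit Unit                     == Just Unit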
-- Records
-- TODO: refactor the following two functions
desugarRecordType :: Type -> Type
desugarRecordType (RecordType [(l,t)]) = RecordType [(l,t)]
desugarRecordType (RecordType fs) = desugarMultiRecordType fs
desugarRecordType (And t1 t2) = And (desugarRecordType t1) (desugarRecordType t2)
desugarRecordType t = t
-- FIXME: incomplete cases
desugarMultiRecordType :: [(Label,Type)] -> Type
desugarMultiRecordType [] = panic "Src.desugarMultiRecordTy"
desugarMultiRecordType [(l,t)] = RecordType [(l,t)]
desugarMultiRecordType ((l,t):fs) = RecordType [(l,t)] `And` desugarMultiRecordType fs
-- | Returns the record fields of a type. Note that a type does not have to be a
-- record by itself in order for it to have fields. (See the second example
-- below.)
-- Examples (in pseudo-code):
-- recordFields(String) = {}
-- recordFields(String&{name:String, age:Int}) = {"name" => String, "age" => Int}
recordFields :: Type -> Map.Map Label Type
recordFields (RecordType fs) =
case intersectionBias of
-- `Map.fromList` is right-biased.
-- For example:
-- ghci> Map.fromList [(1,"one"),(1,"yat")]
-- fromList [(1,"yat")]
LeftBiased -> Map.fromList (reverse fs)
RightBiased -> Map.fromList fs
recordFields (And t1 t2) =
case intersectionBias of
-- But `Map.union` is left-biased.
-- For example:
-- ghci> Map.fromList [(1,"one")] `Map.union` Map.fromList [(1,"yat")]
-- fromList [(1,"one")]
LeftBiased -> recordFields t1 `Map.union` recordFields t2
RightBiased -> recordFields t2 `Map.union` recordFields t1
recordFields (Thunk t) = recordFields t
recordFields _ = Map.empty
-- Free variable substitution
fsubstTT :: (Name, Type) -> Type -> Type
fsubstTT (x,r) (TVar a)
| a == x = r
| otherwise = TVar a
-- fsubstTT (_,_) (JClass c ) = JClass c
fsubstTT (_,_) (JType c) = JType c
fsubstTT (x,r) (Fun t1 t2) = Fun (fsubstTT (x,r) t1) (fsubstTT (x,r) t2)
fsubstTT (x,r) (Product ts) = Product (map (fsubstTT (x,r)) ts)
fsubstTT (x,r) (Forall a t)
| a == x || a `Set.member` freeTVars r = -- The freshness condition, crucial!
let fresh = freshName a (freeTVars t `Set.union` freeTVars r)
in Forall fresh (fsubstTT (x,r) (fsubstTT (a, TVar fresh) t))
| otherwise = Forall a (fsubstTT (x,r) t)
fsubstTT (x,r) (ListOf a) = ListOf (fsubstTT (x,r) a)
fsubstTT (_,_) Unit = Unit
fsubstTT (x,r) (RecordType fs) = RecordType (map (second (fsubstTT (x,r))) fs)
fsubstTT (x,r) (And t1 t2) = And (fsubstTT (x,r) t1) (fsubstTT (x,r) t2)
fsubstTT (x,r) (Thunk t1) = Thunk (fsubstTT (x,r) t1)
fsubstTT (x,r) (OpAbs a t)
| a == x || a `Set.member` freeTVars r = -- The freshness condition, crucial!
let fresh = freshName a (freeTVars t `Set.union` freeTVars r)
in OpAbs fresh (fsubstTT (x,r) (fsubstTT (a, TVar fresh) t))
| otherwise = OpAbs a (fsubstTT (x,r) t)
fsubstTT (x,r) (OpApp t1 t2) = OpApp (fsubstTT (x,r) t1) (fsubstTT (x,r) t2)
fsubstTT (x,r) (Datatype n ts ns) = Datatype n (map (fsubstTT (x,r)) ts) ns
freshName :: Name -> Set.Set Name -> Name
freshName name existedNames = head $ dropWhile (`Set.member` existedNames) [name ++ show i | i <- [1..]]
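-- For example (illustrative): freshName "a" (Set.fromList ["a1","a3"]) == "a2"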
freeTVars :: Type -> Set.Set Name
freeTVars (TVar x) = Set.singleton x
-- freeTVars (JClass _) = Set.empty
freeTVars (JType _) = Set.empty
freeTVars Unit = Set.empty
freeTVars (Fun t1 t2) = freeTVars t1 `Set.union` freeTVars t2
freeTVars (Forall a t) = Set.delete a (freeTVars t)
freeTVars (Product ts) = Set.unions (map freeTVars ts)
freeTVars (RecordType fs) = Set.unions (map (\(_l,t) -> freeTVars t) fs)
freeTVars (ListOf t) = freeTVars t
freeTVars (And t1 t2) = Set.union (freeTVars t1) (freeTVars t2)
freeTVars (Thunk t) = freeTVars t
freeTVars (OpAbs _ t) = freeTVars t
freeTVars (OpApp t1 t2) = Set.union (freeTVars t1) (freeTVars t2)
freeTVars (Datatype _ ts _) = Set.unions (map freeTVars ts)
-- Pretty printers
instance Pretty Kind where
pretty Star = char '*'
pretty (KArrow k1 k2) = parens (pretty k1 <+> text "=>" <+> pretty k2)
instance Pretty Type where
pretty (TVar a) = text a
pretty (JType (JClass "java.lang.Integer")) = text "Int"
pretty (JType (JClass "java.lang.String")) = text "String"
pretty (JType (JClass "java.lang.Boolean")) = text "Bool"
pretty (JType (JClass "java.lang.Character")) = text "Char"
pretty (JType (JClass c)) = text c
pretty (JType (JPrim c)) = text c
pretty Unit = text "Unit"
pretty (Fun t1 t2) = parens $ pretty t1 <+> text "->" <+> pretty t2
pretty (Forall a t) = parens $ forall <+> hsep (map text as) <> dot <+> pretty t' where (as, t') = groupForall (Forall a t)
pretty (Product ts) = lparen <> hcat (intersperse comma (map pretty ts)) <> rparen
pretty (And t1 t2) = pretty t1 <> text "&" <> pretty t2
pretty (RecordType fs) = lbrace <> hcat (intersperse comma (map (\(l,t) -> text l <> colon <> pretty t) fs)) <> rbrace
pretty (Thunk t) = squote <> parens (pretty t)
pretty (OpAbs x t) = backslash <> text x <> dot <+> pretty t
pretty (OpApp t1 t2) = parens (pretty t1 <+> pretty t2)
pretty (ListOf a) = brackets $ pretty a
pretty (Datatype n ts _) = hsep (text n : map pretty ts)
groupForall :: Type -> ([Name], Type)
groupForall (Forall a t) = let (as, t') = groupForall t in (a:as, t')
groupForall t = ([], t)
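-- For example (illustrative):
--   groupForall (Forall "a" (Forall "b" (Fun (TVar "a") (TVar "b"))))
--     == (["a","b"], Fun (TVar "a") (TVar "b"))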
instance (Show id, Pretty id, Show ty, Pretty ty) => Pretty (Located (Expr id ty)) where
pretty (L _ e) = pretty e
instance (Show id, Pretty id, Show ty, Pretty ty) => Pretty (Expr id ty) where
pretty (Var x) = pretty x
pretty (Lit (Int n)) = integer n
pretty (Lit (String n)) = string n
pretty (Lit (Bool n)) = bool n
pretty (Lit (Char n)) = char n
pretty (Lit UnitLit) = unit
pretty (BLam a e) = parens $ text "/\\" <> text a <> dot <+> pretty e
pretty (Lam (x,t) e) =
parens $
backslash <> parens (pretty x <+> colon <+> pretty t) <> dot <+>
pretty e
pretty (TApp e t) = parens $ pretty e <+> pretty t
pretty (App e1 e2) = parens $ pretty e1 <+> pretty e2
pretty (Tuple es) = lparen <> (hcat . intersperse comma $ map pretty es) <> rparen
pretty (Proj e i) = parens (pretty e) <> text "._" <> int i
pretty (PrimOp e1 op e2) = parens $
parens (pretty e1) <+>
text (show op) <+>
-- text (P.prettyPrint op) <+>
parens (pretty e2)
pretty (If e1 e2 e3) = parens $
text "if" <+> pretty e1 <+>
text "then" <+> pretty e2 <+>
text "else" <+> pretty e3
pretty (Let recFlag bs e) =
text "let" <+> pretty recFlag <+>
encloseSep empty empty (softline <> text "and" <> space) (map pretty bs) <+>
text "in" <+>
pretty e
pretty (LetOut recFlag bs e) =
text "let" <+> pretty recFlag <+>
encloseSep empty empty (softline <> text "and" <> space)
(map (\(f1,t1,e1) -> text f1 <+> colon <+> pretty t1 <+> equals <+> pretty e1) bs) <+>
text "in" <+>
pretty e
pretty (JNew c args) = text "new" <+> text c <> tupled (map pretty args)
pretty (JMethod e m args _) = case e of (Static c) -> pretty c <> dot <> text m <> tupled (map pretty args)
(NonStatic e') -> pretty e' <> dot <> text m <> tupled (map pretty args)
pretty (JField e f _) = case e of (Static c) -> pretty c <> dot <> text f
(NonStatic e') -> pretty e' <> dot <> text f
pretty (PolyList l _) = brackets . hcat . intersperse comma $ map pretty l
pretty (JProxyCall jmethod _) = pretty jmethod
pretty (Merge e1 e2) = parens $ pretty e1 <+> text ",," <+> pretty e2
pretty (RecordCon fs) = lbrace <> hcat (intersperse comma (map (\(l,t) -> text l <> equals <> pretty t) fs)) <> rbrace
pretty (Data recFlag datatypes e ) =
text "data" <+> pretty recFlag <+>
(vsep $ map pretty datatypes) <$>
pretty e
pretty (Case e alts) = hang 2 (text "case" <+> pretty e <+> text "of" <$> text " " <+> intersperseBar (map pretty alts))
pretty (CaseString e alts) = hang 2 (text "case" <+> pretty e <+> text "of" <$> text " " <+> intersperseBar (map pretty alts))
pretty (Constr c []) = text (constrName c)
pretty (Constr c es) = parens $ hsep $ text (constrName c) : map pretty es
pretty (Premise e1 e2) = pretty e1 <+> text "==>" <+> pretty e2
pretty e = text (show e)
instance (Show id, Pretty id, Show ty, Pretty ty) => Pretty (Bind id ty) where
pretty Bind{..} =
pretty bindId <+>
hsep (map pretty bindTyParams) <+>
hsep (map (\(x,t) -> parens (pretty x <+> colon <+> pretty t)) bindParams) <+>
case bindRhsTyAscription of { Nothing -> empty; Just t -> colon <+> pretty t } <+>
equals <+>
pretty bindRhs
instance Pretty RecFlag where
pretty Rec = text "rec"
pretty NonRec = empty
instance Pretty Constructor where
pretty (Constructor n ts) = hsep $ text n : map pretty ts
instance Pretty DataBind where
pretty (DataBind n tvars cons) = hsep (map text $ n:tvars) <+> align (equals <+> intersperseBar (map pretty cons) <$$> semi)
instance (Show id, Pretty id, Show ty, Pretty ty) => Pretty (Alt id ty) where
pretty (ConstrAlt c ns e2) = hsep (text (constrName c) : map text ns) <+> arrow <+> pretty e2
-- pretty (Default e) = text "_" <+> arrow <+> pretty e
-- Utilities
wrap :: (b -> a -> a) -> [b] -> a -> a
wrap cons xs t = foldr cons t xs
-- Precedence of operators based on the table in:
-- http://en.wikipedia.org/wiki/Order_of_operations#Programming_languages
opPrec :: Num a => Operator -> a
opPrec (Arith J.Mult) = 3
opPrec (Arith J.Div) = 3
opPrec (Arith J.Rem) = 3
opPrec (Arith J.Add) = 4
opPrec (Arith J.Sub) = 4
opPrec (Compare J.LThan) = 6
opPrec (Compare J.GThan) = 6
opPrec (Compare J.LThanE) = 6
opPrec (Compare J.GThanE) = 6
opPrec (Compare J.Equal) = 7
opPrec (Compare J.NotEq) = 7
opPrec (Logic J.CAnd) = 11
opPrec (Logic J.COr) = 12
|
wxzh/fcore
|
lib/Src.hs
|
bsd-2-clause
| 20,531
| 0
| 19
| 5,278
| 7,477
| 3,884
| 3,593
| 367
| 3
|
module Level where
import Control.Monad
import Data.Array
import Data.List
import Vector
data Level = Level
{ levelName :: String
, levelSize :: (Int,Int)
, legalMoves :: Array (Int,Int) [Direction]
, doors :: [(Direction,Int)]
, entrances :: [(Direction,Int,Int)]
} deriving Show
data Direction = North | East | South | West
deriving (Eq, Enum, Show)
fieldSize :: Int
fieldSize = 16
fieldMid :: Int
fieldMid = fieldSize `div` 2
fieldPos :: Vec -> (Int,Int)
fieldPos (V x y) = ((x+fieldMid) `div` fieldSize, (y+fieldMid) `div` fieldSize)
fieldSub :: Vec -> (Int,Int)
fieldSub (V x y) = (mkSub x, mkSub y)
where mkSub c = let s = c `mod` fieldSize in if s >= fieldMid then s-fieldSize else s
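-- Illustrative only, with fieldSize = 16 (so fieldMid = 8):
--   fieldPos (V 23 0) == (1,0)  -- the field whose centre (16,0) is nearest
--   fieldSub (V 23 0) == (7,0)  -- signed offset from that centre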
atFieldMid :: (Int,Int) -> Direction -> Bool
atFieldMid (sx,sy) dir = 0 == if isVertical dir then sx else sy
halfway :: (Int,Int) -> Direction -> Bool
halfway (sx,sy) dir = 0 /= if isVertical dir then sy else sx
dirFromInt :: Int -> Direction
dirFromInt = toEnum . (`mod` 4)
turnRight :: Direction -> Direction
turnRight = dirFromInt . (+1) . fromEnum
turnBack :: Direction -> Direction
turnBack = dirFromInt . (+2) . fromEnum
turnLeft :: Direction -> Direction
turnLeft = dirFromInt . (+3) . fromEnum
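-- Illustrative only: with the Enum order North, East, South, West,
--   turnRight North == East, turnBack North == South, turnLeft North == West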
isVertical :: Direction -> Bool
isVertical North = True
isVertical South = True
isVertical _ = False
parseLevel :: [String] -> Maybe (Level,[String])
parseLevel lines = do
let lines' = dropWhile null lines
nameLine = head lines'
(mazeLines,rest) = span (not . null) (tail lines')
width = length (head mazeLines) `div` 2
height = length mazeLines `div` 2
mkEntrance (North,x) = (South,-1,x)
mkEntrance (South,x) = (North,height,x)
mkEntrance (East,y) = (West,y,width)
mkEntrance (West,y) = (East,y,-1)
makeMoves lines = concat . odds $ zipWith3 f lines (tail lines) (tail (tail lines))
where f n c s = odds $ zipWith4 f' (tail n) (tail s) c (tail (tail c))
f' n s w e = (if n == ' ' then [North] else []) ++
(if s == ' ' then [South] else []) ++
(if w == ' ' then [West] else []) ++
(if e == ' ' then [East] else [])
odds (x:_:xs) = x : odds xs
odds xs = xs
chars `onEdgeOf` lines =
[(d,i `div` 2) |
(d,f) <- [(North,head),(South,last),(West,map head),(East,map last)],
i <- findIndices (`elem` chars) (f lines)]
guard $ not (null lines')
guard $ length mazeLines > 2
guard $ head nameLine == '"'
return (Level { levelName = takeWhile (/= '"') (tail nameLine)
, levelSize = (width,height)
, legalMoves = listArray ((0,0),(height-1,width-1)) (makeMoves mazeLines)
, doors = "<>^v" `onEdgeOf` mazeLines
, entrances = map mkEntrance ("*" `onEdgeOf` mazeLines)
}, rest)
|
cobbpg/dow
|
src/Level.hs
|
bsd-3-clause
| 2,970
| 0
| 17
| 853
| 1,271
| 719
| 552
| 71
| 9
|
{-# LANGUAGE ExistentialQuantification #-}
module Data.IORef.WriteOnly
( WriteOnlyIORef
, toWriteOnlyIORef
) where
import Data.Functor.Contravariant
import Data.IORef (IORef)
import Data.IORef.Class
data WriteOnlyIORef a = forall b . WriteOnlyIORef (a -> b) (IORef b)
instance Contravariant WriteOnlyIORef where
contramap f (WriteOnlyIORef f' ref) = WriteOnlyIORef (f' . f) ref
toWriteOnlyIORef :: IORef a -> WriteOnlyIORef a
toWriteOnlyIORef = WriteOnlyIORef id
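-- Illustrative sketch (not in the original module): given ref :: IORef String,
--   contramap show (toWriteOnlyIORef ref)
-- is a write-only reference accepting any Show-able value; each write is
-- rendered with 'show' before landing in the underlying IORef.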
instance IORefWrite WriteOnlyIORef where
writeIORef (WriteOnlyIORef f ref) a = writeIORef ref (f a)
{-# INLINE writeIORef #-}
atomicWriteIORef (WriteOnlyIORef f ref) a = atomicWriteIORef ref (f a)
{-# INLINE atomicWriteIORef #-}
|
osa1/privileged-concurrency
|
Data/IORef/WriteOnly.hs
|
bsd-3-clause
| 715
| 0
| 8
| 113
| 197
| 106
| 91
| 17
| 1
|
{-# LANGUAGE PatternGuards, ScopedTypeVariables, TupleSections #-}
module Text.Themplates
( Chunk
, parseSplices, substSplices
-- * Parsec parsers
, spliceParser, curlySplice, thSplice, nestParser, escapeParser, delimParser
-- * Misc Utilities
, dedentQuote, generateNames
) where
import Control.Applicative ( (<$>), (<*>) )
import Control.Monad.Trans.Either ( EitherT )
import Data.Char ( isSpace )
import Data.Generics ( Data, extT, everywhere' )
import Data.List ( isPrefixOf, stripPrefix, tails )
import qualified Data.Map as M
import Data.Maybe ( maybeToList )
import Text.Parsec
( Parsec, parse, try, eof, anyToken, noneOf, char, string, choice, (<|>)
, lookAhead, anyChar, manyTill )
type Chunk c s = Either c s
substSplices :: forall t s e m r. (Monad m, Data r, Ord r)
=> (s -> [t])
-> ([t] -> EitherT e m r)
-> (s -> EitherT e m [(r, r)])
-> [Chunk [t] s]
-> EitherT e m r
substSplices placeholder parser subst xs = do
subs <- sequence [subst s | Right s <- xs]
let subs_map = M.fromList $ concat subs
do_subst :: r -> r
do_subst x
| Just r <- M.lookup x subs_map = r
| otherwise = x
parsed <- parser
$ concatMap (either id placeholder) xs
return $ everywhere' (id `extT` do_subst) parsed
-- Utilities for parsing spliced stuff.
parseSplices :: forall t s. Show t
=> Parsec [t] () s
-> [t]
-> Either String [Chunk [t] s]
parseSplices splice =
either (Left . show) Right . parse (spliceParser splice) ""
spliceParser :: forall t s. Show t
=> Parsec [t] () s
-> Parsec [t] () [Chunk [t] s]
spliceParser parse_splice = do
s <- (Right <$> try parse_splice)
<|> (Left . (:[]) <$> anyToken)
<|> (eof >> return (Left []))
case s of
c@(Left []) -> return [c]
_ -> do
rest <- spliceParser parse_splice
case (s, rest) of
(Left [c], Left acc:rs) -> return $ Left (c:acc) : rs
_ -> return $ s : rest
-- The same splice style as the old ast quoters.
-- TODO: Make the quasi-quoter configurable to use this.
thSplice :: Parsec String () (Maybe String, String)
thSplice = do
_ <- try $ string "$("
fancySplice (concat <$> nestParser (delimParser '(' ')')
[try $ char ')' >> return ""])
-- To be passed as the first parameter to parseSplices or spliceParser.
curlySplice :: Parsec String () (Maybe String, String)
curlySplice = do
_ <- try $ string "{{"
fancySplice (concat <$> nestParser (delimParser '{' '}')
[try $ string "}}" >> return ""])
fancySplice :: Parsec String () s
-> Parsec String () (Maybe String, s)
fancySplice code_parser = do
c <- lookAhead anyChar
case c of
'<' -> do
_ <- char '<'
splice <- manyTill
(escapeParser '\\' [('>', '>'), ('\\', '\\')])
(char '>')
code <- code_parser
return (Just splice, code)
_ -> do
code <- code_parser
return (Nothing, code)
{-
parseList :: Parsec String () (Either String (String, String, String))
parseList = do
input <- getInput
(try $ do
prefix <- manyTill anyChar (lookAhead ((string "..." >> return ())
<|> eof))
string "..."
rest <- many (noneOf " ")
return $ Right (prefix, rest)
) <|> (many anyChar >> return (Left input))
-}
nestParser
:: forall t r. Show t
=> Parsec [t] () (r, Maybe (Parsec [t] () r))
-> [Parsec [t] () r]
-> Parsec [t] () [r]
nestParser open closers = case closers of
[] -> return []
(close:cs)
-> ((:) <$> close <*> nestParser open cs)
<|> (open >>= \(x, c) -> (x:) <$> nestParser open (maybeToList c ++ closers))
<|> return []
escapeParser :: Char -> [(Char, Char)] -> Parsec String () Char
escapeParser c xs =
(char c >> choice (map escape xs)) <|> noneOf [c]
where
escape (code, repl) = char code >> return repl
delimParser :: Char -> Char
-> Parsec String () (String, Maybe (Parsec String () String))
delimParser start end = do
r <- try (string [start]) <|> ((:[]) <$> noneOf [end])
return (r, if r == [start] then Just (try $ string [end]) else Nothing)
-- | Dedents a qq based on the indentation of the first line that has content.
-- As a safety measure, if any of the lines have content but not the same
-- indentation, then an error is yielded.
dedentQuote :: String -> String -> Either String String
dedentQuote qq code =
case lines code of
(l0:l1:ls)
| all isSpace l0 -> dedent $ zip ixs (l1:ls)
| otherwise -> dedent $ zip ixs (l0':l1:ls)
where
l0' = replicate (length qq + 2) ' ' ++ l0
_ -> Right $ dropWhile isSpace code
where
ixs :: [Int]
ixs = [1..]
dedent [] = Right ""
dedent ls@((_, l):_) = unlines <$> mapM removePrefix ls
where
prefix = takeWhile isSpace l
removePrefix (ix, x)
| all isSpace x = Right x
| Just x' <- stripPrefix prefix x = Right x'
| otherwise = Left $ unwords
[ "While dedenting a quote, line "
, show ix
, " had less indentation, or used tabs."
]
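-- Illustrative only (not from the original source):
--   dedentQuote "str" "\n  foo\n  bar\n" == Right "foo\nbar\n"
-- whereas a line with shallower indentation yields a Left naming the
-- offending line number.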
generateNames :: String -> String -> [String]
generateNames prefix input =
[ prefix ++ s
| s <- map show [(0::Int)..]
, all (not . isPrefixOf s) avoids
]
where
avoids = [ drop (length prefix) t
| t <- tails input
, prefix `isPrefixOf` t
]
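-- Illustrative only: take 2 (generateNames "x" "use x0 here") == ["x1","x2"]
-- ("x0" is skipped because "0" already follows the prefix in the input).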
|
mgsloan/themplates
|
src/Text/Themplates.hs
|
bsd-3-clause
| 5,685
| 0
| 19
| 1,760
| 2,003
| 1,047
| 956
| 126
| 3
|
module Tinc.SandboxSpec where
import Helper
import System.Directory
import System.FilePath
import System.IO.Temp
import Tinc.Sandbox
import Tinc.Types
spec :: Spec
spec = do
describe "findPackageDb" $ do
it "finds the sandbox package db" $ do
withSystemTempDirectory "tinc" $ \ sandbox -> do
let packageDb = sandbox </> ".cabal-sandbox/x86_64-linux-ghc-7.8.4-packages.conf.d"
createDirectoryIfMissing True packageDb
findPackageDb (Path sandbox) `shouldReturn` (Path packageDb)
context "when sandbox does not contain a package db" $ do
it "throws an exception" $ do
withSystemTempDirectory "tinc" $ \ sandbox -> do
let p = sandbox </> ".cabal-sandbox"
createDirectory p
findPackageDb (Path sandbox) `shouldThrow` errorCall ("src/Tinc/Sandbox.hs: No package database found in " ++ show p)
|
beni55/tinc
|
test/Tinc/SandboxSpec.hs
|
bsd-3-clause
| 877
| 0
| 24
| 192
| 210
| 102
| 108
| 21
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Y.Frontend where
import Control.Lens.TH
import qualified FRP.Sodium as Sodium
import Y.Common
data Frontend = Frontend
{ _feInputEvent :: Sodium.Event InputOccurrence
, _feMainLoop :: Sodium.Event CoreOutput -> IO ()
}
makeLenses ''Frontend
|
ethercrow/y
|
src/Y/Frontend.hs
|
bsd-3-clause
| 298
| 0
| 11
| 53
| 74
| 42
| 32
| 9
| 0
|
module Day7.Test where
import Day7
import Test.Hspec
tests :: SpecWith ()
tests = do
describe "Part1" $ do
it "Test1" $ do
part1 test1 `shouldBe` [True, False, False, True]
describe "Part2" $ do
it "Test1" $ do
part2 test2 `shouldBe` [True, False, True, True]
|
z0isch/aoc2016
|
test/Day7/Test.hs
|
bsd-3-clause
| 306
| 0
| 14
| 90
| 117
| 61
| 56
| 11
| 1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Dimensions.FI
( allDimensions
) where
import Duckling.Dimensions.Types
allDimensions :: [Seal Dimension]
allDimensions =
[ Seal Numeral
]
|
facebookincubator/duckling
|
Duckling/Dimensions/FI.hs
|
bsd-3-clause
| 371
| 0
| 6
| 65
| 45
| 29
| 16
| 6
| 1
|
module Main where
import Interval (intervalP, showInterval, joinIntervals)
import Text.Parsec.Prim (parse)
import Text.Parsec.Combinator (endBy)
import MyParser (eol)
main = do
contents <- getContents
case parse (endBy intervalP eol) "" contents of
Right intervals -> do
mapM_ (putStrLn . showInterval) $ joinIntervals intervals
Left err -> putStrLn $ show err
|
wkoiking/intervals
|
app/Main.hs
|
bsd-3-clause
| 402
| 0
| 15
| 90
| 127
| 67
| 60
| 11
| 2
|
{-# LANGUAGE ExistentialQuantification #-}
module Numeral3 where
import Domains.Ring
data Numeral3 =
forall a. Numeral3 ((a -> a) -> (a -> a))
--instance Ring Numeral3
-- Numeral3 n1 + Numeral3 n2 = Numeral3 (\f x -> (n1 f (n2 f x)))
-- Numeral3 n1 * Numeral3 n2 = Numeral3 (\f -> (n1 (n2 f)))
|
pmilne/algebra
|
src/Data/Numeral/Numeral3.hs
|
bsd-3-clause
| 316
| 0
| 10
| 76
| 48
| 30
| 18
| 5
| 0
|
module Network.QUIC.Internal (
getIntN
, getInt8
, getInt16
, getInt24
, getInt32
, getInt48
, getInt56
, getInt64
) where
import Data.Int(Int8)
import Data.Bits
import Data.Binary
import qualified Data.Binary.Get as Get
getIntN :: Int -> Get Int
getIntN 8 = getInt8
getIntN 16 = getInt16
getIntN 24 = getInt24
getIntN 32 = getInt32
getIntN 48 = getInt48
getIntN 56 = getInt56
getIntN 64 = getInt64
getIntN _ = error "unsupported number of bits"
getInt8 :: Get Int
getInt8 = getIntNbyte 1
getInt16 :: Get Int
getInt16 = getIntNbyte 2
getInt24 :: Get Int
getInt24 = getIntNbyte 3
getInt32 :: Get Int
getInt32 = getIntNbyte 4
getInt48 :: Get Int
getInt48 = getIntNbyte 6
getInt56 :: Get Int
getInt56 = getIntNbyte 7
getInt64 :: Get Int
getInt64 = getIntNbyte 8
getIntNbyte :: Int -> Get Int
getIntNbyte 0 = return 0
getIntNbyte n = foldl f 0 <$> list
where
f :: Int -> (Int8, Int) -> Int
    f n (x,i) = n + shiftL (toInt x) (i * 8)
list :: Get [(Int8, Int)]
list = (\xs -> zip xs [0..]) <$> (sequence $ replicate n Get.getInt8)
toInt = fromIntegral . toInteger
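-- Illustrative reading, assuming the least-significant-byte-first assembly in
-- getIntNbyte above: running (getIntN 16) over the bytes 0x01 0x02 (e.g. via
-- Data.Binary.Get.runGet on a lazy ByteString) yields 0x0201.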
|
keizo042/quic-prototype
|
Network/QUIC/Internal.hs
|
bsd-3-clause
| 1,106
| 0
| 12
| 249
| 419
| 224
| 195
| 44
| 1
|
import Tests.MsgPack
import Test.Framework
main :: IO ()
main = defaultMain
[ msgPackTests
]
|
GaloisInc/msf-haskell
|
tests/Tests.hs
|
bsd-3-clause
| 101
| 0
| 6
| 22
| 32
| 17
| 15
| 5
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Cheapskate.Util (
joinLines
, tabFilter
, isWhitespace
, isEscapable
, normalizeReference
, Scanner
, scanIndentSpace
, scanNonindentSpace
, scanSpacesToColumn
, scanChar
, scanBlankline
, scanSpaces
, scanSpnl
, nfb
, nfbChar
, upToCountChars
) where
import Data.Text (Text)
import qualified Data.Text as T
import Data.Char
import Control.Applicative
import Cheapskate.ParserCombinators
-- Utility functions.
-- Like T.unlines but does not add a final newline.
-- Concatenates lines with newlines between.
joinLines :: [Text] -> Text
joinLines = T.intercalate "\n"
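-- For example (illustrative): joinLines ["a","b"] == "a\nb", with no trailing
-- newline.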
-- Convert tabs to spaces using a 4-space tab stop.
tabFilter :: Text -> Text
tabFilter = T.concat . pad . T.split (== '\t')
where pad [] = []
pad [t] = [t]
pad (t:ts) = let tl = T.length t
n = tl + 4 - (tl `mod` 4)
in T.justifyLeft n ' ' t : pad ts
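-- For example (illustrative): tabFilter "a\tbc" == "a   bc"
-- (the tab expands to the next multiple-of-4 column).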
-- These are the whitespace characters that are significant in
-- parsing markdown. We can treat \160 (nonbreaking space) etc.
-- as regular characters. This function should be considerably
-- faster than the unicode-aware isSpace from Data.Char.
isWhitespace :: Char -> Bool
isWhitespace ' ' = True
isWhitespace '\t' = True
isWhitespace '\n' = True
isWhitespace '\r' = True
isWhitespace _ = False
-- The original Markdown only allowed certain symbols
-- to be backslash-escaped. It was hard to remember
-- which ones could be, so we now allow any ascii punctuation mark or
-- symbol to be escaped, whether or not it has a use in Markdown.
isEscapable :: Char -> Bool
isEscapable c = isAscii c && (isSymbol c || isPunctuation c)
-- Link references are case insensitive and ignore line breaks
-- and repeated spaces.
-- So, [APPLES are good] == [Apples are good] ==
-- [Apples
-- are good].
normalizeReference :: Text -> Text
normalizeReference = T.toCaseFold . T.concat . T.split isWhitespace
-- Scanners are implemented here as attoparsec parsers,
-- which consume input and capture nothing. They could easily
-- be implemented as regexes in other languages, or hand-coded.
-- With the exception of scanSpnl, they are all intended to
-- operate on a single line of input (so endOfInput = endOfLine).
type Scanner = Parser ()
-- Scan four spaces.
scanIndentSpace :: Scanner
scanIndentSpace = () <$ count 4 (skip (==' '))
scanSpacesToColumn :: Int -> Scanner
scanSpacesToColumn col = do
currentCol <- column <$> getPosition
case col - currentCol of
n | n >= 1 -> () <$ (count n (skip (==' ')))
| otherwise -> return ()
-- Scan 0-3 spaces.
scanNonindentSpace :: Scanner
scanNonindentSpace = () <$ upToCountChars 3 (==' ')
-- Scan a specified character.
scanChar :: Char -> Scanner
scanChar c = skip (== c) >> return ()
-- Scan a blankline.
scanBlankline :: Scanner
scanBlankline = scanSpaces *> endOfInput
-- Scan 0 or more spaces
scanSpaces :: Scanner
scanSpaces = skipWhile (==' ')
-- Scan 0 or more spaces, and optionally a newline
-- and more spaces.
scanSpnl :: Scanner
scanSpnl = scanSpaces *> option () (char '\n' *> scanSpaces)
-- Not followed by: Succeed without consuming input if the specified
-- scanner would not succeed.
nfb :: Parser a -> Scanner
nfb = notFollowedBy
-- Succeed if not followed by a character. Consumes no input.
nfbChar :: Char -> Scanner
nfbChar c = nfb (skip (==c))
upToCountChars :: Int -> (Char -> Bool) -> Parser Text
upToCountChars cnt f =
scan 0 (\n c -> if n < cnt && f c then Just (n+1) else Nothing)
|
nukisman/elm-format-short
|
markdown/Cheapskate/Util.hs
|
bsd-3-clause
| 3,554
| 0
| 16
| 739
| 770
| 428
| 342
| 68
| 3
|