code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module HMenu.Matching.Distance (
editDistance,
subseqDistance,
) where
-- | Edit distance allowing only insertions and deletions (no
-- substitutions): the minimum number of single-character inserts and
-- deletes that turn one string into the other.  Naive exponential
-- recursion; intended for short inputs.
editDistance :: String -> String -> Integer
editDistance xs [] = fromIntegral (length xs)
editDistance [] ys = fromIntegral (length ys)
editDistance l@(x:xs) r@(y:ys)
  | x == y    = editDistance xs ys
  | otherwise = 1 + min (editDistance xs r) (editDistance l ys)
-- | Length of the longest common subsequence of two strings.
-- Naive exponential recursion; intended for short inputs.
lcs :: String -> String -> Integer
lcs _ [] = 0
lcs [] _ = 0
lcs l@(x:xs) r@(y:ys)
  | x == y    = 1 + lcs xs ys
  | otherwise = max (lcs l ys) (lcs xs r)

-- | A dissimilarity score based on the longest common subsequence:
-- the shorter string's length times the count of its characters not in
-- the common subsequence, plus the length difference of the strings.
-- Identical strings score 0.
subseqDistance :: String -> String -> Integer
subseqDistance a b = shorter * (shorter - common) + (longer - shorter)
  where
    common  = lcs a b
    shorter = min la lb
    longer  = max la lb
    la = fromIntegral (length a)
    lb = fromIntegral (length b)
|
thelastnode/hmenu
|
HMenu/Matching/Distance.hs
|
mit
| 996
| 0
| 10
| 437
| 349
| 178
| 171
| 21
| 1
|
{-|
Module : Control.Monad.Bayes.Traced
Description : Distributions on execution traces
Copyright : (c) Adam Scibior, 2015-2020
License : MIT
Maintainer : leonhard.markert@tweag.io
Stability : experimental
Portability : GHC
-}
module Control.Monad.Bayes.Traced (
module Control.Monad.Bayes.Traced.Static
) where
import Control.Monad.Bayes.Traced.Static
|
adscib/monad-bayes
|
src/Control/Monad/Bayes/Traced.hs
|
mit
| 372
| 0
| 5
| 57
| 28
| 21
| 7
| 3
| 0
|
import System.FilePath.Glob (glob)
import Test.DocTest (doctest)
-- | Run doctest over every Haskell module under @src/@.
main :: IO ()
main = doctest =<< glob "src/**/*.hs"
|
yamadapc/butter-core
|
test/DocTest.hs
|
mit
| 118
| 0
| 6
| 17
| 44
| 24
| 20
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Text.CorasickPark.Handler.Update (updateHandler) where
import Snap (Handler, Method(..), method, gets)
import Snap.Core (modifyResponse, setResponseStatus)
import Control.Monad.Trans (liftIO)
import Snap.Extras.JSON (getJSON, writeJSON)
import Text.CorasickPark.Types
import Text.CorasickPark.Handler.Utils (errorMessage, successMessage)
import Text.CorasickPark.Algorithm (updateStateMachines)
-- | Handle POST requests carrying a new operation set as JSON.
-- Unparseable JSON answers 422 with an error message; a valid payload
-- updates the shared state machines and reports success.
updateHandler :: Handler App App ()
updateHandler = method POST setter
  where
    setter :: Handler App App ()
    setter = getJSON >>= either badRequest applyOps

    badRequest err = do
      modifyResponse $ setResponseStatus 422 "Invalid request"
      writeJSON $ errorMessage $ "Invalid request JSON: " ++ err

    applyOps newOpSet = do
      opmapvar <- gets _operations
      _ <- liftIO $ updateStateMachines opmapvar newOpSet
      writeJSON $ successMessage "Operations inserted successfully."
      return ()
|
stackbuilders/corasick-park
|
src/Text/CorasickPark/Handler/Update.hs
|
mit
| 993
| 0
| 16
| 209
| 253
| 136
| 117
| 23
| 2
|
{-| Implementation of the LUXI loader.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.Backend.Luxi
( loadData
, parseData
) where
import qualified Control.Exception as E
import Control.Monad (liftM)
import Text.JSON.Types
import qualified Text.JSON
import Ganeti.BasicTypes
import Ganeti.Errors
import qualified Ganeti.Luxi as L
import qualified Ganeti.Query.Language as Qlang
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.JSON
{-# ANN module "HLint: ignore Eta reduce" #-}
-- * Utility functions

-- | Get values behind \"data\" part of the result dict; any other
-- JSON shape is rejected via 'fail'.
getData :: (Monad m) => JSValue -> m JSValue
getData (JSObject o) = fromObj (fromJSObject o) "data"
getData x = fail $ "Invalid input, expected dict entry but got " ++ show x

-- | Converts a (status, value) two-element array into an m pair,
-- if possible.
parseQueryField :: (Monad m) => JSValue -> m (JSValue, JSValue)
parseQueryField (JSArray [status, result]) = return (status, result)
parseQueryField o =
  fail $ "Invalid query field, expected (status, value) but got " ++ show o

-- | Parse a result row: an array of (status, value) fields.
parseQueryRow :: (Monad m) => JSValue -> m [(JSValue, JSValue)]
parseQueryRow (JSArray arr) = mapM parseQueryField arr
parseQueryRow o =
  fail $ "Invalid query row result, expected array but got " ++ show o

-- | Parse an overall query result and get the [(status, value)] list
-- for each element queried.
parseQueryResult :: (Monad m) => JSValue -> m [[(JSValue, JSValue)]]
parseQueryResult (JSArray arr) = mapM parseQueryRow arr
parseQueryResult o =
  fail $ "Invalid query result, expected array but got " ++ show o

-- | Prepare resulting output as parsers expect it: unwrap the
-- \"data\" envelope, then parse the row structure.
extractArray :: (Monad m) => JSValue -> m [[(JSValue, JSValue)]]
extractArray v =
  getData v >>= parseQueryResult

-- | Convert a (status, value) pair, testing the result status first
-- for a more verbose error message.
fromJValWithStatus :: (Text.JSON.JSON a, Monad m) => (JSValue, JSValue) -> m a
fromJValWithStatus (st, v) = do
  st' <- fromJVal st
  Qlang.checkRS st' v >>= fromJVal
-- | Annotate a conversion error with the owning object's type, name
-- and the attribute being read, for better error messages.
annotateConvert :: String -> String -> String -> Result a -> Result a
annotateConvert otype oname oattr =
  annotateResult $ otype ++ " '" ++ oname ++
    "', error while reading attribute '" ++ oattr ++ "'"

-- | Annotate errors when converting values with owner/attribute for
-- better debugging.
genericConvert :: (Text.JSON.JSON a) =>
                  String             -- ^ The object type
               -> String             -- ^ The object name
               -> String             -- ^ The attribute we're trying to convert
               -> (JSValue, JSValue) -- ^ The value we're trying to convert
               -> Result a           -- ^ The annotated result
genericConvert otype oname oattr =
  annotateConvert otype oname oattr . fromJValWithStatus

-- | Like 'genericConvert', but for attributes holding an array of
-- optional values.
convertArrayMaybe :: (Text.JSON.JSON a) =>
                  String             -- ^ The object type
               -> String             -- ^ The object name
               -> String             -- ^ The attribute we're trying to convert
               -> (JSValue, JSValue) -- ^ The value we're trying to convert
               -> Result [Maybe a]   -- ^ The annotated result
convertArrayMaybe otype oname oattr (st, v) = do
  st' <- fromJVal st
  Qlang.checkRS st' v >>=
    annotateConvert otype oname oattr . arrayMaybeFromJVal
-- * Data querying functionality

-- | The input data for node query.
--
-- NOTE: the field order here must stay in sync with the positional
-- pattern in 'parseNode'.
queryNodesMsg :: L.LuxiOp
queryNodesMsg =
  L.Query (Qlang.ItemTypeOpCode Qlang.QRNode)
    ["name", "mtotal", "mnode", "mfree", "dtotal", "dfree",
     "ctotal", "cnos", "offline", "drained", "vm_capable",
     "ndp/spindle_count", "group.uuid", "tags",
     "ndp/exclusive_storage", "sptotal", "spfree"] Qlang.EmptyFilter

-- | The input data for instance query.
--
-- NOTE: the field order must stay in sync with 'parseInstance'.
queryInstancesMsg :: L.LuxiOp
queryInstancesMsg =
  L.Query (Qlang.ItemTypeOpCode Qlang.QRInstance)
    ["name", "disk_usage", "be/memory", "be/vcpus",
     "status", "pnode", "snodes", "tags", "oper_ram",
     "be/auto_balance", "disk_template",
     "be/spindle_use", "disk.sizes", "disk.spindles"] Qlang.EmptyFilter

-- | The input data for cluster query.
queryClusterInfoMsg :: L.LuxiOp
queryClusterInfoMsg = L.QueryClusterInfo

-- | The input data for node group query.
--
-- NOTE: the field order must stay in sync with 'parseGroup'.
queryGroupsMsg :: L.LuxiOp
queryGroupsMsg =
  L.Query (Qlang.ItemTypeOpCode Qlang.QRGroup)
    ["uuid", "name", "alloc_policy", "ipolicy", "tags"]
    Qlang.EmptyFilter

-- | Wrapper over 'callMethod' doing node query.
queryNodes :: L.Client -> IO (Result JSValue)
queryNodes = liftM errToResult . L.callMethod queryNodesMsg

-- | Wrapper over 'callMethod' doing instance query.
queryInstances :: L.Client -> IO (Result JSValue)
queryInstances = liftM errToResult . L.callMethod queryInstancesMsg

-- | Wrapper over 'callMethod' doing cluster information query.
queryClusterInfo :: L.Client -> IO (Result JSValue)
queryClusterInfo = liftM errToResult . L.callMethod queryClusterInfoMsg

-- | Wrapper over 'callMethod' doing group query.
queryGroups :: L.Client -> IO (Result JSValue)
queryGroups = liftM errToResult . L.callMethod queryGroupsMsg
-- | Parse an instance list in JSON format.
getInstances :: NameAssoc
             -> JSValue
             -> Result [(String, Instance.Instance)]
getInstances ktn arr = extractArray arr >>= mapM (parseInstance ktn)

-- | Construct an instance from a JSON object.
--
-- The 14-element pattern must match the field list of
-- 'queryInstancesMsg' positionally; any other row shape falls through
-- to the failing equation.
parseInstance :: NameAssoc
              -> [(JSValue, JSValue)]
              -> Result (String, Instance.Instance)
parseInstance ktn [ name, disk, mem, vcpus
                  , status, pnode, snodes, tags, oram
                  , auto_balance, disk_template, su
                  , dsizes, dspindles ] = do
  xname <- annotateResult "Parsing new instance" (fromJValWithStatus name)
  let convert a = genericConvert "Instance" xname a
  xdisk <- convert "disk_usage" disk
  -- Prefer the live "oper_ram" value when it is numeric, otherwise
  -- fall back to the configured "be/memory".
  xmem <- case oram of -- FIXME: remove the "guessing"
            (_, JSRational _ _) -> convert "oper_ram" oram
            _ -> convert "be/memory" mem
  xvcpus <- convert "be/vcpus" vcpus
  xpnode <- convert "pnode" pnode >>= lookupNode ktn xname
  xsnodes <- convert "snodes" snodes::Result [String]
  -- Only the first secondary node is tracked.
  snode <- case xsnodes of
             [] -> return Node.noSecondary
             x:_ -> lookupNode ktn xname x
  xrunning <- convert "status" status
  xtags <- convert "tags" tags
  xauto_balance <- convert "auto_balance" auto_balance
  xdt <- convert "disk_template" disk_template
  xsu <- convert "be/spindle_use" su
  xdsizes <- convert "disk.sizes" dsizes
  xdspindles <- convertArrayMaybe "Instance" xname "disk.spindles" dspindles
  let disks = zipWith Instance.Disk xdsizes xdspindles
      inst = Instance.create xname xmem xdisk disks
             xvcpus xrunning xtags xauto_balance xpnode snode xdt xsu []
  return (xname, inst)
parseInstance _ v = fail ("Invalid instance query result: " ++ show v)
-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> JSValue -> Result [(String, Node.Node)]
getNodes ktg arr = extractArray arr >>= mapM (parseNode ktg)

-- | Construct a node from a JSON object.
--
-- The 17-element pattern must match the field list of 'queryNodesMsg'
-- positionally; any other row shape falls through to the failing
-- equation.
parseNode :: NameAssoc -> [(JSValue, JSValue)] -> Result (String, Node.Node)
parseNode ktg [ name, mtotal, mnode, mfree, dtotal, dfree
              , ctotal, cnos, offline, drained, vm_capable, spindles, g_uuid
              , tags, excl_stor, sptotal, spfree ]
    = do
  xname <- annotateResult "Parsing new node" (fromJValWithStatus name)
  let convert a = genericConvert "Node" xname a
  xoffline <- convert "offline" offline
  xdrained <- convert "drained" drained
  xvm_capable <- convert "vm_capable" vm_capable
  xgdx <- convert "group.uuid" g_uuid >>= lookupGroup ktg xname
  xtags <- convert "tags" tags
  xexcl_stor <- convert "exclusive_storage" excl_stor
  -- "Live" values are only meaningful on online, VM-capable nodes;
  -- lvconvert substitutes the given default otherwise.
  let live = not xoffline && xvm_capable
      lvconvert def n d = eitherLive live def $ convert n d
  -- With exclusive storage, spindle totals come from live data;
  -- otherwise from the configured spindle count.
  xsptotal <- if xexcl_stor
              then lvconvert 0 "sptotal" sptotal
              else convert "spindles" spindles
  xspfree <- lvconvert 0 "spfree" spfree
  xmtotal <- lvconvert 0.0 "mtotal" mtotal
  xmnode <- lvconvert 0 "mnode" mnode
  xmfree <- lvconvert 0 "mfree" mfree
  xdtotal <- lvconvert 0.0 "dtotal" dtotal
  xdfree <- lvconvert 0 "dfree" dfree
  xctotal <- lvconvert 0.0 "ctotal" ctotal
  xcnos <- lvconvert 0 "cnos" cnos
  let node = flip Node.setNodeTags xtags $
             Node.create xname xmtotal xmnode xmfree xdtotal xdfree
               xctotal xcnos (not live || xdrained) xsptotal xspfree
               xgdx xexcl_stor
  return (xname, node)
parseNode _ v = fail ("Invalid node query result: " ++ show v)
-- | Parses the cluster tags, instance policy and master node name out
-- of the cluster info dict.
getClusterData :: JSValue -> Result ([String], IPolicy, String)
getClusterData (JSObject obj) = do
  let errmsg = "Parsing cluster info"
      obj' = fromJSObject obj
  ctags <- tryFromObj errmsg obj' "tags"
  cpol <- tryFromObj errmsg obj' "ipolicy"
  master <- tryFromObj errmsg obj' "master"
  return (ctags, cpol, master)
getClusterData _ = Bad "Cannot parse cluster info, not a JSON record"

-- | Parses the cluster groups.
getGroups :: JSValue -> Result [(String, Group.Group)]
getGroups jsv = extractArray jsv >>= mapM parseGroup

-- | Parses a given group information.
--
-- The 5-element pattern must match the field list of 'queryGroupsMsg'
-- positionally.
parseGroup :: [(JSValue, JSValue)] -> Result (String, Group.Group)
parseGroup [uuid, name, apol, ipol, tags] = do
  xname <- annotateResult "Parsing new group" (fromJValWithStatus name)
  let convert a = genericConvert "Group" xname a
  xuuid <- convert "uuid" uuid
  xapol <- convert "alloc_policy" apol
  xipol <- convert "ipolicy" ipol
  xtags <- convert "tags" tags
  -- TODO: parse networks to which this group is connected
  return (xuuid, Group.create xname xuuid xapol [] xipol xtags)
parseGroup v = fail ("Invalid group query result: " ++ show v)
-- * Main loader functionality

-- | Builds the cluster data by querying a given socket name.
--
-- Uses 'E.bracket' so the Luxi client is closed even if one of the
-- queries throws.
readData :: String -- ^ Unix socket to use as source
         -> IO (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
readData master =
  E.bracket
    (L.getLuxiClient master)
    L.closeClient
    (\s -> do
       nodes <- queryNodes s
       instances <- queryInstances s
       cinfo <- queryClusterInfo s
       groups <- queryGroups s
       return (groups, nodes, instances, cinfo)
    )

-- | Converts the output of 'readData' into the internal cluster
-- representation.  Groups are indexed first, then nodes (which refer
-- to groups), then instances (which refer to nodes).
parseData :: (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
          -> Result ClusterData
parseData (groups, nodes, instances, cinfo) = do
  group_data <- groups >>= getGroups
  let (group_names, group_idx) = assignIndices group_data
  node_data <- nodes >>= getNodes group_names
  let (node_names, node_idx) = assignIndices node_data
  inst_data <- instances >>= getInstances node_names
  let (_, inst_idx) = assignIndices inst_data
  (ctags, cpol, master) <- cinfo >>= getClusterData
  node_idx' <- setMaster node_names node_idx master
  return (ClusterData group_idx node_idx' inst_idx ctags cpol)

-- | Top level function for data loading.
loadData :: String -- ^ Unix socket to use as source
         -> IO (Result ClusterData)
loadData = fmap parseData . readData
|
badp/ganeti
|
src/Ganeti/HTools/Backend/Luxi.hs
|
gpl-2.0
| 11,969
| 0
| 14
| 2,541
| 2,925
| 1,511
| 1,414
| 208
| 3
|
module Problem32Spec where
import Test.Hspec
import qualified Problem32
-- | Specs exercising 'Problem32.myGCD' against known GCD values.
-- (Fixed typo in the suite description: "abou" -> "about".)
spec :: Spec
spec = describe "Check about GCD" $ do
  it "Verify that the GCD of 36 and 63 is 9" $
    Problem32.myGCD 36 63 `shouldBe` 9
  it "Verify that the GCD of 13 and 41 is 1" $
    Problem32.myGCD 13 41 `shouldBe` 1
|
wando-hs/H-99
|
test/Problem32Spec.hs
|
gpl-3.0
| 325
| 0
| 11
| 95
| 76
| 40
| 36
| 9
| 1
|
{-# LANGUAGE NamedFieldPuns #-}
module Language.ArrayForth.Stack where
-- | The machine's stacks: a data stack, a return stack, and the @a@
-- register.
data Stack a = Stack { dataS, retS :: [a]
                     , a :: a }

-- | Stack-manipulation operations interpreted by 'operate'.
data Operation = Dup | Over | Drop | Pop | Push | ToA | FromA deriving (Show, Eq, Bounded, Enum)

-- | Apply a single stack operation.
--
-- NOTE(review): the pattern requires both stacks to be non-empty, so
-- this is partial on empty stacks — confirm callers guarantee that.
-- NOTE(review): 'Over' has no alternative in the case below, so
-- @operate s Over@ fails with a non-exhaustive-pattern error.
-- NOTE(review): 'Dup' drops the bottom element (@init ds@), presumably
-- modelling a fixed-depth hardware stack, while 'Drop'/'FromA' do not
-- compensate — verify the intended stack-depth semantics.
operate :: Stack a -> Operation -> Stack a
operate stack@Stack {dataS = d:ds, retS = r:rs, a} opr = case opr of
  Dup -> stack { dataS = d:d:init ds }
  Drop -> stack { dataS = ds }
  Push -> stack { dataS = ds, retS = d:r:rs }
  Pop -> stack { dataS = r:d:ds, retS = rs }
  ToA -> stack { dataS = ds, a = d }
  FromA -> stack { dataS = a:d:ds }
|
TikhonJelvis/array-forth-hll
|
src/Language/ArrayForth/HLL/Stack.hs
|
gpl-3.0
| 601
| 0
| 11
| 170
| 278
| 160
| 118
| 13
| 6
|
-- Copyright 2016, 2017 Robin Raymond
--
-- This file is part of Purple Muon
--
-- Purple Muon is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- Purple Muon is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Purple Muon. If not, see <http://www.gnu.org/licenses/>.
{-|
Module : Client.Video.Types
Description : Types used in the video modules
Copyright : (c) Robin Raymond, 2016-2017
License : GPL-3
Maintainer : robin@robinraymond.de
Portability : POSIX
-}
module Client.Video.Types
( Resolution(..)
) where
import Protolude
import qualified SDL
-- | The resolution of the window, as an SDL 2-vector of 'Int32'.
newtype Resolution = Resolution { unResolution :: SDL.V2 Int32 }
|
r-raymond/purple-muon
|
src/Client/Video/Types.hs
|
gpl-3.0
| 1,141
| 0
| 8
| 222
| 59
| 45
| 14
| 5
| 0
|
{-
Copyright (C) 2014 Ellis Whitehead
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module OnTopOfThings.Actions.View where
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Logger (NoLoggingT)
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Trans.Resource (ResourceT)
import Data.List (inits, intercalate, partition, sort, sortBy)
import Data.Maybe
import Data.Monoid
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.ISO8601
import Database.Persist -- (PersistValue, entityVal, insert, toPersistValue, selectList)
--import Database.Persist.Sqlite
import Database.Persist.Sqlite (SqlPersistT, runSqlite, rawSql)
import Debug.Trace
import System.Console.ANSI
import System.Console.CmdArgs.Explicit
import System.Environment
import System.FilePath.Posix (joinPath, splitDirectories, takeDirectory)
import System.IO
import Text.RawString.QQ
import Text.Read (readMaybe)
import qualified Data.ByteString as BS
import qualified Data.Map as M
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.UUID as U
import qualified Data.UUID.V4 as U4
import qualified Data.Yaml as Yaml
import Args
import DatabaseTables
import DatabaseUtils
import Utils
import OnTopOfThings.Parsers.ItemFormatParser
import OnTopOfThings.Parsers.NumberList
import OnTopOfThings.Parsers.ViewParser
import OnTopOfThings.Actions.Action
import OnTopOfThings.Actions.Env
import OnTopOfThings.Actions.Utils (itemToAbsPathChain, lookupItem, uuidToItem)
import OnTopOfThings.Commands.Show
--import OnTopOfThings.Data.DatabaseJson
import OnTopOfThings.Data.FileJson
import OnTopOfThings.Data.Patch
import OnTopOfThings.Data.PatchDatabase
-- | Command-line mode definition for the "view" command: positional
-- QUERY arguments plus an optional --sort flag.
mode_view = Mode
  { modeGroupModes = mempty
  , modeNames = ["view"]
  , modeValue = options_empty "view"
  , modeCheck = Right
  , modeReform = Just . reform
  , modeExpandAt = True
  , modeHelp = "View by query."
  , modeHelpSuffix = []
  , modeArgs = ([], Just (flagArg (updN "query") "QUERY"))
  , modeGroupFlags = toGroup
      --[ flagReq ["folder"] (updN "folder") "FOLDER" "The folder under which to search"
      [ flagReq ["sort"] (updN "sort") "FIELD" "Field to sort by, fields may be comma separated"
      , flagNone ["help"] updHelp "display this help and exit"
      ]
  }
-- | Wire the "view" command into the generic action machinery.
instance Action ActionView where
  --runAction env action | trace ("runAction") False = undefined
  -- Run the view; failures become an ActionResult carrying the messages.
  runAction env action = do
    result <- view env action
    case result of
      Left msgs -> return (env, ActionResult [] False [] msgs)
      Right env1 -> return (env1, mempty)
  --actionFromOptions env opts | trace ("actionFromOptions "++(show opts)) False = undefined
  -- Build an ActionView from the parsed "query" and "sort" options,
  -- defaulting each to the empty list when absent.
  actionFromOptions env opts = do
    let queries = fromMaybe [] $ M.lookup "query" (optionsParamsN opts)
    let sorts = fromMaybe [] $ M.lookup "sort" (optionsParamsN opts)
    return (Right (ActionView queries sorts))
  actionToRecordArgs action = Nothing
-- | Pieces of a SQL query under construction: an optional WHERE
-- fragment, the tables it references, and the positional values for
-- its '?' placeholders.
data QueryData = QueryData
  { queryWhere :: Maybe String
  , queryTables :: Set.Set String
  , queryValues :: [PersistValue]
  }

-- | An item plus display-oriented extras.
data ViewItem = ViewItem
  { viewItemItem :: Item
  , viewItemProperties :: [(String, String)] -- extra key/value properties (e.g. "index")
  , viewItemFolder :: FilePath -- folder the item lives in, used for grouping output
  }

-- | Accumulated state for rendering a view: collected items, sort
-- comparators, and the header/item rendering callbacks.
data ViewData = ViewData
  { viewDataItems :: [ViewItem]
  , viewDataSortFns :: [(ViewItem -> ViewItem -> Ordering)]
  , viewDataHeaderFn :: (Maybe ViewItem -> ViewItem -> SqlPersistT (NoLoggingT (ResourceT IO)) (Maybe String))
  , viewDataItemFn :: (ViewItem -> SqlPersistT (NoLoggingT (ResourceT IO)) (String))
  }
-- | Entry point for the "view" action: clear all stored item indexes,
-- then run the queries through 'viewsub' with the default header/item
-- renderers.
view :: Env -> ActionView -> SqlPersistT (NoLoggingT (ResourceT IO)) (Validation Env)
view env0 (ActionView queries sorts) = do
  -- Remove all previous indexes
  updateWhere [ItemIndex !=. Nothing] [ItemIndex =. Nothing]
  -- Handle query
  let vd0 = (ViewData [] [] showHeader showItem)
  viewsub env0 vd0 queries' sorts
  where
    -- An empty query list means "match everything".
    queries' = case queries of
      [] -> ["(and )"]
      _ -> queries
    -- Print a folder header only when the folder changes between
    -- consecutive items (always for the first item).
    showHeader :: Maybe ViewItem -> ViewItem -> SqlPersistT (NoLoggingT (ResourceT IO)) (Maybe String)
    showHeader prevMaybe vi = do
      case prevMaybe of
        Nothing -> return $ Just (viewItemFolder vi)
        Just prev -> do
          if (viewItemFolder prev) == (viewItemFolder vi)
            then return Nothing
            else return $ Just $ "\n" ++ (viewItemFolder vi)
    -- Render one item using the format template, prefixed with its
    -- "(index) " when the item carries an index property.
    showItem :: ViewItem -> SqlPersistT (NoLoggingT (ResourceT IO)) (String)
    showItem vi = do
      s <- formatItem format item
      return $ prefix ++ s
      where
        item = viewItemItem vi
        format = [r|${index "" "" "" ") "} ${X} ${times "" " " "" " --"}${name "" " (" "" ")"}${title "" " "}${estimate "" " (" "" ")"}${tags "" " (" "," ")"}|]
        index :: Maybe Int
        index = case lookup "index" (viewItemProperties vi) of
          Nothing -> Nothing
          Just s -> readMaybe s :: Maybe Int
        index_s :: Maybe String
        index_s = fmap (\i -> "(" ++ (show i) ++ ") ") index
        prefix = fromMaybe "" index_s
-- | Process the query strings one by one: a "(print ...)" element
-- flushes the accumulated items; any other element is compiled to SQL,
-- executed, and its results appended.  When the list is exhausted the
-- remaining items are printed.
viewsub :: Env -> ViewData -> [String] -> [String] -> SqlPersistT (NoLoggingT (ResourceT IO)) (Validation Env)
viewsub env0 vd [] _ = do
  viewPrint vd
  return (Right env0)
viewsub env0 vd (queryString:rest) sorts = do
  let query_ = parseView queryString
  case query_ of
    Left msgs -> return (Left msgs)
    -- "print" flushes the accumulated items and resets the buffer.
    Right (ViewElement_Value "print" _) -> do
      viewPrint vd
      viewsub env0 (vd { viewDataItems = [] }) rest sorts
    Right elem -> do
      liftIO $ putStrLn $ show elem
      case constructViewQuery elem 1 of
        Left msg -> return (Left [msg])
        Right (qd, _) -> do
            -- Debug output of the generated SQL pieces.
            liftIO $ putStrLn wheres
            liftIO $ putStrLn $ show $ queryTables qd
            liftIO $ putStrLn stmt
            liftIO $ putStrLn $ show $ queryValues qd
            --tasks' <- rawSql (T.pack stmt) [] -- [toPersistValue $ formatTime' fromTime, toPersistValue $ head l]
            items' <- rawSql (T.pack stmt) (queryValues qd)
            let items = map entityVal items'
            --let x = itemTitle $ head items
            vis <- mapM itemToViewItem items
            let vd' = vd { viewDataItems = (viewDataItems vd) ++ vis }
            --liftIO $ mapM_ (putStrLn . show . itemTitle) tasks
            --liftIO $ putStrLn $ show $ length tasks
            --return (Right env0)
            viewsub env0 vd' rest sorts
          where
            -- Assemble "SELECT ?? FROM ... [WHERE ...] [ORDER BY ...]"
            -- joining each property table to item via its uuid.
            wheres = fromMaybe "" (queryWhere qd)
            tables = filter (/= "item") $ Set.toList $ queryTables qd
            froms = intercalate ", " $ "item" : (map (\s -> "property " ++ s) tables)
            whereUuid = case tables of
              [] -> ""
              tables -> "(" ++ s ++ ") AND " where
                s = intercalate " AND " $ map (\table -> "item.uuid = " ++ table ++ ".uuid") tables
            whereExpr = case (whereUuid, wheres) of
              ("", "") -> ""
              _ -> " WHERE " ++ whereUuid ++ "(" ++ wheres ++ ")"
            stmt0 = "SELECT ?? FROM " ++ froms ++ whereExpr
            stmt = case sorts of
              [] -> stmt0
              _ -> stmt0 ++ " ORDER BY " ++ intercalate " " sorts
-- | Wrap an 'Item' with its containing folder for display purposes.
itemToViewItem :: Item -> SqlPersistT (NoLoggingT (ResourceT IO)) ViewItem
itemToViewItem item = do
  path <- getAbsPath item
  let folder = takeDirectory path
  return $ ViewItem item [] folder

-- | Compute an item's absolute path by recursively walking up the
-- parent chain; the item itself contributes its name (or its uuid when
-- it has no name).
--
-- NOTE(review): no guard against cycles in the parent chain — a cyclic
-- parent reference would recurse forever; confirm the database forbids
-- cycles.
getAbsPath :: Item -> SqlPersistT (NoLoggingT (ResourceT IO)) FilePath
getAbsPath item = do
  parentPath <- case (itemParent item) of
    Nothing -> return []
    Just uuid -> do
      parent_ <- getBy $ ItemUniqUuid uuid
      case parent_ of
        Nothing -> return []
        Just parent -> do
          parentPath <- getAbsPath (entityVal parent)
          return [parentPath]
  return $ joinPath (parentPath ++ [fromMaybe (itemUuid item) (itemName item)])
-- | Print all collected items, interleaving folder headers via the
-- view's header callback, and assign each printed item a fresh numeric
-- index (persisted with 'updateIndex').
viewPrint :: ViewData -> SqlPersistT (NoLoggingT (ResourceT IO)) ()
viewPrint vd = do
  let vis = viewDataItems vd
  -- TODO: sort items
  -- TODO: set item indexes
  -- Remove all previous indexes
  indexNext <- getNextIndex
  let viToIndex_l = zip vis [indexNext..]
  -- TODO: fold over items, printings headers where appropriate
  foldM_ printHeaderAndItem Nothing viToIndex_l
  --ss <- mapM (viewDataItemFn vd) vis
  --liftIO $ mapM_ putStrLn ss
  return ()
  where
    -- Fold step: persist the index, render (optional) header and the
    -- item line, and hand the updated item to the next step.
    printHeaderAndItem prevMaybe (vi, index) = do
      let item = viewItemItem vi
      updateIndex item index
      let item' = item { itemIndex = Just index }
      let vi' = vi { viewItemItem = item' }
      headerMaybe <- (viewDataHeaderFn vd) prevMaybe vi
      s <- (viewDataItemFn vd) vi'
      case headerMaybe of
        Just header -> liftIO $ putStrLn header
        Nothing -> return ()
      liftIO $ putStrLn s
      return (Just vi')
-- | Newtype wrappers giving 'QueryData' AND- (and, eventually, OR-)
-- combination semantics via 'Monoid'.
newtype QueryDataAnd = QueryDataAnd QueryData
newtype QueryDataOr = QueryDataOr QueryData

extractQueryDataAnd (QueryDataAnd qd) = qd

-- | AND-combination: a query with no WHERE fragment is the identity;
-- otherwise fragments are joined with " AND ", tables unioned and
-- values appended in order.
--
-- NOTE(review): GHC >= 8.4 requires a Semigroup instance alongside
-- Monoid — confirm the target compiler version.
instance Monoid QueryDataAnd where
  mempty = QueryDataAnd (QueryData Nothing mempty [])
  mappend (QueryDataAnd (QueryData Nothing _ _)) b = b
  mappend a (QueryDataAnd (QueryData Nothing _ _)) = a
  mappend (QueryDataAnd (QueryData (Just where1) tables1 values1)) (QueryDataAnd (QueryData (Just where2) tables2 values2)) =
    QueryDataAnd (QueryData (Just $ where1 ++ " AND " ++ where2) (tables1 `mappend` tables2) (values1 ++ values2))
-- | Compile a parsed view expression into a 'QueryData', threading an
-- integer counter used to generate unique property-table aliases.
constructViewQuery :: ViewElement -> Int -> Either String (QueryData, Int)
constructViewQuery (ViewElement_And elems) propertyIndex = case foldl step (Right ([], propertyIndex)) elems of
    Left msg -> Left msg
    -- Sub-queries were accumulated in reverse; restore order before
    -- AND-combining them.
    Right (queries_r, propertyIndex') -> Right (extractQueryDataAnd (mconcat queries), propertyIndex') where
      queries = reverse queries_r
  where
    -- Fold one child element, short-circuiting on the first error.
    step :: Either String ([QueryDataAnd], Int) -> ViewElement -> Either String ([QueryDataAnd], Int)
    step (Left msg) _ = Left msg
    step (Right (r, propertyIndex)) elem = case constructViewQuery elem propertyIndex of
      Left msg -> Left msg
      Right (qd, propertyIndex') -> Right (r', propertyIndex') where
        r' = (QueryDataAnd qd) : r
-- NOTE(review): a field outside these two sets hits no guard and
-- fails with a pattern-match error — confirm the parser restricts the
-- field names.
constructViewQuery (ViewElement_Value field values) propertyIndex
  | Set.member field (Set.fromList ["stage", "status"]) = Right $ constructViewItemQuery field values propertyIndex
  | Set.member field (Set.fromList ["tag"]) = Right $ constructViewPropertyQuery field values propertyIndex
constructViewQuery (ViewElement_BinOp field op value) propertyIndex
  | Set.member field (Set.fromList ["estimate"]) = Right $ constructItemBinOpIntQuery "item" field op (read value :: Int) propertyIndex
  -- | field == "folder" = do
  | otherwise = Right $ constructItemBinOpStringQuery "item" field op value propertyIndex
-- | Build a 'QueryData' matching an item-table field against a value
-- list; the property-table counter is passed through unchanged.
constructViewItemQuery :: String -> [String] -> Int -> (QueryData, Int)
constructViewItemQuery field values propertyIndex = (qd, propertyIndex) where
  qd = constructViewQueryValue "item" field values

-- | WHERE fragment for @table.property@ against zero, one or many
-- values: IS NULL / "= ?" / "IN (?, ...)" respectively, with the
-- matching parameter values.
constructViewQueryValue :: String -> String -> [String] -> QueryData
constructViewQueryValue table property values = QueryData (Just wheres) tables values' where
  wheres = table ++ "." ++ property ++ " " ++ rhs
  (rhs, values') = case values of
    [] -> ("IS NULL", [])
    x:[] -> ("= ?", [toPersistValue x])
    xs -> (s, l) where
      s :: String
      s = "IN (" ++ (intercalate "," (map (\_ -> "?") xs)) ++ ")"
      l :: [PersistValue]
      l = map toPersistValue xs
  tables = Set.fromList [table]
-- | WHERE fragment comparing @table.property@ to a string value with
-- the given operator; the counter is passed through unchanged.
constructItemBinOpStringQuery :: String -> String -> String -> String -> Int -> (QueryData, Int)
constructItemBinOpStringQuery table property op value propertyIndex = (QueryData (Just wheres) tables values', propertyIndex) where
  wheres = table ++ "." ++ property ++ " " ++ rhs
  rhs = constructItemBinOpQueryRhs op
  values' = [toPersistValue value]
  tables = Set.fromList [table]

-- | WHERE fragment comparing @table.property@ to an integer value with
-- the given operator; the counter is passed through unchanged.
constructItemBinOpIntQuery :: String -> String -> String -> Int -> Int -> (QueryData, Int)
constructItemBinOpIntQuery table property op value propertyIndex = (QueryData (Just wheres) tables values', propertyIndex) where
  wheres = table ++ "." ++ property ++ " " ++ rhs
  rhs = constructItemBinOpQueryRhs op
  values' = [toPersistValue value]
  tables = Set.fromList [table]
-- | Translate a comparison operator into its parameterized SQL
-- right-hand side (the actual value is bound separately to the '?'
-- placeholder).
--
-- Fixes: the original case was non-exhaustive (only "=", "<", "<="),
-- so ">" / ">=" crashed with an anonymous pattern-match failure.  Now
-- all standard comparisons are handled and an unknown operator raises
-- a descriptive 'error' instead.
constructItemBinOpQueryRhs :: String -> String
constructItemBinOpQueryRhs op = case op of
  "=" -> "= ?"
  "<" -> "< ?"
  "<=" -> "<= ?"
  ">" -> "> ?"
  ">=" -> ">= ?"
  _ -> error ("constructItemBinOpQueryRhs: unsupported operator " ++ show op)
-- | Build a property-table clause matching @field = value@, using a
-- uniquely-numbered table alias ("propertyN") and incrementing the
-- counter for the next property clause.
--
-- NOTE(review): uses 'head values', which crashes on an empty value
-- list, and silently ignores all but the first value — confirm the
-- parser guarantees exactly one value here.
constructViewPropertyQuery :: String -> [String] -> Int -> (QueryData, Int)
constructViewPropertyQuery field values propertyIndex = (qd, propertyIndex') where
  qd = QueryData (Just s) tables values'
  s = tableName ++ ".name = '" ++ field ++ "' AND " ++ tableName ++ ".value = ?"
  tableName = "property" ++ show propertyIndex
  tables = Set.fromList [tableName]
  --values = map toPersistValue values
  values' = [toPersistValue $ head values]
  propertyIndex' = propertyIndex + 1
-- | Render an item according to a format template string; a template
-- parse failure yields the joined error messages as the result string.
formatItem :: String -> Item -> SqlPersistT (NoLoggingT (ResourceT IO)) String
formatItem format item = case parseItemFormat format of
  Left msgs -> return (intercalate ";" msgs)
  Right elems -> do
    l <- mapM (\elem -> formatItemElem elem item) elems
    return (concat l)

-- | Render a single template element: literal strings pass through;
-- "${name missing prefix infix suffix}" calls look up the named field
-- and wrap the resulting piece(s), or yield @missing@ when empty.
formatItemElem :: ItemFormatElement -> Item -> SqlPersistT (NoLoggingT (ResourceT IO)) String
formatItemElem (ItemFormatElement_String s) _ = return s
formatItemElem (ItemFormatElement_Call name missing prefix infix_ suffix) item = do
  ss <- case name of
    "index" -> return $ maybeToList $ fmap show (itemIndex item)
    -- "X": checkbox-style marker derived from item type and status.
    "X" ->
      return $ case (itemType item, itemStatus item) of
        ("list", "open") -> []
        ("list", "closed") -> ["[x]"]
        ("list", "deleted") -> ["XXX"]
        ("task", "open") -> ["[ ]"]
        (_, "open") -> [" - "]
        (_, "closed") -> ["[x]"]
        (_, "deleted") -> ["XXX"]
        _ -> []
    "name" -> return (maybeToList $ itemName item)
    -- "times": human-readable summary of start/end/due timestamps.
    "times" ->
      return $ case (itemStart item, itemEnd item, itemDue item) of
        (Just start, Just end, Just due) -> [start ++ " - " ++ end ++ ", due " ++ due]
        (Just start, Just end, Nothing) -> [start ++ " - " ++ end]
        (Just start, Nothing, Just due) -> [start ++ ", due " ++ due]
        (Just start, Nothing, Nothing) -> [start]
        (Nothing, Just end, Just due) -> ["end " ++ end ++ ", due " ++ due]
        (Nothing, Just end, Nothing) -> [end]
        (Nothing, Nothing, Just due) -> ["due " ++ due]
        _ -> []
    "title" -> return (maybeToList $ itemTitle item)
    "estimate" -> do
      return (maybeToList (fmap (\n -> show n ++ "min") $ itemEstimate item))
    -- "tags": "?stage" plus "+tag" for each tag property in the DB.
    "tags" -> do
      let l1 = maybeToList $ (itemStage item >>= \stage -> Just ('?':stage))
      tags' <- selectList [PropertyUuid ==. itemUuid item, PropertyName ==. "tag"] []
      let l2 = map (\x -> ('+' : (propertyValue $ entityVal x))) tags'
      return (l1 ++ l2)
    _ -> return ["unknown format spec: "++name]
  let s = case ss of
        [] -> missing
        _ -> prefix ++ (intercalate infix_ ss) ++ suffix
  return s
-- | Persist a display index for the given item (matched by uuid).
updateIndex :: Item -> Int -> SqlPersistT (NoLoggingT (ResourceT IO)) ()
updateIndex item index = do
  updateWhere [ItemUuid ==. (itemUuid item)] [ItemIndex =. Just index]
|
ellis/OnTopOfThings
|
old-20150308/src/OnTopOfThings/Actions/View.hs
|
gpl-3.0
| 15,689
| 0
| 27
| 3,558
| 4,794
| 2,492
| 2,302
| 300
| 23
|
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module Engine where
import Control.Applicative
import Control.Arrow
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Concurrent.Process
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class (MonadIO, liftIO)
-- import Data.Accessor
import Data.Bits
import Data.Char (ord)
import Data.Function (fix)
import qualified Data.List as List
import Data.Maybe
import Data.Version (showVersion)
import Mescaline (Time)
import Mescaline.Application (AppT)
import qualified Mescaline.Application as App
import qualified Mescaline.Application.Desktop as App
import qualified Mescaline.Application.Logger as Log
import qualified Mescaline.Database as DB
import qualified Mescaline.Database.Process as DatabaseP
import qualified Mescaline.Pattern.Sequencer as Sequencer
import qualified Mescaline.FeatureSpace.Model as FeatureSpace
import qualified Mescaline.FeatureSpace.Process as FeatureSpaceP
import qualified Mescaline.Pattern as Pattern
import qualified Mescaline.Pattern.Environment as Pattern
import qualified Mescaline.Pattern.Event as Event
import qualified Mescaline.Pattern.Patch as Patch
import qualified Mescaline.Pattern.Process as PatternP
import qualified Mescaline.Synth.OSCServer as OSCServer
import qualified Mescaline.Synth.Sampler.Process as SynthP
import Mescaline.Util (findFiles)
import qualified Sound.OpenSoundControl as OSC
import qualified Sound.SC3.Server.State as State
import qualified Sound.SC3.Server.Process as Server
import qualified Sound.SC3.Server.Process.CommandLine as Server
import System.Directory
import qualified System.Environment as Env
import System.Environment.FindBin (getProgPath)
import System.FilePath
import System.IO
import qualified System.Random as Random
-- | Forward values from one channel to another, transforming each element
-- with the given action.  Loops forever and never returns.
pipe :: (a -> IO b) -> Chan a -> Chan b -> IO ()
pipe f input output =
    forever $ readChan input >>= f >>= writeChan output
-- ====================================================================
-- Logging to text view
-- chanLogger :: Log.Priority -> String -> Chan String -> IO () -> Log.GenericHandler (Chan String)
-- chanLogger prio fmt chan action =
-- Log.GenericHandler
-- prio
-- (Log.simpleLogFormatter fmt)
-- chan
-- (\chan msg -> writeChan chan msg >> action)
-- (const (return ()))
--
-- createLoggers :: MainWindow -> IO ()
-- createLoggers logWindow = do
-- textEdit <- Qt.findChild logWindow ("<QTextEdit*>", "textEdit") :: IO (Qt.QTextEdit ())
-- chan <- newChan
-- Qt.connectSlot logWindow "logMessage()" logWindow "logMessage()" $ logMessage chan textEdit
-- let fmt = "[$prio][$loggername] $msg\n"
-- action = Qt.emitSignal logWindow "logMessage()" ()
-- components <- Log.getComponents
-- -- FIXME: The log levels have to be initialized first down in main, why?
-- mapM_ (\(logger, prio) -> do
-- Log.updateGlobalLogger
-- logger
-- (Log.setHandlers [chanLogger prio fmt chan action]))
-- components
-- -- Disable stderr logger
-- Log.updateGlobalLogger Log.rootLoggerName (Log.setHandlers ([] :: [Log.GenericHandler ()]))
-- where
-- logMessage :: Chan String -> Qt.QTextEdit () -> MainWindow -> IO ()
-- logMessage chan edit _ = do
-- msg <- readChan chan
-- c <- Qt.textCursor edit ()
-- Qt.insertText c msg
-- _ <- Qt.movePosition c (Qt.eEnd :: Qt.MoveOperation)
-- Qt.setTextCursor edit c
--
-- clearLog :: MainWindow -> IO ()
-- clearLog logWindow = do
-- edit <- Qt.findChild logWindow ("<QTextEdit*>", "textEdit") :: IO (Qt.QTextEdit ())
-- Qt.setPlainText edit ""
-- ====================================================================
-- Actions
-- | Write one line to stderr from any 'MonadIO' computation.
logStrLn :: MonadIO m => String -> m ()
logStrLn msg = liftIO (hPutStrLn stderr msg)
-- | Log the program, data and resource directories as a single list.
logAppDirs :: MonadIO m => AppT m ()
logAppDirs = do
    dirs <- sequence [ App.getProgramDirectory
                     , App.getDataDirectory
                     , App.getResourceDirectory ]
    logStrLn (show dirs)
-- | Start all engine processes (synth, feature space, sequencer, database),
-- wire database change notifications into the feature space, and kick off
-- loading @dbFile@ with the given file @pattern@.  Returns the synth and
-- feature-space handles plus a shutdown action for the synth.
engine :: FilePath -> String -> AppT IO (SynthP.Handle, FeatureSpaceP.Handle, IO ())
engine dbFile pattern = do
    -- logAppDirs
    --
    -- docDir <- liftM (flip combine "Documents" . takeDirectory) App.getProgramDirectory
    -- logStrLn $ "Documents: " ++ docDir
    -- components <- Log.getComponents
    -- liftIO $ mapM_ (\(l,p) -> Log.updateGlobalLogger l (Log.setLevel p)) components
    -- createLoggers logWindow
    -- Synth process
    (synthP, synthQuit) <- SynthP.new
    logStrLn "Synth started"
    -- Feature space process
    fspaceP <- liftIO FeatureSpaceP.new
    logStrLn "FeatureSpace started"
    -- -- Sequencer process
    -- NOTE(review): patternP is bound but not referenced again in this
    -- function; presumably PatternP.new has the side effect of starting the
    -- sequencer — confirm before removing.
    patternP <- PatternP.new Patch.defaultPatchEmbedded fspaceP
    logStrLn "Sequencer started"
    -- -- Database process
    dbP <- liftIO DatabaseP.new
    -- Every database change event becomes a LoadDatabase request for the
    -- feature space.
    liftIO $ connect (\(DatabaseP.Changed path pattern) -> FeatureSpaceP.LoadDatabase path pattern) dbP fspaceP
    -- let dbFile = docDir </> "mescaline.db"
    sendTo dbP $ DatabaseP.Load dbFile pattern
    logStrLn "Database started"
    -- -- Pattern process
    -- patternToFSpaceP <- spawn $ fix $ \loop -> do
    -- x <- recv
    -- case x of
    -- PatternP.Event time event -> do
    -- -- Event.withSynth (return ()) (sendTo synthP . SynthP.PlayUnit time) event
    -- return ()
    -- _ -> return ()
    -- loop
    -- patternToFSpaceP `listenTo` patternP
    -- fspaceToPatternP <- spawn $ fix $ \loop -> do
    -- x <- recv
    -- case x of
    -- FeatureSpaceP.RegionChanged _ -> do
    -- fspace <- query fspaceP FeatureSpaceP.GetModel
    -- sendTo patternP $ PatternP.SetFeatureSpace fspace
    -- _ -> return ()
    -- loop
    -- fspaceToPatternP `listenTo` fspaceP
    -- OSC server process
    -- oscServer <- OSCServer.new 2010 synthP fspaceP
    -- logStrLn "OSCServer started"
    -- logStrLn "Starting event loop"
    -- Signal synth thread and wait for it to exit.
    -- Otherwise stale scsynth processes will be lingering around.
    return (synthP, fspaceP, synthQuit >> logStrLn "Bye sucker.")
|
kaoskorobase/mescaline
|
tools/sts/Engine.hs
|
gpl-3.0
| 6,519
| 0
| 13
| 1,572
| 832
| 516
| 316
| 72
| 1
|
{-# OPTIONS_HADDOCK ignore-exports #-}
module Cube_Display (
display,
initfn
) where
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
import Control.Monad
-- | List of normals
-- | Outward-facing normal for each of the six cube faces, in the same
-- order as 'faces'.
n :: [Normal3 GLfloat]
n =
    [ Normal3 (-1.0) 0.0 0.0
    , Normal3 0.0 1.0 0.0
    , Normal3 1.0 0.0 0.0
    , Normal3 0.0 (-1.0) 0.0
    , Normal3 0.0 0.0 1.0
    , Normal3 0.0 0.0 (-1.0)
    ]
-- | List of faces represented as four vertices
-- | The six cube faces, each given as four vertex indices resolved
-- through 'v'.
faces :: [[Vertex3 GLfloat]]
faces = map (map v)
    [ [0, 1, 2, 3]
    , [3, 2, 6, 7]
    , [7, 6, 5, 4]
    , [4, 5, 1, 0]
    , [5, 6, 2, 1]
    , [7, 4, 0, 3]
    ]
-- | Setup cube vertex data (?)
-- | Corner @x@ (0..7) of the unit cube centred on the origin; every
-- coordinate is -1 or 1.  The guard table of the original was
-- non-exhaustive: any index outside 0..7 crashed with an opaque
-- pattern-match failure.  This version produces the same coordinates for
-- 0..7 and raises a descriptive error otherwise.
v :: Int -> Vertex3 GLfloat
v x
    | x < 0 || x > 7 = error ("Cube_Display.v: vertex index out of range: " ++ show x)
    | otherwise      = Vertex3 v0 v1 v2
  where
    -- x >= 4 selects the +x half; within a half, x `mod` 4 walks the face.
    v0 = sign (x >= 4)                          -- -1 for 0..3, 1 for 4..7
    v1 = sign (x `mod` 4 >= 2)                  -- -1 for {0,1,4,5}, 1 for {2,3,6,7}
    v2 = sign (x `mod` 4 == 0 || x `mod` 4 == 3) -- 1 for {0,3,4,7}, -1 for {1,2,5,6}
    sign b = if b then 1 else -1
-- | Display callback.
-- | Display callback: clear colour and depth buffers, draw the cube, then
-- present the back buffer.
display :: IO ()
display = clear [ColorBuffer, DepthBuffer] >> drawBox >> swapBuffers
-- | Apply the normal to each face and render them
-- | Render each cube face as a quad with its matching normal.
drawBox :: IO ()
drawBox = mapM_ quadFace (zip n faces)
  where
    quadFace (norm, verts) = renderPrimitive Quads $ do
        normal norm
        mapM_ vertex verts
-- | Initializes various things for the program
-- | One-time GL setup: one red light, lighting and depth testing enabled,
-- and a fixed camera looking at the origin with the model rotated.
-- The order of these state calls matters; do not reorder.
initfn :: IO ()
initfn = do
    diffuse light0 $= lightDiffuse
    position light0 $= lightPosition
    light light0 $= Enabled
    lighting $= Enabled
    -- Accept fragments whose depth is <= the stored depth.
    depthFunc $= Just Lequal
    matrixMode $= Projection
    -- 40 degree vertical field of view, square aspect, near 1, far 10.
    perspective 40.0 1.0 1.0 10.0
    matrixMode $= Modelview 0
    -- Camera at z = 5 looking at the origin, y axis up.
    lookAt (Vertex3 0.0 0.0 5.0) (Vertex3 0.0 0.0 0.0) (Vector3 0.0 1.0 0.0)
    translate ((Vector3 0.0 0.0 (-1.0))::Vector3 GLfloat)
    rotate 60 ((Vector3 1.0 0.0 0.0)::Vector3 GLfloat)
    rotate (-20) ((Vector3 0.0 0.0 1.0)::Vector3 GLfloat)
  where
    light0 = Light 0
    lightDiffuse = Color4 1.0 0.0 0.0 1.0  -- red diffuse light
    lightPosition = Vertex4 1.0 1.0 1.0 0.0
|
rmcmaho/Haskell_OpenGL_Examples
|
Cube/Cube_Display.hs
|
gpl-3.0
| 2,177
| 0
| 16
| 667
| 1,007
| 519
| 488
| 59
| 1
|
-- | Examples extracted from http://www.cse.chalmers.se/~hallgren/Papers/hallgren.pdf
--
module FunWithFunctionalDependencies where
|
capitanbatata/sandbox
|
typelevel-computations/src/FunWithFunctionalDependencies.hs
|
gpl-3.0
| 135
| 0
| 2
| 13
| 6
| 5
| 1
| 1
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DataTransfer.Transfers.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Inserts a data transfer request.
--
-- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @datatransfer.transfers.insert@.
module Network.Google.Resource.DataTransfer.Transfers.Insert
(
-- * REST Resource
TransfersInsertResource
-- * Creating a Request
, transfersInsert
, TransfersInsert
-- * Request Lenses
, tiXgafv
, tiUploadProtocol
, tiAccessToken
, tiUploadType
, tiPayload
, tiCallback
) where
import Network.Google.DataTransfer.Types
import Network.Google.Prelude
-- | A resource alias for @datatransfer.transfers.insert@ method which the
-- 'TransfersInsert' request conforms to.
-- Servant-style route: POST admin/datatransfer/v1/transfers with a
-- DataTransfer JSON body and the standard Google API query parameters.
-- NOTE: this module is auto-generated by gogol; keep edits regenerable.
type TransfersInsertResource =
    "admin" :>
      "datatransfer" :>
        "v1" :>
          "transfers" :>
            QueryParam "$.xgafv" Xgafv :>
              QueryParam "upload_protocol" Text :>
                QueryParam "access_token" Text :>
                  QueryParam "uploadType" Text :>
                    QueryParam "callback" Text :>
                      QueryParam "alt" AltJSON :>
                        ReqBody '[JSON] DataTransfer :>
                          Post '[JSON] DataTransfer
-- | Inserts a data transfer request.
--
-- /See:/ 'transfersInsert' smart constructor.
data TransfersInsert =
  TransfersInsert'
    { _tiXgafv :: !(Maybe Xgafv)           -- ^ V1 error format
    , _tiUploadProtocol :: !(Maybe Text)   -- ^ upload protocol, e.g. \"raw\"
    , _tiAccessToken :: !(Maybe Text)      -- ^ OAuth access token
    , _tiUploadType :: !(Maybe Text)       -- ^ legacy upload protocol
    , _tiPayload :: !DataTransfer          -- ^ request body (required)
    , _tiCallback :: !(Maybe Text)         -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TransfersInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiXgafv'
--
-- * 'tiUploadProtocol'
--
-- * 'tiAccessToken'
--
-- * 'tiUploadType'
--
-- * 'tiPayload'
--
-- * 'tiCallback'
transfersInsert
    :: DataTransfer -- ^ 'tiPayload'
    -> TransfersInsert
transfersInsert pTiPayload_ =
  -- Only the payload is required; all query parameters start out unset.
  TransfersInsert'
    { _tiXgafv = Nothing
    , _tiUploadProtocol = Nothing
    , _tiAccessToken = Nothing
    , _tiUploadType = Nothing
    , _tiPayload = pTiPayload_
    , _tiCallback = Nothing
    }
-- | V1 error format.
-- Van-Laarhoven lenses over the record fields above, one per field.
tiXgafv :: Lens' TransfersInsert (Maybe Xgafv)
tiXgafv = lens _tiXgafv (\ s a -> s{_tiXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
tiUploadProtocol :: Lens' TransfersInsert (Maybe Text)
tiUploadProtocol
  = lens _tiUploadProtocol
      (\ s a -> s{_tiUploadProtocol = a})
-- | OAuth access token.
tiAccessToken :: Lens' TransfersInsert (Maybe Text)
tiAccessToken
  = lens _tiAccessToken
      (\ s a -> s{_tiAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
tiUploadType :: Lens' TransfersInsert (Maybe Text)
tiUploadType
  = lens _tiUploadType (\ s a -> s{_tiUploadType = a})
-- | Multipart request metadata.
tiPayload :: Lens' TransfersInsert DataTransfer
tiPayload
  = lens _tiPayload (\ s a -> s{_tiPayload = a})
-- | JSONP
tiCallback :: Lens' TransfersInsert (Maybe Text)
tiCallback
  = lens _tiCallback (\ s a -> s{_tiCallback = a})
instance GoogleRequest TransfersInsert where
    type Rs TransfersInsert = DataTransfer
    -- Requires the admin.datatransfer OAuth scope.
    type Scopes TransfersInsert =
        '["https://www.googleapis.com/auth/admin.datatransfer"]
    -- Query parameters are passed in route order; alt=json is forced.
    requestClient TransfersInsert'{..}
      = go _tiXgafv _tiUploadProtocol _tiAccessToken
          _tiUploadType
          _tiCallback
          (Just AltJSON)
          _tiPayload
          dataTransferService
      where go
              = buildClient
                  (Proxy :: Proxy TransfersInsertResource)
                  mempty
|
brendanhay/gogol
|
gogol-admin-datatransfer/gen/Network/Google/Resource/DataTransfer/Transfers/Insert.hs
|
mpl-2.0
| 4,516
| 0
| 18
| 1,117
| 711
| 414
| 297
| 104
| 1
|
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Database.CQL.Protocol.Codec
( encodeByte
, decodeByte
, encodeSignedByte
, decodeSignedByte
, encodeShort
, decodeShort
, encodeSignedShort
, decodeSignedShort
, encodeInt
, decodeInt
, encodeString
, decodeString
, encodeLongString
, decodeLongString
, encodeBytes
, decodeBytes
, encodeShortBytes
, decodeShortBytes
, encodeUUID
, decodeUUID
, encodeList
, decodeList
, encodeMap
, decodeMap
, encodeMultiMap
, decodeMultiMap
, encodeSockAddr
, decodeSockAddr
, encodeConsistency
, decodeConsistency
, encodeOpCode
, decodeOpCode
, encodeColumnType
, decodeColumnType
, encodePagingState
, decodePagingState
, decodeKeyspace
, decodeTable
, decodeQueryId
, putValue
, getValue
) where
import Control.Applicative
import Control.Monad
import Data.Bits
import Data.ByteString (ByteString)
import Data.Decimal
import Data.Int
import Data.IP
import Data.List (unfoldr)
import Data.Text (Text)
import Data.UUID (UUID)
import Data.Word
import Data.Serialize hiding (decode, encode)
import Database.CQL.Protocol.Types
import Network.Socket (SockAddr (..), PortNumber (..))
import Prelude
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as LB
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import qualified Data.UUID as UUID
------------------------------------------------------------------------------
-- Byte
-- | Write a single unsigned byte ([byte] in the CQL wire format).
encodeByte :: Putter Word8
encodeByte = put
decodeByte :: Get Word8
decodeByte = get
------------------------------------------------------------------------------
-- Signed Byte
encodeSignedByte :: Putter Int8
encodeSignedByte = put
decodeSignedByte :: Get Int8
decodeSignedByte = get
------------------------------------------------------------------------------
-- Short
-- | Unsigned 16-bit value ([short]).
encodeShort :: Putter Word16
encodeShort = put
decodeShort :: Get Word16
decodeShort = get
------------------------------------------------------------------------------
-- Signed Short
encodeSignedShort :: Putter Int16
encodeSignedShort = put
decodeSignedShort :: Get Int16
decodeSignedShort = get
------------------------------------------------------------------------------
-- Int
-- | Signed 32-bit value ([int]).
encodeInt :: Putter Int32
encodeInt = put
decodeInt :: Get Int32
decodeInt = get
------------------------------------------------------------------------------
-- String
-- | [string]: UTF-8 bytes preceded by an unsigned 16-bit length.
encodeString :: Putter Text
encodeString = encodeShortBytes . T.encodeUtf8
decodeString :: Get Text
decodeString = T.decodeUtf8 <$> decodeShortBytes
------------------------------------------------------------------------------
-- Long String
-- | [long string]: UTF-8 bytes preceded by a signed 32-bit length.
encodeLongString :: Putter LT.Text
encodeLongString = encodeBytes . LT.encodeUtf8
decodeLongString :: Get LT.Text
decodeLongString = do
    n <- get :: Get Int32
    LT.decodeUtf8 <$> getLazyByteString (fromIntegral n)
------------------------------------------------------------------------------
-- Bytes
-- | [bytes]: payload preceded by a signed 32-bit length.
encodeBytes :: Putter LB.ByteString
encodeBytes bs = do
    put (fromIntegral (LB.length bs) :: Int32)
    putLazyByteString bs
-- | A negative length prefix denotes null and yields 'Nothing'.
decodeBytes :: Get (Maybe LB.ByteString)
decodeBytes = do
    n <- get :: Get Int32
    if n < 0
        then return Nothing
        else Just <$> getLazyByteString (fromIntegral n)
------------------------------------------------------------------------------
-- Short Bytes
-- | [short bytes]: payload preceded by an unsigned 16-bit length.
encodeShortBytes :: Putter ByteString
encodeShortBytes bs = do
    put (fromIntegral (B.length bs) :: Word16)
    putByteString bs
decodeShortBytes :: Get ByteString
decodeShortBytes = do
    n <- get :: Get Word16
    getByteString (fromIntegral n)
------------------------------------------------------------------------------
-- UUID
-- | A UUID is written as its raw 16 bytes, no length prefix.
encodeUUID :: Putter UUID
encodeUUID = putLazyByteString . UUID.toByteString
decodeUUID :: Get UUID
decodeUUID = do
    uuid <- UUID.fromByteString <$> getLazyByteString 16
    maybe (fail "decode-uuid: invalid") return uuid
------------------------------------------------------------------------------
-- String List
-- | [string list]: 16-bit element count followed by that many [string]s.
encodeList :: Putter [Text]
encodeList sl = do
    put (fromIntegral (length sl) :: Word16)
    mapM_ encodeString sl
decodeList :: Get [Text]
decodeList = do
    n <- get :: Get Word16
    replicateM (fromIntegral n) decodeString
------------------------------------------------------------------------------
-- String Map
-- | [string map]: 16-bit pair count followed by key/value [string]s.
encodeMap :: Putter [(Text, Text)]
encodeMap m = do
    put (fromIntegral (length m) :: Word16)
    forM_ m $ \(k, v) -> encodeString k >> encodeString v
decodeMap :: Get [(Text, Text)]
decodeMap = do
    n <- get :: Get Word16
    replicateM (fromIntegral n) ((,) <$> decodeString <*> decodeString)
------------------------------------------------------------------------------
-- String Multi-Map
-- | [string multimap]: 16-bit entry count, each key with a [string list].
encodeMultiMap :: Putter [(Text, [Text])]
encodeMultiMap mm = do
    put (fromIntegral (length mm) :: Word16)
    forM_ mm $ \(k, v) -> encodeString k >> encodeList v
decodeMultiMap :: Get [(Text, [Text])]
decodeMultiMap = do
    n <- get :: Get Word16
    replicateM (fromIntegral n) ((,) <$> decodeString <*> decodeList)
------------------------------------------------------------------------------
-- Inet Address
-- | [inet]: one length byte (4 or 16), the raw address, then the port as a
-- 32-bit big-endian value.  Unix (and CAN) addresses are rejected.
encodeSockAddr :: Putter SockAddr
encodeSockAddr (SockAddrInet p a) = do
    putWord8 4
    putWord32le a
    putWord32be (fromIntegral p)
encodeSockAddr (SockAddrInet6 p _ (a, b, c, d) _) = do
    putWord8 16
    -- IPv6 words are written in host order, matching 'getIPv6' below.
    putWord32host a
    putWord32host b
    putWord32host c
    putWord32host d
    putWord32be (fromIntegral p)
encodeSockAddr (SockAddrUnix _) = fail "encode-socket: unix address not allowed"
#if MIN_VERSION_network(2,6,1)
encodeSockAddr (SockAddrCan _) = fail "encode-socket: can address not allowed"
#endif
-- | Inverse of 'encodeSockAddr'; flow info and scope id of IPv6 addresses
-- are not on the wire and come back as 0.
decodeSockAddr :: Get SockAddr
decodeSockAddr = do
    n <- getWord8
    case n of
        4 -> do
            i <- getIPv4
            p <- getPort
            return $ SockAddrInet p i
        16 -> do
            i <- getIPv6
            p <- getPort
            return $ SockAddrInet6 p 0 i 0
        _ -> fail $ "decode-socket: unknown: " ++ show n
  where
    getPort :: Get PortNumber
    getPort = fromIntegral <$> getWord32be
    getIPv4 :: Get Word32
    getIPv4 = getWord32le
    getIPv6 :: Get (Word32, Word32, Word32, Word32)
    getIPv6 = (,,,) <$> getWord32host <*> getWord32host <*> getWord32host <*> getWord32host
------------------------------------------------------------------------------
-- Consistency
-- | Map a 'Consistency' level onto its [short] wire code.
encodeConsistency :: Putter Consistency
encodeConsistency Any         = encodeShort 0x00
encodeConsistency One         = encodeShort 0x01
encodeConsistency Two         = encodeShort 0x02
encodeConsistency Three       = encodeShort 0x03
encodeConsistency Quorum      = encodeShort 0x04
encodeConsistency All         = encodeShort 0x05
encodeConsistency LocalQuorum = encodeShort 0x06
encodeConsistency EachQuorum  = encodeShort 0x07
encodeConsistency Serial      = encodeShort 0x08
encodeConsistency LocalSerial = encodeShort 0x09
encodeConsistency LocalOne    = encodeShort 0x0A
-- | Decode a consistency level from its [short] wire code.
-- This is the inverse of 'encodeConsistency'.
decodeConsistency :: Get Consistency
decodeConsistency = decodeShort >>= mapCode
  where
    mapCode 0x00 = return Any
    mapCode 0x01 = return One
    mapCode 0x02 = return Two
    mapCode 0x03 = return Three
    mapCode 0x04 = return Quorum
    mapCode 0x05 = return All
    mapCode 0x06 = return LocalQuorum
    mapCode 0x07 = return EachQuorum
    mapCode 0x08 = return Serial
    mapCode 0x09 = return LocalSerial
    -- BUG FIX: LOCAL_ONE is 0x000A in the CQL binary protocol and is what
    -- 'encodeConsistency' writes; the previous code matched 0x10 here, so
    -- a LocalOne written by this library could never be read back.
    mapCode 0x0A = return LocalOne
    mapCode code = fail $ "decode-consistency: unknown: " ++ show code
------------------------------------------------------------------------------
-- OpCode
-- | Map an 'OpCode' onto its single-byte wire code.  Code 0x04 has no
-- constructor on either side of this pair.
encodeOpCode :: Putter OpCode
encodeOpCode OcError         = encodeByte 0x00
encodeOpCode OcStartup       = encodeByte 0x01
encodeOpCode OcReady         = encodeByte 0x02
encodeOpCode OcAuthenticate  = encodeByte 0x03
encodeOpCode OcOptions       = encodeByte 0x05
encodeOpCode OcSupported     = encodeByte 0x06
encodeOpCode OcQuery         = encodeByte 0x07
encodeOpCode OcResult        = encodeByte 0x08
encodeOpCode OcPrepare       = encodeByte 0x09
encodeOpCode OcExecute       = encodeByte 0x0A
encodeOpCode OcRegister      = encodeByte 0x0B
encodeOpCode OcEvent         = encodeByte 0x0C
encodeOpCode OcBatch         = encodeByte 0x0D
encodeOpCode OcAuthChallenge = encodeByte 0x0E
encodeOpCode OcAuthResponse  = encodeByte 0x0F
encodeOpCode OcAuthSuccess   = encodeByte 0x10
-- | Inverse of 'encodeOpCode'; fails on unknown codes.
decodeOpCode :: Get OpCode
decodeOpCode = decodeByte >>= mapCode
  where
    mapCode 0x00 = return OcError
    mapCode 0x01 = return OcStartup
    mapCode 0x02 = return OcReady
    mapCode 0x03 = return OcAuthenticate
    mapCode 0x05 = return OcOptions
    mapCode 0x06 = return OcSupported
    mapCode 0x07 = return OcQuery
    mapCode 0x08 = return OcResult
    mapCode 0x09 = return OcPrepare
    mapCode 0x0A = return OcExecute
    mapCode 0x0B = return OcRegister
    mapCode 0x0C = return OcEvent
    mapCode 0x0D = return OcBatch
    mapCode 0x0E = return OcAuthChallenge
    mapCode 0x0F = return OcAuthResponse
    mapCode 0x10 = return OcAuthSuccess
    mapCode word = fail $ "decode-opcode: unknown: " ++ show word
------------------------------------------------------------------------------
-- ColumnType
-- | Encode a column type as its [short] option id plus any option payload
-- (class name for custom types, element types for collections).  Note that
-- 'MaybeColumn' has no wire representation of its own: it encodes as its
-- inner type, since nullability is carried by the value's length prefix.
encodeColumnType :: Putter ColumnType
encodeColumnType (CustomColumn x)  = encodeShort 0x0000 >> encodeString x
encodeColumnType AsciiColumn       = encodeShort 0x0001
encodeColumnType BigIntColumn      = encodeShort 0x0002
encodeColumnType BlobColumn        = encodeShort 0x0003
encodeColumnType BooleanColumn     = encodeShort 0x0004
encodeColumnType CounterColumn     = encodeShort 0x0005
encodeColumnType DecimalColumn     = encodeShort 0x0006
encodeColumnType DoubleColumn      = encodeShort 0x0007
encodeColumnType FloatColumn       = encodeShort 0x0008
encodeColumnType IntColumn         = encodeShort 0x0009
encodeColumnType TextColumn        = encodeShort 0x000A
encodeColumnType TimestampColumn   = encodeShort 0x000B
encodeColumnType UuidColumn        = encodeShort 0x000C
encodeColumnType VarCharColumn     = encodeShort 0x000D
encodeColumnType VarIntColumn      = encodeShort 0x000E
encodeColumnType TimeUuidColumn    = encodeShort 0x000F
encodeColumnType InetColumn        = encodeShort 0x0010
encodeColumnType (MaybeColumn x)   = encodeColumnType x
encodeColumnType (ListColumn x)    = encodeShort 0x0020 >> encodeColumnType x
encodeColumnType (MapColumn x y)   = encodeShort 0x0021 >> encodeColumnType x >> encodeColumnType y
encodeColumnType (SetColumn x)     = encodeShort 0x0022 >> encodeColumnType x
encodeColumnType (TupleColumn xs)  = encodeShort 0x0031 >> mapM_ encodeColumnType xs
encodeColumnType (UdtColumn k n xs) = do
    encodeShort 0x0030
    encodeString (unKeyspace k)
    encodeString n
    encodeShort (fromIntegral (length xs))
    forM_ xs $ \(x, t) -> encodeString x >> encodeColumnType t
-- | Inverse of 'encodeColumnType' (never yields 'MaybeColumn').
decodeColumnType :: Get ColumnType
decodeColumnType = decodeShort >>= toType
  where
    toType 0x0000 = CustomColumn <$> decodeString
    toType 0x0001 = return AsciiColumn
    toType 0x0002 = return BigIntColumn
    toType 0x0003 = return BlobColumn
    toType 0x0004 = return BooleanColumn
    toType 0x0005 = return CounterColumn
    toType 0x0006 = return DecimalColumn
    toType 0x0007 = return DoubleColumn
    toType 0x0008 = return FloatColumn
    toType 0x0009 = return IntColumn
    toType 0x000A = return TextColumn
    toType 0x000B = return TimestampColumn
    toType 0x000C = return UuidColumn
    toType 0x000D = return VarCharColumn
    toType 0x000E = return VarIntColumn
    toType 0x000F = return TimeUuidColumn
    toType 0x0010 = return InetColumn
    toType 0x0020 = ListColumn <$> (decodeShort >>= toType)
    toType 0x0021 = MapColumn <$> (decodeShort >>= toType) <*> (decodeShort >>= toType)
    toType 0x0022 = SetColumn <$> (decodeShort >>= toType)
    toType 0x0030 = UdtColumn <$> (Keyspace <$> decodeString) <*> decodeString <*> do
        n <- fromIntegral <$> decodeShort
        replicateM n ((,) <$> decodeString <*> (decodeShort >>= toType))
    toType 0x0031 = TupleColumn <$> do
        n <- fromIntegral <$> decodeShort
        replicateM n (decodeShort >>= toType)
    toType other = fail $ "decode-type: unknown: " ++ show other
------------------------------------------------------------------------------
-- Paging State
-- | Write an opaque paging state as [bytes].
encodePagingState :: Putter PagingState
encodePagingState (PagingState s) = encodeBytes s
-- | Read an optional paging state; 'decodeBytes' yields 'Nothing' for a
-- negative (null) length prefix.  Uses 'fmap' instead of the legacy
-- 'liftM' spelling (identical behaviour).
decodePagingState :: Get (Maybe PagingState)
decodePagingState = fmap PagingState <$> decodeBytes
------------------------------------------------------------------------------
-- Value
-- | Serialise a 'Value' for the given protocol 'Version'.  Every value is
-- framed by a 32-bit [bytes] length; collection element counts and inner
-- frames are 32-bit in V3 but 16-bit in V2.
putValue :: Version -> Putter Value
putValue V3 (CqlList x) = toBytes 4 $ do
    encodeInt (fromIntegral (length x))
    mapM_ (toBytes 4 . putNative) x
putValue V2 (CqlList x) = toBytes 4 $ do
    encodeShort (fromIntegral (length x))
    mapM_ (toBytes 2 . putNative) x
putValue V3 (CqlSet x) = toBytes 4 $ do
    encodeInt (fromIntegral (length x))
    mapM_ (toBytes 4 . putNative) x
putValue V2 (CqlSet x) = toBytes 4 $ do
    encodeShort (fromIntegral (length x))
    mapM_ (toBytes 2 . putNative) x
putValue V3 (CqlMap x) = toBytes 4 $ do
    encodeInt (fromIntegral (length x))
    forM_ x $ \(k, v) -> toBytes 4 (putNative k) >> toBytes 4 (putNative v)
putValue V2 (CqlMap x) = toBytes 4 $ do
    encodeShort (fromIntegral (length x))
    forM_ x $ \(k, v) -> toBytes 2 (putNative k) >> toBytes 2 (putNative v)
-- Tuples and UDTs exist in V3 only: each component framed individually.
putValue V3 (CqlTuple x) = mapM_ (toBytes 4 . putValue V3) x
putValue V3 (CqlUdt x) = mapM_ (toBytes 4 . putValue V3 . snd) x
-- Null is represented by a length of -1 and no payload.
putValue _ (CqlMaybe Nothing)  = put (-1 :: Int32)
putValue v (CqlMaybe (Just x)) = putValue v x
putValue _ value = toBytes 4 $ putNative value
-- | Serialise a single non-collection value without any length framing.
-- Collection constructors are rejected; they must go through 'putValue'.
putNative :: Putter Value
putNative (CqlCustom x)    = putLazyByteString x
putNative (CqlBoolean x)   = putWord8 $ if x then 1 else 0
putNative (CqlInt x)       = put x
putNative (CqlBigInt x)    = put x
putNative (CqlFloat x)     = putFloat32be x
putNative (CqlDouble x)    = putFloat64be x
putNative (CqlText x)      = putByteString (T.encodeUtf8 x)
putNative (CqlUuid x)      = encodeUUID x
putNative (CqlTimeUuid x)  = encodeUUID x
putNative (CqlTimestamp x) = put x
putNative (CqlAscii x)     = putByteString (T.encodeUtf8 x)
putNative (CqlBlob x)      = putLazyByteString x
putNative (CqlCounter x)   = put x
putNative (CqlInet x) = case x of
    IPv4 i -> putWord32le (toHostAddress i)
    IPv6 i -> do
        let (a, b, c, d) = toHostAddress6 i
        putWord32host a
        putWord32host b
        putWord32host c
        putWord32host d
putNative (CqlVarInt x) = integer2bytes x
putNative (CqlDecimal x) = do
    -- A decimal is its scale (32-bit) followed by the unscaled mantissa.
    put (fromIntegral (decimalPlaces x) :: Int32)
    integer2bytes (decimalMantissa x)
putNative v@(CqlList _)  = fail $ "putNative: collection type: " ++ show v
putNative v@(CqlSet _)   = fail $ "putNative: collection type: " ++ show v
putNative v@(CqlMap _)   = fail $ "putNative: collection type: " ++ show v
putNative v@(CqlMaybe _) = fail $ "putNative: collection type: " ++ show v
putNative v@(CqlTuple _) = fail $ "putNative: tuple type: " ++ show v
putNative v@(CqlUdt _)   = fail $ "putNative: UDT: " ++ show v
-- Note: Empty lists, maps and sets are represented as null in cassandra.
-- | Parse a 'Value' of the given column type for the given protocol
-- 'Version'.  Mirrors 'putValue': V3 collections use 32-bit counts and
-- frames, V2 uses 16-bit; a negative [bytes] length denotes null.
getValue :: Version -> ColumnType -> Get Value
getValue V3 (ListColumn t) = CqlList <$> (getList $ do
    len <- decodeInt
    replicateM (fromIntegral len) (withBytes 4 (getNative t)))
getValue V2 (ListColumn t) = CqlList <$> (getList $ do
    len <- decodeShort
    replicateM (fromIntegral len) (withBytes 2 (getNative t)))
getValue V3 (SetColumn t) = CqlSet <$> (getList $ do
    len <- decodeInt
    replicateM (fromIntegral len) (withBytes 4 (getNative t)))
getValue V2 (SetColumn t) = CqlSet <$> (getList $ do
    len <- decodeShort
    replicateM (fromIntegral len) (withBytes 2 (getNative t)))
getValue V3 (MapColumn t u) = CqlMap <$> (getList $ do
    len <- decodeInt
    replicateM (fromIntegral len)
        ((,) <$> withBytes 4 (getNative t) <*> withBytes 4 (getNative u)))
getValue V2 (MapColumn t u) = CqlMap <$> (getList $ do
    len <- decodeShort
    replicateM (fromIntegral len)
        ((,) <$> withBytes 2 (getNative t) <*> withBytes 2 (getNative u)))
getValue V3 (TupleColumn t) = CqlTuple <$> mapM (getValue V3) t
getValue V3 (UdtColumn _ _ x) = CqlUdt <$> do
    let (n, t) = unzip x
    zip n <$> mapM (getValue V3) t
getValue v (MaybeColumn t) = do
    -- Peek at the length prefix: negative means null; otherwise parse the
    -- inner value, which consumes the prefix itself.
    n <- lookAhead (get :: Get Int32)
    if n < 0
        then uncheckedSkip 4 >> return (CqlMaybe Nothing)
        else CqlMaybe . Just <$> getValue v t
getValue _ colType = withBytes 4 $ getNative colType
-- | Parse a single non-collection value from exactly the remaining input
-- of the current frame.  Collection column types are rejected.
getNative :: ColumnType -> Get Value
getNative (CustomColumn _) = CqlCustom <$> remainingBytesLazy
getNative BooleanColumn    = CqlBoolean . (/= 0) <$> getWord8
getNative IntColumn        = CqlInt <$> get
getNative BigIntColumn     = CqlBigInt <$> get
getNative FloatColumn      = CqlFloat <$> getFloat32be
getNative DoubleColumn     = CqlDouble <$> getFloat64be
getNative TextColumn       = CqlText . T.decodeUtf8 <$> remainingBytes
getNative VarCharColumn    = CqlText . T.decodeUtf8 <$> remainingBytes
getNative AsciiColumn      = CqlAscii . T.decodeUtf8 <$> remainingBytes
getNative BlobColumn       = CqlBlob <$> remainingBytesLazy
getNative UuidColumn       = CqlUuid <$> decodeUUID
getNative TimeUuidColumn   = CqlTimeUuid <$> decodeUUID
getNative TimestampColumn  = CqlTimestamp <$> get
getNative CounterColumn    = CqlCounter <$> get
getNative InetColumn = CqlInet <$> do
    -- The address family is implied by the frame length: 4 or 16 bytes.
    len <- remaining
    case len of
        4 -> IPv4 . fromHostAddress <$> getWord32le
        16 -> do
            a <- (,,,) <$> getWord32host <*> getWord32host <*> getWord32host <*> getWord32host
            return $ IPv6 (fromHostAddress6 a)
        n -> fail $ "getNative: invalid Inet length: " ++ show n
getNative VarIntColumn = CqlVarInt <$> bytes2integer
getNative DecimalColumn = do
    x <- get :: Get Int32
    y <- bytes2integer
    return (CqlDecimal (Decimal (fromIntegral x) y))
getNative c@(ListColumn _)   = fail $ "getNative: collection type: " ++ show c
getNative c@(SetColumn _)    = fail $ "getNative: collection type: " ++ show c
getNative c@(MapColumn _ _)  = fail $ "getNative: collection type: " ++ show c
getNative c@(MaybeColumn _)  = fail $ "getNative: collection type: " ++ show c
getNative c@(TupleColumn _)  = fail $ "getNative: tuple type: " ++ show c
getNative c@(UdtColumn _ _ _) = fail $ "getNative: udt: " ++ show c
-- | Run a collection parser inside a 32-bit [bytes] frame; a negative
-- length (null collection) yields the empty list.
getList :: Get [a] -> Get [a]
getList m = do
    n <- lookAhead (get :: Get Int32)
    if n < 0 then uncheckedSkip 4 >> return []
             else withBytes 4 m
-- | Read an s-byte length prefix (s must be 2 or 4) and run the parser on
-- exactly that many bytes.  Fails on a negative (null) length.
withBytes :: Int -> Get a -> Get a
withBytes s p = do
    n <- case s of
        2 -> fromIntegral <$> (get :: Get Word16)
        4 -> fromIntegral <$> (get :: Get Int32)
        _ -> fail $ "withBytes: invalid size: " ++ show s
    when (n < 0) $
        fail "withBytes: null"
    b <- getBytes n
    -- Run the inner parser against the isolated sub-buffer so it cannot
    -- read past the frame.
    case runGet p b of
        Left e -> fail $ "withBytes: " ++ e
        Right x -> return x
-- | Consume everything left in the current frame as a strict ByteString.
remainingBytes :: Get ByteString
remainingBytes = remaining >>= getByteString . fromIntegral
remainingBytesLazy :: Get LB.ByteString
remainingBytesLazy = remaining >>= getLazyByteString . fromIntegral
-- | Frame the output of a 'Put' with a length prefix: unsigned 16-bit when
-- s == 2, signed 32-bit otherwise.
toBytes :: Int -> Put -> Put
toBytes s p = do
    let bytes = runPut p
    case s of
        2 -> put (fromIntegral (B.length bytes) :: Word16)
        _ -> put (fromIntegral (B.length bytes) :: Int32)
    putByteString bytes
-- 'integer2bytes' and 'bytes2integer' implementations are taken
-- from cereal's instance declaration of 'Serialize' for 'Integer'
-- except that no distinction between small and large integers is made.
-- Cf. to LICENSE for copyright details.
-- | Sign byte (signum) followed by the magnitude as a base-256 list,
-- least-significant byte first.
integer2bytes :: Putter Integer
integer2bytes n = do
    put sign
    put (unroll (abs n))
  where
    sign = fromIntegral (signum n) :: Word8
    unroll :: Integer -> [Word8]
    unroll = unfoldr step
      where
        step 0 = Nothing
        step i = Just (fromIntegral i, i `shiftR` 8)
-- | Inverse of 'integer2bytes'.
bytes2integer :: Get Integer
bytes2integer = do
    sign <- get
    bytes <- get
    let v = roll bytes
    return $! if sign == (1 :: Word8) then v else - v
  where
    roll :: [Word8] -> Integer
    roll = foldr unstep 0
      where
        unstep b a = a `shiftL` 8 .|. fromIntegral b
------------------------------------------------------------------------------
-- Various
-- | Read a keyspace name as a [string].
decodeKeyspace :: Get Keyspace
decodeKeyspace = Keyspace <$> decodeString
-- | Read a table name as a [string].
decodeTable :: Get Table
decodeTable = Table <$> decodeString
-- | Read a prepared-query id as [short bytes].
decodeQueryId :: Get (QueryId k a b)
decodeQueryId = QueryId <$> decodeShortBytes
|
whitehead1415/cql
|
src/Database/CQL/Protocol/Codec.hs
|
mpl-2.0
| 21,351
| 0
| 18
| 4,629
| 6,156
| 3,054
| 3,102
| 474
| 23
|
{-# LANGUAGE OverloadedStrings #-}
module Store.AVTest where
import Test.Tasty
import Test.Tasty.HUnit
import qualified Data.ByteString.Char8 as Char8
import Paths_databrary (getDataFileName)
import Store.AV
-- | Probe the bundled WebM sample and check that the container format and
-- the codec of each stream are detected correctly.
test_all :: [TestTree]
test_all =
    [ testCase "sanity" $ do
        filename <- Char8.pack <$> getDataFileName "test/data/small.webm"
        prb <- avProbe filename =<< initAV
        avProbeFormat prb @?= "matroska,webm"
        avProbeStreams prb
            @?= [(AVMediaTypeVideo, "vp8"), (AVMediaTypeAudio, "vorbis")]
    ]
|
databrary/databrary
|
test/Store/AVTest.hs
|
agpl-3.0
| 550
| 0
| 11
| 116
| 131
| 73
| 58
| 15
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module CountdownGame.State.Definitions
( SpielParameter (..)
, Phasen (..)
, istWartend
, istInRunde
, nochZuWartendeSekunden
, ergebnisListe
, zielZahl
, verfuegbareZahlen
, Versuche
, Ergebnis (..), Ergebnisse, berechneErgebnisse
, State (..)
)where
import GHC.Generics (Generic)
import Control.Concurrent.Async (Async)
import Data.Aeson (ToJSON)
import Data.Function (on)
import Data.Int (Int64)
import Data.List (sortBy)
import qualified Data.Map.Strict as M
import Data.Text (Text)
import Data.Time.Clock (UTCTime, NominalDiffTime, getCurrentTime, diffUTCTime)
import Database.Persist.Sql (ConnectionPool)
import Countdown.Game (Attempt, AttemptsMap, Challange, Player, PlayersMap)
import qualified Countdown.Game as G
import CountdownGame.References
-- | Global server state: the current game phase (held in a mutable
-- reference) together with the database connection pool.
data State =
  State
  { aktuellePhase :: Reference Phasen -- ^ mutable reference to the current phase
  , connectionPool :: ConnectionPool -- ^ pool of persistent DB connections
  }
-- | True while the game sits in the waiting phase between rounds.
istWartend :: State -> IO Bool
istWartend state = readRef istPhaseWartend (aktuellePhase state)
  where istPhaseWartend (WartePhase {}) = True
        istPhaseWartend _               = False
-- | True while a round is actively being played.
istInRunde :: State -> IO Bool
istInRunde state = readRef istPhaseRunde (aktuellePhase state)
  where istPhaseRunde (RundePhase {}) = True
        istPhaseRunde _               = False
-- | Seconds remaining until the current timed phase (waiting or round)
-- ends; 0 when no timed phase is active or the deadline has already passed.
nochZuWartendeSekunden :: State -> IO Int
nochZuWartendeSekunden state = do
  now <- getCurrentTime
  readRef (seks now) $ aktuellePhase state
  where seks n (WartePhase t _ _) = bisT n t
        seks n (RundePhase t _ _ _ _) = bisT n t
        seks _ _ = 0
        -- BUG FIX: the original clamped with 'min 0', which returns 0 for
        -- any future deadline and a negative count for a past one — the
        -- inverse of "seconds still to wait". 'max 0' yields the positive
        -- remaining time, clamped to 0 once the deadline has elapsed.
        bisT n t = truncate $ max 0 $ t `diffUTCTime` n
-- | Results of the previous round; empty outside the waiting phase.
ergebnisListe :: State -> IO Ergebnisse
ergebnisListe state = readRef resultate (aktuellePhase state)
  where resultate (WartePhase _ es _) = es
        resultate _                   = []
-- | Target number of the running round, if a round is in progress.
zielZahl :: State -> IO (Maybe Int)
zielZahl state = readRef ziel (aktuellePhase state)
  where ziel (RundePhase _ ch _ _ _) = Just (G.targetNumber ch)
        ziel _                       = Nothing
-- | Numbers available in the running round; empty outside a round.
verfuegbareZahlen :: State -> IO [Int]
verfuegbareZahlen state = readRef zahlen (aktuellePhase state)
  where zahlen (RundePhase _ ch _ _ _) = G.availableNumbers ch
        zahlen _                       = []
-- | Timing configuration for a game: how long the waiting phase between
-- rounds and each round itself should last.
data SpielParameter =
  SpielParameter
  { warteZeit :: NominalDiffTime -- ^ duration of the waiting phase
  , rundenZeit :: NominalDiffTime -- ^ duration of one round
  }
-- | The game's phase machine: initial state, waiting between rounds, or a
-- running round.
data Phasen
  = Start
  | WartePhase
    { startNaechsteRunde :: UTCTime -- ^ when the next round begins
    , letzteErgebnisse :: Ergebnisse -- ^ results of the previous round
    , naechsteChallange :: Async Challange } -- next challenge, prepared in the background
  | RundePhase
    { endeRunde :: UTCTime -- ^ when the current round ends
    , aufgabe :: Challange -- ^ the round's challenge
    , spielerVersuche :: Versuche -- ^ players' attempts so far
    , databaseKey :: Int64 -- ^ DB key of the persisted round
    , ergebnisse :: Async Ergebnisse } -- results, computed in the background
-- | Mutable map of per-player attempts for the running round.
type Versuche = Reference AttemptsMap

-- | A round's scored results.
type Ergebnisse = [Ergebnis]

-- | One player's scored result for a round; serialised to JSON for the
-- web frontend via the derived 'Generic' instance.
data Ergebnis =
  Ergebnis
  { name :: Text -- ^ player nickname
  , score :: Int -- ^ points awarded
  , value :: Maybe Int -- ^ computed value, if the submitted formula was valid
  , difference :: Maybe Int -- ^ distance to the target number
  , formula :: Text -- ^ the submitted formula
  } deriving (Generic, Show)

instance ToJSON Ergebnis
-- | Turn the attempts map into a result list, ordered best score first.
berechneErgebnisse :: AttemptsMap -> Ergebnisse
berechneErgebnisse attMap = sortBy absteigend (map toErgebnis (M.toList attMap))
  where
    -- Descending order: compare the negated scores.
    absteigend x y = compare (negate (score x)) (negate (score y))
    toErgebnis (_, att) =
      Ergebnis { name = G.nickName (G.fromPlayer att)
               , score = G.score att
               , value = G.value att
               , difference = G.difference att
               , formula = G.formula att
               }
|
CarstenKoenig/DOS2015
|
CountdownGame/src/web/CountdownGame/State/Definitions.hs
|
unlicense
| 3,266
| 0
| 11
| 873
| 976
| 540
| 436
| 92
| 3
|
{- |
If we list all the natural numbers below 10 that are multiples
of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
-}
-- | Sum of all natural numbers below 1000 that are multiples of 3 or 5
-- (Project Euler problem 1).
ans = sum (filter multipleOf3Or5 [0 .. 999])
  where multipleOf3Or5 x = x `mod` 3 == 0 || x `mod` 5 == 0
-- | Print the answer to stdout.
main = print ans
|
PiJoules/Project-Euler-Haskell
|
prob1.hs
|
unlicense
| 289
| 0
| 12
| 74
| 72
| 37
| 35
| 2
| 1
|
{-# START_FILE foo.hs #-}
module Foo (foo) where
-- | Constant function: ignores its argument and yields 1.
foo _ = 1
|
egaburov/funstuff
|
Haskell/BartoszBofH/3_PureFunctions/foo.hs
|
apache-2.0
| 60
| 0
| 5
| 13
| 18
| 11
| 7
| 2
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module Codec.Packstream.CodingTest (
unitTests
) where
import Control.Monad
import Data.Binary (get, put)
import qualified Data.Binary.Get as G
import qualified Data.Binary.Put as P
import qualified Data.Vector as V
import Test.SmallCheck.Series.Instances ()
import Test.Tasty
import qualified Test.Tasty.SmallCheck as SC
import qualified Codec.Packstream.Coding as PSC
import Test.SCInstances
-- | Round-trip property tests for Codec.Packstream.Coding: each encoder
-- ('put*') composed with its decoder ('get*') must be the identity on the
-- generated input.
unitTests :: TestTree
unitTests =
  testGroup "Codec.Packstream.Coding" [
    SC.testProperty "marker coding" $ SC.changeDepth (const 255) $ propVerifyCoding putMarker getMarker,
    SC.testProperty "signature coding" $ SC.changeDepth (const 255) $ propVerifyCoding putSignature getSignature,
    SC.testProperty "null coding" $ propVerifyCoding (const PSC.putNull) PSC.getNull,
    SC.testProperty "bool coding" $ propVerifyCoding PSC.putBool PSC.getBool,
    SC.testProperty "float64 coding" $ propVerifyCoding PSC.putFloat64 PSC.getFloat64,
    SC.testProperty "text coding" $ propVerifyCoding PSC.putText PSC.getText,
    testGroup "int coding" [
      SC.testProperty "tinyInt coding" $ propVerifyMaybeCoding PSC.putTinyInt PSC.getTinyInt,
      SC.testProperty "int8 coding" $ SC.changeDepth (const 255) $ propVerifyCoding PSC.putInt8 PSC.getInt8,
      SC.testProperty "int16 coding" $ propVerifyCoding PSC.putInt16 PSC.getInt16,
      SC.testProperty "int32 coding" $ propVerifyCoding PSC.putInt32 PSC.getInt32,
      SC.testProperty "int64 coding" $ propVerifyCoding PSC.putInt64 PSC.getInt64
    ],
    testGroup "container coding" [
      SC.testProperty "bool vector coding" $ propVerifyCoding (putVec PSC.putBool) (getVec PSC.getBool),
      SC.testProperty "int8 list coding" $ propVerifyCoding (putList PSC.putInt8) (getList PSC.getInt8),
      SC.testProperty "(text, int8) map coding" $ SC.changeDepth (const 4) $ propVerifyCoding (putMap PSC.putInt8) (getMap PSC.getInt8)
    ]
  ]
  where
    -- Encode the Vec-of-entries wrapper as a PackStream map.
    putMap putElt vec = PSC.putMap $ V.map (putEntry putElt) $ V.map unEntry $ vector vec
    putEntry putElt (k, v) = PSC.putEntry (PSC.putText k) (putElt v)
    -- Decode a PackStream map back into the Vec-of-entries wrapper.
    getMap getElt = fmap MkVec $ liftM (V.map MkEntry) $ PSC.getMap $ PSC.getEntry PSC.getText getElt
    putList putElt lst = PSC.streamList $ map putElt lst
    getList = PSC.unStreamList
    putVec putElt vec = PSC.putVector $ V.map putElt $ vector vec
    getVec getElt = liftM MkVec $ PSC.getVector getElt
    -- Marker/signature wrappers go through their Binary instances.
    putMarker = put . marker
    getMarker = liftM MkMarker get
    putSignature = put . signature
    getSignature = liftM MkSignature get
    -- A 'Just' putter must round-trip; a 'Nothing' putter passes vacuously.
    propVerifyMaybeCoding maybePut getter inputValue =
      case maybePut inputValue of
        Just putter -> inputValue == G.runGet getter (P.runPut putter)
        Nothing -> True
    -- Core round-trip property: decode (encode x) == x.
    propVerifyCoding putter getter inputValue = inputValue == decodedValue
      where
        encodedBytes = P.runPut (putter inputValue)
        decodedValue = G.runGet getter encodedBytes
|
boggle/neo4j-haskell-driver
|
test/Codec/Packstream/CodingTest.hs
|
apache-2.0
| 3,200
| 0
| 14
| 693
| 857
| 436
| 421
| 53
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.DeploymentConfigRollback where
import GHC.Generics
import Data.Text
import Openshift.V1.DeploymentConfigRollbackSpec
import qualified Data.Aeson
-- | TypeMeta describes an individual object in an API response or request with strings representing the type of the object and its API schema version. Structures that are versioned or persisted should inline TypeMeta.
data DeploymentConfigRollback = DeploymentConfigRollback
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , spec :: DeploymentConfigRollbackSpec -- ^ options for rollback generation
  } deriving (Show, Eq, Generic)

-- JSON (de)serialisation via the generically derived defaults; field names
-- map directly onto the JSON keys.
instance Data.Aeson.FromJSON DeploymentConfigRollback
instance Data.Aeson.ToJSON DeploymentConfigRollback
|
minhdoboi/deprecated-openshift-haskell-api
|
openshift/lib/Openshift/V1/DeploymentConfigRollback.hs
|
apache-2.0
| 1,432
| 0
| 9
| 190
| 106
| 65
| 41
| 17
| 0
|
-- inspired by http://dlaing.org/cofun/posts/free_and_cofree.html
-- see credits in that post/ Ed Kmett and Dan Piponi
module PolyGraph.Common.DslSupport.Pairing (Pairing(..)) where
import Data.Functor.Identity
import qualified Control.Monad.Free as Free
import qualified Control.Comonad.Cofree as Cofree
import PolyGraph.Common.DslSupport.Coproduct
import PolyGraph.Common.DslSupport.Product
--import qualified Control.Monad.Trans.Free as TFree
--import qualified Control.Comonad.Trans.Cofree as TCofree
-- | Two functors pair when, given a combining function for their payloads,
-- any @f a@ can be annihilated against any @g b@ to yield a single @r@.
-- The functional dependencies make the pairing partner unique in both
-- directions.
class Pairing f g | f -> g, g -> f where
  pair :: (a -> b -> r) -> f a -> g b -> r
--class (Functor f, Functor g) => Pairing f g where
-- pair :: (a -> b -> r) -> f a -> g b -> r
-- | 'Identity' pairs with itself: unwrap both sides and combine.
instance Pairing Identity Identity where
  pair combine fa gb = combine (runIdentity fa) (runIdentity gb)
-- | A reader pairs with a writer: feed the stored value to the function.
-- The lazy pattern keeps the tuple unevaluated until a component is used,
-- matching 'uncurry'.
instance Pairing ((->) a) ((,) a) where
  pair combine f ~(x, b) = combine (f x) b
-- | A writer pairs with a reader: run the function on the stored value.
-- Lazy pattern preserves the original's use of 'fst'/'snd'.
instance Pairing ((,) a) ((->) a) where
  pair combine ~(x, b) g = combine b (g x)
-- | A cofree comonad (an infinite annotated tree) pairs with the free monad
-- over its pairing partner: a 'Free.Pure' leaf combines with the current
-- annotation; a 'Free.Free' layer is matched against the branch structure,
-- recursing via the underlying pairing.
instance Pairing f g => Pairing (Cofree.Cofree f) (Free.Free g) where
  pair p (a Cofree.:< _ ) (Free.Pure x) = p a x
  pair p (_ Cofree.:< fs) (Free.Free gs) = pair (pair p) fs gs
-- | A coproduct pairs with the product of the partners: the injection
-- ('InL'/'InR') selects which component of the product to pair against.
instance (Pairing f f', Pairing g g') => Pairing (f :+: g) (f' :*: g') where
  pair p (InL x) (Pair a _) = pair p x a
  pair p (InR x) (Pair _ b) = pair p x b

-- | Symmetric case: a product pairs with the coproduct of the partners.
instance (Pairing f f', Pairing g g') => Pairing (f :*: g) (f' :+: g') where
  pair p (Pair a _) (InL x) = pair p a x
  pair p (Pair _ b) (InR x) = pair p b x
|
rpeszek/GraphPlay
|
src/PolyGraph/Common/DslSupport/Pairing.hs
|
bsd-3-clause
| 1,428
| 0
| 10
| 296
| 596
| 317
| 279
| -1
| -1
|
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module Data.Bugs where
import Conduit
import Data.Map
import Data.Text
import Data.Time
import Text.XML.Cursor.Generic
import Control.Comonad
-- NOTE(review): orphan instance for 'Text.XML.Cursor.Generic.Cursor' —
-- consider moving it next to the type or wrapping the type locally.
instance Functor Cursor where
  fmap f c = Cursor
    { parent' = fmap f (parent' c) -- NOTE(review): if parent' holds Cursor values, this needs fmap (fmap f) — confirm field types
    , precedingSibling' = fmap f (precedingSibling' c)
    , followingSibling' = fmap f (followingSibling' c)
    , child = fmap f child -- NOTE(review): 'child' here is the field accessor, not 'child c' — almost certainly a bug
    , node = f (node c)
    }
-- NOTE(review): only 'extract' is defined; 'Comonad' also requires
-- 'duplicate' or 'extend' — without one, those methods have no
-- implementation here. Confirm the minimal definition is satisfied.
instance Comonad Cursor where
  extract cursor = node cursor -- the focused node is the extracted value
-- Open type families parameterising a bug-tracker backend @a@: each backend
-- chooses its own concrete representation for these domain concepts.
type family Product a :: *
type family Component a :: *
type family Version a :: *
type family Milestone a :: *
type family Platform a :: *
type family System a :: *
type family Status a :: *
type family Resolution a :: *
type family Severity a :: *
type family User a :: *
type family AttachType a :: *
-- | A single bug report, polymorphic in the backend @a@; backend-specific
-- field representations come from the type families declared in this module.
data Bug a = Bug
  { bugNo :: Int -- Value within a range
  , bugCreated :: UTCTime -- creation timestamp
  , bugModified :: UTCTime -- last-modification timestamp
  , bugTitle :: Text
  , bugSummary :: Text
  , bugProduct :: Product a
  , bugComponent :: Component a
  , bugVersion :: Version a
  , bugMilestone :: Milestone a
  , bugPlatform :: Platform a
  , bugOS :: System a
  , bugStatus :: Status a
  , bugResolution :: Resolution a
  , bugKeywords :: [Text]
  , bugPriority :: Int
  , bugSeverity :: Severity a
  , bugReporter :: User a
  , bugAssignee :: User a
  , bugCc :: [User a]
  , bugComments :: [BugComment a]
  , bugMeta :: Map Text Text -- backend-specific extra fields
  }
-- | One comment on a bug, with any attachments it carries.
data BugComment a = BugComment
  { commentId :: Int
  , commentCreated :: UTCTime
  , commentAuthor :: User a
  , commentText :: Text
  , commentAttachments :: [BugAttachment a]
  }
-- | A file attached to a bug comment.
data BugAttachment a = BugAttachment
  { attachId :: Int
  , attachCreated :: UTCTime
  , attachModified :: UTCTime
  , attachTitle :: Text
  , attachFilename :: FilePath
  , attachType :: AttachType a
  , attachSize :: AttachType a -- NOTE(review): 'AttachType a' for a size looks like a copy-paste of the line above — presumably a numeric size type was intended; confirm
  , attachSubmitter :: User a
  }
-- sourceBugs :: Connection a -> Producer m (Bug a)
-- sourceBugs conn = do
|
jwiegley/bugs
|
Data/Bugs.hs
|
bsd-3-clause
| 2,287
| 0
| 10
| 740
| 562
| 337
| 225
| 67
| 0
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- really, GHC, really?
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module Math.Category.Sum
( (+)(..)
, sumOb
) where
import Prelude (Either(..))
import Data.Constraint
import Data.Proxy
import Math.Category
import Math.Groupoid
-- | Case analysis on an object of a sum category: @o@ is either an object
-- of @p@ (tagged 'Left') or of @q@ (tagged 'Right'). 'sumOb' eliminates the
-- tag, giving the caller the matching 'Ob' constraint and type equality.
class SumOb (p :: i -> i -> *) (q :: j -> j -> *) (o :: Either i j) where
  sumOb :: proxy1 p -> proxy2 q -> proxy3 o ->
    (forall a. Ob p a => (o ~ Left a) => r) -> (forall b. Ob q b => (o ~ Right b) => r) -> r
-- | 'Left'-tagged objects select the first continuation.
instance Ob p a => SumOb p q (Left a) where
  sumOb _ _ _ l _ = l

-- | 'Right'-tagged objects select the second continuation.
instance Ob q b => SumOb p q (Right b) where
  sumOb _ _ _ _ r = r
-- | The sum (coproduct) of two categories: a morphism exists only between
-- two 'Left' objects (a @p@ morphism) or two 'Right' objects (a @q@
-- morphism); there are no cross-component morphisms.
data (+) :: (i -> i -> *) -> (j -> j -> *) -> Either i j -> Either i j -> * where
  L :: p a b -> (p + q) (Left a) (Left b)
  R :: q a b -> (p + q) (Right a) (Right b)
instance (Category p, Category q) => Category (p + q) where
  type Ob (p + q) = SumOb p q
  -- Identity: eliminate the Left/Right tag with 'sumOb', then inject the
  -- component category's identity on the matching side.
  id = it where
    it :: forall o. SumOb p q o => (p + q) o o
    it = sumOb (Proxy :: Proxy p) (Proxy :: Proxy q) (Proxy :: Proxy o) (L id) (R id)
  -- Composition is componentwise; cross-component cases are impossible by
  -- the GADT's types (incomplete-pattern warnings are suppressed above).
  L f . L g = L (f . g)
  R f . R g = R (f . g)
  -- Source/target dictionaries come from the component morphism.
  source (L p) = case source p of Dict -> Dict
  source (R q) = case source q of Dict -> Dict
  target (L p) = case target p of Dict -> Dict
  target (R q) = case target q of Dict -> Dict
-- | Inverses are taken componentwise.
instance (Groupoid p, Groupoid q) => Groupoid (p + q) where
  inv (L f) = L (inv f)
  inv (R g) = R (inv g)
|
ekmett/categories
|
src/Math/Category/Sum.hs
|
bsd-3-clause
| 1,767
| 0
| 18
| 434
| 819
| 428
| 391
| -1
| -1
|
module Sylvan where
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Marshal.Array
import Data.Word
import Control.Monad
import Data.Bits
import Control.Monad.Primitive
--Lace
-- | Initialise the Lace work-stealing framework with the given number of
-- worker threads and task deque size (values are passed straight through to
-- the C API — see the Lace headers for the meaning of 0/defaults).
foreign import ccall safe "lace_init"
    c_laceInit :: CInt -> CInt -> IO ()

laceInit :: PrimMonad m => Int -> Int -> m ()
laceInit workers deque = unsafePrimToPrim $ c_laceInit (fromIntegral workers) (fromIntegral deque)

foreign import ccall safe "lace_startup"
    c_laceStartup :: CInt -> Ptr () -> Ptr () -> IO ()

-- | Complete Lace startup with no startup callback (null pointers, size 0).
laceStartup :: PrimMonad m => m ()
laceStartup = unsafePrimToPrim $ c_laceStartup 0 nullPtr nullPtr
--Sylvan
-- C-side BDD node reference and its Haskell-side newtype wrapper.
type CBDD = CLLong
newtype BDD = BDD CBDD deriving (Eq, Show)

-- BDD variable indices, C-side and Haskell-side.
type CBDDVar = CUInt
type BDDVar = Word32

-- Variable-to-BDD substitution maps, represented as BDD node references.
type CBDDMap = CLLong
newtype BDDMap = BDDMap CBDDMap deriving (Show)

-- Complement bit used by Sylvan's edge encoding.
-- NOTE(review): 0x8000000000000000 exceeds CLLong's positive range and
-- relies on wrap-around to the sign bit — confirm this matches the C side.
c_sylvanComplement :: CLLong
c_sylvanComplement = 0x8000000000000000

c_sylvanFalse :: CLLong
c_sylvanFalse = 0x0000000000000000

-- True is the complement edge of false.
c_sylvanTrue :: CLLong
c_sylvanTrue = c_sylvanFalse .|. c_sylvanComplement

sylvanFalse = BDD c_sylvanFalse
sylvanTrue = BDD c_sylvanTrue
foreign import ccall safe "sylvan_init_mtbdd"
    c_sylvanInit :: IO ()

-- | Initialise Sylvan's MTBDD subsystem.
sylvanInit :: PrimMonad m => m ()
sylvanInit = unsafePrimToPrim c_sylvanInit

foreign import ccall safe "sylvan_init_package"
    c_sylvanInitPackage :: CInt -> CInt -> CInt -> CInt -> IO ()

-- | Initialise the Sylvan package with node-table and operation-cache
-- sizes (initial and maximum), passed through to the C API.
sylvanInitPackage :: PrimMonad m => Int -> Int -> Int -> Int -> m ()
sylvanInitPackage tableSize maxSize cacheSize maxCacheSize = unsafePrimToPrim $ c_sylvanInitPackage (fromIntegral tableSize) (fromIntegral maxSize) (fromIntegral cacheSize) (fromIntegral maxCacheSize)

foreign import ccall safe "sylvan_quit"
    c_sylvanQuit :: IO ()

-- | Shut Sylvan down and release its resources.
sylvanQuit :: PrimMonad m => m ()
sylvanQuit = unsafePrimToPrim c_sylvanQuit

foreign import ccall safe "sylvan_ithvar"
    c_ithVar :: CBDDVar -> IO CBDD

-- | BDD for the positive literal of variable @var@.
ithVar :: PrimMonad m => BDDVar -> m BDD
ithVar var = liftM BDD $ unsafePrimToPrim $ c_ithVar (fromIntegral var)

-- | BDD for the negated literal of variable @var@ (complement edge set).
nithVar :: PrimMonad m => BDDVar -> m BDD
nithVar var = liftM (BDD . xor c_sylvanComplement) $ unsafePrimToPrim $ c_ithVar (fromIntegral var)
foreign import ccall safe "mtbdd_ref"
    c_ref :: CBDD -> IO (CBDD)

-- | Protect a BDD from garbage collection (reference it).
ref :: PrimMonad m => BDD -> m BDD
ref (BDD bdd) = liftM BDD $ unsafePrimToPrim $ c_ref bdd

-- | Protect a BDD map from garbage collection (maps share the node space).
refMap :: PrimMonad m => BDDMap -> m BDDMap
refMap (BDDMap bdd) = liftM BDDMap $ unsafePrimToPrim $ c_ref bdd

foreign import ccall safe "mtbdd_deref"
    c_deref :: CBDD -> IO ()

-- | Drop a previously taken reference on a BDD.
deref :: PrimMonad m => BDD -> m ()
deref (BDD bdd) = unsafePrimToPrim $ c_deref bdd

-- | Drop a previously taken reference on a BDD map.
derefMap :: PrimMonad m => BDDMap -> m ()
derefMap (BDDMap bdd) = unsafePrimToPrim $ c_deref bdd

foreign import ccall safe "sylvan_gc_stub"
    c_gc :: IO ()

-- | Trigger a Sylvan garbage collection.
gc :: PrimMonad m => m ()
gc = unsafePrimToPrim $ c_gc

foreign import ccall safe "sylvan_gc_enable"
    c_gcEnable :: IO ()

-- | Allow Sylvan to garbage-collect automatically.
gcEnable :: PrimMonad m => m ()
gcEnable = unsafePrimToPrim c_gcEnable

foreign import ccall safe "sylvan_gc_disable"
    c_gcDisable :: IO ()

-- | Forbid automatic garbage collection.
gcDisable :: PrimMonad m => m ()
gcDisable = unsafePrimToPrim c_gcDisable
-- | Negation is pure: flip the complement bit on the edge.
neg :: BDD -> BDD
neg (BDD x) = BDD $ xor c_sylvanComplement x

foreign import ccall safe "sylvan_ite_stub"
    c_ite :: CBDD -> CBDD -> CBDD -> IO CBDD

-- | If-then-else: @ite a b c@ is (a AND b) OR (NOT a AND c).
ite :: PrimMonad m => BDD -> BDD -> BDD -> m BDD
ite (BDD a) (BDD b) (BDD c) = liftM BDD $ unsafePrimToPrim $ c_ite a b c

foreign import ccall safe "sylvan_xor_stub"
    c_xor :: CBDD -> CBDD -> IO CBDD

-- | Exclusive or.
bxor :: PrimMonad m => BDD -> BDD -> m BDD
bxor (BDD a) (BDD b) = liftM BDD $ unsafePrimToPrim $ c_xor a b

-- | Biconditional: the negation of xor.
bequiv :: PrimMonad m => BDD -> BDD -> m BDD
bequiv a b = liftM neg $ bxor a b

foreign import ccall safe "sylvan_and_stub"
    c_and :: CBDD -> CBDD -> IO CBDD

-- | Conjunction.
band :: PrimMonad m => BDD -> BDD -> m BDD
band (BDD a) (BDD b) = liftM BDD $ unsafePrimToPrim $ c_and a b
-- | Disjunction via De Morgan: a OR b = NOT (NOT a AND NOT b).
bor :: PrimMonad m => BDD -> BDD -> m BDD
bor a b = fmap neg (band (neg a) (neg b))

-- | Negated conjunction.
bnand :: PrimMonad m => BDD -> BDD -> m BDD
bnand a b = fmap neg (band a b)

-- | Negated disjunction.
bnor :: PrimMonad m => BDD -> BDD -> m BDD
bnor a b = fmap neg (bor a b)

-- | Implication a => b, as NOT (a AND NOT b).
bimp :: PrimMonad m => BDD -> BDD -> m BDD
bimp a b = fmap neg (band a (neg b))

-- | Converse implication b => a, as NOT (NOT a AND b).
bimpinv :: PrimMonad m => BDD -> BDD -> m BDD
bimpinv a b = fmap neg (band (neg a) b)

-- | Biconditional; identical to 'bequiv'.
biimp :: PrimMonad m => BDD -> BDD -> m BDD
biimp = bequiv

-- | Difference: a AND NOT b.
diff :: PrimMonad m => BDD -> BDD -> m BDD
diff a b = band a (neg b)

-- | NOT a AND b.
less :: PrimMonad m => BDD -> BDD -> m BDD
less a b = band (neg a) b
foreign import ccall safe "sylvan_exists_stub"
    c_exists :: CBDD -> CBDD -> IO CBDD

-- | Existentially quantify @variables@ (a variable cube) out of @a@.
exists :: PrimMonad m => BDD -> BDD -> m BDD
exists (BDD a) (BDD variables) = liftM BDD $ unsafePrimToPrim $ c_exists a variables

-- | Universal quantification via duality: forall x. a = NOT exists x. NOT a.
forall :: PrimMonad m => BDD -> BDD -> m BDD
forall a variables = liftM neg $ exists (neg a) variables

foreign import ccall safe "sylvan_and_exists_stub"
    c_and_exists :: CBDD -> CBDD -> CBDD -> IO CBDD

-- | Fused relational product: exists vars. (a AND b), in one pass.
andExists :: PrimMonad m => BDD -> BDD -> BDD -> m BDD
andExists (BDD a) (BDD b) (BDD vars) = liftM BDD $ unsafePrimToPrim $ c_and_exists a b vars

foreign import ccall safe "mtbdd_fromarray"
    c_setFromArray :: Ptr CBDDVar -> CSize -> IO CBDD

-- | Build a variable cube from a list of variable indices.
setFromArray :: PrimMonad m => [BDDVar] -> m BDD
setFromArray vars = liftM BDD $ unsafePrimToPrim $
    withArrayLen (map fromIntegral vars) $ \l p ->
        c_setFromArray p (fromIntegral l)

-- | The empty substitution map (represented by the false terminal).
mapEmpty :: BDDMap
mapEmpty = BDDMap c_sylvanFalse

foreign import ccall safe "mtbdd_map_add"
    c_mapAdd :: CBDDMap -> CBDDVar -> CBDD -> IO CBDDMap

-- | Extend a substitution map: variable @var@ maps to BDD @x@.
mapAdd :: PrimMonad m => BDDMap -> BDDVar -> BDD -> m BDDMap
mapAdd (BDDMap m) var (BDD x) = liftM BDDMap $ unsafePrimToPrim $ c_mapAdd m (fromIntegral var) x

foreign import ccall safe "sylvan_compose_stub"
    c_compose :: CBDD -> CBDDMap -> IO CBDD

-- | Apply a substitution map to a BDD (functional composition).
compose :: PrimMonad m => BDD -> BDDMap -> m BDD
compose (BDD f) (BDDMap m) = liftM BDD $ unsafePrimToPrim $ c_compose f m
----TODO: doesnt seem to exist
--foreign import ccall safe "sylvan_report_stats"
-- c_reportStats :: IO ()
--
--reportStats :: PrimMonad m => m ()
--reportStats = unsafePrimToPrim c_reportStats
--
--foreign import ccall safe "sylvan_printdot"
-- c_printDot :: CBDD -> IO ()
--
--printDot :: PrimMonad m => BDD -> m ()
--printDot (BDD x) = unsafePrimToPrim $ c_printDot x
--
----TODO: a macro
--foreign import ccall safe "sylvan_print"
-- c_bddPrint :: CBDD -> IO ()
--
--bddPrint :: PrimMonad m => BDD -> m ()
--bddPrint (BDD x) = unsafePrimToPrim $ c_bddPrint x
--
----TODO: a macro
--foreign import ccall safe "sylvan_printsha"
-- c_printSHA :: CBDD -> IO ()
--
--printSHA :: BDD -> IO ()
--printSHA (BDD x) = unsafePrimToPrim $ c_printSHA x
foreign import ccall safe "sylvan_cube"
    c_cube :: CBDD -> Ptr CUChar -> IO CBDD

-- | Literal polarity used when building a cube: negated, positive, or
-- unconstrained.
data Polarity =
    Negative
    | Positive
    | DontCare
    deriving (Show)

-- | Encoding expected by sylvan_cube: 0 = negative, 1 = positive,
-- 2 = don't care.
polarityToInt :: Integral i => Polarity -> i
polarityToInt Negative = 0
polarityToInt Positive = 1
polarityToInt DontCare = 2

-- | Build the cube over the variable set @vars@ with the given
-- per-variable polarities (order must match the variable cube).
cube :: PrimMonad m => BDD -> [Polarity] -> m BDD
cube (BDD vars) polarities = liftM BDD $ unsafePrimToPrim $
    withArrayLen (map polarityToInt polarities) $ \_ pp ->
        c_cube vars pp
|
adamwalker/sylvan-haskell
|
Sylvan.hs
|
bsd-3-clause
| 7,054
| 0
| 11
| 1,452
| 2,457
| 1,232
| 1,225
| 145
| 1
|
-- | Main module
module Main where
import Lib (someFunc)
--import Week01.Lyah02
-- | Program entry point: delegates directly to 'someFunc' from "Lib".
main :: IO ()
main = someFunc
|
emaphis/Haskell-Practice
|
cis194/app/Main.hs
|
bsd-3-clause
| 147
| 0
| 6
| 31
| 33
| 20
| 13
| 5
| 1
|
{-# LANGUAGE MultiParamTypeClasses, GeneralizedNewtypeDeriving, DeriveDataTypeable, ScopedTypeVariables #-}
module B.Shake.File(
need, want,
defaultRuleFile,
(*>), (**>), (?>), phony,
newCache, newCacheIO
) where
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.HashMap.Strict as Map
import System.Directory
import B.Shake.Classes
import B.Shake.Core
import B.Shake.Core.Rules.Internal
import B.Shake.FilePattern
import B.Shake.FileTime
import B.Shake.Locks
import B.Shake.Types
import System.FilePath(takeDirectory) -- important that this is the system local filepath, or wrong slashes go wrong
infix 1 *>, ?>, **>
-- | Rule key: a file path as a packed byte string.
newtype FileQ = FileQ BS
    deriving (Typeable,Eq,Hashable,Binary,NFData)

-- Show the raw path, not the constructor, for readable build output.
instance Show FileQ where show (FileQ x) = unpack x

-- | Rule answer: the file's modification time.
newtype FileA = FileA FileTime
    deriving (Typeable,Eq,Hashable,Binary,Show,NFData)

-- | A file's stored value is its modification time, if the file exists.
instance Rule FileQ FileA where
    storedValue (FileQ x) = fmap (fmap FileA) $ getModTimeMaybe $ unpack_ x
{-
observed act = do
src <- getCurrentDirectory
old <- listDir src
sleepFileTime
res <- act
new <- listDir src
let obs = compareItems old new
-- if we didn't find anything used, then most likely we aren't tracking access time close enough
obs2 = obs{used = if used obs == Just [] then Nothing else (used obs)}
return (obs2, res)
data Item = ItemDir [(String,Item)] -- sorted
| ItemFile (Maybe FileTime) (Maybe FileTime) -- mod time, access time
deriving Show
listDir :: FilePath -> IO Item
listDir root = do
xs <- getDirectoryContents root
xs <- return $ sort $ filter (not . all (== '.')) xs
fmap ItemDir $ forM xs $ \x -> fmap ((,) x) $ do
let s = root </> x
b <- doesFileExist s
if b then listFile s else listDir s
listFile :: FilePath -> IO Item
listFile x = do
let f x = Control.Exception.catch (fmap Just x) $ \(_ :: SomeException) -> return Nothing
mod <- f $ getModTime x
acc <- f $ getAccTime x
return $ ItemFile mod acc
compareItems :: Item -> Item -> Observed File
compareItems = f ""
where
f path (ItemFile mod1 acc1) (ItemFile mod2 acc2) =
Observed (Just [File path | mod1 /= mod2]) (Just [File path | acc1 /= acc2])
f path (ItemDir xs) (ItemDir ys) = mconcat $ map g $ zips xs ys
where g (name, Just x, Just y) = f (path </> name) x y
g (name, x, y) = Observed (Just $ concatMap (files path) $ catMaybes [x,y]) Nothing
f path _ _ = Observed (Just [File path]) Nothing
files path (ItemDir xs) = concat [files (path </> a) b | (a,b) <- xs]
files path _ = [File path]
zips :: Ord a => [(a,b)] -> [(a,b)] -> [(a, Maybe b, Maybe b)]
zips ((x1,x2):xs) ((y1,y2):ys)
| x1 == y1 = (x1,Just x2,Just y2):zips xs ys
| x1 < y1 = (x1,Just x2,Nothing):zips xs ((y1,y2):ys)
| otherwise = (y1,Nothing,Just y2):zips ((x1,x2):xs) ys
zips xs ys = [(a,Just b,Nothing) | (a,b) <- xs] ++ [(a,Nothing,Just b) | (a,b) <- ys]
-}
-- | This function is not actually exported, but Haddock is buggy. Please ignore.
--
-- Fallback rule: when no user rule matches a file, require that the file
-- already exists and use its modification time; error out otherwise.
defaultRuleFile :: Rules ()
defaultRuleFile = defaultRule $ \(FileQ x) -> Just $
    liftIO $ fmap FileA $ getModTimeError "Error, file does not exist and no rule available:" $ unpack_ x
-- | Require that the following files are built before continuing. Particularly
--   necessary when calling 'Development.Shake.system''. As an example:
--
-- @
-- \"\/\/*.rot13\" '*>' \\out -> do
--     let src = 'Development.Shake.FilePath.dropExtension' out
--     'need' [src]
--     'Development.Shake.system'' \"rot13\" [src,\"-o\",out]
-- @
need :: [FilePath] -> Action ()
need files = do
    _ <- apply (map (FileQ . pack) files) :: Action [FileA]
    return ()
-- | Require that the following are built by the rules, used to specify the target.
--
-- @
-- main = 'Development.Shake.shake' 'shakeOptions' $ do
--    'want' [\"Main.exe\"]
--    ...
-- @
--
-- This program will build @Main.exe@, given sufficient rules.
want :: [FilePath] -> Rules ()
want files = action (need files)
-- Shared implementation of the file-matching operators: if @test@ accepts
-- the path, create the parent directory, run the user action, then record
-- the resulting file's modification time (failing with @help@ in the error
-- message when the file was not produced).
root :: String -> (FilePath -> Bool) -> (FilePath -> Action ()) -> Rules ()
root help test act = rule $ \(FileQ x_) -> let x = unpack x_ in
    if not $ test x then Nothing else Just $ do
        liftIO $ createDirectoryIfMissing True $ takeDirectory x
        act x
        liftIO $ fmap FileA $ getModTimeError ("Error, rule " ++ help ++ " failed to build file:") $ unpack_ x_
-- | Declare a phony action: an action that produces no file and is rerun in
-- every execution that requires it. Demand 'phony' rules using 'want' \/ 'need'.
--
-- Phony actions are intended to define command-line abbreviations. You should
-- not 'need' phony actions as dependencies of rules, as that will cause
-- excessive rebuilding.
phony :: String -> Action () -> Rules ()
phony name act = rule $ \(FileQ packed) ->
    let candidate = unpack packed in
    if candidate == name
        then Just (act >> return (FileA fileTimeNone))
        else Nothing
-- | Define a rule to build files. If the first argument returns 'True' for a given file,
-- the second argument will be used to build it. Usually '*>' is sufficient, but '?>' gives
-- additional power. For any file used by the build system, only one rule should return 'True'.
--
-- @
-- (all isUpper . 'Development.Shake.FilePath.takeBaseName') '?>' \\out -> do
-- let src = 'Development.Shake.FilePath.replaceBaseName' out $ map toLower $ takeBaseName out
-- 'Development.Shake.writeFile'' out . map toUpper =<< 'Development.Shake.readFile'' src
-- @
(?>) :: (FilePath -> Bool) -> (FilePath -> Action ()) -> Rules ()
(?>) = root "with ?>" -- delegate to 'root'; the label shows up in error messages
-- | Define a set of patterns, and if any of them match, run the associated rule. See '*>'.
(**>) :: [FilePattern] -> (FilePath -> Action ()) -> Rules ()
(**>) pats = root "with **>" matches
    where matches file = any (?== file) pats
-- | Define a rule that matches a 'FilePattern'. No file required by the system must be
-- matched by more than one pattern. For the pattern rules, see '?=='.
--
-- @
-- \"*.asm.o\" '*>' \\out -> do
-- let src = 'Development.Shake.FilePath.dropExtension' out
-- 'need' [src]
-- 'Development.Shake.system'' \"as\" [src,\"-o\",out]
-- @
--
-- To define a build system for multiple compiled languages, we recommend using @.asm.o@,
-- @.cpp.o@, @.hs.o@, to indicate which language produces an object file.
-- I.e., the file @foo.cpp@ produces object file @foo.cpp.o@.
--
-- Note that matching is case-sensitive, even on Windows.
(*>) :: FilePattern -> (FilePath -> Action ()) -> Rules ()
(*>) pat = root (show pat) (\file -> pat ?== file)
-- | A version of 'newCache' that runs in IO, and can be called before calling 'Development.Shake.shake'.
-- Most people should use 'newCache' instead.
newCacheIO :: (FilePath -> IO a) -> IO (FilePath -> Action a)
newCacheIO act = do
    -- Map from file path to a barrier holding the (possibly failed) result.
    var <- newVar Map.empty -- Var (Map FilePath (Barrier (Either SomeException a)))
    -- Rethrow a captured exception, or return the cached value.
    let run = either (\e -> throwIO (e :: SomeException)) return
    return $ \file -> do
        need [file] -- depend on the file so rebuilds invalidate the cache user
        liftIO $ join $ modifyVar var $ \mp -> case Map.lookup file mp of
            -- Already requested (possibly still loading): wait on its barrier.
            Just v -> return (mp, run =<< waitBarrier v)
            Nothing -> do
                -- First request: install a barrier under the lock, then load
                -- outside it and publish the result (success or exception).
                v <- newBarrier
                return $ (,) (Map.insert file v mp) $ do
                    res <- try $ act file
                    signalBarrier v res
                    run res
-- | Given a way of loading information from a file, produce a cached version that will load each file at most once.
-- Using the cached function will still result in a dependency on the original file.
-- The argument function should not access any files other than the one passed as its argument.
-- Each call to 'newCache' creates a separate cache that is independent of all other calls to 'newCache'.
--
-- This function is useful when creating files that store intermediate values,
-- to avoid the overhead of repeatedly reading from disk, particularly if the file requires expensive parsing.
-- As an example:
--
-- @
-- digits \<- 'newCache' $ \\file -> do
-- src \<- readFile file
-- return $ length $ filter isDigit src
-- \"*.digits\" '*>' \\x -> do
-- v1 \<- digits ('dropExtension' x)
-- v2 \<- digits ('dropExtension' x)
-- 'Development.Shake.writeFile'' x $ show (v1,v2)
-- @
--
-- To create the result @MyFile.txt.digits@ the file @MyFile.txt@ will be read and counted, but only at most
-- once per execution.
newCache :: (FilePath -> IO a) -> Rules (FilePath -> Action a)
newCache loader = rulesIO (newCacheIO loader)
|
strager/b-shake
|
B/Shake/File.hs
|
bsd-3-clause
| 8,693
| 0
| 23
| 2,054
| 1,189
| 658
| 531
| 67
| 2
|
{-# language CPP #-}
-- No documentation found for Chapter "Result"
module Vulkan.Core10.Enums.Result (Result( SUCCESS
, NOT_READY
, TIMEOUT
, EVENT_SET
, EVENT_RESET
, INCOMPLETE
, ERROR_OUT_OF_HOST_MEMORY
, ERROR_OUT_OF_DEVICE_MEMORY
, ERROR_INITIALIZATION_FAILED
, ERROR_DEVICE_LOST
, ERROR_MEMORY_MAP_FAILED
, ERROR_LAYER_NOT_PRESENT
, ERROR_EXTENSION_NOT_PRESENT
, ERROR_FEATURE_NOT_PRESENT
, ERROR_INCOMPATIBLE_DRIVER
, ERROR_TOO_MANY_OBJECTS
, ERROR_FORMAT_NOT_SUPPORTED
, ERROR_FRAGMENTED_POOL
, ERROR_UNKNOWN
, OPERATION_NOT_DEFERRED_KHR
, OPERATION_DEFERRED_KHR
, THREAD_DONE_KHR
, THREAD_IDLE_KHR
, ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT
, ERROR_NOT_PERMITTED_KHR
, ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT
, ERROR_INVALID_SHADER_NV
, ERROR_VALIDATION_FAILED_EXT
, ERROR_INCOMPATIBLE_DISPLAY_KHR
, ERROR_OUT_OF_DATE_KHR
, SUBOPTIMAL_KHR
, ERROR_NATIVE_WINDOW_IN_USE_KHR
, ERROR_SURFACE_LOST_KHR
, PIPELINE_COMPILE_REQUIRED
, ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS
, ERROR_FRAGMENTATION
, ERROR_INVALID_EXTERNAL_HANDLE
, ERROR_OUT_OF_POOL_MEMORY
, ..
)) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import GHC.Show (showsPrec)
import Vulkan.Zero (Zero)
import Foreign.Storable (Storable)
import Data.Int (Int32)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
-- | VkResult - Vulkan command return codes
--
-- = Description
--
-- If a command returns a runtime error, unless otherwise specified any
-- output parameters will have undefined contents, except that if the
-- output parameter is a structure with @sType@ and @pNext@ fields, those
-- fields will be unmodified. Any structures chained from @pNext@ will also
-- have undefined contents, except that @sType@ and @pNext@ will be
-- unmodified.
--
-- @VK_ERROR_OUT_OF_*_MEMORY@ errors do not modify any currently existing
-- Vulkan objects. Objects that have already been successfully created
-- /can/ still be used by the application.
--
-- Note
--
-- As a general rule, @Free@, @Release@, and @Reset@ commands do not return
-- 'ERROR_OUT_OF_HOST_MEMORY', while any other command with a return code
-- /may/ return it. Any exceptions from this rule are described for those
-- commands.
--
-- 'ERROR_UNKNOWN' will be returned by an implementation when an unexpected
-- error occurs that cannot be attributed to valid behavior of the
-- application and implementation. Under these conditions, it /may/ be
-- returned from any command returning a 'Result'.
--
-- Note
--
-- 'ERROR_UNKNOWN' is not expected to ever be returned if the application
-- behavior is valid, and if the implementation is bug-free. If
-- 'ERROR_UNKNOWN' is received, the application should be checked against
-- the latest validation layers to verify correct behavior as much as
-- possible. If no issues are identified it could be an implementation
-- issue, and the implementor should be contacted for support.
--
-- Performance-critical commands generally do not have return codes. If a
-- runtime error occurs in such commands, the implementation will defer
-- reporting the error until a specified point. For commands that record
-- into command buffers (@vkCmd*@) runtime errors are reported by
-- 'Vulkan.Core10.CommandBuffer.endCommandBuffer'.
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Extensions.VK_KHR_swapchain.PresentInfoKHR'
-- A newtype (rather than a closed sum type) so result codes added by
-- future extensions still round-trip through the C API without loss.
newtype Result = Result Int32
  deriving newtype (Eq, Ord, Storable, Zero)
-- | 'SUCCESS' Command successfully completed
pattern SUCCESS = Result 0
-- | 'NOT_READY' A fence or query has not yet completed
pattern NOT_READY = Result 1
-- | 'TIMEOUT' A wait operation has not completed in the specified time
pattern TIMEOUT = Result 2
-- | 'EVENT_SET' An event is signaled
pattern EVENT_SET = Result 3
-- | 'EVENT_RESET' An event is unsignaled
pattern EVENT_RESET = Result 4
-- | 'INCOMPLETE' A return array was too small for the result
pattern INCOMPLETE = Result 5
-- | 'ERROR_OUT_OF_HOST_MEMORY' A host memory allocation has failed.
pattern ERROR_OUT_OF_HOST_MEMORY = Result (-1)
-- | 'ERROR_OUT_OF_DEVICE_MEMORY' A device memory allocation has failed.
pattern ERROR_OUT_OF_DEVICE_MEMORY = Result (-2)
-- | 'ERROR_INITIALIZATION_FAILED' Initialization of an object could not be
-- completed for implementation-specific reasons.
pattern ERROR_INITIALIZATION_FAILED = Result (-3)
-- | 'ERROR_DEVICE_LOST' The logical or physical device has been lost. See
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-lost-device Lost Device>
pattern ERROR_DEVICE_LOST = Result (-4)
-- | 'ERROR_MEMORY_MAP_FAILED' Mapping of a memory object has failed.
pattern ERROR_MEMORY_MAP_FAILED = Result (-5)
-- | 'ERROR_LAYER_NOT_PRESENT' A requested layer is not present or could not
-- be loaded.
pattern ERROR_LAYER_NOT_PRESENT = Result (-6)
-- | 'ERROR_EXTENSION_NOT_PRESENT' A requested extension is not supported.
pattern ERROR_EXTENSION_NOT_PRESENT = Result (-7)
-- | 'ERROR_FEATURE_NOT_PRESENT' A requested feature is not supported.
pattern ERROR_FEATURE_NOT_PRESENT = Result (-8)
-- | 'ERROR_INCOMPATIBLE_DRIVER' The requested version of Vulkan is not
-- supported by the driver or is otherwise incompatible for
-- implementation-specific reasons.
pattern ERROR_INCOMPATIBLE_DRIVER = Result (-9)
-- | 'ERROR_TOO_MANY_OBJECTS' Too many objects of the type have already been
-- created.
pattern ERROR_TOO_MANY_OBJECTS = Result (-10)
-- | 'ERROR_FORMAT_NOT_SUPPORTED' A requested format is not supported on this
-- device.
pattern ERROR_FORMAT_NOT_SUPPORTED = Result (-11)
-- | 'ERROR_FRAGMENTED_POOL' A pool allocation has failed due to
-- fragmentation of the pool’s memory. This /must/ only be returned if no
-- attempt to allocate host or device memory was made to accommodate the
-- new allocation. This /should/ be returned in preference to
-- 'ERROR_OUT_OF_POOL_MEMORY', but only if the implementation is certain
-- that the pool allocation failure was due to fragmentation.
pattern ERROR_FRAGMENTED_POOL = Result (-12)
-- | 'ERROR_UNKNOWN' An unknown error has occurred; either the application
-- has provided invalid input, or an implementation failure has occurred.
pattern ERROR_UNKNOWN = Result (-13)
-- | 'OPERATION_NOT_DEFERRED_KHR' A deferred operation was requested and no
-- operations were deferred.
pattern OPERATION_NOT_DEFERRED_KHR = Result 1000268003
-- | 'OPERATION_DEFERRED_KHR' A deferred operation was requested and at least
-- some of the work was deferred.
pattern OPERATION_DEFERRED_KHR = Result 1000268002
-- | 'THREAD_DONE_KHR' A deferred operation is not complete but there is no
-- work remaining to assign to additional threads.
pattern THREAD_DONE_KHR = Result 1000268001
-- | 'THREAD_IDLE_KHR' A deferred operation is not complete but there is
-- currently no work for this thread to do at the time of this call.
pattern THREAD_IDLE_KHR = Result 1000268000
-- | 'ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT' An operation on a swapchain
-- created with
-- 'Vulkan.Extensions.VK_EXT_full_screen_exclusive.FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT'
-- failed as it did not have exlusive full-screen access. This /may/ occur
-- due to implementation-dependent reasons, outside of the application’s
-- control.
pattern ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = Result (-1000255000)
-- No documentation found for Nested "VkResult" "VK_ERROR_NOT_PERMITTED_KHR"
pattern ERROR_NOT_PERMITTED_KHR = Result (-1000174001)
-- No documentation found for Nested "VkResult" "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"
pattern ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = Result (-1000158000)
-- | 'ERROR_INVALID_SHADER_NV' One or more shaders failed to compile or link.
-- More details are reported back to the application via
-- @VK_EXT_debug_report@ if enabled.
pattern ERROR_INVALID_SHADER_NV = Result (-1000012000)
-- No documentation found for Nested "VkResult" "VK_ERROR_VALIDATION_FAILED_EXT"
pattern ERROR_VALIDATION_FAILED_EXT = Result (-1000011001)
-- | 'ERROR_INCOMPATIBLE_DISPLAY_KHR' The display used by a swapchain does
-- not use the same presentable image layout, or is incompatible in a way
-- that prevents sharing an image.
pattern ERROR_INCOMPATIBLE_DISPLAY_KHR = Result (-1000003001)
-- | 'ERROR_OUT_OF_DATE_KHR' A surface has changed in such a way that it is
-- no longer compatible with the swapchain, and further presentation
-- requests using the swapchain will fail. Applications /must/ query the
-- new surface properties and recreate their swapchain if they wish to
-- continue presenting to the surface.
pattern ERROR_OUT_OF_DATE_KHR = Result (-1000001004)
-- | 'SUBOPTIMAL_KHR' A swapchain no longer matches the surface properties
-- exactly, but /can/ still be used to present to the surface successfully.
pattern SUBOPTIMAL_KHR = Result 1000001003
-- | 'ERROR_NATIVE_WINDOW_IN_USE_KHR' The requested window is already in use
-- by Vulkan or another API in a manner which prevents it from being used
-- again.
pattern ERROR_NATIVE_WINDOW_IN_USE_KHR = Result (-1000000001)
-- | 'ERROR_SURFACE_LOST_KHR' A surface is no longer available.
pattern ERROR_SURFACE_LOST_KHR = Result (-1000000000)
-- | 'PIPELINE_COMPILE_REQUIRED' A requested pipeline creation would have
-- required compilation, but the application requested compilation to not
-- be performed.
pattern PIPELINE_COMPILE_REQUIRED = Result 1000297000
-- | 'ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS' A buffer creation or memory
-- allocation failed because the requested address is not available. A
-- shader group handle assignment failed because the requested shader group
-- handle information is no longer valid.
pattern ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = Result (-1000257000)
-- | 'ERROR_FRAGMENTATION' A descriptor pool creation has failed due to
-- fragmentation.
pattern ERROR_FRAGMENTATION = Result (-1000161000)
-- | 'ERROR_INVALID_EXTERNAL_HANDLE' An external handle is not a valid handle
-- of the specified type.
pattern ERROR_INVALID_EXTERNAL_HANDLE = Result (-1000072003)
-- | 'ERROR_OUT_OF_POOL_MEMORY' A pool memory allocation has failed. This
-- /must/ only be returned if no attempt to allocate host or device memory
-- was made to accommodate the new allocation. If the failure was
-- definitely due to fragmentation of the pool, 'ERROR_FRAGMENTED_POOL'
-- /should/ be returned instead.
pattern ERROR_OUT_OF_POOL_MEMORY = Result (-1000069000)
{-# complete SUCCESS,
NOT_READY,
TIMEOUT,
EVENT_SET,
EVENT_RESET,
INCOMPLETE,
ERROR_OUT_OF_HOST_MEMORY,
ERROR_OUT_OF_DEVICE_MEMORY,
ERROR_INITIALIZATION_FAILED,
ERROR_DEVICE_LOST,
ERROR_MEMORY_MAP_FAILED,
ERROR_LAYER_NOT_PRESENT,
ERROR_EXTENSION_NOT_PRESENT,
ERROR_FEATURE_NOT_PRESENT,
ERROR_INCOMPATIBLE_DRIVER,
ERROR_TOO_MANY_OBJECTS,
ERROR_FORMAT_NOT_SUPPORTED,
ERROR_FRAGMENTED_POOL,
ERROR_UNKNOWN,
OPERATION_NOT_DEFERRED_KHR,
OPERATION_DEFERRED_KHR,
THREAD_DONE_KHR,
THREAD_IDLE_KHR,
ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
ERROR_NOT_PERMITTED_KHR,
ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
ERROR_INVALID_SHADER_NV,
ERROR_VALIDATION_FAILED_EXT,
ERROR_INCOMPATIBLE_DISPLAY_KHR,
ERROR_OUT_OF_DATE_KHR,
SUBOPTIMAL_KHR,
ERROR_NATIVE_WINDOW_IN_USE_KHR,
ERROR_SURFACE_LOST_KHR,
PIPELINE_COMPILE_REQUIRED,
ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
ERROR_FRAGMENTATION,
ERROR_INVALID_EXTERNAL_HANDLE,
ERROR_OUT_OF_POOL_MEMORY :: Result #-}
-- | Constructor name used by the generated 'Show' and 'Read' instances below.
conNameResult :: String
conNameResult = "Result"
-- | Common prefix stripped from pattern names when showing; empty for 'Result'.
enumPrefixResult :: String
enumPrefixResult = ""
showTableResult :: [(Result, String)]
showTableResult =
[ (SUCCESS , "SUCCESS")
, (NOT_READY , "NOT_READY")
, (TIMEOUT , "TIMEOUT")
, (EVENT_SET , "EVENT_SET")
, (EVENT_RESET , "EVENT_RESET")
, (INCOMPLETE , "INCOMPLETE")
, (ERROR_OUT_OF_HOST_MEMORY , "ERROR_OUT_OF_HOST_MEMORY")
, (ERROR_OUT_OF_DEVICE_MEMORY , "ERROR_OUT_OF_DEVICE_MEMORY")
, (ERROR_INITIALIZATION_FAILED , "ERROR_INITIALIZATION_FAILED")
, (ERROR_DEVICE_LOST , "ERROR_DEVICE_LOST")
, (ERROR_MEMORY_MAP_FAILED , "ERROR_MEMORY_MAP_FAILED")
, (ERROR_LAYER_NOT_PRESENT , "ERROR_LAYER_NOT_PRESENT")
, (ERROR_EXTENSION_NOT_PRESENT , "ERROR_EXTENSION_NOT_PRESENT")
, (ERROR_FEATURE_NOT_PRESENT , "ERROR_FEATURE_NOT_PRESENT")
, (ERROR_INCOMPATIBLE_DRIVER , "ERROR_INCOMPATIBLE_DRIVER")
, (ERROR_TOO_MANY_OBJECTS , "ERROR_TOO_MANY_OBJECTS")
, (ERROR_FORMAT_NOT_SUPPORTED , "ERROR_FORMAT_NOT_SUPPORTED")
, (ERROR_FRAGMENTED_POOL , "ERROR_FRAGMENTED_POOL")
, (ERROR_UNKNOWN , "ERROR_UNKNOWN")
, (OPERATION_NOT_DEFERRED_KHR , "OPERATION_NOT_DEFERRED_KHR")
, (OPERATION_DEFERRED_KHR , "OPERATION_DEFERRED_KHR")
, (THREAD_DONE_KHR , "THREAD_DONE_KHR")
, (THREAD_IDLE_KHR , "THREAD_IDLE_KHR")
, (ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, "ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT")
, (ERROR_NOT_PERMITTED_KHR , "ERROR_NOT_PERMITTED_KHR")
, (ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, "ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT")
, (ERROR_INVALID_SHADER_NV , "ERROR_INVALID_SHADER_NV")
, (ERROR_VALIDATION_FAILED_EXT , "ERROR_VALIDATION_FAILED_EXT")
, (ERROR_INCOMPATIBLE_DISPLAY_KHR , "ERROR_INCOMPATIBLE_DISPLAY_KHR")
, (ERROR_OUT_OF_DATE_KHR , "ERROR_OUT_OF_DATE_KHR")
, (SUBOPTIMAL_KHR , "SUBOPTIMAL_KHR")
, (ERROR_NATIVE_WINDOW_IN_USE_KHR , "ERROR_NATIVE_WINDOW_IN_USE_KHR")
, (ERROR_SURFACE_LOST_KHR , "ERROR_SURFACE_LOST_KHR")
, (PIPELINE_COMPILE_REQUIRED , "PIPELINE_COMPILE_REQUIRED")
, (ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, "ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS")
, (ERROR_FRAGMENTATION , "ERROR_FRAGMENTATION")
, (ERROR_INVALID_EXTERNAL_HANDLE , "ERROR_INVALID_EXTERNAL_HANDLE")
, (ERROR_OUT_OF_POOL_MEMORY , "ERROR_OUT_OF_POOL_MEMORY")
]
-- Known codes render by pattern name via 'showTableResult'; unrecognised
-- codes fall back to showing the raw 'Int32' payload.
instance Show Result where
  showsPrec = enumShowsPrec enumPrefixResult showTableResult conNameResult (\(Result x) -> x) (showsPrec 11)
-- Inverse of the 'Show' instance: accepts pattern names from
-- 'showTableResult' as well as the raw numeric form.
instance Read Result where
  readPrec = enumReadPrec enumPrefixResult showTableResult conNameResult Result
|
expipiplus1/vulkan
|
src/Vulkan/Core10/Enums/Result.hs
|
bsd-3-clause
| 17,157
| 1
| 10
| 4,867
| 1,339
| 845
| 494
| -1
| -1
|
module Character
(
Character,
createCharacter,
tryParseChrFile
) where
import Prompt
import System.IO
-- | Gender of a character, parsed from "m"/"f" user input.
data Gender = Male | Female deriving(Show)
-- | A character: a name paired with a 'Gender'.
data Character = Character String Gender
-- charName :: Character -> String
-- charName (Character name _) = name
-- | Parse a gender answer: "m" gives 'Male', "f" gives 'Female',
-- anything else is rejected.
tryParseGender :: String -> Maybe Gender
tryParseGender "m" = Just Male
tryParseGender "f" = Just Female
tryParseGender _   = Nothing
-- | A name is valid exactly when it is non-empty.
tryParseName :: String -> Maybe String
tryParseName name
  | null name = Nothing
  | otherwise = Just name
-- | Prompt until the user enters a valid gender ("m" or "f").
promptDataGender :: IO Gender
promptDataGender = promptData "Gender (m/f):" tryParseGender "Gender must be either 'm' or 'f'"
-- | Prompt until the user enters a non-empty name.
-- (Note: the on-screen error message contains a typo, "charcter"; it is a
-- runtime string and is intentionally left unchanged here.)
promptDataName :: IO String
promptDataName = promptData "Name:" tryParseName "Name must contain at least one charcter"
-- | Interactively build a 'Character', echo a one-line summary, and
-- persist it to "<name>.chr" in the format "N:<name>, G:<Gender>".
createCharacter :: IO Character
createCharacter = do
  putStrLn "Create your character."
  name <- promptDataName
  gender <- promptDataGender
  let genderText = show gender
      summary = "Character: " ++ name ++ ['(', head genderText, ')']
  putStrLn summary
  writeFile (name ++ ".chr") ("N:" ++ name ++ ", G:" ++ genderText)
  return (Character name gender)
-- | Load a character from a .chr file.
--
-- NOTE(review): this is currently a stub -- it echoes the file contents
-- and always returns a placeholder character instead of parsing the
-- "N:<name>, G:<gender>" format written by 'createCharacter'. Confirm
-- whether real parsing is intended here.
tryParseChrFile :: String -> IO Character
tryParseChrFile filename = do
  contents <- readFile filename
  putStrLn contents
  return (Character "DefaultName" Male)
|
monkeybits/rpgame
|
src/Character.hs
|
bsd-3-clause
| 1,294
| 0
| 11
| 238
| 346
| 171
| 175
| 36
| 3
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
module SysTools.Settings
( SettingsError (..)
, initSettings
) where
#include "HsVersions.h"
import GhcPrelude
import GHC.Settings
import Config
import CliOption
import FileSettings
import Fingerprint
import GHC.Platform
import GhcNameVersion
import Outputable
import Settings
import SysTools.BaseDir
import ToolSettings
import Control.Monad.Trans.Except
import Control.Monad.IO.Class
import qualified Data.Map as Map
import System.FilePath
import System.Directory
-- | Errors that can occur while locating or parsing GHC's settings files.
data SettingsError
  = SettingsError_MissingData String -- ^ A required file was not found.
  | SettingsError_BadData String -- ^ A file existed but could not be parsed.
initSettings
:: forall m
. MonadIO m
=> String -- ^ TopDir path
-> ExceptT SettingsError m Settings
initSettings top_dir = do
-- see Note [topdir: How GHC finds its files]
-- NB: top_dir is assumed to be in standard Unix
-- format, '/' separated
mtool_dir <- liftIO $ findToolDir top_dir
-- see Note [tooldir: How GHC finds mingw on Windows]
let installed :: FilePath -> FilePath
installed file = top_dir </> file
libexec :: FilePath -> FilePath
libexec file = top_dir </> "bin" </> file
settingsFile = installed "settings"
platformConstantsFile = installed "platformConstants"
readFileSafe :: FilePath -> ExceptT SettingsError m String
readFileSafe path = liftIO (doesFileExist path) >>= \case
True -> liftIO $ readFile path
False -> throwE $ SettingsError_MissingData $ "Missing file: " ++ path
settingsStr <- readFileSafe settingsFile
platformConstantsStr <- readFileSafe platformConstantsFile
settingsList <- case maybeReadFuzzy settingsStr of
Just s -> pure s
Nothing -> throwE $ SettingsError_BadData $
"Can't parse " ++ show settingsFile
let mySettings = Map.fromList settingsList
platformConstants <- case maybeReadFuzzy platformConstantsStr of
Just s -> pure s
Nothing -> throwE $ SettingsError_BadData $
"Can't parse " ++ show platformConstantsFile
-- See Note [Settings file] for a little more about this file. We're
-- just partially applying those functions and throwing 'Left's; they're
-- written in a very portable style to keep ghc-boot light.
let getSetting key = either pgmError pure $
getFilePathSetting0 top_dir settingsFile mySettings key
getToolSetting :: String -> ExceptT SettingsError m String
getToolSetting key = expandToolDir mtool_dir <$> getSetting key
getBooleanSetting :: String -> ExceptT SettingsError m Bool
getBooleanSetting key = either pgmError pure $
getBooleanSetting0 settingsFile mySettings key
targetPlatformString <- getSetting "target platform string"
tablesNextToCode <- getBooleanSetting "Tables next to code"
myExtraGccViaCFlags <- getSetting "GCC extra via C opts"
-- On Windows, mingw is distributed with GHC,
-- so we look in TopDir/../mingw/bin,
-- as well as TopDir/../../mingw/bin for hadrian.
-- It would perhaps be nice to be able to override this
-- with the settings file, but it would be a little fiddly
-- to make that possible, so for now you can't.
cc_prog <- getToolSetting "C compiler command"
cc_args_str <- getSetting "C compiler flags"
cxx_args_str <- getSetting "C++ compiler flags"
gccSupportsNoPie <- getBooleanSetting "C compiler supports -no-pie"
cpp_prog <- getToolSetting "Haskell CPP command"
cpp_args_str <- getSetting "Haskell CPP flags"
platform <- either pgmError pure $ getTargetPlatform settingsFile mySettings
let unreg_cc_args = if platformUnregisterised platform
then ["-DNO_REGS", "-DUSE_MINIINTERPRETER"]
else []
cpp_args = map Option (words cpp_args_str)
cc_args = words cc_args_str ++ unreg_cc_args
cxx_args = words cxx_args_str
ldSupportsCompactUnwind <- getBooleanSetting "ld supports compact unwind"
ldSupportsBuildId <- getBooleanSetting "ld supports build-id"
ldSupportsFilelist <- getBooleanSetting "ld supports filelist"
ldIsGnuLd <- getBooleanSetting "ld is GNU ld"
let globalpkgdb_path = installed "package.conf.d"
ghc_usage_msg_path = installed "ghc-usage.txt"
ghci_usage_msg_path = installed "ghci-usage.txt"
-- For all systems, unlit, split, mangle are GHC utilities
-- architecture-specific stuff is done when building Config.hs
unlit_path <- getToolSetting "unlit command"
windres_path <- getToolSetting "windres command"
libtool_path <- getToolSetting "libtool command"
ar_path <- getToolSetting "ar command"
ranlib_path <- getToolSetting "ranlib command"
-- TODO this side-effect doesn't belong here. Reading and parsing the settings
-- should be idempotent and accumulate no resources.
tmpdir <- liftIO $ getTemporaryDirectory
touch_path <- getToolSetting "touch command"
mkdll_prog <- getToolSetting "dllwrap command"
let mkdll_args = []
-- cpp is derived from gcc on all platforms
-- HACK, see setPgmP below. We keep 'words' here to remember to fix
-- Config.hs one day.
-- Other things being equal, as and ld are simply gcc
cc_link_args_str <- getSetting "C compiler link flags"
let as_prog = cc_prog
as_args = map Option cc_args
ld_prog = cc_prog
ld_args = map Option (cc_args ++ words cc_link_args_str)
llvmTarget <- getSetting "LLVM target"
-- We just assume on command line
lc_prog <- getSetting "LLVM llc command"
lo_prog <- getSetting "LLVM opt command"
lcc_prog <- getSetting "LLVM clang command"
let iserv_prog = libexec "ghc-iserv"
integerLibrary <- getSetting "integer library"
integerLibraryType <- case integerLibrary of
"integer-gmp" -> pure IntegerGMP
"integer-simple" -> pure IntegerSimple
_ -> pgmError $ unwords
[ "Entry for"
, show "integer library"
, "must be one of"
, show "integer-gmp"
, "or"
, show "integer-simple"
]
ghcWithInterpreter <- getBooleanSetting "Use interpreter"
ghcWithNativeCodeGen <- getBooleanSetting "Use native code generator"
ghcWithSMP <- getBooleanSetting "Support SMP"
ghcRTSWays <- getSetting "RTS ways"
leadingUnderscore <- getBooleanSetting "Leading underscore"
useLibFFI <- getBooleanSetting "Use LibFFI"
ghcThreaded <- getBooleanSetting "Use Threads"
ghcDebugged <- getBooleanSetting "Use Debugging"
ghcRtsWithLibdw <- getBooleanSetting "RTS expects libdw"
return $ Settings
{ sGhcNameVersion = GhcNameVersion
{ ghcNameVersion_programName = "ghc"
, ghcNameVersion_projectVersion = cProjectVersion
}
, sFileSettings = FileSettings
{ fileSettings_tmpDir = normalise tmpdir
, fileSettings_ghcUsagePath = ghc_usage_msg_path
, fileSettings_ghciUsagePath = ghci_usage_msg_path
, fileSettings_toolDir = mtool_dir
, fileSettings_topDir = top_dir
, fileSettings_globalPackageDatabase = globalpkgdb_path
}
, sToolSettings = ToolSettings
{ toolSettings_ldSupportsCompactUnwind = ldSupportsCompactUnwind
, toolSettings_ldSupportsBuildId = ldSupportsBuildId
, toolSettings_ldSupportsFilelist = ldSupportsFilelist
, toolSettings_ldIsGnuLd = ldIsGnuLd
, toolSettings_ccSupportsNoPie = gccSupportsNoPie
, toolSettings_pgm_L = unlit_path
, toolSettings_pgm_P = (cpp_prog, cpp_args)
, toolSettings_pgm_F = ""
, toolSettings_pgm_c = cc_prog
, toolSettings_pgm_a = (as_prog, as_args)
, toolSettings_pgm_l = (ld_prog, ld_args)
, toolSettings_pgm_dll = (mkdll_prog,mkdll_args)
, toolSettings_pgm_T = touch_path
, toolSettings_pgm_windres = windres_path
, toolSettings_pgm_libtool = libtool_path
, toolSettings_pgm_ar = ar_path
, toolSettings_pgm_ranlib = ranlib_path
, toolSettings_pgm_lo = (lo_prog,[])
, toolSettings_pgm_lc = (lc_prog,[])
, toolSettings_pgm_lcc = (lcc_prog,[])
, toolSettings_pgm_i = iserv_prog
, toolSettings_opt_L = []
, toolSettings_opt_P = []
, toolSettings_opt_P_fingerprint = fingerprint0
, toolSettings_opt_F = []
, toolSettings_opt_c = cc_args
, toolSettings_opt_cxx = cxx_args
, toolSettings_opt_a = []
, toolSettings_opt_l = []
, toolSettings_opt_windres = []
, toolSettings_opt_lcc = []
, toolSettings_opt_lo = []
, toolSettings_opt_lc = []
, toolSettings_opt_i = []
, toolSettings_extraGccViaCFlags = words myExtraGccViaCFlags
}
, sTargetPlatform = platform
, sPlatformMisc = PlatformMisc
{ platformMisc_targetPlatformString = targetPlatformString
, platformMisc_integerLibrary = integerLibrary
, platformMisc_integerLibraryType = integerLibraryType
, platformMisc_ghcWithInterpreter = ghcWithInterpreter
, platformMisc_ghcWithNativeCodeGen = ghcWithNativeCodeGen
, platformMisc_ghcWithSMP = ghcWithSMP
, platformMisc_ghcRTSWays = ghcRTSWays
, platformMisc_tablesNextToCode = tablesNextToCode
, platformMisc_leadingUnderscore = leadingUnderscore
, platformMisc_libFFI = useLibFFI
, platformMisc_ghcThreaded = ghcThreaded
, platformMisc_ghcDebugged = ghcDebugged
, platformMisc_ghcRtsWithLibdw = ghcRtsWithLibdw
, platformMisc_llvmTarget = llvmTarget
}
, sPlatformConstants = platformConstants
, sRawSettings = settingsList
}
|
sdiehl/ghc
|
compiler/main/SysTools/Settings.hs
|
bsd-3-clause
| 9,584
| 0
| 16
| 2,120
| 1,636
| 864
| 772
| 188
| 7
|
module Types.Controlable where
-- | Values exposing @x@/@y@ components as 'Double's.
-- NOTE(review): presumably a 2-D position -- confirm against callers.
class Controlable s where
    x :: s -> Double
    y :: s -> Double
|
Smurf/dodgem
|
src/Types/Controlable.hs
|
bsd-3-clause
| 100
| 0
| 7
| 26
| 33
| 18
| 15
| 4
| 0
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | Interface specification for crypto exchange markets
module Market.Interface
( module Market.Interface
, Price(..)
, Vol(..)
, Cost(..)
, OrderSide(..)
, Quote(..)
, QuoteBook(..)
, StrategyAdvice(..)
, Coin
, coinSymbol
, showBare
, readBare
) where
import Data.Hashable
import Market.Types
( Price(..)
, Vol(..)
, Cost(..)
, OrderSide(..)
, Quote(..)
, QuoteBook(..)
, StrategyAdvice(..)
, Coin
, coinSymbol
, showBare
, readBare
)
---------------------------------------
-- Market/Strategy Interface
-- | Client-assigned order identifier, used to correlate 'Action's with
-- the 'TradingEv' events they produce.
newtype ClientOID = COID Int deriving (Show, Eq, Num, Hashable)
data FillEv price vol
= FillEv
{ fSide :: OrderSide
, fPrice :: Price price -- the price that was actually used
, fVol :: Vol vol -- the volume executed in this fill
, fCOID :: ClientOID
}
deriving (Show, Eq)
data TradingEv price vol quoteTail counter
= PlaceEv ClientOID
| CancelEv ClientOID
| DoneEv ClientOID
| FillsEv [FillEv price vol]
| BookEv (QuoteBook price vol quoteTail counter)
deriving (Show, Eq)
data Action price vol
= PlaceLimit
{ aSide :: OrderSide
, aPrice :: Price price
, aVol :: Vol vol
, aCOID :: ClientOID }
| PlaceMarket
{ aSide :: OrderSide
, aVol :: Vol vol
, aCOID :: ClientOID }
| CancelOrder
{ aCOID :: ClientOID }
deriving (Show, Eq)
---------------------------------------
-- Strategy Control Interface
-- | Control events sent to a strategy (currently only a shutdown request).
data ControlEv = ShutdownEv deriving (Show, Eq)
data ControlAction
= ShutdownDone Int
| Error Int String
deriving (Show, Eq)
|
dimitri-xyz/market-model
|
src/Market/Interface.hs
|
bsd-3-clause
| 1,764
| 0
| 9
| 519
| 433
| 272
| 161
| 60
| 0
|
module Tut.Pandoc
( module Tut.Pandoc
, module Text.Pandoc
) where
import Text.Pandoc
import Text.Pandoc.Error
import Tut.Misc
import Tut.Imports
import Tut.Transformation
import Tut.Metadata
-- | Error types that can embed a pandoc 'PandocError' (in addition to
-- YAML parse errors, via the 'AsYamlParseError' superclass).
class AsYamlParseError e =>
      AsPandocError e where
  pandocError :: PandocError -> e
-- | Strings embed pandoc errors via their 'show'n representation.
instance AsPandocError String where
  pandocError = show
-- | Split a page into YAML metadata and markdown body, read the body
-- with pandoc, then run the given transformation over both.
-- Pandoc read failures are lifted into the error monad via 'pandocError'.
readTransformMarkdown
  :: (AsPandocError e, MonadError e m)
  => ReaderOptions -> TransformationT m a -> String -> m (a, Pandoc)
readTransformMarkdown r t s = do
  (meta, contents) <- parsePage s
  doc <- eitherError . left pandocError $ readMarkdown r contents
  doTransformation t meta doc
-- | Transform a markdown 'String' in memory and render the transformed
-- document back to markdown, returning it next to the transformation's
-- result.
transformMarkdown
  :: (MonadError e f, AsPandocError e)
  => String
  -> ReaderOptions
  -> WriterOptions
  -> TransformationT f a
  -> f (a, String)
transformMarkdown input rOpts wOpts t = do
  (result, doc) <- readTransformMarkdown rOpts t input
  return (result, writeMarkdown wOpts doc)
-- | Read markdown from @input@, transform it, write the rendered result
-- to @output@, and return the transformation's value.
transformMarkdownFile
  :: (MonadError e m, MonadBase IO m, AsPandocError e)
  => FilePath
  -> FilePath
  -> ReaderOptions
  -> WriterOptions
  -> TransformationT m a
  -> m a
transformMarkdownFile input output rOpts wOpts t = do
  source <- liftBase (readFile input)
  (result, rendered) <- transformMarkdown source rOpts wOpts t
  liftBase (writeFile output rendered)
  return result
|
aaronvargo/htut
|
src/Tut/Pandoc.hs
|
bsd-3-clause
| 1,272
| 0
| 13
| 243
| 413
| 208
| 205
| -1
| -1
|
module Vish.Data.Stage where
import Vish.MessageBox (MessageBox, mkMsgBox)
import Vish.Graphics.Image (Image (..))
import qualified Vish.Graphics.Image as Img
import Linear.V2 (V2 (..))
import qualified Linear.Vector as Vec
import Vish.Graphics.Font (Font)
import Control.Lens
-- | Everything drawn on screen: a background, three actor image slots,
-- and the message box.
data Stage = Stage
  { _stageSize :: V2 Int -- ^ Stage dimensions (units unverified -- presumably pixels).
  , _stageBackground :: Image -- ^ Backdrop image.
  , _stageLeft :: Image -- ^ Image shown in the left slot.
  , _stageCenter :: Image -- ^ Image shown in the center slot.
  , _stageRight :: Image -- ^ Image shown in the right slot.
  , _stageMsgBox :: MessageBox -- ^ Dialogue/message box.
  }
-- | Build an empty stage: every image slot shares a single blank image
-- and the message box is created with the supplied font.
mkStage :: Font -> IO Stage
mkStage font = do
  emptyImg <- Img.blank
  box <- mkMsgBox font
  let stage = Stage
        { _stageSize = Vec.zero
        , _stageBackground = emptyImg
        , _stageLeft = emptyImg
        , _stageCenter = emptyImg
        , _stageRight = emptyImg
        , _stageMsgBox = box
        }
  return stage
makeLenses ''Stage
|
andgate/vish
|
src/Vish/Data/Stage.hs
|
bsd-3-clause
| 847
| 0
| 10
| 228
| 224
| 135
| 89
| -1
| -1
|
module MergeList
( mergelist
) where
-- | Binomial coefficient C(n, k) using the multiplicative recurrence
-- C(n, k) = C(n-1, k-1) * n / k; the product is always divisible by k,
-- so integer division is exact. C(0, k) = 0 for k > 0.
choose :: Integer -> Integer -> Integer
choose _ 0 = 1
choose 0 _ = 0
choose n k = (choose (n - 1) (k - 1) * n) `div` k
-- | Entry point: read the number of test cases from stdin, then process
-- that many lines.
mergelist :: IO ()
mergelist = do
  countStr <- getLine
  getMultipleLines (read countStr :: Int)
-- | Process @cnt@ input lines, each carrying two integers m and n, and
-- print C(n+m, m) mod 1000000007 for each; prints a blank line when done.
--
-- Replaces the previous partial '(!!)' indexing with a pattern match so a
-- malformed line fails with a clear message instead of an index error.
getMultipleLines :: Int -> IO ()
getMultipleLines cnt
  | cnt <= 0 = putStrLn ""
  | otherwise = do
      line <- getLine
      case words line of
        (mStr:nStr:_) -> do
          let m = read mStr :: Integer
              n = read nStr :: Integer
          print (choose (n + m) m `mod` 1000000007)
        _ -> error "getMultipleLines: expected two integers per line"
      getMultipleLines (cnt - 1)
|
zuoqin/hackerrank
|
src/MergeList.hs
|
bsd-3-clause
| 633
| 2
| 11
| 216
| 265
| 136
| 129
| 21
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Run commands in a nix-shell
module Stack.Nix
(reexecWithOptionalShell
,nixCmdName
,nixHelpOptName
) where
import Control.Applicative
import Control.Arrow ((***))
import Control.Exception (Exception,throw)
import Control.Monad hiding (mapM)
import Control.Monad.Catch (MonadMask)
import Control.Monad.IO.Class (MonadIO,liftIO)
import Control.Monad.Logger (MonadLogger,logDebug)
import Control.Monad.Reader (MonadReader,asks)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Char (toUpper)
import Data.List (intercalate)
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Traversable
import Data.Typeable (Typeable)
import Data.Version (showVersion)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import qualified Paths_stack as Meta
import Prelude hiding (mapM) -- Fix redundant import warnings
import Stack.Config (makeConcreteResolver)
import Stack.Config.Nix (nixCompiler)
import Stack.Constants (stackProgName,platformVariantEnvVar)
import Stack.Docker (reExecArgName)
import Stack.Exec (exec)
import Stack.Types
import Stack.Types.Internal
import System.Environment (lookupEnv,getArgs,getExecutablePath)
import System.Process.Read (getEnvOverride)
-- | If Nix is enabled, re-runs the currently running OS command in a Nix container.
-- Otherwise, runs the inner action.
reexecWithOptionalShell
    :: M env m
    => Maybe (Path Abs Dir)
    -> Maybe AbstractResolver
    -> Maybe CompilerVersion
    -> IO ()
    -> m ()
reexecWithOptionalShell mprojectRoot maresolver mcompiler inner =
  do config <- asks getConfig
     inShell <- getInShell
     isReExec <- asks getReExec
     -- Only re-exec when Nix is enabled AND we are not already inside the
     -- shell AND this process is not itself a re-exec — prevents loops.
     if nixEnable (configNix config) && not inShell && not isReExec
       then runShellAndExit mprojectRoot maresolver mcompiler getCmdArgs
       else liftIO inner
  where
    -- Rebuild this process's command line, prefixing the re-exec marker
    -- argument so the spawned child knows it was re-spawned by stack.
    getCmdArgs = do
      args <-
          fmap
              (("--" ++ reExecArgName ++ "=" ++ showVersion Meta.version) :)
              (liftIO getArgs)
      exePath <- liftIO getExecutablePath
      return (exePath, args)
runShellAndExit
:: M env m
=> Maybe (Path Abs Dir)
-> Maybe AbstractResolver
-> Maybe CompilerVersion
-> m (String, [String])
-> m ()
runShellAndExit mprojectRoot maresolver mcompiler getCmdArgs = do
config <- asks getConfig
mresolver <- mapM makeConcreteResolver maresolver
envOverride <- getEnvOverride (configPlatform config)
(cmnd,args) <- fmap (escape *** map escape) getCmdArgs
mshellFile <-
traverse (resolveFile (fromMaybeProjectRoot mprojectRoot)) $
nixInitFile (configNix config)
let pkgsInConfig = nixPackages (configNix config)
ghc = nixCompiler config mresolver mcompiler
pkgs = pkgsInConfig ++ [ghc]
pureShell = nixPureShell (configNix config)
nixopts = case mshellFile of
Just fp -> [toFilePath fp, "--arg", "ghc"
,"with (import <nixpkgs> {}); " ++ T.unpack ghc]
Nothing -> ["-E", T.unpack $ T.intercalate " " $ concat
[["with (import <nixpkgs> {});"
,"runCommand \"myEnv\" {"
,"buildInputs=lib.optional stdenv.isLinux glibcLocales ++ ["],pkgs,["];"
,T.pack platformVariantEnvVar <> "=''nix'';"
,T.pack inShellEnvVar <> "=1;"
,"STACK_IN_NIX_EXTRA_ARGS=''"]
, (map (\p -> T.concat
["--extra-lib-dirs=${",p,"}/lib"
," --extra-include-dirs=${",p,"}/include "])
pkgs), ["'' ;"
,"} \"\""]]]
-- glibcLocales is necessary on Linux to avoid warnings about GHC being incapable to set the locale.
fullArgs = concat [if pureShell then ["--pure"] else [],
map T.unpack (nixShellOptions (configNix config))
,nixopts
,["--run", intercalate " " (cmnd:"$STACK_IN_NIX_EXTRA_ARGS":args)]
]
-- Using --run instead of --command so we cannot
-- end up in the nix-shell if stack build is Ctrl-C'd
$logDebug $
"Using a nix-shell environment " <> (case mshellFile of
Just path -> "from file: " <> (T.pack (toFilePath path))
Nothing -> "with nix packages: " <> (T.intercalate ", " pkgs))
exec envOverride "nix-shell" fullArgs
-- | Shell-escape a string for use in single quotes: each embedded single
-- quote becomes @'\"'\"'@ and the whole result is wrapped in quotes.
escape :: String -> String
escape str = "'" ++ concatMap quoteChar str ++ "'"
  where
    quoteChar '\'' = "'\"'\"'"
    quoteChar c    = [c]
-- | Fail with friendly error if project root not set.
-- NOTE: uses impure 'throw', so the exception surfaces only when the
-- result is forced by a caller.
fromMaybeProjectRoot :: Maybe (Path Abs Dir) -> Path Abs Dir
fromMaybeProjectRoot = fromMaybe (throw CannotDetermineProjectRootException)
-- | 'True' when this process is already running inside the Nix shell,
-- detected via the marker environment variable 'inShellEnvVar'.
getInShell :: (MonadIO m) => m Bool
getInShell = do
  marker <- liftIO (lookupEnv inShellEnvVar)
  return (isJust marker)
-- | Marker environment variable set inside the Nix shell so re-entry can
-- be detected. STACK_IN_NIX_EXTRA_ARGS is set under the same conditions
-- but may be empty, so it is not a reliable indicator.
inShellEnvVar :: String
inShellEnvVar = map toUpper stackProgName ++ "_IN_NIXSHELL"
-- | Name of the Nix command-line argument group ("nix").
nixCmdName :: String
nixCmdName = "nix"
-- | Command-line option that prints Nix-related help ("nix-help").
nixHelpOptName :: String
nixHelpOptName = nixCmdName ++ "-help"
-- | Exceptions thrown by "Stack.Nix".
data StackNixException
= CannotDetermineProjectRootException
-- ^ Can't determine the project root (location of the shell file if any).
deriving (Typeable)
instance Exception StackNixException
instance Show StackNixException where
show CannotDetermineProjectRootException =
"Cannot determine project root directory."
type M env m =
(MonadIO m
,MonadReader env m
,MonadLogger m
,MonadBaseControl IO m
,MonadMask m
,HasConfig env
,HasTerminal env
,HasReExec env
,HasHttpManager env
)
|
sjakobi/stack
|
src/Stack/Nix.hs
|
bsd-3-clause
| 6,781
| 8
| 24
| 2,040
| 1,390
| 752
| 638
| 136
| 4
|
import qualified Data.ByteString.Char8 as C
import Data.Array.Unboxed
import System.Environment
import System.IO
-- | Split the raw input into one (first, second) pair per line, cutting
-- each line at ';'.  Each line is expected to contain at least one ';'
-- (the pattern match fails otherwise, as in the original).
getinputs bs = [ pair (C.split ';' ln) | ln <- C.lines bs ]
  where pair (a:b:_) = (a, b)
-- | A candidate common subsequence: its length plus the characters in
-- reverse order (built by consing, so 'show' reverses it back).
data P = P {-# UNPACK #-} !Int String
-- NOTE(review): Eq/Ord look only at the length field, ignoring the
-- string — intentional here, since 'max' is used to pick the longer
-- subsequence, but these are not lawful structural instances.
instance Eq P where
  (P x _) == (P y _) = x == y
instance Ord P where
  compare (P x _) (P y _) = compare x y
instance Show P where
  show (P _ s) = reverse s
-- | Longest common subsequence of two ByteStrings, computed with
-- classic dynamic programming.  'cache' is a lazily-filled boxed array
-- of subproblem results, so each (i, j) cell is evaluated at most once.
lcs s t = cache ! (len1, len2)
  where -- 1-based unboxed views of the two inputs for O(1) indexing.
        u1 = listArray (1, len1) (C.unpack s) :: UArray Int Char
        u2 = listArray (1, len2) (C.unpack t) :: UArray Int Char
        len1 = C.length s
        len2 = C.length t
        -- Standard LCS recurrence; the two guards on (==)/(/=) are
        -- exhaustive for i > 0 and j > 0.
        go i j
          | i == 0 || j == 0 = P 0 []
          | u1 ! i == u2 ! j = append (u1!i) (cache ! (pred i, pred j))
          | u1 ! i /= u2 ! j = max (cache ! (pred i, j)) (cache ! (i, pred j))
        -- Memo table over all (i, j) subproblems, including row/column 0.
        cache = listArray ((0,0), (len1, len2)) [ go x y | x <- [0..len1], y <- [0..len2]] :: Array (Int, Int) P
        -- Extend a subsequence by one character (kept reversed in P).
        append c (P x s) = P (succ x) (c:s)
-- | Read the file named by the first command-line argument and print
-- the LCS of each semicolon-separated pair of strings, one per line.
-- NOTE(review): the pattern @fname:_@ crashes with no arguments.
main = do fname:_ <- getArgs
          withFile fname ReadMode mainloop
  where mainloop handle = C.hGetContents handle >>= mapM_ (print . uncurry lcs) . getinputs
|
wangbj/excises
|
lcs.hs
|
bsd-3-clause
| 1,172
| 0
| 12
| 354
| 625
| 322
| 303
| 28
| 1
|
import Prelude
import Network.HTTP.Enumerator
import Text.Feed.Import
import Text.Feed.Query
import Text.Feed.Export
import Text.XML.Light
import Text.XML.Light.Proc
import Text.XML.Light.Types
import Text.XML.Light.Lexer
import qualified Data.ByteString.Lazy as L
import qualified System.IO.UTF8 as U
import Data.ByteString.Lazy.UTF8
import Codec.Binary.UTF8.String
import Control.Monad
import Control.Arrow
import Data.Maybe
import Data.List
--import Control.Monad.Trans
-- | Look up the value of the attribute with the given (unqualified)
-- name on an XML element.  Partial: 'fromJust' crashes if the
-- attribute is absent.
getAttrVal name = findAttrByName name >>> fromJust >>> attrVal
  where findAttrByName name el = find (\a -> (qName.attrKey) a == name ) attrs
          where attrs = elAttribs el
-- | Find the <collection id="album-list"> element.  Partial: 'head'
-- crashes if no such element exists in the document.
getCollectionEl doc = head $ filterElements (\el -> (qName.elName) el == "collection" && getAttrVal "id" el == "album-list" ) doc
-- | Extract the @href@ attribute from an element.
urlFromEl = getAttrVal "href"
-- | Fetch the user's Yandex.Fotki service document, follow it to the
-- album-list feed, and print the parse-request for one named album.
-- NOTE(review): every step uses fromJust/head — any network or parse
-- failure aborts the program with an uninformative error.
main = do
  s <- simpleHttp "http://api-fotki.yandex.ru/api/users/mrschaos/"
  feed <- simpleHttp $ urlFromEl $ getCollectionEl $ fromJust $ parseXMLDoc s
  --L.putStrLn feed
  let items = getFeedItems $ fromJust $ readAtom $ fromJust $ parseXMLDoc feed
  albumFeed <- parseUrl $ (decodeString.fromJust.getItemLink) $ fromJust$ (find (\item -> (fromJust.getItemTitle) item == encodeString "Кошки и собаки") ) items
  print albumFeed
|
drchaos/ws_uploader
|
ws_uploader.hs
|
bsd-3-clause
| 1,295
| 0
| 17
| 199
| 367
| 201
| 166
| 28
| 1
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses,TemplateHaskell, OverloadedStrings #-}
module Handler.Vote ( postVotedR
, postVoteiR
, getVoteiR
, getVotedR
) where
import Foundation
import Forms.Image
import qualified Data.ByteString.Lazy as L
import Helpers.Document
import Control.Applicative
import qualified Data.Text as T
-- | Handle a down-vote POST for an image.  On a valid form submission
-- by an authenticated user: a first vote inserts a Votes row with
-- value -1; an existing vote of 0 or +1 is lowered to -1/0 and the
-- image's vote count decremented; an existing -1 is a no-op.
-- Always redirects back to the image page.
-- NOTE(review): this is a near-duplicate of 'postVoteiR' with the sign
-- flipped — a shared helper parameterised on direction would remove
-- the duplication.
postVotedR :: ImagesId ->Handler RepHtml
postVotedR id = do
    ((dresult, dwidget), denctype) <- runFormPost (imageForm images_thumbsdown_jpg)
    case dresult of
        FormSuccess _ -> do
            currentUserid <- requireAuthId
            alreadyInVotes <- runDB (getBy $ UniqueVote currentUserid id)
            case alreadyInVotes of
                -- First vote by this user on this image.
                Nothing -> do runDB (insert $ Votes currentUserid id (-1))
                              runDB (update id [ImagesVotes -=. 1])
                Just (qid , val) -> do
                    case votesValue val of
                        0 -> do
                            runDB (update qid [ VotesValue =. (-1)])
                            runDB (update id [ImagesVotes -=. 1])
                        1 -> do
                            runDB (update qid [ VotesValue =. 0])
                            runDB (update id [ImagesVotes -=. 1])
                        -- Already at the minimum vote: do nothing.
                        (-1) -> return ()
            redirect RedirectTemporary $ ImageR id
        _ -> redirect RedirectTemporary $ ImageR id
-- | Handle an up-vote POST for an image; mirror image of 'postVotedR'
-- with the vote direction reversed.
postVoteiR :: ImagesId ->Handler RepHtml
postVoteiR id = do
    ((iresult, iwidget), ienctype) <- runFormPost (imageForm images_thumbsup_jpg)
    case iresult of
        FormSuccess _ -> do
            currentUserid <- requireAuthId
            alreadyInVotes <- runDB (getBy $ UniqueVote currentUserid id)
            case alreadyInVotes of
                -- First vote by this user on this image.
                Nothing -> do runDB (insert $ Votes currentUserid id 1)
                              runDB (update id [ImagesVotes +=. 1])
                Just (qid , val) -> do
                    case votesValue val of
                        0 -> do
                            runDB (update qid [ VotesValue =. 1])
                            runDB (update id [ImagesVotes +=. 1])
                        (-1) -> do
                            runDB (update qid [ VotesValue =. 0])
                            runDB (update id [ImagesVotes +=. 1])
                        -- Already at the maximum vote: do nothing.
                        1 -> return ()
            redirect RedirectTemporary $ ImageR id
        _ -> redirect RedirectTemporary $ ImageR id
-- GET requests are treated identically to POSTs.
getVoteiR = postVoteiR
getVotedR = postVotedR
|
pankajmore/Imghost
|
Handler/Vote.hs
|
bsd-3-clause
| 3,206
| 0
| 28
| 1,658
| 703
| 347
| 356
| 56
| 5
|
{-# LANGUAGE OverloadedStrings #-}
module Web.EmbedThis.DataParser (
DataParser(..),
TwitterDataParser(..)
) where
import Web.EmbedThis.DataParser.Class
import Web.EmbedThis.DataParser.Twitter
|
rtrvrtg/embed-this
|
src/Web/EmbedThis/DataParser.hs
|
bsd-3-clause
| 203
| 0
| 5
| 25
| 38
| 27
| 11
| 6
| 0
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-|
Module : $Header$
Copyright : (c) 2016 Deakin Software & Technology Innovation Lab
License : BSD3
Maintainer : Shannon Pace <shannon.pace@deakin.edu.au>
Stability : unstable
Portability : portable
Tests for state functionality.
-}
module Eclogues.StateSpec (spec) where
import Eclogues.Prelude
import Eclogues.API
( Action (..), DependencyPatch (..), JobError (..), StageExpectation (..) )
import qualified Eclogues.Job as Job
import Eclogues.Job
( Stage (..), QueueStage (..), Sealed (..)
, RunErrorReason (..), FailureReason (..)
, singleton)
import Eclogues.State
( AtomicFailure (..)
, addFile, atomicActs, deleteBox, deleteContainer, deleteJob, killJob
, patchDepends, sealBox, updateJobs )
import qualified Eclogues.State.Monad as ES
import Eclogues.State.Types (AppState, jobs)
import TestUtils
import qualified Data.HashMap.Strict as HM
import Control.Monad.State (get, gets, put)
import Data.UUID (nil)
import Test.Hspec
{-# ANN module ("HLint: ignore Use ." :: String) #-}
-- Fixture job names reused throughout the spec.
job :: Job.Name
job = forceName "job"
job2 :: Job.Name
job2 = forceName "job2"
dep :: Job.Name
dep = forceName "dep"
optDepName :: Job.Name
optDepName = forceName "optDep"
-- | An optional (non-mandatory) dependency on 'optDepName'.
optDep :: Job.Dependency
optDep = Job.Dependency optDepName True
-- | Create the dependency job, put it in the given stage, then create
-- 'job' depending on it with the dependency's optionality preserved.
createJobWithDep' :: Job.Dependency -> Job.Stage -> Scheduler ()
createJobWithDep' (Job.Dependency depName depOpt) depStage = do
    createJob' $ isolatedJob' depName
    ES.setJobStage depName depStage
    createJob' $ isolatedJob' job & Job.dependsOn . Job.dependenciesMap .~ HM.singleton depName depOpt
-- | As 'createJobWithDep'', but the dependency is mandatory.
createJobWithDep :: Job.Name -> Job.Stage -> Scheduler ()
createJobWithDep = createJobWithDep' . (`Job.Dependency` False)
-- | Tests for job creation: queueing, dependency handling, and the
-- error cases (duplicate name, missing/failed dependency, missing
-- container).
testCreateJob :: Spec
testCreateJob = describe "createJob" $ do
    it "should succeed when given a unique, valid job" $
        let result = schedule . createJob' $ isolatedJob' job
        in result `shouldHave` jobInStage job (Queued LocalQueue)
    context "when provided with dependency in Finished stage" $
        it "should be placed in the Queued stage" $
            let result = schedule $ createJobWithDep dep Finished
            in result `shouldHave` jobInStage job (Queued LocalQueue)
    context "when provided with dependency in an active stage" $
        it "should be waiting on one job" $
            let result = schedule $ createJobWithDep dep Running
            in result `shouldHave` jobInStage job (Waiting $ singleton dep)
    context "when provided with a dependency" $
        it "should update dependants list for that dependency" $
            let result = schedule $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ dependentJob' job [dep]
            in result `shouldHave` jobWithRevDeps dep [job]
    context "when provided a job with a name that already exists" $
        it "should return JobNameUsed error" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    createJob' $ isolatedJob' job
            in result `shouldHave` producedError NameUsed
    context "when provided a dependency that doesn't exist" $
        it "should return JobMustBe ExpectExtant error" $
            let result = schedule . createJob' $ dependentJob' job [dep]
            in result `shouldHave` producedError (JobMustBe dep ExpectExtant)
    context "when provided a dependency that has failed" $
        it "should return JobMustBe ExpectNonFailed error" $
            let result = schedule $ do
                    createJob' $ isolatedJob' dep
                    ES.setJobStage dep (Failed UserKilled)
                    createJob' $ dependentJob' job [dep]
            in result `shouldHave` producedError (JobMustBe dep ExpectNonFailed)
    context "if the specified container does not exist" $
        it "should return ContainerMustExist error" $
            let result = schedule $ createJob' (isolatedJob' job & Job.container .~ con)
                con = Job.ContainerId $ forceName "unknowncontainer"
            in result `shouldHave` producedError (ContainerMustExist con)
-- | Tests for killing jobs: transition to Killing, unknown jobs, and
-- jobs already in a termination stage (no-op).
testKillJob :: Spec
testKillJob = describe "killJob" $ do
    it "should transition job with given name to Killing stage" $
        let result = schedule $ do
                createJob' $ isolatedJob' job
                killJob job
        in result `shouldHave` jobInStage job Killing
    context "when provided the name of a job that doesn't exist" $
        it "should return NoSuch error" $
            schedule (killJob job) `shouldHave` producedError NoSuch
    context "when provided the name of a job that is in a termination stage" $
        it "should do nothing" $
            let result = schedule $ do
                    discardSideEffects $ do
                        createJob' $ isolatedJob' job
                        ES.setJobStage job Finished
                    killJob job
            in result `shouldHave` noSideEffects
-- | Tests for patching a Waiting job's dependency set: adding and
-- removing dependencies in various stages, and the associated errors.
-- Setup steps are wrapped in 'discardSideEffects' so assertions see
-- only the effects of the patch itself.
testPatchDepends :: Spec
testPatchDepends = describe "patchDepends" $ do
    it "should fail if the job isn't Waiting" $
        let result = schedule $ do
                discardSideEffects . createJob' $ isolatedJob' job
                patchDepends job $ DependencyPatch mempty mempty
        in result `shouldHave` producedError (JobMustBe job ExpectWaiting)
    it "should queue a job if its remaining active dependencies are removed" $
        let result = schedule $ do
                discardSideEffects $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ dependentJob' job [dep]
                patchDepends' job [] [dep]
        in do
            result `shouldHave` jobInStage job (Queued LocalQueue)
            result `shouldHave` jobWithDeps job []
    it "should do nothing if an already terminated dependency is removed" $
        let result = schedule $ do
                discardSideEffects $ do
                    createJob' $ isolatedJob' dep
                    ES.setJobStage dep Finished
                    createJob' $ isolatedJob' job2
                    createJob' $ dependentJob' job [dep, job2]
                patchDepends' job [] [dep]
        in do
            result `shouldHave` jobInStage job (Waiting $ singleton job2)
            result `shouldHave` jobWithDeps job [job2]
            result `shouldHave` jobWithRevDeps dep []
            result `shouldHave` noSchedulingSideEffects
    it "should do nothing if non-dependency is removed" $
        let result = schedule $ do
                createWaitingOnJob2 job
                patchDepends' job [] [dep]
        in do
            result `shouldHave` jobInStage job (Waiting $ singleton job2)
            result `shouldHave` noSideEffects
    it "should fail when attempting to add a dependency that doesn't exist" $
        let result = schedule $ do
                createWaitingOnJob2 job
                patchDepends' job [dep] []
        in result `shouldHave` producedError (JobMustBe dep ExpectExtant)
    it "should add to Waiting when an active dependency is added" $
        let result = schedule $ do
                discardSideEffects $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ isolatedJob' job2
                    createJob' $ dependentJob' job [dep]
                patchDepends' job [job2] []
        in do
            result `shouldHave` jobInStage job (Waiting $ singleton dep <> singleton job2)
            result `shouldHave` jobWithDeps job [dep, job2]
            result `shouldHave` jobWithRevDeps job2 [job]
    it "should do nothing when a Finished dependency is added" $
        let result = schedule $ do
                discardSideEffects $ do
                    createJob' $ isolatedJob' dep
                    ES.setJobStage dep Finished
                    createWaitingOnJob2 job
                patchDepends' job [dep] []
        in do
            result `shouldHave` jobInStage job (Waiting $ singleton job2)
            result `shouldHave` jobWithRevDeps dep [job]
            result `shouldHave` jobWithRevDeps job2 [job]
            result `shouldHave` noSchedulingSideEffects
    it "should do nothing when an existing dependency is added" $
        let result = schedule $ do
                discardSideEffects $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ dependentJob' job [dep]
                patchDepends' job [dep] []
        in do
            result `shouldHave` jobInStage job (Waiting $ singleton dep)
            result `shouldHave` noSideEffects
    it "should fail when attempting to add a dependency that has failed" $
        let result = schedule $ do
                discardSideEffects $ do
                    createJob' $ isolatedJob' dep
                    ES.setJobStage dep (Failed $ NonZeroExitCode 5)
                    createWaitingOnJob2 job
                patchDepends' job [dep] []
        in result `shouldHave` producedError (JobMustBe dep ExpectNonFailed)
  where
    -- Build a DependencyPatch from lists of names to add and remove.
    patchDepends' n add rm = patchDepends n $ DependencyPatch (namesToDeps add) rm
    -- Create job2 and a job 'n' waiting on it, discarding side effects.
    createWaitingOnJob2 n = discardSideEffects $ createJob' (isolatedJob' job2) *> createJob' (dependentJob' n [job2])
-- | Run a scheduler action, keep the resulting application state, but
-- reset every other part of the scheduler state (e.g. recorded side
-- effects) to what it was before the action ran.
discardSideEffects :: Scheduler a -> Scheduler a
discardSideEffects sch = do
    st <- get
    a <- sch
    st' <- get
    -- Restore the old state, splicing in only the new appState.
    put $ st & ES.appState .~ (st' ^. ES.appState)
    pure a
-- | Tests for deleting jobs: removal of finished jobs, cleanup of
-- reverse dependencies, and refusal while dependants are outstanding.
testDeleteJob :: Spec
testDeleteJob = describe "deleteJob" $ do
    it "should remove the finished job with the given name from application stage" $
        let result = schedule $ do
                createJob' $ isolatedJob' job
                ES.setJobStage job Finished
                deleteJob job
        in result `shouldHave` noEntity job
    context "when provided the name of an active job with dependencies" $
        it "should remove any reverse dependencies" $
            let result = schedule $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ dependentJob' job [dep]
                    ES.setJobStage job Running
                    deleteJob job
            in do
                result `shouldHave` noEntity job
                result `shouldHave` noRevDep dep
    context "when provided the name of a job that has outstanding dependants" $
        it "should return OutstandingDependants error" $
            let result = schedule $ do
                    createJobWithDep dep Finished
                    deleteJob dep
            in result `shouldHave` producedError (OutstandingDependants (pure job))
-- | Tests for deleting containers: refused while jobs are active,
-- allowed once all contained jobs are done.
testDeleteContainer :: Spec
testDeleteContainer = describe "deleteContainer" $ do
    it "should refuse to delete containers with active jobs" $
        let result = schedule $ do
                createJob' $ isolatedJob' job
                deleteContainer echoContainer
        in result `shouldHave` producedError (OutstandingDependants (pure job))
    it "should be fine deleting containers where all the jobs are done" $
        let result = schedule $ do
                createJob' $ isolatedJob' job
                ES.setJobStage job Finished
                deleteContainer echoContainer
        in result `shouldHave` noContainer echoContainer
-- | Tests for deleting boxes: refused while dependants are active,
-- allowed once the dependant job has finished.
testDeleteBox :: Spec
testDeleteBox = describe "deleteBox" $ do
    it "should refuse to delete boxes with active dependants" $
        let result = schedule $ do
                createBox' dep
                createJob' $ dependentJob' job [dep]
                deleteBox dep
        in result `shouldHave` producedError (OutstandingDependants (pure job))
    it "should be fine deleting boxes with a finished job dependant" $
        let result = schedule $ do
                createBox' dep
                createJob' $ dependentJob' job [dep]
                updateJobs' [(job, Finished)]
                deleteBox dep
        in result `shouldHave` noEntity dep
-- | Run 'updateJobs' over the current job map with the given stage
-- updates from the scheduler.
updateJobs' :: [(Job.Name, Job.Stage)] -> Scheduler ()
updateJobs' statuses = (`updateJobs` statuses) =<< gets (^.. jobs)
-- | Tests for 'updateJobs': how scheduler stage reports (or their
-- absence) drive job stage transitions, including kill handling,
-- dependency propagation on termination, and scheduler-loss detection.
testUpdateJobs :: Spec
testUpdateJobs = describe "updateJobs" $ do
    context "when job is on scheduler and no new status information is received" $
        it "should do nothing" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job (Queued SchedulerQueue)
                    updateJobs' [(job, Queued SchedulerQueue)]
            in result `shouldHave` jobInStage job (Queued SchedulerQueue)
    context "when job is in stage Killing and no updated information is provided" $
        it "should change job to Failed UserKilled stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job Killing
                    updateJobs' []
            in result `shouldHave` jobInStage job (Failed UserKilled)
    context "when job is in stage Killing and is updated to stage Finished" $
        it "should change job to Failed UserKilled stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job Killing
                    updateJobs' [(job, Finished)]
            in result `shouldHave` jobInStage job (Failed UserKilled)
    context "when job is in stage Killing and is updated to stage RunError SchedulerLost" $
        it "should change job to RunError SchedulerLost stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job Killing
                    updateJobs' [(job, RunError SchedulerLost)]
            in result `shouldHave` jobInStage job (RunError SchedulerLost)
    context "when job is in stage Killing and is updated to stage Running" $
        it "should change job to Killing stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job Killing
                    updateJobs' [(job, Running)]
            in result `shouldHave` jobInStage job Killing
    context "when job is on scheduler and is provided no stage information" $
        it "should change job to RunError SchedulerLost stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job Running
                    updateJobs' []
            in result `shouldHave` jobInStage job (RunError SchedulerLost)
    context "when job is not on scheduler and is provided no stage information" $
        it "should change job to Queued LocalQueue stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    updateJobs' []
            in result `shouldHave` jobInStage job (Queued LocalQueue)
    context "when provided job stage that is an expected transition from current stage" $
        it "should change job to the provided stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    updateJobs' [(job, Running)]
            in result `shouldHave` jobInStage job Running
    context "when provided job stage is not an expected transition from current stage" $
        it "should change job to RunError BadSchedulerTransition stage" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job Running
                    updateJobs' [(job, Waiting $ singleton dep)]
            in result `shouldHave` jobInStage job (RunError BadSchedulerTransition)
    context "when job does not transition into a termination stage" $
        it "should not change the stage of any dependent jobs" $
            let result = schedule $ do
                    createJobWithDep dep Running
                    updateJobs' [(dep, Running)]
            in do
                result `shouldHave` jobInStage dep Running
                result `shouldHave` jobInStage job (Waiting $ singleton dep)
    context "when optional job does not transition into a termination stage" $
        it "should not change the stage of any dependent jobs" $
            let result = schedule $ do
                    createJobWithDep' optDep Running
                    updateJobs' [(optDepName, Running)]
            in do
                result `shouldHave` jobInStage optDepName Running
                result `shouldHave` jobInStage job (Waiting $ singleton optDepName)
    context "when job transitions into non-Finished termination stage" $
        it "should change the stage of dependent jobs to Failed DependencyFailed" $
            let result = schedule $ do
                    createJobWithDep dep Running
                    updateJobs' [(dep, Failed UserKilled)]
            in do
                result `shouldHave` jobInStage dep (Failed UserKilled)
                result `shouldHave` jobInStage job (Failed (DependencyFailed dep))
    context "when optional job transitions into non-Finished termination stage" $
        it "should change the stage of exclusively dependent job to Queued LocalQueue" $
            let result = schedule $ do
                    createJobWithDep' optDep Running
                    updateJobs' [(optDepName, Failed UserKilled)]
            in do
                result `shouldHave` jobInStage optDepName (Failed UserKilled)
                result `shouldHave` jobInStage job (Queued LocalQueue)
    context "when job transitions into Finished stage" $
        it "should change the stage of exclusively dependent job to Queued LocalQueue" $
            let result = schedule $ do
                    createJobWithDep dep Running
                    updateJobs' [(dep, Finished)]
            in do
                result `shouldHave` jobInStage dep Finished
                result `shouldHave` jobInStage job (Queued LocalQueue)
    context "when job transitions into Finished stage but dependent is also waiting on another job" $
        it "should change the stage of dependent job to Waiting 1" $
            let result = schedule $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ isolatedJob' job2
                    createJob' $ dependentJob' job [dep, job2]
                    updateJobs' [(dep, Finished)]
            in do
                result `shouldHave` jobInStage dep Finished
                result `shouldHave` jobInStage job (Waiting $ singleton job2)
    context "when dependent job finishes" $ do
        it "should remove reverse dependency on the job on which it depended" $
            let result = schedule $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ dependentJob' job [dep]
                    updateJobs' [(dep, Finished), (job, Finished)]
            in result `shouldHave` noRevDep dep
        it "should allow job and dependent to be deleted" $
            let result = schedule $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ dependentJob' job [dep]
                    updateJobs' [(dep, Failed UserKilled)]
                    ES.deleteJob dep
                    ES.deleteJob job
            in do
                result `shouldHave` noEntity dep
                result `shouldHave` noEntity job
        it "should recursively cause all dependencies to fail" $
            let result = schedule $ do
                    createJob' $ isolatedJob' dep
                    createJob' $ dependentJob' job [dep]
                    createJob' $ dependentJob' job2 [job]
                    updateJobs' [(dep, Failed UserKilled)]
            in do
                result `shouldHave` noRevDep dep
                result `shouldHave` noRevDep job
                result `shouldHave` jobInStage job (Failed (DependencyFailed dep))
                result `shouldHave` jobInStage job2 (Failed (DependencyFailed job))
    context "a job sent to the scheduler isn't seen in the list" $ do
        it "should change the stage to SchedulerLost if the job has been seen on the scheduler before" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job $ Queued SchedulerQueue
                    updateJobs' []
            in result `shouldHave` jobInStage job (RunError SchedulerLost)
        it "should do nothing if the job hasn't been seen on the scheduler before" $
            let result = schedule $ do
                    createJob' $ isolatedJob' job
                    ES.setJobStage job $ Queued SentToScheduler
                    updateJobs' []
            in result `shouldHave` jobInStage job (Queued SentToScheduler)
-- | Tests for 'addFile': files may live under jobs or boxes, unknown
-- namespaces and duplicate filenames are rejected.
testAddFile :: Spec
testAddFile = describe "addFile" $ do
    it "can add files under both jobs and boxes" $
        let result = schedule $ do
                createBox' job
                createJob' $ isolatedJob' job2
                addFile job f1 nil
                addFile job2 f2 nil
            f1 = "afile"
            f2 = "file"
        in do
            result `shouldHave` file job f1
            result `shouldHave` file job2 f2
    it "fails if given an unknown namespace" $
        let result = schedule $ addFile job "afile" nil
        in result `shouldHave` producedError NoSuch
    it "fails if a filename is already taken" $
        let result = schedule $ do
                createBox' job
                addFile job f1 nil
                addFile job f1 nil
            f1 = "file"
        in result `shouldHave` producedError (FileAlreadyExists job f1)
-- | Tests for 'sealBox': sealing triggers dependants and is idempotent.
testSealBox :: Spec
testSealBox = describe "sealBox" $ do
    it "triggers dependants of the box" $
        let result = schedule $ do
                createBox' dep
                createJob' $ dependentJob' job [dep]
                sealBox dep
        in do
            result `shouldHave` boxInStage dep Sealed
            result `shouldHave` jobInStage job (Queued LocalQueue)
    it "does nothing on an already sealed box" $
        let result = schedule $ do
                createBox' dep
                sealBox dep
                createJob' $ dependentJob' job [dep]
                ES.setJobStage job (Waiting $ singleton dep)
                sealBox dep  -- should do nothing
        in do
            result `shouldHave` boxInStage dep Sealed
            result `shouldHave` jobInStage job (Waiting $ singleton dep)
-- | Run a batch of actions atomically (tagged with the nil UUID) and
-- return the resulting application state or the first failure.
atomicActs' :: [Action] -> Either AtomicFailure AppState
atomicActs' as = (^. ES.appState) <$> atomicActs defWithContainer Nothing ((nil,) <$> as)
-- | Tests for 'atomicActs': a batch stops at the first failing action,
-- and a fully successful batch applies every action.
testAtomic :: Spec
testAtomic = describe "atomicActs" $ do
    it "stops at the first failure" $
        let result = atomicActs' [ cr1, cr2 ]
            cr1 = ActCreateJob $ dependentJob' job [dep]
            cr2 = ActCreateJob $ dependentJob' job2 [job]
        in result `shouldBe` Left (AtomicFailure cr1 $ JobMustBe dep ExpectExtant)
    it "applies all actions" $
        let result = atomicActs' [ cr1, cr2, cr3, k2 ]
            cr1 = ActCreateJob $ isolatedJob' job
            cr2 = ActCreateJob $ dependentJob' job2 [job]
            cr3 = ActCreateJob $ dependentJob' job3 [job]
            k2 = ActKillJob job2
            job3 = forceName "job3"
        in do
            result `shouldSatisfy` has _Right
            let res = either (error "left") id result
            pure res `shouldHave` jobInStage job (Queued LocalQueue)
            pure res `shouldHave` jobInStage job2 Killing
            pure res `shouldHave` jobInStage job3 (Waiting $ singleton job)
-- | Top-level spec aggregating all state tests.
spec :: Spec
spec = do
    testCreateJob
    testKillJob
    testPatchDepends
    testDeleteJob
    testDeleteBox
    testDeleteContainer
    testUpdateJobs
    testAddFile
    testSealBox
    testAtomic
|
rimmington/eclogues
|
eclogues-impl/test/Eclogues/StateSpec.hs
|
bsd-3-clause
| 23,855
| 5
| 30
| 7,878
| 5,605
| 2,663
| 2,942
| -1
| -1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- | The simplification engine is only willing to hoist allocations
-- out of loops if the memory block resulting from the allocation is
-- dead at the end of the loop. If it is not, we may cause data
-- hazards.
--
-- This module rewrites loops with memory block merge parameters such
-- that each memory block is copied at the end of the iteration, thus
-- ensuring that any allocation inside the loop is dead at the end of
-- the loop. This is only possible for allocations whose size is
-- loop-invariant, although the initial size may differ from the size
-- produced by the loop result.
--
-- Additionally, inside parallel kernels we also copy the initial
-- value. This has the effect of making the memory block returned by
-- the array non-existential, which is important for later memory
-- expansion to work.
module Futhark.Optimise.DoubleBuffer
( doubleBuffer )
where
import Control.Applicative
import Control.Monad.State
import Control.Monad.Writer
import Control.Monad.Reader
import qualified Data.HashMap.Lazy as HM
import qualified Data.HashSet as HS
import Data.Maybe
import Data.List
import Prelude
import Futhark.MonadFreshNames
import Futhark.Tools (intraproceduralTransformation)
import Futhark.Representation.ExplicitMemory
import qualified Futhark.Representation.ExplicitMemory.IndexFunction as IxFun
import qualified Futhark.Analysis.ScalExp as SE
import Futhark.Pass
-- | The double-buffering pass: runs 'optimiseFunDef' over each
-- function in the program.
doubleBuffer :: Pass ExplicitMemory ExplicitMemory
doubleBuffer =
  Pass { passName = "Double buffer"
       , passDescription = "Perform double buffering for merge parameters of sequential loops."
       , passFunction = intraproceduralTransformation optimiseFunDef
       }
-- | Optimise one function body.  Starts with an empty scope and with
-- copying of initial merge values disabled (it is enabled only once we
-- enter a kernel; see 'envCopyInit').
optimiseFunDef :: MonadFreshNames m => FunDef -> m FunDef
optimiseFunDef fundec = do
  body' <- runReaderT (runDoubleBufferM $ inScopeOf fundec $
                       optimiseBody $ funDefBody fundec) $
           Env emptyScope False
  return fundec { funDefBody = body' }
  where emptyScope :: Scope ExplicitMemory
        emptyScope = mempty
-- | Reader environment for the pass: the current scope plus a flag
-- controlling copying of initial merge values.
data Env = Env { envScope :: Scope ExplicitMemory
               , envCopyInit :: Bool
                 -- ^ If true, copy initial values of merge
                 -- parameters.  This is necessary to remove
                 -- existential memory inside kernels, but seems to
                 -- break C compiler vectorisation in sequential code.
                 -- We set this to true once we enter kernels.
               }
-- | The pass monad: a reader over 'Env' on top of the underlying
-- fresh-names monad.
newtype DoubleBufferM m a = DoubleBufferM { runDoubleBufferM :: ReaderT Env m a }
                          deriving (Functor, Applicative, Monad,
                                    MonadReader Env, MonadFreshNames)
instance (Applicative m, Monad m) =>
         HasScope ExplicitMemory (DoubleBufferM m) where
  askScope = asks envScope
instance (Applicative m, Monad m) =>
         LocalScope ExplicitMemory (DoubleBufferM m) where
  localScope scope = local $ \env -> env { envScope = envScope env <> scope }
-- | Optimise every binding in a body.
optimiseBody :: MonadFreshNames m => Body -> DoubleBufferM m Body
optimiseBody body = do
  bnds' <- optimiseBindings $ bodyBindings body
  return $ body { bodyBindings = bnds' }
-- | Optimise a binding list left-to-right, bringing each optimised
-- binding into scope for the ones that follow.
optimiseBindings :: MonadFreshNames m => [Binding] -> DoubleBufferM m [Binding]
optimiseBindings [] = return []
optimiseBindings (e:es) = do
  e_es <- optimiseBinding e
  es' <- inScopeOf e_es $ optimiseBindings es
  return $ e_es ++ es'
-- | A DoLoop binding is double-buffered (possibly emitting extra
-- allocation bindings before it); any other binding is recursed into.
-- Entering a kernel flips 'envCopyInit' to True for its contents.
optimiseBinding :: MonadFreshNames m => Binding -> DoubleBufferM m [Binding]
optimiseBinding (Let pat () (DoLoop ctx val form body)) = do
  body' <- localScope (scopeOfLoopForm form <> scopeOfFParams (map fst $ ctx++val)) $
           optimiseBody body
  (bnds, ctx', val', body'') <- optimiseLoop ctx val body'
  return $ bnds ++ [Let pat () $ DoLoop ctx' val' form body'']
optimiseBinding (Let pat () e) = pure . Let pat () <$> mapExpM optimise e
  where optimise = identityMapper { mapOnBody = optimiseBody
                                  , mapOnOp = optimiseOp
                                  }
          where optimiseOp (Inner k) = Inner <$> optimiseKernel k
                optimiseOp op = return op
                optimiseKernel = local (\env -> env { envCopyInit = True }) .
                                 mapKernelM identityKernelMapper
                                 { mapOnKernelBody = optimiseBody
                                 , mapOnKernelKernelBody = optimiseKernelBody
                                 , mapOnKernelLambda = optimiseLambda
                                 }
-- | Optimise the statements of a kernel body, recursing into thread
-- bindings and group-reduce lambdas; other kernel statements are left
-- unchanged.
optimiseKernelBody :: MonadFreshNames m =>
                      KernelBody ExplicitMemory
                   -> DoubleBufferM m (KernelBody ExplicitMemory)
optimiseKernelBody kbody = do
  stms' <- optimiseKernelStms $ kernelBodyStms kbody
  return $ kbody { kernelBodyStms = stms' }
  where optimiseKernelStms [] =
          return []
        optimiseKernelStms (e:es) = do
          e' <- optimiseKernelStm e
          es' <- inScopeOf e' $ optimiseKernelStms es
          return $ e' ++ es'
        optimiseKernelStm (Thread threads bnd) =
          map (Thread threads) <$> optimiseBinding bnd
        optimiseKernelStm (GroupReduce pes w lam input) =
          pure <$> (GroupReduce pes w <$> optimiseLambda lam <*> pure input)
        optimiseKernelStm stm =
          return [stm]
-- | Optimise the body of a lambda in its own scope.
optimiseLambda :: MonadFreshNames m => Lambda -> DoubleBufferM m Lambda
optimiseLambda lam = do
  body <- inScopeOf lam $ optimiseBody $ lambdaBody lam
  return lam { lambdaBody = body }
-- | Double-buffer a single loop: decide which merge parameters can be
-- buffered, emit buffer allocations (and possibly copies of initial
-- values) before the loop, and rewrite the loop body to copy buffered
-- results.  Returns the pre-loop bindings plus the rewritten context
-- and value merge lists and body.
optimiseLoop :: MonadFreshNames m =>
                [(FParam, SubExp)] -> [(FParam, SubExp)] -> Body
             -> DoubleBufferM m ([Binding], [(FParam, SubExp)], [(FParam, SubExp)], Body)
optimiseLoop ctx val body = do
  -- We start out by figuring out which of the merge variables should
  -- be double-buffered.
  buffered <- doubleBufferMergeParams
              (zip (map fst ctx) (bodyResult body))
              (map fst merge)
              (boundInBody body)
  -- Then create the allocations of the buffers and copies of the
  -- initial values.
  (merge', allocs) <- allocBindings merge buffered
  -- Modify the loop body to copy buffered result arrays.
  let body' = doubleBufferResult (map fst merge) buffered body
      (ctx', val') = splitAt (length ctx) merge'
  -- Return the rewritten loop pieces together with the pre-loop bindings.
  return (allocs, ctx', val', body')
  where merge = ctx ++ val
-- | The booleans indicate whether we should also play with the
-- initial merge values.
data DoubleBuffer = BufferAlloc VName SubExp Space Bool
                  | BufferCopy VName (IxFun.IxFun SE.ScalExp) VName Bool
                    -- ^ First name is the memory block to copy to,
                    -- second is the name of the array copy.
                  | NoBuffer
                  deriving (Show)
-- | Decide, per value merge parameter, whether it can be
-- double-buffered.  Memory-block parameters with a loop-invariant size
-- get a 'BufferAlloc'; array parameters whose backing memory was just
-- allocated a buffer get a 'BufferCopy'.  The state threads the map
-- from original memory block to its fresh buffer name.
doubleBufferMergeParams :: MonadFreshNames m =>
                           [(FParam,SubExp)] -> [FParam] -> Names
                        -> DoubleBufferM m [DoubleBuffer]
doubleBufferMergeParams ctx_and_res val_params bound_in_loop = do
  copy_init <- asks envCopyInit
  evalStateT (mapM (buffer copy_init) val_params) HM.empty
  where -- A size is loop-invariant if it is a constant, or a variable
        -- that the loop result maps to something not bound in the loop.
        loopInvariantSize copy_init (Constant v) =
          Just (Constant v, copy_init)
        loopInvariantSize copy_init (Var v) =
          case find ((==v) . paramName . fst) ctx_and_res of
            Just (_, Constant val) ->
              Just (Constant val, False)
            Just (_, Var v') | not $ v' `HS.member` bound_in_loop ->
              Just (Var v', False)
            Just _ ->
              Nothing
            Nothing ->
              Just (Var v, copy_init)
        buffer copy_init fparam = case paramType fparam of
          Mem size space
            | Just (size', b) <- loopInvariantSize copy_init size -> do
                -- Let us double buffer this!
                bufname <- lift $ newVName "double_buffer_mem"
                modify $ HM.insert (paramName fparam) (bufname, b)
                return $ BufferAlloc bufname size' space b
          Array {}
            | ArrayMem _ _ _ mem ixfun <- paramAttr fparam -> do
                buffered <- gets $ HM.lookup mem
                case buffered of
                  Just (bufname, b) -> do
                    copyname <- lift $ newVName "double_buffer_array"
                    return $ BufferCopy bufname ixfun copyname b
                  Nothing ->
                    return NoBuffer
          _ -> return NoBuffer
-- | Emit the buffer allocations (and, when requested, copies of the
-- initial merge values) as bindings, returning the possibly-rewritten
-- merge list alongside them.
allocBindings :: MonadFreshNames m =>
                 [(FParam,SubExp)] -> [DoubleBuffer]
              -> DoubleBufferM m ([(FParam,SubExp)], [Binding])
allocBindings merge = runWriterT . zipWithM allocation merge
  where allocation m@(Param pname _, _) (BufferAlloc name size space b) = do
          tell [Let (Pattern [] [PatElem name BindVar $ MemMem size space]) () $
                Op $ Alloc size space]
          if b
            then return (Param pname $ MemMem size space, Var name)
            else return m
        allocation (f, Var v) (BufferCopy mem _ _ b) | b = do
          -- Copy the initial array value into the fresh buffer.
          v_copy <- lift $ newVName $ baseString v ++ "_double_buffer_copy"
          (_v_mem, v_ixfun) <- lift $ lookupArraySummary v
          let bt = elemType $ paramType f
              shape = arrayShape $ paramType f
              bound = ArrayMem bt shape NoUniqueness mem v_ixfun
          tell [Let (Pattern []
                     [PatElem v_copy BindVar bound]) () $
                PrimOp $ Copy v]
          return (f, Var v_copy)
        allocation (f, se) _ =
          return (f, se)
-- | Rewrite the loop result so that buffered arrays are copied into
-- their double buffers at the end of each iteration, appending the
-- necessary copy bindings to the body.
doubleBufferResult :: [FParam] -> [DoubleBuffer] -> Body -> Body
doubleBufferResult valparams buffered (Body () bnds res) =
  let (ctx_res, val_res) = splitAt (length res - length valparams) res
      (copybnds,val_res') =
        unzip $ zipWith3 buffer valparams buffered val_res
  in Body () (bnds++catMaybes copybnds) $ ctx_res ++ val_res'
  where buffer _ (BufferAlloc bufname _ _ _) _ =
          (Nothing, Var bufname)
        buffer fparam (BufferCopy bufname ixfun copyname _) (Var v) =
          -- To construct the copy we will need to figure out its type
          -- based on the type of the function parameter.
          let t = resultType $ paramType fparam
              summary = ArrayMem (elemType t) (arrayShape t) NoUniqueness bufname ixfun
              copybnd = Let (Pattern [] [PatElem copyname BindVar summary]) () $
                        PrimOp $ Copy v
          in (Just copybnd, Var copyname)
        buffer _ _ se =
          (Nothing, se)
        -- Map parameter names to the loop results they correspond to,
        -- so result types can be stated in terms of the results.
        parammap = HM.fromList $ zip (map paramName valparams) res
        resultType t = t `setArrayDims` map substitute (arrayDims t)
        substitute (Var v)
          | Just replacement <- HM.lookup v parammap = replacement
        substitute se =
          se
|
mrakgr/futhark
|
src/Futhark/Optimise/DoubleBuffer.hs
|
bsd-3-clause
| 11,036
| 0
| 20
| 3,348
| 2,805
| 1,440
| 1,365
| 189
| 8
|
module Execs where
import CBSD.Search
import CBSD.Utils.GetPortArg
import CBSD.Messages.Types
import qualified CBSD.Ataxx as Ataxx
import qualified CBSD.Potyogos as Potyogos
import qualified CBSD.Components.Heuristic as Heu
import qualified CBSD.Components.Tree as Tree
import qualified CBSD.Components.Logic as Logic
import Control.Concurrent
import Data.Coerce
import Network
import Data.Maybe
import Data.Aeson
import Text.Printf
-- TODO : Factor out all the code duplication
-- Export JSON-friendly functions form Ataxx and Potyogos
-- | Serve the potyogos board heuristic on the port given on the command line.
potyogosHeu :: IO ()
potyogosHeu = withSocketsDo (Heu.main getPortArg heuristic)
  where
    heuristic :: Potyogos.GStateJSON -> Int
    heuristic = coerce Potyogos.smarterHeu
-- | Serve the ataxx board heuristic on the port given on the command line.
ataxxHeu :: IO ()
ataxxHeu = withSocketsDo (Heu.main getPortArg heuristic)
  where
    heuristic :: Ataxx.GStateJSON -> Int
    heuristic = coerce Ataxx.heu
-- | Run the potyogos rules server on the port given on the command line.
potyogosLogic :: IO ()
potyogosLogic = withSocketsDo $
  Logic.main
    getPortArg
    legalMoves
    (coerce Potyogos.start :: Potyogos.GStateJSON)
    applyMove
    "PotyogosLogic"
    Amoeba
  where
    -- All moves available to the given player, in JSON-friendly form.
    legalMoves :: Player -> Potyogos.GStateJSON -> [Potyogos.MoveJSON]
    legalMoves p s = coerce (map snd (Potyogos.moves p (coerce s)))

    -- Apply a move, aborting loudly if the engine rejects it.
    applyMove :: Player -> Potyogos.GStateJSON -> Potyogos.MoveJSON -> Potyogos.GStateJSON
    applyMove p s m =
      maybe
        (error $ printf "EVALUATE_MOVE: invalid move: %s\n" (show $ encode m))
        id
        (coerce Potyogos.makeMove p s m)
-- | Run the potyogos game-tree search, forking a local heuristic server for
-- it to consult.
potyogosTreeWithHeu :: IO ()
potyogosTreeWithHeu = withSocketsDo $ do
  Tree.main
    getPortArg
    launchHeu
    (orderWith 0 minimax alphaBeta)
    "PotyogsTree"
    [Amoeba]
    1000000
    applyMove
  where
    -- Apply a move, aborting loudly if the engine rejects it.
    applyMove :: Player -> Potyogos.GStateJSON -> Potyogos.MoveJSON -> Potyogos.GStateJSON
    applyMove p s m =
      maybe
        (error $ printf "EVALUATE_MOVE: invalid move: %s\n" (show $ encode m))
        id
        (coerce Potyogos.makeMove p s m)

    -- Fork the heuristic component on the given port; the ThreadId is not
    -- needed.
    launchHeu :: PortNumber -> IO ()
    launchHeu port = do
      _ <- forkIO $ Heu.main (pure port) (coerce Potyogos.smarterHeu :: Potyogos.GStateJSON -> Int)
      pure ()
-- | Run the ataxx rules server on the port given on the command line.
ataxxLogic :: IO ()
ataxxLogic = withSocketsDo $ do
  Logic.main
    getPortArg
    legalMoves
    (coerce Ataxx.start :: Ataxx.GStateJSON)
    applyMove
    "AtaxxLogic"
    Ataxx
  where
    -- All moves available to the given player, in JSON-friendly form.
    legalMoves :: Player -> Ataxx.GStateJSON -> [Ataxx.MoveJSON]
    legalMoves p s = coerce (map snd (Ataxx.moves p (coerce s)))

    -- Apply a move, aborting loudly if the engine rejects it.
    applyMove :: Player -> Ataxx.GStateJSON -> Ataxx.MoveJSON -> Ataxx.GStateJSON
    applyMove p s m =
      maybe
        (error $ printf "EVALUATE_MOVE: invalid move: %s\n" (show $ encode m))
        id
        (coerce Ataxx.makeMove p s m)
-- | Run the ataxx game-tree search, forking a local heuristic server for it
-- to consult.
ataxxTreeWithHeu :: IO ()
ataxxTreeWithHeu = withSocketsDo $ do
  Tree.main
    getPortArg
    launchHeu
    (orderWith 0 minimax alphaBeta)
    "AtaxxTree"
    [Ataxx]
    1000000
    applyMove
  where
    -- Apply a move, aborting loudly if the engine rejects it.
    applyMove :: Player -> Ataxx.GStateJSON -> Ataxx.MoveJSON -> Ataxx.GStateJSON
    applyMove p s m =
      maybe
        (error $ printf "EVALUATE_MOVE: invalid move: %s\n" (show $ encode m))
        id
        (coerce Ataxx.makeMove p s m)

    -- Fork the heuristic component on the given port; the ThreadId is not
    -- needed.
    launchHeu :: PortNumber -> IO ()
    launchHeu port = do
      _ <- forkIO $ Heu.main (pure port) (coerce Ataxx.heu :: Ataxx.GStateJSON -> Int)
      pure ()
|
AndrasKovacs/elte-cbsd
|
TestExecs/Components/Execs.hs
|
bsd-3-clause
| 3,178
| 0
| 13
| 754
| 939
| 487
| 452
| 95
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RankNTypes #-}
-- | This module provides a large suite of utilities that resemble Unix
-- utilities.
--
-- Many of these commands are just existing Haskell commands renamed to match
-- their Unix counterparts:
--
-- >>> :set -XOverloadedStrings
-- >>> cd "/tmp"
-- >>> pwd
-- FilePath "/tmp"
--
-- Some commands are `Shell`s that emit streams of values. `view` prints all
-- values in a `Shell` stream:
--
-- >>> view (ls "/usr")
-- FilePath "/usr/lib"
-- FilePath "/usr/src"
-- FilePath "/usr/sbin"
-- FilePath "/usr/include"
-- FilePath "/usr/share"
-- FilePath "/usr/games"
-- FilePath "/usr/local"
-- FilePath "/usr/bin"
-- >>> view (find (suffix "Browser.py") "/usr/lib")
-- FilePath "/usr/lib/python3.4/idlelib/ClassBrowser.py"
-- FilePath "/usr/lib/python3.4/idlelib/RemoteObjectBrowser.py"
-- FilePath "/usr/lib/python3.4/idlelib/PathBrowser.py"
-- FilePath "/usr/lib/python3.4/idlelib/ObjectBrowser.py"
--
-- Use `fold` to reduce the output of a `Shell` stream:
--
-- >>> import qualified Control.Foldl as Fold
-- >>> fold (ls "/usr") Fold.length
-- 8
-- >>> fold (find (suffix "Browser.py") "/usr/lib") Fold.head
-- Just (FilePath "/usr/lib/python3.4/idlelib/ClassBrowser.py")
--
-- Create files using `output`:
--
-- >>> output "foo.txt" ("123" <|> "456" <|> "ABC")
-- >>> realpath "foo.txt"
-- FilePath "/tmp/foo.txt"
--
-- Read in files using `input`:
--
-- >>> stdout (input "foo.txt")
-- 123
-- 456
-- ABC
--
-- Format strings in a type safe way using `format`:
--
-- >>> dir <- pwd
-- >>> format ("I am in the "%fp%" directory") dir
-- "I am in the /tmp directory"
--
-- Commands like `grep`, `sed` and `find` accept arbitrary `Pattern`s
--
-- >>> stdout (grep ("123" <|> "ABC") (input "foo.txt"))
-- 123
-- ABC
-- >>> let exclaim = fmap (<> "!") (plus digit)
-- >>> stdout (sed exclaim (input "foo.txt"))
-- 123!
-- 456!
-- ABC
--
-- Note that `grep` and `find` differ from their Unix counterparts by requiring
-- that the `Pattern` matches the entire line or file name by default. However,
-- you can optionally match the prefix, suffix, or interior of a line:
--
-- >>> stdout (grep (has "2") (input "foo.txt"))
-- 123
-- >>> stdout (grep (prefix "1") (input "foo.txt"))
-- 123
-- >>> stdout (grep (suffix "3") (input "foo.txt"))
-- 123
--
-- You can also build up more sophisticated `Shell` programs using `sh` in
-- conjunction with @do@ notation:
--
-- >{-# LANGUAGE OverloadedStrings #-}
-- >
-- >import Turtle
-- >
-- >main = sh example
-- >
-- >example = do
-- > -- Read in file names from "files1.txt" and "files2.txt"
-- > file <- fmap fromText (input "files1.txt" <|> input "files2.txt")
-- >
-- > -- Stream each file to standard output only if the file exists
-- > True <- liftIO (testfile file)
-- > line <- input file
-- > liftIO (echo line)
--
-- See "Turtle.Tutorial" for an extended tutorial explaining how to use this
-- library in greater detail.
module Turtle.Prelude (
-- * IO
echo
, err
, readline
, Filesystem.readTextFile
, Filesystem.writeTextFile
, arguments
#if __GLASGOW_HASKELL__ >= 710
, export
, unset
#endif
, need
, env
, cd
, pwd
, home
, realpath
, mv
, mkdir
, mktree
, cp
, rm
, rmdir
, rmtree
, testfile
, testdir
, testpath
, date
, datefile
, touch
, time
, hostname
, which
, whichAll
, sleep
, exit
, die
, (.&&.)
, (.||.)
-- * Managed
, readonly
, writeonly
, appendonly
, mktemp
, mktempfile
, mktempdir
, fork
, wait
, pushd
-- * Shell
, stdin
, input
, inhandle
, stdout
, output
, outhandle
, append
, stderr
, strict
, ls
, lsif
, lstree
, cat
, grep
, sed
, onFiles
, inplace
, find
, yes
, nl
, paste
, endless
, limit
, limitWhile
, cache
, parallel
-- * Folds
, countChars
, countWords
, countLines
-- * Text
, cut
-- * Subprocess management
, proc
, shell
, system
, procs
, shells
, inproc
, inshell
, inprocWithErr
, inshellWithErr
, procStrict
, shellStrict
, procStrictWithErr
, shellStrictWithErr
-- * Permissions
, Permissions
, chmod
, getmod
, setmod
, copymod
, readable, nonreadable
, writable, nonwritable
, executable, nonexecutable
, searchable, nonsearchable
, ooo,roo,owo,oox,oos,rwo,rox,ros,owx,rwx,rws
-- * File size
, du
, Size
, sz
, bytes
, kilobytes
, megabytes
, gigabytes
, terabytes
, kibibytes
, mebibytes
, gibibytes
, tebibytes
-- * File status
, PosixCompat.FileStatus
, stat
, lstat
, fileSize
, accessTime
, modificationTime
, statusChangeTime
, PosixCompat.isBlockDevice
, PosixCompat.isCharacterDevice
, PosixCompat.isNamedPipe
, PosixCompat.isRegularFile
, PosixCompat.isDirectory
, PosixCompat.isSymbolicLink
, PosixCompat.isSocket
-- * Headers
, WithHeader(..)
, header
-- * Exceptions
, ProcFailed(..)
, ShellFailed(..)
) where
import Control.Applicative
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
(Async, withAsync, withAsyncWithUnmask, waitSTM, concurrently,
Concurrently(..))
import qualified Control.Concurrent.Async
import Control.Concurrent.MVar (newMVar, modifyMVar_)
import qualified Control.Concurrent.STM as STM
import qualified Control.Concurrent.STM.TQueue as TQueue
import Control.Exception (Exception, bracket, bracket_, finally, mask_, throwIO)
import Control.Foldl (Fold, FoldM(..), genericLength, handles, list, premap)
import qualified Control.Foldl
import qualified Control.Foldl.Text
import Control.Monad (guard, liftM, msum, when, unless, (>=>))
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Managed (MonadManaged(..), managed, managed_, runManaged)
#ifdef mingw32_HOST_OS
import Data.Bits ((.&.))
#endif
import Data.IORef (newIORef, readIORef, writeIORef)
import Data.Text (Text, pack, unpack)
import Data.Time (NominalDiffTime, UTCTime, getCurrentTime)
import Data.Time.Clock.POSIX (POSIXTime)
import Data.Traversable
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import Data.Typeable (Typeable)
import qualified Filesystem
import Filesystem.Path.CurrentOS (FilePath, (</>))
import qualified Filesystem.Path.CurrentOS as Filesystem
import GHC.IO.Exception (IOErrorType(UnsupportedOperation))
import Network.HostName (getHostName)
import System.Clock (Clock(..), TimeSpec(..), getTime)
import System.Environment (
getArgs,
#if __GLASGOW_HASKELL__ >= 710
setEnv,
unsetEnv,
#endif
#if __GLASGOW_HASKELL__ >= 708
lookupEnv,
#endif
getEnvironment )
import System.Directory (Permissions)
import qualified System.Directory as Directory
import System.Exit (ExitCode(..), exitWith)
import System.IO (Handle, hClose)
import qualified System.IO as IO
import System.IO.Temp (withTempDirectory, withTempFile)
import System.IO.Error
(catchIOError, ioeGetErrorType, isPermissionError, isDoesNotExistError)
import qualified System.PosixCompat as PosixCompat
import qualified System.Process as Process
#ifdef mingw32_HOST_OS
import qualified System.Win32 as Win32
#else
import System.Posix (
openDirStream,
readDirStream,
closeDirStream,
touchFile )
#endif
import Prelude hiding (FilePath)
import Turtle.Pattern (Pattern, anyChar, chars, match, selfless, sepBy)
import Turtle.Shell
import Turtle.Format (Format, format, makeFormat, d, w, (%))
import Turtle.Internal (ignoreSIGPIPE)
import Turtle.Line
{-| Run a command using @execvp@, retrieving the exit code

    The command inherits @stdout@ and @stderr@ for the current process
-}
proc
    :: MonadIO io
    => Text
    -- ^ Command
    -> [Text]
    -- ^ Arguments
    -> Shell Line
    -- ^ Lines of standard input
    -> io ExitCode
    -- ^ Exit code
proc cmd args =
    -- stdin is piped so the supplied 'Shell' of lines can feed it.
    system
        ( (Process.proc (unpack cmd) (map unpack args))
            { Process.std_in  = Process.CreatePipe
            , Process.std_out = Process.Inherit
            , Process.std_err = Process.Inherit
            } )

{-| Run a command line using the shell, retrieving the exit code

    This command is more powerful than `proc`, but highly vulnerable to code
    injection if you template the command line with untrusted input

    The command inherits @stdout@ and @stderr@ for the current process
-}
shell
    :: MonadIO io
    => Text
    -- ^ Command line
    -> Shell Line
    -- ^ Lines of standard input
    -> io ExitCode
    -- ^ Exit code
shell cmdLine =
    -- stdin is piped so the supplied 'Shell' of lines can feed it.
    system
        ( (Process.shell (unpack cmdLine))
            { Process.std_in  = Process.CreatePipe
            , Process.std_out = Process.Inherit
            , Process.std_err = Process.Inherit
            } )
-- | Thrown by 'procs' when the underlying command exits with a non-zero code.
data ProcFailed = ProcFailed
    { procCommand   :: Text
    , procArguments :: [Text]
    , procExitCode  :: ExitCode
    } deriving (Show, Typeable)

instance Exception ProcFailed
{-| This function is identical to `proc` except this throws `ProcFailed` for
    non-zero exit codes
-}
procs
    :: MonadIO io
    => Text
    -- ^ Command
    -> [Text]
    -- ^ Arguments
    -> Shell Line
    -- ^ Lines of standard input
    -> io ()
procs cmd args s = do
    exitCode <- proc cmd args s
    unless (exitCode == ExitSuccess)
        (liftIO (throwIO (ProcFailed cmd args exitCode)))
-- | Thrown by 'shells' when the underlying command line exits with a non-zero
-- code.
data ShellFailed = ShellFailed
    { shellCommandLine :: Text
    , shellExitCode    :: ExitCode
    } deriving (Show, Typeable)

instance Exception ShellFailed
{-| This function is identical to `shell` except this throws `ShellFailed` for
    non-zero exit codes
-}
shells
    :: MonadIO io
    => Text
    -- ^ Command line
    -> Shell Line
    -- ^ Lines of standard input
    -> io ()
    -- ^ Returns @()@; throws 'ShellFailed' on a non-zero exit code
shells cmdline s = do
    exitCode <- shell cmdline s
    case exitCode of
        ExitSuccess -> return ()
        _           -> liftIO (throwIO (ShellFailed cmdline exitCode))
{-| Run a command using @execvp@, retrieving the exit code and stdout as a
    non-lazy blob of Text

    The command inherits @stderr@ for the current process
-}
procStrict
    :: MonadIO io
    => Text
    -- ^ Command
    -> [Text]
    -- ^ Arguments
    -> Shell Line
    -- ^ Lines of standard input
    -> io (ExitCode, Text)
    -- ^ Exit code and stdout
procStrict cmd args =
    systemStrict (Process.proc (Text.unpack cmd) (map Text.unpack args))

{-| Run a command line using the shell, retrieving the exit code and stdout as a
    non-lazy blob of Text

    This command is more powerful than `proc`, but highly vulnerable to code
    injection if you template the command line with untrusted input

    The command inherits @stderr@ for the current process
-}
shellStrict
    :: MonadIO io
    => Text
    -- ^ Command line
    -> Shell Line
    -- ^ Lines of standard input
    -> io (ExitCode, Text)
    -- ^ Exit code and stdout
shellStrict cmdLine = systemStrict (Process.shell (Text.unpack cmdLine))

{-| Run a command using @execvp@, retrieving the exit code, stdout, and stderr
    as a non-lazy blob of Text
-}
procStrictWithErr
    :: MonadIO io
    => Text
    -- ^ Command
    -> [Text]
    -- ^ Arguments
    -> Shell Line
    -- ^ Lines of standard input
    -> io (ExitCode, Text, Text)
    -- ^ (Exit code, stdout, stderr)
procStrictWithErr cmd args =
    systemStrictWithErr (Process.proc (Text.unpack cmd) (map Text.unpack args))

{-| Run a command line using the shell, retrieving the exit code, stdout, and
    stderr as a non-lazy blob of Text

    This command is more powerful than `proc`, but highly vulnerable to code
    injection if you template the command line with untrusted input
-}
shellStrictWithErr
    :: MonadIO io
    => Text
    -- ^ Command line
    -> Shell Line
    -- ^ Lines of standard input
    -> io (ExitCode, Text, Text)
    -- ^ (Exit code, stdout, stderr)
shellStrictWithErr cmdLine =
    systemStrictWithErr (Process.shell (Text.unpack cmdLine))
-- | Halt an `Async` thread, re-raising any exception it might have thrown
halt :: Async a -> IO ()
halt a = Control.Concurrent.Async.poll a >>= finish
  where
    -- Still running: cancel it.  Finished with an exception: re-raise.
    -- Finished normally: nothing to do.
    finish Nothing          = Control.Concurrent.Async.cancel a
    finish (Just (Left e))  = throwIO e
    finish (Just (Right _)) = return ()
{-| `system` generalizes `shell` and `proc` by allowing you to supply your own
    custom `CreateProcess`.  This is for advanced users who feel comfortable
    using the lower-level @process@ API
-}
system
    :: MonadIO io
    => Process.CreateProcess
    -- ^ Command
    -> Shell Line
    -- ^ Lines of standard input
    -> io ExitCode
    -- ^ Exit code
system p s = liftIO (do
    let open = do
            (m, Nothing, Nothing, ph) <- Process.createProcess p
            case m of
                Just hIn -> IO.hSetBuffering hIn IO.LineBuffering
                _ -> return ()
            return (m, ph)

    -- Prevent double close
    mvar <- newMVar False
    let close handle = do
            modifyMVar_ mvar (\finalized -> do
                unless finalized (ignoreSIGPIPE (hClose handle))
                return True )
    let close' (Just hIn, ph) = do
            close hIn
            Process.terminateProcess ph
        close' (Nothing , ph) = do
            Process.terminateProcess ph

    -- Feed stdin on a separate thread; 'halt' re-raises any exception that
    -- thread threw after the process has been waited for.
    let handle (Just hIn, ph) = do
            let feedIn :: (forall a. IO a -> IO a) -> IO ()
                feedIn restore =
                    restore (ignoreSIGPIPE (outhandle hIn s)) `finally` close hIn
            mask_ (withAsyncWithUnmask feedIn (\a -> Process.waitForProcess ph <* halt a) )
        handle (Nothing , ph) = do
            Process.waitForProcess ph

    bracket open close' handle )

systemStrict
    :: MonadIO io
    => Process.CreateProcess
    -- ^ Command
    -> Shell Line
    -- ^ Lines of standard input
    -> io (ExitCode, Text)
    -- ^ Exit code and stdout
systemStrict p s = liftIO (do
    -- Force a piped stdin/stdout regardless of the caller's settings, so
    -- stdout can be collected strictly.
    let p' = p
            { Process.std_in  = Process.CreatePipe
            , Process.std_out = Process.CreatePipe
            , Process.std_err = Process.Inherit
            }

    let open = do
            (Just hIn, Just hOut, Nothing, ph) <- liftIO (Process.createProcess p')
            IO.hSetBuffering hIn IO.LineBuffering
            return (hIn, hOut, ph)

    -- Prevent double close
    mvar <- newMVar False
    let close handle = do
            modifyMVar_ mvar (\finalized -> do
                unless finalized (ignoreSIGPIPE (hClose handle))
                return True )

    bracket open (\(hIn, _, ph) -> close hIn >> Process.terminateProcess ph) (\(hIn, hOut, ph) -> do
        let feedIn :: (forall a. IO a -> IO a) -> IO ()
            feedIn restore =
                restore (ignoreSIGPIPE (outhandle hIn s)) `finally` close hIn

        -- Feed stdin and read stdout concurrently to avoid pipe deadlock.
        concurrently
            (mask_ (withAsyncWithUnmask feedIn (\a -> liftIO (Process.waitForProcess ph) <* halt a)))
            (Text.hGetContents hOut) ) )

systemStrictWithErr
    :: MonadIO io
    => Process.CreateProcess
    -- ^ Command
    -> Shell Line
    -- ^ Lines of standard input
    -> io (ExitCode, Text, Text)
    -- ^ (Exit code, stdout, stderr)
systemStrictWithErr p s = liftIO (do
    -- Pipe all three standard streams so stdout and stderr can both be
    -- collected strictly.
    let p' = p
            { Process.std_in  = Process.CreatePipe
            , Process.std_out = Process.CreatePipe
            , Process.std_err = Process.CreatePipe
            }

    let open = do
            (Just hIn, Just hOut, Just hErr, ph) <- liftIO (Process.createProcess p')
            IO.hSetBuffering hIn IO.LineBuffering
            return (hIn, hOut, hErr, ph)

    -- Prevent double close
    mvar <- newMVar False
    let close handle = do
            modifyMVar_ mvar (\finalized -> do
                unless finalized (ignoreSIGPIPE (hClose handle))
                return True )

    bracket open (\(hIn, _, _, ph) -> close hIn >> Process.terminateProcess ph) (\(hIn, hOut, hErr, ph) -> do
        let feedIn :: (forall a. IO a -> IO a) -> IO ()
            feedIn restore =
                restore (ignoreSIGPIPE (outhandle hIn s)) `finally` close hIn

        -- Run the feeder and both readers concurrently to avoid deadlock.
        runConcurrently $ (,,)
            <$> Concurrently (mask_ (withAsyncWithUnmask feedIn (\a -> liftIO (Process.waitForProcess ph) <* halt a)))
            <*> Concurrently (Text.hGetContents hOut)
            <*> Concurrently (Text.hGetContents hErr) ) )
{-| Run a command using @execvp@, streaming @stdout@ as lines of `Text`

    The command inherits @stderr@ for the current process
-}
inproc
    :: Text
    -- ^ Command
    -> [Text]
    -- ^ Arguments
    -> Shell Line
    -- ^ Lines of standard input
    -> Shell Line
    -- ^ Lines of standard output
inproc cmd args = stream (Process.proc (unpack cmd) (map unpack args))

{-| Run a command line using the shell, streaming @stdout@ as lines of `Text`

    This command is more powerful than `inproc`, but highly vulnerable to code
    injection if you template the command line with untrusted input

    The command inherits @stderr@ for the current process
-}
inshell
    :: Text
    -- ^ Command line
    -> Shell Line
    -- ^ Lines of standard input
    -> Shell Line
    -- ^ Lines of standard output
inshell cmd = stream (Process.shell (unpack cmd))
-- | Shared worker behind 'inproc' and 'inshell': runs the process, feeds its
-- stdin from the given 'Shell', and streams its stdout as lines.
stream
    :: Process.CreateProcess
    -- ^ Command
    -> Shell Line
    -- ^ Lines of standard input
    -> Shell Line
    -- ^ Lines of standard output
stream p s = do
    let p' = p
            { Process.std_in  = Process.CreatePipe
            , Process.std_out = Process.CreatePipe
            , Process.std_err = Process.Inherit
            }

    let open = do
            (Just hIn, Just hOut, Nothing, ph) <- liftIO (Process.createProcess p')
            IO.hSetBuffering hIn IO.LineBuffering
            return (hIn, hOut, ph)

    -- Prevent double close
    mvar <- liftIO (newMVar False)
    let close handle = do
            modifyMVar_ mvar (\finalized -> do
                unless finalized (hClose handle)
                return True )

    (hIn, hOut, ph) <- using (managed (bracket open (\(hIn, _, ph) -> close hIn >> Process.terminateProcess ph)))

    let feedIn :: (forall a. IO a -> IO a) -> IO ()
        feedIn restore = restore (outhandle hIn s) `finally` close hIn

    a <- using (managed (mask_ . withAsyncWithUnmask feedIn))
    -- Once stdout is exhausted, reap the process and re-raise anything the
    -- feeder thread threw.
    inhandle hOut <|> (liftIO (Process.waitForProcess ph *> halt a) *> empty)

-- | Shared worker behind 'inprocWithErr' and 'inshellWithErr': like 'stream',
-- but merges stdout ('Right') and stderr ('Left') into one stream via a queue.
streamWithErr
    :: Process.CreateProcess
    -- ^ Command
    -> Shell Line
    -- ^ Lines of standard input
    -> Shell (Either Line Line)
    -- ^ Lines of standard output
streamWithErr p s = do
    let p' = p
            { Process.std_in  = Process.CreatePipe
            , Process.std_out = Process.CreatePipe
            , Process.std_err = Process.CreatePipe
            }

    let open = do
            (Just hIn, Just hOut, Just hErr, ph) <- liftIO (Process.createProcess p')
            IO.hSetBuffering hIn IO.LineBuffering
            return (hIn, hOut, hErr, ph)

    -- Prevent double close
    mvar <- liftIO (newMVar False)
    let close handle = do
            modifyMVar_ mvar (\finalized -> do
                unless finalized (hClose handle)
                return True )

    (hIn, hOut, hErr, ph) <- using (managed (bracket open (\(hIn, _, _, ph) -> close hIn >> Process.terminateProcess ph)))

    let feedIn :: (forall a. IO a -> IO a) -> IO ()
        feedIn restore = restore (outhandle hIn s) `finally` close hIn

    -- Each forwarder pushes its lines into the queue and finishes with a
    -- 'Nothing' sentinel; the drain stops after seeing both sentinels.
    queue <- liftIO TQueue.newTQueueIO
    let forwardOut :: (forall a. IO a -> IO a) -> IO ()
        forwardOut restore =
            restore (sh (do
                line <- inhandle hOut
                liftIO (STM.atomically (TQueue.writeTQueue queue (Just (Right line)))) ))
            `finally` STM.atomically (TQueue.writeTQueue queue Nothing)
    let forwardErr :: (forall a. IO a -> IO a) -> IO ()
        forwardErr restore =
            restore (sh (do
                line <- inhandle hErr
                liftIO (STM.atomically (TQueue.writeTQueue queue (Just (Left line)))) ))
            `finally` STM.atomically (TQueue.writeTQueue queue Nothing)
    let drain = Shell (\(FoldM step begin done) -> do
            x0 <- begin
            let loop x numNothing
                    | numNothing < 2 = do
                        m <- STM.atomically (TQueue.readTQueue queue)
                        case m of
                            Nothing -> loop x $! numNothing + 1
                            Just e -> do
                                x' <- step x e
                                loop x' numNothing
                    | otherwise = return x
            x1 <- loop x0 (0 :: Int)
            done x1 )

    a <- using (managed (mask_ . withAsyncWithUnmask feedIn ))
    b <- using (managed (mask_ . withAsyncWithUnmask forwardOut))
    c <- using (managed (mask_ . withAsyncWithUnmask forwardErr))

    -- Wait on 'l' and 'r' together, retrying until both have finished.
    let l `also` r = do
            _ <- l <|> (r *> STM.retry)
            _ <- r
            return ()
    let waitAll = STM.atomically (waitSTM a `also` (waitSTM b `also` waitSTM c))

    drain <|> (liftIO (Process.waitForProcess ph *> waitAll) *> empty)
{-| Run a command using the shell, streaming @stdout@ and @stderr@ as lines of
    `Text`.  Lines from @stdout@ are wrapped in `Right` and lines from @stderr@
    are wrapped in `Left`.  This does /not/ throw an exception if the command
    returns a non-zero exit code
-}
inprocWithErr
    :: Text
    -- ^ Command
    -> [Text]
    -- ^ Arguments
    -> Shell Line
    -- ^ Lines of standard input
    -> Shell (Either Line Line)
    -- ^ Lines of either standard output (`Right`) or standard error (`Left`)
inprocWithErr cmd args =
    streamWithErr (Process.proc (unpack cmd) (map unpack args))

{-| Run a command line using the shell, streaming @stdout@ and @stderr@ as lines
    of `Text`.  Lines from @stdout@ are wrapped in `Right` and lines from
    @stderr@ are wrapped in `Left`.  This does /not/ throw an exception if the
    command returns a non-zero exit code

    This command is more powerful than `inprocWithErr`, but highly vulnerable to
    code injection if you template the command line with untrusted input
-}
inshellWithErr
    :: Text
    -- ^ Command line
    -> Shell Line
    -- ^ Lines of standard input
    -> Shell (Either Line Line)
    -- ^ Lines of either standard output (`Right`) or standard error (`Left`)
inshellWithErr cmd = streamWithErr (Process.shell (unpack cmd))
{-| Print exactly one line to @stdout@

    To print more than one line see `Turtle.Format.printf`, which also supports
    formatted output
-}
echo :: MonadIO io => Line -> io ()
-- 'Text.putStrLn' supplies the trailing newline.
echo line = liftIO (Text.putStrLn (lineToText line))

-- | Print exactly one line (newline included) to @stderr@
err :: MonadIO io => Line -> io ()
err line = liftIO (Text.hPutStrLn IO.stderr (lineToText line))
{-| Read in a line from @stdin@

    Returns `Nothing` if at end of input
-}
readline :: MonadIO io => io (Maybe Line)
readline = liftIO (do
    atEnd <- IO.isEOF
    if atEnd
        then return Nothing
        else do
            str <- getLine
            return (Just (unsafeTextToLine (pack str))) )
-- | Get command line arguments in a list
arguments :: MonadIO io => io [Text]
arguments = liftIO (fmap (map pack) getArgs)

#if __GLASGOW_HASKELL__ >= 710
-- | Set or modify an environment variable (GHC 7.10+ only)
export :: MonadIO io => Text -> Text -> io ()
export key val = liftIO (setEnv (unpack key) (unpack val))

-- | Delete an environment variable (GHC 7.10+ only)
unset :: MonadIO io => Text -> io ()
unset key = liftIO (unsetEnv (unpack key))
#endif

-- | Look up an environment variable; 'Nothing' if it is not set
need :: MonadIO io => Text -> io (Maybe Text)
#if __GLASGOW_HASKELL__ >= 708
need key = liftIO (fmap (fmap pack) (lookupEnv (unpack key)))
#else
-- Older GHCs lack 'lookupEnv'; fall back to scanning the whole environment.
need key = liftM (lookup key) env
#endif

-- | Retrieve all environment variables as @(name, value)@ pairs
env :: MonadIO io => io [(Text, Text)]
env = liftIO (fmap (fmap toTexts) getEnvironment)
  where
    toTexts (key, val) = (pack key, pack val)
-- | Change the current working directory of this process
cd :: MonadIO io => FilePath -> io ()
cd path = liftIO (Filesystem.setWorkingDirectory path)

{-| Change the current directory.  Once the current 'Shell' is done, it returns
    back to the original directory.

>>> :set -XOverloadedStrings
>>> cd "/"
>>> view (pushd "/tmp" >> pwd)
FilePath "/tmp"
>>> pwd
FilePath "/"
-}
pushd :: MonadManaged managed => FilePath -> managed ()
pushd path = do
    cwd <- pwd
    -- 'bracket_' guarantees we 'cd' back even if the inner action fails.
    using (managed_ (bracket_ (cd path) (cd cwd)))

-- | Get the current directory
pwd :: MonadIO io => io FilePath
pwd = liftIO Filesystem.getWorkingDirectory

-- | Get the current user's home directory
home :: MonadIO io => io FilePath
home = liftIO Filesystem.getHomeDirectory

-- | Canonicalize a path (resolving symlinks and relative segments)
realpath :: MonadIO io => FilePath -> io FilePath
realpath path = liftIO (Filesystem.canonicalizePath path)
#ifdef mingw32_HOST_OS
fILE_ATTRIBUTE_REPARSE_POINT :: Win32.FileAttributeOrFlag
fILE_ATTRIBUTE_REPARSE_POINT = 1024

-- | True when the attribute bits mark a reparse point (e.g. a symlink or
-- junction); 'ls' declines to list such directories on Windows.
reparsePoint :: Win32.FileAttributeOrFlag -> Bool
reparsePoint attr = fILE_ATTRIBUTE_REPARSE_POINT .&. attr /= 0
#endif

{-| Stream all immediate children of the given directory, excluding @\".\"@ and
    @\"..\"@

    An unreadable directory produces an empty stream rather than an error.
-}
ls :: FilePath -> Shell FilePath
ls path = Shell (\(FoldM step begin done) -> do
    x0 <- begin
    let path' = Filesystem.encodeString path
    canRead <- fmap
        Directory.readable
        (Directory.getPermissions (deslash path'))
#ifdef mingw32_HOST_OS
    reparse <- fmap reparsePoint (Win32.getFileAttributes path')
    if (canRead && not reparse)
        then bracket
            (Win32.findFirstFile (Filesystem.encodeString (path </> "*")))
            (\(h, _) -> Win32.findClose h)
            (\(h, fdat) -> do
                let loop x = do
                        file' <- Win32.getFindDataFileName fdat
                        let file = Filesystem.decodeString file'
                        x' <- if (file' /= "." && file' /= "..")
                            then step x (path </> file)
                            else return x
                        more <- Win32.findNextFile h fdat
                        if more then loop $! x' else done x'
                loop $! x0 )
        else done x0 )
#else
    if canRead
        then bracket (openDirStream path') closeDirStream (\dirp -> do
            let loop x = do
                    file' <- readDirStream dirp
                    -- 'readDirStream' returns "" when the stream is exhausted.
                    case file' of
                        "" -> done x
                        _ -> do
                            let file = Filesystem.decodeString file'
                            x' <- if (file' /= "." && file' /= "..")
                                then step x (path </> file)
                                else return x
                            loop $! x'
            loop $! x0 )
        else done x0 )
#endif
{-| Remove a single trailing backslash from a path, because `getPermissions`
    fails if a path ends with a trailing (Windows) separator.  The first
    character is always kept, so @\"\\\\\"@ is returned unchanged.  Note that
    only @\'\\\\\'@ is stripped, not @\'/\'@.
-}
deslash :: String -> String
deslash [] = []
deslash (c0:cs0) = c0 : dropTrailing cs0
  where
    dropTrailing []     = []
    dropTrailing ['\\'] = []
    dropTrailing (c:cs) = c : dropTrailing cs
-- | Stream all recursive descendents of the given directory
lstree :: FilePath -> Shell FilePath
lstree path = ls path >>= descend
  where
    -- Emit every child; recurse into the ones that are directories.
    descend child = do
        isDirectory <- testdir child
        if isDirectory
            then return child <|> lstree child
            else return child
{-| Stream all recursive descendents of the given directory

    This skips any directories that fail the supplied predicate

> lstree = lsif (\_ -> return True)
-}
lsif :: (FilePath -> IO Bool) -> FilePath -> Shell FilePath
lsif predicate path = ls path >>= visit
  where
    -- Every child is emitted; a directory is only descended into when the
    -- predicate allows it.
    visit child = do
        isDirectory <- testdir child
        if not isDirectory
            then return child
            else do
                descend <- liftIO (predicate child)
                if descend
                    then return child <|> lsif predicate child
                    else return child
{-| Move a file or directory

    Works if the two paths are on the same filesystem.
    If not, @mv@ will still work when dealing with a regular file,
    but the operation will not be atomic
-}
mv :: MonadIO io => FilePath -> FilePath -> io ()
mv oldPath newPath = liftIO $ catchIOError (Filesystem.rename oldPath newPath)
    -- A cross-device rename fails with 'UnsupportedOperation' (EXDEV); fall
    -- back to copy-then-delete, which is not atomic.
    (\ioe -> if ioeGetErrorType ioe == UnsupportedOperation -- certainly EXDEV
                then do
                    Filesystem.copyFile oldPath newPath
                    Filesystem.removeFile oldPath
                else ioError ioe)
{-| Create a directory

    Fails if the directory is present
-}
mkdir :: MonadIO io => FilePath -> io ()
mkdir path = liftIO (Filesystem.createDirectory False path)

{-| Create a directory tree (equivalent to @mkdir -p@)

    Does not fail if the directory is present
-}
mktree :: MonadIO io => FilePath -> io ()
mktree path = liftIO (Filesystem.createTree path)

-- | Copy a file
cp :: MonadIO io => FilePath -> FilePath -> io ()
cp oldPath newPath = liftIO (Filesystem.copyFile oldPath newPath)

-- | Remove a file
rm :: MonadIO io => FilePath -> io ()
rm path = liftIO (Filesystem.removeFile path)

-- | Remove a directory
--
-- NOTE(review): presumably fails on a non-empty directory — 'rmtree' empties
-- a directory before calling this; confirm against the @system-fileio@ docs.
rmdir :: MonadIO io => FilePath -> io ()
rmdir path = liftIO (Filesystem.removeDirectory path)
{-| Remove a directory tree (equivalent to @rm -r@)

    Use at your own risk
-}
rmtree :: MonadIO io => FilePath -> io ()
rmtree path0 = liftIO (sh (loop path0))
  where
    loop path = do
        linkstat <- lstat path
        -- Symlinks are removed, never followed, so the target is untouched.
        let isLink = PosixCompat.isSymbolicLink linkstat
            isDir = PosixCompat.isDirectory linkstat
        if isLink
            then rm path
            else do
                if isDir
                    -- The '<|>' sequences the streams: 'rmdir' runs after all
                    -- children have been deleted.
                    then (do
                        child <- ls path
                        loop child ) <|> rmdir path
                    else rm path
-- | Check if a file exists at the given path
testfile :: MonadIO io => FilePath -> io Bool
testfile path = liftIO (Filesystem.isFile path)

-- | Check if a directory exists at the given path
testdir :: MonadIO io => FilePath -> io Bool
testdir path = liftIO (Filesystem.isDirectory path)
{-| Check if a path exists

    'True' when either a file or a directory exists at the given path.
-}
testpath :: MonadIO io => FilePath -> io Bool
testpath path = do
    isFile <- testfile path
    if isFile
        then return True
        else testdir path
{-| Touch a file, updating the access and modification times to the current time

    Creates an empty file if it does not exist
-}
touch :: MonadIO io => FilePath -> io ()
touch file = do
    exists <- testfile file
    liftIO (if exists
#ifdef mingw32_HOST_OS
        -- On Windows there is no 'touchFile'; rewrite the file times by hand,
        -- preserving the creation time.
        -- NOTE(review): the handle obtained from 'Win32.createFile' is never
        -- closed here — possible handle leak; confirm.
        then do
            handle <- Win32.createFile
                (Filesystem.encodeString file)
                Win32.gENERIC_WRITE
                Win32.fILE_SHARE_NONE
                Nothing
                Win32.oPEN_EXISTING
                Win32.fILE_ATTRIBUTE_NORMAL
                Nothing
            (creationTime, _, _) <- Win32.getFileTime handle
            systemTime <- Win32.getSystemTimeAsFileTime
            Win32.setFileTime handle creationTime systemTime systemTime
#else
        then touchFile (Filesystem.encodeString file)
#endif
        -- Writing an empty stream creates the file.
        else output file empty )
{-| Update a file or directory's user permissions

> chmod rwo         "foo.txt"  -- chmod u=rw foo.txt
> chmod executable  "foo.txt"  -- chmod u+x foo.txt
> chmod nonwritable "foo.txt"  -- chmod u-w foo.txt
-}
chmod
    :: MonadIO io
    => (Permissions -> Permissions)
    -- ^ Permissions update function
    -> FilePath
    -- ^ Path
    -> io Permissions
    -- ^ Updated permissions
chmod modifyPermissions path = liftIO (do
    -- 'getPermissions' rejects a trailing separator, hence 'deslash'.
    let path' = deslash (Filesystem.encodeString path)
    permissions <- Directory.getPermissions path'
    let permissions' = modifyPermissions permissions
        changed = permissions /= permissions'
    -- Only write back when the update actually changed something.
    when changed (Directory.setPermissions path' permissions')
    return permissions' )
-- | Get a file or directory's user permissions
getmod :: MonadIO io => FilePath -> io Permissions
getmod path = liftIO (do
    -- 'getPermissions' rejects a trailing separator, hence 'deslash'.
    let path' = deslash (Filesystem.encodeString path)
    Directory.getPermissions path' )

-- | Set a file or directory's user permissions
setmod :: MonadIO io => Permissions -> FilePath -> io ()
setmod permissions path = liftIO (do
    let path' = deslash (Filesystem.encodeString path)
    Directory.setPermissions path' permissions )

-- | Copy a file or directory's permissions (analogous to @chmod --reference@)
copymod :: MonadIO io => FilePath -> FilePath -> io ()
copymod sourcePath targetPath = liftIO (do
    let sourcePath' = deslash (Filesystem.encodeString sourcePath)
        targetPath' = deslash (Filesystem.encodeString targetPath)
    Directory.copyPermissions sourcePath' targetPath' )
-- Single-bit permission updates, meant to be passed to 'chmod'.  Each one
-- flips exactly one owner permission flag on or off.

-- | @+r@
readable :: Permissions -> Permissions
readable = Directory.setOwnerReadable True

-- | @-r@
nonreadable :: Permissions -> Permissions
nonreadable = Directory.setOwnerReadable False

-- | @+w@
writable :: Permissions -> Permissions
writable = Directory.setOwnerWritable True

-- | @-w@
nonwritable :: Permissions -> Permissions
nonwritable = Directory.setOwnerWritable False

-- | @+x@
executable :: Permissions -> Permissions
executable = Directory.setOwnerExecutable True

-- | @-x@
nonexecutable :: Permissions -> Permissions
nonexecutable = Directory.setOwnerExecutable False

-- | @+s@
searchable :: Permissions -> Permissions
searchable = Directory.setOwnerSearchable True

-- | @-s@
nonsearchable :: Permissions -> Permissions
nonsearchable = Directory.setOwnerSearchable False

-- Absolute permission settings ("chmod u=..." style).  These compose the
-- single-bit updates above on top of 'ooo', which clears everything first.

-- | @-r -w -x@
ooo :: Permissions -> Permissions
ooo = const Directory.emptyPermissions

-- | @+r -w -x@
roo :: Permissions -> Permissions
roo = readable . ooo

-- | @-r +w -x@
owo :: Permissions -> Permissions
owo = writable . ooo

-- | @-r -w +x@
oox :: Permissions -> Permissions
oox = executable . ooo

-- | @-r -w +s@
oos :: Permissions -> Permissions
oos = searchable . ooo

-- | @+r +w -x@
rwo :: Permissions -> Permissions
rwo = readable . writable . ooo

-- | @+r -w +x@
rox :: Permissions -> Permissions
rox = readable . executable . ooo

-- | @+r -w +s@
ros :: Permissions -> Permissions
ros = readable . searchable . ooo

-- | @-r +w +x@
owx :: Permissions -> Permissions
owx = writable . executable . ooo

-- | @+r +w +x@
rwx :: Permissions -> Permissions
rwx = readable . writable . executable . ooo

-- | @+r +w +s@
rws :: Permissions -> Permissions
rws = readable . writable . searchable . ooo
{-| Time how long a command takes in monotonic wall clock time

    Returns the duration alongside the return value
-}
time :: MonadIO io => io a -> io (a, NominalDiffTime)
time io = do
    TimeSpec secondsBefore nanosBefore <- liftIO (getTime Monotonic)
    result <- io
    TimeSpec secondsAfter  nanosAfter  <- liftIO (getTime Monotonic)
    -- Combine the whole-second and nanosecond deltas into one duration
    let elapsed = fromIntegral (secondsAfter - secondsBefore)
                + fromIntegral (nanosAfter - nanosBefore) / 10^(9::Int)
    return (result, fromRational elapsed)
-- | Get the system's host name
hostname :: MonadIO io => io Text
hostname = liftIO (fmap Text.pack getHostName)

-- | Show the full path of an executable file
--
-- Returns the first match on @PATH@, if any.
which :: MonadIO io => FilePath -> io (Maybe FilePath)
which cmd = fold (whichAll cmd) Control.Foldl.head
-- | Show all matching executables in PATH, not just the first
--
-- NOTE(review): @PATH@ is split on @':'@, the Unix separator — this likely
-- misbehaves on Windows (which uses @';'@); confirm intended platforms.
whichAll :: FilePath -> Shell FilePath
whichAll cmd = do
  Just paths <- need "PATH"
  path <- select (Text.split (== ':') paths)
  let path' = Filesystem.fromText path </> cmd
  True <- testfile path'
  -- Unreadable or vanished entries are treated as "not executable" rather
  -- than aborting the whole search
  let handler :: IOError -> IO Permissions
      handler e =
          if isPermissionError e || isDoesNotExistError e
              then return Directory.emptyPermissions
              else throwIO e
  perms <- liftIO (getmod path' `catchIOError` handler)
  guard (Directory.executable perms)
  return path'
{-| Sleep for the given duration

    A numeric literal argument is interpreted as seconds.  In other words,
    @(sleep 2.0)@ will sleep for two seconds.
-}
sleep :: MonadIO io => NominalDiffTime -> io ()
sleep n = liftIO (threadDelay (truncate (n * 10^(6::Int))))
{-| Exit with the given exit code

    An exit code of @0@ indicates success
-}
exit :: MonadIO io => ExitCode -> io a
exit code = liftIO (exitWith code)

-- | Throw an exception using the provided `Text` message
die :: MonadIO io => Text -> io a
die txt = liftIO (throwIO (userError (unpack txt)))
infixr 2 .||.
infixr 3 .&&.
{-| Analogous to `&&` in Bash

    Runs the second command only if the first one returns `ExitSuccess`
-}
(.&&.) :: Monad m => m ExitCode -> m ExitCode -> m ExitCode
cmd1 .&&. cmd2 =
    cmd1 >>= \exitCode -> case exitCode of
        ExitSuccess -> cmd2
        failure     -> return failure
{-| Analogous to `||` in Bash

    Run the second command only if the first one returns `ExitFailure`
-}
(.||.) :: Monad m => m ExitCode -> m ExitCode -> m ExitCode
cmd1 .||. cmd2 =
    cmd1 >>= \exitCode -> case exitCode of
        ExitFailure _ -> cmd2
        success       -> return success
{-| Create a temporary directory underneath the given directory

    Deletes the temporary directory when done
-}
mktempdir
    :: MonadManaged managed
    => FilePath
    -- ^ Parent directory
    -> Text
    -- ^ Directory name template
    -> managed FilePath
mktempdir parent prefix = using (do
    let parent' = Filesystem.encodeString parent
    let prefix' = unpack prefix
    dir' <- managed (withTempDirectory parent' prefix')
    return (Filesystem.decodeString dir'))
{-| Create a temporary file underneath the given directory

    Deletes the temporary file when done

    Note that this provides the `Handle` of the file in order to avoid a
    potential race condition from the file being moved or deleted before you
    have a chance to open the file. The `mktempfile` function provides a
    simpler API if you don't need to worry about that possibility.
-}
mktemp
    :: MonadManaged managed
    => FilePath
    -- ^ Parent directory
    -> Text
    -- ^ File name template
    -> managed (FilePath, Handle)
mktemp parent prefix = using (do
    let parent' = Filesystem.encodeString parent
    let prefix' = unpack prefix
    -- Adapt withTempFile's two-argument callback to managed's single-value
    -- continuation by tupling the path and handle
    (file', handle) <- managed (\k ->
        withTempFile parent' prefix' (\file' handle -> k (file', handle)) )
    return (Filesystem.decodeString file', handle) )
{-| Create a temporary file underneath the given directory

    Deletes the temporary file when done
-}
mktempfile
    :: MonadManaged managed
    => FilePath
    -- ^ Parent directory
    -> Text
    -- ^ File name template
    -> managed FilePath
mktempfile parent prefix = using (do
    let parent' = Filesystem.encodeString parent
    let prefix' = unpack prefix
    (file', handle) <- managed (\k ->
        withTempFile parent' prefix' (\file' handle -> k (file', handle)) )
    -- Close the handle immediately: only the path is returned, so callers
    -- reopen the file themselves
    liftIO (hClose handle)
    return (Filesystem.decodeString file') )

-- | Fork a thread, acquiring an `Async` value
--
-- The thread is cancelled when the surrounding 'Managed' scope ends.
fork :: MonadManaged managed => IO a -> managed (Async a)
fork io = using (managed (withAsync io))

-- | Wait for an `Async` action to complete
wait :: MonadIO io => Async a -> io a
wait a = liftIO (Control.Concurrent.Async.wait a)
-- | Read lines of `Text` from standard input
stdin :: Shell Line
stdin = inhandle IO.stdin

-- | Read lines of `Text` from a file
--
-- The file handle is acquired per fold pass and released when the pass ends.
input :: FilePath -> Shell Line
input file = do
    handle <- using (readonly file)
    inhandle handle

-- | Read lines of `Text` from a `Handle`
--
-- Streams one line at a time until EOF, feeding each line to the fold.
inhandle :: Handle -> Shell Line
inhandle handle = Shell (\(FoldM step begin done) -> do
    x0 <- begin
    let loop x = do
            eof <- IO.hIsEOF handle
            if eof
                then done x
                else do
                    txt <- Text.hGetLine handle
                    x' <- step x (unsafeTextToLine txt)
                    -- ($!) keeps the accumulator strict so long inputs do
                    -- not build up a chain of thunks
                    loop $! x'
    loop $! x0 )
-- | Stream lines of `Text` to standard output
stdout :: MonadIO io => Shell Line -> io ()
stdout s = sh (do
    line <- s
    liftIO (echo line) )

-- | Stream lines of `Text` to a file
--
-- Truncates the file first (write mode); see 'append' to add to an
-- existing file instead.
output :: MonadIO io => FilePath -> Shell Line -> io ()
output file s = sh (do
    handle <- using (writeonly file)
    line <- s
    liftIO (Text.hPutStrLn handle (lineToText line)) )

-- | Stream lines of `Text` to a `Handle`
outhandle :: MonadIO io => Handle -> Shell Line -> io ()
outhandle handle s = sh (do
    line <- s
    liftIO (Text.hPutStrLn handle (lineToText line)) )

-- | Stream lines of `Text` to append to a file
append :: MonadIO io => FilePath -> Shell Line -> io ()
append file s = sh (do
    handle <- using (appendonly file)
    line <- s
    liftIO (Text.hPutStrLn handle (lineToText line)) )

-- | Stream lines of `Text` to standard error
stderr :: MonadIO io => Shell Line -> io ()
stderr s = sh (do
    line <- s
    liftIO (err line) )
-- | Read in a stream's contents strictly
--
-- Collects every line into memory before returning; avoid on unbounded
-- streams.
strict :: MonadIO io => Shell Line -> io Text
strict s = liftM linesToText (fold s list)

-- | Acquire a `Managed` read-only `Handle` from a `FilePath`
readonly :: MonadManaged managed => FilePath -> managed Handle
readonly file = using (managed (Filesystem.withTextFile file IO.ReadMode))

-- | Acquire a `Managed` write-only `Handle` from a `FilePath`
writeonly :: MonadManaged managed => FilePath -> managed Handle
writeonly file = using (managed (Filesystem.withTextFile file IO.WriteMode))

-- | Acquire a `Managed` append-only `Handle` from a `FilePath`
appendonly :: MonadManaged managed => FilePath -> managed Handle
appendonly file = using (managed (Filesystem.withTextFile file IO.AppendMode))

-- | Combine the output of multiple `Shell`s, in order
cat :: [Shell a] -> Shell a
cat = msum

-- | Keep all lines that match the given `Pattern`
--
-- The failed pattern-match on @_:_@ filters out lines with no matches: a
-- match failure in `Shell` produces the empty stream for that element.
grep :: Pattern a -> Shell Line -> Shell Line
grep pattern s = do
    line <- s
    _:_ <- return (match pattern (lineToText line))
    return line
{-| Replace all occurrences of a `Pattern` with its `Text` result

    `sed` performs substitution on a line-by-line basis, meaning that
    substitutions may not span multiple lines. Additionally, substitutions may
    occur multiple times within the same line, like the behavior of
    @s\/...\/...\/g@.

    Warning: Do not use a `Pattern` that matches the empty string, since it will
    match an infinite number of times. `sed` tries to detect such `Pattern`s
    and `die` with an error message if they occur, but this detection is
    necessarily incomplete.
-}
sed :: Pattern Text -> Shell Line -> Shell Line
sed pattern s = do
    when (matchesEmpty pattern) (die message)
    -- Either the substitution pattern fires, or a single character passes
    -- through unchanged; repeating that covers the whole line
    let pattern' = fmap Text.concat
            (many (pattern <|> fmap Text.singleton anyChar))
    line <- s
    txt':_ <- return (match pattern' (lineToText line))
    select (textToLines txt')
  where
    message = "sed: the given pattern matches the empty string"
    matchesEmpty = not . null . flip match ""

-- | Make a `Shell Text -> Shell Text` function work on `FilePath`s instead.
-- | Ignores any paths which cannot be decoded as valid `Text`.
onFiles :: (Shell Text -> Shell Text) -> Shell FilePath -> Shell FilePath
onFiles f = fmap Filesystem.fromText . f . getRights . fmap Filesystem.toText
  where
    getRights :: forall a. Shell (Either a Text) -> Shell Text
    getRights s = s >>= either (const empty) return

-- | Like `sed`, but operates in place on a `FilePath` (analogous to @sed -i@)
--
-- Writes the substituted output to a temporary file (with the original's
-- permissions copied over) and then moves it over the original.
inplace :: MonadIO io => Pattern Text -> FilePath -> io ()
inplace pattern file = liftIO (runManaged (do
    here <- pwd
    (tmpfile, handle) <- mktemp here "turtle"
    outhandle handle (sed pattern (input file))
    liftIO (hClose handle)
    copymod file tmpfile
    mv tmpfile file ))
-- | Search a directory recursively for all files matching the given `Pattern`
--
-- Symbolic links are not followed during the traversal.
find :: Pattern a -> FilePath -> Shell FilePath
find pattern dir = do
    path <- lsif isNotSymlink dir
    Right txt <- return (Filesystem.toText path)
    _:_ <- return (match pattern txt)
    return path
  where
    isNotSymlink :: FilePath -> IO Bool
    isNotSymlink file = do
        file_stat <- lstat file
        return (not (PosixCompat.isSymbolicLink file_stat))

-- | A Stream of @\"y\"@s
yes :: Shell Line
yes = fmap (\_ -> "y") endless

-- | Number each element of a `Shell` (starting at 0)
--
-- The counter is threaded through the fold's accumulator and forced with
-- `seq` on each step to avoid thunk build-up.
nl :: Num n => Shell a -> Shell (n, a)
nl s = Shell _foldIO'
  where
    _foldIO' (FoldM step begin done) = _foldIO s (FoldM step' begin' done')
      where
        step' (x, n) a = do
            x' <- step x (n, a)
            let n' = n + 1
            n' `seq` return (x', n')
        begin' = do
            x0 <- begin
            return (x0, 0)
        done' (x, _) = done x
-- | Handshake states for 'paste': the shared TVar moves
-- Empty -> HasA -> HasAB -> Empty ..., and either side sets Done when its
-- stream ends.
data ZipState a b = Empty | HasA a | HasAB a b | Done
{-| Merge two `Shell`s together, element-wise

    If one `Shell` is longer than the other, the excess elements are
    truncated
-}
paste :: Shell a -> Shell b -> Shell (a, b)
paste sA sB = Shell _foldIOAB
  where
    _foldIOAB (FoldM stepAB beginAB doneAB) = do
        x0 <- beginAB
        tvar <- STM.atomically (STM.newTVar Empty)
        let begin = return ()
        -- Producer A: waits until the slot is Empty, then deposits its value
        let stepA () a = STM.atomically (do
                x <- STM.readTVar tvar
                case x of
                    Empty -> STM.writeTVar tvar (HasA a)
                    Done -> return ()
                    _ -> STM.retry )
        let doneA () = STM.atomically (do
                x <- STM.readTVar tvar
                case x of
                    Empty -> STM.writeTVar tvar Done
                    Done -> return ()
                    _ -> STM.retry )
        let foldA = FoldM stepA begin doneA
        -- Producer B: waits for A's value, completing the pair
        let stepB () b = STM.atomically (do
                x <- STM.readTVar tvar
                case x of
                    HasA a -> STM.writeTVar tvar (HasAB a b)
                    Done -> return ()
                    _ -> STM.retry )
        let doneB () = STM.atomically (do
                x <- STM.readTVar tvar
                case x of
                    HasA _ -> STM.writeTVar tvar Done
                    Done -> return ()
                    _ -> STM.retry )
        let foldB = FoldM stepB begin doneB
        -- Consumer: drain completed pairs until either side signals Done
        withAsync (foldIO sA foldA) (\asyncA -> do
            withAsync (foldIO sB foldB) (\asyncB -> do
                let loop x = do
                        y <- STM.atomically (do
                            z <- STM.readTVar tvar
                            case z of
                                HasAB a b -> do
                                    STM.writeTVar tvar Empty
                                    return (Just (a, b))
                                Done -> return Nothing
                                _ -> STM.retry )
                        case y of
                            Nothing -> return x
                            Just ab -> do
                                x' <- stepAB x ab
                                loop $! x'
                x' <- loop $! x0
                wait asyncA
                wait asyncB
                doneAB x' ) )

-- | A `Shell` that endlessly emits @()@
endless :: Shell ()
endless = Shell (\(FoldM step begin _) -> do
    x0 <- begin
    let loop x = do
            x' <- step x ()
            loop $! x'
    loop $! x0 )
-- | Limit a `Shell` to a fixed number of values
--
-- NOTE: the upstream `Shell` is still consumed to completion; values past
-- the limit are merely dropped (the fold step becomes a no-op), not
-- short-circuited.
limit :: Int -> Shell a -> Shell a
limit n s = Shell (\(FoldM step begin done) -> do
    ref <- newIORef 0 -- mutable counter, local to this single fold pass
    let step' x a = do
            n' <- readIORef ref
            writeIORef ref (n' + 1)
            if n' < n then step x a else return x
    foldIO s (FoldM step' begin done) )
{-| Limit a `Shell` to values that satisfy the predicate

    This terminates the stream on the first value that does not satisfy the
    predicate
-}
limitWhile :: (a -> Bool) -> Shell a -> Shell a
limitWhile predicate s = Shell (\(FoldM step begin done) -> do
    ref <- newIORef True
    -- The flag latches to False at the first failing value and stays False
    let step' x a = do
            b <- readIORef ref
            let b' = b && predicate a
            writeIORef ref b'
            if b' then step x a else return x
    foldIO s (FoldM step' begin done) )
{-| Cache a `Shell`'s output so that repeated runs of the script will reuse the
    result of previous runs.  You must supply a `FilePath` where the cached
    result will be stored.

    The stored result is only reused if the `Shell` successfully ran to
    completion without any exceptions.  Note: on some platforms Ctrl-C will
    flush standard input and signal end of file before killing the program,
    which may trick the program into \"successfully\" completing.
-}
cache :: (Read a, Show a) => FilePath -> Shell a -> Shell a
cache file s = do
    -- Each cached line is a 'show'n @Maybe a@; a trailing @Nothing@ line
    -- marks that the stream ran to completion.
    let cached = do
            line <- input file
            case reads (Text.unpack (lineToText line)) of
                [(ma, "")] -> return ma
                _ ->
                    die (format ("cache: Invalid data stored in "%w) file)
    exists <- testfile file
    mas <- fold (if exists then cached else empty) list
    case [ () | Nothing <- mas ] of
        -- Completion marker present: replay the cached values
        _:_ -> select [ a | Just a <- mas ]
        -- No (complete) cache: run the stream, recording as we go
        _ -> do
            handle <- using (writeonly file)
            let justs = do
                    a <- s
                    liftIO (Text.hPutStrLn handle (Text.pack (show (Just a))))
                    return a
            let nothing = do
                    let n = Nothing :: Maybe ()
                    liftIO (Text.hPutStrLn handle (Text.pack (show n)))
                    empty
            justs <|> nothing
{-| Run a list of IO actions in parallel using fork and wait.

>>> view (parallel [(sleep 3) >> date, date, date])
2016-12-01 17:22:10.83296 UTC
2016-12-01 17:22:07.829876 UTC
2016-12-01 17:22:07.829963 UTC
-}
parallel :: [IO a] -> Shell a
parallel = traverse fork >=> select >=> wait
-- | Split a line into chunks delimited by the given `Pattern`
cut :: Pattern a -> Text -> [Text]
cut pattern txt = head (match (selfless chars `sepBy` pattern) txt)
-- This `head` is believed safe: `sepBy` over `chars` should always produce
-- at least one match — TODO confirm against the Pattern implementation

-- | Get the current time
date :: MonadIO io => io UTCTime
date = liftIO getCurrentTime

-- | Get the time a file was last modified
datefile :: MonadIO io => FilePath -> io UTCTime
datefile path = liftIO (Filesystem.getModified path)

-- | Get the size of a file or a directory
--
-- For a directory, sums the sizes of all regular files in the tree
-- (symlinks are not followed by 'lstree''s file test here).
du :: MonadIO io => FilePath -> io Size
du path = liftIO (do
    isDir <- testdir path
    size <- do
        if isDir
            then do
                let sizes = do
                        child <- lstree path
                        True <- testfile child
                        liftIO (Filesystem.getSize child)
                fold sizes Control.Foldl.sum
            else Filesystem.getSize path
    return (Size size) )
{-| An abstract file size

    Specify the units you want by using an accessor like `kilobytes`

    The `Num` instance for `Size` interprets numeric literals as bytes
-}
newtype Size = Size { _bytes :: Integer } deriving (Eq, Ord, Num)

-- Shown as the raw byte count; use 'sz' for a human-readable rendering
instance Show Size where
    show = show . _bytes
{-| `Format` a `Size` using a human readable representation

>>> format sz 42
"42 B"
>>> format sz 2309
"2.309 KB"
>>> format sz 949203
"949.203 KB"
>>> format sz 1600000000
"1.600 GB"
>>> format sz 999999999999999999
"999999.999 TB"
-}
sz :: Format r (Size -> r)
sz = makeFormat (\(Size numBytes) ->
    -- Successive quotRem by 1000 peels off one decimal unit at a time
    let (numKilobytes, remainingBytes ) = numBytes `quotRem` 1000
        (numMegabytes, remainingKilobytes) = numKilobytes `quotRem` 1000
        (numGigabytes, remainingMegabytes) = numMegabytes `quotRem` 1000
        (numTerabytes, remainingGigabytes) = numGigabytes `quotRem` 1000
    in if numKilobytes <= 0
       then format (d%" B" ) remainingBytes
       else if numMegabytes == 0
       then format (d%"."%d%" KB") remainingKilobytes remainingBytes
       else if numGigabytes == 0
       then format (d%"."%d%" MB") remainingMegabytes remainingKilobytes
       else if numTerabytes == 0
       then format (d%"."%d%" GB") remainingGigabytes remainingMegabytes
       else format (d%"."%d%" TB") numTerabytes remainingGigabytes )

-- | Extract a size in bytes
bytes :: Integral n => Size -> n
bytes = fromInteger . _bytes

-- | @1 kilobyte = 1000 bytes@
kilobytes :: Integral n => Size -> n
kilobytes = (`div` 1000) . bytes

-- | @1 megabyte = 1000 kilobytes@
megabytes :: Integral n => Size -> n
megabytes = (`div` 1000) . kilobytes

-- | @1 gigabyte = 1000 megabytes@
gigabytes :: Integral n => Size -> n
gigabytes = (`div` 1000) . megabytes

-- | @1 terabyte = 1000 gigabytes@
terabytes :: Integral n => Size -> n
terabytes = (`div` 1000) . gigabytes

-- | @1 kibibyte = 1024 bytes@
kibibytes :: Integral n => Size -> n
kibibytes = (`div` 1024) . bytes

-- | @1 mebibyte = 1024 kibibytes@
mebibytes :: Integral n => Size -> n
mebibytes = (`div` 1024) . kibibytes

-- | @1 gibibyte = 1024 mebibytes@
gibibytes :: Integral n => Size -> n
gibibytes = (`div` 1024) . mebibytes

-- | @1 tebibyte = 1024 gibibytes@
tebibytes :: Integral n => Size -> n
tebibytes = (`div` 1024) . gibibytes
{-| Count the number of characters in the stream (like @wc -c@)

    This uses the convention that the elements of the stream are implicitly
    ended by newlines that are one character wide
-}
countChars :: Integral n => Fold Line n
countChars =
    -- (+) and (*) here combine Folds pointwise — presumably via a Num
    -- instance for Fold; confirm in the foldl package
    premap lineToText Control.Foldl.Text.length +
    charsPerNewline * countLines

-- Newline width differs per platform: CRLF on Windows, LF elsewhere
charsPerNewline :: Num a => a
#ifdef mingw32_HOST_OS
charsPerNewline = 2
#else
charsPerNewline = 1
#endif

-- | Count the number of words in the stream (like @wc -w@)
countWords :: Integral n => Fold Line n
countWords = premap (Text.words . lineToText) (handles traverse genericLength)
{-| Count the number of lines in the stream (like @wc -l@)

    This uses the convention that each element of the stream represents one
    line
-}
countLines :: Integral n => Fold Line n
countLines = genericLength
-- | Get the status of a file
stat :: MonadIO io => FilePath -> io PosixCompat.FileStatus
stat = liftIO . PosixCompat.getFileStatus . Filesystem.encodeString

-- | Size of the file in bytes. Does not follow symlinks
fileSize :: PosixCompat.FileStatus -> Size
fileSize = fromIntegral . PosixCompat.fileSize

-- | Time of last access
accessTime :: PosixCompat.FileStatus -> POSIXTime
accessTime = realToFrac . PosixCompat.accessTime

-- | Time of last modification
modificationTime :: PosixCompat.FileStatus -> POSIXTime
modificationTime = realToFrac . PosixCompat.modificationTime

-- | Time of last status change (i.e. owner, group, link count, mode, etc.)
statusChangeTime :: PosixCompat.FileStatus -> POSIXTime
statusChangeTime = realToFrac . PosixCompat.statusChangeTime

-- | Get the status of a file, but don't follow symbolic links
lstat :: MonadIO io => FilePath -> io PosixCompat.FileStatus
lstat = liftIO . PosixCompat.getSymbolicLinkStatus . Filesystem.encodeString

data WithHeader a
    = Header a
    -- ^ The first line with the header
    | Row a a
    -- ^ Every other line: 1st element is header, 2nd element is original row
    deriving (Show)

-- Strict pair used as the fold accumulator in 'header' (fold state plus the
-- remembered header, if seen yet)
data Pair a b = Pair !a !b

-- | Tag the first element of a stream as the 'Header' and pair every later
-- element ('Row') with that header.
header :: Shell a -> Shell (WithHeader a)
header (Shell k) = Shell k'
  where
    k' (FoldM step begin done) = k (FoldM step' begin' done')
      where
        -- No header seen yet: this element is the header
        step' (Pair x Nothing ) a = do
            x' <- step x (Header a)
            return (Pair x' (Just a))
        -- Header already captured: emit a Row carrying it
        step' (Pair x (Just a)) b = do
            x' <- step x (Row a b)
            return (Pair x' (Just a))
        begin' = do
            x <- begin
            return (Pair x Nothing)
        done' (Pair x _) = done x
|
PierreR/Haskell-Turtle-Library
|
src/Turtle/Prelude.hs
|
bsd-3-clause
| 55,444
| 192
| 30
| 15,397
| 13,406
| 6,940
| 6,466
| 1,030
| 12
|
/*Owner & Copyrights: Vance King Saxbe. A.*//* Copyright (c) <2014> Author Vance King Saxbe. A, and contributors Power Dominion Enterprise, Precieux Consulting and other contributors. Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager.*/{-# Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting, GoldSax Money, GoldSax Treasury, GoldSax Finance, GoldSax Banking and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. This Engagement sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager. LANGUAGE ScopedTypeVariables, FlexibleContexts #-}
module Main where
import System.IO
import Control.DeepSeq
import Data.Conduit
import qualified Data.Conduit.List as L
import GoldSaxMachineModule9.Types
import System.Random
import Control.Monad.Trans
import Control.Monad.State
import qualified Data.ByteString.Char8 as BS
import Data.Monoid
import qualified Data.Conduit.Binary as B
-- import qualified Data.Binary as S
import qualified Data.Conduit.Serialization.Binary as S
main :: IO ()
-- Earlier experiments from the chapter, kept for reference:
{-
main = do h <- openFile "/home/serras/comandos" ReadMode
          s <- hGetContents h
          s `deepseq` hClose h
          print s
-}
{-
main = do p <- L.sourceList [ GovOrg 1 "Zas", Individual 2 (Person "Alejandro" "Serrano")] $$ people =$ L.consume
          print p
-}
{-
main = let clients = [ GovOrg 1 "Zas", Individual 2 (Person "Alejandro" "Serrano")]
           conduitGovOrgs = L.sourceList clients $$ countGovOrgs
        in print $ execState conduitGovOrgs 0
-}
{-
main = runResourceT $
       B.sourceFile "clients.db" $$ B.lines =$= winnersFile =$ B.sinkFile "clientsWinners.db"
-}
{-
main = runResourceT $ L.sourceList clients $$ S.conduitEncode =$ B.sinkFile "people.db"
       where clients = [ Person "Alejandro" "Serrano", Person "The Doctor" "Who?" ]
-}
-- Active variant: stream "people.db", binary-decode each 'Person' and print it
main = runResourceT $
       B.sourceFile "people.db" $$ S.conduitDecode
                                =$ L.mapM_ (\(p :: Person) -> lift $ putStrLn $ show p)
-- | Conduit that projects the 'Person' out of each client record;
-- constructors without a @person@ field are skipped.
people :: Monad m => Conduit (Client i) m Person
people = do client <- await
            case client of
              Nothing -> return ()   -- upstream exhausted
              Just c -> do case c of
                             Company { person = p } -> yield p
                             Individual { person = p } -> yield p
                             _ -> return ()
                           people    -- loop for the next element
-- | Pair every client with a random win flag and a random prize in the
-- range [0, 3000].
winners :: Conduit (Client i) IO (Client i, Bool, Int)
winners = do client <- await
             case client of
               Nothing -> return ()
               Just c -> do (w :: Bool) <- lift $ randomIO
                            (y :: Int ) <- lift $ randomRIO (0, 3000)
                            yield (c, w, y)
                            winners
-- | Sink that tallies the 'GovOrg' clients in the stream, using the
-- 'MonadState' 'Int' counter as the accumulator; the final count is
-- returned once the upstream is exhausted.
countGovOrgs :: MonadState Int m => Sink (Client i) m Int
countGovOrgs = do client <- await
                  case client of
                    Nothing -> lift get  -- end of stream: return the tally
                    Just c -> do case c of
                                   GovOrg { } -> lift $ modify (+1)
                                   _ -> return ()
                                 countGovOrgs
-- | Line-oriented variant of 'winners': appends a random win flag and a
-- random prize amount (as text) to each raw 'ByteString' line.
winnersFile :: (Monad m, MonadIO m) => Conduit BS.ByteString m BS.ByteString
winnersFile = do client <- await
                 case client of
                   Nothing -> return ()
                   Just c -> do (w :: Bool) <- liftIO $ randomIO
                                (y :: Int ) <- liftIO $ randomRIO (0, 3000)
                                yield $ c <> BS.pack (" " ++ show w ++ " " ++ show y)
                                winnersFile
/*email to provide support at vancekingsaxbe@powerdominionenterprise.com, businessaffairs@powerdominionenterprise.com, For donations please write to fundraising@powerdominionenterprise.com*/
|
VanceKingSaxbeA/GoldSaxMachineStore
|
GoldSaxMachineModule9/src/Streams.hs
|
mit
| 4,177
| 23
| 19
| 1,320
| 926
| 473
| 453
| -1
| -1
|
-- https://www.hackerrank.com/challenges/angry-professor/problem
import Text.Printf
-- | Verdict for one test case: YES means the class is cancelled
-- (see 'isClassCancelled').
data Answer = YES | NO deriving (Show)

-- | One parsed test case.  Arrival times of zero or less count as on time
-- (see 'countEarly').
data Case = Case { totalStudents :: Int
                 , cancellationThreshold :: Int
                 , arrivals :: [Int] }
-- | Group consecutive elements of a list into pairs:
-- @toPairs [a,b,c,d] == [(a,b),(c,d)]@.
-- Crashes on odd-length input, which signals malformed puzzle input.
toPairs :: [a] -> [(a, a)]
toPairs [] = []
toPairs [_] = error "list size is odd, should be even" -- Break the program on erroneous input
toPairs (first : second : rest) = (first, second) : toPairs rest
-- | Parse one two-line test case into a 'Case'.  The first line carries the
-- student count and cancellation threshold; the second line carries the
-- arrival times.
-- Crashes on a blank or non-numeric first line ('head'/'last'/'read'),
-- consistent with this program's fail-fast handling of bad input.
toCase :: (String, String) -> Case
toCase (firstLine, secondLine) =
  Case { totalStudents = students
       , cancellationThreshold = threshold
       , arrivals = map read $ words secondLine :: [Int] }
  where firstList = map read $ words firstLine :: [Int]
        students = head firstList
        threshold = last firstList
-- | Sanity-check that the arrival list length matches the declared student
-- count; returns the case unchanged when consistent.
validate :: Case -> Case
validate (Case students threshold arrivals)
  | students == length arrivals = Case students threshold arrivals
  | otherwise = error errorMessage -- Break the program on erroneous input
  where errorMessage = printf "Total students expected to be %s, but is %s" (show students) (show $ length arrivals)
-- | Number of students who arrived on time (arrival time <= 0).
countEarly :: [Int] -> Int
countEarly = length . filter (<= 0)
-- | The class is held (NO) when at least the threshold number of students
-- arrived on time; otherwise it is cancelled (YES).
isClassCancelled :: Case -> Answer
isClassCancelled (Case _ cancellationThreshold arrivals)
  | countEarly arrivals >= cancellationThreshold = NO
  | otherwise = YES
-- | Full pipeline for one case: validate the parsed input, then decide.
solve :: Case -> Answer
solve = isClassCancelled . validate

main :: IO ()
main =
  interact $ -- Reads an input, prints an output
  unlines . -- Join elements with \n. Ex: unlines ["YES", "NO", "YES"] -> "YES\nNO\nYES\n"
  map (show . solve . toCase) . -- For each element do: 1) toCase, 2) solve, 3) show
  toPairs . -- Each test case has two lines, put them in a pair: (line 1, line 2)
  tail . -- Drop the first element (we don't need it)
  lines -- Read lines from stdin
|
julianespinel/training
|
hackerrank/AngryProfessor.hs
|
mit
| 2,009
| 0
| 12
| 562
| 486
| 261
| 225
| 38
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module System.Nagios.Plugin.Ekg.Types (
MetricTree
) where
import Control.Applicative
import Control.Monad
import Data.Aeson
import Data.Aeson.Types
import Data.Int
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import Data.Map (Map)
import qualified Data.Map as M
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import System.Nagios.Plugin
-- | One EKG metric value, discriminated by the one-letter "type" field of
-- EKG's JSON encoding (see the FromJSON instance below).
data EkgMetric =
    -- | Nondecreasing counter, e.g., all-time number of requests.
    EkgCounter Int64
    -- | Measure of a quantity over time, e.g., number of requests per minute.
  | EkgGauge Double
    -- | Can't meaningfully turn labels into perfdata, this is a placeholder.
  | EkgLabel
    -- | Can't meaningfully turn distributions into perfdata, this is a placeholder.
  | EkgDistribution
  deriving (Eq, Show)

-- "type" selects the constructor: "c" counter, "g" gauge, "l" label,
-- "d" distribution; counters and gauges read their value from "val".
instance FromJSON EkgMetric where
    parseJSON (Object o) = do
        metric_type <- o .: "type"
        case metric_type of
            "c" -> EkgCounter <$> o .: "val"
            "g" -> EkgGauge <$> o .: "val"
            "l" -> return EkgLabel
            "d" -> return EkgDistribution
            x -> fail $ "Invalid metric type " <> T.unpack x
    parseJSON _ = fail "EkgMetric must be an object"
-- | A node in the 'MetricTree'; a Leaf is a single metric.
data MetricNode =
      Leaf EkgMetric
    | Branch (Map Text MetricNode)

-- The same JSON object is parsed twice: once to decide leaf-vs-branch and
-- once to build the chosen constructor.
instance FromJSON MetricNode where
    parseJSON (Object o) = do
        leaf <- isLeaf <$> parseJSON (Object o)
        if leaf
            then Leaf <$> parseJSON (Object o)
            else Branch <$> parseJSON (Object o)
      where
        -- Educated guess as to whether this object is a leaf. It'll
        -- definitely have "type"; it'll have "val" if it's a counter,
        -- gauge or label and it'll have "variance" and "mean" if it's
        -- a distribution.
        --
        -- My kingdom for a schema.
        isLeaf :: HashMap Text Value -> Bool
        isLeaf m = HM.member "type" m &&
                       (HM.member "val" m ||
                        (HM.member "variance" m && HM.member "mean" m)
                       )
    parseJSON x = fail $ "MetricNode must be an object, not " <> show x
-- | Top-level object for parsed EKG metrics. Structurally, this is an
--   n-ary tree; the leaves are the metrics themselves and the
--   non-leaf nodes are used to construct the metric labels.
newtype MetricTree = MetricTree
    { unMetricTree :: Map Text MetricNode }

instance FromJSON MetricTree where
    parseJSON (Object o) = MetricTree <$> parseJSON (Object o)
    parseJSON _ = fail "MetricTree must be an object"

-- Walk the tree from the top with no prefix, accumulating perfdata
instance ToPerfData MetricTree where
    toPerfData (MetricTree m) = M.foldrWithKey (renderValue Nothing) [] m
-- | Build perfdata from a single metric. The Nagios perfdata format
-- doesn't allow us to sensibly represent the EKG 'Distribution' or
-- 'Label' types so we don't try.
renderMetric :: Text
             -> EkgMetric
             -> Maybe PerfDatum
renderMetric lbl (EkgCounter n) =
    Just $ barePerfDatum lbl (IntegralValue n) Counter
renderMetric lbl (EkgGauge n) =
    Just $ barePerfDatum lbl (RealValue n) NullUnit
renderMetric _ EkgLabel = Nothing
renderMetric _ EkgDistribution = Nothing

-- | Build perfdata from a node in the metric tree. Produce a
-- 'PerfDatum' from a 'Leaf', recursively walk a 'Branch' and
-- mappend the leaves.
--
-- The prefix accumulates the path of branch keys above this node and is
-- joined into the final metric label by 'withPrefix'.
renderValue :: Maybe Text
            -> Text
            -> MetricNode
            -> [PerfDatum]
            -> [PerfDatum]
renderValue prefix lbl (Leaf val) acc =
    case renderMetric (withPrefix prefix lbl) val of
        Nothing -> acc
        Just pd -> pd : acc
renderValue prefix lbl (Branch branch) acc = acc
    <> M.foldrWithKey (renderValue (Just $ withPrefix prefix lbl)) [] branch
-- | Construct a metric name, optionally prepended with a prefix (we
-- want a prefix for every component of the name except the first one).
withPrefix :: Maybe Text
           -> Text
           -> Text
withPrefix maybePrefix name = case maybePrefix of
    Nothing  -> name
    Just pre -> pre <> "_" <> name
|
olorin/nagios-plugin-ekg
|
lib/System/Nagios/Plugin/Ekg/Types.hs
|
mit
| 4,121
| 0
| 14
| 1,085
| 861
| 456
| 405
| 78
| 2
|
import Data.List
-- | Greedily count how many pies fit on racks.  Expects @pies@ sorted in
-- descending order and @racks@ in ascending order (as prepared by the
-- caller in 'cool'): each pie that fits somewhere consumes the largest
-- remaining rack ('init' drops the last, i.e. largest, element).
getnum :: Ord a => [a] -> [a] -> Int
getnum pies racks = helper 0 pies racks
  where helper acc [] _ = acc
        helper acc _ [] = acc
        helper acc (p:ps) rs
          | any (>= p) rs = helper (acc + 1) ps (init rs)
          | otherwise     = helper acc ps rs
-- | Read one test case from stdin and print the number of pies that fit.
-- The first line is read and discarded (presumably the size counts — confirm
-- against the problem statement); pies are sorted descending and racks
-- ascending before being handed to 'getnum'.
cool = do
  getLine
  pie_str <- getLine
  let pies = reverse . sort $ map (\x -> read x :: Int) $ words pie_str
  rack_str <- getLine
  let racks = sort $ map (\x -> read x :: Int) $ words rack_str
  print $ getnum pies racks
-- | Read the number of test cases, then run 'cool' once per case.
-- (sequence . replicate is equivalent to Control.Monad.replicateM here;
-- the collected results are discarded.)
main = do
  terms <- readLn
  sequence $ replicate terms cool
  return ()
|
paramsingh/codechef-solutions
|
src/practice/cooling.hs
|
mit
| 771
| 0
| 15
| 354
| 290
| 140
| 150
| 18
| 3
|
{-# LANGUAGE StrictData #-}
{-# LANGUAGE Trustworthy #-}
module Network.Tox.Crypto.NonceSpec where
import Control.Monad.IO.Class (liftIO)
import Test.Hspec
import Test.QuickCheck
import qualified Network.Tox.Crypto.Nonce as Nonce
spec :: Spec
spec = do
  -- Fresh nonces must not collide
  describe "newNonce" $
    it "generates a different nonce on subsequent calls to newNonce" $ do
      nonce1 <- Nonce.newNonce
      nonce2 <- Nonce.newNonce
      liftIO $ nonce1 `shouldNotBe` nonce2
  describe "nudge" $
    it "creates a nonce that is different from the passed nonce" $
      property $ \nonce ->
        Nonce.nudge nonce `shouldNotBe` nonce
  -- increment: property plus fixed-point cases at the boundaries.
  -- Nonces are read from 48-hex-digit string literals via Read.
  describe "increment" $ do
    it "generates a different nonce for arbitrary nonces" $
      property $ \nonce -> do
        let incremented = Nonce.increment nonce
        incremented `shouldNotBe` nonce
    it "increments a 0 nonce to 1" $ do
      let nonce = read "\"000000000000000000000000000000000000000000000000\""
      let nonce' = read "\"000000000000000000000000000000000000000000000001\""
      let incremented = Nonce.increment nonce
      incremented `shouldBe` nonce'
    -- Wrap-around at the maximum value
    it "increments a max nonce to 0" $ do
      let nonce = read "\"ffffffffffffffffffffffffffffffffffffffffffffffff\""
      let nonce' = read "\"000000000000000000000000000000000000000000000000\""
      let incremented = Nonce.increment nonce
      incremented `shouldBe` nonce'
    it "increments a max-1 nonce to max" $ do
      let nonce = read "\"fffffffffffffffffffffffffffffffffffffffffffffffe\""
      let nonce' = read "\"ffffffffffffffffffffffffffffffffffffffffffffffff\""
      let incremented = Nonce.increment nonce
      incremented `shouldBe` nonce'
    -- Carry propagation across byte boundaries (big-endian increment of a
    -- little-endian-looking value)
    it "increments a little endian max-1 nonce to little endian 255" $ do
      let nonce = read "\"feffffffffffffffffffffffffffffffffffffffffffffff\""
      let nonce' = read "\"ff0000000000000000000000000000000000000000000000\""
      let incremented = Nonce.increment nonce
      incremented `shouldBe` nonce'
|
iphydf/hs-toxcore
|
test/Network/Tox/Crypto/NonceSpec.hs
|
gpl-3.0
| 2,024
| 7
| 13
| 443
| 372
| 194
| 178
| 43
| 1
|
-- (c) The FFI task force, [2000..2001]
--
-- Provides parametrised data and function pointers
module Ptr (
-- Data pointers
--
Ptr, -- data Ptr a; instances: Eq, Ord, Show
nullPtr, -- :: Ptr a
castPtr, -- :: Ptr a -> Ptr b
plusPtr, -- :: Ptr a -> Int -> Ptr b
alignPtr, -- :: Ptr a -> Int -> Ptr a
minusPtr, -- :: Ptr a -> Ptr b -> Int
-- Function pointers.
--
FunPtr, -- data FunPtr a; instances: Eq, Ord, Show
nullFunPtr, -- :: FunPtr a
castFunPtr, -- :: FunPtr a -> FunPtr b
castFunPtrToPtr, -- :: FunPtr a -> Ptr b
castPtrToFunPtr, -- :: Ptr a -> FunPtr b
freeHaskellFunPtr -- :: FunPtr a -> IO ()
) where
import Word
-- Parametrised data pointer
--
-- * Suitable type arguments can be used to segregate pointers to incompatible
-- values such that the type checker can spot coding mistakes
--
-- * The size of the value representing a data pointer is system-dependent
--
-- * In C land, values of type `Ptr a' are represented by
--
-- typedef void *HsPtr;
--
-- | A data pointer: a machine word tagged with a phantom type @a@ so the
-- type checker can keep pointers to different payloads apart.
newtype Ptr a = Ptr WordXY
              deriving (Eq, Ord, Show)
-- Special pointer value that indicates the absence of a data pointer
--
-- * This value should be compatible with the C constant `NULL'
--
nullPtr :: Ptr a
nullPtr = Ptr primNullPtr  -- primNullPtr is the primitive NULL address
-- Change the type parameter of a pointer
--
-- Re-tag a pointer with a different phantom type; the address is unchanged.
castPtr :: Ptr a -> Ptr b
castPtr (Ptr addr) = Ptr addr
-- Advance a pointer by a given number of bytes
--
-- Offset a pointer by a byte count, delegating to the primitive.
plusPtr :: Ptr a -> Int -> Ptr b
plusPtr (Ptr addr) offset = Ptr (primPlusPtr addr offset)
-- Align the given pointer at the next higher address boundary that is a
-- multiple of the second argument
--
-- * This operation is idempotent
--
-- Round the pointer up to the next multiple of the given alignment.
alignPtr :: Ptr a -> Int -> Ptr a
alignPtr (Ptr addr) boundary = Ptr (primAlignPtr addr boundary)
-- Compute the byte difference between two pointers
--
-- * We have
--
-- p2 == p1 `plusPtr` (p2 `minusPtr` p1)
--
-- Byte distance between two pointers, delegating to the primitive.
minusPtr :: Ptr a -> Ptr b -> Int
minusPtr (Ptr addr1) (Ptr addr2) = primMinusPtr addr1 addr2
-- Parametrised function pointer
--
-- * Suitable type arguments can be used to segregate pointers to incompatible
-- values such that the type checker can spot coding mistakes; type
-- arguments should be functionals
--
-- * The size of the value representing a function pointer is system-dependent
--
-- * Data and function pointers may be represented differently on some
-- architectures
--
-- * Routines defined with "foreign export dynamic" should be declared to
-- produce function pointers of the present type
--
-- * In C land, values of type `FunPtr a' are represented by
--
-- typedef void (*HsFunPtr)(void);
--
-- | A function pointer: a machine word tagged with a phantom type @a@
-- (conventionally a function type) describing the pointed-to routine.
newtype FunPtr a = FunPtr WordXY
                 deriving (Eq, Ord, Show)
-- Special pointer value that indicates the absence of a function pointer
--
-- * This value should be compatible with the C constant `NULL'
--
nullFunPtr :: FunPtr a
-- Bug fix: the body previously read @nullPtr = Ptr primNullFunPtr@,
-- which both duplicated the earlier 'nullPtr' binding and left
-- 'nullFunPtr' (declared in the export list and the signature above)
-- without a definition. Bind the correct name with the 'FunPtr'
-- constructor.
nullFunPtr = FunPtr primNullFunPtr
-- Change the type parameter of a function pointer
--
-- Re-tag a function pointer with a different phantom type.
castFunPtr :: FunPtr a -> FunPtr b
castFunPtr (FunPtr addr) = FunPtr addr
-- Convert a function into a data pointer
--
-- Reinterpret a function pointer as a data pointer via its word value.
castFunPtrToPtr :: FunPtr a -> Ptr b
castFunPtrToPtr (FunPtr addr) = Ptr (fromIntegral addr)
-- Convert a data into a function pointer
--
-- Reinterpret a data pointer as a function pointer via its word value.
castPtrToFunPtr :: Ptr a -> FunPtr b
castPtrToFunPtr (Ptr addr) = FunPtr (fromIntegral addr)
-- Deallocate a function pointer obtained via a "foreign export dynamic"
--
freeHaskellFunPtr :: FunPtr a -> IO ()
-- Releases the runtime resources behind the pointer via the primitive.
freeHaskellFunPtr (FunPtr p) = primFreeHaskellFunctionPtr p
|
k0001/gtk2hs
|
tools/c2hs/doc/c2hs/lib/Ptr.hs
|
gpl-3.0
| 3,550
| 0
| 7
| 879
| 519
| 307
| 212
| 38
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DataPipeline.ReportTaskRunnerHeartbeat
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Task runners call 'ReportTaskRunnerHeartbeat' every 15 minutes to
-- indicate that they are operational. If the AWS Data Pipeline Task Runner
-- is launched on a resource managed by AWS Data Pipeline, the web service
-- can use this call to detect when the task runner application has failed
-- and restart a new instance.
--
-- /See:/ <http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_ReportTaskRunnerHeartbeat.html AWS API Reference> for ReportTaskRunnerHeartbeat.
module Network.AWS.DataPipeline.ReportTaskRunnerHeartbeat
(
-- * Creating a Request
reportTaskRunnerHeartbeat
, ReportTaskRunnerHeartbeat
-- * Request Lenses
, rtrhHostname
, rtrhWorkerGroup
, rtrhTaskrunnerId
-- * Destructuring the Response
, reportTaskRunnerHeartbeatResponse
, ReportTaskRunnerHeartbeatResponse
-- * Response Lenses
, rtrhrsResponseStatus
, rtrhrsTerminate
) where
import Network.AWS.DataPipeline.Types
import Network.AWS.DataPipeline.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Contains the parameters for ReportTaskRunnerHeartbeat.
--
-- /See:/ 'reportTaskRunnerHeartbeat' smart constructor.
data ReportTaskRunnerHeartbeat = ReportTaskRunnerHeartbeat'
    { _rtrhHostname     :: !(Maybe Text)  -- ^ Public DNS name of the task runner (optional).
    , _rtrhWorkerGroup  :: !(Maybe Text)  -- ^ Worker group the runner accepts tasks for (optional).
    , _rtrhTaskrunnerId :: !Text          -- ^ Unique ID of the task runner (required).
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportTaskRunnerHeartbeat' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rtrhHostname'
--
-- * 'rtrhWorkerGroup'
--
-- * 'rtrhTaskrunnerId'
reportTaskRunnerHeartbeat
    :: Text -- ^ 'rtrhTaskrunnerId'
    -> ReportTaskRunnerHeartbeat
reportTaskRunnerHeartbeat taskRunnerId =
    ReportTaskRunnerHeartbeat'
    { _rtrhHostname = Nothing
    , _rtrhWorkerGroup = Nothing
    , _rtrhTaskrunnerId = taskRunnerId
    }
-- | The public DNS name of the task runner.
rtrhHostname :: Lens' ReportTaskRunnerHeartbeat (Maybe Text)
rtrhHostname = lens _rtrhHostname (\rec v -> rec { _rtrhHostname = v })
-- | The type of task the task runner is configured to accept and process.
-- The worker group is set as a field on objects in the pipeline when they
-- are created. You can only specify a single value for 'workerGroup'.
-- There are no wildcard values permitted in 'workerGroup'; the string must
-- be an exact, case-sensitive, match.
rtrhWorkerGroup :: Lens' ReportTaskRunnerHeartbeat (Maybe Text)
rtrhWorkerGroup = lens _rtrhWorkerGroup (\rec v -> rec { _rtrhWorkerGroup = v })
-- | The ID of the task runner. This value should be unique across your AWS
-- account. In the case of AWS Data Pipeline Task Runner launched on a
-- resource managed by AWS Data Pipeline, the web service provides a unique
-- identifier when it launches the application. If you have written a
-- custom task runner, you should assign a unique identifier for the task
-- runner.
rtrhTaskrunnerId :: Lens' ReportTaskRunnerHeartbeat Text
rtrhTaskrunnerId = lens _rtrhTaskrunnerId (\rec v -> rec { _rtrhTaskrunnerId = v })
-- The request is a JSON POST against the DataPipeline endpoint; the
-- response decoder combines the HTTP status with the "terminate" flag
-- from the JSON body.
instance AWSRequest ReportTaskRunnerHeartbeat where
        type Rs ReportTaskRunnerHeartbeat =
             ReportTaskRunnerHeartbeatResponse
        request = postJSON dataPipeline
        response
          = receiveJSON
              (\ s h x ->
                 ReportTaskRunnerHeartbeatResponse' <$>
                   (pure (fromEnum s)) <*> (x .:> "terminate"))
-- Fixed headers: the X-Amz-Target selects the API action, and the
-- content type is the AWS JSON 1.1 protocol.
instance ToHeaders ReportTaskRunnerHeartbeat where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("DataPipeline.ReportTaskRunnerHeartbeat" ::
                       ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])
-- Optional fields are dropped from the payload via 'catMaybes'; only
-- taskrunnerId is always present.
instance ToJSON ReportTaskRunnerHeartbeat where
        toJSON ReportTaskRunnerHeartbeat'{..}
          = object
              (catMaybes
                 [("hostname" .=) <$> _rtrhHostname,
                  ("workerGroup" .=) <$> _rtrhWorkerGroup,
                  Just ("taskrunnerId" .= _rtrhTaskrunnerId)])
-- The action is addressed by header, not by path or query string.
instance ToPath ReportTaskRunnerHeartbeat where
        toPath = const "/"
instance ToQuery ReportTaskRunnerHeartbeat where
        toQuery = const mempty
-- | Contains the output of ReportTaskRunnerHeartbeat.
--
-- /See:/ 'reportTaskRunnerHeartbeatResponse' smart constructor.
data ReportTaskRunnerHeartbeatResponse = ReportTaskRunnerHeartbeatResponse'
    { _rtrhrsResponseStatus :: !Int   -- ^ HTTP response status code.
    , _rtrhrsTerminate      :: !Bool  -- ^ Whether the calling task runner should terminate.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportTaskRunnerHeartbeatResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rtrhrsResponseStatus'
--
-- * 'rtrhrsTerminate'
reportTaskRunnerHeartbeatResponse
    :: Int -- ^ 'rtrhrsResponseStatus'
    -> Bool -- ^ 'rtrhrsTerminate'
    -> ReportTaskRunnerHeartbeatResponse
reportTaskRunnerHeartbeatResponse status terminate =
    ReportTaskRunnerHeartbeatResponse'
    { _rtrhrsResponseStatus = status
    , _rtrhrsTerminate = terminate
    }
-- | The response status code.
rtrhrsResponseStatus :: Lens' ReportTaskRunnerHeartbeatResponse Int
rtrhrsResponseStatus = lens _rtrhrsResponseStatus (\rec v -> rec { _rtrhrsResponseStatus = v })
-- | Indicates whether the calling task runner should terminate.
rtrhrsTerminate :: Lens' ReportTaskRunnerHeartbeatResponse Bool
rtrhrsTerminate = lens _rtrhrsTerminate (\rec v -> rec { _rtrhrsTerminate = v })
|
fmapfmapfmap/amazonka
|
amazonka-datapipeline/gen/Network/AWS/DataPipeline/ReportTaskRunnerHeartbeat.hs
|
mpl-2.0
| 6,386
| 20
| 15
| 1,297
| 759
| 457
| 302
| 98
| 1
|
-- Copyright (C) 2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
module BDCS.RPM.BuildSpec(spec)
where
import Codec.RPM.Tags(Tag(..))
import Control.Exception(evaluate)
import Test.Hspec
import BDCS.DB(Builds(..))
import BDCS.Exceptions(DBException(..))
import BDCS.RPM.Builds(mkBuild)
import Utils(fakeKey)
spec :: Spec
spec = describe "BDCS.RPM.Builds Tests" $ do
    it "Handles no Epoch" $
        buildsEpoch (build [ Release "1", Arch "x86-64", BuildTime 0, ChangeLogText [""] ]) `shouldBe` 0
    it "No Release raises" $
        missingTagRaises [ Arch "x86-64", BuildTime 0, ChangeLogText [""] ] "Release"
    it "No Arch raises" $
        missingTagRaises [ Release "1", BuildTime 0, ChangeLogText [""] ] "Arch"
    it "No BuildTime raises" $
        missingTagRaises [ Release "1", Arch "x86-64", ChangeLogText [""] ] "BuildTime"
    it "No ChangeLogText raises" $
        missingTagRaises [ Release "1", Arch "x86-64", BuildTime 0 ] "ChangeLogText"
  where
    -- Build a record from the given tag list using the shared fake key.
    build tags = mkBuild tags fakeKey
    -- Evaluating mkBuild without a required tag must throw MissingRPMTag.
    missingTagRaises tags tag =
        evaluate (build tags) `shouldThrow` (== MissingRPMTag tag)
|
atodorov/bdcs
|
src/tests/BDCS/RPM/BuildSpec.hs
|
lgpl-2.1
| 1,794
| 0
| 15
| 320
| 391
| 217
| 174
| 20
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ur-PK">
<title>TLS Debug | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/tlsdebug/src/main/javahelp/org/zaproxy/zap/extension/tlsdebug/resources/help_ur_PK/helpset_ur_PK.hs
|
apache-2.0
| 971
| 80
| 66
| 160
| 415
| 210
| 205
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
<title>Dizge Tarayıcı | ZAP Uzantısı</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>İçerikler</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Dizin</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Arama</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriler</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/sequence/src/main/javahelp/org/zaproxy/zap/extension/sequence/resources/help_tr_TR/helpset_tr_TR.hs
|
apache-2.0
| 981
| 80
| 66
| 160
| 426
| 215
| 211
| -1
| -1
|
module Acme.OmittedSpec where
import Acme.Omitted
import Test.Hspec
spec :: Spec
spec =
    describe "omitted" $
        it "denotes an omitted definition" $
            omitted `shouldThrow` errorCall "Acme.Omitted.omitted"
|
beni55/acme-omitted
|
tests/Acme/OmittedSpec.hs
|
bsd-2-clause
| 224
| 0
| 14
| 42
| 58
| 30
| 28
| 8
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module UnstarGist where
import qualified GitHub.Data.Name as N
import qualified GitHub.Endpoints.Gists as GH
import qualified Data.Text as T
import qualified Data.Text.IO as T
main :: IO ()
main = do
    let gistId = "your-gist-id"
    result <- GH.unstarGist (GH.OAuth "your-token") gistId
    either reportError (reportSuccess gistId) result
  where
    -- Print the API error on failure.
    reportError err = putStrLn $ "Error: " ++ show err
    -- Report which gist was unstarred on success.
    reportSuccess gid () = T.putStrLn $ T.concat ["Unstarred: ", N.untagName gid]
|
jwiegley/github
|
samples/Gists/UnstarGist.hs
|
bsd-3-clause
| 481
| 0
| 14
| 109
| 146
| 81
| 65
| 13
| 2
|
-- |this module provides a simple mechanism for adding IO operations
-- to a queue and running them in a single thread. This is useful if
-- the IO operations have side-effects which could collide if run from
-- multiple threads. For example, creating an image thumbnail and
-- storing it on disk, running latex, etc.
module Extra.IOThread where
import Control.Concurrent (ThreadId, forkIO)
import Control.Concurrent.Chan (Chan,newChan, readChan, writeChan)
import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, readMVar)
import Control.Exception
import Control.Monad (forever)
-- | Handle to a running IO thread: a channel of (request, reply-slot)
-- pairs, where the reply slot carries either the worker's result or the
-- exception it raised.
newtype IOThread a b = IOThread (Chan (a, MVar (Either SomeException b)))
-- |start the IO thread.
startIOThread :: (a -> IO b)                 -- ^ the IO function that does all the work
              -> IO (ThreadId, IOThread a b) -- ^ a ThreadId which can be used to kill the IOThread, and a handle that can be used to issue requests to the thread.
startIOThread f = do
    chan <- newChan
    tid <- forkIO (workerLoop chan)
    return (tid, IOThread chan)
  where
    -- Worker loop: take a request off the channel, run the action under
    -- 'try' so exceptions travel back to the caller, and hand the
    -- outcome over through the MVar.
    workerLoop chan =
        forever $ do (arg, replyVar) <- readChan chan
                     outcome <- try (f arg)
                     putMVar replyVar outcome
-- |issue a request to the IO thread and get back the result
-- if the thread function throws an exception 'ioRequest' will rethrow the exception.
ioRequest :: (IOThread a b) -- ^ handle to the IOThread
          -> a              -- ^ argument to the function in the IOThread
          -> IO b           -- ^ value returned by the function in the IOThread
ioRequest (IOThread chan) arg = do
    replyVar <- newEmptyMVar
    writeChan chan (arg, replyVar)
    outcome <- readMVar replyVar
    -- Rethrow in this thread any exception the worker caught.
    either throwIO return outcome
|
eigengrau/haskell-extra
|
Extra/IOThread.hs
|
bsd-3-clause
| 1,733
| 0
| 12
| 454
| 361
| 193
| 168
| 27
| 2
|
module Development.Abba.Types
( Rule (..)
, Dependency
, Recipe
) where
import qualified Data.Set as Set
import Text.Printf
-- |Make-like rule definition, designed to be interpreted to produce one or
-- more targets from zero or more dependencies, optionally using the supplied
-- 'Recipe'.
-- NOTE: 'Eq' and 'Ord' for 'Rule' are keyed on 'targets' (then
-- 'dependencies' for ordering) — see the instances below.
data Rule = Rule {
      targets :: [Dependency]
      -- ^List of targets that this 'Rule' builds.
    , dependencies :: [Dependency]
      -- ^List of dependencies that the targets require.
    , recipe :: Maybe Recipe
      -- ^'Recipe' to build the targets from the dependencies, if any.
    }
-- Define a simple method for showing a 'Rule' to make debugging easier.
-- Define a simple method for showing a 'Rule' to make debugging easier.
instance Show Rule where
    -- Use explicit @field = var@ bindings rather than field puns: the
    -- original pattern @Rule {targets, dependencies, recipe}@ requires
    -- the NamedFieldPuns extension, which no LANGUAGE pragma in this
    -- file enables (it may be set via cabal default-extensions —
    -- verify), and it was inconsistent with the explicit patterns used
    -- by the 'Eq' and 'Ord' instances below.
    show (Rule { targets = ts, dependencies = deps, recipe = mRecipe })
        = printf "Rule {targets=%s, dependencies=%s, recipe=%s}"
            (show ts) (show deps) shownRecipe
      where
        -- A 'Recipe' is a function and has no 'Show' instance, so only
        -- its presence is reported.
        shownRecipe = case mRecipe of
            Just _  -> "Just Recipe(..)"
            Nothing -> "Nothing"
-- 'Rule' equality is uniquely determined by its targets to prevent conflicts.
instance Eq Rule where
    -- Equality looks only at the targets, via the field accessor.
    ruleA == ruleB = targets ruleA == targets ruleB
-- 'Rule's are ordered by their targets first, then by their dependencies.
instance Ord Rule where
    -- Order by targets first, falling back to dependencies on a tie;
    -- lexicographic pair comparison encodes exactly that.
    compare ruleA ruleB =
        compare (targets ruleA, dependencies ruleA)
                (targets ruleB, dependencies ruleB)
-- |A function which builds a list of targets from a list of dependencies,
-- often producing filesystem side effects.
-- A Recipe receives both the targets and the dependencies so it can
-- decide what to produce and from what.
type Recipe
    = [Dependency]
    -- ^List of targets to build.
    -> [Dependency]
    -- ^List of dependencies required to build the targets.
    -> IO ()
-- |A target or dependency within a 'Rule'.
type Dependency
    = String
|
mgeorgehansen/Abba
|
Development/Abba/Types.hs
|
bsd-3-clause
| 1,897
| 0
| 11
| 456
| 321
| 186
| 135
| -1
| -1
|
-- Use GHC generics to automatically generate good instances.
{-# LANGUAGE CPP #-}
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Twitter.Generic
(
Metadata(..)
, Geo(..)
, Story(..)
, Result(..)
) where
import Prelude ()
import Prelude.Compat
import Twitter
#ifndef HAS_BOTH_AESON_AND_BENCHMARKS
import Data.Aeson (ToJSON, FromJSON)
#else
import "aeson" Data.Aeson (ToJSON, FromJSON)
import qualified "aeson-benchmarks" Data.Aeson as B
#endif
-- Empty instance bodies: each method falls back to its default, which
-- the module header says is generated via GHC generics (the types'
-- 'Generic' instances presumably come from "Twitter" — verify there).
instance ToJSON Metadata
instance FromJSON Metadata
instance ToJSON Geo
instance FromJSON Geo
instance ToJSON Story
instance FromJSON Story
instance ToJSON Result
instance FromJSON Result
#ifdef HAS_BOTH_AESON_AND_BENCHMARKS
-- Mirror instances for the benchmark copy of aeson, guarded by CPP.
instance B.ToJSON Metadata
instance B.FromJSON Metadata
instance B.ToJSON Geo
instance B.FromJSON Geo
instance B.ToJSON Story
instance B.FromJSON Story
instance B.ToJSON Result
instance B.FromJSON Result
#endif
|
tolysz/prepare-ghcjs
|
spec-lts8/aeson/examples/Twitter/Generic.hs
|
bsd-3-clause
| 948
| 0
| 6
| 148
| 200
| 107
| 93
| 21
| 0
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Taken quite directly from the Peyton Jones/Lester paper.
-}
{-# LANGUAGE CPP #-}
-- | A module concerned with finding the free variables of an expression.
module CoreFVs (
-- * Free variables of expressions and binding groups
exprFreeVars, -- CoreExpr -> VarSet -- Find all locally-defined free Ids or tyvars
exprFreeIds, -- CoreExpr -> IdSet -- Find all locally-defined free Ids
exprsFreeVars, -- [CoreExpr] -> VarSet
bindFreeVars, -- CoreBind -> VarSet
-- * Selective free variables of expressions
InterestingVarFun,
exprSomeFreeVars, exprsSomeFreeVars,
-- * Free variables of Rules, Vars and Ids
varTypeTyVars,
idUnfoldingVars, idFreeVars, idRuleAndUnfoldingVars,
idRuleVars, idRuleRhsVars, stableUnfoldingVars,
ruleRhsFreeVars, ruleFreeVars, rulesFreeVars,
ruleLhsOrphNames, ruleLhsFreeIds,
vectsFreeVars,
-- * Core syntax tree annotation with free variables
CoreExprWithFVs, -- = AnnExpr Id VarSet
CoreBindWithFVs, -- = AnnBind Id VarSet
freeVars, -- CoreExpr -> CoreExprWithFVs
freeVarsOf -- CoreExprWithFVs -> IdSet
) where
#include "HsVersions.h"
import CoreSyn
import Id
import IdInfo
import NameSet
import UniqFM
import Name
import VarSet
import Var
import TcType
import Coercion
import Maybes( orElse )
import Util
import BasicTypes( Activation )
import Outputable
{-
************************************************************************
* *
\section{Finding the free variables of an expression}
* *
************************************************************************
This function simply finds the free variables of an expression.
So far as type variables are concerned, it only finds tyvars that are
* free in type arguments,
* free in the type of a binder,
but not those that are free in the type of variable occurrence.
-}
-- | Find all locally-defined free Ids or type variables in an expression
exprFreeVars :: CoreExpr -> VarSet
exprFreeVars = exprSomeFreeVars isLocalVar
-- | Find all locally-defined free Ids in an expression
exprFreeIds :: CoreExpr -> IdSet        -- Find all locally-defined free Ids
exprFreeIds = exprSomeFreeVars isLocalId
-- | Find all locally-defined free Ids or type variables in several expressions
exprsFreeVars :: [CoreExpr] -> VarSet
exprsFreeVars = mapUnionVarSet exprFreeVars
-- | Find all locally defined free Ids in a binding group
-- NB: the worker functions have type 'FV' (see below), so they take the
-- interestingness predicate and an initial in-scope set as extra args.
bindFreeVars :: CoreBind -> VarSet
bindFreeVars (NonRec b r) = rhs_fvs (b,r) isLocalVar emptyVarSet
bindFreeVars (Rec prs)    = addBndrs (map fst prs)
                                     (foldr (union . rhs_fvs) noVars prs)
                                     isLocalVar emptyVarSet
-- | Finds free variables in an expression selected by a predicate
exprSomeFreeVars :: InterestingVarFun   -- ^ Says which 'Var's are interesting
                 -> CoreExpr
                 -> VarSet
exprSomeFreeVars fv_cand e = expr_fvs e fv_cand emptyVarSet
-- | Finds free variables in several expressions selected by a predicate
exprsSomeFreeVars :: InterestingVarFun  -- Says which 'Var's are interesting
                  -> [CoreExpr]
                  -> VarSet
exprsSomeFreeVars fv_cand = mapUnionVarSet (exprSomeFreeVars fv_cand)
-- | Predicate on possible free variables: returns @True@ iff the variable is interesting
type InterestingVarFun = Var -> Bool
-- An FV computation is parameterised by the predicate and the set of
-- binders currently in scope, and yields the free-variable set.
type FV = InterestingVarFun
        -> VarSet       -- Locally bound
        -> VarSet       -- Free vars
-- Return the vars that are both (a) interesting
-- and (b) not locally bound
-- See function keep_it
-- A variable is kept iff it is not locally bound and the caller's
-- predicate deems it interesting.
keep_it :: InterestingVarFun -> VarSet -> Var -> Bool
keep_it fv_cand in_scope var =
  not (var `elemVarSet` in_scope) && fv_cand var
-- Combine two FV computations by unioning their results under the same
-- predicate and in-scope set.
union :: FV -> FV -> FV
union fv1 fv2 fv_cand in_scope = fv1 fv_cand in_scope `unionVarSet` fv2 fv_cand in_scope
-- The FV computation that finds nothing.
noVars :: FV
noVars _ _ = emptyVarSet
-- Comment about obselete code
-- We used to gather the free variables the RULES at a variable occurrence
-- with the following cryptic comment:
-- "At a variable occurrence, add in any free variables of its rule rhss
-- Curiously, we gather the Id's free *type* variables from its binding
-- site, but its free *rule-rhs* variables from its usage sites. This
-- is a little weird. The reason is that the former is more efficient,
-- but the latter is more fine grained, and a makes a difference when
-- a variable mentions itself one of its own rule RHSs"
-- Not only is this "weird", but it's also pretty bad because it can make
-- a function seem more recursive than it is. Suppose
-- f = ...g...
-- g = ...
-- RULE g x = ...f...
-- Then f is not mentioned in its own RHS, and needn't be a loop breaker
-- (though g may be). But if we collect the rule fvs from g's occurrence,
-- it looks as if f mentions itself. (This bites in the eftInt/eftIntFB
-- code in GHC.Enum.)
--
-- Anyway, it seems plain wrong. The RULE is like an extra RHS for the
-- function, so its free variables belong at the definition site.
--
-- Deleted code looked like
-- foldVarSet add_rule_var var_itself_set (idRuleVars var)
-- add_rule_var var set | keep_it fv_cand in_scope var = extendVarSet set var
-- | otherwise = set
-- SLPJ Feb06
-- A single occurrence contributes itself iff it passes 'keep_it'.
oneVar :: Id -> FV
oneVar var fv_cand in_scope
  = ASSERT( isId var )
    if keep_it fv_cand in_scope var
    then unitVarSet var
    else emptyVarSet
-- Lift a concrete VarSet into an FV computation by filtering it.
someVars :: VarSet -> FV
someVars vars fv_cand in_scope
  = filterVarSet (keep_it fv_cand in_scope) vars
addBndr :: CoreBndr -> FV -> FV
addBndr bndr fv fv_cand in_scope
  = someVars (varTypeTyVars bndr) fv_cand in_scope
        -- Include type variables in the binder's type
        --      (not just Ids; coercion variables too!)
    `unionVarSet` fv fv_cand (in_scope `extendVarSet` bndr)
-- Bring several binders into scope; foldr so the first binder is
-- outermost.
addBndrs :: [CoreBndr] -> FV -> FV
addBndrs bndrs fv = foldr addBndr fv bndrs
-- The core worker: free variables of an expression, one equation per
-- Core constructor. Binders are brought into scope with addBndr(s).
expr_fvs :: CoreExpr -> FV
expr_fvs (Type ty)       = someVars (tyVarsOfType ty)
expr_fvs (Coercion co)   = someVars (tyCoVarsOfCo co)
expr_fvs (Var var)       = oneVar var
expr_fvs (Lit _)         = noVars
expr_fvs (Tick t expr)   = tickish_fvs t `union` expr_fvs expr
expr_fvs (App fun arg)   = expr_fvs fun `union` expr_fvs arg
expr_fvs (Lam bndr body) = addBndr bndr (expr_fvs body)
expr_fvs (Cast expr co)  = expr_fvs expr `union` someVars (tyCoVarsOfCo co)
expr_fvs (Case scrut bndr ty alts)
  = expr_fvs scrut `union` someVars (tyVarsOfType ty) `union` addBndr bndr
      (foldr (union . alt_fvs) noVars alts)
  where
    alt_fvs (_, bndrs, rhs) = addBndrs bndrs (expr_fvs rhs)
expr_fvs (Let (NonRec bndr rhs) body)
  = rhs_fvs (bndr, rhs) `union` addBndr bndr (expr_fvs body)
expr_fvs (Let (Rec pairs) body)
  = addBndrs (map fst pairs)
             (foldr (union . rhs_fvs) (expr_fvs body) pairs)
---------
rhs_fvs :: (Id,CoreExpr) -> FV
rhs_fvs (bndr, rhs) = expr_fvs rhs `union`
                      someVars (bndrRuleAndUnfoldingVars bndr)
        -- Treat any RULES as extra RHSs of the binding
---------
exprs_fvs :: [CoreExpr] -> FV
exprs_fvs exprs = foldr (union . expr_fvs) noVars exprs
-- Only breakpoint ticks mention variables (the ids they capture).
tickish_fvs :: Tickish Id -> FV
tickish_fvs (Breakpoint _ ids) = someVars (mkVarSet ids)
tickish_fvs _ = noVars
{-
************************************************************************
* *
\section{Free names}
* *
************************************************************************
-}
-- | ruleLhsOrphNames is used when deciding whether
-- a rule is an orphan. In particular, suppose that T is defined in this
-- module; we want to avoid declaring that a rule like:
--
-- > fromIntegral T = fromIntegral_T
--
-- is an orphan. Of course it isn't, and declaring it an orphan would
-- make the whole module an orphan module, which is bad.
ruleLhsOrphNames :: CoreRule -> NameSet
ruleLhsOrphNames (BuiltinRule { ru_fn = fn }) = unitNameSet fn
ruleLhsOrphNames (Rule { ru_fn = fn, ru_args = tpl_args })
  = extendNameSet (exprsOrphNames tpl_args) fn
        -- No need to delete bndrs, because
        -- exprsOrphNames finds only External names
-- | Finds the free /external/ names of an expression, notably
-- including the names of type constructors (which of course do not show
-- up in 'exprFreeVars').
exprOrphNames :: CoreExpr -> NameSet
-- There's no need to delete local binders, because they will all
-- be /internal/ names.
exprOrphNames e
  = go e
  where
    go (Var v)
      | isExternalName n    = unitNameSet n
      | otherwise           = emptyNameSet
      where n = idName v
    go (Lit _)              = emptyNameSet
    go (Type ty)            = orphNamesOfType ty        -- Don't need free tyvars
    go (Coercion co)        = orphNamesOfCo co
    go (App e1 e2)          = go e1 `unionNameSet` go e2
    -- Lambda binders are internal, but deleting is harmless and cheap.
    go (Lam v e)            = go e `delFromNameSet` idName v
    go (Tick _ e)           = go e
    go (Cast e co)          = go e `unionNameSet` orphNamesOfCo co
    go (Let (NonRec _ r) e) = go e `unionNameSet` go r
    go (Let (Rec prs) e)    = exprsOrphNames (map snd prs) `unionNameSet` go e
    go (Case e _ ty as)     = go e `unionNameSet` orphNamesOfType ty
                              `unionNameSet` unionNameSets (map go_alt as)
    go_alt (_,_,r)          = go r
-- | Finds the free /external/ names of several expressions: see 'exprOrphNames' for details
exprsOrphNames :: [CoreExpr] -> NameSet
exprsOrphNames es = foldr (unionNameSet . exprOrphNames) emptyNameSet es
{-
************************************************************************
* *
\section[freevars-everywhere]{Attaching free variables to every sub-expression}
* *
************************************************************************
-}
-- | Those variables free in the right hand side of a rule
ruleRhsFreeVars :: CoreRule -> VarSet
ruleRhsFreeVars (BuiltinRule {}) = noFVs
ruleRhsFreeVars (Rule { ru_fn = _, ru_bndrs = bndrs, ru_rhs = rhs })
  = addBndrs bndrs (expr_fvs rhs) isLocalVar emptyVarSet
      -- See Note [Rule free var hack]
-- | Those variables free in the both the left right hand sides of a rule
ruleFreeVars :: CoreRule -> VarSet
ruleFreeVars (BuiltinRule {}) = noFVs
ruleFreeVars (Rule { ru_fn = _, ru_bndrs = bndrs, ru_rhs = rhs, ru_args = args })
  = addBndrs bndrs (exprs_fvs (rhs:args)) isLocalVar emptyVarSet
      -- See Note [Rule free var hack]
idRuleRhsVars :: (Activation -> Bool) -> Id -> VarSet
-- Just the variables free on the *rhs* of a rule
-- Only rules whose activation satisfies the predicate contribute.
idRuleRhsVars is_active id
  = mapUnionVarSet get_fvs (idCoreRules id)
  where
    get_fvs (Rule { ru_fn = fn, ru_bndrs = bndrs
                  , ru_rhs = rhs, ru_act = act })
      | is_active act
        -- See Note [Finding rule RHS free vars] in OccAnal.lhs
      = delFromUFM fvs fn        -- Note [Rule free var hack]
      where
        fvs = addBndrs bndrs (expr_fvs rhs) isLocalVar emptyVarSet
    get_fvs _ = noFVs
-- | Those variables free in the right hand side of several rules
rulesFreeVars :: [CoreRule] -> VarSet
rulesFreeVars rules = mapUnionVarSet ruleFreeVars rules
ruleLhsFreeIds :: CoreRule -> VarSet
-- ^ This finds all locally-defined free Ids on the left hand side of a rule
ruleLhsFreeIds (BuiltinRule {}) = noFVs
ruleLhsFreeIds (Rule { ru_bndrs = bndrs, ru_args = args })
  = addBndrs bndrs (exprs_fvs args) isLocalId emptyVarSet
{-
Note [Rule free var hack] (Not a hack any more)
~~~~~~~~~~~~~~~~~~~~~~~~~
We used not to include the Id in its own rhs free-var set.
Otherwise the occurrence analyser makes bindings recursive:
f x y = x+y
RULE: f (f x y) z ==> f x (f y z)
However, the occurrence analyser distinguishes "non-rule loop breakers"
from "rule-only loop breakers" (see BasicTypes.OccInfo). So it will
put this 'f' in a Rec block, but will mark the binding as a non-rule loop
breaker, which is perfectly inlinable.
-}
-- |Free variables of a vectorisation declaration
vectsFreeVars :: [CoreVect] -> VarSet
vectsFreeVars = mapUnionVarSet vectFreeVars
  where
    -- Only 'Vect' carries an expression; the other declaration forms
    -- contribute no value-level free variables.
    vectFreeVars (Vect _ rhs) = expr_fvs rhs isLocalId emptyVarSet
    vectFreeVars (NoVect _) = noFVs
    vectFreeVars (VectType _ _ _) = noFVs
    vectFreeVars (VectClass _) = noFVs
    vectFreeVars (VectInst _) = noFVs
    -- this function is only concerned with values, not types
{-
************************************************************************
* *
\section[freevars-everywhere]{Attaching free variables to every sub-expression}
* *
************************************************************************
The free variable pass annotates every node in the expression with its
NON-GLOBAL free variables and type variables.
-}
-- | Every node in a binding group annotated with its
-- (non-global) free variables, both Ids and TyVars
type CoreBindWithFVs = AnnBind Id VarSet
-- | Every node in an expression annotated with its
-- (non-global) free variables, both Ids and TyVars
type CoreExprWithFVs = AnnExpr Id VarSet
freeVarsOf :: CoreExprWithFVs -> IdSet
-- ^ Inverse function to 'freeVars'
-- Simply projects the annotation off the annotated expression.
freeVarsOf (free_vars, _) = free_vars
-- Aliases over VarSet operations, named for readability below.
noFVs :: VarSet
noFVs = emptyVarSet
aFreeVar :: Var -> VarSet
aFreeVar = unitVarSet
unionFVs :: VarSet -> VarSet -> VarSet
unionFVs = unionVarSet
-- Delete several binders; foldr so it composes with delBinderFV below.
delBindersFV :: [Var] -> VarSet -> VarSet
delBindersFV bs fvs = foldr delBinderFV fvs bs
delBinderFV :: Var -> VarSet -> VarSet
-- This way round, so we can do it multiple times using foldr
-- (b `delBinderFV` s) removes the binder b from the free variable set s,
-- but *adds* to s
--
--      the free variables of b's type
--
-- This is really important for some lambdas:
--      In (\x::a -> x) the only mention of "a" is in the binder.
--
-- Also in
--      let x::a = b in ...
-- we should really note that "a" is free in this expression.
-- It'll be pinned inside the /\a by the binding for b, but
-- it seems cleaner to make sure that a is in the free-var set
-- when it is mentioned.
--
-- This also shows up in recursive bindings.  Consider:
--      /\a -> letrec x::a = x in E
-- Now, there are no explicit free type variables in the RHS of x,
-- but nevertheless "a" is free in its definition.  So we add in
-- the free tyvars of the types of the binders, and include these in the
-- free vars of the group, attached to the top level of each RHS.
--
-- This actually happened in the defn of errorIO in IOBase.lhs:
--      errorIO (ST io) = case (errorIO# io) of
--                          _ -> bottom
--                        where
--                          bottom = bottom -- Never evaluated
delBinderFV b s = (s `delVarSet` b) `unionFVs` varTypeTyVars b
        -- Include coercion variables too!
varTypeTyVars :: Var -> TyVarSet
-- Find the type/kind variables free in the type of the id/tyvar
varTypeTyVars var = tyVarsOfType (varType var)
idFreeVars :: Id -> VarSet
-- Type variables, rule variables, and inline variables
idFreeVars id = ASSERT( isId id)
                varTypeTyVars id `unionVarSet`
                idRuleAndUnfoldingVars id
bndrRuleAndUnfoldingVars ::Var -> VarSet
-- A 'let' can bind a type variable, and idRuleVars assumes
-- it's seeing an Id. This function tests first.
bndrRuleAndUnfoldingVars v | isTyVar v = emptyVarSet
                           | otherwise = idRuleAndUnfoldingVars v
idRuleAndUnfoldingVars :: Id -> VarSet
-- Free variables from both the RULES and the stable unfolding of an Id
idRuleAndUnfoldingVars id = ASSERT( isId id)
                            idRuleVars id `unionVarSet`
                            idUnfoldingVars id
idRuleVars ::Id -> VarSet  -- Does *not* include CoreUnfolding vars
idRuleVars id = ASSERT( isId id) specInfoFreeVars (idSpecialisation id)
idUnfoldingVars :: Id -> VarSet
-- Produce free vars for an unfolding, but NOT for an ordinary
-- (non-inline) unfolding, since it is a dup of the rhs
-- and we'll get exponential behaviour if we look at both unf and rhs!
-- But do look at the *real* unfolding, even for loop breakers, else
-- we might get out-of-scope variables
idUnfoldingVars id = stableUnfoldingVars (realIdUnfolding id) `orElse` emptyVarSet
stableUnfoldingVars :: Unfolding -> Maybe VarSet
-- 'Nothing' for non-stable unfoldings (see idUnfoldingVars above)
stableUnfoldingVars unf
  = case unf of
      CoreUnfolding { uf_tmpl = rhs, uf_src = src }
        | isStableSource src
        -> Just (exprFreeVars rhs)
      DFunUnfolding { df_bndrs = bndrs, df_args = args }
        -> Just (exprs_fvs args isLocalVar (mkVarSet bndrs))
           -- DFuns are top level, so no fvs from types of bndrs
      _other -> Nothing
{-
************************************************************************
* *
\subsection{Free variables (and types)}
* *
************************************************************************
-}
freeVars :: CoreExpr -> CoreExprWithFVs
-- ^ Annotate a 'CoreExpr' with its (non-global) free type and value variables at every tree node
-- Each case returns a pair (free vars of this node, annotated node);
-- sub-expressions are annotated first and their fv-sets combined upwards.
freeVars (Var v)
  = (fvs, AnnVar v)
  where
    -- ToDo: insert motivating example for why we *need*
    -- to include the idSpecVars in the FV list.
    -- Actually [June 98] I don't think it's necessary
    -- fvs = fvs_v `unionVarSet` idSpecVars v
    fvs | isLocalVar v = aFreeVar v
        | otherwise = noFVs
freeVars (Lit lit) = (noFVs, AnnLit lit)
freeVars (Lam b body)
  = (b `delBinderFV` freeVarsOf body', AnnLam b body')
  where
    body' = freeVars body
freeVars (App fun arg)
  = (freeVarsOf fun2 `unionFVs` freeVarsOf arg2, AnnApp fun2 arg2)
  where
    fun2 = freeVars fun
    arg2 = freeVars arg
-- Case: the binder scopes over the alternatives only; the scrutinee's
-- free vars and the result type's tyvars are added on top.
freeVars (Case scrut bndr ty alts)
  = ((bndr `delBinderFV` alts_fvs) `unionFVs` freeVarsOf scrut2 `unionFVs` tyVarsOfType ty,
     AnnCase scrut2 bndr ty alts2)
  where
    scrut2 = freeVars scrut
    (alts_fvs_s, alts2) = mapAndUnzip fv_alt alts
    alts_fvs = foldr unionFVs noFVs alts_fvs_s
    fv_alt (con,args,rhs) = (delBindersFV args (freeVarsOf rhs2),
                             (con, args, rhs2))
      where
        rhs2 = freeVars rhs
freeVars (Let (NonRec binder rhs) body)
  = (freeVarsOf rhs2
     `unionFVs` body_fvs
     `unionFVs` bndrRuleAndUnfoldingVars binder,
       -- Remember any rules; cf rhs_fvs above
     AnnLet (AnnNonRec binder rhs2) body2)
  where
    rhs2 = freeVars rhs
    body2 = freeVars body
    body_fvs = binder `delBinderFV` freeVarsOf body2
freeVars (Let (Rec binds) body)
  = (delBindersFV binders all_fvs,
     AnnLet (AnnRec (binders `zip` rhss2)) body2)
  where
    (binders, rhss) = unzip binds
    rhss2 = map freeVars rhss
    rhs_body_fvs = foldr (unionFVs . freeVarsOf) body_fvs rhss2
    all_fvs = foldr (unionFVs . idRuleAndUnfoldingVars) rhs_body_fvs binders
      -- The "delBinderFV" happens after adding the idSpecVars,
      -- since the latter may add some of the binders as fvs
    body2 = freeVars body
    body_fvs = freeVarsOf body2
freeVars (Cast expr co)
  = (freeVarsOf expr2 `unionFVs` cfvs, AnnCast expr2 (cfvs, co))
  where
    expr2 = freeVars expr
    cfvs = tyCoVarsOfCo co
freeVars (Tick tickish expr)
  = (tickishFVs tickish `unionFVs` freeVarsOf expr2, AnnTick tickish expr2)
  where
    expr2 = freeVars expr
    -- Breakpoints mention the ids they capture; other ticks bind nothing
    tickishFVs (Breakpoint _ ids) = mkVarSet ids
    tickishFVs _ = emptyVarSet
freeVars (Type ty) = (tyVarsOfType ty, AnnType ty)
freeVars (Coercion co) = (tyCoVarsOfCo co, AnnCoercion co)
|
green-haskell/ghc
|
compiler/coreSyn/CoreFVs.hs
|
bsd-3-clause
| 20,289
| 0
| 12
| 5,365
| 3,515
| 1,906
| 1,609
| -1
| -1
|
-- | Produces HTML related to the "Votes:" section in the package page.
-- | Should only be used via the Votes feature (see renderVotesHtml)
module Distribution.Server.Features.Votes.Render
( renderVotesAnon
, voteConfirmationPage
, alreadyVotedPage
) where
import Distribution.Package
import Distribution.Server.Pages.Template
import qualified Distribution.Server.Framework.ResponseContentTypes as Resource
import Text.XHtml.Strict
-- When the user is not authenticated/logged in, simply
-- display the number of votes the package has and a link
-- to add a vote (which prompts for authentication).
-- Returns the section title ("Votes") paired with the rendered HTML.
renderVotesAnon :: Int -> PackageName -> (String, Html)
renderVotesAnon numVotes pkgname =
  ( "Votes",
    -- POST with hidden "_method" = PUT: method-override form targeting
    -- the package's /votes endpoint.
    form ! [ action $ "/package/" ++ unPackageName pkgname ++ "/votes"
           , method "POST" ]
      << thespan <<
        [ toHtml $ show numVotes ++ " "
        , toHtml $ ("[" +++
            hidden "_method" "PUT" +++
            input ! [ thetype "submit"
                    , value "Vote for this package"
                    , theclass "text-button" ]
            +++ "]")
        ]
  )
-- A page that confirms a package was successfully voted for and
-- provides a link back to the package page.
voteConfirmationPage :: PackageName -> String -> Resource.XHtml
voteConfirmationPage pkgname message =
  Resource.XHtml $ hackagePage "Vote for a Package"
    [ h3 << message
    , br
    , anchor ! [ href $ "/package/" ++ unPackageName pkgname ] << "Return"
    ]
-- Shown when a user has already voted for a package.
-- Gives an option to remove the vote, and provides a link
-- back to the package page.
alreadyVotedPage :: PackageName -> Resource.XHtml
alreadyVotedPage pkgname =
  Resource.XHtml $ hackagePage "Vote for a Package"
    [ h3 << "You have already voted for this package."
      -- POST with hidden "_method" = DELETE: method-override form that
      -- retracts the vote.
    , form ! [ action $ "/package/" ++ unPackageName pkgname ++ "/votes"
             , method "POST" ]
        << thespan <<
          ("[" +++
            hidden "_method" "DELETE" +++
            input ! [ thetype "submit"
                    , value "Remove your vote from this package"
                    , theclass "text-button" ]
            +++ "]")
    , br
    , anchor ! [ href $ "/package/" ++ unPackageName pkgname ] << "Return"
    ]
|
ocharles/hackage-server
|
Distribution/Server/Features/Votes/Render.hs
|
bsd-3-clause
| 2,240
| 0
| 14
| 575
| 409
| 226
| 183
| 42
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor.Product
-- Copyright : (c) Ross Paterson 2010
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Products, lifted to functors.
--
-- @since 4.9.0.0
-----------------------------------------------------------------------------
module Data.Functor.Product (
Product(..),
) where
import Control.Applicative
import Control.Monad (MonadPlus(..))
import Control.Monad.Fix (MonadFix(..))
import Control.Monad.Zip (MonadZip(mzipWith))
import Data.Data (Data)
import Data.Foldable (Foldable(foldMap))
import Data.Functor.Classes
import Data.Monoid (mappend)
import Data.Traversable (Traversable(traverse))
import GHC.Generics (Generic, Generic1)
import Text.Read (Read(..), readListDefault, readListPrecDefault)
-- | Lifted product of functors.
data Product f g a = Pair (f a) (g a)
  deriving (Data, Generic, Generic1)
-- | @since 4.9.0.0
instance (Eq1 f, Eq1 g) => Eq1 (Product f g) where
    liftEq eq (Pair x1 y1) (Pair x2 y2) = liftEq eq x1 x2 && liftEq eq y1 y2
-- | @since 4.9.0.0
instance (Ord1 f, Ord1 g) => Ord1 (Product f g) where
    liftCompare comp (Pair x1 y1) (Pair x2 y2) =
        liftCompare comp x1 x2 `mappend` liftCompare comp y1 y2
-- | @since 4.9.0.0
instance (Read1 f, Read1 g) => Read1 (Product f g) where
    liftReadPrec rp rl = readData $
        readBinaryWith (liftReadPrec rp rl) (liftReadPrec rp rl) "Pair" Pair
    liftReadListPrec = liftReadListPrecDefault
    liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance (Show1 f, Show1 g) => Show1 (Product f g) where
    liftShowsPrec sp sl d (Pair x y) =
        showsBinaryWith (liftShowsPrec sp sl) (liftShowsPrec sp sl) "Pair" d x y
-- The Eq/Ord/Read/Show instances below simply delegate to their
-- lifted (*1) counterparts.
-- | @since 4.9.0.0
instance (Eq1 f, Eq1 g, Eq a) => Eq (Product f g a)
    where (==) = eq1
-- | @since 4.9.0.0
instance (Ord1 f, Ord1 g, Ord a) => Ord (Product f g a) where
    compare = compare1
-- | @since 4.9.0.0
instance (Read1 f, Read1 g, Read a) => Read (Product f g a) where
    readPrec = readPrec1
    readListPrec = readListPrecDefault
    readList = readListDefault
-- | @since 4.9.0.0
instance (Show1 f, Show1 g, Show a) => Show (Product f g a) where
    showsPrec = showsPrec1
-- | @since 4.9.0.0
instance (Functor f, Functor g) => Functor (Product f g) where
    fmap f (Pair x y) = Pair (fmap f x) (fmap f y)
-- | @since 4.9.0.0
instance (Foldable f, Foldable g) => Foldable (Product f g) where
    foldMap f (Pair x y) = foldMap f x `mappend` foldMap f y
-- | @since 4.9.0.0
instance (Traversable f, Traversable g) => Traversable (Product f g) where
    traverse f (Pair x y) = liftA2 Pair (traverse f x) (traverse f y)
-- | @since 4.9.0.0
instance (Applicative f, Applicative g) => Applicative (Product f g) where
    pure x = Pair (pure x) (pure x)
    Pair f g <*> Pair x y = Pair (f <*> x) (g <*> y)
    liftA2 f (Pair a b) (Pair x y) = Pair (liftA2 f a x) (liftA2 f b y)
-- | @since 4.9.0.0
instance (Alternative f, Alternative g) => Alternative (Product f g) where
    empty = Pair empty empty
    Pair x1 y1 <|> Pair x2 y2 = Pair (x1 <|> x2) (y1 <|> y2)
-- | @since 4.9.0.0
-- Componentwise bind: each side of the pair only ever sees its own
-- half of the result, projected out with fstP/sndP.
instance (Monad f, Monad g) => Monad (Product f g) where
    Pair m n >>= f = Pair (m >>= fstP . f) (n >>= sndP . f)
      where
        fstP (Pair a _) = a
        sndP (Pair _ b) = b
-- | @since 4.9.0.0
instance (MonadPlus f, MonadPlus g) => MonadPlus (Product f g) where
    mzero = Pair mzero mzero
    Pair x1 y1 `mplus` Pair x2 y2 = Pair (x1 `mplus` x2) (y1 `mplus` y2)
-- | @since 4.9.0.0
instance (MonadFix f, MonadFix g) => MonadFix (Product f g) where
    mfix f = Pair (mfix (fstP . f)) (mfix (sndP . f))
      where
        fstP (Pair a _) = a
        sndP (Pair _ b) = b
-- | @since 4.9.0.0
instance (MonadZip f, MonadZip g) => MonadZip (Product f g) where
    mzipWith f (Pair x1 y1) (Pair x2 y2) = Pair (mzipWith f x1 x2) (mzipWith f y1 y2)
|
rahulmutt/ghcvm
|
libraries/base/Product.hs
|
bsd-3-clause
| 4,150
| 0
| 10
| 904
| 1,556
| 830
| 726
| 68
| 0
|
-- Intermission: Exercises
-- 1. foldr (*) 1 [1..5] will return the same result as which of the following:
-- a) flip (*) 1 [1..5]
-- b) foldl (flip (*)) 1 [1..5]
-- c) foldl (*) 1 [1..5]
-- c
-- 2. Write out the evaluation steps for foldl (flip (*)) 1 [1..3]
-- foldl (flip (*)) 1 [1,2,3]
-- = foldl (flip (*)) (flip (*) 1 1) [2,3]
-- = foldl (flip (*)) (flip (*) (flip (*) 1 1) 2) [3]
-- = foldl (flip (*)) (flip (*) (flip (*) (flip (*) 1 1) 2) 3) []
-- = flip (*) (flip (*) (flip (*) 1 1) 2) 3
-- = 3 * (2 * (1 * 1))
-- = 6
-- 3. One difference between foldr and foldl is:
-- a) foldr, but not foldl, traverses the spine of a list from right to left
-- b) foldr, but not foldl, always forces the rest of the fold
-- c) foldr, but not foldl, associates to the right
-- d) foldr, but not foldl, is recursive
-- c
-- 4. Folds are catamorphisms, which means they are generally used to
-- a) reduce structure
-- b) expand structure
-- c) render you catatonic
-- d) generate infinite data structures
-- a
-- 5. The following are simple folds very similar to what you’ve already seen,
-- but each has at least one error. Please fix them and test in your REPL:
-- a) foldr (++) ["woot", "WOOT", "woot"]
-- foldr (++) "" ["woot", "WOOT", "woot"]
-- b) foldr max [] "fear is the little death"
-- foldr max 'a' "fear is the little death"
-- c) foldr and True [False, True]
-- foldr (&&) True [False, True]
-- d) This one is more subtle than the previous. Can it ever return a different
-- answer? foldr (||) True [False, True]
-- foldr (||) False [False, True]
-- e) foldl (++) "" $ map show [1..5]
-- f) foldr const 'a' [1..5]
-- foldr const 0 [1..5]
-- g) foldr const 0 "tacos"
-- foldr const '0' "tacos"
-- h) foldl (flip const) 0 "burritos"
-- foldl (flip const) '0' "burritos"
-- i) foldl (flip const) 'z' [1..5]
-- foldl (flip const) 0 [1..5]
|
diminishedprime/.org
|
reading-list/haskell_programming_from_first_principles/10_05.hs
|
mit
| 1,601
| 0
| 2
| 326
| 42
| 41
| 1
| 1
| 0
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module Z80.Assembler
( Z80
, Z80ASM
, ASMBlock (..)
, org
, code
, Bytes (..)
, db
, equ
, label
, labelled
, withLabel
, end
, beginExecution
) where
import Data.Word
import qualified Data.ByteString as BS
import Data.ByteString (ByteString)
import Control.Monad.RWS
import Data.Maybe
import Control.Applicative
import Data.Traversable (traverse)
import Prelude
import Z80.Operands
-- | Assembler state threaded through a 'Z80' computation: the current
-- output location and the (optional) execution entry point.
data ASMState
  = ASMState
  { loc :: Location           -- ^ address at which the next byte is emitted
  , entry :: Maybe Location   -- ^ set once by 'beginExecution'
  }
-- | The assembler monad: an RWS computation whose writer accumulates
-- the emitted bytes while 'ASMState' tracks the current location.
newtype Z80 a = Z80 (RWS () ByteString ASMState a)
  deriving (Functor, Applicative, Monad, MonadFix)
-- | An assembler action with no interesting result.
type Z80ASM = Z80 ()
-- | A fully assembled block.
data ASMBlock
  = ASMBlock
  { asmOrg :: Location      -- ^ address the block was assembled at
  , asmEntry :: Location    -- ^ address execution should start from
  , asmData :: ByteString   -- ^ the assembled bytes
  } deriving (Eq, Show)
-- | Advance the current output location by @x@ bytes.
incrementLoc :: Location -> ASMState -> ASMState
incrementLoc x st = st { loc = loc st + x }
-- | Emit a list of raw bytes and advance the location accordingly.
code :: [Word8] -> Z80ASM
code bytes = Z80 $ do
  tell $ BS.pack bytes
  modify (incrementLoc . fromIntegral $ length bytes)
-- | Values that can be emitted verbatim via 'db'.
class Bytes a where
  defb :: a -> Z80ASM
instance Bytes ByteString where
  defb = defByteString
instance (b ~ Word8) => Bytes [b] where
  defb = defByteString . BS.pack
-- | Define bytes (the classic assembler @db@ directive).
db :: Bytes a => a -> Z80ASM
db = defb
-- | Emit a 'ByteString' verbatim and advance the location.
defByteString :: ByteString -> Z80ASM
defByteString bs = Z80 $ do
  tell bs
  modify (incrementLoc . fromIntegral $ BS.length bs)
-- | The current output location, usable as a jump target.
label :: Z80 Location
label = loc <$> Z80 get
-- | Run @asm@ and return the location of its first byte.
labelled :: Z80 a -> Z80 Location
labelled asm = do
  l <- label
  asm >> return l
-- | Run @asm@ with its own start address passed in (handy for loops).
withLabel :: (Location -> Z80 a) -> Z80 a
withLabel asm = do
  l <- label
  asm l
-- | No-op terminator, for readability at the end of a program.
end :: Z80ASM
end = return ()
-- | Mark the current location as the execution start point.
-- Calling this twice is a programmer error and raises 'error'.
beginExecution :: Z80ASM
beginExecution = do
  l <- label
  Z80 . modify $ setEntry l
  where setEntry l st@(ASMState _ Nothing) = st { entry = Just l }
        setEntry l st@(ASMState _ (Just e)) =
          error $ "Cannot set execution start point twice. First start point: " ++ show e ++
                  " This start point: " ++ show l
-- | Assemble a program at the given origin. The entry point defaults
-- to the origin unless 'beginExecution' was used inside the program.
org :: Location -> Z80ASM -> ASMBlock
org addr (Z80 mc) = ASMBlock { asmOrg = addr,
                               asmEntry = fromMaybe addr $ entry finalState,
                               asmData = asm }
  where ((), finalState, asm) = runRWS mc () (ASMState addr Nothing)
-- | Name a constant (the classic assembler @equ@ directive).
equ :: a -> Z80 a
equ = return
|
dpwright/z80
|
src/Z80/Assembler.hs
|
mit
| 2,332
| 0
| 12
| 587
| 802
| 430
| 372
| 84
| 2
|
{-# LANGUAGE ExistentialQuantification, RankNTypes #-}
module Control.Monad.Hoist where
import Control.Monad.Trans
-- | Monad transformers whose base monad can be swapped out by applying
-- a natural transformation (@forall x. m x -> n x@) underneath the
-- transformer's own structure, leaving that structure intact.
class (MonadTrans t) => MonadHoist t where
  hoist :: (Monad m, Monad n) => (forall x. m x -> n x) -> t m a -> t n a
|
DanielWaterworth/siege
|
src/Control/Monad/Hoist.hs
|
mit
| 236
| 0
| 12
| 45
| 88
| 47
| 41
| 5
| 0
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLOListElement
(js_setCompact, setCompact, js_getCompact, getCompact, js_setStart,
setStart, js_getStart, getStart, js_setReversed, setReversed,
js_getReversed, getReversed, js_setType, setType, js_getType,
getType, HTMLOListElement, castToHTMLOListElement,
gTypeHTMLOListElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- NOTE: these bindings appear machine-generated (ghcjs-dom "Generated"
-- module): each DOM property gets a raw js_* foreign import plus a
-- typed MonadIO wrapper. Prefer regenerating over hand-editing.
foreign import javascript unsafe "$1[\"compact\"] = $2;"
        js_setCompact :: JSRef HTMLOListElement -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.compact Mozilla HTMLOListElement.compact documentation>
setCompact :: (MonadIO m) => HTMLOListElement -> Bool -> m ()
setCompact self val
  = liftIO (js_setCompact (unHTMLOListElement self) val)
foreign import javascript unsafe "($1[\"compact\"] ? 1 : 0)"
        js_getCompact :: JSRef HTMLOListElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.compact Mozilla HTMLOListElement.compact documentation>
getCompact :: (MonadIO m) => HTMLOListElement -> m Bool
getCompact self = liftIO (js_getCompact (unHTMLOListElement self))
foreign import javascript unsafe "$1[\"start\"] = $2;" js_setStart
        :: JSRef HTMLOListElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.start Mozilla HTMLOListElement.start documentation>
setStart :: (MonadIO m) => HTMLOListElement -> Int -> m ()
setStart self val
  = liftIO (js_setStart (unHTMLOListElement self) val)
foreign import javascript unsafe "$1[\"start\"]" js_getStart ::
        JSRef HTMLOListElement -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.start Mozilla HTMLOListElement.start documentation>
getStart :: (MonadIO m) => HTMLOListElement -> m Int
getStart self = liftIO (js_getStart (unHTMLOListElement self))
foreign import javascript unsafe "$1[\"reversed\"] = $2;"
        js_setReversed :: JSRef HTMLOListElement -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.reversed Mozilla HTMLOListElement.reversed documentation>
setReversed :: (MonadIO m) => HTMLOListElement -> Bool -> m ()
setReversed self val
  = liftIO (js_setReversed (unHTMLOListElement self) val)
foreign import javascript unsafe "($1[\"reversed\"] ? 1 : 0)"
        js_getReversed :: JSRef HTMLOListElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.reversed Mozilla HTMLOListElement.reversed documentation>
getReversed :: (MonadIO m) => HTMLOListElement -> m Bool
getReversed self
  = liftIO (js_getReversed (unHTMLOListElement self))
foreign import javascript unsafe "$1[\"type\"] = $2;" js_setType ::
        JSRef HTMLOListElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.type Mozilla HTMLOListElement.type documentation>
setType ::
        (MonadIO m, ToJSString val) => HTMLOListElement -> val -> m ()
setType self val
  = liftIO (js_setType (unHTMLOListElement self) (toJSString val))
foreign import javascript unsafe "$1[\"type\"]" js_getType ::
        JSRef HTMLOListElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOListElement.type Mozilla HTMLOListElement.type documentation>
getType ::
        (MonadIO m, FromJSString result) => HTMLOListElement -> m result
getType self
  = liftIO (fromJSString <$> (js_getType (unHTMLOListElement self)))
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/HTMLOListElement.hs
|
mit
| 4,272
| 56
| 11
| 585
| 972
| 546
| 426
| 61
| 1
|
module Parse.Thread
(
getThreadFromHtml
) where
import Types (Thread (Thread), User (User))
import Text.HTML.TagSoup (Tag(TagOpen), fromAttrib, (~/=))
import Data.ByteString.Char8 (ByteString, unpack)
import qualified Data.ByteString.Char8 as BS (words, drop, length, dropWhile, takeWhile)
import Text.Regex.Posix ((=~))
import Text.Read (readMaybe)
import Data.Char (isDigit)
import Parse.User
import Debug.Trace (trace)
-- | Assemble a 'Thread' from a parsed page. Both the thread id and the
-- forum id must be extractable, otherwise the result is 'Nothing'; the
-- name and user come from the sibling extractors ('getThreadNameFromHtml'
-- and Parse.User's getThreadUserFromHtml).
getThreadFromHtml :: [Tag ByteString] -> Maybe Thread
getThreadFromHtml html = do
  threadId <- getThreadIdFromHtml html
  forumId <- getForumIdFromHtml html
  let threadName = getThreadNameFromHtml html
  let user = getThreadUserFromHtml html
  return (Thread threadId forumId threadName user)
-- | Extract the forum id from the first word of the @<body>@ tag's
-- @class@ attribute, after dropping a 4-character prefix and reading
-- the remainder as an 'Int' (prefix semantics inherited from the
-- original pipeline -- TODO confirm against real page markup).
--
-- Previous implementation used 'head' and 'read', which crash when the
-- page has no @<body>@ tag, no class attribute, or a non-numeric
-- suffix; this version returns 'Nothing' in all of those cases while
-- behaving identically on well-formed input.
getForumIdFromHtml :: [Tag ByteString] -> Maybe Int
getForumIdFromHtml html =
  case dropWhile (~/= ("<body>" :: String)) html of
    [] -> Nothing  -- no <body> tag in the document
    (bodyTag:_) ->
      case BS.words (fromAttrib ("class" :: ByteString) bodyTag) of
        [] -> Nothing  -- <body> has no class attribute (or it is empty)
        (cls:_) -> readMaybe (unpack (BS.drop 4 cls))
-- | The thread id: locate the page's canonical URL in its meta tags,
-- then parse the numeric id out of that URL.
getThreadIdFromHtml :: [Tag ByteString] -> Maybe Int
getThreadIdFromHtml html = getThreadUrlFromHtml html >>= getThreadIdFromUrl
-- | The page's canonical URL, read from the @content@ attribute of the
-- @og:url@ meta tag (if present).
getThreadUrlFromHtml :: [Tag ByteString] -> Maybe ByteString
getThreadUrlFromHtml html = fromAttrib "content" <$> getMetaThreadUrlTag html
-- | Find the @og:url@ meta tag, logging via 'trace' when it is absent.
getMetaThreadUrlTag :: [Tag ByteString] -> Maybe (Tag ByteString)
getMetaThreadUrlTag html = case dropWhile (~/= metaTag) html of
  [] -> trace "Unable to find meta tag with property 'og:url'" Nothing
  -- 'head' is safe here: xs is the non-empty branch of the case match
  xs -> Just (head xs)
  where metaTag = TagOpen "meta" [("property", "og:url")] :: Tag ByteString
-- | Parse the numeric thread id out of a thread URL: the digit group
-- preceding a trailing slash (regex @\\.?([0-9]+)/@). Every failure
-- mode logs via 'trace' and yields 'Nothing'.
-- NOTE(review): the inner @Just match@ deliberately shadows the outer
-- @match@ binding -- each nested case refines the previous value.
getThreadIdFromUrl :: ByteString -> Maybe Int
getThreadIdFromUrl url = case url =~ ("\\.?([0-9]+)/" :: ByteString) :: ByteString of
  "" -> trace ("Unable to find any matches in the URL: " ++ (show url)) Nothing
  match -> case fixMatch match of
    Nothing -> trace ("Incorrect number of non-number characters in match: " ++ (show match)) Nothing
    Just match -> case (readMaybe . unpack) match :: Maybe Int of
      Nothing -> trace ("Unable to convert `" ++ (show match) ++ "` to Int") Nothing
      x -> x
-- | Keep only the digit run of a regex match (dropping a leading
-- non-digit prefix and a trailing non-digit suffix); reject the match
-- when more than two characters would have to be dropped, since the
-- pattern allows at most a leading dot and a trailing slash.
fixMatch :: ByteString -> Maybe ByteString
fixMatch match
  | (BS.length $ doFix match) < (BS.length match) - 2 = Nothing
  | otherwise = Just $ doFix match
  where doFix = BS.takeWhile isDigit . BS.dropWhile (not . isDigit)
-- | The thread title, read from the @og:title@ meta tag's @content@.
-- NOTE(review): this is partial -- 'head' crashes when the page has no
-- @og:title@ tag; the preceding @take 1@ does not protect against an
-- empty list. Consider returning Maybe ByteString like the siblings.
getThreadNameFromHtml :: [Tag ByteString] -> ByteString
getThreadNameFromHtml = fromAttrib ("content" :: ByteString) . head . take 1 . dropWhile (~/= tag)
  where tag = TagOpen "meta" [("property", "og:title")] :: Tag ByteString
|
JacobLeach/xen-parsing
|
app/Parse/Thread.hs
|
mit
| 2,624
| 6
| 23
| 445
| 893
| 458
| 435
| 54
| 4
|
module Fretted.LilyPond where
-- | LilyPond version stamp emitted into every generated file.
version :: String
version = "2.18.0"

-- | Wrap @body@ between @open@ and @close@, one item per line; every
-- line is newline-terminated (same shape as 'unlines').
enclose :: String -> String -> String -> String
enclose open close body = open ++ "\n" ++ body ++ "\n" ++ close ++ "\n"

-- | Surround a string with literal double quotes.
quote :: String -> String
quote body = concat ["\"", body, "\""]

-- | Wrap a body in a LilyPond brace block.
curly :: String -> String
curly = enclose " { " " } "

-- | Wrap a body in simultaneous-music angle brackets.
arrow :: String -> String
arrow = enclose " << " " >> "

-- | Prefix a LilyPond command name with a backslash.
command :: String -> String
command cmd = '\\' : cmd

-- | Render a LilyPond assignment @lhs = rhs@.
assign :: String -> String -> String
assign lhs rhs = concat [lhs, " = ", rhs]

-- | Render a complete LilyPond file for a single voice, engraved on
-- both a traditional staff and a tab staff, with a MIDI block at the
-- given tempo (quarter notes per minute).
singleVoiceFile :: String -> Int -> String
singleVoiceFile voice tempo =
    unlines [voiceDef, score, versionStamp]
  where
    -- voice = { ... }
    voiceDef     = assign "voice" (curly voice)
    -- \score { << staves >> \layout {} \midi { \tempo 4 = n } }
    score        = command "score" ++ " " ++ curly scoreBody
    scoreBody    = unlines [bothStaves, layoutBlock, midiBlock]
    bothStaves   = arrow (unlines [trebleStaff, tabStaff])
    trebleStaff  = unwords [command "new", "Staff", curly trebleBody]
    trebleBody   = unlines [midiInstr, clefLine, command "voice"]
    midiInstr    = unwords [ command "set"
                           , assign "Staff.midiInstrument"
                                    "#\"acoustic guitar (nylon)\""
                           ]
    clefLine     = unwords [command "clef", quote "treble_8"]
    tabStaff     = unwords [command "new", "TabStaff", curly tabBody]
    tabBody      = unlines [midiInstr, command "voice"]
    layoutBlock  = command "layout" ++ curly ""
    midiBlock    = command "midi" ++ curly tempoLine
    tempoLine    = unwords [command "tempo", assign "4" (show tempo)]
    versionStamp = unwords [command "version", quote version]
|
sklam/fretted
|
Fretted/LilyPond.hs
|
mit
| 1,643
| 0
| 12
| 413
| 562
| 302
| 260
| 33
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module RouterTests where
import Control.Lens
import Control.Monad.Trans
import Control.Monad.State (execStateT)
import qualified Data.ByteString as B
import Data.Monoid ((<>))
import Web.Growler
import Web.Growler.Router
import Web.Growler.Types (GrowlerT(..))
import Network.HTTP.Types
import Network.Wai.Internal
-- A hand-built WAI request for GET /api/v1/users; the 'undefined'
-- fields are ones the router is not expected to inspect.
testRoute = Request "GET" http11 "/api/v1/users" "" [] False undefined ["api", "v1", "users"] [] (return "") undefined ChunkedBody Nothing Nothing
-- A nested-mount route: literal /api, then /:version/users, then /api,
-- responding with the text "win".
pat :: GrowlerT IO ()
pat = mount (literal "/api") $ mount "/:version/users" $ get "/api" $ (text "win")
-- Collect the routes declared by 'pat' and match the single collected
-- pattern against 'testRoute'.
-- NOTE(review): the [(_, p, _)] pattern crashes unless exactly one
-- route was collected -- acceptable in a test harness.
test = do
  [(_, p, _)] <- execStateT (fromGrowlerT pat) []
  return $ runRoutePattern p testRoute
-- A tiny demo app: a static greeting plus a parameterised one.
main = growl id defaultConfig $ do
  get "/" $ text "Hello, World!"
  get "/:name" $ do
    name <- param "name"
    text ("Hello, " <> name <> "!")
|
iand675/growler
|
test/RouterTests.hs
|
mit
| 856
| 0
| 14
| 138
| 301
| 162
| 139
| 23
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE TemplateHaskell #-}
module PureScript.Ide.Internal.Sandbox where
import Control.Concurrent.STM
import Control.Lens (over, _1, _2)
import Control.Monad.Except
import "monad-logger" Control.Monad.Logger
import Control.Monad.Reader
import qualified Data.Map as M
import qualified Data.Text as T
import Language.PureScript.Externs
import Language.PureScript.Names
import Language.PureScript.Pretty
import PureScript.Ide.CaseSplit
import PureScript.Ide.Error
import PureScript.Ide.Externs
import PureScript.Ide.State
import PureScript.Ide.Types
import PureScript.Ide.Filter
---- Testing stuff
-- | Wrap a state variable in a 'PscEnvironment' with a hard-coded
-- 'Configuration' (empty string + True; meaning of the flag not
-- visible here -- see the Configuration definition).
env ss = PscEnvironment
  {
    envStateVar = ss
  , envConfiguration = Configuration "" True
  }
-- | The concrete monad stack this sandbox runs in.
type PscM = ReaderT PscEnvironment (ExceptT PscIdeError (LoggingT IO))
-- | 'runWithExternsFiles' specialised to a single externs file.
runWithExternsFile
  :: FilePath -> PscM b -> IO (Either PscIdeError b)
runWithExternsFile fp = runWithExternsFiles [fp]
-- | Read the given externs files into a fresh server state, then run
-- the supplied action against it, logging to stdout.
runWithExternsFiles
  :: [FilePath] -> PscM b -> IO (Either PscIdeError b)
runWithExternsFiles fps f = do
  serverState <- newTVarIO emptyPscState
  runStdoutLoggingT $ runExceptT $ flip runReaderT (env serverState) $ do
    efs <- liftIO $ runExceptT $ traverse readExternFile fps
    -- NOTE(review): parse failures abort with 'error' -- fine for a
    -- throwaway sandbox, not for production code.
    _ <- either
      (const (error "parsing the externs failed"))
      (traverse insertModule)
      efs
    f
-- Convenience runners against locally checked-out fixture output.
runConway = runWithExternsFile "../conway-purescript/output/MyModule/externs.json"
runDataList = runWithExternsFile "../conway-purescript/output/Data.List/externs.json"
runPrelude = runWithExternsFile "../conway-purescript/output/Prelude/externs.json"
runEither = runWithExternsFile "../conway-purescript/output/Data.Either/externs.json"
runHalogen = runWithExternsFile "../conway-purescript/output/Halogen.Query.StateF/externs.json"
run = runWithExternsFiles [
    "../conway-purescript/output/MyModule/externs.json"
  , "../conway-purescript/output/Data.List/externs.json"
  , "../conway-purescript/output/Prelude/externs.json"
  , "../conway-purescript/output/Data.Either/externs.json"
  , "../conway-purescript/output/Halogen.Query.StateF/externs.json"
  ]
-- testing :: (PscIde m) => m ()
-- testing = do
--   (Just ef) <- M.lookup (moduleNameFromString "Data.Either") <$> getExternFiles
--   liftIO $ print $ getCtorArgs ef (ProperName "Either")
-- | Partial: crashes on 'Left'. Deliberate sandbox shortcut.
unsafeRight :: (Either a b) -> b
unsafeRight (Right b) = b
testModuleFilter = moduleFilter ["Halogen.HTML.Indexed"]
testDependencyFilter = dependencyFilter ["Halogen.HTML.Indexed"]
-- | Read an externs file and convert it, crashing on a read failure
-- (via 'unsafeRight').
runTest externsFile =
  convertExterns . unsafeRight <$> (runExceptT $ readExternFile externsFile)
runTestFilter filter pscmod =
  applyFilters [filter] [pscmod]
|
kRITZCREEK/psc-ide
|
src/PureScript/Ide/Internal/Sandbox.hs
|
mit
| 2,877
| 0
| 15
| 554
| 514
| 284
| 230
| 57
| 1
|
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
module Monad.Bracket (
BracketT,
MonadBracket(..),
runBracketT
) where
import Monad.Cont
import Monad.Try
import Control.Monad.Trans.Cont (ContT(ContT), runContT)
import Control.Monad.Trans.Class
import Control.Monad.Lift
-- | A CPS-flavoured bracket monad: continuations over a 'MonadTry'
-- base, so an acquisition's finaliser runs when the rest of the
-- computation (the continuation) completes.
type BracketT m a = (MonadTry m, Monad m) => ContT () m a
-- | Monads that can acquire a resource paired with a finaliser.
class (Monad m) => MonadBracket m where
  -- | Acquire with the first action; the second runs on the acquired
  -- value when the surrounding computation finishes.
  bracketC :: (Monad n, MonadTrans t, m ~ (t n)) => n a -> (a -> n b) -> m a
  -- | Like 'bracketC', but the finaliser ignores the acquired value.
  bracketC_ :: (Monad n, MonadTrans t, m ~ (t n)) => n a -> n b -> m a
  bracketC_ start final = bracketC start $ const final
-- | The continuation instance delegates to 'bracket' from Monad.Try.
instance (MonadTry m) => MonadBracket (ContT () m) where
  bracketC start final = ContT $ bracket start final
-- | Run a bracketed computation to completion, discarding its result.
runBracketT :: (MonadTry m) => BracketT m a -> m ()
runBracketT b = runContT b (\_ -> return ())
|
duncanburke/bracket-monad
|
Monad/Bracket.hs
|
mit
| 895
| 0
| 12
| 169
| 346
| 186
| 160
| 22
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Intro.Trustworthy
-- Copyright : (c) Daniel Mendler 2016-2017
-- License : MIT
--
-- Maintainer : mail@daniel-mendler.de
-- Stability : experimental
-- Portability : portable
--
-- Trustworthy reexports from 'GHC.Exts' and 'Debug.Trace'
--
-----------------------------------------------------------------------------
module Intro.Trustworthy (
GHC.Exts.IsList(
Item
, fromList
, toList
)
, Constraint
, HasCallStack
, trace
, traceIO
, traceId
, traceM
, traceShow
, traceShowId
, traceShowM
, Data.Hashable.Lifted.Hashable1
, Data.Hashable.Lifted.Hashable2
) where
import Control.Monad.Trans (MonadIO(liftIO))
import Data.Function ((.))
import Data.Text (Text, unpack)
import Text.Show (Show)
import qualified Debug.Trace
import qualified GHC.Exts
import qualified Data.Hashable.Lifted
import Control.Applicative (Applicative)
import Data.Kind (Constraint)
import GHC.Stack (HasCallStack)
-- | Emit the given message on the trace channel, then return the second
-- argument unchanged.
--
-- Typical use is wrapping an expression to observe its inputs:
--
-- > trace ("calling f with x = " ++ show x) (f x)
--
-- Although its type looks pure, 'trace' performs output as a side
-- effect, so keep it strictly to debugging and monitoring.
trace :: Text -> a -> a
trace msg = Debug.Trace.trace (unpack msg)
{-# WARNING trace "'trace' should be used only for debugging" #-}
-- | Emit a trace message from inside an 'Applicative' context, returning
-- unit.  Convenient in @do@ notation:
--
-- > do x <- step
-- >    traceM ("x: " <> render x)
--
-- Note that the message is produced by reducing a pure expression, so
-- under sharing a given occurrence may only print once.  In 'MonadIO'
-- code prefer 'traceIO', which sequences properly with other actions.
traceM :: Applicative m => Text -> m ()
traceM msg = Debug.Trace.traceM (unpack msg)
{-# WARNING traceM "'traceM' should be used only for debugging" #-}
-- | Variant of 'trace' that renders its first argument with 'show'
-- instead of taking a message.  Handy for dumping interesting values:
--
-- > f x y = traceShow (x, z) result
-- >   where z = ...
traceShow :: Show a => a -> b -> b
traceShow value result = Debug.Trace.traceShow value result
{-# WARNING traceShow "'traceShow' should be used only for debugging" #-}
-- | Variant of 'traceM' that renders its argument with 'show'.
--
-- > do x <- step
-- >    traceShowM x
traceShowM :: (Show a, Applicative m) => a -> m ()
traceShowM value = Debug.Trace.traceShowM value
{-# WARNING traceShowM "'traceShowM' should be used only for debugging" #-}
-- | Emit a trace message as a proper 'MonadIO' action, so the output is
-- sequenced with the surrounding I/O rather than forced by evaluation.
traceIO :: MonadIO m => Text -> m ()
traceIO msg = liftIO (Debug.Trace.traceIO (unpack msg))
{-# WARNING traceIO "'traceIO' should be used only for debugging" #-}
-- | Like 'traceShow', but the value that was printed is also the result.
traceShowId :: Show a => a -> a
traceShowId value = Debug.Trace.traceShowId value
{-# WARNING traceShowId "'traceShowId' should be used only for debugging" #-}
-- | Like 'trace', but the message itself is also the result.
traceId :: Text -> Text
traceId msg = Debug.Trace.trace (unpack msg) msg
{-# WARNING traceId "'traceId' should be used only for debugging" #-}
|
minad/intro
|
src/Intro/Trustworthy.hs
|
mit
| 4,233
| 0
| 8
| 798
| 449
| 293
| 156
| 54
| 1
|
module QuickSort where
import Data.List
-- | Classic two-way quicksort: the head is the pivot, elements strictly
-- below it go left, everything else goes right, and each side is sorted
-- recursively.
quicksort :: Ord a => [a] -> [a]
quicksort [] = []
quicksort (pivot:rest) =
  quicksort [x | x <- rest, x < pivot]
    ++ [pivot]
    ++ quicksort [x | x <- rest, x >= pivot]
|
antalsz/hs-to-coq
|
examples/quicksort/QuickSort.hs
|
mit
| 213
| 0
| 8
| 48
| 98
| 53
| 45
| 6
| 1
|
-- XMonad config by Erik Bjäreholt
-- Inspired by: http://thinkingeek.com/2011/11/21/simple-guide-configure-xmonad-dzen2-conky/
-- Imports {{{
import XMonad
import XMonad.Util.Run
import XMonad.Util.Loggers
import Data.Monoid
-- Prompt
import XMonad.Prompt
import XMonad.Prompt.RunOrRaise (runOrRaisePrompt)
import XMonad.Prompt.AppendFile (appendFilePrompt)
import System.IO
import System.Exit
import System.Directory
import System.IO.Unsafe
import Codec.Binary.UTF8.String
-- Actions
import XMonad.Actions.PhysicalScreens -- Used to order xinerama displays properly
import XMonad.Actions.Volume
import XMonad.Actions.GridSelect
import XMonad.Actions.CycleWS
import Graphics.X11.ExtraTypes.XF86
-- Hooks
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.FadeInactive
import XMonad.Hooks.EwmhDesktops
-- Layouts
import XMonad.Layout.Spacing
import XMonad.Layout.NoBorders
import XMonad.Layout.Named
import XMonad.Layout.PerWorkspace (onWorkspace, onWorkspaces)
import XMonad.Layout.IM
import XMonad.Layout.Grid
import XMonad.Layout.SimpleFloat
import XMonad.Layout.ResizableTile
import Data.Ratio ((%))
import qualified XMonad.StackSet as W
import qualified Data.Map as M
--}}}
-- Config {{{
-- Use the Super (Windows) key as the modifier for every binding below.
myModMask = mod4Mask
-- Terminal emulator launched by the mod+Return binding.
myTerminal = "konsole"
-- Plain numeric workspace names "1".."10".
-- NOTE(review): myManageHook shifts windows to "1:main", "2:web", etc.
-- and myLayoutHook keys off "1:term"/"2:web" — none of those names occur
-- in this list, so those matches look dead.  Confirm the intended names.
myWorkspaces    = map show [1..10]
myHomeDir     = "/home/erb"
-- Directory holding the .xbm icons used by the dzen segments below.
myBitmapsDir  = myHomeDir ++ "/.xmonad/dzen2"
-- Conky config for status bar ---------------------------------------------
-- Wrap a colour name in dzen's ^fg() escape; the doubled backslashes are
-- there to survive the quoting conky applies to the template string.
dzenConkyColor color = "^fg(\\\\" ++ color ++ ")"
imageBarColor = "#2070FF"
textBarColor = "#70B0FF"
sepBarColor = "#FFA050"
-- dcic & dctz, dzenConkyImageBarColor and dzenConkyTextBarColor
dcic = dzenConkyColor imageBarColor
dctc = dzenConkyColor textBarColor
dcsc = dzenConkyColor sepBarColor
sepBar = "|" -- "┃", "\x2503"
-- One bar segment: coloured icon, coloured text, then a separator.
dzenSegment image text = concat [dcic, " ^i(", myBitmapsDir, "/", image, ") ", dctc, text, " ", dcsc, sepBar]
-- True on machines exposing a battery (laptops).
-- NOTE(review): top-level unsafePerformIO — the directory is probed once
-- at an unspecified time and the result frozen for the session; it also
-- silently assumes BAT0 is the only battery.
hasBattery = unsafePerformIO $ doesDirectoryExist "/sys/class/power_supply/BAT0"
-- On the laptop the mixer lives on card 1, on the desktop on card 0.
audioController = if hasBattery then "-c 1" else "-c 0"
cpuSeg = dzenSegment "cpu.xbm" "${loadavg}"
memSeg = dzenSegment "mem.xbm" "${memperc}%"
volSeg = dzenSegment "volume.xbm" $ "${exec amixer " ++ audioController ++ " get Master | egrep -o \"[0-9]+%\" | head -1 | egrep -o \"[0-9]*\"}%"
batSeg = if hasBattery
            then dzenSegment "battery.xbm" "${battery_percent BAT0}%"
            else ""
traySeg = dzenSegment "info_01.xbm" "{ }"
clkSeg = dzenSegment "clock.xbm" "${time %Y/%m/%d} ${time %R:%S}"
-- Full conky template; 'init' drops the final character (the trailing
-- separator bar emitted by the last segment).
conkyText = init $ concat [ dcsc, "[", cpuSeg, memSeg, volSeg, batSeg, traySeg, clkSeg ]
-- Bar geometry and launch command lines ------------------------------------
-- barFont = "-*-terminus-*-*-*-*-14-*-*-*-*-*-iso10646-*"
barFont = "-*-clean-*-*-*-*-15-*-*-*-*-*-iso10646-*"
barHeight = "16"
barColor = "#303030"
wsBarStartX = "60"
wsBarStartY = "5"
wsBarWidth = "580"
barSplitX = "500"
-- Left dzen instance: workspace/layout/title info, fed by myLogHook.
myXmonadBar = concat ["dzen2 -xs '0' -x ", wsBarStartX, " -y ", wsBarStartY, " -w '", wsBarWidth, "' -h '", barHeight, "' -ta 'l' -sa 'r' -fg '#FFFFFF' -bg '", barColor, "' -fn '", barFont, "'"]
-- Right dzen instance: conky system stats (currently not spawned in main).
myStatusBar = concat ["conky -c ~/.xmonad/.conky_dzen -t '", conkyText , "' | dzen2 -xs '1' -x '", barSplitX, "' -h '", barHeight, "' -ta 'r' -bg '", barColor, "' -fg '#FFFFFF' -fn '", barFont, "'"]
-- System tray matched to the bar colour; height is barHeight minus 4px
-- (tail drops the '#' from the colour for trayer's 0x... tint syntax).
myTray = "trayer --monitor 'primary' --edge top --align right --margin 197 --distancefrom top --distance 2 --widthtype pixel --width 200 --transparent true --alpha 0 --tint 0x" ++ tail barColor ++ " --heighttype pixel --height " ++ (show $ (read barHeight :: Int)-4 :: String)
--}}}
-- Main {{{
-- | Entry point: spawn the left dzen bar and start xmonad with EWMH
-- support so pagers and application fullscreen hints are honoured.
-- The conky status bar and tray pipes are currently commented out.
main = do
    dzenLeftBar <- spawnPipe myXmonadBar
    ---dzenRightBar <- spawnPipe myStatusBar
    ---trayBar <- spawnPipe myTray
    ---hPutStrLn dzenRightBar conkyText
    xmonad $ ewmh defaultConfig {
      -- General section
        terminal = myTerminal
      , modMask = myModMask
      , logHook = myLogHook dzenLeftBar
      , layoutHook = myLayoutHook
      , handleEventHook = fullscreenEventHook
      --, handleEventHook = ewmhDesktopsEventHook <+> fullscreenEventHook
      , manageHook = manageDocks <+> myManageHook
      --, startupHook = ewmhDesktopsStartup
      -- Keyboard
      , keys = myKeys
      , mouseBindings = myMouseBindings
      -- Style and appearance
      , workspaces = myWorkspaces
      , borderWidth = myBorderWidth
      , normalBorderColor = myNormalBorderColor
      , focusedBorderColor = myFocusedBorderColor
      }
--}}}
--
-- Hooks {{{
-- ManageHook {{{
-- | Window-placement rules applied when a new window is opened.
-- NOTE(review): the doShift targets ("1:main", "2:web", ...) do not
-- appear in myWorkspaces (plain "1".."10"), so those shifts look inert
-- as configured — confirm the intended workspace names.
myManageHook :: ManageHook
myManageHook = (composeAll . concat $
    [ [resource =? r --> doIgnore | r <- myIgnoreResources ] -- ignore desktop
    , [className =? c --> doShift "1:main" | c <- myTerm ] -- move term to main
    , [className =? c --> doShift "2:web" | c <- myWebs ] -- move webs to web
    , [className =? c --> doShift "3:dev" | c <- myDevs ] -- move devs to dev
    , [className =? c --> doShift "4:chat" | c <- myChat ] -- move chat to chat
    , [className =? c --> doShift "5:music" | c <- myMusic ] -- move music to music
    , [className =? c --> doCenterFloat | c <- myFloatClasses ] -- float these classes
    , [name =? n --> doCenterFloat | n <- myFloatNames ] -- float these names
    , [isFullscreen --> myDoFullFloat ]
    ])
    where
        role = stringProperty "WM_WINDOW_ROLE"
        name = stringProperty "WM_NAME"
        -- classnames
        myTerm = ["Terminator"]
        myWebs = ["Firefox", "Google-chrome", "Chromium", "Chromium-browser"]
        myDevs = ["Sublime_text", "jetbrains-pycharm", "jetbrains-idea-ce"]
        myChat = ["Pidgin", "Buddy List"]
        myMusic = ["Rhythmbox", "Spotify"]
        -- floats
        myFloatClasses = ["Vlc", "VirtualBox", "Xmessage", "Steam", "Kalarm",
                          "XFontSel", "Downloads", "Nm-connection-editor", "Alarmclock", "Xfce4-panel"]
        myFloatNames = ["bashrun","Google Chrome Options","Chromium Options"]
        -- resources, a list of roles, not names
        myIgnoreResources = ["desktop", "desktop_window", "notify-osd", "stalonetray",
                             "trayer", "xfce4-notifyd", "xfce4-desktop"]
-- Fullscreen-float a window while still allowing focus to move to other
-- workspaces (focusDown keeps the stack navigable under the float).
myDoFullFloat :: ManageHook
myDoFullFloat = doF W.focusDown <+> doFullFloat
--}}}
-----------------------------------------------------------------------------------
-- Appearance and layout
-- myLayout = spacing 2 $ Tall 1 (3/100) (1/2)
--
-- | Per-workspace layout selection.
-- NOTE(review): the names "1:term", "2:web", "4:term2", "5:sys" do not
-- occur in myWorkspaces ("1".."10"), so every workspace currently gets
-- defaultLayout — confirm the intended names.
myLayoutHook = onWorkspaces ["1:term"] termLayout $
               onWorkspaces ["2:web"] webLayout $
               onWorkspaces ["4:term2"] termLayout $
               onWorkspaces ["5:sys"] termLayout $
               defaultLayout
-- Tall / grid / fullscreen / floating rotation; avoidStruts keeps the
-- dzen bar visible.
defaultLayout = avoidStruts $ tiled ||| Grid ||| noBorders Full ||| simpleFloat
    where
        tiled = ResizableTall 1 (2/100) (1/2) []
        -- Mirror (Tall 1 (3/100) (1/2))) |||
        -- noBorders = (named "Full" $ fullscreenFull Full)
-- Terminal workspaces start in fullscreen.
termLayout = avoidStruts $ noBorders Full ||| tiled ||| Grid
    where
        tiled = ResizableTall 1 (2/100) (1/2) []
webLayout = avoidStruts $ noBorders Full ||| tiled
    where
        tiled = ResizableTall 1 (2/100) (1/2) []
-- avoidStruts (
-- mode (master add/max) (default proportion occupied by master)
-- Tall (3/100) (1/2) |||
-- Mirror tile (3/100) (1/2)) |||
-- noBorders Full |||
-- noBorders (fullscreenFull Full)
-- Window border: 1px, black when unfocused, blue when focused.
myBorderWidth = 1
myNormalBorderColor = "#000000"
myFocusedBorderColor = "#2222bb"
----------------------------------------------------------------------------------
-- Keyboard shortcuts
-- | Keyboard bindings, keyed on (modifier mask, keysym).
--
-- Data.Map.fromList retains the LAST value for a duplicate key, so a
-- binding listed earlier for the same chord is silently dead.  A stale
-- debug binding for (modm, xK_t) (an echo of sepBar into a test file)
-- was shadowed by the later "push back into tiling" binding and has
-- been removed; effective behaviour is unchanged.
myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList $
    [
    -- launch terminal
      ((modm, xK_Return), spawn $ XMonad.terminal conf)
    -- launch dmenu
    , ((modm, xK_r ), spawn "dmenu_run")
    -- lock with slock after turning off screen
    , ((modm, xK_l ), spawn "sleep 0.5; xset dpms force off; slock")
    -- lock with slock and suspend
    , ((modm .|. shiftMask, xK_l ), spawn "sleep 0.5; systemctl suspend; slock")
    -- close focused window
    , ((modm, xK_q ), kill)
    -- Volume (dedicated media keys and fallbacks on Print/Pause)
    , ((modm, xK_Print ), lowerVolumeChannels ["Master"] 5 >> return ())
    , ((modm, xK_Scroll_Lock ), lowerVolumeChannels ["Master"] 100 >> return ())
    , ((modm, xK_Pause ), raiseVolumeChannels ["Master"] 5 >> return ())
    , ((0, xF86XK_AudioLowerVolume ), lowerVolumeChannels ["Master"] 5 >> return ())
    , ((0, xF86XK_AudioMute ), lowerVolumeChannels ["Master"] 100 >> return ())
    , ((0, xF86XK_AudioRaiseVolume ), raiseVolumeChannels ["Master"] 5 >> return ())
    -- Screen brightness
    , ((0, xF86XK_MonBrightnessUp ), spawn "xbacklight +5")
    , ((0, xF86XK_MonBrightnessDown ), spawn "xbacklight -5")
    -- Screen temperature & software brightness (toggle redshift)
    , ((modm, xK_F11 ), spawn "systemctl --user stop redshift; redshift -O 7000 -b 1.0")
    , ((modm, xK_F12 ), spawn "systemctl --user start redshift")
    -- Printscreen
    , ((0, xK_Print ), spawn "gnome-screenshot")
    -- Rotate through the available layout algorithms
    , ((modm, xK_space ), sendMessage NextLayout)
    --, ((modm, xK_apostrophe ), gridselectWorkspace defaultGSConfig (\ws -> W.greedyView ws))
    --, ((modm, xK_apostrophe ), goToSelected defaultGSConfig)
    -- Reset the layouts on the current workspace to default
    , ((modm .|. shiftMask, xK_space ), setLayout $ XMonad.layoutHook conf)
    -- Resize viewed windows to the correct size
    , ((modm, xK_n ), refresh)
    -- Move focus to the next/prev/master window
    , ((modm, xK_Tab ), windows W.focusDown )
    , ((modm, xK_j ), windows W.focusDown )
    , ((modm, xK_k ), windows W.focusUp )
    , ((modm, xK_m ), windows W.focusMaster )
    -- Swap the focused window with the next/prev window
    , ((modm .|. shiftMask, xK_Tab ), windows W.swapDown )
    , ((modm .|. shiftMask, xK_j ), windows W.swapDown )
    , ((modm .|. shiftMask, xK_k ), windows W.swapUp )
    -- Shrink/expand the master area
    , ((modm, xK_minus ), sendMessage Shrink)
    , ((modm, xK_plus ), sendMessage Expand)
    -- Push window back into tiling
    , ((modm, xK_t ), withFocused $ windows . W.sink)
    -- Increment/deincrement the number of windows in the master area
    , ((modm , xK_comma ), sendMessage (IncMasterN 1))
    , ((modm , xK_period), sendMessage (IncMasterN (-1)))
    -- Toggle the status bar gap
    -- Use this binding with avoidStruts from Hooks.ManageDocks.
    -- See also the statusBar function from Hooks.DynamicLog.
    , ((modm , xK_b ), sendMessage ToggleStruts)
    -- Quit xmonad
    , ((modm .|. shiftMask, xK_x ), io (exitWith ExitSuccess))
    -- Restart xmonad (kills the bars so they are respawned cleanly)
    , ((modm , xK_x ), spawn "killall conky dzen2 trayer; xmonad --recompile && xmonad --restart")
    ]
    ++
    --
    -- mod-[1..9, 0], Switch to workspace N
    -- mod-shift-[1..9, 0], Move client to workspace N
    --
    [((m .|. modm, k), windows $ f i)
        | (i, k) <- zip (XMonad.workspaces conf) ([xK_1 .. xK_9] ++ [xK_0])
        , (f, m) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
    ++
    --
    -- mod-{a,s,d}, Switch to physical/Xinerama screens 1, 2, or 3
    -- mod-shift-{a,s,d}, Move client to screen 1, 2, or 3
    --
    [((m .|. modm, key), f sc)
        | (key, sc) <- zip [xK_a, xK_s, xK_d] [0..]
        , (f, m) <- [(viewScreen, 0), (sendToScreen, shiftMask)]]
-- | Mouse bindings: actions bound to (modifier, button) chords.
myMouseBindings :: XConfig Layout -> M.Map (KeyMask, Button) (Window -> X ())
myMouseBindings (XConfig {XMonad.modMask = modm}) = M.fromList
    -- mod-button1 %! Set the window to floating mode and move by dragging
    [ ((modm, button1), \w -> focus w >> mouseMoveWindow w
                                       >> windows W.shiftMaster)
    -- mod-shift-button3 %! Pick a workspace from a grid selector
    -- (the inner lambda's ws shadows the outer, unused, binding)
    , ((modm .|. shiftMask, button3), \ws -> gridselectWorkspace defaultGSConfig (\ws -> W.greedyView ws))
    -- mod-button3 %! Set the window to floating mode and resize by dragging
    , ((modm, button3), \w -> focus w >> mouseResizeWindow w
                                       >> windows W.shiftMaster)
    -- mod-scroll-wheel %! Cycle to the previous/next workspace
    , ((modm, button4), \w -> prevWS)
    , ((modm, button5), \w -> nextWS)
    ]
------------------------------------------------------------------------
-- Status bars and logging
-- Perform an arbitrary action on each internal state change or X event.
-- See the 'XMonad.Hooks.DynamicLog' extension for example
--myStatusHook :: Handle -> X ()
--myStatusHook h = dynamicLogWithPP $ PP
-- {
-- ppExtras = [myStatusLogger]
-- , ppOutput = hPutStrLn h
-- }
-- | Pretty-print workspace/layout/title state into the left dzen bar
-- handle on every state change.  Colours match the barColor scheme; the
-- layout name is replaced by an .xbm icon where one exists.
myLogHook :: Handle -> X ()
myLogHook h = dynamicLogWithPP $ defaultPP
    {
        ppCurrent = dzenColor "#50FF5F" barColor . wrap "(" ")"
      , ppVisible = dzenColor "#FF50FF" barColor . wrap "[" "]"
      , ppHidden = dzenColor "#AAAAAA" barColor . pad
      , ppHiddenNoWindows = dzenColor "#505050" barColor . pad
      , ppUrgent = dzenColor "#FF0000" barColor . pad
      , ppWsSep = ""
      , ppSep = "^fg(" ++ sepBarColor ++ ") "++sepBar++" "
      -- Map known layout names to bar icons; anything else is shown verbatim.
      , ppLayout = dzenColor imageBarColor barColor .
                   (\x -> case x of
                     "ResizableTall" -> "^i(" ++ myBitmapsDir ++ "/tall.xbm)"
                     "Mirror ResizableTall" -> "^i(" ++ myBitmapsDir ++ "/mtall.xbm)"
                     "Full" -> "^i(" ++ myBitmapsDir ++ "/full.xbm)"
                     "Simple Float" -> "~"
                     "IM Grid" -> "IM"
                     _ -> x
                   )
      -- dzenEscape stops window titles from injecting dzen commands.
      , ppTitle = dzenColor "white" barColor . dzenEscape
      , ppExtras = []
      , ppOutput = hPutStrLn h
    }
--}}}
|
ErikBjare/dotfiles
|
home/.xmonad/xmonad.hs
|
mit
| 16,393
| 136
| 14
| 5,578
| 3,038
| 1,791
| 1,247
| 196
| 6
|
module Day22Spec (spec) where
import Day22
import Data.Array (Array)
import qualified Data.Array as Array
import Test.Hspec
-- | Standalone entry point so this spec can be run on its own with
-- hspec's default runner.
main :: IO ()
main = hspec spec
-- | The worked example from the Advent of Code 2016 day 22 statement:
-- df-style output for a 3x3 grid of storage nodes.
sampleInput :: String
sampleInput = unlines
    [ "root@ebhq-gridcenter# df -h"
    , "Filesystem              Size  Used  Avail  Use%"
    , "/dev/grid/node-x0-y0   10T    8T     2T   80%"
    , "/dev/grid/node-x0-y1   11T    6T     5T   54%"
    , "/dev/grid/node-x0-y2   32T   28T     4T   87%"
    , "/dev/grid/node-x1-y0    9T    7T     2T   77%"
    , "/dev/grid/node-x1-y1    8T    0T     8T    0%"
    , "/dev/grid/node-x1-y2   11T    7T     4T   63%"
    , "/dev/grid/node-x2-y0   10T    6T     4T   60%"
    , "/dev/grid/node-x2-y1    9T    8T     1T   88%"
    , "/dev/grid/node-x2-y2    9T    6T     3T   66%"
    ]
-- | Expected parse of 'sampleInput': a 3x3 array indexed by (x, y).
-- The three Node fields mirror the df columns in order — presumably
-- size, used, avail (TODO: confirm against the Node definition in Day22).
sampleParsed :: Grid
sampleParsed = Array.array ((0, 0), (2, 2))
    [ ((0, 0), Node 10 8 2)
    , ((0, 1), Node 11 6 5)
    , ((0, 2), Node 32 28 4)
    , ((1, 0), Node 9 7 2)
    , ((1, 1), Node 8 0 8)
    , ((1, 2), Node 11 7 4)
    , ((2, 0), Node 10 6 4)
    , ((2, 1), Node 9 8 1)
    , ((2, 2), Node 9 6 3)
    ]
-- | Day 22 spec: the parser and part one are checked against both the
-- sample from the puzzle statement and the real puzzle input; part two
-- is still pending.
spec :: Spec
spec = do
    describe "parseInput" $ do
        it "works for sample input" $ do
            parseInput sampleInput `shouldBe` sampleParsed
    describe "day22" $ do
        it "works for sample input" $ do
            day22 sampleInput `shouldBe` 7
        it "works for actual input" $ do
            actualInput <- readFile "inputs/day22.txt"
            day22 actualInput `shouldBe` 903
    describe "day22'" $ do
        it "works for actual input" $ do
            -- actualInput <- readFile "inputs/day22.txt"
            -- day22' actualInput `shouldBe` "fdhgacbe"
            pending
|
brianshourd/adventOfCode2016
|
test/Day22Spec.hs
|
mit
| 1,725
| 0
| 14
| 591
| 463
| 258
| 205
| 45
| 1
|
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.ClassHierarchy
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | The pane of ide where modules are presented in tree form with their
-- packages and exports
--
-------------------------------------------------------------------------------
module IDE.Pane.ClassHierarchy (
IDEClassHierarchy(..)
, ClassHierarchyState(..)
, showClasses
--, showInstances
, selectClass
--, reloadKeepSelection
) where
import Graphics.UI.Gtk hiding (get)
import Data.Maybe
import Control.Monad.Reader
import qualified Data.Map as Map
import Data.Tree
import Data.List
import Data.Typeable
import Prelude hiding (catch)
import IDE.Core.State
-- | A modules pane description
--
-- | Widget state for the class-hierarchy pane: the outer container, the
-- paned split, the tree view plus its backing store of 'ClassWrapper'
-- rows, and the scope radio/check buttons.  The facet view fields are
-- currently disabled.
data IDEClassHierarchy = IDEClassHierarchy {
    outer :: VBox
,   paned :: HPaned
,   treeView :: TreeView
,   treeStore :: TreeStore ClassWrapper
--,   facetView :: TreeView
--,   facetStore :: TreeStore FacetWrapper
,   localScopeB :: RadioButton
,   packageScopeB :: RadioButton
,   worldScopeB :: RadioButton
,   blacklistB :: CheckButton
} deriving Typeable
-- | Serialisable pane state: divider position, (scope, blacklist flag)
-- and a pair of optional selections.  Note it is not actually persisted
-- by the 'RecoverablePane' instance below.
data ClassHierarchyState = ClassHierarchyState Int (Scope,Bool)
                                    (Maybe Text, Maybe Text)
    deriving(Eq,Ord,Read,Show,Typeable)
instance IDEObject IDEClassHierarchy
-- | Standard pane plumbing: fixed name/id, top widget is the outer VBox.
instance Pane IDEClassHierarchy IDEM
    where
    primPaneName _ = "ClassHierarchy"
    getAddedIndex _ = 0
    getTopWidget = liftIO . toWidget . outer
    paneId b = "*ClassHierarchy"
    makeActive p = activatePane p []
    close = closePane
-- | Stub instance: nothing is saved and nothing is recovered, so the
-- pane always starts fresh.  (A fuller implementation is sketched in
-- the commented block below.)
instance RecoverablePane IDEClassHierarchy ClassHierarchyState IDEM where
    saveState p = return Nothing
    recoverState pp _ = return ()
{--
instance RecoverablePane IDEClassHierarchy ClassHierarchyState where
saveState p = do
(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _) <- getModules
sc <- getScope
mbModules <- getPane
case mbModules of
Nothing -> return Nothing
Just p -> liftIO $ do
i <- panedGetPosition (paned p)
mbTreeSelection <- getSelectionTree treeView treeStore
mbFacetSelection <- getSelectionFacet facetView facetStore
let mbs = (case mbTreeSelection of
Nothing -> Nothing
Just (_,[]) -> Nothing
Just (_,((md,_):_)) -> Just (modu $ moduleIdMD md),
case mbFacetSelection of
Nothing -> Nothing
Just fw -> Just (symbolFromFacetWrapper fw))
return (Just (ModulesState i sc mbs))
recoverState pp (ModulesState i sc@(scope,useBlacklist) se) = do
nb <- getNotebook pp
initModules pp nb
mod@(IDEModules _ _ treeView treeStore facetView facetStore lb pb wb blb)
<- getModules
case scope of
Local -> liftIO $ toggleButtonSetActive lb True
Package -> liftIO $ toggleButtonSetActive pb True
World -> liftIO $ toggleButtonSetActive wb True
liftIO $ toggleButtonSetActive blb useBlacklist
liftIO $ panedSetPosition (paned mod) i
fillModulesList sc
selectNames se
--}
-- | Select a descriptor in the hierarchy — only class descriptors are
-- acted upon; everything else is ignored.
selectClass :: Descr -> IDEAction
selectClass d@(Descr descrName _ descrModu _ _ details) =
    case details of
        (ClassDescr _ _)-> selectClass' descrModu descrName
        _ -> return ()
selectClass _ = return ()

-- Currently a stub: the real tree-navigation code is commented out below.
selectClass' moduleName symbol = return ()
{--
selectClass' :: ModuleIdentifier -> Symbol -> IDEAction
selectClass' moduleName symbol =
let nameArray = breakAtDots [] moduleName
in do
mods@(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _) <- getModules
mbTree <- liftIO $ treeStoreGetTreeSave treeStore []
case treePathFromNameArray mbTree nameArray [] of
Just treePath -> liftIO $ do
treeViewExpandToPath treeView treePath
sel <- treeViewGetSelection treeView
treeSelectionSelectPath sel treePath
col <- treeViewGetColumn treeView 0
treeViewScrollToCell treeView treePath (fromJust col) (Just (0.3,0.3))
mbFacetTree <- treeStoreGetTreeSave facetStore []
selF <- treeViewGetSelection facetView
case findPathFor symbol mbFacetTree of
Nothing -> sysMessage Normal "no path found"
Just path -> do
treeSelectionSelectPath selF path
col <- treeViewGetColumn facetView 0
treeViewScrollToCell facetView path (fromJust col) (Just (0.3,0.3))
bringPaneToFront mods
Nothing -> return ()
--}
-- | Raise the class-hierarchy pane and give its tree view keyboard focus
-- (creating the pane first if necessary, via 'getClassHierarchy').
showClasses :: IDEAction
showClasses = do
    m <- getClassHierarchy
    liftIO $ bringPaneToFront m
    liftIO $ widgetGrabFocus (treeView m)
--showInstances :: IDEAction
--showInstances = do
-- m <- getClassHierarchy
-- liftIO $ bringPaneToFront m
-- liftIO $ widgetGrabFocus (facetView m)
-- | Fetch the class-hierarchy pane, building it on first use.  The
-- second 'getPane' re-reads after 'newPane' registers the pane; if it
-- is still missing, construction failed and we abort with 'throwIDE'.
getClassHierarchy :: IDEM IDEClassHierarchy
getClassHierarchy = do
    mbCH <- getPane
    case mbCH of
        Nothing -> do
            pp <- getBestPathForId "*ClassHierarchy"
            nb <- getNotebook pp
            ci <- readIDE currentInfo
            newPane pp nb (builder ci)
            mbCH <- getPane
            case mbCH of
                Nothing -> throwIDE "Can't init class hierarchy"
                Just m -> return m
        Just m -> return m
-- | The hierarchy is a rose forest of wrappers; each wrapper is a class
-- name, the superclasses not yet placed in the tree, and its descriptor.
type ClassHierarchy = Forest ClassWrapper
type ClassWrapper = (Symbol, [Symbol], Descr)
--
-- | Make a Tree with a class hierarchy for display.
--
-- Classes with no superclasses seed the forest; the rest are inserted
-- repeatedly under their superclasses.  Each pass must place at least
-- one remaining wrapper or the input is cyclic/unsatisfiable and we
-- abort — that is the @length rest >= length other@ progress check.
buildClassHierarchyTree :: (PackageScope,PackageScope) -> ClassHierarchy
buildClassHierarchyTree ((_,sc1),(_,sc2)) =
    let allClasses = nub
            $ filter isClassDescr
                $ concat (Map.elems sc1)
                    ++ concat (Map.elems sc2)
        wrappers = map asClassWrapper allClasses
        (basics,other) = partition (\(_,sc,_) -> null sc) wrappers
        basicForest = map (\ n -> Node n []) basics
        resultForest = insertInForest basicForest other
    in sortForest resultForest
    where
    -- Iterate until every wrapper has been attached under a superclass.
    insertInForest :: ClassHierarchy -> [ClassWrapper] -> ClassHierarchy
    insertInForest basicForest [] = basicForest
    insertInForest basicForest other =
        let (newForest,rest) = foldl' insertInForest' (basicForest,[]) other
        in if length rest >= length other
            then throwIDE "ClassHierarchy>>buildClassHierarchyTree: Can't build tree"
            else insertInForest newForest rest
    -- Try to place one wrapper under each of its listed superclasses;
    -- superclasses not yet present are kept for the next pass.
    insertInForest' :: (ClassHierarchy,[ClassWrapper]) -> ClassWrapper
        -> (ClassHierarchy,[ClassWrapper])
    insertInForest' (forest,rest) wrap@(id,superList,idDescr) =
        let (newForest,newSuperList) = foldl' (insertInForest2 wrap)
                                            (forest, []) superList
        in if null newSuperList
            then (newForest,rest)
            else (newForest,(id,newSuperList,idDescr): rest)
    -- Attempt insertion under a single superclass anywhere in the forest.
    insertInForest2 :: ClassWrapper -> (ClassHierarchy,[Text]) -> Text
        -> (ClassHierarchy,[Text])
    insertInForest2 wrapper (forest,rest) super =
        let (newForest,success) = foldl' (insertInTree wrapper super) ([],False) forest
        in if success
            then (newForest,rest)
            else (newForest, super : rest)
    -- Depth-first search for the superclass node; on a match the wrapper
    -- becomes a new (leaf) child of that node.
    insertInTree :: ClassWrapper -> Text -> (ClassHierarchy,Bool)
        -> Tree ClassWrapper -> (ClassHierarchy,Bool)
    insertInTree wrapper superS (forest,bool) n@(Node w@(symbol,super,idDescr) subForest) =
        if superS == symbol
            then (Node w ((Node wrapper []) : subForest) : forest, True)
            else
                let (newSubForest,newBool) = foldl' (insertInTree wrapper superS) ([],False)
                                                subForest
                in if newBool
                    then ((Node w newSubForest) : forest, True)
                    else (n: forest, bool)
-- | True only for descriptors of type classes.
isClassDescr :: Descr -> Bool
isClassDescr descr = case details descr of
                        ClassDescr _ _ -> True
                        _ -> False

-- | Convert a class descriptor to a wrapper (name, superclasses, descr).
-- Partial by design: callers must filter with 'isClassDescr' first.
asClassWrapper :: Descr -> ClassWrapper
asClassWrapper descr =
    case details descr of
        ClassDescr super _ -> (descrName descr, super, descr)
        _ -> throwIDE "ClassHierarchy>>asClassWrapper: No class"
-- NOTE(review): an orphan instance, and it orders trees by root label
-- ONLY, ignoring the subforest — so compare can say EQ for trees that
-- the derived Eq considers different.  That is fine for the stable
-- sorts below but violates the usual Ord/Eq consistency expectation;
-- consider a local sortBy comparator instead of a global instance.
instance Ord a => Ord (Tree a) where
    compare (Node l1 _) (Node l2 _) = compare l1 l2
-- | Order a forest recursively: every level ends up sorted by root
-- label.  The comparison looks at root labels only (matching the Ord
-- instance above); the sort is stable, so equal-labelled siblings keep
-- their relative order.
sortForest :: Ord a => Forest a -> Forest a
sortForest trees = sortBy byLabel (map sortTree trees)
  where byLabel t1 t2 = compare (rootLabel t1) (rootLabel t2)

-- | Sort the children of every node in a tree by their root labels.
sortTree :: Ord a => Tree a -> Tree a
sortTree (Node label children) = Node label (sortBy byLabel (map sortTree children))
  where byLabel t1 t2 = compare (rootLabel t1) (rootLabel t2)
-- | Construct the class-hierarchy pane widgets: a tree view over the
-- hierarchy built from the current scope info, scope radio buttons and
-- a blacklist toggle.  Returns the pane record plus the signal
-- connections that must be torn down when the pane closes.  The facet
-- (interface) view and most signal handlers are currently disabled.
builder :: Maybe (PackageScope, PackageScope) ->
                    PanePath ->
                    Notebook ->
                    Window ->
                    IDERef ->
                    IO (IDEClassHierarchy, Connections)
builder currentInfo pp nb windows ideR = do
    -- With no scope info the pane starts empty.
    let forest = case currentInfo of
                    Nothing -> []
                    Just pair -> buildClassHierarchyTree pair
    treeStore <- treeStoreNew forest
    treeView <- treeViewNew
    treeViewSetModel treeView treeStore
    --treeViewSetRulesHint treeView True
    -- Column: a source/no-source icon followed by the class name.
    renderer0 <- cellRendererPixbufNew
    set renderer0 [ cellPixbufStockId := "ide_no_source" ]
    renderer <- cellRendererTextNew
    col <- treeViewColumnNew
    treeViewColumnSetTitle col "Classes"
    treeViewColumnSetSizing col TreeViewColumnSizingAutosize
    treeViewColumnSetResizable col True
    treeViewColumnSetReorderable col True
    treeViewAppendColumn treeView col
    cellLayoutPackStart col renderer0 False
    cellLayoutPackStart col renderer True
    cellLayoutSetDataFunction col renderer treeStore
        $ \(s,_,_) -> setCellRendererTextText renderer s
    cellLayoutSetDataFunction col renderer0 treeStore
        $ \(_,_,d) -> [
        cellPixbufStockId :=
            if isJust (mbLocation d)
                then "ide_source"
                else "ide_no_source"]
    treeViewSetHeadersVisible treeView True
--    treeViewSetEnableSearch treeView True
--    treeViewSetSearchColumn treeView 0
--    treeViewSetSearchEqualFunc treeView (treeViewSearch treeView treeStore)
-- Facet view
    {--
    facetView <- treeViewNew
    facetStore <- treeStoreNew []
    treeViewSetModel facetView facetStore
    renderer30 <- cellRendererPixbufNew
    renderer31 <- cellRendererPixbufNew
    renderer3 <- cellRendererTextNew
    col <- treeViewColumnNew
    treeViewColumnSetTitle col "Interface"
    --treeViewColumnSetSizing col TreeViewColumnSizingAutosize
    treeViewAppendColumn facetView col
    cellLayoutPackStart col renderer30 False
    cellLayoutPackStart col renderer31 False
    cellLayoutPackStart col renderer3 True
    cellLayoutSetDataFunction col renderer3 facetStore
        $ setCellRendererTextText renderer3 . facetTreeText row
    cellLayoutSetDataFunction col renderer30 facetStore
        $ \row -> [
        cellPixbufStockId := stockIdFromType (facetIdType row)]
    cellLayoutSetDataFunction col renderer31 facetStore
        $ \row -> [
        cellPixbufStockId := if isJust (mbLocation(facetIdDescr row))
                                then "ide_source"
                                else ""]
    treeViewSetHeadersVisible facetView True
    treeViewSetEnableSearch facetView True
    treeViewSetSearchColumn facetView 0
    treeViewSetSearchEqualFunc facetView (facetViewSearch facetView facetStore)
    --}
    -- Scrolled window inside a horizontal paned split (the right half
    -- would hold the facet view).
    pane' <- hPanedNew
    sw <- scrolledWindowNew noAdjustment noAdjustment
    scrolledWindowSetShadowType sw ShadowTypeIn
    containerAdd sw treeView
    scrolledWindowSetPolicy sw PolicyTypeAutomatic PolicyTypeAutomatic
{--    sw2 <- scrolledWindowNew noAdjustment noAdjustment
    containerAdd sw2 facetView
    scrolledWindowSetPolicy sw2 PolicyTypeAutomatic PolicyTypeAutomatic--}
    panedAdd1 pane' sw
--    panedAdd2 pane' sw2
    (x,y) <- widgetGetSize nb
    panedSetPosition pane' (x `quot` 2)
    -- Scope selectors: Local / Package / World radio group + blacklist.
    box <- hBoxNew True 2
    rb1 <- radioButtonNewWithLabel "Local"
    rb2 <- radioButtonNewWithLabelFromWidget rb1 "Package"
    rb3 <- radioButtonNewWithLabelFromWidget rb1 "World"
    toggleButtonSetActive rb3 True
    cb <- checkButtonNewWithLabel "Blacklist"
    boxPackStart' box rb1 PackGrow 2
    boxPackStart' box rb2 PackGrow 2
    boxPackStart' box rb3 PackGrow 2
    boxPackEnd' box cb PackNatural 2
    boxOuter <- vBoxNew False 2
    boxPackStart' boxOuter box PackNatural 2
    boxPackStart' boxOuter pane' PackGrow 2
    let classes = IDEClassHierarchy boxOuter pane' treeView treeStore
                        {--facetView facetStore--} rb1 rb2 rb3 cb
    -- Double-click expands a row; focusing the view activates the pane.
    cid3 <- treeView `onRowActivated`
        (\ treePath _ -> do
            treeViewExpandRow treeView treePath False
            return ())
    cid1 <- treeView `afterFocusIn`
        (\_ -> do reflectIDE (makeActive classes) ideR; return True)
--    cid2 <- facetView `afterFocusIn`
--        (\_ -> do runReaderT (makeActive classes) ideR; return True)
--    treeView  `onButtonPress` (treeViewPopup ideR treeStore treeView)
--    facetView `onButtonPress` (facetViewPopup ideR facetStore facetView)
--    rb1 `onToggled` (runReaderT scopeSelection ideR)
--    rb2 `onToggled` (runReaderT scopeSelection ideR)
--    rb3 `onToggled` (runReaderT scopeSelection ideR)
--    cb  `onToggled` (runReaderT scopeSelection ideR)
    sel <- treeViewGetSelection treeView
--    sel `onSelectionChanged` (fillFacets treeView treeStore facetView facetStore)
--    sel2 <- treeViewGetSelection facetView
--    sel2 `onSelectionChanged` (fillInfo facetView facetStore ideR)
    return (classes,[ConnectC cid1{--,ConnectC cid2--}, ConnectC cid3])
{--
treeViewSearch :: TreeView
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> Int
-> Text
-> TreeIter
-> IO Bool
treeViewSearch treeView treeStore _ string iter = do
path <- treeModelGetPath treeStore iter
val <- treeStoreGetValue treeStore path
mbTree <- treeStoreGetTreeSave treeStore path
exp <- treeViewRowExpanded treeView path
when (isJust mbTree && (not (null (subForest (fromJust mbTree)))) && not exp) $
let found = searchInModSubnodes (fromJust mbTree) string
in when found $ do
treeViewExpandRow treeView path False
return ()
let str2 = case snd val of
[] -> fst val
(m,_):_ -> showPackModule (moduleIdMD m)
return (isInfixOf (map toLower string) (map toLower str2))
searchInModSubnodes :: ModTree -> Text -> Bool
searchInModSubnodes tree str =
not $ null
$ filter (\ val ->
let cstr = case snd val of
[] -> fst val
(m,_):_ -> showPackModule (moduleIdMD m)
in isInfixOf (map toLower str) (map toLower cstr))
$ concatMap flatten (subForest tree)
facetViewSearch :: TreeView
-> TreeStore FacetWrapper
-> Int
-> Text
-> TreeIter
-> IO Bool
facetViewSearch facetView facetStore _ string iter = do
path <- treeModelGetPath facetStore iter
val <- treeStoreGetValue facetStore path
tree <- treeStoreGetTree facetStore path
exp <- treeViewRowExpanded facetView path
when (not (null (subForest tree)) && not exp) $
let found = searchInFacetSubnodes tree string
in when found $ do
treeViewExpandRow facetView path False
return ()
return (isInfixOf (map toLower string) (map toLower (facetTreeText val)))
searchInFacetSubnodes :: FacetTree -> Text -> Bool
searchInFacetSubnodes tree str =
not $ null
$ filter (\ val ->
isInfixOf (map toLower str) (map toLower (facetTreeText val)))
$ concatMap flatten (subForest tree)
--}
{--
fillFacets :: TreeView
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> TreeView
-> TreeStore FacetWrapper
-> IO ()
fillFacets treeView treeStore facetView facetStore = do
sel <- getSelectionTree treeView treeStore
case sel of
Just val
-> case snd val of
((mod,package):_)
-> let forest = buildFacetForest mod in do
emptyModel <- treeStoreNew []
treeViewSetModel facetView emptyModel
treeStoreClear facetStore
mapM_ (\(e,i) -> treeStoreInsertTree facetStore [] i e)
$ zip forest [0 .. length forest]
treeViewSetModel facetView facetStore
treeViewSetEnableSearch facetView True
treeViewSetSearchColumn facetView 0
treeViewSetSearchEqualFunc facetView (facetViewSearch facetView facetStore)
[] -> return ()
Nothing
-> do
treeStoreClear facetStore
return ()
--}
{--
getSelectionTree :: TreeView
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> IO (Maybe (Text, [(ModuleDescr,PackageDescr)]))
getSelectionTree treeView treeStore = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows' treeSelection
case paths of
[] -> return Nothing
a:r -> do
val <- treeStoreGetValue treeStore a
return (Just val)
getSelectionFacet :: TreeView
-> TreeStore FacetWrapper
-> IO (Maybe FacetWrapper)
getSelectionFacet treeView treeStore = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows' treeSelection
case paths of
a:r -> do
val <- treeStoreGetValue treeStore a
return (Just val)
_ -> return Nothing
fillInfo :: TreeView
-> TreeStore FacetWrapper
-> IDERef
-> IO ()
fillInfo treeView lst ideR = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows' treeSelection
case paths of
[] -> return ()
[a] -> do
wrapper <- treeStoreGetValue lst a
runReaderT (setInfos [facetIdDescr wrapper]) ideR
return ()
_ -> return ()
findDescription :: PackModule -> SymbolTable -> Symbol -> Maybe (Symbol,IdentifierDescr)
findDescription md st s =
case Map.lookup s st of
Nothing -> Nothing
Just l -> case filter (\id -> md == moduleIdID id) l of
[] -> Nothing
l -> Just (s,head l)
fillModulesList :: (Scope,Bool) -> IDEAction
fillModulesList (scope,useBlacklist) = do
(IDEModules _ _ treeView treeStore _ _ _ _ _ _) <- getModules
prefs <- readIDE prefs
currentInfo' <- readIDE currentInfo
accessibleInfo' <- readIDE accessibleInfo
case currentInfo' of
Nothing -> case (scope,accessibleInfo') of
(World,Just ai@(pm,ps)) ->
let p2 = if useBlacklist
then (Map.filter (filterBlacklist
(packageBlacklist prefs)) pm, ps)
else ai
(Node _ li) = buildModulesTree
((Map.empty,Map.empty),p2)
in liftIO $ do
treeStoreClear treeStore
mapM_ (\(e,i) -> treeStoreInsertTree treeStore [] i e)
$ zip li [0 .. length li]
_ -> liftIO $ do
treeStoreClear treeStore
treeStoreInsertTree treeStore [] 0 (Node ("",[]) [])
Just (l,p) -> let (l',p'@(pm,ps)) = case scope of
Local -> (l,(Map.empty,Map.empty))
Package -> (l,p)
World -> case accessibleInfo' of
Just ai -> (l,ai)
Nothing -> (l,p)
p2 = if useBlacklist
then (Map.filter (filterBlacklist
(packageBlacklist prefs)) pm, ps)
else p'
(Node _ li) = buildModulesTree (l',p2)
in liftIO $ do
emptyModel <- treeStoreNew []
treeViewSetModel treeView emptyModel
treeStoreClear treeStore
mapM_ (\(e,i) -> treeStoreInsertTree treeStore [] i e)
$ zip li [0 .. length li]
treeViewSetModel treeView treeStore
treeViewSetEnableSearch treeView True
treeViewSetSearchColumn treeView 0
treeViewSetSearchEqualFunc treeView (treeViewSearch treeView treeStore)
where
filterBlacklist :: [Dependency] -> PackageDescr -> Bool
filterBlacklist dependencies packageDescr =
let packageId = packagePD packageDescr
name = pkgName packageId
version = pkgVersion packageId
in isNothing $ find (\ (Dependency str vr) -> str == name && withinRange version vr)
dependencies
type FacetForest = Forest FacetWrapper
type FacetTree = Tree FacetWrapper
facetTreeText :: FacetWrapper -> Text
facetTreeText (Itself (SimpleDescr id FunctionS _ _ _ _)) = {-- "function " ++ --} id
facetTreeText (Itself (SimpleDescr id NewtypeS _ _ _ _)) = {-- "newtype " ++ --} id
facetTreeText (Itself (SimpleDescr id TypeS _ _ _ _)) = {-- "type " ++ --} id
facetTreeText (Itself (SimpleDescr id _ _ _ _ _)) = id
facetTreeText (Itself (DataDescr id _ _ _ _ _ _)) = {-- "data " ++ --} id
facetTreeText (Itself (ClassDescr id _ _ _ _ _)) = {-- "class " ++ --} id
facetTreeText (Itself (InstanceDescr cl _ _ _ _ )) = {-- "instance " ++ --} cl
facetTreeText (ConstructorW s _) = {-- "constructor " ++ --} s
facetTreeText (FieldW s _) = {-- "slot " ++ --} s
facetTreeText (MethodW s _) = {-- "method " ++ --} s
facetTreeText (OrphanedData (InstanceDescr cl binds _ _ _)) = {-- "instance " ++ --} cl
++ " " ++ printBinds binds
where
printBinds [] = ""
printBinds (a:[]) = a
printBinds (a:b) = a ++ " " ++ printBinds b
facetTreeText _ = throwIDE "impossible in facetTreeText"
facetIdType :: FacetWrapper -> IdType
facetIdType (Itself descr) = idType descr
facetIdType (ConstructorW _ _) = Constructor
facetIdType (FieldW _ _) = Field
facetIdType (MethodW _ _) = Method
facetIdType (OrphanedData _) = OrphanedInstance
facetIdDescr :: FacetWrapper -> IdentifierDescr
facetIdDescr (Itself descr) = descr
facetIdDescr (ConstructorW _ descr) = descr
facetIdDescr (FieldW _ descr) = descr
facetIdDescr (MethodW _ descr) = descr
facetIdDescr (OrphanedData descr) = descr
buildFacetForest :: ModuleDescr -> FacetForest
buildFacetForest modDescr =
let (instances,other) = partition (\id -> case id of
InstanceDescr _ _ _ _ _ -> True
_ -> False)
$ idDescriptionsMD modDescr
forestWithoutInstances = map buildFacet other
(forest2,orphaned) = foldl' addInstances (forestWithoutInstances,[])
instances
orphanedNodes = map (\ inst -> Node (OrphanedData inst) []) orphaned
in forest2 ++ reverse orphanedNodes
where
buildFacet :: IdentifierDescr -> FacetTree
buildFacet d@(SimpleDescr _ _ _ _ _ _)
= Node (Itself d) []
buildFacet d@(DataDescr _ _ _ constID fieldsID _ _)
= (Node (Itself d) ((map (\ s -> Node (ConstructorW s d) []) constID)
++ (map (\ s -> Node (FieldW s d) []) fieldsID)))
buildFacet d@(ClassDescr _ _ _ classOpsID _ _)
= Node (Itself d) (map (\ s -> Node (MethodW s d) []) classOpsID)
buildFacet d@(InstanceDescr _ _ _ _ _)
= throwIDE "Impossible in buildFacet"
addInstances :: (FacetForest,[IdentifierDescr])
-> IdentifierDescr
-> (FacetForest,[IdentifierDescr])
addInstances (forest,orphaned) instDescr =
case foldl' (matches instDescr) ([],False) forest of
(f,True) -> (f,orphaned)
(f,False) -> (forest, instDescr:orphaned)
matches :: IdentifierDescr
-> (FacetForest,Bool)
-> FacetTree
-> (FacetForest,Bool)
matches instDescr (forest,False) (Node (Itself dd@(DataDescr id _ _ _ _ _ _)) sub)
| [id] == binds instDescr
= ((Node (Itself dd) (sub ++ [Node (Itself instDescr) []])):forest,True)
matches instDescr (forest,False) (Node (Itself dd@(SimpleDescr id ty _ _ _ _ )) sub)
| [id] == binds instDescr && ty == NewtypeS
= ((Node (Itself dd) (sub ++ [Node (Itself instDescr) []])):forest,True)
matches _ (forest,b) node = (node:forest,b)
--}
{--
treeViewPopup :: IDERef
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> TreeView
-> Event
-> IO (Bool)
treeViewPopup ideR store treeView (Button _ click _ _ _ _ button _ _) = do
if button == RightButton
then do
theMenu <- menuNew
menuAttachToWidget theMenu treeView
item1 <- menuItemNewWithLabel "Edit"
item1 `onActivateLeaf` do
sel <- getSelectionTree treeView store
case sel of
Just (_,[(m,_)]) -> case mbSourcePathMD m of
Nothing -> return ()
Just fp -> do
runReaderT (selectSourceBuf fp) ideR
return ()
otherwise -> return ()
item2 <- menuItemNewWithLabel "ExpandAll"
item2 `onActivateLeaf` (treeViewExpandAll treeView)
item3 <- menuItemNewWithLabel "CollapseAll"
item3 `onActivateLeaf` (treeViewCollapseAll treeView)
mapM_ (menuShellAppend theMenu) [item1,item2,item3]
menuPopup theMenu Nothing
widgetShowAll theMenu
return True
else if button == LeftButton && click == DoubleClick
then do sel <- getSelectionTree treeView store
case sel of
Just (_,[(m,_)]) -> case mbSourcePathMD m of
Nothing -> return ()
Just fp -> do
runReaderT (selectSourceBuf fp) ideR
return ()
otherwise -> return ()
return True
else return False
treeViewPopup _ _ _ _ = throwIDE "treeViewPopup wrong event type"
facetViewPopup :: IDERef
-> TreeStore FacetWrapper
-> TreeView
-> Event
-> IO (Bool)
facetViewPopup ideR store facetView (Button _ click _ _ _ _ button _ _) = do
if button == RightButton
then do
theMenu <- menuNew
menuAttachToWidget theMenu treeView
item1 <- menuItemNewWithLabel "Go to definition"
item1 `onActivateLeaf` do
sel <- getSelectionFacet facetView store
case sel of
Just wrapper -> runReaderT
(goToDefinition (facetIdDescr wrapper)) ideR
otherwise -> sysMessage Normal "no selection"
menuShellAppend theMenu item1
menuPopup theMenu Nothing
widgetShowAll theMenu
return True
else if button == LeftButton && click == DoubleClick
then do sel <- getSelectionFacet facetView store
case sel of
Just wrapper -> runReaderT (goToDefinition
(facetIdDescr wrapper)) ideR
otherwise -> sysMessage Normal "no selection"
return True
else do
mbPane :: Maybe IDEInfo <- runReaderT getPane ideR
when (isJust mbPane) $ bringPaneToFront (fromJust mbPane)
return False
facetViewPopup _ _ _ _ = throwIDE "facetViewPopup wrong event type"
--}
{--
getScope :: IDEM (Scope,Bool)
getScope = do
(IDEModules _ _ treeView treeStore facetView facetStore localScopeB
packageScopeB worldScopeB blacklistB) <- getModules
rb1s <- liftIO $ toggleButtonGetActive localScopeB
rb2s <- liftIO $ toggleButtonGetActive packageScopeB
rb3s <- liftIO $ toggleButtonGetActive worldScopeB
cbs <- liftIO $ toggleButtonGetActive blacklistB
let scope = if rb1s
then Local
else if rb2s
then Package
else if rb3s
then World
else throwIDE
"ModulesPane.scopeSelection: No check button selected"
return (scope,cbs)
scopeSelection :: IDEAction
scopeSelection = do
mods@(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _)
<- getModules
mbTreeSelection <- liftIO $ getSelectionTree treeView treeStore
mbFacetSelection <- liftIO $ getSelectionFacet facetView facetStore
sc <- getScope
ts <- liftIO $ treeViewGetSelection treeView
liftIO $ treeSelectionUnselectAll ts
fillModulesList sc
let mbs = (case mbTreeSelection of
Nothing -> Nothing
Just (_,[]) -> Nothing
Just (_,((md,_):_)) -> Just (modu $ moduleIdMD md),
case mbFacetSelection of
Nothing -> Nothing
Just fw -> Just (symbolFromFacetWrapper fw))
selectNames mbs
liftIO $ bringPaneToFront mods
selectNames :: (Maybe Text, Maybe Symbol) -> IDEAction
selectNames (mbModuleName, mbIdName) = do
(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _)
<- getModules
case mbModuleName of
Nothing -> return ()
Just moduleName ->
let nameArray = breakAtDots [] moduleName
in do
mbTree <- liftIO $ treeStoreGetTreeSave treeStore []
case treePathFromNameArray mbTree nameArray [] of
Nothing -> return ()
Just treePath -> liftIO $ do
treeViewExpandToPath treeView treePath
sel <- treeViewGetSelection treeView
treeSelectionSelectPath sel treePath
col <- treeViewGetColumn treeView 0
treeViewScrollToCell treeView treePath (fromJust col)
(Just (0.3,0.3))
case mbIdName of
Nothing -> return ()
Just symbol -> do
mbFacetTree <- treeStoreGetTreeSave facetStore []
selF <- treeViewGetSelection facetView
case findPathFor symbol mbFacetTree of
Nothing -> sysMessage Normal "no path found"
Just path -> do
treeSelectionSelectPath selF path
col <- treeViewGetColumn facetView 0
treeViewScrollToCell facetView path (fromJust col)
(Just (0.3,0.3))
symbolFromFacetWrapper :: FacetWrapper -> Symbol
symbolFromFacetWrapper (Itself idDescr) = identifierID idDescr
symbolFromFacetWrapper (ConstructorW _ idDescr) = identifierID idDescr
symbolFromFacetWrapper (FieldW _ idDescr) = identifierID idDescr
symbolFromFacetWrapper (MethodW _ idDescr) = identifierID idDescr
symbolFromFacetWrapper (OrphanedData idDescr) = identifierID idDescr
reloadKeepSelection :: IDEAction
reloadKeepSelection = do
mbMod <- getPane
case mbMod of
Nothing -> return ()
Just mods@(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _)
-> do
mbTreeSelection <- liftIO $ getSelectionTree treeView treeStore
mbFacetSelection <- liftIO $ getSelectionFacet facetView facetStore
sc <- getScope
fillModulesList sc
liftIO $ treeStoreClear facetStore
let mbs = (case mbTreeSelection of
Nothing -> Nothing
Just (_,[]) -> Nothing
Just (_,((md,_):_)) -> Just (modu $ moduleIdMD md),
case mbFacetSelection of
Nothing -> Nothing
Just fw -> Just (symbolFromFacetWrapper fw))
selectNames mbs
treeStoreGetTreeSave :: TreeStore a -> TreePath -> IO (Maybe (Tree a))
treeStoreGetTreeSave treeStore treePath = catch (do
res <- treeStoreGetTree treeStore treePath
return (Just res)) (\ _ -> return Nothing)
findPathFor :: Symbol -> Maybe (Tree FacetWrapper) -> Maybe TreePath
findPathFor symbol (Just (Node _ forest)) =
foldr ( \i mbTreePath -> findPathFor' [i] (forest !! i) mbTreePath)
Nothing [0 .. ((length forest) - 1)]
where
findPathFor' :: TreePath -> Tree FacetWrapper -> Maybe TreePath -> Maybe TreePath
findPathFor' _ node (Just p) = Just p
findPathFor' path (Node wrap sub) Nothing =
if identifierID (facetIdDescr wrap) == symbol
then Just (reverse path)
else
foldr ( \i mbTreePath -> findPathFor' (i:path) (sub !! i) mbTreePath)
Nothing [0 .. ((length sub) - 1)]
findPathFor symbol Nothing = Nothing
treePathFromNameArray :: Maybe ModTree -> [Text] -> [Int] -> Maybe [Int]
treePathFromNameArray (Just tree) [] accu = Just (reverse accu)
treePathFromNameArray (Just tree) (h:t) accu =
let names = map (\t -> fst $ rootLabel t) (subForest tree)
mbIdx = elemIndex h names
in case mbIdx of
Nothing -> Nothing
Just i -> treePathFromNameArray (Just (subForest tree !! i)) t (i:accu)
treePathFromNameArray Nothing _ _ = Nothing
--}
{--
extractSuperclasses :: Text -> [Text]
extractSuperclasses str =
let parseRes = trace ("now extracting superclasses for " ++ show str)
parse superclassParser "" str
in case parseRes of
Left err -> throwIDE $show err
Right l -> trace ("found " ++ show l) l
lexer = haskell
lexeme = P.lexeme lexer
whiteSpace = P.whiteSpace lexer
symbol = P.symbol lexer
superclassParser :: CharParser () [Text]
superclassParser = do
symbol "class"
whiteSpace
try (do
sc <- classDefParser
symbol "=>"
return [sc])
<|> try (do
symbol "("
scs <- sepBy classDefParser (char ',')
symbol ")"
symbol "=>"
return scs)
<|> return []
<?> "superclasses"
classDefParser :: CharParser () Text
classDefParser = do
whiteSpace
c <- oneOf['A'..'Z']
cs <- many (alphaNum <|> oneOf "_'.")
many typeVarParser
return (c:cs)
<?> "classDef"
typeVarParser :: CharParser () Text
typeVarParser = do
whiteSpace
c <- oneOf['a'..'z']
cs <- many (alphaNum <|> oneOf "_'.")
return (c:cs)
<?> "typeVar"
--}
|
JPMoresmau/leksah
|
src/IDE/Pane/ClassHierarchy.hs
|
gpl-2.0
| 39,087
| 0
| 15
| 14,865
| 2,325
| 1,209
| 1,116
| -1
| -1
|
--
-- This module provides an implementation to interface with
-- libXL, a flavor of toolstack for the Xen Hypervisor.
-- Most calls here simply invoke the xl command line utility
-- for simplicity, although one could bypass the xl command line
-- utility by using the Haskell FFI (foreign function interface)
-- to hook directly into libXL if more robust features are desired.
--
-- Author: Chris Rogers <rogersc@ainfosec.com>
--
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module XenMgr.Connect.Xl
(
--xl domain control
start
, shutdown
, unpause
, pause
, destroy
, resumeFromSleep
, reboot
, sleep
, hibernate
, suspendToFile
, resumeFromFile
, changeCd
, setMemTarget
, acpiState
, waitForAcpiState
, waitForState
, signal
--xl/toolstack queries
, domainID
, domainXsPath
, getDomainId
, isRunning
, isFocused
, state
--dbus stuff
, onNotify
, onNotifyRemove
, setNicBackendDom
, removeNic
, addNic
, connectVif
, changeNicNetwork
, wakeIfS3
) where
import Control.Exception as E
import Control.Applicative
import Control.Monad
import Control.Monad.Error hiding (liftIO)
import Control.Concurrent
import Data.String
import Data.List as L
import Data.Typeable
import Data.Text as T
import Data.Maybe
import Data.Int ( Int32 )
import Vm.Types
import Vm.DmTypes
import Vm.State
import Tools.Misc as TM
import Tools.XenStore
import Tools.Log
import Tools.Process
import Tools.Text as TT
import System
import System.Cmd
import System.Process
import System.Directory
import System.IO
import XenMgr.Rpc
import XenMgr.Db
import XenMgr.Errors
import XenMgr.Connect.NetworkDaemon
import qualified Data.Map as M
import Text.Printf
import System.Posix.Signals
import System.Posix.Types ( CPid, ProcessID )
type NotifyHandler = [String] -> Rpc ()
type Params = [(String, String)]
-- | Exception type used for all xl command failures.  Carries a
-- human-readable message (possibly several lines joined with \"<br>\"
-- for UI display, see 'start').
data XlExceptionClass = XlException String
    deriving (Typeable)

instance Exception XlExceptionClass

instance Show XlExceptionClass where
    -- Render just the wrapped message.
    show e@(XlException s) = show s
-- | Throw an 'XlException' carrying @msg@ unless the command exited
-- cleanly; a successful exit is a no-op.
bailIfError :: ExitCode -> String -> IO ()
bailIfError code msg
    | code == ExitSuccess = return ()
    | otherwise           = throw $ XlException msg
-- | Fire the ACPI s3resume trigger at the domain and wait up to ten
-- seconds for it to report ACPI state 0 (running).  The exit status of
-- the trigger command itself is deliberately ignored; success is
-- judged solely by the subsequent ACPI state poll.
resumeFromSleep :: Uuid -> IO Bool
resumeFromSleep uuid = do
    domid <- getDomainId uuid
    _ <- system ("xl trigger " ++ domid ++ " s3resume")
    waitForAcpiState uuid 0 (Just 10)
-- | Numeric domain id for a VM, or 'Nothing' when the domain is not
-- currently instantiated (xl reports no id for it).
domainID :: Uuid -> IO (Maybe DomainID)
domainID uuid = do
    idStr <- getDomainId uuid
    case idStr of
      "" -> return Nothing
      s  -> return $ Just (read s :: DomainID)
-- | Wait (polling once per second) until the domain reaches the
-- expected ACPI state.  @timeout@ is in seconds; 'Nothing' means wait
-- forever.  Returns 'True' when the state was reached, 'False' on
-- timeout.
waitForAcpiState :: Uuid -> Int -> Maybe Int -> IO Bool
waitForAcpiState uuid expected timeout = do
    s <- acpiState uuid
    case (s, timeout) of
      -- reached the expected state
      (x, _) | x == expected -> return True
      -- timeout budget exhausted
      (_, Just t) | t <= 0 -> return False
      -- still waiting: sleep one second and retry with a smaller budget
      (_, Just t) | t > 0 -> do liftIO (threadDelay $ 10^6)
                                waitForAcpiState uuid expected (Just $ t-1)
      -- no timeout: poll indefinitely
      (_, Nothing) -> liftIO (threadDelay $ 10^6) >> waitForAcpiState uuid expected Nothing
      _ -> error "impossible"
-- | Current ACPI power state of the domain.
--
-- We need to concoct ACPI states (5 or 0) for fully PV domains, since
-- get_hvm_param doesn't apply to them:
--   * no domid at all   -> 5 (domain is off)
--   * xl reports \"-1\" -> 0 (likely fully PV, treat as running)
--   * otherwise         -> whatever state xl reports
acpiState :: Uuid -> IO AcpiState
acpiState uuid = do
    domid <- getDomainId uuid
    case domid of
      "" -> return 5 -- no domid indicates domain is off, so acpi state 5
      _ -> do
        acpi_state <- readProcessOrDie "xl" ["acpi-state", domid] []
        -- strip the trailing newline from xl's output
        let plain_acpi = (T.unpack (T.stripEnd (T.pack acpi_state)))
        case plain_acpi of
          "-1" -> return 0 -- xl returns -1 for fully PV domains; report 0 (running)
          _ -> return $ (read plain_acpi :: Int)
-- | True when the domain currently holds input focus, as recorded by
-- the switcher under <domain-xenstore-path>/switcher/have_focus.
-- A shut-down domain never has focus.
isFocused :: Uuid -> IO Bool
isFocused uuid = do
    st   <- state uuid
    domP <- domainXsPath uuid
    if st == Shutdown
       then return False
       else liftIO $ do
              v <- xsRead (domP ++ "/switcher/have_focus")
              return $ maybe False (== "1") v
-- | Xenstore directory for this VM's domain, e.g. \"/local/domain/5\";
-- falls back to \"/local/domain/unknown\" when the domain has no id.
domainXsPath :: Uuid -> IO String
domainXsPath uuid = do
    domid <- getDomainId uuid
    return $ "/local/domain/" ++ if null domid then "unknown" else domid
--The following functions are all domain lifecycle operations, and self-explanatory

-- | Ask the guest to reboot cooperatively; if that fails (e.g. no PV
-- drivers responding), force it with @xl reboot -F@.
reboot :: Uuid -> IO ()
reboot uuid = do
    domid <- getDomainId uuid
    rc <- system ("xl reboot " ++ domid)
    unless (rc == ExitSuccess) $ do
        _ <- system ("xl reboot -F " ++ domid)
        return ()
-- | Shut the domain down.
--
-- If the guest advertises ACPI power-button support (the
-- hvm-powerbutton-enable node under the stubdom's device-model path)
-- we first try a cooperative @xl shutdown -w@; should that fail we
-- record \"poweroff\" in hvm-shutdown, press the virtual power button
-- and finally force the shutdown.  Guests without the node get a
-- plain @xl shutdown -c -w@.
shutdown :: Uuid -> IO ()
shutdown uuid =
    do
      domid <- getDomainId uuid
      stubdomid <- getStubDomainID uuid
      -- device-model xenstore directory for this guest
      let xs_path = "/local/domain/" ++ stubdomid ++ "/device-model/" ++ domid
      gpe <- xsRead (xs_path ++ "/hvm-powerbutton-enable")
      case gpe of
        Just g -> do exitCode <- system ("xl shutdown -w " ++ domid)
                     case exitCode of
                       ExitSuccess -> return ()
                       -- cooperative shutdown failed: power-button, then force
                       _ -> do xsWrite (xs_path ++ "/hvm-shutdown") "poweroff"
                               _ <- system ("xl trigger " ++ domid ++ " power")
                               _ <- system ("xl shutdown -F -w " ++ domid)
                               return ()
        Nothing -> do system ("xl shutdown -c -w " ++ domid)
                      return ()
-- | Pause the domain's vcpus via @xl pause@.
--
-- Bug fix: the failure message previously read \"Error parsing
-- domain.\" (a copy-paste slip); it now correctly says \"pausing\".
pause :: Uuid -> IO ()
pause uuid =
    do
      domid <- getDomainId uuid
      exitCode <- system ("xl pause " ++ domid)
      bailIfError exitCode "Error pausing domain."
-- | Unpause the domain's vcpus; a no-op when the domain has no id
-- (i.e. is not instantiated).
unpause :: Uuid -> IO ()
unpause uuid = do
    domid <- getDomainId uuid
    unless (null domid) $ do
        rc <- system ("xl unpause " ++ domid)
        bailIfError rc "Error unpausing domain."
-- | Pid (as a string) of the @xl create@ process driving this VM, or
-- the empty string when no such process is running.
getXlProcess :: Uuid -> IO String
getXlProcess uuid = do
    (rc, out, _) <- readProcessWithExitCode_closeFds "pgrep" ["-f", "^xl create " ++ configPath uuid ++ " -p"] ""
    return $ case rc of
               ExitSuccess -> TT.strip out
               _           -> ""
-- | Deliver SIGUSR1 to the xl process managing this VM, in order to
-- unblock it from a reboot.  Logs and does nothing when no xl process
-- can be found.
signal :: Uuid -> IO ()
signal uuid = do
    pid <- getXlProcess uuid
    case pid of
      "" -> do
        info $ "Couldn't find xl process for uuid: " ++ (show uuid)
        return ()
      _  -> do
        info $ "signal xl process for uuid: " ++ (show uuid) ++ " pid: " ++ pid
        _ <- readProcessOrDie "kill" ["-s", "SIGUSR1", pid] ""
        return ()
--It should be noted that by design, we start our domains paused to ensure all the
--backend components are created and xenstore nodes are written before the domain
--begins running.

-- | Create the domain (paused) with @xl create <config> -p@.
--
-- Refuses to create a second xl process for a VM that already has one,
-- and only acts when the recorded state is 'Shutdown'.  On failure the
-- recorded state is reset to 'Shutdown' and xl's stderr is rethrown as
-- an 'XlException' with lines joined by \"<br>\" for UI display.
start :: Uuid -> IO ()
start uuid =
    do
      --if domain already has a pid don't try to create another.
      pid <- getXlProcess uuid
      state <- state uuid
      if pid == ""
        then do
          case state of
            Shutdown -> do
              -- capture stderr so a failed create can be reported verbatim
              (_, _, Just err, handle) <- createProcess (proc "xl" ["create", configPath uuid, "-p"]){std_err = CreatePipe,
                                                                                                     close_fds = True}
              ec <- waitForProcess handle
              stderr <- hGetContents err
              case ec of
                ExitSuccess -> return ()
                _ -> do
                  updateVmDomainStateIO uuid Shutdown
                  throw $ XlException $ L.intercalate "<br>" $ L.lines stderr
            -- any state other than Shutdown: silently refuse
            _ -> do return ()
        else do
          throw $ XlException "Don't try to start a guest twice"
--if domain has no domid, the domain is already dead. But we should make sure
--the xenstore state is set to 'shutdown'. Sometimes when domains crash on startup,
--UI shows either 'starting' or 'off', but the internal state is 'creating-devices',
--preventing further boots

-- | Hard-destroy the domain, or, when it is already gone, repair a
-- possibly stale xenstore state node so the VM can be booted again.
destroy :: Uuid -> IO ()
destroy uuid = do
    domid <- getDomainId uuid
    case domid of
      -- already dead: force the recorded state to "shutdown" if needed
      "" -> do maybe_state <- xsRead ("/state/" ++ show uuid ++ "/state")
               case maybe_state of
                 Just state -> if state /= "shutdown" then do xsWrite ("/state/" ++ show uuid ++ "/state") "shutdown" else return ()
                 Nothing -> return ()
      -- live domain: tear it down
      _ -> do exitCode <- system ("xl destroy " ++ domid)
              bailIfError exitCode "Error destroying domain."
-- | Ask the domain to enter ACPI S3 via the xl sleep trigger.
sleep :: Uuid -> IO ()
sleep uuid = do
    domid <- getDomainId uuid
    rc <- system ("xl trigger " ++ domid ++ " sleep")
    bailIfError rc "Error entering s3."
-- | Put the domain into S4 (hibernate) via xl.
--
-- Bug fix: the subcommand was misspelled (\"xl hiberate\"), so the
-- command could never be recognised by xl and hibernate always failed;
-- it is now spelled \"hibernate\".
hibernate :: Uuid -> IO ()
hibernate uuid =
    do
      domid <- getDomainId uuid
      exitCode <- system ("xl hibernate " ++ domid)
      bailIfError exitCode "Error entering s4."
-- | Save the domain's state to @file@ with @xl save@, recording the
-- config path alongside so it can be restored later.
suspendToFile :: Uuid -> FilePath -> IO ()
suspendToFile uuid file = do
    domid <- getDomainId uuid
    rc <- system ("xl save " ++ domid ++ " " ++ file ++ " " ++ configPath uuid)
    bailIfError rc "Error suspending to file."
-- | Restore a domain from a previously saved image.  @paused@ leaves
-- the restored domain paused; @delete@ removes the image file once the
-- restore command has run.  The restore's exit code is ignored.
resumeFromFile :: Uuid -> FilePath -> Bool -> Bool -> IO ()
resumeFromFile uuid file delete paused = do
    let pauseFlag = if paused then "-p" else ""
    _ <- system ("xl restore " ++ pauseFlag ++ " " ++ configPath uuid ++ " " ++ file)
    when delete $ removeFile file
--Ask xl directly for the domid

-- | Domain id as a string, or \"\" when the domain does not exist
-- (xl prints -1 in that case).
getDomainId :: Uuid -> IO String
getDomainId uuid = do
    domid <- readProcessOrDie "xl" ["uuid-to-domid", show uuid] []
    -- strip the trailing newline from xl's output
    let plain_domid = (T.unpack (T.stripEnd (T.pack domid)))
    case plain_domid of
      "-1" -> return ("")
      _ -> return (plain_domid) --remove trailing newline
-- | Domid of the VM's stubdom (device model), defaulting to \"0\"
-- (dom0) when no stubdomid node is present in xenstore.
getStubDomainID :: Uuid -> IO String
getStubDomainID uuid = do
    mbId <- xsRead ("/xenmgr/vms/" ++ show uuid ++ "/stubdomid")
    return (fromMaybe "0" mbId)
-- | Swap the iso in the domain's cd drive slot (device \"hdc\") via
-- @xl cd-insert@.
changeCd :: Uuid -> String -> IO ()
changeCd uuid path = do
    domid <- getDomainId uuid
    (rc, _, _) <- readProcessWithExitCode_closeFds "xl" ["cd-insert", domid, "hdc", path] []
    bailIfError rc "Error changing cd."
-- | Frontend xenstore path of the given nic device, looked up under
-- both the wired (vif) and wireless (vwif) device directories;
-- 'Nothing' when the device id appears in neither.
nicFrontendPath :: Uuid -> NicID -> IO (Maybe String)
nicFrontendPath uuid (XbDeviceID nicid) =
    do domainP <- domainXsPath uuid
       vifs <- liftIO . xsDir $ domainP ++ "/device/vif"
       vwifs <- liftIO . xsDir $ domainP ++ "/device/vwif"
       let nicid_str = show nicid
       -- multiway guard: prefer the wired directory, then wireless
       case () of
         _ | nicid_str `elem` vifs -> return $ Just (domainP ++ "/device/vif/" ++ nicid_str)
           | nicid_str `elem` vwifs -> return $ Just (domainP ++ "/device/vwif/" ++ nicid_str)
           | otherwise -> return Nothing
-- | Point an existing vif at a different backend bridge by rewriting
-- the \"bridge\" node under the vif's backend path.  Silently does
-- nothing when the vif has no backend recorded.
changeNicNetwork :: Uuid -> NicID -> Network -> IO ()
changeNicNetwork uuid nid@(XbDeviceID nicid) network = do
    _ <- getDomainId uuid -- result unused; call retained for parity
    domainP <- domainXsPath uuid
    mbBack <- xsRead (domainP ++ "/device/vif/" ++ show nicid ++ "/backend")
    case mbBack of
      Nothing -> return ()
      Just b  -> xsWrite (b ++ "/bridge") (show network)
-- | Drive a vif's frontend \"disconnect\" node: \"0\" connects the
-- interface, \"1\" disconnects it.  Logs a warning when the frontend
-- path cannot be found.
connectVif :: Uuid -> NicID -> Bool -> IO ()
connectVif uuid nicid connect = do
    _ <- domainXsPath uuid -- result unused; call retained for parity
    mbFront <- nicFrontendPath uuid nicid
    case mbFront of
      Nothing -> warn $ "failed to lookup nic " ++ show nicid
      Just fp -> liftIO $ xsWrite (fp ++ "/disconnect") flag
  where
    flag = if connect then "0" else "1"
-- | If the domain is currently in ACPI S3 (asleep), wake it up;
-- otherwise do nothing.  The result of the resume is discarded.
wakeIfS3 :: Uuid -> IO ()
wakeIfS3 uuid = do
    s <- acpiState uuid
    when (s == 3) $ do
        _ <- resumeFromSleep uuid
        return ()
--Adjust memory through the balloon driver, unreliable, requires correct
--paravirt drivers. Implemented here in the event ballooning is ever desired
--and implemented correctly

-- | Set the domain's memory target to @mbs@ megabytes via @xl mem-set@.
setMemTarget :: Uuid -> Int -> IO ()
setMemTarget uuid mbs = do
    domid <- getDomainId uuid
    rc <- system ("xl mem-set " ++ domid ++ " " ++ show mbs ++ "m")
    bailIfError rc "Error setting mem target."
-- | Detach a nic from the domain via @xl network-detach@.  The backend
-- domid argument is unused here (detach is keyed by device id), and
-- the command's exit status is ignored.
removeNic :: Uuid -> NicID -> DomainID -> IO ()
removeNic uuid nic back_domid = do
    domid <- getDomainId uuid
    _ <- system ("xl network-detach " ++ domid ++ " " ++ show nic)
    return ()
-- | Hot-attach a new nic to the domain via @xl network-attach@.
--
-- The device type is \"ioemu\" when the VM has a stubdom (emulated
-- path) and \"vif\" otherwise; the wireless flag is derived from the
-- network name containing \"wifi\".  The command's exit code and
-- output are ignored.
addNic :: Uuid -> NicID -> String -> DomainID -> IO ()
addNic uuid nic net back_domid = do
    domid <- getDomainId uuid
    stubdomid <- (liftIO $ xsRead ("/xenmgr/vms/" ++ show uuid ++ "/stubdomid"))
    -- presence of a stubdomid node implies an emulated (ioemu) nic
    let typ = isJust stubdomid
    let wireless = L.isInfixOf "wifi" net
    (ec,stdout,_) <- readProcessWithExitCode_closeFds "xl" ["network-attach", domid, printf "bridge=%s" net, printf "backend=%s" (show back_domid),
                       if typ then "type=ioemu" else "type=vif", if wireless then "wireless=1" else "wireless=0", printf "devid=%s" (show nic)] []
    return ()
-- | Re-home a nic onto a different backend domain: detach it, then
-- attach a replacement backed by @back_domid@.
--
-- Bug fix: the detach command previously spliced in @show domid@, but
-- 'getDomainId' already returns a 'String', so the domid was wrapped
-- in quotes (e.g. @xl network-detach \"5\" ...@) and the command
-- failed; the domid is now used directly, matching 'removeNic'.  The
-- second exit-code binding also shadowed the first and has been given
-- its own name.
setNicBackendDom :: Uuid -> NicID -> DomainID -> IO ()
setNicBackendDom uuid nic back_domid = do
    domid <- getDomainId uuid
    rcDetach <- system ("xl network-detach " ++ domid ++ " " ++ show nic)
    bailIfError rcDetach "Error detatching nic from domain."
    rcAttach <- system ("xl network-attach " ++ domid ++ " backend=" ++ show back_domid)
    bailIfError rcAttach "Error attaching new nic to domain."
--Implement signal watcher to fire off handler upon receiving
--notify message over dbus

-- | Register a dbus watcher: whenever xenmgr emits a \"notify\" signal
-- for this VM whose status string begins with @msgname@, run @action@
-- on the colon-separated arguments that follow the message name.
onNotify :: Uuid -> String -> NotifyHandler -> Rpc ()
onNotify uuid msgname action =
    let rule = matchSignal "com.citrix.xenclient.xenmgr" "notify"
    in
      rpcOnSignal rule process
    where
      process _ signal =
          let [uuidV, statusV] = signalArgs signal
              uuid' = let Just v = fromVariant $ uuidV in v
              status = let Just v = fromVariant $ statusV in v
              splits = TM.split ':' status
          in
            -- only react to signals concerning our own VM
            when (uuid == uuid') $
              case splits of
                (msg:args) | msg == msgname -> action args
                _ -> return ()
--Remove the handler setup by onNotify

-- | Unregister a watcher previously installed by 'onNotify'; the
-- matching rule and dispatch logic mirror 'onNotify' exactly.
onNotifyRemove :: Uuid -> String -> NotifyHandler -> Rpc ()
onNotifyRemove uuid msgname action =
    let rule = matchSignal "com.citrix.xenclient.xenmgr" "notify"
    in
      rpcOnSignalRemove rule process
    where
      process _ signal =
          let [uuidV, statusV] = signalArgs signal
              uuid' = let Just v = fromVariant $ uuidV in v
              status = let Just v = fromVariant $ statusV in v
              splits = TM.split ':' status
          in
            -- only react to signals concerning our own VM
            when (uuid == uuid') $
              case splits of
                (msg:args) | msg == msgname -> action args
                _ -> return ()
--Path to the xl config file generated on domain creation
configPath uuid = "/tmp/xenmgr-xl-" ++ show uuid

-- Path to the companion device-model (stubdom) config file.
stubConfigPath uuid = "/tmp/xenmgr-xl-" ++ show uuid ++ "-dm"
--Check the domain state to see if the domain is running

-- | A VM counts as running unless its recorded state is Shutdown or
-- Rebooted.
isRunning :: (MonadRpc e m) => Uuid -> m Bool
isRunning uuid = (not . (`elem` [Shutdown, Rebooted])) `fmap` (liftIO $ state uuid)
-- Xl writes state updates to the xenstore node "/state/<uuid>/state";
-- xenmgr sets up and maintains a watch on that node to detect changes.

-- | Current lifecycle state of the VM as recorded under
-- /state/<uuid>/state.  An absent node is reported as shutdown.
state :: Uuid -> IO VmState
state uuid = do
    mbState <- xsRead ("/state/" ++ show uuid ++ "/state")
    case mbState of
      Nothing -> return $ stateFromStr "shutdown"
      Just st -> do
        debug $ "active vm " ++ show uuid ++ " state = " ++ show st
        return $ stateFromStr st
--Wait for provided duration for the domain to reach a specific state,
--returning false if that never happens.

-- | Poll once per second until the domain's recorded state equals
-- @expected@.  @timeout@ is in seconds; 'Nothing' waits forever.
waitForState :: Uuid -> VmState -> Maybe Int -> IO Bool
waitForState uuid expected timeout = do
    s <- state uuid
    case (s, timeout) of
      -- got right state, exit
      (x, _) | x == expected -> return True
      -- we timed out while waiting for state
      (_, Just t) | t <= 0 -> return False
      -- we continue waiting with lesser timeout
      (_, Just t) | t > 0 -> do (threadDelay $ 10^6)
                                waitForState uuid expected (Just $ t-1)
      -- we have no timeout, wait indefinitely
      (_, Nothing) -> (threadDelay $ 10^6) >> waitForState uuid expected Nothing
      _ -> error "impossible"
|
OpenXT/manager
|
xenmgr/XenMgr/Connect/Xl.hs
|
gpl-2.0
| 17,357
| 0
| 23
| 5,261
| 4,648
| 2,292
| 2,356
| 362
| 5
|
import Probability
import Data.Frame
-- Reparameterised normal draw: sample a standard normal and
-- scale/shift it, so the drawn value shifts when mu or sigma changes.
normal' mu sigma = do
    z0 <- normal 0.0 1.0
    return (mu + z0 * sigma)
-- Hierarchical (varying-intercept, varying-slope) radon model:
-- per-county intercepts a and slopes b are drawn from shared
-- hyperpriors, and each log-radon observation is normal around
-- a[county] + b[county]*floor with common noise eps.
model floor_values county_code_values log_radon_data = do
    let n_counties = length $ nub $ county_code_values
    -- hyperpriors for the county-level intercepts and slopes
    mu_a <- normal 0.0 (100.0**2.0)
    sigma_a <- half_cauchy 0.0 5.0
    mu_b <- normal 0.0 (100.0**2.0)
    sigma_b <- half_cauchy 0.0 5.0
    -- This basically associates 0..(n-1) with normal a/b
    a <- iid n_counties (normal' mu_a sigma_a)
    b <- iid n_counties (normal' mu_b sigma_b)
    -- This constructs the distribution of predicted values given county_code and floor
    eps <- half_cauchy 0.0 5.0
    let dist county_code floor = normal (a!!county_code + b!!county_code*floor) eps
    -- quantities reported during sampling
    let loggers = ["mu_a" %=% mu_a, "sigma_a" %=% sigma_a, "mu_b" %=% mu_b, "sigma_b" %=% sigma_b]
    -- condition each observation on its county/floor-specific normal
    log_radon_data ~> independent [ dist county_code floor | (floor,county_code) <- zip floor_values county_code_values]
    return loggers
-- Load the radon data set and run MCMC over the hierarchical model.
main = do
    radon <- readTable "radon.csv"
    -- pull the three columns the model needs, with explicit types
    let floor_values = radon $$ ("floor", AsDouble)
        county_code_values = radon $$ ("county_code", AsInt)
        log_radon_data = radon $$ ("log_radon", AsDouble)
    mcmc $ model floor_values county_code_values log_radon_data
|
bredelings/BAli-Phy
|
tests/prob_prog/examples.3/glm_hierarchical_model/Main.hs
|
gpl-2.0
| 1,274
| 0
| 15
| 256
| 388
| 189
| 199
| 24
| 1
|
{-# LANGUAGE PatternGuards #-}
module Optimize ( optimize,
findToDo, findNamedSubexpression,
findNamedScalar, findFFTtodo, findFFTinputtodo )
where
import Expression
import Statement
import qualified Data.Set as Set
-- | Cap on how many candidate subexpressions the searches below will
-- examine ('findToDo' truncates its candidate lists to this length).
numtotake :: Int
numtotake = 20000
-- | All two-element sublists in (later, earlier) order: for each
-- element y of the input, every element z that follows it yields the
-- pair [z, y].
subsq :: [a] -> [[a]]
subsq []     = []
subsq (y:ys) = [ [z, y] | z <- ys ] ++ subsq ys
-- | Search for a named scalar subexpression to hoist.  The volume, the
-- dV scalar, and the dr variable are tried explicitly (in that order)
-- before a depth-first search for any other named scalar 'Var'.
findNamedScalar :: Type b => Expression b -> Maybe Exprn
findNamedScalar xxxx = myconcat [find volume,
                                 find mydV,
                                 find mydr,
                                 searchExpressionDepthFirst Set.empty helper xxxx]
  where helper e@(Var _ _ _ _ (Just _)) | ES _ <- mkExprn e = Just $ mkExprn e
        helper _ = Nothing
        -- Report a known scalar only if it literally occurs in the input.
        find :: Expression Scalar -> Maybe Exprn
        find x = if hasexpression x xxxx then Just $ mkExprn x else Nothing
        mydV = substitute volume (scalarVariable volume) dVscalar
        mydr = substitute dVscalar (scalarVariable dVscalar) $ var "dr" "\\Delta r" $ dVscalar ** (1.0/3)
-- | Depth-first search for the first named subexpression: a 'Var'
-- that carries a definition ('Just' in its last field).
findNamedSubexpression :: Type b => Expression b -> Maybe Exprn
findNamedSubexpression = searchExpressionDepthFirst Set.empty named
  where named e = case e of
          Var _ _ _ _ (Just _) -> Just (mkExprn e)
          _                    -> Nothing
-- In the following, we refuse to "find" a subexpression that has a
-- "k" in it, according to the hasK function. This is to avoid
-- certain issues where we are unable to set k to zero correctly,
-- since we have created a variable that happens to be zero when k ==
-- 0, but which is divided by k (or by k^2). This is a hokey
-- overkill, but I don't have a better idea.
-- | Look for a common subexpression worth hoisting into a temporary.
-- Only sums and products with at least two terms are considered.
-- Candidates are two-term combinations (via 'subsq') of terms that
-- contain variables but no FFTs and no k (see the comment above), and
-- a candidate is accepted only when eliminating it would shrink the
-- total variable count ('ithelps').  Candidate lists are truncated to
-- 'numtotake' entries to bound the search.
findToDo :: Type b => Set.Set String -> [Exprn] -> Expression b -> Maybe Exprn
findToDo i everything = searchExpression i helper
  where helper (Sum _ ii) | Set.size ii < 2 = Nothing
        helper (Product _ ii) | Set.size ii < 2 = Nothing
        helper (Sum s _) | todo:_ <- filter simplifiable subes = Just todo
          where subes = map (mkExprn . pairs2sum) $ take numtotake $ subsq $
                        take numtotake $ filter (\(_,e) -> countVars [mkExprn e] > 0 &&
                                                           not (hasFFT e) &&
                                                           not (hasK e)) $ sum2pairs s
                -- Worth doing only for 2-variable candidates that help.
                simplifiable sube = countVarssube > 1 && countVarssube < 3 && ithelps sube
                  where countVarssube = countVars [sube]
                        oldnum = countVars everything
                        ithelps e = countAfterRemovalE e everything + 1 < oldnum
        helper (Product p _) | todo:_ <- filter simplifiable subes = Just todo
          where subes = map (mkExprn . pairs2product) $ take numtotake $ subsq $
                        take numtotake $ filter (\(e,_) -> countVars [mkExprn e] > 0 &&
                                                           not (hasFFT e) &&
                                                           not (hasK e)) $ product2pairs p
                simplifiable sube = countVarssube > 1 && countVarssube < 3 && ithelps sube
                  where countVarssube = countVars [sube]
                        oldnum = countVars everything
                        ithelps e = countAfterRemovalE e everything + 1 < oldnum
        helper _ = Nothing
-- | Find an FFT (or inverse FFT) whose argument is already a plain,
-- definition-free variable -- i.e. a transform that is ready to be
-- evaluated into a temporary.
findFFTtodo :: Type b => Expression b -> Maybe Exprn
findFFTtodo = searchExpression Set.empty helper
  where helper e@(Expression _)
          | EK (Expression (FFT (Var _ _ _ _ Nothing))) <- mkExprn e = Just $ mkExprn e
          | ER (Expression (IFFT (Var _ _ _ _ Nothing))) <- mkExprn e = Just $ mkExprn e
        helper _ = Nothing
-- | Find an FFT/IFFT/Summate argument that itself contains no further
-- FFTs, i.e. an input that can be computed next.  Note that for FFT
-- and IFFT the returned expression is the *input* (ER for an FFT,
-- EK for an IFFT), while for Summate the whole sum is returned.
findFFTinputtodo :: Type b => Set.Set String -> Expression b -> Maybe Exprn
findFFTinputtodo i = searchExpressionDepthFirst i helper
  where helper e@(Expression _)
          | EK (Expression (FFT e')) <- mkExprn e, not (hasFFT e') = Just $ ER e'
          | ER (Expression (IFFT e')) <- mkExprn e, not (hasFFT e') = Just $ EK e'
          | ES (Expression (Summate e')) <- mkExprn e, not (hasFFT e') = Just $ mkExprn e
        helper _ = Nothing
-- | Turn a named scalar expression into a bare scalar variable of the
-- same name, dropping its definition so it is treated as an input.
-- Calling this on anything other than a 'Var' is a programming error;
-- the previous message ("oopsisse") gave no hint where it came from.
scalarVariable :: Expression Scalar -> Expression Scalar
scalarVariable (Var _ _ x t _) = Var CannotBeFreed x x t Nothing
scalarVariable _ = error "Optimize.scalarVariable: expected a named Var expression"
-- | Entry point: run 'optimizeScalars' (which currently drives every
-- other optimization) with an empty statement list and a zero
-- temporary counter, and discard the counter from the result.
optimize :: [Exprn] -> ([Statement], [Exprn])
optimize eee = (sts, out)
  where (sts, _, out) = optimizeScalars [] 0 eee
-- Evaluate any purely scalar expressions that we can. These are the
-- simplest computations we can do. Currently this function also
-- enables all other optimizations, but I'd like to separate them out
-- again later so we can easily experiment with different sets of
-- optimizations.
-- sts: statements emitted so far; n: counter used for naming fresh
-- temporaries; everything: expressions still to be generated.
optimizeScalars :: [Statement] -> Int -> [Exprn] -> ([Statement], Int, [Exprn])
optimizeScalars sts n everything = case myconcat $ map (mapExprn findNamedScalar) everything of
                                     Just (ES s@(Var _ _ _ _ (Just e))) ->
                                       -- First make sure the scalar's definition needs no
                                       -- further work; if so, assign it to a variable and
                                       -- substitute that variable everywhere.
                                       case optimizeHelper Set.empty n [] everything [mkExprn e] of
                                         ([],_,_) -> optimizeScalars (sts++[Initialize (ES v), Assign (ES v) (ES s)]) n
                                                     (map (mapExprn (mkExprn . substitute s v)) everything)
                                           where v = scalarVariable s
                                         (sts',n',everything') -> optimizeScalars (sts++sts') n' everything'
                                     Nothing -> optimizeHelper Set.empty (n :: Int) sts everything everything
                                     _ -> error "bad result in optimizeScalars"
-- Then we go looking for memory to save or ffts to evaluate...
-- | Repeatedly find something to do -- a common subexpression, an
-- evaluable FFT, or an FFT input -- and hoist it into a temporary via
-- 'handleSubstitution', until no candidate remains.  The candidates
-- are tried in priority order; an unconstrained 'findFFTinputtodo'
-- pass is added only once some temporaries (i) already exist.
optimizeHelper :: Set.Set String -> Int -> [Statement] -> [Exprn] -> [Exprn]
                  -> ([Statement], Int, [Exprn])
optimizeHelper i n sts everything e = case handleSubstitution sts n everything e todos of
                                        Nothing -> (sts, n, everything)
                                        Just (vs, sts', n', everything', e') -> optimizeHelper vs n' sts' everything' e'
    where todos = if Set.size i == 0
                  then [myconcat $ map (mapExprn (findToDo i everything)) e,
                        myconcat $ map (mapExprn findFFTtodo) e,
                        myconcat $ map (mapExprn (findFFTinputtodo i)) e]
                  else [myconcat $ map (mapExprn (findToDo i everything)) e,
                        myconcat $ map (mapExprn findFFTtodo) e,
                        myconcat $ map (mapExprn (findFFTinputtodo i)) e,
                        myconcat $ map (mapExprn (findFFTinputtodo Set.empty)) e]
-- | Perform the first substitution found in the prioritized candidate
-- list: emit Initialize/Assign statements for a fresh temporary
-- (reusing the candidate's own name when it already is a 'Var'), bump
-- the temporary counter, and substitute the new variable into both
-- expression lists.  Returns Nothing when every candidate is Nothing.
-- The three non-trivial clauses are identical up to the space of the
-- candidate (KSpace / RealSpace / Scalar).
handleSubstitution :: [Statement] -> Int -> [Exprn] -> [Exprn] -> [Maybe Exprn]
                      -> Maybe (Set.Set String, [Statement], Int, [Exprn], [Exprn])
handleSubstitution _ _ _ _ [] = Nothing
handleSubstitution sts n e1 e2 (Nothing:todos) = handleSubstitution sts n e1 e2 todos
-- k-space candidate: store into a freeable k-space temporary.
handleSubstitution sts n e1 e2 (Just (EK ke):_) = Just (varSet v,
                                                        sts++[Initialize (EK v), Assign (EK v) (EK ke)],
                                                        n+1,
                                                        map (mapExprn (mkExprn . substitute ke v)) e1,
                                                        map (mapExprn (mkExprn . substitute ke v)) e2)
    where v :: Expression KSpace
          v = case ke of
              Var _ xi x t _ -> Var IsTemp xi x t Nothing
              _ -> Var IsTemp ("ktemp" ++ show n++"[i]")
                              ("ktemp"++show n)
                              ("\\tilde{f}_{" ++ show n ++ "}") Nothing
-- real-space candidate: store into a freeable real-space temporary.
handleSubstitution sts n e1 e2 (Just (ER re):_) = Just (varSet v,
                                                        sts++[Initialize (ER v), Assign (ER v) (ER re)],
                                                        n+1,
                                                        map (mapExprn (mkExprn . substitute re v)) e1,
                                                        map (mapExprn (mkExprn . substitute re v)) e2)
    where v :: Expression RealSpace
          v = case re of
              Var _ xi x t _ -> Var IsTemp xi x t Nothing
              _ -> Var IsTemp ("rtemp" ++ show n++"[i]")
                              ("rtemp"++show n)
                              ("f_{" ++ show n ++ "}") Nothing
-- scalar candidate: scalars are never freed (CannotBeFreed).
handleSubstitution sts n e1 e2 (Just (ES se):_) = Just (varSet v,
                                                        sts++[Initialize (ES v), Assign (ES v) (ES se)],
                                                        n+1,
                                                        map (mapExprn (mkExprn . substitute se v)) e1,
                                                        map (mapExprn (mkExprn . substitute se v)) e2)
    where v :: Expression Scalar
          v = case se of
              Var _ xi x t _ -> Var IsTemp xi x t Nothing
              _ -> Var CannotBeFreed ("s" ++ show n)
                                     ("s"++show n)
                                     ("s_{" ++ show n ++ "}") Nothing
|
droundy/deft
|
src/haskell/Optimize.hs
|
gpl-2.0
| 9,090
| 0
| 19
| 3,509
| 2,966
| 1,491
| 1,475
| 131
| 5
|
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./TPTP/AS.der.hs
Description : Abstract syntax for TPTP v6.4.0.11
Copyright : (c) Eugen Kuksa University of Magdeburg 2017
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Eugen Kuksa <kuksa@iks.cs.ovgu.de>
Stability : provisional
Portability : portable
Definition of abstract syntax for TPTP taken from [1]
References
[1] G. Sutcliffe et al.: The TPTP language grammar in BNF.
<http://www.cs.miami.edu/~tptp/TPTP/SyntaxBNF.html>
Note: The implemented version is saved at TPTP/Documents/SyntaxBNF.html
Note: The names of the data types are aligned with the names of the
grammar rules at this reference page (modulo case).
[2] C. Kaliszyk, G. Sutcliffe and F. Rabe:
TH1: The TPTP Typed Higher-Order Form with Rank-1 Polymorphism
<https://kwarc.info/people/frabe/Research/KRS_thf1_16.pdf>
Note: for further information on TF0, TF1, TH0 and TH1
-}
module TPTP.AS where
import Common.Id as Id
import Common.IRI
import Syntax.AS_Structured ()
import qualified Common.AS_Annotation as AS_Anno
import Data.Data
-- DrIFT command
{-! global: GetRange !-}
newtype BASIC_SPEC = Basic_spec [AS_Anno.Annoted TPTP]
deriving (Show, Ord, Eq, Data, Typeable)
-- Files
-- %----Files. Empty file is OK.
-- <TPTP_file> ::= <TPTP_input>*<Paste>
newtype TPTP = TPTP [TPTP_input]
deriving (Show, Ord, Eq, Data, Typeable)
-- <TPTP_input> ::= <annotated_formula> | <include>
data TPTP_input = Annotated_formula Annotated_formula
| TPTP_include Include
| TPTP_comment Comment
| TPTP_defined_comment DefinedComment
| TPTP_system_comment SystemComment
deriving (Show, Ord, Eq, Data, Typeable)
-- Comments
data Comment = Comment_line Token
| Comment_block Token
deriving (Show, Eq, Ord, Data, Typeable)
data DefinedComment = Defined_comment_line Token
| Defined_comment_block Token
deriving (Show, Eq, Ord, Data, Typeable)
data SystemComment = System_comment_line Token
| System_comment_block Token
deriving (Show, Eq, Ord, Data, Typeable)
-- %----Formula records
-- <annotated_formula> ::= <thf_annotated> | <tfx_annotated> | <tff_annotated> |
-- <tcf_annotated> | <fof_annotated> | <cnf_annotated> |
-- <tpi_annotated>
data Annotated_formula = AF_THF_Annotated THF_annotated
| AF_TFX_Annotated TFX_annotated
| AF_TFF_Annotated TFF_annotated
| AF_TCF_Annotated TCF_annotated
| AF_FOF_Annotated FOF_annotated
| AF_CNF_Annotated CNF_annotated
| AF_TPI_Annotated TPI_annotated
deriving (Show, Ord, Eq, Data, Typeable)
-- <tpi_annotated> ::= tpi(<name>,<formula_role>,<tpi_formula><annotations>).
data TPI_annotated = TPI_annotated Name Formula_role TPI_formula Annotations
deriving (Show, Ord, Eq, Data, Typeable)
-- <tpi_formula> ::= <fof_formula>
type TPI_formula = FOF_formula
-- <thf_annotated> ::= thf(<name>,<formula_role>,<thf_formula>
-- <annotations>).
data THF_annotated = THF_annotated Name Formula_role THF_formula Annotations
deriving (Show, Ord, Eq, Data, Typeable)
-- <tfx_annotated> ::= tfx(<name>,<formula_role>,<tfx_formula>
-- <annotations>).
data TFX_annotated = TFX_annotated Name Formula_role TFX_formula Annotations
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_annotated> ::= tff(<name>,<formula_role>,<tff_formula>
-- <annotations>).
data TFF_annotated = TFF_annotated Name Formula_role TFF_formula Annotations
deriving (Show, Ord, Eq, Data, Typeable)
-- <tcf_annotated> ::= tcf(<name>,<formula_role>,<tcf_formula>
-- <annotations>).
data TCF_annotated = TCF_annotated Name Formula_role TCF_formula Annotations
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_annotated> ::= fof(<name>,<formula_role>,<fof_formula>
-- <annotations>).
data FOF_annotated = FOF_annotated Name Formula_role FOF_formula Annotations
deriving (Show, Ord, Eq, Data, Typeable)
-- <cnf_annotated> ::= cnf(<name>,<formula_role>,<cnf_formula>
-- <annotations>).
data CNF_annotated = CNF_annotated Name Formula_role CNF_formula Annotations
deriving (Show, Ord, Eq, Data, Typeable)
-- | The name attached to an annotated formula, for any TPTP language.
name :: Annotated_formula -> Name
name (AF_THF_Annotated (THF_annotated n _ _ _)) = n
name (AF_TFX_Annotated (TFX_annotated n _ _ _)) = n
name (AF_TFF_Annotated (TFF_annotated n _ _ _)) = n
name (AF_TCF_Annotated (TCF_annotated n _ _ _)) = n
name (AF_FOF_Annotated (FOF_annotated n _ _ _)) = n
name (AF_CNF_Annotated (CNF_annotated n _ _ _)) = n
name (AF_TPI_Annotated (TPI_annotated n _ _ _)) = n
-- | The role (axiom, conjecture, ...) of an annotated formula, for
-- any TPTP language.
formulaRole :: Annotated_formula -> Formula_role
formulaRole (AF_THF_Annotated (THF_annotated _ r _ _)) = r
formulaRole (AF_TFX_Annotated (TFX_annotated _ r _ _)) = r
formulaRole (AF_TFF_Annotated (TFF_annotated _ r _ _)) = r
formulaRole (AF_TCF_Annotated (TCF_annotated _ r _ _)) = r
formulaRole (AF_FOF_Annotated (FOF_annotated _ r _ _)) = r
formulaRole (AF_CNF_Annotated (CNF_annotated _ r _ _)) = r
formulaRole (AF_TPI_Annotated (TPI_annotated _ r _ _)) = r
-- | The annotations attached to an annotated formula, for any TPTP
-- language.
annotations :: Annotated_formula -> Annotations
annotations (AF_THF_Annotated (THF_annotated _ _ _ a)) = a
annotations (AF_TFX_Annotated (TFX_annotated _ _ _ a)) = a
annotations (AF_TFF_Annotated (TFF_annotated _ _ _ a)) = a
annotations (AF_TCF_Annotated (TCF_annotated _ _ _ a)) = a
annotations (AF_FOF_Annotated (FOF_annotated _ _ _ a)) = a
annotations (AF_CNF_Annotated (CNF_annotated _ _ _ a)) = a
annotations (AF_TPI_Annotated (TPI_annotated _ _ _ a)) = a
-- <annotations> ::= ,<source><optional_info> | <null>
newtype Annotations = Annotations (Maybe (Source, Optional_info))
deriving (Show, Ord, Eq, Data, Typeable)
-- Types for problems
-- %----Types for problems.
-- <formula_role> ::= <lower_word>
-- <formula_role> :== axiom | hypothesis | definition | assumption |
-- lemma | theorem | corollary | conjecture |
-- negated_conjecture | plain | type |
-- fi_domain | fi_functors | fi_predicates | unknown
data Formula_role = Axiom
| Hypothesis
| Definition
| Assumption
| Lemma
| Theorem
| Corollary
| Conjecture
| Negated_conjecture
| Plain
| Type
| Fi_domain
| Fi_functors
| Fi_predicates
| Unknown
| Other_formula_role Token
-- ^ For future updates. Should not be used.
deriving (Show, Ord, Eq, Data, Typeable)
-- %----THF formulae.
-- <thf_formula> ::= <thf_logic_formula> | <thf_sequent>
data THF_formula = THFF_logic THF_logic_formula
| THFF_sequent THF_sequent
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_logic_formula> ::= <thf_binary_formula> | <thf_unitary_formula> |
-- <thf_type_formula> | <thf_subtype>
data THF_logic_formula = THFLF_binary THF_binary_formula
| THFLF_unitary THF_unitary_formula
| THFLF_type THF_type_formula
| THFLF_subtype THF_subtype
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_binary_formula> ::= <thf_binary_pair> | <thf_binary_tuple>
data THF_binary_formula = THFBF_pair THF_binary_pair
| THFBF_tuple THF_binary_tuple
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Only some binary connectives can be written without ()s.
-- %----There's no precedence among binary connectives
-- <thf_binary_pair> ::= <thf_unitary_formula> <thf_pair_connective>
-- <thf_unitary_formula>
data THF_binary_pair = THF_binary_pair THF_pair_connective THF_unitary_formula THF_unitary_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_binary_tuple> ::= <thf_or_formula> | <thf_and_formula> |
-- <thf_apply_formula>
data THF_binary_tuple = THFBT_or THF_or_formula
| THFBT_and THF_and_formula
| THFBT_apply THF_apply_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_or_formula> ::= <thf_unitary_formula> <vline> <thf_unitary_formula> |
-- <thf_or_formula> <vline> <thf_unitary_formula>
type THF_or_formula = [THF_unitary_formula]
-- <thf_and_formula> ::= <thf_unitary_formula> & <thf_unitary_formula> |
-- <thf_and_formula> & <thf_unitary_formula>
type THF_and_formula = [THF_unitary_formula]
-- <thf_apply_formula> ::= <thf_unitary_formula> @ <thf_unitary_formula> |
-- <thf_apply_formula> @ <thf_unitary_formula>
type THF_apply_formula = [THF_unitary_formula]
-- <thf_unitary_formula> ::= <thf_quantified_formula> | <thf_unary_formula> |
-- <thf_atom> | <thf_conditional> | <thf_let> |
-- <thf_tuple> | (<thf_logic_formula>)
data THF_unitary_formula = THFUF_quantified THF_quantified_formula
| THFUF_unary THF_unary_formula
| THFUF_atom THF_atom
| THFUF_conditional THF_conditional
| THFUF_let THF_let
| THFUF_tuple THF_tuple
| THFUF_logic THF_logic_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_quantified_formula> ::= <thf_quantification> <thf_unitary_formula>
data THF_quantified_formula = THF_quantified_formula THF_quantification THF_unitary_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_quantification> ::= <thf_quantifier> [<thf_variable_list>] :
data THF_quantification = THF_quantification THF_quantifier THF_variable_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_variable_list> ::= <thf_variable> | <thf_variable>,<thf_variable_list>
type THF_variable_list = [THF_variable]
-- <thf_variable> ::= <thf_typed_variable> | <variable>
data THF_variable = THFV_typed THF_typed_variable
| THFV_variable Variable
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_typed_variable> ::= <variable> : <thf_top_level_type>
data THF_typed_variable = THF_typed_variable Variable THF_top_level_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_unary_formula> ::= <thf_unary_connective> (<thf_logic_formula>)
data THF_unary_formula = THF_unary_formula THF_unary_connective THF_logic_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_atom> ::= <thf_function> | <variable> | <defined_term> |
-- <thf_conn_term>
data THF_atom = THF_atom_function THF_function
| THF_atom_variable Variable
| THF_atom_defined Defined_term
| THF_atom_conn THF_conn_term
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_function> ::= <atom> | <functor>(<thf_arguments>) |
-- <defined_functor>(<thf_arguments>) |
-- <system_functor>(<thf_arguments>)
data THF_function = THFF_atom Atom
| THFF_functor TPTP_functor THF_arguments
| THFF_defined Defined_functor THF_arguments
| THFF_system System_functor THF_arguments
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_conn_term> ::= <thf_pair_connective> | <assoc_connective> |
-- <thf_unary_connective>
data THF_conn_term = THFC_pair THF_pair_connective
| THFC_assoc Assoc_connective
| THFC_unary THF_unary_connective
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_conditional> ::= $ite(<thf_logic_formula>,<thf_logic_formula>,
-- <thf_logic_formula>)
data THF_conditional = THF_conditional THF_logic_formula THF_logic_formula THF_logic_formula -- $ite
deriving (Show, Ord, Eq, Data, Typeable)
-- %----The LHS of a term or formula binding must be a non-variable term that
-- %----is flat with pairwise distinct variable arguments, and the variables in
-- %----the LHS must be exactly those bound in the universally quantified variable
-- %----list, in the same order. Let definitions are not recursive: a non-variable
-- %----symbol introduced in the LHS of a let definition cannot occur in the RHS.
-- %----If a symbol with the same signature as the one in the LHS of the binding
-- %----is declared above the let expression (at the top level or in an
-- %----encompassing let) then it can be used in the RHS of the binding, but it is
-- %----not accessible in the term or formula of the let expression. Let
-- %----expressions can be eliminated by a simple definition expansion.
-- <thf_let> ::= $let(<thf_unitary_formula>,<thf_formula>)
-- <thf_let> :== $let(<thf_let_defns>,<thf_formula>)
data THF_let = THF_let THF_let_defns THF_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_let_defns> :== <thf_let_defn> | [<thf_let_defn_list>]
data THF_let_defns = THFLD_single THF_let_defn
| THFLD_many THF_let_defn_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_let_defn_list> :== <thf_let_defn> | <thf_let_defn>,<thf_let_defn_list>
type THF_let_defn_list = [THF_let_defn]
-- <thf_let_defn> :== <thf_let_quantified_defn> | <thf_let_plain_defn>
data THF_let_defn = THFLD_quantified THF_let_quantified_defn
| THFLD_plain THF_let_plain_defn
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_let_quantified_defn> :== <thf_quantification> (<thf_let_plain_defn>)
data THF_let_quantified_defn = THF_let_quantified_defn THF_quantification THF_let_plain_defn
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_let_plain_defn> :== <thf_let_defn_LHS> <assignment> <thf_formula>
data THF_let_plain_defn = THF_let_plain_defn THF_let_defn_LHS THF_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_let_defn_LHS> :== <constant> | <functor>(<fof_arguments>) |
-- <thf_tuple>
-- %----The <fof_arguments> must all be <variable>s, and the <thf_tuple> may
-- %----contain only <constant>s and <functor>(<fof_arguments>)s
data THF_let_defn_LHS = THFLDL_constant Constant
| THFLDL_functor TPTP_functor FOF_arguments
| THFLDL_tuple THF_tuple
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_arguments> ::= <thf_formula_list>
type THF_arguments = THF_formula_list
-- <thf_type_formula> ::= <thf_typeable_formula> : <thf_top_level_type>
-- <thf_type_formula> :== <constant> : <thf_top_level_type>
data THF_type_formula = THFTF_typeable THF_typeable_formula THF_top_level_type
| THFTF_constant Constant THF_top_level_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_typeable_formula> ::= <thf_atom> | (<thf_logic_formula>)
data THF_typeable_formula = THFTF_atom THF_atom
| THFTF_logic THF_logic_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_subtype> ::= <thf_atom> <subtype_sign> <thf_atom>
data THF_subtype = THF_subtype THF_atom THF_atom
deriving (Show, Ord, Eq, Data, Typeable)
-- %----<thf_top_level_type> appears after ":", where a type is being specified
-- %----for a term or variable. <thf_unitary_type> includes <thf_unitary_formula>,
-- %----so the syntax allows just about any lambda expression with "enough"
-- %----parentheses to serve as a type. The expected use of this flexibility is
-- %----parametric polymorphism in types, expressed with lambda abstraction.
-- %----Mapping is right-associative: o > o > o means o > (o > o).
-- %----Xproduct is left-associative: o * o * o means (o * o) * o.
-- %----Union is left-associative: o + o + o means (o + o) + o.
-- <thf_top_level_type> ::= <thf_unitary_type> | <thf_mapping_type>
data THF_top_level_type = THFTLT_unitary THF_unitary_type
| THFTLT_mapping THF_mapping_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_unitary_type> ::= <thf_unitary_formula> | (<thf_binary_type>)
data THF_unitary_type = THFUT_unitary THF_unitary_formula
| THFUT_binary THF_binary_type
deriving (Show, Ord, Eq, Data, Typeable)
-- Each of these binary types has at least two (!) list entries.
-- <thf_binary_type> ::= <thf_mapping_type> | <thf_xprod_type> |
-- <thf_union_type>
data THF_binary_type = THFBT_mapping THF_mapping_type
| THFBT_xprod THF_xprod_type
| THFBT_union THF_union_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_mapping_type> ::= <thf_unitary_type> <arrow> <thf_unitary_type> |
-- <thf_unitary_type> <arrow> <thf_mapping_type>
type THF_mapping_type = [THF_unitary_type] -- right associative
-- <thf_xprod_type> ::= <thf_unitary_type> <star> <thf_unitary_type> |
-- <thf_xprod_type> <star> <thf_unitary_type>
type THF_xprod_type = [THF_unitary_type] -- left associative
-- <thf_union_type> ::= <thf_unitary_type> <plus> <thf_unitary_type> |
-- <thf_union_type> <plus> <thf_unitary_type>
type THF_union_type = [THF_unitary_type] -- right associative
-- %----Sequents using the Gentzen arrow
-- <thf_sequent> ::= <thf_tuple> <gentzen_arrow> <thf_tuple> |
-- (<thf_sequent>)
data THF_sequent = THFS_plain THF_tuple THF_tuple
| THFS_parens THF_sequent
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_tuple> ::= [] | [<thf_formula_list>]
newtype THF_tuple = THF_tuple THF_formula_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_formula_list> ::= <thf_logic_formula> |
-- <thf_logic_formula>,<thf_formula_list>
type THF_formula_list = [THF_logic_formula]
-- NOTE: not used by parser
-- %----New material for modal logic semantics, not integrated yet
-- <logic_defn_rule> :== <logic_defn_LHS> <assignment> <logic_defn_RHS>-
-- data Logic_defn_rule = Logic_defn_rule Logic_defn_LHS Logic_defn_RHS
-- deriving (Show, Ord, Eq, Data, Typeable)
-- NOTE: not used by parser
-- <logic_defn_LHS> :== <logic_defn_value> | <thf_top_level_type> | <name>
-- <logic_defn_LHS> :== $constants | $quantification | $consequence |
-- $modalities
-- %----The $constants, $quantification, and $consequence apply to all of the
-- %----$modalities. Each of these may be specified only once, but not necessarily
-- %----all in a single annotated formula.-
-- data Logic_defn_LHS = Logic_defn_LHS_value Logic_defn_value
-- | Logic_defn_LHS_THF_Top_level_type THF_top_level_type
-- | Logic_defn_LHS_name Name
-- | LDLC_constants
-- | LDLC_quantification
-- | LDLC_consequence
-- | LDLC_modalities
-- deriving (Show, Ord, Eq, Data, Typeable)
-- NOTE: not used by parser
-- <logic_defn_RHS> :== <logic_defn_value> | <thf_unitary_formula>-
-- data Logic_defn_RHS = Logic_defn_RHS_value Logic_defn_value
-- | Logic_defn_RNG_THF_Unitary_forumla THF_unitary_formula
-- deriving (Show, Ord, Eq, Data, Typeable)
-- NOTE: not used by parser
-- <logic_defn_value> :== <defined_constant>
-- <logic_defn_value> :== $rigid | $flexible |
-- $constant | $varying | $cumulative | $decreasing |
-- $local | $global |
-- $modal_system_K | $modal_system_T | $modal_system_D |
-- $modal_system_S4 | $modal_system_S5 |
-- $modal_axiom_K | $modal_axiom_T | $modal_axiom_B |
-- $modal_axiom_D | $modal_axiom_4 | $modal_axiom_5-
-- data Logic_defn_value = Rigid
-- | Flexible
-- | Constant
-- | Varying
-- | Cumulative
-- | Decreasing
-- | Local
-- | Global
-- | Modal_system_K
-- | Modal_system_T
-- | Modal_system_D
-- | Modal_system_S4
-- | Modal_system_S5
-- | Modal_axiom_K
-- | Modal_axiom_T
-- | Modal_axiom_B
-- | Modal_axiom_D
-- | Modal_axiom_4
-- | Modal_axiom_5
-- deriving (Show, Ord, Eq, Data, Typeable)
-- %----TFX formulae
-- <tfx_formula> ::= <tfx_logic_formula> | <thf_sequent>
data TFX_formula = TFXF_logic TFX_logic_formula
| TFXF_sequent THF_sequent
deriving (Show, Ord, Eq, Data, Typeable)
-- <tfx_logic_formula> ::= <thf_logic_formula>
-- % <tfx_logic_formula> ::= <thf_binary_formula> | <thf_unitary_formula> |
-- % <tff_typed_atom> | <tff_subtype>
data TFX_logic_formula = TFXLF_binary THF_binary_formula
| TFXLF_unitary THF_unitary_formula
| TFXLF_typed TFF_typed_atom
| TFXLF_subtype TFF_subtype
deriving (Show, Ord, Eq, Data, Typeable)
-- %----TFF formulae.
-- <tff_formula> ::= <tff_logic_formula> | <tff_typed_atom> |
-- <tff_sequent>
data TFF_formula = TFFF_logic TFF_logic_formula
| TFFF_atom TFF_typed_atom
| TFFF_sequent TFF_sequent
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_logic_formula> ::= <tff_binary_formula> | <tff_unitary_formula> |
-- <tff_subtype>
data TFF_logic_formula = TFFLF_binary TFF_binary_formula
| TFFLF_unitary TFF_unitary_formula
| TFFLF_subtype TFF_subtype
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_binary_formula> ::= <tff_binary_nonassoc> | <tff_binary_assoc>
data TFF_binary_formula = TFFBF_nonassoc TFF_binary_nonassoc
| TFFBF_assoc TFF_binary_assoc
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_binary_nonassoc> ::= <tff_unitary_formula> <binary_connective>
-- <tff_unitary_formula>
data TFF_binary_nonassoc = TFF_binary_nonassoc Binary_connective TFF_unitary_formula TFF_unitary_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_binary_assoc> ::= <tff_or_formula> | <tff_and_formula>
data TFF_binary_assoc = TFFBA_or TFF_or_formula
| TFFBA_and TFF_and_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_or_formula> ::= <tff_unitary_formula> <vline> <tff_unitary_formula> |
-- <tff_or_formula> <vline> <tff_unitary_formula>
type TFF_or_formula = [TFF_unitary_formula]
-- <tff_and_formula> ::= <tff_unitary_formula> & <tff_unitary_formula> |
-- <tff_and_formula> & <tff_unitary_formula>
type TFF_and_formula = [TFF_unitary_formula]
-- <tff_unitary_formula> ::= <tff_quantified_formula> | <tff_unary_formula> |
-- <tff_atomic_formula> | <tff_conditional> |
-- <tff_let> | (<tff_logic_formula>)
data TFF_unitary_formula = TFFUF_quantified TFF_quantified_formula
| TFFUF_unary TFF_unary_formula
| TFFUF_atomic TFF_atomic_formula
| TFFUF_conditional TFF_conditional
| TFFUF_let TFF_let
| TFFUF_logic TFF_logic_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_quantified_formula> ::= <fof_quantifier> [<tff_variable_list>] :
-- <tff_unitary_formula>
data TFF_quantified_formula = TFF_quantified_formula FOF_quantifier TFF_variable_list TFF_unitary_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_variable_list> ::= <tff_variable> | <tff_variable>,<tff_variable_list>
type TFF_variable_list = [TFF_variable]
-- <tff_variable> ::= <tff_typed_variable> | <variable>
data TFF_variable = TFFV_typed TFF_typed_variable
| TFFV_variable Variable
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_typed_variable> ::= <variable> : <tff_atomic_type>
data TFF_typed_variable = TFF_typed_variable Variable TFF_atomic_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_unary_formula> ::= <unary_connective> <tff_unitary_formula> |
-- <fof_infix_unary>
data TFF_unary_formula = TFFUF_connective Unary_connective TFF_unitary_formula
| TFFUF_infix FOF_infix_unary
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_atomic_formula> ::= <fof_atomic_formula>
type TFF_atomic_formula = FOF_atomic_formula
-- <tff_conditional> ::= $ite_f(<tff_logic_formula>,<tff_logic_formula>,
-- <tff_logic_formula>)
data TFF_conditional = TFF_conditional TFF_logic_formula TFF_logic_formula TFF_logic_formula -- $ite_f
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_let> ::= $let_tf(<tff_let_term_defns>,<tff_formula>) |
-- $let_ff(<tff_let_formula_defns>,<tff_formula>)
data TFF_let = TFF_let_term_defns TFF_let_term_defns TFF_formula
| TFF_let_formula_defns TFF_let_formula_defns TFF_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- %----See the commentary for <thf_let>.
-- <tff_let_term_defns> ::= <tff_let_term_defn> | [<tff_let_term_list>]
data TFF_let_term_defns = TFFLTD_single TFF_let_term_defn
| TFFLTD_many TFF_let_term_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_let_term_list> ::= <tff_let_term_defn> |
-- <tff_let_term_defn>,<tff_let_term_list>
type TFF_let_term_list = [TFF_let_term_defn]
-- <tff_let_term_defn> ::= ! [<tff_variable_list>] : <tff_let_term_defn> |
-- <tff_let_term_binding>
data TFF_let_term_defn = TFFLTD_variable TFF_variable_list TFF_let_term_defn
| TFFLTD_binding TFF_let_term_binding
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_let_term_binding> ::= <fof_plain_term> = <fof_term> |
-- (<tff_let_term_binding>)
data TFF_let_term_binding = TFFLTB_plain FOF_plain_term FOF_term
| TFFLTB_binding TFF_let_term_binding
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_let_formula_defns> ::= <tff_let_formula_defn> | [<tff_let_formula_list>]
data TFF_let_formula_defns = TFFLFD_single TFF_let_formula_defn
| TFFLFD_many TFF_let_formula_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_let_formula_list> ::= <tff_let_formula_defn> |
-- <tff_let_formula_defn>,<tff_let_formula_list>
type TFF_let_formula_list = [TFF_let_formula_defn]
-- <tff_let_formula_defn> ::= ! [<tff_variable_list>] : <tff_let_formula_defn> |
-- <tff_let_formula_binding>
data TFF_let_formula_defn = TFFLFD_variable TFF_variable_list TFF_let_formula_defn
| TFFLFD_binding TFF_let_formula_binding
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_let_formula_binding> ::= <fof_plain_atomic_formula> <=>
-- <tff_unitary_formula> | (<tff_let_formula_binding>)
data TFF_let_formula_binding = TFFLFB_plain FOF_plain_atomic_formula TFF_unitary_formula
| TFFLFB_binding TFF_let_formula_binding
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_sequent> ::= <tff_formula_tuple> <gentzen_arrow>
-- <tff_formula_tuple> | (<tff_sequent>)
data TFF_sequent = TFFS_plain TFF_formula_tuple TFF_formula_tuple
| TFFS_parens TFF_sequent
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_formula_tuple> ::= [] | [<tff_formula_tuple_list>]
newtype TFF_formula_tuple = TFF_formula_tuple TFF_formula_tuple_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_formula_tuple_list> ::= <tff_logic_formula> |
-- <tff_logic_formula>,<tff_formula_tuple_list>
type TFF_formula_tuple_list = [TFF_logic_formula]
-- %----<tff_typed_atom> can appear only at top level
-- <tff_typed_atom> ::= <untyped_atom> : <tff_top_level_type> |
-- (<tff_typed_atom>)
data TFF_typed_atom = TFFTA_plain Untyped_atom TFF_top_level_type
| TFFTA_parens TFF_typed_atom
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_subtype> ::= <untyped_atom> <subtype_sign> <atom>
data TFF_subtype = TFF_subtype Untyped_atom Atom
deriving (Show, Ord, Eq, Data, Typeable)
-- %----See <thf_top_level_type> for commentary.
-- <tff_top_level_type> ::= <tff_atomic_type> | <tff_mapping_type> |
-- <tf1_quantified_type> | (<tff_top_level_type>)
data TFF_top_level_type = TFFTLT_atomic TFF_atomic_type
| TFFTLT_mapping TFF_mapping_type
| TFFTLT_quantified TF1_quantified_type
| TFFTLT_parens TFF_top_level_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <tf1_quantified_type> ::= !> [<tff_variable_list>] : <tff_monotype>
data TF1_quantified_type = TF1_quantified_type TFF_variable_list TFF_monotype
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_monotype> ::= <tff_atomic_type> | (<tff_mapping_type>)
data TFF_monotype = TFFMT_atomic TFF_atomic_type
| TFFMT_mapping TFF_mapping_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_unitary_type> ::= <tff_atomic_type> | (<tff_xprod_type>)
data TFF_unitary_type = TFFUT_atomic TFF_atomic_type
| TFFUT_xprod TFF_xprod_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_atomic_type> ::= <type_constant> | <defined_type> |
-- <type_functor>(<tff_type_arguments>) | <variable>
data TFF_atomic_type = TFFAT_constant Type_constant
| TFFAT_defined Defined_type
| TFFAT_functor Type_functor TFF_type_arguments
| TFFAT_variable Variable
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_type_arguments> ::= <tff_atomic_type> |
-- <tff_atomic_type>,<tff_type_arguments>
type TFF_type_arguments = [TFF_atomic_type]
-- %----For consistency with <thf_unitary_type> (the analogue in thf),
-- %----<tff_atomic_type> should also allow (<tff_atomic_type>), but that causes
-- %----ambiguity.
-- <tff_mapping_type> ::= <tff_unitary_type> <arrow> <tff_atomic_type>
data TFF_mapping_type = TFF_mapping_type TFF_unitary_type TFF_atomic_type
deriving (Show, Ord, Eq, Data, Typeable)
-- <tff_xprod_type> ::= <tff_unitary_type> <star> <tff_atomic_type> |
-- <tff_xprod_type> <star> <tff_atomic_type>
data TFF_xprod_type = TFF_xprod_type TFF_unitary_type [TFF_atomic_type]
deriving (Show, Ord, Eq, Data, Typeable)
-- %----TCF formulae.
-- <tcf_formula> ::= <tcf_logic_formula> | <tff_typed_atom>
data TCF_formula = TCFF_logic TCF_logic_formula
| TCFF_atom TFF_typed_atom
deriving (Show, Ord, Eq, Data, Typeable)
-- <tcf_logic_formula> ::= <tcf_quantified_formula> | <cnf_formula>
data TCF_logic_formula = TCFLF_quantified TCF_quantified_formula
| TCFLF_cnf CNF_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <tcf_quantified_formula> ::= ! [<tff_variable_list>] : <cnf_formula>
data TCF_quantified_formula = TCF_quantified TFF_variable_list CNF_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- %----FOF formulae.
-- <fof_formula> ::= <fof_logic_formula> | <fof_sequent>
data FOF_formula = FOFF_logic FOF_logic_formula
| FOFF_sequent FOF_sequent
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_logic_formula> ::= <fof_binary_formula> | <fof_unitary_formula>
data FOF_logic_formula = FOFLF_binary FOF_binary_formula
| FOFLF_unitary FOF_unitary_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Future answer variable ideas | <answer_formula>
-- <fof_binary_formula> ::= <fof_binary_nonassoc> | <fof_binary_assoc>
data FOF_binary_formula = FOFBF_nonassoc FOF_binary_nonassoc
| FOFBF_assoc FOF_binary_assoc
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Only some binary connectives are associative
-- %----There's no precedence among binary connectives
-- <fof_binary_nonassoc> ::= <fof_unitary_formula> <binary_connective>
-- <fof_unitary_formula>
data FOF_binary_nonassoc = FOF_binary_nonassoc Binary_connective FOF_unitary_formula FOF_unitary_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Associative connectives & and | are in <binary_assoc>
-- <fof_binary_assoc> ::= <fof_or_formula> | <fof_and_formula>
data FOF_binary_assoc = FOFBA_or FOF_or_formula
| FOFBA_and FOF_and_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_or_formula> ::= <fof_unitary_formula> <vline> <fof_unitary_formula> |
-- <fof_or_formula> <vline> <fof_unitary_formula>
type FOF_or_formula = [FOF_unitary_formula]
-- <fof_and_formula> ::= <fof_unitary_formula> & <fof_unitary_formula> |
-- <fof_and_formula> & <fof_unitary_formula>
type FOF_and_formula = [FOF_unitary_formula]
-- %----<fof_unitary_formula> are in ()s or do not have a <binary_connective> at
-- %----the top level.
-- <fof_unitary_formula> ::= <fof_quantified_formula> | <fof_unary_formula> |
-- <fof_atomic_formula> | (<fof_logic_formula>)
data FOF_unitary_formula = FOFUF_quantified FOF_quantified_formula
| FOFUF_unary FOF_unary_formula
| FOFUF_atomic FOF_atomic_formula
| FOFUF_logic FOF_logic_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_quantified_formula> ::= <fof_quantifier> [<fof_variable_list>] :
-- <fof_unitary_formula>
data FOF_quantified_formula = FOF_quantified_formula FOF_quantifier FOF_variable_list FOF_unitary_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_variable_list> ::= <variable> | <variable>,<fof_variable_list>
type FOF_variable_list = [Variable]
-- <fof_unary_formula> ::= <unary_connective> <fof_unitary_formula> |
-- <fof_infix_unary>
data FOF_unary_formula = FOFUF_connective Unary_connective FOF_unitary_formula
| FOFUF_infix FOF_infix_unary
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_infix_unary> ::= <fof_term> <infix_inequality> <fof_term>
data FOF_infix_unary = FOF_infix_unary FOF_term FOF_term
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_atomic_formula> ::= <fof_plain_atomic_formula> |
-- <fof_defined_atomic_formula> |
-- <fof_system_atomic_formula>
data FOF_atomic_formula = FOFAT_plain FOF_plain_atomic_formula
| FOFAT_defined FOF_defined_atomic_formula
| FOFAT_system FOF_system_atomic_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_plain_atomic_formula> ::= <fof_plain_term>
-- <fof_plain_atomic_formula> :== <proposition> | <predicate>(<fof_arguments>)
data FOF_plain_atomic_formula = FOFPAF_proposition Proposition
| FOFPAF_predicate Predicate FOF_arguments
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_defined_atomic_formula> ::= <fof_defined_plain_formula> |
-- <fof_defined_infix_formula>
data FOF_defined_atomic_formula = FOFDAF_plain FOF_defined_plain_formula
| FOFDAF_infix FOF_defined_infix_formula
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_defined_plain_formula> ::= <fof_defined_plain_term>
-- <fof_defined_plain_formula> :== <defined_proposition> |
-- <defined_predicate>(<fof_arguments>)
data FOF_defined_plain_formula = FOFDPF_proposition Defined_proposition
| FOFDPF_predicate Defined_predicate FOF_arguments
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_defined_infix_formula> ::= <fof_term> <defined_infix_pred> <fof_term>
data FOF_defined_infix_formula = FOF_defined_infix_formula Defined_infix_pred FOF_term FOF_term
deriving (Show, Ord, Eq, Data, Typeable)
-- %----System terms have system specific interpretations
-- <fof_system_atomic_formula> ::= <fof_system_term>
-- %----<fof_system_atomic_formula>s are used for evaluable predicates that are
-- %----available in particular tools. The predicate names are not controlled
-- %----by the TPTP syntax, so use with due care. The same is true for
-- %----<fof_system_term>s.
newtype FOF_system_atomic_formula = FOF_system_atomic_formula FOF_system_term
deriving (Show, Ord, Eq, Data, Typeable)
-- %----FOF terms.
-- <fof_plain_term> ::= <constant> | <functor>(<fof_arguments>)
data FOF_plain_term = FOFPT_constant Constant
| FOFPT_functor TPTP_functor FOF_arguments
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Defined terms have TPTP specific interpretations
-- <fof_defined_term> ::= <defined_term> | <fof_defined_atomic_term>
data FOF_defined_term = FOFDT_term Defined_term
| FOFDT_atomic FOF_defined_atomic_term
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_defined_atomic_term> ::= <fof_defined_plain_term>
-- %----None yet | <defined_infix_term>
data FOF_defined_atomic_term = FOFDAT_plain FOF_defined_plain_term
-- | FOFDAT_indix Defined_infix_term
deriving (Show, Ord, Eq, Data, Typeable)
-- %----None yet <defined_infix_term> ::= <fof_term> <defined_infix_func> <fof_term>
-- data Defined_infix_term = Defined_infix_term Defined_infix_func FOF_term FOF_term
-- deriving (Show, Ord, Eq, Data, Typeable)
-- %----None yet <defined_infix_func> ::=
-- data Defined_infix_func =
-- <fof_defined_plain_term> ::= <defined_constant> |
-- <defined_functor>(<fof_arguments>)
-- %----Add $tuple for tuples, because [<fof_arguments>] doesn't work.
data FOF_defined_plain_term = FOFDPT_constant Defined_constant
| FOFDPT_functor Defined_functor FOF_arguments
deriving (Show, Ord, Eq, Data, Typeable)
-- %----System terms have system specific interpretations
-- <fof_system_term> ::= <system_constant> | <system_functor>(<fof_arguments>)
data FOF_system_term = FOFST_constant System_constant
| FOFST_functor System_functor FOF_arguments
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Arguments recurse back up to terms (this is the FOF world here)
-- <fof_arguments> ::= <fof_term> | <fof_term>,<fof_arguments>
type FOF_arguments = [FOF_term]
-- %----These are terms used as arguments. Not the entry point for terms because
-- %----<fof_plain_term> is also used as <fof_plain_atomic_formula>
-- <fof_term> ::= <fof_function_term> | <variable> |
-- <tff_conditional_term> | <tff_let_term>
data FOF_term = FOFT_function FOF_function_term
| FOFT_variable Variable
| FOFT_conditional TFF_conditional_term
| FOFT_let TFF_let_term
deriving (Show, Ord, Eq, Data, Typeable)
-- %% DAMN THIS JUST WON'T WORK | <tuple_term>
-- %----<tuple_term> is for TFF only, but it's here because it's used in
-- %----<fof_atomic_formula>, which is also used as <tff_atomic_formula>.
-- % <tuple_term> ::= [] | [<fof_arguments>]
-- <fof_function_term> ::= <fof_plain_term> | <fof_defined_term> |
-- <fof_system_term>
data FOF_function_term = FOFFT_plain FOF_plain_term
| FOFFT_defined FOF_defined_term
| FOFFT_system FOF_system_term
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Conditional terms should be used by only TFF.
-- <tff_conditional_term> ::= $ite_t(<tff_logic_formula>,<fof_term>,<fof_term>)
data TFF_conditional_term = TFF_conditional_term TFF_logic_formula FOF_term FOF_term
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Let terms should be used by only TFF. $let_ft is for use when there is
-- %----a $ite_t in the <fof_term>. See the commentary for $let_tf and $let_ff.
-- <tff_let_term> ::= $let_ft(<tff_let_formula_defns>,<fof_term>) |
-- $let_tt(<tff_let_term_defns>,<fof_term>)
data TFF_let_term = TFFLT_formula TFF_let_formula_defns FOF_term
| TFFLT_term TFF_let_term_defns FOF_term
deriving (Show, Ord, Eq, Data, Typeable)
{-
%----This section is the FOFX syntax. Not yet in use.
% <fof_let> ::= := [<fof_let_list>] : <fof_unitary_formula>
% <fof_let_list> ::= <fof_defined_var> |
% <fof_defined_var>,<fof_let_list>
% <fof_defined_var> ::= <variable> := <fof_logic_formula> |
% <variable> :- <fof_term> | (<fof_defined_var>)
%
% <fof_conditional> ::= $ite_f(<fof_logic_formula>,<fof_logic_formula>,
% <fof_logic_formula>)
%
% <fof_conditional_term> ::= $ite_t(<fof_logic_formula>,<fof_term>,<fof_term>)
-}
-- <fof_sequent> ::= <fof_formula_tuple> <gentzen_arrow>
-- <fof_formula_tuple> | (<fof_sequent>)
data FOF_sequent = FOFS_plain FOF_formula_tuple FOF_formula_tuple
| FOFS_parens FOF_sequent
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_formula_tuple> ::= [] | [<fof_formula_tuple_list>]
newtype FOF_formula_tuple = FOF_formula_tuple FOF_formula_tuple_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <fof_formula_tuple_list> ::= <fof_logic_formula> |
-- <fof_logic_formula>,<fof_formula_tuple_list>
type FOF_formula_tuple_list = [FOF_logic_formula]
-- %----CNF formulae (variables implicitly universally quantified)
-- <cnf_formula> ::= <disjunction> | (<disjunction>)
data CNF_formula = CNFF_plain Disjunction
| CNFF_parens Disjunction
deriving (Show, Ord, Eq, Data, Typeable)
-- <disjunction> ::= <literal> | <disjunction> <vline> <literal>
newtype Disjunction = Disjunction [Literal]
deriving (Show, Ord, Eq, Data, Typeable)
-- <literal> ::= <fof_atomic_formula> | ~ <fof_atomic_formula> |
-- <fof_infix_unary>
data Literal = Lit_atomic FOF_atomic_formula
| Lit_negative FOF_atomic_formula
| Lit_fof_infix FOF_infix_unary
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Connectives - THF
-- <thf_quantifier> ::= <fof_quantifier> | <th0_quantifier> |
-- <th1_quantifier>
data THF_quantifier = THFQ_fof FOF_quantifier
| THFQ_th0 TH0_quantifier
| THFQ_th1 TH1_quantifier
deriving (Show, Ord, Eq, Data, Typeable)
-- %----TH0 quantifiers are also available in TH1
-- <th1_quantifier> ::= !> | ?*
data TH1_quantifier = TH1_DependentProduct -- !>
| TH1_DependentSum -- ?*
deriving (Show, Ord, Eq, Data, Typeable)
-- <th0_quantifier> ::= ^ | @+ | @-
data TH0_quantifier = TH0_LambdaBinder -- ^
| TH0_IndefiniteDescription -- @+
| TH0_DefiniteDescription -- @-
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_pair_connective> ::= <infix_equality> | <infix_inequality> |
-- <binary_connective> | <assignment>
data THF_pair_connective = THF_infix_equality
| Infix_inequality
| THFPC_binary Binary_connective
| THF_assignment
deriving (Show, Ord, Eq, Data, Typeable)
-- <thf_unary_connective> ::= <unary_connective> | <th1_unary_connective>
data THF_unary_connective = THFUC_unary Unary_connective
| THFUC_th1 TH1_unary_connective
deriving (Show, Ord, Eq, Data, Typeable)
-- <th1_unary_connective> ::= !! | ?? | @@+ | @@- | @=
data TH1_unary_connective = TH1_PiForAll -- !!
| TH1_PiSigmaExists -- ??
| TH1_PiIndefiniteDescription -- @@+
| TH1_PiDefiniteDescription -- @@-
| TH1_PiEquality -- @=
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Connectives - TFF
-- % <tff_pair_connective> ::= <binary_connective> | <assignment>
-- Note: not used
-- data TFF_pair_connective = TFFPC_binary Binary_connective
-- | TFFPC_assignment TFF_assignment
-- deriving (Show, Ord, Eq, Data, Typeable)
-- %----Connectives - FOF
-- <fof_quantifier> ::= ! | ?
data FOF_quantifier = ForAll -- !
| Exists -- ?
deriving (Show, Ord, Eq, Data, Typeable)
-- <binary_connective> ::= <=> | => | <= | <~> | ~<vline> | ~&
data Binary_connective = Equivalence
| Implication
| ReverseImplication
| XOR
| NOR
| NAND
deriving (Show, Ord, Eq, Data, Typeable)
-- <assoc_connective> ::= <vline> | &
data Assoc_connective = OR
| AND
deriving (Show, Ord, Eq, Data, Typeable)
-- <unary_connective> ::= ~
data Unary_connective = NOT deriving (Show, Ord, Eq, Data, Typeable)
-- %----Types for THF and TFF
-- <type_constant> ::= <type_functor>
type Type_constant = Type_functor
-- <type_functor> ::= <atomic_word>
type Type_functor = Token
-- <defined_type> ::= <atomic_defined_word>
-- <defined_type> :== $oType | $o | $iType | $i | $tType |
-- $real | $rat | $int
data Defined_type = OType -- $oType/$o is the Boolean type, i.e., the type of $true and $false.
| O -- $oType/$o is the Boolean type, i.e., the type of $true and $false.
| IType -- $iType/$i is non-empty type of individuals, which may be finite or infinite.
| I -- $iType/$i is non-empty type of individuals, which may be finite or infinite.
| TType -- $tType is the type (kind) of all types.
| Real -- $real is the type of <real>s.
| Rat -- $rat is the type of <rational>s.
| Int -- $int is the type of <signed_integer>s and <unsigned_integer>s.
deriving (Show, Ord, Eq, Data, Typeable)
-- <system_type> :== <atomic_system_word>
-- Note: not used
-- type System_type = Token
-- %----For all language types
-- <atom> ::= <untyped_atom> | <defined_constant>
data Atom = Atom_untyped Untyped_atom
| Atom_constant Defined_constant
deriving (Show, Ord, Eq, Data, Typeable)
-- <untyped_atom> ::= <constant> | <system_constant>
data Untyped_atom = UA_constant Constant
| UA_system System_constant
deriving (Show, Ord, Eq, Data, Typeable)
type Proposition = Predicate
type Predicate = Token
-- <defined_proposition> :== <atomic_defined_word>
-- <defined_proposition> :== $true | $false
data Defined_proposition = TPTP_true
| TPTP_false
deriving (Show, Ord, Eq, Data, Typeable)
-- <defined_predicate> :== <atomic_defined_word>
-- <defined_predicate> :== $distinct |
-- $less | $lesseq | $greater | $greatereq |
-- $is_int | $is_rat |
-- $box_P | $box_i | $box_int | $box |
-- $dia_P | $dia_i | $dia_int | $dia
-- %----$distinct means that each of it's constant arguments are pairwise !=. It
-- %----is part of the TFF syntax. It can be used only as a fact, not under any
-- %----connective.
data Defined_predicate = Distinct
| Less
| Lesseq
| Greater
| Greatereq
| Is_int
| Is_rat
| Box_P
| Box_i
| Box_int
| Box
| Dia_P
| Dia_i
| Dia_int
| Dia
deriving (Show, Ord, Eq, Data, Typeable)
-- <defined_infix_pred> ::= <infix_equality> | <assignment>
-- <infix_equality> ::= =
-- <infix_inequality> ::= !=
data Defined_infix_pred = Defined_infix_equality
| Defined_assignment
deriving (Show, Ord, Eq, Data, Typeable)
-- <constant> ::= <functor>
type Constant = TPTP_functor
-- <functor> ::= <atomic_word>
type TPTP_functor = Token
-- <system_constant> ::= <system_functor>
type System_constant = System_functor
-- <system_functor> ::= <atomic_system_word>
type System_functor = Token
-- <defined_constant> ::= <defined_functor>
type Defined_constant = Defined_functor
-- <defined_functor> ::= <atomic_defined_word>
-- <defined_functor> :== $uminus | $sum | $difference | $product |
-- $quotient | $quotient_e | $quotient_t | $quotient_f |
-- $remainder_e | $remainder_t | $remainder_f |
-- $floor | $ceiling | $truncate | $round |
-- $to_int | $to_rat | $to_real
data Defined_functor = Uminus
| Sum
| Difference
| Product
| Quotient
| Quotient_e
| Quotient_t
| Quotient_f
| Remainder_e
| Remainder_t
| Remainder_f
| Floor
| Ceiling
| Truncate
| Round
| To_int
| To_rat
| To_real
| DF_atomic_defined_word Atomic_defined_word
deriving (Show, Ord, Eq, Data, Typeable)
-- <defined_term> ::= <number> | <distinct_object>
data Defined_term = DT_number Number
| DT_object Distinct_object
deriving (Show, Ord, Eq, Data, Typeable)
-- <variable> ::= <upper_word>
type Variable = Token
-- %----Formula sources
-- <source> ::= <general_term>
-- <source> :== <dag_source> | <internal_source> |
-- <external_source> | unknown | [<sources>]
data Source = Source_DAG DAG_source
| Source_internal Internal_source
| Source_external External_source
| Unknown_source
| Source_many Sources
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Alternative sources are recorded like this, thus allowing representation
-- %----of alternative derivations with shared parts.
-- <sources> :== <source> | <source>,<sources>
type Sources = [Source]
-- %----Only a <dag_source> can be a <name>, i.e., derived formulae can be
-- %----identified by a <name> or an <inference_record>
-- <dag_source> :== <name> | <inference_record>
data DAG_source = DAGS_name Name
| DAGS_record Inference_record
deriving (Show, Ord, Eq, Data, Typeable)
-- <inference_record> :== inference(<inference_rule>,<useful_info>,
-- <inference_parents>)
data Inference_record = Inference_record Inference_rule Useful_info Inference_parents
deriving (Show, Ord, Eq, Data, Typeable)
-- <inference_rule> :== <atomic_word>
-- %----Examples are deduction | modus_tollens | modus_ponens | rewrite |
-- % resolution | paramodulation | factorization |
-- % cnf_conversion | cnf_refutation | ...
type Inference_rule = Token
-- %----<inference_parents> can be empty in cases when there is a justification
-- %----for a tautologous theorem. In case when a tautology is introduced as
-- %----a leaf, e.g., for splitting, then use an <internal_source>.
-- <inference_parents> :== [] | [<parent_list>]
type Inference_parents = Parent_list
-- <parent_list> :== <parent_info> | <parent_info>,<parent_list>
type Parent_list = [Parent_info]
-- <parent_info> :== <source><parent_details>
data Parent_info = Parent_info Source Parent_details
deriving (Show, Ord, Eq, Data, Typeable)
-- <parent_details> :== :<general_list> | <null>
type Parent_details = Maybe General_list
-- <internal_source> :== introduced(<intro_type><optional_info>)
data Internal_source = Internal_source Intro_type Optional_info
deriving (Show, Ord, Eq, Data, Typeable)
-- <intro_type> :== definition | axiom_of_choice | tautology | assumption
-- %----This should be used to record the symbol being defined, or the function
-- %----for the axiom of choice
data Intro_type = IntroTypeDefinition
| AxiomOfChoice
| Tautology
| IntroTypeAssumption
deriving (Show, Ord, Eq, Data, Typeable)
-- <external_source> :== <file_source> | <theory> | <creator_source>
data External_source = ExtSrc_file File_source
| ExtSrc_theory Theory
| ExtSrc_creator Creator_source
deriving (Show, Ord, Eq, Data, Typeable)
-- <file_source> :== file(<file_name><file_info>)
data File_source = File_source File_name File_info
deriving (Show, Ord, Eq, Data, Typeable)
-- <file_info> :== ,<name> | <null>
type File_info = Maybe Name
-- <theory> :== theory(<theory_name><optional_info>)
data Theory = Theory Theory_name Optional_info
deriving (Show, Ord, Eq, Data, Typeable)
-- <theory_name> :== equality | ac
data Theory_name = TN_equality
| TN_ac
deriving (Show, Ord, Eq, Data, Typeable)
-- %----More theory names may be added in the future. The <optional_info> is
-- %----used to store, e.g., which axioms of equality have been implicitly used,
-- %----e.g., theory(equality,[rst]). Standard format still to be decided.
-- <creator_source> :== creator(<creator_name><optional_info>)
data Creator_source = Creator_source Creator_name Optional_info
deriving (Show, Ord, Eq, Data, Typeable)
-- <creator_name> :== <atomic_word>
type Creator_name = Token
-- %----Useful info fields
-- <optional_info> ::= ,<useful_info> | <null>
type Optional_info = Maybe Useful_info
-- <useful_info> ::= <general_list>
-- <useful_info> :== [] | [<info_items>]
data Useful_info = UI_items Info_items
| UI_general_list General_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <info_items> :== <info_item> | <info_item>,<info_items>
type Info_items = [Info_item]
-- <info_item> :== <formula_item> | <inference_item> |
-- <general_function>
data Info_item = Info_formula Formula_item
| Info_inference Inference_item
| Info_general General_function
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Useful info for formula records
-- <formula_item> :== <description_item> | <iquote_item>
data Formula_item = FI_description Description_item
| FI_iquote Iquote_item
deriving (Show, Ord, Eq, Data, Typeable)
-- <description_item> :== description(<atomic_word>)
type Description_item = Token
-- <iquote_item> :== iquote(<atomic_word>)
-- %----<iquote_item>s are used for recording exactly what the system output about
-- %----the inference step. In the future it is planned to encode this information
-- %----in standardized forms as <parent_details> in each <inference_record>.
-- %----Useful info for inference records
type Iquote_item = Token
-- <inference_item> :== <inference_status> | <assumptions_record> |
-- <new_symbol_record> | <refutation>
data Inference_item = Inf_status Inference_status
| Inf_assumption Assumptions_record
| Inf_symbol New_symbol_record
| Inf_refutation Refutation
deriving (Show, Ord, Eq, Data, Typeable)
-- <inference_status> :== status(<status_value>) | <inference_info>
data Inference_status = Inf_value Status_value
| Inf_info Inference_info
deriving (Show, Ord, Eq, Data, Typeable)
-- %----These are the success status values from the SZS ontology. The most
-- %----commonly used values are:
-- %---- thm - Every model of the parent formulae is a model of the inferred
-- %---- formula. Regular logical consequences.
-- %---- cth - Every model of the parent formulae is a model of the negation of
-- %---- the inferred formula. Used for negation of conjectures in FOF to
-- %---- CNF conversion.
-- %---- esa - There exists a model of the parent formulae iff there exists a
-- %---- model of the inferred formula. Used for Skolemization steps.
-- %----For the full hierarchy see the SZSOntology file distributed with the TPTP.
-- <status_value> :== suc | unp | sap | esa | sat | fsa | thm | eqv | tac |
-- wec | eth | tau | wtc | wth | cax | sca | tca | wca |
-- cup | csp | ecs | csa | cth | ceq | unc | wcc | ect |
-- fun | uns | wuc | wct | scc | uca | noc
data Status_value = SUC | UNP | SAP | ESA | SAT | FSA | THM | EQV | TAC
| WEC | ETH | TAU | WTC | WTH | CAX | SCA | TCA | WCA
| CUP | CSP | ECS | CSA | CTH | CEQ | UNC | WCC | ECT
| FUN | UNS | WUC | WCT | SCC | UCA | NOC
deriving (Show, Ord, Eq, Data, Typeable)
-- %----<inference_info> is used to record standard information associated with an
-- %----arbitrary inference rule. The <inference_rule> is the same as the
-- %----<inference_rule> of the <inference_record>. The <atomic_word> indicates
-- %----the information being recorded in the <general_list>. The <atomic_word>
-- %----are (loosely) set by TPTP conventions, and include esplit, sr_split, and
-- %----discharge.
-- <inference_info> :== <inference_rule>(<atomic_word>,<general_list>)
data Inference_info = Inference_info Inference_rule Atomic_word General_list
deriving (Show, Ord, Eq, Data, Typeable)
-- %----An <assumptions_record> lists the names of assumptions upon which this
-- %----inferred formula depends. These must be discharged in a completed proof.
-- <assumptions_record> :== assumptions([<name_list>])
type Assumptions_record = Name_list
-- %----A <refutation> record names a file in which the inference recorded here
-- %----is recorded as a proof by refutation.
-- <refutation> :== refutation(<file_source>)
type Refutation = File_source
-- %----A <new_symbol_record> provides information about a newly introduced symbol.
-- <new_symbol_record> :== new_symbols(<atomic_word>,[<new_symbol_list>])
data New_symbol_record = New_symbol_record Atomic_word New_symbol_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <new_symbol_list> :== <principal_symbol> |
-- <principal_symbol>,<new_symbol_list>
type New_symbol_list = [Principal_symbol]
-- %----Principal symbols are predicates, functions, variables
-- <principal_symbol> :== <functor> | <variable>
data Principal_symbol = PS_functor TPTP_functor
| PS_variable Variable
deriving (Show, Ord, Eq, Data, Typeable)
-- %----Include directives
-- <include> ::= include(<file_name><formula_selection>).
data Include = Include File_name Formula_selection
deriving (Show, Ord, Eq, Data, Typeable)
-- <formula_selection> ::= ,[<name_list>] | <null>
type Formula_selection = Maybe [Name]
-- <name_list> ::= <name> | <name>,<name_list>
type Name_list = [Name]
-- %----Non-logical data
-- <general_term> ::= <general_data> | <general_data>:<general_term> |
-- <general_list>
data General_term = GT_data General_data
| GT_DataTerm General_data General_term
| GT_list General_list
deriving (Show, Ord, Eq, Data, Typeable)
-- <general_data> ::= <atomic_word> | <general_function> |
-- <variable> | <number> | <distinct_object> |
-- <formula_data>
data General_data = GD_atomic_word Atomic_word
| GD_general_function General_function
| GD_variable Variable
| GD_number Number
| GD_distinct_object Distinct_object
| GD_formula_data Formula_data
-- %----A <general_data> bind() term is used to record a variable binding in an
-- %----inference, as an element of the <parent_details> list.
-- <general_data> :== bind(<variable>,<formula_data>)
| GD_bind Variable Formula_data -- only used in inference
deriving (Show, Ord, Eq, Data, Typeable)
-- <general_function> ::= <atomic_word>(<general_terms>)
data General_function = General_function Atomic_word General_terms
deriving (Show, Ord, Eq, Data, Typeable)
-- <formula_data> ::= $thf(<thf_formula>) | $tff(<tff_formula>) |
-- $fof(<fof_formula>) | $cnf(<cnf_formula>) |
-- $fot(<fof_term>)
-- only used in inference
data Formula_data = FD_THF THF_formula
| FD_TFF TFF_formula
| FD_FOF FOF_formula
| FD_CNF CNF_formula
| FD_FOT FOF_term
deriving (Show, Ord, Eq, Data, Typeable)
-- <general_list> ::= [] | [<general_terms>]
type General_list = [General_term]
-- <general_terms> ::= <general_term> | <general_term>,<general_terms>
type General_terms = [General_term]
-- %----General purpose
-- <name> ::= <atomic_word> | <integer>
-- %----Integer names are expected to be unsigned
data Name = NameString Token
| NameInteger Integer
deriving (Show, Ord, Eq, Data, Typeable)
-- <atomic_word> ::= <lower_word> | <single_quoted>
type Atomic_word = Token
-- <atomic_defined_word> ::= <dollar_word>
type Atomic_defined_word = Token
-- <atomic_system_word> ::= <dollar_dollar_word>
type Atomic_system_word = Token
-- <number> ::= <integer> | <rational> | <real>
-- <number> ::= <integer> | <rational> | <real>
-- NOTE(review): TPTP <real>s are stored as 'Double', so reals beyond
-- Double precision lose information at parse time -- confirm this is
-- acceptable for the intended provers.
data Number = NumInteger Integer
            | NumRational Rational
            | NumReal Double
              deriving (Show, Ord, Eq, Data, Typeable)
-- <distinct_object> ::- <double_quote><do_char>*<double_quote>
type Distinct_object = Token
-- <file_name> ::= <single_quoted>
type File_name = IRI
-- | Replace the 'Formula_role' of an annotated formula, leaving its
-- name, formula body and annotations untouched.  Handles every TPTP
-- language variant uniformly.
set_formula_role :: Annotated_formula -> Formula_role -> Annotated_formula
set_formula_role af role = case af of
  AF_THF_Annotated (THF_annotated n _ f an) ->
    AF_THF_Annotated (THF_annotated n role f an)
  AF_TFX_Annotated (TFX_annotated n _ f an) ->
    AF_TFX_Annotated (TFX_annotated n role f an)
  AF_TFF_Annotated (TFF_annotated n _ f an) ->
    AF_TFF_Annotated (TFF_annotated n role f an)
  AF_TCF_Annotated (TCF_annotated n _ f an) ->
    AF_TCF_Annotated (TCF_annotated n role f an)
  AF_FOF_Annotated (FOF_annotated n _ f an) ->
    AF_FOF_Annotated (FOF_annotated n role f an)
  AF_CNF_Annotated (CNF_annotated n _ f an) ->
    AF_CNF_Annotated (CNF_annotated n role f an)
  AF_TPI_Annotated (TPI_annotated n _ f an) ->
    AF_TPI_Annotated (TPI_annotated n role f an)
-- | Normalise the role of a named sentence: sentences marked as axioms
-- (per 'AS_Anno.isAxiom') are given the TPTP 'Axiom' role; all other
-- sentences are returned unchanged.
adjust_formula_role :: AS_Anno.Named Annotated_formula -> AS_Anno.Named Annotated_formula
adjust_formula_role namedSen
  | AS_Anno.isAxiom namedSen =
      namedSen { AS_Anno.sentence =
                   set_formula_role (AS_Anno.sentence namedSen) Axiom }
  | otherwise = namedSen
|
spechub/Hets
|
TPTP/AS.der.hs
|
gpl-2.0
| 67,875
| 0
| 11
| 20,248
| 8,074
| 4,664
| 3,410
| 644
| 7
|
{-# LANGUAGE TypeFamilies #-}
module LifeGame.Data.CellGrid (
CellGrid(..)
, cellGrid
, randCellGrid
, indices
, neighbours
, distance
, directionTo
, searchCell
, alives
, deads
, population
) where
import LifeGame.Data.Cell (Cell(..), State(..), setState, isAlive, isDead)
import Data.Maybe (catMaybes)
import Math.Geometry.Grid
import Math.Geometry.Grid.SquareInternal (RectSquareGrid(..), SquareDirection)
import qualified System.Random as R (newStdGen, random)
import qualified Control.Monad as M (mapM)
-- | A rectangular board of 'Cell's: the pair stores the (rows, columns)
-- dimensions and the list holds every cell of the grid.
data CellGrid = CellGrid (Int, Int) [Cell]
  deriving (Eq)
-- | Build an @r@-row, @c@-column grid whose every cell starts in the
-- given 'State'.  Cells are generated with x ranging over columns and
-- y over rows, matching the original ordering.
cellGrid :: Int -> Int -> State -> CellGrid
cellGrid rows cols initialState = CellGrid (rows, cols) allCells
  where
    allCells = [ Cell initialState (x, y)
               | x <- [0 .. cols - 1]
               , y <- [0 .. rows - 1] ]
-- | An @r@ x @c@ 'CellGrid' in which every cell receives an
-- independently random 'State'.  Each cell draws from a fresh
-- generator obtained via 'R.newStdGen'.
--
-- Cleaned up: the original used the @m >>= return . f@ anti-idiom
-- twice; 'fmap' expresses the same thing directly.
randCellGrid :: Int -> Int -> IO CellGrid
randCellGrid r c =
  fmap (CellGrid (r, c)) (M.mapM randomize (indices (cellGrid r c Alive)))
  where
    -- Overwrite the cell's state with one drawn from a fresh StdGen.
    randomize cell = fmap (setState cell . fst . R.random) R.newStdGen
-- | Look up the cell carrying index @i@, or 'Nothing' when the grid
-- contains no such cell.
--
-- Fixed: the original matched the non-empty branch with the partial
-- 'head'; pattern-matching the list head is total and equivalent.
searchCell :: CellGrid -> (Int, Int) -> Maybe Cell
searchCell (CellGrid _ cs) i =
  case filter (\(Cell _ ind) -> ind == i) cs of
    (c : _) -> Just c
    []      -> Nothing
-- | Show only the grid dimensions (the cell list is omitted); output
-- is identical to the original @\"CellGrid \" ++ ...@ concatenation.
instance Show CellGrid where
  show (CellGrid (rows, cols) _) = unwords ["CellGrid", show rows, show cols]
-- 'Grid' instance so the math-geometry-grid API (indices, neighbours,
-- distance, directionTo) works directly on a 'CellGrid'.
instance Grid CellGrid where
  type Index CellGrid = Cell
  type Direction CellGrid = SquareDirection
  -- Every cell of the board, in storage order.
  indices (CellGrid _ xs) = xs
  -- 4-way (von Neumann) neighbourhood; 'searchCell' returns Nothing
  -- for off-board positions, which 'catMaybes' drops, so there is no
  -- wrap-around at the edges.
  neighbours cg (Cell _ (x, y)) = catMaybes [
      searchCell cg (x, y+1)
    , searchCell cg (x, y-1)
    , searchCell cg (x+1, y)
    , searchCell cg (x-1, y)]
  -- Manhattan (taxicab) distance between the two cells' coordinates.
  distance _ (Cell _ (x1, y1)) (Cell _ (x2, y2)) = abs (x2-x1) + abs (y2-y1)
  -- We transform our Cells into Indexes (so our CellGrid into RectSquareGrid) so we can get directions
  directionTo (CellGrid rc cs) (Cell _ xy1) (Cell _ xy2) = (\rc' -> directionTo rc' xy1 xy2) . RectSquareGrid rc . map (\(Cell _ xy) -> xy) $ cs
-- | All cells of the grid that are currently alive.
alives :: CellGrid -> [Cell]
alives grid = [ c | c <- indices grid, isAlive c ]
-- | All cells of the grid that are currently dead.
deads :: CellGrid -> [Cell]
deads grid = [ c | c <- indices grid, isDead c ]
-- | The number of living cells, widened to 'Integer'.
population :: CellGrid -> Integer
population grid = fromIntegral (length (alives grid))
|
qleguennec/lifegame
|
src/LifeGame/Data/CellGrid.hs
|
gpl-3.0
| 2,132
| 0
| 18
| 486
| 874
| 484
| 390
| 50
| 2
|
{-# OPTIONS_GHC -Wall #-}
module Yorgey.Wk2.LogAnalysis where
import Yorgey.Wk2.Log
-- This one is not on me. Had to look it up on Google.
-- But learned read, words and unwords functions, which is cool.
-- Converting to a List of String, instead of parsing as list of characters,
-- which is what I tried to do originally, obviously makes much more sense.
-- | Parse one raw log line into a 'LogMessage'.
-- Recognised shapes: @I ts msg@, @W ts msg@ and @E sev ts msg@;
-- any other line is wrapped whole in 'Unknown'.
parseMessage :: String -> LogMessage
parseMessage raw =
  case words raw of
    ("I" : ts : rest)       -> LogMessage Info (read ts) (unwords rest)
    ("W" : ts : rest)       -> LogMessage Warning (read ts) (unwords rest)
    ("E" : sev : ts : rest) -> LogMessage (Error (read sev)) (read ts) (unwords rest)
    other                   -> Unknown (unwords other)
-- | Parse a whole log file: one 'LogMessage' per input line.
parse :: String -> [LogMessage]
parse = map parseMessage . lines
-- | Insert a timestamped message into the timestamp-ordered BST.
-- 'Unknown' messages are not inserted: the tree comes back unchanged
-- (the final catch-all equation).
insert :: LogMessage -> MessageTree -> MessageTree
insert msg@(LogMessage _ ts _) tree =
  case tree of
    Leaf -> Node Leaf msg Leaf
    Node left node@(LogMessage _ ts' _) right
      | ts < ts'  -> Node (insert msg left) node right
      | otherwise -> Node left node (insert msg right)
insert _ tree = tree
-- | Fold a list of messages into a search tree, one 'insert' at a time.
build :: [LogMessage] -> MessageTree
build msgs = foldr insert Leaf msgs
-- | Left-to-right traversal: messages come out in ascending timestamp
-- order when the tree was built with 'insert'.
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf                   = []
inOrder (Node left node right) = inOrder left ++ [node] ++ inOrder right
-- | Keep only error messages whose severity exceeds 50.
filterSeverity :: LogMessage -> Bool
filterSeverity (LogMessage (Error severity) _ _) = severity > 50
filterSeverity _                                 = False

-- | The human-readable text of any message, known or unknown.
showMsg :: LogMessage -> String
showMsg (Unknown msg)        = msg
showMsg (LogMessage _ _ msg) = msg

-- | Timestamp-sorted texts of all errors with severity above 50.
whatWentWrong :: [LogMessage] -> [String]
whatWentWrong = map showMsg . filter filterSeverity . inOrder . build
|
balajisivaraman/yorgey-course
|
src/Yorgey/Wk2/LogAnalysis.hs
|
gpl-3.0
| 2,194
| 0
| 12
| 454
| 568
| 298
| 270
| 34
| 4
|
-- Pretty.hs ---
--
-- Filename: Pretty.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Sat Feb 28 19:47:39 2015 (+0100)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Sat Feb 28 20:05:38 2015 (+0100)
-- By: Manuel Schneckenreither
-- Update #: 11
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
--
--
-- Code:
-- | TODO: comment this module
module Data.ML.RandomForest.Pretty where
import Data.ML.RandomForest.Type
import qualified Data.Map as M
import Text.PrettyPrint.ANSI.Leijen
-- | Pretty-print a forest as a numbered sequence of decision trees:
-- each tree gets a "Decision Tree No. n" header and an underline, and
-- trees are separated by blank lines ('punctuate line').
instance (Pretty b) => Pretty (Forest a b) where
  pretty (Forest dts) =
    vcat $ punctuate line $
    fmap (\(n,dt) -> text ("Decision Tree No. " ++ show n) <$$>
          text "---------------------" <$$> line <> pretty dt) $ zip [1..] dts
--
-- Pretty.hs ends here
|
schnecki/HaskellMachineLearning
|
src/Data/ML/RandomForest/Pretty.hs
|
gpl-3.0
| 903
| 0
| 17
| 202
| 175
| 112
| 63
| 9
| 0
|
{-# LANGUAGE BangPatterns #-}
module View
( View()
, ViewOptions(..)
, startView
, stopView
, waitForView
, addResult
, clearResults
) where
import Prelude hiding (FilePath, (.))
import Control.Category
import Control.Concurrent
import Control.Monad
import Control.Monad.Trans
import Data.Lens hiding (focus)
import qualified Data.Text as T
import Graphics.Vty.Attributes
import Graphics.Vty.LLInput
import Graphics.Vty.Widgets.All
import View.Types
-- | Build the widget tree, wire up the event listeners and run the vty
-- UI on a forked thread.  The returned 'View' carries an MVar that is
-- filled when the UI loop terminates (see 'waitForView').
startView :: (MonadIO m) => ViewOptions -> m View
startView opts = liftIO $ do
  (root, fg, mkView) <- makeWidgets opts
  coll <- newCollection
  void $ addToCollection coll root fg
  stopped <- newEmptyMVar
  let view = mkView stopped
  addListeners view
  -- runUi blocks, so it runs on its own thread; completion is signalled
  -- through the MVar once the loop returns.
  void $ forkIO $ do
    runUi coll defaultContext
    putMVar stopped ()
  return view
-- | Block until the UI loop started by 'startView' has finished.
waitForView :: (MonadIO m) => View -> m ()
waitForView view = liftIO (takeMVar (_vStopped view))
-- | Hand the (possibly absent) result to the configured result handler
-- and shut the UI down.  Wrapped in 'schedule' so both actions run on
-- the vty event-loop thread.
stopView :: (MonadIO m) => View -> Maybe T.Text -> m ()
stopView view result = liftIO $ schedule $ do
  (voResultHandler . vOpts ^$ view) $ result
  shutdownUi
-- | Build the widget tree: title and query editor on top, result list
-- and scroll bar below.  Returns the root widget, its focus group, and
-- a partially-applied 'View' constructor still awaiting the stop MVar.
makeWidgets opts = do
  title <- plainText "Picker"
  query <- editWidget
  -- TODO pass different attrs here - it's for the selected item
  results <- newTextList def_attr []
  scroll <- newProgressBar def_attr def_attr
  top <- vBox title query
  bottom <- hBox results scroll
  -- Renamed from `all`, which shadowed Prelude.all.
  root <- vBox top bottom
  fg <- newFocusGroup
  void $ addToFocusGroup fg query
  setFocusGroupNextKey fg KDown []
  setFocusGroupPrevKey fg KUp []
  return (root, fg, View opts query results scroll)
-- | Wire up the event handlers: query edits feed the query handler,
-- activating a result finishes the picker, Esc cancels, Tab cycles the
-- selection (wrapping), and Ctrl-M acts as Enter.
addListeners view = do
  let opts = vOpts ^$ view
  let query = vQuery ^$ view
  let results = vResults ^$ view
  onChange query $ voQueryHandler ^$ opts
  -- Activating a list item stops the view with that item's text.
  onItemActivated results $ stopView view . Just . evText
  let act = activateCurrentItem results
  onActivate query $ const act
  onKeyPressed query $ \_ key mods -> do
    case (key, mods) of
      -- Esc: cancel with no result.
      (KEsc, []) -> stopView view Nothing >> return True
      -- Tab: advance the selection, wrapping back to the top at the end.
      (KASCII '\t', []) -> do
        n <- getListSize results
        mi <- getSelected results
        case mi of
          _ | n == 0 -> return ()
          Nothing -> setSelected results 0
          Just (i, _) | i + 1 == n -> setSelected results 0
          Just (i, _) -> setSelected results (i + 1)
        return True
      -- vim hack
      (KASCII 'm', [MCtrl]) -> act >> return True
      _ -> return False
  where
    evText (ActivateItemEvent _ xs _) = xs
-- | Append one result line to the list (on the UI thread via 'schedule')
-- and ensure some row is selected.  The bang patterns force the list
-- widget and the text eagerly to avoid retaining thunks.
addResult :: (MonadIO m) => View -> T.Text -> m ()
addResult View{ _vResults = !rs } !xs = liftIO $ do
  tw <- plainText xs
  schedule $ do
    addToList rs xs tw
    -- Select row 0 only when nothing is selected yet.
    getSelected rs >>= maybe (setSelected rs 0) (const $ return ())
-- | Remove every entry from the result list (on the UI thread).
clearResults :: (MonadIO m) => View -> m ()
clearResults View{ _vResults = !rs } = liftIO $ schedule $ clearList rs
|
ktvoelker/Picker
|
src/View.hs
|
gpl-3.0
| 2,793
| 0
| 23
| 678
| 1,039
| 508
| 531
| -1
| -1
|
-- Copyright John F. Miller 2017
-- | A Hole is a TMVar that can only ever be written to once.
module Runtime.Hole
( Hole
, mkHole
, readHole
, maybeReadHole
, writeHole) where
import Control.Concurrent.STM
-- | a STM location that can be written once then read as many times as
-- needed.
newtype Hole a = Hole {unHole :: TMVar a}
-- | Make a new empty Hole
mkHole :: STM(Hole a)
mkHole = Hole <$> newEmptyTMVar
-- | Read the contents of a Hole, block if the hole is empty
readHole :: Hole a -> STM(a)
readHole (Hole tmv) = readTMVar tmv
-- | If the Hole is full returns its value, otherwise returns the first
-- argument.  Never blocks ('tryReadTMVar'); the manual case analysis of
-- the old version collapses to 'maybe' under STM's Functor.
maybeReadHole :: a -> Hole a -> STM a
maybeReadHole alt (Hole tmv) = maybe alt id <$> tryReadTMVar tmv
-- | Write to an empty hole.  If the hole is already full the value is
-- left untouched and False is returned; the result is typically ignored.
writeHole :: Hole a -> a ->STM Bool
writeHole (Hole tmv) value = tryPutTMVar tmv value
|
antarestrader/sapphire
|
Runtime/Hole.hs
|
gpl-3.0
| 1,052
| 0
| 10
| 236
| 228
| 122
| 106
| 20
| 2
|
{-# LANGUAGE UndecidableInstances, RankNTypes, FlexibleInstances, MultiParamTypeClasses #-}
{-| Module : TypeConstraints
License : GPL
Maintainer : helium@cs.uu.nl
Stability : experimental
Portability : portable
The type constraints used by the Helium compiler (all derived from the
basic constraints that are supplied by the Top framework). Some constraints
are lifted to work on finite maps as well.
-}
module Helium.StaticAnalysis.Miscellaneous.TypeConstraints where
import Top.Constraint
import Top.Constraint.Equality hiding ((.==.))
import Top.Constraint.Qualifier
import Top.Constraint.Polymorphism hiding ((.::.))
import Top.Constraint.Information
import Top.Interface.Basic
import Top.Interface.Substitution
import Top.Interface.TypeInference
import Top.Interface.Qualification
import Top.Types
import qualified Data.Map as M
type TypeConstraints info = [TypeConstraint info]
data TypeConstraint info
= TC1 (EqualityConstraint info)
| TC2 (ExtraConstraint info)
| TC3 (PolymorphismConstraint info)
| TCOper String (forall m . HasSubst m info => m ())
instance (HasBasic m info, HasTI m info, HasSubst m info, HasQual m info, PolyTypeConstraintInfo info)
=> Solvable (TypeConstraint info) m where
solveConstraint (TC1 c) = solveConstraint c
solveConstraint (TC2 c) = solveConstraint c
solveConstraint (TC3 c) = solveConstraint c
solveConstraint (TCOper _ f) = f
checkCondition (TC1 c) = checkCondition c
checkCondition (TC2 c) = checkCondition c
checkCondition (TC3 c) = checkCondition c
checkCondition (TCOper _ _) = return True
instance Show info => Show (TypeConstraint info) where
show (TC1 c) = show c
show (TC2 c) = show c
show (TC3 c) = show c
show (TCOper s _) = s
instance Substitutable (TypeConstraint info) where
sub |-> (TC1 c) = TC1 (sub |-> c)
sub |-> (TC2 c) = TC2 (sub |-> c)
sub |-> (TC3 c) = TC3 (sub |-> c)
_ |-> tc = tc
ftv (TC1 c) = ftv c
ftv (TC2 c) = ftv c
ftv (TC3 c) = ftv c
ftv _ = []
------------
-- | Replace scheme variables inside instantiation/skolemization
-- constraints by the schemes found in the map; all other constraints
-- pass through untouched.
polySubst :: M.Map Int (Scheme Predicates) -> TypeConstraint info -> TypeConstraint info
polySubst schemeMap constraint =
   case constraint of
      TC3 (Instantiate tp sigma info)        -> TC3 (Instantiate tp (replace sigma) info)
      TC3 (Skolemize tp (monos, sigma) info) -> TC3 (Skolemize tp (monos, replace sigma) info)
      _                                      -> constraint
 where
   replace :: Sigma Predicates -> Sigma Predicates
   replace sigma@(SigmaVar i) = maybe sigma SigmaScheme (M.lookup i schemeMap)
   replace sigma              = sigma
-- | The type variable a constraint can be "spread" over, if any: the
-- variable position that solving the constraint will determine.
spreadFunction :: TypeConstraint info -> Maybe Int
spreadFunction (TC1 (Equality _ t2 _))      = spreadFromType t2
spreadFunction (TC3 (Instantiate tp _ _))   = spreadFromType tp
spreadFunction (TC3 (Skolemize tp _ _))     = spreadFromType tp
spreadFunction (TC3 (Implicit t1 (_, _) _)) = spreadFromType t1
spreadFunction _                            = Nothing

-- | Only a bare type variable can be spread from.
spreadFromType :: Tp -> Maybe Int
spreadFromType (TVar v) = Just v
spreadFromType _        = Nothing
------------------------------------------------------------------------------
-- Lifted constructors
infix 3 .==., .===., .::., .:::., !::!, !:::!, .<=., .<==., !<=!, !<==!
lift :: Ord k => (a1 -> t1 -> t2 -> a) -> M.Map k a1
-> M.Map k [(t, t1)] -> (t -> t2) -> ([a], M.Map k [(t, t1)])
lift combinator as bs cf =
let constraints = concat (M.elems (M.intersectionWith f as bs))
rest = bs M.\\ as
f a list = [ (a `combinator` b) (cf name) | (name,b) <- list ]
in (constraints, rest)
(.==.) :: Show info => Tp -> Tp -> info -> TypeConstraint info
(t1 .==. t2) info = TC1 (Equality t1 t2 info)
(.===.) :: (Show info, Ord key) => M.Map key Tp -> M.Map key [(key,Tp)] -> (key -> info) -> ([TypeConstraint info], M.Map key [(key,Tp)])
(.===.) = lift (.==.)
(.::.) :: Show info => Tp -> TpScheme -> info -> TypeConstraint info
tp .::. ts = tp .<=. SigmaScheme ts
(.:::.) :: (Show info, Ord key) => M.Map key TpScheme -> M.Map key [(key,Tp)] -> (key -> info) -> ([TypeConstraint info], M.Map key [(key,Tp)])
(.:::.) = lift (flip (.::.))
(!::!) :: Tp -> TpScheme -> Tps -> info -> TypeConstraint info
(tp !::! ts) monos info = TC3 (Skolemize tp (monos, SigmaScheme ts) info)
(!:::!) :: (Show info, Ord key) => M.Map key TpScheme -> M.Map key Tp -> Tps -> (Tps -> key -> key -> info) -> ([TypeConstraint info], M.Map key Tp)
(as !:::! bs) monos info =
let op key tp (cs, fm) =
case M.lookup key as of
Just ts ->
let -- the key of the type scheme (equal, but may have a different range).
key' = head (filter (==key) (M.keys as)) {- this is the other name -}
in ((tp !::! ts) monos (info monos key key') : cs, fm)
Nothing -> (cs, M.insert key tp fm)
in M.foldWithKey op ([], M.empty) bs
(.<=.) :: Show info => Tp -> Sigma Predicates -> info -> TypeConstraint info
(tp .<=. ts) info = TC3 (Instantiate tp ts info)
(.<==.) :: (Show info, Ord key) => M.Map key (Sigma Predicates) -> M.Map key [(key,Tp)] -> (key -> info) -> ([TypeConstraint info], M.Map key [(key,Tp)])
(.<==.) = lift (flip (.<=.))
-- the old implicit instance constraint
(!<=!) :: Show info => Tps -> Tp -> Tp -> info -> TypeConstraint info
(!<=!) ms t1 t2 info = TC3 (Implicit t1 (ms, t2) info)
(!<==!) :: (Show info, Ord key) => Tps -> M.Map key Tp -> M.Map key [(key,Tp)] -> (key -> info) -> ([TypeConstraint info], M.Map key [(key,Tp)])
(!<==!) ms = lift (ms !<=!)
-- | One Generalize constraint per (sigma variable, key/type) pair,
-- generalizing over the given monomorphic types.
genConstraints :: Tps -> (key -> info) -> [(Int, (key, Tp))] -> TypeConstraints info
genConstraints monos infoF pairs =
   [ TC3 (Generalize sv (monos, tp) (infoF key)) | (sv, (key, tp)) <- pairs ]
-- | Wrap a class predicate as a Prove constraint.
predicate :: Predicate -> info -> TypeConstraint info
predicate p info = TC2 (Prove p info)
|
roberth/uu-helium
|
src/Helium/StaticAnalysis/Miscellaneous/TypeConstraints.hs
|
gpl-3.0
| 5,995
| 0
| 21
| 1,560
| 2,370
| 1,242
| 1,128
| 107
| 5
|
-- | Umbrella module that re-exports every request-handler module, so
-- consumers need only @import Handlers@.
module Handlers
  ( module Handlers.Chunks
  , module Handlers.People
  , module Handlers.Browse
  , module Handlers.Submit
  , module Handlers.Login
  , module Handlers.Session
  , module Handlers.App
  ) where

import Handlers.Chunks
import Handlers.People
import Handlers.Browse
import Handlers.Submit
import Handlers.Login
import Handlers.Session
import Handlers.App
|
athanclark/happ-store
|
src/Handlers.hs
|
gpl-3.0
| 371
| 0
| 5
| 56
| 83
| 53
| 30
| 15
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Prediction.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Prediction.Types.Sum where
import Network.Google.Prelude
|
rueshyna/gogol
|
gogol-prediction/gen/Network/Google/Prediction/Types/Sum.hs
|
mpl-2.0
| 604
| 0
| 4
| 109
| 29
| 25
| 4
| 8
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.PubSub.Projects.Subscriptions.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a NOT_FOUND error.
--
-- /See:/ <https://cloud.google.com/pubsub/docs Google Cloud Pub/Sub API Reference> for @pubsub.projects.subscriptions.testIamPermissions@.
module Network.Google.Resource.PubSub.Projects.Subscriptions.TestIAMPermissions
(
-- * REST Resource
ProjectsSubscriptionsTestIAMPermissionsResource
-- * Creating a Request
, projectsSubscriptionsTestIAMPermissions
, ProjectsSubscriptionsTestIAMPermissions
-- * Request Lenses
, pstiampXgafv
, pstiampUploadProtocol
, pstiampPp
, pstiampAccessToken
, pstiampUploadType
, pstiampPayload
, pstiampBearerToken
, pstiampResource
, pstiampCallback
) where
import Network.Google.Prelude
import Network.Google.PubSub.Types
-- | A resource alias for @pubsub.projects.subscriptions.testIamPermissions@ method which the
-- 'ProjectsSubscriptionsTestIAMPermissions' request conforms to.
type ProjectsSubscriptionsTestIAMPermissionsResource
=
"v1" :>
CaptureMode "resource" "testIamPermissions" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TestIAMPermissionsRequest :>
Post '[JSON] TestIAMPermissionsResponse
-- | Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a NOT_FOUND error.
--
-- /See:/ 'projectsSubscriptionsTestIAMPermissions' smart constructor.
data ProjectsSubscriptionsTestIAMPermissions = ProjectsSubscriptionsTestIAMPermissions'
{ _pstiampXgafv :: !(Maybe Xgafv)
, _pstiampUploadProtocol :: !(Maybe Text)
, _pstiampPp :: !Bool
, _pstiampAccessToken :: !(Maybe Text)
, _pstiampUploadType :: !(Maybe Text)
, _pstiampPayload :: !TestIAMPermissionsRequest
, _pstiampBearerToken :: !(Maybe Text)
, _pstiampResource :: !Text
, _pstiampCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsSubscriptionsTestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pstiampXgafv'
--
-- * 'pstiampUploadProtocol'
--
-- * 'pstiampPp'
--
-- * 'pstiampAccessToken'
--
-- * 'pstiampUploadType'
--
-- * 'pstiampPayload'
--
-- * 'pstiampBearerToken'
--
-- * 'pstiampResource'
--
-- * 'pstiampCallback'
projectsSubscriptionsTestIAMPermissions
:: TestIAMPermissionsRequest -- ^ 'pstiampPayload'
-> Text -- ^ 'pstiampResource'
-> ProjectsSubscriptionsTestIAMPermissions
projectsSubscriptionsTestIAMPermissions pPstiampPayload_ pPstiampResource_ =
ProjectsSubscriptionsTestIAMPermissions'
{ _pstiampXgafv = Nothing
, _pstiampUploadProtocol = Nothing
, _pstiampPp = True
, _pstiampAccessToken = Nothing
, _pstiampUploadType = Nothing
, _pstiampPayload = pPstiampPayload_
, _pstiampBearerToken = Nothing
, _pstiampResource = pPstiampResource_
, _pstiampCallback = Nothing
}
-- | V1 error format.
pstiampXgafv :: Lens' ProjectsSubscriptionsTestIAMPermissions (Maybe Xgafv)
pstiampXgafv
= lens _pstiampXgafv (\ s a -> s{_pstiampXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pstiampUploadProtocol :: Lens' ProjectsSubscriptionsTestIAMPermissions (Maybe Text)
pstiampUploadProtocol
= lens _pstiampUploadProtocol
(\ s a -> s{_pstiampUploadProtocol = a})
-- | Pretty-print response.
pstiampPp :: Lens' ProjectsSubscriptionsTestIAMPermissions Bool
pstiampPp
= lens _pstiampPp (\ s a -> s{_pstiampPp = a})
-- | OAuth access token.
pstiampAccessToken :: Lens' ProjectsSubscriptionsTestIAMPermissions (Maybe Text)
pstiampAccessToken
= lens _pstiampAccessToken
(\ s a -> s{_pstiampAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pstiampUploadType :: Lens' ProjectsSubscriptionsTestIAMPermissions (Maybe Text)
pstiampUploadType
= lens _pstiampUploadType
(\ s a -> s{_pstiampUploadType = a})
-- | Multipart request metadata.
pstiampPayload :: Lens' ProjectsSubscriptionsTestIAMPermissions TestIAMPermissionsRequest
pstiampPayload
= lens _pstiampPayload
(\ s a -> s{_pstiampPayload = a})
-- | OAuth bearer token.
pstiampBearerToken :: Lens' ProjectsSubscriptionsTestIAMPermissions (Maybe Text)
pstiampBearerToken
= lens _pstiampBearerToken
(\ s a -> s{_pstiampBearerToken = a})
-- | REQUIRED: The resource for which the policy detail is being requested.
-- \`resource\` is usually specified as a path. For example, a Project
-- resource is specified as \`projects\/{project}\`.
pstiampResource :: Lens' ProjectsSubscriptionsTestIAMPermissions Text
pstiampResource
= lens _pstiampResource
(\ s a -> s{_pstiampResource = a})
-- | JSONP
pstiampCallback :: Lens' ProjectsSubscriptionsTestIAMPermissions (Maybe Text)
pstiampCallback
= lens _pstiampCallback
(\ s a -> s{_pstiampCallback = a})
instance GoogleRequest
ProjectsSubscriptionsTestIAMPermissions where
type Rs ProjectsSubscriptionsTestIAMPermissions =
TestIAMPermissionsResponse
type Scopes ProjectsSubscriptionsTestIAMPermissions =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/pubsub"]
requestClient
ProjectsSubscriptionsTestIAMPermissions'{..}
= go _pstiampResource _pstiampXgafv
_pstiampUploadProtocol
(Just _pstiampPp)
_pstiampAccessToken
_pstiampUploadType
_pstiampBearerToken
_pstiampCallback
(Just AltJSON)
_pstiampPayload
pubSubService
where go
= buildClient
(Proxy ::
Proxy
ProjectsSubscriptionsTestIAMPermissionsResource)
mempty
|
rueshyna/gogol
|
gogol-pubsub/gen/Network/Google/Resource/PubSub/Projects/Subscriptions/TestIAMPermissions.hs
|
mpl-2.0
| 7,307
| 0
| 18
| 1,621
| 940
| 548
| 392
| 144
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.File.Projects.Locations.Backups.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a backup.
--
-- /See:/ <https://cloud.google.com/filestore/ Cloud Filestore API Reference> for @file.projects.locations.backups.create@.
module Network.Google.Resource.File.Projects.Locations.Backups.Create
(
-- * REST Resource
ProjectsLocationsBackupsCreateResource
-- * Creating a Request
, projectsLocationsBackupsCreate
, ProjectsLocationsBackupsCreate
-- * Request Lenses
, plbcParent
, plbcXgafv
, plbcUploadProtocol
, plbcAccessToken
, plbcBackupId
, plbcUploadType
, plbcPayload
, plbcCallback
) where
import Network.Google.File.Types
import Network.Google.Prelude
-- | A resource alias for @file.projects.locations.backups.create@ method which the
-- 'ProjectsLocationsBackupsCreate' request conforms to.
type ProjectsLocationsBackupsCreateResource =
"v1" :>
Capture "parent" Text :>
"backups" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "backupId" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Backup :> Post '[JSON] Operation
-- | Creates a backup.
--
-- /See:/ 'projectsLocationsBackupsCreate' smart constructor.
data ProjectsLocationsBackupsCreate =
ProjectsLocationsBackupsCreate'
{ _plbcParent :: !Text
, _plbcXgafv :: !(Maybe Xgafv)
, _plbcUploadProtocol :: !(Maybe Text)
, _plbcAccessToken :: !(Maybe Text)
, _plbcBackupId :: !(Maybe Text)
, _plbcUploadType :: !(Maybe Text)
, _plbcPayload :: !Backup
, _plbcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsBackupsCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plbcParent'
--
-- * 'plbcXgafv'
--
-- * 'plbcUploadProtocol'
--
-- * 'plbcAccessToken'
--
-- * 'plbcBackupId'
--
-- * 'plbcUploadType'
--
-- * 'plbcPayload'
--
-- * 'plbcCallback'
projectsLocationsBackupsCreate
:: Text -- ^ 'plbcParent'
-> Backup -- ^ 'plbcPayload'
-> ProjectsLocationsBackupsCreate
projectsLocationsBackupsCreate pPlbcParent_ pPlbcPayload_ =
ProjectsLocationsBackupsCreate'
{ _plbcParent = pPlbcParent_
, _plbcXgafv = Nothing
, _plbcUploadProtocol = Nothing
, _plbcAccessToken = Nothing
, _plbcBackupId = Nothing
, _plbcUploadType = Nothing
, _plbcPayload = pPlbcPayload_
, _plbcCallback = Nothing
}
-- | Required. The backup\'s project and location, in the format
-- projects\/{project_number}\/locations\/{location}. In Cloud Filestore,
-- backup locations map to GCP regions, for example **us-west1**.
plbcParent :: Lens' ProjectsLocationsBackupsCreate Text
plbcParent
= lens _plbcParent (\ s a -> s{_plbcParent = a})
-- | V1 error format.
plbcXgafv :: Lens' ProjectsLocationsBackupsCreate (Maybe Xgafv)
plbcXgafv
= lens _plbcXgafv (\ s a -> s{_plbcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plbcUploadProtocol :: Lens' ProjectsLocationsBackupsCreate (Maybe Text)
plbcUploadProtocol
= lens _plbcUploadProtocol
(\ s a -> s{_plbcUploadProtocol = a})
-- | OAuth access token.
plbcAccessToken :: Lens' ProjectsLocationsBackupsCreate (Maybe Text)
plbcAccessToken
= lens _plbcAccessToken
(\ s a -> s{_plbcAccessToken = a})
-- | Required. The ID to use for the backup. The ID must be unique within the
-- specified project and location. This value must start with a lowercase
-- letter followed by up to 62 lowercase letters, numbers, or hyphens, and
-- cannot end with a hyphen. Values that do not match this pattern will
-- trigger an INVALID_ARGUMENT error.
plbcBackupId :: Lens' ProjectsLocationsBackupsCreate (Maybe Text)
plbcBackupId
= lens _plbcBackupId (\ s a -> s{_plbcBackupId = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plbcUploadType :: Lens' ProjectsLocationsBackupsCreate (Maybe Text)
plbcUploadType
= lens _plbcUploadType
(\ s a -> s{_plbcUploadType = a})
-- | Multipart request metadata.
plbcPayload :: Lens' ProjectsLocationsBackupsCreate Backup
plbcPayload
= lens _plbcPayload (\ s a -> s{_plbcPayload = a})
-- | JSONP
plbcCallback :: Lens' ProjectsLocationsBackupsCreate (Maybe Text)
plbcCallback
= lens _plbcCallback (\ s a -> s{_plbcCallback = a})
instance GoogleRequest ProjectsLocationsBackupsCreate
where
type Rs ProjectsLocationsBackupsCreate = Operation
type Scopes ProjectsLocationsBackupsCreate =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsBackupsCreate'{..}
= go _plbcParent _plbcXgafv _plbcUploadProtocol
_plbcAccessToken
_plbcBackupId
_plbcUploadType
_plbcCallback
(Just AltJSON)
_plbcPayload
fileService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsBackupsCreateResource)
mempty
|
brendanhay/gogol
|
gogol-file/gen/Network/Google/Resource/File/Projects/Locations/Backups/Create.hs
|
mpl-2.0
| 6,086
| 0
| 18
| 1,351
| 866
| 506
| 360
| 125
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceBroker.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a NOT_FOUND error. Note: This operation is designed to be used for
-- building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ <https://cloud.google.com/kubernetes-engine/docs/concepts/add-on/service-broker Service Broker API Reference> for @servicebroker.testIamPermissions@.
module Network.Google.Resource.ServiceBroker.TestIAMPermissions
(
-- * REST Resource
TestIAMPermissionsResource
-- * Creating a Request
, testIAMPermissions
, TestIAMPermissions
-- * Request Lenses
, tipXgafv
, tipUploadProtocol
, tipAccessToken
, tipUploadType
, tipPayload
, tipResource
, tipCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceBroker.Types
-- | A resource alias for @servicebroker.testIamPermissions@ method which the
-- 'TestIAMPermissions' request conforms to.
type TestIAMPermissionsResource =
"v1" :>
CaptureMode "resource" "testIamPermissions" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GoogleIAMV1__TestIAMPermissionsRequest
:>
Post '[JSON] GoogleIAMV1__TestIAMPermissionsResponse
-- | Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a NOT_FOUND error. Note: This operation is designed to be used for
-- building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ 'testIAMPermissions' smart constructor.
data TestIAMPermissions =
TestIAMPermissions'
{ _tipXgafv :: !(Maybe Xgafv)
, _tipUploadProtocol :: !(Maybe Text)
, _tipAccessToken :: !(Maybe Text)
, _tipUploadType :: !(Maybe Text)
, _tipPayload :: !GoogleIAMV1__TestIAMPermissionsRequest
, _tipResource :: !Text
, _tipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tipXgafv'
--
-- * 'tipUploadProtocol'
--
-- * 'tipAccessToken'
--
-- * 'tipUploadType'
--
-- * 'tipPayload'
--
-- * 'tipResource'
--
-- * 'tipCallback'
testIAMPermissions
:: GoogleIAMV1__TestIAMPermissionsRequest -- ^ 'tipPayload'
-> Text -- ^ 'tipResource'
-> TestIAMPermissions
testIAMPermissions pTipPayload_ pTipResource_ =
TestIAMPermissions'
{ _tipXgafv = Nothing
, _tipUploadProtocol = Nothing
, _tipAccessToken = Nothing
, _tipUploadType = Nothing
, _tipPayload = pTipPayload_
, _tipResource = pTipResource_
, _tipCallback = Nothing
}
-- | V1 error format.
tipXgafv :: Lens' TestIAMPermissions (Maybe Xgafv)
tipXgafv = lens _tipXgafv (\ s a -> s{_tipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
tipUploadProtocol :: Lens' TestIAMPermissions (Maybe Text)
tipUploadProtocol
= lens _tipUploadProtocol
(\ s a -> s{_tipUploadProtocol = a})
-- | OAuth access token.
tipAccessToken :: Lens' TestIAMPermissions (Maybe Text)
tipAccessToken
= lens _tipAccessToken
(\ s a -> s{_tipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
tipUploadType :: Lens' TestIAMPermissions (Maybe Text)
tipUploadType
= lens _tipUploadType
(\ s a -> s{_tipUploadType = a})
-- | Multipart request metadata.
tipPayload :: Lens' TestIAMPermissions GoogleIAMV1__TestIAMPermissionsRequest
tipPayload
= lens _tipPayload (\ s a -> s{_tipPayload = a})
-- | REQUIRED: The resource for which the policy detail is being requested.
-- See the operation documentation for the appropriate value for this
-- field.
tipResource :: Lens' TestIAMPermissions Text
tipResource
= lens _tipResource (\ s a -> s{_tipResource = a})
-- | JSONP
tipCallback :: Lens' TestIAMPermissions (Maybe Text)
tipCallback
= lens _tipCallback (\ s a -> s{_tipCallback = a})
instance GoogleRequest TestIAMPermissions where
type Rs TestIAMPermissions =
GoogleIAMV1__TestIAMPermissionsResponse
type Scopes TestIAMPermissions =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient TestIAMPermissions'{..}
= go _tipResource _tipXgafv _tipUploadProtocol
_tipAccessToken
_tipUploadType
_tipCallback
(Just AltJSON)
_tipPayload
serviceBrokerService
where go
= buildClient
(Proxy :: Proxy TestIAMPermissionsResource)
mempty
|
brendanhay/gogol
|
gogol-servicebroker/gen/Network/Google/Resource/ServiceBroker/TestIAMPermissions.hs
|
mpl-2.0
| 5,983
| 0
| 16
| 1,333
| 787
| 463
| 324
| 116
| 1
|
{-|
Module : Eval
-}
module Eval where
import Ast
import Invert
import Text.PrettyPrint
import Data.List (intersect, nub)
import Debug.Trace
-- | Look a variable up in the environment; aborts with the
-- pretty-printed environment when the variable is missing.
lookupEnv :: Ident -> Env -> Val
lookupEnv x env =
  case lookup x env of
    Just v  -> v
    Nothing -> error ("in lookupEnv: Variable " ++ x ++ " not found\n" ++
                      render (prettyEnv env))
-- | Replace the binding of an existing variable; aborts if it is absent.
update :: Ident -> Val -> Env -> Env
update x _ [] = error ("in update: Variable " ++ x ++ " is not found")
update x v ((y, w) : rest)
  | x == y    = (y, v) : rest
  | otherwise = (y, w) : update x v rest
-- | Drop a variable's binding from the environment.
minus :: Env -> Ident -> Env
env `minus` x = [ b | b@(y, _) <- env, y /= x ]
-- | Bind every variable to Nil.
initEnv :: [Ident] -> Env
initEnv xs = [ (x, Nil) | x <- xs ]
-- |Evaluator: reduce an expression to a value under the environment.
-- The procedure list @ps@ is threaded through but not consulted by any
-- of these purely structural cases.
eval :: [Proc] -> Env -> Exp -> Val
eval ps ev (AVar x) = lookupEnv x ev
eval ps _ (AVal v) = v
eval ps ev (ACons e f) = Cons (eval ps ev e) (eval ps ev f)
-- Head/tail are only defined on Cons cells; Atom and Nil abort with a
-- diagnostic that includes the offending expression and environment.
eval ps ev (AHd e) = case eval ps ev e of
  Cons v _ -> v
  Atom _ -> error ("RWhileWM.Eval.eval hd atom: " ++ show (AHd e) ++ "\n" ++ show ev)
  Nil -> error ("RWhileWM.Eval.eval hd nil: " ++ show (AHd e) ++ "\n" ++ show ev)
eval ps ev (ATl f) = case eval ps ev f of
  Cons _ v -> v
  Atom _ -> error ("in RWhileWM.Eval.eval tl nil: " ++ show f ++ ":" ++ show ev)
  Nil -> error ("in RWhileWM.Eval.eval tl nil: " ++ show f ++ ":" ++ show ev)
-- Equality encodes True as (Cons Nil Nil) and False as Nil.
eval ps ev (AEq e f) = if v1 == v2 then Cons Nil Nil else Nil
  where v1 = eval ps ev e
        v2 = eval ps ev f
-- |Execution of commands: run a single (reversible) command, returning
-- the updated environment.  Each construct checks its exit assertion
-- and calls 'error' when an invariant is violated.  The memory for
-- SUpdate/SLookup is held in the distinguished variable "X0".
exec :: [Proc] -> Env -> Cmd -> Env
-- exec ps ev c | traceShow (pretty c,prettyEnv ev) False = undefined
-- exec ps ev c | traceShow ("exec: " ++ show c) False = undefined
-- Reversible assignment: x is removed from scope while evaluating e;
-- x := e sets x when x is Nil, clears x when it already equals e.
exec ps ev (SAss x e) =
  let v' = eval ps (ev `minus` x) e in
    case lookupEnv x ev of
      Nil -> update x v' ev
      v -> if v == v'
           then update x Nil ev
           else if v' == Nil
                then ev
                else error ("Variable " ++ x ++ " does not match: " ++ render (prettyEnv ev) ++ "\n(" ++ show x ++ ", " ++ show e ++ ") = " ++ show v ++ "," ++ show v')
-- Conditional with exit assertion f: the taken branch must leave f
-- agreeing with the entry test (true branch => f true, else f false).
exec ps ev (SCond e cs ds f)
  | eval ps ev e /= Nil = let ev' = foldl (exec ps) ev cs
                          in if eval ps ev' f /= Nil then ev'
                             else error ("in exec.SCond.true: " ++ show f ++ " must be true\n" ++
                                         show ev)
  | otherwise = let ev' = foldl (exec ps) ev ds
                in if eval ps ev' f == Nil then ev' else error ("in exec.SCond.false: " ++ show f ++ " must be false")
-- Loop entry: the entry assertion e must hold before the first iteration.
exec ps ev (SLoop e cs ds f)
  | eval ps ev e /= Nil = loop ps ev (e,cs,ds,f)
  | otherwise = error ("Assertion failed on entry of loop: " ++ show (SLoop e cs ds f))
-- Replacement: pattern q is rebuilt from the value of r after the
-- variables of r are cleared.
exec ps ev (SRep q r) = let v = eval ps ev r
                            ev' = clear ev (vars r)
                        in execSRep ev' q v
-- Memory update at index e with value f; the memory list lives in "X0".
exec ps ev (SUpdate e f) = let v1 = eval ps ev e
                               v2 = eval ps ev f
                               vl = lookupEnv "X0" ev -- Vl
                               vl' = update' v1 v2 vl
                           in update "X0" vl' ev -- Vl
  where update' Nil v2 (Cons Nil vl) = Cons v2 vl
        update' Nil v2 (Cons v vl) | v2 == v = Cons Nil vl
        update' Nil Nil vl = vl
        update' (Cons Nil v1) v2 (Cons v vl) = Cons v (update' v1 v2 vl)
        update' v1 v2 vl = error ("Illegal update: \n" ++
                                  "Vl: " ++ show (pretty vl) ++ "\n" ++
                                  "j: " ++ show (pretty v1) ++ "\n" ++
                                  "E: " ++ show (pretty v2) ++ "\n" ++
                                  render (pretty (SUpdate e f))
                                 )
-- Memory lookup: x <=> mem[e], with the same set/clear discipline as
-- reversible assignment (local 'lookup' shadows Prelude.lookup).
exec ps ev (SLookup e x) =
  let v = eval ps ev e
      x' = lookupEnv x ev
      vl = lookupEnv "X0" ev
      v' = lookup v vl
  in if v' == x' then update x Nil ev
     else if x' == Nil then update x v' ev
          else if v' == Nil then ev
               else error ("in exec.SLookup: " ++ render (pretty (SLookup e x)))
  where lookup Nil (Cons v1 vl) = v1
        lookup (Cons Nil v) (Cons v1 vl) = lookup v vl
        lookup e f = error $ "in RWhileWM.Eval.exec.SLookup (" ++ show e ++ ") (" ++ show f ++ ")"
-- exec ev (SAssert x v) = if lookupEnv x ev == v then ev
--                         else error ("SAsssert failed: " ++ show ev ++ "\n(x,v)=" ++ show x ++ "," ++ show v)
exec ps ev (SAssert str e)
  | eval ps ev e /= Nil = ev
  | otherwise = error ("Impossible happened, assertion failed (" ++ str ++ "): " ++ show e ++ ", " ++ show ev)
-- Debug print: show the value of e as a side effect, no state change.
exec ps ev (SShow e) = trace (show (pretty (eval ps ev e))) ev
-- Procedure call: substitute actuals for formals in the body and run it.
exec ps ev (SCall id args) = let Just (fs, cs) = lookup id (map tip ps)
                                 tip (Proc x fs cs) = (x,(fs,cs))
                             in foldl (exec ps) ev (map (subst (zip fs args)) cs)
-- Uncall: run the inverted body (see 'invert'), undoing a call.
exec ps ev (SUncall id args) =
  let Just (fs, cs) = lookup id (map tip ps)
      tip (Proc x fs cs) = (x,(fs,cs))
  in foldl (exec ps) ev (map (subst (zip fs args)) (invert cs))
-- | Reset the listed variables to Nil, leaving other bindings untouched.
clear :: Env -> [Ident] -> Env
clear env names =
  [ (x, if x `elem` names then Nil else v) | (x, v) <- env ]
-- | Match a value against a replacement pattern, binding each pattern
-- variable (which must currently be Nil) to the corresponding part of
-- the value.  Cons patterns bind right-to-left; a constant pattern
-- must equal the value exactly, otherwise the catch-all aborts.
execSRep :: Env -> Exp -> Val -> Env
-- execSRep _ e f | traceShow (e,f) False = undefined
execSRep ev (AVar x) d = if lookupEnv x ev == Nil
                         then update x d ev
                         -- else if lookupEnv x ev == d
                         --      then update x Nil ev
                         else error ("in execSrep: lookup failed: " ++ show x ++ "\n" ++
                                     show ev)
execSRep ev (AVal e) d | e == d = ev
execSRep ev (ACons q r) (Cons e f) = execSRep (execSRep ev r f) q e
execSRep ev q d = error $ "in execSRep (pat,val): (" ++ render (pretty q) ++ ", " ++ render (pretty d) ++ ")"
-- | Body of a reversible loop @from e1 do cs1 loop cs2 until e2@:
-- run cs1, stop when the exit assertion e2 holds; otherwise run cs2
-- and re-enter, requiring the entry assertion e1 to be false on every
-- iteration after the first.
loop :: [Proc] -> Env -> (Exp,[Cmd],[Cmd],Exp) -> Env
-- loop ev1 (e1,cs1,cs2,e2) | traceShow ("loop",cs1,cs2) False = undefined
loop ps ev1 (e1,cs1,cs2,e2) =
  let ev2 = foldl (exec ps) ev1 cs1
  in if eval ps ev2 e2 /= Nil
     then ev2
     else let ev3 = foldl (exec ps) ev2 cs2
          in if eval ps ev3 e1 == Nil
             then loop ps ev3 (e1,cs1,cs2,e2)
             else error ("Assertion failed in loop (assertion should be false): " ++ show e1 ++ ", " ++ show ev3)
-- | Run a whole program on an input value: bind the input variable x,
-- execute the body, and return the output variable y.  All variables
-- other than y must be Nil at the end (reversibility check),
-- otherwise the run aborts.
execProgram :: Program -> Val -> Val
execProgram prog@(PDefs ps x cs y) v =
  let ev0 = update x v (initEnv (vars prog))
      ev1 = foldl (exec ps) ev0 cs
  in if allNil (ev1 `minus` y)
     then lookupEnv y ev1
     else error $ "Some vars are not Nil: " ++ render (prettyEnv ev1)
  where allNil :: Env -> Bool
        allNil ev = all (== Nil) (map snd ev)
-- | Syntactic well-formedness checks for three language levels:
-- RWhile with memory (WM), plain RWhile (no memory or procedure
-- commands), and R-Core (additionally restricted to simple
-- expressions and variable-only conditions).
class WellFormed a where
  wellFormedRWhileWM :: a -> Bool
  wellFormedRWhile :: a -> Bool
  wellFormedRCore :: a -> Bool

-- | A program is well-formed when every top-level command is.
instance WellFormed Program where
  wellFormedRWhileWM (PDefs ps x cs y) = all wellFormedRWhileWM cs
  wellFormedRWhile (PDefs ps x cs y) = all wellFormedRWhile cs
  wellFormedRCore (PDefs ps x cs y) = all wellFormedRCore cs

-- | Per-command rules.  Common to all levels: an assignment's target
-- must not occur in its right-hand side.  The stricter levels reject
-- memory commands (SUpdate/SLookup) and call/uncall.
instance WellFormed Cmd where
  wellFormedRWhileWM (SAss x e) = x `notElem` vars e
  wellFormedRWhileWM (SRep _ _) = True
  wellFormedRWhileWM (SCond _ cs ds _) = all wellFormedRWhileWM cs &&
                                         all wellFormedRWhileWM ds
  wellFormedRWhileWM (SLoop _ cs ds _) = all wellFormedRWhileWM cs &&
                                         all wellFormedRWhileWM ds
  wellFormedRWhileWM (SUpdate e f) = null (vars e `intersect` vars f)
  wellFormedRWhileWM (SLookup e x) = x `notElem` vars e
  wellFormedRWhileWM (SShow _) = True
  wellFormedRWhileWM (SAssert _ _) = True
  wellFormedRWhileWM (SCall _ _) = True
  wellFormedRWhileWM (SUncall _ _) = True
  wellFormedRWhile (SAss x e) = x `notElem` vars e
  wellFormedRWhile (SRep _ _) = True
  wellFormedRWhile (SCond _ cs ds _) = all wellFormedRWhile cs && all wellFormedRWhile ds
  wellFormedRWhile (SLoop _ cs ds _) = all wellFormedRWhile cs && all wellFormedRWhile ds
  wellFormedRWhile (SUpdate e f) = False
  wellFormedRWhile (SLookup e x) = False
  wellFormedRWhile (SShow _) = True
  wellFormedRWhile (SAssert _ _) = True
  wellFormedRWhile (SCall _ _) = False
  wellFormedRWhile (SUncall _ _) = False
  wellFormedRCore (SAss x e) = x `notElem` vars e && simpleExp e
  wellFormedRCore (SRep q r) = simpleExp q && simpleExp r
  wellFormedRCore (SCond x cs ds y) = isVar x && isVar y &&
                                      all wellFormedRCore cs && all wellFormedRCore ds
  wellFormedRCore (SLoop x cs ds y) = isVar x && isVar y &&
                                      all wellFormedRCore cs && all wellFormedRCore ds
  wellFormedRCore (SUpdate e f) = False
  wellFormedRCore (SLookup e x) = False
  wellFormedRCore (SShow _) = True
  wellFormedRCore (SAssert _ _) = True
  wellFormedRCore (SCall _ _) = False
  wellFormedRCore (SUncall _ _) = False
-- | Is the expression a bare variable reference?
isVar :: Exp -> Bool
isVar e = case e of
  AVar _ -> True
  _      -> False
-- | Does the expression belong to the restricted R-Core expression
-- forms: a variable, a constant, or a cons/equality whose operands
-- are both variables?
simpleExp :: Exp -> Bool
simpleExp e = case e of
  AVar _                  -> True
  AVal _                  -> True
  ACons (AVar _) (AVar _) -> True
  AEq (AVar _) (AVar _)   -> True
  _                       -> False
|
tyoko-dev/rwhile-B-haskell
|
src/Eval.hs
|
agpl-3.0
| 9,918
| 0
| 25
| 3,851
| 3,706
| 1,850
| 1,856
| 176
| 15
|
{-
- This file is part of Bilder.
-
- Bilder is free software: you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- Bilder is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public License
- along with Bilder. If not, see <http://www.gnu.org/licenses/>.
-
- Copyright © 2012-2013 Filip Lundborg
- Copyright © 2012-2013 Ingemar Ådahl
-
-}
{-# LANGUAGE UnicodeSyntax #-}
module Compiler.Desugar.SimpleDecs where
import qualified Data.Map as Map (map)
import Compiler.Utils
import TypeChecker.Types
import FrontEnd.AbsGrammar
-- | Rewrite every function in the source so that each declaration
-- statement introduces exactly one variable.
simpleDecs ∷ Source → Source
simpleDecs src =
  let funs' = Map.map simplifyFun (functions src)
  in src { functions = funs' }
-- | Split multi-variable declarations in one function's statement list.
simplifyFun ∷ Function → Function
simplifyFun f = f { statements = simpleDec (statements f) }
-- | Walk a statement list, expanding each declaration into a run of
-- single-variable declarations; all other statement forms are
-- traversed recursively via 'expandStm'.
simpleDec ∷ [Stm] → [Stm]
simpleDec (SDecl (Dec qs ps):ss) = newDec qs ps ++ simpleDec ss
simpleDec ss = expandStm simpleDec ss
-- | Expand one declaration into single-variable declarations.  For an
-- initialised declaration (DecAss) only the last identifier keeps the
-- initialiser; the preceding ones become plain declarations.
-- NOTE(review): uses 'init'/'last', i.e. assumes the identifier list
-- is non-empty — presumably guaranteed by the grammar; confirm.
newDec ∷ [Qualifier] → DeclPost → [Stm]
newDec qs (Vars cids) = map (makeDec qs) cids
newDec qs (DecAss cids tk ex) = map (makeDec qs) (init cids) ++ [makeDecAss qs tk ex (last cids)]
-- | Build a single-variable declaration without an initialiser.
makeDec ∷ [Qualifier] → CIdent → Stm
makeDec qs cid = SDecl (Dec qs (Vars [cid]))
-- | Build a single-variable declaration with an initialising assignment.
makeDecAss ∷ [Qualifier] → TkAss → Exp → CIdent → Stm
makeDecAss qs tk ex cid = SDecl (Dec qs (DecAss [cid] tk ex))
|
ingemaradahl/bilder
|
src/Compiler/Desugar/SimpleDecs.hs
|
lgpl-3.0
| 1,748
| 0
| 10
| 344
| 388
| 205
| 183
| 20
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
-- needed (despite what hlint says):
{-# LANGUAGE TemplateHaskell #-}
module Lib where
import Control.Applicative
import Control.Arrow (left)
import Control.Natural
import Data.Validation
import Refined
{-
https://gvolpe.github.io/blog/parallel-typeclass-for-haskell/
https://github.com/gvolpe/types-matter
Parallel typeclass for Haskell
Apr 20, 2020 by Gabriel Volpe
https://hackage.haskell.org/package/refined
https://hackage.haskell.org/package/liquidhaskell
Refinement types give the ability to
- define validation rules (aka "predicates") at the type level
-}
-- | Age refined to be at least 18 (GreaterThan 17).
type Age = Refined (GreaterThan 17) Int
-- | Name refined to be a non-empty string.
type Name = Refined NonEmpty String
-- | A person whose fields carry their validation rules in the type.
data Person = Person
  { personAge :: Age
  , personName :: Name
  } deriving Show
-- Validate creation of a Person at compile time using Template Haskell:
-- 'refineTH' runs the predicates during splice expansion, so an
-- under-age value or an empty name is a compile error, not a runtime one.
me :: Person
me = Person $$(refineTH 32)
            $$(refineTH "Gabriel")
{-# ANN me ("HLint: ignore Redundant bracket" :: String) #-}
{-
If age < 18, or name empty, then compile error.
To do runtime validation Refined provides functions (i.e., replacing smart constructors).
refine :: Predicate p x => x -> Either RefineException (Refined p x)
-}
-- | Run-time validation that stops at the first failing field, via
-- Either's fail-fast sequencing (its Applicative and Monad agree).
mkPerson0 :: Int -> String -> Either RefineException Person
mkPerson0 age name = do
  a <- refine age
  n <- refine name
  pure (Person a n)
{-
Above stops on first error.
To validate all inputs in parallel and accumulates errors convert Either values into Validation.
-}
{-
mkPerson :: Int -> String -> Either RefineException Person
mkPerson a n = toEither $ Person <$> fromEither (refine a) <*> fromEither (refine n)
• No instance for (Semigroup RefineException)
-}
{-
Above is clunky and repetitive.
Use the "Parallel" typeclass to improve.
Defines relationship between a Monad that can also be an Applicative with "parallely" behavior.
(Note: that means an Applicative instance that will not pass the monadic laws.)
-}
-- | Relates a Monad @m@ (sequential, fail-fast effects) to an
-- Applicative @f@ with "parallel" behaviour, via a round-trip pair of
-- natural transformations.  The functional dependencies make the
-- pairing unique in both directions.
class (Monad m, Applicative f)
  => Parallel f m | m -> f
                  , f -> m where
  parallel :: m :~> f
  sequential :: f :~> m
-- Either and Validation are isomorphic on values, but Validation's
-- Applicative instance accumulates errors (hence the Semigroup
-- constraint) while Either's short-circuits on the first Left.
instance Semigroup e => Parallel (Validation e) (Either e) where
  parallel = NT fromEither
  sequential = NT toEither
-- Relationship between [] and ZipList: the list Monad combines
-- cartesian-style, ZipList's Applicative combines element-wise.
instance Parallel ZipList [] where
  parallel = NT ZipList
  sequential = NT getZipList
-- parMapN (parMap2 here, but should be abstracted over its arity):
-- convert each argument with 'parallel', combine applicatively in the
-- parallel Applicative, then convert the result back with 'sequential'.
parMapN
  :: (Applicative f, Parallel f m)
  => m a0
  -> m a1
  -> (a0 -> a1 -> a)
  -> m a
parMapN ma0 ma1 f = unwrapNT sequential
  (f <$> unwrapNT parallel ma0 <*> unwrapNT parallel ma1)
-- | Like 'refine', but with any RefineException converted into a
-- singleton list of strings, so the error type is a Semigroup (as the
-- Validation round-trip requires).
ref :: Predicate p x => x -> Either [String] (Refined p x)
ref x = case refine x of
  Left e  -> Left [show e]
  Right r -> Right r
-- | Run-time validation that checks both fields and accumulates all
-- errors, via the implicit Either/Validation round trip in 'parMapN'.
mkPerson :: Int -> String -> Either [String] Person
mkPerson a n = parMapN (ref a) (ref n) Person
{-
Now validates all inputs in parallel via implicit round-trip Either/Validation given by
Parallel instance.
Can use parMapN on lists without manually wrapping / unwrapping ZipLists.
-}
n1,n2 :: [Int]
n1 = [1..5]
n2 = [6..10]
-- | Cartesian combination via the list Applicative: every element of
-- n1 paired with every element of n2 (25 sums).
n3 :: [Int]
n3 = (+) <$> n1 <*> n2
-- | Zip-wise combination via 'parMapN' (5 sums), using the Parallel
-- instance relating [] and ZipList.
n4 :: [Int]
n4 = parMapN n1 n2 (+)
-- Without Parallel's simplicity, it would look as follows:
n4' :: [Int]
n4' = getZipList $ (+) <$> ZipList n1 <*> ZipList n2
-- | Pair up two monadic values using the parallel semantics.
parTupled :: Parallel f m => m a0 -> m a1 -> m (a0, a1)
parTupled mx my = parMapN mx my (\a b -> (a, b))
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/topic/type-level/2020-04-gabriel-volpe-parallel-typeclass/src/Lib.hs
|
unlicense
| 3,714
| 0
| 11
| 769
| 678
| 368
| 310
| 58
| 1
|
-- Copyright 2020-2021 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module WordLiterals where
import Data.Word (Word8, Word16, Word32, Word64)
-- Sample literal bindings at the native and each fixed-width Word
-- type; used to exercise literal handling at every width.
x :: Word
x = 4
x8 :: Word8
x8 = 4
x16 :: Word16
x16 = 4
x32 :: Word32
x32 = 4
x64 :: Word64
x64 = 4
|
google/hs-dependent-literals
|
dependent-literals-plugin/tests/WordLiterals.hs
|
apache-2.0
| 776
| 0
| 5
| 146
| 86
| 58
| 28
| 12
| 1
|
{-# LANGUAGE Unsafe #-}
{-|
Copyright : (C) 2015, University of Twente
License : BSD2 (see the file LICENSE)
Maintainer : Christiaan Baaij <christiaan.baaij@gmail.com>
-}
module CLaSH.Promoted.Nat.Unsafe
(unsafeSNat)
where
import Data.Reflection (reifyNat)
import Unsafe.Coerce (unsafeCoerce)
import CLaSH.Promoted.Nat (SNat (..))
-- | I hope you know what you're doing: build a singleton natural for
-- an arbitrary caller-chosen type index @k@ from a runtime 'Integer'.
-- The 'unsafeCoerce' discards the type reified by 'reifyNat', so
-- nothing checks that @i@ actually corresponds to @k@.  NOINLINE
-- keeps the coercion opaque to the optimiser.
unsafeSNat :: Integer -> SNat k
unsafeSNat i = reifyNat i (unsafeCoerce . SNat)
{-# NOINLINE unsafeSNat #-}
|
Ericson2314/clash-prelude
|
src/CLaSH/Promoted/Nat/Unsafe.hs
|
bsd-2-clause
| 499
| 0
| 7
| 87
| 84
| 51
| 33
| 9
| 1
|
module BrownPLT.Testing
( runTest
) where
import Text.Printf
import System.IO
import Test.HUnit.Base
import System.Console.ANSI
-- | Hook invoked before each test case runs; produces no output and
-- passes the user state through unchanged.
reportStart :: ReportStart us
reportStart _ us = return us
-- | Report a test error (unexpected exception) on stderr with an
-- underlined "Error:" label.
reportError :: ReportProblem us
reportError = reportProblem "Error:"

-- | Report an assertion failure on stderr with an underlined
-- "Failed:" label.
reportFailure :: ReportProblem us
reportFailure = reportProblem "Failed:"

-- | Shared worker for the two problem reporters: print the underlined
-- label, then the message, and return the user state unchanged.
-- (The HUnit State argument is ignored.)
reportProblem :: String -> ReportProblem us
reportProblem label msg _ us = do
  hSetSGR stderr [SetUnderlining SingleUnderline]
  hPutStr stderr label
  hSetSGR stderr [SetUnderlining NoUnderline]
  hPutStrLn stderr (' ':msg)
  return us
-- | Run an HUnit test tree, reporting errors and failures to stderr
-- as they occur, then print a one-line summary of the final counts.
runTest :: Test -> IO ()
runTest test = do
  (counts, _) <- performTest reportStart reportError reportFailure () test
  hPutStrLn stderr $ printf "Cases: %d Tried: %d Errors: %d Failures: %d"
    (cases counts) (tried counts) (errors counts)
    (failures counts)
|
brownplt/strobe-old
|
src/BrownPLT/Testing.hs
|
bsd-2-clause
| 960
| 0
| 10
| 211
| 301
| 144
| 157
| 28
| 1
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[SimplCore]{Driver for simplifying @Core@ programs}
-}
{-# LANGUAGE CPP #-}
module SimplCore ( core2core, simplifyExpr ) where
#include "HsVersions.h"
import DynFlags
import CoreSyn
import HscTypes
import CSE ( cseProgram )
import Rules ( mkRuleBase, unionRuleBase,
extendRuleBaseList, ruleCheckProgram, addRuleInfo, )
import PprCore ( pprCoreBindings, pprCoreExpr )
import OccurAnal ( occurAnalysePgm, occurAnalyseExpr )
import IdInfo
import CoreStats ( coreBindsSize, coreBindsStats, exprSize )
import CoreUtils ( mkTicks, stripTicksTop )
import CoreLint ( showPass, endPass, lintPassResult, dumpPassResult,
lintAnnots )
import Simplify ( simplTopBinds, simplExpr, simplRules )
import SimplUtils ( simplEnvForGHCi, activeRule )
import SimplEnv
import SimplMonad
import CoreMonad
import qualified ErrUtils as Err
import FloatIn ( floatInwards )
import FloatOut ( floatOutwards )
import FamInstEnv
import Id
import BasicTypes ( CompilerPhase(..), isDefaultInlinePragma )
import VarSet
import VarEnv
import LiberateCase ( liberateCase )
import SAT ( doStaticArgs )
import Specialise ( specProgram)
import SpecConstr ( specConstrProgram)
import DmdAnal ( dmdAnalProgram )
import CallArity ( callArityAnalProgram )
import WorkWrap ( wwTopBinds )
import Vectorise ( vectorise )
import FastString
import SrcLoc
import Util
import Module
import Maybes
import UniqSupply ( UniqSupply, mkSplitUniqSupply, splitUniqSupply )
import Outputable
import Control.Monad
#ifdef GHCI
import DynamicLoading ( loadPlugins )
import Plugins ( installCoreToDos )
#endif
{-
************************************************************************
* *
\subsection{The driver for the simplifier}
* *
************************************************************************
-}
-- | Top-level driver for the Core-to-Core pipeline: build the pass
-- list from the dynamic flags, let plugins amend it, run the passes
-- in CoreM, and dump grand-total simplifier statistics if requested.
core2core :: HscEnv -> ModGuts -> IO ModGuts
core2core hsc_env guts@(ModGuts { mg_module = mod
                                , mg_loc = loc
                                , mg_deps = deps
                                , mg_rdr_env = rdr_env })
  = do { us <- mkSplitUniqSupply 's'
       -- make sure all plugins are loaded
       ; let builtin_passes = getCoreToDo dflags
             orph_mods = mkModuleSet (mod : dep_orphs deps)
       ;
       ; (guts2, stats) <- runCoreM hsc_env hpt_rule_base us mod
                                    orph_mods print_unqual loc $
                           do { all_passes <- addPluginPasses builtin_passes
                              ; runCorePasses all_passes guts }
       ; Err.dumpIfSet_dyn dflags Opt_D_dump_simpl_stats
             "Grand total simplifier statistics"
             (pprSimplCount stats)
       ; return guts2 }
  where
    dflags = hsc_dflags hsc_env
    home_pkg_rules = hptRules hsc_env (dep_mods deps)
    hpt_rule_base = mkRuleBase home_pkg_rules
    print_unqual = mkPrintUnqualified dflags rdr_env
    -- mod: get the module out of the current HscEnv so we can retrieve it from the monad.
    -- This is very convienent for the users of the monad (e.g. plugins do not have to
    -- consume the ModGuts to find the module) but somewhat ugly because mg_module may
    -- _theoretically_ be changed during the Core pipeline (it's part of ModGuts), which
    -- would mean our cached value would go out of date.
{-
************************************************************************
* *
Generating the main optimisation pipeline
* *
************************************************************************
-}
-- | Build the list of Core-to-Core passes from the dynamic flags.
-- At -O0 this is just vectorisation plus one non-optimising simplifier
-- run; at -O1 and above it is the full pipeline (specialise, float
-- out/in, strictness, worker/wrapper, CSE, SpecConstr, ...), with each
-- optional stage gated on its corresponding flag below.
getCoreToDo :: DynFlags -> [CoreToDo]
getCoreToDo dflags
  = core_todo
  where
    opt_level = optLevel dflags
    phases = simplPhases dflags
    max_iter = maxSimplIterations dflags
    rule_check = ruleCheck dflags
    call_arity = gopt Opt_CallArity dflags
    strictness = gopt Opt_Strictness dflags
    full_laziness = gopt Opt_FullLaziness dflags
    do_specialise = gopt Opt_Specialise dflags
    do_float_in = gopt Opt_FloatIn dflags
    cse = gopt Opt_CSE dflags
    spec_constr = gopt Opt_SpecConstr dflags
    liberate_case = gopt Opt_LiberateCase dflags
    late_dmd_anal = gopt Opt_LateDmdAnal dflags
    static_args = gopt Opt_StaticArgumentTransformation dflags
    rules_on = gopt Opt_EnableRewriteRules dflags
    eta_expand_on = gopt Opt_DoLambdaEtaExpansion dflags

    maybe_rule_check phase = runMaybe rule_check (CoreDoRuleCheck phase)

    maybe_strictness_before phase
      = runWhen (phase `elem` strictnessBefore dflags) CoreDoStrictness

    base_mode = SimplMode { sm_phase = panic "base_mode"
                          , sm_names = []
                          , sm_rules = rules_on
                          , sm_eta_expand = eta_expand_on
                          , sm_inline = True
                          , sm_case_case = True }

    simpl_phase phase names iter
      = CoreDoPasses
      $ [ maybe_strictness_before phase
        , CoreDoSimplify iter
                (base_mode { sm_phase = Phase phase
                           , sm_names = names })
        , maybe_rule_check (Phase phase) ]

          -- Vectorisation can introduce a fair few common sub expressions involving
          --  DPH primitives. For example, see the Reverse test from dph-examples.
          --  We need to eliminate these common sub expressions before their definitions
          --  are inlined in phase 2. The CSE introduces lots of  v1 = v2 bindings,
          --  so we also run simpl_gently to inline them.
      ++ (if gopt Opt_Vectorise dflags && phase == 3
            then [CoreCSE, simpl_gently]
            else [])

    vectorisation
      = runWhen (gopt Opt_Vectorise dflags) $
          CoreDoPasses [ simpl_gently, CoreDoVectorisation ]

                -- By default, we have 2 phases before phase 0.

                -- Want to run with inline phase 2 after the specialiser to give
                -- maximum chance for fusion to work before we inline build/augment
                -- in phase 1.  This made a difference in 'ansi' where an
                -- overloaded function wasn't inlined till too late.

                -- Need phase 1 so that build/augment get
                -- inlined.  I found that spectral/hartel/genfft lost some useful
                -- strictness in the function sumcode' if augment is not inlined
                -- before strictness analysis runs
    simpl_phases = CoreDoPasses [ simpl_phase phase ["main"] max_iter
                                | phase <- [phases, phases-1 .. 1] ]

        -- initial simplify: mk specialiser happy: minimum effort please
    simpl_gently = CoreDoSimplify max_iter
                       (base_mode { sm_phase = InitialPhase
                                  , sm_names = ["Gentle"]
                                  , sm_rules = rules_on   -- Note [RULEs enabled in SimplGently]
                                  , sm_inline = False
                                  , sm_case_case = False })
                          -- Don't do case-of-case transformations.
                          -- This makes full laziness work better

    -- New demand analyser
    demand_analyser = (CoreDoPasses ([
                           CoreDoStrictness,
                           CoreDoWorkerWrapper,
                           simpl_phase 0 ["post-worker-wrapper"] max_iter
                           ]))

    core_todo =
     if opt_level == 0 then
       [ vectorisation
       , CoreDoSimplify max_iter
             (base_mode { sm_phase = Phase 0
                        , sm_names = ["Non-opt simplification"] })
       ]

     else {- opt_level >= 1 -} [

    -- We want to do the static argument transform before full laziness as it
    -- may expose extra opportunities to float things outwards. However, to fix
    -- up the output of the transformation we need at do at least one simplify
    -- after this before anything else
        runWhen static_args (CoreDoPasses [ simpl_gently, CoreDoStaticArgs ]),

        -- We run vectorisation here for now, but we might also try to run
        -- it later
        vectorisation,

        -- initial simplify: mk specialiser happy: minimum effort please
        simpl_gently,

        -- Specialisation is best done before full laziness
        -- so that overloaded functions have all their dictionary lambdas manifest
        runWhen do_specialise CoreDoSpecialising,

        runWhen full_laziness $
           CoreDoFloatOutwards FloatOutSwitches {
                                 floatOutLambdas   = Just 0,
                                 floatOutConstants = True,
                                 floatOutOverSatApps = False },
                -- Was: gentleFloatOutSwitches
                --
                -- I have no idea why, but not floating constants to
                -- top level is very bad in some cases.
                --
                -- Notably: p_ident in spectral/rewrite
                --          Changing from "gentle" to "constantsOnly"
                --          improved rewrite's allocation by 19%, and
                --          made  0.0% difference to any other nofib
                --          benchmark
                --
                -- Not doing floatOutOverSatApps yet, we'll do
                -- that later on when we've had a chance to get more
                -- accurate arity information.  In fact it makes no
                -- difference at all to performance if we do it here,
                -- but maybe we save some unnecessary to-and-fro in
                -- the simplifier.

        simpl_phases,

                -- Phase 0: allow all Ids to be inlined now
                -- This gets foldr inlined before strictness analysis

                -- At least 3 iterations because otherwise we land up with
                -- huge dead expressions because of an infelicity in the
                -- simpifier.
                --      let k = BIG in foldr k z xs
                -- ==>  let k = BIG in letrec go = \xs -> ...(k x).... in go xs
                -- ==>  let k = BIG in letrec go = \xs -> ...(BIG x).... in go xs
                -- Don't stop now!
        simpl_phase 0 ["main"] (max max_iter 3),

        runWhen do_float_in CoreDoFloatInwards,
            -- Run float-inwards immediately before the strictness analyser
            -- Doing so pushes bindings nearer their use site and hence makes
            -- them more likely to be strict. These bindings might only show
            -- up after the inlining from simplification.  Example in fulsom,
            -- Csg.calc, where an arg of timesDouble thereby becomes strict.

        runWhen call_arity $ CoreDoPasses
            [ CoreDoCallArity
            , simpl_phase 0 ["post-call-arity"] max_iter
            ],

        runWhen strictness demand_analyser,

        runWhen full_laziness $
           CoreDoFloatOutwards FloatOutSwitches {
                                 floatOutLambdas     = floatLamArgs dflags,
                                 floatOutConstants   = True,
                                 floatOutOverSatApps = True },
                -- nofib/spectral/hartel/wang doubles in speed if you
                -- do full laziness late in the day.  It only happens
                -- after fusion and other stuff, so the early pass doesn't
                -- catch it.  For the record, the redex is
                --        f_el22 (f_el21 r_midblock)

        runWhen cse CoreCSE,
                -- We want CSE to follow the final full-laziness pass, because it may
                -- succeed in commoning up things floated out by full laziness.
                -- CSE used to rely on the no-shadowing invariant, but it doesn't any more

        runWhen do_float_in CoreDoFloatInwards,

        maybe_rule_check (Phase 0),

                -- Case-liberation for -O2.  This should be after
                -- strictness analysis and the simplification which follows it.
        runWhen liberate_case (CoreDoPasses [
            CoreLiberateCase,
            simpl_phase 0 ["post-liberate-case"] max_iter
            ]),         -- Run the simplifier after LiberateCase to vastly
                        -- reduce the possiblility of shadowing
                        -- Reason: see Note [Shadowing] in SpecConstr.hs

        runWhen spec_constr CoreDoSpecConstr,

        maybe_rule_check (Phase 0),

        -- Final clean-up simplification:
        simpl_phase 0 ["final"] max_iter,

        runWhen late_dmd_anal $ CoreDoPasses [
            CoreDoStrictness,
            CoreDoWorkerWrapper,
            simpl_phase 0 ["post-late-ww"] max_iter
          ],

        maybe_rule_check (Phase 0)
     ]
-- Loading plugins
-- | Let loaded source plugins install or rearrange passes in the
-- pipeline.  When GHC is built without the interpreter (no GHCI),
-- this is the identity.
addPluginPasses :: [CoreToDo] -> CoreM [CoreToDo]
#ifndef GHCI
addPluginPasses builtin_passes = return builtin_passes
#else
addPluginPasses builtin_passes
  = do { hsc_env <- getHscEnv
       ; named_plugins <- liftIO (loadPlugins hsc_env)
       ; foldM query_plug builtin_passes named_plugins }
  where
    query_plug todos (_, plug, options) = installCoreToDos plug options todos
#endif
{-
************************************************************************
* *
The CoreToDo interpreter
* *
************************************************************************
-}
-- | Interpret a list of CoreToDo passes over the module, linting and
-- dumping around each real pass.
runCorePasses :: [CoreToDo] -> ModGuts -> CoreM ModGuts
runCorePasses passes guts
  = foldM do_pass guts passes
  where
    do_pass guts CoreDoNothing = return guts
    do_pass guts (CoreDoPasses ps) = runCorePasses ps guts
    do_pass guts pass
      = do { showPass pass
           ; guts' <- lintAnnots (ppr pass) (doCorePass pass) guts
           ; endPass pass (mg_binds guts') (mg_rules guts')
           ; return guts' }

-- | Dispatch a single CoreToDo constructor to its implementation;
-- SCC pragmas attribute profiling costs to the individual passes.
doCorePass :: CoreToDo -> ModGuts -> CoreM ModGuts
doCorePass pass@(CoreDoSimplify {}) = {-# SCC "Simplify" #-}
                                      simplifyPgm pass
doCorePass CoreCSE = {-# SCC "CommonSubExpr" #-}
                     doPass cseProgram
doCorePass CoreLiberateCase = {-# SCC "LiberateCase" #-}
                              doPassD liberateCase
doCorePass CoreDoFloatInwards = {-# SCC "FloatInwards" #-}
                                doPassD floatInwards
doCorePass (CoreDoFloatOutwards f) = {-# SCC "FloatOutwards" #-}
                                     doPassDUM (floatOutwards f)
doCorePass CoreDoStaticArgs = {-# SCC "StaticArgs" #-}
                              doPassU doStaticArgs
doCorePass CoreDoCallArity = {-# SCC "CallArity" #-}
                             doPassD callArityAnalProgram
doCorePass CoreDoStrictness = {-# SCC "NewStranal" #-}
                              doPassDFM dmdAnalProgram
doCorePass CoreDoWorkerWrapper = {-# SCC "WorkWrap" #-}
                                 doPassDFU wwTopBinds
doCorePass CoreDoSpecialising = {-# SCC "Specialise" #-}
                                specProgram
doCorePass CoreDoSpecConstr = {-# SCC "SpecConstr" #-}
                              specConstrProgram
doCorePass CoreDoVectorisation = {-# SCC "Vectorise" #-}
                                 vectorise
doCorePass CoreDoPrintCore = observe printCore
doCorePass (CoreDoRuleCheck phase pat) = ruleCheckPass phase pat
doCorePass CoreDoNothing = return
doCorePass (CoreDoPasses passes) = runCorePasses passes
#ifdef GHCI
doCorePass (CoreDoPluginPass _ pass) = {-# SCC "Plugin" #-} pass
#endif
doCorePass pass = pprPanic "doCorePass" (ppr pass)
{-
************************************************************************
* *
\subsection{Core pass combinators}
* *
************************************************************************
-}
-- | Dump the current Core bindings (observer; no transformation).
printCore :: DynFlags -> CoreProgram -> IO ()
printCore dflags binds
    = Err.dumpIfSet dflags True "Print Core" (pprCoreBindings binds)

-- | Run the rule-check report for the given phase/pattern and emit it
-- through the log action; the module is returned unchanged.
ruleCheckPass :: CompilerPhase -> String -> ModGuts -> CoreM ModGuts
ruleCheckPass current_phase pat guts = do
    rb <- getRuleBase
    dflags <- getDynFlags
    vis_orphs <- getVisibleOrphanMods
    liftIO $ Err.showPass dflags "RuleCheck"
    liftIO $ log_action dflags dflags Err.SevDump noSrcSpan defaultDumpStyle
                 (ruleCheckProgram current_phase pat
                    (RuleEnv rb vis_orphs) (mg_binds guts))
    return guts

-- | Lift a pass needing DynFlags and a unique supply.
doPassDUM :: (DynFlags -> UniqSupply -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDUM do_pass = doPassM $ \binds -> do
    dflags <- getDynFlags
    us     <- getUniqueSupplyM
    liftIO $ do_pass dflags us binds

-- | Lift a pass needing only DynFlags (IO).
doPassDM :: (DynFlags -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDM do_pass = doPassDUM (\dflags -> const (do_pass dflags))

-- | Lift a pure pass needing DynFlags.
doPassD :: (DynFlags -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassD do_pass = doPassDM (\dflags -> return . do_pass dflags)

-- | Lift a pure pass needing DynFlags and a unique supply.
doPassDU :: (DynFlags -> UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDU do_pass = doPassDUM (\dflags us -> return . do_pass dflags us)

-- | Lift a pure pass needing only a unique supply.
doPassU :: (UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassU do_pass = doPassDU (const do_pass)

-- | Lift a pass needing DynFlags and the family-instance environments.
doPassDFM :: (DynFlags -> FamInstEnvs -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDFM do_pass guts = do
    dflags <- getDynFlags
    p_fam_env <- getPackageFamInstEnv
    let fam_envs = (p_fam_env, mg_fam_inst_env guts)
    doPassM (liftIO . do_pass dflags fam_envs) guts

-- | Lift a pure pass needing DynFlags, family instances and uniques.
doPassDFU :: (DynFlags -> FamInstEnvs -> UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDFU do_pass guts = do
    dflags <- getDynFlags
    us     <- getUniqueSupplyM
    p_fam_env <- getPackageFamInstEnv
    let fam_envs = (p_fam_env, mg_fam_inst_env guts)
    doPass (do_pass dflags fam_envs us) guts

-- Most passes return no stats and don't change rules: these combinators
-- let us lift them to the full blown ModGuts+CoreM world
doPassM :: Monad m => (CoreProgram -> m CoreProgram) -> ModGuts -> m ModGuts
doPassM bind_f guts = do
    binds' <- bind_f (mg_binds guts)
    return (guts { mg_binds = binds' })

doPass :: (CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPass bind_f guts = return $ guts { mg_binds = bind_f (mg_binds guts) }

-- Observer passes just peek; don't modify the bindings at all
observe :: (DynFlags -> CoreProgram -> IO a) -> ModGuts -> CoreM ModGuts
observe do_pass = doPassM $ \binds -> do
    dflags <- getDynFlags
    _ <- liftIO $ do_pass dflags binds
    return binds
{-
************************************************************************
* *
Gentle simplification
* *
************************************************************************
-}
-- | Simplify a single expression with an empty rule base and family
-- environment, dumping statistics and the result when requested.
simplifyExpr :: DynFlags -- includes spec of what core-to-core passes to do
             -> CoreExpr
             -> IO CoreExpr
-- simplifyExpr is called by the driver to simplify an
-- expression typed in at the interactive prompt
--
-- Also used by Template Haskell
simplifyExpr dflags expr
  = do  {
        ; Err.showPass dflags "Simplify"

        ; us <-  mkSplitUniqSupply 's'

        ; let sz = exprSize expr

        ; (expr', counts) <- initSmpl dflags emptyRuleEnv
                                      emptyFamInstEnvs us sz
                                      (simplExprGently (simplEnvForGHCi dflags) expr)

        ; Err.dumpIfSet dflags (dopt Opt_D_dump_simpl_stats dflags)
                  "Simplifier statistics" (pprSimplCount counts)

        ; Err.dumpIfSet_dyn dflags Opt_D_dump_simpl "Simplified expression"
                        (pprCoreExpr expr')

        ; return expr'
        }

simplExprGently :: SimplEnv -> CoreExpr -> SimplM CoreExpr
-- Simplifies an expression
--      does occurrence analysis, then simplification
--      and repeats (twice currently) because one pass
--      alone leaves tons of crud.
-- Used (a) for user expressions typed in at the interactive prompt
--      (b) the LHS and RHS of a RULE
--      (c) Template Haskell splices
--
-- The name 'Gently' suggests that the SimplifierMode is SimplGently,
-- and in fact that is so.... but the 'Gently' in simplExprGently doesn't
-- enforce that; it just simplifies the expression twice

-- It's important that simplExprGently does eta reduction; see
-- Note [Simplifying the left-hand side of a RULE] above.  The
-- simplifier does indeed do eta reduction (it's in Simplify.completeLam)
-- but only if -O is on.

simplExprGently env expr = do
    expr1 <- simplExpr env (occurAnalyseExpr expr)
    simplExpr env (occurAnalyseExpr expr1)

{-
************************************************************************
*                                                                      *
\subsection{The driver for the simplifier}
*                                                                      *
************************************************************************
-}

-- | Run one CoreDoSimplify pass over the module, fetching the unique
-- supply and rule base from the CoreM environment.
simplifyPgm :: CoreToDo -> ModGuts -> CoreM ModGuts
simplifyPgm pass guts
  = do { hsc_env <- getHscEnv
       ; us <- getUniqueSupplyM
       ; rb <- getRuleBase
       ; liftIOWithCount $
         simplifyPgmIO pass hsc_env us rb guts }
simplifyPgmIO :: CoreToDo
-> HscEnv
-> UniqSupply
-> RuleBase
-> ModGuts
-> IO (SimplCount, ModGuts) -- New bindings
-- | Run one 'CoreDoSimplify' pass: repeatedly simplify the whole
-- program (at most @max_iterations@ times), stopping early when an
-- iteration produces no tick counts (a fixed point).
simplifyPgmIO pass@(CoreDoSimplify max_iterations mode)
              hsc_env us hpt_rule_base
              guts@(ModGuts { mg_module = this_mod
                            , mg_rdr_env = rdr_env
                            , mg_deps = deps
                            , mg_binds = binds, mg_rules = rules
                            , mg_fam_inst_env = fam_inst_env })
  = do { (termination_msg, it_count, counts_out, guts')
           <- do_iteration us 1 [] binds rules
       ; Err.dumpIfSet dflags (dopt Opt_D_verbose_core2core dflags &&
                               dopt Opt_D_dump_simpl_stats dflags)
                 "Simplifier statistics for following pass"
                 (vcat [text termination_msg <+> text "after" <+> ppr it_count
                        <+> text "iterations",
                        blankLine,
                        pprSimplCount counts_out])
       ; return (counts_out, guts')
       }
  where
    dflags       = hsc_dflags hsc_env
    print_unqual = mkPrintUnqualified dflags rdr_env
    simpl_env    = mkSimplEnv mode
    active_rule  = activeRule simpl_env
    do_iteration :: UniqSupply
                 -> Int          -- Counts iterations
                 -> [SimplCount] -- Counts from earlier iterations, reversed
                 -> CoreProgram  -- Bindings in
                 -> [CoreRule]   -- and orphan rules
                 -> IO (String, Int, SimplCount, ModGuts)
    do_iteration us iteration_no counts_so_far binds rules
        -- iteration_no is the number of the iteration we are
        -- about to begin, with '1' for the first
      | iteration_no > max_iterations   -- Stop if we've run out of iterations
      = WARN( debugIsOn && (max_iterations > 2)
            , hang (ptext (sLit "Simplifier bailing out after") <+> int max_iterations
                    <+> ptext (sLit "iterations")
                    <+> (brackets $ hsep $ punctuate comma $
                         map (int . simplCountN) (reverse counts_so_far)))
                 2 (ptext (sLit "Size =") <+> ppr (coreBindsStats binds)))
                -- Subtract 1 from iteration_no to get the
                -- number of iterations we actually completed
        return ( "Simplifier baled out", iteration_no - 1
               , totalise counts_so_far
               , guts { mg_binds = binds, mg_rules = rules } )
      -- Try and force thunks off the binds; significantly reduces
      -- space usage, especially with -O.  JRS, 000620.
      | let sz = coreBindsSize binds
      , () <- sz `seq` ()     -- Force it
      = do {
                -- Occurrence analysis
           let { -- Note [Vectorisation declarations and occurrences]
                 -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
                 -- During the 'InitialPhase' (i.e., before vectorisation), we need to make sure
                 -- that the right-hand sides of vectorisation declarations are taken into
                 -- account during occurrence analysis. After the 'InitialPhase', we need to ensure
                 -- that the binders representing variable vectorisation declarations are kept alive.
                 -- (In contrast to automatically vectorised variables, their unvectorised versions
                 -- don't depend on them.)
                 vectVars = mkVarSet $
                              catMaybes [ fmap snd $ lookupVarEnv (vectInfoVar (mg_vect_info guts)) bndr
                                        | Vect bndr _ <- mg_vect_decls guts]
                              ++
                              catMaybes [ fmap snd $ lookupVarEnv (vectInfoVar (mg_vect_info guts)) bndr
                                        | bndr <- bindersOfBinds binds]
                                        -- FIXME: This second comprehension is only needed as long as we
                                        --        have vectorised bindings where we get "Could NOT call
                                        --        vectorised from original version".
              ; (maybeVects, maybeVectVars)
                  = case sm_phase mode of
                      InitialPhase -> (mg_vect_decls guts, vectVars)
                      _            -> ([], vectVars)
              ; tagged_binds = {-# SCC "OccAnal" #-}
                    occurAnalysePgm this_mod active_rule rules
                                    maybeVects maybeVectVars binds
              } ;
           Err.dumpIfSet_dyn dflags Opt_D_dump_occur_anal "Occurrence analysis"
                     (pprCoreBindings tagged_binds);
                -- Get any new rules, and extend the rule base
                -- See Note [Overall plumbing for rules] in Rules.hs
                -- We need to do this regularly, because simplification can
                -- poke on IdInfo thunks, which in turn brings in new rules
                -- behind the scenes.  Otherwise there's a danger we'll simply
                -- miss the rules for Ids hidden inside imported inlinings
           eps <- hscEPS hsc_env ;
           let { rule_base1 = unionRuleBase hpt_rule_base (eps_rule_base eps)
               ; rule_base2 = extendRuleBaseList rule_base1 rules
               ; fam_envs = (eps_fam_inst_env eps, fam_inst_env)
               ; vis_orphs = this_mod : dep_orphs deps } ;
                -- Simplify the program
           ((binds1, rules1), counts1) <-
                  initSmpl dflags (mkRuleEnv rule_base2 vis_orphs) fam_envs us1 sz $
                    do { env1 <- {-# SCC "SimplTopBinds" #-}
                                 simplTopBinds simpl_env tagged_binds
                         -- Apply the substitution to rules defined in this module
                         -- for imported Ids.  Eg  RULE map my_f = blah
                         -- If we have a substitution my_f :-> other_f, we'd better
                         -- apply it to the rule to, or it'll never match
                       ; rules1 <- simplRules env1 Nothing rules
                       ; return (getFloatBinds env1, rules1) } ;
                -- Stop if nothing happened; don't dump output
           if isZeroSimplCount counts1 then
                return ( "Simplifier reached fixed point", iteration_no
                       , totalise (counts1 : counts_so_far)  -- Include "free" ticks
                       , guts { mg_binds = binds1, mg_rules = rules1 } )
           else do {
                -- Short out indirections
                -- We do this *after* at least one run of the simplifier
                -- because indirection-shorting uses the export flag on *occurrences*
                -- and that isn't guaranteed to be ok until after the first run propagates
                -- stuff from the binding site to its occurrences
                --
                -- ToDo: alas, this means that indirection-shorting does not happen at all
                --       if the simplifier does nothing (not common, I know, but unsavoury)
           let { binds2 = {-# SCC "ZapInd" #-} shortOutIndirections binds1 } ;
                -- Dump the result of this iteration
           dump_end_iteration dflags print_unqual iteration_no counts1 binds2 rules1 ;
           lintPassResult hsc_env pass binds2 ;
                -- Loop
           do_iteration us2 (iteration_no + 1) (counts1:counts_so_far) binds2 rules1
        } }
      -- The size guard above always succeeds (sz `seq` () matches ());
      -- this final equation only keeps the guards visibly exhaustive.
      | otherwise = panic "do_iteration"
      where
        (us1, us2) = splitUniqSupply us
        -- Remember the counts_so_far are reversed
        totalise :: [SimplCount] -> SimplCount
        totalise = foldr (\c acc -> acc `plusSimplCount` c)
                         (zeroSimplCount dflags)
-- Any other CoreToDo constructor is a caller error; this equation is
-- only reached if the pass is not a CoreDoSimplify.
simplifyPgmIO _ _ _ _ _ = panic "simplifyPgmIO"
-------------------
-- | Dump the bindings and rules produced by one simplifier iteration,
-- together with the tick counts for that iteration.  Detailed output
-- is emitted only under -ddump-simpl-iterations.
dump_end_iteration :: DynFlags -> PrintUnqualified -> Int
                   -> SimplCount -> CoreProgram -> [CoreRule] -> IO ()
dump_end_iteration dflags print_unqual iteration_no counts binds rules
  = dumpPassResult dflags print_unqual dump_flag banner counts_doc binds rules
  where
    -- Show details if Opt_D_dump_simpl_iterations is on
    dump_flag = if dopt Opt_D_dump_simpl_iterations dflags
                then Just Opt_D_dump_simpl_iterations
                else Nothing
    banner = ptext (sLit "Simplifier iteration=") <> int iteration_no
    counts_doc = vcat [ ptext (sLit "---- Simplifier counts for") <+> banner
                      , pprSimplCount counts
                      , ptext (sLit "---- End of simplifier counts for") <+> banner ]
{-
************************************************************************
* *
Shorting out indirections
* *
************************************************************************
If we have this:
x_local = <expression>
...bindings...
x_exported = x_local
where x_exported is exported, and x_local is not, then we replace it with this:
x_exported = <expression>
x_local = x_exported
...bindings...
Without this we never get rid of the x_exported = x_local thing. This
saves a gratuitous jump (from \tr{x_exported} to \tr{x_local}), and
makes strictness information propagate better. This used to happen in
the final phase, but it's tidier to do it here.
Note [Transferring IdInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to propagate any useful IdInfo on x_local to x_exported.
STRICTNESS: if we have done strictness analysis, we want the strictness info on
x_local to transfer to x_exported. Hence the copyIdInfo call.
RULES: we want to *add* any RULES for x_local to x_exported.
Note [Messing up the exported Id's RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must be careful about discarding (obviously) or even merging the
RULES on the exported Id. The example that went bad on me at one stage
was this one:
iterate :: (a -> a) -> a -> [a]
[Exported]
iterate = iterateList
iterateFB c f x = x `c` iterateFB c f (f x)
iterateList f x = x : iterateList f (f x)
[Not exported]
{-# RULES
"iterate" forall f x. iterate f x = build (\c _n -> iterateFB c f x)
"iterateFB" iterateFB (:) = iterateList
#-}
This got shorted out to:
iterateList :: (a -> a) -> a -> [a]
iterateList = iterate
iterateFB c f x = x `c` iterateFB c f (f x)
iterate f x = x : iterate f (f x)
{-# RULES
"iterate" forall f x. iterate f x = build (\c _n -> iterateFB c f x)
"iterateFB" iterateFB (:) = iterate
#-}
And now we get an infinite loop in the rule system
iterate f x -> build (\cn -> iterateFB c f x)
-> iterateFB (:) f x
-> iterate f x
Old "solution":
use rule switching-off pragmas to get rid
of iterateList in the first place
But in principle the user *might* want rules that only apply to the Id
he says. And inline pragmas are similar
{-# NOINLINE f #-}
f = local
local = <stuff>
Then we do not want to get rid of the NOINLINE.
Hence hasShortableIdinfo.
Note [Rules and indirection-zapping]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Problem: what if x_exported has a RULE that mentions something in ...bindings...?
Then the things mentioned can be out of scope! Solution
a) Make sure that in this pass the usage-info from x_exported is
available for ...bindings...
b) If there are any such RULES, rec-ify the entire top-level.
It'll get sorted out next time round
Other remarks
~~~~~~~~~~~~~
If more than one exported thing is equal to a local thing (i.e., the
local thing really is shared), then we do one only:
\begin{verbatim}
x_local = ....
x_exported1 = x_local
x_exported2 = x_local
==>
x_exported1 = ....
x_exported2 = x_exported1
\end{verbatim}
We rely on prior eta reduction to simplify things like
\begin{verbatim}
x_exported = /\ tyvars -> x_local tyvars
==>
x_exported = x_local
\end{verbatim}
Hence,there's a possibility of leaving unchanged something like this:
\begin{verbatim}
x_local = ....
x_exported1 = x_local Int
\end{verbatim}
By the time we've thrown away the types in STG land this
could be eliminated. But I don't think it's very common
and it's dangerous to do this fiddling in STG land
because we might eliminate a binding that's mentioned in the
unfolding for something.
Note [Indirection zapping and ticks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unfortunately this is another place where we need a special case for
ticks. The following happens quite regularly:
x_local = <expression>
x_exported = tick<x> x_local
Which we want to become:
x_exported = tick<x> <expression>
As it makes no sense to keep the tick and the expression on separate
bindings. Note however that this might increase the ticks scoping
over the execution of x_local, so we can only do this for floatable
ticks. More often than not, other references will be unfoldings of
x_exported, and therefore carry the tick anyway.
-}
type IndEnv = IdEnv (Id, [Tickish Var]) -- Maps local_id -> exported_id, ticks

-- | Replace @x_exported = x_local@ indirections: the exported Id gets
-- the real right-hand side, and the local Id becomes an alias for it.
-- See "Shorting out indirections" above for why and for the caveats.
shortOutIndirections :: CoreProgram -> CoreProgram
shortOutIndirections binds
  | isEmptyVarEnv ind_env = binds
  | no_need_to_flatten    = binds'                      -- See Note [Rules and indirection-zapping]
  | otherwise             = [Rec (flattenBinds binds')] -- for this no_need_to_flatten stuff
  where
    ind_env            = makeIndEnv binds
    -- These exported Ids are the subjects of the indirection-elimination
    exp_ids            = map fst $ varEnvElts ind_env
    exp_id_set         = mkVarSet exp_ids
    no_need_to_flatten = all (null . ruleInfoRules . idSpecialisation) exp_ids
    binds'             = concatMap zap binds

    zap (NonRec bndr rhs) = [NonRec b r | (b,r) <- zapPair (bndr,rhs)]
    zap (Rec pairs)       = [Rec (concatMap zapPair pairs)]

    -- Drop the old exported binding (its new definition is emitted at
    -- the local binder's site), and rewrite a shortable local binding
    -- into (exported = rhs; local = exported).
    zapPair (bndr, rhs)
        | bndr `elemVarSet` exp_id_set = []
        | Just (exp_id, ticks) <- lookupVarEnv ind_env bndr
        = [(transferIdInfo exp_id bndr,
            mkTicks ticks rhs),
           (bndr, Var exp_id)]
        | otherwise = [(bndr,rhs)]
-- | Build the local_id -> (exported_id, ticks) map that drives
-- indirection shorting: one entry for every binding of the shape
-- @exported_id = [floatable ticks] local_id@ that 'shortMeOut' accepts.
makeIndEnv :: [CoreBind] -> IndEnv
makeIndEnv = foldr insert_bind emptyVarEnv
  where
    insert_bind :: CoreBind -> IndEnv -> IndEnv
    insert_bind (NonRec bndr rhs) acc = insert_pair (bndr, rhs) acc
    insert_bind (Rec prs)         acc = foldr insert_pair acc prs

    insert_pair :: (Id,CoreExpr) -> IndEnv -> IndEnv
    insert_pair (bndr, rhs) acc
        -- Only floatable ticks may sit between the exported binder and
        -- the local variable it points at; they are carried along.
        | (ticks, Var local_id) <- stripTicksTop tickishFloatable rhs
        , shortMeOut acc bndr local_id
        = extendVarEnv acc local_id (bndr, ticks)
    insert_pair _ acc = acc
-----------------
-- | Decide whether the indirection @exported_id = local_id@ may be
-- shorted out.  Requires exported_id to be exported, local_id to be a
-- non-exported local not already substituted for, and exported_id to
-- carry no user-attached IdInfo (see the Notes above).
shortMeOut :: IndEnv -> Id -> Id -> Bool
shortMeOut ind_env exported_id local_id
-- The if-then-else stuff is just so I can get a pprTrace to see
-- how often I don't get shorting out because of IdInfo stuff
  = if isExportedId exported_id &&              -- Only if this is exported
       isLocalId local_id &&                    -- Only if this one is defined in this
                                                --      module, so that we *can* change its
                                                --      binding to be the exported thing!
       not (isExportedId local_id) &&           -- Only if this one is not itself exported,
                                                --      since the transformation will nuke it
       not (local_id `elemVarEnv` ind_env)      -- Only if not already substituted for
    then
        if hasShortableIdInfo exported_id
        then True       -- See Note [Messing up the exported Id's IdInfo]
        else WARN( True, ptext (sLit "Not shorting out:") <+> ppr exported_id )
             False
    else
        False
-----------------
-- | True if there is no user-attached IdInfo on the exported Id,
-- so we can safely discard it.
-- See Note [Messing up the exported Id's IdInfo]
hasShortableIdInfo :: Id -> Bool
hasShortableIdInfo bndr
  = no_rules && default_pragma && no_stable_unfolding
  where
    inf                 = idInfo bndr
    no_rules            = isEmptyRuleInfo (ruleInfo inf)
    default_pragma      = isDefaultInlinePragma (inlinePragInfo inf)
    no_stable_unfolding = not (isStableUnfolding (unfoldingInfo inf))
-----------------
transferIdInfo :: Id -> Id -> Id
-- See Note [Transferring IdInfo]
-- If we have
--     lcl_id = e; exp_id = lcl_id
-- and lcl_id has useful IdInfo, we don't want to discard it by going
--     gbl_id = e; lcl_id = gbl_id
-- Instead, transfer IdInfo from lcl_id to exp_id
-- Overwriting, rather than merging, seems to work ok.
--
-- Strictness, unfolding and inline-pragma info are overwritten with
-- the local Id's; RULES are *added* to the exported Id's existing
-- ones (via addRuleInfo), per Note [Transferring IdInfo].
transferIdInfo exported_id local_id
  = modifyIdInfo transfer exported_id
  where
    local_info = idInfo local_id
    transfer exp_info = exp_info `setStrictnessInfo` strictnessInfo local_info
                                 `setUnfoldingInfo`  unfoldingInfo local_info
                                 `setInlinePragInfo` inlinePragInfo local_info
                                 `setRuleInfo`       addRuleInfo (ruleInfo exp_info) new_info
    new_info = setRuleInfoHead (idName exported_id)
                               (ruleInfo local_info)
        -- Remember to set the function-name field of the
        -- rules as we transfer them from one function to another
|
siddhanathan/ghc
|
compiler/simplCore/SimplCore.hs
|
bsd-3-clause
| 40,090
| 2
| 22
| 13,570
| 5,305
| 2,854
| 2,451
| 436
| 3
|
{- Some positive integers n have the property that the sum [ n + reverse(n) ] consists entirely of odd (decimal) digits. For instance, 36 + 63 = 99 and 409 + 904 = 1313. We will call such numbers reversible; so 36, 63, 409, and 904 are reversible. Leading zeroes are not allowed in either n or reverse(n).
There are 120 reversible numbers below one-thousand.
How many reversible numbers are there below one-billion (10^9)? -}
import qualified Zora.List as ZList
import qualified Data.Char as Char
-- | Reverse the decimal digits of a non-negative integer,
-- e.g. 409 -> 904.  Any leading zeroes in the reversal are dropped
-- by 'read' (e.g. 100 -> 1).
reverse_number :: Integer -> Integer
reverse_number n = read (reverse (show n))
-- | True when every decimal digit of a non-negative integer is odd.
all_digits_odd :: Integer -> Bool
all_digits_odd n = all (odd . Char.digitToInt) (show n)
-- | A number n is reversible when n + reverse(n) consists entirely of
-- odd decimal digits.  Numbers ending in 0 are excluded, since their
-- reversal would carry a leading zero.
reversible :: Integer -> Bool
reversible n
    | n `mod` 10 == 0 = False
    | otherwise       = all (odd . Char.digitToInt) (show total)
    where
      total :: Integer
      total = n + read (reverse (show n))
-- NOTE(review): the problem statement above asks for the count below
-- one billion (10^9), but the bound here is 10^7 -- presumably a
-- smaller bound for faster experimentation (the file lives under
-- "in progress"); confirm before treating the output as the answer.
ub = 10^7

main :: IO ()
main = do
	putStrLn . show $ ub
	putStrLn . show $ ZList.count reversible [1..(ub - 1)]--(10^9 - 1)]
|
bgwines/project-euler
|
src/in progress/problem145.hs
|
bsd-3-clause
| 935
| 5
| 12
| 180
| 211
| 109
| 102
| 15
| 1
|
module Main where
import Data.Proxy (Proxy(..))
import Safe.Length (safeLength)
-- | Example: apply 'safeLength', with the element type fixed to
-- [Char] via the Proxy, to a pair.
-- NOTE(review): whether this is meant to compile or to demonstrate a
-- rejected call depends on the Safe.Length API -- confirm against the
-- library before relying on this example.
main :: IO ()
main = print $ safeLength (Proxy :: Proxy [Char]) ('a', 'b')
|
stepcut/safe-length
|
example/Tuple.hs
|
bsd-3-clause
| 157
| 0
| 9
| 26
| 70
| 41
| 29
| 5
| 1
|
{-# LANGUAGE TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Berp.Compile.IdentString
-- Copyright : (c) 2010 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Strings encoding identifiers and ways to convert other things to them.
--
-----------------------------------------------------------------------------
module Berp.Compile.IdentString where
import Language.Python.Common.AST
-- | A plain string wrapped as a canonical identifier representation.
newtype IdentString = IdentString { fromIdentString :: String }
   deriving (Eq, Ord, Show)
-- | Types whose values can be viewed as an identifier string.
class ToIdentString t where
   toIdentString :: t -> IdentString
-- An IdentString is already in canonical form.
instance ToIdentString IdentString where
   toIdentString = id

-- A raw string converts by wrapping it in the newtype.
instance ToIdentString String where
   toIdentString = IdentString

-- A Python identifier converts via its name field.
instance ToIdentString (Ident a) where
   toIdentString ident = IdentString (ident_string ident)

-- Only variable expressions denote identifiers; anything else is a
-- programming error on the caller's side.
instance ToIdentString (Expr a) where
   toIdentString (Var { var_ident = ident }) = toIdentString ident
   toIdentString _other = error "toIdentString applied to an expression which is not a variable"

-- | Convert anything identifier-like straight to a plain 'String'.
identString :: ToIdentString a => a -> String
identString x = fromIdentString (toIdentString x)
|
bjpop/berp
|
libs/src/Berp/Compile/IdentString.hs
|
bsd-3-clause
| 1,253
| 0
| 10
| 194
| 215
| 121
| 94
| 18
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.