code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-- | This module duplicates the "Control.Monad.Trans.Class" module for
-- constrained monads.
module Control.Monad.Constrained.Trans
(MonadTrans(..))
where
import Control.Monad.Constrained
import Control.Monad.Trans.Cont (ContT (..))
import Control.Monad.Trans.Except (ExceptT (..))
import Control.Monad.Trans.Identity (IdentityT (..))
import Control.Monad.Trans.Maybe (MaybeT (..))
import Control.Monad.Trans.Reader (ReaderT (..))
import Control.Monad.Trans.State.Lazy as Lazy (StateT (..))
import Control.Monad.Trans.State.Strict as Strict (StateT (..))
import GHC.Exts
-- | A class for monad transformers with constraints. See
-- "Control.Monad.Trans.Class" for full documentation on the class without
-- constraints.
class MonadTrans t where
    -- | A type for monads that are liftable into the outer monad. For instance,
    -- since 'StateT' is defined like so:
    --
    -- @newtype 'StateT' s m a = 'StateT' { 'runStateT' :: s -> m (a, s) }@
    --
    -- the underlying monad needs not to be able to hold @a@, but @(a, s)@.
    type SuitableLift (t :: (* -> *) -> * -> *) (m :: * -> *) (a :: *) :: Constraint
    -- | Lift a monad into an outer monad.  The 'SuitableLift' constraint
    -- records what the underlying monad must be able to contain for the
    -- lift to be possible.
    lift
        :: (Monad m, SuitableLift t m a)
        => m a -> t m a
-- | Continuations: the underlying monad must hold the final answer @r@.
instance MonadTrans (ContT r) where
    type SuitableLift (ContT r) m a = Suitable m r
    lift m = ContT (m >>=)
    {-# INLINE lift #-}

-- | Readers add no structure of their own, so no extra constraint is needed.
instance MonadTrans (ReaderT r) where
    type SuitableLift (ReaderT r) m a = ()
    lift m = ReaderT (const m)
    {-# INLINE lift #-}

-- | Strict state: the underlying monad must hold the @(result, state)@ pair.
instance MonadTrans (Strict.StateT r) where
    type SuitableLift (Strict.StateT r) m a = Suitable m (a,r)
    lift m = Strict.StateT (\s -> fmap (flip (,) s) m)
    {-# INLINE lift #-}

-- | Lazy state: same constraint as the strict variant.
instance MonadTrans (Lazy.StateT r) where
    type SuitableLift (Lazy.StateT r) m a = Suitable m (a,r)
    lift m = Lazy.StateT (\s -> fmap (flip (,) s) m)
    {-# INLINE lift #-}

-- | Identity is a free lift: just wrap the action.
instance MonadTrans IdentityT where
    type SuitableLift IdentityT m a = ()
    lift = IdentityT
    {-# INLINE lift #-}

-- | The underlying monad must hold a 'Maybe' of the result.
instance MonadTrans MaybeT where
    type SuitableLift MaybeT m a = Suitable m (Maybe a)
    lift = MaybeT . fmap Just
    {-# INLINE lift #-}

-- | The underlying monad must hold an 'Either' of the result.
instance MonadTrans (ExceptT e) where
    type SuitableLift (ExceptT e) m a = Suitable m (Either e a)
    lift = ExceptT . fmap Right
    {-# INLINE lift #-}
| oisdk/constrained-monads | src/Control/Monad/Constrained/Trans.hs | mit | 2,554 | 0 | 12 | 655 | 664 | 382 | 282 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
module LogAnalysis where
import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)

import Log
-- | Parse one log line into a 'LogMessage'.
--
-- Recognised formats:
--
--   * @I \<time\> \<message\>@          — info
--   * @W \<time\> \<message\>@          — warning
--   * @E \<code\> \<time\> \<message\>@ — error with a severity code
--
-- Any line that does not match — including lines whose numeric fields
-- fail to parse — becomes 'Unknown'.  The original version used the
-- partial 'read', which crashed on lines such as @\"I oops hello\"@;
-- 'readMaybe' makes parsing total.
parseMessage :: String -> LogMessage
parseMessage msg = fromMaybe (Unknown msg) parsed
  where
    parsed = case words msg of
        ("I":time:rest) ->
            (\t -> LogMessage Info t (unwords rest)) <$> readMaybe time
        ("W":time:rest) ->
            (\t -> LogMessage Warning t (unwords rest)) <$> readMaybe time
        ("E":code:time:rest) ->
            (\c t -> LogMessage (Error c) t (unwords rest))
                <$> readMaybe code <*> readMaybe time
        _ -> Nothing
-- | Parse a whole log file: one 'LogMessage' per input line.
parse :: String -> [LogMessage]
parse = map parseMessage . lines
-- | Insert a message into a 'MessageTree' ordered by timestamp.
-- 'Unknown' messages carry no timestamp and are silently dropped.
insert :: LogMessage -> MessageTree -> MessageTree
insert (Unknown _) tree = tree
insert msg Leaf = Node Leaf msg Leaf
insert msg (Node left node right)
    -- Ties go left, which keeps insertion deterministic.
    | (time msg) <= (time node) = Node (insert msg left) node right
    | otherwise = Node left node (insert msg right)
    where
        time (LogMessage _ t _) = t
        -- 'Unknown' is filtered by the first equation, so this fallback
        -- of 0 appears unreachable for trees built via 'insert'/'build'.
        time _ = 0
-- | Build a 'MessageTree' from a list of messages, inserting one at a
-- time into an initially empty tree.
build :: [LogMessage] -> MessageTree
build = foldr insert Leaf
-- | Flatten a 'MessageTree' into a list via an in-order traversal, so
-- messages come out sorted by timestamp.
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf = []
inOrder (Node lhs msg rhs) = inOrder lhs ++ msg : inOrder rhs
-- | Extract the message text of severe errors (severity @>= 30@),
-- preserving the order of the input list.
-- NOTE(review): the threshold of 30 is hard-coded here — confirm it
-- matches the intended definition of "severe".
whatWentWrong :: [LogMessage] -> [String]
whatWentWrong xs = map message $ filter important xs
    where message (LogMessage _ _ m) = m
          -- 'important' never selects 'Unknown', so this branch only
          -- exists to keep 'message' total.
          message (Unknown _) = ""
          important (LogMessage (Error sev) _ _) = (sev >= 30)
          important _ = False
| tamasgal/haskell_exercises | CIS-194/homework-02/LogAnalysis.hs | mit | 1,296 | 0 | 12 | 305 | 566 | 285 | 281 | 31 | 4 |
{-# htermination (<=) :: Bool -> Bool -> Bool #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_LTEQ_8.hs | mit | 50 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Data.Bitcoin.ScriptSpec where
import Data.Bitcoin.Script
import qualified Data.ByteString.Char8 as BS8 (pack)
import qualified Data.ByteString.Lazy.Char8 as BSL8 (pack)
import Test.Hspec
-- | Round-trip and structural checks for Bitcoin script (de)serialisation.
spec :: Spec
spec =
  describe "when decompiling a specific script" $ do
    it "encoding a decoding a script results in the original hex" $
      let hex = BSL8.pack "76a914975efcba1e058667594dc57146022ec46560a63c88ac"
      in (encode . decode) hex `shouldBe` hex
    it "succesfully parses a script into a meaningful object" $ do
      -- Standard pay-to-pubkey-hash script.
      let decoded = decode (BSL8.pack "76a914975efcba1e058667594dc57146022ec46560a63c88ac")
      case decoded of
        (Script [OP_DUP, OP_HASH160, OP_PUSHDATA _ OPCODE, OP_EQUALVERIFY, OP_CHECKSIG]) ->
          return ()
        _ ->
          expectationFailure ("Result does not match expected: " ++ show decoded)
    it "succesfully parses an OP_RETURN script into a meaningful object" $
      decode (BSL8.pack "6a0b68656c6c6f20776f726c64")
        `shouldBe` (Script [OP_RETURN, OP_PUSHDATA (BS8.pack "hello world") OPCODE])
{-# LANGUAGE OverloadedStrings #-}
-- | A set of example programs to demonstrate NATS features and the
-- API of the "Network.Nats" library.
module Main
( main
) where
import Control.Exception
import Control.Monad
import Data.Maybe
import Network.Nats
import System.Environment
import Text.Printf
main :: IO ()
main =
    handle natsHandler $ do
        args <- getArgs
        case args of
            ["sync-sub"]    -> syncSub
            ["async-sub"]   -> asyncSub
            ["async-req"]   -> asyncReq
            ["topic"]       -> topic'
            ["queue-group"] -> queueGroup
            _               -> mapM_ putStrLn usage
  where
    -- Translate the NATS exceptions thrown from within 'withNats' into
    -- friendly messages; anything unexpected is rethrown.
    natsHandler :: NatsException -> IO ()
    natsHandler e =
        case e of
            ConnectionGiveUpException -> putStrLn "No NATS connection!"
            AuthorizationException    -> putStrLn "Can't authorize!"
            URIError err              -> putStrLn err
            _                         -> throwIO e
-- | Simple messaging.
syncSub :: IO ()
syncSub =
    withNats defaultSettings ["nats://localhost"] $ \nats -> do
        -- Subscribe to the topic "foo".
        (sid, queue) <- subscribe nats "foo" Nothing
        -- Publish to "foo" without requesting a reply.
        publish nats "foo" Nothing "Some payload"
        -- Block until a message arrives, then show its payload.
        received <- nextMsg queue
        printf "Received %s\n" (show $ payload received)
        -- Tear down the subscription again.
        unsubscribe nats sid Nothing
-- | Request help from a simple help service. The help service is
-- asynchronous.
asyncSub :: IO ()
asyncSub =
    withNats defaultSettings ["nats://localhost"] $ \nats -> do
        -- A simple - asynchronous - help service that will answer
        -- requesters that give a reply topic with "I can help".
        s1 <- subscribeAsync nats "help" Nothing $ \msg -> do
            printf "Help service received: %s\n" (show $ payload msg)
            when (isJust $ replyTo msg) $
                publish nats (fromJust $ replyTo msg) Nothing "I can help"
        -- Subscribe to help replies.
        (s2, q) <- subscribe nats "help.reply" Nothing
        -- Request help, naming "help.reply" as the reply topic.
        publish nats "help" (Just "help.reply") "Please ..."
        -- Wait for the service's reply.
        msg <- nextMsg q
        printf "Received: %s\n" (show $ payload msg)
        -- Unsubscribe from both topics.
        unsubscribe nats s1 Nothing
        unsubscribe nats s2 Nothing
-- | As 'asyncSub', but using the 'request' function to simplify.
asyncReq :: IO ()
asyncReq =
    withNats defaultSettings ["nats://localhost"] $ \nats -> do
        -- The same asynchronous help service as in 'asyncSub': answer
        -- any requester that supplies a reply topic.
        sid <- subscribeAsync nats "help" Nothing $ \incoming -> do
            printf "Help service received: %s\n" (show $ payload incoming)
            when (isJust $ replyTo incoming) $
                publish nats (fromJust $ replyTo incoming) Nothing "I can help"
        -- 'request' publishes with a generated reply topic and waits
        -- for the answer, all in one call.
        answer <- request nats "help" "Please ..."
        printf "Received: %s\n" (show $ payload answer)
        -- Tear down the help service.
        unsubscribe nats sid Nothing
-- | Demonstration of topic strings and how they are interpreted by
-- NATS.
topic' :: IO ()
topic' =
    withNats defaultSettings ["nats://localhost"] $ \nats -> do
        -- "*" matches any token, at any level of the subject.
        (_, queue1) <- subscribe nats "foo.*.baz" Nothing
        (_, queue2) <- subscribe nats "foo.bar.*" Nothing
        -- ">" matches any length of the tail of the subject, and can
        -- only be the last token.
        (_, queue3) <- subscribe nats "foo.>" Nothing
        -- This publishing matches all the above.
        publish nats "foo.bar.baz" Nothing "Hello world"
        -- Show that the message showed up on all queues.
        forM_ [queue1, queue2, queue3] $ \queue -> do
            msg <- nextMsg queue
            printf "Received: %s\n" (show $ payload msg)
        -- The NATS server will purge the subscriptions once we
        -- have disconnected.
-- | Some fun with queue groups. Subscribers that share the same
-- queue group will be load shared by NATS, i.e. only one subscriber
-- will answer each request.
queueGroup :: IO ()
queueGroup =
    withNats defaultSettings ["nats://localhost"] $ \nats -> do
        -- Install a couple of message echo workers. All sharing the
        -- same queue group ("workers"), so each request is delivered to
        -- exactly one of them.
        void $ subscribeAsync nats "echo" (Just "workers") $ worker nats "one"
        void $ subscribeAsync nats "echo" (Just "workers") $ worker nats "two"
        void $ subscribeAsync nats "echo" (Just "workers") $ worker nats "three"
        void $ subscribeAsync nats "echo" (Just "workers") $ worker nats "four"
        -- Request some echos. There will only be one of the echo
        -- workers answering each request.
        msg1 <- request nats "echo" "E1 E1 E1"
        printf "Received: %s\n" (show $ payload msg1)
        msg2 <- request nats "echo" "E2 E2 E2"
        printf "Received: %s\n" (show $ payload msg2)
    where
        -- Echo the payload back to the requester (when a reply topic is
        -- present), announcing which worker handled the request.
        worker :: Nats -> String -> Msg -> IO ()
        worker nats name msg = do
            printf "Request handled by %s\n" name
            when (isJust $ replyTo msg) $
                publish nats (fromJust $ replyTo msg) Nothing (payload msg)
-- | Help text printed when the program is started with a missing or
-- unrecognised argument.
usage :: [String]
usage = header ++ demos
  where
    header =
        [ "Usage: hats-examples <example>"
        , ""
        , "Examples:"
        , ""
        ]
    demos =
        [ "sync-sub : Demo of synchronous handling of messages."
        , "async-sub : Demo of asynchronous handling of messages."
        , "async-req : Demo of the request API."
        , "topic : Demo of topic structure."
        , "queue-group : Demo of queue group handling."
        ]
| kosmoskatten/hats | example/Examples.hs | mit | 5,850 | 0 | 18 | 1,767 | 1,187 | 596 | 591 | 95 | 9 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SQLResultSetRowList
(item, item_, getLength, SQLResultSetRowList(..),
gTypeSQLResultSetRowList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SQLResultSetRowList.item Mozilla SQLResultSetRowList.item documentation>
item ::
     (MonadDOM m) =>
       SQLResultSetRowList -> Word -> m (Record DOMString SQLValue)
item self index
  -- Invoke the JS method @item(index)@ and decode the returned row.
  = liftDOM
      ((self ^. jsf "item" [toJSVal index]) >>= fromJSValUnchecked)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SQLResultSetRowList.item Mozilla SQLResultSetRowList.item documentation>
-- Effect-only variant of 'item': the returned row is discarded.
item_ :: (MonadDOM m) => SQLResultSetRowList -> Word -> m ()
item_ self index
  = liftDOM (void (self ^. jsf "item" [toJSVal index]))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SQLResultSetRowList.length Mozilla SQLResultSetRowList.length documentation>
-- Reads the JS @length@ property and rounds it to a 'Word'.
getLength :: (MonadDOM m) => SQLResultSetRowList -> m Word
getLength self
  = liftDOM (round <$> ((self ^. js "length") >>= valToNumber))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SQLResultSetRowList.hs | mit | 1,920 | 0 | 12 | 244 | 495 | 302 | 193 | 31 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RankNTypes #-}
module Text.PrintfA
where
import Text.Printf
data PrintfArgT = forall a. PrintfArg a => P a
data PrintfTypeT = T { unT :: forall r. PrintfType r => r }
-- | A list-driven 'printf': the format arguments are supplied as a list
-- of existentially wrapped 'PrintfArg' values and applied one by one.
printfa :: PrintfType t => String -> [ PrintfArgT ] -> t
printfa format args = unT (foldl apply (T (printf format)) args)
  where
    -- Feed one wrapped argument to the partially applied printf.
    apply (T f) (P a) = T (f a)
| tdammers/ginger | src/Text/PrintfA.hs | mit | 369 | 0 | 11 | 75 | 154 | 83 | 71 | 8 | 1 |
{-
Copyright (c) 2008
Russell O'Connor
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}
-- |Specifies 'Colour's in accordance with the sRGB standard.
module Data.Colour.SRGB
(Colour, RGB(..)
,sRGB24, sRGBBounded, sRGB
,toSRGB24, toSRGBBounded, toSRGB
,sRGB24shows, sRGB24show
,sRGB24reads, sRGB24read
,sRGBSpace
)
where
import Data.Word
import Numeric
import Data.Colour.Internal (quantize)
import Data.Colour.SRGB.Linear
import Data.Colour.RGBSpace hiding (transferFunction)
{- Non-linear colour space -}
{- the sRGB transfer function approximates a gamma of about 1/2.2 -}
-- Forward sRGB encoding: linear light to non-linear component.  Small
-- values use the linear segment; larger values follow the offset
-- 1/2.4 power curve with a = 0.055.
transferFunction lin | lin == 1 = 1
                     | lin <= 0.0031308 = 12.92*lin
                     | otherwise = (1 + a)*lin**(1/2.4) - a
  where
    a = 0.055

-- Inverse of 'transferFunction': non-linear sRGB component back to
-- linear light, with the matching linear segment below 0.04045.
invTransferFunction nonLin | nonLin == 1 = 1
                           | nonLin <= 0.04045 = nonLin/12.92
                           | otherwise =
                               ((nonLin + a)/(1 + a))**2.4
  where
    a = 0.055
-- |Construct a colour from an sRGB specification.
-- Input components are expected to be in the range [0..1].
-- Build the RGB triple explicitly, linearise each component with the
-- inverse transfer function, then construct the linear colour.
sRGB :: (Ord b, Floating b) => b -> b -> b -> Colour b
sRGB r' g' b' = uncurryRGB rgb (invTransferFunction <$> RGB r' g' b')
-- |Construct a colour from an sRGB specification.
-- Input components are expected to be in the range [0..'maxBound'].
sRGBBounded :: (Ord b, Floating b, Integral a, Bounded a) =>
               a -> a -> a -> Colour b
sRGBBounded r' g' b' = uncurryRGB sRGB (fmap f (RGB r' g' b'))
  where
    -- Scale each integral component down to the unit interval.
    f x' = (fromIntegral x'/m)
    -- 'asTypeOf' pins 'maxBound' to the component type of the input.
    m = fromIntegral $ maxBound `asTypeOf` r'

-- |Construct a colour from a 24-bit (three 8-bit words) sRGB
-- specification.
sRGB24 :: (Ord b, Floating b) => Word8 -> Word8 -> Word8 -> Colour b
sRGB24 = sRGBBounded

-- |Return the sRGB colour components in the range [0..1].
toSRGB :: (Ord b, Floating b) => Colour b -> RGB b
toSRGB c = fmap transferFunction (toRGB c)
{- Results are clamped and quantized -}
-- |Return the approximate sRGB colour components in the range
-- [0..'maxBound'].
-- Out of range values are clamped.
toSRGBBounded :: (RealFrac b, Floating b, Integral a, Bounded a) =>
                 Colour b -> RGB a
toSRGBBounded c = fmap f (toSRGB c)
  where
    -- Scale up to the full integral range; 'quantize' clamps and rounds.
    f x' = quantize (m*x')
    -- 'asTypeOf' against the (never evaluated) result of f fixes the
    -- component type for 'maxBound'.
    m = fromIntegral $ maxBound `asTypeOf` (f undefined)

-- |Return the approximate 24-bit sRGB colour components as three 8-bit
-- components.
-- Out of range values are clamped.
toSRGB24 :: (RealFrac b, Floating b) => Colour b -> RGB Word8
toSRGB24 = toSRGBBounded
-- |Show a colour in hexadecimal form, e.g. \"#00aaff\"
sRGB24shows :: (RealFrac b, Floating b) => Colour b -> ShowS
sRGB24shows c =
    ("#"++) . showHex2 r' . showHex2 g' . showHex2 b'
  where
    RGB r' g' b' = toSRGB24 c
    -- Always emit two hex digits per component (zero-pad below 0x10).
    showHex2 x | x <= 0xf = ("0"++) . showHex x
               | otherwise = showHex x

-- |Show a colour in hexadecimal form, e.g. \"#00aaff\"
sRGB24show :: (RealFrac b, Floating b) => Colour b -> String
sRGB24show x = sRGB24shows x ""
-- |Read a colour in hexadecimal form, e.g. \"#00aaff\" or \"00aaff\"
sRGB24reads :: (Ord b, Floating b) => ReadS (Colour b)
sRGB24reads "" = []
sRGB24reads x =
    [(sRGB24 a b c, c0)
    |(a,a0) <- readPair x', (b,b0) <- readPair a0, (c,c0) <- readPair b0]
  where
    -- Strip an optional leading '#'.  'head'/'tail' are safe here
    -- because the empty string is handled by the first equation.
    x' | head x == '#' = tail x
       | otherwise = x
    -- Read exactly two hex digits; fewer than two characters fails.
    readPair [] = []
    readPair [_] = []
    readPair a = [(x,a1)|(x,"") <- readHex a0]
      where
        (a0,a1) = splitAt 2 a

-- |Read a colour in hexadecimal form, e.g. \"#00aaff\" or \"00aaff\"
-- Calls 'error' when the input does not parse exactly once with no
-- trailing characters.
sRGB24read :: (Ord b, Floating b) => String -> (Colour b)
sRGB24read x | length rx /= 1 || not (null (snd (head rx))) =
                 error "Data.Colour.SRGB.sRGB24read: no parse"
             | otherwise = fst (head rx)
  where
    rx = sRGB24reads x
-- |The sRGB colour space
sRGBSpace :: (Ord a, Floating a) => RGBSpace a
sRGBSpace = mkRGBSpace sRGBGamut transfer
  where
    -- Pair the forward and inverse transfer curves with their
    -- approximate gamma of 1/2.2.
    transfer = TransferFunction transferFunction invTransferFunction (recip 2.2)
{-|
Module: Sally.Game
Description: Core game logic and some database operations
Maintainer: lawrence.dunn.iii@gmail.com
License: MIT
-}
{-# language OverloadedStrings #-}
{-# language TypeOperators #-}
{-# language DeriveGeneric #-}
module Sally.Game where
import Data.Char (isSpace)
import Data.Text (Text)
import qualified Data.Text as T
import Database.SQLite.Simple
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Aeson
import GHC.Generics
import Data.UUID
-- | A user's guess. We choose to store the user submitting the guess as a
-- simple `Text`
data Gs = Gs
    { gsLikes :: Text    -- ^ the thing Sally allegedly likes
    , gsNotLikes :: Text -- ^ the thing Sally allegedly does not like
    , gsUser :: Text     -- ^ identifier of the submitting user
    } deriving (Show, Eq, Generic)

-- Column order matches the table layout created in 'initTable'.
instance ToRow Gs where
    toRow (Gs lk nlk us) = toRow (lk, nlk, us)

instance FromRow Gs where
    fromRow = Gs <$> field <*> field <*> field

-- JSON instances fall back to the 'Generic' defaults.
instance ToJSON Gs where
instance FromJSON Gs where
-- | Sally likes guesses where she likes the first word but
-- not the second. Sally likes words that contain a repeated adjacent
-- letter once spaces are removed.
--
-- NOTE(review): the comparison is case-sensitive — unlike what an
-- earlier comment claimed, the input is /not/ lower-cased first, so
-- e.g. \"Aa\" does not count as a repeat.  Confirm intent.
isValidGs :: Gs -> Bool
isValidGs (Gs lk nlk _) =
    (likes lk) && (not. likes $ nlk)
  where
    nospc = T.filter (not. isSpace)
    likes :: Text -> Bool
    likes w =
        -- Test whether any character equals its successor.
        -- NOTE(review): 'T.tail' on an empty Text is only safe if
        -- 'T.zip' never forces its second argument when the first is
        -- empty — confirm for all-space input.
        any (uncurry (==)) $ T.zip (nospc w) (T.tail $ nospc w)
-- | Create the guesses table if it does not already exist.  The @time@
-- column defaults to the database server's local time at insertion.
initTable :: Connection -> IO ()
initTable conn =
    execute_ conn
        " CREATE TABLE IF NOT EXISTS \
        \ gs ( likes text \
        \ , notlikes text \
        \ , user text \
        \ , isvalid boolean \
        \ , time datetime default \
        \ (datetime('now','localtime')) )"

-- | Drop the guesses table (irreversible).
dropTable :: Connection -> IO ()
dropTable conn =
    execute_ conn "DROP table gs"
-- | The game result of a user's guess, which records time and validity of the
-- guess. These are the objects seen by the database.
data GsRes = GsRes
    { resGs :: Gs        -- ^ the original guess
    , resValid :: Bool   -- ^ whether the guess is valid (see 'isValidGs')
    , resTime :: UTCTime -- ^ timestamp assigned when the guess was scored
    } deriving (Generic, Show)

-- Rows are stored flat: the 'Gs' columns followed by validity and time.
instance ToRow GsRes where
    toRow (GsRes gs v t) = toRow $ gs :. (Only v) :. (Only t)

instance FromRow GsRes where
    fromRow = GsRes <$> fromRow
                    <*> field
                    <*> field

instance ToJSON GsRes where
instance FromJSON GsRes where

-- | Score a guess, stamping it with the current (client-side) time.
gsResOf :: Gs -> IO (GsRes)
gsResOf gs = do
    tm <- getCurrentTime
    return $ GsRes gs (isValidGs gs) tm
-- | Grab a whole number of guesses from the database for displaying
nGuessFrom :: Int -> Connection -> IO [GsRes]
nGuessFrom n conn = query conn
    "SELECT * FROM gs order by time desc limit (?)"
    (Only n)

-- | Like 'nGuessFrom', but restricted to guesses submitted by the
-- given user.
nGuessFromUser :: Int -> UUID -> Connection -> IO [GsRes]
nGuessFromUser n uuid conn = query conn
    "SELECT * FROM gs WHERE user = (?) order by time desc limit (?)"
    (Only (toText uuid) :. Only n)

-- | Store a scored guess.
-- NOTE(review): the stored time is the explicit 'resTime' from
-- 'gsResOf' (client side), not the table's database-side default —
-- an earlier comment claiming server-recorded time looks stale.
insertGuess :: GsRes -> Connection -> IO ()
insertGuess (GsRes gs v tm) conn = do
    execute conn
        "INSERT INTO gs (likes, notlikes, user, isvalid, time) values ((?),(?),(?),(?),(?))"
        (gs :. Only v :. Only tm)
| dunnl/sally | src/Sally/Game.hs | mit | 3,286 | 0 | 11 | 872 | 731 | 391 | 340 | 67 | 1 |
module KMC.Syntax.Parser (parseRegex, anchoredRegexP) where
import Control.Applicative hiding (optional, many)
import Data.Char (chr)
import Data.Functor.Identity (Identity)
import Text.Parsec.Expr (Assoc (..), Operator (..),
OperatorTable, buildExpressionParser)
import Text.Parsec.Prim (parserZero)
import Text.ParserCombinators.Parsec hiding (Parser, (<|>))
import KMC.Syntax.Config
import KMC.Syntax.External
import KMC.Syntax.Numeral
import KMC.Syntax.ParserCombinators
import KMC.Syntax.ParserTypes
import KMC.Syntax.Unicode
import Prelude
-- | Parse a regular expression or fail with an error message.
parseRegex :: RegexParserConfig
           -> String -- ^ Input string
           -> Either String (Anchoring, ParsedRegex) -- ^ Error message or parse result.
parseRegex conf input =
    -- Render any Parsec error to a plain String; pass successes through.
    either (Left . show) Right $ parse (anchoredRegexP conf) "-" input
-- | Wraps a parser in a parser that throws away whitespace and
-- comments if the configuration object tells it to.
freespaced :: RegexParserConfig -> Parser a -> Parser a
freespaced conf p
    -- In free-spacing mode, surrounding whitespace/comments are skipped.
    | rep_freespacing conf = spacesAndCommentsP *> p <* spacesAndCommentsP
    | otherwise            = p
anchoredRegexP :: RegexParserConfig -> Parser (Anchoring, ParsedRegex)
anchoredRegexP conf = freespaced conf $ do
    -- When units are enabled, '1' denotes the unit regex and must not
    -- be parsed as an ordinary character.
    let conf' = if rep_with_unit conf
                then conf { rep_illegal_chars = '1' : rep_illegal_chars conf}
                else conf
    anStart <- anchorStart
    re <- regexP conf'
    anEnd <- anchorEnd
    -- Fold the two optional anchors into a single 'Anchoring' value.
    return $ flip (,) re $ case (anStart, anEnd) of
        (True, True) -> AnchorBoth
        (True, _ ) -> AnchorStart
        (_ , True) -> AnchorEnd
        _ -> AnchorNone
  where
    -- '^' and '$' are only recognised when anchoring is enabled;
    -- otherwise both trivially report "absent".
    (anchorStart, anchorEnd) = if rep_anchoring conf
        then ((char '^' >> return True) <|> (return False),
              (char '$' >> return True) <|> (return False))
        else (return False, return False)
-- | The main regexp parser.
regexP :: RegexParserConfig -> Parser ParsedRegex
-- Operators come from 'table'; the atoms below are tried in order, each
-- gated by its feature flag in the config.
regexP conf = freespaced conf $ buildExpressionParser (table conf) $
        ifElseP (rep_grouping conf)
            -- With grouping enabled, ordinary parens capture and the
            -- non-group form does not; otherwise parens never capture.
            ( (nonGroupParens (regexP conf) >>= return . Group False)
            <|> (parens (regexP conf) >>= return . Group True))
            ( parens (regexP conf) >>= return . Group False)
    <|> ifP (rep_posix_names conf) (freespaced conf posixNamedSetP)
    <|> ifP (rep_charclass conf) (brackets (classP conf))
    <|> ifP (rep_suppression conf) (suppressDelims (suppressedP conf))
    <|> ifP (rep_wildcard conf) (freespaced conf wildcardP)
    <|> ifP (rep_with_unit conf) (freespaced conf unitP)
    <|> (freespaced conf $ charP NoCC conf)
-- | Throw away whitespace and comments.
spacesAndCommentsP :: Parser ()
spacesAndCommentsP = spaces
    >> optional (
        -- A '#' starts a comment running to end-of-line or EOF.
        char '#' >>
        manyTill anyChar (eof <|> (newline >> return ())) >>
        spaces)
-- | Run the parser only when the feature flag is set; fail otherwise.
ifP :: Bool -> Parser a -> Parser a
ifP b p = ifElseP b p parserZero

-- | Choose between two parsers based on a (static) feature flag.
ifElseP :: Bool -> Parser a -> Parser a -> Parser a
ifElseP b onTrue onFalse = if b then onTrue else onFalse
-- | A "conditional cons" operator.  Conses the element like the normal
-- cons when the flag is 'True'; otherwise leaves the list untouched.
(?:) :: (Bool, a) -> [a] -> [a]
(?:) (flag, x) xs
    | flag      = x : xs
    | otherwise = xs
infixr 5 ?: -- Same fixity as (:)
-- | The operator table, defining how subexpressions are glued together with
-- the operators, along with their fixity and associativity information.
table :: RegexParserConfig -> OperatorTable String () Identity ParsedRegex
table conf = [
    -- The various postfix operators (which are determined by the
    -- configuration object) bind tightest.  '?:' keeps an entry only
    -- when its feature flag is enabled.
    map Postfix $ map (freespaced conf) $
        -- Lazy and greedy Kleene Star:
        (rep_lazyness conf, try (string "*?") >> return LazyStar) ?:
        (char '*' >> return Star) :
        -- Lazy and greedy ?-operator (1 or 0 repetitions):
        (rep_lazyness conf && rep_question conf,
            try (string "??") >> return LazyQuestion) ?:
        (rep_question conf, char '?' >> return Question) ?:
        -- Lazy and greedy +-operator (1 or more repetitions):
        (rep_lazyness conf && rep_plus conf,
            try (string "+?") >> return LazyPlus) ?:
        (rep_plus conf, char '+' >> return Plus) ?:
        -- Lazy and greedy range expressions:
        (rep_lazyness conf && rep_ranges conf,
            try (braces rangeP <* char '?')
                >>= \(n, m) -> return (\e -> LazyRange e n m)) ?:
        (rep_ranges conf,
            braces (rangeP)
                >>= \(n, m) -> return (\e -> Range e n m)) ?:
        []
    -- Product (juxtaposition) binds tighter than sum.
  , [ Infix (freespaced conf (notFollowedBy (char '|') >> return Concat)) AssocRight ]
    -- Sum binds least tight.
  , [ Infix (freespaced conf (char '|' >> return Branch)) AssocRight ]
  ]
-- | Parse a regular expression and suppress it.
suppressedP :: RegexParserConfig -> Parser ParsedRegex
suppressedP conf = Suppress <$> regexP conf

-- | Parse a dot, returning a constructor representing the "wildcard symbol".
-- This must be added to the datatype Regex before it can be used.
wildcardP :: Parser ParsedRegex
wildcardP = Dot <$ char '.'

-- | If the parser is configured to recognize unit, the literal
-- character '1' denotes the unit regex 'One'.
unitP :: Parser ParsedRegex
unitP = One <$ char '1'

-- | Parse a single character and build a Regex for it.
charP :: CharClassPos -> RegexParserConfig -> Parser ParsedRegex
charP ccp conf = Chr <$> legalChar ccp conf

-- | Signal whether the character to be parsed is outside of a character class,
-- or if it is inside one, whether it is the first or not.
data CharClassPos = NoCC | FirstInCC | InCC
-- | Parse a legal character. Characters can be specificed directly as byte values
-- using the syntax \xFF where FF is the byte in hexadecimal notation, or the syntax
-- \x{F...} where at least one hexadecimal digit is given between the braces.
legalChar :: CharClassPos -> RegexParserConfig -> Parser Char
legalChar ccp conf = try (char '\\' *> ( u <$> oneOf (map fst cs)
                                    <|> char 'x' *> (namedByte <|> namedByteSequence)))
               <|> try (ifP (rep_unicode conf) unicodeCodePointP)
               <|> noneOf notChars
  where -- Escape table: named escapes plus every special character,
        -- which may be escaped to stand for itself.
        cs = [ ('n', '\n'), ('t', '\t'), ('r', '\r'),
               ('a', '\a'), ('f', '\f'), ('v', '\v') ] ++
             zip notChars notChars
        -- Characters that may not appear bare in the current position.
        notChars = (if inCC then "" -- These are always special
                    else "*|()\\" ++ rep_illegal_chars conf)
                   ++ (
                   (outofCC && rep_wildcard conf, '.') ?: -- All these are special
                   (outofCC && rep_anchoring conf, '$') ?: -- on the condition that
                   (outofCC && rep_anchoring conf, '^') ?: -- their superpowers have
                   (outofCC && rep_charclass conf, '[') ?: -- been "unlocked" by the
                   (outofCC && rep_question conf, '?') ?: -- RegexParserConfig.
                   (outofCC && rep_plus conf, '+') ?: --
                   (outofCC && rep_ranges conf, '{') ?: --
                   (outofCC && rep_freespacing conf, '#') ?: --
                   (inCC && not inCCFirst, ']') ?: [] )
        -- Look up the meaning of an escape character.  The partial
        -- pattern is safe: 'oneOf (map fst cs)' only yields table keys.
        u c = let Just x = lookup c cs in x
        inCC = not outofCC
        outofCC = case ccp of
            NoCC -> True
            _ -> False
        inCCFirst = case ccp of
            FirstInCC -> True
            _ -> False
-- | Parse a string of the form "FF" into the char with byte value FF.
-- The @(EQ, 2)@ bound requests exactly two hexadecimal digits.
namedByte :: Parser Char
namedByte = chr <$> numeralP Hexadecimal (Just (EQ, 2))

-- | Parse a string of the form "{ABCD...}" into a char with the given hex code.
-- The @(GT, 1)@ bound requests at least one hexadecimal digit.
namedByteSequence :: Parser Char
namedByteSequence = chr <$>
    braces (numeralP Hexadecimal (Just (GT, 1)))
-- | Parse a range of numbers. For n, m natural numbers:
-- 'n' - "n repetitions" - (n, Just n)
-- 'n,' - "n or more repetitions" - (n, Nothing)
-- 'n,m' - "between n and m repetitions" - (n, Just m)
rangeP :: Parser (Int, Maybe Int)
rangeP = do
    n <- numeralP Decimal Nothing
    -- Optional suffix: bare "n" repeats exactly n times, "n," leaves
    -- the upper bound open ('Nothing'), and "n,m" bounds it by m.
    ((,) n) <$> (char ',' *> optionMaybe (numeralP Decimal (Just (GT, 1)))
            <|> pure (Just n))
-- | Parse a character class. A character class consists of a sequence of
-- ranges: [a-ctx-z] is the range [(a,c), (t,t), (x,z)]. If the first symbol
-- is a caret ^, the character class is negative, i.e., it specifies all
-- symbols *not* in the given ranges.
classP :: RegexParserConfig -> Parser ParsedRegex
-- A leading '^' makes the class negative (the Bool flips to False).
classP conf = Class <$> ((False <$ char '^') <|> pure True)
                    <*> ((:) <$> charClassP True <*> many (charClassP False))
  where
    -- Parse one item; the first item may legally contain characters
    -- (e.g. ']') that are special later in the class.
    charClassP isFirst = do
        c1 <- ifElseP isFirst (legalChar FirstInCC conf) (legalChar InCC conf)
        -- "a-z" is a range; a lone character is the degenerate (c, c).
        try (char '-' >> ((,) c1) <$> (legalChar InCC conf))
            <|> pure (c1, c1)
-- From the sed manual:
-- Though character classes don't generally conserve space on the line, they help make scripts portable for international use. The equivalent character sets /for U.S. users/ follows:
-- [[:alnum:]] - [A-Za-z0-9] Alphanumeric characters
-- [[:alpha:]] - [A-Za-z] Alphabetic characters
-- [[:blank:]] - [ \x09] Space or tab characters only
-- [[:cntrl:]] - [\x00-\x19\x7F] Control characters
-- [[:digit:]] - [0-9] Numeric characters
-- [[:graph:]] - [!-~] Printable and visible characters
-- [[:lower:]] - [a-z] Lower-case alphabetic characters
-- [[:print:]] - [ -~] Printable (non-Control) characters
-- [[:punct:]] - [!-/:-@[-`{-~] Punctuation characters
-- [[:space:]] - [ \t\v\f] All whitespace chars
-- [[:upper:]] - [A-Z] Upper-case alphabetic characters
-- [[:xdigit:]] - [0-9a-fA-F] Hexadecimal digit characters
-- FIXME: What's the difference between NamedSet True and NamedSet False?
-- | Parse a POSIX named character class such as @[[:alpha:]]@ into the
-- corresponding 'NamedSet' (see the equivalence table in the comment
-- above).
posixNamedSetP :: Parser ParsedRegex
posixNamedSetP = parseTable
    (try . delims "[[:" ":]]" . string) (NamedSet True)
    [ ("alnum", NSAlnum)
    , ("alpha", NSAlpha)
    , ("ascii", NSAscii)
    , ("blank", NSBlank)
    , ("cntrl", NSCntrl)
    , ("digit", NSDigit)
    , ("graph", NSGraph)
    , ("lower", NSLower)
    , ("print", NSPrint)
    , ("punct", NSPunct)
    , ("space", NSSpace)
    , ("upper", NSUpper)
    , ("word", NSWord)
      -- Bug fix: this key was "digit", duplicating the NSDigit entry
      -- and leaving "[[:xdigit:]]" unparsable.
    , ("xdigit", NSXDigit)
    ]
| diku-kmc/regexps-syntax | KMC/Syntax/Parser.hs | mit | 11,095 | 0 | 20 | 3,415 | 2,534 | 1,360 | 1,174 | 160 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module PureScript.Ide.FilterSpec where
import Test.Hspec
import PureScript.Ide.Filter
import PureScript.Ide.Types
-- Fixture: one declaration of each flavour, plus a module with a
-- dependency, so every filter has something to match.
modules :: [Module]
modules =
    [
      ("Module.A", [FunctionDecl "function1" ""]),
      ("Module.B", [DataDecl "data1" ""]),
      ("Module.C", [ModuleDecl "Module.C" []]),
      ("Module.D", [Dependency "Module.C" [] Nothing, FunctionDecl "asd" ""])
    ]
-- Helpers that run a single filter over the fixture modules.
-- NOTE(review): top-level signatures are omitted; the argument types
-- are fixed by the filter functions — confirm before adding them.
runEq s = runFilter (equalityFilter s) modules
runPrefix s = runFilter (prefixFilter s) modules
runModule ms = runFilter (moduleFilter ms) modules
runDependency ms = runFilter (dependencyFilter ms) modules
-- | Specs for the individual declaration filters.  The explicit
-- 'Spec' signature was missing (flagged by @-Wall@).
spec :: Spec
spec = do
  describe "equality Filter" $ do
    it "removes empty modules" $
      runEq "test" `shouldBe` []
    it "keeps function declarations that are equal" $
      runEq "function1" `shouldBe` [head modules]
    -- TODO: It would be more sensible to match Constructors
    it "keeps data declarations that are equal" $
      runEq "data1" `shouldBe` [modules !! 1]
  describe "prefixFilter" $ do
    it "keeps everything on empty string" $
      runPrefix "" `shouldBe` modules
    it "keeps functionname prefix matches" $
      runPrefix "fun" `shouldBe` [head modules]
    it "keeps data decls prefix matches" $
      runPrefix "dat" `shouldBe` [modules !! 1]
    it "keeps module decl prefix matches" $
      runPrefix "Mod" `shouldBe` [modules !! 2]
  describe "moduleFilter" $ do
    it "removes everything on empty input" $
      runModule [] `shouldBe` []
    it "only keeps the specified modules" $
      runModule ["Module.A", "Module.C"] `shouldBe` [head modules, modules !! 2]
    it "ignores modules that are not in scope" $
      runModule ["Module.A", "Module.C", "Unknown"] `shouldBe` [head modules, modules !! 2]
  describe "dependencyFilter" $ do
    it "removes everything on empty input" $
      runDependency [] `shouldBe` []
    it "only keeps the specified modules if they have no imports" $
      runDependency ["Module.A", "Module.B"] `shouldBe` [head modules, modules !! 1]
    it "keeps the specified modules and their imports" $
      runDependency ["Module.A", "Module.D"] `shouldBe` [head modules, modules !! 2, modules !! 3]
| kRITZCREEK/psc-ide | test/PureScript/Ide/FilterSpec.hs | mit | 2,170 | 0 | 13 | 447 | 618 | 318 | 300 | 47 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
module PostgREST.DbStructure (
getDbStructure
, accessibleTables
) where
import qualified Hasql.Decoders as HD
import qualified Hasql.Encoders as HE
import qualified Hasql.Query as H
import Control.Applicative
import qualified Data.HashMap.Strict as M
import Data.List (elemIndex)
import Data.Maybe (fromJust)
import Data.Text (split, strip,
breakOn, dropAround)
import qualified Data.Text as T
import qualified Hasql.Session as H
import PostgREST.Types
import Text.InterpolatedString.Perl6 (q)
import GHC.Exts (groupWith)
import Protolude
import Unsafe (unsafeHead)
-- | Query the database catalog and assemble the full 'DbStructure' for the
-- given schema: tables, columns, relations, primary keys and procs.
-- The raw catalog relations are then enriched step by step (view synonyms,
-- parent links, many-to-many links) before being stored.
getDbStructure :: Schema -> H.Session DbStructure
getDbStructure schema = do
  tabs <- H.query () allTables
  cols <- H.query () $ allColumns tabs
  syns <- H.query () $ allSynonyms cols
  rels <- H.query () $ allRelations tabs cols
  keys <- H.query () $ allPrimaryKeys tabs
  procs <- H.query schema accessibleProcs
  -- Each derivation pass below only adds relations / annotations; the raw
  -- query results are never thrown away.
  let rels' = (addManyToManyRelations . raiseRelations schema syns . addParentRelations . addSynonymousRelations syns) rels
      cols' = addForeignKeys rels' cols
      keys' = synonymousPrimaryKeys syns keys
  return DbStructure {
      dbTables = tabs
    , dbColumns = cols'
    , dbRelations = rels'
    , dbPrimaryKeys = keys'
    , dbProcs = procs
    }
-- | Decode rows of (schema, name, insertable) into 'Table's.
decodeTables :: HD.Result [Table]
decodeTables =
  HD.rowsList tblRow
  where
    tblRow = Table <$> HD.value HD.text <*> HD.value HD.text
      <*> HD.value HD.bool
-- | Decode column rows into 'Column's.  Rows whose table cannot be found in
-- the supplied table list are silently dropped ('mapMaybe').
decodeColumns :: [Table] -> HD.Result [Column]
decodeColumns tables =
  mapMaybe (columnFromRow tables) <$> HD.rowsList colRow
  where
    -- 11-tuple mirroring the column order of the allColumns query.
    colRow =
      (,,,,,,,,,,)
        <$> HD.value HD.text <*> HD.value HD.text
        <*> HD.value HD.text <*> HD.value HD.int4
        <*> HD.value HD.bool <*> HD.value HD.text
        <*> HD.value HD.bool
        <*> HD.nullableValue HD.int4
        <*> HD.nullableValue HD.int4
        <*> HD.nullableValue HD.text
        <*> HD.nullableValue HD.text
-- | Decode foreign-key rows into 'Relation's.  Rows referencing unknown
-- tables or columns are dropped ('mapMaybe').
decodeRelations :: [Table] -> [Column] -> HD.Result [Relation]
decodeRelations tables cols =
  mapMaybe (relationFromRow tables cols) <$> HD.rowsList relRow
  where
    relRow = (,,,,,)
      <$> HD.value HD.text
      <*> HD.value HD.text
      <*> HD.value (HD.array (HD.arrayDimension replicateM (HD.arrayValue HD.text)))
      <*> HD.value HD.text
      <*> HD.value HD.text
      <*> HD.value (HD.array (HD.arrayDimension replicateM (HD.arrayValue HD.text)))
-- | Decode (schema, table, column) rows into 'PrimaryKey's; rows for
-- unknown tables are dropped.
decodePks :: [Table] -> HD.Result [PrimaryKey]
decodePks tables =
  mapMaybe (pkFromRow tables) <$> HD.rowsList pkRow
  where
    pkRow = (,,) <$> HD.value HD.text <*> HD.value HD.text <*> HD.value HD.text
-- | Decode (source schema/table/column, view schema/table/column) rows into
-- column synonym pairs; rows with unknown columns are dropped.
decodeSynonyms :: [Column] -> HD.Result [(Column,Column)]
decodeSynonyms cols =
  mapMaybe (synonymFromRow cols) <$> HD.rowsList synRow
  where
    synRow = (,,,,,)
      <$> HD.value HD.text <*> HD.value HD.text
      <*> HD.value HD.text <*> HD.value HD.text
      <*> HD.value HD.text <*> HD.value HD.text
-- | Fetch the stored procedures of a schema, keyed by name.  The textual
-- argument list and return type coming from the catalog are parsed here.
accessibleProcs :: H.Query Schema (M.HashMap Text ProcDescription)
accessibleProcs =
  H.statement sql (HE.value HE.text)
  (M.fromList . map addName <$>
    HD.rowsList (
      ProcDescription <$> HD.value HD.text
        <*> (parseArgs <$> HD.value HD.text)
        <*> (parseRetType <$>
          HD.value HD.text <*>
          HD.value HD.text <*>
          HD.value HD.bool <*>
          HD.value HD.char)
        <*> (parseVolatility <$>
          HD.value HD.char)
      )
    ) True
  where
    -- Pair each description with its name so it can go into the HashMap.
    addName :: ProcDescription -> (Text, ProcDescription)
    addName pd = (pdName pd, pd)
    -- The catalog gives arguments as one comma separated string.
    parseArgs :: Text -> [PgArg]
    parseArgs = mapMaybe (parseArg . strip) . split (==',')
    -- "name type [DEFAULT expr]"; an argument with no type text is skipped.
    parseArg :: Text -> Maybe PgArg
    parseArg a =
      let (body, def) = breakOn " DEFAULT " a
          (name, typ) = breakOn " " body in
      if T.null typ
        then Nothing
        else Just $
          PgArg (dropAround (== '"') name) (strip typ) (T.null def)
    parseRetType :: Text -> Text -> Bool -> Char -> RetType
    parseRetType schema name isSetOf typ
      | isSetOf = SetOf pgType
      | otherwise = Single pgType
      where
        qi = QualifiedIdentifier schema name
        pgType = case typ of
          'c' -> Composite qi
          'p' -> Pseudo name
          _ -> Scalar qi -- 'b'ase, 'd'omain, 'e'num, 'r'ange
    parseVolatility :: Char -> ProcVolatility
    parseVolatility 'i' = Immutable
    parseVolatility 's' = Stable
    parseVolatility 'v' = Volatile
    parseVolatility _ = Volatile -- should not happen, but be pessimistic
    sql = [q|
      SELECT p.proname as "proc_name",
             pg_get_function_arguments(p.oid) as "args",
             tn.nspname as "rettype_schema",
             coalesce(comp.relname, t.typname) as "rettype_name",
             p.proretset as "rettype_is_setof",
             t.typtype as "rettype_typ",
             p.provolatile
      FROM pg_proc p
        JOIN pg_namespace pn ON pn.oid = p.pronamespace
        JOIN pg_type t ON t.oid = p.prorettype
        JOIN pg_namespace tn ON tn.oid = t.typnamespace
        LEFT JOIN pg_class comp ON comp.oid = t.typrelid
      WHERE pn.nspname = $1|]
-- | Tables/views of a schema the current role can reach, with an
-- "insertable" flag derived from relkind, updatability and insert triggers.
accessibleTables :: H.Query Schema [Table]
accessibleTables =
  H.statement sql (HE.value HE.text) decodeTables True
  where
    sql = [q|
      select
        n.nspname as table_schema,
        relname as table_name,
        c.relkind = 'r' or (c.relkind IN ('v', 'f')) and (pg_relation_is_updatable(c.oid::regclass, false) & 8) = 8
        or (exists (
           select 1
           from pg_trigger
           where pg_trigger.tgrelid = c.oid and (pg_trigger.tgtype::integer & 69) = 69)
        ) as insertable
      from
        pg_class c
        join pg_namespace n on n.oid = c.relnamespace
      where
        c.relkind in ('v', 'r', 'm')
        and n.nspname = $1
        and (
          pg_has_role(c.relowner, 'USAGE'::text)
          or has_table_privilege(c.oid, 'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER'::text)
          or has_any_column_privilege(c.oid, 'SELECT, INSERT, UPDATE, REFERENCES'::text)
        )
      order by relname |]
-- | For a list of columns (all expected to live in one table), find the
-- groups of view columns that mirror the whole list.  Only groups with one
-- synonym per input column ('matchLength') and all coming from the same
-- table ('sameTable') are kept.
synonymousColumns :: [(Column,Column)] -> [Column] -> [[Column]]
synonymousColumns allSyns cols = synCols'
  where
    -- Restrict the synonym pairs to the table of the first column.
    syns = case headMay cols of
      Just firstCol -> sort $ filter ((== colTable firstCol) . colTable . fst) allSyns
      Nothing -> []
    -- One row per input column of its synonyms; transpose to get candidate
    -- column groups.
    synCols = transpose $ map (\c -> map snd $ filter ((== c) . fst) syns) cols
    synCols' = (filter sameTable . filter matchLength) synCols
    matchLength cs = length cols == length cs
    sameTable (c:cs) = all (\cc -> colTable c == colTable cc) (c:cs)
    sameTable [] = False
-- | Annotate each column with its foreign key, if a Child relation
-- containing the column exists.
addForeignKeys :: [Relation] -> [Column] -> [Column]
addForeignKeys rels = map addFk
  where
    addFk col = col { colFK = fk col }
    fk col = join $ relToFk col <$> find (lookupFn col) rels
    lookupFn :: Column -> Relation -> Bool
    lookupFn c Relation{relColumns=cs, relType=rty} = c `elem` cs && rty==Child
    -- The foreign column sits at the same position in relFColumns as the
    -- source column in relColumns.
    relToFk col Relation{relColumns=cols, relFColumns=colsF} = do
      pos <- elemIndex col cols
      colF <- atMay colsF pos
      return $ ForeignKey colF
-- | For every relation, also emit copies where either side is replaced by a
-- synonymous (view) column group.  The original relation is always kept.
addSynonymousRelations :: [(Column,Column)] -> [Relation] -> [Relation]
addSynonymousRelations _ [] = []
addSynonymousRelations syns (rel:rels) = rel : synRelsP ++ synRelsF ++ addSynonymousRelations syns rels
  where
    synRelsP = synRels (relColumns rel) (\t cs -> rel{relTable=t,relColumns=cs})
    synRelsF = synRels (relFColumns rel) (\t cs -> rel{relFTable=t,relFColumns=cs})
    synRels cols mapFn = map (\cs -> mapFn (colTable $ unsafeHead cs) cs) $ synonymousColumns syns cols
-- | For every relation, also emit the reversed (Parent) relation directly
-- before it, keeping the original list order otherwise.
addParentRelations :: [Relation] -> [Relation]
addParentRelations = concatMap withParent
  where
    withParent rel@(Relation t c ft fc _ _ _ _) =
      [Relation ft fc t c Parent Nothing Nothing Nothing, rel]
-- | Detect many-to-many relations: pairs of Child relations sharing the same
-- link table become a Many relation (and its mirror image).
addManyToManyRelations :: [Relation] -> [Relation]
addManyToManyRelations rels = rels ++ addMirrorRelation (mapMaybe link2Relation links)
  where
    -- All 2-element combinations of Child relations grouped by link table.
    links = join $ map (combinations 2) $ filter (not . null) $ groupWith groupFn $ filter ( (==Child). relType) rels
    groupFn :: Relation -> Text
    groupFn Relation{relTable=Table{tableSchema=s, tableName=t}} = s<>"_"<>t
    combinations k ns = filter ((k==).length) (subsequences ns)
    -- Each Many relation also gets its reverse direction.
    addMirrorRelation [] = []
    addMirrorRelation (rel@(Relation t c ft fc _ lt lc1 lc2):rels') = Relation ft fc t c Many lt lc2 lc1 : rel : addMirrorRelation rels'
    -- Only single-column, non-identical link column pairs form a relation.
    link2Relation [
      Relation{relTable=lt, relColumns=lc1, relFTable=t, relFColumns=c},
      Relation{ relColumns=lc2, relFTable=ft, relFColumns=fc}
      ]
      | lc1 /= lc2 && length lc1 == 1 && length lc2 == 1 = Just $ Relation t c ft fc Many (Just lt) (Just lc1) (Just lc2)
      | otherwise = Nothing
    link2Relation _ = Nothing
-- | Rewrite relations whose foreign side lives outside the requested schema
-- so that they point at a synonymous column group inside that schema, when
-- one exists.  Relations already targeting the schema are left alone.
--
-- Replaces the previous isJust/fromJust pair with a total 'case', so no
-- partial function is involved; behavior is unchanged.
raiseRelations :: Schema -> [(Column,Column)] -> [Relation] -> [Relation]
raiseRelations schema syns = map raiseRel
  where
    raiseRel rel
      | tableSchema table == schema = rel
      | otherwise = case newCols of
          -- unsafeHead is safe here: synonymousColumns never yields an
          -- empty group (matchLength requires length cols elements).
          Just cs -> rel { relFTable = (colTable . unsafeHead) cs
                         , relFColumns = cs }
          Nothing -> rel
      where
        cols = relFColumns rel
        table = relFTable rel
        -- First synonym group whose table is in the requested schema.
        newCols = listToMaybe $ filter ((== schema) . tableSchema . colTable . unsafeHead) (synonymousColumns syns cols)
-- | After every primary key, also emit a key for each synonymous view
-- column, so views expose the primary keys of their underlying tables.
synonymousPrimaryKeys :: [(Column,Column)] -> [PrimaryKey] -> [PrimaryKey]
synonymousPrimaryKeys syns = concatMap expand
  where
    expand key = key : map (toKey . snd) (synonymsOf key)
    synonymsOf key = filter (refersTo key . fst) syns
    refersTo key c = colTable c == pkTable key && colName c == pkName key
    toKey c = PrimaryKey { pkTable = colTable c, pkName = colName c }
-- | All tables, views and materialized views outside the system schemas,
-- together with the "insertable" heuristic (relkind / updatability /
-- insert triggers).
allTables :: H.Query () [Table]
allTables =
  H.statement sql HE.unit decodeTables True
  where
    sql = [q|
      SELECT
        n.nspname AS table_schema,
        c.relname AS table_name,
        c.relkind = 'r' OR (c.relkind IN ('v','f'))
        AND (pg_relation_is_updatable(c.oid::regclass, FALSE) & 8) = 8
        OR (EXISTS
          ( SELECT 1
            FROM pg_trigger
            WHERE pg_trigger.tgrelid = c.oid
            AND (pg_trigger.tgtype::integer & 69) = 69) ) AS insertable
      FROM pg_class c
      JOIN pg_namespace n ON n.oid = c.relnamespace
      WHERE c.relkind IN ('v','r','m')
        AND n.nspname NOT IN ('pg_catalog', 'information_schema')
      GROUP BY table_schema, table_name, insertable
      ORDER BY table_schema, table_name |]
-- | All columns of the given tables.  Uses a CTE copy of
-- information_schema.columns with the owner filter removed (see the
-- commented-out privilege checks in the SQL), joined with enum values.
allColumns :: [Table] -> H.Query () [Column]
allColumns tabs =
  H.statement sql HE.unit (decodeColumns tabs) True
  where
    sql = [q|
        SELECT DISTINCT
            info.table_schema AS schema,
            info.table_name AS table_name,
            info.column_name AS name,
            info.ordinal_position AS position,
            info.is_nullable::boolean AS nullable,
            info.data_type AS col_type,
            info.is_updatable::boolean AS updatable,
            info.character_maximum_length AS max_len,
            info.numeric_precision AS precision,
            info.column_default AS default_value,
            array_to_string(enum_info.vals, ',') AS enum
        FROM (
            /*
            -- CTE based on information_schema.columns to remove the owner filter
            */
            WITH columns AS (
                SELECT current_database()::information_schema.sql_identifier AS table_catalog,
                    nc.nspname::information_schema.sql_identifier AS table_schema,
                    c.relname::information_schema.sql_identifier AS table_name,
                    a.attname::information_schema.sql_identifier AS column_name,
                    a.attnum::information_schema.cardinal_number AS ordinal_position,
                    pg_get_expr(ad.adbin, ad.adrelid)::information_schema.character_data AS column_default,
                    CASE
                        WHEN a.attnotnull OR t.typtype = 'd'::"char" AND t.typnotnull THEN 'NO'::text
                        ELSE 'YES'::text
                    END::information_schema.yes_or_no AS is_nullable,
                    CASE
                        WHEN t.typtype = 'd'::"char" THEN
                        CASE
                            WHEN bt.typelem <> 0::oid AND bt.typlen = (-1) THEN 'ARRAY'::text
                            WHEN nbt.nspname = 'pg_catalog'::name THEN format_type(t.typbasetype, NULL::integer)
                            ELSE format_type(a.atttypid, a.atttypmod)
                        END
                        ELSE
                        CASE
                            WHEN t.typelem <> 0::oid AND t.typlen = (-1) THEN 'ARRAY'::text
                            WHEN nt.nspname = 'pg_catalog'::name THEN format_type(a.atttypid, NULL::integer)
                            ELSE format_type(a.atttypid, a.atttypmod)
                        END
                    END::information_schema.character_data AS data_type,
                    information_schema._pg_char_max_length(information_schema._pg_truetypid(a.*, t.*), information_schema._pg_truetypmod(a.*, t.*))::information_schema.cardinal_number AS character_maximum_length,
                    information_schema._pg_char_octet_length(information_schema._pg_truetypid(a.*, t.*), information_schema._pg_truetypmod(a.*, t.*))::information_schema.cardinal_number AS character_octet_length,
                    information_schema._pg_numeric_precision(information_schema._pg_truetypid(a.*, t.*), information_schema._pg_truetypmod(a.*, t.*))::information_schema.cardinal_number AS numeric_precision,
                    information_schema._pg_numeric_precision_radix(information_schema._pg_truetypid(a.*, t.*), information_schema._pg_truetypmod(a.*, t.*))::information_schema.cardinal_number AS numeric_precision_radix,
                    information_schema._pg_numeric_scale(information_schema._pg_truetypid(a.*, t.*), information_schema._pg_truetypmod(a.*, t.*))::information_schema.cardinal_number AS numeric_scale,
                    information_schema._pg_datetime_precision(information_schema._pg_truetypid(a.*, t.*), information_schema._pg_truetypmod(a.*, t.*))::information_schema.cardinal_number AS datetime_precision,
                    information_schema._pg_interval_type(information_schema._pg_truetypid(a.*, t.*), information_schema._pg_truetypmod(a.*, t.*))::information_schema.character_data AS interval_type,
                    NULL::integer::information_schema.cardinal_number AS interval_precision,
                    NULL::character varying::information_schema.sql_identifier AS character_set_catalog,
                    NULL::character varying::information_schema.sql_identifier AS character_set_schema,
                    NULL::character varying::information_schema.sql_identifier AS character_set_name,
                    CASE
                        WHEN nco.nspname IS NOT NULL THEN current_database()
                        ELSE NULL::name
                    END::information_schema.sql_identifier AS collation_catalog,
                    nco.nspname::information_schema.sql_identifier AS collation_schema,
                    co.collname::information_schema.sql_identifier AS collation_name,
                    CASE
                        WHEN t.typtype = 'd'::"char" THEN current_database()
                        ELSE NULL::name
                    END::information_schema.sql_identifier AS domain_catalog,
                    CASE
                        WHEN t.typtype = 'd'::"char" THEN nt.nspname
                        ELSE NULL::name
                    END::information_schema.sql_identifier AS domain_schema,
                    CASE
                        WHEN t.typtype = 'd'::"char" THEN t.typname
                        ELSE NULL::name
                    END::information_schema.sql_identifier AS domain_name,
                    current_database()::information_schema.sql_identifier AS udt_catalog,
                    COALESCE(nbt.nspname, nt.nspname)::information_schema.sql_identifier AS udt_schema,
                    COALESCE(bt.typname, t.typname)::information_schema.sql_identifier AS udt_name,
                    NULL::character varying::information_schema.sql_identifier AS scope_catalog,
                    NULL::character varying::information_schema.sql_identifier AS scope_schema,
                    NULL::character varying::information_schema.sql_identifier AS scope_name,
                    NULL::integer::information_schema.cardinal_number AS maximum_cardinality,
                    a.attnum::information_schema.sql_identifier AS dtd_identifier,
                    'NO'::character varying::information_schema.yes_or_no AS is_self_referencing,
                    'NO'::character varying::information_schema.yes_or_no AS is_identity,
                    NULL::character varying::information_schema.character_data AS identity_generation,
                    NULL::character varying::information_schema.character_data AS identity_start,
                    NULL::character varying::information_schema.character_data AS identity_increment,
                    NULL::character varying::information_schema.character_data AS identity_maximum,
                    NULL::character varying::information_schema.character_data AS identity_minimum,
                    NULL::character varying::information_schema.yes_or_no AS identity_cycle,
                    'NEVER'::character varying::information_schema.character_data AS is_generated,
                    NULL::character varying::information_schema.character_data AS generation_expression,
                    CASE
                        WHEN c.relkind = 'r'::"char" OR (c.relkind = ANY (ARRAY['v'::"char", 'f'::"char"])) AND pg_column_is_updatable(c.oid::regclass, a.attnum, false) THEN 'YES'::text
                        ELSE 'NO'::text
                    END::information_schema.yes_or_no AS is_updatable
                FROM pg_attribute a
                    LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid AND a.attnum = ad.adnum
                    JOIN (pg_class c
                    JOIN pg_namespace nc ON c.relnamespace = nc.oid) ON a.attrelid = c.oid
                    JOIN (pg_type t
                    JOIN pg_namespace nt ON t.typnamespace = nt.oid) ON a.atttypid = t.oid
                    LEFT JOIN (pg_type bt
                    JOIN pg_namespace nbt ON bt.typnamespace = nbt.oid) ON t.typtype = 'd'::"char" AND t.typbasetype = bt.oid
                    LEFT JOIN (pg_collation co
                    JOIN pg_namespace nco ON co.collnamespace = nco.oid) ON a.attcollation = co.oid AND (nco.nspname <> 'pg_catalog'::name OR co.collname <> 'default'::name)
                WHERE NOT pg_is_other_temp_schema(nc.oid) AND a.attnum > 0 AND NOT a.attisdropped AND (c.relkind = ANY (ARRAY['r'::"char", 'v'::"char", 'f'::"char"]))
                /*--AND (pg_has_role(c.relowner, 'USAGE'::text) OR has_column_privilege(c.oid, a.attnum, 'SELECT, INSERT, UPDATE, REFERENCES'::text))*/
            )
            SELECT
                table_schema,
                table_name,
                column_name,
                ordinal_position,
                is_nullable,
                data_type,
                is_updatable,
                character_maximum_length,
                numeric_precision,
                column_default,
                udt_name
            /*-- FROM information_schema.columns*/
            FROM columns
            WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
        ) AS info
        LEFT OUTER JOIN (
            SELECT
                n.nspname AS s,
                t.typname AS n,
                array_agg(e.enumlabel ORDER BY e.enumsortorder) AS vals
            FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
            GROUP BY s,n
        ) AS enum_info ON (info.udt_name = enum_info.n)
        ORDER BY schema, position |]
-- | Build a 'Column' from one raw result row, looking its table up in the
-- given list.  Returns Nothing when the table is unknown.
columnFromRow :: [Table] ->
                 (Text, Text, Text,
                  Int32, Bool, Text,
                  Bool, Maybe Int32, Maybe Int32,
                  Maybe Text, Maybe Text)
              -> Maybe Column
columnFromRow tabs (sch, tblName, colNm, pos, nul, typ, upd, len, prec, dflt, enm) =
  fmap mkColumn matchingTable
  where
    matchingTable = find sameTable tabs
    sameTable tbl = tableSchema tbl == sch && tableName tbl == tblName
    -- The FK slot is filled later by addForeignKeys, hence Nothing here.
    mkColumn tbl = Column tbl colNm pos nul typ upd len prec dflt enumVals Nothing
    -- The enum labels arrive as one comma separated string (or NULL).
    enumVals = maybe [] (split (== ',')) enm
-- | All foreign-key constraints between the given tables, with the source
-- and referenced column names collected into arrays per constraint.
allRelations :: [Table] -> [Column] -> H.Query () [Relation]
allRelations tabs cols =
  H.statement sql HE.unit (decodeRelations tabs cols) True
  where
    sql = [q|
      SELECT ns1.nspname AS table_schema,
             tab.relname AS table_name,
             column_info.cols AS columns,
             ns2.nspname AS foreign_table_schema,
             other.relname AS foreign_table_name,
             column_info.refs AS foreign_columns
      FROM pg_constraint,
         LATERAL (SELECT array_agg(cols.attname) AS cols,
                         array_agg(cols.attnum) AS nums,
                         array_agg(refs.attname) AS refs
                  FROM ( SELECT unnest(conkey) AS col, unnest(confkey) AS ref) k,
                       LATERAL (SELECT * FROM pg_attribute
                                WHERE attrelid = conrelid AND attnum = col)
                            AS cols,
                       LATERAL (SELECT * FROM pg_attribute
                                WHERE attrelid = confrelid AND attnum = ref)
                            AS refs)
              AS column_info,
         LATERAL (SELECT * FROM pg_namespace WHERE pg_namespace.oid = connamespace) AS ns1,
         LATERAL (SELECT * FROM pg_class WHERE pg_class.oid = conrelid) AS tab,
         LATERAL (SELECT * FROM pg_class WHERE pg_class.oid = confrelid) AS other,
         LATERAL (SELECT * FROM pg_namespace WHERE pg_namespace.oid = other.relnamespace) AS ns2
      WHERE confrelid != 0
      ORDER BY (conrelid, column_info.nums) |]
-- | Build a Child 'Relation' from one raw constraint row.  Fails (Nothing)
-- if any referenced table or column cannot be found in the given lists.
relationFromRow :: [Table] -> [Column] -> (Text, Text, [Text], Text, Text, [Text]) -> Maybe Relation
relationFromRow allTabs allCols (rs, rt, rcs, frs, frt, frcs) =
  Relation <$> table <*> cols <*> tableF <*> colsF <*> pure Child <*> pure Nothing <*> pure Nothing <*> pure Nothing
  where
    findTable s t = find (\tbl -> tableSchema tbl == s && tableName tbl == t) allTabs
    findCol s t c = find (\col -> tableSchema (colTable col) == s && tableName (colTable col) == t && colName col == c) allCols
    table = findTable rs rt
    tableF = findTable frs frt
    -- mapM over Maybe: all columns must resolve or the whole row is dropped.
    cols  = mapM (findCol rs rt) rcs
    colsF = mapM (findCol frs frt) frcs
-- | All primary-key columns of the given tables.  Uses CTE copies of
-- information_schema.table_constraints / key_column_usage with the owner
-- checks removed (see the commented-out privilege conditions).
allPrimaryKeys :: [Table] -> H.Query () [PrimaryKey]
allPrimaryKeys tabs =
  H.statement sql HE.unit (decodePks tabs) True
  where
    sql = [q|
    /*
    -- CTE to replace information_schema.table_constraints to remove owner limit
    */
    WITH tc AS (
        SELECT current_database()::information_schema.sql_identifier AS constraint_catalog,
            nc.nspname::information_schema.sql_identifier AS constraint_schema,
            c.conname::information_schema.sql_identifier AS constraint_name,
            current_database()::information_schema.sql_identifier AS table_catalog,
            nr.nspname::information_schema.sql_identifier AS table_schema,
            r.relname::information_schema.sql_identifier AS table_name,
            CASE c.contype
                WHEN 'c'::"char" THEN 'CHECK'::text
                WHEN 'f'::"char" THEN 'FOREIGN KEY'::text
                WHEN 'p'::"char" THEN 'PRIMARY KEY'::text
                WHEN 'u'::"char" THEN 'UNIQUE'::text
                ELSE NULL::text
            END::information_schema.character_data AS constraint_type,
            CASE
                WHEN c.condeferrable THEN 'YES'::text
                ELSE 'NO'::text
            END::information_schema.yes_or_no AS is_deferrable,
            CASE
                WHEN c.condeferred THEN 'YES'::text
                ELSE 'NO'::text
            END::information_schema.yes_or_no AS initially_deferred
        FROM pg_namespace nc,
            pg_namespace nr,
            pg_constraint c,
            pg_class r
        WHERE nc.oid = c.connamespace AND nr.oid = r.relnamespace AND c.conrelid = r.oid AND (c.contype <> ALL (ARRAY['t'::"char", 'x'::"char"])) AND r.relkind = 'r'::"char" AND NOT pg_is_other_temp_schema(nr.oid)
        /*--AND (pg_has_role(r.relowner, 'USAGE'::text) OR has_table_privilege(r.oid, 'INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER'::text) OR has_any_column_privilege(r.oid, 'INSERT, UPDATE, REFERENCES'::text))*/
        UNION ALL
        SELECT current_database()::information_schema.sql_identifier AS constraint_catalog,
            nr.nspname::information_schema.sql_identifier AS constraint_schema,
            (((((nr.oid::text || '_'::text) || r.oid::text) || '_'::text) || a.attnum::text) || '_not_null'::text)::information_schema.sql_identifier AS constraint_name,
            current_database()::information_schema.sql_identifier AS table_catalog,
            nr.nspname::information_schema.sql_identifier AS table_schema,
            r.relname::information_schema.sql_identifier AS table_name,
            'CHECK'::character varying::information_schema.character_data AS constraint_type,
            'NO'::character varying::information_schema.yes_or_no AS is_deferrable,
            'NO'::character varying::information_schema.yes_or_no AS initially_deferred
        FROM pg_namespace nr,
            pg_class r,
            pg_attribute a
        WHERE nr.oid = r.relnamespace AND r.oid = a.attrelid AND a.attnotnull AND a.attnum > 0 AND NOT a.attisdropped AND r.relkind = 'r'::"char" AND NOT pg_is_other_temp_schema(nr.oid)
        /*--AND (pg_has_role(r.relowner, 'USAGE'::text) OR has_table_privilege(r.oid, 'INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER'::text) OR has_any_column_privilege(r.oid, 'INSERT, UPDATE, REFERENCES'::text))*/
    ),
    /*
    -- CTE to replace information_schema.key_column_usage to remove owner limit
    */
    kc AS (
        SELECT current_database()::information_schema.sql_identifier AS constraint_catalog,
            ss.nc_nspname::information_schema.sql_identifier AS constraint_schema,
            ss.conname::information_schema.sql_identifier AS constraint_name,
            current_database()::information_schema.sql_identifier AS table_catalog,
            ss.nr_nspname::information_schema.sql_identifier AS table_schema,
            ss.relname::information_schema.sql_identifier AS table_name,
            a.attname::information_schema.sql_identifier AS column_name,
            (ss.x).n::information_schema.cardinal_number AS ordinal_position,
            CASE
                WHEN ss.contype = 'f'::"char" THEN information_schema._pg_index_position(ss.conindid, ss.confkey[(ss.x).n])
                ELSE NULL::integer
            END::information_schema.cardinal_number AS position_in_unique_constraint
        FROM pg_attribute a,
            ( SELECT r.oid AS roid,
                r.relname,
                r.relowner,
                nc.nspname AS nc_nspname,
                nr.nspname AS nr_nspname,
                c.oid AS coid,
                c.conname,
                c.contype,
                c.conindid,
                c.confkey,
                c.confrelid,
                information_schema._pg_expandarray(c.conkey) AS x
               FROM pg_namespace nr,
                pg_class r,
                pg_namespace nc,
                pg_constraint c
              WHERE nr.oid = r.relnamespace AND r.oid = c.conrelid AND nc.oid = c.connamespace AND (c.contype = ANY (ARRAY['p'::"char", 'u'::"char", 'f'::"char"])) AND r.relkind = 'r'::"char" AND NOT pg_is_other_temp_schema(nr.oid)) ss
        WHERE ss.roid = a.attrelid AND a.attnum = (ss.x).x AND NOT a.attisdropped
        /*--AND (pg_has_role(ss.relowner, 'USAGE'::text) OR has_column_privilege(ss.roid, a.attnum, 'SELECT, INSERT, UPDATE, REFERENCES'::text))*/
    )
    SELECT
        kc.table_schema,
        kc.table_name,
        kc.column_name
    FROM
        /*
        --information_schema.table_constraints tc,
        --information_schema.key_column_usage kc
        */
        tc, kc
    WHERE
        tc.constraint_type = 'PRIMARY KEY' AND
        kc.table_name = tc.table_name AND
        kc.table_schema = tc.table_schema AND
        kc.constraint_name = tc.constraint_name AND
        kc.table_schema NOT IN ('pg_catalog', 'information_schema') |]
-- | Build a 'PrimaryKey' from a (schema, table, column) row; Nothing when
-- the table is not in the given list.
pkFromRow :: [Table] -> (Schema, Text, Text) -> Maybe PrimaryKey
pkFromRow tabs (sch, tblName, keyName) =
  fmap (`PrimaryKey` keyName) (find matches tabs)
  where matches tbl = tableSchema tbl == sch && tableName tbl == tblName
-- | Map view columns back to the table columns they expose, by matching the
-- view definition text with a regular expression.
allSynonyms :: [Column] -> H.Query () [(Column,Column)]
allSynonyms cols =
  H.statement sql HE.unit (decodeSynonyms cols) True
  where
    -- query explanation at https://gist.github.com/ruslantalpa/2eab8c930a65e8043d8f
    sql = [q|
    with view_columns as (
        select
            c.oid as view_oid,
            a.attname::information_schema.sql_identifier as column_name
        from pg_attribute a
        join pg_class c on a.attrelid = c.oid
        join pg_namespace nc on c.relnamespace = nc.oid
        where
            not pg_is_other_temp_schema(nc.oid)
            and a.attnum > 0
            and not a.attisdropped
            and (c.relkind = 'v'::"char")
            and nc.nspname not in ('information_schema', 'pg_catalog')
    ),
    view_column_usage as (
        select distinct
            v.oid as view_oid,
            nv.nspname::information_schema.sql_identifier as view_schema,
            v.relname::information_schema.sql_identifier as view_name,
            nt.nspname::information_schema.sql_identifier as table_schema,
            t.relname::information_schema.sql_identifier as table_name,
            a.attname::information_schema.sql_identifier as column_name,
            pg_get_viewdef(v.oid)::information_schema.character_data as view_definition
        from pg_namespace nv
        join pg_class v on nv.oid = v.relnamespace
        join pg_depend dv on v.oid = dv.refobjid
        join pg_depend dt on dv.objid = dt.objid
        join pg_class t on dt.refobjid = t.oid
        join pg_namespace nt on t.relnamespace = nt.oid
        join pg_attribute a on t.oid = a.attrelid and dt.refobjsubid = a.attnum
        where
            nv.nspname not in ('information_schema', 'pg_catalog')
            and v.relkind = 'v'::"char"
            and dv.refclassid = 'pg_class'::regclass::oid
            and dv.classid = 'pg_rewrite'::regclass::oid
            and dv.deptype = 'i'::"char"
            and dv.refobjid <> dt.refobjid
            and dt.classid = 'pg_rewrite'::regclass::oid
            and dt.refclassid = 'pg_class'::regclass::oid
            and (t.relkind = any (array['r'::"char", 'v'::"char", 'f'::"char"]))
    ),
    candidates as (
        select
            vcu.*,
            (
                select case when match is not null then coalesce(match[8], match[7], match[4]) end
                from regexp_matches(
                    CONCAT('SELECT ', SPLIT_PART(vcu.view_definition, 'SELECT', 2)),
                    CONCAT('SELECT.*?((',vcu.table_name,')|(\w+))\.(', vcu.column_name, ')(\s+AS\s+("([^"]+)"|([^, \n\t]+)))?.*?FROM.*?(',vcu.table_schema,'\.|)(\2|',vcu.table_name,'\s+(as\s)?\3)'),
                    'nsi'
                ) match
            ) as view_column_name
        from view_column_usage as vcu
    )
    select
        c.table_schema,
        c.table_name,
        c.column_name as table_column_name,
        c.view_schema,
        c.view_name,
        c.view_column_name
    from view_columns as vc, candidates as c
    where
        vc.view_oid = c.view_oid
        and vc.column_name = c.view_column_name
    order by c.view_schema, c.view_name, c.table_name, c.view_column_name
    |]
-- | Pair up a source column with its view synonym; Nothing when either end
-- cannot be resolved against the given column list.
synonymFromRow :: [Column] -> (Text,Text,Text,Text,Text,Text) -> Maybe (Column,Column)
synonymFromRow allCols (schA,tabA,nameA,schB,tabB,nameB) = do
  source <- lookupCol schA tabA nameA
  alias  <- lookupCol schB tabB nameB
  return (source, alias)
  where
    lookupCol s t c = find isMatch allCols
      where isMatch col = tableSchema (colTable col) == s
                       && tableName (colTable col) == t
                       && colName col == c
| Skyfold/postgrest | src/PostgREST/DbStructure.hs | mit | 32,378 | 0 | 19 | 8,892 | 4,189 | 2,199 | 1,990 | 232 | 7 |
import Distribution.Simple
import Distribution.Simple.Setup ( haddockDistPref, Flag(..))
import Distribution.Verbosity ( normal )
import Distribution.Simple.Utils ( copyFiles )
import Distribution.Text ( display )
import System.FilePath ((</>))
import System.Directory
-- Ugly hack: mirrors the output-directory logic of
-- Distribution.Simple.Haddock.  The dist prefix defaults to "." when the
-- haddock flag is unset.
haddockOutputDir flags pkg =
  root </> "doc" </> "html" </> display (packageName pkg)
  where
    root = case haddockDistPref flags of
             NoFlag   -> "."
             Flag dir -> dir
-- | Standard Cabal main with a post-haddock hook: copy doc/split.png into
-- the generated haddock directory, then run the default hook.
main = defaultMainWithHooks simpleUserHooks {
    postHaddock = \args flags pkg lbi -> do
        copyFiles normal (haddockOutputDir flags pkg) [("doc","split.png")]
        postHaddock simpleUserHooks args flags pkg lbi
    }
| lpsmith/split-channel | Setup.hs | mit | 810 | 1 | 13 | 190 | 206 | 112 | 94 | 16 | 2 |
#!/usr/bin/env runhaskell
-- | Ask for the user's name and print a New Year greeting.
main = do putStrLn "What is your name ?"
          n <- getLine
          putStrLn ("Happy New Year " ++ n)
-- main = putStrLn "Hello World!"
-- Run the three demo actions in sequence.
megaAct = act1 >> act2 >> act3
-- Three demo actions, each printing one fixed line.
act1 = putStrLn "This is a string on a line"
act2 = putStrLn "This is another string on a line"
act3 = putStrLn "This is the last string i promise you"
| UCSD-PL/230-web | static/hello.hs | mit | 383 | 0 | 9 | 119 | 72 | 34 | 38 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-autoscalingplans-scalingplan-applicationsource.html
module Stratosphere.ResourceProperties.AutoScalingPlansScalingPlanApplicationSource where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.AutoScalingPlansScalingPlanTagFilter
-- | Full data type definition for
-- AutoScalingPlansScalingPlanApplicationSource. See
-- 'autoScalingPlansScalingPlanApplicationSource' for a more convenient
-- constructor.
data AutoScalingPlansScalingPlanApplicationSource =
  AutoScalingPlansScalingPlanApplicationSource
  { _autoScalingPlansScalingPlanApplicationSourceCloudFormationStackARN :: Maybe (Val Text)
    -- ^ ARN of the CloudFormation stack used as the application source.
  , _autoScalingPlansScalingPlanApplicationSourceTagFilters :: Maybe [AutoScalingPlansScalingPlanTagFilter]
    -- ^ Tag filters selecting the application's resources.
  } deriving (Show, Eq)
instance ToJSON AutoScalingPlansScalingPlanApplicationSource where
  -- Absent (Nothing) fields are omitted from the serialized object.
  toJSON AutoScalingPlansScalingPlanApplicationSource{..} =
    object $
    catMaybes
    [ fmap (("CloudFormationStackARN",) . toJSON) _autoScalingPlansScalingPlanApplicationSourceCloudFormationStackARN
    , fmap (("TagFilters",) . toJSON) _autoScalingPlansScalingPlanApplicationSourceTagFilters
    ]
-- | Constructor for 'AutoScalingPlansScalingPlanApplicationSource' containing
-- required fields as arguments.
-- Both fields are optional, so the smart constructor starts them at Nothing;
-- set them afterwards through the lenses below.
autoScalingPlansScalingPlanApplicationSource
  :: AutoScalingPlansScalingPlanApplicationSource
autoScalingPlansScalingPlanApplicationSource =
  AutoScalingPlansScalingPlanApplicationSource
  { _autoScalingPlansScalingPlanApplicationSourceCloudFormationStackARN = Nothing
  , _autoScalingPlansScalingPlanApplicationSourceTagFilters = Nothing
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-autoscalingplans-scalingplan-applicationsource.html#cfn-autoscalingplans-scalingplan-applicationsource-cloudformationstackarn
-- | Lens for the optional CloudFormationStackARN field.
aspspasCloudFormationStackARN :: Lens' AutoScalingPlansScalingPlanApplicationSource (Maybe (Val Text))
aspspasCloudFormationStackARN = lens _autoScalingPlansScalingPlanApplicationSourceCloudFormationStackARN (\s a -> s { _autoScalingPlansScalingPlanApplicationSourceCloudFormationStackARN = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-autoscalingplans-scalingplan-applicationsource.html#cfn-autoscalingplans-scalingplan-applicationsource-tagfilters
-- | Lens for the optional TagFilters field.
aspspasTagFilters :: Lens' AutoScalingPlansScalingPlanApplicationSource (Maybe [AutoScalingPlansScalingPlanTagFilter])
aspspasTagFilters = lens _autoScalingPlansScalingPlanApplicationSourceTagFilters (\s a -> s { _autoScalingPlansScalingPlanApplicationSourceTagFilters = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/AutoScalingPlansScalingPlanApplicationSource.hs | mit | 2,776 | 0 | 12 | 206 | 266 | 155 | 111 | 28 | 1 |
import Char
-- | Sum of the decimal digits of an integer's absolute value.
-- Taking 'abs' first keeps the minus sign away from 'digitToInt', which
-- would otherwise error on negative input; for non-negative arguments the
-- behavior is unchanged.
sumDigits :: Integer -> Int
sumDigits n = sum (map digitToInt (show (abs n)))
-- Digit sum of 100 factorial (Project Euler problem 20).
result :: Int
result = (sumDigits . product) [1 .. 100]
main = putStrLn $ show $ result
| nbartlomiej/haskeuler | 020/problem-020.hs | mit | 171 | 0 | 8 | 35 | 71 | 37 | 34 | 6 | 1 |
import qualified Data.Map as Map
ns = [7,6..1]
-- All contiguous windows of length n, sliding one element at a time.
chunks n xs
  | length xs < n = []
  | otherwise     = take n xs : chunks n (tail xs)
-- | For every window (lengths given by 'ns') of the input string, map the
-- window's prefix ('init') to a frequency count of the character that
-- follows it ('last').  Counts for equal prefixes are merged with (+).
rootmap str =
  Map.fromListWith (Map.unionWith (+)) t
  where
    t = [(init s, Map.singleton (last s) 1) | s <- chks str]
    chks str = concat [chunks x str | x <- ns]
-- Running sum over the map's values (ascending key order); each value is
-- replaced by (previous-sum + 1, running-sum).
mapAccumFsum = Map.mapAccum step 0
  where
    step acc v = (acc + v, (acc + 1, acc + v))
treeV r = Map.showTreeWith (\k x -> show (k,x)) True False r
filterMe (d,m) c = Map.filter (\(a,b) -> a<=c && c<=b) m
-- | Demo driver: build the prefix map for a sample string, dump it in a few
-- representations, then filter the root entry by positions 1..15.
main = do
  putStrLn "rootmap = "
  let r = rootmap "mare imbrium"
  print r
  putStrLn "treeV = "
  putStrLn (treeV r)
  -- Counts for the empty prefix, i.e. plain character frequencies.
  let lu = r Map.! ""
  putStrLn "lu = "
  print lu
  --let ma = Map.mapAccum fsum 0 lu
  let ma = mapAccumFsum lu
  putStrLn " ma = "
  print ma
  let fm = map (Map.keys . filterMe ma) [1..15]
  putStrLn "fm = "
  print fm
| jsavatgy/dit-doo | code/fold-map-02.hs | gpl-2.0 | 866 | 2 | 13 | 236 | 457 | 221 | 236 | 28 | 1 |
module Main
( main )
where
{- External Library Module Imports -}
{- Standard Library Module Imports -}
{- Local Module Imports -}
import Smc.Cli
( srmcManual )
import Ipc.Cli
( printManual )
{- End of Imports -}
{-
This is the main function which when the file is run as program will generate
latex source for the manual.
-}
-- | Emit the LaTeX source of the srmc manual on standard output.
main :: IO ()
main = putStrLn (printManual srmcManual)
| allanderek/ipclib | Smc/CliDoc.hs | gpl-2.0 | 420 | 0 | 7 | 93 | 62 | 37 | 25 | 10 | 1 |
{-# OPTIONS -w -O0 #-}
{- |
Module : ATC/DevGraph.der.hs
Description : generated Typeable, ShATermLG instances
Copyright : (c) DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable(overlapping Typeable instances)
Automatic derivation of instances via DrIFT-rule Typeable, ShATermLG
for the type(s):
'Static.DevGraph.NodeSig'
'Static.DevGraph.MaybeNode'
'Static.DevGraph.Renamed'
'Static.DevGraph.MaybeRestricted'
'Static.DevGraph.DGOrigin'
'Static.DevGraph.DGNodeInfo'
'Static.DevGraph.DGNodeLab'
'Static.DevGraph.Fitted'
'Static.DevGraph.DGLinkOrigin'
'Static.DevGraph.DGLinkType'
'Static.DevGraph.DGLinkLab'
'Static.DevGraph.GenSig'
'Static.DevGraph.ExtGenSig'
'Static.DevGraph.ExtViewSig'
'Static.DevGraph.UnitSig'
'Static.DevGraph.ImpUnitSigOrSig'
'Static.DevGraph.RefSig'
'Static.DevGraph.BranchSig'
'Static.DevGraph.GlobalEntry'
'Static.DevGraph.DGChange'
'Static.DevGraph.HistElem'
'Static.DevGraph.RTNodeType'
'Static.DevGraph.RTNodeLab'
'Static.DevGraph.RTLinkType'
'Static.DevGraph.RTLinkLab'
'Static.DevGraph.DiagNodeLab'
'Static.DevGraph.DiagLinkLab'
'Static.DevGraph.Diag'
'Static.DevGraph.DGraph'
-}
{-
Generated by 'genRules' (automatic rule generation for DrIFT). Don't touch!!
dependency files:
Static/DevGraph.hs
-}
module ATC.DevGraph () where
import ATC.AS_Library
import ATC.Grothendieck
import ATC.XGraph
import ATerm.Lib
import Common.AS_Annotation
import Common.Consistency
import Common.GlobalAnnotations
import Common.IRI
import Common.Id
import Common.LibName
import Common.Result
import Control.Concurrent.MVar
import Data.Graph.Inductive.Basic
import Data.Graph.Inductive.Graph as Graph
import Data.Graph.Inductive.Query.DFS
import Data.List
import Data.Maybe
import Data.Ord
import Data.Typeable
import Logic.Comorphism
import Logic.ExtSign
import Logic.Grothendieck
import Logic.Logic
import Logic.Prover
import Static.DevGraph
import Static.DgUtils
import Static.GTheory
import Syntax.AS_Library
import Syntax.AS_Structured
import qualified Common.Lib.Graph as Tree
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import qualified Common.Lib.SizedList as SizedList
import qualified Common.OrderedMap as OMap
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Static.XGraph as XGraph
{-! for Static.DevGraph.NodeSig derive : Typeable !-}
{-! for Static.DevGraph.MaybeNode derive : Typeable !-}
{-! for Static.DevGraph.Renamed derive : Typeable !-}
{-! for Static.DevGraph.MaybeRestricted derive : Typeable !-}
{-! for Static.DevGraph.DGOrigin derive : Typeable !-}
{-! for Static.DevGraph.DGNodeInfo derive : Typeable !-}
{-! for Static.DevGraph.DGNodeLab derive : Typeable !-}
{-! for Static.DevGraph.Fitted derive : Typeable !-}
{-! for Static.DevGraph.DGLinkOrigin derive : Typeable !-}
{-! for Static.DevGraph.DGLinkType derive : Typeable !-}
{-! for Static.DevGraph.DGLinkLab derive : Typeable !-}
{-! for Static.DevGraph.GenSig derive : Typeable !-}
{-! for Static.DevGraph.ExtGenSig derive : Typeable !-}
{-! for Static.DevGraph.ExtViewSig derive : Typeable !-}
{-! for Static.DevGraph.UnitSig derive : Typeable !-}
{-! for Static.DevGraph.ImpUnitSigOrSig derive : Typeable !-}
{-! for Static.DevGraph.RefSig derive : Typeable !-}
{-! for Static.DevGraph.BranchSig derive : Typeable !-}
{-! for Static.DevGraph.GlobalEntry derive : Typeable !-}
{-! for Static.DevGraph.DGChange derive : Typeable !-}
{-! for Static.DevGraph.HistElem derive : Typeable !-}
{-! for Static.DevGraph.RTNodeType derive : Typeable !-}
{-! for Static.DevGraph.RTNodeLab derive : Typeable !-}
{-! for Static.DevGraph.RTLinkType derive : Typeable !-}
{-! for Static.DevGraph.RTLinkLab derive : Typeable !-}
{-! for Static.DevGraph.DiagNodeLab derive : Typeable !-}
{-! for Static.DevGraph.DiagLinkLab derive : Typeable !-}
{-! for Static.DevGraph.Diag derive : Typeable !-}
{-! for Static.DevGraph.DGraph derive : Typeable !-}
{-! for Static.DevGraph.NodeSig derive : ShATermLG !-}
{-! for Static.DevGraph.MaybeNode derive : ShATermLG !-}
{-! for Static.DevGraph.Renamed derive : ShATermLG !-}
{-! for Static.DevGraph.MaybeRestricted derive : ShATermLG !-}
{-! for Static.DevGraph.DGOrigin derive : ShATermLG !-}
{-! for Static.DevGraph.DGNodeInfo derive : ShATermLG !-}
{-! for Static.DevGraph.DGNodeLab derive : ShATermLG !-}
{-! for Static.DevGraph.Fitted derive : ShATermLG !-}
{-! for Static.DevGraph.DGLinkOrigin derive : ShATermLG !-}
{-! for Static.DevGraph.DGLinkType derive : ShATermLG !-}
{-! for Static.DevGraph.DGLinkLab derive : ShATermLG !-}
{-! for Static.DevGraph.GenSig derive : ShATermLG !-}
{-! for Static.DevGraph.ExtGenSig derive : ShATermLG !-}
{-! for Static.DevGraph.ExtViewSig derive : ShATermLG !-}
{-! for Static.DevGraph.UnitSig derive : ShATermLG !-}
{-! for Static.DevGraph.ImpUnitSigOrSig derive : ShATermLG !-}
{-! for Static.DevGraph.RefSig derive : ShATermLG !-}
{-! for Static.DevGraph.BranchSig derive : ShATermLG !-}
{-! for Static.DevGraph.GlobalEntry derive : ShATermLG !-}
{-! for Static.DevGraph.DGChange derive : ShATermLG !-}
{-! for Static.DevGraph.HistElem derive : ShATermLG !-}
{-! for Static.DevGraph.RTNodeType derive : ShATermLG !-}
{-! for Static.DevGraph.RTNodeLab derive : ShATermLG !-}
{-! for Static.DevGraph.RTLinkType derive : ShATermLG !-}
{-! for Static.DevGraph.RTLinkLab derive : ShATermLG !-}
{-! for Static.DevGraph.DiagNodeLab derive : ShATermLG !-}
{-! for Static.DevGraph.DiagLinkLab derive : ShATermLG !-}
{-! for Static.DevGraph.Diag derive : ShATermLG !-}
{-! for Static.DevGraph.DGraph derive : ShATermLG !-}
| nevrenato/Hets_Fork | ATC/DevGraph.der.hs | gpl-2.0 | 5,689 | 0 | 4 | 640 | 293 | 216 | 77 | 39 | 0 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances #-}
{- |
Module : $Header$
Description : Comorphism from CommonLogic to CommonLogic
Copyright : (c) Eugen Kuksa, Uni Bremen 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : eugenk@informatik.uni-bremen.de
Stability : experimental (not complete: typleclass-instances missing)
Portability : non-portable (via Logic.Logic)
Translating comorphism from CommonLogic to CommonLogic in order to eliminate
modules.
-}
module Comorphisms.CommonLogicModuleElimination (
CommonLogicModuleElimination (..)
, eliminateModules
)
where
import qualified Data.Set as Set
import CommonLogic.Tools
import Common.Id
import Logic.Logic as Logic
import Logic.Comorphism
import Common.ProofTree
import Common.Result
import qualified Common.AS_Annotation as AS_Anno
-- Common Logic
import CommonLogic.AS_CommonLogic
import qualified CommonLogic.Logic_CommonLogic as Logic
import qualified CommonLogic.Sign as Sign
import qualified CommonLogic.Symbol as Symbol
import qualified CommonLogic.Morphism as Mor
import qualified CommonLogic.Sublogic as Sl
data CommonLogicModuleElimination = CommonLogicModuleElimination deriving Show
-- | The comorphism's name as registered in the logic graph.
instance Language CommonLogicModuleElimination where
  language_name CommonLogicModuleElimination = "CommonLogicModuleElimination"
-- | Module elimination as an endo-comorphism on CommonLogic: every
--   syntactic component (specs, sentences, symbols, signatures,
--   morphisms, proof trees) stays within CommonLogic; only sentences
--   and theories are rewritten (see 'mapSentence' / 'mapTheory').
instance Comorphism
    CommonLogicModuleElimination -- comorphism
    Logic.CommonLogic -- lid domain
    Sl.CommonLogicSL -- sublogics codomain
    BASIC_SPEC -- Basic spec domain
    TEXT_META -- sentence domain
    SYMB_ITEMS -- symb_items
    SYMB_MAP_ITEMS -- symbol map items domain
    Sign.Sign -- signature domain
    Mor.Morphism -- morphism domain
    Symbol.Symbol -- symbol domain
    Symbol.Symbol -- rawsymbol domain
    ProofTree -- proof tree codomain
    Logic.CommonLogic -- lid domain
    Sl.CommonLogicSL -- sublogics codomain
    BASIC_SPEC -- Basic spec domain
    TEXT_META -- sentence domain
    SYMB_ITEMS -- symb_items
    SYMB_MAP_ITEMS -- symbol map items domain
    Sign.Sign -- signature domain
    Mor.Morphism -- morphism domain
    Symbol.Symbol -- symbol domain
    Symbol.Symbol -- rawsymbol domain
    ProofTree -- proof tree codomain
    where
      sourceLogic CommonLogicModuleElimination = Logic.CommonLogic
      sourceSublogic CommonLogicModuleElimination = Sl.top
      targetLogic CommonLogicModuleElimination = Logic.CommonLogic
      mapSublogic CommonLogicModuleElimination = Just . mapSub
      map_theory CommonLogicModuleElimination = mapTheory
      map_morphism CommonLogicModuleElimination = mapMor
      map_sentence CommonLogicModuleElimination = mapSentence
      --hasCommonLogicModuleElimination_model_expansion = True -- TODO: check if it is really True
--hasCommonLogicModuleElimination_model_expansion = True -- TODO: check if it is really True
-- | Module elimination does not change the sublogic.
mapSub :: Sl.CommonLogicSL -> Sl.CommonLogicSL
mapSub = id
-- | Morphisms are passed through unchanged.
mapMor :: Mor.Morphism -> Result Mor.Morphism
mapMor = return
-- | Sentence translation: eliminate modules; the signature is unused.
mapSentence :: Sign.Sign -> TEXT_META -> Result TEXT_META
mapSentence _ = return . eliminateModules
-------------------------------------------------------------------------------
-- MODULE ELIMINATION --
-------------------------------------------------------------------------------
-- | Theory translation: keep the signature, eliminate the modules of
--   every named sentence while preserving its name.
mapTheory :: (Sign.Sign, [AS_Anno.Named TEXT_META])
             -> Result (Sign.Sign, [AS_Anno.Named TEXT_META])
mapTheory (sig, namedTexts) = return (sig, map elim namedTexts)
  where
    -- rebuild the named sentence around the module-free text
    elim nt = AS_Anno.makeNamed (AS_Anno.senAttr nt)
                                (eliminateModules (AS_Anno.sentence nt))
-- | Result is a CL-equivalent text without modules.  The whole text is
--   collapsed into a single sentence; @newName@ is produced by
--   'freeName' so it does not clash with any individual constant
--   occurring in the text.
eliminateModules :: TEXT_META -> TEXT_META
eliminateModules tm =
  tm { getText = Text [Sentence (me_text newName [] $ getText tm)] nullRange }
  where (newName, _) = freeName ("item", 0) (indvC_text $ getText tm)
-- NOTE: ignores importations (they are filtered out before translation)
me_text :: NAME -> [NAME] -> TEXT -> SENTENCE
me_text newName modules (Named_text _ t _) = me_text newName modules t
me_text newName modules (Text phrs _) =
  me_phrases newName modules (filter notImportation phrs)
  where
    notImportation (Importation _) = False
    notImportation _               = True
-- Table 2: R5a - R5b, ignoring importations and comments
-- | A single phrase is translated directly; otherwise the module- and
--   sentence-phrases are translated and conjoined (comment-texts are
--   dropped in the multi-phrase case).  Exhaustive pattern matching
--   replaces the original partial 'head' under a 'length' test.
me_phrases :: NAME -> [NAME] -> [PHRASE] -> SENTENCE
me_phrases newName modules [phr] = me_phrase newName modules phr
me_phrases newName modules phrs =
  Bool_sent
    (Conjunction (map (me_phrase newName modules) (filter modOrSen phrs)))
    nullRange
  where
    -- keep only phrases that contribute to the conjunction
    modOrSen (Module _)   = True
    modOrSen (Sentence _) = True
    modOrSen _            = False
-- | Converts a phrase to a sentence; comment-texts become
--   comment-sentences.  Importations must already have been filtered
--   out (see 'me_text'), so reaching one here is a programming error —
--   replaced the original bare 'undefined' with a diagnostic 'error'.
me_phrase :: NAME -> [NAME] -> PHRASE -> SENTENCE
me_phrase newName modules p =
  case p of
    Module m -> me_module newName modules m
    Sentence s -> me_sentence newName modules s
    Comment_text c txt r -> Comment_sent c (me_text newName modules txt) r
    Importation _ ->
      error "me_phrase: unexpected Importation (filtered out by me_text)"
-- | Rewrite a sentence relative to the modules currently in scope.
--   With no enclosing modules the sentence is returned untouched;
--   otherwise only boolean and quantified sentences need recursion —
--   all other forms are left as-is (Table 2: R1a - R2b).
me_sentence :: NAME -> [NAME] -> SENTENCE -> SENTENCE
me_sentence newName modules sen =
  if null modules then sen else -- this keeps the sentence simple
  case sen of
    Bool_sent bs _ -> Bool_sent (me_boolsent newName modules bs) nullRange
    Quant_sent qs _ -> Quant_sent (me_quantsent newName modules qs) nullRange
    x -> x -- Table 2: R1a - R2b
-- Table 2: R2a - R2e
-- | Rewrite a boolean sentence by recursing into its constituents.
me_boolsent :: NAME -> [NAME] -> BOOL_SENT -> BOOL_SENT
me_boolsent newName modules bs = case bs of
  Conjunction sens  -> Conjunction (map rec sens)
  Disjunction sens  -> Disjunction (map rec sens)
  Negation sen      -> Negation (rec sen)
  Implication a b   -> Implication (rec a) (rec b)
  Biconditional a b -> Biconditional (rec a) (rec b)
  where
    rec = me_sentence newName modules
-- Table 2: R3a - R3b
-- | Relativise a quantified sentence: the body becomes an implication
--   whose antecedent asserts module membership of the bound names.
--   NOTE(review): both quantifiers use Implication as the guard, exactly
--   as the original did; for existentials the usual relativisation is a
--   conjunction — confirm against Table 2 R3b before changing.
me_quantsent :: NAME -> [NAME] -> QUANT_SENT -> QUANT_SENT
me_quantsent newName modules qs =
  case qs of
    Universal noss sen   -> Universal noss (guarded noss sen)
    Existential noss sen -> Existential noss (guarded noss sen)
  where
    -- shared body: membership guard implies the rewritten sentence
    guarded noss sen =
      Bool_sent
        (Implication (anticedent modules noss)
                     (me_sentence newName modules sen))
        nullRange
-- | Membership guard over all modules in scope: a single guard for one
--   module, otherwise a conjunction of one guard per module.
anticedent :: [NAME] -> [NAME_OR_SEQMARK] -> SENTENCE
anticedent [m] noss = anticedent1 m noss
anticedent ms noss =
  Bool_sent (Conjunction [ anticedent1 m noss | m <- ms ]) nullRange
-- | Membership guard for one module: a single atom for one bound name,
--   otherwise a conjunction of one atom per name.
anticedent1 :: NAME -> [NAME_OR_SEQMARK] -> SENTENCE
anticedent1 m [nos] = Atom_sent (Atom (Name_term m) [nos2termseq nos]) nullRange
anticedent1 m noss =
  Bool_sent (Conjunction [ anticedent1 m [nos] | nos <- noss ]) nullRange
-- | Embed a name or sequence marker into a term sequence.
nos2termseq :: NAME_OR_SEQMARK -> TERM_SEQ
nos2termseq (Name n)    = Term_seq (Name_term n)
nos2termseq (SeqMark s) = Seq_marks s
-- Table 2 R4
-- | Translate a module: the body is rewritten with the module name
--   pushed onto the scope, conjoined with existence/membership axioms
--   for the module and for every individual constant of the body.
--   The exclusion variant additionally negates membership for each
--   excluded name.  (Both branches share most of their construction.)
me_module :: NAME -> [NAME] -> MODULE -> SENTENCE
me_module newName modules m =
  case m of
    Mod n t _ -> Bool_sent (Conjunction (
        (me_text newName (n:modules) t)
        : (ex_conj newName (n:modules))
        : (map (ex_conj_indvC newName (n:modules)) $ Set.elems $ indvC_text t)
      )) nullRange
    Mod_ex n excl t _ -> Bool_sent (Conjunction (
        (me_text newName (n:modules) t)
        : (ex_conj newName (n:modules))
        : (map (ex_conj_indvC newName (n:modules)) $ Set.elems $ indvC_text t)
        ++ (map (not_ex_conj_excl newName (n:modules)) excl)
      )) nullRange
-- Table 2 R4: each line in the conjunction
-- | Existence axiom: some @n@ is a member of every module in scope.
ex_conj :: NAME -> [NAME] -> SENTENCE
ex_conj n modules = Quant_sent (Existential [Name n] body) nullRange
  where
    body = Bool_sent (Conjunction memberships) nullRange
    memberships = map (modNameToPredicate n) modules
-- Table 2 R4: each line with indvC-elements in the conjunction
-- | Existence axiom for a constant: some @n@ equal to @c@ is a member
--   of every module in scope.
ex_conj_indvC :: NAME -> [NAME] -> NAME -> SENTENCE
ex_conj_indvC n modules c = Quant_sent (Existential [Name n] body) nullRange
  where
    body = Bool_sent (Conjunction (eqn : memberships)) nullRange
    eqn = Atom_sent (Equation (Name_term n) (Name_term c)) nullRange
    memberships = map (modNameToPredicate n) modules
-- Table 2 R4: each line with excluded elements in the conjunction
-- | Negated membership axiom for an excluded name.
not_ex_conj_excl :: NAME -> [NAME] -> NAME -> SENTENCE
not_ex_conj_excl n modules c = Bool_sent (Negation member) nullRange
  where member = ex_conj_indvC n modules c
-- Table 2 R4: makes a Predicate out of the module name
-- | The atom @m(n)@: name @n@ is a member of module @m@.
modNameToPredicate :: NAME -> NAME -> SENTENCE
modNameToPredicate n m = Atom_sent atom nullRange
  where atom = Atom (Name_term m) [Term_seq (Name_term n)]
-- what if the module name already occurs as a predicate?
| nevrenato/Hets_Fork | Comorphisms/CommonLogicModuleElimination.hs | gpl-2.0 | 9,335 | 0 | 21 | 2,399 | 2,216 | 1,157 | 1,059 | 171 | 5 |
{-|
Re-export the modules of the hledger-web program.
-}
module Hledger.Web (
module Hledger.Web.Foundation,
module Hledger.Web.Application,
module Hledger.Web.Handlers,
module Hledger.Web.Options,
module Hledger.Web.Settings,
module Hledger.Web.Settings.StaticFiles,
tests_Hledger_Web
)
where
import Test.HUnit
import Hledger.Web.Foundation
import Hledger.Web.Application
import Hledger.Web.Handlers
import Hledger.Web.Options
import Hledger.Web.Settings
import Hledger.Web.Settings.StaticFiles
-- | Aggregate HUnit test suite for the hledger-web modules.  Every
--   per-module suite is currently commented out, so this is an empty
--   'TestList'.
tests_Hledger_Web :: Test
tests_Hledger_Web = TestList
 [
 -- tests_Hledger_Web_Foundation
 -- ,tests_Hledger_Web_Application
 -- ,tests_Hledger_Web_EmbeddedFiles
 -- ,tests_Hledger_Web_Handlers
 -- ,tests_Hledger_Web_Settings
 -- ,tests_Hledger_Web_Settings_StaticFiles
 ]
| Lainepress/hledger | hledger-web/Hledger/Web.hs | gpl-3.0 | 935 | 0 | 6 | 234 | 118 | 81 | 37 | 18 | 1 |
module ShowFunctions where
-- | Print 'show' applied to a spread of values: negative/zero/positive
--   numbers, characters, booleans, tuples up to ten components, strings
--   and ranges.  'showFunction', 'showInt', 'showChar' and 'showIO' are
--   presumably supplied by this compiler's prelude — they are not
--   standard Haskell (TODO confirm).
main :: IO ()
main = putStr $ unlines
  [ show (-412)
  , show 0
  , show 34
  , show 1000
  , show ' '
  , show '\n'
  , show 'A'
  , show True
  , show False
  , show ()
  , show (1,2)
  , show (1,2,3)
  , show (1,2,3,4)
  , show (1,2,3,4,5)
  , show (1,2,3,4,5,6)
  , show (1,2,3,4,5,6,7)
  , show (1,2,3,4,5,6,7,8)
  , show (1,2,3,4,5,6,7,8,9)
  , show (1,2,3,4,5,6,7,8,9,10)
  , showFunction showInt showInt id
  , showIO showChar getChar
  , showString "hello"
  , showString ""
  , showString "abc\ndef"
  , show [1..10]
  , show [5]
  , show []
  ]
| Helium4Haskell/helium | test/correct/ShowFunctions.hs | gpl-3.0 | 647 | 0 | 10 | 217 | 373 | 216 | 157 | 30 | 1 |
{-|
The Hledger.Data library allows parsing and querying of C++ ledger-style
journal files. It generally provides a compatible subset of C++ ledger's
functionality. This package re-exports all the Hledger.Data.* modules
(except UTF8, which requires an explicit import.)
-}
module Hledger.Data (
module Hledger.Data.Account,
module Hledger.Data.AccountName,
module Hledger.Data.Amount,
module Hledger.Data.Commodity,
module Hledger.Data.Dates,
module Hledger.Data.Journal,
module Hledger.Data.Ledger,
module Hledger.Data.MarketPrice,
module Hledger.Data.Period,
module Hledger.Data.Posting,
module Hledger.Data.RawOptions,
module Hledger.Data.StringFormat,
module Hledger.Data.Timeclock,
module Hledger.Data.Transaction,
module Hledger.Data.AutoTransaction,
module Hledger.Data.Types,
tests_Hledger_Data
)
where
import Test.HUnit
import Hledger.Data.Account
import Hledger.Data.AccountName
import Hledger.Data.Amount
import Hledger.Data.Commodity
import Hledger.Data.Dates
import Hledger.Data.Journal
import Hledger.Data.Ledger
import Hledger.Data.MarketPrice
import Hledger.Data.Period
import Hledger.Data.Posting
import Hledger.Data.RawOptions
import Hledger.Data.StringFormat
import Hledger.Data.Timeclock
import Hledger.Data.Transaction
import Hledger.Data.AutoTransaction
import Hledger.Data.Types
-- | Aggregate HUnit test suite for the Hledger.Data.* modules; a few
--   per-module suites are currently commented out.
tests_Hledger_Data :: Test
tests_Hledger_Data = TestList
    [
     tests_Hledger_Data_Account
    ,tests_Hledger_Data_AccountName
    ,tests_Hledger_Data_Amount
    ,tests_Hledger_Data_Commodity
    ,tests_Hledger_Data_Journal
    ,tests_Hledger_Data_MarketPrice
    ,tests_Hledger_Data_Ledger
    ,tests_Hledger_Data_Posting
    -- ,tests_Hledger_Data_RawOptions
    -- ,tests_Hledger_Data_StringFormat
    ,tests_Hledger_Data_Timeclock
    ,tests_Hledger_Data_Transaction
    -- ,tests_Hledger_Data_Types
    ]
| ony/hledger | hledger-lib/Hledger/Data.hs | gpl-3.0 | 2,092 | 0 | 6 | 471 | 271 | 185 | 86 | 48 | 1 |
{-# LANGUAGE TupleSections #-}
module Data.IORef.Utils
( atomicModifyIORef_
) where
import Data.IORef
-- | Atomically apply @f@ to the contents of the 'IORef', discarding any
--   result.  Like 'atomicModifyIORef', the application of @f@ is lazy
--   (a thunk is installed into the reference).
atomicModifyIORef_ :: IORef a -> (a -> a) -> IO ()
atomicModifyIORef_ var f = atomicModifyIORef var (\x -> (f x, ()))
| da-x/lamdu | bottlelib/Data/IORef/Utils.hs | gpl-3.0 | 225 | 0 | 9 | 43 | 74 | 41 | 33 | 6 | 1 |
module Main where
import Application.PVP2LibLinear.ArgsParser as AP
import Application.PVP2LibLinear.Conduit
import Application.PVP2LibLinear.Utility
import Classifier.LibLinear
import Control.Monad as M
import Data.Conduit
import Data.Conduit.List as CL
import PetaVision.PVPFile.IO
import PetaVision.Data.Pooling
import PetaVision.Utility.Parallel as PA
import Prelude as P
import System.Environment
import Control.Monad.Trans.Resource
-- | Train a LibLinear classifier from PVP activation files, optionally
--   pooling the features on the CPU first.  Replaces the original
--   @if null args then error … else return ()@ with idiomatic 'when'
--   ('Control.Monad' is imported unqualified via @as M@).
main :: IO ()
main = do
  args <- getArgs
  -- fail fast when invoked without any arguments
  when (null args) $ error "run with --help to see options."
  params <- parseArgs args
  print params
  header <- M.mapM (readPVPHeader . P.head) (pvpFile params)
  batchHeader <- M.mapM readPVPHeader . P.head . pvpFile $ params
  let source =
        P.map
          (\filePath ->
             (sequenceSources . P.map pvpFileSource $ filePath) =$= CL.concat)
          (pvpFile params)
      -- total number of training examples across all batch files
      nbands = P.sum . P.map nBands $ batchHeader
      dims = dimOffset header
      trainParams =
        TrainParams
          { trainSolver = L2R_L2LOSS_SVC_DUAL -- L2R_L2LOSS_SVC_DUAL -- L2R_LR
          , trainC = c params
          , trainNumExamples = nbands
          , trainFeatureIndexMax =
              -- last layer's offset plus its feature volume
              (\((nf, ny, nx), n) -> n + nf * ny * nx) . P.last $ dims
          , trainModel = modelName params
          }
  -- print trainParams
  if poolingFlag params
    then do
      putStrLn $ "Using CPU for " ++ show (poolingType params) ++ " Pooling"
      runResourceT $
        sequenceSources
          (P.zipWith
             (\s offset ->
                s =$=
                poolVecConduit
                  (ParallelParams (AP.numThread params) (AP.batchSize params))
                  (poolingType params)
                  (poolingSize params)
                  offset)
             source
             (snd . unzip $ dims)) $$
        concatPooledConduit =$
        trainSink trainParams
                  (labelFile params)
                  (AP.batchSize params)
                  (findC params)
    else
      runResourceT $
      sequenceSources source $$ concatConduit (snd . unzip $ dims) =$
      trainSink trainParams
                (labelFile params)
                (AP.batchSize params)
                (findC params)
| XinhuaZhang/PetaVisionHaskell | Application/PVP2LibLinear/Train.hs | gpl-3.0 | 2,886 | 0 | 24 | 1,371 | 592 | 316 | 276 | 65 | 3 |
module Main where
import Control.Monad.State
import System.Random
import GeneA.Genetic
import GeneA.Genetic.Operators
import GeneA.Population
import GeneA.Prob
-- GA configuration.
size = 100   -- chromosome length in bits (see 'mkGenetic' and 'cross')
sRate = 0.25 -- selection rate passed to truncation selection
mRate = 0.01 -- per-bit mutation probability (see 'mute')
pSize = 100  -- population size passed to 'evolve'
target = 100 -- target number of set bits (see 'fitness')
-- | Run the GA: report parameters, evolve a population, then print the
--   best chromosome found (or report extinction / a maxed-out run).
main = do
  putStr "Getting random number generator..."
  st <- getStdGen
  putStrLn "Ok"
  putStrLn "Running GA with the following parameters:"
  putStr "  Target:          " >> print target
  putStr "  Mutation Rate:   " >> print mRate
  putStr "  Selection Rate:  " >> print sRate
  putStr "  Population Size: " >> print pSize
  putStrLn "Running Evolution Process..."
  -- populace size 1000 is treated as "maxed out" — presumably a cap
  -- enforced inside 'evolve'; TODO confirm against GeneA.Genetic
  let population = evolve pSize st in
      case length $ populace population of
        0    -> putStrLn "Population Died..."
        1000 -> do putStrLn "Population maxed out. Best solution found:"
                   putStr "  "
                   print (optimal population :: Chromo)
        _    -> do putStrLn "Optimal Solution Found:"
                   putStr "  "
                   print (optimal population :: Chromo)
data Chromo = Chromo { base :: [Bool] }
-- | Render the chromosome as a list of 0/1 digits.
instance Show Chromo where
  show c = show [ if bit then 1 else 0 | bit <- base c ]
-- | Chromosomes compare equal when their fitness values do.
instance Eq Chromo where
  x == y = fitness x == fitness y
-- | Chromosomes are ordered by fitness.
instance Ord Chromo where
  compare x y = fitness x `compare` fitness y
instance Genetic Chromo where
  -- fitness = 1 / (target - popcount): grows as the number of set bits
  -- approaches 'target'; at exactly 'target' the denominator is zero
  fitness = (1/) . fromIntegral . (target -) . sum . map fromEnum . base
  crossover = cross
  mutate = mute
  -- a fresh chromosome is 'size' random bits
  mkGenetic = fmap Chromo $ getRandoms size
  -- keep the top 'sRate' fraction of the population
  selection = truncation sRate
-- | One-point crossover: pick a random cut point and swap the tails of
--   the two parents, returning both children followed by both parents.
cross :: (Chromo, Chromo) -> Prob (Chromo, Chromo, Chromo, Chromo)
cross (p1, p2) = do
  cut <- getRandomR (0, size - 1)
  let (h1, t1) = splitAt cut (base p1)
      (h2, t2) = splitAt cut (base p2)
  return (Chromo (h1 ++ t2), Chromo (h2 ++ t1), p1, p2)
-- | Point mutation: independently flip each bit with probability 'mRate'.
mute :: Chromo -> Prob Chromo
mute (Chromo bits) = do
  ps <- (getRandomRs (0, 1) size :: Prob [Float])
  return (Chromo (zipWith flipIf bits ps))
  where
    flipIf b p = if p <= mRate then not b else b
| dglmoore/genea | src/examples/BinGen.hs | gpl-3.0 | 2,049 | 0 | 16 | 571 | 727 | 365 | 362 | 56 | 3 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.S3
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Amazon Simple Storage Service is storage for the Internet. Amazon S3 has
-- a simple web services interface that you can use to store and retrieve
-- any amount of data, at any time, from anywhere on the web. It gives any
-- developer access to the same highly scalable, reliable, fast,
-- inexpensive data storage infrastructure that Amazon uses to run its own
-- global network of web sites. The service aims to maximize benefits of
-- scale and to pass those benefits on to developers.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonS3/latest/API/Welcome.html AWS API Reference>
module Network.AWS.S3
(
-- * Service Configuration
s3
-- * Errors
-- $errors
-- ** ObjectAlreadyInActiveTierError
, _ObjectAlreadyInActiveTierError
-- ** BucketAlreadyExists
, _BucketAlreadyExists
-- ** ObjectNotInActiveTierError
, _ObjectNotInActiveTierError
-- ** NoSuchUpload
, _NoSuchUpload
-- ** NoSuchBucket
, _NoSuchBucket
-- ** NoSuchKey
, _NoSuchKey
-- * Waiters
-- $waiters
-- ** ObjectNotExists
, objectNotExists
-- ** BucketExists
, bucketExists
-- ** ObjectExists
, objectExists
-- ** BucketNotExists
, bucketNotExists
-- * Operations
-- $operations
-- ** PutBucketRequestPayment
, module Network.AWS.S3.PutBucketRequestPayment
-- ** PutObject
, module Network.AWS.S3.PutObject
-- ** DeleteObject
, module Network.AWS.S3.DeleteObject
-- ** PutBucketLogging
, module Network.AWS.S3.PutBucketLogging
-- ** ListBuckets
, module Network.AWS.S3.ListBuckets
-- ** DeleteBucket
, module Network.AWS.S3.DeleteBucket
-- ** CreateBucket
, module Network.AWS.S3.CreateBucket
-- ** DeleteBucketTagging
, module Network.AWS.S3.DeleteBucketTagging
-- ** PutObjectACL
, module Network.AWS.S3.PutObjectACL
-- ** PutBucketTagging
, module Network.AWS.S3.PutBucketTagging
-- ** GetBucketLocation
, module Network.AWS.S3.GetBucketLocation
-- ** GetBucketNotificationConfiguration
, module Network.AWS.S3.GetBucketNotificationConfiguration
-- ** GetObject
, module Network.AWS.S3.GetObject
-- ** PutBucketReplication
, module Network.AWS.S3.PutBucketReplication
-- ** GetBucketWebsite
, module Network.AWS.S3.GetBucketWebsite
-- ** GetBucketRequestPayment
, module Network.AWS.S3.GetBucketRequestPayment
-- ** DeleteBucketReplication
, module Network.AWS.S3.DeleteBucketReplication
-- ** GetBucketLifecycle
, module Network.AWS.S3.GetBucketLifecycle
-- ** ListObjectVersions (Paginated)
, module Network.AWS.S3.ListObjectVersions
-- ** HeadBucket
, module Network.AWS.S3.HeadBucket
-- ** PutBucketLifecycle
, module Network.AWS.S3.PutBucketLifecycle
-- ** DeleteBucketLifecycle
, module Network.AWS.S3.DeleteBucketLifecycle
-- ** CreateMultipartUpload
, module Network.AWS.S3.CreateMultipartUpload
-- ** UploadPart
, module Network.AWS.S3.UploadPart
-- ** GetBucketReplication
, module Network.AWS.S3.GetBucketReplication
-- ** PutBucketWebsite
, module Network.AWS.S3.PutBucketWebsite
-- ** DeleteBucketWebsite
, module Network.AWS.S3.DeleteBucketWebsite
-- ** CompleteMultipartUpload
, module Network.AWS.S3.CompleteMultipartUpload
-- ** ListMultipartUploads (Paginated)
, module Network.AWS.S3.ListMultipartUploads
-- ** ListObjects (Paginated)
, module Network.AWS.S3.ListObjects
-- ** DeleteBucketPolicy
, module Network.AWS.S3.DeleteBucketPolicy
-- ** AbortMultipartUpload
, module Network.AWS.S3.AbortMultipartUpload
-- ** PutBucketPolicy
, module Network.AWS.S3.PutBucketPolicy
-- ** GetObjectTorrent
, module Network.AWS.S3.GetObjectTorrent
-- ** DeleteObjects
, module Network.AWS.S3.DeleteObjects
-- ** PutBucketNotificationConfiguration
, module Network.AWS.S3.PutBucketNotificationConfiguration
-- ** GetBucketVersioning
, module Network.AWS.S3.GetBucketVersioning
-- ** DeleteBucketCORS
, module Network.AWS.S3.DeleteBucketCORS
-- ** PutBucketCORS
, module Network.AWS.S3.PutBucketCORS
-- ** GetBucketCORS
, module Network.AWS.S3.GetBucketCORS
-- ** GetObjectACL
, module Network.AWS.S3.GetObjectACL
-- ** RestoreObject
, module Network.AWS.S3.RestoreObject
-- ** HeadObject
, module Network.AWS.S3.HeadObject
-- ** PutBucketVersioning
, module Network.AWS.S3.PutBucketVersioning
-- ** GetBucketTagging
, module Network.AWS.S3.GetBucketTagging
-- ** CopyObject
, module Network.AWS.S3.CopyObject
-- ** GetBucketPolicy
, module Network.AWS.S3.GetBucketPolicy
-- ** GetBucketLogging
, module Network.AWS.S3.GetBucketLogging
-- ** GetBucketACL
, module Network.AWS.S3.GetBucketACL
-- ** ListParts (Paginated)
, module Network.AWS.S3.ListParts
-- ** UploadPartCopy
, module Network.AWS.S3.UploadPartCopy
-- ** PutBucketACL
, module Network.AWS.S3.PutBucketACL
-- * Types
-- ** Re-exported Types
, module Network.AWS.S3.Internal
-- ** BucketCannedACL
, BucketCannedACL (..)
-- ** BucketLogsPermission
, BucketLogsPermission (..)
-- ** BucketVersioningStatus
, BucketVersioningStatus (..)
-- ** EncodingType
, EncodingType (..)
-- ** Event
, Event (..)
-- ** ExpirationStatus
, ExpirationStatus (..)
-- ** FilterRuleName
, FilterRuleName (..)
-- ** MFADelete
, MFADelete (..)
-- ** MFADeleteStatus
, MFADeleteStatus (..)
-- ** MetadataDirective
, MetadataDirective (..)
-- ** ObjectCannedACL
, ObjectCannedACL (..)
-- ** ObjectStorageClass
, ObjectStorageClass (..)
-- ** ObjectVersionStorageClass
, ObjectVersionStorageClass (..)
-- ** Payer
, Payer (..)
-- ** Permission
, Permission (..)
-- ** Protocol
, Protocol (..)
-- ** ReplicationRuleStatus
, ReplicationRuleStatus (..)
-- ** ReplicationStatus
, ReplicationStatus (..)
-- ** RequestCharged
, RequestCharged (..)
-- ** RequestPayer
, RequestPayer (..)
-- ** ServerSideEncryption
, ServerSideEncryption (..)
-- ** StorageClass
, StorageClass (..)
-- ** TransitionStorageClass
, TransitionStorageClass (..)
-- ** Type
, Type (..)
-- ** AccessControlPolicy
, AccessControlPolicy
, accessControlPolicy
, acpGrants
, acpOwner
-- ** Bucket
, Bucket
, bucket
, bCreationDate
, bName
-- ** BucketLoggingStatus
, BucketLoggingStatus
, bucketLoggingStatus
, blsLoggingEnabled
-- ** CORSConfiguration
, CORSConfiguration
, corsConfiguration
, ccCORSRules
-- ** CORSRule
, CORSRule
, corsRule
, crAllowedMethods
, crMaxAgeSeconds
, crAllowedHeaders
, crAllowedOrigins
, crExposeHeaders
-- ** CommonPrefix
, CommonPrefix
, commonPrefix
, cpPrefix
-- ** CompletedMultipartUpload
, CompletedMultipartUpload
, completedMultipartUpload
, cmuParts
-- ** CompletedPart
, CompletedPart
, completedPart
, cpPartNumber
, cpETag
-- ** Condition
, Condition
, condition
, cKeyPrefixEquals
, cHTTPErrorCodeReturnedEquals
-- ** CopyObjectResult
, CopyObjectResult
, copyObjectResult
, corETag
, corLastModified
-- ** CopyPartResult
, CopyPartResult
, copyPartResult
, cprETag
, cprLastModified
-- ** CreateBucketConfiguration
, CreateBucketConfiguration
, createBucketConfiguration
, cbcLocationConstraint
-- ** Delete
, Delete
, delete'
, dQuiet
, dObjects
-- ** DeleteMarkerEntry
, DeleteMarkerEntry
, deleteMarkerEntry
, dmeVersionId
, dmeIsLatest
, dmeOwner
, dmeKey
, dmeLastModified
-- ** DeletedObject
, DeletedObject
, deletedObject
, dVersionId
, dDeleteMarker
, dDeleteMarkerVersionId
, dKey
-- ** Destination
, Destination
, destination
, dBucket
-- ** ErrorDocument
, ErrorDocument
, errorDocument
, edKey
-- ** FilterRule
, FilterRule
, filterRule
, frValue
, frName
-- ** Grant
, Grant
, grant
, gPermission
, gGrantee
-- ** Grantee
, Grantee
, grantee
, gURI
, gEmailAddress
, gDisplayName
, gId
, gType
-- ** IndexDocument
, IndexDocument
, indexDocument
, idSuffix
-- ** Initiator
, Initiator
, initiator
, iDisplayName
, iId
-- ** LambdaFunctionConfiguration
, LambdaFunctionConfiguration
, lambdaFunctionConfiguration
, lfcId
, lfcFilter
, lfcLambdaFunctionARN
, lfcEvents
-- ** LifecycleConfiguration
, LifecycleConfiguration
, lifecycleConfiguration
, lcRules
-- ** LifecycleExpiration
, LifecycleExpiration
, lifecycleExpiration
, leDays
, leDate
-- ** LoggingEnabled
, LoggingEnabled
, loggingEnabled
, leTargetBucket
, leTargetGrants
, leTargetPrefix
-- ** MultipartUpload
, MultipartUpload
, multipartUpload
, muInitiated
, muInitiator
, muOwner
, muKey
, muStorageClass
, muUploadId
-- ** NoncurrentVersionExpiration
, NoncurrentVersionExpiration
, noncurrentVersionExpiration
, nveNoncurrentDays
-- ** NoncurrentVersionTransition
, NoncurrentVersionTransition
, noncurrentVersionTransition
, nvtNoncurrentDays
, nvtStorageClass
-- ** NotificationConfiguration
, NotificationConfiguration
, notificationConfiguration
, ncQueueConfigurations
, ncTopicConfigurations
, ncLambdaFunctionConfigurations
-- ** NotificationConfigurationFilter
, NotificationConfigurationFilter
, notificationConfigurationFilter
, ncfKey
-- ** Object
, Object
, object'
, oOwner
, oETag
, oSize
, oKey
, oStorageClass
, oLastModified
-- ** ObjectIdentifier
, ObjectIdentifier
, objectIdentifier
, oiVersionId
, oiKey
-- ** ObjectVersion
, ObjectVersion
, objectVersion
, ovETag
, ovVersionId
, ovSize
, ovIsLatest
, ovOwner
, ovKey
, ovStorageClass
, ovLastModified
-- ** Owner
, Owner
, owner
, oDisplayName
, oId
-- ** Part
, Part
, part
, pETag
, pSize
, pPartNumber
, pLastModified
-- ** QueueConfiguration
, QueueConfiguration
, queueConfiguration
, qcId
, qcFilter
, qcQueueARN
, qcEvents
-- ** Redirect
, Redirect
, redirect
, rHostName
, rProtocol
, rHTTPRedirectCode
, rReplaceKeyWith
, rReplaceKeyPrefixWith
-- ** RedirectAllRequestsTo
, RedirectAllRequestsTo
, redirectAllRequestsTo
, rartProtocol
, rartHostName
-- ** ReplicationConfiguration
, ReplicationConfiguration
, replicationConfiguration
, rcRole
, rcRules
-- ** ReplicationRule
, ReplicationRule
, replicationRule
, rrId
, rrPrefix
, rrStatus
, rrDestination
-- ** RequestPaymentConfiguration
, RequestPaymentConfiguration
, requestPaymentConfiguration
, rpcPayer
-- ** RestoreRequest
, RestoreRequest
, restoreRequest
, rrDays
-- ** RoutingRule
, RoutingRule
, routingRule
, rrCondition
, rrRedirect
-- ** Rule
, Rule
, rule
, rNoncurrentVersionExpiration
, rTransition
, rExpiration
, rNoncurrentVersionTransition
, rId
, rPrefix
, rStatus
-- ** S3KeyFilter
, S3KeyFilter
, s3KeyFilter
, skfFilterRules
-- ** S3ServiceError
, S3ServiceError
, s3ServiceError
, sseVersionId
, sseKey
, sseCode
, sseMessage
-- ** Tag
, Tag
, tag
, tagKey
, tagValue
-- ** Tagging
, Tagging
, tagging
, tTagSet
-- ** TargetGrant
, TargetGrant
, targetGrant
, tgPermission
, tgGrantee
-- ** TopicConfiguration
, TopicConfiguration
, topicConfiguration
, tcId
, tcFilter
, tcTopicARN
, tcEvents
-- ** Transition
, Transition
, transition
, tDays
, tDate
, tStorageClass
-- ** VersioningConfiguration
, VersioningConfiguration
, versioningConfiguration
, vcStatus
, vcMFADelete
-- ** WebsiteConfiguration
, WebsiteConfiguration
, websiteConfiguration
, wcRedirectAllRequestsTo
, wcErrorDocument
, wcIndexDocument
, wcRoutingRules
) where
import Network.AWS.S3.AbortMultipartUpload
import Network.AWS.S3.CompleteMultipartUpload
import Network.AWS.S3.CopyObject
import Network.AWS.S3.CreateBucket
import Network.AWS.S3.CreateMultipartUpload
import Network.AWS.S3.DeleteBucket
import Network.AWS.S3.DeleteBucketCORS
import Network.AWS.S3.DeleteBucketLifecycle
import Network.AWS.S3.DeleteBucketPolicy
import Network.AWS.S3.DeleteBucketReplication
import Network.AWS.S3.DeleteBucketTagging
import Network.AWS.S3.DeleteBucketWebsite
import Network.AWS.S3.DeleteObject
import Network.AWS.S3.DeleteObjects
import Network.AWS.S3.GetBucketACL
import Network.AWS.S3.GetBucketCORS
import Network.AWS.S3.GetBucketLifecycle
import Network.AWS.S3.GetBucketLocation
import Network.AWS.S3.GetBucketLogging
import Network.AWS.S3.GetBucketNotificationConfiguration
import Network.AWS.S3.GetBucketPolicy
import Network.AWS.S3.GetBucketReplication
import Network.AWS.S3.GetBucketRequestPayment
import Network.AWS.S3.GetBucketTagging
import Network.AWS.S3.GetBucketVersioning
import Network.AWS.S3.GetBucketWebsite
import Network.AWS.S3.GetObject
import Network.AWS.S3.GetObjectACL
import Network.AWS.S3.GetObjectTorrent
import Network.AWS.S3.HeadBucket
import Network.AWS.S3.HeadObject
import Network.AWS.S3.Internal
import Network.AWS.S3.ListBuckets
import Network.AWS.S3.ListMultipartUploads
import Network.AWS.S3.ListObjects
import Network.AWS.S3.ListObjectVersions
import Network.AWS.S3.ListParts
import Network.AWS.S3.PutBucketACL
import Network.AWS.S3.PutBucketCORS
import Network.AWS.S3.PutBucketLifecycle
import Network.AWS.S3.PutBucketLogging
import Network.AWS.S3.PutBucketNotificationConfiguration
import Network.AWS.S3.PutBucketPolicy
import Network.AWS.S3.PutBucketReplication
import Network.AWS.S3.PutBucketRequestPayment
import Network.AWS.S3.PutBucketTagging
import Network.AWS.S3.PutBucketVersioning
import Network.AWS.S3.PutBucketWebsite
import Network.AWS.S3.PutObject
import Network.AWS.S3.PutObjectACL
import Network.AWS.S3.RestoreObject
import Network.AWS.S3.Types
import Network.AWS.S3.UploadPart
import Network.AWS.S3.UploadPartCopy
import Network.AWS.S3.Waiters
{- $errors
Error matchers are designed for use with the functions provided by
<http://hackage.haskell.org/package/lens/docs/Control-Exception-Lens.html Control.Exception.Lens>.
This allows catching (and rethrowing) service specific errors returned
by 'S3'.
-}
{- $operations
Some AWS operations return results that are incomplete and require subsequent
requests in order to obtain the entire result set. The process of sending
subsequent requests to continue where a previous request left off is called
pagination. For example, the 'ListObjects' operation of Amazon S3 returns up to
1000 objects at a time, and you must send subsequent requests with the
appropriate Marker in order to retrieve the next page of results.
Operations that have an 'AWSPager' instance can transparently perform subsequent
requests, correctly setting Markers and other request facets to iterate through
the entire result set of a truncated API operation. Operations which support
this have an additional note in the documentation.
Many operations have the ability to filter results on the server side. See the
individual operation parameters for details.
-}
{- $waiters
Waiters poll by repeatedly sending a request until some remote success condition
configured by the 'Wait' specification is fulfilled. The 'Wait' specification
determines how many attempts should be made, in addition to delay and retry strategies.
-}
| fmapfmapfmap/amazonka | amazonka-s3/gen/Network/AWS/S3.hs | mpl-2.0 | 17,331 | 0 | 5 | 4,353 | 1,944 | 1,415 | 529 | 398 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.SSLCerts.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a particular SSL certificate. Does not include the private key
-- (required for usage). The private key must be saved from the response to
-- initial creation.
--
-- /See:/ <https://cloud.google.com/sql/docs/reference/latest Cloud SQL Administration API Reference> for @sql.sslCerts.get@.
module Network.Google.Resource.SQL.SSLCerts.Get
(
-- * REST Resource
SSLCertsGetResource
-- * Creating a Request
, sslCertsGet
, SSLCertsGet
-- * Request Lenses
, scgProject
, scgSha1Fingerprint
, scgInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.sslCerts.get@ method which the
-- 'SSLCertsGet' request conforms to.
--
-- Encodes the REST path
-- @GET sql/v1beta4/projects/{project}/instances/{instance}/sslCerts/{sha1Fingerprint}?alt=json@,
-- returning an 'SSLCert'.  The 'Capture' order here (project, instance,
-- sha1Fingerprint) fixes the argument order used by 'requestClient' below.
type SSLCertsGetResource =
     "sql" :>
       "v1beta4" :>
         "projects" :>
           Capture "project" Text :>
             "instances" :>
               Capture "instance" Text :>
                 "sslCerts" :>
                   Capture "sha1Fingerprint" Text :>
                     QueryParam "alt" AltJSON :> Get '[JSON] SSLCert
-- | Retrieves a particular SSL certificate. Does not include the private key
-- (required for usage). The private key must be saved from the response to
-- initial creation.
--
-- /See:/ 'sslCertsGet' smart constructor.
data SSLCertsGet = SSLCertsGet'
    { _scgProject :: !Text -- ^ project ID owning the instance
    , _scgSha1Fingerprint :: !Text -- ^ SHA-1 fingerprint identifying the certificate
    , _scgInstance :: !Text -- ^ Cloud SQL instance ID (without the project ID)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Smart constructor for 'SSLCertsGet'.  All three fields are required;
-- adjust them afterwards through the corresponding lenses if needed:
--
-- * 'scgProject'
--
-- * 'scgSha1Fingerprint'
--
-- * 'scgInstance'
sslCertsGet
    :: Text -- ^ 'scgProject'
    -> Text -- ^ 'scgSha1Fingerprint'
    -> Text -- ^ 'scgInstance'
    -> SSLCertsGet
sslCertsGet project fingerprint inst =
  SSLCertsGet'
    { _scgProject = project
    , _scgSha1Fingerprint = fingerprint
    , _scgInstance = inst
    }
-- | Project ID of the project that contains the instance.
scgProject :: Lens' SSLCertsGet Text
scgProject = lens _scgProject setField
  where setField s v = s {_scgProject = v}

-- | Sha1 FingerPrint.
scgSha1Fingerprint :: Lens' SSLCertsGet Text
scgSha1Fingerprint = lens _scgSha1Fingerprint setField
  where setField s v = s {_scgSha1Fingerprint = v}

-- | Cloud SQL instance ID. This does not include the project ID.
scgInstance :: Lens' SSLCertsGet Text
scgInstance = lens _scgInstance setField
  where setField s v = s {_scgInstance = v}
-- Wires 'SSLCertsGet' to the servant-style route above.  Note that the
-- arguments to @go@ are passed in 'SSLCertsGetResource' capture order
-- (project, instance, sha1Fingerprint), which differs from the record's
-- field order.
instance GoogleRequest SSLCertsGet where
        type Rs SSLCertsGet = SSLCert
        type Scopes SSLCertsGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/sqlservice.admin"]
        requestClient SSLCertsGet'{..}
          = go _scgProject _scgInstance _scgSha1Fingerprint
              (Just AltJSON)
              sQLAdminService
          where go
                  = buildClient (Proxy :: Proxy SSLCertsGetResource)
                      mempty
| rueshyna/gogol | gogol-sqladmin/gen/Network/Google/Resource/SQL/SSLCerts/Get.hs | mpl-2.0 | 3,847 | 0 | 16 | 917 | 467 | 280 | 187 | 75 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.TagManager.Accounts.Containers.Workspaces.BuiltInVariables.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all the enabled Built-In Variables of a GTM Container.
--
-- /See:/ <https://developers.google.com/tag-manager Tag Manager API Reference> for @tagmanager.accounts.containers.workspaces.built_in_variables.list@.
module Network.Google.Resource.TagManager.Accounts.Containers.Workspaces.BuiltInVariables.List
(
-- * REST Resource
AccountsContainersWorkspacesBuiltInVariablesListResource
-- * Creating a Request
, accountsContainersWorkspacesBuiltInVariablesList
, AccountsContainersWorkspacesBuiltInVariablesList
-- * Request Lenses
, acwbivlParent
, acwbivlXgafv
, acwbivlUploadProtocol
, acwbivlAccessToken
, acwbivlUploadType
, acwbivlPageToken
, acwbivlCallback
) where
import Network.Google.Prelude
import Network.Google.TagManager.Types
-- | A resource alias for @tagmanager.accounts.containers.workspaces.built_in_variables.list@ method which the
-- 'AccountsContainersWorkspacesBuiltInVariablesList' request conforms to.
--
-- Encodes @GET tagmanager/v2/{+parent}/built_in_variables@ with the
-- optional query parameters below, returning a
-- 'ListEnabledBuiltInVariablesResponse'.
type AccountsContainersWorkspacesBuiltInVariablesListResource
     =
     "tagmanager" :>
       "v2" :>
         Capture "parent" Text :>
           "built_in_variables" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "pageToken" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           Get '[JSON] ListEnabledBuiltInVariablesResponse
-- | Lists all the enabled Built-In Variables of a GTM Container.
--
-- /See:/ 'accountsContainersWorkspacesBuiltInVariablesList' smart constructor.
data AccountsContainersWorkspacesBuiltInVariablesList =
  AccountsContainersWorkspacesBuiltInVariablesList'
    { _acwbivlParent :: !Text -- ^ workspace path (the only required field)
    , _acwbivlXgafv :: !(Maybe Xgafv) -- ^ V1 error format selector
    , _acwbivlUploadProtocol :: !(Maybe Text)
    , _acwbivlAccessToken :: !(Maybe Text)
    , _acwbivlUploadType :: !(Maybe Text)
    , _acwbivlPageToken :: !(Maybe Text) -- ^ pagination continuation token
    , _acwbivlCallback :: !(Maybe Text) -- ^ JSONP callback name
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Smart constructor: builds a request with only the required workspace
-- path set; every optional query parameter starts out as 'Nothing'.
--
-- Use the following lenses to modify the optional fields afterwards:
--
-- * 'acwbivlParent'
--
-- * 'acwbivlXgafv'
--
-- * 'acwbivlUploadProtocol'
--
-- * 'acwbivlAccessToken'
--
-- * 'acwbivlUploadType'
--
-- * 'acwbivlPageToken'
--
-- * 'acwbivlCallback'
accountsContainersWorkspacesBuiltInVariablesList
    :: Text -- ^ 'acwbivlParent'
    -> AccountsContainersWorkspacesBuiltInVariablesList
accountsContainersWorkspacesBuiltInVariablesList parent =
  AccountsContainersWorkspacesBuiltInVariablesList'
    { _acwbivlParent = parent
    , _acwbivlXgafv = Nothing
    , _acwbivlUploadProtocol = Nothing
    , _acwbivlAccessToken = Nothing
    , _acwbivlUploadType = Nothing
    , _acwbivlPageToken = Nothing
    , _acwbivlCallback = Nothing
    }
-- | GTM Workspace\'s API relative path. Example:
-- accounts\/{account_id}\/containers\/{container_id}\/workspaces\/{workspace_id}
acwbivlParent :: Lens' AccountsContainersWorkspacesBuiltInVariablesList Text
acwbivlParent = lens _acwbivlParent setField
  where setField s v = s {_acwbivlParent = v}

-- | V1 error format.
acwbivlXgafv :: Lens' AccountsContainersWorkspacesBuiltInVariablesList (Maybe Xgafv)
acwbivlXgafv = lens _acwbivlXgafv setField
  where setField s v = s {_acwbivlXgafv = v}

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
acwbivlUploadProtocol :: Lens' AccountsContainersWorkspacesBuiltInVariablesList (Maybe Text)
acwbivlUploadProtocol = lens _acwbivlUploadProtocol setField
  where setField s v = s {_acwbivlUploadProtocol = v}

-- | OAuth access token.
acwbivlAccessToken :: Lens' AccountsContainersWorkspacesBuiltInVariablesList (Maybe Text)
acwbivlAccessToken = lens _acwbivlAccessToken setField
  where setField s v = s {_acwbivlAccessToken = v}

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
acwbivlUploadType :: Lens' AccountsContainersWorkspacesBuiltInVariablesList (Maybe Text)
acwbivlUploadType = lens _acwbivlUploadType setField
  where setField s v = s {_acwbivlUploadType = v}

-- | Continuation token for fetching the next page of results.
acwbivlPageToken :: Lens' AccountsContainersWorkspacesBuiltInVariablesList (Maybe Text)
acwbivlPageToken = lens _acwbivlPageToken setField
  where setField s v = s {_acwbivlPageToken = v}

-- | JSONP
acwbivlCallback :: Lens' AccountsContainersWorkspacesBuiltInVariablesList (Maybe Text)
acwbivlCallback = lens _acwbivlCallback setField
  where setField s v = s {_acwbivlCallback = v}
-- Wires the request record to the route above; @go@'s arguments follow
-- the capture/query-parameter order of
-- 'AccountsContainersWorkspacesBuiltInVariablesListResource'.
instance GoogleRequest
           AccountsContainersWorkspacesBuiltInVariablesList
         where
        type Rs
               AccountsContainersWorkspacesBuiltInVariablesList
             = ListEnabledBuiltInVariablesResponse
        type Scopes
               AccountsContainersWorkspacesBuiltInVariablesList
             =
             '["https://www.googleapis.com/auth/tagmanager.edit.containers",
               "https://www.googleapis.com/auth/tagmanager.readonly"]
        requestClient
          AccountsContainersWorkspacesBuiltInVariablesList'{..}
          = go _acwbivlParent _acwbivlXgafv
              _acwbivlUploadProtocol
              _acwbivlAccessToken
              _acwbivlUploadType
              _acwbivlPageToken
              _acwbivlCallback
              (Just AltJSON)
              tagManagerService
            where go
                    = buildClient
                        (Proxy ::
                           Proxy
                             AccountsContainersWorkspacesBuiltInVariablesListResource)
                        mempty
| brendanhay/gogol | gogol-tagmanager/gen/Network/Google/Resource/TagManager/Accounts/Containers/Workspaces/BuiltInVariables/List.hs | mpl-2.0 | 6,566 | 0 | 18 | 1,406 | 789 | 460 | 329 | 127 | 1 |
{-# language TupleSections #-}
{-# language FlexibleContexts #-}
module Nanocoin.Network.P2P (
bootstrap,
) where
import Protolude
import Control.Monad.Base
import Control.Distributed.Process.Lifted
import Control.Distributed.Process.Lifted.Class
import Control.Distributed.Process.Node.Lifted
import qualified Data.Set as Set
import Network.Socket (ServiceName)
import Network.Transport.TCP
import Nanocoin.Network.Cmd (Cmd, cmdProc)
import Nanocoin.Network.Message (Msg, msgProc)
import Nanocoin.Network.Node (NodeProcessM)
import Nanocoin.Network.Peer (Peer(..), Peers)
import Nanocoin.Network.Service (Service(..))
import Nanocoin.Network.Utils (HostName)
import qualified Nanocoin.Network.Node as Node
import Logger (Logger, runLoggerT, logWarning, logInfo)
-- | Bootstrap the multiprocess architecture, spawning all processes.
--
-- Creates a Cloud Haskell local node on the configured host/port, then:
--
-- 1. runs a first 'runProcess' that spawns the 'p2pControllerProc'
--    (passing the bootnodes to contact), and
-- 2. runs a second 'runProcess' that blocks in 'waitP2PController' until
--    the controller has registered itself, then boots the messaging and
--    command-relay processes and sleeps forever to keep them alive.
--
-- Dies (via 'Protolude.die') if the TCP transport cannot be created.
bootstrap
  :: Logger
  -> Node.NodeEnv
  -> Chan Cmd
  -> [NodeId]
  -> IO ()
bootstrap logger nodeEnv cmdChan bootnodes = do
  let hostname = Node.host $ Node.nodeConfig nodeEnv
  let p2pPort = Node.p2pPort $ Node.nodeConfig nodeEnv
  -- Create a local node to run processes on
  eLocalNode <- createLocalNode hostname $ show p2pPort
  case eLocalNode of
    Left err -> Protolude.die err
    Right localNode -> do
      runProcess localNode $ void $
        Node.runNodeProcessM logger nodeEnv $ do
          -- Initialize P2P controller process
          spawnLocal $ p2pControllerProc bootnodes
      -- Wait for P2P Controller to boot and execute the other processes
      runProcess localNode $
        Node.runNodeProcessM logger nodeEnv $ do
          waitP2PController $ do
            -- Boot Messaging proc
            msgingPid <- spawnLocal msgProc
            register (show Messaging) msgingPid
            -- Boot Cmd (Relay) proc
            void $ spawnLocal $ cmdProc cmdChan
            -- Hang forever so as not to kill parent proc
            forever $ liftBase $
              threadDelay 3000000
--------------------------------------------------------------------------------
-- P2P Controller Process
--------------------------------------------------------------------------------
-- | The peer-discovery controller process.
--
-- Registers itself under the 'PeerDiscovery' service name, records the
-- local node as its own first peer, pings every bootnode, and then loops
-- forever dispatching incoming control messages to the handlers below.
p2pControllerProc :: [NodeId] -> NodeProcessM ()
p2pControllerProc bootnodes = do
  pid <- getSelfPid
  register (show PeerDiscovery) pid

  -- Add self as peer node
  Node.addPeer =<< fmap Peer getSelfNode

  -- Discover bootnode peers in the network
  mapM_ discoverPeer bootnodes

  controlP $ \runInProc ->
    forever $ receiveWait
      [ match $ runInProc . onPeerReply
      , match $ runInProc . onMonitorNotif
      , match $ runInProc . onPeerQuery
      , match $ runInProc . onPeers
      -- NOTE(review): a second, duplicate 'onMonitorNotif' match used to
      -- follow here; it could never fire because 'receiveWait' always
      -- selects the earliest match for a given message type, so the dead
      -- duplicate has been removed.
      ]
-- | Block until the 'PeerDiscovery' process is registered, then run the
-- given action.  Polls the local registry once a second while waiting.
waitP2PController :: Node.NodeProcessM () -> Node.NodeProcessM ()
waitP2PController action =
    whereis (show PeerDiscovery) >>= maybe retryLater runWithPid
  where
    -- Registry miss: log, sleep one second, try again.
    retryLater = do
      logWarning "Could not connect to PeerDiscovery process. Retrying..."
      liftBase $ threadDelay 1000000
      waitP2PController action
    -- Registry hit: log the pid and hand control to the caller's action.
    runWithPid pid = do
      logInfo $ "Found PeerDiscovery process. " <> (show pid :: Text)
      action
--------------------------------------------------------------------------------
-- P2P Messages & Handlers
--------------------------------------------------------------------------------
-- | True iff a 'WhereIsReply' is a positive answer from another node's
-- 'PeerDiscovery' service (right name, process actually found).
isPeerReply :: WhereIsReply -> Bool
isPeerReply (WhereIsReply svcName mPid) =
  svcName == show PeerDiscovery && isJust mPid
-- | Handle a 'WhereIsReply': if the reply carries a pid we don't know
-- yet, add it as a peer, monitor it, and ask it for its own peer list.
onPeerReply :: WhereIsReply -> Node.NodeProcessM ()
onPeerReply (WhereIsReply _ mPid) = do
  -- NOTE(review): "Recieved" typo kept byte-for-byte (runtime log text).
  putText $ "Recieved WhereIsReply: " <> show mPid
  maybe (pure ()) handleFound mPid
  where
    handleFound pid = do
      knownPeers <- Node.getPeers
      let newPeer = Peer (processNodeId pid)
      unless (newPeer `Set.member` knownPeers) $ do
        Node.addPeer newPeer
        void $ monitor pid
        -- Ask new peer for their peers
        selfPid <- getSelfPid
        Node.nsendPeer' newPeer PeerDiscovery selfPid
-- | Handle a peer-list query: remember (and monitor) the querying node
-- if it is new, then reply with the peer set as it was *before* the
-- querier was added (matching the original snapshot semantics).
onPeerQuery :: ProcessId -> Node.NodeProcessM ()
onPeerQuery pid = do
  say "Received peer query..."
  knownPeers <- Node.getPeers
  let querier = Peer (processNodeId pid)
  unless (querier `Set.member` knownPeers) $ do
    Node.addPeer querier
    void (monitor pid)
  Node.nsendPeer' querier PeerDiscovery knownPeers
-- | Handle a received peer set: ping every peer we don't already know.
-- The peer set is re-read per candidate so earlier replies in the same
-- batch are taken into account.
onPeers :: Peers -> Node.NodeProcessM ()
onPeers incoming = do
  say "Received list of peers..."
  mapM_ pingIfNew (Set.toList incoming)
  where
    pingIfNew candidate@(Peer nid) = do
      known <- Node.getPeers
      unless (candidate `Set.member` known) (discoverPeer nid)
-- | A monitored peer process died: drop the monitor and, if the peer is
-- still in the peer set, remove it.
onMonitorNotif :: ProcessMonitorNotification -> Node.NodeProcessM ()
onMonitorNotif (ProcessMonitorNotification mref pid _) = do
  unmonitor mref
  let downPeer = Peer (processNodeId pid)
  known <- Node.getPeers
  when (downPeer `Set.member` known) $
    Node.removePeer downPeer
--------------------------------------------------------------------------------
-- P2P Utils
--------------------------------------------------------------------------------
type P2PPort = ServiceName

-- | Fire an asynchronous 'whereis' at a node's 'PeerDiscovery' service.
-- Any answer arrives later as a 'WhereIsReply' (see 'onPeerReply').
discoverPeer :: NodeId -> Node.NodeProcessM ()
discoverPeer nid = do
  putText ("Pinging: " <> show nid)
  whereisRemoteAsync nid (show PeerDiscovery)
-- | Create a Cloud Haskell local node over a fresh TCP transport on the
-- given host\/port, returning a textual error if the transport fails.
createLocalNode
  :: HostName
  -> P2PPort
  -> IO (Either Text LocalNode)
createLocalNode host port =
    createTransport host port (host,) defaultTCPParameters >>= toNode
  where
    toNode (Left err) = pure (Left (show err))
    toNode (Right transport) = Right <$> newLocalNode transport initRemoteTable
| tdietert/nanocoin | src/Nanocoin/Network/P2P.hs | apache-2.0 | 5,646 | 0 | 20 | 1,141 | 1,359 | 682 | 677 | 122 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Language.Sigil.Show
( showSigil
) where
import Control.Monad.Free
import Language.Sigil.Types
-- | Render a Sigil program one instruction per line: each 'Push' shows
-- its payload, each 'Pop' prints a lone @.@, and the final 'Pure' result
-- terminates the listing.
showSigil :: (Show a, Show r) => Free (Sigil a) r -> String
showSigil program = case program of
  Free (Push s rest) -> show s ++ "\n" ++ showSigil rest
  Free (Pop rest) -> ".\n" ++ showSigil rest
  Pure result -> show result ++ "\n"
| erochest/sigil | Language/Sigil/Show.hs | apache-2.0 | 403 | 0 | 9 | 108 | 144 | 75 | 69 | 9 | 1 |
module M0ld.Yeast where
import M0ld.AST
import M0ld.M0ld
import qualified M0ld.C
import qualified Data.Map as Map
-- | Bracket the code component of an @(functions, code, prefix)@ triple
-- with the given opening and closing text, leaving the rest untouched.
wrap before after (fns, code, prefix) = (fns, before ++ code ++ after, prefix)
-- | Indent every non-empty line of a string by @depth@ spaces.  Empty
-- lines are left empty, and the result always ends with a newline
-- (via 'unlines').
indent depth = unlines . map pad . lines
  where
    pad "" = ""
    pad line = replicate depth ' ' ++ line
-- | Translate one M0ld 'Stmt' into a C @case@ arm of the generated
-- interpreter switch.  Threads an accumulator @(i,c)@: @i@ is the next
-- case index (the C program-counter value), @c@ the C source emitted so
-- far.  @regs@\/@labels@ map registers and labels to numeric slots, and
-- @hints@ (built by 'extractHints') enables shortcut code paths for
-- well-known responder\/identifier constants instead of a full dispatch.
emitStmt regs labels hints (i,c) stmt =
  let emit code = (i+1,c ++ "case "++ (show i) ++ ":\n" ++ indent 2 code)
      -- C lvalue for an M0ld register slot in the current frame.
      reg r = "frame->reg[" ++ (show $ resolveReg r regs) ++ "]"
      -- NULL-terminated C array literal of refcount-bumped registers.
      list regs = "(SMOP__Object*[]) {" ++ (concat $ map (\r -> "SMOP_REFERENCE(interpreter," ++ reg r ++ "),") regs) ++ "NULL}"
      -- Release any previous value of @target@ before overwriting it.
      assign target expr = "if (" ++ reg target ++ ") SMOP_RELEASE(interpreter," ++ reg target ++ ");\n" ++ reg target ++ " = " ++ expr ++ ";\n"
      hint typ reg = Map.lookup (typ,reg) hints
      in
  case stmt of
    -- Method call: try the shortcut paths keyed on hints first, falling
    -- back to the generic SMOP_DISPATCH in @dispatch@ below.
    Call target identifier capture@(Capture invocant positional named) -> case hint RI invocant of
      Just (StringConstant "capture") -> case hint Constant identifier of
        Just (StringConstant "positional") -> case capture of
          (Capture invocant [i] []) -> emit $ assign target $ "SMOP__NATIVE__capture_positional(interpreter," ++ reg invocant ++ ",SMOP__NATIVE__int_fetch(" ++ reg i ++ "))"
          _ -> dispatch
      Just (StringConstant "interpreter") -> case hint Constant identifier of
        Just (StringConstant "goto") -> case capture of
          (Capture invocant [c] []) -> emit $ "frame->pc = " ++ (show $ i+1) ++ ";\n" ++ (assign target $ "smop_shortcut_interpreter_goto(interpreter," ++ reg invocant ++ ",SMOP_REFERENCE(interpreter," ++ reg c ++ "))") ++ "break;\n"
          _ -> dispatch
      Just (StringConstant "lexical scope") -> case hint Constant identifier of
        Just (StringConstant "exists") -> case capture of
          (Capture invocant [key] []) -> emit $ "frame->pc = " ++ (show $ i+1) ++ ";\n" ++ "frame->ret = &" ++ reg target ++ ";\n" ++ (assign target $ "smop_shortcut_lexical_scope_exists(interpreter," ++ reg invocant ++ ",SMOP_REFERENCE(interpreter," ++ reg key ++ "))") ++ "break;\n"
          _ -> dispatch
      _ -> dispatch
      -- @where@ on the Call alternative: @dispatch@ is in scope for all
      -- of the nested cases above.
      where
      dispatch = emit $
          "frame->pc = " ++ (show $ i+1) ++ ";\n" ++
          "frame->ret = &" ++ reg target ++ ";\n" ++
          (assign target $ "SMOP_DISPATCH(\n" ++ (indent 2 $ "interpreter,\nSMOP_RI(" ++ reg invocant ++ "),\n" ++
          reg identifier ++
          ",\nSMOP__NATIVE__capture_create(interpreter," ++
          list (invocant:positional) ++ ","
          ++ list named ++
          ")\n" ) ++ ")") ++
          "break;\n"
    --Call2 target responder identifier capture ->
    --  map (\r -> resolveReg r regs) [target,responder,identifier,capture]
    Goto label -> emit $ "frame->pc = "++(show $ resolveLabelDef label labels) ++ ";\n" ++ "break;\n";
    -- Conditional branch on the SMOP boolean singleton.
    Br value iftrue iffalse ->
      emit $ "frame->pc = " ++
      reg value
      ++
      " == SMOP__NATIVE__bool_false ? " ++
      (show $ resolveLabelDef iffalse labels) ++
      " : " ++
      (show $ resolveLabelDef iftrue labels) ++
      ";\n" ++
      "break;\n"
    -- Declarations, label markers and hints emit no C code (see 'stmtSize').
    LabelDef label -> (i,c)
    Decl reg value -> (i,c)
    Hint _ _ _ -> (i,c)
    Assign lvalue rvalue -> emit $ reg lvalue ++ " = " ++ reg rvalue ++ ";\n"
-- | Emit one complete C interpreter function for a statement list.
-- The function body is a @switch (frame->pc)@ whose arms come from
-- 'emitStmt'; a final arm sets @frame->pc = -1@ to signal completion.
-- Returns the C source, the generated function name (@prefix ++ id@),
-- and the bumped name counter for the next function.
emitFunc (prefix,id) regMap labelsMap hints stmts = let
    (i,cases) = foldl (emitStmt regMap labelsMap hints) (0,"") stmts
    name = prefix++(show id) in
    ("static void " ++ name ++ "(SMOP__Object* interpreter,SMOP__Yeast__Frame* frame) {\n" ++
    " switch (frame->pc) {\n" ++
    indent 4 cases ++
    "case " ++ show i ++ ":" ++ "frame->pc = -1;\n" ++
    " }\n" ++
    "}\n",name,(prefix,id+1))
-- | Number of C switch arms a statement occupies: declarations, label
-- markers and hints are metadata (0); everything else emits one arm.
stmtSize stmt = case stmt of
  Decl _ _ -> 0
  LabelDef _ -> 0
  Hint _ _ _ -> 0
  _ -> 1
-- | Map each label to the program-counter offset of the statement that
-- follows it, counting offsets with 'stmtSize' so that zero-width
-- statements (declarations, hints, the labels themselves) don't shift
-- the numbering.
mapLabelsToStmts :: [Stmt] -> LabelsMap
mapLabelsToStmts = fst . foldl step (Map.empty, 0)
  where
    step (labels, offset) (LabelDef label) = (Map.insert label offset labels, offset)
    step (labels, offset) other = (labels, offset + stmtSize other)
-- | Collect all 'Hint' statements into a lookup table keyed by
-- (hint type, register); later hints overwrite earlier ones.
extractHints stmts = foldl collect Map.empty stmts
  where
    collect acc (Hint typ reg ri) = Map.insert (typ, reg) ri acc
    collect acc _ = acc
-- | Compile a statement list to a Yeast bytecode object: resolves
-- labels\/registers, extracts hints, dumps nested constants (which may
-- recursively compile sub-molds), emits the interpreter function, and
-- returns (all emitted C functions, the @SMOP__Yeast_create@ call
-- expression, the bumped name counter).
compileToYeast prefix stmts =
    let labelsMap = mapLabelsToStmts stmts
        regMap = mapRegisters stmts
        freeRegs = countRegister stmts
        hints = extractHints stmts
        (functions,constants,prefix') = dumpConstantsToC prefix stmts
        (funcBody,funcName,prefix'') = emitFunc prefix' regMap labelsMap hints stmts
        in (funcBody:functions,"SMOP__Yeast_create(" ++ show freeRegs ++ "," ++ constants ++ ","
        ++ funcName ++ ")",prefix'')
-- | Render all declared constants as a NULL-terminated C array literal.
dumpConstantsToC prefix stmts =
    wrap "(SMOP__Object*[]) {" "NULL}" $ foldl dumpConstantToC ([],"",prefix) stmts
-- A sub-mold constant recursively compiles to its own Yeast object;
-- plain constants are delegated to "M0ld.C"; non-Decl statements are
-- passed through untouched.
dumpConstantToC (f,c,p) (Decl reg (SubMold stmts)) = let
    (f',c',p') = compileToYeast p stmts in
    (f++f',c++c'++",",p')
dumpConstantToC (f,c,p) (Decl reg constant) = (f,c++M0ld.C.dumpConstantToC constant,p)
dumpConstantToC fcp _ = fcp
| gitpan/SMOP | m0ld/M0ld/Yeast.hs | artistic-2.0 | 5,175 | 0 | 28 | 1,355 | 1,780 | 906 | 874 | 91 | 13 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QStyleOptionViewItemV3.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QStyleOptionViewItemV3 (
QStyleOptionViewItemV3StyleOptionVersion
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- Machine-generated qtHaskell enum plumbing (see the file header: "do
-- not modify"); comments below are explanatory only.
-- Phantom carrier for the enum's underlying Int value.
data CQStyleOptionViewItemV3StyleOptionVersion a = CQStyleOptionViewItemV3StyleOptionVersion a
type QStyleOptionViewItemV3StyleOptionVersion = QEnum(CQStyleOptionViewItemV3StyleOptionVersion Int)
-- | Wrap a raw Int as the enum type.
ieQStyleOptionViewItemV3StyleOptionVersion :: Int -> QStyleOptionViewItemV3StyleOptionVersion
ieQStyleOptionViewItemV3StyleOptionVersion x = QEnum (CQStyleOptionViewItemV3StyleOptionVersion x)
instance QEnumC (CQStyleOptionViewItemV3StyleOptionVersion Int) where
  qEnum_toInt (QEnum (CQStyleOptionViewItemV3StyleOptionVersion x)) = x
  qEnum_fromInt x = QEnum (CQStyleOptionViewItemV3StyleOptionVersion x)
  -- Lift an IO action returning an integral into one returning the enum.
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til
-- Machine-generated slot-connection glue: wraps the Haskell handler in a
-- C-callable thunk, pins it with a StablePtr, and registers it via the
-- C-side qtc_connectSlot_int.
instance Qcs (QObject c -> QStyleOptionViewItemV3StyleOptionVersion -> IO ()) where
 connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
  = do
    funptr <- wrapSlotHandler_int slotHandlerWrapper_int
    stptr <- newStablePtr (Wrap _handler)
    withObjectPtr _qsig_obj $ \cobj_sig ->
        withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
                withCWString _qslt_nam $ \cstr_slt ->
                    qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
    return ()
  where
    slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
    -- Invoked from C; a null QObject means the receiver is gone, so the
    -- StablePtr and FunPtr are freed instead of calling the handler.
    slotHandlerWrapper_int funptr stptr qobjptr cint
      = do qobj <- qObjectFromPtr qobjptr
           let hint = fromCInt cint
           if (objectIsNull qobj)
            then do when (stptr/=ptrNull)
                     (freeStablePtr (castPtrToStablePtr stptr))
                    when (funptr/=ptrNull)
                     (freeHaskellFunPtr (castPtrToFunPtr funptr))
            else _handler qobj (qEnum_fromInt hint)
           return ()
-- StyleOptionVersion for QStyleOptionViewItemV3 is fixed at 3.
instance QeVersion QStyleOptionViewItemV3StyleOptionVersion where
  eVersion
    = ieQStyleOptionViewItemV3StyleOptionVersion $ 3
| uduki/hsQt | Qtc/Enums/Gui/QStyleOptionViewItemV3.hs | bsd-2-clause | 2,747 | 0 | 18 | 509 | 572 | 288 | 284 | 48 | 1 |
module Propellor.Git.VerifiedBranch where
import Propellor.Base
import Propellor.Git
import Propellor.PrivData.Paths
import Utility.FileMode
{- To verify the origin branch commit's signature we have to convince gpg
 - to use our private-data keyring while git log runs -- and git log has
 - no way to pass options through to gpg. So a temporary gpg.conf is
 - written into privDataDir and GNUPGHOME is pointed there instead.
 -}
verifyOriginBranch :: String -> IO Bool
verifyOriginBranch originbranch = do
	let gpgconf = privDataDir </> "gpg.conf"
	keyring <- privDataKeyring
	writeFile gpgconf $ unlines
		[ " keyring " ++ keyring
		, "no-auto-check-trustdb"
		]
	-- gpg is picky about perms
	modifyFileMode privDataDir (removeModes otherGroupModes)
	-- %G? prints a one-letter signature status for the tip commit.
	s <- readProcessEnv "git" ["log", "-n", "1", "--format=%G?", originbranch]
		(Just [("GNUPGHOME", privDataDir)])
	-- Clean up gpg's droppings so privdata stays minimal.
	nukeFile $ privDataDir </> "trustdb.gpg"
	nukeFile $ privDataDir </> "pubring.gpg"
	nukeFile $ privDataDir </> "gpg.conf"
	-- "U" = good signature with unknown validity, "G" = good signature.
	return (s == "U\n" || s == "G\n")
-- Returns True if HEAD is changed by fetching and merging from origin.
--
-- The merge only happens when a privdata keyring exists AND the origin
-- branch tip carries a gpg signature that verifies against it; otherwise
-- the fetch is kept but the local branch is left as-is.
fetchOrigin :: IO Bool
fetchOrigin = do
	branchref <- getCurrentBranch
	let originbranch = "origin" </> branchref
	void $ actionMessage "Pull from central git repository" $
		boolSystem "git" [Param "fetch"]
	-- Record the sha before any merge so the change can be detected below.
	oldsha <- getCurrentGitSha1 branchref
	keyring <- privDataKeyring
	whenM (doesFileExist keyring) $
		ifM (verifyOriginBranch originbranch)
			( do
				putStrLn $ "git branch " ++ originbranch ++ " gpg signature verified; merging"
				hFlush stdout
				void $ boolSystem "git" [Param "merge", Param originbranch]
			, warningMessage $ "git branch " ++ originbranch ++ " is not signed with a trusted gpg key; refusing to deploy it! (Running with previous configuration instead.)"
			)
	newsha <- getCurrentGitSha1 branchref
	return $ oldsha /= newsha
| ArchiveTeam/glowing-computing-machine | src/Propellor/Git/VerifiedBranch.hs | bsd-2-clause | 1,743 | 14 | 15 | 301 | 406 | 199 | 207 | 36 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Home
( homeView
) where
import Control.Monad (forM_)
import Lib
import Text.Blaze.XHtml5 ((!))
import qualified Text.Blaze.XHtml5 as H
import qualified Text.Blaze.XHtml5.Attributes as A
-- | The home page: a titled document listing every card via 'cardSelect'.
homeView :: H.Html
homeView = do
  H.head (H.title "GRE-1")
  H.body $ do
    H.p "Cards:"
    H.ul (forM_ cards cardSelect)
-- | A card together with how many copies are selected.
type CardAmt = (Card, Int)

-- | Render one card row: its description plus increment\/decrement
-- buttons.  NOTE(review): the @onclick \"bar\"@ handler looks like a
-- placeholder -- confirm before shipping.
cardSelect :: CardAmt -> H.Html
cardSelect (card, count) = H.li $ do
  H.toHtml (show card ++ ", count: " ++ show count)
  H.button "+1" ! A.onclick "bar"
  H.button "-1"
-- | Hard-coded sample hand, each card starting at count 0.  The card
-- constructors presumably come from "Lib" (imported unqualified above).
cards :: [CardAmt]
cards = [(NumberCard 1, 0)
        , (BuffCard Board Friendly (Add 5), 0)
        ]
| rubenpieters/gre-project | ui/src/Home.hs | bsd-3-clause | 694 | 0 | 13 | 182 | 258 | 140 | 118 | 25 | 1 |
module Machines where
import Turing
-- | Parse a machine description: one three-character word per
-- transition, paired in order with the keys (A,blank),(A,'1'),
-- (B,blank),(B,'1'),...  In each word @[st, s, d]@, @st@ is the next
-- state, @s@ the symbol to write, and @d@ a direction parsed via
-- @read [d]@ (direction type comes from "Turing").
-- NOTE(review): @f@ is deliberately partial -- any word that is not
-- exactly three characters crashes; the 'table' below guarantees the
-- format.
readMachine :: String -> Machine
readMachine = zip x . map f . words where
  x = concat [[(s, blankSymbol), (s, '1')] | s<-['A'..]]
  f [st, s, d] = (s, read [d], st)
-- | All machines from 'table', parsed.
machines :: [Machine]
machines = map (readMachine . fst) table
-- | Machine definitions paired with a free-form description of the tune
-- each one produces (see 'hints' for a numbered listing).
table :: [(String, String)]
table =
  [ (,) "C1L E1L H1L D1L D1R D0L A1L E1R B0L C0R" "slow"
  , (,) "C1L E0R H1L C0R D1R A0L A1R D1R A1L B0R" "fast, down slopes"
  , (,) "C1L A0R H1L E1L D1R B0L A1R C1R C0L D1L" "slow, down slopes"
  , (,) "C1L D0R H1L E0L D1R C1L E1L A1R B1L D0L" "tale #1"
  , (,) "C1L A1L H1L D0L D1R E0L A1L C0R C1R B0L" "fast"
  , (,) "C1L B0R H1L D0R D1L A0R E1R C0L C1R E1R" "fast with slow parts"
  , (,) "C1L B0R H1L E1R D1L A1L A1R D0L A0R C1R" "fast, hills"
  , (,) "C1L B0R H1L C0R D1L C0L E0R C1L A0R E1R" "slow, romboids"
  , (,) "C1L D1R H1L C0L A1R C1L E1R A0R B1L E0L" "repeat #5"
  , (,) "C1L A0L H1L C0L D0R A1L B1L E1R D1R E0R" "mountains in both direction #4"
  , (,) "C1L A0L H1L A0R D0R A1L E0R D1R A1L B0R" "slow, romboids #2"
  , (,) "C1L E0L H1L E1L D0R A1L A0L C1R C1R B0L" "slow increase"
  , (,) "C1L B0R H1L A1R D0L E1R E0R C1L C1R A0R" "slow, hills"
  , (,) "B1L H1L C1R E0R D1L B0R D0L A1L C0R A0L" "fast"
  , (,) "B1L H1L C1L B1R D1R E1L B1R D0R A1L C0L" "slow with fast parts, left mountains"
  , (,) "B1L H1L C0R D1L D1R C1R E1L E0L A0L B0R" "slow with fast parts"
  , (,) "B1L H1L C0R E1L D0R C1R A1L B1R B0L A0L" "very slow"
  , (,) "B1L H1L C0L D0R D1L E0R E1L A0L C1R D0R" "fast"
  , (,) "B1L H1L C0L B0L C1R D0R A1L E0R A0R E0R" "fast, mountains in clouds || ||, #1"
  , (,) "B1L H1L C0L D1L D0R C1L E1R A0L A1L E0R" "fast & slow, left mountains"
  , (,) "C1L E1L A1L H1L D1R E0R B1R E1R C1R A0L" "fast, |/"
  , (,) "C1L E0L A1R H1L D1R A0L D0R B1R C0L B0R" "fast"
  , (,) "C1L C0R D0L H1L D1R E0L C1L E0R A1R B1L" "tale"
  , (,) "C1L A1L E1R H1L D1R D0R B0R E0L A0L C1R" "hills with clouds"
  , (,) "C1L A0R A1L H1L D1R E1L A1R D0R E0L B0R" "tale"
  , (,) "C1L E1R D1R H1L D1L C0L A1R D1L B1R A0R" "mountains - hills"
  , (,) "C1L E0L D1R H1L B1L E1L A1R E1R A1L D0R" "fast, left mountains"
  , (,) "C1L D0R A0L H1L A1R D0L E1R B1L C1L C0R" "tale"
  , (,) "C1L E0L C1R H1L D0R A1L A1R E0R B1R E0L" "hills with clouds"
  , (,) "C1L B0R E0R H1L D0L C1L E1L C0L A1R C0R" "slow"
  , (,) "C1L E0R C0L H1L D0L B0L D1R A0R A1R D1L" "fast, decreasing"
  , (,) "C1L D1R E1R H1L D0L C0L B1R A0R A1R E1L" "slow tale, mountains"
  , (,) "C1L D1R E1R H1L D0L C0L B1R A0R A1R A1L" "jazz, hills"
  , (,) "C1L D1R E1R H1L D0L C0L B1R A0R A1R A0R" "mountains - hills #1"
  , (,) "C1L E1R D1R H1L D0L C0L B1R A1L D1L A0R" "mountains - hills"
  , (,) "C1L B0R C1R H1L D0L D0R A1R E0L D1L E1L" "fast, | |/"
  , (,) "C1L C0L D1L H1L B0L D0R E0R A1L A1R E1R" "slow with fast parts"
  , (,) "B1L D1L C1R H1L E1R D1R E1L C0R A1L D0L" "left mountains"
  , (,) "B1L A0L C1R H1L C0R D0R E1L B0L E0L A1L" "slow, |"
  , (,) "B1L A0R C1L H1L D0L E1R E1L A0L C1R A0R" "inverse mountains"
  , (,) "B1L E0R C1L H1L D0L C0L D1R A0R B0R E0R" "fast, mountains in clouds || ||"
  , (,) "B1L A0R C0L H1L C1R D1L E1L A1R B0L D0R" "slow jazz #2"
  ]
-- | Numbered descriptions of every entry in 'table': the index is
-- left-padded to a 5-character column, followed by the description.
hints :: [String]
hints = zipWith describe [0 :: Int ..] table
  where
    describe n (_, note) = padded ++ note
      where
        shown = show n
        padded = shown ++ replicate (5 - length shown) ' '
| divipp/turing-music | Machines.hs | bsd-3-clause | 3,538 | 0 | 14 | 1,067 | 701 | 387 | 314 | 55 | 1 |
module Main(main) where
import Control.Exception (bracket_)
import System.Environment (getArgs, getProgName)
import System.IO (hSetEcho, hFlush, stdin, stdout)
import Web.Zenfolio.API
import qualified Web.Zenfolio.Photos.Upload as Upload
import qualified Web.Zenfolio.Users as Users
-- | Print a prompt and read a line with terminal echo disabled,
-- so the typed password is not shown. Echo is restored (and a
-- newline printed) even if reading fails.
prompt :: String -> IO Password
prompt message = do
    putStr message
    hFlush stdout
    bracket_ silenceEcho restoreEcho getLine
  where
    silenceEcho = hSetEcho stdin False
    restoreEcho = hSetEcho stdin True >> putStrLn ""
-- | Search the user's group hierarchy for a photoset whose title is
-- exactly @Just title@. Only the top-level group elements are
-- examined; there is no recursion into nested groups.
findPhotoSet :: LoginName -> String -> ZM (Maybe PhotoSet)
findPhotoSet username title = do
    rootGroup <- Users.loadGroupHierarchy username
    return (search (groupElements rootGroup))
  where
    search [] = Nothing
    search (GroupElementPhotoSet ps : rest)
        | psTitle ps == Just title = Just ps
        | otherwise = search rest
    search (_ : rest) = search rest
-- | Log in, locate the named photoset in the user's hierarchy, and
-- upload the file into it, printing the resulting photo id.
-- Calls 'fail' in the ZM monad when no photoset with that title exists.
uploadPhoto :: LoginName -> Password -> String -> FilePath -> ZM ()
uploadPhoto username password photoset filename = do
    token <- login username password
    -- All subsequent API calls run with the session token in scope.
    withToken token $ do
        mps <- findPhotoSet username photoset
        case mps of
            Just ps -> do
                photoId <- Upload.uploadFile ps filename
                liftIO $ putStrLn ("Photo id: " ++ show photoId)
            Nothing -> fail $ "No photosets available to upload to"
-- | Entry point: expects @user-name photo-set filename@ on the command
-- line, prompts for the password without echo, then performs the upload.
-- Any extra arguments are ignored; too few prints a usage message.
main :: IO ()
main = do
    ls <- getArgs
    case ls of
        (username:photoset:filename:_) -> do
            password <- prompt "Password: "
            zenfolio $ uploadPhoto username password photoset filename
        _ -> do
            prg <- getProgName
            putStrLn ("Usage: " ++ prg ++ " user-name photo-set filename")
| md5/hs-zenfolio | examples/UploadPhoto.hs | bsd-3-clause | 1,801 | 0 | 19 | 536 | 518 | 257 | 261 | 43 | 4 |
module Roguelike.AI.Class where
import Control.Monad.Random
import Roguelike.Event
import Roguelike.Creature
import Roguelike.Action
import Roguelike.SubjectiveWorld
-- | Interface for creature AIs: 'perceive' folds an incoming event into
-- the AI state; 'action' chooses an action for the current turn.
--
-- NOTE(review): the @MonadRandom m@ constraint's type variable @m@ does
-- not appear anywhere else in either signature, so it is ambiguous and
-- has no effect as written — presumably the results were meant to be
-- monadic (@m a@ / @m Action@). TODO confirm the intended signatures.
class AI a where
    perceive :: MonadRandom m => SubjectiveWorld -> Creature -> a -> Event -> a
    action :: MonadRandom m => TurnStatus -> a -> Action
| abbradar/roguelike | src/Roguelike/AI/Class.hs | bsd-3-clause | 319 | 0 | 11 | 48 | 90 | 49 | 41 | 9 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE DeriveFunctor #-}
module Types.Users
( UserInfo(..)
, UserStatus(..)
, Users -- constructor remains internal
-- * Lenses created for accessing UserInfo fields
, uiName, uiId, uiStatus, uiInTeam, uiNickName, uiFirstName, uiLastName, uiEmail
, uiDeleted
-- * Various operations on UserInfo
-- * Creating UserInfo objects
, userInfoFromUser
-- * Miscellaneous
, userSigil
, trimUserSigil
, statusFromText
, findUserById
, findUserByName
, findUserByDMChannelName
, findUserByNickname
, noUsers, addUser, allUsers
, modifyUserById
, getDMChannelName
, userIdForDMChannel
, userDeleted
, TypingUsers
, noTypingUsers
, addTypingUser
, allTypingUsers
, expireTypingUsers
, getAllUserIds
)
where
import Prelude ()
import Prelude.MH
import qualified Data.HashMap.Strict as HM
import Data.Semigroup ( Max(..) )
import qualified Data.Text as T
import Lens.Micro.Platform ( (%~), makeLenses, ix )
import Network.Mattermost.Types ( Id(Id), UserId(..), User(..)
, idString )
import Types.Common
-- * 'UserInfo' Values
-- | A 'UserInfo' value represents everything we need to know at
-- runtime about a user
data UserInfo = UserInfo
    { _uiName :: Text -- ^ Username (login name), not the display name
    , _uiId :: UserId
    , _uiStatus :: UserStatus -- ^ Presence; starts 'Offline' (see 'userInfoFromUser')
    , _uiInTeam :: Bool -- ^ Whether the user belongs to the current team
    , _uiNickName :: Maybe Text -- ^ 'Nothing' when the server nickname is empty
    , _uiFirstName :: Text
    , _uiLastName :: Text
    , _uiEmail :: Text
    , _uiDeleted :: Bool -- ^ Derived from the server timestamps, see 'userDeleted'
    } deriving (Eq, Show)
-- | Is this user deleted? A user counts as deleted when its deletion
-- timestamp is later than its creation timestamp.
userDeleted :: User -> Bool
userDeleted u = userDeleteAt u > userCreateAt u
-- | Create a 'UserInfo' value from a Mattermost 'User' value
--
-- The status always starts out 'Offline' here; presumably it is
-- updated later from presence events — confirm against callers.
userInfoFromUser :: User -> Bool -> UserInfo
userInfoFromUser up inTeam = UserInfo
    { _uiName = userUsername up
    , _uiId = userId up
    , _uiStatus = Offline
    , _uiInTeam = inTeam
    , _uiNickName =
        -- Normalize an empty server nickname to Nothing.
        let nick = sanitizeUserText $ userNickname up
        in if T.null nick then Nothing else Just nick
    , _uiFirstName = sanitizeUserText $ userFirstName up
    , _uiLastName = sanitizeUserText $ userLastName up
    , _uiEmail = sanitizeUserText $ userEmail up
    , _uiDeleted = userDeleted up
    }
-- | The 'UserStatus' value represents possible current status for
-- a user
data UserStatus
    = Online
    | Away
    | Offline
    | DoNotDisturb
    | Other Text
    deriving (Eq, Show)

-- | Decode a server status string. Any unrecognized status is kept
-- verbatim inside an 'Other' value rather than being discarded.
statusFromText :: Text -> UserStatus
statusFromText t = maybe (Other t) id (lookup t known)
  where
    known =
        [ (T.pack "online", Online)
        , (T.pack "offline", Offline)
        , (T.pack "away", Away)
        , (T.pack "dnd", DoNotDisturb)
        ]
-- ** 'UserInfo' lenses
makeLenses ''UserInfo
-- ** Manage the collection of all Users
-- | Define a binary kinded type to allow derivation of functor.
newtype AllMyUsers a = AllUsers { _ofUsers :: HashMap UserId a }
    deriving Functor
makeLenses ''AllMyUsers
-- | Define the exported typename which universally binds the
-- collection to the UserInfo type.
type Users = AllMyUsers UserInfo
-- | Initial collection of Users with no members
noUsers :: Users
noUsers = AllUsers HM.empty
-- | All user IDs present in the collection, in no particular order.
getAllUserIds :: Users -> [UserId]
getAllUserIds = HM.keys . _ofUsers
-- | Add a member to the existing collection of Users
-- (replaces any existing entry with the same user ID).
addUser :: UserInfo -> Users -> Users
addUser userinfo = AllUsers . HM.insert (userinfo^.uiId) userinfo . _ofUsers
-- | Get a list of all known users
allUsers :: Users -> [UserInfo]
allUsers = HM.elems . _ofUsers
-- | The exported typename representing the collection of users who are
-- currently typing. The values kept against the user id keys are the
-- latest timestamps of typing events from the server.
type TypingUsers = AllMyUsers (Max UTCTime)
-- | Initial collection of TypingUsers with no members
noTypingUsers :: TypingUsers
noTypingUsers = AllUsers HM.empty
-- | Add a member to the existing collection of TypingUsers.
-- Combining with the 'Max' semigroup keeps the newest of the stored
-- and incoming timestamps for an already-present user.
addTypingUser :: UserId -> UTCTime -> TypingUsers -> TypingUsers
addTypingUser uId ts = AllUsers . HM.insertWith (<>) uId (Max ts) . _ofUsers
-- | Get a list of all typing users
allTypingUsers :: TypingUsers -> [UserId]
allTypingUsers = HM.keys . _ofUsers
-- | Remove all the expired users from the collection of TypingUsers.
-- An entry survives only if its timestamp is at or after the cutoff.
expireTypingUsers :: UTCTime -> TypingUsers -> TypingUsers
expireTypingUsers expiryTimestamp =
    AllUsers . HM.filter (\(Max ts') -> ts' >= expiryTimestamp) . _ofUsers
-- | Get the User information given the UserId
findUserById :: UserId -> Users -> Maybe UserInfo
findUserById uId = HM.lookup uId . _ofUsers
-- | Look up a user by username. This is an exact match on the
-- username field, not necessarily the presented name; a leading user
-- sigil on the input is stripped first. Succeeds only when exactly
-- one user matches.
findUserByName :: Users -> Text -> Maybe (UserId, UserInfo)
findUserByName allusers name =
    case candidates of
        [unique] -> Just unique
        _ -> Nothing
  where
    wanted = trimUserSigil name
    candidates =
        filter ((wanted ==) . _uiName . snd)
               (HM.toList (_ofUsers allusers))
-- | Look up a user by nickname. This is an exact match on the
-- nickname field, not necessarily the presented name; a leading user
-- sigil on the input is stripped first. Users without a nickname
-- never match.
findUserByNickname :: [UserInfo] -> Text -> Maybe UserInfo
findUserByNickname uList nick = find hasNick uList
  where
    wanted = trimUserSigil nick
    hasNick u = _uiNickName u == Just wanted
-- | The sigil that may prefix usernames in user input.
userSigil :: Text
userSigil = T.pack "@"

-- | Drop a single leading user sigil, if present; otherwise return
-- the input unchanged.
trimUserSigil :: Text -> Text
trimUserSigil n =
    case T.stripPrefix userSigil n of
        Just rest -> rest
        Nothing -> n
-- | Extract a specific user from the collection and perform an
-- endomorphism operation on it, then put it back into the collection.
-- The 'ix' traversal makes this a no-op when the ID is absent.
modifyUserById :: UserId -> (UserInfo -> UserInfo) -> Users -> Users
modifyUserById uId f = ofUsers.ix(uId) %~ f
-- | The canonical direct-channel name for a pair of users: the two
-- user ID strings in ascending order, joined by "__". Symmetric in
-- its arguments.
getDMChannelName :: UserId -> UserId -> Text
getDMChannelName me you = lower <> T.pack "__" <> higher
  where
    mine = idString me
    yours = idString you
    lower = min mine yours
    higher = max mine yours
-- | Extract the corresponding other user from a direct channel name.
-- Direct channel names have the form \"UID__UID\" where one UID is
-- mine and the other is the other participant. Returns Nothing if
-- the string is not of that shape, or if neither half matches my ID.
userIdForDMChannel :: UserId
                   -- ^ My user ID
                   -> Text
                   -- ^ The channel name
                   -> Maybe UserId
userIdForDMChannel me chanName =
    case T.splitOn (T.pack "__") chanName of
        [u1, u2]
            | me == UI (Id u1) -> Just (UI (Id u2))
            | me == UI (Id u2) -> Just (UI (Id u1))
        _ -> Nothing
-- | Find the other participant of a direct channel, by checking every
-- known user ID against the canonical channel name for (me, them).
-- Linear in the number of known users.
findUserByDMChannelName :: Users
                        -> Text -- ^ the dm channel name
                        -> UserId -- ^ me
                        -> Maybe UserInfo -- ^ you
findUserByDMChannelName users dmchan me = listToMaybe
    [ user
    | u <- HM.keys $ _ofUsers users
    , getDMChannelName me u == dmchan
    , user <- maybeToList (HM.lookup u $ _ofUsers users)
    ]
| aisamanra/matterhorn | src/Types/Users.hs | bsd-3-clause | 7,234 | 0 | 17 | 1,753 | 1,502 | 842 | 660 | -1 | -1 |
{-
- Hacq (c) 2013 NEC Laboratories America, Inc. All rights reserved.
-
- This file is part of Hacq.
- Hacq is distributed under the 3-clause BSD license.
- See the LICENSE file for more details.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
module Data.Quantum.Gate (Wire(..),
IsGate(..), Gate(..)) where
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import Data.Typeable (Typeable)
import Data.Quantum.Wire (Wire(..))
-- | Interface for quantum gate representations over wire type @w@.
-- The @C@/@CC@ suffixes add one or two control wires; each control is
-- paired with a 'Bool' flag (its meaning is fixed by the consumer of
-- the gate — not shown here).
class Traversable g => IsGate g where
  gateX :: w -> g w
  gateXC :: w -> w -> Bool -> g w
  gateXCC :: w -> w -> Bool -> w -> Bool -> g w
  gateH :: w -> g w
  gateY :: w -> g w -- Y|0> = i|1>, Y|1> = -i|0>
  gateZ :: w -> g w
  gateS :: w -> Bool -> g w -- gateS w False = diag[1, i], gateS w True = diag[1, -i]
  gateT :: w -> Bool -> g w -- gateT w False = diag[1, e^(i pi/4)], gateT w True = diag[1, e^(-i pi/4)]
  invertGate :: g w -> g w
-- | Concrete gate datatype; the canonical instance of 'IsGate'.
-- Strict in all wires and flags.
data Gate w
  = GateX !w
  | GateXC !w !w !Bool
  | GateXCC !w !w !Bool !w !Bool
  | GateH !w
  | GateY !w
  | GateZ !w
  | GateS !w !Bool
  | GateT !w !Bool
  deriving (Eq, Show, Typeable, Functor, Foldable, Traversable)
-- | 'Gate' implements 'IsGate' with its constructors directly.
-- Only the phase gates S and T carry an inversion flag, so
-- 'invertGate' flips that flag; every other gate is returned
-- unchanged (X, H, Y, Z and the controlled X forms are self-inverse).
instance IsGate Gate where
  gateX = GateX
  gateXC = GateXC
  gateXCC = GateXCC
  gateH = GateH
  gateY = GateY
  gateZ = GateZ
  gateS = GateS
  gateT = GateT
  invertGate (GateS targ inv) = GateS targ (not inv)
  invertGate (GateT targ inv) = GateT targ (not inv)
  invertGate e = e
| ti1024/hacq | src/Data/Quantum/Gate.hs | bsd-3-clause | 1,508 | 0 | 12 | 353 | 456 | 242 | 214 | 74 | 0 |
module Control.Concurrent.STM.ChunkedQueue (
-- * Simple Chunked Queue
ChunkedQueue(..),
consumeQueue,
enqueueOne,
enqueueMany,
) where
import Data.Monoid
-- | Abstract type representing a chunked queue. Acts as API for draining
-- queues. Chunks are kept newest-first; see 'consumeQueue' for the
-- element order.
data ChunkedQueue a = ChunkedQueue [Chunk a]
-- NOTE(review): on GHC >= 8.4 a Monoid instance additionally requires a
-- Semigroup instance — confirm the supported compiler range.
instance Monoid (ChunkedQueue a) where
    mempty = ChunkedQueue []
    (ChunkedQueue []) `mappend` b = b
    -- b's chunks go in front because chunk lists are newest-first.
    (ChunkedQueue a) `mappend` (ChunkedQueue b) = ChunkedQueue (b ++ a)
-- | Consume a @ChunkedQueue@ into a list, oldest element first.
-- Chunks are stored newest-first, so a left fold prepends each
-- older chunk's elements onto the result accumulated so far;
-- 'Reverse' chunks are flipped back into insertion order.
consumeQueue :: ChunkedQueue a -> [a]
consumeQueue (ChunkedQueue chunks) = foldl prepend [] chunks
  where
    prepend acc (Forward xs) = xs ++ acc
    prepend acc (Reverse xs) = reverse xs ++ acc
-- | Push a single item onto the queue. When the head chunk is already
-- a 'Reverse' chunk the item is merged into it; otherwise a fresh
-- singleton 'Reverse' chunk is started.
enqueueOne :: ChunkedQueue a -> a -> ChunkedQueue a
enqueueOne (ChunkedQueue chunks) item = ChunkedQueue $
    case chunks of
        Reverse xs : rest -> Reverse (item : xs) : rest
        _ -> Reverse [item] : chunks
-- | Enqueue a list of items in order. An empty list leaves the queue
-- untouched (previously it inserted a useless empty 'Forward' chunk);
-- a singleton merges into an existing 'Reverse' chunk via
-- 'enqueueOne'; longer lists are stored as one 'Forward' chunk.
enqueueMany :: ChunkedQueue a -> [a] -> ChunkedQueue a
enqueueMany chQueue [] = chQueue
enqueueMany chQueue [x] = enqueueOne chQueue x
enqueueMany (ChunkedQueue chunks) xs = ChunkedQueue (Forward xs:chunks)
data Chunk a = Forward [a] | Reverse [a]
| KholdStare/stm-chunked-queues | src/Control/Concurrent/STM/ChunkedQueue.hs | bsd-3-clause | 1,207 | 0 | 11 | 255 | 439 | 231 | 208 | 25 | 3 |
module Language.SequentCore.WiredIn (
kontKindTyCon, kontTyCon, ubxExistsTyCon,
mkKontKind, mkKontTy, mkUbxExistsTy,
isKontKind, isKontKind_maybe, isKontTy, isKontTy_maybe,
isUbxExistsTy, isUbxExistsTy_maybe,
applyUbxExists, applyUbxExists_maybe, applysUbxExists_maybe,
sequentCoreTag, sequentCoreWiredInTag,
mkKontId, mkKontArgId, mkInlinableJoinBinder
) where
import FastString
import Id
import Kind
import Maybes
import Name
import Outputable
import PrelNames
import TyCon
import Type
import TysPrim
import Unique
import Control.Monad
sequentCoreTag, sequentCoreWiredInTag :: Char
-- Must be different from any other unique tag!! See the Unique module
sequentCoreTag = 'Q'
sequentCoreWiredInTag = 'q'
kontKindKey, kontTypeKey, ubxExistsTypeKey, kontIdKey, kontArgKey, inlinableJoinBndrKey :: Unique
-- Allocate consecutive uniques (1, 2, ...) in the wired-in tag space
-- via a lazy cons-pattern binding over the infinite list.
kontKindKey: kontTypeKey: ubxExistsTypeKey: kontIdKey: kontArgKey: inlinableJoinBndrKey: _
    = map (mkUnique sequentCoreWiredInTag) [1..]
-- System names for the wired-in binders; the '*' prefix mirrors
-- GHC's convention for names that cannot clash with user code.
kontArgName, kontIdName, inlinableJoinBinderName :: Name
[kontArgName, kontIdName, inlinableJoinBinderName] = zipWith mkSystemVarName
    [kontArgKey, kontIdKey, inlinableJoinBndrKey]
    [fsLit "karg", fsLit "*ret", fsLit "*inj"]
-- Names for the wired-in type constructors defined below.
kontKindTyConName, kontTyConName, ubxExistsTyConName :: Name
kontKindTyConName = mkPrimTyConName (fsLit "ContKind") kontKindKey kontKindTyCon
kontTyConName = mkPrimTyConName (fsLit "Cont#") kontTypeKey kontTyCon
ubxExistsTyConName = mkPrimTyConName (fsLit "Exists#") ubxExistsTypeKey ubxExistsTyCon
-- | A local id (named @karg@) for a continuation argument of the given type.
mkKontArgId :: Type -> Id
mkKontArgId ty = mkLocalId kontArgName ty
-- | A local id (named @*ret@) standing for the current continuation.
mkKontId :: Type -> Var
mkKontId ty = mkLocalId kontIdName ty
-- | A local id (named @*inj@) for an inlinable join binder.
mkInlinableJoinBinder :: Type -> Var
mkInlinableJoinBinder ty = mkLocalId inlinableJoinBinderName ty
kontKindTyCon, kontTyCon, ubxExistsTyCon :: TyCon
-- ContKind :: superKind -> superKind
kontKindTyCon = mkKindTyCon kontKindTyConName (superKind `mkArrowKind` superKind)
-- TODO VoidRep isn't really right, but does it matter? This type should never
-- appear in Core anyway.
kontTyCon = mkPrimTyCon kontTyConName kind roles VoidRep
  where
    kKi = mkTyVarTy kKiVar
    -- Cont# :: forall k. k -> ContKind k
    kind = mkPiTypes [kKiVar] (mkFunTy kKi (mkKontKind kKi))
    roles = [Representational, Representational]
-- TODO We might be able to finagle unboxed existentials by calling mkTupleTyCon
-- with a special DataCon
ubxExistsTyCon = mkPrimTyCon ubxExistsTyConName kind [Representational] VoidRep
  where
    -- Exists# :: OpenKind -> #
    kind = openTypeKind `mkArrowKind` unliftedTypeKind
-- | Apply the ContKind constructor to a kind.
mkKontKind :: Kind -> Kind
mkKontKind kind = mkTyConApp kontKindTyCon [kind]
-- | Form the type of a continuation accepting a value of the given type.
mkKontTy :: Type -> Type
mkKontTy ty = mkTyConApp kontTyCon [typeKind ty, ty]
-- | Form an unboxed existential @Exists# (forall a. ty)@.
mkUbxExistsTy :: TyVar -> Type -> Type
mkUbxExistsTy a ty = mkTyConApp ubxExistsTyCon [mkForAllTy a ty]
-- | Is this kind an application of the ContKind constructor?
isKontKind :: Kind -> Bool
isKontKind = isJust . isKontKind_maybe
-- | The argument kind of a ContKind application, if it is one.
isKontKind_maybe :: Kind -> Maybe Kind
isKontKind_maybe ki = do [arg] <- matchTyConApp ki kontKindTyCon
                         return arg
isKontTy, isUbxExistsTy :: Type -> Bool
isKontTy = isJust . isKontTy_maybe
isUbxExistsTy = isJust . isUbxExistsTy_maybe
-- | The value type accepted by a @Cont#@ type, if it is one.
-- (The first argument of Cont# is the kind; it is discarded here.)
isKontTy_maybe :: Type -> Maybe Type
isKontTy_maybe ty = do [_, arg] <- matchTyConApp ty kontTyCon
                       return arg
-- | Decompose @Exists# (forall a. body)@ into @(a, body)@.
isUbxExistsTy_maybe :: Type -> Maybe (TyVar, Type)
isUbxExistsTy_maybe ty = do [arg] <- matchTyConApp ty ubxExistsTyCon
                            splitForAllTy_maybe arg
-- | Instantiate an unboxed existential at a type argument; panics when
-- the first argument is not an Exists# type.
applyUbxExists :: Type -> Type -> Type
applyUbxExists ty tyArg
  = applyUbxExists_maybe ty tyArg `orElse` pprPanic "applyUbxExists" (ppr ty <+> ppr tyArg)
-- | Instantiate an unboxed existential at a type argument, substituting
-- the argument for the bound variable in the body.
applyUbxExists_maybe :: Type -> Type -> Maybe Type
applyUbxExists_maybe ty tyArg
  = do
    (a, body) <- isUbxExistsTy_maybe ty
    return $ substTyWith [a] [tyArg] body
-- | Instantiate nested unboxed existentials at several type arguments.
applysUbxExists_maybe :: Type -> [Type] -> Maybe Type
applysUbxExists_maybe = foldM applyUbxExists_maybe
-- | The arguments of @ty@ when it is an application of exactly @con@.
matchTyConApp :: Type -> TyCon -> Maybe [Type]
matchTyConApp ty con = do (con', args) <- splitTyConApp_maybe ty
                          guard (con == con')
                          return args
| pdownen/sequent-core | src/Language/SequentCore/WiredIn.hs | bsd-3-clause | 4,087 | 0 | 11 | 750 | 972 | 531 | 441 | 82 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Test.Hspec
import Data.Text.Paragraph
-- | Run the Data.Text.Paragraph test suite.
main :: IO ()
main = hspec spec
-- | Specs for 'justifyLeft' at width 5: short lines pass through,
-- newlines are preserved, words that do not fit move to the next
-- line, and words longer than the width are hard-split.
spec :: Spec
spec = do
  describe "left justify" $ do
    let go xs ys = justifyLeft 5 xs `shouldBe` ys
    it "doesnt affect short lines" (go "foo" "foo")
    it "retains newlines 1" (go "foo\nbar" "foo\nbar")
    it "retains newlines 2" (go "foo\n\nbar" "foo\n\nbar")
    it "moves short words down a line" (go "foo bar" "foo\nbar")
    it "moves long words down a line" (go "foo barbar" "foo b\narbar")
    it "splits long words 1" (go "foobar" "fooba\nr")
    it "splits long words 2" (go "foobarfoobar" "fooba\nrfoob\nar")
    it "splits long words 3" (go "foobar\nfoobar" "fooba\nr\nfooba\nr")
| mitchellwrosen/reddit-cli | test/Spec.hs | bsd-3-clause | 854 | 0 | 14 | 286 | 204 | 95 | 109 | 17 | 1 |
{-# LANGUAGE RecordWildCards, DeriveDataTypeable #-}
-- | Nix configuration
module Stack.Config.Nix
(nixOptsFromMonoid
,nixCompiler
,StackNixException(..)
) where
import Control.Applicative
import Control.Monad (join, when)
import Data.Maybe
import Data.Monoid.Extra
import qualified Data.Text as T
import Data.Typeable
import Distribution.System (OS (..))
import Stack.Types
import Control.Exception.Lifted
import Control.Monad.Catch (throwM,MonadCatch)
import Prelude
-- | Interprets NixOptsMonoid options.
--
-- Throws 'NixCannotUseShellFileAndPackagesException' when both a
-- package list and a shell init file are configured, since the two
-- mechanisms are mutually exclusive.
nixOptsFromMonoid
    :: (Monad m, MonadCatch m)
    => NixOptsMonoid
    -> OS
    -> m NixOpts
nixOptsFromMonoid NixOptsMonoid{..} os = do
    let nixEnable = fromFirst (getAny nixMonoidDefaultEnable) nixMonoidEnable
        -- Pure nix-shells are the default everywhere except OS X.
        defaultPure = case os of
          OSX -> False
          _ -> True
        nixPureShell = fromFirst defaultPure nixMonoidPureShell
        nixPackages = fromFirst [] nixMonoidPackages
        nixInitFile = getFirst nixMonoidInitFile
        -- Each configured search path becomes a "-I" "<path>" pair of
        -- shell arguments, appended after the explicit shell options.
        nixShellOptions = fromFirst [] nixMonoidShellOptions
                          ++ prefixAll (T.pack "-I") (fromFirst [] nixMonoidPath)
    when (not (null nixPackages) && isJust nixInitFile) $
       throwM NixCannotUseShellFileAndPackagesException
    return NixOpts{..}
  where prefixAll p (x:xs) = p : x : prefixAll p xs
        prefixAll _ _ = []
-- | Determine the Nix attribute name of the GHC derivation to use,
-- from the explicit overrides or the project's resolver/compiler.
-- Falls back to the plain @ghc@ attribute when nothing is known.
nixCompiler :: Config -> Maybe Resolver -> Maybe CompilerVersion -> T.Text
nixCompiler config resolverOverride compilerOverride =
  let mproject = fst <$> configMaybeProject config
      mresolver = resolverOverride <|> fmap projectResolver mproject
      mcompiler = compilerOverride <|> join (fmap projectCompiler mproject)
      -- Strip the dots from the *version* only: the Nix attribute is
      -- e.g. "haskell.compiler.ghc7103", so the namespace dots must be
      -- kept. (Previously the filter ran over the whole attribute
      -- path, yielding the bogus "haskellcompilerghc7103".)
      nixCompilerFromVersion v =
        T.append (T.pack "haskell.compiler.ghc")
                 (T.filter (/= '.') (versionText v))
  in case (mresolver, mcompiler) of
       (_, Just (GhcVersion v)) -> nixCompilerFromVersion v
       (Just (ResolverCompiler (GhcVersion v)), _) -> nixCompilerFromVersion v
       (Just (ResolverSnapshot (LTS x y)), _) ->
         T.pack ("haskell.packages.lts-" ++ show x ++ "_" ++ show y ++ ".ghc")
       _ -> T.pack "ghc"
-- Exceptions thrown specifically by Stack.Nix
data StackNixException
  = NixCannotUseShellFileAndPackagesException
    -- ^ Nix can't be given packages and a shell file at the same time
    deriving (Typeable)
instance Exception StackNixException
-- Show doubles as the user-facing message for this exception.
instance Show StackNixException where
  show NixCannotUseShellFileAndPackagesException =
    "You cannot have packages and a shell-file filled at the same time in your nix-shell configuration."
| phadej/stack | src/Stack/Config/Nix.hs | bsd-3-clause | 2,575 | 0 | 16 | 535 | 657 | 343 | 314 | 55 | 4 |
module Test4b
where
import qualified Data.HashMap.Strict as H
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.List
-- | ASCII-only letter test (both cases). Intentionally narrower than
-- 'Data.Char.isAlpha', which also accepts Unicode letters.
isAlpha :: Char -> Bool
isAlpha c = within 'a' 'z' || within 'A' 'Z'
  where within lo hi = lo <= c && c <= hi

-- | Split a Text into its lowercased alphabetic words, discarding
-- every character that is not an ASCII letter.
wrds :: T.Text -> [ T.Text ]
wrds input
    | T.null word = []
    | otherwise = T.toLower word : wrds rest
  where
    (word, rest) = T.span isAlpha (T.dropWhile (not . isAlpha) input)
-- | Build a word-frequency dictionary from the file "big.txt" and
-- return a membership test, a frequency lookup (0 for unknown words),
-- and T.pack for converting query strings.
-- Note: uses a strict left fold (foldl') so the HashMap is built
-- without accumulating thunks.
readDict = do
    allwords <- fmap wrds $ T.readFile "big.txt"
    -- The inner 'h' and 'add' shadow the outer 'let' bindings are
    -- recursive: 'add' increments the count for one word.
    let h = foldl' add H.empty allwords
        add h w = let c = H.lookupDefault (0 :: Int) w h
                  in H.insert w (c+1) h
        member = \k -> H.member k h
        frequency = \k -> H.lookupDefault 0 k h
    return (member, frequency, T.pack)
| erantapaa/test-spelling | src/Test4b.hs | bsd-3-clause | 689 | 0 | 15 | 183 | 316 | 167 | 149 | 19 | 2 |
-- Copyright : (C) 2009 Corey O'Connor
-- License : BSD-style (see the file LICENSE)
-- The current ghc compiler is not capable of solving some tests within a reasonable time.
#define ONLY_SMALL_CASES 1
{-# LANGUAGE RankNTypes #-}
import Bind.Marshal.Prelude
import Bind.Marshal.Verify
import qualified Control.Monad
-- The type equations governing data models need to account for unifying data model transformations
-- from constraint sets that include monad constraints.
--
-- HWRLJKAL:FDJ
-- OK. How about this: The presence of a dynamic action cons implies that the action must be
-- executed in order to determine the marshalled byte size.
--
-- constrain on the Snoc between a dynamic buffer data model and a static buffer data model
#if 0
-- These type classes only provide documentation to the equations and some double-checking.
class StaticBufferAction sa
class DynamicBufferAction da
#endif
-- A data model consisting of a sequence of static buffer actions.
data SBCons a_0 sba_1
-- A data model consisting of a static buffer action sequence followed by any action.
-- this differs from the other ...Cons... forms in that the sb_0 is not a bare action tag but a
-- SBUnit or SBCons
data SBDCons sb_0 ba_1
-- A data model consisting of a dynamic buffer action followed by any action.
data DBDCons a_0 ba_1
-- presume whatever the dynamic action is we can use DARep to represent it.
data DummyAction
-- An empty data model is neither static or dynamic.
data DMNil
-- A data model consisting of a single static buffer action.
data SBUnit t
-- A data model consisting of a dynamic buffer action.
data DBUnit t
-- Examining some example cases.
--
-- dm_sync >> dm_sync
t_0 :: SBCons DummyAction (SBUnit DummyAction)
t_0 = undefined
-- dm_sync >> ( forM_ [1..4] $ \i -> dm_sync )
t_1 :: SBDCons (SBUnit DummyAction) -- for "dm_sync >>"
(DBUnit DummyAction) -- for iterated "\i -> dm_sync"
t_1 = undefined
-- forM_ [1..4] $ \i -> dm_sync
t_2 :: DBUnit DummyAction
t_2 = undefined
-- ( forM_ [1..4] $ \i -> dm_sync ) >> dm_sync
t_3 :: DBDCons DummyAction -- for iterated "\i -> dm_sync"
(SBUnit DummyAction)
t_3 = undefined
#if DEV_NONCOMPILE_SWITCH
-- Ideally I would want the following type expressions to not compile:
-- However I don't know how the SBDCons type could enforce the structure of the required types it is
-- paramterized with. Can type classes be applied in this case?
t_4 :: SBDCons (DBUnit DummyAction) (SBUnit DummyAction)
t_4 = undefined
#endif
-- An (uninhabited) action indexed by its data model and run state
-- before and after execution. This file is a type-level prototype:
-- only the types matter, so the instance methods are 'undefined'.
data DMAction tModel tRunState tModel' tRunState' a
instance Functor (DMAction tModel tRunState tModel' tRunState') where
    fmap = undefined
-- The DMApp type function governs the combining of two data models
-- when actions are sequenced: appending onto the empty model is the
-- identity, and static-buffer sequences are merged via SBApp.
type family DMApp dm a
type instance DMApp DMNil (SBUnit a_1) = SBUnit a_1
type instance DMApp DMNil (SBCons sa sb) = SBCons sa sb
-- XXX: type instance DMApp (SBUnit a_0) sb_as = SBCons a_0 sb_as
--
type instance DMApp (SBUnit a_0) (SBUnit a_1) = SBCons a_0 (SBUnit a_1)
type instance DMApp (SBCons a_1 sb_as_0) sb_as_1 = SBApp (SBCons a_1 sb_as_0) sb_as_1
-- Relational form of DMApp, used when one side of the append is not
-- yet known; instances below resolve the DMNil cases by unification.
class UnknownDMApp tModel_lhs tModel_rhs tModel
instance tModel_rhs ~ tModel => UnknownDMApp DMNil tModel_rhs tModel
instance tModel_lhs ~ tModel => UnknownDMApp tModel_lhs DMNil tModel
-- Somehow the rules governing sequencing need to be encoded into the bind instance...
-- Not really sure how that is possible. I though the UnknownDMApp would enable the introduction of
-- the rule inference.
instance ( UnknownDMApp tModel_0 implied_app1 tModel_1
         , UnknownDMApp tModel_2 implied_app2 tModel_3
         , DMApp tModel_1 implied_app2 ~ DMApp (DMApp tModel_0 implied_app1) implied_app2
         , tModel_0 ~ tModel_4
         , tModel_1 ~ tModel_2
         , tModel_3 ~ tModel_5
         ) => Bind (DMAction tModel_0 tRunState_0 tModel_1 tRunState_1)
                   (DMAction tModel_2 tRunState_2 tModel_3 tRunState_3)
                   (DMAction tModel_4 tRunState_4 tModel_5 tRunState_5)
    where
        (>>=) = undefined
instance Return (DMAction tModel tRunState tModel' tRunState') where
    returnM = undefined
sb_dummy :: ( tModel' ~ DMApp tModel (SBUnit DummyAction)
) => DMAction tModel () tModel' () ()
sb_dummy = returnM ()
t_sb_0 :: DMAction DMNil () (SBUnit DummyAction) () ()
t_sb_0 = sb_dummy
type family SBApp sb_as_0 sb_as_1
type instance SBApp (SBUnit a_0) sb_as_1 = SBCons a_0 sb_as_1
type instance SBApp (SBCons a_0 sb_as_0) sb_as_1 = SBCons a_0 (SBApp sb_as_0 sb_as_1)
t_sb_1 :: DMAction DMNil ()
(SBCons DummyAction (SBUnit DummyAction)) ()
()
t_sb_1 = sb_dummy >> sb_dummy
instance ( a_0 ~ a_2
, SBCons a_2 sb_as_1 ~ SBApp (SBUnit a_0) a_1
) => UnknownDMApp (SBUnit a_0) a_1 (SBCons a_2 sb_as_1)
t_sb_1_alt :: DMAction DMNil ()
(SBCons DummyAction (SBUnit DummyAction)) ()
()
t_sb_1_alt = do
sb_dummy
sb_dummy
t_sb_2 :: DMAction DMNil ()
(SBCons DummyAction (SBCons DummyAction (SBUnit DummyAction))) ()
()
t_sb_2 = sb_dummy >> sb_dummy >> sb_dummy
instance ( a_0 ~ a_1
, SBCons a_1 sb_as_2 ~ SBApp (SBCons a_0 sb_as_0) sb_as_1
) => UnknownDMApp (SBCons a_0 sb_as_0) sb_as_1 (SBCons a_1 sb_as_2)
t_sb_2_alt :: DMAction DMNil ()
(SBCons DummyAction (SBCons DummyAction (SBUnit DummyAction))) ()
()
t_sb_2_alt = do
sb_dummy
sb_dummy
sb_dummy
t_sb_3 :: DMAction DMNil ()
(SBCons DummyAction (SBCons DummyAction (SBCons DummyAction (SBUnit DummyAction)))) ()
()
t_sb_3 = sb_dummy >> sb_dummy >> sb_dummy >> sb_dummy
t_sb_3_alt :: DMAction DMNil ()
(SBCons DummyAction (SBCons DummyAction (SBCons DummyAction (SBUnit DummyAction)))) ()
()
t_sb_3_alt = do
sb_dummy
sb_dummy
sb_dummy
sb_dummy
#if !ONLY_SMALL_CASES
t_sb_4 = sb_dummy >> sb_dummy >> sb_dummy >> sb_dummy
>> sb_dummy >> sb_dummy >> sb_dummy
>> sb_dummy >> sb_dummy >> sb_dummy
>> sb_dummy >> sb_dummy >> sb_dummy
>> sb_dummy >> sb_dummy >> sb_dummy
>> sb_dummy >> sb_dummy >> sb_dummy
>> sb_dummy >> sb_dummy
#endif
-- Dynamic buffer actions should be introducable into the data model.
db_dummy :: ( tModel' ~ DBApp tModel (DBUnit DummyAction)
) => DMAction tModel () tModel' () ()
db_dummy = returnM ()
type family DBApp as_0 as_1
type instance DBApp DMNil dm_rhs = dm_rhs
-- Pair each test with a type that should not be possible for the data model.
#if !DEV_NONCOMPILE_SWITCH
t_db_0 :: DMAction DMNil () (DBUnit DummyAction) () ()
#else
t_db_0 :: DMAction DMNil () (SBUnit DummyAction) () ()
#endif
t_db_0 = db_dummy
t_db_0_alt :: DMAction DMNil () (DBUnit DummyAction) () ()
t_db_0_alt = do
db_dummy
type instance DMApp DMNil (DBUnit a_1) = DBUnit a_1
type instance DMApp DMNil (DBDCons da dm) = DBDCons da dm
-- A sequence of only dynamic buffer actions are treated identically to a sequence of static buffer
-- actions on the type level
t_db_1 :: DMAction DMNil () (DBDCons DummyAction (DBUnit DummyAction)) () ()
t_db_1 = db_dummy >> db_dummy
t_db_1_alt = do
db_dummy
db_dummy
type instance DBApp (DBUnit a_0) as_1 = DBDCons a_0 as_1
type instance DMApp (DBUnit a_0) (DBUnit a_1) = DBDCons a_0 (DBUnit a_1)
instance ( a_lhs ~ da_0
, DBDCons da_0 (DBUnit da) ~ DBApp (DBUnit a_0) db_rhs
, db_rhs ~ DBUnit da
) => UnknownDMApp (DBUnit a_lhs) db_rhs (DBDCons da_0 (DBUnit da))
t_db_2 :: DMAction DMNil () (DBDCons DummyAction (DBDCons DummyAction (DBUnit DummyAction))) () ()
t_db_2 = db_dummy >> db_dummy >> db_dummy
t_db_2_alt :: DMAction DMNil () (DBDCons DummyAction (DBDCons DummyAction (DBUnit DummyAction))) () ()
t_db_2_alt = do
db_dummy
db_dummy
db_dummy
type instance DMApp (DBDCons a_0 as_0) as_1 = DBApp (DBDCons a_0 as_0) as_1
type instance DBApp (DBDCons a_0 as_0) as_1 = DBDCons a_0 (DBApp as_0 as_1)
instance ( da_lhs ~ da
, DBDCons da dm ~ DBApp (DBDCons da_lhs dm_lhs) dm_rhs
) => UnknownDMApp (DBDCons da_lhs dm_lhs) dm_rhs (DBDCons da dm)
instance ( a_lhs ~ da_0
, DBDCons da_0 (DBDCons da_1 dm) ~ DBApp (DBUnit a_lhs) db_rhs
) => UnknownDMApp (DBUnit a_lhs) db_rhs (DBDCons da_0 (DBDCons da_1 dm))
t_db_3 :: DMAction DMNil () (DBDCons DummyAction (DBDCons DummyAction (DBDCons DummyAction (DBUnit DummyAction)))) () ()
t_db_3 = db_dummy >> db_dummy >> db_dummy >> db_dummy
t_db_3_alt :: DMAction DMNil () (DBDCons DummyAction (DBDCons DummyAction (DBDCons DummyAction (DBUnit DummyAction)))) () ()
t_db_3_alt = do
db_dummy
db_dummy
db_dummy
db_dummy
-- The goal is to group static buffer actions while persisting dynamic buffer actions and cons.
-- A dynamic buffer action preceeding a static buffer action.
#if !DEV_NONCOMPILE_SWITCH
t_db_sb_0 :: DMAction DMNil () (DBDCons DummyAction (SBUnit DummyAction)) () ()
#else
t_db_sb_0 :: DMAction DMNil () (SBDCons (DBUnit DummyAction) (SBUnit DummyAction)) () ()
#endif
t_db_sb_0 = db_dummy >> sb_dummy
type instance DMApp (DBUnit a_0) (SBUnit a_1) = DBDCons a_0 (SBUnit a_1)
instance ( a_lhs ~ da_0
, DBDCons da_0 (SBUnit sa) ~ DBApp (DBUnit a_0) sb_rhs
, sb_rhs ~ SBUnit sa
) => UnknownDMApp (DBUnit a_lhs) db_rhs (DBDCons da_0 (SBUnit sa))
-- A static buffer action preceeding a dynamic buffer action.
#if !DEV_NONCOMPILE_SWITCH
t_sb_db_0 :: DMAction DMNil () (SBDCons (SBUnit DummyAction) (DBUnit DummyAction)) () ()
#else
t_sb_db_0 :: DMAction DMNil () (DBDCons DummyAction (DBUnit DummyAction)) () ()
#endif
t_sb_db_0 = sb_dummy >> db_dummy
type instance DBApp (SBUnit a_0) (DBUnit a_1) = SBDCons (SBUnit a_0) (DBUnit a_1)
instance ( sa_lhs ~ sa
, SBDCons (SBUnit sa) (DBUnit da) ~ DBApp (SBUnit sa_lhs) db_rhs
, db_rhs ~ DBUnit da
) => UnknownDMApp (SBUnit sa_lhs) db_rhs (SBDCons (SBUnit sa) (DBUnit da))
t_db_sb_1 = sb_dummy >> db_dummy >> sb_dummy
type instance DBApp (SBUnit a_0) (SBUnit a_1) = SBCons a_0 (SBUnit a_1)
-- static buffer actions should be grouped into a SBCons
t_db_sb_2 :: DMAction DMNil () (DBDCons DummyAction (SBCons DummyAction (SBUnit DummyAction))) () ()
t_db_sb_2 = db_dummy >> sb_dummy >> sb_dummy
#if 0
instance ( a_lhs ~ da
, DBCons da (SBUnit sa) ~ DBApp (DBUnit a_lhs) sb_rhs
) => UnknownDMApp (SBUnit a_lhs) sb_rhs (SBCons da (SBUnit sa))
type instance DBApp (DBUnit a_0) (SBCons a_1 sb_1) = DBDCons a_0 (SBCons a_1 sb_1)
instance ( a_lhs ~ da
, DBDCons da (SBCons sa sbs) ~ DBApp (DBUnit a_lhs) sb_rhs
) => UnknownDMApp (DBUnit a_lhs) sb_rhs (DBDCons da (SBCons sa sbs))
instance ( a_0 ~ a_1
, as_0 ~ as_1
) => UnknownDMApp (SBCons a_0 as_0) (SBDCons (SBCons a_1 as_1) as_2)
-- t_sb_db_1 :: DMAction DMNil () (SBDCons (SBCons DummyAction (SBUnit DummyAction)) (DBUnit DummyAction)) () ()
t_sb_db_1 = sb_dummy >> sb_dummy >> db_dummy
#endif
main = run_test $ do
returnM () :: Test ()
| coreyoconnor/bind-marshal | test/old_verify_data_model_proto.hs | bsd-3-clause | 11,194 | 0 | 24 | 2,527 | 2,876 | 1,489 | 1,387 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Mismi.Autoscaling.Amazonka (
module AWS
) where
import Network.AWS.AutoScaling as AWS
| ambiata/mismi | mismi-autoscaling/src/Mismi/Autoscaling/Amazonka.hs | bsd-3-clause | 146 | 0 | 4 | 33 | 22 | 16 | 6 | 4 | 0 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ThemeSpec where
import Test.Hspec
import Graphics.HSD3.Chart
import Graphics.HSD3.Theme
import Utils
-- | Render sample charts for every theme. All fixtures reuse the same
-- 13-sample waveform, truncated to the length each chart needs
-- ('concat . repeat' in the original is exactly 'cycle').
spec :: Spec
spec = describe "Themes" $ do

    describe "Bar Graphs" $ do
        sample "The default theme bar graph"
            (wave 50)
            (ThemeChart def barGraph)
        sample "Colorful Banaani theme bar graph"
            (wave 50)
            (ThemeChart banaaniTheme barGraph)
        sample "Rainbow theme bar graph"
            (wave 50)
            (ThemeChart rainbowTheme barGraph)

    describe "Stacked Bar Graphs" $ do
        -- Five series of differing lengths, as in the original fixtures.
        let dat = map wave [50, 40, 20, 2, 14]
        sample "The default theme stacked bar graph"
            dat
            (ThemeChart def stackedBarGraph)
        sample "Colorful Banaani theme stacked bar graph"
            dat
            (ThemeChart banaaniTheme stackedBarGraph)
        sample "Rainbow theme stacked bar graph"
            dat
            (ThemeChart rainbowTheme stackedBarGraph)

    describe "Grid layouts" $ do
        -- Five grid cells, each holding a prefix (5,4,4,2,5 rows) of
        -- the same five-series group.
        let rows = map wave [50, 40, 30, 10, 50]
            dat = zipWith take [5, 4, 4, 2, 5] (replicate 5 rows)
        sample "The default theme grid bar graph"
            dat
            (ThemeChart def gridBarGraph)
        sample "Colorful Banaani theme grid bar graph"
            dat
            (ThemeChart banaaniTheme gridBarGraph)
        sample "Rainbow theme grid bar graph"
            dat
            (ThemeChart rainbowTheme gridBarGraph)

  where
    wave n = take n
        (cycle [1.0, 0.5, 0.25, 0.7, 0.9, 0.76, 0.2, 0.3, 0.1, 1.0, 0.6, 0.4, 0.8])
| Soostone/hs-d3 | test/suite/ThemeSpec.hs | bsd-3-clause | 3,102 | 0 | 19 | 1,062 | 1,038 | 598 | 440 | 56 | 1 |
module Examples.Blog where
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
import Foundation.Common
import Control.Applicative
import Data.List (intersperse)
import Data.IORef
-- Nav Bar {{{
-- | Top navigation bar: a right-aligned group of four placeholder links
-- next to the blog title and its subtitle, all inside a twelve-column row.
navBar :: IO Element
navBar = row12ColClass
  [ navBarRight
    [ ("Link 1","#")
    , ("Link 2","#")
    , ("Link 3","#")
    , ("Link 4","#")
    ]
  , UI.h1 #+
    [ string "Blog "
    , UI.small #+! string "This is my blog. It's awesome"
    ]
  ]
-- | A row that wraps each given element in its own twelve-column cell.
row12ColClass :: [IO Element] -> IO Element
row12ColClass contents = rowClass #+ [twelveColClass #+! c | c <- contents]

-- | A div spanning all twelve grid columns.
twelveColClass :: IO Element
twelveColClass = divClasses ["large-12","columns"]

-- | A right-aligned navigation area holding a button group built from
-- (label, url) pairs.
navBarRight :: [(String,String)] -> IO Element
navBarRight buttons = divClasses ["nav-bar","right"] #+ [buttonGroup buttons]

-- | A button-group list with one linked button per (label, url) pair.
buttonGroup :: [(String,String)] -> IO Element
buttonGroup buttons =
  UI.ul # set UI.class_ "button-group" #+ map (uncurry buttonLink) buttons
-- }}}
-- Article Entry {{{
-- | A single blog article: a linked title, a byline
-- (\"Written by <author> on <date>\"), a two-column row with paragraphs
-- beside a placeholder image, and further full-width paragraphs below.
--
-- Arguments, in order: title, author, date, paragraphs shown beside the
-- image, paragraphs shown below the image.
articleEntry :: String -> String -> String -> [String] -> [String]-> IO Element
articleEntry title author date besideImg belowImg = article #+ concat
  [ [ UI.h3 #+! link title "#"
    , UI.h6 #+
      [ string "Written by "
      , link author "#"
      , string $ " on " ++ date
      ]
    , row6ColClass
      [ map par besideImg
      , [ image "http://placehold.it/400x240&text=[img]" ]
      ]
    ]
  , map par belowImg
  ]
-- | A row whose cells are six-column divs, one per list of elements.
row6ColClass :: [[IO Element]] -> IO Element
row6ColClass cells = rowClass #+ [sixColClass #+ cell | cell <- cells]

-- | A div spanning six grid columns.
sixColClass :: IO Element
sixColClass = divClasses ["large-6","columns"]
-- }}}
-- Side Bar {{{
-- | Category side navigation plus a \"Featured\" panel containing the
-- given teaser text.
sideBar :: String -> IO Element
sideBar txt = threeColAside #+
  [ UI.h5 #+! string "Categories"
  , UI.ul # set UI.class_ "side-nav" #+
    listItems
    [ link "News" "#"
    , link "Code" "#"
    , link "Design" "#"
    , link "Fun" "#"
    , link "Weasels" "#"
    ]
  , panelClass #+
    [ UI.h5 #+! string "Featured"
    , par txt
    , link "Read More →" "#"
    ]
  ]

-- | A div with Foundation's \"panel\" styling.
panelClass :: IO Element
panelClass = divClass "panel"

-- | An aside constrained to three grid columns.
threeColAside :: IO Element
threeColAside = threeColClass aside

-- | Constrain any element to three grid columns.
threeColClass :: IO Element -> IO Element
threeColClass el = el # set classes ["large-3","columns"]
-- }}}
-- Footer {{{
-- | Page footer: a horizontal rule above a copyright line on the left and
-- an inline list of placeholder links on the right.
blogFooter :: IO Element
blogFooter = footer # set UI.class_ "row" #+
  [ twelveColClass #+
    [ UI.hr
    , row6ColClass $ map (:[])
      [ par "© Copyright no one at all. Go to town."
      , UI.ul # set classes ["inline-list","right"] #+
        listItems
        [ link "Link 1" "#"
        , link "Link 2" "#"
        , link "Link 3" "#"
        , link "Link 4" "#"
        ]
      ]
    ]
  ]
-- }}}
| kylcarte/threepenny-extras | src/Examples/Blog.hs | bsd-3-clause | 2,695 | 0 | 15 | 703 | 849 | 454 | 395 | 73 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main
( main
) where
import qualified Crypto.Hash.MD5 as MD5
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Crypto.Hash.SHA256 as SHA2
import qualified Crypto.Hash.SHA3 as SHA3
import qualified Crypto.Hash.Skein256 as Skein
import qualified Crypto.Hash.Tiger as Tiger
import Data.ByteString ( hGetSome )
import System.IO
import Criterion.Main
import qualified Crypto.Hash.Tsuraan.Blake2.Parallel as ParBlake2
import qualified Crypto.Hash.Tsuraan.Blake2.Serial as SerBlake2
import qualified Crypto.Hash.Tsuraan.Blake2 as Blake2
main :: IO ()
main = do
  -- Read random byte strings of increasing lengths; each (length, bytes)
  -- pair becomes one benchmark group below.
  -- NOTE(review): relies on /dev/urandom, so this benchmark is POSIX-only.
  strings <- withFile "/dev/urandom" ReadMode $ \handle -> do
    -- let lens = [1000,2000,3000,4000,5000,6000,7000,8000,9000,10000]
    let lens = [1,10,100,1000,3000,6000,10000,30000,60000,100000]
    ss <- mapM (hGetSome handle) lens
    return $ zip lens ss
  -- For each input size, compare the three Blake2 variants (32-byte
  -- digests) against the other hash implementations on the same input.
  defaultMain $
    [ bgroup (show sz)
      [ bench "Parallel" $ nf (ParBlake2.hash 32) st
      , bench "Sequential" $ nf (SerBlake2.hash 32) st
      , bench "Auto" $ nf (Blake2.hash 32) st
      , bench "MD5" $ nf MD5.hash st
      , bench "SHA1" $ nf SHA1.hash st
      , bench "SHA2" $ nf SHA2.hash st
      , bench "SHA3" $ nf (SHA3.hash 32) st
      , bench "Skein" $ nf (Skein.hash 32) st
      , bench "Tiger" $ nf Tiger.hash st
      ]
    | (sz, st) <- strings ]
| tsuraan/hs-blake2 | benches/BenchAll.hs | bsd-3-clause | 1,415 | 0 | 15 | 343 | 440 | 248 | 192 | 33 | 1 |
-- | Contains the main functions for interfacing with the engine.
-- This can be thought of Helm's own Prelude.
module Helm
(
-- * Types
Cmd(..)
, Engine
, GameLifecycle(..)
, GameConfig(..)
, Graphics(..)
, Image
, FPSLimit(..)
, Sub(..)
-- * Engine
, run
, defaultConfig
) where
import Control.Concurrent
import Control.Exception (finally)
import Control.Monad (foldM, void, (>=>))
import Control.Monad.Trans.State.Lazy (runStateT)
import FRP.Elerea.Param (start, embed)
import Helm.Asset (Image)
import Helm.Engine (Cmd(..), Sub(..), Game(..), GameConfig(..), GameLifecycle(..), Engine(..), FPSLimit(..), defaultConfig)
import Helm.Graphics
-- | The context of an engine running a game.
-- This is used to track the connection of an engine's state to a game state.
data EngineContext e m a = EngineContext e (Game e m a)

-- | Runs a Helm game using an engine and some configuration for a game.
-- An engine should first be initialized separately to Helm, and then passed
-- to this function. Helm is written this way so that library users can
-- choose what backend engine they want to use (and hence Helm is engine-agnostic).
--
-- The best engine to get started with is the SDL implementation of Helm,
-- which is currently bundled with the engine (although it will eventually be moved
-- to its own package). See 'Helm.Engine.SDL.startup' for how
-- to startup the SDL engine, which can then be run by this function.
run
  :: Engine e
  => e                   -- ^ The engine to use to run the game.
  -> GameConfig          -- ^ Engine-independent settings (FPS and update limits).
  -> GameLifecycle e m a -- ^ The configuration for running the game.
  -> IO ()               -- ^ An IO monad that blocks the main thread until the engine quits.
run engine config lifecycle@GameLifecycle { initialFn, subscriptionsFn = Sub sigGen } = void $ do
  {- The call to 'embed' here is a little bit hacky, but seems necessary
     to get this working. This is because 'start' actually computes the signal
     gen passed to it, and all of our signal gens try to fetch
     the 'input' value within the top layer signal gen (rather than in the
     contained signal). But we haven't sampled with the input value yet, so it'll
     be undefined unless we 'embed'. -}
  smp <- start $ embed (return engine) sigGen

  -- Setup the initial engine context and perform the initial game step,
  -- i.e. run the initial command before entering the main loop.
  ctx@(EngineContext engine_ _) <- flip stepCmd (snd initialFn) $ EngineContext engine Game
    { gameConfig = config
    , gameLifecycle = lifecycle
    , gameModel = fst initialFn
    , dirtyModel = True
    , actionSmp = smp
    , lastRender = 0
    , updateCount = 0
    }

  -- Loop until the engine quits, making sure its resources are released.
  step ctx `finally` cleanup engine_
-- | Step the engine context forward.  One full step = process pending
-- updates, render (if dirty), then delay to honour the FPS limit; recurses
-- until the engine signals a quit (Nothing).
step
  :: Engine e
  => EngineContext e m a              -- ^ The engine context to step forward.
  -> IO (Maybe (EngineContext e m a)) -- ^ An IO monad that produces the stepped engine context.
step = updateStep >=> maybe (return Nothing) (renderStep >=> delayWithinFPSLimit >=> step)

-- | Continuously steps the engine context, processing all available actions,
-- while the update limit has not been reached; Nothing in case the engine quits.
updateStep
  :: Engine e
  => EngineContext e m a              -- ^ The engine context to step forward.
  -> IO (Maybe (EngineContext e m a)) -- ^ An IO monad that produces the engine context stepped with maximum allowed ticks and action.
updateStep = stepTick >=> maybe (return Nothing) continueUpdateWithinLimit

-- | Proceed to the next updateStep if the update limit is not reached yet
-- (resetting the counter once it is); returns Nothing in case the engine quits.
continueUpdateWithinLimit
  :: Engine e
  => EngineContext e m a              -- ^ The engine context to step forward.
  -> IO (Maybe (EngineContext e m a)) -- ^ An IO monad that produces the engine context stepped with maximum allowed ticks and action.
continueUpdateWithinLimit context = if updateCount >= updateLimit
  then return (Just (EngineContext engine game { updateCount = 0 }))
  else updateStep (EngineContext engine game { updateCount = updateCount + 1 })
  where (EngineContext engine game@Game { gameConfig = GameConfig { updateLimit }, updateCount }) = context

-- | Step the engine context forward with all game actions available;
-- returns Nothing in case the engine quits (i.e. 'tick' yields Nothing).
stepTick
  :: Engine e
  => EngineContext e m a              -- ^ The engine context to step forward.
  -> IO (Maybe (EngineContext e m a)) -- ^ An IO monad that produces the engine context stepped with the action.
stepTick (EngineContext engine game) = tick engine >>= maybe (return Nothing) (\engine_ -> Just <$> (stepActions game engine_))
-- | Step the engine forward with all game actions available, sampling the
-- pending actions from the engine and folding them into the context.
stepActions :: Engine e
  => Game e m a               -- ^ The game state to step forward.
  -> e                        -- ^ The (already ticked) engine to sample actions from.
  -> IO (EngineContext e m a) -- ^ An IO monad that produces the engine context stepped with all actions available.
stepActions game@Game { actionSmp } engine = (actionSmp engine) >>= foldM stepAction (EngineContext engine game)

-- | Step the engine context forward with a specific game action.
stepAction
  :: Engine e
  => EngineContext e m a      -- ^ The engine context to step forward.
  -> a                        -- ^ The action to step the engine context with.
  -> IO (EngineContext e m a) -- ^ An IO monad that produces the engine context stepped with the action.
stepAction (EngineContext engine game@Game { gameModel, gameLifecycle = GameLifecycle { updateFn } }) action =
  stepCmd ctx cmd
  where
    (updatedModel, cmd) = updateFn gameModel action

    -- Mark the game as dirty and adjust the new game model
    ctx = EngineContext engine $ game
      { dirtyModel = True
      , gameModel = updatedModel
      }

-- | Step the engine context forward with a specific command.
-- This will recursively call 'stepAction' with any actions
-- that are produced by the command.
stepCmd
  :: Engine e
  => EngineContext e m a      -- ^ The engine context to step forward.
  -> Cmd e a                  -- ^ The command to step the engine context with.
  -> IO (EngineContext e m a) -- ^ An IO monad that produces the engine context stepped with the command.
stepCmd (EngineContext engine game) (Cmd monad) = do
  (actions, engine_) <- runStateT monad engine

  -- Step any actions returned from the command
  foldM stepAction (EngineContext engine_ game) actions
-- | Renders the model if needed, i.e. only when it was marked dirty.
renderStep :: Engine e => EngineContext e m a -> IO (EngineContext e m a)
renderStep = skipWhenDirty Helm.render

-- | Throttle an operation based on the game model's dirtiness: run it only
-- when the model changed since the last render, otherwise pass the context
-- through unchanged.
skipWhenDirty :: Engine e => (EngineContext e m a -> IO (EngineContext e m a)) -> EngineContext e m a -> IO (EngineContext e m a)
skipWhenDirty operation context@(EngineContext _ game) = if (dirtyModel game)
  then operation context
  else return context

-- | Renders the context and resets the game model's dirty flag.
render :: Engine e => EngineContext e m a -> IO (EngineContext e m a)
render (EngineContext engine game@Game { gameModel, gameLifecycle = GameLifecycle { viewFn } }) = do
  Helm.Engine.render engine $ viewFn gameModel

  return (EngineContext engine game { dirtyModel = False })

-- | Delays the thread to satisfy the FPS limit and records when the last
-- frame was rendered.
delayWithinFPSLimit :: Engine e => EngineContext e m a -> IO (EngineContext e m a)
delayWithinFPSLimit context = do
  runningTime engine >>= delayIfNeeded fpsLimit lastRender

  runningTime engine >>= \currentTime -> return (EngineContext engine game { lastRender = currentTime })
  where EngineContext engine game@Game { gameConfig = GameConfig { fpsLimit = fpsLimit }, lastRender } = context

-- | Delays the thread to satisfy the FPS limit requirement.
delayIfNeeded
  :: FPSLimit -- ^ FPS limit setting.
  -> Double   -- ^ Last time when frame was rendered.
  -> Double   -- ^ Current time.
  -> IO ()    -- ^ An IO monad that delays the main thread to ensure that frames are not rendered faster than limit allows.
delayIfNeeded Unlimited _ _ = return ()
delayIfNeeded (Limited fpsLimit) lastRender currentTime = if delay > 0
  then threadDelay delay
  else putStrLn "Warning: FPS degradation. You may want to tune your update or FPS limits."
  where
    -- threadDelay takes microseconds; the 1e6 factor suggests 'runningTime'
    -- is in microseconds too — NOTE(review): confirm against the Engine class.
    microsecondsLimitPerFrame = ceiling $ 1000000.0 / (fromIntegral fpsLimit)
    microsecondsForLastFrame = ceiling $ currentTime - lastRender
    delay = microsecondsLimitPerFrame - microsecondsForLastFrame
| switchface/helm | src/Helm.hs | mit | 8,593 | 0 | 14 | 1,944 | 1,674 | 902 | 772 | -1 | -1 |
{-# LANGUAGE ViewPatterns #-}
module Builder.Sequence(newCornerPointsWith10DegreesBuilder, newCornerPointsWith5DegreesBuilder, newCornerPointsWithDegreesBuilder,
(||@~+++^||), (@~+++#@|>), (@~+++@|>)) where
import CornerPoints.CornerPointsWithDegrees(DegreeRange(..), CornerPointsWithDegrees(..), (@~+++#@), (|@~+++#@|), (|@~+++@|),
newCornerPointsWith10DegreesList, newCornerPointsWith5DegreesList, newCornerPointsWithDegreesList)
import CornerPoints.CornerPoints(CornerPoints(..))
import Stl.StlBase(Triangle(..))
import Stl.StlCornerPoints((+++^), Faces(..))
import Stl.StlCornerPointsWithDegrees( (|@~?+++^|), FacesWithRange(..), {-(||@~?+++^||)-}cornerPointsWithDegreesToTriangles)
import qualified Data.Sequence as S
import qualified Data.Foldable as F
--make type signatures more readable
--degrees from right to left of a cube. Eg: Each radius is 5 degrees apart resulting in a cube with a DegreeSpread 5
type DegreeSpread = Double
data CornerPointsWithDegreesList = CornerPointsWithDegreesList {cornerPointsWithDegreesList::[CornerPointsWithDegrees]}
-- | Return the rightmost element of a sequence, i.e. the most recently
-- added layer in a builder sequence.
--
-- Calling this on an empty sequence is a programming error in the builder
-- pipeline, so fail with a descriptive message instead of the opaque
-- pattern-match failure the previous ViewPatterns version produced.
x :: (S.Seq a) -> a
x s = case S.viewr s of
  _ S.:> lastElem -> lastElem
  S.EmptyR        -> error "Builder.Sequence.x: empty sequence"
{- |
Apply (|@~+++@| cornerPointsList) to the layer at the right end of the
sequence and append the result as a new rightmost layer.
-}
(@~+++@|>) :: (S.Seq [CornerPointsWithDegrees]) -> [CornerPoints] -> (S.Seq [CornerPointsWithDegrees])
cornerPointsSeq @~+++@|> cornerPointsList =
  cornerPointsSeq S.|> ((x cornerPointsSeq) |@~+++@| cornerPointsList)

{- |
Apply (|@~+++#@| f) to the layer at the right end of the sequence and
append the result as a new rightmost layer.
-}
(@~+++#@|>) :: (S.Seq [CornerPointsWithDegrees]) -> (CornerPoints -> CornerPoints) -> (S.Seq [CornerPointsWithDegrees])
cornerPointsSeq @~+++#@|> f =
  cornerPointsSeq S.|> ((x cornerPointsSeq) |@~+++#@| f)

{- |
Process a shape made up of Seq [CornerPointsWithDegrees] into stl [Triangle]'s
for output to an stl file, pairing each layer with its faces description.
-}
(||@~+++^||) :: (S.Seq[CornerPointsWithDegrees]) -> [[FacesWithRange]] -> [Triangle]
cornerPointsWithDegreesSeq ||@~+++^|| facesWithRangeList = concat $
  zipWith cornerPointsWithDegreesToTriangles (F.toList cornerPointsWithDegreesSeq) facesWithRangeList
{- |
Used by numerous infix functions such as (&@~+++#@) for building up a [[CornerPointsWithDegrees]].
Each layer of a stl shape is made up of [CornerPointsWithDegrees].
This Builder allows these layers to be built up, by adding another top/bottom face to the top of the
Builder list.
The 10 indicates it is based on a 10 degree interval of the radial shape.
Eg: A scan that is taken at 10 degree intervals such as 0,10..360
-}
--ToDo: get rid of in favor of newCornerPointsWithDegreesBuilder
newCornerPointsWith10DegreesBuilder :: [CornerPoints] -> (S.Seq [CornerPointsWithDegrees])
newCornerPointsWith10DegreesBuilder cornerPoints = S.singleton $ newCornerPointsWith10DegreesList cornerPoints

{- |
Same as 'newCornerPointsWith10DegreesBuilder', but based on a 5 degree
interval of the radial shape.
Eg: A scan that is taken at 5 degree intervals such as 0,5..360
-}
--ToDo: get rid of in favor of newCornerPointsWithDegreesBuilder
newCornerPointsWith5DegreesBuilder :: [CornerPoints] -> (S.Seq [CornerPointsWithDegrees])
newCornerPointsWith5DegreesBuilder cornerPoints = S.singleton $ newCornerPointsWith5DegreesList cornerPoints

{- |
Do the equivalent of newCornerPointsWith5(or 10)DegreesBuilder, but pass in
the degree spread instead of hard-coding it.
-}
--ToDo: write tests.
newCornerPointsWithDegreesBuilder :: DegreeSpread -> [CornerPoints] -> (S.Seq [CornerPointsWithDegrees])
newCornerPointsWithDegreesBuilder spread cornerPoints = S.singleton $ newCornerPointsWithDegreesList spread cornerPoints
| heathweiss/Tricad | src/Builder/Sequence.hs | gpl-2.0 | 4,071 | 0 | 9 | 575 | 577 | 343 | 234 | 30 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.OpsWorks.UpdateRDSDBInstance
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an Amazon RDS instance.
--
-- __Required Permissions__: To use this action, an IAM user must have a
-- Manage permissions level for the stack, or an attached policy that
-- explicitly grants permissions. For more information on user permissions,
-- see
-- <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing User Permissions>.
--
-- /See:/ <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_UpdateRDSDBInstance.html AWS API Reference> for UpdateRDSDBInstance.
module Network.AWS.OpsWorks.UpdateRDSDBInstance
(
-- * Creating a Request
updateRDSDBInstance
, UpdateRDSDBInstance
-- * Request Lenses
, urdiDBUser
, urdiDBPassword
, urdiRDSDBInstanceARN
-- * Destructuring the Response
, updateRDSDBInstanceResponse
, UpdateRDSDBInstanceResponse
) where
import Network.AWS.OpsWorks.Types
import Network.AWS.OpsWorks.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'updateRDSDBInstance' smart constructor.
data UpdateRDSDBInstance = UpdateRDSDBInstance'
    { _urdiDBUser           :: !(Maybe Text) -- ^ The master user name.
    , _urdiDBPassword       :: !(Maybe Text) -- ^ The database password.
    , _urdiRDSDBInstanceARN :: !Text         -- ^ The Amazon RDS instance's ARN.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'UpdateRDSDBInstance' with the minimum fields required to make a request.
-- The two optional fields default to 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'urdiDBUser'
--
-- * 'urdiDBPassword'
--
-- * 'urdiRDSDBInstanceARN'
updateRDSDBInstance
    :: Text -- ^ 'urdiRDSDBInstanceARN'
    -> UpdateRDSDBInstance
updateRDSDBInstance pRDSDBInstanceARN_ =
    UpdateRDSDBInstance'
    { _urdiDBUser = Nothing
    , _urdiDBPassword = Nothing
    , _urdiRDSDBInstanceARN = pRDSDBInstanceARN_
    }

-- | The master user name.
urdiDBUser :: Lens' UpdateRDSDBInstance (Maybe Text)
urdiDBUser = lens _urdiDBUser (\ s a -> s{_urdiDBUser = a});

-- | The database password.
urdiDBPassword :: Lens' UpdateRDSDBInstance (Maybe Text)
urdiDBPassword = lens _urdiDBPassword (\ s a -> s{_urdiDBPassword = a});

-- | The Amazon RDS instance\'s ARN.
urdiRDSDBInstanceARN :: Lens' UpdateRDSDBInstance Text
urdiRDSDBInstanceARN = lens _urdiRDSDBInstanceARN (\ s a -> s{_urdiRDSDBInstanceARN = a});

-- The operation returns no payload, hence the empty 'receiveNull' parser.
instance AWSRequest UpdateRDSDBInstance where
        type Rs UpdateRDSDBInstance =
             UpdateRDSDBInstanceResponse
        request = postJSON opsWorks
        response = receiveNull UpdateRDSDBInstanceResponse'

-- The target operation is selected via the X-Amz-Target header
-- (AWS JSON 1.1 protocol).
instance ToHeaders UpdateRDSDBInstance where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("OpsWorks_20130218.UpdateRdsDbInstance" ::
                       ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])

-- Optional fields are omitted from the JSON body when they are 'Nothing'.
instance ToJSON UpdateRDSDBInstance where
        toJSON UpdateRDSDBInstance'{..}
          = object
              (catMaybes
                 [("DbUser" .=) <$> _urdiDBUser,
                  ("DbPassword" .=) <$> _urdiDBPassword,
                  Just ("RdsDbInstanceArn" .= _urdiRDSDBInstanceARN)])

instance ToPath UpdateRDSDBInstance where
        toPath = const "/"

instance ToQuery UpdateRDSDBInstance where
        toQuery = const mempty

-- | /See:/ 'updateRDSDBInstanceResponse' smart constructor.
data UpdateRDSDBInstanceResponse =
    UpdateRDSDBInstanceResponse'
    deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'UpdateRDSDBInstanceResponse' with the minimum fields required to make a request.
--
updateRDSDBInstanceResponse
    :: UpdateRDSDBInstanceResponse
updateRDSDBInstanceResponse = UpdateRDSDBInstanceResponse'
| fmapfmapfmap/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/UpdateRDSDBInstance.hs | mpl-2.0 | 4,459 | 0 | 12 | 937 | 568 | 341 | 227 | 78 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.DeleteSSHPublicKey
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified SSH public key.
--
-- The SSH public key deleted by this action is used only for
-- authenticating the associated IAM user to an AWS CodeCommit repository.
-- For more information about using SSH keys to authenticate to an AWS
-- CodeCommit repository, see
-- <http://docs.aws.amazon.com/codecommit/latest/userguide/setting-up-credentials-ssh.html Set up AWS CodeCommit for SSH Connections>
-- in the /AWS CodeCommit User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_DeleteSSHPublicKey.html AWS API Reference> for DeleteSSHPublicKey.
module Network.AWS.IAM.DeleteSSHPublicKey
(
-- * Creating a Request
deleteSSHPublicKey
, DeleteSSHPublicKey
-- * Request Lenses
, dspkUserName
, dspkSSHPublicKeyId
-- * Destructuring the Response
, deleteSSHPublicKeyResponse
, DeleteSSHPublicKeyResponse
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'deleteSSHPublicKey' smart constructor.
data DeleteSSHPublicKey = DeleteSSHPublicKey'
    { _dspkUserName       :: !Text -- ^ The IAM user associated with the key.
    , _dspkSSHPublicKeyId :: !Text -- ^ The unique identifier of the key.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'DeleteSSHPublicKey' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dspkUserName'
--
-- * 'dspkSSHPublicKeyId'
deleteSSHPublicKey
    :: Text -- ^ 'dspkUserName'
    -> Text -- ^ 'dspkSSHPublicKeyId'
    -> DeleteSSHPublicKey
deleteSSHPublicKey pUserName_ pSSHPublicKeyId_ =
    DeleteSSHPublicKey'
    { _dspkUserName = pUserName_
    , _dspkSSHPublicKeyId = pSSHPublicKeyId_
    }

-- | The name of the IAM user associated with the SSH public key.
dspkUserName :: Lens' DeleteSSHPublicKey Text
dspkUserName = lens _dspkUserName (\ s a -> s{_dspkUserName = a});

-- | The unique identifier for the SSH public key.
dspkSSHPublicKeyId :: Lens' DeleteSSHPublicKey Text
dspkSSHPublicKeyId = lens _dspkSSHPublicKeyId (\ s a -> s{_dspkSSHPublicKeyId = a});

-- The operation returns no payload, hence the empty 'receiveNull' parser.
instance AWSRequest DeleteSSHPublicKey where
        type Rs DeleteSSHPublicKey =
             DeleteSSHPublicKeyResponse
        request = postQuery iAM
        response = receiveNull DeleteSSHPublicKeyResponse'

instance ToHeaders DeleteSSHPublicKey where
        toHeaders = const mempty

instance ToPath DeleteSSHPublicKey where
        toPath = const "/"

-- IAM uses the AWS query protocol: the operation name, API version and the
-- request fields are all encoded as query parameters.
instance ToQuery DeleteSSHPublicKey where
        toQuery DeleteSSHPublicKey'{..}
          = mconcat
              ["Action" =: ("DeleteSSHPublicKey" :: ByteString),
               "Version" =: ("2010-05-08" :: ByteString),
               "UserName" =: _dspkUserName,
               "SSHPublicKeyId" =: _dspkSSHPublicKeyId]

-- | /See:/ 'deleteSSHPublicKeyResponse' smart constructor.
data DeleteSSHPublicKeyResponse =
    DeleteSSHPublicKeyResponse'
    deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'DeleteSSHPublicKeyResponse' with the minimum fields required to make a request.
--
deleteSSHPublicKeyResponse
    :: DeleteSSHPublicKeyResponse
deleteSSHPublicKeyResponse = DeleteSSHPublicKeyResponse'
| fmapfmapfmap/amazonka | amazonka-iam/gen/Network/AWS/IAM/DeleteSSHPublicKey.hs | mpl-2.0 | 3,948 | 0 | 9 | 758 | 445 | 272 | 173 | 63 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>Aturan Aktif Pemindai | ZAP Eksistensi</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Konten</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Pencarian</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorit</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/ascanrules/src/main/javahelp/org/zaproxy/zap/extension/ascanrules/resources/help_id_ID/helpset_id_ID.hs | apache-2.0 | 984 | 85 | 52 | 161 | 400 | 211 | 189 | -1 | -1 |
{-# LANGUAGE TupleSections #-}
module PatHOCon where
import CLaSH.Prelude
-- | Pair every element of the input vector with the constant 4; exercises
-- a tuple section as a higher-order argument to 'map'.
topEntity :: Vec 8 (Unsigned 8) -> Vec 8 (Unsigned 4,Unsigned 8)
topEntity = map (4,)
| christiaanb/clash-compiler | tests/shouldwork/Vector/PatHOCon.hs | bsd-2-clause | 162 | 0 | 8 | 27 | 59 | 32 | 27 | -1 | -1 |
{-# OPTIONS_HADDOCK hide #-}
module Foundation.System.Bindings.Windows
where
| vincenthz/hs-foundation | foundation/Foundation/System/Bindings/Windows.hs | bsd-3-clause | 82 | 0 | 3 | 12 | 9 | 7 | 2 | 2 | 0 |
module Solver
(Solver,solvers)
where
import Common
import qualified Vector as SV
import qualified Vectorised as SPA
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as VU
import qualified Data.Array.Parallel as P
import qualified Data.Array.Parallel.PArray as P
-- | A solver: given vertex positions, triangles (three vertex indices plus
-- a colour each), the rays to cast and the current time, produce one
-- colour per ray.
type Solver = VU.Vector Vec3 -> VU.Vector (Int,Int,Int,Colour) -> VU.Vector Vec3 -> Double -> VU.Vector Colour

-- | All available solver implementations, keyed by name.
solvers :: [(String,Solver)]
solvers =
 [("vectorised", solverPA)
 ,("vector", SV.solveV)]

-- Adapt the DPH (vectorised) solver to the common 'Solver' interface by
-- converting the unboxed input vectors to parallel arrays and converting
-- the resulting colours back.
solverPA verts tris rays time
 = tu3 (SPA.solvePA (fu3 verts) (fu4_3 tris) (fu3 rays) time)

-- unboxed array -> parallel array
fu as = P.fromUArray as

-- vector of 3-tuples -> parallel array of 3-tuples (via unzip3/zip3)
fu3 as
 = let (xs,ys,zs) = VU.unzip3 as
   in P.zip3 (fu xs) (fu ys) (fu zs)

-- vector of 4-tuples whose last component is itself a 3-tuple
-- -> parallel array of the same shape
fu4_3 as
 = let (xs,ys,zs,bs) = VU.unzip4 as
   in P.zip4 (fu xs) (fu ys) (fu zs) (fu3 bs)

-- parallel array -> unboxed array
tu as = P.toUArray as

-- parallel array of 3-tuples -> vector of 3-tuples
tu3 as
 = let (xs,ys,zs) = P.unzip3 as
   in VU.zip3 (tu xs) (tu ys) (tu zs)
| mainland/dph | dph-examples/examples/spectral/Pluecker/Solver.hs | bsd-3-clause | 967 | 4 | 10 | 249 | 429 | 235 | 194 | 27 | 1 |
-- Spark pool and fishing
--
-- Visibility: HdpH, HdpH.Internal
-- Author: Patrick Maier <P.Maier@hw.ac.uk>
-- Created: 02 Jul 2011
--
-----------------------------------------------------------------------------
{-# LANGUAGE ScopedTypeVariables #-} -- req'd for type annotations
module Control.Parallel.HdpH.Internal.Sparkpool
( -- * spark pool monad
SparkM, -- synonym: SparkM m = ReaderT <State m> CommM
run, -- :: RTSConf -> ActionServer -> Sem -> SparkM m a -> CommM a
liftCommM, -- :: Comm a -> SparkM m a
liftIO, -- :: IO a -> SparkM m a
-- * blocking and unblocking idle schedulers
blockSched, -- :: SparkM m ()
wakeupSched, -- :: Int -> SparkM m ()
-- * local (ie. scheduler) access to spark pool
getFishingFlag,
clearFishingFlag,
waitingFishingReplyFrom,
getSpark, -- :: Int -> SparkM m (Maybe (Spark m))
putSpark, -- :: Int -> Spark m -> SparkM m ()
-- * Allow scheduler to pop guard post to sparkpool on 'DEADNODE' message
popGuardPostToSparkpool,
-- * messages
Msg(..), -- instances: Show, NFData, Serialize
-- * handle messages related to fishing
dispatch, -- :: Msg m a -> SparkM m ()
handleFISH, -- :: Msg m a -> SparkM m ()
handleSCHEDULE, -- :: Msg m a -> SparkM m ()
handleNOWORK, -- :: Msg m a -> SparkM m ()
-- * access to stats data
readPoolSize, -- :: SparkM m Int
readFishSentCtr, -- :: SparkM m Int
readSparkRcvdCtr, -- :: SparkM m Int
readMaxSparkCtr, -- :: SparkM m Int
readSparkGenCtr, -- :: SparkM m Int
readSparkConvCtr, -- :: SparkM m Int
-- * access to fault recovery stats data
incCtr,
getSparkRecCtr,
getThreadRecCtr,
getTaskNotRecCtr,
readSparkRec,
readThreadRec,
readTaskNotRec,
) where
import Prelude hiding (error)
import Control.Concurrent (threadDelay)
import Control.Monad (unless, when, replicateM_, void)
import Control.Monad.Reader (ReaderT, runReaderT, ask)
import Control.Monad.Trans (lift)
import Data.Functor ((<$>))
import Data.IORef (IORef, newIORef, readIORef, writeIORef, atomicModifyIORef)
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set (size, singleton, notMember)
import System.Random (randomRIO)
import Control.Parallel.HdpH.Conf
(RTSConf(maxHops,maxFish, minSched, minFishDly, maxFishDly))
import Control.Parallel.HdpH.Internal.Comm (CommM)
import qualified Control.Parallel.HdpH.Internal.Comm as Comm
(liftIO, send, nodes, myNode, allNodes)
import Control.Parallel.HdpH.Internal.Data.Deque
(DequeIO, emptyIO, pushBackIO, popFrontIO, popBackIO,
lengthIO, maxLengthIO)
import Control.Parallel.HdpH.Internal.Data.Sem (Sem)
import qualified Control.Parallel.HdpH.Internal.Data.Sem as Sem (wait, signal)
import Control.Parallel.HdpH.Internal.Location (NodeId, dbgMsgSend, dbgSpark, dbgFailure, error)
import qualified Control.Parallel.HdpH.Internal.Location as Location (debug)
import Control.Parallel.HdpH.Internal.Misc (encodeLazy, ActionServer, reqAction)
import Control.Parallel.HdpH.Internal.Type.Msg
import Control.Parallel.HdpH.Internal.Type.Par
import Control.Parallel.HdpH.Internal.IVar
import Control.Parallel.HdpH.Closure (Closure,unClosure)
import Control.Parallel.HdpH.Internal.Type.GRef (atT)
-----------------------------------------------------------------------------
-- SparkM monad
-- 'SparkM m' is a reader monad sitting on top of the 'CommM' monad;
-- the parameter 'm' abstracts a monad (cf. module Control.Parallel.Control.Parallel.HdpH.Internal.Type.Par).
-- | The spark-pool monad: a reader carrying the mutable pool 'State' on
-- top of the communication monad 'CommM'.
type SparkM m = ReaderT (State m) CommM

-- | The guard post holds at most one spark awaiting scheduling authorisation.
type GuardPost m = Maybe (OccupiedGuardPost m)

data OccupiedGuardPost m = OccupiedGuardPost
  {
    guardedSpark :: Closure (SupervisedSpark m)
  , destinedFor :: NodeId -- not strictly needed; sanity check.
  }

-- spark pool state (mutable bits held in IORefs and the like)
data State m =
  State {
    s_conf       :: RTSConf,              -- config data
    s_pool       :: DequeIO (Task m),     -- actual spark pool
    s_guard_post
                 :: IORef (GuardPost m),  -- spark waiting for scheduling authorisation
    s_sparkOrig  :: IORef (Maybe NodeId), -- origin of most recent spark recvd
    s_fishing    :: IORef (Maybe NodeId), -- node a FISH was sent to, if reply outstanding
    s_noWork     :: ActionServer,         -- for clearing "FISH outstanding" flag
    s_idleScheds :: Sem,                  -- semaphore for idle schedulers
    s_fishSent   :: IORef Int,            -- #FISH messages sent
    s_sparkRcvd  :: IORef Int,            -- #sparks received
    s_sparkGen   :: IORef Int,            -- #sparks generated
    s_sparkConv  :: IORef Int,            -- #sparks converted
    s_sparkRec   :: IORef Int,            -- #sparks recovered (fault recovery)
    s_threadRec  :: IORef Int,            -- #threads recovered (fault recovery)
    s_taskNotRec :: IORef Int }           -- #tasks not recovered (fault recovery)
-- Eliminates the 'SparkM' layer by executing the given 'SparkM' action on
-- an empty spark pool; expects a config data, an action server (for
-- clearing "FISH outstanding" flag) and a semaphore (for idle schedulers).
-- Eliminates the 'SparkM' layer by executing the given 'SparkM' action on
-- an empty spark pool; expects a config data, an action server (for
-- clearing "FISH outstanding" flag) and a semaphore (for idle schedulers).
run :: RTSConf -> ActionServer -> Sem -> SparkM m a -> CommM a
run conf noWorkServer idleSem action = do
  -- set up spark pool state: empty pool/guard post, all counters at 0
  pool      <- Comm.liftIO emptyIO
  guardPost <- Comm.liftIO $ newIORef Nothing
  sparkOrig <- Comm.liftIO $ newIORef Nothing
  fishing   <- Comm.liftIO $ newIORef Nothing
  fishSent  <- Comm.liftIO $ newIORef 0
  sparkRcvd <- Comm.liftIO $ newIORef 0
  sparkGen  <- Comm.liftIO $ newIORef 0
  sparkConv <- Comm.liftIO $ newIORef 0
  sparkRec  <- Comm.liftIO $ newIORef 0
  threadRec <- Comm.liftIO $ newIORef 0
  taskNotRec <- Comm.liftIO $ newIORef 0
  let s0 = State { s_conf       = conf,
                   s_pool       = pool,
                   s_guard_post = guardPost,
                   s_sparkOrig  = sparkOrig,
                   s_fishing    = fishing,
                   s_noWork     = noWorkServer,
                   s_idleScheds = idleSem,
                   s_fishSent   = fishSent,
                   s_sparkRcvd  = sparkRcvd,
                   s_sparkGen   = sparkGen,
                   s_sparkConv  = sparkConv,
                   s_sparkRec   = sparkRec,
                   s_threadRec  = threadRec,
                   s_taskNotRec = taskNotRec }
  -- run monad
  runReaderT action s0
-- Lifting lower layers.

-- | Lift a 'CommM' action into the 'SparkM' monad.
liftCommM :: CommM a -> SparkM m a
liftCommM comm = lift comm

-- | Lift an 'IO' action into the 'SparkM' monad (via 'CommM').
liftIO :: IO a -> SparkM m a
liftIO io = liftCommM (Comm.liftIO io)
-----------------------------------------------------------------------------
-- access to state
-- | local spark pool
getPool :: SparkM m (DequeIO (Task m))
getPool = fmap s_pool ask

-- | IORef holding the guard post
getGuardPost :: SparkM m (IORef (GuardPost m))
getGuardPost = fmap s_guard_post ask
-- | adds a spark to the guard post; it is an error if the post is
-- already occupied
guardSpark :: Closure (SupervisedSpark m) -> NodeId -> SparkM m ()
guardSpark newSupervisedSpark fisher = do
  guardRef <- getGuardPost
  liftIO $ atomicModifyIORef guardRef install
  where
    -- install the spark, failing loudly if the post is already occupied
    install (Just _) = error "Another spark already occupying guard post"
    install Nothing  =
      (Just OccupiedGuardPost { guardedSpark = newSupervisedSpark
                              , destinedFor  = fisher }, ())
-- | pops guard post back to sparkpool iff
--   a) the guard post is occupied &&
--   b) the the guarded spark is destined for dead node
-- This is called when DEADNODE message is received
-- by any node.
popGuardPostToSparkpool :: NodeId -> SparkM m ()
popGuardPostToSparkpool deadNode = do
  x <- getGuardPost
  -- atomically remove the guarded spark, but only if it was destined
  -- for the dead node; return the removed occupant (if any)
  poppedSpark <- liftIO $ atomicModifyIORef x $ \currentGuardPost ->
    if isNothing currentGuardPost
      then (Nothing,Nothing)
      else
        let destination = destinedFor $ fromJust currentGuardPost
        in if destination == deadNode
             then (Nothing,currentGuardPost) -- move from guard post to sparkpool
             else (currentGuardPost,Nothing) -- leave guardpost (and sparkpool) alone
  -- put the guarded spark back in the sparkpool
  when (isJust poppedSpark) $ do
    pool <- getPool
    liftIO $ pushBackIO pool (Left $ guardedSpark (fromJust poppedSpark))
-- | pops the spark in the guard post, leaving the post empty;
-- returns 'Nothing' if the post was already empty
toppleGuardPost :: SparkM m (Maybe (Closure (SupervisedSpark m)))
toppleGuardPost = do
  guardRef <- getGuardPost
  liftIO $ atomicModifyIORef guardRef evict
  where
    -- empty the post, returning whatever spark (if any) was guarded
    evict occupant = (Nothing, fmap guardedSpark occupant)
-- | returns True iff there is a spark in the guard post
waitingReqResponse :: SparkM m Bool
waitingReqResponse = do
  guardRef <- getGuardPost
  occupant <- liftIO (readIORef guardRef)
  return (isJust occupant)

-- | current number of sparks in the local pool
readPoolSize :: SparkM m Int
readPoolSize = do
  pool <- getPool
  liftIO (lengthIO pool)
-- | IORef recording the origin of the most recently received spark
getSparkOrigHist :: SparkM m (IORef (Maybe NodeId))
getSparkOrigHist = fmap s_sparkOrig ask

-- | origin of the most recently received spark, if any
readSparkOrigHist :: SparkM m (Maybe NodeId)
readSparkOrigHist = do
  ref <- getSparkOrigHist
  liftIO (readIORef ref)

-- | forget the recorded spark origin
clearSparkOrigHist :: SparkM m ()
clearSparkOrigHist =
  getSparkOrigHist >>= \ref -> liftIO (writeIORef ref Nothing)

-- | record 'mostRecentOrigin' as the latest spark origin
updateSparkOrigHist :: NodeId -> SparkM m ()
updateSparkOrigHist mostRecentOrigin =
  getSparkOrigHist >>= \ref -> liftIO (writeIORef ref (Just mostRecentOrigin))
-- | IORef holding the current fishing target (if any)
getFishingFlagIORef :: SparkM m (IORef (Maybe NodeId))
getFishingFlagIORef = s_fishing <$> ask

-- | True iff a FISH message is outstanding
getFishingFlag :: SparkM m Bool
getFishingFlag = do
  ioref <- getFishingFlagIORef
  maybe_victim <- liftIO $ readIORef ioref
  return $ isJust maybe_victim

-- | record that a FISH has been sent to 'victim'
setFishingFlag :: NodeId -> SparkM m ()
setFishingFlag victim = do
  ref <- getFishingFlagIORef
  liftIO (atomicWriteIORef ref (Just victim))

-- | the node we are awaiting a FISH reply from, if any
waitingFishingReplyFrom :: SparkM m (Maybe NodeId)
waitingFishingReplyFrom = do
  ioref <- getFishingFlagIORef
  liftIO $ readIORef ioref

-- | clear the fishing flag (no FISH outstanding);
-- type signature was missing in the original
clearFishingFlag :: SparkM m ()
clearFishingFlag = do
  ref <- getFishingFlagIORef
  liftIO (atomicWriteIORef ref Nothing)
-- | action server used to clear the "FISH outstanding" flag
getNoWorkServer :: SparkM m ActionServer
getNoWorkServer = fmap s_noWork ask

-- | semaphore blocking idle schedulers
getIdleSchedsSem :: SparkM m Sem
getIdleSchedsSem = fmap s_idleScheds ask

-- | counter of FISH messages sent
getFishSentCtr :: SparkM m (IORef Int)
getFishSentCtr = fmap s_fishSent ask

readFishSentCtr :: SparkM m Int
readFishSentCtr = readCtr =<< getFishSentCtr

-- | counter of sparks received
getSparkRcvdCtr :: SparkM m (IORef Int)
getSparkRcvdCtr = fmap s_sparkRcvd ask

readSparkRcvdCtr :: SparkM m Int
readSparkRcvdCtr = readCtr =<< getSparkRcvdCtr

-- | counter of sparks generated locally
getSparkGenCtr :: SparkM m (IORef Int)
getSparkGenCtr = fmap s_sparkGen ask

readSparkGenCtr :: SparkM m Int
readSparkGenCtr = readCtr =<< getSparkGenCtr

-- | counter of sparks converted to threads
getSparkConvCtr :: SparkM m (IORef Int)
getSparkConvCtr = fmap s_sparkConv ask

readSparkConvCtr :: SparkM m Int
readSparkConvCtr = readCtr =<< getSparkConvCtr
-- | high-water mark of the spark pool
readMaxSparkCtr :: SparkM m Int
readMaxSparkCtr = getPool >>= liftIO . maxLengthIO

-- Config accessors; the original used the double-fmap anti-idiom
-- 'f <$> g <$> ask', replaced here with a single fmap of the composition.
getMaxHops :: SparkM m Int
getMaxHops = (maxHops . s_conf) <$> ask

getMaxFish :: SparkM m Int
getMaxFish = (maxFish . s_conf) <$> ask

getMinSched :: SparkM m Int
getMinSched = (minSched . s_conf) <$> ask

getMinFishDly :: SparkM m Int
getMinFishDly = (minFishDly . s_conf) <$> ask

getMaxFishDly :: SparkM m Int
getMaxFishDly = (maxFishDly . s_conf) <$> ask
-- Fault-tolerance counters (read via 'readCtr').
getSparkRecCtr :: SparkM m (IORef Int)
getSparkRecCtr = fmap s_sparkRec ask

getThreadRecCtr :: SparkM m (IORef Int)
getThreadRecCtr = fmap s_threadRec ask

getTaskNotRecCtr :: SparkM m (IORef Int)
getTaskNotRecCtr = fmap s_taskNotRec ask

readSparkRec :: SparkM m Int
readSparkRec = readCtr =<< getSparkRecCtr

readThreadRec :: SparkM m Int
readThreadRec = readCtr =<< getThreadRecCtr

readTaskNotRec :: SparkM m Int
readTaskNotRec = readCtr =<< getTaskNotRecCtr
-----------------------------------------------------------------------------
-- blocking and unblocking idle schedulers

-- Put the executing scheduler to sleep (block on the idle-scheduler semaphore).
blockSched :: SparkM m ()
blockSched = do
  sem <- getIdleSchedsSem
  liftIO (Sem.wait sem)

-- Wake up 'n' sleeping schedulers.
wakeupSched :: Int -> SparkM m ()
wakeupSched n = do
  sem <- getIdleSchedsSem
  liftIO (replicateM_ n (Sem.signal sem))
-----------------------------------------------------------------------------
-- local access to spark pool

-- Get a spark from the back of the spark pool, if there is any;
-- possibly send a FISH message and update stats (ie. count sparks converted);
getSpark :: SparkM m (Maybe (Task m))
getSpark = do
  pool <- getPool
  -- maybe_spark <- liftIO $ popFrontIO pool
  maybe_spark <- liftIO $ popBackIO pool
  -- NOTE(review): sendFISH runs unconditionally, before inspecting the
  -- popped spark; it checks the pool size and fishing flag itself --
  -- confirm this ordering is intended.
  sendFISH
  case maybe_spark of
    Just _ -> do getSparkConvCtr >>= incCtr
                 sparks <- liftIO $ lengthIO pool
                 debug dbgSpark $
                   "#sparks=" ++ show sparks ++ " (spark converted)"
                 return maybe_spark
    Nothing -> return maybe_spark
-- Put a new spark at the back of the spark pool, wake up 1 sleeping scheduler,
-- and update stats (ie. count sparks generated locally);
putSpark :: Task m -> SparkM m ()
putSpark spark = do
  pool <- getPool
  liftIO (pushBackIO pool spark)
  wakeupSched 1
  getSparkGenCtr >>= incCtr
  size <- liftIO (lengthIO pool)
  debug dbgSpark ("#sparks=" ++ show size ++ " (spark created)")
-----------------------------------------------------------------------------
-- fishing and the like

-- Send a FISH message, but only if there is no FISH outstanding and the
-- number of sparks in the pool is less or equal to the 'maxFish' parameter;
-- the target is the sender of the most recent SCHEDULE message, if a
-- SCHEDULE message has yet been received, otherwise the target is random.
sendFISH :: SparkM m ()
sendFISH = do
  pool <- getPool
  isFishing <- getFishingFlag
  unless isFishing $ do
    -- no FISH currently outstanding
    nodes <- liftCommM Comm.nodes
    maxFish' <- getMaxFish
    sparks <- liftIO $ lengthIO pool
    when (nodes > 1 && sparks <= maxFish') $ do
      -- there are other nodes and the pool has too few sparks;
      -- set flag indicating that we are going to send a FISH
      -- ok <- setFlag fishingFlag
      --when True $ do
        -- flag was clear before: go ahead sending FISH;
        -- construct message
        fisher <- liftCommM Comm.myNode
        -- hops <- getMaxHops
        -- target is node where most recent spark came from (if such exists)
        maybe_target <- readSparkOrigHist
        target <- case maybe_target of
          Just node -> return node
          Nothing -> do allNodes <- liftCommM Comm.allNodes
                        let avoidNodes = Set.singleton fisher
                        -- select random target (other than fisher)
                        randomOtherElem avoidNodes allNodes nodes
        -- record the target so we know a FISH is outstanding
        setFishingFlag target
        let fishMsg = FISH fisher :: Msg m
        -- send FISH message
        debug dbgMsgSend $
          show fishMsg ++ " ->> " ++ show target
        void $ liftCommM $ Comm.send target $ encodeLazy fishMsg
        getFishSentCtr >>= incCtr
-- Dispatch FISH, SCHEDULE, NOWORK and fault-tolerance messages (REQ, AUTH,
-- DENIED, ACK, OBSOLETE) to their respective handlers; any other message
-- is a protocol error.
dispatch :: Msg m -> SparkM m ()
dispatch msg@FISH{} = handleFISH msg
dispatch msg@SCHEDULE{} = handleSCHEDULE msg
dispatch msg@NOWORK = handleNOWORK msg
dispatch msg@REQ{} = handleREQ msg
dispatch msg@AUTH{} = handleAUTH msg
dispatch msg@DENIED{} = handleDENIED msg
dispatch msg@ACK{} = handleACK msg
dispatch msg@OBSOLETE{} = handleOBSOLETE msg
-- Fixed: the original error message duplicated the module prefix
-- ("Control.Parallel.Control.Parallel.HdpH...").
dispatch msg = error $ "Control.Parallel.HdpH.Internal.Sparkpool.dispatch: " ++ show msg ++ " unexpected"
-- Handle a FISH message from 'fisher': reply with SCHEDULE (fault-oblivious
-- sparks), start the REQ/AUTH protocol (supervised sparks), or reply NOWORK.
-- NOTE(review): only the FISH constructor is matched; 'dispatch' guarantees
-- this, but the pattern is partial -- confirm no other caller exists.
handleFISH :: Msg m -> SparkM m ()
handleFISH (FISH fisher) = do
  here <- liftCommM Comm.myNode
  sparks <- readPoolSize
  minSchd <- getMinSched
  waitingAuthorisation <- waitingReqResponse
  -- send SCHEDULE if pool has enough sparks
  -- and the node is not waiting for a response
  -- from a REQ message (i.e. NOWORK or SCHEDULE)
  done <- if sparks < minSchd || waitingAuthorisation
    then return False
    else do
      -- maybe_spark <- getPool >>= liftIO . popBackIO
      maybe_spark <- getPool >>= liftIO . popFrontIO
      case maybe_spark of
        Just spark ->
          case spark of
            (Left supSparkClo) -> do
              -- 1) Move supervised spark to guard post.
              --    This also has the effect of blocking
              --    whilst waiting from the supervisor
              --    on the REQ response below
              guardSpark supSparkClo fisher
              -- 2) Send REQ message to supervisor of spark
              let supSpark = unClosure supSparkClo
              let ref = remoteRef supSpark
                  seqN = thisReplica supSpark
                  supervisor = atT ref
                  requestMsg = REQ ref seqN here fisher
              debug dbgMsgSend $
                show requestMsg ++ " ->> " ++ show supervisor
              -- shortcut message delivery
              if (supervisor == here)
                then handleREQ requestMsg
                else void $ liftCommM $ Comm.send supervisor $ encodeLazy requestMsg
              return True
            (Right _normalSparkClo) -> do
              -- fault oblivious scheduling
              let schedMsg = SCHEDULE spark here
              debug dbgMsgSend $
                show schedMsg ++ " ->> " ++ show fisher
              void $ liftCommM $ Comm.send fisher $ encodeLazy schedMsg
              return True
        Nothing -> return False
  -- nothing was scheduled: tell the fisher there is no work here
  unless done $ do
    let noWorkMsg = NOWORK :: Msg m
    debug dbgMsgSend $
      show noWorkMsg ++ " ->> " ++ show fisher
    void (liftCommM $ Comm.send fisher $ encodeLazy noWorkMsg)
{- No, we cannot use hops in FT scheduler.
-- Reason: If DEADNODE is received, and a node is waiting
-- for a FISH reply, it may be waiting for a reply from the dead
-- message. A node should start fishing again. If the FISH has been
-- forwarded to a node that then fails, a node would be waiting forever
-- for a reply.
-- no SCHEDULE sent; check whether to forward FISH
nodes <- liftCommM $ Comm.nodes
let avoidNodes = Set.fromList [fisher, target, here]
if hops > 0 && nodes > Set.size avoidNodes
then do -- fwd FISH to random node (other than those in avoidNodes)
allNodes <- liftCommM $ Comm.allNodes
node <- randomOtherElem avoidNodes allNodes nodes
let fishMsg = FISH fisher target (hops - 1) :: Msg m
debug dbgMsgSend $
show fishMsg ++ " ->> " ++ show node
attempt <- liftCommM $ Comm.send node $ encodeLazy fishMsg
case attempt of
(Left (NT.TransportError _ e)) -> do -- either SendClosed or SendFailed
debug dbgFailure $
"FISH delivery unsuccessful to "
++ show target
++ "\t" ++ show e
-- tell fisher that the FISH send failed
let fishFailedMsg = FISHFAILED
-- send FISHFAILED message to the original fisher
debug dbgMsgSend $
show fishFailedMsg ++ " ->> " ++ show fisher
void $ liftCommM $ Comm.send fisher $ encodeLazy fishFailedMsg
(Right ()) -> return ()
else do -- notify fisher that there is no work
-}
-- Handle a SCHEDULE message;
-- * puts the spark at the back of the spark pool,
-- * records spark sender and updates stats, and
-- * clears the "FISH outstanding" flag.
-- NOTE(review): only the SCHEDULE constructor is matched; partial pattern
-- relies on 'dispatch' routing.
handleSCHEDULE :: Msg m -> SparkM m ()
handleSCHEDULE (SCHEDULE spark sender) = do
  -- 1) If a supervised spark, send ACK to supervisor
  when (taskSupervised spark) $ ackSupervisedTask
  -- put spark into pool
  pool <- getPool
  -- liftIO $ pushFrontIO pool spark
  liftIO $ pushBackIO pool spark
  -- record sender of spark
  updateSparkOrigHist sender
  -- update stats
  getSparkRcvdCtr >>= incCtr
  -- clear FISHING flag
  --void $ getFishingFlag >>= clearFlag
  clearFishingFlag
  return ()
  where
    -- inform the spark's supervisor that this node now holds the replica
    ackSupervisedTask :: SparkM m ()
    ackSupervisedTask = do
      me <- liftCommM Comm.myNode
      -- safe: taskSupervised guards this call, so spark is a Left closure
      let (Left supSpkClo) = spark
          supSpark = unClosure supSpkClo
          ref = remoteRef supSpark
          seqN = thisReplica supSpark
      let ackMsg = ACK ref seqN me
          supervisor = atT ref
      debug dbgMsgSend $
        show ackMsg ++ " ->> " ++ show supervisor
      void (liftCommM $ Comm.send supervisor $ encodeLazy ackMsg)
-- Handle a NOWORK message;
-- asynchronously, after a random delay, clear the "FISH outstanding" flag
-- and wake one scheduler (if some are sleeping) to resume fishing.
-- Rationale for random delay: to prevent FISH flooding when there is
-- (almost) no work.
handleNOWORK :: Msg m -> SparkM m ()
handleNOWORK NOWORK = do
  clearSparkOrigHist
  fishingFlagRef <- getFishingFlagIORef
  noWorkServer <- getNoWorkServer
  idleSchedsSem <- getIdleSchedsSem
  minDelay <- getMinFishDly
  maxDelay <- getMaxFishDly
  -- compose delay and clear flag action; runs later in IO, off this monad
  let action = do -- random delay
                  delay <- randomRIO (minDelay, max minDelay maxDelay)
                  threadDelay delay
                  -- clear fishing flag
                  atomicModifyIORef fishingFlagRef $ const (Nothing, ())
                  -- wakeup 1 sleeping scheduler (to fish again)
                  Sem.signal idleSchedsSem
  -- post action request to server
  liftIO $ reqAction noWorkServer action
------------- Fault tolerance handling

-- REQ is sent to the supervisor of a spark; the supervisor either
-- authorises the schedule (AUTH), denies it (DENIED), or reports the
-- replica obsolete (OBSOLETE).
-- NOTE(review): the two OBSOLETE branches and the two DENIED branches are
-- textually duplicated -- candidates for factoring into local helpers.
handleREQ :: Msg m -> SparkM m ()
handleREQ (REQ taskRef seqN from to) = do
  -- check that task has the highest sequence number
  maybe_isNewest <- liftIO $ isNewestReplica taskRef seqN
  if isNothing maybe_isNewest
    then do -- another copy of the task has completed
      let obsoleteMsg = (OBSOLETE to)
      debug dbgMsgSend $
        show obsoleteMsg ++ " ->> " ++ show from
      void $ liftCommM $ Comm.send from $ encodeLazy obsoleteMsg
    else do
      if not (fromJust maybe_isNewest) -- not the newest copy
        then do
          let obsoleteMsg = (OBSOLETE to)
          debug dbgMsgSend $
            show obsoleteMsg ++ " ->> " ++ show from
          void $ liftCommM $ Comm.send from $ encodeLazy obsoleteMsg
        else do
          nodes <- liftCommM Comm.allNodes
          -- check fisher hasn't died in the meantime (from verified model)
          if (to `elem` nodes)
            then do
              -- NOTE(review): fromJust assumes the task location is always
              -- known for a live, newest replica -- confirm invariant.
              loc <- liftIO $ fromJust <$> locationOfTask taskRef
              case loc of
                OnNode current -> do
                  if current == from
                    then do -- authorise the schedule
                      -- update the book keeping to InTransition
                      liftIO $ taskInTransition taskRef from to
                      -- Send AUTH to owner (from)
                      let authMsg = AUTH taskRef to
                      debug dbgMsgSend $
                        show authMsg ++ " ->> " ++ show from
                      here <- liftCommM $ Comm.myNode
                      -- shortcutting message delivery
                      if (from == here)
                        then handleAUTH authMsg
                        else void $ liftCommM $ Comm.send from $ encodeLazy authMsg
                    else error "spark not on the peer we expected"
                InTransition _ _ -> do
                  let deniedMsg = DENIED to
                  debug dbgMsgSend $
                    show deniedMsg ++ " ->> " ++ show from
                  void $ liftCommM $ Comm.send from $ encodeLazy deniedMsg
            -- fisher has died in the meantime, and removed from VM (allNodes)
            else do
              let deniedMsg = DENIED to
              debug dbgMsgSend $
                show deniedMsg ++ " ->> " ++ show from
              void $ liftCommM $ Comm.send from $ encodeLazy deniedMsg
-- 1) Send a SCHEDULE to the fisher
-- 2) Empty the guard post
-- An empty guard post is tolerated: a DEADNODE about the thief may have
-- flushed it before the AUTH arrived.
handleAUTH :: Msg m -> SparkM m ()
handleAUTH (AUTH taskRef fisher) = do
  -- pops the guarded spark from the guard post
  maybe_spark <- toppleGuardPost
  case maybe_spark of
    Just spark -> do
      -- sanity check: the authorised task must be the guarded one
      let ref = remoteRef $ unClosure spark
      when (taskRef /= ref) $
        error $ "Guarded task is not the one authorised with AUTH\n"
             ++ show taskRef ++ " /= " ++ show ref
      me <- liftCommM Comm.myNode
      let scheduleMsg = SCHEDULE (Left spark) me
      debug dbgMsgSend $
        show scheduleMsg ++ " ->> " ++ show fisher
      void $ liftCommM $ Comm.send fisher $ encodeLazy scheduleMsg
    Nothing -> debug dbgFailure $ -- guard post empty
      -- fixed typo in the original message: "must be have been"
      "AUTH message arrived, but guard post empty. " ++
      "DEADNODE message must have been received about thief, flushing guard post."
-- 1) Send NOWORK message to the fisher
-- 2) Remove spark from guard post,
--    push back into sparkpool.
-- 3) reset isFishing spark
handleDENIED :: Msg m -> SparkM m ()
handleDENIED (DENIED fisher) = do
  -- 1) tell the fisher there is no work to be had
  let noworkMsg = NOWORK
  debug dbgMsgSend (show noworkMsg ++ " ->> " ++ show fisher)
  void (liftCommM (Comm.send fisher (encodeLazy noworkMsg)))
  -- 2) return the guarded spark (if any) to the spark pool
  maybe_spark <- toppleGuardPost
  case maybe_spark of
    Just spark -> putSpark (Left spark)
    Nothing    -> return ()
  -- 3) we are no longer waiting for a fishing reply
  --void $ getFishingFlag >>= clearFlag
  clearFishingFlag
-- Handle an ACK from the node that received a supervised spark: update the
-- supervisor's book-keeping with the spark's new location.
handleACK :: Msg m -> SparkM m ()
handleACK (ACK taskRef seqN newNode) = do
  maybe_isNewest <- liftIO $ isNewestReplica taskRef seqN
  if isNothing maybe_isNewest
    then -- not supervised
         -- NOTE(review): in 'handleREQ' the isNothing case means "another
         -- copy completed" and triggers OBSOLETE, yet here it still records
         -- the location -- confirm this asymmetry is intended.
         liftIO $ taskOnNode taskRef newNode
    else do
      if fromJust maybe_isNewest -- valid copy, modify book keeping
        then liftIO $ taskOnNode taskRef newNode
        else debug dbgFailure $ -- obsolete copy
               "received ACK for old task copy, ignoring."
-- 1) topple obsolete spark from guard post (do NOT put back in sparkpool)
-- 2) return NOWORK to the fisher
handleOBSOLETE :: Msg m -> SparkM m ()
handleOBSOLETE (OBSOLETE fisher) = do
  -- 1) discard whatever spark was guarded; it is obsolete
  _ <- toppleGuardPost
  -- 2) reply NOWORK to the fisher
  let noworkMsg = NOWORK
  debug dbgMsgSend (show noworkMsg ++ " ->> " ++ show fisher)
  void (liftCommM (Comm.send fisher (encodeLazy noworkMsg)))
-----------------------------------------------------------------------------
-- auxiliary stuff
{- Not used
readFlag :: IORef Bool -> SparkM m Bool
readFlag = liftIO . readIORef
-- Sets given 'flag'; returns True iff 'flag' did actually change.
setFlag :: IORef Bool -> SparkM m Bool
setFlag flag = liftIO $ atomicModifyIORef flag $ \ v -> (True, not v)
-- Clears given 'flag'; returns True iff 'flag' did actually change.
clearFlag :: IORef Bool -> SparkM m Bool
clearFlag flag = liftIO $ atomicModifyIORef flag $ \ v -> (False, v)
-}
-- | read a statistics counter
readCtr :: IORef Int -> SparkM m Int
readCtr ctr = liftIO (readIORef ctr)

-- | atomically bump a statistics counter by one
incCtr :: IORef Int -> SparkM m ()
incCtr ctr = liftIO $ atomicModifyIORef ctr bump
  where
    -- force the new value so the counter never accumulates thunks
    bump v = let v' = v + 1 in v' `seq` (v', ())
-- 'randomOtherElem avoid xs n' returns a random element of the list 'xs'
-- different from any of the elements in the set 'avoid'.
-- Requirements: 'n <= length xs' and 'xs' contains no duplicates.
-- NOTE(review): the '!!' below is partial; it also requires every element
-- of 'avoid' to occur in 'xs', otherwise the index bound computed from
-- 'Set.size avoid' can exceed 'length candidates' -- confirm callers
-- guarantee this.
randomOtherElem :: Ord a => Set a -> [a] -> Int -> SparkM m a
randomOtherElem avoid xs n = do
  let candidates = filter (`Set.notMember` avoid) xs
  -- length candidates == length xs - Set.size avoid >= n - Set.size avoid
  i <- liftIO $ randomRIO (0, n - Set.size avoid - 1)
  -- 0 <= i <= n - Set.size avoid - 1 < length candidates
  return (candidates !! i)
-- debugging

-- | Emit 'message' at debug 'level', delegating to 'Location.debug'.
debug :: Int -> String -> SparkM m ()
debug level message = liftIO $ Location.debug level message
----------
-- part of base library in GHC 7.6
-- | Variant of 'writeIORef' with the \"barrier to reordering\" property that
-- 'atomicModifyIORef' has.
atomicWriteIORef :: IORef a -> a -> IO ()
atomicWriteIORef ref a = do
    -- matching the unit result forces it, just like the original 'seq'
    () <- atomicModifyIORef ref (\_old -> (a, ()))
    return ()
| robstewart57/hdph-rs | src/Control/Parallel/HdpH/Internal/Sparkpool.hs | bsd-3-clause | 28,112 | 9 | 28 | 7,564 | 5,573 | 2,855 | 2,718 | 462 | 7 |
{-# LANGUAGE ConstraintKinds, TypeFamilies, FlexibleContexts, DataKinds, GADTs, ScopedTypeVariables, FlexibleInstances, RecursiveDo, TemplateHaskell #-}
module Reflex.Dom.Widget.Input (module Reflex.Dom.Widget.Input, def, (&), (.~)) where
import Prelude
import Reflex.Dom.Class
import Reflex.Dom.Widget.Basic
import Reflex
import Reflex.Host.Class
import GHCJS.DOM.HTMLInputElement as Input
import GHCJS.DOM.HTMLTextAreaElement as TextArea
import GHCJS.DOM.Element
import GHCJS.DOM.HTMLSelectElement as Select
import GHCJS.DOM.EventM
import GHCJS.DOM.UIEvent
import Data.Monoid
import Data.Map as Map
import Control.Lens
import Control.Monad hiding (forM_)
import Control.Monad.IO.Class
import Data.Default
import Data.Maybe
import Safe
import Data.Dependent.Sum (DSum (..))
data TextInput t
= TextInput { _textInput_value :: Dynamic t String
, _textInput_input :: Event t String
, _textInput_keypress :: Event t Int
, _textInput_keydown :: Event t Int
, _textInput_keyup :: Event t Int
, _textInput_hasFocus :: Dynamic t Bool
, _textInput_element :: HTMLInputElement
}
data TextInputConfig t
= TextInputConfig { _textInputConfig_inputType :: String
, _textInputConfig_initialValue :: String
, _textInputConfig_setValue :: Event t String
, _textInputConfig_attributes :: Dynamic t (Map String String)
}
instance Reflex t => Default (TextInputConfig t) where
def = TextInputConfig { _textInputConfig_inputType = "text"
, _textInputConfig_initialValue = ""
, _textInputConfig_setValue = never
, _textInputConfig_attributes = constDyn mempty
}
textInput :: MonadWidget t m => TextInputConfig t -> m (TextInput t)
textInput (TextInputConfig inputType initial eSetValue dAttrs) = do
e <- liftM castToHTMLInputElement $ buildEmptyElement "input" =<< mapDyn (Map.insert "type" inputType) dAttrs
Input.setValue e $ Just initial
performEvent_ $ fmap (Input.setValue e . Just) eSetValue
eChange <- wrapDomEvent e (`on` input) $ fromMaybe "" <$> Input.getValue e
postGui <- askPostGui
runWithActions <- askRunWithActions
eChangeFocus <- newEventWithTrigger $ \eChangeFocusTrigger -> do
unsubscribeOnblur <- on e blurEvent $ liftIO $ do
postGui $ runWithActions [eChangeFocusTrigger :=> False]
unsubscribeOnfocus <- on e focusEvent $ liftIO $ do
postGui $ runWithActions [eChangeFocusTrigger :=> True]
return $ liftIO $ unsubscribeOnblur >> unsubscribeOnfocus
dFocus <- holdDyn False eChangeFocus
eKeypress <- wrapDomEvent e (`on` keyPress) getKeyEvent
eKeydown <- wrapDomEvent e (`on` keyDown) getKeyEvent
eKeyup <- wrapDomEvent e (`on` keyUp) getKeyEvent
dValue <- holdDyn initial $ leftmost [eSetValue, eChange]
return $ TextInput dValue eChange eKeypress eKeydown eKeyup dFocus e
-- | Event that fires whenever the Enter key is pressed in the text input.
textInputGetEnter :: Reflex t => TextInput t -> Event t ()
textInputGetEnter ti = fmapMaybe isEnter (_textInput_keypress ti)
  where
    -- keep only presses of the Enter key, discarding the key code
    isEnter code
      | code == keycodeEnter = Just ()
      | otherwise            = Nothing
data TextAreaConfig t
= TextAreaConfig { _textAreaConfig_initialValue :: String
, _textAreaConfig_setValue :: Event t String
, _textAreaConfig_attributes :: Dynamic t (Map String String)
}
instance Reflex t => Default (TextAreaConfig t) where
def = TextAreaConfig { _textAreaConfig_initialValue = ""
, _textAreaConfig_setValue = never
, _textAreaConfig_attributes = constDyn mempty
}
data TextArea t
= TextArea { _textArea_value :: Dynamic t String
, _textArea_input :: Event t String
, _textArea_element :: HTMLTextAreaElement
, _textArea_hasFocus :: Dynamic t Bool
, _textArea_keypress :: Event t Int
}
textArea :: MonadWidget t m => TextAreaConfig t -> m (TextArea t)
textArea (TextAreaConfig initial eSet attrs) = do
e <- liftM castToHTMLTextAreaElement $ buildEmptyElement "textarea" attrs
TextArea.setValue e $ Just initial
postGui <- askPostGui
runWithActions <- askRunWithActions
eChangeFocus <- newEventWithTrigger $ \eChangeFocusTrigger -> do
unsubscribeOnblur <- on e blurEvent $ liftIO $ do
postGui $ runWithActions [eChangeFocusTrigger :=> False]
unsubscribeOnfocus <- on e focusEvent $ liftIO $ do
postGui $ runWithActions [eChangeFocusTrigger :=> True]
return $ liftIO $ unsubscribeOnblur >> unsubscribeOnfocus
performEvent_ $ fmap (TextArea.setValue e . Just) eSet
f <- holdDyn False eChangeFocus
ev <- wrapDomEvent e (`on` input) $ fromMaybe "" <$> TextArea.getValue e
v <- holdDyn initial $ leftmost [eSet, ev]
eKeypress <- wrapDomEvent e (`on` keyPress) getKeyEvent
return $ TextArea v ev e f eKeypress
data CheckboxConfig t
= CheckboxConfig { _checkboxConfig_setValue :: Event t Bool
, _checkboxConfig_attributes :: Dynamic t (Map String String)
}
instance Reflex t => Default (CheckboxConfig t) where
def = CheckboxConfig { _checkboxConfig_setValue = never
, _checkboxConfig_attributes = constDyn mempty
}
data Checkbox t
= Checkbox { _checkbox_value :: Dynamic t Bool
, _checkbox_change :: Event t Bool
}
--TODO: Make attributes possibly dynamic
-- | Create an editable checkbox
--   Note: if the "type" or "checked" attributes are provided as attributes, they will be ignored
checkbox :: MonadWidget t m => Bool -> CheckboxConfig t -> m (Checkbox t)
checkbox checked config = do
  -- force type="checkbox" and set/clear "checked" from the initial value,
  -- overriding anything the caller supplied for those two attributes
  attrs <- mapDyn (\c -> Map.insert "type" "checkbox" $ (if checked then Map.insert "checked" "checked" else Map.delete "checked") c) (_checkboxConfig_attributes config)
  e <- liftM castToHTMLInputElement $ buildEmptyElement "input" attrs
  -- on click, read the new checked state back from the DOM
  eClick <- wrapDomEvent e (`on` click) $ Input.getChecked e
  -- external setValue events write straight to the DOM (strictly applied)
  performEvent_ $ fmap (\v -> Input.setChecked e $! v) $ _checkboxConfig_setValue config
  -- value tracks both programmatic sets and user clicks
  dValue <- holdDyn checked $ leftmost [_checkboxConfig_setValue config, eClick]
  return $ Checkbox dValue eClick
checkboxView :: MonadWidget t m => Dynamic t (Map String String) -> Dynamic t Bool -> m (Event t Bool)
checkboxView dAttrs dValue = do
e <- liftM castToHTMLInputElement $ buildEmptyElement "input" =<< mapDyn (Map.insert "type" "checkbox") dAttrs
eClicked <- wrapDomEvent e (`on` click) $ do
preventDefault
Input.getChecked e
schedulePostBuild $ do
v <- sample $ current dValue
when v $ Input.setChecked e True
performEvent_ $ fmap (\v -> Input.setChecked e $! v) $ updated dValue
return eClicked
data Dropdown t k
= Dropdown { _dropdown_value :: Dynamic t k
, _dropdown_change :: Event t k
}
data DropdownConfig t k
= DropdownConfig { _dropdownConfig_setValue :: Event t k
, _dropdownConfig_attributes :: Dynamic t (Map String String)
}
instance (Reflex t, Ord k, Show k, Read k) => Default (DropdownConfig t k) where
def = DropdownConfig { _dropdownConfig_setValue = never
, _dropdownConfig_attributes = constDyn mempty
}
--TODO: We should allow the user to specify an ordering instead of relying on the ordering of the Map
--TODO: Get rid of Show k and Read k by indexing the possible values ourselves
-- | Create a dropdown box
-- The first argument gives the initial value of the dropdown; if it is not present in the map of options provided, it will be added with an empty string as its text
dropdown :: forall k t m. (MonadWidget t m, Ord k, Show k, Read k) => k -> Dynamic t (Map k String) -> DropdownConfig t k -> m (Dropdown t k)
dropdown k0 options (DropdownConfig setK attrs) = do
(eRaw, _) <- elDynAttr' "select" attrs $ do
optionsWithDefault <- mapDyn (`Map.union` (k0 =: "")) options
listWithKey optionsWithDefault $ \k v -> do
elAttr "option" ("value" =: show k <> if k == k0 then "selected" =: "selected" else mempty) $ dynText v
let e = castToHTMLSelectElement $ _el_element eRaw
performEvent_ $ fmap (Select.setValue e . Just . show) setK
eChange <- wrapDomEvent e (`on` change) $ do
kStr <- fromMaybe "" <$> Select.getValue e
return $ readMay kStr
let readKey opts mk = fromMaybe k0 $ do
k <- mk
guard $ Map.member k opts
return k
dValue <- combineDyn readKey options =<< holdDyn (Just k0) (leftmost [eChange, fmap Just setK])
return $ Dropdown dValue (attachDynWith readKey options eChange)
liftM concat $ mapM makeLenses
[ ''TextAreaConfig
, ''TextArea
, ''TextInputConfig
, ''TextInput
, ''DropdownConfig
, ''Dropdown
, ''CheckboxConfig
, ''Checkbox
]
class HasAttributes a where
type Attrs a :: *
attributes :: Lens' a (Attrs a)
instance HasAttributes (TextAreaConfig t) where
type Attrs (TextAreaConfig t) = Dynamic t (Map String String)
attributes = textAreaConfig_attributes
instance HasAttributes (TextInputConfig t) where
type Attrs (TextInputConfig t) = Dynamic t (Map String String)
attributes = textInputConfig_attributes
instance HasAttributes (DropdownConfig t k) where
type Attrs (DropdownConfig t k) = Dynamic t (Map String String)
attributes = dropdownConfig_attributes
instance HasAttributes (CheckboxConfig t) where
type Attrs (CheckboxConfig t) = Dynamic t (Map String String)
attributes = checkboxConfig_attributes
class HasSetValue a where
type SetValue a :: *
setValue :: Lens' a (SetValue a)
instance HasSetValue (TextAreaConfig t) where
type SetValue (TextAreaConfig t) = Event t String
setValue = textAreaConfig_setValue
instance HasSetValue (TextInputConfig t) where
type SetValue (TextInputConfig t) = Event t String
setValue = textInputConfig_setValue
instance HasSetValue (DropdownConfig t k) where
type SetValue (DropdownConfig t k) = Event t k
setValue = dropdownConfig_setValue
instance HasSetValue (CheckboxConfig t) where
type SetValue (CheckboxConfig t) = Event t Bool
setValue = checkboxConfig_setValue
class HasValue a where
type Value a :: *
value :: a -> Value a
instance HasValue (TextArea t) where
type Value (TextArea t) = Dynamic t String
value = _textArea_value
instance HasValue (TextInput t) where
type Value (TextInput t) = Dynamic t String
value = _textInput_value
instance HasValue (Dropdown t k) where
type Value (Dropdown t k) = Dynamic t k
value = _dropdown_value
instance HasValue (Checkbox t) where
type Value (Checkbox t) = Dynamic t Bool
value = _checkbox_value
{-
type family Controller sm t a where
Controller Edit t a = (a, Event t a) -- Initial value and setter
Controller View t a = Dynamic t a -- Value (always)
type family Output sm t a where
Output Edit t a = Dynamic t a -- Value (always)
Output View t a = Event t a -- Requested changes
data CheckboxConfig sm t
= CheckboxConfig { _checkbox_input :: Controller sm t Bool
, _checkbox_attributes :: Attributes
}
instance Reflex t => Default (CheckboxConfig Edit t) where
def = CheckboxConfig (False, never) mempty
data Checkbox sm t
= Checkbox { _checkbox_output :: Output sm t Bool
}
data StateMode = Edit | View
--TODO: There must be a more generic way to get this witness and allow us to case on the type-level StateMode
data StateModeWitness (sm :: StateMode) where
EditWitness :: StateModeWitness Edit
ViewWitness :: StateModeWitness View
class HasStateModeWitness (sm :: StateMode) where
stateModeWitness :: StateModeWitness sm
instance HasStateModeWitness Edit where
stateModeWitness = EditWitness
instance HasStateModeWitness View where
stateModeWitness = ViewWitness
-}
| hamishmack/reflex-dom | src/Reflex/Dom/Widget/Input.hs | bsd-3-clause | 11,905 | 0 | 20 | 2,722 | 3,078 | 1,577 | 1,501 | 202 | 2 |
-----------------------------------------------------------------------------
-- |
-- License : BSD-3-Clause
-- Maintainer : Oleg Grenrus <oleg.grenrus@iki.fi>
--
-- The API for underlying git commits of a Github repo, as described on
-- <http://developer.github.com/v3/git/commits/>.
module GitHub.Endpoints.GitData.Commits (
gitCommitR,
module GitHub.Data,
) where
import GitHub.Data
import Prelude ()
-- | Query a commit.
-- See <https://developer.github.com/v3/git/commits/#get-a-commit>
gitCommitR :: Name Owner -> Name Repo -> Name GitCommit -> Request k GitCommit
gitCommitR user repo sha = query path []
  where
    -- /repos/:owner/:repo/git/commits/:sha
    path = ["repos", toPathPart user, toPathPart repo, "git", "commits", toPathPart sha]
| jwiegley/github | src/GitHub/Endpoints/GitData/Commits.hs | bsd-3-clause | 706 | 0 | 8 | 102 | 117 | 68 | 49 | 8 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
module Bead.Controller.ServiceContext (
UsrToken
, UserToken(..)
, UserState(..)
, StatusMessage(..)
, statusMessage
, userStateCata
, userNotLoggedIn
, userRole
, getStatus
, setStatus
, clearStatus
, usernameInState
, InRole(..)
, actualPage
, UserContainer(..)
, ServiceContext(..)
, serviceContext
, ioUserContainer
) where
import qualified Data.Map as Map
import Control.Concurrent.STM
import Bead.Controller.Pages as Pages
import Bead.Controller.Logging
import Bead.Domain.Entities as Entities
import Bead.View.Translation
import qualified Bead.Persistence.Persist as Persist
newtype UsrToken = UsrToken (Username, String)
deriving (Show, Eq, Ord)
class UserToken u where
userToken :: u -> UsrToken
instance UserToken (Username, String) where
userToken (u,t) = UsrToken (u,t)
data UserState
= UserNotLoggedIn
| Registration
| TestAgent
| UserState {
user :: Username -- Username
, uid :: Uid
, page :: PageDesc -- The page descriptor of the last requested one
, name :: String -- User's full name
, role :: Role -- User's role
, token :: String -- Token for the active user session
, timezone :: TimeZoneName -- Timezone of the user
, status :: Maybe (StatusMessage (Translation String)) -- The last status message
} deriving (Show)
-- | Case analysis (catamorphism) for 'UserState': one handler per
-- constructor.  The 'UserState' handler receives all eight fields in
-- declaration order: user, uid, page, name, role, token, timezone,
-- status.  Note that the inner pattern variable @s@ shadows the
-- scrutinee @s@.
userStateCata
  userNotLoggedIn
  registration
  testAgent
  userState
  s = case s of
    UserNotLoggedIn -> userNotLoggedIn
    Registration -> registration
    TestAgent -> testAgent
    UserState u ui p n r t tz s -> userState u ui p n r t tz s

-- | The state of a visitor with no active session.
userNotLoggedIn :: UserState
userNotLoggedIn = UserNotLoggedIn
-- | Converts the user state to a 'Role', or reports which session-less
-- state we are in via 'OutsideRole'.
userRole :: UserState -> Either OutsideRole Role
userRole = userStateCata
  (Left EmptyRole)     -- userNotLoggedIn
  (Left RegRole)       -- registration
  (Left TestAgentRole) -- testAgent
  (\_u _ui _p _n role _t _tz _s -> Right role) -- userState
-- | Produces a new user state from the old one, setting the status
-- message to the given one.  States without a status field
-- ('UserNotLoggedIn', 'Registration', 'TestAgent') are returned
-- unchanged.
setStatus msg = userStateCata UserNotLoggedIn Registration TestAgent userState where
  userState u ui p n r t tz _ = UserState u ui p n r t tz (Just msg)

-- | Produces the status message of the 'UserState', otherwise 'Nothing'.
getStatus = userStateCata Nothing Nothing Nothing status where
  status _ _ _ _ _ _ _ s = s

-- | Produces a copy of the state with the status message cleared;
-- status-less states are returned unchanged.
clearStatus = userStateCata UserNotLoggedIn Registration TestAgent userState where
  userState u ui p n r t tz _ = UserState u ui p n r t tz Nothing
-- Returns a username stored in the user state, or a description
-- string for the state
usernameInState = userStateCata
(Username "NotLoggedIn")
(Username "Registration")
(Username "TestAgent")
(\user _ui _p _n _r _t _tz _s -> user)
instance UserToken UserState where
userToken = userStateCata
(UsrToken (Username "UNL", "UNL")) -- userNotLoggedIn
(UsrToken (Username "REG", "REG")) -- registration
(UsrToken (Username "TA", "TA")) -- testAgent
(\user _ui _p _n _r token _tz _s -> UsrToken (user, token))
instance InRole UserState where
isAdmin = userStateCata False False False (\_u _ui _p _n role _t _tz _s -> isAdmin role)
isCourseAdmin = userStateCata False False False (\_u _ui _p _n role _t _tz _s -> Entities.isCourseAdmin role)
isGroupAdmin = userStateCata False False False (\_u _ui _p _n role _t _tz _s -> isGroupAdmin role)
isStudent = userStateCata False False False (\_u _ui _p _n role _t _tz _s -> isStudent role)
-- | The actual page that corresponds to the user's state: the stored
-- page for a logged-in user, the login page for every other state.
actualPage :: UserState -> PageDesc
actualPage = userStateCata login' login' login' (\_u _ui page _n _r _t _tz _s -> page)
  where
    login' = login ()
data UserContainer a = UserContainer {
isUserLoggedIn :: UsrToken -> IO Bool
, userLogsIn :: UsrToken -> a -> IO ()
, userLogsOut :: UsrToken -> IO ()
, userData :: UsrToken -> IO (Maybe a)
, modifyUserData :: UsrToken -> (a -> a) -> IO ()
}
data ServiceContext = ServiceContext {
userContainer :: UserContainer UserState
, logger :: Logger
, persistInterpreter :: Persist.Interpreter
}
-- | Assemble a 'ServiceContext' from its components.  Kept in 'IO' for
-- backwards compatibility with existing call sites, although no effects
-- are performed.  (Dropped the redundant @do@ around a lone 'return'.)
serviceContext :: UserContainer UserState -> Logger -> Persist.Interpreter -> IO ServiceContext
serviceContext u l i = return $ ServiceContext u l i
-- | Creates a 'UserContainer' backed by a 'TVar' holding a
-- @Map UsrToken a@.  Each operation runs in its own STM transaction, so
-- individual calls are atomic, but sequences of calls are not.
ioUserContainer :: IO (UserContainer a)
ioUserContainer = do
  v <- newTVarIO Map.empty
  let -- Membership test on the session map.
      mvIsUserLoggedIn name = atomically $
        fmap (Map.member name) (readTVar v)
      -- Inserts (or overwrites) the data stored for the token.
      mvUserLogsIn name val = atomically $
        withTVar v (Map.insert name val)
      -- Removes the token's entry, if any.
      mvUserLogsOut name = atomically $
        withTVar v (Map.delete name)
      -- Looks up the data stored for the token.
      mvUserData name = atomically $ do
        fmap (Map.lookup name) (readTVar v)
      -- Applies @f@ to the stored value; a no-op for unknown tokens.
      mvModifyUserData name f = atomically $ do
        m <- readTVar v
        case Map.lookup name m of
          Nothing -> return ()
          Just x -> writeTVar v (Map.insert name (f x) m)
  return UserContainer {
      isUserLoggedIn = mvIsUserLoggedIn
    , userLogsIn = mvUserLogsIn
    , userLogsOut = mvUserLogsOut
    , userData = mvUserData
    , modifyUserData = mvModifyUserData
    }
  where
    -- Equivalent to @modifyTVar@; kept local so the module does not
    -- depend on a newer stm package.
    withTVar :: TVar a -> (a -> a) -> STM ()
    withTVar var f = do
      x <- readTVar var
      writeTVar var (f x)
| pgj/bead | src/Bead/Controller/ServiceContext.hs | bsd-3-clause | 5,400 | 0 | 20 | 1,240 | 1,547 | 835 | 712 | 130 | 4 |
module LetIn2 where
--A definition can be removed if it is not used by other declarations.
--Where a definition is removed, it's type signature should also be removed.
--In this Example: remove the defintion 'square'
-- | Sum of the squares of the two arguments.
sumSquares x y = sq x + sq y

-- | Square, written via the locally bound exponent 'pow'.
-- (Fixture: the refactoring under test removes this definition.)
sq x=x^pow
  where pow=2

-- | Sum of squares of @x@ with itself, i.e. @2 * x^2@.
anotherFun x =sumSquares x x
| SAdams601/HaRe | old/testing/removeDef/LetIn2_TokOut.hs | bsd-3-clause | 319 | 0 | 6 | 74 | 59 | 31 | 28 | 5 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
{-
Playing with lens zippers, to understand how they can be used to
manage a Data.Tree
-}
import Control.Applicative
import Control.Lens.Combinators
import Control.Lens.Indexed
import Control.Lens.Type
import Data.Functor.Identity
import Control.Monad
-- import Control.Applicative
import Control.Lens
import Control.Lens.Zipper
import Data.Tree
import Data.Tree.Lens
-- ---------------------------------------------------------------------
main = putStrLn "hello"
-- ---------------------------------------------------------------------
myTree = [tree1,tree2]
tree1 =
Node "a"
[Node "aa" []
,Node "ab"
[Node "aba" []
,Node "abb" []
]
]
tree2 =
Node "b"
[Node "ba" []
,Node "bb" []
]
-- z :: Top :> Forest String
z = zipper myTree
-- Top :>> Tree a :>> a
t1 :: Forest String
t1 = z & fromWithin traverse & rightward <&> view focus
eg1 = zipper ("hello","world")
& downward _1
& fromWithin traverse
& focus .~ 'J'
& rightmost
& focus .~ 'y'
& rezip
{-
-- eg2 :: String
eg2 = zipper tree1
& downward branches -- branches is in Data.Tree.Lens
& fromWithin traverse
& rightward
& rezip
-- <&> view focus
-}
-- Attempt: in tree1, go down to [aa,ab], then down to focus on aba
eg2 = zipper tree1
& downward branches -- focus is now [aa,ab]
-- & fromWithin traverse & rightward
& rightmost
-- & focus .~ (Node "new" [])
-- rezip
-- <&> view focus
eg2' :: (Functor f, MonadPlus f) => f (Tree String)
eg2' = zipper tree1
& downward branches -- focus is now [aa,ab]
& fromWithin traverse & rightward
<&> focus .~ tree2
<&> rezip
{-
*Main> fdt eg2'
a
|
+- aa
|
`- b
|
+- ba
|
`- bb
*Main>
-}
-- Look for a specific label in the subtree, and replace it
eg5 :: (Functor f, MonadPlus f) => f (Tree String)
eg5 = zipper tree1
& downward branches -- focus is now [aa,ab]
& fromWithin traverse
-- (&) :: a -> (a -> b) -> b
& rightward
-- & rightward
<&> focus .~ tree2
<&> rezip
{-
eg6 = ft
where
z = zipper tree1 & downward branches & fromWithin traverse
ft = findSubTree z "ab"
findSubTree
:: Zipper
(Zipper (Zipper Top Int (Tree [Char])) Int [Tree [Char]])
Int
(Tree [Char])
-> [Char]
-> Zipper
(Zipper (Zipper Top Int (Tree [Char])) Int [Tree [Char]])
Int
(Tree [Char])
-- findSubTree z w = tree1
findSubTree tr what = res
where
ft = tr & view focus :: Tree [Char]
res = if (rootLabel ft == what)
then tr
else findSubTree (tr & rightward) what
-- res = tr
-}
-- ------------------------
-- Data.Tree.Lens provides
-- root :: Lens' (Tree a) a
-- branches :: Lens' (Tree a) [Tree a]
--
-- The branches are a list,
eg3 :: Tree [Char]
eg3 = zipper tree1
& downward branches
& focus .~ [tree2]
& rezip
-- | eg4 shows tree surgery: splice 'tree2' between the first child of
-- 'tree1' and its remaining children, then zip back up.
-- NOTE(review): 'head'/'tail' assume the focused forest is non-empty —
-- true for 'tree1', but partial in general.
eg4 = z1'
  where
    z1 = zipper tree1
           & downward branches
    -- The forest currently in focus (the children of the root).
    subF = view focus z1
    z1' = z1 & focus .~ ([head subF ] ++ [tree2] ++ tail subF)
           & rezip
-- p3 = df $ view focus eg3
-- eg5 = zipper tree1 & downward branches & fromWithin traverse & rightward
-- The following generates output on ghci, as expected
-- zipper tree1 & downward branches & fromWithin traverse & rightward <&> view focus
-- Node {rootLabel = "ab", subForest = [Node {rootLabel = "aba", subForest = []},Node {rootLabel = "abb", subForest = []}]}
------------------------------------------------------------------------
-- how to search for a specific element in the tree?
-- Assumption:
-- 1. the root strings are arranged in a prefix tree, i.e. all sub
-- elements have the same prefix as the parent, and the branches
-- are sorted. [as in tree1 and tree2]
-- 2. the element being searched for is in fact in the tree
{-
focusOn tree key = z
where
z1 = zipper tree
focusOn' z key =
let
node = view focus z
z' = if key == rootLabel node
then z
else -- find the sub tree
where
-}
eg6:: Top :>> (Tree [Char]) :>> [(Tree [Char])]
eg6 = z1
where
z = zipper tree1
& downward branches
-- z1 = view focus z
z1 = z
------------------------------------------------------------------------
-- Utilities to show a forest/tree in ghci
-- | Print a forest in ASCII-art form.
df forest = putStrLn (drawForest forest)
-- | Print a tree in ASCII-art form.
dt tree = putStrLn (drawTree tree)
-- | Draw a tree that lives inside a monadic value.
fdt ft = ft >>= putStr . drawTree
-- --------------------------------------------------
s = ["aa","ab","ac"]
s1 = zipper s
-- ss1 = s1 & fromWithin traverse & rightward <&> view focus
-- -----------------------------------------------------------
-- from http://stackoverflow.com/questions/15489324/traversal-tree-with-lens-and-zippers
testTree = Node 1 [ Node 2 [ Node 4 [ Node 6 [], Node 8 [] ],
Node 5 [ Node 7 [], Node 9 [] ] ],
Node 3 [ Node 10 [],
Node 11 [] ]
]
zipperTree = zipper testTree
z1 = zipperTree & downward branches
& fromWithin traverse
& downward root
& focus .~ 500
& rezip
z1' = Node 1 [ Node 500 [Node {rootLabel = 4, subForest = [Node {rootLabel = 6, subForest = []},Node {rootLabel = 8, subForest = []}]},Node {rootLabel = 5, subForest = [Node {rootLabel = 7, subForest = []},Node {rootLabel = 9, subForest = []}]}],
Node {rootLabel = 3, subForest = [Node {rootLabel = 10, subForest = []},Node {rootLabel = 11, subForest = []}]}]
tape = zipperTree & downward branches
& fromWithin traverse
& downward root
& saveTape
z2 :: Maybe (Tree Integer)
z2 = do
t <- (restoreTape tape testTree)
return (t & focus .~ 15 & rezip)
z3 = zipperTree & downward branches
-- & fromWithin traverse
| RefactoringTools/HaRe | experiments/ZipperPlayLens.hs | bsd-3-clause | 6,133 | 48 | 14 | 1,702 | 1,193 | 621 | 572 | 97 | 1 |
module HsPropPretty where
import HsPropStruct
import PrettyPrint
import PrettySymbols as S
import HsIdentPretty()
--delta = kwIfUnicode '\x0394' "$"
delta = "$"
instance Printable Quantifier where
ppi All = S.all
ppi Exist = exist
instance (Printable i,Printable pa,Printable pp) => Printable (PD i pa pp) where
wrap = ppi
ppi pd =
case pd of
HsPropDecl s n ns p -> sep [kw "property" <+> n <+> fsep ns <+> equals,
funNest p]
HsAssertion s optn a -> sep [kw "assert" <+> maybe empty (<+>equals) optn,
funNest a]
instance (Printable i,Printable e,Printable t,Printable pa,Printable pp)
=> Printable (PA i e t pa pp) where
wrap pa =
case pa of
PropApp i _ [] -> wrap i
PropParen p -> parens p
_ -> parens pa
ppi pa =
case pa of
Quant q i optt pa -> sep [q <+> i <+> ppOptType optt <+> kw ".", ppi pa]
--PropId i -> ppi i
PropApp i ts [] -> wrap i
PropApp i ts ps -> wrap i <+> fsep (map ppPredArg ps)
PropNeg a -> S.not <+> a
PropOp op a1 a2 -> ppiBinOp (wrap a1) (ppOp op) (wrap a2)
PropEqual e1 e2 -> ppiBinOp (braces e1) (kw "===") (braces e2)
PropHas e p -> ppiBinOp (braces e) (kw ":::") (ppi p)
PropParen p -> parens p
-- PropLambda i pa -> lambda<+>i<+>rarrow<+>pa
-- PropLet i optt e pa -> sep ["let"<+>i<+>ppOptType optt<+>"="<+>e,
-- "in"<+>pa]
instance Printable PropOp where ppi = ppOp

-- | Render a propositional connective using the (possibly Unicode)
-- symbols from "PrettySymbols".
ppOp op =
  ppi $ case op of
    Conj -> S.and
    Disj -> S.or
    Imp -> implies
    Equiv -> equiv
instance (PrintableOp i,Printable e,Printable p,Printable t,Printable pa,Printable pp)
=> Printable (PP i e p t pa pp) where
wrap pp =
case pp of
PredApp i _ [] -> wrap i
PredNil -> kw "[]"
PredLifted e -> kw "!"<>braces e
PredStrong p -> delta<>wrap p
PredParen p -> parens p
PredComp pts a -> kw "{|"<+>ppiFSeq (map ppts pts)<+>kw "|"<+>a<+>kw "|}"
where ppts (p,optt) = p<+>ppOptType optt
_ -> parens pp
ppi pp =
case pp of
PredApp i ts [] -> wrap i
PredApp i ts ps -> wrap i <+> fsep (ppPredArgs ps)
PredArrow p1 p2 -> ppiBinOp (wrap p1) rarrow (wrap p2)
PredInfixApp p1 i p2 -> ppiBinOp (wrap p1) (ppiOp i) (wrap p2)
PredNeg optt p -> S.not <+> p
PredOp op optt p1 p2 -> ppiBinOp (wrap p1) (ppOp op) (wrap p2)
PredLfp i optt p -> mu <+> i <+> ppOptType optt <+> kw "." <+> p
PredGfp i optt p -> nu <+> i <+> ppOptType optt <+> kw "." <+> p
_ -> wrap pp
ppPredArgs as = map ppPredArg as
-- | A predicate argument is either an expression (rendered in braces)
-- or a predicate (rendered atomically).
ppPredArg a = either braces wrap a
-- | Render the annotation part when a type is present, nothing
-- otherwise.
ppOptType x = maybe empty (el<+>) x
| forste/haReFork | tools/property/AST/HsPropPretty.hs | bsd-3-clause | 2,666 | 2 | 15 | 781 | 1,147 | 553 | 594 | 67 | 4 |
{-# OPTIONS_GHC -Wall #-}
module AST.Expression.Valid where
import qualified AST.Expression.General as General
import qualified AST.Pattern as Pattern
import qualified AST.Type as Type
import qualified AST.Variable as Var
import qualified Reporting.Region as R
{-| "Normal" expressions. When the compiler checks that type annotations and
ports are all paired with definitions in the appropriate order, it collapses
them into a Def that is easier to work with in later phases of compilation.
-}
-- | Validated expressions: region-annotated, still using raw
-- (unresolved) variables and types.
type Expr =
    General.Expr R.Region Def Var.Raw Type.Raw

type Expr' =
    General.Expr' R.Region Def Var.Raw Type.Raw

-- | A definition: pattern, body, and the optional type annotation that
-- was collapsed into it (see the module comment above).
data Def
    = Definition Pattern.RawPattern Expr (Maybe Type.Raw)
    deriving (Show)
| Axure/elm-compiler | src/AST/Expression/Valid.hs | bsd-3-clause | 704 | 0 | 9 | 116 | 126 | 78 | 48 | 14 | 0 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE DataKinds #-}
module MoreExplicitForalls where
import Data.Proxy
data family F1 a

-- Data-family instance with an explicit, kind-annotated forall.
data instance forall (x :: Bool). F1 (Proxy x) = MkF

class C a where
  type F2 a b

-- Instance (and associated type) with explicit quantification.
instance forall a. C [a] where
  type forall b. F2 [a] b = Int

-- Closed type family with explicitly quantified equations.
type family G a b where
  forall x y. G [x] (Proxy y) = Double
  forall z. G z z = Bool
| sdiehl/ghc | testsuite/tests/hiefile/should_compile/hie010.hs | bsd-3-clause | 510 | 1 | 9 | 114 | 159 | 88 | 71 | -1 | -1 |
module IncludeTest where
import Test.QuickCheck
import IncludeTest_Types
import IncludedTest_Types
import Util
-- | Round-tripping a 'Foo' through serialization is the identity.
propToFoo = foo == roundTrip foo
  where foo = Foo { foo_Bar = 5, foo_Baz = 10 }
        roundTrip = to_Foo . from_Foo

-- | The default 'Foo' (with @foo_Baz@ overridden) equals the @bar_baz@
-- field of the default 'Bar' — presumably defined in the included
-- thrift file; confirm against IncludedTest_Types.
propDefaultFoo = default_Foo { foo_Baz = 1 } == bar_baz default_Bar

main :: IO ()
main = aggregateResults $ fmap quickCheckResult [propToFoo, propDefaultFoo]
| getyourguide/fbthrift | thrift/lib/hs/tests/IncludeTest.hs | apache-2.0 | 394 | 0 | 8 | 73 | 110 | 63 | 47 | 11 | 1 |
module C2 where
{-Unfold 'sq' should fail as 'pow', which is used in the definiton of 'sq'
is not in scope.
-}
import D2 hiding (main,pow)
-- | Sum of 'sq' over a list.  (Fixture: per the comment above,
-- unfolding 'sq' here must fail because its helper 'pow' is hidden by
-- the import.)
sumSquares1 (x:xs) = sq x + sumSquares1 xs
sumSquares1 [] = 0
| kmate/HaRe | old/testing/unfoldDef/C2_TokOut.hs | bsd-3-clause | 209 | 0 | 7 | 46 | 52 | 29 | 23 | 4 | 1 |
{-# LANGUAGE GADTs, MultiParamTypeClasses #-}
module T14325 where
-- A class whose superclass is the equality constraint @a ~ b@.
class (a~b) => C a b

-- Usable as a cast: the superclass equality lets @x :: a@ be returned
-- at type @b@.
foo :: C a b => a -> b
foo x = x

-- Requires @C (f b) b@, whose superclass demands the cyclic equality
-- @f b ~ b@.
hm3 :: C (f b) b => b -> f b
hm3 x = foo x
| ezyang/ghc | testsuite/tests/typecheck/should_fail/T14325.hs | bsd-3-clause | 167 | 0 | 8 | 47 | 89 | 45 | 44 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{- |
Provides the primary functons for the storage system
-}
module Solar.Storage.Functions
where
import Solar.Data.KV as K
import Solar.Storage.Types
import Solar.Storage.Class
import Control.Monad.Trans.RWS as R
import Control.Monad.Trans.Class (lift)
import Data.Monoid(Monoid(..))
-- | Retrieves the 'KV' within the 'RWST' environment.  The mutable
-- storage 'Context' lives in the state component and is threaded
-- through the backend call.
getSF :: (StorageFC n r c d c' m a, Monad m, Monoid b)
      => KVIdentifier n -- ^ The identifier of the 'KV' desired
      -> RWST a b Context m (Maybe (KV n r c d c'))
      -- ^ Results in the action that might have a 'KV' value by
      -- that identifier.
getSF i = do
  c <- get   -- current storage context
  s <- ask   -- storage backend from the reader environment
  (v, c') <- lift $ sfcGet s c i
  put c'     -- keep whatever context the backend handed back
  return v
-- | Persists the 'KV' within the 'RWST' environment
putSF :: (StorageFC n r c d c' m a, Monad m, Monoid b)
=> KV n r c d c' -- ^ The 'KV' that you want to put up
-> RWST a b Context m (KV n r c d c')
-- ^ The resulting action. It may be important
-- to have a 'Context' that was set by a previous
-- 'getSF' call.
putSF kv = do
c <- get
s <- ask
(v, c') <- lift $ sfcPut s c kv
put c'
return v
-- | Deletes the 'KV' within the 'RWST' environment
delSF :: (StorageFC n r c d c' m a, Monad m, Monoid b)
=> TaggedIdentifier n r c (d n r c) (c' n r c)
-- ^ A type-tagged identifier for this entity.
-- The types may be significant.
-> RWST a b Context m (Bool)
-- ^ The resulting action with a success flag.
delSF i = do
c <- get
s <- ask
(v, c') <- lift $ sfcDel s c i
put c'
return v
| Cordite-Studios/solar | solar-cells/Solar/Storage/Functions.hs | mit | 1,717 | 0 | 11 | 553 | 492 | 257 | 235 | 35 | 1 |
module Control.Monad.Pool (WithResource, withResource, tryWithResource, runPooled, runDedicated) where
import qualified Control.Monad.Trans.Pool as T
import Data.Pool (Pool)
-- | A computation that needs a resource of type @r@ and yields an @a@.
-- Thin wrapper around 'T.WithResource' that hides the transformer API.
newtype WithResource r a = WithResource (T.WithResource r a)

-- | Run the computation, taking resources from (and returning them to)
-- the given pool.
runPooled :: WithResource r a -> Pool r -> IO a
runPooled (WithResource m) = T.runPooled m

-- | Run the computation against one dedicated resource.
runDedicated :: WithResource r a -> r -> IO a
runDedicated (WithResource m) = T.runDedicated m

-- | Lift a resource-consuming action.
withResource :: (r -> IO a) -> WithResource r a
withResource = WithResource . T.withResource

-- | Like 'withResource' but yielding 'Nothing' on failure to obtain a
-- resource.  NOTE(review): semantics inferred from the underlying
-- 'T.tryWithResource' name — confirm against Control.Monad.Trans.Pool.
tryWithResource :: (r -> IO a) -> WithResource r (Maybe a)
tryWithResource = WithResource . T.tryWithResource
| srijs/haskell-resource-pool-monad | src/Control/Monad/Pool.hs | mit | 631 | 0 | 8 | 96 | 223 | 120 | 103 | 12 | 1 |
module Main(main) where
import Text.Printf
import Graphics.Gloss
import Graphics.Gloss.Data.Vector
import Data.List
import Bezier
data Rule = Rule Char String deriving (Show)
newtype Axiom = Axiom String deriving (Show)
newtype Angle = Angle Float deriving (Show)
data LSystem = LSystem Angle Axiom [Rule] deriving (Show)
chosenSystem = terDragon
startIterations = 1
endIterations = 5
smoothness = 2
dragon = LSystem (Angle (pi/2)) (Axiom "f") [Rule 'f' "f-h", Rule 'h' "f+h"]
terDragon = LSystem (Angle (2*pi/3)) (Axiom "f") [Rule 'f' "f+f-f"]
main :: IO()
main = do
-- mapM_ (print) lists
-- where lists = [map (\a->(a,a)) [0..n] | n <- [1..10]]
-- mapM_ (print) (map length alignedPaths)
-- display (InWindow "fractal" (300, 300) (100, 100)) (white) (Pictures layers)
putStrLn (makeObj ( map (scale' 10) alignedPaths) ++ faces alignedPaths)
where layers = map line alignedPaths ++ map markPoints alignedPaths
alignedPaths = scaleAndRotateCurves chosenSystem paths'
paths' = [ selfComposeN (endIterations-n) (smooth 2) (makePath chosenSystem n) | n <- [startIterations..endIterations] ]
showFloat :: Float->String
showFloat = printf "%.10f"
-- | Emit Wavefront OBJ vertex lines for a stack of curves; curve
-- number @k@ (0-based) is placed at height @10 * 2^k@.
makeObj :: [[Vector]] -> String
makeObj curves = concatMap layer (zip [0..] curves)
  where
    -- One "v x y z" line per point of the curve, all at the layer's
    -- height.
    layer (order, curve) =
        concatMap (makeVectorString . addZ (10 * 2 ^ order)) curve
makeVectorString :: (Float,Float,Float)->String
makeVectorString (x,y,z) = printf "v %0.10f %0.10f %0.10f\n" x y z
addZ :: Float -> (Float, Float) -> (Float, Float,Float)
addZ z (x,y) = (x, y, z)
-- | Emit OBJ face lines stitching consecutive curve layers into quads,
-- each written as two triangles.  Vertex indices are 1-based, matching
-- the order 'makeObj' emits them.  Total: an empty curve list now
-- yields no faces (previously a pattern-match crash), and the quad
-- corner binders are named (the old tuple binder @c@ shadowed the
-- head-curve binding @c@).
faces :: [[Vector]]->String
faces [] = ""
faces (c:curves) = concat
    [ printf "f %d %d %d\nf %d %d %d\n" tl tr bl tr br bl
    | (tl, tr, bl, br) <- squares ]
  where
    -- Corner indices (top-left, top-right, bottom-left, bottom-right)
    -- of every quad between two adjacent layers.
    squares = [ (p, p + 1, p + width, p + width + 1) | p <- topCorners ]
    topCorners = [ x + y | x <- [0 .. width - 2], y <- layerStarts ]
    layerStarts = [ 1 + n * width | n <- [0 .. height - 2] ]
    height = 1 + length curves
    width = length c
-- | Infix form of 'selfComposeN' with flipped arguments.
(^*) f n = selfComposeN n f

-- | Apply @function@ repeatedly to @arg@.
-- NOTE(review): the base case applies the function once, so
-- @selfComposeN n f@ applies @f@ a total of @n+1@ times (and diverges
-- for negative @n@).  Callers appear to rely on this convention —
-- confirm before "fixing" the apparent off-by-one.
selfComposeN :: Integer -> (a->a) -> a -> a
selfComposeN 0 function arg = function arg
selfComposeN n function arg = function (selfComposeN (n-1) function arg)
markPoints :: [Vector] -> Picture
markPoints points = Pictures (map drawCircle points)
where drawCircle (x,y) = translate x y (Circle 0.1)
-- | Orient the curve stack so successive iterations line up: curve
-- number @n@ (1-based, counted from the deepest iteration after the
-- 'reverse') is scaled by @scaleFactor^n@ and rotated by @n@ twist
-- angles.
scaleAndRotateCurves :: LSystem->[[Vector]]->[[Vector]]
scaleAndRotateCurves system curves = map (\(n, curve)->scaleAndRotate (scaleFactor**fromIntegral n) (angle*fromIntegral n) curve) (addIndicies (reverse curves))
  where angle = -( ( \(Angle a) -> a ) . getTwistAngle) system -- -ve because we've reversed the order of the curves
        scaleFactor = getScaleFactor system

-- | Pair each element with its 1-based position.  (Name keeps the
-- original spelling; "addIndices" would be conventional.)
addIndicies :: [a]->[(Integer, a)]
addIndicies = zip [1..]
scaleAndRotate :: Float->Float->[Vector]->[Vector]
scaleAndRotate scaleFactor angle = scale' scaleFactor . rotate' angle
scale' :: Float->[Vector]->[Vector]
scale' scaleFactor= map (mulSV scaleFactor)
rotate' :: Float->[Vector]->[Vector]
rotate' = map . rotateV
-- | Expand the L-system @n@ times and interpret the result as a turtle
-- path starting at the origin with heading 0.
makePath :: LSystem->Integer->Path
makePath (LSystem (Angle theta) (Axiom a) rules) n = (0,0) : makePath' (iterate' n a rules) theta (0,0) 0

-- | Turtle interpreter: '+' / '-' turn by the system's angle; any other
-- character draws a unit step along the current heading.
-- NOTE(review): @curPos + (sin t, cos t)@ relies on a component-wise
-- 'Num' instance for gloss points — confirm it comes from gloss.
makePath' :: String->Float->Vector->Float->Path
makePath' [] theta curPos initialTheta = []
makePath' (c:cs) theta curPos initialTheta
  | c == '+' = makePath' cs theta curPos (initialTheta + theta)
  | c == '-' = makePath' cs theta curPos (initialTheta - theta)
  | otherwise = newPos : makePath' cs theta newPos initialTheta
  where newPos = curPos + (sin initialTheta, cos initialTheta)
iterate' :: Integer->String->[Rule]->String
iterate' 0 string rules = string
iterate' n string rules = iterate' (n-1) (replace string rules) rules
-- | Apply the rule set to every character of the string.  Total: the
-- empty string now maps to the empty string (previously a
-- pattern-match failure); the old redundant singleton clause is
-- subsumed by the new base case.
replace :: String->[Rule]->String
replace [] _ = []
replace (c:cs) rules = replace' c rules ++ replace cs rules
-- | Look up the replacement for a single character; a character with
-- no matching rule is kept as-is.  Total: an empty rule list now keeps
-- the character (previously a pattern-match failure), and the
-- redundant singleton-list clause is folded into the general one.
replace' :: Char->[Rule]->String
replace' c [] = [c]
replace' c (Rule c' s:rs)
  | c == c' = s
  | otherwise = replace' c rs
getScaleFactor :: LSystem->Float
getScaleFactor system = magV delta1/magV delta0
where delta0 = last path0
delta1 = last path1
path0 = makePath system 0
path1 = makePath system 1
getTwistAngle :: LSystem->Angle
getTwistAngle system = Angle (argV sum0 - argV sum1)
where sum0 = last path0 - head path0
sum1 = last path1 - head path1
path0 = makePath system 0
path1 = makePath system 1
| thip/developing-fractal | Main.hs | mit | 4,628 | 1 | 16 | 1,064 | 1,808 | 957 | 851 | 93 | 1 |
-- | Functions for producing RenderedCode values from PureScript Type values.
module Language.PureScript.Docs.RenderedCode.Render (
renderType,
renderTypeAtom,
renderRow,
renderKind,
RenderTypeOptions(..),
defaultRenderTypeOptions,
renderTypeWithOptions
) where
import Prelude ()
import Prelude.Compat
import Data.Monoid ((<>))
import Data.Maybe (fromMaybe)
import Control.Arrow ((<+>))
import Control.PatternArrows
import Language.PureScript.Crash
import Language.PureScript.Names
import Language.PureScript.Types
import Language.PureScript.Kinds
import Language.PureScript.Pretty.Kinds
import Language.PureScript.Environment
import Language.PureScript.Docs.RenderedCode.Types
import Language.PureScript.Docs.Utils.MonoidExtras
typeLiterals :: Pattern () Type RenderedCode
typeLiterals = mkPattern match
where
match TypeWildcard =
Just (syntax "_")
match (TypeVar var) =
Just (ident var)
match (PrettyPrintObject row) =
Just $ mintersperse sp
[ syntax "{"
, renderRow row
, syntax "}"
]
match (TypeConstructor (Qualified mn name)) =
Just (ctor (runProperName name) (maybeToContainingModule mn))
match (ConstrainedType deps ty) =
Just $ mintersperse sp
[ syntax "(" <> constraints <> syntax ")"
, syntax "=>"
, renderType ty
]
where
constraints = mintersperse (syntax "," <> sp) (map renderDep deps)
renderDep (pn, tys) =
let instApp = foldl TypeApp (TypeConstructor pn) tys
in renderType instApp
match REmpty =
Just (syntax "()")
match row@RCons{} =
Just (syntax "(" <> renderRow row <> syntax ")")
match _ =
Nothing
-- |
-- Render code representing a Row
--
renderRow :: Type -> RenderedCode
renderRow = uncurry renderRow' . rowToList
where
renderRow' h t = renderHead h <> renderTail t
renderHead :: [(String, Type)] -> RenderedCode
renderHead = mintersperse (syntax "," <> sp) . map renderLabel
renderLabel :: (String, Type) -> RenderedCode
renderLabel (label, ty) =
mintersperse sp
[ ident label
, syntax "::"
, renderType ty
]
renderTail :: Type -> RenderedCode
renderTail REmpty = mempty
renderTail other = sp <> syntax "|" <> sp <> renderType other
typeApp :: Pattern () Type (Type, Type)
typeApp = mkPattern match
where
match (TypeApp f x) = Just (f, x)
match _ = Nothing
appliedFunction :: Pattern () Type (Type, Type)
appliedFunction = mkPattern match
where
match (PrettyPrintFunction arg ret) = Just (arg, ret)
match _ = Nothing
kinded :: Pattern () Type (Kind, Type)
kinded = mkPattern match
where
match (KindedType t k) = Just (k, t)
match _ = Nothing
matchTypeAtom :: Pattern () Type RenderedCode
matchTypeAtom = typeLiterals <+> fmap parens matchType
where
parens x = syntax "(" <> x <> syntax ")"
matchType :: Pattern () Type RenderedCode
matchType = buildPrettyPrinter operators matchTypeAtom
where
operators :: OperatorTable () Type RenderedCode
operators =
OperatorTable [ [ AssocL typeApp $ \f x -> f <> sp <> x ]
, [ AssocR appliedFunction $ \arg ret -> mintersperse sp [arg, syntax "->", ret] ]
, [ Wrap forall_ $ \idents ty -> mconcat [syntax "forall", sp, mintersperse sp (map ident idents), syntax ".", sp, ty] ]
, [ Wrap kinded $ \k ty -> mintersperse sp [ty, syntax "::", renderKind k] ]
]
forall_ :: Pattern () Type ([String], Type)
forall_ = mkPattern match
where
match (PrettyPrintForAll idents ty) = Just (idents, ty)
match _ = Nothing
insertPlaceholders :: RenderTypeOptions -> Type -> Type
insertPlaceholders opts =
everywhereOnTypesTopDown convertForAlls . everywhereOnTypes (convert opts)
-- | Strip the qualifier from the primitive Boolean/Number/String type
-- constructors so they render unqualified; every other type is
-- returned unchanged.
dePrim :: Type -> Type
dePrim ty@(TypeConstructor (Qualified _ name))
  | ty == tyBoolean || ty == tyNumber || ty == tyString =
    TypeConstructor $ Qualified Nothing name
dePrim other = other
convert :: RenderTypeOptions -> Type -> Type
convert _ (TypeApp (TypeApp f arg) ret) | f == tyFunction = PrettyPrintFunction arg ret
convert opts (TypeApp o r) | o == tyObject && prettyPrintObjects opts = PrettyPrintObject r
convert _ other = other
-- | Collapse directly nested 'ForAll's into a single
-- 'PrettyPrintForAll' so all bound variables render after one
-- @forall@ keyword.
-- NOTE(review): 'go' conses each inner binder onto the list, so the
-- identifiers come out innermost-first — confirm the renderer expects
-- that order.
convertForAlls :: Type -> Type
convertForAlls (ForAll i ty _) = go [i] ty
  where
    go idents (ForAll ident' ty' _) = go (ident' : idents) ty'
    go idents other = PrettyPrintForAll idents other
convertForAlls other = other
preprocessType :: RenderTypeOptions -> Type -> Type
preprocessType opts = dePrim . insertPlaceholders opts
-- |
-- Render code representing a Kind
--
renderKind :: Kind -> RenderedCode
renderKind = kind . prettyPrintKind
-- |
-- Render code representing a Type, as it should appear inside parentheses
--
renderTypeAtom :: Type -> RenderedCode
renderTypeAtom =
fromMaybe (internalError "Incomplete pattern") . pattern matchTypeAtom () . preprocessType defaultRenderTypeOptions
-- |
-- Render code representing a Type
--
renderType :: Type -> RenderedCode
renderType = renderTypeWithOptions defaultRenderTypeOptions
data RenderTypeOptions = RenderTypeOptions
{ prettyPrintObjects :: Bool
}
defaultRenderTypeOptions :: RenderTypeOptions
defaultRenderTypeOptions = RenderTypeOptions { prettyPrintObjects = True }
renderTypeWithOptions :: RenderTypeOptions -> Type -> RenderedCode
renderTypeWithOptions opts =
fromMaybe (internalError "Incomplete pattern") . pattern matchType () . preprocessType opts
| michaelficarra/purescript | src/Language/PureScript/Docs/RenderedCode/Render.hs | mit | 5,472 | 0 | 16 | 1,128 | 1,684 | 881 | 803 | 124 | 8 |
import Control.Monad
import Data.Matrix
-- | A cell is alive ('True') or dead ('False').
type Cell = Bool

-- | Conway's rules: a live cell survives with exactly two or three live
-- neighbours; a dead cell becomes alive with exactly three.
next :: Cell -> [Cell] -> Cell
next alive neighbourhood
  | alive     = live == 2 || live == 3
  | otherwise = live == 3
  where live = liveCount neighbourhood

-- | Number of live cells in a list.
liveCount :: ([Cell] -> Int)
liveCount = foldr (\cell n -> if cell then n + 1 else n) 0
-- | Advance the whole board one generation.  Rows are rewritten one at
-- a time into the accumulator @w@, but neighbour values are always read
-- from the original 'world', so the update is a proper synchronous
-- Life step.
step :: Matrix Cell -> Matrix Cell
step world =
  foldl (\w r -> mapRow (\c cell -> next cell (neighborsOf r c)) r w) world [1..rmax]
  where rmax = nrows world
        cmax = ncols world
        -- The 3x3 block around (r, c), including (r, c) itself.
        allNeighborCoords r c = [ (r + r', c + c') | r' <- [-1..1], c' <- [-1..1] ]
        -- Live/dead values of the in-bounds neighbours, read from the
        -- previous generation.
        neighborsOf r c = map (\(x, y) -> getElem x y world) $ filter (okCoord r c) $ allNeighborCoords r c
        -- Keep coordinates inside the (1-based) matrix that are not the
        -- cell itself.
        okCoord r c (x, y)
          | x == r && y == c = False
          | x < 1 || x > rmax = False
          | y < 1 || y > cmax = False
          | otherwise = True
convert :: (a -> b) -> Matrix a -> Matrix b
convert f m = fromList (nrows m) (ncols m) $ map f $ toList m
drawAndStep world = do
print $ convert (\x -> if x then '.' else '_') world
return $ step world
main = foldM (\w _ -> drawAndStep w) initialWorld iterations
where initialWorld = fromList 9 9 $ cycle [True, True, False, True]
iterations = [1..]
| ajm188/conway | Conway.hs | mit | 1,331 | 0 | 13 | 423 | 624 | 315 | 309 | 34 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Iptf.Hosts.Internal where
import Control.Applicative (pure)
import Data.Changeable (Changeable (..))
import qualified Data.Map.Strict as Map
import qualified Data.Set as S
import Data.Text (Text)
import Iptf.Ip.Internal (IP)
-- | Hosts is a map from and IP to a set of Hostnames. A Hostname can
-- only appear once in a Hosts
newtype Hosts = Hosts (Map.Map IP (S.Set Hostname)) deriving (Show, Eq)
-- | A Hostname cannot be blank
newtype Hostname = Hostname Text deriving (Show, Eq, Ord)
-- | A Record consists of an IP and a list of Hostnames. It's basically
-- a reified entry from Hosts
data Record = Record IP [Hostname] deriving (Show)
-- | HostsFile represents an /etc/hosts file. It contains any
-- text before the ip-to-file section. The ip-to-file section represented
-- as a Hosts, and any text after the ip-to-file section.
data HostsFile = HostsFile { pre :: Text
, hosts :: Hosts
, post :: Text } deriving (Show, Eq)
-- | Smart constructor for 'Hostname': the empty string is rejected,
-- anything else is wrapped.
hostname :: Text -> Maybe Hostname
hostname t = if t == "" then Nothing else Just (Hostname t)
-- | Find the IP a hostname is mapped to by scanning entries in key
-- order.
getIP :: Hostname -> Hosts -> Maybe IP
getIP n (Hosts h) = go $ Map.toList h
  where
    go [] = Nothing
    go ((k, v):xs) = if S.member n v
                     then Just k
                     else go xs

-- | Same contract as 'getIP', implemented via 'Map.filter'.
-- NOTE(review): duplicate of 'getIP'; consider keeping only one.
getIP' :: Hostname -> Hosts -> Maybe IP
getIP' n (Hosts h) = case Map.keys filtered of
  [] -> Nothing
  x:_ -> Just x
  where
    filtered = Map.filter (S.member n) h
updateHostsFile :: HostsFile -> IP -> Hostname -> Changeable HostsFile
updateHostsFile (HostsFile pre' hosts' end') ip name =
case update hosts' name ip of
Changed h -> Changed $ HostsFile pre' h end'
Same _ -> Same $ HostsFile pre' hosts' end'
update :: Hosts -> Hostname -> IP -> Changeable Hosts
update hs n ip
| hostnameExists hs n && pure ip == getIP n hs = Same hs
| hostnameExists hs n = Changed . add ip [n] $ remove n hs
| otherwise = Changed $ add ip [n] hs
empty :: Hosts
empty = Hosts Map.empty
union :: Hosts -> Hosts -> Hosts
union (Hosts h1) h2 = Hosts $ Map.union h1 h2'
where
(Hosts h2') = foldr remove h2 ns1
ns1 = S.toList . S.unions $ Map.elems h1
add :: IP -> [Hostname] -> Hosts -> Hosts
add ip ns hs = Hosts $ Map.insertWith S.union ip (S.fromList ns) hs'
where
(Hosts hs') = foldr remove hs ns
new :: IP -> [Hostname] -> Hosts
new ip ns = Hosts $ Map.singleton ip (S.fromList ns)
-- | Drop a hostname from every entry; IPs left with no hostnames are
-- removed entirely.
remove :: Hostname -> Hosts -> Hosts
remove n (Hosts hs) = Hosts $ Map.mapMaybe prune hs
  where
    prune ns = maybeSet $ S.delete n ns
    -- Collapse an emptied set to Nothing so mapMaybe drops the key.
    maybeSet s = if S.null s then Nothing else Just s
-- | Flatten the map into one 'Record' per IP.
toList :: Hosts -> [Record]
toList (Hosts m) = map (\(ip, s) -> Record ip (S.toList s)) entries
  where
    entries = Map.toList m

-- | Build a 'Hosts' from records; records with no hostnames are
-- skipped, and 'union' strips the left side's names from the right, so
-- earlier records take precedence for duplicate hostnames.
fromList :: [Record] -> Hosts
fromList [] = empty
fromList (Record _ []:xs) = fromList xs
fromList (Record i hs:xs) = new i hs `union` fromList xs

-- | Fold-based variant of 'fromList'.
-- NOTE(review): near-duplicate of 'fromList' — confirm the two agree
-- on precedence and consolidate.
fromList' :: [Record] -> Hosts
fromList' = foldr addRecord empty
  where
    addRecord (Record _ []) = id
    addRecord (Record ip ns) = add ip ns
null :: Hosts -> Bool
null (Hosts h) = Map.null h
hostnameExists :: Hosts -> Hostname -> Bool
hostnameExists (Hosts hs) n = S.member n $ S.unions (Map.elems hs)
| werbitt/ip-to-file | src/Iptf/Hosts/Internal.hs | mit | 3,479 | 0 | 11 | 961 | 1,235 | 635 | 600 | 69 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
module Web.Markury where
import Web.Markury.Action.BookmarkAction
import Web.Markury.Action.LoginAction
import Web.Markury.Action.TagAction
import Web.Markury.Action.UserAction
import Web.Markury.Model.DB
import Web.Markury.Model.Input
import Control.Monad.Logger ( runNoLoggingT )
import Database.Persist.Sql ( runSqlPool, runMigration )
import Database.Persist.Sqlite ( createSqlitePool )
import Web.Spock.Safe
-- | Boot the Markury bookmark app: open a 5-connection SQLite pool on
-- markury.db, run migrations, then serve the Spock routes on port 8080.
runMarkury :: IO ()
runMarkury = do
    pool <- runNoLoggingT $ createSqlitePool "markury.db" 5
    runNoLoggingT $ runSqlPool (runMigration migrateAll) pool
    runSpock 8080 $ spock (defaultSpockCfg Nothing (PCPool pool) Nothing) $ do
        getpost "login" $ loginAction
        get "logout" $ logoutAction
        -- Each subcomponent follows the same CRUD shape: list, view,
        -- add (session-guarded), delete (session-guarded).
        subcomponent "bookmarks" $ do
            get root allBookmarksAction
            get ("view" <//> var) viewBookmarkAction
            getpost "add" $ do
                checkSession
                addBookmarkAction
            getpost ("delete" <//> var) $ \id -> do
                checkSession
                deleteBookmarkAction id
        subcomponent "users" $ do
            get root allUsersAction
            get ("view" <//> var) viewUserAction
            getpost "add" $ do
                -- checkSession
                -- NOTE(review): the session check above is commented out, so
                -- user creation is unauthenticated (perhaps to bootstrap the
                -- first account) — confirm this is intentional.
                addUserAction
            getpost ("delete" <//> var) $ \id -> do
                checkSession
                deleteUserAction id
        subcomponent "tags" $ do
            get root allTagsAction
            get ("view" <//> var) viewTagAction
            getpost "add" $ do
                checkSession
                addTagAction
            getpost ("delete" <//> var) $ \id -> do
                checkSession
                deleteTagAction id
| y-taka-23/markury | src/Web/Markury.hs | mit | 1,780 | 0 | 17 | 558 | 432 | 211 | 221 | 46 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Yesod.Default.Main
( defaultMain
, defaultMainLog
, defaultRunner
, defaultDevelApp
, LogFunc
) where
import Yesod.Default.Config
import Network.Wai (Application)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, setPort, setHost, setOnException)
import qualified Network.Wai.Handler.Warp as Warp
import System.Directory (doesDirectoryExist, removeDirectoryRecursive)
import Network.Wai.Middleware.Gzip (gzip, GzipFiles (GzipCacheFolder), gzipFiles, def)
import Network.Wai.Middleware.Autohead (autohead)
import Network.Wai.Middleware.Jsonp (jsonp)
import Control.Monad (when)
import System.Environment (getEnvironment)
import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)
import Control.Monad.Logger (Loc, LogSource, LogLevel (LevelError), liftLoc)
import System.Log.FastLogger (LogStr, toLogStr)
import Language.Haskell.TH.Syntax (qLocation)
#ifndef WINDOWS
import qualified System.Posix.Signals as Signal
import Control.Concurrent (forkIO, killThread)
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
#endif
-- | Run your app, taking environment and port settings from the
-- commandline.
--
-- @'fromArgs'@ helps parse a custom configuration
--
-- > main :: IO ()
-- > main = defaultMain (fromArgs parseExtra) makeApplication
--
defaultMain :: (Show env, Read env)
            => IO (AppConfig env extra)
            -> (AppConfig env extra -> IO Application)
            -> IO ()
defaultMain load getApp = do
    config <- load
    -- Configure Warp from the loaded AppConfig, then hand it the app.
    let settings = setPort (appPort config)
                 . setHost (appHost config)
                 $ defaultSettings
    getApp config >>= runSettings settings
type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO ()
-- | Same as @defaultMain@, but gets a logging function back as well as an
-- @Application@ to install Warp exception handlers.
--
-- Since 1.2.5
defaultMainLog :: (Show env, Read env)
            => IO (AppConfig env extra)
            -> (AppConfig env extra -> IO (Application, LogFunc))
            -> IO ()
defaultMainLog load getApp = do
    config <- load
    (app, logFunc) <- getApp config
    runSettings
        ( setPort (appPort config)
        $ setHost (appHost config)
        -- Route Warp exceptions through the app's log function; the TH
        -- splice records this call site as the log location. Exceptions
        -- Warp considers uninteresting (per defaultShouldDisplayException,
        -- e.g. client disconnects) are filtered out first.
        $ setOnException (const $ \e -> when (shouldLog' e) $ logFunc
            $(qLocation >>= liftLoc)
            "yesod"
            LevelError
            (toLogStr $ "Exception from Warp: " ++ show e))
        $ defaultSettings
        ) app
  where
    shouldLog' = Warp.defaultShouldDisplayException
-- | Run your application continously, listening for SIGINT and exiting
-- when received
--
-- > withYourSite :: AppConfig DefaultEnv -> Logger -> (Application -> IO a) -> IO ()
-- > withYourSite conf logger f = do
-- > Settings.withConnectionPool conf $ \p -> do
-- > runConnectionPool (runMigration yourMigration) p
-- > defaultRunner f $ YourSite conf logger p
defaultRunner :: (Application -> IO ()) -> Application -> IO ()
defaultRunner f app = do
    -- clear the .static-cache so we don't have stale content
    exists <- doesDirectoryExist staticCache
    when exists $ removeDirectoryRecursive staticCache
#ifdef WINDOWS
    -- On Windows there is no POSIX signal handling: run in the foreground.
    f (middlewares app)
#else
    -- Elsewhere, run the app on a worker thread and block on an MVar until
    -- SIGINT arrives; CatchOnce means a second Ctrl-C kills the process.
    tid <- forkIO $ f (middlewares app) >> return ()
    flag <- newEmptyMVar
    _ <- Signal.installHandler Signal.sigINT (Signal.CatchOnce $ do
        putStrLn "Caught an interrupt"
        killThread tid
        putMVar flag ()) Nothing
    takeMVar flag
#endif
  where
    -- Standard middleware stack: gzip (cached on disk), JSONP, auto-HEAD.
    middlewares = gzip gset . jsonp . autohead
    gset = def { gzipFiles = GzipCacheFolder staticCache }
    staticCache = ".static-cache"
-- | Run your development app using a custom environment type and loader
-- function. The listening port may be overridden via the PORT environment
-- variable, and the port printed in the banner via DISPLAY_PORT.
defaultDevelApp
    :: (Show env, Read env)
    => IO (AppConfig env extra) -- ^ A means to load your development @'AppConfig'@
    -> (AppConfig env extra -> IO Application) -- ^ Get your @Application@
    -> IO (Int, Application)
defaultDevelApp load getApp = do
    conf <- load
    env <- getEnvironment
    let fromEnv key fallback = fromMaybe fallback (lookup key env >>= readMaybe)
        p        = fromEnv "PORT" (appPort conf)
        pdisplay = fromEnv "DISPLAY_PORT" p
    putStrLn $ "Devel application launched: http://localhost:" ++ show pdisplay
    app <- getApp conf
    return (p, app)
| tolysz/yesod | yesod/Yesod/Default/Main.hs | mit | 4,453 | 0 | 19 | 1,014 | 945 | 520 | 425 | 87 | 1 |
-- | This file defines tests tree evaluation and parser functions.
--
-- Author: Thorsten Rangwich. See file <../LICENSE> for details.
--
-- This needs QuickCheck in at least version 2. Otherwise you have to replace QuickCheck2 with QuickCheck and
-- either implement an instance for Arbitrary Char or leave out the string tests.
import qualified Test.Framework as Fw
import qualified Test.Framework.Providers.QuickCheck2 as FQc
import qualified Data.List as DL
import qualified Data.Monoid as Monoid
import qualified Tree.FormulaTree as T
import qualified Data.Plain as P
import qualified FormulaEngine.Evaluate as Eval
import qualified FormulaEngine.Functions.Numerics as NumFuncs
import qualified FormulaEngine.Functions.StringFuncs as StringFuncs
import qualified FormulaEngine.Parse as Parse
-- | Run a tree function on 'Int' arguments, wrapping each argument as a raw
-- 'P.PlInt' leaf and unwrapping the evaluated result.
integerFuncWrapper :: ([P.Plain] -> P.Plain) -> [Int] -> Int
integerFuncWrapper tFunc args =
    P.get (Eval.calcTree (T.Funcall (T.NamedFunction "f" tFunc) (map (T.Raw . P.PlInt) args)))

-- | As 'integerFuncWrapper', but for 'Float' arguments.
floatFuncWrapper :: ([P.Plain] -> P.Plain) -> [Float] -> Float
floatFuncWrapper tFunc args =
    P.get (Eval.calcTree (T.Funcall (T.NamedFunction "f" tFunc) (map (T.Raw . P.PlFloat) args)))

-- | As 'integerFuncWrapper', but for 'String' arguments.
stringFuncWrapper :: ([P.Plain] -> P.Plain) -> [String] -> String
stringFuncWrapper tFunc args =
    P.get (Eval.calcTree (T.Funcall (T.NamedFunction "f" tFunc) (map (T.Raw . P.PlString) args)))
-- | Integer add function should match results of builtin (+) for Int
-- (i.e. the tree-evaluated sum equals Prelude's 'sum').
prop_addInt :: [Int] -> Bool
prop_addInt xs = integerFuncWrapper NumFuncs.add xs == sum xs
-- | String concatenation function matches Prelude's 'concat'.
prop_concString :: [String] -> Bool
prop_concString xs = stringFuncWrapper StringFuncs.conc xs == concat xs
-- | Recursive calls: a nested (add ...) expression parses and evaluates.
prop_recursive :: Bool
prop_recursive = (P.get . Eval.calcTree . Parse.compileTree) "(add 1 (add 2 3))" == (6 :: Int)
-- | Double up quote characters so a string can be embedded in a quoted literal.
escape :: Char -> String
escape '"' = "\"\""
escape c   = [c]
-- | Wrap a string in double quotes.
quote :: String -> String
quote s = concat ["\"", s, "\""]
-- | Leading spaces (up to 19 of them) before a quoted literal are ignored
-- by the parser.
prop_leadingSpaces :: Int -> String -> Bool
prop_leadingSpaces n s =
    (P.get . Eval.calcTree . Parse.compileTree) (concat [DL.replicate (n `mod` 20) ' ', "\"", concatMap escape s, "\""]) == s
-- | Arbitrary strings round-trip through escape/quote/parse/evaluate.
prop_string :: String -> Bool
prop_string s = (P.get . Eval.calcTree . Parse.compileTree . quote . concatMap escape) s == s
-- | References are invalid in calcTrees (evaluation yields an error value).
prop_invalidReference :: Bool
prop_invalidReference = (P.checkError . Eval.calcTree . Parse.compileTree) "'1:1"
-- | Example for test options used below. To make them the default, a top level group can be defined
-- and tests can be nested. This is *NOT* intuitive and took me some hours to find out how this works.
-- There is no single example for that in order to migrate this stuff from old QuickCheck!!!
--
-- Settings: 200 generated cases per property, at most 1 unsuitable case.
options :: Fw.TestOptions
options = Monoid.mempty { Fw.topt_maximum_generated_tests = Just 200
                        , Fw.topt_maximum_unsuitable_generated_tests = Just 1 }
-- | Run the test-framework driver over 'tests'.
main :: IO ()
main = Fw.defaultMain tests
-- | The tests. Tests can be nested so that a group reuses test options
-- (only the "Integer tests" group applies 'options' here).
tests :: [Fw.Test]
tests = [
        Fw.plusTestOptions options (Fw.testGroup "Integer tests" [
                FQc.testProperty "add Int" prop_addInt
                ]),
        Fw.testGroup "Miscellaneous tests" [
                FQc.testProperty "recursive" prop_recursive,
                FQc.testProperty "invalid ref" prop_invalidReference
                ],
        Fw.testGroup "String tests" [
                FQc.testProperty "concatenate" prop_concString,
                FQc.testProperty "spaces" prop_leadingSpaces,
                FQc.testProperty "string" prop_string
                ]]
| tnrangwi/grill | test/functional/test_EvalTrees.hs | mit | 3,563 | 0 | 12 | 623 | 848 | 470 | 378 | 48 | 2 |
{-| Stability: experimental
The functions here pretty-print numbers in a compact format. Examples:
>>> showSciRational (-0.0e+3) -- result: "0"
>>> showSciRational (0.25e+2) -- result: "25"
>>> showSciRational (-1.0e-1) -- result: "-.1"
>>> showSciRational (5.0e+20 / 6) -- result: "2.5e20/3"
>>> showSciRational (0xfeedface) -- result: "4277009102"
>>> showSciRational (1 .^ 99999999) -- result: "1e99999999"
__Note__: Without taking optimizations into account, the specialized functions
(@'showSciRational'@ and @'showsSciRational'@) are much more
efficient than the generic functions (@'showNumber'@ and
@'showsNumber'@ respectively).
-}
module Data.SciRatio.Show
(
-- * Simple pretty-printers
showNumber
, showSciRational
-- * @'ShowS'@ pretty-printers
, showsNumber
, showsSciRational
) where
import Data.Ratio (denominator, numerator)
import Data.SciRatio (SciRational, base10Exponent, fracSignificand)
-- Note: we need to specialize showNumber and showsNumber in order for the
-- rewrite rules in SciRatio to fire.
-- | Show a number (see @'showsNumber'@). Delegates to 'showsNumber' with an
-- empty continuation.
--
-- Note: for @'SciRational'@, consider using the more efficient, specialized
-- function @'showSciRational'@ instead.
{-# SPECIALIZE showNumber :: SciRational -> String #-}
showNumber :: Real a => a -> String
showNumber x = showsNumber x ""
-- | Show a number (see @'showsSciRational'@) — the specialized, more
-- efficient counterpart of 'showNumber' for 'SciRational'.
showSciRational :: SciRational -> String
showSciRational x = showsSciRational x ""
-- | Show a rational number in scientific notation:
--
-- > [-+]?
-- > ( [0-9]+ [.]? [0-9]* | [.] [0-9]+ )
-- > ( [e] [-+]? [0-9]+ )?
-- > ( [/] [0-9]+ )?
--
-- Converts via 'realToFrac' and then renders with the specialized printer.
--
-- Note: for @'SciRational'@, consider using the more efficient, specialized
-- function @'showsSciRational'@ instead.
{-# SPECIALIZE showsNumber :: SciRational -> ShowS #-}
showsNumber :: Real a => a -> ShowS
showsNumber = showsSciRational . realToFrac
-- | Show a number (see @'showsNumber'@): renders either a plain scientific
-- number (denominator 1) or a fraction whose numerator is scientific.
showsSciRational :: SciRational -> ShowS
showsSciRational x
    -- canonicity ensures that the divisor is not a multiple of 2 nor 5
  | d == 1    = showsScientific n e
  | otherwise = showsFraction n e d
  where r = toRational (fracSignificand x)
        e = toInteger (base10Exponent x)
        n = numerator r
        d = denominator r
-- | Same as @'shows'@ but specialized to @'Integer'@.
showsInteger :: Integer -> ShowS
showsInteger n = shows n
-- | Show a number as a fraction: scientific numerator, @/@, integer divisor.
showsFraction :: Integer -> Integer -> Integer -> ShowS
showsFraction n e d s = showsScientific n e ('/' : showsInteger d s)
-- | Show a number in decimal or scientific notation, whichever is shorter.
-- When the exponent is small the fixed (decimal) form is compared against
-- the floating (e-notation) form; a huge exponent skips the fixed form
-- entirely so no enormous string is materialized.
showsScientific :: Integer -> Integer -> ShowS
showsScientific n e =
    addSign . if abs e <= 2 * len -- e might be extremely large
              then shorter fixed floating
              else floating
  where nS = showsInteger (abs n)
        len = fromIntegral (length (nS ""))
        lenPred = pred len
        -- fixed: all digits with the dot shifted; floating: d.ddd e±k form.
        fixed = moveDot (-e) nS
        floating = moveDot lenPred nS . showsExponent (e + lenPred)
        addSign = if signum n == -1 then ('-' :) else id
-- | Show the exponent (as part of the scientific notation); zero renders
-- as nothing at all.
showsExponent :: Integer -> ShowS
showsExponent p
  | p == 0    = id
  | otherwise = showChar 'e' . shows p
-- | Choose the shorter rendering, preferring the left string on ties.
shorter :: ShowS -> ShowS -> ShowS
shorter s s'
  | length (s' "") < length (s "") = s'
  | otherwise                      = s
-- | Move the decimal point by the given amount (positive numbers for left).
-- Works on the reversed digit string so 'insertDot' counts from the right;
-- a dot that ends up trailing (reversed: leading) is stripped.
moveDot :: Integer -> ShowS -> ShowS
moveDot i s = (rendered ++)
  where rendered = reverse (stripDot (insertDot i (reverse (s ""))))
        stripDot ('.' : rest) = rest
        stripDot rest         = rest
-- | Insert a dot (@\'.\'@) before the given index, padding with zeros as
-- necessary. Negative numbers are accepted (zeros are prepended).
insertDot :: Integer -> String -> String
insertDot i s
  | i > 0 = case s of
      []     -> '0' : insertDot (pred i) []
      c : cs -> c   : insertDot (pred i) cs
  | i < 0     = insertDot (succ i) ('0' : s)
  | otherwise = '.' : s
| Rufflewind/sci-ratio | src/Data/SciRatio/Show.hs | mit | 4,126 | 0 | 14 | 985 | 764 | 408 | 356 | 56 | 4 |
-- | Split a non-negative number into its decimal digits, most significant
-- digit first; 0 yields the empty list.
to_tens :: Integer -> [Integer]
to_tens 0 = []
to_tens n = let (q, r) = n `divMod` 10
            in  to_tens q ++ [r]
-- | Project Euler 30: a number is valid when it equals the sum of the
-- fifth powers of its digits. 1 is excluded by the problem statement
-- (it is not a proper sum).
is_num_valid :: Integer -> Bool
is_num_valid 1 = False
is_num_valid num = sum (map (^ 5) (to_tens num)) == num
| stefan-j/ProjectEuler | q30.hs | mit | 223 | 2 | 13 | 65 | 116 | 61 | 55 | 6 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
module FruRein
(
fruRein
, FruRein
) where
import Control.Monad.Random
import qualified Data.Map.Strict as M
import Graph (vertices, Graph, Vertex(..), neighbors)
import Tutte (ForceAlgo(..))
type Map = M.Map
-- | Scalar type used throughout the layout computation.
type R = Float
type Vector2 = (R, R)

-- | State of a Fruchterman–Reingold-style force-directed layout.
data FruRein =
    FruRein
    { graph :: Graph
    , w, l :: R  -- frame width and height (l is set equal to w in 'fruRein')
    , k, t :: R  -- k: distance scale, sqrt(area / #vertices); t: iteration
                 -- counter driving the cooling term in 'applyDisp'
    , verts :: Map Vertex Vector2
    } deriving (Show)
-- | Hook the layout state into the generic force-algorithm interface.
instance ForceAlgo FruRein where
  positions = verts
  advance = advanceFruRein
-- | Initialize the layout for a graph: every vertex gets a pseudo-random
-- position in the unit square. The fixed seed (mkStdGen 0) makes layouts
-- deterministic across runs.
fruRein :: Graph -> FruRein
fruRein graph =
  let
    randPosition :: RandomGen g => Rand g Vector2
    randPosition = do
      a <- getRandomR (0.0, 1.0)
      b <- getRandomR (0.0, 1.0)
      return (a, b)
    -- Infinite lazy supply of positions; zip truncates it to |vs|.
    randPositions :: [Vector2]
    randPositions = evalRand (sequence $ repeat randPosition) (mkStdGen 0)
    verts = M.fromList $ zip vs randPositions
  in FruRein{graph, w, l, k, t, verts}
  where w = 2.0
        l = w
        vs = vertices graph
        numV = length vs
        -- Distance scale from frame area per vertex.
        k = sqrt $ w * l / (fromIntegral numV)
        t = 1.0
-- | One layout iteration: start from zero displacements, accumulate the
-- attraction and repulsion passes, apply them (clamped), and bump the
-- iteration counter used for cooling.
advanceFruRein :: FruRein -> FruRein
advanceFruRein f@FruRein{..} =
  let
    disp = M.fromList $ zip (vertices graph) $ repeat (0.0, 0.0)
    verts' = applyDisp f . repulse f . attract f $ disp
    t' = t + 1
  in f{t = t', verts = verts'}
-- | Per-vertex displacement accumulated during one iteration.
type DispMap = Map Vertex Vector2
-- | Per-vertex position.
type PosMap = Map Vertex Vector2
-- | Accumulate the "attract" pass: every vertex is folded against ALL
-- vertices with force k^2/dist.
--
-- NOTE(review): in the classic Fruchterman–Reingold scheme, attraction acts
-- only along edges with force dist^2/k, while k^2/dist over all pairs is the
-- repulsive term — here the names and formulas appear swapped with 'repulse'.
-- Behavior is kept as-is; confirm against the intended algorithm.
attract :: FruRein -> DispMap -> DispMap
attract FruRein{..} disp =
  let
    vdisp :: Vertex -> Vector2 -> Vector2
    vdisp v oldDisp = foldr (f v) oldDisp $ vertices graph
    f :: Vertex -> Vertex -> Vector2 -> Vector2
    f v u vd = change verts v u vd attrForce plus
    attrForce x = k * k / x
  in M.mapWithKey vdisp disp
-- | Accumulate the "repulse" pass: each vertex is folded against its
-- NEIGHBORS only, with force dist^2/k (see the swapped-roles note on
-- 'attract').
--
-- NOTE(review): the combining operator flips between 'plus' and 'minus'
-- based on vertex-id ordering, making each pair's contribution direction
-- depend on which id is smaller — confirm this asymmetry is intended.
repulse :: FruRein -> DispMap -> DispMap
repulse FruRein{..} disp =
  let
    vdisp :: Vertex -> Vector2 -> Vector2
    vdisp v oldDisp = foldr (f v) oldDisp $ neighbors graph v
    f :: Vertex -> Vertex -> Vector2 -> Vector2
    f v u vd = change verts v u vd repulseForce op
      where op = if unVert v < unVert u then plus else minus
    repulseForce x = x * x / k
  in M.mapWithKey vdisp disp
-- | Apply accumulated displacements to the positions: each step is capped
-- by a temperature that decays as 1/t (cooling), and the result is clamped
-- to the [-w/2, w/2] x [-l/2, l/2] frame.
applyDisp :: FruRein -> DispMap -> PosMap
applyDisp FruRein{..} disp =
  let
    temp = 0.2 * w / t
    update :: Vertex -> Vector2 -> Vector2
    update v pos = let vd = disp M.! v
                       dist = mag vd
                       -- Scale the step down to at most `temp`.
                       -- NOTE(review): divides by dist — a zero displacement
                       -- would produce NaN; confirm that cannot occur here.
                       (px, py) = plus pos $
                                  scale vd (min dist temp / dist)
                       px' = (w / 2.0) `min` ((negate w / 2.0) `max` px)
                       py' = (l / 2.0) `min` ((negate l / 2.0) `max` py)
                   in (px', py')
  in M.mapWithKey update verts
-- | Componentwise difference of two vectors.
minus :: Vector2 -> Vector2 -> Vector2
minus (ax, ay) (bx, by) = (ax - bx, ay - by)

-- | Componentwise sum of two vectors.
plus :: Vector2 -> Vector2 -> Vector2
plus (ax, ay) (bx, by) = (ax + bx, ay + by)

-- | Multiply a vector by a scalar.
scale :: Vector2 -> R -> Vector2
scale (ax, ay) s = (s * ax, s * ay)

-- | Euclidean length of a vector.
mag :: Vector2 -> R
mag (ax, ay) = sqrt (ax * ax + ay * ay)
-- | Fold one vertex pair into a displacement accumulator: combine @vd@
-- (via @op@) with a step along the vector from @u@ toward @v@, with
-- magnitude @force dist@. Self-pairs contribute nothing. No type
-- signature in the original; the inferred type is kept as-is.
-- NOTE(review): divides by @dist@ — coincident vertices (dist == 0)
-- would yield NaN; confirm callers avoid that.
change verts v u vd force op|v==u = vd
                            |otherwise =
                              let vp = verts M.! v
                                  up = verts M.! u
                                  delta = minus vp up
                                  dist = mag delta
                              in vd `op` (scale delta $ force dist / dist)
| j-rock/tutte-your-stuff | src/FruRein.hs | mit | 3,348 | 0 | 18 | 1,158 | 1,320 | 707 | 613 | 97 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appsync-datasource-relationaldatabaseconfig.html
module Stratosphere.ResourceProperties.AppSyncDataSourceRelationalDatabaseConfig where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.AppSyncDataSourceRdsHttpEndpointConfig
-- | Full data type definition for AppSyncDataSourceRelationalDatabaseConfig.
-- See 'appSyncDataSourceRelationalDatabaseConfig' for a more convenient
-- constructor.
--
-- NOTE(review): this module appears auto-generated (library-gen path) —
-- prefer regenerating over hand-editing.
data AppSyncDataSourceRelationalDatabaseConfig =
  AppSyncDataSourceRelationalDatabaseConfig
  { _appSyncDataSourceRelationalDatabaseConfigRdsHttpEndpointConfig :: Maybe AppSyncDataSourceRdsHttpEndpointConfig
  , _appSyncDataSourceRelationalDatabaseConfigRelationalDatabaseSourceType :: Val Text
  } deriving (Show, Eq)
-- Serialize to the CloudFormation JSON shape: the optional
-- RdsHttpEndpointConfig key is omitted when Nothing; the required
-- RelationalDatabaseSourceType key is always emitted.
instance ToJSON AppSyncDataSourceRelationalDatabaseConfig where
  toJSON AppSyncDataSourceRelationalDatabaseConfig{..} =
    object $
    catMaybes
    [ fmap (("RdsHttpEndpointConfig",) . toJSON) _appSyncDataSourceRelationalDatabaseConfigRdsHttpEndpointConfig
    , (Just . ("RelationalDatabaseSourceType",) . toJSON) _appSyncDataSourceRelationalDatabaseConfigRelationalDatabaseSourceType
    ]
-- | Constructor for 'AppSyncDataSourceRelationalDatabaseConfig' containing
-- required fields as arguments; the optional endpoint config defaults to
-- Nothing.
appSyncDataSourceRelationalDatabaseConfig
  :: Val Text -- ^ 'asdsrdcRelationalDatabaseSourceType'
  -> AppSyncDataSourceRelationalDatabaseConfig
appSyncDataSourceRelationalDatabaseConfig relationalDatabaseSourceTypearg =
  AppSyncDataSourceRelationalDatabaseConfig
  { _appSyncDataSourceRelationalDatabaseConfigRdsHttpEndpointConfig = Nothing
  , _appSyncDataSourceRelationalDatabaseConfigRelationalDatabaseSourceType = relationalDatabaseSourceTypearg
  }
-- Lens accessors for the generated record fields (CloudFormation docs below).
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appsync-datasource-relationaldatabaseconfig.html#cfn-appsync-datasource-relationaldatabaseconfig-rdshttpendpointconfig
asdsrdcRdsHttpEndpointConfig :: Lens' AppSyncDataSourceRelationalDatabaseConfig (Maybe AppSyncDataSourceRdsHttpEndpointConfig)
asdsrdcRdsHttpEndpointConfig = lens _appSyncDataSourceRelationalDatabaseConfigRdsHttpEndpointConfig (\s a -> s { _appSyncDataSourceRelationalDatabaseConfigRdsHttpEndpointConfig = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appsync-datasource-relationaldatabaseconfig.html#cfn-appsync-datasource-relationaldatabaseconfig-relationaldatabasesourcetype
asdsrdcRelationalDatabaseSourceType :: Lens' AppSyncDataSourceRelationalDatabaseConfig (Val Text)
asdsrdcRelationalDatabaseSourceType = lens _appSyncDataSourceRelationalDatabaseConfigRelationalDatabaseSourceType (\s a -> s { _appSyncDataSourceRelationalDatabaseConfigRelationalDatabaseSourceType = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/AppSyncDataSourceRelationalDatabaseConfig.hs | mit | 2,939 | 0 | 13 | 212 | 260 | 150 | 110 | 29 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-emr-ebsconfiguration.html
module Stratosphere.ResourceProperties.EMRInstanceGroupConfigEbsConfiguration where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.EMRInstanceGroupConfigEbsBlockDeviceConfig
-- | Full data type definition for EMRInstanceGroupConfigEbsConfiguration. See
-- 'emrInstanceGroupConfigEbsConfiguration' for a more convenient
-- constructor.
--
-- NOTE(review): this module appears auto-generated (library-gen path) —
-- prefer regenerating over hand-editing.
data EMRInstanceGroupConfigEbsConfiguration =
  EMRInstanceGroupConfigEbsConfiguration
  { _eMRInstanceGroupConfigEbsConfigurationEbsBlockDeviceConfigs :: Maybe [EMRInstanceGroupConfigEbsBlockDeviceConfig]
  , _eMRInstanceGroupConfigEbsConfigurationEbsOptimized :: Maybe (Val Bool)
  } deriving (Show, Eq)
-- Serialize to the CloudFormation JSON shape; both keys are optional and
-- omitted when Nothing.
instance ToJSON EMRInstanceGroupConfigEbsConfiguration where
  toJSON EMRInstanceGroupConfigEbsConfiguration{..} =
    object $
    catMaybes
    [ fmap (("EbsBlockDeviceConfigs",) . toJSON) _eMRInstanceGroupConfigEbsConfigurationEbsBlockDeviceConfigs
    , fmap (("EbsOptimized",) . toJSON) _eMRInstanceGroupConfigEbsConfigurationEbsOptimized
    ]
-- | Constructor for 'EMRInstanceGroupConfigEbsConfiguration' containing
-- required fields as arguments (none here: both fields default to Nothing).
emrInstanceGroupConfigEbsConfiguration
  :: EMRInstanceGroupConfigEbsConfiguration
emrInstanceGroupConfigEbsConfiguration =
  EMRInstanceGroupConfigEbsConfiguration
  { _eMRInstanceGroupConfigEbsConfigurationEbsBlockDeviceConfigs = Nothing
  , _eMRInstanceGroupConfigEbsConfigurationEbsOptimized = Nothing
  }
-- Lens accessors for the generated record fields (CloudFormation docs below).
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-emr-ebsconfiguration.html#cfn-emr-ebsconfiguration-ebsblockdeviceconfigs
emrigcecEbsBlockDeviceConfigs :: Lens' EMRInstanceGroupConfigEbsConfiguration (Maybe [EMRInstanceGroupConfigEbsBlockDeviceConfig])
emrigcecEbsBlockDeviceConfigs = lens _eMRInstanceGroupConfigEbsConfigurationEbsBlockDeviceConfigs (\s a -> s { _eMRInstanceGroupConfigEbsConfigurationEbsBlockDeviceConfigs = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-emr-ebsconfiguration.html#cfn-emr-ebsconfiguration-ebsoptimized
emrigcecEbsOptimized :: Lens' EMRInstanceGroupConfigEbsConfiguration (Maybe (Val Bool))
emrigcecEbsOptimized = lens _eMRInstanceGroupConfigEbsConfigurationEbsOptimized (\s a -> s { _eMRInstanceGroupConfigEbsConfigurationEbsOptimized = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/EMRInstanceGroupConfigEbsConfiguration.hs | mit | 2,530 | 0 | 12 | 205 | 265 | 154 | 111 | 28 | 1 |
{-# LANGUAGE TemplateHaskell, TypeSynonymInstances, FlexibleInstances #-}
module Test where
import Infernu.Types
-- | Run the test suite and print its result. The trailing @return ()@ of
-- the original was redundant: 'print' already ends the block with @IO ()@.
main :: IO ()
main = do
  res <- runAllTests
  print res
| sinelaw/infernu | test/Test.hs | gpl-2.0 | 185 | 0 | 8 | 34 | 46 | 23 | 23 | 8 | 1 |
{-
Haskell implementation of cat
http://linux.die.net/man/1/cat
-}
module Main where
import System.Environment(getArgs)
-- | Print the contents of a file to stdout. Uses 'putStr' rather than
-- 'putStrLn': cat(1) must not append a newline of its own, and file
-- contents typically already end with one — 'putStrLn' printed a spurious
-- blank line after every file.
printFile :: FilePath -> IO ()
printFile filePath = readFile filePath >>= putStr
-- | Concatenate every file named on the command line to stdout.
main :: IO ()
main = do
    paths <- getArgs
    mapM_ printFile paths
| huseyinyilmaz/hs-gnu-core-utils | src/cat.hs | gpl-2.0 | 255 | 0 | 7 | 38 | 66 | 35 | 31 | 6 | 1 |
module Text.Pit (
module Text.Pit.Types,
module Text.Pit.Lexer,
module Text.Pit.Parser,
module Text.Pit.Unification,
module Text.Pit.Sld,
) where
import Text.Pit.Types
import Text.Pit.Lexer
import Text.Pit.Parser
import Text.Pit.Unification
import Text.Pit.Sld
| ShabbyX/pit | src/Text/Pit.hs | gpl-2.0 | 310 | 0 | 5 | 74 | 74 | 51 | 23 | 11 | 0 |
module IO.KeyInput where
--Standard libraries
import System.Exit (exitSuccess)
--Avoid conflicts with the Direction data type.
import Prelude hiding (Left,Right)
import qualified Graphics.UI.SDL as SDL
import qualified Character.Player as P
import qualified Character.Actor as A
import qualified GameState.GameState as GS
import qualified IO.Render as Render
import Misc.Point
data Direction = Up | Left | Down | Right
--Gives the initial state of the program: the player starts at (40,40).
--Eventually, this should go somewhere else.
initialState :: GS.State
initialState = GS.State $ P.Player (40,40)
--Refresh the screen, then wait for an SDL.KeyDown event, passing off to
--movePlayer in the event that a direction key is pressed. If no key press
--occurs, the function just calls itself again, making this the main game loop.
--
--NOTE(review): movePlayer re-enters this loop itself (via movePlayer'), so
--the trailing recursive call below is only reached for non-movement events.
waitForKeyPress :: GS.State -> IO ()
waitForKeyPress st = do
  refresh st
  event <- SDL.waitEvent
  case event of
    SDL.Quit -> exitSuccess
    SDL.KeyDown (SDL.Keysym SDL.SDLK_q _ _ ) -> exitSuccess
    SDL.KeyDown (SDL.Keysym SDL.SDLK_DOWN _ _ ) -> movePlayer Down st
    SDL.KeyDown (SDL.Keysym SDL.SDLK_UP _ _ ) -> movePlayer Up st
    SDL.KeyDown (SDL.Keysym SDL.SDLK_LEFT _ _ ) -> movePlayer Left st
    SDL.KeyDown (SDL.Keysym SDL.SDLK_RIGHT _ _ ) -> movePlayer Right st
    _ -> return ()
  waitForKeyPress st
--Refresh the screen, given the game state: redraw the player image at the
--player's current coordinates.
refresh :: GS.State -> IO ()
refresh st = Render.display Render.image $ playerCoords st
--Given the state dictionary, returns player coordinates.
--TODO maybe put elsewhere
playerCoords :: GS.State -> Point
playerCoords st = A.getLocation $ GS.player st
-- The main entry point for player movement: translate a direction into a
-- one-tile offset and apply it.
movePlayer :: Direction -> GS.State -> IO ()
movePlayer dir st = movePlayer' st (offset dir)
  where offset Down  = (0, 1)
        offset Left  = (-1, 0)
        offset Right = (1, 0)
        offset Up    = (0, -1)
--A helper that computes the updated state and re-enters the input loop.
movePlayer' :: GS.State -> Point -> IO ()
movePlayer' st tuple = waitForKeyPress (setPlayerCoords st tuple)
--Updates the player's coordinate data in the global game state dictionary. The
--syntax used here is a little weird. It is record update syntax, a way to
--update only one field of data type. For example, if
--State = { playerState :: Player, foo :: SomethingElse}, this would translate to:
--
--data State = State Player SomethingElse
--
--playerState :: State -> Player
--playerState (State p _ ) = p
--
--Similarly, the function:
-- setPlayerCoords st (x,y) = st { GS.player = someNewPlayerState (GS.player st) (x,y) }
--translates to:
-- setPlayerCoords (State player foo ) (x,y) = State (someNewPlayerState player (x,y)) foo
--
--
--The benefits are not very visible when the state dictionary is this small, but
--as the state dictionary becomes large, it will be extremely helpful to
--avoid having pattern matching like: setPlayerCoords (State a b c d e f g h i j) (x,y)
setPlayerCoords :: GS.State -> Point -> GS.State
setPlayerCoords st (dx, dy) = st { GS.player = nudge (GS.player st) }
  where nudge p = p { P.loc = (A.getX p + dx, A.getY p + dy) }
| MortimerMcMire315/rogue_nads | src/IO/KeyInput.hs | gpl-3.0 | 3,305 | 0 | 13 | 687 | 691 | 374 | 317 | 41 | 7 |
{- This file is part of PhoneDirectory.
Copyright (C) 2009 Michael Steele
PhoneDirectory is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PhoneDirectory is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PhoneDirectory. If not, see <http://www.gnu.org/licenses/>.
-}
module TestPriority where
import Control.Applicative
import Test.QuickCheck
import Priority
import TestJSON
-- | QuickCheck the reflective JSON instance for 'Priority'.
-- NOTE(review): the banner says "ContactInfo" although the property runs on
-- 'Priority' — looks copy-pasted from a sibling test; confirm and adjust.
main :: IO ()
main = do
    putStrLn "ContactInfo: Reflective JSON instance."
    quickCheck (prop_reflective_json_instance :: Priority -> Bool)
-- | Generate priorities via 'mkPriority'; shrinking is disabled.
instance Arbitrary Priority where
    arbitrary = mkPriority <$> arbitrary
    shrink = shrinkNothing
| mikesteele81/Phone-Directory | src/TestPriority.hs | gpl-3.0 | 1,086 | 0 | 9 | 206 | 80 | 44 | 36 | 12 | 1 |
module Chap06.Data.Sortable where
-- | Interface for collections supporting incremental sorting
-- (Okasaki, Chapter 6).
class Sortable s where
    -- | The empty collection.
    empty :: Ord a => s a
    -- | Insert one element.
    add :: Ord a => a -> s a -> s a
    -- | Produce the elements as a list, in sorted order.
    sort :: Ord a => s a -> [a]
| stappit/okasaki-pfds | src/Chap06/Data/Sortable.hs | gpl-3.0 | 151 | 0 | 10 | 47 | 79 | 39 | 40 | 5 | 0 |
{-# LANGUAGE NoMonomorphismRestriction #-}
import XMonad
import XMonad.Actions.GridSelect
import XMonad.Util.EZConfig
-- | GridSelect appearance: cell geometry, an Xft font spec, and a
-- class-name colorizer (grey cells, blue inactive text, orange active text).
myGsconfig = (buildDefaultGSConfig colorizer)
    { gs_cellheight = 30
    , gs_cellwidth = 350
    , gs_font = fontXft }
    where
      colorizer = colorRangeFromClassName
                      lightgrey -- lowest inactive bg
                      lightgrey -- highest inactive bg
                      lightgrey -- active bg
                      lightblue -- inactive fg
                      orange -- active fg
      lightblue = (0x6E,0xAA,0xF3)
      lightgrey = (0xEE,0xEE,0xEE)
      orange = (0xFC,0xA4,0x34)
      -- Haskell string-gap syntax: the backslashes splice the fontconfig
      -- pattern into one literal with no embedded whitespace.
      fontXft = "xft\
                \:Monaco\
                \:pixelsize=16\
                \:weight=bold\
                \:width=semicondensed\
                \:dpi=96\
                \:hinting=true\
                \:hintstyle=hintslight\
                \:antialias=true\
                \:rgba=rgb\
                \:lcdfilter=lcdlight"
-- XMonad entry point: Super as the mod key, plus raw-keysym bindings for
-- hardware keys and mnemonic bindings for launchers. The brightness and
-- eject bindings shell out via sudo to helper scripts (lcd-bl / kbd-bl /
-- eject), which must be configured passwordless to work from a keybinding.
main = do
    xmonad $ defaultConfig
        { terminal = "gnome-terminal"
        , modMask = mod4Mask
        , borderWidth = 3
        , normalBorderColor = "#ffffff"
        , focusedBorderColor = "#33cc33"
        }
        `additionalKeys`
        [ ((0, 0x1008FF03), spawn "sudo lcd-bl down") -- XF86XK_MonBrightnessDown
        , ((0, 0x1008FF02), spawn "sudo lcd-bl up") -- XF86XK_MonBrightnessUp
        , ((0, 0x1008FF06), spawn "sudo kbd-bl down") -- XF86XK_KbdBrightnessDown
        , ((0, 0x1008FF05), spawn "sudo kbd-bl up") -- XF86XK_KbdBrightnessUp
        , ((0, 0x1008FF12), spawn "amixer -q set Master mute") -- XF86XK_AudioMute
        , ((0, 0x1008FF11), spawn "amixer -q set Master 5%- unmute") -- XF86XK_AudioLowerVolume
        , ((0, 0x1008FF13), spawn "amixer -q set Master 5%+ unmute") -- XF86XK_AudioRaiseVolume
        , ((0, 0x1008FF2C), spawn "sudo eject") -- XF86XK_Eject
        ]
        `additionalKeysP` -- NB M maps to Super (modMask = mod4Mask)
        [ ("M-x w", spawn "chromium") -- Super+x then w to launch the browser
        , ("M-x e", spawn "emacs") -- Super+x then e to launch the editor
        , ("M-x f", spawn "pcmanfm") -- Super+x then f to launch the file manager
        , ("M-a", goToSelected myGsconfig) -- Super+a for Grid Select
        ]
| c0c0n3/archlinux | mactop/_root_/home/andrea/.xmonad/xmonad.hs | gpl-3.0 | 2,564 | 0 | 11 | 1,020 | 379 | 238 | 141 | 39 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AppEngine.Apps.Services.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all the services in the application.
--
-- /See:/ <https://cloud.google.com/appengine/docs/admin-api/ Google App Engine Admin API Reference> for @appengine.apps.services.list@.
module Network.Google.Resource.AppEngine.Apps.Services.List
(
-- * REST Resource
AppsServicesListResource
-- * Creating a Request
, appsServicesList
, AppsServicesList
-- * Request Lenses
, aslXgafv
, aslUploadProtocol
, aslPp
, aslAccessToken
, aslUploadType
, aslBearerToken
, aslAppsId
, aslPageToken
, aslPageSize
, aslCallback
) where
import Network.Google.AppEngine.Types
import Network.Google.Prelude
-- | A resource alias for @appengine.apps.services.list@ method which the
-- 'AppsServicesList' request conforms to.
--
-- Servant-style route: GET \/v1\/apps\/{appsId}\/services with the listed
-- query parameters. NOTE(review): generated by the gogol code generator —
-- prefer regenerating over hand-editing.
type AppsServicesListResource =
     "v1" :>
       "apps" :>
         Capture "appsId" Text :>
           "services" :>
             QueryParam "$.xgafv" Text :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "pp" Bool :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "bearer_token" Text :>
                         QueryParam "pageToken" Text :>
                           QueryParam "pageSize" (Textual Int32) :>
                             QueryParam "callback" Text :>
                               QueryParam "alt" AltJSON :>
                                 Get '[JSON] ListServicesResponse
-- | Lists all the services in the application.
--
-- Request record: one field per query parameter of
-- 'AppsServicesListResource', plus the required appsId path capture.
--
-- /See:/ 'appsServicesList' smart constructor.
data AppsServicesList = AppsServicesList'
    { _aslXgafv :: !(Maybe Text)
    , _aslUploadProtocol :: !(Maybe Text)
    , _aslPp :: !Bool
    , _aslAccessToken :: !(Maybe Text)
    , _aslUploadType :: !(Maybe Text)
    , _aslBearerToken :: !(Maybe Text)
    , _aslAppsId :: !Text
    , _aslPageToken :: !(Maybe Text)
    , _aslPageSize :: !(Maybe (Textual Int32))
    , _aslCallback :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AppsServicesList' with the minimum fields required to make a request.
-- All optional fields start as Nothing except '_aslPp', which defaults to True.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aslXgafv'
--
-- * 'aslUploadProtocol'
--
-- * 'aslPp'
--
-- * 'aslAccessToken'
--
-- * 'aslUploadType'
--
-- * 'aslBearerToken'
--
-- * 'aslAppsId'
--
-- * 'aslPageToken'
--
-- * 'aslPageSize'
--
-- * 'aslCallback'
appsServicesList
    :: Text -- ^ 'aslAppsId'
    -> AppsServicesList
appsServicesList pAslAppsId_ =
    AppsServicesList'
    { _aslXgafv = Nothing
    , _aslUploadProtocol = Nothing
    , _aslPp = True
    , _aslAccessToken = Nothing
    , _aslUploadType = Nothing
    , _aslBearerToken = Nothing
    , _aslAppsId = pAslAppsId_
    , _aslPageToken = Nothing
    , _aslPageSize = Nothing
    , _aslCallback = Nothing
    }
-- Lens accessors for the request fields.
-- NOTE(review): generated by the gogol code generator — prefer regenerating
-- over hand-editing.
-- | V1 error format.
aslXgafv :: Lens' AppsServicesList (Maybe Text)
aslXgafv = lens _aslXgafv (\ s a -> s{_aslXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
aslUploadProtocol :: Lens' AppsServicesList (Maybe Text)
aslUploadProtocol
  = lens _aslUploadProtocol
      (\ s a -> s{_aslUploadProtocol = a})
-- | Pretty-print response.
aslPp :: Lens' AppsServicesList Bool
aslPp = lens _aslPp (\ s a -> s{_aslPp = a})
-- | OAuth access token.
aslAccessToken :: Lens' AppsServicesList (Maybe Text)
aslAccessToken
  = lens _aslAccessToken
      (\ s a -> s{_aslAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
aslUploadType :: Lens' AppsServicesList (Maybe Text)
aslUploadType
  = lens _aslUploadType
      (\ s a -> s{_aslUploadType = a})
-- | OAuth bearer token.
aslBearerToken :: Lens' AppsServicesList (Maybe Text)
aslBearerToken
  = lens _aslBearerToken
      (\ s a -> s{_aslBearerToken = a})
-- | Part of \`parent\`. Name of the parent Application resource. Example:
-- apps\/myapp.
aslAppsId :: Lens' AppsServicesList Text
aslAppsId
  = lens _aslAppsId (\ s a -> s{_aslAppsId = a})
-- | Continuation token for fetching the next page of results.
aslPageToken :: Lens' AppsServicesList (Maybe Text)
aslPageToken
  = lens _aslPageToken (\ s a -> s{_aslPageToken = a})
-- | Maximum results to return per page.
aslPageSize :: Lens' AppsServicesList (Maybe Int32)
aslPageSize
  = lens _aslPageSize (\ s a -> s{_aslPageSize = a}) .
      mapping _Coerce
-- | JSONP
aslCallback :: Lens' AppsServicesList (Maybe Text)
aslCallback
  = lens _aslCallback (\ s a -> s{_aslCallback = a})
-- Wire the record to the servant route: field order below must match the
-- query-parameter order in 'AppsServicesListResource'.
instance GoogleRequest AppsServicesList where
        type Rs AppsServicesList = ListServicesResponse
        type Scopes AppsServicesList =
             '["https://www.googleapis.com/auth/appengine.admin",
               "https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only"]
        requestClient AppsServicesList'{..}
          = go _aslAppsId _aslXgafv _aslUploadProtocol
              (Just _aslPp)
              _aslAccessToken
              _aslUploadType
              _aslBearerToken
              _aslPageToken
              _aslPageSize
              _aslCallback
              (Just AltJSON)
              appEngineService
          where go
                  = buildClient
                      (Proxy :: Proxy AppsServicesListResource)
                      mempty
| rueshyna/gogol | gogol-appengine/gen/Network/Google/Resource/AppEngine/Apps/Services/List.hs | mpl-2.0 | 6,235 | 0 | 21 | 1,635 | 1,043 | 601 | 442 | 146 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Orders.Returnlineitem
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a line item. This method can only be called for non-multi-client
-- accounts.
--
-- /See:/ <https://developers.google.com/shopping-content Content API for Shopping Reference> for @content.orders.returnlineitem@.
module Network.Google.Resource.Content.Orders.Returnlineitem
(
-- * REST Resource
OrdersReturnlineitemResource
-- * Creating a Request
, ordersReturnlineitem
, OrdersReturnlineitem
-- * Request Lenses
, oMerchantId
, oPayload
, oOrderId
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.orders.returnlineitem@ method which the
-- 'OrdersReturnlineitem' request conforms to.
-- Describes: POST \/content\/v2\/{merchantId}\/orders\/{orderId}\/returnLineItem
type OrdersReturnlineitemResource =
     "content" :>
       "v2" :>
         Capture "merchantId" (Textual Word64) :>
           "orders" :>
             Capture "orderId" Text :>
               "returnLineItem" :>
                 QueryParam "alt" AltJSON :>
                   ReqBody '[JSON] OrdersReturnLineItemRequest :>
                     Post '[JSON] OrdersReturnLineItemResponse
-- | Returns a line item. This method can only be called for non-multi-client
-- accounts.
--
-- /See:/ 'ordersReturnlineitem' smart constructor.
data OrdersReturnlineitem = OrdersReturnlineitem'
    { _oMerchantId :: !(Textual Word64) -- ^ The ID of the managing account.
    , _oPayload :: !OrdersReturnLineItemRequest -- ^ Multipart request metadata.
    , _oOrderId :: !Text -- ^ The ID of the order.
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OrdersReturnlineitem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oMerchantId'
--
-- * 'oPayload'
--
-- * 'oOrderId'
ordersReturnlineitem
    :: Word64 -- ^ 'oMerchantId'
    -> OrdersReturnLineItemRequest -- ^ 'oPayload'
    -> Text -- ^ 'oOrderId'
    -> OrdersReturnlineitem
ordersReturnlineitem merchantId payload orderId =
  OrdersReturnlineitem'
    { -- The merchant id is stored behind the 'Textual' newtype; the review
      -- from '_Coerce' performs that wrapping.
      _oMerchantId = _Coerce # merchantId
    , _oPayload = payload
    , _oOrderId = orderId
    }
-- | The ID of the managing account.
-- Composing with '_Coerce' hides the 'Textual' wrapper from callers.
oMerchantId :: Lens' OrdersReturnlineitem Word64
oMerchantId =
  lens _oMerchantId (\record val -> record {_oMerchantId = val}) . _Coerce

-- | Multipart request metadata.
oPayload :: Lens' OrdersReturnlineitem OrdersReturnLineItemRequest
oPayload = lens _oPayload (\record val -> record {_oPayload = val})

-- | The ID of the order.
oOrderId :: Lens' OrdersReturnlineitem Text
oOrderId = lens _oOrderId (\record val -> record {_oOrderId = val})
-- POSTs the payload to the returnLineItem endpoint; merchantId and orderId
-- are spliced into the URL path (fields in scope via RecordWildCards), and
-- only the content scope is required.
instance GoogleRequest OrdersReturnlineitem where
        type Rs OrdersReturnlineitem =
             OrdersReturnLineItemResponse
        type Scopes OrdersReturnlineitem =
             '["https://www.googleapis.com/auth/content"]
        requestClient OrdersReturnlineitem'{..}
          = go _oMerchantId _oOrderId (Just AltJSON) _oPayload
              shoppingContentService
          where go
                  = buildClient
                      (Proxy :: Proxy OrdersReturnlineitemResource)
                      mempty
| rueshyna/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Orders/Returnlineitem.hs | mpl-2.0 | 3,827 | 0 | 15 | 877 | 483 | 287 | 196 | 73 | 1 |
module Affection.MessageBus.Message.MouseMessage
( MouseMessage(..)
-- | SDL reexports
, SDL.Window
, SDL.MouseDevice
, SDL.MouseButton
, SDL.InputMotion
, SDL.MouseScrollDirection
) where
import Affection.MessageBus.Message.Class
import Data.Word (Word8)
import Data.Int (Int32)
import qualified SDL
import Linear (V2(..))
-- | Datatype for handling mouse events handed down from SDL2.
data MouseMessage
  -- | Mouse motion event
  = MsgMouseMotion
    { msgMMWhen :: Double -- ^ Message time
    , msgMMWindow :: Maybe SDL.Window -- ^ Focused window (if any)
    , msgMMWhich :: SDL.MouseDevice -- ^ Mouse device identifier
    , msgMMState :: [SDL.MouseButton] -- ^ List of pressed mouse buttons
    , msgMMPos :: V2 Int32 -- ^ Absolute mouse position
    , msgMMRelMotion :: V2 Int32 -- ^ Mouse movement relative to previous position
    }
  -- | Mouse button event
  | MsgMouseButton
    { msgMBWhen :: Double -- ^ Message time
    , msgMBWindow :: Maybe SDL.Window -- ^ Focused window (if any)
    , msgMBMotion :: SDL.InputMotion -- ^ Button's input motion
    , msgMBWhich :: SDL.MouseDevice -- ^ Mouse device identifier
    , msgMBButton :: SDL.MouseButton -- ^ Affected mouse button
    , msgMBClicks :: Word8 -- ^ Number of clicks
    , msgMBPos :: V2 Int32 -- ^ Absolute mouse position
    }
  -- | Mouse wheel event
  | MsgMouseWheel
    { msgMWWhen :: Double -- ^ Message time
    , msgMWWhindow :: Maybe SDL.Window -- ^ Focused window (if any). NOTE(review): field name misspells \"Window\"; kept as-is since renaming would break the exported API.
    , msgMWWhich :: SDL.MouseDevice -- ^ Mouse device identifier
    , msgMWPos :: V2 Int32 -- ^ Absolute mouse position
    , msgMWDIrection :: SDL.MouseScrollDirection -- ^ Scroll direction. NOTE(review): field name misspells \"Direction\"; kept as-is for API compatibility.
    }
  deriving (Show)
-- | Every constructor stores its timestamp in its @*When@ field; record
-- patterns pick it out without counting positional wildcards.
instance Message MouseMessage where
  msgTime MsgMouseMotion { msgMMWhen = t } = t
  msgTime MsgMouseButton { msgMBWhen = t } = t
  msgTime MsgMouseWheel { msgMWWhen = t } = t
| nek0/affection | src/Affection/MessageBus/Message/MouseMessage.hs | lgpl-3.0 | 1,994 | 0 | 10 | 541 | 366 | 224 | 142 | 39 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CodeWorld.Compile.Stages
( checkDangerousSource
, checkCodeConventions
, checkRequirements
) where
import CodeWorld.Compile.Framework
import CodeWorld.Compile.Requirements
import Control.Monad
import Control.Monad.State
import Data.Array
import Data.Generics
import Data.Maybe (isJust, fromMaybe)
import Data.Monoid
import Data.List (sort)
import Data.Text (Text, unpack)
import Data.Text.Encoding (decodeUtf8)
import Language.Haskell.Exts
import Text.Regex.TDFA
import Text.Regex.TDFA.Text
-- Runs the convention checks enforced by the CodeWorld compiler. The
-- mixed/gray checks apply in every mode; the remaining checks only run
-- in "codeworld" mode.
checkCodeConventions :: MonadCompile m => m ()
checkCodeConventions =
    gets compileMode >>= \mode -> do
        checkOldStyleMixed mode
        checkOldStyleGray
        when (mode == "codeworld") $
            sequence_
                [ checkFunctionParentheses
                , checkVarlessPatterns
                , checkPatternGuards
                ]
-- Look for uses of Template Haskell or related features in the program.
-- These cannot currently be used, because the compiler isn't properly
-- sandboxed, so allowing them would be a remote code execution
-- vulnerability.
--
-- The check is deliberately textual: regular expressions are matched
-- against the raw source rather than the parse tree, since the parser
-- isn't trusted to surface every spelling of these features.
checkDangerousSource :: MonadCompile m => m ()
checkDangerousSource = do
    src <- decodeUtf8 <$> getSourceCode
    let forbidden = [ ".*TemplateHaskell.*"
                    , ".*QuasiQuotes.*"
                    , ".*glasgow-exts.*"
                    ] :: [Text]
    when (any (src =~) forbidden) $
        addDiagnostics
            [ (noSrcSpan, CompileError,
               "error: Sorry, but your program uses forbidden language features.")
            ]
-- Looks for use of `mixed` with either a pair of colors (in CodeWorld mode) or
-- two colors (in Haskell mode). This is likely to be old code from before the
-- type signature was changed, so there's a custom error message.
checkOldStyleMixed :: MonadCompile m => SourceMode -> m ()
checkOldStyleMixed mode =
    getParsedCode >>= \parsed -> case parsed of
      Parsed mod -> addDiagnostics $
          everything (++) (mkQ [] (oldStyleMixed mode)) mod
      _ -> return ()  -- unparseable source: GHC reports its own errors
  where oldStyleMixed :: SourceMode -> Exp SrcSpanInfo -> [Diagnostic]
        -- codeworld mode: `mixed` applied to a 2-tuple of arguments.
        oldStyleMixed "codeworld"
            (App loc (Var _ (UnQual _ (Ident _ "mixed")))
                 (Tuple _ _ [_, _]))
          = [(loc, CompileError,
              "error: Outdated use of mixed function." ++
              "\n      The argument should be a list of colors." ++
              "\n      Example: mixed([red, orange, white])")]
        -- haskell mode: `mixed` applied to two curried arguments.
        oldStyleMixed "haskell"
            (App loc (App _ (Var _ (UnQual _ (Ident _ "mixed"))) _) _)
          = [(loc, CompileError,
              "error: Outdated use of mixed function." ++
              "\n      The argument should be a list of colors." ++
              "\n      Example: mixed [red, orange, white]")]
        oldStyleMixed _ _ = []
-- Looks for use of `gray` or `grey` with an argument. This is likely to be old
-- code from before the type signature was changed, so there's a custom error
-- message.
checkOldStyleGray :: MonadCompile m => m ()
checkOldStyleGray =
    getParsedCode >>= \parsed -> case parsed of
      Parsed mod -> addDiagnostics $
          everything (++) (mkQ [] oldStyleGray) mod
      _ -> return ()  -- unparseable source: GHC reports its own errors
  where -- The gray/grey branches were duplicated except for the spelling;
        -- one guarded case now handles both, echoing back the spelling the
        -- user wrote so the emitted messages are unchanged.
        oldStyleGray :: Exp SrcSpanInfo -> [Diagnostic]
        oldStyleGray (App loc (Var _ (UnQual _ (Ident _ name))) _)
          | name == "gray" || name == "grey"
          = [(loc, CompileError,
              "error: Outdated use of " ++ name ++ " as a function." ++
              "\n      Remove the function argument for a medium shade of " ++
              name ++ "." ++
              "\n      For a different shade of gray, use light, dark, or HSL.")]
        oldStyleGray _ _doesNotApply = []
-- Look for function applications without parentheses. Since CodeWorld
-- functions are usually applied with parentheses, this often indicates a
-- missing piece of punctuation, such as an operator or comma, or misplaced
-- parentheses. Expression-level hits are deduplicated so nested bad
-- applications produce a single diagnostic.
checkFunctionParentheses :: MonadCompile m => m ()
checkFunctionParentheses = do
    parsed <- getParsedCode
    case parsed of
      Parsed mod ->
          addDiagnostics $ concat
              [ dedupErrorSpans (everything (++) (mkQ [] badExpApps) mod)
              , everything (++) (mkQ [] badMatchApps) mod
              , everything (++) (mkQ [] badPatternApps) mod
              ]
      _ -> return () -- Fall back on GHC for parse errors.
-- | Collapse runs of diagnostics where a span encloses the following one,
-- keeping only the outermost diagnostic of each run. Relies on the input
-- being ordered so an enclosing span appears immediately before the spans
-- it contains (the order produced by the syb traversal above).
dedupErrorSpans :: [Diagnostic] -> [Diagnostic]
dedupErrorSpans [] = []
dedupErrorSpans [err] = [err]
dedupErrorSpans ((loc1, sev1, msg1) : (loc2, sev2, msg2) : errs)
  | loc1 `contains` loc2 = dedupErrorSpans ((loc1, sev1, msg1) : errs)
  | otherwise = (loc1, sev1, msg1) : dedupErrorSpans ((loc2, sev2, msg2) : errs)
  where
    -- span1 contains span2 when both are in the same file, span1 starts at
    -- or before span2's start, and ends at or after span2's end
    -- (line-major, column-minor comparison).
    SrcSpanInfo {srcInfoSpan = span1} `contains` SrcSpanInfo {srcInfoSpan = span2} =
      srcSpanFilename span1 == srcSpanFilename span2 &&
      (srcSpanStartLine span1 < srcSpanStartLine span2 ||
       (srcSpanStartLine span1 == srcSpanStartLine span2 &&
        srcSpanStartColumn span1 <= srcSpanStartColumn span2)) &&
      (srcSpanEndLine span1 > srcSpanEndLine span2 ||
       (srcSpanEndLine span1 == srcSpanEndLine span2 &&
        srcSpanEndColumn span1 >= srcSpanEndColumn span2))
-- | Diagnose a function application whose head cannot be a function
-- (hard error) or whose argument is not self-delimiting (softer wording).
-- If neither guard fires, matching falls through to the final equation
-- and no diagnostic is produced.
-- NOTE(review): both diagnostics anchor at the argument's span (ann rhs),
-- not at loc or the head — presumably intentional; confirm.
badExpApps :: Exp SrcSpanInfo -> [Diagnostic]
badExpApps (App loc lhs rhs)
  | not (isGoodExpAppLhs lhs) = [(ann rhs, CompileError, errorMsg)]
  | not (isGoodExpAppRhs rhs) = [(ann rhs, CompileError, warningMsg)]
  where
    errorMsg = "error:" ++ missingParenError ++ multiplicationPhrase
    warningMsg = "error:" ++ missingParenError ++ multiplicationPhrase ++ functionPhrase
    -- Only suggest f(x) when the head plausibly denotes a function.
    functionPhrase
      | isLikelyFunctionExp lhs = missingParenFunctionSuggestion lhs rhs
      | otherwise = ""
    -- Only suggest a * b when both sides plausibly denote numbers.
    multiplicationPhrase
      | isLikelyNumberExp lhs && isLikelyNumberExp rhs = missingParenMultiplySuggestion lhs rhs
      | otherwise = ""
badExpApps _ = []
-- | Diagnose (at most one) non-self-delimiting argument pattern on the
-- left-hand side of a function definition.
badMatchApps :: Match SrcSpanInfo -> [Diagnostic]
badMatchApps (Match loc lhs pats _ _) =
  take 1 [(ann p, CompileError, warningMsg p) | p <- pats, not (isGoodPatAppRhs p)]
  where
    warningMsg p = "error:" ++ missingParenError ++ missingParenFunctionSuggestion lhs p
badMatchApps _ = []
-- | Same check for constructor patterns appearing inside other patterns.
badPatternApps :: Pat SrcSpanInfo -> [Diagnostic]
badPatternApps (PApp loc lhs pats) =
  take 1 [(ann p, CompileError, warningMsg p) | p <- pats, not (isGoodPatAppRhs p)]
  where
    warningMsg p = "error:" ++ missingParenError ++ missingParenFunctionSuggestion lhs p
badPatternApps _ = []
-- | Shared first bullet of every missing-parentheses diagnostic.
missingParenError :: String
missingParenError =
  "\n \x2022 Missing punctuation before this expression." ++
  "\n      Perhaps you forgot a comma, an operator, or a bracket."
-- | Suggest the * operator, rendering both operands inline when they are
-- short enough; falls back to the placeholders a and b otherwise.
missingParenMultiplySuggestion :: (Pretty a, Pretty b) => a -> b -> String
missingParenMultiplySuggestion lhs rhs =
  "\n \x2022 To multiply, please use the * operator." ++
  "\n      For example: " ++ lhsStr ++ " * " ++ rhsStr
  where lhsStr = fromMaybe "a" (prettyPrintInline lhs)
        rhsStr = fromMaybe "b" (prettyPrintInline rhs)
-- | Suggest parenthesized application, with placeholders f and x when the
-- real terms do not render on one short line.
missingParenFunctionSuggestion :: (Pretty a, Pretty b) => a -> b -> String
missingParenFunctionSuggestion lhs rhs =
  "\n \x2022 To apply a function, add parentheses around the argument." ++
  "\n      For example: " ++ lhsStr ++ "(" ++ rhsStr ++ ")"
  where lhsStr = fromMaybe "f" (prettyPrintInline lhs)
        rhsStr = fromMaybe "x" (prettyPrintInline rhs)
-- | Render a syntax node on one line for embedding in an error message,
-- giving up ('Nothing') when the rendering is 25 characters or longer or
-- still contains a newline.
prettyPrintInline :: Pretty a => a -> Maybe String
prettyPrintInline node =
    if length rendered < 25 && '\n' `notElem` rendered
        then Just rendered
        else Nothing
  where
    rendered = prettyPrintStyleMode style{ mode = OneLineMode } defaultMode node
-- | Determines whether the left-hand side of a function application
-- might possibly be a function. This eliminates cases where just by
-- syntax alone, we know this cannot possibly be a function, such as
-- when it's a number or a list literal. Parentheses are looked through;
-- anything not explicitly excluded is assumed to be plausible.
isGoodExpAppLhs :: Exp l -> Bool
isGoodExpAppLhs (Lit _ _) = False
isGoodExpAppLhs (NegApp _ _) = False
isGoodExpAppLhs (Tuple _ _ _) = False
isGoodExpAppLhs (UnboxedSum _ _ _ _) = False
isGoodExpAppLhs (List _ _) = False
isGoodExpAppLhs (ParArray _ _) = False
isGoodExpAppLhs (RecConstr _ _ _) = False
isGoodExpAppLhs (RecUpdate _ _ _) = False
isGoodExpAppLhs (EnumFrom _ _) = False
isGoodExpAppLhs (EnumFromTo _ _ _) = False
isGoodExpAppLhs (EnumFromThen _ _ _) = False
isGoodExpAppLhs (EnumFromThenTo _ _ _ _) = False
isGoodExpAppLhs (ParArrayFromTo _ _ _) = False
isGoodExpAppLhs (ParArrayFromThenTo _ _ _ _) = False
isGoodExpAppLhs (ListComp _ _ _) = False
isGoodExpAppLhs (ParComp _ _ _) = False
isGoodExpAppLhs (ParArrayComp _ _ _) = False
isGoodExpAppLhs (VarQuote _ _) = False
isGoodExpAppLhs (TypQuote _ _) = False
isGoodExpAppLhs (Paren _ exp) = isGoodExpAppLhs exp
isGoodExpAppLhs _ = True
-- | Whether an application argument is self-delimiting (parenthesized,
-- bracketed, a tuple, unit, or a comprehension/enumeration form) and so
-- acceptable without extra parentheses. Anything else is flagged.
isGoodExpAppRhs :: Exp l -> Bool
isGoodExpAppRhs (Paren _ _) = True
isGoodExpAppRhs (Tuple _ _ _) = True
isGoodExpAppRhs (List _ _) = True
isGoodExpAppRhs (Con _ (Special _ (UnitCon _))) = True
isGoodExpAppRhs (ParArray _ _) = True
isGoodExpAppRhs (EnumFrom _ _) = True
isGoodExpAppRhs (EnumFromThen _ _ _) = True
isGoodExpAppRhs (EnumFromTo _ _ _) = True
isGoodExpAppRhs (EnumFromThenTo _ _ _ _) = True
isGoodExpAppRhs (ParArrayFromTo _ _ _) = True
isGoodExpAppRhs (ParArrayFromThenTo _ _ _ _) = True
isGoodExpAppRhs (ListComp _ _ _) = True
isGoodExpAppRhs (ParComp _ _ _) = True
isGoodExpAppRhs (ParArrayComp _ _ _) = True
isGoodExpAppRhs _ = False
-- | Whether a pattern appearing as an argument is self-delimiting
-- (parenthesized, a tuple, a list, or unit) and so needs no extra
-- parentheses.
isGoodPatAppRhs :: Pat l -> Bool
isGoodPatAppRhs pat =
    case pat of
      PParen _ _ -> True
      PTuple _ _ _ -> True
      PList _ _ -> True
      PApp _ (Special _ (UnitCon _)) [] -> True
      _ -> False
-- | Determines whether an expression is likely to be usable as a function
-- by adding parenthesized arguments. Note that when this would usually
-- require parentheses (such as with a lambda), this should return false.
-- NOTE(review): the Paren case delegates to isGoodExpAppLhs rather than
-- recursing into isLikelyFunctionExp, so e.g. a parenthesized let would
-- count as "likely a function" — confirm this asymmetry is intended.
isLikelyFunctionExp :: Exp l -> Bool
isLikelyFunctionExp (Var _ _) = True
isLikelyFunctionExp (Con _ _) = True
isLikelyFunctionExp (LeftSection _ _ _) = True
isLikelyFunctionExp (RightSection _ _ _) = True
isLikelyFunctionExp (Paren _ exp) = isGoodExpAppLhs exp
isLikelyFunctionExp _ = False
-- | Determines whether an expression is likely to be usable as a number
-- in a multiplication. Note that when this would usually require
-- parentheses (such as with a let statement), this should return false.
isLikelyNumberExp :: Exp l -> Bool
isLikelyNumberExp e =
    case e of
      Var _ _ -> True
      Lit _ _ -> True
      NegApp _ _ -> True
      App _ _ _ -> True
      Paren _ _ -> True
      _ -> False
-- | Flag pattern bindings that bind no variables at all (see
-- 'varlessPatBinds'); skipped when the source did not parse.
checkVarlessPatterns :: MonadCompile m => m ()
checkVarlessPatterns = do
    parsed <- getParsedCode
    case parsed of
      Parsed mod ->
          addDiagnostics (everything (++) (mkQ [] varlessPatBinds) mod)
      _ -> return ()
-- | A pattern binding is flagged when no sub-pattern anywhere inside it
-- binds a variable (per 'isPatVar'); such a definition cannot be referred
-- to and is almost certainly a student mistake.
varlessPatBinds :: Decl SrcSpanInfo -> [Diagnostic]
varlessPatBinds (PatBind loc pat _ _)
  | not (everything (||) (mkQ False isPatVar) pat)
  = [(loc, CompileError,
      "error: This definition doesn't define any variables.\n\t" ++
      "Variables must begin with a lowercase letter.")]
varlessPatBinds _ = []
-- | Whether this pattern node itself binds a variable (plain variable,
-- n+k pattern, or as-pattern).
isPatVar :: Pat SrcSpanInfo -> Bool
isPatVar pat =
    case pat of
      PVar _ _ -> True
      PNPlusK _ _ _ -> True
      PAsPat _ _ _ -> True
      _ -> False
-- | Flag pattern guards (see 'patternGuards'); skipped when the source
-- did not parse.
checkPatternGuards :: MonadCompile m => m ()
checkPatternGuards = do
    parsed <- getParsedCode
    case parsed of
      Parsed mod ->
          addDiagnostics (everything (++) (mkQ [] patternGuards) mod)
      _ -> return ()
-- | One diagnostic per pattern-guard statement (Generator) in a guarded
-- right-hand side. In student code a guard like @x <- 3@ is almost always
-- a mistyped comparison against a negative number (@x < -3@).
patternGuards :: GuardedRhs SrcSpanInfo -> [Diagnostic]
patternGuards (GuardedRhs _ stmts _) =
  [ (loc, CompileError,
     "error: This arrow can't be used here.\n\t" ++
     "To compare a negative number, add a space between < and -.")
  | Generator loc _ _ <- stmts ]
| pranjaltale16/codeworld | codeworld-compiler/src/CodeWorld/Compile/Stages.hs | apache-2.0 | 13,079 | 0 | 18 | 2,874 | 3,309 | 1,707 | 1,602 | 235 | 4 |
{- | Pipelining is sending multiple requests over a socket and receiving the responses later in the same order (a' la HTTP pipelining). This is faster than sending one request, waiting for the response, then sending the next request, and so on. This implementation returns a /promise (future)/ response for each request that when invoked waits for the response if not already arrived. Multiple threads can send on the same pipeline (and get promises back); it will send each thread's request right away without waiting.
A pipeline closes itself when a read or write causes an error, so you can detect a broken pipeline by checking isClosed. It also closes itself when garbage collected, or you can close it explicitly. -}
{-# LANGUAGE RecordWildCards, NamedFieldPuns, ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
#if (__GLASGOW_HASKELL__ >= 706)
{-# LANGUAGE RecursiveDo #-}
#else
{-# LANGUAGE DoRec #-}
#endif
module System.IO.Pipeline (
-- * IOStream
IOStream(..),
-- * Pipeline
Pipeline, newPipeline, send, call, close, isClosed
) where
import Prelude hiding (length)
import Control.Concurrent (ThreadId, forkIO, killThread)
import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
import Control.Monad (forever)
import GHC.Conc (ThreadStatus(..), threadStatus)
import Control.Monad.Trans (liftIO)
#if MIN_VERSION_base(4,6,0)
import Control.Concurrent.MVar.Lifted (MVar, newEmptyMVar, newMVar, withMVar,
putMVar, readMVar, mkWeakMVar)
#else
import Control.Concurrent.MVar.Lifted (MVar, newEmptyMVar, newMVar, withMVar,
putMVar, readMVar, addMVarFinalizer)
#endif
import Control.Exception.Lifted (onException, throwIO, try)
#if !MIN_VERSION_base(4,6,0)
-- Compatibility shim for base < 4.6, where mkWeakMVar does not exist:
-- fall back to addMVarFinalizer, which has the same shape (no Weak
-- pointer result) as the signature used in this module.
mkWeakMVar :: MVar a -> IO () -> IO ()
mkWeakMVar = addMVarFinalizer
#endif
-- * IOStream
-- | An IO sink and source where value of type @o@ are sent and values of type @i@ are received.
data IOStream i o = IOStream {
    writeStream :: o -> IO (), -- ^ Send one value downstream
    readStream :: IO i, -- ^ Block until the next value arrives
    closeStream :: IO () } -- ^ Release the underlying connection
-- * Pipeline
-- | Thread-safe and pipelined connection
data Pipeline i o = Pipeline {
    vStream :: MVar (IOStream i o), -- ^ Mutex on handle, so only one thread at a time can write to it
    responseQueue :: Chan (MVar (Either IOError i)), -- ^ Queue of threads waiting for responses. Every time a response arrive we pop the next thread and give it the response.
    listenThread :: ThreadId -- ^ Background reader started by 'newPipeline'; killed on 'close'
    }
-- | Create new Pipeline over given handle. You should 'close' pipeline when finished, which will also close handle. If pipeline is not closed but eventually garbage collected, it will be closed along with handle.
newPipeline :: IOStream i o -> IO (Pipeline i o)
newPipeline stream = do
    vStream <- newMVar stream
    responseQueue <- newChan
    -- rec (RecursiveDo/DoRec) ties the knot: the Pipeline record needs
    -- listenThread, while the listener thread needs the Pipeline itself.
    rec
        let pipe = Pipeline{..}
        listenThread <- forkIO (listen pipe)
    -- Weak-MVar finalizer: if the pipeline is garbage collected without
    -- an explicit 'close', stop the listener and close the handle anyway.
    _ <- mkWeakMVar vStream $ do
        killThread listenThread
        closeStream stream
    return pipe
close :: Pipeline i o -> IO ()
-- ^ Close pipe and underlying connection
close Pipeline{..} = do
    -- Stop the listener first so it cannot race with the handle shutdown.
    killThread listenThread
    stream <- readMVar vStream
    closeStream stream
-- | The pipeline counts as closed once its listener thread has finished
-- or died; a running or blocked listener means the pipe is still live.
isClosed :: Pipeline i o -> IO Bool
isClosed Pipeline{listenThread} = do
    status <- threadStatus listenThread
    return $ case status of
        ThreadFinished -> True
        ThreadDied -> True
        _ -> False
--isPipeClosed Pipeline{..} = isClosed =<< readMVar vHandle -- isClosed hangs while listen loop is waiting on read
listen :: Pipeline i o -> IO ()
-- ^ Listen for responses and supply them to waiting threads in order
listen Pipeline{..} = do
    stream <- readMVar vStream
    forever $ do
        e <- try $ readStream stream
        var <- readChan responseQueue
        -- Deliver before inspecting: on failure the waiting caller's
        -- promise receives the IOError too, not just this thread.
        putMVar var e
        case e of
            Left err -> closeStream stream >> ioError err  -- close and stop looping
            Right _ -> return ()
send :: Pipeline i o -> o -> IO ()
-- ^ Send message to destination; the destination must not response (otherwise future 'call's will get these responses instead of their own).
-- Throw IOError and close pipeline if send fails
-- (withMVar serializes concurrent senders on the stream mutex).
send p@Pipeline{..} message = withMVar vStream (flip writeStream message) `onException` close p
call :: Pipeline i o -> o -> IO (IO i)
-- ^ Send message to destination and return /promise/ of response from one message only. The destination must reply to the message (otherwise promises will have the wrong responses in them).
-- Throw IOError and closes pipeline if send fails, likewise for promised response.
call p@Pipeline{..} message = withMVar vStream doCall `onException` close p where
    doCall stream = do
        writeStream stream message
        var <- newEmptyMVar
        -- Enqueue while still holding the stream mutex, so the response
        -- queue order matches the order messages were written.
        liftIO $ writeChan responseQueue var
        return $ readMVar var >>= either throwIO return -- return promise
{- Authors: Tony Hannan <tony@10gen.com>
Copyright 2011 10gen Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -}
| selectel/mongoDB-haskell | System/IO/Pipeline.hs | apache-2.0 | 5,494 | 0 | 14 | 1,157 | 874 | 459 | 415 | 67 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric, GADTs, RecordWildCards #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-- |
-- Module : Criterion.Types
-- Copyright : (c) 2009-2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : GHC
--
-- Types for benchmarking.
--
-- The core type is 'Benchmarkable', which admits both pure functions
-- and 'IO' actions.
--
-- For a pure function of type @a -> b@, the benchmarking harness
-- calls this function repeatedly, each time with a different 'Int64'
-- argument (the number of times to run the function in a loop), and
-- reduces the result the function returns to weak head normal form.
--
-- For an action of type @IO a@, the benchmarking harness calls the
-- action repeatedly, but does not reduce the result.
module Criterion.Types
(
-- * Configuration
Config(..)
, Verbosity(..)
-- * Benchmark descriptions
, Benchmarkable(..)
, Benchmark(..)
-- * Measurements
, Measured(..)
, fromInt
, toInt
, fromDouble
, toDouble
, measureAccessors
, measureKeys
, measure
, rescale
-- * Benchmark construction
, env
, envWithCleanup
, perBatchEnv
, perBatchEnvWithCleanup
, perRunEnv
, perRunEnvWithCleanup
, toBenchmarkable
, bench
, bgroup
, addPrefix
, benchNames
-- ** Evaluation control
, whnf
, nf
, nfIO
, whnfIO
-- * Result types
, Outliers(..)
, OutlierEffect(..)
, OutlierVariance(..)
, Regression(..)
, KDE(..)
, Report(..)
, SampleAnalysis(..)
, DataRecord(..)
) where
-- Temporary: to support pre-AMP GHC 7.8.4:
import Control.Applicative
import Data.Monoid
import Data.Semigroup
import Control.DeepSeq (NFData(rnf))
import Control.Exception (evaluate)
import Criterion.Types.Internal (fakeEnvironment)
import Data.Aeson (FromJSON(..), ToJSON(..))
import Data.Binary (Binary(..), putWord8, getWord8)
import Data.Data (Data, Typeable)
import Data.Int (Int64)
import Data.Map (Map, fromList)
import GHC.Generics (Generic)
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import qualified Statistics.Types as St
import Statistics.Resampling.Bootstrap ()
import Prelude
-- | Control the amount of information displayed.
-- The derived 'Ord' instance orders these from least to most output:
-- Quiet < Normal < Verbose.
data Verbosity = Quiet
               | Normal
               | Verbose
                 deriving (Eq, Ord, Bounded, Enum, Read, Show, Typeable, Data,
                           Generic)
-- | Top-level benchmarking configuration.
data Config = Config {
      confInterval :: St.CL Double
      -- ^ Confidence interval for bootstrap estimation (greater than
      -- 0, less than 1).
    , forceGC :: Bool
      -- ^ /Obsolete, unused/. This option used to force garbage
      -- collection between every benchmark run, but it no longer has
      -- an effect (we now unconditionally force garbage collection).
      -- This option remains solely for backwards API compatibility.
    , timeLimit :: Double
      -- ^ Number of seconds to run a single benchmark. (In practice,
      -- execution time will very slightly exceed this limit.)
    , resamples :: Int
      -- ^ Number of resamples to perform when bootstrapping.
    , regressions :: [([String], String)]
      -- ^ Regressions to perform.
    , rawDataFile :: Maybe FilePath
      -- ^ File to write binary measurement and analysis data to. If
      -- not specified, this will be a temporary file.
    , reportFile :: Maybe FilePath
      -- ^ File to write report output to, with template expanded.
    , csvFile :: Maybe FilePath
      -- ^ File to write CSV summary to.
    , jsonFile :: Maybe FilePath
      -- ^ File to write JSON-formatted results to.
    , junitFile :: Maybe FilePath
      -- ^ File to write JUnit-compatible XML results to.
    , verbosity :: Verbosity
      -- ^ Verbosity level to use when running and analysing
      -- benchmarks.
    , template :: FilePath
      -- ^ Template file to use if writing a report.
    } deriving (Eq, Read, Show, Typeable, Data, Generic)
-- The field itself is kept (see its Haddock above); only its use is
-- deprecated, so old configurations still type-check.
{-# DEPRECATED forceGC
      ["forceGC will be removed in the next major criterion release."] #-}
-- | A pure function or impure action that can be benchmarked. The
-- 'Int64' parameter indicates the number of times to run the given
-- function or action.
-- The environment type @a@ is existentially hidden; the 'NFData'
-- constraint lets the harness force it to normal form before timing.
data Benchmarkable = forall a . NFData a =>
    Benchmarkable
  { allocEnv :: Int64 -> IO a
    -- ^ Allocate an environment, given the number of iterations.
  , cleanEnv :: Int64 -> a -> IO ()
    -- ^ Dispose of an environment produced by 'allocEnv'.
  , runRepeatedly :: a -> Int64 -> IO ()
    -- ^ Run the benchmarked operation the given number of times.
  , perRun :: Bool
    -- ^ When 'True' the environment is recreated for every single run
    -- rather than once per batch (set by 'perRunEnvWithCleanup').
  }
-- | A do-nothing action, used as the default environment allocator
-- and cleanup for benchmarks that need no per-run state.
noop :: Monad m => a -> m ()
noop _ = return ()
{-# INLINE noop #-}
-- | Construct a 'Benchmarkable' value from an impure action, where the 'Int64'
-- parameter indicates the number of times to run the action.
toBenchmarkable :: (Int64 -> IO ()) -> Benchmarkable
-- Record syntax makes explicit which fields are defaulted: no real
-- environment is allocated or cleaned up, and batching is per-batch.
toBenchmarkable f = Benchmarkable
    { allocEnv      = noop
    , cleanEnv      = const noop
    , runRepeatedly = const f
    , perRun        = False
    }
{-# INLINE toBenchmarkable #-}
-- | A collection of measurements made while benchmarking.
--
-- Measurements related to garbage collection are tagged with __GC__.
-- They will only be available if a benchmark is run with @\"+RTS
-- -T\"@.
--
-- __Packed storage.__ When GC statistics cannot be collected, GC
-- values will be set to huge negative values. If a field is labeled
-- with \"__GC__\" below, use 'fromInt' and 'fromDouble' to safely
-- convert to \"real\" values.
-- Field order is significant: the 'Binary' instance and
-- measureAccessors_ below both rely on it.
data Measured = Measured {
      measTime :: !Double
    -- ^ Total wall-clock time elapsed, in seconds.
    , measCpuTime :: !Double
    -- ^ Total CPU time elapsed, in seconds. Includes both user and
    -- kernel (system) time.
    , measCycles :: !Int64
    -- ^ Cycles, in unspecified units that may be CPU cycles. (On
    -- i386 and x86_64, this is measured using the @rdtsc@
    -- instruction.)
    , measIters :: !Int64
    -- ^ Number of loop iterations measured.
    , measAllocated :: !Int64
    -- ^ __(GC)__ Number of bytes allocated. Access using 'fromInt'.
    , measNumGcs :: !Int64
    -- ^ __(GC)__ Number of garbage collections performed. Access
    -- using 'fromInt'.
    , measBytesCopied :: !Int64
    -- ^ __(GC)__ Number of bytes copied during garbage collection.
    -- Access using 'fromInt'.
    , measMutatorWallSeconds :: !Double
    -- ^ __(GC)__ Wall-clock time spent doing real work
    -- (\"mutation\"), as distinct from garbage collection. Access
    -- using 'fromDouble'.
    , measMutatorCpuSeconds :: !Double
    -- ^ __(GC)__ CPU time spent doing real work (\"mutation\"), as
    -- distinct from garbage collection. Access using 'fromDouble'.
    , measGcWallSeconds :: !Double
    -- ^ __(GC)__ Wall-clock time spent doing garbage collection.
    -- Access using 'fromDouble'.
    , measGcCpuSeconds :: !Double
    -- ^ __(GC)__ CPU time spent doing garbage collection. Access
    -- using 'fromDouble'.
    } deriving (Eq, Read, Show, Typeable, Data, Generic)
-- Decodes the 11-tuple written by the 'ToJSON' instance below;
-- component order must stay in sync with it.
instance FromJSON Measured where
  parseJSON v = do
    (a,b,c,d,e,f,g,h,i,j,k) <- parseJSON v
    -- The first four fields are not subject to the encoding policy:
    return $ Measured a b c d
                      (int e) (int f) (int g)
                      (db h) (db i) (db j) (db k)
    where int = toInt; db = toDouble
-- Here we treat the numeric fields as `Maybe Int64` and `Maybe Double`
-- and we use a specific policy for deciding when they should be Nothing,
-- which becomes null in JSON.
instance ToJSON Measured where
  -- Encodes as an 11-tuple; component order must stay in sync with
  -- the 'FromJSON' instance above.
  --
  -- BUG FIX: the final component previously serialized
  -- @measMutatorCpuSeconds@ a second time instead of
  -- @measGcCpuSeconds@, so GC CPU time was lost on a JSON round-trip
  -- (parseJSON reads that slot back into 'measGcCpuSeconds').
  toJSON Measured{..} = toJSON
    (measTime, measCpuTime, measCycles, measIters,
     i measAllocated, i measNumGcs, i measBytesCopied,
     d measMutatorWallSeconds, d measMutatorCpuSeconds,
     d measGcWallSeconds, d measGcCpuSeconds)
    where i = fromInt; d = fromDouble
instance NFData Measured where
    -- All fields are strict, so matching the constructor already
    -- forces the whole record; nothing more to do.
    rnf Measured{} = ()
-- THIS MUST REFLECT THE ORDER OF FIELDS IN THE DATA TYPE.
--
-- The ordering is used by Javascript code to pick out the correct
-- index into the vector that represents a Measured value in that
-- world.
-- Each entry pairs a key with (accessor, human-readable description).
-- GC-only fields go through 'fromInt'/'fromDouble' so that the packed
-- "no data" sentinels come out as 'Nothing'.
measureAccessors_ :: [(String, (Measured -> Maybe Double, String))]
measureAccessors_ = [
    ("time", (Just . measTime,
              "wall-clock time"))
  , ("cpuTime", (Just . measCpuTime,
                 "CPU time"))
  , ("cycles", (Just . fromIntegral . measCycles,
                "CPU cycles"))
  , ("iters", (Just . fromIntegral . measIters,
               "loop iterations"))
  , ("allocated", (fmap fromIntegral . fromInt . measAllocated,
                   "(+RTS -T) bytes allocated"))
  , ("numGcs", (fmap fromIntegral . fromInt . measNumGcs,
                "(+RTS -T) number of garbage collections"))
  , ("bytesCopied", (fmap fromIntegral . fromInt . measBytesCopied,
                     "(+RTS -T) number of bytes copied during GC"))
  , ("mutatorWallSeconds", (fromDouble . measMutatorWallSeconds,
                            "(+RTS -T) wall-clock time for mutator threads"))
  , ("mutatorCpuSeconds", (fromDouble . measMutatorCpuSeconds,
                           "(+RTS -T) CPU time spent running mutator threads"))
  , ("gcWallSeconds", (fromDouble . measGcWallSeconds,
                       "(+RTS -T) wall-clock time spent doing GC"))
  , ("gcCpuSeconds", (fromDouble . measGcCpuSeconds,
                      "(+RTS -T) CPU time spent doing GC"))
  ]
-- | Field names in a 'Measured' record, in the order in which they
-- appear.
measureKeys :: [String]
-- A comprehension over the master accessor table preserves its order.
measureKeys = [key | (key, _) <- measureAccessors_]
-- | Field names and accessors for a 'Measured' record.
-- The 'Map' form of measureAccessors_, for lookup by field name.
measureAccessors :: Map String (Measured -> Maybe Double, String)
measureAccessors = fromList measureAccessors_
-- | Normalise every measurement as if 'measIters' was 1.
--
-- ('measIters' itself is left unaffected.)
rescale :: Measured -> Measured
rescale m@Measured{..} = m {
      measTime = d measTime
    , measCpuTime = d measCpuTime
    , measCycles = i measCycles
    -- skip measIters
    -- NOTE(review): measAllocated is also left untouched here —
    -- confirm that allocation counts are intentionally not divided
    -- by the iteration count.
    , measNumGcs = i measNumGcs
    , measBytesCopied = i measBytesCopied
    , measMutatorWallSeconds = d measMutatorWallSeconds
    , measMutatorCpuSeconds = d measMutatorCpuSeconds
    , measGcWallSeconds = d measGcWallSeconds
    , measGcCpuSeconds = d measGcCpuSeconds
    } where
        -- Divide a Double field by the iteration count; the packed
        -- "no data" sentinel (fromDouble gives Nothing) is preserved.
        d k = maybe k (/ iters) (fromDouble k)
        -- Likewise for Int64 fields, rounding back to an integer.
        i k = maybe k (round . (/ iters)) (fromIntegral <$> fromInt k)
        iters = fromIntegral measIters :: Double
-- | Convert a (possibly unavailable) GC measurement to a true value.
-- If the measurement is a huge negative number that corresponds to
-- \"no data\", this will return 'Nothing'.
-- 'minBound' is the packed sentinel meaning "no data available".
fromInt :: Int64 -> Maybe Int64
fromInt i = if i == minBound then Nothing else Just i
-- | Convert from a true value back to the packed representation used
-- for GC measurements.
-- Inverse of 'fromInt': 'Nothing' packs back to the sentinel.
toInt :: Maybe Int64 -> Int64
toInt = maybe minBound id
-- | Convert a (possibly unavailable) GC measurement to a true value.
-- If the measurement is a huge negative number that corresponds to
-- \"no data\", this will return 'Nothing'.
-- Non-finite values (infinities, NaN) are the packed "no data"
-- representation for Double-valued GC fields.
fromDouble :: Double -> Maybe Double
fromDouble d
  | isNaN d || isInfinite d = Nothing
  | otherwise = Just d
-- | Convert from a true value back to the packed representation used
-- for GC measurements.
-- Inverse of 'fromDouble': 'Nothing' packs to negative infinity.
toDouble :: Maybe Double -> Double
toDouble = maybe (-1/0) id
-- Serialisation order MUST match the field order of 'Measured'
-- (see the warning above measureAccessors_).
instance Binary Measured where
  put Measured{..} = do
    put measTime; put measCpuTime; put measCycles; put measIters
    put measAllocated; put measNumGcs; put measBytesCopied
    put measMutatorWallSeconds; put measMutatorCpuSeconds
    put measGcWallSeconds; put measGcCpuSeconds
  get = Measured <$> get <*> get <*> get <*> get
          <*> get <*> get <*> get <*> get <*> get <*> get <*> get
-- | Apply an argument to a function, and evaluate the result to weak
-- head normal form (WHNF).
whnf :: (a -> b) -> a -> Benchmarkable
-- With 'id' as the reducer, the 'evaluate' inside 'pureFunc' forces
-- the result only to weak head normal form.
whnf = pureFunc id
{-# INLINE whnf #-}
-- | Apply an argument to a function, and evaluate the result to
-- normal form (NF).
nf :: NFData b => (a -> b) -> a -> Benchmarkable
-- 'rnf' as the reducer forces the result all the way to normal form.
nf = pureFunc rnf
{-# INLINE nf #-}
-- Shared implementation of 'whnf' and 'nf': evaluate @reduce (f x)@
-- under 'evaluate', @n@ times.
pureFunc :: (b -> c) -> (a -> b) -> a -> Benchmarkable
pureFunc reduce f0 x0 = toBenchmarkable (go f0 x0)
  -- f and x are re-passed to go on every iteration, presumably to
  -- stop GHC sharing the result of (f x) across iterations — confirm
  -- before "simplifying" this loop.
  where go f x n
          | n <= 0 = return ()
          | otherwise = evaluate (reduce (f x)) >> go f x (n-1)
{-# INLINE pureFunc #-}
-- | Perform an action, then evaluate its result to normal form.
-- This is particularly useful for forcing a lazy 'IO' action to be
-- completely performed.
nfIO :: NFData a => IO a -> Benchmarkable
-- Forces each action result to normal form via 'rnf'.
nfIO = toBenchmarkable . impure rnf
{-# INLINE nfIO #-}
-- | Perform an action, then evaluate its result to weak head normal
-- form (WHNF). This is useful for forcing an 'IO' action whose result
-- is an expression to be evaluated down to a more useful value.
whnfIO :: IO a -> Benchmarkable
-- 'id' as the strategy: 'evaluate' only forces WHNF of each result.
whnfIO = toBenchmarkable . impure id
{-# INLINE whnfIO #-}
-- Shared implementation of 'nfIO' and 'whnfIO': perform the action
-- @n@ times, forcing each result with @strategy@ under 'evaluate'.
impure :: (a -> b) -> IO a -> Int64 -> IO ()
impure strategy a = go
  where go n
          | n <= 0 = return ()
          | otherwise = a >>= (evaluate . strategy) >> go (n-1)
{-# INLINE impure #-}
-- | Specification of a collection of benchmarks and environments. A
-- benchmark may consist of:
--
-- * An environment that creates input data for benchmarks, created
-- with 'env'.
--
-- * A single 'Benchmarkable' item with a name, created with 'bench'.
--
-- * A (possibly nested) group of 'Benchmark's, created with 'bgroup'.
data Benchmark where
    -- Allocation action, cleanup action, and the benchmarks that
    -- consume the environment (built by 'env'/'envWithCleanup').
    Environment :: NFData env
                => IO env -> (env -> IO a) -> (env -> Benchmark) -> Benchmark
    -- A single named benchmark (built by 'bench').
    Benchmark :: String -> Benchmarkable -> Benchmark
    -- A named group of benchmarks (built by 'bgroup').
    BenchGroup :: String -> [Benchmark] -> Benchmark
-- | Run a benchmark (or collection of benchmarks) in the given
-- environment. The purpose of an environment is to lazily create
-- input data to pass to the functions that will be benchmarked.
--
-- A common example of environment data is input that is read from a
-- file. Another is a large data structure constructed in-place.
--
-- __Motivation.__ In earlier versions of criterion, all benchmark
-- inputs were always created when a program started running. By
-- deferring the creation of an environment when its associated
-- benchmarks need it, we avoid two problems that this strategy
-- caused:
--
-- * Memory pressure distorted the results of unrelated benchmarks.
-- If one benchmark needed e.g. a gigabyte-sized input, it would
-- force the garbage collector to do extra work when running some
-- other benchmark that had no use for that input. Since the data
-- created by an environment is only available when it is in scope,
-- it should be garbage collected before other benchmarks are run.
--
-- * The time cost of generating all needed inputs could be
-- significant in cases where no inputs (or just a few) were really
-- needed. This occurred often, for instance when just one out of a
-- large suite of benchmarks was run, or when a user would list the
-- collection of benchmarks without running any.
--
-- __Creation.__ An environment is created right before its related
-- benchmarks are run. The 'IO' action that creates the environment
-- is run, then the newly created environment is evaluated to normal
-- form (hence the 'NFData' constraint) before being passed to the
-- function that receives the environment.
--
-- __Complex environments.__ If you need to create an environment that
-- contains multiple values, simply pack the values into a tuple.
--
-- __Lazy pattern matching.__ In situations where a \"real\"
-- environment is not needed, e.g. if a list of benchmark names is
-- being generated, a value which throws an exception will be passed
-- to the function that receives the environment. This avoids the
-- overhead of generating an environment that will not actually be
-- used.
--
-- The function that receives the environment must use lazy pattern
-- matching to deconstruct the tuple (e.g., @~(x, y)@, not @(x, y)@),
-- as use of strict pattern matching will cause a crash if an
-- exception-throwing value is passed in.
--
-- __Example.__ This program runs benchmarks in an environment that
-- contains two values. The first value is the contents of a text
-- file; the second is a string. Pay attention to the use of a lazy
-- pattern to deconstruct the tuple in the function that returns the
-- benchmarks to be run.
--
-- > setupEnv = do
-- > let small = replicate 1000 (1 :: Int)
-- > big <- map length . words <$> readFile "/usr/dict/words"
-- > return (small, big)
-- >
-- > main = defaultMain [
-- > -- notice the lazy pattern match here!
-- > env setupEnv $ \ ~(small,big) -> bgroup "main" [
-- > bgroup "small" [
-- > bench "length" $ whnf length small
-- > , bench "length . filter" $ whnf (length . filter (==1)) small
-- > ]
-- > , bgroup "big" [
-- > bench "length" $ whnf length big
-- > , bench "length . filter" $ whnf (length . filter (==1)) big
-- > ]
-- > ] ]
--
-- __Discussion.__ The environment created in the example above is
-- intentionally /not/ ideal. As Haskell's scoping rules suggest, the
-- variable @big@ is in scope for the benchmarks that use only
-- @small@. It would be better to create a separate environment for
-- @big@, so that it will not be kept alive while the unrelated
-- benchmarks are being run.
env :: NFData env =>
       IO env
    -- ^ Action that allocates the environment; its result is
    -- evaluated to normal form before any benchmark receives it.
    -> (env -> Benchmark)
    -- ^ Benchmarks parameterised by the environment; deconstruct it
    -- with a lazy pattern (see the discussion above).
    -> Benchmark
env create = Environment create noop
-- | Same as `env`, but allows for an additional callback
-- to clean up the environment. Resource clean up is exception safe, that is,
-- it runs even if the 'Benchmark' throws an exception.
envWithCleanup
    :: NFData env
    => IO env
    -- ^ Action that allocates the environment; its result is
    -- evaluated to normal form before any benchmark receives it.
    -> (env -> IO a)
    -- ^ Finaliser run on the environment afterwards, even if a
    -- benchmark throws an exception.
    -> (env -> Benchmark)
    -- ^ Benchmarks that receive the environment.
    -> Benchmark
envWithCleanup create cleanup f = Environment create cleanup f
-- | Create a Benchmarkable where a fresh environment is allocated for every
-- batch of runs of the benchmarkable.
--
-- The environment is evaluated to normal form before the benchmark is run.
--
-- When using 'whnf', 'whnfIO', etc. Criterion creates a 'Benchmarkable'
-- which runs a batch of @N@ repeated runs of that expression. Criterion may
-- run any number of these batches to get accurate measurements. Environments
-- created by 'env' and 'envWithCleanup', are shared across all these batches
-- of runs.
--
-- This is fine for simple benchmarks on static input, but when benchmarking
-- IO operations where these operations can modify (and especially grow) the
-- environment this means that later batches might have their accuracy affected
-- due to, for example, longer garbage collection pauses.
--
-- An example: Suppose we want to benchmark writing to a Chan, if we allocate
-- the Chan using environment and our benchmark consists of @writeChan env ()@,
-- the contents and thus size of the Chan will grow with every repeat. If
-- Criterion runs 1,000 batches of 1,000 repeats, the result is that the
-- channel will have 999,000 items in it by the time the last batch is run.
-- Since GHC GC has to copy the live set for every major GC this means our last
-- set of writes will suffer a lot of noise of the previous repeats.
--
-- By allocating a fresh environment for every batch of runs this function
-- should eliminate this effect.
perBatchEnv
    :: (NFData env, NFData b)
    => (Int64 -> IO env)
    -- ^ Allocate an environment for a batch of N runs; the result is
    -- evaluated to normal form before running.
    -> (env -> IO b)
    -- ^ Produce the IO action to benchmark against the freshly
    -- generated environment.
    -> Benchmarkable
perBatchEnv create work = perBatchEnvWithCleanup create (const noop) work
-- | Same as `perBatchEnv`, but allows for an additional callback
-- to clean up the environment. Resource clean up is exception safe, that is,
-- it runs even if the 'Benchmark' throws an exception.
perBatchEnvWithCleanup
    :: (NFData env, NFData b)
    => (Int64 -> IO env)
    -- ^ Create an environment for a batch of N runs. The environment will be
    -- evaluated to normal form before running.
    -> (Int64 -> env -> IO ())
    -- ^ Clean up the created environment.
    -> (env -> IO b)
    -- ^ Function returning the IO action that should be benchmarked with the
    -- newly generated environment.
    -> Benchmarkable
-- 'impure rnf' forces every action result to normal form; 'False'
-- means the environment is per-batch, not per-run.
perBatchEnvWithCleanup alloc clean work
    = Benchmarkable alloc clean (impure rnf . work) False
-- | Create a Benchmarkable where a fresh environment is allocated for every
-- run of the operation to benchmark. This is useful for benchmarking mutable
-- operations that need a fresh environment, such as sorting a mutable Vector.
--
-- As with 'env' and 'perBatchEnv' the environment is evaluated to normal form
-- before the benchmark is run.
--
-- This introduces extra noise and results in reduced accuracy compared to other
-- Criterion benchmarks. But allows easier benchmarking for mutable operations
-- than was previously possible.
perRunEnv
    :: (NFData env, NFData b)
    => IO env
    -- ^ Action that creates the environment for a single run; the
    -- result is evaluated to normal form before running.
    -> (env -> IO b)
    -- ^ Function returning the IO action that should be benchmarked
    -- with the newly generated environment.
    -> Benchmarkable
perRunEnv create work = perRunEnvWithCleanup create noop work
-- | Same as `perRunEnv`, but allows for an additional callback
-- to clean up the environment. Resource clean up is exception safe, that is,
-- it runs even if the 'Benchmark' throws an exception.
perRunEnvWithCleanup
    :: (NFData env, NFData b)
    => IO env
    -- ^ Action that creates the environment for a single run.
    -> (env -> IO ())
    -- ^ Finaliser for the created environment.
    -> (env -> IO b)
    -- ^ Function returning the IO action that should be benchmarked
    -- with the newly generated environment.
    -> Benchmarkable
-- Identical to the per-batch variant except that 'perRun' is flipped
-- on, telling the harness to rebuild the environment for every run.
perRunEnvWithCleanup create cleanup work =
    (perBatchEnvWithCleanup (const create) (const cleanup) work)
        { perRun = True }
-- | Create a single benchmark.
bench :: String -- ^ A name to identify the benchmark.
      -> Benchmarkable -- ^ An activity to be benchmarked.
      -> Benchmark
-- Pointed form of the 'Benchmark' constructor.
bench name act = Benchmark name act
-- | Group several benchmarks together under a common name.
bgroup :: String -- ^ A name to identify the group of benchmarks.
       -> [Benchmark] -- ^ Benchmarks to group under this name.
       -> Benchmark
-- Pointed form of the 'BenchGroup' constructor.
bgroup name bs = BenchGroup name bs
-- | Add the given prefix to a name. If the prefix is empty, the name
-- is returned unmodified. Otherwise, the prefix and name are
-- separated by a @\'\/\'@ character.
addPrefix :: String -- ^ Prefix.
          -> String -- ^ Name.
          -> String
addPrefix pfx desc
  | null pfx  = desc
  | otherwise = pfx ++ '/' : desc
-- | Retrieve the names of all benchmarks. Grouped benchmarks are
-- prefixed with the name of the group they're in.
benchNames :: Benchmark -> [String]
-- 'fakeEnvironment' throws if forced; that is fine here because only
-- the benchmark names are demanded (see the lazy-pattern requirement
-- documented on 'env').
benchNames (Environment _ _ b) = benchNames (b fakeEnvironment)
benchNames (Benchmark d _) = [d]
benchNames (BenchGroup d bs) = map (addPrefix d) . concatMap benchNames $ bs
-- Debug rendering only: environment contents are elided, and the
-- Environment case shows the benchmarks built from 'fakeEnvironment'.
instance Show Benchmark where
  show (Environment _ _ b) = "Environment _ _" ++ show (b fakeEnvironment)
  show (Benchmark d _) = "Benchmark " ++ show d
  show (BenchGroup d _) = "BenchGroup " ++ show d
-- | Project one field out of each 'Measured' value and repack the
-- results as an unboxed vector.
measure :: (U.Unbox a) => (Measured -> a) -> V.Vector Measured -> U.Vector a
measure f = U.convert . V.map f
-- | Outliers from sample data, calculated using the boxplot
-- technique.
data Outliers = Outliers {
      samplesSeen :: !Int64
    -- ^ Number of samples seen.
    , lowSevere :: !Int64
    -- ^ More than 3 times the interquartile range (IQR) below the
    -- first quartile.
    , lowMild :: !Int64
    -- ^ Between 1.5 and 3 times the IQR below the first quartile.
    , highMild :: !Int64
    -- ^ Between 1.5 and 3 times the IQR above the third quartile.
    , highSevere :: !Int64
    -- ^ More than 3 times the IQR above the third quartile.
    } deriving (Eq, Read, Show, Typeable, Data, Generic)
-- JSON instances use the 'Generic'-derived defaults.
instance FromJSON Outliers
instance ToJSON Outliers
instance Binary Outliers where
    put (Outliers v w x y z) = put v >> put w >> put x >> put y >> put z
    get = Outliers <$> get <*> get <*> get <*> get <*> get
-- Default 'rnf' suffices: all fields are strict.
instance NFData Outliers
-- | A description of the extent to which outliers in the sample data
-- affect the sample mean and standard deviation.
data OutlierEffect = Unaffected -- ^ Less than 1% effect.
                   | Slight     -- ^ Between 1% and 10%.
                   | Moderate   -- ^ Between 10% and 50%.
                   | Severe     -- ^ Above 50% (i.e. measurements
                                -- are useless).
                     deriving (Eq, Ord, Read, Show, Typeable, Data, Generic)
instance FromJSON OutlierEffect
instance ToJSON OutlierEffect
-- Tag bytes 0..3 follow constructor order; they must stay stable so
-- previously written data remains decodable.
instance Binary OutlierEffect where
  put Unaffected = putWord8 0
  put Slight = putWord8 1
  put Moderate = putWord8 2
  put Severe = putWord8 3
  get = do
    i <- getWord8
    case i of
      0 -> return Unaffected
      1 -> return Slight
      2 -> return Moderate
      3 -> return Severe
      -- 'fail' makes a corrupt tag a recoverable decoder failure.
      _ -> fail $ "get for OutlierEffect: unexpected " ++ show i
instance NFData OutlierEffect
instance Semigroup Outliers where
    (<>) = addOutliers
instance Monoid Outliers where
    mempty = Outliers 0 0 0 0 0
-- Before base 4.11, 'mappend' was not defaulted to '(<>)'.
#if !(MIN_VERSION_base(4,11,0))
    mappend = addOutliers
#endif
-- | Pointwise sum of two outlier tallies (the Semigroup operation).
addOutliers :: Outliers -> Outliers -> Outliers
addOutliers (Outliers n1 ls1 lm1 hm1 hs1) (Outliers n2 ls2 lm2 hm2 hs2) =
    Outliers (n1 + n2) (ls1 + ls2) (lm1 + lm2) (hm1 + hm2) (hs1 + hs2)
{-# INLINE addOutliers #-}
-- | Analysis of the extent to which outliers in a sample affect its
-- standard deviation (and to some extent, its mean).
data OutlierVariance = OutlierVariance {
      ovEffect :: OutlierEffect
    -- ^ Qualitative description of effect.
    , ovDesc :: String
    -- ^ Brief textual description of effect.
    , ovFraction :: Double
    -- ^ Quantitative description of effect (a fraction between 0 and 1).
    } deriving (Eq, Read, Show, Typeable, Data, Generic)
-- JSON instances use the 'Generic'-derived defaults; Binary follows
-- field order.
instance FromJSON OutlierVariance
instance ToJSON OutlierVariance
instance Binary OutlierVariance where
    put (OutlierVariance x y z) = put x >> put y >> put z
    get = OutlierVariance <$> get <*> get <*> get
instance NFData OutlierVariance where
    rnf OutlierVariance{..} = rnf ovEffect `seq` rnf ovDesc `seq` rnf ovFraction
-- | Results of a linear regression.
data Regression = Regression {
    regResponder :: String
    -- ^ Name of the responding variable.
  , regCoeffs :: Map String (St.Estimate St.ConfInt Double)
    -- ^ Map from name to value of predictor coefficients.
  , regRSquare :: St.Estimate St.ConfInt Double
    -- ^ R² goodness-of-fit estimate.
  } deriving (Eq, Read, Show, Typeable, Generic)
-- JSON instances use the 'Generic'-derived defaults; Binary follows
-- field order.
instance FromJSON Regression
instance ToJSON Regression
instance Binary Regression where
    put Regression{..} =
      put regResponder >> put regCoeffs >> put regRSquare
    get = Regression <$> get <*> get <*> get
instance NFData Regression where
    rnf Regression{..} =
      rnf regResponder `seq` rnf regCoeffs `seq` rnf regRSquare
-- | Result of a bootstrap analysis of a non-parametric sample.
data SampleAnalysis = SampleAnalysis {
      anRegress :: [Regression]
    -- ^ Estimates calculated via linear regression.
    , anOverhead :: Double
    -- ^ Estimated measurement overhead, in seconds. Estimation is
    -- performed via linear regression.
    , anMean :: St.Estimate St.ConfInt Double
    -- ^ Estimated mean.
    , anStdDev :: St.Estimate St.ConfInt Double
    -- ^ Estimated standard deviation.
    , anOutlierVar :: OutlierVariance
    -- ^ Description of the effects of outliers on the estimated
    -- variance.
    } deriving (Eq, Read, Show, Typeable, Generic)
-- JSON instances use the 'Generic'-derived defaults; Binary follows
-- field order.
instance FromJSON SampleAnalysis
instance ToJSON SampleAnalysis
instance Binary SampleAnalysis where
    put SampleAnalysis{..} = do
      put anRegress; put anOverhead; put anMean; put anStdDev; put anOutlierVar
    get = SampleAnalysis <$> get <*> get <*> get <*> get <*> get
instance NFData SampleAnalysis where
    rnf SampleAnalysis{..} =
        rnf anRegress `seq` rnf anOverhead `seq` rnf anMean `seq`
        rnf anStdDev `seq` rnf anOutlierVar
-- | Data for a KDE chart of performance.
-- NOTE(review): presumably 'kdeValues' holds the abscissae and
-- 'kdePDF' the estimated densities at those points — confirm against
-- the analysis code before relying on this.
data KDE = KDE {
      kdeType :: String
    , kdeValues :: U.Vector Double
    , kdePDF :: U.Vector Double
    } deriving (Eq, Read, Show, Typeable, Data, Generic)
-- JSON instances use the 'Generic'-derived defaults; Binary follows
-- field order.
instance FromJSON KDE
instance ToJSON KDE
instance Binary KDE where
    put KDE{..} = put kdeType >> put kdeValues >> put kdePDF
    get = KDE <$> get <*> get <*> get
instance NFData KDE where
    rnf KDE{..} = rnf kdeType `seq` rnf kdeValues `seq` rnf kdePDF
-- | Report of a sample analysis.
data Report = Report {
      reportNumber :: Int
    -- ^ A simple index indicating that this is the /n/th report.
    , reportName :: String
    -- ^ The name of this report.
    , reportKeys :: [String]
    -- ^ See 'measureKeys'.
    , reportMeasured :: V.Vector Measured
    -- ^ Raw measurements. These are /not/ corrected for the
    -- estimated measurement overhead that can be found via the
    -- 'anOverhead' field of 'reportAnalysis'.
    , reportAnalysis :: SampleAnalysis
    -- ^ Report analysis.
    , reportOutliers :: Outliers
    -- ^ Analysis of outliers.
    , reportKDEs :: [KDE]
    -- ^ Data for a KDE of times.
    } deriving (Eq, Read, Show, Typeable, Generic)
-- JSON instances use the 'Generic'-derived defaults; Binary follows
-- field order.
instance FromJSON Report
instance ToJSON Report
instance Binary Report where
    put Report{..} =
      put reportNumber >> put reportName >> put reportKeys >>
      put reportMeasured >> put reportAnalysis >> put reportOutliers >>
      put reportKDEs
    get = Report <$> get <*> get <*> get <*> get <*> get <*> get <*> get
instance NFData Report where
    rnf Report{..} =
      rnf reportNumber `seq` rnf reportName `seq` rnf reportKeys `seq`
      rnf reportMeasured `seq` rnf reportAnalysis `seq` rnf reportOutliers `seq`
      rnf reportKDEs
-- A record in a raw-data file: either raw measurements for one
-- benchmark, or a fully analysed 'Report'.
data DataRecord = Measurement Int String (V.Vector Measured)
                | Analysed Report
                  deriving (Eq, Read, Show, Typeable, Generic)
-- Tag byte 0 = Measurement, 1 = Analysed; the tags must stay stable
-- so previously written data files remain decodable.
instance Binary DataRecord where
  put (Measurement i n v) = putWord8 0 >> put i >> put n >> put v
  put (Analysed r) = putWord8 1 >> put r
  get = do
    w <- getWord8
    case w of
      0 -> Measurement <$> get <*> get <*> get
      1 -> Analysed <$> get
      -- Use 'fail' (as the Binary OutlierEffect instance above does)
      -- so a corrupt tag is a recoverable decoder failure instead of
      -- an imprecise 'error' exception.
      _ -> fail $ "get for DataRecord: unexpected " ++ show w
instance NFData DataRecord where
    rnf (Measurement i n v) = rnf i `seq` rnf n `seq` rnf v
    rnf (Analysed r) = rnf r
-- JSON instances use the 'Generic'-derived defaults.
instance FromJSON DataRecord
instance ToJSON DataRecord
| bgamari/criterion | Criterion/Types.hs | bsd-2-clause | 31,745 | 0 | 16 | 7,846 | 5,220 | 2,917 | 2,303 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS -Wall #-}
----------------------------------------------------------------------
-- |
-- Module : Data.ZoomCache.PCM.Enumeratee
-- Copyright : Conrad Parker
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Conrad Parker <conrad@metadecks.org>
-- Stability : unstable
-- Portability : unknown
--
-- ZoomCache PCM enumeratee conversions
----------------------------------------------------------------------
module Data.ZoomCache.PCM.Enumeratee (
enumPCMDouble
, enumListPCMDouble
, enumSummaryPCMDouble
, wholeTrackSummaryPCMDouble
, enumSummaryListPCMDouble
, wholeTrackSummaryListPCMDouble
) where
import Control.Applicative ((<$>))
import Control.Monad.Trans (MonadIO)
import Data.ByteString (ByteString)
import Data.Int
import qualified Data.Iteratee as I
import Data.Maybe
import Data.Offset
import Data.Typeable
import Data.TypeLevel.Num hiding ((==))
import Data.ZoomCache
import Data.ZoomCache.Codec
import Data.ZoomCache.NList
import Data.ZoomCache.Multichannel.NList
import Data.ZoomCache.PCM.Types
import Data.ZoomCache.PCM.IEEE754()
import Data.ZoomCache.PCM.Int()
----------------------------------------------------------------------
-- | Extract @'PCM' 'Double'@ samples from a 'ZoomRaw', widening any
-- supported PCM sample type via 'realToFrac'; non-PCM payloads give @[]@.
rawToPCMDouble :: ZoomRaw -> [PCM Double]
rawToPCMDouble (ZoomRaw xs)
    | sameAs (undefined :: [PCM Double]) =
        fromMaybe [] (cast xs :: Maybe [PCM Double])
    | sameAs (undefined :: [PCM Float]) = widen (cast xs :: Maybe [PCM Float])
    | sameAs (undefined :: [PCM Int])   = widen (cast xs :: Maybe [PCM Int])
    | sameAs (undefined :: [PCM Int8])  = widen (cast xs :: Maybe [PCM Int8])
    | sameAs (undefined :: [PCM Int16]) = widen (cast xs :: Maybe [PCM Int16])
    | sameAs (undefined :: [PCM Int32]) = widen (cast xs :: Maybe [PCM Int32])
    | sameAs (undefined :: [PCM Int64]) = widen (cast xs :: Maybe [PCM Int64])
    | otherwise = []
  where
    -- Runtime type-rep comparison drives the dispatch above.
    sameAs :: Typeable b => b -> Bool
    sameAs u = typeOf xs == typeOf u
    widen :: Real a => Maybe [PCM a] -> [PCM Double]
    widen = maybe [] (map (PCM . realToFrac . unPCM))
-- | Like 'rawToPCMDouble' but also handles single-channel 'NList'
-- payloads, yielding one inner list per channel.
rawToListPCMDouble :: ZoomRaw -> [[PCM Double]]
rawToListPCMDouble (ZoomRaw xs)
    | not (null plain) = [plain]
    | sameAs (undefined :: [NList D1 (PCM Double)]) =
        unwrap (cast xs :: Maybe [NList D1 (PCM Double)])
    | sameAs (undefined :: [NList D1 (PCM Float)]) =
        convert (cast xs :: Maybe [NList D1 (PCM Float)])
    | sameAs (undefined :: [NList D1 (PCM Int)]) =
        convert (cast xs :: Maybe [NList D1 (PCM Int)])
    | sameAs (undefined :: [NList D1 (PCM Int8)]) =
        convert (cast xs :: Maybe [NList D1 (PCM Int8)])
    | sameAs (undefined :: [NList D1 (PCM Int16)]) =
        convert (cast xs :: Maybe [NList D1 (PCM Int16)])
    | sameAs (undefined :: [NList D1 (PCM Int32)]) =
        convert (cast xs :: Maybe [NList D1 (PCM Int32)])
    | sameAs (undefined :: [NList D1 (PCM Int64)]) =
        convert (cast xs :: Maybe [NList D1 (PCM Int64)])
    | otherwise = []
  where
    -- First try the plain (non-NList) decoding.
    plain = rawToPCMDouble (ZoomRaw xs)
    sameAs :: Typeable b => b -> Bool
    sameAs u = typeOf xs == typeOf u
    unwrap :: Maybe [NList D1 a] -> [[a]]
    unwrap = maybe [] (map nListToList)
    convert :: (ZoomReadable a) => Maybe [NList D1 a] -> [[PCM Double]]
    convert = map (rawToPCMDouble . ZoomRaw) . unwrap
----------------------------------------------------------------------
-- | Coercion of numeric Summary to type Summary Double.
toSummaryPCMDouble :: Typeable a => Summary a -> Maybe (Summary (PCM Double))
-- Dispatch on the runtime type representation; every branch casts
-- and, where necessary, widens the sample type to Double.
-- (Cleanup: dropped a redundant 'id' application in the Double case.)
toSummaryPCMDouble s
    | typeOf s == typeOf (undefined :: Summary (PCM Double)) =
        cast s :: Maybe (Summary (PCM Double))
    | typeOf s == typeOf (undefined :: Summary (PCM Float)) =
        sd <$> (cast s :: Maybe (Summary (PCM Float)))
    | typeOf s == typeOf (undefined :: Summary (PCM Int)) =
        sd <$> (cast s :: Maybe (Summary (PCM Int)))
    | typeOf s == typeOf (undefined :: Summary (PCM Int8)) =
        sd <$> (cast s :: Maybe (Summary (PCM Int8)))
    | typeOf s == typeOf (undefined :: Summary (PCM Int16)) =
        sd <$> (cast s :: Maybe (Summary (PCM Int16)))
    | typeOf s == typeOf (undefined :: Summary (PCM Int32)) =
        sd <$> (cast s :: Maybe (Summary (PCM Int32)))
    | typeOf s == typeOf (undefined :: Summary (PCM Int64)) =
        sd <$> (cast s :: Maybe (Summary (PCM Int64)))
    | otherwise = Nothing
  where
    -- Widen just the summary payload; the rest of the record is kept.
    sd :: ZoomPCM a => Summary (PCM a) -> Summary (PCM Double)
    sd s' = s' { summaryData = toSummaryDataPCMDouble (summaryData s') }
-- | Widen the per-field PCM summary statistics (min/max) to 'Double';
-- avg and RMS are already Doubles and pass through unchanged.
toSummaryDataPCMDouble :: ZoomPCM a => SummaryData (PCM a) -> SummaryData (PCM Double)
toSummaryDataPCMDouble sd =
    pcmMkSummary (realToFrac (pcmMin sd))
                 (realToFrac (pcmMax sd))
                 (pcmAvg sd)
                 (pcmRMS sd)
toSummaryListPCMDouble :: Typeable a => Summary a -> Maybe [Summary (PCM Double)]
-- Like 'toSummaryPCMDouble', but additionally unpacks single-channel
-- 'NList' summaries into a list of per-channel summaries.
toSummaryListPCMDouble s
    | isJust sd = (:[]) <$> sd
    | typeOf s == typeOf (undefined :: Summary (NList D1 (PCM Double))) =
        sl <$> (cast s :: Maybe (Summary (NList D1 (PCM Double))))
    | typeOf s == typeOf (undefined :: Summary (NList D1 (PCM Float))) =
        sld <$> (cast s :: Maybe (Summary (NList D1 (PCM Float))))
    | typeOf s == typeOf (undefined :: Summary (NList D1 (PCM Int))) =
        sld <$> (cast s :: Maybe (Summary (NList D1 (PCM Int))))
    | typeOf s == typeOf (undefined :: Summary (NList D1 (PCM Int8))) =
        sld <$> (cast s :: Maybe (Summary (NList D1 (PCM Int8))))
    | typeOf s == typeOf (undefined :: Summary (NList D1 (PCM Int16))) =
        sld <$> (cast s :: Maybe (Summary (NList D1 (PCM Int16))))
    | typeOf s == typeOf (undefined :: Summary (NList D1 (PCM Int32))) =
        sld <$> (cast s :: Maybe (Summary (NList D1 (PCM Int32))))
    | typeOf s == typeOf (undefined :: Summary (NList D1 (PCM Int64))) =
        sld <$> (cast s :: Maybe (Summary (NList D1 (PCM Int64))))
    | otherwise = Nothing
  where
    -- Single-channel (non-NList) case, handled by 'toSummaryPCMDouble'.
    sd = toSummaryPCMDouble s
    sl :: Summary (NList D1 a) -> [Summary a]
    sl = summaryNListToList
    -- Split the channels, then coerce each channel's summary;
    -- channels of unsupported types are dropped.
    -- (Idiom: 'mapMaybe f' replaces 'catMaybes . map f'.)
    sld :: Typeable a => Summary (NList D1 a) -> [Summary (PCM Double)]
    sld = mapMaybe toSummaryPCMDouble . sl
----------------------------------------------------------------------
-- | Stream blocks as timestamped @'PCM' 'Double'@ samples.
enumPCMDouble :: (Functor m, MonadIO m)
              => I.Enumeratee [Offset Block] [(TimeStamp, PCM Double)] m a
enumPCMDouble = I.joinI . enumPackets . I.mapChunks (concatMap toPairs)
  where
    toPairs :: Packet -> [(TimeStamp, PCM Double)]
    toPairs Packet{..} = zip packetTimeStamps (rawToPCMDouble packetData)
-- | Stream blocks as timestamped lists of per-channel @'PCM' 'Double'@
-- samples.
enumListPCMDouble :: (Functor m, MonadIO m)
                  => I.Enumeratee [Offset Block] [(TimeStamp, [PCM Double])] m a
enumListPCMDouble = I.joinI . enumPackets . I.mapChunks (concatMap toPairs)
  where
    toPairs :: Packet -> [(TimeStamp, [PCM Double])]
    toPairs Packet{..} = zip packetTimeStamps (rawToListPCMDouble packetData)
----------------------------------------------------------------------
-- | Read the summary of an entire track.
wholeTrackSummaryPCMDouble :: (Functor m, MonadIO m)
                           => [IdentifyCodec]
                           -> TrackNo
                           -> I.Iteratee (Offset ByteString) m (Summary (PCM Double))
-- Filters to the requested track, converts every summary to PCM
-- Double (dropping non-PCM summaries), and keeps the last — i.e. the
-- whole-track summary.
wholeTrackSummaryPCMDouble identifiers trackNo = I.joinI $ enumCacheFile identifiers .
    I.joinI . filterTracks [trackNo] . I.joinI . e $ I.last
  where
    -- Idiom: 'mapMaybe toSD' replaces 'catMaybes . map toSD'.
    e = I.joinI . enumSummaries . I.mapChunks (mapMaybe toSD)
    toSD :: ZoomSummary -> Maybe (Summary (PCM Double))
    toSD (ZoomSummary s) = toSummaryPCMDouble s
-- | Stream summaries at the given level as @'Summary' ('PCM' 'Double')@,
-- dropping summaries whose payload is not a supported PCM type.
enumSummaryPCMDouble :: (Functor m, MonadIO m)
                     => Int
                     -> I.Enumeratee [Offset Block] [Summary (PCM Double)] m a
enumSummaryPCMDouble level =
    I.joinI . enumSummaryLevel level .
    -- Idiom: 'mapMaybe toSD' replaces 'catMaybes . map toSD'.
    I.mapChunks (mapMaybe toSD)
  where
    toSD :: ZoomSummary -> Maybe (Summary (PCM Double))
    toSD (ZoomSummary s) = toSummaryPCMDouble s
-- | Read the summary of an entire track.
wholeTrackSummaryListPCMDouble :: (Functor m, MonadIO m)
                               => [IdentifyCodec]
                               -> TrackNo
                               -> I.Iteratee (Offset ByteString) m [Summary (PCM Double)]
-- Multichannel analogue of 'wholeTrackSummaryPCMDouble': keeps the
-- last per-channel summary list for the requested track.
wholeTrackSummaryListPCMDouble identifiers trackNo =
    I.joinI $ enumCacheFile identifiers .
    I.joinI . filterTracks [trackNo] . I.joinI . e $ I.last
  where
    -- Idiom: 'mapMaybe toSLD' replaces 'catMaybes . map toSLD'.
    e = I.joinI . enumSummaries . I.mapChunks (mapMaybe toSLD)
    toSLD :: ZoomSummary -> Maybe [Summary (PCM Double)]
    toSLD (ZoomSummary s) = toSummaryListPCMDouble s
-- | Stream summaries at the given level as per-channel lists of
-- @'Summary' ('PCM' 'Double')@, dropping unsupported payloads.
enumSummaryListPCMDouble :: (Functor m, MonadIO m)
                         => Int
                         -> I.Enumeratee [Offset Block] [[Summary (PCM Double)]] m a
enumSummaryListPCMDouble level =
    I.joinI . enumSummaryLevel level .
    -- Idiom: 'mapMaybe toSLD' replaces 'catMaybes . map toSLD'.
    I.mapChunks (mapMaybe toSLD)
  where
    toSLD :: ZoomSummary -> Maybe [Summary (PCM Double)]
    toSLD (ZoomSummary s) = toSummaryListPCMDouble s
| kfish/zoom-cache-pcm | Data/ZoomCache/PCM/Enumeratee.hs | bsd-2-clause | 10,562 | 0 | 15 | 3,797 | 3,519 | 1,769 | 1,750 | 157 | 1 |
{-|
Module: HaskHOL.Lib.Haskell.A.Base
Copyright: (c) The University of Kansas 2013
LICENSE: BSD3
Maintainer: ecaustin@ittc.ku.edu
Stability: unstable
Portability: unknown
-}
module HaskHOL.Lib.Haskell.A.Base where
import HaskHOL.Core
import HaskHOL.Math
-- | HOL type definition mirroring Haskell's @Identity@ type, with the
-- single constructor @IdentityIn@.  Returns the pair of theorems
-- produced by 'defineType'.
tyDefIdentity' :: MathCtxt thry => HOL Theory thry (HOLThm, HOLThm)
tyDefIdentity' = defineType [str| Identity = IdentityIn A |]

-- | Definition of the @Identity@ constant as a type abstraction
-- wrapping the 'tyDefIdentity'' constructor @IdentityIn@.
defIdentity' :: MathCtxt thry => HOL Theory thry HOLThm
defIdentity' = newDefinition "Identity"
    [str| Identity = \\ 'a. \ x:'a. IdentityIn x |]

-- | HOL type definition mirroring Haskell's @Maybe@ type, with
-- constructors @Nothing@ and @JustIn@.
tyDefMaybe' :: MathCtxt thry => HOL Theory thry (HOLThm, HOLThm)
tyDefMaybe' = defineType [str| Maybe = Nothing | JustIn A |]

-- | Definition of the @Just@ constant as a type abstraction wrapping
-- the 'tyDefMaybe'' constructor @JustIn@.
defJust' :: MathCtxt thry => HOL Theory thry HOLThm
defJust' = newDefinition "Just"
    [str| Just = \\ 'a. \ x:'a. JustIn x |]

-- | Definition of the @EQ@ constant: polymorphic equality expressed as
-- a type abstraction over the built-in @=@.
defEQ' :: MathCtxt thry => HOL Theory thry HOLThm
defEQ' = newDefinition "EQ"
    [str| EQ = \\ 'a. \ x:'a y. x = y |]
-- Either

-- | HOL type definition mirroring Haskell's @Either@ type, with
-- constructors @LeftIn@ and @RightIn@.
tyDefEither' :: MathCtxt thry => HOL Theory thry (HOLThm, HOLThm)
tyDefEither' = defineType [str| Either = LeftIn A | RightIn B |]

-- | Definition of the @Left@ constant; the annotation pins the result
-- to @('a, 'b) Either@ so the second type argument is not lost.
defLeft' :: MathCtxt thry => HOL Theory thry HOLThm
defLeft' = newDefinition "Left"
    [str| Left = \\ 'a 'b. \ x:'a. (LeftIn x):(('a, 'b) Either) |]

-- | Definition of the @Right@ constant; annotated like 'defLeft'' to
-- keep both type arguments of @Either@.
defRight' :: MathCtxt thry => HOL Theory thry HOLThm
defRight' = newDefinition "Right"
    [str| Right = \\ 'a 'b. \ x:'b. (RightIn x):(('a, 'b) Either) |]
| ecaustin/haskhol-haskell | src/HaskHOL/Lib/Haskell/A/Base.hs | bsd-2-clause | 1,384 | 0 | 7 | 264 | 297 | 168 | 129 | -1 | -1 |
-- | Representation and computation of visiblity of atomic commands
-- by clients.
--
-- See
-- <https://github.com/LambdaHack/LambdaHack/wiki/Client-server-architecture>.
module Game.LambdaHack.Atomic.PosAtomicRead
( PosAtomic(..), posUpdAtomic, posSfxAtomic, iidUpdAtomic, iidSfxAtomic
, breakUpdAtomic, lidOfPos, seenAtomicCli, seenAtomicSer
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, pointsProjBody, posProjBody, singleAid, doubleAid
, singleContainerStash, singleContainerActor
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import Game.LambdaHack.Atomic.CmdAtomic
import Game.LambdaHack.Common.Actor
import Game.LambdaHack.Common.ActorState
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Perception
import Game.LambdaHack.Common.Point
import Game.LambdaHack.Common.State
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Definition.Defs
-- All functions here that take an atomic action are executed
-- in the state just before the action is executed.
-- | The type representing visibility of atomic commands to factions,
-- based on the position of the command, etc. Note that the server
-- sees and smells all positions. Also note that hearing is not covered
-- because it gives very restricted information, so hearing doesn't equal
-- seeing (and we assume smelling actors get lots of data from smells).
--
-- See 'posUpdAtomic' and 'posSfxAtomic' below for how the visibility
-- of each command is computed.
data PosAtomic =
    PosSight LevelId [Point]    -- ^ whomever sees all the positions, notices
  | PosFidAndSight FactionId LevelId [Point]
                                -- ^ observers and the faction notice
  | PosSmell LevelId [Point]    -- ^ whomever smells all the positions, notices
  | PosSightLevels [(LevelId, Point)]
                                -- ^ whomever sees all the positions, notices
  | PosFid FactionId            -- ^ only the faction notices, server doesn't
  | PosFidAndSer FactionId      -- ^ faction and server notices
  | PosSer                      -- ^ only the server notices
  | PosAll                      -- ^ everybody notices
  | PosNone                     -- ^ never broadcasted, but sent manually
  deriving (Show, Eq)
-- | Produce the positions where the atomic update takes place or, more
-- generally, the conditions under which the update can be noticed by
-- a client.
--
-- The goal of this mechanics is to ensure that atomic commands involving
-- some positions visible by a client convey similar information as the client
-- would get by directly observing the changes
-- of the portion of server state limited to the visible positions.
-- Consequently, when the visible commands are later applied
-- to the client's state, the state stays consistent
-- --- in sync with the server state and correctly limited by visiblity.
-- There is some wiggle room both in what "in sync" and
-- "visible" means and how they propagate through time.
--
-- E.g., @UpdDisplaceActor@ in a black room between two enemy actors,
-- with only one actor carrying a 0-radius light would not be
-- distinguishable by looking at the state (or the screen) from @UpdMoveActor@
-- of the illuminated actor, hence such @UpdDisplaceActor@ should not be
-- observable, but @UpdMoveActor@ in similar cotext would be
-- (or the former should be perceived as the latter).
-- However, to simplify, we assign as strict visibility
-- requirements to @UpdMoveActor@ as to @UpdDisplaceActor@ and fall back
-- to @UpdSpotActor@ (which provides minimal information that does not
-- contradict state) if the visibility is lower.
posUpdAtomic :: MonadStateRead m => UpdAtomic -> m PosAtomic
-- Executed in the state just before the command is applied (see the
-- note at the top of the module).
posUpdAtomic cmd = case cmd of
  UpdRegisterItems{} -> return PosNone
  UpdCreateActor _ body _ -> return $! posProjBody body
  UpdDestroyActor _ body _ -> return $! posProjBody body
  UpdCreateItem _ _ _ _ c -> singleContainerStash c
  UpdDestroyItem _ _ _ _ c -> singleContainerStash c
  UpdSpotActor _ body -> return $! posProjBody body
  UpdLoseActor _ body -> return $! posProjBody body
  UpdSpotItem _ _ _ c -> singleContainerStash c
  UpdLoseItem _ _ _ c -> singleContainerStash c
  UpdSpotItemBag _ c _ -> singleContainerStash c
  UpdLoseItemBag _ c _ -> singleContainerStash c
  UpdMoveActor aid fromP toP -> do
    b <- getsState $ getActorBody aid
    -- Non-projectile actors are never totally isolated from environment;
    -- they hear, feel air movement, etc.
    return $! pointsProjBody b [fromP, toP]
  UpdWaitActor aid _ _ -> singleAid aid
  UpdDisplaceActor source target -> doubleAid source target
  UpdMoveItem _ _ aid cstore1 cstore2 -> do
    -- Either store may be the shared stash, which lives at its own
    -- level position; if so, that position must be seen as well.
    b <- getsState $ getActorBody aid
    mlidPos1 <- lidPosOfStash b cstore1
    mlidPos2 <- lidPosOfStash b cstore2
    let mlidPos = mlidPos1 `mplus` mlidPos2
    return $! maybe (posProjBody b)
                    (\lidPos -> PosSightLevels [lidPos, (blid b, bpos b)])
                    mlidPos
  UpdRefillHP aid _ -> singleAid aid
  UpdRefillCalm aid _ -> singleAid aid
  UpdTrajectory aid _ _ -> singleAid aid
  UpdQuitFaction{} -> return PosAll
  UpdSpotStashFaction _ fid lid pos -> return $! PosFidAndSight fid lid [pos]
  UpdLoseStashFaction _ fid lid pos -> return $! PosFidAndSight fid lid [pos]
  UpdLeadFaction fid _ _ -> return $! PosFidAndSer fid
  UpdDiplFaction{} -> return PosAll
  UpdDoctrineFaction{} -> return PosAll  -- make faction lore fun
  UpdAutoFaction{} -> return PosAll
  UpdRecordKill aid _ _ -> singleAid aid
  UpdAlterTile lid p _ _ -> return $! PosSight lid [p]
  UpdAlterExplorable{} -> return PosAll
    -- Can't have @PosSight@, because we'd end up with many accessible
    -- unknown tiles, but the game reporting 'all seen'.
  UpdAlterGold{} -> return PosAll
  UpdSearchTile aid p _ -> do
    b <- getsState $ getActorBody aid
    return $! pointsProjBody b [bpos b, p]
  UpdHideTile aid p _ -> do
    b <- getsState $ getActorBody aid
    return $! pointsProjBody b [bpos b, p]
  UpdSpotTile lid ts -> do
    let ps = map fst ts
    return $! PosSight lid ps
  UpdLoseTile lid ts -> do
    let ps = map fst ts
    return $! PosSight lid ps
  UpdSpotEntry lid ts -> do
    let ps = map fst ts
    return $! PosSight lid ps
  UpdLoseEntry lid ts -> do
    let ps = map fst ts
    return $! PosSight lid ps
  UpdAlterSmell lid p _ _ -> return $! PosSmell lid [p]
  UpdSpotSmell lid sms -> do
    let ps = map fst sms
    return $! PosSmell lid ps
  UpdLoseSmell lid sms -> do
    let ps = map fst sms
    return $! PosSmell lid ps
  UpdTimeItem _ c _ _ -> singleContainerStash c
  UpdAgeGame _ -> return PosAll
  UpdUnAgeGame _ -> return PosAll
  UpdDiscover c _ _ _ -> singleContainerActor c
    -- This implies other factions applying items from their inventory,
    -- when we can't see the position of the stash, won't Id the item
    -- for us, even when notice item usage. Thrown items will Id, though,
    -- just as triggering items from the floor or embedded items.
  UpdCover c _ _ _ -> singleContainerActor c
  UpdDiscoverKind c _ _ -> singleContainerActor c
  UpdCoverKind c _ _ -> singleContainerActor c
  UpdDiscoverAspect c _ _ -> singleContainerActor c
  UpdCoverAspect c _ _ -> singleContainerActor c
  UpdDiscoverServer{} -> return PosSer
  UpdCoverServer{} -> return PosSer
  UpdPerception{} -> return PosNone
  UpdRestart fid _ _ _ _ _ -> return $! PosFid fid
  UpdRestartServer _ -> return PosSer
  UpdResume _ _ -> return PosNone
  UpdResumeServer _ -> return PosSer
  UpdKillExit fid -> return $! PosFid fid
  UpdWriteSave -> return PosAll
  UpdHearFid fid _ _ -> return $! PosFid fid
  UpdMuteMessages fid _ -> return $! PosFid fid
-- | Produce the positions where the atomic special effect takes place.
posSfxAtomic :: MonadStateRead m => SfxAtomic -> m PosAtomic
posSfxAtomic cmd = case cmd of
  SfxStrike _ target _ -> singleAid target
  SfxRecoil _ target _ -> singleAid target
  SfxSteal _ target _ -> singleAid target
  SfxRelease _ target _ -> singleAid target
  SfxProject aid _ -> singleAid aid
  SfxReceive aid _ -> singleAid aid
  SfxApply aid _ -> singleAid aid
  SfxCheck aid _ -> singleAid aid
  -- Both the triggered tile's position and the actor's position matter.
  SfxTrigger aid lid p _ -> do
    body <- getsState $ getActorBody aid
    return $! PosSightLevels [(lid, p), (blid body, bpos body)]
      -- @PosFidAndSightLevels@ would be better, but no big deal
  SfxShun aid lid p _ -> do
    body <- getsState $ getActorBody aid
    return $! PosSightLevels [(lid, p), (blid body, bpos body)]
  SfxEffect _ aid _ _ _ -> singleAid aid  -- sometimes we don't see source, OK
  SfxItemApplied _ _ c -> singleContainerActor c
  SfxMsgFid fid _ -> return $! PosFid fid
  SfxRestart -> return PosAll
  SfxCollideTile aid _ -> singleAid aid
  SfxTaunt _ aid -> singleAid aid
-- | All items introduced by the atomic command, to be used in it.
--
-- Branches returning @[]@ either carry no item ids or only reference
-- items clients already know (see the per-branch comments below).
iidUpdAtomic :: UpdAtomic -> [ItemId]
iidUpdAtomic cmd = case cmd of
  UpdRegisterItems{} -> []
  UpdCreateActor{} -> []  -- iids and items needed even on server
  UpdDestroyActor{} -> []
  UpdCreateItem{} -> []
  UpdDestroyItem{} -> []
  UpdSpotActor _ body -> getCarriedIidsAndTrunk body
  UpdLoseActor{} -> []  -- already seen, so items known
  UpdSpotItem _ iid _ _ -> [iid]
  UpdLoseItem{} -> []
  UpdSpotItemBag _ _ bag -> EM.keys bag
  UpdLoseItemBag{} -> []
  UpdMoveActor{} -> []
  UpdWaitActor{} -> []
  UpdDisplaceActor{} -> []
  UpdMoveItem{} -> []
  UpdRefillHP{} -> []
  UpdRefillCalm{} -> []
  UpdTrajectory{} -> []
  UpdQuitFaction{} -> []
  UpdSpotStashFaction{} -> []
  UpdLoseStashFaction{} -> []
  UpdLeadFaction{} -> []
  UpdDiplFaction{} -> []
  UpdDoctrineFaction{} -> []
  UpdAutoFaction{} -> []
  UpdRecordKill{} -> []
  UpdAlterTile{} -> []
  UpdAlterExplorable{} -> []
  UpdAlterGold{} -> []
  UpdSearchTile{} -> []
  UpdHideTile{} -> []
  UpdSpotTile{} -> []
  UpdLoseTile{} -> []
  UpdSpotEntry{} -> []
  UpdLoseEntry{} -> []
  UpdAlterSmell{} -> []
  UpdSpotSmell{} -> []
  UpdLoseSmell{} -> []
  UpdTimeItem iid _ _ _ -> [iid]
  UpdAgeGame{} -> []
  UpdUnAgeGame{} -> []
  UpdDiscover _ iid _ _ -> [iid]
  UpdCover _ iid _ _ -> [iid]
  UpdDiscoverKind{} -> []
  UpdCoverKind{} -> []
  UpdDiscoverAspect _ iid _ -> [iid]
  UpdCoverAspect _ iid _ -> [iid]
  UpdDiscoverServer{} -> []  -- never sent to clients
  UpdCoverServer{} -> []
  UpdPerception{} -> []
  UpdRestart{} -> []
  UpdRestartServer{} -> []
  UpdResume{} -> []
  UpdResumeServer{} -> []
  UpdKillExit{} -> []
  UpdWriteSave -> []
  UpdHearFid{} -> []
  UpdMuteMessages{} -> []
-- | All items introduced by the atomic special effect, to be used in it.
iidSfxAtomic :: SfxAtomic -> [ItemId]
iidSfxAtomic cmd = case cmd of
  SfxStrike _ _ iid -> [iid]
  SfxRecoil _ _ iid -> [iid]
  SfxSteal _ _ iid -> [iid]
  SfxRelease _ _ iid -> [iid]
  SfxProject _ iid -> [iid]
  SfxReceive _ iid -> [iid]
  SfxApply _ iid -> [iid]
  SfxCheck _ iid -> [iid]
  SfxTrigger{} -> []
  SfxShun{} -> []
  SfxEffect{} -> []
  SfxItemApplied _ iid _ -> [iid]
  SfxMsgFid{} -> []
  SfxRestart{} -> []
  SfxCollideTile{} -> []
  SfxTaunt{} -> []
-- | Visibility for a set of points relative to an actor's body:
-- projectiles are merely seen, while for other actors the owning
-- faction is always notified as well.
pointsProjBody :: Actor -> [Point] -> PosAtomic
pointsProjBody body ps
  | bproj body = PosSight (blid body) ps
  | otherwise = PosFidAndSight (bfid body) (blid body) ps
-- | Visibility determined solely by an actor's own position.
posProjBody :: Actor -> PosAtomic
posProjBody b = pointsProjBody b [bpos b]
-- | Visibility of a command concerning a single actor, looked up by id.
singleAid :: MonadStateRead m => ActorId -> m PosAtomic
singleAid aid =
  getsState (getActorBody aid) >>= \body -> return $! posProjBody body
-- | Visibility of a command concerning two actors on the same level.
doubleAid :: MonadStateRead m => ActorId -> ActorId -> m PosAtomic
doubleAid source target = do
  bodyS <- getsState $ getActorBody source
  bodyT <- getsState $ getActorBody target
  -- No @PosFidAndSight@ instead of @PosSight@, because both positions
  -- need to be seen to have the enemy actor in client's state.
  let poss = PosSight (blid bodyS) [bpos bodyS, bpos bodyT]
  return $! assert (blid bodyS == blid bodyT) poss
-- | Visibility of a command that may manipulate the faction's stash:
-- for an actor container, the stash position (if any) must be seen too.
singleContainerStash :: MonadStateRead m => Container -> m PosAtomic
singleContainerStash c = case c of
  CFloor lid p -> return $! PosSight lid [p]
  CEmbed lid p -> return $! PosSight lid [p]
  CTrunk fid lid p -> return $! PosFidAndSight fid lid [p]
  CActor aid cstore -> do
    body <- getsState $ getActorBody aid
    mlidPos <- lidPosOfStash body cstore
    -- The actor's position is needed so that a message about the actor
    -- is not sent to a client that doesn't know the actor; the actor's
    -- faction is ignored, because for these operations the actor
    -- doesn't vanish.
    return $! case mlidPos of
      Nothing -> posProjBody body
      Just lidPos -> PosSightLevels [lidPos, (blid body, bpos body)]
-- | Visibility of a command that only concerns the container's holder.
singleContainerActor :: MonadStateRead m => Container -> m PosAtomic
singleContainerActor c = case c of
  CFloor lid p -> return $! PosSight lid [p]
  CEmbed lid p -> return $! PosSight lid [p]
  CTrunk fid lid p -> return $! PosFidAndSight fid lid [p]
  -- The stash position is ignored, because for these operations nothing
  -- is added at that position; the store name is only used for flavour
  -- text.
  CActor aid _ -> do
    body <- getsState $ getActorBody aid
    return $! posProjBody body
-- | The level and position of the faction's shared stash, if the given
-- store is 'CStash'; @Nothing@ for all other stores.  A 'CStash' store
-- with no stash recorded is an invariant violation and fails hard.
lidPosOfStash :: MonadStateRead m
              => Actor -> CStore -> m (Maybe (LevelId, Point))
lidPosOfStash body CStash = do
  mloc <- getsState $ \s -> gstash $ sfactionD s EM.! bfid body
  case mloc of
    Just{} -> return mloc
    Nothing -> error $ "manipulating void stash" `showFailure` body
lidPosOfStash _ _ = return Nothing
-- | Decompose an atomic action that is outside a client's visiblity.
-- The decomposed actions give less information that the original command,
-- but some of them may fall within the visibility range of the client.
-- The original action may give more information than even the total sum
-- of all actions it's broken into. E.g., @UpdMoveActor@
-- informs about the continued existence of the actor between
-- moves vs popping out of existence and then back in.
--
-- This is computed in server's @State@ from before performing the command.
breakUpdAtomic :: MonadStateRead m => UpdAtomic -> m [UpdAtomic]
breakUpdAtomic cmd = case cmd of
  -- The stash-manipulating branches all rewrite a @CActor aid CStash@
  -- container into the @CFloor@ position of the faction's stash,
  -- via the shared @stashPos@ helper below.
  UpdCreateItem verbose iid item kit (CActor aid CStash) -> do
    b <- getsState $ getActorBody aid
    (lid, pos) <- stashPos b (aid, b, item)
    return [UpdCreateItem verbose iid item kit (CFloor lid pos)]
  UpdDestroyItem verbose iid item kit (CActor aid CStash) -> do
    b <- getsState $ getActorBody aid
    (lid, pos) <- stashPos b (aid, b, item)
    return [UpdDestroyItem verbose iid item kit (CFloor lid pos)]
  UpdSpotItem verbose iid kit (CActor aid CStash) -> do
    b <- getsState $ getActorBody aid
    (lid, pos) <- stashPos b (aid, b, iid)
    return [UpdSpotItem verbose iid kit (CFloor lid pos)]
  UpdLoseItem verbose iid kit (CActor aid CStash) -> do
    b <- getsState $ getActorBody aid
    (lid, pos) <- stashPos b (aid, b, iid)
    return [UpdLoseItem verbose iid kit (CFloor lid pos)]
  UpdSpotItemBag verbose (CActor aid CStash) bag -> do
    b <- getsState $ getActorBody aid
    (lid, pos) <- stashPos b (aid, b, bag)
    return [UpdSpotItemBag verbose (CFloor lid pos) bag]
  UpdLoseItemBag verbose (CActor aid CStash) bag -> do
    b <- getsState $ getActorBody aid
    (lid, pos) <- stashPos b (aid, b, bag)
    return [UpdLoseItemBag verbose (CFloor lid pos) bag]
  -- Moves into or out of the stash become a lose/spot pair.
  UpdMoveItem iid k aid CStash store2 -> do
    b <- getsState $ getActorBody aid
    kit <- movedKit b CStash iid k
    (lid, pos) <- stashPos b (aid, b, iid)
    return [ UpdLoseItem True iid kit (CFloor lid pos)
           , UpdSpotItem True iid kit (CActor aid store2) ]
  UpdMoveItem iid k aid store1 CStash -> do
    b <- getsState $ getActorBody aid
    kit <- movedKit b store1 iid k
    (lid, pos) <- stashPos b (aid, b, iid)
    return [ UpdLoseItem True iid kit (CActor aid store1)
           , UpdSpotItem True iid kit (CFloor lid pos) ]
  UpdMoveActor aid fromP toP -> do
    -- We assume other factions don't see leaders and we know the actor's
    -- faction always sees the atomic command and no other commands
    -- may be inserted between the two below, so the leader doesn't
    -- need to be updated, even when aid is the leader.
    b <- getsState $ getActorBody aid
    return [ UpdLoseActor aid b
           , UpdSpotActor aid b {bpos = toP, boldpos = Just fromP} ]
  UpdDisplaceActor source target -> do
    sb <- getsState $ getActorBody source
    tb <- getsState $ getActorBody target
    -- The order ensures the invariant that no two big actors occupy the same
    -- position is maintained. The actions about leadership are required
    -- to keep faction data (identify of the leader) consistent with actor
    -- data (the actor that is the leader exists). Here, for speed
    -- and simplicity we violate the property that in a faction
    -- that has leaders, if any eligible actor is alive,
    -- the leader is set, because for a moment there may be no leader,
    -- even though other actors of the faction may exist.
    msleader <- getsState $ gleader . (EM.! bfid sb) . sfactionD
    mtleader <- getsState $ gleader . (EM.! bfid tb) . sfactionD
    return $ [ UpdLeadFaction (bfid sb) msleader Nothing
             | Just source == msleader ]
             ++ [ UpdLeadFaction (bfid tb) mtleader Nothing
                | Just target == mtleader ]
             ++ [ UpdLoseActor source sb
                , UpdLoseActor target tb
                , UpdSpotActor source sb { bpos = bpos tb
                                         , boldpos = Just $ bpos sb }
                , UpdSpotActor target tb { bpos = bpos sb
                                         , boldpos = Just $ bpos tb } ]
             ++ [ UpdLeadFaction (bfid sb) Nothing msleader
                | Just source == msleader ]
             ++ [ UpdLeadFaction (bfid tb) Nothing mtleader
                | Just target == mtleader ]
  UpdTimeItem iid (CActor aid CStash) fromIt toIt -> do
    b <- getsState $ getActorBody aid
    (lid, pos) <- stashPos b (aid, b, iid)
    return [UpdTimeItem iid (CFloor lid pos) fromIt toIt]
  _ -> return []
 where
  -- Position of the faction's shared stash. All callers here are in the
  -- middle of manipulating the stash, so its absence is a fatal
  -- invariant violation, reported with the given payload.
  stashPos b errPayload = do
    mstash <- getsState $ \s -> gstash $ sfactionD s EM.! bfid b
    case mstash of
      Just lidPos -> return lidPos
      Nothing -> error $ "manipulating void stash" `showFailure` errPayload
  -- The moved part (count @k@ and timers) of the pile of item @iid@
  -- in the given store of actor @b@.
  movedKit b store iid k = do
    bag <- getsState $ getBodyStoreBag b store
    let (k1, it1) = bag EM.! iid
    return $ assert (k <= k1) (k, take k it1)
-- | What is the main map level the @PosAtomic@ refers to, if any.
lidOfPos :: PosAtomic -> Maybe LevelId
lidOfPos (PosSight lid _) = Just lid
lidOfPos (PosFidAndSight _ lid _) = Just lid
lidOfPos (PosSmell lid _) = Just lid
lidOfPos (PosSightLevels ((lid, _) : _)) = Just lid
lidOfPos (PosSightLevels []) = Nothing
lidOfPos PosFid{} = Nothing
lidOfPos PosFidAndSer{} = Nothing
lidOfPos PosSer = Nothing
lidOfPos PosAll = Nothing
lidOfPos PosNone = Nothing
-- | Given the client, its perception and an atomic command, determine
-- if the client notices the command.
seenAtomicCli :: Bool -> FactionId -> PerLid -> PosAtomic -> Bool
seenAtomicCli knowEvents fid perLid posAtomic = case posAtomic of
  PosSight lid ps -> seesAll lid ps || knowEvents
  PosFidAndSight fid2 lid ps ->
    fid == fid2 || seesAll lid ps || knowEvents
  PosSmell lid ps -> smellsAll lid ps || knowEvents
  PosSightLevels lps ->
    all (\(lid, p) -> p `ES.member` totalVisible (perAt lid)) lps
    || knowEvents
  PosFid fid2 -> fid == fid2
  PosFidAndSer fid2 -> fid == fid2
  PosSer -> False
  PosAll -> True
  PosNone -> False
 where
  perAt lid = perLid EM.! lid
  seesAll lid ps = all (`ES.member` totalVisible (perAt lid)) ps
  smellsAll lid ps = all (`ES.member` totalSmelled (perAt lid)) ps
-- | Determine whether the server would see a command that has
-- the given visibilty conditions.
seenAtomicSer :: PosAtomic -> Bool
seenAtomicSer PosNone = error $ "no position possible" `showFailure` PosNone
seenAtomicSer (PosFid _) = False
seenAtomicSer _ = True
| LambdaHack/LambdaHack | engine-src/Game/LambdaHack/Atomic/PosAtomicRead.hs | bsd-3-clause | 21,199 | 0 | 18 | 4,929 | 6,058 | 3,018 | 3,040 | 381 | 58 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.