| code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
import Haste
import Haste.Graphics.Canvas
-- | A 40*40 square with a 20*20 square inside of it and a line running
-- through it.
squareShape :: Shape ()
squareShape = do
rect (-20, -20) (20, 20)
rect (-10, -10) (10, 10)
line (-20, -20) (20, 20)
-- | You can stroke any shape to get a "wireframe" version of it.
square :: Picture ()
square = stroke squareShape
-- | Or you can fill them.
filledSquare :: Picture ()
filledSquare = fill squareShape
-- | Then you grab a canvas object...
main :: IO ()
main = do
Just can <- getCanvasById "canvas"
animate can 0
-- | ...and use the render function to draw your image.
-- The Picture type is a monad, so you can compose several pictures easily
-- using do-notation.
animate :: Canvas -> Double -> IO ()
animate can angle = do
-- There are several transformation functions as well. All of them take a
-- Picture () as their argument, and apply their transformation only to that
-- picture, so the user doesn't need to manage the canvas state machine
-- explicitly.
render can $ do
translate (160, 160) $ rotate angle $ do
square
translate (100, 100) . rotate (-angle) . color (RGB 255 0 0) $ filledSquare
color (RGBA 0 0 255 0.5) . font "20px Bitstream Vera" $ do
text (10, 160) "You can use transparency too!"
setTimeout 10 $ animate can (angle + 0.01)
| joelburget/haste-compiler | examples/canvas-simple/canvas-simple.hs | bsd-3-clause | 1,358 | 0 | 18 | 302 | 355 | 181 | 174 | 24 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.TextureFloat
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/ARB/texture_float.txt ARB_texture_float> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.TextureFloat (
-- * Enums
gl_ALPHA16F_ARB,
gl_ALPHA32F_ARB,
gl_INTENSITY16F_ARB,
gl_INTENSITY32F_ARB,
gl_LUMINANCE16F_ARB,
gl_LUMINANCE32F_ARB,
gl_LUMINANCE_ALPHA16F_ARB,
gl_LUMINANCE_ALPHA32F_ARB,
gl_RGB16F_ARB,
gl_RGB32F_ARB,
gl_RGBA16F_ARB,
gl_RGBA32F_ARB,
gl_TEXTURE_ALPHA_TYPE_ARB,
gl_TEXTURE_BLUE_TYPE_ARB,
gl_TEXTURE_DEPTH_TYPE_ARB,
gl_TEXTURE_GREEN_TYPE_ARB,
gl_TEXTURE_INTENSITY_TYPE_ARB,
gl_TEXTURE_LUMINANCE_TYPE_ARB,
gl_TEXTURE_RED_TYPE_ARB,
gl_UNSIGNED_NORMALIZED_ARB
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/TextureFloat.hs | bsd-3-clause | 1,121 | 0 | 4 | 135 | 94 | 69 | 25 | 22 | 0 |
{-# LANGUAGE CPP, MagicHash, UnboxedTuples, NoImplicitPrelude #-}
{-# OPTIONS_HADDOCK hide #-}
#include "MachDeps.h"
-- (Hopefully) Fast integer logarithms to base 2.
-- integerLog2# and wordLog2# are of general usefulness,
-- the others are only needed for a fast implementation of
-- fromRational.
-- Since they are needed in GHC.Float, we must expose this
-- module, but it should not show up in the docs.
module GHC.Integer.Logarithms.Internals
( integerLog2#
, integerLog2IsPowerOf2#
, wordLog2#
, roundingMode#
) where
import GHC.Prim
import GHC.Integer.Type
default ()
-- When larger word sizes become common, add support for those,
-- it's not hard, just tedious.
#if (WORD_SIZE_IN_BITS != 32) && (WORD_SIZE_IN_BITS != 64)
-- We don't know whether the word has 30 bits or 128 or even more,
-- so we can't start from the top, although that would be much more
-- efficient.
wordLog2# :: Word# -> Int#
wordLog2# w = go 8# w
where
go acc u = case u `uncheckedShiftRL#` 8# of
0## -> case leadingZeros of
BA ba -> acc -# indexInt8Array# ba (word2Int# u)
v -> go (acc +# 8#) v
#else
-- This one at least can also be done efficiently.
-- wordLog2# 0## = -1#
{-# INLINE wordLog2# #-}
wordLog2# :: Word# -> Int#
wordLog2# w =
case leadingZeros of
BA lz ->
let zeros u = indexInt8Array# lz (word2Int# u) in
#if WORD_SIZE_IN_BITS == 64
case uncheckedShiftRL# w 56# of
a ->
if a `neWord#` 0##
then 64# -# zeros a
else
case uncheckedShiftRL# w 48# of
b ->
if b `neWord#` 0##
then 56# -# zeros b
else
case uncheckedShiftRL# w 40# of
c ->
if c `neWord#` 0##
then 48# -# zeros c
else
case uncheckedShiftRL# w 32# of
d ->
if d `neWord#` 0##
then 40# -# zeros d
else
#endif
case uncheckedShiftRL# w 24# of
e ->
if e `neWord#` 0##
then 32# -# zeros e
else
case uncheckedShiftRL# w 16# of
f ->
if f `neWord#` 0##
then 24# -# zeros f
else
case uncheckedShiftRL# w 8# of
g ->
if g `neWord#` 0##
then 16# -# zeros g
else 8# -# zeros w
#endif
-- Assumption: Integer is strictly positive,
-- otherwise return -1# arbitrarily
-- Going up in word-sized steps should not be too bad.
integerLog2# :: Integer -> Int#
integerLog2# (Positive digits) = step 0# digits
where
step acc (Some dig None) = acc +# wordLog2# dig
step acc (Some _ digs) =
step (acc +# WORD_SIZE_IN_BITS#) digs
step acc None = acc -- should be impossible, throw error?
integerLog2# _ = negateInt# 1#
-- Again, integer should be strictly positive
integerLog2IsPowerOf2# :: Integer -> (# Int#, Int# #)
integerLog2IsPowerOf2# (Positive digits) = couldBe 0# digits
where
couldBe acc (Some dig None) =
(# acc +# wordLog2# dig, word2Int# (and# dig (minusWord# dig 1##)) #)
couldBe acc (Some dig digs) =
if eqWord# dig 0##
then couldBe (acc +# WORD_SIZE_IN_BITS#) digs
else noPower (acc +# WORD_SIZE_IN_BITS#) digs
couldBe acc None = (# acc, 1# #) -- should be impossible, error?
noPower acc (Some dig None) =
(# acc +# wordLog2# dig, 1# #)
noPower acc (Some _ digs) =
noPower (acc +# WORD_SIZE_IN_BITS#) digs
noPower acc None = (# acc, 1# #) -- should be impossible, error?
integerLog2IsPowerOf2# _ = (# negateInt# 1#, 1# #)
-- Assumption: Integer and Int# are strictly positive, Int# is less
-- than logBase 2 of Integer, otherwise havoc ensues.
-- Used only for the numerator in fromRational when the denominator
-- is a power of 2.
-- The Int# argument is log2 n minus the number of bits in the mantissa
-- of the target type, i.e. the index of the first non-integral bit in
-- the quotient.
--
-- 0# means round down (towards zero)
-- 1# means we have a half-integer, round to even
-- 2# means round up (away from zero)
-- This function should probably be improved.
roundingMode# :: Integer -> Int# -> Int#
roundingMode# m h =
case oneInteger `shiftLInteger` h of
c -> case m `andInteger`
((c `plusInteger` c) `minusInteger` oneInteger) of
r ->
if c `ltInteger` r
then 2#
else if c `gtInteger` r
then 0#
else 1#
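-- A worked example of the convention above (illustrative only, not part of
-- this module): with m = 22 (binary 10110) and h = 2#, the fractional bits
-- are the low three bits 110 (= 6). Since 6 > 2^2 = 4, the fraction is
-- greater than a half and roundingMode# m 2# evaluates to 2# (round up);
-- a value of exactly 4 would give 1# (round to even), and anything below 4
-- would give 0# (round down).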
-- Lookup table
data BA = BA ByteArray#
leadingZeros :: BA
leadingZeros =
let mkArr s =
case newByteArray# 256# s of
(# s1, mba #) ->
case writeInt8Array# mba 0# 9# s1 of
s2 ->
let fillA lim val idx st =
if idx ==# 256#
then st
else if idx <# lim
then case writeInt8Array# mba idx val st of
nx -> fillA lim val (idx +# 1#) nx
else fillA (2# *# lim) (val -# 1#) idx st
in case fillA 2# 8# 1# s2 of
s3 -> case unsafeFreezeByteArray# mba s3 of
(# _, ba #) -> ba
in case mkArr realWorld# of
b -> BA b
| haskell-suite/integer-simple | GHC/Integer/Logarithms/Internals.hs | bsd-3-clause | 5,669 | 0 | 26 | 2,116 | 969 | 522 | 447 | 69 | 6 |
module App.TypeClasses where
import App.Input
import App.Clock
class UserData a where
-- | Starts up the app. Init resources, etc.
onStart :: a -- ^ User's custom data structure.
-> IO a
-- | Gives input to the app. Called just before step.
onInput :: Input -- ^ The current input.
-> a -- ^ The user's custom data structure.
-> a
-- | The main step function.
onStep :: Clock -- ^ The time elapsed since last tick.
-> a -- ^ The user's custom data structure.
-> IO a
-- | Render the app.
onRender :: a -- ^ The user's custom data structure.
-> IO ()
-- | Cleanup and prepare to exit. Save state, etc.
onQuit :: a -- ^ User's custom data structure.
-> IO ()
-- | Tells when the app should quit. This is called after each step;
-- if it returns True, stepping will cease and onQuit will be called before
-- exiting the program.
shouldQuit :: a -- ^ User's custom data structure.
-> Bool
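-- A minimal example instance, purely illustrative: 'DemoState' below is a
-- hypothetical type that is not part of this package. The instance just
-- counts steps and asks to quit after 1000 of them.
data DemoState = DemoState { demoTicks :: Int }

instance UserData DemoState where
  onStart s    = return s
  onInput _ s  = s
  onStep _ s   = return s { demoTicks = demoTicks s + 1 }
  onRender _   = return ()
  onQuit _     = return ()
  shouldQuit s = demoTicks s > 1000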
| schell/blocks | src/App/TypeClasses.hs | bsd-3-clause | 1,060 | 0 | 9 | 348 | 119 | 70 | 49 | 18 | 0 |
module Main where
import Lib
main :: IO ()
main = do
putStrLn "Test1"
putStrLn $ myFunction "Hello World!"
| jkeuhlen/haskellscratch | app/Main.hs | bsd-3-clause | 116 | 0 | 8 | 28 | 38 | 19 | 19 | 6 | 1 |
{-
(c) Bartosz Nitka, Facebook, 2015
UniqDFM: Specialised deterministic finite maps, for things with @Uniques@.
Basically, the things need to be in class @Uniquable@, and we use the
@getUnique@ method to grab their @Uniques@.
This is very similar to @UniqFM@, the major difference being that the order of
folding is not dependent on @Unique@ ordering, giving determinism.
Currently the ordering is determined by insertion order.
See Note [Unique Determinism] in Unique for an explanation of why @Unique@
ordering is not deterministic.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -Wall #-}
module UniqDFM (
-- * Unique-keyed deterministic mappings
UniqDFM, -- abstract type
-- ** Manipulating those mappings
emptyUDFM,
unitUDFM,
addToUDFM,
addToUDFM_C,
addListToUDFM,
delFromUDFM,
delListFromUDFM,
adjustUDFM,
alterUDFM,
mapUDFM,
plusUDFM,
plusUDFM_C,
lookupUDFM, lookupUDFM_Directly,
elemUDFM,
foldUDFM,
eltsUDFM,
filterUDFM, filterUDFM_Directly,
isNullUDFM,
sizeUDFM,
intersectUDFM, udfmIntersectUFM,
intersectsUDFM,
disjointUDFM, disjointUdfmUfm,
equalKeysUDFM,
minusUDFM,
listToUDFM,
udfmMinusUFM,
partitionUDFM,
anyUDFM, allUDFM,
pprUniqDFM, pprUDFM,
udfmToList,
udfmToUfm,
nonDetFoldUDFM,
alwaysUnsafeUfmToUdfm,
) where
import GhcPrelude
import Unique ( Uniquable(..), Unique, getKey )
import Outputable
import qualified Data.IntMap as M
import Data.Data
import Data.Functor.Classes (Eq1 (..))
import Data.List (sortBy)
import Data.Function (on)
import qualified Data.Semigroup as Semi
import UniqFM (UniqFM, listToUFM_Directly, nonDetUFMToList, ufmToIntMap)
-- Note [Deterministic UniqFM]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- A @UniqDFM@ is just like @UniqFM@ with the following additional
-- property: the function `udfmToList` returns the elements in some
-- deterministic order not depending on the Unique key for those elements.
--
-- If the client of the map performs operations on the map in deterministic
-- order then `udfmToList` returns them in deterministic order.
--
-- There is an implementation cost: each element is given a serial number
-- as it is added, and `udfmToList` sorts its result by this serial
-- number. So you should only use `UniqDFM` if you need the deterministic
-- property.
--
-- `foldUDFM` also preserves determinism.
--
-- A normal @UniqFM@, when you turn it into a list, will use the
-- Data.IntMap.toList function, which returns the elements in the order
-- of the keys. The keys in @UniqFM@ are always @Uniques@, so you end up
-- with a list ordered by @Uniques@.
-- The order of @Uniques@ is known to be not stable across rebuilds.
-- See Note [Unique Determinism] in Unique.
--
--
-- There's more than one way to implement this. The implementation here tags
-- every value with the insertion time that can later be used to sort the
-- values when asked to convert to a list.
--
-- An alternative would be to have
--
-- data UniqDFM ele = UDFM (M.IntMap ele) [ele]
--
-- where the list determines the order. This makes deletion tricky as we'd
-- only accumulate elements in that list, but makes merging easier as you
-- can just merge both structures independently.
-- Deletion can probably be done in amortized fashion when the size of the
-- list is twice the size of the set.
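-- To make the determinism property concrete, a small illustrative example
-- (not from this module; 'a' and 'b' stand for arbitrary, distinct
-- Uniquable keys):
--
--   eltsUDFM (addToUDFM (unitUDFM a "first") b "second")
--     == ["first", "second"]
--
-- i.e. elements come back in insertion order, no matter how the Uniques of
-- 'a' and 'b' happen to compare in a particular build.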
-- | A type of values tagged with insertion time
data TaggedVal val =
TaggedVal
val
{-# UNPACK #-} !Int -- ^ insertion time
deriving (Data, Functor)
taggedFst :: TaggedVal val -> val
taggedFst (TaggedVal v _) = v
taggedSnd :: TaggedVal val -> Int
taggedSnd (TaggedVal _ i) = i
instance Eq val => Eq (TaggedVal val) where
(TaggedVal v1 _) == (TaggedVal v2 _) = v1 == v2
-- | Type of unique deterministic finite maps
data UniqDFM ele =
UDFM
!(M.IntMap (TaggedVal ele)) -- A map where keys are Unique's values and
-- values are tagged with insertion time.
-- The invariant is that all the tags will
-- be distinct within a single map
{-# UNPACK #-} !Int -- Upper bound on the values' insertion
-- time. See Note [Overflow on plusUDFM]
deriving (Data, Functor)
-- | Deterministic, in O(n log n).
instance Foldable UniqDFM where
foldr = foldUDFM
-- | Deterministic, in O(n log n).
instance Traversable UniqDFM where
traverse f = fmap listToUDFM_Directly
. traverse (\(u,a) -> (u,) <$> f a)
. udfmToList
emptyUDFM :: UniqDFM elt
emptyUDFM = UDFM M.empty 0
unitUDFM :: Uniquable key => key -> elt -> UniqDFM elt
unitUDFM k v = UDFM (M.singleton (getKey $ getUnique k) (TaggedVal v 0)) 1
-- The new binding always goes to the right of existing ones
addToUDFM :: Uniquable key => UniqDFM elt -> key -> elt -> UniqDFM elt
addToUDFM m k v = addToUDFM_Directly m (getUnique k) v
-- The new binding always goes to the right of existing ones
addToUDFM_Directly :: UniqDFM elt -> Unique -> elt -> UniqDFM elt
addToUDFM_Directly (UDFM m i) u v
= UDFM (M.insertWith tf (getKey u) (TaggedVal v i) m) (i + 1)
where
tf (TaggedVal new_v _) (TaggedVal _ old_i) = TaggedVal new_v old_i
-- Keep the old tag, but insert the new value
-- This means that udfmToList typically returns elements
-- in the order of insertion, rather than the reverse
addToUDFM_Directly_C
:: (elt -> elt -> elt) -- old -> new -> result
-> UniqDFM elt
-> Unique -> elt
-> UniqDFM elt
addToUDFM_Directly_C f (UDFM m i) u v
= UDFM (M.insertWith tf (getKey u) (TaggedVal v i) m) (i + 1)
where
tf (TaggedVal new_v _) (TaggedVal old_v old_i)
= TaggedVal (f old_v new_v) old_i
-- Flip the arguments, because M.insertWith uses (new->old->result)
-- but f needs (old->new->result)
-- Like addToUDFM_Directly, keep the old tag
addToUDFM_C
:: Uniquable key => (elt -> elt -> elt) -- old -> new -> result
-> UniqDFM elt -- old
-> key -> elt -- new
-> UniqDFM elt -- result
addToUDFM_C f m k v = addToUDFM_Directly_C f m (getUnique k) v
addListToUDFM :: Uniquable key => UniqDFM elt -> [(key,elt)] -> UniqDFM elt
addListToUDFM = foldl' (\m (k, v) -> addToUDFM m k v)
addListToUDFM_Directly :: UniqDFM elt -> [(Unique,elt)] -> UniqDFM elt
addListToUDFM_Directly = foldl' (\m (k, v) -> addToUDFM_Directly m k v)
addListToUDFM_Directly_C
:: (elt -> elt -> elt) -> UniqDFM elt -> [(Unique,elt)] -> UniqDFM elt
addListToUDFM_Directly_C f = foldl' (\m (k, v) -> addToUDFM_Directly_C f m k v)
delFromUDFM :: Uniquable key => UniqDFM elt -> key -> UniqDFM elt
delFromUDFM (UDFM m i) k = UDFM (M.delete (getKey $ getUnique k) m) i
plusUDFM_C :: (elt -> elt -> elt) -> UniqDFM elt -> UniqDFM elt -> UniqDFM elt
plusUDFM_C f udfml@(UDFM _ i) udfmr@(UDFM _ j)
-- we will use the upper bound on the tag as a proxy for the set size,
-- to insert the smaller one into the bigger one
| i > j = insertUDFMIntoLeft_C f udfml udfmr
| otherwise = insertUDFMIntoLeft_C f udfmr udfml
-- Note [Overflow on plusUDFM]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- There are multiple ways of implementing plusUDFM.
-- The main problem that needs to be solved is overlap on times of
-- insertion between different keys in two maps.
-- Consider:
--
-- A = fromList [(a, (x, 1))]
-- B = fromList [(b, (y, 1))]
--
-- If you merge them naively you end up with:
--
-- C = fromList [(a, (x, 1)), (b, (y, 1))]
--
-- Which loses information about ordering and brings us back into
-- non-deterministic world.
--
-- The solution I considered before would increment the tags on one of the
-- sets by the upper bound of the other set. The problem with this approach
-- is that you'll run out of tags for some merge patterns.
-- Say you start with A with upper bound 1, you merge A with A to get A' and
-- the upper bound becomes 2. You merge A' with A' and the upper bound
-- doubles again. After 64 merges you overflow.
-- This solution would have the same time complexity as plusUFM, namely O(n+m).
--
-- The solution I ended up with has time complexity of
-- O(m log m + m * min (n+m, W)) where m is the smaller set.
-- It simply inserts the elements of the smaller set into the larger
-- set in the order that they were inserted into the smaller set. That's
-- O(m log m) for extracting the elements from the smaller set in the
-- insertion order and O(m * min(n+m, W)) to insert them into the bigger
-- set.
plusUDFM :: UniqDFM elt -> UniqDFM elt -> UniqDFM elt
plusUDFM udfml@(UDFM _ i) udfmr@(UDFM _ j)
-- we will use the upper bound on the tag as a proxy for the set size,
-- to insert the smaller one into the bigger one
| i > j = insertUDFMIntoLeft udfml udfmr
| otherwise = insertUDFMIntoLeft udfmr udfml
insertUDFMIntoLeft :: UniqDFM elt -> UniqDFM elt -> UniqDFM elt
insertUDFMIntoLeft udfml udfmr = addListToUDFM_Directly udfml $ udfmToList udfmr
insertUDFMIntoLeft_C
:: (elt -> elt -> elt) -> UniqDFM elt -> UniqDFM elt -> UniqDFM elt
insertUDFMIntoLeft_C f udfml udfmr =
addListToUDFM_Directly_C f udfml $ udfmToList udfmr
lookupUDFM :: Uniquable key => UniqDFM elt -> key -> Maybe elt
lookupUDFM (UDFM m _i) k = taggedFst `fmap` M.lookup (getKey $ getUnique k) m
lookupUDFM_Directly :: UniqDFM elt -> Unique -> Maybe elt
lookupUDFM_Directly (UDFM m _i) k = taggedFst `fmap` M.lookup (getKey k) m
elemUDFM :: Uniquable key => key -> UniqDFM elt -> Bool
elemUDFM k (UDFM m _i) = M.member (getKey $ getUnique k) m
-- | Performs a deterministic fold over the UniqDFM.
-- It's O(n log n) while the corresponding function on `UniqFM` is O(n).
foldUDFM :: (elt -> a -> a) -> a -> UniqDFM elt -> a
foldUDFM k z m = foldr k z (eltsUDFM m)
-- | Performs a nondeterministic fold over the UniqDFM.
-- It's O(n), same as the corresponding function on `UniqFM`.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetFoldUDFM :: (elt -> a -> a) -> a -> UniqDFM elt -> a
nonDetFoldUDFM k z (UDFM m _i) = foldr k z $ map taggedFst $ M.elems m
eltsUDFM :: UniqDFM elt -> [elt]
eltsUDFM (UDFM m _i) =
map taggedFst $ sortBy (compare `on` taggedSnd) $ M.elems m
filterUDFM :: (elt -> Bool) -> UniqDFM elt -> UniqDFM elt
filterUDFM p (UDFM m i) = UDFM (M.filter (\(TaggedVal v _) -> p v) m) i
filterUDFM_Directly :: (Unique -> elt -> Bool) -> UniqDFM elt -> UniqDFM elt
filterUDFM_Directly p (UDFM m i) = UDFM (M.filterWithKey p' m) i
where
p' k (TaggedVal v _) = p (getUnique k) v
-- | Converts `UniqDFM` to a list, with elements in deterministic order.
-- It's O(n log n) while the corresponding function on `UniqFM` is O(n).
udfmToList :: UniqDFM elt -> [(Unique, elt)]
udfmToList (UDFM m _i) =
[ (getUnique k, taggedFst v)
| (k, v) <- sortBy (compare `on` (taggedSnd . snd)) $ M.toList m ]
-- Determines whether two 'UniqDFM's contain the same keys.
equalKeysUDFM :: UniqDFM a -> UniqDFM b -> Bool
equalKeysUDFM (UDFM m1 _) (UDFM m2 _) = liftEq (\_ _ -> True) m1 m2
isNullUDFM :: UniqDFM elt -> Bool
isNullUDFM (UDFM m _) = M.null m
sizeUDFM :: UniqDFM elt -> Int
sizeUDFM (UDFM m _i) = M.size m
intersectUDFM :: UniqDFM elt -> UniqDFM elt -> UniqDFM elt
intersectUDFM (UDFM x i) (UDFM y _j) = UDFM (M.intersection x y) i
-- M.intersection is left biased, that means the result will only have
-- a subset of elements from the left set, so `i` is a good upper bound.
udfmIntersectUFM :: UniqDFM elt1 -> UniqFM elt2 -> UniqDFM elt1
udfmIntersectUFM (UDFM x i) y = UDFM (M.intersection x (ufmToIntMap y)) i
-- M.intersection is left biased, that means the result will only have
-- a subset of elements from the left set, so `i` is a good upper bound.
intersectsUDFM :: UniqDFM elt -> UniqDFM elt -> Bool
intersectsUDFM x y = isNullUDFM (x `intersectUDFM` y)
disjointUDFM :: UniqDFM elt -> UniqDFM elt -> Bool
disjointUDFM (UDFM x _i) (UDFM y _j) = M.null (M.intersection x y)
disjointUdfmUfm :: UniqDFM elt -> UniqFM elt2 -> Bool
disjointUdfmUfm (UDFM x _i) y = M.null (M.intersection x (ufmToIntMap y))
minusUDFM :: UniqDFM elt1 -> UniqDFM elt2 -> UniqDFM elt1
minusUDFM (UDFM x i) (UDFM y _j) = UDFM (M.difference x y) i
-- M.difference returns a subset of a left set, so `i` is a good upper
-- bound.
udfmMinusUFM :: UniqDFM elt1 -> UniqFM elt2 -> UniqDFM elt1
udfmMinusUFM (UDFM x i) y = UDFM (M.difference x (ufmToIntMap y)) i
-- M.difference returns a subset of a left set, so `i` is a good upper
-- bound.
-- | Partition UniqDFM into two UniqDFMs according to the predicate
partitionUDFM :: (elt -> Bool) -> UniqDFM elt -> (UniqDFM elt, UniqDFM elt)
partitionUDFM p (UDFM m i) =
case M.partition (p . taggedFst) m of
(left, right) -> (UDFM left i, UDFM right i)
-- | Delete a list of elements from a UniqDFM
delListFromUDFM :: Uniquable key => UniqDFM elt -> [key] -> UniqDFM elt
delListFromUDFM = foldl' delFromUDFM
-- | This allows for lossy conversion from UniqDFM to UniqFM
udfmToUfm :: UniqDFM elt -> UniqFM elt
udfmToUfm (UDFM m _i) =
listToUFM_Directly [(getUnique k, taggedFst tv) | (k, tv) <- M.toList m]
listToUDFM :: Uniquable key => [(key,elt)] -> UniqDFM elt
listToUDFM = foldl' (\m (k, v) -> addToUDFM m k v) emptyUDFM
listToUDFM_Directly :: [(Unique, elt)] -> UniqDFM elt
listToUDFM_Directly = foldl' (\m (u, v) -> addToUDFM_Directly m u v) emptyUDFM
-- | Apply a function to a particular element
adjustUDFM :: Uniquable key => (elt -> elt) -> UniqDFM elt -> key -> UniqDFM elt
adjustUDFM f (UDFM m i) k = UDFM (M.adjust (fmap f) (getKey $ getUnique k) m) i
-- | The expression (@alterUDFM f map k@) alters the value @x@ at @k@, or its
-- absence. alterUDFM can be used to insert, delete, or update a value in a
-- UniqDFM. Use addToUDFM, delFromUDFM or adjustUDFM when possible; they are
-- more efficient.
alterUDFM
:: Uniquable key
=> (Maybe elt -> Maybe elt) -- How to adjust
-> UniqDFM elt -- old
-> key -- new
-> UniqDFM elt -- result
alterUDFM f (UDFM m i) k =
UDFM (M.alter alterf (getKey $ getUnique k) m) (i + 1)
where
alterf Nothing = inject $ f Nothing
alterf (Just (TaggedVal v _)) = inject $ f (Just v)
inject Nothing = Nothing
inject (Just v) = Just $ TaggedVal v i
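-- Some illustrative uses of alterUDFM (assuming a map @m@ and a key @k@;
-- these examples are not from the original source):
--
--   alterUDFM (\_ -> Just 0) m k    -- insert k with 0, or overwrite it
--   alterUDFM (const Nothing) m k   -- delete k if present
--   alterUDFM (fmap (+1)) m k       -- bump k's value, leave absent keys alone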
-- | Map a function over every value in a UniqDFM
mapUDFM :: (elt1 -> elt2) -> UniqDFM elt1 -> UniqDFM elt2
mapUDFM f (UDFM m i) = UDFM (M.map (fmap f) m) i
anyUDFM :: (elt -> Bool) -> UniqDFM elt -> Bool
anyUDFM p (UDFM m _i) = M.foldr ((||) . p . taggedFst) False m
allUDFM :: (elt -> Bool) -> UniqDFM elt -> Bool
allUDFM p (UDFM m _i) = M.foldr ((&&) . p . taggedFst) True m
instance Semi.Semigroup (UniqDFM a) where
(<>) = plusUDFM
instance Monoid (UniqDFM a) where
mempty = emptyUDFM
mappend = (Semi.<>)
-- This should not be used in committed code, provided for convenience to
-- make ad-hoc conversions when developing
alwaysUnsafeUfmToUdfm :: UniqFM elt -> UniqDFM elt
alwaysUnsafeUfmToUdfm = listToUDFM_Directly . nonDetUFMToList
-- Output-ery
instance Outputable a => Outputable (UniqDFM a) where
ppr ufm = pprUniqDFM ppr ufm
pprUniqDFM :: (a -> SDoc) -> UniqDFM a -> SDoc
pprUniqDFM ppr_elt ufm
= brackets $ fsep $ punctuate comma $
[ ppr uq <+> text ":->" <+> ppr_elt elt
| (uq, elt) <- udfmToList ufm ]
pprUDFM :: UniqDFM a -- ^ The things to be pretty printed
-> ([a] -> SDoc) -- ^ The pretty printing function to use on the elements
-> SDoc -- ^ 'SDoc' where the things have been pretty
-- printed
pprUDFM ufm pp = pp (eltsUDFM ufm)
| sdiehl/ghc | compiler/utils/UniqDFM.hs | bsd-3-clause | 15,897 | 0 | 13 | 3,542 | 3,831 | 2,020 | 1,811 | 215 | 3 |
module Main where
import Idris.Core.TT
import Idris.AbsSyntax
import Idris.Options
import Idris.ElabDecls
import Idris.REPL
import Idris.Main
import Idris.ModeCommon
import IRTS.Compiler
import IRTS.CodegenPHP
import System.Environment
import System.Exit
import Paths_idris_php
data Opts = Opts { inputs :: [FilePath],
output :: FilePath }
showUsage = do putStrLn "Usage: idris-php <ibc-files> [-o <output-file>]"
exitWith ExitSuccess
getOpts :: IO Opts
getOpts = do xs <- getArgs
return $ process (Opts [] "a.php") xs
where
process opts ("-o":o:xs) = process (opts { output = o }) xs
process opts ("--yes-really":xs) = process opts xs -- GRRR
process opts (x:xs) = process (opts { inputs = x:inputs opts }) xs
process opts [] = opts
c_main :: Opts -> Idris ()
c_main opts = do elabPrims
loadInputs (inputs opts) Nothing
mainProg <- elabMain
ir <- compile (Via IBCFormat "php") (output opts) (Just mainProg)
runIO $ codegenPHP ir
main :: IO ()
main = do opts <- getOpts
if (null (inputs opts))
then showUsage
else runMain (c_main opts)
| edwinb/idris-php | src/Main.hs | bsd-3-clause | 1,217 | 0 | 12 | 340 | 404 | 209 | 195 | 35 | 4 |
{-# LANGUAGE EmptyDataDecls #-}
module Main where
import Graph
main :: Fay ()
main = ready $ do
withCSV "data.csv" "id" "val" $ \dat -> do
g <- graph 640 480 dat 35
appendPoint g
appendLine g
appendAxis g
appendLabel g "hogex" "hogey"
return ()
| junjihashimoto/fay-d3-graph | Main.hs | bsd-3-clause | 275 | 0 | 13 | 75 | 98 | 45 | 53 | 12 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.Plot.HMatrix
-- Copyright : (c) A. V. H. McPhail 2010
-- License : BSD3
--
-- Maintainer : haskell.vivian.mcphail <at> gmail <dot> com
-- Stability : provisional
-- Portability : portable
--
-- Compatibility module to replace "Graphics.Plot" of the 'hmatrix' package
--
-- Provides all functions from hmatrix's "Graphics.Plot", as well as
-- variants suffixed with 'H' that return a 'PlotHandle' for
-- interactive update.
--
-----------------------------------------------------------------------------
module Graphics.Rendering.Plot.HMatrix (
-- * Plotting functions
mplot, mplotH
, plot, plotH
, parametricPlot, parametricPlotH
, imshow, greyscaleH
, meshdom
-- * Compatibility
, matrixToPGM
-- * Gnuplot functions
, splot, mesh
) where
-----------------------------------------------------------------------------
{- Function signatures copied from hmatrix, (c) A. Ruiz -}
import Numeric.LinearAlgebra hiding(matrix)
import Numeric.LinearAlgebra.Data()
{- COMPATIBILITY -}
import Data.List(intersperse)
import System.Process (system)
import Graphics.Rendering.Plot.Figure
import qualified Graphics.Rendering.Plot.Figure.Simple as S
import Graphics.Rendering.Plot.Gtk
-----------------------------------------------------------------------------
nohandle :: Monad m => m a -> m ()
nohandle m = m >> return ()
-----------------------------------------------------------------------------
-- | plot several vectors against the first
mplot :: [Vector Double] -> IO ()
mplot = nohandle . mplotH
-- | plot several vectors against the first
mplotH :: [Vector Double] -> IO PlotHandle
mplotH [] = error "mplotH: no data"
mplotH [_] = error "mplotH: no ordinates"
mplotH (v:vs) = display $ S.plot (Line,v,vs)
-----------------------------------------------------------------------------
-- apply several functions to one object
mapf :: [a -> b] -> a -> [b]
mapf fs x = map ($ x) fs
{- | Draws a list of functions over a desired range and with a desired number of points
> > plot [sin, cos, sin.(3*)] (0,2*pi) 1000
-}
plot :: [Vector Double -> Vector Double] -> (Double,Double) -> Int -> IO ()
plot fs r n = nohandle $ plotH fs r n
{- | Draws a list of functions over a desired range and with a desired number of points
> > plot [sin, cos, sin.(3*)] (0,2*pi) 1000
-}
plotH :: [Vector Double -> Vector Double] -> (Double,Double) -> Int -> IO PlotHandle
plotH fs r n = display $ do
let ts = linspace n r
S.plot (Line,ts,mapf fs ts)
{-
withPlot (1,1) $ do
withAxis XAxis (Side Lower) $ setTickLabelFormat "%.1f"
withAxis YAxis (Side Lower) $ setTickLabelFormat "%.1f"
-}
-----------------------------------------------------------------------------
{- | Draws a parametric curve. For instance, to draw a spiral we can do something like:
> > parametricPlot (\t->(t * sin t, t * cos t)) (0,10*pi) 1000
-}
parametricPlot :: (Vector Double->(Vector Double,Vector Double)) -> (Double, Double) -> Int -> IO ()
parametricPlot f r n = nohandle $ parametricPlotH f r n
{- | Draws a parametric curve. For instance, to draw a spiral we can do something like:
> > parametricPlot (\t->(t * sin t, t * cos t)) (0,10*pi) 1000
-}
parametricPlotH :: (Vector Double->(Vector Double,Vector Double)) -> (Double, Double) -> Int -> IO PlotHandle
parametricPlotH f r n = display $ do
let t = linspace n r
(fx,fy) = f t
S.plot [(Line,fx,fy)]
-----------------------------------------------------------------------------
-- | From vectors x and y, it generates a pair of matrices to be used as x and y arguments for matrix functions.
meshdom :: Vector Double -> Vector Double -> (Matrix Double , Matrix Double)
meshdom r1 r2 = (outer r1 (konst 1 (size r2)), outer (konst 1 (size r1)) r2)
gnuplotX :: String -> IO ()
gnuplotX command = do { _ <- system cmdstr; return()} where
cmdstr = "echo \""++command++"\" | gnuplot -persist"
datafollows :: String
datafollows = "\\\"-\\\""
prep :: [[Double]] -> String
prep = (++"e\n\n") . unlines . map (unwords . (map show))
{- | Draws a 3D surface representation of a real matrix.
> > mesh (hilb 20)
In certain versions you can interactively rotate the graphic using the mouse.
-}
mesh :: Matrix Double -> IO ()
mesh m = gnuplotX (command++dat) where
command = "splot "++datafollows++" matrix with lines\n"
dat = prep $ toLists $ m
mesh' :: Matrix Double -> IO ()
mesh' m = do
writeFile "splot-gnu-command" "splot \"splot-tmp.txt\" matrix with lines; pause -1";
toFile' "splot-tmp.txt" m
putStr "Press [Return] to close the graphic and continue... "
_ <- system "gnuplot -persist splot-gnu-command"
_ <- system "rm splot-tmp.txt splot-gnu-command"
return ()
{- | Draws the surface represented by the function f in the desired ranges and number of points, internally using 'mesh'.
> > let f x y = cos (x + y)
> > splot f (0,pi) (0,2*pi) 50
-}
splot :: (Matrix Double->Matrix Double->Matrix Double) -> (Double,Double) -> (Double,Double) -> Int -> IO ()
splot f rx ry n = mesh' z where
(x,y) = meshdom (linspace n rx) (linspace n ry)
z = f x y
-----------------------------------------------------------------------------
-- | writes a matrix to a PGM image file
matrixToPGM :: Matrix Double -> String
matrixToPGM m = header ++ unlines (map unwords ll) where
c = cols m
r = rows m
header = "P2 "++show c++" "++show r++" "++show (round maxgray :: Int)++"\n"
maxgray = 255.0
maxval = maxElement m
minval = minElement m
scale' = if (maxval == minval)
then 0.0
else maxgray / (maxval - minval)
f x = show ( round ( scale' *(x - minval) ) :: Int )
ll = map (map f) (toLists m)
-----------------------------------------------------------------------------
-- | imshow shows a representation of a matrix as a gray level image.
imshow :: Matrix Double -> IO ()
imshow = nohandle . greyscaleH
-- | greyscaleH shows a representation of a matrix as a gray level image.
greyscaleH :: Matrix Double -> IO PlotHandle
greyscaleH d = display $ S.plot d
-----------------------------------------------------------------------------
-- | Saves a real matrix to a formatted ASCII text file
toFile' :: FilePath -> Matrix Double -> IO ()
toFile' filename matrix = writeFile filename (unlines . map unwords. map (map show) . toLists $ matrix)
gnuplotpdf :: String -> String -> [([[Double]], String)] -> IO ()
gnuplotpdf title command ds = gnuplot (prelude ++ command ++" "++ draw) >> postproc where
prelude = "set terminal epslatex color; set output '"++title++".tex';"
(dats,defs) = unzip ds
draw = concat (intersperse ", " (map ("\"-\" "++) defs)) ++ "\n" ++
concatMap pr dats
postproc = do
_ <- system $ "epstopdf "++title++".eps"
mklatex
_ <- system $ "pdflatex "++title++"aux.tex > /dev/null"
_ <- system $ "pdfcrop "++title++"aux.pdf > /dev/null"
_ <- system $ "mv "++title++"aux-crop.pdf "++title++".pdf"
_ <- system $ "rm "++title++"aux.* "++title++".eps "++title++".tex"
return ()
mklatex = writeFile (title++"aux.tex") $
"\\documentclass{article}\n"++
"\\usepackage{graphics}\n"++
"\\usepackage{nopageno}\n"++
"\\usepackage{txfonts}\n"++
"\\renewcommand{\\familydefault}{phv}\n"++
"\\usepackage[usenames]{color}\n"++
"\\begin{document}\n"++
"\\begin{center}\n"++
" \\input{./"++title++".tex}\n"++
"\\end{center}\n"++
"\\end{document}"
pr = (++"e\n") . unlines . map (unwords . (map show))
gnuplot cmd = do
writeFile "gnuplotcommand" cmd
_ <- system "gnuplot gnuplotcommand"
_ <- system "rm gnuplotcommand"
return ()
gnuplotWin :: String -> String -> [([[Double]], String)] -> IO ()
gnuplotWin title command ds = gnuplot (prelude ++ command ++" "++ draw) where
(dats,defs) = unzip ds
draw = concat (intersperse ", " (map ("\"-\" "++) defs)) ++ "\n" ++
concatMap pr dats
pr = (++"e\n") . unlines . map (unwords . (map show))
prelude = "set title \""++title++"\";"
gnuplot cmd = do
writeFile "gnuplotcommand" cmd
_ <- system "gnuplot -persist gnuplotcommand"
_ <- system "rm gnuplotcommand"
return ()
-----------------------------------------------------------------------------
| amcphail/plot-gtk3 | lib/Graphics/Rendering/Plot/HMatrix.hs | bsd-3-clause | 9,137 | 0 | 22 | 2,341 | 2,118 | 1,105 | 1,013 | 126 | 2 |
{-|
Module : Reactive.DOM.Flow
Description : User interface flows. Exports of the internal module.
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : aovieth@gmail.com
Stability : experimental
Portability : non-portable (GHC only)
-}
module Reactive.DOM.Flow (
Flow
, CompleteFlow
, pureFlow
, impureFlow
, widgetFlow
, widgetFlow'
, openFlow
, flowMap
, flowMapE
, flowTrans
, runFlow
) where
import Reactive.DOM.Internal.Flow
| avieth/reactive-dom | Reactive/DOM/Flow.hs | bsd-3-clause | 511 | 0 | 4 | 131 | 50 | 34 | 16 | 13 | 0 |
module Sprite.Colors where
import Graphics.Rendering.OpenGL
red = Color4 1 0 0 1 :: Color4 GLclampf
blue = Color4 0 0 1 (0.8) :: Color4 GLclampf
green = Color4 0 1 0 1 :: Color4 GLclampf
black = Color4 0 0 0 1 :: Color4 GLclampf
white = Color4 1 1 1 1 :: Color4 GLclampf
| flazz/tooHS | src/Sprite/Colors.hs | bsd-3-clause | 273 | 0 | 6 | 57 | 115 | 60 | 55 | 7 | 1 |
import Control.Monad
import System.Plugins.Hotswap
main :: IO ()
main = do
inputHandler <- newPlugin "Plugin.o" [] "inputHandler" :: IO (Plugin (IO Bool))
forever $ do
r <- runPlugin inputHandler
when r $ reloadPlugin inputHandler
| mikeplus64/hotswap | examples/Main.hs | bsd-3-clause | 256 | 0 | 12 | 59 | 91 | 43 | 48 | 8 | 1 |
{-# LANGUAGE StandaloneDeriving #-}
module While.SimpleAbstractSyntax where
-- Numbers
type {- n <- -} Z = Integer
-- Variables
type {- x <- -} Var = String
-- Arithmetic expressions
data {- a <- -} Aexp
= Num Z
| Var Var
| Add Aexp Aexp
| Mul Aexp Aexp
| Sub Aexp Aexp
-- Boolean expressions
data {- b <- -} Bexp
= TRUE
| FALSE
| Eq Aexp Aexp
| Leq Aexp Aexp
| Not Bexp
| And Bexp Bexp
-- Statements
data {- s <- -} Stm
= Assign Var Aexp
| Skip
| Seq Stm Stm
| If Bexp Stm Stm
| While Bexp Stm
deriving instance Show Aexp
deriving instance Show Bexp
deriving instance Show Stm
{-
y = 1;
while (!(x<=1)) do {
y = y * x;
x = x - 1;
}
-}
factorial
= Seq
(Assign "y" (Num 1))
(While
(Not (Leq (Var "x") (Num 1)))
(Seq
(Assign "y" (Mul (Var "y") (Var "x")))
(Assign "x" (Sub (Var "x") (Num 1)))))
| grammarware/slps | topics/implementation/NielsonN07/Haskell/src/While/SimpleAbstractSyntax.hs | bsd-3-clause | 868 | 0 | 15 | 245 | 284 | 159 | 125 | 34 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
module Main where
import qualified EFA.Graph.Topology.Node as Node
import qualified Modules.NonIO as ModNonIO;
import qualified Modules.Input.Setting as ModSet
import qualified Modules.Output as Output
import qualified Modules.Input.System as System;
import qualified EFA.Flow.Topology.Index as TopoIdx
import qualified Modules.Output.Plot as ModPlot
import EFA.Utility.Async (concurrentlyMany_)
import Text.Printf (printf)
import qualified EFA.Application.Optimisation.Loop as Loop
import qualified EFA.Application.Optimisation.Params as Params
import qualified EFA.Application.Type as Type
import Text.Printf (--printf,
PrintfArg)
import qualified Graphics.Gnuplot.Terminal.Default as DefaultTerm
import qualified Data.GraphViz.Types.Canonical as Canonical
import qualified EFA.Report.FormatValue as FormatValue
import qualified EFA.Equation.Arithmetic as Arith
import EFA.Application.Optimisation.Sweep (Sweep)
import qualified Data.Vector.Unboxed as UV
import qualified EFA.IO.TableParser as Table
import qualified EFA.Signal.Vector as SV
import qualified Data.Text.Lazy as LazyText
_term :: t -> b -> IO DefaultTerm.T
_term _ = ModPlot.gpXTerm
_dotTerm:: b -> Canonical.DotGraph LazyText.Text -> IO ()
_dotTerm dir = ModPlot.dotXTerm dir
_stoPos :: TopoIdx.Position System.Node
_stoPos = TopoIdx.Position System.Water System.Network
_gasPos :: TopoIdx.Position System.Node
_gasPos = TopoIdx.Position System.Gas System.LocalNetwork
{-
printBalanceLoopItem ::
(Show node,UV.Unbox b,
SV.Walker efaVec,
SV.Storage efaVec d,
SV.FromList efaVec,
Arith.ZeroTestable d,
Arith.Constant d,
FormatValue.FormatValue b,
FormatValue.FormatValue d,
Show a,
PrintfArg a,
Arith.Constant a)=>
(z ~ Type.SignalBasedOptimisation System.Node
Sweep
sweepVec
Double
intVec
b
simVec
c
efaVec
d) =>
Params.Optimisation node [] Sweep UV.Vector a ->
(Loop.Counter,Loop.BalanceLoopItem node a z) -> IO ()
printBalanceLoopItem _optParams _b = concurrentlyMany_ [
putStrLn $ Loop.showBalanceLoopItem _optParams _b,
Output.maxPerState _term _b,
Output.simulation _dotTerm _b
]
-}
printBalanceLoopItem ::
(UV.Unbox b,
SV.Walker efaVec,
SV.Storage efaVec d,
SV.FromList efaVec,
Arith.ZeroTestable d,
Arith.Constant d,
FormatValue.FormatValue b,
FormatValue.FormatValue d,
Show node, Show a, PrintfArg a, Arith.Constant a,
Show (intVec Double),Show (simVec Double),
SV.Walker simVec,
SV.Storage simVec Double,
SV.FromList simVec,
Node.C node,
z ~ Type.SignalBasedOptimisation
node sweep vec Double intVec b simVec c efaVec d)=>
Params.Optimisation node [] Sweep UV.Vector a ->
(Loop.Counter, Loop.BalanceLoopItem node a z) -> IO ()
printBalanceLoopItem _optParams _b@(_bStp, Loop.BalanceLoopItem _bForcing _bFStep _bal _opt) =
do
let _gTerm = ModPlot.gpPNG _dir _bStp
_xTerm = ModPlot.gpXTerm
_term = _xTerm
_dir = printf "outer-loop-%6.6d" _bStp
_stoPos = TopoIdx.Position System.Water System.Network
concurrentlyMany_ [
putStrLn $ Loop.showBalanceLoopItem _optParams _b,
-- Output.maxPerState _term _b,
Output.simulation _dotTerm _b]
printEtaLoopItem :: Params.Optimisation node [] Sweep UV.Vector a ->
(Loop.Counter, Loop.EtaLoopItem node Sweep UV.Vector a z)-> IO ()
printEtaLoopItem _params _loopItem = concurrentlyMany_ [
putStrLn $ Loop.showEtaLoopItem _params _loopItem
]
main :: IO()
main = do
tabEta <- Table.read "../maps/eta.txt"
tabPower <- Table.read "../maps/power.txt.bak"
let
sysParams = ModSet.sysParams tabEta
optParams = ModSet.optParams
demandedCycle = ModSet.reqsRec tabPower
simParams = ModSet.simParams demandedCycle
initStateFlow = ModSet.initEnv
let -- r = NonIO.checkRange sysParams optParams simParams
loop1 = ModNonIO.iterationWithAllStates sysParams optParams simParams initStateFlow
stateFlow = ModNonIO.getLastStateFlow loop1
loop2 = ModNonIO.iterationWithBestStates sysParams optParams simParams stateFlow
Output.iterationLoop optParams loop1
-- Output.loopResults optParams printEtaLoopItem printBalanceLoopItem loop1
Output.iterationLoop optParams loop2
-- Output.loopResults optParams printEtaLoopItem printBalanceLoopItem loop2
| energyflowanalysis/efa-2.1 | examples/advanced/energy/src/Main.hs | bsd-3-clause | 4,413 | 0 | 11 | 766 | 933 | 519 | 414 | 83 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
module Control.Concurrent.Async.Lifted.Extra where
import Control.Concurrent.Async.Lifted
import Control.Concurrent.STM
import Control.Concurrent
import Control.Concurrent.MSem (new, with)
import Data.Traversable
import Control.Applicative
import Control.Monad
import Control.Monad.Trans.Control
import Control.Monad.Fix
import Control.Monad.Base
import Data.Foldable (Foldable, traverse_)
-- | Implementation derived from Petr Pudlák's answer on StackOverflow
-- <http://stackoverflow.com/a/18898822/230050>
sequencePool :: (Traversable t, MonadBaseControl IO m)
=> Int -> t (m a) -> m (t a)
sequencePool max xs = do
sem <- liftBase $ new max
runConcurrently $ traverse (Concurrently . liftBaseOp_ (with sem)) xs
-- | Implementation copied from Petr Pudlák's answer on StackOverflow
-- <http://stackoverflow.com/a/18898822/230050>
mapPool :: (Traversable t, MonadBaseControl IO m)
=> Int
-> (a -> m b)
-> t a
-> m (t b)
mapPool max f xs = do
sem <- liftBase $ new max
mapConcurrently (liftBaseOp_ (with sem) . f) xs
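-- An illustrative use of mapPool: run at most four downloads at a time
-- ('fetchUrl' and 'urls' are hypothetical names, not part of this module):
--
--   results <- mapPool 4 fetchUrl urls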
sequenceConcurrently :: (Traversable t, MonadBaseControl IO m)
=> t (m a) -> m (t a)
sequenceConcurrently = runConcurrently . traverse Concurrently
#if !MIN_VERSION_lifted_async(0,9,1)
mapConcurrently_ :: (Foldable t, MonadBaseControl IO m) => (a -> m b) -> t a -> m ()
mapConcurrently_ f = runConcurrently . traverse_ (Concurrently . f)
#endif
forConcurrently_ :: (Foldable t, MonadBaseControl IO m) => t a -> (a -> m b) -> m ()
forConcurrently_ = flip mapConcurrently_
-- | Create an 'Async' and pass it to itself.
fixAsync :: (MonadFix m, MonadBaseControl IO m)
=> (Async (StM m a) -> m a) -> m (Async (StM m a))
fixAsync f = mdo
this <- async $ f this
return this
-- | Like 'fixAsync' but using 'forkOS' internally.
fixAsyncBound :: (MonadFix m, MonadBaseControl IO m)
=> (Async (StM m a) -> m a) -> m (Async (StM m a))
fixAsyncBound f = mdo
this <- asyncBound $ f this
return this
-- | Like 'fixAsync' but using 'forkOn' internally.
fixAsyncOn :: (MonadFix m, MonadBaseControl IO m)
=> Int -> (Async (StM m a) -> m a) -> m (Async (StM m a))
fixAsyncOn cpu f = mdo
this <- asyncOn cpu $ f this
return this
-- | Like 'fixAsync' but using 'forkIOWithUnmask' internally.
-- The child thread is passed a function that can be used to unmask asynchronous exceptions.
fixAsyncWithUnmask :: (MonadFix m, MonadBaseControl IO m)
=> (Async (StM m a) -> (forall b . m b -> m b) -> m a)
-> m (Async (StM m a))
fixAsyncWithUnmask f = mdo
this <- asyncWithUnmask $ f this
return this
-- | Like 'fixAsyncOn' but using 'forkOnWithUnmask' internally.
-- The child thread is passed a function that can be used to unmask asynchronous exceptions.
fixAsyncOnWithUnmask :: (MonadFix m, MonadBaseControl IO m)
=> Int -> (Async (StM m a) -> (forall b . m b -> m b) -> m a)
-> m (Async (StM m a))
fixAsyncOnWithUnmask cpu f = mdo
  this <- asyncOnWithUnmask cpu $ f this
  return this
-- | Create an async that is linked to a parent. If the parent
-- dies so does this async
withParent :: MonadBaseControl IO m
=> Async a -> m b -> m (Async (StM m b))
withParent parent act = async $ link parent >> act
| jfischoff/async-extras | src/Control/Concurrent/Async/Lifted/Extra.hs | bsd-3-clause | 3,588 | 0 | 14 | 835 | 1,083 | 550 | 533 | 69 | 1 |
module Test4c
where
import qualified Data.HashMap.Strict as H
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.List
isAlpha ch = ('a' <= ch && ch <= 'z')
wrds :: T.Text -> [ T.Text ]
wrds bs =
let
(_, r1) = T.span (not . isAlpha) bs
(w, r2) = T.span isAlpha r1
in if T.null w then [] else w : wrds r2
readDict = do
allwords <- fmap (wrds . T.toLower) $ T.readFile "big.txt"
let h = foldl' add H.empty allwords
add h w = let c = H.lookupDefault (0 :: Int) w h
in H.insert w (c+1) h
member = \k -> H.member k h
frequency = \k -> H.lookupDefault 0 k h
return (member, frequency, T.pack)
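-- Illustrative use of readDict (assumes "big.txt" is present in the
-- working directory):
--
--   (member, frequency, pack) <- readDict
--   print (member (pack "the"), frequency (pack "the"))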
| erantapaa/test-spelling | src/Test4c.hs | bsd-3-clause | 677 | 0 | 15 | 185 | 311 | 166 | 145 | 20 | 2 |
module HaskellGame.Utils where
import Prelude (
Num(..), Ord(..), Read(..), Eq(..),
Int(), String(),
otherwise, read, fst, (.), ($)
)
import qualified Data.List as List
import Data.Maybe ( Maybe(..) )
import HaskellGame.Datatypes
{- Utility functions to do stuff we need to do -}
takesome :: Int -> [a] -> [a]
takesome 0 _ = []
takesome _ [] = []
takesome n (x:xs)
| n > 0 = x:(takesome (n-1) xs)
| otherwise = []
dropsome :: Int -> [a] -> [a]
dropsome 0 x = x
dropsome _ [] = []
dropsome n (x:xs)
| n > 0 = (dropsome (n-1) xs)
| otherwise = (x:xs)
chunksOf :: Int -> [a] -> [[a]]
chunksOf n [] = []
chunksOf n l = (takesome n l):(chunksOf n (dropsome n l))
createMap :: String -> Int -> Int -> String -> Map
createMap n w h c =
Map n w h (chunksOf w (List.map (read . (:[])) c))
| KevinCardiff/haskell-game2 | src/HaskellGame/Utils.hs | bsd-3-clause | 862 | 0 | 13 | 238 | 450 | 250 | 200 | 26 | 1 |
module Stackage.CheckPlan
( checkPlan
) where
import Control.Monad (unless, when)
import Data.List (isPrefixOf, sort)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Stackage.CheckCabalVersion (checkCabalVersion)
import Stackage.InstallInfo
import Stackage.Types
import Stackage.Util
import System.Exit (ExitCode (ExitFailure, ExitSuccess),
exitWith)
import System.Process (readProcessWithExitCode)
data Mismatch = OnlyDryRun String | OnlySimpleList String
deriving Show
checkPlan :: BuildSettings -> BuildPlan -> IO ()
checkPlan settings bp = do
_ <- checkCabalVersion
putStrLn "Checking build plan"
packages <- mapM (replaceTarball $ tarballDir settings) (bpPackageList bp)
(ec, dryRun', stderr) <- readProcessWithExitCode "cabal"
( addCabalArgsOnlyGlobal
$ "install"
: "--dry-run"
: "--max-backjumps=-1"
: "--reorder-goals"
: packages
) ""
when (ec /= ExitSuccess || "Warning:" `isPrefixOf` stderr) $ do
putStr stderr
putStr dryRun'
putStrLn "cabal returned a bad result, exiting"
exitWith ec
let dryRun = sort $ filter notOptionalCore $ map (takeWhile (/= ' ')) $ drop 2 $ lines dryRun'
let mismatches = getMismatches dryRun (filter notOptionalCore $ bpPackageList bp)
unless (null $ filter (not . acceptableMismatch) mismatches) $ do
putStrLn "Found the following mismatches"
mapM_ print mismatches
exitWith $ ExitFailure 1
putStrLn "Build plan checked, no mismatches"
where
optionalCore = Set.fromList $ map packageVersionString $ Map.toList $ bpOptionalCore bp
notOptionalCore s = not $ s `Set.member` optionalCore
getMismatches :: [String] -> [String] -> [Mismatch]
getMismatches =
go
where
go [] y = map OnlySimpleList y
go x [] = map OnlyDryRun x
go (x:xs) (y:ys) =
case compare x y of
EQ -> go xs ys
LT -> OnlyDryRun x : go xs (y:ys)
GT -> OnlySimpleList y : go (x:xs) ys
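-- A small illustrative example of the merge above (both input lists are
-- assumed to be sorted; the values are made up):
--
--   getMismatches ["a-1.0", "c-1.0"] ["b-2.0", "c-1.0"]
--     == [OnlyDryRun "a-1.0", OnlySimpleList "b-2.0"]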
-- | Some mismatches are going to be acceptable. The reasons are described
-- below.
acceptableMismatch :: Mismatch -> Bool
acceptableMismatch m =
case m of
-- GHC 7.4 included extensible-exceptions as a core package, and
-- therefore the HP at time of writing (2012.4.0.0) includes it in that
-- list. However, GHC 7.6 does /not/ include that package. As a result,
-- we get that package included in the dry run but not our list of
-- packages to build. See issue #57.
OnlyDryRun s | "extensible-exceptions-" `isPrefixOf` s -> True
_ -> False
| byorgey/stackage | Stackage/CheckPlan.hs | mit | 2,872 | 0 | 16 | 871 | 684 | 351 | 333 | 57 | 5 |
g = f
where
f y@(x:xs) | x > 10 = let z = [] in (z)
| otherwise = []
| itchyny/vim-haskell-indent | test/guard/where_same_line.out.hs | mit | 90 | 0 | 12 | 42 | 63 | 31 | 32 | 3 | 1 |
{-# LANGUAGE Arrows #-}
module Bot.Ping where
import Control.Auto
import Prelude hiding ((.), id) -- we use (.) and id from `Control.Category`
import Control.Monad.IO.Class (MonadIO)
import Bot.Types
pingBot :: MonadIO m => RoomBot m
pingBot = proc (InMessage _ msg _ _) -> do
echoB <- echoBlips -< msg
-- | (: []) :: Message -> [Message]
id -< (: []) <$> echoB
where
echoBlips :: Auto m Message (Blip Message)
echoBlips = emitJusts (getRequest . words)
where
getRequest ("@ping": _) = Just "PONG!"
getRequest _ = Nothing
| urbanslug/nairobi-bot | src/Bot/Ping.hs | gpl-3.0 | 580 | 1 | 11 | 146 | 175 | 96 | 79 | 14 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.CreateDBInstance
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a new DB instance.
--
-- <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html>
module Network.AWS.RDS.CreateDBInstance
(
-- * Request
CreateDBInstance
-- ** Request constructor
, createDBInstance
-- ** Request lenses
, cdbiAllocatedStorage
, cdbiAutoMinorVersionUpgrade
, cdbiAvailabilityZone
, cdbiBackupRetentionPeriod
, cdbiCharacterSetName
, cdbiDBInstanceClass
, cdbiDBInstanceIdentifier
, cdbiDBName
, cdbiDBParameterGroupName
, cdbiDBSecurityGroups
, cdbiDBSubnetGroupName
, cdbiEngine
, cdbiEngineVersion
, cdbiIops
, cdbiKmsKeyId
, cdbiLicenseModel
, cdbiMasterUserPassword
, cdbiMasterUsername
, cdbiMultiAZ
, cdbiOptionGroupName
, cdbiPort
, cdbiPreferredBackupWindow
, cdbiPreferredMaintenanceWindow
, cdbiPubliclyAccessible
, cdbiStorageEncrypted
, cdbiStorageType
, cdbiTags
, cdbiTdeCredentialArn
, cdbiTdeCredentialPassword
, cdbiVpcSecurityGroupIds
-- * Response
, CreateDBInstanceResponse
-- ** Response constructor
, createDBInstanceResponse
-- ** Response lenses
, cdbirDBInstance
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.RDS.Types
import qualified GHC.Exts
data CreateDBInstance = CreateDBInstance
{ _cdbiAllocatedStorage :: Int
, _cdbiAutoMinorVersionUpgrade :: Maybe Bool
, _cdbiAvailabilityZone :: Maybe Text
, _cdbiBackupRetentionPeriod :: Maybe Int
, _cdbiCharacterSetName :: Maybe Text
, _cdbiDBInstanceClass :: Text
, _cdbiDBInstanceIdentifier :: Text
, _cdbiDBName :: Maybe Text
, _cdbiDBParameterGroupName :: Maybe Text
, _cdbiDBSecurityGroups :: List "member" Text
, _cdbiDBSubnetGroupName :: Maybe Text
, _cdbiEngine :: Text
, _cdbiEngineVersion :: Maybe Text
, _cdbiIops :: Maybe Int
, _cdbiKmsKeyId :: Maybe Text
, _cdbiLicenseModel :: Maybe Text
, _cdbiMasterUserPassword :: Text
, _cdbiMasterUsername :: Text
, _cdbiMultiAZ :: Maybe Bool
, _cdbiOptionGroupName :: Maybe Text
, _cdbiPort :: Maybe Int
, _cdbiPreferredBackupWindow :: Maybe Text
, _cdbiPreferredMaintenanceWindow :: Maybe Text
, _cdbiPubliclyAccessible :: Maybe Bool
, _cdbiStorageEncrypted :: Maybe Bool
, _cdbiStorageType :: Maybe Text
, _cdbiTags :: List "member" Tag
, _cdbiTdeCredentialArn :: Maybe Text
, _cdbiTdeCredentialPassword :: Maybe Text
, _cdbiVpcSecurityGroupIds :: List "member" Text
} deriving (Eq, Read, Show)
-- | 'CreateDBInstance' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cdbiAllocatedStorage' @::@ 'Int'
--
-- * 'cdbiAutoMinorVersionUpgrade' @::@ 'Maybe' 'Bool'
--
-- * 'cdbiAvailabilityZone' @::@ 'Maybe' 'Text'
--
-- * 'cdbiBackupRetentionPeriod' @::@ 'Maybe' 'Int'
--
-- * 'cdbiCharacterSetName' @::@ 'Maybe' 'Text'
--
-- * 'cdbiDBInstanceClass' @::@ 'Text'
--
-- * 'cdbiDBInstanceIdentifier' @::@ 'Text'
--
-- * 'cdbiDBName' @::@ 'Maybe' 'Text'
--
-- * 'cdbiDBParameterGroupName' @::@ 'Maybe' 'Text'
--
-- * 'cdbiDBSecurityGroups' @::@ ['Text']
--
-- * 'cdbiDBSubnetGroupName' @::@ 'Maybe' 'Text'
--
-- * 'cdbiEngine' @::@ 'Text'
--
-- * 'cdbiEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'cdbiIops' @::@ 'Maybe' 'Int'
--
-- * 'cdbiKmsKeyId' @::@ 'Maybe' 'Text'
--
-- * 'cdbiLicenseModel' @::@ 'Maybe' 'Text'
--
-- * 'cdbiMasterUserPassword' @::@ 'Text'
--
-- * 'cdbiMasterUsername' @::@ 'Text'
--
-- * 'cdbiMultiAZ' @::@ 'Maybe' 'Bool'
--
-- * 'cdbiOptionGroupName' @::@ 'Maybe' 'Text'
--
-- * 'cdbiPort' @::@ 'Maybe' 'Int'
--
-- * 'cdbiPreferredBackupWindow' @::@ 'Maybe' 'Text'
--
-- * 'cdbiPreferredMaintenanceWindow' @::@ 'Maybe' 'Text'
--
-- * 'cdbiPubliclyAccessible' @::@ 'Maybe' 'Bool'
--
-- * 'cdbiStorageEncrypted' @::@ 'Maybe' 'Bool'
--
-- * 'cdbiStorageType' @::@ 'Maybe' 'Text'
--
-- * 'cdbiTags' @::@ ['Tag']
--
-- * 'cdbiTdeCredentialArn' @::@ 'Maybe' 'Text'
--
-- * 'cdbiTdeCredentialPassword' @::@ 'Maybe' 'Text'
--
-- * 'cdbiVpcSecurityGroupIds' @::@ ['Text']
--
createDBInstance :: Text -- ^ 'cdbiDBInstanceIdentifier'
-> Int -- ^ 'cdbiAllocatedStorage'
-> Text -- ^ 'cdbiDBInstanceClass'
-> Text -- ^ 'cdbiEngine'
-> Text -- ^ 'cdbiMasterUsername'
-> Text -- ^ 'cdbiMasterUserPassword'
-> CreateDBInstance
createDBInstance p1 p2 p3 p4 p5 p6 = CreateDBInstance
{ _cdbiDBInstanceIdentifier = p1
, _cdbiAllocatedStorage = p2
, _cdbiDBInstanceClass = p3
, _cdbiEngine = p4
, _cdbiMasterUsername = p5
, _cdbiMasterUserPassword = p6
, _cdbiDBName = Nothing
, _cdbiDBSecurityGroups = mempty
, _cdbiVpcSecurityGroupIds = mempty
, _cdbiAvailabilityZone = Nothing
, _cdbiDBSubnetGroupName = Nothing
, _cdbiPreferredMaintenanceWindow = Nothing
, _cdbiDBParameterGroupName = Nothing
, _cdbiBackupRetentionPeriod = Nothing
, _cdbiPreferredBackupWindow = Nothing
, _cdbiPort = Nothing
, _cdbiMultiAZ = Nothing
, _cdbiEngineVersion = Nothing
, _cdbiAutoMinorVersionUpgrade = Nothing
, _cdbiLicenseModel = Nothing
, _cdbiIops = Nothing
, _cdbiOptionGroupName = Nothing
, _cdbiCharacterSetName = Nothing
, _cdbiPubliclyAccessible = Nothing
, _cdbiTags = mempty
, _cdbiStorageType = Nothing
, _cdbiTdeCredentialArn = Nothing
, _cdbiTdeCredentialPassword = Nothing
, _cdbiStorageEncrypted = Nothing
, _cdbiKmsKeyId = Nothing
}
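-- An illustrative invocation of the constructor above (all values are made
-- up; the argument order follows the signature documented above):
--
--   createDBInstance "mydbinstance" 20 "db.t2.micro" "MySQL" "admin" "secret"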
-- | The amount of storage (in gigabytes) to be initially allocated for the
-- database instance.
--
-- Type: Integer
--
-- MySQL
--
-- Constraints: Must be an integer from 5 to 3072.
--
-- PostgreSQL
--
-- Constraints: Must be an integer from 5 to 3072.
--
-- Oracle
--
-- Constraints: Must be an integer from 10 to 3072.
--
-- SQL Server
--
-- Constraints: Must be an integer from 200 to 1024 (Standard Edition and
-- Enterprise Edition) or from 20 to 1024 (Express Edition and Web Edition)
cdbiAllocatedStorage :: Lens' CreateDBInstance Int
cdbiAllocatedStorage =
lens _cdbiAllocatedStorage (\s a -> s { _cdbiAllocatedStorage = a })
-- | Indicates that minor engine upgrades will be applied automatically to the DB
-- instance during the maintenance window.
--
-- Default: 'true'
cdbiAutoMinorVersionUpgrade :: Lens' CreateDBInstance (Maybe Bool)
cdbiAutoMinorVersionUpgrade =
lens _cdbiAutoMinorVersionUpgrade
(\s a -> s { _cdbiAutoMinorVersionUpgrade = a })
-- | The EC2 Availability Zone that the database instance will be created in. For
-- information on regions and Availability Zones, see <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html Regions and Availability Zones>.
--
-- Default: A random, system-chosen Availability Zone in the endpoint's
-- region.
--
-- Example: 'us-east-1d'
--
-- Constraint: The AvailabilityZone parameter cannot be specified if the
-- MultiAZ parameter is set to 'true'. The specified Availability Zone must be in
-- the same region as the current endpoint.
cdbiAvailabilityZone :: Lens' CreateDBInstance (Maybe Text)
cdbiAvailabilityZone =
lens _cdbiAvailabilityZone (\s a -> s { _cdbiAvailabilityZone = a })
-- | The number of days for which automated backups are retained. Setting this
-- parameter to a positive number enables backups. Setting this parameter to 0
-- disables automated backups.
--
-- Default: 1
--
-- Constraints:
--
-- Must be a value from 0 to 35 Cannot be set to 0 if the DB instance is a
-- source to Read Replicas
cdbiBackupRetentionPeriod :: Lens' CreateDBInstance (Maybe Int)
cdbiBackupRetentionPeriod =
lens _cdbiBackupRetentionPeriod
(\s a -> s { _cdbiBackupRetentionPeriod = a })
-- | For supported engines, indicates that the DB instance should be associated
-- with the specified CharacterSet.
cdbiCharacterSetName :: Lens' CreateDBInstance (Maybe Text)
cdbiCharacterSetName =
lens _cdbiCharacterSetName (\s a -> s { _cdbiCharacterSetName = a })
-- | The compute and memory capacity of the DB instance.
--
-- Valid Values: 'db.t1.micro | db.m1.small | db.m1.medium | db.m1.large | db.m1.xlarge | db.m2.xlarge | db.m2.2xlarge | db.m2.4xlarge | db.m3.medium | db.m3.large | db.m3.xlarge | db.m3.2xlarge | db.r3.large | db.r3.xlarge | db.r3.2xlarge | db.r3.4xlarge | db.r3.8xlarge | db.t2.micro | db.t2.small | db.t2.medium'
cdbiDBInstanceClass :: Lens' CreateDBInstance Text
cdbiDBInstanceClass =
lens _cdbiDBInstanceClass (\s a -> s { _cdbiDBInstanceClass = a })
-- | The DB instance identifier. This parameter is stored as a lowercase string.
--
-- Constraints:
--
-- Must contain from 1 to 63 alphanumeric characters or hyphens (1 to 15 for
-- SQL Server). First character must be a letter. Cannot end with a hyphen or
-- contain two consecutive hyphens. Example: 'mydbinstance'
cdbiDBInstanceIdentifier :: Lens' CreateDBInstance Text
cdbiDBInstanceIdentifier =
lens _cdbiDBInstanceIdentifier
(\s a -> s { _cdbiDBInstanceIdentifier = a })
-- | The meaning of this parameter differs according to the database engine you
-- use.
--
-- Type: String
--
-- MySQL
--
-- The name of the database to create when the DB instance is created. If this
-- parameter is not specified, no database is created in the DB instance.
--
-- Constraints:
--
-- Must contain 1 to 64 alphanumeric characters. Cannot be a word reserved by
-- the specified database engine.
--
-- PostgreSQL
--
-- The name of the database to create when the DB instance is created. If this
-- parameter is not specified, no database is created in the DB instance.
--
-- Constraints:
--
-- Must contain 1 to 63 alphanumeric characters. Must begin with a letter or an
-- underscore. Subsequent characters can be letters, underscores, or digits
-- (0-9). Cannot be a word reserved by the specified database engine.
--
-- Oracle
--
-- The Oracle System ID (SID) of the created DB instance.
--
-- Default: 'ORCL'
--
-- Constraints:
--
-- Cannot be longer than 8 characters.
--
-- SQL Server
--
-- Not applicable. Must be null.
cdbiDBName :: Lens' CreateDBInstance (Maybe Text)
cdbiDBName = lens _cdbiDBName (\s a -> s { _cdbiDBName = a })
-- | The name of the DB parameter group to associate with this DB instance. If
-- this argument is omitted, the default DBParameterGroup for the specified
-- engine will be used.
--
-- Constraints:
--
-- Must be 1 to 255 alphanumeric characters. First character must be a letter. Cannot end with a hyphen or contain two consecutive hyphens.
--
cdbiDBParameterGroupName :: Lens' CreateDBInstance (Maybe Text)
cdbiDBParameterGroupName =
lens _cdbiDBParameterGroupName
(\s a -> s { _cdbiDBParameterGroupName = a })
-- | A list of DB security groups to associate with this DB instance.
--
-- Default: The default DB security group for the database engine.
cdbiDBSecurityGroups :: Lens' CreateDBInstance [Text]
cdbiDBSecurityGroups =
lens _cdbiDBSecurityGroups (\s a -> s { _cdbiDBSecurityGroups = a })
. _List
-- | A DB subnet group to associate with this DB instance.
--
-- If there is no DB subnet group, then it is a non-VPC DB instance.
cdbiDBSubnetGroupName :: Lens' CreateDBInstance (Maybe Text)
cdbiDBSubnetGroupName =
lens _cdbiDBSubnetGroupName (\s a -> s { _cdbiDBSubnetGroupName = a })
-- | The name of the database engine to be used for this instance.
--
-- Valid Values: 'MySQL' | 'oracle-se1' | 'oracle-se' | 'oracle-ee' | 'sqlserver-ee' | 'sqlserver-se' | 'sqlserver-ex' | 'sqlserver-web' | 'postgres'
--
-- Not every database engine is available for every AWS region.
cdbiEngine :: Lens' CreateDBInstance Text
cdbiEngine = lens _cdbiEngine (\s a -> s { _cdbiEngine = a })
-- | The version number of the database engine to use.
--
-- The following are the database engines and major and minor versions that
-- are available with Amazon RDS. Not every database engine is available for
-- every AWS region.
--
-- MySQL
--
-- Version 5.1 (Only available in the following regions: ap-northeast-1,
-- ap-southeast-1, ap-southeast-2, eu-west-1, sa-east-1, us-west-1, us-west-2):
-- '5.1.73a | 5.1.73b'
--
-- Version 5.5 (Only available in the following regions: ap-northeast-1,
-- ap-southeast-1, ap-southeast-2, eu-west-1, sa-east-1, us-west-1, us-west-2):
-- '5.5.40 | 5.5.40a'
--
-- Version 5.5 (Available in all regions): '5.5.40b | 5.5.41'
--
-- Version 5.6 (Available in all regions): '5.6.19a | 5.6.19b | 5.6.21 | 5.6.21b | 5.6.22'
--
-- Oracle Database Enterprise Edition (oracle-ee)
--
-- Version 11.2 (Only available in the following regions: ap-northeast-1,
-- ap-southeast-1, ap-southeast-2, eu-west-1, sa-east-1, us-west-1, us-west-2):
-- '11.2.0.2.v3 | 11.2.0.2.v4 | 11.2.0.2.v5 | 11.2.0.2.v6 | 11.2.0.2.v7'
--
-- Version 11.2 (Available in all regions): '11.2.0.3.v1 | 11.2.0.3.v2 | 11.2.0.4.v1 | 11.2.0.4.v3'
--
-- Oracle Database Standard Edition (oracle-se)
--
-- Version 11.2 (Only available in the following regions: us-west-1):
-- '11.2.0.2.v3 | 11.2.0.2.v4 | 11.2.0.2.v5 | 11.2.0.2.v6 | 11.2.0.2.v7'
--
-- Version 11.2 (Only available in the following regions: eu-central-1, us-west-1):
-- '11.2.0.3.v1 | 11.2.0.3.v2 | 11.2.0.4.v1 | 11.2.0.4.v3'
--
-- Oracle Database Standard Edition One (oracle-se1)
--
-- Version 11.2 (Only available in the following regions: us-west-1):
-- '11.2.0.2.v3 | 11.2.0.2.v4 | 11.2.0.2.v5 | 11.2.0.2.v6 | 11.2.0.2.v7'
--
-- Version 11.2 (Only available in the following regions: eu-central-1, us-west-1):
-- '11.2.0.3.v1 | 11.2.0.3.v2 | 11.2.0.4.v1 | 11.2.0.4.v3'
--
-- PostgreSQL
--
-- Version 9.3 (Only available in the following regions: ap-northeast-1,
-- ap-southeast-1, ap-southeast-2, eu-west-1, sa-east-1, us-west-1, us-west-2):
-- '9.3.1 | 9.3.2'
--
-- Version 9.3 (Available in all regions): '9.3.3 | 9.3.5'
--
-- Microsoft SQL Server Enterprise Edition (sqlserver-ee)
--
-- Version 10.50 (Only available in the following regions: eu-central-1, us-west-1): '10.50.2789.0.v1'
--
-- Version 11.00 (Only available in the following regions: eu-central-1, us-west-1): '11.00.2100.60.v1'
--
-- Microsoft SQL Server Express Edition (sqlserver-ex)
--
-- Version 10.50 (Available in all regions): '10.50.2789.0.v1'
--
-- Version 11.00 (Available in all regions): '11.00.2100.60.v1'
--
-- Microsoft SQL Server Standard Edition (sqlserver-se)
--
-- Version 10.50 (Available in all regions): '10.50.2789.0.v1'
--
-- Version 11.00 (Available in all regions): '11.00.2100.60.v1'
--
-- Microsoft SQL Server Web Edition (sqlserver-web)
--
-- Version 10.50 (Available in all regions): '10.50.2789.0.v1'
--
-- Version 11.00 (Available in all regions): '11.00.2100.60.v1'
cdbiEngineVersion :: Lens' CreateDBInstance (Maybe Text)
cdbiEngineVersion =
lens _cdbiEngineVersion (\s a -> s { _cdbiEngineVersion = a })
-- | The amount of Provisioned IOPS (input/output operations per second) to be
-- initially allocated for the DB instance.
--
-- Constraints: To use PIOPS, this value must be an integer greater than 1000.
cdbiIops :: Lens' CreateDBInstance (Maybe Int)
cdbiIops = lens _cdbiIops (\s a -> s { _cdbiIops = a })
-- | The KMS key identifier for an encrypted DB instance.
--
-- The KMS key identifier is the Amazon Resource Name (ARN) for the KMS
-- encryption key. If you are creating a DB instance with the same AWS account
-- that owns the KMS encryption key used to encrypt the new DB instance, then
-- you can use the KMS key alias instead of the ARN for the KMS encryption key.
--
-- If the 'StorageEncrypted' parameter is true, and you do not specify a value
-- for the 'KmsKeyId' parameter, then Amazon RDS will use your default encryption
-- key. AWS KMS creates the default encryption key for your AWS account. Your
-- AWS account has a different default encryption key for each AWS region.
cdbiKmsKeyId :: Lens' CreateDBInstance (Maybe Text)
cdbiKmsKeyId = lens _cdbiKmsKeyId (\s a -> s { _cdbiKmsKeyId = a })
-- | License model information for this DB instance.
--
-- Valid values: 'license-included' | 'bring-your-own-license' | 'general-public-license'
cdbiLicenseModel :: Lens' CreateDBInstance (Maybe Text)
cdbiLicenseModel = lens _cdbiLicenseModel (\s a -> s { _cdbiLicenseModel = a })
-- | The password for the master database user. Can be any printable ASCII
-- character except "/", '"', or "@".
--
-- Type: String
--
-- MySQL
--
-- Constraints: Must contain from 8 to 41 characters.
--
-- Oracle
--
-- Constraints: Must contain from 8 to 30 characters.
--
-- SQL Server
--
-- Constraints: Must contain from 8 to 128 characters.
--
-- PostgreSQL
--
-- Constraints: Must contain from 8 to 128 characters.
cdbiMasterUserPassword :: Lens' CreateDBInstance Text
cdbiMasterUserPassword =
lens _cdbiMasterUserPassword (\s a -> s { _cdbiMasterUserPassword = a })
-- | The name of the master user for the client DB instance.
--
-- MySQL
--
-- Constraints:
--
-- Must be 1 to 16 alphanumeric characters. First character must be a letter.
-- Cannot be a reserved word for the chosen database engine.
--
-- Type: String
--
-- Oracle
--
-- Constraints:
--
-- Must be 1 to 30 alphanumeric characters. First character must be a letter.
-- Cannot be a reserved word for the chosen database engine.
--
-- SQL Server
--
-- Constraints:
--
-- Must be 1 to 128 alphanumeric characters. First character must be a letter.
-- Cannot be a reserved word for the chosen database engine.
--
-- PostgreSQL
--
-- Constraints:
--
-- Must be 1 to 63 alphanumeric characters. First character must be a letter.
-- Cannot be a reserved word for the chosen database engine.
--
cdbiMasterUsername :: Lens' CreateDBInstance Text
cdbiMasterUsername =
lens _cdbiMasterUsername (\s a -> s { _cdbiMasterUsername = a })
-- | Specifies if the DB instance is a Multi-AZ deployment. You cannot set the
-- AvailabilityZone parameter if the MultiAZ parameter is set to true.
cdbiMultiAZ :: Lens' CreateDBInstance (Maybe Bool)
cdbiMultiAZ = lens _cdbiMultiAZ (\s a -> s { _cdbiMultiAZ = a })
-- | Indicates that the DB instance should be associated with the specified
-- option group.
--
-- Permanent options, such as the TDE option for Oracle Advanced Security TDE,
-- cannot be removed from an option group, and that option group cannot be
-- removed from a DB instance once it is associated with a DB instance.
cdbiOptionGroupName :: Lens' CreateDBInstance (Maybe Text)
cdbiOptionGroupName =
lens _cdbiOptionGroupName (\s a -> s { _cdbiOptionGroupName = a })
-- | The port number on which the database accepts connections.
--
-- MySQL
--
-- Default: '3306'
--
-- Valid Values: '1150-65535'
--
-- Type: Integer
--
-- PostgreSQL
--
-- Default: '5432'
--
-- Valid Values: '1150-65535'
--
-- Type: Integer
--
-- Oracle
--
-- Default: '1521'
--
-- Valid Values: '1150-65535'
--
-- SQL Server
--
-- Default: '1433'
--
-- Valid Values: '1150-65535' except for '1434', '3389', '47001', '49152', and '49152'
-- through '49156'.
cdbiPort :: Lens' CreateDBInstance (Maybe Int)
cdbiPort = lens _cdbiPort (\s a -> s { _cdbiPort = a })
-- | The daily time range during which automated backups are created if automated
-- backups are enabled, using the 'BackupRetentionPeriod' parameter. For more
-- information, see <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.BackingUpAndRestoringAmazonRDSInstances.html DB Instance Backups>.
--
-- Default: A 30-minute window selected at random from an 8-hour block of time
-- per region. See the Amazon RDS User Guide for the time blocks for each region
-- from which the default backup windows are assigned.
--
-- Constraints: Must be in the format 'hh24:mi-hh24:mi'. Times should be
-- Universal Time Coordinated (UTC). Must not conflict with the preferred
-- maintenance window. Must be at least 30 minutes.
cdbiPreferredBackupWindow :: Lens' CreateDBInstance (Maybe Text)
cdbiPreferredBackupWindow =
lens _cdbiPreferredBackupWindow
(\s a -> s { _cdbiPreferredBackupWindow = a })
-- | The weekly time range (in UTC) during which system maintenance can occur.
-- For more information, see <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBMaintenance.html DB Instance Maintenance>.
--
-- Format: 'ddd:hh24:mi-ddd:hh24:mi'
--
-- Default: A 30-minute window selected at random from an 8-hour block of time
-- per region, occurring on a random day of the week. To see the time blocks
-- available, see <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AdjustingTheMaintenanceWindow.html Adjusting the Preferred Maintenance Window> in the Amazon RDS
-- User Guide.
--
-- Valid Days: Mon, Tue, Wed, Thu, Fri, Sat, Sun
--
-- Constraints: Minimum 30-minute window.
cdbiPreferredMaintenanceWindow :: Lens' CreateDBInstance (Maybe Text)
cdbiPreferredMaintenanceWindow =
lens _cdbiPreferredMaintenanceWindow
(\s a -> s { _cdbiPreferredMaintenanceWindow = a })
-- | Specifies the accessibility options for the DB instance. A value of true
-- specifies an Internet-facing instance with a publicly resolvable DNS name,
-- which resolves to a public IP address. A value of false specifies an internal
-- instance with a DNS name that resolves to a private IP address.
--
-- Default: The default behavior varies depending on whether a VPC has been
-- requested or not. The following list shows the default behavior in each case.
--
-- Default VPC: true
--
-- VPC: false
--
-- If no DB subnet group has been specified as part of the request and the
-- PubliclyAccessible value has not been set, the DB instance will be publicly
-- accessible. If a specific DB subnet group has been specified as part of the
-- request and the PubliclyAccessible value has not been set, the DB instance
-- will be private.
cdbiPubliclyAccessible :: Lens' CreateDBInstance (Maybe Bool)
cdbiPubliclyAccessible =
lens _cdbiPubliclyAccessible (\s a -> s { _cdbiPubliclyAccessible = a })
-- | Specifies whether the DB instance is encrypted.
--
-- Default: false
cdbiStorageEncrypted :: Lens' CreateDBInstance (Maybe Bool)
cdbiStorageEncrypted =
lens _cdbiStorageEncrypted (\s a -> s { _cdbiStorageEncrypted = a })
-- | Specifies the storage type to be associated with the DB instance.
--
-- Valid values: 'standard | gp2 | io1'
--
-- If you specify 'io1', you must also include a value for the 'Iops' parameter.
--
-- Default: 'io1' if the 'Iops' parameter is specified; otherwise 'standard'
cdbiStorageType :: Lens' CreateDBInstance (Maybe Text)
cdbiStorageType = lens _cdbiStorageType (\s a -> s { _cdbiStorageType = a })
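-- | A list of tags to associate with this DB instance.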
cdbiTags :: Lens' CreateDBInstance [Tag]
cdbiTags = lens _cdbiTags (\s a -> s { _cdbiTags = a }) . _List
-- | The ARN from the Key Store with which to associate the instance for TDE
-- encryption.
cdbiTdeCredentialArn :: Lens' CreateDBInstance (Maybe Text)
cdbiTdeCredentialArn =
lens _cdbiTdeCredentialArn (\s a -> s { _cdbiTdeCredentialArn = a })
-- | The password for the given ARN from the Key Store in order to access the
-- device.
cdbiTdeCredentialPassword :: Lens' CreateDBInstance (Maybe Text)
cdbiTdeCredentialPassword =
lens _cdbiTdeCredentialPassword
(\s a -> s { _cdbiTdeCredentialPassword = a })
-- | A list of EC2 VPC security groups to associate with this DB instance.
--
-- Default: The default EC2 VPC security group for the DB subnet group's VPC.
cdbiVpcSecurityGroupIds :: Lens' CreateDBInstance [Text]
cdbiVpcSecurityGroupIds =
lens _cdbiVpcSecurityGroupIds (\s a -> s { _cdbiVpcSecurityGroupIds = a })
. _List
newtype CreateDBInstanceResponse = CreateDBInstanceResponse
{ _cdbirDBInstance :: Maybe DBInstance
} deriving (Eq, Read, Show)
-- | 'CreateDBInstanceResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cdbirDBInstance' @::@ 'Maybe' 'DBInstance'
--
createDBInstanceResponse :: CreateDBInstanceResponse
createDBInstanceResponse = CreateDBInstanceResponse
{ _cdbirDBInstance = Nothing
}
cdbirDBInstance :: Lens' CreateDBInstanceResponse (Maybe DBInstance)
cdbirDBInstance = lens _cdbirDBInstance (\s a -> s { _cdbirDBInstance = a })
instance ToPath CreateDBInstance where
toPath = const "/"
instance ToQuery CreateDBInstance where
toQuery CreateDBInstance{..} = mconcat
[ "AllocatedStorage" =? _cdbiAllocatedStorage
, "AutoMinorVersionUpgrade" =? _cdbiAutoMinorVersionUpgrade
, "AvailabilityZone" =? _cdbiAvailabilityZone
, "BackupRetentionPeriod" =? _cdbiBackupRetentionPeriod
, "CharacterSetName" =? _cdbiCharacterSetName
, "DBInstanceClass" =? _cdbiDBInstanceClass
, "DBInstanceIdentifier" =? _cdbiDBInstanceIdentifier
, "DBName" =? _cdbiDBName
, "DBParameterGroupName" =? _cdbiDBParameterGroupName
, "DBSecurityGroups" =? _cdbiDBSecurityGroups
, "DBSubnetGroupName" =? _cdbiDBSubnetGroupName
, "Engine" =? _cdbiEngine
, "EngineVersion" =? _cdbiEngineVersion
, "Iops" =? _cdbiIops
, "KmsKeyId" =? _cdbiKmsKeyId
, "LicenseModel" =? _cdbiLicenseModel
, "MasterUserPassword" =? _cdbiMasterUserPassword
, "MasterUsername" =? _cdbiMasterUsername
, "MultiAZ" =? _cdbiMultiAZ
, "OptionGroupName" =? _cdbiOptionGroupName
, "Port" =? _cdbiPort
, "PreferredBackupWindow" =? _cdbiPreferredBackupWindow
, "PreferredMaintenanceWindow" =? _cdbiPreferredMaintenanceWindow
, "PubliclyAccessible" =? _cdbiPubliclyAccessible
, "StorageEncrypted" =? _cdbiStorageEncrypted
, "StorageType" =? _cdbiStorageType
, "Tags" =? _cdbiTags
, "TdeCredentialArn" =? _cdbiTdeCredentialArn
, "TdeCredentialPassword" =? _cdbiTdeCredentialPassword
, "VpcSecurityGroupIds" =? _cdbiVpcSecurityGroupIds
]
instance ToHeaders CreateDBInstance
instance AWSRequest CreateDBInstance where
type Sv CreateDBInstance = RDS
type Rs CreateDBInstance = CreateDBInstanceResponse
request = post "CreateDBInstance"
response = xmlResponse
instance FromXML CreateDBInstanceResponse where
parseXML = withElement "CreateDBInstanceResult" $ \x -> CreateDBInstanceResponse
<$> x .@? "DBInstance"
|
romanb/amazonka
|
amazonka-rds/gen/Network/AWS/RDS/CreateDBInstance.hs
|
mpl-2.0
| 31,878
| 0
| 10
| 6,559
| 2,829
| 1,806
| 1,023
| 262
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module PushTests
( tests
) where
import Test.QuickCheck (Property)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck as QC
import Util
tests :: TestTree
tests = testGroup "Tests"
[ QC.testProperty "hello prop" prop_hello
, QC.testProperty "smoke-test" prop_smoketest
--, QC.testProperty "prior channel reg" prop_prior_regged
]
prop_hello :: ValidUaid -> Property
prop_hello (ValidUaid uid) = resultsIn
[(Hello uid (Just []), HelloSuccess uid Nothing)]
prop_smoketest :: ValidUaid -> ValidChannelID -> Property
prop_smoketest (ValidUaid hex) (ValidChannelID cid) =
resultsIn [
(Hello hex (Just []), HelloSuccess hex Nothing)
, (Register cid, RegisterSuccess cid)
, (SendNotification Nothing Nothing, NotificationUpdate 1)
, (UnRegister cid, UnRegisterSuccess)
]
prop_prior_regged :: ValidUaid -> ValidChannelID -> Property
prop_prior_regged (ValidUaid hex) (ValidChannelID jid@(Just cid)) =
resultsIn [
(Hello hex (Just [cid]), HelloSuccess hex (Just [cid]))
, (Register jid, RegisterSuccess jid)
, (SendNotification Nothing Nothing, NotificationUpdate 1)
, (UnRegister jid, UnRegisterSuccess)
]
|
bbangert/push-tester
|
tests/PushTests.hs
|
mpl-2.0
| 1,354
| 0
| 11
| 346
| 370
| 200
| 170
| 28
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>Plug-n-Hack | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Suche</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/plugnhack/src/main/javahelp/org/zaproxy/zap/extension/plugnhack/resources/help_de_DE/helpset_de_DE.hs
|
apache-2.0
| 972
| 80
| 68
| 159
| 421
| 213
| 208
| -1
| -1
|
{-| Implementation of the iallocator interface.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Backend.IAlloc
( readRequest
, runIAllocator
, processRelocate
, loadData
, formatAllocate
, formatIAllocResult
, formatMultiAlloc
) where
import Data.Either ()
import Data.Maybe (fromMaybe, isJust, fromJust)
import Data.List
import Control.Monad
import System.Time
import Text.JSON (JSObject, JSValue(JSArray),
makeObj, encodeStrict, decodeStrict, fromJSObject, showJSON)
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Cluster.AllocationSolution as AllocSol
import qualified Ganeti.HTools.Cluster.AllocateSecondary as AllocSecondary
import qualified Ganeti.HTools.Cluster.Evacuate as Evacuate
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Nic as Nic
import qualified Ganeti.Constants as C
import Ganeti.HTools.AlgorithmParams (AlgorithmOptions(algRestrictToNodes))
import Ganeti.HTools.CLI
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import Ganeti.JSON (maybeFromObj, JSRecord, tryFromObj, toArray, asObjectList, readEitherString, fromJResult, fromObj, fromObjWithDefault, asJSObject, emptyContainer)
import Ganeti.Types ( EvacMode(ChangePrimary, ChangeSecondary)
, adminStateFromRaw, AdminState(..))
import Ganeti.Utils
{-# ANN module "HLint: ignore Eta reduce" #-}
-- | Type alias for the result of an IAllocator call.
type IAllocResult = (String, JSValue, Node.List, Instance.List)
-- | Parse a NIC within an instance (in a creation request)
parseNic :: String -> JSRecord -> Result Nic.Nic
parseNic n a = do
mac <- maybeFromObj a "mac"
ip <- maybeFromObj a "ip"
mode <- maybeFromObj a "mode" >>= \m -> case m of
Just "bridged" -> Ok $ Just Nic.Bridged
Just "routed" -> Ok $ Just Nic.Routed
Just "openvswitch" -> Ok $ Just Nic.OpenVSwitch
Nothing -> Ok Nothing
_ -> Bad $ "invalid NIC mode in instance " ++ n
link <- maybeFromObj a "link"
bridge <- maybeFromObj a "bridge"
network <- maybeFromObj a "network"
return (Nic.create mac ip mode link bridge network)
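-- An illustrative (hypothetical) invocation, assuming the usual 'showJSON'
-- encodings from Text.JSON and that 'maybeFromObj' reads a missing key as
-- 'Nothing':
--
-- > parseNic "inst0" [("mac", showJSON "aa:00:00:36:a7:92"), ("mode", showJSON "bridged")]
--
-- should yield an 'Ok' NIC with the given MAC, mode 'Nic.Bridged', and all
-- remaining fields set to 'Nothing'.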
-- | Parse the basic specifications of an instance.
--
-- Instances in the cluster instance list and the instance in an
-- 'Allocate' request share some common properties, which are read by
-- this function.
parseBaseInstance :: String
-> JSRecord
-> Result (String, Instance.Instance)
parseBaseInstance n a = do
let errorMessage = "invalid data for instance '" ++ n ++ "'"
let extract x = tryFromObj errorMessage a x
disk <- extract "disk_space_total"
jsdisks <- extract "disks" >>= toArray >>= asObjectList
dsizes <- mapM (flip (tryFromObj errorMessage) "size" . fromJSObject) jsdisks
dspindles <- mapM (annotateResult errorMessage .
flip maybeFromObj "spindles" . fromJSObject) jsdisks
let disks = zipWith Instance.Disk dsizes dspindles
mem <- extract "memory"
vcpus <- extract "vcpus"
tags <- extract "tags"
dt <- extract "disk_template"
su <- extract "spindle_use"
nics <- extract "nics" >>= toArray >>= asObjectList >>=
mapM (parseNic n . fromJSObject)
state <- (tryFromObj errorMessage a "admin_state" >>= adminStateFromRaw)
`mplus` Ok AdminUp
let getRunSt AdminOffline = StatusOffline
getRunSt AdminDown = StatusDown
getRunSt AdminUp = Running
-- Not forthcoming by default.
forthcoming <- extract "forthcoming" `orElse` Ok False
return
(n,
Instance.create n mem disk disks vcpus (getRunSt state) tags
True 0 0 dt su nics forthcoming)
-- | Parses an instance as found in the cluster instance list.
parseInstance :: NameAssoc -- ^ The node name-to-index association list
-> String -- ^ The name of the instance
-> JSRecord -- ^ The JSON object
-> Result (String, Instance.Instance)
parseInstance ktn n a = do
base <- parseBaseInstance n a
nodes <- fromObj a "nodes"
(pnode, snodes) <-
case nodes of
[] -> Bad $ "empty node list for instance " ++ n
x:xs -> readEitherString x >>= \x' -> return (x', xs)
pidx <- lookupNode ktn n pnode
sidx <- case snodes of
[] -> return Node.noSecondary
x:_ -> readEitherString x >>= lookupNode ktn n
return (n, Instance.setBoth (snd base) pidx sidx)
-- | Parses a node as found in the cluster node list.
parseNode :: NameAssoc -- ^ The group association
-> String -- ^ The node's name
-> JSRecord -- ^ The JSON object
-> Result (String, Node.Node)
parseNode ktg n a = do
let desc = "invalid data for node '" ++ n ++ "'"
extract x = tryFromObj desc a x
extractDef def key = fromObjWithDefault a key def
offline <- extract "offline"
drained <- extract "drained"
guuid <- extract "group"
hvstate <- extractDef emptyContainer "hv_state"
vm_capable <- annotateResult desc $ maybeFromObj a "vm_capable"
let vm_capable' = fromMaybe True vm_capable
gidx <- lookupGroup ktg n guuid
ndparams <- extract "ndparams" >>= asJSObject
-- Despite the fact that the tags field is reported by iallocator.py,
-- some tests don't contain a tags field
tags <- extractDef [] "tags"
excl_stor <- tryFromObj desc (fromJSObject ndparams) "exclusive_storage"
let live = not offline && vm_capable'
lvextract def = eitherLive live def . extract
sptotal <- if excl_stor
then lvextract 0 "total_spindles"
else tryFromObj desc (fromJSObject ndparams) "spindle_count"
spfree <- lvextract 0 "free_spindles"
mtotal <- lvextract 0.0 "total_memory"
mnode <- lvextract 0 "reserved_memory"
mfree <- lvextract 0 "free_memory"
dtotal <- lvextract 0.0 "total_disk"
dfree <- lvextract 0 "free_disk"
ctotal <- lvextract 0.0 "total_cpus"
cnos <- lvextract 0 "reserved_cpus"
let node_mem = obtainNodeMemory hvstate mnode
node = flip Node.setNodeTags tags $
Node.create n mtotal node_mem mfree dtotal dfree ctotal cnos
(not live || drained) sptotal spfree gidx excl_stor
return (n, node)
-- | Parses a group as found in the cluster group list.
parseGroup :: String -- ^ The group UUID
-> JSRecord -- ^ The JSON object
-> Result (String, Group.Group)
parseGroup u a = do
let extract x = tryFromObj ("invalid data for group '" ++ u ++ "'") a x
name <- extract "name"
apol <- extract "alloc_policy"
nets <- extract "networks"
ipol <- extract "ipolicy"
tags <- extract "tags"
return (u, Group.create name u apol nets ipol tags)
-- | Top-level parser.
--
-- The result is a tuple of any warning messages and the parsed
-- request; if parsing the input data fails, we'll return a 'Bad'
-- value.
parseData :: ClockTime -- ^ The current time
-> String -- ^ The JSON message as received from Ganeti
-> Result ([String], Request) -- ^ Result tuple
parseData now body = do
decoded <- fromJResult "Parsing input IAllocator message" (decodeStrict body)
let obj = fromJSObject decoded
extrObj x = tryFromObj "invalid iallocator message" obj x
-- request parser
request <- liftM fromJSObject (extrObj "request")
let extrFromReq r x = tryFromObj "invalid request dict" r x
let extrReq x = extrFromReq request x
-- existing group parsing
glist <- liftM fromJSObject (extrObj "nodegroups")
gobj <- mapM (\(x, y) -> asJSObject y >>= parseGroup x . fromJSObject) glist
let (ktg, gl) = assignIndices gobj
-- existing node parsing
nlist <- liftM fromJSObject (extrObj "nodes")
nobj <- mapM (\(x,y) ->
asJSObject y >>= parseNode ktg x . fromJSObject) nlist
let (ktn, nl) = assignIndices nobj
-- existing instance parsing
ilist <- extrObj "instances"
let idata = fromJSObject ilist
iobj <- mapM (\(x,y) ->
asJSObject y >>= parseInstance ktn x . fromJSObject) idata
let (kti, il) = assignIndices iobj
-- cluster tags
ctags <- extrObj "cluster_tags"
let ex_tags = extractExTags ctags
dsrd_loc_tags = extractDesiredLocations ctags
updateTags = updateExclTags ex_tags .
updateDesiredLocationTags dsrd_loc_tags
cdata1 <- mergeData [] [] [] [] now (ClusterData gl nl il ctags defIPolicy)
let (msgs, fix_nl) = checkData (cdNodes cdata1) (cdInstances cdata1)
cdata = cdata1 { cdNodes = fix_nl }
map_n = cdNodes cdata
map_i = cdInstances cdata
map_g = cdGroups cdata
optype <- extrReq "type"
rqtype <-
case () of
_ | optype == C.iallocatorModeAlloc ->
do
rname <- extrReq "name"
rgn <- maybeFromObj request "group_name"
rest_nodes <- maybeFromObj request "restrict-to-nodes"
req_nodes <- extrReq "required_nodes"
inew <- parseBaseInstance rname request
let io = updateTags $ snd inew
return $ Allocate io (Cluster.AllocDetails req_nodes rgn)
rest_nodes
| optype == C.iallocatorModeReloc ->
do
rname <- extrReq "name"
ridx <- lookupInstance kti rname
req_nodes <- extrReq "required_nodes"
ex_nodes <- extrReq "relocate_from"
ex_idex <- mapM (Container.findByName map_n) ex_nodes
return $ Relocate ridx req_nodes (map Node.idx ex_idex)
| optype == C.iallocatorModeChgGroup ->
do
rl_names <- extrReq "instances"
rl_insts <- mapM (liftM Instance.idx .
Container.findByName map_i) rl_names
gr_uuids <- extrReq "target_groups"
gr_idxes <- mapM (liftM Group.idx .
Container.findByName map_g) gr_uuids
return $ ChangeGroup rl_insts gr_idxes
| optype == C.iallocatorModeNodeEvac ->
do
rl_names <- extrReq "instances"
rl_insts <- mapM (Container.findByName map_i) rl_names
let rl_idx = map Instance.idx rl_insts
rl_mode <- extrReq "evac_mode"
return $ NodeEvacuate rl_idx rl_mode
| optype == C.iallocatorModeMultiAlloc ->
do
arry <- extrReq "instances" :: Result [JSObject JSValue]
let inst_reqs = map fromJSObject arry
prqs <- forM inst_reqs (\r ->
do
rname <- extrFromReq r "name"
rgn <- maybeFromObj request "group_name"
req_nodes <- extrFromReq r "required_nodes"
inew <- parseBaseInstance rname r
let io = updateTags $ snd inew
return (io, Cluster.AllocDetails
req_nodes rgn))
return $ MultiAllocate prqs
| optype == C.iallocatorModeAllocateSecondary ->
do
rname <- extrReq "name"
ridx <- lookupInstance kti rname
return $ AllocateSecondary ridx
| otherwise -> fail ("Invalid request type '" ++ optype ++ "'")
return (msgs, Request rqtype cdata)
-- | Formats the result into a valid IAllocator response message.
formatResponse :: Bool -- ^ Whether the request was successful
-> String -- ^ Information text
-> JSValue -- ^ The JSON encoded result
-> String -- ^ The full JSON-formatted message
formatResponse success info result =
let e_success = ("success", showJSON success)
e_info = ("info", showJSON info)
e_result = ("result", result)
in encodeStrict $ makeObj [e_success, e_info, e_result]
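-- For illustration only (hypothetical values), the encoded message looks
-- roughly like:
--
-- > formatResponse True "request ok" (showJSON ["node1"])
-- >   -- ~ "{\"success\":true,\"info\":\"request ok\",\"result\":[\"node1\"]}"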
-- | Flatten the log of a solution into a string.
describeSolution :: AllocSol.GenericAllocSolution a -> String
describeSolution = intercalate ", " . AllocSol.asLog
-- | Convert allocation/relocation results into the result format.
formatAllocate :: Instance.List
-> AllocSol.GenericAllocSolution a
-> Result IAllocResult
formatAllocate il as = do
let info = describeSolution as
case AllocSol.asSolution as of
Nothing -> fail info
Just (nl, inst, nodes, _) ->
do
let il' = Container.add (Instance.idx inst) inst il
return (info, showJSON $ map Node.name nodes, nl, il')
-- | Convert allocation/relocation results into the result format.
formatAllocateSecondary :: Instance.List
-> AllocSol.GenericAllocSolution a
-> Result IAllocResult
formatAllocateSecondary il as = do
let info = describeSolution as
case AllocSol.asSolution as of
Nothing -> fail info
Just (nl, inst, [_, snode], _) ->
do
let il' = Container.add (Instance.idx inst) inst il
return (info, showJSON $ Node.name snode, nl, il')
_ -> fail $ "Internal error (not a DRBD allocation); info was: " ++ info
-- | Convert multi allocation results into the result format.
formatMultiAlloc :: ( Node.List, Instance.List
, Cluster.GenericAllocSolutionList a)
-> Result IAllocResult
formatMultiAlloc (fin_nl, fin_il, ars) =
let rars = reverse ars
(allocated, failed) = partition (isJust . AllocSol.asSolution . snd) rars
aars = map (\(_, ar) ->
let (_, inst, nodes, _) = fromJust $ AllocSol.asSolution ar
iname = Instance.name inst
nnames = map Node.name nodes
in (iname, nnames)) allocated
fars = map (\(inst, ar) ->
let iname = Instance.name inst
in (iname, describeSolution ar)) failed
info = show (length failed) ++ " instances failed to allocate and " ++
show (length allocated) ++ " were allocated successfully"
in return (info, showJSON (aars, fars), fin_nl, fin_il)
-- | Convert a node-evacuation/change group result.
formatNodeEvac :: Group.List
-> Node.List
-> Instance.List
-> (Node.List, Instance.List, Evacuate.EvacSolution)
-> Result IAllocResult
formatNodeEvac gl nl il (fin_nl, fin_il, es) =
let iname = Instance.name . flip Container.find il
nname = Node.name . flip Container.find nl
gname = Group.name . flip Container.find gl
fes = map (\(idx, msg) -> (iname idx, msg)) $ Evacuate.esFailed es
mes = map (\(idx, gdx, ndxs) -> (iname idx, gname gdx, map nname ndxs))
$ Evacuate.esMoved es
failed = length fes
moved = length mes
info = show failed ++ " instances failed to move and " ++ show moved ++
" were moved successfully"
in Ok (info, showJSON (mes, fes, Evacuate.esOpCodes es), fin_nl, fin_il)
-- | Runs relocate for a single instance.
--
-- This is a wrapper over the 'Cluster.tryNodeEvac' function that is run
-- with a single instance (ours), and further it checks that the
-- result it got (in the nodes field) is actually consistent, as
-- tryNodeEvac is designed to output primarily an opcode list, not a
-- node list.
processRelocate :: AlgorithmOptions
-> Group.List -- ^ The group list
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> Idx -- ^ The index of the instance to move
-> Int -- ^ The number of nodes required
-> [Ndx] -- ^ Nodes which should not be used
-> Result (Node.List, Instance.List, [Ndx]) -- ^ Solution list
processRelocate opts gl nl il idx 1 exndx = do
let orig = Container.find idx il
sorig = Instance.sNode orig
porig = Instance.pNode orig
mir_type = Instance.mirrorType orig
(exp_node, node_type, reloc_type) <-
case mir_type of
MirrorNone -> fail "Can't relocate non-mirrored instances"
MirrorInternal -> return (sorig, "secondary", ChangeSecondary)
MirrorExternal -> return (porig, "primary", ChangePrimary)
when (exndx /= [exp_node]) .
-- FIXME: we can't use the excluded nodes here; the logic is
-- already _but only partially_ implemented in tryNodeEvac...
fail $ "Unsupported request: excluded nodes not equal to\
\ instance's " ++ node_type ++ "(" ++ show exp_node
++ " versus " ++ show exndx ++ ")"
(nl', il', esol) <- Evacuate.tryNodeEvac opts gl nl il reloc_type [idx]
nodes <- case lookup idx (Evacuate.esFailed esol) of
Just msg -> fail msg
Nothing ->
case lookup idx (map (\(a, _, b) -> (a, b))
(Evacuate.esMoved esol)) of
Nothing ->
fail "Internal error: lost instance idx during move"
Just n -> return n
let inst = Container.find idx il'
pnode = Instance.pNode inst
snode = Instance.sNode inst
nodes' <-
case mir_type of
MirrorNone -> fail "Internal error: mirror type none after relocation?!"
MirrorInternal ->
do
when (snode == sorig) $
fail "Internal error: instance didn't change secondary node?!"
when (snode == pnode) $
fail "Internal error: selected primary as new secondary?!"
if nodes == [pnode, snode]
then return [snode] -- only the new secondary is needed
else fail $ "Internal error: inconsistent node list (" ++
show nodes ++ ") versus instance nodes (" ++ show pnode ++
"," ++ show snode ++ ")"
MirrorExternal ->
do
when (pnode == porig) $
fail "Internal error: instance didn't change primary node?!"
if nodes == [pnode]
then return nodes
else fail $ "Internal error: inconsistent node list (" ++
show nodes ++ ") versus instance node (" ++ show pnode ++ ")"
return (nl', il', nodes')
processRelocate _ _ _ _ _ reqn _ =
fail $ "Exchange " ++ show reqn ++ " nodes mode is not implemented"
formatRelocate :: (Node.List, Instance.List, [Ndx])
-> Result IAllocResult
formatRelocate (nl, il, ndxs) =
let nodes = map (`Container.find` nl) ndxs
names = map Node.name nodes
in Ok ("success", showJSON names, nl, il)
-- | Process a request and return new node lists.
processRequest :: AlgorithmOptions -> Request -> Result IAllocResult
processRequest opts request =
let Request rqtype (ClusterData gl nl il _ _) = request
in case rqtype of
Allocate xi (Cluster.AllocDetails reqn Nothing) rest_nodes ->
let opts' = opts { algRestrictToNodes = algRestrictToNodes opts
`mplus` rest_nodes }
in Cluster.tryMGAlloc opts' gl nl il xi reqn >>= formatAllocate il
Allocate xi (Cluster.AllocDetails reqn (Just gn)) rest_nodes ->
let opts' = opts { algRestrictToNodes = algRestrictToNodes opts
`mplus` rest_nodes }
in Cluster.tryGroupAlloc opts' gl nl il gn xi reqn
>>= formatAllocate il
Relocate idx reqn exnodes ->
processRelocate opts gl nl il idx reqn exnodes >>= formatRelocate
ChangeGroup gdxs idxs ->
Cluster.tryChangeGroup opts gl nl il idxs gdxs >>=
formatNodeEvac gl nl il
NodeEvacuate xi mode ->
Evacuate.tryNodeEvac opts gl nl il mode xi >>=
formatNodeEvac gl nl il
MultiAllocate xies ->
Cluster.allocList opts gl nl il xies [] >>= formatMultiAlloc
AllocateSecondary xi ->
AllocSecondary.tryAllocateSecondary opts gl nl il xi
>>= formatAllocateSecondary il
-- | Reads the request from the data file(s).
readRequest :: FilePath -> IO Request
readRequest fp = do
now <- getClockTime
input_data <- case fp of
"-" -> getContents
_ -> readFile fp
case parseData now input_data of
Bad err -> exitErr err
Ok (fix_msgs, rq) -> maybeShowWarnings fix_msgs >> return rq
-- | Format an IAlloc result to maybe the new cluster and a response.
formatIAllocResult :: Result IAllocResult
-> (Maybe (Node.List, Instance.List), String)
formatIAllocResult iallocResult =
let (ok, info, result, cdata) =
case iallocResult of
Ok (msg, r, nl, il) -> (True, "Request successful: " ++ msg, r,
Just (nl, il))
Bad msg -> (False, "Request failed: " ++ msg, JSArray [], Nothing)
rstring = formatResponse ok info result
in (cdata, rstring)
-- | Main iallocator pipeline.
runIAllocator :: AlgorithmOptions
-> Request -> (Maybe (Node.List, Instance.List), String)
runIAllocator opts request = formatIAllocResult $ processRequest opts request
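-- A hypothetical driver wiring the pieces together (illustrative only; the
-- name 'defaultOptions' stands for some 'AlgorithmOptions' value and is not
-- defined in this module):
--
-- > main :: IO ()
-- > main = do
-- >   rq <- readRequest "-"            -- read the request from stdin
-- >   let (_, resp) = runIAllocator defaultOptions rq
-- >   putStrLn resp                    -- emit the JSON response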
-- | Load the data from an iallocation request file
loadData :: FilePath -- ^ The path to the file
-> IO (Result ClusterData)
loadData fp = do
Request _ cdata <- readRequest fp
return $ Ok cdata
|
andir/ganeti
|
src/Ganeti/HTools/Backend/IAlloc.hs
|
bsd-2-clause
| 22,824
| 0
| 24
| 6,453
| 5,552
| 2,777
| 2,775
| 419
| 9
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
module Proposal229f_instances where
import GHC.Exts
import Data.String
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
instance IsList (Q (TExp String)) where
type Item (Q (TExp String)) = Char
fromList = liftTyped
toList = undefined
instance IsList (Q Exp) where
type Item (Q Exp) = Char
fromList = lift
toList = undefined
instance IsString (Q (TExp String)) where
fromString = liftTyped
instance IsString (Q Exp) where
fromString = lift
|
sdiehl/ghc
|
testsuite/tests/parser/should_compile/Proposal229f_instances.hs
|
bsd-3-clause
| 539
| 0
| 9
| 94
| 163
| 91
| 72
| 19
| 0
|
import Control.Applicative
class PipelineClass p where
(=$=) :: p a b x -> p b c y -> p a c (x, y)
data Pipeline i o r
= HaveOutput o (Pipeline i o r)
| NeedInput (Maybe i -> Pipeline i o r)
| Done r
fromDone :: Pipeline i o r -> Maybe r
fromDone (Done x) = Just x
fromDone _ = Nothing
instance PipelineClass Pipeline where
Done x =$= Done y = Done (x, y)
HaveOutput _ n =$= Done y = n =$= Done y
NeedInput f =$= p2 = NeedInput $ \i -> f i =$= p2
p1 =$= HaveOutput o' n' = HaveOutput o' (p1 =$= n')
HaveOutput o n =$= NeedInput f = n =$= f (Just o)
Done r =$= NeedInput f = Done r =$= f Nothing
instance Functor (Pipeline i o) where
f `fmap` Done r = Done $ f r
f `fmap` NeedInput n = NeedInput $ \i -> f `fmap` n i
f `fmap` HaveOutput o n = HaveOutput o $ f `fmap` n
fromList :: [a] -> Pipeline () a ()
fromList = foldr HaveOutput (Done ())
toList :: Pipeline a () [a]
toList = NeedInput f
where
f Nothing = Done []
f (Just x) = (x :) <$> toList
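-- A small illustrative check (editorial addition, not in the original file):
-- composing a list source with the collecting sink round-trips the input.
exampleRoundTrip :: Maybe ((), [Int])
exampleRoundTrip = fromDone (fromList [1, 2, 3] =$= toList)
-- expected value: Just ((), [1, 2, 3])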
|
YoshikuniJujo/forest
|
subprojects/pipeline/pipeline.hs
|
bsd-3-clause
| 970
| 2
| 10
| 238
| 538
| 264
| 274
| 27
| 2
|
module SecondTransfer.MainLoop.ClientPetitioner(
ClientPetitioner(..)
) where
--import Control.Lens
--import Data.Conduit
-- import qualified Data.ByteString as B
import SecondTransfer.MainLoop.CoherentWorker (HqHeaders,InputDataStream)
class ClientPetitioner a where
request :: a -> HqHeaders -> InputDataStream -> IO (HqHeaders,InputDataStream)
|
shimmercat/second-transfer
|
hs-src/SecondTransfer/MainLoop/ClientPetitioner.hs
|
bsd-3-clause
| 485
| 0
| 11
| 170
| 68
| 41
| 27
| 5
| 0
|
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..), Counts(..))
import System.Exit (ExitCode(..), exitWith)
import SumOfMultiples (sumOfMultiples, sumOfMultiplesDefault)
exitProperly :: IO Counts -> IO ()
exitProperly m = do
counts <- m
exitWith $ if failures counts /= 0 || errors counts /= 0 then ExitFailure 1 else ExitSuccess
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
main = exitProperly $ runTestTT $ TestList
[ TestList sumOfMultiplesTests ]
-- Note that the upper bound is not included in the result
sumOfMultiplesTests :: [Test]
sumOfMultiplesTests =
[ testCase "1" $
0 @=? sumOfMultiplesDefault 1
, testCase "4" $
3 @=? sumOfMultiplesDefault 4
, testCase "10" $
23 @=? sumOfMultiplesDefault 10
, testCase "1000" $
233168 @=? sumOfMultiplesDefault 1000
, testCase "[7, 13, 17] 20" $
51 @=? sumOfMultiples [7, 13, 17] 20
, testCase "[4, 6] 15" $
30 @=? sumOfMultiples [4, 6] 15
, testCase "[5, 6, 8] 150" $
4419 @=? sumOfMultiples [5, 6, 8] 150
, testCase "[43, 47] 10000" $
2203160 @=? sumOfMultiples [43, 47] 10000
, testCase "[5, 25] 51" $
275 @=? sumOfMultiples [5,25] 51
]
|
stevejb71/xhaskell
|
sum-of-multiples/sum-of-multiples_test.hs
|
mit
| 1,238
| 0
| 12
| 260
| 408
| 215
| 193
| 32
| 2
|
module Main where
import Control.Monad
import Data.Maybe
import Language.Haskell.HLint
import System.Environment
import System.Exit
main :: IO ()
main = do
args <- getArgs
cabalMacros <- getCabalMacrosPath
hints <- hlint $ ["Web", "--cpp-define=HLINT", "--cpp-ansi", "--cpp-file=" ++ cabalMacros] ++ args
unless (null hints) exitFailure
getCabalMacrosPath :: IO FilePath
getCabalMacrosPath = do
env <- getEnvironment
let dist = fromMaybe "dist" $ lookup "HASKELL_DIST_DIR" env
return $ dist ++ "/build/autogen/cabal_macros.h"
|
Javran/twitter-conduit
|
tests/hlint.hs
|
bsd-2-clause
| 549
| 0
| 11
| 89
| 156
| 80
| 76
| 17
| 1
|
module InfixIn2 where
data Inf a b = Nil | a :* b
f :: (Inf [Int] [Float]) -> [Int]
f Nil = []
f ((a :* b))
= case a of
a@[] -> a
a@(b_1 : b_2) -> a
f ((a :* b)) = a
|
kmate/HaRe
|
old/testing/introCase/InfixIn2AST.hs
|
bsd-3-clause
| 201
| 0
| 10
| 81
| 123
| 70
| 53
| 9
| 2
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
<title>Active Scan Rules | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>İndeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Axtar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
ccgreen13/zap-extensions
|
src/org/zaproxy/zap/extension/ascanrules/resources/help_az_AZ/helpset_az_AZ.hs
|
apache-2.0
| 980
| 85
| 52
| 161
| 400
| 211
| 189
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
-- | Separate module because TH.
module System.Process.Log
(logProcessRun
,showProcessArgDebug)
where
import Control.Monad.Logger
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Language.Haskell.TH
-- | Log running a process with its arguments, for debugging (-v).
logProcessRun :: Q Exp
logProcessRun =
[|let f :: MonadLogger m => String -> [String] -> m ()
f name args =
$logDebug
("Run process: " <> T.pack name <> " " <>
T.intercalate
" "
(map showProcessArgDebug args))
in f|]
-- | Show a process arg including speechmarks when necessary. Just for
-- debugging purposes, not functionally important.
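--
-- For example (illustrative):
--
-- > showProcessArgDebug "foo bar" == "\"foo bar\""
-- > showProcessArgDebug "foo"     == "foo"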
showProcessArgDebug :: [Char] -> Text
showProcessArgDebug x
| any special x = T.pack (show x)
| otherwise = T.pack x
where special '"' = True
special ' ' = True
special _ = False
|
mathhun/stack
|
src/System/Process/Log.hs
|
bsd-3-clause
| 1,050
| 0
| 8
| 334
| 153
| 87
| 66
| 26
| 3
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE GADTs #-}
module CannotDoRep0_2 where
import GHC.Generics
-- We do not support GADTs
data Term a where
Int :: Term Int
deriving instance Generic (Term a)
|
urbanslug/ghc
|
testsuite/tests/generics/GenCannotDoRep0_2.hs
|
bsd-3-clause
| 261
| 0
| 7
| 63
| 40
| 25
| 15
| 8
| 0
|
-- !! Lone import of field label
-- (contributed/reported by Ross Paterson.)
module M where
import Mod170_A(field)
x = field
|
urbanslug/ghc
|
testsuite/tests/module/mod170.hs
|
bsd-3-clause
| 127
| 0
| 5
| 22
| 19
| 13
| 6
| 3
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- Grabbed from https://github.com/fujimura/wai-hspec-example/blob/master/test/Helper.hs
module Helper
(
module X
, get
, getApp
, getBody
, getStatus
, shouldRedirectTo
, shouldRespondWith
) where
import Control.Applicative as X
import Control.Monad.Trans as X
import Test.Hspec as X
import Test.HUnit (assertBool, assertFailure)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Lazy as LBS
import qualified Network.HTTP.Types as HT
import qualified Network.Wai as W
import qualified Network.Wai.Test as WT
import qualified Web.Scotty as Scotty
import qualified App
getApp :: IO W.Application
getApp = Scotty.scottyApp App.app
get :: W.Application -> BS.ByteString -> IO WT.SResponse
get app path =
WT.runSession (WT.srequest (WT.SRequest req "")) app
where req = WT.setRawPathInfo WT.defaultRequest path
getBody :: WT.SResponse -> LBS.ByteString
getBody = WT.simpleBody
getStatus :: WT.SResponse -> Int
getStatus = HT.statusCode . WT.simpleStatus
orFailWith :: Bool -> String -> Expectation
orFailWith = flip assertBool
failWith :: String -> Expectation
failWith = assertFailure
-- TODO Use Status from http-types
shouldRespondWith :: WT.SResponse -> Int -> Expectation
shouldRespondWith response status =
(getStatus response == status) `orFailWith` message
where
message = "Expected status to be \"" ++ show status ++ "\", but \"" ++ show actual ++ "\""
actual = getStatus response
shouldRedirectTo :: WT.SResponse -> String -> Expectation
shouldRedirectTo response destination =
  if getStatus response /= 302
    then failWith "Expected response to be a redirect but it was not"
else case lookup HT.hLocation $ WT.simpleHeaders response of
Just v -> assertBool
("Expected to redirect to \"" ++ destination ++ "\" but \"" ++ C8.unpack v ++ "\"")
(C8.unpack v == destination)
Nothing -> failWith "Invalid redirect response header"
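-- A hypothetical spec using these helpers (the route and expected status are
-- assumptions for illustration, not part of the app under test):
--
-- > spec :: Spec
-- > spec = describe "GET /" $
-- >   it "responds with 200" $ do
-- >     app      <- getApp
-- >     response <- get app "/"
-- >     response `shouldRespondWith` 200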
|
slogsdon/url
|
test/Helper.hs
|
mit
| 2,212
| 0
| 14
| 550
| 502
| 283
| 219
| 50
| 3
|
import Data.List
import Data.Bool
import Data.Ord
import System.IO
import Control.Monad
-- Longest String
longest :: [String] -> String
longest xss = snd $ maximum $ [(length xs, xs) | xs <- xss]
-- Dank Match
-- [First subsection] [Second subsection] [Name]
dankMatch :: [String] -> String -> String -> Bool
dankMatch _ _ [] = False
dankMatch n fs ss = fs `isInfixOf` head n && ss `isInfixOf` (head . tail) n || dankMatch n (fs ++ [head ss]) (tail ss)
-- Dank Iter
-- [Name] [Word List]
dankIter :: [String] -> String -> String
dankIter n wl =
if dankMatch n [head wl] (tail wl)
then wl
else ""
-- Dank Apply
-- [Name] -> [Word List] -> [Matched Words]
dankApply :: [String] -> [String] -> [String]
dankApply n = map (dankIter n)
main :: IO ()
main = do
handle <- openFile "enable1.txt" ReadMode
contents <- hGetContents handle
let wordlist = words contents
let memes = dankApply ["donald", "knuth"] wordlist
print $ longest memes
hClose handle
|
kendricktan/dailyprogrammer_h
|
intermediate/intermediate_281.hs
|
mit
| 997
| 0
| 11
| 219
| 363
| 190
| 173
| 25
| 2
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Unison.Dom where
import Control.Monad
import Control.Monad.IO.Class
import Data.Foldable
import Data.Text (Text)
import GHCJS.DOM.Types (IsNode)
import qualified GHCJS.DOM.Document as Document
import qualified GHCJS.DOM.Element as Element
import qualified GHCJS.DOM.HTMLElement as HTMLElement
import qualified GHCJS.DOM.Node as Node
import qualified GHCJS.DOM.Types as Types
newtype Dom a = Dom { run :: Document -> IO a }
type Node = Node.Node
type Document = Document.Document
type Element = Element.Element
type HTMLElement = HTMLElement.HTMLElement
unsafeAsHTMLElement :: Node -> HTMLElement
unsafeAsHTMLElement node = Types.castToHTMLElement node
raw :: Text -> Dom Node
raw s = Dom $ \doc -> do
Just n <- Document.createElement doc (Just "div")
let elem = unsafeAsHTMLElement (Node.toNode n)
Element.setInnerHTML elem (Just s)
pure $ Node.toNode n
text :: Text -> Dom Node
text s = Dom $ \doc -> do
Just n <- Document.createTextNode doc s
pure $ Node.toNode n
el :: IsNode n => Text -> [(Text,Text)] -> [Dom n] -> Dom Node
el tag attrs inners = Dom $ \doc -> do
Just parent <- Document.createElement doc (Just tag)
traverse_ (\inner -> run inner doc >>= \dom -> Node.appendChild parent (Just dom)) inners
traverse_ (\(k,v) -> Element.setAttribute parent k v) attrs
pure $ Node.toNode parent
el' :: IsNode n => Text -> [Dom n] -> Dom Node
el' tag inner = el tag [] inner
askDocument :: Dom Document
askDocument = Dom $ \doc -> pure doc
instance MonadIO Dom where
liftIO a = Dom (const a)
instance Monad Dom where
return a = Dom (const (pure a))
Dom a >>= f = Dom $ \d -> a d >>= (\a -> run (f a) d)
instance Functor Dom where
fmap = liftM
instance Applicative Dom where
pure = return
(<*>) = ap
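-- A hypothetical usage sketch (assuming OverloadedStrings for the Text
-- literals, which this module does not enable itself):
--
-- > greeting :: Dom Node
-- > greeting = el "div" [("class", "greeting")] [text "hello", el' "b" [text "world"]]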
|
nightscape/platform
|
editor/src/Unison/Dom.hs
|
mit
| 1,792
| 0
| 16
| 333
| 706
| 372
| 334
| 49
| 1
|
module Main
where
import qualified Test.Hspec.Setup
main :: IO ()
main = Test.Hspec.Setup.main
|
yamadapc/haskell-hspec-setup
|
bin/Main.hs
|
mit
| 99
| 0
| 6
| 17
| 30
| 19
| 11
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Anonymous.Tor.Protocol.Parser.AstSpec where
import qualified Data.Attoparsec.ByteString as Atto
import Network.Anonymous.Tor.Protocol.Parser.Ast
import Test.Hspec
spec :: Spec
spec = do
describe "looking up keys" $ do
it "should return true when a key exists" $
let tokens = [Token "foo" Nothing]
in key "foo" tokens `shouldBe` True
it "should return false when a key does not exist" $
let tokens = [Token "foo" Nothing]
in key "bar" tokens `shouldBe` False
it "should return true when a key has a value associated with it" $
let tokens = [Token "foo" (Just "bar")]
in key "foo" tokens `shouldBe` True
it "should return true when a key exists multiple times" $
let tokens = [Token "foo" Nothing, Token "foo" Nothing]
in key "foo" tokens `shouldBe` True
describe "looking up values" $ do
it "should return value when a key has a value" $
let tokens = [Token "foo" (Just "bar")]
in value "foo" tokens `shouldBe` Just ("bar")
it "should return Nothing when a key has no value" $
let tokens = [Token "foo" Nothing]
in value "foo" tokens `shouldBe` Nothing
it "should return Nothing when a key does not exist" $
let tokens = [Token "foo" Nothing]
in value "bar" tokens `shouldBe` Nothing
it "should return first occurence if a key exists more than one time" $
let tokens = [Token "foo" (Just "bar"), Token "foo" (Just "wombat")]
in value "foo" tokens `shouldBe` (Just "bar")
describe "looking up values and parsing them" $ do
let wombatParser = Atto.string "wombat"
it "should succeed when parsing digits" $
let tokens = [Token "foo" (Just "wombat")]
in valueAs wombatParser "foo" tokens `shouldBe` Just ("wombat")
it "should return nothing when value is not found" $
let tokens = [Token "foo" (Just "wombat")]
in valueAs wombatParser "bar" tokens `shouldBe` Nothing
it "should return nothing when value cannot be parsed" $
let tokens = [Token "foo" (Just "abcd")]
in valueAs wombatParser "foo" tokens `shouldBe` Nothing
describe "looking up lines from replies" $ do
it "should look up a simple line" $
let reply = [Line 250 [Token "foo" Nothing]]
in line "foo" reply `shouldBe` Just (Line 250 [Token "foo" Nothing])
it "should fail when no line exists" $
let reply = [Line 250 [Token "foo" Nothing]]
in line "bar" reply `shouldBe` Nothing
it "should fail on case sensitivity" $
let reply = [Line 250 [Token "Foo" Nothing]]
in line "foo" reply `shouldBe` Nothing
describe "looking up status codes from replies" $ do
it "should return the correct status code" $
let reply = [Line 250 [Token "foo" Nothing]]
in statusCode reply `shouldBe` 250
it "should return the status code of the first line" $
let reply = [Line 205 [Token "foo" Nothing], Line 250 [Token "foo" Nothing]]
in statusCode reply `shouldBe` 205
|
solatis/haskell-network-anonymous-tor
|
test/Network/Anonymous/Tor/Protocol/Parser/AstSpec.hs
|
mit
| 3,097
| 0
| 18
| 799
| 901
| 438
| 463
| 61
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Types where
import Control.Lens (makeLenses)
import Control.Monad.Writer(Writer)
type SmVar = String
data Simbol =
SmAtom String
| SmLambda Simbol Simbol
| SmList [Simbol]
deriving (Show, Eq)
data Term a =
TmZero
| TmTrue
| TmFalse
| TmIsZero
| TmPred
| TmSucc
| TmIf
| TmName a
| TmLambda String (Term a)
| TmApp (Term a) (Term a)
| NoRuleApplies String
deriving Eq
instance Show a => Show (Term a) where
show TmIsZero = "iszero"
show TmSucc = "succ"
show TmPred = "pred"
show TmIf = "if"
show TmZero = "zero"
show TmTrue = "true"
show TmFalse = "false"
show (TmName n) = show n
show (TmLambda str t) = "(λ" ++ str ++ ". " ++ show t ++ ")"
show (TmApp t t') = show t ++ " (" ++ show t' ++ ")"
show (NoRuleApplies str) = "<" ++ str ++ ">"
data Name = Name {
_name :: String
, _index :: Int
} deriving (Eq)
$(makeLenses ''Name)
instance Show Name where
show (Name s _) = s
type UnIndexedTerm = Term String
type IndexedTerm = Term Name
type LoggerM=Writer [IndexedTerm]
|
eliza0x/Mikan
|
src/Types.hs
|
mit
| 1,235
| 0
| 9
| 415
| 417
| 224
| 193
| 45
| 0
|
{- blit an optimized image with scaling -}
{-# LANGUAGE OverloadedStrings #-}
module Lesson05 where
--
import qualified SDL
import Linear.V4 (V4(..))
--
import Control.Concurrent (threadDelay)
import Control.Monad (unless)
import Control.Applicative ((<*))
--
import qualified Config
-- SDL converts the color mode on every blit if the color mode of
-- the source surface doesn't match the color mode of the target surface.
-- To avoid that repeated conversion, a simple approach is to
-- align their color modes whenever we load an image.
optLoadBMPwith :: SDL.Surface -> FilePath -> IO SDL.Surface
optLoadBMPwith originSf path = do
imgSf <- SDL.loadBMP path
-- get the color mode of given surface
spf <- SDL.surfaceFormat originSf
-- align the color mode of image surface
SDL.convertSurface imgSf spf
<* SDL.freeSurface imgSf
  -- equivalent to the following lines:
-- optSf <- SDL.convertSurface imgSf spf
-- SDL.freeSurface imgSf
-- return optSf
--
lesson05 :: IO ()
lesson05 = do
SDL.initialize [SDL.InitVideo]
window <- SDL.createWindow "Lesson05" Config.winConfig
SDL.showWindow window
gSurface <- SDL.getWindowSurface window
sf <- optLoadBMPwith gSurface "./img/05/up.bmp"
let
loop = do
events <- SDL.pollEvents
let quit = any (== SDL.QuitEvent) $ map SDL.eventPayload events
SDL.surfaceFillRect gSurface Nothing $
V4 minBound minBound minBound maxBound
        -- blit with the given scaling setup;
        -- Nothing means the default setup, i.e. the blit scales to fill the target
SDL.surfaceBlitScaled sf Nothing gSurface Nothing
SDL.updateWindowSurface window
threadDelay 20000
unless quit loop
loop
SDL.destroyWindow window
SDL.freeSurface sf
SDL.quit
-- .
|
jaiyalas/sdl2-examples
|
src/Lesson05.hs
|
mit
| 1,776
| 0
| 18
| 395
| 357
| 180
| 177
| 35
| 1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE RankNTypes #-}
module Web.Stripe.Test.InvoiceItem where
import Data.Either
import Test.Hspec
import Web.Stripe.Test.Prelude
import Web.Stripe.InvoiceItem
import Web.Stripe.Customer
invoiceItemTests :: StripeSpec
invoiceItemTests stripe = do
describe "Invoice item tests" $ do
it "Succesfully creates an invoice item" $ do
result <- stripe $ do
Customer { customerId = cid } <- createCustomer
ii <- createInvoiceItem cid (Amount 100) USD
-&- (Description "hey")
_ <- deleteCustomer cid
return ii
result `shouldSatisfy` isRight
it "Succesfully retrieves an existing invoice item" $ do
result <- stripe $ do
Customer { customerId = cid } <- createCustomer
InvoiceItem { invoiceItemId = iid } <-
createInvoiceItem cid (Amount 100) USD -&- (Description "hey")
ii <- getInvoiceItem iid
_ <- deleteCustomer cid
return ii
result `shouldSatisfy` isRight
it "Succesfully retrieves an existing invoice item expandable" $ do
result <- stripe $ do
Customer { customerId = cid } <- createCustomer
InvoiceItem { invoiceItemId = iid } <-
createInvoiceItem cid (Amount 100) USD -&- (Description "hey")
ii <- getInvoiceItem iid -&- ExpandParams ["customer"]
_ <- deleteCustomer cid
return ii
result `shouldSatisfy` isRight
it "Succesfully retrieves invoice items" $ do
result <- stripe $ do
Customer { customerId = cid } <- createCustomer
InvoiceItem { } <-
createInvoiceItem cid (Amount 100) USD -&- (Description "hey")
ii <- getInvoiceItems
_ <- deleteCustomer cid
return ii
result `shouldSatisfy` isRight
it "Succesfully retrieves invoice items with expansion" $ do
result <- stripe $ do
Customer { customerId = cid } <- createCustomer
InvoiceItem { } <-
createInvoiceItem cid (Amount 100) USD -&- (Description "hey")
ii <- getInvoiceItems -&- ExpandParams ["data.customer"]
_ <- deleteCustomer cid
return ii
result `shouldSatisfy` isRight
it "Succesfully updates an existing invoice item" $ do
result <- stripe $ do
Customer { customerId = cid } <- createCustomer
InvoiceItem { invoiceItemId = iid } <-
createInvoiceItem cid (Amount 100) USD
-&- (Description "hey")
ii <- updateInvoiceItem iid
-&- (Amount 200)
-&- (Description "description")
-&- MetaData [("some","thing")]
_ <- deleteCustomer cid
return ii
result `shouldSatisfy` isRight
let Right InvoiceItem{..} = result
invoiceItemMetaData `shouldBe` (MetaData [("some","thing")])
invoiceItemDescription `shouldBe` (Just (Description "description"))
invoiceItemAmount `shouldBe` 200
it "Succesfully deletes an invoice item" $ do
result <- stripe $ do
Customer { customerId = cid } <- createCustomer
InvoiceItem { invoiceItemId = iid } <-
createInvoiceItem cid (Amount 100) USD
-&- (Description "hey")
result <- deleteInvoiceItem iid
_ <- deleteCustomer cid
return result
result `shouldSatisfy` isRight
|
dmjio/stripe
|
stripe-tests/tests/Web/Stripe/Test/InvoiceItem.hs
|
mit
| 3,474
| 0
| 22
| 1,020
| 935
| 447
| 488
| 84
| 1
|
module TI108.Operation
( Operand
, Operator(Add,Sub,Mul,Div)
, Operation(Value,Operation)
, calculateOperation
) where
type Operand = Float
data Operator = Add | Sub | Mul | Div deriving (Show, Eq)
data Operation = Value Operand | Operation Operator Operation Operation deriving (Show)
calculateOperation :: Operation -> Operand
calculateOperation (Value a) = a
calculateOperation (Operation op a b) =
case op of
Add -> calculateOperation a + calculateOperation b
Sub -> calculateOperation a - calculateOperation b
Mul -> calculateOperation a * calculateOperation b
Div -> calculateOperation a / calculateOperation b
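-- Worked example: 1 + (2 * 3) is represented as
--   Operation Add (Value 1) (Operation Mul (Value 2) (Value 3))
-- and calculateOperation evaluates it to 7.0.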
|
seansu4you87/ti108
|
TI108/Operation.hs
|
mit
| 647
| 0
| 9
| 118
| 203
| 112
| 91
| 20
| 4
|
{-# LANGUAGE OverloadedStrings #-}
------------------------------------------------------------------------------
-- | This module is where all the routes and handlers are defined for your
-- site. The 'app' function is the initializer that combines everything
-- together and is exported by this module.
module Site
( app
) where
------------------------------------------------------------------------------
import Data.ByteString (ByteString)
import Data.Monoid (mempty)
import Snap.Snaplet
import Snap.Snaplet.Heist
import Snap.Util.FileServe
import Snap.Core (method, Method(..), ifTop)
import Heist
------------------------------------------------------------------------------
import Application
import Splices (currentPath)
import Document
import Document.Heist
index :: Handler App App ()
index = method GET $ render "index"
atom :: Handler App App ()
atom = method GET $ renderAs "application/atom+xml" "atom"
routes :: [(ByteString, Handler App App ())]
routes = [ ("/", ifTop index)
, ("/posts.atom", atom)
, ("/static", serveDirectory "static")
]
app :: [Document] -> SnapletInit App App
app docs = makeSnaplet "bloge" "Such posts, many tags, very markdown" Nothing $ do
h <- nestSnaplet "" heist $ heistInit "templates"
let config = mempty {
hcInterpretedSplices = do
"currentPath" ## currentPath
"posts" ## (bindDocuments docs)
"latestPost" ## (postedSplice $ head docs)
}
addRoutes routes
addRoutes $ documentRoutes docs
addRoutes $ tagRoutes docs
addConfig h config
return $ App h
|
ErinCall/bloge
|
src/Site.hs
|
mit
| 1,767
| 0
| 18
| 460
| 369
| 197
| 172
| 35
| 1
|
module Formatting (formatSequence, formatSequences) where
import Data.Char
import PWM
generatePredicate :: Int -> Int -> [Bool]
generatePredicate k i = map f $ map (\x -> i - x) [1..]
where
f d
| d > 0 = False
| d > -k = True
| otherwise = False
formatWithPredicate :: Sequence -> [Bool] -> String
formatWithPredicate s = concat . zipWith f s
where
f n p
| p = show n
| otherwise = map toLower $ show n
-- | Formats a single sequence. All k letters starting at index i of sequence
-- s will be printed capitalized.
formatSequence :: Int -> Int -> Sequence -> String
formatSequence k i s = formatWithPredicate s $ generatePredicate k i
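-- A concrete illustration: with k = 3 and i = 2, positions 2..4 of the
-- sequence (counting positions from 1) are shown in upper case and the
-- rest in lower case.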
-- | Formats a bunch of sequence. See 'formatSequence'.
formatSequences :: Int -> [(Int, Sequence)] -> [String]
formatSequences k = map (uncurry $ formatSequence k)
-- vim: set ts=4 sts=4 sw=4 et:
|
fgrsnau/emgene
|
src/Formatting.hs
|
mit
| 896
| 0
| 11
| 218
| 281
| 144
| 137
| 18
| 1
|
module Network.API.LinkedIn
( getResponse
, parseResponse
, sendRequest
, mkRequest
) where
import Data.API.LinkedIn.Query
import qualified Data.API.LinkedIn.Response as L
import Data.API.LinkedIn.QueryResponsePair
import Data.Maybe (fromJust)
import Network.OAuth.Consumer
import Network.OAuth.Http.Request
import qualified Network.OAuth.Http.Response as H
import Network.OAuth.Http.CurlHttpClient
import Control.Monad.Trans.Resource
import Data.Conduit (($$))
import Text.XML.Stream.Parse
getResponse :: ( QueryResponsePair q r
, Query q
, L.Response r
) => Token
-> q
-> IO r
getResponse token query = (sendRequest token $ mkRequest query) >>= parseResponse
parseResponse :: (L.Response r) => H.Response -> IO r
parseResponse r = runResourceT $ parseLBS def (H.rspPayload r) $$ force "something required" L.parsePage
-- |Signs a request with the OAuth Token and performs it using Curl.
sendRequest :: Token -> Request -> IO H.Response
sendRequest token request = runOAuthM token $ (signRq2 HMACSHA1 Nothing request >>= serviceRequest CurlClient)
-- |Creates an HTTP GET request from any Query.
mkRequest :: (Query q) => q -> Request
mkRequest query = baseURL { reqHeaders = baseHeaders
, pathComps = toPathComps query
, qString = toQueryStr query
}
baseURL = fromJust $ parseURL $ "http://api.linkedin.com/"
baseHeaders = fromList [("x-li-format", "xml")]
-- reqUrl = fromJust $ parseURL "https://api.linkedin.com/uas/oauth/requestToken"
-- accUrl = fromJust $ parseURL "https://api.linkedin.com/uas/oauth/accessToken"
-- authUrl = ("https://www.linkedin.com/uas/oauth/authorize?oauth_token="++) . findWithDefault ("oauth_token","ERROR") . oauthParams
-- app = Application "7rpbrfj0c53c" "CFrGkHCXujQQ1em6" OOB
-- tokenM :: IO Token
-- tokenM = runOAuthM (fromApplication app) $ do
-- ignite app
-- signRq2 HMACSHA1 Nothing reqUrl >>= oauthRequest CurlClient
-- cliAskAuthorization authUrl
-- signRq2 HMACSHA1 Nothing accUrl >>= oauthRequest CurlClient
-- getToken
-- response = do
-- token <- tokenM
-- runOAuthM token $ do
-- signRq2 HMACSHA1 Nothing srvUrl >>= serviceRequest CurlClient
|
whittle/linkedin-api
|
Network/API/LinkedIn.hs
|
mit
| 2,311
| 0
| 10
| 485
| 391
| 230
| 161
| 33
| 1
|
{-# LANGUAGE CPP, OverloadedStrings #-}
module Galua.CObjInfo ( CObjInfo(..), cfunInfoFun, noFunInfo ) where
import Foreign(FunPtr,nullFunPtr)
import Data.Text(Text)
import qualified Data.Text as Text
#if defined (LUA_USE_LINUX)
import Data.Elf
import DWARF.Basics(sections,Endian(..))
import DWARF.Section.Line(File(..))
import DWARF.Addr2Line(Info(..),addr2line)
import Data.List(isPrefixOf)
import System.Posix.Files(readSymbolicLink)
import qualified Data.Map as Map
import qualified Data.Text.Encoding as Text (decodeUtf8)
import Foreign.Ptr
import qualified Data.ByteString as BS
-- import Control.Monad(guard)
-- import System.Process(readProcess)
#endif
#if defined (LUA_USE_MACOSX)
import Galua.DlInfo(DlInfo(..),funPtrInfo)
#endif
data CObjInfo = CObjInfo
{ cObjAddr :: !Text
, cObjName :: !(Maybe Text)
, cObjFile :: !(Maybe Text)
, cObjLine :: !(Maybe Text)
} deriving Show
noFunInfo :: FunPtr a -> CObjInfo
noFunInfo fptr = CObjInfo { cObjAddr = addrName fptr
, cObjName = Nothing
, cObjFile = Nothing
, cObjLine = Nothing
}
addrName :: FunPtr a -> Text
addrName fp = if fp == nullFunPtr then "(entry)" else Text.pack (show fp)
cfunInfoFun :: IO (FunPtr () -> IO CObjInfo)
cfunInfoFun =
#if defined ( LUA_USE_LINUX )
-- return $ \fptr -> return (noFunInfo fptr)
--{-
do exe <- readSymbolicLink "/proc/self/exe"
bytes <- BS.readFile exe
let elf = parseElf bytes
end = case elfData elf of
ELFDATA2LSB -> LittleEndian
ELFDATA2MSB -> BigEndian
secs = sections end $ Map.fromList [ (name, elfSectionData s)
| s <- elfSections elf
, let name = elfSectionName s
, ".debug_" `isPrefixOf` name ]
return $ \fptr -> return $!
let addr = fromIntegral (ptrToIntPtr (castFunPtrToPtr fptr))
info = addr2line secs addr
in CObjInfo
{ cObjAddr = addrName fptr
, cObjName =
case function info of
Nothing -> Nothing
Just b -> Just $! Text.decodeUtf8 b
, cObjFile =
case file info of
Nothing -> Nothing
Just f -> Just $! Text.concat
[ Text.decodeUtf8 (directory f)
, "/"
, Text.decodeUtf8 (fileName f) ]
, cObjLine = case line info of
Nothing -> Nothing
Just n -> Just $! Text.pack (show n)
}
---}
#elif LUA_USE_MACOSX
return $ \fptr ->
do mb <- funPtrInfo fptr
case mb of
Nothing -> return (noFunInfo fptr)
Just i -> return $!
CObjInfo { cObjAddr = addrName fptr
, cObjName = case dlinfoSymName i of
Nothing -> Nothing
Just s -> Just $! Text.pack s
, cObjFile = Just $! (Text.pack (dlinfoFileName i))
, cObjLine = Nothing
}
#else
return $ \fptr -> return (noFunInfo fptr)
#endif
|
GaloisInc/galua
|
galua-rts/src/Galua/CObjInfo.hs
|
mit
| 3,367
| 0
| 22
| 1,270
| 700
| 388
| 312
| 29
| 2
|
module Language.Hasquito (
module H,
compileFile) where
import Control.Monad
import Language.Hasquito.Closure as H
import Language.Hasquito.DeExp as H
import Language.Hasquito.JSify
import Language.Hasquito.MakeMain
import Language.Hasquito.Parser as H
import Language.Hasquito.STG as H
import Language.Hasquito.Syntax as H
import Language.Hasquito.TypeCheck as H
import Language.Hasquito.Util as H
import Language.Hasquito.Sanity as H
import Language.JavaScript.AST
import Language.JavaScript.Pretty as J
import System.Exit
mainCompiler :: [Def] -> CompilerM Program
mainCompiler = typeCheck
>=> sanityCheck
>=> simplify
>=> toSTG
>=> deExp
>=> jsify
>=> makeMain
compileFile :: FilePath -> IO String
compileFile file = do
parseRes <- parseFile file
let compRes = parseRes >>= runCompilerM . mainCompiler
case compRes of
Right prog -> return . show . J.pretty $ prog
Left err -> print err >> exitWith (ExitFailure 1)
|
jozefg/hasquito
|
src/Language/Hasquito.hs
|
mit
| 1,030
| 0
| 13
| 230
| 261
| 152
| 109
| 32
| 2
|
pal xs
| xs == reverse xs = True
| otherwise = False
{-
3. b) -
"aba" - returns True
"abb" - returns False
4. pal takes arguments whose element type has an Eq instance; reverse takes lists like "aba" or [1,2,1]
5. pal :: Eq a => [a] -> Bool
6.
-}
numbers x
| x < 0 = -1
| x == 0 = 0
| x > 0 = 1
{-
6. c)
7. Ord and Num
8. numbers :: (Ord a, Num a, Num t) => a -> t
-}
|
Numberartificial/workflow
|
haskell-first-principles/haskell-from-first-principles-master/07/07.07.01-guard-duty.hs
|
mit
| 369
| 0
| 9
| 117
| 79
| 36
| 43
| 7
| 1
|
{-# LANGUAGE FlexibleInstances, GADTs, TypeFamilies, StandaloneDeriving, CPP, RecursiveDo, OverloadedStrings #-}
#define DEBUG 0
module IR where
import Compiler.Hoopl as H
import Control.Monad
import Control.Monad.Trans.State.Strict
import Data.Map (Map)
import qualified Data.Map as M
import System.Exit
import AST hiding (Cmd(..), Address(..), Label)
import AST (Replacement)
import qualified AST
newtype Pred = Pred Int deriving (Ord,Eq)
type FD = Int
data Cond
= AtEOF FD
| PendingIPC
-- current line == l
-- Should this take a file too? Redirects could get confusing, but per-file
-- (per-socket) line counters would make more sense in forks and such.
-- Fork does reset the counter though.
| Line Int
-- current line > l (tested after the first line has been processed)
| EndLine Int
-- Possible extension: explicit "match" registers to track several precomputed
-- matches at once, and use that instead of MatchLastRE.
-- A complication: pattern space may change but the last regexp is the same,
-- so should repeat the match but optimize repeated match against unchanged
-- pattern space elsewhere.
-- I also think the last regexp is dynamic so we'd need to track a global for
-- that similar to internal predicates and string variables.
| Match SVar RE
| MatchLastRE SVar
| Bool Bool
deriving (Show,Eq)
cTrue = Bool True
cFalse = Bool False
newtype SVar = SVar Int deriving (Ord,Eq)
data StringExpr
= SConst S
| SVarRef SVar
| SRandomString
-- Subst last match against current pattern. See Match TODO about match regs.
| SSubst SVar Replacement SubstType
-- from to string
| STrans S S SVar
| SAppendNL SVar SVar
deriving (Show,Eq)
emptyS = SConst ""
data Insn e x where
Label :: Label -> Insn C O
Branch :: Label -> Insn O C
If :: Pred -> Label -> Label -> Insn O C
Fork :: Label -> Label -> Insn O C
Quit :: ExitCode -> Insn O C
-- Wait for IPC or line of input or EOF.
Wait :: FD -> Insn O O
-- Writes SVar. Should be a StringExpr I guess then?
-- OTOH it has side effects and this style kind of matches Read below.
GetMessage :: SVar -> Insn O O
SetP :: Pred -> Cond -> Insn O O
SetS :: SVar -> StringExpr -> Insn O O
-- for n/N (which can never accept interrupts)
Read :: SVar -> FD -> Insn O O
PrintConstS :: FD -> S -> Insn O O
PrintLineNumber :: FD -> Insn O O
PrintLiteral :: Int -> FD -> SVar -> Insn O O
Print :: FD -> SVar -> Insn O O
Message :: SVar -> Insn O O
SetLastRE :: RE -> Insn O O
ShellExec :: SVar -> Insn O O
Listen :: Int -> (Maybe S) -> Int -> Insn O O
Accept :: Int -> Int -> Insn O O
Redirect :: Int -> Int -> Insn O O
CloseFile :: Int -> Insn O O
Comment :: String -> Insn O O
WriteFile :: S -> SVar -> Insn O O
deriving instance Show (Insn e x)
deriving instance Eq (Insn e x)
showInsn (Label l) = show l ++ ":"
showInsn i = " " ++ show i
instance NonLocal Insn where
entryLabel (Label l) = l
successors (Branch t) = [t]
successors (If _ t f) = [t,f]
successors (Fork t c) = [t,c]
successors (Quit _) = []
instance HooplNode Insn where
mkBranchNode = Branch
mkLabelNode = Label
data IRState = State
{ firstFreeUnique :: Unique
, autoprint :: Bool
, sourceLabels :: Map S Label
, holdSpaces :: Map S SVar
, nextCycleLabels :: (Label, Label)
, program :: Graph Insn O O
}
nextCycleLabelNoPrint = snd . nextCycleLabels
nextCycleLabelPrint = fst . nextCycleLabels
instance Show (Graph Insn e x) where
show g = showGraph showInsn g
invalidLabel :: String -> Label
invalidLabel s = error ("Invalid label: " ++ s)
startState autoprint = State firstNormalPred autoprint M.empty M.empty (dummyCycleLabel, dummyCycleLabel) emptyGraph
dummyCycleLabel = invalidLabel "uninitialized next-cycle label"
instance Show Pred where
show (Pred 0) = "P{pre-first}"
show (Pred 1) = "P{irq}"
show (Pred 2) = "P{run-normal}"
show (Pred 3) = "P{last-subst}"
show (Pred 4) = "P{queued-output}"
show (Pred 5) = "P{has-pattern}"
show (Pred n) = "P" ++ show n
firstNormalPred = 5
pPreFirst = Pred 0
pIntr = Pred 1
pRunNormal = Pred 2
pLastSubst = Pred 3
pHasQueuedOutput = Pred 4
pHasPattern = Pred 5
setLastSubst x = emit (SetP pLastSubst (Bool x))
instance Show SVar where
show (SVar 0) = "S{pattern-space}"
show (SVar 1) = "S{output-queue}"
show (SVar 2) = "S{hold-space}"
show (SVar n) = "S" ++ show n
sPattern = SVar 0
sOutputQueue = SVar 1
sHoldSpace = SVar 2
instance UniqueMonad IRM where
freshUnique = do
res <- firstFreeUnique <$> get
modify $ \state -> state { firstFreeUnique = res + 1 }
return res
initBlock :: Label -> Graph Insn C O
initBlock l = mkLabel l
newLabel = freshLabel
newPred = Pred <$> freshUnique
newString :: IRM SVar
newString = SVar <$> freshUnique
setString :: SVar -> SVar -> IRM ()
setString s src = emit (SetS s (SVarRef src))
emitString :: StringExpr -> IRM SVar
emitString expr = newString >>= \s -> emit (SetS s expr) >> return s
emitCString :: S -> IRM SVar
emitCString s = emitString (SConst s)
withNewString f = do
next <- newString
_ <- f next
return next
setPattern s = setString sPattern s >> emit (SetP pHasPattern cTrue)
addLabelMapping name l = modify $
\state -> state { sourceLabels = M.insert name l (sourceLabels state) }
getLabelMapping :: S -> IRM Label
getLabelMapping name = do
res <- M.lookup name . sourceLabels <$> get
case res of
Just l -> return l
Nothing -> withNewLabel (addLabelMapping name)
addHoldMapping name var = modify $
\state -> state { holdSpaces = M.insert name var (holdSpaces state) }
getHoldMapping Nothing = return sHoldSpace
getHoldMapping (Just name) = do
res <- M.lookup name . holdSpaces <$> get
case res of
Just var -> return var
Nothing -> withNewString (addHoldMapping name)
emitOCO :: Insn O C -> Insn C O -> IRM ()
emitOCO last first =
modify $ \s -> s { program = program s H.<*> mkLast last |*><*| mkFirst first }
-- This system of "splitting" the current block and starting a new block with a
-- label is pretty ugly; it would probably be neater to emit whole blocks (for
-- most cases), e.g. label <- somethingEmitsWholeBlock to get a label to refer
-- to and then emit an (If p trueBlock falseBlock).
thenLabel :: Insn O C -> Label -> IRM ()
thenLabel last next = emitOCO last (Label next)
finishBlock = thenLabel
finishBlock' :: Insn O C -> IRM Label
finishBlock' b = withNewLabel (finishBlock b)
emit :: Insn O O -> IRM ()
emit insn =
modify $ \s -> s { program = program s H.<*> mkMiddle insn }
comment :: String -> IRM ()
#if DEBUG
comment s = emit (Comment s)
#else
comment _ = return ()
#endif
withNewLabel :: (Label -> IRM a) -> IRM Label
withNewLabel x = do
next <- newLabel
_ <- x next
return next
emitBranch l next = Branch l `thenLabel` next
emitBranch' :: Label -> IRM Label
emitBranch' l = withNewLabel (emitBranch l)
branchNextCyclePrint = () <$ (emitBranch' =<< gets nextCycleLabelPrint)
branchNextCycleNoPrint = () <$ (emitBranch' =<< gets nextCycleLabelNoPrint)
emitLabel l = Branch l `thenLabel` l
-- likely to require mdo unless the first use is afterwards
label :: IRM Label
label = withNewLabel emitLabel
printIfAuto = do
ap <- gets autoprint
when ap (tWhen pHasPattern (emit (Print 0 sPattern)))
tIf :: Pred -> IRM a -> IRM b -> IRM ()
tIf p tx fx = mdo
t <- finishBlock' (If p t f)
comment "tIf/true"
_ <- tx
f <- emitBranch' e
comment "tIf/false"
_ <- fx
e <- label
comment "tIf/end"
ifCheck c tx fx = do
p <- tCheck c
tIf p tx fx
type IRM = State IRState
toIR :: Bool -> [Sed] -> (Label, Graph Insn C C)
toIR autoprint seds = evalState go (startState autoprint)
where
go = do
entry <- newLabel
tProgram seds
outState <- get
return (entry, mkFirst (Label entry) H.<*> program outState H.<*> mkLast (Quit ExitSuccess))
checkQueuedOutput =
tWhen pHasQueuedOutput $ do
emit (Print 0 sOutputQueue)
setString sOutputQueue =<< emitString emptyS
emit (SetP pHasQueuedOutput cFalse)
-- Entry points to generate:
-- * pre-first line code (if any)
-- * interrupt reception (for new-cycle code)
-- * new cycle label
tProgram seds = mdo
oldNextCycle <- gets nextCycleLabels
modify $ \state -> state { nextCycleLabels = (newCyclePrint, newCycleNoPrint) }
emit (SetP pIntr cFalse)
emit (SetP pPreFirst cTrue)
emit (SetP pRunNormal cFalse)
start <- label
-- Actual normal program here
tSeds seds
newCyclePrint <- label
printIfAuto
-- TODO Should this also be done in printIfAuto so that it's done by 'n'?
emit (SetP pHasPattern cFalse)
newCycleNoPrint <- label
checkQueuedOutput
setLastSubst False
-- New cycle handling: wait for IPC or input, check for EOF, then run a copy
-- of the main program with the appropriate predicates set.
emit (Wait 0)
emit (SetP pIntr PendingIPC)
lineOrEOF <- finishBlock' (If pIntr intr lineOrEOF)
pAtEOF <- emitNewPred (AtEOF 0)
line <- finishBlock' (If pAtEOF exit line)
do
line <- newString
emit (Read line 0)
setPattern line
emit (SetP pPreFirst cFalse)
emit (SetP pRunNormal cTrue)
intr <- emitBranch' start
msg <- newString
emit (GetMessage msg)
-- Update pattern variable for matching but don't set pHasPattern in IPC
-- branch, to avoid printing it at the end of the loop.
setString sPattern msg
emit (SetP pPreFirst cFalse)
emit (SetP pRunNormal cFalse)
exit <- emitBranch' start
modify $ \state -> state { nextCycleLabels = oldNextCycle }
tSeds = mapM_ tSed
tWhenNot p x = mdo
f <- finishBlock' (If p t f)
r <- x
t <- label
return r
tWhen p x = mdo
t <- finishBlock' (If p t f)
res <- x
f <- label
return res
withCond' Always whenTrue _ = whenTrue
withCond' (At c) whenTrue whenFalse = do
p <- tCheck c
tIf p whenTrue whenFalse
withCond' (Between start end) whenTrue whenFalse = mdo
pActive <- newPred
let run = emitBranch' t
skip = emitBranch' f
setP = emit (SetP pActive cTrue)
clear = emit (SetP pActive cFalse)
-- Special case for line-based ranges.
-- For normal addresses, the end condition is checked after running the command
-- (or it's checked, setP, and the command is run one more time for the last
-- line).
-- For line numbers, it seems to be checked before running the command with the address.
-- 12,13p should print for lines 12 and 13. 12,3p should only print for 12
-- since the condition is false before reaching the end.
let checkEnd | (AST.Line n) <- end = do
p <- emitNewPred (EndLine n)
tIf p (clear >> skip) run
| otherwise = ifCheck end (clear >> run) run
-- If the end address is a line that's <= the current line, clear the flag
-- immediately and skip the block.
tIf pActive (do comment "between/active"
checkEnd)
(do comment "between/inactive"
ifCheck start (comment "between/first" >> setP >> run) skip)
t <- label
_ <- whenTrue
comment "between/end of code"
f <- emitBranch' e
_ <- whenFalse
e <- label
return ()
withCond' (NotAddr addr) whenTrue whenFalse = withCond' addr whenFalse whenTrue
withCond addr x = withCond' addr x (return ())
withNewPred f = newPred >>= \p -> f p >> return p
emitNewPred cond = withNewPred $ \p -> emit (SetP p cond)
tCheck (AST.Line 0) = return pPreFirst
tCheck (AST.Line n) = emitNewPred (Line n)
tCheck (AST.Match (Just re)) = withNewPred $ \p -> do
emit (SetP p (Match sPattern re))
emit (SetLastRE re)
tCheck (AST.Match Nothing) = emitNewPred (MatchLastRE sPattern)
tCheck (AST.IRQ) = return pIntr
tCheck (AST.EOF) = emitNewPred (AtEOF 0)
tSed :: Sed -> IRM ()
tSed (Sed Always (AST.Block xs)) = tSeds xs
tSed (Sed cond@(At (AST.Line 0)) x) = withCond cond $ do
-- While running a 0-conditional line, also run all normal lines until we
-- either reach the "fall-through" from that line, or until we start a new
-- cycle, then reset it back to... its original value, hmm...
emit (SetP pRunNormal cTrue)
tCmd x
-- FIXME This may need to keep a counter or something rather than just set
-- to false. Or assign a fresh predicate to push/copy the value into?
emit (SetP pRunNormal cFalse)
tSed (Sed cond@(At AST.IRQ) x) = withCond cond $ do
  -- See the comment/FIXME about saving/restoring pRunNormal above
emit (SetP pRunNormal cTrue)
tCmd x
emit (SetP pRunNormal cFalse)
-- Special case for change with a range: replace all lines with a single copy
-- of the replacement string.
tSed (Sed (Between start end) (AST.Change repl)) = do
tSed (Sed (At end) (AST.Insert repl))
tSed (Sed (Between start end) AST.Delete)
tSed (Sed cond x) = tWhen pRunNormal $ withCond cond $ tCmd x
readString :: Int -> IRM SVar
readString fd = do
s <- newString
emit (Read s fd)
return s
tCmd :: AST.Cmd -> IRM ()
tCmd (AST.Block xs) = tSeds xs
tCmd (AST.Print fd) = tWhen pHasPattern $ emit (Print fd sPattern)
tCmd (AST.PrintLineNumber fd) = emit (PrintLineNumber fd)
tCmd (AST.PrintLiteral width) = tWhen pHasPattern $ emit (PrintLiteral width 0 sPattern)
tCmd (AST.Message Nothing) = tWhen pHasPattern $ emit (Message sPattern)
tCmd (AST.Message (Just s)) = do
tmp <- emitCString s
emit (Message tmp)
-- FIXME "If there is no more input then sed exits without processing any more
-- commands." (How does read indicate EOF anyway?)
tCmd (AST.Next fd) = do
printIfAuto
checkQueuedOutput
setLastSubst False
line <- readString fd
setPattern line
tCmd (AST.NextA fd) = do
checkQueuedOutput
setLastSubst False
line <- readString fd
tmp <- emitString (SAppendNL sPattern line)
tIf pHasPattern (setPattern tmp) (setPattern line)
tCmd (AST.Listen fd host port) = emit (Listen fd host port)
tCmd (AST.Accept sfd fd) = emit (Accept sfd fd)
tCmd (AST.Label name) = do
emitLabel =<< getLabelMapping name
comment ("at label " ++ show name)
tCmd (AST.Branch (Just name)) = do
comment ("branch " ++ show name)
_ <- emitBranch' =<< getLabelMapping name
return ()
tCmd (AST.Branch Nothing) = branchNextCyclePrint
tCmd (AST.Test target) = tTest True target
tCmd (AST.TestNot target) = tTest False target
tCmd (AST.Fork sed) = mdo
entry <- finishBlock' (Fork entry exit)
oldNextCycle <- nextCycleLabels <$> get
tProgram [sed]
modify $ \state -> state { nextCycleLabels = oldNextCycle }
-- End of thread (quit)
exit <- finishBlock' (Quit ExitSuccess)
return ()
tCmd (AST.Clear) = do
t <- emitString emptyS
setString sPattern t
emit (SetP pHasPattern cFalse)
tCmd (AST.Change replacement) = do
emit (PrintConstS 0 replacement)
branchNextCycleNoPrint
tCmd (AST.Delete) = branchNextCycleNoPrint
tCmd (AST.Redirect dst (Just src)) = emit (Redirect dst src)
tCmd (AST.Redirect dst Nothing) = emit (CloseFile dst)
tCmd (AST.Subst mre sub flags actions) = do
p <- tCheck (AST.Match mre)
tWhen p $ do
setLastSubst True
s <- emitString (SSubst sPattern sub flags)
setPattern s
tSubstAction actions s
tCmd (AST.Trans from to) = do
s <- emitString (STrans from to sPattern)
setPattern s
tCmd (AST.Hold maybeReg) = do
space <- getHoldMapping maybeReg
setString space sPattern
tCmd (AST.HoldA maybeReg) = do
space <- getHoldMapping maybeReg
tmp2 <- emitString (SAppendNL space sPattern)
setString space tmp2
tCmd (AST.Get (Just "yhjulwwiefzojcbxybbruweejw")) = do
temp <- emitString SRandomString
setPattern temp
tCmd (AST.GetA (Just "yhjulwwiefzojcbxybbruweejw")) = do
temp <- emitString SRandomString
tmp2 <- emitString (SAppendNL sPattern temp)
setString sPattern tmp2
tCmd (AST.Get maybeReg) = setPattern =<< getHoldMapping maybeReg
tCmd (AST.GetA maybeReg) = do
space <- getHoldMapping maybeReg
tmp2 <- emitString (SAppendNL sPattern space)
setString sPattern tmp2
tCmd (AST.Exchange maybeReg) = do
space <- getHoldMapping maybeReg
tmp <- emitString (SVarRef space)
setString space sPattern
setString sPattern tmp
tCmd (AST.Insert s) = emit (PrintConstS 0 s)
tCmd (AST.Append s) = do
tIf pHasQueuedOutput ifTrue ifFalse
where
-- already set to true, so no need to update predicate. Just append to
-- the queue.
ifTrue = do
temp <- emitCString s
temp2 <- emitString (SAppendNL sOutputQueue temp)
setString sOutputQueue temp2
-- If there's no queued output yet, we know we can simply replace the
-- queued output with a constant.
ifFalse = do
emit (SetP pHasQueuedOutput cTrue)
temp <- emitCString s
setString sOutputQueue temp
tCmd (AST.WriteFile path) = emit (WriteFile path sPattern)
tCmd (AST.Quit print status) = () <$ do
when print $ do
printIfAuto
checkQueuedOutput
finishBlock' (Quit status)
tCmd cmd = error ("tCmd: Unmatched case " ++ show cmd)
tSubstAction SActionNone _ = return ()
tSubstAction SActionExec res = emit (ShellExec res)
tSubstAction (SActionPrint n) res = emit (Print n res)
tSubstAction (SActionWriteFile path) res = emit (WriteFile path res)
tTest ifTrue maybeTarget = mdo
comment ("test " ++ show ifTrue ++ " " ++ show target)
target <- case maybeTarget of
Nothing -> gets nextCycleLabelPrint
Just name -> getLabelMapping name
let (t,f) | ifTrue = (target, l)
| otherwise = (l, target)
let clear = setLastSubst False
tIf pLastSubst (clear >> emitBranch' t) (clear >> emitBranch' f)
l <- label
return ()
|
olsner/sedition
|
IR.hs
|
mit
| 18,000
| 0
| 16
| 4,351
| 5,846
| 2,828
| 3,018
| 420
| 2
|
module Data.CTG1371.Internal.Parser.Parsers where
import Data.CTG1371.Internal.Types
import Data.Word
import Data.Bits
import qualified Data.Binary.Strict.Get as G
{-|This function does all the parsing -}
parseCTG :: G.Get CTGData
parseCTG = do
c <- G.getWord8 --Is it a "C" block
if c /= 67 then fail "Submitted data is not a C block" else do
status <- fmap parseStatus G.getWord16be
hr1Block4 <- fmap unpackHR1 G.getWord16be
hr1Block3 <- fmap unpackHR1 G.getWord16be
hr1Block2 <- fmap unpackHR1 G.getWord16be
hr1Block1 <- fmap unpackHR1 G.getWord16be
hr2Block4 <- fmap unpackHR2 G.getWord16be
hr2Block3 <- fmap unpackHR2 G.getWord16be
hr2Block2 <- fmap unpackHR2 G.getWord16be
hr2Block1 <- fmap unpackHR2 G.getWord16be
mhrBlock4 <- fmap unpackMHR G.getWord16be
mhrBlock3 <- fmap unpackMHR G.getWord16be
mhrBlock2 <- fmap unpackMHR G.getWord16be
mhrBlock1 <- fmap unpackMHR G.getWord16be
tocoBlock4 <- fmap unpackToco G.getWord8
tocoBlock3 <- fmap unpackToco G.getWord8
tocoBlock2 <- fmap unpackToco G.getWord8
tocoBlock1 <- fmap unpackToco G.getWord8
(hr1Mode,hr2Mode,mhrMode) <- fmap unpackHRMode G.getWord16be
tocoMode <- fmap unpackTocoMode G.getWord8
return (CTGData
status
[hr1Block1,hr1Block2,hr1Block3,hr1Block4]
[hr2Block1,hr2Block2,hr2Block3,hr2Block4]
[mhrBlock1,mhrBlock2,mhrBlock3,mhrBlock4]
[tocoBlock1,tocoBlock2,tocoBlock3,tocoBlock4]
hr1Mode hr2Mode mhrMode tocoMode)
-- | Unpack the first heart rate value
unpackHR1 :: Word16 -> HR1
unpackHR1 hrdata = HR1 checkFetalMovement (unpackHR hrdata)
where checkFetalMovement = if testBit hrdata 11 then Movement else NoMovement
-- | Unpack the heart rate mode values
unpackHRMode :: Word16 -> (HRMode,HRMode,MHRMode)
unpackHRMode hrdata = (unpackHR1Mode hrdata,unpackHR2Mode hrdata,unpackMHRMode hrdata)
-- | Unpack the first heart rate mode
unpackHR1Mode :: Word16 -> HRMode
unpackHR1Mode hrdata = translateHRMode $ (hrdata .&. 0xF000) `shiftR` 12
-- | Unpack the second heart rate mode
unpackHR2Mode :: Word16 -> HRMode
unpackHR2Mode hrdata = translateHRMode $ (hrdata .&. 0xF00) `shiftR` 8
-- | Unpack the maternal heart rate mode
unpackMHRMode :: Word16 -> MHRMode
unpackMHRMode hrdata = translateMHRMode $ (hrdata .&. 0xF0) `shiftR` 4
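-- The mode word packs three nibbles: the HR1 mode sits in bits 15-12, the HR2
-- mode in bits 11-8 and the maternal heart rate mode in bits 7-4, hence the
-- masks 0xF000, 0xF00 and 0xF0 above.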
-- | Unpack the tocography mode
unpackTocoMode :: Word8 -> TOCOMode
unpackTocoMode tocodata = case tocodata of
0 -> NoTOCOTransducer
8 -> ExternalTOCO
10 -> IUP
14 -> UnknownTOCOMode
_ -> NullTOCOMode
-- | translate a numeric heart rate mode into the symbolic heart rate mode
translateHRMode :: (Num a, Eq a) => a -> HRMode
translateHRMode hrdata = case hrdata of
0 -> NoHRTransducer
1 -> Inop
2 -> US
4 -> DECG
12 -> Reserved2
14 -> UnknownHRMode
_ -> NullHRMode
-- | translate a numeric maternal heart rate into the symbolic heart rate mode
translateMHRMode :: (Num a, Eq a) => a -> MHRMode
translateMHRMode mhrdata = case mhrdata of
0 -> MHRNoHRTransducer
1 -> MHRInop
6 -> MECG
8 -> ExternalMHR
10 -> MHRReserved1
12 -> MHRReserved2
_ -> MHRNullHRMode
-- | Unpack a tocography value (resolution is 0.5)
unpackToco :: Word8 -> TOCO
unpackToco tocodata = TOCO $ fromIntegral tocodata `div` 2
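-- For example, a raw byte of 90 decodes to TOCO 45 and 141 decodes to TOCO 70
-- (the integer division drops the half unit).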
-- | Unpack a first heart rate value
unpackHR :: Word16 -> HR
unpackHR hrdata = HR getSignalQualityInd getHR isBlankTrace
where getSignalQualityInd = case hrdata `shiftR` 13 of
0 -> SignalRed
1 -> SignalYellow
2 -> SignalGreen
_ -> error "Invalid signal quality value"
getHR = fromIntegral ((hrdata `shiftL` 5) `shiftR` 5) `div` 4
isBlankTrace = getHR == 0
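-- Worked example: for hrdata = 0x4000 the top three bits are 010, giving
-- SignalGreen, and the remaining bits decode to a rate of 0, so the result
-- is HR SignalGreen 0 True (a blank trace).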
-- | Unpack a second heart rate value
unpackHR2 :: Word16 -> HR2
unpackHR2 hrdata = HR2 (unpackHR hrdata)
-- | Unpack the maternal heart rate
unpackMHR :: Word16 -> MHR
unpackMHR hrdata = MHR (unpackHR hrdata)
-- | Parse the status into symbolic values
parseStatus :: Word16 -> CTGStatus
parseStatus status = CTGStatus
isMonitorOn
isctgDataInserted
isctgDataDeleted
isfspo2Available
isTelemetryOn
ishrCrossChanVerOn
isdecgLogicOn
ishrTwinOffsetOn
isFMPEnabled
where isFMPEnabled = testBit status 0
ishrTwinOffsetOn = testBit status 1
isdecgLogicOn = testBit status 5
ishrCrossChanVerOn = testBit status 8
isTelemetryOn = testBit status 9
isfspo2Available = testBit status 11
isctgDataDeleted = testBit status 13
isctgDataInserted = testBit status 14
isMonitorOn = testBit status 15
|
danplubell/CTG1371
|
library/Data/CTG1371/Internal/Parser/Parsers.hs
|
mit
| 5,253
| 0
| 13
| 1,619
| 1,164
| 605
| 559
| 106
| 7
|
module Graphics.Implicit.ExtOpenScad.Parser.Util where
import Text.ParserCombinators.Parsec hiding (State)
import Graphics.Implicit.ExtOpenScad.Definitions
-- white space, including tabs, newlines and comments
genSpace = many $
oneOf " \t\n\r"
<|> (try $ do
_ <- string "//"
_ <- many ( noneOf "\n")
_ <- string "\n"
return ' '
) <|> (try $ do
_ <- string "/*"
_ <- manyTill anyChar (try $ string "*/")
return ' '
)
pad parser = do
_ <- genSpace
a <- parser
_ <- genSpace
return a
infixr 1 *<|>
a *<|> b = try a <|> b
infixr 2 ?:
l ?: p = p <?> l
stringGS (' ':xs) = do
x' <- genSpace
xs' <- stringGS xs
return (x' ++ xs')
stringGS (x:xs) = do
x' <- char x
xs' <- stringGS xs
return (x' : xs')
stringGS "" = return ""
padString s = do
_ <- genSpace
s' <- string s
_ <- genSpace
return s'
tryMany = (foldl1 (<|>)) . (map try)
variableSymb = many1 (noneOf " ,|[]{}()+-*&^%#@!~`'\"\\/;:.,<>?=") <?> "variable"
patternMatcher :: GenParser Char st Pattern
patternMatcher =
(do
_ <- char '_'
return Wild
) <|> {-( do
a <- literal
return $ \obj ->
if obj == (a undefined)
then Just (Map.empty)
else Nothing
) <|> -} ( do
symb <- variableSymb
return $ Name symb
) <|> ( do
_ <- char '['
_ <- genSpace
components <- patternMatcher `sepBy` (try $ genSpace >> char ',' >> genSpace)
_ <- genSpace
_ <- char ']'
return $ ListP components
)
|
silky/ImplicitCAD
|
Graphics/Implicit/ExtOpenScad/Parser/Util.hs
|
gpl-2.0
| 1,625
| 0
| 14
| 551
| 547
| 261
| 286
| 55
| 1
|
{- |
Module : $EmptyHeader$
Description : <optional short description entry>
Copyright : (c) <Authors or Affiliations>
License : GPLv2 or higher, see LICENSE.txt
Maintainer : <email>
Stability : unstable | experimental | provisional | stable | frozen
Portability : portable | non-portable (<reason>)
<optional description>
-}
module Main where
import GMP.GMPAS
rg1 :: Formula GML -> Formula GML -> Int -> (Formula GML, Formula GML)
rg1 a b n = (Mapp (Mop (GML (n+1)) Angle) a,
Mapp (Mop (GML n) Angle) b)
a1 :: Formula GML -> Formula GML -> Formula GML -> Int -> Int -> (Formula GML, Formula GML, Formula GML)
a1 c a b n1 n2 = (Mapp (Mop (GML (n1+n2)) Angle) c,
Mapp (Mop (GML n1) Angle) a,
Mapp (Mop (GML n2) Angle) b)
a2 :: Formula GML -> Formula GML -> Formula GML -> Formula GML -> Int -> Int -> (Formula GML, Formula GML, Formula GML)
a2 a b c d n1 n2 = (Junctor (Mapp (Mop (GML n1) Angle) a) And (Mapp (Mop (GML n2) Angle) b),
Mapp (Mop (GML (n1+n2+1)) Angle) c,
Mapp (Mop (GML 0) Angle) d)
rn :: Formula GML -> Formula GML
rn a = Neg (Mapp (Mop (GML 0) Angle) (Neg a))
wrap2 :: Formula GML -> Formula GML -> Formula GML
wrap2 a b = Junctor a If b
wrap3 :: Formula GML -> Formula GML -> Formula GML -> (Formula GML,Formula GML)
wrap3 a b c = (a, Junctor b Or c)
recurse na1 nrg1 =
let a2_res = a2 (Var 'a' Nothing) (Var 'b' Nothing) (Junctor (Var 'a' Nothing) Or (Var 'b' Nothing)) (Var 'a' Nothing) 1 2
rec_a1 (x,y,z) n =
case n of
0 -> wrap3 x y z
_ -> rec_a1 (a1 x y z 3 4) (n-1)
a1_res = rec_a1 a2_res na1
rec_rg1 (x,y) n =
case n of
0 -> wrap2 x y
_ -> rec_rg1 (rg1 x y 5) (n-1)
in rec_rg1 a1_res nrg1
main :: IO()
main = do
let f = Neg (recurse 2 3)
putStrLn(show f)
|
nevrenato/Hets_Fork
|
GMP/versioning/gmp-0.0.1/GMP/extra/TestGenGML.hs
|
gpl-2.0
| 1,895
| 0
| 14
| 557
| 864
| 428
| 436
| 35
| 3
|
{-# LANGUAGE TemplateHaskell #-}
module Lambda.Apply where
import Lambda.Type
import Lambda.Basic
import Lambda.Step
import qualified Rewriting.Apply as A
import Rewriting.Derive.Instance
import Autolib.Reporter
import Autolib.ToDoc
import Autolib.Reader
import Control.Monad
import Data.Typeable
data For_Lambda = For_Lambda
deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc ] [''For_Lambda])
data Lambda_Calculus = Lambda_Calculus
deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc ] [''Lambda_Calculus])
instance A.Apply For_Lambda Lambda_Calculus
Lambda
Int where
example tag =
let sii = Apply ( Apply s i ) i
in Instance
{ system = Lambda_Calculus
, from = Apply sii sii
, to = Apply ( Apply i sii) sii
}
apply tag system object action = do
single_derivation object action
actions tag system object =
[ 0 .. pred $ length $ redex_positions object ]
-- local variables:
-- mode: haskell
-- end:
|
florianpilz/autotool
|
src/Lambda/Apply.hs
|
gpl-2.0
| 1,072
| 0
| 12
| 281
| 291
| 159
| 132
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses, FlexibleContexts #-}
module Algebraic.Graph where
import qualified Autolib.TES.Binu as B
import Graph.Op
import Graph.Iso
import Graph.Util
import Graph.Restriction
import Autolib.Graph.Kneser ( petersen )
import Algebraic.Class
import Algebraic.Instance
import Data.Typeable
import Autolib.Dot ( peng, Layout_Program (..) )
import Autolib.Graph.Graph
import Autolib.Graph.Util
import Autolib.ToDoc
import Autolib.Choose
import Autolib.Reader
import Autolib.Set
data Algebraic_Graph = Algebraic_Graph deriving ( Read, Show, Typeable )
instance GraphC Int =>
Algebraic Algebraic_Graph ( Graph Int ) where
-- evaluate :: tag -> Exp a -> Reporter a
evaluate tag exp = do
g <- tfoldR ( \ it -> reject $ text "unbekannt:" <+> toDoc it
) inter exp
{-
let degs = do v <- lknoten g ; return $ grad g v
when ( minimum degs < 2 ) $ reject $ text "mindegree"
when ( maximum degs > 5 ) $ reject $ text "maxdegree"
-}
inform $ vcat [ text "Graph ist" , nest 4 $ toDoc g ]
ping g
return g
present tag g = do
ping g
-- equivalent :: tag -> a -> a -> Reporter Bool
equivalent tag a b = do
return $ iso a b
-- some_formula :: tag -> Algebraic.Instance.Type a -> Exp a
some_formula tag i = Graph.Op.example
-- default_instance :: tag -> Algebraic.Instance.Type a
default_instance tag = Algebraic.Instance.Make
{ target = petersen
, description = Nothing
, operators = default_operators tag
, max_size = 7
}
default_operators tag = bops
ping g = peng $ ( no_fixed_layout g )
{ layout_program = Fdp
, layout_hints = [ "-Nheight=0.1", "-Nwidth=0.1"
, "-Nfixedsize=true"
, "-Gsize=7,7"
]
, show_labels = False
}
|
Erdwolf/autotool-bonn
|
src/Algebraic/Graph.hs
|
gpl-2.0
| 1,965
| 13
| 16
| 570
| 375
| 215
| 160
| 44
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Lamdu.GUI.DefinitionEdit (make, makeNewDefinition) where
import Control.Applicative ((<$>))
import Control.Lens.Operators
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State (StateT)
import Control.MonadA (MonadA)
import Data.Cache (Cache)
import Data.Store.Transaction (Transaction)
import Data.Traversable (sequenceA)
import Data.Typeable (Typeable1)
import Data.Vector.Vector2 (Vector2(..))
import Graphics.UI.Bottle.Widget (Widget)
import Lamdu.Config (Config)
import Lamdu.Data.Expression.IRef (DefIM)
import Lamdu.GUI.CodeEdit.Settings (Settings)
import Lamdu.GUI.ExpressionGui.Monad (ExprGuiM, WidgetT)
import qualified Control.Lens as Lens
import qualified Graphics.UI.Bottle.EventMap as E
import qualified Graphics.UI.Bottle.Widget as Widget
import qualified Graphics.UI.Bottle.Widgets.Box as Box
import qualified Lamdu.Config as Config
import qualified Lamdu.Data.Anchors as Anchors
import qualified Lamdu.Data.Expression.Load as Load
import qualified Lamdu.Data.Ops as DataOps
import qualified Lamdu.GUI.BottleWidgets as BWidgets
import qualified Lamdu.GUI.CodeEdit.Settings as Settings
import qualified Lamdu.GUI.ExpressionEdit as ExpressionEdit
import qualified Lamdu.GUI.ExpressionEdit.BuiltinEdit as BuiltinEdit
import qualified Lamdu.GUI.ExpressionEdit.DefinitionContentEdit as DefinitionContentEdit
import qualified Lamdu.GUI.ExpressionGui as ExpressionGui
import qualified Lamdu.GUI.ExpressionGui.AddNextHoles as AddNextHoles
import qualified Lamdu.GUI.ExpressionGui.Monad as ExprGuiM
import qualified Lamdu.GUI.WidgetEnvT as WE
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import qualified Lamdu.Sugar.AddNames as AddNames
import qualified Lamdu.Sugar.Convert as SugarConvert
import qualified Lamdu.Sugar.RemoveTypes as SugarRemoveTypes
import qualified Lamdu.Sugar.Types as Sugar
type T = Transaction
type CT m = StateT Cache (WE.WidgetEnvT (T m))
make ::
(Typeable1 m, MonadA m) =>
Anchors.CodeProps m -> Settings ->
DefIM m -> CT m (WidgetT m)
make cp settings defI = ExprGuiM.run ExpressionEdit.make cp settings $ do
infoMode <- (^. Settings.sInfoMode) <$> ExprGuiM.readSettings
let
maybeRemoveTypes =
case infoMode of
Settings.Types -> id
_ -> fmap SugarRemoveTypes.nonHoleTypes
defS <- ExprGuiM.liftMemoT $ maybeRemoveTypes <$> loadConvertDefI cp defI
case defS ^. Sugar.drBody of
Sugar.DefinitionBodyExpression bodyExpr ->
makeExprDefinition defS bodyExpr
Sugar.DefinitionBodyBuiltin builtin ->
makeBuiltinDefinition defS builtin
makeBuiltinDefinition ::
MonadA m =>
Sugar.Definition Sugar.Name m (ExprGuiM.SugarExpr m) ->
Sugar.DefinitionBuiltin m (ExprGuiM.SugarExpr m) ->
ExprGuiM m (WidgetT m)
makeBuiltinDefinition def builtin = do
config <- ExprGuiM.widgetEnv WE.readConfig
Box.vboxAlign 0 <$> sequenceA
[ defTypeScale config . (^. ExpressionGui.egWidget) <$>
ExprGuiM.makeSubexpression 0 (Sugar.biType builtin)
, BWidgets.hboxCenteredSpaced <$> sequenceA
[ ExprGuiM.withFgColor (Config.builtinOriginNameColor config) $
DefinitionContentEdit.makeNameEdit name (Widget.joinId myId ["name"]) guid
, ExprGuiM.widgetEnv . BWidgets.makeLabel "=" $ Widget.toAnimId myId
, BuiltinEdit.make builtin myId
]
]
where
Sugar.Definition guid name _ = def
myId = WidgetIds.fromGuid guid
defTypeScale :: Config -> Widget f -> Widget f
defTypeScale config = Widget.scale $ realToFrac <$> Config.defTypeBoxScaleFactor config
makeExprDefinition ::
MonadA m =>
Sugar.Definition Sugar.Name m (ExprGuiM.SugarExpr m) ->
Sugar.DefinitionExpression Sugar.Name m (ExprGuiM.SugarExpr m) ->
ExprGuiM m (WidgetT m)
makeExprDefinition def bodyExpr = do
config <- ExprGuiM.widgetEnv WE.readConfig
let
makeGrid = (:[]) . defTypeScale config . BWidgets.gridHSpaced
addAcceptanceArrow acceptInferredType label = do
acceptanceLabel <-
(fmap . Widget.weakerEvents)
(Widget.keysEventMapMovesCursor (Config.acceptKeys config)
(E.Doc ["Edit", "Accept inferred type"]) (acceptInferredType >> return myId)) .
ExprGuiM.widgetEnv .
BWidgets.makeFocusableTextView "↱" $ Widget.joinId myId ["accept type"]
return $ BWidgets.hboxCenteredSpaced [acceptanceLabel, label]
labelStyle =
ExprGuiM.localEnv $ WE.setTextSizeColor
(Config.defTypeLabelTextSize config)
(Config.defTypeLabelColor config)
mkTypeRow labelText onLabel typeExpr = do
label <-
onLabel . labelStyle . ExprGuiM.widgetEnv .
BWidgets.makeLabel labelText $ Widget.toAnimId myId
typeGui <- ExprGuiM.makeSubexpression 0 typeExpr
return
[ (right, label)
, (center, Widget.doesntTakeFocus (typeGui ^. ExpressionGui.egWidget))
]
typeWidgets <-
case bodyExpr ^. Sugar.deTypeInfo of
Sugar.DefinitionExportedTypeInfo x ->
makeGrid <$> sequenceA
[ mkTypeRow "Exported type:" id x ]
Sugar.DefinitionIncompleteType x ->
makeGrid <$> sequenceA
[ mkTypeRow "Exported type:" id $ Sugar.sitOldType x
, mkTypeRow "Inferred type:" id $ Sugar.sitNewIncompleteType x
]
Sugar.DefinitionNewType x ->
makeGrid <$> sequenceA
[ mkTypeRow "Exported type:" (>>= addAcceptanceArrow (Sugar.antAccept x)) $
Sugar.antOldType x
, mkTypeRow "Inferred type:" id $ Sugar.antNewType x
]
bodyWidget <-
DefinitionContentEdit.make guid name $ bodyExpr ^. Sugar.deContent
return . Box.vboxAlign 0 $ typeWidgets ++ [bodyWidget]
where
right = Vector2 1 0.5
center = 0.5
Sugar.Definition guid name _ = def
myId = WidgetIds.fromGuid guid
loadConvertDefI ::
(MonadA m, Typeable1 m) =>
Anchors.CodeProps m -> DefIM m ->
StateT Cache (T m) (Sugar.DefinitionN m ExprGuiM.Payload)
loadConvertDefI cp defI =
lift (Load.loadDefinitionClosure defI) >>=
SugarConvert.convertDefI cp
<&> AddNames.addToDef
<&> Lens.mapped . Lens.mapped . Lens.mapped %~ mkPayload
<&> AddNextHoles.addToDef
where
mkPayload guids = ExprGuiM.Payload
{ ExprGuiM._plStoredGuids = guids
, ExprGuiM._plInjected = [False]
-- Filled by AddNextHoles above:
, ExprGuiM._plHoleGuids = ExprGuiM.emptyHoleGuids
}
makeNewDefinition ::
MonadA m => Anchors.CodeProps m ->
CT m (T m Widget.Id)
makeNewDefinition cp = do
curCursor <- lift WE.readCursor
return $ do
newDefI <- DataOps.newPublicDefinition cp ""
DataOps.newPane cp newDefI
DataOps.savePreJumpPosition cp curCursor
return . DefinitionContentEdit.diveToNameEdit $ WidgetIds.fromIRef newDefI
|
sinelaw/lamdu
|
Lamdu/GUI/DefinitionEdit.hs
|
gpl-3.0
| 6,662
| 0
| 21
| 1,166
| 1,825
| 977
| 848
| -1
| -1
|
-- Exercise 09: Find the first 10 leap years.
-- Copyright (c) 2017 Daniel Gonçalves da Silva - https://github.com/danielgoncalvesti
-- GPL version 3 or later (see http://www.gnu.org/copyleft/gpl.html)
--
module Main where
isLeap :: Integer -> Bool
isLeap year
| year `rem` 400 == 0 = True
| year `rem` 100 == 0 = False
| year `rem` 4 == 0 = True
| otherwise = False
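-- A few sanity checks of the rule: isLeap 2000 == True (divisible by 400),
-- isLeap 1900 == False (divisible by 100 but not 400), isLeap 2016 == True.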
main :: IO()
main = do
let years = [1..2017]
let list = [x | x <- years, isLeap x]
let list2 = take 10 list
  print list2
|
danielgoncalvesti/BIGDATA2017
|
Atividade01/Haskell/Activity1/Exercises1/Ex9a.hs
|
gpl-3.0
| 545
| 0
| 12
| 136
| 167
| 86
| 81
| 13
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
module InnerEar.Types.User where
import Text.JSON
import Text.JSON.Generic
import InnerEar.Types.Handle
import InnerEar.Types.Password
import InnerEar.Types.Data
data Role =
NormalUser | -- can only log in, do exercises, inspect their own data/history
Manager | -- can also add NormalUsers, inspect any data/history
Administrator -- can also add Managers
deriving (Show,Eq,Data,Typeable)
data User = User {
handle :: Handle,
password :: Password,
role :: Role
} deriving (Show,Eq,Data,Typeable)
canSeeUserList :: Maybe Role -> Bool
canSeeUserList (Just Administrator) = True
canSeeUserList (Just Manager) = True
canSeeUserList _ = False
|
JamieBeverley/InnerEar
|
src/InnerEar/Types/User.hs
|
gpl-3.0
| 713
| 0
| 8
| 128
| 166
| 97
| 69
| 21
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- Copyright (c) 2013, Diego Souza
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of the <ORGANIZATION> nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
module Main where
import Network
import System.IO
import Nettty.Proxy
import Nettty.Server
import System.Directory
import Control.Concurrent
import System.Environment
import System.Posix.Files
thereAndSocket :: String -> IO Bool
thereAndSocket f = do
there <- doesFileExist f
case there of
True -> fmap isSocket (getFileStatus f)
False -> return False
readInt :: String -> Int
readInt = read
main :: IO ()
main = do
port <- fmap (readInt . (!! 0)) getArgs
-- there <- thereAndSocket xfile
-- when there (removeFile xfile)
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
hSetBuffering stderr LineBuffering
hSetBinaryMode stdin True
hSetBinaryMode stdout True
(cmd:args) <- fmap tail getArgs
p <- nettty (proc cmd args)
_ <- forkIO (start p (PortNumber (fromIntegral port)))
wait p
|
dgvncsz0f/nettty
|
src/Nettty/nettty-proxy.hs
|
gpl-3.0
| 2,391
| 0
| 14
| 441
| 298
| 159
| 139
| 30
| 2
|
{-# LANGUAGE NoMonomorphismRestriction #-}
import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine
import Data.Colour.SRGB
import Diagrams.TwoD.Text
import Diagrams.TwoD.Arrow
import Diagrams.CubicSpline
import Diagrams.TwoD.Offset
import WorldParser
import TownParser
import RoadParser
import Text.Parsec
import Control.Monad (liftM)
import Data.Map (Map, (!))
import qualified Data.Map as DM
main = do
worldInfo <- parseWorld
  --having parsed the world data, we can now proceed to rendering
  --we have to do all the rendering under the pattern-match Right branch
  --why? because we can't know ahead of time
  --whether the parse result is Left or Right.
  --the only way to extract the data inside, which we need for rendering,
  --is a pattern match
townInfo <- parseTownFile
roadInfo <- parseRoadFile
case townInfo of
Left err ->
mainWith (triangle 1 # fc pink # lw thick :: Diagram B)
Right ts ->
case worldInfo of
Left err ->
mainWith (circle 1 # fc pink # lw thick :: Diagram B)
Right hs ->
case roadInfo of
Left err -> mainWith (square 1 # fc pink # lw thick :: Diagram B)
Right rs ->
mainWith $ drawFullWorld hs townMap rs
where
townMap = DM.fromList ts
drawFullWorld :: [Hex] -> Map Coord TownData -> [Road] -> Diagram B
drawFullWorld hs ts rs =
drawTownLayer pts ts filtered
`atop`
drawRoadLayer rs
`atop`
drawHexLayer pts filtered
where
pts :: [P2 Double]
pts = map (coordToPixel . coord) filtered
filtered = filterHexes wanted hs
--coord positions of each hex
wanted = [Coord q r (0-q-r) | q <- [30..55], r <-[(-75),(-74)..(-55)]]
wanted' = [Coord 55 (-60) 5, Coord 48 (-56) 8]
--remove a hex from hs if its coord isn't in cs
filterHexes :: [Coord] -> [Hex] -> [Hex]
filterHexes cs hs =
filter (\h -> (coord h) `elem` cs) hs
drawRoadLayer :: [Road] -> Diagram B
drawRoadLayer rs = foldr (atop) mempty $ fmap drawRoad rs
drawRoad :: Road -> Diagram B
drawRoad (Road cs) =
(position (zip pts (repeat mempty)) <> cubicSpline False pts) # lc orchid # lw veryThin # lineCap LineCapRound
where
pts = fmap coordToPixel cs
drawHexLayer :: [P2 Double] -> [Hex] -> Diagram B
drawHexLayer pts hs = atPoints pts $ map drawHex hs
drawHex :: Hex -> Diagram B
drawHex h =
alignedText 0.5 0 qAndR # fc black # scale 0.5
`atop`
alignedText 0.5 0.75 (show . cs . coord $ h) # fc black # scale 0.5
`atop`
hexagon 1 # fc infraColor # lc black # lw veryThin # scale 1
where
subdivided = triangleHexagon (isLand h) (subs h)
qAndR = (show . cq . coord $ h) ++ (',' : (show . cr . coord $ h))
climateColor' = climateColor $ climate h
moistColor' = moistColor $ moist h
elevColor' = elevColor (elev h) (isLand h)
onlyColorLandBorder = if (isLand h) == True then black else elevColor'
infraColor = infrastructureColor (infra h)
    --onlyColorLandBorder creates a weird image,
    --because hexes render in a strange order
    --and some border colors overlap onto others,
    --creating weird "bites" taken out of some hexes.
triangleHexagon :: Bool -> Map WorldParser.Direction Sub -> Diagram B
triangleHexagon isLand subs =
atPoints (trailVertices $ hexagon 1 # rotate ((-30) @@ deg)) tris
where
t = triangle 1 # lw veryThin # lc black
makeTri :: Int -> WorldParser.Direction -> Diagram B
makeTri n dir = t # rotateBy (dirToRot dir) # fc (if (isLand == False) then blue else (subDirQualityToColor dir subs))
    --not sure yet whether water should be specified for wild/civ, but I'm leaning toward "no";
    --for now we can just not color it
triDN = makeTri 0 DN # translate (r2 (0.0, 0.4))
triDR = makeTri 1 DR # translate (r2 (negHor, posVert))
triUR = makeTri 2 UR # translate (r2 (negHor, negVert))
triUP = makeTri 3 UP # translate (r2 (0.0, (-0.4)))
triUL = makeTri 4 UL # translate (r2 (posHor, negVert))
triDL = makeTri 5 DL # translate (r2 (posHor, posVert))
tris = [triDN, triDR, triUR, triUP, triUL, triDL]
--tris MUST be in that order. Don't know how to encode this just yet.
posVert = 0.21
negVert = (-1) * posVert
posHor = 0.38
negHor = (-1) * posHor
dirToRot d = case d of
DN -> (0/6)
DR -> (1/6)
UR -> (2/6)
UP -> (3/6)
UL -> (4/6)
DL -> (5/6)
qualityColor Civilized = pink
qualityColor (Wild 1) = sRGB 0 1 0
qualityColor (Wild 2) = sRGB 0 0.5 0
qualityColor (Wild 3) = sRGB 0.1 0.2 0
qualityColor (Wild 4) = sRGB 0.4 0.1 0
subDirQualityToColor dir subs =
case theSub of
Just (Sub _ q) -> qualityColor q
Nothing -> black
where
theSub = DM.lookup dir subs
infrastructureColor (Infrastructure a) = sRGB a' 0 0
where
a' = a / 200.0
drawTownLayer :: [P2 Double] -> Map Coord TownData -> [Hex] -> Diagram B
drawTownLayer pts ts hs = atPoints pts $ map (drawTown ts) hs
drawTown :: Map Coord TownData -> Hex -> Diagram B
drawTown ts h = case hasTown of
Nothing -> mempty
Just (TownData n) -> baselineText n # fc white # scale 0.5
where
hasTown = DM.lookup (coord h) ts
coordToPixel :: Coord -> P2 Double
coordToPixel (Coord q r s) = p2 (x,y)
where
q' = fromIntegral q
r' = fromIntegral r
x = 3/2 * (q')
y = (sqrt 3) * (r' + ((q')/2))
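--A quick worked example of the axial-to-pixel conversion above (values are
--illustrative, not from the original source): for Coord 2 (-1) (-1),
--x = 3/2 * 2 = 3.0 and y = sqrt 3 * (-1 + 2/2) = 0.0, so
--coordToPixel (Coord 2 (-1) (-1)) == p2 (3.0, 0.0).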
elevColor (Elevation e) l
 |not l = sRGB 0 0 e --it's sea, therefore color it blueish
|e < 0.50 = lightgreen
|e < 0.55 = green
|e < 0.60 = darkgreen
|e < 0.65 = pink
|e < 0.70 = lavender
|e < 0.75 = magenta
|e < 0.80 = red
|e < 0.85 = seagreen
|e < 0.90 = goldenrod
|e < 0.95 = slategray
|otherwise = white
tempColor (Temperature t)
|t' <= 0 = sRGB (0) (0) (abs (t'))
|otherwise = sRGB t' 0 0
where
t' = t / 100
moistColor m
|m == 1 = white --sea
|otherwise = sRGB 0 0 (m)
climateColor Water =
blue
climateColor Desert =
khaki
climateColor Steppe =
darkkhaki
climateColor Mediterranean =
yellow
climateColor HotSummerContinental =
lightseagreen
climateColor ColdContinental =
seagreen
climateColor WetContinental =
darkseagreen
climateColor Savannah =
yellowgreen
climateColor Monsoon =
red
climateColor Oceanic =
lightsteelblue
climateColor ColdOceanic =
steelblue
climateColor TropicalRainforest =
darkgreen
climateColor HumidSubtropical =
lightgreen
climateColor Taiga =
brown
climateColor Tundra =
gray
climateColor IceCap =
white
regionColor r
|r == 0 = blue
|r == 1 = orange
|r == 3 = brown
|r == 12 = brown
|r == 18 = yellow
|r `elem` aColors = lightpink
|r `elem` bColors = mediumorchid
|r `elem` cColors = crimson
|otherwise = olive
where
aColors = [1,8,13]
bColors = [2,3,4,6,12,15]
cColors = [5,7,16,17,18,20]
|
maxwelljoslyn/DnDMapAndEcon
|
WorldRenderer.hs
|
gpl-3.0
| 6,804
| 36
| 22
| 1,683
| 2,595
| 1,328
| 1,267
| 183
| 6
|
module Peer.Connection (peerThread) where
import HTorrentPrelude
import Morphisms
import Peer.Env
import Peer.Get
import Peer.Handshake
import Peer.Request
import Peer.RequestBuffer
import Peer.Send
import Peer.State
import Peer.Message
import Torrent.Env
import Torrent.Event
import Torrent.State.Availability
import Control.Concurrent.Async
import Data.IntSet.Lens
import Network.Socket
data PeerConnectionExcept =
FailedHandshake HandshakeExcept
deriving Show
peerThread :: TorrentEnv -> SockAddr -> IO ()
peerThread env a = do
s <- connectPeer a
runPeer env a s
close s
runPeer :: TorrentEnv -> SockAddr -> Socket -> IO ()
runPeer tEnv a s = void $ try $ do
id <- mapExceptT FailedHandshake (handshake s (tEnv ^. torrentInfo))
st <- lift (initPeerState a id)
let env = PeerEnv tEnv st
mapExceptT FailedHandshake (runReaderT (addPeer id st) tEnv)
lift $ do
prs <- newTQueueIO
pcd <- newTQueueIO
rs <- newTQueueIO
prsb <- newEmptyTMVarIO
let getEnv = GetEnv prs pcd env
let sendEnv = SendEnv rs prsb env
let bufEnv = BufEnv prs pcd prsb
threads <- mapM async [
runReaderT (requestThread rs) env,
getThread getEnv s,
runReaderT (sendThread s) sendEnv,
runReaderT bufferRequests bufEnv ]
waitAnyCancel threads
runReaderT cleanupPeer env
cleanupPeer :: ReaderT PeerEnv IO ()
cleanupPeer = hoist atomically $ do
ps <- viewTVar (peerState . pieces)
magnify (torrentEnv . availability) (mapMOf_ members decAvail ps)
connectPeer :: SockAddr -> IO Socket
connectPeer a = do
s <- socket AF_INET Stream defaultProtocol
s <$ connect s a
addPeer :: ByteString -> PeerState -> ReaderT TorrentEnv (ExceptT HandshakeExcept IO) ()
addPeer id st = hoist (hoist atomically) $ do
ps <- viewTVar peers
assert (InvalidId id) (not (member id ps))
peers &.= insertMap id st ps
torrentEvents &-< PeerConnected id st
|
ian-mi/hTorrent
|
Peer/Connection.hs
|
gpl-3.0
| 2,079
| 0
| 16
| 550
| 665
| 322
| 343
| 60
| 1
|
-- |
-- Module : Grimoire.Cache
-- Copyright : (c) 2012 Brendan Hay <brendan@soundcloud.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan@soundcloud.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Grimoire.Cache (
-- * Restricted Constructor
Cache
, newCache
-- * Functions
, withCache
) where
import Prelude hiding (lookup)
import Control.Monad (liftM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Concurrent
import Control.Concurrent.STM
import qualified Data.Map as M
type MValue v = MVar (Maybe v)
type TMap k v = TVar (M.Map k (MValue v))
newtype Cache k v = Cache (TMap k v)
newCache :: (MonadIO m, Ord k) => m (Cache k v)
newCache = liftIO . atomically $ Cache `liftM` (newTVar M.empty)
withCache :: (MonadIO m, Ord k) => Cache k v -> IO v -> k -> m v
withCache (Cache tvar) io key = do
v <- lookup tvar key
liftIO $ modifyMVar v cons
where
cons v = do
y <- case v of
Just x -> return x
Nothing -> io
return (Just y, y)
--
-- Private
--
lookup :: (MonadIO m, Ord k) => TMap k v -> k -> m (MValue v)
lookup tvar key = liftIO $ do
(m, v) <- atomically (readTVar tvar) >>= insert
seq m . atomically $ writeTVar tvar m
return v
where
insert m = case M.lookup key m of
Just v -> return (m, v)
Nothing -> do
v <- newMVar Nothing
return (M.insert key v m, v)
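-- A minimal usage sketch (not part of the original module): the key and the
-- expensive action below are made up; only 'newCache' and 'withCache' come
-- from this module. The second call is served from the cache.
_exampleUsage :: IO ()
_exampleUsage = do
    cache <- newCache :: IO (Cache String Int)
    let expensive = putStrLn "computing..." >> return (42 :: Int)
    x <- withCache cache expensive "answer" -- runs the action and caches 42
    y <- withCache cache expensive "answer" -- returns the cached value
    print (x, y)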
|
brendanhay/grimoire
|
src/Grimoire/Cache.hs
|
mpl-2.0
| 1,766
| 0
| 15
| 530
| 527
| 280
| 247
| 34
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.AttributeDefinitions.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the specified Attribute definition.
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.consentStores.attributeDefinitions.get@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.ConsentStores.AttributeDefinitions.Get
(
-- * REST Resource
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGetResource
-- * Creating a Request
, projectsLocationsDataSetsConsentStoresAttributeDefinitionsGet
, ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet
-- * Request Lenses
, pldscsadgXgafv
, pldscsadgUploadProtocol
, pldscsadgAccessToken
, pldscsadgUploadType
, pldscsadgName
, pldscsadgCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.consentStores.attributeDefinitions.get@ method which the
-- 'ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet' request conforms to.
type ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGetResource
=
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] AttributeDefinition
-- | Gets the specified Attribute definition.
--
-- /See:/ 'projectsLocationsDataSetsConsentStoresAttributeDefinitionsGet' smart constructor.
data ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet =
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet'
{ _pldscsadgXgafv :: !(Maybe Xgafv)
, _pldscsadgUploadProtocol :: !(Maybe Text)
, _pldscsadgAccessToken :: !(Maybe Text)
, _pldscsadgUploadType :: !(Maybe Text)
, _pldscsadgName :: !Text
, _pldscsadgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldscsadgXgafv'
--
-- * 'pldscsadgUploadProtocol'
--
-- * 'pldscsadgAccessToken'
--
-- * 'pldscsadgUploadType'
--
-- * 'pldscsadgName'
--
-- * 'pldscsadgCallback'
projectsLocationsDataSetsConsentStoresAttributeDefinitionsGet
:: Text -- ^ 'pldscsadgName'
-> ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet
projectsLocationsDataSetsConsentStoresAttributeDefinitionsGet pPldscsadgName_ =
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet'
{ _pldscsadgXgafv = Nothing
, _pldscsadgUploadProtocol = Nothing
, _pldscsadgAccessToken = Nothing
, _pldscsadgUploadType = Nothing
, _pldscsadgName = pPldscsadgName_
, _pldscsadgCallback = Nothing
}
-- | V1 error format.
pldscsadgXgafv :: Lens' ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet (Maybe Xgafv)
pldscsadgXgafv
= lens _pldscsadgXgafv
(\ s a -> s{_pldscsadgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldscsadgUploadProtocol :: Lens' ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet (Maybe Text)
pldscsadgUploadProtocol
= lens _pldscsadgUploadProtocol
(\ s a -> s{_pldscsadgUploadProtocol = a})
-- | OAuth access token.
pldscsadgAccessToken :: Lens' ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet (Maybe Text)
pldscsadgAccessToken
= lens _pldscsadgAccessToken
(\ s a -> s{_pldscsadgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldscsadgUploadType :: Lens' ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet (Maybe Text)
pldscsadgUploadType
= lens _pldscsadgUploadType
(\ s a -> s{_pldscsadgUploadType = a})
-- | Required. The resource name of the Attribute definition to get.
pldscsadgName :: Lens' ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet Text
pldscsadgName
= lens _pldscsadgName
(\ s a -> s{_pldscsadgName = a})
-- | JSONP
pldscsadgCallback :: Lens' ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet (Maybe Text)
pldscsadgCallback
= lens _pldscsadgCallback
(\ s a -> s{_pldscsadgCallback = a})
instance GoogleRequest
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet
where
type Rs
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet
= AttributeDefinition
type Scopes
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGet'{..}
= go _pldscsadgName _pldscsadgXgafv
_pldscsadgUploadProtocol
_pldscsadgAccessToken
_pldscsadgUploadType
_pldscsadgCallback
(Just AltJSON)
healthcareService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsDataSetsConsentStoresAttributeDefinitionsGetResource)
mempty
|
brendanhay/gogol
|
gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/ConsentStores/AttributeDefinitions/Get.hs
|
mpl-2.0
| 6,252
| 0
| 15
| 1,213
| 698
| 409
| 289
| 114
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetGrpcProxies.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a TargetGrpcProxy in the specified project in the given scope
-- using the parameters that are included in the request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetGrpcProxies.insert@.
module Network.Google.Resource.Compute.TargetGrpcProxies.Insert
(
-- * REST Resource
TargetGrpcProxiesInsertResource
-- * Creating a Request
, targetGrpcProxiesInsert
, TargetGrpcProxiesInsert
-- * Request Lenses
, tgpiRequestId
, tgpiProject
, tgpiPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetGrpcProxies.insert@ method which the
-- 'TargetGrpcProxiesInsert' request conforms to.
type TargetGrpcProxiesInsertResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"targetGrpcProxies" :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TargetGrpcProxy :>
Post '[JSON] Operation
-- | Creates a TargetGrpcProxy in the specified project in the given scope
-- using the parameters that are included in the request.
--
-- /See:/ 'targetGrpcProxiesInsert' smart constructor.
data TargetGrpcProxiesInsert =
TargetGrpcProxiesInsert'
{ _tgpiRequestId :: !(Maybe Text)
, _tgpiProject :: !Text
, _tgpiPayload :: !TargetGrpcProxy
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetGrpcProxiesInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tgpiRequestId'
--
-- * 'tgpiProject'
--
-- * 'tgpiPayload'
targetGrpcProxiesInsert
:: Text -- ^ 'tgpiProject'
-> TargetGrpcProxy -- ^ 'tgpiPayload'
-> TargetGrpcProxiesInsert
targetGrpcProxiesInsert pTgpiProject_ pTgpiPayload_ =
TargetGrpcProxiesInsert'
{ _tgpiRequestId = Nothing
, _tgpiProject = pTgpiProject_
, _tgpiPayload = pTgpiPayload_
}
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
tgpiRequestId :: Lens' TargetGrpcProxiesInsert (Maybe Text)
tgpiRequestId
= lens _tgpiRequestId
(\ s a -> s{_tgpiRequestId = a})
-- | Project ID for this request.
tgpiProject :: Lens' TargetGrpcProxiesInsert Text
tgpiProject
= lens _tgpiProject (\ s a -> s{_tgpiProject = a})
-- | Multipart request metadata.
tgpiPayload :: Lens' TargetGrpcProxiesInsert TargetGrpcProxy
tgpiPayload
= lens _tgpiPayload (\ s a -> s{_tgpiPayload = a})
instance GoogleRequest TargetGrpcProxiesInsert where
type Rs TargetGrpcProxiesInsert = Operation
type Scopes TargetGrpcProxiesInsert =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient TargetGrpcProxiesInsert'{..}
= go _tgpiProject _tgpiRequestId (Just AltJSON)
_tgpiPayload
computeService
where go
= buildClient
(Proxy :: Proxy TargetGrpcProxiesInsertResource)
mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/TargetGrpcProxies/Insert.hs
|
mpl-2.0
| 4,661
| 0
| 16
| 1,016
| 484
| 292
| 192
| 77
| 1
|
{-# LANGUAGE BangPatterns #-}
module ManualBang where
doesntEval :: Bool -> Int
doesntEval b = 1
manualSeq :: Bool -> Int
manualSeq b = b `seq` 1
banging :: Bool -> Int
banging !b = 1
data Foo = Foo Int !Int
first (Foo x _) = x
second (Foo _ y) = y
data DoesntForce = TisLazy Int String
gibString :: DoesntForce -> String
gibString (TisLazy _ s) = s
data BangBang = SheShotMeDown !Int !String
gimmeString :: BangBang -> String
gimmeString (SheShotMeDown _ s) = s
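-- Illustrative behaviour (hypothetical GHCi session, not part of the original
-- file): 'doesntEval undefined' is 1, while 'manualSeq undefined' and
-- 'banging undefined' both hit bottom. Likewise 'first (Foo 1 undefined)'
-- blows up because the second field of Foo is strict, whereas
-- 'gibString (TisLazy undefined "ok")' is fine because TisLazy is lazy.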
|
dmvianna/haskellbook
|
src/Ch27-Bang.hs
|
unlicense
| 473
| 0
| 7
| 98
| 181
| 95
| 86
| 23
| 1
|
{-# LANGUAGE FlexibleInstances, DeriveGeneric, DeriveAnyClass #-}
{- ===========================================================================
Contains basic types - you'll have to extend several of the definitions below
=========================================================================== -}
module FP_TypesEtc where
import GHC.Generics
import FPPrac.Trees
-- ===================================================================
-- Example Alphabet
-- - Extend, adapt, change the non-terminals to your own needs
-- - Do NOT change the first two groups of constructors (Symbol ... Rep1)
data Alphabet = Terminal String -- Terminal symbol: WILL be included in parseTree
| Symbol String -- Terminal symbol: will NOT be included in parseTree
| SyntCat Alphabet -- Checks whether a string belongs to a syntactic category
| Alt [Alphabet] [Alphabet] -- Try both
| Opt [Alphabet] -- Optional
| Rep0 [Alphabet] -- Zero or more repetitions
| Rep1 [Alphabet] -- One or more repetitions
| Nmbr -- Number
| Vrbl -- Variable
| Op -- Operation symbol
| Program -- Program
| Block -- Block
| Stmt -- Statement
| Expr -- Expression
| WS -- Spaces
| Bracket -- Brackets
| Brace -- Braces
| Rswrd -- Reserved words
| Assgn -- Assignment
| Repet -- Repeat
| Iff -- If
| Thenn -- Then
| Elsse -- Else
deriving (Eq,Ord,Show,Generic,ToRoseTree)
-- ===================================================================
-- Symbolic notation for EBNF constructors
ps <> qs = Alt ps qs
(?:) ps = Opt ps
(*:) ps = Rep0 ps
(+:) ps = Rep1 ps
-- ===================================================================
type Grammar = Alphabet -> [[Alphabet]]
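-- A made-up illustration of the notation above (not part of the original
-- file): an Expr is a number followed by zero or more "operator number"
-- groups.
exampleGrammar :: Grammar
exampleGrammar Expr = [[ SyntCat Nmbr, (*:) [SyntCat Op, SyntCat Nmbr] ]]
exampleGrammar _    = []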
type Token = (Alphabet,String,Int) -- Alphabet: indicates the "syntactic category" to which
-- the String belongs (to distinguish, a.o., between
-- reserved words and identifiers in general),
-- String: the token itself,
-- Int: the position of the token in the input token-list
-- (needed for error messages).
instance ToRoseTree Token where
toRoseTree t = RoseNode (show t) []
data ParseTree = PLeaf Token
| PNode Alphabet [ParseTree]
| PError ParseTree [Alphabet] Alphabet String Int
deriving (Eq,Show,Generic,ToRoseTree)
instance Ord ParseTree where
PError _ _ _ _ k < PError _ _ _ _ k' = k < k'
_ < _ = error "ordering only in case of parse-errors"
PError _ _ _ _ k <= PError _ _ _ _ k' = k <= k'
_ <= _ = error "ordering only in case of parse-errors"
type ParseState = ( Alphabet -- Non-terminal indicating the present subexpression
, [ParseTree] -- The already produced trees within the present subexpression
, [Token] -- The remaining list of input tokens
)
-- ===================================================================
x ∈ xs = x `elem` xs
-- ===================================================================
-- Pretty Printing
toStrings tree = case tree of
PLeaf t -> ["PLeaf " ++ show t]
PNode nt ts -> ("PNode " ++ show nt) : (addSpace 7 $ concat $ addEndBrack $ addListNotation $ map toStrings ts)
where
addSpace n = map ((replicate n ' ') ++)
addListNotation ((str:strs):strss) = (("["++str):strs)
: [ (","++str'):strs' | (str':strs') <- strss ]
addEndBrack [strs] = [ strs ++ ["]"] ]
addEndBrack (strs:strss) = strs : addEndBrack strss
PError tr rule nt str k -> [ "==========="
, "Parse Error"
, "==========="
, "Recognized:"
, "-----------"
]
++ toStrings tr ++
[ "-----------"
, "Still to go: " ++ show rule
, "Expected: " ++ show nt
, "Found: " ++ str
, "At position: " ++ show k
, "==========="
]
prpr t = putStr $ ('\n':) $ (++"\n") $ unlines $ toStrings t
|
wouwouwou/module_8
|
src/main/haskell/series6/FP_TypesEtc.hs
|
apache-2.0
| 5,475
| 0
| 15
| 2,508
| 843
| 475
| 368
| 70
| 4
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module QQ01 (module QQ01) where
import qualified Data.Text as T
import Data.Text (Text,unpack)
import Data.Attoparsec.Text
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.Quote
string :: QuasiQuoter
string = QuasiQuoter
{ quoteExp = \s -> [| s |],
-- OR, with Language.Haskell.TH.Syntax imported: quoteExp = lift
quotePat = undefined,
quoteType = undefined,
quoteDec = undefined
}
type Chunk = Either Text String
-- a chunk is either just raw Text, or the String name of a variable
makeChunks :: Text -> [Chunk]
makeChunks ts = case parseOnly parser ts of
Right x -> x
_ -> error "malformed text"
where
parser = do
res <- loop []
return [ rs |
rs <- res,
rs /= Left "",
rs /= Right ""]
variable = do
char '$'
name <- takeTill (notInClass "a-zA-Z0-9_")
return (Right (T.unpack name))
loop xs = do
text <- takeTill (== '$')
var <- choice [variable, fmap Left takeText]
end <- atEnd
if end
then return $ reverse (var : Left text : xs)
else loop (var : Left text : xs)
instance Lift Text where
lift t = litE (stringL (unpack t))
format :: QuasiQuoter
format = QuasiQuoter
{ quoteExp = \s ->
let chunks = makeChunks (T.pack s)
liftedChunks = flip map chunks $ \c -> case c of
Left t -> [| t |] -- lift raw text
Right v -> varE (mkName v) -- get a global Name from the name given
-- and now to fold it all together ...
in foldr (\l r -> appE [| T.append |] l `appE` r) [| T.empty |] liftedChunks,
-- note that: appE :: Q Exp -> Q Exp -> Q Exp; it acts as function application for Q Exps
-- [| T.append |] is the Q Exp form of T.append
quotePat = undefined,
quoteType = undefined,
quoteDec = undefined
}
format1 :: QuasiQuoter
format1 = QuasiQuoter
{ quoteExp = \s ->
let chunks = makeChunks (T.pack s)
liftedChunks = flip map chunks $ \c -> case c of
Left t -> [| t |] -- lift raw text
Right v -> varE (mkName v) -- get a global Name from the name given
in appE [| T.concat |] (listE liftedChunks),
-- note that listE :: [Q Exp] -> Q Exp
quotePat = undefined,
quoteType = undefined,
quoteDec = undefined
}
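-- A hypothetical usage sketch (requires QuasiQuotes at the use site; the
-- variable name below is made up):
--
-- > greet :: T.Text -> T.Text
-- > greet name = [format|Hello, $name!|]
--
-- Splicing looks the variable up via 'mkName', so 'name' must be a Text that
-- is in scope wherever the quote is used.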
|
egaburov/funstuff
|
Haskell/thsk/QQ01.hs
|
apache-2.0
| 2,596
| 0
| 19
| 879
| 686
| 375
| 311
| 61
| 3
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Module2_Consts where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq, succ,
pred, enumFrom, enumFromThen, enumFromThenTo,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import Data.List
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
import qualified Module0_Types as Module0_Types
import qualified Module1_Types as Module1_Types
import qualified Module2_Types
c2 :: Module2_Types.Struct
c2 = Module2_Types.default_Struct{Module2_Types.struct_first = Module0_Types.default_Struct{Module0_Types.struct_first = 101, Module0_Types.struct_second = "module0_str"}, Module2_Types.struct_second = Module1_Types.default_Struct{Module1_Types.struct_first = 201, Module1_Types.struct_second = "module1_str"}}
c3 :: Module2_Types.Struct
c3 = Module2_Types.default_Struct{Module2_Types.struct_first = Module0_Types.default_Struct{Module0_Types.struct_first = 101, Module0_Types.struct_second = "module0_str"}, Module2_Types.struct_second = Module1_Types.default_Struct{Module1_Types.struct_first = 201, Module1_Types.struct_second = "module1_str"}}
c4 :: Module2_Types.Struct
c4 = Module2_Types.default_Struct{Module2_Types.struct_first = Module0_Types.default_Struct{Module0_Types.struct_first = 101, Module0_Types.struct_second = "module0_str"}, Module2_Types.struct_second = Module1_Types.default_Struct{Module1_Types.struct_first = 201, Module1_Types.struct_second = "module1_str"}}
|
facebook/fbthrift
|
thrift/compiler/test/fixtures/qualified/gen-hs/Module2_Consts.hs
|
apache-2.0
| 3,294
| 0
| 8
| 480
| 649
| 444
| 205
| 47
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Specs.Orchestrate.GraphSpec where
import Control.Error
import Control.Exception
import Control.Lens
import Control.Monad
import Test.Hspec
import Database.Orchestrate.Graph
import Database.Orchestrate.KeyValue
import Database.Orchestrate.Types
import Database.Orchestrate.Utils
import Specs.Orchestrate.Spec.Types
import Specs.Orchestrate.Spec.Utils
-- Names for 2014 tropical cyclones (from NOAA).
fixtures :: [Person]
fixtures = [ Person "Bertha" 1 -- 0
, Person "Fay" 9 -- 1
, Person "Laura" 8 -- 2
, Person "Rene" 2 -- 3
]
#if NETWORK_SPECS
spec :: Spec
spec = describe "Database.Orchestrate.Graph" $ around_ (withFixtures fixtures) $ do
describe "createRel and getRel" $
it "should create relationships that getRel can retrieve." $ do
let bertha = fixtures !! 0
rene = fixtures !! 3
r' <- run $ createRel bertha "brother" rene
r' `shouldSatisfy` isRight
r <- run $ getRel bertha "brother" []
r ^? _Right . resultCount `shouldBe` Just 1
r ^.. _Right . resultList . traverse . itemValue . personName
`shouldBe` ["Rene"]
describe "deleteRel" $
      it "should remove relationships so getRel no longer finds them." $ do
let fay = fixtures !! 1
laura = fixtures !! 2
void . run' $ createRel fay "sister" laura
void . run' $ deleteRel fay "sister" laura
r <- (run $ getRel fay "sister" []) :: IO (Either SomeException (RelList Person Person))
r ^? _Right . resultCount `shouldBe` Just 0
#else
spec :: Spec
spec = describe "Database.Orchestrate.Graph" $
it "should contain tests." $
pendingWith "configure with \"--enable-tests -fnetwork-specs\"."
#endif
|
erochest/orchestrate
|
specs/Specs/Orchestrate/GraphSpec.hs
|
apache-2.0
| 2,002
| 0
| 16
| 664
| 433
| 228
| 205
| 23
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-| Lenses for Ganeti config objects
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Objects.Lens where
import qualified Data.ByteString as BS
import qualified Data.ByteString.UTF8 as UTF8
import Control.Lens (Simple)
import Control.Lens.Iso (Iso, iso)
import qualified Data.Set as Set
import System.Time (ClockTime(..))
import Ganeti.Lens (makeCustomLenses, Lens')
import Ganeti.Objects
-- | Isomorphism between Strings and bytestrings
stringL :: Simple Iso BS.ByteString String
stringL = iso UTF8.toString UTF8.fromString
-- | Class of objects that have timestamps.
class TimeStampObject a => TimeStampObjectL a where
mTimeL :: Lens' a ClockTime
-- | Class of objects that have an UUID.
class UuidObject a => UuidObjectL a where
uuidL :: Lens' a String
-- | Class of object that have a serial number.
class SerialNoObject a => SerialNoObjectL a where
serialL :: Lens' a Int
-- | Class of objects that have tags.
class TagsObject a => TagsObjectL a where
tagsL :: Lens' a TagSet
$(makeCustomLenses ''AddressPool)
$(makeCustomLenses ''Network)
instance SerialNoObjectL Network where
serialL = networkSerialL
instance TagsObjectL Network where
tagsL = networkTagsL
instance UuidObjectL Network where
uuidL = networkUuidL . stringL
instance TimeStampObjectL Network where
mTimeL = networkMtimeL
$(makeCustomLenses ''PartialNic)
$(makeCustomLenses ''Disk)
instance TimeStampObjectL Disk where
mTimeL = diskMtimeL
instance UuidObjectL Disk where
uuidL = diskUuidL . stringL
instance SerialNoObjectL Disk where
serialL = diskSerialL
$(makeCustomLenses ''Instance)
instance TimeStampObjectL Instance where
mTimeL = instMtimeL
instance UuidObjectL Instance where
uuidL = instUuidL . stringL
instance SerialNoObjectL Instance where
serialL = instSerialL
instance TagsObjectL Instance where
tagsL = instTagsL
$(makeCustomLenses ''MinMaxISpecs)
$(makeCustomLenses ''PartialIPolicy)
$(makeCustomLenses ''FilledIPolicy)
$(makeCustomLenses ''Node)
instance TimeStampObjectL Node where
mTimeL = nodeMtimeL
instance UuidObjectL Node where
uuidL = nodeUuidL . stringL
instance SerialNoObjectL Node where
serialL = nodeSerialL
instance TagsObjectL Node where
tagsL = nodeTagsL
$(makeCustomLenses ''NodeGroup)
instance TimeStampObjectL NodeGroup where
mTimeL = groupMtimeL
instance UuidObjectL NodeGroup where
uuidL = groupUuidL . stringL
instance SerialNoObjectL NodeGroup where
serialL = groupSerialL
instance TagsObjectL NodeGroup where
tagsL = groupTagsL
$(makeCustomLenses ''Cluster)
instance TimeStampObjectL Cluster where
mTimeL = clusterMtimeL
instance UuidObjectL Cluster where
uuidL = clusterUuidL . stringL
instance SerialNoObjectL Cluster where
serialL = clusterSerialL
instance TagsObjectL Cluster where
tagsL = clusterTagsL
$(makeCustomLenses ''ConfigData)
instance SerialNoObjectL ConfigData where
serialL = configSerialL
instance TimeStampObjectL ConfigData where
mTimeL = configMtimeL
|
mbakke/ganeti
|
src/Ganeti/Objects/Lens.hs
|
bsd-2-clause
| 4,289
| 0
| 8
| 664
| 721
| 376
| 345
| 82
| 1
|
{-# LANGUAGE MultiWayIf #-}
module Hans.Socket.Handle(makeHansHandle) where
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import qualified Data.ByteString as BSS
import Data.ByteString.Lazy(ByteString)
import qualified Data.ByteString.Lazy as BS
import Data.Typeable
import Foreign.Ptr
import GHC.IO.Buffer
import GHC.IO.BufferedIO
import GHC.IO.Device
import GHC.IO.Handle
import Hans.Layer.Tcp.Socket(canSend, canRecv)
import Hans.NetworkStack
import Prelude hiding (read)
import System.IO
data BufferedSocket = BS {
bsSocket :: Socket
, bsRefCount :: MVar Int
}
deriving (Typeable)
newBufferedSocket :: Socket -> IO BufferedSocket
newBufferedSocket sock =
do mv <- newMVar 1
return (BS sock mv)
addRef :: BufferedSocket -> IO ()
addRef bs = modifyMVar_ (bsRefCount bs) (return . (+ 1))
dropRef :: BufferedSocket -> IO ()
dropRef bs =
do count <- modifyMVar (bsRefCount bs) $
\ x ->
let x' = x - 1
in return (x', x')
when (count == 0) $ Hans.NetworkStack.close (bsSocket bs)
instance IODevice BufferedSocket where
ready dev write msecs =
do let tester = if write then canSend else canRecv
canDo <- tester (bsSocket dev)
if | canDo -> return True
| msecs <= 0 -> return False
| otherwise -> do let delay = min msecs 100
threadDelay (delay * 1000)
ready dev write (msecs - delay)
close bs = dropRef bs
isTerminal _ = return False
isSeekable _ = return False
seek _ _ _ = throwIO (userError "Seek on HaNS socket.")
tell _ = throwIO (userError "Tell on HaNS socket.")
getSize _ = throwIO (userError "getSize on HaNS socket.")
setSize _ _ = throwIO (userError "setSize on HaNS socket.")
setEcho _ _ = throwIO (userError "setEcho on HaNS socket.")
getEcho _ = throwIO (userError "getEcho on HaNS socket.")
setRaw _ _ = return ()
devType _ = return Stream
dup bs = addRef bs >> return bs
dup2 _ _ = throwIO (userError "dup2 on HaNS socket.")
instance RawIO BufferedSocket where
read sock dptr sz =
do bstr <- recvBytes (bsSocket sock) (fromIntegral sz)
when (BS.length bstr > 0) $ copyBS (BS.toChunks bstr) dptr sz
return (fromIntegral (BS.length bstr))
readNonBlocking sock dptr sz =
do canGo <- canRecv (bsSocket sock)
if canGo
then Just `fmap` read sock dptr sz
else return (Just 0)
write sock ptr sz =
do bstr <- BSS.packCStringLen (castPtr ptr, sz)
sendAll (bsSocket sock) (BS.fromStrict bstr)
where
sendAll sock bstr
| BS.null bstr = return ()
| otherwise = do num <- sendBytes sock bstr
sendAll sock (BS.drop (fromIntegral num) bstr)
writeNonBlocking sock ptr sz =
do canGo <- canSend (bsSocket sock)
if canGo
then do bstr <- BSS.packCStringLen (castPtr ptr, sz)
num <- sendBytes (bsSocket sock) (BS.fromStrict bstr)
return (fromIntegral num)
else return 0
instance BufferedIO BufferedSocket where
newBuffer _ = newByteBuffer (64 * 1024)
fillReadBuffer = readBuf
fillReadBuffer0 = readBufNonBlocking
flushWriteBuffer = writeBuf
flushWriteBuffer0 = writeBufNonBlocking
makeHansHandle :: Socket -> IOMode -> IO Handle
makeHansHandle socket mode =
do buffSocket <- newBufferedSocket socket
mkFileHandle buffSocket "<socket>" mode Nothing noNewlineTranslation
copyBS :: [BSS.ByteString] -> Ptr a -> Int -> IO ()
copyBS [] _ _ = return ()
copyBS (f:rest) sptr szLeft
| BSS.null f = copyBS rest sptr szLeft
| szLeft <= 0 = return ()
| otherwise =
do let (chunk1, chunk2) = BSS.splitAt szLeft f
amt = fromIntegral (BSS.length chunk1)
BSS.useAsCString chunk1 $ \ dptr -> memcpy dptr sptr amt
copyBS (chunk2 : rest) (sptr `plusPtr` amt) (szLeft - amt)
foreign import ccall unsafe "string.h memcpy"
memcpy :: Ptr a -> Ptr b -> Int -> IO ()
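-- A hypothetical usage sketch (not part of the original module): wrap an
-- already-accepted HaNS socket so that ordinary Handle-based code can use it.
_exampleHello :: Socket -> IO ()
_exampleHello sock = do
  h <- makeHansHandle sock ReadWriteMode
  hPutStrLn h "hello from HaNS"
  hClose h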
|
hackern/network-hans
|
src/Hans/Socket/Handle.hs
|
bsd-3-clause
| 4,101
| 0
| 15
| 1,101
| 1,424
| 703
| 721
| -1
| -1
|
module Aws.DynamoDb.Commands(
module Aws.DynamoDb.Commands.Table
) where
import Aws.DynamoDb.Commands.Table
|
RayRacine/aws
|
Aws/DynamoDb/Commands.hs
|
bsd-3-clause
| 113
| 0
| 5
| 13
| 24
| 17
| 7
| 3
| 0
|
{-# OPTIONS_GHC -Wall #-}
module ElmFormat where
import Elm.Utils ((|>))
import System.Exit (exitFailure, exitSuccess)
import Messages.Types
import Messages.Formatter.Format
import Control.Monad (when)
import Control.Monad.Free
import Data.Maybe (isJust)
import CommandLine.Helpers
import ElmVersion
import ElmFormat.FileStore (FileStore)
import ElmFormat.Operation (Operation)
import qualified AST.Module
import qualified Flags
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Char8 as Char8
import qualified Data.ByteString.Lazy as Lazy
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified ElmFormat.Execute as Execute
import qualified ElmFormat.Parse as Parse
import qualified ElmFormat.Render.Text as Render
import qualified ElmFormat.FileStore as FileStore
import qualified ElmFormat.Filesystem as FS
import qualified ElmFormat.Operation as Operation
import qualified ElmFormat.Version
import qualified Reporting.Error.Syntax as Syntax
import qualified Reporting.Result as Result
-- If elm-format-short was successful and the formatted result differs
-- from the original content, write the result to the output file.
-- Otherwise, display errors and exit.
writeResult
:: Operation f =>
ElmVersion
-> Destination
-> FilePath
-> Text.Text
-> Result.Result () Syntax.Error AST.Module.Module
-> Free f (Maybe Bool)
writeResult elmVersion destination inputFile inputText result =
case result of
Result.Result _ (Result.Ok modu) ->
let
renderedText =
Render.render elmVersion modu
rendered =
renderedText
|> Text.encodeUtf8
in
case destination of
UpdateInPlace ->
Operation.deprecatedIO $
Char8.putStr rendered
>> return Nothing
ValidateOnly ->
if inputText /= renderedText then
onInfo (FileWouldChange inputFile)
>> return (Just False)
else
return $ Just True
ToFile path ->
let
shouldWriteToFile =
inputFile /= path || inputText /= renderedText
in
if shouldWriteToFile then
Operation.deprecatedIO $
ByteString.writeFile path rendered
>> return Nothing
else
return Nothing
Result.Result _ (Result.Err errs) ->
onInfo (ParseError inputFile (Text.unpack inputText) errs)
>> return (Just False)
processTextInput :: Operation f => ElmVersion -> Destination -> FilePath -> Text.Text -> Free f (Maybe Bool)
processTextInput elmVersion destination inputFile inputText =
Parse.parse inputText
|> writeResult elmVersion destination inputFile inputText
processFileInput :: Operation f => ElmVersion -> FilePath -> Destination -> Free f (Maybe Bool)
processFileInput elmVersion inputFile destination =
do
inputText <- Operation.deprecatedIO $ fmap Text.decodeUtf8 $ ByteString.readFile inputFile
processTextInput elmVersion destination inputFile inputText
resolveFile :: FileStore f => FilePath -> Free f (Either InputFileMessage [FilePath])
resolveFile path =
do
fileType <- FileStore.stat path
case fileType of
FileStore.IsFile ->
return $ Right [path]
FileStore.IsDirectory ->
do
elmFiles <- FS.findAllElmFiles path
case elmFiles of
[] -> return $ Left $ NoElmFiles path
_ -> return $ Right elmFiles
FileStore.DoesNotExist ->
return $ Left $ FileDoesNotExist path
collectErrors :: [Either l r] -> Either [l] [r]
collectErrors list =
let
step acc next =
case (next, acc) of
(Left l, Right _) ->
Left [l]
(Left l, Left ls) ->
Left (l : ls)
(Right r, Right rs) ->
Right (r : rs)
(Right _, Left ls) ->
Left ls
in
foldl step (Right []) list
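-- For illustration (examples not in the original source):
--   collectErrors [Right 1, Left "a", Left "b"] == Left ["b", "a"]
--   collectErrors [Right 1, Right 2]            == Right [2, 1]
-- Because this is a left fold that conses onto the accumulator, the collected
-- lists come out in reverse input order.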
resolveFiles :: FileStore f => [FilePath] -> Free f (Either [InputFileMessage] [FilePath])
resolveFiles inputFiles =
do
result <- collectErrors <$> mapM resolveFile inputFiles
case result of
Left ls ->
return $ Left ls
Right files ->
return $ Right $ concat files
handleFilesInput :: Operation f => ElmVersion -> [FilePath] -> Maybe FilePath -> Bool -> Bool -> Free f (Maybe Bool)
handleFilesInput elmVersion inputFiles outputFile autoYes validateOnly =
do
elmFiles <- resolveFiles inputFiles
case elmFiles of
Left errors ->
Operation.deprecatedIO $
do
putStrLn $ r $ BadInputFiles errors
exitFailure
Right [inputFile] -> do
realOutputFile <- decideOutputFile autoYes inputFile outputFile
case realOutputFile of
Nothing ->
return Nothing
Just realOutputFile' ->
do
let destination = if validateOnly then ValidateOnly else ToFile realOutputFile'
onInfo $ ProcessingFiles [inputFile]
processFileInput elmVersion inputFile destination
Right elmFiles -> do
when (isJust outputFile)
exitOnInputDirAndOutput
canOverwriteFiles <- getApproval autoYes elmFiles
if canOverwriteFiles
then
let
merge prev next =
case (prev, next) of
(Nothing, Just b) -> Just b
(Just b, Nothing) -> Just b
(Just a, Just b) -> Just $ a && b
(Nothing, Nothing) -> Nothing
dst file =
if validateOnly then
ValidateOnly
else
ToFile file
in
do
onInfo $ ProcessingFiles elmFiles
validationResults <- mapM (\file -> processFileInput elmVersion file (dst file)) elmFiles
return $ foldl merge Nothing validationResults
else
return Nothing
data WhatToDo
= FormatToFile FilePath FilePath
| StdinToFile FilePath
| FormatInPlace FilePath [FilePath]
| StdinToStdout
| Validate Source
data Source
= Stdin
| FromFiles FilePath [FilePath]
data Destination
= ValidateOnly
| UpdateInPlace
| ToFile FilePath
determineSource :: Bool -> [FilePath] -> Either ErrorMessage Source
determineSource stdin inputFiles =
case ( stdin, inputFiles ) of
( True, [] ) -> Right Stdin
( False, [] ) -> Left NoInputs
( False, first:rest ) -> Right $ FromFiles first rest
( True, _:_ ) -> Left TooManyInputs
determineDestination :: Maybe FilePath -> Bool -> Either ErrorMessage Destination
determineDestination output validate =
case ( output, validate ) of
( Nothing, True ) -> Right ValidateOnly
( Nothing, False ) -> Right UpdateInPlace
( Just path, False ) -> Right $ ToFile path
( Just _, True ) -> Left OutputAndValidate
determineWhatToDo :: Source -> Destination -> Either ErrorMessage WhatToDo
determineWhatToDo source destination =
case ( source, destination ) of
( _, ValidateOnly ) -> Right $ Validate source
( Stdin, UpdateInPlace ) -> Right StdinToStdout
( Stdin, ToFile output ) -> Right $ StdinToFile output
( FromFiles first [], ToFile output ) -> Right $ FormatToFile first output
( FromFiles first rest, UpdateInPlace ) -> Right $ FormatInPlace first rest
( FromFiles _ _, ToFile _ ) -> Left SingleOutputWithMultipleInputs
determineWhatToDoFromConfig :: Flags.Config -> Either ErrorMessage WhatToDo
determineWhatToDoFromConfig config =
do
source <- determineSource (Flags._stdin config) (Flags._input config)
destination <- determineDestination (Flags._output config) (Flags._validate config)
determineWhatToDo source destination
validate :: Operation f => ElmVersion -> Source -> Free f Bool
validate elmVersion source =
do
result <-
case source of
Stdin ->
do
input <- Operation.deprecatedIO Lazy.getContents
Lazy.toStrict input
|> Text.decodeUtf8
|> processTextInput elmVersion ValidateOnly "<STDIN>"
FromFiles first rest ->
handleFilesInput elmVersion (first:rest) Nothing True True
case result of
Nothing ->
error "Validation should always give a result"
Just isSuccess ->
return isSuccess
exitWithError :: ErrorMessage -> IO ()
exitWithError message =
(putStrLn $ r $ message)
>> exitFailure
determineVersion :: ElmVersion -> Bool -> Either ErrorMessage ElmVersion
determineVersion elmVersion upgrade =
case (elmVersion, upgrade) of
(Elm_0_18, True) ->
Right Elm_0_18_Upgrade
(_, True) ->
Left $ MustSpecifyVersionWithUpgrade Elm_0_18_Upgrade
(_, False) ->
Right elmVersion
exit :: Bool -> IO ()
exit True = exitSuccess
exit False = exitFailure
elmFormatVersion :: String
elmFormatVersion =
ElmFormat.Version.asString
experimental :: Maybe String
experimental =
ElmFormat.Version.experimental
main :: ElmVersion -> IO ()
main defaultVersion =
do
config <- Flags.parse defaultVersion elmFormatVersion experimental
let autoYes = Flags._yes config
let elmVersionResult = determineVersion (Flags._elmVersion config) (Flags._upgrade config)
case (elmVersionResult, determineWhatToDoFromConfig config) of
(_, Left NoInputs) ->
Flags.showHelpText defaultVersion elmFormatVersion experimental
>> exitFailure
(_, Left message) ->
exitWithError message
(Left message, _) ->
exitWithError message
(Right elmVersion, Right (Validate source)) ->
do
isSuccess <-
validate elmVersion source
|> Execute.run (Execute.forMachine elmVersion)
exit isSuccess
(Right elmVersion, Right (FormatInPlace first rest)) ->
do
result <- foldFree Execute.forHuman $ handleFilesInput elmVersion (first:rest) Nothing autoYes False
case result of
Just False ->
exitFailure
_ ->
exitSuccess
(Right elmVersion, Right (FormatToFile input output)) ->
do
result <- foldFree Execute.forHuman $ handleFilesInput elmVersion [input] (Just output) autoYes False
case result of
Just False ->
exitFailure
_ ->
exitSuccess
(Right elmVersion, Right StdinToStdout) ->
do
input <- Lazy.getContents
result <-
Lazy.toStrict input
|> Text.decodeUtf8
|> processTextInput elmVersion UpdateInPlace "<STDIN>"
|> foldFree Execute.forHuman
case result of
Just False ->
exitFailure
_ ->
exitSuccess
(Right elmVersion, Right (StdinToFile output)) ->
do
input <- Lazy.getContents
result <-
Lazy.toStrict input
|> Text.decodeUtf8
|> processTextInput elmVersion (ToFile output) "<STDIN>"
|> foldFree Execute.forHuman
case result of
Just False ->
exitFailure
_ ->
exitSuccess
|
nukisman/elm-format-short
|
src/ElmFormat.hs
|
bsd-3-clause
| 13,335
| 0
| 23
| 5,322
| 3,039
| 1,528
| 1,511
| 300
| 12
|
module Counter where
import Control.Arrow.Flow
import Control.Monad.State
type Count = StateT Int IO
incr :: Count ()
incr = get >>= \x -> put (x+1)
--This is a counting flow node that keeps a count of things
--that have flowed through it
counter :: Flow Count a (a,Int)
counter = Flow $ \input -> do
incr
c <- get
return (finished (input,c), counter)
printer :: Show a => Flow Count a ()
printer = Flow $ \input -> do
liftIO $ print input
return (finished (), printer)
--This computation takes an input that is a member of Show
--Each input gets counted by counter and then the piped through to printer
--which prints out the input. The resulting real output is a list of [()]
--
--The function uses a foldFlow which is a left-fold over a member of Foldable
--that produces a list of results. This is, in effect, like a giant map.
runcomp = evalStateT (liftM fst $ foldFlow (counter <//> printer) [0..5]) 0
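--A hypothetical entry point (assuming, as the comments above say, that the
--overall result of runcomp is a list of units):
--
--  main :: IO ()
--  main = runcomp >>= print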
|
igraves/flow-arrow
|
examples/Counter.hs
|
bsd-3-clause
| 1,076
| 2
| 12
| 334
| 233
| 126
| 107
| 16
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
#endif
module Text.RE.TDFA.String
(
-- * Tutorial
-- $tutorial
-- * The Match Operators
(*=~)
, (?=~)
, (=~)
, (=~~)
-- * The Toolkit
-- $toolkit
, module Text.RE
-- * The 'RE' Type
-- $re
, module Text.RE.TDFA.RE
) where
import Prelude.Compat
import Data.Typeable
import Text.Regex.Base
import Text.RE
import Text.RE.Internal.AddCaptureNames
import Text.RE.TDFA.RE
import qualified Text.Regex.TDFA as TDFA
-- | find all matches in text
(*=~) :: String
-> RE
-> Matches String
(*=~) bs rex = addCaptureNamesToMatches (reCaptureNames rex) $ match (reRegex rex) bs
-- | find first match in text
(?=~) :: String
-> RE
-> Match String
(?=~) bs rex = addCaptureNamesToMatch (reCaptureNames rex) $ match (reRegex rex) bs
-- | the regex-base polymorphic match operator
(=~) :: ( Typeable a
, RegexContext TDFA.Regex String a
, RegexMaker TDFA.Regex TDFA.CompOption TDFA.ExecOption String
)
=> String
-> RE
-> a
(=~) bs rex = addCaptureNames (reCaptureNames rex) $ match (reRegex rex) bs
-- | the regex-base monadic, polymorphic match operator
(=~~) :: ( Monad m
, Functor m
, Typeable a
, RegexContext TDFA.Regex String a
, RegexMaker TDFA.Regex TDFA.CompOption TDFA.ExecOption String
)
=> String
-> RE
-> m a
(=~~) bs rex = addCaptureNames (reCaptureNames rex) <$> matchM (reRegex rex) bs
instance IsRegex RE String where
matchOnce = flip (?=~)
matchMany = flip (*=~)
regexSource = reSource
-- $tutorial
-- We have a regex tutorial at <http://tutorial.regex.uk>. These API
-- docs are mainly for reference.
-- $toolkit
--
-- Beyond the above match operators and the regular expression type
-- below, "Text.RE" contains the toolkit for replacing captures,
-- specifying options, etc.
-- $re
--
-- "Text.RE.TDFA.RE" contains the toolkit specific to the 'RE' type,
-- the type generated by the regex compiler.
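-- A small usage sketch (illustrative, not from the original module); it
-- assumes the 'countMatches' helper and the 're' quasi-quoter re-exported
-- via "Text.RE" and "Text.RE.TDFA.RE":
--
-- > countFoos :: String -> Int
-- > countFoos s = countMatches $ s *=~ [re|foo|]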
|
cdornan/idiot
|
Text/RE/TDFA/String.hs
|
bsd-3-clause
| 2,407
| 0
| 8
| 640
| 458
| 272
| 186
| 49
| 1
|
import "hint" HLint.Default
import "hint" HLint.Dollar
import "hint" HLint.Generalise
ignore "Use mappend"
ignore "Use ."
ignore "Use fmap"
ignore "Use second"
|
pbiggar/rash
|
HLint.hs
|
bsd-3-clause
| 161
| 0
| 5
| 22
| 43
| 20
| 23
| -1
| -1
|
{-# LANGUAGE ForeignFunctionInterface #-}
module Foreign.HCLR.Binding.Mono.Internal (
module Foreign.HCLR.Binding.Mono.Internal
) where
import Foreign
import Foreign.C
import System.Environment (getProgName)
type GBool = CInt
gboolTrue :: GBool
gboolTrue = 1
gboolFalse :: GBool
gboolFalse = 0
data MonoAssembly = MonoAssembly
type MonoAssemblyPtr = Ptr MonoAssembly
data MonoAssemblyName = MonoAssemblyName
type MonoAssemblyNamePtr = Ptr MonoAssemblyName
data MonoDomain = MonoDomain
type MonoDomainPtr = Ptr MonoDomain
data MonoImage = MonoImage
type MonoImagePtr = Ptr MonoImage
type MonoString = MonoObject
type MonoStringPtr = Ptr MonoString
data MonoMethodDesc = MonoMethodDesc
type MonoMethodDescPtr = Ptr MonoMethodDesc
data MonoMethod = MonoMethod
type MonoMethodPtr = Ptr MonoMethod
data MonoObject = MonoObject
type MonoObjectPtr = Ptr MonoObject
data MonoClass = MonoClass
type MonoClassPtr = Ptr MonoClass
data MonoImageOpenStatus = MonoImageOpenStatus
type MonoImageOpenStatusPtr = Ptr MonoImageOpenStatus
data MonoMethodSignature = MonoMethodSignature
type MonoMethodSignaturePtr = Ptr MonoMethodSignature
data MonoType = MonoType
type MonoTypePtr = Ptr MonoType
data MonoProperty = MonoProperty
type MonoPropertyPtr = Ptr MonoProperty
type ObjectHandle = Word32 --a handle to prevent object ptrs being reclaimed by the gc
foreign import ccall mono_jit_init :: CString -> IO MonoDomainPtr
foreign import ccall mono_jit_cleanup :: MonoDomainPtr -> IO ()
foreign import ccall mono_get_corlib :: IO MonoImagePtr
foreign import ccall mono_string_new :: MonoDomainPtr -> CString -> IO MonoStringPtr
foreign import ccall mono_string_new_utf16 :: MonoDomainPtr -> Ptr Word16 -> Int32 -> IO MonoStringPtr
foreign import ccall mono_string_new_wrapper :: CString -> IO MonoStringPtr
foreign import ccall mono_method_desc_new :: CString -> GBool -> IO MonoMethodDescPtr
foreign import ccall mono_method_desc_search_in_image :: MonoMethodDescPtr -> MonoImagePtr -> IO MonoMethodPtr
foreign import ccall mono_method_desc_free :: MonoMethodDescPtr -> IO ()
foreign import ccall mono_runtime_invoke :: MonoMethodPtr -> MonoObjectPtr -> Ptr MonoObjectPtr -> Ptr () -> IO MonoObjectPtr
foreign import ccall mono_domain_get :: IO MonoDomainPtr
foreign import ccall mono_gchandle_new :: MonoObjectPtr -> GBool -> IO ObjectHandle
foreign import ccall mono_gchandle_get_target :: ObjectHandle -> IO MonoObjectPtr
foreign import ccall mono_gchandle_free :: ObjectHandle -> IO ()
foreign import ccall mono_get_int16_class :: IO MonoClassPtr
foreign import ccall mono_value_box :: MonoDomainPtr -> MonoClassPtr -> Ptr Int -> IO MonoObjectPtr
foreign import ccall mono_object_unbox :: MonoObjectPtr -> IO (Ptr Int)
foreign import ccall mono_assembly_get_image :: MonoAssemblyPtr -> IO MonoImagePtr
foreign import ccall mono_assembly_name_new :: CString -> IO MonoAssemblyNamePtr
foreign import ccall mono_assembly_fill_assembly_name :: MonoImagePtr -> MonoAssemblyNamePtr -> IO GBool
foreign import ccall mono_stringify_assembly_name :: MonoAssemblyNamePtr -> IO CString
foreign import ccall mono_assembly_load :: MonoAssemblyNamePtr -> CString -> MonoImageOpenStatusPtr -> IO MonoAssemblyPtr
foreign import ccall mono_class_from_name :: MonoImagePtr -> CString -> CString -> IO MonoClassPtr
foreign import ccall mono_object_new :: MonoDomainPtr -> MonoClassPtr -> IO MonoObjectPtr
foreign import ccall mono_config_parse :: CString -> IO ()
foreign import ccall mono_runtime_object_init :: MonoObjectPtr -> IO ()
foreign import ccall mono_image_get_name :: MonoImagePtr -> IO CString
foreign import ccall mono_class_num_methods :: MonoClassPtr -> IO Int
foreign import ccall mono_class_get_methods :: MonoClassPtr -> Ptr (Ptr Int) -> IO MonoMethodPtr
foreign import ccall mono_signature_get_param_count :: MonoMethodSignaturePtr -> IO Int
foreign import ccall mono_signature_get_params :: MonoMethodSignaturePtr -> Ptr (Ptr Int) -> IO MonoTypePtr
foreign import ccall mono_method_get_name :: MonoMethodPtr -> IO CString
foreign import ccall mono_method_full_name :: MonoMethodPtr -> GBool -> IO CString
foreign import ccall mono_method_signature :: MonoMethodPtr -> IO MonoMethodSignaturePtr
foreign import ccall mono_class_get_name :: MonoClassPtr -> IO CString
foreign import ccall mono_class_get_namespace :: MonoClassPtr -> IO CString
foreign import ccall mono_class_get_parent :: MonoClassPtr -> IO MonoClassPtr
foreign import ccall mono_class_from_mono_type :: MonoTypePtr -> IO MonoClassPtr
foreign import ccall mono_class_get_property_from_name :: MonoClassPtr -> CString -> IO MonoPropertyPtr
foreign import ccall mono_object_get_class :: MonoObjectPtr -> IO MonoClassPtr
foreign import ccall mono_domain_create_appdomain :: CString -> CString -> IO MonoDomainPtr
foreign import ccall mono_domain_set :: MonoDomainPtr -> GBool -> IO GBool
foreign import ccall mono_get_root_domain :: IO MonoDomainPtr
foreign import ccall mono_runtime_init :: MonoDomainPtr -> Ptr () -> Ptr () -> IO ()
foreign import ccall mono_set_dirs :: Ptr () -> Ptr () -> IO ()
foreign import ccall mono_register_config_for_assembly :: CString -> CString -> IO ()
foreign import ccall mono_signature_get_return_type :: MonoMethodSignaturePtr -> IO MonoTypePtr
foreign import ccall mono_get_string_class :: IO MonoClassPtr
foreign import ccall mono_class_get_image :: MonoClassPtr -> IO MonoImagePtr
foreign import ccall "marshal.c getString" getString :: ObjectHandle -> IO (Ptr Word16)
foreign import ccall "marshal.c stringLength" stringLength :: ObjectHandle -> IO Int32
foreign import ccall "marshal.c setupDomain" setupDomain :: MonoDomainPtr -> CString -> CString -> IO ()
monoLoadAssembly :: String -> IO MonoAssemblyPtr
monoLoadAssembly s = withCString s (\c-> mono_assembly_name_new c >>= \n-> mono_assembly_load n nullPtr nullPtr)
monoInit :: IO MonoDomainPtr
monoInit = do
prog <- getProgName
dom <- withCString (prog) mono_jit_init
mono_config_parse nullPtr
withCString "./" $ \baseDir->
withCString "hclr.config" $ \configFile-> do
setupDomain dom baseDir configFile
return dom
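-- A hypothetical usage sketch (the assembly name below is made up; only the
-- bindings defined above are used): initialise the runtime, load an assembly
-- by name, then shut the JIT down.
_exampleInit :: IO ()
_exampleInit = do
  dom <- monoInit
  _asm <- monoLoadAssembly "mscorlib"
  mono_jit_cleanup dom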
|
tim-m89/hclr
|
Foreign/HCLR/Binding/Mono/Internal.hs
|
bsd-3-clause
| 6,102
| 0
| 13
| 794
| 1,483
| 784
| 699
| 101
| 1
|
-- | Types for the rewriting combinator
module ADP.Multi.Rewriting where
import ADP.Multi.Parser
-- | Tree of subwords. Every path in a tree represents
-- a sequence of subwords for a corresponding sequence of parsers
-- in a production.
data SubwordTree = SubwordTree Subword [SubwordTree] deriving Show
type SubwordConstructionAlgorithm a
= a -- ^ rewriting function
-> [ParserInfo] -- ^ yield size info for each parser of a production
-> Subword -- ^ subword for which subwords should be constructed
-> [SubwordTree] -- ^ constructed subwords, represented as tree
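-- Illustration (hypothetical, not from the original source): for a production
-- with two parsers, a result such as
--   [SubwordTree w1 [SubwordTree w2 [], SubwordTree w3 []]]
-- encodes two alternatives: the first parser gets w1 and the second gets
-- either w2 or w3.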
|
adp-multi/adp-multi
|
src/ADP/Multi/Rewriting.hs
|
bsd-3-clause
| 607
| 0
| 8
| 128
| 63
| 42
| 21
| 8
| 0
|
main = [1,2,3] !! 10
|
roberth/uu-helium
|
test/runtimeerrors/Index2.hs
|
gpl-3.0
| 20
| 0
| 6
| 4
| 19
| 11
| 8
| 1
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Conditional(unlessM)
import Control.Exception(Handler(..), catches, throwIO)
import Control.Monad(forM_)
import Control.Monad.Except(runExceptT, when)
import Data.Aeson((.=), ToJSON, object, toJSON)
import Data.Aeson.Encode.Pretty(encodePretty)
import Data.ByteString.Lazy(toStrict)
import Data.Conduit((.|), runConduit)
import qualified Data.Conduit.List as CL
import Data.Maybe(catMaybes, fromMaybe, isJust, isNothing, mapMaybe)
import qualified Data.Text as T
import Data.Text.Encoding(decodeUtf8)
import Data.Time.Clock.POSIX(getCurrentTime, posixSecondsToUTCTime)
import Data.Time.Format(defaultTimeLocale, formatTime)
import System.Console.GetOpt
import System.Directory(doesFileExist)
import System.Environment(getArgs)
import System.Exit(exitFailure)
import Text.Printf(printf)
import Text.Read(readMaybe)
import Text.Regex.PCRE((=~))
import BDCS.DB(Files(..), KeyVal(..), checkAndRunSqlite)
import BDCS.Files(filesC, getKeyValuesForFile)
import BDCS.KeyType(KeyType(..))
import BDCS.KeyValue(keyValueListToJSON)
import BDCS.Label.Types(Label, labelDescriptions)
import BDCS.Utils.Either(whenLeft)
import BDCS.Utils.Mode(modeAsText)
import BDCS.Version
import Utils.Exceptions(InspectErrors(..))
import Utils.GetOpt(commandLineArgs, compilerOpts)
import Utils.IO(liftedPutStrLn)
import Utils.KeyVal(formatKeyValList)
data LsOptions = LsOptions { lsJSONOutput :: Bool,
lsKeyVal :: Bool,
lsLabelMatches :: Maybe Label,
lsMatches :: String,
lsVerbose :: Bool }
defaultLsOptions :: LsOptions
defaultLsOptions = LsOptions { lsJSONOutput = False,
lsKeyVal = False,
lsLabelMatches = Nothing,
lsMatches = ".*",
lsVerbose = False }
data LsRow = LsRow { rowFiles :: Files,
rowKeyVals :: Maybe [KeyVal],
rowUseMetadata :: Bool }
instance ToJSON LsRow where
toJSON r = let namePair = T.pack "path" .= toJSON (filesPath $ rowFiles r)
keyvals = maybe [] keyValueListToJSON (rowKeyVals r)
optional = if not (rowUseMetadata r) then [] else catMaybes [
fileTypeString (rowFiles r) >>= \ty -> Just $ T.pack "fileType" .= toJSON ty,
Just $ T.pack "mode" .= toJSON (filesMode $ rowFiles r),
Just $ T.pack "size" .= toJSON (filesSize $ rowFiles r),
symlinkTarget (rowFiles r) >>= \target -> Just $ T.pack "symlinkTarget" .= toJSON target,
Just $ T.pack "user" .= toJSON (filesFile_user $ rowFiles r),
Just $ T.pack "group" .= toJSON (filesFile_group $ rowFiles r),
-- Don't do any special formatting of the mtime - leave that up to the consumer.
Just $ T.pack "mtime" .= toJSON (filesMtime $ rowFiles r)
]
in
object $ [namePair] ++ keyvals ++ optional
initRow :: Files -> LsRow
initRow f = LsRow { rowFiles=f,
rowUseMetadata=False,
rowKeyVals=Nothing }
fileType :: Files -> Maybe Char
fileType Files{filesCs_object=Nothing, ..} = Just 'd'
fileType Files{filesTarget=Just _, ..} = Just 'l'
fileType _ = Nothing
fileTypeString :: Files -> Maybe String
fileTypeString Files{filesCs_object=Nothing, ..} = Just "Directory"
fileTypeString Files{filesTarget=Just _, ..} = Just "Symlink"
fileTypeString _ = Just "File"
-- Figure out how to format the file's time. If the time is in the current year, display
-- month, day, and hours/minutes. If the time is in any other year, display that year
-- instead of hours and minutes. This is not quite how ls does it - it appears to use
-- a threshold of whether the file is more than a year old. That's more time manipulation
-- than I am willing to do.
showTime :: Real t => String -> t -> String
showTime currentYear mtime = let
utcMtime = posixSecondsToUTCTime $ realToFrac mtime
mtimeYear = formatTime defaultTimeLocale "%Y" utcMtime
fmt = "%b %e " ++ if currentYear == mtimeYear then "%R" else "%Y"
in
formatTime defaultTimeLocale fmt utcMtime
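-- Illustrative example (not part of the original source). Assuming the current
-- year is "2018", an mtime of 1520085900 (2018-03-03 14:05 UTC) falls in the
-- current year and so renders with hours/minutes rather than the year:
--
-- >>> showTime "2018" (1520085900 :: Integer)
-- "Mar  3 14:05"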
symlinkTarget :: Files -> Maybe String
symlinkTarget Files{filesTarget=Just x, ..} = Just $ T.unpack x
symlinkTarget _ = Nothing
keyValToLabel :: KeyVal -> Maybe Label
keyValToLabel KeyVal {keyValKey_value=LabelKey x} = Just x
keyValToLabel _ = Nothing
runCommand :: T.Text -> [String] -> IO (Either String ())
runCommand db args = do
(opts, _) <- compilerOpts options defaultLsOptions args "ls"
when (isNothing $ lsLabelMatches opts) $
throwIO InvalidLabelError
printer <- if | lsJSONOutput opts -> return $ liftedPutStrLn . jsonPrinter
| lsVerbose opts -> do currentYear <- formatTime defaultTimeLocale "%Y" <$> getCurrentTime
return $ liftedPutStrLn . verbosePrinter currentYear
| otherwise -> return $ liftedPutStrLn . simplePrinter
runExceptT $ checkAndRunSqlite db $ runConduit $
-- Grab all the Files, filtering out any whose path does not match what we want.
filesC .| CL.filter (\f -> T.unpack (filesPath f) =~ lsMatches opts)
-- Convert them into LsRow records containing only the Files record.
.| CL.map initRow
-- If we were asked for verbose output, add that to the LsRow.
.| CL.mapM (\row -> if lsVerbose opts then return row { rowUseMetadata=True }
else return row)
-- keyval output comes up in two different ways: If we were
-- given the --keyval flag, we want to add them to the LsRow,
-- If we were given the --label flag, we want to grab the keyvals
-- from the database and check for a match. Note that both flags
-- could be given at the same time.
.| CL.mapMaybeM (\row -> do kvs <- if lsKeyVal opts || isJust (lsLabelMatches opts)
then getKeyValuesForFile (filesPath $ rowFiles row)
else return []
let labels = mapMaybe keyValToLabel kvs
if | maybe False (`notElem` labels) (lsLabelMatches opts) -> return Nothing
| lsKeyVal opts -> return $ Just $ row { rowKeyVals=Just kvs }
| otherwise -> return $ Just row)
-- Finally, pass it to the appropriate printer.
.| CL.mapM_ printer
where
options :: [OptDescr (LsOptions -> LsOptions)]
options = [
Option [] ["json"]
(NoArg (\opts -> opts { lsJSONOutput = True }))
"format output as JSON",
Option ['k'] ["keyval"]
(NoArg (\opts -> opts { lsKeyVal = True }))
"add key/val pairs to output",
Option ['l'] []
(NoArg (\opts -> opts { lsVerbose = True }))
"use a long listing format",
Option [] ["label"]
(ReqArg (\d opts -> opts { lsLabelMatches = readMaybe d }) "LABEL")
"return only results with the given LABEL",
Option ['m'] ["matches"]
(ReqArg (\d opts -> opts { lsMatches = d }) "REGEX")
"return only results that match REGEX"
]
jsonPrinter :: LsRow -> T.Text
jsonPrinter = decodeUtf8 . toStrict . encodePretty
simplePrinter :: LsRow -> T.Text
simplePrinter LsRow{..} = T.pack $
printf "%s%s"
(filesPath rowFiles)
(maybe "" formatKeyValList rowKeyVals)
verbosePrinter :: String -> LsRow -> T.Text
verbosePrinter currentYear LsRow{..} = T.pack $
printf "%c%s %8s %8s %10Ld %s %s%s%s"
(fromMaybe '-' (fileType rowFiles))
(if rowUseMetadata then T.unpack $ modeAsText $ fromIntegral $ filesMode rowFiles else "--ghost--")
(T.unpack $ filesFile_user rowFiles) (T.unpack $ filesFile_group rowFiles)
(if rowUseMetadata then filesSize rowFiles else 0)
(showTime currentYear $ filesMtime rowFiles)
(filesPath rowFiles) (maybe "" (" -> " ++) (symlinkTarget rowFiles))
(maybe "" formatKeyValList rowKeyVals)
usage :: IO ()
usage = do
printVersion "inspect-ls"
putStrLn "Usage: inspect-ls output.db repo [args ...]"
putStrLn " List files in the content store"
putStrLn "- output.db is the path to a metadata database"
putStrLn "- repo is the path to a content store repo"
exitFailure
runMain :: IO ()
runMain = do
argv <- getArgs
case commandLineArgs argv of
Nothing -> usage
Just (db, _, args) -> do
unlessM (doesFileExist db) $
throwIO MissingDBError
result <- runCommand (T.pack db) args
whenLeft result print
main :: IO ()
main =
-- Add handlers for other exception types (IOException, whatever) here.
runMain `catches` [Handler (\(e :: InspectErrors) -> handleInspectErrors e)]
where
-- And then add handlers for the various kinds of InspectErrors here.
handleInspectErrors :: InspectErrors -> IO ()
handleInspectErrors InvalidLabelError = do
putStrLn "Unrecognized file label given.\n"
putStrLn "Recognized labels:\n"
forM_ labelDescriptions $ \(l, d) ->
putStrLn $ " " ++ l ++ " - " ++ d
exitFailure
handleInspectErrors MissingCSError = putStrLn "Content store does not exist\n" >> usage
handleInspectErrors MissingDBError = putStrLn "Metadata database does not exist\n" >> usage
|
atodorov/bdcs
|
src/tools/inspect/subcommands/ls.hs
|
lgpl-2.1
| 10,264
| 0
| 20
| 3,213
| 2,485
| 1,322
| 1,163
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Kubernetes.Any(Any (..)) where
import Data.Aeson
import Control.Monad (mzero)
import GHC.Generics
import Prelude hiding (any)
newtype Any =
Any { any :: Object
} deriving (Show, Eq, Generic)
instance FromJSON Any where
parseJSON (Object o) = return . Any $ o
parseJSON _ = mzero
instance ToJSON Any where
toJSON (Any o) = Object o
|
minhdoboi/deprecated-openshift-haskell-api
|
kubernetes/scripts/Any.hs
|
apache-2.0
| 631
| 0
| 8
| 171
| 141
| 82
| 59
| 19
| 0
|
module SyncTB where
import CLaSH.Prelude
import CLaSH.Prelude.Explicit
type Clk2 = Clk "clk" 2
type Clk7 = Clk "clk" 7
type Clk9 = Clk "clk" 9
clk2 :: SClock Clk2
clk2 = sclock
clk7 :: SClock Clk7
clk7 = sclock
clk9 :: SClock Clk9
clk9 = sclock
topEntity :: Signal' Clk7 Integer -> Signal' Clk9 Integer
topEntity i = register' clk9 70 (unsafeSynchronizer clk2 clk9 (register' clk2 99 (unsafeSynchronizer clk7 clk2 (register' clk7 50 i))))
testInput :: Signal' Clk7 Integer
testInput = stimuliGenerator' clk7 $(v [(1::Integer)..10])
expectedOutput :: Signal' Clk9 Integer -> Signal' Clk9 Bool
expectedOutput = outputVerifier' clk9 $(v ([70,99,2,3,4,5,7,8,9,10]::[Integer]))
|
christiaanb/clash-compiler
|
tests/shouldwork/Testbench/SyncTB.hs
|
bsd-2-clause
| 681
| 0
| 13
| 108
| 274
| 148
| 126
| -1
| -1
|
{- |
Module : $Header$
Description : parser for HasCASL basic Items
Copyright : (c) Christian Maeder and Uni Bremen 2002-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
parser for HasCASL basic Items
-}
module HasCASL.ParseItem (basicItems, basicSpec) where
import Text.ParserCombinators.Parsec
import Common.AS_Annotation
import Common.AnnoState
import Common.Id
import Common.Keywords
import Common.Lexer
import Common.Parsec
import Common.Token
import HasCASL.As
import HasCASL.AsUtils
import HasCASL.HToken
import HasCASL.ParseTerm
import Control.Monad
-- * adapted item list parser (using 'itemAux')
hasCaslItemList :: String -> AParser st b
-> ([Annoted b] -> Range -> a) -> AParser st a
hasCaslItemList kw ip constr = do
p <- pluralKeyword kw
auxItemList hasCaslStartKeywords [p] ip constr
hasCaslItemAux :: [Token] -> AParser st b -> ([Annoted b] -> Range -> a)
-> AParser st a
hasCaslItemAux = auxItemList hasCaslStartKeywords
-- * parsing type items
commaTypeDecl :: TypePattern -> AParser st TypeItem
commaTypeDecl s = do
c <- anComma
(is, cs) <- typePattern `separatedBy` anComma
let l = s : is
p = c : cs
subTypeDecl (l, p) <|> kindedTypeDecl (l, p)
<|> return (TypeDecl l universe $ catRange p)
kindedTypeDecl :: ([TypePattern], [Token]) -> AParser st TypeItem
kindedTypeDecl (l, p) = do
t <- colT
s <- kind
let d = TypeDecl l s $ catRange $ p ++ [t]
case l of
[hd] -> pseudoTypeDef hd (Just s) [t] <|> dataDef hd s [t] <|> return d
_ -> return d
isoDecl :: TypePattern -> AParser st TypeItem
isoDecl s = do
e <- equalT
subTypeDefn (s, e) <|> do
(l, p) <- typePattern `separatedBy` equalT
return $ IsoDecl (s : l) $ catRange $ e : p
vars :: AParser st Vars
vars = fmap Var var <|> do
o <- oParenT
(vs, ps) <- vars `separatedBy` anComma
c <- cParenT
return $ VarTuple vs $ toRange o ps c
subTypeDefn :: (TypePattern, Token) -> AParser st TypeItem
subTypeDefn (s, e) = do
a <- annos
o <- oBraceT << addAnnos
v <- vars
c <- colT
t <- parseType
d <- dotT -- or bar
f <- term
a2 <- annos
p <- cBraceT
let qs = toRange e [o, c, d] p
return $ SubtypeDefn s v t (Annoted f qs a a2) qs
subTypeDecl :: ([TypePattern], [Token]) -> AParser st TypeItem
subTypeDecl (l, p) = do
t <- lessT
s <- parseType
return $ SubtypeDecl l s $ catRange $ p ++ [t]
sortItem :: AParser st TypeItem
sortItem = do
s <- typePattern
subTypeDecl ([s], [])
<|> kindedTypeDecl ([s], [])
<|> commaTypeDecl s
<|> isoDecl s
<|> return (TypeDecl [s] universe nullRange)
sortItems :: AParser st SigItems
sortItems = hasCaslItemList sortS sortItem (TypeItems Plain)
typeItem :: AParser st TypeItem
typeItem = do
s <- typePattern
subTypeDecl ([s], [])
<|> dataDef s universe []
<|> pseudoTypeDef s Nothing []
<|> kindedTypeDecl ([s], [])
<|> commaTypeDecl s
<|> isoDecl s
<|> return (TypeDecl [s] universe nullRange)
typeItemList :: [Token] -> Instance -> AParser st SigItems
typeItemList ps = hasCaslItemAux ps typeItem . TypeItems
typeItems :: AParser st SigItems
typeItems = do
p <- pluralKeyword typeS
do q <- pluralKeyword instanceS
typeItemList [p, q] Instance
<|> typeItemList [p] Plain
pseudoTypeDef :: TypePattern -> Maybe Kind -> [Token] -> AParser st TypeItem
pseudoTypeDef t k l = do
c <- asKey assignS
p <- parseType
return $ AliasType t k (simpleTypeScheme p) $ catRange $ l ++ [c]
-- * parsing datatypes
component :: AParser st [Component]
component = try (do
(is, cs) <- opId `separatedBy` anComma
compType is cs) <|> do
t <- parseType
return [NoSelector t]
concatFst :: [[a]] -> Range -> ([a], Range)
concatFst as ps = (concat as, ps)
tupleComponent :: AParser st ([Component], Range)
tupleComponent = bracketParser component oParenT cParenT anSemi concatFst
altComponent :: AParser st ([Component], Range)
altComponent = tupleComponent <|> do
i <- typeVar
let t = case i of
Id [tok] [] _ -> TypeToken tok
_ -> error "altComponent"
return ([NoSelector t], nullRange)
compType :: [Id] -> [Token] -> AParser st [Component]
compType is cs = do
c <- colonST
t <- parseType
let makeComps l1 l2 = case (l1, l2) of
([a], [b]) -> [Selector a Total t Other $ tokPos b]
(a : r, b : s) -> Selector a Total t Comma (tokPos b) : makeComps r s
_ -> error "makeComps: empty selector list"
return $ makeComps is $ cs ++ [c]
alternative :: AParser st Alternative
alternative = do
s <- pluralKeyword sortS <|> pluralKeyword typeS
(ts, cs) <- parseType `separatedBy` anComma
return $ Subtype ts $ catRange $ s : cs
<|> do
i <- hconsId
cps <- many altComponent
let ps = concatMapRange snd cps
cs = map fst cps
do q <- quMarkT
return $ Constructor i cs Partial $ appRange ps $ tokPos q
<|> return (Constructor i cs Total ps)
dataDef :: TypePattern -> Kind -> [Token] -> AParser st TypeItem
dataDef t k l = do
c <- asKey defnS
a <- annos
let aAlternative = liftM2 (\ i -> Annoted i nullRange [])
alternative annos
(Annoted v _ _ b : as, ps) <- aAlternative `separatedBy` barT
let aa = Annoted v nullRange a b : as
qs = catRange $ l ++ c : ps
do d <- asKey derivingS
(cs, cps) <- classId `separatedBy` anComma
return $ Datatype $ DatatypeDecl t k aa cs
$ appRange qs $ appRange (tokPos d) $ catRange cps
<|> return (Datatype (DatatypeDecl t k aa [] qs))
dataItem :: AParser st DatatypeDecl
dataItem = do
t <- typePattern
do c <- colT
k <- kind
Datatype d <- dataDef t k [c]
return d
<|> do
Datatype d <- dataDef t universe []
return d
dataItems :: AParser st BasicItem
dataItems = hasCaslItemList typeS dataItem FreeDatatype
-- * parse class items
classDecl :: AParser st ClassDecl
classDecl = do
(is, cs) <- classId `separatedBy` anComma
(ps, k) <- option ([], universe) $ pair (single $ lessT <|> colonT) kind
return $ ClassDecl is k $ catRange $ cs ++ ps
classItem :: AParser st ClassItem
classItem = do
c <- classDecl
do o <- oBraceT
is <- annosParser basicItems
p <- cBraceT
return $ ClassItem c is $ toRange o [] p
<|> return (ClassItem c [] nullRange)
classItemList :: [Token] -> Instance -> AParser st BasicItem
classItemList ps = hasCaslItemAux ps classItem . ClassItems
classItems :: AParser st BasicItem
classItems = do
p <- asKey (classS ++ "es") <|> asKey classS <?> classS
do q <- pluralKeyword instanceS
classItemList [p, q] Instance
<|> classItemList [p] Plain
-- * parse op items
opAttr :: AParser st OpAttr
opAttr = let l = [Assoc, Comm, Idem] in
choice (map ( \ a -> do
b <- asKey $ show a
return $ BinOpAttr a $ tokPos b) l)
<|> do
a <- asKey unitS
t <- term
return $ UnitOpAttr t $ tokPos a
multiTypeScheme :: [PolyId] -> AParser st TypeScheme
multiTypeScheme os = case os of
p : r -> if null r || all ( \ (PolyId _ tys _) -> null tys) os
then typeScheme p
else fail $ "instantiation list in identifier list: "
++ show (map ( \ (PolyId i _ _) -> i) os)
_ -> error "HasCASL.ParseItem.opDecl"
opDecl :: [PolyId] -> [Token] -> AParser st OpItem
opDecl os ps = do
c <- colonST
t <- multiTypeScheme os
opAttrs os ps c t <|> return (OpDecl os t [] $ catRange $ ps ++ [c])
opAttrs :: [PolyId] -> [Token] -> Token -> TypeScheme -> AParser st OpItem
opAttrs os ps c t = do
d <- anComma
(attrs, cs) <- opAttr `separatedBy` anComma
return $ OpDecl os t attrs $ catRange $ ps ++ [c, d] ++ cs
opArg :: AParser st ([VarDecl], Range)
opArg = bracketParser varDecls oParenT cParenT anSemi concatFst
opArgs :: AParser st ([[VarDecl]], Range)
opArgs = do
cps <- many1 opArg
return (map fst cps, concatMapRange snd cps)
opDeclOrDefn :: PolyId -> AParser st OpItem
opDeclOrDefn o = do
c <- colonST
t <- typeScheme o
opAttrs [o] [] c t <|> opTerm o [] nullRange c t
<|> return (OpDecl [o] t [] $ tokPos c)
<|> do
(args, ps) <- opArgs
c <- colonST
t <- fmap simpleTypeScheme parseType
opTerm o args ps c t
opTerm :: PolyId -> [[VarDecl]] -> Range -> Token -> TypeScheme
-> AParser st OpItem
opTerm o as ps c sc = do
e <- equalT
f <- term
return $ OpDefn o as sc f $ appRange ps $ toRange c [] e
opItem :: AParser st OpItem
opItem = do
(os, ps) <- parsePolyId `separatedBy` anComma
case os of
[hd] -> opDeclOrDefn hd
_ -> opDecl os ps
opItems :: AParser st SigItems
opItems = hasCaslItemList opS opItem (OpItems Op)
<|> hasCaslItemList functS opItem (OpItems Fun)
-- * parse pred items as op items
predDecl :: [PolyId] -> [Token] -> AParser st OpItem
predDecl os ps = do
c <- colT
t <- multiTypeScheme os
let p = catRange $ ps ++ [c]
return $ OpDecl os (predTypeScheme p t) [] p
predDefn :: PolyId -> AParser st OpItem
predDefn o = do
(args, ps) <- opArg
e <- asKey equivS
f <- term
let p = appRange ps $ tokPos e
return $ OpDefn o [args]
(simpleTypeScheme $ mkLazyType $ unitTypeWithRange p) f p
predItem :: AParser st OpItem
predItem = do
(os, ps) <- parsePolyId `separatedBy` anComma
let d = predDecl os ps
case os of
[hd] -> d <|> predDefn hd
_ -> d
predItems :: AParser st SigItems
predItems = hasCaslItemList predS predItem (OpItems Pred)
-- * other items
sigItems :: AParser st SigItems
sigItems = sortItems <|> opItems <|> predItems <|> typeItems
generatedItems :: AParser st BasicItem
generatedItems = do
g <- asKey generatedS
do FreeDatatype ds ps <- dataItems
return $ GenItems [Annoted (TypeItems Plain (map ( \ d -> Annoted
(Datatype $ item d) nullRange (l_annos d) (r_annos d)) ds) ps)
nullRange [] []] $ tokPos g
<|> do
o <- oBraceT
is <- annosParser sigItems
c <- cBraceT
return $ GenItems is $ toRange g [o] c
genVarItems :: AParser st ([GenVarDecl], [Token])
genVarItems = do
vs <- genVarDecls
do s <- trySemi << addLineAnnos
do tryItemEnd hasCaslStartKeywords
return (vs, [s])
<|> do
(ws, ts) <- genVarItems
return (vs ++ ws, s : ts)
<|> return (vs, [])
freeDatatype :: AParser st BasicItem
freeDatatype = do
f <- asKey freeS
FreeDatatype ds ps <- dataItems
return $ FreeDatatype ds $ appRange (tokPos f) ps
progItems :: AParser st BasicItem
progItems = hasCaslItemList programS
(patternTermPair [equalS] (WithIn, []) equalS) ProgItems
axiomItems :: AParser st BasicItem
axiomItems = hasCaslItemList axiomS term $ AxiomItems []
forallItem :: AParser st BasicItem
forallItem = do
f <- forallT
(vs, ps) <- genVarDecls `separatedBy` anSemi
a <- annos
AxiomItems _ (Annoted ft qs as rs : fs) ds <- dotFormulae
let aft = Annoted ft qs (a ++ as) rs
return $ AxiomItems (concat vs) (aft : fs) $ appRange (catRange $ f : ps) ds
genVarItem :: AParser st BasicItem
genVarItem = do
v <- pluralKeyword varS
(vs, ps) <- genVarItems
return $ GenVarItems vs $ catRange $ v : ps
dotFormulae :: AParser st BasicItem
dotFormulae = do
d <- dotT
(fs, ds) <- allAnnoParser term `separatedBy` dotT
let ps = catRange $ d : ds
lst = last fs
if null $ r_annos lst then do
(m, an) <- optSemi
return $ AxiomItems [] (init fs ++ [appendAnno lst an]) $ appRange ps
$ catRange m
else return $ AxiomItems [] fs ps
internalItems :: AParser st BasicItem
internalItems = do
i <- asKey internalS
o <- oBraceT
is <- annosParser basicItems
p <- cBraceT
return (Internal is $ toRange i [o] p)
basicItems :: AParser st BasicItem
basicItems = fmap SigItems sigItems
<|> classItems
<|> progItems
<|> generatedItems
<|> freeDatatype
<|> genVarItem
<|> forallItem
<|> dotFormulae
<|> axiomItems
<|> internalItems
basicSpec :: AParser st BasicSpec
basicSpec = fmap BasicSpec (annosParser basicItems)
|
nevrenato/HetsAlloy
|
HasCASL/ParseItem.hs
|
gpl-2.0
| 12,451
| 0
| 24
| 3,335
| 4,850
| 2,393
| 2,457
| 346
| 3
|
module PrettySymbols where
import PrettyPrint
kwIfUnicode a u = kw (ppIfUnicode a u)
-- Smileys and other useful symbols:
happy = kwIfUnicode '\x263a' ":-)"
sad = kwIfUnicode '\x2639' ":-("
forall' = kwIfUnicode '\x2200' "forall"
all = kwIfUnicode '\x2200' "All"
exist = kwIfUnicode '\x2203' "Exist"
--el = kwIfUnicode '\x220a' "::" -- looks ugly in times
el = kwIfUnicode '\x2208' "::"
imp = kwIfUnicode '\x21d2' "=>"
lambda = kwIfUnicode '\x03bb' "\\"
larrow = kwIfUnicode '\x2190' "<-"
rarrow = kwIfUnicode '\x2192' "->"
and = kwIfUnicode '\x2227' "/\\"
or = kwIfUnicode '\x2228' "\\/"
not = kwIfUnicode '\x00ac' "-/"
implies = kwIfUnicode '\x21d2' "==>"
equiv = kwIfUnicode '\x21d4' "<==>"
mu = kwIfUnicode '\x03bc' "Lfp"
nu = kwIfUnicode '\x03bd' "Gfp"
--star = kwIfUnicode '\x2605' "*" -- not present in times
--moon = kwIfUnicode '\x263e' "C" -- not present in times
star = kw "*"
moon = kw "C"
|
forste/haReFork
|
tools/base/pretty/PrettySymbols.hs
|
bsd-3-clause
| 970
| 0
| 7
| 210
| 218
| 112
| 106
| 22
| 1
|
module ListSort () where
import Language.Haskell.Liquid.Prelude
insert y [] = [y]
insert y (x:xs) = if (y<=x) then (y:(x:xs)) else (x:(insert y xs))
chk [] = liquidAssertB True
chk (x1:xs) = case xs of
[] -> liquidAssertB True
x2:xs2 -> liquidAssertB (x1 <= x2) && chk xs
sort = foldr insert []
rlist = map choose [1 .. 10]
bar = sort rlist
bar1 :: [Int]
bar1 = [1, 8, 2, 4, 5]
prop0 = chk rlist
prop1 = chk bar1
|
abakst/liquidhaskell
|
tests/neg/ListISort.hs
|
bsd-3-clause
| 477
| 2
| 11
| 145
| 242
| 132
| 110
| 15
| 2
|
extern int fun (int a, int b);
|
SanDisk-Open-Source/SSD_Dashboard
|
uefi/gcc/gcc-4.6.3/gcc/testsuite/gcc.dg/pch/decl-2.hs
|
gpl-2.0
| 33
| 0
| 7
| 9
| 24
| 11
| 13
| -1
| -1
|
module Main (main) where
import Network.Socket
import Options
import Reserve
main :: IO ()
main = withSocketsDo $ do
withOptions $ \opts -> do
putStrLn $ "http://localhost:" ++ show (optionsReservePort opts)
run opts
|
sol/reserve
|
driver/Main.hs
|
mit
| 260
| 0
| 15
| 76
| 77
| 40
| 37
| 9
| 1
|
{-# htermination isPrefixOf :: (Eq a, Eq k) => [(a, k)] -> [(a, k)] -> Bool #-}
import List
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/List_isPrefixOf_12.hs
|
mit
| 92
| 0
| 3
| 19
| 5
| 3
| 2
| 1
| 0
|
{- |
Module : Language.Scheme.Plugins.JSON
Copyright : Justin Ethier
Licence : MIT (see LICENSE in the distribution)
Maintainer : github.com/justinethier
Portability : portable
This file implements an interface to Text.JSON that may
be called directly from husk using the FFI.
-}
module Language.Scheme.Plugins.JSON where
import Control.Monad.Except
import Data.Array
import Data.Ratio
import Text.JSON
import Text.JSON.Generic
import qualified Language.Scheme.Numerical
import Language.Scheme.Types
-- ideas from http://therning.org/magnus/archives/719
instance JSON LispVal where
showJSON (List []) = JSNull
showJSON (String s) = JSString $ toJSString s
showJSON (Atom s) = JSString $ toJSString s
showJSON (Bool b) = JSBool b
showJSON (Number n) = JSRational False $ fromIntegral n
showJSON (Float n) = JSRational False $ toRational n
showJSON (List l) = showJSONs l
showJSON (Vector v) = do
let ls = elems v
f (List [Atom x, y]) = do
(x, showJSON y)
-- Take ls as an association list
-- The alist is then changed into the form [(String, x)]
-- and packaged into a JSObject
JSObject $ toJSObject $ map f ls
showJSON a = JSNull -- TODO (?): fail $ "Unable to convert to JSON: " ++ show a
readJSON (JSNull) = return $ List []
readJSON (JSString str) = return $ String $ fromJSString str
readJSON (JSBool b) = return $ Bool b
readJSON (JSRational _ num) = do
let numer = abs $ numerator num
let denom = abs $ denominator num
case (numer >= denom) && ((mod numer denom) == 0) of
True -> return $ Number $ round num
_ -> return $ Float $ fromRational num
readJSON (JSArray a) = do
result <- mapM readJSON a
return $ List $ result
readJSON (JSObject o) = do
let f (x,y) = do
y' <- readJSON y
return $ List [Atom x, y']
ls <- mapM f (fromJSObject o)
return $ Vector $ (listArray (0, length ls - 1)) ls
-- |Wrapper for Text.JSON.decode
jsDecode :: [LispVal] -> IOThrowsError LispVal
jsDecode [String json] = do
let r = decode json :: Result LispVal
case r of
Ok result -> return result
Error msg -> throwError $ Default msg
jsDecode invalid = throwError $ TypeMismatch "string" $ List invalid
-- |Wrapper for Text.JSON.decodeStrict
jsDecodeStrict :: [LispVal] -> IOThrowsError LispVal
jsDecodeStrict [String json] = do
let r = decodeStrict json :: Result LispVal
case r of
Ok result -> return result
Error msg -> throwError $ Default msg
jsDecodeStrict invalid = jsDecode invalid
-- |Wrapper for Text.JSON.encode
jsEncode :: [LispVal] -> IOThrowsError LispVal
jsEncode [val] = return $ String $ encode val
-- |Wrapper for Text.JSON.encodeStrict
jsEncodeStrict :: [LispVal] -> IOThrowsError LispVal
jsEncodeStrict [val] = return $ String $ encodeStrict val
_test :: IO ()
_test = do
_testDecodeEncode "\"test\""
_testDecodeEncode "true"
_testDecodeEncode "null"
_testDecodeEncode "1"
_testDecodeEncode "1.5"
_testDecodeEncode "[1.1, 2, 3, 1.5]"
_testDecodeEncode "[1.1, 2, {\"a\": 3}, 1.5]"
_testDecodeEncode :: String -> IO ()
_testDecodeEncode str = do
let x = decode str :: Result LispVal
case x of
Ok x -> putStrLn $ encode x
Error msg -> putStrLn $ "An error occurred: " ++ msg
|
justinethier/husk-scheme
|
extensions/hs-src/Language/Scheme/Plugins/JSON.hs
|
mit
| 3,342
| 0
| 16
| 772
| 1,029
| 498
| 531
| -1
| -1
|
module Main where
import Control.Monad.State.Strict
import System.Environment
import System.IO
import BrainFree
import Instructions
import Runtime
defaultMemSize :: Int
defaultMemSize = 30000
-- | Evaluate a brainfuck program in the BF monad.
eval :: [Instr] -> BF ()
eval = mapM_ eval1
where
eval1 (IMovePtr n) = movePtr n
eval1 (IAddPtr off c) = addCell off c
eval1 (IInput off) = bfInputM getChr (writePtr off)
eval1 (IOutput off) = bfOutputM (readPtr off) putChr
eval1 (ILoop off body) = loop off (eval body)
eval1 (IWritePtr off c) = writePtr off c
eval1 (IMultPtr dstOff srcOff c) = multCell dstOff srcOff c
-- | Run a bf program in 'IO' using a 'VectorMem' data store.
runVecMem :: [Instr] -> IO ()
runVecMem = withVectorMem defaultMemSize . evalStateT . runBFM step . eval
where
step (MovePtr n k) = modify (vecMove n) >> k
step (ReadPtr off k) = get >>= vecRead off >>= k
step (WritePtr off c k) = get >>= vecWrite off c >> k
step (GetChar k) = getc >>= k
step (PutChar c k) = putc c >> k
step (Loop off body k) = bfLoopM (get >>= vecRead off) (runBFM step body) >> k
-- | Run a bf program in 'IO' using a 'FPtrMem' data store.
runFPtrMem :: [Instr] -> IO ()
runFPtrMem = withFPtrMem defaultMemSize . evalStateT . runBFM step . eval
where
step (MovePtr n k) = modify (fptrMove n) >> k
step (ReadPtr off k) = get >>= fptrRead off >>= k
step (WritePtr off c k) = get >>= fptrWrite off c >> k
step (GetChar k) = getc >>= k
step (PutChar c k) = putc c >> k
step (Loop off body k) = bfLoopM (get >>= fptrRead off) (runBFM step body) >> k
-- | Run a bf program using an infinite 'Tape'.
--
-- To make it interesting, does not use auxiliary monads, but instead
-- pure functions of @'Tape' -> 'String' -> 'String'@.
-- The result is an 'interact'-style @'String' -> 'String'@ function
-- that takes a 'String' for input and produces a 'String' as output.
runTape :: [Instr] -> String -> String
runTape prog = runBF step (eval prog >> return finish) blankTape
where
step :: BFF (Tape -> String -> String) -> Tape -> String -> String
step (MovePtr n k) t i = k (tapeMove n t) i
step (ReadPtr off k) t i = k (tapeRead off t) t i
step (WritePtr off c k) t i = k (tapeWrite off c t) i
step (GetChar k) t (c:cs) = k (Just c) t cs
step (GetChar k) t [] = k Nothing t []
step (PutChar c k) t i = c : k t i
step (Loop off body k) t i = loop' t i
where
loop' t' i' = if tapeRead off t' /= 0 then body' t' i' else k t' i'
body' = runBF step (body >> return loop')
finish :: Tape -> String -> String
finish _ _ = ""
-- | Use 'interact' to actually run the result of 'runTape' in 'IO'.
--
-- Note that 'interact' closes the stdin handle at the end, so this
-- can't be used to run multiple programs in the same session.
runTapeIO :: [Instr] -> IO ()
runTapeIO = interact . runTape
-- | Generate C code for a bf program.
generateC :: [Instr] -> IO ()
generateC bf = putStr . unlines $
[ "/* generated by brainfree */"
, "unsigned char m[30000], *p = m;"
, "int main(void) {" ]
++ genBlock bf
++ [indent "return 0;", "}"]
where
indent = (" " ++)
genBlock = map indent . concatMap step
step (IMovePtr n) = ["p += " ++ show n ++ ";"]
step (IAddPtr off c) = ["p[" ++ show off ++ "] += " ++ show c ++ ";"]
step (IInput off) = ["p[" ++ show off ++ "] = getchar();"]
step (IOutput off) = ["putchar(p[" ++ show off ++ "]);"]
step (ILoop off body) = ["while (p[" ++ show off ++ "]) {"]
++ genBlock body
++ ["}"]
step (IWritePtr off c) = ["p[" ++ show off ++ "] = " ++ show c ++ ";"]
step (IMultPtr o1 o2 c) = ["p[" ++ show o1 ++ "] += p[" ++ show o2 ++ "] * " ++ show c ++ ";"]
-- | Generate Haskell code for a bf program.
generateHS :: [Instr] -> IO ()
generateHS bf = putStr . unlines $
[ " -- generated by brainfree"
, "import Runtime"
, "import Control.Monad.State.Strict"
, "import System.IO"
, "main = hSetBuffering stdout NoBuffering >> run"
, "mv n = modify $ fptrMove n"
, "rd off = get >>= fptrRead off"
, "wr off c = get >>= fptrWrite off c"
, "add off c = get >>= \\p -> fptrRead off p >>= \\x -> fptrWrite off (c + x) p"
, "mult o1 o2 c = rd o2 >>= add o1 . (* c)"
, "run = withFPtrMem " ++ show defaultMemSize ++ ". evalStateT $ do"
] ++ genBlock bf
where
indent = (" " ++)
genBlock = map indent . concatMap step
step (IMovePtr n) = ["mv (" ++ show n ++ ")"]
step (IAddPtr off c) = ["add (" ++ show off ++ ") " ++ show c]
step (IInput off) = ["bfInputM getc (wr (" ++ show off ++ "))"]
step (IOutput off) = ["bfOutputM (rd (" ++ show off ++ ")) putc"]
step (ILoop off body) = ("bfLoopM (rd (" ++ show off ++ ")) $ do") : genBlock body
step (IWritePtr off c) = ["wr (" ++ show off ++ ") " ++ show c]
step (IMultPtr o1 o2 c) = ["mult (" ++ show o1 ++ ") (" ++ show o2 ++ ") " ++ show c ++ ""]
-- | Handle command line arguments.
processArgs :: ([Instr] -> IO ()) -> Bool -> [String] -> IO ()
processArgs _ o ("-v":args) = processArgs runVecMem o args
processArgs _ o ("-f":args) = processArgs runFPtrMem o args
processArgs _ o ("-t":args) = processArgs runTapeIO o args
processArgs _ o ("-c":args) = processArgs generateC o args
processArgs _ o ("-h":args) = processArgs generateHS o args
processArgs r _ ("-o":args) = processArgs r True args
processArgs _ _ (('-':_):_) = usage
processArgs runner o (filename:_) =
parseFile filename >>= either print (runner . if o then optimize else id)
processArgs _ _ _ = usage
usage :: IO ()
usage = putStrLn "usage: brainfree [-v|-f|-t|-c|-h] [-o] FILENAME"
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
getArgs >>= processArgs runVecMem False
|
gglouser/brainfree
|
src/Main.hs
|
mit
| 6,260
| 0
| 14
| 1,881
| 2,131
| 1,076
| 1,055
| 108
| 8
|
-- Basic functions for running programs stored as text files
{-# LANGUAGE OverloadedStrings #-}
module Convert
( readRules, readDBFile, runProgram
, loadProgram, runProgramWithDB, programWithDB)
where
import Control.Monad
import Control.Monad.State
import Types
import Parser
import Rules
import Monad
import Graph
import Update
import Iterate
-- DB parsing
readDBFile :: FilePath -> IO [[Assert]]
readDBFile file = do
f <- readFile file
case parseTupleFile f of
Right v -> return $ map labelRHSArity v
Left err -> error $ "error parsing graph file:\n" ++ err
-- Program parsing
readRules :: FilePath -> IO [Rule]
readRules f = do
f <- readFile f
let result = convertRules =<< parseRuleFile f
case result of
Right rules -> return $ rules
Left err -> error $ err
-- 1. treats the input RHS as a set of tuples to add
-- 2. adds them, possibly extending context c
-- 3. solves for consequences, using rules
-- 4. returns extended context and a "root cause" tuple
processInputTuples :: PS -> Context -> RHS -> SM (Tuple, [Msg], Context, PS)
processInputTuples ps c es = do
let initMatch t edges c =
(Provenance (unsafeRanked (-1) $ Rule Nothing Nothing Event [] edges) (Just t) [] [], c, [])
root <- lift $ makeTuple ("_root", []) externProv
(msgs, c') <- lift $ applyMatch $ initMatch root es c
(out, ps') <- solve (map CMsg msgs) ps
return (root, out, c', ps')
-- returns a "root" tuple that can be used to access the results of each
-- block of edges.
programWithDB :: [RHS] -> [Rule] -> SM ([Tuple], [Msg], PS)
programWithDB edgeBlocks rules = prog2
where
prog1 (ts, outs, c, ps) es = do
(t, out, c', ps') <- processInputTuples ps c es
return (t:ts, out ++ outs, c', ps')
prog2 :: SM ([Tuple], [Msg], PS)
prog2 = do
-- set up queues/indices
db <- lift $ gets db
ps <- lift $ initPS "some-program" rules (tuples db)
fix <$> foldM prog1 ([], [], [], ps) edgeBlocks
fix (a, ms, _, b) = (a, ms, b)
runProgramWithDB :: [RHS] -> [Rule] -> (([Tuple], [Msg], PS), InterpreterState)
runProgramWithDB e r = runStack1 $ programWithDB e r
-- edgeFile: a file containing tuples, one per line.
-- empty lines separate blocks. each block is added simultaneously;
-- a fixed point is computed after each block
-- ruleFile: a file containing an ordered list of rules, one per line.
-- rules may not be split across lines.
loadProgram :: FilePath -> FilePath -> IO ([RHS], [Rule])
loadProgram edgeFile ruleFile = do
edgeBlocks <- readDBFile edgeFile
rules <- readRules ruleFile
return (edgeBlocks, rules)
-- Main program execution function
-- returns result, but not the full execution context
runProgram :: FilePath -> FilePath -> IO ([Tuple], [Msg], [Rule], InterpreterState)
runProgram edgeName ruleName = do
(edgeBlocks, rules) <- loadProgram edgeName ruleName
let ((roots, ms, _), s) = runProgramWithDB edgeBlocks rules
return (roots, ms, rules, s)
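-- A minimal usage sketch (not part of the original module). The file names
-- "edges.graph" and "prog.rules" are placeholders for a tuple file and a rule
-- file in the formats described above.
demoRun :: IO ()
demoRun = do
  (roots, msgs, rules, _st) <- runProgram "edges.graph" "prog.rules"
  putStrLn $ show (length roots) ++ " roots, "
          ++ show (length msgs)  ++ " messages, "
          ++ show (length rules) ++ " rules."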
|
kovach/web2
|
src/Convert.hs
|
mit
| 3,008
| 0
| 16
| 653
| 946
| 512
| 434
| 57
| 2
|
{-# LANGUAGE DeriveGeneric #-}
module Types (
Request (..)
, Response (..)
) where
import Data.ByteString.Lazy (ByteString)
import Data.Serialize (Serialize)
import GHC.Generics (Generic)
type Tar = ByteString
type PDF = ByteString
data Request = Request { mainName :: String
, inputArchive :: Tar } deriving (Generic)
instance Serialize Request
data Response = Ok PDF | Error String deriving (Generic)
instance Serialize Response
|
uvNikita/laas
|
src/Types.hs
|
mit
| 520
| 0
| 8
| 151
| 127
| 77
| 50
| 14
| 0
|
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Consensus.Raft.Types
-- Copyright : (c) Phil Hargett 2014
-- License : MIT (see LICENSE file)
--
-- Maintainer : phil@haphazardhouse.net
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- Common types used in this implementation of the Raft algorithm.
--
-----------------------------------------------------------------------------
module Control.Consensus.Raft.Types (
-- * Configuration
RaftConfiguration(..),
mkRaftConfiguration,
module Control.Consensus.Configuration,
-- * General types
Term,
RaftTime(..),
initialRaftTime,
logIndex,
logTerm,
Timeout,
Timeouts(..),
defaultTimeouts,
timeouts,
electionTimeout
) where
-- local imports
import Data.Log
-- external imports
import Control.Consensus.Configuration
import Data.Serialize
import GHC.Generics
import Network.Endpoints
import qualified System.Random as R
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
{-|
A term is a phase in the execution of the Raft algorithm defined by a period
in which there is at most one leader. Members change terms when beginning a
new election, and after successfully winning an election.
-}
type Term = Int
{-|
`RaftTime` captures a measure of how up to date a log is: it is
a combination of a 'Term' and 'Index'.
-}
data RaftTime = RaftTime Term Index deriving (Show,Eq,Ord,Generic)
{-|
Starting point for 'RaftTime': the time that is lower than all other
valid 'RaftTime's.
-}
initialRaftTime :: RaftTime
initialRaftTime = RaftTime (-1) (-1)
{-|
Extracts the 'Index' from a 'RaftTime'.
-}
logIndex :: RaftTime -> Index
logIndex (RaftTime _ index) = index
{-|
Extracts the 'Term' from a 'RaftTime'.
-}
logTerm :: RaftTime -> Term
logTerm (RaftTime term _) = term
instance Serialize RaftTime
--------------------------------------------------------------------------------
-- Timeouts
--------------------------------------------------------------------------------
{-|
Type used for timeouts, measured in microseconds. Mostly used for code clarity.
-}
type Timeout = Int
{-|
Defines the timeouts used for various aspects of the Raft protocol.
Different environments may have different performance characteristics,
and thus require different timeout values to operate correctly.
-}
data Timeouts = Timeouts {
timeoutRpc :: Timeout, -- ^ maximum time to wait before deciding an RPC call has failed
    timeoutClientRpc :: Timeout, -- ^ maximum time a client waits before deciding an RPC call to a member has failed
timeoutHeartbeat :: Timeout, -- ^ expected time between heartbeats that prove the leader is still active
timeoutPulse :: Timeout, -- ^ maximum length between pulses from the leader proving the leader is still active (must be less than heartbeat)
timeoutElectionRange :: (Timeout,Timeout) -- ^ the range of times from which an election timeout will be selected
} deriving (Eq,Show,Generic)
instance Serialize Timeouts
{-|
Returns default timeouts generally expected to be useful
in real-world environments, largely based on original Raft paper.
-}
defaultTimeouts :: Timeouts
defaultTimeouts = timeouts $ 150 * 1000
{-|
Returns default timeouts scaled from the provided RPC timeout.
-}
timeouts :: Timeout -> Timeouts
timeouts rpc =
let heartbeat = 10 * rpc
in Timeouts {
timeoutRpc = rpc,
timeoutClientRpc = 5 * rpc,
timeoutHeartbeat = heartbeat,
timeoutPulse = 7 * rpc, -- must be less than the heartbeat
timeoutElectionRange = (5 * heartbeat,10 * heartbeat)
}
{-|
Return a new election timeout
-}
electionTimeout :: Timeouts -> IO Timeout
electionTimeout outs = R.randomRIO $ timeoutElectionRange outs
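{-|
Illustrative example (not part of the original module): the default 150ms RPC
timeout scales to a 1.5s heartbeat and a 7.5s-15s election range, with all
values expressed in microseconds.
-}
exampleTimeouts :: Timeouts
exampleTimeouts = timeouts (150 * 1000)
-- timeoutHeartbeat     exampleTimeouts == 1500000             (1.5s)
-- timeoutElectionRange exampleTimeouts == (7500000, 15000000) (7.5s-15s)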
--------------------------------------------------------------------------------
-- Configuration
--------------------------------------------------------------------------------
{- |
A 'RaftConfiguration' incorporates both an ordinary 'Configuration' and a set
of 'Timeouts' for tuning the cluster's timing characteristics in the Raft algorithm.
-}
data RaftConfiguration = RaftConfiguration {
clusterConfiguration :: Configuration,
clusterTimeouts :: Timeouts
} deriving (Generic,Show,Eq)
instance Serialize RaftConfiguration
{-|
Given a list of names, return a new 'RaftConfiguration' using 'defaultTimeouts' for
Raft algorithm execution.
-}
mkRaftConfiguration :: [Name] -> RaftConfiguration
mkRaftConfiguration participants = RaftConfiguration {
clusterConfiguration = mkConfiguration participants,
clusterTimeouts = defaultTimeouts
}
|
hargettp/raft
|
src/Control/Consensus/Raft/Types.hs
|
mit
| 4,857
| 0
| 10
| 813
| 547
| 336
| 211
| 61
| 1
|
module ProjectEuler.Problem130
( problem
) where
import Math.NumberTheory.Primes
import qualified Data.List.Ordered as LOrdered
import ProjectEuler.Types
import ProjectEuler.Problem129 (genInput, computeA)
problem :: Problem
problem = pureProblem 130 Solved result
{- This is easy to solve once Problem129 is solved. -}
inputs :: [Int]
inputs =
LOrdered.minus'
(genInput 90) -- since we know 91 is the first one, might as well start with 91.
(unPrime <$> primes)
result :: Int
result =
sum . take 25 $ filter (\x -> let r = computeA x in (x - 1) `rem` r == 0) inputs
|
Javran/Project-Euler
|
src/ProjectEuler/Problem130.hs
|
mit
| 591
| 0
| 14
| 116
| 161
| 92
| 69
| 16
| 1
|
{-# LANGUAGE JavaScriptFFI #-}
-- | A wrapper over the Electron power-save blocking API, as documented
-- <https://electron.atom.io/docs/api/power-save-blocker here>.
module GHCJS.Electron.PowerSaveBlocker
( PowerSaveBlocker (..)
, BlockerID (..)
, unsafeGetPowerSaveBlocker
, unsafeStart
, unsafeStop
, unsafeIsStarted
) where
import GHCJS.Types
-- | An Electron @powerSaveBlocker@ object.
newtype PowerSaveBlocker
= MkPowerSaveBlocker JSVal
-- | A power save blocker ID number.
newtype BlockerID
= MkBlockerID Int
-- | Get the canonical 'PowerSaveBlocker' object, i.e.: the value of
-- @require('electron').powerSaveBlocker@.
foreign import javascript safe
"$r = require('electron').powerSaveBlocker;"
unsafeGetPowerSaveBlocker :: IO PowerSaveBlocker
-- | Start a power save blocker of the given type.
--
-- Returns a power save blocker ID (which is an integer).
--
-- The type parameter is one of the following strings:
--
-- * @"prevent-app-suspension"@
-- * Prevents the application from being suspended.
-- * Keeps the system active but allows the screen to be turned off.
-- * Example use cases: downloading a file or playing audio.
-- * @"prevent-display-sleep"@
-- * Prevents the display from going to sleep.
-- * Keeps the system and screen active.
-- * Example use case: playing video.
-- * Has higher precedence than @"prevent-app-suspension"@.
--
-- Only the highest precedence type will have any effect.
foreign import javascript safe
"$r = $1.start($2);"
unsafeStart :: PowerSaveBlocker
-> JSString
-- ^ The power save blocker type.
-> IO BlockerID
-- | Stops the specified power save blocker by its 'BlockerID'.
foreign import javascript safe
"$1.stop($2);"
unsafeStop :: PowerSaveBlocker
-> BlockerID
-> IO ()
-- | Checks whether the given 'BlockerID' corresponds to a running power save
-- blocker process.
foreign import javascript safe
"$r = $1.isStarted($2);"
unsafeIsStarted :: PowerSaveBlocker
-> BlockerID
-> IO Bool
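-- A hedged usage sketch (not part of the original module). It assumes 'pack'
-- from Data.JSString (ghcjs-base) to build the 'JSString' argument; with
-- OverloadedStrings a plain string literal would also work.
--
-- > import Data.JSString (pack)
-- >
-- > blockAppSuspension :: IO BlockerID
-- > blockAppSuspension = do
-- >   blocker <- unsafeGetPowerSaveBlocker
-- >   unsafeStart blocker (pack "prevent-app-suspension")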
|
taktoa/ghcjs-electron
|
src/GHCJS/Electron/PowerSaveBlocker.hs
|
mit
| 2,155
| 12
| 6
| 494
| 180
| 114
| 66
| 27
| 0
|
-- file: beginning/ex2.1.hs
-- 2
isEmpty :: [a] -> Bool
isEmpty [] = True
isEmpty _ = False
-- 3
onlyOne :: [a] -> Bool
onlyOne (a:[]) = True
onlyOne _ = False
-- 4
concList :: [[a]] -> [a]
concList [] = []
concList (x:xs) = x ++ (concList xs)
|
hnfmr/beginning_haskell
|
ex2.1.hs
|
mit
| 267
| 0
| 8
| 74
| 127
| 70
| 57
| 9
| 1
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
module Betfair.APING.Types.PriceLadderDescription
( PriceLadderDescription(..)
) where
import Data.Aeson.TH (Options (fieldLabelModifier, omitNothingFields),
defaultOptions, deriveJSON)
import Protolude
import Text.PrettyPrint.GenericPretty
import Betfair.APING.Types.PriceLadderType (PriceLadderType)
data PriceLadderDescription = PriceLadderDescription
{ ptype :: PriceLadderType
} deriving (Eq, Show, Generic, Pretty)
$(deriveJSON
defaultOptions {omitNothingFields = True, fieldLabelModifier = drop 1}
''PriceLadderDescription)
|
joe9/betfair-api
|
src/Betfair/APING/Types/PriceLadderDescription.hs
|
mit
| 841
| 0
| 10
| 144
| 133
| 84
| 49
| 20
| 0
|
module Feature.CorsSpec where
-- {{{ Imports
import Test.Hspec
import Test.Hspec.Wai
import Network.Wai.Test (SResponse(simpleHeaders, simpleBody))
import qualified Data.ByteString.Lazy as BL
import qualified Hasql.Connection as H
import SpecHelper
import PostgREST.Types (DbStructure(..))
import Network.HTTP.Types
-- }}}
spec :: DbStructure -> H.Connection -> Spec
spec struct c = around (withApp cfgDefault struct c) $ describe "CORS" $ do
let preflightHeaders = [
("Accept", "*/*"),
("Origin", "http://example.com"),
("Access-Control-Request-Method", "POST"),
("Access-Control-Request-Headers", "Foo,Bar") ]
let normalCors = [
("Host", "localhost:3000"),
("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0"),
("Origin", "http://localhost:8000"),
("Accept", "text/csv, */*; q=0.01"),
("Accept-Language", "en-US,en;q=0.5"),
("Accept-Encoding", "gzip, deflate"),
("Referer", "http://localhost:8000/"),
("Connection", "keep-alive") ]
describe "preflight request" $ do
it "replies naively and permissively to preflight request" $ do
r <- request methodOptions "/items" preflightHeaders ""
liftIO $ do
let respHeaders = simpleHeaders r
respHeaders `shouldSatisfy` matchHeader
"Access-Control-Allow-Origin"
"http://example.com"
respHeaders `shouldSatisfy` matchHeader
"Access-Control-Allow-Credentials"
"true"
respHeaders `shouldSatisfy` matchHeader
"Access-Control-Allow-Methods"
"GET, POST, PATCH, DELETE, OPTIONS, HEAD"
respHeaders `shouldSatisfy` matchHeader
"Access-Control-Allow-Headers"
"Authentication, Foo, Bar, Accept, Accept-Language, Content-Language"
respHeaders `shouldSatisfy` matchHeader
"Access-Control-Max-Age"
"86400"
it "suppresses body in response" $ do
r <- request methodOptions "/" preflightHeaders ""
liftIO $ simpleBody r `shouldBe` ""
describe "regular request" $
it "exposes necesssary response headers" $ do
r <- request methodGet "/items" [("Origin", "http://example.com")] ""
liftIO $ simpleHeaders r `shouldSatisfy` matchHeader
"Access-Control-Expose-Headers"
"Content-Encoding, Content-Location, Content-Range, Content-Type, \
\Date, Location, Server, Transfer-Encoding, Range-Unit"
describe "postflight request" $
it "allows INFO body through even with CORS request headers present" $ do
r <- request methodOptions "/items" normalCors ""
liftIO $ do
simpleHeaders r `shouldSatisfy` matchHeader
"Access-Control-Allow-Origin" "\\*"
simpleBody r `shouldSatisfy` not . BL.null
|
motiz88/postgrest
|
test/Feature/CorsSpec.hs
|
mit
| 2,932
| 0
| 20
| 741
| 556
| 298
| 258
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
-- Libs
import Control.Monad.Reader (runReaderT)
import Database.Persist.Sql
import Database.Persist.Postgresql (runSqlPool)
import Hilt.Server
import Network.Wai
import Network.Wai.Handler.Warp (run)
import Servant
import Servant.Generic
-- import WaiAppStatic.Types
-- Source
import Config
import Models
import Routes
main :: IO ()
main = do
env <- lookupSetting "ENV" Development
port <- lookupSetting "PORT" 3737
pool <- makePool env
let settings = Settings { getPool = pool, getEnv = env }
logger = setLogger env
runSqlPool doMigrations pool
putStrLn $ "Serving on PORT: " ++ show port
let convertApp :: App :~> Handler
convertApp = NT (flip runReaderT settings . runApp)
let apiServer :: ApiRoutes AsServer
apiServer = ApiRoutes
{ accounts = enter convertApp allAccounts
, gigs = enter convertApp allGigs
}
let server :: Routes AsServer
server = Routes { api = toServant apiServer }
let middlewares :: Middleware
middlewares = compression
. allowCsrf
. corsified
let app :: Application
app = middlewares
. serve (Proxy :: Proxy Router)
$ toServant server
run port . logger $ app
allAccounts :: App [Account]
allAccounts = do
dbAccounts <- runDb $ selectList [] []
return $ entityVal <$> dbAccounts
allGigs :: App [Gig]
allGigs = do
dbGigs <- runDb $ selectList [] []
return $ entityVal <$> dbGigs
doMigrations :: SqlPersistT IO ()
doMigrations = do
printMigration migrateAll
runMigration migrateAll
|
erlandsona/caldwell-api
|
executables/Server.hs
|
mit
| 1,781
| 0
| 14
| 483
| 475
| 243
| 232
| 53
| 1
|
module Transmitter where
import Data.Char
type Bit = Int
bin2int' :: [Bit] -> Int
bin2int' bits =
sum [w*b | (w,b) <- zip weights bits]
where weights = iterate (*2) 1
bin2int :: [Bit] -> Int
bin2int = foldr (\x y -> x + 2*y) 0
int2bin :: Int -> [Bit]
int2bin 0 = []
int2bin n = n `mod` 2 : int2bin (n `div` 2)
make8 :: [Bit] -> [Bit]
make8 bits = take 8 (bits ++ repeat 0)
encode :: String -> [Bit]
encode = concat . map (make8 . int2bin . ord)
chop8 :: [Bit] -> [[Bit]]
chop8 [] = []
chop8 bits = take 8 bits : chop8 (drop 8 bits)
decode :: [Bit] -> String
decode = map (chr . bin2int) . chop8
channel :: [Bit] -> [Bit]
channel = id
transmit :: String -> String
transmit = decode . channel . encode
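-- A quick illustrative check (not part of the original file): with the
-- identity 'channel', 'transmit' should return its input unchanged.
propTransmit :: Bool
propTransmit = transmit "higher-order functions are easy"
            == "higher-order functions are easy"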
--foo :: Bit -> Bit -> Bit
--foo x y = x + 2*y
--
--sum' :: Num a => [a] -> a
--sum' = foldr (+) 0
--
--adder :: Num a => a -> a -> a
--adder x y = x + y
--
--sum'' :: Num a => [a] -> a
--sum'' = foldr adder 0
|
brodyberg/LearnHaskell
|
CaesarCypher.hsproj/Transmitter.hs
|
mit
| 930
| 0
| 9
| 228
| 387
| 217
| 170
| 25
| 1
|