code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | Helper module providing a monad that collects lines
module Lseed.Geometry.Generator
( GeometryGenerator
, translated
, rotated
, runGeometryGenerator
, addLine
)
where
import Control.Monad.Reader
import Control.Monad.Writer
type Point = (Double, Double)
type Line = (Point, Point)
-- | A geometry-generating computation: a 'ReaderT' carrying the current
-- origin and rotation angle, over a 'Writer' that accumulates the emitted
-- lines, each tagged with a user value of type @x@.
--
-- Since GHC 7.10 'Applicative' is a superclass of 'Monad', so deriving
-- 'Monad' alone no longer compiles; derive the full stack.
newtype GeometryGenerator x a = GeometryGenerator (ReaderT (Point, Double) (Writer [(Line, x)]) a)
    deriving (Functor, Applicative, Monad)
-- | Map a point from the local coordinate frame to absolute coordinates:
-- rotate by the current angle, then shift by the current origin, both
-- taken from the Reader environment.
transformed :: Point -> GeometryGenerator x Point
transformed (x, y) = GeometryGenerator $ do
    ((baseX, baseY), angle) <- ask
    let rotX = cos angle * x + sin angle * y
        rotY = negate (sin angle) * x + cos angle * y
    return (baseX + rotX, baseY + rotY)
-- | Run a sub-generator with its origin moved by the given point, where
-- the offset is interpreted in the current (rotated, translated) frame.
-- The rotation angle is left unchanged.
translated :: Point -> GeometryGenerator x a -> GeometryGenerator x a
translated offset (GeometryGenerator act) = do
    -- Note: the new origin is computed in the *outer* environment.
    origin <- transformed offset
    GeometryGenerator $ local (\(_, angle) -> (origin, angle)) act
-- | Run a sub-generator with the frame rotated by the given angle;
-- the stored angle becomes @r' - r@, the origin stays put.
rotated :: Double -> GeometryGenerator x a -> GeometryGenerator x a
rotated r (GeometryGenerator act) =
    -- fmap over the (origin, angle) pair touches only the angle.
    GeometryGenerator $ local (fmap (subtract r)) act
-- | Emit one line tagged with @x@, converting both endpoints from the
-- local frame to absolute coordinates first.
addLine :: x -> Line -> GeometryGenerator x ()
addLine tag (start, end) = do
    start' <- transformed start
    end'   <- transformed end
    GeometryGenerator $ tell [((start', end'), tag)]
-- | Run a generator from the given initial origin and rotation and
-- return every tagged line it emitted.
runGeometryGenerator :: Point -> Double -> GeometryGenerator x () -> [(Line, x)]
runGeometryGenerator origin angle (GeometryGenerator gen) =
    execWriter $ runReaderT gen (origin, angle)
| nomeata/L-seed | src/Lseed/Geometry/Generator.hs | bsd-3-clause | 1,352 | 13 | 16 | 238 | 567 | 308 | 259 | 34 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Hadrian.Utilities (
-- * List manipulation
fromSingleton, replaceEq, minusOrd, intersectOrd, lookupAll, chunksOfSize,
-- * String manipulation
quote, yesNo, zeroOne,
-- * FilePath manipulation
unifyPath, (-/-),
-- * Accessing Shake's type-indexed map
insertExtra, lookupExtra, userSetting,
-- * Paths
BuildRoot (..), buildRoot, buildRootRules, isGeneratedSource,
-- * File system operations
copyFile, copyFileUntracked, fixFile, makeExecutable, moveFile, removeFile,
createDirectory, copyDirectory, moveDirectory, removeDirectory,
-- * Diagnostic info
UseColour (..), Colour (..), ANSIColour (..), putColoured,
BuildProgressColour, mkBuildProgressColour, putBuild,
SuccessColour, mkSuccessColour, putSuccess,
ProgressInfo (..), putProgressInfo,
renderAction, renderActionNoOutput, renderProgram, renderLibrary, renderBox, renderUnicorn,
-- * Miscellaneous
(<&>), (%%>), cmdLineLengthLimit,
-- * Useful re-exports
Dynamic, fromDynamic, toDyn, TypeRep, typeOf
) where
import Control.Monad.Extra
import Data.Char
import Data.Dynamic (Dynamic, fromDynamic, toDyn)
import Data.HashMap.Strict (HashMap)
import Data.List.Extra
import Data.Maybe
import Data.Typeable (TypeRep, typeOf)
import Development.Shake hiding (Normal)
import Development.Shake.Classes
import Development.Shake.FilePath
import System.Environment (lookupEnv)
import System.Info.Extra
import qualified Control.Exception.Base as IO
import qualified Data.HashMap.Strict as Map
import qualified System.Directory.Extra as IO
import qualified System.Info.Extra as IO
import qualified System.IO as IO
-- | Extract the sole element of a list, or terminate with the given
-- error message if the list is empty or has more than one element.
fromSingleton :: String -> [a] -> a
fromSingleton msg list = case list of
    [x] -> x
    _   -> error msg
-- | Replace every occurrence of @from@ with @to@ in a list.
replaceEq :: Eq a => a -> a -> [a] -> [a]
replaceEq from to xs = [ if x == from then to else x | x <- xs ]
-- Explicit definition to avoid dependency on Data.List.Ordered
-- | Difference of two ordered lists: the elements of the first list
-- that are not matched (pairwise) in the second. Both inputs must be
-- sorted ascending.
minusOrd :: Ord a => [a] -> [a] -> [a]
minusOrd xs [] = xs
minusOrd [] _  = []
minusOrd l@(x:xs) r@(y:ys)
    | x < y     = x : minusOrd xs r  -- x can no longer be matched: keep it
    | x == y    = minusOrd xs ys     -- matched: drop one occurrence of each
    | otherwise = minusOrd l ys      -- y is too small: skip it
-- Explicit definition to avoid dependency on Data.List.Ordered. TODO: add tests
-- | Intersection of two ordered lists by a comparison predicate: keeps
-- each element of the first list for which @cmp@ finds an 'EQ' partner
-- in the second. Both inputs must be sorted consistently with @cmp@.
intersectOrd :: (a -> b -> Ordering) -> [a] -> [b] -> [a]
intersectOrd cmp = go
  where
    go [] _  = []
    go _  [] = []
    go l@(x:xs) r@(y:ys) = case cmp x y of
        LT -> go xs r     -- x too small to ever match: drop it
        EQ -> x : go xs r -- keep x; y may still match later xs elements
        GT -> go l ys     -- y too small: advance the right list
-- | Lookup all elements of a given sorted list in a given sorted dictionary.
-- @lookupAll list dict@ is equivalent to @map (flip lookup dict) list@ but has
-- linear complexity O(|list| + |dict|) instead of quadratic O(|list| * |dict|).
--
-- > lookupAll ["b", "c"] [("a", 1), ("c", 3), ("d", 4)] == [Nothing, Just 3]
-- > list & dict are sorted: lookupAll list dict == map (flip lookup dict) list
-- | Look up every element of a sorted list in a sorted association list
-- in a single linear pass, yielding one 'Maybe' per query key.
lookupAll :: Ord a => [a] -> [(a, b)] -> [Maybe b]
lookupAll keys [] = map (const Nothing) keys
lookupAll [] _ = []
lookupAll l@(x:xs) dict@((k, v):rest) = case compare x k of
    LT -> Nothing : lookupAll xs dict  -- x absent from the dictionary
    EQ -> Just v : lookupAll xs dict   -- found; duplicates in keys still match
    GT -> lookupAll l rest             -- skip dictionary entries below x
-- | @chunksOfSize size strings@ splits a list of strings into chunks whose
-- total character count does not exceed @size@. A single string longer
-- than @size@ still forms a (singleton) chunk, so progress is always made.
chunksOfSize :: Int -> [String] -> [[String]]
chunksOfSize limit = repeatedly cut
  where
    -- Longest prefix whose running length total stays within the limit;
    -- 'max 1' guarantees termination on oversized leading strings.
    cut strs = splitAt (max 1 fitting) strs
      where
        fitting = length (takeWhile (<= limit) (scanl1 (+) (map length strs)))
-- | Add single quotes around a String.
quote :: String -> String
quote s = concat ["'", s, "'"]

-- | Pretty-print a 'Bool' as a @"YES"@ or @"NO"@ string.
yesNo :: Bool -> String
yesNo b = if b then "YES" else "NO"

-- | Pretty-print a 'Bool' as a @"1"@ or @"0"@ string.
zeroOne :: Bool -> String
zeroOne b = if b then "1" else "0"
-- | Normalise a path and convert all path separators to @/@, even on Windows.
unifyPath :: FilePath -> FilePath
-- 'normaliseEx' collapses "." and redundant separators; 'toStandard'
-- then forces forward slashes (both from Development.Shake.FilePath).
unifyPath = toStandard . normaliseEx
-- | Combine paths with a forward slash regardless of platform,
-- avoiding a doubled slash when the left side already ends in one.
(-/-) :: FilePath -> FilePath -> FilePath
"" -/- b = b
a -/- b = if last a == '/' then a ++ b else a ++ '/' : b

infixr 6 -/-
-- | Like Shake's '%>' but gives higher priority to longer patterns. Useful
-- in situations when a family of build rules, e.g. @"//*.a"@ and @"//*_p.a"@
-- can be matched by the same file, such as @library_p.a@. We break the tie
-- by preferring longer matches, which correspond to longer patterns.
(%%>) :: FilePattern -> (FilePath -> Action ()) -> Rules ()
-- Priority is pattern length + 1, so a longer (more specific) pattern
-- always beats a shorter one as well as default-priority rules.
p %%> a = priority (fromIntegral (length p) + 1) $ p %> a
infix 1 %%>
-- | A reasonable approximation of the OS command-line length limit.
-- Build command lines can get very long (the Cabal library can reach
-- 2MB), and some operating systems reject them.  On Windows the
-- theoretical limit is 32768 characters (since Windows 7); 31000 leaves
-- breathing space for the builder path, flags and other overheads.  On
-- Mac OS X ARG_MAX is 262144, but @xargs@ eats over 20000 of that, so
-- 200000 is a sensible limit.  Elsewhere we use 4194304.
cmdLineLengthLimit :: Int
cmdLineLengthLimit
    | isWindows = 31000
    | isMac     = 200000
    | otherwise = 4194304
-- | Insert a value into Shake's type-indexed map, keyed by its 'TypeRep'.
insertExtra :: Typeable a => a -> HashMap TypeRep Dynamic -> HashMap TypeRep Dynamic
insertExtra value extra = Map.insert (typeOf value) (toDyn value) extra

-- | Lookup a value in Shake's type-indexed map, falling back to the given
-- default when the key is absent or the stored value has the wrong type.
lookupExtra :: Typeable a => a -> Map.HashMap TypeRep Dynamic -> a
lookupExtra defaultValue extra =
    maybe defaultValue id (fromDynamic =<< Map.lookup (typeOf defaultValue) extra)
-- | Lookup a user setting in Shake's type-indexed map 'shakeExtra'. If the
-- setting is not found, return the provided default value instead.
userSetting :: Typeable a => a -> Action a
userSetting defaultValue = do
    extra <- shakeExtra <$> getShakeOptions
    return $ lookupExtra defaultValue extra
-- | Lookup a user setting in Shake's type-indexed map 'shakeExtra'. If the
-- setting is not found, return the provided default value instead.
userSettingRules :: Typeable a => a -> Rules a
-- Same as 'userSetting' but usable while constructing 'Rules', via the
-- Rules-level options accessor.
userSettingRules defaultValue = do
    extra <- shakeExtra <$> getShakeOptionsRules
    return $ lookupExtra defaultValue extra
-- | The directory all build results are placed in, stored in 'shakeExtra'.
newtype BuildRoot = BuildRoot FilePath deriving (Typeable, Eq, Show)
-- | All build results are put into the 'buildRoot' directory.
buildRoot :: Action FilePath
-- Falls back to the empty path when no 'BuildRoot' was set — callers are
-- presumably expected to have configured it; TODO confirm.
buildRoot = do
    BuildRoot path <- userSetting (BuildRoot "")
    return path
-- | As 'buildRoot', but usable while constructing 'Rules'.
buildRootRules :: Rules FilePath
buildRootRules = do
    BuildRoot path <- userSettingRules (BuildRoot "")
    return path
-- | A version of 'fmap' with flipped arguments. Useful for manipulating
-- values in context, e.g. 'buildRoot', as in the example below.
--
-- @
-- buildRoot <&> (-/- "dir") == fmap (-/- "dir") buildRoot
-- @
(<&>) :: Functor f => f a -> (a -> b) -> f b
x <&> f = fmap f x

infixl 1 <&>
-- | Given a 'FilePath' to a source file, return 'True' if it is generated.
-- The current implementation simply assumes that a file is generated if it
-- lives in the 'buildRoot' directory. Since most files are not generated the
-- test is usually very fast.
isGeneratedSource :: FilePath -> Action Bool
-- NOTE(review): purely textual prefix check — assumes 'file' uses the same
-- (unified) path style as 'buildRoot'; confirm at call sites.
isGeneratedSource file = buildRoot <&> (`isPrefixOf` file)
-- | Copy a file tracking the source. Create the target directory if missing.
copyFile :: FilePath -> FilePath -> Action ()
copyFile source target = do
    need [source] -- Guarantee the source is built before printing progress info.
    let dir = takeDirectory target
    liftIO $ IO.createDirectoryIfMissing True dir
    putProgressInfo =<< renderAction "Copy file" source target
    -- 'copyFileChanged' leaves the target untouched when contents match.
    quietly $ copyFileChanged source target
-- | Copy a file without tracking the source. Create the target directory if missing.
copyFileUntracked :: FilePath -> FilePath -> Action ()
-- Deliberately omits 'need source': no dependency is recorded on the source.
copyFileUntracked source target = do
    let dir = takeDirectory target
    liftIO $ IO.createDirectoryIfMissing True dir
    putProgressInfo =<< renderAction "Copy file (untracked)" source target
    liftIO $ IO.copyFile source target
-- | Transform a given file by applying a function to its contents.
fixFile :: FilePath -> (String -> String) -> Action ()
fixFile file f = do
    putProgressInfo $ "| Fix " ++ file
    contents <- liftIO $ IO.withFile file IO.ReadMode $ \h -> do
        old <- IO.hGetContents h
        let new = f old
        -- Force the result to normal form while the handle is still open,
        -- so the lazy 'hGetContents' stream is fully consumed before we
        -- write back to the same file below.
        IO.evaluate $ rnf new
        return new
    liftIO $ writeFile file contents
-- | Make a given file executable by running the @chmod +x@ command.
-- (Unix-only: relies on @chmod@ being on the PATH.)
makeExecutable :: FilePath -> Action ()
makeExecutable file = do
    putProgressInfo $ "| Make " ++ quote file ++ " executable."
    quietly $ cmd "chmod +x " [file]
-- | Move a file. Note that we cannot track the source, because it is moved.
moveFile :: FilePath -> FilePath -> Action ()
-- Shells out to @mv@ rather than renaming in-process.
moveFile source target = do
    putProgressInfo =<< renderAction "Move file" source target
    quietly $ cmd ["mv", source, target]
-- | Remove a file that doesn't necessarily exist.
removeFile :: FilePath -> Action ()
-- The existence check makes the operation idempotent (no error if absent).
removeFile file = do
    putProgressInfo $ "| Remove file " ++ file
    liftIO . whenM (IO.doesFileExist file) $ IO.removeFile file
-- | Create a directory if it does not already exist.
createDirectory :: FilePath -> Action ()
createDirectory dir = do
    putProgressInfo $ "| Create directory " ++ dir
    liftIO $ IO.createDirectoryIfMissing True dir
-- | Copy a directory. The contents of the source directory is untracked.
copyDirectory :: FilePath -> FilePath -> Action ()
-- Uses the external @cp -r@ command; no dependencies are recorded.
copyDirectory source target = do
    putProgressInfo =<< renderAction "Copy directory" source target
    quietly $ cmd ["cp", "-r", source, target]
-- | Move a directory. The contents of the source directory is untracked.
moveDirectory :: FilePath -> FilePath -> Action ()
moveDirectory source target = do
    putProgressInfo =<< renderAction "Move directory" source target
    quietly $ cmd ["mv", source, target]
-- | Remove a directory that doesn't necessarily exist.
removeDirectory :: FilePath -> Action ()
-- Recursive removal, guarded so a missing directory is not an error.
removeDirectory dir = do
    putProgressInfo $ "| Remove directory " ++ dir
    liftIO . whenM (IO.doesDirectoryExist dir) $ IO.removeDirectoryRecursive dir
-- | Whether terminal output should be colourised.
data UseColour = Never | Auto | Always deriving (Eq, Show, Typeable)

-- | Terminal output colours
data Colour
    = Dull ANSIColour   -- ^ plain ANSI colours
    | Vivid ANSIColour  -- ^ bright/bold ANSI variants
    | Extended String   -- ^ extended 256-colour palette, manual code stored

-- | ANSI terminal colours
data ANSIColour
    = Black   -- ^ ANSI code: 30
    | Red     -- ^ 31
    | Green   -- ^ 32
    | Yellow  -- ^ 33
    | Blue    -- ^ 34
    | Magenta -- ^ 35
    | Cyan    -- ^ 36
    | White   -- ^ 37
    | Reset   -- ^ 0

-- | Convert an ANSI colour name into its numeric escape code.
colourCode :: ANSIColour -> String
colourCode colour = case colour of
    Black   -> "30"
    Red     -> "31"
    Green   -> "32"
    Yellow  -> "33"
    Blue    -> "34"
    Magenta -> "35"
    Cyan    -> "36"
    White   -> "37"
    Reset   -> "0"

-- | Assemble the final ANSI control-sequence body for a 'Colour'.
mkColour :: Colour -> String
mkColour colour = case colour of
    Dull c        -> colourCode c
    Vivid c       -> colourCode c ++ ";1" -- ";1" selects the bold/bright variant
    Extended code -> "38;5;" ++ code      -- 256-colour escape prefix
-- | A more colourful version of Shake's 'putNormal': wraps the message in
-- the given ANSI colour code when colour output is enabled, according to
-- the 'UseColour' user setting (default 'Never').
putColoured :: String -> String -> Action ()
putColoured code msg = do
    useColour <- userSetting Never
    isTerm <- liftIO $ IO.hIsTerminalDevice IO.stdout
    dumb   <- liftIO isDumb
    let supported = isTerm && not dumb
        enabled = case useColour of
            Never  -> False
            Auto   -> supported || IO.isWindows -- Colours do work on Windows
            Always -> True
    putNormal $ if enabled
        then "\ESC[" ++ code ++ "m" ++ msg ++ "\ESC[0m"
        else msg
  where
    -- TERM=dumb terminals cannot render escape sequences.
    isDumb = maybe False (== "dumb") <$> lookupEnv "TERM"
-- | An encoded ANSI colour used for build progress messages.
newtype BuildProgressColour = BuildProgressColour String
    deriving Typeable

-- | Generate an encoded colour for progress output from names.
mkBuildProgressColour :: Colour -> BuildProgressColour
mkBuildProgressColour = BuildProgressColour . mkColour

-- | Default 'BuildProgressColour' (dull magenta).
magenta :: BuildProgressColour
magenta = mkBuildProgressColour (Dull Magenta)

-- | Print a build progress message (e.g. executing a build command),
-- using the user-configured colour or the default.
putBuild :: String -> Action ()
putBuild msg = do
    BuildProgressColour code <- userSetting magenta
    putColoured code msg
-- | An encoded ANSI colour used for success messages.
newtype SuccessColour = SuccessColour String
    deriving Typeable

-- | Generate an encoded colour for successful output from names.
mkSuccessColour :: Colour -> SuccessColour
mkSuccessColour = SuccessColour . mkColour

-- | Default 'SuccessColour' (dull green).
green :: SuccessColour
green = mkSuccessColour (Dull Green)

-- | Print a success message (e.g. a package is built successfully),
-- using the user-configured colour or the default.
putSuccess :: String -> Action ()
putSuccess msg = do
    SuccessColour code <- userSetting green
    putColoured code msg
-- | How much detail to print about build progress.
data ProgressInfo = None | Brief | Normal | Unicorn deriving (Eq, Show, Typeable)

-- | Version of 'putBuild' controlled by @--progress-info@ command line argument.
putProgressInfo :: String -> Action ()
putProgressInfo msg = do
    progressInfo <- userSetting None
    unless (progressInfo == None) $ putBuild msg
-- | Render an action's progress message, with detail level taken from the
-- 'ProgressInfo' user setting (default 'Brief').
renderAction :: String -> FilePath -> FilePath -> Action String
renderAction what input output = do
    progressInfo <- userSetting Brief
    return $ case progressInfo of
        None -> ""
        Brief -> "| " ++ what ++ ": " ++ i ++ " => " ++ o
        Normal -> renderBox [ what, " input: " ++ i, " => output: " ++ o ]
        Unicorn -> renderUnicorn [ what, " input: " ++ i, " => output: " ++ o ]
  where
    -- Paths are normalised to forward slashes for display.
    i = unifyPath input
    o = unifyPath output
-- | As 'renderAction', but for actions that have no output file to report.
renderActionNoOutput :: String -> FilePath -> Action String
renderActionNoOutput what input = do
    progressInfo <- userSetting Brief
    return $ case progressInfo of
        None -> ""
        Brief -> "| " ++ what ++ ": " ++ i
        Normal -> renderBox [ what, " input: " ++ i ]
        Unicorn -> renderUnicorn [ what, " input: " ++ i ]
  where
    i = unifyPath input
-- | Render the successful build of a program as a boxed message, with an
-- optional synopsis line.
renderProgram :: String -> String -> Maybe String -> String
renderProgram name bin synopsis = renderBox $ concat
    [ [ "Successfully built program " ++ name ]
    , [ "Executable: " ++ bin ]
    , [ "Program synopsis: " ++ prettySynopsis synopsis | isJust synopsis ]
    ]
-- | Render the successful build of a library as a boxed message, with an
-- optional synopsis line.
renderLibrary :: String -> String -> Maybe String -> String
renderLibrary name lib synopsis = renderBox $ concat
    [ [ "Successfully built library " ++ name ]
    , [ "Library: " ++ lib ]
    , [ "Library synopsis: " ++ prettySynopsis synopsis | isJust synopsis ]
    ]
-- | Render an optional synopsis: strip trailing punctuation and end with
-- a single full stop; 'Nothing' renders as the empty string.
prettySynopsis :: Maybe String -> String
prettySynopsis = maybe "" ((++ ".") . dropWhileEnd isPunctuation)
-- | Render the given set of lines in an ASCII box. The minimum width and
-- whether to use Unicode symbols are hardcoded in the function's body.
--
-- >>> renderBox (words "lorem ipsum")
-- /----------\
-- | lorem |
-- | ipsum |
-- \----------/
renderBox :: [String] -> String
-- The leading 'tail' strips the newline that concatMap prepends to the top
-- border, so the result does not start with a blank line.
-- NOTE(review): 'maximum' makes this partial for an empty input list —
-- callers appear to always pass at least one line; confirm.
renderBox ls = tail $ concatMap ('\n' :) (boxTop : map renderLine ls ++ [boxBot])
  where
    -- Minimum total width of the box in characters
    minimumBoxWidth = 32
    -- TODO: Make this setting configurable? Setting to True by default seems
    -- to work poorly with many fonts.
    useUnicode = False
    -- Characters to draw the box
    (dash, pipe, topLeft, topRight, botLeft, botRight, padding)
        | useUnicode = ('─', '│', '╭', '╮', '╰', '╯', ' ')
        | otherwise = ('-', '|', '/', '\\', '\\', '/', ' ')
    -- Box width, taking minimum desired length and content into account.
    -- The -4 is for the beginning and end pipe/padding symbols, as
    -- in "| xxx |".
    boxContentWidth = (minimumBoxWidth - 4) `max` maxContentLength
      where
        maxContentLength = maximum (map length ls)
    -- One content row: pipe, padding, right-padded text, padding, pipe.
    renderLine l = concat
        [ [pipe, padding]
        , padToLengthWith boxContentWidth padding l
        , [padding, pipe] ]
      where
        padToLengthWith n filler x = x ++ replicate (n - length x) filler
    (boxTop, boxBot) = ( topLeft : dashes ++ [topRight]
                       , botLeft : dashes ++ [botRight] )
      where
        -- +1 for each non-dash (= corner) char
        dashes = replicate (boxContentWidth + 2) dash
-- | Render the given set of lines next to our favorite unicorn Robert.
renderUnicorn :: [String] -> String
-- The pony art and the boxed message are zipped row by row; whichever is
-- shorter is padded (with spaces / empty strings) so both align.
renderUnicorn ls =
    unlines $ take (max (length ponyLines) (length boxLines)) $
        zipWith (++) (ponyLines ++ repeat ponyPadding) (boxLines ++ repeat "")
  where
    ponyLines :: [String]
    ponyLines = [ " ,;,,;'"
                , " ,;;'( Robert the spitting unicorn"
                , " __ ,;;' ' \\ wants you to know"
                , " /' '\\'~~'~' \\ /'\\.) that a task "
                , " ,;( ) / |. / just finished! "
                , " ,;' \\ /-.,,( ) \\ "
                , " ^ ) / ) / )| Almost there! "
                , " || || \\) "
                , " (_\\ (_\\ " ]
    ponyPadding :: String
    ponyPadding = " "
    boxLines :: [String]
    -- Three leading blank lines vertically centre the box beside the pony.
    boxLines = ["", "", ""] ++ (lines . renderBox $ ls)
| bgamari/shaking-up-ghc | src/Hadrian/Utilities.hs | bsd-3-clause | 18,183 | 0 | 15 | 4,293 | 4,146 | 2,216 | 1,930 | 296 | 5 |
{-# LANGUAGE QuasiQuotes #-}
{-@ LIQUID "--no-termination "@-}
import LiquidHaskell
import Language.Haskell.Liquid.Prelude
[lq| type OList a = [a]<{\fld v -> (v >= fld)}> |]
[lq| type DList a = [a]<{\fld v -> (fld >= v)}> |]
---------------------------------------------------------------------------
--------------------------- Official GHC Sort ----------------------------
---------------------------------------------------------------------------
[lq| assert sort1 :: (Ord a) => [a] -> OList a |]
-- GHC's library mergesort: split the input into monotonic runs, then
-- merge the runs pairwise until one sorted list remains.
sort1 :: (Ord a) => [a] -> [a]
sort1 = mergeAll . sequences
  where
    sequences (a:b:xs)
      | a `compare` b == GT = descending b [a] xs
      | otherwise = ascending b (a:) xs -- a >= b => (a:) ->
    sequences [x] = [[x]]
    sequences [] = [[]]
-- NOTE(review): these refer to 'sequences' from sort1's where clause; in
-- the original layout they presumably live inside that where block —
-- the indentation may have been lost in extraction. Confirm before edits.
[lq| descending :: x:a -> OList {v:a | x < v} -> [a] -> [OList a] |]
descending a as (b:bs)
  | a `compare` b == GT = descending b (a:as) bs
descending a as bs = (a:as): sequences bs
[lq| ascending :: x:a -> (OList {v:a|v>=x} -> OList a) -> [a] -> [OList a] |]
ascending a as (b:bs)
  | a `compare` b /= GT = ascending b (\ys -> as (a:ys)) bs -- a <= b
ascending a as bs = as [a]: sequences bs
-- Repeatedly merge runs pairwise until a single sorted run remains.
mergeAll [x] = x
mergeAll xs = mergeAll (mergePairs xs)
mergePairs (a:b:xs) = merge1 a b: mergePairs xs
mergePairs [x] = [x]
mergePairs [] = []
-- merge1 needs to be toplevel,
-- to get applied transformRec tx
-- Stable two-way merge: on ties the left element comes first.
merge1 (a:as') (b:bs')
  | a `compare` b == GT = b:merge1 (a:as') bs'
  | otherwise = a:merge1 as' (b:bs')
merge1 [] bs = bs
merge1 as [] = as
---------------------------------------------------------------------------
------------------- Mergesort ---------------------------------------------
---------------------------------------------------------------------------
[lq| assert sort2 :: (Ord a) => [a] -> OList a |]
-- Bottom-up mergesort: wrap each element as a singleton run, then merge.
sort2 :: (Ord a) => [a] -> [a]
sort2 = mergesort
mergesort :: (Ord a) => [a] -> [a]
mergesort = mergesort' . map wrap
mergesort' :: (Ord a) => [[a]] -> [a]
mergesort' [] = []
mergesort' [xs] = xs
mergesort' xss = mergesort' (merge_pairs xss)
-- Merge adjacent pairs of runs, halving the number of runs per pass.
merge_pairs :: (Ord a) => [[a]] -> [[a]]
merge_pairs [] = []
merge_pairs [xs] = [xs]
merge_pairs (xs:ys:xss) = merge xs ys : merge_pairs xss
-- | Merge two sorted lists into one sorted list; stable (left-biased on
-- equal elements).
merge :: (Ord a) => [a] -> [a] -> [a]
merge xs [] = xs
merge [] ys = ys
merge l@(x:xs) r@(y:ys)
    | x `compare` y == GT = y : merge l ys
    | otherwise           = x : merge xs r
-- | Wrap a value in a singleton list.
wrap :: a -> [a]
wrap = (: [])
----------------------------------------------------------------------
-------------------- QuickSort ---------------------------------------
----------------------------------------------------------------------
[lq| assert sort3 :: (Ord a) => w:a -> [{v:a|v<=w}] -> OList a |]
-- Quicksort with an accumulator; @w@ is an upper-bound witness threaded
-- through for the LiquidHaskell refinement, not used for ordering.
sort3 :: (Ord a) => a -> [a] -> [a]
sort3 w ls = qsort w ls []
-- qsort is stable and does not concatenate.
qsort :: (Ord a) => a -> [a] -> [a] -> [a]
qsort _ [] r = r
qsort _ [x] r = x:r
qsort w (x:xs) r = qpart w x xs [] [] r
-- qpart partitions and sorts the sublists
-- (rlt: elements below the pivot x; rge: elements >= x; both reversed.)
qpart :: (Ord a) => a -> a -> [a] -> [a] -> [a] -> [a] -> [a]
qpart w x [] rlt rge r =
    -- rlt and rge are in reverse order and must be sorted with an
    -- anti-stable sorting
    rqsort x rlt (x:rqsort w rge r)
qpart w x (y:ys) rlt rge r =
    case compare x y of
        GT -> qpart w x ys (y:rlt) rge r
        _ -> qpart w x ys rlt (y:rge) r
-- rqsort is as qsort but anti-stable, i.e. reverses equal elements
-- (the two sorts alternate so each undoes the other's reversal).
rqsort :: (Ord a) => a -> [a] -> [a] -> [a]
rqsort _ [] r = r
rqsort _ [x] r = x:r
rqsort w (x:xs) r = rqpart w x xs [] [] r
rqpart :: (Ord a) => a -> a -> [a] -> [a] -> [a] -> [a] -> [a]
rqpart w x [] rle rgt r =
    qsort x rle (x:qsort w rgt r)
rqpart w x (y:ys) rle rgt r =
    -- Note the flipped comparison (y vs x) relative to qpart.
    case compare y x of
        GT -> rqpart w x ys rle (y:rgt) r
        _ -> rqpart w x ys (y:rle) rgt r
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/GhcListSort.hs | bsd-3-clause | 3,923 | 2 | 13 | 979 | 1,552 | 835 | 717 | -1 | -1 |
module Core where
import Core.Parser
import Core.Pretty
import Core.Types
import Core.Utils
-- | The names bound by a list of definitions.
bindersOf :: [(a, b)] -> [a]
bindersOf = map fst

-- | The right-hand sides of a list of definitions.
rhssOf :: [(a, b)] -> [b]
rhssOf = map snd
-- | An expression is atomic when it needs no parentheses when printed:
-- a bare variable or a number literal.
isAtomicExpr :: Expr a -> Bool
isAtomicExpr (EVar _) = True
isAtomicExpr (ENum _) = True
isAtomicExpr _        = False
-- The standard prelude of the Core language: the classic SKI-style
-- combinators (identity, constant, flipped constant, substitution,
-- composition and twice).
preludeDefs :: CoreProgram
preludeDefs = [ ("I", ["x"], EVar "x")
              , ("K", ["x", "y"], EVar "x")
              , ("K1", ["x", "y"], EVar "y")
              , ("S", ["f", "g", "x"], EAp (EAp (EVar "f") (EVar "x"))
                                           (EAp (EVar "g") (EVar "x")))
              , ("compose", ["f", "g", "x"], EAp (EVar "f")
                                                 (EAp (EVar "g") (EVar "x")))
              , ("twice" , ["f"], EAp (EAp (EVar "compose") (EVar "f"))
                                      (EVar "f"))
              ]
-- Exercise 1.6
-- A test program containing a recursive (True) 'let' with two local
-- bindings, used to exercise the pretty-printer.
letDef :: CoreProgram
letDef = [ ("testDef", ["x", "y"], ELet True [ ("foo", EAp (EVar "addOne")
                                                           (EVar "x"))
                                             , ("bar", EAp (EVar "addOne")
                                                           (EVar "y"))
                                             ]
                                        (EAp (EVar "foo")
                                             (EAp (EAp (EVar "+") (EVar "bar"))
                                                  (EVar "foo"))))
         ]
-- Exercise 1.21 test
-- A small source program run through the lexer and parser, covering
-- supercombinators, let and case expressions.
sampleProgram :: CoreProgram
sampleProgram = syntax . lexCore $
  unlines [ "f = 3;", "g x y = let z = x in z;"
          , "h x = case (let y = x in y) of"
          , " <1> -> 2;"
          , " <2> -> 5"
          ]
| Fuuzetsu/hcore | src/Core.hs | bsd-3-clause | 1,785 | 0 | 15 | 853 | 571 | 322 | 249 | 37 | 1 |
{-
The MIT License
Copyright (c) 2010 Korcan Hussein
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}
module Timer (Timer(), defaultTimer, start, stop, getTimerTicks, pause, unpause, isStarted, isPaused) where
import qualified Graphics.UI.SDL.Time as SdlTime
import Data.Word
-- | Stopwatch state; tick counts are SDL milliseconds ('Word32').
data Timer = Timer
    { startTicks  :: Word32 -- ^ ticks recorded when 'start' was called
    , pausedTicks :: Word32 -- ^ elapsed ticks frozen while paused
    , paused      :: Bool   -- ^ currently paused?
    , started     :: Bool   -- ^ currently running?
    }

-- | A timer that has not been started: zero ticks, not running, not paused.
-- (Adds the previously missing top-level type signature.)
defaultTimer :: Timer
defaultTimer = Timer { startTicks = 0, pausedTicks = 0, paused = False, started = False }
start :: Timer -> IO Timer
-- Record the current SDL tick count as the new origin; (re)starting
-- always clears the paused flag.
start timer = SdlTime.getTicks >>= \ticks -> return $ timer { startTicks=ticks, started=True,paused=False }
-- | Stop the timer; recorded tick values are left untouched.
stop :: Timer -> Timer
stop t = t { started = False, paused = False }
getTimerTicks :: Timer -> IO Word32
-- A stopped timer reads zero.
getTimerTicks Timer { started=False } = return 0
-- While paused, report the frozen elapsed value.
getTimerTicks Timer { started=True, paused=True, pausedTicks=pausedTicks' } = return pausedTicks'
-- While running, elapsed = current ticks minus the recorded start ticks.
getTimerTicks Timer { started=True, paused=False, startTicks=st } = SdlTime.getTicks >>= \currTicks -> return $ currTicks - st
pause :: Timer -> IO Timer
-- Freeze the elapsed time into 'pausedTicks'; only a running, unpaused
-- timer can be paused — anything else is returned unchanged.
pause timer@Timer { started=True, paused=False, startTicks=st } = SdlTime.getTicks >>= \currTicks -> return $ timer { pausedTicks=(currTicks - st), paused=True, started=True }
pause timer = return timer
unpause :: Timer -> IO Timer
-- Not paused: nothing to do.
unpause timer@Timer { paused=False } = return timer
-- Resume: shift the start point so elapsed time continues from the
-- frozen 'pausedTicks' value.
unpause timer@Timer { paused=True, pausedTicks=pausedTicks' } =
  SdlTime.getTicks >>= \currTicks -> return $ timer { startTicks=(currTicks - pausedTicks'), pausedTicks=0, paused=False }
-- | Whether the timer is currently running.
isStarted :: Timer -> Bool
isStarted = started

-- | Whether the timer is currently paused.
isPaused :: Timer -> Bool
isPaused = paused
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Applicative (optional)
import qualified Text.Show.Pretty as PP
import qualified Text.XML.Light as XML
import qualified Text.XML.Xleb as X
-- | A parsed Atom feed document.
data Feed = Feed
  { feedTitle :: String
  , feedSubtitle :: String
  , feedLinks :: [Link]
  , feedId :: String
  , feedUpdated :: String
  , feedEntries :: [Entry]
  } deriving (Show)
-- | An Atom @<link>@: required @href@, optional @rel@ attribute.
data Link = Link
  { linkHref :: String
  , linkRel :: Maybe String
  } deriving (Show)
-- | A single Atom @<entry>@.
data Entry = Entry
  { entryTitle :: String
  , entryLinks :: [Link]
  , entryId :: String
  , entryUpdated :: String
  , entrySummary :: String
  , entryAuthor :: Author
  , entryContent :: Content
  } deriving (Show)
-- | Entry content, dispatched on the @type@ attribute.
data Content
  = XHTMLContent XML.Element
  | HTMLContent String
  | TextContent String
    deriving (Show)
-- | An Atom @<author>@ element.
data Author = Author
  { authorName :: String
  , authorEmail :: String
  } deriving (Show)
-- | Parser for the top-level Atom @<feed>@ element.
feed :: X.Xleb Feed
feed = X.elem "feed" $ do
  feedTitle <- X.child "title" (X.contents X.string)
  feedSubtitle <- X.child "subtitle" (X.contents X.string)
  feedLinks <- X.children "link" link
  feedId <- X.child "id" (X.contents X.string)
  feedUpdated <- X.child "updated" (X.contents X.string)
  feedEntries <- X.children "entry" entry
  -- RecordWildCards: the bindings above fill the Feed record by name.
  return Feed { .. }
-- | Parser for a @<link>@ element: @href@ is required, @rel@ optional.
link :: X.Xleb Link
link =
  Link <$> X.attr "href" X.string
       <*> optional (X.attr "rel" X.string)
-- | Parser for an Atom @<entry>@ element.
entry :: X.Xleb Entry
entry = X.elem "entry" $ do
  entryTitle <- X.child "title" (X.contents X.string)
  entryLinks <- X.children "link" link
  entryId <- X.child "id" (X.contents X.string)
  entryUpdated <- X.child "updated" (X.contents X.string)
  entrySummary <- X.child "summary" (X.contents X.string)
  entryAuthor <- X.child "author" author
  entryContent <- X.child "content" content
  -- RecordWildCards: the bindings above fill the Entry record by name.
  return Entry { .. }
-- | Parser for @<content>@, dispatching on its @type@ attribute.
content :: X.Xleb Content
content = do
  typ <- X.attr "type" X.string
  case typ of
    "xhtml" -> XHTMLContent <$> X.rawElement
    "html" -> HTMLContent <$> X.contents X.string
    "text" -> TextContent <$> X.contents X.string
    -- Any other type is a parse failure.
    _ -> fail "Unknown content type"
-- | Parser for an @<author>@ element with @<name>@ and @<email>@ children.
author :: X.Xleb Author
author =
  Author <$> X.child "name" (X.contents X.string)
         <*> X.child "email" (X.contents X.string)
-- | Read an Atom document from stdin, parse it and pretty-print the result.
main :: IO ()
main = do
  cs <- getContents
  PP.pPrint (X.runXleb cs feed)
| aisamanra/xleb | examples/atom/Main.hs | bsd-3-clause | 2,379 | 0 | 12 | 541 | 818 | 425 | 393 | 76 | 4 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Test.Tinfoil.Hash.TestVectors where
import Data.ByteString (ByteString)
import P
import System.IO
import Tinfoil.Data.Hash
import Tinfoil.Encode
import Tinfoil.Hash
import Test.QuickCheck
-- | Run a single known-answer test: hash the input vector and compare its
-- hex encoding with the expected digest, as a one-shot QuickCheck property.
testTestVector :: (ByteString -> Hash) -> ByteString -> Text -> Property
testTestVector f inVec outVec =
  let r = hexEncode . unHash $ f inVec in
  once $ r === outVec
-- SHA2 test vectors from the
-- <https://www.cosic.esat.kuleuven.be/nessie/testvectors/ NESSIE project>.
-- Empty input.
prop_sha256TestVec1 =
  testTestVector
    hashSHA256
    ""
    "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
-- Single byte.
prop_sha256TestVec2 =
  testTestVector
    hashSHA256
    "a"
    "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb"
prop_sha256TestVec3 =
  testTestVector
    hashSHA256
    "abc"
    "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"
prop_sha256TestVec4 =
  testTestVector
    hashSHA256
    "message digest"
    "f7846f55cf23e14eebeab5b4e1550cad5b509e3348fbc4efa3a1413d393cb650"
prop_sha256TestVec5 =
  testTestVector
    hashSHA256
    "abcdefghijklmnopqrstuvwxyz"
    "71c480df93d6ae2f1efad1447c66c9525e316218cf51fc8d9ed832f2daf18b73"
prop_sha256TestVec6 =
  testTestVector
    hashSHA256
    "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"
    "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"
prop_sha256TestVec7 =
  testTestVector
    hashSHA256
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
    "db4bfcbd4da0cd85a60c3c37d3fbd8805c77f15fc6b1fdfe614ee0a7c8fdb4c0"
-- Ten repetitions of "1234567890".
prop_sha256TestVec8 =
  testTestVector
    hashSHA256
    "12345678901234567890123456789012345678901234567890123456789012345678901234567890"
    "f371bc4a311f2b009eef952dd83ca80e2b60026c8e935592d0f9c308453c813e"
-- Template Haskell splice: forces the prop_* names above into scope so
-- 'quickCheckAll' below can discover them.
return []
tests :: IO Bool
tests = $quickCheckAll
| ambiata/tinfoil | test/Test/Tinfoil/Hash/TestVectors.hs | bsd-3-clause | 2,094 | 0 | 10 | 340 | 238 | 128 | 110 | 60 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Cgroup support basic managing of linux cgroup controllers
--
module System.Linux.Cgroups
( cgroupsSupported
-- * Find cgroup utilities
-- ** Get all subsystems information
, lookupCgroupRoot
--, procCgroups
--, selfCgroups
-- * Lookup functions
-- $lookup
-- ** lookup subsystems
, lookupSubsystemRoot
, lookupSubsystemSelf
--, lookupProcRoot
--, lookupSelfRoot
-- ** Unsafe creation
, unsafeSubsystemRoot
-- ** Cgroup movement
-- *** creation
, unsafeCgroup
-- *** appending
, (</>)
, (<//>)
-- * Cgroup manipulation
, tasksFile
-- $cgroup_files
-- ** Creating and moving processes
, mkCgroup
, moveTasksTo
, moveProcsTo
-- ** Getters
, getTasks
, getTasks'
, getProcs
, getProcs'
-- * Controllers
, subFreezer,subBlkio,subCpuSet
, module EXP
) where
import BasicPrelude hiding ((</>))
import Data.List (find)
import Data.Set (Set)
import qualified Data.Set as S
import qualified Data.Text as T
import Filesystem
import Filesystem.Path.CurrentOS (fromText)
import qualified Filesystem.Path.CurrentOS as F
import Control.Monad.Trans
import System.Linux.Cgroups.Types
-- export modules
import System.Linux.Cgroups.Subsystem.Cpu as EXP
import System.Linux.Cgroups.Subsystem.CpuAcct as EXP
import System.Linux.Cgroups.Subsystem.Memory as EXP
import System.Linux.Cgroups.Subsystem.Devices as EXP
-- | Check if cgroups is supported by kernel
-- (simply tests for the presence of @\/proc\/cgroups@).
cgroupsSupported :: IO Bool
cgroupsSupported = isFile "/proc/cgroups"
-- | Find rootfs for cgroup virtual filesystem
-- by scanning @\/proc\/mounts@ for an entry whose device name is
-- @cgroup_root@; 'Nothing' when no such mount exists.
lookupCgroupRoot :: IO (Maybe (Cgroup Absolute))
lookupCgroupRoot = do
  x <- matchMounts (on1st $ (==) "cgroup_root")
  case x of
    Nothing -> return Nothing
    -- field 1 of a mount entry is the mount point
    Just ys -> return $ (Cgroup . fromText) <$> ith 1 ys
-- listCgroups :: IO (CgroupName, Int, Int, Bool)
-- listCgroups = readFile "/proc/cgroups"
-- procCgroups :: Int -> IO (CgroupName, FilePath)
-- procCgroups = do
-- readFile ("/proc" </> (fromText . show $ pid) </> "cgroup")
-- | Find a root for cgroup 'Subsystem'.
-- Internally scans @\/proc\/mounts@ for a mount whose device name
-- matches the subsystem's mount name; 'Nothing' when not mounted.
lookupSubsystemRoot :: Subsystem -> IO (Maybe (Hierarchy Absolute))
lookupSubsystemRoot name = do
  x <- matchMounts (on1st $ (==) (mntName name))
  case x of
    Nothing -> return Nothing
    -- field 1 of the mount entry is the mount point of the hierarchy
    Just ys -> return $ Hierarchy name <$> (Cgroup . fromText) <$> ith 1 ys
-- | Manually set a 'Subsystem's root.  No check is made that the path
-- really is the root of a mounted hierarchy.
unsafeSubsystemRoot :: Subsystem -> FilePath -> Hierarchy Absolute
unsafeSubsystemRoot name = Hierarchy name . Cgroup

-- | Manually wrap a raw path as a 'Cgroup'.  May be used to build a
-- cgroup value from user input; no validation is performed.
unsafeCgroup :: FilePath -> Cgroup Absolute
unsafeCgroup = Cgroup
-- | Lookup 'Subsystem' root of the current process
-- by parsing @\/proc\/self\/cgroup@ (lines of the form @id:subsys:path@).
lookupSubsystemSelf :: Subsystem -> IO (Maybe (Hierarchy Relative))
lookupSubsystemSelf name = do
  ts <- readTextFile "/proc/self/cgroup"
  let r = find (on2nd $ (==) (cgName name)) . (map (T.split (== ':'))) . lines $ ts
  case r of
    Nothing -> return Nothing
    -- field 2 is the cgroup path; drop its leading '/' to keep it relative
    Just xs -> return $ Hierarchy name <$> (Cgroup . fromText . (T.drop 1)) <$> ith 2 xs
{- cgroups movements -}
-- | Append hierarchy to another one
-- (descend into the named child cgroup).
(</>) :: Hierarchy a -> Text -> Hierarchy a
(Hierarchy n f) </> t = Hierarchy n (Cgroup $! unCgroup f F.</> fromText t)
-- | Append relative path to absolute one.
-- Calls 'error' when the two hierarchies belong to different subsystems.
(<//>) :: Hierarchy Absolute -> Hierarchy a -> Hierarchy Absolute
(Hierarchy n (Cgroup f1)) <//> (Hierarchy n2 (Cgroup f2)) | n == n2 = Hierarchy n (Cgroup $ f1 F.</> f2)
                                                          | otherwise = error "submodule doesn't match"
-- | Create new cgroup
-- as a child directory of the given cgroup, returning a handle to it.
-- TODO: fix text value
mkCgroup :: (HasCgroup a) => a -> Text -> IO a
mkCgroup a t = do
  createDirectory True p'
  return $ acgroup a p'
  where p' = cgroup a F.</> fromText t
-- | move task to specified 'Hierarchy'
-- (writes the thread id to the @tasks@ control file)
moveTasksTo :: (HasCgroup a) => a -> Int -> IO ()
moveTasksTo = move_to "tasks"
-- | move task and all processes in it's group to 'Hierarchy'
-- (writes the thread-group id to the @cgroup.procs@ control file)
moveProcsTo :: (HasCgroup a) => a -> Int -> IO ()
moveProcsTo = move_to "cgroup.procs"
{-# INLINE move_to #-}
-- internal: append an id to the named control file of the cgroup
move_to :: HasCgroup a => FilePath -> a -> Int -> IO ()
move_to f a t = appendTextFile (cgroup a F.</> f) (show t)
-- | Read the threads / processes attached to a cgroup, as a 'Set'.
getTasks, getProcs :: HasCgroup a => a -> IO (Set Int)
getTasks h = S.fromList <$> getTasks' h
getProcs h = S.fromList <$> getProcs' h
-- | List variants of 'getTasks' \/ 'getProcs'; the kernel does not
-- guarantee the lists are sorted or duplicate-free.
getTasks', getProcs' :: HasCgroup a => a -> IO [Int]
getTasks' = int_from "tasks"
getProcs' = int_from "cgroup.procs"
{-# INLINE int_from #-}
-- | Read a list of numeric ids from the given control file of a cgroup.
-- Fix: the previous version ignored its 'FilePath' argument and always
-- read the @tasks@ file, so 'getProcs'' silently returned thread ids
-- instead of the contents of @cgroup.procs@.
int_from :: HasCgroup a => FilePath -> a -> IO [Int]
int_from f p = map read . lines <$> readTextFile (cgroup p F.</> f)
-- | Predefined controllers that have no dedicated wrapper module here.
subFreezer,subBlkio,subCpuSet :: Subsystem
subFreezer = Controller "freezer"
subBlkio = Controller "blkio"
subCpuSet = Controller "cpuset"
-- | Safe positional lookup: @ith n xs@ is the element of @xs@ at index
-- @n@ (0-based), or 'Nothing' when the list is too short.
-- Fix: the cons case previously bound the head to an unused name,
-- triggering an unused-binding warning; it is now a wildcard.
ith :: Int -> [a] -> Maybe a
ith _ []     = Nothing
ith 0 (x:_)  = Just x
ith n (_:xs) = ith (n-1) xs
-- | Apply a predicate to the head of a list; an empty list yields 'False'.
on1st :: (a -> Bool) -> [a] -> Bool
on1st p ys = case ys of
  (y:_) -> p y
  []    -> False
-- | Apply a predicate to the second element of a list; lists with
-- fewer than two elements yield 'False'.
on2nd :: (a -> Bool) -> [a] -> Bool
on2nd p (_:y:_) = p y
on2nd _ _       = False
-- | Scan the entries of @\/proc\/mounts@ (each line split into
-- whitespace-separated fields) and return the first entry satisfying
-- the predicate.
matchMounts :: ([Text] -> Bool) -> IO (Maybe [Text])
matchMounts f = do
  mts <- readTextFile "/proc/mounts"
  return $! find f . (map words) . lines $ mts
-- | The name of a subsystem as it appears in @\/proc\/*\/cgroup@
-- (named hierarchies carry a @name=@ prefix).
cgName :: Subsystem -> Text
cgName (Controller t) = t
cgName (Named t) = "name=" ++ t
-- $cgroup_files
-- Each cgroup is represented by a directory in the cgroup file system
-- containing the following files describing that cgroup:
--
-- [@tasks@] list of tasks (by PID) attached to that cgroup. This list
-- is not guaranteed to be sorted. Writing a thread ID into this file
-- moves the thread into this cgroup.
--
-- [@cgroup.procs@] list of thread group IDs in the cgroup. This list is
-- not guaranteed to be sorted or free of duplicate TGIDs, and userspace
-- should sort/uniquify the list if this property is required.
-- Writing a thread group ID into this file moves all threads in that
-- group into this cgroup.
--
-- [@notify_on_release@] flag: run the release agent on exit?
--
-- [@release_agent@] the path to use for release notifications (this file
-- exists in the top cgroup only)
--
-- $lookup
-- Each 'Hierarchy' has it's root that basically is \/sys\/fs\/cgroups
-- and it's relavite paths that are listed in \/proc\/X\/cgroups
-- So all Hierarchies are either Absolute or Relative, to perform an action
-- on cgroup you need to use Absolute path.
--
-- | Path of the @tasks@ control file of the given cgroup.
tasksFile :: (HasCgroup a) => a -> FilePath
tasksFile h = cgroup h F.</> "tasks"
| qnikst/cgroups-hs | System/Linux/Cgroups.hs | bsd-3-clause | 6,485 | 0 | 18 | 1,307 | 1,643 | 893 | 750 | 109 | 2 |
module Main where
import Lib
import DirectoryServer
-- | Entry point: delegate to the directory-server application builder.
main :: IO ()
main = mkApp
| Garygunn94/DFS | DirectoryServer/app/Main.hs | bsd-3-clause | 81 | 0 | 6 | 16 | 25 | 15 | 10 | 5 | 1 |
{-# LANGUAGE PatternGuards, DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.CSL.Style
-- Copyright : (c) Andrea Rossato
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Andrea Rossato <andrea.rossato@unitn.it>
-- Stability : unstable
-- Portability : unportable
--
-- The Style types
--
-----------------------------------------------------------------------------
module Text.CSL.Style where
import Data.List ( nubBy, isPrefixOf )
import Data.Generics ( Typeable, Data
, everywhere, everything, mkT, mkQ)
import Text.JSON
import Text.Pandoc.Definition ( Inline )
-- | The representation of a parsed CSL style.
data Style
    = Style
      { styleVersion :: String        -- ^ CSL schema version of the style
      , styleClass :: String
      , styleInfo :: Maybe CSInfo     -- ^ cs:info metadata, when present
      , styleDefaultLocale :: String
      , styleLocale :: [Locale]       -- ^ in-style cs:locale overrides
      , csOptions :: [Option]
      , csMacros :: [MacroMap]
      , citation :: Citation
      , biblio :: Maybe Bibliography  -- ^ 'Nothing' when the style has no bibliography section
      } deriving ( Show, Typeable, Data )
-- | A parsed locale file or an in-style cs:locale element.
data Locale
    = Locale
      { localeVersion :: String
      , localeLang :: String          -- ^ empty for language-independent locales
      , localeOptions :: [Option]
      , localeTermMap :: [TermMap]
      , localeDate :: [Element]       -- ^ localized date formats
      } deriving ( Show, Eq, Typeable, Data )
-- | With the 'defaultLocale', the locales-xx-XX.xml loaded file and
-- the parsed 'Style' cs:locale elements, produce the final 'Locale'
-- as the only element of a list, taking into account CSL locale
-- prioritization.
mergeLocales :: String -> Locale -> [Locale] -> [Locale]
mergeLocales s l ls = doMerge list
    where
      -- overrides in priority order: exact language match, then
      -- language-prefix matches, then language-independent overrides
      list = filter ((==) s . localeLang) ls ++
             filter ((\x -> x /= [] && x `isPrefixOf` s) . localeLang) ls ++
             filter ((==) [] . localeLang) ls
      doMerge x = return l { localeOptions = newOpt x
                           , localeTermMap = newTermMap x
                           , localeDate = newDate x
                           }
      -- in each case nubBy keeps the first occurrence, so the overrides
      -- shadow the corresponding entries of the base locale
      newOpt x = nubBy (\a b -> fst a == fst b) (concatMap localeOptions x ++ localeOptions l)
      newTermMap x = nubBy (\a b -> fst a == fst b) (concatMap localeTermMap x ++ localeTermMap l)
      newDate x = nubBy (\(Date _ a _ _ _ _)
                          (Date _ b _ _ _ _) -> a == b) (concatMap localeDate x ++ localeDate l)
-- | Maps a (term name, form) key to a pair of renderings (presumably
-- singular and plural -- confirm against the locale parser).
type TermMap
    = ((String,Form),(String,String))
-- | A named macro and the elements it expands to.
type MacroMap
    = (String,[Element])
-- | The cs:citation section of a style.
data Citation
    = Citation
      { citOptions :: [Option]
      , citSort :: [Sort]
      , citLayout :: Layout
      } deriving ( Show, Typeable, Data )
-- | The cs:bibliography section of a style.
data Bibliography
    = Bibliography
      { bibOptions :: [Option]
      , bibSort :: [Sort]
      , bibLayout :: Layout
      } deriving ( Show, Typeable, Data )
-- | A key\/value pair coming from a style or locale option attribute.
type Option = (String,String)
-- | Merge two option lists; when a key occurs in both, the entry from
-- the first list wins.
mergeOptions :: [Option] -> [Option] -> [Option]
mergeOptions winners rest = nubBy sameKey (winners ++ rest)
    where sameKey a b = fst a == fst b
-- | The cs:layout of a citation or bibliography: overall formatting,
-- the delimiter between rendered items, and the rendering elements.
data Layout
    = Layout
      { layFormat :: Formatting
      , layDelim :: Delimiter
      , elements :: [Element]
      } deriving ( Show, Typeable, Data )
-- | A CSL rendering element.
data Element
    = Choose IfThen [IfThen] [Element]  -- ^ cs:choose: if, else-ifs, else
    | Macro String Formatting
    | Const String Formatting           -- ^ verbatim text
    | Variable [String] Form Formatting Delimiter
    | Term String Form Formatting Bool
    | Label String Form Formatting Plural
    | Number String NumericForm Formatting
    | ShortNames [String] Formatting Delimiter
    | Names [String] [Name] Formatting Delimiter [Element]
    | Substitute [Element]
    | Group Formatting Delimiter [Element]
    | Date [String] DateForm Formatting Delimiter [DatePart] String
    deriving ( Show, Eq, Typeable, Data )
-- | One branch of a cs:choose: the condition, how its tests are
-- combined, and the elements rendered when the branch is taken.
data IfThen
    = IfThen Condition Match [Element]
    deriving ( Eq, Show, Typeable, Data )
-- | The tests of a cs:if\/cs:else-if attribute set; each list holds
-- the variable names the respective test applies to.
data Condition
    = Condition
      { isType :: [String]
      , isSet :: [String]
      , isNumeric :: [String]
      , isUncertainDate :: [String]
      , isPosition :: [String]
      , disambiguation :: [String]
      , isLocator :: [String]
      } deriving ( Eq, Show, Typeable, Data )
type Delimiter = String
-- | How the results of a 'Condition''s tests are combined (see 'match').
data Match
    = Any
    | All
    | None
    deriving ( Show, Read, Eq, Typeable, Data )
-- | Interpret a 'Match' mode as a predicate over a list of condition
-- results: 'All' requires every test to pass, 'Any' at least one,
-- 'None' requires all of them to fail.
match :: Match -> [Bool] -> Bool
match All  = and
match Any  = or
match None = all not
-- | A cs:date-part element: which part, its form, the delimiter used
-- for date ranges, and its formatting.
data DatePart
    = DatePart
      { dpName :: String
      , dpForm :: String
      , dpRangeDelim :: String
      , dpFormatting :: Formatting
      } deriving ( Show, Eq, Typeable, Data )
-- | Fallback year\/month\/day parts, each with a @\"-\"@ range delimiter.
defaultDate :: [DatePart]
defaultDate
    = [ DatePart "year" "" "-" emptyFormatting
      , DatePart "month" "" "-" emptyFormatting
      , DatePart "day" "" "-" emptyFormatting]
-- | A cs:sort key: a plain variable or a macro.  The extra 'Int' and
-- 'String' fields of 'SortMacro' get their meaning from the parser --
-- confirm there before relying on them.
data Sort
    = SortVariable String Sorting
    | SortMacro String Sorting Int Int String
    deriving ( Eq, Show, Typeable, Data )
-- | Sort direction together with the rendered sort key.
data Sorting
    = Ascending String
    | Descending String
    deriving ( Read, Show, Eq, Typeable, Data )
-- Empty keys sort after non-empty ones; otherwise keys are compared
-- with 'compare'' (arguments swapped for the descending case).
instance Ord Sorting where
    compare (Ascending []) (Ascending []) = EQ
    compare (Ascending []) (Ascending _) = GT
    compare (Ascending _) (Ascending []) = LT
    compare (Ascending a) (Ascending b) = compare' a b
    compare (Descending []) (Descending []) = EQ
    compare (Descending []) (Descending _) = GT
    compare (Descending _) (Descending []) = LT
    compare (Descending a) (Descending b) = compare' b a
    -- mixed Ascending/Descending pairs are considered equal
    compare _ _ = EQ
-- | Compare two sort keys.  A leading @\'-\'@ requests descending
-- order: when both keys carry it the comparison is reversed.
-- Fix: the old version called 'head' on both strings and crashed on
-- empty input; the pattern match below is total (empty strings fall
-- through to a plain 'compare').
compare' :: String -> String -> Ordering
compare' x y =
    case (x, y) of
      ('-':_, '-':_) -> compare y x
      ('-':_, _    ) -> LT
      (_    , '-':_) -> GT
      _              -> compare x y
-- | The form attribute of terms, labels and names.
data Form
    = Long
    | Short
    | Count
    | Verb
    | VerbShort
    | Symbol
    | NotSet
    deriving ( Eq, Show, Read, Typeable, Data )
-- | The form attribute of cs:number elements.
data NumericForm
    = Numeric
    | Ordinal
    | Roman
    | LongOrdinal
    deriving ( Eq, Show, Read, Typeable, Data )
-- | The form of a date: textual, numeric, or unset.
data DateForm
    = TextDate
    | NumericDate
    | NoFormDate
    deriving ( Eq, Show, Read, Typeable, Data )
-- | The plural attribute of cs:label.
data Plural
    = Contextual
    | Always
    | Never
    deriving ( Eq, Show, Read, Typeable, Data )
-- | The children of a cs:names element.
data Name
    = Name Form Formatting NameAttrs Delimiter [NamePart]  -- ^ a cs:name element
    | NameLabel Form Formatting Plural                     -- ^ a cs:label inside cs:names
    | EtAl Formatting String                               -- ^ a cs:et-al element
    deriving ( Eq, Show, Typeable, Data )
-- | Raw attributes of a cs:name element.
type NameAttrs = [(String, String)]
-- | A cs:name-part element together with its formatting.
data NamePart
    = NamePart String Formatting
    deriving ( Show, Eq, Typeable, Data )
-- | 'True' only for the 'Name' constructor (not 'NameLabel' or 'EtAl').
isName :: Name -> Bool
isName Name {} = True
isName _       = False

-- | 'True' only for 'Names' elements.
isNames :: Element -> Bool
isNames Names {} = True
isNames _        = False
-- | Whether a name list contains an 'EtAl' element anywhere inside
-- (searched generically with 'query').
hasEtAl :: [Name] -> Bool
hasEtAl = not . null . query getEtAl
    where getEtAl n
              | EtAl _ _ <- n = [n]
              | otherwise = []
-- | The formatting attributes shared by most CSL elements.
data Formatting
    = Formatting
      { prefix :: String
      , suffix :: String
      , fontFamily :: String
      , fontStyle :: String
      , fontVariant :: String
      , fontWeight :: String
      , textDecoration :: String
      , verticalAlign :: String
      , textCase :: String
      , display :: String
      , quotes :: Bool
      , stripPeriods :: Bool
      , noCase :: Bool
      , noDecor :: Bool
      } deriving ( Read, Eq, Ord, Typeable, Data )
-- Shown as a constant so that debug output of large structures stays
-- readable; this loses all field information on purpose.
instance Show Formatting where show _ = "emptyFormatting"
-- | A 'Formatting' with every string field empty and every flag off.
emptyFormatting :: Formatting
emptyFormatting
    = Formatting [] [] [] [] [] [] [] [] [] [] False False False False
-- | Drop the prefix and suffix of a 'Formatting', leaving all other
-- attributes untouched.
unsetAffixes :: Formatting -> Formatting
unsetAffixes fm = fm { prefix = "", suffix = "" }
-- | Merge two 'Formatting's field by field: for the string fields the
-- second argument wins unless it is empty ('betterThen'); note that
-- each boolean branch @if b /= a then b else a@ is equivalent to just
-- @b@, so the flags are always taken from the second argument.
mergeFM :: Formatting -> Formatting -> Formatting
mergeFM (Formatting aa ab ac ad ae af ag ah ai aj ak al am an)
        (Formatting ba bb bc bd be bf bg bh bi bj bk bl bm bn) =
    Formatting (ba `betterThen` aa)
               (bb `betterThen` ab)
               (bc `betterThen` ac)
               (bd `betterThen` ad)
               (be `betterThen` ae)
               (bf `betterThen` af)
               (bg `betterThen` ag)
               (bh `betterThen` ah)
               (bi `betterThen` ai)
               (bj `betterThen` aj)
               (if bk /= ak then bk else ak)
               (if bl /= al then bl else al)
               (if bm /= am then bm else am)
               (if bn /= an then bn else an)
-- | The cs:info metadata of a style.
data CSInfo
    = CSInfo
      { csiTitle :: String
      , csiAuthor :: CSAuthor
      , csiCategories :: [CSCategory]
      , csiId :: String
      , csiUpdated :: String
      } deriving ( Show, Read, Typeable, Data )
-- Three free-form strings each; their ordering is fixed by the style
-- parser -- confirm there before relying on it.
data CSAuthor = CSAuthor String String String deriving ( Show, Read, Eq, Typeable, Data )
data CSCategory = CSCategory String String String deriving ( Show, Read, Eq, Typeable, Data )
-- | The formatted output, produced after post-processing the
-- evaluated citations.
data FormattedOutput
    = FO Formatting [FormattedOutput]  -- ^ a formatted group of outputs
    | FN String Formatting             -- ^ a zero-padded citation number (see 'formatOutput')
    | FS String Formatting             -- ^ a plain string
    | FDel String                      -- ^ a delimiter
    | FPan [Inline]                    -- ^ pandoc inlines passed through
    | FNull
    deriving ( Eq, Show )
-- | The 'Output' generated by the evaluation of a style. Must be
-- further processed for disambiguation and collapsing.
data Output
    = ONull
    | OSpace
    | OPan [Inline]
    | ODel String -- ^ A delimiter string.
    | OStr String Formatting -- ^ A simple 'String'
    | ONum Int Formatting -- ^ A number (used to count contributors)
    | OCitNum Int Formatting -- ^ The citation number
    | OYear String String Formatting -- ^ The year and the citeId
    | OYearSuf String String [Output] Formatting -- ^ The year suffix, the citeId and a holder for collision data
    | OName String [Output] [[Output]] Formatting -- ^ A (family) name with the list of given names.
    | OContrib String String [Output] [Output] [[Output]] -- ^ The citation key, the role (author, editor, etc.), the contributor(s),
                                                          -- the output needed for year suf. disambiguation, and everything used for
                                                          -- name disambiguation.
    | Output [Output] Formatting -- ^ Some nested 'Output'
    deriving ( Eq, Ord, Show, Typeable, Data )
-- | A citation prefix or suffix: either plain text or already-parsed
-- pandoc inlines.
data Affix
    = PlainText String
    | PandocText [Inline]
    deriving ( Show, Read, Eq, Ord, Typeable, Data )
-- | Needed for the test-suite.
-- 'PandocText' is serialized via 'show', and 'readJSON' round-trips
-- through 'reads'; any string that does not fully parse falls back to
-- an empty 'PlainText'.
instance JSON Affix where
    showJSON (PlainText s) = JSString . toJSString $ s
    showJSON (PandocText i) = JSString . toJSString $ show i
    readJSON jv
        | JSString js <- jv
        , [(x,"")] <- reads (fromJSString js) = Ok x
        | otherwise = Ok $ PlainText []
-- | Citations, grouped as they occur in the document.
type Citations = [[Cite]]
-- | A single citation as supplied by the caller of the processor.
data Cite
    = Cite
      { citeId :: String
      , citePrefix :: Affix
      , citeSuffix :: Affix
      , citeLabel :: String
      , citeLocator :: String
      , citeNoteNumber :: String
      , citePosition :: String
      , nearNote :: Bool
      , authorInText :: Bool
      , suppressAuthor :: Bool
      , citeHash :: Int
      } deriving ( Show, Eq, Typeable, Data )
-- | An empty plain-text affix.
emptyAffix :: Affix
emptyAffix = PlainText []
-- | A 'Cite' with every field empty, off, or zero.
emptyCite :: Cite
emptyCite = Cite [] emptyAffix emptyAffix [] [] [] [] False False False 0
-- | A citation group: a list of evaluated citations, the 'Formatting'
-- to be applied to them, and the 'Delimiter' between individual
-- citations.
data CitationGroup = CG [(Cite, Output)] Formatting Delimiter [(Cite, Output)] deriving ( Show, Eq, Typeable, Data )
-- | The fully formatted citations and bibliography of a document.
data BiblioData
    = BD
      { citations :: [[FormattedOutput]]
      , bibliography :: [[FormattedOutput]]
      } deriving ( Show )
-- | A record with all the data to produce the 'FormattedOutput' of a
-- citation: the citation key, the part of the citation that may be
-- colliding with other citations (the list of contributors for the
-- same year), the data to disambiguate it (all possible contributors
-- and all possible given names), and the disambiguated citation and
-- its year.
data CiteData
    = CD
      { key :: String
      , collision :: [Output]
      , disambYS :: [Output]
      , disambData :: [[Output]]
      , disambed :: [Output]
      , citYear :: String
      } deriving ( Show, Typeable, Data )
-- Equality considers only the key and the colliding part; the
-- disambiguation fields are deliberately ignored.
instance Eq CiteData where
    (==) (CD ka ca _ _ _ _)
         (CD kb cb _ _ _ _) = ka == kb && ca == cb
-- | Disambiguation data for a single contributor name.
data NameData
    = ND
      { nameKey :: String
      , nameCollision :: [Output]
      , nameDisambData :: [[Output]]
      , nameDataSolved :: [Output]
      } deriving ( Show, Typeable, Data )
-- Equality considers only the key and the colliding part.
instance Eq NameData where
    (==) (ND ka ca _ _)
         (ND kb cb _ _) = ka == kb && ca == cb
-- | Format a list of 'Output', dropping 'FNull's and empty groups and
-- flattening groups whose formatting is empty.
formatOutputList :: [Output] -> [FormattedOutput]
formatOutputList = filterUseless . map formatOutput
    where
      filterUseless [] = []
      filterUseless (o:os)
          -- empty group: drop it entirely
          | FO _ [] <- o = filterUseless os
          -- group with no formatting: splice its children in place
          | FO f xs <- o
          , isEmpty f = filterUseless xs ++ filterUseless os
          -- formatted group: keep it unless it filters down to nothing
          | FO f xs <- o = case filterUseless xs of
                             [] -> filterUseless os
                             xs' -> FO f xs' : filterUseless os
          | FNull <- o = filterUseless os
          | otherwise = o : filterUseless os
          where
            isEmpty f = f == emptyFormatting
-- | Convert evaluated 'Output' into 'FormattedOutput', ready for the
-- output filters.  Empty delimiters and strings collapse to 'FNull'.
formatOutput :: Output -> FormattedOutput
formatOutput o
    | OSpace <- o = FDel " "
    | OPan i <- o = FPan i
    | ODel [] <- o = FNull
    | ODel s <- o = FDel s
    | OStr [] _ <- o = FNull
    | OStr s f <- o = FS s f
    | OYear s _ f <- o = FS s f
    | OYearSuf s _ _ f <- o = FS s f
    | ONum i f <- o = FS (show i) f
    | OCitNum i f <- o = FN (add00 i) f
    | OName _ s _ f <- o = FO f (format s)
    | OContrib _ _ s _ _ <- o = FO emptyFormatting (format s)
    | Output os f <- o = FO f (format os)
    | otherwise = FNull
    where
      format = map formatOutput
      -- left-pad the citation number with '0' to exactly five digits;
      -- note that numbers longer than five digits keep only their last
      -- five (a consequence of the reverse/take 5 pipeline)
      add00 = reverse . take 5 . flip (++) (repeat '0') . reverse . show
-- | Apply a query to the evaluated output of every citation in a
-- group and concatenate the results.
mapGroupOutput :: (Output -> [a]) -> CitationGroup -> [a]
mapGroupOutput f (CG _ _ _ os) = concatMap (f . snd) os
-- | A generic processing function: apply the transformation at every
-- node of the structure where it typechecks (SYB 'everywhere').
proc :: (Typeable a, Data b) => (a -> a) -> b -> b
proc f = everywhere (mkT f)
-- | A generic query function: collect results from every node the
-- query applies to (SYB 'everything').
query :: (Typeable a, Data b) => (a -> [c]) -> b -> [c]
query f = everything (++) ([] `mkQ` f)
-- | Removes all given names from an 'OName' element (used with 'proc').
rmGivenNames :: Output -> Output
rmGivenNames (OName i s _ f) = OName i s [] f
rmGivenNames o               = o

-- | Clears the name key of an 'OName' element (used with 'proc').
rmNameHash :: Output -> Output
rmNameHash (OName _ s ss f) = OName [] s ss f
rmNameHash o                = o

-- | Removes all contributors' names from an 'OContrib' element.
rmContribs :: Output -> Output
rmContribs (OContrib s r _ _ _) = OContrib s r [] [] []
rmContribs o                    = o
-- | Add, with 'proc', a given name to the family name.  Needed for
-- disambiguation.  Only the first 'OName' carrying given names is
-- expanded; everything after it is left untouched.
-- Fix: the old guard used the partial 'head'\/'tail' pair behind an
-- @xs \/= []@ check; the @(g:gs)@ pattern below is equivalent and total.
addGivenNames :: [Output] -> [Output]
addGivenNames
    = addGN True
    where
      addGN _ [] = []
      addGN b (o:os)
          | OName i _ (g:gs) f <- o
          = if b then OName i g gs f : addGN False os else o : os
          | otherwise = o : addGN b os
-- | Add the year suffix to the year. Needed for disambiguation.
-- A bare 'OYear' is wrapped with a fresh (empty) 'OYearSuf'; nested
-- 'Output's are only touched when no suffix is present yet, recursing
-- into the first year-bearing child.
addYearSuffix :: Output -> Output
addYearSuffix o
    | OYear y k f <- o = Output [OYear y k emptyFormatting,OYearSuf [] k [] emptyFormatting] f
    | Output (x:xs) f <- o = if or $ map hasYearSuf (x : xs)
                             then Output (x : xs) f
                             else if hasYear x
                                  then Output (addYearSuffix x : xs) f
                                  else Output (x : [addYearSuffix $ Output xs emptyFormatting]) f
    | otherwise = o
-- | Whether the output contains an 'OYear' anywhere inside.
hasYear :: Output -> Bool
hasYear = not . null . query getYear
    where getYear o
              | OYear _ _ _ <- o = [o]
              | otherwise = []
-- | Whether the output contains an 'OYearSuf' anywhere inside.
hasYearSuf :: Output -> Bool
hasYearSuf = not . null . query getYearSuf
    where getYearSuf o
              | OYearSuf _ _ _ _ <- o = ["a"]
              | otherwise = []
-- | Prefer the first list; fall back to the second only when the
-- first is empty.
betterThen :: Eq a => [a] -> [a] -> [a]
betterThen a b
    | null a    = b
    | otherwise = a
| singingwolfboy/citeproc-hs | src/Text/CSL/Style.hs | bsd-3-clause | 17,188 | 0 | 17 | 6,115 | 5,123 | 2,780 | 2,343 | 389 | 5 |
{-# LANGUAGE DerivingVia #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Orphans
() where
import Control.Monad.Trans.Resource
import Control.Monad.Trans.Resource.Internal
( ReleaseKey(..)
, ReleaseMap(..)
)
import Data.Typeable ( Typeable )
import Foreign.Ptr ( Ptr )
import NoThunks.Class
import SDL ( Window )
import Vulkan.Core10
import Vulkan.Extensions.VK_KHR_acceleration_structure
import Vulkan.Extensions.VK_KHR_surface
import Vulkan.Extensions.VK_KHR_swapchain
( SwapchainKHR )
import VulkanMemoryAllocator
-- Handles
-- NOTE(review): OnlyCheckWhnf appears to check only that the value
-- itself is in WHNF without descending into it, which suits opaque
-- foreign handles -- confirm against the nothunks documentation.
deriving via OnlyCheckWhnf (Ptr a) instance Typeable a => NoThunks (Ptr a)
deriving via OnlyCheckWhnf AccelerationStructureKHR instance NoThunks AccelerationStructureKHR
deriving via OnlyCheckWhnf Allocation instance NoThunks Allocation
deriving via OnlyCheckWhnf Buffer instance NoThunks Buffer
deriving via OnlyCheckWhnf CommandPool instance NoThunks CommandPool
deriving via OnlyCheckWhnf DescriptorSet instance NoThunks DescriptorSet
deriving via OnlyCheckWhnf Pipeline instance NoThunks Pipeline
deriving via OnlyCheckWhnf PipelineLayout instance NoThunks PipelineLayout
deriving via OnlyCheckWhnf SDL.Window instance NoThunks SDL.Window
deriving via OnlyCheckWhnf Semaphore instance NoThunks Semaphore
deriving via OnlyCheckWhnf SurfaceKHR instance NoThunks SurfaceKHR
deriving via OnlyCheckWhnf SwapchainKHR instance NoThunks SwapchainKHR
deriving via OnlyCheckWhnf Image instance NoThunks Image
deriving via OnlyCheckWhnf ImageView instance NoThunks ImageView
-- Enums
deriving via OnlyCheckWhnf PresentModeKHR instance NoThunks PresentModeKHR
-- Simple Structs
deriving via OnlyCheckWhnf SurfaceFormatKHR instance NoThunks SurfaceFormatKHR
deriving via OnlyCheckWhnf Extent2D instance NoThunks Extent2D
-- Others
-- Manual instance: the open constructor's three fields are checked as
-- a tuple; the closed constructor carries nothing to inspect.
instance NoThunks ReleaseMap where
  noThunks c = \case
    (ReleaseMap n r i) -> noThunks c (n, r, i)
    ReleaseMapClosed -> noThunks c ()
  showTypeOf _ = "ReleaseMap"
  wNoThunks c = \case
    (ReleaseMap n r i) -> wNoThunks c (n, r, i)
    ReleaseMapClosed -> wNoThunks c ()
-- Manual instance: both fields of a 'ReleaseKey' are checked as a pair.
instance NoThunks ReleaseKey where
  noThunks c (ReleaseKey r i) = noThunks c (r, i)
  showTypeOf _ = "ReleaseKey"
  wNoThunks c (ReleaseKey r i) = wNoThunks c (r, i)
| expipiplus1/vulkan | examples/lib/Orphans.hs | bsd-3-clause | 2,573 | 0 | 10 | 667 | 582 | 303 | 279 | -1 | -1 |
{-# LANGUAGE CPP, DeriveDataTypeable, MultiWayIf, NamedFieldPuns, OverloadedStrings, RankNTypes,
RecordWildCards, TemplateHaskell, TupleSections #-}
-- | Run commands in Docker containers
module Stack.Docker
(checkVersions
,cleanup
,CleanupOpts(..)
,CleanupAction(..)
,dockerCleanupCmdName
,dockerCmdName
,dockerOptsParser
,dockerOptsFromMonoid
,dockerPullCmdName
,preventInContainer
,pull
,rerunCmdWithOptionalContainer
,rerunCmdWithRequiredContainer
,rerunWithOptionalContainer
,reset
) where
import Control.Applicative
import Control.Exception
import Control.Monad
import Control.Monad.Catch (MonadThrow, throwM)
import Control.Monad.IO.Class (MonadIO,liftIO)
import Control.Monad.Logger (MonadLogger,logError,logInfo,logWarn)
import Control.Monad.Writer (execWriter,runWriter,tell)
import Data.Aeson (FromJSON(..),(.:),(.:?),(.!=),eitherDecode)
import Data.ByteString.Builder (stringUtf8,charUtf8,toLazyByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Char (isSpace,toUpper,isAscii)
import Data.List (dropWhileEnd,find,intercalate,intersperse,isPrefixOf,isInfixOf,foldl',sortBy)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Monoid
import Data.Streaming.Process (ProcessExitedUnsuccessfully(..))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time (UTCTime,LocalTime(..),diffDays,utcToLocalTime,getZonedTime,ZonedTime(..))
import Data.Typeable (Typeable)
import Options.Applicative.Builder.Extra (maybeBoolFlags)
import Options.Applicative (Parser,str,option,help,auto,metavar,long,value,hidden,internal,idm)
import Path
import Path.IO (getWorkingDir,listDirectory)
import Paths_stack (version)
import Stack.Constants (projectDockerSandboxDir,stackProgName,stackDotYaml,stackRootEnvVar)
import Stack.Types
import Stack.Docker.GlobalDB
import System.Directory (createDirectoryIfMissing,removeDirectoryRecursive,removeFile)
import System.Directory (doesDirectoryExist)
import System.Environment (lookupEnv,getProgName,getArgs,getExecutablePath)
import System.Exit (ExitCode(ExitSuccess),exitWith)
import System.FilePath (takeBaseName,isPathSeparator)
import System.Info (arch,os)
import System.IO (stderr,stdin,stdout,hIsTerminalDevice)
import qualified System.Process as Proc
import System.Process.PagerEditor (editByteString)
import System.Process.Read
import Text.Printf (printf)
#ifndef mingw32_HOST_OS
import System.Posix.Signals (installHandler,sigTERM,Handler(Catch))
#endif
-- | If Docker is enabled, re-runs the currently running OS command in a Docker container.
-- Otherwise, runs the inner action.
rerunWithOptionalContainer :: (MonadLogger m,MonadIO m,MonadThrow m)
                           => Config -> Maybe (Path Abs Dir) -> IO () -> m ()
rerunWithOptionalContainer config mprojectRoot inner =
    rerunCmdWithOptionalContainer config mprojectRoot getCmdArgs inner
  where
    getCmdArgs =
      -- On linux/x86_64 the host executable can run inside the (linux)
      -- container, so it is bind-mounted and invoked by its mount path;
      -- on other platforms we fall back to the bare program name
      -- (presumably resolved on the image's PATH -- confirm).
      do args <- getArgs
         if arch == "x86_64" && os == "linux"
             then do exePath <- getExecutablePath
                     let mountDir = concat ["/tmp/host-",stackProgName]
                         mountPath = concat [mountDir,"/",takeBaseName exePath]
                     return (mountPath
                            ,args
                            ,config{configDocker=docker{dockerMount=Mount exePath mountPath :
                                                        dockerMount docker}})
             else do progName <- getProgName
                     return (takeBaseName progName,args,config)
    docker = configDocker config
-- | If Docker is enabled, re-runs the OS command returned by the second argument in a
-- Docker container.  Otherwise, runs the inner action.
-- Also runs the inner action directly when we are already inside a
-- container, to avoid recursive re-execution.
rerunCmdWithOptionalContainer :: (MonadLogger m,MonadIO m,MonadThrow m)
                              => Config
                              -> Maybe (Path Abs Dir)
                              -> IO (FilePath,[String],Config)
                              -> IO ()
                              -> m ()
rerunCmdWithOptionalContainer config mprojectRoot getCmdArgs inner =
  do inContainer <- getInContainer
     if inContainer || not (dockerEnable (configDocker config))
        then liftIO inner
        else do (cmd_,args,config') <- liftIO getCmdArgs
                runContainerAndExit config' mprojectRoot cmd_ args [] (return ())
-- | Re-runs the OS command returned by the last argument in a Docker
-- container.  Throws 'DockerMustBeEnabledException' when Docker is not
-- enabled in the configuration.  (The previous comment wrongly claimed
-- an \"inner action\" was run in that case.)
rerunCmdWithRequiredContainer :: (MonadLogger m,MonadIO m,MonadThrow m)
                              => Config
                              -> Maybe (Path Abs Dir)
                              -> IO (FilePath,[String],Config)
                              -> m ()
rerunCmdWithRequiredContainer config mprojectRoot getCmdArgs =
  do -- 'unless' reads better than the old @when (not ...)@
     unless (dockerEnable (configDocker config))
            (throwM DockerMustBeEnabledException)
     (cmd_,args,config') <- liftIO getCmdArgs
     runContainerAndExit config' mprojectRoot cmd_ args [] (return ())
-- | Error if running in a container: throws 'OnlyOnHostException'
-- inside a container, otherwise runs the given action.
preventInContainer :: (MonadIO m,MonadThrow m) => m () -> m ()
preventInContainer inner =
  do inContainer <- getInContainer
     if inContainer
        then throwM OnlyOnHostException
        else inner
-- | 'True' if we are currently running inside a Docker container,
-- detected by the presence of the sandbox-ID environment variable
-- (set when the container is started).
getInContainer :: (MonadIO m) => m Bool
getInContainer = liftIO (isJust <$> lookupEnv sandboxIDEnvVar)
-- | Run a command in a new Docker container, then exit the process.
-- The container mounts the stack root, the project root and a
-- per-sandbox home directory, forwards the caller's uid/gid and
-- optionally the host's Docker connection settings, then executes the
-- given command with the given arguments and environment variables.
runContainerAndExit :: (MonadLogger m,MonadIO m,MonadThrow m)
                    => Config
                    -> Maybe (Path Abs Dir)  -- ^ project root (errors when required but absent)
                    -> FilePath              -- ^ command to run inside the container
                    -> [String]              -- ^ its arguments
                    -> [(String,String)]     -- ^ extra environment variables
                    -> IO ()                 -- ^ action run on success
                    -> m ()
runContainerAndExit config
                    mprojectRoot
                    cmnd
                    args
                    envVars
                    successPostAction =
  do envOverride <- getEnvOverride (configPlatform config)
     checkDockerVersion envOverride
     -- the host uid/gid are forwarded so files created in the mounted
     -- directories stay owned by the invoking user
     uidOut <- readProcessStdout Nothing envOverride "id" ["-u"]
     gidOut <- readProcessStdout Nothing envOverride "id" ["-g"]
     (dockerHost,dockerCertPath,dockerTlsVerify) <-
       liftIO ((,,) <$> lookupEnv "DOCKER_HOST"
                    <*> lookupEnv "DOCKER_CERT_PATH"
                    <*> lookupEnv "DOCKER_TLS_VERIFY")
     (isStdinTerminal,isStdoutTerminal,isStderrTerminal) <-
       liftIO ((,,) <$> hIsTerminalDevice stdin
                    <*> hIsTerminalDevice stdout
                    <*> hIsTerminalDevice stderr)
     pwd <- getWorkingDir
     when (maybe False (isPrefixOf "tcp://") dockerHost &&
           maybe False (isInfixOf "boot2docker") dockerCertPath)
       ($logWarn "WARNING: Using boot2docker is NOT supported, and not likely to perform well.")
     let image = dockerImage docker
     maybeImageInfo <- inspect envOverride image
     -- pull the image on demand when auto-pull is enabled; otherwise a
     -- missing image is an error
     imageInfo <- case maybeImageInfo of
       Just ii -> return ii
       Nothing
         | dockerAutoPull docker ->
             do pullImage pwd envOverride docker image
                mii2 <- inspect envOverride image
                case mii2 of
                  Just ii2 -> return ii2
                  Nothing -> throwM (InspectFailedException image)
         | otherwise -> throwM (NotPulledException image)
     let uid = dropWhileEnd isSpace (decodeUtf8 uidOut)
         gid = dropWhileEnd isSpace (decodeUtf8 gidOut)
         imageEnvVars = map (break (== '=')) (icEnv (iiConfig imageInfo))
         -- the sandbox ID normally comes from the image's environment;
         -- the fallback reconstructs it for legacy (lts-1.x) images
         (sandboxID,oldImage) =
           case lookupImageEnv sandboxIDEnvVar imageEnvVars of
             Just x -> (x,False)
             Nothing ->
               --EKB TODO: remove this and oldImage after lts-1.x images no longer in use
               let sandboxName = maybe "default" id (lookupImageEnv "SANDBOX_NAME" imageEnvVars)
                   maybeImageCabalRemoteRepoName = lookupImageEnv "CABAL_REMOTE_REPO_NAME" imageEnvVars
                   maybeImageStackageSlug = lookupImageEnv "STACKAGE_SLUG" imageEnvVars
                   maybeImageStackageDate = lookupImageEnv "STACKAGE_DATE" imageEnvVars
               in (case (maybeImageStackageSlug,maybeImageStackageDate) of
                     (Just stackageSlug,_) -> sandboxName ++ "_" ++ stackageSlug
                     (_,Just stackageDate) -> sandboxName ++ "_" ++ stackageDate
                     _ -> sandboxName ++ maybe "" ("_" ++) maybeImageCabalRemoteRepoName
                  ,True)
     sandboxIDDir <- parseRelDir (sandboxID ++ "/")
     let stackRoot = configStackRoot config
         sandboxDir = projectDockerSandboxDir projectRoot
         sandboxSandboxDir = sandboxDir </> $(mkRelDir ".sandbox/") </> sandboxIDDir
         sandboxHomeDir = sandboxDir </> homeDirName
         sandboxRepoDir = sandboxDir </> sandboxIDDir
         sandboxSubdirs = map (\d -> sandboxRepoDir </> d)
                              sandboxedHomeSubdirectories
         isTerm = isStdinTerminal && isStdoutTerminal && isStderrTerminal
         execDockerProcess =
           do mapM_ (createDirectoryIfMissing True)
                    (concat [[toFilePath sandboxHomeDir
                             ,toFilePath sandboxSandboxDir] ++
                             map toFilePath sandboxSubdirs])
              execProcessAndExit
                envOverride
                "docker"
                (concat
                  [["run"
                   ,"--net=host"
                   ,"-e",stackRootEnvVar ++ "=" ++ trimTrailingPathSep stackRoot
                   ,"-e","WORK_UID=" ++ uid
                   ,"-e","WORK_GID=" ++ gid
                   ,"-e","WORK_WD=" ++ trimTrailingPathSep pwd
                   ,"-e","WORK_HOME=" ++ trimTrailingPathSep sandboxRepoDir
                   ,"-e","WORK_ROOT=" ++ trimTrailingPathSep projectRoot
                   ,"-e",hostVersionEnvVar ++ "=" ++ versionString stackVersion
                   ,"-e",requireVersionEnvVar ++ "=" ++ versionString requireContainerVersion
                   ,"-v",trimTrailingPathSep stackRoot ++ ":" ++ trimTrailingPathSep stackRoot
                   ,"-v",trimTrailingPathSep projectRoot ++ ":" ++ trimTrailingPathSep projectRoot
                   ,"-v",trimTrailingPathSep sandboxSandboxDir ++ ":" ++ trimTrailingPathSep sandboxDir
                   ,"-v",trimTrailingPathSep sandboxHomeDir ++ ":" ++ trimTrailingPathSep sandboxRepoDir]
                  -- legacy images need the sandbox ID passed explicitly
                  -- and a custom entrypoint
                  ,if oldImage
                      then ["-e",sandboxIDEnvVar ++ "=" ++ sandboxID
                           ,"--entrypoint=/root/entrypoint.sh"]
                      else []
                  -- optionally forward the host's Docker connection
                  -- (socket or TCP + TLS settings) into the container
                  ,case (dockerPassHost docker,dockerHost) of
                     (True,Just x@('u':'n':'i':'x':':':'/':'/':s)) -> ["-e","DOCKER_HOST=" ++ x
                                                                      ,"-v",s ++ ":" ++ s]
                     (True,Just x) -> ["-e","DOCKER_HOST=" ++ x]
                     (True,Nothing) -> ["-v","/var/run/docker.sock:/var/run/docker.sock"]
                     (False,_) -> []
                  ,case (dockerPassHost docker,dockerCertPath) of
                     (True,Just x) -> ["-e","DOCKER_CERT_PATH=" ++ x
                                      ,"-v",x ++ ":" ++ x]
                     _ -> []
                  ,case (dockerPassHost docker,dockerTlsVerify) of
                     (True,Just x )-> ["-e","DOCKER_TLS_VERIFY=" ++ x]
                     _ -> []
                  ,concatMap sandboxSubdirArg sandboxSubdirs
                  ,concatMap mountArg (dockerMount docker)
                  ,case dockerContainerName docker of
                     Just name -> ["--name=" ++ name]
                     Nothing -> []
                  ,if dockerDetach docker
                      then ["-d"]
                      else concat [["--rm" | not (dockerPersist docker)]
                                  ,["-t" | isTerm]
                                  ,["-i" | isTerm]]
                  ,dockerRunArgs docker
                  ,[image]
                  ,map (\(k,v) -> k ++ "=" ++ v) envVars
                  ,[cmnd]
                  ,args])
                successPostAction
     liftIO (do updateDockerImageLastUsed config
                                          (iiId imageInfo)
                                          (toFilePath projectRoot)
                execDockerProcess)
  where
    -- look up a variable in the image's parsed environment; the value
    -- still carries its leading '=' from 'break'
    lookupImageEnv name vars =
      case lookup name vars of
        Just ('=':val) -> Just val
        _ -> Nothing
    mountArg (Mount host container) = ["-v",host ++ ":" ++ container]
    sandboxSubdirArg subdir = ["-v",trimTrailingPathSep subdir++ ":" ++ trimTrailingPathSep subdir]
    trimTrailingPathSep = dropWhileEnd isPathSeparator . toFilePath
    projectRoot = fromMaybeProjectRoot mprojectRoot
    docker = configDocker config
-- | Clean-up old docker images and containers.
-- Gathers image/container listings from @docker@, builds a textual removal
-- plan (optionally hand-edited by the user in interactive mode), executes
-- each remaining plan line, and finally prunes the image last-used database
-- to the images that still exist.
cleanup :: (MonadLogger m,MonadIO m,MonadThrow m) => Config -> CleanupOpts -> m ()
cleanup config opts =
  do envOverride <- getEnvOverride (configPlatform config)
     checkDockerVersion envOverride
     let runDocker = readProcessStdout Nothing envOverride "docker"
     imagesOut <- runDocker ["images","--no-trunc","-f","dangling=false"]
     danglingImagesOut <- runDocker ["images","--no-trunc","-f","dangling=true"]
     runningContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=running"]
     restartingContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=restarting"]
     exitedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=exited"]
     pausedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=paused"]
     let imageRepos = parseImagesOut imagesOut
         danglingImageHashes = Map.keys (parseImagesOut danglingImagesOut)
         -- Restarting counts as running; paused counts as stopped.
         runningContainers = parseContainersOut runningContainersOut ++
                             parseContainersOut restartingContainersOut
         stoppedContainers = parseContainersOut exitedContainersOut ++
                             parseContainersOut pausedContainersOut
     -- Inspect everything up front so creation times/sizes are available
     -- when the plan is rendered below.
     inspectMap <- inspects envOverride
                            (Map.keys imageRepos ++
                             danglingImageHashes ++
                             map fst stoppedContainers ++
                             map fst runningContainers)
     (imagesLastUsed,curTime) <-
       liftIO ((,) <$> getDockerImagesLastUsed config
                   <*> getZonedTime)
     let planWriter = buildPlan curTime
                                imagesLastUsed
                                imageRepos
                                danglingImageHashes
                                stoppedContainers
                                runningContainers
                                inspectMap
         plan = toLazyByteString (execWriter planWriter)
     plan' <- case dcAction opts of
                CleanupInteractive ->
                  liftIO (editByteString (intercalate "-" [stackProgName
                                                          ,dockerCmdName
                                                          ,dockerCleanupCmdName
                                                          ,"plan"])
                                         plan)
                CleanupImmediate -> return plan
                CleanupDryRun -> do liftIO (LBS.hPut stdout plan)
                                    return LBS.empty
     -- Containers were emitted after images, so reversing removes
     -- containers before the images they reference.
     mapM_ (performPlanLine envOverride)
           (reverse (filter filterPlanLine (lines (LBS.unpack plan'))))
     allImageHashesOut <- runDocker ["images","-aq","--no-trunc"]
     liftIO (pruneDockerImagesLastUsed config (lines (decodeUtf8 allImageHashesOut)))
  where
    -- Plan lines starting with whitespace are "keep"/informational lines.
    filterPlanLine line =
      case line of
        c:_ | isSpace c -> False
        _ -> True
    -- Execute one plan line of the form "R image X" / "R container X";
    -- anything after '#' is a comment.
    performPlanLine envOverride line =
      case filter (not . null) (words (takeWhile (/= '#') line)) of
        [] -> return ()
        (c:_):t:v:_ ->
          do args <- if | toUpper c == 'R' && t == imageStr ->
                          do $logInfo (concatT ["Removing image: '",v,"'"])
                             return ["rmi",v]
                        | toUpper c == 'R' && t == containerStr ->
                          do $logInfo (concatT ["Removing container: '",v,"'"])
                             return ["rm","-f",v]
                        | otherwise -> throwM (InvalidCleanupCommandException line)
             -- A failed removal is logged but does not abort the cleanup.
             e <- liftIO (try (callProcess Nothing envOverride "docker" args))
             case e of
               Left (ProcessExitedUnsuccessfully _ _) ->
                 $logError (concatT ["Could not remove: '",v,"'"])
               Right () -> return ()
        _ -> throwM (InvalidCleanupCommandException line)
    -- Map from image hash to its repo:tag names, from 'docker images' output
    -- (header row dropped).
    parseImagesOut = Map.fromListWith (++) . map parseImageRepo . drop 1 . lines . decodeUtf8
      where parseImageRepo :: String -> (String, [String])
            parseImageRepo line =
              case words line of
                repo:tag:hash:_
                  | repo == "<none>" -> (hash,[])
                  | tag == "<none>" -> (hash,[repo])
                  | otherwise -> (hash,[repo ++ ":" ++ tag])
                _ -> throw (InvalidImagesOutputException line)
    -- Pairs of (container hash, (image, name)) from 'docker ps' output;
    -- the container name is the last column.
    parseContainersOut = map parseContainer . drop 1 . lines . decodeUtf8
      where parseContainer line =
              case words line of
                hash:image:rest -> (hash,(image,last rest))
                _ -> throw (InvalidPSOutputException line)
    -- Emit the removal plan into a ByteString 'Builder' via 'Writer'.
    buildPlan curTime
              imagesLastUsed
              imageRepos
              danglingImageHashes
              stoppedContainers
              runningContainers
              inspectMap =
      do case dcAction opts of
           CleanupInteractive ->
             do buildStrLn
                  (concat
                     ["# STACK DOCKER CLEANUP PLAN"
                     ,"\n#"
                     ,"\n# When you leave the editor, the lines in this plan will be processed."
                     ,"\n#"
                     ,"\n# Lines that begin with 'R' denote an image or container that will be."
                     ,"\n# removed. You may change the first character to/from 'R' to remove/keep"
                     ,"\n# and image or container that would otherwise be kept/removed."
                     ,"\n#"
                     ,"\n# To cancel the cleanup, delete all lines in this file."
                     ,"\n#"
                     ,"\n# By default, the following images/containers will be removed:"
                     ,"\n#"])
                buildDefault dcRemoveKnownImagesLastUsedDaysAgo "Known images last used"
                buildDefault dcRemoveUnknownImagesCreatedDaysAgo "Unknown images created"
                buildDefault dcRemoveDanglingImagesCreatedDaysAgo "Dangling images created"
                buildDefault dcRemoveStoppedContainersCreatedDaysAgo "Stopped containers created"
                buildDefault dcRemoveRunningContainersCreatedDaysAgo "Running containers created"
                buildStrLn
                  (concat
                     ["#"
                     ,"\n# The default plan can be adjusted using command-line arguments."
                     ,"\n# Run '" ++ unwords [stackProgName, dockerCmdName, dockerCleanupCmdName] ++
                      " --help' for details."
                     ,"\n#"])
           _ -> buildStrLn
                  (unlines
                     ["# Lines that begin with 'R' denote an image or container that will be."
                     ,"# removed."])
         buildSection "KNOWN IMAGES (pulled/used by stack)"
                      imagesLastUsed
                      buildKnownImage
         buildSection "UNKNOWN IMAGES (not managed by stack)"
                      (sortCreated (Map.toList (foldl' (\m (h,_) -> Map.delete h m)
                                                       imageRepos
                                                       imagesLastUsed)))
                      buildUnknownImage
         buildSection "DANGLING IMAGES (no named references and not depended on by other images)"
                      (sortCreated (map (,()) danglingImageHashes))
                      buildDanglingImage
         buildSection "STOPPED CONTAINERS"
                      (sortCreated stoppedContainers)
                      (buildContainer (dcRemoveStoppedContainersCreatedDaysAgo opts))
         buildSection "RUNNING CONTAINERS"
                      (sortCreated runningContainers)
                      (buildContainer (dcRemoveRunningContainersCreatedDaysAgo opts))
      where
        -- Describe one default age threshold in the plan header.
        buildDefault accessor description =
          case accessor opts of
            Just days -> buildStrLn ("# - " ++ description ++ " at least " ++ showDays days ++ ".")
            Nothing -> return ()
        -- Newest-first ordering; entries without inspect data are dropped.
        sortCreated l =
          reverse (sortBy (\(_,_,a) (_,_,b) -> compare a b)
                          (catMaybes (map (\(h,r) -> fmap (\ii -> (h,r,iiCreated ii))
                                                          (Map.lookup h inspectMap))
                                          l)))
        -- Only emit a section header when at least one item wrote output.
        buildSection sectionHead items itemBuilder =
          do let (anyWrote,b) = runWriter (forM items itemBuilder)
             if or anyWrote
                then do buildSectionHead sectionHead
                        tell b
                else return ()
        buildKnownImage (imageHash,lastUsedProjects) =
          case Map.lookup imageHash imageRepos of
            Just repos@(_:_) ->
              do case lastUsedProjects of
                   (l,_):_ -> forM_ repos (buildImageTime (dcRemoveKnownImagesLastUsedDaysAgo opts) l)
                   _ -> forM_ repos buildKeepImage
                 forM_ lastUsedProjects buildProject
                 buildInspect imageHash
                 return True
            _ -> return False
        buildUnknownImage (hash, repos, created) =
          case repos of
            [] -> return False
            _ -> do forM_ repos (buildImageTime (dcRemoveUnknownImagesCreatedDaysAgo opts) created)
                    buildInspect hash
                    return True
        buildDanglingImage (hash, (), created) =
          do buildImageTime (dcRemoveDanglingImagesCreatedDaysAgo opts) created hash
             buildInspect hash
             return True
        buildContainer removeAge (hash,(image,name),created) =
          do let display = (name ++ " (image: " ++ image ++ ")")
             buildTime containerStr removeAge created display
             buildInspect hash
             return True
        buildProject (lastUsedTime, projectPath) =
          buildInfo ("Last used " ++
                     showDaysAgo lastUsedTime ++
                     " in " ++
                     projectPath)
        -- Informational line with creation time and (if known) virtual size.
        buildInspect hash =
          case Map.lookup hash inspectMap of
            Just (Inspect{iiCreated,iiVirtualSize}) ->
              buildInfo ("Created " ++
                         showDaysAgo iiCreated ++
                         maybe ""
                               (\s -> " (size: " ++
                                      printf "%g" (fromIntegral s / 1024.0 / 1024.0 :: Float) ++
                                      "M)")
                               iiVirtualSize)
            Nothing -> return ()
        showDays days =
          case days of
            0 -> "today"
            1 -> "yesterday"
            n -> show n ++ " days ago"
        showDaysAgo oldTime = showDays (daysAgo oldTime)
        -- Age in whole local-time days relative to 'curTime'.
        daysAgo oldTime =
          let ZonedTime (LocalTime today _) zone = curTime
              LocalTime oldDay _ = utcToLocalTime zone oldTime
          in diffDays today oldDay
        buildImageTime = buildTime imageStr
        -- 'R' line when older than the threshold, otherwise a keep line.
        buildTime t removeAge time display =
          case removeAge of
            Just d | daysAgo time >= d -> buildStrLn ("R " ++ t ++ " " ++ display)
            _ -> buildKeep t display
        buildKeep t d = buildStrLn (" " ++ t ++ " " ++ d)
        buildKeepImage = buildKeep imageStr
        buildSectionHead s = buildStrLn ("\n#\n# " ++ s ++ "\n#\n")
        buildInfo = buildStrLn . (" # " ++)
        buildStrLn l = do buildStr l
                          tell (charUtf8 '\n')
        buildStr = tell . stringUtf8
    imageStr = "image"
    containerStr = "container"
-- | Run @docker inspect@ for a single image or container and return its
-- parsed metadata, or 'Nothing' when Docker reports no such object.
-- Throws 'InvalidInspectOutputException' if more than one result comes back.
inspect :: (MonadIO m,MonadThrow m) => EnvOverride -> String -> m (Maybe Inspect)
inspect envOverride image = do
  found <- inspects envOverride [image]
  case Map.elems found of
    [] -> return Nothing
    [single] -> return (Just single)
    _ -> throwM (InvalidInspectOutputException "expect a single result")
-- | Inspect multiple Docker images and/or containers.
-- Runs a single @docker inspect@ over all of them and returns a map keyed
-- by object ID.  If the process exits unsuccessfully (e.g. none of the
-- objects exist), an empty map is returned rather than an error.
inspects :: (MonadIO m,MonadThrow m) => EnvOverride -> [String] -> m (Map String Inspect)
inspects _ [] = return Map.empty
inspects envOverride images =
  do maybeInspectOut <- tryProcessStdout Nothing envOverride "docker" ("inspect" : images)
     case maybeInspectOut of
       Right inspectOut ->
         -- filtering with 'isAscii' to workaround @docker inspect@ output containing invalid UTF-8
         case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of
           Left msg -> throwM (InvalidInspectOutputException msg)
           Right results -> return (Map.fromList (map (\r -> (iiId r,r)) results))
       Left (ProcessExitedUnsuccessfully _ _) -> return Map.empty
-- | Pull the latest version of the Docker image configured in 'configDocker'
-- from its registry, after verifying the installed Docker version.
pull :: (MonadLogger m,MonadIO m,MonadThrow m) => Config -> m ()
pull config = do
  envOverride <- getEnvOverride (configPlatform config)
  checkDockerVersion envOverride
  workingDir <- getWorkingDir
  let dockerOpts = configDocker config
  pullImage workingDir envOverride dockerOpts (dockerImage dockerOpts)
-- | Pull Docker image from registry.
-- If the registry requires authentication ('dockerRegistryLogin'), first runs
-- an interactive @docker login@, passing any configured username/password and
-- the registry host (the part of the image name before the first slash).
-- Throws 'PullFailedException' when @docker pull@ exits unsuccessfully.
pullImage :: (MonadLogger m,MonadIO m,MonadThrow m)
          => Path Abs Dir -> EnvOverride -> DockerOpts -> String -> m ()
pullImage pwd envOverride docker image =
  do $logInfo (concatT ["Pulling image from registry: '",image,"'"])
     when (dockerRegistryLogin docker)
          (do $logInfo "You may need to log in."
              runIn
                pwd
                "docker"
                envOverride
                (concat
                   [["login"]
                   ,maybe [] (\u -> ["--username=" ++ u]) (dockerRegistryUsername docker)
                   ,maybe [] (\p -> ["--password=" ++ p]) (dockerRegistryPassword docker)
                   ,[takeWhile (/= '/') image]])
                Nothing)
     -- 'try' turns a non-zero exit into a domain-specific exception.
     e <- liftIO (try (callProcess Nothing envOverride "docker" ["pull",image]))
     case e of
       Left (ProcessExitedUnsuccessfully _ _) -> throwM (PullFailedException image)
       Right () -> return ()
-- | Check docker version (throws exception if incorrect)
-- Parses the third word of @docker --version@ output (stripping a trailing
-- comma) and rejects versions below the minimum or on the prohibited list.
checkDockerVersion :: (MonadIO m,MonadThrow m) => EnvOverride -> m ()
checkDockerVersion envOverride =
  do dockerExists <- doesExecutableExist envOverride "docker"
     when (not dockerExists)
          (throwM DockerNotInstalledException)
     dockerVersionOut <- readProcessStdout Nothing envOverride "docker" ["--version"]
     -- Expected output shape: "Docker version X.Y.Z, build ...".
     case words (decodeUtf8 dockerVersionOut) of
       (_:_:v:_) ->
         case parseVersionFromString (dropWhileEnd (== ',') v) of
           Just v'
             | v' < minimumDockerVersion ->
               throwM (DockerTooOldException minimumDockerVersion v')
             | v' `elem` prohibitedDockerVersions ->
               throwM (DockerVersionProhibitedException prohibitedDockerVersions v')
             | otherwise ->
               return ()
           _ -> throwM InvalidVersionOutputException
       _ -> throwM InvalidVersionOutputException
  where minimumDockerVersion = $(mkVersion "1.3.0")
        prohibitedDockerVersions = [$(mkVersion "1.2.0")]
-- | Run a process, then exit with the same exit code.
-- Ctrl-C is delegated to the child process ('delegate_ctlc'); on non-Windows
-- hosts a SIGTERM handler is installed that terminates the child so it is
-- not orphaned when this process is killed.
execProcessAndExit :: EnvOverride -> FilePath -> [String] -> IO () -> IO ()
execProcessAndExit envOverride cmnd args successPostAction =
  do (_, _, _, h) <- Proc.createProcess (Proc.proc cmnd args){Proc.delegate_ctlc = True
                                                             ,Proc.env = envHelper envOverride}
#ifndef mingw32_HOST_OS
     _ <- installHandler sigTERM (Catch (Proc.terminateProcess h)) Nothing
#endif
     exitCode <- Proc.waitForProcess h
     -- The post-action (e.g. usage-tracking update) only runs on success.
     when (exitCode == ExitSuccess)
          successPostAction
     exitWith exitCode
-- | Remove the project's Docker sandbox contents, optionally preserving the
-- sandboxed home directory.
reset :: (MonadIO m) => Maybe (Path Abs Dir) -> Bool -> m ()
reset maybeProjectRoot keepHome =
    liftIO (removeDirectoryContents sandboxDir dirsToKeep [])
  where projectRoot = fromMaybeProjectRoot maybeProjectRoot
        sandboxDir = projectDockerSandboxDir projectRoot
        -- Keep the sandboxed home only when the caller asked for it.
        dirsToKeep = if keepHome then [homeDirName] else []
-- | Remove the contents of a directory, without removing the directory itself.
-- This is used instead of 'FS.removeTree' to clear bind-mounted directories,
-- since removing the root of the bind-mount won't work.  A no-op when the
-- directory does not exist.
removeDirectoryContents :: Path Abs Dir -- ^ Directory to remove contents of
                        -> [Path Rel Dir] -- ^ Top-level directory names to exclude from removal
                        -> [Path Rel File] -- ^ Top-level file names to exclude from removal
                        -> IO ()
removeDirectoryContents path excludeDirs excludeFiles = do
  exists <- doesDirectoryExist (toFilePath path)
  when exists $ do
    (subdirs, files) <- listDirectory path
    forM_ subdirs $ \d ->
      unless (dirname d `elem` excludeDirs) $
        removeDirectoryRecursive (toFilePath d)
    forM_ files $ \f ->
      unless (filename f `elem` excludeFiles) $
        removeFile (toFilePath f)
-- | Subdirectories of the home directory to sandbox between GHC/Stackage versions.
-- These hold compiler/package state that must not be shared across images.
sandboxedHomeSubdirectories :: [Path Rel Dir]
sandboxedHomeSubdirectories =
  [$(mkRelDir ".ghc/")
  ,$(mkRelDir ".cabal/")
  ,$(mkRelDir ".ghcjs/")]
-- | Name of home directory within @.docker-sandbox@.
homeDirName :: Path Rel Dir
homeDirName = $(mkRelDir ".home/")
-- | Check host 'stack' version.
-- The host reports its version via the STACK_DOCKER_HOST_VERSION environment
-- variable; throws 'HostStackTooOldException' when that version is below
-- @minVersion@, or when we are running inside a container but the host never
-- reported a version at all (i.e. the host stack predates version passing).
checkHostStackVersion :: (MonadIO m,MonadThrow m) => Version -> m ()
checkHostStackVersion minVersion =
  do maybeHostVer <- liftIO (lookupEnv hostVersionEnvVar)
     case parseVersionFromString =<< maybeHostVer of
       Just hostVer
         | hostVer < minVersion -> throwM (HostStackTooOldException minVersion (Just hostVer))
         | otherwise -> return ()
       Nothing ->
         do inContainer <- getInContainer
            -- A missing/unparsable host version only matters inside a
            -- container; 'when' replaces the old 'if .. else return ()'.
            when inContainer
                 (throwM (HostStackTooOldException minVersion Nothing))
-- | Check host and container 'stack' versions are compatible.
-- Only meaningful when running inside a container: verifies the host's
-- stack is new enough, and that this (container) stack satisfies the
-- version the host requested via STACK_DOCKER_REQUIRE_VERSION.
checkVersions :: (MonadIO m,MonadThrow m) => m ()
checkVersions =
  do inContainer <- getInContainer
     when inContainer
          (do checkHostStackVersion requireHostVersion
              maybeReqVer <- liftIO (lookupEnv requireVersionEnvVar)
              case parseVersionFromString =<< maybeReqVer of
                Just reqVer
                  | stackVersion < reqVer -> throwM (ContainerStackTooOldException reqVer stackVersion)
                  | otherwise -> return ()
                _ -> return ())
-- | Options parser configuration for Docker.
-- The field order of the applicative chain must match the declaration order
-- of 'DockerOptsMonoid'.  When @showOptions@ is 'False', all flags are hidden
-- from @--help@ output (but still accepted).
dockerOptsParser :: Bool -> Parser DockerOptsMonoid
dockerOptsParser showOptions =
  DockerOptsMonoid
  <$> pure Nothing
  <*> maybeBoolFlags dockerCmdName
                     "using a Docker container"
                     hide
  <*> ((Just . DockerMonoidRepo) <$> option str (long (dockerOptName dockerRepoArgName) <>
                                                 hide <>
                                                 metavar "NAME" <>
                                                 help "Docker repository name") <|>
       (Just . DockerMonoidImage) <$> option str (long (dockerOptName dockerImageArgName) <>
                                                  hide <>
                                                  metavar "IMAGE" <>
                                                  help "Exact Docker image ID (overrides docker-repo)") <|>
       pure Nothing)
  <*> maybeBoolFlags (dockerOptName dockerRegistryLoginArgName)
                     "registry requires login"
                     hide
  <*> maybeStrOption (long (dockerOptName dockerRegistryUsernameArgName) <>
                      hide <>
                      metavar "USERNAME" <>
                      help "Docker registry username")
  <*> maybeStrOption (long (dockerOptName dockerRegistryPasswordArgName) <>
                      hide <>
                      metavar "PASSWORD" <>
                      help "Docker registry password")
  <*> maybeBoolFlags (dockerOptName dockerAutoPullArgName)
                     "automatic pulling latest version of image"
                     hide
  <*> maybeBoolFlags (dockerOptName dockerDetachArgName)
                     "running a detached Docker container"
                     hide
  <*> maybeBoolFlags (dockerOptName dockerPersistArgName)
                     "not deleting container after it exits"
                     hide
  <*> maybeStrOption (long (dockerOptName dockerContainerNameArgName) <>
                      hide <>
                      metavar "NAME" <>
                      help "Docker container name")
  <*> wordsStrOption (long (dockerOptName dockerRunArgsArgName) <>
                      hide <>
                      value [] <>
                      metavar "'ARG1 [ARG2 ...]'" <>
                      help "Additional arguments to pass to 'docker run'")
  <*> many (option auto (long (dockerOptName dockerMountArgName) <>
                         hide <>
                         metavar "(PATH | HOST-PATH:CONTAINER-PATH)" <>
                         -- BUG FIX: help text previously said "mutliple".
                         help ("Mount volumes from host in container " ++
                               "(may specify multiple times)")))
  <*> maybeBoolFlags (dockerOptName dockerPassHostArgName)
                     "passing Docker daemon connection information into container"
                     hide
  <*> maybeStrOption (long (dockerOptName dockerDatabasePathArgName) <>
                      hide <>
                      metavar "PATH" <>
                      help "Location of image usage tracking database")
  where
    -- All Docker flags are namespaced under "docker-".
    dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName
    maybeStrOption = optional . option str
    -- Splits a single quoted argument string on whitespace.
    wordsStrOption = option (fmap words str)
    hide = if showOptions
              then idm
              else internal <> hidden
-- | Interprets DockerOptsMonoid options.
-- Resolves the partial/monoidal command-line options into a concrete
-- 'DockerOpts', filling in defaults.  The default image repo is "fpco/dev"
-- tagged with the project's LTS snapshot; non-LTS resolvers have no default
-- tag and raise 'ResolverNotSupportedException' (lazily, via 'throw').
dockerOptsFromMonoid :: Maybe Project -> Path Abs Dir -> DockerOptsMonoid -> DockerOpts
dockerOptsFromMonoid mproject stackRoot DockerOptsMonoid{..} = DockerOpts
  {dockerEnable = fromMaybe (fromMaybe False dockerMonoidExists) dockerMonoidEnable
  ,dockerImage =
     let defaultTag =
           case mproject of
             Nothing -> ""
             Just proj ->
               case projectResolver proj of
                 ResolverSnapshot n@(LTS _ _) -> ":" ++ (T.unpack (renderSnapName n))
                 _ -> throw (ResolverNotSupportedException (projectResolver proj))
     in case dockerMonoidRepoOrImage of
          Nothing -> "fpco/dev" ++ defaultTag
          Just (DockerMonoidImage image) -> image
          Just (DockerMonoidRepo repo) ->
            case find (`elem` (":@" :: String)) repo of
              Just _ -> -- Repo already specified a tag or digest, so don't append default
                        repo
              Nothing -> repo ++ defaultTag
  -- Login defaults to "on" whenever a registry username was supplied.
  ,dockerRegistryLogin = fromMaybe (isJust (emptyToNothing dockerMonoidRegistryUsername))
                                   dockerMonoidRegistryLogin
  ,dockerRegistryUsername = emptyToNothing dockerMonoidRegistryUsername
  ,dockerRegistryPassword = emptyToNothing dockerMonoidRegistryPassword
  ,dockerAutoPull = fromMaybe False dockerMonoidAutoPull
  ,dockerDetach = fromMaybe False dockerMonoidDetach
  ,dockerPersist = fromMaybe False dockerMonoidPersist
  ,dockerContainerName = emptyToNothing dockerMonoidContainerName
  ,dockerRunArgs = dockerMonoidRunArgs
  ,dockerMount = dockerMonoidMount
  ,dockerPassHost = fromMaybe False dockerMonoidPassHost
  ,dockerDatabasePath =
     case dockerMonoidDatabasePath of
       Nothing -> stackRoot </> $(mkRelFile "docker.db")
       Just fp -> case parseAbsFile fp of
                    Left e -> throw (InvalidDatabasePathException e)
                    Right p -> p
  }
  -- Treat empty strings from the command line as "not given".
  where emptyToNothing Nothing = Nothing
        emptyToNothing (Just s) | null s = Nothing
                                | otherwise = Just s
-- | Convenience function to decode a strict ByteString as UTF-8 and
-- convert the result to a String.
decodeUtf8 :: BS.ByteString -> String
decodeUtf8 = T.unpack . T.decodeUtf8
-- | Convenience function constructing a message for @$log*@ by
-- concatenating String pieces into a single 'Text'.
concatT :: [String] -> Text
concatT pieces = T.pack (concat pieces)
-- | Fail with friendly error if project root not set.
-- Uses impure 'throw', so the exception only surfaces when the resulting
-- path is actually demanded.
fromMaybeProjectRoot :: Maybe (Path Abs Dir) -> Path Abs Dir
fromMaybeProjectRoot = fromMaybe (throw CannotDetermineProjectRootException)
-- | Environment variable carrying the host's stack version (set by the host
-- when launching the container).
hostVersionEnvVar :: String
hostVersionEnvVar = "STACK_DOCKER_HOST_VERSION"
-- | Environment variable to pass required container stack version.
requireVersionEnvVar :: String
requireVersionEnvVar = "STACK_DOCKER_REQUIRE_VERSION"
-- | Environment variable that contains the sandbox ID.
sandboxIDEnvVar :: String
sandboxIDEnvVar = "DOCKER_SANDBOX_ID"
-- | Command-line argument for the "docker" subcommand.
dockerCmdName :: String
dockerCmdName = "docker"
-- | Command-line argument for @docker pull@.
dockerPullCmdName :: String
dockerPullCmdName = "pull"
-- | Command-line argument for @docker cleanup@.
dockerCleanupCmdName :: String
dockerCleanupCmdName = "cleanup"
-- | Version of 'stack' required to be installed in container.
requireContainerVersion :: Version
requireContainerVersion = $(mkVersion "0.0.0")
-- | Version of 'stack' required to be installed on the host.
requireHostVersion :: Version
requireHostVersion = $(mkVersion "0.0.0")
-- | Stack cabal package version (i.e. the version of this executable).
stackVersion :: Version
stackVersion = fromCabalVersion version
-- | Options for 'cleanup'.  Each @Maybe Integer@ is an age threshold in
-- days; 'Nothing' means that category is never removed by default.
data CleanupOpts = CleanupOpts
  { dcAction                                :: !CleanupAction
  , dcRemoveKnownImagesLastUsedDaysAgo      :: !(Maybe Integer)
  , dcRemoveUnknownImagesCreatedDaysAgo     :: !(Maybe Integer)
  , dcRemoveDanglingImagesCreatedDaysAgo    :: !(Maybe Integer)
  , dcRemoveStoppedContainersCreatedDaysAgo :: !(Maybe Integer)
  , dcRemoveRunningContainersCreatedDaysAgo :: !(Maybe Integer) }
  deriving (Show)
-- | Cleanup action: edit the plan interactively, apply it as-is, or just
-- print it without removing anything.
data CleanupAction = CleanupInteractive
                   | CleanupImmediate
                   | CleanupDryRun
  deriving (Show)
-- | Parsed result of @docker inspect@.
data Inspect = Inspect
  {iiConfig :: ImageConfig      -- ^ the object's @Config@ section
  ,iiCreated :: UTCTime         -- ^ creation timestamp (@Created@)
  ,iiId :: String               -- ^ full object ID (@Id@)
  ,iiVirtualSize :: Maybe Integer -- ^ image size in bytes, when reported
  }
  deriving (Show)
-- | Parse @docker inspect@ output (@VirtualSize@ is optional).
instance FromJSON Inspect where
  parseJSON v =
    do o <- parseJSON v
       (Inspect <$> o .: T.pack "Config"
                <*> o .: T.pack "Created"
                <*> o .: T.pack "Id"
                <*> o .:? T.pack "VirtualSize")
-- | Parsed @Config@ section of @docker inspect@ output.
data ImageConfig = ImageConfig
  {icEnv :: [String]} -- ^ environment entries of the form @KEY=value@
  deriving (Show)
-- | Parse @Config@ section of @docker inspect@ output; a missing @Env@
-- defaults to the empty list.
instance FromJSON ImageConfig where
  parseJSON v =
    do o <- parseJSON v
       (ImageConfig <$> o .:? T.pack "Env" .!= [])
-- | Exceptions thrown by Stack.Docker.  Human-readable messages live in the
-- 'Show' instance below.
data StackDockerException
  = DockerMustBeEnabledException
    -- ^ Docker must be enabled to use the command.
  | OnlyOnHostException
    -- ^ Command must be run on host OS (not in a container).
  | InspectFailedException String
    -- ^ @docker inspect@ failed.
  | NotPulledException String
    -- ^ Image does not exist.
  | InvalidCleanupCommandException String
    -- ^ Input to @docker cleanup@ has invalid command.
  | InvalidImagesOutputException String
    -- ^ Invalid output from @docker images@.
  | InvalidPSOutputException String
    -- ^ Invalid output from @docker ps@.
  | InvalidInspectOutputException String
    -- ^ Invalid output from @docker inspect@.
  | PullFailedException String
    -- ^ Could not pull a Docker image.
  | DockerTooOldException Version Version
    -- ^ Installed version of @docker@ below minimum version.
  | DockerVersionProhibitedException [Version] Version
    -- ^ Installed version of @docker@ is prohibited.
  | InvalidVersionOutputException
    -- ^ Invalid output from @docker --version@.
  | HostStackTooOldException Version (Maybe Version)
    -- ^ Version of @stack@ on host is too old for version in image.
  | ContainerStackTooOldException Version Version
    -- ^ Version of @stack@ in container/image is too old for version on host.
  | ResolverNotSupportedException Resolver
    -- ^ Only LTS resolvers are supported for default image tag.
  | CannotDetermineProjectRootException
    -- ^ Can't determine the project root (where to put @.docker-sandbox@).
  | DockerNotInstalledException
    -- ^ @docker --version@ failed.
  | InvalidDatabasePathException SomeException
    -- ^ Invalid global database path.
  deriving (Typeable)
-- | Exception instance for StackDockerException (default methods suffice).
instance Exception StackDockerException
-- | Show instance for StackDockerException.
-- Produces the user-facing error messages; 'Exception' falls back to 'show'
-- for 'displayException', so these strings reach the terminal verbatim.
instance Show StackDockerException where
  show DockerMustBeEnabledException =
    concat ["Docker must be enabled in your ",toFilePath stackDotYaml," to use this command."]
  show OnlyOnHostException =
    "This command must be run on host OS (not in a Docker container)."
  show (InspectFailedException image) =
    concat ["'docker inspect' failed for image after pull: ",image,"."]
  show (NotPulledException image) =
    concat ["The Docker image referenced by "
           ,toFilePath stackDotYaml
           ," has not\nbeen downloaded:\n    "
           ,image
           ,"\n\nRun '"
           ,unwords [stackProgName, dockerCmdName, dockerPullCmdName]
           ,"' to download it, then try again."]
  show (InvalidCleanupCommandException line) =
    concat ["Invalid line in cleanup commands: '",line,"'."]
  show (InvalidImagesOutputException line) =
    concat ["Invalid 'docker images' output line: '",line,"'."]
  show (InvalidPSOutputException line) =
    concat ["Invalid 'docker ps' output line: '",line,"'."]
  show (InvalidInspectOutputException msg) =
    concat ["Invalid 'docker inspect' output: ",msg,"."]
  show (PullFailedException image) =
    concat ["Could not pull Docker image:\n    "
           ,image
           ,"\nThere may not be an image on the registry for your resolver's LTS version in\n"
           ,toFilePath stackDotYaml
           ,"."]
  show (DockerTooOldException minVersion haveVersion) =
    concat ["Minimum docker version '"
           ,versionString minVersion
           ,"' is required (you have '"
           ,versionString haveVersion
           ,"')."]
  show (DockerVersionProhibitedException prohibitedVersions haveVersion) =
    concat ["These Docker versions are prohibited (you have '"
           ,versionString haveVersion
           ,"'): "
           ,concat (intersperse ", " (map versionString prohibitedVersions))
           ,"."]
  show InvalidVersionOutputException =
    "Cannot get Docker version (invalid 'docker --version' output)."
  show (HostStackTooOldException minVersion (Just hostVersion)) =
    concat ["The host's version of '"
           ,stackProgName
           ,"' is too old for this Docker image.\nVersion "
           ,versionString minVersion
           ," is required; you have "
           ,versionString hostVersion
           ,"."]
  show (HostStackTooOldException minVersion Nothing) =
    concat ["The host's version of '"
           ,stackProgName
           ,"' is too old.\nVersion "
           ,versionString minVersion
           ," is required."]
  show (ContainerStackTooOldException requiredVersion containerVersion) =
    concat ["The Docker container's version of '"
           ,stackProgName
           ,"' is too old.\nVersion "
           ,versionString requiredVersion
           ," is required; the container has "
           ,versionString containerVersion
           ,"."]
  show (ResolverNotSupportedException resolver) =
    concat ["Resolver not supported for Docker images:\n    "
           ,show resolver
           ,"\nUse an LTS resolver, or set the '"
           ,T.unpack dockerImageArgName
           ,"' explicitly, in "
           ,toFilePath stackDotYaml
           ,"."]
  show CannotDetermineProjectRootException =
    "Cannot determine project root directory for Docker sandbox."
  show DockerNotInstalledException=
    "Cannot find 'docker' in PATH.  Is Docker installed?"
  show (InvalidDatabasePathException ex) =
    concat ["Invalid database path: ",show ex]
| mietek/stack | src/Stack/Docker.hs | bsd-3-clause | 45,744 | 0 | 29 | 14,663 | 9,711 | 5,005 | 4,706 | 860 | 21 |
module Network.Mail.Postie.Address
( Address,
-- | Represents an email address
address,
-- | Returns address from local and domain part
addressLocalPart,
-- | Returns local part of address
addressDomain,
-- | Retuns domain part of address
toByteString,
-- | Resulting ByteString has format localPart\@domainPart.
toLazyByteString,
-- | Resulting Lazy.ByteString has format localPart\@domainPart.
parseAddress,
-- | Parses a ByteString to Address
addrSpec,
)
where
import Control.Applicative
import Control.Monad (void)
import Data.Attoparsec.ByteString.Char8
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Maybe (fromMaybe)
import Data.String
import Data.Typeable (Typeable)
-- | An email address split into its local part and its domain part.
-- Construct values via 'address', 'parseAddress', or the 'IsString' literal.
data Address
  = Address
      { addressLocalPart :: !BS.ByteString,
        addressDomain :: !BS.ByteString
      }
  deriving (Eq, Ord, Typeable)
-- | Rendered as @localPart\@domainPart@.
instance Show Address where
  show = BS.unpack . toByteString
-- | Allows string literals as addresses; an invalid literal is a runtime
-- 'error', so only use this with literals known to be well-formed.
instance IsString Address where
  fromString = fromMaybe (error "invalid email literal") . parseAddress . BS.pack
-- | Build an 'Address' from a local part and a domain part (no validation).
address :: BS.ByteString -> BS.ByteString -> Address
address = Address
-- | Render an address as @localPart\@domainPart@ (strict ByteString).
toByteString :: Address -> BS.ByteString
toByteString (Address localPart domainPart) =
  localPart `BS.append` BS.cons '@' domainPart
-- | Render an address as @localPart\@domainPart@ (lazy ByteString, built
-- from the existing strict chunks without copying).
toLazyByteString :: Address -> LBS.ByteString
toLazyByteString (Address localPart domainPart) =
  LBS.fromChunks [localPart, BS.singleton '@', domainPart]
-- | Parse a ByteString into an 'Address'; 'Nothing' on any syntax error.
-- NOTE(review): 'maybeResult' yields 'Nothing' for a 'Partial' parse as
-- well, so input that merely needs more bytes also fails — confirm this is
-- the intended behavior for truncated input.
parseAddress :: BS.ByteString -> Maybe Address
parseAddress = maybeResult . parse addrSpec
-- | Address parser (borrowed from email-validate-2.0.1): a local part,
-- a literal \'@\', then a domain.
addrSpec :: Parser Address
addrSpec = Address <$> (local <* char '@') <*> domain
-- | Local (before-@) part: dot-separated atoms or quoted strings.
local :: Parser BS.ByteString
local = dottedAtoms
-- | Domain part: dotted atoms or a bracketed domain literal.
domain :: Parser BS.ByteString
domain = dottedAtoms <|> domainLiteral
-- | One or more atoms/quoted strings joined by \'.\', each optionally
-- surrounded by comments/folding whitespace (CFWS), which is discarded.
dottedAtoms :: Parser BS.ByteString
dottedAtoms =
  BS.intercalate (BS.singleton '.')
    <$> (optional cfws *> (atom <|> quotedString) <* optional cfws) `sepBy1` char '.'
-- | A run of one or more atom characters.
atom :: Parser BS.ByteString
atom = takeWhile1 isAtomText
-- | Atom characters: alphanumerics plus the permitted symbol set.
isAtomText :: Char -> Bool
isAtomText x = isAlphaNum x || inClass "!#$%&'*+/=?^_`{|}~-" x
-- | Bracketed domain literal (e.g. @[192.168.0.1]@); surrounding CFWS is
-- consumed and the brackets are re-attached to the result.
domainLiteral :: Parser BS.ByteString
domainLiteral =
  BS.cons '[' . flip BS.snoc ']' . BS.concat
    <$> between
      (optional cfws *> char '[')
      (char ']' <* optional cfws)
      (many (optional fws >> takeWhile1 isDomainText) <* optional fws)
-- | Characters allowed inside a domain literal.
isDomainText :: Char -> Bool
isDomainText x = inClass "\33-\90\94-\126" x || isObsNoWsCtl x
-- | Quoted local part; the surrounding double quotes are kept in the result.
quotedString :: Parser BS.ByteString
quotedString =
  (\x -> BS.concat [BS.singleton '"', BS.concat x, BS.singleton '"'])
    <$> between
      (char '"')
      (char '"')
      (many (optional fws >> quotedContent) <* optional fws)
-- | A run of plain quoted text, or a single escaped character.
quotedContent :: Parser BS.ByteString
quotedContent = takeWhile1 isQuotedText <|> quotedPair
-- | Characters allowed unescaped inside a quoted string.
isQuotedText :: Char -> Bool
isQuotedText x = inClass "\33\35-\91\93-\126" x || isObsNoWsCtl x
-- | Backslash escape; the backslash itself is preserved in the result.
quotedPair :: Parser BS.ByteString
quotedPair = BS.cons '\\' . BS.singleton <$> (char '\\' *> (vchar <|> wsp <|> lf <|> cr <|> obsNoWsCtl <|> nullChar))
-- | Optional comments and/or folding whitespace (CFWS), all discarded.
cfws :: Parser ()
cfws = ignore $ many (comment <|> fws)
-- | Folding whitespace: runs of WSP optionally split by CRLF line folds.
fws :: Parser ()
fws =
  ignore $
    ignore (wsp1 >> optional (crlf >> wsp1))
      <|> ignore (many1 (crlf >> wsp1))
-- | Run a parser purely for its effect, discarding its result.
ignore :: Parser a -> Parser ()
ignore = void
-- | @between l r x@ parses @l@, then @x@, then @r@, returning @x@'s result.
between :: Parser l -> Parser r -> Parser x -> Parser x
between l r x = l *> x <* r
-- | Parenthesised comment; contents (including nesting) are discarded.
comment :: Parser ()
comment =
  ignore
    ( between (char '(') (char ')') $
        many (ignore commentContent <|> fws)
    )
-- | Comment body: plain text, an escape, or a nested comment.
commentContent :: Parser ()
commentContent = skipWhile1 isCommentText <|> ignore quotedPair <|> comment
-- | Characters allowed unescaped inside a comment.
isCommentText :: Char -> Bool
isCommentText x = inClass "\33-\39\42-\91\93-\126" x || isObsNoWsCtl x
-- | The NUL character (only reachable through a quoted pair).
nullChar :: Parser Char
nullChar = char '\0'
-- | Like 'skipWhile', but requires at least one matching character.
skipWhile1 :: (Char -> Bool) -> Parser ()
skipWhile1 x = satisfy x >> skipWhile x
-- | One or more whitespace characters, skipped.
wsp1 :: Parser ()
wsp1 = skipWhile1 isWsp
-- | A single space or horizontal tab.
wsp :: Parser Char
wsp = satisfy isWsp
isWsp :: Char -> Bool
isWsp x = x == ' ' || x == '\t'
-- | ASCII-only letter/digit test (shadows 'Data.Char.isAlphaNum' locally).
isAlphaNum :: Char -> Bool
isAlphaNum x = isDigit x || isAlpha_ascii x
cr :: Parser Char
cr = char '\r'
lf :: Parser Char
lf = char '\n'
-- | A CRLF sequence, discarded.
crlf :: Parser ()
crlf = cr >> lf >> return ()
-- | Printable US-ASCII (RFC 5234 VCHAR range).
isVchar :: Char -> Bool
isVchar = inClass "\x21-\x7e"
vchar :: Parser Char
vchar = satisfy isVchar
-- | Obsolete control characters still tolerated by RFC 5322 (obs-NO-WS-CTL).
isObsNoWsCtl :: Char -> Bool
isObsNoWsCtl = inClass "\1-\8\11-\12\14-\31\127"
obsNoWsCtl :: Parser Char
obsNoWsCtl = satisfy isObsNoWsCtl
| alexbiehl/postie | src/Network/Mail/Postie/Address.hs | bsd-3-clause | 4,421 | 0 | 13 | 858 | 1,412 | 727 | 685 | 123 | 1 |
import ForSyDe.Shallow
import Test.QuickCheck
-- | Strictly positive SDF rates.
type Rate = Positive Int

-- | Generate a signal from an arbitrary list of events.
instance (Arbitrary a) => Arbitrary (Signal a) where
  arbitrary = signal <$> arbitrary

-- | Generate extended values, including occasional 'Abst' events.
-- The previous generator only ever produced 'Prst' values, which made the
-- absent-event property ('prop_AbstSY1') vacuously true (0 == 0); mixing in
-- 'Abst' at a 1:3 ratio actually exercises absent handling.
instance Arbitrary a => Arbitrary (AbstExt a) where
  arbitrary = frequency [(3, Prst <$> arbitrary), (1, return Abst)]
-- | Count the events in a signal that are equal to the given value.
-- (Explicit type signature added; the binding previously had none.)
countEvent :: (Eq a, Num n) => a -> Signal a -> n
countEvent _ NullS = 0
countEvent a (x :- xs) | a == x = 1 + countEvent a xs
                       | otherwise = countEvent a xs
-- SY Process Properties
-- | 'delaySY' prepends exactly one initial event.
prop_delaySY xs = 1 + lengthS xs == lengthS (delaySY 1 xs)
  where types = xs :: Signal Int
-- | 'mealySY' produces one output event per input event.
prop_mealySY xs = lengthS xs == lengthS (mealySY (+) (-) 1 xs)
  where types = xs :: Signal Int
-- | 'mapSY' preserves the signal length.
prop_mapSY xs = lengthS xs == lengthS (mapSY (+1) xs)
  where types = xs :: Signal Int
-- | 'zipWithSY' output length is the shorter of its two inputs.
prop_zipWithSY xs ys = min (lengthS xs) (lengthS ys) == lengthS (zipWithSY (+) xs ys)
  where types = (xs :: Signal Int, ys :: Signal Int)
-- | Lifting a function with 'psi' preserves the number of absent events.
prop_AbstSY1 xs = inputAbst xs == outputAbst xs
  where types = xs :: Signal (AbstExt Int)
        inputAbst = countEvent Abst
        outputAbst x = countEvent Abst (mapSY (psi (+1)) x)
-- SDF Process Properties
-- | Token conservation around an SDF feedback loop: the loop cannot fire
-- more often than its external input token budget allows.
prop_feedbackSDF xs = (lengthS xs) `div` (3 * 2) >= (lengthS out) `div` (2 * 3)
  where types = xs :: Signal Int
        out = actor21SDF (3,1) 3 (\x y -> (+(head y)) <$> x) xs st
        -- Feedback edge with three initial tokens to break the cycle.
        st = delaySDF [1,1,1] $ actor21SDF (2,4) 2 (zipWith (+)) st xs
-- | Single-input SDF actor: the firing count implied by the input tokens
-- bounds the one implied by the output tokens, to within one firing.
prop_actor11SDF gc gp xs = rateI >= rateO && rateI <= rateO + 1
  where types = (gc :: Rate, gp :: Rate, xs :: Signal Int)
        -- BUG FIX: the production rate previously read 'getPositive gc'
        -- twice, leaving 'gp' unused; it must use the production rate 'gp'
        -- (compare the parallel bindings in prop_actor21SDF).
        (c, p) = (getPositive gc, getPositive gp)
        rateI = lengthS xs `div` c
        rateO = lengthS out `div` p
        out = actor11SDF c p (take p . repeat . head) xs
-- | Two-input SDF actor: the firing count is bounded by each input's token
-- supply, and the tighter of the two bounds exceeds the output-implied
-- count by at most one firing.
prop_actor21SDF gc gp xs ys = rateI1 >= rateO && rateI2 >= rateO && (min rateI1 rateI2) <= rateO + 1
  where types = (gc :: (Rate,Rate), gp :: Rate, xs :: Signal Int, ys :: Signal Int)
        (c1, c2, p) = (getPositive $ fst gc, getPositive $ snd gc, getPositive gp)
        rateI1 = lengthS xs `div` c1
        rateI2 = lengthS ys `div` c2
        rateO = lengthS out `div` p
        -- The first consumed token list is ignored; output repeats the
        -- head of the second list p times.
        out = actor21SDF (c1,c2) p (\x -> take p . repeat . head) xs ys
-- | Run every property with a short label.
main :: IO ()
main = do
  let runTest s prop = putStr (s ++ " ") >> quickCheck prop
  runTest "SY delay num events" prop_delaySY
  runTest "SY mealy num events" prop_mealySY
  runTest "SY map num events" prop_mapSY
  runTest "SY map num absents" prop_AbstSY1
  runTest "SY zipWith num events" prop_zipWithSY
  -- BUG FIX: this label previously re-ran prop_zipWithSY, so the feedback
  -- property was defined but never executed.
  runTest "SDF feedback tokens" prop_feedbackSDF
  runTest "SDF actor11 tokens" prop_actor11SDF
  runTest "SDF actor21 tokens" prop_actor21SDF
| forsyde/forsyde-shallow | test/Props.hs | bsd-3-clause | 2,562 | 0 | 13 | 674 | 1,058 | 547 | 511 | 53 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Auth.Logout
-- Copyright : (C) 2015 Ricky Elrod
-- License : MIT (see LICENSE file)
-- Maintainer : (C) Ricky Elrod <ricky@elrod.me>
-- Stability : experimental
--
-- Functions and handlers for logging the user out.
module Auth.Logout where
import Application
import Snap.Core
import Snap.Snaplet
import Snap.Snaplet.Auth
-- | Ends the current auth session and sends the user back to the site index.
handleLogout :: Handler App (AuthManager App) ()
handleLogout = do
  logout
  redirect "/"
| meoblast001/quotum-snap | src/Auth/Logout.hs | mit | 520 | 0 | 7 | 84 | 67 | 42 | 25 | 8 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module DBTypes where
import Data.Aeson
import Data.Aeson.TH
import Data.Text
import Data.Serialize
import Data.Time.Clock
import GHC.Generics
import Control.Lens
import Types
import Models
import Data.Default
-- | DB key identifying a tenant row.
type TenantID = Key DBTenant
-- | Tenants are represented directly by their persistent record.
type Tenant = DBTenant
-- | Outward-facing tenant view; currently identical to the DB record.
type TenantOutput = DBTenant
-- | DB key identifying a user row.
type UserID = Key DBUser
-- | Lookup failures for entities addressed by key.
data DBError = TenantNotFound TenantID
             | UserNotFound UserID deriving (Eq, Show)
-- | Reasons user creation can fail; each carries the offending name.
data UserCreationError = UserExists Text
                       | TenantDoesn'tExist Text
                        deriving (Eq, Show)
-- | A named bundle of capabilities granted to a user.
data Role = Role { roleName :: Text
                 , roleCapabilities :: [Capability]
                 }
-- | The default role grants no capabilities at all.
instance Default Role where
    def = Role "Default Role" []
-- | Individual permissions that can be attached to a 'Role'.
data Capability = ViewUserDetails
                | EditUserDetails
                | EditUserRoles
                | EditTenantDetails
-- | Records which tenant was activated and when.
data Activation =
    Activation { activationTenantID :: TenantID
               , activationTime :: UTCTime
               } deriving (Generic)
-- | The identifying fields of a tenant, shared by input and output.
data TenantIdent =
    TenantI { _name :: Text
            , _backofficeDomain :: Text
            } deriving (Generic)
-- JSON instances strip the leading underscore from field names.
instance FromJSON TenantIdent where
    parseJSON = genericParseJSON (defaultOptions { fieldLabelModifier = Prelude.drop 1})
instance ToJSON TenantIdent where
    toEncoding = genericToEncoding (defaultOptions { fieldLabelModifier = Prelude.drop 1})
    toJSON = genericToJSON (defaultOptions { fieldLabelModifier = Prelude.drop 1})
instance HasName TenantIdent where
    name = lens _name (\ti n -> ti { _name = n } )
instance HasBackofficeDomain TenantIdent where
    backofficeDomain = lens _backofficeDomain (\ti bd -> ti { _backofficeDomain = bd } )
-- | Tenant-creation input carries exactly the identifying fields.
type TenantInput = TenantIdent
-- | Whether a field is statically present, absent, or only known at runtime.
data FieldStatus = Present | Absent | Unknown
-- | Maps a 'FieldStatus' index to the concrete field type: present
-- fields hold a value, absent ones unit, unknown ones a 'Maybe'.
type family Omittable (state :: FieldStatus) a where
    Omittable Present a = a
    Omittable Absent a = ()
    Omittable Unknown a = Maybe a
-- | Things from which a 'TenantID' can be read/updated via a lens.
class HasTenantID s where
    tenantID :: Lens' s TenantID
instance HasTenantID DBUser where
    tenantID = dBUserTenantID
-- | User record parameterised by which fields are present (password,
-- status, role, id); see 'Omittable' for how each index is realised.
data UserBase (pass :: FieldStatus) (st :: FieldStatus) (rl :: FieldStatus) (id :: FieldStatus) =
    UserB { _userFirstName :: Text
          , _userLastName :: Text
          , _userEmail :: Text
          , _userPhone :: Text
          , _userUsername :: Text
          , _userTenantID :: TenantID
          , _userPassword :: Omittable pass Text
          , _userStatus :: Omittable st UserStatus
          , _userRole :: Omittable rl Role
          , _userUserID :: Omittable id UserID
          } deriving (Generic)
makeLenses ''UserBase
instance HasHumanName (UserBase pass st rl id) where
    firstName = userFirstName
    lastName = userLastName
instance HasContactDetails (UserBase pass st rl id) where
    email = userEmail
    phone = userPhone
instance HasUsername (UserBase pass st rl id) where
    username = userUsername
-- Only users whose password field is 'Present' expose a password lens.
instance HasPassword (UserBase Present st rl id) where
    password = userPassword
instance HasTenantID (UserBase pas st rl id) where
    tenantID = userTenantID
-- Show is derivable only when every omittable field is itself showable.
deriving instance (Show (Omittable pass Text),
                   Show (Omittable st UserStatus),
                   Show (Omittable rl Role),
                   Show (Omittable id UserID))
         => Show (UserBase pass st rl id)
-- | Creation input: password present, everything else absent.
type UserInput = UserBase Present Absent Absent Absent
-- | Fully-populated user as stored, with the password omitted.
type User = UserBase Absent Present Present Present
| vacationlabs/haskell-webapps | ServantPersistent/src/DBTypes.hs | mit | 3,713 | 0 | 11 | 955 | 894 | 507 | 387 | 97 | 0 |
-- Nim example from chapter 10 of Programming in Haskell,
-- Graham Hutton, Cambridge University Press, 2016.
import Data.Char
-- Game utilities
-- | Swap turns between the two players: player 1's successor is
-- player 2 and vice versa.  Any other argument is a programming
-- error; it is reported explicitly instead of failing with a bare
-- pattern-match exception.
next :: Int -> Int
next 1 = 2
next 2 = 1
next n = error ("next: invalid player " ++ show n)
-- | A board records how many stars remain in each row, top to bottom.
type Board = [Int]

-- | Starting position: five rows holding 5,4,3,2,1 stars respectively.
initial :: Board
initial = reverse [1..5]
-- | The game is over once every row has been emptied of stars.
finished :: Board -> Bool
finished board = all (== 0) board
-- | A move is legal when the (1-based) row exists on the board, at
-- least one star is removed, and the row holds enough stars.  The
-- explicit bounds check prevents the out-of-range crash from (!!),
-- and the positivity check rejects zero/negative removals (a
-- negative @num@ would otherwise ADD stars in 'move').
valid :: Board -> Int -> Int -> Bool
valid board row num =
  row >= 1 && row <= length board && num > 0 && board !! (row-1) >= num
-- | Remove @num@ stars from the given (1-based) row, leaving every
-- other row untouched.
move :: Board -> Int -> Int -> Board
move board row num = zipWith adjust [1..] board
  where adjust r n
          | r == row  = n - num
          | otherwise = n
-- IO utilities
-- | Print one board row as "<row>: " followed by one star per token.
putRow :: Int -> Int -> IO ()
putRow row num = do putStr (show row)
                    putStr ": "
                    putStrLn (concat (replicate num "* "))
-- | Print every row of the board, numbering rows from 1.  Works for a
-- board of any size; the original pattern matched only exactly five
-- rows and crashed with a pattern-match failure on anything else.
-- Output for the standard five-row board is unchanged.
putBoard :: Board -> IO ()
putBoard board = sequence_ [putRow r n | (r,n) <- zip [1..] board]
-- | Read a single digit from the user after showing the prompt; on
-- non-digit input, print an error and ask again.
getDigit :: String -> IO Int
getDigit prompt = do putStr prompt
                     x <- getChar
                     newline
                     if isDigit x then
                        return (digitToInt x)
                     else
                        do putStrLn "ERROR: Invalid digit"
                           getDigit prompt
-- | Emit a single newline character to stdout.
newline :: IO ()
newline = putStr "\n"
-- Game of nim
-- | Main game loop: show the board, then either announce the winner
-- or read and apply the current player's move, recursing with the
-- other player; invalid moves re-prompt the same player.
play :: Board -> Int -> IO ()
play board player =
   do newline
      putBoard board
      if finished board then
         -- board is empty: the previous player (next player, since
         -- turns alternate between 1 and 2) took the last star and wins
         do newline
            putStr "Player "
            putStr (show (next player))
            putStrLn " wins!!"
      else
         do newline
            putStr "Player "
            putStrLn (show player)
            row <- getDigit "Enter a row number: "
            num <- getDigit "Stars to remove : "
            if valid board row num then
               play (move board row num) (next player)
            else
               do newline
                  putStrLn "ERROR: Invalid move"
                  play board player
-- | Start a game of nim on the initial board, with player 1 to move.
nim :: IO ()
nim = play initial 1
| thalerjonathan/phd | coding/learning/haskell/grahambook/Code_Solutions/nim.hs | gpl-3.0 | 2,012 | 0 | 14 | 833 | 655 | 318 | 337 | 55 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Test.AWS.CloudTrail.Internal
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.CloudTrail.Internal where
import Test.AWS.Prelude
| fmapfmapfmap/amazonka | amazonka-cloudtrail/test/Test/AWS/CloudTrail/Internal.hs | mpl-2.0 | 627 | 0 | 4 | 140 | 25 | 21 | 4 | 4 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Query.Filter (testQuery_Filter) where
import Test.QuickCheck hiding (Result)
import Test.QuickCheck.Monadic
import qualified Data.Map as Map
import Data.List
import Text.JSON (showJSON)
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Objects (genEmptyCluster)
import Ganeti.BasicTypes
import Ganeti.JSON
import Ganeti.Objects
import Ganeti.Query.Filter
import Ganeti.Query.Language
import Ganeti.Query.Query
import Ganeti.Utils (niceSort)
-- * Helpers
-- | Run a query against the given config and check that the returned
-- rows match @expected@ exactly; @descr@ labels any failure message.
checkQueryResults :: ConfigData -> Query -> String
                  -> [[ResultEntry]] -> Property
checkQueryResults cfg qr descr expected = monadicIO $ do
  result <- run (query cfg False qr) >>= resultProp
  stop $ printTestCase ("Inconsistent results in " ++ descr)
         (qresData result ==? expected)
-- | Makes a node name query, given a filter.
makeNodeQuery :: Filter FilterField -> Query
makeNodeQuery = Query (ItemTypeOpCode QRNode) ["name"]
-- | Checks that a given query fails: a 'Bad' result passes, an 'Ok'
-- result fails the property.
expectBadQuery :: ConfigData -> Query -> String -> Property
expectBadQuery cfg qr descr = monadicIO $ do
  result <- run (query cfg False qr)
  case result of
    Bad _ -> return ()
    Ok a -> stop . failTest $ "Expected failure in " ++ descr ++
                              " but got " ++ show a
-- | A helper to construct a list of result rows from expected names.
namesToResult :: [String] -> [[ResultEntry]]
namesToResult = map ((:[]) . ResultEntry RSNormal . Just . showJSON)
-- | Generates a cluster with between @min_nodes@ and @max_nodes@
-- nodes and returns it together with its nice-sorted node names.
genClusterNames :: Int -> Int -> Gen (ConfigData, [String])
genClusterNames min_nodes max_nodes = do
  numnodes <- choose (min_nodes, max_nodes)
  cfg <- genEmptyCluster numnodes
  return (cfg, niceSort . Map.keys . fromContainer $ configNodes cfg)
-- * Test cases
-- | Tests single node filtering: an EQ filter on the name returns
-- exactly that node, its negation returns all the others, (LT && GT)
-- can never match, and (LE && GE) behaves like EQ.
prop_node_single_filter :: Property
prop_node_single_filter =
  forAll (genClusterNames 1 maxNodes) $ \(cfg, allnodes) ->
  forAll (elements allnodes) $ \nname ->
  let fvalue = QuotedString nname
      buildflt n = n "name" fvalue
      expsingle = namesToResult [nname]
      othernodes = nname `delete` allnodes
      expnot = namesToResult othernodes
      test_query = checkQueryResults cfg . makeNodeQuery
  in conjoin
       [ test_query (buildflt EQFilter) "single-name 'EQ' filter" expsingle
       , test_query (NotFilter (buildflt EQFilter))
         "single-name 'NOT EQ' filter" expnot
       , test_query (AndFilter [buildflt LTFilter, buildflt GTFilter])
         "single-name 'AND [LT,GT]' filter" []
       , test_query (AndFilter [buildflt LEFilter, buildflt GEFilter])
         "single-name 'And [LE,GE]' filter" expsingle
       ]
-- | Tests node filtering based on name equality: many 'OrFilter'
-- should return all results combined, many 'AndFilter' together
-- should return nothing. Note that we need at least 2 nodes so that
-- the 'AndFilter' case breaks.
prop_node_many_filter :: Property
prop_node_many_filter =
  forAll (genClusterNames 2 maxNodes) $ \(cfg, nnames) ->
  -- nnames has >= 2 elements, so the conjunction of distinct
  -- name-equality filters is guaranteed to be unsatisfiable
  let eqfilter = map (EQFilter "name" . QuotedString) nnames
      alln = namesToResult nnames
      test_query = checkQueryResults cfg . makeNodeQuery
      num_zero = NumericValue 0
  in conjoin
       [ test_query (OrFilter eqfilter) "all nodes 'Or' name filter" alln
       , test_query (AndFilter eqfilter) "all nodes 'And' name filter" []
       -- this next test works only because genEmptyCluster generates a
       -- cluster with no instances
       , test_query (EQFilter "pinst_cnt" num_zero) "pinst_cnt 'Eq' 0" alln
       , test_query (GTFilter "sinst_cnt" num_zero) "sinst_cnt 'GT' 0" []
       ]
-- | Tests name ordering consistency: requesting a 'simple filter'
-- results in identical name ordering as the wanted names, requesting
-- a more complex filter results in a niceSort-ed order.
-- (The cluster is kept small so 'subsequences' stays cheap.)
prop_node_name_ordering :: Property
prop_node_name_ordering =
  forAll (genClusterNames 2 6) $ \(cfg, nnames) ->
  forAll (elements (subsequences nnames)) $ \sorted_nodes ->
  forAll (elements (permutations sorted_nodes)) $ \chosen_nodes ->
  let orfilter = OrFilter $ map (EQFilter "name" . QuotedString) chosen_nodes
      alln = namesToResult chosen_nodes
      all_sorted = namesToResult $ niceSort chosen_nodes
      test_query = checkQueryResults cfg . makeNodeQuery
  in conjoin
       [ test_query orfilter "simple filter/requested" alln
       , test_query (AndFilter [orfilter]) "complex filter/sorted" all_sorted
       ]
-- | Tests node regex filtering: a match-all regexp on the name field
-- must return every node in the cluster.
prop_node_regex_filter :: Property
prop_node_regex_filter =
  forAll (genClusterNames 0 maxNodes) $ \(cfg, nnames) ->
  case mkRegex ".*"::Result FilterRegex of
    Bad msg -> failTest $ "Can't build regex?! Error: " ++ msg
    Ok rx ->
      checkQueryResults cfg (makeNodeQuery (RegexpFilter "name" rx))
        "rows for all nodes regexp filter" $ namesToResult nnames
-- | Tests that ill-typed filters are rejected: regex/true filters
-- against non-string fields, numeric-vs-string mismatches, and
-- 'ContainsFilter' on non-list fields must all make the query fail.
-- (The original header comment was a copy-paste of the regex test.)
prop_node_bad_filter :: String -> Int -> Property
prop_node_bad_filter rndname rndint =
  forAll (genClusterNames 1 maxNodes) $ \(cfg, _) ->
  let test_query = expectBadQuery cfg . makeNodeQuery
      string_value = QuotedString rndname
      numeric_value = NumericValue $ fromIntegral rndint
  in case mkRegex ".*"::Result FilterRegex of
       Bad msg -> failTest $ "Can't build regex?! Error: " ++ msg
       Ok rx ->
         conjoin
           [ test_query (RegexpFilter "offline" rx)
             "regex filter against boolean field"
           , test_query (EQFilter "name" numeric_value)
             "numeric value eq against string field"
           , test_query (TrueFilter "name")
             "true filter against string field"
           , test_query (EQFilter "offline" string_value)
             "quoted string eq against boolean field"
           , test_query (ContainsFilter "name" string_value)
             "quoted string in non-list field"
           , test_query (ContainsFilter "name" numeric_value)
             "numeric value in non-list field"
           ]
-- | Tests 'makeSimpleFilter': names become quoted-string EQ filters,
-- numeric IDs become numeric EQ filters, and an empty list of wanted
-- items yields 'EmptyFilter'.
prop_makeSimpleFilter :: Property
prop_makeSimpleFilter =
  forAll (resize 10 $ listOf1 genName) $ \names ->
  forAll (resize 10 $ listOf1 arbitrary) $ \ids ->
  forAll genName $ \namefield ->
  conjoin [ printTestCase "test expected names" $
            makeSimpleFilter namefield (map Left names) ==?
            OrFilter (map (EQFilter namefield . QuotedString) names)
          , printTestCase "test expected IDs" $
            makeSimpleFilter namefield (map Right ids) ==?
            OrFilter (map (EQFilter namefield . NumericValue) ids)
          , printTestCase "test empty names" $
            makeSimpleFilter namefield [] ==? EmptyFilter
          ]
-- Collect all properties above into the "Query/Filter" test suite.
testSuite "Query/Filter"
  [ 'prop_node_single_filter
  , 'prop_node_many_filter
  , 'prop_node_name_ordering
  , 'prop_node_regex_filter
  , 'prop_node_bad_filter
  , 'prop_makeSimpleFilter
  ]
| ganeti-github-testing/ganeti-test-1 | test/hs/Test/Ganeti/Query/Filter.hs | bsd-2-clause | 8,498 | 0 | 19 | 1,765 | 1,634 | 851 | 783 | 132 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
-- Concurrent execution with dependencies. Types currently hard-coded for needs
-- of stack, but could be generalized easily.
module Control.Concurrent.Execute
( ActionType (..)
, ActionId (..)
, ActionContext (..)
, Action (..)
, runActions
) where
import Control.Applicative
import Control.Concurrent.Async (Concurrently (..), async)
import Control.Concurrent.STM
import Control.Exception
import Control.Monad (join, unless)
import Data.Foldable (sequenceA_)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Typeable (Typeable)
import Prelude -- Fix AMP warning
import Stack.Types
-- | The two kinds of scheduled work per package: regular builds and
-- final actions.
data ActionType
    = ATBuild
    | ATFinal
    deriving (Show, Eq, Ord)
-- | Unique identifier of an action: the package plus the action kind.
data ActionId = ActionId !PackageIdentifier !ActionType
    deriving (Show, Eq, Ord)
-- | A schedulable unit of work together with the ids it depends on.
data Action = Action
    { actionId :: !ActionId
    , actionDeps :: !(Set ActionId)
    , actionDo :: !(ActionContext -> IO ())
    }
-- | Information handed to a running action.
data ActionContext = ActionContext
    { acRemaining :: !(Set ActionId)
    -- ^ Does not include the current action
    }
    deriving Show
-- | Shared mutable scheduler state, read and written under STM.
data ExecuteState = ExecuteState
    { esActions :: TVar [Action]
    , esExceptions :: TVar [SomeException]
    , esInAction :: TVar (Set ActionId)
    , esCompleted :: TVar Int
    , esFinalLock :: Maybe (TMVar ())
    , esKeepGoing :: Bool
    }
-- | Internal failure: no runnable action exists yet the dependency
-- graph is not empty, i.e. the build plan can never drain.
data ExecuteException
    = InconsistentDependencies
    deriving Typeable
instance Exception ExecuteException
instance Show ExecuteException where
    show InconsistentDependencies =
        "Inconsistent dependencies were discovered while executing your build plan. This should never happen, please report it as a bug to the stack team."
-- | Execute the given actions on up to the given number of worker
-- threads, respecting inter-action dependencies; returns every
-- exception raised by an action.
runActions :: Int -- ^ threads
           -> Bool -- ^ keep going after one task has failed
           -> Bool -- ^ run final actions concurrently?
           -> [Action]
           -> (TVar Int -> IO ()) -- ^ progress updated
           -> IO [SomeException]
runActions threads keepGoing concurrentFinal actions0 withProgress = do
    es <- ExecuteState
        <$> newTVarIO actions0
        <*> newTVarIO []
        <*> newTVarIO Set.empty
        <*> newTVarIO 0
        -- the lock serialises ATFinal actions unless concurrency is allowed
        <*> (if concurrentFinal
                then pure Nothing
                else Just <$> atomically (newTMVar ()))
        <*> pure keepGoing
    _ <- async $ withProgress $ esCompleted es
    if threads <= 1
        then runActions' es
        else runConcurrently $ sequenceA_ $ replicate threads $ Concurrently $ runActions' es
    readTVarIO $ esExceptions es
-- | Worker loop: repeatedly pick an action whose dependencies are all
-- satisfied, run it, then remove it from its dependents' dependency
-- sets.  Several copies of this loop may run over the same state.
runActions' :: ExecuteState -> IO ()
runActions' ExecuteState {..} =
    loop
  where
    -- stop pulling work once an exception is recorded (unless keep-going)
    breakOnErrs inner = do
        errs <- readTVar esExceptions
        if null errs || esKeepGoing
            then inner
            else return $ return ()
    -- stop when the action queue is empty
    withActions inner = do
        as <- readTVar esActions
        if null as
            then return $ return ()
            else inner as
    loop = join $ atomically $ breakOnErrs $ withActions $ \as ->
        case break (Set.null . actionDeps) as of
            (_, []) -> do
                -- no action is ready; if nothing is in flight either,
                -- the dependency graph can never drain
                inAction <- readTVar esInAction
                if Set.null inAction
                    then do
                        unless esKeepGoing $
                            modifyTVar esExceptions (toException InconsistentDependencies:)
                        return $ return ()
                    else retry
            (xs, action:ys) -> do
                -- final actions may need to be serialised via the lock
                unlock <-
                    case (actionId action, esFinalLock) of
                        (ActionId _ ATFinal, Just lock) -> do
                            takeTMVar lock
                            return $ putTMVar lock ()
                        _ -> return $ return ()
                let as' = xs ++ ys
                inAction <- readTVar esInAction
                let remaining = Set.union
                        (Set.fromList $ map actionId as')
                        inAction
                writeTVar esActions as'
                modifyTVar esInAction (Set.insert $ actionId action)
                return $ mask $ \restore -> do
                    eres <- try $ restore $ actionDo action ActionContext
                        { acRemaining = remaining
                        }
                    atomically $ do
                        unlock
                        modifyTVar esInAction (Set.delete $ actionId action)
                        modifyTVar esCompleted (+1)
                        case eres of
                            Left err -> modifyTVar esExceptions (err:)
                            Right () ->
                                -- on success, unblock this action's dependents
                                let dropDep a = a { actionDeps = Set.delete (actionId action) $ actionDeps a }
                                 in modifyTVar esActions $ map dropDep
                    restore loop
| mathhun/stack | src/Control/Concurrent/Execute.hs | bsd-3-clause | 5,003 | 0 | 33 | 1,914 | 1,181 | 605 | 576 | 129 | 7 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnPat]{Renaming of patterns}
Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
-}
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
module RnPat (-- main entry points
rnPat, rnPats, rnBindPat, rnPatAndThen,
NameMaker, applyNameMaker, -- a utility for making names:
localRecNameMaker, topRecNameMaker, -- sometimes we want to make local names,
-- sometimes we want to make top (qualified) names.
isTopRecNameMaker,
rnHsRecFields, HsRecFieldContext(..),
rnHsRecUpdFields,
-- CpsRn monad
CpsRn, liftCps,
-- Literals
rnLit, rnOverLit,
-- Pattern Error messages that are also used elsewhere
checkTupSize, patSigErr
) where
-- ENH: thin imports to only what is necessary for patterns
import {-# SOURCE #-} RnExpr ( rnLExpr )
import {-# SOURCE #-} RnSplice ( rnSplicePat )
#include "HsVersions.h"
import HsSyn
import TcRnMonad
import TcHsSyn ( hsOverLitName )
import RnEnv
import RnTypes
import DynFlags
import PrelNames
import TyCon ( tyConName )
import ConLike
import TypeRep ( TyThing(..) )
import Name
import NameSet
import RdrName
import BasicTypes
import Util
import ListSetOps ( removeDups )
import Outputable
import SrcLoc
import FastString
import Literal ( inCharRange )
import TysWiredIn ( nilDataCon )
import DataCon
import Control.Monad ( when, liftM, ap )
import Data.Ratio
{-
*********************************************************
* *
The CpsRn Monad
* *
*********************************************************
Note [CpsRn monad]
~~~~~~~~~~~~~~~~~~
The CpsRn monad uses continuation-passing style to support this
style of programming:
do { ...
; ns <- bindNames rs
; ...blah... }
where rs::[RdrName], ns::[Name]
The idea is that '...blah...'
a) sees the bindings of ns
b) returns the free variables it mentions
so that bindNames can report unused ones
In particular,
mapM rnPatAndThen [p1, p2, p3]
has a *left-to-right* scoping: it makes the binders in
p1 scope over p2,p3.
-}
-- | Continuation-passing renamer computations: the continuation sees
-- the names bound so far and reports back the free variables it
-- mentions, so binders can be checked for being unused.
newtype CpsRn b = CpsRn { unCpsRn :: forall r. (b -> RnM (r, FreeVars))
                                            -> RnM (r, FreeVars) }
        -- See Note [CpsRn monad]
instance Functor CpsRn where
    fmap = liftM
instance Applicative CpsRn where
    pure x = CpsRn (\k -> k x)
    (<*>) = ap
instance Monad CpsRn where
    return = pure
    (CpsRn m) >>= mk = CpsRn (\k -> m (\v -> unCpsRn (mk v) k))
-- | Run a CpsRn computation with the trivial final continuation.
runCps :: CpsRn a -> RnM (a, FreeVars)
runCps (CpsRn m) = m (\r -> return (r, emptyFVs))
-- | Lift an RnM action that reports no free variables.
liftCps :: RnM a -> CpsRn a
liftCps rn_thing = CpsRn (\k -> rn_thing >>= k)
-- | Lift an RnM action, merging its free variables with those of the
-- continuation.
liftCpsFV :: RnM (a, FreeVars) -> CpsRn a
liftCpsFV rn_thing = CpsRn (\k -> do { (v,fvs1) <- rn_thing
                                     ; (r,fvs2) <- k v
                                     ; return (r, fvs1 `plusFV` fvs2) })
-- | Apply a renaming function to a located thing, setting the source
-- span for error messages and re-wrapping the result at that span.
wrapSrcSpanCps :: (a -> CpsRn b) -> Located a -> CpsRn (Located b)
-- Set the location, and also wrap it around the value returned
wrapSrcSpanCps fn (L loc a)
  = CpsRn (\k -> setSrcSpan loc $
                 unCpsRn (fn a) $ \v ->
                 k (L loc v))
-- | Look up a constructor occurrence, recording the constructor as a
-- use in the returned free variables.
lookupConCps :: Located RdrName -> CpsRn (Located Name)
lookupConCps con_rdr
  = CpsRn (\k -> do { con_name <- lookupLocatedOccRn con_rdr
                    ; (r, fvs) <- k con_name
                    ; return (r, addOneFV fvs (unLoc con_name)) })
    -- We add the constructor name to the free vars
    -- See Note [Patterns are uses]
{-
Note [Patterns are uses]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
module Foo( f, g ) where
data T = T1 | T2
f T1 = True
f T2 = False
g _ = T1
Arguably we should report T2 as unused, even though it appears in a
pattern, because it never occurs in a constructed position. See
Trac #7336.
However, implementing this in the face of pattern synonyms would be
less straightforward, since given two pattern synonyms
pattern P1 <- P2
pattern P2 <- ()
we need to observe the dependency between P1 and P2 so that type
checking can be done in the correct order (just like for value
bindings). Dependencies between bindings is analyzed in the renamer,
where we don't know yet whether P2 is a constructor or a pattern
synonym. So for now, we do report conid occurrences in patterns as
uses.
*********************************************************
* *
Name makers
* *
*********************************************************
Externally abstract type of name makers,
which is how you go from a RdrName to a Name
-}
-- | How a pattern binder's 'RdrName' is turned into a 'Name': either
-- a lambda-style local binder, or a let-style (possibly top-level)
-- binder carrying fixity information.
data NameMaker
  = LamMk -- Lambdas
      Bool -- True <=> report unused bindings
           -- (even if True, the warning only comes out
           -- if -fwarn-unused-matches is on)
  | LetMk -- Let bindings, incl top level
          -- Do *not* check for unused bindings
      TopLevelFlag
      MiniFixityEnv
-- | Name maker for top-level (qualified) binders.
topRecNameMaker :: MiniFixityEnv -> NameMaker
topRecNameMaker fix_env = LetMk TopLevel fix_env
-- | True exactly for makers produced by 'topRecNameMaker'.
isTopRecNameMaker :: NameMaker -> Bool
isTopRecNameMaker (LetMk TopLevel _) = True
isTopRecNameMaker _ = False
-- | Name maker for local (not top-level) let binders.
localRecNameMaker :: MiniFixityEnv -> NameMaker
localRecNameMaker fix_env = LetMk NotTopLevel fix_env
-- | Choose the 'NameMaker' for a match context: lambda-style binders,
-- with unused-binding reporting suppressed where it makes no sense.
matchNameMaker :: HsMatchContext a -> NameMaker
matchNameMaker ctxt = LamMk report_unused
  where
    -- Do not report unused names in interactive contexts
    -- i.e. when you type 'x <- e' at the GHCi prompt
    report_unused = case ctxt of
                      StmtCtxt GhciStmtCtxt -> False
                      -- also, don't warn in pattern quotes, as there
                      -- is no RHS where the variables can be used!
                      ThPatQuote -> False
                      _ -> True
-- | Rename a pattern type signature inside the CpsRn monad.
rnHsSigCps :: HsWithBndrs RdrName (LHsType RdrName)
           -> CpsRn (HsWithBndrs Name (LHsType Name))
rnHsSigCps sig
  = CpsRn (rnHsBndrSig PatCtx sig)
-- | Like 'newPatName', but keeps the binder's source location.
newPatLName :: NameMaker -> Located RdrName -> CpsRn (Located Name)
newPatLName name_maker rdr_name@(L loc _)
  = do { name <- newPatName name_maker rdr_name
       ; return (L loc name) }
-- | Bind one pattern variable according to the 'NameMaker' discipline:
-- lambda-style binders scope over the continuation and may be checked
-- for being unused; let-style binders also get their fixity installed.
newPatName :: NameMaker -> Located RdrName -> CpsRn Name
newPatName (LamMk report_unused) rdr_name
  = CpsRn (\ thing_inside ->
        do { name <- newLocalBndrRn rdr_name
           ; (res, fvs) <- bindLocalNames [name] (thing_inside name)
           ; when report_unused $ warnUnusedMatches [name] fvs
           ; return (res, name `delFV` fvs) })
newPatName (LetMk is_top fix_env) rdr_name
  = CpsRn (\ thing_inside ->
        do { name <- case is_top of
                       NotTopLevel -> newLocalBndrRn rdr_name
                       TopLevel -> newTopSrcBinder rdr_name
           ; bindLocalNames [name] $ -- Do *not* use bindLocalNameFV here
                                     -- See Note [View pattern usage]
             addLocalFixities fix_env [name] $
             thing_inside name })
                          -- Note: the bindLocalNames is somewhat suspicious
                          -- because it binds a top-level name as a local name.
                          -- however, this binding seems to work, and it only exists for
                          -- the duration of the patterns and the continuation;
                          -- then the top-level name is added to the global env
                          -- before going on to the RHSes (see RnSource.hs).
{-
Note [View pattern usage]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let (r, (r -> x)) = x in ...
Here the pattern binds 'r', and then uses it *only* in the view pattern.
We want to "see" this use, and in let-bindings we collect all uses and
report unused variables at the binding level. So we must use bindLocalNames
here, *not* bindLocalNameFV. Trac #3943.
*********************************************************
* *
External entry points
* *
*********************************************************
There are various entry points to renaming patterns, depending on
(1) whether the names created should be top-level names or local names
(2) whether the scope of the names is entirely given in a continuation
(e.g., in a case or lambda, but not in a let or at the top-level,
because of the way mutually recursive bindings are handled)
(3) whether the a type signature in the pattern can bind
lexically-scoped type variables (for unpacking existential
type vars in data constructors)
(4) whether we do duplicate and unused variable checking
(5) whether there are fixity declarations associated with the names
bound by the patterns that need to be brought into scope with them.
Rather than burdening the clients of this module with all of these choices,
we export the three points in this design space that we actually need:
-}
-- ----------- Entry point 1: rnPats -------------------
-- Binds local names; the scope of the bindings is entirely in the thing_inside
--   * allows type sigs to bind type vars
--   * local namemaker
--   * unused and duplicate checking
--   * no fixities
rnPats :: HsMatchContext Name -- for error messages
       -> [LPat RdrName]
       -> ([LPat Name] -> RnM (a, FreeVars))
       -> RnM (a, FreeVars)
rnPats ctxt pats thing_inside
  = do { envs_before <- getRdrEnvs
          -- (1) rename the patterns, bringing into scope all of the term variables
          -- (2) then do the thing inside.
       ; unCpsRn (rnLPatsAndThen (matchNameMaker ctxt) pats) $ \ pats' -> do
        { -- Check for duplicated and shadowed names
          -- Must do this *after* renaming the patterns
          -- See Note [Collect binders only after renaming] in HsUtils
          -- Because we don't bind the vars all at once, we can't
          --    check incrementally for duplicates;
          -- Nor can we check incrementally for shadowing, else we'll
          --    complain *twice* about duplicates e.g. f (x,x) = ...
          addErrCtxt doc_pat $
          checkDupAndShadowedNames envs_before $
          collectPatsBinders pats'
        ; thing_inside pats' } }
  where
    doc_pat = ptext (sLit "In") <+> pprMatchContext ctxt
-- | Rename a single pattern.  Variables bound by the pattern do not
-- appear in the result FreeVars.
rnPat :: HsMatchContext Name -- for error messages
      -> LPat RdrName
      -> (LPat Name -> RnM (a, FreeVars))
      -> RnM (a, FreeVars)
rnPat ctxt pat thing_inside
  = rnPats ctxt [pat] one
  where
    -- rnPats hands back exactly as many patterns as it was given,
    -- so the singleton match is safe
    one pats' = let [pat'] = pats' in thing_inside pat'
-- | Run a NameMaker on one binder, discarding the collected free variables.
applyNameMaker :: NameMaker -> Located RdrName -> RnM (Located Name)
applyNameMaker mk rdr = fmap fst (runCps (newPatLName mk rdr))
-- ----------- Entry point 2: rnBindPat -------------------
-- Binds local names; used for a recursive scope that involves other bound vars
--      e.g let { (x, Just y) = e1; ... } in ...
--    * does NOT allow a type sig to bind type vars
--    * local namemaker
--    * no unused and duplicate checking
--    * fixities might be coming in
rnBindPat :: NameMaker
          -> LPat RdrName
          -> RnM (LPat Name, FreeVars)
-- The FreeVars returned are the free variables of the pattern itself,
-- excluding, of course, any variables that the pattern binds
rnBindPat mk = runCps . rnLPatAndThen mk
{-
*********************************************************
* *
The main event
* *
*********************************************************
-}
-- ----------- Entry point 3: rnLPatAndThen -------------------
-- General version: parametrized by how you make new names
rnLPatsAndThen :: NameMaker -> [LPat RdrName] -> CpsRn [LPat Name]
rnLPatsAndThen name_maker pats = mapM (rnLPatAndThen name_maker) pats
  -- Although this is a plain mapM, the CpsRn monad threads the bindings
  -- through, so each pattern may mention variables bound by earlier ones
--------------------
-- The workhorse: rename one located pattern in CPS style
rnLPatAndThen :: NameMaker -> LPat RdrName -> CpsRn (LPat Name)
rnLPatAndThen mk = wrapSrcSpanCps (rnPatAndThen mk)
-- Rename one pattern, dispatching on the pattern constructor.
rnPatAndThen :: NameMaker -> Pat RdrName -> CpsRn (Pat Name)
rnPatAndThen _  (WildPat _)   = return (WildPat placeHolderType)
rnPatAndThen mk (ParPat pat)  = do { pat' <- rnLPatAndThen mk pat; return (ParPat pat') }
rnPatAndThen mk (LazyPat pat) = do { pat' <- rnLPatAndThen mk pat; return (LazyPat pat') }
rnPatAndThen mk (BangPat pat) = do { pat' <- rnLPatAndThen mk pat; return (BangPat pat') }
rnPatAndThen mk (VarPat rdr)  = do { loc <- liftCps getSrcSpanM
                                   ; name <- newPatName mk (L loc rdr)
                                   ; return (VarPat name) }
     -- we need to bind pattern variables for view pattern expressions
     -- (e.g. in the pattern (x, x -> y) x needs to be bound in the rhs of the tuple)

rnPatAndThen mk (SigPatIn pat sig)
  -- When renaming a pattern type signature (e.g. f (a :: T) = ...), it is
  -- important to rename its type signature _before_ renaming the rest of the
  -- pattern, so that type variables are first bound by the _outermost_ pattern
  -- type signature they occur in. This keeps the type checker happy when
  -- pattern type signatures happen to be nested (#7827)
  --
  -- f ((Just (x :: a) :: Maybe a)
  -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~^       `a' is first bound here
  -- ~~~~~~~~~~~~~~~^                   the same `a' then used here
  = do { sig' <- rnHsSigCps sig
       ; pat' <- rnLPatAndThen mk pat
       ; return (SigPatIn pat' sig') }

rnPatAndThen mk (LitPat lit)
  -- With OverloadedStrings a string literal pattern is re-renamed as an
  -- overloaded-literal (NPat) pattern
  | HsString src s <- lit
  = do { ovlStr <- liftCps (xoptM Opt_OverloadedStrings)
       ; if ovlStr
         then rnPatAndThen mk
                  (mkNPat (noLoc (mkHsIsString src s placeHolderType))
                          Nothing)
         else normal_lit }
  | otherwise = normal_lit
  where
    normal_lit = do { liftCps (rnLit lit); return (LitPat lit) }

rnPatAndThen _ (NPat (L l lit) mb_neg _eq)
  = do { lit'    <- liftCpsFV $ rnOverLit lit
       ; mb_neg' <- liftCpsFV $ case mb_neg of
                      Nothing -> return (Nothing, emptyFVs)
                      Just _  -> do { (neg, fvs) <- lookupSyntaxName negateName
                                    ; return (Just neg, fvs) }
       ; eq' <- liftCpsFV $ lookupSyntaxName eqName
       ; return (NPat (L l lit') mb_neg' eq') }

rnPatAndThen mk (NPlusKPat rdr (L l lit) _ _)
  = do { new_name <- newPatName mk rdr
       ; lit'  <- liftCpsFV $ rnOverLit lit
       ; minus <- liftCpsFV $ lookupSyntaxName minusName
       ; ge    <- liftCpsFV $ lookupSyntaxName geName
       ; return (NPlusKPat (L (nameSrcSpan new_name) new_name)
                           (L l lit') ge minus) }
        -- The Report says that n+k patterns must be in Integral

rnPatAndThen mk (AsPat rdr pat)
  = do { new_name <- newPatLName mk rdr
       ; pat' <- rnLPatAndThen mk pat
       ; return (AsPat new_name pat') }

rnPatAndThen mk p@(ViewPat expr pat _ty)
  = do { liftCps $ do { vp_flag <- xoptM Opt_ViewPatterns
                      ; checkErr vp_flag (badViewPat p) }
         -- Because of the way we're arranging the recursive calls,
         -- this will be in the right context
       ; expr' <- liftCpsFV $ rnLExpr expr
       ; pat' <- rnLPatAndThen mk pat
       -- Note: at this point the PreTcType in ty can only be a placeHolder
       -- ; return (ViewPat expr' pat' ty) }
       ; return (ViewPat expr' pat' placeHolderType) }

rnPatAndThen mk (ConPatIn con stuff)
   -- rnConPatAndThen takes care of reconstructing the pattern
   -- The pattern for the empty list needs to be replaced by an empty explicit
   -- list pattern when overloaded lists is turned on.
  = case unLoc con == nameRdrName (dataConName nilDataCon) of
      True  -> do { ol_flag <- liftCps $ xoptM Opt_OverloadedLists
                  ; if ol_flag then rnPatAndThen mk (ListPat [] placeHolderType Nothing)
                               else rnConPatAndThen mk con stuff}
      False -> rnConPatAndThen mk con stuff

rnPatAndThen mk (ListPat pats _ _)
  = do { opt_OverloadedLists <- liftCps $ xoptM Opt_OverloadedLists
       ; pats' <- rnLPatsAndThen mk pats
       ; case opt_OverloadedLists of
          -- With OverloadedLists, record the toList witness for the typechecker
          True  -> do { (to_list_name,_) <- liftCps $ lookupSyntaxName toListName
                      ; return (ListPat pats' placeHolderType
                                  (Just (placeHolderType, to_list_name)))}
          False -> return (ListPat pats' placeHolderType Nothing) }

rnPatAndThen mk (PArrPat pats _)
  = do { pats' <- rnLPatsAndThen mk pats
       ; return (PArrPat pats' placeHolderType) }

rnPatAndThen mk (TuplePat pats boxed _)
  = do { liftCps $ checkTupSize (length pats)
       ; pats' <- rnLPatsAndThen mk pats
       ; return (TuplePat pats' boxed []) }

rnPatAndThen mk (SplicePat splice)
  = do { eith <- liftCpsFV $ rnSplicePat splice
       ; case eith of   -- See Note [rnSplicePat] in RnSplice
           Left  not_yet_renamed -> rnPatAndThen mk not_yet_renamed
           Right already_renamed -> return already_renamed }

rnPatAndThen _ pat = pprPanic "rnLPatAndThen" (ppr pat)
--------------------
-- Rename a constructor pattern, dispatching on prefix/infix/record syntax.
-- The infix case re-associates the pattern according to the operator's fixity.
rnConPatAndThen :: NameMaker
                -> Located RdrName          -- the constructor
                -> HsConPatDetails RdrName
                -> CpsRn (Pat Name)

rnConPatAndThen mk con (PrefixCon pats)
  = do  { con' <- lookupConCps con
        ; pats' <- rnLPatsAndThen mk pats
        ; return (ConPatIn con' (PrefixCon pats')) }

rnConPatAndThen mk con (InfixCon pat1 pat2)
  = do  { con' <- lookupConCps con
        ; pat1' <- rnLPatAndThen mk pat1
        ; pat2' <- rnLPatAndThen mk pat2
        ; fixity <- liftCps $ lookupFixityRn (unLoc con')
        ; liftCps $ mkConOpPatRn con' fixity pat1' pat2' }

rnConPatAndThen mk con (RecCon rpats)
  = do  { con' <- lookupConCps con
        ; rpats' <- rnHsRecPatsAndThen mk con' rpats
        ; return (ConPatIn con' (RecCon rpats')) }
--------------------
-- Rename the fields of a record pattern, including any fields filled
-- in by "..".
rnHsRecPatsAndThen :: NameMaker
                   -> Located Name      -- Constructor
                   -> HsRecFields RdrName (LPat RdrName)
                   -> CpsRn (HsRecFields Name (LPat Name))
rnHsRecPatsAndThen mk (L _ con) hs_rec_fields@(HsRecFields { rec_dotdot = dd })
  = do { flds <- liftCpsFV $ rnHsRecFields (HsRecFieldPat con) VarPat hs_rec_fields
       -- Number the fields 1.. so nested_mk can tell which were
       -- introduced by ".." (those past position dd)
       ; flds' <- mapM rn_field (flds `zip` [1..])
       ; return (HsRecFields { rec_flds = flds', rec_dotdot = dd }) }
  where
    rn_field (L l fld, n') = do { arg' <- rnLPatAndThen (nested_mk dd mk n')
                                                        (hsRecFieldArg fld)
                                ; return (L l (fld { hsRecFieldArg = arg' })) }

        -- Suppress unused-match reporting for fields introduced by ".."
    nested_mk Nothing  mk                    _  = mk
    nested_mk (Just _) mk@(LetMk {})         _  = mk
    nested_mk (Just n) (LamMk report_unused) n' = LamMk (report_unused && (n' <= n))
{-
************************************************************************
* *
Record fields
* *
************************************************************************
-}
-- | The syntactic context in which a record-field list occurs;
-- used for field lookup and for error messages.
data HsRecFieldContext
  = HsRecFieldCon Name   -- ^ record construction, with its data constructor
  | HsRecFieldPat Name   -- ^ record pattern match, with its data constructor
  | HsRecFieldUpd        -- ^ record update (constructor not known)
rnHsRecFields
    :: forall arg.
       HsRecFieldContext
    -> (RdrName -> arg) -- When punning, use this to build a new field
    -> HsRecFields RdrName (Located arg)
    -> RnM ([LHsRecField Name (Located arg)], FreeVars)

-- This surprisingly complicated pass
--  a) looks up the field name (possibly using disambiguation)
--  b) fills in puns and dot-dot stuff
-- When we've finished, we've renamed the LHS, but not the RHS,
-- of each x=e binding
--
-- This is used for record construction and pattern-matching, but not updates.

rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
  = do { pun_ok      <- xoptM Opt_RecordPuns
       ; disambig_ok <- xoptM Opt_DisambiguateRecordFields
       ; parent <- check_disambiguation disambig_ok mb_con
       ; flds1 <- mapM (rn_fld pun_ok parent) flds
       ; mapM_ (addErr . dupFieldErr ctxt) dup_flds
       ; dotdot_flds <- rn_dotdot dotdot mb_con flds1
       ; let all_flds | null dotdot_flds = flds1
                      | otherwise        = flds1 ++ dotdot_flds
       ; return (all_flds, mkFVs (getFieldIds all_flds)) }
  where
    mb_con = case ctxt of
                HsRecFieldCon con | not (isUnboundName con) -> Just con
                HsRecFieldPat con | not (isUnboundName con) -> Just con
                _ {- update or isUnboundName con -}         -> Nothing
           -- The unbound name test is because if the constructor
           -- isn't in scope the constructor lookup will add an error,
           -- but still return an unbound name.
           -- We don't want that to screw up the dot-dot fill-in stuff.

    doc = case mb_con of
            Nothing  -> ptext (sLit "constructor field name")
            Just con -> ptext (sLit "field of constructor") <+> quotes (ppr con)

    rn_fld :: Bool -> Maybe Name -> LHsRecField RdrName (Located arg)
           -> RnM (LHsRecField Name (Located arg))
    -- Rename one explicit field: look up its selector, and expand a pun
    -- (e.g. R { x }) into R { x = mk_arg x } when NamedFieldPuns is on
    rn_fld pun_ok parent (L l (HsRecField { hsRecFieldLbl = L loc (FieldOcc lbl _)
                                          , hsRecFieldArg = arg
                                          , hsRecPun      = pun }))
      = do { sel <- setSrcSpan loc $ lookupSubBndrOcc True parent doc lbl
           ; arg' <- if pun
                     then do { checkErr pun_ok (badPun (L loc lbl))
                             ; return (L loc (mk_arg lbl)) }
                     else return arg
           ; return (L l (HsRecField { hsRecFieldLbl = L loc (FieldOcc lbl sel)
                                     , hsRecFieldArg = arg'
                                     , hsRecPun      = pun })) }

    rn_dotdot :: Maybe Int           -- See Note [DotDot fields] in HsPat
              -> Maybe Name          -- The constructor (Nothing for an
                                     --    out of scope constructor)
              -> [LHsRecField Name (Located arg)]     -- Explicit fields
              -> RnM [LHsRecField Name (Located arg)] -- Filled in .. fields
    rn_dotdot Nothing _mb_con _flds     -- No ".." at all
      = return []
    rn_dotdot (Just {}) Nothing _flds   -- Constructor out of scope
      = return []
    rn_dotdot (Just n) (Just con) flds  -- ".." on record construction / pat match
      = ASSERT( n == length flds )
        do { loc <- getSrcSpanM -- Rather approximate
           ; dd_flag <- xoptM Opt_RecordWildCards
           ; checkErr dd_flag (needFlagDotDot ctxt)
           ; (rdr_env, lcl_env) <- getRdrEnvs
           ; con_fields <- lookupConstructorFields con
           ; when (null con_fields) (addErr (badDotDotCon con))
           ; let present_flds = map (occNameFS . rdrNameOcc) $ getFieldLbls flds
                 parent_tc = find_tycon rdr_env con

                   -- For constructor uses (but not patterns)
                   -- the arg should be in scope (unqualified)
                   -- ignoring the record field itself
                   -- Eg.  data R = R { x,y :: Int }
                   --      f x = R { .. }   -- Should expand to R {x=x}, not R{x=x,y=y}
                 arg_in_scope lbl
                   = rdr `elemLocalRdrEnv` lcl_env
                   || notNull [ gre | gre <- lookupGRE_RdrName rdr rdr_env
                                    , case gre_par gre of
                                        ParentIs p               -> p /= parent_tc
                                        FldParent { par_is = p } -> p /= parent_tc
                                        PatternSynonym           -> True
                                        NoParent                 -> True ]
                   where
                     rdr = mkVarUnqual lbl

                 -- One entry per constructor field that was not written
                 -- explicitly and whose selector is in scope
                 dot_dot_gres = [ (lbl, sel, head gres)
                                | fl <- con_fields
                                , let lbl = flLabel fl
                                , let sel = flSelector fl
                                , not (lbl `elem` present_flds)
                                , let gres = lookupGRE_Field_Name rdr_env sel lbl
                                , not (null gres)  -- Check selector is in scope
                                , case ctxt of
                                    HsRecFieldCon {} -> arg_in_scope lbl
                                    _other           -> True ]

           ; addUsedGREs (map thirdOf3 dot_dot_gres)
           ; return [ L loc (HsRecField
                        { hsRecFieldLbl = L loc (FieldOcc arg_rdr sel)
                        , hsRecFieldArg = L loc (mk_arg arg_rdr)
                        , hsRecPun      = False })
                    | (lbl, sel, _) <- dot_dot_gres
                    , let arg_rdr = mkVarUnqual lbl ] }

    check_disambiguation :: Bool -> Maybe Name -> RnM (Maybe Name)
    -- When disambiguation is on, return name of parent tycon.
    check_disambiguation disambig_ok mb_con
      | disambig_ok, Just con <- mb_con
      = do { env <- getGlobalRdrEnv; return (Just (find_tycon env con)) }
      | otherwise = return Nothing

    find_tycon :: GlobalRdrEnv -> Name {- DataCon -} -> Name {- TyCon -}
    -- Return the parent *type constructor* of the data constructor;
    -- that's the parent to use for looking up record fields.
    find_tycon env con
      | Just (AConLike (RealDataCon dc)) <- wiredInNameTyThing_maybe con
      = tyConName (dataConTyCon dc)   -- Special case for [], which is built-in syntax
                                      -- and not in the GlobalRdrEnv (Trac #8448)
      | [GRE { gre_par = ParentIs p }] <- lookupGRE_Name env con
      = p
      | otherwise
      = pprPanic "find_tycon" (ppr con $$ ppr (lookupGRE_Name env con))

    dup_flds :: [[RdrName]]
        -- Each list represents a RdrName that occurred more than once
        -- (the list contains all occurrences)
        -- Each list in dup_fields is non-empty
    (_, dup_flds) = removeDups compare (getFieldLbls flds)
-- | Rename the fields of a record update (@e { f = v }@).  With
-- DuplicateRecordFields a field's selector may be left ambiguous here
-- and resolved later by the typechecker.
rnHsRecUpdFields
    :: [LHsRecUpdField RdrName]
    -> RnM ([LHsRecUpdField Name], FreeVars)
rnHsRecUpdFields flds
  = do { pun_ok      <- xoptM Opt_RecordPuns
       ; overload_ok <- xoptM Opt_DuplicateRecordFields
       ; (flds1, fvss) <- mapAndUnzipM (rn_fld pun_ok overload_ok) flds
       ; mapM_ (addErr . dupFieldErr HsRecFieldUpd) dup_flds

       -- Check for an empty record update  e {}
       -- NB: don't complain about e { .. }, because rn_dotdot has done that already
       ; when (null flds) $ addErr emptyUpdateErr

       ; return (flds1, plusFVs fvss) }
  where
    doc = ptext (sLit "constructor field name")

    rn_fld :: Bool -> Bool -> LHsRecUpdField RdrName -> RnM (LHsRecUpdField Name, FreeVars)
    rn_fld pun_ok overload_ok (L l (HsRecField { hsRecFieldLbl = L loc f
                                               , hsRecFieldArg = arg
                                               , hsRecPun      = pun }))
      = do { let lbl = rdrNameAmbiguousFieldOcc f
           ; sel <- setSrcSpan loc $
                      -- Defer renaming of overloaded fields to the typechecker
                      -- See Note [Disambiguating record fields] in TcExpr
                      if overload_ok
                          then do { mb <- lookupGlobalOccRn_overloaded overload_ok lbl
                                  ; case mb of
                                      Nothing -> do { addErr (unknownSubordinateErr doc lbl)
                                                    ; return (Right []) }
                                      Just r  -> return r }
                          else fmap Left $ lookupSubBndrOcc True Nothing doc lbl
           -- Expand a pun (e { x }) into e { x = x } when NamedFieldPuns is on
           ; arg' <- if pun
                     then do { checkErr pun_ok (badPun (L loc lbl))
                             ; return (L loc (HsVar lbl)) }
                     else return arg
           ; (arg'', fvs) <- rnLExpr arg'

           -- Record the selector as a free variable only when it was
           -- resolved unambiguously
           ; let fvs' = case sel of
                          Left sel_name               -> fvs `addOneFV` sel_name
                          Right [FieldOcc _ sel_name] -> fvs `addOneFV` sel_name
                          Right _                     -> fvs
                 lbl' = case sel of
                          Left sel_name ->
                            L loc (Unambiguous lbl sel_name)
                          Right [FieldOcc lbl sel_name] ->
                            L loc (Unambiguous lbl sel_name)
                          Right _ -> L loc (Ambiguous lbl PlaceHolder)

           ; return (L l (HsRecField { hsRecFieldLbl = lbl'
                                     , hsRecFieldArg = arg''
                                     , hsRecPun      = pun }), fvs') }

    dup_flds :: [[RdrName]]
        -- Each list represents a RdrName that occurred more than once
        -- (the list contains all occurrences)
        -- Each list in dup_fields is non-empty
    (_, dup_flds) = removeDups compare (getFieldUpdLbls flds)
-- | Selector Names of the given (renamed) record fields.
getFieldIds :: [LHsRecField Name arg] -> [Name]
getFieldIds = map (unLoc . hsRecFieldSel . unLoc)
-- | Field labels (as written by the user) of the given record fields.
getFieldLbls :: [LHsRecField id arg] -> [RdrName]
getFieldLbls = map (rdrNameFieldOcc . unLoc . hsRecFieldLbl . unLoc)
-- | Field labels of the given record-update fields.
getFieldUpdLbls :: [LHsRecUpdField id] -> [RdrName]
getFieldUpdLbls = map (rdrNameAmbiguousFieldOcc . unLoc . hsRecFieldLbl . unLoc)
-- | Error: ".." used without RecordWildCards.
needFlagDotDot :: HsRecFieldContext -> SDoc
needFlagDotDot ctxt
  = vcat [ ptext (sLit "Illegal `..' in record") <+> pprRFC ctxt
         , ptext (sLit "Use RecordWildCards to permit this") ]
-- | Error: ".." used with a constructor that has no labelled fields.
badDotDotCon :: Name -> SDoc
badDotDotCon con = vcat [header, nest 2 detail]
  where
    header = ptext (sLit "Illegal `..' notation for constructor") <+> quotes (ppr con)
    detail = ptext (sLit "The constructor has no labelled fields")
-- | Error: record update with no fields at all, @e {}@.
emptyUpdateErr :: SDoc
emptyUpdateErr = ptext $ sLit "Empty record update"
-- | Error: field punning used without NamedFieldPuns.
badPun :: Located RdrName -> SDoc
badPun fld = vcat [what, how]
  where
    what = ptext (sLit "Illegal use of punning for field") <+> quotes (ppr fld)
    how  = ptext (sLit "Use NamedFieldPuns to permit this")
-- | Error: the same field name given more than once.
-- Precondition (maintained by removeDups): the list is non-empty.
dupFieldErr :: HsRecFieldContext -> [RdrName] -> SDoc
dupFieldErr ctxt dups
  = hsep [ ptext (sLit "duplicate field name")
         , quotes (ppr (head dups))
         , ptext (sLit "in record")
         , pprRFC ctxt ]
-- | Pretty-print the kind of record-field context, for error messages.
pprRFC :: HsRecFieldContext -> SDoc
pprRFC ctxt = case ctxt of
  HsRecFieldCon {} -> ptext (sLit "construction")
  HsRecFieldPat {} -> ptext (sLit "pattern")
  HsRecFieldUpd {} -> ptext (sLit "update")
{-
************************************************************************
* *
\subsubsection{Literals}
* *
************************************************************************
When literals occur we have to make sure
that the types and classes they involve
are made available.
-}
-- | Check a literal during renaming; only character literals need a
-- range check here.
rnLit :: HsLit -> RnM ()
rnLit lit = case lit of
  HsChar _ c -> checkErr (inCharRange c) (bogusCharError c)
  _          -> return ()
-- Turn a Fractional-looking literal which happens to be an integer into an
-- Integer-looking literal.  Applied only under NumDecimals (see rnOverLit).
generalizeOverLitVal :: OverLitVal -> OverLitVal
generalizeOverLitVal (HsFractional (FL {fl_text=src,fl_value=val}))
   | denominator val == 1 = HsIntegral src (numerator val)
generalizeOverLitVal lit = lit
rnOverLit :: HsOverLit t -> RnM (HsOverLit Name, FreeVars)
-- Rename an overloaded literal: look up its witness function (e.g.
-- fromInteger), which under RebindableSyntax may be a user-defined name;
-- ol_rebindable records whether it differs from the standard one.
rnOverLit origLit
  = do { opt_NumDecimals <- xoptM Opt_NumDecimals
       ; let { lit@(OverLit {ol_val=val})
           | opt_NumDecimals = origLit {ol_val = generalizeOverLitVal (ol_val origLit)}
           | otherwise       = origLit
         }
       ; let std_name = hsOverLitName val
       ; (from_thing_name, fvs) <- lookupSyntaxName std_name
       ; let rebindable = case from_thing_name of
                            HsVar v -> v /= std_name
                            _       -> panic "rnOverLit"
       ; return (lit { ol_witness = from_thing_name
                     , ol_rebindable = rebindable
                     , ol_type = placeHolderType }, fvs) }
{-
************************************************************************
* *
\subsubsection{Errors}
* *
************************************************************************
-}
-- | Error: pattern type signature used without ScopedTypeVariables.
patSigErr :: Outputable a => a -> SDoc
patSigErr ty = msg $$ nest 4 how
  where
    msg = ptext (sLit "Illegal signature in pattern:") <+> ppr ty
    how = ptext (sLit "Use ScopedTypeVariables to permit it")
-- | Error: character literal outside the supported character range.
bogusCharError :: Char -> SDoc
bogusCharError c = prefix <> char c <> char '\''
  where
    prefix = ptext (sLit "character literal out of range: '\\")
-- | Error: view pattern used without ViewPatterns.
badViewPat :: Pat RdrName -> SDoc
badViewPat pat = vcat [what, how]
  where
    what = ptext (sLit "Illegal view pattern: ") <+> ppr pat
    how  = ptext (sLit "Use ViewPatterns to enable view patterns")
| elieux/ghc | compiler/rename/RnPat.hs | bsd-3-clause | 33,610 | 29 | 23 | 10,960 | 6,521 | 3,534 | 2,987 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE PolyKinds #-}
#ifdef USE_REFLEX_OPTIMIZER
{-# OPTIONS_GHC -fplugin=Reflex.Optimizer #-}
#endif
-- There are two expected orphan instances in this module:
-- * MonadSample (Pure t) ((->) t)
-- * MonadHold (Pure t) ((->) t)
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
-- Module: Reflex.Pure
-- Description:
-- This module provides a pure implementation of Reflex, which is intended to
-- serve as a reference for the semantics of the Reflex class. All
-- implementations of Reflex should produce the same results as this
-- implementation, although performance and laziness/strictness may differ.
module Reflex.Pure
( Pure
, Behavior (..)
, Event (..)
, Dynamic (..)
, Incremental (..)
) where
import Control.Monad
import Data.Dependent.Map (DMap)
import Data.GADT.Compare (GCompare)
import qualified Data.Dependent.Map as DMap
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Maybe
import Data.MemoTrie
import Data.Monoid
import Data.Type.Coercion
import Reflex.Class
import Data.Kind (Type)
-- | A completely pure-functional 'Reflex' timeline, identifying moments in time
-- with the type @/t/@.
--
-- This type is uninhabited: it is used only as a type-level tag.
data Pure (t :: Type)
-- | The 'Enum' instance of @/t/@ must be dense: for all @/x :: t/@, there must not exist
-- any @/y :: t/@ such that @/'pred' x < y < x/@. The 'HasTrie' instance will be used
-- exclusively to memoize functions of @/t/@, not for any of its other capabilities.
instance (Enum t, HasTrie t, Ord t) => Reflex (Pure t) where
  -- A Behavior is a total function of time; an Event is a partial one
  -- (Nothing = no occurrence); a Dynamic pairs the current value with a
  -- possible change; an Incremental pairs a value with a possible patch.
  newtype Behavior (Pure t) a = Behavior { unBehavior :: t -> a }
  newtype Event (Pure t) a = Event { unEvent :: t -> Maybe a }
  newtype Dynamic (Pure t) a = Dynamic { unDynamic :: t -> (a, Maybe a) }
  newtype Incremental (Pure t) p = Incremental { unIncremental :: t -> (PatchTarget p, Maybe p) }

  type PushM (Pure t) = (->) t
  type PullM (Pure t) = (->) t

  never :: Event (Pure t) a
  never = Event $ \_ -> Nothing

  constant :: a -> Behavior (Pure t) a
  constant x = Behavior $ \_ -> x

  push :: (a -> PushM (Pure t) (Maybe b)) -> Event (Pure t) a -> Event (Pure t) b
  push f e = Event $ memo $ \t -> unEvent e t >>= \o -> f o t

  pushCheap :: (a -> PushM (Pure t) (Maybe b)) -> Event (Pure t) a -> Event (Pure t) b
  pushCheap = push

  pull :: PullM (Pure t) a -> Behavior (Pure t) a
  pull = Behavior . memo

  -- [UNUSED_CONSTRAINT]: The following type signature for merge will produce a
  -- warning because the GCompare instance is not used; however, removing the
  -- GCompare instance produces a different warning, due to that constraint
  -- being present in the original class definition.

  --mergeG :: GCompare k => (forall a. q a -> Event (Pure t) (v a))
  --       -> DMap k q -> Event (Pure t) (DMap k v)
  mergeG nt events = Event $ memo $ \t ->
    let currentOccurrences = DMap.mapMaybeWithKey (\_ q -> case nt q of Event a -> a t) events
    in if DMap.null currentOccurrences
       then Nothing
       else Just currentOccurrences

  -- The instance signature doesn't compile, leave commented for documentation
  -- fanG :: GCompare k => Event (Pure t) (DMap k v) -> EventSelectorG (Pure t) k v
  fanG e = EventSelectorG $ \k -> Event $ \t -> unEvent e t >>= DMap.lookup k

  switch :: Behavior (Pure t) (Event (Pure t) a) -> Event (Pure t) a
  switch b = Event $ memo $ \t -> unEvent (unBehavior b t) t

  coincidence :: Event (Pure t) (Event (Pure t) a) -> Event (Pure t) a
  coincidence e = Event $ memo $ \t -> unEvent e t >>= \o -> unEvent o t

  current :: Dynamic (Pure t) a -> Behavior (Pure t) a
  current d = Behavior $ \t -> fst $ unDynamic d t

  updated :: Dynamic (Pure t) a -> Event (Pure t) a
  updated d = Event $ \t -> snd $ unDynamic d t

  unsafeBuildDynamic :: PullM (Pure t) a -> Event (Pure t) a -> Dynamic (Pure t) a
  unsafeBuildDynamic readV0 v' = Dynamic $ \t -> (readV0 t, unEvent v' t)

  -- See UNUSED_CONSTRAINT, above.
  --unsafeBuildIncremental :: Patch p => PullM (Pure t) a -> Event (Pure t) (p
  --a) -> Incremental (Pure t) p a
  unsafeBuildIncremental readV0 p = Incremental $ \t -> (readV0 t, unEvent p t)

  mergeIncrementalG = mergeIncrementalImpl
  mergeIncrementalWithMoveG = mergeIncrementalImpl

  currentIncremental i = Behavior $ \t -> fst $ unIncremental i t

  updatedIncremental i = Event $ \t -> snd $ unIncremental i t

  -- Apply the pending patch (if any) to obtain the Dynamic's update
  incrementalToDynamic i = Dynamic $ \t ->
    let (old, mPatch) = unIncremental i t
        e = case mPatch of
          Nothing -> Nothing
          Just patch -> apply patch old
    in (old, e)

  -- All the wrappers are newtypes, so coercions lift straight through
  behaviorCoercion Coercion = Coercion
  eventCoercion Coercion = Coercion
  dynamicCoercion Coercion = Coercion
  incrementalCoercion Coercion Coercion = Coercion

  fanInt e = EventSelectorInt $ \k -> Event $ \t -> unEvent e t >>= IntMap.lookup k
  mergeIntIncremental = mergeIntIncrementalImpl
-- Shared implementation for mergeIncrementalG / mergeIncrementalWithMoveG:
-- the merged event fires whenever at least one constituent event fires.
mergeIncrementalImpl :: (PatchTarget p ~ DMap k q, GCompare k)
                     => (forall a. q a -> Event (Pure t) (v a))
                     -> Incremental (Pure t) p -> Event (Pure t) (DMap k v)
mergeIncrementalImpl nt i = Event $ \t ->
  let occs = DMap.mapMaybeWithKey (\_ q -> case nt q of Event e -> e t) (fst (unIncremental i t))
  in if DMap.null occs then Nothing else Just occs
-- Int-keyed analogue of mergeIncrementalImpl.
mergeIntIncrementalImpl :: (PatchTarget p ~ IntMap (Event (Pure t) a)) => Incremental (Pure t) p -> Event (Pure t) (IntMap a)
mergeIntIncrementalImpl i = Event $ \t ->
  let occs = IntMap.mapMaybeWithKey (\_ (Event e) -> e t) (fst (unIncremental i t))
  in if IntMap.null occs then Nothing else Just occs
instance Functor (Dynamic (Pure t)) where
  -- Map over both the current value and the (possible) update.
  fmap f d = Dynamic $ \t ->
    let (cur, mNext) = unDynamic d t
    in (f cur, fmap f mNext)
instance Applicative (Dynamic (Pure t)) where
  -- A constant Dynamic: same value at all times, never updates.
  pure a = Dynamic $ \_ -> (a, Nothing)
  (<*>) = ap
instance Monad (Dynamic (Pure t)) where
  return = pure
  -- The resulting update takes the first Just of [updBoth, updOuter,
  -- updInner] (via First/mconcat): a simultaneous outer+inner change wins,
  -- then an outer-only change, then an inner-only change.
  (x :: Dynamic (Pure t) a) >>= (f :: a -> Dynamic (Pure t) b) = Dynamic $ \t ->
    let (curX :: a, updX :: Maybe a) = unDynamic x t
        (cur :: b, updOuter :: Maybe b) = unDynamic (f curX) t
        (updInner :: Maybe b, updBoth :: Maybe b) = case updX of
          Nothing -> (Nothing, Nothing)
          Just nextX -> let (c, u) = unDynamic (f nextX) t
                        in (Just c, u)
    in (cur, getFirst $ mconcat $ map First [updBoth, updOuter, updInner])
-- Expected orphan (see the module header note).
instance MonadSample (Pure t) ((->) t) where
  sample :: Behavior (Pure t) a -> (t -> a)
  sample (Behavior f) = f
-- Expected orphan (see the module header note).
instance (Enum t, HasTrie t, Ord t) => MonadHold (Pure t) ((->) t) where
  -- hold builds a memoized recursive function of time: the value at time T
  -- is the event occurrence at (pred T), or else the value at (pred T),
  -- bottoming out at the initial value.
  hold :: a -> Event (Pure t) a -> t -> Behavior (Pure t) a
  hold initialValue e initialTime = Behavior f
    where f = memo $ \sampleTime ->
            -- Really, the sampleTime should never be prior to the initialTime,
            -- because that would mean the Behavior is being sampled before
            -- being created.
            if sampleTime <= initialTime
            then initialValue
            else let lastTime = pred sampleTime
                 in fromMaybe (f lastTime) $ unEvent e lastTime

  holdDyn v0 = buildDynamic (return v0)

  buildDynamic :: (t -> a) -> Event (Pure t) a -> t -> Dynamic (Pure t) a
  buildDynamic initialValue e initialTime =
    let Behavior f = hold (initialValue initialTime) e initialTime
    in Dynamic $ \t -> (f t, unEvent e t)

  -- Like hold, but each occurrence is a patch applied to the previous value.
  holdIncremental :: Patch p => PatchTarget p -> Event (Pure t) p -> t -> Incremental (Pure t) p
  holdIncremental initialValue e initialTime = Incremental $ \t -> (f t, unEvent e t)
    where f = memo $ \sampleTime ->
            -- Really, the sampleTime should never be prior to the initialTime,
            -- because that would mean the Behavior is being sampled before
            -- being created.
            if sampleTime <= initialTime
            then initialValue
            else let lastTime = pred sampleTime
                     lastValue = f lastTime
                 in case unEvent e lastTime of
                      Nothing -> lastValue
                      Just x -> fromMaybe lastValue $ apply x lastValue

  headE = slowHeadE

  -- An event that occurs (with value ()) exactly at time t.
  now t = Event $ guard . (t ==)
| ryantrinkle/reflex | src/Reflex/Pure.hs | bsd-3-clause | 8,306 | 0 | 21 | 2,038 | 2,681 | 1,393 | 1,288 | -1 | -1 |
{-# LANGUAGE Safe, TypeOperators #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Zip
-- Copyright : (c) Nils Schweinsberg 2011,
-- (c) George Giorgidze 2011
-- (c) University Tuebingen 2011
-- License : BSD-style (see the file libraries/base/LICENSE)
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Monadic zipping (used for monad comprehensions)
--
-----------------------------------------------------------------------------
module Control.Monad.Zip where
import Control.Monad (liftM, liftM2)
import Data.Functor.Identity
import Data.Monoid
import Data.Proxy
import qualified Data.List.NonEmpty as NE
import GHC.Generics
-- | `MonadZip` type class. Minimal definition: `mzip` or `mzipWith`
--
-- Instances should satisfy the laws:
--
-- * Naturality :
--
-- > liftM (f *** g) (mzip ma mb) = mzip (liftM f ma) (liftM g mb)
--
-- * Information Preservation:
--
-- > liftM (const ()) ma = liftM (const ()) mb
-- > ==>
-- > munzip (mzip ma mb) = (ma, mb)
--
class Monad m => MonadZip m where
    {-# MINIMAL mzip | mzipWith #-}
    -- mzip and mzipWith are defined in terms of each other, so an
    -- instance must override at least one of them.

    mzip :: m a -> m b -> m (a,b)
    mzip = mzipWith (,)

    mzipWith :: (a -> b -> c) -> m a -> m b -> m c
    mzipWith f ma mb = liftM (uncurry f) (mzip ma mb)

    munzip :: m (a,b) -> (m a, m b)
    munzip mab = (liftM fst mab, liftM snd mab)
    -- munzip is a member of the class because sometimes
    -- you can implement it more efficiently than the
    -- above default code.  See Trac #4370 comment by giorgidze
-- | @since 4.3.1.0
instance MonadZip [] where
    mzip     = zip
    mzipWith = zipWith
    munzip   = unzip

-- | @since 4.9.0.0
instance MonadZip NE.NonEmpty where
  mzip     = NE.zip
  mzipWith = NE.zipWith
  munzip   = NE.unzip

-- | @since 4.8.0.0
instance MonadZip Identity where
    mzipWith                 = liftM2
    munzip (Identity (a, b)) = (Identity a, Identity b)

-- | @since 4.8.0.0
instance MonadZip Dual where
    -- Cannot use coerce, it's unsafe
    mzipWith = liftM2

-- | @since 4.8.0.0
instance MonadZip Sum where
    mzipWith = liftM2

-- | @since 4.8.0.0
instance MonadZip Product where
    mzipWith = liftM2

-- | @since 4.8.0.0
instance MonadZip Maybe where
    mzipWith = liftM2

-- | @since 4.8.0.0
instance MonadZip First where
    mzipWith = liftM2

-- | @since 4.8.0.0
instance MonadZip Last where
    mzipWith = liftM2

-- | @since 4.8.0.0
instance MonadZip f => MonadZip (Alt f) where
    mzipWith f (Alt ma) (Alt mb) = Alt (mzipWith f ma mb)

-- | @since 4.9.0.0
instance MonadZip Proxy where
    -- Proxy carries no values, so zipping is trivially Proxy
    mzipWith _ _ _ = Proxy
-- Instances for GHC.Generics
-- | @since 4.9.0.0
-- instance MonadZip U1 where
-- mzipWith _ _ _ = U1
-- | @since 4.9.0.0
-- instance MonadZip Par1 where
-- mzipWith = liftM2
-- | @since 4.9.0.0
-- instance MonadZip f => MonadZip (Rec1 f) where
-- mzipWith f (Rec1 fa) (Rec1 fb) = Rec1 (mzipWith f fa fb)
-- | @since 4.9.0.0
-- instance MonadZip f => MonadZip (M1 i c f) where
-- mzipWith f (M1 fa) (M1 fb) = M1 (mzipWith f fa fb)
-- | @since 4.9.0.0
-- instance (MonadZip f, MonadZip g) => MonadZip (f :*: g) where
-- mzipWith f (x1 :*: y1) (x2 :*: y2) = mzipWith f x1 x2 :*: mzipWith f y1 y2
| rahulmutt/ghcvm | libraries/base/Control/Monad/Zip.hs | bsd-3-clause | 3,308 | 0 | 10 | 768 | 545 | 317 | 228 | 43 | 0 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "dist/dist-sandbox-261cd265/build/System/Posix/Files/Common.hs" #-}
{-# LINE 1 "System/Posix/Files/Common.hsc" #-}
{-# LANGUAGE Trustworthy #-}
{-# LINE 2 "System/Posix/Files/Common.hsc" #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Posix.Files.Common
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : non-portable (requires POSIX)
--
-- Functions defined by the POSIX standards for manipulating and querying the
-- file system. Names of underlying POSIX functions are indicated whenever
-- possible. A more complete documentation of the POSIX functions together
-- with a more detailed description of different error conditions are usually
-- available in the system's manual pages or from
-- <http://www.unix.org/version3/online.html> (free registration required).
--
-- When a function that calls an underlying POSIX function fails, the errno
-- code is converted to an 'IOError' using 'Foreign.C.Error.errnoToIOError'.
-- For a list of which errno codes may be generated, consult the POSIX
-- documentation for the underlying function.
--
-----------------------------------------------------------------------------
{-# LINE 28 "System/Posix/Files/Common.hsc" #-}
module System.Posix.Files.Common (
-- * File modes
-- FileMode exported by System.Posix.Types
unionFileModes, intersectFileModes,
nullFileMode,
ownerReadMode, ownerWriteMode, ownerExecuteMode, ownerModes,
groupReadMode, groupWriteMode, groupExecuteMode, groupModes,
otherReadMode, otherWriteMode, otherExecuteMode, otherModes,
setUserIDMode, setGroupIDMode,
stdFileMode, accessModes,
fileTypeModes,
blockSpecialMode, characterSpecialMode, namedPipeMode, regularFileMode,
directoryMode, symbolicLinkMode, socketMode,
-- ** Setting file modes
setFdMode, setFileCreationMask,
-- * File status
FileStatus(..),
-- ** Obtaining file status
getFdStatus,
-- ** Querying file status
deviceID, fileID, fileMode, linkCount, fileOwner, fileGroup,
specialDeviceID, fileSize, accessTime, modificationTime,
statusChangeTime,
accessTimeHiRes, modificationTimeHiRes, statusChangeTimeHiRes,
setFdTimesHiRes, touchFd,
isBlockDevice, isCharacterDevice, isNamedPipe, isRegularFile,
isDirectory, isSymbolicLink, isSocket,
-- * Setting file sizes
setFdSize,
-- * Changing file ownership
setFdOwnerAndGroup,
-- * Find system-specific limits for a file
PathVar(..), getFdPathVar, pathVarConst,
-- * Low level types and functions
{-# LINE 70 "System/Posix/Files/Common.hsc" #-}
CTimeSpec(..),
toCTimeSpec,
c_utimensat,
{-# LINE 74 "System/Posix/Files/Common.hsc" #-}
CTimeVal(..),
toCTimeVal,
c_utimes,
{-# LINE 78 "System/Posix/Files/Common.hsc" #-}
c_lutimes,
{-# LINE 80 "System/Posix/Files/Common.hsc" #-}
) where
import System.Posix.Types
import System.IO.Unsafe
import Data.Bits
import Data.Int
import Data.Ratio
import Data.Time.Clock.POSIX (POSIXTime)
import System.Posix.Internals
import Foreign.C
import Foreign.ForeignPtr
{-# LINE 92 "System/Posix/Files/Common.hsc" #-}
import Foreign.Marshal (withArray)
{-# LINE 94 "System/Posix/Files/Common.hsc" #-}
import Foreign.Ptr
import Foreign.Storable
-- -----------------------------------------------------------------------------
-- POSIX file modes
-- The abstract type 'FileMode', constants and operators for
-- manipulating the file modes defined by POSIX.
-- Permission-bit constants.  The numeric literals below were baked in by the
-- hsc2hs preprocessor from this platform's <sys/stat.h>; the octal value and
-- the POSIX name are noted next to each for readability.
-- | No permissions.
nullFileMode :: FileMode
nullFileMode = 0
-- | Owner has read permission.
ownerReadMode :: FileMode
ownerReadMode = (256)  -- 0o400, S_IRUSR
{-# LINE 110 "System/Posix/Files/Common.hsc" #-}
-- | Owner has write permission.
ownerWriteMode :: FileMode
ownerWriteMode = (128)  -- 0o200, S_IWUSR
{-# LINE 114 "System/Posix/Files/Common.hsc" #-}
-- | Owner has execute permission.
ownerExecuteMode :: FileMode
ownerExecuteMode = (64)  -- 0o100, S_IXUSR
{-# LINE 118 "System/Posix/Files/Common.hsc" #-}
-- | Group has read permission.
groupReadMode :: FileMode
groupReadMode = (32)  -- 0o040, S_IRGRP
{-# LINE 122 "System/Posix/Files/Common.hsc" #-}
-- | Group has write permission.
groupWriteMode :: FileMode
groupWriteMode = (16)  -- 0o020, S_IWGRP
{-# LINE 126 "System/Posix/Files/Common.hsc" #-}
-- | Group has execute permission.
groupExecuteMode :: FileMode
groupExecuteMode = (8)  -- 0o010, S_IXGRP
{-# LINE 130 "System/Posix/Files/Common.hsc" #-}
-- | Others have read permission.
otherReadMode :: FileMode
otherReadMode = (4)  -- 0o004, S_IROTH
{-# LINE 134 "System/Posix/Files/Common.hsc" #-}
-- | Others have write permission.
otherWriteMode :: FileMode
otherWriteMode = (2)  -- 0o002, S_IWOTH
{-# LINE 138 "System/Posix/Files/Common.hsc" #-}
-- | Others have execute permission.
otherExecuteMode :: FileMode
otherExecuteMode = (1)  -- 0o001, S_IXOTH
{-# LINE 142 "System/Posix/Files/Common.hsc" #-}
-- | Set user ID on execution.
setUserIDMode :: FileMode
setUserIDMode = (2048)  -- 0o4000, S_ISUID
{-# LINE 146 "System/Posix/Files/Common.hsc" #-}
-- | Set group ID on execution.
setGroupIDMode :: FileMode
setGroupIDMode = (1024)  -- 0o2000, S_ISGID
{-# LINE 150 "System/Posix/Files/Common.hsc" #-}
-- | Owner, group and others have read and write permission.
-- Equivalent to 0o666.
stdFileMode :: FileMode
stdFileMode = ownerReadMode  .|. ownerWriteMode .|.
              groupReadMode  .|. groupWriteMode .|.
              otherReadMode  .|. otherWriteMode
-- | Owner has read, write and execute permission.
ownerModes :: FileMode
ownerModes = (448)  -- 0o700, S_IRWXU
{-# LINE 160 "System/Posix/Files/Common.hsc" #-}
-- | Group has read, write and execute permission.
groupModes :: FileMode
groupModes = (56)  -- 0o070, S_IRWXG
{-# LINE 164 "System/Posix/Files/Common.hsc" #-}
-- | Others have read, write and execute permission.
otherModes :: FileMode
otherModes = (7)  -- 0o007, S_IRWXO
{-# LINE 168 "System/Posix/Files/Common.hsc" #-}
-- | Owner, group and others have read, write and execute permission.
-- Equivalent to 0o777.
accessModes :: FileMode
accessModes = ownerModes .|. groupModes .|. otherModes
-- | The union of two 'FileMode's: a bit is set in the result whenever it is
-- set in either argument.
unionFileModes :: FileMode -> FileMode -> FileMode
unionFileModes = (.|.)
-- | The intersection of two 'FileMode's: a bit is set in the result only when
-- it is set in both arguments.
intersectFileModes :: FileMode -> FileMode -> FileMode
intersectFileModes = (.&.)
-- | Mask selecting the file-type bits of a mode (0o170000, S_IFMT).
fileTypeModes :: FileMode
fileTypeModes = (61440)
{-# LINE 185 "System/Posix/Files/Common.hsc" #-}
-- | File-type value for block special files (0o060000, S_IFBLK).
blockSpecialMode :: FileMode
blockSpecialMode = (24576)
{-# LINE 188 "System/Posix/Files/Common.hsc" #-}
-- | File-type value for character special files (0o020000, S_IFCHR).
characterSpecialMode :: FileMode
characterSpecialMode = (8192)
{-# LINE 191 "System/Posix/Files/Common.hsc" #-}
-- | File-type value for FIFOs / named pipes (0o010000, S_IFIFO).
namedPipeMode :: FileMode
namedPipeMode = (4096)
{-# LINE 194 "System/Posix/Files/Common.hsc" #-}
-- | File-type value for regular files (0o100000, S_IFREG).
regularFileMode :: FileMode
regularFileMode = (32768)
{-# LINE 197 "System/Posix/Files/Common.hsc" #-}
-- | File-type value for directories (0o040000, S_IFDIR).
directoryMode :: FileMode
directoryMode = (16384)
{-# LINE 200 "System/Posix/Files/Common.hsc" #-}
-- | File-type value for symbolic links (0o120000, S_IFLNK).
symbolicLinkMode :: FileMode
symbolicLinkMode = (40960)
{-# LINE 203 "System/Posix/Files/Common.hsc" #-}
-- | File-type value for sockets (0o140000, S_IFSOCK).
socketMode :: FileMode
socketMode = (49152)
{-# LINE 206 "System/Posix/Files/Common.hsc" #-}
-- | @setFdMode fd mode@ acts like 'setFileMode' but uses a file descriptor
-- @fd@ instead of a 'FilePath'.
--
-- Note: calls @fchmod@.
setFdMode :: Fd -> FileMode -> IO ()
setFdMode (Fd fd) m =
  -- fchmod returns -1 on failure; convert errno into an IOError tagged
  -- with this function's name.
  throwErrnoIfMinus1_ "setFdMode" (c_fchmod fd m)
-- Raw binding to fchmod(2).
foreign import ccall unsafe "fchmod"
  c_fchmod :: CInt -> CMode -> IO CInt
-- | @setFileCreationMask mode@ sets the file mode creation mask to @mode@.
-- Modes set by this operation are subtracted from files and directories upon
-- creation. The previous file creation mask is returned.
--
-- Note: calls @umask@.
setFileCreationMask :: FileMode -> IO FileMode
setFileCreationMask = c_umask
-- -----------------------------------------------------------------------------
-- stat() support
-- | POSIX defines operations to get information, such as owner, permissions,
-- size and access times, about a file. This information is represented by the
-- 'FileStatus' type.
--
-- Note: see @chmod@.
newtype FileStatus = FileStatus (ForeignPtr CStat)
-- | ID of the device on which this file resides.
deviceID :: FileStatus -> DeviceID
-- | inode number
fileID :: FileStatus -> FileID
-- | File mode (such as permissions).
fileMode :: FileStatus -> FileMode
-- | Number of hard links to this file.
linkCount :: FileStatus -> LinkCount
-- | ID of owner.
fileOwner :: FileStatus -> UserID
-- | ID of group.
fileGroup :: FileStatus -> GroupID
-- | Describes the device that this file represents.
specialDeviceID :: FileStatus -> DeviceID
-- | Size of the file in bytes. If this file is a symbolic link the size is
-- the length of the pathname it contains.
fileSize :: FileStatus -> FileOffset
-- | Time of last access.
accessTime :: FileStatus -> EpochTime
-- | Time of last access in sub-second resolution.
accessTimeHiRes :: FileStatus -> POSIXTime
-- | Time of last modification.
modificationTime :: FileStatus -> EpochTime
-- | Time of last modification in sub-second resolution.
modificationTimeHiRes :: FileStatus -> POSIXTime
-- | Time of last status change (i.e. owner, group, link count, mode, etc.).
statusChangeTime :: FileStatus -> EpochTime
-- | Time of last status change (i.e. owner, group, link count, mode, etc.) in sub-second resolution.
statusChangeTimeHiRes :: FileStatus -> POSIXTime
-- Field accessors for the cached stat buffer.  The byte offsets below were
-- generated by hsc2hs from this platform's struct stat layout; do not edit
-- them by hand.  'unsafePerformIO' is used because the buffer is filled once
-- (by fstat) and never mutated afterwards, so each peek is referentially
-- transparent.  -- NOTE(review): immutability of the buffer is assumed from
-- usage in this file; confirm no writer exists elsewhere.
deviceID (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 0))
{-# LINE 268 "System/Posix/Files/Common.hsc" #-}
fileID (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 8))
{-# LINE 270 "System/Posix/Files/Common.hsc" #-}
fileMode (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 24))
{-# LINE 272 "System/Posix/Files/Common.hsc" #-}
linkCount (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 16))
{-# LINE 274 "System/Posix/Files/Common.hsc" #-}
fileOwner (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 28))
{-# LINE 276 "System/Posix/Files/Common.hsc" #-}
fileGroup (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 32))
{-# LINE 278 "System/Posix/Files/Common.hsc" #-}
specialDeviceID (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 40))
{-# LINE 280 "System/Posix/Files/Common.hsc" #-}
fileSize (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 48))
{-# LINE 282 "System/Posix/Files/Common.hsc" #-}
accessTime (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 72))
{-# LINE 284 "System/Posix/Files/Common.hsc" #-}
modificationTime (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 88))
{-# LINE 286 "System/Posix/Files/Common.hsc" #-}
statusChangeTime (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ ((\hsc_ptr -> peekByteOff hsc_ptr 104))
{-# LINE 288 "System/Posix/Files/Common.hsc" #-}
-- Sub-second timestamp accessors.  Each reads the whole-second field plus the
-- adjacent nanosecond field (offsets hsc-generated for this platform) and
-- combines them as sec + nsec/10^9, yielding a 'POSIXTime'.
accessTimeHiRes (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ \stat_ptr -> do
    sec <- ((\hsc_ptr -> peekByteOff hsc_ptr 72)) stat_ptr :: IO EpochTime
{-# LINE 292 "System/Posix/Files/Common.hsc" #-}
{-# LINE 293 "System/Posix/Files/Common.hsc" #-}
    nsec <- ((\hsc_ptr -> peekByteOff hsc_ptr 80)) stat_ptr :: IO (Int64)
{-# LINE 294 "System/Posix/Files/Common.hsc" #-}
    -- Fold the nanosecond remainder into a Rational fraction of a second.
    let frac = toInteger nsec % 10^(9::Int)
{-# LINE 310 "System/Posix/Files/Common.hsc" #-}
    return $ fromRational $ toRational sec + frac
modificationTimeHiRes (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ \stat_ptr -> do
    sec <- ((\hsc_ptr -> peekByteOff hsc_ptr 88)) stat_ptr :: IO EpochTime
{-# LINE 315 "System/Posix/Files/Common.hsc" #-}
{-# LINE 316 "System/Posix/Files/Common.hsc" #-}
    nsec <- ((\hsc_ptr -> peekByteOff hsc_ptr 96)) stat_ptr :: IO (Int64)
{-# LINE 317 "System/Posix/Files/Common.hsc" #-}
    let frac = toInteger nsec % 10^(9::Int)
{-# LINE 333 "System/Posix/Files/Common.hsc" #-}
    return $ fromRational $ toRational sec + frac
statusChangeTimeHiRes (FileStatus stat) =
  unsafePerformIO $ withForeignPtr stat $ \stat_ptr -> do
    sec <- ((\hsc_ptr -> peekByteOff hsc_ptr 104)) stat_ptr :: IO EpochTime
{-# LINE 338 "System/Posix/Files/Common.hsc" #-}
{-# LINE 339 "System/Posix/Files/Common.hsc" #-}
    nsec <- ((\hsc_ptr -> peekByteOff hsc_ptr 112)) stat_ptr :: IO (Int64)
{-# LINE 340 "System/Posix/Files/Common.hsc" #-}
    let frac = toInteger nsec % 10^(9::Int)
{-# LINE 356 "System/Posix/Files/Common.hsc" #-}
    return $ fromRational $ toRational sec + frac
-- | Checks if this file is a block device.
isBlockDevice :: FileStatus -> Bool
-- | Checks if this file is a character device.
isCharacterDevice :: FileStatus -> Bool
-- | Checks if this file is a named pipe device.
isNamedPipe :: FileStatus -> Bool
-- | Checks if this file is a regular file device.
isRegularFile :: FileStatus -> Bool
-- | Checks if this file is a directory device.
isDirectory :: FileStatus -> Bool
-- | Checks if this file is a symbolic link device.
isSymbolicLink :: FileStatus -> Bool
-- | Checks if this file is a socket device.
isSocket :: FileStatus -> Bool
-- Shared test: mask the status' mode down to its file-type bits and compare
-- them against the expected type constant.
hasFileType :: FileMode -> FileStatus -> Bool
hasFileType expected stat =
  (fileMode stat `intersectFileModes` fileTypeModes) == expected
isBlockDevice     = hasFileType blockSpecialMode
isCharacterDevice = hasFileType characterSpecialMode
isNamedPipe       = hasFileType namedPipeMode
isRegularFile     = hasFileType regularFileMode
isDirectory       = hasFileType directoryMode
isSymbolicLink    = hasFileType symbolicLinkMode
isSocket          = hasFileType socketMode
-- | @getFdStatus fd@ acts as 'getFileStatus' but uses a file descriptor @fd@.
--
-- Note: calls @fstat@.
getFdStatus :: Fd -> IO FileStatus
getFdStatus (Fd fd) = do
  -- 144 is the hsc-computed sizeof(struct stat) for this platform; the buffer
  -- is owned by a ForeignPtr so it is freed when the FileStatus is collected.
  fp <- mallocForeignPtrBytes (144)
{-# LINE 394 "System/Posix/Files/Common.hsc" #-}
  withForeignPtr fp $ \p ->
    throwErrnoIfMinus1_ "getFdStatus" (c_fstat fd p)
  return (FileStatus fp)
-- -----------------------------------------------------------------------------
-- Setting file times
{-# LINE 402 "System/Posix/Files/Common.hsc" #-}
-- | Haskell image of C @struct timespec@: whole seconds plus nanoseconds.
data CTimeSpec = CTimeSpec EpochTime CLong
instance Storable CTimeSpec where
    -- Size and field offsets below were generated by hsc2hs for this
    -- platform's struct timespec; do not edit by hand.
    sizeOf _ = (16)
{-# LINE 406 "System/Posix/Files/Common.hsc" #-}
    alignment _ = alignment (undefined :: CInt)
    poke p (CTimeSpec sec nsec) = do
        ((\hsc_ptr -> pokeByteOff hsc_ptr 0)) p sec
{-# LINE 409 "System/Posix/Files/Common.hsc" #-}
        ((\hsc_ptr -> pokeByteOff hsc_ptr 8)) p nsec
{-# LINE 410 "System/Posix/Files/Common.hsc" #-}
    peek p = do
        sec  <- (\hsc_ptr -> peekByteOff hsc_ptr 0) p
{-# LINE 412 "System/Posix/Files/Common.hsc" #-}
        nsec <- (\hsc_ptr -> peekByteOff hsc_ptr 8) p
{-# LINE 413 "System/Posix/Files/Common.hsc" #-}
        return $ CTimeSpec sec nsec
-- | Convert a 'POSIXTime' into a @struct timespec@ value, splitting it into
-- whole seconds and a non-negative nanosecond remainder (negative fractions
-- are normalised by borrowing one second).
toCTimeSpec :: POSIXTime -> CTimeSpec
toCTimeSpec t = CTimeSpec (CTime seconds) (truncate (nanoScale * remainder))
  where
    nanoScale = 10 ^ (9 :: Int)
    (seconds, remainder)
      | rawFrac < 0 = (rawSec - 1, rawFrac + 1)
      | otherwise   = (rawSec, rawFrac)
    (rawSec, rawFrac) = properFraction (toRational t)
{-# LINE 421 "System/Posix/Files/Common.hsc" #-}
{-# LINE 423 "System/Posix/Files/Common.hsc" #-}
foreign import ccall unsafe "utimensat"
c_utimensat :: CInt -> CString -> Ptr CTimeSpec -> CInt -> IO CInt
{-# LINE 426 "System/Posix/Files/Common.hsc" #-}
{-# LINE 428 "System/Posix/Files/Common.hsc" #-}
foreign import ccall unsafe "futimens"
c_futimens :: CInt -> Ptr CTimeSpec -> IO CInt
{-# LINE 431 "System/Posix/Files/Common.hsc" #-}
-- | Haskell image of C @struct timeval@: whole seconds plus microseconds.
data CTimeVal = CTimeVal CLong CLong
instance Storable CTimeVal where
    -- Size and field offsets below were generated by hsc2hs for this
    -- platform's struct timeval; do not edit by hand.
    sizeOf _ = (16)
{-# LINE 436 "System/Posix/Files/Common.hsc" #-}
    alignment _ = alignment (undefined :: CInt)
    poke p (CTimeVal sec usec) = do
        ((\hsc_ptr -> pokeByteOff hsc_ptr 0)) p sec
{-# LINE 439 "System/Posix/Files/Common.hsc" #-}
        ((\hsc_ptr -> pokeByteOff hsc_ptr 8)) p usec
{-# LINE 440 "System/Posix/Files/Common.hsc" #-}
    peek p = do
        sec <- (\hsc_ptr -> peekByteOff hsc_ptr 0) p
{-# LINE 442 "System/Posix/Files/Common.hsc" #-}
        usec <- (\hsc_ptr -> peekByteOff hsc_ptr 8) p
{-# LINE 443 "System/Posix/Files/Common.hsc" #-}
        return $ CTimeVal sec usec
-- | Convert a 'POSIXTime' into a @struct timeval@ value, splitting it into
-- whole seconds and a non-negative microsecond remainder (negative fractions
-- are normalised by borrowing one second).
toCTimeVal :: POSIXTime -> CTimeVal
toCTimeVal t = CTimeVal seconds (truncate (microScale * remainder))
  where
    microScale = 10 ^ (6 :: Int)
    (seconds, remainder)
      | rawFrac < 0 = (rawSec - 1, rawFrac + 1)
      | otherwise   = (rawSec, rawFrac)
    (rawSec, rawFrac) = properFraction (toRational t)
foreign import ccall unsafe "utimes"
c_utimes :: CString -> Ptr CTimeVal -> IO CInt
{-# LINE 455 "System/Posix/Files/Common.hsc" #-}
foreign import ccall unsafe "lutimes"
c_lutimes :: CString -> Ptr CTimeVal -> IO CInt
{-# LINE 458 "System/Posix/Files/Common.hsc" #-}
{-# LINE 460 "System/Posix/Files/Common.hsc" #-}
foreign import ccall unsafe "futimes"
c_futimes :: CInt -> Ptr CTimeVal -> IO CInt
{-# LINE 463 "System/Posix/Files/Common.hsc" #-}
-- | Like 'setFileTimesHiRes' but uses a file descriptor instead of a path.
-- This operation is not supported on all platforms. On these platforms,
-- this function will raise an exception.
--
-- Note: calls @futimens@ or @futimes@.
--
-- @since 2.7.0.0
setFdTimesHiRes :: Fd -> POSIXTime -> POSIXTime -> IO ()
{-# LINE 473 "System/Posix/Files/Common.hsc" #-}
setFdTimesHiRes (Fd fd) atime mtime =
  -- futimens expects a two-element array: [access time, modification time].
  withArray [toCTimeSpec atime, toCTimeSpec mtime] $ \times ->
    throwErrnoIfMinus1_ "setFdTimesHiRes" (c_futimens fd times)
{-# LINE 484 "System/Posix/Files/Common.hsc" #-}
-- | Like 'touchFile' but uses a file descriptor instead of a path.
-- This operation is not supported on all platforms. On these platforms,
-- this function will raise an exception.
--
-- Note: calls @futimes@.
--
-- @since 2.7.0.0
touchFd :: Fd -> IO ()
{-# LINE 494 "System/Posix/Files/Common.hsc" #-}
touchFd (Fd fd) =
  -- Passing a NULL times pointer makes futimes set both timestamps to "now".
  throwErrnoIfMinus1_ "touchFd" (c_futimes fd nullPtr)
{-# LINE 500 "System/Posix/Files/Common.hsc" #-}
-- -----------------------------------------------------------------------------
-- fchown()
-- | Acts as 'setOwnerAndGroup' but uses a file descriptor instead of a
-- 'FilePath'.
--
-- Note: calls @fchown@.
setFdOwnerAndGroup :: Fd -> UserID -> GroupID -> IO ()
setFdOwnerAndGroup (Fd fd) uid gid =
  throwErrnoIfMinus1_ "setFdOwnerAndGroup" (c_fchown fd uid gid)
-- Raw binding to fchown(2).
foreign import ccall unsafe "fchown"
  c_fchown :: CInt -> CUid -> CGid -> IO CInt
-- -----------------------------------------------------------------------------
-- ftruncate()
-- | Acts as 'setFileSize' but uses a file descriptor instead of a 'FilePath'.
--
-- Note: calls @ftruncate@.
setFdSize :: Fd -> FileOffset -> IO ()
setFdSize (Fd fd) off =
  throwErrnoIfMinus1_ "setFdSize" (c_ftruncate fd off)
-- -----------------------------------------------------------------------------
-- pathconf()/fpathconf() support
-- | Names of the per-file limits and options that can be queried via
-- pathconf(3)\/fpathconf(3); see 'pathVarConst' for the mapping to the
-- corresponding C @_PC_*@ constants.
data PathVar
  = FileSizeBits {- _PC_FILESIZEBITS -}
  | LinkLimit {- _PC_LINK_MAX -}
  | InputLineLimit {- _PC_MAX_CANON -}
  | InputQueueLimit {- _PC_MAX_INPUT -}
  | FileNameLimit {- _PC_NAME_MAX -}
  | PathNameLimit {- _PC_PATH_MAX -}
  | PipeBufferLimit {- _PC_PIPE_BUF -}
  -- These are described as optional in POSIX:
  {- _PC_ALLOC_SIZE_MIN -}
  {- _PC_REC_INCR_XFER_SIZE -}
  {- _PC_REC_MAX_XFER_SIZE -}
  {- _PC_REC_MIN_XFER_SIZE -}
  {- _PC_REC_XFER_ALIGN -}
  | SymbolicLinkLimit {- _PC_SYMLINK_MAX -}
  | SetOwnerAndGroupIsRestricted {- _PC_CHOWN_RESTRICTED -}
  | FileNamesAreNotTruncated {- _PC_NO_TRUNC -}
  | VDisableChar {- _PC_VDISABLE -}
  | AsyncIOAvailable {- _PC_ASYNC_IO -}
  | PrioIOAvailable {- _PC_PRIO_IO -}
  | SyncIOAvailable {- _PC_SYNC_IO -}
-- | Map a 'PathVar' to this platform's numeric @_PC_*@ constant for use with
-- pathconf(3)\/fpathconf(3).  The numeric values and the surviving branches
-- were selected by hsc2hs conditionals; constants the platform's headers did
-- not define fall through to the trailing 'error' cases.
pathVarConst :: PathVar -> CInt
pathVarConst v = case v of
        LinkLimit -> (0)
{-# LINE 553 "System/Posix/Files/Common.hsc" #-}
        InputLineLimit -> (1)
{-# LINE 554 "System/Posix/Files/Common.hsc" #-}
        InputQueueLimit -> (2)
{-# LINE 555 "System/Posix/Files/Common.hsc" #-}
        FileNameLimit -> (3)
{-# LINE 556 "System/Posix/Files/Common.hsc" #-}
        PathNameLimit -> (4)
{-# LINE 557 "System/Posix/Files/Common.hsc" #-}
        PipeBufferLimit -> (5)
{-# LINE 558 "System/Posix/Files/Common.hsc" #-}
        SetOwnerAndGroupIsRestricted -> (6)
{-# LINE 559 "System/Posix/Files/Common.hsc" #-}
        FileNamesAreNotTruncated -> (7)
{-# LINE 560 "System/Posix/Files/Common.hsc" #-}
        VDisableChar -> (8)
{-# LINE 561 "System/Posix/Files/Common.hsc" #-}
{-# LINE 563 "System/Posix/Files/Common.hsc" #-}
        SyncIOAvailable -> (9)
{-# LINE 564 "System/Posix/Files/Common.hsc" #-}
{-# LINE 567 "System/Posix/Files/Common.hsc" #-}
{-# LINE 569 "System/Posix/Files/Common.hsc" #-}
        AsyncIOAvailable -> (10)
{-# LINE 570 "System/Posix/Files/Common.hsc" #-}
{-# LINE 573 "System/Posix/Files/Common.hsc" #-}
{-# LINE 575 "System/Posix/Files/Common.hsc" #-}
        PrioIOAvailable -> (11)
{-# LINE 576 "System/Posix/Files/Common.hsc" #-}
{-# LINE 579 "System/Posix/Files/Common.hsc" #-}
{-# LINE 583 "System/Posix/Files/Common.hsc" #-}
        -- Unsupported on this platform; querying it is a programmer error.
        FileSizeBits -> error "_PC_FILESIZEBITS not available"
{-# LINE 585 "System/Posix/Files/Common.hsc" #-}
{-# LINE 589 "System/Posix/Files/Common.hsc" #-}
        -- Unsupported on this platform; querying it is a programmer error.
        SymbolicLinkLimit -> error "_PC_SYMLINK_MAX not available"
{-# LINE 591 "System/Posix/Files/Common.hsc" #-}
-- | @getFdPathVar var fd@ obtains the dynamic value of the requested
-- configurable file limit or option associated with the file or directory
-- attached to the open channel @fd@. For defined file limits, @getFdPathVar@
-- returns the associated value. For defined file options, the result of
-- @getFdPathVar@ is undefined, but not failure.
--
-- Note: calls @fpathconf@.
getFdPathVar :: Fd -> PathVar -> IO Limit
getFdPathVar (Fd fd) v =
    throwErrnoIfMinus1 "getFdPathVar" $
      c_fpathconf fd (pathVarConst v)
-- Raw binding to fpathconf(3).
foreign import ccall unsafe "fpathconf"
  c_fpathconf :: CInt -> CInt -> IO CLong
| phischu/fragnix | tests/packages/scotty/System.Posix.Files.Common.hs | bsd-3-clause | 23,125 | 199 | 14 | 4,373 | 3,570 | 2,051 | 1,519 | 283 | 14 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "src/System/PosixCompat/Types.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-|
This module re-exports the types from @System.Posix.Types@ on all platforms.
On Windows 'UserID', 'GroupID' and 'LinkCount' are missing, so they are
redefined by this module.
-}
module System.PosixCompat.Types (
module System.Posix.Types
) where
import System.Posix.Types
| phischu/fragnix | tests/packages/scotty/System.PosixCompat.Types.hs | bsd-3-clause | 478 | 0 | 5 | 114 | 27 | 20 | 7 | 7 | 0 |
module Baum.Binary.Show where
-- $Id$
import Baum.Binary.Type
import Data.Tree
-- | Render a binary tree as a rose tree of labels: empty subtrees are drawn
-- as @"-"@ leaves, inner nodes carry the shown key with exactly the left and
-- right subtrees as children.
toTree :: Show a => Baum a -> Tree String
toTree Null = Node "-" []
toTree b    = Node (show (key b)) (map toTree [left b, right b])
| Erdwolf/autotool-bonn | src/Baum/Binary/Show.hs | gpl-2.0 | 247 | 2 | 9 | 72 | 105 | 54 | 51 | 7 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Stack.Init
( initProject
, InitOpts (..)
) where
import Stack.Prelude
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.Foldable as F
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import Data.List.Extra (groupSortOn)
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Normalize as T (normalize , NormalizationMode(NFC))
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import qualified Distribution.Text as C
import qualified Distribution.Version as C
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.Find (findFiles)
import Path.IO hiding (findFiles)
import qualified Paths_stack as Meta
import qualified RIO.FilePath as FP
import RIO.List ((\\), intercalate, intersperse,
isSuffixOf, isPrefixOf)
import RIO.List.Partial (minimumBy)
import Stack.BuildPlan
import Stack.Config (getSnapshots,
makeConcreteResolver)
import Stack.Constants
import Stack.SourceMap
import Stack.Types.Config
import Stack.Types.Resolver
import Stack.Types.Version
-- | Generate stack.yaml
--
-- Scans the given search directories for cabal\/hpack packages, resolves a
-- snapshot that can build them, and writes a commented stack.yaml to
-- @currDir\/stack.yaml@ (refusing to overwrite unless @--force@ was given).
initProject
    :: (HasConfig env, HasGHCVariant env)
    => Path Abs Dir
    -> InitOpts
    -> Maybe AbstractResolver
    -> RIO env ()
initProject currDir initOpts mresolver = do
    let dest = currDir </> stackDotYaml
    reldest <- toFilePath `liftM` makeRelativeToCurrentDir dest
    -- Refuse to clobber an existing stack.yaml unless --force was passed.
    exists <- doesFileExist dest
    when (not (forceOverwrite initOpts) && exists) $
        throwString
            ("Error: Stack configuration file " <> reldest <>
             " exists, use '--force' to overwrite it.")
    -- Discover candidate package directories (defaulting to currDir).
    dirs <- mapM (resolveDir' . T.unpack) (searchDirs initOpts)
    let find = findCabalDirs (includeSubDirs initOpts)
        dirs' = if null dirs then [currDir] else dirs
    logInfo "Looking for .cabal or package.yaml files to use to init the project."
    cabaldirs <- Set.toList . Set.unions <$> mapM find dirs'
    (bundle, dupPkgs) <- cabalPackagesCheck cabaldirs Nothing
    -- Turn absolute package paths into paths relative to the project root.
    let makeRelDir dir =
            case stripProperPrefix currDir dir of
                Nothing
                    | currDir == dir -> "."
                    | otherwise -> assert False $ toFilePathNoTrailingSep dir
                Just rel -> toFilePathNoTrailingSep rel
        fpToPkgDir fp =
            let absDir = parent fp
            in ResolvedPath (RelFilePath $ T.pack $ makeRelDir absDir) absDir
        pkgDirs = Map.map (fpToPkgDir . fst) bundle
    -- Pick a snapshot; rbundle is the subset of packages it can actually use.
    (snapshotLoc, flags, extraDeps, rbundle) <- getDefaultResolver initOpts mresolver pkgDirs
    let ignored = Map.difference bundle rbundle
        dupPkgMsg
            | dupPkgs /= [] =
                "Warning (added by new or init): Some packages were found to \
                \have names conflicting with others and have been commented \
                \out in the packages section.\n"
            | otherwise = ""
        missingPkgMsg
            | Map.size ignored > 0 =
                "Warning (added by new or init): Some packages were found to \
                \be incompatible with the resolver and have been left commented \
                \out in the packages section.\n"
            | otherwise = ""
        extraDepMsg
            | Map.size extraDeps > 0 =
                "Warning (added by new or init): Specified resolver could not \
                \satisfy all dependencies. Some external packages have been \
                \added as dependencies.\n"
            | otherwise = ""
        makeUserMsg msgs =
            let msg = concat msgs
            in if msg /= "" then
                  msg <> "You can omit this message by removing it from \
                         \stack.yaml\n"
               else ""
        userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg]
        gpdByDir = Map.fromList [ (parent fp, gpd) | (fp, gpd) <- Map.elems bundle]
        gpds = Map.elems $
            Map.mapMaybe (flip Map.lookup gpdByDir . resolvedAbsolute) rbundle
    -- Pin each extra dependency to a complete (immutable) package location.
    deps <- for (Map.toList extraDeps) $ \(n, v) ->
        PLImmutable . cplComplete <$>
        completePackageLocation (RPLIHackage (PackageIdentifierRevision n v CFILatest) Nothing)
    let p = Project
            { projectUserMsg = if userMsg == "" then Nothing else Just userMsg
            , projectPackages = resolvedRelative <$> Map.elems rbundle
            , projectDependencies = map toRawPL deps
            , projectFlags = removeSrcPkgDefaultFlags gpds flags
            , projectResolver = snapshotLoc
            , projectCompiler = Nothing
            , projectExtraPackageDBs = []
            , projectCurator = Nothing
            , projectDropPackages = mempty
            }
        makeRel = fmap toFilePath . makeRelativeToCurrentDir
        indent t = T.unlines $ fmap ("    " <>) (T.lines t)
    -- Report what was chosen and what was dropped before writing the file.
    logInfo $ "Initialising configuration using resolver: " <> display snapshotLoc
    logInfo $ "Total number of user packages considered: "
              <> display (Map.size bundle + length dupPkgs)
    when (dupPkgs /= []) $ do
        logWarn $ "Warning! Ignoring "
                  <> displayShow (length dupPkgs)
                  <> " duplicate packages:"
        rels <- mapM makeRel dupPkgs
        logWarn $ display $ indent $ showItems rels
    when (Map.size ignored > 0) $ do
        logWarn $ "Warning! Ignoring "
                  <> displayShow (Map.size ignored)
                  <> " packages due to dependency conflicts:"
        rels <- mapM makeRel (Map.elems (fmap fst ignored))
        logWarn $ display $ indent $ showItems rels
    when (Map.size extraDeps > 0) $ do
        logWarn $ "Warning! " <> displayShow (Map.size extraDeps)
                  <> " external dependencies were added."
    logInfo $
        (if exists then "Overwriting existing configuration file: "
         else "Writing configuration to file: ")
        <> fromString reldest
    -- Atomic write so a failed init never leaves a half-written stack.yaml.
    writeBinaryFileAtomic dest
        $ renderStackYaml p
            (Map.elems $ fmap (makeRelDir . parent . fst) ignored)
            (map (makeRelDir . parent) dupPkgs)
    logInfo "All done."
-- | Render a stack.yaml file with comments, see:
-- https://github.com/commercialhaskell/stack/issues/226
--
-- Serialises the 'Project' to YAML and interleaves explanatory comment
-- sections; the two extra arguments are package paths to emit commented-out
-- (resolver-incompatible and duplicate-named packages, respectively).
renderStackYaml :: Project -> [FilePath] -> [FilePath] -> B.Builder
renderStackYaml p ignoredPackages dupPackages =
  case Yaml.toJSON p of
    Yaml.Object o -> renderObject o
    _ -> assert False $ B.byteString $ Yaml.encode p
  where
    renderObject o =
        B.byteString headerHelp
        <> B.byteString "\n\n"
        <> F.foldMap (goComment o) comments
        -- Any keys not covered by 'comments' are dumped verbatim at the end.
        <> goOthers (o `HM.difference` HM.fromList comments)
        <> B.byteString footerHelp
        <> "\n"
    goComment o (name, comment) =
        case (convert <$> HM.lookup name o) <|> nonPresentValue name of
            Nothing -> assert (name == "user-message") mempty
            Just v ->
                B.byteString comment <>
                B.byteString "\n" <>
                v <>
                -- NOTE(review): '<>' binds tighter than if/then/else, so this
                -- parses as `else ("" <> B.byteString "\n")` -- i.e. the
                -- trailing newline is emitted only when name /= "packages".
                -- Looks like an operator-precedence accident; confirm whether
                -- `(if ...) <> B.byteString "\n"` was intended.
                if name == "packages" then commentedPackages else "" <>
                B.byteString "\n"
      where
        convert v = B.byteString (Yaml.encode $ Yaml.object [(name, v)])
        -- Some fields in stack.yaml are optional and may not be
        -- generated. For these, we provided commented out dummy
        -- values to go along with the comments.
        nonPresentValue "extra-deps" = Just "# extra-deps: []\n"
        nonPresentValue "flags" = Just "# flags: {}\n"
        nonPresentValue "extra-package-dbs" = Just "# extra-package-dbs: []\n"
        nonPresentValue _ = Nothing
    commentLine l | null l = "#"
                  | otherwise = "# " ++ l
    commentHelp = BC.pack . intercalate "\n" . map commentLine
    commentedPackages =
        let ignoredComment = commentHelp
                [ "The following packages have been ignored due to incompatibility with the"
                , "resolver compiler, dependency conflicts with other packages"
                , "or unsatisfied dependencies."
                ]
            dupComment = commentHelp
                [ "The following packages have been ignored due to package name conflict "
                , "with other packages."
                ]
        in commentPackages ignoredComment ignoredPackages
           <> commentPackages dupComment dupPackages
    -- Emit a comment block followed by one "#- path" line per package.
    commentPackages comment pkgs
        | pkgs /= [] =
            B.byteString comment
            <> B.byteString "\n"
            <> B.byteString (BC.pack $ concat
                $ map (\x -> "#- " ++ x ++ "\n") pkgs ++ ["\n"])
        | otherwise = ""
    goOthers o
        | HM.null o = mempty
        | otherwise = assert False $ B.byteString $ Yaml.encode o
    -- Per Section Help
    comments =
        [ ("user-message" , userMsgHelp)
        , ("resolver" , resolverHelp)
        , ("packages" , packageHelp)
        , ("extra-deps" , extraDepsHelp)
        , ("flags" , "# Override default flag values for local packages and extra-deps")
        , ("extra-package-dbs", "# Extra package databases containing global packages")
        ]
    -- Help strings
    headerHelp = commentHelp
        [ "This file was automatically generated by 'stack init'"
        , ""
        , "Some commonly used options have been documented as comments in this file."
        , "For advanced use and comprehensive documentation of the format, please see:"
        , "https://docs.haskellstack.org/en/stable/yaml_configuration/"
        ]
    resolverHelp = commentHelp
        [ "Resolver to choose a 'specific' stackage snapshot or a compiler version."
        , "A snapshot resolver dictates the compiler version and the set of packages"
        , "to be used for project dependencies. For example:"
        , ""
        , "resolver: lts-3.5"
        , "resolver: nightly-2015-09-21"
        , "resolver: ghc-7.10.2"
        , ""
        , "The location of a snapshot can be provided as a file or url. Stack assumes"
        , "a snapshot provided as a file might change, whereas a url resource does not."
        , ""
        , "resolver: ./custom-snapshot.yaml"
        , "resolver: https://example.com/snapshots/2018-01-01.yaml"
        ]
    userMsgHelp = commentHelp
        [ "A warning or info to be displayed to the user on config load." ]
    packageHelp = commentHelp
        [ "User packages to be built."
        , "Various formats can be used as shown in the example below."
        , ""
        , "packages:"
        , "- some-directory"
        , "- https://example.com/foo/bar/baz-0.0.2.tar.gz"
        , "  subdirs:"
        , "  - auto-update"
        , "  - wai"
        ]
    extraDepsHelp = commentHelp
        [ "Dependency packages to be pulled from upstream that are not in the resolver."
        , "These entries can reference officially published versions as well as"
        , "forks / in-progress versions pinned to a git hash. For example:"
        , ""
        , "extra-deps:"
        , "- acme-missiles-0.3"
        , "- git: https://github.com/commercialhaskell/stack.git"
        , "  commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a"
        , ""
        ]
    footerHelp =
        let major = toMajorVersion $ C.mkVersion' Meta.version
        in commentHelp
        [ "Control whether we use the GHC we find on the path"
        , "system-ghc: true"
        , ""
        , "Require a specific version of stack, using version ranges"
        , "require-stack-version: -any # Default"
        , "require-stack-version: \""
            ++ C.display (C.orLaterVersion major) ++ "\""
        , ""
        , "Override the architecture used by stack, especially useful on Windows"
        , "arch: i386"
        , "arch: x86_64"
        , ""
        , "Extra directories used by stack for building"
        , "extra-include-dirs: [/path/to/dir]"
        , "extra-lib-dirs: [/path/to/dir]"
        , ""
        , "Allow a newer minor version of GHC than the snapshot specifies"
        , "compiler-check: newer-minor"
        ]
-- | Download the snapshot list from the snapshot server.
--
-- On any exception a human-readable explanation is logged (including the
-- common missing-Certificate-Authority cause and where to read more), the
-- original exception text is logged last, and an empty-message
-- 'throwString' aborts; the empty message is deliberate since everything
-- useful has already been logged.
getSnapshots' :: HasConfig env => RIO env Snapshots
getSnapshots' = do
    getSnapshots `catchAny` \e -> do
      logError $
          "Unable to download snapshot list, and therefore could " <>
          "not generate a stack.yaml file automatically"
      logError $
          "This sometimes happens due to missing Certificate Authorities " <>
          "on your system. For more information, see:"
      logError ""
      logError "    https://github.com/commercialhaskell/stack/issues/234"
      logError ""
      logError "You can try again, or create your stack.yaml file by hand. See:"
      logError ""
      logError "    http://docs.haskellstack.org/en/stable/yaml_configuration/"
      logError ""
      logError $ "Exception was: " <> displayShow e
      throwString ""
-- | Get the default resolver value
--
-- When the user supplied an explicit resolver we load its snapshot
-- candidate directly; otherwise the best match is selected from the
-- recommended snapshot list downloaded via 'getSnapshots''.
getDefaultResolver
  :: (HasConfig env, HasGHCVariant env)
  => InitOpts
  -> Maybe AbstractResolver
  -> Map PackageName (ResolvedPath Dir)
  -- ^ Src package name: cabal dir
  -> RIO env
       ( RawSnapshotLocation
       , Map PackageName (Map FlagName Bool)
       , Map PackageName Version
       , Map PackageName (ResolvedPath Dir))
  -- ^ ( Resolver
  --    , Flags for src packages and extra deps
  --    , Extra dependencies
  --    , Src packages actually considered)
getDefaultResolver initOpts mresolver pkgDirs = do
    (candidate, loc) <- case mresolver of
        Nothing -> selectSnapResolver
        Just ar -> do
            sl <- makeConcreteResolver ar
            c <- loadProjectSnapshotCandidate sl NoPrintWarnings False
            return (c, sl)
    getWorkingResolverPlan initOpts pkgDirs candidate loc
    where
        -- TODO support selecting best across regular and custom snapshots
        selectSnapResolver = do
            snaps <- fmap getRecommendedSnapshots getSnapshots'
            (c, l, r) <- selectBestSnapshot (Map.elems pkgDirs) snaps
            case r of
                BuildPlanCheckFail {} | not (omitPackages initOpts)
                        -> throwM (NoMatchingSnapshot snaps)
                -- Partial matches are acceptable here: packages with
                -- problems are handled later by 'getWorkingResolverPlan'.
                _ -> return (c, l)
-- | Starting from a snapshot candidate, repeatedly drop user packages that
-- cannot be built with it (only when package omission is allowed) until a
-- workable plan is found, returning the resolver location, flags, extra
-- dependencies and the packages that survived.
getWorkingResolverPlan
  :: (HasConfig env, HasGHCVariant env)
  => InitOpts
  -> Map PackageName (ResolvedPath Dir)
  -- ^ Src packages: cabal dir
  -> SnapshotCandidate env
  -> RawSnapshotLocation
  -> RIO env
       ( RawSnapshotLocation
       , Map PackageName (Map FlagName Bool)
       , Map PackageName Version
       , Map PackageName (ResolvedPath Dir))
  -- ^ ( SnapshotDef
  --    , Flags for src packages and extra deps
  --    , Extra dependencies
  --    , Src packages actually considered)
getWorkingResolverPlan initOpts pkgDirs0 snapCandidate snapLoc = do
    logInfo $ "Selected resolver: " <> display snapLoc
    go pkgDirs0
    where
      go pkgDirs = do
          eres <- checkBundleResolver initOpts snapLoc snapCandidate (Map.elems pkgDirs)
          -- if some packages failed try again using the rest
          case eres of
              Right (f, edeps)-> return (snapLoc, f, edeps, pkgDirs)
              Left ignored
                | Map.null available -> do
                    -- Nothing left to try; fall back to an empty plan so a
                    -- config file can still be written.
                    logWarn "*** Could not find a working plan for any of \
                            \the user packages.\nProceeding to create a \
                            \config anyway."
                    return (snapLoc, Map.empty, Map.empty, Map.empty)
                | otherwise -> do
                    -- A Left result must ignore at least one package,
                    -- otherwise the recursion could not terminate.
                    when (Map.size available == Map.size pkgDirs) $
                        error "Bug: No packages to ignore"
                    if length ignored > 1 then do
                      logWarn "*** Ignoring packages:"
                      logWarn $ display $ indent $ showItems $ map packageNameString ignored
                    else
                      logWarn $ "*** Ignoring package: "
                             <> fromString
                                  (case ignored of
                                    [] -> error "getWorkingResolverPlan.head"
                                    x:_ -> packageNameString x)
                    go available
                where
                  indent t = T.unlines $ fmap ("    " <>) (T.lines t)
                  isAvailable k _ = k `notElem` ignored
                  available = Map.filterWithKey isAvailable pkgDirs
-- | Check how well the snapshot candidate covers the given source packages.
--
-- Returns @Right (flags, extraDeps)@ when a plan exists, or
-- @Left packagesToDrop@ when omission is enabled and some packages have
-- unsatisfied dependencies or a compiler mismatch; otherwise the
-- corresponding 'ResolverPartial' / 'ResolverMismatch' exception is thrown.
checkBundleResolver
  :: (HasConfig env, HasGHCVariant env)
  => InitOpts
  -> RawSnapshotLocation
  -> SnapshotCandidate env
  -> [ResolvedPath Dir]
  -- ^ Src package dirs
  -> RIO env
       (Either [PackageName] ( Map PackageName (Map FlagName Bool)
                             , Map PackageName Version))
checkBundleResolver initOpts snapshotLoc snapCandidate pkgDirs = do
    result <- checkSnapBuildPlan pkgDirs Nothing snapCandidate
    case result of
        BuildPlanCheckOk f -> return $ Right (f, Map.empty)
        BuildPlanCheckPartial _f e -> do -- FIXME:qrilka unused f
            if omitPackages initOpts
                then do
                    warnPartial result
                    logWarn "*** Omitting packages with unsatisfied dependencies"
                    return $ Left $ failedUserPkgs e
                else throwM $ ResolverPartial snapshotLoc (show result)
        BuildPlanCheckFail _ e _
            | omitPackages initOpts -> do
                logWarn $ "*** Resolver compiler mismatch: "
                       <> display snapshotLoc
                logWarn $ display $ indent $ T.pack $ show result
                return $ Left $ failedUserPkgs e
            | otherwise -> throwM $ ResolverMismatch snapshotLoc (show result)
    where
      indent t = T.unlines $ fmap ("    " <>) (T.lines t)
      warnPartial res = do
          logWarn $ "*** Resolver " <> display snapshotLoc
                  <> " will need external packages: "
          logWarn $ display $ indent $ T.pack $ show res
      -- Names of the user packages whose dependencies could not be met.
      failedUserPkgs e = Map.keys $ Map.unions (Map.elems (fmap deNeededBy e))
-- | All snapshots worth trying, best first: the most recent LTS, then the
-- latest nightly, then the remaining LTS releases newest-first.  With no
-- LTS releases at all, the nightly stands alone.
getRecommendedSnapshots :: Snapshots -> NonEmpty SnapName
getRecommendedSnapshots snaps =
    maybe (latestNightly :| []) slotInNightly (NonEmpty.nonEmpty ltsNewestFirst)
  where
    -- Insert the nightly immediately after the newest LTS.
    slotInNightly (newest :| older) = newest :| (latestNightly : older)
    ltsNewestFirst = [uncurry LTS v | v <- IntMap.toDescList (snapshotsLts snaps)]
    latestNightly = Nightly (snapshotsNightly snaps)
-- | Options controlling the behaviour of @stack init@.
data InitOpts = InitOpts
    { searchDirs :: ![T.Text]
    -- ^ List of sub directories to search for .cabal files
    , omitPackages :: Bool
    -- ^ Exclude conflicting or incompatible user packages
    , forceOverwrite :: Bool
    -- ^ Overwrite existing stack.yaml
    , includeSubDirs :: Bool
    -- ^ If True, include all .cabal files found in any sub directories
    }
-- | Collect the parent directory of every @.cabal@ or @package.yaml@ file
-- found under @dir@.  Subdirectories are entered only when the first
-- argument is 'True', and directories matched by 'ignoredDirs' or starting
-- with a dot are never entered.
findCabalDirs
  :: HasConfig env
  => Bool -> Path Abs Dir -> RIO env (Set (Path Abs Dir))
findCabalDirs recurse dir = do
    pkgFiles <- liftIO (findFiles dir isPackageFile descendInto)
    return (Set.fromList (map parent pkgFiles))
  where
    descendInto sub = recurse && not (skipDir sub)
    isPackageFile f = isHpackFile f || isCabalFile f
    isHpackFile = (== "package.yaml") . toFilePath . filename
    isCabalFile = (".cabal" `isSuffixOf`) . toFilePath
    skipDir path =
        "." `isPrefixOf` base || base `Set.member` ignoredDirs
      where
        base = FP.dropTrailingPathSeparator (toFilePath (dirname path))
-- | Special directories that we don't want to traverse for .cabal files
ignoredDirs :: Set FilePath
ignoredDirs = Set.singleton "dist"
-- | Validate the discovered package directories.
--
-- Loads each package description, requires every package's name to match
-- its .cabal file name (fatal error listing offenders otherwise), and
-- resolves duplicate package names by keeping the copy highest in the
-- directory tree.  Returns the unique packages keyed by name together
-- with the .cabal files that were ignored as duplicates.
cabalPackagesCheck
  :: (HasConfig env, HasGHCVariant env)
  => [Path Abs Dir]
  -> Maybe String
  -> RIO env
       ( Map PackageName (Path Abs File, C.GenericPackageDescription)
       , [Path Abs File])
cabalPackagesCheck cabaldirs dupErrMsg = do
    when (null cabaldirs) $ do
      logWarn "We didn't find any local package directories"
      logWarn "You may want to create a package with \"stack new\" instead"
      logWarn "Create an empty project for now"
      logWarn "If this isn't what you want, please delete the generated \"stack.yaml\""
    relpaths <- mapM prettyPath cabaldirs
    logInfo "Using cabal packages:"
    logInfo $ formatGroup relpaths
    packages <- for cabaldirs $ \dir -> do
      (gpdio, _name, cabalfp) <- loadCabalFilePath dir
      gpd <- liftIO $ gpdio YesPrintWarnings
      pure (cabalfp, gpd)
    -- package name cannot be empty or missing otherwise
    -- it will result in cabal solver failure.
    -- stack requires packages name to match the cabal file name
    -- Just the latter check is enough to cover both the cases
    -- Unicode normalization (NFC) so visually-identical names compare equal.
    let normalizeString = T.unpack . T.normalize T.NFC . T.pack
        getNameMismatchPkg (fp, gpd)
            | (normalizeString . packageNameString . gpdPackageName) gpd /= (normalizeString . FP.takeBaseName . toFilePath) fp
                = Just fp
            | otherwise = Nothing
        nameMismatchPkgs = mapMaybe getNameMismatchPkg packages
    when (nameMismatchPkgs /= []) $ do
      rels <- mapM prettyPath nameMismatchPkgs
      error $ "Package name as defined in the .cabal file must match the \
              \.cabal file name.\n\
              \Please fix the following packages and try again:\n"
              <> T.unpack (utf8BuilderToText (formatGroup rels))
    let dupGroups = filter ((> 1) . length)
                        . groupSortOn (gpdPackageName . snd)
        dupAll = concat $ dupGroups packages
        -- Among duplicates prefer to include the ones in upper level dirs
        pathlen = length . FP.splitPath . toFilePath . fst
        getmin = minimumBy (compare `on` pathlen)
        dupSelected = map getmin (dupGroups packages)
        dupIgnored = dupAll \\ dupSelected
        unique = packages \\ dupIgnored
    when (dupIgnored /= []) $ do
      dups <- mapM (mapM (prettyPath. fst)) (dupGroups packages)
      logWarn $
          "Following packages have duplicate package names:\n" <>
          mconcat (intersperse "\n" (map formatGroup dups))
      case dupErrMsg of
        Nothing -> logWarn $
               "Packages with duplicate names will be ignored.\n"
            <> "Packages in upper level directories will be preferred.\n"
        Just msg -> error msg
    return (Map.fromList
            $ map (\(file, gpd) -> (gpdPackageName gpd,(file, gpd))) unique
           , map fst dupIgnored)
-- | Render a list of paths as "- path" bullet lines, one per entry.
formatGroup :: [String] -> Utf8Builder
formatGroup paths = mconcat [ "- " <> fromString p <> "\n" | p <- paths ]
-- | Render a path relative to the current directory when possible, falling
-- back to the original (absolute) form when 'makeRelativeToCurrentDir'
-- throws a 'PathException'.
prettyPath ::
    (MonadIO m, RelPath (Path r t) ~ Path Rel t, AnyPath (Path r t))
    => Path r t
    -> m FilePath
prettyPath path = do
    eres <- liftIO $ try $ makeRelativeToCurrentDir path
    return $ case eres of
        Left (_ :: PathException) -> toFilePath path
        Right res -> toFilePath res
| juhp/stack | src/Stack/Init.hs | bsd-3-clause | 24,124 | 0 | 22 | 7,963 | 4,928 | 2,510 | 2,418 | 463 | 7 |
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.InstalledPackageInfo
-- Copyright : (c) The University of Glasgow 2004
--
-- Maintainer : libraries@haskell.org
-- Portability : portable
--
-- This is the information about an /installed/ package that
-- is communicated to the @ghc-pkg@ program in order to register
-- a package. @ghc-pkg@ now consumes this package format (as of version
-- 6.4). This is specific to GHC at the moment.
--
-- The @.cabal@ file format is for describing a package that is not yet
-- installed. It has a lot of flexibility, like conditionals and dependency
-- ranges. As such, that format is not at all suitable for describing a package
-- that has already been built and installed. By the time we get to that stage,
-- we have resolved all conditionals and resolved dependency version
-- constraints to exact versions of dependent packages. So, this module defines
-- the 'InstalledPackageInfo' data structure that contains all the info we keep
-- about an installed package. There is a parser and pretty printer. The
-- textual format is rather simpler than the @.cabal@ format: there are no
-- sections, for example.
-- This module is meant to be local-only to Distribution...
module Distribution.InstalledPackageInfo (
InstalledPackageInfo_(..), InstalledPackageInfo,
OriginalModule(..), ExposedModule(..),
ParseResult(..), PError(..), PWarning,
emptyInstalledPackageInfo,
parseInstalledPackageInfo,
showInstalledPackageInfo,
showInstalledPackageInfoField,
showSimpleInstalledPackageInfoField,
fieldsInstalledPackageInfo,
) where
import Distribution.ParseUtils
( FieldDescr(..), ParseResult(..), PError(..), PWarning
, simpleField, listField, parseLicenseQ
, showFields, showSingleNamedField, showSimpleSingleNamedField
, parseFieldsFlat
, parseFilePathQ, parseTokenQ, parseModuleNameQ, parsePackageNameQ
, showFilePath, showToken, boolField, parseOptVersion
, parseFreeText, showFreeText, parseOptCommaList )
import Distribution.License ( License(..) )
import Distribution.Package
( PackageName(..), PackageIdentifier(..)
, PackageId, InstalledPackageId(..)
, packageName, packageVersion, PackageKey(..) )
import qualified Distribution.Package as Package
import Distribution.ModuleName
( ModuleName )
import Distribution.Version
( Version(..) )
import Distribution.Text
( Text(disp, parse) )
import Text.PrettyPrint as Disp
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.Binary (Binary)
import Data.Maybe (fromMaybe)
import GHC.Generics (Generic)
-- -----------------------------------------------------------------------------
-- The InstalledPackageInfo type
-- | Everything recorded about an installed package; the type parameter @m@
-- is the module-name representation (usually 'ModuleName', see the
-- 'InstalledPackageInfo' alias).
data InstalledPackageInfo_ m
   = InstalledPackageInfo {
        -- these parts are exactly the same as PackageDescription
        installedPackageId :: InstalledPackageId,
        sourcePackageId :: PackageId,
        packageKey :: PackageKey,
        license :: License,
        copyright :: String,
        maintainer :: String,
        author :: String,
        stability :: String,
        homepage :: String,
        pkgUrl :: String,
        synopsis :: String,
        description :: String,
        category :: String,
        -- these parts are required by an installed package only:
        exposed :: Bool,
        exposedModules :: [ExposedModule],
        instantiatedWith :: [(m, OriginalModule)],
        hiddenModules :: [m],
        trusted :: Bool,
        importDirs :: [FilePath],
        libraryDirs :: [FilePath],
        dataDir :: FilePath,
        hsLibraries :: [String],
        extraLibraries :: [String],
        extraGHCiLibraries:: [String], -- overrides extraLibraries for GHCi
        includeDirs :: [FilePath],
        includes :: [String],
        depends :: [InstalledPackageId],
        ccOptions :: [String],
        ldOptions :: [String],
        frameworkDirs :: [FilePath],
        frameworks :: [String],
        haddockInterfaces :: [FilePath],
        haddockHTMLs :: [FilePath],
        pkgRoot :: Maybe FilePath
    }
    deriving (Generic, Read, Show)
instance Binary m => Binary (InstalledPackageInfo_ m)
instance Package.Package (InstalledPackageInfo_ str) where
   packageId = sourcePackageId
instance Package.PackageInstalled (InstalledPackageInfo_ str) where
   -- The right-hand sides are the record selectors of the same names,
   -- not recursive calls: the class methods delegate to the fields.
   installedPackageId = installedPackageId
   installedDepends = depends
-- | The concrete type everyone uses: module names are 'ModuleName'.
type InstalledPackageInfo = InstalledPackageInfo_ ModuleName
-- | A blank record: empty strings and lists everywhere, 'noVersion' for
-- the source package id, not exposed and not trusted.  Used as the
-- starting value when parsing a package description.
emptyInstalledPackageInfo :: InstalledPackageInfo_ m
emptyInstalledPackageInfo
   = InstalledPackageInfo {
        installedPackageId = InstalledPackageId "",
        sourcePackageId = PackageIdentifier (PackageName "") noVersion,
        packageKey = OldPackageKey (PackageIdentifier
                                    (PackageName "") noVersion),
        license = UnspecifiedLicense,
        copyright = "",
        maintainer = "",
        author = "",
        stability = "",
        homepage = "",
        pkgUrl = "",
        synopsis = "",
        description = "",
        category = "",
        exposed = False,
        exposedModules = [],
        hiddenModules = [],
        instantiatedWith = [],
        trusted = False,
        importDirs = [],
        libraryDirs = [],
        dataDir = "",
        hsLibraries = [],
        extraLibraries = [],
        extraGHCiLibraries= [],
        includeDirs = [],
        includes = [],
        depends = [],
        ccOptions = [],
        ldOptions = [],
        frameworkDirs = [],
        frameworks = [],
        haddockInterfaces = [],
        haddockHTMLs = [],
        pkgRoot = Nothing
    }
-- | The placeholder version used before a real one is parsed:
-- empty branch, no tags.
noVersion :: Version
noVersion = Version { versionBranch = [], versionTags = [] }
-- -----------------------------------------------------------------------------
-- Exposed modules
-- | The true home of a module: the package that defines it plus its
-- name there.  Used for re-exports and (unused for now) signatures.
data OriginalModule
   = OriginalModule {
       originalPackageId :: InstalledPackageId,
       originalModuleName :: ModuleName
     }
  deriving (Generic, Eq, Read, Show)
-- | One entry of the @exposed-modules@ field: the visible name plus an
-- optional re-export source and an optional signature source.
data ExposedModule
   = ExposedModule {
       exposedName      :: ModuleName,
       exposedReexport  :: Maybe OriginalModule,
       exposedSignature :: Maybe OriginalModule -- This field is unused for now.
     }
  deriving (Generic, Read, Show)
-- Textual form is @package-id:Module.Name@.
instance Text OriginalModule where
  disp (OriginalModule ipi m) =
      disp ipi <> Disp.char ':' <> disp m
  parse = do
    ipi <- parse
    _ <- Parse.char ':'
    m <- parse
    return (OriginalModule ipi m)
-- Textual form is @Name [from Orig] [is Orig]@; both the re-export
-- (@from@) and signature (@is@) clauses are optional.
instance Text ExposedModule where
  disp (ExposedModule m reexport signature) =
    Disp.sep [ disp m
             , case reexport of
                Just m' -> Disp.sep [Disp.text "from", disp m']
                Nothing -> Disp.empty
             , case signature of
                Just m' -> Disp.sep [Disp.text "is", disp m']
                Nothing -> Disp.empty
             ]
  parse = do
    m <- parseModuleNameQ
    Parse.skipSpaces
    reexport <- Parse.option Nothing $ do
        _ <- Parse.string "from"
        Parse.skipSpaces
        fmap Just parse
    Parse.skipSpaces
    signature <- Parse.option Nothing $ do
        _ <- Parse.string "is"
        Parse.skipSpaces
        fmap Just parse
    return (ExposedModule m reexport signature)
-- Empty instance bodies: serialisation comes from the class defaults
-- (both types derive 'Generic' above).
instance Binary OriginalModule
instance Binary ExposedModule
-- To maintain backwards-compatibility, we accept both comma/non-comma
-- separated variants of this field. You SHOULD use the comma syntax if you
-- use any new functions, although actually it's unambiguous due to a quirk
-- of the fact that modules must start with capital letters.
-- | Render the @exposed-modules@ field.  Plain module names are printed
-- space-separated (the historical format); as soon as any entry carries a
-- re-export or signature clause the comma-separated form is emitted.
showExposedModules :: [ExposedModule] -> Disp.Doc
showExposedModules xs
    | any extended xs = fsep (Disp.punctuate comma docs)
    | otherwise       = fsep docs
  where
    docs = map disp xs
    extended (ExposedModule _ Nothing Nothing) = False
    extended _                                 = True
-- | Parse an @exposed-modules@ list, accepting both the comma-separated
-- and the whitespace-separated variants.
parseExposedModules :: Parse.ReadP r [ExposedModule]
parseExposedModules = parseOptCommaList parse
-- -----------------------------------------------------------------------------
-- Parsing
-- | Parse the textual @ghc-pkg@ registration format, starting from
-- 'emptyInstalledPackageInfo' and also accepting deprecated fields.
parseInstalledPackageInfo :: String -> ParseResult InstalledPackageInfo
parseInstalledPackageInfo =
    parseFieldsFlat (fieldsInstalledPackageInfo ++ deprecatedFieldDescrs)
    emptyInstalledPackageInfo
-- | Parse one @Hole=Original@@package-id@ binding of the
-- @instantiated-with@ field (the inverse of 'showInstantiatedWith').
parseInstantiatedWith :: Parse.ReadP r (ModuleName, OriginalModule)
parseInstantiatedWith = do
    holeName <- parse
    _ <- Parse.char '='
    implModule <- parse
    _ <- Parse.char '@'
    implPackage <- parse
    return (holeName, OriginalModule implPackage implModule)
-- -----------------------------------------------------------------------------
-- Pretty-printing
-- | Pretty-print a complete registration file.
showInstalledPackageInfo :: InstalledPackageInfo -> String
showInstalledPackageInfo = showFields fieldsInstalledPackageInfo
-- | Pretty-print a single named field ("name: value"), if it exists.
showInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String)
showInstalledPackageInfoField = showSingleNamedField fieldsInstalledPackageInfo
-- | Like 'showInstalledPackageInfoField' but prints only the value.
showSimpleInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String)
showSimpleInstalledPackageInfoField = showSimpleSingleNamedField fieldsInstalledPackageInfo
-- | Render one @instantiated-with@ binding as @Hole=Module\@package-id@.
showInstantiatedWith :: (ModuleName, OriginalModule) -> Doc
showInstantiatedWith (k, OriginalModule p m) = disp k <> text "=" <> disp m <> text "@" <> disp p
-- -----------------------------------------------------------------------------
-- Description of the fields, for parsing/printing
-- | All current (non-deprecated) fields of the registration format.
fieldsInstalledPackageInfo :: [FieldDescr InstalledPackageInfo]
fieldsInstalledPackageInfo = basicFieldDescrs ++ installedFieldDescrs
-- | Fields shared with @.cabal@ package descriptions: identification,
-- licensing and free-text metadata.  Each entry pairs a printer, a parser,
-- a getter and a setter for one field.
basicFieldDescrs :: [FieldDescr InstalledPackageInfo]
basicFieldDescrs =
 [ simpleField "name"
      disp parsePackageNameQ
      packageName (\name pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgName=name}})
 , simpleField "version"
      disp parseOptVersion
      packageVersion (\ver pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgVersion=ver}})
 , simpleField "id"
      disp parse
      installedPackageId (\ipid pkg -> pkg{installedPackageId=ipid})
 , simpleField "key"
      disp parse
      packageKey (\ipid pkg -> pkg{packageKey=ipid})
 , simpleField "license"
      disp parseLicenseQ
      license (\l pkg -> pkg{license=l})
 , simpleField "copyright"
      showFreeText parseFreeText
      copyright (\val pkg -> pkg{copyright=val})
 , simpleField "maintainer"
      showFreeText parseFreeText
      maintainer (\val pkg -> pkg{maintainer=val})
 , simpleField "stability"
      showFreeText parseFreeText
      stability (\val pkg -> pkg{stability=val})
 , simpleField "homepage"
      showFreeText parseFreeText
      homepage (\val pkg -> pkg{homepage=val})
 , simpleField "package-url"
      showFreeText parseFreeText
      pkgUrl (\val pkg -> pkg{pkgUrl=val})
 , simpleField "synopsis"
      showFreeText parseFreeText
      synopsis (\val pkg -> pkg{synopsis=val})
 , simpleField "description"
      showFreeText parseFreeText
      description (\val pkg -> pkg{description=val})
 , simpleField "category"
      showFreeText parseFreeText
      category (\val pkg -> pkg{category=val})
 , simpleField "author"
      showFreeText parseFreeText
      author (\val pkg -> pkg{author=val})
 ]
-- | Fields that only make sense for an /installed/ package: exposure,
-- module lists, on-disk locations, link options and documentation paths.
installedFieldDescrs :: [FieldDescr InstalledPackageInfo]
installedFieldDescrs = [
   boolField "exposed"
      exposed (\val pkg -> pkg{exposed=val})
 , simpleField "exposed-modules"
      showExposedModules parseExposedModules
      exposedModules (\xs pkg -> pkg{exposedModules=xs})
 , listField "hidden-modules"
      disp parseModuleNameQ
      hiddenModules (\xs pkg -> pkg{hiddenModules=xs})
 , listField "instantiated-with"
      showInstantiatedWith parseInstantiatedWith
      instantiatedWith (\xs pkg -> pkg{instantiatedWith=xs})
 , boolField "trusted"
      trusted (\val pkg -> pkg{trusted=val})
 , listField "import-dirs"
      showFilePath parseFilePathQ
      importDirs (\xs pkg -> pkg{importDirs=xs})
 , listField "library-dirs"
      showFilePath parseFilePathQ
      libraryDirs (\xs pkg -> pkg{libraryDirs=xs})
 , simpleField "data-dir"
      showFilePath (parseFilePathQ Parse.<++ return "")
      dataDir (\val pkg -> pkg{dataDir=val})
 , listField "hs-libraries"
      showFilePath parseTokenQ
      hsLibraries (\xs pkg -> pkg{hsLibraries=xs})
 , listField "extra-libraries"
      showToken parseTokenQ
      extraLibraries (\xs pkg -> pkg{extraLibraries=xs})
 , listField "extra-ghci-libraries"
      showToken parseTokenQ
      extraGHCiLibraries (\xs pkg -> pkg{extraGHCiLibraries=xs})
 , listField "include-dirs"
      showFilePath parseFilePathQ
      includeDirs (\xs pkg -> pkg{includeDirs=xs})
 , listField "includes"
      showFilePath parseFilePathQ
      includes (\xs pkg -> pkg{includes=xs})
 , listField "depends"
      disp parse
      depends (\xs pkg -> pkg{depends=xs})
 , listField "cc-options"
      showToken parseTokenQ
      ccOptions (\path pkg -> pkg{ccOptions=path})
 , listField "ld-options"
      showToken parseTokenQ
      ldOptions (\path pkg -> pkg{ldOptions=path})
 , listField "framework-dirs"
      showFilePath parseFilePathQ
      frameworkDirs (\xs pkg -> pkg{frameworkDirs=xs})
 , listField "frameworks"
      showToken parseTokenQ
      frameworks (\xs pkg -> pkg{frameworks=xs})
 , listField "haddock-interfaces"
      showFilePath parseFilePathQ
      haddockInterfaces (\xs pkg -> pkg{haddockInterfaces=xs})
 , listField "haddock-html"
      showFilePath parseFilePathQ
      haddockHTMLs (\xs pkg -> pkg{haddockHTMLs=xs})
   -- pkgroot is parsed but never printed (const Disp.empty): it is
   -- environment-specific and supplied by ghc-pkg at registration time.
 , simpleField "pkgroot"
      (const Disp.empty) parseFilePathQ
      (fromMaybe "" . pkgRoot) (\xs pkg -> pkg{pkgRoot=Just xs})
 ]
-- | Obsolete fields still accepted on input for compatibility; their
-- values are parsed and then discarded ((const []) / (const id)).
deprecatedFieldDescrs :: [FieldDescr InstalledPackageInfo]
deprecatedFieldDescrs = [
   listField "hugs-options"
      showToken parseTokenQ
      (const []) (const id)
 ]
| DavidAlphaFox/ghc | libraries/Cabal/Cabal/Distribution/InstalledPackageInfo.hs | bsd-3-clause | 15,921 | 0 | 14 | 5,108 | 3,094 | 1,764 | 1,330 | 304 | 2 |
module TestTime(tests) where
import Test.HUnit
import Database.HDBC
import TestUtils
import Control.Exception
import Data.Time
import Data.Time.LocalTime
import Data.Time.Clock.POSIX
import Data.Maybe
import Data.Convertible
import SpecificDB
import System.Locale(defaultTimeLocale)
import Database.HDBC.Locale (iso8601DateFormat)
import qualified System.Time as ST
-- Orphan instance, local to this test suite so assertEqual can compare
-- times: two zoned times are equal when both their UTC instants and their
-- time zones agree.
instance Eq ZonedTime where
    a == b = zonedTimeToUTC a == zonedTimeToUTC b &&
             zonedTimeZone a == zonedTimeZone b
-- | Fixed reference instant used by all conversion tests.
-- 'fromJust' is safe here only because the literal is known to parse;
-- a format change would crash the suite at startup.
testZonedTime :: ZonedTime
testZonedTime = fromJust $ parseTime defaultTimeLocale (iso8601DateFormat (Just "%T %z"))
                "1989-08-01 15:33:01 -0500"
-- | Same instant with fractional seconds, for backends that support them.
testZonedTimeFrac :: ZonedTime
testZonedTimeFrac = fromJust $ parseTime defaultTimeLocale (iso8601DateFormat (Just "%T%Q %z"))
                    "1989-08-01 15:33:01.536 -0500"
rowdata t = [[SqlInt32 100, toSql t, SqlNull]]
-- | Round-trip one date/time value through the database: create a table
-- whose column type matches the SqlValue, insert the value, read it back,
-- and check both columns.  The table is dropped in a 'finally' so a failed
-- assertion cannot leak it into the next test.
testDTType inputdata convToSqlValue = dbTestCase $ \dbh ->
    do run dbh ("CREATE TABLE hdbctesttime (testid INTEGER PRIMARY KEY NOT NULL, \
               \testvalue " ++ dateTimeTypeOfSqlValue value ++ ")") []
       finally (testIt dbh) (do commit dbh
                                run dbh "DROP TABLE hdbctesttime" []
                                commit dbh
                            )
    where testIt dbh =
              do run dbh "INSERT INTO hdbctesttime (testid, testvalue) VALUES (?, ?)"
                     [iToSql 5, value]
                 commit dbh
                 r <- quickQuery' dbh "SELECT testid, testvalue FROM hdbctesttime" []
                 -- Partial match: any shape other than exactly one 2-column
                 -- row is a pattern-match failure, which fails the test.
                 case r of
                   [[testidsv, testvaluesv]] ->
                       do assertEqual "testid" (5::Int) (fromSql testidsv)
                          assertEqual "testvalue" inputdata (fromSql testvaluesv)
          value = convToSqlValue inputdata
-- | Build one labelled test case exercising a single date/time conversion
-- function against the given input value.
mkTest label inputdata = TestLabel label . testDTType inputdata
-- | The full suite: whole-second tests always run; the fractional-second
-- variant is included only when the backend advertises support
-- (supportsFracTime -- presumably from SpecificDB; confirm there).
tests = TestList $
    ((TestLabel "Non-frac" $ testIt testZonedTime) :
     if supportsFracTime then [TestLabel "Frac" $ testIt testZonedTimeFrac] else [])
-- | For one reference instant, test round-tripping every supported
-- date/time representation, each derived from the same base zoned time so
-- all variants denote the same moment.
testIt baseZonedTime =
    TestList [mkTest "Day" baseDay toSql,
              mkTest "TimeOfDay" baseTimeOfDay toSql,
              mkTest "ZonedTimeOfDay" baseZonedTimeOfDay toSql,
              mkTest "LocalTime" baseLocalTime toSql,
              mkTest "ZonedTime" baseZonedTime toSql,
              mkTest "UTCTime" baseUTCTime toSql,
              mkTest "DiffTime" baseDiffTime toSql,
              mkTest "POSIXTime" basePOSIXTime posixToSql,
              mkTest "ClockTime" baseClockTime toSql,
              mkTest "CalendarTime" baseCalendarTime toSql,
              mkTest "TimeDiff" baseTimeDiff toSql
             ]
    where
      baseDay :: Day
      baseDay = localDay baseLocalTime
      baseTimeOfDay :: TimeOfDay
      baseTimeOfDay = localTimeOfDay baseLocalTime
      baseZonedTimeOfDay :: (TimeOfDay, TimeZone)
      baseZonedTimeOfDay = fromSql (SqlZonedTime baseZonedTime)
      baseLocalTime :: LocalTime
      baseLocalTime = zonedTimeToLocalTime baseZonedTime
      baseUTCTime :: UTCTime
      baseUTCTime = convert baseZonedTime
      baseDiffTime :: NominalDiffTime
      baseDiffTime = basePOSIXTime
      basePOSIXTime :: POSIXTime
      basePOSIXTime = convert baseZonedTime
      baseTimeDiff :: ST.TimeDiff
      baseTimeDiff = convert baseDiffTime
      -- No fractional parts for these two
      baseClockTime :: ST.ClockTime
      baseClockTime = convert testZonedTime
      baseCalendarTime :: ST.CalendarTime
      baseCalendarTime = convert testZonedTime
| hdbc/hdbc-sqlite3 | testsrc/TestTime.hs | bsd-3-clause | 3,583 | 0 | 16 | 1,008 | 773 | 399 | 374 | 77 | 2 |
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified Control.Exception
import qualified Data.ByteString.Lazy as DBL
import qualified Data.Maybe
import qualified Network
import Thrift.Protocol.Binary
import Thrift.Server
import Thrift.Transport.Handle
import qualified ThriftTestUtils
import qualified DebugProtoTest_Types as Types
import qualified Inherited
import qualified Inherited_Client as IClient
import qualified Inherited_Iface as IIface
import qualified Srv_Client as SClient
import qualified Srv_Iface as SIface
-- we don't actually need this import, but force it to check the code generator exports proper Haskell syntax
import qualified Srv()
data InheritedHandler = InheritedHandler
-- | Test implementations of the generated Srv interface: each method logs
-- that it was called and returns a fixed value.
instance SIface.Srv_Iface InheritedHandler where
    janky _ arg = do
        ThriftTestUtils.serverLog $ "Got janky method call: " ++ show arg
        return $ 31
    voidMethod _ = do
        ThriftTestUtils.serverLog "Got voidMethod method call"
        return ()
    primitiveMethod _ = do
        ThriftTestUtils.serverLog "Got primitiveMethod call"
        return $ 42
    -- Returns a struct with every scalar field populated and every
    -- container field left unset, exercising optional-field encoding.
    structMethod _ = do
        ThriftTestUtils.serverLog "Got structMethod call"
        return $ Types.CompactProtoTestStruct {
            Types.f_CompactProtoTestStruct_a_byte = Just 0x01,
            Types.f_CompactProtoTestStruct_a_i16 = Just 0x02,
            Types.f_CompactProtoTestStruct_a_i32 = Just 0x03,
            Types.f_CompactProtoTestStruct_a_i64 = Just 0x04,
            Types.f_CompactProtoTestStruct_a_double = Just 0.1,
            Types.f_CompactProtoTestStruct_a_string = Just "abcdef",
            Types.f_CompactProtoTestStruct_a_binary = Just DBL.empty,
            Types.f_CompactProtoTestStruct_true_field = Just True,
            Types.f_CompactProtoTestStruct_false_field = Just False,
            Types.f_CompactProtoTestStruct_empty_struct_field = Just Types.Empty,
            Types.f_CompactProtoTestStruct_byte_list = Nothing,
            Types.f_CompactProtoTestStruct_i16_list = Nothing,
            Types.f_CompactProtoTestStruct_i32_list = Nothing,
            Types.f_CompactProtoTestStruct_i64_list = Nothing,
            Types.f_CompactProtoTestStruct_double_list = Nothing,
            Types.f_CompactProtoTestStruct_string_list = Nothing,
            Types.f_CompactProtoTestStruct_binary_list = Nothing,
            Types.f_CompactProtoTestStruct_boolean_list = Nothing,
            Types.f_CompactProtoTestStruct_struct_list = Nothing,
            Types.f_CompactProtoTestStruct_byte_set = Nothing,
            Types.f_CompactProtoTestStruct_i16_set = Nothing,
            Types.f_CompactProtoTestStruct_i32_set = Nothing,
            Types.f_CompactProtoTestStruct_i64_set = Nothing,
            Types.f_CompactProtoTestStruct_double_set = Nothing,
            Types.f_CompactProtoTestStruct_string_set = Nothing,
            Types.f_CompactProtoTestStruct_binary_set = Nothing,
            Types.f_CompactProtoTestStruct_boolean_set = Nothing,
            Types.f_CompactProtoTestStruct_struct_set = Nothing,
            Types.f_CompactProtoTestStruct_byte_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_i16_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_i32_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_i64_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_double_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_string_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_binary_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_boolean_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_i16_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_i32_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_i64_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_double_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_string_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_binary_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_boolean_map = Nothing,
            Types.f_CompactProtoTestStruct_list_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_set_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_map_byte_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_map_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_set_map = Nothing,
            Types.f_CompactProtoTestStruct_byte_list_map = Nothing }
    methodWithDefaultArgs _ arg = do
        ThriftTestUtils.serverLog $ "Got methodWithDefaultArgs: " ++ show arg
        return ()
    onewayMethod _ = do
        ThriftTestUtils.serverLog "Got onewayMethod"
-- | The inherited interface: echoes its argument back.
-- NOTE(review): 'fromJust' crashes on a missing argument; apparently
-- acceptable in this test server, where the generated client always
-- supplies one.
instance IIface.Inherited_Iface InheritedHandler where
    identity _ arg = do
        ThriftTestUtils.serverLog $ "Got identity method: " ++ show arg
        return $ Data.Maybe.fromJust arg
-- | Connect to the test server over the binary protocol and invoke every
-- method once, logging each result; the same protocol value is used for
-- both the input and output side of the pair.
client :: (String, Network.PortID) -> IO ()
client addr = do
    to <- hOpen addr
    let p = BinaryProtocol to
    let ps = (p,p)
    v1 <- SClient.janky ps 42
    ThriftTestUtils.clientLog $ show v1
    SClient.voidMethod ps
    v2 <- SClient.primitiveMethod ps
    ThriftTestUtils.clientLog $ show v2
    v3 <- SClient.structMethod ps
    ThriftTestUtils.clientLog $ show v3
    SClient.methodWithDefaultArgs ps 42
    SClient.onewayMethod ps
    v4 <- IClient.identity ps 42
    ThriftTestUtils.clientLog $ show v4
    return ()
-- | Run the basic server on the given port, turning any transport
-- exception into a fatal "FAILURE" error so the test harness notices.
server :: Network.PortNumber -> IO ()
server port = do
    ThriftTestUtils.serverLog "Ready..."
    (runBasicServer InheritedHandler Inherited.process port)
      `Control.Exception.catch`
      (\(TransportExn s _) -> error $ "FAILURE: " ++ show s)
-- | Entry point: run the server and client against each other.
main :: IO ()
main = ThriftTestUtils.runTest server client
| rkq/cxxexp | third-party/src/thrift-0.9.1/test/hs/DebugProtoTest_Main.hs | mit | 6,574 | 0 | 12 | 1,421 | 1,066 | 587 | 479 | 114 | 1 |
module RenameImport where
import BarModule | charleso/intellij-haskforce | tests/gold/refactoring/RenameFile/RenameImport-after.hs | apache-2.0 | 43 | 0 | 3 | 5 | 7 | 5 | 2 | 2 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Hooks.ManageDebug
-- Copyright : (c) Brandon S Allbery KF8NH, 2014
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : allbery.b@gmail.com
-- Stability : unstable
-- Portability : not portable
--
-- A @manageHook@ and associated @logHook@ for debugging 'ManageHook's.
-- Simplest usage: wrap your xmonad config in the @debugManageHook@ combinator.
-- Or use @debugManageHookOn@ for a triggerable version, specifying the
-- triggering key sequence in 'XMonad.Util.EZConfig' syntax. Or use the
-- individual hooks in whatever way you see fit.
--
-----------------------------------------------------------------------------
--
--
module XMonad.Hooks.ManageDebug (debugManageHook
,debugManageHookOn
,manageDebug
,maybeManageDebug
,manageDebugLogHook
,debugNextManagedWindow
) where
import XMonad
import XMonad.Hooks.DebugStack
import XMonad.Util.DebugWindow
import XMonad.Util.EZConfig
import qualified XMonad.Util.ExtensibleState as XS
import Control.Monad (when)
-- persistent state for manageHook debugging to trigger logHook debugging
data ManageStackDebug = MSD (Bool,Bool) deriving Typeable
instance ExtensionClass ManageStackDebug where
initialValue = MSD (False,False)
-- | A combinator to add full 'ManageHook' debugging in a single operation.
debugManageHook :: XConfig l -> XConfig l
debugManageHook cf = cf {logHook = manageDebugLogHook <+> logHook cf
,manageHook = manageDebug <+> manageHook cf
}
-- | A combinator to add triggerable 'ManageHook' debugging in a single operation.
-- Specify a key sequence as a string in 'XMonad.Util.EZConfig' syntax; press
-- this key before opening the window to get just that logged.
debugManageHookOn :: String -> XConfig l -> XConfig l
debugManageHookOn key cf = cf {logHook = manageDebugLogHook <+> logHook cf
,manageHook = maybeManageDebug <+> manageHook cf
}
`additionalKeysP`
[(key,debugNextManagedWindow)]
-- | Place this at the start of a 'ManageHook', or possibly other places for a
-- more limited view. It will show the current 'StackSet' state and the new
-- window, and set a flag so that @manageDebugLogHook@ will display the
-- final 'StackSet' state.
--
-- Note that the initial state shows only the current workspace; the final
-- one shows all workspaces, since your 'ManageHook' might use e.g. 'doShift',
manageDebug :: ManageHook
manageDebug = do
w <- ask
liftX $ do
trace "== manageHook; current stack =="
debugStackString >>= trace
ws <- debugWindow w
trace $ "new:\n " ++ ws
XS.modify $ \(MSD (_,key)) -> MSD (True,key)
idHook
-- | Run 'manageDebug' for this window only if 'debugNextManagedWindow'
-- was invoked beforehand.  The one-shot request flag is consumed
-- (reset to False) whether or not it was set.
maybeManageDebug :: ManageHook
maybeManageDebug = do
  requested <- liftX consumeRequest
  if requested then manageDebug else idHook
  where
    -- Read the "debug next window" flag and clear it, leaving the
    -- unrelated log flag untouched.
    consumeRequest = do
      MSD (logFlag, req) <- XS.get
      XS.put (MSD (logFlag, False))
      return req
-- | If @manageDebug@ has set the debug-stack flag, show the stack.
-- | If @manageDebug@ has set the debug-stack flag, show the stack.
-- When the flag is set, dumps the full final 'StackSet' via 'trace'
-- and clears the flag; the one-shot trigger flag (@key@) is preserved.
manageDebugLogHook :: X ()
manageDebugLogHook = do
  MSD (go,key) <- XS.get
  when go $ do
    trace "== manageHook; final stack =="
    debugStackFullString >>= trace
    -- reset only the "show final stack" flag; keep the trigger flag
    XS.put $ MSD (False,key)
  idHook
-- | Request that the next window to be managed be @manageDebug@-ed. This can
-- be used anywhere an X action can, such as key bindings, mouse bindings
-- (presumably with 'const'), 'startupHook', etc.
debugNextManagedWindow :: X ()
debugNextManagedWindow = XS.modify $ \(MSD (log_,_)) -> MSD (log_,True)
| pjones/xmonad-test | vendor/xmonad-contrib/XMonad/Hooks/ManageDebug.hs | bsd-2-clause | 4,185 | 0 | 14 | 1,222 | 561 | 315 | 246 | 51 | 2 |
{-# LANGUAGE OverloadedStrings, RankNTypes #-}
module Graphics.UI.Bottle.View
( View, augmentAnimId, backgroundColor, scaled
) where
import Data.Vector.Vector2 (Vector2(..))
import Graphics.UI.Bottle.Animation (AnimId, Layer)
import qualified Control.Lens as Lens
import qualified Data.ByteString.Char8 as SBS8
import qualified Graphics.DrawingCombinators as Draw
import qualified Graphics.UI.Bottle.Animation as Anim
type View = (Anim.Size, Anim.Frame)
-- | Extend an animation id with one extra component derived from any
-- showable value ('show'n and packed into a ByteString).
augmentAnimId :: Show a => AnimId -> a -> AnimId
augmentAnimId animId x = Anim.joinId animId [SBS8.pack (show x)]
backgroundColor :: AnimId -> Layer -> Draw.Color -> View -> View
backgroundColor animId layer color (size, frame) =
(size, Anim.backgroundColor (animId ++ ["bg"]) layer color size frame)
-- | An isomorphism that scales a 'View' (both its size and its frame)
-- by @factor@.  The reverse direction scales by the component-wise
-- reciprocal (1/factor), undoing the forward scaling.
scaled :: Vector2 Draw.R -> Lens.Iso' View View
scaled factor =
  Lens.iso (scale factor) (scale (1/factor))
  where
    -- Scale both the logical size and the animation frame together.
    scale ratio (size, frame) =
      (size*ratio, Anim.scale ratio frame)
| schell/lamdu | bottlelib/Graphics/UI/Bottle/View.hs | gpl-3.0 | 962 | 0 | 9 | 145 | 319 | 185 | 134 | 20 | 1 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE RankNTypes, ImplicitParams, UnboxedTuples #-}
-- Test two slightly exotic things about type signatures
module ShouldCompile where
-- The for-all hoisting should hoist the
-- implicit parameter to give
-- r :: (?param::a) => a
r :: Int -> ((?param :: a) => a)
r = error "urk"
-- The unboxed tuple is OK because it is
-- used on the right hand end of an arrow
type T = (# Int, Int #)
f :: Int -> T
f = error "urk"
| olsner/ghc | testsuite/tests/typecheck/should_compile/tc145.hs | bsd-3-clause | 547 | 0 | 8 | 158 | 69 | 44 | 25 | 8 | 1 |
{-# LANGUAGE GADTs #-}
module T5217 where
import Language.Haskell.TH
$([d| data T a b where { T1 :: Int -> T Int Char
; T2 :: a -> T a a
; T3 :: a -> T [a] a
; T4 :: a -> b -> T b [a] } |])
| ezyang/ghc | testsuite/tests/th/T5217.hs | bsd-3-clause | 261 | 0 | 6 | 122 | 22 | 15 | 7 | -1 | -1 |
import Test.Tasty
import qualified Tests.Math.Hclaws.ConservationLaws
import qualified Tests.Math.Hclaws.FrontTracking
import qualified Tests.Math.Hclaws.Systems.Burgers
import qualified Tests.Math.Hclaws.Systems.JenssenYoung2004_31
import qualified Tests.Math.Hclaws.Systems.Linear
import qualified Tests.Math.Hclaws.Systems.ShallowWater
import qualified Tests.Math.Hclaws.Systems.TveitoWinther1995_3
import qualified Tests.Math.Hclaws.Systems.TwoComponentChromatography
import qualified Tests.Math.Hclaws.Curves
import qualified Tests.Math.Hclaws.Differentiation
import qualified Tests.Math.Hclaws.Fan
import qualified Tests.Math.Hclaws.Integration
main = defaultMain tests
tests :: TestTree
tests = testGroup "Tests"
[ Tests.Math.Hclaws.Systems.Burgers.tests
, Tests.Math.Hclaws.Systems.JenssenYoung2004_31.tests
, Tests.Math.Hclaws.Systems.Linear.tests
, Tests.Math.Hclaws.Systems.ShallowWater.tests
, Tests.Math.Hclaws.Systems.TwoComponentChromatography.tests
, Tests.Math.Hclaws.Systems.TveitoWinther1995_3.tests
, Tests.Math.Hclaws.FrontTracking.tests
, Tests.Math.Hclaws.ConservationLaws.tests
, Tests.Math.Hclaws.Curves.tests
, Tests.Math.Hclaws.Differentiation.tests
, Tests.Math.Hclaws.Fan.tests
, Tests.Math.Hclaws.Integration.tests
]
| mikebenfield/hclaws | test/Main.hs | isc | 1,277 | 1 | 6 | 104 | 243 | 172 | 71 | 28 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module InstaHuskee where
import Data.Text
import Data.Default (def)
import Network.HTTP.Client
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Control.Monad.Reader
import Control.Monad.Trans.Resource
import Instagram
--------------------------------------------------------------------------------
type Config = String
type AppM a = InstagramT (ResourceT IO) a
--------------------------------------------------------------------------------
runApp :: (MonadReader Config m, MonadIO m) => AppM a -> m a
runApp = undefined
-- To be parametrised/read from the environment
redirectUri :: RedirectUri
redirectUri = pack "http://example.com/auth/ig"
credentials :: Credentials
credentials = Credentials (pack "CLIENT_ID") (pack "CLIENT_SECRET")
-- Top-level API
getRecentMediaByTag :: Text -> OAuthToken -> IO (Envelope [Media])
getRecentMediaByTag tag token = runIGAction $ getRecentTagged tag (Just token) def
getAuthURL :: IO Text
getAuthURL = runIGAction $ getUserAccessTokenURL1 redirectUri []
getAuthToken :: Text -> IO OAuthToken
getAuthToken code = runIGAction $ getUserAccessTokenURL2 redirectUri code
likeMedia :: MediaID -> OAuthToken -> IO (Envelope NoResult)
likeMedia mediaId token = runIGAction $ like mediaId token
-- Mechanics --
runIGAction :: AppM a -> IO a
runIGAction = runResourceT . runInstagramFn
-- | Run an 'InstagramT' action: allocate a TLS-capable HTTP 'Manager'
-- and execute the action with the module-level 'credentials'.
-- NOTE(review): a fresh manager is created on every call; consider
-- sharing one manager across calls to reuse connections.
runInstagramFn :: forall b (m :: * -> *) . (MonadBaseControl IO m, MonadResource m) => InstagramT m b -> m b
runInstagramFn igAction = do
  manager <- liftIO $ newManager tlsManagerSettings
  runInstagramT credentials manager igAction
| dzotokan/instahuskee | src/InstaHuskee.hs | mit | 1,686 | 0 | 10 | 288 | 412 | 220 | 192 | -1 | -1 |
module RailFenceCipher (encode, decode) where
-- | Encode a message with the rail fence transposition cipher.
--
-- Characters are written along a zig-zag over @rails@ rows (down the
-- fence, back up, repeatedly) and the ciphertext is read off row by row.
-- With a single rail the text is returned unchanged.
-- Precondition: @rails >= 1@.
encode :: Int -> String -> String
encode rails text = concatMap row [0 .. rails - 1]
  where
    -- Repeating row index of the zig-zag: 0,1,..,r-1,r-2,..,1,0,1,..
    -- (for rails == 1 this degenerates to the constant stream 0,0,..).
    zigzag = cycle ([0 .. rails - 1] ++ [rails - 2, rails - 3 .. 1])
    -- All characters landing on row r, in their original order.
    row r = [c | (c, r') <- zip text zigzag, r' == r]
-- | Decode a rail fence ciphertext produced with the same number of rails.
--
-- The zig-zag row pattern is reconstructed, the ciphertext is split into
-- the rows it was read from, and the plaintext is rebuilt by walking the
-- pattern again, taking the next unused character of each visited row.
-- Precondition: @rails >= 1@.
decode :: Int -> String -> String
decode rails cipher = follow pattern rows
  where
    zigzag = [0 .. rails - 1] ++ [rails - 2, rails - 3 .. 1]
    -- Row index of every plaintext position.
    pattern = take (length cipher) (cycle zigzag)
    -- How many characters each row received during encoding.
    rowLengths = [length (filter (== r) pattern) | r <- [0 .. rails - 1]]
    rows = chop rowLengths cipher
    chop [] _ = []
    chop (n:ns) s = let (front, rest) = splitAt n s in front : chop ns rest
    -- Consume one character from row r for each pattern entry r.
    follow [] _ = []
    follow (r:rs) rss = case splitAt r rss of
        (before, (c:cs):after) -> c : follow rs (before ++ cs : after)
        _ -> []  -- unreachable for well-formed input
| exercism/xhaskell | exercises/practice/rail-fence-cipher/src/RailFenceCipher.hs | mit | 224 | 0 | 6 | 39 | 54 | 30 | 24 | 5 | 1 |
-- Problems.Problem011.hs
module Problems.Problem011 (p11) where
import Data.List
main = print p11
p11 :: Int
p11 = maximum $ map biggestFromGrid [leftGrid, rightGrid, topGrid, bottomGrid, diagonalLefRightGrid, diagonalRightLeftGrid]
-- | Largest 4-in-a-row product over every row of the given grid view.
biggestFromGrid :: [[Int]] -> Int
biggestFromGrid rows = maximum [biggestFromRow r | r <- rows]
-- | Largest product of four consecutive entries in a row.
--
-- Fix: the original took @take 4@ of every suffix, so the trailing
-- suffixes of length 3, 2, 1 and 0 also contributed their products (an
-- empty product is 1).  A short suffix can beat every genuine 4-wide
-- window -- e.g. a row ending 0,99,99,99 wrongly scored 99^3.
-- Only full 4-element windows are considered now.
-- Precondition: the row has at least four entries.
biggestFromRow :: [Int] -> Int
biggestFromRow xs = maximum (map product windows)
  where
    windows = [take 4 t | t <- tails xs, length t >= 4]
-- | Down-left diagonal groups: each row of the flipped views is sheared
-- by dropping 0,1,2,.. leading entries, then transposed so entries on
-- the same diagonal end up in the same list.
diagonalRightLeftGrid :: [[Int]]
diagonalRightLeftGrid = (transpose $ zipWith drop [0..] rightGrid) ++ (transpose $ zipWith drop [0..] bottomGrid)
-- | Down-right diagonal groups, built the same way from the original
-- and the transposed views.
diagonalLefRightGrid :: [[Int]]
diagonalLefRightGrid = (transpose $ zipWith drop [0..] leftGrid) ++ (transpose $ zipWith drop [0..] topGrid)
-- | Columns read bottom-to-top (reversed columns).
bottomGrid :: [[Int]]
bottomGrid = map reverse topGrid
-- | Columns read top-to-bottom (rows of the transposed grid).
topGrid :: [[Int]]
topGrid = transpose leftGrid
-- | Rows read right-to-left.
rightGrid :: [[Int]]
rightGrid = map reverse leftGrid
-- | Rows read left-to-right: the grid itself.
leftGrid :: [[Int]]
leftGrid = grid
grid :: [[Int]]
grid = [[08, 02, 22, 97, 38, 15, 00, 40, 00, 75, 04, 05, 07, 78, 52, 12, 50, 77, 91, 08],
[49, 49, 99, 40, 17, 81, 18, 57, 60, 87, 17, 40, 98, 43, 69, 48, 04, 56, 62, 00],
[81, 49, 31, 73, 55, 79, 14, 29, 93, 71, 40, 67, 53, 88, 30, 03, 49, 13, 36, 65],
[52, 70, 95, 23, 04, 60, 11, 42, 69, 24, 68, 56, 01, 32, 56, 71, 37, 02, 36, 91],
[22, 31, 16, 71, 51, 67, 63, 89, 41, 92, 36, 54, 22, 40, 40, 28, 66, 33, 13, 80],
[24, 47, 32, 60, 99, 03, 45, 02, 44, 75, 33, 53, 78, 36, 84, 20, 35, 17, 12, 50],
[32, 98, 81, 28, 64, 23, 67, 10, 26, 38, 40, 67, 59, 54, 70, 66, 18, 38, 64, 70],
[67, 26, 20, 68, 02, 62, 12, 20, 95, 63, 94, 39, 63, 08, 40, 91, 66, 49, 94, 21],
[24, 55, 58, 05, 66, 73, 99, 26, 97, 17, 78, 78, 96, 83, 14, 88, 34, 89, 63, 72],
[21, 36, 23, 09, 75, 00, 76, 44, 20, 45, 35, 14, 00, 61, 33, 97, 34, 31, 33, 95],
[78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 03, 80, 04, 62, 16, 14, 09, 53, 56, 92],
[16, 39, 05, 42, 96, 35, 31, 47, 55, 58, 88, 24, 00, 17, 54, 24, 36, 29, 85, 57],
[86, 56, 00, 48, 35, 71, 89, 07, 05, 44, 44, 37, 44, 60, 21, 58, 51, 54, 17, 58],
[19, 80, 81, 68, 05, 94, 47, 69, 28, 73, 92, 13, 86, 52, 17, 77, 04, 89, 55, 40],
[04, 52, 08, 83, 97, 35, 99, 16, 07, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66],
[88, 36, 68, 87, 57, 62, 20, 72, 03, 46, 33, 67, 46, 55, 12, 32, 63, 93, 53, 69],
[04, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18, 08, 46, 29, 32, 40, 62, 76, 36],
[20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69, 82, 67, 59, 85, 74, 04, 36, 16],
[20, 73, 35, 29, 78, 31, 90, 01, 74, 31, 49, 71, 48, 86, 81, 16, 23, 57, 05, 54],
[01, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48, 61, 43, 52, 01, 89, 19, 67, 48]]
| Sgoettschkes/learning | haskell/ProjectEuler/src/Problems/Problem011.hs | mit | 2,726 | 0 | 9 | 698 | 1,591 | 1,028 | 563 | 42 | 1 |
module Main where
import PositiveInteger
import Text.Trifecta
main :: IO ()
main = do
print $ parseString parseDigit mempty "123"
print $ parseString parseDigit mempty "abc" -- expected to fail
print $ parseString base10Integer mempty "123abc"
print $ parseString base10Integer mempty "abc" -- expected to fail
print $ parseString base10Integer' mempty "123abc"
print $ parseString base10Integer' mempty "-123abc"
| NickAger/LearningHaskell | HaskellProgrammingFromFirstPrinciples/Chapter24/PostiveInteger/src/Main.hs | mit | 429 | 0 | 8 | 73 | 113 | 54 | 59 | 11 | 1 |
import Data.List
import Data.Numbers.Primes
num :: [Integer] -> Integer
num qs = product $ zipWith (^) primes qs
kind :: [Integer] -> Integer
kind qs = 1 + go qs 1
where go [] _ = 0
go (q:qs) k = q*k + go qs (k*(q*2+1))
primeSumLists k = tail $ go smallPrime 1 k
where smallPrime = takeWhile (<k) primes
go [] _ _ = [[]]
go (p:ps) l k = [x:y | x <- [0..k], let s = l*(p^x), s < k, y <- go ps s k]
-- | NOTE(review): this looks unfinished -- the result is just @k@
-- itself, and every binding in the @where@ clause (cands, longestCand,
-- p) is computed but never used in the result.
answer :: Integer -> Integer
answer k = k
  where cands = (inits . repeat) 1                              -- exponent lists [1],[1,1],[1,1,1],..
        longestCand = head $ dropWhile (\x -> kind x < k) cands -- first exponent list whose 'kind' reaches k
        p = tail $ zipWith (*) primes longestCand               -- unused
{-# htermination filterFM :: Ord a => ([a] -> b -> Bool) -> FiniteMap [a] b -> FiniteMap [a] b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_filterFM_4.hs | mit | 116 | 0 | 3 | 23 | 5 | 3 | 2 | 1 | 0 |
-----------------------------------------------------------------------------
--
-- Module : Connections.Mash
-- Copyright :
-- License : AllRightsReserved
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Connections.Mash (
mashG
) where
import Connections
import Direction
mashG :: ConnectionsGen
mashG = ConnectionsGen mash'
-- | Build the \"mash\" connection topology on a @levels x levels@ grid.
--
-- Nodes are numbered row-major (row = i `div` levels, col = i `mod`
-- levels).  Each node links rightwards (the last node of a row wraps to
-- the row's start) and downwards (the bottom row wraps to the top),
-- except: the wrap-around right link is dropped on odd rows, and the
-- wrap-around down link is dropped in even columns.
--
-- Fix: the node count was defined twice (@num@ and @n@, both
-- @levels * levels@); consolidated into a single @n@.  The bottom-row
-- test now compares bounds directly instead of scanning a list with
-- 'elem'.
mash' :: Int -> Connections
mash' levels = Connections n (nexts ++ downs)
    where
      -- total number of nodes
      n = levels * levels
      -- rightward link; wraps to the start of the same row
      connectNext i = ( i
                      , if isLast i then i + 1 - levels else i + 1
                      , FromLeft)
      -- odd rows keep no wrap-around right link
      hasNext i = not (isLast i && (row i `mod` 2 /= 0))
      nexts = map connectNext . filter hasNext $ [0 .. n - 1]
      isLast i = (i + 1) `mod` levels == 0
      -- downward link; bottom row wraps to the top
      connectDown i = (i, (i + levels) `mod` n, FromUp)
      -- even columns keep no wrap-around down link
      hasDown i = not (isDown i && (col i `mod` 2 == 0))
      downs = map connectDown . filter hasDown $ [0 .. n - 1]
      -- bottom-row test (was @i `elem` [n - levels .. n - 1]@)
      isDown i = n - levels <= i && i <= n - 1
      row i = i `div` levels
      col i = i `mod` levels
| uvNikita/TopologyCalc | src/Connections/Mash.hs | mit | 1,267 | 0 | 13 | 438 | 375 | 211 | 164 | 24 | 2 |
{- |
Module : Text.Pandoc.CrossRef
Copyright : Copyright (C) 2015 Nikolay Yakimov
License : GNU GPL, version 2 or above
Maintainer : Nikolay Yakimov <root@livid.pp.ru>
Stability : alpha
Portability : portable
Public interface to pandoc-crossref library
Example of use:
> import Text.Pandoc
> import Text.Pandoc.JSON
>
> import Text.Pandoc.CrossRef
>
> main :: IO ()
> main = toJSONFilter go
> where
> go fmt p@(Pandoc meta _) = runCrossRefIO meta fmt action p
> where
> action (Pandoc _ bs) = do
> meta' <- crossRefMeta
> bs' <- crossRefBlocks bs
> return $ Pandoc meta' bs'
This module also exports utility functions for setting up meta-settings for
pandoc-crossref. Refer to documentation for a complete list of metadata field
names. All functions accept a single argument of type, returned by
"Text.Pandoc.Builder" functions, and return 'Meta'.
Example:
> runCrossRefIO meta fmt crossRefBlocks blocks
> where
> meta =
> figureTitle (str "Figura")
> <> tableTitle (str "Tabla")
> <> figPrefix (str "fig.")
> <> eqnPrefix (str "ec.")
> <> tblPrefix (str "tbl.")
> <> loftitle (header 1 $ text "Lista de figuras")
> <> lotTitle (header 1 $ text "Lista de tablas")
> <> chaptersDepth (MetaString "2")
-}
{-# LANGUAGE RankNTypes #-}
module Text.Pandoc.CrossRef (
crossRefBlocks
, crossRefMeta
, defaultCrossRefAction
, runCrossRef
, runCrossRefIO
, module SG
, CrossRefM
, CrossRefEnv(..)
) where
import Control.Monad.State
import qualified Control.Monad.Reader as R
import Text.Pandoc
import Data.Monoid ((<>))
import Text.Pandoc.CrossRef.References
import Text.Pandoc.CrossRef.Util.Settings
import Text.Pandoc.CrossRef.Util.Options as O
import Text.Pandoc.CrossRef.Util.CodeBlockCaptions
import Text.Pandoc.CrossRef.Util.ModifyMeta
import Text.Pandoc.CrossRef.Util.Settings.Gen as SG
-- | Enviromnent for 'CrossRefM'
data CrossRefEnv = CrossRefEnv {
creSettings :: Meta -- ^Metadata settings
, creOptions :: Options -- ^Internal pandoc-crossref options
}
-- | Essentially a reader monad for basic pandoc-crossref environment
type CrossRefM a = R.Reader CrossRefEnv a
{- | Walk over blocks, while inserting cross-references, list-of, etc.
Works in 'CrossRefM' monad. -}
-- | Walk the document blocks, wiring in cross-references.
-- Reads the computed 'Options' from the 'CrossRefM' environment; the
-- reference bookkeeping itself runs in a 'State' monad seeded with 'def'.
-- Four passes: code-block captions, wholesale replacement, in-text
-- reference rewriting, and list-of generation.
crossRefBlocks :: [Block] -> CrossRefM [Block]
crossRefBlocks blocks = do
  opts <- R.asks creOptions
  let
    doWalk =
      bottomUpM (mkCodeBlockCaptions opts) blocks
      >>= replaceAll opts
      >>= bottomUpM (replaceRefs opts)
      >>= bottomUpM (listOf opts)
  return $ evalState doWalk def
{- | Modifies metadata for LaTeX output, adding header-includes instructions
to setup custom and builtin environments.
Note, that if output format is not "latex", this function does nothing.
Works in 'CrossRefM' monad. -}
crossRefMeta :: CrossRefM Meta
crossRefMeta = do
opts <- R.asks creOptions
dtv <- R.asks creSettings
return $ modifyMeta opts dtv
{- | Combines 'crossRefMeta' and 'crossRefBlocks'
Works in 'CrossRefM' monad. -}
defaultCrossRefAction :: Pandoc -> CrossRefM Pandoc
defaultCrossRefAction (Pandoc _ bs) = do
meta' <- crossRefMeta
bs' <- crossRefBlocks bs
return $ Pandoc meta' bs'
{- | Run an action in 'CrossRefM' monad with argument, and return pure result.
This is primary function to work with 'CrossRefM' -}
runCrossRef :: forall a b. Meta -> Maybe Format -> (a -> CrossRefM b) -> a -> b
runCrossRef meta fmt action arg = R.runReader (action arg) env
where
settings = meta <> defaultMeta
env = CrossRefEnv {
creSettings = settings
, creOptions = getOptions settings fmt
}
{- | Run an action in 'CrossRefM' monad with argument, and return 'IO' result.
This function will attempt to read pandoc-crossref settings from settings
file specified by crossrefYaml metadata field. -}
-- | Run an action in the 'CrossRefM' monad with an argument, returning
-- the result in 'IO'.  Settings are obtained via 'getSettings', which
-- consults the settings file named by the crossrefYaml metadata field.
runCrossRefIO :: forall a b. Meta -> Maybe Format -> (a -> CrossRefM b) -> a -> IO b
runCrossRefIO meta fmt action arg = do
  settings <- getSettings meta
  let env = CrossRefEnv { creSettings = settings
                        , creOptions = getOptions settings fmt
                        }
  return (R.runReader (action arg) env)
| infotroph/pandoc-crossref | lib/Text/Pandoc/CrossRef.hs | gpl-2.0 | 4,286 | 0 | 15 | 958 | 594 | 327 | 267 | 58 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Graph.VC.Param where
-- $Id$
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data Param = Param
{ knoten :: Int
, kanten :: Int
, deck_knoten_moeglich :: Int
, deck_knoten_maximum :: Int
, kanten_in_deckmenge :: Int
}
deriving ( Typeable )
$(derives [makeReader, makeToDoc] [''Param])
-- | Default parameter set.  Field names are German; literal
-- translations below (exact semantics live in the generator --
-- NOTE(review): confirm against its use sites).
p0 :: Param
p0 = Param { knoten = 15               -- "Knoten": vertices
           , kanten = 25               -- "Kanten": edges
           , deck_knoten_moeglich = 6  -- lit.: cover vertices possible
           , deck_knoten_maximum = 8   -- lit.: cover vertices maximum
           , kanten_in_deckmenge = 3   -- lit.: edges in the cover set
           }
| Erdwolf/autotool-bonn | src/Graph/VC/Param.hs | gpl-2.0 | 617 | 6 | 9 | 200 | 141 | 87 | 54 | 19 | 1 |
{-# LANGUAGE ForeignFunctionInterface, GeneralizedNewtypeDeriving, ExistentialQuantification, TemplateHaskell, TupleSections, ParallelListComp, NamedFieldPuns, NoMonomorphismRestriction, ScopedTypeVariables #-}
--module Mapgen.Campus (makeMap, CampusArgs (..)) where
module Mapgen.Campus where
import Object
import BasicTypes
import CommonTypes
import Mapgen.Rect
import Mapgen.Furniture
import Control.Monad.Random
import Control.Monad.Trans (MonadIO, liftIO)
import Control.Arrow
import Data.Array.IArray
import Data.Bits
import Data.List (group, sort, delete, (\\), nubBy)
import Data.Maybe (catMaybes)
import Data.VectorSpace (magnitude, (^-^), (^+^))
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Graph.Inductive (LPath (..), Gr, sp, gmap, Graph, DynGraph, Node, LEdge, lab, match, (&), labNodes, labEdges, emap, nmap, delNodes, lsuc, lpre, neighbors, noNodes)
import Data.Graph.Inductive.Basic (gsel)
import Data.Graph.Inductive.Query (msTree, bft)
import Data.Accessor
import Data.Accessor.Template
import Control.Monad (ap, foldM, replicateM, liftM2)
import Util (cfoldr, uniformRandomPick, justUniformRandomPick, takeWhileAndOne, matching, ntimesM, none, lengthAtLeast)
import Mapgen.Util (showCoordIntArray, showCoordIntArrayN, floorEverything, addEdgeWalls, justLab)
import Mapgen.RectTree
import Tile
import Mapgen.Room
import Mapgen.CampusArgs
import Data.Set (Set)
import qualified Data.Set as Set
import Debug.Trace
--addOutside _ _ g = addOutsideGently g
addOutside (CampusArgs {numOutsidePaths}) a g = --addOutsideGently g
let roomGraph = nmap room_ g
ws = walls $ ((^+^ Coord 1 1) *** (^-^ Coord 2 2)) $ bounds a
wallLengths = map length ws in do
wallChoices <- replicateM numOutsidePaths (do from <- getRandomR (0, 3)
to <- getRandomR (0, 2)
return (from, if to >= from then to + 1 else to))
positions <- mapM (\(fromWall, toWall) ->
do fromIndex <- getRandomR (0, wallLengths !! fromWall - 1)
toIndex <- getRandomR (0, wallLengths !! toWall - 1)
return (ws !! fromWall !! fromIndex, ws !! toWall !! toIndex)) wallChoices
return $ foldr (\(begin, end) ->
adjustPathU (isInside ^= False) id (path roomGraph (a ! begin) (a ! end) 3)) g positions
-- ^ Break up big compounds of insideness. Currently works by picking a random node whose neighbors' neighbors are all inside, and connecting that node by the shortest path in terms of number of nodes to an outside node.
touchUpOutside g =
let roomGraph = nmap room_ g
choices = map fst $ filter (\(n, l) -> isInside_ l && all (isInside_ . snd) (concatMap (lneighbors g . fst) $ lneighbors g n)) $ labNodes g
nodeIsOutsideRoom n = case lab g n of
Just (RoomLabel { isInside_ = False } ) -> True
_ -> False
in do mStart <- uniformRandomPick choices
case mStart of
Nothing -> return g
Just (from, _) ->
-- todo: handle connection width constraints? (ref. path's minWidth argument and paring-out of edges thinner than the constraint)
case filter (nodeIsOutsideRoom . head) $ bft from g of
[] -> return g
(xs:_) -> touchUpOutside $ adjustPathU (isInside ^= False) id xs g
-- | Neighbours of @n@ paired with their node labels; neighbours whose
-- label lookup yields Nothing are dropped.
lneighbors g n = catMaybes [fmap ((,) m) (lab g m) | m <- neighbors g n]
addCrossingCorridors (CampusArgs {numCrossingCorridors}) a g = do
let innerBounds = ((^+^ Coord 1 1) *** (^-^ Coord 2 2)) $ bounds a
roomGraph = nmap room_ g
positions <- replicateM numCrossingCorridors (liftM2 (,) (getRandomR innerBounds) (getRandomR innerBounds))
return $ foldr (\(begin, end) -> adjustPathU (isInside ^= True) (connectivity ^= Corridor 2) (path roomGraph (a ! begin) (a ! end) 2)) g positions
addCorridors (CampusArgs {numCorridors}) g = do
let roomGraph = nmap room_ g
insideNodes = map fst . filter (isInside_ . snd) $ labNodes g
outsideNodes = map fst . filter (not . isInside_ . snd) $ labNodes g
if not (lengthAtLeast 1 insideNodes && lengthAtLeast 1 outsideNodes)
then return g
else do positions <- replicateM numCorridors (liftM2 (,) (fst `fmap` justUniformRandomPick insideNodes) (fst `fmap` justUniformRandomPick outsideNodes))
return $ foldr (\(begin, end) ->
let p = takeWhileAndOne (\n -> maybe False isInside_ (lab g n)) $ path roomGraph begin end 2 in
adjustPathU id (connectivity ^= Corridor 2) p) g positions
makeMap
:: (Functor m, MonadRandom m, MonadIO m) =>
CampusArgs -> (Coord, Coord) -> m (World, [(Object, Coord)], [Compound])
makeMap args mapBounds@(boundsMin, boundsMax) = do
-- layoutCharacters <- liftIO $ justReadJsonFile "LayoutCharacters.json"
-- roomMap can't deal with non-zero-indexed arrays, but we need space to put the top and left edge walls, so move things about a bit
-- note that boundsMax only needs to be made smaller to make space for walls on the top and left; the bottom and right produce
-- empty space naturally since those tiles are owned by rooms in the roomgraph (but don't have walls rendered on them because there
-- are no rooms on the other side to have edges with)
(roomArray, roomGraph) <- (id *** nmap ((^+^ Coord 1 1) *** id)) `fmap` roomMap mapBounds defaultRectTreeArgs
let world1 :: Array Coord Tile
world1 = floorEverything $ accumArray const 0 mapBounds []
n <- getRandomR (0, noNodes roomGraph - 1)
-- roomGraph2 <- addOutside args roomArray (nmap ((flip RoomLabel) True) $ roomGraph)
let roomGraph2 = adjustNodeLabel (isInside ^= False) n $ nmap ((flip RoomLabel) True) $ roomGraph
-- roomGraph3 <- addCrossingCorridors args roomArray (emap (const $ EdgeLabel None) roomGraph2)
let roomGraph3 = emap (const $ EdgeLabel None) roomGraph2
roomGraph4 <- touchUpOutside roomGraph3
roomGraph5 <- addCorridors args roomGraph4
roomGraph6 <- connectUp roomGraph5
let compounds = calcCompounds roomGraph6
roomGraph7 <- addRandomConnections args roomGraph6
(objs, (world3 :: Array Coord Tile), idlingPoints) <- renderWorld roomGraph7 world1 compounds
let compounds' = zipWith Compound (map (map (room_ . snd) . labNodes) $ compounds) idlingPoints
return $ (addEdgeWalls (bounds world3) world3, objs, compounds')
isConnection None = False
isConnection (Corridor {}) = True
isConnection (OneDoor {}) = True
numConnections (to, _, r, from) = length . filter isConnection . map connectivity_ . map fst $ nubBy (matching snd) $ (to ++ from)
addFeature i f = setBit i (fromEnum f)
umap f = map f . gsel (const True)
roomConnectivityStats g =
map (head &&& length) . group . sort . map snd .
filter (isInside_ . justLab g . fst) .
Map.toList $
foldr (\(from, _, e) g ->
if isConnection . connectivity_ $ e
then Map.insertWith (+) from 1 g
else g) Map.empty $ labEdges g
path roomGraph start end minWidth =
sp start end distanceGraph
where
distanceGraph = gmapU (\(edges, rNum, r) ->
let edgesWithDistance = map (\(_, n) -> (magnitude (approximateCenter r ^-^ (approximateCenter $ justLab roomGraph n)), n)) $
filter (\(_, n) -> minWidth <= connectionWidth r (justLab roomGraph n)) $ edges in
(edgesWithDistance, rNum, ())) roomGraph
--gmapU :: Graph a b => ((Adj b, Node, a) -> (Adj c, Node, d)) -> gr a b -> gr c d
gmapU f = gmap (\(into, n, l, _) -> (\(links, n, l) -> (links, n, l, links)) $ f (into, n, l))
| arirahikkala/straylight-divergence | src/Mapgen/Campus.hs | gpl-3.0 | 7,866 | 0 | 25 | 1,903 | 2,427 | 1,294 | 1,133 | 115 | 4 |
-----------------------------------------------------------------------------
-- Standard Library: List operations
--
-- Suitable for use with Helium, derived from Hugs 98 Standard Library
-- Modifications:
-- * tuple constructors for zip functions
-- * 'generic' functions for Integral type class are excluded
-- * list functions from Prelude are not exported by this module
-----------------------------------------------------------------------------
module List where
import Maybe
infix 5 \\
-- | Index of the first occurrence of @x@, if any.
elemIndex :: Eq a => a -> [a] -> Maybe Int
elemIndex x = findIndex (x ==)
-- | Indices of every occurrence of @x@, in ascending order.
elemIndices :: Eq a => a -> [a] -> [Int]
elemIndices x = findIndices (x ==)
-- | First element satisfying the predicate, if any.
find :: (a -> Bool) -> [a] -> Maybe a
find p = listToMaybe . filter p
-- | Index of the first element satisfying the predicate, if any.
findIndex :: (a -> Bool) -> [a] -> Maybe Int
findIndex p = listToMaybe . findIndices p
-- | Indices (0-based, ascending) of all elements satisfying @p@.
findIndices :: (a -> Bool) -> [a] -> [Int]
findIndices p xs = [i | (x, i) <- zip xs [0 ..], p x]
nub :: (Eq a) => [a] -> [a]
nub = nubBy (==)
-- | Remove duplicates under the equivalence @eq@, keeping the first
-- representative of each class and preserving order.
nubBy :: (a -> a -> Bool) -> [a] -> [a]
nubBy eq = go
  where
    go []     = []
    go (x:xs) = x : go (filter (not . eq x) xs)
delete :: (Eq a) => a -> [a] -> [a]
delete = deleteBy (==)
deleteBy :: (a -> a -> Bool) -> a -> [a] -> [a]
deleteBy eq x [] = []
deleteBy eq x (y:ys) = if x `eq` y then ys else y : deleteBy eq x ys
(\\) :: (Eq a) => [a] -> [a] -> [a]
(\\) = foldl (flip delete)
deleteFirstsBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
deleteFirstsBy eq = foldl (flip (deleteBy eq))
union :: (Eq a) => [a] -> [a] -> [a]
union = unionBy (==)
unionBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
unionBy eq xs ys = xs ++ foldl (flip (deleteBy eq)) (nubBy eq ys) xs
intersect :: (Eq a) => [a] -> [a] -> [a]
intersect = intersectBy (==)
intersectBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
intersectBy eq xs ys = [x | x <- xs, any (eq x) ys]
-- | Insert @sep@ between consecutive elements of the list.
intersperse :: a -> [a] -> [a]
intersperse _ []       = []
intersperse sep (x:xs) = x : prepend xs
  where
    -- Each remaining element is preceded by the separator.
    prepend []     = []
    prepend (y:ys) = sep : y : prepend ys
-- | Transpose rows and columns.  Empty rows are skipped rather than
-- terminating the result, so ragged input works: each output row is the
-- heads of all remaining non-empty rows.
-- e.g. transpose [[1,2],[3],[4,5]] == [[1,3,4],[2,5]]
transpose :: [[a]] -> [[a]]
transpose [] = []
transpose ([] : xss) = transpose xss
transpose ((x:xs) : xss) = (x : [h | (h:t) <- xss]) :
                           transpose (xs : [ t | (h:t) <- xss])
-- | Split a list into the elements that satisfy @p@ and those that
-- don't, each in original order.
partition :: (a -> Bool) -> [a] -> ([a],[a])
partition p = foldr step ([], [])
  where
    step x (yes, no)
      | p x       = (x : yes, no)
      | otherwise = (yes, x : no)
-- group splits its list argument into a list of lists of equal, adjacent
-- elements. e.g.,
-- group "Mississippi" == ["M","i","ss","i","ss","i","pp","i"]
group :: (Eq a) => [a] -> [[a]]
group = groupBy (==)
groupBy :: (a -> a -> Bool) -> [a] -> [[a]]
groupBy eq [] = []
groupBy eq (x:xs) = (x:ys) : groupBy eq zs
where (ys,zs) = span (eq x) xs
-- | All initial segments of the list, shortest first.
-- e.g. inits "abc" == ["","a","ab","abc"]
inits :: [a] -> [[a]]
inits []     = [[]]
inits (x:xs) = [] : map (x:) (inits xs)
-- tails xs returns the list of all final segments of xs, longest first.
-- e.g., tails "abc" == ["abc", "bc", "c",""]
tails :: [a] -> [[a]]
tails [] = [[]]
tails xxs@(_:xs) = xxs : tails xs
isPrefixOf :: (Eq a) => [a] -> [a] -> Bool
isPrefixOf [] _ = True
isPrefixOf _ [] = False
isPrefixOf (x:xs) (y:ys) = x == y && isPrefixOf xs ys
isSuffixOf :: (Eq a) => [a] -> [a] -> Bool
isSuffixOf x y = reverse x `isPrefixOf` reverse y
mapAccumL :: (a -> b -> (a, c)) -> a -> [b] -> (a, [c])
mapAccumL f s [] = (s, [])
mapAccumL f s (x:xs) = (s'',y:ys)
where (s', y ) = f s x
(s'',ys) = mapAccumL f s' xs
mapAccumR :: (a -> b -> (a, c)) -> a -> [b] -> (a, [c])
mapAccumR f s [] = (s, [])
mapAccumR f s (x:xs) = (s'', y:ys)
where (s'',y ) = f s' x
(s', ys) = mapAccumR f s xs
-- | Build a list from a seed: 'Nothing' stops, @Just (a, seed')@ emits
-- @a@ and continues from @seed'@.
unfoldr :: (b -> Maybe (a,b)) -> b -> [a]
unfoldr f = go
  where
    go seed = case f seed of
      Nothing         -> []
      Just (a, seed') -> a : go seed'
sort :: (Ord a) => [a] -> [a]
sort = sortBy compare
sortBy :: (a -> a -> Ordering) -> [a] -> [a]
sortBy cmp = foldr (insertBy cmp) []
insert :: (Ord a) => a -> [a] -> [a]
insert = insertBy compare
insertBy :: (a -> a -> Ordering) -> a -> [a] -> [a]
insertBy cmp x [] = [x]
insertBy cmp x ys@(y:ys')
= case cmp x y of
GT -> y : insertBy cmp x ys'
_ -> x : ys
-- | Largest element under @cmp@.  Errors on the empty list.
-- Tie behaviour: the local 'max' keeps x only on GT, so on EQ the later
-- element wins (contrast 'minimumBy', where the earlier one wins).
maximumBy :: (a -> a -> Ordering) -> [a] -> a
maximumBy cmp [] = error "List.maximumBy: empty list"
maximumBy cmp xs = foldl1 max xs
        where
          max x y = case cmp x y of
                        GT -> x
                        _  -> y
-- | Smallest element under @cmp@.  Errors on the empty list.
-- On EQ the earlier element is kept ('min' returns x unless GT).
minimumBy :: (a -> a -> Ordering) -> [a] -> a
minimumBy cmp [] = error "List.minimumBy: empty list"
minimumBy cmp xs = foldl1 min xs
        where
          min x y = case cmp x y of
                        GT -> y
                        _  -> x
zip4 :: [a] -> [b] -> [c] -> [d] -> [(a,b,c,d)]
zip4 = zipWith4 (\a b c d -> (a, b, c, d))
zip5 :: [a] -> [b] -> [c] -> [d] -> [e] -> [(a,b,c,d,e)]
zip5 = zipWith5 (\a b c d e -> (a, b, c, d, e))
zip6 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] ->
[(a,b,c,d,e,f)]
zip6 = zipWith6 (\a b c d e f -> (a, b, c, d, e, f))
zip7 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] ->
[g] -> [(a,b,c,d,e,f,g)]
zip7 = zipWith7 (\a b c d e f g -> (a, b, c, d, e, f, g))
zipWith4 :: (a->b->c->d->e) -> [a]->[b]->[c]->[d]->[e]
zipWith4 z (a:as) (b:bs) (c:cs) (d:ds)
= z a b c d : zipWith4 z as bs cs ds
zipWith4 _ _ _ _ _ = []
zipWith5 :: (a->b->c->d->e->f) ->
[a]->[b]->[c]->[d]->[e]->[f]
zipWith5 z (a:as) (b:bs) (c:cs) (d:ds) (e:es)
= z a b c d e : zipWith5 z as bs cs ds es
zipWith5 _ _ _ _ _ _ = []
zipWith6 :: (a->b->c->d->e->f->g) ->
[a]->[b]->[c]->[d]->[e]->[f]->[g]
zipWith6 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs)
= z a b c d e f : zipWith6 z as bs cs ds es fs
zipWith6 _ _ _ _ _ _ _ = []
zipWith7 :: (a->b->c->d->e->f->g->h) ->
[a]->[b]->[c]->[d]->[e]->[f]->[g]->[h]
zipWith7 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs) (g:gs)
= z a b c d e f g : zipWith7 z as bs cs ds es fs gs
zipWith7 _ _ _ _ _ _ _ _ = []
unzip4 :: [(a,b,c,d)] -> ([a],[b],[c],[d])
unzip4 = foldr (\(a,b,c,d) ~(as,bs,cs,ds) ->
(a:as,b:bs,c:cs,d:ds))
([],[],[],[])
unzip5 :: [(a,b,c,d,e)] -> ([a],[b],[c],[d],[e])
unzip5 = foldr (\(a,b,c,d,e) ~(as,bs,cs,ds,es) ->
(a:as,b:bs,c:cs,d:ds,e:es))
([],[],[],[],[])
unzip6 :: [(a,b,c,d,e,f)] -> ([a],[b],[c],[d],[e],[f])
unzip6 = foldr (\(a,b,c,d,e,f) ~(as,bs,cs,ds,es,fs) ->
(a:as,b:bs,c:cs,d:ds,e:es,f:fs))
([],[],[],[],[],[])
unzip7 :: [(a,b,c,d,e,f,g)] -> ([a],[b],[c],[d],[e],[f],[g])
unzip7 = foldr (\(a,b,c,d,e,f,g) ~(as,bs,cs,ds,es,fs,gs) ->
(a:as,b:bs,c:cs,d:ds,e:es,f:fs,g:gs))
([],[],[],[],[],[],[])
-----------------------------------------------------------------------------
| Helium4Haskell/helium | lib/List.hs | gpl-3.0 | 8,416 | 0 | 13 | 3,368 | 4,278 | 2,390 | 1,888 | 150 | 2 |
--project euler problem 27
{--
Euler published the remarkable quadratic formula:
n² + n + 41
It turns out that the formula will produce 40 primes for the consecutive values n = 0 to 39. However, when n = 40, 402 + 40 + 41 = 40(40 + 1) + 41 is divisible by 41, and certainly when n = 41, 41² + 41 + 41 is clearly divisible by 41.
Using computers, the incredible formula n² 79n + 1601 was discovered, which produces 80 primes for the consecutive values n = 0 to 79. The product of the coefficients, 79 and 1601, is 126479.
Considering quadratics of the form:
n² + an + b, where |a| < 1000 and |b| < 1000
where |n| is the modulus/absolute value of n
e.g. |11| = 11 and |4| = 4
Find the product of the coefficients, a and b, for the quadratic expression that produces the maximum number of primes for consecutive values of n, starting with n = 0.
--}
import Data.Array
import Data.List
import Data.Ord (comparing)
-- borrowed from 050.hs
-- | All primes strictly below @n@, via an array-based sieve of
-- Eratosthenes: sieving primes up to sqrt n are obtained recursively,
-- their multiples (from 2p upward, step p) are marked False in a Bool
-- array built with 'accumArray', and the surviving indices are returned.
-- The final @drop 2@ discards indices 0 and 1, which are never marked.
primes :: Int -> [Int]
{-# NOINLINE primes #-} -- kept from the original; presumably to stop inlining/recomputation -- TODO confirm intent
primes n
  | n <= 2 = []  -- no primes below 2
  | otherwise =
    let
      -- primes up to sqrt n suffice to sieve everything below n
      sqrPrimes = primes (ceiling (sqrt (fromIntegral n)))
      -- all composite indices: every multiple of each sieving prime
      sieves = concat
        [[2 * p, 3 * p..n - 1] | p <- sqrPrimes]
      sieves' = zip sieves (repeat False)
      -- True = still prime; (&&) folds any False mark in
      flags = accumArray (&&) True (0, n - 1) sieves'
    in
      drop 2 (filter (flags!) [0..n - 1])
-- | Evaluate the Euler quadratic n^2 + a*n + b at the given n.
quadratic :: Int -> Int -> Int -> Int
quadratic a b n = (n + a) * n + b
-- | Values of the quadratic n^2 + a*n + b for n = 0..1000.
list :: Int -> Int -> [Int]
list a b = map (quadratic a b) [0..1000]
-- | Length of the initial run of prime values in the given list.
--
-- Bug fix: the original tested primality by membership in @primes 1000@,
-- i.e. only against primes below 1000. The generated quadratic values
-- routinely exceed 1000 (1601, from the problem statement itself, is
-- prime), so genuinely prime values were rejected and runs were cut
-- short. A self-contained trial-division test is correct for every
-- non-negative Int, and is O(sqrt m) per element instead of an O(#primes)
-- list-membership scan per element.
consecutive :: [Int] -> Int
consecutive = length . takeWhile isPrime
  where
    -- True iff m is prime: handle small cases, then trial-divide by odd
    -- candidates up to sqrt m.
    isPrime m
      | m < 2     = False
      | m < 4     = True          -- 2 and 3
      | even m    = False
      | otherwise = all (\d -> m `mod` d /= 0)
                        (takeWhile (\d -> d * d <= m) [3,5..])
-- | All candidate coefficient pairs: for each a in [-999..999] and each
-- prime b below 1000 (the quadratic at n = 0 is b itself, so b must be
-- prime for any run to start), emit (a*b, length of the prime run).
-- The @a+b > 0@ guard keeps the n = 1 value, 1 + a + b, at least 2 —
-- presumably to prune pairs that cannot extend the run; TODO confirm.
generator :: [(Int,Int)]
generator = [(a*b, consecutive (list a b)) | a<-[-999..999], b<-(primes 1000), a+b > 0]
-- | The (a*b, run length) pair with the longest prime run.
-- Idiom fix: uses 'comparing snd' ('Data.Ord') instead of a hand-written
-- comparator that left half its pattern variables unused; behavior is
-- unchanged ('maximumBy' tie-breaking is identical since the comparator
-- is the same ordering).
findMax :: [(Int,Int)] -> (Int,Int)
findMax = maximumBy (comparing snd)
-- | Print the winning (a*b, prime-run length) pair.
main = print $ findMax generator
| goalieca/haskelling | 027.hs | gpl-3.0 | 1,982 | 2 | 15 | 460 | 519 | 277 | 242 | 26 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.ListVolumes
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation lists the iSCSI stored volumes of a gateway. Results are
-- sorted by volume ARN. The response includes only the volume ARNs. If you want
-- additional volume information, use the 'DescribeStorediSCSIVolumes' API.
--
-- The operation supports pagination. By default, the operation returns a
-- maximum of up to 100 volumes. You can optionally specify the 'Limit' field in
-- the body to limit the number of volumes in the response. If the number of
-- volumes returned in the response is truncated, the response includes a Marker
-- field. You can use this Marker value in your subsequent request to retrieve
-- the next set of volumes.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_ListVolumes.html>
module Network.AWS.StorageGateway.ListVolumes
(
-- * Request
ListVolumes
-- ** Request constructor
, listVolumes
-- ** Request lenses
, lvGatewayARN
, lvLimit
, lvMarker
-- * Response
, ListVolumesResponse
-- ** Response constructor
, listVolumesResponse
-- ** Response lenses
, lvrGatewayARN
, lvrMarker
, lvrVolumeInfos
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
-- | Request payload for ListVolumes; see the lens haddocks for per-field
-- semantics.
data ListVolumes = ListVolumes
    { _lvGatewayARN :: Text       -- required gateway ARN
    , _lvLimit      :: Maybe Nat  -- optional page-size cap
    , _lvMarker     :: Maybe Text -- optional pagination marker
    } deriving (Eq, Ord, Read, Show)
-- | 'ListVolumes' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lvGatewayARN' @::@ 'Text'
--
-- * 'lvLimit' @::@ 'Maybe' 'Natural'
--
-- * 'lvMarker' @::@ 'Maybe' 'Text'
--
-- | Build a 'ListVolumes' request for the given gateway, with no page
-- limit and no pagination marker.
listVolumes :: Text -- ^ 'lvGatewayARN'
            -> ListVolumes
listVolumes arn = ListVolumes
    { _lvGatewayARN = arn
    , _lvLimit      = Nothing
    , _lvMarker     = Nothing
    }
lvGatewayARN :: Lens' ListVolumes Text
lvGatewayARN = lens _lvGatewayARN (\st v -> st { _lvGatewayARN = v })

-- | Specifies that the list of volumes returned be limited to the specified
-- number of items.
lvLimit :: Lens' ListVolumes (Maybe Natural)
lvLimit = lens _lvLimit (\st v -> st { _lvLimit = v }) . mapping _Nat

-- | A string that indicates the position at which to begin the returned list of
-- volumes. Obtain the marker from the response of a previous List iSCSI Volumes
-- request.
lvMarker :: Lens' ListVolumes (Maybe Text)
lvMarker = lens _lvMarker (\st v -> st { _lvMarker = v })
-- | Response payload for ListVolumes.
data ListVolumesResponse = ListVolumesResponse
    { _lvrGatewayARN  :: Maybe Text -- echoed gateway ARN
    , _lvrMarker      :: Maybe Text -- marker for the next page when truncated
    , _lvrVolumeInfos :: List "VolumeInfos" VolumeInfo -- returned volume summaries
    } deriving (Eq, Read, Show)
-- | 'ListVolumesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lvrGatewayARN' @::@ 'Maybe' 'Text'
--
-- * 'lvrMarker' @::@ 'Maybe' 'Text'
--
-- * 'lvrVolumeInfos' @::@ ['VolumeInfo']
--
-- | An empty 'ListVolumesResponse': no ARN, no marker, no volumes.
listVolumesResponse :: ListVolumesResponse
listVolumesResponse = ListVolumesResponse Nothing Nothing mempty
lvrGatewayARN :: Lens' ListVolumesResponse (Maybe Text)
lvrGatewayARN = lens _lvrGatewayARN (\st v -> st { _lvrGatewayARN = v })

lvrMarker :: Lens' ListVolumesResponse (Maybe Text)
lvrMarker = lens _lvrMarker (\st v -> st { _lvrMarker = v })

lvrVolumeInfos :: Lens' ListVolumesResponse [VolumeInfo]
lvrVolumeInfos = lens _lvrVolumeInfos (\st v -> st { _lvrVolumeInfos = v }) . _List
-- Request is always sent to the service root.
instance ToPath ListVolumes where
    toPath = const "/"

-- No query-string parameters; all fields travel in the JSON body.
instance ToQuery ListVolumes where
    toQuery = const mempty

instance ToHeaders ListVolumes

-- Serialize all three request fields under their wire names.
instance ToJSON ListVolumes where
    toJSON ListVolumes{..} = object
        [ "GatewayARN" .= _lvGatewayARN
        , "Marker" .= _lvMarker
        , "Limit" .= _lvLimit
        ]

-- JSON POST with action name "ListVolumes"; response parsed as JSON.
instance AWSRequest ListVolumes where
    type Sv ListVolumes = StorageGateway
    type Rs ListVolumes = ListVolumesResponse

    request = post "ListVolumes"
    response = jsonResponse

-- Missing "VolumeInfos" decodes as the empty list.
instance FromJSON ListVolumesResponse where
    parseJSON = withObject "ListVolumesResponse" $ \o -> ListVolumesResponse
        <$> o .:? "GatewayARN"
        <*> o .:? "Marker"
        <*> o .:? "VolumeInfos" .!= mempty

-- Pagination: stop when the response carries no marker; otherwise feed the
-- response marker back into the next request's 'lvMarker'.
instance AWSPager ListVolumes where
    page rq rs
        | stop (rs ^. lvrMarker) = Nothing
        | otherwise = (\x -> rq & lvMarker ?~ x)
            <$> (rs ^. lvrMarker)
| dysinger/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/ListVolumes.hs | mpl-2.0 | 5,418 | 0 | 14 | 1,230 | 816 | 480 | 336 | 84 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Spanner.Projects.Instances.Databases.DropDatabase
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Drops (aka deletes) a Cloud Spanner database. Completed backups for the
-- database will be retained according to their \`expire_time\`.
--
-- /See:/ <https://cloud.google.com/spanner/ Cloud Spanner API Reference> for @spanner.projects.instances.databases.dropDatabase@.
module Network.Google.Resource.Spanner.Projects.Instances.Databases.DropDatabase
(
-- * REST Resource
ProjectsInstancesDatabasesDropDatabaseResource
-- * Creating a Request
, projectsInstancesDatabasesDropDatabase
, ProjectsInstancesDatabasesDropDatabase
-- * Request Lenses
, pidddXgafv
, pidddUploadProtocol
, pidddDatabase
, pidddAccessToken
, pidddUploadType
, pidddCallback
) where
import Network.Google.Prelude
import Network.Google.Spanner.Types
-- | A resource alias for @spanner.projects.instances.databases.dropDatabase@ method which the
-- 'ProjectsInstancesDatabasesDropDatabase' request conforms to.
type ProjectsInstancesDatabasesDropDatabaseResource =
     -- DELETE v1/{database}?...; the call returns the empty proto message.
     "v1" :>
       Capture "database" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Drops (aka deletes) a Cloud Spanner database. Completed backups for the
-- database will be retained according to their \`expire_time\`.
--
-- /See:/ 'projectsInstancesDatabasesDropDatabase' smart constructor.
data ProjectsInstancesDatabasesDropDatabase =
  ProjectsInstancesDatabasesDropDatabase'
    { _pidddXgafv :: !(Maybe Xgafv)          -- V1 error format
    , _pidddUploadProtocol :: !(Maybe Text)  -- e.g. "raw", "multipart"
    , _pidddDatabase :: !Text                -- required: database to drop
    , _pidddAccessToken :: !(Maybe Text)     -- OAuth access token
    , _pidddUploadType :: !(Maybe Text)      -- legacy upload protocol
    , _pidddCallback :: !(Maybe Text)        -- JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsInstancesDatabasesDropDatabase' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pidddXgafv'
--
-- * 'pidddUploadProtocol'
--
-- * 'pidddDatabase'
--
-- * 'pidddAccessToken'
--
-- * 'pidddUploadType'
--
-- * 'pidddCallback'
-- | Build a drop-database request for the given database; every optional
-- field starts unset.
projectsInstancesDatabasesDropDatabase
    :: Text -- ^ 'pidddDatabase'
    -> ProjectsInstancesDatabasesDropDatabase
projectsInstancesDatabasesDropDatabase db =
  ProjectsInstancesDatabasesDropDatabase'
    { _pidddXgafv = Nothing
    , _pidddUploadProtocol = Nothing
    , _pidddDatabase = db
    , _pidddAccessToken = Nothing
    , _pidddUploadType = Nothing
    , _pidddCallback = Nothing
    }
-- | V1 error format.
pidddXgafv :: Lens' ProjectsInstancesDatabasesDropDatabase (Maybe Xgafv)
pidddXgafv = lens _pidddXgafv (\rq v -> rq{_pidddXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pidddUploadProtocol :: Lens' ProjectsInstancesDatabasesDropDatabase (Maybe Text)
pidddUploadProtocol =
  lens _pidddUploadProtocol (\rq v -> rq{_pidddUploadProtocol = v})

-- | Required. The database to be dropped.
pidddDatabase :: Lens' ProjectsInstancesDatabasesDropDatabase Text
pidddDatabase = lens _pidddDatabase (\rq v -> rq{_pidddDatabase = v})

-- | OAuth access token.
pidddAccessToken :: Lens' ProjectsInstancesDatabasesDropDatabase (Maybe Text)
pidddAccessToken =
  lens _pidddAccessToken (\rq v -> rq{_pidddAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pidddUploadType :: Lens' ProjectsInstancesDatabasesDropDatabase (Maybe Text)
pidddUploadType = lens _pidddUploadType (\rq v -> rq{_pidddUploadType = v})

-- | JSONP
pidddCallback :: Lens' ProjectsInstancesDatabasesDropDatabase (Maybe Text)
pidddCallback = lens _pidddCallback (\rq v -> rq{_pidddCallback = v})
-- Wire the request record onto the REST resource: the captured database
-- path segment first, then the query parameters in the order the resource
-- type declares them, always requesting the JSON alt format.
instance GoogleRequest
           ProjectsInstancesDatabasesDropDatabase
         where
        type Rs ProjectsInstancesDatabasesDropDatabase =
             Empty
        type Scopes ProjectsInstancesDatabasesDropDatabase =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/spanner.admin"]
        requestClient
          ProjectsInstancesDatabasesDropDatabase'{..}
          = go _pidddDatabase _pidddXgafv _pidddUploadProtocol
              _pidddAccessToken
              _pidddUploadType
              _pidddCallback
              (Just AltJSON)
              spannerService
            where go
                    = buildClient
                        (Proxy ::
                           Proxy ProjectsInstancesDatabasesDropDatabaseResource)
                        mempty
| brendanhay/gogol | gogol-spanner/gen/Network/Google/Resource/Spanner/Projects/Instances/Databases/DropDatabase.hs | mpl-2.0 | 5,503 | 0 | 15 | 1,152 | 701 | 411 | 290 | 109 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.Operations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all instance operations that have been performed on the given
-- Cloud SQL instance in the reverse chronological order of the start time.
--
-- /See:/ <https://cloud.google.com/sql/docs/reference/latest Cloud SQL Administration API Reference> for @sql.operations.list@.
module Network.Google.Resource.SQL.Operations.List
(
-- * REST Resource
OperationsListResource
-- * Creating a Request
, operationsList
, OperationsList
-- * Request Lenses
, olProject
, olPageToken
, olMaxResults
, olInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.operations.list@ method which the
-- 'OperationsList' request conforms to.
type OperationsListResource =
     -- GET sql/v1beta4/projects/{project}/operations?instance=…&pageToken=…&maxResults=…
     "sql" :>
       "v1beta4" :>
         "projects" :>
           Capture "project" Text :>
             "operations" :>
               QueryParam "instance" Text :>
                 QueryParam "pageToken" Text :>
                   QueryParam "maxResults" (Textual Word32) :>
                     QueryParam "alt" AltJSON :>
                       Get '[JSON] OperationsListResponse
-- | Lists all instance operations that have been performed on the given
-- Cloud SQL instance in the reverse chronological order of the start time.
--
-- /See:/ 'operationsList' smart constructor.
data OperationsList = OperationsList'
    { _olProject :: !Text                      -- required project ID
    , _olPageToken :: !(Maybe Text)            -- resume token from a prior page
    , _olMaxResults :: !(Maybe (Textual Word32)) -- page-size cap
    , _olInstance :: !Text                     -- required Cloud SQL instance ID
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'olProject'
--
-- * 'olPageToken'
--
-- * 'olMaxResults'
--
-- * 'olInstance'
-- | Build an 'OperationsList' request for the given project and instance;
-- pagination fields start unset.
operationsList
    :: Text -- ^ 'olProject'
    -> Text -- ^ 'olInstance'
    -> OperationsList
operationsList proj inst = OperationsList'
    { _olProject = proj
    , _olPageToken = Nothing
    , _olMaxResults = Nothing
    , _olInstance = inst
    }
-- | Project ID of the project that contains the instance.
olProject :: Lens' OperationsList Text
olProject = lens _olProject (\rq v -> rq{_olProject = v})

-- | A previously-returned page token representing part of the larger set of
-- results to view.
olPageToken :: Lens' OperationsList (Maybe Text)
olPageToken = lens _olPageToken (\rq v -> rq{_olPageToken = v})

-- | Maximum number of operations per response.
olMaxResults :: Lens' OperationsList (Maybe Word32)
olMaxResults =
  lens _olMaxResults (\rq v -> rq{_olMaxResults = v}) . mapping _Coerce

-- | Cloud SQL instance ID. This does not include the project ID.
olInstance :: Lens' OperationsList Text
olInstance = lens _olInstance (\rq v -> rq{_olInstance = v})
-- Wire the request record onto the REST resource: project as the path
-- capture, instance/pageToken/maxResults as query parameters, JSON alt.
instance GoogleRequest OperationsList where
        type Rs OperationsList = OperationsListResponse
        type Scopes OperationsList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/sqlservice.admin"]
        requestClient OperationsList'{..}
          = go _olProject (Just _olInstance) _olPageToken
              _olMaxResults
              (Just AltJSON)
              sQLAdminService
            where go
                    = buildClient (Proxy :: Proxy OperationsListResource)
                        mempty
| rueshyna/gogol | gogol-sqladmin/gen/Network/Google/Resource/SQL/Operations/List.hs | mpl-2.0 | 4,219 | 0 | 16 | 1,006 | 572 | 336 | 236 | 84 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.CloudTasks.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.CloudTasks.Types.Product where
import Network.Google.CloudTasks.Types.Sum
import Network.Google.Prelude
-- | Rate limits. This message determines the maximum rate that tasks can be
-- dispatched by a queue, regardless of whether the dispatch is a first
-- task attempt or a retry. Note: The debugging command, RunTask, will run
-- a task even if the queue has reached its RateLimits.
--
-- /See:/ 'rateLimits' smart constructor.
data RateLimits =
  RateLimits'
    { _rlMaxConcurrentDispatches :: !(Maybe (Textual Int32)) -- cap on in-flight dispatches
    , _rlMaxDispatchesPerSecond :: !(Maybe (Textual Double)) -- max dispatch rate
    , _rlMaxBurstSize :: !(Maybe (Textual Int32))            -- output only: token-bucket burst size
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RateLimits' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rlMaxConcurrentDispatches'
--
-- * 'rlMaxDispatchesPerSecond'
--
-- * 'rlMaxBurstSize'
-- | A 'RateLimits' with every field unset.
rateLimits
    :: RateLimits
rateLimits = RateLimits' Nothing Nothing Nothing
-- | The maximum number of concurrent tasks that Cloud Tasks allows to be
-- dispatched for this queue. After this threshold has been reached, Cloud
-- Tasks stops dispatching tasks until the number of concurrent requests
-- decreases. If unspecified when the queue is created, Cloud Tasks will
-- pick the default. The maximum allowed value is 5,000. This field has the
-- same meaning as [max_concurrent_requests in
-- queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#max_concurrent_requests).
rlMaxConcurrentDispatches :: Lens' RateLimits (Maybe Int32)
rlMaxConcurrentDispatches =
  lens _rlMaxConcurrentDispatches
       (\rec v -> rec{_rlMaxConcurrentDispatches = v})
    . mapping _Coerce
-- | The maximum rate at which tasks are dispatched from this queue. If
-- unspecified when the queue is created, Cloud Tasks will pick the
-- default. * The maximum allowed value is 500. This field has the same
-- meaning as [rate in
-- queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#rate).
rlMaxDispatchesPerSecond :: Lens' RateLimits (Maybe Double)
rlMaxDispatchesPerSecond =
  lens _rlMaxDispatchesPerSecond
       (\rec v -> rec{_rlMaxDispatchesPerSecond = v})
    . mapping _Coerce
-- | Output only. The max burst size. Max burst size limits how fast tasks in
-- queue are processed when many tasks are in the queue and the rate is
-- high. This field allows the queue to have a high rate so processing
-- starts shortly after a task is enqueued, but still limits resource usage
-- when many tasks are enqueued in a short period of time. The [token
-- bucket](https:\/\/wikipedia.org\/wiki\/Token_Bucket) algorithm is used
-- to control the rate of task dispatches. Each queue has a token bucket
-- that holds tokens, up to the maximum specified by \`max_burst_size\`.
-- Each time a task is dispatched, a token is removed from the bucket.
-- Tasks will be dispatched until the queue\'s bucket runs out of tokens.
-- The bucket will be continuously refilled with new tokens based on
-- max_dispatches_per_second. Cloud Tasks will pick the value of
-- \`max_burst_size\` based on the value of max_dispatches_per_second. For
-- queues that were created or updated using \`queue.yaml\/xml\`,
-- \`max_burst_size\` is equal to
-- [bucket_size](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#bucket_size).
-- Since \`max_burst_size\` is output only, if UpdateQueue is called on a
-- queue created by \`queue.yaml\/xml\`, \`max_burst_size\` will be reset
-- based on the value of max_dispatches_per_second, regardless of whether
-- max_dispatches_per_second is updated.
rlMaxBurstSize :: Lens' RateLimits (Maybe Int32)
rlMaxBurstSize =
  lens _rlMaxBurstSize (\rec v -> rec{_rlMaxBurstSize = v})
    . mapping _Coerce
-- Decode from the wire field names; all three fields are optional.
instance FromJSON RateLimits where
        parseJSON
          = withObject "RateLimits"
              (\ o ->
                 RateLimits' <$>
                   (o .:? "maxConcurrentDispatches") <*>
                     (o .:? "maxDispatchesPerSecond")
                     <*> (o .:? "maxBurstSize"))

-- Encode, omitting unset fields entirely (catMaybes drops Nothing).
instance ToJSON RateLimits where
        toJSON RateLimits'{..}
          = object
              (catMaybes
                 [("maxConcurrentDispatches" .=) <$>
                    _rlMaxConcurrentDispatches,
                  ("maxDispatchesPerSecond" .=) <$>
                    _rlMaxDispatchesPerSecond,
                  ("maxBurstSize" .=) <$> _rlMaxBurstSize])
-- | Contains information needed for generating an [OAuth
-- token](https:\/\/developers.google.com\/identity\/protocols\/OAuth2).
-- This type of authorization should generally only be used when calling
-- Google APIs hosted on *.googleapis.com.
--
-- /See:/ 'oAuthToken' smart constructor.
data OAuthToken =
  OAuthToken'
    { _oatScope :: !(Maybe Text)               -- OAuth scope for the token
    , _oatServiceAccountEmail :: !(Maybe Text) -- service account used to mint it
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OAuthToken' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oatScope'
--
-- * 'oatServiceAccountEmail'
-- | An 'OAuthToken' with both fields unset.
oAuthToken
    :: OAuthToken
oAuthToken = OAuthToken' Nothing Nothing
-- | OAuth scope to be used for generating OAuth access token. If not
-- specified, \"https:\/\/www.googleapis.com\/auth\/cloud-platform\" will
-- be used.
oatScope :: Lens' OAuthToken (Maybe Text)
oatScope = lens _oatScope (\rec v -> rec{_oatScope = v})

-- | [Service account
-- email](https:\/\/cloud.google.com\/iam\/docs\/service-accounts) to be
-- used for generating OAuth token. The service account must be within the
-- same project as the queue. The caller must have
-- iam.serviceAccounts.actAs permission for the service account.
oatServiceAccountEmail :: Lens' OAuthToken (Maybe Text)
oatServiceAccountEmail =
  lens _oatServiceAccountEmail (\rec v -> rec{_oatServiceAccountEmail = v})
-- Decode from the wire field names; both fields are optional.
instance FromJSON OAuthToken where
        parseJSON
          = withObject "OAuthToken"
              (\ o ->
                 OAuthToken' <$>
                   (o .:? "scope") <*> (o .:? "serviceAccountEmail"))

-- Encode, omitting unset fields entirely (catMaybes drops Nothing).
instance ToJSON OAuthToken where
        toJSON OAuthToken'{..}
          = object
              (catMaybes
                 [("scope" .=) <$> _oatScope,
                  ("serviceAccountEmail" .=) <$>
                    _oatServiceAccountEmail])
-- | The \`Status\` type defines a logical error model that is suitable for
-- different programming environments, including REST APIs and RPC APIs. It
-- is used by [gRPC](https:\/\/github.com\/grpc). Each \`Status\` message
-- contains three pieces of data: error code, error message, and error
-- details. You can find out more about this error model and how to work
-- with it in the [API Design
-- Guide](https:\/\/cloud.google.com\/apis\/design\/errors).
--
-- /See:/ 'status' smart constructor.
data Status =
  Status'
    { _sDetails :: !(Maybe [StatusDetailsItem]) -- error detail messages
    , _sCode :: !(Maybe (Textual Int32))        -- google.rpc.Code value
    , _sMessage :: !(Maybe Text)                -- developer-facing English message
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Status' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sDetails'
--
-- * 'sCode'
--
-- * 'sMessage'
-- | A 'Status' with every field unset.
status
    :: Status
status = Status' Nothing Nothing Nothing
-- | A list of messages that carry the error details. There is a common set
-- of message types for APIs to use.
sDetails :: Lens' Status [StatusDetailsItem]
sDetails =
  lens _sDetails (\rec v -> rec{_sDetails = v}) . _Default . _Coerce

-- | The status code, which should be an enum value of google.rpc.Code.
sCode :: Lens' Status (Maybe Int32)
sCode = lens _sCode (\rec v -> rec{_sCode = v}) . mapping _Coerce

-- | A developer-facing error message, which should be in English. Any
-- user-facing error message should be localized and sent in the
-- google.rpc.Status.details field, or localized by the client.
sMessage :: Lens' Status (Maybe Text)
sMessage = lens _sMessage (\rec v -> rec{_sMessage = v})
-- Decode from the wire field names; a missing "details" becomes [].
instance FromJSON Status where
        parseJSON
          = withObject "Status"
              (\ o ->
                 Status' <$>
                   (o .:? "details" .!= mempty) <*> (o .:? "code") <*>
                     (o .:? "message"))

-- Encode, omitting unset fields entirely (catMaybes drops Nothing).
instance ToJSON Status where
        toJSON Status'{..}
          = object
              (catMaybes
                 [("details" .=) <$> _sDetails,
                  ("code" .=) <$> _sCode,
                  ("message" .=) <$> _sMessage])
-- | Represents a textual expression in the Common Expression Language (CEL)
-- syntax. CEL is a C-like expression language. The syntax and semantics of
-- CEL are documented at https:\/\/github.com\/google\/cel-spec. Example
-- (Comparison): title: \"Summary size limit\" description: \"Determines if
-- a summary is less than 100 chars\" expression: \"document.summary.size()
-- \< 100\" Example (Equality): title: \"Requestor is owner\" description:
-- \"Determines if requestor is the document owner\" expression:
-- \"document.owner == request.auth.claims.email\" Example (Logic): title:
-- \"Public documents\" description: \"Determine whether the document
-- should be publicly visible\" expression: \"document.type != \'private\'
-- && document.type != \'internal\'\" Example (Data Manipulation): title:
-- \"Notification string\" description: \"Create a notification string with
-- a timestamp.\" expression: \"\'New message received at \' +
-- string(document.create_time)\" The exact variables and functions that
-- may be referenced within an expression are determined by the service
-- that evaluates it. See the service documentation for additional
-- information.
--
-- /See:/ 'expr' smart constructor.
data Expr =
  Expr'
    { _eLocation :: !(Maybe Text)    -- where the expression lives, for error reporting
    , _eExpression :: !(Maybe Text)  -- CEL expression text
    , _eTitle :: !(Maybe Text)       -- short purpose string
    , _eDescription :: !(Maybe Text) -- longer description
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Expr' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eLocation'
--
-- * 'eExpression'
--
-- * 'eTitle'
--
-- * 'eDescription'
-- | An 'Expr' with every field unset.
expr
    :: Expr
expr = Expr' Nothing Nothing Nothing Nothing
-- | Optional. String indicating the location of the expression for error
-- reporting, e.g. a file name and a position in the file.
eLocation :: Lens' Expr (Maybe Text)
eLocation = lens _eLocation (\rec v -> rec{_eLocation = v})

-- | Textual representation of an expression in Common Expression Language
-- syntax.
eExpression :: Lens' Expr (Maybe Text)
eExpression = lens _eExpression (\rec v -> rec{_eExpression = v})

-- | Optional. Title for the expression, i.e. a short string describing its
-- purpose. This can be used e.g. in UIs which allow to enter the
-- expression.
eTitle :: Lens' Expr (Maybe Text)
eTitle = lens _eTitle (\rec v -> rec{_eTitle = v})

-- | Optional. Description of the expression. This is a longer text which
-- describes the expression, e.g. when hovered over it in a UI.
eDescription :: Lens' Expr (Maybe Text)
eDescription = lens _eDescription (\rec v -> rec{_eDescription = v})
-- Decode from the wire field names; all four fields are optional.
instance FromJSON Expr where
        parseJSON
          = withObject "Expr"
              (\ o ->
                 Expr' <$>
                   (o .:? "location") <*> (o .:? "expression") <*>
                     (o .:? "title")
                     <*> (o .:? "description"))

-- Encode, omitting unset fields entirely (catMaybes drops Nothing).
instance ToJSON Expr where
        toJSON Expr'{..}
          = object
              (catMaybes
                 [("location" .=) <$> _eLocation,
                  ("expression" .=) <$> _eExpression,
                  ("title" .=) <$> _eTitle,
                  ("description" .=) <$> _eDescription])
-- | The response message for Locations.ListLocations.
--
-- /See:/ 'listLocationsResponse' smart constructor.
data ListLocationsResponse =
  ListLocationsResponse'
    { _llrNextPageToken :: !(Maybe Text) -- token for the next page, if any
    , _llrLocations :: !(Maybe [Location]) -- locations matching the request filter
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListLocationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llrNextPageToken'
--
-- * 'llrLocations'
-- | A 'ListLocationsResponse' with both fields unset.
listLocationsResponse
    :: ListLocationsResponse
listLocationsResponse = ListLocationsResponse' Nothing Nothing
-- | The standard List next-page token.
llrNextPageToken :: Lens' ListLocationsResponse (Maybe Text)
llrNextPageToken =
  lens _llrNextPageToken (\rec v -> rec{_llrNextPageToken = v})

-- | A list of locations that matches the specified filter in the request.
llrLocations :: Lens' ListLocationsResponse [Location]
llrLocations =
  lens _llrLocations (\rec v -> rec{_llrLocations = v}) . _Default . _Coerce
-- Decode from the wire field names; a missing "locations" becomes [].
instance FromJSON ListLocationsResponse where
        parseJSON
          = withObject "ListLocationsResponse"
              (\ o ->
                 ListLocationsResponse' <$>
                   (o .:? "nextPageToken") <*>
                     (o .:? "locations" .!= mempty))

-- Encode, omitting unset fields entirely (catMaybes drops Nothing).
instance ToJSON ListLocationsResponse where
        toJSON ListLocationsResponse'{..}
          = object
              (catMaybes
                 [("nextPageToken" .=) <$> _llrNextPageToken,
                  ("locations" .=) <$> _llrLocations])
-- | Request message for \`GetIamPolicy\` method.
--
-- /See:/ 'getIAMPolicyRequest' smart constructor.
newtype GetIAMPolicyRequest =
  GetIAMPolicyRequest'
    { _giprOptions :: Maybe GetPolicyOptions -- optional GetIamPolicy options
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GetIAMPolicyRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'giprOptions'
-- | A 'GetIAMPolicyRequest' with no options set.
getIAMPolicyRequest
    :: GetIAMPolicyRequest
getIAMPolicyRequest = GetIAMPolicyRequest' Nothing
-- | OPTIONAL: A \`GetPolicyOptions\` object for specifying options to
-- \`GetIamPolicy\`.
giprOptions :: Lens' GetIAMPolicyRequest (Maybe GetPolicyOptions)
giprOptions = lens _giprOptions (\rec v -> rec{_giprOptions = v})
-- Decode from the wire field name; the field is optional.
instance FromJSON GetIAMPolicyRequest where
        parseJSON
          = withObject "GetIAMPolicyRequest"
              (\ o -> GetIAMPolicyRequest' <$> (o .:? "options"))

-- Encode, omitting the field entirely when unset.
instance ToJSON GetIAMPolicyRequest where
        toJSON GetIAMPolicyRequest'{..}
          = object
              (catMaybes [("options" .=) <$> _giprOptions])
-- | Contains information needed for generating an [OpenID Connect
-- token](https:\/\/developers.google.com\/identity\/protocols\/OpenIDConnect).
-- This type of authorization can be used for many scenarios, including
-- calling Cloud Run, or endpoints where you intend to validate the token
-- yourself.
--
-- /See:/ 'oidcToken' smart constructor.
data OidcToken =
  OidcToken'
    { _otAudience :: !(Maybe Text)            -- OIDC audience; defaults to the target URI
    , _otServiceAccountEmail :: !(Maybe Text) -- service account used to mint the token
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OidcToken' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'otAudience'
--
-- * 'otServiceAccountEmail'
-- | An 'OidcToken' with both fields unset.
oidcToken
    :: OidcToken
oidcToken = OidcToken' Nothing Nothing
-- | Audience to be used when generating OIDC token. If not specified, the
-- URI specified in target will be used.
otAudience :: Lens' OidcToken (Maybe Text)
otAudience = lens _otAudience (\rec v -> rec{_otAudience = v})

-- | [Service account
-- email](https:\/\/cloud.google.com\/iam\/docs\/service-accounts) to be
-- used for generating OIDC token. The service account must be within the
-- same project as the queue. The caller must have
-- iam.serviceAccounts.actAs permission for the service account.
otServiceAccountEmail :: Lens' OidcToken (Maybe Text)
otServiceAccountEmail =
  lens _otServiceAccountEmail (\rec v -> rec{_otServiceAccountEmail = v})
-- Decode from the wire field names; both fields are optional.
instance FromJSON OidcToken where
        parseJSON
          = withObject "OidcToken"
              (\ o ->
                 OidcToken' <$>
                   (o .:? "audience") <*> (o .:? "serviceAccountEmail"))

-- Encode, omitting unset fields entirely (catMaybes drops Nothing).
instance ToJSON OidcToken where
        toJSON OidcToken'{..}
          = object
              (catMaybes
                 [("audience" .=) <$> _otAudience,
                  ("serviceAccountEmail" .=) <$>
                    _otServiceAccountEmail])
-- | Retry config. These settings determine when a failed task attempt is
-- retried.
--
-- /See:/ 'retryConfig' smart constructor.
data RetryConfig =
  RetryConfig'
    { _rcMaxDoublings :: !(Maybe (Textual Int32)) -- times the retry interval doubles
    , _rcMaxRetryDuration :: !(Maybe GDuration)   -- overall retry time limit (0 = unlimited)
    , _rcMaxAttempts :: !(Maybe (Textual Int32))  -- attempts per task
    , _rcMaxBackoff :: !(Maybe GDuration)         -- ceiling on retry interval -- TODO confirm: haddock not visible in this chunk
    , _rcMinBackoff :: !(Maybe GDuration)         -- floor on retry interval -- TODO confirm: haddock not visible in this chunk
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RetryConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rcMaxDoublings'
--
-- * 'rcMaxRetryDuration'
--
-- * 'rcMaxAttempts'
--
-- * 'rcMaxBackoff'
--
-- * 'rcMinBackoff'
-- | A 'RetryConfig' with every field unset.
retryConfig
    :: RetryConfig
retryConfig = RetryConfig' Nothing Nothing Nothing Nothing Nothing
-- | The time between retries will double \`max_doublings\` times. A task\'s
-- retry interval starts at min_backoff, then doubles \`max_doublings\`
-- times, then increases linearly, and finally retries at intervals of
-- max_backoff up to max_attempts times. For example, if min_backoff is
-- 10s, max_backoff is 300s, and \`max_doublings\` is 3, then the a task
-- will first be retried in 10s. The retry interval will double three
-- times, and then increase linearly by 2^3 * 10s. Finally, the task will
-- retry at intervals of max_backoff until the task has been attempted
-- max_attempts times. Thus, the requests will retry at 10s, 20s, 40s, 80s,
-- 160s, 240s, 300s, 300s, .... If unspecified when the queue is created,
-- Cloud Tasks will pick the default. This field has the same meaning as
-- [max_doublings in
-- queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#retry_parameters).
rcMaxDoublings :: Lens' RetryConfig (Maybe Int32)
rcMaxDoublings
= lens _rcMaxDoublings
(\ s a -> s{_rcMaxDoublings = a})
. mapping _Coerce
-- | If positive, \`max_retry_duration\` specifies the time limit for
-- retrying a failed task, measured from when the task was first attempted.
-- Once \`max_retry_duration\` time has passed *and* the task has been
-- attempted max_attempts times, no further attempts will be made and the
-- task will be deleted. If zero, then the task age is unlimited. If
-- unspecified when the queue is created, Cloud Tasks will pick the
-- default. \`max_retry_duration\` will be truncated to the nearest second.
-- This field has the same meaning as [task_age_limit in
-- queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#retry_parameters).
rcMaxRetryDuration :: Lens' RetryConfig (Maybe Scientific)
rcMaxRetryDuration
= lens _rcMaxRetryDuration
(\ s a -> s{_rcMaxRetryDuration = a})
. mapping _GDuration
-- | Number of attempts per task. Cloud Tasks will attempt the task
-- \`max_attempts\` times (that is, if the first attempt fails, then there
-- will be \`max_attempts - 1\` retries). Must be >= -1. If unspecified
-- when the queue is created, Cloud Tasks will pick the default. -1
-- indicates unlimited attempts. This field has the same meaning as
-- [task_retry_limit in
-- queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#retry_parameters).
rcMaxAttempts :: Lens' RetryConfig (Maybe Int32)
rcMaxAttempts
= lens _rcMaxAttempts
(\ s a -> s{_rcMaxAttempts = a})
. mapping _Coerce
-- | A task will be scheduled for retry between min_backoff and max_backoff
-- duration after it fails, if the queue\'s RetryConfig specifies that the
-- task should be retried. If unspecified when the queue is created, Cloud
-- Tasks will pick the default. \`max_backoff\` will be truncated to the
-- nearest second. This field has the same meaning as [max_backoff_seconds
-- in
-- queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#retry_parameters).
rcMaxBackoff :: Lens' RetryConfig (Maybe Scientific)
rcMaxBackoff
= lens _rcMaxBackoff (\ s a -> s{_rcMaxBackoff = a})
. mapping _GDuration
-- | A task will be scheduled for retry between min_backoff and max_backoff
-- duration after it fails, if the queue\'s RetryConfig specifies that the
-- task should be retried. If unspecified when the queue is created, Cloud
-- Tasks will pick the default. \`min_backoff\` will be truncated to the
-- nearest second. This field has the same meaning as [min_backoff_seconds
-- in
-- queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/queueref#retry_parameters).
rcMinBackoff :: Lens' RetryConfig (Maybe Scientific)
rcMinBackoff
= lens _rcMinBackoff (\ s a -> s{_rcMinBackoff = a})
. mapping _GDuration
instance FromJSON RetryConfig where
parseJSON
= withObject "RetryConfig"
(\ o ->
RetryConfig' <$>
(o .:? "maxDoublings") <*> (o .:? "maxRetryDuration")
<*> (o .:? "maxAttempts")
<*> (o .:? "maxBackoff")
<*> (o .:? "minBackoff"))
instance ToJSON RetryConfig where
toJSON RetryConfig'{..}
= object
(catMaybes
[("maxDoublings" .=) <$> _rcMaxDoublings,
("maxRetryDuration" .=) <$> _rcMaxRetryDuration,
("maxAttempts" .=) <$> _rcMaxAttempts,
("maxBackoff" .=) <$> _rcMaxBackoff,
("minBackoff" .=) <$> _rcMinBackoff])
-- | Request message for forcing a task to run now using RunTask.
--
-- /See:/ 'runTaskRequest' smart constructor.
newtype RunTaskRequest = RunTaskRequest'
  { _rtrResponseView :: Maybe RunTaskRequestResponseView
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'RunTaskRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rtrResponseView'
runTaskRequest :: RunTaskRequest
runTaskRequest = RunTaskRequest' {_rtrResponseView = Nothing}

-- | The response_view specifies which subset of the Task will be returned.
-- By default response_view is BASIC; not all information is retrieved by
-- default because some data, such as payloads, might be desirable to
-- return only when needed because of its large size or because of the
-- sensitivity of data that it contains. Authorization for FULL requires
-- \`cloudtasks.tasks.fullView\` [Google
-- IAM](https:\/\/cloud.google.com\/iam\/) permission on the Task resource.
rtrResponseView :: Lens' RunTaskRequest (Maybe RunTaskRequestResponseView)
rtrResponseView =
  lens _rtrResponseView (\rec val -> rec {_rtrResponseView = val})

instance FromJSON RunTaskRequest where
  parseJSON =
    withObject "RunTaskRequest" $ \obj ->
      RunTaskRequest' <$> obj .:? "responseView"

instance ToJSON RunTaskRequest where
  toJSON RunTaskRequest' {..} =
    object (catMaybes [("responseView" .=) <$> _rtrResponseView])
-- | HTTP request headers. This map contains the header field names and
-- values. Headers can be set when the task is created. These headers
-- represent a subset of the headers that will accompany the task\'s HTTP
-- request. Some HTTP request headers will be ignored or replaced. A
-- partial list of headers that will be ignored or replaced is: * Host:
-- This will be computed by Cloud Tasks and derived from HttpRequest.url. *
-- Content-Length: This will be computed by Cloud Tasks. * User-Agent: This
-- will be set to \`\"Google-Cloud-Tasks\"\`. * X-Google-*: Google use
-- only. * X-AppEngine-*: Google use only. \`Content-Type\` won\'t be set
-- by Cloud Tasks. You can explicitly set \`Content-Type\` to a media type
-- when the task is created. For example, \`Content-Type\` can be set to
-- \`\"application\/octet-stream\"\` or \`\"application\/json\"\`. Headers
-- which can have multiple values (according to RFC2616) can be specified
-- using comma-separated values. The size of the headers must be less than
-- 80KB.
--
-- /See:/ 'hTTPRequestHeaders' smart constructor.
newtype HTTPRequestHeaders = HTTPRequestHeaders'
  { _httprhAddtional :: HashMap Text Text
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'HTTPRequestHeaders' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'httprhAddtional'
hTTPRequestHeaders
  :: HashMap Text Text -- ^ 'httprhAddtional'
  -> HTTPRequestHeaders
hTTPRequestHeaders headerMap =
  HTTPRequestHeaders' {_httprhAddtional = _Coerce # headerMap}

httprhAddtional :: Lens' HTTPRequestHeaders (HashMap Text Text)
httprhAddtional =
  lens _httprhAddtional (\rec val -> rec {_httprhAddtional = val})
    . _Coerce

instance FromJSON HTTPRequestHeaders where
  parseJSON =
    withObject "HTTPRequestHeaders" $ \obj ->
      HTTPRequestHeaders' <$> parseJSONObject obj

instance ToJSON HTTPRequestHeaders where
  toJSON = toJSON . _httprhAddtional
-- | A resource that represents Google Cloud Platform location.
--
-- /See:/ 'location' smart constructor.
data Location = Location'
  { _lName :: !(Maybe Text)
  , _lMetadata :: !(Maybe LocationMetadata)
  , _lDisplayName :: !(Maybe Text)
  , _lLabels :: !(Maybe LocationLabels)
  , _lLocationId :: !(Maybe Text)
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Location' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lName'
--
-- * 'lMetadata'
--
-- * 'lDisplayName'
--
-- * 'lLabels'
--
-- * 'lLocationId'
location :: Location
location =
  Location'
    { _lName = Nothing
    , _lMetadata = Nothing
    , _lDisplayName = Nothing
    , _lLabels = Nothing
    , _lLocationId = Nothing
    }

-- | Resource name for the location, which may vary between implementations.
-- For example: \`\"projects\/example-project\/locations\/us-east1\"\`
lName :: Lens' Location (Maybe Text)
lName = lens _lName (\rec val -> rec {_lName = val})

-- | Service-specific metadata. For example the available capacity at the
-- given location.
lMetadata :: Lens' Location (Maybe LocationMetadata)
lMetadata = lens _lMetadata (\rec val -> rec {_lMetadata = val})

-- | The friendly name for this location, typically a nearby city name. For
-- example, \"Tokyo\".
lDisplayName :: Lens' Location (Maybe Text)
lDisplayName = lens _lDisplayName (\rec val -> rec {_lDisplayName = val})

-- | Cross-service attributes for the location. For example
-- {\"cloud.googleapis.com\/region\": \"us-east1\"}
lLabels :: Lens' Location (Maybe LocationLabels)
lLabels = lens _lLabels (\rec val -> rec {_lLabels = val})

-- | The canonical id for this location. For example: \`\"us-east1\"\`.
lLocationId :: Lens' Location (Maybe Text)
lLocationId = lens _lLocationId (\rec val -> rec {_lLocationId = val})

instance FromJSON Location where
  parseJSON =
    withObject "Location" $ \obj ->
      Location'
        <$> obj .:? "name"
        <*> obj .:? "metadata"
        <*> obj .:? "displayName"
        <*> obj .:? "labels"
        <*> obj .:? "locationId"

instance ToJSON Location where
  toJSON Location' {..} =
    object $
      catMaybes
        [ ("name" .=) <$> _lName
        , ("metadata" .=) <$> _lMetadata
        , ("displayName" .=) <$> _lDisplayName
        , ("labels" .=) <$> _lLabels
        , ("locationId" .=) <$> _lLocationId
        ]
-- | A generic empty message that you can re-use to avoid defining duplicated
-- empty messages in your APIs. A typical example is to use it as the
-- request or the response type of an API method. For instance: service Foo
-- { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
-- JSON representation for \`Empty\` is empty JSON object \`{}\`.
--
-- /See:/ 'empty' smart constructor.
data Empty = Empty'
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Empty' with the minimum fields required to make a request.
--
empty :: Empty
empty = Empty'

instance FromJSON Empty where
  parseJSON = withObject "Empty" $ \_ -> pure Empty'

instance ToJSON Empty where
  toJSON = const emptyObject
-- | Request message for CreateTask.
--
-- /See:/ 'createTaskRequest' smart constructor.
data CreateTaskRequest = CreateTaskRequest'
  { _ctrResponseView :: !(Maybe CreateTaskRequestResponseView)
  , _ctrTask :: !(Maybe Task)
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'CreateTaskRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ctrResponseView'
--
-- * 'ctrTask'
createTaskRequest :: CreateTaskRequest
createTaskRequest =
  CreateTaskRequest' {_ctrResponseView = Nothing, _ctrTask = Nothing}

-- | The response_view specifies which subset of the Task will be returned.
-- By default response_view is BASIC; not all information is retrieved by
-- default because some data, such as payloads, might be desirable to
-- return only when needed because of its large size or because of the
-- sensitivity of data that it contains. Authorization for FULL requires
-- \`cloudtasks.tasks.fullView\` [Google
-- IAM](https:\/\/cloud.google.com\/iam\/) permission on the Task resource.
ctrResponseView :: Lens' CreateTaskRequest (Maybe CreateTaskRequestResponseView)
ctrResponseView =
  lens _ctrResponseView (\rec val -> rec {_ctrResponseView = val})

-- | Required. The task to add. Task names have the following format:
-- \`projects\/PROJECT_ID\/locations\/LOCATION_ID\/queues\/QUEUE_ID\/tasks\/TASK_ID\`.
-- The user can optionally specify a task name. If a name is not specified
-- then the system will generate a random unique task id, which will be set
-- in the task returned in the response. If schedule_time is not set or is
-- in the past then Cloud Tasks will set it to the current time. Task
-- De-duplication: Explicitly specifying a task ID enables task
-- de-duplication. If a task\'s ID is identical to that of an existing task
-- or a task that was deleted or executed recently then the call will fail
-- with ALREADY_EXISTS. If the task\'s queue was created using Cloud Tasks,
-- then another task with the same name can\'t be created for ~1hour after
-- the original task was deleted or executed. If the task\'s queue was
-- created using queue.yaml or queue.xml, then another task with the same
-- name can\'t be created for ~9days after the original task was deleted or
-- executed. Because there is an extra lookup cost to identify duplicate
-- task names, these CreateTask calls have significantly increased latency.
-- Using hashed strings for the task id or for the prefix of the task id is
-- recommended. Choosing task ids that are sequential or have sequential
-- prefixes, for example using a timestamp, causes an increase in latency
-- and error rates in all task commands. The infrastructure relies on an
-- approximately uniform distribution of task ids to store and serve tasks
-- efficiently.
ctrTask :: Lens' CreateTaskRequest (Maybe Task)
ctrTask = lens _ctrTask (\rec val -> rec {_ctrTask = val})

instance FromJSON CreateTaskRequest where
  parseJSON =
    withObject "CreateTaskRequest" $ \obj ->
      CreateTaskRequest'
        <$> obj .:? "responseView"
        <*> obj .:? "task"

instance ToJSON CreateTaskRequest where
  toJSON CreateTaskRequest' {..} =
    object $
      catMaybes
        [ ("responseView" .=) <$> _ctrResponseView
        , ("task" .=) <$> _ctrTask
        ]
-- | Response message for ListQueues.
--
-- /See:/ 'listQueuesResponse' smart constructor.
data ListQueuesResponse = ListQueuesResponse'
  { _lqrNextPageToken :: !(Maybe Text)
  , _lqrQueues :: !(Maybe [Queue])
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ListQueuesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lqrNextPageToken'
--
-- * 'lqrQueues'
listQueuesResponse :: ListQueuesResponse
listQueuesResponse =
  ListQueuesResponse' {_lqrNextPageToken = Nothing, _lqrQueues = Nothing}

-- | A token to retrieve next page of results. To return the next page of
-- results, call ListQueues with this value as the page_token. If the
-- next_page_token is empty, there are no more results. The page token is
-- valid for only 2 hours.
lqrNextPageToken :: Lens' ListQueuesResponse (Maybe Text)
lqrNextPageToken =
  lens _lqrNextPageToken (\rec val -> rec {_lqrNextPageToken = val})

-- | The list of queues.
lqrQueues :: Lens' ListQueuesResponse [Queue]
lqrQueues =
  lens _lqrQueues (\rec val -> rec {_lqrQueues = val})
    . _Default
    . _Coerce

instance FromJSON ListQueuesResponse where
  parseJSON =
    withObject "ListQueuesResponse" $ \obj ->
      ListQueuesResponse'
        <$> obj .:? "nextPageToken"
        <*> obj .:? "queues" .!= mempty

instance ToJSON ListQueuesResponse where
  toJSON ListQueuesResponse' {..} =
    object $
      catMaybes
        [ ("nextPageToken" .=) <$> _lqrNextPageToken
        , ("queues" .=) <$> _lqrQueues
        ]
--
-- /See:/ 'statusDetailsItem' smart constructor.
newtype StatusDetailsItem = StatusDetailsItem'
  { _sdiAddtional :: HashMap Text JSONValue
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'StatusDetailsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdiAddtional'
statusDetailsItem
  :: HashMap Text JSONValue -- ^ 'sdiAddtional'
  -> StatusDetailsItem
statusDetailsItem detailMap =
  StatusDetailsItem' {_sdiAddtional = _Coerce # detailMap}

-- | Properties of the object. Contains field \'type with type URL.
sdiAddtional :: Lens' StatusDetailsItem (HashMap Text JSONValue)
sdiAddtional =
  lens _sdiAddtional (\rec val -> rec {_sdiAddtional = val})
    . _Coerce

instance FromJSON StatusDetailsItem where
  parseJSON =
    withObject "StatusDetailsItem" $ \obj ->
      StatusDetailsItem' <$> parseJSONObject obj

instance ToJSON StatusDetailsItem where
  toJSON = toJSON . _sdiAddtional
-- | Encapsulates settings provided to GetIamPolicy.
--
-- /See:/ 'getPolicyOptions' smart constructor.
newtype GetPolicyOptions = GetPolicyOptions'
  { _gpoRequestedPolicyVersion :: Maybe (Textual Int32)
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'GetPolicyOptions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gpoRequestedPolicyVersion'
getPolicyOptions :: GetPolicyOptions
getPolicyOptions = GetPolicyOptions' {_gpoRequestedPolicyVersion = Nothing}

-- | Optional. The policy format version to be returned. Valid values are 0,
-- 1, and 3. Requests specifying an invalid value will be rejected.
-- Requests for policies with any conditional bindings must specify version
-- 3. Policies without any conditional bindings may specify any valid value
-- or leave the field unset. To learn which resources support conditions in
-- their IAM policies, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
gpoRequestedPolicyVersion :: Lens' GetPolicyOptions (Maybe Int32)
gpoRequestedPolicyVersion =
  lens
    _gpoRequestedPolicyVersion
    (\rec val -> rec {_gpoRequestedPolicyVersion = val})
    . mapping _Coerce

instance FromJSON GetPolicyOptions where
  parseJSON =
    withObject "GetPolicyOptions" $ \obj ->
      GetPolicyOptions' <$> obj .:? "requestedPolicyVersion"

instance ToJSON GetPolicyOptions where
  toJSON GetPolicyOptions' {..} =
    object $
      catMaybes
        [("requestedPolicyVersion" .=) <$> _gpoRequestedPolicyVersion]
-- | Request message for \`SetIamPolicy\` method.
--
-- /See:/ 'setIAMPolicyRequest' smart constructor.
newtype SetIAMPolicyRequest = SetIAMPolicyRequest'
  { _siprPolicy :: Maybe Policy
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'SetIAMPolicyRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'siprPolicy'
setIAMPolicyRequest :: SetIAMPolicyRequest
setIAMPolicyRequest = SetIAMPolicyRequest' {_siprPolicy = Nothing}

-- | REQUIRED: The complete policy to be applied to the \`resource\`. The
-- size of the policy is limited to a few 10s of KB. An empty policy is a
-- valid policy but certain Cloud Platform services (such as Projects)
-- might reject them.
siprPolicy :: Lens' SetIAMPolicyRequest (Maybe Policy)
siprPolicy = lens _siprPolicy (\rec val -> rec {_siprPolicy = val})

instance FromJSON SetIAMPolicyRequest where
  parseJSON =
    withObject "SetIAMPolicyRequest" $ \obj ->
      SetIAMPolicyRequest' <$> obj .:? "policy"

instance ToJSON SetIAMPolicyRequest where
  toJSON SetIAMPolicyRequest' {..} =
    object (catMaybes [("policy" .=) <$> _siprPolicy])
-- | A queue is a container of related tasks. Queues are configured to manage
-- how those tasks are dispatched. Configurable properties include rate
-- limits, retry options, queue types, and others.
--
-- /See:/ 'queue' smart constructor.
data Queue = Queue'
  { _qRateLimits :: !(Maybe RateLimits)
  , _qAppEngineRoutingOverride :: !(Maybe AppEngineRouting)
  , _qState :: !(Maybe QueueState)
  , _qRetryConfig :: !(Maybe RetryConfig)
  , _qStackdriverLoggingConfig :: !(Maybe StackdriverLoggingConfig)
  , _qName :: !(Maybe Text)
  , _qPurgeTime :: !(Maybe DateTime')
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Queue' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'qRateLimits'
--
-- * 'qAppEngineRoutingOverride'
--
-- * 'qState'
--
-- * 'qRetryConfig'
--
-- * 'qStackdriverLoggingConfig'
--
-- * 'qName'
--
-- * 'qPurgeTime'
queue :: Queue
queue =
  Queue'
    { _qRateLimits = Nothing
    , _qAppEngineRoutingOverride = Nothing
    , _qState = Nothing
    , _qRetryConfig = Nothing
    , _qStackdriverLoggingConfig = Nothing
    , _qName = Nothing
    , _qPurgeTime = Nothing
    }

-- | Rate limits for task dispatches. rate_limits and retry_config are
-- related because they both control task attempts. However they control
-- task attempts in different ways: * rate_limits controls the total rate
-- of dispatches from a queue (i.e. all traffic dispatched from the queue,
-- regardless of whether the dispatch is from a first attempt or a retry).
-- * retry_config controls what happens to particular a task after its
-- first attempt fails. That is, retry_config controls task retries (the
-- second attempt, third attempt, etc). The queue\'s actual dispatch rate
-- is the result of: * Number of tasks in the queue * User-specified
-- throttling: rate_limits, retry_config, and the queue\'s state. * System
-- throttling due to \`429\` (Too Many Requests) or \`503\` (Service
-- Unavailable) responses from the worker, high error rates, or to smooth
-- sudden large traffic spikes.
qRateLimits :: Lens' Queue (Maybe RateLimits)
qRateLimits = lens _qRateLimits (\rec val -> rec {_qRateLimits = val})

-- | Overrides for task-level app_engine_routing. These settings apply only
-- to App Engine tasks in this queue. Http tasks are not affected. If set,
-- \`app_engine_routing_override\` is used for all App Engine tasks in the
-- queue, no matter what the setting is for the task-level
-- app_engine_routing.
qAppEngineRoutingOverride :: Lens' Queue (Maybe AppEngineRouting)
qAppEngineRoutingOverride =
  lens
    _qAppEngineRoutingOverride
    (\rec val -> rec {_qAppEngineRoutingOverride = val})

-- | Output only. The state of the queue. \`state\` can only be changed by
-- calling PauseQueue, ResumeQueue, or uploading
-- [queue.yaml\/xml](https:\/\/cloud.google.com\/appengine\/docs\/python\/config\/queueref).
-- UpdateQueue cannot be used to change \`state\`.
qState :: Lens' Queue (Maybe QueueState)
qState = lens _qState (\rec val -> rec {_qState = val})

-- | Settings that determine the retry behavior. * For tasks created using
-- Cloud Tasks: the queue-level retry settings apply to all tasks in the
-- queue that were created using Cloud Tasks. Retry settings cannot be set
-- on individual tasks. * For tasks created using the App Engine SDK: the
-- queue-level retry settings apply to all tasks in the queue which do not
-- have retry settings explicitly set on the task and were created by the
-- App Engine SDK. See [App Engine
-- documentation](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/taskqueue\/push\/retrying-tasks).
qRetryConfig :: Lens' Queue (Maybe RetryConfig)
qRetryConfig = lens _qRetryConfig (\rec val -> rec {_qRetryConfig = val})

-- | Configuration options for writing logs to [Stackdriver
-- Logging](https:\/\/cloud.google.com\/logging\/docs\/). If this field is
-- unset, then no logs are written.
qStackdriverLoggingConfig :: Lens' Queue (Maybe StackdriverLoggingConfig)
qStackdriverLoggingConfig =
  lens
    _qStackdriverLoggingConfig
    (\rec val -> rec {_qStackdriverLoggingConfig = val})

-- | Caller-specified and required in CreateQueue, after which it becomes
-- output only. The queue name. The queue name must have the following
-- format:
-- \`projects\/PROJECT_ID\/locations\/LOCATION_ID\/queues\/QUEUE_ID\` *
-- \`PROJECT_ID\` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens
-- (-), colons (:), or periods (.). For more information, see [Identifying
-- projects](https:\/\/cloud.google.com\/resource-manager\/docs\/creating-managing-projects#identifying_projects)
-- * \`LOCATION_ID\` is the canonical ID for the queue\'s location. The
-- list of available locations can be obtained by calling ListLocations.
-- For more information, see
-- https:\/\/cloud.google.com\/about\/locations\/. * \`QUEUE_ID\` can
-- contain letters ([A-Za-z]), numbers ([0-9]), or hyphens (-). The maximum
-- length is 100 characters.
qName :: Lens' Queue (Maybe Text)
qName = lens _qName (\rec val -> rec {_qName = val})

-- | Output only. The last time this queue was purged. All tasks that were
-- created before this time were purged. A queue can be purged using
-- PurgeQueue, the [App Engine Task Queue SDK, or the Cloud
-- Console](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/taskqueue\/push\/deleting-tasks-and-queues#purging_all_tasks_from_a_queue).
-- Purge time will be truncated to the nearest microsecond. Purge time will
-- be unset if the queue has never been purged.
qPurgeTime :: Lens' Queue (Maybe UTCTime)
qPurgeTime =
  lens _qPurgeTime (\rec val -> rec {_qPurgeTime = val})
    . mapping _DateTime

instance FromJSON Queue where
  parseJSON =
    withObject "Queue" $ \obj ->
      Queue'
        <$> obj .:? "rateLimits"
        <*> obj .:? "appEngineRoutingOverride"
        <*> obj .:? "state"
        <*> obj .:? "retryConfig"
        <*> obj .:? "stackdriverLoggingConfig"
        <*> obj .:? "name"
        <*> obj .:? "purgeTime"

instance ToJSON Queue where
  toJSON Queue' {..} =
    object $
      catMaybes
        [ ("rateLimits" .=) <$> _qRateLimits
        , ("appEngineRoutingOverride" .=) <$> _qAppEngineRoutingOverride
        , ("state" .=) <$> _qState
        , ("retryConfig" .=) <$> _qRetryConfig
        , ("stackdriverLoggingConfig" .=) <$> _qStackdriverLoggingConfig
        , ("name" .=) <$> _qName
        , ("purgeTime" .=) <$> _qPurgeTime
        ]
-- | HTTP request. The task will be pushed to the worker as an HTTP request.
-- If the worker or the redirected worker acknowledges the task by
-- returning a successful HTTP response code ([\`200\` - \`299\`]), the
-- task will be removed from the queue. If any other HTTP response code is
-- returned or no response is received, the task will be retried according
-- to the following: * User-specified throttling: retry configuration, rate
-- limits, and the queue\'s state. * System throttling: To prevent the
-- worker from overloading, Cloud Tasks may temporarily reduce the queue\'s
-- effective rate. User-specified settings will not be changed. System
-- throttling happens because: * Cloud Tasks backs off on all errors.
-- Normally the backoff specified in rate limits will be used. But if the
-- worker returns \`429\` (Too Many Requests), \`503\` (Service
-- Unavailable), or the rate of errors is high, Cloud Tasks will use a
-- higher backoff rate. The retry specified in the \`Retry-After\` HTTP
-- response header is considered. * To prevent traffic spikes and to smooth
-- sudden increases in traffic, dispatches ramp up slowly when the queue is
-- newly created or idle and if large numbers of tasks suddenly become
-- available to dispatch (due to spikes in create task rates, the queue
-- being unpaused, or many tasks that are scheduled at the same time).
--
-- /See:/ 'hTTPRequest' smart constructor.
data HTTPRequest = HTTPRequest'
  { _httprOAuthToken :: !(Maybe OAuthToken)
  , _httprHTTPMethod :: !(Maybe HTTPRequestHTTPMethod)
  , _httprOidcToken :: !(Maybe OidcToken)
  , _httprBody :: !(Maybe Bytes)
  , _httprURL :: !(Maybe Text)
  , _httprHeaders :: !(Maybe HTTPRequestHeaders)
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'HTTPRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'httprOAuthToken'
--
-- * 'httprHTTPMethod'
--
-- * 'httprOidcToken'
--
-- * 'httprBody'
--
-- * 'httprURL'
--
-- * 'httprHeaders'
hTTPRequest :: HTTPRequest
hTTPRequest =
  HTTPRequest'
    { _httprOAuthToken = Nothing
    , _httprHTTPMethod = Nothing
    , _httprOidcToken = Nothing
    , _httprBody = Nothing
    , _httprURL = Nothing
    , _httprHeaders = Nothing
    }

-- | If specified, an [OAuth
-- token](https:\/\/developers.google.com\/identity\/protocols\/OAuth2)
-- will be generated and attached as an \`Authorization\` header in the
-- HTTP request. This type of authorization should generally only be used
-- when calling Google APIs hosted on *.googleapis.com.
httprOAuthToken :: Lens' HTTPRequest (Maybe OAuthToken)
httprOAuthToken =
  lens _httprOAuthToken (\rec val -> rec {_httprOAuthToken = val})

-- | The HTTP method to use for the request. The default is POST.
httprHTTPMethod :: Lens' HTTPRequest (Maybe HTTPRequestHTTPMethod)
httprHTTPMethod =
  lens _httprHTTPMethod (\rec val -> rec {_httprHTTPMethod = val})

-- | If specified, an
-- [OIDC](https:\/\/developers.google.com\/identity\/protocols\/OpenIDConnect)
-- token will be generated and attached as an \`Authorization\` header in
-- the HTTP request. This type of authorization can be used for many
-- scenarios, including calling Cloud Run, or endpoints where you intend to
-- validate the token yourself.
httprOidcToken :: Lens' HTTPRequest (Maybe OidcToken)
httprOidcToken =
  lens _httprOidcToken (\rec val -> rec {_httprOidcToken = val})

-- | HTTP request body. A request body is allowed only if the HTTP method is
-- POST, PUT, or PATCH. It is an error to set body on a task with an
-- incompatible HttpMethod.
httprBody :: Lens' HTTPRequest (Maybe ByteString)
httprBody =
  lens _httprBody (\rec val -> rec {_httprBody = val})
    . mapping _Bytes

-- | Required. The full url path that the request will be sent to. This
-- string must begin with either \"http:\/\/\" or \"https:\/\/\". Some
-- examples are: \`http:\/\/acme.com\` and
-- \`https:\/\/acme.com\/sales:8080\`. Cloud Tasks will encode some
-- characters for safety and compatibility. The maximum allowed URL length
-- is 2083 characters after encoding. The \`Location\` header response from
-- a redirect response [\`300\` - \`399\`] may be followed. The redirect is
-- not counted as a separate attempt.
httprURL :: Lens' HTTPRequest (Maybe Text)
httprURL = lens _httprURL (\rec val -> rec {_httprURL = val})

-- | HTTP request headers. This map contains the header field names and
-- values. Headers can be set when the task is created. These headers
-- represent a subset of the headers that will accompany the task\'s HTTP
-- request. Some HTTP request headers will be ignored or replaced. A
-- partial list of headers that will be ignored or replaced is: * Host:
-- This will be computed by Cloud Tasks and derived from HttpRequest.url. *
-- Content-Length: This will be computed by Cloud Tasks. * User-Agent: This
-- will be set to \`\"Google-Cloud-Tasks\"\`. * X-Google-*: Google use
-- only. * X-AppEngine-*: Google use only. \`Content-Type\` won\'t be set
-- by Cloud Tasks. You can explicitly set \`Content-Type\` to a media type
-- when the task is created. For example, \`Content-Type\` can be set to
-- \`\"application\/octet-stream\"\` or \`\"application\/json\"\`. Headers
-- which can have multiple values (according to RFC2616) can be specified
-- using comma-separated values. The size of the headers must be less than
-- 80KB.
httprHeaders :: Lens' HTTPRequest (Maybe HTTPRequestHeaders)
httprHeaders = lens _httprHeaders (\rec val -> rec {_httprHeaders = val})

instance FromJSON HTTPRequest where
  parseJSON =
    withObject "HTTPRequest" $ \obj ->
      HTTPRequest'
        <$> obj .:? "oauthToken"
        <*> obj .:? "httpMethod"
        <*> obj .:? "oidcToken"
        <*> obj .:? "body"
        <*> obj .:? "url"
        <*> obj .:? "headers"

instance ToJSON HTTPRequest where
  toJSON HTTPRequest' {..} =
    object $
      catMaybes
        [ ("oauthToken" .=) <$> _httprOAuthToken
        , ("httpMethod" .=) <$> _httprHTTPMethod
        , ("oidcToken" .=) <$> _httprOidcToken
        , ("body" .=) <$> _httprBody
        , ("url" .=) <$> _httprURL
        , ("headers" .=) <$> _httprHeaders
        ]
-- | Configuration options for writing logs to [Stackdriver
-- Logging](https:\/\/cloud.google.com\/logging\/docs\/).
--
-- /See:/ 'stackdriverLoggingConfig' smart constructor.
newtype StackdriverLoggingConfig = StackdriverLoggingConfig'
  { _slcSamplingRatio :: Maybe (Textual Double)
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'StackdriverLoggingConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'slcSamplingRatio'
stackdriverLoggingConfig :: StackdriverLoggingConfig
stackdriverLoggingConfig =
  StackdriverLoggingConfig' {_slcSamplingRatio = Nothing}

-- | Specifies the fraction of operations to write to [Stackdriver
-- Logging](https:\/\/cloud.google.com\/logging\/docs\/). This field may
-- contain any value between 0.0 and 1.0, inclusive. 0.0 is the default and
-- means that no operations are logged.
slcSamplingRatio :: Lens' StackdriverLoggingConfig (Maybe Double)
slcSamplingRatio =
  lens _slcSamplingRatio (\rec val -> rec {_slcSamplingRatio = val})
    . mapping _Coerce

instance FromJSON StackdriverLoggingConfig where
  parseJSON =
    withObject "StackdriverLoggingConfig" $ \obj ->
      StackdriverLoggingConfig' <$> obj .:? "samplingRatio"

instance ToJSON StackdriverLoggingConfig where
  toJSON StackdriverLoggingConfig' {..} =
    object (catMaybes [("samplingRatio" .=) <$> _slcSamplingRatio])
-- | Response message for listing tasks using ListTasks.
--
-- /See:/ 'listTasksResponse' smart constructor.
data ListTasksResponse =
  ListTasksResponse'
    { _ltrNextPageToken :: !(Maybe Text)
    , _ltrTasks :: !(Maybe [Task])
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListTasksResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ltrNextPageToken'
--
-- * 'ltrTasks'
listTasksResponse
    :: ListTasksResponse
listTasksResponse =
  ListTasksResponse' {_ltrNextPageToken = Nothing, _ltrTasks = Nothing}
-- | A token to retrieve next page of results. To return the next page of
-- results, call ListTasks with this value as the page_token. If the
-- next_page_token is empty, there are no more results.
ltrNextPageToken :: Lens' ListTasksResponse (Maybe Text)
ltrNextPageToken
  = lens _ltrNextPageToken
      (\ s a -> s{_ltrNextPageToken = a})
-- | The list of tasks.
ltrTasks :: Lens' ListTasksResponse [Task]
ltrTasks
  = lens _ltrTasks (\ s a -> s{_ltrTasks = a}) .
      -- '_Default' presents a missing list as [], so the lens target is a plain list
      _Default
      . _Coerce
-- JSON decoder; a missing "tasks" key decodes as the empty list (.!= mempty).
instance FromJSON ListTasksResponse where
        parseJSON
          = withObject "ListTasksResponse"
              (\ o ->
                 ListTasksResponse' <$>
                   (o .:? "nextPageToken") <*>
                     (o .:? "tasks" .!= mempty))
-- JSON encoder; 'Nothing' fields are omitted from the object.
instance ToJSON ListTasksResponse where
        toJSON ListTasksResponse'{..}
          = object
              (catMaybes
                 [("nextPageToken" .=) <$> _ltrNextPageToken,
                  ("tasks" .=) <$> _ltrTasks])
-- | HTTP request headers. This map contains the header field names and
-- values. Headers can be set when the task is created. Repeated headers
-- are not supported but a header value can contain commas. Cloud Tasks
-- sets some headers to default values: * \`User-Agent\`: By default, this
-- header is \`\"AppEngine-Google;
-- (+http:\/\/code.google.com\/appengine)\"\`. This header can be modified,
-- but Cloud Tasks will append \`\"AppEngine-Google;
-- (+http:\/\/code.google.com\/appengine)\"\` to the modified
-- \`User-Agent\`. If the task has a body, Cloud Tasks sets the following
-- headers: * \`Content-Type\`: By default, the \`Content-Type\` header is
-- set to \`\"application\/octet-stream\"\`. The default can be overridden
-- by explicitly setting \`Content-Type\` to a particular media type when
-- the task is created. For example, \`Content-Type\` can be set to
-- \`\"application\/json\"\`. * \`Content-Length\`: This is computed by
-- Cloud Tasks. This value is output only. It cannot be changed. The
-- headers below cannot be set or overridden: * \`Host\` * \`X-Google-*\` *
-- \`X-AppEngine-*\` In addition, Cloud Tasks sets some headers when the
-- task is dispatched, such as headers containing information about the
-- task; see [request
-- headers](https:\/\/cloud.google.com\/tasks\/docs\/creating-appengine-handlers#reading_request_headers).
-- These headers are set only when the task is dispatched, so they are not
-- visible when the task is returned in a Cloud Tasks response. Although
-- there is no specific limit for the maximum number of headers or the
-- size, there is a limit on the maximum size of the Task. For more
-- information, see the CreateTask documentation.
--
-- /See:/ 'appEngineHTTPRequestHeaders' smart constructor.
newtype AppEngineHTTPRequestHeaders =
  AppEngineHTTPRequestHeaders'
    { _aehttprhAddtional :: HashMap Text Text -- NB: "Addtional" spelling is generated; callers rely on it
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AppEngineHTTPRequestHeaders' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aehttprhAddtional'
appEngineHTTPRequestHeaders
    :: HashMap Text Text -- ^ 'aehttprhAddtional'
    -> AppEngineHTTPRequestHeaders
appEngineHTTPRequestHeaders pAehttprhAddtional_ =
  AppEngineHTTPRequestHeaders'
    {_aehttprhAddtional = _Coerce # pAehttprhAddtional_}
-- | The underlying header-name-to-value map.
aehttprhAddtional :: Lens' AppEngineHTTPRequestHeaders (HashMap Text Text)
aehttprhAddtional
  = lens _aehttprhAddtional
      (\ s a -> s{_aehttprhAddtional = a})
      . _Coerce
-- Decoded directly from the whole JSON object: every key is a header name.
instance FromJSON AppEngineHTTPRequestHeaders where
        parseJSON
          = withObject "AppEngineHTTPRequestHeaders"
              (\ o ->
                 AppEngineHTTPRequestHeaders' <$> (parseJSONObject o))
-- Encoded as a bare JSON object of header name\/value pairs.
instance ToJSON AppEngineHTTPRequestHeaders where
        toJSON = toJSON . _aehttprhAddtional
-- | Request message for PauseQueue.
--
-- /See:/ 'pauseQueueRequest' smart constructor.
data PauseQueueRequest =
  PauseQueueRequest'
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Construct the (field-less) 'PauseQueueRequest' payload.
--
pauseQueueRequest
    :: PauseQueueRequest
pauseQueueRequest = PauseQueueRequest'
instance FromJSON PauseQueueRequest where
        -- Accept any JSON object; its contents are irrelevant.
        parseJSON
          = withObject "PauseQueueRequest"
              (const (pure PauseQueueRequest'))
instance ToJSON PauseQueueRequest where
        -- Always serialise as the empty JSON object.
        toJSON _ = emptyObject
-- | Request message for \`TestIamPermissions\` method.
--
-- /See:/ 'testIAMPermissionsRequest' smart constructor.
newtype TestIAMPermissionsRequest =
  TestIAMPermissionsRequest'
    { _tiprPermissions :: Maybe [Text]
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TestIAMPermissionsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiprPermissions'
testIAMPermissionsRequest
    :: TestIAMPermissionsRequest
testIAMPermissionsRequest =
  TestIAMPermissionsRequest' {_tiprPermissions = Nothing}
-- | The set of permissions to check for the \`resource\`. Permissions with
-- wildcards (such as \'*\' or \'storage.*\') are not allowed. For more
-- information see [IAM
-- Overview](https:\/\/cloud.google.com\/iam\/docs\/overview#permissions).
tiprPermissions :: Lens' TestIAMPermissionsRequest [Text]
tiprPermissions
  = lens _tiprPermissions
      (\ s a -> s{_tiprPermissions = a})
      -- '_Default' presents a missing list as [], so the lens target is a plain list
      . _Default
      . _Coerce
-- JSON decoder; a missing "permissions" key decodes as the empty list.
instance FromJSON TestIAMPermissionsRequest where
        parseJSON
          = withObject "TestIAMPermissionsRequest"
              (\ o ->
                 TestIAMPermissionsRequest' <$>
                   (o .:? "permissions" .!= mempty))
-- JSON encoder; a 'Nothing' field is omitted from the object.
instance ToJSON TestIAMPermissionsRequest where
        toJSON TestIAMPermissionsRequest'{..}
          = object
              (catMaybes [("permissions" .=) <$> _tiprPermissions])
-- | The status of a task attempt.
--
-- /See:/ 'attempt' smart constructor.
data Attempt =
  Attempt'
    { _aResponseStatus :: !(Maybe Status)
    , _aScheduleTime :: !(Maybe DateTime')
    , _aDispatchTime :: !(Maybe DateTime')
    , _aResponseTime :: !(Maybe DateTime')
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Attempt' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aResponseStatus'
--
-- * 'aScheduleTime'
--
-- * 'aDispatchTime'
--
-- * 'aResponseTime'
attempt
    :: Attempt
attempt =
  Attempt'
    { _aResponseStatus = Nothing
    , _aScheduleTime = Nothing
    , _aDispatchTime = Nothing
    , _aResponseTime = Nothing
    }
-- | Output only. The response from the worker for this attempt. If
-- \`response_time\` is unset, then the task has not been attempted or is
-- currently running and the \`response_status\` field is meaningless.
aResponseStatus :: Lens' Attempt (Maybe Status)
aResponseStatus
  = lens _aResponseStatus
      (\ s a -> s{_aResponseStatus = a})
-- | Output only. The time that this attempt was scheduled. \`schedule_time\`
-- will be truncated to the nearest microsecond.
aScheduleTime :: Lens' Attempt (Maybe UTCTime)
aScheduleTime
  = lens _aScheduleTime
      (\ s a -> s{_aScheduleTime = a})
      -- 'mapping _DateTime' converts the wire DateTime' wrapper to 'UTCTime'
      . mapping _DateTime
-- | Output only. The time that this attempt was dispatched.
-- \`dispatch_time\` will be truncated to the nearest microsecond.
aDispatchTime :: Lens' Attempt (Maybe UTCTime)
aDispatchTime
  = lens _aDispatchTime
      (\ s a -> s{_aDispatchTime = a})
      . mapping _DateTime
-- | Output only. The time that this attempt response was received.
-- \`response_time\` will be truncated to the nearest microsecond.
aResponseTime :: Lens' Attempt (Maybe UTCTime)
aResponseTime
  = lens _aResponseTime
      (\ s a -> s{_aResponseTime = a})
      . mapping _DateTime
-- JSON decoder; keys mirror the REST wire names, all fields optional.
instance FromJSON Attempt where
        parseJSON
          = withObject "Attempt"
              (\ o ->
                 Attempt' <$>
                   (o .:? "responseStatus") <*> (o .:? "scheduleTime")
                     <*> (o .:? "dispatchTime")
                     <*> (o .:? "responseTime"))
-- JSON encoder; 'Nothing' fields are omitted from the object.
instance ToJSON Attempt where
        toJSON Attempt'{..}
          = object
              (catMaybes
                 [("responseStatus" .=) <$> _aResponseStatus,
                  ("scheduleTime" .=) <$> _aScheduleTime,
                  ("dispatchTime" .=) <$> _aDispatchTime,
                  ("responseTime" .=) <$> _aResponseTime])
-- | Request message for PurgeQueue.
--
-- /See:/ 'purgeQueueRequest' smart constructor.
data PurgeQueueRequest =
  PurgeQueueRequest'
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Construct the (field-less) 'PurgeQueueRequest' payload.
--
purgeQueueRequest
    :: PurgeQueueRequest
purgeQueueRequest = PurgeQueueRequest'
instance FromJSON PurgeQueueRequest where
        -- Accept any JSON object; its contents are irrelevant.
        parseJSON
          = withObject "PurgeQueueRequest"
              (const (pure PurgeQueueRequest'))
instance ToJSON PurgeQueueRequest where
        -- Always serialise as the empty JSON object.
        toJSON _ = emptyObject
-- | A unit of scheduled work.
--
-- /See:/ 'task' smart constructor.
data Task =
  Task'
    { _tLastAttempt :: !(Maybe Attempt)
    , _tDispatchDeadline :: !(Maybe GDuration)
    , _tScheduleTime :: !(Maybe DateTime')
    , _tHTTPRequest :: !(Maybe HTTPRequest)
    , _tName :: !(Maybe Text)
    , _tFirstAttempt :: !(Maybe Attempt)
    , _tView :: !(Maybe TaskView)
    , _tResponseCount :: !(Maybe (Textual Int32))
    , _tDispatchCount :: !(Maybe (Textual Int32))
    , _tAppEngineHTTPRequest :: !(Maybe AppEngineHTTPRequest)
    , _tCreateTime :: !(Maybe DateTime')
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Task' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tLastAttempt'
--
-- * 'tDispatchDeadline'
--
-- * 'tScheduleTime'
--
-- * 'tHTTPRequest'
--
-- * 'tName'
--
-- * 'tFirstAttempt'
--
-- * 'tView'
--
-- * 'tResponseCount'
--
-- * 'tDispatchCount'
--
-- * 'tAppEngineHTTPRequest'
--
-- * 'tCreateTime'
task
    :: Task
task =
  Task'
    { _tLastAttempt = Nothing
    , _tDispatchDeadline = Nothing
    , _tScheduleTime = Nothing
    , _tHTTPRequest = Nothing
    , _tName = Nothing
    , _tFirstAttempt = Nothing
    , _tView = Nothing
    , _tResponseCount = Nothing
    , _tDispatchCount = Nothing
    , _tAppEngineHTTPRequest = Nothing
    , _tCreateTime = Nothing
    }
-- | Output only. The status of the task\'s last attempt.
tLastAttempt :: Lens' Task (Maybe Attempt)
tLastAttempt
  = lens _tLastAttempt (\ s a -> s{_tLastAttempt = a})
-- | The deadline for requests sent to the worker. If the worker does not
-- respond by this deadline then the request is cancelled and the attempt
-- is marked as a \`DEADLINE_EXCEEDED\` failure. Cloud Tasks will retry the
-- task according to the RetryConfig. Note that when the request is
-- cancelled, Cloud Tasks will stop listening for the response, but whether
-- the worker stops processing depends on the worker. For example, if the
-- worker is stuck, it may not react to cancelled requests. The default and
-- maximum values depend on the type of request: * For HTTP tasks, the
-- default is 10 minutes. The deadline must be in the interval [15 seconds,
-- 30 minutes]. * For App Engine tasks, 0 indicates that the request has
-- the default deadline. The default deadline depends on the [scaling
-- type](https:\/\/cloud.google.com\/appengine\/docs\/standard\/go\/how-instances-are-managed#instance_scaling)
-- of the service: 10 minutes for standard apps with automatic scaling, 24
-- hours for standard apps with manual and basic scaling, and 60 minutes
-- for flex apps. If the request deadline is set, it must be in the
-- interval [15 seconds, 24 hours 15 seconds]. Regardless of the task\'s
-- \`dispatch_deadline\`, the app handler will not run for longer than
-- the service\'s timeout. We recommend setting the \`dispatch_deadline\`
-- to at most a few seconds more than the app handler\'s timeout. For more
-- information see
-- [Timeouts](https:\/\/cloud.google.com\/tasks\/docs\/creating-appengine-handlers#timeouts).
-- \`dispatch_deadline\` will be truncated to the nearest millisecond. The
-- deadline is an approximate deadline.
tDispatchDeadline :: Lens' Task (Maybe Scientific)
tDispatchDeadline
  = lens _tDispatchDeadline
      (\ s a -> s{_tDispatchDeadline = a})
      -- 'mapping _GDuration' converts the wire duration wrapper to 'Scientific' seconds
      . mapping _GDuration
-- | The time when the task is scheduled to be attempted or retried.
-- \`schedule_time\` will be truncated to the nearest microsecond.
tScheduleTime :: Lens' Task (Maybe UTCTime)
tScheduleTime
  = lens _tScheduleTime
      (\ s a -> s{_tScheduleTime = a})
      . mapping _DateTime
-- | HTTP request that is sent to the worker. An HTTP task is a task that has
-- HttpRequest set.
tHTTPRequest :: Lens' Task (Maybe HTTPRequest)
tHTTPRequest
  = lens _tHTTPRequest (\ s a -> s{_tHTTPRequest = a})
-- | Optionally caller-specified in CreateTask. The task name. The task name
-- must have the following format:
-- \`projects\/PROJECT_ID\/locations\/LOCATION_ID\/queues\/QUEUE_ID\/tasks\/TASK_ID\`
-- * \`PROJECT_ID\` can contain letters ([A-Za-z]), numbers ([0-9]),
-- hyphens (-), colons (:), or periods (.). For more information, see
-- [Identifying
-- projects](https:\/\/cloud.google.com\/resource-manager\/docs\/creating-managing-projects#identifying_projects)
-- * \`LOCATION_ID\` is the canonical ID for the task\'s location. The list
-- of available locations can be obtained by calling ListLocations. For
-- more information, see https:\/\/cloud.google.com\/about\/locations\/. *
-- \`QUEUE_ID\` can contain letters ([A-Za-z]), numbers ([0-9]), or hyphens
-- (-). The maximum length is 100 characters. * \`TASK_ID\` can contain
-- only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), or underscores
-- (_). The maximum length is 500 characters.
tName :: Lens' Task (Maybe Text)
tName = lens _tName (\ s a -> s{_tName = a})
-- | Output only. The status of the task\'s first attempt. Only dispatch_time
-- will be set. The other Attempt information is not retained by Cloud
-- Tasks.
tFirstAttempt :: Lens' Task (Maybe Attempt)
tFirstAttempt
  = lens _tFirstAttempt
      (\ s a -> s{_tFirstAttempt = a})
-- | Output only. The view specifies which subset of the Task has been
-- returned.
tView :: Lens' Task (Maybe TaskView)
tView = lens _tView (\ s a -> s{_tView = a})
-- | Output only. The number of attempts which have received a response.
tResponseCount :: Lens' Task (Maybe Int32)
tResponseCount
  = lens _tResponseCount
      (\ s a -> s{_tResponseCount = a})
      -- 'mapping _Coerce' unwraps the 'Textual' newtype so callers see a plain Int32
      . mapping _Coerce
-- | Output only. The number of attempts dispatched. This count includes
-- attempts which have been dispatched but haven\'t received a response.
tDispatchCount :: Lens' Task (Maybe Int32)
tDispatchCount
  = lens _tDispatchCount
      (\ s a -> s{_tDispatchCount = a})
      . mapping _Coerce
-- | HTTP request that is sent to the App Engine app handler. An App Engine
-- task is a task that has AppEngineHttpRequest set.
tAppEngineHTTPRequest :: Lens' Task (Maybe AppEngineHTTPRequest)
tAppEngineHTTPRequest
  = lens _tAppEngineHTTPRequest
      (\ s a -> s{_tAppEngineHTTPRequest = a})
-- | Output only. The time that the task was created. \`create_time\` will be
-- truncated to the nearest second.
tCreateTime :: Lens' Task (Maybe UTCTime)
tCreateTime
  = lens _tCreateTime (\ s a -> s{_tCreateTime = a}) .
      mapping _DateTime
-- JSON decoder; applicative chain order matches the Task' field order,
-- and the keys are the REST API's exact wire names.
instance FromJSON Task where
        parseJSON
          = withObject "Task"
              (\ o ->
                 Task' <$>
                   (o .:? "lastAttempt") <*> (o .:? "dispatchDeadline")
                     <*> (o .:? "scheduleTime")
                     <*> (o .:? "httpRequest")
                     <*> (o .:? "name")
                     <*> (o .:? "firstAttempt")
                     <*> (o .:? "view")
                     <*> (o .:? "responseCount")
                     <*> (o .:? "dispatchCount")
                     <*> (o .:? "appEngineHttpRequest")
                     <*> (o .:? "createTime"))
-- JSON encoder; 'Nothing' fields are omitted from the object.
instance ToJSON Task where
        toJSON Task'{..}
          = object
              (catMaybes
                 [("lastAttempt" .=) <$> _tLastAttempt,
                  ("dispatchDeadline" .=) <$> _tDispatchDeadline,
                  ("scheduleTime" .=) <$> _tScheduleTime,
                  ("httpRequest" .=) <$> _tHTTPRequest,
                  ("name" .=) <$> _tName,
                  ("firstAttempt" .=) <$> _tFirstAttempt,
                  ("view" .=) <$> _tView,
                  ("responseCount" .=) <$> _tResponseCount,
                  ("dispatchCount" .=) <$> _tDispatchCount,
                  ("appEngineHttpRequest" .=) <$>
                    _tAppEngineHTTPRequest,
                  ("createTime" .=) <$> _tCreateTime])
-- | Response message for \`TestIamPermissions\` method.
--
-- /See:/ 'testIAMPermissionsResponse' smart constructor.
newtype TestIAMPermissionsResponse =
  TestIAMPermissionsResponse'
    { _tiamprPermissions :: Maybe [Text]
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TestIAMPermissionsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiamprPermissions'
testIAMPermissionsResponse
    :: TestIAMPermissionsResponse
testIAMPermissionsResponse =
  TestIAMPermissionsResponse' {_tiamprPermissions = Nothing}
-- | A subset of \`TestPermissionsRequest.permissions\` that the caller is
-- allowed.
tiamprPermissions :: Lens' TestIAMPermissionsResponse [Text]
tiamprPermissions
  = lens _tiamprPermissions
      (\ s a -> s{_tiamprPermissions = a})
      -- '_Default' presents a missing list as [], so the lens target is a plain list
      . _Default
      . _Coerce
-- JSON decoder; a missing "permissions" key decodes as the empty list.
instance FromJSON TestIAMPermissionsResponse where
        parseJSON
          = withObject "TestIAMPermissionsResponse"
              (\ o ->
                 TestIAMPermissionsResponse' <$>
                   (o .:? "permissions" .!= mempty))
-- JSON encoder; a 'Nothing' field is omitted from the object.
instance ToJSON TestIAMPermissionsResponse where
        toJSON TestIAMPermissionsResponse'{..}
          = object
              (catMaybes
                 [("permissions" .=) <$> _tiamprPermissions])
-- | An Identity and Access Management (IAM) policy, which specifies access
-- controls for Google Cloud resources. A \`Policy\` is a collection of
-- \`bindings\`. A \`binding\` binds one or more \`members\` to a single
-- \`role\`. Members can be user accounts, service accounts, Google groups,
-- and domains (such as G Suite). A \`role\` is a named list of
-- permissions; each \`role\` can be an IAM predefined role or a
-- user-created custom role. For some types of Google Cloud resources, a
-- \`binding\` can also specify a \`condition\`, which is a logical
-- expression that allows access to a resource only if the expression
-- evaluates to \`true\`. A condition can add constraints based on
-- attributes of the request, the resource, or both. To learn which
-- resources support conditions in their IAM policies, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
-- **JSON example:** { \"bindings\": [ { \"role\":
-- \"roles\/resourcemanager.organizationAdmin\", \"members\": [
-- \"user:mike\'example.com\", \"group:admins\'example.com\",
-- \"domain:google.com\",
-- \"serviceAccount:my-project-id\'appspot.gserviceaccount.com\" ] }, {
-- \"role\": \"roles\/resourcemanager.organizationViewer\", \"members\": [
-- \"user:eve\'example.com\" ], \"condition\": { \"title\": \"expirable
-- access\", \"description\": \"Does not grant access after Sep 2020\",
-- \"expression\": \"request.time \<
-- timestamp(\'2020-10-01T00:00:00.000Z\')\", } } ], \"etag\":
-- \"BwWWja0YfJA=\", \"version\": 3 } **YAML example:** bindings: -
-- members: - user:mike\'example.com - group:admins\'example.com -
-- domain:google.com -
-- serviceAccount:my-project-id\'appspot.gserviceaccount.com role:
-- roles\/resourcemanager.organizationAdmin - members: -
-- user:eve\'example.com role: roles\/resourcemanager.organizationViewer
-- condition: title: expirable access description: Does not grant access
-- after Sep 2020 expression: request.time \<
-- timestamp(\'2020-10-01T00:00:00.000Z\') - etag: BwWWja0YfJA= - version:
-- 3 For a description of IAM and its features, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/docs\/).
--
-- /See:/ 'policy' smart constructor.
data Policy =
  Policy'
    { _pEtag :: !(Maybe Bytes)
    , _pVersion :: !(Maybe (Textual Int32))
    , _pBindings :: !(Maybe [Binding])
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Policy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pEtag'
--
-- * 'pVersion'
--
-- * 'pBindings'
policy
    :: Policy
policy = Policy' {_pEtag = Nothing, _pVersion = Nothing, _pBindings = Nothing}
-- | \`etag\` is used for optimistic concurrency control as a way to help
-- prevent simultaneous updates of a policy from overwriting each other. It
-- is strongly suggested that systems make use of the \`etag\` in the
-- read-modify-write cycle to perform policy updates in order to avoid race
-- conditions: An \`etag\` is returned in the response to \`getIamPolicy\`,
-- and systems are expected to put that etag in the request to
-- \`setIamPolicy\` to ensure that their change will be applied to the same
-- version of the policy. **Important:** If you use IAM Conditions, you
-- must include the \`etag\` field whenever you call \`setIamPolicy\`. If
-- you omit this field, then IAM allows you to overwrite a version \`3\`
-- policy with a version \`1\` policy, and all of the conditions in the
-- version \`3\` policy are lost.
pEtag :: Lens' Policy (Maybe ByteString)
pEtag
  = lens _pEtag (\ s a -> s{_pEtag = a}) .
      -- 'mapping _Bytes' converts the base64 wire wrapper to a raw ByteString
      mapping _Bytes
-- | Specifies the format of the policy. Valid values are \`0\`, \`1\`, and
-- \`3\`. Requests that specify an invalid value are rejected. Any
-- operation that affects conditional role bindings must specify version
-- \`3\`. This requirement applies to the following operations: * Getting a
-- policy that includes a conditional role binding * Adding a conditional
-- role binding to a policy * Changing a conditional role binding in a
-- policy * Removing any role binding, with or without a condition, from a
-- policy that includes conditions **Important:** If you use IAM
-- Conditions, you must include the \`etag\` field whenever you call
-- \`setIamPolicy\`. If you omit this field, then IAM allows you to
-- overwrite a version \`3\` policy with a version \`1\` policy, and all of
-- the conditions in the version \`3\` policy are lost. If a policy does
-- not include any conditions, operations on that policy may specify any
-- valid version or leave the field unset. To learn which resources support
-- conditions in their IAM policies, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
pVersion :: Lens' Policy (Maybe Int32)
pVersion
  = lens _pVersion (\ s a -> s{_pVersion = a}) .
      mapping _Coerce
-- | Associates a list of \`members\` to a \`role\`. Optionally, may specify
-- a \`condition\` that determines how and when the \`bindings\` are
-- applied. Each of the \`bindings\` must contain at least one member.
pBindings :: Lens' Policy [Binding]
pBindings
  = lens _pBindings (\ s a -> s{_pBindings = a}) .
      _Default
      . _Coerce
-- JSON decoder; a missing "bindings" key decodes as the empty list.
instance FromJSON Policy where
        parseJSON
          = withObject "Policy"
              (\ o ->
                 Policy' <$>
                   (o .:? "etag") <*> (o .:? "version") <*>
                     (o .:? "bindings" .!= mempty))
-- JSON encoder; 'Nothing' fields are omitted from the object.
instance ToJSON Policy where
        toJSON Policy'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _pEtag,
                  ("version" .=) <$> _pVersion,
                  ("bindings" .=) <$> _pBindings])
-- | Cross-service attributes for the location. For example
-- {\"cloud.googleapis.com\/region\": \"us-east1\"}
--
-- /See:/ 'locationLabels' smart constructor.
newtype LocationLabels =
  LocationLabels'
    { _llAddtional :: HashMap Text Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LocationLabels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llAddtional'
locationLabels
    :: HashMap Text Text -- ^ 'llAddtional'
    -> LocationLabels
locationLabels pLlAddtional_ =
  LocationLabels' {_llAddtional = _Coerce # pLlAddtional_}
-- | The underlying label-key-to-value map.
llAddtional :: Lens' LocationLabels (HashMap Text Text)
llAddtional
  = lens _llAddtional (\ s a -> s{_llAddtional = a}) .
      _Coerce
-- Decoded directly from the whole JSON object: every key is a label name.
instance FromJSON LocationLabels where
        parseJSON
          = withObject "LocationLabels"
              (\ o -> LocationLabels' <$> (parseJSONObject o))
-- Encoded as a bare JSON object of label key\/value pairs.
instance ToJSON LocationLabels where
        toJSON = toJSON . _llAddtional
-- | Service-specific metadata. For example the available capacity at the
-- given location.
--
-- /See:/ 'locationMetadata' smart constructor.
newtype LocationMetadata =
  LocationMetadata'
    { _lmAddtional :: HashMap Text JSONValue
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LocationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lmAddtional'
locationMetadata
    :: HashMap Text JSONValue -- ^ 'lmAddtional'
    -> LocationMetadata
locationMetadata pLmAddtional_ =
  LocationMetadata' {_lmAddtional = _Coerce # pLmAddtional_}
-- | Properties of the object. Contains field \'type with type URL.
lmAddtional :: Lens' LocationMetadata (HashMap Text JSONValue)
lmAddtional
  = lens _lmAddtional (\ s a -> s{_lmAddtional = a}) .
      _Coerce
-- Decoded directly from the whole JSON object; values stay as raw JSON.
instance FromJSON LocationMetadata where
        parseJSON
          = withObject "LocationMetadata"
              (\ o -> LocationMetadata' <$> (parseJSONObject o))
-- Encoded as a bare JSON object of the raw key\/value pairs.
instance ToJSON LocationMetadata where
        toJSON = toJSON . _lmAddtional
-- | App Engine Routing. Defines routing characteristics specific to App
-- Engine - service, version, and instance. For more information about
-- services, versions, and instances see [An Overview of App
-- Engine](https:\/\/cloud.google.com\/appengine\/docs\/python\/an-overview-of-app-engine),
-- [Microservices Architecture on Google App
-- Engine](https:\/\/cloud.google.com\/appengine\/docs\/python\/microservices-on-app-engine),
-- [App Engine Standard request
-- routing](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/how-requests-are-routed),
-- and [App Engine Flex request
-- routing](https:\/\/cloud.google.com\/appengine\/docs\/flexible\/python\/how-requests-are-routed).
-- Using AppEngineRouting requires
-- [\`appengine.applications.get\`](https:\/\/cloud.google.com\/appengine\/docs\/admin-api\/access-control)
-- Google IAM permission for the project and the following scope:
-- \`https:\/\/www.googleapis.com\/auth\/cloud-platform\`
--
-- /See:/ 'appEngineRouting' smart constructor.
data AppEngineRouting =
  AppEngineRouting'
    { _aerService :: !(Maybe Text)
    , _aerVersion :: !(Maybe Text)
    , _aerHost :: !(Maybe Text)
    , _aerInstance :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AppEngineRouting' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aerService'
--
-- * 'aerVersion'
--
-- * 'aerHost'
--
-- * 'aerInstance'
appEngineRouting
    :: AppEngineRouting
appEngineRouting =
  AppEngineRouting'
    { _aerService = Nothing
    , _aerVersion = Nothing
    , _aerHost = Nothing
    , _aerInstance = Nothing
    }
-- | App service. By default, the task is sent to the service which is the
-- default service when the task is attempted. For some queues or tasks
-- which were created using the App Engine Task Queue API, host is not
-- parsable into service, version, and instance. For example, some tasks
-- which were created using the App Engine SDK use a custom domain name;
-- custom domains are not parsed by Cloud Tasks. If host is not parsable,
-- then service, version, and instance are the empty string.
aerService :: Lens' AppEngineRouting (Maybe Text)
aerService
  = lens _aerService (\ s a -> s{_aerService = a})
-- | App version. By default, the task is sent to the version which is the
-- default version when the task is attempted. For some queues or tasks
-- which were created using the App Engine Task Queue API, host is not
-- parsable into service, version, and instance. For example, some tasks
-- which were created using the App Engine SDK use a custom domain name;
-- custom domains are not parsed by Cloud Tasks. If host is not parsable,
-- then service, version, and instance are the empty string.
aerVersion :: Lens' AppEngineRouting (Maybe Text)
aerVersion
  = lens _aerVersion (\ s a -> s{_aerVersion = a})
-- | Output only. The host that the task is sent to. The host is constructed
-- from the domain name of the app associated with the queue\'s project ID
-- (for example .appspot.com), and the service, version, and instance.
-- Tasks which were created using the App Engine SDK might have a custom
-- domain name. For more information, see [How Requests are
-- Routed](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/how-requests-are-routed).
aerHost :: Lens' AppEngineRouting (Maybe Text)
aerHost = lens _aerHost (\ s a -> s{_aerHost = a})
-- | App instance. By default, the task is sent to an instance which is
-- available when the task is attempted. Requests can only be sent to a
-- specific instance if [manual scaling is used in App Engine
-- Standard](https:\/\/cloud.google.com\/appengine\/docs\/python\/an-overview-of-app-engine?hl=en_US#scaling_types_and_instance_classes).
-- App Engine Flex does not support instances. For more information, see
-- [App Engine Standard request
-- routing](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/how-requests-are-routed)
-- and [App Engine Flex request
-- routing](https:\/\/cloud.google.com\/appengine\/docs\/flexible\/python\/how-requests-are-routed).
aerInstance :: Lens' AppEngineRouting (Maybe Text)
aerInstance
  = lens _aerInstance (\ s a -> s{_aerInstance = a})
-- JSON decoder; keys mirror the REST wire names, all fields optional.
instance FromJSON AppEngineRouting where
        parseJSON
          = withObject "AppEngineRouting"
              (\ o ->
                 AppEngineRouting' <$>
                   (o .:? "service") <*> (o .:? "version") <*>
                     (o .:? "host")
                     <*> (o .:? "instance"))
-- JSON encoder; 'Nothing' fields are omitted from the object.
instance ToJSON AppEngineRouting where
        toJSON AppEngineRouting'{..}
          = object
              (catMaybes
                 [("service" .=) <$> _aerService,
                  ("version" .=) <$> _aerVersion,
                  ("host" .=) <$> _aerHost,
                  ("instance" .=) <$> _aerInstance])
-- | App Engine HTTP request. The message defines the HTTP request that is
-- sent to an App Engine app when the task is dispatched. Using
-- AppEngineHttpRequest requires
-- [\`appengine.applications.get\`](https:\/\/cloud.google.com\/appengine\/docs\/admin-api\/access-control)
-- Google IAM permission for the project and the following scope:
-- \`https:\/\/www.googleapis.com\/auth\/cloud-platform\` The task will be
-- delivered to the App Engine app which belongs to the same project as the
-- queue. For more information, see [How Requests are
-- Routed](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/how-requests-are-routed)
-- and how routing is affected by [dispatch
-- files](https:\/\/cloud.google.com\/appengine\/docs\/python\/config\/dispatchref).
-- Traffic is encrypted during transport and never leaves Google
-- datacenters. Because this traffic is carried over a communication
-- mechanism internal to Google, you cannot explicitly set the protocol
-- (for example, HTTP or HTTPS). The request to the handler, however, will
-- appear to have used the HTTP protocol. The AppEngineRouting used to
-- construct the URL that the task is delivered to can be set at the
-- queue-level or task-level: * If app_engine_routing_override is set on
-- the queue, this value is used for all tasks in the queue, no matter what
-- the setting is for the task-level app_engine_routing. The \`url\` that
-- the task will be sent to is: * \`url =\` host \`+\` relative_uri Tasks
-- can be dispatched to secure app handlers, unsecure app handlers, and
-- URIs restricted with [\`login:
-- admin\`](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/appref).
-- Because tasks are not run as any user, they cannot be dispatched to URIs
-- restricted with [\`login:
-- required\`](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python\/config\/appref)
-- Task dispatches also do not follow redirects. The task attempt has
-- succeeded if the app\'s request handler returns an HTTP response code in
-- the range [\`200\` - \`299\`]. The task attempt has failed if the app\'s
-- handler returns a non-2xx response code or Cloud Tasks does not receive
-- response before the deadline. Failed tasks will be retried according to
-- the retry configuration. \`503\` (Service Unavailable) is considered an
-- App Engine system error instead of an application error and will cause
-- Cloud Tasks\' traffic congestion control to temporarily throttle the
-- queue\'s dispatches. Unlike other types of task targets, a \`429\` (Too
-- Many Requests) response from an app handler does not cause traffic
-- congestion control to throttle the queue.
--
-- /See:/ 'appEngineHTTPRequest' smart constructor.
-- NOTE(review): generated API binding; all fields are optional and strict.
-- The JSON keys ("httpMethod", "relativeUri", "body", "headers",
-- "appEngineRouting") are fixed by the FromJSON/ToJSON instances below.
data AppEngineHTTPRequest =
  AppEngineHTTPRequest'
    { _aehttprHTTPMethod :: !(Maybe AppEngineHTTPRequestHTTPMethod)
    , _aehttprRelativeURI :: !(Maybe Text)
    , _aehttprBody :: !(Maybe Bytes)
    , _aehttprHeaders :: !(Maybe AppEngineHTTPRequestHeaders)
    , _aehttprAppEngineRouting :: !(Maybe AppEngineRouting)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AppEngineHTTPRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aehttprHTTPMethod'
--
-- * 'aehttprRelativeURI'
--
-- * 'aehttprBody'
--
-- * 'aehttprHeaders'
--
-- * 'aehttprAppEngineRouting'
appEngineHTTPRequest
    :: AppEngineHTTPRequest
appEngineHTTPRequest =
    -- Every optional field starts out unset.
    AppEngineHTTPRequest' Nothing Nothing Nothing Nothing Nothing
-- | The HTTP method to use for the request. The default is POST. The app\'s
-- request handler for the task\'s target URL must be able to handle HTTP
-- requests with this http_method, otherwise the task attempt fails with
-- error code 405 (Method Not Allowed). See [Writing a push task request
-- handler](https:\/\/cloud.google.com\/appengine\/docs\/java\/taskqueue\/push\/creating-handlers#writing_a_push_task_request_handler)
-- and the App Engine documentation for your runtime on [How Requests are
-- Handled](https:\/\/cloud.google.com\/appengine\/docs\/standard\/python3\/how-requests-are-handled).
aehttprHTTPMethod :: Lens' AppEngineHTTPRequest (Maybe AppEngineHTTPRequestHTTPMethod)
aehttprHTTPMethod =
    lens _aehttprHTTPMethod (\req v -> req {_aehttprHTTPMethod = v})
-- | The relative URI. The relative URI must begin with \"\/\" and must be a
-- valid HTTP relative URI. It can contain a path and query string
-- arguments. If the relative URI is empty, then the root path \"\/\" will
-- be used. No spaces are allowed, and the maximum length allowed is 2083
-- characters.
aehttprRelativeURI :: Lens' AppEngineHTTPRequest (Maybe Text)
aehttprRelativeURI =
    lens _aehttprRelativeURI (\req v -> req {_aehttprRelativeURI = v})
-- | HTTP request body. A request body is allowed only if the HTTP method is
-- POST or PUT. It is an error to set a body on a task with an incompatible
-- HttpMethod.
aehttprBody :: Lens' AppEngineHTTPRequest (Maybe ByteString)
aehttprBody =
    lens _aehttprBody (\req v -> req {_aehttprBody = v}) . mapping _Bytes
-- | HTTP request headers. This map contains the header field names and
-- values. Headers can be set when the task is created. Repeated headers
-- are not supported but a header value can contain commas. Cloud Tasks
-- sets some headers to default values: * \`User-Agent\`: By default, this
-- header is \`\"AppEngine-Google;
-- (+http:\/\/code.google.com\/appengine)\"\`. This header can be modified,
-- but Cloud Tasks will append \`\"AppEngine-Google;
-- (+http:\/\/code.google.com\/appengine)\"\` to the modified
-- \`User-Agent\`. If the task has a body, Cloud Tasks sets the following
-- headers: * \`Content-Type\`: By default, the \`Content-Type\` header is
-- set to \`\"application\/octet-stream\"\`. The default can be overridden
-- by explicitly setting \`Content-Type\` to a particular media type when
-- the task is created. For example, \`Content-Type\` can be set to
-- \`\"application\/json\"\`. * \`Content-Length\`: This is computed by
-- Cloud Tasks. This value is output only. It cannot be changed. The
-- headers below cannot be set or overridden: * \`Host\` * \`X-Google-*\` *
-- \`X-AppEngine-*\` In addition, Cloud Tasks sets some headers when the
-- task is dispatched, such as headers containing information about the
-- task; see [request
-- headers](https:\/\/cloud.google.com\/tasks\/docs\/creating-appengine-handlers#reading_request_headers).
-- These headers are set only when the task is dispatched, so they are not
-- visible when the task is returned in a Cloud Tasks response. Although
-- there is no specific limit for the maximum number of headers or the
-- size, there is a limit on the maximum size of the Task. For more
-- information, see the CreateTask documentation.
aehttprHeaders :: Lens' AppEngineHTTPRequest (Maybe AppEngineHTTPRequestHeaders)
aehttprHeaders =
    lens _aehttprHeaders (\req v -> req {_aehttprHeaders = v})
-- | Task-level setting for App Engine routing. * If
-- app_engine_routing_override is set on the queue, this value is used for
-- all tasks in the queue, no matter what the setting is for the task-level
-- app_engine_routing.
aehttprAppEngineRouting :: Lens' AppEngineHTTPRequest (Maybe AppEngineRouting)
aehttprAppEngineRouting =
    lens _aehttprAppEngineRouting (\req v -> req {_aehttprAppEngineRouting = v})
-- Decode from the API's JSON. The applicative chain must stay in
-- constructor-field order; absent keys decode to 'Nothing'.
instance FromJSON AppEngineHTTPRequest where
        parseJSON
          = withObject "AppEngineHTTPRequest"
              (\ o ->
                 AppEngineHTTPRequest' <$>
                   (o .:? "httpMethod") <*> (o .:? "relativeUri") <*>
                     (o .:? "body")
                     <*> (o .:? "headers")
                     <*> (o .:? "appEngineRouting"))

-- Encode to JSON; 'catMaybes' drops unset ('Nothing') fields entirely.
instance ToJSON AppEngineHTTPRequest where
        toJSON AppEngineHTTPRequest'{..}
          = object
              (catMaybes
                 [("httpMethod" .=) <$> _aehttprHTTPMethod,
                  ("relativeUri" .=) <$> _aehttprRelativeURI,
                  ("body" .=) <$> _aehttprBody,
                  ("headers" .=) <$> _aehttprHeaders,
                  ("appEngineRouting" .=) <$>
                    _aehttprAppEngineRouting])
-- | Request message for ResumeQueue.
--
-- /See:/ 'resumeQueueRequest' smart constructor.
-- Nullary placeholder: the ResumeQueue RPC carries no payload.
data ResumeQueueRequest =
  ResumeQueueRequest'
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ResumeQueueRequest' with the minimum fields required to make a request.
--
resumeQueueRequest
    :: ResumeQueueRequest
resumeQueueRequest = ResumeQueueRequest'

-- Any JSON object parses to the single constructor; its contents are ignored.
instance FromJSON ResumeQueueRequest where
        parseJSON
          = withObject "ResumeQueueRequest"
              (\ o -> pure ResumeQueueRequest')

-- Always serialized as the empty JSON object @{}@.
instance ToJSON ResumeQueueRequest where
        toJSON = const emptyObject
-- | Associates \`members\` with a \`role\`.
--
-- /See:/ 'binding' smart constructor.
-- An IAM binding: a set of members, the role granted to them, and an
-- optional condition gating the grant. All fields optional and strict.
data Binding =
  Binding'
    { _bMembers :: !(Maybe [Text])
    , _bRole :: !(Maybe Text)
    , _bCondition :: !(Maybe Expr)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Binding' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bMembers'
--
-- * 'bRole'
--
-- * 'bCondition'
binding
    :: Binding
-- All fields start out unset.
binding = Binding' Nothing Nothing Nothing
-- | Specifies the identities requesting access for a Cloud Platform
-- resource. \`members\` can have the following values: * \`allUsers\`: A
-- special identifier that represents anyone who is on the internet; with
-- or without a Google account. * \`allAuthenticatedUsers\`: A special
-- identifier that represents anyone who is authenticated with a Google
-- account or a service account. * \`user:{emailid}\`: An email address
-- that represents a specific Google account. For example,
-- \`alice\'example.com\` . * \`serviceAccount:{emailid}\`: An email
-- address that represents a service account. For example,
-- \`my-other-app\'appspot.gserviceaccount.com\`. * \`group:{emailid}\`: An
-- email address that represents a Google group. For example,
-- \`admins\'example.com\`. * \`deleted:user:{emailid}?uid={uniqueid}\`: An
-- email address (plus unique identifier) representing a user that has been
-- recently deleted. For example,
-- \`alice\'example.com?uid=123456789012345678901\`. If the user is
-- recovered, this value reverts to \`user:{emailid}\` and the recovered
-- user retains the role in the binding. *
-- \`deleted:serviceAccount:{emailid}?uid={uniqueid}\`: An email address
-- (plus unique identifier) representing a service account that has been
-- recently deleted. For example,
-- \`my-other-app\'appspot.gserviceaccount.com?uid=123456789012345678901\`.
-- If the service account is undeleted, this value reverts to
-- \`serviceAccount:{emailid}\` and the undeleted service account retains
-- the role in the binding. * \`deleted:group:{emailid}?uid={uniqueid}\`:
-- An email address (plus unique identifier) representing a Google group
-- that has been recently deleted. For example,
-- \`admins\'example.com?uid=123456789012345678901\`. If the group is
-- recovered, this value reverts to \`group:{emailid}\` and the recovered
-- group retains the role in the binding. * \`domain:{domain}\`: The G
-- Suite domain (primary) that represents all the users of that domain. For
-- example, \`google.com\` or \`example.com\`.
bMembers :: Lens' Binding [Text]
bMembers =
    lens _bMembers (\b v -> b {_bMembers = v}) . _Default . _Coerce
-- | Role that is assigned to \`members\`. For example, \`roles\/viewer\`,
-- \`roles\/editor\`, or \`roles\/owner\`.
bRole :: Lens' Binding (Maybe Text)
-- Plain field lens; no defaulting or coercion.
bRole = lens _bRole (\ s a -> s{_bRole = a})
-- | The condition that is associated with this binding. If the condition
-- evaluates to \`true\`, then this binding applies to the current request.
-- If the condition evaluates to \`false\`, then this binding does not
-- apply to the current request. However, a different role binding might
-- grant the same role to one or more of the members in this binding. To
-- learn which resources support conditions in their IAM policies, see the
-- [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
bCondition :: Lens' Binding (Maybe Expr)
bCondition =
    lens _bCondition (\b v -> b {_bCondition = v})
-- A missing "members" key decodes to the empty list (.!= mempty);
-- the other keys decode to 'Nothing' when absent.
instance FromJSON Binding where
        parseJSON
          = withObject "Binding"
              (\ o ->
                 Binding' <$>
                   (o .:? "members" .!= mempty) <*> (o .:? "role") <*>
                     (o .:? "condition"))

-- Unset fields are omitted from the encoded object via 'catMaybes'.
instance ToJSON Binding where
        toJSON Binding'{..}
          = object
              (catMaybes
                 [("members" .=) <$> _bMembers,
                  ("role" .=) <$> _bRole,
                  ("condition" .=) <$> _bCondition])
| brendanhay/gogol | gogol-cloudtasks/gen/Network/Google/CloudTasks/Types/Product.hs | mpl-2.0 | 98,658 | 0 | 21 | 20,289 | 12,201 | 7,202 | 4,999 | 1,307 | 1 |
-- | A minimal wrapper of a list, written with ordinary layout instead of
-- explicit braces and semicolons.
module Array where

data Array a = Array [a]
| jwaldmann/ceta-postproc | CeTA-2.39/generated/Haskell/Array.hs | lgpl-3.0 | 50 | 0 | 7 | 12 | 20 | 14 | 6 | 2 | 0 |
{-|
This package provides test types for Network.Haskoin
-}
module Network.Haskoin.Test
(
-- * Util Arbitrary functions
arbitraryBS
, arbitraryBS1
, arbitraryBSn
, arbitraryUTCTime
, arbitraryMaybe
-- * Crypto Arbitrary functions
, arbitraryHash160
, arbitraryHash256
, arbitraryHash512
, arbitraryCheckSum32
, arbitraryPrvKey
, arbitraryPrvKeyC
, arbitraryPrvKeyU
, arbitraryPubKey
, arbitraryPubKeyC
, arbitraryPubKeyU
, arbitraryAddress
, arbitraryPubKeyAddress
, arbitraryScriptAddress
, arbitrarySignature
, arbitraryXPrvKey
, arbitraryXPubKey
, arbitraryBip32PathIndex
, arbitraryHardPath
, arbitrarySoftPath
, arbitraryDerivPath
, arbitraryParsedPath
-- * Network Arbitrary functions
, arbitraryVarInt
, arbitraryVarString
, arbitraryNetworkAddress
, arbitraryNetworkAddressTime
, arbitraryInvType
, arbitraryInvVector
, arbitraryInv1
, arbitraryVersion
, arbitraryAddr1
, arbitraryAlert
, arbitraryReject
, arbitraryRejectCode
, arbitraryGetData
, arbitraryNotFound
, arbitraryPing
, arbitraryPong
, arbitraryBloomFlags
, arbitraryBloomFilter
, arbitraryFilterLoad
, arbitraryFilterAdd
, arbitraryMessageCommand
-- * Message Arbitrary functions
, arbitraryMessageHeader
, arbitraryMessage
-- * Script arbitrary functions
, arbitraryScriptOp
, arbitraryScript
, arbitraryIntScriptOp
, arbitraryPushDataType
, arbitraryTxSignature
, arbitrarySigHash
, arbitraryValidSigHash
, arbitraryMSParam
, arbitraryScriptOutput
, arbitrarySimpleOutput
, arbitraryPKOutput
, arbitraryPKHashOutput
, arbitraryMSOutput
, arbitraryMSCOutput
, arbitrarySHOutput
, arbitraryScriptInput
, arbitrarySimpleInput
, arbitraryPKInput
, arbitraryPKHashInput
, arbitraryPKHashCInput
, arbitraryMSInput
, arbitrarySHInput
, arbitraryMulSigSHCInput
-- * Transaction arbitrary functions
, TestCoin(..)
, arbitrarySatoshi
, arbitraryTx
, arbitraryTxHash
, arbitraryTxIn
, arbitraryTxOut
, arbitraryOutPoint
, arbitraryAddrOnlyTx
, arbitraryAddrOnlyTxIn
, arbitraryAddrOnlyTxOut
, arbitrarySigInput
, arbitraryPKSigInput
, arbitraryPKHashSigInput
, arbitraryMSSigInput
, arbitrarySHSigInput
, arbitrarySigningData
, arbitraryPartialTxs
-- * Block arbitrary functions
, arbitraryBlock
, arbitraryBlockHeader
, arbitraryBlockHash
, arbitraryGetBlocks
, arbitraryGetHeaders
, arbitraryHeaders
, arbitraryMerkleBlock
) where
import Network.Haskoin.Test.Util
import Network.Haskoin.Test.Crypto
import Network.Haskoin.Test.Network
import Network.Haskoin.Test.Message
import Network.Haskoin.Test.Script
import Network.Haskoin.Test.Transaction
import Network.Haskoin.Test.Block
| plaprade/haskoin | haskoin-core/src/Network/Haskoin/Test.hs | unlicense | 2,571 | 0 | 5 | 279 | 358 | 244 | 114 | 105 | 0 |
-- | All numbers counting up from 1, as an infinite lazy list.
allNumbers :: Num a => [a]
allNumbers = allNumbersFrom 1

-- | The infinite ascending sequence starting at @n@.
allNumbersFrom :: Num a => a -> [a]
allNumbersFrom n = n : allNumbersFrom (n + 1)
data TimeMachine = TM {manufacturer :: String, year :: Integer } deriving (Eq, Show)
-- | Infinite list of one manufacturer's machines, one per year from @y@ on.
timeMachinesFrom :: String -> Integer -> [TimeMachine]
timeMachinesFrom mf y = map (TM mf) [y ..]
-- | Take the first @n@ elements of a list, like 'Prelude.take'.
-- Fixed: any non-positive count now yields @[]@. Previously only an exact
-- 0 stopped the recursion, so a negative count walked the whole list
-- (and diverged on infinite lists).
yxtake :: (Ord t, Num t) => t -> [a] -> [a]
yxtake _ [] = []
yxtake n (x:xs)
  | n <= 0    = []
  | otherwise = x : yxtake (n - 1) xs
-- | Sum of a list, expressed as a right fold.
yxsum :: Num a => [a] -> a
yxsum = foldr (+) 0
fibonacci = 0 : 1: zipWith (+) fibonacci (tail fibonacci)
-- | Naive doubly-recursive Fibonacci (exponential time; for illustration).
yxfib n
  | n == 0    = 0
  | n == 1    = 1
  | otherwise = yxfib (n - 1) + yxfib (n - 2)
fibonacci2 = map fst $ iterate (\(n, n1)->(n1,n+n1)) (0,1)
| wangyixiang/beginninghaskell | chapter5/src/Chapter5/Infinite.hs | unlicense | 574 | 5 | 10 | 117 | 344 | 172 | 172 | 15 | 1 |
module Transformers where
{-
This is a collection of various monad transformers, independent of the usually used
transformer libraries like mtl.
-}
import TransformerClass
import Data.Monoid
import Control.Monad (liftM)
-- List Transformer
-- | List transformer: an @m@-computation yielding many results.
-- NOTE(review): this classic formulation is only a lawful monad when the
-- underlying monad is commutative; kept as-is apart from an idiomatic
-- cleanup (@sequence . map f@ is exactly @mapM f@).
newtype ListT m a = ListT { runListT :: m [a] }

instance Monad m => Monad (ListT m) where
    return = ListT . return . (:[])
    m >>= f = ListT $ do
        xs <- runListT m
        -- Run f on every result and flatten the produced lists.
        ys <- mapM (runListT . f) xs
        return $ concat ys
-- Maybe Transformer
-- | Maybe transformer: an @m@-computation that may produce no result.
newtype MaybeT m a = MaybeT { runMaybeT :: m (Maybe a) }

instance Monad m => Monad (MaybeT m) where
    return = MaybeT . return . Just
    m >>= f = MaybeT $ do
        m' <- runMaybeT m
        case m' of
            -- Failure short-circuits the rest of the chain.
            Nothing -> return Nothing
            Just x -> runMaybeT $ f x

instance MonadTrans MaybeT where
    -- Lifting always succeeds: wrap the result in 'Just'.
    lift = MaybeT . liftM Just
-- Reader Transformer
-- | Reader transformer: an @m@-computation with access to a shared
-- environment of type @r@.
newtype ReaderT r m a = ReaderT { runReaderT :: r -> m a }

instance Monad m => Monad (ReaderT r m) where
    return x = ReaderT $ \_ -> return x
    m >>= f = ReaderT $ \x -> do
        -- The same environment is threaded into both computations.
        m' <- runReaderT m x
        runReaderT (f m') x

instance MonadTrans (ReaderT r) where
    -- A lifted computation ignores the environment.
    lift x = ReaderT $ \_ -> x

-- | Retrieve the environment.
ask :: Monad m => ReaderT r m r
ask = ReaderT return
-- State Transformer
-- | State transformer: an @m@-computation threading a state of type @s@.
newtype StateT s m a = StateT { runStateT :: s -> m (a,s) }

instance Monad m => Monad (StateT s m) where
    return x = StateT $ \s -> return (x,s)
    m >>= f = StateT $ \s -> do
        -- The updated state s' from the first step feeds the second.
        (x,s') <- runStateT m s
        runStateT (f x) s'

instance MonadTrans (StateT s) where
    -- A lifted computation leaves the state untouched.
    lift x = StateT $ \s -> x >>= \x' -> return (x',s)

-- | Retrieve the current state.
get :: Monad m => StateT s m s
get = StateT $ \s -> return (s,s)

-- NOTE(review): this 'put' never modifies the state — it returns @a@ and
-- passes the incoming state through unchanged, i.e. it is equivalent to
-- 'return'. A conventional 'put' would have type @s -> StateT s m ()@ and
-- replace the state; confirm intent before relying on this.
put :: Monad m => a -> StateT s m a
put a = StateT $ \s -> return (a,s)
-- Writer Transformer
-- | Writer transformer: an @m@-computation accumulating an output of the
-- monoid @w@.
newtype WriterT w m a = WriterT { runWriterT :: m (a,w) }

instance (Monoid w, Monad m) => Monad (WriterT w m) where
    return x = WriterT $ return (x,mempty)
    m >>= f = WriterT $ do
        (a,w) <- runWriterT m
        (a',w') <- runWriterT (f a)
        -- Outputs from both steps are combined in order.
        return (a',w `mappend` w')

instance Monoid w => MonadTrans (WriterT w) where
    -- A lifted computation contributes no output.
    lift x = WriterT $ x >>= \x' -> return (x',mempty)

-- | Append @n@ to the accumulated output.
tell :: (Monoid n, Monad m) => n -> WriterT n m ()
tell n = WriterT . return $ ((),n)
-- Identity Transformer
-- | The trivial monad: a plain value with no effects.
newtype Identity a = Identity { runIdentity :: a }

instance Monad Identity where
    return = Identity
    Identity x >>= f = f x
--
-- | Identity transformer: wraps @m@ without adding any effects.
newtype IdentityT m a = IdentityT { runIdentityT :: m a }

instance Monad m => Monad (IdentityT m) where
    return = IdentityT . return
    m >>= f = IdentityT (runIdentityT m >>= runIdentityT . f)

instance MonadTrans IdentityT where
    lift = IdentityT
-- ErrorT
-- | Error transformer: an @m@-computation that may fail with an error of
-- type @e@ ('Left' short-circuits the chain).
newtype ErrorT e m a = ErrorT { runErrorT :: m (Either e a) }

instance Monad m => Monad (ErrorT e m) where
    return x = ErrorT (return $ Right x)
    m >>= f = ErrorT $ do
        i <- runErrorT m
        case i of
            -- An error stops the computation and is passed through.
            Left x -> return (Left x)
            Right y -> runErrorT $ f y

instance MonadTrans (ErrorT e) where
    -- A lifted computation cannot fail; wrap its result in 'Right'.
    lift x = ErrorT $ x >>= \x' -> return (Right x')

-- | Abort with error @e@.
throwError :: Monad m => e -> ErrorT e m ()
throwError e = ErrorT . return . Left $ e

-- | Run @m@; on error, continue with the handler @f@ instead.
catchError :: Monad m => ErrorT e m a -> (e -> ErrorT e m a) -> ErrorT e m a
catchError m f = ErrorT $ do
    v <- runErrorT m
    case v of
        Left e -> runErrorT $ f e
        Right c -> return (Right c)
----
-- | 'ReaderT' specialized to no underlying effects.
type Reader' r a = ReaderT r Identity a

reader :: (r -> a) -> Reader' r a
reader f = ReaderT $ \r -> Identity (f r)

runReader' :: Reader' r a -> r -> a
runReader' m env = runIdentity (runReaderT m env)
-- | 'MaybeT' specialized to no underlying effects.
type Maybe' a = MaybeT Identity a

just :: a -> Maybe' a
just x = MaybeT (Identity (Just x))

nothing :: Maybe' a
nothing = MaybeT (Identity Nothing)
-- | 'StateT' specialized to no underlying effects.
type State' s a = StateT s Identity a

state :: (s -> (a,s)) -> State' s a
state f = StateT $ \s -> Identity (f s)

runState :: State' s a -> s -> (a,s)
runState m s0 = runIdentity (runStateT m s0)
-- | 'WriterT' specialized to no underlying effects.
type Writer' w a = WriterT w Identity a

writer :: Monoid w => (a,w) -> Writer' w a
writer = WriterT . Identity

runWriter :: Monoid w => Writer' w a -> (a,w)
runWriter = runIdentity . runWriterT
--------------------------------
-- | Example stack combining state ([Int] stack), a reader (default value)
-- and a writer (log of Ints) over 'Identity'.
type T1 = StateT [Int] (ReaderT Int (WriterT [Int] Identity)) Int

-- | Push @i@ onto the state stack and also return it.
push :: Int -> T1
push i = StateT $ \s -> return (i,i:s)

-- | Push the default value taken from the reader environment.
pushDef :: T1
pushDef = do
    d <- lift ask
    push d

-- | Demo: one explicit push, two default pushes, then log [1,2,3].
f1 :: T1
f1 = do
    push 7
    pushDef
    pushDef
    -- 'tell' lives two layers down, hence the double lift.
    lift . lift . tell $ [1,2,3]
    return 8
| dermesser/haskell-experiments | Transformers.hs | unlicense | 4,790 | 0 | 14 | 1,582 | 1,966 | 1,015 | 951 | 118 | 2 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Calc where
import StackVM
import Data.Functor ((<$>))
import qualified ExprT as E
import qualified Data.Map as M
import Parser
-- Exercise #1:
-- | Evaluate an 'E.ExprT' expression tree to its integer value.
eval :: E.ExprT -> Integer
eval e = case e of
  E.Lit n   -> n
  E.Add l r -> eval l + eval r
  E.Mul l r -> eval l * eval r
-- Exercise #2:
-- | Parse an arithmetic expression and evaluate it;
-- 'Nothing' when the input does not parse.
evalStr :: String -> Maybe Integer
evalStr str = eval <$> parseExp E.Lit E.Add E.Mul str
-- Exercise #3:
-- | Types that can interpret integer literals, addition and multiplication.
class Expr a where
  lit :: Integer -> a
  add :: a -> a -> a
  mul :: a -> a -> a
-- The AST itself interprets the operations as its own constructors.
instance Expr E.ExprT where
  lit = E.Lit
  add = E.Add
  mul = E.Mul
-- Exercise #4:
-- Direct numeric interpretation.
instance Expr Integer where
  lit = id
  add = (+)
  mul = (*)

-- Boolean interpretation: positive literals are True, add is (||),
-- mul is (&&).
instance Expr Bool where
  lit n = n > 0
  add = (||)
  mul = (&&)
-- | Integer wrapper whose Expr interpretation uses max/min.
data MinMax = MinMax Integer
  deriving (Show, Eq, Ord)
-- add is the maximum, mul is the minimum (tropical-style semiring).
instance Expr MinMax where
  lit = MinMax
  add = max
  mul = min
-- | Integers modulo 7; arithmetic wraps via the 'Num' instance below.
data Mod7 = Mod7 Integer
  deriving (Show, Eq, Ord)
-- Every operation funnels through 'fromInteger', which reduces mod 7.
instance Num Mod7 where
  fromInteger = Mod7 . (`mod` 7)
  Mod7 x + Mod7 y = fromInteger (x + y)
  Mod7 x * Mod7 y = fromInteger (x * y)
  Mod7 x - Mod7 y = fromInteger (x - y)
  signum (Mod7 x) = Mod7 (signum x)
  abs (Mod7 x) = Mod7 (abs x)
-- Literals reduce mod 7; add/mul reuse the 'Num' instance.
instance Expr Mod7 where
  lit = fromInteger
  add = (+)
  mul = (*)
-- Exercise #5:
-- Compile to stack-machine code: operands are emitted first, then the
-- opcode (reverse Polish order for the StackVM).
instance Expr Program where
  lit a = [PushI a]
  add a b = a ++ b ++ [Add]
  mul a b = a ++ b ++ [Mul]
-- | Parse an expression straight into a StackVM program.
compile :: String -> Maybe Program
compile = parseExp lit add mul
-- Exercise #6:
-- | Expression languages that support named variables.
class HasVars a where
  var :: String -> a
-- | Expression AST extended with variables.
data VarExprT = LitV Integer
              | Var String
              | AddV VarExprT VarExprT
              | MulV VarExprT VarExprT
  deriving (Eq, Ord, Show)
instance HasVars VarExprT where
  var = Var
instance Expr VarExprT where
  lit = LitV
  add = AddV
  mul = MulV
| parsonsmatt/cis194 | hw5/Calc.hs | apache-2.0 | 2,049 | 0 | 9 | 679 | 901 | 455 | 446 | 68 | 1 |
module BankKata.BankAccount where
import BankKata.Natural
import Data.List
import Data.Time
-- | A dated movement of money into or out of the account.
data Transaction = Deposit Day Nat | Withdraw Day Nat deriving (Show, Eq)

-- | An account is the chronological list of its transactions.
newtype BankAccount = BankAccount { getTransactions :: [Transaction] } deriving (Show, Eq)

-- | An account with no history yet.
emptyAccount :: BankAccount
emptyAccount = BankAccount []
-- | Record a deposit of @amt@ made on @day@.
deposit :: Day -> Nat -> BankAccount -> BankAccount
deposit day amt acct = addTransaction acct (Deposit day amt)

-- | Record a withdrawal of @amt@ made on @day@.
withdraw :: Day -> Nat -> BankAccount -> BankAccount
withdraw day amt acct = addTransaction acct (Withdraw day amt)
-- | Append a transaction to the account history (oldest first).
addTransaction :: BankAccount -> Transaction -> BankAccount
addTransaction (BankAccount ts) t = BankAccount (ts ++ [t])
-- Fixed-width column headers for the printed statement; the widths fix
-- the table layout. Explicit signatures added (top-level bindings should
-- carry types).
hdate, hcredit, hdebit, hbalance, sep :: String
hdate = rpad 7 "date "
hcredit = rpad 8 "credit "
hdebit = rpad 7 "debit "
hbalance = rpad 9 "balance "
-- | Column separator.
sep = "|"
-- | Render the account as a table: a header row, then one line per
-- transaction with its running balance, newest transaction first.
printStatement :: BankAccount -> String
printStatement account = header ++ "\n" ++ statementLines
    where
      header = hdate ++ sep ++ hcredit ++ sep ++ hdebit ++ sep ++ hbalance
      -- Both lists are reversed in step, so each transaction stays paired
      -- with its own running balance while rendering newest-first.
      statementLines = intercalate "\n" $ formatLine <$> zip (reverse transactions) (reverse (runningBalance transactions))
      transactions = getTransactions account

-- | Render one statement row: date | credit | debit | balance.
-- Deposits fill the credit column, withdrawals the debit column; the
-- unused column is left blank.
formatLine :: (Transaction, Int) -> String
formatLine (tran, balance) =
  fdate tran ++ sep ++ fcredit tran ++ sep ++ fdebit tran ++ sep ++ fbalance balance
  where
    fdate (Deposit day _) = (rpadto hdate (formatDay day))
    fdate (Withdraw day _) = (rpadto hdate (formatDay day))
    fcredit (Deposit _ am) = (rpadto hcredit (formatAmount am))
    fcredit (Withdraw _ _) = (rpadto hcredit "")
    fdebit (Withdraw _ am) = (rpadto hdebit (formatAmount am))
    fdebit (Deposit _ _) = (rpadto hdebit "")
    fbalance balance = (rpadto hbalance (show balance))
-- | Render a natural amount as its decimal string.
formatAmount :: Nat -> String
formatAmount = show . intValue
-- | Render a date as \"day-month\", e.g. 14 March becomes \"14-3\".
formatDay :: Day -> String
formatDay date =
    let (_, month, day) = toGregorian date
    in show day ++ "-" ++ show month
-- | Balance after each transaction, in chronological order.
-- Deposits count positive, withdrawals negative.
runningBalance :: [Transaction] -> [Int]
runningBalance trans = drop 1 (scanl step 0 trans)
  where
    step acc t = amountOf t + acc
    amountOf (Deposit _ am) = intValue am
    amountOf (Withdraw _ am) = negate (intValue am)
-- | Pad @s@ (plus one trailing space) to the width of the header label @l@.
rpadto :: String -> String -> String
rpadto l s = rpad (length l) (s ++ " ")
-- | Pad a string with spaces to width @n@; strings already at least that
-- wide are returned unchanged.
-- NOTE(review): despite the name, the spaces are PREPENDED (left padding);
-- behavior kept for compatibility with the existing table layout.
rpad :: Int -> String -> String
rpad n s
  | length s < n = replicate (n - length s) ' ' ++ s
  | otherwise = s
| PaNaVTEC/Katas | bank-kata/haskell/src/BankKata/BankAccount.hs | apache-2.0 | 2,393 | 0 | 12 | 488 | 919 | 472 | 447 | 49 | 4 |
module Main (main) where
import Control.Applicative hiding (many)
import Data.Text (Text)
import qualified Data.Text.IO as T
import System.Environment (getArgs)
import Text.Parsec hiding ((<|>), parse, State)
import Text.Parsec.Expr hiding (Operator)
import Text.Parsec.Indent
import Text.Parsec.Text ()
import Text.Show.Pretty (ppShow)
import Language.Livescript.Lexer
import Language.Livescript.Parser.Type
-- | Parse every file named on the command line.
main :: IO ()
main = do
    args <- getArgs
    mapM_ parseAndCheck args

-- | Parse one file and report: the parse error on failure, or a
-- pretty-printed AST on success.
parseAndCheck :: FilePath -> IO ()
parseAndCheck path = do
    putStrLn path
    txt <- T.readFile path
    case parse blockP path txt of
      Left err -> print err
      Right x -> putStrLn (ppShow x)
------------------------------------------------------------------------
-- | Run an indentation-sensitive parser over a 'Text' input, reporting the
-- given source name in errors.
parse :: Parser a -> SourceName -> Text -> Either ParseError a
parse p source input = runIndent source (runParserT p () source input)
------------------------------------------------------------------------
-- | Abstract syntax for the (small) LiveScript subset parsed here.
data Expr
    = Block [Expr]
    | Var String
    | Literal String

    -- assignment
    | Assign Expr Expr
    | ReAssign Expr Expr

    -- binary operators
    | Add Expr Expr
    | Sub Expr Expr

    deriving (Show,Eq,Ord)
------------------------------------------------------------------------
-- | A block: expressions separated (and optionally terminated) by newlines.
blockP :: Parser Expr
blockP = Block <$> exprP `sepEndBy` newlineP

-- | A full expression, built from terms via the operator table.
exprP :: Parser Expr
exprP = buildExpressionParser operators (lexeme termP)

-- | An atomic term: a variable or a literal.
termP :: Parser Expr
termP = varP <|> literalP

varP :: Parser Expr
varP = Var <$> identifier

-- | A decimal literal; the parsed number is stored back as a string.
literalP :: Parser Expr
literalP = Literal . show <$> decimal

-- | Operator table; earlier groups bind tighter, so + and - bind more
-- tightly than the assignment operators.
operators :: [[Operator Expr]]
operators =
    [ [ assocLeft "+" Add
      , assocLeft "-" Sub
      ]
    , [ assocLeft "=" Assign
      , assocLeft ":=" ReAssign
      ]
    ]
-- | Build a left-associative infix operator from its symbol and AST node.
assocLeft :: String -> (a -> a -> a) -> Operator a
assocLeft op f = Infix (reservedOp op >> return f) AssocLeft
-- | Consume a statement separator (';' or a newline character), discarding it.
newlineP :: Parser ()
newlineP = lexeme (oneOf ";\r\n") >> return ()
| jystic/language-livescript | src/Main.hs | apache-2.0 | 2,028 | 0 | 12 | 479 | 606 | 327 | 279 | 54 | 2 |
-- | This module contains the Notifications App. This application allows Tersus applications
-- to create topics to which other applications can subscribe and all messages sent to
-- this application under a particular topic will be delivered to all applications
-- subscribed to that topic
module Tersus.Cluster.TersusNotificationsApp where
import Prelude
import System.IO.Unsafe (unsafePerformIO)
import Data.SafeCopy
import Data.IxSet
import qualified Data.IxSet as I
import Data.Text hiding (foldl,empty)
import Data.Time.Clock (getCurrentTime)
import Data.Typeable.Internal (Typeable)
import qualified Control.Monad.State as S
import qualified Control.Monad.Reader as R
import Data.Aeson (ToJSON,FromJSON,toJSON,parseJSON,object,(.:),(.:?),fromJSON)
import qualified Data.Aeson.Types as A
import Control.Monad (mzero)
import Tersus.Cluster.TersusService
import Tersus.Global
import Control.Applicative
import Prelude
import Tersus.DataTypes.Messaging
import Tersus.DataTypes.TApplication
import Tersus.DataTypes.User
import Tersus.Database(io)
import qualified Tersus.Global as Tersus.Global
-- | The name of the notifications application
tersusNotificationAppName :: Text
tersusNotificationAppName = "tersusNotifications"

-- | The user that runs the Notifications App
tersusNotificationsUser :: Text
tersusNotificationsUser = "tersus"

-- NOTE(review): 'unsafePerformIO getCurrentTime' fixes the creation time at
-- whatever moment this thunk is first forced, and is impure at the top
-- level — TODO confirm this is intended.
tersusNotificationsApp' :: TApplication
tersusNotificationsApp' = TApp 2 tersusNotificationAppName tersusNotificationAppName "This app provides notifications via messages of a variety of events" "http://tersusland.com/tersus" "neto@netowork.me" (unsafePerformIO getCurrentTime) "notificationsAppKey" [Tersus.Global.tersusUserId]

-- | The built-in user account that owns and runs this service.
tersusServiceUser :: User
tersusServiceUser = User 0 "tersus@tersusland.com" tersusNotificationsUser (Just "") False
-- | One app instance's subscription to one named topic.
data TopicSubscription = TopicSubscription {subscriber :: AppInstance,topic :: Text} deriving (Typeable,Show,Eq,Ord)

-- | A datatype to index the topics in the topics database
newtype Topic = Topic Text deriving (Typeable,Ord,SafeCopy,Eq)

-- | The possible requests that can be done to the Notifications app
-- Subscribe: subscribe to a topic
-- Unsubscribe: unsubscribe from a topic
-- Notify: send a message to all subscribers of a topic
-- ('Read' is how the action name is parsed from incoming JSON.)
data Action = Subscribe | Unsubscribe | Notify deriving (Show,Read)

-- | Datatype representing a service request from an app instance
-- to the notifications app
-- 'Left' holds a recognized action; 'Right' keeps the raw text of an
-- unrecognized one so it can be echoed back in the error response.
data Operation = Operation {action :: Either Action Text
                           ,topicStr :: Text
                           ,notificationText :: Maybe Text} deriving (Show,Read)

-- | Possible outcomes that can ocurr in the topics application.
-- Success indicates the operation completed successfully
-- InvalidFormat indicates that the request could not be parsed
-- UnrecognizedOperation indicates that the action in the message is not an action of the Notifications App.
data NResultCode = UnrecognizedOperation | Success | InvalidFormat deriving (Show,Read)

-- | The result of a request to the topics application. Indicates wether it was successful
-- or erroneous
data NResult = InvalidOperation Text | SuccessfulOperation Action | FormatError deriving (Show,Read)
-- Parse an 'Action' from its 'Show' spelling; anything else fails the parser.
instance FromJSON Action where
    parseJSON (A.String appAction) = case readMay.unpack $ appAction of
      Nothing -> mzero
      Just a -> return a
    parseJSON _ = mzero

-- NOTE(review): orphan instance for 'Either Action Text'. An unparseable
-- action string is preserved verbatim on the 'Right' rather than failing.
instance FromJSON (Either Action Text) where
    parseJSON (A.String appAction) = return $ case fromJSON $ A.String appAction of
      A.Error _ -> Right $ appAction
      A.Success a -> Left a
    parseJSON _ = mzero

instance ToJSON Action where
    toJSON = A.String . pack . show

instance ToJSON NResultCode where
    toJSON = A.String . pack . show
-- | The JSON field where the result of a service of the
-- topics application is placed
resultTxt :: Text
resultTxt = "result"

-- | The json field for the operation
operationTxt :: Text
operationTxt = "operation"

-- Results always carry the "result" code; errors and successes also echo
-- the offending/performed operation.
instance ToJSON NResult where
    toJSON (InvalidOperation t) = object [(resultTxt,toJSON UnrecognizedOperation)
                                         ,(operationTxt,toJSON t)]
    toJSON (SuccessfulOperation a) = object [(resultTxt,toJSON Success)
                                            ,(operationTxt,toJSON a)]
    toJSON FormatError = object [(resultTxt,toJSON InvalidFormat)]

-- "action" and "topic" are always serialized; "notification" only when set.
instance ToJSON Operation where
    toJSON op = object allFields
        where
          mandatory = [("action",toJSON $ either toJSON toJSON $ action op),("topic",toJSON $ topicStr op)]
          allFields = case notificationText op of
            Nothing -> mandatory
            Just t -> mandatory ++ [("notification",toJSON t)]

-- "notification" is optional (.:?); the other two keys are required.
instance FromJSON Operation where
    parseJSON (A.Object operation) = Operation <$>
                                     operation .: "action" <*>
                                     operation .: "topic" <*>
                                     operation .:? "notification"
    parseJSON _ = mzero
-- | Function that processes the incoming messages. The message can have a request
-- to subscribe to a topic, to unsubscribe to a topic or to send a message to everyone subscribed to
-- the topic
tersusNotificationsRecv :: TMessage -> TersusServiceM TopicsDb ()
tersusNotificationsRecv message = do
  -- NOTE(review): unconditional debug trace of every incoming message;
  -- consider removing or routing through a logger.
  io $ putStrLn $ show message
  case operation of
    -- Body did not decode to an 'Operation': report a format error.
    Nothing -> sendResponse $ encodeAsText FormatError
    Just op -> runOperation op
  where
    TMessage uSender uReceiver aSender aReceiver msgBody _ = message
    operation = decodeFromText $ msgBody
    -- Replies swap sender and receiver of the incoming message.
    sendResponse txt = sendMessage' $ TMessage uReceiver uSender aReceiver aSender txt
    -- Unrecognized action: echo its raw text back as an error.
    runOperation (Operation (Right s) _ _) = sendResponse $ encodeAsText $ InvalidOperation s
    runOperation (Operation (Left Subscribe) selTopic _) = do
      db <- getDb
      _ <- update' db $ AddSubscription $ TopicSubscription (getSendAppInstance message) selTopic
      sendResponse $ encodeAsText $ SuccessfulOperation Subscribe
    runOperation (Operation (Left Unsubscribe) selTopic _) = do
      db <- getDb
      _ <- update' db $ RmSubscription $ TopicSubscription (getSendAppInstance message) selTopic
      sendResponse $ encodeAsText $ SuccessfulOperation Unsubscribe
    runOperation (Operation (Left Notify) selTopic notification) = do
      db <- getDb
      -- The sender itself is removed from the recipient set before
      -- broadcasting, so it does not receive its own notification.
      subscribers <- query' db (GetSubscribers selTopic) >>= return.(delete $ TopicSubscription (getSendAppInstance message) selTopic)
      mapM_ notificationMsg $ toList subscribers
      sendResponse $ encodeAsText $ SuccessfulOperation Notify
      where
        notificationMsg (TopicSubscription s _) = do
          let
            AppInstance uSubs aSubs = s
            -- A Notify without text broadcasts an empty message body.
            resp = case notification of
              Nothing -> ""
              Just n -> n
            msg = TMessage uSender uSubs aSender aSubs resp
          sendMessage' msg
-- | Notifications App. This app allows applications to create topics to which they can send messages and theese messages
-- will be delivered to every application that is subscribed to the topic
tersusNotificationsApp :: TersusServerApp TopicsDb
-- NOTE(review): the persistent-state path "/tmp/TNA" is hard-coded
-- (presumably acid-state's openLocalStateFrom) — confirm for production.
tersusNotificationsApp = TersusServerApp tersusNotificationsApp' tersusServiceUser tersusNotificationsRecv Nothing Nothing $ Just $ openLocalStateFrom "/tmp/TNA" $ TopicsDb $ I.empty
| kmels/tersus | Tersus/Cluster/TersusNotificationsApp.hs | bsd-2-clause | 7,514 | 1 | 17 | 1,743 | 1,531 | 820 | 711 | 108 | 6 |
--
-- Copyright (c) 2013, Carl Joachim Svenn
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- 1. Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-- (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-- LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Font.FontColor
(
FontColor (..),
makeFontColorFloat,
makeFontColorGLubyte,
fontColorScaleAlpha,
fontColorChangeAlpha,
) where
import MyPrelude
import OpenGL
import OpenGL.Helpers
-- | fixme/question:
-- * representing colors with premultiplied alpha 4-tuple?
-- * use 4 GLubyte's, instead of GLfloat? (but I don't think GHC saves
-- this space, and instead represent GLubyte as Word32)
-- | An RGBA color for font rendering; each channel is a strict 'GLfloat'
-- (see the fixme notes above about premultiplied alpha and representation).
data FontColor =
    FontColor
    {
        fontcolorR :: !GLfloat,  -- ^ red channel
        fontcolorG :: !GLfloat,  -- ^ green channel
        fontcolorB :: !GLfloat,  -- ^ blue channel
        fontcolorA :: !GLfloat   -- ^ alpha channel
    }
-- | Build a 'FontColor' from four 'Float' channels, converting each
-- channel with 'rTF'.
makeFontColorFloat :: Float -> Float -> Float -> Float -> FontColor
makeFontColorFloat r g b a =
    let conv = rTF
    in  FontColor (conv r) (conv g) (conv b) (conv a)
-- | Build a 'FontColor' from four 'GLubyte' channels in [0, 255].
-- The constant 0.003921569 is (approximately) 1/255, mapping each byte
-- into the [0, 1] float range.
makeFontColorGLubyte :: GLubyte -> GLubyte -> GLubyte -> GLubyte -> FontColor
makeFontColorGLubyte r g b a =
    FontColor (0.003921569 * fI r) (0.003921569 * fI g)
              (0.003921569 * fI b) (0.003921569 * fI a)
-- | Scale the alpha channel by the given factor, leaving the color
-- channels untouched. (fixme: premultiplied?)
fontColorScaleAlpha :: FontColor -> Float -> FontColor
fontColorScaleAlpha color scale =
    color { fontcolorA = rTF scale * fontcolorA color }
-- | Replace the alpha channel with the given value, leaving the color
-- channels untouched. (fixme: premultiplied?)
fontColorChangeAlpha :: FontColor -> Float -> FontColor
-- The old alpha is intentionally discarded, so bind it with a wildcard
-- instead of an unused name (silences -Wunused-matches and documents intent).
fontColorChangeAlpha (FontColor r g b _) a' =
    FontColor r g b (rTF a')
| karamellpelle/MEnv | source/Font/FontColor.hs | bsd-2-clause | 2,757 | 0 | 9 | 614 | 362 | 205 | 157 | 37 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Codec.Baduk.Parser.WBaduk (parseNGF) where
import Data.Baduk.Game
import Text.Parsec
import Text.Parsec.Text
import Control.Applicative hiding (optional, (<|>))
import Data.Time.Format
import System.Locale
import qualified Data.Text as T
import qualified Data.Map as M
-- | Decode a rank-type character from an NGF header: 'D' (Dan),
-- 'K' (Kyu) or 'P' (Pro). Any other character indicates a malformed
-- file; fail with a descriptive message rather than an opaque
-- pattern-match error (the original was non-exhaustive).
readRankType 'D' = Dan
readRankType 'K' = Kyu
readRankType 'P' = Pro
readRankType c   = error $ "readRankType: unexpected rank type " ++ show c
-- | NGF lines are CRLF-terminated.
eol = string "\r\n"
-- | Consume the rest of the current line, including its CRLF terminator.
chomp = do skipMany $ noneOf "\r\n"
           eol
-- | Parse a line containing a single decimal number.
numberLine = read <$> many1 digit <* eol
-- | Parse a player line: a space-free name, whitespace, then a rank such
-- as @3D@, @15K@ or @9P@. An optional trailing @*@ after the rank letter
-- is consumed and ignored.
playerLine = Player <$> name <*> rank
    where name = T.pack <$> many1 (noneOf " ")
          rank = make <$> (spaces *> many1 digit)
                      <*> oneOf "DKP" <* optional (char '*') <* eol
          make r t = Rank (read r) (readRankType t)
-- | Board coordinate letters used by WBaduk NGF (20 letters, A..T).
coords = "ABCDEFGHIJKLMNOPQRST"
-- | Letter -> 0-based index lookup for 'coords'.
cmap = M.fromList $ zip coords [0..19]
-- The 'M.!' here is safe: 'oneOf coords' only yields keys present in 'cmap'.
pCoord = (cmap M.!) <$> oneOf coords
-- | Parse a move line of the form @PM..C<x><y>..@, where C is 'B' or 'W'
-- and x/y are coordinate letters. A move at (0, 0) is interpreted as a
-- pass. 'ch2c' is partial but safe here: the preceding @oneOf "BW"@
-- guarantees it only ever sees 'B' or 'W'.
moveLine = (mv . ch2c) <$> (string "PM" *> count 2 anyChar *> oneOf "BW")
                       <*> pCoord <*> pCoord <* count 2 anyChar <* eol
    where mv c 0 0 = Pass c
          mv c x y = Stone c x y
          ch2c 'B' = Black
          ch2c 'W' = White
-- | Parse the game-result line, e.g. @White wins by 2.5@, @Black wins by
-- resign@, or @White loses on time@. The winner is the named player when
-- the verb is "wins", and the other color when it is "loses".
resultLine = Win <$> pWinner <*> pMargin
    where pWinner = (flip winner) <$> (((string "White") <|> (string "Black")) <* space)
                    <*> ((string "wins") <|> (string "loses"))
          pMargin = margin <$> (spaces *> ((string "on") <|> (string "by")))
                    <*> (spaces *> ((string "time")
                                    <|> (string "resign")
                                    <|> pPoints))
                    <* chomp
          -- A trailing '.' means a half point, so append ".5"; the case is
          -- exhaustive because optionMaybe (char '.') can only yield
          -- Nothing or Just '.'.
          pPoints = do whole <- many1 digit
                       half <- optionMaybe (char '.')
                       return $ case half of
                                  Nothing -> whole
                                  Just '.' -> whole ++ ".5"
          winner "wins" = s2c
          winner "loses" = other . s2c
          s2c "White" = White
          s2c "Black" = Black
          other Black = White
          other White = Black
          margin "on" "time" = Time
          margin "by" "resign" = Resignation
          margin "by" p = Points (read p)
-- | Parse a full WBaduk NGF game record: header lines (board size, the
-- two players, handicap, komi, date), then the result, a move count, and
-- exactly that many move lines.
-- NOTE(review): the first player line is bound to White and the second to
-- Black — confirm against the NGF format description.
ngf = do chomp
         size <- numberLine
         white <- playerLine
         black <- playerLine
         chomp
         handi <- numberLine
         chomp
         komii <- numberLine
         -- With no handicap the komi field is ignored and 0.5 is assumed;
         -- otherwise the integer komi field is used as-is.
         let komi = if handi == 0 then 0.5 else fromIntegral komii
         tline <- many1 $ noneOf "]"
         let time = readTime defaultTimeLocale "%0Y%m%d [%R" tline
         chomp
         chomp -- This field might be minutes of main time?
         result <- resultLine
         nMoves <- numberLine
         moves <- count nMoves moveLine
         return $ Game size black white handi komi time moves result
-- | Run the 'ngf' parser; the string "parseNGF" is used as the source
-- name in error messages.
parseNGF = parse ngf "parseNGF"
| frodwith/baduk-formats | src/Codec/Baduk/Parser/WBaduk.hs | bsd-3-clause | 2,905 | 0 | 15 | 1,116 | 874 | 443 | 431 | 71 | 7 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Control.Access.RoleBased.Internal.Types
( module Control.Access.RoleBased.Internal.Role
, module Control.Access.RoleBased.Internal.Rule
, RoleMonad(..)
, RuleChecker(..)
) where
------------------------------------------------------------------------------
import Control.Applicative
import Control.Monad.Reader
import Control.Monad.Logic
------------------------------------------------------------------------------
import Control.Access.RoleBased.Internal.Role
import Control.Access.RoleBased.Internal.Rule
------------------------------------------------------------------------------
-- TODO: should the monads be transformers here? If they were, you could check
-- more complex predicates here
------------------------------------------------------------------------------
-- | Backtracking role computation: a newtype over 'Logic', with all
-- instances (including 'MonadLogic') derived from the underlying monad.
newtype RoleMonad a = RoleMonad { _unRC :: Logic a }
  deriving (Alternative, Applicative, Functor, Monad, MonadPlus, MonadLogic)
------------------------------------------------------------------------------
-- | Rule-checking computation: a 'RoleMonad' with read access to a
-- 'RoleMonad' 'Role' environment (via 'ReaderT').
newtype RuleChecker a = RuleChecker (ReaderT (RoleMonad Role) RoleMonad a)
  deriving (Alternative, Applicative, Functor, Monad, MonadPlus, MonadLogic)
| sopvop/access-rolebased | src/Control/Access/RoleBased/Internal/Types.hs | bsd-3-clause | 1,348 | 0 | 9 | 191 | 182 | 118 | 64 | 17 | 0 |
module Extensions where
-- | Union of all supported audio file-extension families; each
-- constructor wraps the per-format extension type declared below.
data Extension
    = ASFExt ASFExt
    | FLACExt FLACExt
    | M4AExt M4AExt
    | MonkeysAudioExt MonkeysAudioExt
    | MP3Ext MP3Ext
    | MusepackExt MusepackExt
    | OggSpeexExt OggSpeexExt
    | OggVorbisExt OggVorbisExt
    | TrueAudioExt TrueAudioExt
    | WavPackExt WavPackExt
    | OptimFROGExt OptimFROGExt
    deriving Show
-- | ASF container extensions.
data ASFExt
    = ASF
    | WMA
    deriving Show
-- | FLAC extension.
data FLACExt
    = FLAC
    deriving Show
-- | MPEG-4 audio extensions.
data M4AExt
    = M4A
    | MP4
    | M4B
    | M4P
    deriving Show
-- | Monkey's Audio extension.
data MonkeysAudioExt
    = APE
    deriving Show
-- | MP3 extension.
data MP3Ext
    = MP3
    deriving Show
-- | Musepack extensions (MPPlus presumably stands for the literal
-- @mp+@ suffix — confirm where these are mapped to strings).
data MusepackExt
    = MPC
    | MPPlus
    | MPP
    deriving Show
-- | Ogg Speex extension.
data OggSpeexExt
    = SPX
    deriving Show
-- | Ogg Vorbis extensions.
data OggVorbisExt
    = OGG
    | OGA
    deriving Show
-- | True Audio extension.
data TrueAudioExt
    = TTA
    deriving Show
-- | WavPack extension.
data WavPackExt
    = WV
    deriving Show
-- | OptimFROG extension.
data OptimFROGExt
    = OFR
    deriving Show
| kylcarte/harihara | src/Extensions.hs | bsd-3-clause | 1,012 | 0 | 6 | 380 | 197 | 120 | 77 | 54 | 0 |
module Models.Talk where
import Control.Monad (liftM, mzero, void)
import Data.Aeson
import qualified Data.ByteString.Char8 as C
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Read as T
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Database.Persist
import qualified Database.Redis as KV
import GHC.Generics (Generic)
import Network.HTTP.Client.Conduit (HttpException, httpLbs,
parseUrlThrow, responseBody)
import RIO hiding (id)
import Text.HTML.DOM (parseLBS)
import Text.XML.Cursor (fromDocument)
import Types (AppRIO, runDB)
import Config (Config (..))
import Model
import qualified Models.RedisKeys as Keys
import Models.Types (mkTalkUrl)
import Web.TED.TalkPage (parseDescription, parseImage,
parseTalkObject)
-- | Talk metadata as decoded from the JSON talk object (see the
-- 'FromJSON' instance below for the exact field mapping).
data TalkObj = TalkObj
  { id          :: Int        -- ^ numeric talk id (decoded from a JSON string)
  , name        :: Text
  , slug        :: Text
  , filmedAt    :: UTCTime
  , publishedAt :: UTCTime
  , languages   :: [Language]
  , mediaSlug   :: Text       -- ^ taken from the JSON "mediaIdentifier" field
  } deriving (Generic)
-- | Decode a 'TalkObj' from the talk JSON object. The "id" field arrives
-- as a string of digits and is parsed with 'T.decimal'.
instance FromJSON TalkObj where
  parseJSON (Object v) = do
    idText <- v .: "id"
    tid <- case fst <$> T.decimal idText of
      Right tid -> pure tid
      _ -> fail "id is not int"
    TalkObj
      <$> pure tid
      <*> v .: "name"
      <*> v .: "slug"
      -- NOTE(review): both filmedAt and publishedAt are read from the
      -- same "published" key — confirm whether a separate "filmed"
      -- timestamp exists in the payload.
      <*> liftM posixSecondsToUTCTime (v .: "published")
      <*> liftM posixSecondsToUTCTime (v .: "published")
      <*> v .: "languages"
      <*> v .: "mediaIdentifier"
  parseJSON _ = mzero
-- | Fetch a page of talks from the database, newest id first.
-- @offset@ rows are skipped and at most @limit@ rows are returned.
getTalks :: Int -> Int -> AppRIO [Entity Talk]
getTalks offset limit = do
  runDB $ selectList []
    [ Desc TalkId
    , LimitTo limit
    , OffsetBy offset
    ]
-- | Look up a talk by id, using a Redis key as a freshness marker:
-- if the cache key is present, read from the local database; otherwise
-- (key missing or Redis error) re-fetch from the remote URL via
-- 'saveToDB'.
getTalk :: Int -> Text -> AppRIO (Maybe (Entity Talk))
getTalk tid url = do
  Config { kvConn } <- ask
  cached <- liftIO $ KV.runRedis kvConn $
    KV.get $ Keys.cache $ fromIntegral tid
  case cached of
    Right (Just _) -> getTalkById tid (Just url)
    Right Nothing -> saveToDB url
    Left _ -> saveToDB url
-- | Look up a talk by id in the database; on a miss, fall back to
-- fetching and saving it from the given URL (when one is provided).
getTalkById :: Int -> Maybe Text -> AppRIO (Maybe (Entity Talk))
getTalkById tid mUrl = do
  xs <- runDB $ getEntity $ TalkKey tid
  case xs of
    Just talk -> return $ Just talk
    _ -> maybe (return Nothing) saveToDB mUrl
-- | Convert an 'Either' to a 'Maybe', discarding any 'Left' value.
hush :: Either a b -> Maybe b
hush = either (const Nothing) Just
-- | Resolve a talk by its slug. The slug->id mapping is kept in Redis;
-- on a hit the id is parsed and looked up via 'getTalk', on a miss (or
-- an unparsable id) the talk is fetched from its URL and saved.
getTalkBySlug :: Text -> AppRIO (Maybe (Entity Talk))
getTalkBySlug slug = do
  Config { kvConn } <- ask
  mtid <- liftIO $ fmap (join . hush) <$> KV.runRedis kvConn $ KV.get $ Keys.slug slug
  case mtid of
    Just tid ->
      case readMaybe $ C.unpack tid of
        Just tid' -> getTalk tid' url
        Nothing -> pure Nothing
    Nothing ->
      saveToDB url
  where
    url = mkTalkUrl slug
-- | Fetch a talk from its URL and persist it: the cache marker (with a
-- 24-hour TTL) and the slug->id mapping are written to Redis in one
-- MULTI/EXEC transaction, then the row is upserted into the database.
saveToDB :: Text -> AppRIO (Maybe (Entity Talk))
saveToDB url = do
  Config{..} <- ask
  mTalk <- fetchTalk url
  case mTalk of
    Just entity@(Entity talkId talk) -> do
      void $ liftIO $ KV.runRedis kvConn $ KV.multiExec $ do
        void $ KV.setex (Keys.cache $ unTalkKey talkId) (3600*24) ""
        KV.set (Keys.slug $ talkSlug talk) (C.pack $ show $ unTalkKey talkId)
      runDB $ repsert talkId talk
      return $ Just entity
    Nothing -> return Nothing
-- | Download a talk page and build a 'Talk' from it: the description and
-- image are scraped from the HTML, the rest is decoded from the embedded
-- talk JSON object. Any 'HttpException' is caught and mapped to
-- 'Nothing'; a JSON decode failure is logged and also yields 'Nothing'.
fetchTalk :: Text -> AppRIO (Maybe (Entity Talk))
fetchTalk url = do
  handle (\(_::HttpException) -> return Nothing) $ do
    req <- parseUrlThrow $ T.unpack url
    res <- httpLbs req
    let
      body = responseBody res
      cursor = fromDocument $ parseLBS body
      desc = parseDescription cursor
      img = parseImage cursor
      core = parseTalkObject body
    case eitherDecode core of
      Right TalkObj{..} -> do
        return $ Just $ Entity (TalkKey $ fromIntegral id) (Talk
          { talkName = name
          , talkSlug = slug
          , talkFilmedAt = filmedAt
          , talkPublishedAt = publishedAt
          , talkDescription = desc
          , talkImage = img
          , talkLanguages = toJSON languages
          , talkMediaSlug = mediaSlug
          , talkMediaPad = 0.0
          })
      Left err -> do
        logErrorS "fetchTalk" $ fromString err
        pure Nothing
| rnons/ted2srt | backend/src/Models/Talk.hs | bsd-3-clause | 4,599 | 0 | 20 | 1,598 | 1,442 | 738 | 704 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Data.Csv.Types
(
-- * Core CSV types
Csv
, Record
, Header
, Name
, NamedRecord
, Field
, toNamedRecord
-- * Header handling
, HasHeader(..)
) where
import qualified Data.ByteString as S
import qualified Data.HashMap.Strict as HM
import Data.Vector (Vector)
import qualified Data.Vector as V
import GHC.Generics (Generic)
import Data.Typeable (Typeable)
import Data.Data (Data)
-- | CSV data represented as a Haskell vector of vectors of
-- bytestrings.
type Csv = Vector Record
-- | A record corresponds to a single line in a CSV file.
type Record = Vector Field
-- | The header corresponds to the first line of a CSV file. Not all CSV
-- files have a header.
type Header = Vector Name
-- | A header has one or more names, each describing the data in the
-- column it labels.
type Name = S.ByteString
-- | A record corresponds to a single line in a CSV file, indexed by
-- the column name rather than the column index.
type NamedRecord = HM.HashMap S.ByteString S.ByteString
-- | A single field within a record.
type Field = S.ByteString
-- | Pair each field of a 'Record' with the corresponding column name
-- from the 'Header', producing a 'NamedRecord'. The 'Header' and
-- 'Record' must be of the same length.
toNamedRecord :: Header -> Record -> NamedRecord
toNamedRecord hdr v = HM.fromList pairs
  where
    pairs = V.toList (V.zip hdr v)
-- | Flag indicating whether the CSV data is preceded by a header line.
data HasHeader = NoHeader -- ^ The CSV data is not preceded by a header
               | HasHeader -- ^ The CSV data is preceded by a header
    deriving (Show, Read, Eq, Ord, Enum, Typeable, Data, Generic)
| treeowl/cassava | Data/Csv/Types.hs | bsd-3-clause | 1,661 | 0 | 7 | 360 | 268 | 166 | 102 | 30 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is the command line front end to the Simple build system. When given
-- the parsed command-line args and package information, is able to perform
-- basic commands like configure, build, install, register, etc.
--
-- This module exports the main functions that Setup.hs scripts use. It
-- re-exports the 'UserHooks' type, the standard entry points like
-- 'defaultMain' and 'defaultMainWithHooks' and the predefined sets of
-- 'UserHooks' that custom @Setup.hs@ scripts can extend to add their own
-- behaviour.
--
-- This module isn't called \"Simple\" because it's simple. Far from
-- it. It's called \"Simple\" because it does complicated things to
-- simple software.
--
-- The original idea was that there could be different build systems that all
-- presented the same compatible command line interfaces. There is still a
-- "Distribution.Make" system but in practice no packages use it.
{-
Work around this warning:
libraries/Cabal/Distribution/Simple.hs:78:0:
Warning: In the use of `runTests'
(imported from Distribution.Simple.UserHooks):
Deprecated: "Please use the new testing interface instead!"
-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Distribution.Simple (
module Distribution.Package,
module Distribution.Version,
module Distribution.License,
module Distribution.Simple.Compiler,
module Language.Haskell.Extension,
-- * Simple interface
defaultMain, defaultMainNoRead, defaultMainArgs,
-- * Customization
UserHooks(..), Args,
defaultMainWithHooks, defaultMainWithHooksArgs,
-- ** Standard sets of hooks
simpleUserHooks,
autoconfUserHooks,
defaultUserHooks, emptyUserHooks,
-- ** Utils
defaultHookedPackageDesc
) where
-- local
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.UserHooks
import Distribution.Package --must not specify imports, since we're exporting module.
import Distribution.PackageDescription
( PackageDescription(..), GenericPackageDescription, Executable(..)
, updatePackageDescription, hasLibs
, HookedBuildInfo, emptyHookedBuildInfo )
import Distribution.PackageDescription.Parse
( readPackageDescription, readHookedBuildInfo )
import Distribution.PackageDescription.Configuration
( flattenPackageDescription )
import Distribution.Simple.Program
( defaultProgramConfiguration, builtinPrograms
, restoreProgramConfiguration)
import Distribution.Simple.Program.Db
import Distribution.Simple.Program.Find
import Distribution.Simple.Program.Run
import Distribution.Simple.Program.Types
import Distribution.Simple.PreProcess (knownSuffixHandlers, PPSuffixHandler)
import Distribution.Simple.Setup
import Distribution.Simple.Command
import Distribution.Simple.Build ( build, repl )
import Distribution.Simple.SrcDist ( sdist )
import Distribution.Simple.Register
( register, unregister )
import Distribution.Simple.Configure
( getPersistBuildConfig, maybeGetPersistBuildConfig
, writePersistBuildConfig, checkPersistBuildConfigOutdated
, configure, checkForeignDeps, findDistPrefOrDefault )
import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) )
import Distribution.Simple.Bench (bench)
import Distribution.Simple.BuildPaths ( srcPref)
import Distribution.Simple.Test (test)
import Distribution.Simple.Install (install)
import Distribution.Simple.Haddock (haddock, hscolour)
import Distribution.Simple.Utils
(die, notice, info, warn, setupMessage, chattyTry,
defaultPackageDesc, defaultHookedPackageDesc,
cabalVersion, topHandler )
import Distribution.Utils.NubList
import Distribution.Verbosity
import Language.Haskell.Extension
import Distribution.Version
import Distribution.License
import Distribution.Text
( display )
-- Base
import System.Environment(getArgs, getProgName)
import System.Directory(removeFile, doesFileExist,
doesDirectoryExist, removeDirectoryRecursive,
canonicalizePath)
import System.Exit (exitWith,ExitCode(..))
import System.FilePath(searchPathSeparator)
import Distribution.Compat.Environment (getEnvironment)
import Control.Monad (when)
import Data.Foldable (traverse_)
import Data.List (intercalate, unionBy, nub, (\\))
-- | A simple implementation of @main@ for a Cabal setup script.
-- It reads the package description file using IO, and performs the
-- action specified on the command line.
defaultMain :: IO ()
defaultMain = getArgs >>= defaultMainHelper simpleUserHooks
-- | A version of 'defaultMain' that is passed the command line
-- arguments, rather than getting them from the environment.
defaultMainArgs :: [String] -> IO ()
defaultMainArgs = defaultMainHelper simpleUserHooks
-- | A customizable version of 'defaultMain': the given 'UserHooks'
-- replace 'simpleUserHooks'.
defaultMainWithHooks :: UserHooks -> IO ()
defaultMainWithHooks hooks = getArgs >>= defaultMainHelper hooks
-- | A customizable version of 'defaultMain' that also takes the command
-- line arguments.
defaultMainWithHooksArgs :: UserHooks -> [String] -> IO ()
defaultMainWithHooksArgs = defaultMainHelper
-- | Like 'defaultMain', but accepts the package description as input
-- rather than using IO to read it (implemented via the 'readDesc' hook).
defaultMainNoRead :: GenericPackageDescription -> IO ()
defaultMainNoRead pkg_descr =
  getArgs >>=
  defaultMainHelper simpleUserHooks { readDesc = return (Just pkg_descr) }
-- | Shared driver for all @defaultMain*@ entry points: parse the global
-- command line, then dispatch to the per-command action (each wired to
-- the corresponding 'UserHooks' hook), printing help/version/errors as
-- requested. 'topHandler' installs the standard top-level error handler.
defaultMainHelper :: UserHooks -> Args -> IO ()
defaultMainHelper hooks args = topHandler $
  case commandsRun (globalCommand commands) commands args of
    CommandHelp help -> printHelp help
    CommandList opts -> printOptionsList opts
    CommandErrors errs -> printErrors errs
    CommandReadyToGo (flags, commandParse) ->
      case commandParse of
        _ | fromFlag (globalVersion flags) -> printVersion
          | fromFlag (globalNumericVersion flags) -> printNumericVersion
        CommandHelp help -> printHelp help
        CommandList opts -> printOptionsList opts
        CommandErrors errs -> printErrors errs
        CommandReadyToGo action -> action
  where
    printHelp help = getProgName >>= putStr . help
    printOptionsList = putStr . unlines
    printErrors errs = do
      putStr (intercalate "\n" errs)
      exitWith (ExitFailure 1)
    printNumericVersion = putStrLn $ display cabalVersion
    printVersion = putStrLn $ "Cabal library version "
                            ++ display cabalVersion
    -- Known programs = built-in defaults plus whatever the hooks add.
    progs = addKnownPrograms (hookedPrograms hooks) defaultProgramConfiguration
    commands =
      [configureCommand progs `commandAddAction` \fs as ->
                              configureAction hooks fs as >> return ()
      ,buildCommand progs `commandAddAction` buildAction hooks
      ,replCommand progs `commandAddAction` replAction hooks
      ,installCommand `commandAddAction` installAction hooks
      ,copyCommand `commandAddAction` copyAction hooks
      ,haddockCommand `commandAddAction` haddockAction hooks
      ,cleanCommand `commandAddAction` cleanAction hooks
      ,sdistCommand `commandAddAction` sdistAction hooks
      ,hscolourCommand `commandAddAction` hscolourAction hooks
      ,registerCommand `commandAddAction` registerAction hooks
      ,unregisterCommand `commandAddAction` unregisterAction hooks
      ,testCommand `commandAddAction` testAction hooks
      ,benchmarkCommand `commandAddAction` benchAction hooks
      ]
-- | Combine the preprocessors in the given hooks with the
-- preprocessors built into cabal. Hook-provided handlers take
-- precedence: 'unionBy' keeps the element from its first (hook) list
-- when two handlers share a suffix.
allSuffixHandlers :: UserHooks
                  -> [PPSuffixHandler]
allSuffixHandlers hooks
    = overridesPP (hookedPreProcessors hooks) knownSuffixHandlers
    where
      overridesPP :: [PPSuffixHandler] -> [PPSuffixHandler] -> [PPSuffixHandler]
      overridesPP = unionBy (\x y -> fst x == fst y)
-- | Run the @configure@ command: run 'preConf', obtain the package
-- description (from the 'readDesc' hook or the default .cabal file),
-- run 'confHook', persist the resulting 'LocalBuildInfo' (with the
-- .cabal path and extra args remembered for later re-configuration),
-- and finally run 'postConf'.
configureAction :: UserHooks -> ConfigFlags -> Args -> IO LocalBuildInfo
configureAction hooks flags args = do
    distPref <- findDistPrefOrDefault (configDistPref flags)
    let flags' = flags { configDistPref = toFlag distPref }
    pbi <- preConf hooks args flags'
    (mb_pd_file, pkg_descr0) <- confPkgDescr
    --get_pkg_descr (configVerbosity flags')
    --let pkg_descr = updatePackageDescription pbi pkg_descr0
    let epkg_descr = (pkg_descr0, pbi)
    --(warns, ers) <- sanityCheckPackage pkg_descr
    --errorOut (configVerbosity flags') warns ers
    localbuildinfo0 <- confHook hooks epkg_descr flags'
    -- remember the .cabal filename if we know it
    -- and all the extra command line args
    let localbuildinfo = localbuildinfo0 {
                           pkgDescrFile = mb_pd_file,
                           extraConfigArgs = args
                         }
    writePersistBuildConfig distPref localbuildinfo
    let pkg_descr = localPkgDescr localbuildinfo
    postConf hooks args flags' pkg_descr localbuildinfo
    return localbuildinfo
  where
    verbosity = fromFlag (configVerbosity flags)
    -- Prefer the description supplied by the 'readDesc' hook; otherwise
    -- locate and parse the package's .cabal file (path canonicalized so
    -- later freshness checks compare stable paths).
    confPkgDescr :: IO (Maybe FilePath, GenericPackageDescription)
    confPkgDescr = do
      mdescr <- readDesc hooks
      case mdescr of
        Just descr -> return (Nothing, descr)
        Nothing -> do
          pdfile0 <- defaultPackageDesc verbosity
          pdfile <- canonicalizePath pdfile0
          descr <- readPackageDescription verbosity pdfile
          return (Just pdfile, descr)
-- | Run the @build@ command: load the saved configuration, re-resolve
-- any program paths/args given on this command line, then run the
-- pre/cmd/post build hooks.
buildAction :: UserHooks -> BuildFlags -> Args -> IO ()
buildAction hooks flags args = do
  distPref <- findDistPrefOrDefault (buildDistPref flags)
  let verbosity = fromFlag $ buildVerbosity flags
      flags' = flags { buildDistPref = toFlag distPref }
  lbi <- getBuildConfig hooks verbosity distPref
  progs <- reconfigurePrograms verbosity
             (buildProgramPaths flags')
             (buildProgramArgs flags')
             (withPrograms lbi)
  hookedAction preBuild buildHook postBuild
               (return lbi { withPrograms = progs })
               hooks flags' { buildArgs = args } args
-- | Run the @repl@ command. This is wired manually rather than through
-- 'hookedAction' so the extra args can be forwarded to 'replHook'.
replAction :: UserHooks -> ReplFlags -> Args -> IO ()
replAction hooks flags args = do
  distPref <- findDistPrefOrDefault (replDistPref flags)
  let verbosity = fromFlag $ replVerbosity flags
      flags' = flags { replDistPref = toFlag distPref }
  lbi <- getBuildConfig hooks verbosity distPref
  progs <- reconfigurePrograms verbosity
             (replProgramPaths flags')
             (replProgramArgs flags')
             (withPrograms lbi)
  pbi <- preRepl hooks args flags'
  let lbi' = lbi { withPrograms = progs }
      pkg_descr0 = localPkgDescr lbi'
      -- Merge any hook-supplied build info into the package description.
      pkg_descr = updatePackageDescription pbi pkg_descr0
  replHook hooks pkg_descr lbi' hooks flags' args
  postRepl hooks args flags' pkg_descr lbi'
-- | Run the @hscolour@ command via the standard pre/cmd/post hook
-- sequence, using the saved build configuration.
hscolourAction :: UserHooks -> HscolourFlags -> Args -> IO ()
hscolourAction hooks flags args = do
    distPref <- findDistPrefOrDefault (hscolourDistPref flags)
    let verbosity = fromFlag $ hscolourVerbosity flags
        flags' = flags { hscolourDistPref = toFlag distPref }
    hookedAction preHscolour hscolourHook postHscolour
                 (getBuildConfig hooks verbosity distPref)
                 hooks flags' args
-- | Run the @haddock@ command: load the saved configuration,
-- re-resolve program paths/args given on this command line, then run
-- the pre/cmd/post haddock hooks.
haddockAction :: UserHooks -> HaddockFlags -> Args -> IO ()
haddockAction hooks flags args = do
  distPref <- findDistPrefOrDefault (haddockDistPref flags)
  let verbosity = fromFlag $ haddockVerbosity flags
      flags' = flags { haddockDistPref = toFlag distPref }
  lbi <- getBuildConfig hooks verbosity distPref
  progs <- reconfigurePrograms verbosity
             (haddockProgramPaths flags')
             (haddockProgramArgs flags')
             (withPrograms lbi)
  hookedAction preHaddock haddockHook postHaddock
               (return lbi { withPrograms = progs })
               hooks flags' args
-- | Run the @clean@ command. Unlike most actions this re-reads the
-- .cabal file directly (a clean must work even without a saved build
-- configuration) and deliberately skips the hooked-build-info sanity
-- check so a broken .buildinfo cannot prevent cleaning.
cleanAction :: UserHooks -> CleanFlags -> Args -> IO ()
cleanAction hooks flags args = do
    distPref <- findDistPrefOrDefault (cleanDistPref flags)
    let flags' = flags { cleanDistPref = toFlag distPref }
    pbi <- preClean hooks args flags'
    pdfile <- defaultPackageDesc verbosity
    ppd <- readPackageDescription verbosity pdfile
    let pkg_descr0 = flattenPackageDescription ppd
    -- We don't sanity check for clean as an error
    -- here would prevent cleaning:
    --sanityCheckHookedBuildInfo pkg_descr0 pbi
    let pkg_descr = updatePackageDescription pbi pkg_descr0
    cleanHook hooks pkg_descr () hooks flags'
    postClean hooks args flags' pkg_descr ()
  where
    verbosity = fromFlag (cleanVerbosity flags)
-- | Run the @copy@ command via the standard pre/cmd/post hook sequence,
-- using the saved build configuration.
copyAction :: UserHooks -> CopyFlags -> Args -> IO ()
copyAction hooks flags args = do
    distPref <- findDistPrefOrDefault (copyDistPref flags)
    let verbosity = fromFlag $ copyVerbosity flags
        flags' = flags { copyDistPref = toFlag distPref }
    hookedAction preCopy copyHook postCopy
                 (getBuildConfig hooks verbosity distPref)
                 hooks flags' args
-- | Run the @install@ command via the standard pre/cmd/post hook
-- sequence, using the saved build configuration.
installAction :: UserHooks -> InstallFlags -> Args -> IO ()
installAction hooks flags args = do
    distPref <- findDistPrefOrDefault (installDistPref flags)
    let verbosity = fromFlag $ installVerbosity flags
        flags' = flags { installDistPref = toFlag distPref }
    hookedAction preInst instHook postInst
                 (getBuildConfig hooks verbosity distPref)
                 hooks flags' args
-- | Run the @sdist@ command. The saved build configuration is optional
-- here ('maybeGetPersistBuildConfig'), since a source distribution can
-- be made from an unconfigured tree; the .cabal file is re-read
-- directly.
sdistAction :: UserHooks -> SDistFlags -> Args -> IO ()
sdistAction hooks flags args = do
    distPref <- findDistPrefOrDefault (sDistDistPref flags)
    let flags' = flags { sDistDistPref = toFlag distPref }
    pbi <- preSDist hooks args flags'
    mlbi <- maybeGetPersistBuildConfig distPref
    pdfile <- defaultPackageDesc verbosity
    ppd <- readPackageDescription verbosity pdfile
    let pkg_descr0 = flattenPackageDescription ppd
    sanityCheckHookedBuildInfo pkg_descr0 pbi
    let pkg_descr = updatePackageDescription pbi pkg_descr0
    sDistHook hooks pkg_descr mlbi hooks flags'
    postSDist hooks args flags' pkg_descr mlbi
  where
    verbosity = fromFlag (sDistVerbosity flags)
-- | Run the @test@ command: first the deprecated 'runTests' hook (for
-- the old testing interface), then the new pre/cmd/post test hooks.
testAction :: UserHooks -> TestFlags -> Args -> IO ()
testAction hooks flags args = do
    distPref <- findDistPrefOrDefault (testDistPref flags)
    let verbosity = fromFlag $ testVerbosity flags
        flags' = flags { testDistPref = toFlag distPref }
    localBuildInfo <- getBuildConfig hooks verbosity distPref
    let pkg_descr = localPkgDescr localBuildInfo
    -- It is safe to do 'runTests' before the new test handler because the
    -- default action is a no-op and if the package uses the old test interface
    -- the new handler will find no tests.
    runTests hooks args False pkg_descr localBuildInfo
    hookedActionWithArgs preTest testHook postTest
            (getBuildConfig hooks verbosity distPref)
            hooks flags' args
-- | Run the @bench@ command via the pre/cmd/post benchmark hooks
-- (the args-forwarding variant, so benchmark names reach the hook).
benchAction :: UserHooks -> BenchmarkFlags -> Args -> IO ()
benchAction hooks flags args = do
    distPref <- findDistPrefOrDefault (benchmarkDistPref flags)
    let verbosity = fromFlag $ benchmarkVerbosity flags
        flags' = flags { benchmarkDistPref = toFlag distPref }
    hookedActionWithArgs preBench benchHook postBench
            (getBuildConfig hooks verbosity distPref)
            hooks flags' args
-- | Run the @register@ command via the standard pre/cmd/post hook
-- sequence, using the saved build configuration.
registerAction :: UserHooks -> RegisterFlags -> Args -> IO ()
registerAction hooks flags args = do
    distPref <- findDistPrefOrDefault (regDistPref flags)
    let verbosity = fromFlag $ regVerbosity flags
        flags' = flags { regDistPref = toFlag distPref }
    hookedAction preReg regHook postReg
                 (getBuildConfig hooks verbosity distPref)
                 hooks flags' args
-- | Run the @unregister@ command via the standard pre/cmd/post hook
-- sequence; shares 'RegisterFlags' with 'registerAction'.
unregisterAction :: UserHooks -> RegisterFlags -> Args -> IO ()
unregisterAction hooks flags args = do
    distPref <- findDistPrefOrDefault (regDistPref flags)
    let verbosity = fromFlag $ regVerbosity flags
        flags' = flags { regDistPref = toFlag distPref }
    hookedAction preUnreg unregHook postUnreg
                 (getBuildConfig hooks verbosity distPref)
                 hooks flags' args
-- | Run a command's pre/cmd/post hooks in order. This variant is for
-- command hooks that do not take the raw argument list; it adapts them
-- to 'hookedActionWithArgs' by dropping the args parameter.
hookedAction :: (UserHooks -> Args -> flags -> IO HookedBuildInfo)
             -> (UserHooks -> PackageDescription -> LocalBuildInfo
                           -> UserHooks -> flags -> IO ())
             -> (UserHooks -> Args -> flags -> PackageDescription
                           -> LocalBuildInfo -> IO ())
             -> IO LocalBuildInfo
             -> UserHooks -> flags -> Args -> IO ()
hookedAction pre_hook cmd_hook =
    hookedActionWithArgs pre_hook (\h _ pd lbi uh flags -> cmd_hook h pd lbi uh flags)
-- | Run a command's pre/cmd/post hooks in order: the pre hook produces
-- extra 'HookedBuildInfo', which is sanity-checked against the package
-- description and merged into it before the command hook and post hook
-- run.
hookedActionWithArgs :: (UserHooks -> Args -> flags -> IO HookedBuildInfo)
                     -> (UserHooks -> Args -> PackageDescription -> LocalBuildInfo
                                   -> UserHooks -> flags -> IO ())
                     -> (UserHooks -> Args -> flags -> PackageDescription
                                   -> LocalBuildInfo -> IO ())
                     -> IO LocalBuildInfo
                     -> UserHooks -> flags -> Args -> IO ()
hookedActionWithArgs pre_hook cmd_hook post_hook get_build_config hooks flags args = do
   pbi <- pre_hook hooks args flags
   localbuildinfo <- get_build_config
   let pkg_descr0 = localPkgDescr localbuildinfo
   --pkg_descr0 <- get_pkg_descr (get_verbose flags)
   sanityCheckHookedBuildInfo pkg_descr0 pbi
   let pkg_descr = updatePackageDescription pbi pkg_descr0
   -- TODO: should we write the modified package descr back to the
   -- localbuildinfo?
   cmd_hook hooks args pkg_descr localbuildinfo hooks flags
   post_hook hooks args flags pkg_descr localbuildinfo
-- | Reject 'HookedBuildInfo' that refers to components the package does
-- not have: library info for a library-less package, or info for an
-- executable name not declared in the .cabal file. The 'head' in the
-- error message is safe: that equation is guarded by
-- @not (null nonExistant)@.
sanityCheckHookedBuildInfo :: PackageDescription -> HookedBuildInfo -> IO ()
sanityCheckHookedBuildInfo PackageDescription { library = Nothing } (Just _,_)
    = die $ "The buildinfo contains info for a library, "
         ++ "but the package does not have a library."
sanityCheckHookedBuildInfo pkg_descr (_, hookExes)
    | not (null nonExistant)
    = die $ "The buildinfo contains info for an executable called '"
         ++ head nonExistant ++ "' but the package does not have a "
         ++ "executable with that name."
  where
    pkgExeNames = nub (map exeName (executables pkg_descr))
    hookExeNames = nub (map fst hookExes)
    nonExistant = hookExeNames \\ pkgExeNames
sanityCheckHookedBuildInfo _ _ = return ()
-- | Load the persisted 'LocalBuildInfo', restoring the (unserialized)
-- set of unconfigured programs. If the .cabal file has changed since
-- configuration, automatically re-configure with the previously used
-- options and return the fresh result.
getBuildConfig :: UserHooks -> Verbosity -> FilePath -> IO LocalBuildInfo
getBuildConfig hooks verbosity distPref = do
  lbi_wo_programs <- getPersistBuildConfig distPref
  -- Restore info about unconfigured programs, since it is not serialized
  let lbi = lbi_wo_programs {
        withPrograms = restoreProgramConfiguration
                       (builtinPrograms ++ hookedPrograms hooks)
                       (withPrograms lbi_wo_programs)
      }
  case pkgDescrFile lbi of
    Nothing -> return lbi
    Just pkg_descr_file -> do
      outdated <- checkPersistBuildConfigOutdated distPref pkg_descr_file
      if outdated
        then reconfigure pkg_descr_file lbi
        else return lbi
  where
    reconfigure :: FilePath -> LocalBuildInfo -> IO LocalBuildInfo
    reconfigure pkg_descr_file lbi = do
      notice verbosity $ pkg_descr_file ++ " has been changed. "
                      ++ "Re-configuring with most recently used options. "
                      ++ "If this fails, please run configure manually.\n"
      let cFlags = configFlags lbi
      let cFlags' = cFlags {
            -- Since the list of unconfigured programs is not serialized,
            -- restore it to the same value as normally used at the beginning
            -- of a configure run:
            configPrograms = restoreProgramConfiguration
                             (builtinPrograms ++ hookedPrograms hooks)
                             (configPrograms cFlags),
            -- Use the current, not saved verbosity level:
            configVerbosity = Flag verbosity
          }
      configureAction hooks cFlags' (extraConfigArgs lbi)
-- --------------------------------------------------------------------------
-- Cleaning
-- | Remove build artifacts: the whole dist/ directory plus any
-- 'extraTmpFiles' declared by the package. When @--save-configure@ is
-- set, the saved build configuration is read first and written back
-- afterwards so a later build does not need to re-configure.
clean :: PackageDescription -> CleanFlags -> IO ()
clean pkg_descr flags = do
    let distPref = fromFlagOrDefault defaultDistPref $ cleanDistPref flags
    notice verbosity "cleaning..."
    maybeConfig <- if fromFlag (cleanSaveConf flags)
                     then maybeGetPersistBuildConfig distPref
                     else return Nothing
    -- remove the whole dist/ directory rather than tracking exactly what files
    -- we created in there.
    chattyTry "removing dist/" $ do
      exists <- doesDirectoryExist distPref
      when exists (removeDirectoryRecursive distPref)
    -- Any extra files the user wants to remove
    mapM_ removeFileOrDirectory (extraTmpFiles pkg_descr)
    -- If the user wanted to save the config, write it back
    traverse_ (writePersistBuildConfig distPref) maybeConfig
  where
        -- Removes a path whether it is a file or a directory; silently
        -- does nothing when the path does not exist.
        removeFileOrDirectory :: FilePath -> IO ()
        removeFileOrDirectory fname = do
            isDir <- doesDirectoryExist fname
            isFile <- doesFileExist fname
            if isDir then removeDirectoryRecursive fname
              else when isFile $ removeFile fname
        verbosity = fromFlag (cleanVerbosity flags)
-- --------------------------------------------------------------------------
-- Default hooks
-- | Hooks that correspond to a plain instantiation of the
-- \"simple\" build system: each command hook is wired directly to the
-- corresponding implementation in this library, and 'postConf' runs the
-- foreign-dependency check.
simpleUserHooks :: UserHooks
simpleUserHooks =
    emptyUserHooks {
       confHook  = configure,
       postConf  = finalChecks,
       buildHook = defaultBuildHook,
       replHook  = defaultReplHook,
       copyHook  = \desc lbi _ f -> install desc lbi f, -- has correct 'copy' behavior with params
       testHook  = defaultTestHook,
       benchHook = defaultBenchHook,
       instHook  = defaultInstallHook,
       sDistHook = \p l h f -> sdist p l f srcPref (allSuffixHandlers h),
       cleanHook = \p _ _ f -> clean p f,
       hscolourHook = \p l h f -> hscolour p l (allSuffixHandlers h) f,
       haddockHook  = \p l h f -> haddock p l (allSuffixHandlers h) f,
       regHook   = defaultRegHook,
       unregHook = \p l _ f -> unregister p l f
      }
  where
    finalChecks _args flags pkg_descr lbi =
      checkForeignDeps pkg_descr lbi (lessVerbose verbosity)
      where
        verbosity = fromFlag (configVerbosity flags)
-- | Basic autoconf 'UserHooks':
--
-- * 'postConf' runs @.\/configure@, if present.
--
-- * the pre-hooks 'preBuild', 'preClean', 'preCopy', 'preInst',
--   'preReg' and 'preUnreg' read additional build information from
--   /package/@.buildinfo@, if present.
--
-- Thus @configure@ can use local system information to generate
-- /package/@.buildinfo@ and possibly other files.
{-# DEPRECATED defaultUserHooks
     "Use simpleUserHooks or autoconfUserHooks, unless you need Cabal-1.2\n compatibility in which case you must stick with defaultUserHooks" #-}
defaultUserHooks :: UserHooks
defaultUserHooks = autoconfUserHooks {
          -- Warn on every configure so users migrate off this entry point.
          confHook = \pkg flags -> do
                       let verbosity = fromFlag (configVerbosity flags)
                       warn verbosity
                         "defaultUserHooks in Setup script is deprecated."
                       confHook autoconfUserHooks pkg flags,
          postConf = oldCompatPostConf
    }
    -- This is the annoying old version that only runs configure if it exists.
    -- It's here for compatibility with existing Setup.hs scripts. See:
    -- https://github.com/haskell/cabal/issues/158
    where oldCompatPostConf args flags pkg_descr lbi
              = do let verbosity = fromFlag (configVerbosity flags)
                   noExtraFlags args
                   confExists <- doesFileExist "configure"
                   when confExists $
                       runConfigureScript verbosity
                         backwardsCompatHack flags lbi
                   pbi <- getHookedBuildInfo verbosity
                   sanityCheckHookedBuildInfo pkg_descr pbi
                   let pkg_descr' = updatePackageDescription pbi pkg_descr
                   postConf simpleUserHooks args flags pkg_descr' lbi
          backwardsCompatHack = True
autoconfUserHooks :: UserHooks
autoconfUserHooks
    = simpleUserHooks
      {
       -- Run ./configure after configuring, and have every pre-hook pick up
       -- the package's <pkg>.buildinfo file (if any) so that ./configure can
       -- feed system-specific settings back into the build.
       postConf = defaultPostConf,
       preBuild = \_ flags ->
         -- not using 'readHook' here because 'build' takes
         -- extra args
         getHookedBuildInfo $ fromFlag $ buildVerbosity flags,
       preClean = readHook cleanVerbosity,
       preCopy = readHook copyVerbosity,
       preInst = readHook installVerbosity,
       preHscolour = readHook hscolourVerbosity,
       preHaddock = readHook haddockVerbosity,
       preReg = readHook regVerbosity,
       preUnreg = readHook regVerbosity
      }
    where defaultPostConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO ()
          defaultPostConf args flags pkg_descr lbi
              = do let verbosity = fromFlag (configVerbosity flags)
                   noExtraFlags args
                   -- A missing ./configure script is a hard error here
                   -- (contrast with defaultUserHooks' oldCompatPostConf).
                   confExists <- doesFileExist "configure"
                   if confExists
                     then runConfigureScript verbosity
                            backwardsCompatHack flags lbi
                     else die "configure script not found."
                   -- Fold the build info that ./configure generated into the
                   -- package description, then run the normal post-configure.
                   pbi <- getHookedBuildInfo verbosity
                   sanityCheckHookedBuildInfo pkg_descr pbi
                   let pkg_descr' = updatePackageDescription pbi pkg_descr
                   postConf simpleUserHooks args flags pkg_descr' lbi
          backwardsCompatHack = False
          -- Shared shape of the pre-hooks: reject stray command-line args,
          -- then read the hooked build info at the flag-selected verbosity.
          readHook :: (a -> Flag Verbosity) -> Args -> a -> IO HookedBuildInfo
          readHook get_verbosity a flags = do
              noExtraFlags a
              getHookedBuildInfo verbosity
            where
              verbosity = fromFlag (get_verbosity flags)
-- | Run the package's @.\/configure@ script through @sh@, passing along the
-- configured C compiler (as @CC=@ and @CFLAGS@) and any extra program search
-- path from the configure flags (prepended to @PATH@).
runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo
                   -> IO ()
runConfigureScript verbosity backwardsCompatHack flags lbi = do
  env <- getEnvironment
  let programConfig = withPrograms lbi
  (ccProg, ccFlags) <- configureCCompiler verbosity programConfig
  -- The C compiler's compilation and linker flags (e.g.
  -- "C compiler flags" and "Gcc Linker flags" from GHC) have already
  -- been merged into ccFlags, so we set both CFLAGS and LDFLAGS
  -- to ccFlags
  -- We don't try and tell configure which ld to use, as we don't have
  -- a way to pass its flags too
  let extraPath = fromNubList $ configProgramPathExtra flags
  -- Append our flags to a pre-existing CFLAGS, and prepend extraPath to a
  -- pre-existing PATH; PATH is only overridden when extraPath is non-empty.
  let cflagsEnv = maybe (unwords ccFlags) (++ (" " ++ unwords ccFlags)) $ lookup "CFLAGS" env
      spSep = [searchPathSeparator]
      pathEnv = maybe (intercalate spSep extraPath) ((intercalate spSep extraPath ++ spSep)++) $ lookup "PATH" env
      overEnv = ("CFLAGS", Just cflagsEnv) : [("PATH", Just pathEnv) | not (null extraPath)]
      args' = args ++ ["CC=" ++ ccProg]
      shProg = simpleProgram "sh"
      -- 'sh' itself is searched for on extraPath first, then the default path.
      progDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb
  shConfiguredProg <- lookupProgram shProg `fmap` configureProgram verbosity shProg progDb
  case shConfiguredProg of
      Just sh -> runProgramInvocation verbosity (programInvocation (sh {programOverrideEnv = overEnv}) args')
      Nothing -> die notFoundMsg
  where
    args = "./configure" : configureArgs backwardsCompatHack flags
    -- NOTE(review): "If you are on Windows, This requires" has a
    -- mid-sentence capital "This" — user-facing typo worth fixing upstream.
    notFoundMsg = "The package has a './configure' script. If you are on Windows, This requires a "
               ++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin. "
               ++ "If you are not on Windows, ensure that an 'sh' command is discoverable in your path."
-- | Read overriding build information from the package's @.buildinfo@ file,
-- if one is present; otherwise return the empty 'HookedBuildInfo'.
getHookedBuildInfo :: Verbosity -> IO HookedBuildInfo
getHookedBuildInfo verbosity =
    defaultHookedPackageDesc >>= maybe noInfo readInfo
  where
    -- No .buildinfo file present: nothing to override.
    noInfo = return emptyHookedBuildInfo
    -- Log which file we are consulting, then parse it.
    readInfo file = do
      info verbosity $ "Reading parameters from " ++ file
      readHookedBuildInfo verbosity file
-- | Default 'testHook': delegate directly to 'test', ignoring the
-- 'UserHooks' argument.
defaultTestHook :: Args -> PackageDescription -> LocalBuildInfo
                -> UserHooks -> TestFlags -> IO ()
defaultTestHook args pkg lbi _hooks flags = test args pkg lbi flags

-- | Default 'benchHook': delegate directly to 'bench', ignoring the
-- 'UserHooks' argument.
defaultBenchHook :: Args -> PackageDescription -> LocalBuildInfo
                 -> UserHooks -> BenchmarkFlags -> IO ()
defaultBenchHook args pkg lbi _hooks flags = bench args pkg lbi flags
-- | Default 'instHook': copy the build products into place and then, if the
-- package actually contains a library, register it.
defaultInstallHook :: PackageDescription -> LocalBuildInfo
                   -> UserHooks -> InstallFlags -> IO ()
defaultInstallHook pkg lbi _hooks flags = do
  -- First copy, reusing the install flags' dist prefix and verbosity.
  install pkg lbi defaultCopyFlags
    { copyDistPref  = installDistPref flags
    , copyDest      = toFlag NoCopyDest
    , copyVerbosity = installVerbosity flags
    }
  -- Then register, but only when there is a library to register.
  when (hasLibs pkg) $
    register pkg lbi defaultRegisterFlags
      { regDistPref  = installDistPref flags
      , regInPlace   = installInPlace flags
      , regPackageDB = installPackageDB flags
      , regVerbosity = installVerbosity flags
      }
-- | Default 'buildHook': build using the suffix handlers registered in the
-- given 'UserHooks'.
defaultBuildHook :: PackageDescription -> LocalBuildInfo
                 -> UserHooks -> BuildFlags -> IO ()
defaultBuildHook pkg lbi hooks flags =
  build pkg lbi flags (allSuffixHandlers hooks)

-- | Default 'replHook': start a REPL session using the suffix handlers
-- registered in the given 'UserHooks'.
defaultReplHook :: PackageDescription -> LocalBuildInfo
                -> UserHooks -> ReplFlags -> [String] -> IO ()
defaultReplHook pkg lbi hooks flags args =
  repl pkg lbi flags (allSuffixHandlers hooks) args
-- | Default 'regHook': register the package's library, or report that there
-- is nothing to register for library-less packages.
defaultRegHook :: PackageDescription -> LocalBuildInfo
               -> UserHooks -> RegisterFlags -> IO ()
defaultRegHook pkg lbi _hooks flags
  | hasLibs pkg = register pkg lbi flags
  | otherwise   = setupMessage (fromFlag (regVerbosity flags))
                    "Package contains no library to register:" (packageId pkg)
| randen/cabal | Cabal/Distribution/Simple.hs | bsd-3-clause | 30,711 | 20 | 25 | 7,647 | 6,206 | 3,147 | 3,059 | 502 | 8 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
module Bio.Foreign.MACS2
( makeTagDirectory
, FindPeakOpt
, style
, control
, findPeaks
, pos2bed ) where
import Control.Lens (makeFields, (^.))
import Data.Default.Class (Default(..))
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Shelly (shelly, run_, run)
data MACS2Opt = MACS2Opt
{ mACS2OptControl :: !(Maybe FilePath)
, mACS2OptOdir :: !FilePath
, mACS2OptGenomeSize :: !String
, mACS2OptPvalue :: !(Maybe Double)
, mACS2OptbuildModel :: !Bool
, mACS2Opt
} deriving (Show, Read)
makeFields ''MACS2Opt
instance Default FindPeakOpt where
def = FindPeakOpt
{ findPeakOptStyle = "factor"
, m= Nothing
}
callPeak :: [FilePath] -- ^ samples
-> MACS2Opt
-> IO ()
callPeak samples opt =
| kaizhang/bioinformatics-toolkit-foreign | src/Bio/Foreign/MACS2.hs | bsd-3-clause | 1,002 | 2 | 12 | 229 | 228 | 139 | 89 | -1 | -1 |
{-+
This module defines the parser monad, and the function #parseFile#, which is
the only way to invoke a parser.
-}
module ParseMonad(thenPM,PM,HToken,returnPM,parseError,getSrcLoc,
State,get,set,setreturn,eof,eoftoken,
parseFile,parseTokens) where
import HsTokens(Token(GotEOF))
import HsLexerPass1(lexerPass1Only,lexerPass0,Pos(..),line,column)
import MUtils
import SrcLoc1
import SrcLocPretty
import PrettyPrint(pp)
import Control.Monad.Error
--import Control.Monad(liftM,MonadPlus(..))
import Monad
import ExceptM()
--import IOExts(trace) -- for debugging only
default(Int)
type HToken = (Token,(SrcLoc,String))
--type Pos = (Int,Int)
type Error = String
type LayoutContext = Int
type State = ([HToken],[LayoutContext])
-- Parser monad type:
newtype PM a = PM {unPM::(State->Either ParseMonad.Error (a,State))}
-- | Inject a value into the parser monad without touching the state.
returnPM x = PM (\st -> Right (x, st))
-- | Sequence two parsers: run the first, and on success feed its result and
-- the updated state to the second.  An error short-circuits the whole chain.
PM p1 `thenPM` xp2 = PM $ \st ->
  case p1 st of
    Left err       -> Left err
    Right (x, st') -> unPM (xp2 x) st'
-- | Abort parsing with the given error message.
failPM msg = PM (\_ -> Left msg)
{-
emapPM f (PM p) = PM $ \ ts -> case p ts of
Right ans -> Right ans
Left err -> Left (f err)
-}
-- | Fetch the current parser state (token stream and layout-context stack).
get = PM (\st -> Right (st, st))
-- | Overwrite the parser state, returning unit.
set newSt = setreturn () newSt
-- | Overwrite the parser state and return the given value in one step.
setreturn x newSt = PM (\_ -> Right (x, newSt))
-- Old-style Monad instance (pre-AMP): 'fail' is routed to 'parseError' so
-- pattern-match failures in do-blocks produce positioned parse errors.
instance Monad PM where
  return=returnPM
  (>>=) = thenPM
  fail = parseError
{-instance Monad (Either String) where
    (Left a) >>= _ = Left a
    (Right b) >>= f = f b
    return = Right
    fail a = Left a -}
instance Functor PM where fmap = liftM
-- Alternation: try the first parser; if it fails, retry the second from the
-- same (unconsumed) state.
instance MonadPlus PM where
  mzero = fail "parser failed"
  PM p1 `mplus` PM p2 = PM $ \ s -> case p1 s of
                          y@(Right _) -> y
                          Left _ -> p2 s
-- Source location of the next token ('#' is fmap from MUtils).
getSrcLoc = fst.snd # peek
-- Peek at the next token without consuming it; past the end of the token
-- stream this yields the synthetic 'eoftoken'.
peek = tok1 # get
  where
    tok1 (ts,_) = case ts of
                    t:_ -> t
                    [] -> eoftoken
-- | Fail with a message prefixed by the position of the next token, or
-- "at end of file" when the stream is exhausted.
parseError msg = err =<< peek
  where err (t,(p,s)) =
          failPM $ pos++": "++msg
          where pos = if p==eof
                      then "at end of file"
                      else pp p++", before "++s
-- | Lex a string and run a parser over the resulting tokens.
parseFile pm f = parseTokens pm f . lexerPass0
-- | Run a parser over a pre-lexed token stream for file @f@ (the file name
-- is used only to build 'SrcLoc's), starting with an empty layout stack.
parseTokens (PM p) f ts =
    case p (map convPos $ lexerPass1Only ts,initial_layout_context_stack) of
      Left err -> fail ({-f++": "++-}err)
      Right (x,_) -> return x
  where
    -- Rebase lexer positions into 'SrcLoc's carrying the file name.
    convPos (t,(Pos n l c,s)) = {-seq loc-} (t,(loc,s))
      where loc = SrcLoc f n l c
-- Sentinel token/location used when peeking past the end of input.
eoftoken = (GotEOF,(eof,""))
eof = SrcLoc "?" 0 (-1) (-1) -- hmm
initial_layout_context_stack = []
| forste/haReFork | tools/base/parse2/ParseMonad.hs | bsd-3-clause | 2,385 | 9 | 14 | 594 | 832 | 471 | 361 | -1 | -1 |
module MyDoc where
import Text.PrettyPrint
import EnvM
import MUtils
import Monad
import Char(isSpace)
type Heading = (Int,String)
data TxtDec = Plain | Emph | Code | Math deriving (Eq,Show)
type DecString = (TxtDec,String)
type Code = [String]
type TxtBlock = [Paragraph]
data Paragraph = Txt [[DecString]] | Lst [TxtBlock] | H Heading deriving Show
type MyDoc = [Either TxtBlock Code]
-- | A rendering style: one pretty-printing function per document construct.
-- Concrete styles (e.g. plain text vs HTML) supply all eight fields.
data Style = Style
 { ppHeading :: Heading -> Doc -- ^ render a section heading
 , ppDecStr :: DecString -> Doc -- ^ render a decorated string fragment
 , ppCode :: Code -> Doc -- ^ render a verbatim code block
 , ppList :: [Doc] -> Doc -- ^ combine rendered items into a list
 , ppItem :: Doc -> Doc -- ^ wrap a single list item
 , ppParagraph :: Doc -> Doc -- ^ wrap a paragraph
 , ppText :: Doc -> Doc -- ^ wrap a run of text blocks
 , ppFinalize :: Doc -> Doc -- ^ final wrapping of the whole document
 }
-- | Look up the ambient 'Style' from the environment monad and apply one of
-- its fields to the given argument.
env f x = getEnv >>= \e -> return (f e x)
-- Shorthands: render one construct via the corresponding 'Style' field.
ppH h = env ppHeading h
ppS s = env ppDecStr s
ppC c = env ppCode c
ppL l = env ppList l
ppI i = env ppItem i
ppP p = env ppParagraph p
ppT t = env ppText t
ppF d = env ppFinalize d
-- | Render a whole document: each chunk is either a text block or code.
-- NOTE(review): '#' and '@@' come from MUtils; '#' appears to be infix fmap
-- and '@@' monadic (Kleisli) composition — confirm against MUtils.
ppMyDoc d = ppF =<< (vcat # mapM (either topLevel ppC) d)
-- | Render a top-level text block, giving headings and blank "empty text"
-- paragraphs special treatment.
topLevel d = ppT =<< (vcat # mapM prt d)
  where
    prt ((H h):xs) = liftM2 ($$) (ppH h) (prt xs)
    -- A paragraph whose text is all whitespace renders as a blank line.
    prt ((Txt as):xs)
      | emp as = (text "" $$) # prt xs
    prt x = vcat # mapM (ppP @@ ppPar) x
ppTxtBlock d = vcat # mapM ppPar d
-- Paragraph rendering: rows of decorated strings, nested lists, and (never
-- expected here) headings, which are rejected outright.
ppPar (Txt tss) = vcat # mapM (\ts -> hcat # mapM ppS ts) tss
ppPar (Lst is) = ppL =<< mapM (ppI @@ ppTxtBlock) is
ppPar (H _) = error "inner heading? (MyDoc.hs)"
-- | True when the paragraph text is effectively empty: every string payload
-- in every row is made up solely of whitespace (vacuously true for no rows).
emp xss = all (all (all isSpace . snd)) xss
| forste/haReFork | tools/hs2html/MyDoc.hs | bsd-3-clause | 1,562 | 0 | 11 | 486 | 704 | 372 | 332 | 45 | 3 |
-- | Entry point for the TicTacToe executable.
module Main(main) where
import Game.TicTacToe(client)
-- | Run the client provided by "Game.TicTacToe".
main :: IO ()
main = client
| peterson/hsttt | src/Main.hs | bsd-3-clause | 84 | 0 | 6 | 14 | 34 | 20 | 14 | 4 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Turtle.Line
( Line
, lineToText
, textToLines
, linesToText
, textToLine
, unsafeTextToLine
, NewlineForbidden(..)
) where
import Data.Text (Text)
import qualified Data.Text as Text
#if __GLASGOW_HASKELL__ >= 708
import Data.Coerce
#endif
import Data.List.NonEmpty (NonEmpty(..))
import Data.String
#if __GLASGOW_HASKELL__ >= 710
#else
import Data.Monoid
#endif
import Data.Maybe
import Data.Typeable
import Control.Exception
import qualified Data.List.NonEmpty
-- | The `NewlineForbidden` exception is thrown when you construct a `Line`
-- using an overloaded string literal or by calling `fromString` explicitly
-- and the supplied string contains newlines. This is a programming error to
-- do so: if you aren't sure that the input string is newline-free, do not
-- rely on the @`IsString` `Line`@ instance.
--
-- When debugging, it might be useful to look for implicit invocations of
-- `fromString` for `Line`:
--
-- > >>> sh (do { line <- "Hello\nWorld"; echo line })
-- > *** Exception: NewlineForbidden
--
-- In the above example, `echo` expects its argument to be a `Line`, thus
-- @line :: `Line`@. Since we bind @line@ in `Shell`, the string literal
-- @\"Hello\\nWorld\"@ has type @`Shell` `Line`@. The
-- @`IsString` (`Shell` `Line`)@ instance delegates the construction of a
-- `Line` to the @`IsString` `Line`@ instance, where the exception is thrown.
--
-- To fix the problem, use `textToLines`:
--
-- > >>> sh (do { line <- select (textToLines "Hello\nWorld"); echo line })
-- > Hello
-- > World
data NewlineForbidden = NewlineForbidden
  deriving (Show, Typeable)

instance Exception NewlineForbidden

-- | A line of text (does not contain newlines).
-- The 'Monoid' instance is newtype-derived from 'Text', so '<>' concatenates
-- the underlying text (it cannot introduce a newline).
newtype Line = Line Text
  deriving (Eq, Ord, Show, Monoid)
instance IsString Line where
  -- Partial: throws 'NewlineForbidden' when the literal contains a newline.
  fromString str =
    case textToLine (fromString str) of
      Just ln -> ln
      Nothing -> throw NewlineForbidden
-- | Convert a line to a text value.
lineToText :: Line -> Text
lineToText (Line t) = t
-- | Split text into lines. The inverse of `linesToText`.
-- The result is non-empty because 'Text.splitOn' always yields at least one
-- chunk (the whole input, when no separator occurs).
textToLines :: Text -> NonEmpty Line
textToLines =
#if __GLASGOW_HASKELL__ >= 708
    -- 'coerce' lifts splitOn to produce [Line] at zero cost (Line is a
    -- newtype over Text).
    Data.List.NonEmpty.fromList . coerce (Text.splitOn "\n")
#else
    Data.List.NonEmpty.fromList . map unsafeTextToLine . Text.splitOn "\n"
#endif
-- | Merge lines into a single text value.
-- Note: like 'Prelude.unlines', this appends a trailing newline.
linesToText :: [Line] -> Text
linesToText =
#if __GLASGOW_HASKELL__ >= 708
    coerce Text.unlines
#else
    Text.unlines . map lineToText
#endif
-- | Try to convert a text value into a line.
-- Precondition (checked): the argument does not contain newlines.
textToLine :: Text -> Maybe Line
textToLine t =
  case textToLines t of
    ln :| [] -> Just ln -- exactly one chunk: no newline was present
    _        -> Nothing -- splitting produced several lines
-- | Convert a text value into a line.
-- Precondition (unchecked): the argument does not contain newlines.
-- Wraps the constructor directly; callers must guarantee the invariant.
unsafeTextToLine :: Text -> Line
unsafeTextToLine = Line
| mitchellwrosen/Haskell-Turtle-Library | src/Turtle/Line.hs | bsd-3-clause | 3,036 | 0 | 10 | 546 | 372 | 230 | 142 | 42 | 2 |
module Backend.Interpreter where
import qualified Data.Map as Map
import qualified Data.Maybe as Maybe
import Frontend.Parser
type Env = Map.Map String Expr
-- | Run a program: execute each statement in order, starting from an empty
-- variable environment.
interpret :: [Stmt] -> IO ()
interpret program = interpret' program Map.empty
-- | Execute statements against the given environment.  Assignments extend
-- the environment with the (unevaluated) right-hand expression; 'Print'
-- evaluates and prints; bare expression statements are discarded.
interpret' :: [Stmt] -> Env -> IO ()
interpret' [] _ = return ()
interpret' (stmt : rest) env =
  case stmt of
    ExprStmt _    -> interpret' rest env
    Assign name e -> interpret' rest (Map.insert name e env)
    Print e       -> do
      print (eval e env)
      interpret' rest env
-- | Evaluate an expression to an 'Int' under the given environment.
--
-- Variables look up their stored expression and evaluate it on demand.
-- An unbound variable now aborts with a message naming the variable,
-- instead of the opaque "Maybe.fromJust: Nothing" crash the previous
-- 'Maybe.fromJust'-based lookup produced.  'Div' still uses integer 'div'
-- and therefore still throws on division by zero.
eval :: Expr -> Env -> Int
eval (Int n) _ = n
eval (Var s) env =
  case Map.lookup s env of
    Just e  -> eval e env
    Nothing -> error ("eval: unbound variable " ++ show s)
eval (Add lExpr rExpr) env = eval lExpr env + eval rExpr env
eval (Sub lExpr rExpr) env = eval lExpr env - eval rExpr env
eval (Mul lExpr rExpr) env = eval lExpr env * eval rExpr env
eval (Div lExpr rExpr) env = eval lExpr env `div` eval rExpr env
eval (Neg expr) env = negate (eval expr env)
eval (Neg expr) env = -(eval expr env) | JCGrant/JLang | src/Backend/Interpreter.hs | bsd-3-clause | 981 | 0 | 10 | 221 | 442 | 221 | 221 | 23 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
-- | "SDL.Video.Renderer" provides a high-level interface to SDL's accelerated 2D rendering library.
module SDL.Video.Renderer
( Renderer
-- * 'Renderer' Configuration
-- | These configuration options can be used with 'SDL.Video.createRenderer' to create 'Renderer's.
, RendererConfig(..)
, defaultRenderer
, RendererType(..)
-- * Drawing Primitives
, blitScaled
, blitSurface
, fillRect
, fillRects
, renderClear
, renderCopy
, renderCopyEx
, renderDrawLine
, renderDrawLines
, renderDrawPoint
, renderDrawPoints
, renderDrawRect
, renderDrawRects
, renderFillRect
, renderFillRects
, renderPresent
-- * 'Renderer' State
-- | SDL exposes a stateful interface to 'Renderer's - the above primitives drawing routines will change their
-- output depending on the value of these state variables.
, renderDrawBlendMode
, renderDrawColor
, renderTarget
, renderTargetSupported
, renderClipRect
, renderLogicalSize
, renderScale
, renderViewport
-- * 'Surface's
, Surface(..)
, updateWindowSurface
-- ** Creating and Destroying 'Surface's
, convertSurface
, createRGBSurface
, createRGBSurfaceFrom
, freeSurface
, getWindowSurface
, loadBMP
-- ** 'Surface' state
, colorKey
, surfaceBlendMode
, surfaceDimensions
, surfaceFormat
, surfacePixels
-- ** Accessing 'Surface' Data
, lockSurface
, unlockSurface
-- * 'Palette's and pixel formats
, Palette
, PixelFormat(..)
, SurfacePixelFormat
, formatPalette
, mapRGB
, setPaletteColors
-- * Textures
, Texture
-- ** Creating, Using and Destroying 'Texture's
, createTexture
, TextureAccess(..)
, createTextureFromSurface
, destroyTexture
, glBindTexture
, glUnbindTexture
-- ** 'Texture' State
, textureAlphaMod
, textureBlendMode
, BlendMode(..)
, textureColorMod
-- ** Accessing 'Texture' Data
, lockTexture
, unlockTexture
, queryTexture
, TextureInfo(..)
, Rectangle(..)
-- * Available 'Renderer's
-- | These functions allow you to query the current system for available 'Renderer's that can be created
-- with 'SDL.Video.createRenderer'.
, getRendererInfo
, RendererInfo(..)
, getRenderDriverInfo
) where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Bits
import Data.Data (Data)
import Data.Foldable
import Data.StateVar
import Data.Text (Text)
import Data.Typeable
import Data.Word
import Foreign.C.String
import Foreign.C.Types
import Foreign.Marshal.Alloc
import Foreign.Marshal.Utils
import Foreign.Ptr
import Foreign.Storable
import GHC.Generics (Generic)
import Linear
import Linear.Affine (Point(P))
import Prelude hiding (foldr)
import SDL.Exception
import SDL.Internal.Numbered
import SDL.Internal.Types
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as Text
import qualified Data.Vector.Storable as SV
import qualified Data.Vector.Storable.Mutable as MSV
import qualified SDL.Raw as Raw
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
import Data.Traversable
#endif
-- | Perform a fast surface copy to a destination surface.
--
-- See @<https://wiki.libsdl.org/SDL_BlitSurface SDL_BlitSurface>@ for C documentation.
blitSurface :: MonadIO m
  => Surface -- ^ The 'Surface' to be copied from
  -> Maybe (Rectangle CInt) -- ^ The rectangle to be copied, or 'Nothing' to copy the entire surface
  -> Surface -- ^ The 'Surface' that is the blit target
  -> Maybe (Point V2 CInt) -- ^ The position to blit to
  -> m ()
blitSurface (Surface src _) srcRect (Surface dst _) dstLoc = liftIO $
  throwIfNeg_ "SDL.Video.blitSurface" "SDL_BlitSurface" $
  -- 'Nothing' marshals to a null pointer, meaning "the entire surface".
  maybeWith with srcRect $ \srcPtr ->
  -- Only the position matters for the destination: SDL_BlitSurface ignores
  -- the destination rect's width/height, hence the dummy size 0.
  maybeWith with (fmap (flip Rectangle 0) dstLoc) $ \dstPtr ->
  Raw.blitSurface src (castPtr srcPtr) dst (castPtr dstPtr)
-- | Create a texture for a rendering context.
--
-- See @<https://wiki.libsdl.org/SDL_CreateTexture SDL_CreateTexture>@ for C documentation.
createTexture :: (Functor m,MonadIO m)
              => Renderer -- ^ The rendering context.
              -> PixelFormat
              -> TextureAccess
              -> V2 CInt -- ^ The size of the texture.
              -> m Texture
createTexture (Renderer rend) pixelFormat access (V2 width height) =
  -- Wrap the raw pointer, raising an error when SDL returns null.
  fmap Texture $
    throwIfNull "SDL.Video.Renderer.createTexture" "SDL_CreateTexture" $
      Raw.createTexture rend (toNumber pixelFormat) (toNumber access) width height
-- | Create a texture from an existing surface.
--
-- See @<https://wiki.libsdl.org/SDL_CreateTextureFromSurface SDL_CreateTextureFromSurface>@ for C documentation.
createTextureFromSurface :: (Functor m,MonadIO m)
                         => Renderer -- ^ The rendering context
                         -> Surface -- ^ The surface containing pixel data used to fill the texture
                         -> m Texture
createTextureFromSurface (Renderer rend) (Surface surfPtr _) =
  Texture `fmap`
    throwIfNull "SDL.Video.createTextureFromSurface" "SDL_CreateTextureFromSurface"
      (Raw.createTextureFromSurface rend surfPtr)
-- | Bind an OpenGL\/ES\/ES2 texture to the current context for use with when rendering OpenGL primitives directly.
--
-- See @<https://wiki.libsdl.org/SDL_GL_BindTexture SDL_GL_BindTexture>@ for C documentation.
glBindTexture :: (Functor m,MonadIO m)
              => Texture -- ^ The texture to bind to the current OpenGL\/ES\/ES2 context
              -> m ()
glBindTexture (Texture t) =
  throwIfNeg_ "SDL.Video.Renderer.glBindTexture" "SDL_GL_BindTexture" $
    -- The two null pointers discard SDL's optional texture-size
    -- out-parameters (texw/texh in the C API).
    Raw.glBindTexture t nullPtr nullPtr
-- | Unbind an OpenGL\/ES\/ES2 texture from the current context.
--
-- See @<https://wiki.libsdl.org/SDL_GL_UnbindTexture SDL_GL_UnbindTexture>@ for C documentation.
glUnbindTexture :: (Functor m,MonadIO m)
                => Texture -- ^ The texture to unbind from the current OpenGL\/ES\/ES2 context
                -> m ()
glUnbindTexture (Texture t) =
  throwIfNeg_ "SDL.Video.Renderer.glUnindTexture" "SDL_GL_UnbindTexture" $
  Raw.glUnbindTexture t
-- | Destroy the specified texture.
-- Using the 'Texture' after this call is undefined behaviour on SDL's side.
--
-- See @<https://wiki.libsdl.org/SDL_DestroyTexture SDL_DestroyTexture>@ for the C documentation.
destroyTexture :: MonadIO m => Texture -> m ()
destroyTexture (Texture t) = Raw.destroyTexture t
-- | Lock a portion of the texture for *write-only* pixel access.
--
-- See @<https://wiki.libsdl.org/SDL_LockTexture SDL_LockTexture>@ for C documentation.
lockTexture :: MonadIO m
            => Texture -- ^ The 'Texture' to lock for access, which must have been created with 'TextureAccessStreaming'
            -> Maybe (Rectangle CInt) -- ^ The area to lock for access; 'Nothing' to lock the entire texture
            -> m (Ptr (),CInt) -- ^ A pointer to the locked pixels, appropriately offset by the locked area, and the pitch of the locked pixels (the pitch is the length of one row in bytes).
lockTexture (Texture t) rect = liftIO $
  -- Two stack-allocated out-parameters receive the pixel pointer and pitch.
  alloca $ \pixelsPtr ->
  alloca $ \pitchPtr ->
  maybeWith with rect $ \rectPtr -> do
    throwIfNeg_ "lockTexture" "SDL_LockTexture" $
      Raw.lockTexture t (castPtr rectPtr) pixelsPtr pitchPtr
    pixels <- peek pixelsPtr
    pitch <- peek pitchPtr
    return (pixels, pitch)
-- | Unlock a texture, uploading the changes to video memory, if needed.
--
-- /Warning/: See <https://bugzilla.libsdl.org/show_bug.cgi?id=1586 Bug No. 1586> before using this function!
--
-- See @<https://wiki.libsdl.org/SDL_UnlockTexture SDL_UnlockTexture>@ for C documentation.
unlockTexture :: MonadIO m => Texture -> m ()
unlockTexture (Texture t) = liftIO $ Raw.unlockTexture t
-- | Set up a surface for directly accessing the pixels.
--
-- See @<https://wiki.libsdl.org/SDL_LockSurface SDL_LockSurface>@ for C documentation.
lockSurface :: MonadIO m => Surface -> m ()
lockSurface (Surface s _) = liftIO $
  throwIfNeg_ "lockSurface" "SDL_LockSurface" $
  Raw.lockSurface s
-- | Release a surface after directly accessing the pixels.
-- Unlike 'lockSurface', the raw call's return value is not checked.
--
-- See @<https://wiki.libsdl.org/SDL_UnlockSurface SDL_UnlockSurface>@ for C documentation.
unlockSurface :: MonadIO m => Surface -> m ()
unlockSurface (Surface s _) = Raw.unlockSurface s
-- | Information to the GPU about how you will use a texture.
-- Mirrors the C @SDL_TEXTUREACCESS_*@ constants (see the
-- 'FromNumber'/'ToNumber' instances below for the mapping).
data TextureAccess
  = TextureAccessStatic
    -- ^ Changes rarely, cannot be locked
  | TextureAccessStreaming
    -- ^ changes frequently, can be locked
  | TextureAccessTarget
    -- ^ Can be used as a render target
  deriving (Bounded, Data, Enum, Eq, Generic, Ord, Read, Show, Typeable)
-- | Decode a raw @SDL_TextureAccess@ value.  Unknown values are a programming
-- error; the message now includes the offending number (the previous message
-- was just "Unknown value", which made failures undiagnosable).
instance FromNumber TextureAccess CInt where
  fromNumber n' = case n' of
    Raw.SDL_TEXTUREACCESS_STATIC -> TextureAccessStatic
    Raw.SDL_TEXTUREACCESS_STREAMING -> TextureAccessStreaming
    Raw.SDL_TEXTUREACCESS_TARGET -> TextureAccessTarget
    _ -> error ("SDL.Video.Renderer.fromNumber: unknown TextureAccess value: " ++ show n')
-- | Encode a 'TextureAccess' as the corresponding raw SDL constant.
instance ToNumber TextureAccess CInt where
  -- One equation per constructor; total by construction.
  toNumber TextureAccessStatic = Raw.SDL_TEXTUREACCESS_STATIC
  toNumber TextureAccessStreaming = Raw.SDL_TEXTUREACCESS_STREAMING
  toNumber TextureAccessTarget = Raw.SDL_TEXTUREACCESS_TARGET
-- | Attributes of a texture as reported by 'queryTexture'.
data TextureInfo = TextureInfo
  { texturePixelFormat :: PixelFormat
    -- ^ Raw format of the texture; the actual format may differ, but pixel transfers will use this format
  , textureAccess :: TextureAccess
    -- ^ The access available to the texture
  , textureWidth :: CInt
    -- ^ The width of the texture
  , textureHeight :: CInt
    -- ^ The height of the texture
  } deriving (Eq, Generic, Ord, Read, Show, Typeable)
-- | Query the attributes of a texture.
--
-- See @<https://wiki.libsdl.org/SDL_QueryTexture SDL_QueryTexture>@ for C documentation.
queryTexture :: MonadIO m => Texture -> m TextureInfo
queryTexture (Texture tex) = liftIO $
  -- Four stack-allocated out-parameters receive format, access and size.
  alloca $ \pfPtr ->
  alloca $ \acPtr ->
  alloca $ \wPtr ->
  alloca $ \hPtr -> do
    throwIfNeg_ "SDL.Video.queryTexture" "SDL_QueryTexture" $
      Raw.queryTexture tex pfPtr acPtr wPtr hPtr
    TextureInfo <$>
      fmap fromNumber (peek pfPtr) <*>
      fmap fromNumber (peek acPtr) <*>
      peek wPtr <*>
      peek hPtr
-- | Allocate a new RGB surface.
--
-- See @<https://wiki.libsdl.org/SDL_CreateRGBSurface SDL_CreateRGBSurface>@ for C documentation.
createRGBSurface :: (Functor m,MonadIO m)
                 => V2 CInt -- ^ The size of the surface
                 -> CInt -- ^ The bit-depth of the surface
                 -> V4 Word32 -- ^ The red, green, blue and alpha mask for the pixels
                 -> m Surface
createRGBSurface (V2 width height) depth (V4 rMask gMask bMask aMask) =
  -- The leading 0 is the (unused) flags argument of the raw call.  SDL owns
  -- the pixel storage, so the result is wrapped as an unmanaged surface.
  unmanagedSurface `fmap`
    throwIfNull "SDL.Video.createRGBSurface" "SDL_CreateRGBSurface"
      (Raw.createRGBSurface 0 width height depth rMask gMask bMask aMask)
-- | Allocate a new RGB surface with existing pixel data.
--
-- See @<https://wiki.libsdl.org/SDL_CreateRGBSurfaceFrom SDL_CreateRGBSurfaceFrom>@ for C documentation.
createRGBSurfaceFrom :: (Functor m,MonadIO m)
                     => MSV.IOVector Word8 -- ^ The existing pixel data
                     -> V2 CInt -- ^ The size of the surface
                     -> CInt -- ^ The bit-depth of the surface
                     -> CInt -- ^ The pitch - the length of a row of pixels in bytes
                     -> V4 Word32 -- ^ The red, green blue and alpha mask for the pixels
                     -> m Surface
createRGBSurfaceFrom pixels (V2 w h) d p (V4 r g b a) = liftIO $
  -- NOTE(review): the pixel pointer escapes 'unsafeWith' — the created SDL
  -- surface keeps referencing the buffer afterwards.  'managedSurface'
  -- retains the vector inside the 'Surface' so the buffer stays alive for
  -- the surface's lifetime; confirm nothing copies the Surface value
  -- without the vector.
  fmap (managedSurface pixels) $
  throwIfNull "SDL.Video.createRGBSurfaceFrom" "SDL_CreateRGBSurfaceFrom" $
    MSV.unsafeWith pixels $ \pixelPtr ->
      Raw.createRGBSurfaceFrom (castPtr pixelPtr) w h d p r g b a
-- | Perform a fast fill of a rectangle with a specific color.
--
-- If there is a clip rectangle set on the destination (set via 'clipRect'), then this function will fill based on the intersection of the clip rectangle and the given 'Rectangle'.
--
-- See @<https://wiki.libsdl.org/SDL_FillRect SDL_FillRect>@ for C documentation.
fillRect :: MonadIO m
         => Surface -- ^ The 'Surface' that is the drawing target.
         -> Maybe (Rectangle CInt) -- ^ The rectangle to fill, or 'Nothing' to fill the entire surface.
         -> Word32 -- ^ The color to fill with. This should be a pixel of the format used by the surface, and can be generated by 'mapRGB' or 'mapRGBA'. If the color value contains an alpha component then the destination is simply filled with that alpha information, no blending takes place.
         -> m ()
fillRect (Surface s _) rect col = liftIO $
  throwIfNeg_ "SDL.Video.fillRect" "SDL_FillRect" $
  -- 'Nothing' marshals to a null pointer, meaning "fill the whole surface".
  maybeWith with rect $ \rectPtr ->
  Raw.fillRect s (castPtr rectPtr) col
-- | Perform a fast fill of a set of rectangles with a specific color.
--
-- If there is a clip rectangle set on any of the destinations (set via 'clipRect'), then this function will fill based on the intersection of the clip rectangle and the given 'Rectangle's.
--
-- See @<https://wiki.libsdl.org/SDL_FillRect SDL_FillRects>@ for C documentation.
fillRects :: MonadIO m
          => Surface -- ^ The 'Surface' that is the drawing target.
          -> SV.Vector (Rectangle CInt) -- ^ A 'SV.Vector' of 'Rectangle's to be filled.
          -> Word32 -- ^ The color to fill with. This should be a pixel of the format used by the surface, and can be generated by 'mapRGB' or 'mapRGBA'. If the color value contains an alpha component then the destination is simply filled with that alpha information, no blending takes place.
          -> m ()
fillRects (Surface surfPtr _) rects color = liftIO $
  throwIfNeg_ "SDL.Video.fillRects" "SDL_FillRects" $
    -- Hand the vector's storage straight to SDL; this relies on
    -- 'Rectangle CInt' having the same memory layout as SDL_Rect.
    SV.unsafeWith rects $ \rectArray ->
      Raw.fillRects surfPtr (castPtr rectArray) (fromIntegral (SV.length rects)) color
-- | Free an RGB surface.
--
-- If the surface was created using 'createRGBSurfaceFrom' then the pixel data is not freed.
-- Using the 'Surface' after this call is undefined behaviour on SDL's side.
--
-- See @<https://wiki.libsdl.org/SDL_FreeSurface SDL_FreeSurface>@ for the C documentation.
freeSurface :: MonadIO m => Surface -> m ()
freeSurface (Surface s _) = Raw.freeSurface s
-- | Load a surface from a BMP file.
--
-- See @<https://wiki.libsdl.org/SDL_LoadBMP SDL_LoadBMP>@ for C documentation.
loadBMP :: MonadIO m => FilePath -> m Surface
loadBMP path = liftIO $
  -- No Haskell-side pixel buffer is involved, so the result is wrapped as
  -- an unmanaged surface.
  unmanagedSurface `fmap`
    throwIfNull "SDL.Video.loadBMP" "SDL_LoadBMP"
      (withCString path Raw.loadBMP)
-- | A raw pointer to an SDL pixel-format description, owned by SDL.
newtype SurfacePixelFormat = SurfacePixelFormat (Ptr Raw.PixelFormat)
  deriving (Eq, Typeable)
-- It's possible we could use unsafePerformIO here, but I'm not
-- sure. De need to guarantee that pointers aren't reused?
-- | Map an RGB triple to an opaque pixel value for a given pixel format.
--
-- This function maps the RGB color value to the specified pixel format and returns the pixel value best approximating the given RGB color value for the given pixel format.
--
-- If the format has a palette (8-bit) the index of the closest matching color in the palette will be returned.
--
-- If the specified pixel format has an alpha component it will be returned as all 1 bits (fully opaque).
--
-- If the pixel format bpp (color depth) is less than 32-bpp then the unused upper bits of the return value can safely be ignored (e.g., with a 16-bpp format the return value can be assigned to a 'Word16', and similarly a 'Word8' for an 8-bpp format).
--
-- See @<https://wiki.libsdl.org/SDL_MapRGB SDL_MapRGB>@ for C documentation.
mapRGB :: MonadIO m
       => SurfacePixelFormat -- ^ The format of the pixel
       -> V3 Word8 -- ^ The color to map
       -> m Word32
mapRGB (SurfacePixelFormat fmt) (V3 r g b) = Raw.mapRGB fmt r g b
-- It's possible we could use unsafePerformIO here, but I'm not
-- sure. surface->{w,h} are immutable, but do we need to guarantee that pointers
-- aren't reused by *different* surfaces.
-- | Retrive the width and height of a 'Surface'.
-- Reads the @w@/@h@ fields of the underlying raw surface struct.
surfaceDimensions :: MonadIO m => Surface -> m (V2 CInt)
surfaceDimensions (Surface s _) = liftIO $ (V2 <$> Raw.surfaceW <*> Raw.surfaceH) <$> peek s
-- | Obtain the pointer to the underlying pixels in a surface. You should bracket
-- this call with 'lockSurface' and 'unlockSurface', respectively.
surfacePixels :: MonadIO m => Surface -> m (Ptr ())
surfacePixels (Surface s _) = liftIO $ Raw.surfacePixels <$> peek s
-- It's possible we could use unsafePerformIO here, but I'm not
-- sure. surface->format is immutable, but do we need to guarantee that pointers
-- aren't reused by *different* surfaces?
-- | Inspect the pixel format under a surface.
surfaceFormat :: MonadIO m => Surface -> m SurfacePixelFormat
surfaceFormat (Surface s _) = liftIO $ SurfacePixelFormat . Raw.surfaceFormat <$> peek s
-- | A raw pointer to an SDL palette, owned by SDL.
newtype Palette = Palette (Ptr Raw.Palette)
  deriving (Eq, Typeable)
-- | Fetch the palette attached to a pixel format, if any.  Yields 'Nothing'
-- when the format's palette pointer is null.
formatPalette :: MonadIO m => SurfacePixelFormat -> m (Maybe Palette)
formatPalette (SurfacePixelFormat fmtPtr) = liftIO $ do
  rawFormat <- peek fmtPtr
  let palPtr = Raw.pixelFormatPalette rawFormat
  return $ if palPtr == nullPtr
             then Nothing
             else Just (Palette palPtr)
-- | Set a range of colors in a palette.
--
-- See @<https://wiki.libsdl.org/SDL_SetPaletteColors SDL_SetPaletteColors>@ for C documentation.
setPaletteColors :: MonadIO m
                 => Palette -- ^ The 'Palette' to modify
                 -> (SV.Vector (V4 Word8)) -- ^ A 'SV.Vector' of colours to copy into the palette
                 -> CInt -- ^ The index of the first palette entry to modify
                 -> m ()
setPaletteColors (Palette p) colors first = liftIO $
  throwIfNeg_ "SDL.Video.setPaletteColors" "SDL_SetPaletteColors" $
  -- Pass the vector's storage directly; relies on 'V4 Word8' matching
  -- SDL_Color's byte layout.
  SV.unsafeWith colors $ \cp ->
    Raw.setPaletteColors p (castPtr cp) first n
  where
    -- Number of entries to copy, taken from the vector's length.
    n = fromIntegral $ SV.length colors
-- | Get the SDL surface associated with the window.
--
-- See @<https://wiki.libsdl.org/SDL_GetWindowSurface SDL_GetWindowSurface>@ for C documentation.
getWindowSurface :: (Functor m, MonadIO m) => Window -> m Surface
getWindowSurface (Window winPtr) =
  -- No Haskell-side pixel buffer to keep alive: wrap as unmanaged.
  unmanagedSurface `fmap`
    throwIfNull "SDL.Video.getWindowSurface" "SDL_GetWindowSurface"
      (Raw.getWindowSurface winPtr)
-- | Get or set the blend mode used for drawing operations (fill and line).
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetRenderDrawBlendMode SDL_SetRenderDrawBlendMode>@ and @<https://wiki.libsdl.org/SDL_GetRenderDrawBlendMode SDL_GetRenderDrawBlendMode>@ for C documentation.
renderDrawBlendMode :: Renderer -> StateVar BlendMode
renderDrawBlendMode (Renderer r) = makeStateVar getRenderDrawBlendMode setRenderDrawBlendMode
  where
  -- Getter: read the raw blend mode via an out-parameter and decode it.
  getRenderDrawBlendMode = liftIO $
    alloca $ \bmPtr -> do
      throwIfNeg_ "SDL.Video.Renderer.getRenderDrawBlendMode" "SDL_GetRenderDrawBlendMode" $
        Raw.getRenderDrawBlendMode r bmPtr
      fromNumber <$> peek bmPtr
  -- Setter: encode the 'BlendMode' and hand it to SDL.
  setRenderDrawBlendMode bm =
    throwIfNeg_ "SDL.Video.Renderer.setRenderDrawBlendMode" "SDL_SetRenderDrawBlendMode" $
      Raw.setRenderDrawBlendMode r (toNumber bm)
-- | Get or set the color used for drawing operations (rect, line and clear).
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetRenderDrawColor SDL_SetRenderDrawColor>@ and @<https://wiki.libsdl.org/SDL_GetRenderDrawColor SDL_GetRenderDrawColor>@ for C documentation.
renderDrawColor :: Renderer -> StateVar (V4 Word8)
renderDrawColor (Renderer re) = makeStateVar getRenderDrawColor setRenderDrawColor
  where
  -- Getter: four out-parameters, one per colour channel (RGBA order).
  getRenderDrawColor = liftIO $
    alloca $ \r ->
    alloca $ \g ->
    alloca $ \b ->
    alloca $ \a -> do
      throwIfNeg_ "SDL.Video.Renderer.getRenderDrawColor" "SDL_GetRenderDrawColor" $
        Raw.getRenderDrawColor re r g b a
      V4 <$> peek r <*> peek g <*> peek b <*> peek a
  setRenderDrawColor (V4 r g b a) =
    throwIfNeg_ "SDL.Video.setRenderDrawColor" "SDL_SetRenderDrawColor" $
      Raw.setRenderDrawColor re r g b a
-- | Copy the window surface to the screen.
--
-- This is the function you use to reflect any changes to the surface on the screen.
--
-- See @<https://wiki.libsdl.org/SDL_UpdateWindowSurface SDL_UpdateWindowSurface>@ for C documentation.
updateWindowSurface :: (Functor m, MonadIO m) => Window -> m ()
updateWindowSurface (Window w) =
throwIfNeg_ "SDL.Video.updateWindowSurface" "SDL_UpdateWindowSurface" $
Raw.updateWindowSurface w
-- | Blend modes used in 'renderCopy' and drawing operations.
data BlendMode
  = BlendNone
    -- ^ No blending
  | BlendAlphaBlend
    -- ^ Alpha blending.
    --
    -- @
    -- dstRGB = (srcRGB * srcA) + (dstRGB * (1-srcA))
    -- dstA = srcA + (dstA * (1-srcA))
    -- @
  | BlendAdditive
    -- ^ Additive blending
    --
    -- @
    -- dstRGB = (srcRGB * srcA) + dstRGB
    -- dstA = dstA
    -- @
  | BlendMod
    -- ^ Color modulate
    --
    -- @
    -- dstRGB = srcRGB * dstRGB
    -- dstA = dstA
    -- @
  deriving (Bounded, Data, Enum, Eq, Generic, Ord, Read, Show, Typeable)
-- | Decode a raw @SDL_BlendMode@ constant into a 'BlendMode'.
--
-- The original version listed @SDL_BLENDMODE_ADD@ twice and omitted
-- @SDL_BLENDMODE_NONE@ entirely, so decoding the (very common) \"no
-- blending\" mode fell through to 'error'.
instance FromNumber BlendMode Word32 where
  fromNumber n = case n of
    Raw.SDL_BLENDMODE_NONE -> BlendNone
    Raw.SDL_BLENDMODE_BLEND -> BlendAlphaBlend
    Raw.SDL_BLENDMODE_ADD -> BlendAdditive
    Raw.SDL_BLENDMODE_MOD -> BlendMod
    _ -> error $ "fromNumber<BlendMode>: unknown blend mode: " ++ show n
-- | Encode a 'BlendMode' as the corresponding raw @SDL_BlendMode@ constant.
instance ToNumber BlendMode Word32 where
  toNumber BlendNone = Raw.SDL_BLENDMODE_NONE
  toNumber BlendAlphaBlend = Raw.SDL_BLENDMODE_BLEND
  toNumber BlendAdditive = Raw.SDL_BLENDMODE_ADD
  toNumber BlendMod = Raw.SDL_BLENDMODE_MOD
-- | An axis-aligned rectangle: an upper-left corner position and a size
-- (width, height) — the Haskell analogue of @SDL_Rect@.
data Rectangle a = Rectangle (Point V2 a) (V2 a)
  deriving (Eq, Functor, Generic, Ord, Read, Show, Typeable)
-- | Marshalling for 'Rectangle', laid out as position followed by size
-- (matching @SDL_Rect@ / @SDL_Point@ pairs).
instance Storable a => Storable (Rectangle a) where
  sizeOf ~(Rectangle o s) = sizeOf o + sizeOf s
  -- An alignment of 0 is not a valid 'Storable' alignment (it must be a
  -- positive divisor of admissible addresses); use the position component's
  -- alignment, which dominates the layout.
  alignment ~(Rectangle o _) = alignment o
  peek ptr = do
    o <- peek (castPtr ptr)
    -- The size field sits immediately after the position field.
    s <- peek (castPtr (ptr `plusPtr` sizeOf o))
    return (Rectangle o s)
  poke ptr (Rectangle o s) = do
    poke (castPtr ptr) o
    poke (castPtr (ptr `plusPtr` sizeOf o)) s
-- | An SDL surface: the raw surface pointer, plus — for surfaces created
-- from Haskell-managed pixel data — the pixel vector that must be kept
-- reachable for the surface's lifetime so the GC does not free it.
data Surface = Surface (Ptr Raw.Surface) (Maybe (MSV.IOVector Word8))
  deriving (Typeable)
-- | Wrap a surface pointer whose pixel storage is owned by SDL itself
-- (no Haskell-side pixel vector needs to be kept alive).
unmanagedSurface :: Ptr Raw.Surface -> Surface
unmanagedSurface = flip Surface Nothing
-- | Wrap a surface pointer together with the Haskell-side pixel vector that
-- backs it, keeping the vector reachable while the 'Surface' is alive.
managedSurface :: MSV.IOVector Word8 -> Ptr Raw.Surface -> Surface
managedSurface pixels ptr = Surface ptr (Just pixels)
newtype Texture = Texture Raw.Texture
deriving (Eq, Typeable)
-- | Draw a rectangle outline on the current rendering target.
--
-- See @<https://wiki.libsdl.org/SDL_RenderDrawRect SDL_RenderDrawRect>@ for C documentation.
renderDrawRect :: MonadIO m
=> Renderer
-> Maybe (Rectangle CInt) -- ^ The rectangle outline to draw. 'Nothing' for the entire rendering context.
-> m ()
renderDrawRect (Renderer r) rect = liftIO $
throwIfNeg_ "SDL.Video.renderDrawRect" "SDL_RenderDrawRect" $
maybeWith with rect (Raw.renderDrawRect r . castPtr)
-- | Draw some number of rectangles on the current rendering target.
--
-- See @<https://wiki.libsdl.org/SDL_RenderDrawRects SDL_RenderDrawRects>@ for C documentation.
renderDrawRects :: MonadIO m => Renderer -> SV.Vector (Rectangle CInt) -> m ()
renderDrawRects (Renderer r) rects = liftIO $
throwIfNeg_ "SDL.Video.renderDrawRects" "SDL_RenderDrawRects" $
SV.unsafeWith rects $ \rp ->
Raw.renderDrawRects r
(castPtr rp)
(fromIntegral (SV.length rects))
-- | Fill a rectangle on the current rendering target with the drawing color.
--
-- See @<https://wiki.libsdl.org/SDL_RenderFillRect SDL_RenderFillRect>@ for C documentation.
renderFillRect :: MonadIO m
=> Renderer
-> Maybe (Rectangle CInt) -- ^ The rectangle to fill. 'Nothing' for the entire rendering context.
-> m ()
renderFillRect (Renderer r) rect = liftIO $ do
throwIfNeg_ "SDL.Video.renderFillRect" "SDL_RenderFillRect" $
maybeWith with rect $ \rPtr ->
Raw.renderFillRect r
(castPtr rPtr)
-- | Fill some number of rectangles on the current rendering target with the drawing color.
--
-- See @<https://wiki.libsdl.org/SDL_RenderFillRects SDL_RenderFillRects>@ for C documentation.
renderFillRects :: MonadIO m => Renderer -> SV.Vector (Rectangle CInt) -> m ()
renderFillRects (Renderer r) rects = liftIO $
throwIfNeg_ "SDL.Video.renderFillRects" "SDL_RenderFillRects" $
SV.unsafeWith rects $ \rp ->
Raw.renderFillRects r
(castPtr rp)
(fromIntegral (SV.length rects))
-- | Clear the current rendering target with the drawing color.
--
-- See @<https://wiki.libsdl.org/SDL_RenderClear SDL_RenderClear>@ for C documentation.
renderClear :: (Functor m, MonadIO m) => Renderer -> m ()
renderClear (Renderer r) =
throwIfNeg_ "SDL.Video.renderClear" "SDL_RenderClear" $
Raw.renderClear r
-- | Get or set the drawing scale for rendering on the current target.
--
-- The drawing coordinates are scaled by the x\/y scaling factors before they are used by the renderer. This allows resolution independent drawing with a single coordinate system.
--
-- If this results in scaling or subpixel drawing by the rendering backend, it will be handled using the appropriate quality hints. For best results use integer scaling factors.
--
-- See @<https://wiki.libsdl.org/SDL_RenderSetScale SDL_RenderSetScale>@ and @<https://wiki.libsdl.org/SDL_RenderGetScale SDL_RenderGetScale>@ for C documentation.
renderScale :: Renderer -> StateVar (V2 CFloat)
renderScale (Renderer r) = makeStateVar renderGetScale renderSetScale
where
renderSetScale (V2 x y) =
throwIfNeg_ "SDL.Video.renderSetScale" "SDL_RenderSetScale" $
Raw.renderSetScale r x y
renderGetScale = liftIO $
alloca $ \w ->
alloca $ \h -> do
Raw.renderGetScale r w h
V2 <$> peek w <*> peek h
-- | Get or set the clip rectangle for rendering on the specified target.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_RenderSetClipRect SDL_RenderSetClipRect>@ and @<https://wiki.libsdl.org/SDL_RenderGetClipRect SDL_RenderGetClipRect>@ for C documentation.
renderClipRect :: Renderer -> StateVar (Maybe (Rectangle CInt))
renderClipRect (Renderer r) = makeStateVar renderGetClipRect renderSetClipRect
where
renderGetClipRect = liftIO $
alloca $ \rPtr -> do
Raw.renderGetClipRect r rPtr
maybePeek peek (castPtr rPtr)
renderSetClipRect rect =
liftIO $
throwIfNeg_ "SDL.Video.renderSetClipRect" "SDL_RenderSetClipRect" $
maybeWith with rect $ Raw.renderSetClipRect r . castPtr
-- | Get or set the drawing area for rendering on the current target.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_RenderSetViewport SDL_RenderSetViewport>@ and @<https://wiki.libsdl.org/SDL_RenderGetViewport SDL_RenderGetViewport>@ for C documentation.
renderViewport :: Renderer -> StateVar (Maybe (Rectangle CInt))
renderViewport (Renderer r) = makeStateVar renderGetViewport renderSetViewport
where
renderGetViewport = liftIO $
alloca $ \rect -> do
Raw.renderGetViewport r rect
maybePeek peek (castPtr rect)
renderSetViewport rect =
liftIO $
throwIfNeg_ "SDL.Video.renderSetViewport" "SDL_RenderSetViewport" $
maybeWith with rect $ Raw.renderSetViewport r . castPtr
-- | Update the screen with any rendering performed since the previous call.
--
-- SDL\'s rendering functions operate on a backbuffer; that is, calling a rendering function such as 'renderDrawLine' does not directly put a line on the screen, but rather updates the backbuffer. As such, you compose your entire scene and present the composed backbuffer to the screen as a complete picture.
--
-- Therefore, when using SDL's rendering API, one does all drawing intended for the frame, and then calls this function once per frame to present the final drawing to the user.
--
-- The backbuffer should be considered invalidated after each present; do not assume that previous contents will exist between frames. You are strongly encouraged to call 'renderClear' to initialize the backbuffer before starting each new frame's drawing, even if you plan to overwrite every pixel.
--
-- See @<https://wiki.libsdl.org/SDL_RenderPresent SDL_RenderPresent>@ for C documentation.
renderPresent :: MonadIO m => Renderer -> m ()
renderPresent (Renderer r) = Raw.renderPresent r
-- | Copy a portion of the texture to the current rendering target.
--
-- See @<https://wiki.libsdl.org/SDL_RenderCopy SDL_RenderCopy>@ for C documentation.
renderCopy :: MonadIO m
=> Renderer -- ^ The rendering context
-> Texture -- ^ The source texture
-> Maybe (Rectangle CInt) -- ^ The source rectangle to copy, or 'Nothing' for the whole texture
-> Maybe (Rectangle CInt) -- ^ The destination rectangle to copy to, or 'Nothing' for the whole rendering target. The texture will be stretched to fill the given rectangle.
-> m ()
renderCopy (Renderer r) (Texture t) srcRect dstRect =
liftIO $
throwIfNeg_ "SDL.Video.renderCopy" "SDL_RenderCopy" $
maybeWith with srcRect $ \src ->
maybeWith with dstRect $ \dst ->
Raw.renderCopy r t (castPtr src) (castPtr dst)
-- | Copy a portion of the texture to the current rendering target, optionally rotating it by angle around the given center and also flipping it top-bottom and/or left-right.
--
-- See @<https://wiki.libsdl.org/SDL_RenderCopyEx SDL_RenderCopyEx>@ for C documentation.
renderCopyEx :: MonadIO m
             => Renderer -- ^ The rendering context
             -> Texture -- ^ The source texture
             -> Maybe (Rectangle CInt) -- ^ The source rectangle to copy, or 'Nothing' for the whole texture
             -> Maybe (Rectangle CInt) -- ^ The destination rectangle to copy to, or 'Nothing' for the whole rendering target. The texture will be stretched to fill the given rectangle.
             -> CDouble -- ^ The rotation, in degrees, applied to the destination rectangle
             -> Maybe (Point V2 CInt) -- ^ The point of rotation, or 'Nothing' for the center of the destination rectangle
             -> V2 Bool -- ^ Whether to flip in the X or Y axis
             -> m ()
renderCopyEx (Renderer r) (Texture t) srcRect dstRect theta center flips =
  liftIO $
  throwIfNeg_ "SDL.Video.renderCopyEx" "SDL_RenderCopyEx" $
  maybeWith with srcRect $ \src ->
  maybeWith with dstRect $ \dst ->
  maybeWith with center $ \c ->
  Raw.renderCopyEx r t (castPtr src) (castPtr dst) theta (castPtr c)
                   -- Combine the two flip booleans into SDL's flip bitmask.
                   (case flips of
                      V2 x y -> (if x then Raw.SDL_FLIP_HORIZONTAL else 0) .|.
                                (if y then Raw.SDL_FLIP_VERTICAL else 0))
-- | Draw a line on the current rendering target.
--
-- See @<https://wiki.libsdl.org/SDL_RenderDrawLine SDL_RenderDrawLine>@ for C documentation.
renderDrawLine :: (Functor m,MonadIO m)
=> Renderer
-> Point V2 CInt -- ^ The start point of the line
-> Point V2 CInt -- ^ The end point of the line
-> m ()
renderDrawLine (Renderer r) (P (V2 x y)) (P (V2 x' y')) =
throwIfNeg_ "SDL.Video.renderDrawLine" "SDL_RenderDrawLine" $
Raw.renderDrawLine r x y x' y'
-- | Draw a series of connected lines on the current rendering target.
--
-- See @<https://wiki.libsdl.org/SDL_RenderDrawLines SDL_RenderDrawLines>@ for C documentation.
renderDrawLines :: MonadIO m
=> Renderer
-> SV.Vector (Point V2 CInt) -- ^ A 'SV.Vector' of points along the line. SDL will draw lines between these points.
-> m ()
renderDrawLines (Renderer r) points =
liftIO $
throwIfNeg_ "SDL.Video.renderDrawLines" "SDL_RenderDrawLines" $
SV.unsafeWith points $ \cp ->
Raw.renderDrawLines r
(castPtr cp)
(fromIntegral (SV.length points))
-- | Draw a point on the current rendering target.
--
-- See @<https://wiki.libsdl.org/SDL_RenderDrawPoint SDL_RenderDrawPoint>@ for C documentation.
renderDrawPoint :: (Functor m, MonadIO m) => Renderer -> Point V2 CInt -> m ()
renderDrawPoint (Renderer r) (P (V2 x y)) =
throwIfNeg_ "SDL.Video.renderDrawPoint" "SDL_RenderDrawPoint" $
Raw.renderDrawPoint r x y
-- | Draw multiple points on the current rendering target.
--
-- See @<https://wiki.libsdl.org/SDL_RenderDrawPoints SDL_RenderDrawPoints>@ for C documentation.
renderDrawPoints :: MonadIO m => Renderer -> SV.Vector (Point V2 CInt) -> m ()
renderDrawPoints (Renderer r) points =
liftIO $
throwIfNeg_ "SDL.Video.renderDrawPoints" "SDL_RenderDrawPoints" $
SV.unsafeWith points $ \cp ->
Raw.renderDrawPoints r
(castPtr cp)
(fromIntegral (SV.length points))
-- | Copy an existing surface into a new one that is optimized for blitting to a surface of a specified pixel format.
--
-- This function is used to optimize images for faster repeat blitting. This is accomplished by converting the original and storing the result as a new surface. The new, optimized surface can then be used as the source for future blits, making them faster.
--
-- See @<https://wiki.libsdl.org/SDL_ConvertSurface SDL_ConvertSurface>@ for C documentation.
convertSurface :: (Functor m,MonadIO m)
=> Surface -- ^ The 'Surface' to convert
-> SurfacePixelFormat -- ^ The pixel format that the new surface is optimized for
-> m Surface
convertSurface (Surface s _) (SurfacePixelFormat fmt) =
fmap unmanagedSurface $
throwIfNull "SDL.Video.Renderer.convertSurface" "SDL_ConvertSurface" $
Raw.convertSurface s fmt 0
-- | Perform a scaled surface copy to a destination surface.
--
-- See @<https://wiki.libsdl.org/SDL_BlitScaled SDL_BlitScaled>@ for C documentation.
blitScaled :: MonadIO m
=> Surface -- ^ The 'Surface' to be copied from
-> Maybe (Rectangle CInt) -- ^ The rectangle to be copied, or 'Nothing' to copy the entire surface
-> Surface -- ^ The 'Surface' that is the blit target
-> Maybe (Rectangle CInt) -- ^ The rectangle that is copied into, or 'Nothing' to copy into the entire surface
-> m ()
blitScaled (Surface src _) srcRect (Surface dst _) dstRect =
liftIO $
throwIfNeg_ "SDL.Video.blitSurface" "SDL_BlitSurface" $
maybeWith with srcRect $ \srcPtr ->
maybeWith with dstRect $ \dstPtr ->
Raw.blitScaled src (castPtr srcPtr) dst (castPtr dstPtr)
-- | Get or set the color key (transparent pixel color) for a surface.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetColorKey SDL_SetColorKey>@ and @<https://wiki.libsdl.org/SDL_GetColorKey SDL_GetColorKey>@ for C documentation.
colorKey :: Surface -> StateVar (Maybe Word32)
colorKey (Surface s _) = makeStateVar getColorKey setColorKey
where
getColorKey =
liftIO $
alloca $ \keyPtr -> do
ret <- Raw.getColorKey s keyPtr
if ret == -1 then return Nothing else fmap Just (peek keyPtr)
setColorKey key =
liftIO $
throwIfNeg_ "SDL.Video.Renderer.setColorKey" "SDL_SetColorKey" $
case key of
Nothing ->
alloca $ \keyPtr -> do
-- TODO Error checking?
ret <- Raw.getColorKey s keyPtr
if ret == -1
-- if ret == -1 then there is no key enabled, so we have nothing to
-- do.
then return 0
else do key' <- peek keyPtr
Raw.setColorKey s 0 key'
Just key' -> do
Raw.setColorKey s 1 key'
-- | Get or set the additional color value multiplied into render copy operations.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetTextureColorMod SDL_SetTextureColorMod>@ and @<https://wiki.libsdl.org/SDL_GetTextureColorMod SDL_GetTextureColorMod>@ for C documentation.
textureColorMod :: Texture -> StateVar (V3 Word8)
textureColorMod (Texture t) = makeStateVar getTextureColorMod setTextureColorMod
where
getTextureColorMod = liftIO $
alloca $ \r ->
alloca $ \g ->
alloca $ \b -> do
throwIfNeg_ "SDL.Video.Renderer.getTextureColorMod" "SDL_GetTextureColorMod" $
Raw.getTextureColorMod t r g b
V3 <$> peek r <*> peek g <*> peek b
setTextureColorMod (V3 r g b) =
throwIfNeg_ "SDL.Video.Renderer.setTextureColorMod" "SDL_SetTextureColorMod" $
Raw.setTextureColorMod t r g b
data PixelFormat
= Unknown
| Index1LSB
| Index1MSB
| Index4LSB
| Index4MSB
| Index8
| RGB332
| RGB444
| RGB555
| BGR555
| ARGB4444
| RGBA4444
| ABGR4444
| BGRA4444
| ARGB1555
| RGBA5551
| ABGR1555
| BGRA5551
| RGB565
| BGR565
| RGB24
| BGR24
| RGB888
| RGBX8888
| BGR888
| BGRX8888
| ARGB8888
| RGBA8888
| ABGR8888
| BGRA8888
| ARGB2101010
| YV12
| IYUV
| YUY2
| UYVY
| YVYU
deriving (Bounded, Data, Enum, Eq, Generic, Ord, Read, Show, Typeable)
-- | Decode a raw @SDL_PixelFormatEnum@ constant into a 'PixelFormat'.
instance FromNumber PixelFormat Word32 where
  fromNumber n' = case n' of
    Raw.SDL_PIXELFORMAT_UNKNOWN -> Unknown
    Raw.SDL_PIXELFORMAT_INDEX1LSB -> Index1LSB
    Raw.SDL_PIXELFORMAT_INDEX1MSB -> Index1MSB
    Raw.SDL_PIXELFORMAT_INDEX4LSB -> Index4LSB
    Raw.SDL_PIXELFORMAT_INDEX4MSB -> Index4MSB
    Raw.SDL_PIXELFORMAT_INDEX8 -> Index8
    Raw.SDL_PIXELFORMAT_RGB332 -> RGB332
    Raw.SDL_PIXELFORMAT_RGB444 -> RGB444
    Raw.SDL_PIXELFORMAT_RGB555 -> RGB555
    Raw.SDL_PIXELFORMAT_BGR555 -> BGR555
    Raw.SDL_PIXELFORMAT_ARGB4444 -> ARGB4444
    Raw.SDL_PIXELFORMAT_RGBA4444 -> RGBA4444
    Raw.SDL_PIXELFORMAT_ABGR4444 -> ABGR4444
    Raw.SDL_PIXELFORMAT_BGRA4444 -> BGRA4444
    Raw.SDL_PIXELFORMAT_ARGB1555 -> ARGB1555
    Raw.SDL_PIXELFORMAT_RGBA5551 -> RGBA5551
    Raw.SDL_PIXELFORMAT_ABGR1555 -> ABGR1555
    Raw.SDL_PIXELFORMAT_BGRA5551 -> BGRA5551
    Raw.SDL_PIXELFORMAT_RGB565 -> RGB565
    Raw.SDL_PIXELFORMAT_BGR565 -> BGR565
    Raw.SDL_PIXELFORMAT_RGB24 -> RGB24
    Raw.SDL_PIXELFORMAT_BGR24 -> BGR24
    Raw.SDL_PIXELFORMAT_RGB888 -> RGB888
    Raw.SDL_PIXELFORMAT_RGBX8888 -> RGBX8888
    Raw.SDL_PIXELFORMAT_BGR888 -> BGR888
    Raw.SDL_PIXELFORMAT_BGRX8888 -> BGRX8888
    Raw.SDL_PIXELFORMAT_ARGB8888 -> ARGB8888
    Raw.SDL_PIXELFORMAT_RGBA8888 -> RGBA8888
    Raw.SDL_PIXELFORMAT_ABGR8888 -> ABGR8888
    Raw.SDL_PIXELFORMAT_BGRA8888 -> BGRA8888
    Raw.SDL_PIXELFORMAT_ARGB2101010 -> ARGB2101010
    Raw.SDL_PIXELFORMAT_YV12 -> YV12
    Raw.SDL_PIXELFORMAT_IYUV -> IYUV
    Raw.SDL_PIXELFORMAT_YUY2 -> YUY2
    Raw.SDL_PIXELFORMAT_UYVY -> UYVY
    Raw.SDL_PIXELFORMAT_YVYU -> YVYU
    -- Report the offending value rather than the uninformative
    -- "not numbered", matching the BlendMode instance's diagnostic style.
    _ -> error $ "fromNumber<PixelFormat>: unknown pixel format: " ++ show n'
instance ToNumber PixelFormat Word32 where
toNumber pf = case pf of
Unknown -> Raw.SDL_PIXELFORMAT_UNKNOWN
Index1LSB -> Raw.SDL_PIXELFORMAT_INDEX1LSB
Index1MSB -> Raw.SDL_PIXELFORMAT_INDEX1MSB
Index4LSB -> Raw.SDL_PIXELFORMAT_INDEX4LSB
Index4MSB -> Raw.SDL_PIXELFORMAT_INDEX4MSB
Index8 -> Raw.SDL_PIXELFORMAT_INDEX8
RGB332 -> Raw.SDL_PIXELFORMAT_RGB332
RGB444 -> Raw.SDL_PIXELFORMAT_RGB444
RGB555 -> Raw.SDL_PIXELFORMAT_RGB555
BGR555 -> Raw.SDL_PIXELFORMAT_BGR555
ARGB4444 -> Raw.SDL_PIXELFORMAT_ARGB4444
RGBA4444 -> Raw.SDL_PIXELFORMAT_RGBA4444
ABGR4444 -> Raw.SDL_PIXELFORMAT_ABGR4444
BGRA4444 -> Raw.SDL_PIXELFORMAT_BGRA4444
ARGB1555 -> Raw.SDL_PIXELFORMAT_ARGB1555
RGBA5551 -> Raw.SDL_PIXELFORMAT_RGBA5551
ABGR1555 -> Raw.SDL_PIXELFORMAT_ABGR1555
BGRA5551 -> Raw.SDL_PIXELFORMAT_BGRA5551
RGB565 -> Raw.SDL_PIXELFORMAT_RGB565
BGR565 -> Raw.SDL_PIXELFORMAT_BGR565
RGB24 -> Raw.SDL_PIXELFORMAT_RGB24
BGR24 -> Raw.SDL_PIXELFORMAT_BGR24
RGB888 -> Raw.SDL_PIXELFORMAT_RGB888
RGBX8888 -> Raw.SDL_PIXELFORMAT_RGBX8888
BGR888 -> Raw.SDL_PIXELFORMAT_BGR888
BGRX8888 -> Raw.SDL_PIXELFORMAT_BGRX8888
ARGB8888 -> Raw.SDL_PIXELFORMAT_ARGB8888
RGBA8888 -> Raw.SDL_PIXELFORMAT_RGBA8888
ABGR8888 -> Raw.SDL_PIXELFORMAT_ABGR8888
BGRA8888 -> Raw.SDL_PIXELFORMAT_BGRA8888
ARGB2101010 -> Raw.SDL_PIXELFORMAT_ARGB2101010
YV12 -> Raw.SDL_PIXELFORMAT_YV12
IYUV -> Raw.SDL_PIXELFORMAT_IYUV
YUY2 -> Raw.SDL_PIXELFORMAT_YUY2
UYVY -> Raw.SDL_PIXELFORMAT_UYVY
YVYU -> Raw.SDL_PIXELFORMAT_YVYU
-- | Renderer acceleration mode
data RendererType
= UnacceleratedRenderer
-- ^ The renderer does not use hardware acceleration
| AcceleratedRenderer
-- ^ The renderer uses hardware acceleration and refresh rate is ignored
| AcceleratedVSyncRenderer
-- ^ The renderer uses hardware acceleration and present is synchronized with the refresh rate
| SoftwareRenderer
-- ^ The renderer is a software fallback
deriving (Bounded, Data, Enum, Eq, Generic, Ord, Read, Show, Typeable)
-- | The configuration data used when creating windows.
data RendererConfig = RendererConfig
{ rendererType :: RendererType
-- ^ The renderer's acceleration mode
, rendererTargetTexture :: Bool
-- ^ The renderer supports rendering to texture
} deriving (Data, Eq, Generic, Ord, Read, Show, Typeable)
instance FromNumber RendererConfig Word32 where
fromNumber n = RendererConfig
{ rendererType = rendererType'
(n .&. Raw.SDL_RENDERER_SOFTWARE /= 0)
(n .&. Raw.SDL_RENDERER_ACCELERATED /= 0)
(n .&. Raw.SDL_RENDERER_PRESENTVSYNC /= 0)
, rendererTargetTexture = n .&. Raw.SDL_RENDERER_TARGETTEXTURE /= 0
}
where
rendererType' s a v | s = SoftwareRenderer
| a && v = AcceleratedVSyncRenderer
| a = AcceleratedRenderer
| otherwise = UnacceleratedRenderer
-- | Encode a 'RendererConfig' as the @SDL_RendererFlags@ bitmask.
--
-- Exactly one of the software/accelerated bits is set (SDL treats all
-- non-software renderer types as accelerated), vsync is requested only for
-- 'AcceleratedVSyncRenderer', and the target-texture bit mirrors
-- 'rendererTargetTexture'.
instance ToNumber RendererConfig Word32 where
  toNumber cfg = softwareBit .|. acceleratedBit .|. vsyncBit .|. targetBit
    where
      rType = rendererType cfg
      isSoftware = rType == SoftwareRenderer
      softwareBit = if isSoftware then Raw.SDL_RENDERER_SOFTWARE else 0
      acceleratedBit = if isSoftware then 0 else Raw.SDL_RENDERER_ACCELERATED
      vsyncBit = if rType == AcceleratedVSyncRenderer then Raw.SDL_RENDERER_PRESENTVSYNC else 0
      targetBit = if rendererTargetTexture cfg then Raw.SDL_RENDERER_TARGETTEXTURE else 0
-- | Default options for 'RendererConfig'.
--
-- @
-- 'defaultRenderer' = 'RendererConfig'
-- { 'rendererType' = 'AcceleratedRenderer'
-- , 'rendererTargetTexture' = False
-- }
-- @
defaultRenderer :: RendererConfig
defaultRenderer = RendererConfig
{ rendererType = AcceleratedRenderer
, rendererTargetTexture = False
}
-- | Information about an instantiated 'Renderer'.
data RendererInfo = RendererInfo
{ rendererInfoName :: Text
-- ^ The name of the renderer
, rendererInfoFlags :: RendererConfig
-- ^ Supported renderer features
, rendererInfoNumTextureFormats :: Word32
-- ^ The number of available texture formats
, rendererInfoTextureFormats :: [PixelFormat]
-- ^ The available texture formats
, rendererInfoMaxTextureWidth :: CInt
-- ^ The maximum texture width
, rendererInfoMaxTextureHeight :: CInt
-- ^ The maximum texture height
} deriving (Eq, Generic, Ord, Read, Show, Typeable)
fromRawRendererInfo :: MonadIO m => Raw.RendererInfo -> m RendererInfo
fromRawRendererInfo (Raw.RendererInfo name flgs ntf tfs mtw mth) = liftIO $ do
name' <- Text.decodeUtf8 <$> BS.packCString name
return $ RendererInfo name' (fromNumber flgs) ntf (fmap fromNumber tfs) mtw mth
-- | Get information about a rendering context.
--
-- See @<https://wiki.libsdl.org/SDL_GetRendererInfo SDL_GetRendererInfo>@ for C documentation.
getRendererInfo :: MonadIO m => Renderer -> m RendererInfo
getRendererInfo (Renderer renderer) = liftIO $
alloca $ \rptr -> do
throwIfNeg_ "getRendererInfo" "SDL_GetRendererInfo" $
Raw.getRendererInfo renderer rptr
peek rptr >>= fromRawRendererInfo
-- | Enumerate all known render drivers on the system, and determine their supported features.
--
-- See @<https://wiki.libsdl.org/SDL_GetRenderDriverInfo SDL_GetRenderDriverInfo>@ for C documentation.
getRenderDriverInfo :: MonadIO m => m [RendererInfo]
getRenderDriverInfo = liftIO $ do
count <- Raw.getNumRenderDrivers
traverse go [0..count-1]
where
go idx = alloca $ \rptr -> do
throwIfNeg_ "getRenderDriverInfo" "SDL_GetRenderDriverInfo" $
Raw.getRenderDriverInfo idx rptr
peek rptr >>= fromRawRendererInfo
-- | Get or set the additional alpha value multiplied into render copy operations.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetTextureAlphaMod SDL_SetTextureAlphaMod>@ and @<https://wiki.libsdl.org/SDL_GetTextureAlphaMod SDL_GetTextureAlphaMod>@ for C documentation.
textureAlphaMod :: Texture -> StateVar Word8
textureAlphaMod (Texture t) = makeStateVar getTextureAlphaMod setTextureAlphaMod
where
getTextureAlphaMod = liftIO $
alloca $ \x -> do
throwIfNeg_ "SDL.Video.Renderer.getTextureAlphaMod" "SDL_GetTextureAlphaMod" $
Raw.getTextureAlphaMod t x
peek x
setTextureAlphaMod alpha =
throwIfNeg_ "SDL.Video.Renderer.setTextureAlphaMod" "SDL_SetTextureAlphaMod" $
Raw.setTextureAlphaMod t alpha
-- | Get or set the blend mode used for texture copy operations.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetTextureBlendMode SDL_SetTextureBlendMode>@ and @<https://wiki.libsdl.org/SDL_GetTextureBlendMode SDL_GetTextureBlendMode>@ for C documentation.
textureBlendMode :: Texture -> StateVar BlendMode
textureBlendMode (Texture t) = makeStateVar getTextureBlendMode setTextureBlendMode
where
getTextureBlendMode = liftIO $
alloca $ \x -> do
throwIfNeg_ "SDL.Video.Renderer.getTextureBlendMode" "SDL_GetTextureBlendMode" $
Raw.getTextureBlendMode t x
fromNumber <$> peek x
setTextureBlendMode bm =
throwIfNeg_ "SDL.Video.Renderer.setTextureBlendMode" "SDL_SetTextureBlendMode" $
Raw.setTextureBlendMode t (toNumber bm)
-- | Get or set the blend mode used for blit operations.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetSurfaceBlendMode SDL_SetSurfaceBlendMode>@ and @<https://wiki.libsdl.org/SDL_GetSurfaceBlendMode SDL_GetSurfaceBlendMode>@ for C documentation.
surfaceBlendMode :: Surface -> StateVar BlendMode
surfaceBlendMode (Surface s _) = makeStateVar getSurfaceBlendMode setSurfaceBlendMode
where
getSurfaceBlendMode = liftIO $
alloca $ \x -> do
throwIfNeg_ "SDL.Video.Renderer.getSurfaceBlendMode" "SDL_GetSurfaceBlendMode" $
Raw.getSurfaceBlendMode s x
fromNumber <$> peek x
setSurfaceBlendMode bm =
throwIfNeg_ "SDL.Video.Renderer.setSurfaceBlendMode" "SDL_SetSurfaceBlendMode" $
Raw.setSurfaceBlendMode s (toNumber bm)
-- | Get or set the current render target. 'Nothing' corresponds to the default render target.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_SetRenderTarget SDL_SetRenderTarget>@ and @<https://wiki.libsdl.org/SDL_GetRenderTarget SDL_GetRenderTarget>@ for C documentation.
renderTarget :: Renderer -> StateVar (Maybe Texture)
renderTarget (Renderer r) = makeStateVar getRenderTarget setRenderTarget
where
getRenderTarget = do
t <- Raw.getRenderTarget r
return $
if t == nullPtr
then Nothing
else Just (Texture t)
setRenderTarget texture =
throwIfNeg_ "SDL.Video.Renderer.setRenderTarget" "SDL_SetRenderTarget" $
case texture of
Nothing -> Raw.setRenderTarget r nullPtr
Just (Texture t) -> Raw.setRenderTarget r t
-- | Get or set the device independent resolution for rendering.
--
-- This 'StateVar' can be modified using '$=' and the current value retrieved with 'get'.
--
-- See @<https://wiki.libsdl.org/SDL_RenderSetLogicalSize SDL_RenderSetLogicalSize>@ and @<https://wiki.libsdl.org/SDL_RenderGetLogicalSize SDL_RenderGetLogicalSize>@ for C documentation.
renderLogicalSize :: Renderer -> StateVar (Maybe (V2 CInt))
renderLogicalSize (Renderer r) = makeStateVar renderGetLogicalSize renderSetLogicalSize
where
renderGetLogicalSize = liftIO $
alloca $ \w -> do
alloca $ \h -> do
Raw.renderGetLogicalSize r w h
v <- V2 <$> peek w <*> peek h
return $ if v == 0 then Nothing else Just v
renderSetLogicalSize v =
throwIfNeg_ "SDL.Video.renderSetLogicalSize" "SDL_RenderSetLogicalSize" $ do
let (x,y) = case v of Just (V2 vx vy) -> (vx, vy)
Nothing -> (0,0)
Raw.renderSetLogicalSize r x y
-- | Determine whether a window supports the use of render targets.
--
-- See @<https://wiki.libsdl.org/SDL_RenderTargetSupported SDL_RenderTargetSupported>@ for C documentation.
renderTargetSupported :: (MonadIO m) => Renderer -> m Bool
renderTargetSupported (Renderer r) = Raw.renderTargetSupported r
| bj4rtmar/sdl2 | src/SDL/Video/Renderer.hs | bsd-3-clause | 49,276 | 0 | 21 | 10,032 | 8,524 | 4,458 | 4,066 | 764 | 4 |
{-# LANGUAGE PackageImports #-}
module Main where
import "crypto-random" Crypto.Random
import Data.ByteString as B
import System.Environment
import Text.Read (readMaybe)
-- | Write @sz@ bytes of cryptographically secure random data to @file@.
gen :: String -> Int -> IO ()
gen file sz = do
    pool <- createEntropyPool
    let rng = cprgCreate pool :: SystemRNG
        (bytes, _) = cprgGenerate sz rng
    B.writeFile file bytes
-- | Entry point: parse @\<file\> \<size\>@ from the command line and write
-- that many random bytes to the file.
--
-- Uses 'readMaybe' rather than the partial 'read' so a non-numeric size
-- argument produces the usage message instead of a bare parse exception.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [file, szStr]
      | Just sz <- readMaybe szStr -> gen file sz
    _ -> error "usage: generate-random <file> <size>"
| vincenthz/hs-crypto-random | Tests/GenerateRandom.hs | bsd-3-clause | 489 | 0 | 12 | 125 | 159 | 80 | 79 | 16 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Blockchain.Data.AddressStateDB (
AddressState(..),
blankAddressState,
getAddressState,
getAllAddressStates,
putAddressState,
deleteAddressState,
addressStateExists
) where
import Data.Binary
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Functor
import Data.List
import Numeric
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import Blockchain.Data.Address
import qualified Blockchain.Colors as CL
import Blockchain.DBM
import Blockchain.ExtDBs
import Blockchain.Format
import qualified Data.NibbleString as N
import Blockchain.Data.RLP
import Blockchain.SHA
import Blockchain.Util
--import Debug.Trace
-- | Per-account state as stored in the state trie: transaction nonce,
-- balance, root of the account's storage trie, and hash of its code.
data AddressState = AddressState { addressStateNonce::Integer, addressStateBalance::Integer, addressStateContractRoot::SHAPtr, addressStateCodeHash::SHA } deriving (Show)
-- | The state of an account that has never been touched: zero nonce, zero
-- balance, an empty storage trie, and the hash of the empty code string.
blankAddressState::AddressState
blankAddressState = AddressState { addressStateNonce=0, addressStateBalance=0, addressStateContractRoot=emptyTriePtr, addressStateCodeHash=hash "" }
-- | Console rendering of an 'AddressState': the nonce in hex, the balance
-- in decimal, and the storage root / code hash via their 'pretty' forms.
instance Format AddressState where
  format a = CL.blue "AddressState" ++
             tab("\nnonce: " ++ showHex (addressStateNonce a) "" ++
                 "\nbalance: " ++ show (toInteger $ addressStateBalance a) ++
                 "\ncontractRoot: " ++ show (pretty $ addressStateContractRoot a) ++
                 "\ncodeHash: " ++ show (pretty $ addressStateCodeHash a))
-- | RLP serialization of an 'AddressState' as the canonical four-element
-- array @[nonce, balance, storageRoot, codeHash]@.
instance RLPSerializable AddressState where
  --rlpEncode a | balance a < 0 = rlpEncode a{balance = - balance a}
  -- RLP integers are non-negative, so a negative balance can never be
  -- encoded; fail loudly instead of producing garbage.
  -- (Fixed typo in the error message: "cal" -> "call".)
  rlpEncode a | addressStateBalance a < 0 = error $ "Error in call to rlpEncode for AddressState: AddressState has negative balance: " ++ format a
  rlpEncode a = RLPArray [
    rlpEncode $ toInteger $ addressStateNonce a,
    rlpEncode $ toInteger $ addressStateBalance a,
    rlpEncode $ addressStateContractRoot a,
    rlpEncode $ addressStateCodeHash a
    ]

  -- Decode the four-field RLP array back into an AddressState.
  rlpDecode (RLPArray [n, b, cr, ch]) =
    AddressState {
      addressStateNonce=fromInteger $ rlpDecode n,
      addressStateBalance=fromInteger $ rlpDecode b,
      addressStateContractRoot=rlpDecode cr,
      addressStateCodeHash=rlpDecode ch
      }
  rlpDecode x = error $ "Missing case in rlpDecode for AddressState: " ++ show (pretty x)
-- | The trie key for an account: the binary-encoded address as an
-- even-length nibble string.
addressAsNibbleString::Address->N.NibbleString
addressAsNibbleString (Address a) =
  N.EvenNibbleString (BL.toStrict (encode a))
-- | Look up an account's state in the trie; an absent key yields
-- 'blankAddressState' (an untouched account).
getAddressState::Address->DBM AddressState
getAddressState addr = do
  mVal <- getKeyVal (addressAsNibbleString addr)
  case mVal of
    Nothing -> return blankAddressState
    Just v -> return (rlpDecode (rlpDeserialize (rlpDecode v)))
-- | Flatten a nibble string into bytes; an odd leading nibble is prepended
-- as its own byte.
nibbleString2ByteString::N.NibbleString->B.ByteString
nibbleString2ByteString ns =
  case ns of
    N.EvenNibbleString bs -> bs
    N.OddNibbleString nib rest -> nib `B.cons` rest
-- | Enumerate every account in the trie, decoding each key back into an
-- 'Address' and each value into an 'AddressState'.
getAllAddressStates::DBM [(Address, AddressState)]
getAllAddressStates = do
  kvs <- getAllKeyVals
  return [ (toAddress k, toState v) | (k, v) <- kvs ]
  where
    toAddress = Address . fromInteger . byteString2Integer . nibbleString2ByteString
    toState = rlpDecode . rlpDeserialize . rlpDecode
-- | Store an account's state under its trie key, double-wrapped in RLP
-- (the serialized state is itself an RLP string value).
putAddressState::Address->AddressState->DBM ()
putAddressState addr st =
  putKeyVal (addressAsNibbleString addr) encoded
  where
    encoded = rlpEncode (rlpSerialize (rlpEncode st))
-- | Remove an account's entry from the trie.
deleteAddressState::Address->DBM ()
deleteAddressState = deleteKey . addressAsNibbleString
-- | Whether the trie currently holds an entry for this address.
addressStateExists::Address->DBM Bool
addressStateExists = keyExists . addressAsNibbleString
| jamshidh/ethereum-data-leveldb | src/Blockchain/Data/AddressStateDB.hs | bsd-3-clause | 3,557 | 0 | 18 | 566 | 905 | 489 | 416 | 72 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Web.Spock.FrameworkSpecHelper where
#if MIN_VERSION_hspec_wai(0,8,0)
import Test.Hspec.Wai.Matcher
#endif
#if MIN_VERSION_base(4,11,0)
#else
import Data.Monoid
#endif
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Lazy.Char8 as BSLC
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Word
import Network.HTTP.Types.Header
import Network.HTTP.Types.Method
import qualified Network.Wai as Wai
import Test.Hspec
import Test.Hspec.Wai
-- | Build a 'ResponseMatcher' that checks both status code and exact
-- body.  The construction differs between hspec-wai versions: from
-- 0.8.0 the body is matched via 'bodyEquals', before that via 'Just'.
statusBodyMatch :: Int -> BSLC.ByteString -> ResponseMatcher
#if MIN_VERSION_hspec_wai(0,8,0)
statusBodyMatch s b =
    ResponseMatcher
    { matchStatus = s
    , matchBody = bodyEquals b
    , matchHeaders = []
    }
#else
statusBodyMatch s b =
    ResponseMatcher { matchStatus = s, matchBody = Just b, matchHeaders = [] }
#endif
-- | Spec for the request-body size limit: bodies up to @maxSize@ bytes
-- must be echoed back with 200, anything over must be rejected with
-- 413 (Payload Too Large).  The application under test is constructed
-- with the limit passed in.
sizeLimitSpec :: (Word64 -> IO Wai.Application) -> Spec
sizeLimitSpec app =
    with (app maxSize) $
    describe "Request size limit" $
    do
      it "allows small enough requests the way" $
         do
           post "/size" okBs `shouldRespondWith` matcher 200 okBs
           post "/size" okBs2 `shouldRespondWith` matcher 200 okBs2
      it "denys large requests the way" $
         post "/size" tooLongBs `shouldRespondWith` 413
    where
      matcher = statusBodyMatch
      maxSize = 1024
      -- one body clearly below the limit, one exactly at it
      okBs = BSLC.replicate (fromIntegral maxSize - 50) 'i'
      okBs2 = BSLC.replicate (fromIntegral maxSize) 'j'
      tooLongBs = BSLC.replicate (fromIntegral maxSize + 100) 'k'
-- | Full shared framework spec: mount the application once and run
-- every test group against it, in order.
frameworkSpec :: IO Wai.Application -> Spec
frameworkSpec app =
    with app $
      routingSpec >> actionSpec >> headerTest >> cookieTest
-- | Exercises the routing layer: root routes, query/form/JSON/raw
-- bodies, verb dispatch (including custom verbs), route parameters,
-- trailing slashes, subcomponents, fallback handlers, and
-- content-negotiation.
routingSpec :: SpecWith (st, Wai.Application)
routingSpec =
    describe "Routing Framework" $
    do
      it "allows root actions" $
         get "/" `shouldRespondWith` "root" {matchStatus = 200}
      it "allows access to get params" $
         get "/get-params?foo=bar" `shouldRespondWith` "[(\"foo\",\"bar\")]" {matchStatus = 200}
      it "supports wai app responses" $
         do
           get "/wai/foo" `shouldRespondWith` "[\"wai\",\"foo\"]" {matchStatus = 200}
           get "/wai/foo/bar" `shouldRespondWith` "[\"wai\",\"foo\",\"bar\"]" {matchStatus = 200}
      it "allows access to post params" $
         postHtmlForm "/post-params" [("foo", "bar")]
         `shouldRespondWith` "[(\"foo\",\"bar\")]" {matchStatus = 200}
      it "allows access to empty post params" $
         postHtmlForm "/post-params" []
         `shouldRespondWith` "[]" {matchStatus = 200}
      -- a body that is not a valid form must not crash param parsing
      it "allows broken body for post params" $
         post "/post-params" ""
         `shouldRespondWith` "[]" {matchStatus = 200}
      it "allows json body" $
         post "/json" "{ \"sampleJson\": \"foo\"}"
         `shouldRespondWith` "foo" {matchStatus = 200}
      it "allows raw body" $
         post "/raw-body" "raw" `shouldRespondWith` "raw" {matchStatus = 200}
      it "allows empty raw body" $
         post "/raw-body" "" `shouldRespondWith` "" {matchStatus = 200}
      -- "/all/verbs" is registered verb-agnostically, including
      -- non-standard methods
      it "matches regardless of the VERB" $
         do
           get "/all/verbs" `shouldRespondWith` "ok" {matchStatus = 200}
           post "/all/verbs" "" `shouldRespondWith` "ok" {matchStatus = 200}
           request "FIZZBUZZ" "/all/verbs" [] "" `shouldRespondWith` "ok" {matchStatus = 200}
           request "NOTIFY" "/all/verbs" [] "" `shouldRespondWith` "ok" {matchStatus = 200}
      it "routes different HTTP-verbs to different actions" $
         do
           verbTest get "GET"
           verbTest (`post` "") "POST"
           verbTest (`put` "") "PUT"
           verbTest delete "DELETE"
           verbTest (`patch` "") "PATCH"
           verbTestGp get "GETPOST"
           verbTestGp (`post` "") "GETPOST"
      it "can extract params from routes" $
         get "/param-test/42" `shouldRespondWith` "int42" {matchStatus = 200}
      -- the static route must win over the capturing route
      it "can handle multiple matching routes" $
         get "/param-test/static" `shouldRespondWith` "static" {matchStatus = 200}
      it "ignores trailing slashes" $
         get "/param-test/static/" `shouldRespondWith` "static" {matchStatus = 200}
      it "works with subcomponents" $
         do
           get "/subcomponent/foo" `shouldRespondWith` "foo" {matchStatus = 200}
           get "/subcomponent/subcomponent2/bar" `shouldRespondWith` "bar" {matchStatus = 200}
      it "allows the definition of a fallback handler" $
         get "/askldjas/aklsdj" `shouldRespondWith` "askldjas/aklsdj" {matchStatus = 200}
      it "allows the definition of a fallback handler for custom verb" $
         request "MYVERB" "/askldjas/aklsdj" [] "" `shouldRespondWith` "askldjas/aklsdj" {matchStatus = 200}
      it "detected the preferred format" $
         request "GET" "/preferred-format" [("Accept", "text/html,application/xml;q=0.9,image/webp,*/*;q=0.8")] "" `shouldRespondWith` "html" {matchStatus = 200}
      it "/test-slash and test-noslash are the same thing" $
         do
           get "/test-slash" `shouldRespondWith` "ok" {matchStatus = 200}
           get "test-slash" `shouldRespondWith` "ok" {matchStatus = 200}
           get "/test-noslash" `shouldRespondWith` "ok" {matchStatus = 200}
           get "test-noslash" `shouldRespondWith` "ok" {matchStatus = 200}
      it "allows custom verbs" $
         request "NOTIFY" "/notify/itnotifies" [] "" `shouldRespondWith` "itnotifies" {matchStatus = 200}
    where
      -- helpers: fire an arbitrary verb at the shared test routes and
      -- expect the handler to echo which verb (or verb group) matched
      verbTestGp verb verbVerbose =
          verb "/verb-test-gp" `shouldRespondWith` (verbVerbose {matchStatus = 200})
      verbTest verb verbVerbose =
          verb "/verb-test" `shouldRespondWith` (verbVerbose {matchStatus = 200})
-- | Checks the error-handler wiring: unknown routes yield the custom
-- 404 body for every verb, server-side exceptions become the custom
-- 500 body, and errors raised deliberately by user code keep their own
-- status/body instead of being swallowed by the framework handler.
errorHandlerSpec :: IO Wai.Application -> Spec
errorHandlerSpec app =
    with app $
    describe "Error Handler" $
    do
      it "handles non-existing routes correctly" $
         do
           get "/non/existing/route" `shouldRespondWith` "NOT FOUND" {matchStatus = 404}
           post "/non/existing/route" "" `shouldRespondWith` "NOT FOUND" {matchStatus = 404}
           put "/non/existing/route" "" `shouldRespondWith` "NOT FOUND" {matchStatus = 404}
           patch "/non/existing/route" "" `shouldRespondWith` "NOT FOUND" {matchStatus = 404}
      it "handles server errors correctly" $
         get "/failing/route" `shouldRespondWith` "SERVER ERROR" {matchStatus = 500}
      it "does not interfere with user emitted errors" $
         get "/user/error" `shouldRespondWith` "UNAUTHORIZED" {matchStatus = 403}
-- | Checks HTTP Basic authentication handling: correct credentials
-- pass, any wrong/empty credentials yield 401 "err", and a missing
-- Authorization header yields the framework's challenge response.
actionSpec :: SpecWith (st, Wai.Application)
actionSpec =
    describe "Action Framework" $
    do
      it "handles auth correctly" $
         do
           request methodGet "/auth/user/pass" [mkAuthHeader "user" "pass"] "" `shouldRespondWith` "ok" {matchStatus = 200}
           request methodGet "/auth/user/pass" [mkAuthHeader "user" ""] "" `shouldRespondWith` "err" {matchStatus = 401}
           request methodGet "/auth/user/pass" [mkAuthHeader "" ""] "" `shouldRespondWith` "err" {matchStatus = 401}
           request methodGet "/auth/user/pass" [mkAuthHeader "asd" "asd"] "" `shouldRespondWith` "err" {matchStatus = 401}
           request methodGet "/auth/user/pass" [] "" `shouldRespondWith` "Authentication required. " {matchStatus = 401}
    where
      -- RFC 7617 Basic auth header: base64("user:pass")
      mkAuthHeader :: BS.ByteString -> BS.ByteString -> Header
      mkAuthHeader user pass =
          ("Authorization", "Basic " <> (B64.encode $ user <> ":" <> pass))
-- | Checks that actions can emit one or several Set-Cookie headers and
-- that each expected name/value pair actually appears in the response
-- (via 'matchCookie').
cookieTest :: SpecWith (st, Wai.Application)
cookieTest =
  describe "Cookies" $
    do
      it "sets single cookies correctly" $
        get "/cookie/single"
          `shouldRespondWith` "set"
            { matchStatus = 200,
              matchHeaders =
                [ matchCookie "single" "test"
                ]
            }
      it "sets multiple cookies correctly" $
        get "/cookie/multiple"
          `shouldRespondWith` "set"
            { matchStatus = 200,
              matchHeaders =
                [ matchCookie "multiple1" "test1",
                  matchCookie "multiple2" "test2"
                ]
            }
-- | Checks custom response headers: a single bespoke header, and the
-- same header name emitted twice (multi-header) must both be present.
headerTest :: SpecWith (st, Wai.Application)
headerTest =
  describe "Headers" $
    do
      it "supports custom headers" $
        get "/set-header"
          `shouldRespondWith` "ok"
            { matchStatus = 200,
              matchHeaders =
                [ "X-FooBar" <:> "Baz"
                ]
            }
      it "supports multi headers" $
        get "/set-multi-header"
          `shouldRespondWith` "ok"
            { matchStatus = 200,
              matchHeaders =
                [ "Content-Language" <:> "de",
                  "Content-Language" <:> "en"
                ]
            }
-- | Header matcher that succeeds ('Nothing') iff some Set-Cookie
-- header starts with @name=val@.  Each candidate header value is cut
-- at the first @;@ (dropping cookie attributes) and split at the first
-- @=@; the raw bytes are decoded as UTF-8.  On failure it returns a
-- human-readable explanation.  The lambda's arity differs between
-- hspec-wai versions (>=0.8.0 also passes the response body).
matchCookie :: T.Text -> T.Text -> MatchHeader
matchCookie name val =
#if MIN_VERSION_hspec_wai(0,8,0)
    MatchHeader $ \headers _ ->
#else
    MatchHeader $ \headers ->
#endif
    let relevantHeaders = filter (\h -> fst h == "Set-Cookie") headers
        loop [] =
            Just
                ( "No cookie named " ++ T.unpack name ++ " with value "
                    ++ T.unpack val
                    ++ " found"
                )
        loop (x : xs) =
            -- cval keeps its leading '=', hence the "=" <> val comparison
            let (cname, cval) = T.breakOn "=" $ fst $ T.breakOn ";" $ T.decodeUtf8 $ snd x
             in if cname == name && cval == "=" <> val
                    then Nothing
                    else loop xs
     in loop relevantHeaders
| agrafix/Spock | Spock-core/test/Web/Spock/FrameworkSpecHelper.hs | bsd-3-clause | 9,369 | 0 | 20 | 2,480 | 2,211 | 1,174 | 1,037 | 191 | 3 |
{-# LANGUAGE TemplateHaskell, TypeOperators #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Network.Protocol.Snmp.AgentX.Packet.Types
( PDU(..)
, Version
, SessionID
, TransactionID
, PacketID
, econvert
, Context(..)
, Tag(..)
, TaggedError(..)
, TestError(..)
, CommitError(..)
, RError(..)
, UndoError(..)
, SysUptime
, Index
, MaxRepeaters
, NonRepeaters
-- ** Packet
, Packet
-- *** constructor
, mkPacket
-- *** lenses
, flags
, version
, pdu
, sid
, pid
, tid
-- ** SearchRange
, SearchRange
-- *** constructor
, mkSearchRange
-- *** lenses
, startOID
, endOID
, include
-- ** VarBind containt Value and OID
, VarBind
-- *** constructor
, mkVarBind
-- *** lenses
, vboid
, vbvalue
-- ** Flags
, Flags
-- *** constructor
, mkFlags
, InstanceRegistration
, NewIndex
, AnyIndex
, NonDefaultContext
, BigEndian
-- *** lenses
, instanceRegistration
, newIndex
, anyIndex
, nonDefaultContext
, bigEndian
)
where
import Data.Label
import Data.Word
import Data.ByteString (ByteString)
import Data.String
import Data.Default
import Network.Protocol.Snmp (Value(..), OID)
-- | protocol version (const 1 by default)
-- The four header identifiers below are exported abstractly (type name
-- only, see the export list); construct/inspect them via 'econvert'.
newtype Version = Version Word8 deriving (Show, Eq, Enum, Bounded, Ord)
-- | session id in header, rfc 2741, section 6.1
newtype SessionID = SessionID Word32 deriving (Show, Eq, Enum, Bounded, Ord)
-- | transaction id in header, rfc 2741, section 6.1
newtype TransactionID = TransactionID Word32 deriving (Show, Eq, Ord, Enum, Bounded)
-- | packet id in header, rfc 2741, section 6.1
newtype PacketID = PacketID Word32 deriving (Show, Eq, Ord, Enum, Bounded)
-- | Convert between any two 'Enum' types by round-tripping through
-- 'Int' (e.g. a 'Word32' into a 'SessionID' and back).
econvert :: (Enum a, Enum b) => a -> b
econvert value = toEnum (fromEnum value)
-- | rfc 2571 section 3.3.1, rfc 2741 section 6.1.1 Context
newtype Context = Context ByteString deriving (Show, Ord, Eq, IsString)
-- | rfc 2741, section 6.2.2, Error status in agentx-close-pdu
data Reason = Other
            | ParseError
            | ProtocolError
            | Timeouts
            | Shutdown
            | ByManager
            deriving (Show, Eq)
-- | rfc 2741, 6.2.16, Error status in agentx-response-pdu
data RError = NoAgentXError
            | OpenFailed
            | NotOpen
            | IndexWrongType
            | IndexAlreadyAllocated
            | IndexNonAvailable
            | IndexNotAllocated
            | UnsupportedContext
            | DuplicateRegistration
            | UnknownRegistration
            | UnknownAgentCaps
            | RParseError
            | RequestDenied
            | ProcessingError
            deriving (Show, Eq)
-- | result for testSetAIO (rfc 2741, section 7.2.4.1)
data TestError
    = NoTestError
    | TooBig
    | NoSuchName
    | BadValue
    | ReadOnly
    | GenError
    | NoAccess
    | WrongType
    | WrongLength
    | WrongEncoding
    | WrongValue
    | NoCreation
    | InconsistentValue
    | ResourceUnavailable
    | NotWritable
    | InconsistentName
    deriving (Show, Eq)
-- | result for commitSetAIO (rfc 2741, section 7.2.4.2)
data CommitError
    = NoCommitError
    | CommitFailed
    deriving (Show, Eq)
-- | result for undoSetAIO (rfc 2741, section 7.2.4.3)
data UndoError
    = NoUndoError
    | UndoFailed
    deriving (Show, Eq)
-- Assorted header/PDU field aliases (rfc 2741 section 6).
type Timeout = Word8
type Priority = Word8
type RangeSubid = Word8
type Description = ByteString
type MContext = Maybe Context
type UpperBound = Maybe Word32
type SysUptime = Word32
type Index = Word16
type NonRepeaters = Word16
type MaxRepeaters = Word16
-- | Error with Tag instance
-- Existentially wraps any taggable error so 'Response' can carry a
-- 'TestError', 'RError', etc. uniformly; equality is by tag value.
data TaggedError = forall a. (Show a, Eq a, Tag a Word16) => Tagged a
instance Tag TaggedError Word16 where
    tag (Tagged x) = tag x
    -- NOTE(review): codes 1..18 are decoded through TestError's table,
    -- but 14-16 are absent there, so those wrapped values 'error' when
    -- forced -- confirm this is the intended behaviour.
    unTag x
        | x == 0 = Tagged NoAgentXError
        | x > 0 && x <= 18 = Tagged (unTag x :: TestError)
        | otherwise = Tagged (unTag x :: RError)
instance Eq TaggedError where
    x == y = (tag x :: Word16) == (tag y :: Word16)
instance Show TaggedError where
    show (Tagged a) = show a
-- | rfc 2741, section 6.2
-- One constructor per AgentX PDU type; the section references give the
-- wire layout of each.
data PDU = Open Timeout OID Description -- ^ section 6.2.1
         | Close Reason -- ^ section 6.2.2
         | Register MContext Timeout Priority RangeSubid OID UpperBound -- ^ section 6.2.3
         | Unregister MContext Priority RangeSubid OID UpperBound -- ^ section 6.2.4
         | Get MContext [OID] -- ^ section 6.2.5
         | GetNext MContext [SearchRange] -- ^ section 6.2.6
         | GetBulk MContext NonRepeaters MaxRepeaters [SearchRange] -- ^ section 6.2.7
         | TestSet MContext [VarBind] -- ^ section 6.2.8
         | CommitSet -- ^ section 6.2.9
         | UndoSet -- ^ section 6.2.9
         | CleanupSet -- ^ section 6.2.9
         | Notify MContext [VarBind] -- ^ section 6.2.10
         | Ping MContext -- ^ section 6.2.11
         | IndexAllocate MContext [VarBind] -- ^ section 6.2.12
         | IndexDeallocate MContext [VarBind] -- ^ section 6.2.13
         | AddAgentCaps MContext OID Description -- ^ section 6.2.14
         | RemoveAgentCaps MContext OID -- ^ section 6.2.15
         | Response SysUptime TaggedError Index [VarBind] -- ^ section 6.2.16
         deriving (Show, Eq)
-- | class for convert Errors to Word* and Word* to Errors
-- Laws expected by the codec: @unTag . tag == id@ on the covered codes.
class Tag a b where
    tag :: a -> b
    unTag :: b -> a
-- | Packet type, describe agentx packet.
-- Fields mirror the rfc 2741 section 6.1 header; lenses for them are
-- generated by the 'mkLabels' splice below.
data Packet = Packet
  { _version :: Version
  , _pdu :: PDU
  , _flags :: Flags
  , _sid :: SessionID
  , _tid :: TransactionID
  , _pid :: PacketID
  } deriving Show
-- | Plain constructor re-exported so the record constructor can stay hidden.
mkPacket :: Version -> PDU -> Flags -> SessionID -> TransactionID -> PacketID -> Packet
mkPacket = Packet
-- | header flags, rfc 2741, section 6.1
data Flags = Flags
  { _instanceRegistration :: InstanceRegistration
  , _newIndex :: NewIndex
  , _anyIndex :: AnyIndex
  , _nonDefaultContext :: NonDefaultContext
  , _bigEndian :: BigEndian
  } deriving (Show)
-- Readable aliases for the five header flag bits.
type InstanceRegistration = Bool
type NewIndex = Bool
type AnyIndex = Bool
type NonDefaultContext = Bool
type BigEndian = Bool
mkFlags :: InstanceRegistration -> NewIndex -> AnyIndex -> NonDefaultContext -> BigEndian -> Flags
mkFlags = Flags
-- | used for getnext and other requests (rfc 2741, section5.2 )
data SearchRange = SearchRange
  { _startOID :: OID
  , _endOID :: OID
  , _include :: Bool
  } deriving (Show, Eq)
-- | create SearchRange
mkSearchRange :: OID -> OID -> Bool -> SearchRange
mkSearchRange = SearchRange
-- | containt oid and value (rfc 2741, section 5.4)
data VarBind = VarBind
  { _vboid :: OID
  , _vbvalue :: Value
  } deriving (Show, Eq)
-- | constructor for VarBind
mkVarBind :: OID -> Value -> VarBind
mkVarBind = VarBind
-- Template Haskell (fclabels): generate the lenses for the underscored
-- record fields above ('version', 'pdu', 'startOID', 'vboid', ...),
-- which are what the module's export list actually exposes.
mkLabels [''Packet, ''Flags, ''VarBind, ''SearchRange ]
-- Close reasons, rfc 2741 section 6.2.2.
instance Tag Reason Word8 where
    tag Other = 1
    tag ParseError = 2
    tag ProtocolError = 3
    tag Timeouts = 4
    tag Shutdown = 5
    tag ByManager = 6
    unTag 1 = Other
    unTag 2 = ParseError
    unTag 3 = ProtocolError
    unTag 4 = Timeouts
    unTag 5 = Shutdown
    unTag 6 = ByManager
    unTag _ = error "unknown reasonFromTag"
-- Response error codes, rfc 2741 section 6.2.16 (AgentX-specific
-- errors start at 256).
instance Tag RError Word16 where
    tag NoAgentXError = 0
    tag OpenFailed = 256
    tag NotOpen = 257
    tag IndexWrongType = 258
    tag IndexAlreadyAllocated = 259
    tag IndexNonAvailable = 260
    tag IndexNotAllocated = 261
    tag UnsupportedContext = 262
    tag DuplicateRegistration = 263
    tag UnknownRegistration = 264
    tag UnknownAgentCaps = 265
    tag RParseError = 266
    tag RequestDenied = 267
    tag ProcessingError = 268
    unTag 0 = NoAgentXError
    unTag 256 = OpenFailed
    unTag 257 = NotOpen
    unTag 258 = IndexWrongType
    unTag 259 = IndexAlreadyAllocated
    unTag 260 = IndexNonAvailable
    unTag 261 = IndexNotAllocated
    unTag 262 = UnsupportedContext
    unTag 263 = DuplicateRegistration
    unTag 264 = UnknownRegistration
    unTag 265 = UnknownAgentCaps
    unTag 266 = RParseError
    unTag 267 = RequestDenied
    unTag 268 = ProcessingError
    unTag _ = error "bad rerror"
-- SNMPv2-style error-status codes.  NOTE(review): 14-16 are
-- deliberately skipped here (14/15 belong to CommitError/UndoError
-- below) -- confirm against rfc 1905 before extending.
instance Tag TestError Word16 where
    tag NoTestError = 0
    tag TooBig = 1
    tag NoSuchName = 2
    tag BadValue = 3
    tag ReadOnly = 4
    tag GenError = 5
    tag NoAccess = 6
    tag WrongType = 7
    tag WrongLength = 8
    tag WrongEncoding = 9
    tag WrongValue = 10
    tag NoCreation = 11
    tag InconsistentValue = 12
    tag ResourceUnavailable = 13
    tag NotWritable = 17
    tag InconsistentName = 18
    unTag 0 = NoTestError
    unTag 1 = TooBig
    unTag 2 = NoSuchName
    unTag 3 = BadValue
    unTag 4 = ReadOnly
    unTag 5 = GenError
    unTag 6 = NoAccess
    unTag 7 = WrongType
    unTag 8 = WrongLength
    unTag 9 = WrongEncoding
    unTag 10 = WrongValue
    unTag 11 = NoCreation
    unTag 12 = InconsistentValue
    unTag 13 = ResourceUnavailable
    unTag 17 = NotWritable
    unTag 18 = InconsistentName
    unTag _ = error "unknown tag in TestError"
instance Tag CommitError Word16 where
    tag NoCommitError = 0
    tag CommitFailed = 14
    unTag 0 = NoCommitError
    unTag 14 = CommitFailed
    unTag _ = error "unknown tag in CommitError"
instance Tag UndoError Word16 where
    tag NoUndoError = 0
    tag UndoFailed = 15
    unTag 0 = NoUndoError
    unTag 15 = UndoFailed
    unTag _ = error "unknown tag in UndoError"
-- PDU type codes, rfc 2741 section 6.1.  Decoding is one-way here:
-- 'unTag' is 'undefined' because a bare code cannot rebuild a PDU's
-- payload (the wire parser handles that).
instance Tag PDU Word8 where
    tag Open{} = 1
    tag Close{} = 2
    tag Register{} = 3
    tag Unregister{} = 4
    tag Get{} = 5
    tag GetNext{} = 6
    tag GetBulk{} = 7
    tag TestSet{} = 8
    tag CommitSet{} = 9
    tag UndoSet{} = 10
    tag CleanupSet{} = 11
    tag Notify{} = 12
    tag Ping{} = 13
    tag IndexAllocate{} = 14
    tag IndexDeallocate{} = 15
    tag AddAgentCaps{} = 16
    tag RemoveAgentCaps{} = 17
    tag Response{} = 18
    unTag _ = undefined
-- VarBind value-type codes, rfc 2741 section 5.4.  One-way for the
-- same reason as the PDU instance.
instance Tag Value Word16 where
    -- TODO check zerodotzero
    tag (ZeroDotZero) = 0
    tag (Integer _) = 2
    tag (String _) = 4
    tag (Zero) = 5
    tag (OI _) = 6
    tag (IpAddress _ _ _ _) = 64
    tag (Counter32 _) = 65
    tag (Gaude32 _) = 66
    tag (TimeTicks _) = 67
    tag (Opaque _) = 68
    tag (Counter64 _) = 70
    tag (NoSuchObject) = 128
    tag (NoSuchInstance) = 129
    tag (EndOfMibView) = 130
    unTag _ = undefined
-- Sensible defaults: all flag bits clear, protocol version 1.
instance Default Flags where
    def = mkFlags False False False False False
instance Default Version where
    def = Version 1
| chemist/agentx | src/Network/Protocol/Snmp/AgentX/Packet/Types.hs | bsd-3-clause | 11,501 | 0 | 11 | 3,794 | 2,689 | 1,496 | 1,193 | 325 | 1 |
{-# LINE 1 "GHC.Event.Internal.hs" #-}
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE ExistentialQuantification, NoImplicitPrelude #-}
module GHC.Event.Internal
(
-- * Event back end
Backend
, backend
, delete
, poll
, modifyFd
, modifyFdOnce
-- * Event type
, Event
, evtRead
, evtWrite
, evtClose
, eventIs
-- * Lifetimes
, Lifetime(..)
, EventLifetime
, eventLifetime
, elLifetime
, elEvent
-- * Timeout type
, Timeout(..)
-- * Helpers
, throwErrnoIfMinus1NoRetry
) where
import Data.Bits ((.|.), (.&.))
import Data.OldList (foldl', filter, intercalate, null)
import Foreign.C.Error (eINTR, getErrno, throwErrno)
import System.Posix.Types (Fd)
import GHC.Base
import GHC.Num (Num(..))
import GHC.Show (Show(..))
-- | An I\/O event.
-- Represented as a bit set: read = 1, write = 2, close = 4; the
-- 'Monoid' instance unions the bits.
newtype Event = Event Int
    deriving (Eq)
-- | The empty event set (identity of the 'Monoid').
evtNothing :: Event
evtNothing = Event 0
{-# INLINE evtNothing #-}
-- | Data is available to be read.
evtRead :: Event
evtRead = Event 1
{-# INLINE evtRead #-}
-- | The file descriptor is ready to accept a write.
evtWrite :: Event
evtWrite = Event 2
{-# INLINE evtWrite #-}
-- | Another thread closed the file descriptor.
evtClose :: Event
evtClose = Event 4
{-# INLINE evtClose #-}
-- | True iff the two event sets have at least one event in common.
eventIs :: Event -> Event -> Bool
eventIs (Event a) (Event b) = a .&. b /= 0
instance Show Event where
    show e = '[' : (intercalate "," . filter (not . null) $
                    [evtRead `so` "evtRead",
                     evtWrite `so` "evtWrite",
                     evtClose `so` "evtClose"]) ++ "]"
        where ev `so` disp | e `eventIs` ev = disp
                           | otherwise      = ""
instance Monoid Event where
    mempty  = evtNothing
    mappend = evtCombine
    mconcat = evtConcat
-- | Union of two event sets (bitwise or).
evtCombine :: Event -> Event -> Event
evtCombine (Event a) (Event b) = Event (a .|. b)
{-# INLINE evtCombine #-}
-- | Union of a list of event sets.
evtConcat :: [Event] -> Event
evtConcat = foldl' evtCombine evtNothing
{-# INLINE evtConcat #-}
-- | The lifetime of an event registration.
--
-- @since 4.8.1.0
data Lifetime = OneShot   -- ^ the registration will be active for only one
                          -- event
              | MultiShot -- ^ the registration will trigger multiple times
              deriving (Show, Eq)
-- | The longer of two lifetimes.
-- 'OneShot' only survives when both sides are 'OneShot'.
elSupremum :: Lifetime -> Lifetime -> Lifetime
elSupremum OneShot OneShot = OneShot
elSupremum _       _       = MultiShot
{-# INLINE elSupremum #-}
-- | @mappend@ == @elSupremum@
instance Monoid Lifetime where
    mempty = OneShot
    mappend = elSupremum
-- | A pair of an event and lifetime
--
-- Here we encode the event in the bottom three bits and the lifetime
-- in the fourth bit.
newtype EventLifetime = EL Int
    deriving (Show, Eq)
-- Union of the packed bits; a 'MultiShot' on either side wins, exactly
-- as in 'elSupremum'.
instance Monoid EventLifetime where
    mempty = EL 0
    EL a `mappend` EL b = EL (a .|. b)
-- | Pack an 'Event' and a 'Lifetime' into one word.
eventLifetime :: Event -> Lifetime -> EventLifetime
eventLifetime (Event e) l = EL (e .|. lifetimeBit l)
  where
    lifetimeBit OneShot   = 0
    lifetimeBit MultiShot = 8
{-# INLINE eventLifetime #-}
-- | Extract the 'Lifetime' (bit 3) from a packed 'EventLifetime'.
elLifetime :: EventLifetime -> Lifetime
elLifetime (EL x) = if x .&. 8 == 0 then OneShot else MultiShot
{-# INLINE elLifetime #-}
-- | Extract the 'Event' (bits 0-2) from a packed 'EventLifetime'.
elEvent :: EventLifetime -> Event
elEvent (EL x) = Event (x .&. 0x7)
{-# INLINE elEvent #-}
-- | A timeout, specified in seconds, or 'Forever' to wait indefinitely.
-- (NOTE(review): comment previously said "type alias", but this is a
-- data type.)
data Timeout = Timeout {-# UNPACK #-} !Double
             | Forever
             deriving (Show)
-- | Event notification backend.
-- Existential record-of-functions: each concrete backend (epoll,
-- kqueue, poll, ...) supplies its own state type @a@ plus the five
-- operations below closed over it.
data Backend = forall a. Backend {
      _beState :: !a

    -- | Poll backend for new events.  The provided callback is called
    -- once per file descriptor with new events.
    , _bePoll :: a                          -- backend state
              -> Maybe Timeout              -- timeout in milliseconds ('Nothing' for non-blocking poll)
              -> (Fd -> Event -> IO ())     -- I/O callback
              -> IO Int

    -- | Register, modify, or unregister interest in the given events
    -- on the given file descriptor.
    , _beModifyFd :: a
                  -> Fd       -- file descriptor
                  -> Event    -- old events to watch for ('mempty' for new)
                  -> Event    -- new events to watch for ('mempty' to delete)
                  -> IO Bool

    -- | Register interest in new events on a given file descriptor, set
    -- to be deactivated after the first event.
    , _beModifyFdOnce :: a
                      -> Fd    -- file descriptor
                      -> Event -- new events to watch
                      -> IO Bool

    , _beDelete :: a -> IO ()
    }
-- | Smart constructor: packs the five operations and the backend state
-- into the existential 'Backend'.
backend :: (a -> Maybe Timeout -> (Fd -> Event -> IO ()) -> IO Int)
        -> (a -> Fd -> Event -> Event -> IO Bool)
        -> (a -> Fd -> Event -> IO Bool)
        -> (a -> IO ())
        -> a
        -> Backend
backend bPoll bModifyFd bModifyFdOnce bDelete state =
  Backend state bPoll bModifyFd bModifyFdOnce bDelete
{-# INLINE backend #-}
-- | Dispatch a poll through the wrapped backend; see '_bePoll'.
poll :: Backend -> Maybe Timeout -> (Fd -> Event -> IO ()) -> IO Int
poll (Backend bState bPoll _ _ _) = bPoll bState
{-# INLINE poll #-}
-- | Returns 'True' if the modification succeeded.
-- Returns 'False' if this backend does not support
-- event notifications on this type of file.
modifyFd :: Backend -> Fd -> Event -> Event -> IO Bool
modifyFd (Backend bState _ bModifyFd _ _) = bModifyFd bState
{-# INLINE modifyFd #-}
-- | Returns 'True' if the modification succeeded.
-- Returns 'False' if this backend does not support
-- event notifications on this type of file.
modifyFdOnce :: Backend -> Fd -> Event -> IO Bool
modifyFdOnce (Backend bState _ _ bModifyFdOnce _) = bModifyFdOnce bState
{-# INLINE modifyFdOnce #-}
-- | Release the backend's resources.
delete :: Backend -> IO ()
delete (Backend bState _ _ _ bDelete) = bDelete bState
{-# INLINE delete #-}
-- | Run the 'IO' action; a result of -1 is inspected via 'getErrno':
-- 'eINTR' is translated into a benign 0 result, while any other errno
-- raises an 'IOError' (tagged with the given location) via
-- 'throwErrno'.  Every other result is passed through unchanged.
throwErrnoIfMinus1NoRetry :: (Eq a, Num a) => String -> IO a -> IO a
throwErrnoIfMinus1NoRetry loc act = do
    r <- act
    if r /= -1
      then return r
      else do
        errno <- getErrno
        if errno == eINTR
          then return 0
          else throwErrno loc
| phischu/fragnix | builtins/base/GHC.Event.Internal.hs | bsd-3-clause | 6,327 | 0 | 16 | 1,784 | 1,393 | 774 | 619 | 138 | 3 |
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, FlexibleInstances #-}
module HrrDatatypeTest where
import Database.HDBC.Query.TH (defineTableFromDB)
import Database.HDBC.Schema.Oracle (driverOracle)
import Database.Record.TH (derivingShow)
import DataSource (connect, owner)
-- Template Haskell: reflect the schema of the @hrr_datatype_test@
-- table from the Oracle database (reached via 'connect', under schema
-- 'owner') at compile time, generating the corresponding record type
-- and HRR metadata, with a Show instance.
defineTableFromDB connect driverOracle owner "hrr_datatype_test" [derivingShow]
| amutake/haskell-relational-record-driver-oracle | example/src/HrrDatatypeTest.hs | bsd-3-clause | 364 | 0 | 6 | 32 | 68 | 42 | 26 | 7 | 0 |
{-# LANGUAGE ConstraintKinds,
DataKinds,
FlexibleContexts,
FlexibleInstances,
MultiParamTypeClasses,
RankNTypes,
ScopedTypeVariables,
TypeFamilies,
TypeOperators #-}
-- | Lens utilities for working with 'Record's.
module Frames.RecLens where
import Control.Applicative
import qualified Data.Vinyl as V
import Data.Vinyl.Functor (Identity(..))
import Data.Vinyl.TypeLevel
import Frames.Col ((:->)(..))
import Frames.Rec (Record)
-- | Lens into the @r@ element of a vinyl 'V.Rec', focusing the whole
-- interpreted value @g r@.  Thin re-export of 'V.rlens' with the index
-- constraint spelled out.
rlens' :: (i ~ RIndex r rs, V.RElem r rs i, Functor f, Functor g)
       => sing r
       -> (g r -> f (g r))
       -> V.Rec g rs
       -> f (V.Rec g rs)
rlens' = V.rlens
{-# INLINE rlens' #-}
-- | Getter for a 'V.Rec' field
-- Runs the lens with 'Const' to pull the focused column out, then
-- strips the ':->' wrapper inside the interpretation functor @g@.
rget' :: Functor g
      => (forall f. Functor f
          => (g (s :-> a) -> f (g (s :-> a))) -> V.Rec g rs -> f (V.Rec g rs))
      -> V.Rec g rs -> g a
rget' l = fmap getCol . getConst . l Const
{-# INLINE rget' #-}
-- | Setter for a 'V.Rec' field.
-- Runs the lens with 'Identity', replacing the focused column with the
-- new value (re-wrapped in 'Col') regardless of the old one.
rput' :: Functor g
      => (forall f. Functor f
          => (g (s :-> a) -> f (g (s :-> a))) -> V.Rec g rs -> f (V.Rec g rs))
      -> g a -> V.Rec g rs -> V.Rec g rs
rput' l y = getIdentity . l (\_ -> Identity (fmap Col y))
{-# INLINE rput' #-}
-- * Plain records
-- | Create a lens for accessing a field of a 'Record'.
-- | Create a lens for accessing a field of a 'Record'.
-- Adapts 'rlens'' so user code works on the bare @a@ instead of the
-- 'Identity'/'Col'-wrapped column.
rlens :: (Functor f, V.RElem (s :-> a) rs (RIndex (s :-> a) rs))
      => proxy (s :-> a) -> (a -> f a) -> Record rs -> f (Record rs)
rlens k f = rlens' k (fmap Identity . getIdentity . fmap f')
  where f' (Col x) = fmap Col (f x)
{-# INLINE rlens #-}
-- | Getter for a 'Record' field: run the supplied lens with 'Const'
-- and unwrap the captured value.
rget :: (forall f. Functor f => (a -> f a) -> Record rs -> f (Record rs))
     -> Record rs -> a
rget l record = getConst (l Const record)
{-# INLINE rget #-}
-- | Setter for a 'Record' field: run the supplied lens with
-- 'Identity', unconditionally substituting the new value.
rput :: (forall f. Functor f => (a -> f a) -> Record rs -> f (Record rs))
     -> a -> Record rs -> Record rs
rput l newVal record = getIdentity (l (const (Identity newVal)) record)
{-# INLINE rput #-}
| codygman/Frames | src/Frames/RecLens.hs | bsd-3-clause | 1,982 | 0 | 17 | 566 | 790 | 406 | 384 | 48 | 1 |
module Data.Honeybee.Types (BValue(..))where
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as M
-- | A bencoded value: integer, byte string, list, or dictionary.
-- NOTE(review): dictionary keys are arbitrary 'BValue's here, although
-- bencode proper only allows string keys -- confirm this is intended.
data BValue = BInteger Integer
            | BString B.ByteString
            | BList [BValue]
            | BDict (M.Map BValue BValue)
            deriving (Show, Eq, Ord)
| DuoSRX/honeybee | src/Data/Honeybee/Types.hs | bsd-3-clause | 298 | 0 | 9 | 85 | 87 | 54 | 33 | 8 | 0 |
-- | Keeps our main Language -related logic
module Toy.Lang
( module M
) where
import Toy.Lang.Data as M
import Toy.Lang.Interpreter as M
import Toy.Lang.Lib as M
import Toy.Lang.Parser as M ()
import Toy.Lang.Translator as M
| Martoon-00/toy-compiler | src/Toy/Lang.hs | bsd-3-clause | 313 | 0 | 4 | 123 | 56 | 41 | 15 | 7 | 0 |
module IAU2000.TestEquations where
import Astro
import Astro.DefaultData
import Astro.Time
import Astro.Time.Convert
import Astro.Time.Sidereal
--import IAU2000.Equations
import Numeric.Units.Dimensional.Prelude
import Test.QuickCheck
import qualified Prelude
-- | Comparison allowing for inaccuracy.
-- True iff @x@ and @x'@ differ by less than @accuracy@ (same dimension).
cmpE :: (Ord a, Num a) => Quantity d a -> Quantity d a -> Quantity d a -> Bool
cmpE accuracy x x' = abs(x - x') < accuracy
-- | Run an 'Astro' computation against the default astro data set.
run = (flip runAstro) defaultAstroData
-- Accuracies.
-- eeError = 1 *~ nano arcsecond
eeError = 1e-9 *~ arcsecond
--prop_ee_equals_gmst_minus_oo :: Double -> Bool
--prop_ee_equals_gmst_minus_oo t = run $ do
-- Test relationships between angles.
-- era = Earth Rotation Angle
-- eo = Equation of Origins
-- ee = Equation of Equinoxes
-- GAST = ERA - Eo
-- Each property samples a TT epoch @t@ days from J2000 and checks the
-- identity to within 'eeError'.
prop_gast_era_eo :: Double -> Bool
prop_gast_era_eo t = run $ do
  eo <- equationOfOrigins tt
  gast <- gast tt
  era' <- convert tt >>= return . era
  return $ cmpE eeError gast (era' - eo)
  where tt = addTime j2000 (t *~ day)
-- Ee = GAST - GMST
prop_ee_gast_gmst :: Double -> Bool
prop_ee_gast_gmst t = run $ do
  ee <- evalM (equationOfEquinoxes.nutation) tt
  gmst <- gmst tt
  gast <- gast tt
  return $ cmpE eeError ee (gast - gmst)
  where tt = addTime j2000 (t *~ day)
-- Eo = - (GMSTp + Ee)
prop_eo_gmstp_ee :: Double -> Bool
prop_eo_gmstp_ee t = run $ do
  eo <- equationOfOrigins tt
  ee <- evalM (equationOfEquinoxes.nutation) tt
  return $ cmpE eeError (negate eo) (gmst_p tt + ee)
  where tt = addTime j2000 (t *~ day)
-- | Run every angle-identity property through QuickCheck.
main = mapM_ quickCheck
  [ prop_gast_era_eo
  , prop_ee_gast_gmst
  , prop_eo_gmstp_ee
  ]
| bjornbm/astro | test/IAU2000/TestEquations.hs | bsd-3-clause | 1,636 | 0 | 12 | 317 | 473 | 242 | 231 | 37 | 1 |
module PaymentTrackerSpec where
import SpecHelper
-- | Entry point: run the PaymentTracker spec with hspec.
main :: IO ()
main = hspec spec
-- | Behavioural spec for PaymentTracker.  The fixtures (@payments@,
-- @food@, @kebob@, ...) come from "SpecHelper".
spec :: Spec
spec = do
  describe "PaymentTracker" $ do
    context "totalPaid" $ do
      it "calculates total money paid" $ do
        totalPaid payments `shouldBe` 41
    context "paymentsPerCategory" $ do
      it "groups payments per category" $ do
        ((paymentsPerCategory payments) !! 0) `shouldBe` (food, [payments !! 0, payments !! 2])
    context "totalPaidPerCategory" $ do
      it "calculates total money paid per category" $ do
        totalPaidPerCategory payments `shouldBe` [(food, 30), (transportation, 11)]
    context "percentPerCategory" $ do
      -- NOTE(review): 74 + 27 > 100; percentages are apparently rounded
      -- up independently per category.
      it "calculates percentage associated with each category" $ do
        percentPerCategory payments `shouldBe` [(food, 74), (transportation, 27)]
    context "warnings" $ do
      it "detects warnings for over-payments" $ do
        warnings overPayments `shouldBe` (Just [Warn clothes 100])
      it "knows when there is NO warning required" $ do
        warnings payments `shouldBe` Nothing
    context "sortAndLabelMonthsPayments" $ do
      it "sorts based on datetime and label payments of months" $ do
        let months = ["jan_2016_costs", "aug_2015_costs", "dec_2015_costs"]
        let payments = [[chicken, uberToWork], [kebob, priceyClothes], [uberToAirport]]
        let sortedAndLabeled = sortAndLabelMonthsPayments months payments
        sortedAndLabeled `shouldBe` [
            ("August_2015", [kebob, priceyClothes]),
            ("December_2015", [uberToAirport]),
            ("January_2016", [chicken, uberToWork])
          ]
    context "Paid Money for Each Category over Months" $ do
      let jan = "jan_2016"
      let feb = "feb_2016"
      let foodJan = (food, jan, 20)
      let foodFeb = (food, feb, 30)
      context "totalPaidInMonthCategories" $ do
        it "creates triples of (category, month, paid)" $ do
          let monthPayments = (jan, payments)
          totalPaidInMonthCategories monthPayments `shouldBe` [(food, jan, 30), (transportation, jan, 11)]
      context "groupMonthsPaymentsByCat" $ do
        it "group payments of months by their category value" $ do
          let monthsPaidCats = [foodJan, (transportation, jan, 50), foodFeb]
          groupMonthsPaymentsByCat monthsPaidCats `shouldBe` [[foodJan, foodFeb], [(transportation, jan, 50)]]
      context "factorOutCatMonthsPayments" $ do
        it "factors paid value for a category in different months" $ do
          let monthsPays = [(food, jan, 20), (food, feb, 30)]
          factorOutCatMonthsPayments monthsPays `shouldBe` (food, [(jan, 20), (feb, 30)])
      context "categoryPaidOverMonths" $ do
        it "collects value paid in different months for each category" $ do
          let monthsPayments = [(jan, [chicken, uberToWork, kebob]), (feb, [uberToAirport, kebob, chicken, kebob])]
          categoryPaidOverMonths monthsPayments `shouldBe` [(food, [(jan, 30), (feb, 50)]), (transportation, [(jan, 11), (feb, 60)])]
| Sam-Serpoosh/WatchIt | test/PaymentTrackerSpec.hs | bsd-3-clause | 3,116 | 0 | 24 | 822 | 884 | 476 | 408 | 54 | 1 |
-- | Goedel-style pairing: encode the pair (x, y) as 2^x * 3^y.
-- Inverted by 'uncons'.
cons :: Integer -> Integer -> Integer
cons x y =
  let twos   = 2 ^ x
      threes = 3 ^ y
  in twos * threes
-- | Multiplicity of the factor @n@ in @x@: the largest @k@ such that
-- @n^k@ divides @x@.
--
-- The naive recursion never terminated on two inputs: @x == 0@ (zero
-- is divisible by anything forever) and @n <= 1@ (1 divides every
-- quotient, and @mod 0@ would otherwise be hit).  Both now return 0.
unpower :: Integer -> Integer -> Integer
unpower n x
  | n <= 1 || x == 0 = 0            -- guard against non-termination
  | x `mod` n /= 0   = 0
  | otherwise        = 1 + unpower n (x `div` n)
-- | Recover the pair encoded by 'cons': the exponents of 2 and 3 in
-- the factorisation of the code.
uncons :: Integer -> (Integer, Integer)
uncons code = (twos, threes)
  where
    twos   = unpower 2 code
    threes = unpower 3 code
| YoshikuniJujo/funpaala | samples/24_adt_module/tuple23.hs | bsd-3-clause | 262 | 0 | 9 | 65 | 141 | 73 | 68 | 8 | 1 |
module Terminal where
import System.Console.ANSI
-- | Wrap a string in ANSI escape codes so it renders bold in the given
-- vivid foreground colour, resetting all attributes afterwards.
colorString :: Color -> String -> String
colorString col str = turnOn ++ str ++ turnOff
  where
    turnOn  = setSGRCode [ SetColor Foreground Vivid col
                         , SetConsoleIntensity BoldIntensity ]
    turnOff = setSGRCode [Reset]
-- Convenience wrappers: one 'colorString' specialisation per colour.
blackString :: String -> String
blackString = colorString Black
redString :: String -> String
redString = colorString Red
greenString :: String -> String
greenString = colorString Green
yellowString :: String -> String
yellowString = colorString Yellow
blueString :: String -> String
blueString = colorString Blue
magentaString :: String -> String
magentaString = colorString Magenta
cyanString :: String -> String
cyanString = colorString Cyan
whiteString :: String -> String
whiteString = colorString White
-- | Wrap a string in ANSI escape codes that make it blink slowly,
-- resetting all attributes afterwards.
blinkString :: String -> String
blinkString str = turnOn ++ str ++ turnOff
  where
    turnOn  = setSGRCode [SetBlinkSpeed SlowBlink]
    turnOff = setSGRCode [Reset]
-- | Combine 'blinkString' and 'colorString': coloured, bold and
-- blinking.
blinkColorString :: Color -> String -> String
blinkColorString col str = blinkString (colorString col str)
| Frefreak/Gideon | src/Terminal.hs | bsd-3-clause | 1,079 | 0 | 9 | 196 | 294 | 155 | 139 | 29 | 1 |
{- DATX02-17-26, automated assessment of imperative programs.
- Copyright, 2017, see AUTHORS.md.
-
- This program is free software; you can redistribute it and/or
- modify it under the terms of the GNU General Public License
- as published by the Free Software Foundation; either version 2
- of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
{-# LANGUAGE LambdaCase #-}
-- | Utilities for CoreS.AST.
module CoreS.ASTUtils (
-- * Identifiers and Names
singName
-- * Types
, isTNum
, isTInt
, typeFold
, typeDimens
, typeBase
-- * Literals
, litTrue
, litFalse
, litBoolEq
-- * lvalues
, singVar
-- * Expressions, Statements, Blocks
, allowedInSExpr
, ternaryIntoStmts
, logIntoStmts
, exprIntoStmts
, exprIntoExprStmts
, mkSBlock
) where
import Data.Maybe (fromMaybe)
import Control.Monad ((>=>))
import Control.Lens ((^?), isn't, (^..))
import Data.Data.Lens (uniplate)
import CoreS.AST
--------------------------------------------------------------------------------
-- Idents & Names:
--------------------------------------------------------------------------------
-- | Constructs a Name from a single Ident.
-- | Build a (qualified) 'Name' consisting of exactly one 'Ident'.
singName :: Ident -> Name
singName i = Name [i]
--------------------------------------------------------------------------------
-- Types:
--------------------------------------------------------------------------------
-- | Yields True if the type is primitive numeric.
-- | Is the type a primitive numeric type?  True for every primitive type
-- except BoolT; False for non-primitive types.
isTNum :: Type -> Bool
isTNum t = maybe False (isn't _BoolT) (t ^? tPrim)
-- | Is the type a primitive integral type (byte, char, short, int, long)?
isTInt :: Type -> Bool
isTInt t = t `elem` [byT, chT, shT, inT, loT]
-- Fold a type into something else recursively until it reaches a base type.
-- Tail recursive fold.
-- | Tail-recursive fold over the array nesting of a type: the accumulator
-- is combined with each element type until a non-array base type remains.
typeFold :: (b -> Type -> b) -> b -> Type -> b
typeFold f acc ty = case ty of
  ArrayT inner -> typeFold f (f acc inner) inner
  _            -> acc
-- | Dimensionality of a type: each array layer contributes +1;
-- base types have dimensionality 0.
typeDimens :: Type -> Integer
typeDimens = typeFold (\n _ -> n + 1) 0
-- | The base (element) type of a type - the identity on non-array types.
typeBase :: Type -> Type
typeBase t = typeFold (\_ inner -> inner) t t
--------------------------------------------------------------------------------
-- Literals:
--------------------------------------------------------------------------------
-- | Yields True if the given expression is a True literal.
-- | Is the expression the boolean literal @true@?
litTrue :: Expr -> Bool
litTrue = litBoolEq True
-- | Is the expression the boolean literal @false@?
litFalse :: Expr -> Bool
litFalse = litBoolEq False
-- | Is the expression a boolean literal equal to the given 'Bool'?
litBoolEq :: Bool -> Expr -> Bool
litBoolEq eq expr = case expr of
  ELit (Boolean b) -> b == eq
  _                -> False
--------------------------------------------------------------------------------
-- lvalues:
--------------------------------------------------------------------------------
-- | Constructs an LValue from a single Ident.
-- This needn't be a local variable, but could instead be a static field
-- of some static import, or in the future a static field of the same class.
-- | An lvalue referring to a single identifier (see 'singName' for the
-- name construction).
singVar :: Ident -> LValue
singVar i = LVName (singName i)
--------------------------------------------------------------------------------
-- Expressions, Statements, Blocks:
--------------------------------------------------------------------------------
-- | Determines if the given Expr is allowed in an SExpr
-- according to JLS § 14.8. Expression Statements
-- | Is this expression allowed as an expression statement
-- (JLS § 14.8, Expression Statements)?
allowedInSExpr :: Expr -> Bool
allowedInSExpr (EAssign {})  = True
allowedInSExpr (EOAssign {}) = True
allowedInSExpr (EStep {})    = True
allowedInSExpr (EMApp {})    = True
allowedInSExpr (EInstNew {}) = True
allowedInSExpr (ESysOut {})  = True
allowedInSExpr _             = False
-- | Convert a ternary expression, expanded, into a list of statements.
-- This is a special case since only the side-effects of one branch will
-- occur depending on the conditional expression.
-- | Expand a ternary expression @c ? ei : ee@ into statements.  Because
-- only one branch's side effects may occur, an if/else is produced whenever
-- either branch expands to anything; otherwise only the condition remains.
ternaryIntoStmts :: Expr -> Expr -> Expr -> [Stmt]
ternaryIntoStmts c ei ee
  | null thenStmts && null elseStmts = exprIntoStmts c
  | otherwise = [SIfElse c (mkSBlock thenStmts) (mkSBlock elseStmts)]
  where
    thenStmts = exprIntoStmts ei
    elseStmts = exprIntoStmts ee
-- | Convert a logical expression, expanded, into a list of statements.
-- This is a special case due to the short-circuiting natureo of && and ||.
-- | Expand a short-circuiting logical expression (&& or ||) into
-- statements: the right operand's effects only happen under an @if@
-- guarded by the (possibly negated) left operand.
logIntoStmts :: Expr -> Expr -> LogOp -> [Stmt]
logIntoStmts l r o
  | null rhsStmts = exprIntoStmts l
  | otherwise     = [SIf (cond o l) (mkSBlock rhsStmts)]
  where
    rhsStmts = exprIntoStmts r
    -- For ||, the right side runs only when the left is false, hence ENot.
    cond LAnd = id
    cond LOr  = ENot
-- | Split an expression into parts allowed as statements.
-- Such statements may or may not have side-effects.
exprIntoStmts :: Expr -> [Stmt]
exprIntoStmts = \case
  -- Already allowed as a Stmt, so no-op:
  e | allowedInSExpr e -> [SExpr e]
  -- Ternaries require special treatment as they correspond to if else:
  ECond c ei ee -> ternaryIntoStmts c ei ee
  -- Logical operators && and || are short circuiting:
  ELog o l r -> logIntoStmts l r o
  -- Otherwise, collect all subexprs left-to-right, convert those & merge:
  -- ('uniplate' yields the immediate sub-expressions in order; (>>=) on
  -- lists concatenates the recursive conversions.)
  e -> e ^.. uniplate >>= exprIntoStmts
-- | Split an expression into parts that are allowed as expression statements
-- as specified by allowedInSExpr.
-- | All fragments of the expression that are themselves valid expression
-- statements (see 'allowedInSExpr'), in evaluation order.
exprIntoExprStmts :: Expr -> [Expr]
exprIntoExprStmts e = exprIntoStmts e >>= (^.. _SExpr)
-- | Wrap a list of statements into a single block statement.
mkSBlock :: [Stmt] -> Stmt
mkSBlock stmts = SBlock (Block stmts)
-- | The all-important theming engine!
--
-- Cf
-- https://hackage.haskell.org/package/vty/docs/Graphics-Vty-Attributes.html
-- http://hackage.haskell.org/package/brick/docs/Brick-AttrMap.html
-- http://hackage.haskell.org/package/brick-0.1/docs/Brick-Util.html
-- http://hackage.haskell.org/package/brick-0.1/docs/Brick-Widgets-Core.html#g:5
-- http://hackage.haskell.org/package/brick-0.1/docs/Brick-Widgets-Border.html
{-# LANGUAGE OverloadedStrings #-}
module Hledger.UI.Theme (
defaultTheme
,getTheme
,themes
,themeNames
)
where
import qualified Data.Map as M
import Data.Maybe
import Graphics.Vty
import Brick
-- | The theme used when none is selected: "white".  If that name were ever
-- missing from 'themes', the first entry of 'themesList' is used instead
-- (the list must therefore be non-empty).
defaultTheme :: AttrMap
defaultTheme = fromMaybe fallback (getTheme "white")
  where
    fallback = snd (head themesList)
-- | Look up the named theme, if it exists.
-- | Look up the named theme, if it exists.
getTheme :: String -> Maybe AttrMap
getTheme = flip M.lookup themes
-- | A selection of named themes specifying terminal colours and styles.
-- One of these is active at a time.
--
-- A hledger-ui theme is a vty/brick AttrMap. Each theme specifies a
-- default style (Attr), plus extra styles which are applied when
-- their (hierarchical) name matches the widget rendering context.
-- "More specific styles, if present, are used and only fall back to
-- more general ones when the more specific ones are absent, but also
-- these styles get merged, so that if a more specific style only
-- provides the foreground color, its more general parent style can
-- set the background color, too."
-- For example: rendering a widget named "b" inside a widget named "a",
-- - if a style named "a" <> "b" exists, it will be used. Anything it
-- does not specify will be taken from a style named "a" if that
-- exists, otherwise from the default style.
-- - otherwise if a style named "a" exists, it will be used, and
-- anything it does not specify will be taken from the default style.
-- - otherwise (you guessed it) the default style is used.
--
-- | All built-in themes, indexed by name.
themes :: M.Map String AttrMap
themes = M.fromList themesList

-- | The names of the built-in themes, in list order.
themeNames :: [String]
themeNames = [name | (name, _) <- themesList]

-- Local shorthand: combine a vty attribute with an extra style.
(&) = withStyle

-- Attribute snippets shared by the theme definitions below.
active   = fg brightWhite & bold
selectbg = yellow
select   = black `on` selectbg
-- The theme definitions themselves.  Each entry pairs a user-visible name
-- with a brick 'AttrMap': a default attribute plus hierarchical overrides
-- (see the module comment above for how style names are matched).
themesList :: [(String, AttrMap)]
themesList = [
   ("default", attrMap (black `on` white) [
     ("border"                                        , white `on` black & dim)
    ,("border" <> "bold"                              , currentAttr & bold)
    ,("border" <> "depth"                             , active)
    ,("border" <> "filename"                          , currentAttr)
    ,("border" <> "key"                               , active)
    ,("border" <> "minibuffer"                        , white `on` black & bold)
    ,("border" <> "query"                             , active)
    ,("border" <> "selected"                          , active)
    ,("error"                                         , fg red)
    ,("help"                                          , white `on` black & dim)
    ,("help" <> "heading"                             , fg yellow)
    ,("help" <> "key"                                 , active)
    -- ,("list"                                         , black `on` white)
    -- ,("list" <> "amount"                             , currentAttr)
    ,("list" <> "amount" <> "decrease"                , fg red)
    -- ,("list" <> "amount" <> "increase"               , fg green)
    ,("list" <> "amount" <> "decrease" <> "selected"  , red `on` selectbg & bold)
    -- ,("list" <> "amount" <> "increase" <> "selected" , green `on` selectbg & bold)
    ,("list" <> "balance"                             , currentAttr & bold)
    ,("list" <> "balance" <> "negative"               , fg red)
    ,("list" <> "balance" <> "positive"               , fg black)
    ,("list" <> "balance" <> "negative" <> "selected" , red `on` selectbg & bold)
    ,("list" <> "balance" <> "positive" <> "selected" , select & bold)
    ,("list" <> "selected"                            , select)
    -- ,("list" <> "accounts"                  , white `on` brightGreen)
    -- ,("list" <> "selected"                  , black `on` brightYellow)
    ])

  ,("greenterm", attrMap (green `on` black) [
    ("list" <> "selected"                             , black `on` green)
    ])

  ,("terminal", attrMap defAttr [
    ("border"                                         , white `on` black),
    ("list"                                           , defAttr),
    ("list" <> "selected"                             , defAttr & reverseVideo)
    ])
  ]
-- halfbrightattr = defAttr & dim
-- reverseattr = defAttr & reverseVideo
-- redattr = defAttr `withForeColor` red
-- greenattr = defAttr `withForeColor` green
-- reverseredattr = defAttr & reverseVideo `withForeColor` red
-- reversegreenattr= defAttr & reverseVideo `withForeColor` green
| adept/hledger | hledger-ui/Hledger/UI/Theme.hs | gpl-3.0 | 4,912 | 0 | 12 | 1,526 | 718 | 441 | 277 | 51 | 1 |
#! /usr/bin/env nix-shell
#! nix-shell ./lineage_hive_generator.hs.nix -i runghc
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
import Data.Functor ((<&>))
import Control.Monad (when)
import Control.Arrow ((>>>))
import Data.Proxy (Proxy(..))
import Data.Either (isLeft, fromLeft, fromRight)
import Text.Printf (formatString)
import System.IO (hPrint, stderr)
import Data.String.Conversions (cs)
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.HashMap.Strict as HM
import qualified Data.Aeson as J
import Data.Conduit (ConduitT, runConduitRes, runConduit, bracketP, (.|))
import qualified Data.Conduit.Combinators as C
import qualified Database.Sql.Hive.Parser as HIVE
import qualified Database.Sql.Hive.Type as HIVE
import Database.Sql.Type (
Catalog(..), DatabaseName(..), FullyQualifiedTableName(..), FQTN(..)
, makeDefaultingCatalog, mkNormalSchema
)
import Database.Sql.Util.Scope (runResolverWarn)
import Database.Sql.Util.Lineage.Table (getTableLineage)
import Data.Aeson.QQ (aesonQQ)
import Data.Time.Clock.POSIX (getPOSIXTime)
-- Default (Generic-derived) aeson encodings so fully-qualified table names
-- can be used both as JSON values and as JSON object keys.
instance J.ToJSON FullyQualifiedTableName
instance J.ToJSONKey FullyQualifiedTableName
-- | Current POSIX time in whole milliseconds.
nowts :: IO Int
nowts = do
  t <- getPOSIXTime
  return (floor (t * 1000))
-- | The resolver catalog used for name resolution: no preloaded tables,
-- a single "public" schema, and "defaultDatabase" as the default database.
-- Unknown tables are defaulted rather than rejected (defaulting catalog).
catalog :: Catalog
catalog = makeDefaultingCatalog HM.empty
                                [mkNormalSchema "public" ()]
                                (DatabaseName () "defaultDatabase")
-- | Render a fully-qualified table name as @database.schema.name@.
tableName :: FullyQualifiedTableName -> T.Text
tableName (FullyQualifiedTableName database schema name) =
  T.concat [database, ".", schema, ".", name]
-- | Build a DataHub metadata-change-event (MCE) JSON value stating that the
-- dataset 'output' has upstream lineage from every dataset in 'inputs'.
-- 'ts' is the audit timestamp in milliseconds (see 'nowts').
mkMCE :: Int -> (FQTN, S.Set FQTN) -> J.Value
mkMCE ts (output, inputs) = [aesonQQ|
  { "proposedSnapshot": {
      "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": {
        "urn": #{uriName output}
      , "aspects": [
          { "com.linkedin.pegasus2avro.dataset.UpstreamLineage": {
              "upstreams": #{upstreams ts inputs}
            }
          }
        ]
      }
    }
  }
  |]
  where
    -- One upstream lineage entry; note the parameter 'ts' here shadows the
    -- outer 'ts' (same value is passed through by 'upstreams').
    upstream :: Int -> T.Text -> J.Value
    upstream ts dataset = [aesonQQ|
      { "auditStamp": {"time":#{ts}, "actor":"urn:li:corpuser:jdoe"}
      , "dataset": #{dataset}
      , "type":"TRANSFORMED"
      }
    |]
    -- All upstream entries for the input set, in set (ascending) order.
    upstreams ts = map (upstream ts . uriName) . S.toList
    -- DataHub dataset URN for a Hive table in the PROD environment.
    uriName :: FQTN -> T.Text
    uriName fqtn = "urn:li:dataset:(urn:li:dataPlatform:hive,"
                <> tableName fqtn
                <> ",PROD)"
-- | Read file paths from stdin (one per line), parse each file as Hive SQL,
-- resolve names against 'catalog', extract table-level lineage, and emit
-- one JSON MCE per output table on stdout.  Paths are echoed to stderr.
main = do
  contents <- T.getContents <&> T.lines
  ts <- nowts
  runConduit $ C.yieldMany contents
            .| C.iterM (hPrint stderr)
            .| C.mapM (cs >>> T.readFile)
            .| C.concatMap parseSQL
            .| C.mapM resolveStatement
            .| C.concatMap (getTableLineage >>> M.toList)
            .| C.map (mkMCE ts)
            .| C.mapM_ (J.encode >>> cs >>> putStrLn)
  where
    -- Parse a file's contents into Hive statements.
    -- NOTE(review): aborts the whole run via 'error' on the first
    -- unparsable file rather than skipping it.
    parseSQL sql = do
      let stOrErr = HIVE.parseManyAll sql
      when (isLeft stOrErr) $
        error $ show (fromLeft undefined stOrErr)
      fromRight undefined stOrErr
    -- Resolve one statement against 'catalog'; resolution warnings
    -- ('resolutions') are discarded.  Also aborts via 'error' on failure.
    resolveStatement st = do
      let resolvedStOrErr = runResolverWarn (HIVE.resolveHiveStatement st) HIVE.dialectProxy catalog
      when (isLeft . fst $ resolvedStOrErr) $
        error $ show (fromLeft undefined (fst resolvedStOrErr))
      let (Right queryResolved, resolutions) = resolvedStOrErr
      return queryResolved
| mars-lan/WhereHows | contrib/metadata-ingestion/haskell/bin/lineage_hive_generator.hs | apache-2.0 | 3,489 | 0 | 16 | 793 | 871 | 491 | 380 | 69 | 1 |
module FP_ParserGen where
{- ===========================================================================
Contains Parser Generator
=========================================================================== -}
import Types
import Data.List
-- ==========================================================================================================
-- endSkip for dealing withend of input
-- Can this grammar symbol succeed against an empty remainder of input?
-- Optionals and zero-repetitions always can; alternatives/one-or-more
-- repetitions can when their constituent symbols all can.
endSkip (Opt _)       = True
endSkip (Rep0 _)      = True
endSkip (Alt nts mts) = all endSkip nts || all endSkip mts
endSkip (Rep1 nts)    = all endSkip nts
endSkip _             = False
-- ==========================================================================================================
-- Parser Generator
-- | The generated parser: given the grammar, the remaining symbols of the
-- current rule, and a state (current non-terminal, children built so far,
-- remaining tokens), produce all alternative parses, each paired with its
-- leftover tokens.  Dead branches are represented by 'PError' results.
parserGen :: Grammar -> [Alphabet] -> ParseState -> [(ParseTree,[Token])]
parserGen gr [] (nt0,ts,tokens) = [(PNode nt0 ts, tokens)]
parserGen gr (nt:rule) (nt0,ts,[]) | endSkip nt = parserGen gr rule (nt0,ts,[])
                                   | otherwise = [(PError (PNode nt0 ts) (nt:rule) nt "end of input" 0, [])]
parserGen gr (nt:rule) (nt0,ts,(cat,str,k):tokens) = case nt of
        -- ============================================================================================================
        -- Backus-Naur constructions
        -- (each construct forks the parse: try the body, and/or skip it)
        Alt nts mts  -> parserGen gr (nts++rule) (nt0,ts,(cat,str,k):tokens)
                     ++ parserGen gr (mts++rule) (nt0,ts,(cat,str,k):tokens)
        Opt nts      -> parserGen gr (nts++rule) (nt0,ts,(cat,str,k):tokens)
                     ++ parserGen gr rule (nt0,ts,(cat,str,k):tokens)
        Rep0 nts     -> parserGen gr (nts ++ (Rep0 nts : rule)) (nt0,ts,(cat,str,k):tokens)
                     ++ parserGen gr rule (nt0,ts,(cat,str,k):tokens)
        Rep1 nts     -> parserGen gr (nts ++ (Rep0 nts : rule)) (nt0,ts,(cat,str,k):tokens)
        -- ============================================================================================================
        -- Terminals
        -- ('Terminal'/'SyntCat' consume the token into a leaf; 'Symbol' consumes it silently)
        Terminal str' | str==str' -> parserGen gr rule (nt0, ts++[PLeaf (cat,str,k)], tokens)
                      | otherwise -> [(PError (PNode nt0 ts) (nt:rule) nt str k, [])]
        Symbol str'   | str==str' -> parserGen gr rule (nt0,ts,tokens)
                      | otherwise -> [(PError (PNode nt0 ts) (nt:rule) nt str k, [])]
        SyntCat cat'  | cat==cat' -> parserGen gr rule (nt0, ts++[PLeaf (cat,str,k)], tokens)
                      | otherwise -> [(PError (PNode nt0 ts) (nt:rule) nt str k, [])]
        -- ============================================================================================================
        -- Non-terminals
        -- (expand each production of nt; on total failure keep the "best" -- maximum -- error)
        _ -> concat [ nextParses | r <- gr nt
                    , let parses = parserGen gr r (nt,[],(cat,str,k):tokens)
                    , let correctParses = filter (not.isPError.fst) parses
                    , let nextParses | null correctParses = [ (finalPError (nt0,ts) $ maximum $ map fst parses , []) ]
                                     | otherwise = concat $ map (continueParsing gr rule (nt0,ts)) correctParses
                    ]
-- ==================================================
-- Additional functions
-- True iff a parse result is an error node.
isPError (PError _ _ _ _ _) = True
isPError _ = False
-- After a successful sub-parse, append its tree to the accumulated children
-- and continue with the remainder of the current rule.
continueParsing gr rule (nt0,ts) (t,tokens) = parserGen gr rule (nt0,ts++[t],tokens)
-- Wrap a sub-parse error into the parent node so the failure context is
-- preserved.  NOTE(review): partial -- only defined on 'PError' inputs.
finalPError (nt0,ts) (PError t rule nt str k) = PError (PNode nt0 (ts++[t])) rule nt str k
-- ==================================================
-- | Parse a complete token list for the start symbol @s@ and return the
-- single successful parse tree.  If no parse succeeds, the "best"
-- ('maximum') error tree is returned; if a parse succeeds but leaves
-- tokens unconsumed, 'error' is raised.
-- NOTE(review): partial -- relies on 'maximum'/'head'; the grammar must
-- have at least one production for @s@.
parse :: Grammar -> Alphabet -> [Token] -> ParseTree -- [(ParseTree, [Token])]
parse gr s tokens | null correctParses = maximum $ map fst parses
                  | not $ null rest = error "tokenList not fully parsed"
                  | otherwise = final
  where
    parses = [ (t,rem) | r <- gr s
                       , (t,rem) <- parserGen gr r (s,[],tokens)
                       ]
    correctParses = filter (not.isPError.fst) parses
    -- First successful parse wins (alternatives are ordered by the grammar).
    (final,rest) = head correctParses
| wouwouwou/2017_module_8 | src/haskell/PP-project-2017/FP_ParserGen.hs | apache-2.0 | 4,311 | 0 | 20 | 1,380 | 1,387 | 742 | 645 | 44 | 8 |
{- |
This module provides normalized multi-dimensional versions of the transforms in @fftw@.
The forwards transforms in this module are identical to those in "Numeric.FFT.Vector.Unnormalized".
The backwards transforms are normalized to be their inverse operations (approximately, due to floating point precision).
For more information on the underlying transforms, see
<http://www.fftw.org/fftw3_doc/What-FFTW-Really-Computes.html>.
@since 0.2
-}
module Numeric.FFT.Vector.Invertible.Multi
(
-- * Creating and executing 'Plan's
run,
plan,
execute,
-- * Complex-to-complex transforms
U.dft,
idft,
-- * Real-to-complex transforms
U.dftR2C,
dftC2R,
) where
import Numeric.FFT.Vector.Base
import qualified Numeric.FFT.Vector.Unnormalized.Multi as U
import Data.Complex
import qualified Data.Vector.Storable as VS
-- | A backward discrete Fourier transform which is the inverse of 'U.dft'. The output and input sizes are the same (@n@).
idft :: TransformND (Complex Double) (Complex Double)
idft = U.idft {normalizationND = invScale}
  where
    -- Scale every output element by 1/N, where N is the product of all
    -- transform dimensions.
    invScale ns = constMultOutput $ 1 / toEnum (VS.product ns)
-- | A normalized backward discrete Fourier transform which is the left inverse of
-- 'U.dftR2C'. (Specifically, @run dftC2R . run dftR2C == id@.)
--
-- This 'Transform' behaves differently than the others:
--
-- - Calling @planND dftC2R dims@ where @dims = [n0, ..., nk]@ creates a 'Plan' whose /output/ size is @dims@, and whose
-- /input/ size is @[n0, ..., nk \`div\` 2 + 1]@.
--
-- - If @length v == n0 * ... * nk@, then @length (run dftC2R v) == n0 * ... * 2*(nk-1)@.
--
dftC2R :: TransformND (Complex Double) Double
dftC2R = U.dftC2R {normalizationND = invScale}
  where
    -- Normalize by 1/N (N = product of the logical output dimensions) so
    -- that this is the left inverse of 'U.dftR2C'.
    invScale ns = constMultOutput $ 1 / toEnum (VS.product ns)
| judah/vector-fftw | Numeric/FFT/Vector/Invertible/Multi.hs | bsd-3-clause | 1,784 | 0 | 12 | 346 | 200 | 125 | 75 | 17 | 1 |
module Distribution.Solver.Modular.IndexConversion
( convPIs
) where
import Data.List as L
import Data.Map as M
import Data.Maybe
import Data.Monoid as Mon
import Data.Set as S
import Prelude hiding (pi)
import Distribution.Compiler
import Distribution.InstalledPackageInfo as IPI
import Distribution.Package -- from Cabal
import Distribution.PackageDescription as PD -- from Cabal
import Distribution.PackageDescription.Configuration as PDC
import qualified Distribution.Simple.PackageIndex as SI
import Distribution.System
import Distribution.Solver.Types.ComponentDeps (Component(..))
import Distribution.Solver.Types.OptionalStanza
import qualified Distribution.Solver.Types.PackageIndex as CI
import Distribution.Solver.Types.Settings
import Distribution.Solver.Types.SourcePackage
import Distribution.Solver.Modular.Dependency as D
import Distribution.Solver.Modular.Flag as F
import Distribution.Solver.Modular.Index
import Distribution.Solver.Modular.Package
import Distribution.Solver.Modular.Tree
import Distribution.Solver.Modular.Version
-- | Convert both the installed package index and the source package
-- index into one uniform solver index.
--
-- We use 'allPackagesBySourcePackageId' for the installed package index
-- because that returns us several instances of the same package and version
-- in order of preference. This allows us in principle to \"shadow\"
-- packages if there are several installed packages of the same version.
-- There are currently some shortcomings in both GHC and Cabal in
-- resolving these situations. However, the right thing to do is to
-- fix the problem there, so for now, shadowing is only activated if
-- explicitly requested.
convPIs :: OS -> Arch -> CompilerInfo -> ShadowPkgs -> StrongFlags ->
           SI.InstalledPackageIndex -> CI.PackageIndex (SourcePackage loc) -> Index
convPIs os arch comp shadowPkgs strongFlags installedIdx sourceIdx =
    mkIndex (installed ++ source)
  where
    installed = convIPI' shadowPkgs installedIdx
    source    = convSPI' os arch comp strongFlags sourceIdx
-- | Convert a Cabal installed package index to the simpler,
-- more uniform index format of the solver.
convIPI' :: ShadowPkgs -> SI.InstalledPackageIndex -> [(PN, I, PInfo)]
convIPI' (ShadowPkgs sip) idx =
    -- apply shadowing whenever there are multiple installed packages with
    -- the same version
    -- (the groups from 'allPackagesBySourcePackageId' are ordered by
    -- preference; zipping with (id : repeat shadow) leaves the first,
    -- preferred instance untouched and marks all later duplicates)
    [ maybeShadow (convIP idx pkg)
    | (_pkgid, pkgs) <- SI.allPackagesBySourcePackageId idx
    , (maybeShadow, pkg) <- zip (id : repeat shadow) pkgs ]
  where
    -- shadowing is recorded in the package info
    -- (only when shadowing was requested, i.e. 'sip' is True; otherwise
    -- duplicates pass through unchanged)
    shadow (pn, i, PInfo fdeps fds _) | sip = (pn, i, PInfo fdeps fds (Just Shadowed))
    shadow x = x
-- | Convert a single installed package into the solver-specific format.
convIP :: SI.InstalledPackageIndex -> InstalledPackageInfo -> (PN, I, PInfo)
convIP idx ipi =
  case mapM (convIPId pn idx) (IPI.depends ipi) of
        -- if any dependency cannot be resolved in the index, the installed
        -- package is broken and gets no dependencies at all
        Nothing -> (pn, i, PInfo [] M.empty (Just Broken))
        Just fds -> (pn, i, PInfo (setComp fds) M.empty Nothing)
 where
  -- We assume that all dependencies of installed packages are _library_ deps
  ipid = IPI.installedUnitId ipi
  i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid)
  pn = pkgName (sourcePackageId ipi)
  setComp = setCompFlaggedDeps (ComponentLib (unPackageName pn))
-- TODO: Installed packages should also store their encapsulations!
-- | Convert dependencies specified by an installed package id into
-- flagged dependencies of the solver.
--
-- May return Nothing if the package can't be found in the index. That
-- indicates that the original package having this dependency is broken
-- and should be ignored.
convIPId :: PN -> SI.InstalledPackageIndex -> UnitId -> Maybe (FlaggedDep () PN)
convIPId pn' idx ipid = do
    -- Nothing if the unit id is absent from the index (broken package).
    ipi <- SI.lookupUnitId idx ipid
    let srcId = sourcePackageId ipi
        i     = I (pkgVersion srcId) (Inst ipid)
        pn    = pkgName srcId
    -- Installed deps are pinned: the dependency is 'Fixed' to instance 'i',
    -- qualified by the depending package 'pn''.
    return (D.Simple (Dep pn (Fixed i (P pn'))) ())
-- | Convert a cabal-install source package index to the simpler,
-- more uniform index format of the solver.
convSPI' :: OS -> Arch -> CompilerInfo -> StrongFlags ->
            CI.PackageIndex (SourcePackage loc) -> [(PN, I, PInfo)]
convSPI' os arch cinfo strfl sidx =
  L.map (convSP os arch cinfo strfl) (CI.allPackages sidx)
-- | Convert a single source package into the solver-specific format.
convSP :: OS -> Arch -> CompilerInfo -> StrongFlags -> SourcePackage loc -> (PN, I, PInfo)
convSP os arch cinfo strfl (SourcePackage (PackageIdentifier pn pv) gpd _ _pl) =
    (pn, i, convGPD os arch cinfo strfl (PI pn i) gpd)
  where
    -- Source packages are always "in repo" instances at their version.
    i = I pv InRepo
-- We do not use 'flattenPackageDescription' or 'finalizePackageDescription'
-- from 'Distribution.PackageDescription.Configuration' here, because we
-- want to keep the condition tree, but simplify much of the test.
-- | Convert a generic package description to a solver-specific 'PInfo'.
convGPD :: OS -> Arch -> CompilerInfo -> StrongFlags ->
           PI PN -> GenericPackageDescription -> PInfo
convGPD os arch cinfo strfl pi@(PI pn _)
        (GenericPackageDescription pkg flags libs exes tests benchs) =
  let
    fds = flagInfo strfl flags
    -- | We have to be careful to filter out dependencies on
    -- internal libraries, since they don't refer to real packages
    -- and thus cannot actually be solved over. We'll do this
    -- by creating a set of package names which are "internal"
    -- and dropping them as we convert.
    ipns = S.fromList [ PackageName nm
                      | (nm, _) <- libs
                      -- Don't include the true package name;
                      -- qualification could make this relevant.
                      -- TODO: Can we qualify over internal
                      -- dependencies? Not for now!
                      , PackageName nm /= pn ]
    -- Convert one component's condition tree (after pruning
    -- non-buildable branches via 'addBuildableCondition').
    conv :: Mon.Monoid a => Component -> (a -> BuildInfo) ->
            CondTree ConfVar [Dependency] a -> FlaggedDeps Component PN
    conv comp getInfo = convCondTree os arch cinfo pi fds comp getInfo ipns .
                        PDC.addBuildableCondition getInfo
    -- All components' dependencies; test and benchmark deps are guarded by
    -- their optional stanzas, setup deps get the 'ComponentSetup' tag.
    flagged_deps
        = concatMap (\(nm, ds) -> conv (ComponentLib nm) libBuildInfo ds) libs
       ++ concatMap (\(nm, ds) -> conv (ComponentExe nm) buildInfo ds) exes
       ++ prefix (Stanza (SN pi TestStanzas))
            (L.map (\(nm, ds) -> conv (ComponentTest nm) testBuildInfo ds) tests)
       ++ prefix (Stanza (SN pi BenchStanzas))
            (L.map (\(nm, ds) -> conv (ComponentBench nm) benchmarkBuildInfo ds) benchs)
       ++ maybe [] (convSetupBuildInfo pi) (setupBuildInfo pkg)
  in
    PInfo flagged_deps fds Nothing
-- With convenience libraries, we have to do some work. Imagine you
-- have the following Cabal file:
--
-- name: foo
-- library foo-internal
-- build-depends: external-a
-- library
-- build-depends: foo-internal, external-b
-- library foo-helper
-- build-depends: foo, external-c
-- test-suite foo-tests
-- build-depends: foo-helper, external-d
--
-- What should the final flagged dependency tree be? Ideally, it
-- should look like this:
--
-- [ Simple (Dep external-a) (Library foo-internal)
-- , Simple (Dep external-b) (Library foo)
-- , Stanza (SN foo TestStanzas) $
-- [ Simple (Dep external-c) (Library foo-helper)
-- , Simple (Dep external-d) (TestSuite foo-tests) ]
-- ]
--
-- There are two things to note:
--
-- 1. First, we eliminated the "local" dependencies foo-internal
-- and foo-helper. This are implicitly assumed to refer to "foo"
-- so we don't need to have them around. If you forget this,
-- Cabal will then try to pick a version for "foo-helper" but
-- no such package exists (this is the cost of overloading
-- build-depends to refer to both packages and components.)
--
-- 2. Second, it is more precise to have external-c be qualified
-- by a test stanza, since foo-helper only needs to be built if
-- your are building the test suite (and not the main library).
-- If you omit it, Cabal will always attempt to depsolve for
-- foo-helper even if you aren't building the test suite.
-- | Create a flagged dependency tree from a list @fds@ of flagged
-- dependencies, using @f@ to form the tree node (@f@ will be
-- something like @Stanza sn@).
-- | Wrap a list of flagged-dependency groups under a single node built by
-- @f@ (e.g. @Stanza sn@); an empty list produces no node at all.
prefix :: (FlaggedDeps comp qpn -> FlaggedDep comp' qpn)
       -> [FlaggedDeps comp qpn] -> FlaggedDeps comp' qpn
prefix f groups
  | null groups = []
  | otherwise   = [f (concat groups)]
-- | Convert flag information. Automatic flags are now considered weak
-- unless strong flags have been selected explicitly.
-- | Convert flag declarations into solver flag info.  A flag is "weak"
-- (its choice deferred) when it is automatic and strong flags were not
-- requested.
flagInfo :: StrongFlags -> [PD.Flag] -> FlagInfo
flagInfo (StrongFlags strfl) flags = M.fromList (L.map mkEntry flags)
  where
    mkEntry (MkFlag fn _ b m) = (fn, FInfo b m (weakness m))
    weakness m = WeakOrTrivial $ not (strfl || m)
-- | Internal package names, which should not be interpreted as true
-- dependencies.
-- | Names of internal (intra-package) libraries, which must not be treated
-- as real package dependencies.
type IPNs = Set PN

-- | Keep a flagged dependency only when its package name refers to a real,
-- external package rather than an internal library.
filterIPNs :: IPNs -> Dependency -> FlaggedDep Component PN -> FlaggedDeps Component PN
filterIPNs ipns (Dependency pn _) fd =
  if pn `S.member` ipns then [] else [fd]
-- | Convert condition trees to flagged dependencies. Mutually
-- recursive with 'convBranch'. See 'convBranch' for an explanation
-- of all arguments preceeding the input 'CondTree'.
convCondTree :: OS -> Arch -> CompilerInfo -> PI PN -> FlagInfo ->
                Component ->
                (a -> BuildInfo) ->
                IPNs ->
                CondTree ConfVar [Dependency] a -> FlaggedDeps Component PN
convCondTree os arch cinfo pi@(PI pn _) fds comp getInfo ipns (CondNode info ds branches) =
    -- internal-library deps are dropped here via 'filterIPNs'
    concatMap
      (\d -> filterIPNs ipns d (D.Simple (convDep pn d) comp))
      ds -- unconditional package dependencies
 ++ L.map (\e -> D.Simple (Ext e) comp) (PD.allExtensions bi) -- unconditional extension dependencies
 ++ L.map (\l -> D.Simple (Lang l) comp) (PD.allLanguages bi) -- unconditional language dependencies
 ++ L.map (\(Dependency pkn vr) -> D.Simple (Pkg pkn vr) comp) (PD.pkgconfigDepends bi) -- unconditional pkg-config dependencies
 ++ concatMap (convBranch os arch cinfo pi fds comp getInfo ipns) branches
  where
    -- the BuildInfo of this node's payload (library/executable/test/bench)
    bi = getInfo info
-- | Branch interpreter. Mutually recursive with 'convCondTree'.
--
-- Here, we try to simplify one of Cabal's condition tree branches into the
-- solver's flagged dependency format, which is weaker. Condition trees can
-- contain complex logical expression composed from flag choices and special
-- flags (such as architecture, or compiler flavour). We try to evaluate the
-- special flags and subsequently simplify to a tree that only depends on
-- simple flag choices.
--
-- This function takes a number of arguments:
--
-- 1. Some pre dependency-solving known information ('OS', 'Arch',
-- 'CompilerInfo') for @os()@, @arch()@ and @impl()@ variables,
--
-- 2. The package instance @'PI' 'PN'@ which this condition tree
-- came from, so that we can correctly associate @flag()@
-- variables with the correct package name qualifier,
--
-- 3. The flag defaults 'FlagInfo' so that we can populate
-- 'Flagged' dependencies with 'FInfo',
--
-- 4. The name of the component 'Component' so we can record where
-- the fine-grained information about where the component came
-- from (see 'convCondTree'), and
--
-- 5. A selector to extract the 'BuildInfo' from the leaves of
-- the 'CondTree' (which actually contains the needed
-- dependency information.)
--
-- 6. The set of package names which should be considered internal
-- dependencies, and thus not handled as dependencies.
convBranch :: OS -> Arch -> CompilerInfo ->
              PI PN -> FlagInfo ->
              Component ->
              (a -> BuildInfo) ->
              IPNs ->
              (Condition ConfVar,
               CondTree ConfVar [Dependency] a,
               Maybe (CondTree ConfVar [Dependency] a)) -> FlaggedDeps Component PN
convBranch os arch cinfo pi@(PI pn _) fds comp getInfo ipns (c', t', mf') =
    go c' ( convCondTree os arch cinfo pi fds comp getInfo ipns t')
          (maybe [] (convCondTree os arch cinfo pi fds comp getInfo ipns) mf')
  where
    -- Partially evaluate the condition: 'go cond then else' picks or
    -- combines the two converted branches.  Only genuine flag choices
    -- survive as 'Flagged' nodes; os/arch/impl are decided right here.
    go :: Condition ConfVar ->
          FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN
    go (Lit True) t _ = t
    go (Lit False) _ f = f
    go (CNot c) t f = go c f t
    go (CAnd c d) t f = go c (go d t f) f
    go (COr c d) t f = go c t (go d t f)
    -- NOTE(review): 'fds ! fn' is a partial lookup; presumably every flag
    -- referenced in a condition is declared in the package -- confirm.
    go (Var (Flag fn)) t f = extractCommon t f ++ [Flagged (FN pi fn) (fds ! fn) t f]
    go (Var (OS os')) t f
      | os == os' = t
      | otherwise = f
    go (Var (Arch arch')) t f
      | arch == arch' = t
      | otherwise = f
    go (Var (Impl cf cvr)) t f
      | matchImpl (compilerInfoId cinfo) ||
            -- fixme: Nothing should be treated as unknown, rather than empty
            -- list. This code should eventually be changed to either
            -- support partial resolution of compiler flags or to
            -- complain about incompletely configured compilers.
        any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = t
      | otherwise = f
      where
        matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv
    -- If both branches contain the same package as a simple dep, we lift it to
    -- the next higher-level, but without constraints. This heuristic together
    -- with deferring flag choices will then usually first resolve this package,
    -- and try an already installed version before imposing a default flag choice
    -- that might not be what we want.
    --
    -- Note that we make assumptions here on the form of the dependencies that
    -- can occur at this point. In particular, no occurrences of Fixed, and no
    -- occurrences of multiple version ranges, as all dependencies below this
    -- point have been generated using 'convDep'.
    extractCommon :: FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN
    extractCommon ps ps' = [ D.Simple (Dep pn1 (Constrained [(vr1 .||. vr2, P pn)])) comp
                           | D.Simple (Dep pn1 (Constrained [(vr1, _)])) _ <- ps
                           , D.Simple (Dep pn2 (Constrained [(vr2, _)])) _ <- ps'
                           , pn1 == pn2
                           ]
-- | Convert a Cabal dependency to a solver-specific dependency.
convDep :: PN -> Dependency -> Dep PN
-- The qualifier @P pn'@ records which package introduced the constraint.
convDep pn' (Dependency pn vr) = Dep pn (Constrained [(vr, P pn')])
-- | Convert setup dependencies
-- | Convert custom-setup dependencies; each is tagged 'ComponentSetup'
-- and qualified by the package that declares it.
convSetupBuildInfo :: PI PN -> SetupBuildInfo -> FlaggedDeps Component PN
convSetupBuildInfo (PI pn _i) nfo =
  [ D.Simple (convDep pn d) ComponentSetup | d <- PD.setupDepends nfo ]
| thomie/cabal | cabal-install/Distribution/Solver/Modular/IndexConversion.hs | bsd-3-clause | 15,127 | 0 | 19 | 3,782 | 3,100 | 1,667 | 1,433 | 149 | 9 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Numeric.Log
( Log(..)
) where
import Data.Function (on)
import Numeric.Algebra
import Prelude hiding ((*),(^),(/),recip,negate,subtract)
newtype Log r = Log { runLog :: r }
instance Multiplicative r => Additive (Log r) where
Log a + Log b = Log (a * b)
sumWith1 f = Log . productWith1 (runLog . f)
sinnum1p n (Log m) = Log (pow1p m n)
instance Unital r => LeftModule Natural (Log r) where
n .* Log m = Log (pow m n)
instance Unital r => RightModule Natural (Log r) where
Log m *. n = Log (pow m n)
instance Unital r => Monoidal (Log r) where
zero = Log one
sinnum n (Log m) = Log (pow m n)
sumWith f = Log . productWith (runLog . f)
instance Division r => LeftModule Integer (Log r) where
n .* Log m = Log (m ^ n)
instance Division r => RightModule Integer (Log r) where
Log m *. n = Log (m ^ n)
instance Division r => Group (Log r) where
Log a - Log b = Log (a / b)
negate (Log a) = Log (recip a)
subtract (Log a) (Log b) = Log (a \\ b)
times n (Log m) = Log (m ^ n)
instance Commutative r => Abelian (Log r)
instance Band r => Idempotent (Log r)
instance Factorable r => Partitionable (Log r) where
partitionWith f = factorWith (f `on` Log) . runLog
| athanclark/algebra | src/Numeric/Log.hs | bsd-3-clause | 1,247 | 2 | 9 | 291 | 654 | 327 | 327 | 32 | 0 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.ReadCopyPixels
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to section 4.3 (Drawing, Reading, and Copying Pixels)
-- of the OpenGL 2.1 specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.ReadCopyPixels (
-- * Reading Pixels
readPixels, readBuffer,
-- * Copying Pixels
PixelCopyType(..), copyPixels,
-- * Copying Pixels for framebuffers
BlitBuffer(..), blitFramebuffer
) where
import Graphics.Rendering.OpenGL.GL.BufferMode
import Graphics.Rendering.OpenGL.GL.CoordTrans
import Graphics.Rendering.OpenGL.GL.PixelData
import Graphics.Rendering.OpenGL.GL.QueryUtils
import Graphics.Rendering.OpenGL.GL.StateVar
import Graphics.Rendering.OpenGL.GL.Texturing.Filter
import Graphics.Rendering.OpenGL.GLU.ErrorsInternal
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
readPixels :: Position -> Size -> PixelData a -> IO ()
readPixels (Position x y) (Size w h) pd =
withPixelData pd $ glReadPixels x y w h
--------------------------------------------------------------------------------
readBuffer :: StateVar BufferMode
readBuffer =
makeStateVar
(getEnum1 unmarshalBufferMode GetReadBuffer)
(maybe recordInvalidValue glReadBuffer . marshalBufferMode)
--------------------------------------------------------------------------------
data PixelCopyType =
CopyColor
| CopyDepth
| CopyStencil
deriving ( Eq, Ord, Show )
marshalPixelCopyType :: PixelCopyType -> GLenum
marshalPixelCopyType x = case x of
CopyColor -> gl_COLOR
CopyDepth -> gl_DEPTH
CopyStencil -> gl_STENCIL
--------------------------------------------------------------------------------
copyPixels :: Position -> Size -> PixelCopyType -> IO ()
copyPixels (Position x y) (Size w h) t =
glCopyPixels x y w h (marshalPixelCopyType t)
--------------------------------------------------------------------------------
-- | The buffers which can be copied with 'blitFramebuffer'.
data BlitBuffer =
ColorBuffer'
| StencilBuffer'
| DepthBuffer'
deriving ( Eq, Ord, Show )
marshalBlitBuffer :: BlitBuffer -> GLbitfield
marshalBlitBuffer x = case x of
ColorBuffer' -> gl_COLOR_BUFFER_BIT
StencilBuffer' -> gl_STENCIL_BUFFER_BIT
DepthBuffer' -> gl_DEPTH_BUFFER_BIT
--------------------------------------------------------------------------------
blitFramebuffer :: Position
-> Position
-> Position
-> Position
-> [BlitBuffer]
-> TextureFilter
-> IO ()
blitFramebuffer (Position sx0 sy0)
(Position sx1 sy1)
(Position dx0 dy0)
(Position dx1 dy1)
buffers
filt =
glBlitFramebuffer sx0 sy0 sx1 sy1 dx0 dy0 dx1 dy1
(sum (map marshalBlitBuffer buffers))
(fromIntegral (marshalMagnificationFilter filt))
| hesiod/OpenGL | src/Graphics/Rendering/OpenGL/GL/ReadCopyPixels.hs | bsd-3-clause | 3,300 | 0 | 12 | 641 | 567 | 322 | 245 | 59 | 3 |
module Main where
test1 :: Int
test2 :: a -> a -> Complex a
test2 = (:+)
test25 :: NFData a => a
test25 = undefined
test3 :: (b -> b -> c) -> (a -> b) -> a -> a -> c
test3 = on
test4 = putStrLn "Bar"
test5 :: [t] -> ()
test5 (_:_) = ()
-- hlint
test6 :: [Integer] -> [Integer]
test6 = map (+ 1) . map (* 2)
| carlohamalainen/ghc-mod | test-elisp/inp.hs | bsd-3-clause | 314 | 0 | 8 | 82 | 171 | 96 | 75 | 13 | 1 |
module Infix2 where
-- define an infix constructor and attempt to remove it...
data T1 a b = a :$: b | b :#: a
f x y = x :$: y | kmate/HaRe | old/testing/removeCon/Infix2.hs | bsd-3-clause | 130 | 0 | 6 | 35 | 39 | 23 | 16 | 3 | 1 |
{-# LANGUAGE CPP #-}
-- This module tests the deprecated properties of Data.Map and Data.IntMap,
-- because these cannot be tested in either map-properties or
-- intmap-properties, as these modules are designed to work with the .Lazy and
-- .Strict modules.
import qualified Data.Map as M
import qualified Data.Map.Strict as SM
import qualified Data.IntMap as IM
import qualified Data.IntMap.Strict as SIM
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Text.Show.Functions ()
default (Int)
main :: IO ()
main = defaultMain
[ testProperty "Data.Map.insertWith' as Strict.insertWith" prop_mapInsertWith'Strict
, testProperty "Data.Map.insertWith' undefined value" prop_mapInsertWith'Undefined
, testProperty "Data.Map.insertWithKey' as Strict.insertWithKey" prop_mapInsertWithKey'Strict
, testProperty "Data.Map.insertWithKey' undefined value" prop_mapInsertWithKey'Undefined
, testProperty "Data.Map.insertLookupWithKey' as Strict.insertLookupWithKey" prop_mapInsertLookupWithKey'Strict
, testProperty "Data.Map.insertLookupWithKey' undefined value" prop_mapInsertLookupWithKey'Undefined
, testProperty "Data.IntMap.insertWith' as Strict.insertWith" prop_intmapInsertWith'Strict
, testProperty "Data.IntMap.insertWith' undefined value" prop_intmapInsertWith'Undefined
, testProperty "Data.IntMap.insertWithKey' as Strict.insertWithKey" prop_intmapInsertWithKey'Strict
, testProperty "Data.IntMap.insertWithKey' undefined value" prop_intmapInsertWithKey'Undefined
]
---------- Map properties ----------
prop_mapInsertWith'Strict :: [(Int, Int)] -> (Int -> Int -> Int) -> [(Int, Int)] -> Bool
prop_mapInsertWith'Strict xs f kxxs =
let m = M.fromList xs
insertList ins = foldr (\(kx, x) -> ins f kx x) m kxxs
in insertList M.insertWith' == insertList SM.insertWith
prop_mapInsertWith'Undefined :: [(Int, Int)] -> Bool
prop_mapInsertWith'Undefined xs =
let m = M.fromList xs
f _ x = x * 33
insertList ins = foldr (\(kx, _) -> ins f kx undefined) m xs
in insertList M.insertWith' == insertList M.insertWith
prop_mapInsertWithKey'Strict :: [(Int, Int)] -> (Int -> Int -> Int -> Int) -> [(Int, Int)] -> Bool
prop_mapInsertWithKey'Strict xs f kxxs =
let m = M.fromList xs
insertList ins = foldr (\(kx, x) -> ins f kx x) m kxxs
in insertList M.insertWithKey' == insertList SM.insertWithKey
prop_mapInsertWithKey'Undefined :: [(Int, Int)] -> Bool
prop_mapInsertWithKey'Undefined xs =
let m = M.fromList xs
f k _ x = (k + x) * 33
insertList ins = foldr (\(kx, _) -> ins f kx undefined) m xs
in insertList M.insertWithKey' == insertList M.insertWithKey
prop_mapInsertLookupWithKey'Strict :: [(Int, Int)] -> (Int -> Int -> Int -> Int) -> [(Int, Int)] -> Bool
prop_mapInsertLookupWithKey'Strict xs f kxxs =
let m = M.fromList xs
insertLookupList insLkp = scanr (\(kx, x) (_, mp) -> insLkp f kx x mp) (Nothing, m) kxxs
in insertLookupList M.insertLookupWithKey' == insertLookupList SM.insertLookupWithKey
prop_mapInsertLookupWithKey'Undefined :: [(Int, Int)] -> Bool
prop_mapInsertLookupWithKey'Undefined xs =
let m = M.fromList xs
f k _ x = (k + x) * 33
insertLookupList insLkp = scanr (\(kx, _) (_, mp) -> insLkp f kx undefined mp) (Nothing, m) xs
in insertLookupList M.insertLookupWithKey' == insertLookupList M.insertLookupWithKey
---------- IntMap properties ----------
prop_intmapInsertWith'Strict :: [(Int, Int)] -> (Int -> Int -> Int) -> [(Int, Int)] -> Bool
prop_intmapInsertWith'Strict xs f kxxs =
let m = IM.fromList xs
insertList ins = foldr (\(kx, x) -> ins f kx x) m kxxs
in insertList IM.insertWith' == insertList SIM.insertWith
prop_intmapInsertWith'Undefined :: [(Int, Int)] -> Bool
prop_intmapInsertWith'Undefined xs =
let m = IM.fromList xs
f _ x = x * 33
insertList ins = foldr (\(kx, _) -> ins f kx undefined) m xs
in insertList IM.insertWith' == insertList IM.insertWith
prop_intmapInsertWithKey'Strict :: [(Int, Int)] -> (Int -> Int -> Int -> Int) -> [(Int, Int)] -> Bool
prop_intmapInsertWithKey'Strict xs f kxxs =
let m = IM.fromList xs
insertList ins = foldr (\(kx, x) -> ins f kx x) m kxxs
in insertList IM.insertWithKey' == insertList SIM.insertWithKey
prop_intmapInsertWithKey'Undefined :: [(Int, Int)] -> Bool
prop_intmapInsertWithKey'Undefined xs =
let m = IM.fromList xs
f k _ x = (k + x) * 33
insertList ins = foldr (\(kx, _) -> ins f kx undefined) m xs
in insertList IM.insertWithKey' == insertList IM.insertWithKey
| iu-parfunc/containers | tests/deprecated-properties.hs | bsd-3-clause | 4,608 | 0 | 12 | 837 | 1,388 | 736 | 652 | 76 | 1 |
import Data.IORef
import Control.Monad
import Control.Exception
import Control.Concurrent.MVar
import System.Mem
main :: IO ()
main = do
run
run
run
run
m <- newEmptyMVar
quit m
performMajorGC
takeMVar m
run :: IO ()
run = do
ref <- newIORef ()
void $ mkWeakIORef ref $ do
putStr "."
throwIO $ ErrorCall "failed"
quit :: MVar () -> IO ()
quit m = do
ref <- newIORef ()
void $ mkWeakIORef ref $ do
putMVar m ()
| sdiehl/ghc | libraries/base/tests/T13167.hs | bsd-3-clause | 449 | 0 | 11 | 115 | 196 | 89 | 107 | 26 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- |
-- Module : Control.Monad.Fail
-- Copyright : (C) 2015 David Luposchainsky,
-- (C) 2015 Herbert Valerio Riedel
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Transitional module providing the 'MonadFail' class and primitive
-- instances.
--
-- This module can be imported for defining forward compatible
-- 'MonadFail' instances:
--
-- @
-- import qualified Control.Monad.Fail as Fail
--
-- instance Monad Foo where
-- (>>=) = {- ...bind impl... -}
--
-- -- Provide legacy 'fail' implementation for when
-- -- new-style MonadFail desugaring is not enabled.
-- fail = Fail.fail
--
-- instance Fail.MonadFail Foo where
-- fail = {- ...fail implementation... -}
-- @
--
-- See <https://prime.haskell.org/wiki/Libraries/Proposals/MonadFail>
-- for more details.
--
-- @since 4.9.0.0
--
module Control.Monad.Fail ( MonadFail(fail) ) where
import GHC.Base (String, Monad(), Maybe(Nothing), IO())
import {-# SOURCE #-} GHC.IO (failIO)
-- | When a value is bound in @do@-notation, the pattern on the left
-- hand side of @<-@ might not match. In this case, this class
-- provides a function to recover.
--
-- A 'Monad' without a 'MonadFail' instance may only be used in conjunction
-- with pattern that always match, such as newtypes, tuples, data types with
-- only a single data constructor, and irrefutable patterns (@~pat@).
--
-- Instances of 'MonadFail' should satisfy the following law: @fail s@ should
-- be a left zero for '>>=',
--
-- @
-- fail s >>= f = fail s
-- @
--
-- If your 'Monad' is also 'MonadPlus', a popular definition is
--
-- @
-- fail _ = mzero
-- @
--
-- @since 4.9.0.0
class Monad m => MonadFail m where
fail :: String -> m a
instance MonadFail Maybe where
fail _ = Nothing
instance MonadFail [] where
{-# INLINE fail #-}
fail _ = []
instance MonadFail IO where
fail = failIO
| tolysz/prepare-ghcjs | spec-lts8/base/Control/Monad/Fail.hs | bsd-3-clause | 2,037 | 0 | 8 | 406 | 193 | 135 | 58 | 17 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.