code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
-- Solution for day 4 of http://adventofcode.com/
module Main where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString.Lazy.Char8 as C
import Data.Digest.Pure.MD5 (md5)
-- | The puzzle's secret key, kept as a lazy ByteString so it can be fed
-- directly to the pure-MD5 digest function.
newtype Secret = Secret { getByteString :: LB.ByteString }
-- | Build the message to hash for one counter value: the secret key with
-- the decimal rendering of the counter appended.
mkHash :: Secret -> Int -> LB.ByteString
mkHash secret n = getByteString secret `LB.append` C.pack (show n)
-- | Infinite stream of (counter, MD5 hex digest) pairs, counters from 1.
mine :: Secret -> [(Int, String)]
mine secret = [ (n, show (md5 (mkHash secret n))) | n <- [1 ..] ]
-- | A digest "hits" when its first five characters are all '0'.
-- (Strings shorter than five characters of all zeros would also count,
-- but MD5 hex digests are always 32 characters.)
hit :: String -> Bool
hit = all (== '0') . take 5

-- | First (counter, digest) pair whose digest is a hit.
-- Partial by design ('head'): intended for the infinite stream produced
-- by 'mine', where a hit eventually exists.
answer :: [(Int, String)] -> (Int, String)
answer = head . filter (hit . snd)
-- | Prompt for the secret key on stdin and print the first mined hit.
main :: IO ()
main = do
  putStrLn "Secret key?"
  raw <- B.getLine
  print (answer (mine (Secret (LB.fromStrict raw))))
|
tomwadeson/adventofcode
|
04/app/Main.hs
|
mit
| 854
| 0
| 11
| 194
| 319
| 181
| 138
| 20
| 1
|
{-# LANGUAGE QuasiQuotes #-}
{-|
Module : BreadU.Pages.JS.SocialButtons
Description : SocialButtons JavaScript.
Stability : experimental
Portability : POSIX
Third-party JS for social buttons.
-}
module BreadU.Pages.JS.SocialButtons
( twitterWidget
, facebookSDK
) where
import BreadU.Types ( LangCode(..) )
import Data.Text ( Text )
import Data.String.QQ
-- | Official Twitter widget loader, used for the Tweet button.
-- Verbatim third-party snippet: it injects the platform.twitter.com
-- widgets script once (keyed by element id "twitter-wjs") and exposes a
-- 'twttr.ready' callback queue for code that runs before the script loads.
twitterWidget :: Text
twitterWidget = [s|
window.twttr = (function(d, s, id) {
var js, fjs = d.getElementsByTagName(s)[0],
t = window.twttr || {};
if (d.getElementById(id)) return t;
js = d.createElement(s);
js.id = id;
js.src = "https://platform.twitter.com/widgets.js";
fjs.parentNode.insertBefore(js, fjs);
t._e = [];
t.ready = function(f) {
t._e.push(f);
};
return t;
}(document, "script", "twitter-wjs"));
|]
-- | Facebook SDK loader with a hard-coded app id, one equation per UI
-- language; only the locale segment of the script URL differs
-- (en_US / de_DE / ru_RU).
-- NOTE(review): assumes LangCode has exactly the constructors En, De and
-- Ru — any additional constructor would make this function partial; confirm
-- against BreadU.Types.
facebookSDK :: LangCode -> Text
facebookSDK En = [s|
(function(d, s, id) {
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) return;
js = d.createElement(s); js.id = id;
js.src = "//connect.facebook.net/en_US/sdk.js#xfbml=1&version=v2.9&appId=1162939683834190";
fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
|]
facebookSDK De = [s|
(function(d, s, id) {
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) return;
js = d.createElement(s); js.id = id;
js.src = "//connect.facebook.net/de_DE/sdk.js#xfbml=1&version=v2.9&appId=1162939683834190";
fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
|]
facebookSDK Ru = [s|
(function(d, s, id) {
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) return;
js = d.createElement(s); js.id = id;
js.src = "//connect.facebook.net/ru_RU/sdk.js#xfbml=1&version=v2.9&appId=1162939683834190";
fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
|]
|
denisshevchenko/breadu.info
|
src/lib/BreadU/Pages/JS/SocialButtons.hs
|
mit
| 2,029
| 0
| 6
| 344
| 109
| 73
| 36
| 13
| 1
|
module GHCJS.DOM.SpeechSynthesisVoice (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/SpeechSynthesisVoice.hs
|
mit
| 50
| 0
| 3
| 7
| 10
| 7
| 3
| 1
| 0
|
module Models
( RandomNumber(..)
, readsNumberFormat
, generateForNumberFormat
) where
import Control.Applicative ((<$>))
import System.Random (randomRIO)
-- | Output formats a random number can be generated in.
data NumberFormat = NFPlain -- ^ A bare integer.
                  | NFUSD   -- ^ US dollars with cents.

-- | Parse a format name; anything other than "usd" falls back to plain.
readsNumberFormat :: String -> NumberFormat
readsNumberFormat x
  | x == "usd" = NFUSD
  | otherwise  = NFPlain
-- | A generated random number, tagged with its output format.
data RandomNumber = Plain Integer       -- ^ Bare integer value.
                  | USD Integer Integer -- ^ Dollars and cents.

-- | Generate a random number in @[lo, hi]@ for the given format.
-- For 'NFUSD', dollars are drawn from @[lo, hi]@ and cents from @[0, 99]@.
generateForNumberFormat :: Integer -> Integer -> NumberFormat -> IO RandomNumber
generateForNumberFormat lo hi fmt =
  case fmt of
    NFPlain -> Plain <$> randomRIO (lo, hi)
    NFUSD -> do
      dollars <- randomRIO (lo, hi)
      cents <- randomRIO (0, 99)
      return $ USD dollars cents
|
rjregenold/icanhazrandomnumber
|
src/Models.hs
|
mit
| 587
| 0
| 12
| 169
| 179
| 96
| 83
| 20
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module CreepyWaffle.Types
( -- * Items
Item (..)
, ItemType (..)
-- * Actions
, Act (..)
, ActType (..)
-- * Player State
, Player (..)
, updateHP
, updateMP
, updateLX
, updateLY
-- * Current Cursor
, CursorIdx (..)
-- * Overall State (Player + Cursor)
, State (..)
, updateIdx
, moveTo
, moveToR
-- -- * Small Event, a subset of Vty.Event
-- , SmallEvent
-- , vty2se
-- * Block for Simple Map
, Block (..)
, (<!!>)
) where
--
import Data.Default
--
-- import qualified Graphics.Vty as V
--
-- | An inventory item: a display name plus its category.
data Item = Item
  { itName :: String   -- ^ Display name shown to the player.
  , itType :: ItemType -- ^ Category tag (weapon, armor, potion, book).
  }
  deriving (Eq,Ord)

-- | Item categories.  NOTE(review): IT_AROMR / IT_POSION look like typos
-- for ARMOR / POTION, but the names are part of the exported interface and
-- are kept unchanged.  Potions and books carry the action they trigger.
data ItemType = IT_WEAPON
              | IT_AROMR
              | IT_POSION Act
              | IT_BOOK Act
  deriving (Eq,Ord)

-- Default item: an unknown potion performing the default (idle) action.
instance Default Item where
  def = Item "unknown posion" $ IT_POSION def

-- Single-CJK-character tags used when rendering item categories.
instance Show ItemType where
  show IT_WEAPON = "武"
  show IT_AROMR = "防"
  show (IT_POSION _) = "藥"
  show (IT_BOOK _) = "書"

-- Rendered as "[<category tag>]:<name>".
instance Show Item where
  show (Item n t) = "["++ show t ++"]:"++n
--
-- | A named action a player (or item) can perform.
data Act = Act
  { actName :: String  -- ^ Display name.
  , actType :: ActType -- ^ What the action does.
  }
  deriving (Eq,Ord)

-- | Action payload: a cast with heal/damage amounts, or a move with speed.
data ActType = AT_CAST {heal :: Int, dmg :: Int}
             | AT_MOVE {speed :: Int}
  deriving (Eq,Ord)

-- Default action: an idle move of speed 0 (name spelled "idel" in source).
instance Default Act where
  def = Act "idel" $ AT_MOVE 0

-- Rendered as "[<type>]:<name>".
instance Show Act where
  show (Act n t) = "["++ show t ++"]:"++n

-- CJK-tagged renderings: 效<heal/dmg> for casts, 移<speed> for moves.
instance Show ActType where
  show (AT_CAST h d) = "效<"++show h++"/"++show d++">"
  show (AT_MOVE s) = "移<"++show s++">"
--
-- | Full player state: vitals, map position, inventory and known actions.
data Player = Player
  { hp :: Int, mp :: Int   -- ^ Hit points and magic points.
  , lX :: Int, lY :: Int   -- ^ Location on the map.
  , package :: [Item]      -- ^ Carried items.
  , actions :: [Act]       -- ^ Available actions.
  }
  deriving (Eq,Show)
--
-- | Add @i@ to the player's hit points (may be negative).
updateHP :: Int -> Player -> Player
updateHP i p = p { hp = hp p + i }

-- | Add @i@ to the player's magic points (may be negative).
updateMP :: Int -> Player -> Player
updateMP i p = p { mp = mp p + i }

-- | Shift the player's X coordinate by @i@.
updateLX :: Int -> Player -> Player
updateLX i p = p { lX = lX p + i }

-- | Shift the player's Y coordinate by @i@.
updateLY :: Int -> Player -> Player
updateLY i p = p { lY = lY p + i }
--
-- | Current selection cursor: selX picks the list (0 = items, 1 = actions,
-- per 'updateIdx'), selY the row within it.
data CursorIdx = CursorIdx {selX :: Int, selY :: Int}
  deriving (Eq,Show)

-- Cursor starts at the first row of the first list.
instance Default CursorIdx where
  def = CursorIdx 0 0

-- | Overall game state: the player plus the UI cursor.
data State = St Player CursorIdx
  deriving (Eq,Show)
--
-- | Move the selection cursor by (dx, dy), clamping the column to {0,1}
-- (0 = inventory, 1 = actions) and the row to the valid index range of
-- whichever list the new column selects.
-- The original had a second catch-all equation; it was unreachable because
-- 'St' and 'CursorIdx' are the only constructors of their types, so it has
-- been removed.
updateIdx :: Int -> Int -> State -> State
updateIdx dx dy (St p (CursorIdx x y)) =
  let newX = max 0 $ min (x + dx) 1
      newY = max 0 $ min (y + dy) $ (\n -> n - 1) $
               if newX == 0 then length $ package p else length $ actions p
  in St p (CursorIdx newX newY)
--
-- | Teleport the player to absolute coordinates; the cursor is unchanged.
moveTo :: Int -> Int -> State -> State
moveTo newX newY (St player cursor) =
  St (player { lX = newX, lY = newY }) cursor

-- | Move the player relative to the current position; cursor unchanged.
moveToR :: Int -> Int -> State -> State
moveToR dx dy (St player cursor) =
  St (updateLX dx (updateLY dy player)) cursor
--
-- type SmallEvent = Maybe V.Key
-- --
-- vty2se :: V.Event -> SmallEvent
-- vty2se (V.EvKey ke ms) = Just ke
-- vty2se _ = Nothing
--
-- | Tile semantics for the simple map, keyed on the tile's representation.
class Block a where
  -- | Can the player walk through this tile?
  throughable :: a -> Bool
  -- | Can an encounter trigger on this tile?
  encounterable :: a -> Bool
  -- | Can something be picked up from this tile?
  pickable :: a -> Bool
-- | Index a row-major character map by (column, row).
-- Partial: out-of-range coordinates raise the usual '!!' error.
(<!!>) :: [String] -> (Int,Int) -> Char
grid <!!> (col, row) = (grid !! row) !! col
-- | Character-map tile semantics: '*' is a wall, the digits '1'..'5' mark
-- encounter spots, and nothing is pickable.
instance Block Char where
  throughable c = c /= '*'
  encounterable c = c `elem` ['1' .. '5']
  pickable _ = False
|
jaiyalas/creepy-waffle
|
src/CreepyWaffle/Types.hs
|
mit
| 3,439
| 0
| 14
| 991
| 1,315
| 728
| 587
| 99
| 2
|
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-|
Copyright : (c) Microsoft
License : MIT
Maintainer : adamsap@microsoft.com
Stability : provisional
Portability : portable
-}
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module Language.Bond.Syntax.Util
( -- * Type classification
-- | Functions that test if a type belongs to a particular category. These
-- functions will resolve type aliases and return answer based on the type
-- the alias resolves to.
isScalar
, isUnsigned
, isSigned
, isFloat
, isString
, isContainer
, isList
, isAssociative
, isNullable
, isStruct
, isEnum
, isMetaName
-- * Type mapping
, fmapType
-- * Folds
, foldMapFields
, foldMapStructFields
, foldMapType
-- * Helper functions
, resolveAlias
) where
import Data.Maybe
import Data.List
import qualified Data.Foldable as F
import Data.Monoid
import Prelude
import Language.Bond.Util
import Language.Bond.Syntax.Types
-- | Returns 'True' if the type represents a scalar.
-- Type parameters constrained to 'Value' and enums count as scalars too.
isScalar :: Type -> Bool
isScalar BT_Int8 = True
isScalar BT_Int16 = True
isScalar BT_Int32 = True
isScalar BT_Int64 = True
isScalar BT_UInt8 = True
isScalar BT_UInt16 = True
isScalar BT_UInt32 = True
isScalar BT_UInt64 = True
isScalar BT_Float = True
isScalar BT_Double = True
isScalar BT_Bool = True
isScalar (BT_TypeParam (TypeParam _ (Just Value))) = True
isScalar (BT_UserDefined Enum {..} _) = True
isScalar (BT_UserDefined a@Alias {} args) = isScalar $ resolveAlias a args -- resolve one alias level, re-test
isScalar _ = False
-- | Returns 'True' if the type represents unsigned integer.
isUnsigned :: Type -> Bool
isUnsigned BT_UInt8 = True
isUnsigned BT_UInt16 = True
isUnsigned BT_UInt32 = True
isUnsigned BT_UInt64 = True
isUnsigned (BT_UserDefined a@Alias {} args) = isUnsigned $ resolveAlias a args
isUnsigned _ = False
-- | Returns 'True' if the type represents signed integer.
isSigned :: Type -> Bool
isSigned BT_Int8 = True
isSigned BT_Int16 = True
isSigned BT_Int32 = True
isSigned BT_Int64 = True
isSigned (BT_UserDefined a@Alias {} args) = isSigned $ resolveAlias a args
isSigned _ = False
-- | Returns 'True' if the type represents floating point number.
isFloat :: Type -> Bool
isFloat BT_Float = True
isFloat BT_Double = True
isFloat (BT_UserDefined a@Alias {} args) = isFloat $ resolveAlias a args
isFloat _ = False
-- | Returns 'True' if the type represents a meta-name type.
isMetaName :: Type -> Bool
isMetaName BT_MetaName = True
isMetaName BT_MetaFullName = True
isMetaName (BT_UserDefined a@Alias {} args) = isMetaName $ resolveAlias a args
isMetaName _ = False
-- | Returns 'True' if the type represents a string.
isString :: Type -> Bool
isString BT_String = True
isString BT_WString = True
isString (BT_UserDefined a@Alias {} args) = isString $ resolveAlias a args
isString _ = False
-- | Returns 'True' if the type represents a list or a vector.
isList :: Type -> Bool
isList (BT_List _) = True
isList (BT_Vector _) = True
isList (BT_UserDefined a@Alias {} args) = isList $ resolveAlias a args
isList _ = False
-- | Returns 'True' if the type represents a map or a set.
isAssociative :: Type -> Bool
isAssociative (BT_Set _) = True
isAssociative (BT_Map _ _) = True
isAssociative (BT_UserDefined a@Alias {} args) = isAssociative $ resolveAlias a args
isAssociative _ = False
-- | Returns 'True' if the type represents a container of any kind:
-- list, vector, set or map.  Defined in terms of 'isList' and
-- 'isAssociative', so aliases are resolved by those predicates.
isContainer :: Type -> Bool
isContainer t = or [isList t, isAssociative t]
-- | Returns 'True' if the type represents a struct or a struct forward declaration.
isStruct :: Type -> Bool
isStruct (BT_UserDefined Struct {} _) = True
isStruct (BT_UserDefined Forward {} _) = True
isStruct (BT_UserDefined a@Alias {} args) = isStruct $ resolveAlias a args -- resolve one alias level, re-test
isStruct _ = False
-- | Returns 'True' if the type represents an enum
isEnum :: Type -> Bool
isEnum (BT_UserDefined Enum {} _) = True
isEnum (BT_UserDefined a@Alias {} args) = isEnum $ resolveAlias a args
isEnum _ = False
-- | Returns 'True' if the type represents a nullable type.
isNullable :: Type -> Bool
isNullable (BT_Nullable _) = True
isNullable (BT_UserDefined a@Alias {} args) = isNullable $ resolveAlias a args
isNullable _ = False
-- | Recursively map a 'Type' into another 'Type'.
--
-- ==== __Examples__
--
-- Change lists into vectors:
--
-- > listToVector = fmapType f
-- > where
-- > f (BT_List x) = BT_Vector x
-- > f x = x
fmapType :: (Type -> Type) -> Type -> Type
-- Bottom-up traversal: children are mapped first, then f is applied to the
-- rebuilt node.  Leaf types fall through to the catch-all equation.
fmapType f (BT_UserDefined decl args) = f $ BT_UserDefined decl $ map (fmapType f) args
fmapType f (BT_Maybe element) = f $ BT_Maybe $ fmapType f element
fmapType f (BT_Map key value) = f $ BT_Map (fmapType f key) (fmapType f value)
fmapType f (BT_List element) = f $ BT_List $ fmapType f element
fmapType f (BT_Vector element) = f $ BT_Vector $ fmapType f element
fmapType f (BT_Set element) = f $ BT_Set $ fmapType f element
fmapType f (BT_Nullable element) = f $ BT_Nullable $ fmapType f element
fmapType f (BT_Bonded struct) = f $ BT_Bonded $ fmapType f struct
fmapType f x = f x
-- | Maps all fields, including fields of the base, to a 'Monoid', and combines
-- the results. Returns 'mempty' if type is not a struct.
--
-- ==== __Examples__
--
-- Check if there are any container fields:
--
-- > anyContainerFields :: Type -> Bool
-- > anyContainerFields = getAny . foldMapFields (Any . isContainer . fieldType)
foldMapFields :: (Monoid m) => (Field -> m) -> Type -> m
foldMapFields f t = case t of
    -- Base-struct fields (if any) are folded before this struct's own fields.
    (BT_UserDefined Struct {..} _) -> optional (foldMapFields f) structBase <> F.foldMap f structFields
    (BT_UserDefined a@Alias {..} args) -> foldMapFields f $ resolveAlias a args
    _ -> mempty
-- | Like 'foldMapFields' but takes a 'Declaration' as an argument instead of 'Type'.
-- Wraps the declaration in 'BT_UserDefined' with no type arguments.
foldMapStructFields :: Monoid m => (Field -> m) -> Declaration -> m
foldMapStructFields f s = foldMapFields f $ BT_UserDefined s []
-- | Maps all parts of a 'Type' to a 'Monoid' and combines the results.
--
-- ==== __Examples__
--
-- For a type:
--
-- > list<nullable<int32>>
--
-- the result is:
--
-- > f (BT_List (BT_Nullable BT_Int32))
-- > <> f (BT_Nullable BT_Int32)
-- > <> f BT_Int32
--
-- 'foldMapType' resolves type aliases. E.g. given the following type alias
-- declaration (Bond IDL syntax):
--
-- > using Array<T, N> = vector<T>;
--
-- the result for the following type:
--
-- > Array<int32, 10>
--
-- is:
--
-- > f (BT_UserDefined Alias{..} [BT_Int32, BT_IntTypeArg 10])
-- > <> f (BT_Vector BT_Int32)
-- > <> f BT_Int32
foldMapType :: (Monoid m) => (Type -> m) -> Type -> m
-- Top-down: f is applied to the node itself, then to its children; for an
-- alias the node is visited and then its resolution is traversed as well.
foldMapType f t@(BT_UserDefined a@Alias {} args) = f t <> foldMapType f (resolveAlias a args)
foldMapType f t@(BT_UserDefined _ args) = f t <> F.foldMap (foldMapType f) args
foldMapType f t@(BT_Maybe element) = f t <> foldMapType f element
foldMapType f t@(BT_Map key value) = f t <> foldMapType f key <> foldMapType f value
foldMapType f t@(BT_List element) = f t <> foldMapType f element
foldMapType f t@(BT_Vector element) = f t <> foldMapType f element
foldMapType f t@(BT_Set element) = f t <> foldMapType f element
foldMapType f t@(BT_Nullable element) = f t <> foldMapType f element
foldMapType f t@(BT_Bonded struct) = f t <> foldMapType f struct
foldMapType f x = f x
-- | Resolves a type alias declaration with given type arguments. Note that the
-- function resolves one level of aliasing and thus may return a type alias.
resolveAlias :: Declaration -> [Type] -> Type
resolveAlias Alias {..} args = fmapType resolveParam aliasType
  where
    -- NOTE(review): 'fromJust' assumes every type parameter occurring in
    -- aliasType appears in declParams (zipped positionally with args);
    -- a malformed AST or wrong arity would crash here.
    resolveParam (BT_TypeParam param) = snd.fromJust $ find ((param ==).fst) paramsArgs
    resolveParam x = x
    paramsArgs = zip declParams args
resolveAlias _ _ = error "resolveAlias: impossible happened."
|
ant0nsc/bond
|
compiler/src/Language/Bond/Syntax/Util.hs
|
mit
| 7,859
| 0
| 12
| 1,483
| 2,051
| 1,077
| 974
| 131
| 3
|
-- fold/foldl as foldr/foldl'
-- ref: https://wiki.haskell.org/Fold
-- ref: https://wiki.haskell.org/Foldr_Foldl_Foldl%27
-- ref: https://wiki.haskell.org/Foldl_as_foldr
-- alt: https://wiki.haskell.org/Foldl_as_foldr_alternative
-- 1. Fold
-- In functional programming, fold (or reduce) is a family of higher order functions that process a data structure in some order and build a return value. This is as opposed to the family of unfold functions which take a starting value and apply it to a function to generate a data structure.
-- Typically, a fold deals with two things: a combining function, and a data structure, typically a list of elements. The fold then proceeds to combine elements of the data structure using the function in some systematic way.
-- non-associative -> foldl vs. foldr
-- tree-like folds
-- Tree-like fold: repeatedly combines adjacent pairs until one value
-- remains; the seed is only used for the empty list.
foldt :: (a -> a -> a) -> a -> [a] -> a
foldt _ z []  = z
foldt _ _ [x] = x
foldt f z xs  = foldt f z (pairs f xs)

-- Interleaved tree fold: peels one element per pairing pass, which makes
-- it usable on infinite lists.
foldi :: (a -> a -> a) -> a -> [a] -> a
foldi _ z []     = z
foldi f z (x:xs) = f x (foldi f z (pairs f xs))

-- Combine adjacent elements pairwise; a lone trailing element is kept.
pairs :: (a -> a -> a) -> [a] -> [a]
pairs f (x:y:rest) = f x y : pairs f rest
pairs _ rest       = rest
-- fold -> Church encoding
-- catamorphism
-- This way of looking at things provides a simple route to designing fold-like functions on other algebraic data structures, like various sorts of trees. One writes a function which recursively replaces the constructors of the datatype with provided functions, and any constant values of the type with provided values. Such functions are generally referred to as Catamorphisms.
-- 2. foldr foldl foldl'
-- We somehow have to tell the system that the inner redex should be reduced before the outer. Or the stack will overflow by lazy-evaluation or foldr
-- seq :: a -> b -> b
-- seq is a primitive system function that when applied to x and y will first reduce x then return y. The idea is that y references x so that when y is reduced x will not be a big unreduced chain anymore.
-- Strict left fold: the accumulator is forced with seq before each
-- recursive call, so no chain of thunks builds up.
foldl' f z [] = z
foldl' f z (x:xs) = let acc = f z x
                    in acc `seq` foldl' f acc xs
-- Usually the choice is between foldr and foldl', since foldl and foldl' are the same except for their strictness properties, so if both return a result, it must be the same. foldl' is the more efficient way to arrive at that result because it doesn't build a huge thunk. However, if the combining function is lazy in its first argument, foldl may happily return a result where foldl' hits an exception:
-- Rules of Thumb for Folds
{-
Folds are among the most useful and common functions in Haskell. They are an often-superior replacement for what in other language would be loops, but can do much more. Here are a few rules of thumb on which folds to use when.
foldr is not only the right fold, it is also most commonly the right fold to use, in particular when transforming lists (or other foldables) into lists with related elements in the same order. Notably, foldr will be effective for transforming even infinite lists into other infinite lists. For such purposes, it should be your first and most natural choice. For example, note that foldr (:) []==id.
Note that the initial element is irrelevant when foldr is applied to an infinite list. For that reason, it may be good practice when writing a function which should only be applied to infinite lists to replace foldr f [] with foldr f undefined. This both documents that the function should only be applied to infinite lists and will result in an error when you try to apply it to a finite list.
The other very useful fold is foldl'. It can be thought of as a foldr with these differences:
foldl' conceptually reverses the order of the list. One consequence is that a foldl' (unlike foldr) applied to an infinite list will be bottom; it will not produce any usable results, just as an express reverse would not. Note that foldl' (flip (:)) []==reverse.
foldl' often has much better time and space performance than a foldr would for the reasons explained in the previous sections.
You should pick foldl' principally in two cases:
When the list to which it is applied is large, but definitely finite, you do not care about the implicit reversal (for example, because your combining function is commutative like (+), (*), or Set.union), and you seek to improve the performance of your code.
When you actually do want to reverse the order of the list, in addition to possibly performing some other transformation to the elements. In particular, if you find that you precede or follow your fold with a reverse, it is quite likely that you could improve your code by using the other fold and taking advantage of the implicit reverse.
foldl is rarely the right choice. It gives you the implicit reverse of fold, but without the performance gains of foldl'. Only in rare, or specially constructed cases like in the previous section, will it yield better results than foldl'.
Another reason that foldr is often the better choice is that the folding function can short-circuit, that is, terminate early by yielding a result which does not depend on the value of the accumulating parameter. When such possibilities arise with some frequency in your problem, short-circuiting can greatly improve your program's performance. Left folds can never short-circuit.
-}
-- 3. foldl as foldr
-- NOTE(review): the bindings below are *alternative* definitions of foldl
-- (and a stand-alone go2) kept side by side for exposition; as written they
-- are duplicate top-level declarations and this file does not compile as a
-- single module.
foldl :: (a -> b -> a) -> a -> [b] -> a
foldl f a bs =
  foldr (\b g x -> g (f x b)) id bs a
-- Variant 2: explicit worker building an accumulator-consuming function.
foldl f a list = go2 list a
  where
    go2 [] = (\acc -> acc) -- nil case
    go2 (x : xs) = \acc -> (go2 xs) (f acc x) -- construct x (go2 xs)
-- By paper: http://www.cs.nott.ac.uk/~pszgmh/fold.pdf
-- The same worker, written with foldr directly (f is free here).
go2 list = foldr construct (\acc -> acc) list
  where
    construct x r = \acc -> r (f acc x)
-- Variant 3: the worker inlined into foldl itself.
foldl f a list = (foldr construct (\acc -> acc) list) a
  where
    construct x r = \acc -> r (f acc x)
-- Because r is the same function as constructed by the construct here, calling this e.g. for a list [x,y,...,z] scans through the whole list as-if evaluating a nested lambda applied to the initial value of the accumulator,
{-
(\acc->
(\acc->
(... (\acc-> (\acc -> acc)
(f acc z)) ...)
(f acc y))
(f acc x)) a
which creates the chain of evaluations as in
(\acc -> acc) (f (... (f (f a x) y) ...) z)
-}
-- The converse is not true, since foldr may work on infinite lists, which foldl variants never can do. However, for finite lists, foldr can also be written in terms of foldl (although losing laziness in the process), in a similar way like this:
-- foldr in terms of foldl (finite lists only; laziness is lost): each
-- element becomes a function update, composed via the local foldl above.
foldr :: (b -> a -> a) -> a -> [b] -> a
foldr step z ys =
  foldl (\k y x -> k (step y x)) id ys z
-- I find it easier to imagine a fold as a sequence of updates. An update is
-- a function mapping from an old value to an updated new value.
newtype Update a = Update {evalUpdate :: a -> a}

-- Since base-4.11 Monoid requires a Semigroup superclass instance, so the
-- combining operation lives in (<>); mappend stays as its alias.
-- The left update runs first: Update x <> Update y == Update (y . x).
instance Semigroup (Update a) where
  Update x <> Update y = Update (y . x)

instance Monoid (Update a) where
  mempty = Update id
  mappend = (<>)

-- | Left fold as a monoid fold: each element becomes an Update, mconcat
-- composes them left-to-right, and the composite runs on the seed.
foldlMonoid :: (a -> b -> a) -> a -> [b] -> a
foldlMonoid f a bs =
  flip evalUpdate a $
  mconcat $
  map (Update . flip f) bs
-- mconcat :: Monoid a => [a] -> a
-- mconcat = foldr mappend mempty
-- Update a is just Dual (Endo a)
-- If you use a State monad instead of a monoid, you obtain an alternative implementation of mapAccumL.
{-
Using the foldr expression we can write variants of foldl that behave slightly different from the original one.
E.g. we can write a foldl that can stop before reaching the end of the input list and thus may also terminate on infinite input.
-}
-- A left fold that can abort early: the step returns Maybe, and the first
-- Nothing stops the traversal immediately (so it can terminate even on
-- infinite input).
foldlMaybe :: (a -> b -> Maybe a) -> a -> [b] -> Maybe a
foldlMaybe step seed xs =
  foldr (\x continue acc -> step acc x >>= continue) Just xs seed
import Control.Monad ((>=>), )
-- | Like 'Update', but each step may fail; composition is Kleisli (>=>),
-- which short-circuits on the first Nothing.
newtype UpdateMaybe a = UpdateMaybe {evalUpdateMaybe :: a -> Maybe a}

-- base >= 4.11 requires the Semigroup instance; the left update runs first.
instance Semigroup (UpdateMaybe a) where
  UpdateMaybe x <> UpdateMaybe y = UpdateMaybe (x >=> y)

instance Monoid (UpdateMaybe a) where
  mempty = UpdateMaybe Just
  mappend = (<>)

-- | 'foldlMaybe' expressed via the UpdateMaybe monoid.
foldlMaybeMonoid :: (a -> b -> Maybe a) -> a -> [b] -> Maybe a
foldlMaybeMonoid f a bs =
  flip evalUpdateMaybe a $
  mconcat $
  map (UpdateMaybe . flip f) bs
-- Parsing numbers using a bound.
-- Parse a decimal numeral, failing (Nothing) on the empty string, on
-- non-digit characters, on a redundant leading zero, and as soon as the
-- running value exceeds the bound — so oversized inputs fail early.
readBounded :: Integer -> String -> Maybe Integer
readBounded bound str =
  case str of
    "" -> Nothing
    "0" -> Just 0
    _ -> go str 0
  where
    go digits = foldr step Just digits
    step digit continue soFar =
      let value = soFar * 10 + toInteger (Char.digitToInt digit)
      in do guard (Char.isDigit digit)
            guard (not (soFar == 0 && digit == '0'))
            guard (value <= bound)
            continue value
-- | 'readBounded' re-expressed with the UpdateMaybe monoid: every digit
-- becomes a failable update of the most-significant-so-far value, and
-- mconcat chains them left to right starting from 0.
readBoundedMonoid :: Integer -> String -> Maybe Integer
readBoundedMonoid bound str =
  case str of
    "" -> Nothing
    "0" -> Just 0
    _ ->
      let m digit =
            UpdateMaybe $ \mostSig ->
              -- value is only forced after the isDigit guard passes,
              -- so digitToInt never sees a non-digit.
              let n = mostSig*10 + toInteger (Char.digitToInt digit)
              in guard (Char.isDigit digit) >>
                 guard (not (mostSig==0 && digit=='0')) >>
                 guard (n <= bound) >>
                 Just n
      in evalUpdateMaybe (mconcat $ map m str) 0
|
Airtnp/Freshman_Simple_Haskell_Lib
|
Idioms/Fold.hs
|
mit
| 9,330
| 29
| 25
| 2,371
| 1,493
| 783
| 710
| -1
| -1
|
-- | Whether a number is zero.
-- Works for any type that is both Eq and Num.
-- (Original comments were in Japanese and said the same.)
isZero :: (Eq a, Num a) => a -> Bool
isZero n = n == 0
-- | Absolute value, for any Ord + Num type: negate when negative,
-- otherwise return the argument unchanged.
-- (Original comments were in Japanese and said the same.)
myAbs :: (Ord a, Num a) => a -> a
myAbs n = if n < 0 then negate n else n
|
yshnb/haskell-tutorial
|
examples/function_pattern.hs
|
mit
| 485
| 0
| 8
| 64
| 104
| 55
| 49
| 7
| 1
|
import Control.Concurrent
import Data.Time
import Data.Time.Lens
import Control.Monad
import Text.Regex.Applicative
import Data.Char
import Data.Maybe
import System.Process
-- | A crontab-style entry: second/minute/hour field patterns plus the
-- shell command to run.  Parameterised so the same shape can hold either
-- concrete values or 'NumOrStar' patterns.
data Time a = MakeTime
  { tSec :: a      -- ^ Seconds field.
  , tMin :: a      -- ^ Minutes field.
  , tHour :: a     -- ^ Hours field.
  , cmd :: String  -- ^ Shell command executed on a match.
  }
  deriving Show
-- | One crontab field pattern: a concrete number or the '*' wildcard.
data NumOrStar = Num Int | Star
  deriving Show

-- | Block the current thread for @n@ seconds.
-- (Type signature added; threadDelay takes microseconds.)
sleep :: Int -> IO ()
sleep n = threadDelay (n * 10^6)

-- | Does a concrete field value match a pattern?  'Star' matches anything.
-- (The unused binding in the Star case is now a wildcard.)
(==*) :: Int -> NumOrStar -> Bool
_ ==* Star  = True
n ==* Num m = n == m
-- | Does the given wall-clock time match the entry's second/minute/hour
-- patterns?  Seconds are rounded because the lens yields a fractional value.
checkTime :: ZonedTime -> Time NumOrStar -> Bool
checkTime currentTime time =
  round (getL seconds currentTime) ==* tSec time &&
  getL minutes currentTime ==* tMin time &&
  getL hours currentTime ==* tHour time
-- | Parse a crontab line and loop forever, running its command whenever
-- the clock matches.  The real clock is read only once; afterwards the
-- time is advanced by one simulated second per iteration in lockstep with
-- 'sleep 1' (see the commented-out getZonedTime alternative), so drift
-- can accumulate over long runs.
cron :: String -> IO ()
cron time = do
  let timeStruct = readEntry time
  currentTime <- getZonedTime
  cron' currentTime timeStruct
  where
    cron' currentTime timeStruct = do
      -- currentTime <- getZonedTime
      when (checkTime currentTime timeStruct) $
        (void $ system $ cmd timeStruct)
      sleep 1
      cron' (modL seconds (+1) currentTime) timeStruct
-- | Example spec matching 12:56 at any second.  Note that 'MakeTime' takes
-- four arguments, so this is partially applied and still awaits the command
-- string (signature added to make that explicit).
testTimeSpec :: String -> Time NumOrStar
testTimeSpec = MakeTime Star (Num 56) (Num 12)
-- Parsing (regex-applicative combinators over Char)
-- | A decimal number.  NOTE(review): 'many' also matches zero digits, in
-- which case 'read ""' would error; the surrounding grammar is relied on
-- to avoid that — confirm against callers.
number :: RE Char Int
number = read <$> many (psym isDigit)
-- | The '*' wildcard.
star :: RE Char NumOrStar
star = Star <$ sym '*'
-- | One field: a number or the wildcard.
numOrStar :: RE Char NumOrStar
numOrStar = (Num <$> number) <|> star
-- | A whole entry: whitespace-separated fields, then the command text.
-- NOTE(review): mkTime is partial — it only handles exactly three fields.
entry = mkTime <$> many (numOrStar <* many (psym isSpace))
  <*> many anySym
  where
    mkTime [s,m,h] = MakeTime s m h
-- | Parse one entry.  Partial ('fromJust'): a malformed line crashes.
readEntry :: String -> Time NumOrStar
readEntry s = fromJust $ s =~ entry
|
alogic0/agilebc
|
Cron.hs
|
mit
| 1,483
| 0
| 12
| 343
| 549
| 281
| 268
| 47
| 1
|
-- | Test.Proof module
{-# LANGUAGE UnicodeSyntax #-}
module Test.Proof where
import Data.TSTP
-- | Hand-built TSTP derivation used as a test fixture: axioms a1 (a) and
-- a2 (b), the implication a3 (a & b => z), conjecture a4 (z), followed by
-- Metis-style refutation steps (strip / negate / canonicalize / simplify)
-- ending in $false.
base1 ∷ [F]
base1 = [
  -- Axioms and conjecture.
  F {name = "a1",
     role = Axiom,
     formula = PredApp (AtomicWord "a") [],
     source = NoSource
    },
  F {name = "a2",
     role = Axiom,
     formula = PredApp (AtomicWord "b") [],
     source = NoSource
    },
  F {name = "a3",
     role = Axiom,
     formula = BinOp (BinOp (PredApp (AtomicWord "a") []) (:&:) (PredApp (AtomicWord "b") [])) (:=>:) (PredApp (AtomicWord "z") []),
     source = NoSource
    },
  F {name = "a4",
     role = Conjecture,
     formula = PredApp (AtomicWord "z") [],
     source = NoSource
    },
  -- The conjecture is stripped into a subgoal, then negated for refutation.
  F {name = "subgoal_0",
     role = Plain,
     formula = PredApp (AtomicWord "z") [],
     source = Inference Strip [] [Parent "a4" []]
    },
  F {name = "negate_0_0",
     role = Plain,
     formula = (:~:) (PredApp (AtomicWord "z") []),
     source = Inference Negate [] [Parent "subgoal_0" []]
    },
  -- Canonicalized clauses derived from the negation and the axioms.
  F {name = "normalize_0_0",
     role = Plain,
     formula = (:~:) (PredApp (AtomicWord "z") []),
     source = Inference Canonicalize [] [Parent "negate_0_0" []]
    },
  F {name = "normalize_0_1",
     role = Plain,
     formula = BinOp (BinOp ((:~:) (PredApp (AtomicWord "a") [])) (:|:) ((:~:) (PredApp (AtomicWord "b") []))) (:|:) (PredApp (AtomicWord "z") []),
     source = Inference Canonicalize [] [Parent "a3" []]
    },
  F {name = "normalize_0_2",
     role = Plain,
     formula = PredApp (AtomicWord "a") [],
     source = Inference Canonicalize [] [Parent "a1" []]
    },
  F {name = "normalize_0_3",
     role = Plain,
     formula = PredApp (AtomicWord "b") [],
     source = Inference Canonicalize [] [Parent "a2" []]
    },
  -- Simplification derives z, which contradicts ~z, yielding $false.
  F {name = "normalize_0_4",
     role = Plain,
     formula = PredApp (AtomicWord "z") [],
     source = Inference Simplify [] [Parent "normalize_0_1" [],
                                     Parent "normalize_0_2" [],
                                     Parent "normalize_0_3" []]
    },
  F {name = "normalize_0_5",
     role = Plain,
     formula = PredApp (AtomicWord "$false") [],
     source = Inference Simplify [] [Parent "normalize_0_0" [],
                                     Parent "normalize_0_4" []]
    },
  F {name = "refute_0_0",
     role = Plain,
     formula = Quant All [] (PredApp (AtomicWord "$false") []),
     source = Inference Canonicalize [] [Parent "normalize_0_5" []]
    }]
|
agomezl/tstp2agda
|
test/Test/Proof.hs
|
mit
| 2,311
| 0
| 16
| 604
| 929
| 524
| 405
| 60
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import System.Console.CmdArgs
import Data.Data
import Control.Monad
import LexML.URN.Types hiding (Data)
import LexML.URN.Parser
import LexML.Linker
import LexML.Linker(linker)
import System.Log.Logger
import System.Log.Handler.Syslog
import System.Log.Handler.Simple
-- import qualified System.IO.UTF8 as IOUTF
import Control.Monad.Except
import Prelude hiding (log)
import LexML.Version
import qualified Data.ByteString as BIO
import Data.ByteString.UTF8 (toString, fromString)
import System.IO
-- | Line-oriented linker driver: configures fixed LinkerOptions (federal
-- context, XLINK output, tagged input, no resolver) and then, until EOF,
-- reads one UTF-8 line from stdin, runs the linker on it and prints either
-- the resulting content or the error.
main = do
  updateGlobalLogger rootLoggerName (setLevel ERROR)
  let contexto = RC_Nome "federal"
  let lo = defaultLinkerOptions {
        loContext = contexto
      , loResolverUrl = Nothing
      , loOutputType = OT_XLINK
      , loInputType = IT_TAGGED
      , loDebugRegras = False
      , loDebugTokens = False
      , loConstituicaoSimples = True
      }
  hSetBuffering stdout LineBuffering
  hSetBuffering stdin LineBuffering
  let processLoop = do
        eof <- isEOF
        unless eof $ do
          line <- BIO.getLine
          let l = toString line
          res <- runExceptT $ linker lo l
          case res of
            Left err -> putStrLn $ "Error: " ++ show err
            Right out -> do
              let content = resContent out
              -- stdout is (re)set to UTF-8 before each successful output
              hSetEncoding stdout utf8
              putStrLn content
          hFlush stdout
          processLoop
  processLoop
|
lexml/lexml-linker
|
src/main/haskell/SimpleLinker.hs
|
gpl-2.0
| 1,485
| 0
| 23
| 415
| 359
| 191
| 168
| 46
| 2
|
-- Simple demo that demonstrates colored Gaussian noise
module Main (main) where
-- Import a portion of the Numeric.Random library
import Numeric.Random.Generator.MT19937 (genrand)
import Numeric.Random.Distribution.Uniform (uniform53oc)
import Numeric.Random.Distribution.Normal (normal_ar)
import Numeric.Random.Spectrum.White (white)
import Numeric.Random.Spectrum.Pink (kellet)
import Numeric.Random.Spectrum.Purple (purple)
import Numeric.Random.Spectrum.Brown (brown)
-- We do some simple FFT analysis
import Numeric.Transform.Fourier.FFT (rfft)
-- Import the System functions that we need
import System.Environment (getProgName, getArgs)
import System.Exit (exitFailure)
import System.IO (IOMode(WriteMode), withFile, hPutStrLn, hPutStr)
-- We need support for complex numbers and arrays
import Data.Complex (Complex((:+)))
import Data.Array (Array, listArray, elems, bounds, assocs)
-- Safe parsing of command-line numbers
import Text.Read (readMaybe)
-- Noise parameters
-- | Mean of the generated Gaussian noise.
mu :: Double
mu = 0
-- | Standard deviation of the generated Gaussian noise.
sigma :: Double
sigma = 1
-- u is our list of uniforms over (0,1]; fixed seed 42 makes it reproducible
u :: [Double]
u = uniform53oc $ genrand 42
-- x is our list of normal random variables
x :: [Double]
x = normal_ar (mu,sigma) u
-- white: flat power spectrum
white_gn :: [Double]
white_gn = white $ x
-- pink: -3 dB/octave or -10 dB/decade
pink_gn :: [Double]
pink_gn = kellet $ white_gn
-- brown: -6 dB/octave or -20 dB/decade
brown_gn :: [Double]
brown_gn = brown $ white_gn
-- purple: +6 dB/octave or +20 dB/decade
purple_gn :: [Double]
purple_gn = purple $ white_gn
-- dbrfft calculates the magnitude response (in dB) of the input, and
-- subtracts out the power of the integration window
dbrfft :: Array Int Double -> Array Int Double
dbrfft xs = fmap db $ rfft $ xs
  where db (r:+i) = 10 * log10 (r*r+i*i) - 10 * log10 n
        log10 = logBase 10
        -- window length; assumes the array is 0-based
        n = fromIntegral $ snd (bounds xs) + 1
-- | Pointwise average of a non-empty list of equally-bounded arrays.
-- Partial: 'foldl1' errors on an empty list.
avg :: [Array Int Double] -> Array Int Double
avg arrays = fmap (/ count) total
  where
    total = foldl1 sumArrays arrays
    sumArrays a b = listArray (bounds a) (zipWith (+) (elems a) (elems b))
    count = fromIntegral (length arrays)
{- |
'chunk' splits a list into sublists of n1 elements where consecutive
sublists overlap by n2 elements.  The result is an infinite list: once the
input is exhausted the chunks shrink and eventually become empty, so
consumers must 'take' what they need.
-}
chunk :: Int -> Int -> [a] -> [[a]]
chunk size overlap = go
  where
    stride = size - overlap
    go xs = take size xs : go (drop stride xs)
-- avgrfft calculates an averaged RFFT using a rectangular window
-- (averaging dB spectra of overlapping blocks)
-- n1 is the length of each FFT
-- n2 is the overlap
-- n3 is the number of FFTs to average
avgrfft :: Int -> Int -> Int -> [Double] -> Array Int Double
avgrfft n1 n2 n3 xs =
    avg $ take n3 $ map (dbrfft . listArray (0,n1-1)) $ chunk n1 n2 xs
-- | Write the (index, value) pairs of an array to @filename@,
-- one @\"index value\"@ pair per line.
dump :: String -> Array Int Double -> IO ()
dump filename xs =
    withFile filename WriteMode $ \h ->
        mapM_ (\(f,m) -> hPutStrLn h (show f ++ " " ++ show m)) (assocs xs)
-- Print a usage message and terminate with a failure exit code.
-- The polymorphic return type 'IO a' lets 'usage' stand in for any
-- IO result (it never returns; 'exitFailure' throws ExitFailure).
usage :: IO a
usage = do self <- getProgName
           putStrLn $ "usage: " ++ self ++ " n1 n2 n3"
           putStrLn $ "    where n1 = FFT length"
           putStrLn $ "          n2 = overlap"
           putStrLn $ "          n3 = number of FFTs to average"
           exitFailure
-- | Parse the three integer command-line arguments.
-- Fix: the original used the partial 'read', so a non-numeric argument
-- crashed with @Prelude.read: no parse@; 'readMaybe' now routes both a
-- wrong count and malformed numbers to 'usage'.
parseArgs :: IO (Int,Int,Int)
parseArgs = do
    args <- getArgs
    case mapM readMaybe args :: Maybe [Int] of
      Just [n1,n2,n3] -> return (n1,n2,n3)
      _               -> usage
-- glue it all together: one output file per noise colour, each holding
-- the averaged dB spectrum of the corresponding generator.
main :: IO ()
main = do (n1,n2,n3) <- parseArgs
          dump "white.out" $ avgrfft n1 n2 n3 $ white_gn
          dump "pink.out" $ avgrfft n1 n2 n3 $ pink_gn
          dump "brown.out" $ avgrfft n1 n2 n3 $ brown_gn
          dump "purple.out" $ avgrfft n1 n2 n3 $ purple_gn
|
tolysz/dsp
|
demo/NoiseDemo.hs
|
gpl-2.0
| 3,746
| 0
| 14
| 937
| 1,102
| 595
| 507
| 73
| 2
|
{-# LANGUAGE DeriveGeneric, FlexibleInstances, OverloadedStrings, RankNTypes #-}
module Model.Field ( Field
, FieldType(..)
, FieldPos
-- *Classes
, ToField(..)
-- *Constants
, typeLabels
-- *Functions
-- **General
, typeOf
, toString
, typeLabel
, typeOperator
, defaultValue
, mkError
, isError
, convert
, convertKeepText
, toInt
, toDouble
-- **Operators
, andField
, orField
, notField
, compareField
, maxField
, minField
, ternary
-- *Other
, (!!!)
) where
import Data.Aeson
import Data.Maybe(fromJust)
import Data.Text(Text)
import qualified Data.Text as T
import Data.Text.Read(decimal, signed, double)
import GHC.Int(Int32)
import TextShow(TextShow(showt))
import GHC.Generics
-- |A field can store an Int, a Double or a String or it may be
-- empty. The special constructor AnError stores an erroneous string
-- for the type. It is useful for converting without losing the
-- original value.
-- Each numeric constructor caches the original textual rendering next to
-- the value, so user-entered text round-trips losslessly.
data Field = AnInt Int Text
           | ADouble Double Text
           | AString Text
           | AnError FieldType Text
           | Empty
             deriving (Show, Eq)
-- |The position of the field, it is an Int32 for compatibility with gi-gtk
type FieldPos = Int32
-- |Auxiliary operator for accessing a list with a FieldPos.
-- Partial like '(!!)': an out-of-range position raises an exception.
(!!!) :: [a] -> FieldPos -> a
(!!!) l = (!!) l . fromIntegral
-- |Conversion of plain Haskell values into 'Field's.
class ToField t where
    toField :: t -> Field
-- Numeric instances cache the shown text together with the value.
instance ToField Int where
    toField n = AnInt n $ showt n
instance ToField Double where
    toField d = ADouble d $ showt d
instance ToField Text where
    toField = AString
-- () maps to the empty field.
instance ToField () where
    toField = const Empty
-- 'Nothing' becomes 'Empty'; 'Just' delegates to the inner instance.
instance ToField f => ToField (Maybe f) where
    toField Nothing = Empty
    toField (Just v) = toField v
-- |The types that can be seen by the user and their labels
-- (labels are user-facing Spanish strings; do not translate).
typeLabels :: [(FieldType, Text)]
typeLabels = [ (TypeString, "Cadena")
             , (TypeInt, "Entero")
             , (TypeInt0, "Entero0")
             , (TypeDouble, "Flotante")
             , (TypeDouble0, "Flotante0")
             ]
-- NOTE(review): 'fromJust' is safe only for the five types listed above;
-- @typeLabel TypeEmpty@ would crash — confirm no caller passes TypeEmpty.
typeLabel :: FieldType -> Text
typeLabel = fromJust . flip lookup typeLabels
-- |The operators used for casting in formulas.
typeOperators :: [(FieldType, Text)]
typeOperators = [ (TypeString, "str")
                , (TypeInt, "int")
                , (TypeInt0, "int0")
                , (TypeDouble, "float")
                , (TypeDouble0, "float0")
                ]
-- Same caveat as 'typeLabel': partial for 'TypeEmpty'.
typeOperator :: FieldType -> Text
typeOperator = fromJust . flip lookup typeOperators
-- |The string associated to a `Field`: the cached text for values and
-- errors, and a dashed placeholder for 'Empty'.
toString :: Field -> Text
toString field = case field of
    AnInt _ s   -> s
    ADouble _ s -> s
    AString s   -> s
    AnError _ s -> s
    Empty       -> "-----"
-- The *0 variants behave like their base type but render the default as
-- an empty string and fall back to 0 on a failed string parse.
data FieldType = TypeInt
               | TypeInt0
               | TypeDouble
               | TypeDouble0
               | TypeString
               | TypeEmpty
                 deriving (Show, Eq, Generic)
instance ToJSON FieldType where
    toEncoding = genericToEncoding defaultOptions
instance FromJSON FieldType
-- The type of a field; errors remember the type they were aiming for.
typeOf :: Field -> FieldType
typeOf (AnInt _ _) = TypeInt
typeOf (ADouble _ _) = TypeDouble
typeOf (AString _) = TypeString
typeOf (AnError t _) = t
typeOf Empty = TypeEmpty
-- Zero-like default of each type (note the *0 variants show as "").
defaultValue :: FieldType -> Field
defaultValue TypeInt = AnInt 0 "0"
defaultValue TypeInt0 = AnInt 0 ""
defaultValue TypeDouble = ADouble 0 "0.0"
defaultValue TypeDouble0 = ADouble 0 ""
defaultValue TypeString = AString ""
defaultValue TypeEmpty = Empty
-- Collapse the *0 variants onto their base type (TypeInt0 -> TypeInt, ...).
baseType :: FieldType -> FieldType
baseType = typeOf . defaultValue
-- |Convert a field to a given type, return `AnError` with
-- a message if there is an error in the conversion
convert :: FieldType -> Field -> Field
convert t f | typeOf f == baseType t = f
            | otherwise = doConvert f t
-- |Convert a field to the given type but keeping the text
convertKeepText :: FieldType -> Field -> Field
convertKeepText t f = case convert t f of
                          AnError _ _ -> AnError t (toString f)
                          f' -> f'
-- NOTE: the equation order below is load-bearing — the AnError case must
-- come first and the catch-all TypeString / fallback cases last.
-- Error messages are user-facing Spanish strings; do not translate.
doConvert :: Field -> FieldType -> Field
doConvert (AnError _ m) t = AnError t m
doConvert (ADouble d _) TypeInt = AnInt i $ showt i
    where i = truncate d
doConvert (AString str) TypeInt = case signed decimal str of
                                      Right (n, "") -> AnInt n str
                                      _ -> AnError TypeInt (str `T.append` " no es un entero")
doConvert (ADouble d _) TypeInt0 = AnInt i $ showt i
    where i = truncate d
doConvert (AString str) TypeInt0 = AnInt (case signed decimal str of
                                              Right (n, "") -> n
                                              _ -> 0) str
doConvert (AnInt n _) TypeDouble = ADouble f $ showt f
    where f = fromIntegral n
doConvert (AString str) TypeDouble = case signed double str of
                                         Right (d, "") -> ADouble d str
                                         _ -> AnError TypeDouble (str `T.append` " no es un flotante")
doConvert (AnInt n _) TypeDouble0 = ADouble f $ showt f
    where f = fromIntegral n
doConvert (AString str) TypeDouble0 = ADouble (case signed double str of
                                                   Right (d, "") -> d
                                                   _ -> 0 ) str
doConvert Empty t = AnError t "Conversión de valor vacío"
doConvert f TypeString = AString $ toString f
doConvert f t = AnError t $ toString f
-- |Build an error field carrying just a message.
mkError :: Text -> Field
mkError = AnError TypeEmpty
isError :: Field -> Bool
isError AnError {} = True
isError _ = False
-- Type-error helpers: an error operand propagates unchanged; anything
-- else is wrapped in an AnError naming the operation and operand types.
-- NOTE(review): these call the partial 'typeLabel' — confirm they are
-- never reached with an 'Empty' operand.
typeError :: Text -> Field -> Field
typeError _ e@AnError {} = e
typeError op f = AnError TypeEmpty $ T.concat ["Error de tipos en ", op, ": ", typeLabel $ typeOf f]
typeError2 :: Text -> Field -> Field -> Field
typeError2 _ e@AnError{} _ = e
typeError2 _ _ e@AnError{} = e
typeError2 op f1 f2 = AnError TypeEmpty $ T.concat ["Error de tipos en ", op, ": ", typeLabel $ typeOf f1, " y ", typeLabel $ typeOf f2]
-- |Lift a numeric unary function over Int and Double fields;
-- anything else is a type error tagged with the operation name.
nmap :: Text -> (forall a. Num a => a -> a) -> (Field -> Field)
nmap _ op (AnInt n _) = toField $ op n
nmap _ op (ADouble d _) = toField $ op d
nmap name _ f = typeError name f
-- Logical operators: integers act as booleans (positive = true),
-- results are AnInt 1 / AnInt 0.
andField :: Field -> Field -> Field
andField (AnInt n1 _) (AnInt n2 _) | n1 > 0 && n2 > 0 = toField (1::Int)
                                   | otherwise = toField (0::Int)
andField f1 f2 = typeError2 "y lógico" f1 f2
orField :: Field -> Field -> Field
orField (AnInt n1 _) (AnInt n2 _) | n1 > 0 || n2 > 0 = toField (1::Int)
                                  | otherwise = toField (0::Int)
orField f1 f2 = typeError2 "o lógico" f1 f2
notField :: Field -> Field
notField (AnInt n _) | n > 0 = toField (0::Int)
                     | otherwise = toField (1::Int)
notField f = typeError "no lógico" f
-- |Maximum of two fields; a comparison error is propagated.
maxField :: Field -> Field -> Field
maxField f1 f2 = case compareField (>=) f1 f2 of
                     err@(AnError _ _) -> err
                     AnInt 1 _ -> f1
                     _ -> f2
-- |Minimum of two fields, using the same ordering as 'compareField'.
-- Fix: a comparison error is now propagated (mirroring 'maxField');
-- previously the error was silently discarded and @f2@ returned.
minField :: Field -> Field -> Field
minField f1 f2 = case compareField (<=) f1 f2 of
                     err@(AnError _ _) -> err
                     AnInt 1 _ -> f1
                     _ -> f2
-- |Compare two fields with the given predicate, yielding AnInt 1 / 0.
-- Types are comparable when both are numeric or when they are equal;
-- an error in either operand is propagated.
compareField :: (Field -> Field -> Bool) -> Field -> Field -> Field
compareField _ e@(AnError _ _) _ = e
compareField _ _ e@(AnError _ _) = e
compareField cmp f1 f2 | not $ comparable (typeOf f1) (typeOf f2) = AnError TypeInt $ T.concat ["Tipos no comparables: ", typeLabel $ typeOf f1, " y ", typeLabel $ typeOf f2]
                       | cmp f1 f2 = toField (1::Int)
                       | otherwise = toField (0::Int)
    where comparable t1 t2 = numeric t1 && numeric t2 || t1 == t2
          numeric TypeInt = True
          numeric TypeInt0 = True
          numeric TypeDouble = True
          numeric TypeDouble0 = True
          numeric _ = False
-- |C-like @?:@ operator: a positive integer condition selects e2, else e3.
ternary :: Field -> Field -> Field -> Field
ternary (AnInt n1 _) e2 e3 | n1 > 0 = e2
                           | otherwise = e3
ternary e1 _ _ = typeError "operador ?" e1
-- Arithmetic on fields. Mixed Int/Double promotes to Double; strings
-- concatenate under (+) and replicate under (*) with an Int.
-- Unsupported combinations yield a type-error field, not an exception.
instance Num Field where
    AnInt n1 _ + AnInt n2 _ = toField $ n1 + n2
    AnInt n _ + ADouble d _ = toField $ fromIntegral n + d
    ADouble d _ + AnInt n _ = toField $ d + fromIntegral n
    ADouble d1 _ + ADouble d2 _ = toField $ d1 + d2
    AString s1 + AString s2 = AString $ T.append s1 s2
    f1 + f2 = typeError2 "suma" f1 f2
    AnInt n1 _ * AnInt n2 _ = toField $ n1 * n2
    AnInt n _ * ADouble d _ = toField $ fromIntegral n * d
    ADouble d _ * AnInt n _ = toField $ d * fromIntegral n
    ADouble d1 _ * ADouble d2 _ = toField $ d1 * d2
    AString s * AnInt n _ = AString $ T.replicate n s
    AnInt n _ * AString s = AString $ T.replicate n s
    f1 * f2 = typeError2 "producto" f1 f2
    abs = nmap "valor absoluto" abs
    fromInteger v = AnInt (fromInteger v) (showt v)
    negate = nmap "negación" negate
    signum = nmap "signum" signum
-- NOTE(review): 'toRational' is undefined — Real is only a superclass
-- obligation for Integral; calling it crashes.
instance Real Field where
    toRational = undefined
-- Total order: numerics (mixed Int/Double) < strings < errors < Empty.
instance Ord Field where
    compare (AnInt n1 _) (AnInt n2 _) = compare n1 n2
    compare (AnInt n _) (ADouble d _) = compare (fromIntegral n) d
    compare (AnInt _ _) _ = LT
    compare (ADouble d _) (AnInt n _) = compare d (fromIntegral n)
    compare (ADouble d1 _) (ADouble d2 _) = compare d1 d2
    compare (ADouble _ _) _ = LT
    compare (AString s1) (AString s2) = compare s1 s2
    compare (AString _) (AnInt _ _) = GT
    compare (AString _) (ADouble _ _) = GT
    compare (AString _) _ = LT
    compare (AnError _ s1) (AnError _ s2) = compare s1 s2
    compare (AnError _ _) (AnInt _ _) = GT
    compare (AnError _ _) (ADouble _ _) = GT
    compare (AnError _ _) (AString _) = GT
    compare (AnError _ _) Empty = LT
    compare Empty Empty = EQ
    compare Empty _ = GT
-- Partial: 'fromEnum' only works on integer fields.
instance Enum Field where
    toEnum v = AnInt v $ showt v
    fromEnum (AnInt n _) = n
    fromEnum _ = error "fromEnum de un no entero"
-- Integer division operators; non-integer operands become type errors.
instance Integral Field where
    quot (AnInt n1 _) (AnInt n2 _) = toField $ quot n1 n2
    quot f1 f2 = typeError2 "división entera" f1 f2
    rem (AnInt n1 _) (AnInt n2 _) = toField $ rem n1 n2
    rem f1 f2 = typeError2 "resto" f1 f2
    div (AnInt n1 _) (AnInt n2 _) = toField $ div n1 n2
    div f1 f2 = typeError2 "división entera" f1 f2
    mod (AnInt n1 _) (AnInt n2 _) = toField $ mod n1 n2
    mod f1 f2 = typeError2 "resto" f1 f2
    quotRem (AnInt n1 _) (AnInt n2 _) = (toField q, toField r)
        where (q, r) = quotRem n1 n2
    quotRem f1 f2 = ( typeError2 "división entera" f1 f2
                    , typeError2 "resto" f1 f2)
    divMod (AnInt n1 _) (AnInt n2 _) = (toField d, toField m)
        where (d, m) = divMod n1 n2
    divMod f1 f2 = ( typeError2 "división entera" f1 f2
                   , typeError2 "resto" f1 f2)
    toInteger = toInteger . toInt
-- |Recover the Int from an integer expression or error (partial).
toInt :: Field -> Int
toInt (AnInt n _) = n
toInt f = error $ "toInt de un " ++ T.unpack (typeLabel $ typeOf f)
-- |Recover the Double from a double expression or error (partial).
toDouble :: Field -> Double
toDouble (ADouble d _) = d
toDouble f = error $ "toDouble de un " ++ T.unpack (typeLabel $ typeOf f)
-- Division: Int/Int promotes to Double (unlike 'div').
instance Fractional Field where
    (AnInt n1 _) / (AnInt n2 _) = toField ((fromIntegral n1 / fromIntegral n2)::Double)
    (AnInt n _) / (ADouble d _) = toField $ fromIntegral n / d
    (ADouble d _) / (AnInt n _) = toField $ d / fromIntegral n
    (ADouble d1 _) / (ADouble d2 _) = toField $ d1/d2
    f1 / f2 = typeError2 "división" f1 f2
    recip (AnInt n _) = toField ((recip $ fromIntegral n) :: Double)
    recip (ADouble d _) = toField $ recip d
    recip f = typeError "recíproco" f
    fromRational v = toField (fromRational v :: Double)
|
jvilar/hrows
|
lib/Model/Field.hs
|
gpl-2.0
| 12,202
| 0
| 11
| 4,063
| 4,281
| 2,167
| 2,114
| 263
| 5
|
-- | We benchmark different stages of parsing. @small@, @medium@, and
-- @large@ are three sets of CMs with different file sizes.
--
-- TODO
--
-- - only extract header information
-- - single-threaded full parsing
-- - multi-threaded full parsing
module Main where
import Control.Lens
import Control.Monad.Trans.Writer.Strict
import Control.Monad (when)
import Criterion.Main
import Data.List as L
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Maybe (maybeToList)
import Biobase.SElab.CM.Types as CM
import Biobase.SElab.HMM.Types as HMM
import Biobase.SElab.Model.Import
import Biobase.SElab.Model.Types
{-
-- | Parse only the header
parseOnlyHeader :: Int -> String -> IO [PreModel]
parseOnlyHeader n file = do
(xs,log) <- runResourceT $ runWriterT $ sourceFile file $= ungzip $= preModels $= ccm n $$ consume
when (not $ T.null log) $ T.putStr log
-- print $ length xs
-- print $ xs ^.. ix 0 . _Right . _1 . CM.name
-- print $ xs ^.. ix 1 . _Left . _1 . HMM.name
return xs
parseFull :: Int -> String -> IO [CM]
parseFull n file = do
(xs,log) <- runResourceT $ runWriterT $ sourceFile file $= ungzip $= preModels $= ccm n $= finalizeModels 1 $= attachHMMs $$ consume
when (not $ T.null log) $ T.putStr log
return xs
parseFullPar :: Int -> String -> IO [CM]
parseFullPar n file = do
(xs,log) <- runResourceT $ runWriterT $ sourceFile file $= ungzip $= preModels $= ccm n $= finalizeModels 64 $= attachHMMs $$ consume
when (not $ T.null log) $ T.putStr log
return xs
main :: IO ()
main = defaultMain
-- only parse the header
[ bgroup "header"
[ bench "x 1" $ nfIO $ parseOnlyHeader 1 "./rfam-models/CL00001.cm.gz"
]
-- full parsing, single-threaded
, bgroup "singlethreaded"
[ bench "small" $ nfIO $ parseFull 1 "./rfam-models/CL00001.cm.gz"
, bench "medium" $ nfIO $ parseFull 10 "./rfam-models/CL00001.cm.gz"
, bench "large" $ nfIO $ parseFull 100 "./rfam-models/CL00001.cm.gz"
]
-- full parsing, multi-threaded
, bgroup "multithreaded"
[ bench "small" $ nfIO $ parseFullPar 1 "./rfam-models/CL00001.cm.gz"
, bench "medium" $ nfIO $ parseFullPar 10 "./rfam-models/CL00001.cm.gz"
, bench "large" $ nfIO $ parseFullPar 100 "./rfam-models/CL00001.cm.gz"
]
]
ccm n = go where
go = do
xx <- await
yy <- await
case xx of
Nothing -> return ()
Just x -> do
Prelude.mapM_ yield $ L.concat $ L.replicate n $ x : maybeToList yy
go
-}
-- | The benchmarks above are commented out (see the block comment);
-- keep a no-op entry point so the benchmark target still builds.
main :: IO ()
main = return ()
|
choener/BiobaseInfernal
|
tests/parsing.hs
|
gpl-3.0
| 2,636
| 0
| 6
| 640
| 127
| 88
| 39
| 15
| 1
|
{-|
Module exporting functionality for working with the
file and directory panes of the file browser. It contains
functionality for adding elements, operating upon elements
and changing the view.
-}
module TreeViewOperations
(
operateSelection,
createListing,
upDirectory,
openSelection,
openDirectory
)
where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Gdk.EventM
import Buttons
import Components
import Util
import Types
import DirectoryOperations
import qualified SystemOperations as S
import TreeFunctions
import qualified Control.Exception as E
import System.FilePath ( (</>), splitFileName, takeExtension )
import System.Directory (doesDirectoryExist, getDirectoryContents, doesFileExist, getCurrentDirectory, setCurrentDirectory, getModificationTime )
import Control.Monad ( forM, forM_, filterM )
import Control.Monad.Trans ( liftIO )
{-|
    Function to operate upon a user selection. Determines where the
    selection is coming from (the switch toggle distinguishes directory
    store from file store), carries out the corresponding action, and
    then refreshes the view on the current directory.
-}
operateSelection :: ( [FilePath] -> IO ( ) ) -- ^ The action to be completed if the selection is within the directory store.
                 -> ( [FilePath] -> IO ( ) ) -- ^ The action to be completed if the selection is within the file store.
                 -> FileBrowser -- ^ The 'FileBrowser' to obtain the selection from.
                 -> IO ( )
operateSelection dirFunc fileFunc b = do
    -- Idiom cleanup: @case t of True/False@ replaced by plain if/else.
    dirMode <- toggleButtonGetActive ( switch b )
    if dirMode
        then selectOperate ( dirStore b ) ( dirTree b ) dirFunc
        else selectOperate ( fileStore b ) ( fileTree b ) fileFunc
    -- Refresh so the effect of the action is visible immediately.
    getCurrentDirectory >>= \c -> refreshBrowser c b
{-|
    Apply an action to all elements currently selected in the supplied
    store/view pair. Does nothing when there is no selection.
-}
selectOperate :: ListStore StoreRow -- ^ The store to take the selection from.
              -> TreeView -- ^ The 'TreeView' containing the store.
              -> ( [FilePath] -> IO ( ) ) -- ^ The action to be completed on the selections.
              -> IO ( )
selectOperate store tree function =
    -- 'maybe' replaces the previous case-with-shadowed-binding.
    getMultipleSelection tree store >>= maybe ( return ( ) ) function
{-|
    Open the current user selection: a directory when the switch is in
    directory mode, otherwise every file the user selected.
-}
openSelection :: FileBrowser -- ^ The 'FileBrowser' containing the selection(s).
              -> IO ( )
openSelection browse = do
    dirMode <- toggleButtonGetActive ( switch browse )
    if dirMode
        then openDirectory browse
        else getMultipleSelection ( fileTree browse ) ( fileStore browse )
                 >>= maybe ( return ( ) ) (\names -> openFile names >> return ( ))
{-|
    Function to open a file from the file store.
    Fix: the directory and file name are now joined with '</>' instead
    of bare '++' — 'getCurrentDirectory' returns no trailing separator,
    so the old code built paths like \"\/home\/userfile\".
-}
openFileStore :: FileBrowser -- ^ The 'FileBrowser' containing the selection.
              -> TreePath -- ^ The path to the selection.
              -> IO ( )
openFileStore browse path = do
    row <- listStoreGetValue ( fileStore browse ) ( head path )
    dir <- getCurrentDirectory
    let target = dir </> name row
    putStrLn target    -- debug trace kept from the original
    openFile [target]
{-|
    Function to open a directory selected by the user. The selected name
    is joined to the current directory and the 'FileBrowser' refreshed
    with the new path.
    Fix: uses '</>' instead of a hand-written @\"/\"@ concatenation, so a
    trailing separator in the current directory cannot produce @\"//\"@.
-}
openDirectory :: FileBrowser -- ^ The browser to be updated.
              -> IO ( )
openDirectory browse =
    getSingleSelection ( dirTree browse ) ( dirStore browse ) >>=
        maybe ( return ( ) )
              (\d -> getCurrentDirectory >>= \c -> refreshBrowser ( c </> d ) browse)
{-|
    Creates the initial 'StoreRow's to be added
    to the ListStores. Creates all the cell entrys
    for each row: icon, size, modification time and permission string.
-}
createInitialRows :: [FilePath] -- ^ The list of file paths.
                  -> IO [StoreRow] -- ^ The list of resulting 'StoreRow'.
createInitialRows paths = forM paths $ \p -> do
    i <- createIcon p
    s <- getFileSize p
    t <- getModificationTime p
    pm <- permissionString p
    return ( StoreRow p i s ( show t ) pm )
{-|
    Create a default drag source for each of the
    'ListStore'. The selection data carries @[x, i]@: the numeric tag of
    the source store and the index of the dragged row.
-}
defaultDrag :: Int -- ^ Number to be associated with this listStore.
            -> SelectionTypeTag -- ^ Selection to set data with.
            -> Maybe ( DragSourceIface model row ) -- ^ The DragSource.
defaultDrag x tag = (Just DragSourceIface
    {
        treeDragSourceRowDraggable = \_ _ -> return True,
        treeDragSourceDragDataGet = \_ (i:_) -> do
            selectionDataSet tag [x,i]
            return True,
        treeDragSourceDragDataDelete = \_ _ -> return True
    })
{-|
    Function to create a listing containing a supplied list of elements for display within
    the browser. The resulting 'TreeView' will have a column named with the supplied title.
    Builds two drag-and-drop-enabled panes (directories and files) whose
    drop handler moves the dragged file into the target directory row and
    refreshes both panes.
-}
createListing :: (String,String) -- ^ The title for the column.
    -> ([FilePath],[FilePath]) -- ^ The list of elements to be displayed.
    -> IO ( TreeView, TreeView, ScrolledWindow, ScrolledWindow, ListStore StoreRow, ListStore StoreRow ) -- ^ The resulting 'TreeView', 'ScrolledWindow' and 'ListStore'
createListing (dtitle,ftitle) (ds,fs) = do
    compTypeTag <- atomNew ""
    dir <- scrolledWindowNew Nothing Nothing
    files <- scrolledWindowNew Nothing Nothing
    dtext <- createInitialRows ds
    ftext <- createInitialRows fs
    -- Tag 1 = file store, tag 0 = directory store (see 'defaultDrag').
    listF <- listStoreNewDND ftext ( defaultDrag 1 compTypeTag )
                Nothing
    -- Only the directory store accepts drops.
    listD <- listStoreNewDND dtext ( defaultDrag 0 compTypeTag )
        (Just DragDestIface
        {
            treeDragDestRowDropPossible = \_ _ -> do
                s <- selectionDataGet compTypeTag
                case s :: Maybe [Int] of
                    Just (x:xs:_) -> return True
                    _ -> return False,
            treeDragDestDragDataReceived = \store (i:_) -> do
                s <- selectionDataGet compTypeTag
                case s :: Maybe [Int] of
                    -- x: source-store tag, y: dragged row index.
                    Just (x:y:_) -> do
                        liftIO $ ( do
                            dirVal <- listStoreGetValue store i
                            dragVal <- listStoreGetValue ( if (x==0) then store else listF ) y
                            S.moveFile ( name dragVal ) ( name dirVal )
                            (dirs, files) <- viewDirectory =<< getCurrentDirectory
                            refreshPane dirs store
                            refreshPane files listF )
                        return True
                    _ -> return False
        })
    tViewD <- treeViewNewWithModel listD
    tViewF <- treeViewNewWithModel listF
    imageCell <- cellRendererPixbufNew
    textCell <- cellRendererTextNew
    sizetext <- cellRendererTextNew
    modtext <- cellRendererTextNew
    permtext <- cellRendererTextNew
    tl <- targetListNew
    targetListAdd tl targetTreeModelRow [TargetSameApp] 0
    -- Configure both panes identically apart from title/store.
    forM_ [(dtitle, tViewD,listD,dir),(ftitle,tViewF,listF,files)] $ \(t,tv,s,sw) -> do
        sel <- treeViewGetSelection tv
        treeSelectionSetMode sel SelectionMultiple
        treeViewEnableModelDragDest tv tl [ActionMove]
        treeViewEnableModelDragSource tv [Button1] tl [ActionMove]
        nCol <- treeViewColumnNew
        sCol <- treeViewColumnNew
        mCol <- treeViewColumnNew
        pCol <- treeViewColumnNew
        -- NOTE(review): 'forM' discards its result here; 'forM_' intended.
        forM [(nCol,t),(sCol,"Size"),(mCol,"Last Modified"),(pCol,"Permissions")] $ \(col,title) -> do
            treeViewColumnSetTitle col title
            treeViewColumnSetResizable col True
        cellLayoutPackStart nCol imageCell False
        forM_ [(nCol,textCell),(sCol,sizetext),(mCol,modtext),(pCol,permtext)] $ \(c,b) ->
            cellLayoutPackStart c b False
        cellLayoutSetAttributes nCol textCell s $ \r -> [cellText := name r]
        cellLayoutSetAttributes nCol imageCell s $ \r -> [cellPixbuf := icon r]
        cellLayoutSetAttributes sCol sizetext s $ \r -> [cellText := size r]
        cellLayoutSetAttributes mCol modtext s $ \r -> [cellText := lastAccess r]
        cellLayoutSetAttributes pCol permtext s $ \r -> [cellText := permissions r]
        forM_ [nCol,sCol,mCol,pCol] $ \col ->
            treeViewAppendColumn tv col
        containerAdd sw tv
    return ( tViewD, tViewF, dir, files, listD, listF )
{-|
    Function to move up one directory.
    i.e. If currently in \"\/home\/michael\" change to \"\/home\"
-}
upDirectory :: FileBrowser -- ^ The 'FileBrowser' to update.
            -> IO ( )
upDirectory browser = do
    parent <- fmap ( fst . splitFileName ) getCurrentDirectory
    setCurrentDirectory parent
    refreshBrowser parent browser
|
michaeldever/haskellfm
|
TreeViewOperations.hs
|
gpl-3.0
| 9,634
| 0
| 30
| 3,154
| 2,060
| 1,067
| 993
| 147
| 4
|
{-# LANGUAGE TemplateHaskell, OverlappingInstances, FlexibleContexts, NoMonomorphismRestriction #-}
{-# OPTIONS -Wall #-}
module ShortShow where
import Element
import Data.List
import Math.Groups.S2
import Math.Groups.S3
import Data.Word
import THUtil
-- | This class is for stringifying things (for humans) in contexts where characters are at a premium, e.g. Blender object names. It's generally only intended to differentiate the @a@ from other @a@s
-- occurring in the same "context" (e.g. Blender scene), not to uniquely determine it.
class ShortShow a where
    shortShowsPrec :: Int -> a -> ShowS
    shortShow :: a -> String
    -- Mutual defaults: an instance must define at least one method.
    shortShow = ($"") . shortShowsPrec 0
    shortShowsPrec _ x = (shortShow x ++)
-- |Analogue of 'shows' at precedence 0.
shortShows :: ShortShow a => a -> ShowS
shortShows = shortShowsPrec 0
-- Tuple instances: parenthesised and comma-separated; the precedence
-- argument is ignored since the parentheses disambiguate.
instance (ShortShow a, ShortShow b) => ShortShow (a,b) where
    shortShowsPrec _ (a,b) = showChar '('
                             . shortShows a
                             . showChar ',' . shortShows b
                             . showChar ')'
instance (ShortShow a, ShortShow b, ShortShow c) => ShortShow (a,b,c) where
    shortShowsPrec _ (a,b,c) = showChar '('
                               . shortShows a
                               . showChar ',' . shortShows b
                               . showChar ',' . shortShows c
                               . showChar ')'
instance (ShortShow a, ShortShow b, ShortShow c, ShortShow d) => ShortShow (a,b,c,d) where
    shortShowsPrec _ (a,b,c,d) = showChar '('
                                 . shortShows a
                                 . showChar ',' . shortShows b
                                 . showChar ',' . shortShows c
                                 . showChar ',' . shortShows d
                                 . showChar ')'
-- instance ShortShow v => ShortShow (List v n) where
--     shortShow = error "ShortShow (List v n): not implemented"
-- |Render a collection as a brace-delimited, comma-separated set.
shortShowAsSet :: (AsList a, ShortShow (Element a)) => a -> [Char]
shortShowAsSet xs = "{" ++ (intercalate "," . fmap shortShow . asList) xs ++ "}"
-- Basic instances delegate to 'show'; Bool compresses to one character.
instance ShortShow S2 where shortShow = show
instance ShortShow S3 where shortShow = show
instance ShortShow Int where shortShow = show
instance ShortShow Word where shortShow = show
instance ShortShow Integer where shortShow = show
instance ShortShow Bool where shortShow b = [if b then 'T' else 'F']
-- | Prints no indication of whether the value is a Left or a Right, as it's usually clear. Write an OverlappingInstance otherwise.
instance (ShortShow a, ShortShow b) => ShortShow (Either a b) where shortShow = either shortShow shortShow
-- |Template Haskell: derive a 'ShortShow' instance for @sub@ that
-- delegates 'shortShow' through @accessor@ to the @super@ instance.
inheritShortShow
  :: (Convertible accessor ExpQ,
      Convertible sub TypeQ,
      Convertible super TypeQ) =>
     sub -> super -> accessor -> Q [Dec]
inheritShortShow = inheritSingleArgClass ''ShortShow ['shortShow] []
|
DanielSchuessler/hstri
|
ShortShow.hs
|
gpl-3.0
| 2,757
| 0
| 14
| 742
| 705
| 370
| 335
| 49
| 1
|
module Time (
Timestamp, getTime,
Timediff, timeDiff, timeDiffSeconds
) where
import Data.Aeson
import Data.Time.Clock.POSIX ( getPOSIXTime )
-- | A point in time: seconds since the Unix epoch, as a 'Double'.
newtype Timestamp = Timestamp { unTs :: Double } deriving ( Show )
-- | Read the current wall-clock time.
getTime :: IO Timestamp
getTime = do
    now <- getPOSIXTime
    return ( Timestamp ( realToFrac now ) )
-- | A duration in seconds, produced by 'timeDiff'.
newtype Timediff = Timediff { timeDiffSeconds :: Double } deriving ( Show )
-- | @timeDiff a b@ is the time elapsed from @a@ to @b@
-- (negative when @b@ precedes @a@).
timeDiff :: Timestamp -> Timestamp -> Timediff
timeDiff start end = Timediff ( unTs end - unTs start )
-- | Serialised as a bare JSON number of seconds.
instance ToJSON Timediff where
    toJSON td = toJSON $ timeDiffSeconds td
|
waldheinz/ads
|
src/lib/Time.hs
|
gpl-3.0
| 551
| 0
| 8
| 98
| 170
| 99
| 71
| 13
| 1
|
module Main where
-- | Double a number.
doubleMe x = x + x
-- | Double both arguments and sum the results.
doubleUs a b = doubleMe a + doubleMe b
-- | Double the argument unless it exceeds 100, then add one.
doubleSmallNumber' n = ( if n > 100 then n else doubleMe n ) + 1
|
yumerov/haskell-study
|
learnyouahaskell/00-starting-out/baby.hs
|
gpl-3.0
| 147
| 0
| 9
| 34
| 75
| 40
| 35
| 4
| 2
|
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
-- | Haskell 98 compatibility shim: re-exports "Foreign.C.Types".
module CTypes (module Foreign.C.Types) where
import Foreign.C.Types
|
jwiegley/ghc-release
|
libraries/haskell98/CTypes.hs
|
gpl-3.0
| 150
| 0
| 5
| 21
| 23
| 17
| 6
| 3
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Orders.Shiplineitems
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Marks line item(s) as shipped. This method can only be called for
-- non-multi-client accounts.
--
-- /See:/ <https://developers.google.com/shopping-content Content API for Shopping Reference> for @content.orders.shiplineitems@.
module Network.Google.Resource.Content.Orders.Shiplineitems
(
-- * REST Resource
OrdersShiplineitemsResource
-- * Creating a Request
, ordersShiplineitems
, OrdersShiplineitems
-- * Request Lenses
, osMerchantId
, osPayload
, osOrderId
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.orders.shiplineitems@ method which the
-- 'OrdersShiplineitems' request conforms to.
-- Route: POST \/content\/v2\/{merchantId}\/orders\/{orderId}\/shipLineItems
-- (auto-generated — see the module header; do not hand-edit segments).
type OrdersShiplineitemsResource =
     "content" :>
       "v2" :>
         Capture "merchantId" (Textual Word64) :>
           "orders" :>
             Capture "orderId" Text :>
               "shipLineItems" :>
                 QueryParam "alt" AltJSON :>
                   ReqBody '[JSON] OrdersShipLineItemsRequest :>
                     Post '[JSON] OrdersShipLineItemsResponse
-- | Marks line item(s) as shipped. This method can only be called for
-- non-multi-client accounts.
--
-- /See:/ 'ordersShiplineitems' smart constructor.
data OrdersShiplineitems = OrdersShiplineitems'
    { _osMerchantId :: !(Textual Word64)
    , _osPayload :: !OrdersShipLineItemsRequest
    , _osOrderId :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OrdersShiplineitems' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'osMerchantId'
--
-- * 'osPayload'
--
-- * 'osOrderId'
ordersShiplineitems
    :: Word64 -- ^ 'osMerchantId'
    -> OrdersShipLineItemsRequest -- ^ 'osPayload'
    -> Text -- ^ 'osOrderId'
    -> OrdersShiplineitems
ordersShiplineitems pOsMerchantId_ pOsPayload_ pOsOrderId_ =
    OrdersShiplineitems'
    { _osMerchantId = _Coerce # pOsMerchantId_
    , _osPayload = pOsPayload_
    , _osOrderId = pOsOrderId_
    }
-- | The ID of the managing account.
osMerchantId :: Lens' OrdersShiplineitems Word64
osMerchantId
  = lens _osMerchantId (\ s a -> s{_osMerchantId = a})
      . _Coerce
-- | Multipart request metadata.
osPayload :: Lens' OrdersShiplineitems OrdersShipLineItemsRequest
osPayload
  = lens _osPayload (\ s a -> s{_osPayload = a})
-- | The ID of the order.
osOrderId :: Lens' OrdersShiplineitems Text
osOrderId
  = lens _osOrderId (\ s a -> s{_osOrderId = a})
-- Wires the request record onto the servant-style route above.
instance GoogleRequest OrdersShiplineitems where
        type Rs OrdersShiplineitems =
             OrdersShipLineItemsResponse
        type Scopes OrdersShiplineitems =
             '["https://www.googleapis.com/auth/content"]
        requestClient OrdersShiplineitems'{..}
          = go _osMerchantId _osOrderId (Just AltJSON)
              _osPayload
              shoppingContentService
          where go
                  = buildClient
                      (Proxy :: Proxy OrdersShiplineitemsResource)
                      mempty
|
rueshyna/gogol
|
gogol-shopping-content/gen/Network/Google/Resource/Content/Orders/Shiplineitems.hs
|
mpl-2.0
| 3,869
| 0
| 15
| 897
| 483
| 287
| 196
| 76
| 1
|
<h3>Example 64 (nested plot stacks)</h3>
<!-- Radian markup: the outer plot-stack supplies shared x data and axis
     configuration; the two inner stacks ("Fat"/"Thin") differ only in
     stroke-width across their three sin/cos/sum plots. -->
<plot-stack width=600 aspect=1
            x="[[seq(0,2*PI+0.2,101)]]" axis-x-label="Time"
            axis-x-ticks="[[[0,[PI,'π'],[2*PI,'2π']]]]"
            end-tick-size=0>
  <plot-stack title="Fat" stroke-width=4>
    <plot title="sin" axis-y-label="sin(x)">
      <lines y="[[sin(x)]]" stroke="red"></lines>
    </plot>
    <plot title="cos" axis-y-label="cos(x)">
      <lines y="[[cos(x)]]" stroke="blue"></lines>
    </plot>
    <plot title="sum" axis-y-label="sin(x) + cos(x)">
      <lines y="[[sin(x) + cos(x)]]" stroke="green"></lines>
    </plot>
  </plot-stack>
  <plot-stack title="Thin" stroke-width=1>
    <plot title="sin" axis-y-label="sin(x)">
      <lines y="[[sin(x)]]" stroke="red"></lines>
    </plot>
    <plot title="cos" axis-y-label="cos(x)">
      <lines y="[[cos(x)]]" stroke="blue"></lines>
    </plot>
    <plot title="sum" axis-y-label="sin(x) + cos(x)">
      <lines y="[[sin(x) + cos(x)]]" stroke="green"></lines>
    </plot>
  </plot-stack>
</plot-stack>
|
openbrainsrc/hRadian
|
examples/Example/defunct/Eg64.hs
|
mpl-2.0
| 1,039
| 84
| 66
| 200
| 533
| 252
| 281
| -1
| -1
|
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
func = do
stmt
stmt
|
lspitzner/brittany
|
data/Test441.hs
|
agpl-3.0
| 138
| 0
| 6
| 20
| 13
| 6
| 7
| 3
| 1
|
-- Copyright (C) 2016-2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE OverloadedStrings #-}
module BDCS.RPM.Signatures(mkRSASignature,
mkSHASignature)
where
import qualified Data.ByteString as BS
import Data.ByteString.Char8(pack)
import Database.Esqueleto
import BDCS.DB(Builds, BuildSignatures(..))
import BDCS.Exceptions(DBException(..), throwIfNothing)
import RPM.Tags(Tag, findTag, tagValue)
-- | Build the RSA 'BuildSignatures' row for a build from an RPM tag list.
-- Throws 'MissingRPMTag' when the \"RSAHeader\" tag is absent or has an
-- unexpected payload type.
mkRSASignature :: [Tag] -> Key Builds -> BuildSignatures
mkRSASignature tags buildId = BuildSignatures buildId "RSA" rsaSig
  where
    rsaSig = getRSASig `throwIfNothing` MissingRPMTag "RSAHeader"
    getRSASig = findTag "RSAHeader" tags >>= \t -> tagValue t :: Maybe BS.ByteString
-- | Build the SHA1 'BuildSignatures' row for a build from an RPM tag list.
-- Throws 'MissingRPMTag' when the \"SHA1Header\" tag is absent or has an
-- unexpected payload type.
-- Cleanup: @>>= Just . pack@ replaced with the equivalent, idiomatic
-- @fmap pack@ (hlint: "use fmap").
mkSHASignature :: [Tag] -> Key Builds -> BuildSignatures
mkSHASignature tags buildId = BuildSignatures buildId "SHA1" shaSig
  where
    shaSig = getSHASig `throwIfNothing` MissingRPMTag "SHA1Header"
    getSHASig = fmap pack (findTag "SHA1Header" tags >>= \t -> tagValue t :: Maybe String)
|
dashea/bdcs
|
importer/BDCS/RPM/Signatures.hs
|
lgpl-2.1
| 1,708
| 0
| 12
| 312
| 289
| 166
| 123
| 19
| 1
|
module Main where
import System.Plugins
-- | Build One.hs, then exercise the loader's path normalisation: the same
-- object file referenced through several equivalent spellings must only be
-- loaded once.  Finally load One.o, which pulls in "../hier3/Two.o" as a
-- dependency.
main :: IO ()
main = do
        -- Result of the build step is deliberately ignored; a failed build
        -- will surface as LoadFailure below.
        _ <- makeAll "One.hs" []
        load2 "Two.o"
        load2 "./Two.o"         -- shouldn't load
        load2 "../hier3/Two.o"  -- shouldn't load
        load2 "././././Two.o"   -- shouldn't load
        -- and this one pulls in "../hier3/Two.o" as a dep
        y <- load "One.o" ["../hier3"] [] "resource"
        case y of
                LoadSuccess _ s -> putStrLn $ "One plugin: " ++ s
                LoadFailure es  -> mapM_ putStrLn es >> putStrLn "Failure: y"

-- | Load @f@ (which depends on One.o) via the given search paths and
-- report the loaded resource or the accumulated loader errors.
-- (Type signature added: it was missing from the original, which -Wall
-- flags and which hurt readability.)
load2 :: FilePath -> IO ()
load2 f = do
        x <- load f [".", "../hier3", ""] [] "resource" -- depend on One.o
        case x of
                LoadSuccess _ s -> putStrLn $ "Two plugin: " ++ s
                LoadFailure es  -> mapM_ putStrLn es >> putStrLn "Failure: x"
|
Changaco/haskell-plugins
|
testsuite/hier/hier3/Main.hs
|
lgpl-2.1
| 711
| 0
| 11
| 204
| 203
| 96
| 107
| 17
| 2
|
module HackerRank.Utilities.Manager.DB where
import Control.Monad
import Data.Either
import Data.Maybe
import System.Directory
import System.FilePath.Posix
import System.IO
import System.IO.Error
import qualified System.IO.Strict as SIO
import qualified Text.Fuzzy as F
import Text.Show.Pretty ( ppShow )
import HackerRank.Utilities.Manager.IOError
import HackerRank.Utilities.Manager.Challenge
-- | Absolute location of the on-disk database: @~/.hrmng/db.hs@.
absDBPath :: IO FilePath
absDBPath = do
  home <- getHomeDirectory
  return (home </> ".hrmng/db.hs")
readDB :: IO [(String, (FilePath, Challenge))]
readDB = withDefault "[Manager][DB][readDB]"
[]
(absDBPath >>= (SIO.readFile >=> return . read))
-- | Serialise the whole database to @~/.hrmng/db.hs@, creating the
-- @.hrmng@ directory on first use.
writeDB :: [(String, (FilePath, Challenge))] -> IO ()
writeDB xs = wrapIOError "[Manager][DB][writeDB]"
  (absDBPath >>= withFileExsit write (\path -> mkdir path >> write path))
  -- The explicit lambda above is deliberate: the original @mkdir >> write@
  -- sequenced the two FilePath -> IO () functions in the ((->) FilePath)
  -- monad, whose (>>) discards its first argument — so @mkdir@ never ran.
  where -- createDirectoryIfMissing tolerates a pre-existing ~/.hrmng
        -- (plain createDirectory throws "already exists" in that case).
        mkdir = createDirectoryIfMissing True . takeDirectory
        write = flip writeFile (ppShow xs)
-- | Fuzzy-search the database for entries whose slug matches @slug@.
-- Uses strict fmap (<$!>) to force the lookup result eagerly.
lookupDB :: String -> IO [(String, (FilePath, Challenge))]
lookupDB slug = fuzzyLookup slug <$!> readDB
  -- F.filter args: pattern, haystack, pre/post highlight markers, key
  -- projection (fst = the slug), case-sensitivity False.
  where fuzzyLookup p xs = map F.original $ F.filter p xs "<" ">" fst False
-- | Exact (non-fuzzy) membership test for a slug.
-- NOTE(review): name keeps the original's "exsits" typo because callers
-- depend on it.
exsitsInDB :: String -> IO Bool
exsitsInDB slug = wrapIOError "[Manager][DB][exsitsInDB]"
  (isJust . lookup slug <$!> readDB)
-- | Prepend a new entry and rewrite the whole database file.
updateDB :: (String, (FilePath, Challenge)) -> IO ()
updateDB pair = wrapIOError "[Manager][DB][updateDB]"
  ((pair:) <$!> readDB >>= writeDB)
-- | Resolve a (possibly partial) slug to the stored challenge path.
-- Fails with an IOError when nothing matches or the match is ambiguous;
-- ambiguity errors list every candidate.
nameToPath :: String -> IO FilePath
nameToPath slug = withDirExsit return (lookupDB >=> (\xs ->
  -- Pattern match directly instead of the original `case length xs of`:
  -- this is total (no partial `head`) and does not traverse the list.
  case xs of
    []            -> ioError nrErr
    [(_, (p, _))] -> return p
    _             -> mulErr xs >>= ioError)) slug
  where nrErr = userError $
          "[Manager][DB][nameOrPath]: Cannot resolve \"" ++ slug ++ "\""
        -- Build an IOError listing all ambiguous matches.
        mulErr xs = do
          matches <- mapM fmt xs
          return . userError $
            unlines (("[Manager][DB][nameOrPath]: Multiple matches of \"" ++
                      slug ++ "\" found:"):matches)
        -- One indented Name/Path entry per candidate.
        fmt (s, (p, _)) = do
          rel <- makeRelativeToCurrentDirectory p
          return $ unlines ["\tName: " ++ s, "\tPath: " ++ rel]
-- | All stored challenges, with paths rewritten relative to the current
-- working directory.
listChallenges :: IO [(String, (FilePath, Challenge))]
listChallenges = readDB >>= mapM relativise
  where relativise (name, (path, chal)) = do
          rel <- makeRelativeToCurrentDirectory path
          return (name, (rel, chal))
|
oopsno/hrank
|
src/HackerRank/Utilities/Manager/DB.hs
|
unlicense
| 2,303
| 0
| 16
| 556
| 725
| 395
| 330
| -1
| -1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_HADDOCK hide #-}
module Network.IRC.Util where
import qualified Data.Text.Format as TF
import ClassyPrelude
import Control.Monad.Base (MonadBase)
import Data.Convertible (convert)
import Data.Maybe (fromJust)
import Data.Text (strip)
import Data.Time (diffUTCTime)
-- | 10^6 ticks — presumably one second in microseconds; confirm at call
-- sites (e.g. delay APIs).
oneSec :: Int
oneSec = 1000000

-- | All keys of a map, in 'mapToList' order.
mapKeys :: IsMap map => map -> [ContainerKey map]
mapKeys m = [k | (k, _) <- mapToList m]

-- | All values of a map, in 'mapToList' order.
mapValues :: IsMap map => map -> [MapValue map]
mapValues m = [v | (_, v) <- mapToList m]
-- | Key-wise intersection of two maps: for every key present in both
-- @map1@ and @map2@, the result maps it to the pair of both values.
-- Keys present in only one map are dropped.  The 'fromJust' calls are
-- safe: both lookups are guarded by the `member` check on map2 and the
-- key being drawn from map1's own key list.
mergeMaps :: forall map map1 map2.
             (IsMap map1, IsMap map2, IsMap map,
              MapValue map ~ (MapValue map1, MapValue map2),
              ContainerKey map1 ~ ContainerKey map,
              ContainerKey map2 ~ ContainerKey map) =>
             map1 -> map2 -> map
mergeMaps map1 map2 =
  flip (`foldl'` mempty) (mapKeys map1) $ \acc key ->
    if key `member` map2
      then insertMap key (fromJust $ lookup key map1, fromJust $ lookup key map2) acc
      else acc
-- | Run an action on the payload of a 'Just'; do nothing for 'Nothing'.
whenJust :: Monad m => Maybe t -> (t -> m ()) -> m ()
whenJust Nothing  _ = return ()
whenJust (Just x) f = f x

-- | Normalise user text: trim surrounding whitespace, then lower-case.
clean :: Text -> Text
clean t = toLower (strip t)

-- | Shorthand for 'liftIO'.
io :: MonadIO m => IO a -> m a
io = liftIO

-- | Atomically apply @f@ to the contents of an 'IORef', discarding the
-- snapshot that 'atomicModifyIORef'' returns.
atomicModIORef :: MonadBase IO f => IORef t -> (t -> t) -> f ()
atomicModIORef ref f = void (atomicModifyIORef' ref (\old -> (f old, old)))
-- | Display a time span as one time relative to another.
-- Picks the first (largest) bucket in 'ranges' that the absolute span
-- reaches, then renders it as e.g. "in 3 days" (future) or "3 days ago"
-- (past).  Buckets with base 0 are fixed phrases ("a week"); the final
-- (0, "{} seconds", 1) bucket makes the 'find' total, so "unknown" should
-- be unreachable in practice.
relativeTime :: UTCTime -> UTCTime -> Text
relativeTime t1 t2 =
  maybe "unknown" (convert . format) $ find (\(s,_,_) -> abs period >= s) ranges
  where
    minute = 60; hour = minute * 60; day = hour * 24;
    week = day * 7; month = day * 30; year = month * 12
    -- Prefix "in " for future spans, suffix " ago" for past/zero spans;
    -- {}-style templates are filled with the rounded bucket count.
    format range =
      (if period > 0 then "in " else "")
      ++ case range of
          (_, str, 0) -> pack str
          (_, str, base) -> TF.format (fromString str) $ TF.Only (abs $ round (period / base) :: Integer)
      ++ (if period <= 0 then " ago" else "")
    -- Positive when t1 is later than t2 (NominalDiffTime, in seconds).
    period = t1 `diffUTCTime` t2
    ranges = [ (year*2, "{} years", year)
             , (year, "a year", 0)
             , (month*2, "{} months", month)
             , (month, "a month", 0)
             , (week*2, "{} weeks", week)
             , (week, "a week", 0)
             , (day*2, "{} days", day)
             , (day, "a day", 0)
             , (hour*4, "{} hours", hour)
             , (hour*3, "a few hours", 0)
             , (hour*2, "{} hours", hour)
             , (hour, "an hour", 0)
             , (minute*31, "{} minutes", minute)
             , (minute*30, "half an hour", 0)
             , (minute*2, "{} minutes", minute)
             , (minute, "a minute", 0)
             , (0, "{} seconds", 1)
             ]
|
abhin4v/hask-irc
|
hask-irc-core/Network/IRC/Util.hs
|
apache-2.0
| 2,827
| 0
| 18
| 964
| 1,017
| 573
| 444
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE LambdaCase #-}
module Main where
import Control.Monad
import Data.Typeable
import Flow
import Flow.Vector
import Flow.Halide
-- Needed for FFI to work
import Data.Vector.HFixed.Class ()
import Flow.Halide.Types ()
-- Data tags: phantom types naming the two kinds of data in the dataflow
-- graph (vectors, and the final scalar sum).
data Vec deriving Typeable
data Sum deriving Typeable
-- Abstract flow signatures: the dataflow graph is declared name-first;
-- kernels are bound to these names in the strategies below.
f, g :: Flow Vec
f = flow "f"
g = flow "g"
-- | Elementwise product node.
pp :: Flow Vec -> Flow Vec -> Flow Vec
pp = flow "product"
-- | Reduction node.
a :: Flow Vec -> Flow Sum
a = flow "sum"
-- | The whole computation: sum (f .* g), i.e. a dot product.
ddp :: Flow Sum
ddp = a $ pp f g
-- Vector representation: Float vectors over a dynamic 1-D Halide domain,
-- and a dimensionless Float for the final sum.
type VecRepr = DynHalideRepr Dim0 Float Vec
vecRepr :: Domain Range -> VecRepr
vecRepr = dynHalideRepr dim0
type SumRepr = HalideRepr Z Float Sum
sumRepr :: SumRepr
sumRepr = halideRepr Z
-- Kernels: each abstract flow name is implemented by a Halide kernel
-- brought in over the C FFI.
fKern :: Domain Range -> Kernel Vec
fKern size = halideKernel0 "f" (vecRepr size) kern_generate_f
foreign import ccall unsafe kern_generate_f :: HalideFun '[] VecRepr
gKern :: Domain Range -> Kernel Vec
gKern size = halideKernel0 "g" (vecRepr size) kern_generate_g
foreign import ccall unsafe kern_generate_g :: HalideFun '[] VecRepr
-- | Product kernel; halideKernel1Write updates its output buffer in place.
ppKern :: Domain Range -> Flow Vec -> Flow Vec -> Kernel Vec
ppKern size = halideKernel1Write "pp" (vecRepr size) (vecRepr size) kern_dotp
foreign import ccall unsafe kern_dotp :: HalideFun '[ VecRepr ] VecRepr
aKern :: Domain Range -> Flow Vec -> Kernel Sum
aKern size = halideKernel1 "a" (vecRepr size) sumRepr kern_sum
foreign import ccall unsafe kern_sum :: HalideFun '[ VecRepr ] SumRepr
-- Dummy recover kernel: registered so `recover` is legal, but calling it
-- is a hard error (no fault tolerance actually implemented here).
recoverKern :: Domain Range -> Kernel Vec
recoverKern size = halideKernel0 "recover" (vecRepr size) (error "No real kernel here")
-- | Terminal kernel: reads the single Float result out of its one input
-- buffer and prints it; fails loudly on an unexpected buffer count.
printKern :: Flow Sum -> Kernel Sum
printKern = mergingKernel "print" (sumRepr :. Z) NoRepr $ \case
  [(sv,_)]-> \_ -> do
    s <- peekVector (castVector sv :: Vector Float) 0
    putStrLn $ "Sum: " ++ show s
    return nullVector
  _other -> fail "printKern: Received wrong number of input buffers!"
-- | Dot product, non-distributed
dpStrat :: Int -> Strategy ()
dpStrat size = do
  -- Make vector domain
  dom <- makeRangeDomain 0 size
  -- Calculate ddp for the whole domain
  bind f (fKern dom)
  bind g (gKern dom)
  recover f (recoverKern dom)
  recover g (recoverKern dom)
  bindRule pp (ppKern dom)
  bindRule a (aKern dom)
  calculate ddp
  rebind ddp printKern
-- | Dot product, distributed
ddpStrat :: Int -> Strategy ()
ddpStrat size = do
  -- Make vector domain
  dom <- makeRangeDomain 0 size
  -- Calculate ddp for the whole domain
  -- Split into 10 regions and run the vector stages in parallel; only the
  -- final reduction/print happen on the undivided domain.
  regs <- split dom 10
  distribute regs ParSchedule $ do
    bind f (fKern regs)
    bind g (gKern regs)
    recover f (recoverKern regs)
    recover g (recoverKern regs)
    bind (pp f g) (ppKern regs f g)
  bindRule a (aKern dom)
  calculate ddp
  void $ bindNew $ printKern ddp
-- | Dump the strategy, execute it under DNA, and print the analytically
-- expected result.  NOTE(review): the closed form (size-1)*size/20
-- presumably matches what kern_generate_f/g produce — the C side is not
-- visible here, so confirm against the Halide kernels.
main :: IO ()
main = do
  let size = 1000000
  dumpSteps $ ddpStrat size
  execStrategyDNA $ ddpStrat size
  putStrLn $ "Expected: " ++ show ((size-1)*size`div`20)
|
SKA-ScienceDataProcessor/RC
|
MS5/programs/dotproduct.hs
|
apache-2.0
| 3,026
| 0
| 15
| 594
| 1,034
| 509
| 525
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
import Web.Scotty
import System.Environment
import System.Posix.Env
import Control.Monad.IO.Class (liftIO)
import Network.Wai.Middleware.RequestLogger
import Network.Wai.Middleware.Static (addBase, noDots, staticPolicy, (>->))
import Data.Default (def)
import Data.Monoid((<>))
import Network.Wai.Handler.Warp (settingsPort, settingsHost)
import Data.Conduit.Network
import Network.HTTP.Types.Status
import Database.PostgreSQL.Simple
import Transform
import qualified Data.Text.Lazy as D
import qualified Model.Definition as Def
import qualified View.Index
import qualified View.Add
import qualified View.Added
import qualified View.Error
-- | Scotty/Warp settings: bind to the given IP and port, silence the
-- startup banner (verbose = 0); everything else stays at 'def'.
opts :: String -> Int -> Options
opts ip port = def { verbose = 0
                   , settings = (settings def) { settingsHost = Host ip, settingsPort = port }
                   }
-- | Root of the deployed repository; empty string when the OpenShift
-- environment variable is unset (e.g. local development).
-- (Collapsed the original's redundant @do x <- m; return x@.)
repoPath :: IO String
repoPath = getEnvDefault "OPENSHIFT_REPO_DIR" ""
-- | Assemble PostgreSQL connection settings from OpenShift environment
-- variables, with localhost/postgres defaults for local runs.
dbConnInfo :: IO ConnectInfo
dbConnInfo = do host <- getEnvDefault "OPENSHIFT_POSTGRESQL_DB_HOST" "127.0.0.1"
                port <- getEnvDefault "OPENSHIFT_POSTGRESQL_DB_PORT" "5432"
                user <- getEnvDefault "OPENSHIFT_POSTGRESQL_DB_USERNAME" "postgres"
                password <- getEnvDefault "OPENSHIFT_POSTGRESQL_DB_PASSWORD" ""
                dbName <- getEnvDefault "OPENSHIFT_APP_NAME" "postgres"
                -- NOTE(review): `read port` throws at first use if the env
                -- var holds a non-numeric value — consider readMaybe.
                return (ConnectInfo host (read port) user password dbName)
-- | Open a PostgreSQL connection using the environment-derived settings.
-- (Collapsed the original's bind-then-immediately-use @do@ block.)
dbConn :: IO Connection
dbConn = dbConnInfo >>= connect
-- | Entry point: expects @[ip, port]@ on the command line, ensures the
-- definitions table exists, then serves static files plus the routes in
-- 'processRoutes'.  NOTE(review): `read port` throws on a non-numeric
-- argument — consider readMaybe with a usage message.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [ip,port] -> do conn <- dbConn
                    _ <- Def.createDefinitionTable conn
                    base <- repoPath
                    scottyOpts (opts ip (read port)) $ do
                      middleware (staticPolicy (noDots >-> addBase (base <> "resources")))
                      middleware logStdoutDev
                      processRoutes conn
    _ -> putStrLn "Required arguments [ip] [port]"
-- | Route table: list page, add form, and form submission.
processRoutes :: Connection -> ScottyM ()
processRoutes conn = do
  get "/" (listDefinitions conn)
  get "/add" renderAddForm
  post "/add" (postDefinition conn)
-- | GET /: render every stored definition, pig-latinised.
listDefinitions :: Connection -> ActionM ()
listDefinitions conn = do
  defs <- liftIO (Def.allDefinitions conn)
  html (View.Index.render (toPigLatin defs))
-- | GET /add: render the definition submission form.
-- (Removed the original's redundant single-statement @do@ block.)
renderAddForm :: ActionM ()
renderAddForm = html View.Add.render
-- | POST /add: validate the "phrase" and "meaning" form parameters and
-- insert the definition.  Missing or empty parameters yield HTTP 400.
postDefinition :: Connection -> ActionM ()
postDefinition conn = do
  parameters <- params
  -- mapM over Maybe: Nothing (-> returnError) as soon as either
  -- parameter is absent, otherwise the two values in order.
  maybe returnError addDef (mapM (getParam parameters) ["phrase", "meaning"])
  where getParam prms name = lookup name prms
        returnError = createResponse (View.Error.render "Missing parameter") badRequest400
        addDef (p:m:_)
          | D.null p = createResponse (View.Error.render "Empty phrase parameter") badRequest400
          | D.null m = createResponse (View.Error.render "Empty meaning parameter") badRequest400
          | otherwise = addDefinition conn p m
        -- Unreachable when the mapM above succeeds (always two elements),
        -- but kept so addDef is total.
        addDef _ =
          createResponse (View.Error.render "Internal error") internalServerError500
-- | Insert a definition unless the phrase already exists (-> 400); insert
-- failures surface as HTTP 500, success as HTTP 200.
addDefinition :: Connection -> D.Text -> D.Text -> ActionM ()
addDefinition conn phrase meaning = do
  result <- liftIO (Def.getDefinitionByPhrase conn phrase)
  case result of
    [] -> do
      added <- liftIO (Def.addDefinition conn (Def.Definition phrase meaning))
      case added of
        Left errorMessage -> createResponse (View.Error.render errorMessage) internalServerError500
        Right _ -> createResponse View.Added.render ok200
    _ -> createResponse (View.Error.render "Duplicate phrase") badRequest400
-- | Send the given markup as the HTML body and set the HTTP status code.
createResponse :: D.Text -> Status -> ActionM ()
createResponse page code = html page >> status code
|
codemiller/pirate-gold
|
src/Main.hs
|
apache-2.0
| 3,867
| 0
| 22
| 959
| 1,077
| 543
| 534
| 86
| 3
|
module Story
( Story -- *
, apTimes -- ([Word32] -> [Word32]) -> Story -> Story
, apValues -- ([Float] -> [Float]) -> Story -> Story
) where
import Data.Word
-- | Representation of a value change mapped over time.
type Story = [(Word32, Float)]

-- | Apply a function to all time points of the story.
apTimes :: ([Word32] -> [Word32]) -- ^ time change
        -> Story                  -- ^ old story
        -> Story                  -- ^ new story
apTimes fn story = zip (fn times) values
  where (times, values) = unzip story

-- | Apply a function to all value points of the story.
apValues :: ([Float] -> [Float]) -- ^ value change
         -> Story                -- ^ old story
         -> Story                -- ^ new story
apValues fn story = zip times (fn values)
  where (times, values) = unzip story
|
lovasko/swim
|
src/Story.hs
|
bsd-2-clause
| 783
| 0
| 8
| 246
| 169
| 99
| 70
| 14
| 1
|
-- | Gets command-line options
module Parser.Man.Options
( getOptions
, optDescr
, Options(..)
) where
import System.Console.GetOpt
import System.Environment
import System.IO
-- | Raw flags as produced by GetOpt parsing.
data Flag = Complete String | Help

-- | Command-line options
data Options = Options {
    completionFile :: String, -- ^ Where to write the completion file, or '-' for stdout
    help :: Bool
  } deriving (Show)

-- | Defaults: completions go to stdout, help off.
-- (Type signature added: it was missing, which -Wall flags.)
defaultOptions :: Options
defaultOptions = Options {
    completionFile = "-",
    help = False
  }
-- | Fold one parsed flag into the options record.
modifyOption :: Flag -> Options -> Options
modifyOption flag opts = case flag of
  Help       -> opts {help = True}
  Complete s -> opts {completionFile = s}
-- | Gets command-line options based on 'optDescr'
getOptions :: IO Options
getOptions = do
  args <- getArgs
  -- NOTE(review): the discarded tuple components are the non-option
  -- arguments and GetOpt's error messages — bad flags are silently
  -- ignored here; confirm that is intended.
  let (flags, _, _) = getOpt RequireOrder optDescr args
  return (foldr modifyOption defaultOptions flags)
-- | Description of command-line options for this tool
-- '-f --file Destination file for bash completions'
-- '-h -? Print this help message'
-- (List order is the order 'usageInfo' renders the help text in.)
optDescr :: [OptDescr Flag]
optDescr =
  [ Option ['f'] ["file"] (ReqArg Complete "FILE") "Destination file for bash completions"
  , Option ['h', '?'] ["help"] (NoArg Help) "Print this help message"
  ]
|
headprogrammingczar/ghc-man-completion
|
Parser/Man/Options.hs
|
bsd-3-clause
| 1,179
| 0
| 10
| 219
| 290
| 166
| 124
| 27
| 1
|
import Music.Prelude
-- |
-- String quartet
--
-- Hommage a Henrik Strindberg
--
-- | Four-voice canon: the same subject stream enters in each string part
-- at different delays, with viola/cello at double note values (|*2).
-- NOTE(review): no type signatures in this file; the dataset metadata for
-- it records parse errors, so treat layout here as best-effort.
tremCanon = compress 4 $
    (delay 124 $ set parts' violins1 $ subjs|*1)
        <>
    (delay 120 $ set parts' violins2 $ subjs|*1)
        <>
    (delay 4 $ set parts' violas $ subjs|*2)
        <>
    (delay 0 $ set parts' cellos $ subjs|*2)
    where
        -- 40 subject statements, each passed through the (currently
        -- identity) rev2/palindrome2 transforms below.
        subjs = scat $ map (\n -> palindrome2 $ rev2 $ subj n) [1..40::Int]
        -- Subject shape evolves with its index: longer note values early,
        -- shorter and denser figures later.
        subj n
            | n < 8 = a_|*2 |> e|*1 |> a|*1
            | n < 16 = a_|*2 |> e|*1 |> a|*1 |> e|*1 |> a|*1
            | n < 24 = a_|*2 |> e|*0.5 |> a|*0.5 |> e|*0.5 |> a|*0.5
            | otherwise = e|*0.5 |> a|*0.5
-- | Main canon played forwards-and-back (palindrome2 is a stub for now)
-- over a sustained cello pedal.
mainCanon2 = palindrome2 mainCanon <> celloEntry
celloEntry = set parts' cellos e''|*(25*5/8)
-- | Three ostinato layers in 6/8, all as 2nd harmonics two octaves up,
-- over a sustained a' in the cellos.
mainCanon = timeSignature (time 6 8) $ asScore $
    (set parts' violins1 $ harmonic 2 $ times 50 $ legato $ accentLast $
        octavesUp 2 $ scat [a_,e,a,cs',cs',a,e,a_]|/8)
    <>
    (set parts' violins2 $ harmonic 2 $ times 50 $ legato $ accentLast $
        octavesUp 2 $ scat [d,g,b,b,g,d]|/8)|*(3/2)
    <>
    (set parts' violas $ harmonic 2 $ times 50 $ legato $ accentLast $
        octavesUp 2 $ scat [a,d,a,a,d,a]|/8)|*(3*2/2)
    <>
    set parts' cellos a'|*(25*5/8)
music :: Music
music = mainCanon2
-- | Render the score via Lilypond in book format.
openBook :: Music -> IO ()
openBook = openLilypond' LyScoreFormat
main :: IO ()
main = openBook music
-- TODO
-- Placeholder transforms referenced by tremCanon/mainCanon2; both are
-- identity until real retrograde/palindrome versions are written.
rev2 = id
palindrome2 = id
|
music-suite/music-preludes
|
examples/trio.hs
|
bsd-3-clause
| 1,495
| 13
| 20
| 488
| 729
| 370
| 359
| 36
| 1
|
module Main (main) where
import qualified Math.LinearRecursive.Internal.Matrix as M
import qualified Math.LinearRecursive.Internal.Vector as V
import qualified Math.LinearRecursive.Internal.Polynomial as P
import Math.LinearRecursive.Monad
import Test.QuickCheck
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Control.Applicative ((<$>), (<*>))
import qualified Data.IntMap as IntMap
-- | Run the whole suite under test-framework.
main :: IO ()
main = defaultMain tests

-- | One group per module under test, plus the monad's end-to-end laws.
tests :: [Test]
tests = [ testGroup "matrix"
          [ testProperty "add0" prop_matrix_add0
          , testProperty "mul1" prop_matrix_mul1
          , testProperty "add" prop_matrix_add
          , testProperty "mul" prop_matrix_mul
          , testProperty "muladd" prop_matrix_muladd
          , testProperty "addmul" prop_matrix_addmul
          , testProperty "inverse" prop_matrix_inverse
          ]
        , testGroup "vector"
          [ testProperty "add0" prop_vector_add0
          , testProperty "mul1" prop_vector_mul1
          , testProperty "add" prop_vector_add
          , testProperty "mul" prop_vector_mul
          , testProperty "sub" prop_vector_sub
          ]
        , testGroup "polynomial"
          [ testProperty "add0" prop_poly_add0
          , testProperty "mul1" prop_poly_mul1
          , testProperty "add" prop_poly_add
          , testProperty "mul" prop_poly_mul
          , testProperty "list" prop_poly_list
          ]
        , testGroup "linear recursive monad"
          [ testProperty "fib" prop_fib
          , testProperty "const" prop_const
          , testProperty "step" prop_step
          , testProperty "powerof" prop_powerof
          , testProperty "poly" prop_poly
          , testProperty "partwith" prop_partwith
          ]
        ]
-- | Generator: 30% scalar (diagonal) matrices via fromInteger, 70% dense
-- 4x4 matrices.  Shrinking distinguishes the diagonal case (no size)
-- from the dense case.
instance (Num a, Arbitrary a) => Arbitrary (M.Matrix a) where
    arbitrary = frequency [ (30, fmap fromInteger arbitrary)
                          , (70, fmap M.matrix (vectorOf 4 (vectorOf 4 arbitrary)))
                          ]
    shrink a = case M.matrixSize a of
        Nothing -> [M.diagonal v | v <- shrink (M.unDiagonal a)]
        -- NOTE(review): shrinks only by perturbing with diagonals derived
        -- from the top-left element — confirm this is intentional.
        Just _ -> [a + M.diagonal v | v <- shrink (head (head ma))]
      where
        ma = M.unMatrix a
-- Ring laws for matrices over Q.
prop_matrix_add0 :: M.Matrix Rational -> Bool
prop_matrix_add0 a = (a + 0) == a && (0 + a) == a
prop_matrix_mul1 :: M.Matrix Rational -> Bool
prop_matrix_mul1 a = a * 1 == a && 1 * a == a
prop_matrix_add :: M.Matrix Rational -> M.Matrix Rational -> M.Matrix Rational -> Bool
prop_matrix_add a b c = (a + b) + c == a + (b + c)
prop_matrix_mul :: M.Matrix Rational -> M.Matrix Rational -> M.Matrix Rational -> Bool
prop_matrix_mul a b c = (a * b) * c == a * (b * c)
prop_matrix_muladd :: M.Matrix Rational -> M.Matrix Rational -> M.Matrix Rational -> Bool
prop_matrix_muladd a b c = a * (b + c) == a * b + a * c
prop_matrix_addmul :: M.Matrix Rational -> M.Matrix Rational -> M.Matrix Rational -> Bool
prop_matrix_addmul a b c = (a + b) * c == a * c + b * c
-- | Upper-triangular matrices with all-ones diagonal: always invertible
-- over the integers, which makes the inverse property exact.
newtype Diag1Matrix a = Diag1 (M.Matrix a) deriving Show
instance (Num a, Arbitrary a) => Arbitrary (Diag1Matrix a) where
    arbitrary = do
        n <- choose (1, 20)
        Diag1 . cutMatrix <$> vectorOf n (vectorOf n arbitrary)
      where
        -- Keep entries above the diagonal, force 1s on it, 0s below.
        cutMatrix ma = M.matrix [[if i == j then 1 else if i > j then 0 else aij | (j, aij) <- zip [0..] ri] | (i, ri) <- zip [0..] ma]
    shrink a = []
prop_matrix_inverse :: Diag1Matrix Integer -> Bool
prop_matrix_inverse (Diag1 ma) = ma' * ma == one
  where
    ma' = M.inverseMatrixDiag1 ma
    n = length (M.unMatrix ma)
    one = M.matrix [[if i == j then 1 else 0 | j <- [1..n]] | i <- [1..n]]
-- | Unit vectors e_i for a small random index.
instance (Eq a, Num a, Arbitrary a) => Arbitrary (V.Vector1 a) where
    arbitrary = V.vector1 <$> choose (0, 30)
    shrink a = [V.vector1 a' | a' <- shrink (V.unVector1 a), a' >= 0]
-- | Sparse vectors as IntMaps with duplicate indices summed.
instance (Eq a, Num a, Arbitrary a) => Arbitrary (V.Vector a) where
    arbitrary = V.vector . IntMap.fromListWith (+) <$> listOf ((,) <$> choose (0,30) <*> arbitrary)
    shrink a = [V.vector $ IntMap.insert i v' (V.unVector a) | (i, v) <- IntMap.assocs (V.unVector a), v' <- shrink v]
-- Module (vector-space) laws for the sparse vectors.
prop_vector_add0 :: V.Vector Rational -> Bool
prop_vector_add0 a = (a <+> zeroVector) == a && (zeroVector <+> a) == a
prop_vector_mul1 :: V.Vector Rational -> Bool
prop_vector_mul1 a = (a *> 1) == a && (1 <* a) == a
prop_vector_add :: V.Vector Rational -> V.Vector Rational -> V.Vector Rational -> Bool
prop_vector_add a b c = (a <+> b) <+> c == a <+> (b <+> c)
prop_vector_mul :: V.Vector Rational -> Rational -> Rational -> Bool
prop_vector_mul a b c = a *> b *> c == b <* c <* a && a *> (b + c) == a *> b <+> a *> c
prop_vector_sub :: V.Vector Rational -> V.Vector Rational -> Bool
prop_vector_sub a b = a <+> b <-> b == a
-- Commutative-ring laws and list round-tripping for polynomials.
instance (Eq a, Num a, Arbitrary a) => Arbitrary (P.Polynomial a) where
    arbitrary = fmap P.polynomial arbitrary
    shrink = map P.polynomial . shrink . P.unPoly
prop_poly_add0 :: P.Polynomial Rational -> Bool
prop_poly_add0 a = a + 0 == a && 0 + a == a
prop_poly_mul1 :: P.Polynomial Rational -> Bool
prop_poly_mul1 a = a * 1 == a && 1 * a == a
prop_poly_add :: P.Polynomial Rational -> P.Polynomial Rational -> P.Polynomial Rational -> Bool
prop_poly_add a b c = (a + b) + c == a + (b + c) && a + b == b + a
prop_poly_mul :: P.Polynomial Integer -> P.Polynomial Integer -> P.Polynomial Integer -> Bool
prop_poly_mul a b c = (a * b) * c == a * (b * c) && a * b == b * a
prop_poly_list :: P.Polynomial Rational -> Bool
prop_poly_list a = a == P.fromList (P.toList a)
-- | Reference Fibonacci stream (1, 1, 2, 3, ...), the oracle for prop_fib.
fibSeq :: [Integer]
fibSeq = 1 : 1 : zipWith (+) fibSeq (tail fibSeq)
-- | Fibonacci via the linear-recursive monad under test.
fib :: Integer -> Integer
fib n = flip runLinearRecursive n fibmonad
fibmonad :: LinearRecursive Integer (Variable Integer)
fibmonad = do
    [f0, f1] <- newVariables [1, 1]
    f0 <:- f0 <+> f1
    return f1
-- End-to-end properties: each monad combinator agrees with its closed form.
prop_fib :: NonNegative Integer -> Bool
prop_fib (NonNegative n) = fibSeq !! fromIntegral n == fib n
prop_const :: NonNegative Integer -> Integer -> Bool
prop_const (NonNegative n) v = runLinearRecursive (getConstant v) n == v
prop_step :: NonNegative Integer -> Bool
prop_step (NonNegative n) = runLinearRecursive getStep n == n
prop_powerof :: NonNegative Integer -> Integer -> Bool
prop_powerof (NonNegative n) a = runLinearRecursive (getPowerOf a) n == a ^ n
prop_poly :: Polynomial Integer -> NonNegative Integer -> Bool
prop_poly p (NonNegative v) = runLinearRecursive (getPolynomial p) v == P.evalPoly p v
-- | A random recursive sequence paired with its reference stream.
arbMonad :: Gen ((LinearRecursive Integer (LinearCombination Integer), [Integer]))
arbMonad = oneof [ return (fmap toVector fibmonad, fibSeq)
                 , (\x -> (getPowerOf x, scanl (*) 1 (repeat x))) <$> arbitrary
                 , (\p -> (getPolynomial p, map (P.evalPoly p) [0..])) <$> arbitrary
                 ]
-- | getPartialSumWith must equal the explicit convolution
-- sum_{i<=n} p(n-i) * f(i) against the reference stream.
prop_partwith :: Polynomial Integer -> NonNegative Integer -> Gen Bool
prop_partwith p (NonNegative n) = do
    (monad, seq) <- arbMonad
    let lhs = runLinearRecursive (monad >>= getPartialSumWith p) n
    let rhs = sum [P.evalPoly p (n - i) * fi | (i, fi) <- zip [0..n] seq]
    return $ lhs == rhs
|
bjin/monad-lrs
|
Test.hs
|
bsd-3-clause
| 7,044
| 0
| 16
| 1,700
| 2,751
| 1,415
| 1,336
| -1
| -1
|
{-# OPTIONS_GHC -Wall #-}
module Classy.Casadi.Bindings( SXM(..)
, SXFunction(..)
, CasadiModule(..)
, casadiInit
, sym
, vsym
, msym
, sxInt
, sxDouble
, gradient
, jacobian
, hessian
, matrixMultiply
, transpose
, veccat
, horzcat
, vertcat
, sxFunction
, generateCode
-- * binary
, sxmMul
, sxmDiv
, sxmAdd
, sxmSub
, sxmPow
-- * unary
, sxmSignum
, sxmNeg
, sxmExp
, sxmAbs
, sxmLog
, sxmAcos
, sxmAsin
, sxmAtan
, sxmCos
, sxmCosh
, sxmSin
, sxmSinh
, sxmTan
, sxmTanh
, sxmSqrt
, pyShow
) where
import Control.Applicative ( (<$>) )
import Foreign.C.Types ( CDouble, CInt )
import Python.Exceptions
import Python.Interpreter
import Python.Objects
import System.IO.Unsafe ( unsafePerformIO )
-- Opaque wrappers around Python objects from the embedded casadi module.
newtype SXM = SXM PyObject
newtype SXFunction = SXFunction PyObject
newtype CasadiModule = CasadiModule PyObject
-- Show delegates to Python's str(); unsafePerformIO is accepted here as a
-- debugging convenience (the underlying call is a read-only str()).
instance Show SXM where
  -- show (SXM x) = show x
  show = unsafePerformIO . pyShow
instance Show SXFunction where
  -- show (SXFunction x) = show x
  show = unsafePerformIO . pyShow
-- Trivial (un)wrapping marshallers for the Python bridge.
instance FromPyObject SXM where fromPyObject = return . SXM
instance ToPyObject SXM where toPyObject (SXM x) = return x
instance FromPyObject CasadiModule where fromPyObject = return . CasadiModule
instance ToPyObject CasadiModule where toPyObject (CasadiModule x) = return x
instance ToPyObject SXFunction where toPyObject (SXFunction p) = return p
instance FromPyObject SXFunction where fromPyObject = return . SXFunction
-- some orphan instances
instance ToPyObject Bool where
  toPyObject True = pyRun_String "True" Py_eval_input []
  toPyObject False = pyRun_String "False" Py_eval_input []
instance ToPyObject PyObject where
  toPyObject = return
instance FromPyObject PyObject where
  fromPyObject = return
-- | Python str() of any marshallable value.
pyShow :: ToPyObject a => a -> IO String
pyShow x = callByNameHs "str" [x] noKwParms
-------------------------------------------------------------
-- | Scalar symbolic variable (a 1x1 matrix).
sym :: CasadiModule -> String -> IO SXM
sym casadi name = msym casadi name (1,1)
-- | Column-vector symbolic variable of length n.
vsym :: CasadiModule -> String -> Int -> IO SXM
vsym casadi name n = msym casadi name (n,1)
-- | r-by-c symbolic matrix, built by calling casadi.ssym(name, r, c)
-- through the low-level call API.
msym :: CasadiModule -> String -> (Int,Int) -> IO SXM
--msym casadi name (r,c) = callMethodHs casadi "ssym" [toPyObject name, toPyObject r, toPyObject c] noKwParms
msym (CasadiModule casadi) name (r,c) = do
  ssym <- getattr casadi "ssym"
  name' <- toPyObject name
  r' <- toPyObject (toInteger r)
  c' <- toPyObject (toInteger c)
  mat <- pyObject_Call ssym [name', r', c'] []
  return (SXM mat)
-- | Integer constant, range-checked against CInt before crossing into
-- Python.  NOTE(review): both constants go through "ssym" — confirm that
-- casadi's ssym accepts numeric arguments as constants.
sxInt :: CasadiModule -> Int -> IO SXM
sxInt (CasadiModule casadi) k
  | withinCIntBounds k = callMethodHs casadi "ssym" [(fromIntegral k)::CInt] noKwParms
  | otherwise = error $ "sxInt got out of range value: "++show k++", range: "++show (minCInt,maxCInt)
  where
    withinCIntBounds x = and [fromIntegral x <= maxCInt, fromIntegral x >= minCInt]
    maxCInt = toInteger (maxBound :: CInt)
    minCInt = toInteger (minBound :: CInt)
-- | Floating-point constant.
sxDouble :: CasadiModule -> Double -> IO SXM
sxDouble (CasadiModule casadi) x = callMethodHs casadi "ssym" [(realToFrac x)::CDouble] noKwParms
--------------------------------------------------------------
-- Symbolic differentiation, delegated to the casadi module functions of
-- the same names (expression first, differentiation arguments second).
gradient :: CasadiModule -> SXM -> SXM -> IO SXM
gradient (CasadiModule casadi) ex args = callMethodHs casadi "gradient" [ex, args] noKwParms
jacobian :: CasadiModule -> SXM -> SXM -> IO SXM
jacobian (CasadiModule casadi) ex args = callMethodHs casadi "jacobian" [ex, args] noKwParms
hessian :: CasadiModule -> SXM -> SXM -> IO SXM
hessian (CasadiModule casadi) ex args = callMethodHs casadi "hessian" [ex, args] noKwParms
---------------------------------------------------------------
-- | Build a casadi.SXFunction from inputs @xs@ to outputs @zs@ and call
-- its init() method; Python exceptions are reformatted into 'error'.
sxFunction :: CasadiModule -> [SXM] -> [SXM] -> IO SXFunction
sxFunction (CasadiModule casadi) xs zs =
  handlePy (\x -> ("sxFunction: " ++) . show <$> formatException x >>= error) $ do
    f@(SXFunction fun) <- callMethodHs casadi "SXFunction" [xs, zs] noKwParms
    runMethodHs fun "init" noParms noKwParms
    return f
-- | Ask casadi to emit C code for the function into @filename@; the
-- returned string is whatever generateCode reports.
generateCode :: SXFunction -> String -> IO String
generateCode (SXFunction fun) filename = do
  callMethodHs fun "generateCode" [filename] noKwParms
-------------------------------------------------------------
-- | Invoke a method of the casadi.SXMatrix class with the given SXM
-- arguments, turning Python exceptions into Haskell 'error's.
callSXMatrix :: String -> CasadiModule -> [SXM] -> IO SXM
callSXMatrix method (CasadiModule casadi) xs =
  handlePy (\x -> ("callSXMatrix: " ++) . show <$> formatException x >>= error) $ do
    sxmatrix <- getattr casadi "SXMatrix"
    callMethodHs sxmatrix method xs noKwParms
-- Arity-specialised wrappers over callSXMatrix.
binarySXMatrix :: String -> CasadiModule -> SXM -> SXM -> IO SXM
binarySXMatrix method casadi x y = callSXMatrix method casadi [x,y]
unarySXMatrix :: String -> CasadiModule -> SXM -> IO SXM
unarySXMatrix method casadi x = callSXMatrix method casadi [x]
-- | True matrix product (casadi's "mul"), as opposed to the elementwise
-- __mul__ below.
matrixMultiply :: CasadiModule -> SXM -> SXM -> IO SXM
matrixMultiply = binarySXMatrix "mul"
-- Elementwise arithmetic via Python's operator dunders.
sxmMul :: CasadiModule -> SXM -> SXM -> IO SXM
sxmMul = binarySXMatrix "__mul__"
sxmDiv :: CasadiModule -> SXM -> SXM -> IO SXM
sxmDiv = binarySXMatrix "__div__"
sxmAdd :: CasadiModule -> SXM -> SXM -> IO SXM
sxmAdd = binarySXMatrix "__add__"
sxmSub :: CasadiModule -> SXM -> SXM -> IO SXM
sxmSub = binarySXMatrix "__sub__"
sxmPow :: CasadiModule -> SXM -> SXM -> IO SXM
sxmPow = binarySXMatrix "__pow__"
-----------------------------------
-- Unary matrix operations; names on the right are the Python-side
-- method names (note numpy-style "arccos"/"fabs" spellings).
transpose :: CasadiModule -> SXM -> IO SXM
transpose = unarySXMatrix "T"
sxmSignum :: CasadiModule -> SXM -> IO SXM
sxmSignum = unarySXMatrix "__sign__"
sxmNeg :: CasadiModule -> SXM -> IO SXM
sxmNeg = unarySXMatrix "__neg__"
sxmExp :: CasadiModule -> SXM -> IO SXM
sxmExp = unarySXMatrix "exp"
sxmAbs :: CasadiModule -> SXM -> IO SXM
sxmAbs = unarySXMatrix "fabs"
sxmLog :: CasadiModule -> SXM -> IO SXM
sxmLog = unarySXMatrix "log"
sxmAcos :: CasadiModule -> SXM -> IO SXM
sxmAcos = unarySXMatrix "arccos"
sxmAsin :: CasadiModule -> SXM -> IO SXM
sxmAsin = unarySXMatrix "arcsin"
sxmAtan :: CasadiModule -> SXM -> IO SXM
sxmAtan = unarySXMatrix "arctan"
sxmCos :: CasadiModule -> SXM -> IO SXM
sxmCos = unarySXMatrix "cos"
sxmCosh :: CasadiModule -> SXM -> IO SXM
sxmCosh = unarySXMatrix "cosh"
sxmSin :: CasadiModule -> SXM -> IO SXM
sxmSin = unarySXMatrix "sin"
sxmSinh :: CasadiModule -> SXM -> IO SXM
sxmSinh = unarySXMatrix "sinh"
sxmTan :: CasadiModule -> SXM -> IO SXM
sxmTan = unarySXMatrix "tan"
sxmTanh :: CasadiModule -> SXM -> IO SXM
sxmTanh = unarySXMatrix "tanh"
sxmSqrt :: CasadiModule -> SXM -> IO SXM
sxmSqrt = unarySXMatrix "sqrt"
----------------------------------
-- Concatenation helpers: module-level casadi functions taking a list of
-- matrices; Python exceptions become Haskell 'error's with a prefix
-- naming the failing wrapper.
veccat :: CasadiModule -> [SXM] -> IO SXM
veccat (CasadiModule cm) ins =
  handlePy (\x -> ("veccat: " ++) . show <$> formatException x >>= error) $
  callMethodHs cm "veccat" [ins] noKwParms
horzcat :: CasadiModule -> [SXM] -> IO SXM
horzcat (CasadiModule cm) ins =
  handlePy (\x -> ("horzcat: " ++) . show <$> formatException x >>= error) $
  callMethodHs cm "horzcat" [ins] noKwParms
vertcat :: CasadiModule -> [SXM] -> IO SXM
vertcat (CasadiModule cm) ins =
  handlePy (\x -> ("vertcat: " ++) . show <$> formatException x >>= error) $
  callMethodHs cm "vertcat" [ins] noKwParms
---------------------------------
-- | Start the embedded Python interpreter and import the @casadi@ module,
-- wrapping it for use by the rest of this binding layer.
casadiInit :: IO CasadiModule
casadiInit = do
  py_initialize
  CasadiModule <$> pyImport_ImportModule "casadi"
--main :: IO ()
--main = do
-- casadi <- casadiInit
---- _ <- pyRun_SimpleString "from casadi import *"
---- print casadi
---- d <- pyModule_GetDict casadi
--
---- b <- pyRun_SimpleString "casadi.ssym('t')"
---- print b
-- x <- vsym casadi "x" 5
-- y <- sym casadi "y"
--
-- z <- mul casadi x y
-- z2 <- mul casadi x z
--
-- fun <- sxFunction casadi [x,y] [z,z2]
--
---- print z
---- print fun
--
-- dz2dx <- hessian casadi z2 x
-- print dz2dx
--
---- z' <- sxShowPyObject z
---- z'' <- sxReprOf z
---- putStrLn z'
---- putStrLn z''
|
ghorn/classy-dvda
|
src/Classy/Casadi/Bindings.hs
|
bsd-3-clause
| 8,954
| 0
| 12
| 2,600
| 2,311
| 1,215
| 1,096
| 174
| 1
|
{-# LANGUAGE CPP, BangPatterns #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
--
-- |
-- Module : Data.ByteString.Lazy.UTF8
-- Copyright : (c) Iavor S. Diatchki 2009
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : emertens@galois.com
-- Stability : experimental
-- Portability : portable
--
-- This module provides fast, validated encoding and decoding functions
-- between 'ByteString's and 'String's. It does not exactly match the
-- output of the Codec.Binary.UTF8.String output for invalid encodings
-- as the number of replacement characters is sometimes longer.
module Data.ByteString.Lazy.UTF8
( B.ByteString
, decode
, replacement_char
, uncons
, splitAt
, take
, drop
, span
, break
, fromString
, toString
, foldl
, foldr
, length
, lines
, lines'
) where
import Data.Bits
import Data.Word
import Data.Int
import qualified Data.ByteString.Lazy as B
import Prelude hiding (take,drop,splitAt,span,break,foldr,foldl,length,lines)
import Codec.Binary.UTF8.String(encode)
import Codec.Binary.UTF8.Generic (buncons)
-- | Converts a Haskell string into a UTF8 encoded bytestring.
-- | Encode a Haskell 'String' as a UTF-8 encoded lazy 'B.ByteString'.
fromString :: String -> B.ByteString
fromString = B.pack . encode
-- | Convert a UTF8 encoded bytestring into a Haskell string.
-- Invalid characters are replaced with '\xFFFD'.
-- | Decode a UTF-8 lazy 'B.ByteString' into a Haskell 'String',
-- substituting 'replacement_char' for malformed sequences.  Built on the
-- module's own lazy 'foldr' so the result streams.
toString :: B.ByteString -> String
toString = foldr (:) []
-- | This character is used to mark errors in a UTF8 encoded string.
replacement_char :: Char
replacement_char = '\xfffd'
-- | Try to extract a character from a byte string.
-- Returns 'Nothing' if there are no more bytes in the byte string.
-- Otherwise, it returns a decoded character and the number of
-- bytes used in its representation.
-- Errors are replaced by character '\0xFFFD'.
-- XXX: Should we combine sequences of errors into a single replacement
-- character?
decode :: B.ByteString -> Maybe (Char,Int64)
decode bs = do (c,cs) <- buncons bs
               return (choose (fromEnum c) cs)
  where
    -- Dispatch on the lead byte: ASCII, stray continuation byte, or the
    -- start of a 2-, 3- or 4-byte sequence.
    choose :: Int -> B.ByteString -> (Char, Int64)
    choose c cs
      | c < 0x80 = (toEnum $ fromEnum c, 1)   -- plain ASCII
      | c < 0xc0 = (replacement_char, 1)      -- continuation byte with no lead
      | c < 0xe0 = bytes2 (mask c 0x1f) cs
      | c < 0xf0 = bytes3 (mask c 0x0f) cs
      | c < 0xf8 = bytes4 (mask c 0x07) cs
      | otherwise = (replacement_char, 1)     -- 0xf8..0xff are never valid leads
    -- Keep only the payload bits of a lead byte.
    mask :: Int -> Int -> Int
    mask c m = fromEnum (c .&. m)
    -- Shift the accumulator left and merge the 6 payload bits of a
    -- continuation byte.
    combine :: Int -> Word8 -> Int
    combine acc r = shiftL acc 6 .|. fromEnum (r .&. 0x3f)
    -- Accept a byte only if it really is a continuation byte (10xxxxxx).
    follower :: Int -> Word8 -> Maybe Int
    follower acc r | r .&. 0xc0 == 0x80 = Just (combine acc r)
    follower _ _ = Nothing
    {-# INLINE get_follower #-}
    -- Pull the next byte and fold it into the accumulator, failing on
    -- end-of-input or a non-continuation byte.
    get_follower :: Int -> B.ByteString -> Maybe (Int, B.ByteString)
    get_follower acc cs = do (x,xs) <- buncons cs
                             acc1 <- follower acc x
                             return (acc1,xs)
    -- The snd of each result is how many bytes were CONSUMED, which on a
    -- malformed sequence is fewer than the sequence length promised by
    -- the lead byte (resynchronisation happens at the bad byte).
    bytes2 :: Int -> B.ByteString -> (Char, Int64)
    bytes2 c cs = case get_follower c cs of
                    Just (d, _) | d >= 0x80 -> (toEnum d, 2)   -- reject overlong forms
                                | otherwise -> (replacement_char, 1)
                    _ -> (replacement_char, 1)
    bytes3 :: Int -> B.ByteString -> (Char, Int64)
    bytes3 c cs =
      case get_follower c cs of
        Just (d1, cs1) ->
          case get_follower d1 cs1 of
            -- Reject overlong (< 0x800), the surrogate range
            -- 0xd800-0xdfff, and the non-characters 0xfffe/0xffff.
            Just (d, _) | (d >= 0x800 && d < 0xd800) ||
                          (d > 0xdfff && d < 0xfffe) -> (toEnum d, 3)
                        | otherwise -> (replacement_char, 3)
            _ -> (replacement_char, 2)
        _ -> (replacement_char, 1)
    bytes4 :: Int -> B.ByteString -> (Char, Int64)
    bytes4 c cs =
      case get_follower c cs of
        Just (d1, cs1) ->
          case get_follower d1 cs1 of
            Just (d2, cs2) ->
              case get_follower d2 cs2 of
                -- Only code points above the BMP and within Unicode's
                -- upper bound (0x10FFFF) are valid 4-byte encodings.
                Just (d,_) | d >= 0x10000 && d < 0x110000 -> (toEnum d, 4)
                           | otherwise -> (replacement_char, 4)
                _ -> (replacement_char, 3)
            _ -> (replacement_char, 2)
        _ -> (replacement_char, 1)
{-# INLINE decode #-}
-- | Split after a given number of characters.
-- Negative values are treated as if they are 0.
-- | Split after a given number of characters.
-- Negative counts behave like 0, and a count past the end of the string
-- yields the whole string followed by an empty remainder.
splitAt :: Int64 -> B.ByteString -> (B.ByteString,B.ByteString)
splitAt count bs = go 0 count bs
  where
    -- Walk character by character, tracking the byte offset strictly so
    -- the final split is a single cheap B.splitAt.
    go !bytes remaining rest
      | remaining <= 0 = B.splitAt bytes bs
      | otherwise =
          case decode rest of
            Just (_, w) -> go (bytes + w) (remaining - 1) (B.drop w rest)
            Nothing     -> (bs, B.empty)
-- | @take n s@ returns the first @n@ characters of @s@.
-- If @s@ has less than @n@ characters, then we return the whole of @s@.
-- | @take n s@ returns the first @n@ characters of @s@.
-- If @s@ has fewer than @n@ characters, the whole of @s@ is returned.
take :: Int64 -> B.ByteString -> B.ByteString
take count bs = go 0 count bs
  where
    -- Count bytes strictly while stepping over characters; the prefix is
    -- then carved off with one B.take.
    go !bytes remaining rest
      | remaining <= 0 = B.take bytes bs
      | otherwise =
          case decode rest of
            Just (_, w) -> go (bytes + w) (remaining - 1) (B.drop w rest)
            Nothing     -> bs
-- | @drop n s@ returns the @s@ without its first @n@ characters.
-- If @s@ has less than @n@ characters, then we return an empty string.
-- | @drop n s@ returns @s@ without its first @n@ characters.
-- If @s@ has fewer than @n@ characters, the result is empty.
drop :: Int64 -> B.ByteString -> B.ByteString
drop count bs = go 0 count bs
  where
    -- Strict byte-offset accumulator; the suffix is produced by one B.drop.
    go !bytes remaining rest
      | remaining <= 0 = B.drop bytes bs
      | otherwise =
          case decode rest of
            Just (_, w) -> go (bytes + w) (remaining - 1) (B.drop w rest)
            Nothing     -> B.empty
-- | Split a string into two parts: the first is the longest prefix
-- that contains only characters that satisfy the predicate; the second
-- part is the rest of the string.
-- Invalid characters are passed as '\0xFFFD' to the predicate.
span :: (Char -> Bool) -> B.ByteString -> (B.ByteString, B.ByteString)
span p bs = loop 0 bs
  -- The accumulator counts *bytes* (not characters) consumed so far, so
  -- the final split is a single 'B.splitAt'.  It is forced on every step
  -- (bang pattern): the sibling loops in 'splitAt', 'take' and 'drop'
  -- already do this, and without it a long matching prefix builds an
  -- O(n) chain of suspended additions (space leak).
  where loop !a cs = case decode cs of
                       Just (c,n) | p c -> loop (a+n) (B.drop n cs)
                       _ -> B.splitAt a bs
-- | Split a string into two parts: the first is the longest prefix
-- that contains only characters that do not satisfy the predicate; the second
-- part is the rest of the string.
-- Invalid characters are passed as '\0xFFFD' to the predicate.
-- | Like 'span', but the first part is the longest prefix whose
-- characters do NOT satisfy the predicate.  Invalid characters are
-- passed to the predicate as '\xFFFD'.
break :: (Char -> Bool) -> B.ByteString -> (B.ByteString, B.ByteString)
break p = span (not . p)
-- | Get the first character of a byte string, if any.
-- Malformed characters are replaced by '\0xFFFD'.
-- | Get the first character of a byte string (malformed bytes decode to
-- '\xFFFD') together with the remainder; 'Nothing' on an empty string.
uncons :: B.ByteString -> Maybe (Char,B.ByteString)
uncons bs = fmap (\(c, n) -> (c, B.drop n bs)) (decode bs)
-- | Traverse a bytestring (right biased).
-- | Right fold over the decoded characters of a byte string; lazy in the
-- tail, so it supports streaming consumption (e.g. 'toString').
foldr :: (Char -> a -> a) -> a -> B.ByteString -> a
foldr cons nil = go
  where
    go bs = case uncons bs of
      Nothing      -> nil
      Just (c, cs) -> cons c (go cs)
-- | Traverse a bytestring (left biased).
-- This function is strict in the accumulator.
-- | Left fold over the decoded characters of a byte string.
-- The accumulator is forced (to WHNF) before each recursive step, so no
-- thunk chain accumulates.
foldl :: (a -> Char -> a) -> a -> B.ByteString -> a
foldl add = go
  where
    go acc bs = case uncons bs of
      Just (c, cs) -> let !acc' = add acc c
                      in go acc' cs
      Nothing      -> acc
-- | Counts the number of characters encoded in the bytestring.
-- Note that this includes replacement characters.
length :: B.ByteString -> Int
length b = loop 0 b
  -- Strict counter (bang pattern): without it each step suspends a (+1)
  -- thunk, costing O(n) extra space on long strings.  This matches the
  -- strict accumulators already used by 'splitAt', 'take' and 'drop'.
  where loop !n xs = case decode xs of
                       Just (_,m) -> loop (n+1) (B.drop m xs)
                       Nothing -> n
-- | Split a string into a list of lines.
-- Lines are terminated by '\n' or the end of the string.
-- Empty lines may not be terminated by the end of the string.
-- See also 'lines\''.
-- | Split on newline (byte 10).  Terminators are dropped; a trailing
-- newline does not produce a final empty line.
lines :: B.ByteString -> [B.ByteString]
lines bs
  | B.null bs = []
  | otherwise =
      case B.elemIndex 10 bs of
        Nothing -> [bs]
        Just i  -> let (before, after) = B.splitAt i bs
                   in before : lines (B.tail after)
-- | Split a string into a list of lines.
-- Lines are terminated by '\n' or the end of the string.
-- Empty lines may not be terminated by the end of the string.
-- This function preserves the terminators.
-- See also 'lines'.
-- | Like 'lines', but each '\n' terminator is kept at the end of its line.
lines' :: B.ByteString -> [B.ByteString]
lines' bs
  | B.null bs = []
  | otherwise =
      case B.elemIndex 10 bs of
        Nothing -> [bs]
        Just i  -> let (before, after) = B.splitAt (i+1) bs
                   in before : lines' after
|
ghc/packages-utf8-string
|
Data/ByteString/Lazy/UTF8.hs
|
bsd-3-clause
| 8,126
| 0
| 21
| 2,403
| 2,340
| 1,230
| 1,110
| 136
| 8
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
#include "kinds.h"
#ifdef DataPolyKinds
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
#endif
#ifdef SafeHaskell
{-# LANGUAGE Safe #-}
#endif
module Type.Tuple.Sextet
( Sextet
, Fst
, Snd
, Trd
, Frt
, Fft
, Sxt
)
where
#ifndef DataPolyKinds
-- base ----------------------------------------------------------------------
import Data.Typeable (Typeable)
-- types ---------------------------------------------------------------------
import Type.Bool ((:&&))
import Type.Eq ((:==))
import Type.Meta (Known, Val, val)
import Type.Meta.Proxy (Proxy (Proxy))
import Type.Ord (Compare)
import Type.Semigroup ((:<>))
#endif
------------------------------------------------------------------------------
#ifdef DataPolyKinds
#if __GLASGOW_HASKELL__ >= 708
type Sextet = '(,,,,,)
#else
type Sextet a b c d e f = '(a, b, c, d, e, f)
#endif
#else
data Sextet a b c d e f
deriving (Typeable)
------------------------------------------------------------------------------
instance (Known a, Known b, Known c, Known d, Known e, Known f) =>
Known (Sextet a b c d e f)
where
type Val (Sextet a b c d e f) = (Val a, Val b, Val c, Val d, Val e, Val f)
val _ = (val (Proxy :: Proxy a), val (Proxy :: Proxy b),
val (Proxy :: Proxy c), val (Proxy :: Proxy d),
val (Proxy :: Proxy e), val (Proxy :: Proxy f))
------------------------------------------------------------------------------
type instance Sextet a b c d e f :== Sextet a' b' c' d' e' f' =
a :== a' :&& b :== b' :&& c :== c' :&& d :== d' :&& e :== e' :&& f :== f'
------------------------------------------------------------------------------
type instance Compare (Sextet a b c d e f) (Sextet a' b' c' d' e' f')
= Compare a a' :<> Compare b b' :<> Compare c c' :<> Compare d d' :<>
Compare e e' :<> Compare f f'
------------------------------------------------------------------------------
type instance Sextet a b c d e f :<> Sextet a' b' c' d' e' f'
= Sextet (a :<> a') (b :<> b') (c :<> c') (d :<> d') (e :<> e') (f :<> f')
#endif
------------------------------------------------------------------------------
-- | Project the first component of a 'Sextet'.
type family Fst
    (p :: KSextet (KPoly1, KPoly2, KPoly3, KPoly4, KPoly5, KPoly6))
  :: KPoly1
#ifdef ClosedTypeFamilies
  where
#else
type instance
#endif
    Fst (Sextet a _b _c _d _e _f) = a
------------------------------------------------------------------------------
-- | Project the second component of a 'Sextet'.
type family Snd
    (p :: KSextet (KPoly1, KPoly2, KPoly3, KPoly4, KPoly5, KPoly6))
  :: KPoly2
#ifdef ClosedTypeFamilies
  where
#else
type instance
#endif
    Snd (Sextet _a b _c _d _e _f) = b
------------------------------------------------------------------------------
-- | Project the third component of a 'Sextet'.
type family Trd
    (p :: KSextet (KPoly1, KPoly2, KPoly3, KPoly4, KPoly5, KPoly6))
  :: KPoly3
#ifdef ClosedTypeFamilies
  where
#else
type instance
#endif
    Trd (Sextet _a _b c _d _e _f) = c
------------------------------------------------------------------------------
-- | Project the fourth component of a 'Sextet'.
type family Frt
    (p :: KSextet (KPoly1, KPoly2, KPoly3, KPoly4, KPoly5, KPoly6))
  :: KPoly4
#ifdef ClosedTypeFamilies
  where
#else
type instance
#endif
    Frt (Sextet _a _b _c d _e _f) = d
------------------------------------------------------------------------------
-- | Project the fifth component of a 'Sextet'.
type family Fft
    (p :: KSextet (KPoly1, KPoly2, KPoly3, KPoly4, KPoly5, KPoly6))
  :: KPoly5
#ifdef ClosedTypeFamilies
  where
#else
type instance
#endif
    Fft (Sextet _a _b _c _d e _f) = e
------------------------------------------------------------------------------
-- | Project the sixth component of a 'Sextet'.
type family Sxt
    (p :: KSextet (KPoly1, KPoly2, KPoly3, KPoly4, KPoly5, KPoly6))
  :: KPoly6
#ifdef ClosedTypeFamilies
  where
#else
type instance
#endif
    Sxt (Sextet _a _b _c _d _e f) = f
|
duairc/symbols
|
types/src/Type/Tuple/Sextet.hs
|
bsd-3-clause
| 4,112
| 0
| 8
| 795
| 550
| 355
| 195
| -1
| -1
|
module Derivation where
import Term
-- | Proof derivations for the judgements of the type theory.  The
-- constructors that introduce variables into (some of) their
-- subderivations are marked with a trailing @-- BINDS@ comment.
--
-- NOTE(review): the @#ifdef FLAG_coind@ below needs CPP preprocessing,
-- but no @{-# LANGUAGE CPP #-}@ pragma is visible in this module —
-- confirm it is enabled via cabal/build flags.
data Derivation
    = PI_EQ Derivation Derivation -- BINDS
    | PI_INTRO Int Derivation Derivation -- BINDS
    | PI_ELIM Int Term Derivation Derivation -- BINDS
    | LAM_EQ Int Derivation Derivation -- BINDS
    | AP_EQ Int Term Derivation Derivation Derivation
    | FUN_EXT Derivation Derivation Derivation -- BINDS
    | SIG_EQ Derivation Derivation -- BINDS
    | SIG_INTRO Int Term Derivation Derivation Derivation -- BINDS
    | SIG_ELIM Int Derivation -- BINDS
    | PAIR_EQ Int Derivation Derivation Derivation -- BINDS
    | FST_EQ Term Derivation
    | SND_EQ Int Term Derivation Derivation
    | NAT_EQ
    | NAT_INTRO
    | NAT_ELIM Int Derivation Derivation -- BINDS
    | ZERO_EQ
    | SUCC_EQ Derivation
    | NATREC_EQ Derivation Derivation Derivation -- BINDS
    | UNIT_EQ
    | TT_EQ
    | UNIT_INTRO
    | EQ_EQ Derivation Derivation Derivation
    | EQ_MEM_EQ Derivation
    | EQ_SYM Derivation
    | EQ_SUBST Int Term Derivation Derivation Derivation -- BINDS
    | CEQ_EQ Derivation Derivation
    | CEQ_MEM_EQ Derivation
    | CEQ_SYM Derivation
    | CEQ_SUBST Term Derivation Derivation
    | CEQ_STEP Derivation
    | CEQ_REFL
    | BASE_EQ
    | BASE_MEM_EQ Derivation
    | BASE_ELIM_EQ Int Derivation
    | UNI_EQ
    | CUMULATIVE Derivation
    | PER_EQ Derivation Derivation Derivation Derivation Derivation Derivation -- BINDS
    | PER_MEM_EQ Int Derivation Derivation Derivation Derivation
    | PER_ELIM_EQ Int Derivation -- BINDS
    | WITNESS Term Derivation
    -- | CUT
    | VAR Int
    | VAR_EQ
#ifdef FLAG_coind
    -- TODO Derivations of co/inductive types.
#endif
    deriving (Eq, Show)
|
thsutton/cha
|
lib/Derivation.hs
|
bsd-3-clause
| 1,672
| 0
| 6
| 398
| 334
| 199
| 135
| 46
| 0
|
{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-}
-- | Expand out synthetic instructions into single machine instrs.
module SPARC.CodeGen.Expand (
expandTop
)
where
import GhcPrelude
import SPARC.Instr
import SPARC.Imm
import SPARC.AddrMode
import SPARC.Regs
import Instruction
import Reg
import Format
import GHC.Cmm
import Outputable
import OrdList
-- | Expand out synthetic instructions in this top level thing
expandTop :: NatCmmDecl RawCmmStatics Instr -> NatCmmDecl RawCmmStatics Instr
-- Data sections contain no instructions, so there is nothing to expand.
expandTop top@(CmmData{})
        = top
-- For a proc, expand every basic block independently.
expandTop (CmmProc info lbl live (ListGraph blocks))
        = CmmProc info lbl live (ListGraph $ map expandBlock blocks)
-- | Expand out synthetic instructions in this block
expandBlock :: NatBasicBlock Instr -> NatBasicBlock Instr
expandBlock (BasicBlock label instrs) =
        -- Expand each instruction, then flatten the OrdList back to a list.
        BasicBlock label (fromOL (expandBlockInstrs instrs))
-- | Expand out some instructions
-- | Expand out some instructions
expandBlockInstrs :: [Instr] -> OrdList Instr
expandBlockInstrs = foldr expandOne nilOL
  where
    -- Remap double-reg pairs first, then split any misaligned 64-bit
    -- float load/store, prepending the result to the rest of the block.
    expandOne instr rest
        = expandMisalignedDoubles (remapRegPair instr) `appOL` rest
-- | In the SPARC instruction set the FP register pairs that are used
-- to hold 64 bit floats are referred to by just the first reg
-- of the pair. Remap our internal reg pairs to the appropriate reg.
--
-- For example:
-- ldd [%l1], (%f0 | %f1)
--
-- gets mapped to
-- ldd [$l1], %f0
--
remapRegPair :: Instr -> Instr
remapRegPair instr
 = let patchF reg
        = case reg of
                RegReal (RealRegSingle _)
                        -> reg
                RegReal (RealRegPair r1 r2)
                        -- sanity checking: a valid pair is an even FP reg
                        -- in f32..f63 followed by its immediate successor
                        | r1 >= 32
                        , r1 <= 63
                        , r1 `mod` 2 == 0
                        , r2 == r1 + 1
                        -> RegReal (RealRegSingle r1)
                        | otherwise
                        -> pprPanic "SPARC.CodeGen.Expand: not remapping dodgy looking reg pair " (ppr reg)
                -- Register allocation must already have happened: a
                -- virtual register at this stage is a compiler bug.
                RegVirtual _
                        -> pprPanic "SPARC.CodeGen.Expand: not remapping virtual reg " (ppr reg)
   in patchRegsOfInstr instr patchF
-- Expand out 64 bit load/stores into individual instructions to handle
-- possible double alignment problems.
--
-- TODO: It'd be better to use a scratch reg instead of the add/sub thing.
-- We might be able to do this faster if we use the UA2007 instr set
-- instead of restricting ourselves to SPARC V9.
--
expandMisalignedDoubles :: Instr -> OrdList Instr
expandMisalignedDoubles instr
        -- Translate to:
        --    add g1,g2,g1
        --    ld  [g1],%fn
        --    ld  [g1+4],%f(n+1)
        --    sub g1,g2,g1          -- to restore g1
        | LD FF64 (AddrRegReg r1 r2) fReg <- instr
        = toOL [ ADD False False r1 (RIReg r2) r1
               , LD FF32 (AddrRegReg r1 g0) fReg
               , LD FF32 (AddrRegImm r1 (ImmInt 4)) (fRegHi fReg)
               , SUB False False r1 (RIReg r2) r1 ]
        -- Translate to
        --    ld [addr],%fn
        --    ld [addr+4],%f(n+1)
        -- NOTE(review): the irrefutable 'Just' pattern (tolerated via the
        -- -Wno-incomplete-uni-patterns pragma at the top of this module)
        -- crashes with a pattern-match failure if 'addrOffset' ever
        -- returns Nothing for offset 4.
        | LD FF64 addr fReg <- instr
        = let Just addr' = addrOffset addr 4
          in toOL [ LD FF32 addr fReg
                  , LD FF32 addr' (fRegHi fReg) ]
        -- Translate to:
        --    add g1,g2,g1
        --    st %fn,[g1]
        --    st %f(n+1),[g1+4]
        --    sub g1,g2,g1          -- to restore g1
        | ST FF64 fReg (AddrRegReg r1 r2) <- instr
        = toOL [ ADD False False r1 (RIReg r2) r1
               , ST FF32 fReg (AddrRegReg r1 g0)
               , ST FF32 (fRegHi fReg) (AddrRegImm r1 (ImmInt 4))
               , SUB False False r1 (RIReg r2) r1 ]
        -- Translate to
        --    st %fn,[addr]
        --    st %f(n+1),[addr+4]
        -- (same irrefutable-Just caveat as the load case above)
        | ST FF64 fReg addr <- instr
        = let Just addr' = addrOffset addr 4
          in toOL [ ST FF32 fReg addr
                  , ST FF32 (fRegHi fReg) addr' ]
        -- some other instr: pass through unchanged
        | otherwise
        = unitOL instr
-- | The high partner for this float reg.
fRegHi :: Reg -> Reg
fRegHi (RegReal (RealRegSingle r1))
        -- Only the even-numbered low half of an FP pair (f32..f62) has a
        -- high partner, which is the next register.
        | r1 >= 32
        , r1 <= 63
        , r1 `mod` 2 == 0
        = (RegReal $ RealRegSingle (r1 + 1))
-- Can't take high partner for non-low reg: calling this on anything else
-- is a compiler bug, hence the panic.
fRegHi reg
        = pprPanic "SPARC.CodeGen.Expand: can't take fRegHi from " (ppr reg)
|
sdiehl/ghc
|
compiler/nativeGen/SPARC/CodeGen/Expand.hs
|
bsd-3-clause
| 4,860
| 0
| 16
| 1,861
| 977
| 500
| 477
| 77
| 3
|
module MiniKanren.Term where
import Data.String (IsString(..))
import qualified Data.List as L
import qualified Data.Text as T
import qualified Data.Map as M
data Var
= VNamed T.Text Int
| VGen Int
deriving (Eq, Ord)
instance Show Var where
show (VNamed s i) = T.unpack s ++ "." ++ show i
show (VGen i) = "_." ++ show i
newtype Atom = Atom { unAtom :: T.Text }
deriving (Eq, Ord, IsString)
instance Show Atom where
show (Atom a) = '\'' : T.unpack a
-- | First-order terms: atoms, cons pairs, and logic variables.
data Term
  = TAtom Atom
  | TPair Term Term
  | TVar Var
  deriving (Eq)
instance IsString Term where
  fromString = TAtom . fromString
instance Show Term where
  show (TAtom a) = show a
  -- Pair chains render Lisp-style: proper lists as "(a b c)", improper
  -- tails as "(a b . t)".  Note: the local 'showList' shadows the Show
  -- class method of the same name, and 'last' shadows Prelude.last.
  show t@(TPair {}) = showList t
    where
      showList t = "(" ++ L.intercalate " " (map show ts) ++ lastShown ++ ")"
        where
          lastShown = if last == nil then "" else " . " ++ show last
          (ts, last) = unfoldPair t
  show (TVar v) = show v
-- | Build a right-nested pair chain (a "proper list") from the given
-- elements, terminated by 'nil'.
foldPair :: [Term] -> Term
foldPair ts = foldr TPair nil ts
list = foldPair
-- | Flatten a right-nested pair chain into its elements plus the final
-- (non-pair) tail; inverse of 'foldPair' for proper lists.
unfoldPair :: Term -> ([Term], Term)
unfoldPair (TPair h t) =
  let (elems, end) = unfoldPair t
  in (h : elems, end)
unfoldPair t = ([], t)
type Env = M.Map Var Term
domain :: Env -> [Var]
domain = M.keys
-- | Resolve variables in a term against the environment.  Each variable
-- is removed from the environment while its binding is expanded, so a
-- (directly) self-referential binding cannot loop forever.
canonize :: Env -> Term -> Term
canonize _   t@(TAtom _) = t
canonize env t@(TVar v)  =
  case M.lookup v env of
    Nothing -> t
    Just t' -> canonize (M.delete v env) t'
canonize env (TPair a d) = TPair (canonize env a) (canonize env d)
data UnifyError
= OccursCheck Var Term
| AtomsDiffer Atom Atom
| TypeMismatch Term Term
deriving (Show)
-- | Structurally unify two terms, extending @env@ with any new variable
-- bindings.  Terms are NOT canonized first; see 'unifyC' for that.
unify :: Term -> Term -> Env -> Either UnifyError Env
unify t1 t2 env = case (t1, t2) of
  (TAtom a1, TAtom a2) -> if a1 == a2
    then pure env
    else Left $ AtomsDiffer a1 a2
  -- Unify heads first, threading the extended env into the tails.
  (TPair f1 s1, TPair f2 s2) -> unify f1 f2 env >>= unify s1 s2
  (TVar v1, TVar v2) | v1 == v2 -> pure env
  (_, TVar v2) -> t1 `unifyVar` v2
  (TVar v1, _) -> t2 `unifyVar` v1
  _ -> Left $ TypeMismatch t1 t2
  where
    -- Bind v to t only if v does not occur inside t (occurs check),
    -- otherwise unification would create an infinite term.
    t `unifyVar` v = if v `notIn` t
      then tryExtend v t env
      else Left $ OccursCheck v t
-- | Unify two terms after canonizing both against the environment.
unifyC :: Term -> Term -> Env -> Either UnifyError Env
unifyC t1 t2 env = unify (canonize env t1) (canonize env t2) env
-- | Occurs check: 'True' iff the variable appears nowhere in the term.
notIn :: Var -> Term -> Bool
notIn v (TVar v')   = v /= v'
notIn _ (TAtom _)   = True
notIn v (TPair f s) = notIn v f && notIn v s
-- Extend env with v := t; if v is already bound, the new term must unify
-- with the existing binding instead of overwriting it.
tryExtend v t env = case M.lookup v env of
  Just t0 -> unify t t0 env
  Nothing -> pure $ M.insert v t env
emptyEnv :: Env
emptyEnv = M.empty
nil :: Term
nil = TAtom "nil"
|
overminder/hs-mini-kanren
|
src/MiniKanren/Term.hs
|
bsd-3-clause
| 2,423
| 0
| 13
| 614
| 1,134
| 591
| 543
| 77
| 8
|
{-# LANGUAGE TemplateHaskell, GeneralizedNewtypeDeriving, TypeFamilies, FlexibleInstances #-}
module Weight.Types where
import BasicPrelude
import Control.Lens
import Data.Default
import Weight.PlateOrder.Types as PO
-- !A collection of exercises that are known to exist. A person will usually have selected only a subset of these at any given time.
data WeightState = WS {
_exercises :: Map Text Exercise -- ^ indexed by their id
} deriving (Show)
instance Default WeightState where
def = WS empty
-- !How many days should go by without doing an exercise
-- before you should just start over again at 15 reps.
type CycleLength = Integer
data ExerciseType = Bodyweight | Dumbbell | Barbell deriving (Eq, Show)
data Exercise = Exercise {
_eExerciseId :: Text, -- ^ "bsquats"
_eName :: Text, -- ^ "Barbell Squats"
_eMinReps :: Int, -- ^ minimum I am willing to do
_eType :: ExerciseType,
_eRank :: Float -- ^ Determines where exercise will appear in workout (TODO get rid of this hack)
} deriving (Eq, Show)
type Weight = Rational
type Reps = Int
data Proficiency = Pro {
_pReps :: Reps,
_pWeight :: Weight
} deriving (Eq, Show)
data WorkoutPlan = Plan [WorkoutStep] deriving (Show, Eq)
data WorkoutStep =
BarbellExercise Exercise Reps Weight PlateOrder
| DumbbellExercise Exercise Reps Weight
| BodyWeightExercise Exercise Reps
deriving (Show, Eq)
makeLenses ''WeightState
makeLenses ''Exercise
makeLenses ''Proficiency
makeLenses ''WorkoutPlan
makeLenses ''WorkoutStep
-- | Exercises are ordered solely by 'eRank', which fixes their position
-- in a workout.  NOTE(review): this Ord is inconsistent with the derived
-- Eq (two different exercises with equal ranks compare EQ but are not
-- (==)) — confirm no Map/Set usage relies on Eq/Ord agreement.
instance Ord Exercise where
  compare e1 e2 = compare (e1 ^. eRank) (e2 ^. eRank)
|
mindreader/iron-tracker
|
Weight/Types.hs
|
bsd-3-clause
| 1,669
| 0
| 9
| 359
| 345
| 196
| 149
| 39
| 0
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.ZH.Rules
( rules
) where
import Prelude
import qualified Data.Text as Text
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Numeral.Types (NumeralData(..))
import Duckling.Regex.Types
import Duckling.Time.Computed
import Duckling.Time.Helpers
import Duckling.Time.Types (TimeData (..))
import Duckling.Types
import qualified Duckling.Ordinal.Types as TOrdinal
import qualified Duckling.Time.Types as TTime
import qualified Duckling.TimeGrain.Types as TG
import qualified Duckling.Numeral.Types as TNumeral
ruleTheDayAfterTomorrow :: Rule
ruleTheDayAfterTomorrow = Rule
{ name = "the day after tomorrow"
, pattern =
[ regex "后天|後天|後日"
]
, prod = \_ -> tt $ cycleNth TG.Day 2
}
ruleTwoDaysAfterTomorrow :: Rule
ruleTwoDaysAfterTomorrow = Rule
{ name = "two days after tomorrow"
, pattern =
[ regex "大后天|大後天|大後日"
]
, prod = \_ -> tt $ cycleNth TG.Day 3
}
ruleRelativeMinutesTotillbeforeIntegerHourofday :: Rule
ruleRelativeMinutesTotillbeforeIntegerHourofday = Rule
{ name = "relative minutes to|till|before <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "(点|點)差"
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time td:_:token:_) -> do
n <- getIntValue token
Token Time <$> minutesBefore n td
_ -> Nothing
}
ruleRelativeMinutesTotillbeforeNoonmidnight :: Rule
ruleRelativeMinutesTotillbeforeNoonmidnight = Rule
{ name = "relative minutes to|till|before noon|midnight"
, pattern =
[ Predicate isMidnightOrNoon
, regex "差"
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time td:_:token:_) -> do
n <- getIntValue token
Token Time <$> minutesBefore n td
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday = Rule
{ name = "relative (10-59) minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点|點"
, Predicate $ isIntegerBetween 10 59
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours n
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday2 :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday2 = Rule
{ name = "relative (10-59) minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点|點"
, Predicate $ isIntegerBetween 10 59
, regex "分"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours n
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday3 :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday3 = Rule
{ name = "relative (1-9) minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点零|點零"
, Predicate $ isIntegerBetween 1 9
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours n
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday4 :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday4 = Rule
{ name = "relative (1-9) minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点零|點零"
, Predicate $ isIntegerBetween 1 9
, regex "分"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours n
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday5 :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday5 = Rule
{ name = "number of 5 minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点踏|點踏|点搭|點搭"
, Predicate $ isIntegerBetween 1 11
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours (5*n)
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday6 :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday6 = Rule
{ name = "number of 5 minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点|點"
, Predicate $ isIntegerBetween 1 9
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours (5*n)
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday7 :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday7 = Rule
{ name = "number of 5 minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点|點"
, Predicate $ isIntegerBetween 1 11
, regex "個字"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours (5*n)
_ -> Nothing
}
ruleRelativeMinutesAfterpastNoonmidnight :: Rule
ruleRelativeMinutesAfterpastNoonmidnight = Rule
{ name = "relative minutes after|past noon|midnight"
, pattern =
[ Predicate isMidnightOrNoon
, regex "过|\x904e"
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute True hours n
_ -> Nothing
}
ruleQuarterTotillbeforeIntegerHourofday :: Rule
ruleQuarterTotillbeforeIntegerHourofday = Rule
{ name = "quarter to|till|before <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "(点|點)差"
, regex "一刻"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> Token Time <$> minutesBefore 15 td
_ -> Nothing
}
ruleQuarterTotillbeforeNoonmidnight :: Rule
ruleQuarterTotillbeforeNoonmidnight = Rule
{ name = "quarter to|till|before noon|midnight"
, pattern =
[ Predicate isMidnightOrNoon
, regex "差"
, regex "一刻"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> Token Time <$> minutesBefore 15 td
_ -> Nothing
}
ruleQuarterAfterpastIntegerHourofday :: Rule
ruleQuarterAfterpastIntegerHourofday = Rule
{ name = "quarter after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "点|點"
, regex "一刻"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_) -> tt $ hourMinute True hours 15
_ -> Nothing
}
ruleQuarterAfterpastNoonmidnight :: Rule
ruleQuarterAfterpastNoonmidnight = Rule
{ name = "quarter after|past noon|midnight"
, pattern =
[ Predicate isMidnightOrNoon
, regex "过"
, regex "一刻"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_) -> tt $ hourMinute True hours 15
_ -> Nothing
}
ruleHalfTotillbeforeIntegerHourofday :: Rule
ruleHalfTotillbeforeIntegerHourofday = Rule
{ name = "half to|till|before <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "(点|點)差"
, regex "半"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> Token Time <$> minutesBefore 30 td
_ -> Nothing
}
ruleHalfTotillbeforeNoonmidnight :: Rule
ruleHalfTotillbeforeNoonmidnight = Rule
{ name = "half to|till|before noon|midnight"
, pattern =
[ Predicate isMidnightOrNoon
, regex "差"
, regex "半"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> Token Time <$> minutesBefore 30 td
_ -> Nothing
}
ruleHalfAfterpastIntegerHourofday :: Rule
ruleHalfAfterpastIntegerHourofday = Rule
{ name = "half after|past <integer> (hour-of-day)"
, pattern =
[ Predicate isAnHourOfDay
, regex "(点|點)(踏|搭)?"
, regex "半"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_) -> tt $ hourMinute True hours 30
_ -> Nothing
}
ruleHalfAfterpastNoonmidnight :: Rule
ruleHalfAfterpastNoonmidnight = Rule
{ name = "half after|past noon|midnight"
, pattern =
[ Predicate isMidnightOrNoon
, regex "过"
, regex "半"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) _)}:
_) -> tt $ hourMinute True hours 30
_ -> Nothing
}
-- | 24-hour clock literals "hh:mm" (e.g. "09:30", "23:59").  The regex
-- itself bounds hours to 0-23 and minutes to 00-59.
ruleHhmmTimeofday :: Rule
ruleHhmmTimeofday = Rule
  { name = "hh:mm (time-of-day)"
  , pattern =
    [ regex "((?:[01]?\\d)|(?:2[0-3])):([0-5]\\d)"
    ]
  , prod = \tokens -> case tokens of
      (Token RegexMatch (GroupMatch (hh:mm:_)):_) -> do
        h <- parseInt hh
        m <- parseInt mm
        -- The Bool passed to hourMinute is the same flag the other rules
        -- in this file pass as True; presumably "is 12-hour-ambiguous" —
        -- TODO confirm against Duckling.Time.Helpers.
        tt $ hourMinute True h m
      _ -> Nothing
  }
ruleThisDayofweek :: Rule
ruleThisDayofweek = Rule
{ name = "this <day-of-week>"
, pattern =
[ regex "这|這|今(个|個)?"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ predNth 0 False td
_ -> Nothing
}
ruleNthTimeOfTime2 :: Rule
ruleNthTimeOfTime2 = Rule
{ name = "nth <time> of <time>"
, pattern =
[ dimension Time
, regex "的"
, dimension Ordinal
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Ordinal od:Token Time td2:_) -> Token Time .
predNth (TOrdinal.value od - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleLastTime :: Rule
ruleLastTime = Rule
{ name = "last <time>"
, pattern =
[ regex "去|上(个|個)?"
, Predicate isOkWithThisNext
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ predNth (-1) False td
_ -> Nothing
}
ruleInDuration :: Rule
ruleInDuration = Rule
{ name = "in <duration>"
, pattern =
[ regex "再"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) -> tt $ inDuration dd
_ -> Nothing
}
ruleNow :: Rule
ruleNow = Rule
{ name = "now"
, pattern =
[ regex "现在|此时|此刻|当前|現在|此時|當前|\x5b9c\x5bb6|\x800c\x5bb6|\x4f9d\x5bb6"
]
, prod = \_ -> tt now
}
ruleTheCycleAfterTime :: Rule
ruleTheCycleAfterTime = Rule
{ name = "the <cycle> after <time>"
, pattern =
[ regex "那"
, dimension TimeGrain
, regex "(之)?(后|後)"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter False grain 1 td
_ -> Nothing
}
ruleTheCycleBeforeTime :: Rule
ruleTheCycleBeforeTime = Rule
{ name = "the <cycle> before <time>"
, pattern =
[ regex "那"
, dimension TimeGrain
, regex "(之)?前"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter False grain (-1) td
_ -> Nothing
}
ruleNoon :: Rule
ruleNoon = Rule
{ name = "noon"
, pattern =
[ regex "中午"
]
, prod = \_ -> tt $ mkOkForThisNext $ hour False 12
}
ruleToday :: Rule
ruleToday = Rule
{ name = "today"
, pattern =
[ regex "今天|今日"
]
, prod = \_ -> tt today
}
ruleNextDayofweek :: Rule
ruleNextDayofweek = Rule
{ name = "next <day-of-week>"
, pattern =
[ regex "明|下(个|個)?"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 True td
_ -> Nothing
}
ruleTheDayBeforeYesterday :: Rule
ruleTheDayBeforeYesterday = Rule
{ name = "the day before yesterday"
, pattern =
[ regex "前天|前日"
]
, prod = \_ -> tt . cycleNth TG.Day $ - 2
}
ruleNextCycle :: Rule
ruleNextCycle = Rule
{ name = "next <cycle>"
, pattern =
[ regex "下(个|個)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 1
_ -> Nothing
}
ruleDurationFromNow :: Rule
ruleDurationFromNow = Rule
{ name = "<duration> from now"
, pattern =
[ dimension Duration
, regex "后|後|之後"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleLastCycle :: Rule
ruleLastCycle = Rule
{ name = "last <cycle>"
, pattern =
[ regex "上(个|個)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt . cycleNth grain $ - 1
_ -> Nothing
}
ruleLastDuration :: Rule
ruleLastDuration = Rule
{ name = "last <duration>"
, pattern =
[ regex "上(个|個)?"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ durationAgo dd
_ -> Nothing
}
ruleAfternoon :: Rule
ruleAfternoon = Rule
{ name = "afternoon"
, pattern =
[ regex "下午|中午|\x664f\x665d"
]
, prod = \_ ->
let from = hour False 12
to = hour False 19
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleMidnight :: Rule
ruleMidnight = Rule
{ name = "midnight"
, pattern =
[ regex "午夜|凌晨|半夜"
]
, prod = \_ -> tt $ mkOkForThisNext $ hour False 0
}
ruleInduringThePartofday :: Rule
ruleInduringThePartofday = Rule
{ name = "in|during the <part-of-day>"
, pattern =
[ Predicate isAPartOfDay
, regex "点|點"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleIntersectBy :: Rule
ruleIntersectBy = Rule
{ name = "intersect by \",\""
, pattern =
[ Predicate isNotLatent
, regex ","
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
-- | Numeric month\/day written with a slash, e.g. "7/21".
ruleMmdd :: Rule
ruleMmdd = Rule
  { name = "mm/dd"
  , pattern =
    [ regex "(0?[1-9]|1[0-2])/(3[01]|[12]\\d|0?[1-9])"
    ]
  , prod = \tokens -> case tokens of
      (Token RegexMatch (GroupMatch (monthStr:dayStr:_)):_) ->
        -- Both captures must parse as integers for a match to be produced.
        monthDay <$> parseInt monthStr <*> parseInt dayStr >>= tt
      _ -> Nothing
  }
ruleIntegerLatentTimeofday :: Rule
ruleIntegerLatentTimeofday = Rule
{ name = "<integer> (latent time-of-day)"
, pattern =
[ Predicate $ isIntegerBetween 0 23
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ hour True v
_ -> Nothing
}
ruleYearNumericWithYearSymbol :: Rule
ruleYearNumericWithYearSymbol = Rule
{ name = "year (numeric with year symbol)"
, pattern =
[ Predicate $ isIntegerBetween 1000 9999
, regex "年"
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt $ year v
_ -> Nothing
}
-- | A year spelled as four separate digit numerals followed by 年,
-- recombined positionally into a single year value.
ruleYearNumericWithYearSymbol2 :: Rule
ruleYearNumericWithYearSymbol2 = Rule
  { name = "xxxx year"
  , pattern =
    [ dimension Numeral
    , dimension Numeral
    , dimension Numeral
    , dimension Numeral
    , regex "年"
    ]
  , prod = \tokens -> case tokens of
      (Token Numeral NumeralData{TNumeral.value = thousands}:
       Token Numeral NumeralData{TNumeral.value = hundreds}:
       Token Numeral NumeralData{TNumeral.value = tens}:
       Token Numeral NumeralData{TNumeral.value = units}:
       _) ->
        tt . year . floor $
          thousands * 1000 + hundreds * 100 + tens * 10 + units
      _ -> Nothing
  }
ruleDurationAgo :: Rule
ruleDurationAgo = Rule
{ name = "<duration> ago"
, pattern =
[ dimension Duration
, regex "(之)?前"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) ->
tt $ durationAgo dd
_ -> Nothing
}
ruleHhmmMilitaryTimeofday :: Rule
ruleHhmmMilitaryTimeofday = Rule
{ name = "hhmm (military time-of-day)"
, pattern =
[ regex "((?:[01]?\\d)|(?:2[0-3]))([0-5]\\d)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (hh:mm:_)):_) -> do
h <- parseInt hh
m <- parseInt mm
tt . mkLatent $ hourMinute False h m
_ -> Nothing
}
-- | "last n <cycle>": 上\/前 + integer + grain (e.g. 前三天).
-- Produces n cycles counted backwards from now (negative offset).
ruleLastNCycle :: Rule
ruleLastNCycle = Rule
  { name = "last n <cycle>"
  , pattern =
    [ regex "上|前"
    , Predicate $ isIntegerBetween 1 9999
    , dimension TimeGrain
    ]
  , prod = \tokens -> case tokens of
      (_:token:Token TimeGrain grain:_) -> do
        v <- getIntValue token
        tt $ cycleN True grain (- v)
      _ -> Nothing
  }
-- | "n <cycle> last": integer + grain + (之)前 (e.g. 三天前).
-- Postfix counterpart of 'ruleLastNCycle', same negative-offset result.
ruleNCycleLast :: Rule
ruleNCycleLast = Rule
  { name = "n <cycle> last"
  , pattern =
    [ Predicate $ isIntegerBetween 1 9999
    , dimension TimeGrain
    , regex "(之)?前"
    ]
  , prod = \tokens -> case tokens of
      (token:Token TimeGrain grain:_) -> do
        v <- getIntValue token
        tt $ cycleN True grain (- v)
      _ -> Nothing
  }
ruleIntersect :: Rule
ruleIntersect = Rule
{ name = "intersect"
, pattern =
[ Predicate isNotLatent
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleNthTimeOfTime :: Rule
ruleNthTimeOfTime = Rule
{ name = "nth <time> of <time>"
, pattern =
[ dimension Time
, dimension Ordinal
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Ordinal od:Token Time td2:_) -> Token Time .
predNth (TOrdinal.value od - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleWeekend :: Rule
ruleWeekend = Rule
{ name = "week-end"
, pattern =
[ regex "周末|週末"
]
, prod = \_ -> tt $ mkOkForThisNext weekend
}
ruleLastYear :: Rule
ruleLastYear = Rule
{ name = "last year"
, pattern =
[ regex "去年|上年"
]
, prod = \_ -> tt . cycleNth TG.Year $ - 1
}
ruleDimTimePartofday :: Rule
ruleDimTimePartofday = Rule
{ name = "<dim time> <part-of-day>"
, pattern =
[ dimension Time
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleNextTime :: Rule
ruleNextTime = Rule
{ name = "next <time>"
, pattern =
[ regex "明|下(个|個)?"
, Predicate isOkWithThisNext
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 1 False td
_ -> Nothing
}
ruleYyyymmdd :: Rule
ruleYyyymmdd = Rule
{ name = "yyyy-mm-dd"
, pattern =
[ regex "(\\d{2,4})-(0?[1-9]|1[0-2])-(3[01]|[12]\\d|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (yy:mm:dd:_)):_) -> do
y <- parseInt yy
m <- parseInt mm
d <- parseInt dd
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleNextNCycle :: Rule
ruleNextNCycle = Rule
{ name = "next n <cycle>"
, pattern =
[ regex "下|后|後"
, Predicate $ isIntegerBetween 1 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain v
_ -> Nothing
}
-- | "n <cycle> next": integer + grain + 下\/后\/後 (e.g. 三天後).
-- Postfix counterpart of 'ruleNextNCycle'.
--
-- Renamed from "next n <cycle>", which duplicated 'ruleNextNCycle''s
-- name: Duckling keys classifier training and rule traces by rule name,
-- so names should be unique; "n <cycle> next" also mirrors the sibling
-- 'ruleNCycleLast' ("n <cycle> last").
ruleNCycleNext :: Rule
ruleNCycleNext = Rule
  { name = "n <cycle> next"
  , pattern =
    [ Predicate $ isIntegerBetween 1 9999
    , dimension TimeGrain
    , regex "下|(之)?后|(之)?後"
    ]
  , prod = \tokens -> case tokens of
      (token:Token TimeGrain grain:_) -> do
        v <- getIntValue token
        tt $ cycleN True grain v
      _ -> Nothing
  }
ruleMorning :: Rule
ruleMorning = Rule
{ name = "morning"
, pattern =
[ regex "早上|早晨|\x671d(\x982d)?早"
]
, prod = \_ ->
let from = hour False 4
to = hour False 12
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleNextYear :: Rule
ruleNextYear = Rule
{ name = "next year"
, pattern =
[ regex "明年|下年"
]
, prod = \_ -> tt $ cycleNth TG.Year 1
}
ruleThisCycle :: Rule
ruleThisCycle = Rule
{ name = "this <cycle>"
, pattern =
[ regex "(这|這)(一)?|今個"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 0
_ -> Nothing
}
ruleThisTime :: Rule
ruleThisTime = Rule
{ name = "this <time>"
, pattern =
[ regex "今(个|個)?|这(个)?|這(個)?"
, Predicate isOkWithThisNext
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 False td
_ -> Nothing
}
ruleYesterday :: Rule
ruleYesterday = Rule
{ name = "yesterday"
, pattern =
[ regex "昨天|昨日|\x5c0b日"
]
, prod = \_ -> tt . cycleNth TG.Day $ - 1
}
ruleLastNight :: Rule
ruleLastNight = Rule
{ name = "last night"
, pattern =
[ regex "昨晚|昨天晚上|\x5c0b晚"
]
, prod = \_ -> do
let td1 = cycleNth TG.Day $ - 1
td2 <- interval TTime.Open (hour False 18) (hour False 0)
Token Time . partOfDay <$> intersect td1 td2
}
ruleTimeofdayAmpm :: Rule
ruleTimeofdayAmpm = Rule
{ name = "<time-of-day> am|pm"
, pattern =
[ Predicate isATimeOfDay
, regex "([ap])(\\s|\\.)?m?\\.?"
]
, prod = \tokens -> case tokens of
(Token Time td:Token RegexMatch (GroupMatch (ap:_)):_) ->
tt $ timeOfDayAMPM (Text.toLower ap == "a") td
_ -> Nothing
}
ruleNamedmonthDayofmonth :: Rule
ruleNamedmonthDayofmonth = Rule
{ name = "<named-month> <day-of-month>"
, pattern =
[ Predicate isAMonth
, dimension Numeral
, regex "号|號|日"
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleDayOfMonth :: Rule
ruleDayOfMonth = Rule
{ name = "<time> <day-of-month>"
, pattern =
[ Predicate $ isIntegerBetween 1 31
, regex "号|號|日"
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt $ dayOfMonth v
_ -> Nothing
}
rulePartofdayDimTime :: Rule
rulePartofdayDimTime = Rule
{ name = "<part-of-day> <dim time>"
, pattern =
[ Predicate isAPartOfDay
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleTonight :: Rule
ruleTonight = Rule
{ name = "tonight"
, pattern =
[ regex "今晚|今天晚上"
]
, prod = \_ -> do
td2 <- interval TTime.Open (hour False 18) (hour False 0)
Token Time . partOfDay <$> intersect today td2
}
ruleTomorrowNight :: Rule
ruleTomorrowNight = Rule
{ name = "tomorrow night"
, pattern =
[ regex "明晚|明天晚上|\x807d晚"
]
, prod = \_ -> do
let td1 = cycleNth TG.Day 1
td2 <- interval TTime.Open (hour False 18) (hour False 0)
Token Time . partOfDay <$> intersect td1 td2
}
ruleThisYear :: Rule
ruleThisYear = Rule
{ name = "this year"
, pattern =
[ regex "今年"
]
, prod = \_ -> tt $ cycleNth TG.Year 0
}
ruleAbsorptionOfAfterNamedDay :: Rule
ruleAbsorptionOfAfterNamedDay = Rule
{ name = "absorption of , after named day"
, pattern =
[ Predicate isADayOfWeek
, regex ","
]
, prod = \tokens -> case tokens of
(x:_) -> Just x
_ -> Nothing
}
ruleEveningnight :: Rule
ruleEveningnight = Rule
{ name = "evening|night"
, pattern =
[ regex "晚上|晚间"
]
, prod = \_ ->
let from = hour False 18
to = hour False 0
in Token Time . partOfDay . mkLatent <$>
interval TTime.Open from to
}
ruleMmddyyyy :: Rule
ruleMmddyyyy = Rule
{ name = "mm/dd/yyyy"
, pattern =
[ regex "(0?[1-9]|1[0-2])/(3[01]|[12]\\d|0?[1-9])/(\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (mm:dd:yy:_)):_) -> do
y <- parseInt yy
m <- parseInt mm
d <- parseInt dd
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleTomorrow :: Rule
ruleTomorrow = Rule
{ name = "tomorrow"
, pattern =
[ regex "明天|明日|\x807d日"
]
, prod = \_ -> tt $ cycleNth TG.Day 1
}
ruleTimeofdayOclock :: Rule
ruleTimeofdayOclock = Rule
{ name = "<time-of-day> o'clock"
, pattern =
[ Predicate isATimeOfDay
, regex "點|点|時"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleTimezone :: Rule
ruleTimezone = Rule
{ name = "<time> timezone"
, pattern =
[ Predicate $ and . sequence [isNotLatent, isATimeOfDay]
, regex "\\b(YEKT|YEKST|YAKT|YAKST|WITA|WIT|WIB|WGT|WGST|WFT|WET|WEST|WAT|WAST|VUT|VLAT|VLAST|VET|UZT|UYT|UYST|UTC|ULAT|TVT|TMT|TLT|TKT|TJT|TFT|TAHT|SST|SRT|SGT|SCT|SBT|SAST|SAMT|RET|PYT|PYST|PWT|PST|PONT|PMST|PMDT|PKT|PHT|PHOT|PGT|PETT|PETST|PET|PDT|OMST|OMSST|NZST|NZDT|NUT|NST|NPT|NOVT|NOVST|NFT|NDT|NCT|MYT|MVT|MUT|MST|MSK|MSD|MMT|MHT|MDT|MAWT|MART|MAGT|MAGST|LINT|LHST|LHDT|KUYT|KST|KRAT|KRAST|KGT|JST|IST|IRST|IRKT|IRKST|IRDT|IOT|IDT|ICT|HOVT|HKT|GYT|GST|GMT|GILT|GFT|GET|GAMT|GALT|FNT|FKT|FKST|FJT|FJST|EST|EGT|EGST|EET|EEST|EDT|ECT|EAT|EAST|EASST|DAVT|ChST|CXT|CVT|CST|COT|CLT|CLST|CKT|CHAST|CHADT|CET|CEST|CDT|CCT|CAT|CAST|BTT|BST|BRT|BRST|BOT|BNT|AZT|AZST|AZOT|AZOST|AWST|AWDT|AST|ART|AQTT|ANAT|ANAST|AMT|AMST|ALMT|AKST|AKDT|AFT|AEST|AEDT|ADT|ACST|ACDT)\\b"
]
, prod = \tokens -> case tokens of
(Token Time td:
Token RegexMatch (GroupMatch (tz:_)):
_) -> Token Time <$> inTimezone (Text.toUpper tz) td
_ -> Nothing
}
-- | Day-of-week rules, Monday..Sunday, covering the common simplified
-- and traditional Chinese variants (星期\/周\/週\/礼拜\/禮拜 forms).
ruleDaysOfWeek :: [Rule]
ruleDaysOfWeek = mkRuleDaysOfWeek
  [ ( "Monday", "星期一|周一|礼拜一|禮拜一|週一" )
  , ( "Tuesday", "星期二|周二|礼拜二|禮拜二|週二" )
  , ( "Wednesday", "星期三|周三|礼拜三|禮拜三|週三" )
  , ( "Thursday", "星期四|周四|礼拜四|禮拜四|週四" )
  , ( "Friday", "星期五|周五|礼拜五|禮拜五|週五" )
  , ( "Saturday", "星期六|周六|礼拜六|禮拜六|週六" )
  , ( "Sunday", "星期日|星期天|礼拜天|周日|禮拜天|週日|禮拜日" )
  ]
-- | Month-name rules: each month written either with Chinese numerals
-- or Arabic digits, followed by 月 and an optional 份.
ruleMonths :: [Rule]
ruleMonths = mkRuleMonths
  [ ( "January", "(一|1)月(份)?" )
  , ( "February", "(二|2)月(份)?" )
  , ( "March", "(三|3)月(份)?" )
  , ( "April", "(四|4)月(份)?" )
  , ( "May", "(五|5)月(份)?" )
  , ( "June", "(六|6)月(份)?" )
  , ( "July", "(七|7)月(份)?" )
  , ( "August", "(八|8)月(份)?" )
  , ( "September", "(九|9)月(份)?" )
  , ( "October", "(十|10)月(份)?" )
  , ( "November", "(十一|11)月(份)?" )
  , ( "December", "(十二|12)月(份)?" )
  ]
rulePeriodicHolidays :: [Rule]
rulePeriodicHolidays = mkRuleHolidays
-- Fixed dates, year over year
[ ( "中国共产党的生日", "中(国共产党诞|國共產黨誕)生日|(党|黨)的生日", monthDay 7 1 )
, ( "愚人节", "愚人(节|節)", monthDay 4 1 )
, ( "建军节", "(中国人民解放(军|軍)|八一)?建(军节|軍節)", monthDay 8 1 )
, ( "植树节", "中(国植树节|國植樹節)", monthDay 3 12 )
, ( "五四青年节", "(中(国|國))?(五四|54)?青年(节|節)", monthDay 5 4 )
, ( "圣诞节", "(圣诞|聖誕)(节|節)?", monthDay 12 25 )
, ( "平安夜", "(平安|聖誕)夜", monthDay 12 24 )
, ( "哥伦布日", "哥(伦|倫)布日", monthDay 10 12 )
, ( "双十一", "(双|雙)(十一|11)", monthDay 11 11 )
, ( "万圣节", "万圣节|萬聖節", monthDay 10 31 )
, ( "香港回归纪念日", "香港回(归纪|歸紀)念日", monthDay 7 1 )
, ( "人权日", "人(权|權)日", monthDay 12 10 )
, ( "美国独立日", "(美国)?(独|獨)立日", monthDay 7 4 )
, ( "儿童节", "(国际|國際)?(六一|61)?(儿|兒)童(节|節)", monthDay 6 1 )
, ( "国际慈善日", "(国际|國際)慈善日", monthDay 9 5 )
, ( "国际瑜伽日", "(国际|國際)瑜伽日", monthDay 6 21 )
, ( "国际爵士日", "(国际|國際)爵士日", monthDay 4 30 )
, ( "国际奥林匹克日", "(国际|國際)奥林匹克日", monthDay 6 23 )
, ( "妇女节", "(国际劳动|國際勞動|三八)?(妇|婦)女(节|節)", monthDay 3 8 )
, ( "劳动节", "(五一|51)?(国际|國際)?(劳动|勞動)(节|節)", monthDay 5 1 )
, ( "国际青年节", "(国际|國際)青年(节|節)", monthDay 8 12 )
, ( "澳门回归纪念日", "澳(门|門)回(归纪|歸紀)念日", monthDay 12 20 )
, ( "全国爱牙日", "全(国爱|國愛)牙日", monthDay 9 20 )
, ( "全国爱耳日", "全(国爱|國愛)耳日", monthDay 3 3 )
, ( "全国爱眼日", "全(国爱|國愛)眼日", monthDay 6 6 )
, ( "南京大屠杀纪念日", "南京大屠(杀纪|殺紀)念日", monthDay 12 13 )
, ( "辛亥革命纪念日", "辛亥革命(纪|紀)念日", monthDay 10 10 )
, ( "元旦", "元旦(节|節)?|((公|(阳|陽))(历|曆))?新年", monthDay 1 1 )
, ( "新年夜", "新年夜", monthDay 12 31 )
, ( "情人节", "(情人|(圣瓦伦丁|聖瓦倫丁))(节|節)", monthDay 2 14 )
, ( "清明节", "清明(节|節)", monthDay 4 5 )
, ( "光棍节", "光棍(节|節)", monthDay 11 11 )
, ( "圣帕特里克节", "圣帕特里克节|聖帕特裏克節", monthDay 3 17 )
, ( "教师节", "(中(国|國))?教师(节|節)", monthDay 9 10 )
, ( "退伍军人节", "(退伍(军|軍)人|老兵)(节|節)", monthDay 11 11 )
, ( "白色情人节", "白色情人(节|節)", monthDay 3 14 )
, ( "世界艾滋病日", "世界艾滋病日", monthDay 12 1 )
, ( "世界献血日", "世界(献|獻)血日", monthDay 6 14 )
, ( "世界癌症日", "世界癌(症|癥)日", monthDay 2 4 )
, ( "国际消费者权益日", "(国际|世界)?(消费者权益|消費者權益)日|三一五", monthDay 3 15 )
, ( "世界糖尿病日", "世界糖尿病日", monthDay 11 14 )
, ( "世界环境日", "世界(环|環)境日", monthDay 6 5 )
, ( "世界粮食日", "世界((粮|糧)食|食物)日", monthDay 10 16 )
, ( "世界心脏日", "世界心(脏|臟)日", monthDay 9 29 )
, ( "世界海洋日", "世界海洋日", monthDay 6 8 )
, ( "世界诗歌日", "世界(诗|詩)歌日", monthDay 3 21 )
, ( "世界人口日", "世界人口日", monthDay 7 11 )
, ( "世界难民日", "世界(难|難)民日", monthDay 6 20 )
, ( "世界教师日", "世界教师日", monthDay 10 5 )
, ( "世界旅游日", "世界旅游日", monthDay 9 27 )
-- Fixed day/week/month, year over year
, ( "父亲节", "父(亲节|親節)", nthDOWOfMonth 3 7 6 )
, ( "马丁路德金日", "(马|馬)丁路德金((纪|紀)念)?日", nthDOWOfMonth 3 1 1)
, ( "母亲节", "母(亲节|親節)", nthDOWOfMonth 2 7 5 )
]
ruleComputedHolidays :: [Rule]
ruleComputedHolidays = mkRuleHolidays
[ ( "耶稣升天节", "耶(稣|穌)升天(节|節|日)"
, cycleNthAfter False TG.Day 39 easterSunday )
, ( "大斋首日", "大(斋|齋)首日|(圣|聖)灰((礼仪|禮儀)?日|星期三)|灰日"
, cycleNthAfter False TG.Day (-46) easterSunday )
, ( "阿舒拉节", "阿舒拉(节|節)"
, cycleNthAfter False TG.Day 9 muharram )
, ( "克哈特普迦节", "克哈特普迦(节|節)"
, cycleNthAfter False TG.Day 8 dhanteras )
, ( "春节", "春(节|節)|(农历|農曆|唐人)新年|新(正|春)|正月(正(时|時)|朔日)|岁首"
, chineseNewYear )
, ( "基督圣体节", "基督(圣体|聖體)((圣|聖)血)?((节|節)|瞻(礼|禮))"
, cycleNthAfter False TG.Day 60 easterSunday )
, ( "排灯节", "(排|万|萬|印度)(灯节|燈節)"
, cycleNthAfter False TG.Day 2 dhanteras )
, ( "复活节星期一", "(复|復)活(节|節)星期一"
, cycleNthAfter False TG.Day 1 easterSunday )
, ( "复活节", "(复|復)活(节|節)|主(复|復)活日", easterSunday )
, ( "古尔邦节", "古(尔|爾)邦(节|節)|宰牲(节|節)"
, eidalAdha )
, ( "开斋节", "(开斋|開齋|肉孜|(尔|爾)代)(节|節)", eidalFitr )
, ( "耶稣受难日", "(耶(稣|穌)|主)受(难|難)(节|節|日)|(圣|聖|沈默)(周|週)五"
, cycleNthAfter False TG.Day (-2) easterSunday )
, ( "侯丽节", "((侯|荷)(丽|麗)|洒红|灑紅|欢悦|歡悅|五彩|胡里|好利|霍利)(节|節)"
, cycleNthAfter False TG.Day 39 vasantPanchami )
, ( "圣周六"
, "神?(圣周|聖週)六|(耶(稣|穌)|主)受(难|難)(节|節|日)翌日|(复|復)活(节|節)前夜|黑色星期六"
, cycleNthAfter False TG.Day (-1) easterSunday )
, ( "伊斯兰新年", "伊斯兰(教)?(历)?新年"
, muharram )
, ( "登霄节"
, "(夜行)?登霄(节|節)"
, cycleNthAfter False TG.Day 26 rajab
)
, ( "印度丰收节第四天", "(印度(丰|豐)收|(庞|龐)格(尔|爾))(节|節)第四天"
, cycleNthAfter False TG.Day 2 thaiPongal )
, ( "篝火节", "((犹|猶)太教)?篝火(节|節)", lagBaOmer )
, ( "法令之夜"
, "(法令|命运|权力)之夜"
, cycleNthAfter False TG.Day 26 ramadan )
, ( "拉撒路圣周六", "拉撒路(圣周|聖週)六|拉匝路(周|週)六"
, cycleNthAfter False TG.Day (-8) orthodoxEaster )
, ( "印度丰收节第三天", "(印度(丰|豐)收|(庞|龐)格(尔|爾))(节|節)第三天"
, cycleNthAfter False TG.Day 1 thaiPongal )
, ( "神圣星期四"
, "濯足(节|節)|神(圣|聖)星期四|(圣周|聖週)(星期)?四|(设|設)立(圣|聖)餐日"
, cycleNthAfter False TG.Day (-3) easterSunday )
, ( "圣纪节" , "圣纪节|聖紀節", mawlid )
, ( "东正教复活节星期一", "(东|東)正教(复|復)活(节|節)星期一"
, cycleNthAfter False TG.Day 1 orthodoxEaster )
, ( "东正教复活节", "(东|東)正教((复|復)活(节|節)|主(复|復)活日)"
, orthodoxEaster )
, ( "东正教圣周六"
, "(东|東)正教(神?(圣周|聖週)六|(耶(稣|穌)|主)受(难|難)(节|節|日)翌日|(复|復)活(节|節)前夜)"
, cycleNthAfter False TG.Day (-1) orthodoxEaster )
, ( "东正教耶稣受难日", "(东|東)正教((耶(稣|穌)|主)受(难|難)(节|節|日)|(圣|聖|沈默)(周|週)五)"
, cycleNthAfter False TG.Day (-2) orthodoxEaster )
, ( "东正教棕枝主日", "(东|東)正教((棕|圣|聖)枝|圣树|聖樹|基督苦(难|難))主日"
, cycleNthAfter False TG.Day (-7) orthodoxEaster )
, ( "棕枝主日", "((棕|圣|聖)枝|圣树|聖樹|基督苦(难|難))主日"
, cycleNthAfter False TG.Day (-7) easterSunday )
, ( "五旬节", "五旬(节|節)|(圣灵|聖靈)降(临|臨)(日|节|節)"
, cycleNthAfter False TG.Day 49 easterSunday )
, ( "印度兄妹节", "(印度兄妹|拉克沙班丹)(节|節)", rakshaBandhan )
, ( "圣会节", "(圣|聖)会(节|節)"
, cycleNthAfter False TG.Day 21 roshHashana )
, ( "忏悔节", "忏悔(节|節|火曜日)|煎(饼|餅)星期二"
, cycleNthAfter False TG.Day (-47) easterSunday )
, ( "西赫托拉节", "(西赫(托拉|妥拉)|诵经|誦經|转经|轉經|律法|(欢庆圣|歡慶聖)法)(节|節)"
, cycleNthAfter False TG.Day 22 roshHashana )
, ( "印度丰收节", "(印度|淡米(尔|爾))(丰|豐)收(节|節)", thaiPongal )
, ( "欧南节", "欧南(节|節)", thiruOnam )
, ( "圣殿被毁日", "((圣|聖)殿被毁|禁食)日", tishaBAv )
, ( "圣三一主日", "((天主)?(圣|聖)?三一|(圣|聖)三)(主日|节|節)"
, cycleNthAfter False TG.Day 56 easterSunday )
, ( "十胜节", "(十(胜|勝)|(凯|凱)旋|(圣|聖)母)(节|節)"
, cycleNthAfter False TG.Day 9 navaratri )
-- 15th day of Shevat
, ( "犹太植树节", "((犹|猶)太植(树|樹)|(图|圖)比舍巴特)(节|節)|(树|樹)木新年", tuBishvat )
-- day of the full moon in May in the Gregorian calendar
, ( "卫塞节", "((卫|衛)塞|威瑟|比(萨宝|薩寶)蕉)(节|節)", vesak )
, ( "以色列独立日", "以色列((独|獨)立日|国庆节|國慶節)", yomHaatzmaut )
, ( "赎罪日", "(赎|贖)罪日", cycleNthAfter False TG.Day 9 roshHashana )
, ( "圣灵节庆日", "(圣灵节庆|聖靈節慶)日"
, cycleNthAfter False TG.Day 50 easterSunday )
-- Other
, ( "老板节", "老(板节|闆節)"
, predNthClosest 0 weekday (monthDay 10 16) )
]
ruleComputedHolidays' :: [Rule]
ruleComputedHolidays' = mkRuleHolidays'
[ ( "全球青年服务日", "全球青年服(务|務)日"
, let start = globalYouthServiceDay
end = cycleNthAfter False TG.Day 2 globalYouthServiceDay
in interval TTime.Open start end )
, ( "四旬节", "四旬(节|節)"
, let start = cycleNthAfter False TG.Day (-48) orthodoxEaster
end = cycleNthAfter False TG.Day (-9) orthodoxEaster
in interval TTime.Open start end )
, ( "光明节", "(光明|修殿|(献|獻)殿|(烛|燭)光|哈努卡|(马|馬)加比)(节|節)"
, let start = chanukah
end = cycleNthAfter False TG.Day 7 chanukah
in interval TTime.Open start end )
, ( "大斋期", "大(斋|齋)(期|节|節)"
, let start = cycleNthAfter False TG.Day (-46) easterSunday
end = cycleNthAfter False TG.Day (-1) easterSunday
in interval TTime.Open start end )
, ( "九夜节", "(九夜|(难|難)近母)(节|節)"
, let start = navaratri
end = cycleNthAfter False TG.Day 9 navaratri
in interval TTime.Open start end )
, ( "逾越节", "逾越(节|節)"
, let start = passover
end = cycleNthAfter False TG.Day 8 passover
in interval TTime.Open start end )
, ( "斋月", "(穆斯林)?(斋|齋)月"
, let start = ramadan
end = cycleNthAfter False TG.Day (-1) eidalFitr
in interval TTime.Open start end )
, ( "犹太新年", "(犹|猶)太新年"
, let start = roshHashana
end = cycleNthAfter False TG.Day 2 roshHashana
in interval TTime.Open start end )
, ( "七七节", "(七七|沙夫幼特|(周|週)日|收(获|穫)|新果(实|實))(节|節)"
, let start = cycleNthAfter False TG.Day 50 passover
end = cycleNthAfter False TG.Day 52 passover
in interval TTime.Open start end )
, ( "住棚节", "住棚(节|節)"
, let start = cycleNthAfter False TG.Day 14 roshHashana
end = cycleNthAfter False TG.Day 22 roshHashana
in interval TTime.Open start end )
-- Other
-- Last Saturday of March unless it falls on Holy Saturday
-- In which case it's the Saturday before
, ( "地球一小时", "地球一小(时|時)"
, let holySaturday = cycleNthAfter False TG.Day (-1) easterSunday
tentative = predLastOf (dayOfWeek 6) (month 3)
alternative = cycleNthAfter False TG.Day (-7) tentative
in do
day <- intersectWithReplacement holySaturday tentative alternative
start <- intersect day $ hourMinute True 20 30
interval TTime.Closed start $ cycleNthAfter False TG.Minute 60 start )
]
-- | All time rules for this locale: the individually defined rules
-- below plus the generated day-of-week, month and holiday rule sets.
rules :: [Rule]
rules =
  [ ruleAbsorptionOfAfterNamedDay
  , ruleAfternoon
  , ruleDimTimePartofday
  , ruleDurationAgo
  , ruleDurationFromNow
  , ruleEveningnight
  , ruleHhmmMilitaryTimeofday
  , ruleHhmmTimeofday
  , ruleInDuration
  , ruleInduringThePartofday
  , ruleIntegerLatentTimeofday
  , ruleIntersect
  , ruleIntersectBy
  , ruleLastCycle
  , ruleLastNCycle
  , ruleNCycleLast
  , ruleLastNight
  , ruleLastTime
  , ruleLastYear
  , ruleMidnight
  , ruleMmdd
  , ruleMmddyyyy
  , ruleMorning
  , ruleNamedmonthDayofmonth
  , ruleNextCycle
  , ruleNextNCycle
  , ruleNCycleNext
  , ruleNextTime
  , ruleNextYear
  , ruleNoon
  , ruleNow
  , ruleNthTimeOfTime
  , ruleNthTimeOfTime2
  , rulePartofdayDimTime
  , ruleRelativeMinutesAfterpastIntegerHourofday
  , ruleRelativeMinutesAfterpastNoonmidnight
  , ruleRelativeMinutesTotillbeforeIntegerHourofday
  , ruleRelativeMinutesTotillbeforeNoonmidnight
  , ruleQuarterAfterpastIntegerHourofday
  , ruleQuarterAfterpastNoonmidnight
  , ruleQuarterTotillbeforeIntegerHourofday
  , ruleQuarterTotillbeforeNoonmidnight
  , ruleHalfAfterpastIntegerHourofday
  , ruleHalfAfterpastNoonmidnight
  , ruleHalfTotillbeforeIntegerHourofday
  , ruleHalfTotillbeforeNoonmidnight
  , ruleTheCycleAfterTime
  , ruleTheCycleBeforeTime
  , ruleTheDayAfterTomorrow
  , ruleTwoDaysAfterTomorrow
  , ruleTheDayBeforeYesterday
  , ruleThisCycle
  , ruleThisDayofweek
  , ruleThisTime
  , ruleThisYear
  , ruleNextDayofweek
  , ruleTimeofdayAmpm
  , ruleTimeofdayOclock
  , ruleToday
  , ruleTomorrow
  , ruleTomorrowNight
  , ruleTonight
  , ruleWeekend
  , ruleYearNumericWithYearSymbol
  , ruleYearNumericWithYearSymbol2
  , ruleYesterday
  , ruleYyyymmdd
  , ruleTimezone
  , ruleRelativeMinutesAfterpastIntegerHourofday2
  , ruleRelativeMinutesAfterpastIntegerHourofday3
  , ruleRelativeMinutesAfterpastIntegerHourofday4
  , ruleRelativeMinutesAfterpastIntegerHourofday5
  , ruleRelativeMinutesAfterpastIntegerHourofday6
  , ruleRelativeMinutesAfterpastIntegerHourofday7
  , ruleLastDuration
  , ruleDayOfMonth
  ]
  ++ ruleDaysOfWeek
  ++ ruleMonths
  ++ ruleComputedHolidays
  ++ ruleComputedHolidays'
  ++ rulePeriodicHolidays
|
facebookincubator/duckling
|
Duckling/Time/ZH/Rules.hs
|
bsd-3-clause
| 42,006
| 0
| 23
| 9,428
| 10,926
| 5,985
| 4,941
| 1,100
| 2
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Main
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Entry point to the default cabal-install front-end.
-----------------------------------------------------------------------------
module Main (main) where
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand, withRepoContext
, ConfigFlags(..)
, ConfigExFlags(..), defaultConfigExFlags, configureExCommand
, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..)
, buildCommand, replCommand, testCommand, benchmarkCommand
, InstallFlags(..), defaultInstallFlags
, installCommand, upgradeCommand, uninstallCommand
, FetchFlags(..), fetchCommand
, FreezeFlags(..), freezeCommand
, genBoundsCommand
, GetFlags(..), getCommand, unpackCommand
, checkCommand
, formatCommand
, updateCommand
, ListFlags(..), listCommand
, InfoFlags(..), infoCommand
, UploadFlags(..), uploadCommand
, ReportFlags(..), reportCommand
, runCommand
, InitFlags(initVerbosity), initCommand
, SDistFlags(..), SDistExFlags(..), sdistCommand
, Win32SelfUpgradeFlags(..), win32SelfUpgradeCommand
, ActAsSetupFlags(..), actAsSetupCommand
, SandboxFlags(..), sandboxCommand
, ExecFlags(..), execCommand
, UserConfigFlags(..), userConfigCommand
, reportCommand
, manpageCommand
)
import Distribution.Simple.Setup
( HaddockFlags(..), haddockCommand, defaultHaddockFlags
, HscolourFlags(..), hscolourCommand
, ReplFlags(..)
, CopyFlags(..), copyCommand
, RegisterFlags(..), registerCommand
, CleanFlags(..), cleanCommand
, TestFlags(..), BenchmarkFlags(..)
, Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag
, configAbsolutePaths
)
import Distribution.Client.SetupWrapper
( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Client.Config
( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff
, userConfigUpdate, createDefaultConfigFile, getConfigFilePath )
import Distribution.Client.Targets
( readUserTargets )
import qualified Distribution.Client.List as List
( list, info )
import qualified Distribution.Client.CmdConfigure as CmdConfigure
import qualified Distribution.Client.CmdBuild as CmdBuild
import qualified Distribution.Client.CmdRepl as CmdRepl
import Distribution.Client.Install (install)
import Distribution.Client.Configure (configure)
import Distribution.Client.Update (update)
import Distribution.Client.Exec (exec)
import Distribution.Client.Fetch (fetch)
import Distribution.Client.Freeze (freeze)
import Distribution.Client.GenBounds (genBounds)
import Distribution.Client.Check as Check (check)
--import Distribution.Client.Clean (clean)
import qualified Distribution.Client.Upload as Upload
import Distribution.Client.Run (run, splitRunArgs)
import Distribution.Client.SrcDist (sdist)
import Distribution.Client.Get (get)
import Distribution.Client.Sandbox (sandboxInit
,sandboxAddSource
,sandboxDelete
,sandboxDeleteSource
,sandboxListSources
,sandboxHcPkg
,dumpPackageEnvironment
,getSandboxConfigFilePath
,loadConfigOrSandboxConfig
,findSavedDistPref
,initPackageDBIfNeeded
,maybeWithSandboxDirOnSearchPath
,maybeWithSandboxPackageInfo
,WereDepsReinstalled(..)
,maybeReinstallAddSourceDeps
,tryGetIndexFilePath
,sandboxBuildDir
,updateSandboxConfigFileFlag
,updateInstallDirs
,configCompilerAux'
,getPersistOrConfigCompiler
,configPackageDB')
import Distribution.Client.Sandbox.PackageEnvironment
(setPackageDB
,userPackageEnvironmentFile)
import Distribution.Client.Sandbox.Timestamp (maybeAddCompilerTimestampRecord)
import Distribution.Client.Sandbox.Types (UseSandbox(..), whenUsingSandbox)
import Distribution.Client.Tar (createTarGzFile)
import Distribution.Client.Types (Password (..))
import Distribution.Client.Init (initCabal)
import Distribution.Client.Manpage (manpage)
import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade
import Distribution.Client.Utils (determineNumJobs
#if defined(mingw32_HOST_OS)
,relaxEncodingErrors
#endif
,existsAndIsMoreRecentThan)
import Distribution.Package (packageId)
import Distribution.PackageDescription
( BuildType(..), Executable(..), buildable )
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.PackageDescription.PrettyPrint
( writeGenericPackageDescription )
import qualified Distribution.Simple as Simple
import qualified Distribution.Make as Make
import Distribution.Simple.Build
( startInterpreter )
import Distribution.Simple.Command
( CommandParse(..), CommandUI(..), Command, CommandSpec(..)
, CommandType(..), commandsRun, commandAddAction, hiddenCommand
, commandFromSpec)
import Distribution.Simple.Compiler
( Compiler(..) )
import Distribution.Simple.Configure
( checkPersistBuildConfigOutdated, configCompilerAuxEx
, ConfigStateFileError(..), localBuildInfoFile
, getPersistBuildConfig, tryGetPersistBuildConfig )
import qualified Distribution.Simple.LocalBuildInfo as LBI
import Distribution.Simple.Program (defaultProgramConfiguration
,configureAllKnownPrograms
,simpleProgramInvocation
,getProgramInvocationOutput)
import Distribution.Simple.Program.Db (reconfigurePrograms)
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Utils
( cabalVersion, die, notice, info, topHandler
, findPackageDesc, tryFindPackageDesc )
import Distribution.Text
( display )
import Distribution.Verbosity as Verbosity
( Verbosity, normal )
import Distribution.Version
( Version(..), orLaterVersion )
import qualified Paths_cabal_install (version)
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure, exitSuccess)
import System.FilePath ( dropExtension, splitExtension
, takeExtension, (</>), (<.>))
import System.IO ( BufferMode(LineBuffering), hSetBuffering
#ifdef mingw32_HOST_OS
, stderr
#endif
, stdout )
import System.Directory (doesFileExist, getCurrentDirectory)
import Data.List (intercalate)
import Data.Maybe (listToMaybe)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(..))
import Control.Applicative (pure, (<$>))
#endif
import Control.Monad (when, unless)
-- | Entry point
--
main :: IO ()
main = do
  -- Enable line buffering so that we can get fast feedback even when piped.
  -- This is especially important for CI and build systems.
  hSetBuffering stdout LineBuffering
  -- The default locale encoding for Windows CLI is not UTF-8 and printing
  -- Unicode characters to it will fail unless we relax the handling of encoding
  -- errors when writing to stderr and stdout.
#ifdef mingw32_HOST_OS
  relaxEncodingErrors stdout
  relaxEncodingErrors stderr
#endif
  -- Hand the raw command line over to the real driver.
  getArgs >>= mainWorker
-- | Parse the command line and dispatch to the matched command's
-- action, printing help, option listings, version information or
-- errors as appropriate. Runs under 'topHandler' so exceptions are
-- reported uniformly.
mainWorker :: [String] -> IO ()
mainWorker args = topHandler $
  case commandsRun (globalCommand commands) commands args of
    CommandHelp   help                 -> printGlobalHelp help
    CommandList   opts                 -> printOptionsList opts
    CommandErrors errs                 -> printErrors errs
    CommandReadyToGo (globalFlags, commandParse)  ->
      case commandParse of
        -- --version / --numeric-version take precedence over whatever
        -- subcommand (if any) was parsed.
        _ | fromFlagOrDefault False (globalVersion globalFlags)
            -> printVersion
          | fromFlagOrDefault False (globalNumericVersion globalFlags)
            -> printNumericVersion
        CommandHelp     help           -> printCommandHelp help
        CommandList     opts           -> printOptionsList opts
        CommandErrors   errs           -> printErrors errs
        CommandReadyToGo action        -> do
          -- Resolve the sandbox config file flag (may come from the
          -- environment) before running the command's action.
          globalFlags' <- updateSandboxConfigFileFlag globalFlags
          action globalFlags'
  where
    -- Help text for a single subcommand.
    printCommandHelp help = do
      pname <- getProgName
      putStr (help pname)
    -- Top-level help; also points the user at the config file and notes
    -- when it does not exist yet.
    printGlobalHelp help = do
      pname <- getProgName
      configFile <- defaultConfigFile
      putStr (help pname)
      putStr $ "\nYou can edit the cabal configuration file to set defaults:\n"
            ++ "  " ++ configFile ++ "\n"
      exists <- doesFileExist configFile
      when (not exists) $
          putStrLn $ "This file will be generated with sensible "
                  ++ "defaults if you run 'cabal update'."
    printOptionsList = putStr . unlines
    printErrors errs = die $ intercalate "\n" errs
    printNumericVersion = putStrLn $ display Paths_cabal_install.version
    printVersion        = putStrLn $ "cabal-install version "
                                  ++ display Paths_cabal_install.version
                                  ++ "\ncompiled using version "
                                  ++ display cabalVersion
                                  ++ " of the Cabal library "
    -- The full command table; regular commands are listed in help,
    -- hidden ones (aliases, internal helpers) are accepted but unlisted.
    commands = map commandFromSpec commandSpecs
    commandSpecs =
      [ regularCmd  installCommand installAction
      , regularCmd  updateCommand updateAction
      , regularCmd  listCommand listAction
      , regularCmd  infoCommand infoAction
      , regularCmd  fetchCommand fetchAction
      , regularCmd  freezeCommand freezeAction
      , regularCmd  getCommand getAction
      , hiddenCmd   unpackCommand unpackAction
      , regularCmd  checkCommand checkAction
      , regularCmd  sdistCommand sdistAction
      , regularCmd  uploadCommand uploadAction
      , regularCmd  reportCommand reportAction
      , regularCmd  runCommand runAction
      , regularCmd  initCommand initAction
      , regularCmd  configureExCommand configureAction
      , regularCmd  buildCommand buildAction
      , regularCmd  replCommand replAction
      , regularCmd  sandboxCommand sandboxAction
      , regularCmd  haddockCommand haddockAction
      , regularCmd  execCommand execAction
      , regularCmd  userConfigCommand userConfigAction
      , regularCmd  cleanCommand cleanAction
      , regularCmd  genBoundsCommand genBoundsAction
      , wrapperCmd  copyCommand copyVerbosity copyDistPref
      , wrapperCmd  hscolourCommand hscolourVerbosity hscolourDistPref
      , wrapperCmd  registerCommand regVerbosity regDistPref
      , regularCmd  testCommand testAction
      , regularCmd  benchmarkCommand benchmarkAction
      , hiddenCmd   uninstallCommand uninstallAction
      , hiddenCmd   formatCommand formatAction
      , hiddenCmd   upgradeCommand upgradeAction
      , hiddenCmd   win32SelfUpgradeCommand win32SelfUpgradeAction
      , hiddenCmd   actAsSetupCommand actAsSetupAction
      , hiddenCmd   manpageCommand (manpageAction commandSpecs)
      , hiddenCmd   installCommand { commandName = "new-configure" }
                    CmdConfigure.configureAction
      , hiddenCmd   installCommand { commandName = "new-build" }
                    CmdBuild.buildAction
      , hiddenCmd   installCommand { commandName = "new-repl" }
                    CmdRepl.replAction
      ]
-- | Every top-level command action receives the parsed global flags and
-- runs in 'IO'.
type Action = GlobalFlags -> IO ()
-- | Package a command UI together with its action as a 'CommandSpec'
-- that shows up in the normal command listing.
regularCmd :: CommandUI flags -> (flags -> [String] -> action)
           -> CommandSpec action
regularCmd ui action =
  CommandSpec ui (\ui' -> commandAddAction ui' action) NormalCommand
-- | Like 'regularCmd', but the resulting command is marked hidden so it
-- does not appear in the command listing.
hiddenCmd :: CommandUI flags -> (flags -> [String] -> action)
          -> CommandSpec action
hiddenCmd ui action = CommandSpec ui mkCommand HiddenCommand
  where
    -- Attach the action first, then hide the resulting command.
    mkCommand ui' = hiddenCommand (commandAddAction ui' action)
-- | Build a visible 'CommandSpec' for a command that simply delegates to
-- the package's Setup script via 'wrapperAction'.
wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity)
           -> (flags -> Flag String) -> CommandSpec Action
wrapperCmd ui verbosity distPref = CommandSpec ui mkCommand NormalCommand
  where
    mkCommand ui' = wrapperAction ui' verbosity distPref
-- | Make a 'Command' whose action forwards the user's flags unchanged to
-- the corresponding Setup.hs command through 'setupWrapper'.  Used for the
-- simple pass-through commands (copy, hscolour, register).
wrapperAction :: Monoid flags
              => CommandUI flags
              -> (flags -> Flag Verbosity) -- ^ how to read the verbosity flag
              -> (flags -> Flag String)    -- ^ how to read the \"dist\" prefix flag
              -> Command Action
wrapperAction command verbosityFlag distPrefFlag =
  commandAddAction command
    { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do
    let verbosity = fromFlagOrDefault normal (verbosityFlag flags)
    -- Load the saved (possibly sandbox) config only to resolve the
    -- \"dist\" prefix actually in effect.
    (_, config) <- loadConfigOrSandboxConfig verbosity globalFlags
    distPref <- findSavedDistPref config (distPrefFlag flags)
    let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref }
    setupWrapper verbosity setupScriptOptions Nothing
                 command (const flags) extraArgs
-- | Action for the @configure@ command: merge the saved config with the
-- command-line flags, resolve the compiler, prepare the sandbox package DB
-- if applicable, and run the actual configure step.
configureAction :: (ConfigFlags, ConfigExFlags)
                -> [String] -> Action
configureAction (configFlags, configExFlags) extraArgs globalFlags = do
  let verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
  (useSandbox, config) <- fmap
                          (updateInstallDirs (configUserInstall configFlags))
                          (loadConfigOrSandboxConfig verbosity globalFlags)
  -- Saved flags come first so command-line flags override them.
  let configFlags' = savedConfigureFlags config `mappend` configFlags
      configExFlags' = savedConfigureExFlags config `mappend` configExFlags
      globalFlags' = savedGlobalFlags config `mappend` globalFlags
  (comp, platform, conf) <- configCompilerAuxEx configFlags'
  -- If we're working inside a sandbox and the user has set the -w option, we
  -- may need to create a sandbox-local package DB for this compiler and add a
  -- timestamp record for this compiler to the timestamp file.
  let configFlags'' = case useSandbox of
        NoSandbox -> configFlags'
        (UseSandbox sandboxDir) -> setPackageDB sandboxDir
                                   comp platform configFlags'
  whenUsingSandbox useSandbox $ \sandboxDir -> do
    initPackageDBIfNeeded verbosity configFlags'' comp conf
    -- NOTE: We do not write the new sandbox package DB location to
    -- 'cabal.sandbox.config' here because 'configure -w' must not affect
    -- subsequent 'install' (for UI compatibility with non-sandboxed mode).
    indexFile <- tryGetIndexFilePath config
    maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
      (compilerId comp) platform
  maybeWithSandboxDirOnSearchPath useSandbox $
    withRepoContext verbosity globalFlags' $ \repoContext ->
      configure verbosity
                (configPackageDB' configFlags'')
                repoContext
                comp platform conf configFlags'' configExFlags' extraArgs
-- | Action for the @build@ command: reconfigure if necessary, then build.
buildAction :: (BuildFlags, BuildExFlags) -> [String] -> Action
buildAction (buildFlags, buildExFlags) extraArgs globalFlags = do
  let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags)
      noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck
                    (buildOnly buildExFlags)
  -- Calls 'configureAction' to do the real work, so nothing special has to be
  -- done to support sandboxes.
  (useSandbox, config, distPref) <- reconfigure verbosity
                                    (buildDistPref buildFlags)
                                    mempty [] globalFlags noAddSource
                                    (buildNumJobs buildFlags) (const Nothing)
  maybeWithSandboxDirOnSearchPath useSandbox $
    build verbosity config distPref buildFlags extraArgs
-- | Actually do the work of building the package. This is separate from
-- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke
-- 'reconfigure' twice.
build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO ()
build verbosity config distPref buildFlags extraArgs =
  setupWrapper verbosity setupOptions Nothing
               (Cabal.buildCommand progConf) mkBuildFlags extraArgs
  where
    progConf     = defaultProgramConfiguration
    setupOptions = defaultSetupScriptOptions { useDistPref = distPref }

    -- Filter out flags that the Setup script's Cabal version cannot handle.
    mkBuildFlags version = filterBuildFlags version config buildFlags'
    buildFlags' = buildFlags
      { buildVerbosity = toFlag verbosity
      , buildDistPref  = toFlag distPref
      }
-- | Make sure that we don't pass new flags to setup scripts compiled against
-- old versions of Cabal.
filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags
filterBuildFlags version config buildFlags =
    if version >= Version [1,19,1] []
      then withNumJobs
      else withoutNumJobs
  where
    -- Cabal < 1.19.1 doesn't support 'build -j', so drop the flag entirely.
    withoutNumJobs = buildFlags { buildNumJobs = NoFlag }
    -- Take the 'jobs' setting from '~/.cabal/config' into account,
    -- letting the command line take precedence.
    withNumJobs = buildFlags
      { buildNumJobs = Flag . Just . determineNumJobs $
                       (fromSavedConfig `mappend` fromCmdLine)
      }
    fromSavedConfig = installNumJobs . savedInstallFlags $ config
    fromCmdLine     = buildNumJobs buildFlags
-- | Action for the @repl@ command.  If there is a @.cabal@ file in the
-- current directory, reconfigure (if needed) and load the project into the
-- REPL via the Setup script; otherwise just start a bare interpreter.
replAction :: (ReplFlags, BuildExFlags) -> [String] -> Action
replAction (replFlags, buildExFlags) extraArgs globalFlags = do
  cwd     <- getCurrentDirectory
  pkgDesc <- findPackageDesc cwd
  either (const onNoPkgDesc) (const onPkgDesc) pkgDesc
  where
    verbosity = fromFlagOrDefault normal (replVerbosity replFlags)

    -- There is a .cabal file in the current directory: start a REPL and load
    -- the project's modules.
    onPkgDesc = do
      let noAddSource = case replReload replFlags of
            Flag True -> SkipAddSourceDepsCheck
            _         -> fromFlagOrDefault DontSkipAddSourceDepsCheck
                         (buildOnly buildExFlags)

      -- Calls 'configureAction' to do the real work, so nothing special has to
      -- be done to support sandboxes.
      (useSandbox, _config, distPref) <-
        reconfigure verbosity (replDistPref replFlags)
                    mempty [] globalFlags noAddSource NoFlag
                    (const Nothing)
      let progConf     = defaultProgramConfiguration
          setupOptions = defaultSetupScriptOptions
            -- 'Setup repl' needs Cabal >= 1.18.
            { useCabalVersion = orLaterVersion $ Version [1,18,0] []
            , useDistPref     = distPref
            }
          replFlags'   = replFlags
            { replVerbosity = toFlag verbosity
            , replDistPref  = toFlag distPref
            }

      maybeWithSandboxDirOnSearchPath useSandbox $
        setupWrapper verbosity setupOptions Nothing
        (Cabal.replCommand progConf) (const replFlags') extraArgs

    -- No .cabal file in the current directory: just start the REPL (possibly
    -- using the sandbox package DB).
    onNoPkgDesc = do
      (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
      let configFlags = savedConfigureFlags config
      (comp, platform, programDb) <- configCompilerAux' configFlags
      -- Honour any --with-PROG / --PROG-options given on the command line.
      programDb' <- reconfigurePrograms verbosity
                                        (replProgramPaths replFlags)
                                        (replProgramArgs replFlags)
                                        programDb
      startInterpreter verbosity programDb' comp platform
                       (configPackageDB' configFlags)
-- | Re-configure the package in the current directory if needed. Deciding
-- when to reconfigure and with which options is convoluted:
--
-- If we are reconfiguring, we must always run @configure@ with the
-- verbosity option we are given; however, that a previous configuration
-- uses a different verbosity setting is not reason enough to reconfigure.
--
-- The package should be configured to use the same \"dist\" prefix as
-- given to the @build@ command, otherwise the build will probably
-- fail. Not only does this determine the \"dist\" prefix setting if we
-- need to reconfigure anyway, but an existing configuration should be
-- invalidated if its \"dist\" prefix differs.
--
-- If the package has never been configured (i.e., there is no
-- LocalBuildInfo), we must configure first, using the default options.
--
-- If the package has been configured, there will be a 'LocalBuildInfo'.
-- If there no package description file, we assume that the
-- 'PackageDescription' is up to date, though the configuration may need
-- to be updated for other reasons (see above). If there is a package
-- description file, and it has been modified since the 'LocalBuildInfo'
-- was generated, then we need to reconfigure.
--
-- The caller of this function may also have specific requirements
-- regarding the flags the last configuration used. For example,
-- 'testAction' requires that the package be configured with test suites
-- enabled. The caller may pass the required settings to this function
-- along with a function to check the validity of the saved 'ConfigFlags';
-- these required settings will be checked first upon determining that
-- a previous configuration exists.
reconfigure :: Verbosity    -- ^ Verbosity setting
            -> Flag FilePath  -- ^ \"dist\" prefix
            -> ConfigFlags  -- ^ Additional config flags to set. These flags
                            -- will be 'mappend'ed to the last used or
                            -- default 'ConfigFlags' as appropriate, so
                            -- this value should be 'mempty' with only the
                            -- required flags set. The required verbosity
                            -- and \"dist\" prefix flags will be set
                            -- automatically because they are always
                            -- required; therefore, it is not necessary to
                            -- set them here.
            -> [String]     -- ^ Extra arguments
            -> GlobalFlags  -- ^ Global flags
            -> SkipAddSourceDepsCheck
                            -- ^ Should we skip the timestamp check for modified
                            -- add-source dependencies?
            -> Flag (Maybe Int)
                            -- ^ -j flag for reinstalling add-source deps.
            -> (ConfigFlags -> Maybe String)
                            -- ^ Check that the required flags are set in
                            -- the last used 'ConfigFlags'. If the required
                            -- flags are not set, provide a message to the
                            -- user explaining the reason for
                            -- reconfiguration. Because the correct \"dist\"
                            -- prefix setting is always required, it is checked
                            -- automatically; this function need not check
                            -- for it.
            -> IO (UseSandbox, SavedConfig, FilePath)
reconfigure verbosity flagDistPref addConfigFlags extraArgs globalFlags
            skipAddSourceDepsCheck numJobsFlag checkFlags = do
  (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
  distPref <- findSavedDistPref config flagDistPref
  -- Try to read the saved configure-time state (dist/setup-config); a
  -- 'Left' here means the package must be configured from scratch.
  eLbi <- tryGetPersistBuildConfig distPref
  config' <- case eLbi of
    Left err  -> onNoBuildConfig (useSandbox, config) distPref err
    Right lbi -> onBuildConfig   (useSandbox, config) distPref lbi
  return (useSandbox, config', distPref)

  where

    -- We couldn't load the saved package config file.
    --
    -- If we're in a sandbox: add-source deps don't have to be reinstalled
    -- (since we don't know the compiler & platform).
    onNoBuildConfig :: (UseSandbox, SavedConfig) -> FilePath
                    -> ConfigStateFileError -> IO SavedConfig
    onNoBuildConfig (_, config) distPref err = do
      let msg = case err of
            ConfigStateFileMissing -> "Package has never been configured."
            ConfigStateFileNoParse -> "Saved package config file seems "
                                      ++ "to be corrupt."
            _ -> show err
      case err of
        -- Note: the build config could have been generated by a custom setup
        -- script built against a different Cabal version, so it's crucial that
        -- we ignore the bad version error here.
        ConfigStateFileBadVersion _ _ _ -> info verbosity msg
        _                               -> do
          let distVerbFlags = mempty
                { configVerbosity = toFlag verbosity
                , configDistPref  = toFlag distPref
                }
              defaultFlags = mappend addConfigFlags distVerbFlags
          notice verbosity
            $ msg ++ " Configuring with default flags." ++ configureManually
          configureAction (defaultFlags, defaultConfigExFlags)
            extraArgs globalFlags
      return config

    -- Package has been configured, but the configuration may be out of
    -- date or required flags may not be set.
    --
    -- If we're in a sandbox: reinstall the modified add-source deps and
    -- force reconfigure if we did.
    onBuildConfig :: (UseSandbox, SavedConfig) -> FilePath
                  -> LBI.LocalBuildInfo -> IO SavedConfig
    onBuildConfig (useSandbox, config) distPref lbi = do
      let configFlags = LBI.configFlags lbi
          distVerbFlags = mempty
            { configVerbosity = toFlag verbosity
            , configDistPref  = toFlag distPref
            }
          flags = mconcat [configFlags, addConfigFlags, distVerbFlags]

      -- Was the sandbox created after the package was already configured? We
      -- may need to skip reinstallation of add-source deps and force
      -- reconfigure.
      let buildConfig       = localBuildInfoFile distPref
      sandboxConfig        <- getSandboxConfigFilePath globalFlags
      isSandboxConfigNewer <-
        sandboxConfig `existsAndIsMoreRecentThan` buildConfig

      let skipAddSourceDepsCheck'
            | isSandboxConfigNewer = SkipAddSourceDepsCheck
            | otherwise            = skipAddSourceDepsCheck

      when (skipAddSourceDepsCheck' == SkipAddSourceDepsCheck) $
        info verbosity "Skipping add-source deps check..."

      -- The new config includes the sandbox package DB.
      let (_, config') = updateInstallDirs
                         (configUserInstall flags)
                         (useSandbox, config)

      depsReinstalled <-
        case skipAddSourceDepsCheck' of
          DontSkipAddSourceDepsCheck     ->
            maybeReinstallAddSourceDeps
              verbosity numJobsFlag flags globalFlags
              (useSandbox, config')
          SkipAddSourceDepsCheck -> do
            return NoDepsReinstalled

      -- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need
      -- to force reconfigure. Note that it's possible to use @cabal.config@
      -- even without sandboxes.
      isUserPackageEnvironmentFileNewer <-
        userPackageEnvironmentFile `existsAndIsMoreRecentThan` buildConfig

      -- Determine whether we need to reconfigure and which message to show to
      -- the user if that is the case.
      mMsg <- determineMessageToShow distPref lbi configFlags
                                     depsReinstalled isSandboxConfigNewer
                                     isUserPackageEnvironmentFileNewer
      case mMsg of

        -- No message for the user indicates that reconfiguration
        -- is not required.
        Nothing -> return config'

        -- Show the message and reconfigure.
        Just msg -> do
          notice verbosity msg
          configureAction (flags, defaultConfigExFlags)
            extraArgs globalFlags
          return config'

    -- Determine what message, if any, to display to the user if reconfiguration
    -- is required.
    determineMessageToShow :: FilePath -> LBI.LocalBuildInfo -> ConfigFlags
                           -> WereDepsReinstalled -> Bool -> Bool
                           -> IO (Maybe String)
    determineMessageToShow _ _ _ _ True _ =
      -- The sandbox was created after the package was already configured.
      return $! Just $! sandboxConfigNewerMessage

    determineMessageToShow _ _ _ _ False True =
      -- The user package environment file was modified.
      return $! Just $! userPackageEnvironmentFileModifiedMessage

    determineMessageToShow distPref lbi configFlags depsReinstalled
                           False False = do
      let savedDistPref = fromFlagOrDefault
                          (useDistPref defaultSetupScriptOptions)
                          (configDistPref configFlags)
      case depsReinstalled of
        ReinstalledSomeDeps ->
          -- Some add-source deps were reinstalled.
          return $! Just $! reinstalledDepsMessage
        NoDepsReinstalled ->
          case checkFlags configFlags of
            -- Flag required by the caller is not set.
            Just msg -> return $! Just $! msg ++ configureManually

            Nothing
              -- Required "dist" prefix is not set.
              | savedDistPref /= distPref ->
                return $! Just distPrefMessage

              -- All required flags are set, but the configuration
              -- may be outdated.
              | otherwise -> case LBI.pkgDescrFile lbi of
                Nothing     -> return Nothing
                Just pdFile -> do
                  outdated <- checkPersistBuildConfigOutdated
                              distPref pdFile
                  return $! if outdated
                            then Just $! outdatedMessage pdFile
                            else Nothing

    -- Shared message fragments for the reconfiguration notices above.
    reconfiguringMostRecent = " Re-configuring with most recently used options."
    configureManually       = " If this fails, please run configure manually."

    sandboxConfigNewerMessage =
        "The sandbox was created after the package was already configured."
        ++ reconfiguringMostRecent
        ++ configureManually

    userPackageEnvironmentFileModifiedMessage =
        "The user package environment file ('"
        ++ userPackageEnvironmentFile ++ "') was modified."
        ++ reconfiguringMostRecent
        ++ configureManually

    distPrefMessage =
        "Package previously configured with different \"dist\" prefix."
        ++ reconfiguringMostRecent
        ++ configureManually

    outdatedMessage pdFile =
        pdFile ++ " has been changed."
        ++ reconfiguringMostRecent
        ++ configureManually

    reinstalledDepsMessage =
        "Some add-source dependencies have been reinstalled."
        ++ reconfiguringMostRecent
        ++ configureManually
-- | Action for the @install@ command.  The first equation handles
-- @install --only@ (just run Setup's install step on the already-built
-- package); the second does the full dependency resolution and install.
installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
              -> [String] -> Action
installAction (configFlags, _, installFlags, _) _ globalFlags
  | fromFlagOrDefault False (installOnly installFlags) = do
      let verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
      (_, config) <- loadConfigOrSandboxConfig verbosity globalFlags
      distPref <- findSavedDistPref config (configDistPref configFlags)
      let setupOpts = defaultSetupScriptOptions { useDistPref = distPref }
      setupWrapper verbosity setupOpts Nothing installCommand (const mempty) []

installAction (configFlags, configExFlags, installFlags, haddockFlags)
              extraArgs globalFlags = do
  let verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
  (useSandbox, config) <- fmap
                          (updateInstallDirs (configUserInstall configFlags))
                          (loadConfigOrSandboxConfig verbosity globalFlags)
  targets <- readUserTargets verbosity extraArgs

  -- TODO: It'd be nice if 'cabal install' picked up the '-w' flag passed to
  -- 'configure' when run inside a sandbox.  Right now, running
  --
  -- $ cabal sandbox init && cabal configure -w /path/to/ghc
  --   && cabal build && cabal install
  --
  -- performs the compilation twice unless you also pass -w to 'install'.
  -- However, this is the same behaviour that 'cabal install' has in the normal
  -- mode of operation, so we stick to it for consistency.

  let sandboxDistPref = case useSandbox of
        NoSandbox             -> NoFlag
        UseSandbox sandboxDir -> Flag $ sandboxBuildDir sandboxDir

  distPref <- findSavedDistPref config
              (configDistPref configFlags `mappend` sandboxDistPref)

  -- Saved flags first so command-line values override them.
  let configFlags'    = maybeForceTests installFlags' $
                        savedConfigureFlags   config `mappend`
                        configFlags { configDistPref = toFlag distPref }
      configExFlags'  = defaultConfigExFlags         `mappend`
                        savedConfigureExFlags config `mappend` configExFlags
      installFlags'   = defaultInstallFlags          `mappend`
                        savedInstallFlags     config `mappend` installFlags
      haddockFlags'   = defaultHaddockFlags          `mappend`
                        savedHaddockFlags     config `mappend`
                        haddockFlags { haddockDistPref = toFlag distPref }
      globalFlags'    = savedGlobalFlags      config `mappend` globalFlags
  (comp, platform, conf) <- configCompilerAux' configFlags'
  -- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the
  -- future.
  conf' <- configureAllKnownPrograms verbosity conf

  -- If we're working inside a sandbox and the user has set the -w option, we
  -- may need to create a sandbox-local package DB for this compiler and add a
  -- timestamp record for this compiler to the timestamp file.
  configFlags'' <- case useSandbox of
    NoSandbox               -> configAbsolutePaths $ configFlags'
    (UseSandbox sandboxDir) -> return $ setPackageDB sandboxDir comp platform
                                                     configFlags'

  whenUsingSandbox useSandbox $ \sandboxDir -> do
    initPackageDBIfNeeded verbosity configFlags'' comp conf'

    indexFile     <- tryGetIndexFilePath config
    maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
      (compilerId comp) platform

  -- TODO: Passing 'SandboxPackageInfo' to install unconditionally here means
  -- that 'cabal install some-package' inside a sandbox will sometimes reinstall
  -- modified add-source deps, even if they are not among the dependencies of
  -- 'some-package'. This can also prevent packages that depend on older
  -- versions of add-source'd packages from building (see #1362).
  maybeWithSandboxPackageInfo verbosity configFlags'' globalFlags'
                              comp platform conf useSandbox $ \mSandboxPkgInfo ->
                              maybeWithSandboxDirOnSearchPath useSandbox $
                              withRepoContext verbosity globalFlags' $ \repoContext ->
                                install verbosity
                                        (configPackageDB' configFlags'')
                                        repoContext
                                        comp platform conf'
                                        useSandbox mSandboxPkgInfo
                                        globalFlags' configFlags'' configExFlags'
                                        installFlags' haddockFlags'
                                        targets

    where
      -- '--run-tests' implies '--enable-tests'.
      maybeForceTests installFlags' configFlags' =
        if fromFlagOrDefault False (installRunTests installFlags')
        then configFlags' { configTests = toFlag True }
        else configFlags'
-- | Action for the @test@ command: reconfigure with test suites enabled if
-- necessary, build, and run the test suites via the Setup script.
testAction :: (TestFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags
           -> IO ()
testAction (testFlags, buildFlags, buildExFlags) extraArgs globalFlags = do
  let verbosity      = fromFlagOrDefault normal (testVerbosity testFlags)
      -- Force '--enable-tests' when reconfiguring.
      addConfigFlags = mempty { configTests = toFlag True }
      noAddSource    = fromFlagOrDefault DontSkipAddSourceDepsCheck
                       (buildOnly buildExFlags)
      buildFlags'    = buildFlags
                       { buildVerbosity = testVerbosity testFlags }
      -- Triggers a reconfigure when tests were not enabled last time.
      checkFlags flags
        | fromFlagOrDefault False (configTests flags) = Nothing
        | otherwise  = Just "Re-configuring with test suites enabled."

  -- reconfigure also checks if we're in a sandbox and reinstalls add-source
  -- deps if needed.
  (useSandbox, config, distPref) <-
    reconfigure verbosity (testDistPref testFlags)
                addConfigFlags [] globalFlags noAddSource
                (buildNumJobs buildFlags') checkFlags
  let setupOptions   = defaultSetupScriptOptions { useDistPref = distPref }
      testFlags'     = testFlags { testDistPref = toFlag distPref }

  -- The package was just configured, so the LBI must be available.
  names <- componentNamesFromLBI verbosity distPref "test suites"
             (\c -> case c of { LBI.CTest{} -> True; _ -> False })
  -- With no explicit targets, build & run every known test suite.
  let extraArgs'
        | null extraArgs = case names of
          ComponentNamesUnknown -> []
          ComponentNames names' -> [ name | LBI.CTestName name <- names' ]
        | otherwise      = extraArgs

  maybeWithSandboxDirOnSearchPath useSandbox $
    build verbosity config distPref buildFlags' extraArgs'

  maybeWithSandboxDirOnSearchPath useSandbox $
    setupWrapper verbosity setupOptions Nothing
      Cabal.testCommand (const testFlags') extraArgs'
-- | Component names extracted from a package's saved build configuration,
-- or 'ComponentNamesUnknown' when that configuration could not be read.
data ComponentNames = ComponentNamesUnknown
                    | ComponentNames [LBI.ComponentName]
-- | Return the names of all buildable components matching a given predicate.
-- Exits with a notice (success) when the package has no such components.
componentNamesFromLBI :: Verbosity -> FilePath -> String
                         -> (LBI.Component -> Bool)
                         -> IO ComponentNames
componentNamesFromLBI verbosity distPref targetsDescr compPred = do
  eLBI <- tryGetPersistBuildConfig distPref
  case eLBI of
    Left err -> case err of
      -- Note: the build config could have been generated by a custom setup
      -- script built against a different Cabal version, so it's crucial that
      -- we ignore the bad version error here.
      ConfigStateFileBadVersion _ _ _ -> return ComponentNamesUnknown
      _                               -> die (show err)
    Right lbi -> do
      let pkgDescr = LBI.localPkgDescr lbi
          names    = map LBI.componentName
                     . filter (buildable . LBI.componentBuildInfo)
                     . filter compPred $
                     LBI.pkgComponents pkgDescr
      if null names
        then do notice verbosity $ "Package has no buildable "
                  ++ targetsDescr ++ "."
                exitSuccess -- See #3215.

        else return $! (ComponentNames names)
-- | Action for the @bench@ command: reconfigure with benchmarks enabled if
-- necessary, build, and run the benchmarks via the Setup script.
benchmarkAction :: (BenchmarkFlags, BuildFlags, BuildExFlags)
                   -> [String] -> GlobalFlags
                   -> IO ()
benchmarkAction (benchmarkFlags, buildFlags, buildExFlags)
                extraArgs globalFlags = do
  let verbosity      = fromFlagOrDefault normal
                       (benchmarkVerbosity benchmarkFlags)
      -- Force '--enable-benchmarks' when reconfiguring.
      addConfigFlags = mempty { configBenchmarks = toFlag True }
      buildFlags'    = buildFlags
                       { buildVerbosity = benchmarkVerbosity benchmarkFlags }
      -- Triggers a reconfigure when benchmarks were not enabled last time.
      checkFlags flags
        | fromFlagOrDefault False (configBenchmarks flags) = Nothing
        | otherwise = Just "Re-configuring with benchmarks enabled."
      noAddSource   = fromFlagOrDefault DontSkipAddSourceDepsCheck
                      (buildOnly buildExFlags)

  -- reconfigure also checks if we're in a sandbox and reinstalls add-source
  -- deps if needed.
  (useSandbox, config, distPref) <-
    reconfigure verbosity (benchmarkDistPref benchmarkFlags)
                addConfigFlags [] globalFlags noAddSource
                (buildNumJobs buildFlags') checkFlags
  let setupOptions   = defaultSetupScriptOptions { useDistPref = distPref }
      benchmarkFlags'= benchmarkFlags { benchmarkDistPref = toFlag distPref }

  -- The package was just configured, so the LBI must be available.
  names <- componentNamesFromLBI verbosity distPref "benchmarks"
           (\c -> case c of { LBI.CBench{} -> True; _ -> False; })
  -- With no explicit targets, build & run every known benchmark.
  let extraArgs'
        | null extraArgs = case names of
          ComponentNamesUnknown -> []
          ComponentNames names' -> [name | LBI.CBenchName name <- names']
        | otherwise      = extraArgs

  maybeWithSandboxDirOnSearchPath useSandbox $
    build verbosity config distPref buildFlags' extraArgs'

  maybeWithSandboxDirOnSearchPath useSandbox $
    setupWrapper verbosity setupOptions Nothing
      Cabal.benchmarkCommand (const benchmarkFlags') extraArgs'
-- | Action for the @haddock@ command: reconfigure if necessary, run the
-- Setup haddock step, and when @--for-hackage@ is set also pack the
-- generated HTML docs into a tarball.
haddockAction :: HaddockFlags -> [String] -> Action
haddockAction haddockFlags extraArgs globalFlags = do
  let verbosity = fromFlag (haddockVerbosity haddockFlags)
  (_useSandbox, config, distPref) <-
    reconfigure verbosity (haddockDistPref haddockFlags)
                mempty [] globalFlags DontSkipAddSourceDepsCheck
                NoFlag (const Nothing)
  let haddockFlags' = defaultHaddockFlags      `mappend`
                      savedHaddockFlags config `mappend`
                      haddockFlags { haddockDistPref = toFlag distPref }
      setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref }
  setupWrapper verbosity setupScriptOptions Nothing
    haddockCommand (const haddockFlags') extraArgs
  when (fromFlagOrDefault False $ haddockForHackage haddockFlags) $ do
    pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref)
    -- Hackage expects a "<pkgid>-docs" tarball of the HTML output.
    let dest = distPref </> name <.> "tar.gz"
        name = display (packageId pkg) ++ "-docs"
        docDir = distPref </> "doc" </> "html"
    createTarGzFile dest docDir name
    notice verbosity $ "Documentation tarball created: " ++ dest
-- | Action for the @clean@ command: delegate to the Setup script's clean
-- step with the resolved \"dist\" prefix.
cleanAction :: CleanFlags -> [String] -> Action
cleanAction cleanFlags extraArgs globalFlags = do
  (_, config) <- loadConfigOrSandboxConfig verbosity globalFlags
  distPref <- findSavedDistPref config (cleanDistPref cleanFlags)
  let setupScriptOptions = defaultSetupScriptOptions
                           { useDistPref = distPref
                             -- Work around file-locking issues on Windows.
                           , useWin32CleanHack = True
                           }
      cleanFlags' = cleanFlags { cleanDistPref = toFlag distPref }
  setupWrapper verbosity setupScriptOptions Nothing
               cleanCommand (const cleanFlags') extraArgs
  where
    verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags)
-- | Action for the @list@ command: list known packages from the configured
-- package DBs and repositories.  Works outside a sandbox too, hence
-- 'globalRequireSandbox' is forced off.
listAction :: ListFlags -> [String] -> Action
listAction listFlags extraArgs globalFlags = do
  let verbosity = fromFlag (listVerbosity listFlags)
  (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity
                           (globalFlags { globalRequireSandbox = Flag False })
  let configFlags' = savedConfigureFlags config
      -- Also consult any extra package DBs given on the command line.
      configFlags  = configFlags' {
        configPackageDBs = configPackageDBs configFlags'
                           `mappend` listPackageDBs listFlags
        }
      globalFlags' = savedGlobalFlags    config `mappend` globalFlags
  (comp, _, conf) <- configCompilerAux' configFlags
  withRepoContext verbosity globalFlags' $ \repoContext ->
    List.list verbosity
       (configPackageDB' configFlags)
       repoContext
       comp
       conf
       listFlags
       extraArgs
-- | Action for the @info@ command: show detailed information about the
-- requested target packages.  Works outside a sandbox too, hence
-- 'globalRequireSandbox' is forced off.
infoAction :: InfoFlags -> [String] -> Action
infoAction infoFlags extraArgs globalFlags = do
  let verbosity = fromFlag (infoVerbosity infoFlags)
  targets <- readUserTargets verbosity extraArgs
  (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity
                           (globalFlags { globalRequireSandbox = Flag False })
  let configFlags' = savedConfigureFlags config
      -- Also consult any extra package DBs given on the command line.
      configFlags  = configFlags' {
        configPackageDBs = configPackageDBs configFlags'
                           `mappend` infoPackageDBs infoFlags
        }
      globalFlags' = savedGlobalFlags    config `mappend` globalFlags
  (comp, _, conf) <- configCompilerAuxEx configFlags
  withRepoContext verbosity globalFlags' $ \repoContext ->
    List.info verbosity
       (configPackageDB' configFlags)
       repoContext
       comp
       conf
       globalFlags'
       infoFlags
       targets
-- | Action for the @update@ command: refresh the package index from each
-- configured repository.  Takes no positional arguments.
updateAction :: Flag Verbosity -> [String] -> Action
updateAction verbosityFlag extraArgs globalFlags = do
  unless (null extraArgs) $
    die $ "'update' doesn't take any extra arguments: " ++ unwords extraArgs
  let verbosity = fromFlag verbosityFlag
  -- Updating the index is useful even without a sandbox.
  (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity
                           (globalFlags { globalRequireSandbox = Flag False })
  let globalFlags' = savedGlobalFlags config `mappend` globalFlags
  withRepoContext verbosity globalFlags' $ \repoContext ->
    update verbosity repoContext
-- | The @upgrade@ command was removed; always fail with a message pointing
-- the user at @cabal install@ and the @--upgrade-dependencies@ flag.
upgradeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
              -> [String] -> Action
upgradeAction _ _ _ = die $ concat
  [ "Use the 'cabal install' command instead of 'cabal upgrade'.\n"
  , "You can install the latest version of a package using 'cabal install'. "
  , "The 'cabal upgrade' command has been removed because people found it "
  , "confusing and it often led to broken packages.\n"
  , "If you want the old upgrade behaviour then use the install command "
  , "with the --upgrade-dependencies flag (but check first with --dry-run "
  , "to see what would happen). This will try to pick the latest versions "
  , "of all dependencies, rather than the usual behaviour of trying to pick "
  , "installed versions of all dependencies. If you do use "
  , "--upgrade-dependencies, it is recommended that you do not upgrade core "
  , "packages (e.g. by using appropriate --constraint= flags)."
  ]
-- | Action for the @fetch@ command: download the source tarballs for the
-- requested target packages (and, per the fetch flags, their dependencies).
fetchAction :: FetchFlags -> [String] -> Action
fetchAction fetchFlags extraArgs globalFlags = do
  let verbosity = fromFlag (fetchVerbosity fetchFlags)
  targets <- readUserTargets verbosity extraArgs
  config <- loadConfig verbosity (globalConfigFile globalFlags)
  let configFlags  = savedConfigureFlags config
      globalFlags' = savedGlobalFlags config `mappend` globalFlags
  (comp, platform, conf) <- configCompilerAux' configFlags
  withRepoContext verbosity globalFlags' $ \repoContext ->
    fetch verbosity
        (configPackageDB' configFlags)
        repoContext
        comp platform conf globalFlags' fetchFlags
        targets
-- | Action for the @freeze@ command: pin the versions of the project's
-- dependencies (sandbox-aware).  Positional arguments are ignored.
freezeAction :: FreezeFlags -> [String] -> Action
freezeAction freezeFlags _extraArgs globalFlags = do
  let verbosity = fromFlag (freezeVerbosity freezeFlags)
  (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
  let configFlags  = savedConfigureFlags config
      globalFlags' = savedGlobalFlags config `mappend` globalFlags
  (comp, platform, conf) <- configCompilerAux' configFlags

  maybeWithSandboxPackageInfo verbosity configFlags globalFlags'
                              comp platform conf useSandbox $ \mSandboxPkgInfo ->
                              maybeWithSandboxDirOnSearchPath useSandbox $
                              withRepoContext verbosity globalFlags' $ \repoContext ->
                                freeze verbosity
                                      (configPackageDB' configFlags)
                                      repoContext
                                      comp platform conf
                                      mSandboxPkgInfo
                                      globalFlags' freezeFlags
-- | Action for the @gen-bounds@ command: suggest version bounds for the
-- project's dependencies (sandbox-aware).  Positional arguments are ignored.
-- Mirrors the structure of 'freezeAction'.
genBoundsAction :: FreezeFlags -> [String] -> GlobalFlags -> IO ()
genBoundsAction freezeFlags _extraArgs globalFlags = do
  let verbosity = fromFlag (freezeVerbosity freezeFlags)
  (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
  let configFlags  = savedConfigureFlags config
      globalFlags' = savedGlobalFlags config `mappend` globalFlags
  (comp, platform, conf) <- configCompilerAux' configFlags

  maybeWithSandboxPackageInfo verbosity configFlags globalFlags'
                              comp platform conf useSandbox $ \mSandboxPkgInfo ->
                              maybeWithSandboxDirOnSearchPath useSandbox $
                              withRepoContext verbosity globalFlags' $ \repoContext ->
                                genBounds verbosity
                                      (configPackageDB' configFlags)
                                      repoContext
                                      comp platform conf
                                      mSandboxPkgInfo
                                      globalFlags' freezeFlags
-- | Action for the @upload@ command.  Three modes, chosen by flags:
-- check the tarballs (@--check@), upload documentation (@--doc@, building
-- a docs tarball first if none was given), or upload package tarballs.
uploadAction :: UploadFlags -> [String] -> Action
uploadAction uploadFlags extraArgs globalFlags = do
  config <- loadConfig verbosity (globalConfigFile globalFlags)
  let uploadFlags' = savedUploadFlags config `mappend` uploadFlags
      globalFlags' = savedGlobalFlags config `mappend` globalFlags
      tarfiles     = extraArgs
  -- For --doc a missing tarball is allowed: we can build one below.
  when (null tarfiles && not (fromFlag (uploadDoc uploadFlags'))) $
    die "the 'upload' command expects at least one .tar.gz archive."
  when (fromFlag (uploadCheck uploadFlags')
        && fromFlag (uploadDoc uploadFlags')) $
    die "--check and --doc cannot be used together."
  checkTarFiles extraArgs
  maybe_password <-
    case uploadPasswordCmd uploadFlags'
    of Flag (xs:xss) -> Just . Password <$>
                        getProgramInvocationOutput verbosity
                        (simpleProgramInvocation xs xss)
       _             -> pure $ flagToMaybe $ uploadPassword uploadFlags'
  withRepoContext verbosity globalFlags' $ \repoContext -> do
    if fromFlag (uploadCheck uploadFlags')
    then do
      Upload.check verbosity repoContext tarfiles
    else if fromFlag (uploadDoc uploadFlags')
    then do
      when (length tarfiles > 1) $
       die $ "the 'upload' command can only upload documentation "
             ++ "for one package at a time."
      tarfile <- maybe (generateDocTarball config) return $ listToMaybe tarfiles
      Upload.uploadDoc verbosity
                       repoContext
                       (flagToMaybe $ uploadUsername uploadFlags')
                       maybe_password
                       tarfile
    else do
      Upload.upload verbosity
                    repoContext
                    (flagToMaybe $ uploadUsername uploadFlags')
                    maybe_password
                    tarfiles
  where
    verbosity = fromFlag (uploadVerbosity uploadFlags)
    -- Reject anything that is not an existing .tar.gz file.
    checkTarFiles tarfiles
      | not (null otherFiles)
      = die $ "the 'upload' command expects only .tar.gz archives: "
           ++ intercalate ", " otherFiles
      | otherwise = sequence_
                      [ do exists <- doesFileExist tarfile
                           unless exists $ die $ "file not found: " ++ tarfile
                      | tarfile <- tarfiles ]

      where otherFiles = filter (not . isTarGzFile) tarfiles
            isTarGzFile file = case splitExtension file of
              (file', ".gz") -> takeExtension file' == ".tar"
              _              -> False
    -- Build the "<pkgid>-docs" tarball via 'haddockAction --for-hackage'.
    generateDocTarball config = do
      notice verbosity
        "No documentation tarball specified. Building documentation tarball..."
      haddockAction (defaultHaddockFlags { haddockForHackage = Flag True })
                    [] globalFlags
      distPref <- findSavedDistPref config NoFlag
      pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref)
      return $ distPref </> display (packageId pkg) ++ "-docs" <.> "tar.gz"
-- | Action for the @check@ command: run the package sanity checks and exit
-- non-zero if any of them fail.  Takes no positional arguments.
checkAction :: Flag Verbosity -> [String] -> Action
checkAction verbosityFlag extraArgs _globalFlags = do
  unless (null extraArgs) . die $
    "'check' doesn't take any extra arguments: " ++ unwords extraArgs
  ok <- Check.check (fromFlag verbosityFlag)
  unless ok exitFailure
-- | Action for the (hidden) @format@ command: parse the package description
-- given as the first argument (or found in the current directory) and write
-- it back in canonical form.
formatAction :: Flag Verbosity -> [String] -> Action
formatAction verbosityFlag extraArgs _globalFlags = do
  let verbosity = fromFlag verbosityFlag
  path <- case extraArgs of
    [] -> do cwd <- getCurrentDirectory
             tryFindPackageDesc cwd
    (p:_) -> return p
  pkgDesc <- readPackageDescription verbosity path
  -- Uses 'writeFileAtomic' under the hood.
  writeGenericPackageDescription path pkgDesc
-- | The @uninstall@ command is intentionally unimplemented; always
-- fails with a message pointing the user at @ghc-pkg unregister@ or
-- @cabal sandbox hc-pkg@ instead.
uninstallAction :: Flag Verbosity -> [String] -> Action
uninstallAction _verbosityFlag extraArgs _globalFlags =
    die $ concat
      [ "This version of 'cabal-install' does not support the 'uninstall' "
      , "operation. "
      , "It will likely be implemented at some point in the future; "
      , "in the meantime you're advised to use either 'ghc-pkg unregister "
      , package
      , "' or 'cabal sandbox hc-pkg -- unregister "
      , package
      , "'."
      ]
  where
    -- Mention the named package if one was given, else a placeholder.
    package = case extraArgs of
                (p:_) -> p
                _     -> "PACKAGE_NAME"
-- | Implementation of the @sdist@ command: create a source
-- distribution tarball, honouring any dist-dir saved in the
-- (possibly sandboxed) configuration.  Accepts no positional
-- arguments.
sdistAction :: (SDistFlags, SDistExFlags) -> [String] -> Action
sdistAction (sdistFlags, sdistExFlags) extraArgs globalFlags = do
  unless (null extraArgs) $
    die $ "'sdist' doesn't take any extra arguments: " ++ unwords extraArgs
  let verbosity = fromFlag (sDistVerbosity sdistFlags)
  (_, config) <- loadConfigOrSandboxConfig verbosity globalFlags
  -- Resolve the dist prefix from saved config before running sdist.
  distPref <- findSavedDistPref config (sDistDistPref sdistFlags)
  let sdistFlags' = sdistFlags { sDistDistPref = toFlag distPref }
  sdist sdistFlags' sdistExFlags
-- | Implementation of the @report@ command: upload locally saved build
-- reports to the remote repositories, combining saved configuration
-- with any flags given on the command line.
reportAction :: ReportFlags -> [String] -> Action
reportAction reportFlags extraArgs globalFlags = do
  unless (null extraArgs) $
    die $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs
  let verbosity = fromFlag (reportVerbosity reportFlags)
  config <- loadConfig verbosity (globalConfigFile globalFlags)
  -- Saved config combined with the command line via 'mappend'.
  let globalFlags' = savedGlobalFlags config `mappend` globalFlags
      reportFlags' = savedReportFlags config `mappend` reportFlags
  withRepoContext verbosity globalFlags' $ \repoContext ->
    Upload.report verbosity repoContext
      (flagToMaybe $ reportUsername reportFlags')
      (flagToMaybe $ reportPassword reportFlags')
-- | Implementation of the @run@ command: (re)configure if needed,
-- build the requested executable, and then run it with the remaining
-- arguments.
runAction :: (BuildFlags, BuildExFlags) -> [String] -> Action
runAction (buildFlags, buildExFlags) extraArgs globalFlags = do
  let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags)
  let noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck
                    (buildOnly buildExFlags)
  -- reconfigure also checks if we're in a sandbox and reinstalls add-source
  -- deps if needed.
  (useSandbox, config, distPref) <-
    reconfigure verbosity (buildDistPref buildFlags) mempty []
                globalFlags noAddSource (buildNumJobs buildFlags)
                (const Nothing)
  lbi <- getPersistBuildConfig distPref
  -- Split argv into the executable to run and its own arguments.
  (exe, exeArgs) <- splitRunArgs verbosity lbi extraArgs
  maybeWithSandboxDirOnSearchPath useSandbox $
    build verbosity config distPref buildFlags ["exe:" ++ exeName exe]
  maybeWithSandboxDirOnSearchPath useSandbox $
    run verbosity lbi exe exeArgs
-- | Implementation of the @get@ command: download/unpack the source of
-- the given target packages.  Works outside a sandbox as well
-- ('globalRequireSandbox' is forced off).
getAction :: GetFlags -> [String] -> Action
getAction getFlags extraArgs globalFlags = do
  let verbosity = fromFlag (getVerbosity getFlags)
  targets <- readUserTargets verbosity extraArgs
  (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity
                           (globalFlags { globalRequireSandbox = Flag False })
  let globalFlags' = savedGlobalFlags config `mappend` globalFlags
  -- NOTE(review): the repo context is built from 'savedGlobalFlags
  -- config' rather than the merged 'globalFlags''; confirm intentional.
  withRepoContext verbosity (savedGlobalFlags config) $ \repoContext ->
   get verbosity
    repoContext
    globalFlags'
    getFlags
    targets
-- | @unpack@ is a deprecated alias for the @get@ command; simply
-- delegate.
unpackAction :: GetFlags -> [String] -> Action
unpackAction = getAction
-- | Implementation of the @init@ command: interactively create a new
-- .cabal package description in the current directory.  Accepts no
-- positional arguments; works outside a sandbox too.
initAction :: InitFlags -> [String] -> Action
initAction initFlags extraArgs globalFlags = do
  when (extraArgs /= []) $
    die $ "'init' doesn't take any extra arguments: " ++ unwords extraArgs
  let verbosity = fromFlag (initVerbosity initFlags)
  (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity
                           (globalFlags { globalRequireSandbox = Flag False })
  let configFlags = savedConfigureFlags config
  let globalFlags' = savedGlobalFlags config `mappend` globalFlags
  -- A compiler is needed so init can pick sensible defaults.
  (comp, _, conf) <- configCompilerAux' configFlags
  withRepoContext verbosity globalFlags' $ \repoContext ->
    initCabal verbosity
            (configPackageDB' configFlags)
            repoContext
            comp
            conf
            initFlags
-- | Implementation of the @sandbox@ command: dispatch on the
-- subcommand name given as the first positional argument.
sandboxAction :: SandboxFlags -> [String] -> Action
sandboxAction sandboxFlags extraArgs globalFlags = do
  let verbosity = fromFlag (sandboxVerbosity sandboxFlags)
  case extraArgs of
    -- Basic sandbox commands.
    ["init"] -> sandboxInit verbosity sandboxFlags globalFlags
    ["delete"] -> sandboxDelete verbosity sandboxFlags globalFlags
    ("add-source":extra) -> do
        when (noExtraArgs extra) $
          die "The 'sandbox add-source' command expects at least one argument"
        sandboxAddSource verbosity extra sandboxFlags globalFlags
    ("delete-source":extra) -> do
        when (noExtraArgs extra) $
          die ("The 'sandbox delete-source' command expects " ++
              "at least one argument")
        sandboxDeleteSource verbosity extra sandboxFlags globalFlags
    ["list-sources"] -> sandboxListSources verbosity sandboxFlags globalFlags
    -- More advanced commands.
    ("hc-pkg":extra) -> do
        when (noExtraArgs extra) $
          die $ "The 'sandbox hc-pkg' command expects at least one argument"
        sandboxHcPkg verbosity sandboxFlags globalFlags extra
    ["buildopts"] -> die "Not implemented!"
    -- Hidden commands.
    ["dump-pkgenv"] -> dumpPackageEnvironment verbosity sandboxFlags globalFlags
    -- Error handling.
    [] -> die $ "Please specify a subcommand (see 'help sandbox')"
    _ -> die $ "Unknown 'sandbox' subcommand: " ++ unwords extraArgs
  where
    -- True when no subcommand arguments were supplied.  Replaces the
    -- original @(<1) . length@: same meaning, O(1) and idiomatic.
    noExtraArgs = null
-- | Implementation of the @exec@ command: run an arbitrary program in
-- an environment matching the (possibly sandboxed) package setup.
execAction :: ExecFlags -> [String] -> Action
execAction execFlags extraArgs globalFlags = do
  let verbosity = fromFlag (execVerbosity execFlags)
  (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
  let configFlags = savedConfigureFlags config
  -- Reuse a persisted compiler configuration when available.
  (comp, platform, conf) <- getPersistOrConfigCompiler configFlags
  exec verbosity useSandbox comp platform conf extraArgs
-- | Implementation of the @user-config@ command: initialise, diff or
-- update the per-user cabal configuration file.
userConfigAction :: UserConfigFlags -> [String] -> Action
userConfigAction ucflags extraArgs globalFlags = do
  let verbosity = fromFlag (userConfigVerbosity ucflags)
      force = fromFlag (userConfigForce ucflags)
  case extraArgs of
    ("init":_) -> do
      path       <- configFile
      fileExists <- doesFileExist path
      -- Create when missing, or overwrite when --force was given.
      -- (Simplified from @not fileExists || (fileExists && force)@,
      -- which is logically equivalent.)
      if not fileExists || force
        then createDefaultConfigFile verbosity path
        else die $ path ++ " already exists."
    ("diff":_) -> mapM_ putStrLn =<< userConfigDiff globalFlags
    ("update":_) -> userConfigUpdate verbosity globalFlags
    -- Error handling.
    [] -> die $ "Please specify a subcommand (see 'help user-config')"
    _ -> die $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs
  where configFile = getConfigFilePath (globalConfigFile globalFlags)
-- | See 'Distribution.Client.Install.withWin32SelfUpgrade' for details.
--
-- Expects at least two positional arguments: the PID of the old
-- process and the path of the old executable to delete.  With fewer
-- arguments it silently does nothing.
win32SelfUpgradeAction :: Win32SelfUpgradeFlags -> [String] -> Action
win32SelfUpgradeAction selfUpgradeFlags (pid:path:_extraArgs) _globalFlags = do
  let verbosity = fromFlag (win32SelfUpgradeVerbosity selfUpgradeFlags)
  -- NOTE(review): 'read pid' is partial and crashes on a non-numeric
  -- argument; tolerable only because cabal invokes itself here.
  Win32SelfUpgrade.deleteOldExeFile verbosity (read pid) path
win32SelfUpgradeAction _ _ _ = return ()
-- | Entry point used when cabal-install needs to invoke itself as a
-- Setup.hs script (e.g. when doing parallel builds): dispatch straight
-- on the requested build type.
actAsSetupAction :: ActAsSetupFlags -> [String] -> Action
actAsSetupAction actAsSetupFlags args _globalFlags =
  case fromFlag (actAsSetupBuildType actAsSetupFlags) of
    Simple    -> Simple.defaultMainArgs args
    Configure -> Simple.defaultMainWithHooksArgs
                   Simple.autoconfUserHooks args
    Make      -> Make.defaultMainArgs args
    Custom    -> error "actAsSetupAction Custom"
    (UnknownBuildType _) -> error "actAsSetupAction UnknownBuildType"
-- | Implementation of the @manpage@ command: emit a manual page,
-- generated from the command descriptions, on standard output.
-- Accepts no positional arguments.
manpageAction :: [CommandSpec action] -> Flag Verbosity -> [String] -> Action
manpageAction commands _ extraArgs _ = do
  unless (null extraArgs) $
    die $ "'manpage' doesn't take any extra arguments: " ++ unwords extraArgs
  pname <- getProgName
  -- Strip a Windows ".exe" suffix so the page shows the bare name.
  let cabalCmd
        | takeExtension pname == ".exe" = dropExtension pname
        | otherwise                     = pname
  putStrLn (manpage cabalCmd commands)
|
gbaz/cabal
|
cabal-install/Main.hs
|
bsd-3-clause
| 60,888
| 0
| 24
| 16,668
| 11,059
| 5,761
| 5,298
| 998
| 13
|
module Graphics.QML.Test.Harness where
import Graphics.QML.Test.Framework
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Graphics.QML
import Data.IORef
import Data.Proxy
import Data.Maybe
import System.IO
import System.Directory
-- | Fixed QML header written before the generated test code: a small
-- green rectangle whose @Component.onCompleted@ handler will hold the
-- generated calls.
qmlPrelude :: String
qmlPrelude =
    "import Qt 4.7\n" ++
    "Rectangle {\n" ++
    " id: page;\n" ++
    " width: 100; height: 100;\n" ++
    " color: 'green';\n" ++
    " Component.onCompleted: {\n"
-- | Fixed QML footer: close the window once the generated calls have
-- run, and close the handler and rectangle blocks.
qmlPostscript :: String
qmlPostscript =
    " window.close();\n" ++
    " }\n" ++
    "}\n"
-- | Run one generated test case: write a temporary QML script that
-- performs the generated calls, run it against a mock object built
-- from the test source, and return the recorded status.  The temp
-- file is removed afterwards; a faulting status is also printed for
-- debugging.
runTest :: (TestAction a) => TestBoxSrc a -> IO TestStatus
runTest src = do
    let js = showTestCode (srcTestBoxes src) ""
    tmpDir <- getTemporaryDirectory
    (qmlPath, hndl) <- openTempFile tmpDir "test1-.qml"
    hPutStr hndl (qmlPrelude ++ js ++ qmlPostscript)
    hClose hndl
    mock <- mockFromSrc src
    go <- newObject mock
    createEngine defaultEngineConfig {
        initialURL = filePathToURI qmlPath,
        contextObject = Just go}
    runEngines
    removeFile qmlPath
    -- The mock records pass/fail state in an IORef as the QML runs.
    status <- readIORef (mockStatus mock)
    if isJust $ testFault status
        then putStrLn $ show status
        else return ()
    return status
-- | QuickCheck property: a generated test source must run to
-- completion without recording any fault.
testProperty :: (TestAction a) => TestBoxSrc a -> Property
testProperty src = monadicIO $ do
    status <- run $ runTest src
    assert $ isNothing $ testFault status
    return ()
-- | QuickCheck driver for one test type: pin the generated source's
-- element type using the proxy carried in the 'TestType', then check
-- 'testProperty'.
checkProperty :: TestType -> IO ()
checkProperty (TestType pxy) =
    quickCheck $ testProperty . constrainSrc pxy
-- | Pin down the element type of a test source via a 'Proxy'; the
-- proxy argument is ignored at runtime.
constrainSrc :: (TestAction a) => Proxy a -> TestBoxSrc a -> TestBoxSrc a
constrainSrc _ src = src
|
drhodes/HsQML
|
test/Graphics/QML/Test/Harness.hs
|
bsd-3-clause
| 1,599
| 0
| 12
| 369
| 465
| 233
| 232
| 52
| 2
|
{-# LANGUAGE NoMonomorphismRestriction, ExtendedDefaultRules#-}
module DocTest.Flat.Instances.Text where
import qualified DocTest
import Test.Tasty(TestTree,testGroup)
import Flat.Instances.Text
import Flat.Instances.Base()
import Flat.Instances.Test
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Word
-- | Generated doctest group for Flat.Instances.Text: each entry pins
-- the exact serialised form of a 'Text' value (including multi-byte
-- UTF-8 contents) and the Text/lazy-Text and UTF8Text equivalences.
tests :: IO TestTree
tests = testGroup "Flat.Instances.Text" <$> sequence [ DocTest.test "src/Data/Flat/Instances/Text.hs:25" ["(True,16,[1,0])"] (DocTest.asPrint( tst $ T.pack "" )), DocTest.test "src/Data/Flat/Instances/Text.hs:28" ["(True,120,[1,3,97,97,97,0])"] (DocTest.asPrint( tst $ T.pack "aaa" )), DocTest.test "src/Data/Flat/Instances/Text.hs:31" ["(True,120,[1,6,194,162,194,162,194,162,0])"] (DocTest.asPrint( tst $ T.pack "¢¢¢" )), DocTest.test "src/Data/Flat/Instances/Text.hs:34" ["(True,120,[1,9,230,151,165,230,151,165,230,151,165,0])"] (DocTest.asPrint( tst $ T.pack "日日日" )), DocTest.test "src/Data/Flat/Instances/Text.hs:44" ["True"] (DocTest.asPrint( tst (T.pack "abc") == tst (TL.pack "abc") )), DocTest.test "src/Data/Flat/Instances/Text.hs:60" ["True"] (DocTest.asPrint( tst (UTF8Text $ T.pack "日日日") == tst (T.pack "日日日") ))]
|
tittoassini/flat
|
test/DocTest/Data/Flat/Instances/Text.hs
|
bsd-3-clause
| 1,242
| 0
| 17
| 135
| 340
| 185
| 155
| 12
| 1
|
module Main ( main ) where
import Test.Tasty
import qualified OrgModeTest
-- | Test-suite entry point: run the whole tree with tasty's default
-- runner.
main :: IO ()
main = defaultMain tests

-- | Root test tree, aggregating the OrgMode tests under an unnamed
-- group.
tests :: TestTree
tests = testGroup "" [OrgModeTest.tests]
|
sbergot/haskellorgmode
|
tests/MainTest.hs
|
bsd-3-clause
| 185
| 0
| 7
| 40
| 56
| 32
| 24
| 7
| 1
|
module RTS.Bytecode where
import Core.TT
import Core.CaseTree
import Core.Evaluate
import Idris.AbsSyntax
import RTS.SC
import Control.Monad.State
-- | De Bruijn level of a local variable / stack slot.
type Local = Int

-- | Atomic bytecode operands: a global name with its arity ('BP'), a
-- local slot ('BL'), or a constant ('BC').
data BAtom = BP Name Int | BL Local | BC Const
  deriving Show

-- Like SC, but with explicit evaluation, de Bruijn levels for locals, and all
-- intermediate values put into variables (which can be stored on the stack or
-- in registers when generating code)
data Bytecode = BAtom BAtom
              | BApp BAtom [BAtom]
              | BTailApp BAtom [BAtom]
              | BLazy BAtom [BAtom]
              | BLet Local Bytecode Bytecode
              | BFCall String CType [(BAtom, CType)]
              | BCon Tag [BAtom]
              | BCase Local [BAlt]
              | BPrimOp SPrim [BAtom]
              | BError String
              | BGetArgs [Name] Bytecode -- get function arguments
              | BReserve Int Bytecode -- reserve stack space, clear on exit
  deriving Show

-- | Case alternatives: constructor match (tag, argument names, and the
-- level at which the arguments are bound), constant match, or default.
data BAlt = BConCase Tag [Name] Int Bytecode
          | BConstCase Const Bytecode
          | BDefaultCase Bytecode
  deriving Show
-- | Compile every supercombinator definition to bytecode, keeping the
-- whole definition list available for arity lookups.
bcdefs :: [(Name, SCDef)] -> [(Name, (Int, Bytecode))]
bcdefs xs = [ (n, bc xs s) | (n, s) <- xs ]
-- | Compile one supercombinator: pair its arity with bytecode that
-- first fetches the arguments and then evaluates the body.
-- NOTE(review): no type signature in the original; kept as-is.
bc all (SCDef args max c) = (length args,
           BGetArgs (map fst args) (bcExp all max (length args) c))
-- | Compile an expression.  @all@ is the full definition list (for
-- arity lookups), @v@ the initial slot counter, @arity@ the number of
-- arguments already on the stack.  The compiler runs in 'State'
-- tracking the highest slot used; when slots beyond the arguments are
-- needed, the result is wrapped in 'BReserve'.
bcExp all v arity x
   = let (code, max) = runState (bc' True arity x) v
         space = max - arity in
         if (space > 0) then BReserve space code else code
  where
    -- Record use of slot @i@, bumping the high-water mark if needed.
    ref i = do s <- get
               when (i > s) $ put i
    -- Allocate a fresh slot.
    next = do s <- get
              put (s + 1)
              return s
    -- Arity of a global.  NOTE(review): partial — there is no
    -- 'Nothing' alternative, so an unknown name crashes with a
    -- pattern-match failure.
    scarity n = case lookup n all of
                     Just (SCDef args _ _) -> length args
    -- Compile one expression; @tl@ marks tail position, @d@ is the
    -- current de Bruijn depth.
    bc' :: Bool -> Int -> SCExp -> State Int Bytecode
    bc' tl d (SRef n) = if tl then return $ BTailApp (BP n (scarity n)) []
                              else return $ BApp (BP n (scarity n)) []
    bc' tl d (SLoc i) = do ref i; return $ BAtom (BL i)
    bc' tl d (SApp f args)
        = do f' <- case f of
                        SRef n -> return $ BAtom (BP n (scarity n))
                        _ -> bc' False d f
             args' <- mapM (bc' False d) args
             let bapp = if tl then BTailApp else BApp
             -- Non-atomic function values get let-bound to a slot first.
             case f' of
                  BAtom r -> mkApp (\x -> bapp r x) args' []
                  bc -> do v <- next
                           mkApp (\x -> BLet v bc (bapp (BL v) x)) args' []
    bc' tl d (SLazyApp f args) = do args' <- mapM (bc' False d) args
                                    -- let bapp = if tl then BTailApp else BApp
                                    mkApp (\x -> BLazy (BP f (scarity f)) x) args' []
    bc' tl d (SLet n val sc) = do v' <- bc' False d val
                                  sc' <- bc' False (d + 1) sc
                                  return $ BLet d v' sc'
    bc' tl d (SCon t args) = do args' <- mapM (bc' False d) args
                                mkApp (\x -> BCon t x) args' []
    bc' tl d (SFCall c t args) = do args' <- mapM (bc' False d) (map fst args)
                                    mkFApp c t (zip args' (map snd args)) []
    bc' tl d (SConst c) = return $ BAtom (BC c)
    bc' tl d (SError s) = return $ BError s
    bc' tl d (SCase e alts) = do e' <- bc' False d e
                                 alts' <- mapM (bcAlt tl d) alts
                                 -- Scrutinee must live in a slot for BCase.
                                 case e' of
                                      BAtom (BL i) -> return $ BCase i alts'
                                      bc -> do v <- next
                                               return $ BLet v bc (BCase v alts')
    bc' tl d (SPrimOp p args) = do args' <- mapM (bc' tl d) args
                                   mkApp (\x -> BPrimOp p x) args' []
    -- Let-bind every non-slot argument so applications see only slots.
    mkApp ap [] locs = return $ ap locs
    mkApp ap (BAtom (BL i) : as) locs = mkApp ap as (locs ++ [BL i])
    mkApp ap (a : as) locs = do v <- next
                                app <- mkApp ap as (locs ++ [BL v])
                                return $ BLet v a app
    -- Compile one case alternative at the appropriate depth.
    bcAlt tl d (SConCase t args e) = do e' <- bc' tl (d + length args) e
                                        return $ BConCase t args d e'
    bcAlt tl d (SConstCase i e) = do e' <- bc' tl d e
                                     return $ BConstCase i e'
    bcAlt tl d (SDefaultCase e) = do e' <- bc' tl d e
                                     return $ BDefaultCase e'
    -- As mkApp, but for foreign calls whose arguments carry C types.
    mkFApp c ty [] locs = return $ BFCall c ty locs
    mkFApp c ty ((BAtom (BL i), t) : as) locs
          = mkFApp c ty as (locs ++ [(BL i, t)])
    mkFApp c ty ((a, t) : as) locs
          = do v <- next
               app <- mkFApp c ty as (locs ++ [(BL v, t)])
               return $ BLet v a app
|
byorgey/Idris-dev
|
src/RTS/Bytecode.hs
|
bsd-3-clause
| 4,710
| 0
| 21
| 2,034
| 1,879
| 940
| 939
| 93
| 23
|
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Plugins
import qualified Filter as F
main :: IO ()
main =
    do let names = ['F.filter]
       -- Register the reloadable names and do an initial load before
       -- entering the interactive loop.
       ph <- newPluginsHandle
       reload ph names
       loop ph
-- | Read lines from stdin and echo each one through the
-- (hot-reloadable) 'F.filter' function, until the user types "quit".
loop :: PluginsHandle -> IO ()
loop ph = do
    putStrLn "Enter some text followed by <enter> or 'quit' to exit:"
    msg <- getLine
    if msg == "quit"
        then return ()
        else do fn <- ($(lookupName 'F.filter) ph)
                print (fn msg)
                loop ph
|
Happstack/plugins-ng
|
Example.hs
|
bsd-3-clause
| 494
| 0
| 18
| 158
| 165
| 80
| 85
| 20
| 2
|
module IntegrationTests where
import Test.Hspec
import Test.QuickCheck
import Control.Exception
import Data.ByteString.Lazy.Char8
import Http
import Types
import UriBuilder
import Auth
import FireLogic
--import JWT -- not yet existing, will provide the generateJWT function.
-- | Integration tests: exercise the HTTP layer against httpbin.org and
-- a configured Firebase endpoint.  These require network access and a
-- valid configuration from 'getConf'.
spec :: Spec
spec = do
  describe "Http.get" $ do
    it "returns the contens of httpbin.org/xml" $ do
      (FireResponse _ status _) <- get $ FireRequest "http://httpbin.org/xml" []
      status `shouldBe` 200
  describe "Http.get with empty parameters" $ do
    it "throws an error" $ do
      (get $ FireRequest "http://httpbin.org.xml" [(Format, "")]) `shouldThrow` anyException
  describe "Test configuration is a go" $ do
    it "returns -awesome- when asked for Haskell" $ do
      config <- getConf
      -- config !! 0 is the auth token, config !! 1 the base URL.
      let url = (config !! 1) ++ "haskell.json"
      let authReq = (Auth, Prelude.head config)
      FireResponse body _ _ <- get $ FireRequest url [authReq]
      unpack body `shouldBe` "\"awesome\""
-- # JWT Module
-- it "return -awesome- when asked for Haskell" $ do
-- config <- getConf
-- let url = (config !! 1) ++ "haskell.json"
-- let secret = (Auth, Prelude.head config)
-- FireResponse body _ _ <- get $ FireRequest url [generateJWT secret ] -- the payload itself (body) will possibly also have to go into the generateJWT function
-- unpack body `shouldBe` "\"awesome\""
|
sphaso/firebase-haskell-client
|
test/IntegrationTests.hs
|
bsd-3-clause
| 1,499
| 0
| 18
| 399
| 271
| 141
| 130
| 26
| 1
|
module Data.EBNF.Trie where
import qualified Data.Foldable as F
import Data.Functor
import Data.List
-- | A rose tree ("trie") carrying one value at every node.
data Trie a = Trie {
    value    :: a,
    children :: [Trie a]
} deriving (Show)

-- | All subtries of a trie in pre-order: the trie itself first,
-- followed by the flattened children.
flatten :: Trie a -> [Trie a]
flatten t = t : concatMap flatten (children t)

-- | All values stored in a trie, in pre-order.
flatten' :: Trie a -> [a]
flatten' t = value t : concatMap flatten' (children t)

-- | Fold over the pre-order value sequence.
instance F.Foldable Trie where
    foldr f z = foldr f z . flatten'

-- | Map a function over every stored value, preserving structure.
instance Functor Trie where
    fmap f (Trie v cs) = Trie (f v) (map (fmap f) cs)
|
Lokidottir/ebnf-bff
|
src/Data/EBNF/Trie.hs
|
mit
| 724
| 0
| 10
| 213
| 240
| 129
| 111
| 18
| 1
|
module Test.QuickFuzz.Global where
import System.IO.Unsafe (unsafePerformIO)
import Control.Exception
import Data.Global
import Data.IORef
import Data.Maybe
-- | Mutable list of frequencies for one generator.
type FreqVar = IORef [Int]
-- | Global table mapping generator names to their frequency vars.
type FreqState = IORef [(String, FreqVar)]

-- | Process-wide frequency table (a named global via Data.Global).
freqs :: FreqState
freqs = declareIORef "freqs" []

-- | Pair a generator name with a freshly declared global frequency var.
declareFreqVar var = (var, declareIORef var ([] :: [Int]))
-- | Read the contents of @freqs.txt@ from the current directory,
-- returning the string @"[]"@ (an empty list literal) when the file
-- cannot be read.
--
-- The contents are forced inside the 'try': with lazy 'readFile'
-- alone, only the open error is caught and read errors would escape
-- later, when the contents are first demanded.
readFreqFile :: IO String
readFreqFile = do
    strOrExc <- try $ do
        contents <- readFile "freqs.txt"
        _ <- evaluate (length contents)
        return contents
    case (strOrExc :: Either IOError String) of
        Left _ -> return "[]"
        Right contents -> return contents
-- | Populate the global frequency table from @freqs.txt@.  The file is
-- expected to contain a showable @[(String, [Int])]@; 'read' is
-- partial and will crash on malformed contents.
initFreqs :: IO ()
initFreqs = do
    contents <- readFreqFile
    let freqMap = (read contents) :: [(String, [Int])]
    updateVar freqs (map (\(var, xs) -> declareFreqVar var) freqMap)
    -- Note: this local 'freqs' binding shadows the global 'freqs'.
    freqs <- readIORef freqs
    mapM_ (\(var, xs) -> setFreqs var xs freqs ) freqMap
-- | Look up the frequencies for a generator name, defaulting to an
-- infinite list of 1s when the name is unknown.
-- NOTE(review): uses 'unsafePerformIO' to read global mutable state
-- from pure code; results depend on when 'initFreqs' ran.
getFreqs :: String -> [Int]
getFreqs var = unsafePerformIO $ do
    freqs <- readIORef freqs
    --print var
    case (lookup var freqs) of
        Just x -> readIORef x
        Nothing -> return (repeat 1)
-- | Overwrite the frequencies stored for a generator name.
-- NOTE(review): 'fromJust' is partial — crashes if @var@ is absent
-- from the table; callers must guarantee the name was declared.
setFreqs :: String -> [Int] -> [(String, FreqVar)] -> IO ()
setFreqs var xs freqs = updateVar (fromJust (lookup var freqs)) xs
-- | Atomically overwrite the contents of an 'IORef', discarding the
-- previous value.
updateVar :: (IORef a) -> a -> IO ()
updateVar v xs = atomicModifyIORef v (\_ -> (xs, ()))
|
elopez/QuickFuzz
|
src/Test/QuickFuzz/Global.hs
|
gpl-3.0
| 1,467
| 0
| 13
| 480
| 507
| 264
| 243
| 35
| 2
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Korean locale, ko-KR) for the ZAP
     Sequence Scanner extension: declares the map file plus the TOC,
     Index, Search and Favorites navigator views. -->
<helpset version="2.0" xml:lang="ko-KR">
<title>Sequence Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/sequence/src/main/javahelp/org/zaproxy/zap/extension/sequence/resources/help_ko_KR/helpset_ko_KR.hs
|
apache-2.0
| 978
| 80
| 66
| 160
| 415
| 210
| 205
| -1
| -1
|
-- by Kirill Elagin
-- | Minimum number of coins needed to change @m@ units of money with
-- denominations 1, 5 and 10: greedily take 10s, then 5s, then 1s.
-- (The greedy strategy is optimal for this denomination set.)
-- The original body was the template placeholder @n@, which does not
-- even compile.
get_change :: Int -> Int
get_change m = tens + fives + ones
  where
    tens  = m `div` 10
    fives = (m `mod` 10) `div` 5
    ones  = m `mod` 5
main :: IO ()
main = do
  -- NOTE(review): the pattern @[m]@ fails unless stdin contains
  -- exactly one whitespace-separated token, and 'read' is partial on
  -- non-numeric input; acceptable for a grader-driven exercise.
  [m] <- fmap words getLine
  print $ get_change (read m)
|
xunilrj/sandbox
|
courses/coursera-sandiego-algorithms/algorithmic-toolbox/assignment002/change/change.hs
|
apache-2.0
| 170
| 0
| 10
| 41
| 66
| 33
| 33
| 6
| 1
|
-- Copyright (c) 2010 - Seweryn Dynerowicz
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Policy.SharedAttribute
( SharedAttr(..)
) where
import Data.Set
import Algebra.Semiring
-- | The fixed universe of attributes.
data Attribute = A | B | C | D
  deriving (Ord, Eq, Show)

-- | A policy value: a set of shared attributes.
data SharedAttr = SA (Set Attribute)
  deriving(Eq, Show)

-- | Semiring over attribute sets: addition is union (identity: the
-- empty set); multiplication is intersection (identity: the full
-- universe {A,B,C,D}).
instance Semiring (SharedAttr) where
  add (SA a) (SA b) = SA (union a b)
  addId = SA empty
  mul (SA a) (SA b) = SA (intersection a b)
  mulId = SA (fromList [A,B,C,D])
|
sdynerow/Semirings-Library
|
haskell/Metrics/SharedAttribute.hs
|
apache-2.0
| 983
| 0
| 9
| 181
| 205
| 117
| 88
| 13
| 0
|
{-# LANGUAGE PatternGuards #-}
module Idris.DataOpts where
-- Forcing, detagging and collapsing
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.Core.TT
import Control.Applicative
import Data.List
import Data.Maybe
import Debug.Trace
-- | Terms to which the forcing/detagging/collapsing optimisations can
-- be applied, inside the 'Idris' monad.
class Optimisable term where
    applyOpts :: term -> Idris term
-- | Optimise both components of a pair.
instance (Optimisable a, Optimisable b) => Optimisable (a, b) where
    applyOpts (x, y) = (,) <$> applyOpts x <*> applyOpts y

-- | Optimise the last two components of a triple; the first is kept.
instance (Optimisable a, Optimisable b) => Optimisable (vs, a, b) where
    applyOpts (v, x, y) = (,,) v <$> applyOpts x <*> applyOpts y

-- | Optimise every element of a list.
instance Optimisable a => Optimisable [a] where
    applyOpts = mapM applyOpts

-- | Optimise whichever alternative is present.
instance Optimisable a => Optimisable (Either a (a, a)) where
    applyOpts (Left t) = Left <$> applyOpts t
    applyOpts (Right t) = Right <$> applyOpts t
-- Raw is for compile time optimisation (before type checking)
-- Term is for run time optimisation (after type checking, collapsing allowed)

-- Compile time: no collapsing
instance Optimisable Raw where
    applyOpts t@(RApp f a)
        | (Var n, args) <- raw_unapply t -- MAGIC HERE
            = raw_apply (Var n) <$> mapM applyOpts args
        | otherwise = RApp <$> applyOpts f <*> applyOpts a
    applyOpts (RBind n b t) = RBind n <$> applyOpts b <*> applyOpts t
    -- Forcing annotations are simply discarded at compile time.
    applyOpts (RForce t) = applyOpts t
    applyOpts t = return t
-- Erase types (makes ibc smaller, and we don't need them)
instance Optimisable (Binder (TT Name)) where
    -- Let keeps its (optimised) value; the binder type is erased.
    applyOpts (Let t v) = Let <$> return Erased <*> applyOpts v
    applyOpts b = return (b { binderTy = Erased })

instance Optimisable (Binder Raw) where
    -- For Raw binders the type is optimised, not erased.
    applyOpts b = do t' <- applyOpts (binderTy b)
                     return (b { binderTy = t' })
-- Run-time: do everything

-- | Module path of the Prelude Nat namespace, used below to recognise
-- Nat operations.  NOTE(review): no type signature in the original.
prel = [txt "Nat", txt "Prelude"]
-- | Run-time optimisation of core terms.  The 'P' cases rewrite
-- Prelude Nat arithmetic (plus/mult/divNat/modNat) to big-integer
-- primitives and erase fromIntegerNat/toIntegerNat conversions; the
-- remaining cases traverse the term and saturate data-constructor
-- applications via 'applyDataOptRT'.
instance Optimisable (TT Name) where
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "plus" && mod == prel
            = return (P Ref (sUN "prim__addBigInt") Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "mult" && mod == prel
            = return (P Ref (sUN "prim__mulBigInt") Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "divNat" && mod == prel
            = return (P Ref (sUN "prim__sdivBigInt") Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "modNat" && mod == prel
            = return (P Ref (sUN "prim__sremBigInt") Erased)
    -- Applied fromIntegerNat disappears entirely...
    applyOpts (App _ (P _ (NS (UN fn) mod) _) x)
        | fn == txt "fromIntegerNat" && mod == prel
            = applyOpts x
    -- ...while unapplied conversions become the identity function.
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "fromIntegerNat" && mod == prel
            = return (App Complete (P Ref (sNS (sUN "id") ["Basics","Prelude"]) Erased) Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "toIntegerNat" && mod == prel
            = return (App Complete (P Ref (sNS (sUN "id") ["Basics","Prelude"]) Erased) Erased)
    applyOpts c@(P (DCon t arity uniq) n _)
        = return $ applyDataOptRT n t arity uniq []
    applyOpts t@(App s f a)
        | (c@(P (DCon t arity uniq) n _), args) <- unApply t
            = applyDataOptRT n t arity uniq <$> mapM applyOpts args
        | otherwise = App s <$> applyOpts f <*> applyOpts a
    applyOpts (Bind n b t) = Bind n <$> applyOpts b <*> applyOpts t
    applyOpts (Proj t i) = Proj <$> applyOpts t <*> pure i
    applyOpts t = return t
-- Need to saturate arguments first to ensure that optimisation happens uniformly
-- | Apply run-time data-constructor optimisations: eta-expand partial
-- constructor applications so all are saturated, then rewrite Nat
-- constructors (Z/S) to big-integer arithmetic.
applyDataOptRT :: Name -> Int -> Int -> Bool -> [Term] -> Term
applyDataOptRT n tag arity uniq args
    | length args == arity = doOpts n args
    | otherwise = let extra = satArgs (arity - length args)
                      tm = doOpts n (args ++ map (\n -> P Bound n Erased) extra)
                  in bind extra tm
  where
    -- Fresh machine names for the missing (saturating) arguments.
    satArgs n = map (\i -> sMN i "sat") [1..n]
    -- Wrap the saturated term in lambdas for the added arguments.
    bind [] tm = tm
    bind (n:ns) tm = Bind n (Lam Erased) (pToV n (bind ns tm))
    -- Nat special cases
    -- TODO: Would be nice if this was configurable in idris source!
    -- Issue #1597 https://github.com/idris-lang/Idris-dev/issues/1597
    doOpts (NS (UN z) [nat, prelude]) []
        | z == txt "Z" && nat == txt "Nat" && prelude == txt "Prelude"
            = Constant (BI 0)
    doOpts (NS (UN s) [nat, prelude]) [k]
        | s == txt "S" && nat == txt "Nat" && prelude == txt "Prelude"
            = App Complete (App Complete (P Ref (sUN "prim__addBigInt") Erased) k) (Constant (BI 1))
    doOpts n args = mkApp (P (DCon tag arity uniq) n Erased) args
|
BartAdv/Idris-dev
|
src/Idris/DataOpts.hs
|
bsd-3-clause
| 4,457
| 0
| 16
| 1,176
| 1,842
| 907
| 935
| 82
| 4
|
module Digest
( digest
, digestlazy
) where
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
-- | SHA-1 digest of a strict 'B.ByteString'.
digest :: B.ByteString -> B.ByteString
digest = SHA1.hash

-- | SHA-1 digest of a lazy 'BL.ByteString'; the result is strict.
digestlazy :: BL.ByteString -> B.ByteString
digestlazy = SHA1.hashlazy
|
artems/htorr
|
src/Digest.hs
|
bsd-3-clause
| 328
| 0
| 6
| 62
| 90
| 54
| 36
| 10
| 1
|
-- FAILING TEST: This should be rejected if we wish to disallow applying
-- concrete refinements to non-refined types (e.g. Int) where they are currently
-- silently dropped.
-- issue #519
module Main where
{-@ id2 :: forall <p :: Int -> Prop>. Int<p> -> Int<p> @-}
-- | Identity on Int.  The LiquidHaskell annotation above applies an
-- abstract refinement to the non-refined type Int; per the module
-- header, this negative test expects the checker to reject the file.
id2 :: Int -> Int
id2 x = x
{-@ type Neg = Int<{\x -> x < 0}> @-}
{-@ three :: Neg @-}
-- Deliberately refinement-ill-typed: 3 is not negative.
three = id2 3
|
ssaavedra/liquidhaskell
|
tests/neg/AbsApp.hs
|
bsd-3-clause
| 372
| 0
| 5
| 80
| 36
| 23
| 13
| 4
| 1
|
import Graphics.UI.Gtk
import Data.Char (toUpper)
main :: IO ()
main= do
     initGUI
     window <- windowNew
     set window [windowTitle := "Notebook Example 1", windowDefaultWidth := 300,
                 windowDefaultHeight := 200 ]
     ntbk <- notebookNew
     containerAdd window ntbk
     set ntbk [notebookScrollable := True, notebookTabPos := PosBottom]
     stls <- stockListIds
     -- One notebook page per known stock icon.
     sequence_ (map (myNewPage ntbk) stls)
     -- Log every page switch to stdout.
     onSwitchPage ntbk (putStrLn . ((++)"Page: ") . show)
     widgetShowAll window
     onDestroy window mainQuit
     mainGUI
-- | Turn a stock id like @gtk-dialog-info@ into a tab label like
-- @DialogInfo@: CamelCase the dash-separated words, then drop the
-- first three characters (the camel-cased @gtk-@ prefix).
tabName :: StockId -> String
tabName st = (drop 3) (conv st) where
  -- Fix: the original had no case for the empty string, so ids with
  -- no characters caused a pattern-match failure.
  conv [] = []
  conv (x:[]) = x:[]
  conv (x:y:ys) | x == '-' = (toUpper y):(conv ys)
                | otherwise = x: (conv (y:ys))
-- | Append one notebook page showing the given stock icon, labelled
-- via 'tabName'; returns the new page's index.
myNewPage :: Notebook -> StockId -> IO Int
myNewPage noteb stk = do
    icon <- imageNewFromStock stk 6
    notebookAppendPage noteb icon (tabName stk)
|
k0001/gtk2hs
|
docs/tutorial/Tutorial_Port/Example_Code/GtkChap5-4a.hs
|
gpl-3.0
| 1,007
| 0
| 12
| 313
| 371
| 179
| 192
| 27
| 2
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
--
-- (c) The University of Glasgow
--
#include "HsVersions.h"
module Avail (
Avails,
AvailInfo(..),
avail,
availsToNameSet,
availsToNameSetWithSelectors,
availsToNameEnv,
availName, availNames, availNonFldNames,
availNamesWithSelectors,
availFlds,
stableAvailCmp,
plusAvail,
trimAvail,
filterAvail,
filterAvails,
nubAvails
) where
import GhcPrelude
import Name
import NameEnv
import NameSet
import FieldLabel
import Binary
import ListSetOps
import Outputable
import Util
import Data.Data ( Data )
import Data.List ( find )
import Data.Function
-- -----------------------------------------------------------------------------
-- The AvailInfo type
-- | Records what things are "available", i.e. in scope
data AvailInfo = Avail Name -- ^ An ordinary identifier in scope
               | AvailTC Name
                         [Name]
                         [FieldLabel]
                 -- ^ A type or class in scope. Parameters:
                 --
                 -- 1) The name of the type or class
                 -- 2) The available pieces of type or class,
                 -- excluding field selectors.
                 -- 3) The record fields of the type
                 -- (see Note [Representing fields in AvailInfo]).
                 --
                 -- The AvailTC Invariant:
                 -- * If the type or class is itself
                 -- to be in scope, it must be
                 -- *first* in this list. Thus,
                 -- typically: @AvailTC Eq [Eq, ==, \/=]@
               deriving( Eq, Data )
                        -- Equality used when deciding if the
                        -- interface has changed

-- | A collection of 'AvailInfo' - several things that are \"available\"
-- (kept as a plain list; entries with the same 'availName' are merged
-- by 'nubAvails')
type Avails = [AvailInfo]
{-
Note [Representing fields in AvailInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When -XDuplicateRecordFields is disabled (the normal case), a
datatype like
data T = MkT { foo :: Int }
gives rise to the AvailInfo
AvailTC T [T, MkT] [FieldLabel "foo" False foo],
whereas if -XDuplicateRecordFields is enabled it gives
AvailTC T [T, MkT] [FieldLabel "foo" True $sel:foo:MkT]
since the label does not match the selector name.
The labels in a field list are not necessarily unique:
data families allow the same parent (the family tycon) to have
multiple distinct fields with the same label. For example,
data family F a
data instance F Int = MkFInt { foo :: Int }
data instance F Bool = MkFBool { foo :: Bool}
gives rise to
AvailTC F [F, MkFInt, MkFBool]
[FieldLabel "foo" True $sel:foo:MkFInt, FieldLabel "foo" True $sel:foo:MkFBool].
Moreover, note that the flIsOverloaded flag need not be the same for
all the elements of the list. In the example above, this occurs if
the two data instances are defined in different modules, one with
`-XDuplicateRecordFields` enabled and one with it disabled. Thus it
is possible to have
AvailTC F [F, MkFInt, MkFBool]
[FieldLabel "foo" True $sel:foo:MkFInt, FieldLabel "foo" False foo].
If the two data instances are defined in different modules, both
without `-XDuplicateRecordFields`, it will be impossible to export
them from the same module (even with `-XDuplicateRecordfields`
enabled), because they would be represented identically. The
workaround here is to enable `-XDuplicateRecordFields` on the defining
modules.
-}
-- | Compare lexicographically
-- ('Avail' sorts before 'AvailTC'; within 'AvailTC' compare by name,
-- then pieces, then field selectors — all via the stable name order.)
stableAvailCmp :: AvailInfo -> AvailInfo -> Ordering
stableAvailCmp (Avail n1) (Avail n2) = n1 `stableNameCmp` n2
stableAvailCmp (Avail {}) (AvailTC {}) = LT
stableAvailCmp (AvailTC n ns nfs) (AvailTC m ms mfs) =
    (n `stableNameCmp` m) `thenCmp`
    (cmpList stableNameCmp ns ms) `thenCmp`
    (cmpList (stableNameCmp `on` flSelector) nfs mfs)
stableAvailCmp (AvailTC {}) (Avail {}) = GT
-- | Wrap a single 'Name' as an ordinary (non-type/class) 'AvailInfo'.
avail :: Name -> AvailInfo
avail = Avail
-- -----------------------------------------------------------------------------
-- Operations on AvailInfo
-- | The set of every name (excluding overloaded selectors) made
-- available by a list of 'AvailInfo'.
availsToNameSet :: [AvailInfo] -> NameSet
availsToNameSet avails = foldr add emptyNameSet avails
      where add avail set = extendNameSetList set (availNames avail)

-- | As 'availsToNameSet', but including overloaded record selectors.
availsToNameSetWithSelectors :: [AvailInfo] -> NameSet
availsToNameSetWithSelectors avails = foldr add emptyNameSet avails
      where add avail set = extendNameSetList set (availNamesWithSelectors avail)

-- | Map every available name back to the 'AvailInfo' that brought it
-- into scope.
availsToNameEnv :: [AvailInfo] -> NameEnv AvailInfo
availsToNameEnv avails = foldr add emptyNameEnv avails
     where add avail env = extendNameEnvList env
                               (zip (availNames avail) (repeat avail))
-- | Just the main name made available, i.e. not the available pieces
-- of type or class brought into scope by the 'GenAvailInfo'
availName :: AvailInfo -> Name
availName (Avail n) = n
availName (AvailTC n _ _) = n

-- | All names made available by the availability information (excluding overloaded selectors)
availNames :: AvailInfo -> [Name]
availNames (Avail n) = [n]
-- Only non-overloaded field selectors are included here.
availNames (AvailTC _ ns fs) = ns ++ [ flSelector f | f <- fs, not (flIsOverloaded f) ]

-- | All names made available by the availability information (including overloaded selectors)
availNamesWithSelectors :: AvailInfo -> [Name]
availNamesWithSelectors (Avail n) = [n]
availNamesWithSelectors (AvailTC _ ns fs) = ns ++ map flSelector fs

-- | Names for non-fields made available by the availability information
availNonFldNames :: AvailInfo -> [Name]
availNonFldNames (Avail n) = [n]
availNonFldNames (AvailTC _ ns _) = ns

-- | Fields made available by the availability information
availFlds :: AvailInfo -> [FieldLabel]
availFlds (AvailTC _ _ fs) = fs
availFlds _ = []
-- -----------------------------------------------------------------------------
-- Utility
-- | Merge two 'AvailInfo's for the same entity (their 'availName's
-- must agree; checked in debug builds only).  Maintains the AvailTC
-- invariant that the parent, when in scope, comes first in the list
-- of pieces.
plusAvail :: AvailInfo -> AvailInfo -> AvailInfo
plusAvail a1 a2
  | debugIsOn && availName a1 /= availName a2
  = pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2])
plusAvail a1@(Avail {}) (Avail {}) = a1
plusAvail (AvailTC _ [] []) a2@(AvailTC {}) = a2
plusAvail a1@(AvailTC {}) (AvailTC _ [] []) = a1
plusAvail (AvailTC n1 (s1:ss1) fs1) (AvailTC n2 (s2:ss2) fs2)
  = case (n1==s1, n2==s2) of -- Maintain invariant the parent is first
      (True,True) -> AvailTC n1 (s1 : (ss1 `unionLists` ss2))
                                (fs1 `unionLists` fs2)
      (True,False) -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2)))
                                 (fs1 `unionLists` fs2)
      (False,True) -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2))
                                 (fs1 `unionLists` fs2)
      (False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2))
                                  (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 ss1 fs1) (AvailTC _ [] fs2)
  = AvailTC n1 ss1 (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 [] fs1) (AvailTC _ ss2 fs2)
  = AvailTC n1 ss2 (fs1 `unionLists` fs2)
-- Mixing Avail with AvailTC is a bug in the caller.
plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2])
-- | trims an 'AvailInfo' to keep only a single name
trimAvail :: AvailInfo -> Name -> AvailInfo
trimAvail (Avail n) _ = Avail n
trimAvail (AvailTC n ns fs) m = case find ((== m) . flSelector) fs of
    -- If the name is a field selector, keep just that field;
    Just x -> AvailTC n [] [x]
    -- otherwise it must be one of the ordinary pieces (debug-checked).
    Nothing -> ASSERT( m `elem` ns ) AvailTC n [m] []
-- | filters 'AvailInfo's by the given predicate
filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo]
filterAvails keep avails = foldr (filterAvail keep) [] avails

-- | filters an 'AvailInfo' by the given predicate
-- (written consumer-style so it can be the folding function above)
filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo]
filterAvail keep ie rest =
  case ie of
    Avail n | keep n -> ie : rest
            | otherwise -> rest
    AvailTC tc ns fs ->
        let ns' = filter keep ns
            fs' = filter (keep . flSelector) fs in
        -- An AvailTC that loses all its pieces disappears entirely.
        if null ns' && null fs' then rest else AvailTC tc ns' fs' : rest
-- | Combines 'AvailInfo's from the same family
-- 'avails' may have several items with the same availName
-- E.g import Ix( Ix(..), index )
-- will give Ix(Ix,index,range) and Ix(index)
-- We want to combine these; addAvail does that
nubAvails :: [AvailInfo] -> [AvailInfo]
-- NOTE(review): lazy 'foldl' builds a chain of thunks here; 'foldl'' would
-- be the usual choice -- confirm it is in scope before changing.
nubAvails avails = nameEnvElts (foldl add emptyNameEnv avails)
  where
    -- Entries with the same 'availName' are merged with 'plusAvail'.
    add env avail = extendNameEnv_C plusAvail env (availName avail) avail
-- -----------------------------------------------------------------------------
-- Printing
instance Outputable AvailInfo where
    ppr = pprAvail

-- | Pretty-print an 'AvailInfo': a plain name, or a parent followed by its
-- children and field labels in braces, separated by a semicolon.
pprAvail :: AvailInfo -> SDoc
pprAvail (Avail n)
  = ppr n
pprAvail (AvailTC n ns fs)
  = ppr n <> braces (sep [ fsep (punctuate comma (map ppr ns)) <> semi
                         , fsep (punctuate comma (map (ppr . flLabel) fs))])
instance Binary AvailInfo where
    -- Serialised form: a tag byte (0 = 'Avail', 1 = 'AvailTC') followed
    -- by the constructor's fields.  The tag values are part of the
    -- interface-file format and must not change.
    put_ bh (Avail aa) = do
            putByte bh 0
            put_ bh aa
    put_ bh (AvailTC ab ac ad) = do
            putByte bh 1
            put_ bh ab
            put_ bh ac
            put_ bh ad
    get bh = do
            h <- getByte bh
            case h of
              0 -> do aa <- get bh
                      return (Avail aa)
              -- Any non-zero tag is read as 'AvailTC'.
              _ -> do ab <- get bh
                      ac <- get bh
                      ad <- get bh
                      return (AvailTC ab ac ad)
|
ezyang/ghc
|
compiler/basicTypes/Avail.hs
|
bsd-3-clause
| 9,571
| 0
| 16
| 2,689
| 2,088
| 1,114
| 974
| 138
| 4
|
{-# LANGUAGE OverloadedStrings #-}
-- This benchmark reveals a huge performance regression that showed up
-- under GHC 7.8.1 (https://github.com/bos/attoparsec/issues/56).
--
-- With GHC 7.6.3 and older, this program runs in 0.04 seconds. Under
-- GHC 7.8.1 with (<|>) inlined, time jumps to 12 seconds!
import Control.Applicative
import Data.Text (Text)
import qualified Data.Attoparsec.Text as A
import qualified Data.Text as T
-- | Run the benchmark parser over the input; only the number of matches is
-- returned, to avoid printing out the entire matched list.
testParser :: Text -> Either String Int
testParser input = length <$> A.parseOnly matcher input
  where
    matcher = many ((() <$ A.string "b") <|> (() <$ A.anyChar))
-- | Parse 50000 'a's; triggers the (<|>) regression under GHC 7.8.1.
main :: IO ()
main = print (testParser (T.replicate 50000 "a"))
|
beni55/attoparsec
|
benchmarks/Alternative.hs
|
bsd-3-clause
| 698
| 0
| 15
| 127
| 146
| 82
| 64
| 11
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.BuildPlanSpec where
import Stack.BuildPlan
import Control.Monad.Logger
import Control.Exception hiding (try)
import Control.Monad.Catch (try)
import Data.Monoid
import qualified Data.Map as Map
import qualified Data.Set as Set
import Network.HTTP.Conduit (Manager)
import Prelude -- Fix redundant import warnings
import System.Directory
import System.Environment
import System.IO.Temp (withSystemTempDirectory)
import Test.Hspec
import Stack.Config
import Stack.Types
import Stack.Types.StackT
-- | Per-suite fixture shared by all tests: an HTTP connection manager.
data T = T
    { manager :: Manager
    }
-- | Build the suite fixture: create a TLS manager and make sure an
-- ambient STACK_YAML setting does not leak into the tests.
setup :: IO T
setup = do
  manager <- newTLSManager
  unsetEnv "STACK_YAML"
  return T{..}
-- | No global cleanup is required after the suite runs.
teardown :: T -> IO ()
teardown = const (return ())
-- | Entry point: run the whole Hspec suite.
main :: IO ()
main = hspec spec
spec :: Spec
spec = beforeAll setup $ afterAll teardown $ do
    let logLevel = LevelDebug
    let loadConfig' m = runStackLoggingT m logLevel False False (loadConfig mempty Nothing Nothing)
    let loadBuildConfigRest m = runStackLoggingT m logLevel False False
    -- Run an action inside a fresh temporary directory, restoring the
    -- previous working directory afterwards (even on exception).
    let inTempDir action = do
        currentDirectory <- getCurrentDirectory
        withSystemTempDirectory "Stack_BuildPlanSpec" $ \tempDir -> do
            let enterDir = setCurrentDirectory tempDir
            let exitDir = setCurrentDirectory currentDirectory
            bracket_ enterDir exitDir action
    it "finds missing transitive dependencies #159" $ \T{..} -> inTempDir $ do
        -- Note: this test is somewhat fragile, depending on packages on
        -- Hackage remaining in a certain state. If it fails, confirm that
        -- github still depends on failure.
        writeFile "stack.yaml" "resolver: lts-2.9"
        LoadConfig{..} <- loadConfig' manager
        bconfig <- loadBuildConfigRest manager (lcLoadBuildConfig Nothing)
        runStackT manager logLevel bconfig False False $ do
            mbp <- loadMiniBuildPlan $ LTS 2 9
            -- Resolving "github" against lts-2.9 should report unknown
            -- (not-in-snapshot) packages via 'UnknownPackages'.
            eres <- try $ resolveBuildPlan
                mbp
                (const False)
                (Map.fromList
                    [ ($(mkPackageName "github"), Set.empty)
                    ])
            case eres of
                Left (UnknownPackages _ unknown _) -> do
                    case Map.lookup $(mkPackageName "github") unknown of
                        Nothing -> error "doesn't list github as unknown"
                        Just _ -> return ()
                    {- Currently not implemented, see: https://github.com/fpco/stack/issues/159#issuecomment-107809418
                    case Map.lookup $(mkPackageName "failure") unknown of
                        Nothing -> error "failure not listed"
                        Just _ -> return ()
                    -}
                _ -> error $ "Unexpected result from resolveBuildPlan: " ++ show eres
            return ()
    describe "shadowMiniBuildPlan" $ do
        let version = $(mkVersion "1.0.0") -- unimportant for this test
            pn = either throw id . parsePackageNameFromString
            -- Build a minimal package entry whose deps are the
            -- space-separated names in 'deps'.
            mkMPI deps = MiniPackageInfo
                { mpiVersion = version
                , mpiFlags = Map.empty
                , mpiPackageDeps = Set.fromList $ map pn $ words deps
                , mpiToolDeps = Set.empty
                , mpiExes = Set.empty
                , mpiHasLibrary = True
                }
            go x y = (pn x, mkMPI y)
            -- A small dependency graph: conduit-extra -> conduit -> resourcet,
            -- aeson -> attoparsec -> text.
            resourcet = go "resourcet" ""
            conduit = go "conduit" "resourcet"
            conduitExtra = go "conduit-extra" "conduit"
            text = go "text" ""
            attoparsec = go "attoparsec" "text"
            aeson = go "aeson" "text attoparsec"
            mkMBP pkgs = MiniBuildPlan
                { mbpCompilerVersion = GhcVersion version
                , mbpPackages = Map.fromList pkgs
                }
            mbpAll = mkMBP [resourcet, conduit, conduitExtra, text, attoparsec, aeson]
            -- Shadowing 'shadowed' from 'input' must yield 'output' plus the
            -- 'extra' packages that were evicted transitively.
            test name input shadowed output extra =
                it name $ const $
                    shadowMiniBuildPlan input (Set.fromList $ map pn $ words shadowed)
                    `shouldBe` (output, Map.fromList extra)
        test "no shadowing" mbpAll "" mbpAll []
        test "shadow something that isn't there" mbpAll "does-not-exist" mbpAll []
        test "shadow a leaf" mbpAll "conduit-extra"
            (mkMBP [resourcet, conduit, text, attoparsec, aeson])
            []
        test "shadow direct dep" mbpAll "conduit"
            (mkMBP [resourcet, text, attoparsec, aeson])
            [conduitExtra]
        test "shadow deep dep" mbpAll "resourcet"
            (mkMBP [text, attoparsec, aeson])
            [conduit, conduitExtra]
        test "shadow deep dep and leaf" mbpAll "resourcet aeson"
            (mkMBP [text, attoparsec])
            [conduit, conduitExtra]
        test "shadow deep dep and direct dep" mbpAll "resourcet conduit"
            (mkMBP [text, attoparsec, aeson])
            [conduitExtra]
|
luigy/stack
|
src/test/Stack/BuildPlanSpec.hs
|
bsd-3-clause
| 5,064
| 19
| 25
| 1,651
| 1,119
| 584
| 535
| 103
| 3
|
-- A dummy Set module...
module Set where
-- Interface-only stub: every operation is 'undefined'.  Only the type
-- signatures matter (the module exists so code against the Set API
-- type-checks); calling any of these at runtime will crash.
newtype Set a = Set [a]
emptySet :: Set a
mkSet :: Ord a => [a] -> Set a
setToList :: Set a -> [a]
unionManySets :: Ord a => [Set a] -> Set a
intersect, union, minusSet :: Ord a => Set a -> Set a -> Set a
mapSet :: Ord b => (a->b) -> Set a -> Set b
elementOf :: Ord a => a -> Set a -> Bool
emptySet = undefined
mkSet = undefined
setToList = undefined
unionManySets = undefined
minusSet = undefined
mapSet = undefined
intersect = undefined
union = undefined
elementOf = undefined
|
forste/haReFork
|
tools/base/tests/GhcLibraries/Set.hs
|
bsd-3-clause
| 524
| 0
| 8
| 111
| 221
| 118
| 103
| 18
| 1
|
{-# LANGUAGE BangPatterns #-}
-- |
-- Module : Data.Text.Encoding.Fusion.Common
-- Copyright : (c) Tom Harper 2008-2009,
-- (c) Bryan O'Sullivan 2009,
-- (c) Duncan Coutts 2009,
-- (c) Jasper Van der Jeugt 2011
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com, rtomharper@googlemail.com,
-- duncan@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Fusible 'Stream'-oriented functions for converting between 'Text'
-- and several common encodings.
module Data.Text.Encoding.Fusion.Common
(
-- * Restreaming
-- Restreaming is the act of converting from one 'Stream'
-- representation to another.
restreamUtf8
, restreamUtf16LE
, restreamUtf16BE
, restreamUtf32LE
, restreamUtf32BE
) where
import Data.Bits ((.&.))
import Data.Text.Fusion (Step(..), Stream(..))
import Data.Text.Fusion.Internal (RS(..))
import Data.Text.UnsafeChar (ord)
import Data.Text.UnsafeShift (shiftR)
import Data.Word (Word8)
import qualified Data.Text.Encoding.Utf8 as U8
-- | /O(n)/ Convert a Stream Char into a UTF-8 encoded Stream Word8.
restreamUtf8 :: Stream Char -> Stream Word8
-- The RS0..RS3 states buffer the trailing bytes of a multi-byte sequence;
-- each step yields exactly one byte.  (The 'len * 2' size hint is a
-- lower-bound estimate, not an exact length.)
restreamUtf8 (Stream next0 s0 len) = Stream next (RS0 s0) (len * 2)
    where
      next (RS0 s) = case next0 s of
                  Done -> Done
                  Skip s' -> Skip (RS0 s')
                  Yield x s'
                      -- 1-, 2-, 3- or 4-byte encoding by code point range.
                      | n <= 0x7F -> Yield c (RS0 s')
                      | n <= 0x07FF -> Yield a2 (RS1 s' b2)
                      | n <= 0xFFFF -> Yield a3 (RS2 s' b3 c3)
                      | otherwise -> Yield a4 (RS3 s' b4 c4 d4)
                      where
                        n = ord x
                        c = fromIntegral n
                        (a2,b2) = U8.ord2 x
                        (a3,b3,c3) = U8.ord3 x
                        (a4,b4,c4,d4) = U8.ord4 x
      -- Drain buffered continuation bytes, one per step.
      next (RS1 s x2) = Yield x2 (RS0 s)
      next (RS2 s x2 x3) = Yield x2 (RS1 s x3)
      next (RS3 s x2 x3 x4) = Yield x2 (RS2 s x3 x4)
      {-# INLINE next #-}
{-# INLINE restreamUtf8 #-}
-- | /O(n)/ Convert a 'Char' stream into a big-endian UTF-16 encoded
-- stream of 'Word8'.
restreamUtf16BE :: Stream Char -> Stream Word8
restreamUtf16BE (Stream next0 s0 len) = Stream next (RS0 s0) (len * 2)
    where
      next (RS0 s) = case next0 s of
          Done -> Done
          Skip s' -> Skip (RS0 s')
          Yield x s'
              -- BMP code points: two bytes, high byte first.
              | n < 0x10000 -> Yield (fromIntegral $ n `shiftR` 8) $
                               RS1 s' (fromIntegral n)
              -- Otherwise: surrogate pair, four bytes c1..c4.
              | otherwise -> Yield c1 $ RS3 s' c2 c3 c4
              where
                n = ord x
                n1 = n - 0x10000
                -- High surrogate bytes (0xD800 + n1 >> 10, split into two).
                c1 = fromIntegral (n1 `shiftR` 18 + 0xD8)
                c2 = fromIntegral (n1 `shiftR` 10)
                n2 = n1 .&. 0x3FF
                -- Low surrogate bytes (0xDC00 + n2, split into two).
                c3 = fromIntegral (n2 `shiftR` 8 + 0xDC)
                c4 = fromIntegral n2
      next (RS1 s x2) = Yield x2 (RS0 s)
      next (RS2 s x2 x3) = Yield x2 (RS1 s x3)
      next (RS3 s x2 x3 x4) = Yield x2 (RS2 s x3 x4)
      {-# INLINE next #-}
{-# INLINE restreamUtf16BE #-}
-- | /O(n)/ Convert a 'Char' stream into a little-endian UTF-16 encoded
-- stream of 'Word8'.
restreamUtf16LE :: Stream Char -> Stream Word8
restreamUtf16LE (Stream next0 s0 len) = Stream next (RS0 s0) (len * 2)
    where
      next (RS0 s) = case next0 s of
          Done -> Done
          Skip s' -> Skip (RS0 s')
          Yield x s'
              -- BMP code points: two bytes, low byte first.
              | n < 0x10000 -> Yield (fromIntegral n) $
                               RS1 s' (fromIntegral $ shiftR n 8)
              -- Otherwise: surrogate pair; note c1/c2 and c3/c4 are the
              -- byte-swapped counterparts of the big-endian variant.
              | otherwise -> Yield c1 $ RS3 s' c2 c3 c4
              where
                n = ord x
                n1 = n - 0x10000
                c2 = fromIntegral (shiftR n1 18 + 0xD8)
                c1 = fromIntegral (shiftR n1 10)
                n2 = n1 .&. 0x3FF
                c4 = fromIntegral (shiftR n2 8 + 0xDC)
                c3 = fromIntegral n2
      next (RS1 s x2) = Yield x2 (RS0 s)
      next (RS2 s x2 x3) = Yield x2 (RS1 s x3)
      next (RS3 s x2 x3 x4) = Yield x2 (RS2 s x3 x4)
      {-# INLINE next #-}
{-# INLINE restreamUtf16LE #-}
-- | /O(n)/ Convert a 'Char' stream into a big-endian UTF-32 encoded
-- stream of 'Word8' (always four bytes per code point, high byte first).
restreamUtf32BE :: Stream Char -> Stream Word8
restreamUtf32BE (Stream next0 s0 len) = Stream next (RS0 s0) (len * 2)
    where
      next (RS0 s) = case next0 s of
          Done -> Done
          Skip s' -> Skip (RS0 s')
          Yield x s' -> Yield c1 (RS3 s' c2 c3 c4)
              where
                n = ord x
                c1 = fromIntegral $ shiftR n 24
                c2 = fromIntegral $ shiftR n 16
                c3 = fromIntegral $ shiftR n 8
                c4 = fromIntegral n
      next (RS1 s x2) = Yield x2 (RS0 s)
      next (RS2 s x2 x3) = Yield x2 (RS1 s x3)
      next (RS3 s x2 x3 x4) = Yield x2 (RS2 s x3 x4)
      {-# INLINE next #-}
{-# INLINE restreamUtf32BE #-}
-- | /O(n)/ Convert a 'Char' stream into a little-endian UTF-32 encoded
-- stream of 'Word8' (always four bytes per code point, low byte first).
restreamUtf32LE :: Stream Char -> Stream Word8
restreamUtf32LE (Stream next0 s0 len) = Stream next (RS0 s0) (len * 2)
    where
      next (RS0 s) = case next0 s of
          Done -> Done
          Skip s' -> Skip (RS0 s')
          Yield x s' -> Yield c1 (RS3 s' c2 c3 c4)
              where
                n = ord x
                c4 = fromIntegral $ shiftR n 24
                c3 = fromIntegral $ shiftR n 16
                c2 = fromIntegral $ shiftR n 8
                c1 = fromIntegral n
      next (RS1 s x2) = Yield x2 (RS0 s)
      next (RS2 s x2 x3) = Yield x2 (RS1 s x3)
      next (RS3 s x2 x3 x4) = Yield x2 (RS2 s x3 x4)
      {-# INLINE next #-}
{-# INLINE restreamUtf32LE #-}
|
mightymoose/liquidhaskell
|
benchmarks/text-0.11.2.3/Data/Text/Encoding/Fusion/Common.hs
|
bsd-3-clause
| 5,141
| 0
| 15
| 1,838
| 1,819
| 924
| 895
| 109
| 6
|
{-# LANGUAGE TypeFamilies, TypeOperators, RankNTypes #-}
module HO where
import Data.IORef
import Data.Kind
-- | Maps a state monad to its reference type (instantiated below:
-- IO -> IORef).
type family SMRef (m::(Type -> Type)) :: Type -> Type
-- | Inverse direction: maps a reference type back to its monad
-- (IORef -> IO).
type family SMMonad (r::(Type -> Type)) :: Type -> Type
type instance SMRef IO = IORef
type instance SMMonad IORef = IO
-- | Mutable-state interface; the superclass equality @SMMonad (SMRef m) ~ m@
-- ties the two families together so they behave as an inverse pair.
class SMMonad (SMRef m) ~ m => SM m where
   new :: forall a. a -> m (SMRef m a)
   read :: forall a. (SMRef m a) -> m a
   write :: forall a. (SMRef m a) -> a -> m ()
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/HO.hs
|
bsd-3-clause
| 492
| 0
| 11
| 138
| 195
| 108
| 87
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE PatternSynonyms #-}
module T11970(B(recSel), Foo((--.->)), C(C,P,x,Q, B, recSel)) where
-- Exercises export-list handling for pattern synonyms: a plain synonym (D),
-- a newtype with a record selector re-exported via the synonym group
-- C(C,P,x,Q,B,recSel), and a type-family operator (--.->) in class Foo.
pattern D = Nothing
newtype B = B { recSel :: Int }
class Foo a where
  type (--.->) a
newtype C = C Int
pattern P x = C x
-- Record pattern synonym: gives C a field named 'x'.
pattern Q{x} = C x
|
olsner/ghc
|
testsuite/tests/module/T11970.hs
|
bsd-3-clause
| 344
| 0
| 7
| 68
| 118
| 76
| 42
| 19
| 0
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CApiFFI #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE UnliftedFFITypes #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GHCForeignImportPrim #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
#include "MachDeps.h"
-- |
-- Module : GHC.Integer.GMP.Internals
-- Copyright : (c) Herbert Valerio Riedel 2014
-- License : BSD3
--
-- Maintainer : ghc-devs@haskell.org
-- Stability : provisional
-- Portability : non-portable (GHC Extensions)
--
-- This modules provides access to the 'Integer' constructors and
-- exposes some highly optimized GMP-operations.
--
-- Note that since @integer-gmp@ does not depend on `base`, error
-- reporting via exceptions, 'error', or 'undefined' is not
-- available. Instead, the low-level functions will crash the runtime
-- if called with invalid arguments.
--
-- See also
-- <https://ghc.haskell.org/trac/ghc/wiki/Commentary/Libraries/Integer GHC Commentary: Libraries/Integer>.
module GHC.Integer.GMP.Internals
( -- * The 'Integer' type
Integer(..)
, isValidInteger#
-- ** Basic 'Integer' operations
, module GHC.Integer
-- ** Additional 'Integer' operations
, bitInteger
, popCountInteger
, gcdInteger
, gcdExtInteger
, lcmInteger
, sqrInteger
, powModInteger
, recipModInteger
-- ** Additional conversion operations to 'Integer'
, wordToNegInteger
, bigNatToInteger
, bigNatToNegInteger
-- * The 'BigNat' type
, BigNat(..)
, GmpLimb, GmpLimb#
, GmpSize, GmpSize#
-- **
, isValidBigNat#
, sizeofBigNat#
, zeroBigNat
, oneBigNat
, nullBigNat
-- ** Conversions to/from 'BigNat'
, byteArrayToBigNat#
, wordToBigNat
, wordToBigNat2
, bigNatToInt
, bigNatToWord
, indexBigNat#
-- ** 'BigNat' arithmetic operations
, plusBigNat
, plusBigNatWord
, minusBigNat
, minusBigNatWord
, timesBigNat
, timesBigNatWord
, sqrBigNat
, quotRemBigNat
, quotRemBigNatWord
, quotBigNatWord
, quotBigNat
, remBigNat
, remBigNatWord
, gcdBigNat
, gcdBigNatWord
, powModBigNat
, powModBigNatWord
, recipModBigNat
-- ** 'BigNat' logic operations
, shiftRBigNat
, shiftLBigNat
, testBitBigNat
, andBigNat
, xorBigNat
, popCountBigNat
, orBigNat
, bitBigNat
  -- ** 'BigNat' comparison predicates
, isZeroBigNat
, isNullBigNat#
, compareBigNatWord
, compareBigNat
, eqBigNatWord
, eqBigNatWord#
, eqBigNat
, eqBigNat#
, gtBigNatWord#
-- * Miscellaneous GMP-provided operations
, gcdInt
, gcdWord
, powModWord
, recipModWord
-- * Primality tests
, testPrimeInteger
, testPrimeBigNat
, testPrimeWord#
, nextPrimeInteger
, nextPrimeBigNat
, nextPrimeWord#
-- * Import/export functions
-- ** Compute size of serialisation
, sizeInBaseBigNat
, sizeInBaseInteger
, sizeInBaseWord#
-- ** Export
, exportBigNatToAddr
, exportIntegerToAddr
, exportWordToAddr
, exportBigNatToMutableByteArray
, exportIntegerToMutableByteArray
, exportWordToMutableByteArray
-- ** Import
, importBigNatFromAddr
, importIntegerFromAddr
, importBigNatFromByteArray
, importIntegerFromByteArray
) where
import GHC.Integer.Type
import GHC.Integer
import GHC.Prim
import GHC.Types
default ()
-- | Compute number of digits (without sign) in given @/base/@.
--
-- This function wraps @mpz_sizeinbase()@ which has some
-- implementation pecularities to take into account:
--
-- * \"@'sizeInBaseInteger' 0 /base/ = 1@\"
-- (see also comment in 'exportIntegerToMutableByteArray').
--
-- * This function is only defined if @/base/ >= 2#@ and @/base/ <= 256#@
-- (Note: the documentation claims that only @/base/ <= 62#@ is
-- supported, however the actual implementation supports up to base 256).
--
-- * If @/base/@ is a power of 2, the result will be exact. In other
-- cases (e.g. for @/base/ = 10#@), the result /may/ be 1 digit too large
-- sometimes.
--
-- * \"@'sizeInBaseInteger' /i/ 2#@\" can be used to determine the most
-- significant bit of @/i/@.
--
-- @since 0.5.1.0
sizeInBaseInteger :: Integer -> Int# -> Word#
-- Small integers use the single-limb primitive; big naturals go through
-- the GMP-backed 'sizeInBaseBigNat'.  The sign is dropped in every case.
sizeInBaseInteger (S# i#) = sizeInBaseWord# (int2Word# (absI# i#))
sizeInBaseInteger (Jp# bn) = sizeInBaseBigNat bn
sizeInBaseInteger (Jn# bn) = sizeInBaseBigNat bn
-- | Version of 'sizeInBaseInteger' operating on 'BigNat'
--
-- @since 1.0.0.0
sizeInBaseBigNat :: BigNat -> Int# -> Word#
-- Delegates to the C helper, passing the limb array and its limb count.
sizeInBaseBigNat bn@(BN# ba#) = c_mpn_sizeinbase# ba# (sizeofBigNat# bn)
foreign import ccall unsafe "integer_gmp_mpn_sizeinbase"
c_mpn_sizeinbase# :: ByteArray# -> GmpSize# -> Int# -> Word#
-- | Version of 'sizeInBaseInteger' operating on 'Word#'
--
-- @since 1.0.0.0
foreign import ccall unsafe "integer_gmp_mpn_sizeinbase1"
sizeInBaseWord# :: Word# -> Int# -> Word#
-- | Dump 'Integer' (without sign) to @/addr/@ in base-256 representation.
--
-- @'exportIntegerToAddr' /i/ /addr/ /e/@
--
-- See description of 'exportIntegerToMutableByteArray' for more details.
--
-- @since 1.0.0.0
exportIntegerToAddr :: Integer -> Addr# -> Int# -> IO Word
-- Dispatch on representation; the sign is dropped (|i| is exported).
exportIntegerToAddr (S# i#) = exportWordToAddr (W# (int2Word# (absI# i#)))
exportIntegerToAddr (Jp# bn) = exportBigNatToAddr bn
exportIntegerToAddr (Jn# bn) = exportBigNatToAddr bn
-- | Version of 'exportIntegerToAddr' operating on 'BigNat's.
exportBigNatToAddr :: BigNat -> Addr# -> Int# -> IO Word
-- The hard-wired 0# is the offset; 'e' is the msbf byte-order flag
-- (see 'exportIntegerToMutableByteArray' for its meaning).
exportBigNatToAddr bn@(BN# ba#) addr e
  = c_mpn_exportToAddr# ba# (sizeofBigNat# bn) addr 0# e
foreign import ccall unsafe "integer_gmp_mpn_export"
c_mpn_exportToAddr# :: ByteArray# -> GmpSize# -> Addr# -> Int# -> Int#
-> IO Word
-- | Version of 'exportIntegerToAddr' operating on 'Word's.
exportWordToAddr :: Word -> Addr# -> Int# -> IO Word
exportWordToAddr (W# w#) addr
  = c_mpn_export1ToAddr# w# addr 0# -- TODO: we're not calling GMP for this
foreign import ccall unsafe "integer_gmp_mpn_export1"
c_mpn_export1ToAddr# :: GmpLimb# -> Addr# -> Int# -> Int#
-> IO Word
-- | Dump 'Integer' (without sign) to mutable byte-array in base-256
-- representation.
--
-- The call
--
-- @'exportIntegerToMutableByteArray' /i/ /mba/ /offset/ /msbf/@
--
-- writes
--
-- * the 'Integer' @/i/@
--
-- * into the 'MutableByteArray#' @/mba/@ starting at @/offset/@
--
-- * with most significant byte first if @msbf@ is @1#@ or least
-- significant byte first if @msbf@ is @0#@, and
--
-- * returns number of bytes written.
--
-- Use \"@'sizeInBaseInteger' /i/ 256#@\" to compute the exact number of
-- bytes written in advance for @/i/ /= 0@. In case of @/i/ == 0@,
-- 'exportIntegerToMutableByteArray' will write and report zero bytes
-- written, whereas 'sizeInBaseInteger' report one byte.
--
-- It's recommended to avoid calling 'exportIntegerToMutableByteArray' for small
-- integers as this function would currently convert those to big
-- integers in msbf to call @mpz_export()@.
--
-- @since 1.0.0.0
exportIntegerToMutableByteArray :: Integer -> MutableByteArray# RealWorld
                                -> Word# -> Int# -> IO Word
-- Dispatch on representation; the sign is dropped (|i| is exported).
exportIntegerToMutableByteArray (S# i#)
    = exportWordToMutableByteArray (W# (int2Word# (absI# i#)))
exportIntegerToMutableByteArray (Jp# bn) = exportBigNatToMutableByteArray bn
exportIntegerToMutableByteArray (Jn# bn) = exportBigNatToMutableByteArray bn
-- | Version of 'exportIntegerToMutableByteArray' operating on 'BigNat's.
--
-- @since 1.0.0.0
exportBigNatToMutableByteArray :: BigNat -> MutableByteArray# RealWorld -> Word#
                               -> Int# -> IO Word
-- Delegates to the C helper with the limb array and its limb count.
exportBigNatToMutableByteArray bn@(BN# ba#)
    = c_mpn_exportToMutableByteArray# ba# (sizeofBigNat# bn)
foreign import ccall unsafe "integer_gmp_mpn_export"
c_mpn_exportToMutableByteArray# :: ByteArray# -> GmpSize#
-> MutableByteArray# RealWorld -> Word#
-> Int# -> IO Word
-- | Version of 'exportIntegerToMutableByteArray' operating on 'Word's.
--
-- @since 1.0.0.0
exportWordToMutableByteArray :: Word -> MutableByteArray# RealWorld -> Word#
                             -> Int# -> IO Word
-- Single-limb fast path via the dedicated C helper.
exportWordToMutableByteArray (W# w#) = c_mpn_export1ToMutableByteArray# w#
foreign import ccall unsafe "integer_gmp_mpn_export1"
c_mpn_export1ToMutableByteArray# :: GmpLimb# -> MutableByteArray# RealWorld
-> Word# -> Int# -> IO Word
-- | Probabilistic Miller-Rabin primality test.
--
-- \"@'testPrimeInteger' /n/ /k/@\" determines whether @/n/@ is prime
-- and returns one of the following results:
--
-- * @2#@ is returned if @/n/@ is definitely prime,
--
-- * @1#@ if @/n/@ is a /probable prime/, or
--
-- * @0#@ if @/n/@ is definitely not a prime.
--
-- The @/k/@ argument controls how many test rounds are performed for
-- determining a /probable prime/. For more details, see
-- <http://gmplib.org/manual/Number-Theoretic-Functions.html#index-mpz_005fprobab_005fprime_005fp-360 GMP documentation for `mpz_probab_prime_p()`>.
--
-- @since 0.5.1.0
{-# NOINLINE testPrimeInteger #-}
testPrimeInteger :: Integer -> Int# -> Int#
-- Sign is dropped: |n| is tested.  Small values use the single-limb
-- primitive; big naturals go through 'testPrimeBigNat'.
testPrimeInteger (S# i#) = testPrimeWord# (int2Word# (absI# i#))
testPrimeInteger (Jp# n) = testPrimeBigNat n
testPrimeInteger (Jn# n) = testPrimeBigNat n
-- | Version of 'testPrimeInteger' operating on 'BigNat's
--
-- @since 1.0.0.0
testPrimeBigNat :: BigNat -> Int# -> Int#
-- Delegates to the C helper with the limb array and its limb count.
testPrimeBigNat bn@(BN# ba#) = c_integer_gmp_test_prime# ba# (sizeofBigNat# bn)
foreign import ccall unsafe "integer_gmp_test_prime"
c_integer_gmp_test_prime# :: ByteArray# -> GmpSize# -> Int# -> Int#
-- | Version of 'testPrimeInteger' operating on 'Word#'s
--
-- @since 1.0.0.0
foreign import ccall unsafe "integer_gmp_test_prime1"
testPrimeWord# :: GmpLimb# -> Int# -> Int#
-- | Compute next prime greater than @/n/@ probabilistically.
--
-- According to the GMP documentation, the underlying function
-- @mpz_nextprime()@ \"uses a probabilistic algorithm to identify
-- primes. For practical purposes it's adequate, the chance of a
-- composite passing will be extremely small.\"
--
-- @since 0.5.1.0
{-# NOINLINE nextPrimeInteger #-}
nextPrimeInteger :: Integer -> Integer
-- For small i > 1 use the single-limb primitive; any i <= 1 (including
-- every negative number) maps to the first prime, 2.
nextPrimeInteger (S# i#)
  | isTrue# (i# ># 1#) = wordToInteger (nextPrimeWord# (int2Word# i#))
  | True = S# 2#
nextPrimeInteger (Jp# bn) = Jp# (nextPrimeBigNat bn)
nextPrimeInteger (Jn# _) = S# 2#
-- | Version of 'nextPrimeInteger' operating on 'Word#'s
--
-- @since 1.0.0.0
foreign import ccall unsafe "integer_gmp_next_prime1"
nextPrimeWord# :: GmpLimb# -> GmpLimb#
|
green-haskell/ghc
|
libraries/integer-gmp2/src/GHC/Integer/GMP/Internals.hs
|
bsd-3-clause
| 10,830
| 0
| 11
| 2,205
| 1,446
| 847
| 599
| 169
| 1
|
module Options where
import Control.Monad
import System.Exit
import System.Environment (getEnv)
import System.FilePath (joinPath)
import System.Console.GetOpt
import Text.Printf (printf)
-- | What the program should do, as selected by the command-line flags.
data Mode = Help | Add String | Query String | List (Maybe String) | Edit | Dump
  deriving (Eq, Show)

-- | Fully parsed command-line options (see 'options' for the flags).
data Options = Options {
  mode :: Mode
, databaseFile :: FilePath    -- ^ where the password database lives
, userName :: Maybe String    -- ^ username override for --add
, repeatCount :: Maybe Int    -- ^ how many times to copy to clipboard (-n)
, passwordOnly :: Bool        -- ^ copy only the password, not the username
} deriving Show
-- | Baseline options before any flags are applied: help mode, database
-- path resolved later by 'get', no overrides.
defaultOptions :: Options
defaultOptions =
  Options
    { mode = Help
    , databaseFile = ""
    , userName = Nothing
    , repeatCount = Nothing
    , passwordOnly = False
    }
-- | Flag table for GetOpt.  Each flag is a transformer applied to the
-- accumulated 'Options' by 'get' (left to right, in command-line order).
options :: [OptDescr (Options -> Options)]
options = [
    Option []    ["help"]   (NoArg  (\  opts -> opts { mode = Help }))       "display this help and exit"
  , Option ['a'] ["add"]    (ReqArg (\s opts -> opts { mode = Add s }) "URL")        "add a new entry to the database; the password is\nalways automatically generated; the username is\ngenerated unless --user is specified"
  , Option ['q'] ["query"]  (ReqArg (\s opts -> opts { mode = Query s }) "TERM")     "lookup a password, the term must match exactly one\nentry"
  , Option ['l'] ["list"]   (OptArg (\s opts -> opts { mode = List s}) "TERM")       "list all entries matching the given term"
  , Option ['e'] ["edit"]   (NoArg  (\  opts -> opts { mode = Edit}))                "invoke vim to edit the database using sensible\ndefaults (no backup, no swapfile etc)"
  , Option []    ["dump"]   (NoArg  (\  opts -> opts { mode = Dump}))                "dump database to stdout"
  , Option []    ["dbfile"] (ReqArg (\s opts -> opts { databaseFile = s }) "FILE")   "file where passwords are stored;\ndefaults to ~/.pwsafe/db"
  , Option []    ["user"]   (ReqArg (\s opts -> opts { userName = Just s }) "USER")  "specify a username to be used for a new entry;\nthis option is to be used with --add"
  -- NOTE(review): 'read' here will crash on a non-numeric -n argument.
  , Option ['n'] []         (ReqArg (\s opts -> opts { repeatCount = (Just . read) s }) "NUMBER") "copy password n times to clipboard;\ndefaults to 1"
  , Option []    ["password-only"]
                            (NoArg  (\  opts -> opts { passwordOnly = True}))        "only copy password to clipboard"
  ]
-- | Default location of the password database: $HOME/.pwsafe/db.
defaultDatabaseFile :: IO String
defaultDatabaseFile =
  getEnv "HOME" >>= \home -> return (joinPath [home, ".pwsafe", "db"])
-- | Parse the command line into 'Options', exiting with a hint message on
-- unrecognized flags or stray positional arguments.
get :: [String] -> IO Options
get args = do
  let (opts_, files, errors) = getOpt Permute options args
  -- Apply each flag's transformer left-to-right over the defaults.
  let opts__ = foldl (flip id) defaultOptions opts_
  -- Fall back to ~/.pwsafe/db when --dbfile was not given.
  opts <- case databaseFile opts__ of
    "" -> do
      db <- defaultDatabaseFile
      return opts__ {databaseFile = db}
    _ -> return opts__
  when ((not . null) errors)
    (tryHelp $ head errors)
  when ((not . null) files)
    (tryHelp $ printf "unrecognized option `%s'\n" $ head files)
  return opts
  where
    printAndExit :: String -> IO a
    printAndExit s = putStr s >> exitFailure
    tryHelp message = printAndExit $ "pwsafe: " ++ message
      ++ "Try `pwsafe --help' for more information.\n"
-- | Print the GetOpt-generated usage text for all supported flags.
printHelp :: IO ()
printHelp = putStr (usageInfo "Usage: pwsafe [OPTION]...\n" options)
|
sol/pwsafe
|
src/Options.hs
|
mit
| 3,139
| 0
| 15
| 793
| 959
| 521
| 438
| 60
| 2
|
-- Pierre's tightrope walk: a pole holds bird counts on each side, and a
-- landing fails ('Nothing') when the two sides differ by 4 or more.
type Birds = Int
type Pole = (Birds, Birds)

-- | Land @n@ birds on the left side, if the pole stays balanced.
landLeft :: Birds -> Pole -> Maybe Pole
landLeft n (left,right)
  | abs ((left + n) - right) < 4 = Just (left + n, right)
  | otherwise                    = Nothing

-- | Land @n@ birds on the right side, if the pole stays balanced.
-- BUG FIX: the type signature and the defining equation were fused onto a
-- single line, which is a parse error; they are now on separate lines.
landRight :: Birds -> Pole -> Maybe Pole
landRight n (left,right)
  | abs (left - (right + n)) < 4 = Just (left, right + n)
  | otherwise                    = Nothing
|
RAFIRAF/HASKELL
|
A Fistful of Monads/tightrope.hs
|
mit
| 399
| 2
| 13
| 138
| 179
| 94
| 85
| -1
| -1
|
module Test.Smoke.Types.Errors where
import Control.Exception (Exception, IOException)
import Data.Text (Text)
import Test.Smoke.Paths
import Test.Smoke.Types.Base
import Test.Smoke.Types.Executable
-- | Top-level error sum: one constructor per pipeline stage, each wrapping
-- that stage's specific error type.
data SmokeError
  = DiscoveryError SmokeDiscoveryError
  | PlanningError SmokePlanningError
  | ExecutionError SmokeExecutionError
  | AssertionError SmokeAssertionError
  | BlessError SmokeBlessError
  deriving (Show)

instance Exception SmokeError

-- | Errors while locating and reading test specifications.
data SmokeDiscoveryError
  = NoSuchLocation FilePath
  | NoSuchTest (RelativePath File) TestName
  | CannotSelectTestInDirectory (RelativePath Dir) TestName
  | InvalidSpecification (RelativePath File) String
  deriving (Show)

instance Exception SmokeDiscoveryError

-- | Errors while turning a specification into an executable plan.
data SmokePlanningError
  = NoCommand
  | NoInput
  | NoOutput
  | PlanningFixtureFileError SmokeFileError
  | PlanningPathError PathError
  | PlanningFilterError SmokeFilterError
  deriving (Show)

instance Exception SmokePlanningError

-- | Errors while actually running a planned test.
data SmokeExecutionError
  = NonExistentWorkingDirectory WorkingDirectory
  | CouldNotExecuteCommand Executable IOError
  | CouldNotStoreDirectory (ResolvedPath Dir) IOError
  | CouldNotRevertDirectory (ResolvedPath Dir) IOError
  deriving (Show)

instance Exception SmokeExecutionError

-- | Errors while checking test output; currently only filter failures.
newtype SmokeAssertionError
  = AssertionFilterError SmokeFilterError
  deriving (Show)

instance Exception SmokeAssertionError

-- | Errors while "blessing" (accepting) new expected output.
data SmokeBlessError
  = CouldNotBlessInlineFixture FixtureName Text
  | CouldNotBlessAMissingValue FixtureName
  | CouldNotBlessWithMultipleValues FixtureName
  | CouldNotBlessContainsAssertion FixtureName Text
  | BlessIOException IOException
  deriving (Show)

instance Exception SmokeBlessError

-- | Errors from running an output-filter script.
data SmokeFilterError
  = MissingFilterScript
  | CouldNotExecuteFilter Executable IOError
  | FilterExecutionFailed Executable Status StdOut StdErr
  | FilterPathError PathError
  deriving (Show)

instance Exception SmokeFilterError

-- | Errors reading fixture files referenced by a specification.
data SmokeFileError = MissingFile (RelativePath File) | CouldNotReadFile (RelativePath File) IOError
  deriving (Show)

instance Exception SmokeFileError

-- | Suite-level wrapper used when discovery/path errors abort a whole suite.
data SuiteError
  = SuiteDiscoveryError SmokeDiscoveryError
  | SuitePathError PathError
  deriving (Show)
|
SamirTalwar/Smoke
|
src/lib/Test/Smoke/Types/Errors.hs
|
mit
| 2,172
| 0
| 8
| 289
| 430
| 241
| 189
| 63
| 0
|
{-# htermination (gtChar :: Char -> Char -> MyBool) #-}
import qualified Prelude
-- Auto-generated benchmark for the HTerm termination analyser (see the
-- htermination pragma above): Prelude types and comparisons are re-encoded
-- from scratch (Peano naturals, explicit Ordering), so the recursion
-- structure is explicit.  Deliberately not restyled.
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Char = Char MyInt ;
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;

-- Structural comparison of Peano naturals (recursion on both arguments).
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;

-- Signed comparison; note both zeros compare EQ regardless of sign,
-- and negative/negative flips the argument order.
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
primCmpInt (Neg x) (Neg y) = primCmpNat y x;

primCmpChar :: Char -> Char -> Ordering;
primCmpChar (Char x) (Char y) = primCmpInt x y;

compareChar :: Char -> Char -> Ordering
compareChar = primCmpChar;

-- Equality on Ordering, spelled out case by case.
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;

-- x > y iff compare x y == GT.
gtChar :: Char -> Char -> MyBool
gtChar x y = esEsOrdering (compareChar x y) GT;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/GT_4.hs
|
mit
| 1,391
| 0
| 8
| 301
| 594
| 321
| 273
| 37
| 1
|
module HarmLang.Utility where
import Data.List
-- | Sort the list ascending and drop duplicates.
sortedUnique :: (Ord a, Eq a) => [a] -> [a]
sortedUnique = map head . group . sort
-- http://stackoverflow.com/questions/8227218/removing-repeated-elements-from-a-list-in-haskell
-- | Collapse each run of equal adjacent elements down to one element.
removeAdjacentDups :: (Ord a, Eq a) => [a] -> [a]
removeAdjacentDups = map head . group
-- http://stackoverflow.com/questions/4978578/how-to-split-a-string-in-haskell
-- | Split a list on a delimiter element; the delimiter is dropped.
-- An empty input yields no chunks at all.
separateBy :: Eq a => a -> [a] -> [[a]]
separateBy delim = go
  where
    go [] = []
    go ys = chunk : go (drop 1 rest)
      where
        (chunk, rest) = break (== delim) ys
-- A Cyrus Cousins original.
-- | Every rotation of the list, starting with the list itself.
allRotations :: [a] -> [[a]]
allRotations xs = [take n (drop i (cycle xs)) | i <- [0 .. n - 1]]
  where
    n = length xs
-- https://www.haskell.org/pipermail/haskell-cafe/2003-June/004484.html
-- | All subsets of the input list (order matches the classic
-- @xss ++ map (x:) xss@ recursion).
powerset :: [a] -> [[a]]
powerset = foldr include [[]]
  where
    include x subsets = subsets ++ map (x :) subsets
--map with indices
-- | Map a function over a list, also passing each element's 0-based index.
mapInd :: (a -> Int -> b) -> [a] -> [b]
mapInd f xs = zipWith f xs [0 ..]
--http://stackoverflow.com/questions/9270478/efficiently-find-indices-of-maxima-of-a-list
-- | Index of the first maximum element; returns 0 for an empty list.
--
-- BUG FIX: the accumulator used to be seeded with the literal 0, so any
-- list whose elements are all negative (or all less than 0) reported
-- index 0 regardless of where the true maximum was.  The accumulator is
-- now seeded with the first element.
indexOfMaximum :: (Ord n, Num n) => [n] -> Int
indexOfMaximum []     = 0
indexOfMaximum (x:xs) = go xs 1 x 0
  where
    -- go remaining currIndex highestVal highestIndex; strict '>' keeps
    -- the first occurrence of the maximum, as before.
    go []     _         _          highestIndex = highestIndex
    go (y:ys) currIndex highestVal highestIndex
      | y > highestVal = go ys (currIndex + 1) y currIndex
      | otherwise      = go ys (currIndex + 1) highestVal highestIndex
-- | Index of a minimum element, obtained by negating every element and
-- reusing 'indexOfMaximum'.
indexOfMinimum :: (Ord n, Num n) => [n] -> Int
indexOfMinimum = indexOfMaximum . map negate
|
lrassaby/harmlang
|
src/HarmLang/Utility.hs
|
mit
| 1,957
| 0
| 14
| 465
| 803
| 421
| 382
| 38
| 3
|
-- Describe a list
-- https://www.codewars.com/kata/57a4a3e653ba3346bc000810
module ParseFloat where
-- | Describe a list by its length: "empty", "singleton", or "longer".
-- Added the missing type signature and replaced the unused bindings
-- (x, xs) with wildcards to silence -Wall.
describeList :: [a] -> String
describeList []  = "empty"
describeList [_] = "singleton"
describeList _   = "longer"
|
gafiatulin/codewars
|
src/7 kyu/ParseFloat.hs
|
mit
| 189
| 0
| 6
| 25
| 35
| 20
| 15
| 4
| 1
|
module Server
( startServer
) where
import Control.Concurrent (forkIO)
import Control.Exception (bracket, tryJust)
import Control.Monad (guard)
import Control.Monad (when)
import Network (listenOn, PortID(UnixSocket), Socket, accept, sClose)
import System.Directory (removeFile)
import System.IO (Handle, hGetLine, hPutStrLn, hClose)
import System.IO.Error (isDoesNotExistError)
import Buffer
import Config (Config, socketPath)
import Util (whisper, unescapeStr)
-- | Bind a Unix-domain socket at the configured path and serve clients
-- until 'waitForConnection' returns.  Any stale socket file is removed
-- before binding and again on shutdown.
startServer :: Config -> IO ()
startServer cfg = do
    let removeSocketFile :: IO ()
        removeSocketFile = do
            whisper $ "Deleting socket file " ++ socketPath cfg
            -- Ignore possible error if socket file does not exist
            _ <- tryJust (guard . isDoesNotExistError) $ removeFile (socketPath cfg)
            return ()
    whisper $ "Starting server on socket " ++ socketPath cfg
    removeSocketFile
    -- Outer bracket: socket lifetime; inner bracket: buffer-state lifetime.
    bracket
        (listenOn (UnixSocket (socketPath cfg)))
        (\sock -> sClose sock >> removeSocketFile)
        $ \sock -> do
            bracket createBufferState destroyBufferState $ \bufferState -> do
                -- TODO kill these threads during shutdown
                _ <- forkIO (createNeededBuffersLoop bufferState)
                _ <- forkIO (idleCreateBuffersLoop bufferState)
                waitForConnection bufferState sock
-- | Log the requested program invocation, then run it via 'runProgram'.
runCommand :: BufferState -> String -> FilePath -> [String] -> IO ()
runCommand buffers name pwd args = do
    whisper ("Running command " ++ name ++ " " ++ pwd ++ " " ++ show args)
    runProgram buffers name pwd args
-- | Accept loop: serve one client at a time.  The handler's Bool result
-- decides whether to keep accepting; 'handleConnection' returns False only
-- for the \"shutdown\" command.
waitForConnection :: BufferState -> Socket -> IO ()
waitForConnection buffers sock = do
    -- TODO handle IOException for accept
    (h, _, _) <- accept sock
    continue <- handleConnection buffers h
    hClose h
    when continue $ waitForConnection buffers sock
-- | Read one command line from a connected client and dispatch on it.
-- Returns True to keep the accept loop running, False to shut the server
-- down.  Replies are written back on the same handle.
handleConnection :: BufferState -> Handle -> IO Bool
handleConnection buffers h = do
    whisper $ "Client connected"
    cmd <- hGetLine h
    case cmd of
        -- \"run\": program name line, working-directory line, then argument
        -- lines terminated by an empty line.  All of them are decoded with
        -- 'unescapeStr' (escaping scheme defined in Util -- presumably to
        -- allow newlines/special chars; confirm there if it matters).
        "run" -> do
            progName <- hGetLine h
            progPwd <- hGetLine h
            progArgs <- readArgs []
            runCommand buffers (unescapeStr progName) (unescapeStr progPwd) (reverse progArgs)
            hPutStrLn h "Ok"
            return True
        "status" -> do
            whisper $ "Sending status"
            status <- bufferStateStatus buffers
            hPutStrLn h status
            return True
        -- Returning False stops 'waitForConnection'.
        "shutdown" -> do
            whisper $ "Shutting down"
            hPutStrLn h "Ok"
            return False
        "flushbuffer" -> error "TODO"
        "fillbuffer" -> error "TODO"
        _ -> do
            whisper $ "Invalid command: " ++ cmd
            hPutStrLn h $ "Invalid command: " ++ cmd
            return True
    where
    -- Accumulate argument lines (in reverse order -- caller re-reverses)
    -- until a blank line ends the list.
    readArgs :: [String] -> IO [String]
    readArgs xs = do
        t <- hGetLine h
        case null t of
            True -> return xs
            False -> readArgs ((unescapeStr t):xs)
|
bitc/instantrun
|
src/Server.hs
|
mit
| 3,014
| 0
| 18
| 885
| 856
| 416
| 440
| 73
| 7
|
-- | Trial-division primality test.
--
-- Fixed: the original had no lower bound, so 0 and 1 (and negatives) were
-- reported prime.  The only in-file use, @filter isPrime [2..]@, is
-- unaffected, so this is backward compatible.
isPrime :: Integral a => a -> Bool
isPrime n
  | n < 2     = False
  | otherwise = go 2
  where
    -- Check candidate divisors up to sqrt n; any hit means composite.
    go i
      | i * i > n      = True
      | n `mod` i == 0 = False
      | otherwise      = go (i + 1)

-- | Infinite ascending list of primes.
primes :: [Integer]
primes = filter isPrime [2 ..]

-- | Project Euler 7: the 10001st prime (index 10000, 0-based).
ret :: Integer
ret = primes !! (10001 - 1)
|
liuyang1/euler
|
007.hs
|
mit
| 212
| 0
| 11
| 94
| 109
| 54
| 55
| 7
| 1
|
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.ClientNamenodeProtocolProtos.GetFsStatusRequestProto (GetFsStatusRequestProto(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
-- NOTE(review): this looks like hprotoc-generated protocol-buffers code for
-- a field-less request message (cf. the Text.ProtocolBuffers.Header import);
-- prefer regenerating from the .proto source over editing by hand.
data GetFsStatusRequestProto = GetFsStatusRequestProto{}
                             deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)

-- Merging two values of a field-less message is trivially the value itself.
instance P'.Mergeable GetFsStatusRequestProto where
  mergeAppend GetFsStatusRequestProto GetFsStatusRequestProto = GetFsStatusRequestProto

instance P'.Default GetFsStatusRequestProto where
  defaultValue = GetFsStatusRequestProto

-- Wire format: no fields, so the payload size is 0 and (de)serialisation
-- only deals with message framing (type codes 10/11 per the generator).
instance P'.Wire GetFsStatusRequestProto where
  wireSize ft' self'@(GetFsStatusRequestProto)
   = case ft' of
       10 -> calc'Size
       11 -> P'.prependMessageSize calc'Size
       _ -> P'.wireSizeErr ft' self'
    where
        calc'Size = 0
  wirePut ft' self'@(GetFsStatusRequestProto)
   = case ft' of
       10 -> put'Fields
       11 -> do
               P'.putSize (P'.wireSize 10 self')
               put'Fields
       _ -> P'.wirePutErr ft' self'
    where
        put'Fields
         = do
             Prelude'.return ()
  wireGet ft'
   = case ft' of
       10 -> P'.getBareMessageWith update'Self
       11 -> P'.getMessageWith update'Self
       _ -> P'.wireGetErr ft'
    where
        -- Any tag encountered is unknown (no declared fields).
        update'Self wire'Tag old'Self
         = case wire'Tag of
             _ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self

instance P'.MessageAPI msg' (msg' -> GetFsStatusRequestProto) GetFsStatusRequestProto where
  getVal m' f' = f' m'

instance P'.GPB GetFsStatusRequestProto

-- Reflection data is embedded as a 'read'-able string by the generator.
instance P'.ReflectDescriptor GetFsStatusRequestProto where
  getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
  reflectDescriptorInfo _
   = Prelude'.read
      "DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.hdfs.GetFsStatusRequestProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"ClientNamenodeProtocolProtos\"], baseName = MName \"GetFsStatusRequestProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"ClientNamenodeProtocolProtos\",\"GetFsStatusRequestProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"

instance P'.TextType GetFsStatusRequestProto where
  tellT = P'.tellSubMessage
  getT = P'.getSubMessage

instance P'.TextMsg GetFsStatusRequestProto where
  textPut msg = Prelude'.return ()
  textGet = Prelude'.return P'.defaultValue
|
alexbiehl/hoop
|
hadoop-protos/src/Hadoop/Protos/ClientNamenodeProtocolProtos/GetFsStatusRequestProto.hs
|
mit
| 2,907
| 1
| 16
| 533
| 554
| 291
| 263
| 53
| 0
|
{-# htermination eltsFM_GE :: Ord a => FiniteMap a b -> a -> [b] #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_eltsFM_GE_1.hs
|
mit
| 86
| 0
| 3
| 17
| 5
| 3
| 2
| 1
| 0
|
-- Project Euler Problem 7 - 10001st prime
--
--
import Data.List
-- | Incremental sieve: keep the first element and recursively discard its
-- multiples from the remainder.  Assumes the input is sorted ascending
-- (so each kept head is the next prime when starting from [2..]).
coprimes :: (Integral a) => [a] -> [a]
coprimes [] = []
coprimes [p] = [p]
coprimes (p : rest) = p : coprimes (filter notMultipleOfP rest)
  where notMultipleOfP m = m `mod` p /= 0
-- | Print the 10001st prime: index 10000 (0-based) into the sieve of [2..].
main :: IO ()
main = print (coprimes [2 .. 200000] !! 10000)
|
yunwilliamyu/programming-exercises
|
project_euler/p007_10001st_prime.hs
|
cc0-1.0
| 373
| 2
| 12
| 76
| 142
| 79
| 63
| 7
| 1
|
{-# LANGUAGE DeriveGeneric #-}
module Vec3 where
import Control.Applicative
import Control.DeepSeq
import Data.Foldable
import ApplicativeBinaryOp
import Data.Aeson
import GHC.Generics
-- | A three-component vector over an arbitrary scalar type.
data Vec3 a = Vec3 a a a
              deriving (Eq, Show, Generic)

-- JSON (de)serialisation derived generically via GHC.Generics.
instance (FromJSON a) => FromJSON (Vec3 a)
instance (ToJSON a) => ToJSON (Vec3 a)
-- | Squared Euclidean length (avoids the sqrt of 'magnitude').
sqrMagnitude :: Num a => Vec3 a -> a
sqrMagnitude (Vec3 a b c) = square a + square b + square c
  where square t = t * t
-- | Euclidean length of a vector.
magnitude :: Floating a => Vec3 a -> a
magnitude = sqrt . sqrMagnitude
-- | Scale a vector to unit length by dividing each component by the
-- magnitude.  NOTE(review): for the zero vector this divides by zero
-- (NaN components for IEEE floats) -- callers should avoid that input.
normalize :: Floating b => Vec3 b -> Vec3 b
normalize v = fmap (/ magnitude v) v
-- | Inner (dot) product of two vectors.
dot :: Num a => Vec3 a -> Vec3 a -> a
dot (Vec3 x1 y1 z1) (Vec3 x2 y2 z2) = x1 * x2 + y1 * y2 + z1 * z2
-- | Right-handed cross product.
--
-- Fixed: the y component previously computed @az * bx - az * bx@, which is
-- identically zero; the correct term is @az * bx - ax * bz@.
cross :: Num a => Vec3 a -> Vec3 a -> Vec3 a
cross (Vec3 ax ay az) (Vec3 bx by bz) = Vec3 (ay * bz - by * az)
                                             (az * bx - ax * bz)
                                             (ax * by - bx * ay)
-- | Reflect direction @d@ about normal @n@: @d - 2 (n . d) n@ (uses the
-- component-wise Num instance of 'Vec3' and 'pure' to broadcast the scalar).
reflect :: Num a => Vec3 a -> Vec3 a -> Vec3 a
reflect n d =
  let twiceProjection = 2 * dot n d
  in d - n * pure twiceProjection
instance Functor Vec3 where
    fmap f (Vec3 x y z) = Vec3 (f x) (f y) (f z)

-- 'pure' broadcasts a scalar to all three components; '<*>' applies
-- component-wise.
instance Applicative Vec3 where
    pure x = Vec3 x x x
    Vec3 f g h <*> Vec3 x y z = Vec3 (f x) (g y) (h z)

instance Foldable Vec3 where
    foldr f b (Vec3 x y z) = f x $ f y $ f z b

-- Component-wise arithmetic.  'abop' comes from ApplicativeBinaryOp and
-- presumably lifts a binary operator over two applicative values -- confirm
-- there.  'fromInteger' yields a constant vector via 'pure'.
instance (Num a) => Num (Vec3 a) where
    (+) = abop (+)
    (-) = abop (-)
    (*) = abop (*)
    abs = fmap abs
    signum = fmap signum
    fromInteger = pure . fromInteger

-- Full evaluation for deepseq: force all three components.
instance (NFData a) => NFData (Vec3 a) where
    rnf (Vec3 x y z) = x `deepseq`
                       y `deepseq`
                       z `deepseq`
                       ()
|
reuk/rayverb
|
src/Vec3.hs
|
gpl-2.0
| 1,633
| 0
| 11
| 553
| 810
| 412
| 398
| 45
| 1
|
{-# LANGUAGE ExistentialQuantification, MultiParamTypeClasses
, DeriveDataTypeable, GeneralizedNewtypeDeriving #-}
{- |
Module : $Header$
Description : Grothendieck logic (flattening of logic graph to a single logic)
Copyright : (c) Till Mossakowski, and Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : till@informatik.uni-bremen.de
Stability : provisional
Portability : non-portable (overlapping instances, dynamics, existentials)
Grothendieck logic (flattening of logic graph to a single logic)
The Grothendieck logic is defined to be the
heterogeneous logic over the logic graph.
This will be the logic over which the data
structures and algorithms for specification in-the-large
are built.
This module heavily works with existential types, see
<http://haskell.org/hawiki/ExistentialTypes> and chapter 7 of /Heterogeneous
specification and the heterogeneous tool set/
(<http://www.informatik.uni-bremen.de/~till/papers/habil.ps>).
References:
R. Diaconescu:
Grothendieck institutions
J. applied categorical structures 10, 2002, p. 383-402.
T. Mossakowski:
Comorphism-based Grothendieck institutions.
In K. Diks, W. Rytter (Eds.), Mathematical foundations of computer science,
LNCS 2420, pp. 593-604
T. Mossakowski:
Heterogeneous specification and the heterogeneous tool set.
-}
module Logic.Grothendieck
( G_basic_spec (..)
, G_sign (..)
, SigId (..)
, startSigId
, isHomSubGsign
, isSubGsign
, logicOfGsign
, symsOfGsign
, G_symbolmap (..)
, G_mapofsymbol (..)
, G_symbol (..)
, G_symb_items_list (..)
, G_symb_map_items_list (..)
, G_sublogics (..)
, isSublogic
, isProperSublogic
, joinSublogics
, G_morphism (..)
, MorId (..)
, startMorId
, mkG_morphism
, lessSublogicComor
, LogicGraph (..)
, setCurLogic
, setSyntax
, setCurSublogic
, emptyLogicGraph
, lookupLogic
, lookupCurrentLogic
, lookupCurrentSyntax
, lookupCompComorphism
, lookupComorphism
, lookupModification
, GMorphism (..)
, isHomogeneous
, Grothendieck (..)
, gEmbed
, gEmbed2
, gEmbedComorphism
, homGsigDiff
, gsigUnion
, gsigManyUnion
, homogeneousMorManyUnion
, logicInclusion
, logicUnion
, updateMorIndex
, toG_morphism
, gSigCoerce
, ginclusion
, compInclusion
, findComorphismPaths
, logicGraph2Graph
, findComorphism
, isTransportable
, Square (..)
, LaxTriangle (..)
, mkIdSquare
, mkDefSquare
, mirrorSquare
, lookupSquare
) where
import Logic.Coerce
import Logic.Comorphism
import Logic.ExtSign
import Logic.Logic
import Logic.Modification
import Logic.Morphism
import ATerm.Lib
import Common.Doc
import Common.DocUtils
import Common.ExtSign
import Common.Id
import Common.IRI (IRI)
import Common.Lexer
import Common.Parsec
import Common.Result
import Common.Token
import Common.Utils
import Common.LibName
import Common.GraphAlgo
import Control.Monad (foldM)
import Data.Maybe
import Data.Typeable
import qualified Data.Map as Map
import qualified Data.Set as Set
import Text.ParserCombinators.Parsec (Parser, parse, eof, (<|>))
-- for looking up modifications
-- * \"Grothendieck\" versions of the various parts of type class Logic
-- | Grothendieck basic specifications: a basic spec of some logic, packed
-- existentially together with its logic id.
data G_basic_spec = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree =>
        G_basic_spec lid basic_spec
    deriving Typeable

instance Show G_basic_spec where
    show (G_basic_spec _ s) = show s

instance Pretty G_basic_spec where
    pretty (G_basic_spec _ s) = pretty s

instance GetRange G_basic_spec

-- dummy instances for development graphs
-- NOTE(review): Eq/Ord deliberately identify ALL basic specs; do not rely
-- on them for semantic comparison.
instance Ord G_basic_spec where
    compare _ _ = EQ

instance Eq G_basic_spec where
    _ == _ = True

-- | index for signatures
newtype SigId = SigId Int
    deriving (Typeable, Show, Eq, Ord, Enum, ShATermConvertible)

-- | The smallest ("unknown") signature index.
startSigId :: SigId
startSigId = SigId 0
{- | Grothendieck signatures with an lookup index. Zero indices
indicate unknown ones. It would be nice to have special (may be
negative) indices for empty signatures (one for every logic). -}
data G_sign = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree => G_sign
    { gSignLogic :: lid
    , gSign :: ExtSign sign symbol
    , gSignSelfIdx :: SigId -- ^ index to lookup this 'G_sign' in sign map
    } deriving Typeable

instance Eq G_sign where
    a == b = compare a b == EQ

instance Ord G_sign where
    -- Equal positive indices short-cut the (expensive) coerced comparison.
    compare (G_sign l1 sigma1 s1) (G_sign l2 sigma2 s2) =
        if s1 > startSigId && s2 > startSigId && s1 == s2 then EQ else
        case compare (Logic l1) $ Logic l2 of
          EQ -> compare (coerceSign l1 l2 "Eq G_sign" sigma1) $ Just sigma2
          r -> r

-- | prefer a faster subsignature test if possible
isHomSubGsign :: G_sign -> G_sign -> Bool
isHomSubGsign (G_sign l1 sigma1 s1) (G_sign l2 sigma2 s2) =
    -- Same known index means same signature, hence trivially a subsignature.
    (s1 > startSigId && s2 > startSigId && s1 == s2) ||
    maybe False (ext_is_subsig l1 sigma1)
      (coerceSign l2 l1 "isHomSubGsign" sigma2)

-- | Subsignature test across logics, along an inclusion comorphism from the
-- logic graph (if one exists).
isSubGsign :: LogicGraph -> G_sign -> G_sign -> Bool
isSubGsign lg (G_sign lid1 (ExtSign sigma1 _) _)
              (G_sign lid2 (ExtSign sigma2 _) _) =
  Just True ==
    do Comorphism cid <- resultToMaybe $
         logicInclusion lg (Logic lid1) (Logic lid2)
       sigma1' <- coercePlainSign lid1 (sourceLogic cid)
         "Grothendieck.isSubGsign: cannot happen" sigma1
       sigma2' <- coercePlainSign lid2 (targetLogic cid)
         "Grothendieck.isSubGsign: cannot happen" sigma2
       sigma1t <- resultToMaybe $ map_sign cid sigma1'
       return $ is_subsig (targetLogic cid) (fst sigma1t) sigma2'

instance Show G_sign where
    show (G_sign _ s _) = show s

instance Pretty G_sign where
    pretty (G_sign _ (ExtSign s _) _) = pretty s

-- | The logic a Grothendieck signature lives in.
logicOfGsign :: G_sign -> AnyLogic
logicOfGsign (G_sign lid _ _) = Logic lid

-- | All symbols of a Grothendieck signature, wrapped as 'G_symbol's.
symsOfGsign :: G_sign -> Set.Set G_symbol
symsOfGsign (G_sign lid (ExtSign sgn _) _) = Set.map (G_symbol lid)
    $ symset_of lid sgn
-- | Grothendieck maps with symbol as keys
data G_symbolmap a = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree =>
        G_symbolmap lid (Map.Map symbol a)
    deriving Typeable

instance Show a => Show (G_symbolmap a) where
    show (G_symbolmap _ sm) = show sm

instance (Typeable a, Ord a) => Eq (G_symbolmap a) where
    a == b = compare a b == EQ

instance (Typeable a, Ord a) => Ord (G_symbolmap a) where
    -- Compare logics first, then the (coerced) maps.
    compare (G_symbolmap l1 sm1) (G_symbolmap l2 sm2) =
        case compare (Logic l1) $ Logic l2 of
          EQ -> compare (coerceSymbolmap l1 l2 sm1) sm2
          r -> r

-- | Grothendieck maps with symbol as values
data G_mapofsymbol a = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree =>
        G_mapofsymbol lid (Map.Map a symbol)
    deriving Typeable

instance Show a => Show (G_mapofsymbol a) where
    show (G_mapofsymbol _ sm) = show sm

instance (Typeable a, Ord a) => Eq (G_mapofsymbol a) where
    a == b = compare a b == EQ

instance (Typeable a, Ord a) => Ord (G_mapofsymbol a) where
    compare (G_mapofsymbol l1 sm1) (G_mapofsymbol l2 sm2) =
        case compare (Logic l1) $ Logic l2 of
          EQ -> compare (coerceMapofsymbol l1 l2 sm1) sm2
          r -> r

-- | Grothendieck symbols
data G_symbol = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree =>
        G_symbol lid symbol
    deriving Typeable

instance GetRange G_symbol where
    getRange (G_symbol _ s) = getRange s
    rangeSpan (G_symbol _ s) = rangeSpan s

instance Show G_symbol where
    show (G_symbol _ s) = show s

instance Pretty G_symbol where
    pretty (G_symbol _ s) = pretty s

instance Eq G_symbol where
    a == b = compare a b == EQ

instance Ord G_symbol where
    compare (G_symbol l1 s1) (G_symbol l2 s2) =
        case compare (Logic l1) $ Logic l2 of
          EQ -> compare (coerceSymbol l1 l2 s1) s2
          r -> r

-- | Grothendieck symbol lists
data G_symb_items_list = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree =>
        G_symb_items_list lid [symb_items]
    deriving Typeable

instance GetRange G_symb_items_list

instance Show G_symb_items_list where
    show (G_symb_items_list _ l) = show l

instance Pretty G_symb_items_list where
    pretty (G_symb_items_list _ l) = ppWithCommas l

instance Eq G_symb_items_list where
    (G_symb_items_list i1 s1) == (G_symb_items_list i2 s2) =
        coerceSymbItemsList i1 i2 "Eq G_symb_items_list" s1 == Just s2

-- | Grothendieck symbol maps
data G_symb_map_items_list = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree =>
        G_symb_map_items_list lid [symb_map_items]
    deriving Typeable

instance GetRange G_symb_map_items_list

instance Show G_symb_map_items_list where
    show (G_symb_map_items_list _ l) = show l

instance Pretty G_symb_map_items_list where
    pretty (G_symb_map_items_list _ l) = ppWithCommas l

instance Eq G_symb_map_items_list where
    (G_symb_map_items_list i1 s1) == (G_symb_map_items_list i2 s2) =
        coerceSymbMapItemsList i1 i2 "Eq G_symb_map_items_list" s1 == Just s2
-- | Grothendieck sublogics
data G_sublogics = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree =>
        G_sublogics lid sublogics
    deriving Typeable

instance Show G_sublogics where
    -- Rendered as "<logic>" or "<logic>.<sublogic>".
    show (G_sublogics lid sub) = language_name lid ++ case sublogicName sub of
      [] -> ""
      h -> '.' : h

instance Eq G_sublogics where
    g1 == g2 = compare g1 g2 == EQ

instance Ord G_sublogics where
    compare (G_sublogics lid1 l1) (G_sublogics lid2 l2) =
        case compare (Logic lid1) $ Logic lid2 of
          EQ -> compare (forceCoerceSublogic lid1 lid2 l1) l2
          r -> r
-- | Containment of Grothendieck sublogics (after coercing into the
-- second argument's logic).
isSublogic :: G_sublogics -> G_sublogics -> Bool
isSublogic (G_sublogics lidA subA) (G_sublogics lidB subB) =
    isSubElem (forceCoerceSublogic lidA lidB subA) subB

-- | Strict containment: contained in, but not equal to, the second argument.
isProperSublogic :: G_sublogics -> G_sublogics -> Bool
isProperSublogic x y = x /= y && isSublogic x y

-- | Least upper bound of two Grothendieck sublogics, when the first can be
-- coerced into the second one's logic.
joinSublogics :: G_sublogics -> G_sublogics -> Maybe G_sublogics
joinSublogics (G_sublogics lidA subA) (G_sublogics lidB subB) =
    case coerceSublogic lidA lidB "coerce Sublogic" subA of
      Nothing -> Nothing
      Just sl -> Just (G_sublogics lidB (lub sl subB))
-- | index for morphisms
newtype MorId = MorId Int
    deriving (Typeable, Show, Eq, Ord, Enum, ShATermConvertible)

-- | The smallest ("unknown") morphism index.
startMorId :: MorId
startMorId = MorId 0

{- | Homogeneous Grothendieck signature morphisms with indices. For
the domain index it would be nice it showed also the emptiness. -}
data G_morphism = forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree => G_morphism
    { gMorphismLogic :: lid
    , gMorphism :: morphism
    , gMorphismSelfIdx :: MorId -- ^ lookup index in morphism map
    } deriving Typeable

instance Show G_morphism where
    show (G_morphism _ m _) = show m

instance Pretty G_morphism where
    pretty (G_morphism _ m _) = pretty m

-- | Wrap a homogeneous morphism with the default ("unknown") index.
mkG_morphism :: forall lid sublogics
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol proof_tree .
        Logic lid sublogics
          basic_spec sentence symb_items symb_map_items
          sign morphism symbol raw_symbol proof_tree
    => lid -> morphism -> G_morphism
mkG_morphism l m = G_morphism l m startMorId

-- | check if sublogic fits for comorphism
lessSublogicComor :: G_sublogics -> AnyComorphism -> Bool
lessSublogicComor (G_sublogics lid1 sub1) (Comorphism cid) =
    let lid2 = sourceLogic cid
    in Logic lid2 == Logic lid1
       && isSubElem (forceCoerceSublogic lid1 lid2 sub1) (sourceSublogic cid)

-- Maps a library/theory-name pair per IRI, per logic (see LogicGraph).
type SublogicBasedTheories = Map.Map IRI (LibName, String)
-- | Logic graph: all known logics, comorphisms, inclusions, unions,
-- morphisms, modifications and squares, plus the current selection state
-- (logic, syntax, sublogic, target base) used while analysing libraries.
data LogicGraph = LogicGraph
    { logics :: Map.Map String AnyLogic
    , currentLogic :: String
    , currentSyntax :: Maybe IRI
    , currentSublogic :: Maybe G_sublogics
    , currentTargetBase :: Maybe (LibName, String)
    , sublogicBasedTheories :: Map.Map AnyLogic SublogicBasedTheories
    , comorphisms :: Map.Map String AnyComorphism
    , inclusions :: Map.Map (String, String) AnyComorphism
    , unions :: Map.Map (String, String) (AnyComorphism, AnyComorphism)
    , morphisms :: Map.Map String AnyMorphism
    , modifications :: Map.Map String AnyModification
    , squares :: Map.Map (AnyComorphism, AnyComorphism) [Square]
    , qTATranslations :: Map.Map String AnyComorphism
    , prefixes :: Map.Map String IRI
    } deriving Show

-- | The empty logic graph; "CASL" is the default current logic.
emptyLogicGraph :: LogicGraph
emptyLogicGraph = LogicGraph
    { logics = Map.empty
    , currentLogic = "CASL"
    , currentSyntax = Nothing
    , currentSublogic = Nothing
    , currentTargetBase = Nothing
    , sublogicBasedTheories = Map.empty
    , comorphisms = Map.empty
    , inclusions = Map.empty
    , unions = Map.empty
    , morphisms = Map.empty
    , modifications = Map.empty
    , squares = Map.empty
    , qTATranslations = Map.empty
    , prefixes = Map.empty }
setCurLogicAux :: String -> LogicGraph -> LogicGraph
setCurLogicAux name lg = lg { currentLogic = name }

-- | Switch the current logic; actually changing it also resets the chosen
-- syntax, since a serialization belongs to a particular logic.
setCurLogic :: String -> LogicGraph -> LogicGraph
setCurLogic name lg
  | name == currentLogic lg = lg
  | otherwise = setSyntaxAux Nothing (setCurLogicAux name lg)

setSyntaxAux :: Maybe IRI -> LogicGraph -> LogicGraph
setSyntaxAux syn lg = lg { currentSyntax = syn }

-- | Set the current syntax; passing 'Nothing' leaves the graph untouched.
setSyntax :: Maybe IRI -> LogicGraph -> LogicGraph
setSyntax syn lg = case syn of
  Nothing -> lg
  Just _ -> setSyntaxAux syn lg

setCurSublogic :: Maybe G_sublogics -> LogicGraph -> LogicGraph
setCurSublogic sub lg = lg { currentSublogic = sub }
instance Pretty LogicGraph where
    pretty lg = text ("current logic is: " ++ currentLogic lg)
        $+$ text "all logics:"
        $+$ sepByCommas (map text $ Map.keys $ logics lg)
        $+$ text "comorphism inclusions:"
        $+$ vcat (map pretty $ Map.elems $ inclusions lg)
        $+$ text "all comorphisms:"
        $+$ vcat (map pretty $ Map.elems $ comorphisms lg)

-- | find a logic in a logic graph
lookupLogic :: Monad m => String -> String -> LogicGraph -> m AnyLogic
lookupLogic error_prefix logname logicGraph =
    case Map.lookup logname $ logics logicGraph of
      Nothing -> fail $ error_prefix ++ "unknown logic: " ++ logname
      Just lid -> return lid

-- | Look up the graph's current logic; @msg@ prefixes a possible error.
lookupCurrentLogic :: Monad m => String -> LogicGraph -> m AnyLogic
lookupCurrentLogic msg lg = lookupLogic (msg ++ " ") (currentLogic lg) lg

-- | Current logic together with the currently selected syntax (if any).
lookupCurrentSyntax :: Monad m => String -> LogicGraph
    -> m (AnyLogic, Maybe IRI)
lookupCurrentSyntax msg lg = do
    l <- lookupLogic (msg ++ " ") (currentLogic lg) lg
    return (l, currentSyntax lg)

-- | union to two logics
-- Tries, in order: an inclusion l1->l2, an inclusion l2->l1, a registered
-- union (in either key order); fails if none exists.
logicUnion :: LogicGraph -> AnyLogic -> AnyLogic
    -> Result (AnyComorphism, AnyComorphism)
logicUnion lg l1@(Logic lid1) l2@(Logic lid2) =
    case logicInclusion lg l1 l2 of
      Result _ (Just c) -> return (c, idComorphism l2)
      _ -> case logicInclusion lg l2 l1 of
        Result _ (Just c) -> return (idComorphism l1, c)
        _ -> case Map.lookup (ln1, ln2) (unions lg) of
          Just u -> return u
          Nothing -> case Map.lookup (ln2, ln1) (unions lg) of
            Just (c2, c1) -> return (c1, c2)
            Nothing -> fail $ "Union of logics " ++ ln1 ++
                       " and " ++ ln2 ++ " does not exist"
    where ln1 = language_name lid1
          ln2 = language_name lid2

-- | find a comorphism composition in a logic graph
lookupCompComorphism :: Monad m => [String] -> LogicGraph -> m AnyComorphism
lookupCompComorphism nameList logicGraph = do
    cs <- mapM lookupN nameList
    case cs of
      c : cs1 -> foldM compComorphism c cs1
      _ -> fail "Illegal empty comorphism composition"
    where
    -- A name of the form "id_<logic>[.<sublogic>]" denotes an identity
    -- comorphism; anything else is looked up in the comorphism table.
    lookupN name =
      case name of
        'i' : 'd' : '_' : logic -> do
            let (mainLogic, subLogicD) = span (/= '.') logic
                -- subLogicD will begin with a . which has to be removed
                msublogic = if null subLogicD
                              then Nothing
                              else Just $ tail subLogicD
            Logic lid <- maybe (fail ("Cannot find Logic " ++ mainLogic)) return
                $ Map.lookup mainLogic (logics logicGraph)
            case maybe (Just $ top_sublogic lid) (parseSublogic lid) msublogic of
              Nothing -> fail $ maybe "missing sublogic"
                         ("unknown sublogic name " ++) msublogic
              Just s -> return $ Comorphism $ mkIdComorphism lid s
        _ -> maybe (fail ("Cannot find logic comorphism " ++ name)) return
               $ Map.lookup name (comorphisms logicGraph)
-- | Look up a (possibly composite) comorphism by its @;@-separated name.
lookupComorphism :: Monad m => String -> LogicGraph -> m AnyComorphism
lookupComorphism name = lookupCompComorphism (splitOn ';' name)
-- | find a modification in a logic graph
lookupModification :: (Monad m) => String -> LogicGraph -> m AnyModification
lookupModification input lG
    = case parse (parseModif lG << eof) "" input of
        Left err -> fail $ show err
        Right x -> x

-- | Parse a horizontal composition of vertical compositions, separated by
-- the cross token.
parseModif :: (Monad m) => LogicGraph -> Parser (m AnyModification)
parseModif lG = do
    (xs, _) <- separatedBy (vertcomp lG) crossT
    let r = do
          y <- sequence xs
          case y of
            m : ms -> return $ foldM horCompModification m ms
            _ -> Nothing
    case r of
      Nothing -> fail "Illegal empty horizontal composition"
      Just m -> return m

-- | Parse a vertical composition of primitive modifications, separated by
-- semicolons.
vertcomp :: (Monad m) => LogicGraph -> Parser (m AnyModification)
vertcomp lG = do
    (xs, _) <- separatedBy (pm lG) semiT
    let r = do
          y <- sequence xs
          case y of
            m : ms -> return $ foldM vertCompModification m ms
            _ -> Nothing
    -- r has type Maybe (m AnyModification)
    case r of
      Nothing -> fail "Illegal empty vertical composition"
      Just m -> return m

-- | Primitive modification: a named one or a parenthesised expression.
pm :: (Monad m) => LogicGraph -> Parser (m AnyModification)
pm lG = parseName lG <|> bracks lG

-- | A modification expression in parentheses.
bracks :: (Monad m) => LogicGraph -> Parser (m AnyModification)
bracks lG = do
    oParenT
    modif <- parseModif lG
    cParenT
    return modif
-- | Parse @id_<comorphism-name>@ as the identity modification of the named
-- comorphism.
--
-- Fixed: the error message was missing the space before the name
-- (@"Cannot find comorphism" ++ name@ printed \"...comorphismfoo\").
parseIdentity :: (Monad m) => LogicGraph -> Parser (m AnyModification)
parseIdentity lG = do
    tryString "id_"
    tok <- simpleId
    let name = tokStr tok
    case Map.lookup name (comorphisms lG) of
      Nothing -> fail $ "Cannot find comorphism " ++ name
      Just x -> return $ return $ idModification x
-- | Parse a named modification (or an identity via 'parseIdentity').
--
-- Fixed: the error message was missing the space before the name
-- (@"Cannot find modification" ++ name@ printed \"...modificationfoo\").
parseName :: (Monad m) => LogicGraph -> Parser (m AnyModification)
parseName lG = parseIdentity lG <|> do
    tok <- simpleId
    let name = tokStr tok
    case Map.lookup name (modifications lG) of
      Nothing -> fail $ "Cannot find modification " ++ name
      Just x -> return $ return x
-- * The Grothendieck signature category

-- | Grothendieck signature morphisms with indices
-- A comorphism applied to a source signature, followed by a target-logic
-- morphism; both a source-signature index and a target-morphism index are
-- cached for lookup.
data GMorphism = forall cid lid1 sublogics1
        basic_spec1 sentence1 symb_items1 symb_map_items1
        sign1 morphism1 symbol1 raw_symbol1 proof_tree1
        lid2 sublogics2
        basic_spec2 sentence2 symb_items2 symb_map_items2
        sign2 morphism2 symbol2 raw_symbol2 proof_tree2 .
        Comorphism cid
          lid1 sublogics1 basic_spec1 sentence1
          symb_items1 symb_map_items1
          sign1 morphism1 symbol1 raw_symbol1 proof_tree1
          lid2 sublogics2 basic_spec2 sentence2
          symb_items2 symb_map_items2
          sign2 morphism2 symbol2 raw_symbol2 proof_tree2 => GMorphism
    { gMorphismComor :: cid
    , gMorphismSign :: ExtSign sign1 symbol1
    , gMorphismSignIdx :: SigId -- ^ 'G_sign' index of source signature
    , gMorphismMor :: morphism2
    , gMorphismMorIdx :: MorId -- ^ `G_morphism index of target morphism
    } deriving Typeable

instance Eq GMorphism where
    a == b = compare a b == EQ

instance Ord GMorphism where
    compare (GMorphism cid1 sigma1 in1 mor1 in1')
            (GMorphism cid2 sigma2 in2 mor2 in2') =
        case compare (Comorphism cid1, G_sign (sourceLogic cid1) sigma1 in1)
             (Comorphism cid2, G_sign (sourceLogic cid2) sigma2 in2) of
          EQ -> if in1' > startMorId && in2' > startMorId && in1' == in2'
                then EQ else
                compare (coerceMorphism (targetLogic cid1) (targetLogic cid2)
                         "Ord GMorphism.coerceMorphism" mor1) (Just mor2)
          -- this coersion will succeed, because cid1 and cid2 are equal
          r -> r

-- | A Grothendieck morphism is homogeneous iff its comorphism part is an
-- identity.
isHomogeneous :: GMorphism -> Bool
isHomogeneous (GMorphism cid _ _ _ _) =
    isIdComorphism (Comorphism cid)

data Grothendieck = Grothendieck deriving (Typeable, Show)

instance Language Grothendieck

instance Show GMorphism where
    show (GMorphism cid s _ m _) =
        show (Comorphism cid) ++ "(" ++ show s ++ ")" ++ show m

instance Pretty GMorphism where
    pretty (GMorphism cid (ExtSign s _) _ m _) = let c = Comorphism cid in fsep
        [ text $ show c
        , if isIdComorphism c then empty else specBraces $ space <> pretty s
        , pretty m ]
-- signature category of the Grothendieck institution
instance Category G_sign GMorphism where
    -- Identity: identity comorphism on the signature's own logic plus the
    -- identity morphism on the plain signature.
    ide (G_sign lid sigma@(ExtSign s _) ind) =
        GMorphism (mkIdComorphism lid (top_sublogic lid))
                  sigma ind (ide s) startMorId
    -- composition of Grothendieck signature morphisms
    composeMorphisms (GMorphism r1 sigma1 ind1 mor1 _)
                     (GMorphism r2 _sigma2 _ mor2 _) =
      do let lid1 = sourceLogic r1
             lid2 = targetLogic r1
             lid3 = sourceLogic r2
             lid4 = targetLogic r2
         -- if the second comorphism is the identity then simplify immediately
         if isIdComorphism (Comorphism r2) then do
             mor2' <- coerceMorphism lid4 lid2 "Grothendieck.comp" mor2
             mor' <- composeMorphisms mor1 mor2'
             return (GMorphism r1 sigma1 ind1 mor' startMorId)
           else do
             {- coercion between target of first and
             source of second Grothendieck morphism -}
             mor1' <- coerceMorphism lid2 lid3 "Grothendieck.comp" mor1
             {- map signature morphism component of first Grothendieck morphism
             along the comorphism component of the second one ... -}
             mor1'' <- map_morphism r2 mor1'
             {- and then compose the result with the signature morphism component
             of first one -}
             mor <- composeMorphisms mor1'' mor2
             -- also if the first comorphism is the identity...
             if isIdComorphism (Comorphism r1) &&
                case coerceSublogic lid2 lid3 "Grothendieck.comp"
                     (targetSublogic r1) of
                  Just sl1 -> maybe False
                              (isSubElem (targetSublogic r2))
                              (mapSublogic r2 sl1)
                  _ -> False
               -- ... then things simplify ...
               then do
                 sigma1' <- coerceSign lid1 lid3 "Grothendieck.comp" sigma1
                 return (GMorphism r2 sigma1' ind1 mor startMorId)
               else return $ GMorphism (CompComorphism r1 r2)
                             sigma1 ind1 mor startMorId
    dom (GMorphism r sigma ind _mor _) =
        G_sign (sourceLogic r) sigma ind
    -- Codomain: target logic's codomain signature (index unknown).
    cod (GMorphism r (ExtSign _ _) _ mor _) =
        let lid2 = targetLogic r
            sig2 = cod mor
        in G_sign lid2 (makeExtSign lid2 sig2) startSigId
    isInclusion (GMorphism cid _ _ mor _) =
        isInclusionComorphism cid && isInclusion mor
    -- Legality: the morphism itself is legal and its codomain agrees with
    -- the comorphism-translated source signature.
    legal_mor (GMorphism r (ExtSign s _) _ mor _) = do
        legal_mor mor
        case maybeResult $ map_sign r s of
          Just (sigma', _) | sigma' == cod mor -> return ()
          _ -> fail "legal_mor.GMorphism2"
-- | Embedding of homogeneous signature morphisms as Grothendieck sig mors
-- (keeping the supplied signature's index).
gEmbed2 :: G_sign -> G_morphism -> GMorphism
gEmbed2 (G_sign lid2 sig si) (G_morphism lid mor ind) =
    let cid = mkIdComorphism lid (top_sublogic lid)
        -- NOTE(review): irrefutable 'Just' pattern -- a failing coercion
        -- (logics of sign and morphism differing) raises a pattern-match
        -- error at evaluation time; callers must ensure matching logics.
        Just sig1 = coerceSign lid2 (sourceLogic cid) "gEmbed2" sig
    in GMorphism cid sig1 si mor ind

-- | Embedding of homogeneous signature morphisms as Grothendieck sig mors
gEmbed :: G_morphism -> GMorphism
gEmbed (G_morphism lid mor ind) = let sig = dom mor in
    GMorphism (mkIdComorphism lid (top_sublogic lid))
              (makeExtSign lid sig) startSigId mor ind

-- | Embedding of comorphisms as Grothendieck sig mors
gEmbedComorphism :: AnyComorphism -> G_sign -> Result GMorphism
gEmbedComorphism (Comorphism cid) (G_sign lid sig ind) = do
    sig'@(ExtSign s _) <- coerceSign lid (sourceLogic cid) "gEmbedComorphism" sig
    (sigTar, _) <- map_sign cid s
    return (GMorphism cid sig' ind (ide sigTar) startMorId)

-- | heterogeneous union of two Grothendieck signatures
-- @both@ selects whether the result's distinguished symbols come from the
-- whole union or only from the second argument.
gsigUnion :: LogicGraph -> Bool -> G_sign -> G_sign -> Result G_sign
gsigUnion lg both gsig1@(G_sign lid1 (ExtSign sigma1 _) _)
                  gsig2@(G_sign lid2 (ExtSign sigma2 _) _) =
  if Logic lid1 == Logic lid2
     then homogeneousGsigUnion both gsig1 gsig2
     else do
       (Comorphism cid1, Comorphism cid2) <-
           logicUnion lg (Logic lid1) (Logic lid2)
       let lidS1 = sourceLogic cid1
           lidS2 = sourceLogic cid2
           lidT1 = targetLogic cid1
           lidT2 = targetLogic cid2
       sigma1' <- coercePlainSign lid1 lidS1 "Union of signaturesa" sigma1
       sigma2' <- coercePlainSign lid2 lidS2 "Union of signaturesb" sigma2
       (sigma1'', _) <- map_sign cid1 sigma1' -- where to put axioms???
       (sigma2'', _) <- map_sign cid2 sigma2' -- where to put axioms???
       sigma2''' <- coercePlainSign lidT2 lidT1 "Union of signaturesc" sigma2''
       sigma3 <- signature_union lidT1 sigma1'' sigma2'''
       return $ G_sign lidT1 (ExtSign sigma3 $ symset_of lidT1
           $ if both then sigma3 else sigma2''') startSigId

-- | homogeneous Union of two Grothendieck signatures
homogeneousGsigUnion :: Bool -> G_sign -> G_sign -> Result G_sign
homogeneousGsigUnion both (G_sign lid1 sigma1 _) (G_sign lid2 sigma2 _) = do
    sigma2'@(ExtSign sig2 _) <- coerceSign lid2 lid1 "Union of signatures" sigma2
    sigma3@(ExtSign sig3 _) <- ext_signature_union lid1 sigma1 sigma2'
    return $ G_sign lid1
        (if both then sigma3 else ExtSign sig3 $ symset_of lid1 sig2)
        startSigId

-- | Homogeneous difference of two Grothendieck signatures.
homGsigDiff :: G_sign -> G_sign -> Result G_sign
homGsigDiff (G_sign lid1 (ExtSign s1 _) _) (G_sign lid2 (ExtSign s2 _) _) = do
    s3 <- coercePlainSign lid2 lid1 "hom differerence of signatures" s2
    s4 <- signatureDiff lid1 s1 s3
    return $ G_sign lid1 (makeExtSign lid1 s4) startSigId
-- | Union of a non-empty list of Grothendieck signatures, folded with
-- 'gsigUnion'.
--
-- Fixed: error-message typo ("emtpy" -> "empty").
gsigManyUnion :: LogicGraph -> [G_sign] -> Result G_sign
gsigManyUnion _ [] =
    fail "union of empty list of signatures"
gsigManyUnion lg (gsigma : gsigmas) =
    foldM (gsigUnion lg True) gsigma gsigmas
-- | Homogeneous union of a non-empty list of morphisms; each step coerces
-- into the accumulator's logic before taking 'morphism_union'.
--
-- Fixed: error-message typo ("emtpy" -> "empty").
homogeneousMorManyUnion :: [G_morphism] -> Result G_morphism
homogeneousMorManyUnion [] =
    fail "homogeneous union of empty list of morphisms"
homogeneousMorManyUnion (gmor : gmors) =
    foldM ( \ (G_morphism lid2 mor2 _) (G_morphism lid1 mor1 _) -> do
        mor1' <- coerceMorphism lid1 lid2 "homogeneousMorManyUnion" mor1
        mor <- morphism_union lid2 mor1' mor2
        return (G_morphism lid2 mor startMorId)) gmor gmors
-- | inclusion between two logics: the identity comorphism when both
-- language names coincide, otherwise a lookup in the inclusion table of
-- the logic graph; fails when no inclusion is registered.
logicInclusion :: LogicGraph -> AnyLogic -> AnyLogic -> Result AnyComorphism
logicInclusion logicGraph l1@(Logic lid1) (Logic lid2) =
  let ln1 = language_name lid1
      ln2 = language_name lid2 in
  if ln1 == ln2 then
    return (idComorphism l1)
  else case Map.lookup (ln1, ln2) (inclusions logicGraph) of
    Just (Comorphism i) ->
      return (Comorphism i)
    Nothing ->
      fail ("No inclusion from " ++ ln1 ++ " to " ++ ln2 ++ " found")
-- | replace the morphism index stored in a Grothendieck morphism.
updateMorIndex :: MorId -> GMorphism -> GMorphism
updateMorIndex i (GMorphism cid sign si mor _) = GMorphism cid sign si mor i

-- | forget the comorphism part of a 'GMorphism', keeping only the
-- morphism in the target logic (and its index).
toG_morphism :: GMorphism -> G_morphism
toG_morphism (GMorphism cid _ _ mor i) = G_morphism (targetLogic cid) mor i
-- | coerce a Grothendieck signature into another logic along the
-- registered inclusion comorphism; returns the translated signature
-- together with the comorphism used.  The identity comorphism is used
-- when source and target logic already coincide.
gSigCoerce :: LogicGraph -> G_sign -> AnyLogic
  -> Result (G_sign, AnyComorphism)
gSigCoerce lg g@(G_sign lid1 sigma1 _) l2@(Logic lid2) =
  if Logic lid1 == Logic lid2
  then return (g, idComorphism l2) else do
    cmor@(Comorphism i) <- logicInclusion lg (Logic lid1) l2
    ExtSign sigma1' sy <-
      coerceSign lid1 (sourceLogic i) "gSigCoerce of signature" sigma1
    (sigma1'', _) <- map_sign i sigma1'
    -- replaced the 'sys <- return ...' anti-pattern with a let binding
    let sys = Set.unions . map (map_symbol i sigma1') $ Set.toList sy
        lid = targetLogic i
    return (G_sign lid (ExtSign sigma1'' sys) startSigId, cmor)
-- | inclusion morphism between two Grothendieck signatures
ginclusion :: LogicGraph -> G_sign -> G_sign -> Result GMorphism
ginclusion = inclusionAux True

-- | worker for 'ginclusion'; the Bool chooses between a plain
-- 'inclusion' ('True') and the weaker 'subsig_inclusion' ('False')
-- in the target logic.
inclusionAux :: Bool -> LogicGraph -> G_sign -> G_sign -> Result GMorphism
inclusionAux guard lg (G_sign lid1 sigma1 ind) (G_sign lid2 sigma2 _) = do
  Comorphism i <- logicInclusion lg (Logic lid1) (Logic lid2)
  ext1@(ExtSign sigma1' _) <-
    coerceSign lid1 (sourceLogic i) "Inclusion of signatures" sigma1
  (sigma1'', _) <- map_sign i sigma1'
  ExtSign sigma2' _ <-
    coerceSign lid2 (targetLogic i) "Inclusion of signatures" sigma2
  mor <- (if guard then inclusion else subsig_inclusion)
    (targetLogic i) sigma1'' sigma2'
  return (GMorphism i ext1 ind mor startMorId)
-- | compose two Grothendieck morphisms via an intermediate inclusion
-- (built by the supplied function) between the codomain of the first
-- and the domain of the second.
genCompInclusion :: (G_sign -> G_sign -> Result GMorphism)
  -> GMorphism -> GMorphism -> Result GMorphism
genCompInclusion f mor1 mor2 = do
  let sigma1 = cod mor1
      sigma2 = dom mor2
  incl <- f sigma1 sigma2
  mor <- composeMorphisms mor1 incl
  composeMorphisms mor mor2

{- | Composition of two Grothendieck signature morphisms
with intermediate inclusion -}
compInclusion :: LogicGraph -> GMorphism -> GMorphism -> Result GMorphism
compInclusion = genCompInclusion . inclusionAux False
-- | Find all (composites of) comorphisms starting from a given logic
findComorphismPaths :: LogicGraph -> G_sublogics -> [AnyComorphism]
findComorphismPaths lg (G_sublogics lid sub) =
  nubOrd $ map fst $ iterateComp (0 :: Int) [(idc, [idc])]
  where
  idc = Comorphism (mkIdComorphism lid sub)
  coMors = Map.elems $ comorphisms lg
  -- compute possible compositions, but only for a bounded number of
  -- rounds (the iteration stops once n exceeds 2, or at a fixpoint)
  iterateComp n l =
    if n > 2 || l == newL then newL else iterateComp (n + 1) newL
    where
    newL = nubOrd $ l ++ concatMap extend l
    -- extend comorphism list in all directions, but no cycles:
    -- a comorphism already used on this path is not appended again
    extend (coMor, cmps) =
      let addCoMor c =
            case compComorphism coMor c of
              Nothing -> Nothing
              Just c1 -> Just (c1, c : cmps)
      in mapMaybe addCoMor $ filter (not . (`elem` cmps)) coMors
-- | graph representation of the logic graph: nodes are sublogics paired
-- with the comorphism used to reach them, edges are comorphisms with
-- model expansion.  Edges back along the immediately preceding
-- comorphism are excluded (compared via 'show').
logicGraph2Graph :: LogicGraph
  -> Graph (G_sublogics, Maybe AnyComorphism) AnyComorphism
logicGraph2Graph lg =
  let relevantMorphisms = filter hasModelExpansion . Map.elems $ comorphisms lg
  in Graph {
    neighbours = \ (G_sublogics lid sl, c1) ->
      let coerce c = forceCoerceSublogic lid (sourceLogic c)
      in Data.Maybe.mapMaybe
          (\ (Comorphism c) -> maybe Nothing (\ sl1 -> Just (Comorphism c,
              (G_sublogics (targetLogic c) sl1, Just $ Comorphism c)))
            (mapSublogic c (coerce c sl))) $
          filter (\ (Comorphism c) -> Logic (sourceLogic c) == Logic lid
            && isSubElem (coerce c sl) (sourceSublogic c)
            && (case c1 of Just (Comorphism c1') -> show c1' /= show c
                           _ -> True)) relevantMorphisms,
    -- intra-logic steps are cheaper than logic-changing ones
    weight = \ (Comorphism c) -> if Logic (sourceLogic c) ==
      Logic (targetLogic c) then 1 else 3
    }
-- | finds the first comorphism in the list whose source logic matches
-- and whose source sublogic subsumes the given sublogic; fails in the
-- given monad when none matches.
findComorphism :: Monad m => G_sublogics -> [AnyComorphism] -> m AnyComorphism
findComorphism _ [] = fail "No matching comorphism found"
findComorphism gsl@(G_sublogics lid sub) (Comorphism cid : rest) =
  let l2 = sourceLogic cid in
  if Logic lid == Logic l2
     && isSubElem (forceCoerceSublogic lid l2 sub) (sourceSublogic cid)
  then return $ Comorphism cid
  else findComorphism gsl rest
{- | check transportability of Grothendieck signature morphisms
(currently returns false for heterogeneous morphisms).  Both indices
must be past their start values, i.e. the signature/morphism have been
registered, and both the comorphism and the target-logic morphism must
be transportable. -}
isTransportable :: GMorphism -> Bool
isTransportable (GMorphism cid _ ind1 mor ind2) =
  ind1 > startSigId && ind2 > startMorId
    && isModelTransportable (Comorphism cid)
    && is_transportable (targetLogic cid) mor
-- * Lax triangles and weakly amalgamable squares of lax triangles

{- a lax triangle looks like:
            laxTarget
   i -------------------------------------> k
                    ^ laxModif
   |                |
   i ------------ > j --------------------> k
        laxFst            laxSnd

   and I_k is quasi-semi-exact -}
-- | one half of an amalgamability square: a factorisation
-- laxFst ; laxSnd of laxTarget, witnessed by laxModif.
data LaxTriangle = LaxTriangle {
    laxModif :: AnyModification,
    laxFst, laxSnd, laxTarget :: AnyComorphism
  } deriving (Show, Eq, Ord)

{- a weakly amalgamable square of lax triangles
consists of two lax triangles with the same laxTarget -}
data Square = Square {
    leftTriangle, rightTriangle :: LaxTriangle
  } deriving (Show, Eq, Ord)
-- for deriving Eq, first equality for modifications is needed

-- | the identity square on a logic: every comorphism in both triangles
-- is the identity comorphism, the modification is the identity.
mkIdSquare :: AnyLogic -> Square
mkIdSquare (Logic lid) = let
    idCom = Comorphism (mkIdComorphism lid (top_sublogic lid))
    idMod = idModification idCom
    idTriangle = LaxTriangle {
      laxModif = idMod,
      laxFst = idCom,
      laxSnd = idCom,
      laxTarget = idCom}
  in Square {leftTriangle = idTriangle, rightTriangle = idTriangle}
-- | the default square for a comorphism c : factor c as c;id on the
-- left and id;c on the right, with identity modifications.
mkDefSquare :: AnyComorphism -> Square
mkDefSquare c1@(Comorphism cid1) = let
    idComS = Comorphism $ mkIdComorphism (sourceLogic cid1) $
      top_sublogic $ sourceLogic cid1
    idComT = Comorphism $ mkIdComorphism (targetLogic cid1) $
      top_sublogic $ targetLogic cid1
    idMod = idModification c1
    lTriangle = LaxTriangle {
      laxModif = idMod,
      laxFst = c1,
      laxSnd = idComS,
      laxTarget = c1
      }
    rTriangle = LaxTriangle {
      laxModif = idMod,
      laxFst = idComT,
      laxSnd = c1,
      laxTarget = c1
      }
  in Square {leftTriangle = lTriangle, rightTriangle = rTriangle}
-- | Swap the left and right triangles of a square.
mirrorSquare :: Square -> Square
mirrorSquare (Square l r) = Square r l
-- | look up the registered squares for a pair of comorphisms, in both
-- orientations; squares found under the swapped pair are mirrored.
lookupSquare :: AnyComorphism -> AnyComorphism -> LogicGraph -> Result [Square]
lookupSquare com1 com2 lg = maybe (fail "lookupSquare") return $ do
  sqL1 <- Map.lookup (com1, com2) $ squares lg
  sqL2 <- Map.lookup (com2, com1) $ squares lg
  return $ nubOrd $ sqL1 ++ map mirrorSquare sqL2
-- maybe adjusted if comparing AnyModifications change
|
nevrenato/HetsAlloy
|
Logic/Grothendieck.hs
|
gpl-2.0
| 36,060
| 5
| 25
| 8,738
| 9,831
| 5,042
| 4,789
| 737
| 5
|
-- | Quick sort via naive pivot partitioning (head element as pivot).
-- Fixed: the original lacked an equation for the empty list, so any
-- recursive call on an empty partition (e.g. @quickSort [2,1]@) crashed
-- with a pattern-match failure.
quickSort :: Ord a => [a] -> [a]
quickSort [] = []
quickSort (x:xs) = quickSort mini ++ [x] ++ quickSort maxi
    where mini = filter (<x) xs
          maxi = filter (>=x) xs
-- | Split a list into (elements satisfying p, elements failing p),
-- keeping the original relative order within each half.
filterSplit :: (a -> Bool) -> [a] -> ([a], [a])
filterSplit p = foldr classify ([], [])
  where
    classify y (yes, no)
      | p y       = (y : yes, no)
      | otherwise = (yes, y : no)
-- | Quick sort that partitions the tail around the pivot in one pass
-- (the partition helper is inlined instead of calling 'filterSplit').
quickSort' :: Ord a => [a] -> [a]
quickSort' []       = []
quickSort' (p:rest) = quickSort' smaller ++ p : quickSort' larger
  where
    (smaller, larger) = foldr place ([], []) rest
    place y (lo, hi)
      | y < p     = (y : lo, hi)
      | otherwise = (lo, y : hi)
|
dalonng/hellos
|
haskell.hello/quickSort.hs
|
gpl-2.0
| 603
| 1
| 8
| 192
| 341
| 184
| 157
| 15
| 1
|
module Sites.DenizensAttention
( denizensAttention
) where
import qualified Data.Text as T
import qualified Data.ByteString.Lazy as BL
-- Tagsoup
import Text.HTML.TagSoup hiding (parseTags, renderTags)
import Text.HTML.TagSoup.Fast
import Control.Monad
import Control.Monad.IO.Class
import Pipes (Pipe)
-- Local imports
import Types
import Interpreter
-- | Base URL of the comic site (no trailing slash).
rootUrl = "http://denizensattention.smackjeeves.com"
-- | Path of the first chapter, appended to 'rootUrl' for the seed page.
firstChapter = "/chapters/81551/part-one-a-dark-beginning/"
--
-- Denizen Attention - Testing fast tagsoup
--
-- | Site definition handed to the generic fetcher; the seed page is the
-- first chapter and is always refetched ('Always' cache policy).
denizensAttention = Comic
    { comicName = "Denizens Attention"
    , seedPage = rootUrl ++ firstChapter
    , seedCache = Always
    , pageParse = denizensAttentionPageParse
    , cookies = []
    }
-- | Scrape the site in three nested passes: chapters from the seed
-- page's jumpbox, pages from each chapter's jumpbox, then the comic
-- image on each page.
-- NOTE(review): the uses of 'head' assume the selectors always match;
-- a layout change on the site would crash the parse — confirm.
denizensAttentionPageParse :: Pipe ReplyType FetchType IO ()
denizensAttentionPageParse = runWebFetchT $ do
    pg <- fetchSeedpage
    let page = parseTagsT $ BL.toStrict pg

    -- Parse out a list of chapters
    -- option.jumpbox_chapter:nth-child(3)
    -- (the seed chapter itself is not in the jumpbox, so it is consed on
    -- as entry 1)
    let chp = (1, T.pack firstChapter) : (
            zip [2..] $
            map (fromAttrib $ T.pack "value") $
            filter (~== "<option>") $
            takeWhile (~/= "</select>") $
            head $
            sections (~== "<option class=jumpbox_chapter>") page)

    (liftIO . print) chp
    debug ""

    -- TODO: add in the chapter name here as well
    forM_ (map toChp chp) (\(url, ct) -> do
        pg' <- fetchWebpage [(url, Always)]
        let page' = parseTagsT $ BL.toStrict pg'

        -- Parse out a list of pages
        let pages = (
                zip [1..] $
                map (fromAttrib $ T.pack "value") $
                filter (~== "<option>") $
                takeWhile (~/= "</optgroup>") $
                dropWhile (~/= "<option class=jumpbox_page>") page')

        (liftIO . print) pages
        debug ""

        forM_ (map (toPage ct) pages) (\(url', ct') -> do
            pg'' <- fetchWebpage [(url', Always)]
            let page'' = parseTagsT $ BL.toStrict pg''

            -- the comic image itself
            let img = (
                    (fromAttrib $ T.pack "src") $
                    head $
                    filter (~== "<img id=comic_image>") page'')

            (liftIO . print) img
            debug ""

            fetchImage (T.unpack img) ct'
            )
        )
-- | Build the absolute chapter URL and its 'ComicTag'; the chapter
-- number becomes the unit-tag digit.
toChp :: (Integer, T.Text) -> (String, ComicTag)
toChp (chp, url) = ((rootUrl ++ T.unpack url), (ComicTag (T.pack "Denizens Attention") Nothing Nothing (Just $ UnitTag [StandAlone $ Digit chp Nothing Nothing Nothing] Nothing) Nothing))

-- | Build the absolute page URL and a per-page tag; the page number is
-- zero-padded to 7 characters for the stored file name (".png").
toPage :: ComicTag -> (Integer, T.Text) -> (String, ComicTag)
toPage ct (page, url) = ((rootUrl ++ T.unpack url), (ct{ctFileName = Just $ T.justifyRight 7 '0' $ T.pack (show page ++ ".png")}))
|
pharaun/hComicFetcher
|
src/Sites/DenizensAttention.hs
|
gpl-3.0
| 2,727
| 4
| 31
| 781
| 809
| 437
| 372
| 57
| 1
|
module Draw where
import Actor
import Box
import Canvas
import Cursor
import qualified Dungeon as D
import GameState
import Point
import Terrain
import Knowledge
import Entity.Map
import Data.Set
import UI.HSCurses.Curses (refresh, update)
-- | Render one frame: print the dungeon around the player onto the
-- standard canvas, place the cursor on the player, and show @now@ in
-- the top-left corner.  @pts@ is the set of currently visible points.
draw :: Set Point -> GameState -> IO ()
draw pts (GameState am dgn now) = do
  cv@(Canvas _ bx) <- stdCanvas
  let (_, cr) = getPlayer am
      -- the canvas is centered on the player: offset translates canvas
      -- coordinates into dungeon coordinates
      center = centerPt bx
      offset = cr - center in
      printCanvas cv (\ p -> renderFn (p + offset)) >>
      writeTo cv center >>
      print_string 0 0 (show now) >>
      refresh >> update >>
      return ()
  where knowledge = actorKnow $ fst $ getPlayer am
        -- glyph for one dungeon point:
        --   not visible & unknown -> blank (False flag)
        --   not visible & known   -> the remembered tile (False flag)
        --   visible               -> the mob on it, if any, else the tile
        renderFn p =
          let isVis = member p pts
              known = isKnown knowledge p
              asGlyph = (renderTile $ D.get dgn p, isVis)
              mob = entityAt am in
          if not isVis
            then if known
                   then asGlyph
                   else (' ', False)
            else case mob p of
              Nothing -> asGlyph
              Just a -> (glyph a, True)
|
bhickey/catamad
|
src/Draw.hs
|
gpl-3.0
| 1,064
| 0
| 19
| 369
| 362
| 191
| 171
| 37
| 4
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Jobs
( Validation(..)
, variantToPropertyJob
, processProperty
, missingRunToTimingJob
, processTiming
, validationMissingRuns
, validateResults
, cleanupValidation
) where
import BroadcastChan.Conduit
import qualified Control.Concurrent.STM as STM
import Control.Monad (unless, when)
import Crypto.Hash.Conduit (hashFile)
import qualified Data.ByteArray (convert)
import Data.Conduit (ConduitT, (.|), runConduit)
import qualified Data.Conduit.Combinators as C
import qualified Data.Conduit.Text as C
import Data.Maybe (fromMaybe)
import Data.Semigroup (Max(..))
import qualified Data.Text as T
import Data.Time.Clock (getCurrentTime)
import Core
import Parsers
import ProcessPool (Job, Result(..), makePropertyJob, makeTimingJob)
import qualified ProcessPool
import Query (streamQuery)
import Query.Missing
import qualified RuntimeData
import Schema
import Sql (MonadSql, Region, Transaction, (=.))
import qualified Sql
import qualified Sql.Transaction as SqlTrans
-- | Hash the contents of a file and wrap the raw digest bytes as 'Hash'.
computeHash :: MonadIO m => FilePath -> m Hash
computeHash path = do
    (digest :: HashDigest) <- hashFile path
    return . Hash . Data.ByteArray.convert $ digest
-- | Turn a stored 'Variant' row into a property-computation job, or
-- 'Nothing' when nothing (more) should be done.  The guards encode a
-- consistency check over (result hash, properties stored, max step,
-- retry count); impossible combinations are logged as errors, too many
-- retries as warnings.
variantToPropertyJob
    :: Entity Variant
    -> SqlM (Maybe (Job (Key Algorithm, Key Graph, Maybe Hash, Maybe Int)))
variantToPropertyJob
    (Entity varId (Variant graphId variantCfgId _ hash step hasProps retries)) =
    case hash of
        Nothing
            | retries < 5 && not hasProps && step == 0 -> yieldJob
            | hasProps -> jobError $
                "Properties stored, but not result hash for variant #"
                <> showSqlKey varId
            | not hasProps && step /= 0 -> jobError $
                "No properties, but did find max steps for variant #"
                <> showSqlKey varId
            | retries >= 5 -> jobWarn $
                "Hash missing, but too many retries for variant #"
                <> showSqlKey varId
            | otherwise -> jobError $
                "Variant information for #" <> showSqlKey varId
                <> " is in an inconsistent state!"

        Just _
            | retries < 5 && not hasProps && step == 0 -> do
                -- a result exists but properties are still missing: rerun
                logWarnN $ mconcat
                    [ "Found a stored result, but no properties for variant#"
                    , showSqlKey varId
                    ]
                yieldJob
            | hasProps -> return Nothing
            | not hasProps && step /= 0 -> jobError $
                "No properties, but did find max steps for variant #"
                <> showSqlKey varId
            | retries >= 5 -> jobWarn $
                "No properties, but too many retries for variant #"
                <> showSqlKey varId
            | otherwise -> jobError $
                "Variant information for #" <> showSqlKey varId
                <> " is in an inconsistent state!"
  where
    -- log and yield no job
    jobWarn msg = Nothing <$ logWarnN msg
    jobError msg = Nothing <$ logErrorN msg

    maxStep | not hasProps && step == 0 = Nothing
            | otherwise = Just step

    -- look up graph, variant config and algorithm, then build the job
    yieldJob = do
        Graph _ path _ _ _ <- Sql.getJust graphId
        VariantConfig algoId _ flags _ _ <- Sql.getJust variantCfgId
        Algorithm algo _ <- Sql.getJust algoId
        let job = makePropertyJob (algoId,graphId,hash,maxStep) varId Nothing $
                [ "-a", algo, fromMaybe "" flags, path ]
        return $ Just job
-- | Ingest the output of one property job: verify (or store) the result
-- hash, parse the property log, and persist graph/step properties.
-- The first equation handles the invariant violation of a property run
-- without a property log.
-- Fixed: typo in the step-count log message ("Did't" -> "Didn't").
processProperty
    :: Result (Key Algorithm, Key Graph, Maybe Hash, Maybe Int) -> SqlM ()
processProperty result@Result
  { resultValue=(algoId, _, _, _)
  , resultPropLog = Nothing
  , ..
  } = do
    ProcessPool.cleanupOutput result
    ProcessPool.cleanupTimings result
    logThrowM . GenericInvariantViolation $ mconcat
        [ "Found property run without property log file for algorithm #"
        , showSqlKey algoId, " variant #", showSqlKey resultVariant
        ]

processProperty result@Result
  { resultValue = (algoId, graphId, hash, maxStep)
  , resultOutput = (outputFile, _)
  , resultPropLog = Just (propLog, _)
  , ..
  } = do
    logDebugNS "Property#Start" resultLabel
    ProcessPool.cleanupTimings result
    resultHash <- computeHash outputFile
    SqlTrans.tryAbortableTransaction $ do
        -- store the hash on first sight, otherwise require a match
        loadProps <- case hash of
            Nothing -> True <$ SqlTrans.update resultVariant
                [VariantResult =. Just resultHash]
            Just prevHash | prevHash == resultHash -> return True
            _ -> False <$ logErrorN
                ("Hash mismatch for variant: " <> showSqlKey resultVariant)

        ProcessPool.cleanupOutput result

        when loadProps $ do
            stepCount <- runConduit $
                C.sourceFile propLog
                .| C.decode C.utf8
                .| C.map (T.replace "," "")
                .| conduitParse property
                .| C.foldMapM insertProperty

            -- reconcile the parsed maximum step with the stored one
            case (maxStep, stepCount) of
                (_, Nothing) -> logInfoN $ mconcat
                    [ "Didn't find step count for algorithm #"
                    , showSqlKey algoId, " and variant #"
                    , showSqlKey resultVariant
                    ]
                (Nothing, Just (Max n)) ->
                    SqlTrans.update resultVariant [VariantMaxStepId =. n]
                (Just step, Just (Max n))
                    | n < step -> SqlTrans.abortTransaction $ mconcat
                        [ "Found less than expected step count for variant: "
                        , showSqlKey resultVariant
                        ]
                    | n > step -> SqlTrans.abortTransaction $ mconcat
                        [ "Found more than expected step count for variant: "
                        , showSqlKey resultVariant
                        ]
                    | otherwise -> return ()

            SqlTrans.update resultVariant [VariantPropsStored =. True]

    ProcessPool.cleanupProperties result
    logDebugNS "Property#End" resultLabel
  where
    -- persist one parsed property; returns the step number (as Max) for
    -- step properties so the fold can track the largest step seen
    insertProperty :: Property -> Transaction SqlM (Maybe (Max Int))
    insertProperty (GraphProperty name val) = Nothing <$ do
        propId <- SqlTrans.insertUniq $ GraphPropName name
        SqlTrans.insertUniq $ GraphPropValue graphId propId val

    insertProperty (StepProperty n _ _)
        | Just i <- maxStep
        , n > i = SqlTrans.abortTransaction $ mconcat
            [ "Found step property with a step count (", showText n
            , ") larger than stored maximum (", showText i, ") for algorithm #"
            , showSqlKey algoId, " variant #", showSqlKey resultVariant
            ]

    insertProperty (StepProperty n name val) = Just (Max n) <$ do
        propId <- SqlTrans.insertUniq $ StepPropName name
        SqlTrans.insertUniq $ StepProp propId algoId
        SqlTrans.insertUniq $ StepPropValue resultVariant n propId algoId val

    insertProperty Prediction{} = return Nothing
-- | Convert a missing-run record into a timing job; logs an error and
-- yields 'Nothing' when the variant has no stored result hash yet.
missingRunToTimingJob
    :: (MonadLogger m, MonadResource m, MonadSql m)
    => Key Platform
    -> MissingRun ExtraVariantInfo
    -> m (Maybe (Job (Key Algorithm, Key Implementation, Hash, Int)))
missingRunToTimingJob platformId MissingRun{..} = case missingRunExtraInfo of
    ExtraVariantInfo Nothing _ -> Nothing <$ logErrorN msg
      where
        msg = mconcat
            [ "Algorithm #", showSqlKey missingRunAlgorithmId
            , " results missing for variant #", showSqlKey missingRunVariantId
            ]
    ExtraVariantInfo (Just hash) steps -> return . Just $ makeTimingJob
        (missingRunAlgorithmId, missingRunImplId, hash, steps)
        missingRunVariantId
        (Just (platformId, missingRunImplName))
        missingRunArgs
-- | Ingest the output of one timing job: insert a 'Run' row (validated
-- iff the output hash matches the stored one), then parse and store the
-- timer entries.  Nothing is stored when the algorithm commit does not
-- match the expected one.
processTiming
    :: (MonadCatch m, MonadLogger m, MonadResource m, MonadSql m)
    => Key RunConfig
    -> CommitId
    -> Result (Key Algorithm, Key Implementation, Hash, Int)
    -> m ()
processTiming runConfigId commit result@Result{..} = do
    logDebugNS "Timing#Start" resultLabel
    time <- liftIO getCurrentTime
    resultHash <- computeHash outputFile
    ProcessPool.cleanupProperties result
    ProcessPool.cleanupOutput result
    if commit /= resultAlgorithmVersion
      then logErrorN $ mconcat
        [ "Unexpected algorithm version for implementation #"
        , showSqlKey implId, "! Expected commit ", getCommitId commit
        , " found commit ", getCommitId resultAlgorithmVersion
        ]
      else SqlTrans.tryAbortableTransaction $ do
        -- a run is "validated" when its output hash equals the stored one
        let validated = resultHash == hash
        runId <- SqlTrans.insert $
            Run runConfigId resultVariant implId algoId time validated

        unless validated $ do
            logErrorN . mconcat $
                [ "Implementation #", showSqlKey implId
                , " has wrong result hash for variant #"
                , showSqlKey resultVariant
                , " for run config #", showSqlKey runConfigId
                ]

        runConduit $
            C.sourceFile timingFile
            .| C.decode C.utf8
            .| C.map (T.replace "," "")
            .| conduitParse timer
            .| C.mapM_ (insertTiming runId)

        logDebugNS "Timing#End" resultLabel
        ProcessPool.cleanupTimings result
  where
    (algoId, implId, hash, maxStep) = resultValue
    timingFile = T.unpack resultLabel <> ".timings"
    outputFile = T.unpack resultLabel <> ".output"

    -- store one timer entry; step timings past the recorded maximum
    -- abort the surrounding transaction
    insertTiming
        :: (MonadThrow m, MonadSql m) => Key Run -> Timer -> Transaction m ()
    insertTiming runId (TotalTiming Timing{..}) = SqlTrans.insert_ $
        TotalTimer runId name minTime avgTime maxTime stddev

    insertTiming _ (StepTiming n _) | n > maxStep =
        SqlTrans.abortTransaction $ mconcat
            [ "Found step timing with a step count (", showText n
            , ") larger than stored maximum (", showText maxStep
            , ") for algorithm #", showSqlKey algoId, " variant #"
            , showSqlKey resultVariant
            ]

    insertTiming runId (StepTiming n Timing{..}) = SqlTrans.insert_ $
        StepTimer runId resultVariant n name minTime avgTime maxTime stddev
-- | Everything needed to validate one run's output against a reference.
data Validation = Validation
    { cleanData :: SqlM ()          -- ^ cleanup action invoked by 'cleanupValidation'
    , originalCommit :: CommitId    -- ^ commit the reference result was produced with
    , referenceResult :: FilePath   -- ^ file holding the reference output
    , runId :: Key Run              -- ^ run whose validity is being checked
    }
-- | Produce validation jobs for every run that still needs checking
-- against this result's output.  A shared countdown ('refCounter')
-- makes the last finished job delete the reference output file.
validationMissingRuns
    :: Key Platform
    -> Result ValidationVariant
    -> ConduitT (Result ValidationVariant) (Job Validation) (Region SqlM) ()
validationMissingRuns platformId result@Result{..} = do
    ProcessPool.cleanupTimings result
    ProcessPool.cleanupProperties result
    refCounter <- liftIO $ STM.newTVarIO validationMissingCount

    let onCompletion :: SqlM ()
        onCompletion = do
            count <- liftIO . STM.atomically $ do
                STM.modifyTVar' refCounter (subtract 1)
                STM.readTVar refCounter
            -- only the last completed job removes the shared output
            when (count == 0) $ ProcessPool.cleanupOutput result

        mkValidation :: Key Run -> Validation
        mkValidation = Validation onCompletion validationCommit outputFile

        toValidationJob :: MissingRun (Key Run) -> Job Validation
        toValidationJob MissingRun{..} = makeTimingJob
            (mkValidation missingRunExtraInfo)
            missingRunVariantId
            (Just (platformId, missingRunImplName))
            missingRunArgs

    streamQuery (validationRunQuery resultValue platformId)
        .| C.map toValidationJob
  where
    ValidationVariant{..} = resultValue
    outputFile = T.unpack resultLabel <> ".output"
-- | Check validation-job outputs against their reference result with up
-- to @numProcs@ concurrent workers; a matching output marks the run as
-- validated.  Results are passed through unchanged for later cleanup.
validateResults
    :: Int -> ConduitT (Result Validation) (Result Validation) SqlM ()
validateResults numProcs = do
    validate <- RuntimeData.getOutputChecker
    parMapM (Simple Terminate) numProcs (process validate)
  where
    process
        :: (FilePath -> FilePath -> SqlM Bool)
        -> Result Validation
        -> SqlM (Result Validation)
    process check res@Result{resultAlgorithmVersion, resultOutput, resultValue}
        | originalCommit /= resultAlgorithmVersion = do
            -- version skew: do not mark the run validated
            res <$ logErrorN "Result validation used wrong algorithm version!"
        | otherwise = do
            result <- check referenceResult outputFile
            ts <- liftIO $ getCurrentTime
            when result $ do
                Sql.update runId [RunValidated =. True, RunTimestamp =. ts]
            return res
      where
        Validation{..} = resultValue
        (outputFile, _) = resultOutput
-- | Remove a validation job's temporary files and run the completion
-- callback carried in the 'Validation' value.
cleanupValidation :: Result Validation -> SqlM ()
cleanupValidation result@Result{resultValue = Validation{..}} = do
    ProcessPool.cleanupOutput result
    ProcessPool.cleanupTimings result
    ProcessPool.cleanupProperties result
    cleanData
|
merijn/GPU-benchmarks
|
benchmark-analysis/ingest-src/Jobs.hs
|
gpl-3.0
| 12,965
| 0
| 20
| 3,933
| 3,225
| 1,608
| 1,617
| 281
| 8
|
module Helpers
where
import System.Exit
import Data.Either
import Ticket
import Common
-- | Checks that the first non-option argument names a valid ticket,
-- then calls the given function with the ticket and the rest of the
-- arguments.  On too few arguments or an invalid ticket the usage
-- message is printed and the program exits with status 1.
paramList
  :: (Ticket -> [String] -> IO b)
  -> [String]
  -> String
  -> String
  -> IO b
paramList f args cmd usagemsg = do
  checkIsInit
  -- NOTE(review): 'undefined' is passed to doArgs — safe only as long
  -- as doArgs never forces that argument; confirm.
  (_, nonopts) <- doArgs [] undefined [] cmd args True
  if length nonopts < 2
    then putStrLn usagemsg >> exitWith (ExitFailure 1)
    else do
      mtick <- findTicket (head nonopts)
      case mtick of
        Right t -> f t (tail nonopts)
        Left e -> do
          putStrLn e
          putStrLn usagemsg
          exitWith (ExitFailure 1)
-- | Checks that all arguments name valid tickets, then loads and
-- returns them.  Prints the usage message and exits with status 1 when
-- the argument list is empty, or via 'checkAllRight' when any ticket
-- lookup fails.
loadArgsAsTickets :: [String] -> String -> IO [Ticket]
loadArgsAsTickets args usagemsg = do
  checkIsInit
  if null args
    then putStrLn usagemsg >> exitWith (ExitFailure 1)
    else do
      ts <- mapM findTicket args
      checkAllRight ts usagemsg
-- | Collect all 'Right' results; otherwise print every 'Left' error
-- message followed by the usage text and exit with status 1.
checkAllRight :: [Either String b] -> String -> IO [b]
checkAllRight results usagemsg =
  case partitionEithers results of
    ([], goods) -> return goods
    (errs, _) -> do
      mapM_ putStrLn errs
      putStrLn usagemsg
      exitWith (ExitFailure 1)
|
anttisalonen/nix
|
src/Helpers.hs
|
gpl-3.0
| 1,352
| 0
| 17
| 361
| 410
| 198
| 212
| 41
| 3
|
-- pascal triangle http://www.haskell.org/haskellwiki/Blow_your_mind
import Haste.HPlay.View
import Control.Applicative
import Data.Monoid
-- | Mount the widget in the page body (hplayground entry point).
main= runBody $ showpascal
-- | Infinite list of Pascal-triangle rows, starting from @[1]@; each
-- row is the element-wise sum of its predecessor shifted both ways.
pascal :: [[Int]]
pascal = iterate nextRow [1]
  where nextRow row = zipWith (+) (0 : row) (row ++ [0])
-- | Widget: read a row count from an input field (default 10, updated
-- on each keystroke) and render that many centered triangle rows.
showpascal = do
  n <- getInt Nothing ! atr "size" "5" `fire` OnKeyUp <|> return 10
  wraw $ do
    p << ("Show " ++ show n ++ " rows of the Pascal triangle ")
    mconcat[p ! style "text-align:center" $ row | row <- take n pascal]
|
agocorona/tryhplay
|
examples/pascaltriangle.hs
|
gpl-3.0
| 478
| 0
| 14
| 88
| 184
| 97
| 87
| 10
| 1
|
-- |
-- Copyright : © 2009 CNRS - École Polytechnique - INRIA
-- License : GPL
--
-- Parser for interface files.
module Dedukti.Parser.Interface (parse) where
import Dedukti.Module
import qualified Data.ByteString.Lazy.Char8 as B
-- | Each line of an interface file names exactly one qualified
-- identifier; the file path argument is ignored.
parse :: FilePath -> B.ByteString -> [Qid]
parse _ contents = map qid (B.lines contents)
|
mboes/dedukti
|
Dedukti/Parser/Interface.hs
|
gpl-3.0
| 309
| 0
| 7
| 54
| 67
| 42
| 25
| 5
| 1
|
module Control.Concurrent.STM.Lens where
import Morphisms
import ClassyPrelude
import Control.Lens
import Control.Monad.Reader
import Control.Monad.State
import Control.Concurrent.STM
import Control.Monad.STM.Class
-- Lens-based TVar helpers.  Naming scheme: a leading @&@ runs in an
-- STM-capable monad, a leading @!@ lifts the same operation into IO via
-- 'embedReader'; @.=@ writes, @%=@ modifies, @%%=@ modifies and returns.

-- | Read the TVar selected by a lens from the environment (STM).
viewTVar :: (MonadReader e m, MonadSTM m) => Getting (TVar a) e (TVar a) -> m a
viewTVar v = view v >>= liftSTM . readTVar

-- | Read the TVar selected by a lens from the environment (IO).
viewTVarIO :: (MonadReader e m, MonadIO m) => Getting (TVar a) e (TVar a) -> m a
viewTVarIO v = view v >>= liftIO . readTVarIO

infixr 8 &.=
-- | Write the TVar selected by a lens (STM).
(&.=) :: (MonadReader e m, MonadSTM m) =>
    Getting (TVar a) e (TVar a) -> a -> m ()
v &.= a = view v >>= liftSTM . flip writeTVar a

infixr 8 !.=
-- | Write the TVar selected by a lens (IO).
(!.=) :: (MonadReader e m, MonadIO m) =>
    Getting (TVar a) e (TVar a) -> a -> m ()
v !.= a = embedReader liftIO (v &.= a)

infixr 8 &%=
-- | Modify the TVar selected by a lens (STM).
(&%=) :: (MonadReader e m, MonadSTM m) =>
    Getting (TVar a) e (TVar a) -> (a -> a) -> m ()
v &%= f = view v >>= liftSTM . flip modifyTVar f

infixr 8 !%=
-- | Modify the TVar selected by a lens (IO).
(!%=) :: (MonadReader e m, MonadIO m) =>
    Getting (TVar a) e (TVar a) -> (a -> a) -> m ()
v !%= f = embedReader liftIO (v &%= f)

infixr 8 &%%=
-- | Modify the TVar selected by a lens, returning the extra result.
-- NOTE(review): read and write are two separate liftSTM calls; they
-- form one transaction only if the whole action runs inside a single
-- 'atomically' — confirm the intended usage.
(&%%=) :: (MonadReader e m, MonadSTM m) =>
    Getting (TVar a) e (TVar a) -> (a -> (r, a)) -> m r
v &%%= f = do
    a <- viewTVar v
    let (r, a') = f a
    v &.= a'
    return r

infixr 8 !%%=
-- | IO version of '&%%='.
(!%%=) :: (MonadReader e m, MonadIO m) =>
    Getting (TVar a) e (TVar a) -> (a -> (r, a)) -> m r
v !%%= f = embedReader liftIO (v &%%= f)
-- TChan/TQueue helpers following the same naming scheme as the TVar
-- operators above; @-<@ writes to a channel, @<<@ writes to a queue.

-- | Read from the TChan selected by a lens (STM, blocks when empty).
viewTChan :: (MonadReader e m, MonadSTM m) => Getting (TChan a) e (TChan a) -> m a
viewTChan v = view v >>= liftSTM . readTChan

-- | IO version of 'viewTChan'.
viewTChanIO :: (MonadReader e m, MonadIO m) => Getting (TChan a) e (TChan a) -> m a
viewTChanIO = embedReader liftIO . viewTChan

-- | Read from the TQueue selected by a lens (STM, blocks when empty).
viewTQueue :: (MonadReader e m, MonadSTM m) => Getting (TQueue a) e (TQueue a) -> m a
viewTQueue v = view v >>= liftSTM . readTQueue

-- | IO version of 'viewTQueue'.
viewTQueueIO :: (MonadReader e m, MonadIO m) => Getting (TQueue a) e (TQueue a) -> m a
viewTQueueIO = embedReader liftIO . viewTQueue

infixr 8 &-<
-- | Write to the TChan selected by a lens (STM).
(&-<) :: (MonadReader e m, MonadSTM m) =>
    Getting (TChan a) e (TChan a) -> a -> m ()
c &-< a = view c >>= liftSTM . flip writeTChan a

infixr 8 !-<
-- | Write to the TChan selected by a lens (IO).
(!-<) :: (MonadReader e m, MonadIO m) =>
    Getting (TChan a) e (TChan a) -> a -> m ()
c !-< a = embedReader liftIO (c &-< a)

infixr 8 &<<
-- | Write to the TQueue selected by a lens (STM).
(&<<) :: (MonadReader e m, MonadSTM m) =>
    Getting (TQueue a) e (TQueue a) -> a -> m ()
c &<< a = view c >>= liftSTM . flip writeTQueue a

infixr 8 !<<
-- | Write to the TQueue selected by a lens (IO).
(!<<) :: (MonadReader e m, MonadIO m) =>
    Getting (TQueue a) e (TQueue a) -> a -> m ()
c !<< a = embedReader liftIO (c &<< a)
|
ian-mi/hTorrent
|
Control/Concurrent/STM/Lens.hs
|
gpl-3.0
| 2,529
| 0
| 10
| 584
| 1,354
| 690
| 664
| 64
| 1
|
{-# OPTIONS_GHC -F -pgmF interpol #-}
-- | Checks what happens if Interpol is already imported
-- when the the preprocessor is imported and used.
module Main where
import Text.Interpol
-- | Value spliced into the output string via the @^-^@ operator.
myVar :: Int
myVar = 23

-- | Referenced by the @#{have}@ splice inside the string literal below.
have :: String
have = "have"

-- NOTE(review): the "#{have}" inside the literal is rewritten by the
-- interpol preprocessor (-F -pgmF interpol); it must not be edited as
-- plain text.
main :: IO ()
main = putStrLn $ "I #{have} " ^-^ myVar ^-^ " apples."
|
scvalex/interpol
|
Test/AlreadyImportedWithUse.hs
|
gpl-3.0
| 314
| 0
| 7
| 64
| 59
| 35
| 24
| 9
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.S3.GetObjectAcl
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns the access control list (ACL) of an object.
--
-- <http://docs.aws.amazon.com/AmazonS3/latest/API/GetObjectAcl.html>
module Network.AWS.S3.GetObjectAcl
(
-- * Request
GetObjectAcl
-- ** Request constructor
, getObjectAcl
-- ** Request lenses
, goaBucket
, goaKey
, goaVersionId
-- * Response
, GetObjectAclResponse
-- ** Response constructor
, getObjectAclResponse
-- ** Response lenses
, goarGrants
, goarOwner
) where
import Network.AWS.Prelude
import Network.AWS.Request.S3
import Network.AWS.S3.Types
import qualified GHC.Exts
data GetObjectAcl = GetObjectAcl
    { _goaBucket    :: Text       -- ^ bucket containing the object
    , _goaKey       :: Text       -- ^ key of the object
    , _goaVersionId :: Maybe Text -- ^ optional specific object version
    } deriving (Eq, Ord, Read, Show)
-- | 'GetObjectAcl' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'goaBucket' @::@ 'Text'
--
-- * 'goaKey' @::@ 'Text'
--
-- * 'goaVersionId' @::@ 'Maybe' 'Text'
--
-- | Build a 'GetObjectAcl' request for the given bucket and key; the
-- version id defaults to 'Nothing' (latest version of the object).
getObjectAcl :: Text -- ^ 'goaBucket'
             -> Text -- ^ 'goaKey'
             -> GetObjectAcl
getObjectAcl bucket key =
    GetObjectAcl { _goaBucket = bucket, _goaKey = key, _goaVersionId = Nothing }
-- | Bucket the object lives in.
goaBucket :: Lens' GetObjectAcl Text
goaBucket = lens _goaBucket (\s a -> s { _goaBucket = a })

-- | Key of the object whose ACL is requested.
goaKey :: Lens' GetObjectAcl Text
goaKey = lens _goaKey (\s a -> s { _goaKey = a })

-- | VersionId used to reference a specific version of the object.
goaVersionId :: Lens' GetObjectAcl (Maybe Text)
goaVersionId = lens _goaVersionId (\s a -> s { _goaVersionId = a })
data GetObjectAclResponse = GetObjectAclResponse
    { _goarGrants :: List "Grant" Grant -- ^ grants attached to the object
    , _goarOwner  :: Maybe Owner        -- ^ owner, when present in the response
    } deriving (Eq, Read, Show)
-- | 'GetObjectAclResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'goarGrants' @::@ ['Grant']
--
-- * 'goarOwner' @::@ 'Maybe' 'Owner'
--
getObjectAclResponse :: GetObjectAclResponse
getObjectAclResponse = GetObjectAclResponse
    { _goarOwner  = Nothing
    , _goarGrants = mempty
    }

-- | A list of grants.
goarGrants :: Lens' GetObjectAclResponse [Grant]
goarGrants = lens _goarGrants (\s a -> s { _goarGrants = a }) . _List

-- | Owner of the object, if returned by S3.
goarOwner :: Lens' GetObjectAclResponse (Maybe Owner)
goarOwner = lens _goarOwner (\s a -> s { _goarOwner = a })
-- request path: /<bucket>/<key>
instance ToPath GetObjectAcl where
    toPath GetObjectAcl{..} = mconcat
        [ "/"
        , toText _goaBucket
        , "/"
        , toText _goaKey
        ]

-- query: the "acl" subresource plus the optional versionId
instance ToQuery GetObjectAcl where
    toQuery GetObjectAcl{..} = mconcat
        [ "acl"
        , "versionId" =? _goaVersionId
        ]

instance ToHeaders GetObjectAcl

instance ToXMLRoot GetObjectAcl where
    toXMLRoot = const (namespaced ns "GetObjectAcl" [])

instance ToXML GetObjectAcl

-- GET request against S3, response parsed from XML
instance AWSRequest GetObjectAcl where
    type Sv GetObjectAcl = S3
    type Rs GetObjectAcl = GetObjectAclResponse

    request  = get
    response = xmlResponse

instance FromXML GetObjectAclResponse where
    parseXML x = GetObjectAclResponse
        <$> x .@? "AccessControlList" .!@ mempty
        <*> x .@? "Owner"
|
dysinger/amazonka
|
amazonka-s3/gen/Network/AWS/S3/GetObjectAcl.hs
|
mpl-2.0
| 4,128
| 0
| 10
| 1,007
| 676
| 400
| 276
| 78
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Classroom.Courses.Teachers.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a list of teachers of this course that the requester is
-- permitted to view. This method returns the following error codes: *
-- \`NOT_FOUND\` if the course does not exist. * \`PERMISSION_DENIED\` for
-- access errors.
--
-- /See:/ <https://developers.google.com/classroom/ Google Classroom API Reference> for @classroom.courses.teachers.list@.
module Network.Google.Resource.Classroom.Courses.Teachers.List
(
-- * REST Resource
CoursesTeachersListResource
-- * Creating a Request
, coursesTeachersList
, CoursesTeachersList
-- * Request Lenses
, ctlXgafv
, ctlUploadProtocol
, ctlPp
, ctlCourseId
, ctlAccessToken
, ctlUploadType
, ctlBearerToken
, ctlPageToken
, ctlPageSize
, ctlCallback
) where
import Network.Google.Classroom.Types
import Network.Google.Prelude
-- | A resource alias for @classroom.courses.teachers.list@ method which the
-- 'CoursesTeachersList' request conforms to.  Each 'QueryParam' below
-- corresponds to one field of the request record.
type CoursesTeachersListResource =
     "v1" :>
       "courses" :>
         Capture "courseId" Text :>
           "teachers" :>
             QueryParam "$.xgafv" Text :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "pp" Bool :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "bearer_token" Text :>
                         QueryParam "pageToken" Text :>
                           QueryParam "pageSize" (Textual Int32) :>
                             QueryParam "callback" Text :>
                               QueryParam "alt" AltJSON :>
                                 Get '[JSON] ListTeachersResponse
-- | Returns a list of teachers of this course that the requester is
-- permitted to view. This method returns the following error codes: *
-- \`NOT_FOUND\` if the course does not exist. * \`PERMISSION_DENIED\` for
-- access errors.
--
-- /See:/ 'coursesTeachersList' smart constructor.
data CoursesTeachersList = CoursesTeachersList'
    { _ctlXgafv          :: !(Maybe Text)
    , _ctlUploadProtocol :: !(Maybe Text)
    , _ctlPp             :: !Bool
    , _ctlCourseId       :: !Text
    , _ctlAccessToken    :: !(Maybe Text)
    , _ctlUploadType     :: !(Maybe Text)
    , _ctlBearerToken    :: !(Maybe Text)
    , _ctlPageToken      :: !(Maybe Text)
    , _ctlPageSize       :: !(Maybe (Textual Int32))
    , _ctlCallback       :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'CoursesTeachersList' with the minimum fields
-- required to make a request. Every optional query parameter starts out
-- unset; pretty-printing defaults to 'True'.
--
-- Use the request lenses ('ctlXgafv', 'ctlUploadProtocol', 'ctlPp',
-- 'ctlCourseId', 'ctlAccessToken', 'ctlUploadType', 'ctlBearerToken',
-- 'ctlPageToken', 'ctlPageSize', 'ctlCallback') to modify other fields
-- as desired.
coursesTeachersList
    :: Text -- ^ 'ctlCourseId'
    -> CoursesTeachersList
coursesTeachersList courseId_ =
    CoursesTeachersList'
        { _ctlXgafv = Nothing
        , _ctlUploadProtocol = Nothing
        , _ctlPp = True
        , _ctlCourseId = courseId_
        , _ctlAccessToken = Nothing
        , _ctlUploadType = Nothing
        , _ctlBearerToken = Nothing
        , _ctlPageToken = Nothing
        , _ctlPageSize = Nothing
        , _ctlCallback = Nothing
        }

-- | V1 error format.
ctlXgafv :: Lens' CoursesTeachersList (Maybe Text)
ctlXgafv = lens _ctlXgafv (\ req v -> req {_ctlXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ctlUploadProtocol :: Lens' CoursesTeachersList (Maybe Text)
ctlUploadProtocol =
    lens _ctlUploadProtocol (\ req v -> req {_ctlUploadProtocol = v})

-- | Pretty-print response.
ctlPp :: Lens' CoursesTeachersList Bool
ctlPp = lens _ctlPp (\ req v -> req {_ctlPp = v})

-- | Identifier of the course. This identifier can be either the
-- Classroom-assigned identifier or an alias.
ctlCourseId :: Lens' CoursesTeachersList Text
ctlCourseId = lens _ctlCourseId (\ req v -> req {_ctlCourseId = v})

-- | OAuth access token.
ctlAccessToken :: Lens' CoursesTeachersList (Maybe Text)
ctlAccessToken =
    lens _ctlAccessToken (\ req v -> req {_ctlAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ctlUploadType :: Lens' CoursesTeachersList (Maybe Text)
ctlUploadType =
    lens _ctlUploadType (\ req v -> req {_ctlUploadType = v})

-- | OAuth bearer token.
ctlBearerToken :: Lens' CoursesTeachersList (Maybe Text)
ctlBearerToken =
    lens _ctlBearerToken (\ req v -> req {_ctlBearerToken = v})

-- | nextPageToken value returned from a previous list call, indicating that
-- the subsequent page of results should be returned. The list request must
-- be otherwise identical to the one that resulted in this token.
ctlPageToken :: Lens' CoursesTeachersList (Maybe Text)
ctlPageToken = lens _ctlPageToken (\ req v -> req {_ctlPageToken = v})

-- | Maximum number of items to return. Zero means no maximum. The server may
-- return fewer than the specified number of results.
ctlPageSize :: Lens' CoursesTeachersList (Maybe Int32)
ctlPageSize =
    -- 'mapping _Coerce' exposes the plain 'Int32' instead of the
    -- 'Textual' wrapper used on the wire.
    lens _ctlPageSize (\ req v -> req {_ctlPageSize = v})
        . mapping _Coerce

-- | JSONP
ctlCallback :: Lens' CoursesTeachersList (Maybe Text)
ctlCallback = lens _ctlCallback (\ req v -> req {_ctlCallback = v})
-- | Wire the request record onto the servant client derived from
-- 'CoursesTeachersListResource'. Argument order must match the resource
-- alias: capture first, then each query parameter in declaration order.
instance GoogleRequest CoursesTeachersList where
    type Rs CoursesTeachersList = ListTeachersResponse
    type Scopes CoursesTeachersList =
        '["https://www.googleapis.com/auth/classroom.profile.emails",
          "https://www.googleapis.com/auth/classroom.profile.photos",
          "https://www.googleapis.com/auth/classroom.rosters",
          "https://www.googleapis.com/auth/classroom.rosters.readonly"]
    requestClient CoursesTeachersList'{..} =
        go
            _ctlCourseId
            _ctlXgafv
            _ctlUploadProtocol
            (Just _ctlPp)
            _ctlAccessToken
            _ctlUploadType
            _ctlBearerToken
            _ctlPageToken
            _ctlPageSize
            _ctlCallback
            (Just AltJSON)
            classroomService
      where
        go = buildClient (Proxy :: Proxy CoursesTeachersListResource) mempty
|
rueshyna/gogol
|
gogol-classroom/gen/Network/Google/Resource/Classroom/Courses/Teachers/List.hs
|
mpl-2.0
| 7,067
| 0
| 21
| 1,755
| 1,055
| 612
| 443
| 147
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.CreativeFields.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing creative field.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.creativeFields.update@.
module Network.Google.Resource.DFAReporting.CreativeFields.Update
(
-- * REST Resource
CreativeFieldsUpdateResource
-- * Creating a Request
, creativeFieldsUpdate
, CreativeFieldsUpdate
-- * Request Lenses
, cfuProFileId
, cfuPayload
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for the @dfareporting.creativeFields.update@ method;
-- the 'CreativeFieldsUpdate' request conforms to this servant-style path.
type CreativeFieldsUpdateResource =
    "dfareporting"
        :> "v2.7"
        :> "userprofiles"
        :> Capture "profileId" (Textual Int64)
        :> "creativeFields"
        :> QueryParam "alt" AltJSON
        :> ReqBody '[JSON] CreativeField
        :> Put '[JSON] CreativeField
-- | Updates an existing creative field.
--
-- /See:/ 'creativeFieldsUpdate' smart constructor.
data CreativeFieldsUpdate = CreativeFieldsUpdate'
    { _cfuProFileId :: !(Textual Int64)
    , _cfuPayload   :: !CreativeField
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'CreativeFieldsUpdate' with the minimum fields
-- required to make a request.
--
-- Use 'cfuProFileId' and 'cfuPayload' to modify the fields afterwards.
creativeFieldsUpdate
    :: Int64 -- ^ 'cfuProFileId'
    -> CreativeField -- ^ 'cfuPayload'
    -> CreativeFieldsUpdate
creativeFieldsUpdate profileId_ payload_ =
    CreativeFieldsUpdate'
        { _cfuProFileId = _Coerce # profileId_ -- wrap into 'Textual'
        , _cfuPayload = payload_
        }

-- | User profile ID associated with this request.
cfuProFileId :: Lens' CreativeFieldsUpdate Int64
cfuProFileId =
    lens _cfuProFileId (\ req v -> req {_cfuProFileId = v}) . _Coerce

-- | Multipart request metadata.
cfuPayload :: Lens' CreativeFieldsUpdate CreativeField
cfuPayload = lens _cfuPayload (\ req v -> req {_cfuPayload = v})
-- | Wire the request record onto the servant client derived from
-- 'CreativeFieldsUpdateResource'.
instance GoogleRequest CreativeFieldsUpdate where
    type Rs CreativeFieldsUpdate = CreativeField
    type Scopes CreativeFieldsUpdate =
        '["https://www.googleapis.com/auth/dfatrafficking"]
    requestClient CreativeFieldsUpdate'{..} =
        go _cfuProFileId (Just AltJSON) _cfuPayload dFAReportingService
      where
        go = buildClient (Proxy :: Proxy CreativeFieldsUpdateResource) mempty
|
rueshyna/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/CreativeFields/Update.hs
|
mpl-2.0
| 3,416
| 0
| 14
| 760
| 406
| 242
| 164
| 64
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AppEngine.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AppEngine.Types.Product where
import Network.Google.AppEngine.Types.Sum
import Network.Google.Prelude
-- | Metadata for the given google.longrunning.Operation.
--
-- /See:/ 'operationMetadataExperimental' smart constructor.
data OperationMetadataExperimental = OperationMetadataExperimental'
    { _omeInsertTime :: !(Maybe Text)
    , _omeUser       :: !(Maybe Text)
    , _omeMethod     :: !(Maybe Text)
    , _omeEndTime    :: !(Maybe Text)
    , _omeTarget     :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'OperationMetadataExperimental' with every field
-- unset.
--
-- Use 'omeInsertTime', 'omeUser', 'omeMethod', 'omeEndTime' and
-- 'omeTarget' to modify fields as desired.
operationMetadataExperimental
    :: OperationMetadataExperimental
operationMetadataExperimental =
    OperationMetadataExperimental'
        { _omeInsertTime = Nothing
        , _omeUser = Nothing
        , _omeMethod = Nothing
        , _omeEndTime = Nothing
        , _omeTarget = Nothing
        }

-- | Time that this operation was created.\'OutputOnly
omeInsertTime :: Lens' OperationMetadataExperimental (Maybe Text)
omeInsertTime =
    lens _omeInsertTime (\ md v -> md {_omeInsertTime = v})

-- | User who requested this operation.\'OutputOnly
omeUser :: Lens' OperationMetadataExperimental (Maybe Text)
omeUser = lens _omeUser (\ md v -> md {_omeUser = v})

-- | API method that initiated this operation. Example:
-- google.appengine.experimental.CustomDomains.CreateCustomDomain.\'OutputOnly
omeMethod :: Lens' OperationMetadataExperimental (Maybe Text)
omeMethod = lens _omeMethod (\ md v -> md {_omeMethod = v})

-- | Time that this operation completed.\'OutputOnly
omeEndTime :: Lens' OperationMetadataExperimental (Maybe Text)
omeEndTime = lens _omeEndTime (\ md v -> md {_omeEndTime = v})

-- | Name of the resource that this operation is acting on. Example:
-- apps\/myapp\/customDomains\/example.com.\'OutputOnly
omeTarget :: Lens' OperationMetadataExperimental (Maybe Text)
omeTarget = lens _omeTarget (\ md v -> md {_omeTarget = v})

instance FromJSON OperationMetadataExperimental where
    parseJSON =
        withObject "OperationMetadataExperimental" $ \ o ->
            OperationMetadataExperimental'
                <$> o .:? "insertTime"
                <*> o .:? "user"
                <*> o .:? "method"
                <*> o .:? "endTime"
                <*> o .:? "target"

instance ToJSON OperationMetadataExperimental where
    -- Unset fields are dropped from the object via 'catMaybes'.
    toJSON OperationMetadataExperimental'{..} =
        object $
            catMaybes
                [ ("insertTime" .=) <$> _omeInsertTime
                , ("user" .=) <$> _omeUser
                , ("method" .=) <$> _omeMethod
                , ("endTime" .=) <$> _omeEndTime
                , ("target" .=) <$> _omeTarget
                ]
-- | Target scaling by network usage. Only applicable for VM runtimes.
--
-- /See:/ 'networkUtilization' smart constructor.
data NetworkUtilization = NetworkUtilization'
    { _nuTargetReceivedBytesPerSecond   :: !(Maybe (Textual Int32))
    , _nuTargetSentPacketsPerSecond     :: !(Maybe (Textual Int32))
    , _nuTargetReceivedPacketsPerSecond :: !(Maybe (Textual Int32))
    , _nuTargetSentBytesPerSecond       :: !(Maybe (Textual Int32))
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'NetworkUtilization' with every target unset.
--
-- Use 'nuTargetReceivedBytesPerSecond', 'nuTargetSentPacketsPerSecond',
-- 'nuTargetReceivedPacketsPerSecond' and 'nuTargetSentBytesPerSecond' to
-- modify fields as desired.
networkUtilization
    :: NetworkUtilization
networkUtilization =
    NetworkUtilization'
        { _nuTargetReceivedBytesPerSecond = Nothing
        , _nuTargetSentPacketsPerSecond = Nothing
        , _nuTargetReceivedPacketsPerSecond = Nothing
        , _nuTargetSentBytesPerSecond = Nothing
        }

-- | Target bytes received per second.
nuTargetReceivedBytesPerSecond :: Lens' NetworkUtilization (Maybe Int32)
nuTargetReceivedBytesPerSecond =
    lens _nuTargetReceivedBytesPerSecond
         (\ nu v -> nu {_nuTargetReceivedBytesPerSecond = v})
        . mapping _Coerce

-- | Target packets sent per second.
nuTargetSentPacketsPerSecond :: Lens' NetworkUtilization (Maybe Int32)
nuTargetSentPacketsPerSecond =
    lens _nuTargetSentPacketsPerSecond
         (\ nu v -> nu {_nuTargetSentPacketsPerSecond = v})
        . mapping _Coerce

-- | Target packets received per second.
nuTargetReceivedPacketsPerSecond :: Lens' NetworkUtilization (Maybe Int32)
nuTargetReceivedPacketsPerSecond =
    lens _nuTargetReceivedPacketsPerSecond
         (\ nu v -> nu {_nuTargetReceivedPacketsPerSecond = v})
        . mapping _Coerce

-- | Target bytes sent per second.
nuTargetSentBytesPerSecond :: Lens' NetworkUtilization (Maybe Int32)
nuTargetSentBytesPerSecond =
    lens _nuTargetSentBytesPerSecond
         (\ nu v -> nu {_nuTargetSentBytesPerSecond = v})
        . mapping _Coerce

instance FromJSON NetworkUtilization where
    parseJSON =
        withObject "NetworkUtilization" $ \ o ->
            NetworkUtilization'
                <$> o .:? "targetReceivedBytesPerSecond"
                <*> o .:? "targetSentPacketsPerSecond"
                <*> o .:? "targetReceivedPacketsPerSecond"
                <*> o .:? "targetSentBytesPerSecond"

instance ToJSON NetworkUtilization where
    toJSON NetworkUtilization'{..} =
        object $
            catMaybes
                [ ("targetReceivedBytesPerSecond" .=)
                      <$> _nuTargetReceivedBytesPerSecond
                , ("targetSentPacketsPerSecond" .=)
                      <$> _nuTargetSentPacketsPerSecond
                , ("targetReceivedPacketsPerSecond" .=)
                      <$> _nuTargetReceivedPacketsPerSecond
                , ("targetSentBytesPerSecond" .=)
                      <$> _nuTargetSentBytesPerSecond
                ]
-- | The Status type defines a logical error model that is suitable for
-- different programming environments, including REST APIs and RPC APIs; it
-- is used by gRPC (https:\/\/github.com\/grpc). A Status carries three
-- pieces of data: an error code (an enum value of google.rpc.Code, though
-- additional codes may be accepted), a developer-facing English error
-- message, and an optional list of structured error details (a predefined
-- set of detail types lives in the google.rpc package). Localized,
-- user-facing messages belong in the details or are localized by the
-- client. The same model is reused for partial errors, per-step workflow
-- errors, batch sub-responses, asynchronous operation results and logging.
--
-- /See:/ 'status' smart constructor.
data Status = Status'
    { _sDetails :: !(Maybe [StatusDetailsItem])
    , _sCode    :: !(Maybe (Textual Int32))
    , _sMessage :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'Status' with every field unset.
--
-- Use 'sDetails', 'sCode' and 'sMessage' to modify fields as desired.
status
    :: Status
status =
    Status'
        { _sDetails = Nothing
        , _sCode = Nothing
        , _sMessage = Nothing
        }

-- | A list of messages that carry the error details. There will be a common
-- set of message types for APIs to use.
sDetails :: Lens' Status [StatusDetailsItem]
sDetails =
    -- '_Default' maps 'Nothing' to the empty list for the lens user.
    lens _sDetails (\ st v -> st {_sDetails = v})
        . _Default
        . _Coerce

-- | The status code, which should be an enum value of google.rpc.Code.
sCode :: Lens' Status (Maybe Int32)
sCode =
    lens _sCode (\ st v -> st {_sCode = v})
        . mapping _Coerce

-- | A developer-facing error message, which should be in English. Any
-- user-facing error message should be localized and sent in the
-- google.rpc.Status.details field, or localized by the client.
sMessage :: Lens' Status (Maybe Text)
sMessage = lens _sMessage (\ st v -> st {_sMessage = v})

instance FromJSON Status where
    parseJSON =
        withObject "Status" $ \ o ->
            Status'
                <$> o .:? "details" .!= mempty
                <*> o .:? "code"
                <*> o .:? "message"

instance ToJSON Status where
    toJSON Status'{..} =
        object $
            catMaybes
                [ ("details" .=) <$> _sDetails
                , ("code" .=) <$> _sCode
                , ("message" .=) <$> _sMessage
                ]
-- | Service-specific metadata associated with the operation. It typically
-- contains progress information and common metadata such as create time.
-- Some services might not provide such metadata. Any method that returns a
-- long-running operation should document the metadata type, if any.
--
-- /See:/ 'operationSchema' smart constructor.
-- NOTE: the generated field name \"Addtional\" is misspelled upstream and
-- is kept as-is because it is part of the public interface.
newtype OperationSchema = OperationSchema'
    { _osAddtional :: HashMap Text JSONValue
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'OperationSchema' from the given property map.
--
-- Use 'osAddtional' to modify the map afterwards.
operationSchema
    :: HashMap Text JSONValue -- ^ 'osAddtional'
    -> OperationSchema
operationSchema props_ = OperationSchema' {_osAddtional = _Coerce # props_}

-- | Properties of the object. Contains field \'type with type URL.
osAddtional :: Lens' OperationSchema (HashMap Text JSONValue)
osAddtional =
    lens _osAddtional (\ sc v -> sc {_osAddtional = v}) . _Coerce

instance FromJSON OperationSchema where
    parseJSON =
        withObject "OperationSchema" (fmap OperationSchema' . parseJSONObject)

instance ToJSON OperationSchema where
    toJSON = toJSON . _osAddtional
-- | Service-specific metadata. For example the available capacity at the
-- given location.
--
-- /See:/ 'locationSchema' smart constructor.
-- NOTE: the generated field name \"Addtional\" is misspelled upstream and
-- is kept as-is because it is part of the public interface.
newtype LocationSchema = LocationSchema'
    { _lsAddtional :: HashMap Text JSONValue
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'LocationSchema' from the given property map.
--
-- Use 'lsAddtional' to modify the map afterwards.
locationSchema
    :: HashMap Text JSONValue -- ^ 'lsAddtional'
    -> LocationSchema
locationSchema props_ = LocationSchema' {_lsAddtional = _Coerce # props_}

-- | Properties of the object. Contains field \'type with type URL.
lsAddtional :: Lens' LocationSchema (HashMap Text JSONValue)
lsAddtional =
    lens _lsAddtional (\ sc v -> sc {_lsAddtional = v}) . _Coerce

instance FromJSON LocationSchema where
    parseJSON =
        withObject "LocationSchema" (fmap LocationSchema' . parseJSONObject)

instance ToJSON LocationSchema where
    toJSON = toJSON . _lsAddtional
-- | Traffic routing configuration for versions within a single service.
-- Traffic splits define how traffic directed to the service is assigned to
-- versions.
--
-- /See:/ 'trafficSplit' smart constructor.
data TrafficSplit = TrafficSplit'
    { _tsShardBy     :: !(Maybe Text)
    , _tsAllocations :: !(Maybe TrafficSplitAllocations)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'TrafficSplit' with both fields unset.
--
-- Use 'tsShardBy' and 'tsAllocations' to modify fields as desired.
trafficSplit
    :: TrafficSplit
trafficSplit =
    TrafficSplit'
        { _tsShardBy = Nothing
        , _tsAllocations = Nothing
        }

-- | Mechanism used to determine which version a request is sent to. The
-- traffic selection algorithm will be stable for either type until
-- allocations are changed.
tsShardBy :: Lens' TrafficSplit (Maybe Text)
tsShardBy = lens _tsShardBy (\ ts v -> ts {_tsShardBy = v})

-- | Mapping from version IDs within the service to fractional (0.000, 1]
-- allocations of traffic for that version. Each version can be specified
-- only once, but some versions in the service may not have any traffic
-- allocation. Services that have traffic allocated cannot be deleted until
-- either the service is deleted or their traffic allocation is removed.
-- Allocations must sum to 1. Up to two decimal place precision is
-- supported for IP-based splits and up to three decimal places is
-- supported for cookie-based splits.
tsAllocations :: Lens' TrafficSplit (Maybe TrafficSplitAllocations)
tsAllocations =
    lens _tsAllocations (\ ts v -> ts {_tsAllocations = v})

instance FromJSON TrafficSplit where
    parseJSON =
        withObject "TrafficSplit" $ \ o ->
            TrafficSplit'
                <$> o .:? "shardBy"
                <*> o .:? "allocations"

instance ToJSON TrafficSplit where
    toJSON TrafficSplit'{..} =
        object $
            catMaybes
                [ ("shardBy" .=) <$> _tsShardBy
                , ("allocations" .=) <$> _tsAllocations
                ]
-- | Executes a script to handle the request that matches the URL pattern.
--
-- /See:/ 'scriptHandler' smart constructor.
newtype ScriptHandler = ScriptHandler'
    { _shScriptPath :: Maybe Text
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ScriptHandler' with the path unset.
--
-- Use 'shScriptPath' to modify the field afterwards.
scriptHandler
    :: ScriptHandler
scriptHandler =
    ScriptHandler'
        { _shScriptPath = Nothing
        }

-- | Path to the script from the application root directory.
shScriptPath :: Lens' ScriptHandler (Maybe Text)
shScriptPath = lens _shScriptPath (\ sh v -> sh {_shScriptPath = v})

instance FromJSON ScriptHandler where
    parseJSON =
        withObject "ScriptHandler" $ \ o ->
            ScriptHandler' <$> o .:? "scriptPath"

instance ToJSON ScriptHandler where
    toJSON ScriptHandler'{..} =
        object $ catMaybes [("scriptPath" .=) <$> _shScriptPath]
-- | Response message for Services.ListServices.
--
-- /See:/ 'listServicesResponse' smart constructor.
data ListServicesResponse = ListServicesResponse'
    { _lsrNextPageToken :: !(Maybe Text)
    , _lsrServices      :: !(Maybe [Service])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ListServicesResponse' with both fields unset.
--
-- Use 'lsrNextPageToken' and 'lsrServices' to modify fields as desired.
listServicesResponse
    :: ListServicesResponse
listServicesResponse =
    ListServicesResponse'
        { _lsrNextPageToken = Nothing
        , _lsrServices = Nothing
        }

-- | Continuation token for fetching the next page of results.
lsrNextPageToken :: Lens' ListServicesResponse (Maybe Text)
lsrNextPageToken =
    lens _lsrNextPageToken (\ r v -> r {_lsrNextPageToken = v})

-- | The services belonging to the requested application.
lsrServices :: Lens' ListServicesResponse [Service]
lsrServices =
    -- '_Default' maps 'Nothing' to the empty list for the lens user.
    lens _lsrServices (\ r v -> r {_lsrServices = v})
        . _Default
        . _Coerce

instance FromJSON ListServicesResponse where
    parseJSON =
        withObject "ListServicesResponse" $ \ o ->
            ListServicesResponse'
                <$> o .:? "nextPageToken"
                <*> o .:? "services" .!= mempty

instance ToJSON ListServicesResponse where
    toJSON ListServicesResponse'{..} =
        object $
            catMaybes
                [ ("nextPageToken" .=) <$> _lsrNextPageToken
                , ("services" .=) <$> _lsrServices
                ]
-- | URL pattern and description of how the URL should be handled. App Engine
-- can handle URLs by executing application code or by serving static files
-- uploaded with the version, such as images, CSS, or JavaScript.
--
-- /See:/ 'urlMap' smart constructor.
data URLMap = URLMap'
    { _umScript                   :: !(Maybe ScriptHandler)
    , _umSecurityLevel            :: !(Maybe Text)
    , _umAPIEndpoint              :: !(Maybe APIEndpointHandler)
    , _umURLRegex                 :: !(Maybe Text)
    , _umRedirectHTTPResponseCode :: !(Maybe Text)
    , _umAuthFailAction           :: !(Maybe Text)
    , _umStaticFiles              :: !(Maybe StaticFilesHandler)
    , _umLogin                    :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'URLMap' with every field unset.
--
-- Use the lenses ('umScript', 'umSecurityLevel', 'umAPIEndpoint',
-- 'umURLRegex', 'umRedirectHTTPResponseCode', 'umAuthFailAction',
-- 'umStaticFiles', 'umLogin') to modify fields as desired.
urlMap
    :: URLMap
urlMap =
    URLMap'
        { _umScript = Nothing
        , _umSecurityLevel = Nothing
        , _umAPIEndpoint = Nothing
        , _umURLRegex = Nothing
        , _umRedirectHTTPResponseCode = Nothing
        , _umAuthFailAction = Nothing
        , _umStaticFiles = Nothing
        , _umLogin = Nothing
        }

-- | Executes a script to handle the request that matches this URL pattern.
umScript :: Lens' URLMap (Maybe ScriptHandler)
umScript = lens _umScript (\ um v -> um {_umScript = v})

-- | Security (HTTPS) enforcement for this URL.
umSecurityLevel :: Lens' URLMap (Maybe Text)
umSecurityLevel =
    lens _umSecurityLevel (\ um v -> um {_umSecurityLevel = v})

-- | Uses API Endpoints to handle requests.
umAPIEndpoint :: Lens' URLMap (Maybe APIEndpointHandler)
umAPIEndpoint =
    lens _umAPIEndpoint (\ um v -> um {_umAPIEndpoint = v})

-- | URL prefix. Uses regular expression syntax, which means regexp special
-- characters must be escaped, but should not contain groupings. All URLs
-- that begin with this prefix are handled by this handler, using the
-- portion of the URL after the prefix as part of the file path.
umURLRegex :: Lens' URLMap (Maybe Text)
umURLRegex = lens _umURLRegex (\ um v -> um {_umURLRegex = v})

-- | 30x code to use when performing redirects for the secure field. Defaults
-- to 302.
umRedirectHTTPResponseCode :: Lens' URLMap (Maybe Text)
umRedirectHTTPResponseCode =
    lens _umRedirectHTTPResponseCode
         (\ um v -> um {_umRedirectHTTPResponseCode = v})

-- | Action to take when users access resources that require authentication.
-- Defaults to redirect.
umAuthFailAction :: Lens' URLMap (Maybe Text)
umAuthFailAction =
    lens _umAuthFailAction (\ um v -> um {_umAuthFailAction = v})

-- | Returns the contents of a file, such as an image, as the response.
umStaticFiles :: Lens' URLMap (Maybe StaticFilesHandler)
umStaticFiles =
    lens _umStaticFiles (\ um v -> um {_umStaticFiles = v})

-- | Level of login required to access this resource.
umLogin :: Lens' URLMap (Maybe Text)
umLogin = lens _umLogin (\ um v -> um {_umLogin = v})

instance FromJSON URLMap where
    parseJSON =
        withObject "URLMap" $ \ o ->
            URLMap'
                <$> o .:? "script"
                <*> o .:? "securityLevel"
                <*> o .:? "apiEndpoint"
                <*> o .:? "urlRegex"
                <*> o .:? "redirectHttpResponseCode"
                <*> o .:? "authFailAction"
                <*> o .:? "staticFiles"
                <*> o .:? "login"

instance ToJSON URLMap where
    toJSON URLMap'{..} =
        object $
            catMaybes
                [ ("script" .=) <$> _umScript
                , ("securityLevel" .=) <$> _umSecurityLevel
                , ("apiEndpoint" .=) <$> _umAPIEndpoint
                , ("urlRegex" .=) <$> _umURLRegex
                , ("redirectHttpResponseCode" .=)
                      <$> _umRedirectHTTPResponseCode
                , ("authFailAction" .=) <$> _umAuthFailAction
                , ("staticFiles" .=) <$> _umStaticFiles
                , ("login" .=) <$> _umLogin
                ]
-- | Third-party Python runtime library that is required by the application.
--
-- /See:/ 'library' smart constructor.
data Library = Library'
    { _lName    :: !(Maybe Text)
    , _lVersion :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'Library' with both fields unset.
--
-- Use 'lName' and 'lVersion' to modify fields as desired.
library
    :: Library
library =
    Library'
        { _lName = Nothing
        , _lVersion = Nothing
        }

-- | Name of the library. Example: \"django\".
lName :: Lens' Library (Maybe Text)
lName = lens _lName (\ lib v -> lib {_lName = v})

-- | Version of the library to select, or \"latest\".
lVersion :: Lens' Library (Maybe Text)
lVersion = lens _lVersion (\ lib v -> lib {_lVersion = v})

instance FromJSON Library where
    parseJSON =
        withObject "Library" $ \ o ->
            Library'
                <$> o .:? "name"
                <*> o .:? "version"

instance ToJSON Library where
    toJSON Library'{..} =
        object $
            catMaybes
                [ ("name" .=) <$> _lName
                , ("version" .=) <$> _lVersion
                ]
-- | The response message for LocationService.ListLocations.
--
-- /See:/ 'listLocationsResponse' smart constructor.
data ListLocationsResponse = ListLocationsResponse'
    { _llrNextPageToken :: !(Maybe Text)
    , _llrLocations     :: !(Maybe [Location])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ListLocationsResponse' with both fields unset.
--
-- Use 'llrNextPageToken' and 'llrLocations' to modify fields as desired.
listLocationsResponse
    :: ListLocationsResponse
listLocationsResponse =
    ListLocationsResponse'
        { _llrNextPageToken = Nothing
        , _llrLocations = Nothing
        }

-- | The standard List next-page token.
llrNextPageToken :: Lens' ListLocationsResponse (Maybe Text)
llrNextPageToken =
    lens _llrNextPageToken (\ r v -> r {_llrNextPageToken = v})

-- | A list of locations that matches the specified filter in the request.
llrLocations :: Lens' ListLocationsResponse [Location]
llrLocations =
    -- '_Default' maps 'Nothing' to the empty list for the lens user.
    lens _llrLocations (\ r v -> r {_llrLocations = v})
        . _Default
        . _Coerce

instance FromJSON ListLocationsResponse where
    parseJSON =
        withObject "ListLocationsResponse" $ \ o ->
            ListLocationsResponse'
                <$> o .:? "nextPageToken"
                <*> o .:? "locations" .!= mempty

instance ToJSON ListLocationsResponse where
    toJSON ListLocationsResponse'{..} =
        object $
            catMaybes
                [ ("nextPageToken" .=) <$> _llrNextPageToken
                , ("locations" .=) <$> _llrLocations
                ]
-- | Target scaling by disk usage. Only applicable for VM runtimes.
--
-- /See:/ 'diskUtilization' smart constructor.
data DiskUtilization = DiskUtilization'
    { _duTargetReadBytesPerSecond  :: !(Maybe (Textual Int32))
    , _duTargetReadOpsPerSecond    :: !(Maybe (Textual Int32))
    , _duTargetWriteOpsPerSecond   :: !(Maybe (Textual Int32))
    , _duTargetWriteBytesPerSecond :: !(Maybe (Textual Int32))
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'DiskUtilization' with every target unset.
--
-- Use 'duTargetReadBytesPerSecond', 'duTargetReadOpsPerSecond',
-- 'duTargetWriteOpsPerSecond' and 'duTargetWriteBytesPerSecond' to modify
-- fields as desired.
diskUtilization
    :: DiskUtilization
diskUtilization =
    DiskUtilization'
        { _duTargetReadBytesPerSecond = Nothing
        , _duTargetReadOpsPerSecond = Nothing
        , _duTargetWriteOpsPerSecond = Nothing
        , _duTargetWriteBytesPerSecond = Nothing
        }

-- | Target bytes read per second.
duTargetReadBytesPerSecond :: Lens' DiskUtilization (Maybe Int32)
duTargetReadBytesPerSecond =
    lens _duTargetReadBytesPerSecond
         (\ du v -> du {_duTargetReadBytesPerSecond = v})
        . mapping _Coerce

-- | Target ops read per seconds.
duTargetReadOpsPerSecond :: Lens' DiskUtilization (Maybe Int32)
duTargetReadOpsPerSecond =
    lens _duTargetReadOpsPerSecond
         (\ du v -> du {_duTargetReadOpsPerSecond = v})
        . mapping _Coerce

-- | Target ops written per second.
duTargetWriteOpsPerSecond :: Lens' DiskUtilization (Maybe Int32)
duTargetWriteOpsPerSecond =
    lens _duTargetWriteOpsPerSecond
         (\ du v -> du {_duTargetWriteOpsPerSecond = v})
        . mapping _Coerce

-- | Target bytes written per second.
duTargetWriteBytesPerSecond :: Lens' DiskUtilization (Maybe Int32)
duTargetWriteBytesPerSecond =
    lens _duTargetWriteBytesPerSecond
         (\ du v -> du {_duTargetWriteBytesPerSecond = v})
        . mapping _Coerce

instance FromJSON DiskUtilization where
    parseJSON =
        withObject "DiskUtilization" $ \ o ->
            DiskUtilization'
                <$> o .:? "targetReadBytesPerSecond"
                <*> o .:? "targetReadOpsPerSecond"
                <*> o .:? "targetWriteOpsPerSecond"
                <*> o .:? "targetWriteBytesPerSecond"

instance ToJSON DiskUtilization where
    toJSON DiskUtilization'{..} =
        object $
            catMaybes
                [ ("targetReadBytesPerSecond" .=)
                      <$> _duTargetReadBytesPerSecond
                , ("targetReadOpsPerSecond" .=)
                      <$> _duTargetReadOpsPerSecond
                , ("targetWriteOpsPerSecond" .=)
                      <$> _duTargetWriteOpsPerSecond
                , ("targetWriteBytesPerSecond" .=)
                      <$> _duTargetWriteBytesPerSecond
                ]
-- | The response message for Operations.ListOperations.
--
-- /See:/ 'listOperationsResponse' smart constructor.
data ListOperationsResponse = ListOperationsResponse'
    { _lorNextPageToken :: !(Maybe Text)
    , _lorOperations    :: !(Maybe [Operation])
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ListOperationsResponse' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lorNextPageToken'
--
-- * 'lorOperations'
listOperationsResponse :: ListOperationsResponse
listOperationsResponse =
    ListOperationsResponse'
        { _lorNextPageToken = Nothing
        , _lorOperations = Nothing
        }

-- | The standard List next-page token.
lorNextPageToken :: Lens' ListOperationsResponse (Maybe Text)
lorNextPageToken =
    lens _lorNextPageToken (\ r v -> r {_lorNextPageToken = v})

-- | A list of operations that matches the specified filter in the request.
lorOperations :: Lens' ListOperationsResponse [Operation]
lorOperations =
    lens _lorOperations (\ r v -> r {_lorOperations = v})
        . _Default
        . _Coerce

-- A missing \"operations\" key decodes as the empty list.
instance FromJSON ListOperationsResponse where
        parseJSON =
            withObject "ListOperationsResponse" $ \ o ->
              ListOperationsResponse' <$>
                (o .:? "nextPageToken") <*>
                (o .:? "operations" .!= mempty)

instance ToJSON ListOperationsResponse where
        toJSON ListOperationsResponse'{..} =
            object . catMaybes $
              [ ("nextPageToken" .=) <$> _lorNextPageToken
              , ("operations" .=) <$> _lorOperations
              ]
-- | Health checking configuration for VM instances. Unhealthy instances are
-- killed and replaced with new instances. Only applicable for instances in
-- App Engine flexible environment.
--
-- /See:/ 'healthCheck' smart constructor.
data HealthCheck = HealthCheck'
    { _hcHealthyThreshold   :: !(Maybe (Textual Word32))
    , _hcDisableHealthCheck :: !(Maybe Bool)
    , _hcCheckInterval      :: !(Maybe Text)
    , _hcRestartThreshold   :: !(Maybe (Textual Word32))
    , _hcHost               :: !(Maybe Text)
    , _hcTimeout            :: !(Maybe Text)
    , _hcUnhealthyThreshold :: !(Maybe (Textual Word32))
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'HealthCheck' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hcHealthyThreshold'
--
-- * 'hcDisableHealthCheck'
--
-- * 'hcCheckInterval'
--
-- * 'hcRestartThreshold'
--
-- * 'hcHost'
--
-- * 'hcTimeout'
--
-- * 'hcUnhealthyThreshold'
healthCheck :: HealthCheck
healthCheck =
    HealthCheck'
        { _hcHealthyThreshold = Nothing
        , _hcDisableHealthCheck = Nothing
        , _hcCheckInterval = Nothing
        , _hcRestartThreshold = Nothing
        , _hcHost = Nothing
        , _hcTimeout = Nothing
        , _hcUnhealthyThreshold = Nothing
        }

-- | Number of consecutive successful health checks required before receiving
-- traffic.
hcHealthyThreshold :: Lens' HealthCheck (Maybe Word32)
hcHealthyThreshold =
    lens _hcHealthyThreshold (\ hc v -> hc {_hcHealthyThreshold = v})
        . mapping _Coerce

-- | Whether to explicitly disable health checks for this instance.
hcDisableHealthCheck :: Lens' HealthCheck (Maybe Bool)
hcDisableHealthCheck =
    lens _hcDisableHealthCheck (\ hc v -> hc {_hcDisableHealthCheck = v})

-- | Interval between health checks.
hcCheckInterval :: Lens' HealthCheck (Maybe Text)
hcCheckInterval =
    lens _hcCheckInterval (\ hc v -> hc {_hcCheckInterval = v})

-- | Number of consecutive failed health checks required before an instance
-- is restarted.
hcRestartThreshold :: Lens' HealthCheck (Maybe Word32)
hcRestartThreshold =
    lens _hcRestartThreshold (\ hc v -> hc {_hcRestartThreshold = v})
        . mapping _Coerce

-- | Host header to send when performing an HTTP health check. Example:
-- \"myapp.appspot.com\"
hcHost :: Lens' HealthCheck (Maybe Text)
hcHost = lens _hcHost (\ hc v -> hc {_hcHost = v})

-- | Time before the health check is considered failed.
hcTimeout :: Lens' HealthCheck (Maybe Text)
hcTimeout = lens _hcTimeout (\ hc v -> hc {_hcTimeout = v})

-- | Number of consecutive failed health checks required before removing
-- traffic.
hcUnhealthyThreshold :: Lens' HealthCheck (Maybe Word32)
hcUnhealthyThreshold =
    lens _hcUnhealthyThreshold (\ hc v -> hc {_hcUnhealthyThreshold = v})
        . mapping _Coerce

-- All JSON fields are optional on the wire.
instance FromJSON HealthCheck where
        parseJSON =
            withObject "HealthCheck" $ \ o ->
              HealthCheck' <$>
                (o .:? "healthyThreshold") <*>
                (o .:? "disableHealthCheck") <*>
                (o .:? "checkInterval") <*>
                (o .:? "restartThreshold") <*>
                (o .:? "host") <*>
                (o .:? "timeout") <*>
                (o .:? "unhealthyThreshold")

instance ToJSON HealthCheck where
        toJSON HealthCheck'{..} =
            object . catMaybes $
              [ ("healthyThreshold" .=) <$> _hcHealthyThreshold
              , ("disableHealthCheck" .=) <$> _hcDisableHealthCheck
              , ("checkInterval" .=) <$> _hcCheckInterval
              , ("restartThreshold" .=) <$> _hcRestartThreshold
              , ("host" .=) <$> _hcHost
              , ("timeout" .=) <$> _hcTimeout
              , ("unhealthyThreshold" .=) <$> _hcUnhealthyThreshold
              ]
-- | Google Cloud Endpoints
-- (https:\/\/cloud.google.com\/appengine\/docs\/python\/endpoints\/)
-- configuration for API handlers.
--
-- /See:/ 'apiConfigHandler' smart constructor.
data APIConfigHandler = APIConfigHandler'
    { _achScript         :: !(Maybe Text)
    , _achSecurityLevel  :: !(Maybe Text)
    , _achURL            :: !(Maybe Text)
    , _achAuthFailAction :: !(Maybe Text)
    , _achLogin          :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'APIConfigHandler' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'achScript'
--
-- * 'achSecurityLevel'
--
-- * 'achURL'
--
-- * 'achAuthFailAction'
--
-- * 'achLogin'
apiConfigHandler :: APIConfigHandler
apiConfigHandler =
    APIConfigHandler'
        { _achScript = Nothing
        , _achSecurityLevel = Nothing
        , _achURL = Nothing
        , _achAuthFailAction = Nothing
        , _achLogin = Nothing
        }

-- | Path to the script from the application root directory.
achScript :: Lens' APIConfigHandler (Maybe Text)
achScript = lens _achScript (\ h v -> h {_achScript = v})

-- | Security (HTTPS) enforcement for this URL.
achSecurityLevel :: Lens' APIConfigHandler (Maybe Text)
achSecurityLevel =
    lens _achSecurityLevel (\ h v -> h {_achSecurityLevel = v})

-- | URL to serve the endpoint at.
achURL :: Lens' APIConfigHandler (Maybe Text)
achURL = lens _achURL (\ h v -> h {_achURL = v})

-- | Action to take when users access resources that require authentication.
-- Defaults to redirect.
achAuthFailAction :: Lens' APIConfigHandler (Maybe Text)
achAuthFailAction =
    lens _achAuthFailAction (\ h v -> h {_achAuthFailAction = v})

-- | Level of login required to access this resource. Defaults to optional.
achLogin :: Lens' APIConfigHandler (Maybe Text)
achLogin = lens _achLogin (\ h v -> h {_achLogin = v})

-- All JSON fields are optional on the wire.
instance FromJSON APIConfigHandler where
        parseJSON =
            withObject "APIConfigHandler" $ \ o ->
              APIConfigHandler' <$>
                (o .:? "script") <*>
                (o .:? "securityLevel") <*>
                (o .:? "url") <*>
                (o .:? "authFailAction") <*>
                (o .:? "login")

instance ToJSON APIConfigHandler where
        toJSON APIConfigHandler'{..} =
            object . catMaybes $
              [ ("script" .=) <$> _achScript
              , ("securityLevel" .=) <$> _achSecurityLevel
              , ("url" .=) <$> _achURL
              , ("authFailAction" .=) <$> _achAuthFailAction
              , ("login" .=) <$> _achLogin
              ]
-- | Environment variables available to the application.Only returned in GET
-- requests if view=FULL is set.
--
-- /See:/ 'versionEnvVariables' smart constructor.
newtype VersionEnvVariables = VersionEnvVariables'
    { _vevAddtional :: HashMap Text Text
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'VersionEnvVariables' from an initial variable map.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vevAddtional'
versionEnvVariables
    :: HashMap Text Text -- ^ 'vevAddtional'
    -> VersionEnvVariables
versionEnvVariables initial =
    VersionEnvVariables'
        { _vevAddtional = _Coerce # initial
        }

vevAddtional :: Lens' VersionEnvVariables (HashMap Text Text)
vevAddtional =
    lens _vevAddtional (\ m kvs -> m {_vevAddtional = kvs}) . _Coerce

-- The wrapper is a transparent JSON object of string pairs.
instance FromJSON VersionEnvVariables where
        parseJSON =
            withObject "VersionEnvVariables" $ \ o ->
              VersionEnvVariables' <$> parseJSONObject o

instance ToJSON VersionEnvVariables where
        toJSON = toJSON . _vevAddtional
-- | An Application resource contains the top-level configuration of an App
-- Engine application.
--
-- /See:/ 'application' smart constructor.
data Application = Application'
    { _aDefaultHostname         :: !(Maybe Text)
    , _aDefaultCookieExpiration :: !(Maybe Text)
    , _aAuthDomain              :: !(Maybe Text)
    , _aCodeBucket              :: !(Maybe Text)
    , _aName                    :: !(Maybe Text)
    , _aDispatchRules           :: !(Maybe [URLDispatchRule])
    , _aDefaultBucket           :: !(Maybe Text)
    , _aId                      :: !(Maybe Text)
    , _aLocationId              :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Application' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aDefaultHostname'
--
-- * 'aDefaultCookieExpiration'
--
-- * 'aAuthDomain'
--
-- * 'aCodeBucket'
--
-- * 'aName'
--
-- * 'aDispatchRules'
--
-- * 'aDefaultBucket'
--
-- * 'aId'
--
-- * 'aLocationId'
application :: Application
application =
    Application'
        { _aDefaultHostname = Nothing
        , _aDefaultCookieExpiration = Nothing
        , _aAuthDomain = Nothing
        , _aCodeBucket = Nothing
        , _aName = Nothing
        , _aDispatchRules = Nothing
        , _aDefaultBucket = Nothing
        , _aId = Nothing
        , _aLocationId = Nothing
        }

-- | Hostname used to reach this application, as resolved by App
-- Engine.\'OutputOnly
aDefaultHostname :: Lens' Application (Maybe Text)
aDefaultHostname =
    lens _aDefaultHostname (\ app v -> app {_aDefaultHostname = v})

-- | Cookie expiration policy for this application.
aDefaultCookieExpiration :: Lens' Application (Maybe Text)
aDefaultCookieExpiration =
    lens _aDefaultCookieExpiration
         (\ app v -> app {_aDefaultCookieExpiration = v})

-- | Google Apps authentication domain that controls which users can access
-- this application.Defaults to open access for any Google Account.
aAuthDomain :: Lens' Application (Maybe Text)
aAuthDomain = lens _aAuthDomain (\ app v -> app {_aAuthDomain = v})

-- | Google Cloud Storage bucket that can be used for storing files
-- associated with this application. This bucket is associated with the
-- application and can be used by the gcloud deployment
-- commands.\'OutputOnly
aCodeBucket :: Lens' Application (Maybe Text)
aCodeBucket = lens _aCodeBucket (\ app v -> app {_aCodeBucket = v})

-- | Full path to the Application resource in the API. Example:
-- apps\/myapp.\'OutputOnly
aName :: Lens' Application (Maybe Text)
aName = lens _aName (\ app v -> app {_aName = v})

-- | HTTP path dispatch rules for requests to the application that do not
-- explicitly target a service or version. Rules are
-- order-dependent.\'OutputOnly
aDispatchRules :: Lens' Application [URLDispatchRule]
aDispatchRules =
    lens _aDispatchRules (\ app v -> app {_aDispatchRules = v})
        . _Default
        . _Coerce

-- | Google Cloud Storage bucket that can be used by this application to
-- store content.\'OutputOnly
aDefaultBucket :: Lens' Application (Maybe Text)
aDefaultBucket =
    lens _aDefaultBucket (\ app v -> app {_aDefaultBucket = v})

-- | Identifier of the Application resource. This identifier is equivalent to
-- the project ID of the Google Cloud Platform project where you want to
-- deploy your application. Example: myapp.
aId :: Lens' Application (Maybe Text)
aId = lens _aId (\ app v -> app {_aId = v})

-- | Location from which this application will be run. Application instances
-- will run out of data centers in the chosen location, which is also where
-- all of the application\'s end user content is stored.Defaults to
-- us-central.Options are:us-central - Central USeurope-west - Western
-- Europeus-east1 - Eastern US
aLocationId :: Lens' Application (Maybe Text)
aLocationId = lens _aLocationId (\ app v -> app {_aLocationId = v})

-- A missing \"dispatchRules\" key decodes as the empty list.
instance FromJSON Application where
        parseJSON =
            withObject "Application" $ \ o ->
              Application' <$>
                (o .:? "defaultHostname") <*>
                (o .:? "defaultCookieExpiration") <*>
                (o .:? "authDomain") <*>
                (o .:? "codeBucket") <*>
                (o .:? "name") <*>
                (o .:? "dispatchRules" .!= mempty) <*>
                (o .:? "defaultBucket") <*>
                (o .:? "id") <*>
                (o .:? "locationId")

instance ToJSON Application where
        toJSON Application'{..} =
            object . catMaybes $
              [ ("defaultHostname" .=) <$> _aDefaultHostname
              , ("defaultCookieExpiration" .=) <$> _aDefaultCookieExpiration
              , ("authDomain" .=) <$> _aAuthDomain
              , ("codeBucket" .=) <$> _aCodeBucket
              , ("name" .=) <$> _aName
              , ("dispatchRules" .=) <$> _aDispatchRules
              , ("defaultBucket" .=) <$> _aDefaultBucket
              , ("id" .=) <$> _aId
              , ("locationId" .=) <$> _aLocationId
              ]
-- | Metadata settings that are supplied to this version to enable beta
-- runtime features.
--
-- /See:/ 'versionBetaSettings' smart constructor.
newtype VersionBetaSettings = VersionBetaSettings'
    { _vbsAddtional :: HashMap Text Text
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'VersionBetaSettings' from an initial settings map.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vbsAddtional'
versionBetaSettings
    :: HashMap Text Text -- ^ 'vbsAddtional'
    -> VersionBetaSettings
versionBetaSettings initial =
    VersionBetaSettings'
        { _vbsAddtional = _Coerce # initial
        }

vbsAddtional :: Lens' VersionBetaSettings (HashMap Text Text)
vbsAddtional =
    lens _vbsAddtional (\ m kvs -> m {_vbsAddtional = kvs}) . _Coerce

-- The wrapper is a transparent JSON object of string pairs.
instance FromJSON VersionBetaSettings where
        parseJSON =
            withObject "VersionBetaSettings" $ \ o ->
              VersionBetaSettings' <$> parseJSONObject o

instance ToJSON VersionBetaSettings where
        toJSON = toJSON . _vbsAddtional
-- | A Service resource is a logical component of an application that can
-- share state and communicate in a secure fashion with other services. For
-- example, an application that handles customer requests might include
-- separate services to handle tasks such as backend data analysis or API
-- requests from mobile devices. Each service has a collection of versions
-- that define a specific set of code used to implement the functionality
-- of that service.
--
-- /See:/ 'service' smart constructor.
data Service = Service'
    { _sSplit :: !(Maybe TrafficSplit)
    , _sName  :: !(Maybe Text)
    , _sId    :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Service' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sSplit'
--
-- * 'sName'
--
-- * 'sId'
service :: Service
service =
    Service'
        { _sSplit = Nothing
        , _sName = Nothing
        , _sId = Nothing
        }

-- | Mapping that defines fractional HTTP traffic diversion to different
-- versions within the service.
sSplit :: Lens' Service (Maybe TrafficSplit)
sSplit = lens _sSplit (\ svc v -> svc {_sSplit = v})

-- | Full path to the Service resource in the API. Example:
-- apps\/myapp\/services\/default.\'OutputOnly
sName :: Lens' Service (Maybe Text)
sName = lens _sName (\ svc v -> svc {_sName = v})

-- | Relative name of the service within the application. Example:
-- default.\'OutputOnly
sId :: Lens' Service (Maybe Text)
sId = lens _sId (\ svc v -> svc {_sId = v})

-- All JSON fields are optional on the wire.
instance FromJSON Service where
        parseJSON =
            withObject "Service" $ \ o ->
              Service' <$>
                (o .:? "split") <*>
                (o .:? "name") <*>
                (o .:? "id")

instance ToJSON Service where
        toJSON Service'{..} =
            object . catMaybes $
              [ ("split" .=) <$> _sSplit
              , ("name" .=) <$> _sName
              , ("id" .=) <$> _sId
              ]
-- | Cloud Endpoints (https:\/\/cloud.google.com\/endpoints) configuration.
-- The Endpoints API Service provides tooling for serving Open API and gRPC
-- endpoints via an NGINX proxy.The fields here refer to the name and
-- configuration id of a \"service\" resource in the Service Management API
-- (https:\/\/cloud.google.com\/service-management\/overview).
--
-- /See:/ 'endpointsAPIService' smart constructor.
data EndpointsAPIService = EndpointsAPIService'
    { _easName     :: !(Maybe Text)
    , _easConfigId :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'EndpointsAPIService' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'easName'
--
-- * 'easConfigId'
endpointsAPIService :: EndpointsAPIService
endpointsAPIService =
    EndpointsAPIService'
        { _easName = Nothing
        , _easConfigId = Nothing
        }

-- | Endpoints service name which is the name of the \"service\" resource in
-- the Service Management API. For example
-- \"myapi.endpoints.myproject.cloud.goog\"
easName :: Lens' EndpointsAPIService (Maybe Text)
easName = lens _easName (\ eas v -> eas {_easName = v})

-- | Endpoints service configuration id as specified by the Service
-- Management API. For example \"2016-09-19r1\"
easConfigId :: Lens' EndpointsAPIService (Maybe Text)
easConfigId = lens _easConfigId (\ eas v -> eas {_easConfigId = v})

-- All JSON fields are optional on the wire.
instance FromJSON EndpointsAPIService where
        parseJSON =
            withObject "EndpointsAPIService" $ \ o ->
              EndpointsAPIService' <$>
                (o .:? "name") <*>
                (o .:? "configId")

instance ToJSON EndpointsAPIService where
        toJSON EndpointsAPIService'{..} =
            object . catMaybes $
              [ ("name" .=) <$> _easName
              , ("configId" .=) <$> _easConfigId
              ]
-- | A resource that represents Google Cloud Platform location.
--
-- /See:/ 'location' smart constructor.
data Location = Location'
    { _locName       :: !(Maybe Text)
    , _locMetadata   :: !(Maybe LocationSchema)
    , _locLabels     :: !(Maybe LocationLabels)
    , _locLocationId :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Location' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'locName'
--
-- * 'locMetadata'
--
-- * 'locLabels'
--
-- * 'locLocationId'
location :: Location
location =
    Location'
        { _locName = Nothing
        , _locMetadata = Nothing
        , _locLabels = Nothing
        , _locLocationId = Nothing
        }

-- | Resource name for the location, which may vary between implementations.
-- For example: \"projects\/example-project\/locations\/us-east1\"
locName :: Lens' Location (Maybe Text)
locName = lens _locName (\ loc v -> loc {_locName = v})

-- | Service-specific metadata. For example the available capacity at the
-- given location.
locMetadata :: Lens' Location (Maybe LocationSchema)
locMetadata = lens _locMetadata (\ loc v -> loc {_locMetadata = v})

-- | Cross-service attributes for the location. For example
-- {\"cloud.googleapis.com\/region\": \"us-east1\"}
locLabels :: Lens' Location (Maybe LocationLabels)
locLabels = lens _locLabels (\ loc v -> loc {_locLabels = v})

-- | The canonical id for this location. For example: \"us-east1\".
locLocationId :: Lens' Location (Maybe Text)
locLocationId =
    lens _locLocationId (\ loc v -> loc {_locLocationId = v})

-- All JSON fields are optional on the wire.
instance FromJSON Location where
        parseJSON =
            withObject "Location" $ \ o ->
              Location' <$>
                (o .:? "name") <*>
                (o .:? "metadata") <*>
                (o .:? "labels") <*>
                (o .:? "locationId")

instance ToJSON Location where
        toJSON Location'{..} =
            object . catMaybes $
              [ ("name" .=) <$> _locName
              , ("metadata" .=) <$> _locMetadata
              , ("labels" .=) <$> _locLabels
              , ("locationId" .=) <$> _locLocationId
              ]
-- | This resource represents a long-running operation that is the result of
-- a network API call.
--
-- /See:/ 'operation' smart constructor.
data Operation = Operation'
    { _oDone     :: !(Maybe Bool)
    , _oError    :: !(Maybe Status)
    , _oResponse :: !(Maybe OperationResponse)
    , _oName     :: !(Maybe Text)
    , _oMetadata :: !(Maybe OperationSchema)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Operation' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oDone'
--
-- * 'oError'
--
-- * 'oResponse'
--
-- * 'oName'
--
-- * 'oMetadata'
operation :: Operation
operation =
    Operation'
        { _oDone = Nothing
        , _oError = Nothing
        , _oResponse = Nothing
        , _oName = Nothing
        , _oMetadata = Nothing
        }

-- | If the value is false, it means the operation is still in progress. If
-- true, the operation is completed, and either error or response is
-- available.
oDone :: Lens' Operation (Maybe Bool)
oDone = lens _oDone (\ op v -> op {_oDone = v})

-- | The error result of the operation in case of failure or cancellation.
oError :: Lens' Operation (Maybe Status)
oError = lens _oError (\ op v -> op {_oError = v})

-- | The normal response of the operation in case of success. If the original
-- method returns no data on success, such as Delete, the response is
-- google.protobuf.Empty. If the original method is standard
-- Get\/Create\/Update, the response should be the resource. For other
-- methods, the response should have the type XxxResponse, where Xxx is the
-- original method name. For example, if the original method name is
-- TakeSnapshot(), the inferred response type is TakeSnapshotResponse.
oResponse :: Lens' Operation (Maybe OperationResponse)
oResponse = lens _oResponse (\ op v -> op {_oResponse = v})

-- | The server-assigned name, which is only unique within the same service
-- that originally returns it. If you use the default HTTP mapping, the
-- name should have the format of operations\/some\/unique\/name.
oName :: Lens' Operation (Maybe Text)
oName = lens _oName (\ op v -> op {_oName = v})

-- | Service-specific metadata associated with the operation. It typically
-- contains progress information and common metadata such as create time.
-- Some services might not provide such metadata. Any method that returns a
-- long-running operation should document the metadata type, if any.
oMetadata :: Lens' Operation (Maybe OperationSchema)
oMetadata = lens _oMetadata (\ op v -> op {_oMetadata = v})

-- All JSON fields are optional on the wire.
instance FromJSON Operation where
        parseJSON =
            withObject "Operation" $ \ o ->
              Operation' <$>
                (o .:? "done") <*>
                (o .:? "error") <*>
                (o .:? "response") <*>
                (o .:? "name") <*>
                (o .:? "metadata")

instance ToJSON Operation where
        toJSON Operation'{..} =
            object . catMaybes $
              [ ("done" .=) <$> _oDone
              , ("error" .=) <$> _oError
              , ("response" .=) <$> _oResponse
              , ("name" .=) <$> _oName
              , ("metadata" .=) <$> _oMetadata
              ]
-- | The zip file information for a zip deployment.
--
-- /See:/ 'zipInfo' smart constructor.
data ZipInfo = ZipInfo'
    { _ziFilesCount :: !(Maybe (Textual Int32))
    , _ziSourceURL  :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ZipInfo' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ziFilesCount'
--
-- * 'ziSourceURL'
zipInfo :: ZipInfo
zipInfo =
    ZipInfo'
        { _ziFilesCount = Nothing
        , _ziSourceURL = Nothing
        }

-- | An estimate of the number of files in a zip for a zip deployment. If
-- set, must be greater than or equal to the actual number of files. Used
-- for optimizing performance; if not provided, deployment may be slow.
ziFilesCount :: Lens' ZipInfo (Maybe Int32)
ziFilesCount =
    lens _ziFilesCount (\ zi v -> zi {_ziFilesCount = v})
        . mapping _Coerce

-- | URL of the zip file to deploy from. Must be a URL to a resource in
-- Google Cloud Storage in the form
-- \'http(s):\/\/storage.googleapis.com\/\/\'.
ziSourceURL :: Lens' ZipInfo (Maybe Text)
ziSourceURL = lens _ziSourceURL (\ zi v -> zi {_ziSourceURL = v})

-- All JSON fields are optional on the wire.
instance FromJSON ZipInfo where
        parseJSON =
            withObject "ZipInfo" $ \ o ->
              ZipInfo' <$>
                (o .:? "filesCount") <*>
                (o .:? "sourceUrl")

instance ToJSON ZipInfo where
        toJSON ZipInfo'{..} =
            object . catMaybes $
              [ ("filesCount" .=) <$> _ziFilesCount
              , ("sourceUrl" .=) <$> _ziSourceURL
              ]
-- | Rules to match an HTTP request and dispatch that request to a service.
--
-- /See:/ 'urlDispatchRule' smart constructor.
data URLDispatchRule = URLDispatchRule'
    { _udrPath    :: !(Maybe Text)
    , _udrService :: !(Maybe Text)
    , _udrDomain  :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'URLDispatchRule' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'udrPath'
--
-- * 'udrService'
--
-- * 'udrDomain'
urlDispatchRule :: URLDispatchRule
urlDispatchRule =
    URLDispatchRule'
        { _udrPath = Nothing
        , _udrService = Nothing
        , _udrDomain = Nothing
        }

-- | Pathname within the host. Must start with a \"\/\". A single \"*\" can
-- be included at the end of the path. The sum of the lengths of the domain
-- and path may not exceed 100 characters.
udrPath :: Lens' URLDispatchRule (Maybe Text)
udrPath = lens _udrPath (\ rule v -> rule {_udrPath = v})

-- | Resource ID of a service in this application that should serve the
-- matched request. The service must already exist. Example: default.
udrService :: Lens' URLDispatchRule (Maybe Text)
udrService = lens _udrService (\ rule v -> rule {_udrService = v})

-- | Domain name to match against. The wildcard \"*\" is supported if
-- specified before a period: \"*.\".Defaults to matching all domains:
-- \"*\".
udrDomain :: Lens' URLDispatchRule (Maybe Text)
udrDomain = lens _udrDomain (\ rule v -> rule {_udrDomain = v})

-- All JSON fields are optional on the wire.
instance FromJSON URLDispatchRule where
        parseJSON =
            withObject "URLDispatchRule" $ \ o ->
              URLDispatchRule' <$>
                (o .:? "path") <*>
                (o .:? "service") <*>
                (o .:? "domain")

instance ToJSON URLDispatchRule where
        toJSON URLDispatchRule'{..} =
            object . catMaybes $
              [ ("path" .=) <$> _udrPath
              , ("service" .=) <$> _udrService
              , ("domain" .=) <$> _udrDomain
              ]
-- | Response message for Versions.ListVersions.
--
-- /See:/ 'listVersionsResponse' smart constructor.
data ListVersionsResponse = ListVersionsResponse'
    { _lvrNextPageToken :: !(Maybe Text)
    , _lvrVersions      :: !(Maybe [Version])
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ListVersionsResponse' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lvrNextPageToken'
--
-- * 'lvrVersions'
listVersionsResponse :: ListVersionsResponse
listVersionsResponse =
    ListVersionsResponse'
        { _lvrNextPageToken = Nothing
        , _lvrVersions = Nothing
        }

-- | Continuation token for fetching the next page of results.
lvrNextPageToken :: Lens' ListVersionsResponse (Maybe Text)
lvrNextPageToken =
    lens _lvrNextPageToken (\ r v -> r {_lvrNextPageToken = v})

-- | The versions belonging to the requested service.
lvrVersions :: Lens' ListVersionsResponse [Version]
lvrVersions =
    lens _lvrVersions (\ r v -> r {_lvrVersions = v})
        . _Default
        . _Coerce

-- A missing \"versions\" key decodes as the empty list.
instance FromJSON ListVersionsResponse where
        parseJSON =
            withObject "ListVersionsResponse" $ \ o ->
              ListVersionsResponse' <$>
                (o .:? "nextPageToken") <*>
                (o .:? "versions" .!= mempty)

instance ToJSON ListVersionsResponse where
        toJSON ListVersionsResponse'{..} =
            object . catMaybes $
              [ ("nextPageToken" .=) <$> _lvrNextPageToken
              , ("versions" .=) <$> _lvrVersions
              ]
-- | Single source file that is part of the version to be deployed. Each
-- source file that is deployed must be specified separately.
--
-- /See:/ 'fileInfo' smart constructor.
data FileInfo = FileInfo'
    { _fiSha1Sum   :: !(Maybe Text)
    , _fiMimeType  :: !(Maybe Text)
    , _fiSourceURL :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'FileInfo' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fiSha1Sum'
--
-- * 'fiMimeType'
--
-- * 'fiSourceURL'
fileInfo :: FileInfo
fileInfo =
    FileInfo'
        { _fiSha1Sum = Nothing
        , _fiMimeType = Nothing
        , _fiSourceURL = Nothing
        }

-- | The SHA1 hash of the file, in hex.
fiSha1Sum :: Lens' FileInfo (Maybe Text)
fiSha1Sum = lens _fiSha1Sum (\ fi v -> fi {_fiSha1Sum = v})

-- | The MIME type of the file.Defaults to the value from Google Cloud
-- Storage.
fiMimeType :: Lens' FileInfo (Maybe Text)
fiMimeType = lens _fiMimeType (\ fi v -> fi {_fiMimeType = v})

-- | URL source to use to fetch this file. Must be a URL to a resource in
-- Google Cloud Storage in the form
-- \'http(s):\/\/storage.googleapis.com\/\/\'.
fiSourceURL :: Lens' FileInfo (Maybe Text)
fiSourceURL = lens _fiSourceURL (\ fi v -> fi {_fiSourceURL = v})

-- All JSON fields are optional on the wire.
instance FromJSON FileInfo where
        parseJSON =
            withObject "FileInfo" $ \ o ->
              FileInfo' <$>
                (o .:? "sha1Sum") <*>
                (o .:? "mimeType") <*>
                (o .:? "sourceUrl")

instance ToJSON FileInfo where
        toJSON FileInfo'{..} =
            object . catMaybes $
              [ ("sha1Sum" .=) <$> _fiSha1Sum
              , ("mimeType" .=) <$> _fiMimeType
              , ("sourceUrl" .=) <$> _fiSourceURL
              ]
-- | Automatic scaling is based on request rate, response latencies, and
-- other application metrics.
--
-- /See:/ 'automaticScaling' smart constructor.
data AutomaticScaling = AutomaticScaling'
    { _asNetworkUtilization    :: !(Maybe NetworkUtilization)
    , _asMaxTotalInstances     :: !(Maybe (Textual Int32))
    , _asMinIdleInstances      :: !(Maybe (Textual Int32))
    , _asDiskUtilization       :: !(Maybe DiskUtilization)
    , _asMinPendingLatency     :: !(Maybe Text)
    , _asCPUUtilization        :: !(Maybe CPUUtilization)
    , _asMaxIdleInstances      :: !(Maybe (Textual Int32))
    , _asMinTotalInstances     :: !(Maybe (Textual Int32))
    , _asMaxConcurrentRequests :: !(Maybe (Textual Int32))
    , _asCoolDownPeriod        :: !(Maybe Text)
    , _asRequestUtilization    :: !(Maybe RequestUtilization)
    , _asMaxPendingLatency     :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AutomaticScaling' with every field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asNetworkUtilization'
--
-- * 'asMaxTotalInstances'
--
-- * 'asMinIdleInstances'
--
-- * 'asDiskUtilization'
--
-- * 'asMinPendingLatency'
--
-- * 'asCPUUtilization'
--
-- * 'asMaxIdleInstances'
--
-- * 'asMinTotalInstances'
--
-- * 'asMaxConcurrentRequests'
--
-- * 'asCoolDownPeriod'
--
-- * 'asRequestUtilization'
--
-- * 'asMaxPendingLatency'
automaticScaling :: AutomaticScaling
automaticScaling =
    AutomaticScaling'
        { _asNetworkUtilization = Nothing
        , _asMaxTotalInstances = Nothing
        , _asMinIdleInstances = Nothing
        , _asDiskUtilization = Nothing
        , _asMinPendingLatency = Nothing
        , _asCPUUtilization = Nothing
        , _asMaxIdleInstances = Nothing
        , _asMinTotalInstances = Nothing
        , _asMaxConcurrentRequests = Nothing
        , _asCoolDownPeriod = Nothing
        , _asRequestUtilization = Nothing
        , _asMaxPendingLatency = Nothing
        }

-- | Target scaling by network usage.
asNetworkUtilization :: Lens' AutomaticScaling (Maybe NetworkUtilization)
asNetworkUtilization =
    lens _asNetworkUtilization (\ sc v -> sc {_asNetworkUtilization = v})

-- | Maximum number of instances that should be started to handle requests.
asMaxTotalInstances :: Lens' AutomaticScaling (Maybe Int32)
asMaxTotalInstances =
    lens _asMaxTotalInstances (\ sc v -> sc {_asMaxTotalInstances = v})
        . mapping _Coerce

-- | Minimum number of idle instances that should be maintained for this
-- version. Only applicable for the default version of a service.
asMinIdleInstances :: Lens' AutomaticScaling (Maybe Int32)
asMinIdleInstances =
    lens _asMinIdleInstances (\ sc v -> sc {_asMinIdleInstances = v})
        . mapping _Coerce

-- | Target scaling by disk usage.
asDiskUtilization :: Lens' AutomaticScaling (Maybe DiskUtilization)
asDiskUtilization =
    lens _asDiskUtilization (\ sc v -> sc {_asDiskUtilization = v})

-- | Minimum amount of time a request should wait in the pending queue before
-- starting a new instance to handle it.
asMinPendingLatency :: Lens' AutomaticScaling (Maybe Text)
asMinPendingLatency =
    lens _asMinPendingLatency (\ sc v -> sc {_asMinPendingLatency = v})

-- | Target scaling by CPU usage.
asCPUUtilization :: Lens' AutomaticScaling (Maybe CPUUtilization)
asCPUUtilization =
    lens _asCPUUtilization (\ sc v -> sc {_asCPUUtilization = v})

-- | Maximum number of idle instances that should be maintained for this
-- version.
asMaxIdleInstances :: Lens' AutomaticScaling (Maybe Int32)
asMaxIdleInstances =
    lens _asMaxIdleInstances (\ sc v -> sc {_asMaxIdleInstances = v})
        . mapping _Coerce

-- | Minimum number of instances that should be maintained for this version.
asMinTotalInstances :: Lens' AutomaticScaling (Maybe Int32)
asMinTotalInstances =
    lens _asMinTotalInstances (\ sc v -> sc {_asMinTotalInstances = v})
        . mapping _Coerce

-- | Number of concurrent requests an automatic scaling instance can accept
-- before the scheduler spawns a new instance.Defaults to a
-- runtime-specific value.
asMaxConcurrentRequests :: Lens' AutomaticScaling (Maybe Int32)
asMaxConcurrentRequests =
    lens _asMaxConcurrentRequests
         (\ sc v -> sc {_asMaxConcurrentRequests = v})
        . mapping _Coerce

-- | Amount of time that the Autoscaler
-- (https:\/\/cloud.google.com\/compute\/docs\/autoscaler\/) should wait
-- between changes to the number of virtual machines. Only applicable for
-- VM runtimes.
asCoolDownPeriod :: Lens' AutomaticScaling (Maybe Text)
asCoolDownPeriod =
    lens _asCoolDownPeriod (\ sc v -> sc {_asCoolDownPeriod = v})

-- | Target scaling by request utilization.
asRequestUtilization :: Lens' AutomaticScaling (Maybe RequestUtilization)
asRequestUtilization =
    lens _asRequestUtilization (\ sc v -> sc {_asRequestUtilization = v})

-- | Maximum amount of time that a request should wait in the pending queue
-- before starting a new instance to handle it.
asMaxPendingLatency :: Lens' AutomaticScaling (Maybe Text)
asMaxPendingLatency =
    lens _asMaxPendingLatency (\ sc v -> sc {_asMaxPendingLatency = v})
-- NOTE(review): the applicative chain below must list the JSON keys in the
-- exact order of the AutomaticScaling' constructor fields; reordering either
-- side silently swaps field values without a type error (all fields are
-- 'Maybe'-typed).
instance FromJSON AutomaticScaling where
        parseJSON
          = withObject "AutomaticScaling"
              (\ o ->
                 AutomaticScaling' <$>
                   (o .:? "networkUtilization") <*>
                     (o .:? "maxTotalInstances")
                     <*> (o .:? "minIdleInstances")
                     <*> (o .:? "diskUtilization")
                     <*> (o .:? "minPendingLatency")
                     <*> (o .:? "cpuUtilization")
                     <*> (o .:? "maxIdleInstances")
                     <*> (o .:? "minTotalInstances")
                     <*> (o .:? "maxConcurrentRequests")
                     <*> (o .:? "coolDownPeriod")
                     <*> (o .:? "requestUtilization")
                     <*> (o .:? "maxPendingLatency"))
-- Serialisation: 'catMaybes' drops absent ('Nothing') fields so they are
-- omitted from the emitted JSON object entirely.
instance ToJSON AutomaticScaling where
        toJSON AutomaticScaling'{..}
          = object
              (catMaybes
                 [("networkUtilization" .=) <$> _asNetworkUtilization,
                  ("maxTotalInstances" .=) <$> _asMaxTotalInstances,
                  ("minIdleInstances" .=) <$> _asMinIdleInstances,
                  ("diskUtilization" .=) <$> _asDiskUtilization,
                  ("minPendingLatency" .=) <$> _asMinPendingLatency,
                  ("cpuUtilization" .=) <$> _asCPUUtilization,
                  ("maxIdleInstances" .=) <$> _asMaxIdleInstances,
                  ("minTotalInstances" .=) <$> _asMinTotalInstances,
                  ("maxConcurrentRequests" .=) <$>
                    _asMaxConcurrentRequests,
                  ("coolDownPeriod" .=) <$> _asCoolDownPeriod,
                  ("requestUtilization" .=) <$> _asRequestUtilization,
                  ("maxPendingLatency" .=) <$> _asMaxPendingLatency])
-- | Metadata for the given google.longrunning.Operation.
--
-- Fields documented as \'OutputOnly are populated by the server and are
-- ignored when sent in a request.
--
-- /See:/ 'operationMetadataV1Beta5' smart constructor.
data OperationMetadataV1Beta5 = OperationMetadataV1Beta5'
    { _omvbInsertTime :: !(Maybe Text)
    , _omvbUser :: !(Maybe Text)
    , _omvbMethod :: !(Maybe Text)
    , _omvbEndTime :: !(Maybe Text)
    , _omvbTarget :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationMetadataV1Beta5' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omvbInsertTime'
--
-- * 'omvbUser'
--
-- * 'omvbMethod'
--
-- * 'omvbEndTime'
--
-- * 'omvbTarget'
operationMetadataV1Beta5
    :: OperationMetadataV1Beta5
operationMetadataV1Beta5 =
    -- All fields default to 'Nothing'.
    OperationMetadataV1Beta5'
    { _omvbInsertTime = Nothing
    , _omvbUser = Nothing
    , _omvbMethod = Nothing
    , _omvbEndTime = Nothing
    , _omvbTarget = Nothing
    }
-- | Timestamp that this operation was created.\'OutputOnly
--
-- NOTE(review): \'OutputOnly fields are set by the server; values written
-- through these lenses are not expected to round-trip in requests.
omvbInsertTime :: Lens' OperationMetadataV1Beta5 (Maybe Text)
omvbInsertTime
  = lens _omvbInsertTime
      (\ s a -> s{_omvbInsertTime = a})
-- | User who requested this operation.\'OutputOnly
omvbUser :: Lens' OperationMetadataV1Beta5 (Maybe Text)
omvbUser = lens _omvbUser (\ s a -> s{_omvbUser = a})
-- | API method name that initiated this operation. Example:
-- google.appengine.v1beta5.Version.CreateVersion.\'OutputOnly
omvbMethod :: Lens' OperationMetadataV1Beta5 (Maybe Text)
omvbMethod
  = lens _omvbMethod (\ s a -> s{_omvbMethod = a})
-- | Timestamp that this operation completed.\'OutputOnly
omvbEndTime :: Lens' OperationMetadataV1Beta5 (Maybe Text)
omvbEndTime
  = lens _omvbEndTime (\ s a -> s{_omvbEndTime = a})
-- | Name of the resource that this operation is acting on. Example:
-- apps\/myapp\/services\/default.\'OutputOnly
omvbTarget :: Lens' OperationMetadataV1Beta5 (Maybe Text)
omvbTarget
  = lens _omvbTarget (\ s a -> s{_omvbTarget = a})
-- NOTE(review): key order below must mirror the constructor field order of
-- OperationMetadataV1Beta5'.
instance FromJSON OperationMetadataV1Beta5 where
        parseJSON
          = withObject "OperationMetadataV1Beta5"
              (\ o ->
                 OperationMetadataV1Beta5' <$>
                   (o .:? "insertTime") <*> (o .:? "user") <*>
                     (o .:? "method")
                     <*> (o .:? "endTime")
                     <*> (o .:? "target"))
-- 'Nothing' fields are omitted from the serialised object.
instance ToJSON OperationMetadataV1Beta5 where
        toJSON OperationMetadataV1Beta5'{..}
          = object
              (catMaybes
                 [("insertTime" .=) <$> _omvbInsertTime,
                  ("user" .=) <$> _omvbUser,
                  ("method" .=) <$> _omvbMethod,
                  ("endTime" .=) <$> _omvbEndTime,
                  ("target" .=) <$> _omvbTarget])
-- | Volumes mounted within the app container. Only applicable for VM
-- runtimes.
--
-- /See:/ 'volume' smart constructor.
data Volume = Volume'
    { _vSizeGb :: !(Maybe (Textual Double))
    , _vName :: !(Maybe Text)
    , _vVolumeType :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Volume' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vSizeGb'
--
-- * 'vName'
--
-- * 'vVolumeType'
volume
    :: Volume
volume =
    Volume'
    { _vSizeGb = Nothing
    , _vName = Nothing
    , _vVolumeType = Nothing
    }
-- | Volume size in gigabytes.
--
-- The @mapping _Coerce@ step exposes the @Textual Double@ field as a plain
-- 'Double'.
vSizeGb :: Lens' Volume (Maybe Double)
vSizeGb
  = lens _vSizeGb (\ s a -> s{_vSizeGb = a}) .
      mapping _Coerce
-- | Unique name for the volume.
vName :: Lens' Volume (Maybe Text)
vName = lens _vName (\ s a -> s{_vName = a})
-- | Underlying volume type, e.g. \'tmpfs\'.
vVolumeType :: Lens' Volume (Maybe Text)
vVolumeType
  = lens _vVolumeType (\ s a -> s{_vVolumeType = a})
instance FromJSON Volume where
        parseJSON
          = withObject "Volume"
              (\ o ->
                 Volume' <$>
                   (o .:? "sizeGb") <*> (o .:? "name") <*>
                     (o .:? "volumeType"))
instance ToJSON Volume where
        toJSON Volume'{..}
          = object
              (catMaybes
                 [("sizeGb" .=) <$> _vSizeGb, ("name" .=) <$> _vName,
                  ("volumeType" .=) <$> _vVolumeType])
-- | Uses Google Cloud Endpoints to handle requests.
--
-- /See:/ 'apiEndpointHandler' smart constructor.
newtype APIEndpointHandler = APIEndpointHandler'
    { _aehScriptPath :: Maybe Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'APIEndpointHandler' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aehScriptPath'
apiEndpointHandler
    :: APIEndpointHandler
apiEndpointHandler = APIEndpointHandler' {_aehScriptPath = Nothing}
-- | Path to the script from the application root directory.
aehScriptPath :: Lens' APIEndpointHandler (Maybe Text)
aehScriptPath = lens _aehScriptPath update
  where
    update handler path = handler {_aehScriptPath = path}
instance FromJSON APIEndpointHandler where
    parseJSON =
        withObject "APIEndpointHandler" $ \o ->
            APIEndpointHandler' <$> o .:? "scriptPath"
instance ToJSON APIEndpointHandler where
    toJSON handler =
        object (catMaybes [("scriptPath" .=) <$> _aehScriptPath handler])
-- | An arbitrary JSON object keyed by field name.
--
-- /See:/ 'statusDetailsItem' smart constructor.
newtype StatusDetailsItem = StatusDetailsItem'
    { _sdiAddtional :: HashMap Text JSONValue
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'StatusDetailsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdiAddtional'
statusDetailsItem
    :: HashMap Text JSONValue -- ^ 'sdiAddtional'
    -> StatusDetailsItem
statusDetailsItem pSdiAddtional_ =
    StatusDetailsItem'
    { _sdiAddtional = _Coerce # pSdiAddtional_
    }
-- | Properties of the object. Contains field \'type with type URL.
sdiAddtional :: Lens' StatusDetailsItem (HashMap Text JSONValue)
sdiAddtional
  = lens _sdiAddtional (\ s a -> s{_sdiAddtional = a})
      . _Coerce
-- The wrapper serialises as the bare underlying object (no extra nesting).
instance FromJSON StatusDetailsItem where
        parseJSON
          = withObject "StatusDetailsItem"
              (\ o -> StatusDetailsItem' <$> (parseJSONObject o))
instance ToJSON StatusDetailsItem where
        toJSON = toJSON . _sdiAddtional
-- | Extra network settings. Only applicable for VM runtimes.
--
-- /See:/ 'network' smart constructor.
data Network = Network'
    { _nSubnetworkName :: !(Maybe Text)
    , _nForwardedPorts :: !(Maybe [Text])
    , _nInstanceTag :: !(Maybe Text)
    , _nName :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Network' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'nSubnetworkName'
--
-- * 'nForwardedPorts'
--
-- * 'nInstanceTag'
--
-- * 'nName'
network
    :: Network
network =
    Network'
    { _nSubnetworkName = Nothing
    , _nForwardedPorts = Nothing
    , _nInstanceTag = Nothing
    , _nName = Nothing
    }
-- | Google Cloud Platform sub-network where the virtual machines are
-- created. Specify the short name, not the resource path.If a subnetwork
-- name is specified, a network name will also be required unless it is for
-- the default network. If the network the VM instance is being created in
-- is a Legacy network, then the IP address is allocated from the
-- IPv4Range. If the network the VM instance is being created in is an auto
-- Subnet Mode Network, then only network name should be specified (not the
-- subnetwork_name) and the IP address is created from the IPCidrRange of
-- the subnetwork that exists in that zone for that network. If the network
-- the VM instance is being created in is a custom Subnet Mode Network,
-- then the subnetwork_name must be specified and the IP address is created
-- from the IPCidrRange of the subnetwork.If specified, the subnetwork must
-- exist in the same region as the Flex app.
nSubnetworkName :: Lens' Network (Maybe Text)
nSubnetworkName
  = lens _nSubnetworkName
      (\ s a -> s{_nSubnetworkName = a})
-- | List of ports, or port pairs, to forward from the virtual machine to the
-- application container.
--
-- NOTE(review): '_Default' makes the lens total over the 'Maybe [Text]'
-- field — reading an absent list yields @[]@ rather than requiring callers
-- to handle 'Nothing'.
nForwardedPorts :: Lens' Network [Text]
nForwardedPorts
  = lens _nForwardedPorts
      (\ s a -> s{_nForwardedPorts = a})
      . _Default
      . _Coerce
-- | Tag to apply to the VM instance during creation.
nInstanceTag :: Lens' Network (Maybe Text)
nInstanceTag
  = lens _nInstanceTag (\ s a -> s{_nInstanceTag = a})
-- | Google Cloud Platform network where the virtual machines are created.
-- Specify the short name, not the resource path.Defaults to default.
nName :: Lens' Network (Maybe Text)
nName = lens _nName (\ s a -> s{_nName = a})
-- A missing "forwardedPorts" key decodes to the empty list (.!= mempty).
instance FromJSON Network where
        parseJSON
          = withObject "Network"
              (\ o ->
                 Network' <$>
                   (o .:? "subnetworkName") <*>
                     (o .:? "forwardedPorts" .!= mempty)
                     <*> (o .:? "instanceTag")
                     <*> (o .:? "name"))
instance ToJSON Network where
        toJSON Network'{..}
          = object
              (catMaybes
                 [("subnetworkName" .=) <$> _nSubnetworkName,
                  ("forwardedPorts" .=) <$> _nForwardedPorts,
                  ("instanceTag" .=) <$> _nInstanceTag,
                  ("name" .=) <$> _nName])
-- | Request message for Instances.DebugInstance.
--
-- /See:/ 'debugInstanceRequest' smart constructor.
newtype DebugInstanceRequest = DebugInstanceRequest'
    { _dirSSHKey :: Maybe Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'DebugInstanceRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dirSSHKey'
debugInstanceRequest
    :: DebugInstanceRequest
debugInstanceRequest = DebugInstanceRequest' {_dirSSHKey = Nothing}
-- | Public SSH key to add to the instance. Examples: [USERNAME]:ssh-rsa
-- [KEY_VALUE] [USERNAME] [USERNAME]:ssh-rsa [KEY_VALUE] google-ssh
-- {\"userName\":\"[USERNAME]\",\"expireOn\":\"[EXPIRE_TIME]\"}For more
-- information, see Adding and Removing SSH Keys
-- (https:\/\/cloud.google.com\/compute\/docs\/instances\/adding-removing-ssh-keys).
dirSSHKey :: Lens' DebugInstanceRequest (Maybe Text)
dirSSHKey = lens _dirSSHKey update
  where
    update req key = req {_dirSSHKey = key}
instance FromJSON DebugInstanceRequest where
    parseJSON =
        withObject "DebugInstanceRequest" $ \o ->
            DebugInstanceRequest' <$> o .:? "sshKey"
instance ToJSON DebugInstanceRequest where
    toJSON req = object (catMaybes [("sshKey" .=) <$> _dirSSHKey req])
-- | HTTP headers to use for all responses from these URLs.
--
-- /See:/ 'staticFilesHandlerHTTPHeaders' smart constructor.
newtype StaticFilesHandlerHTTPHeaders = StaticFilesHandlerHTTPHeaders'
    { _sfhhttphAddtional :: HashMap Text Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'StaticFilesHandlerHTTPHeaders' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sfhhttphAddtional'
staticFilesHandlerHTTPHeaders
    :: HashMap Text Text -- ^ 'sfhhttphAddtional'
    -> StaticFilesHandlerHTTPHeaders
staticFilesHandlerHTTPHeaders pSfhhttphAddtional_ =
    StaticFilesHandlerHTTPHeaders'
    { _sfhhttphAddtional = _Coerce # pSfhhttphAddtional_
    }
-- | Map from HTTP header name to header value.
sfhhttphAddtional :: Lens' StaticFilesHandlerHTTPHeaders (HashMap Text Text)
sfhhttphAddtional
  = lens _sfhhttphAddtional
      (\ s a -> s{_sfhhttphAddtional = a})
      . _Coerce
-- The wrapper serialises as the bare header map (no extra nesting).
instance FromJSON StaticFilesHandlerHTTPHeaders where
        parseJSON
          = withObject "StaticFilesHandlerHTTPHeaders"
              (\ o ->
                 StaticFilesHandlerHTTPHeaders' <$>
                   (parseJSONObject o))
instance ToJSON StaticFilesHandlerHTTPHeaders where
        toJSON = toJSON . _sfhhttphAddtional
-- | Machine resources for a version.
--
-- /See:/ 'resources' smart constructor.
data Resources = Resources'
    { _rMemoryGb :: !(Maybe (Textual Double))
    , _rDiskGb :: !(Maybe (Textual Double))
    , _rVolumes :: !(Maybe [Volume])
    , _rCPU :: !(Maybe (Textual Double))
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Resources' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rMemoryGb'
--
-- * 'rDiskGb'
--
-- * 'rVolumes'
--
-- * 'rCPU'
resources
    :: Resources
resources =
    Resources'
    { _rMemoryGb = Nothing
    , _rDiskGb = Nothing
    , _rVolumes = Nothing
    , _rCPU = Nothing
    }
-- | Memory (GB) needed.
rMemoryGb :: Lens' Resources (Maybe Double)
rMemoryGb
  = lens _rMemoryGb (\ s a -> s{_rMemoryGb = a}) .
      mapping _Coerce
-- | Disk size (GB) needed.
rDiskGb :: Lens' Resources (Maybe Double)
rDiskGb
  = lens _rDiskGb (\ s a -> s{_rDiskGb = a}) .
      mapping _Coerce
-- | User specified volumes.
--
-- '_Default' presents an absent volume list as @[]@.
rVolumes :: Lens' Resources [Volume]
rVolumes
  = lens _rVolumes (\ s a -> s{_rVolumes = a}) .
      _Default
      . _Coerce
-- | Number of CPU cores needed.
rCPU :: Lens' Resources (Maybe Double)
rCPU
  = lens _rCPU (\ s a -> s{_rCPU = a}) .
      mapping _Coerce
-- A missing "volumes" key decodes to the empty list (.!= mempty).
instance FromJSON Resources where
        parseJSON
          = withObject "Resources"
              (\ o ->
                 Resources' <$>
                   (o .:? "memoryGb") <*> (o .:? "diskGb") <*>
                     (o .:? "volumes" .!= mempty)
                     <*> (o .:? "cpu"))
instance ToJSON Resources where
        toJSON Resources'{..}
          = object
              (catMaybes
                 [("memoryGb" .=) <$> _rMemoryGb,
                  ("diskGb" .=) <$> _rDiskGb,
                  ("volumes" .=) <$> _rVolumes, ("cpu" .=) <$> _rCPU])
-- | Manifest of the files stored in Google Cloud Storage that are included
-- as part of this version. All files must be readable using the
-- credentials supplied with this call.
--
-- /See:/ 'deploymentFiles' smart constructor.
newtype DeploymentFiles = DeploymentFiles'
    { _dfAddtional :: HashMap Text FileInfo
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeploymentFiles' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dfAddtional'
deploymentFiles
    :: HashMap Text FileInfo -- ^ 'dfAddtional'
    -> DeploymentFiles
deploymentFiles pDfAddtional_ =
    DeploymentFiles'
    { _dfAddtional = _Coerce # pDfAddtional_
    }
-- | One 'FileInfo' entry per included file, keyed by name; see the
-- type-level documentation above.
dfAddtional :: Lens' DeploymentFiles (HashMap Text FileInfo)
dfAddtional
  = lens _dfAddtional (\ s a -> s{_dfAddtional = a}) .
      _Coerce
-- The wrapper serialises as the bare file map (no extra nesting).
instance FromJSON DeploymentFiles where
        parseJSON
          = withObject "DeploymentFiles"
              (\ o -> DeploymentFiles' <$> (parseJSONObject o))
instance ToJSON DeploymentFiles where
        toJSON = toJSON . _dfAddtional
-- | Target scaling by CPU usage.
--
-- /See:/ 'cpuUtilization' smart constructor.
data CPUUtilization = CPUUtilization'
    { _cuAggregationWindowLength :: !(Maybe Text)
    , _cuTargetUtilization :: !(Maybe (Textual Double))
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CPUUtilization' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cuAggregationWindowLength'
--
-- * 'cuTargetUtilization'
cpuUtilization
    :: CPUUtilization
cpuUtilization =
    CPUUtilization'
    { _cuAggregationWindowLength = Nothing
    , _cuTargetUtilization = Nothing
    }
-- | Period of time over which CPU utilization is calculated.
cuAggregationWindowLength :: Lens' CPUUtilization (Maybe Text)
cuAggregationWindowLength
  = lens _cuAggregationWindowLength
      (\ s a -> s{_cuAggregationWindowLength = a})
-- | Target CPU utilization ratio to maintain when scaling. Must be between 0
-- and 1.
--
-- NOTE(review): the range constraint above is documented only; it is not
-- enforced by this lens.
cuTargetUtilization :: Lens' CPUUtilization (Maybe Double)
cuTargetUtilization
  = lens _cuTargetUtilization
      (\ s a -> s{_cuTargetUtilization = a})
      . mapping _Coerce
instance FromJSON CPUUtilization where
        parseJSON
          = withObject "CPUUtilization"
              (\ o ->
                 CPUUtilization' <$>
                   (o .:? "aggregationWindowLength") <*>
                     (o .:? "targetUtilization"))
instance ToJSON CPUUtilization where
        toJSON CPUUtilization'{..}
          = object
              (catMaybes
                 [("aggregationWindowLength" .=) <$>
                    _cuAggregationWindowLength,
                  ("targetUtilization" .=) <$> _cuTargetUtilization])
-- | Mapping from version IDs within the service to fractional (0.000, 1]
-- allocations of traffic for that version. Each version can be specified
-- only once, but some versions in the service may not have any traffic
-- allocation. Services that have traffic allocated cannot be deleted until
-- either the service is deleted or their traffic allocation is removed.
-- Allocations must sum to 1. Up to two decimal place precision is
-- supported for IP-based splits and up to three decimal places is
-- supported for cookie-based splits.
--
-- /See:/ 'trafficSplitAllocations' smart constructor.
newtype TrafficSplitAllocations = TrafficSplitAllocations'
    { _tsaAddtional :: HashMap Text (Textual Double)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TrafficSplitAllocations' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tsaAddtional'
trafficSplitAllocations
    :: HashMap Text Double -- ^ 'tsaAddtional'
    -> TrafficSplitAllocations
trafficSplitAllocations pTsaAddtional_ =
    TrafficSplitAllocations'
    { _tsaAddtional = _Coerce # pTsaAddtional_
    }
-- | Fraction of traffic allocated per version ID; see the type-level
-- documentation above for the allowed range and precision.
tsaAddtional :: Lens' TrafficSplitAllocations (HashMap Text Double)
tsaAddtional
  = lens _tsaAddtional (\ s a -> s{_tsaAddtional = a})
      . _Coerce
-- The wrapper serialises as the bare allocation map (no extra nesting).
instance FromJSON TrafficSplitAllocations where
        parseJSON
          = withObject "TrafficSplitAllocations"
              (\ o ->
                 TrafficSplitAllocations' <$> (parseJSONObject o))
instance ToJSON TrafficSplitAllocations where
        toJSON = toJSON . _tsaAddtional
-- | A service with manual scaling runs continuously, allowing you to perform
-- complex initialization and rely on the state of its memory over time.
--
-- /See:/ 'manualScaling' smart constructor.
newtype ManualScaling = ManualScaling'
    { _msInstances :: Maybe (Textual Int32)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ManualScaling' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'msInstances'
manualScaling
    :: ManualScaling
manualScaling = ManualScaling' {_msInstances = Nothing}
-- | Number of instances to assign to the service at the start. This number
-- can later be altered by using the Modules API
-- (https:\/\/cloud.google.com\/appengine\/docs\/python\/modules\/functions)
-- set_num_instances() function.
msInstances :: Lens' ManualScaling (Maybe Int32)
msInstances = lens _msInstances update . mapping _Coerce
  where
    -- Plain record-update setter; the coercion above hides the Textual wrapper.
    update scaling n = scaling {_msInstances = n}
instance FromJSON ManualScaling where
    parseJSON =
        withObject "ManualScaling" $ \o ->
            ManualScaling' <$> o .:? "instances"
instance ToJSON ManualScaling where
    toJSON scaling =
        object (catMaybes [("instances" .=) <$> _msInstances scaling])
-- | A service with basic scaling will create an instance when the
-- application receives a request. The instance will be turned down when
-- the app becomes idle. Basic scaling is ideal for work that is
-- intermittent or driven by user activity.
--
-- /See:/ 'basicScaling' smart constructor.
data BasicScaling = BasicScaling'
    { _bsMaxInstances :: !(Maybe (Textual Int32))
    , _bsIdleTimeout :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BasicScaling' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bsMaxInstances'
--
-- * 'bsIdleTimeout'
basicScaling
    :: BasicScaling
basicScaling =
    BasicScaling' {_bsMaxInstances = Nothing, _bsIdleTimeout = Nothing}
-- | Maximum number of instances to create for this version.
bsMaxInstances :: Lens' BasicScaling (Maybe Int32)
bsMaxInstances = lens _bsMaxInstances update . mapping _Coerce
  where
    update scaling n = scaling {_bsMaxInstances = n}
-- | Duration of time after the last request that an instance must wait
-- before the instance is shut down.
bsIdleTimeout :: Lens' BasicScaling (Maybe Text)
bsIdleTimeout = lens _bsIdleTimeout update
  where
    update scaling t = scaling {_bsIdleTimeout = t}
instance FromJSON BasicScaling where
    parseJSON =
        withObject "BasicScaling" $ \o ->
            BasicScaling'
                <$> o .:? "maxInstances"
                <*> o .:? "idleTimeout"
instance ToJSON BasicScaling where
    toJSON scaling =
        object
            (catMaybes
                 [ ("maxInstances" .=) <$> _bsMaxInstances scaling
                 , ("idleTimeout" .=) <$> _bsIdleTimeout scaling
                 ])
-- | Metadata for the given google.longrunning.Operation.
--
-- Fields documented as \'OutputOnly are populated by the server and are
-- ignored when sent in a request.
--
-- /See:/ 'operationMetadataV1' smart constructor.
data OperationMetadataV1 = OperationMetadataV1'
    { _omvEphemeralMessage :: !(Maybe Text)
    , _omvInsertTime :: !(Maybe Text)
    , _omvUser :: !(Maybe Text)
    , _omvMethod :: !(Maybe Text)
    , _omvEndTime :: !(Maybe Text)
    , _omvWarning :: !(Maybe [Text])
    , _omvTarget :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationMetadataV1' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omvEphemeralMessage'
--
-- * 'omvInsertTime'
--
-- * 'omvUser'
--
-- * 'omvMethod'
--
-- * 'omvEndTime'
--
-- * 'omvWarning'
--
-- * 'omvTarget'
operationMetadataV1
    :: OperationMetadataV1
operationMetadataV1 =
    OperationMetadataV1'
    { _omvEphemeralMessage = Nothing
    , _omvInsertTime = Nothing
    , _omvUser = Nothing
    , _omvMethod = Nothing
    , _omvEndTime = Nothing
    , _omvWarning = Nothing
    , _omvTarget = Nothing
    }
-- | Ephemeral message that may change every time the operation is polled.
-- \'OutputOnly
omvEphemeralMessage :: Lens' OperationMetadataV1 (Maybe Text)
omvEphemeralMessage
  = lens _omvEphemeralMessage
      (\ s a -> s{_omvEphemeralMessage = a})
-- | Time that this operation was created.\'OutputOnly
omvInsertTime :: Lens' OperationMetadataV1 (Maybe Text)
omvInsertTime
  = lens _omvInsertTime
      (\ s a -> s{_omvInsertTime = a})
-- | User who requested this operation.\'OutputOnly
omvUser :: Lens' OperationMetadataV1 (Maybe Text)
omvUser = lens _omvUser (\ s a -> s{_omvUser = a})
-- | API method that initiated this operation. Example:
-- google.appengine.v1.Versions.CreateVersion.\'OutputOnly
omvMethod :: Lens' OperationMetadataV1 (Maybe Text)
omvMethod
  = lens _omvMethod (\ s a -> s{_omvMethod = a})
-- | Time that this operation completed.\'OutputOnly
omvEndTime :: Lens' OperationMetadataV1 (Maybe Text)
omvEndTime
  = lens _omvEndTime (\ s a -> s{_omvEndTime = a})
-- | Durable messages that persist on every operation poll. \'OutputOnly
--
-- '_Default' presents an absent warning list as @[]@.
omvWarning :: Lens' OperationMetadataV1 [Text]
omvWarning
  = lens _omvWarning (\ s a -> s{_omvWarning = a}) .
      _Default
      . _Coerce
-- | Name of the resource that this operation is acting on. Example:
-- apps\/myapp\/services\/default.\'OutputOnly
omvTarget :: Lens' OperationMetadataV1 (Maybe Text)
omvTarget
  = lens _omvTarget (\ s a -> s{_omvTarget = a})
-- A missing "warning" key decodes to the empty list (.!= mempty).
instance FromJSON OperationMetadataV1 where
        parseJSON
          = withObject "OperationMetadataV1"
              (\ o ->
                 OperationMetadataV1' <$>
                   (o .:? "ephemeralMessage") <*> (o .:? "insertTime")
                     <*> (o .:? "user")
                     <*> (o .:? "method")
                     <*> (o .:? "endTime")
                     <*> (o .:? "warning" .!= mempty)
                     <*> (o .:? "target"))
instance ToJSON OperationMetadataV1 where
        toJSON OperationMetadataV1'{..}
          = object
              (catMaybes
                 [("ephemeralMessage" .=) <$> _omvEphemeralMessage,
                  ("insertTime" .=) <$> _omvInsertTime,
                  ("user" .=) <$> _omvUser,
                  ("method" .=) <$> _omvMethod,
                  ("endTime" .=) <$> _omvEndTime,
                  ("warning" .=) <$> _omvWarning,
                  ("target" .=) <$> _omvTarget])
-- | A Version resource is a specific set of source code and configuration
-- files that are deployed into a service.
--
-- /See:/ 'version' smart constructor.
data Version = Version'
    { _verRuntime :: !(Maybe Text)
    , _verNobuildFilesRegex :: !(Maybe Text)
    , _verInstanceClass :: !(Maybe Text)
    , _verHealthCheck :: !(Maybe HealthCheck)
    , _verEndpointsAPIService :: !(Maybe EndpointsAPIService)
    , _verEnv :: !(Maybe Text)
    , _verDefaultExpiration :: !(Maybe Text)
    , _verAutomaticScaling :: !(Maybe AutomaticScaling)
    , _verErrorHandlers :: !(Maybe [ErrorHandler])
    , _verCreatedBy :: !(Maybe Text)
    , _verVM :: !(Maybe Bool)
    , _verHandlers :: !(Maybe [URLMap])
    , _verInboundServices :: !(Maybe [Text])
    , _verNetwork :: !(Maybe Network)
    , _verResources :: !(Maybe Resources)
    , _verName :: !(Maybe Text)
    , _verThreadsafe :: !(Maybe Bool)
    , _verBetaSettings :: !(Maybe VersionBetaSettings)
    , _verBasicScaling :: !(Maybe BasicScaling)
    , _verManualScaling :: !(Maybe ManualScaling)
    , _verAPIConfig :: !(Maybe APIConfigHandler)
    , _verId :: !(Maybe Text)
    , _verEnvVariables :: !(Maybe VersionEnvVariables)
    , _verServingStatus :: !(Maybe Text)
    , _verDiskUsageBytes :: !(Maybe (Textual Int64))
    , _verCreateTime :: !(Maybe Text)
    , _verLibraries :: !(Maybe [Library])
    , _verVersionURL :: !(Maybe Text)
    , _verDeployment :: !(Maybe Deployment)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Version' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'verRuntime'
--
-- * 'verNobuildFilesRegex'
--
-- * 'verInstanceClass'
--
-- * 'verHealthCheck'
--
-- * 'verEndpointsAPIService'
--
-- * 'verEnv'
--
-- * 'verDefaultExpiration'
--
-- * 'verAutomaticScaling'
--
-- * 'verErrorHandlers'
--
-- * 'verCreatedBy'
--
-- * 'verVM'
--
-- * 'verHandlers'
--
-- * 'verInboundServices'
--
-- * 'verNetwork'
--
-- * 'verResources'
--
-- * 'verName'
--
-- * 'verThreadsafe'
--
-- * 'verBetaSettings'
--
-- * 'verBasicScaling'
--
-- * 'verManualScaling'
--
-- * 'verAPIConfig'
--
-- * 'verId'
--
-- * 'verEnvVariables'
--
-- * 'verServingStatus'
--
-- * 'verDiskUsageBytes'
--
-- * 'verCreateTime'
--
-- * 'verLibraries'
--
-- * 'verVersionURL'
--
-- * 'verDeployment'
version
    :: Version
version =
    -- Every field starts as 'Nothing'; populate via the lenses above.
    Version'
    { _verRuntime = Nothing
    , _verNobuildFilesRegex = Nothing
    , _verInstanceClass = Nothing
    , _verHealthCheck = Nothing
    , _verEndpointsAPIService = Nothing
    , _verEnv = Nothing
    , _verDefaultExpiration = Nothing
    , _verAutomaticScaling = Nothing
    , _verErrorHandlers = Nothing
    , _verCreatedBy = Nothing
    , _verVM = Nothing
    , _verHandlers = Nothing
    , _verInboundServices = Nothing
    , _verNetwork = Nothing
    , _verResources = Nothing
    , _verName = Nothing
    , _verThreadsafe = Nothing
    , _verBetaSettings = Nothing
    , _verBasicScaling = Nothing
    , _verManualScaling = Nothing
    , _verAPIConfig = Nothing
    , _verId = Nothing
    , _verEnvVariables = Nothing
    , _verServingStatus = Nothing
    , _verDiskUsageBytes = Nothing
    , _verCreateTime = Nothing
    , _verLibraries = Nothing
    , _verVersionURL = Nothing
    , _verDeployment = Nothing
    }
-- | Desired runtime. Example: python27.
verRuntime :: Lens' Version (Maybe Text)
verRuntime
  = lens _verRuntime (\ s a -> s{_verRuntime = a})
-- | Files that match this pattern will not be built into this version. Only
-- applicable for Go runtimes.Only returned in GET requests if view=FULL is
-- set.
verNobuildFilesRegex :: Lens' Version (Maybe Text)
verNobuildFilesRegex
  = lens _verNobuildFilesRegex
      (\ s a -> s{_verNobuildFilesRegex = a})
-- | Instance class that is used to run this version. Valid values are:
-- AutomaticScaling: F1, F2, F4, F4_1G ManualScaling or BasicScaling: B1,
-- B2, B4, B8, B4_1GDefaults to F1 for AutomaticScaling and B1 for
-- ManualScaling or BasicScaling.
verInstanceClass :: Lens' Version (Maybe Text)
verInstanceClass
  = lens _verInstanceClass
      (\ s a -> s{_verInstanceClass = a})
-- | Configures health checking for VM instances. Unhealthy instances are
-- stopped and replaced with new instances. Only applicable for VM
-- runtimes.Only returned in GET requests if view=FULL is set.
verHealthCheck :: Lens' Version (Maybe HealthCheck)
verHealthCheck
  = lens _verHealthCheck
      (\ s a -> s{_verHealthCheck = a})
-- | Cloud Endpoints configuration.If endpoints_api_service is set, the Cloud
-- Endpoints Extensible Service Proxy will be provided to serve the API
-- implemented by the app.
verEndpointsAPIService :: Lens' Version (Maybe EndpointsAPIService)
verEndpointsAPIService
  = lens _verEndpointsAPIService
      (\ s a -> s{_verEndpointsAPIService = a})
-- | App Engine execution environment for this version.Defaults to standard.
verEnv :: Lens' Version (Maybe Text)
verEnv = lens _verEnv (\ s a -> s{_verEnv = a})
-- | Duration that static files should be cached by web proxies and browsers.
-- Only applicable if the corresponding StaticFilesHandler
-- (https:\/\/cloud.google.com\/appengine\/docs\/admin-api\/reference\/rest\/v1\/apps.services.versions#staticfileshandler)
-- does not specify its own expiration time.Only returned in GET requests
-- if view=FULL is set.
verDefaultExpiration :: Lens' Version (Maybe Text)
verDefaultExpiration
  = lens _verDefaultExpiration
      (\ s a -> s{_verDefaultExpiration = a})
-- | Automatic scaling is based on request rate, response latencies, and
-- other application metrics.
verAutomaticScaling :: Lens' Version (Maybe AutomaticScaling)
verAutomaticScaling
  = lens _verAutomaticScaling
      (\ s a -> s{_verAutomaticScaling = a})
-- | Custom static error pages. Limited to 10KB per page.Only returned in GET
-- requests if view=FULL is set.
--
-- NOTE(review): list-typed lenses here compose '_Default', so an absent
-- ('Nothing') field reads as the empty list. The same applies to
-- 'verHandlers' and 'verInboundServices' below.
verErrorHandlers :: Lens' Version [ErrorHandler]
verErrorHandlers
  = lens _verErrorHandlers
      (\ s a -> s{_verErrorHandlers = a})
      . _Default
      . _Coerce
-- | Email address of the user who created this version.\'OutputOnly
verCreatedBy :: Lens' Version (Maybe Text)
verCreatedBy
  = lens _verCreatedBy (\ s a -> s{_verCreatedBy = a})
-- | Whether to deploy this version in a container on a virtual machine.
verVM :: Lens' Version (Maybe Bool)
verVM = lens _verVM (\ s a -> s{_verVM = a})
-- | An ordered list of URL-matching patterns that should be applied to
-- incoming requests. The first matching URL handles the request and other
-- request handlers are not attempted.Only returned in GET requests if
-- view=FULL is set.
verHandlers :: Lens' Version [URLMap]
verHandlers
  = lens _verHandlers (\ s a -> s{_verHandlers = a}) .
      _Default
      . _Coerce
-- | Before an application can receive email or XMPP messages, the
-- application must be configured to enable the service.
verInboundServices :: Lens' Version [Text]
verInboundServices
  = lens _verInboundServices
      (\ s a -> s{_verInboundServices = a})
      . _Default
      . _Coerce
-- | Extra network settings. Only applicable for VM runtimes.
verNetwork :: Lens' Version (Maybe Network)
verNetwork
  = lens _verNetwork (\ s a -> s{_verNetwork = a})
-- | Machine resources for this version. Only applicable for VM runtimes.
verResources :: Lens' Version (Maybe Resources)
verResources
  = lens _verResources (\ s a -> s{_verResources = a})
-- | Full path to the Version resource in the API. Example:
-- apps\/myapp\/services\/default\/versions\/v1.\'OutputOnly
verName :: Lens' Version (Maybe Text)
verName = lens _verName (\ s a -> s{_verName = a})
-- | Whether multiple requests can be dispatched to this version at once.
verThreadsafe :: Lens' Version (Maybe Bool)
verThreadsafe
  = lens _verThreadsafe
      (\ s a -> s{_verThreadsafe = a})
-- | Metadata settings that are supplied to this version to enable beta
-- runtime features.
verBetaSettings :: Lens' Version (Maybe VersionBetaSettings)
verBetaSettings
  = lens _verBetaSettings
      (\ s a -> s{_verBetaSettings = a})
-- | A service with basic scaling will create an instance when the
-- application receives a request. The instance will be turned down when
-- the app becomes idle. Basic scaling is ideal for work that is
-- intermittent or driven by user activity.
verBasicScaling :: Lens' Version (Maybe BasicScaling)
verBasicScaling
  = lens _verBasicScaling
      (\ s a -> s{_verBasicScaling = a})
-- | A service with manual scaling runs continuously, allowing you to perform
-- complex initialization and rely on the state of its memory over time.
verManualScaling :: Lens' Version (Maybe ManualScaling)
verManualScaling
  = lens _verManualScaling
      (\ s a -> s{_verManualScaling = a})
-- | Serving configuration for Google Cloud Endpoints
-- (https:\/\/cloud.google.com\/appengine\/docs\/python\/endpoints\/).Only
-- returned in GET requests if view=FULL is set.
verAPIConfig :: Lens' Version (Maybe APIConfigHandler)
verAPIConfig
  = lens _verAPIConfig (\ s a -> s{_verAPIConfig = a})
-- | Relative name of the version within the service. Example: v1. Version
-- names can contain only lowercase letters, numbers, or hyphens. Reserved
-- names: \"default\", \"latest\", and any name with the prefix \"ah-\".
verId :: Lens' Version (Maybe Text)
verId = lens _verId (\ s a -> s{_verId = a})
-- | Environment variables available to the application.Only returned in GET
-- requests if view=FULL is set.
verEnvVariables :: Lens' Version (Maybe VersionEnvVariables)
verEnvVariables
= lens _verEnvVariables
(\ s a -> s{_verEnvVariables = a})
-- | Current serving status of this version. Only the versions with a SERVING
-- status create instances and can be billed.SERVING_STATUS_UNSPECIFIED is
-- an invalid value. Defaults to SERVING.
verServingStatus :: Lens' Version (Maybe Text)
verServingStatus
= lens _verServingStatus
(\ s a -> s{_verServingStatus = a})
-- | Total size in bytes of all the files that are included in this version
-- and curerntly hosted on the App Engine disk.\'OutputOnly
verDiskUsageBytes :: Lens' Version (Maybe Int64)
verDiskUsageBytes
= lens _verDiskUsageBytes
(\ s a -> s{_verDiskUsageBytes = a})
. mapping _Coerce
-- | Time that this version was created.\'OutputOnly
verCreateTime :: Lens' Version (Maybe Text)
verCreateTime
= lens _verCreateTime
(\ s a -> s{_verCreateTime = a})
-- | Configuration for third-party Python runtime libraries that are required
-- by the application.Only returned in GET requests if view=FULL is set.
verLibraries :: Lens' Version [Library]
verLibraries
= lens _verLibraries (\ s a -> s{_verLibraries = a})
. _Default
. _Coerce
-- | Serving URL for this version. Example:
-- \"https:\/\/myversion-dot-myservice-dot-myapp.appspot.com\"\'OutputOnly
verVersionURL :: Lens' Version (Maybe Text)
verVersionURL
= lens _verVersionURL
(\ s a -> s{_verVersionURL = a})
-- | Code and application artifacts that make up this version.Only returned
-- in GET requests if view=FULL is set.
verDeployment :: Lens' Version (Maybe Deployment)
verDeployment
= lens _verDeployment
(\ s a -> s{_verDeployment = a})
-- Decodes a 'Version' from its JSON wire form.  The '<$>'\/'<*>' chain
-- must list the keys in exactly the order of the 'Version'' record
-- constructor's fields; list-valued keys default to 'mempty'
-- ('.!= mempty') when absent from the object.
instance FromJSON Version where
        parseJSON
          = withObject "Version"
              (\ o ->
                 Version' <$>
                   (o .:? "runtime") <*> (o .:? "nobuildFilesRegex") <*>
                     (o .:? "instanceClass")
                     <*> (o .:? "healthCheck")
                     <*> (o .:? "endpointsApiService")
                     <*> (o .:? "env")
                     <*> (o .:? "defaultExpiration")
                     <*> (o .:? "automaticScaling")
                     <*> (o .:? "errorHandlers" .!= mempty)
                     <*> (o .:? "createdBy")
                     <*> (o .:? "vm")
                     <*> (o .:? "handlers" .!= mempty)
                     <*> (o .:? "inboundServices" .!= mempty)
                     <*> (o .:? "network")
                     <*> (o .:? "resources")
                     <*> (o .:? "name")
                     <*> (o .:? "threadsafe")
                     <*> (o .:? "betaSettings")
                     <*> (o .:? "basicScaling")
                     <*> (o .:? "manualScaling")
                     <*> (o .:? "apiConfig")
                     <*> (o .:? "id")
                     <*> (o .:? "envVariables")
                     <*> (o .:? "servingStatus")
                     <*> (o .:? "diskUsageBytes")
                     <*> (o .:? "createTime")
                     <*> (o .:? "libraries" .!= mempty)
                     <*> (o .:? "versionUrl")
                     <*> (o .:? "deployment"))
-- Encodes a 'Version' for the wire.  'catMaybes' drops every field that
-- is 'Nothing', so unset optional fields are omitted from the object
-- rather than serialised as JSON null.
instance ToJSON Version where
        toJSON Version'{..}
          = object
              (catMaybes
                 [("runtime" .=) <$> _verRuntime,
                  ("nobuildFilesRegex" .=) <$> _verNobuildFilesRegex,
                  ("instanceClass" .=) <$> _verInstanceClass,
                  ("healthCheck" .=) <$> _verHealthCheck,
                  ("endpointsApiService" .=) <$>
                    _verEndpointsAPIService,
                  ("env" .=) <$> _verEnv,
                  ("defaultExpiration" .=) <$> _verDefaultExpiration,
                  ("automaticScaling" .=) <$> _verAutomaticScaling,
                  ("errorHandlers" .=) <$> _verErrorHandlers,
                  ("createdBy" .=) <$> _verCreatedBy,
                  ("vm" .=) <$> _verVM,
                  ("handlers" .=) <$> _verHandlers,
                  ("inboundServices" .=) <$> _verInboundServices,
                  ("network" .=) <$> _verNetwork,
                  ("resources" .=) <$> _verResources,
                  ("name" .=) <$> _verName,
                  ("threadsafe" .=) <$> _verThreadsafe,
                  ("betaSettings" .=) <$> _verBetaSettings,
                  ("basicScaling" .=) <$> _verBasicScaling,
                  ("manualScaling" .=) <$> _verManualScaling,
                  ("apiConfig" .=) <$> _verAPIConfig,
                  ("id" .=) <$> _verId,
                  ("envVariables" .=) <$> _verEnvVariables,
                  ("servingStatus" .=) <$> _verServingStatus,
                  ("diskUsageBytes" .=) <$> _verDiskUsageBytes,
                  ("createTime" .=) <$> _verCreateTime,
                  ("libraries" .=) <$> _verLibraries,
                  ("versionUrl" .=) <$> _verVersionURL,
                  ("deployment" .=) <$> _verDeployment])
-- | Files served directly to the user for a given URL, such as images, CSS
-- stylesheets, or JavaScript source files. Static file handlers describe
-- which files in the application directory are static files, and which
-- URLs serve them.
--
-- /See:/ 'staticFilesHandler' smart constructor.
data StaticFilesHandler = StaticFilesHandler'
{ _sfhHTTPHeaders :: !(Maybe StaticFilesHandlerHTTPHeaders)
, _sfhPath :: !(Maybe Text)
, _sfhRequireMatchingFile :: !(Maybe Bool)
, _sfhExpiration :: !(Maybe Text)
, _sfhMimeType :: !(Maybe Text)
, _sfhApplicationReadable :: !(Maybe Bool)
, _sfhUploadPathRegex :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'StaticFilesHandler' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sfhHTTPHeaders'
--
-- * 'sfhPath'
--
-- * 'sfhRequireMatchingFile'
--
-- * 'sfhExpiration'
--
-- * 'sfhMimeType'
--
-- * 'sfhApplicationReadable'
--
-- * 'sfhUploadPathRegex'
staticFilesHandler
:: StaticFilesHandler
staticFilesHandler =
StaticFilesHandler'
{ _sfhHTTPHeaders = Nothing
, _sfhPath = Nothing
, _sfhRequireMatchingFile = Nothing
, _sfhExpiration = Nothing
, _sfhMimeType = Nothing
, _sfhApplicationReadable = Nothing
, _sfhUploadPathRegex = Nothing
}
-- | HTTP headers to use for all responses from these URLs.
sfhHTTPHeaders :: Lens' StaticFilesHandler (Maybe StaticFilesHandlerHTTPHeaders)
sfhHTTPHeaders
= lens _sfhHTTPHeaders
(\ s a -> s{_sfhHTTPHeaders = a})
-- | Path to the static files matched by the URL pattern, from the
-- application root directory. The path can refer to text matched in
-- groupings in the URL pattern.
sfhPath :: Lens' StaticFilesHandler (Maybe Text)
sfhPath = lens _sfhPath (\ s a -> s{_sfhPath = a})
-- | Whether this handler should match the request if the file referenced by
-- the handler does not exist.
sfhRequireMatchingFile :: Lens' StaticFilesHandler (Maybe Bool)
sfhRequireMatchingFile
= lens _sfhRequireMatchingFile
(\ s a -> s{_sfhRequireMatchingFile = a})
-- | Time a static file served by this handler should be cached by web
-- proxies and browsers.
sfhExpiration :: Lens' StaticFilesHandler (Maybe Text)
sfhExpiration
= lens _sfhExpiration
(\ s a -> s{_sfhExpiration = a})
-- | MIME type used to serve all files served by this handler.Defaults to
-- file-specific MIME types, which are derived from each file\'s filename
-- extension.
sfhMimeType :: Lens' StaticFilesHandler (Maybe Text)
sfhMimeType
= lens _sfhMimeType (\ s a -> s{_sfhMimeType = a})
-- | Whether files should also be uploaded as code data. By default, files
-- declared in static file handlers are uploaded as static data and are
-- only served to end users; they cannot be read by the application. If
-- enabled, uploads are charged against both your code and static data
-- storage resource quotas.
sfhApplicationReadable :: Lens' StaticFilesHandler (Maybe Bool)
sfhApplicationReadable
= lens _sfhApplicationReadable
(\ s a -> s{_sfhApplicationReadable = a})
-- | Regular expression that matches the file paths for all files that should
-- be referenced by this handler.
sfhUploadPathRegex :: Lens' StaticFilesHandler (Maybe Text)
sfhUploadPathRegex
= lens _sfhUploadPathRegex
(\ s a -> s{_sfhUploadPathRegex = a})
instance FromJSON StaticFilesHandler where
parseJSON
= withObject "StaticFilesHandler"
(\ o ->
StaticFilesHandler' <$>
(o .:? "httpHeaders") <*> (o .:? "path") <*>
(o .:? "requireMatchingFile")
<*> (o .:? "expiration")
<*> (o .:? "mimeType")
<*> (o .:? "applicationReadable")
<*> (o .:? "uploadPathRegex"))
instance ToJSON StaticFilesHandler where
toJSON StaticFilesHandler'{..}
= object
(catMaybes
[("httpHeaders" .=) <$> _sfhHTTPHeaders,
("path" .=) <$> _sfhPath,
("requireMatchingFile" .=) <$>
_sfhRequireMatchingFile,
("expiration" .=) <$> _sfhExpiration,
("mimeType" .=) <$> _sfhMimeType,
("applicationReadable" .=) <$>
_sfhApplicationReadable,
("uploadPathRegex" .=) <$> _sfhUploadPathRegex])
-- | Custom static error page to be served when an error occurs.
--
-- /See:/ 'errorHandler' smart constructor.
data ErrorHandler = ErrorHandler'
    { _ehMimeType   :: !(Maybe Text) -- ^ MIME type of the error page.
    , _ehErrorCode  :: !(Maybe Text) -- ^ Error condition this page covers.
    , _ehStaticFile :: !(Maybe Text) -- ^ Static content served on error.
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ErrorHandler' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ehMimeType'
--
-- * 'ehErrorCode'
--
-- * 'ehStaticFile'
errorHandler
    :: ErrorHandler
errorHandler =
    ErrorHandler'
    { _ehMimeType = Nothing
    , _ehErrorCode = Nothing
    , _ehStaticFile = Nothing
    }

-- | MIME type of file. Defaults to text\/html.
ehMimeType :: Lens' ErrorHandler (Maybe Text)
ehMimeType
  = lens _ehMimeType (\ s a -> s{_ehMimeType = a})

-- | Error condition this handler applies to.
ehErrorCode :: Lens' ErrorHandler (Maybe Text)
ehErrorCode
  = lens _ehErrorCode (\ s a -> s{_ehErrorCode = a})

-- | Static file content to be served for this error.
ehStaticFile :: Lens' ErrorHandler (Maybe Text)
ehStaticFile
  = lens _ehStaticFile (\ s a -> s{_ehStaticFile = a})

-- Keys must stay in the order of the 'ErrorHandler'' constructor fields.
instance FromJSON ErrorHandler where
        parseJSON
          = withObject "ErrorHandler"
              (\ o ->
                 ErrorHandler' <$>
                   (o .:? "mimeType") <*> (o .:? "errorCode") <*>
                     (o .:? "staticFile"))

-- 'catMaybes' omits unset optional fields instead of emitting nulls.
instance ToJSON ErrorHandler where
        toJSON ErrorHandler'{..}
          = object
              (catMaybes
                 [("mimeType" .=) <$> _ehMimeType,
                  ("errorCode" .=) <$> _ehErrorCode,
                  ("staticFile" .=) <$> _ehStaticFile])
-- | Cross-service attributes for the location. For example
-- {\"cloud.googleapis.com\/region\": \"us-east1\"}
--
-- /See:/ 'locationLabels' smart constructor.
newtype LocationLabels = LocationLabels'
{ _llAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'LocationLabels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llAddtional'
locationLabels
:: HashMap Text Text -- ^ 'llAddtional'
-> LocationLabels
locationLabels pLlAddtional_ =
LocationLabels'
{ _llAddtional = _Coerce # pLlAddtional_
}
llAddtional :: Lens' LocationLabels (HashMap Text Text)
llAddtional
= lens _llAddtional (\ s a -> s{_llAddtional = a}) .
_Coerce
instance FromJSON LocationLabels where
parseJSON
= withObject "LocationLabels"
(\ o -> LocationLabels' <$> (parseJSONObject o))
instance ToJSON LocationLabels where
toJSON = toJSON . _llAddtional
-- | Metadata for the given google.cloud.location.Location.
--
-- /See:/ 'locationMetadata' smart constructor.
data LocationMetadata = LocationMetadata'
{ _lmStandardEnvironmentAvailable :: !(Maybe Bool)
, _lmFlexibleEnvironmentAvailable :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'LocationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lmStandardEnvironmentAvailable'
--
-- * 'lmFlexibleEnvironmentAvailable'
locationMetadata
:: LocationMetadata
locationMetadata =
LocationMetadata'
{ _lmStandardEnvironmentAvailable = Nothing
, _lmFlexibleEnvironmentAvailable = Nothing
}
-- | App Engine Standard Environment is available in the given
-- location.\'OutputOnly
lmStandardEnvironmentAvailable :: Lens' LocationMetadata (Maybe Bool)
lmStandardEnvironmentAvailable
= lens _lmStandardEnvironmentAvailable
(\ s a -> s{_lmStandardEnvironmentAvailable = a})
-- | App Engine Flexible Environment is available in the given
-- location.\'OutputOnly
lmFlexibleEnvironmentAvailable :: Lens' LocationMetadata (Maybe Bool)
lmFlexibleEnvironmentAvailable
= lens _lmFlexibleEnvironmentAvailable
(\ s a -> s{_lmFlexibleEnvironmentAvailable = a})
instance FromJSON LocationMetadata where
parseJSON
= withObject "LocationMetadata"
(\ o ->
LocationMetadata' <$>
(o .:? "standardEnvironmentAvailable") <*>
(o .:? "flexibleEnvironmentAvailable"))
instance ToJSON LocationMetadata where
toJSON LocationMetadata'{..}
= object
(catMaybes
[("standardEnvironmentAvailable" .=) <$>
_lmStandardEnvironmentAvailable,
("flexibleEnvironmentAvailable" .=) <$>
_lmFlexibleEnvironmentAvailable])
-- | Metadata for the given google.longrunning.Operation.
--
-- /See:/ 'operationMetadata' smart constructor.
data OperationMetadata = OperationMetadata'
{ _omInsertTime :: !(Maybe Text)
, _omUser :: !(Maybe Text)
, _omMethod :: !(Maybe Text)
, _omEndTime :: !(Maybe Text)
, _omOperationType :: !(Maybe Text)
, _omTarget :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omInsertTime'
--
-- * 'omUser'
--
-- * 'omMethod'
--
-- * 'omEndTime'
--
-- * 'omOperationType'
--
-- * 'omTarget'
operationMetadata
:: OperationMetadata
operationMetadata =
OperationMetadata'
{ _omInsertTime = Nothing
, _omUser = Nothing
, _omMethod = Nothing
, _omEndTime = Nothing
, _omOperationType = Nothing
, _omTarget = Nothing
}
-- | Timestamp that this operation was created.\'OutputOnly
omInsertTime :: Lens' OperationMetadata (Maybe Text)
omInsertTime
= lens _omInsertTime (\ s a -> s{_omInsertTime = a})
-- | User who requested this operation.\'OutputOnly
omUser :: Lens' OperationMetadata (Maybe Text)
omUser = lens _omUser (\ s a -> s{_omUser = a})
-- | API method that initiated this operation. Example:
-- google.appengine.v1beta4.Version.CreateVersion.\'OutputOnly
omMethod :: Lens' OperationMetadata (Maybe Text)
omMethod = lens _omMethod (\ s a -> s{_omMethod = a})
-- | Timestamp that this operation completed.\'OutputOnly
omEndTime :: Lens' OperationMetadata (Maybe Text)
omEndTime
= lens _omEndTime (\ s a -> s{_omEndTime = a})
-- | Type of this operation. Deprecated, use method field instead. Example:
-- \"create_version\".\'OutputOnly
omOperationType :: Lens' OperationMetadata (Maybe Text)
omOperationType
= lens _omOperationType
(\ s a -> s{_omOperationType = a})
-- | Name of the resource that this operation is acting on. Example:
-- apps\/myapp\/modules\/default.\'OutputOnly
omTarget :: Lens' OperationMetadata (Maybe Text)
omTarget = lens _omTarget (\ s a -> s{_omTarget = a})
instance FromJSON OperationMetadata where
parseJSON
= withObject "OperationMetadata"
(\ o ->
OperationMetadata' <$>
(o .:? "insertTime") <*> (o .:? "user") <*>
(o .:? "method")
<*> (o .:? "endTime")
<*> (o .:? "operationType")
<*> (o .:? "target"))
instance ToJSON OperationMetadata where
toJSON OperationMetadata'{..}
= object
(catMaybes
[("insertTime" .=) <$> _omInsertTime,
("user" .=) <$> _omUser, ("method" .=) <$> _omMethod,
("endTime" .=) <$> _omEndTime,
("operationType" .=) <$> _omOperationType,
("target" .=) <$> _omTarget])
-- | Response message for Instances.ListInstances.
--
-- /See:/ 'listInstancesResponse' smart constructor.
data ListInstancesResponse = ListInstancesResponse'
{ _lirNextPageToken :: !(Maybe Text)
, _lirInstances :: !(Maybe [Instance])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListInstancesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lirNextPageToken'
--
-- * 'lirInstances'
listInstancesResponse
:: ListInstancesResponse
listInstancesResponse =
ListInstancesResponse'
{ _lirNextPageToken = Nothing
, _lirInstances = Nothing
}
-- | Continuation token for fetching the next page of results.
lirNextPageToken :: Lens' ListInstancesResponse (Maybe Text)
lirNextPageToken
= lens _lirNextPageToken
(\ s a -> s{_lirNextPageToken = a})
-- | The instances belonging to the requested version.
lirInstances :: Lens' ListInstancesResponse [Instance]
lirInstances
= lens _lirInstances (\ s a -> s{_lirInstances = a})
. _Default
. _Coerce
instance FromJSON ListInstancesResponse where
parseJSON
= withObject "ListInstancesResponse"
(\ o ->
ListInstancesResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "instances" .!= mempty))
instance ToJSON ListInstancesResponse where
toJSON ListInstancesResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lirNextPageToken,
("instances" .=) <$> _lirInstances])
-- | Target scaling by request utilization. Only applicable for VM runtimes.
--
-- /See:/ 'requestUtilization' smart constructor.
data RequestUtilization = RequestUtilization'
{ _ruTargetConcurrentRequests :: !(Maybe (Textual Int32))
, _ruTargetRequestCountPerSecond :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RequestUtilization' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ruTargetConcurrentRequests'
--
-- * 'ruTargetRequestCountPerSecond'
requestUtilization
:: RequestUtilization
requestUtilization =
RequestUtilization'
{ _ruTargetConcurrentRequests = Nothing
, _ruTargetRequestCountPerSecond = Nothing
}
-- | Target number of concurrent requests.
ruTargetConcurrentRequests :: Lens' RequestUtilization (Maybe Int32)
ruTargetConcurrentRequests
= lens _ruTargetConcurrentRequests
(\ s a -> s{_ruTargetConcurrentRequests = a})
. mapping _Coerce
-- | Target requests per second.
ruTargetRequestCountPerSecond :: Lens' RequestUtilization (Maybe Int32)
ruTargetRequestCountPerSecond
= lens _ruTargetRequestCountPerSecond
(\ s a -> s{_ruTargetRequestCountPerSecond = a})
. mapping _Coerce
instance FromJSON RequestUtilization where
parseJSON
= withObject "RequestUtilization"
(\ o ->
RequestUtilization' <$>
(o .:? "targetConcurrentRequests") <*>
(o .:? "targetRequestCountPerSecond"))
instance ToJSON RequestUtilization where
toJSON RequestUtilization'{..}
= object
(catMaybes
[("targetConcurrentRequests" .=) <$>
_ruTargetConcurrentRequests,
("targetRequestCountPerSecond" .=) <$>
_ruTargetRequestCountPerSecond])
-- | Request message for \'Applications.RepairApplication\'.
--
-- /See:/ 'repairApplicationRequest' smart constructor.
data RepairApplicationRequest =
    RepairApplicationRequest'
    deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'RepairApplicationRequest' with the minimum fields required to make a request.
--
repairApplicationRequest
    :: RepairApplicationRequest
repairApplicationRequest = RepairApplicationRequest'

-- The request carries no fields: decoding ignores the object's contents
-- and encoding always produces an empty JSON object.
instance FromJSON RepairApplicationRequest where
        parseJSON
          = withObject "RepairApplicationRequest"
              (\ o -> pure RepairApplicationRequest')

instance ToJSON RepairApplicationRequest where
        toJSON = const emptyObject
-- | The normal response of the operation in case of success. If the original
-- method returns no data on success, such as Delete, the response is
-- google.protobuf.Empty. If the original method is standard
-- Get\/Create\/Update, the response should be the resource. For other
-- methods, the response should have the type XxxResponse, where Xxx is the
-- original method name. For example, if the original method name is
-- TakeSnapshot(), the inferred response type is TakeSnapshotResponse.
--
-- /See:/ 'operationResponse' smart constructor.
newtype OperationResponse = OperationResponse'
{ _orAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'orAddtional'
operationResponse
:: HashMap Text JSONValue -- ^ 'orAddtional'
-> OperationResponse
operationResponse pOrAddtional_ =
OperationResponse'
{ _orAddtional = _Coerce # pOrAddtional_
}
-- | Properties of the object. Contains field \'type with type URL.
orAddtional :: Lens' OperationResponse (HashMap Text JSONValue)
orAddtional
= lens _orAddtional (\ s a -> s{_orAddtional = a}) .
_Coerce
instance FromJSON OperationResponse where
parseJSON
= withObject "OperationResponse"
(\ o -> OperationResponse' <$> (parseJSONObject o))
instance ToJSON OperationResponse where
toJSON = toJSON . _orAddtional
-- | Docker image that is used to start a VM container for the version you
-- deploy.
--
-- /See:/ 'containerInfo' smart constructor.
newtype ContainerInfo = ContainerInfo'
    { _ciImage :: Maybe Text -- ^ Fully-qualified image URI (tag or digest).
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ContainerInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ciImage'
containerInfo
    :: ContainerInfo
containerInfo =
    ContainerInfo'
    { _ciImage = Nothing
    }

-- | URI to the hosted container image in a Docker repository. The URI must
-- be fully qualified and include a tag or digest. Examples:
-- \"gcr.io\/my-project\/image:tag\" or
-- \"gcr.io\/my-project\/image\'digest\"
ciImage :: Lens' ContainerInfo (Maybe Text)
ciImage = lens _ciImage (\ s a -> s{_ciImage = a})

instance FromJSON ContainerInfo where
        parseJSON
          = withObject "ContainerInfo"
              (\ o -> ContainerInfo' <$> (o .:? "image"))

-- Omits the "image" key entirely when '_ciImage' is 'Nothing'.
instance ToJSON ContainerInfo where
        toJSON ContainerInfo'{..}
          = object (catMaybes [("image" .=) <$> _ciImage])
-- | An Instance resource is the computing unit that App Engine uses to
-- automatically scale an application.
--
-- /See:/ 'instance'' smart constructor.
data Instance = Instance'
{ _iMemoryUsage :: !(Maybe (Textual Int64))
, _iVMStatus :: !(Maybe Text)
, _iVMZoneName :: !(Maybe Text)
, _iVMIP :: !(Maybe Text)
, _iStartTime :: !(Maybe Text)
, _iVMId :: !(Maybe Text)
, _iAvailability :: !(Maybe Text)
, _iVMName :: !(Maybe Text)
, _iName :: !(Maybe Text)
, _iVMDebugEnabled :: !(Maybe Bool)
, _iRequests :: !(Maybe (Textual Int32))
, _iQps :: !(Maybe (Textual Double))
, _iId :: !(Maybe Text)
, _iErrors :: !(Maybe (Textual Int32))
, _iAverageLatency :: !(Maybe (Textual Int32))
, _iAppEngineRelease :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Instance' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'iMemoryUsage'
--
-- * 'iVMStatus'
--
-- * 'iVMZoneName'
--
-- * 'iVMIP'
--
-- * 'iStartTime'
--
-- * 'iVMId'
--
-- * 'iAvailability'
--
-- * 'iVMName'
--
-- * 'iName'
--
-- * 'iVMDebugEnabled'
--
-- * 'iRequests'
--
-- * 'iQps'
--
-- * 'iId'
--
-- * 'iErrors'
--
-- * 'iAverageLatency'
--
-- * 'iAppEngineRelease'
instance'
:: Instance
instance' =
Instance'
{ _iMemoryUsage = Nothing
, _iVMStatus = Nothing
, _iVMZoneName = Nothing
, _iVMIP = Nothing
, _iStartTime = Nothing
, _iVMId = Nothing
, _iAvailability = Nothing
, _iVMName = Nothing
, _iName = Nothing
, _iVMDebugEnabled = Nothing
, _iRequests = Nothing
, _iQps = Nothing
, _iId = Nothing
, _iErrors = Nothing
, _iAverageLatency = Nothing
, _iAppEngineRelease = Nothing
}
-- | Total memory in use (bytes).\'OutputOnly
iMemoryUsage :: Lens' Instance (Maybe Int64)
iMemoryUsage
= lens _iMemoryUsage (\ s a -> s{_iMemoryUsage = a})
. mapping _Coerce
-- | Status of the virtual machine where this instance lives. Only applicable
-- for instances in App Engine flexible environment.\'OutputOnly
iVMStatus :: Lens' Instance (Maybe Text)
iVMStatus
= lens _iVMStatus (\ s a -> s{_iVMStatus = a})
-- | Zone where the virtual machine is located. Only applicable for instances
-- in App Engine flexible environment.\'OutputOnly
iVMZoneName :: Lens' Instance (Maybe Text)
iVMZoneName
= lens _iVMZoneName (\ s a -> s{_iVMZoneName = a})
-- | The IP address of this instance. Only applicable for instances in App
-- Engine flexible environment.\'OutputOnly
iVMIP :: Lens' Instance (Maybe Text)
iVMIP = lens _iVMIP (\ s a -> s{_iVMIP = a})
-- | Time that this instance was started.\'OutputOnly
iStartTime :: Lens' Instance (Maybe Text)
iStartTime
= lens _iStartTime (\ s a -> s{_iStartTime = a})
-- | Virtual machine ID of this instance. Only applicable for instances in
-- App Engine flexible environment.\'OutputOnly
iVMId :: Lens' Instance (Maybe Text)
iVMId = lens _iVMId (\ s a -> s{_iVMId = a})
-- | Availability of the instance.\'OutputOnly
iAvailability :: Lens' Instance (Maybe Text)
iAvailability
= lens _iAvailability
(\ s a -> s{_iAvailability = a})
-- | Name of the virtual machine where this instance lives. Only applicable
-- for instances in App Engine flexible environment.\'OutputOnly
iVMName :: Lens' Instance (Maybe Text)
iVMName = lens _iVMName (\ s a -> s{_iVMName = a})
-- | Full path to the Instance resource in the API. Example:
-- apps\/myapp\/services\/default\/versions\/v1\/instances\/instance-1.\'OutputOnly
iName :: Lens' Instance (Maybe Text)
iName = lens _iName (\ s a -> s{_iName = a})
-- | Whether this instance is in debug mode. Only applicable for instances in
-- App Engine flexible environment.\'OutputOnly
iVMDebugEnabled :: Lens' Instance (Maybe Bool)
iVMDebugEnabled
= lens _iVMDebugEnabled
(\ s a -> s{_iVMDebugEnabled = a})
-- | Number of requests since this instance was started.\'OutputOnly
iRequests :: Lens' Instance (Maybe Int32)
iRequests
= lens _iRequests (\ s a -> s{_iRequests = a}) .
mapping _Coerce
-- | Average queries per second (QPS) over the last minute.\'OutputOnly
iQps :: Lens' Instance (Maybe Double)
iQps
= lens _iQps (\ s a -> s{_iQps = a}) .
mapping _Coerce
-- | Relative name of the instance within the version. Example:
-- instance-1.\'OutputOnly
iId :: Lens' Instance (Maybe Text)
iId = lens _iId (\ s a -> s{_iId = a})
-- | Number of errors since this instance was started.\'OutputOnly
iErrors :: Lens' Instance (Maybe Int32)
iErrors
= lens _iErrors (\ s a -> s{_iErrors = a}) .
mapping _Coerce
-- | Average latency (ms) over the last minute.\'OutputOnly
iAverageLatency :: Lens' Instance (Maybe Int32)
iAverageLatency
= lens _iAverageLatency
(\ s a -> s{_iAverageLatency = a})
. mapping _Coerce
-- | App Engine release this instance is running on.\'OutputOnly
iAppEngineRelease :: Lens' Instance (Maybe Text)
iAppEngineRelease
= lens _iAppEngineRelease
(\ s a -> s{_iAppEngineRelease = a})
instance FromJSON Instance where
parseJSON
= withObject "Instance"
(\ o ->
Instance' <$>
(o .:? "memoryUsage") <*> (o .:? "vmStatus") <*>
(o .:? "vmZoneName")
<*> (o .:? "vmIp")
<*> (o .:? "startTime")
<*> (o .:? "vmId")
<*> (o .:? "availability")
<*> (o .:? "vmName")
<*> (o .:? "name")
<*> (o .:? "vmDebugEnabled")
<*> (o .:? "requests")
<*> (o .:? "qps")
<*> (o .:? "id")
<*> (o .:? "errors")
<*> (o .:? "averageLatency")
<*> (o .:? "appEngineRelease"))
instance ToJSON Instance where
toJSON Instance'{..}
= object
(catMaybes
[("memoryUsage" .=) <$> _iMemoryUsage,
("vmStatus" .=) <$> _iVMStatus,
("vmZoneName" .=) <$> _iVMZoneName,
("vmIp" .=) <$> _iVMIP,
("startTime" .=) <$> _iStartTime,
("vmId" .=) <$> _iVMId,
("availability" .=) <$> _iAvailability,
("vmName" .=) <$> _iVMName, ("name" .=) <$> _iName,
("vmDebugEnabled" .=) <$> _iVMDebugEnabled,
("requests" .=) <$> _iRequests, ("qps" .=) <$> _iQps,
("id" .=) <$> _iId, ("errors" .=) <$> _iErrors,
("averageLatency" .=) <$> _iAverageLatency,
("appEngineRelease" .=) <$> _iAppEngineRelease])
-- | Code and application artifacts used to deploy a version to App Engine.
--
-- /See:/ 'deployment' smart constructor.
data Deployment = Deployment'
{ _dZip :: !(Maybe ZipInfo)
, _dContainer :: !(Maybe ContainerInfo)
, _dFiles :: !(Maybe DeploymentFiles)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Deployment' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dZip'
--
-- * 'dContainer'
--
-- * 'dFiles'
deployment
:: Deployment
deployment =
Deployment'
{ _dZip = Nothing
, _dContainer = Nothing
, _dFiles = Nothing
}
-- | The zip file for this deployment, if this is a zip deployment.
dZip :: Lens' Deployment (Maybe ZipInfo)
dZip = lens _dZip (\ s a -> s{_dZip = a})
-- | A Docker image that App Engine uses to run the version. Only applicable
-- for instances in App Engine flexible environment.
dContainer :: Lens' Deployment (Maybe ContainerInfo)
dContainer
= lens _dContainer (\ s a -> s{_dContainer = a})
-- | Manifest of the files stored in Google Cloud Storage that are included
-- as part of this version. All files must be readable using the
-- credentials supplied with this call.
dFiles :: Lens' Deployment (Maybe DeploymentFiles)
dFiles = lens _dFiles (\ s a -> s{_dFiles = a})
instance FromJSON Deployment where
parseJSON
= withObject "Deployment"
(\ o ->
Deployment' <$>
(o .:? "zip") <*> (o .:? "container") <*>
(o .:? "files"))
instance ToJSON Deployment where
toJSON Deployment'{..}
= object
(catMaybes
[("zip" .=) <$> _dZip,
("container" .=) <$> _dContainer,
("files" .=) <$> _dFiles])
|
rueshyna/gogol
|
gogol-appengine/gen/Network/Google/AppEngine/Types/Product.hs
|
mpl-2.0
| 134,660
| 0
| 39
| 32,887
| 24,424
| 14,069
| 10,355
| 2,664
| 1
|
{-# LANGUAGE OverloadedStrings, TemplateHaskell, TypeOperators #-}
module Sparkle.Routes
( Sitemap(..)
, sitemap
) where
import Prelude hiding ((.), id)
import Control.Category ((.), {-id-})
import Text.Boomerang.TH (makeBoomerangs)
import Web.Routes.Boomerang
import qualified Sparkle.API.Routes as API
-- | Top-level site map: the root page plus the versioned API subtree.
data Sitemap = Home | API API.Sitemap
    deriving (Eq, Read, Show)

-- Template Haskell: generates the 'rHome' and 'rAPI' boomerang
-- combinators used in 'sitemap' below.
$(makeBoomerangs ''Sitemap)

-- | Bidirectional router: 'Home' maps to the empty path, and 'API'
-- maps to everything under \/api\/v0.
sitemap :: Router () (Sitemap :- ())
sitemap
    = rHome
   <> rAPI . ("api" </> "v0" </> API.sitemap)
|
lfairy/sparkle
|
Sparkle/Routes.hs
|
agpl-3.0
| 522
| 0
| 8
| 94
| 161
| 97
| 64
| -1
| -1
|
module Parse where
import Text.ParserCombinators.Parsec hiding (spaces)
import Numeric(readHex, readOct, readFloat)
import Control.Monad
import Control.Monad.Error
import AST
import Error
-- | Punctuation characters (beyond letters and digits) permitted in a
-- Scheme identifier.
symbol :: Parser Char
symbol = oneOf "!$%&|*+-/:<=?>@^_~#"

-- | One or more whitespace characters.  Shadows Parsec's 'spaces', which
-- accepts zero (hence the @hiding (spaces)@ in the import).
spaces :: Parser ()
spaces = skipMany1 space
-- | Parse a single expression from a string, wrapping any Parsec failure
-- in a 'Parser' error.  Defined via 'readOrThrow' so the error-handling
-- logic lives in one place; it was previously duplicated inline here.
readExpr :: String -> ThrowsError LispVal
readExpr = readOrThrow parseExpr
-- | Parse a whitespace-separated sequence of expressions, as found in a
-- program file.
readExprList :: String -> ThrowsError [LispVal]
readExprList = readOrThrow (endBy parseExpr spaces)
-- | Parse a backslash escape inside a string literal.  Recognised
-- escapes are \\\", \\\\, \\n, \\t and \\r; quote and backslash stand
-- for themselves.
--
-- Bug fix: @'r'@ was accepted by the 'oneOf' but missing from the
-- translation table, so @\\r@ parsed as a literal @r@ instead of a
-- carriage return.
escapeChar :: Parser Char
escapeChar = char '\\' >> oneOf "\"tnr\\" >>= \x ->
    return $ case lookup x [('n', '\n'), ('t', '\t'), ('r', '\r')] of
        Just v  -> v
        Nothing -> x
-- | A double-quoted string literal; backslash escapes are handled by
-- 'escapeChar'.
parseString :: Parser LispVal
parseString = do
    _ <- char '"'
    contents <- many (escapeChar <|> noneOf "\"")
    _ <- char '"'
    return (String contents)
parseAtom :: Parser LispVal
parseAtom = (letter <|> symbol) >>= \first ->
many (letter <|> digit <|> symbol) >>= \rest ->
return $ let atom = [first] ++ rest
in case atom of
"#t" -> Bool True
"#f" -> Bool False
_ -> Atom atom
parseNumber :: Parser LispVal
parseNumber = try parseFloat <|> parseDecimal <|> try parseHex <|> parseOct
parseDecimal :: Parser LispVal
parseDecimal = liftM (Number . read) $ many1 digit
parseFloat :: Parser LispVal
parseFloat = do
x <- many1 digit
char '.'
y <- many1 digit
let ((d,_):_) = readFloat $ x ++ "." ++ y
return $ Float d
parseHex :: Parser LispVal
parseHex = string "#x" >> many1 (hexDigit <|> digit) >>= return . readHex >>=
\((d, _):_) -> return $ Number d
parseOct :: Parser LispVal
parseOct = string "#o" >> many1 (octDigit <|> digit) >>= return . readOct >>=
\((d, _):_) -> return $ Number d
parseList :: Parser LispVal
parseList = liftM List $ sepBy parseExpr spaces
parseDottedList :: Parser LispVal
parseDottedList = do
h <- endBy parseExpr spaces
t <- char '.' >> spaces >> parseExpr
return $ DottedList h t
parseQuoted :: Parser LispVal
parseQuoted = do
char '\''
x <- parseExpr
return $ List [Atom "quote", x]
parseQuasiQuote :: Parser LispVal
parseQuasiQuote = do
char '`'
x <- parseExpr
return $ List [Atom "quasiquote", x]
parseUnQuote :: Parser LispVal
parseUnQuote = do
char ','
x <- parseExpr
return $ List [Atom "unquote", x]
parseExpr :: Parser LispVal
parseExpr = try parseNumber <|> parseAtom <|> parseAtom <|> parseString
<|> parseQuoted <|> do char '('
x <- (try parseList) <|> parseDottedList
char ')'
return x
readOrThrow :: Parser a -> String -> ThrowsError a
readOrThrow parser input = case parse parser "lisp" input of
Left err -> throwError $ Parser err
Right val -> return val
|
kkspeed/SICP-Practise
|
Interpreter/code/Parse.hs
|
lgpl-3.0
| 3,097
| 0
| 15
| 949
| 1,046
| 516
| 530
| 82
| 3
|
-- | Binary trees with pre-, in- and post-order traversals.  Each
-- traversal comes in a naive recursive form and an explicitly
-- stack-driven, tail-recursive form (the @...T@ variants).
module Data.Tree.Lazy (
    BinTree(..),
    preOrder,
    preOrderT,
    inOrder,
    inOrderT,
    postOrder,
    postOrderT,
    postOrderT1
  ) where

-- | A binary tree: either empty, or a value with two subtrees.
data BinTree a = EmptyTree
               | Branch a (BinTree a) (BinTree a) deriving (Show)

-- | Root, then left subtree, then right subtree.
preOrder :: BinTree a -> [a]
preOrder (EmptyTree) = []
preOrder (Branch a left right) = [a] ++ preOrder left ++ preOrder right

-- | Left subtree, then root, then right subtree.
inOrder :: BinTree a -> [a]
inOrder (EmptyTree) = []
inOrder (Branch a left right) = inOrder left ++ [a] ++ inOrder right

-- | Left subtree, then right subtree, then root.
postOrder :: BinTree a -> [a]
postOrder (EmptyTree) = []
postOrder (Branch a left right) = postOrder left ++ postOrder right ++ [a]

-- | Tail-recursive 'preOrder' using an explicit stack of subtrees;
-- values are consed and reversed at the end.
preOrderT :: BinTree a -> [a]
preOrderT bt = go [bt] []
  where go [] xs = reverse xs
        go (EmptyTree:ts) xs = go ts xs
        go (Branch v left right:ts) xs = go (left:right:ts) (v:xs)

-- | Tail-recursive 'inOrder'; the second accumulator holds values
-- whose left subtrees are still being traversed.
inOrderT :: BinTree a -> [a]
inOrderT bt = go [bt] [] []
  where go [] [] xs = reverse xs
        go (EmptyTree:ts) [] xs = go ts [] xs
        go (EmptyTree:ts) (v:left_acc) xs = go ts left_acc (v:xs)
        go (Branch v left right:ts) left_acc xs = go (left:right:ts) (v:left_acc) xs

-- | Tail-recursive 'postOrder': visits root/right/left while consing,
-- so the accumulated list comes out in left/right/root order with no
-- final reverse.
postOrderT :: BinTree a -> [a]
postOrderT bt = go [bt] []
  where go [] xs = xs
        go (EmptyTree:ts) xs = go ts xs
        go (Branch v left right:ts) xs = go (right:left:ts) (v:xs)

-- | Tail-recursive 'postOrder' using a work stack whose entries are
-- either subtrees still to expand ('Right') or values whose subtrees
-- have already been scheduled ('Left').
--
-- Fixed: the previous four-accumulator implementation had
-- non-exhaustive patterns and crashed on any tree of depth >= 2
-- (e.g. the state @go [] (Branch{}:_) (_:_) _@ matched no equation).
-- This version is total and agrees with 'postOrder' on all inputs.
postOrderT1 :: BinTree a -> [a]
postOrderT1 tree = go [Right tree] []
  where
    go [] vs = reverse vs
    go (Left v : work) vs = go work (v:vs)
    go (Right EmptyTree : work) vs = go work vs
    go (Right (Branch v l r) : work) vs =
      go (Right l : Right r : Left v : work) vs

-- | A small balanced example tree (in-order is [1..7]); handy in GHCi.
sampleTree :: BinTree Int
sampleTree = Branch 4 (Branch 2 (Branch 1 EmptyTree EmptyTree) (Branch 3 EmptyTree EmptyTree)) (Branch 6 (Branch 5 EmptyTree EmptyTree) (Branch 7 EmptyTree EmptyTree))
|
seckcoder/lang-learn
|
haskell/algo/src/Data/Tree/Lazy.hs
|
unlicense
| 2,165
| 0
| 11
| 567
| 1,160
| 600
| 560
| 52
| 7
|
-- Copyright (C) 2016 Fraser Tweedale
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module AESKW where
import qualified Data.ByteString as B
import Crypto.Cipher.AES
import Crypto.Cipher.Types
import Crypto.Error
import Test.QuickCheck.Monadic
import Test.Tasty
import Test.Tasty.QuickCheck
import Crypto.JOSE.AESKW
-- | Property-test group for the AES key-wrap (AESKW) implementation.
aeskwProperties :: TestTree
aeskwProperties = testGroup "AESKW"
  [ testProperty "AESKW round-trip" prop_roundTrip
  ]
-- | Wrapping then unwrapping an arbitrary content-encryption key (CEK)
-- with an arbitrary 128/192/256-bit key-encryption key (KEK) must give
-- back the original CEK, and the wrapped text must be exactly 8 bytes
-- longer than the input.
prop_roundTrip :: Property
prop_roundTrip = monadicIO $ do
  -- CEK length: a multiple of 8, at least 16 bytes.
  cekLen <- (* 8) . (+ 2) <$> pick arbitrarySizedNatural
  cek <- pick $ B.pack <$> vectorOf cekLen arbitrary
  -- KEK length: one of the three AES key sizes.
  kekLen <- pick $ oneof $ pure <$> [16, 24, 32]
  kek <- pick $ B.pack <$> vectorOf kekLen arbitrary
  let
    -- Checks the round-trip for a concrete cipher type; cipher
    -- initialisation failure counts as a test failure.
    check :: BlockCipher128 cipher => CryptoFailable cipher -> Bool
    check cipher' = case cipher' of
      CryptoFailed _ -> False
      CryptoPassed cipher ->
        let
          c = aesKeyWrap cipher cek :: B.ByteString
          cek' = aesKeyUnwrap cipher c
        in
          B.length c == cekLen + 8 && cek' == Just cek
  -- Dispatch on key length to pick the matching AES variant.
  case kekLen of
    16 -> assert $ check (cipherInit kek :: CryptoFailable AES128)
    24 -> assert $ check (cipherInit kek :: CryptoFailable AES192)
    32 -> assert $ check (cipherInit kek :: CryptoFailable AES256)
    _ -> assert False -- can't happen
|
frasertweedale/hs-jose
|
test/AESKW.hs
|
apache-2.0
| 1,823
| 0
| 21
| 379
| 400
| 211
| 189
| 32
| 5
|
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2012-2013
-- License : BSD3
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Console.Options
( MonitorOptions(..)
, HasMonitorOptions(..)
, Options(..)
, HasOptions(..)
, parseOptions
) where
import Control.Lens hiding (argument)
import Data.Data
import Ermine.Monitor
import Options.Applicative
import Paths_ermine
-- | All command line options.
data Options = Options
  { _optionsMonitorOptions :: MonitorOptions -- monitoring/EKG settings
  , _libdir :: FilePath                      -- ermine library location
  , _files :: [FilePath]                     -- source files to process
  } deriving (Eq,Ord,Show,Read,Data,Typeable)
-- Generates the 'HasOptions' class plus lenses for the fields above.
makeClassy ''Options
instance HasMonitorOptions Options where
  monitorOptions = optionsMonitorOptions
-- | Generate the command line option parser
-- Runs in IO because the default libdir comes from 'getDataDir'
-- (cabal's Paths_ermine).
-- NOTE(review): @argument Just@ is the old optparse-applicative (<0.11)
-- reader API — confirm against the pinned dependency version.
parseOptions :: IO (Parser Options)
parseOptions = do
  dd <- getDataDir
  return $ Options
       <$> parseMonitorOptions
       <*> option (long "libdir" <> short 'l' <> help "location of the ermine library" <> metavar "DIR" <> action "directory" <> value dd)
       <*> many (argument Just $ help "files" <> metavar "FILE" <> action "file")
|
ekmett/ermine
|
src/Ermine/Console/Options.hs
|
bsd-2-clause
| 1,338
| 0
| 16
| 214
| 278
| 155
| 123
| 27
| 1
|
{-# LANGUAGE OverloadedStrings, FlexibleInstances #-}
module Handler.Notes where
import Import
import Data.Aeson hiding (object)
-- | JSON form of a persisted note, including its database id.
instance ToJSON (Entity Note) where
    toJSON (Entity note_id (Note user subject content)) = object
        [ "id" .= note_id
        , "user" .= user
        , "subject" .= subject
        , "content" .= content]
-- | List all notes, rendered as HTML or JSON depending on content
-- negotiation (via 'defaultLayoutJson').
getNotesR :: Handler RepHtmlJson
getNotesR = do
    db <- runDB (selectList [] [])
    defaultLayoutJson (html db)
                      (asNoteEntities db)
  where
    -- Identity function that pins the otherwise-ambiguous result type
    -- of 'selectList' to Note entities.
    asNoteEntities :: [Entity Note] -> [Entity Note]
    asNoteEntities = id
    -- HTML rendering: a simple list of subjects with contents.
    html dbList = [whamlet|
<ul>
    $forall Entity _ note <- dbList
        <li>#{noteSubject note}
        <p>
            #{noteContent note}
|]
|
JanAhrens/yesod-oauth-demo
|
Handler/Notes.hs
|
bsd-2-clause
| 710
| 0
| 11
| 178
| 187
| 100
| 87
| -1
| -1
|
-- | The module implements /directed acyclic word graphs/ (DAWGs) internaly
-- represented as /minimal acyclic deterministic finite-state automata/.
-- The implementation provides fast insert and delete operations
-- which can be used to build the DAWG structure incrementaly.
--
-- See the "Data.DAWG.Ord" module if you look for a more generic
-- solution (which, for the moment, lacks some of the functionality provided
-- here, e.g. the `delete` function).
module Data.DAWG.Int
(
-- * DAWG type
DAWG
, ID
, Sym
, Val
, root
-- * Query
, lookup
, numStates
, numEdges
-- * Traversal
, value
, edges
, follow
-- * Construction
, empty
, fromList
, fromListWith
, fromLang
-- ** Insertion
, insert
, insertWith
-- ** Deletion
, delete
-- * Conversion
, assocs
, keys
, elems
) where
import Prelude hiding (lookup)
import Data.DAWG.Gen.Types
import Data.DAWG.Int.Dynamic
|
kawu/dawg-ord
|
src/Data/DAWG/Int.hs
|
bsd-2-clause
| 911
| 0
| 5
| 187
| 110
| 79
| 31
| 26
| 0
|
module TestModule where
import HEP.Automation.EventGeneration.Type
import HEP.Automation.JobQueue.JobType
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Model.SM
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Type
import HEP.Storage.WebDAV.Type
-- | MadGraph process setup: Standard Model ttbar production (p p > t t~).
psetup :: ProcessSetup SM
psetup = PS { model = SM
            , process = MGProc [] [ "p p > t t~" ]
            , processBrief = "ttbar"
            , workname = "ttbar"
            , hashSalt = HashSalt Nothing }
-- | Model parameters: plain SM, nothing varied.
param :: ModelParam SM
param = SMParam
-- | Run configuration: 10k events, LHC at 7 TeV with the ATLAS
-- detector card, MLM matching, PYTHIA showering and PGS detector
-- simulation with anti-kT R=0.4 jets.
rsetup :: RunSetup
rsetup =
  RS { numevent = 10000
     , machine = LHC7 ATLAS
     , rgrun = Auto
     , rgscale = 200
     , match = MLM
     , cut = DefCut
     , pythia = RunPYTHIA
     , lhesanitizer = []
     , pgs = RunPGS (AntiKTJet 0.4, WithTau)
     , uploadhep = NoUploadHEP
     , setnum = 1
     }
-- | The single event set submitted to the job queue.
eventsets :: [EventSet]
eventsets = [ EventSet psetup param rsetup ]
-- | WebDAV destination directory for the generated output.
webdavdir :: WebDAVRemoteDir
webdavdir = WebDAVRemoteDir "newtest"
|
wavewave/jobqueue-sender
|
TestModule.hs
|
bsd-2-clause
| 1,055
| 0
| 9
| 300
| 256
| 159
| 97
| 33
| 1
|
-- | Test folds, scans, and unfolds
{-# OPTIONS_GHC -fno-enable-rewrite-rules -fno-warn-missing-signatures #-}
module Tests.Properties.Folds
( testFolds
) where
import Control.Arrow (second)
import Data.Word (Word8, Word16)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck (testProperty)
import Tests.QuickCheckUtils
import Text.Show.Functions ()
import qualified Data.List as L
import qualified Data.Text as T
import qualified Data.Text.Internal.Fusion as S
import qualified Data.Text.Internal.Fusion.Common as S
import qualified Data.Text.Lazy as TL
-- Folds
--
-- Naming convention: t_* tests strict Text, tl_* lazy Text, s_*/sf_*
-- the internal fusion streams (sf_* additionally pre-filters with an
-- arbitrary predicate so the stream is not in a trivial state).
-- Each property checks the Text implementation against the list
-- model via `eqP`/`eq`; the `where _types = ...` bindings only pin
-- the otherwise-ambiguous element types.
sf_foldl p f z = (L.foldl f z . L.filter p) `eqP` (S.foldl f z . S.filter p)
    where _types = f :: Char -> Char -> Char
t_foldl f z = L.foldl f z `eqP` (T.foldl f z)
    where _types = f :: Char -> Char -> Char
tl_foldl f z = L.foldl f z `eqP` (TL.foldl f z)
    where _types = f :: Char -> Char -> Char
sf_foldl' p f z = (L.foldl' f z . L.filter p) `eqP`
                  (S.foldl' f z . S.filter p)
    where _types = f :: Char -> Char -> Char
t_foldl' f z = L.foldl' f z `eqP` T.foldl' f z
    where _types = f :: Char -> Char -> Char
tl_foldl' f z = L.foldl' f z `eqP` TL.foldl' f z
    where _types = f :: Char -> Char -> Char
sf_foldl1 p f = (L.foldl1 f . L.filter p) `eqP` (S.foldl1 f . S.filter p)
t_foldl1 f = L.foldl1 f `eqP` T.foldl1 f
tl_foldl1 f = L.foldl1 f `eqP` TL.foldl1 f
sf_foldl1' p f = (L.foldl1' f . L.filter p) `eqP` (S.foldl1' f . S.filter p)
t_foldl1' f = L.foldl1' f `eqP` T.foldl1' f
tl_foldl1' f = L.foldl1' f `eqP` TL.foldl1' f
sf_foldr p f z = (L.foldr f z . L.filter p) `eqP` (S.foldr f z . S.filter p)
    where _types = f :: Char -> Char -> Char
t_foldr f z = L.foldr f z `eqP` T.foldr f z
    where _types = f :: Char -> Char -> Char
-- `unsquare` caps input size for properties that would otherwise be
-- quadratic in time or space.
tl_foldr f z = unsquare $
               L.foldr f z `eqP` TL.foldr f z
    where _types = f :: Char -> Char -> Char
sf_foldr1 p f = unsquare $
                (L.foldr1 f . L.filter p) `eqP` (S.foldr1 f . S.filter p)
t_foldr1 f = L.foldr1 f `eqP` T.foldr1 f
tl_foldr1 f = unsquare $
              L.foldr1 f `eqP` TL.foldr1 f
-- Special folds
s_concat_s = unsquare $
             L.concat `eq` (unpackS . S.unstream . S.concat . map packS)
sf_concat p = unsquare $
              (L.concat . map (L.filter p)) `eq`
              (unpackS . S.concat . map (S.filter p . packS))
t_concat = unsquare $
           L.concat `eq` (unpackS . T.concat . map packS)
tl_concat = unsquare $
            L.concat `eq` (unpackS . TL.concat . map TL.pack)
sf_concatMap p f = unsquare $ (L.concatMap f . L.filter p) `eqP`
                              (unpackS . S.concatMap (packS . f) . S.filter p)
t_concatMap f = unsquare $
                L.concatMap f `eqP` (unpackS . T.concatMap (packS . f))
tl_concatMap f = unsquare $
                 L.concatMap f `eqP` (unpackS . TL.concatMap (TL.pack . f))
sf_any q p = (L.any p . L.filter q) `eqP` (S.any p . S.filter q)
t_any p = L.any p `eqP` T.any p
tl_any p = L.any p `eqP` TL.any p
sf_all q p = (L.all p . L.filter q) `eqP` (S.all p . S.filter q)
t_all p = L.all p `eqP` T.all p
tl_all p = L.all p `eqP` TL.all p
sf_maximum p = (L.maximum . L.filter p) `eqP` (S.maximum . S.filter p)
t_maximum = L.maximum `eqP` T.maximum
tl_maximum = L.maximum `eqP` TL.maximum
sf_minimum p = (L.minimum . L.filter p) `eqP` (S.minimum . S.filter p)
t_minimum = L.minimum `eqP` T.minimum
tl_minimum = L.minimum `eqP` TL.minimum
-- Scans
sf_scanl p f z = (L.scanl f z . L.filter p) `eqP`
                 (unpackS . S.scanl f z . S.filter p)
t_scanl f z = L.scanl f z `eqP` (unpackS . T.scanl f z)
tl_scanl f z = L.scanl f z `eqP` (unpackS . TL.scanl f z)
t_scanl1 f = L.scanl1 f `eqP` (unpackS . T.scanl1 f)
tl_scanl1 f = L.scanl1 f `eqP` (unpackS . TL.scanl1 f)
t_scanr f z = L.scanr f z `eqP` (unpackS . T.scanr f z)
tl_scanr f z = L.scanr f z `eqP` (unpackS . TL.scanr f z)
t_scanr1 f = L.scanr1 f `eqP` (unpackS . T.scanr1 f)
tl_scanr1 f = L.scanr1 f `eqP` (unpackS . TL.scanr1 f)
t_mapAccumL f z = L.mapAccumL f z `eqP` (second unpackS . T.mapAccumL f z)
    where _types = f :: Int -> Char -> (Int,Char)
tl_mapAccumL f z = L.mapAccumL f z `eqP` (second unpackS . TL.mapAccumL f z)
    where _types = f :: Int -> Char -> (Int,Char)
t_mapAccumR f z = L.mapAccumR f z `eqP` (second unpackS . T.mapAccumR f z)
    where _types = f :: Int -> Char -> (Int,Char)
tl_mapAccumR f z = L.mapAccumR f z `eqP` (second unpackS . TL.mapAccumR f z)
    where _types = f :: Int -> Char -> (Int,Char)
-- Unfolds
--
-- Sizes are drawn via Word8/Word16 to keep generated inputs small and
-- the properties fast.
tl_repeat n = (L.take m . L.repeat) `eq`
              (unpackS . TL.take (fromIntegral m) . TL.repeat)
    where m = fromIntegral (n :: Word8)
-- List-model replicate used by the s_/t_/tl_replicate properties.
any_replicate n l = concat (L.replicate n l)
s_replicate n = any_replicate m `eq`
                (unpackS . S.replicateI (fromIntegral m) . packS)
    where m = fromIntegral (n :: Word8)
t_replicate n = any_replicate m `eq` (unpackS . T.replicate m . packS)
    where m = fromIntegral (n :: Word8)
tl_replicate n = any_replicate m `eq`
                 (unpackS . TL.replicate (fromIntegral m) . packS)
    where m = fromIntegral (n :: Word8)
tl_cycle n = (L.take m . L.cycle) `eq`
             (unpackS . TL.take (fromIntegral m) . TL.cycle . packS)
    where m = fromIntegral (n :: Word8)
tl_iterate f n = (L.take m . L.iterate f) `eq`
                 (unpackS . TL.take (fromIntegral m) . TL.iterate f)
    where m = fromIntegral (n :: Word8)
-- Terminating step function for the unfoldr properties: stops once
-- the character code grows past n/100.
unf :: Int -> Char -> Maybe (Char, Char)
unf n c | fromEnum c * 100 > n = Nothing
        | otherwise            = Just (c, succ c)
t_unfoldr n = L.unfoldr (unf m) `eq` (unpackS . T.unfoldr (unf m))
    where m = fromIntegral (n :: Word16)
tl_unfoldr n = L.unfoldr (unf m) `eq` (unpackS . TL.unfoldr (unf m))
    where m = fromIntegral (n :: Word16)
t_unfoldrN n m = (L.take i . L.unfoldr (unf j)) `eq`
                 (unpackS . T.unfoldrN i (unf j))
    where i = fromIntegral (n :: Word16)
          j = fromIntegral (m :: Word16)
tl_unfoldrN n m = (L.take i . L.unfoldr (unf j)) `eq`
                  (unpackS . TL.unfoldrN (fromIntegral i) (unf j))
    where i = fromIntegral (n :: Word16)
          j = fromIntegral (m :: Word16)
-- | Assembles every property above into the tasty test tree.
testFolds :: TestTree
testFolds =
  testGroup "folds-unfolds" [
    testGroup "folds" [
      testProperty "sf_foldl" sf_foldl,
      testProperty "t_foldl" t_foldl,
      testProperty "tl_foldl" tl_foldl,
      testProperty "sf_foldl'" sf_foldl',
      testProperty "t_foldl'" t_foldl',
      testProperty "tl_foldl'" tl_foldl',
      testProperty "sf_foldl1" sf_foldl1,
      testProperty "t_foldl1" t_foldl1,
      testProperty "tl_foldl1" tl_foldl1,
      testProperty "t_foldl1'" t_foldl1',
      testProperty "sf_foldl1'" sf_foldl1',
      testProperty "tl_foldl1'" tl_foldl1',
      testProperty "sf_foldr" sf_foldr,
      testProperty "t_foldr" t_foldr,
      testProperty "tl_foldr" tl_foldr,
      testProperty "sf_foldr1" sf_foldr1,
      testProperty "t_foldr1" t_foldr1,
      testProperty "tl_foldr1" tl_foldr1,
      -- NOTE(review): "special" is nested INSIDE the "folds" group
      -- (it is the last element of this list), unlike "scans",
      -- "mapAccum" and "unfolds" which are siblings.  This looks
      -- accidental — confirm against the intended test-tree layout
      -- before relying on test path names.
      testGroup "special" [
        testProperty "s_concat_s" s_concat_s,
        testProperty "sf_concat" sf_concat,
        testProperty "t_concat" t_concat,
        testProperty "tl_concat" tl_concat,
        testProperty "sf_concatMap" sf_concatMap,
        testProperty "t_concatMap" t_concatMap,
        testProperty "tl_concatMap" tl_concatMap,
        testProperty "sf_any" sf_any,
        testProperty "t_any" t_any,
        testProperty "tl_any" tl_any,
        testProperty "sf_all" sf_all,
        testProperty "t_all" t_all,
        testProperty "tl_all" tl_all,
        testProperty "sf_maximum" sf_maximum,
        testProperty "t_maximum" t_maximum,
        testProperty "tl_maximum" tl_maximum,
        testProperty "sf_minimum" sf_minimum,
        testProperty "t_minimum" t_minimum,
        testProperty "tl_minimum" tl_minimum
      ]
    ],
    testGroup "scans" [
      testProperty "sf_scanl" sf_scanl,
      testProperty "t_scanl" t_scanl,
      testProperty "tl_scanl" tl_scanl,
      testProperty "t_scanl1" t_scanl1,
      testProperty "tl_scanl1" tl_scanl1,
      testProperty "t_scanr" t_scanr,
      testProperty "tl_scanr" tl_scanr,
      testProperty "t_scanr1" t_scanr1,
      testProperty "tl_scanr1" tl_scanr1
    ],
    testGroup "mapAccum" [
      testProperty "t_mapAccumL" t_mapAccumL,
      testProperty "tl_mapAccumL" tl_mapAccumL,
      testProperty "t_mapAccumR" t_mapAccumR,
      testProperty "tl_mapAccumR" tl_mapAccumR
    ],
    testGroup "unfolds" [
      testProperty "tl_repeat" tl_repeat,
      testProperty "s_replicate" s_replicate,
      testProperty "t_replicate" t_replicate,
      testProperty "tl_replicate" tl_replicate,
      testProperty "tl_cycle" tl_cycle,
      testProperty "tl_iterate" tl_iterate,
      testProperty "t_unfoldr" t_unfoldr,
      testProperty "tl_unfoldr" tl_unfoldr,
      testProperty "t_unfoldrN" t_unfoldrN,
      testProperty "tl_unfoldrN" tl_unfoldrN
    ]
  ]
|
bos/text
|
tests/Tests/Properties/Folds.hs
|
bsd-2-clause
| 9,415
| 0
| 12
| 2,707
| 3,504
| 1,832
| 1,672
| 191
| 1
|
{-| Implementation of cluster-wide logic.
This module holds all pure cluster-logic; I\/O related functionality
goes into the /Main/ module for the individual binaries.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Cluster
(
-- * Types
AllocDetails(..)
, Table(..)
, CStats(..)
, AllocNodes
, AllocResult
, AllocMethod
, GenericAllocSolutionList
, AllocSolutionList
-- * Generic functions
, totalResources
, computeAllocationDelta
, hasRequiredNetworks
-- * First phase functions
, computeBadItems
-- * Second phase functions
, printSolutionLine
, formatCmds
, involvedNodes
, getMoves
, splitJobs
-- * Display functions
, printNodes
, printInsts
-- * Balacing functions
, doNextBalance
, tryBalance
, iMoveToJob
-- * IAllocator functions
, genAllocNodes
, tryAlloc
, tryGroupAlloc
, tryMGAlloc
, filterMGResults
, sortMGResults
, tryChangeGroup
, allocList
-- * Allocation functions
, iterateAlloc
, tieredAlloc
-- * Node group functions
, instanceGroup
, findSplitInstances
) where
import Control.Applicative (liftA2)
import Control.Arrow ((&&&))
import Control.Monad (unless)
import Control.Parallel.Strategies (rseq, parMap)
import qualified Data.IntSet as IntSet
import Data.List
import Data.Maybe (fromJust, fromMaybe, isJust, isNothing)
import Data.Ord (comparing)
import Text.Printf (printf)
import Ganeti.BasicTypes
import Ganeti.HTools.AlgorithmParams (AlgorithmOptions(..), defaultOptions)
import qualified Ganeti.HTools.Container as Container
import Ganeti.HTools.Cluster.AllocatePrimitives ( allocateOnSingle
, allocateOnPair)
import Ganeti.HTools.Cluster.AllocationSolution
( GenericAllocSolution(..) , AllocSolution, emptyAllocSolution
, sumAllocs, extractNl, updateIl
, annotateSolution, solutionDescription, collapseFailures
, emptyAllocCollection, concatAllocCollections, collectionToSolution )
import Ganeti.HTools.Cluster.Evacuate ( EvacSolution(..), emptyEvacSolution
, updateEvacSolution, reverseEvacSolution
, nodeEvacInstance)
import Ganeti.HTools.Cluster.Metrics (compCV, compClusterStatistics)
import Ganeti.HTools.Cluster.Moves (applyMoveEx)
import Ganeti.HTools.Cluster.Utils (splitCluster, instancePriGroup
, availableGroupNodes, iMoveToJob)
import Ganeti.HTools.GlobalN1 (allocGlobalN1, redundant)
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Nic as Nic
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import Ganeti.HTools.Types
import Ganeti.Utils
import Ganeti.Types (EvacMode(..))
-- * Types
-- | Allocation details for an instance, specifying
-- required number of nodes, and
-- an optional group (name) to allocate to
data AllocDetails = AllocDetails Int (Maybe String)
                    deriving (Show)
-- | Allocation results, as used in 'iterateAlloc' and 'tieredAlloc'.
type AllocResult = (FailStats, Node.List, Instance.List,
                    [Instance.Instance], [CStats])
-- | Type alias for easier handling.
type GenericAllocSolutionList a =
  [(Instance.Instance, GenericAllocSolution a)]
-- | Per-instance allocation solutions scored with the default 'Score'.
type AllocSolutionList = GenericAllocSolutionList Score
-- | A type denoting the valid allocation mode/pairs.
--
-- For a one-node allocation, this will be a @Left ['Ndx']@, whereas
-- for a two-node allocation, this will be a @Right [('Ndx',
-- ['Ndx'])]@. In the latter case, the list is basically an
-- association list, grouped by primary node and holding the potential
-- secondary nodes in the sub-list.
type AllocNodes = Either [Ndx] [(Ndx, [Ndx])]
-- | The complete state for the balancing solution.
-- Fields: node list, instance list, current cluster score, and the
-- placements performed so far (most recent first).
data Table = Table Node.List Instance.List Score [Placement]
             deriving (Show)
-- | Cluster statistics data type.
data CStats = CStats
  { csFmem :: Integer -- ^ Cluster free mem
  , csFdsk :: Integer -- ^ Cluster free disk
  , csFspn :: Integer -- ^ Cluster free spindles
  , csAmem :: Integer -- ^ Cluster allocatable mem
  , csAdsk :: Integer -- ^ Cluster allocatable disk
  , csAcpu :: Integer -- ^ Cluster allocatable cpus
  , csMmem :: Integer -- ^ Max node allocatable mem
  , csMdsk :: Integer -- ^ Max node allocatable disk
  , csMcpu :: Integer -- ^ Max node allocatable cpu
  , csImem :: Integer -- ^ Instance used mem
  , csIdsk :: Integer -- ^ Instance used disk
  , csIspn :: Integer -- ^ Instance used spindles
  , csIcpu :: Integer -- ^ Instance used cpu
  , csTmem :: Double  -- ^ Cluster total mem
  , csTdsk :: Double  -- ^ Cluster total disk
  , csTspn :: Double  -- ^ Cluster total spindles
  , csTcpu :: Double  -- ^ Cluster total cpus
  , csVcpu :: Integer -- ^ Cluster total virtual cpus
  , csNcpu :: Double  -- ^ Equivalent to 'csIcpu' but in terms of
                      -- physical CPUs, i.e. normalised used phys CPUs
  , csXmem :: Integer -- ^ Unnacounted for mem
  , csNmem :: Integer -- ^ Node own memory
  , csScore :: Score  -- ^ The cluster score
  , csNinst :: Int    -- ^ The total number of instances
  } deriving (Show)
-- | A simple type for allocation functions.
type AllocMethod =  Node.List           -- ^ Node list
                 -> Instance.List       -- ^ Instance list
                 -> Maybe Int           -- ^ Optional allocation limit
                 -> Instance.Instance   -- ^ Instance spec for allocation
                 -> AllocNodes          -- ^ Which nodes we should allocate on
                 -> [Instance.Instance] -- ^ Allocated instances
                 -> [CStats]            -- ^ Running cluster stats
                 -> Result AllocResult  -- ^ Allocation result
-- * Utility functions
-- | Return the subset of nodes that fail the N+1 redundancy check.
verifyN1 :: [Node.Node] -> [Node.Node]
verifyN1 nodes = [node | node <- nodes, Node.failN1 node]
{-| Computes the pair of bad nodes and instances.
The bad node list is computed via a simple 'verifyN1' check, and the
bad instance list is the list of primary and secondary instances of
those nodes.
-}
computeBadItems :: Node.List -> Instance.List ->
                   ([Node.Node], [Instance.Instance])
computeBadItems nl il =
  -- Only online nodes are considered; offline ones cannot be fixed by
  -- balancing anyway.
  let bad_nodes = verifyN1 $ getOnline nl
      -- NOTE(review): 'nub' is O(n^2); fine for typical cluster sizes,
      -- but worth revisiting if node counts grow large.
      bad_instances = map (`Container.find` il) .
                      sort . nub $
                      concatMap (\ n -> Node.sList n ++ Node.pList n) bad_nodes
  in
    (bad_nodes, bad_instances)
-- | Zero-initializer for the CStats type.
-- NOTE(review): positional construction relies on CStats having
-- exactly 23 fields in declaration order; adding a field to CStats
-- requires updating this literal.
emptyCStats :: CStats
emptyCStats = CStats 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
-- | Update stats with data from a new node.
-- Sums (or takes the max of, for the csM* fields) the node's resource
-- figures into the running totals.
updateCStats :: CStats -> Node.Node -> CStats
updateCStats cs node =
  let CStats { csFmem = x_fmem, csFdsk = x_fdsk,
               csAmem = x_amem, csAcpu = x_acpu, csAdsk = x_adsk,
               csMmem = x_mmem, csMdsk = x_mdsk, csMcpu = x_mcpu,
               csImem = x_imem, csIdsk = x_idsk, csIcpu = x_icpu,
               csTmem = x_tmem, csTdsk = x_tdsk, csTcpu = x_tcpu,
               csVcpu = x_vcpu, csNcpu = x_ncpu,
               csXmem = x_xmem, csNmem = x_nmem, csNinst = x_ninst,
               csFspn = x_fspn, csIspn = x_ispn, csTspn = x_tspn
             }
        = cs
      inc_amem = Node.availMem node
      inc_adsk = Node.availDisk node
      inc_imem = Node.iMem node
      inc_icpu = Node.uCpu node
      -- used disk = total (rounded down) minus free
      inc_idsk = truncate (Node.tDsk node) - Node.fDsk node
      inc_ispn = Node.tSpindles node - Node.fSpindles node
      inc_vcpu = Node.hiCpu node
      inc_acpu = Node.availCpu node
      -- used CPUs normalised by the policy's vcpu ratio
      inc_ncpu = fromIntegral (Node.uCpu node) /
                 iPolicyVcpuRatio (Node.iPolicy node)
  in cs { csFmem = x_fmem + fromIntegral (Node.unallocatedMem node)
        , csFdsk = x_fdsk + fromIntegral (Node.fDsk node)
        , csFspn = x_fspn + fromIntegral (Node.fSpindles node)
        , csAmem = x_amem + fromIntegral inc_amem
        , csAdsk = x_adsk + fromIntegral inc_adsk
        , csAcpu = x_acpu + fromIntegral inc_acpu
        , csMmem = max x_mmem (fromIntegral inc_amem)
        , csMdsk = max x_mdsk (fromIntegral inc_adsk)
        , csMcpu = max x_mcpu (fromIntegral inc_acpu)
        , csImem = x_imem + fromIntegral inc_imem
        , csIdsk = x_idsk + fromIntegral inc_idsk
        , csIspn = x_ispn + fromIntegral inc_ispn
        , csIcpu = x_icpu + fromIntegral inc_icpu
        , csTmem = x_tmem + Node.tMem node
        , csTdsk = x_tdsk + Node.tDsk node
        , csTspn = x_tspn + fromIntegral (Node.tSpindles node)
        , csTcpu = x_tcpu + Node.tCpu node
        , csVcpu = x_vcpu + fromIntegral inc_vcpu
        , csNcpu = x_ncpu + inc_ncpu
        , csXmem = x_xmem + fromIntegral (Node.xMem node)
        , csNmem = x_nmem + fromIntegral (Node.nMem node)
        , csNinst = x_ninst + length (Node.pList node)
        }
-- | Aggregate the per-node statistics over the whole cluster and
-- attach the current cluster score.
totalResources :: Node.List -> CStats
totalResources nl =
  (foldl' updateCStats emptyCStats (Container.elems nl))
    { csScore = compCV nl }
-- | Compute the delta between two cluster state.
--
-- This is used when doing allocations, to understand better the
-- available cluster resources. The return value is a triple of the
-- current used values, the delta that was still allocated, and what
-- was left unallocated.
computeAllocationDelta :: CStats -> CStats -> AllocStats
computeAllocationDelta cini cfin =
  let CStats {csImem = i_imem, csIdsk = i_idsk, csIcpu = i_icpu,
              csNcpu = i_ncpu, csIspn = i_ispn } = cini
      CStats {csImem = f_imem, csIdsk = f_idsk, csIcpu = f_icpu,
              csTmem = t_mem, csTdsk = t_dsk, csVcpu = f_vcpu,
              csNcpu = f_ncpu, csTcpu = f_tcpu,
              csIspn = f_ispn, csTspn = t_spn } = cfin
      -- resources used at the initial state
      rini = AllocInfo { allocInfoVCpus = fromIntegral i_icpu
                       , allocInfoNCpus = i_ncpu
                       , allocInfoMem = fromIntegral i_imem
                       , allocInfoDisk = fromIntegral i_idsk
                       , allocInfoSpn = fromIntegral i_ispn
                       }
      -- resources additionally allocated between the two states
      rfin = AllocInfo { allocInfoVCpus = fromIntegral (f_icpu - i_icpu)
                       , allocInfoNCpus = f_ncpu - i_ncpu
                       , allocInfoMem = fromIntegral (f_imem - i_imem)
                       , allocInfoDisk = fromIntegral (f_idsk - i_idsk)
                       , allocInfoSpn = fromIntegral (f_ispn - i_ispn)
                       }
      -- resources still unallocated in the final state
      runa = AllocInfo { allocInfoVCpus = fromIntegral (f_vcpu - f_icpu)
                       , allocInfoNCpus = f_tcpu - f_ncpu
                       , allocInfoMem = truncate t_mem - fromIntegral f_imem
                       , allocInfoDisk = truncate t_dsk - fromIntegral f_idsk
                       , allocInfoSpn = truncate t_spn - fromIntegral f_ispn
                       }
  in (rini, rfin, runa)
-- | All nodes of the cluster that are not marked offline.
getOnline :: Node.List -> [Node.Node]
getOnline nl = [n | n <- Container.elems nl, not (Node.offline n)]
-- * Balancing functions
-- | Pick the better (lower-score) of two tables.  The first argument
-- wins ties, so the ordering of the arguments is important.
compareTables :: Table -> Table -> Table
compareTables a@(Table _ _ a_cv _) b@(Table _ _ b_cv _)
  | a_cv > b_cv = b
  | otherwise   = a
-- | Tries to perform an instance move and returns the best table
-- between the original one and the new one.
-- A move rejected by 'applyMoveEx' leaves the current best table
-- unchanged; a successful move is scored via 'compCV' and compared.
checkSingleStep :: Bool -- ^ Whether to unconditionally ignore soft errors
                -> Table -- ^ The original table
                -> Instance.Instance -- ^ The instance to move
                -> Table -- ^ The current best table
                -> IMove -- ^ The move to apply
                -> Table -- ^ The final best table
checkSingleStep force ini_tbl target cur_tbl move =
  let Table ini_nl ini_il _ ini_plc = ini_tbl
      tmp_resu = applyMoveEx force ini_nl target move
  in case tmp_resu of
       Bad _ -> cur_tbl
       Ok (upd_nl, new_inst, pri_idx, sec_idx) ->
         let tgt_idx = Instance.idx target
             upd_cvar = compCV upd_nl
             upd_il = Container.add tgt_idx new_inst ini_il
             -- record the placement so the solution can be replayed
             upd_plc = (tgt_idx, pri_idx, sec_idx, move, upd_cvar):ini_plc
             upd_tbl = Table upd_nl upd_il upd_cvar upd_plc
         in compareTables cur_tbl upd_tbl
-- | Given the status of the current secondary as a valid new node and
-- the current candidate target node, generate the possible moves for
-- a instance.
-- The equations below enumerate, per mirroring type, which move
-- primitives are legal given the flags; patterns are matched top to
-- bottom.
possibleMoves :: MirrorType -- ^ The mirroring type of the instance
              -> Bool -- ^ Whether the secondary node is a valid new node
              -> Bool -- ^ Whether we can change the primary node
              -> Bool -- ^ Whether we alowed to move disks
              -> (Bool, Bool) -- ^ Whether migration is restricted and whether
                              -- the instance primary is offline
              -> Ndx -- ^ Target node candidate
              -> [IMove] -- ^ List of valid result moves
-- no mirroring: nothing can be moved
possibleMoves MirrorNone _ _ _ _ _ = []
-- externally mirrored: only a failover-to-any, and only if instance
-- moves are allowed
possibleMoves MirrorExternal _ False _ _ _ = []
possibleMoves MirrorExternal _ True _ _ tdx =
  [ FailoverToAny tdx ]
-- internally (DRBD) mirrored: disk moves must be allowed at all
possibleMoves MirrorInternal _ _ False _ _ = []
possibleMoves MirrorInternal _ False True _ tdx =
  [ ReplaceSecondary tdx ]
-- restricted migration with the primary online: secondary replacement only
possibleMoves MirrorInternal _ _ True (True, False) tdx =
  [ ReplaceSecondary tdx
  ]
-- unrestricted migration, secondary usable: the full move repertoire
possibleMoves MirrorInternal True True True (False, _) tdx =
  [ ReplaceSecondary tdx
  , ReplaceAndFailover tdx
  , ReplacePrimary tdx
  , FailoverAndReplace tdx
  ]
-- restricted migration but the primary is offline
possibleMoves MirrorInternal True True True (True, True) tdx =
  [ ReplaceSecondary tdx
  , ReplaceAndFailover tdx
  , FailoverAndReplace tdx
  ]
-- secondary not usable as a new node
possibleMoves MirrorInternal False True True _ tdx =
  [ ReplaceSecondary tdx
  , ReplaceAndFailover tdx
  ]
-- | Compute the best move for a given instance.
-- Folds 'checkSingleStep' over every legal move onto every allowed
-- target node, starting (for DRBD instances that may fail over) from
-- the plain-failover table so that move is also considered.
checkInstanceMove :: AlgorithmOptions -- ^ Algorithmic options for balancing
                  -> [Ndx] -- ^ Allowed target node indices
                  -> Table -- ^ Original table
                  -> Instance.Instance -- ^ Instance to move
                  -> Table -- ^ Best new table for this instance
checkInstanceMove opts nodes_idx ini_tbl@(Table nl _ _ _) target =
  let force = algIgnoreSoftErrors opts
      disk_moves = algDiskMoves opts
      inst_moves = algInstanceMoves opts
      rest_mig = algRestrictedMigration opts
      opdx = Instance.pNode target
      osdx = Instance.sNode target
      -- the instance's own nodes are never valid targets
      bad_nodes = [opdx, osdx]
      nodes = filter (`notElem` bad_nodes) nodes_idx
      mir_type = Instance.mirrorType target
      use_secondary = elem osdx nodes_idx && inst_moves
      aft_failover = if mir_type == MirrorInternal && use_secondary
                       -- if drbd and allowed to failover
                       then checkSingleStep force ini_tbl target ini_tbl
                            Failover
                       else ini_tbl
      primary_drained = Node.offline
                        . flip Container.find nl
                        $ Instance.pNode target
      all_moves = concatMap (possibleMoves mir_type use_secondary inst_moves
                             disk_moves (rest_mig, primary_drained)) nodes
    in
      -- iterate over the possible nodes for this instance
      foldl' (checkSingleStep force ini_tbl target) aft_failover all_moves
-- | Compute the best next move.
checkMove :: AlgorithmOptions    -- ^ Algorithmic options for balancing
          -> [Ndx]               -- ^ Allowed target node indices
          -> Table               -- ^ The current solution
          -> [Instance.Instance] -- ^ List of instances still to move
          -> Table               -- ^ The new solution
checkMove opts nodes_idx ini_tbl victims =
  if length best_plc == length ini_plc
    then ini_tbl -- no new placement, so no advancement was possible
    else best_tbl
  where
    Table _ _ _ ini_plc = ini_tbl
    -- Evaluate the per-instance candidate tables in parallel; rseq
    -- (weak head normal form) is enough to spark the evaluation
    -- without forcing too much in the single-threaded case.
    candidate_tbls =
      parMap rseq (checkInstanceMove opts nodes_idx ini_tbl) victims
    -- keep the best of all candidate tables, starting from the
    -- current one
    best_tbl = foldl' compareTables ini_tbl candidate_tbls
    Table _ _ _ best_plc = best_tbl
-- | Check if we are allowed to go deeper in the balancing.
doNextBalance :: Table -- ^ The starting table
              -> Int   -- ^ Remaining length
              -> Score -- ^ Score at which to stop
              -> Bool  -- ^ Whether another balancing step may be taken
doNextBalance (Table _ _ cur_cv cur_plc) max_rounds min_score =
  rounds_left && cur_cv > min_score
  where
    -- a negative limit means "no round limit"
    rounds_left = max_rounds < 0 || length cur_plc < max_rounds
-- | Run a balance move.
tryBalance :: AlgorithmOptions -- ^ Algorithmic options for balancing
           -> Table            -- ^ The starting table
           -> Maybe Table      -- ^ The resulting table and commands
tryBalance opts ini_tbl@(Table ini_nl ini_il ini_cv _)
  | fin_cv < ini_cv && (ini_cv > mg_limit || ini_cv - fin_cv >= min_gain) =
      Just fin_tbl -- this round made progress, return the new table
  | otherwise = Nothing
  where
    mg_limit = algMinGainLimit opts
    min_gain = algMinGain opts
    (offline_nodes, online_nodes) =
      partition Node.offline (Container.elems ini_nl)
    -- in evacuation mode only instances touching an offline node are
    -- considered; otherwise all instances are
    candidates
      | algEvacMode opts =
          let bad_nodes = map Node.idx offline_nodes
          in filter (any (`elem` bad_nodes) . Instance.allNodes)
             (Container.elems ini_il)
      | otherwise = Container.elems ini_il
    -- restrict to instances we are permitted to move around
    reloc_inst =
      filter (\i -> Instance.movable i && Instance.autoBalance i) candidates
    fin_tbl =
      checkMove opts (map Node.idx online_nodes) ini_tbl reloc_inst
    Table _ _ fin_cv _ = fin_tbl
-- * Allocation functions
-- | Generate the valid node allocation singles or pairs for a new instance.
genAllocNodes :: AlgorithmOptions -- ^ algorithmic options to honor
              -> Group.List       -- ^ Group list
              -> Node.List        -- ^ The node map
              -> Int              -- ^ The number of nodes required
              -> Bool             -- ^ Whether to drop or not
                                  -- unallocable nodes
              -> Result AllocNodes -- ^ The (monadic) result
genAllocNodes opts gl nl count drop_unalloc =
  case count of
    1 -> Ok . Left $ map Node.idx candidates
    2 -> Ok . Right $ filter (not . null . snd) pairings
    _ -> Bad "Unsupported number of nodes, only one or two supported"
  where
    -- whether a node lives in a group accepting allocations
    allocable n = Group.isAllocable $ Container.find (Node.group n) gl
    drop_fn = if drop_unalloc then filter allocable else id
    -- optionally restrict to an explicit node-name whitelist
    restrict_fn = case algRestrictToNodes opts of
                    Nothing -> id
                    Just ns -> filter ((`elem` ns) . Node.name)
    candidates = restrict_fn . drop_fn $ getOnline nl
    -- valid secondaries for a primary: any other candidate in the
    -- same node group
    secondariesOf p = [ Node.idx s
                      | s <- candidates
                      , Node.idx p /= Node.idx s
                      , Node.group p == Node.group s ]
    pairings = [ (Node.idx p, secondariesOf p) | p <- candidates ]
-- | Try to allocate an instance on the cluster.
tryAlloc :: (Monad m) =>
            AlgorithmOptions
         -> Node.List         -- ^ The node list
         -> Instance.List     -- ^ The instance list
         -> Instance.Instance -- ^ The instance to allocate
         -> AllocNodes        -- ^ The allocation targets
         -> m AllocSolution   -- ^ Possible solution list
-- two-node allocation over the precomputed (primary, secondaries) pairs
tryAlloc _ _ _ _ (Right []) = fail "Not enough online nodes"
tryAlloc opts nl il inst (Right ok_pairs) =
  let cstat = compClusterStatistics $ Container.elems nl
      -- global N+1 predicate, only applied when capacity checking is on
      n1pred = if algCapacity opts
                 then allocGlobalN1 opts nl il
                 else const True
      -- evaluate each primary (with all its secondaries) in parallel;
      -- rseq sparks evaluation to weak head normal form only
      psols = parMap rseq (\(p, ss) ->
                             collectionToSolution FailN1 n1pred $
                             foldl (\cstate ->
                                      concatAllocCollections cstate
                                      . allocateOnPair opts cstat nl inst p)
                             emptyAllocCollection ss) ok_pairs
      -- merge the per-primary solutions into one
      sols = foldl' sumAllocs emptyAllocSolution psols
  in return $ annotateSolution sols
-- single-node allocation over the candidate node list
tryAlloc _ _ _ _ (Left []) = fail "No online nodes"
tryAlloc opts nl il inst (Left all_nodes) =
  let sols = foldl (\cstate ->
                      concatAllocCollections cstate
                      . allocateOnSingle opts nl inst
                   ) emptyAllocCollection all_nodes
      n1pred = if algCapacity opts
                 then allocGlobalN1 opts nl il
                 else const True
  in return . annotateSolution
     $ collectionToSolution FailN1 n1pred sols
-- | From a list of possibly bad and possibly empty solutions, filter
-- only the groups with a valid result. Note that the result will be
-- reversed compared to the original list.
filterMGResults :: [(Group.Group, Result (GenericAllocSolution a))]
                -> [(Group.Group, GenericAllocSolution a)]
filterMGResults = foldl' step []
  where
    -- keep a pair only when the group accepts allocations and the
    -- attempt produced an actual solution; prepending reverses order
    step acc (grp, res) =
      case res of
        Ok sol | Group.isAllocable grp && isJust (asSolution sol)
                 -> (grp, sol) : acc
        _ -> acc
-- | Sort multigroup results based on policy and score.
sortMGResults :: Ord a
              => [(Group.Group, GenericAllocSolution a)]
              -> [(Group.Group, GenericAllocSolution a)]
sortMGResults = sortBy (comparing rank)
  where
    -- order first by the group's allocation policy, then by the
    -- score of its (known-present) solution
    rank (grp, sol) = (Group.allocPolicy grp, scoreOf sol)
    scoreOf sol = let (_, _, _, x) = fromJust (asSolution sol)
                  in x
-- | Determines if a group is connected to the networks required by
-- the instance.
hasRequiredNetworks :: Group.Group -> Instance.Instance -> Bool
hasRequiredNetworks ng inst = all nicSatisfied (Instance.nics inst)
  where
    -- a NIC without an attached network poses no requirement
    nicSatisfied nic = case Nic.network nic of
                         Nothing  -> True
                         Just net -> net `elem` Group.networks ng
-- | Removes node groups which can't accommodate the instance
filterValidGroups :: [(Group.Group, (Node.List, Instance.List))]
                  -> Instance.Instance
                  -> ([(Group.Group, (Node.List, Instance.List))], [String])
filterValidGroups ngs inst = foldr classify ([], []) ngs
  where
    -- keep groups with all required networks; turn the others into
    -- human-readable rejection messages, preserving order
    classify ng (good, msgs)
      | hasRequiredNetworks (fst ng) inst = (ng : good, msgs)
      | otherwise = (good, reject ng : msgs)
    reject ng = "group " ++ Group.name (fst ng) ++
                " is not connected to a network required by instance " ++
                Instance.name inst
-- | Finds an allocation solution for an instance on a group
findAllocation :: AlgorithmOptions
               -> Group.List        -- ^ The group list
               -> Node.List         -- ^ The node list
               -> Instance.List     -- ^ The instance list
               -> Gdx               -- ^ The group to allocate to
               -> Instance.Instance -- ^ The instance to allocate
               -> Int               -- ^ Required number of nodes
               -> Result (AllocSolution, [String])
findAllocation opts mggl mgnl mgil gdx inst cnt = do
  -- restrict the node and instance views to the target group
  let belongsTo nl' nidx = nidx `elem` map Node.idx (Container.elems nl')
      nl = Container.filter ((== gdx) . Node.group) mgnl
      il = Container.filter (belongsTo nl . Instance.pNode) mgil
      group' = Container.find gdx mggl
  -- fail early if the group lacks a network the instance needs
  unless (hasRequiredNetworks group' inst) . failError
         $ "The group " ++ Group.name group' ++ " is not connected to\
           \ a network required by instance " ++ Instance.name inst
  solution <- genAllocNodes opts mggl nl cnt False >>= tryAlloc opts nl il inst
  return (solution, solutionDescription (group', return solution))
-- | Finds the best group for an instance on a multi-group cluster.
--
-- Only solutions in @preferred@ and @last_resort@ groups will be
-- accepted as valid, and additionally if the allowed groups parameter
-- is not null then allocation will only be run for those group
-- indices.
findBestAllocGroup :: AlgorithmOptions
                   -> Group.List           -- ^ The group list
                   -> Node.List            -- ^ The node list
                   -> Instance.List        -- ^ The instance list
                   -> Maybe [Gdx]          -- ^ The allowed groups
                   -> Instance.Instance    -- ^ The instance to allocate
                   -> Int                  -- ^ Required number of nodes
                   -> Result (Group.Group, AllocSolution, [String])
findBestAllocGroup opts mggl mgnl mgil allowed_gdxs inst cnt =
  let -- pair every group with its (nodes, instances) partition
      groups_by_idx = splitCluster mgnl mgil
      groups = map (\(gid, d) -> (Container.find gid mggl, d)) groups_by_idx
      -- restrict to the allowed groups, when such a list was given
      groups' = maybe groups
                (\gs -> filter ((`elem` gs) . Group.idx . fst) groups)
                allowed_gdxs
      -- drop groups lacking a required network, collecting messages
      (groups'', filter_group_msgs) = filterValidGroups groups' inst
      -- run an allocation attempt per remaining group; note that the
      -- candidate nodes come from the group (nl) while tryAlloc gets
      -- the cluster-wide maps
      sols = map (\(gr, (nl, _)) ->
                    (gr, genAllocNodes opts mggl nl cnt False >>=
                         tryAlloc opts mgnl mgil inst))
             groups''::[(Group.Group, Result AllocSolution)]
      all_msgs = filter_group_msgs ++ concatMap solutionDescription sols
      goodSols = filterMGResults sols
      sortedSols = sortMGResults goodSols
  in case sortedSols of
       [] -> Bad $ if null groups'
               then "no groups for evacuation: allowed groups was " ++
                    show allowed_gdxs ++ ", all groups: " ++
                    show (map fst groups)
               else intercalate ", " all_msgs
       -- the sorted list puts the best (policy, score) first
       (final_group, final_sol):_ -> return (final_group, final_sol, all_msgs)
-- | Try to allocate an instance on a multi-group cluster.
tryMGAlloc :: AlgorithmOptions
           -> Group.List           -- ^ The group list
           -> Node.List            -- ^ The node list
           -> Instance.List        -- ^ The instance list
           -> Instance.Instance    -- ^ The instance to allocate
           -> Int                  -- ^ Required number of nodes
           -> Result AllocSolution -- ^ Possible solution list
tryMGAlloc opts mggl mgnl mgil inst cnt =
  findBestAllocGroup opts mggl mgnl mgil Nothing inst cnt >>=
    \(best_group, solution, all_msgs) ->
      -- record the chosen group at the head of the solution's log
      let chosen_msg = "Selected group: " ++ Group.name best_group
      in return $ solution { asLog = chosen_msg : all_msgs }
-- | Try to allocate an instance to a group.
tryGroupAlloc :: AlgorithmOptions
              -> Group.List           -- ^ The group list
              -> Node.List            -- ^ The node list
              -> Instance.List        -- ^ The instance list
              -> String               -- ^ The allocation group (name)
              -> Instance.Instance    -- ^ The instance to allocate
              -> Int                  -- ^ Required number of nodes
              -> Result AllocSolution -- ^ Solution
tryGroupAlloc opts mggl mgnl ngil gn inst cnt = do
  -- resolve the group name to its index first
  grp <- Container.findByName mggl gn
  (sol, msgs) <- findAllocation opts mggl mgnl ngil (Group.idx grp) inst cnt
  return $ sol { asLog = msgs }
-- | Try to allocate a list of instances on a multi-group cluster.
allocList :: AlgorithmOptions
          -> Group.List    -- ^ The group list
          -> Node.List     -- ^ The node list
          -> Instance.List -- ^ The instance list
          -> [(Instance.Instance, AllocDetails)] -- ^ The instances to
                                                 -- allocate
          -> AllocSolutionList -- ^ Accumulated solution list
          -> Result (Node.List, Instance.List,
                     AllocSolutionList) -- ^ The final solution list
allocList _ _ nl il [] result = Ok (nl, il, result)
allocList opts gl nl il ((inst, AllocDetails cnt mgrp):rest) result = do
  -- a named group forces group allocation; otherwise pick the best group
  ares <- maybe (tryMGAlloc opts gl nl il inst cnt)
                (\gname -> tryGroupAlloc opts gl nl il gname inst cnt)
                mgrp
  let placed = asSolution ares
  -- fold the chosen placement into the cluster state and recurse
  allocList opts gl (extractNl nl il placed) (updateIl il placed)
            rest ((inst, ares) : result)
-- | Change-group IAllocator mode main function.
--
-- This is very similar to 'tryNodeEvac', the only difference is that
-- we don't choose as target group the current instance group, but
-- instead:
--
--   1. at the start of the function, we compute which are the target
--   groups; either no groups were passed in, in which case we choose
--   all groups out of which we don't evacuate instance, or there were
--   some groups passed, in which case we use those
--
--   2. for each instance, we use 'findBestAllocGroup' to choose the
--   best group to hold the instance, and then we do what
--   'tryNodeEvac' does, except for this group instead of the current
--   instance group.
--
-- Note that the correct behaviour of this function relies on the
-- function 'nodeEvacInstance' to be able to do correctly both
-- intra-group and inter-group moves when passed the 'ChangeAll' mode.
tryChangeGroup :: AlgorithmOptions
               -> Group.List    -- ^ The cluster groups
               -> Node.List     -- ^ The node list (cluster-wide)
               -> Instance.List -- ^ Instance list (cluster-wide)
               -> [Gdx]         -- ^ Target groups; if empty, any
                                -- groups not being evacuated
               -> [Idx]         -- ^ List of instance (indices) to be evacuated
               -> Result (Node.List, Instance.List, EvacSolution)
tryChangeGroup opts gl ini_nl ini_il gdxs idxs =
  let -- groups the evacuated instances currently live in (by primary)
      evac_gdxs = nub $ map (instancePriGroup ini_nl .
                             flip Container.find ini_il) idxs
      -- candidate destinations: the requested groups (or all groups),
      -- minus the groups we evacuate from
      target_gdxs = (if null gdxs
                       then Container.keys gl
                       else gdxs) \\ evac_gdxs
      -- offline nodes may never receive instances
      offline = map Node.idx . filter Node.offline $ Container.elems ini_nl
      excl_ndx = foldl' (flip IntSet.insert) IntSet.empty offline
      -- per-group node index lists, for availableGroupNodes
      group_ndx = map (\(gdx, (nl, _)) -> (gdx, map Node.idx
                                                (Container.elems nl))) $
                  splitCluster ini_nl ini_il
      (fin_nl, fin_il, esol) =
        foldl' (\state@(nl, il, _) inst ->
                  let solution = do
                        let ncnt = Instance.requiredNodes $
                                   Instance.diskTemplate inst
                        -- pick the best target group for this instance
                        (grp, _, _) <- findBestAllocGroup opts gl nl il
                                       (Just target_gdxs) inst ncnt
                        let gdx = Group.idx grp
                        av_nodes <- availableGroupNodes group_ndx
                                    excl_ndx gdx
                        -- and evacuate the instance into that group
                        nodeEvacInstance defaultOptions
                            nl il ChangeAll inst gdx av_nodes
                  in updateEvacSolution state (Instance.idx inst) solution
               )
        (ini_nl, ini_il, emptyEvacSolution)
        (map (`Container.find` ini_il) idxs)
  in return (fin_nl, fin_il, reverseEvacSolution esol)
-- | Standard-sized allocation method.
--
-- This places instances of the same size on the cluster until we're
-- out of space. The result will be a list of identically-sized
-- instances.
iterateAllocSmallStep :: AlgorithmOptions -> AllocMethod
iterateAllocSmallStep opts nl il limit newinst allocnodes ixes cstats =
  let depth = length ixes
      -- synthesize a fresh name and index for the next instance
      newname = printf "new-%d" depth::String
      newidx = Container.size il
      newi2 = Instance.setIdx (Instance.setName newinst newname) newidx
      -- count down the remaining allocations, when limited
      newlimit = fmap (flip (-) 1) limit
      -- for DRBD instances, skip the global capacity check at each
      -- single allocation step
      opts' = if Instance.diskTemplate newi2 == DTDrbd8
                then opts { algCapacity = False }
                else opts
  in case tryAlloc opts' nl il newi2 allocnodes of
       Bad s -> Bad s
       Ok (AllocSolution { asFailures = errs, asSolution = sols3 }) ->
         let newsol = Ok (collapseFailures errs, nl, il, ixes, cstats) in
         case sols3 of
           -- no placement found: return what was accumulated so far
           Nothing -> newsol
           -- placed: recurse with the updated cluster state, unless
           -- the allocation budget is exhausted
           Just (xnl, xi, _, _) ->
             if limit == Just 0
               then newsol
               else iterateAllocSmallStep opts xnl (Container.add newidx xi il)
                    newlimit newinst allocnodes (xi:ixes)
                    (totalResources xnl:cstats)
-- | Guess a number of machines worth trying to put on the cluster in
-- one step. The goal is to guess a number close to the actual capacity
-- of the cluster but preferably not bigger, unless it is quite small
-- (as we don't want to do big steps smaller than 20).
guessBigstepSize :: Node.List -> Instance.Instance -> Int
guessBigstepSize nl inst =
  let mem_total = sum . map Node.availMem $ Container.elems nl
      -- raw fit, by total available memory alone
      raw_cap = mem_total `div` Instance.mem inst
      -- every node might miss fitting almost one instance by a tiny
      -- margin, so discount one instance per node...
      -- ...but never guess below the minimal worthwhile step of 20
  in max 20 (raw_cap - Container.size nl)
-- | A speed-up version of `iterateAllocSmallStep`.
--
-- This function returns precisely the same result as
-- `iterateAllocSmallStep`. However the computation is sped up by the
-- following heuristic: allocate a group of instances iteratively
-- without considering global N+1 redundancy; if the result of this is
-- globally N+1 redundant, then everything was OK in between and we can
-- continue from there. Only if that fails, do a step-by-step iterative
-- allocation.
-- In order to further speed up the computation while keeping it
-- robust, we first try (if the first argument is True) a number of
-- steps guessed from the node capacity, then, if that failed, a fixed
-- step size and only as last resort step-by-step iterative allocation.
iterateAlloc' :: Bool -> AlgorithmOptions -> AllocMethod
iterateAlloc' tryHugestep opts nl il limit newinst allocnodes ixes cstats =
  if not $ algCapacity opts
    -- no global capacity checking requested: the plain small-step
    -- iteration needs no speed-up trickery
    then iterateAllocSmallStep opts nl il limit newinst allocnodes ixes cstats
    else let bigstepsize = if tryHugestep
                             then guessBigstepSize nl newinst
                             else 10
             -- split the remaining budget into this big step and the
             -- budget left over afterwards
             (limit', newlimit) = maybe (Just bigstepsize, Nothing)
                                  (Just . min bigstepsize
                                   &&& Just . max 0 . flip (-) bigstepsize)
                                  limit
             -- run the big step without capacity checking
             opts' = opts { algCapacity = False }
         in case iterateAllocSmallStep opts' nl il limit'
                 newinst allocnodes ixes cstats of
              Bad s -> Bad s
              -- big step kept the cluster redundant: all intermediate
              -- states were fine as well, continue from here
              Ok res@(_, nl', il', ixes', cstats') | redundant opts nl' il' ->
                if newlimit == Just 0 || length ixes' == length ixes
                  then return res
                  else iterateAlloc' tryHugestep opts nl' il' newlimit newinst
                       allocnodes ixes' cstats'
              -- big step overshot: retry with the fixed step size,
              -- and finally fall back to one-by-one allocation
              _ -> if tryHugestep
                     then iterateAlloc' False opts nl il limit newinst allocnodes
                          ixes cstats
                     else iterateAllocSmallStep opts nl il limit newinst
                          allocnodes ixes cstats
-- | A speed-up version of `iterateAllocSmallStep`: first tries big
-- allocation steps (see `iterateAlloc'`), falling back to smaller
-- steps when needed; the result is the same.
iterateAlloc :: AlgorithmOptions -> AllocMethod
iterateAlloc = iterateAlloc' True
-- | Predicate whether shrinking a single resource can lead to a valid
-- allocation.
sufficesShrinking :: (Instance.Instance -> AllocSolution) -> Instance.Instance
                  -> FailMode -> Maybe Instance.Instance
sufficesShrinking allocFn inst fm =
  -- walk the successive shrinkings of the instance along resource fm:
  -- pair each candidate with its allocation attempt, keep going while
  -- the attempt still fails with fm or already succeeds, then drop
  -- the leading failures; the head (if any) is the first shrunk
  -- candidate that allocates
  case dropWhile (isNothing . asSolution . fst)
       . takeWhile (liftA2 (||) (elem fm . asFailures . fst)
                    (isJust . asSolution . fst))
       . map (allocFn &&& id) $
       iterateOk (`Instance.shrinkByType` fm) inst
  of x:_ -> Just . snd $ x
     _ -> Nothing
-- | Tiered allocation method.
--
-- This places instances on the cluster, and decreases the spec until
-- we can allocate again. The result will be a list of decreasing
-- instance specs.
tieredAlloc :: AlgorithmOptions -> AllocMethod
tieredAlloc opts nl il limit newinst allocnodes ixes cstats =
  case iterateAlloc opts nl il limit newinst allocnodes ixes cstats of
    Bad s -> Bad s
    Ok (errs, nl', il', ixes', cstats') ->
      let newsol = Ok (errs, nl', il', ixes', cstats')
          ixes_cnt = length ixes'
          -- update the remaining allocation budget
          (stop, newlimit) = case limit of
                               Nothing -> (False, Nothing)
                               Just n -> (n <= ixes_cnt,
                                          Just (n - ixes_cnt))
          -- failure modes ordered by their associated count, ascending
          sortedErrs = map fst $ sortBy (comparing snd) errs
          suffShrink = sufficesShrinking
                       (fromMaybe emptyAllocSolution
                        . flip (tryAlloc opts nl' il') allocnodes)
                       newinst
          -- per failure mode (highest count first), the shrunk spec
          -- that would allocate again, if any exists
          bigSteps = filter isJust . map suffShrink . reverse $ sortedErrs
          -- did the recursive attempt actually place more instances?
          progress (Ok (_, _, _, newil', _)) (Ok (_, _, _, newil, _)) =
            length newil' > length newil
          progress _ _ = False
      in if stop then newsol else
           -- first try a single shrink along the last (highest-count)
           -- failure mode
           let newsol' = case Instance.shrinkByType newinst . last
                              $ sortedErrs of
                           Bad _ -> newsol
                           Ok newinst' -> tieredAlloc opts nl' il' newlimit
                                          newinst' allocnodes ixes' cstats'
           in if progress newsol' newsol then newsol' else
                -- otherwise jump directly to a spec known to allocate
                case bigSteps of
                  Just newinst':_ -> tieredAlloc opts nl' il' newlimit
                                     newinst' allocnodes ixes' cstats'
                  _ -> newsol
-- * Formatting functions
-- | Given the original and final nodes, computes the relocation description.
computeMoves :: Instance.Instance -- ^ The instance to be moved
             -> String -- ^ The instance name
             -> IMove -- ^ The move being performed
             -> String -- ^ New primary
             -> String -- ^ New secondary
             -> (String, [String])
             -- ^ Tuple of moves and commands list; moves is containing
             -- either @/f/@ for failover or @/r:name/@ for replace
             -- secondary, while the command list holds gnt-instance
             -- commands (without that prefix), e.g \"@failover instance1@\"
computeMoves i inam mv c d =
  case mv of
    Failover -> ("f", [mig])
    FailoverToAny _ -> (printf "fa:%s" c, [mig_any])
    FailoverAndReplace _ -> (printf "f r:%s" d, [mig, rep d])
    ReplaceSecondary _ -> (printf "r:%s" d, [rep d])
    ReplaceAndFailover _ -> (printf "r:%s f" c, [rep c, mig])
    ReplacePrimary _ -> (printf "f r:%s f" c, [mig, rep c, mig])
  where -- running instances are migrated, stopped ones failed over
        morf = if Instance.isRunning i then "migrate" else "failover"
        mig = printf "%s -f %s" morf inam::String
        mig_any = printf "%s -f -n %s %s" morf c inam::String
        rep n = printf "replace-disks -n %s %s" n inam::String
-- | Converts a placement to string format.
printSolutionLine :: Node.List     -- ^ The node list
                  -> Instance.List -- ^ The instance list
                  -> Int           -- ^ Maximum node name length
                  -> Int           -- ^ Maximum instance name length
                  -> Placement     -- ^ The current placement
                  -> Int           -- ^ The index of the placement in
                                   -- the solution
                  -> (String, [String])
printSolutionLine nl il nmlen imlen plc pos =
  let -- width of a "pri:sec" column: two node names plus a colon
      pmlen = (2*nmlen + 1)
      (i, p, s, mv, c) = plc
      old_sec = Instance.sNode inst
      inst = Container.find i il
      inam = Instance.alias inst
      -- aliases of the new and old primary/secondary nodes
      npri = Node.alias $ Container.find p nl
      nsec = Node.alias $ Container.find s nl
      opri = Node.alias $ Container.find (Instance.pNode inst) nl
      osec = Node.alias $ Container.find old_sec nl
      (moves, cmds) = computeMoves inst inam mv npri nsec
      -- FIXME: this should check instead/also the disk template
      ostr = if old_sec == Node.noSecondary
               then printf "%s" opri::String
               else printf "%s:%s" opri osec::String
      nstr = if s == Node.noSecondary
               then printf "%s" npri::String
               else printf "%s:%s" npri nsec::String
  in (printf "  %3d. %-*s %-*s => %-*s %12.8f a=%s"
      pos imlen inam pmlen ostr pmlen nstr c moves,
      cmds)
-- | Return the instance and involved nodes in an instance move.
--
-- Note that the output list length can vary, and is not required nor
-- guaranteed to be of any specific length.
involvedNodes :: Instance.List -- ^ Instance list, used for retrieving
                               -- the instance from its index; note
                               -- that this /must/ be the original
                               -- instance list, so that we can
                               -- retrieve the old nodes
              -> Placement     -- ^ The placement we're investigating,
                               -- containing the new nodes and
                               -- instance index
              -> [Ndx]         -- ^ Resulting list of node indices
involvedNodes il (idx, new_pri, new_sec, _, _) =
  let inst = Container.find idx il
      -- new nodes first, then whatever nodes the instance used before
      candidates = new_pri : new_sec : Instance.allNodes inst
  in nub $ filter (>= 0) candidates
-- | From two adjacent cluster tables get the list of moves that
-- transitions from one to the other.
getMoves :: (Table, Table) -> [MoveJob]
getMoves (Table _ initial_il _ initial_plc, Table final_nl _ _ final_plc) =
  let
    -- turn one placement into a move job: affected nodes, instance
    -- index, the move itself and the gnt-instance commands for it
    plctoMoves (plc@(idx, p, s, mv, _)) =
      let inst = Container.find idx initial_il
          inst_name = Instance.name inst
          affected = involvedNodes initial_il plc
          np = Node.alias $ Container.find p final_nl
          ns = Node.alias $ Container.find s final_nl
          (_, cmds) = computeMoves inst inst_name mv np ns
      in (affected, idx, mv, cmds)
  -- placements are prepended as they are made, so the initial table's
  -- placements form a suffix of the final list; the double reverse
  -- plus drop strips exactly that shared suffix, keeping the new
  -- moves in the same order they have in final_plc
  in map plctoMoves . reverse . drop (length initial_plc) $ reverse final_plc
-- | Inner function for splitJobs, that either appends the next job to
-- the current jobset, or starts a new jobset.
mergeJobs :: ([JobSet], [Ndx]) -> MoveJob -> ([JobSet], [Ndx])
-- no jobset yet: open one holding just this move
mergeJobs ([], _) n@(ndx, _, _, _) = ([[n]], ndx)
mergeJobs (cjs@(j:js), nbuf) n@(ndx, _, _, _)
  -- the move touches none of the nodes already used by the current
  -- jobset, so it is independent: add it and mark its nodes as used
  | null (ndx `intersect` nbuf) = ((n:j):js, ndx ++ nbuf)
  -- node conflict: start a fresh jobset seeded with this move
  | otherwise = ([n]:cjs, ndx)
-- | Break a list of moves into independent groups. Note that this
-- will reverse the order of jobs.
--
-- Uses a strict left fold: the lazy 'foldl' would build a thunk chain
-- proportional to the number of moves (and the rest of this module
-- consistently uses 'foldl'' for accumulation).
splitJobs :: [MoveJob] -> [JobSet]
splitJobs = fst . foldl' mergeJobs ([], [])
-- | Format a single job of a jobset for display: per-job echo/check
-- marker lines, the job's commands prefixed with @gnt-instance@, and,
-- for the first job of a set, a preceding jobset header line.
formatJob :: Int -> Int -> (Int, MoveJob) -> [String]
formatJob jsn jsl (sn, (_, _, _, cmds)) =
  let out =
        printf "  echo job %d/%d" jsn sn:
        printf "  check":
        map ("  gnt-instance " ++) cmds
  in if sn == 1
       -- the first job in a jobset carries the jobset header
       then ["", printf "echo jobset %d, %d jobs" jsn jsl] ++ out
       else out
-- | Render a list of jobsets as display text: each job is formatted
-- via 'formatJob' (which prefixes commands with @gnt-instance@ and
-- adds the jobset headers), numbering jobsets and jobs from 1.
formatCmds :: [JobSet] -> String
formatCmds jsets =
  unlines $ concat [ formatJob setno (length js) numbered_job
                   | (setno, js) <- zip [1..] jsets
                   , numbered_job <- zip [1..] js ]
-- | Print the node list.
printNodes :: Node.List -> [String] -> String
printNodes nl fs =
  printTable "" header (map (Node.list fields) sorted_nodes) isnum
  where
    -- an empty field spec means the defaults; a leading "+" extends
    -- the defaults with the given fields
    fields = case fs of
               []         -> Node.defaultFields
               "+" : more -> Node.defaultFields ++ more
               _          -> fs
    sorted_nodes = sortBy (comparing Node.idx) (Container.elems nl)
    (header, isnum) = unzip (map Node.showHeader fields)
-- | Print the instance list.
printInsts :: Node.List -> Instance.List -> String
printInsts nl il =
  let sil = sortBy (comparing Instance.idx) (Container.elems il)
      -- one table row per instance: run flag, names, node placement,
      -- auto-balance flag, static resources and dynamic load values
      helper inst = [ if Instance.isRunning inst then "R" else " "
                    , Instance.name inst
                    , Container.nameOf nl (Instance.pNode inst)
                    , let sdx = Instance.sNode inst
                      in if sdx == Node.noSecondary
                           then  ""
                           else Container.nameOf nl sdx
                    , if Instance.autoBalance inst then "Y" else "N"
                    , printf "%3d" $ Instance.vcpus inst
                    , printf "%5d" $ Instance.mem inst
                    , printf "%5d" $ Instance.dsk inst `div` 1024
                    , printf "%5.3f" lC
                    , printf "%5.3f" lM
                    , printf "%5.3f" lD
                    , printf "%5.3f" lN
                    ]
          where DynUtil lC lM lD lN = Instance.util inst
      header = [ "F", "Name", "Pri_node", "Sec_node", "Auto_bal"
               , "vcpu", "mem" , "dsk", "lCpu", "lMem", "lDsk", "lNet" ]
      -- the first five columns are text, the rest are numeric
      isnum = False:False:False:False:False:repeat True
  in printTable "" header (map helper sil) isnum
-- * Node group functions
-- | Computes the group of an instance.
--
-- Fails when the instance's primary and secondary nodes live in
-- different node groups (a \"split\" instance).
instanceGroup :: Node.List -> Instance.Instance -> Result Gdx
instanceGroup nl i =
  let sidx = Instance.sNode i
      pnode = Container.find (Instance.pNode i) nl
      -- instances without a secondary trivially live in one group
      snode = if sidx == Node.noSecondary
            then pnode
            else Container.find sidx nl
      pgroup = Node.group pnode
      sgroup = Node.group snode
  in if pgroup /= sgroup
       -- fix: the error message used to misspell "across"
       then fail ("Instance placed across two node groups, primary " ++
                  show pgroup ++ ", secondary " ++ show sgroup)
       else return pgroup
-- | Compute the list of badly allocated instances (split across node
-- groups).
findSplitInstances :: Node.List -> Instance.List -> [Instance.Instance]
findSplitInstances nl il =
  [ inst | inst <- Container.elems il, not . isOk $ instanceGroup nl inst ]
|
mbakke/ganeti
|
src/Ganeti/HTools/Cluster.hs
|
bsd-2-clause
| 47,810
| 257
| 23
| 15,004
| 9,775
| 5,401
| 4,374
| -1
| -1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.