code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Network.Haskoin.Crypto.Keys.Tests (tests) where
import qualified Crypto.Secp256k1 as EC
import qualified Data.ByteString as BS
import Data.Serialize (encode, runGet, runPut)
import Data.String (fromString)
import Data.String.Conversions (cs)
import Network.Haskoin.Crypto
import Network.Haskoin.Internals (PrvKeyI (..),
PubKeyI (..))
import Network.Haskoin.Test
import Network.Haskoin.Util
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
-- | Full property-test suite for keys: canonical public-key serialization,
-- secret-key round-trips, WIF encoding, compression flags, and the
-- Read\/Show\/IsString instances for both public and private keys.
tests :: [Test]
tests =
    [ testGroup
        "PubKey Binary"
        [ testProperty "is public key canonical" $
          forAll arbitraryPubKey (isCanonicalPubKey . snd)
        , testProperty "makeKey . toKey" makeToKey
        , testProperty "makeKeyU . toKey" makeToKeyU
        ]
    , testGroup
        "Key formats"
        [ testProperty "fromWif . toWif PrvKey" $
          forAll arbitraryPrvKey $ \pk -> fromWif (toWif pk) == Just pk
        , testProperty "constant 32-byte encoding PrvKey" $
          forAll arbitraryPrvKey binaryPrvKey
        ]
    , testGroup
        "Key compression"
        [ testProperty "Compressed public key" testCompressed
        , testProperty "Uncompressed public key" testUnCompressed
        , testProperty "Compressed private key" testPrivateCompressed
        , testProperty "Uncompressed private key" testPrivateUnCompressed
        ]
    , testGroup
        "From/To strings"
        -- Each property below checks a round-trip through a textual
        -- representation (Show, hex, or WIF) back to the original key.
        [ testProperty "Read/Show public key" $
          forAll arbitraryPubKey $ \(_, k) -> read (show k) == k
        , testProperty "Read/Show compressed public key" $
          forAll arbitraryPubKeyC $ \(_, k) -> read (show k) == k
        , testProperty "Read/Show uncompressed public key" $
          forAll arbitraryPubKeyU $ \(_, k) -> read (show k) == k
        , testProperty "Read/Show private key" $
          forAll arbitraryPrvKey $ \k -> read (show k) == k
        , testProperty "Read/Show compressed private key" $
          forAll arbitraryPrvKeyC $ \k -> read (show k) == k
        , testProperty "Read/Show uncompressed private key" $
          forAll arbitraryPrvKeyU $ \k -> read (show k) == k
        , testProperty "From string public key" $
          forAll arbitraryPubKey $ \(_, k) ->
              fromString (cs . encodeHex $ encode k) == k
        , testProperty "From string compressed public key" $
          forAll arbitraryPubKeyC $ \(_, k) ->
              fromString (cs . encodeHex $ encode k) == k
        , testProperty "From string uncompressed public key" $
          forAll arbitraryPubKeyU $ \(_, k) ->
              fromString (cs . encodeHex $ encode k) == k
        , testProperty "From string private key" $
          forAll arbitraryPrvKey $ \k -> fromString (cs $ toWif k) == k
        , testProperty "From string compressed private key" $
          forAll arbitraryPrvKeyC $ \k -> fromString (cs $ toWif k) == k
        , testProperty "From string uncompressed private key" $
          forAll arbitraryPrvKeyU $ \k -> fromString (cs $ toWif k) == k
        ]
    ]
-- Mirrors the IsCanonicalPubKey check from
-- github.com/bitcoin/bitcoin/blob/master/src/script.cpp
-- | A serialized public key is canonical when it is either a 65-byte
-- uncompressed key (0x04 prefix) or a 33-byte compressed key
-- (0x02\/0x03 prefix).
isCanonicalPubKey :: PubKey -> Bool
isCanonicalPubKey p = longEnough && prefixConsistent
  where
    serialized = encode p
    len = BS.length serialized
    -- Guarded by 'longEnough', so indexing byte 0 is safe when evaluated.
    leadByte = BS.index serialized 0
    -- Too short to be any valid encoding.
    longEnough = len >= 33
    prefixConsistent
      | leadByte == 4 = len == 65          -- uncompressed
      | leadByte `elem` [2, 3] = len == 33 -- compressed
      | otherwise = False                  -- neither compressed nor uncompressed
-- | Wrapping a secp256k1 secret key with 'makePrvKey' must preserve it.
makeToKey :: EC.SecKey -> Bool
makeToKey i = prvKeySecKey (makePrvKey i) == i
-- | Same preservation property for the uncompressed-key constructor.
makeToKeyU :: EC.SecKey -> Bool
makeToKeyU i = prvKeySecKey (makePrvKeyU i) == i
{- Key formats -}
-- | Round-trip a private key through both the monadic get\/put pair and
-- the plain encode\/decode pair, keeping its compression flag.
binaryPrvKey :: PrvKey -> Bool
binaryPrvKey k =
    (Right k == runGet (prvKeyGetMonad f) (runPut $ prvKeyPutMonad k)) &&
    (Just k == decodePrvKey f (encodePrvKey k))
  where
    -- Decoder constructor matching the original key's compression flag.
    f = makePrvKeyG (prvKeyCompressed k)
{- Key Compression -}
-- | Public keys derived from compressed private keys are compressed.
testCompressed :: EC.SecKey -> Bool
testCompressed n =
    pubKeyCompressed (derivePubKey $ makePrvKey n) &&
    pubKeyCompressed (derivePubKey $ makePrvKeyG True n)
-- | Public keys derived from uncompressed private keys are uncompressed.
testUnCompressed :: EC.SecKey -> Bool
testUnCompressed n =
    not (pubKeyCompressed $ derivePubKey $ makePrvKeyG False n) &&
    not (pubKeyCompressed $ derivePubKey $ makePrvKeyU n)
-- | Default and explicit compressed constructors mark keys compressed.
testPrivateCompressed :: EC.SecKey -> Bool
testPrivateCompressed n =
    prvKeyCompressed (makePrvKey n) &&
    prvKeyCompressed (makePrvKeyC n)
-- | Uncompressed constructors mark keys uncompressed.
testPrivateUnCompressed :: EC.SecKey -> Bool
testPrivateUnCompressed n =
    not (prvKeyCompressed $ makePrvKeyG False n) &&
    not (prvKeyCompressed $ makePrvKeyU n)
| plaprade/haskoin | haskoin-core/test/Network/Haskoin/Crypto/Keys/Tests.hs | unlicense | 5,082 | 0 | 14 | 1,505 | 1,289 | 671 | 618 | 95 | 1 |
module BOM where
import Control.Applicative
import Control.Monad.State
import Control.Monad.RWS
import Control.Monad.Logic
import Data.Function
import Data.List
import Data.Monoid
import Data.Ord
import qualified Data.Map as M
-- | Bill of materials: (quantity per board, candidate part listings).
bom =
  [ (1, pcb)
  , (3, al8805)
  , (3, decouplingCapacitor)
  , (3, outputCapacitor)
  , (3, diode)
  , (3, inductor)
  , (3, currentSenseResistor)
  ]
-- Board area in square inches (0.7 x 1.1) -- TODO confirm dimensions.
pcb = oshParkPCB (0.7 * 1.1) "Picobuck clone"
-- Each part below lists (quantity-break, unit price in USD) pairs.
al8805 = basicPart mouser "AL8805W5-7"
  [ (1, 0.96)
  , (10, 0.769)
  , (100, 0.668)
  , (250, 0.591)
  , (500, 0.524)
  , (1000, 0.414)
  ]
decouplingCapacitor = basicPart mouser "UMK316AB7475KL-T"
  [ (1, 0.46)
  , (10, 0.201)
  , (100, 0.078)
  , (1000, 0.063)
  ]
outputCapacitor = basicPart mouser "C1608X5R1H105K080AB"
  [ (1, 0.17)
  , (10, 0.075)
  , (100, 0.045)
  , (1000, 0.038)
  ]
diode = basicPart mouser "SS25S-E3/5AT"
  [ (1, 0.15)
  , (10, 0.13)
  , (50, 0.123)
  , (100, 0.11)
  , (750, 0.089)
  , (1500, 0.076)
  , (3000, 0.065)
  ]
inductor = basicPart mouser "SRN6045-330M"
  [ (1, 0.4)
  , (10, 0.293)
  , (100, 0.27)
  , (200, 0.257)
  , (500, 0.189)
  , (1000, 0.162)
  , (2000, 0.16)
  , (5000, 0.15)
  ]
currentSenseResistor = basicPart mouser "CRL0805-FW-R300ELF"
  [ (1, 0.38)
  , (10, 0.317)
  , (100, 0.171)
  , (1000, 0.13)
  ]
---------------------------------------
-- | A vendor with a flat shipping charge per order.
data Supplier = Supplier
  { supplierName :: String
  , shipping :: Double -- TODO: [(Integer, Part)] -> Double
  } deriving (Eq, Ord, Read, Show)
-- Known suppliers with their flat per-order shipping cost (USD).
mouser = Supplier "Mouser" 4.99
oshPark = Supplier "OSH Park" 0
digikey = Supplier "Digikey" 5.47
newark = Supplier "Newark" 8.50
-- | Two ordering options for an OSH Park board of area @sz@:
-- the prototype service (3 boards, $5\/sq.in. split over 3) and the
-- medium-run service (increments of 10; minimum order scaled so the
-- order meets OSH Park's minimum -- presumably $150; verify).
oshParkPCB sz boardName =
  [ Part
    { supplier = oshPark
    , partNo = boardName
    , minimumQty = 3
    , increment = 3
    , price = sz * 5 / 3
    }
  , Part
    { supplier = oshPark
    , partNo = boardName
    , minimumQty = 10 * ceiling (15 / sz)
    , increment = 10
    , price = sz
    }
  ]
-- | One purchasable listing: a supplier's part number with its minimum
-- order quantity, order increment, and unit price at that break.
data Part = Part
  { supplier :: Supplier
  , partNo :: String
  , minimumQty :: Integer
  , increment :: Integer
  , price :: Double
  } deriving (Eq, Ord, Read, Show)
-- | Expand a list of (minimum-order-quantity, unit-price) breaks into
-- one 'Part' listing per break, each ordered in increments of 1.
basicPart supp num breaks = map listingFor breaks
  where
    listingFor (moq, unitPrice) = Part supp num moq 1 unitPrice
---------------------------------------
-- | Per-unit cost of building @qty@ assemblies, optionally folding the
-- shipping charges into the total.
unitCost withShipping bom qty = cost / fromIntegral qty
  where
    cost | withShipping = pCost + sCost
         | otherwise    = pCost
    (_, pCost, sCost) = selectBOM bom qty
-- Projections of 'selectBOM' results: chosen parts, total order cost,
-- parts-only cost, and shipping-only cost.
selectParts  bom = (\(a,_,_) -> a    ) . selectBOM bom
orderCost    bom = (\(_,b,c) -> b + c) . selectBOM bom
partsCost    bom = (\(_,b,_) -> b    ) . selectBOM bom
shippingCost bom = (\(_,_,c) -> c) . selectBOM bom
-- | Choose the cheapest combination of part listings (and therefore
-- suppliers) for building @qty@ assemblies. Enumerates every
-- accept\/reject choice of suppliers via the Logic monad (see
-- 'selectSupplier'), tallying shipping in the RWST writer, and keeps
-- the combination minimizing parts cost plus shipping.
selectBOM parts qty = (bom, sum partCosts, shippingCost)
  where
    totalCost ((_, x), Sum y) = sum x + y
    ((bom, partCosts), Sum shippingCost) = minimumBy (comparing totalCost) $
        map (\(a,b) -> (unzip a, b)) $
        observeAll $
        (\x -> evalRWST x () M.empty) $
        flip mapM parts $ \(count, part) -> do
            selectPart part (count * qty)
-- | Pick the best listing for one BOM line among the listings whose
-- supplier was accepted in this branch of the search; fail the branch
-- ('empty') when no accepted supplier carries the part.
selectPart parts qty = do
  let suppliers = nub (map supplier parts)
  selected <- map supplierName <$> filterM selectSupplier suppliers
  let selectedParts = filter (flip elem selected . supplierName . supplier) parts
  if null selectedParts
    then empty
    else pure (selectPart' selectedParts qty)
-- | Among candidate listings, choose the one with the lowest extended
-- price (ties broken by larger actual quantity), rounding the purchase
-- quantity up to the listing's minimum plus whole increments.
selectPart' parts qty = ((actualQty part, part), extendedPrice part)
  where
    part = minimumBy cmpParts parts
    -- Units needed beyond the listing's minimum order quantity.
    extras part = max 0 (qty - minimumQty part)
    increments part = ceiling (fromIntegral (extras part) / fromIntegral (increment part))
    actualQty part = minimumQty part + increments part * increment part
    extendedPrice part = price part * fromIntegral (actualQty part)
    -- minimize price, break ties by maximizing qty
    cmpParts = mconcat
        [ comparing extendedPrice
        , flip (comparing actualQty)
        ]
-- nondeterministically accept/reject each supplier,
-- remembering the choice and (if accepting) tallying
-- the shipping cost
-- The memo map in the state keeps the decision consistent across every
-- BOM line within one branch of the search.
selectSupplier s = do
  mbPrev <- gets (M.lookup (supplierName s))
  case mbPrev of
    Just prev -> return prev
    Nothing -> do
      accept <- pure True <|> pure False
      modify (M.insert (supplierName s) accept)
      when accept (tell (Sum (shipping s)))
      return accept
| mokus0/schematics | picobuck/BOM.hs | unlicense | 4,819 | 0 | 18 | 1,598 | 1,579 | 883 | 696 | 130 | 2 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ParallelListComp #-}
module Language.K3.Codegen.CPP.Declaration where
import Control.Arrow ((&&&))
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import qualified Data.List as L
import qualified Data.Map as M
import Language.K3.Core.Annotation
import Language.K3.Core.Annotation.Syntax
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Type
import qualified Language.K3.Core.Constructor.Declaration as D
import qualified Language.K3.Core.Constructor.Type as T
import Language.K3.Codegen.CPP.Expression
import Language.K3.Codegen.CPP.Primitives
import Language.K3.Codegen.CPP.Types
import Language.K3.Codegen.CPP.Materialization.Hints
import qualified Language.K3.Codegen.CPP.Representation as R
import Language.K3.Utils.Pretty
-- Builtin names to explicitly skip.
skip_builtins :: [String]
skip_builtins = ["hasRead", "doRead", "doBlockRead", "hasWrite", "doWrite"]
-- | Generate C++ definitions for one K3 declaration. Clause order
-- matters: view patterns dispatch on the declaration's shape, from most
-- to least specific.
declaration :: K3 Declaration -> CPPGenM [R.Definition]
declaration (tna -> ((DGlobal n (tnc -> (TSource, [t])) _), as)) = return []
-- Sinks with a valid body are handled in the same way as triggers.
declaration d@(tna -> (DGlobal i (tnc -> (TSink, [t])) (Just e), as)) = do
  declaration $ D.global i (T.function t T.unit) $ Just e
declaration (tag -> DGlobal i (tag -> TSink) Nothing) =
  throwE $ CPPGenE $ unwords ["Invalid sink trigger", i, "(missing body)"]
-- Global functions without implementations -- Built-Ins.
declaration (tag -> DGlobal name t@(tag -> TFunction) Nothing)
    | name `elem` skip_builtins = return []
    | any (`L.isSuffixOf` name) source_builtins = genSourceBuiltin t name >>= return . replicate 1
    | otherwise = return []
-- Global polymorphic functions without implementations -- Built-Ins
declaration (tag -> DGlobal _ (tag &&& children -> (TForall _, [tag &&& children -> (TFunction, [_, _])]))
              Nothing) = return []
-- Global monomorphic function with direct implementations.
declaration (tag -> DGlobal i t@(tag &&& children -> (TFunction, [ta, tr]))
              (Just e@(tag &&& children -> (ELambda x, [body]))))
    -- processRole is emitted as a free function, not a global lambda.
    | i == "processRole" = do
          ([], R.Lambda _ mits _ _ body) <- inline e
          return $ [R.FunctionDefn (R.Name i) [(Nothing, R.Reference $ R.Const $ R.Unit)] (Just R.Unit) [] True body]
    | otherwise = do
          ([], e') <- inline e
          ct <- R.flattenFnType <$> genCType t
          return [R.GlobalDefn $ R.Forward $ R.ScalarDecl (R.Name i) ct $ Just e']
-- Global polymorphic functions with direct implementations.
declaration (tag -> DGlobal i (tag &&& children -> (TForall _, [tag &&& children -> (TFunction, [ta, tr])]))
              (Just e@(tag &&& children -> (ELambda x, [body])))) = do
  returnType <- genCInferredType tr
  -- A declared type variable argument becomes a C++ template parameter.
  (argumentType, template) <- case tag ta of
                     TDeclaredVar t -> return (R.Named (R.Name t), Just t)
                     _ -> genCType ta >>= \cta -> return (cta, Nothing)
  let templatize = maybe id (\t -> R.TemplateDefn [(t, Nothing)]) template
  addForward $ maybe id (\t -> R.TemplateDecl [(t, Nothing)]) template $
    R.FunctionDecl (R.Name i) [argumentType] returnType
  -- mtrlzns <- case e @~ isEMaterialization of
  --              Just (EMaterialization ms) -> return ms
  --              Nothing -> return $ M.fromList [(x, defaultDecision)]
  -- Materialization hints pick the C++ argument passing convention.
  let argMtrlznType = case getInMethodFor x e of
                        ConstReferenced -> R.Reference (R.Const argumentType)
                        Referenced -> R.Reference argumentType
                        Forwarded -> R.RValueReference argumentType
                        _ -> argumentType
  body' <- reify (RReturn False) body
  return [templatize $ R.FunctionDefn (R.Name i) [(Just x, argMtrlznType)] (Just returnType) [] False body']
-- Global scalars.
declaration d@(tag -> DGlobal i t me) = do
  globalType <- genCType t
  let pinned = isJust $ d @~ (\case { DProperty (dPropertyV -> ("Pinned", Nothing)) -> True; _ -> False })
  let globalType' = if pinned then R.Static globalType else globalType
  -- Need to declare static members outside of class scope
  let staticGlobalDecl = [R.Forward $ R.ScalarDecl
                       (R.Qualified (R.Name "__global_context") (R.Name i))
                       globalType
                       Nothing
                     | pinned]
  addStaticDeclaration staticGlobalDecl
  -- Initialize the variable.
  let rName = RName (R.Variable $ R.Name $ if pinned then "__global_context::" ++ i else i) Nothing
  globalInit <- maybe (return []) (liftM (addSetCheck pinned i) . reify rName) me
  -- Add to proper initialization list
  let addFn = if pinned then addStaticInitialization else addGlobalInitialization
  addFn globalInit
  -- Add any annotation to the state
  when (tag t == TCollection) $ addComposite (namedTAnnotations $ annotations t) (head $ children t)
  -- Return the class-scope-declaration including the set variable if needed
  -- NOTE(review): 'if False' always takes the else branch; the guard
  -- looks like a leftover toggle -- confirm before simplifying.
  let setOp = if False then [] else
        [R.GlobalDefn $ R.Forward $ R.ScalarDecl
         (R.Name $ setName i) (R.Primitive R.PBool) (Just $ R.Literal $ R.LBool False)]
  return $ (R.GlobalDefn $ R.Forward $ R.ScalarDecl (R.Name i) globalType' Nothing):setOp
 where
   setName n = "__"++n++"_set__"
   -- Wrap an initializer so it runs only once, guarded by the set flag.
   addSetCheck pinned n f = if pinned then f else
     [R.IfThenElse
       (R.Unary "!" $ R.Variable $ R.Name $ setName n)
       (f ++ [R.Assignment (R.Variable $ R.Name $ setName n) (R.Literal $ R.LBool True)])
       []]
-- Triggers are implementationally identical to functions returning unit, except they also generate
-- dispatch wrappers.
declaration (tag -> DTrigger i t e) = declaration (D.global i (T.function t T.unit) (Just e))
declaration (tag -> DDataAnnotation i _ amds) = addAnnotation i amds >> return []
declaration (tag -> DRole _) = throwE $ CPPGenE "Roles below top-level are deprecated."
declaration _ = return []
-- Generated Builtins
-- Interface for source builtins.
-- Map special builtin suffix to a function that will generate the builtin.
-- These suffixes are taken from L.K3.Parser.ProgramBuilder.hs
source_builtin_map :: [(String, (String -> K3 Type -> String -> CPPGenM R.Definition))]
source_builtin_map = [("MuxHasRead", genHasRead True Nothing),
                      ("MuxRead", genDoRead True Nothing),
                      ("PDataHasRead", genHasRead True Nothing),
                      ("PDataRead", genDoRead True Nothing),
                      ("POrdHasRead", genHasRead False $ Just "order"),
                      ("POrdRead", genDoRead False $ Just "order"),
                      ("HasRead", genHasRead False Nothing),
                      ("Read", genDoRead False Nothing),
                      ("HasWrite", genHasWrite),
                      ("Write", genDoWrite)]
                      ++ extraSuffixes
  -- These suffixes are for data loading hacks.
  -- TODO this needs refactoring, big time
  -- Flags below appear to be: elemWrap fixedSize projectedLoader
  -- asReturn addMeta addMultiplicity separator -- confirm against genLoader.
  where extraSuffixes = [("Loader", genLoader False False False False False False ","),
                         ("LoaderC", genLoader False False True False False False ","),
                         ("LoaderF", genLoader False True False False False False ","),
                         ("LoaderFC", genLoader False True True False False False ","),
                         ("LoaderE", genLoader True False False False False False ","),
                         ("LoaderP", genLoader False False False False False False "|"),
                         ("LoaderPC", genLoader False False True False False False "|"),
                         ("LoaderPF", genLoader False True False False False False "|"),
                         ("LoaderPFC", genLoader False True True False False False "|"),
                         ("LoaderRP", genLoader False False False True False False "|"),
                         ("LoaderPE", genLoader True False False False False False "|"),
                         ("LoaderMPC", genLoader False False True False True False "|"),
                         ("LoaderMosaic", genLoader False False False False False True "|"),
                         ("Logger", genLogger)]
-- | All recognized builtin suffixes.
source_builtins :: [String]
source_builtins = map fst source_builtin_map
-- | Remove @suffix@ from the end of @name@. Calls 'error' when @name@
-- does not actually end with @suffix@.
stripSuffix :: String -> String -> String
stripSuffix suffix name =
  case L.stripPrefix (reverse suffix) (reverse name) of
    Just remainder -> reverse remainder
    Nothing        -> error "not a suffix!"
-- | Generate the definition for a builtin by matching its name against
-- the known suffixes. 'head' is safe only because callers filter names
-- through 'source_builtins' first (see 'declaration').
genSourceBuiltin :: K3 Type -> Identifier -> CPPGenM R.Definition
genSourceBuiltin typ name = do
    suffix <- return $ head $ filter (\y -> y `L.isSuffixOf` name) source_builtins
    f <- return $ getSourceBuiltin suffix
    f typ name
-- Grab the generator function from the map, currying the key of the builtin to be generated.
-- Calls 'error' when the suffix is unknown; callers only pass suffixes
-- drawn from 'source_builtins', which is derived from the same map.
getSourceBuiltin :: String -> K3 Type -> String -> CPPGenM R.Definition
getSourceBuiltin k =
    case lookup k source_builtin_map of
        -- Fixed: the original message was missing the space before the key.
        Nothing -> error $ "Could not find builtin with name " ++ k
        Just f  -> f k
-- | Generate a @...HasRead@ builtin: a function returning whether the
-- named channel has data available. For multiplexed sources the channel
-- id is built at runtime from a @muxid@ argument; otherwise it is a
-- string literal (optionally with a fixed suffix such as "order").
genHasRead :: Bool -> Maybe String -> String -> K3 Type -> String -> CPPGenM R.Definition
genHasRead asMux chanIdSuffixOpt suf _ name = do
  concatOp <- binarySymbol OConcat
  let source_name = stripSuffix suf name
  let e_has_r = R.Variable $ R.Name "hasRead"
  let source_pfx = source_name ++ if asMux || isJust chanIdSuffixOpt then "_" else ""
  let chan_id_e = case (chanIdSuffixOpt, asMux) of
                    (Just chan_suf, _) -> R.Literal $ R.LString $ source_pfx ++ chan_suf
                    (Nothing, True) -> R.Binary concatOp (R.Literal $ R.LString source_pfx) $
                                         R.Call (R.Variable $ R.Name "itos")
                                                [R.Variable $ R.Name "muxid"]
                    (_, _) -> R.Literal $ R.LString source_pfx
  let body = R.Return $ R.Call e_has_r [R.Variable $ R.Name "me", chan_id_e]
  let args = if asMux then [(Just "muxid", R.Primitive R.PInt)]
             else [(Just "_", R.Named $ R.Name "unit_t")]
  return $ R.FunctionDefn (R.Name name) args (Just $ R.Primitive R.PBool) [] False [body]
-- | Generate a @...Read@ builtin: a function reading one value from the
-- named channel, specialized on the builtin's return type. Channel-id
-- construction mirrors 'genHasRead'.
genDoRead :: Bool -> Maybe String -> String -> K3 Type -> String -> CPPGenM R.Definition
genDoRead asMux chanIdSuffixOpt suf typ name = do
    concatOp <- binarySymbol OConcat
    ret_type <- genCType $ last $ children typ
    let source_name = stripSuffix suf name
    let source_pfx = source_name ++ if asMux || isJust chanIdSuffixOpt then "_" else ""
    let chan_id_e = case (chanIdSuffixOpt, asMux) of
                      (Just chan_suf, _) -> R.Literal $ R.LString $ source_pfx ++ chan_suf
                      (Nothing, True) -> R.Binary concatOp (R.Literal $ R.LString source_pfx) $
                                           R.Call (R.Variable $ R.Name "itos")
                                                  [R.Variable $ R.Name "muxid"]
                      (_, _) -> R.Literal $ R.LString source_pfx
    let body = R.Return $ (R.Call (R.Variable (R.Specialized [ret_type] $ R.Name "doRead"))
                          [R.Variable $ R.Name "me", chan_id_e])
    let args = if asMux then [(Just "muxid", R.Primitive R.PInt)]
               else [(Just "_", R.Named $ R.Name "unit_t")]
    return $ R.FunctionDefn (R.Name name) args (Just ret_type) [] False [body]
-- | Generate a @...HasWrite@ builtin: returns whether the named sink
-- can accept a write.
genHasWrite :: String -> K3 Type -> String -> CPPGenM R.Definition
genHasWrite suf _ name = do
  let sink_name = stripSuffix suf name
  let e_has_w = R.Variable (R.Name "hasWrite")
  let body = R.Return $ R.Call e_has_w [R.Variable $ R.Name "me", R.Literal $ R.LString sink_name]
  return $ R.FunctionDefn (R.Name $ sink_name ++ suf) [(Just "_", R.Named $ R.Name "unit_t")]
             (Just $ R.Primitive R.PBool) [] False [body]
-- | Generate a @...Write@ builtin: writes one value (passed by const
-- reference) to the named sink and returns unit.
genDoWrite :: String -> K3 Type -> String -> CPPGenM R.Definition
genDoWrite suf typ name = do
    val_type  <- genCType $ head $ children typ
    let sink_name = stripSuffix suf name
    let write_expr = R.Call (R.Variable $ (R.Specialized [val_type] $ R.Name "doWrite"))
                     [R.Variable $ R.Name "me", R.Literal $ R.LString sink_name, R.Variable $ R.Name "v"]
    return $ R.FunctionDefn (R.Name $ sink_name ++ suf) [(Just "v", R.Reference $ R.Const val_type)]
               (Just $ R.Named $ R.Name "unit_t") [] False
               ([R.Ignore write_expr, R.Return $ R.Initialization R.Unit []])
-- TODO: Loader is not quite valid K3. The collection should be passed by indirection so we are not working with a copy
-- (since the collection is technically passed-by-value)
-- | Generate a CSV\/DSV loader builtin that reads records from a list of
-- file paths into a collection. The six Bool flags select variants:
-- elemWrap (unwrap single-field element records), fixedSize (pre-resize
-- to a size argument), projectedLoader (load into a projected record
-- type), asReturn (return a fresh collection instead of filling one),
-- addMeta (set a "meta" field via a supplied function, making the defn
-- a template), addMultiplicity (set the last field to 1) -- inferred
-- from the code below; confirm against the K3 program builder.
genLoader :: Bool -> Bool -> Bool -> Bool -> Bool -> Bool ->  String -> String -> K3 Type -> String -> CPPGenM R.Definition
genLoader elemWrap fixedSize projectedLoader asReturn addMeta addMultiplicity sep suf ft@(children -> [_,f]) name = do
    void (genCType ft) -- Force full type to generate potential record/collection variants.
    (colType, recType, fullRecTypeOpt) <- return $ getColType f
    cColType       <- genCType colType
    cRecType       <- genCType recType
    cfRecType      <- maybe (return Nothing) (\t -> genCType t >>= return . Just) fullRecTypeOpt
    fields         <- getRecFields recType
    fullFieldsOpt  <- maybe (return Nothing) (\frt -> getRecFields frt >>= return . Just) fullRecTypeOpt
    br <- gets (boxRecords . flags)
    -- When records are boxed, field projection must go through a dereference.
    let bw = if br then R.Dereference else id
    let coll_name = stripSuffix suf name
    let bufferDecl = [R.Forward $ R.ScalarDecl (R.Name "tmp_buffer")
                      (R.Named $ R.Qualified (R.Name "std") (R.Name "string")) Nothing]
    -- Read one separated field into tmp_buffer; @b@ marks the last field
    -- (no separator), @skip@ fields are consumed but not assigned.
    let readField f t skip b = [ R.Ignore $ R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "getline")) $
                                 [ R.Variable (R.Name "in")
                                 , R.Variable (R.Name "tmp_buffer")
                                 ] ++ [R.Literal (R.LChar sep) | not b]
                               ] ++
                               (if skip then []
                                else [ R.Assignment (R.Project (bw $ R.Variable $ R.Name "record") (R.Name f))
                                       (typeMap t $ R.Variable $ R.Name "tmp_buffer")
                                     ])
    let recordDecl = [R.Forward $ R.ScalarDecl (R.Name "record") cRecType Nothing]
    let fts = uncurry zip fields
    let fullfts = fullFieldsOpt >>= return . uncurry zip
    -- For projected loads, fields absent from the target record are skipped.
    let ftsWSkip = maybe (map (\(x,y) -> (x, y, False)) fts)
                         (map (\(x,y) -> (x, y, (x /= "meta") && (x `notElem` (map fst fts)))))
                         fullfts
    let containerDecl = R.Forward $ R.ScalarDecl (R.Name "c2") cColType Nothing
    let container = R.Variable $ R.Name (if asReturn then "c2" else "c")
    let setMeta = R.Assignment (R.Project (bw $ R.Variable $ R.Name "record") (R.Name "meta")) (R.Call (R.Variable $ R.Name "mf") [R.Initialization R.Unit [] ])
    let setMult = R.Assignment (R.Project (R.Variable $ R.Name "record") (R.Name . last . fst $ fields)) (R.Literal $ R.LInt 1)
    let ftsToRead = if addMultiplicity then init ftsWSkip else ftsWSkip
    let recordGetLines = recordDecl
                         ++ concat [readField field ft skip False | (field, ft, skip) <- init ftsToRead]
                         ++ (\(a,b,c) -> readField a b c True) (last ftsToRead)
                         ++ (if addMeta then [setMeta] else [])
                         ++ (if addMultiplicity then [setMult] else [])
                         ++ [R.Return $ R.Variable $ R.Name "record"]
    let extraCaptures = if addMeta then [R.RefCapture (Just ("mf", Nothing))] else []
    let readRecordFn = R.Lambda ([R.ThisCapture] ++ extraCaptures)
                       [ (Just "in", (R.Reference $ R.Named $ R.Qualified (R.Name "std") (R.Name "istream")))
                       , (Just "tmp_buffer", (R.Reference $ R.Named $ (R.Qualified (R.Name "std") (R.Name "string"))))
                       ] False Nothing recordGetLines
    let rrm = if br then (++ "_boxed") else id
    let readRecordsCall = if asReturn
                          then R.Call (R.Variable $ R.Qualified (R.Name "K3") $ R.Name $ rrm "read_records_into_container")
                               [ R.Variable $ R.Name "paths"
                               , container
                               , readRecordFn ]
                          else
                            (if fixedSize
                             then R.Call (R.Variable $ R.Qualified (R.Name "K3") $ R.Name "read_records_with_resize")
                                  [ R.Variable $ R.Name "size"
                                  , R.Variable $ R.Name "paths"
                                  , container
                                  , readRecordFn
                                  ]
                             else R.Call (R.Variable $ R.Qualified (R.Name "K3") $ R.Name $ rrm "read_records")
                                  [ R.Variable $ R.Name "paths"
                                  , container
                                  , readRecordFn
                                  ])
    pathsType <- genCType ((T.collection $ T.record [("path", T.string)]) @+ TAnnotation "Collection")
    let defaultArgs = [(Just "paths", pathsType)]
    let args = defaultArgs
               ++ [(Just "c", R.Reference $ (if asReturn then R.Const else id) cColType)]
               ++ (if projectedLoader then [(Just "_rec", R.Reference $ fromJust cfRecType)] else [])
               ++ (if fixedSize then [(Just "size", R.Primitive R.PInt)] else [])
               ++ (if addMeta then [(Just "mf", R.Named $ R.Name "MetaFn")] else [])
    let returnType = if asReturn then cColType else R.Named $ R.Name "unit_t"
    let functionBody = if asReturn
                       then [ containerDecl, R.Return $ readRecordsCall ]
                       else [ R.Ignore $ readRecordsCall, R.Return $ R.Initialization R.Unit [] ]
    let defaultDefn = R.FunctionDefn (R.Name $ coll_name ++ suf) args (Just $ returnType) [] False functionBody
    return $ if addMeta then R.TemplateDefn [("MetaFn", Nothing)] defaultDefn else defaultDefn
  where
    -- Convert the string buffer to the field's C++ type.
    typeMap :: K3 Type -> R.Expression -> R.Expression
    typeMap (tag &&& (@~ isTDateInt) -> (TInt, Just _)) e =
      R.Call (R.Variable $ R.Name "tpch_date") [R.Call (R.Project e (R.Name "c_str")) []]
    typeMap (tag -> TInt) e = R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "atoi"))
                              [R.Call (R.Project e (R.Name "c_str")) []]
    typeMap (tag -> TReal) e = R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "atof"))
                               [R.Call (R.Project e (R.Name "c_str")) []]
    typeMap (tag -> _) x = x
    isTDateInt :: Annotation Type -> Bool
    isTDateInt (TProperty (tPropertyName -> "TPCHDate")) = True
    isTDateInt _ = False
    -- Pull (collection, element record, optional full record) from the
    -- loader's K3 function type, by arity and active flags.
    getColType = case fnArgs [] f of
                   [c, fr, sz] | projectedLoader && fixedSize -> colRecOfType c >>= \(x,y) -> return (x, y, Just fr)
                   [c, fr]     | projectedLoader              -> colRecOfType c >>= \(x,y) -> return (x, y, Just fr)
                   [c, fr, f]  | projectedLoader              -> colRecOfType c >>= \(x,y) -> return (x, y, Just fr)
                   [c, _]      | fixedSize                    -> colRecOfType c >>= \(x,y) -> return (x, y, Nothing)
                   [c]                                        -> colRecOfType c >>= \(x,y) -> return (x, y, Nothing)
                   _ -> type_mismatch
    fnArgs acc t@(tnc -> (TFunction, [a,r])) = fnArgs (acc++[a]) r
    fnArgs acc _ = acc
    colRecOfType c@(tnc -> (TCollection, [r])) | elemWrap  = return (c, removeElem r)
    colRecOfType c@(tnc -> (TCollection, [r])) | otherwise = return (c, r)
    colRecOfType _ = type_mismatch
    removeElem (tag &&& children -> (TRecord [_], [inner])) = inner
    removeElem _ = error "Invalid record structure for elemLoader"
    getRecFields (tag &&& children  -> (TRecord ids, cs)) = return (ids, cs)
    getRecFields _ = error "Cannot get fields for non-record type"
    type_mismatch = error "Invalid type for Loader function. Should Be String -> Collection R -> ()"
genLoader _ _ _ _ _ _ _ _ _ _ = error "Invalid type for Loader function."
-- | Generate a @...Logger@ builtin: writes each record of a collection
-- to a file as separator-joined fields, via the runtime's @logHelper@.
genLogger :: String -> K3 Type -> String -> CPPGenM R.Definition
genLogger _ (children -> [_,f]) name = do
  (colType, recType) <- getColType
  let (fields,_) = getRecFields recType
  let fieldLogs  = map (log . proj) fields
  -- Interleave the separator between consecutive field writes.
  let allLogs    = L.intersperse (seperate $ R.Variable $ R.Name "sep") fieldLogs
  cRecType <- genCType recType
  cColType <- genCType colType
  let printRecordFn = R.Lambda []
                       [ (Just "file", R.Reference $ R.Named $ R.Qualified (R.Name "std") (R.Name "ofstream"))
                       , (Just "elem", R.Reference $ R.Const cRecType)
                       , (Just "sep", R.Reference $ R.Const $ R.Named $ R.Name "string")
                       ] False Nothing (map R.Ignore allLogs)
  return $ R.FunctionDefn (R.Name name)
             [ (Just "file", R.Named $ R.Name "string")
             , (Just "c", R.Reference cColType)
             , (Just "sep", R.Reference $ R.Const $ R.Named $ R.Name "string")]
             (Just $ R.Named $ R.Name "unit_t") [] False
             [ R.Return $ R.Call (R.Variable $ R.Name "logHelper")
               [ R.Variable $ R.Name "file"
               , R.Variable $ R.Name "c"
               , printRecordFn
               , R.Variable $ R.Name "sep"
               ]
             ]
  where
    proj i = R.Project (R.Variable $ R.Name "elem") (R.Name i)
    -- Stream an expression into the file with operator<<.
    log = R.Binary "<<" (R.Variable $ R.Name "file")
    seperate s = log s
    getColType = case children f of
                   ([c,_]) -> case children c of
                     [r] -> return (c, r)
                     _   -> type_mismatch
                   _ -> type_mismatch
    getRecFields (tag &&& children  -> (TRecord ids, cs)) = (ids, cs)
    getRecFields _ = error "Cannot get fields for non-record type"
    type_mismatch = error "Invalid type for Logger function. Must be a flat-record of ints, reals, and strings"
genLogger _ _ _ = error "Error: Invalid type for Logger function. Must be a flat-record of ints, reals, and strings"
-- | Generate a C++ lambda parsing one pipe-separated line into a tuple
-- (accessed via std::get) or a flat record (accessed via projection).
-- Returns Nothing for no supported type -- only an error today.
genCsvParser :: K3 Type -> CPPGenM (Maybe R.Expression)
genCsvParser t@(tag &&& children -> (TTuple, ts)) = genCsvParserImpl t ts get >>= (return . Just)
  where
    get exp i = R.Call
                  (R.Variable $ R.Qualified (R.Name "std") (R.Specialized [R.Named $ R.Name (show i)] (R.Name "get")))
                  [exp]
genCsvParser t@(tag &&& children -> (TRecord ids, ts)) = genCsvParserImpl t ts project >>= (return . Just)
  where
    project exp i = R.Project exp (R.Name (ids L.!! i))
genCsvParser _ = error "Can't generate CsvParser. Only works for flat records and tuples"
-- | Shared body for 'genCsvParser': builds a lambda that splits the
-- input string on '|' with std::getline and assigns each token into the
-- element via the supplied @accessor@ (tuple get or record projection).
genCsvParserImpl :: K3 Type -> [K3 Type] -> (R.Expression -> Int -> R.Expression) -> CPPGenM R.Expression
genCsvParserImpl elemType childTypes accessor = do
    et <- genCType elemType
    let fields = concatMap (uncurry readField) (zip childTypes [0,1..])
    return $ R.Lambda
               []
               [(Just "str", R.Reference $ R.Const $ R.Named $ R.Qualified (R.Name "std") (R.Name "string"))]
               False
               Nothing
               ( [iss_decl, iss_str, tup_decl et, token_decl] ++ fields ++ [R.Return tup])
  where
   iss_decl = R.Forward $ R.ScalarDecl (R.Name "iss") (R.Named $ R.Qualified (R.Name "std") (R.Name "istringstream")) Nothing
   iss_str = R.Ignore $ R.Call (R.Project iss (R.Name "str")) [R.Variable $ R.Name "str"]
   token_decl = R.Forward $ R.ScalarDecl (R.Name "token") (R.Named $ R.Qualified (R.Name "std") (R.Name "string")) Nothing
   iss = R.Variable $ R.Name "iss"
   token = R.Variable $ R.Name "token"
   tup_decl et = R.Forward $ R.ScalarDecl (R.Name "tup") et Nothing
   tup = R.Variable $ R.Name "tup"
   -- Read and convert the i-th field.
   readField :: K3 Type -> Int -> [R.Statement]
   readField t i = [ R.Ignore getline
                   , R.Assignment (accessor tup i) (typeMap t cstr)
                   ]
   cstr = R.Call (R.Project token (R.Name "c_str")) []
   getline = R.Call
              (R.Variable $ R.Qualified (R.Name "std") (R.Name "getline"))
              [iss, token, R.Literal $ R.LChar "|"]
   -- Token-to-value conversion per field type.
   typeMap :: K3 Type -> R.Expression -> R.Expression
   typeMap (tag -> TInt) e = R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "atoi"))
                             [e]
   typeMap (tag -> TReal) e = R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "atof"))
                              [e]
   typeMap (tag -> TString) e = R.Call (R.Variable $ R.Name "string_impl") [e]
   typeMap (tag -> _) x = x
| DaMSL/K3 | src/Language/K3/Codegen/CPP/Declaration.hs | apache-2.0 | 24,950 | 0 | 21 | 7,404 | 8,714 | 4,497 | 4,217 | 358 | 32 |
module Kornel.LineHandler.HttpSnippets
( setup
, announceUrl
) where
import Data.Aeson (FromJSON)
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Text as T
import Kornel.Common
import Kornel.Config
import Kornel.LineHandler
import Network.HTTP.Client
import qualified Network.HTTP.Client.TLS as HTTPS
import Network.HTTP.Simple hiding (withResponse)
import Prelude hiding (Handler, handle)
import Text.Regex.PCRE
-- | Build the line handler: for every URL found in an incoming message,
-- announce a snippet for it.
setup :: Config -> (Help, HandlerRaw)
setup cfg =
  (cmdHelp, ) . onlySimple . pure $ \respond _ request ->
    forM_ (findURLs request) (announceUrl cfg respond)
-- | Fetch and announce the page title and (if configured) an SMMRY
-- summary for one URL, each as a Notice.
announceUrl :: Config -> (SimpleReply -> IO ()) -> Text -> IO ()
announceUrl cfg respond url =
  asyncWithLog "HttpSnippets.title" $ do
    title cfg url >>= mapM_ (respond . Notice)
    -- NOTE(review): this second async is spawned from inside the first
    -- async's block; it looks like both were meant to run independently
    -- at the top level -- confirm the intended indentation.
    asyncWithLog "HttpSnippets.smmry" $
      smmry cfg url >>= mapM_ (respond . Notice)
cmdHelp :: Help
cmdHelp = Help [([], "Snippets of posted URLs will be announced.")]
-- | Query the SMMRY API for a one-sentence summary of @url@. Returns
-- Nothing when no API key is configured; otherwise the stripped
-- summary text (or Nothing if the API returned no content).
smmry :: Config -> Text -> IO (Maybe Text)
smmry Config {smmryApiKey} url =
  map join . forM smmryApiKey $ \apiKey -> do
    manager <- HTTPS.newTlsManager
    let request =
          setRequestManager manager .
          setupUserAgent .
          setRequestQueryString
            [ ("SM_API_KEY", Just $ encodeUtf8 apiKey)
            , ("SM_LENGTH", Just "1")
            , ("SM_URL", Just $ encodeUtf8 url)
            ] $
          "https://api.smmry.com/"
    SmmryResponse {sm_api_content} <- getResponseBody <$> httpJSON request
    pure . map T.strip $ sm_api_content
-- | The subset of the SMMRY JSON response this module consumes.
newtype SmmryResponse = SmmryResponse
  { sm_api_content :: Maybe Text
  } deriving (Eq, Generic, Show)
instance FromJSON SmmryResponse
-- | Fetch at most 'httpSnippetsFetchMax' bytes of the page at @url@ and
-- extract its \<title\>, decoded and whitespace-stripped.
title :: Config -> Text -> IO (Maybe Text)
title Config {httpSnippetsFetchMax} url = do
  manager <- HTTPS.newTlsManager
  request <- setupUserAgent <$> parseRequest (unpack url)
  response <-
    withResponse request manager $ \r ->
      -- Bounded read so arbitrary URLs cannot make us download everything.
      brReadSome (responseBody r) (fromIntegral httpSnippetsFetchMax)
  let title' = findTitle $ LBS.toStrict response
  return $ T.strip . decodeHtmlEntities . decodeUtf8_ <$> title'
-- | First \<title\> element's text in an HTML fragment, if any
-- (case-insensitive PCRE match; submatch 1 is the title text).
findTitle :: ByteString -> Maybe ByteString
findTitle haystack = headMay $ drop 1 matches
  where
    matches :: [ByteString] =
      getAllTextSubmatches $
      haystack =~ ("(?i)<title(?: [^>]+)?>([^<]+)" :: ByteString)
-- | All http(s) URLs appearing in a message.
findURLs :: Text -> [Text]
findURLs input = decodeUtf8_ <$> matches
  where
    inputBS = encodeUtf8 input
    matches :: [ByteString] =
      getAllTextMatches $
      inputBS =~ ("(?i)https?://[^\\s><\\]\\[]+" :: ByteString)
| michalrus/kornel | src/Kornel/LineHandler/HttpSnippets.hs | apache-2.0 | 2,680 | 0 | 18 | 656 | 772 | 406 | 366 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module I2C.BitMaster.StateMachine where
import CLaSH.Prelude
import Control.Lens hiding (Index)
import Control.Monad.State
import I2C.Types
-- | Phase counter of the bit-level I2C state machine.  Every bus-level
-- operation is split into clocked phases; the bounded 'Index' tracks which
-- phase of the current operation is active.
data BitStateMachine
  = Idle
  | Start (Index 5) -- ^ generate a (repeated) start condition, 5 phases
  | Stop (Index 4)  -- ^ generate a stop condition, 4 phases
  | Read (Index 4)  -- ^ sample one bit from SDA, 4 phases
  | Write (Index 4) -- ^ drive one bit onto SDA, 4 phases
  deriving Eq
-- | Registered outputs and bookkeeping state of the bit-level controller.
-- Lenses for all fields are generated by the 'makeLenses' splice below.
data StateMachine
  = StateMachine
  { _sclOen :: Bool -- i2c clock output enable register
  , _sdaOen :: Bool -- i2c data output enable register
  , _sdaChk :: Bool -- check SDA status (multi-master arbiter)
  , _cmdAck :: Bool -- command completed
  , _bitStateM :: BitStateMachine -- State Machine
  }

makeLenses ''StateMachine
{-# INLINE stateMachineStart #-}
-- | Reset value of the bit-level controller: both SCL and SDA released
-- (output enables high), no SDA arbitration check, no pending command
-- acknowledge, state machine idle.
stateMachineStart :: StateMachine
stateMachineStart
  = StateMachine
  { _sclOen    = True
  , _sdaOen    = True
  , _sdaChk    = False
  , _cmdAck    = False
  , _bitStateM = Idle
  }
{-# NOINLINE bitStateMachine #-}
-- | One step of the bit-level I2C state machine.
--
-- On reset or lost arbitration the machine is forced back to 'Idle' with
-- both bus lines released.  Otherwise, when the clock enable is asserted,
-- it advances one phase of the current command, updating the SCL\/SDA
-- output-enable registers and raising '_cmdAck' on a command's final phase.
bitStateMachine :: Bool                 -- ^ rst: synchronous reset
                -> Bool                 -- ^ al: arbitration lost
                -> Bool                 -- ^ clkEn: advance only when set
                -> I2CCommand           -- ^ cmd: command started from Idle
                -> Bit                  -- ^ din: bit driven during Write
                -> State StateMachine ()
bitStateMachine rst al clkEn cmd din = do
  (StateMachine {..}) <- get
  cmdAck .= False
  if rst || al then do
    -- Reset / arbitration lost: release both lines and go idle.
    bitStateM .= Idle
    cmdAck .= False
    sclOen .= True
    sdaOen .= True
    sdaChk .= False
  else do
    when clkEn $ case _bitStateM of
      -- start
      Start 0 -> do
        bitStateM .= Start 1
        sdaOen .= True  -- set SDA high
        sdaChk .= False -- don't check SDA
      Start 1 -> do
        bitStateM .= Start 2
        sclOen .= True  -- set SCL high
        sdaOen .= True  -- keep SDA high
        sdaChk .= False -- don't check SDA
      Start 2 -> do
        bitStateM .= Start 3
        sclOen .= True  -- keep SCL high
        sdaOen .= False -- set SDA low
        sdaChk .= False -- don't check SDA
      Start 3 -> do
        bitStateM .= Start 4
        sclOen .= True  -- keep SCL high
        sdaOen .= False -- keep SDA low
        sdaChk .= False -- don't check SDA
      Start 4 -> do
        bitStateM .= Idle
        cmdAck .= True  -- command completed
        sclOen .= False -- set SCL low
        sdaOen .= False -- keep SDA low
        sdaChk .= False -- don't check SDA
      -- stop
      Stop 0 -> do
        bitStateM .= Stop 1
        sclOen .= False -- keep SCL low
        sdaOen .= False -- set SDA low
        sdaChk .= False -- don't check SDA
      Stop 1 -> do
        bitStateM .= Stop 2
        sclOen .= True  -- set SCL high
        sdaOen .= False -- keep SDA low
        sdaChk .= False -- don't check SDA
      Stop 2 -> do
        bitStateM .= Stop 3
        sclOen .= True  -- keep SCL high
        sdaOen .= False -- keep SDA low
        sdaChk .= False -- don't check SDA
      Stop 3 -> do
        bitStateM .= Idle
        cmdAck .= True  -- command completed
        sclOen .= True  -- keep SCL high
        sdaOen .= True  -- set SDA high
        sdaChk .= False -- don't check SDA
      -- read
      Read 0 -> do
        bitStateM .= Read 1
        sclOen .= False -- keep SCL low
        sdaOen .= True  -- tri-state SDA
        sdaChk .= False -- don't check SDA
      Read 1 ->
        do bitStateM .= Read 2
           sclOen .= True  -- set SCL high
           sdaOen .= True  -- tri-state SDA
           sdaChk .= False -- don't check SDA
      Read 2 -> do
        bitStateM .= Read 3
        sclOen .= True  -- keep SCL high
        sdaOen .= True  -- tri-state SDA
        sdaChk .= False -- don't check SDA
      Read 3 -> do
        bitStateM .= Idle
        cmdAck .= True  -- command completed
        sclOen .= False -- set SCL low
        sdaOen .= True  -- tri-state SDA
        sdaChk .= False -- don't check SDA
      -- write
      Write 0 -> do
        bitStateM .= Write 1
        sclOen .= False -- keep SCL low
        sdaOen .= (din == high) -- set SDA
        sdaChk .= False -- don't check SDA (SCL low)
      Write 1 -> do
        bitStateM .= Write 2
        sclOen .= True  -- set SCL high
        sdaOen .= (din == high) -- keep SDA
        sdaChk .= False -- don't check SDA yet
                        -- Allow some more time for SDA and SCL to settle
      Write 2 -> do
        bitStateM .= Write 3
        sclOen .= True  -- keep SCL high
        sdaOen .= (din == high) -- keep SDA
        sdaChk .= True  -- check SDA
      Write 3 -> do
        bitStateM .= Idle
        cmdAck .= True  -- command completed
        sclOen .= False -- set SCL low
        sdaOen .= (din == high) -- keep SDA
        sdaChk .= False -- don't check SDA (SCL low)
      -- idle
      _ -> do
        -- NOTE(review): 'otherwise' below is a pattern variable acting as
        -- a plain wildcard ('_'); hlint flags this spelling.
        bitStateM .= case cmd of
                       I2Cstart -> Start 0
                       I2Cstop -> Stop 0
                       I2Cwrite -> Write 0
                       I2Cread -> Read 0
                       otherwise -> Idle
        sdaChk .= False
| ggreif/clash-compiler | examples/i2c/I2C/BitMaster/StateMachine.hs | bsd-2-clause | 5,174 | 0 | 20 | 2,193 | 1,158 | 576 | 582 | -1 | -1 |
{-# LANGUAGE BangPatterns,CPP #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
-- |
-- Module : Data.Text.Lazy.Encoding
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : portable
--
-- Functions for converting lazy 'Text' values to and from lazy
-- 'ByteString', using several standard encodings.
--
-- To gain access to a much larger variety of encodings, use the
-- @text-icu@ package: <http://hackage.haskell.org/package/text-icu>
module Data.Text.Lazy.Encoding
(
-- * Decoding ByteStrings to Text
-- $strict
decodeASCII
, decodeLatin1
, decodeUtf8
, decodeUtf16LE
, decodeUtf16BE
, decodeUtf32LE
, decodeUtf32BE
-- ** Catchable failure
, decodeUtf8'
-- ** Controllable error handling
, decodeUtf8With
, decodeUtf16LEWith
, decodeUtf16BEWith
, decodeUtf32LEWith
, decodeUtf32BEWith
-- * Encoding Text to ByteStrings
, encodeUtf8
, encodeUtf16LE
, encodeUtf16BE
, encodeUtf32LE
, encodeUtf32BE
#if MIN_VERSION_bytestring(0,10,4)
-- * Encoding Text using ByteString Builders
, encodeUtf8Builder
, encodeUtf8BuilderEscaped
#endif
) where
import Control.Exception (evaluate, try)
import Data.Text.Encoding.Error (OnDecodeError, UnicodeException, strictDecode)
import Data.Text.Internal.Lazy (Text(..), chunk, empty, foldrChunks)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Lazy.Internal as B
import qualified Data.ByteString.Unsafe as B
#if MIN_VERSION_bytestring(0,10,4)
import Data.Word (Word8)
import Data.Monoid (mempty, (<>))
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Builder.Extra as B (safeStrategy, toLazyByteStringWith)
import qualified Data.ByteString.Builder.Prim as BP
import qualified Data.Text as T
#endif
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Lazy as L
import qualified Data.Text.Internal.Lazy.Encoding.Fusion as E
import qualified Data.Text.Internal.Lazy.Fusion as F
import Data.Text.Unsafe (unsafeDupablePerformIO)
-- $strict
--
-- All of the single-parameter functions for decoding bytestrings
-- encoded in one of the Unicode Transformation Formats (UTF) operate
-- in a /strict/ mode: each will throw an exception if given invalid
-- input.
--
-- Each function has a variant, whose name is suffixed with -'With',
-- that gives greater control over the handling of decoding errors.
-- For instance, 'decodeUtf8' will throw an exception, but
-- 'decodeUtf8With' allows the programmer to determine what to do on a
-- decoding error.
-- | /Deprecated/. Decode a 'ByteString' containing 7-bit ASCII
-- encoded text.
--
-- This function is deprecated. Use 'decodeLatin1' instead.
decodeASCII :: B.ByteString -> Text
decodeASCII = decodeUtf8
-- NOTE(review): the pragma message recommends 'decodeUtf8' (the actual
-- behaviour of this alias), while the haddock above recommends
-- 'decodeLatin1'; confirm which replacement is intended and make the two
-- messages agree.
{-# DEPRECATED decodeASCII "Use decodeUtf8 instead" #-}
-- | Decode a 'ByteString' containing Latin-1 (aka ISO-8859-1) encoded
-- text, one lazy 'Text' chunk per strict input chunk.
decodeLatin1 :: B.ByteString -> Text
decodeLatin1 bs = go (B.toChunks bs)
  where
    go []       = empty
    go (c : cs) = chunk (TE.decodeLatin1 c) (go cs)
-- | Decode a 'ByteString' containing UTF-8 encoded text.
decodeUtf8With :: OnDecodeError -> B.ByteString -> Text
decodeUtf8With onErr (B.Chunk b0 bs0) =
  case TE.streamDecodeUtf8With onErr b0 of
    TE.Some t l f -> chunk t (go f l bs0)
  where
    -- Feed successive strict chunks to the continuation returned by the
    -- incremental decoder: 't' is the decoded text so far, 'l' the
    -- undecoded leftover bytes, 'f' the continuation for more input.
    go f0 _ (B.Chunk b bs) =
      case f0 b of
        TE.Some t l f -> chunk t (go f l bs)
    -- End of input: any leftover bytes form an incomplete code point,
    -- which is reported through the error handler.  'B.unsafeHead' is safe
    -- here because the 'S.null' guard rules out the empty case.
    go _ l _
      | S.null l = empty
      | otherwise = case onErr desc (Just (B.unsafeHead l)) of
                      Nothing -> empty
                      Just c -> L.singleton c
    desc = "Data.Text.Lazy.Encoding.decodeUtf8With: Invalid UTF-8 stream"
decodeUtf8With _ _ = empty
-- | Decode a 'ByteString' containing UTF-8 encoded text that is known
-- to be valid.
--
-- If the input contains any invalid UTF-8 data, an exception will be
-- thrown that cannot be caught in pure code. For more control over
-- the handling of invalid data, use 'decodeUtf8'' or
-- 'decodeUtf8With'.
decodeUtf8 :: B.ByteString -> Text
decodeUtf8 = decodeUtf8With strictDecode
-- INLINE[0]: delay inlining until the final simplifier phases so rewrite
-- rules mentioning 'decodeUtf8' get a chance to fire first.
{-# INLINE[0] decodeUtf8 #-}
-- This rule seems to cause performance loss.
{- RULES "LAZY STREAM stream/decodeUtf8' fusion" [1]
forall bs. F.stream (decodeUtf8' bs) = E.streamUtf8 strictDecode bs #-}
-- | Decode a 'ByteString' containing UTF-8 encoded text.
--
-- If the input contains any invalid UTF-8 data, the relevant
-- exception will be returned, otherwise the decoded text.
--
-- /Note/: this function is /not/ lazy, as it must decode its entire
-- input before it can return a result. If you need lazy (streaming)
-- decoding, use 'decodeUtf8With' in lenient mode.
decodeUtf8' :: B.ByteString -> Either UnicodeException Text
decodeUtf8' bs = unsafeDupablePerformIO $ do
                   let t = decodeUtf8 bs
                   -- Force every chunk's spine so any UnicodeException
                   -- surfaces here, where 'try' can catch it, rather than
                   -- later in pure code.
                   try (evaluate (rnf t `seq` t))
 where
  rnf Empty = ()
  rnf (Chunk _ ts) = rnf ts
{-# INLINE decodeUtf8' #-}
-- | Encode text using UTF-8 encoding.
encodeUtf8 :: Text -> B.ByteString
#if MIN_VERSION_bytestring(0,10,4)
encodeUtf8    Empty       = B.empty
encodeUtf8 lt@(Chunk t _) =
    B.toLazyByteStringWith strategy B.empty $ encodeUtf8Builder lt
  where
    -- To improve our small string performance, we use a strategy that
    -- allocates a buffer that is guaranteed to be large enough for the
    -- encoding of the first chunk, but not larger than the default
    -- B.smallChunkSize. We clamp the firstChunkSize to ensure that we don't
    -- generate too large buffers which hamper streaming.
    firstChunkSize  = min B.smallChunkSize (4 * (T.length t + 1))
    strategy        = B.safeStrategy firstChunkSize B.defaultChunkSize

-- | Build a 'B.Builder' producing the UTF-8 encoding, chunk by chunk.
encodeUtf8Builder :: Text -> B.Builder
encodeUtf8Builder = foldrChunks (\c b -> TE.encodeUtf8Builder c <> b) mempty

{-# INLINE encodeUtf8BuilderEscaped #-}
-- | As 'encodeUtf8Builder', but escapes octets via the supplied
-- 'BP.BoundedPrim'.
encodeUtf8BuilderEscaped :: BP.BoundedPrim Word8 -> Text -> B.Builder
encodeUtf8BuilderEscaped prim =
    foldrChunks (\c b -> TE.encodeUtf8BuilderEscaped prim c <> b) mempty
#else
-- Old bytestring: encode each strict chunk directly.
encodeUtf8 (Chunk c cs) = B.Chunk (TE.encodeUtf8 c) (encodeUtf8 cs)
encodeUtf8 Empty        = B.Empty
#endif
-- | Decode little-endian UTF-16 lazily, reporting ill-formed input
-- through the supplied 'OnDecodeError' handler.
decodeUtf16LEWith :: OnDecodeError -> B.ByteString -> Text
decodeUtf16LEWith onErr = F.unstream . E.streamUtf16LE onErr
{-# INLINE decodeUtf16LEWith #-}
-- | Decode little-endian UTF-16, throwing an exception on any invalid
-- input.  Use 'decodeUtf16LEWith' for custom error handling.
decodeUtf16LE :: B.ByteString -> Text
decodeUtf16LE bs = decodeUtf16LEWith strictDecode bs
{-# INLINE decodeUtf16LE #-}
-- | Decode big-endian UTF-16 lazily, reporting ill-formed input through
-- the supplied 'OnDecodeError' handler.
decodeUtf16BEWith :: OnDecodeError -> B.ByteString -> Text
decodeUtf16BEWith onErr = F.unstream . E.streamUtf16BE onErr
{-# INLINE decodeUtf16BEWith #-}
-- | Decode big-endian UTF-16, throwing an exception on any invalid
-- input.  Use 'decodeUtf16BEWith' for custom error handling.
decodeUtf16BE :: B.ByteString -> Text
decodeUtf16BE bs = decodeUtf16BEWith strictDecode bs
{-# INLINE decodeUtf16BE #-}
-- | Encode text as little-endian UTF-16, one lazy output chunk per input
-- chunk.
encodeUtf16LE :: Text -> B.ByteString
encodeUtf16LE txt =
    B.fromChunks (foldrChunks (\c cs -> TE.encodeUtf16LE c : cs) [] txt)
{-# INLINE encodeUtf16LE #-}
-- | Encode text as big-endian UTF-16, one lazy output chunk per input
-- chunk.
encodeUtf16BE :: Text -> B.ByteString
encodeUtf16BE txt =
    B.fromChunks (foldrChunks (\c cs -> TE.encodeUtf16BE c : cs) [] txt)
{-# INLINE encodeUtf16BE #-}
-- | Decode little-endian UTF-32 lazily, reporting ill-formed input
-- through the supplied 'OnDecodeError' handler.
decodeUtf32LEWith :: OnDecodeError -> B.ByteString -> Text
decodeUtf32LEWith onErr = F.unstream . E.streamUtf32LE onErr
{-# INLINE decodeUtf32LEWith #-}
-- | Decode little-endian UTF-32, throwing an exception on any invalid
-- input.  Use 'decodeUtf32LEWith' for custom error handling.
decodeUtf32LE :: B.ByteString -> Text
decodeUtf32LE bs = decodeUtf32LEWith strictDecode bs
{-# INLINE decodeUtf32LE #-}
-- | Decode big-endian UTF-32 lazily, reporting ill-formed input through
-- the supplied 'OnDecodeError' handler.
decodeUtf32BEWith :: OnDecodeError -> B.ByteString -> Text
decodeUtf32BEWith onErr = F.unstream . E.streamUtf32BE onErr
{-# INLINE decodeUtf32BEWith #-}
-- | Decode big-endian UTF-32, throwing an exception on any invalid
-- input.  Use 'decodeUtf32BEWith' for custom error handling.
decodeUtf32BE :: B.ByteString -> Text
decodeUtf32BE bs = decodeUtf32BEWith strictDecode bs
{-# INLINE decodeUtf32BE #-}
-- | Encode text as little-endian UTF-32, one lazy output chunk per input
-- chunk.
encodeUtf32LE :: Text -> B.ByteString
encodeUtf32LE txt =
    B.fromChunks (foldrChunks (\c cs -> TE.encodeUtf32LE c : cs) [] txt)
{-# INLINE encodeUtf32LE #-}
-- | Encode text as big-endian UTF-32, one lazy output chunk per input
-- chunk.
encodeUtf32BE :: Text -> B.ByteString
encodeUtf32BE txt =
    B.fromChunks (foldrChunks (\c cs -> TE.encodeUtf32BE c : cs) [] txt)
{-# INLINE encodeUtf32BE #-}
| ekmett/text | Data/Text/Lazy/Encoding.hs | bsd-2-clause | 9,241 | 0 | 14 | 1,653 | 1,413 | 829 | 584 | 101 | 3 |
{-# LANGUAGE TemplateHaskell, ScopedTypeVariables, NamedFieldPuns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for the job scheduler.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.JQScheduler (testJQScheduler) where
import Prelude ()
import Ganeti.Prelude
import Control.Lens ((&), (.~), _2)
import qualified Data.ByteString.UTF8 as UTF8
import Data.List (inits)
import Data.Maybe
import qualified Data.Map as Map
import Data.Set (Set, difference)
import qualified Data.Set as Set
import Text.JSON (JSValue(..))
import Test.HUnit
import Test.QuickCheck
import Test.Ganeti.JQueue.Objects (genQueuedOpCode, genJobId, justNoTs)
import Test.Ganeti.SlotMap (genTestKey, overfullKeys)
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Test.Ganeti.Types ()
import Ganeti.JQScheduler.Filtering
import Ganeti.JQScheduler.ReasonRateLimiting
import Ganeti.JQScheduler.Types
import Ganeti.JQueue.Lens
import Ganeti.JQueue.Objects
import Ganeti.Objects (FilterRule(..), FilterPredicate(..), FilterAction(..),
filterRuleOrder)
import Ganeti.OpCodes
import Ganeti.OpCodes.Lens
import Ganeti.Query.Language (Filter(..), FilterValue(..))
import Ganeti.SlotMap
import Ganeti.Types
import Ganeti.Utils (isSubsequenceOf, newUUID)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | Generator for syntactically valid @rate-limit:\<n\>:\<label\>@ reason
-- strings, with the limit taken from an arbitrary 'Slot'.
genRateLimitReason :: Gen String
genRateLimitReason = do
  limit <- slotLimit <$> arbitrary
  label <- genTestKey
  return $ concat ["rate-limit:", show limit, ":", label]
instance Arbitrary QueuedJob where
  arbitrary = do
    -- For our scheduler testing purposes here, we only care about
    -- opcodes, job ID and reason rate limits.
    jid <- genJobId
    -- At most 5 opcodes per job keeps generated cases small and fast.
    ops <- resize 5 . listOf1 $ do
      o <- genQueuedOpCode
      -- Put some rate limits into the OpCode.
      limitString <- genRateLimitReason
      return $
        o & qoInputL . validOpCodeL . metaParamsL . opReasonL . traverse . _2
          .~ limitString
    return $ QueuedJob jid ops justNoTs justNoTs justNoTs Nothing Nothing
instance Arbitrary JobWithStat where
  arbitrary = nullJobWithStat <$> arbitrary
  -- Shrink only the wrapped job; the stat part stays null.
  shrink job = [ job { jJob = x } | x <- shrink (jJob job) ]
instance Arbitrary Queue where
  arbitrary = do
    -- Job IDs must be unique across all three sub-queues, so each
    -- generator call excludes the jobs produced so far.
    let genJobsUniqueJIDs :: [JobWithStat] -> Gen [JobWithStat]
        genJobsUniqueJIDs = listOfUniqueBy arbitrary (qjId . jJob)
    queued <- genJobsUniqueJIDs []
    running <- genJobsUniqueJIDs queued
    manip <- genJobsUniqueJIDs (queued ++ running)
    return $ Queue queued running manip
  -- Shrink each sub-queue independently (ID uniqueness is preserved,
  -- since shrinking only removes or shrinks existing jobs).
  shrink q =
    [ q { qEnqueued = x } | x <- shrink (qEnqueued q) ] ++
    [ q { qRunning = x } | x <- shrink (qRunning q) ] ++
    [ q { qManipulated = x } | x <- shrink (qManipulated q) ]
-- * Test cases
-- | Tests rate limit reason trail parsing.
case_parseReasonRateLimit :: Assertion
case_parseReasonRateLimit = do
  assertBool "default case" $
    let a = parseReasonRateLimit "rate-limit:20:my label"
        b = parseReasonRateLimit "rate-limit:21:my label"
    in and
         [ a == Just ("20:my label", 20)
         , b == Just ("21:my label", 21)
         ]
  -- Leading whitespace must not be accepted.
  assertEqual "be picky about whitespace"
    Nothing
    (parseReasonRateLimit " rate-limit:20:my label")
-- | Tests that "rateLimit:n:..." and "rateLimit:m:..." become different
-- rate limiting buckets.
prop_slotMapFromJob_conflicting_buckets :: Property
prop_slotMapFromJob_conflicting_buckets = do
  -- Two reason strings that share the label but differ in the limit.
  let sameBucketReasonStringGen :: Gen (String, String)
      sameBucketReasonStringGen = do
        (Positive (n :: Int), Positive (m :: Int)) <- arbitrary
        l <- genPrintableAsciiString
        return ( "rate-limit:" ++ show n ++ ":" ++ l
               , "rate-limit:" ++ show m ++ ":" ++ l )
  forAll sameBucketReasonStringGen $ \(s1, s2) ->
    (s1 /= s2) ==> do
      (lab1, lim1) <- parseReasonRateLimit s1
      (lab2, _ ) <- parseReasonRateLimit s2
      -- Occupying one slot under lab2 must not touch lab1's bucket.
      let sm = Map.fromList [(lab1, Slot 1 lim1)]
          cm = Map.fromList [(lab2, 1)]
       in return $
            (sm `occupySlots` cm) ==? Map.fromList [ (lab1, Slot 1 lim1)
                                                   , (lab2, Slot 1 0)
                                                   ] :: Gen Property
-- | Tests some basic cases for reason rate limiting.
case_reasonRateLimit :: Assertion
case_reasonRateLimit = do
  -- Build a minimal job whose single opcode carries the given reason trail.
  let mkJobWithReason jobNum reasonTrail = do
        opc <- genSample genQueuedOpCode
        jid <- makeJobId jobNum
        let opc' = opc & (qoInputL . validOpCodeL . metaParamsL . opReasonL)
                       .~ reasonTrail
        return . nullJobWithStat
          $ QueuedJob
              { qjId = jid
              , qjOps = [opc']
              , qjReceivedTimestamp = Nothing
              , qjStartTimestamp = Nothing
              , qjEndTimestamp = Nothing
              , qjLivelock = Nothing
              , qjProcessId = Nothing
              }
  -- 3 jobs, limited to 2 of them running.
  j1 <- mkJobWithReason 1 [("source1", "rate-limit:2:hello", 0)]
  j2 <- mkJobWithReason 2 [("source1", "rate-limit:2:hello", 0)]
  j3 <- mkJobWithReason 3 [("source1", "rate-limit:2:hello", 0)]
  assertEqual "[j1] should not be rate-limited"
    [j1]
    (reasonRateLimit (Queue [j1] [] []) [j1])
  assertEqual "[j1, j2] should not be rate-limited"
    [j1, j2]
    (reasonRateLimit (Queue [j1, j2] [] []) [j1, j2])
  assertEqual "j3 should be rate-limited 1"
    [j1, j2]
    (reasonRateLimit (Queue [j1, j2, j3] [] []) [j1, j2, j3])
  -- Jobs already running (or manipulated) also occupy bucket slots.
  assertEqual "j3 should be rate-limited 2"
    [j2]
    (reasonRateLimit (Queue [j2, j3] [j1] []) [j2, j3])
  assertEqual "j3 should be rate-limited 3"
    []
    (reasonRateLimit (Queue [j3] [j1] [j2]) [j3])
-- | Tests the specified properties of `reasonRateLimit`, as defined in
-- `doc/design-optables.rst`.
prop_reasonRateLimit :: Property
prop_reasonRateLimit =
  forAllShrink arbitrary shrink $ \q ->
    let slotMapFromJobWithStat = slotMapFromJobs . map jJob
        enqueued = qEnqueued q
        toRun = reasonRateLimit q enqueued
        oldSlots = slotMapFromJobWithStat (qRunning q)
        newSlots = slotMapFromJobWithStat (qRunning q ++ toRun)
        -- What would happen without rate limiting.
        newSlotsNoLimits = slotMapFromJobWithStat (qRunning q ++ enqueued)
    in -- Ensure it's unlikely that jobs are all in different buckets.
       cover
         (any ((> 1) . slotOccupied) . Map.elems $ newSlotsNoLimits)
         50
         "some jobs have the same rate-limit bucket"
       -- Ensure it's likely that rate limiting has any effect.
       . cover
           (overfullKeys newSlotsNoLimits
              `difference` overfullKeys oldSlots /= Set.empty)
           50
           "queued jobs cannot be started because of rate limiting"
       $ conjoin
           [ counterexample "scheduled jobs must be subsequence" $
               toRun `isSubsequenceOf` enqueued
           -- This is the key property:
           , counterexample "no job may exceed its bucket limits, except from\
                            \ jobs that were already running with exceeded\
                            \ limits; those must not increase" $
               conjoin
                 [ if occup <= limit
                     -- Within limits, all fine.
                     then passTest
                     -- Bucket exceeds limits - it must have exceeded them
                     -- in the initial running list already, with the same
                     -- slot count.
                     else Map.lookup k oldSlots ==? Just slot
                 | (k, slot@(Slot occup limit)) <- Map.toList newSlots ]
           ]
-- | Filter-rule ordering must be decided by (priority, watermark, UUID)
-- alone, per `doc/design-optables.rst`.
prop_filterRuleOrder :: Property
prop_filterRuleOrder = property $ do
  x <- arbitrary
  -- Rules with equal UUIDs are the same rule; exclude that degenerate case.
  y <- suchThat arbitrary (\fr -> frUuid fr /= frUuid x)
  let key fr = (frPriority fr, frWatermark fr, frUuid fr)
  return $ filterRuleOrder x y ==? compare (key x) (key y)
-- | Tests common inputs for `matchPredicate`, especially the predicates
-- and fields available to them as defined in the spec.
case_matchPredicate :: Assertion
case_matchPredicate = do
  jid1 <- makeJobId 1
  clusterName <- mkNonEmpty "cluster1"
  -- A single-opcode OP_CLUSTER_RENAME job with one reason-trail entry;
  -- every predicate below is checked against this fixture.
  let job =
        QueuedJob
          { qjId = jid1
          , qjOps =
              [ QueuedOpCode
                  { qoInput = ValidOpCode MetaOpCode
                      { metaParams = CommonOpParams
                          { opDryRun = Nothing
                          , opDebugLevel = Nothing
                          , opPriority = OpPrioHigh
                          , opDepends = Just []
                          , opComment = Nothing
                          , opReason = [("source1", "reason1", 1234)]
                          }
                      , metaOpCode = OpClusterRename
                          { opName = clusterName
                          }
                      }
                  , qoStatus = OP_STATUS_QUEUED
                  , qoResult = JSNull
                  , qoLog = []
                  , qoPriority = -1
                  , qoStartTimestamp = Nothing
                  , qoExecTimestamp = Nothing
                  , qoEndTimestamp = Nothing
                  }
              ]
          , qjReceivedTimestamp = Nothing
          , qjStartTimestamp = Nothing
          , qjEndTimestamp = Nothing
          , qjLivelock = Nothing
          , qjProcessId = Nothing
          }
  let watermark = jid1
      check = matchPredicate job watermark
  -- jobid filters
  assertEqual "matching jobid filter"
    True
    . check $ FPJobId (EQFilter "id" (NumericValue 1))
  assertEqual "non-matching jobid filter"
    False
    . check $ FPJobId (EQFilter "id" (NumericValue 2))
  assertEqual "non-matching jobid filter (string passed)"
    False
    . check $ FPJobId (EQFilter "id" (QuotedString "1"))
  -- jobid filters: watermarks
  assertEqual "matching jobid watermark filter"
    True
    . check $ FPJobId (EQFilter "id" (QuotedString "watermark"))
  -- opcode filters
  assertEqual "matching opcode filter (type of opcode)"
    True
    . check $ FPOpCode (EQFilter "OP_ID" (QuotedString "OP_CLUSTER_RENAME"))
  assertEqual "non-matching opcode filter (type of opcode)"
    False
    . check $ FPOpCode (EQFilter "OP_ID" (QuotedString "OP_INSTANCE_CREATE"))
  assertEqual "matching opcode filter (nested access)"
    True
    . check $ FPOpCode (EQFilter "name" (QuotedString "cluster1"))
  assertEqual "non-matching opcode filter (nonexistent nested access)"
    False
    . check $ FPOpCode (EQFilter "something" (QuotedString "cluster1"))
  -- reason filters
  assertEqual "matching reason filter (reason field)"
    True
    . check $ FPReason (EQFilter "reason" (QuotedString "reason1"))
  assertEqual "non-matching reason filter (reason field)"
    False
    . check $ FPReason (EQFilter "reason" (QuotedString "reasonGarbage"))
  assertEqual "matching reason filter (source field)"
    True
    . check $ FPReason (EQFilter "source" (QuotedString "source1"))
  assertEqual "matching reason filter (timestamp field)"
    True
    . check $ FPReason (EQFilter "timestamp" (NumericValue 1234))
  assertEqual "non-matching reason filter (nonexistent field)"
    False
    . check $ FPReason (EQFilter "something" (QuotedString ""))
-- | Any rule returned by `applyingFilter` must really match the job and
-- have an effect (CONTINUE rules are never "the" applying filter).
prop_applyingFilter :: Property
prop_applyingFilter =
  forAllShrink arbitrary shrink $ \(job, filterList) ->
    let mf = applyingFilter (Set.fromList filterList) job
    -- Discard runs where no filter applies; otherwise check the result
    -- via Maybe's Foldable instance ('all' is True for Nothing).
    in isJust mf ==>
         all (\f -> job `matches` f && frAction f /= Continue) mf
-- | Example-based tests for `jobFiltering`: REJECT/ACCEPT/PAUSE/CONTINUE/
-- RATE_LIMIT actions, watermarks and chain priorities.
case_jobFiltering :: Assertion
case_jobFiltering = do
  clusterName <- mkNonEmpty "cluster1"
  jid1 <- makeJobId 1
  jid2 <- makeJobId 2
  jid3 <- makeJobId 3
  jid4 <- makeJobId 4
  unsetPrio <- mkNonNegative 1234
  uuid1 <- fmap UTF8.fromString newUUID
  -- j1 is a minimal single-opcode job; j2..j4 are copies differing only
  -- in their job ID.
  let j1 =
        nullJobWithStat QueuedJob
          { qjId = jid1
          , qjOps =
              [ QueuedOpCode
                  { qoInput = ValidOpCode MetaOpCode
                      { metaParams = CommonOpParams
                          { opDryRun = Nothing
                          , opDebugLevel = Nothing
                          , opPriority = OpPrioHigh
                          , opDepends = Just []
                          , opComment = Nothing
                          , opReason = [("source1", "reason1", 1234)]}
                      , metaOpCode = OpClusterRename
                          { opName = clusterName
                          }
                      }
                  , qoStatus = OP_STATUS_QUEUED
                  , qoResult = JSNull
                  , qoLog = []
                  , qoPriority = -1
                  , qoStartTimestamp = Nothing
                  , qoExecTimestamp = Nothing
                  , qoEndTimestamp = Nothing
                  }
              ]
          , qjReceivedTimestamp = Nothing
          , qjStartTimestamp = Nothing
          , qjEndTimestamp = Nothing
          , qjLivelock = Nothing
          , qjProcessId = Nothing
          }
      j2 = j1 & jJobL . qjIdL .~ jid2
      j3 = j1 & jJobL . qjIdL .~ jid3
      j4 = j1 & jJobL . qjIdL .~ jid4
      -- Base rule: reject the job with ID 1; all other rules below are
      -- record updates of this one.
      fr1 =
        FilterRule
          { frWatermark = jid1
          , frPriority = unsetPrio
          , frPredicates = [FPJobId (EQFilter "id" (NumericValue 1))]
          , frAction = Reject
          , frReasonTrail = []
          , frUuid = uuid1
          }
      -- Gives the rule a new UUID.
      rule fr = do
        uuid <- fmap UTF8.fromString newUUID
        return fr{ frUuid = uuid }
      -- Helper to create filter chains: assigns the filters in the list
      -- increasing priorities, so that filters listed first are processed
      -- first.
      chain :: [FilterRule] -> Set FilterRule
      chain frs
        | any ((/= unsetPrio) . frPriority) frs =
            error "Filter was passed to `chain` that already had a priority."
        | otherwise =
            Set.fromList
              [ fr{ frPriority = prio }
              | (fr, Just prio) <- zip frs (map mkNonNegative [1..]) ]
  fr2 <- rule fr1{ frAction = Accept }
  fr3 <- rule fr1{ frAction = Pause }
  fr4 <- rule fr1{ frPredicates =
                     [FPJobId (GTFilter "id" (QuotedString "watermark"))]
                 }
  fr5 <- rule fr1{ frPredicates = [] }
  fr6 <- rule fr5{ frAction = Continue }
  fr7 <- rule fr6{ frAction = RateLimit 2 }
  fr8 <- rule fr4{ frAction = Continue, frWatermark = jid1 }
  fr9 <- rule fr8{ frAction = RateLimit 2 }
  assertEqual "j1 should be rejected (by fr1)"
    []
    (jobFiltering (Queue [j1] [] []) (chain [fr1]) [j1])
  assertEqual "j1 should be rejected (by fr1, it has priority)"
    []
    (jobFiltering (Queue [j1] [] []) (chain [fr1, fr2]) [j1])
  assertEqual "j1 should be accepted (by fr2, it has priority)"
    [j1]
    (jobFiltering (Queue [j1] [] []) (chain [fr2, fr1]) [j1])
  assertEqual "j1 should be paused (by fr3)"
    []
    (jobFiltering (Queue [j1] [] []) (chain [fr3]) [j1])
  assertEqual "j2 should be rejected (over watermark1)"
    [j1]
    (jobFiltering (Queue [j1, j2] [] []) (chain [fr4]) [j1, j2])
  assertEqual "all jobs should be rejected (since no predicates)"
    []
    (jobFiltering (Queue [j1, j2] [] []) (chain [fr5]) [j1, j2])
  assertEqual "j3 should be rate-limited"
    [j1, j2]
    (jobFiltering (Queue [j1, j2, j3] [] []) (chain [fr6, fr7]) [j1, j2, j3])
  assertEqual "j4 should be rate-limited"
    -- j1 doesn't apply to fr8/fr9 (since they match only watermark > jid1)
    -- so j1 gets scheduled
    [j1, j2, j3]
    (jobFiltering (Queue [j1, j2, j3, j4] [] []) (chain [fr8, fr9])
       [j1, j2, j3, j4])
-- | Tests the specified properties of `jobFiltering`, as defined in
-- `doc/design-optables.rst`.
prop_jobFiltering :: Property
prop_jobFiltering =
  forAllShrink arbitrary shrink $ \q ->
    forAllShrink (resize 4 arbitrary) shrink $ \(NonEmpty filterList) ->
      let running = qRunning q ++ qManipulated q
          enqueued = qEnqueued q
          filters = Set.fromList filterList
          toRun = jobFiltering q filters enqueued -- do the filtering
          -- Helpers
          -- Whether `fr` applies to more than `n` of the `jobs`
          -- (that is, more than allowed).
          exceeds :: Int -> FilterRule -> [JobWithStat] -> Bool
          exceeds n fr jobs =
            n < (length
                 . filter ((frUuid fr ==) . frUuid)
                 . mapMaybe (applyingFilter filters)
                 $ map jJob jobs)
          -- Helpers for ensuring sensible coverage.
          -- Makes sure that each action appears with some probability.
          -- ('head . words . show' is safe: constructor names are non-empty.)
          actionName = head . words . show
          allActions = map actionName [ Accept, Continue, Pause, Reject
                                      , RateLimit 0 ]
          applyingActions = map (actionName . frAction)
                              . mapMaybe (applyingFilter filters)
                              $ map jJob enqueued
          perc = 4 -- percent; low because it's per action
          actionCovers =
            foldr (.) id
              [ stableCover (a `elem` applyingActions) perc ("is " ++ a)
              | a <- allActions ]
      -- `covers` should be after `==>` and before `conjoin` (see QuickCheck
      -- bugs 25 and 27).
      in (enqueued /= []) ==> actionCovers $ conjoin
           [ counterexample "scheduled jobs must be subsequence" $
               toRun `isSubsequenceOf` enqueued
           , counterexample "a reason for each job (not) being scheduled" .
               -- All enqueued jobs must have a reason why they were (not)
               -- scheduled, determined by the filter that applies.
               flip all enqueued $ \job ->
                 case applyingFilter filters (jJob job) of
                   -- If no filter matches, the job must run.
                   Nothing -> job `elem` toRun
                   Just fr@FilterRule{ frAction } -> case frAction of
                     -- ACCEPT filter permit the job immediately,
                     -- PAUSE/REJECT forbid running, CONTINUE filters cannot
                     -- be the output of `applyingFilter`, and
                     -- RATE_LIMIT filters have a more more complex property.
                     Accept -> job `elem` toRun
                     Continue -> error "must not happen"
                     Pause -> job `notElem` toRun
                     Reject -> job `notElem` toRun
                     RateLimit n ->
                       let -- Jobs in queue before our job.
                           jobsBefore = takeWhile (/= job) enqueued
                       in if job `elem` toRun
                            -- If it got scheduled, the job and any job
                            -- before it doesn't overfill the rate limit.
                            then not . exceeds n fr $ running
                                   ++ jobsBefore ++ [job]
                            -- If didn't get scheduled, then the rate limit
                            -- was already full before scheduling or the job
                            -- or one of the jobs before made it full.
                            else any (exceeds n fr . (running ++))
                                     (inits $ jobsBefore ++ [job])
                            -- The `inits` bit includes the [] and [...job]
                            -- cases.
           ]
-- Register all cases and properties above under the "JQScheduler" group.
testSuite "JQScheduler"
            [ 'case_parseReasonRateLimit
            , 'prop_slotMapFromJob_conflicting_buckets
            , 'case_reasonRateLimit
            , 'prop_reasonRateLimit
            , 'prop_filterRuleOrder
            , 'case_matchPredicate
            , 'prop_applyingFilter
            , 'case_jobFiltering
            , 'prop_jobFiltering
            ]
| leshchevds/ganeti | test/hs/Test/Ganeti/JQScheduler.hs | bsd-2-clause | 21,467 | 0 | 29 | 6,952 | 4,550 | 2,447 | 2,103 | -1 | -1 |
{-# LANGUAGE StandaloneDeriving,FlexibleContexts #-}
import Kalman
import Vectorization
import Control.Applicative
import Space
import Data.Functor.Product
import Linear.V1
import Data.IntervalMap.FingerTree
-- 1-D particle demo: constant-acceleration Kalman-filter model setup.

-- | Initial time point.
p0 = point 0

-- | Initial filter map: state estimate 'x0' with covariance 'c0' at 'p0'.
s0 = singleton p0 (x0, c0)

-- | Initial state: position 0, velocity 0.
x0 :: Product V1 V1 Double
x0 = Pair 0 0

-- | Initial covariance: 2x2 identity.
c0 :: Product V1 V1 (Product V1 V1 Double)
c0 = Pair (V1 $ Pair 1 0) (V1 $ Pair 0 1)

-- | State transition over step @dt@ under acceleration @a@:
-- position advances by @v*dt@, velocity by @a*dt@.
-- Fix: the velocity component previously started from the position @p@
-- (@p |+| fmap (*dt) a@), which is not a kinematic update; it now starts
-- from the velocity @v@.
f a dt (Pair p v) = Pair (p |+| fmap (*dt) v) (v |+| fmap (*dt) a)

-- | Measurement model: observe position only.
h (Pair p v) = p

-- | Measurement noise variance.
r = V1 0.02

-- | Process-noise covariance scaled by @a@ for a step @dt@.
-- NOTE(review): continuous white-noise acceleration models usually use
-- dt^3/3 in the top-left entry; confirm the intended discretisation.
q a dt = liftA (fmap (*a)) $ Pair ( V1 $ Pair (V1 (dt^3) ) (V1 (dt^2/2))) (V1 $ Pair (V1 (dt^2/2)) (V1 dt))
| massudaw/mtk | example/1DParticle.hs | bsd-3-clause | 593 | 4 | 14 | 129 | 340 | 171 | 169 | 18 | 1 |
{-# LANGUAGE GADTs #-}
{-
The regular expression refinement algorithm
Inspired by the type checking system and type inference algorithm found in programming language research communities. \cite{TODO}
Type checking and Type inference algorithm
==========================================
Type checking judgement
-----------------------
$\gamma \vdash e : t$
Given the type environment $\gamma$, the program expression e and the type $t$, the above is deducible if $e$ has type $t$ under the type environment $\gamma$
Note that the type checking is a relation, where $\gamma$, $e$ and $t$ are the inputs.
Type inference judgement
------------------------
$\gamma \models e : t$
Given the type environment \gamma and the program expression e the algorithm reconstructs the type t.
Note that the type inference is considered an function, where \gamma and e are the inputs.
* Type inference correctness
$\gamma \models e : t$
implies
$\gamma \vdash e : t$
Regular expression debugging and refinement
===============================================
The connection
--------------
Pointed out the XDuce work, we consider the correspondence between the program expression $e$ and the document $d$,
the type $t$ and the regular expression $r$.
* The word problem $d \in r$ corresponds to the type checking problem.
The difference
--------------
* The matching problem
$$ d \in r \Longrightarrow \Delta $$
where r is annotated with a label at each AST level. $\Delta$ maps labels to sub-matches.
The mechanism
-------------
We use partial derivative operations to implement the word problem and the sub matching problem. See our recent papers (PPDP 12 )
The debugging algorithm
-----------------------
Refer to the PDDebug.lhs
The Refinement checking judgement
---------------------------------
-}
module Text.Regex.PDeriv.Debug.Refine5 where
import System.Environment
import qualified Data.Map as M
import qualified Data.IntMap as IM
import System.IO.Unsafe
import Data.List
import Data.Maybe
import Data.Char
import Control.Monad.ST
import Data.STRef
import Control.Monad
import Control.Monad.State
import Text.Regex.PDeriv.Parse
-- import qualified Text.Regex.PDeriv.RE as R
-- import Text.Regex.PDeriv.IntPattern
import Text.Regex.PDeriv.ExtPattern
{-
import Data.Char
import Text.ParserCombinators.Parsec((<|>), (<?>),
unexpected, try, runParser, many, getState, setState, CharParser, ParseError,
sepBy1, option, notFollowedBy, many1, lookAhead, eof, between,
string, oneOf, noneOf, digit, char, anyChar)
import Control.Monad(liftM, when, guard)
-}
-- | Debug-print a value from pure code via 'unsafePerformIO'.
-- NOTE(review): evaluation order and sharing make this best-effort only;
-- call sites force it explicitly with `seq` when output is required.
logger mesg = unsafePerformIO $ print mesg
{-
* The problem
Let $\gamma$ denote the user specification, $w$ denote the input word , $r$ the pattern and $r'$ the refined pattern,
we use the judgement
$$\gamma, r \vdash d : r'$$
to denote that under the user spec $\gamma$ , $r'$ is a replacement of $r$ that accepts $w$.
* The user requirement
\gamma ::= { (i, r) , ... , }
i ::= 1,2,3,...
-}
-- | Source location label attached to regex sub-expressions.
type SrcLoc = Int
-- | The user requirement: a mapping of labels to the regexes they must match.
type UReq = [(Int, Re)]

-- | Look up the requirement attached to a label, if any.
lookupUR :: Int -> UReq -> Maybe Re
lookupUR = lookup

-- | Replace the requirement at the given label, leaving other entries alone.
updateUR :: Int -> Re -> UReq -> UReq
updateUR x r = map (\entry@(y, _) -> if x == y then (y, r) else entry)

-- | Do all recorded requirements accept the empty word?
allAccEps :: UReq -> Bool
allAccEps = all (posEmpty . snd)

-- | Is the label constrained by the user requirement?
inUR :: Int -> UReq -> Bool
inUR i = isJust . lookupUR i
{- * The Regular expression
p ::= r^i
r ::= () || (p|p) || pp || p* || l || \phi
-}
-- | Labelled regular expressions: each constructor (except 'Phi') carries
-- the source location of the corresponding sub-expression.
data Re where
  Choice :: SrcLoc -> [Re] -> Re    -- ^ alternation over a list of branches
  Pair :: SrcLoc -> Re -> Re -> Re  -- ^ sequencing
  Star :: SrcLoc -> Re -> Re        -- ^ Kleene star
  Ch :: SrcLoc -> Char -> Re        -- ^ single literal character
  Eps :: SrcLoc -> Re               -- ^ the empty word
  Phi :: Re                         -- ^ the empty language (unlabelled)
  Any :: SrcLoc -> Re               -- ^ wildcard '.'
  deriving (Show, Ord)
-- NOTE(review): the derived 'Ord' compares labels, but the hand-written
-- 'Eq' instance below ignores them, so (==) and 'compare' can disagree on
-- values that differ only in labels — confirm this is intended.
-- | Structural equality that ignores source-location annotations.
instance Eq Re where
  Choice _ xs  == Choice _ ys  = xs == ys
  Pair _ a b   == Pair _ c d   = a == c && b == d
  Star _ a     == Star _ b     = a == b
  Ch _ a       == Ch _ b       = a == b
  Eps{}        == Eps{}        = True
  Phi          == Phi          = True
  Any _        == Any _        = True
  _            == _            = False
-- | Render a regex in compact, human-readable form (labels omitted).
pretty :: Re -> String
pretty re = case re of
  Choice _ alts -> "(" ++ interleave "|" (map pretty alts) ++ ")"
  Pair _ a b    -> "(" ++ pretty a ++ "," ++ pretty b ++ ")"
  Star _ a      -> pretty a ++ "*"
  Ch _ c        -> [c]
  Eps _         -> "()"
  Any _         -> "."
  Phi           -> "{}"
-- | Join a list of strings, placing the delimiter between consecutive
-- elements. This is exactly 'Data.List.intercalate' (the file already
-- imports Data.List); delegate instead of re-implementing it.
interleave :: String -> [String] -> String
interleave = intercalate
-- | Nullability test: does the regex accept the empty word?
posEmpty :: Re -> Bool
posEmpty re = case re of
  Eps _         -> True
  Star _ _      -> True
  Choice _ alts -> any posEmpty alts
  Pair _ a b    -> posEmpty a && posEmpty b
  Ch _ _        -> False
  Any _         -> False
  Phi           -> False
-- | Does the regex denote the empty language?
-- (A 'Choice' of only-phi branches — including the empty choice — counts.)
isPhi :: Re -> Bool
isPhi re = case re of
  Phi           -> True
  Choice _ alts -> all isPhi alts
  Pair _ a b    -> isPhi a || isPhi b
  Ch _ _        -> False
  Star _ _      -> False
  Eps _         -> False
  Any _         -> False
-- | Is the top-level constructor a 'Choice'?
isChoice :: Re -> Bool
isChoice re = case re of
  Choice{} -> True
  _        -> False
-- containment check
-- | @contain big small@ holds when the language of @small@ is contained
-- in the language of @big@ (delegates to the memoised 'containBy').
contain :: Re -> Re -> Bool
contain big small = containBy M.empty small big
-- | A containment query @Leq r1 r2@, memoised during 'containBy' so
-- revisited pairs close the coinductive loop.
data Leq = Leq Re Re deriving (Eq, Ord, Show)

-- | @containBy env r1 r2@: is L(r1) a subset of L(r2)?
-- 'env' records the pairs currently under consideration; a revisited
-- pair is assumed to hold (coinduction over the derivative automaton).
containBy :: M.Map Leq () -> Re -> Re -> Bool
containBy env Phi _ = True
containBy env r1 r2 =
  case M.lookup (Leq r1 r2) env of
    { Just _ -> True
    -- r1 accepts the empty word but r2 does not: containment fails
    ; Nothing | posEmpty r1 && not (posEmpty r2) -> False
              | otherwise ->
                  -- descend into derivatives over the joint alphabet
                  let env' = M.insert (Leq r1 r2) () env
                  in all (\l -> containBy env' (deriv r1 l) (deriv r2 l)) (sigma (Choice dontcare [r1,r2]))
    }
-- | Full (Brzozowski-style) derivative: the sum of all partial derivatives.
-- NOTE(review): 'sigma' yields no symbol for 'Any', so containment over
-- wildcard-bearing patterns may explore too small an alphabet — verify.
deriv r l = Choice dontcare (pderiv r l)
-- semantic equivalence check
-- | Two regexes are language-equivalent iff each contains the other.
equiv :: Re -> Re -> Bool
equiv a b = (a `contain` b) && (b `contain` a)
-- the simplification
{-
collapse x y = nub (sort (x ++ y))
combine x y = nub (sort (x ++ y))
shift :: [Int] -> Re -> Re
shift ls Phi = Phi
shift ls (Choice ls' rs) = Choice (combine ls' ls) rs
shift ls (Ch ls' c) = Ch (combine ls' ls) c
shift ls (Pair ls' r1 r2) = Pair (combine ls' ls) r1 r2
shift ls (Star ls' r) = Star (combine ls' ls) r
shift ls (Eps ls') = Eps (combine ls' ls)
shift ls (Any ls') = Any (combine ls' ls)
-}
-- | One simplification pass over a regex: rewrites trivial shapes
-- (unit of sequencing, empty/singleton/nested choice, degenerate star)
-- while relocating refinement ops whose host label disappears ('reloc').
simpl :: Re -> REnv -> (Re, REnv)
-- (eps . r) ~> r, unless r is phi
simpl (Pair l1 (Eps l2) r) renv
  | isPhi r = (Phi,renv)
  | otherwise = (r,renv) -- todo:check
-- a pair with a phi component is phi; otherwise simplify both sides,
-- threading the environment left-to-right
simpl (Pair l r1 r2) renv
  | isPhi r1 || isPhi r2 = (Phi,renv)
  | otherwise = let (r1', renv') = simpl r1 renv
                    (r2', renv'') = simpl r2 renv'
                in (Pair l r1' r2', renv'')
-- empty choice becomes eps at the same label
simpl (Choice l []) renv = (Eps l, renv)
-- singleton choice collapses; its ops move to the survivor's label
simpl (Choice l [r]) renv = case getLabel r of
  { (l':_) -> (r, reloc l l' renv) -- todo: check
  ; _ -> (r, renv) -- r is phi
  }
simpl (Choice l rs) renv
  -- flatten nested choices, relocating each inner label onto l
  | any isChoice rs =
      let (rs',e') = foldl (\(rs,e) -> \r -> case r of
                             { Choice l' rs2 -> ((rs ++ (map {- (shift l')-} id rs2)), reloc l' l e)
                             ; _ -> (rs ++ [r], e) }) ([],renv) rs
      in (Choice l rs', e')
  -- otherwise simplify the branches, dropping those that became phi
  -- NOTE(review): the kept element is the ORIGINAL 'r', not the
  -- simplified 'r'', while the environment updates from simplifying it
  -- ARE kept — looks like 'rs++[r]' was meant to be 'rs++[r'']'; confirm.
  | otherwise =
      let (rs',e'') = foldl (\(rs,e) -> \r ->
                              let (r',e') = simpl r e
                              in if isPhi r'
                                 then (rs,e')
                                 else (rs++[r],e')
                            ) ([],renv) rs -- todo: check for duplicate
      in (Choice l rs', e'')
-- eps* ~> eps ; (r*)* ~> r*
simpl (Star l1 (Eps l2)) renv = (Eps l2, reloc l1 l2 renv) --todo:
simpl (Star l1 (Star l2 r)) renv = (Star l2 r, reloc l1 l2 renv)
simpl (Star l r) renv
  | isPhi r = (Eps l, renv)
  | otherwise = let (r',e) = simpl r renv in (Star l r',e)
-- anything else is already simple
simpl x e = (x,e)
-- reloc : relocate rop under l' to l in a renv
-- | Move every refinement op recorded at label @src@ onto label @dst@,
-- appending the moved ops after any already present at @dst@.
reloc :: Int -> Int -> REnv -> REnv
reloc src dst renv =
  case IM.lookup src renv of
    Nothing -> renv
    Just movedOps ->
      -- insert (or append) first, then drop the source entry; when
      -- src == dst this nets out to deleting the entry, matching the
      -- original delete-after-update behaviour
      IM.delete src (IM.insertWith (\new old -> old ++ new) dst movedOps renv)
-- simplication w/o changing the REnv
{-
simp :: Re -> Re
simp (Pair l1 (Eps l2) r)
| isPhi r = Phi
| otherwise = shift (collapse l1 l2) r
simp (Pair l r1 r2)
| isPhi r1 || isPhi r2 = Phi
| otherwise = Pair l (simp r1) (simp r2)
simp (Choice l []) = Eps l
simp (Choice l [r]) = shift l r
simp (Choice l rs) = Choice l $ nub $ filter (not.isPhi) $ map simp rs
simp (Star l1 (Eps l2)) = Eps $ collapse l1 l2
simp (Star l1 (Star l2 r)) = Star (combine l1 l2) r
simp (Star l r)
| isPhi r = Eps l
| otherwise = Star l $ simp r
simp x = x
-}
-- | Types supporting partial derivatives with respect to one input character.
class PDeriv t where
  pderiv :: t -> Char -> [Re]
-- partial derivatives of regex
-- | Antimirov-style partial derivatives: @pderiv r c@ yields regexes whose
-- union of languages is the left quotient of L(r) by c.
instance PDeriv Re where
  pderiv (Eps _) _ = []
  pderiv (Choice x rs) l =
    nub (concatMap (\r -> pderiv r l) rs )
  -- NOTE(review): when r1 is nullable only the first operand's derivatives
  -- are 'nub'-ed; duplicates across the two halves survive — confirm.
  pderiv (Pair x r1 r2) l | posEmpty r1 = nub [ Pair x r1' r2 | r1' <- pderiv r1 l] ++ (pderiv r2 l)
                          | otherwise = [ Pair x r1' r2 | r1' <- pderiv r1 l]
  pderiv (Star x r) l = [ Pair x r' (Star x r) | r' <- pderiv r l ]
  pderiv (Ch x c) l | c == l = [ Eps x ]
                    | otherwise = []
  -- '.' consumes any character
  pderiv (Any x) l = [ Eps x ]
  pderiv Phi _ = []
-- * partial derivatives of a set of regexs
-- | Lift 'pderiv' pointwise over a collection.
instance PDeriv t => PDeriv [t] where
  pderiv ts ch = concat [ pderiv t ch | t <- ts ]
-- * partial dervatives extended to user-requirement-regex pair
{-
We need to annotate the URPair with the recommendation info to 'disambiguate' the refinement process.
To elaborate, we first need to consider the extension of partial derivative operation over the user-requirement-regex pairs.
** Case: i \not\in dom(\gamma)
(\gamma, \epsilon_i) / l = {} -- (Eps1)
(\gamma, l_i) / l = { (\gamma, \epsilon_i) } -- (LabMatch1)
(\gamma, l_i') / l = { } -- (LabMisMatch1)
(\gamma, (r1r2)_i) /l | \epsilon \in r1 = { (\gamma' ++ \gamma(fv(r2)), (r1'r2)_i) | (\gamma', r1') <- (\gamma(fv(r1)), r1) / l } ++ (\gamma(fv(r2)), r2) / l
| otherwise = { (\gamma' ++ \gamma(fv(r2)), (r1'r2)_i) | (\gamma', r1') <- (\gamma(fv(r1)), r1) / l }
-- (Pair1)
(\gamma, (r1|r2)_i) / l = (\gamma(fv(r1_i)), r1_i)/l ++ (\gamma(fv(r2_i)), r2_i)/l -- (Choice1)
(\gamma, r*_i) / l = { (\gamma', (r'r*)_i) | (\gamma', r') <- (\gamma, r) / l } -- (Star1)
** Case: i \in dom(\gamma)
(\gamma, \epsilon_i) / l = {} -- (Eps2)
(\gamma, l_i) / l = { (\gamma,\epsilon_i) } -- (LabMatch2)
(\gamma, l_i') / l = { } -- (LabMisMatch2)
(\gamma, (r1r2)_i) /l | \epsilon \in r1 = { (\gamma' ++ \gamma(fv(r2)) ++ { (i, \gamma(i)/l) } , (r1'r2)_i) | (\gamma', r1') <- (\gamma(fv(r1)), r1) / l } ++ (\gamma(fv(r2)), r2) / l
| otherwise = { (\gamma' ++ \gamma(fv(r2)), (r1'r2)_i) | (\gamma', r1') <- (\gamma(fv(r1)), r1) / l }
-- (Pair2)
(\gamma, (r1|r2)_i) /l = (\gamma(fv(r1_i)), r1_i)/l ++ (\gamma(fv(r2_i)), r2_i) /l -- (Choice2)
(\gamma, r*_i) / l = { (\gamma', (r'r*)_i) | (\gamma', r') <- (\gamma, r) / l } -- (Star2)
NOTE: \gamma([i1,...,in]) denotes { (i,r) | (i,r) \in \gamma, i \in { i1,..., in } }
The above formulation does not refine in case of failure, i.e. pd yields { }. See cases (Eps1), (Eps2), (LabMistMach1) and (LabMistMach2).
e.g. given p = x :: (a|b)* and w = c, matching w with p using partial derivative operation
p / c = {}
which is a matching failure
This is because p / c -->
{ (r, p) | r <- (a|b) /c }
and
(a|c) / c -->
a/c U b/c -->
{} U {} -->
{}
Suppose the user requirement enforces that x should match with a non-empty word e.g. { x :: .+ }, note that '.' matches with any single symbol.
One naive way to refine the pattern p is to just update by replacing (a|b)* with .+, however doing so is often too liberal, because the user requirement could be specified loosely.
An immediate (less naive) fix would be adjusting the partial derivative operations as follows,
We adjust the pd operation to return the refinement environment besides the partial derivatives
** Case: i \not\in dom(\gamma)
(\gamma, \epsilon_i) /^{level} l = { (\gamma, \epsilon_j, {i : seq+ l_j (max level weak)}) } -- (Eps1')
(\gamma, l_i) /^{level} l = { (\gamma, \epsilon_i, {}) } -- (LabMatch1)
(\gamma, l'_i) /^{level} l = { (\gamma, \epsilon_i, {i : choice+ l_j (max level weak)}) } -- (LabMisMatch1')
(\gamma, (r1r2)_i) /^{level}l | \epsilon \in r1 = { (\gamma' ++ \gamma(fv(r2)), (r1'r2)_i, \theta) | (\gamma', r1', \theta) <- (\gamma(fv(r1)), r1) /^{level} l } ++ (\gamma(fv(r2)), r2) /^{level} l
| otherwise = { (\gamma' ++ \gamma(fv(r2)), (r1'r2)_i, \theta) | (\gamma', r1'. \theta) <- (\gamma(fv(r1)), r1) /^{level} l }
-- (Pair1)
(\gamma, (r1|r2)_i) /^{level} l = (\gamma(fv(r1_i)), r1_i)/^{level}l ++ (\gamma(fv(r2_i)), r2_i)/^{level}l -- (Choice1)
(\gamma, r*_i) /^{level} l = { (\gamma', (r'r*)_i) | (\gamma', r', \theta) <- (\gamma, r) /^{level} l } -- (Star1)
** Case: i \in dom(\gamma)
(\gamma, \epsilon_i) /^{level} l = { (\gamma', \epsilon_j, {i : seq+ l_j strong}) | \gamma' <- \gamma / l } -- (Eps2')
(\gamma, l_i) /^{level} l = { (\gamma, \epsilon_i, {}) } -- (LabMatch2)
(\gamma, l_i') /^{level} l = { (\gamma', \epsilon_i, {i : choice+ l_j strong}) | \gamma' <- \gamma / l} -- (LabMisMatch2')
(\gamma, (r1r2)_i) /^{level}l | \epsilon \in r1 = { (\gamma' ++ \gamma(fv(r2)) ++ { (i, \gamma(i)/l) } , (r1'r2)_i, \theta) | (\gamma', r1', \theta) <- (\gamma(fv(r1)), r1) /^{strong} l } ++ (\gamma(fv(r2)), r2) / l
| otherwise = { (\gamma' ++ \gamma(fv(r2)) ++ { (i, \gamma(i)/l)}, (r1'r2)_i, \theta) | (\gamma', r1', \theta) <- (\gamma(fv(r1)), r1) /^{strong} l }
-- (Pair2)
(\gamma, (r1|r2)_i) /^{level} l = (\gamma(fv(r1_i)), r1_i)/^{strong} l ++ (\gamma(fv(r2_i)), r2_i) /^{strong} l -- (Choice2)
(\gamma, r*_i) /^{level} l = { (\gamma', (r'r*)_i, \theta) | (\gamma', r', \theta) <- (\gamma, r) /^{strong} l } -- (Star2)
Now the partial derivative operation is extended to handle the (\gamma, r) pairs.
The pderiv operation over (\gamma,r) pairs provides refinement suggestions \theta to those partial derivative cases which yield an empty set,
ref to case (Eps1'), (LabMisMatch1'), (Eps2') and (LabMisMatch2').
The refinement suggestion \theta is defined as follows,
\theta ::= { 1:rop_1, ..., n : rop_n }
where 1..n are labels
rop ::= (seq+ l_i level) | (choice+ l_i level)
level ::= weak | strong
A refinement suggestion environment \theta is a set of mapping which maps regex src location i to recommendation operation rop.
There are two kinds of rops, sequence expansion seq+ or choice expansion choice+. The sequence expansion recommends to append l_i
to the src location. e.g. let \theta_1 = { 1 : seq+ b_2 weak }, r = a_1, then \theta_1(r) = a_1b_2
The choice expansion recommends to add l_i to the union. e.g. let \theta_2 = { 1 : choice+ b_2 weak}, r = a_1 then \theta_2(r) = (a_1|b_2)
The helper function 'max' returns the upper bound of two recommendation levels
max strong _ = strong
max _ strong = strong
max weak weak = weak
Apart from the (\gamma, r) pairs extension, the pderiv op / is now parameterized by the {level} which stands for the level of recommendation. Let's ignore it for a while,
we will come back to it soon.
* KEY IDEA #1: CHOOSING THE MINIMAL REFINEMENT BASED ON THE RESULTING NFA
Note that given a pattern r and a input word w where w \not \in r, we have \theta_1 and \theta_2
such that \theta_1 \not\eq \theta_2 and w \in \theta_1(r) and w \in \theta_2(r). Let's consider the following example.
Let r = (a|b)* and w = abc, we can either refinement r by extending the choice resulting (a|(b|c))* or a appending c after b resulting (a|(bc))*
TODO: show the full details of the refinmenet with the src loc.
Which one is better? of course we favor the minimal refinement. In a general settings, we of course will pick a \theta which is smaller in size
(i.e. few changes). However as the above example highlights that there are situation where there are multiple \theta of the same size.
One key idea is that the choice+ extension is favored because it leads to a smaller NFA (i.e. only a transition is added).
Whilst the seq+ is leading to a larger NFA (i.e. it adds a transition and a new state)
* KEY IDEA #2: CHOOSING THE REFINEMENT BASED ON THE USER REQUIREMENT
Let's consider an example,
let r = ((a)_1 | (b)_2), w = c
\gamma = { 1 : (.) }
we have two possible refinement recommendation,
\theta_1 = { 1: (choice+ c_3 strong) } -- note that the recommendataion level is taken into consideration
\theta_2 = { 2: (choice+ c_3 weak) }
We favor \theta_1 because the user requirement enforces that src loc 1 should match at least one character (any character).
-}
-- partial derivative
-- | Types whose values can accept or reject an input document.
class Accept t where
  accept :: t -> Doc -> Bool
-- | Word acceptance by iterated partial derivatives: thread the current
-- set of pending regexes through the input, then test nullability.
instance Accept Re where
  -- BUG FIX: the fold previously ignored its accumulator and re-derived
  -- the ORIGINAL regex at every step ('pderiv r l'), so words longer than
  -- one character were matched incorrectly. Thread the evolving state set
  -- instead, mirroring the correct fold in 'match'. Also use a strict
  -- fold to avoid a thunk chain over long inputs.
  accept r ls = any posEmpty $ foldl' (\rs l -> concatMap (\t -> pderiv t l) rs) [r] ls
-- | A collection accepts a word when at least one member does.
instance Accept t => Accept [t] where
  accept ts w = or [ accept t w | t <- ts ]
-- | An input document is just a string of characters.
type Doc = [Char]
-- | Collect every source-location label in the regex, pre-order.
getLabels :: Re -> [Int]
getLabels re = case re of
  Eps x         -> [x]
  Choice x alts -> x : concatMap getLabels alts
  Pair x a b    -> x : getLabels a ++ getLabels b
  Star x a      -> x : getLabels a
  Ch x _        -> [x]
  Any x         -> [x]
  Phi           -> []
-- | The top-level label of a regex (empty for the unlabelled 'Phi').
getLabel :: Re -> [Int]
getLabel re = case re of
  Phi        -> []
  Eps x      -> [x]
  Choice x _ -> [x]
  Pair x _ _ -> [x]
  Star x _   -> [x]
  Ch x _     -> [x]
  Any x      -> [x]
-- | Placeholder label for positions whose location is irrelevant.
dontcare :: Int
dontcare = -999

-- | Replace the top-level label of a regex (no-op for the unlabelled 'Phi').
annotate :: Int -> Re -> Re
annotate lbl re = case re of
  Eps _         -> Eps lbl
  Choice _ alts -> Choice lbl alts
  Pair _ a b    -> Pair lbl a b
  Star _ a      -> Star lbl a
  Ch _ c        -> Ch lbl c
  Any _         -> Any lbl
  Phi           -> Phi
-- | The set of concrete alphabet symbols mentioned in a regex.
-- Note that 'Any' contributes no symbol even though it matches everything.
sigma :: Re -> String
sigma re = case re of
  Choice _ alts -> nub (concatMap sigma alts)
  Pair _ a b    -> nub (sigma a ++ sigma b)
  Star _ a      -> sigma a
  Ch _ c        -> [c]
  Any _         -> []
  Eps{}         -> []
  Phi           -> []
{-
* The Replacement Relation
We use the following judgement to denote the replacement relation
$$ \gamma, r \turns w : q $$
where $\gamma$ is the user requirement, $r$ is the existing regular expression, $w$ is the input document and $t$ is the replacement regular expression.
It reads, under user requirement $\gamma$, $r$ can be replaced by $t$ which accepts $w$.
There are two properties follows
1. $w \in r \Longrightarrow \Delta$ implies $ \Delta \vdash \gamma$ and $r \subseq t$.
2. $w \not \in r$ implies $r \gamma\over\approximate t$ and $w \in t \Longrightarrow \Delta$ and $\Delta \vdash \gamma$.
The first property ensures that the replacement subsumes the original regex $r$ if $w$ is already in $r$ and the matching result conforms to the user requirement.
The second property ensures that if $w$ is not in $r$, the replacement shall have the same requirement-shape as the original one and conform to the user requirement.
-}
-- | Top-level replacement-relation check: @replacement ureq r w r'@ holds
-- when r' keeps r's top label, the structural relation 'replacement''
-- holds, and any user requirement attached to that label accepts w.
replacement :: UReq -> Re -> Doc -> Re -> Bool
{-
i \in dom(\gamma)
w \in \gamma(i)
\gamma - {i}, r \vdash w, r'
----------------------------- (pExist)
\gamma, r^i \vdash w : r'^i
-}
replacement ureq r w r' =
  let ls = getLabel r
      ls' = getLabel r'
  in ls == ls' &&
     replacement' ureq r w r' &&
     ( case ls of
         { [] -> True
         ; (l:_) -> case lookup l ureq of
             -- this inner 'r' deliberately shadows the outer pattern
             -- variable: it is the required regex for label l
             { Just r -> w `match` [r]
             ; Nothing -> True } } )
{-
i \not \in dom(\gamma)
\gamma - {i}, r \vdash w : r'
------------------------------- (pIgnore)
\gamma, r^i \vdash d : r'^i
w \in r
------------------------- (rEmp)
\gamma, () \vdash w : r
-}
-- | Structural replacement relation; one clause per rule of the spec.
-- NOTE(review): patterns are non-exhaustive — e.g. a 'Pair' on the left
-- with a non-'Pair' replacement, or 'Phi' on either side, has no clause.
-- Confirm callers only supply shape-compatible pairs.
replacement' ureq (Eps ls) w r' = w `match` [r']
{-
d \in r
------------------------- (rLab)
\gamma, l \vdash d : r
-}
replacement' ureq (Ch ls c) w r' = w `match` [r']
{-
d \in r
------------------------- (rAny)
\gamma, . \vdash d : r
-}
replacement' ureq (Any ls) w r' = w `match` [r']
{-
fv(r1) = \bar{i1} fv(r2 = \bar{i2}
\gamma_{\bar{i1}}, r1 \vdash d1 : r1'
\gamma_{\bar{i2}}, r2 \vdash d2 : r2'
------------------------------------- (rSeq)
\gamma, r1r2 \vdash d1d2 : r1'r2'
-}
-- sequencing: restrict the requirement to each side's labels and try
-- every two-way split of the input word
replacement' ureq (Pair ls r1 r2) w (Pair ls' r1' r2') =
  let ls1 = getLabels r1
      ls2 = getLabels r2
      ureq1 = limit ureq ls1
      ureq2 = limit ureq ls2
      ws = split2 w
  in any (\(w1,w2) -> replacement ureq1 r1 w1 r1' && replacement ureq2 r2 w2 r2' ) ws
{-
we use \gamma_{\bar{i}} to denote { (i,\gamma(i)) | i \in \bar{i} and i \in dom(\gamma) }
\gamma, r1 \vdash d : r1'
-------------------------------------- ( rOr1)
\gamma, r1|r2 \vdash d : r1'|r2
\gamma, r2 \vdash d : r2'
-------------------------------------- ( rOr2)
\gamma, r1|r2 \vdash d : r1|r2'
-}
-- choice: walk the two branch lists in lockstep; if the left list runs
-- out first, fall back to plain matching against the remaining branches
replacement' ureq (Choice ls rs) w (Choice ls' rs') = replChoiceSub rs rs'
  where replChoiceSub [] rs = w `match` rs
        replChoiceSub _ [] = False
        replChoiceSub (r:rs) (r':rs') = replacement ureq r w r' || replChoiceSub rs rs'
{-
\gamma, r \vdash di : r' \forall i \in {1,n}
------------------------------------------------- ( rStar)
\gamma, r* \vdash d1...dn : r'*
-}
-- star: try every decomposition of w into consecutive chunks
replacement' ureq (Star ls r) w (Star ls' r') =
  let wss = split w
  in any (\ws -> all (\w' -> replacement ureq r w' r') ws) wss
{- Rules rSeq, rOr1, rOr2 and rStar validate the replacement relation indeterministically
\gamma,p \vdash d:p' \forall d\in D
---------------------------------------- (pDocS)
\gamma p \vdash D : p'
-}
-- | All ways to break a string into two (possibly empty) consecutive
-- pieces: the full-left and full-right splits first, then the interior
-- cut points; duplicates removed.
split2 :: String -> [(String,String)]
split2 [] = [("","")]
split2 whole@(c:rest) =
  nub ((whole, "") : ("", whole)
        : [ (c : front, back) | (front, back) <- split2 rest ])
-- | All decompositions of a string into non-empty consecutive chunks:
-- the undivided string first, then every split after each prefix length.
split :: String -> [[String]]
split "" = [[]]
split [c] = [[[c]]]
split whole@(_:rest) =
  [whole] : [ take n whole : more
            | n <- [1 .. length rest]
            , more <- split (drop n whole) ]
-- | Word matching by iterated partial derivatives: thread the set of
-- pending regexes through the input, then test whether any final state
-- is nullable. Uses 'foldl'' (strict) instead of 'foldl' so long inputs
-- do not build an unevaluated thunk chain.
match :: [Char] -> [Re] -> Bool
match cs rs =
  let finals = foldl' (\ts c -> concatMap (\t -> pderiv t c) ts) rs cs
  in any posEmpty finals
{-
* The Refinement Algorithm
Naively, we use the judgement
$$\gamma,p \models d : q $$
to denote the refinement algorithm.
However this is NOT going to work, consider the following example
\gamma = { (1 : .+ ) (2 : .+) }
r = ((A*)(A*))
or in explicit annotation
r = (0 :: ( (1 :: A*) (2 :: A*)))
d = A
Note that r is ambigous. If we send A to (1 :: A*), we have the result
\gamma = { (1 : .* ) (2 : .+) } and r = ((A*),(A*)) as we not only
consume the A in (1:: A*), but also update \gamma.
If we send A to (2 :: A*), the resulting pair will be different
\gamma = { (1 : .+ ) (2 : .*) } and r = ((A*),(A*))
In summary, when we perform partial derivative operation, the \gamma and the r go
together in pairs.
Hence we use the judgement
$${ (\gamma_1,p_1), ..., (\gamma_n,p_n) } \models d : {q_1,...,q_m} $$
where
${ (\gamma_1,p_1), ..., (\gamma_n,p_n) }$ are the user requirement and original
sub regex (nfa state) pairs. ${q_1,...,q_m}$ denotes the output set of
refined sub regex (nfa state).
The algorithm correctness property (Soundness)
Let $\gamma$ be the user requirement, $r$ denote the initial regular expression pattern, $w$ denote the input document
$ { \gamma, r } \models d : { r1', ... , rn' } $ implies $\gamma, r \vdash d : r1'|...|rn'$.
-}
-- r0 = A*
r0 = Star 1 (Ch 2 'A')
-- r1 = (A|B)*
r1 = Star 1 (Choice 2 [Ch 3 'A', Ch 4 'B'])
-- r2 = (A|B|C)*
r2 = Star 1 (Choice 2 [Ch 3 'A', Ch 4 'B', Ch 5 'C'])
-- r3 = AB
r3 = Pair 1 (Ch 2 'A') (Ch 3 'B')
-- r4 = ()
r4 = Eps 1
-- t3 = .+ (spelled out over the alphabet {A,B,C}; labels are dontcare)
t3 = Pair dontcare (Choice dontcare [Ch dontcare 'A',Ch dontcare 'B',Ch dontcare 'C']) (Star dontcare (Choice dontcare [Ch dontcare 'A',Ch dontcare 'B',Ch dontcare 'C']))
-- t4 = .+ (over the alphabet {A,B})
t4 = Pair dontcare (Choice dontcare [Ch dontcare 'A',Ch dontcare 'B']) (Star dontcare (Choice dontcare [Ch dontcare 'A',Ch dontcare 'B']))
-- sample input word
v = "ABC"
-- user requirements binding label 1 to t3 / t4 respectively
g1 = [(1::Int, t3)]
g2 = [(1::Int, t4)]
{-
w = <h><d>65103020</d></h>
r5 = .*<s>([0-9]+)</s>.*
g5 = [(1::Int, [0-9]+)]
-}
-- anySym x = Choice [x] (map (\i -> (Ch [(100*x+i)] (chr i))) ([47,60,62] ++ [100,104]))
-- anySym x = Choice [x] (map (\i -> (Ch [(100*x+i)] (chr i))) [0..128])
-- | Wildcard sub-pattern at label x.
anySym x = Any x
-- | Digit class [0-9] at label x; branch labels are derived as 100*x+i.
anyNum x = Choice x (map (\i -> (Ch (100*x+i) (chr i))) [48..57])
-- fixture: w against r5 = .*<s>[0-9]*</s>.*; g5 requires label 50 (the
-- digit group) to match a non-empty digit string
w = "<h><d>91234567</d></h>"
r5 = Pair 1 p1 (Pair 2 p2 (Pair 3 p3 (Pair 4 p4 p5)))
  where p1 = Star 20 (anySym 30)
        p2 = Pair 41 (Ch 42 '<') (Pair 43 (Ch 44 's') (Ch 45 '>'))
        p3 = Star 50 (anyNum 51)
        p4 = Pair 61 (Ch 62 '<') (Pair 63 (Ch 64 '/') (Pair 65 (Ch 66 's') (Ch 67 '>')))
        p5 = Star 70 (anySym 80)
g5 = [(50::Int, Pair 0 (anyNum 90) (Star 0 (anyNum 90)))]
-- same pattern against a longer-tag document
w' = "<head><div>91234567</div></head>"
r5' = Pair 1 p1 (Pair 2 p2 (Pair 3 p3 (Pair 4 p4 p5)))
  where p1 = Star 20 (anySym 30)
        p2 = Pair 41 (Ch 42 '<') (Pair 43 (Ch 44 's') (Ch 45 '>'))
        p3 = Star 50 (anyNum 51)
        p4 = Pair 61 (Ch 62 '<') (Pair 63 (Ch 64 '/') (Pair 65 (Ch 66 's') (Ch 67 '>')))
        p5 = Star 70 (anySym 80)
g5' = [(50::Int, Pair 0 (anyNum 90) (Star 0 (anyNum 90)))]
-- variant requirement: label 50 must match any non-empty string
g5'' = [(50::Int, Pair 0 (anySym 90) (Star 0 (anySym 90)))]
{-
w1 = (A)
r6 = <(A|B)*>
-}
-- fixture: w1 = (A) against r6 = <(A|B)*>; g6 binds label 3 to r1
w1 = "(A)"
r6 = Pair 0
       (Ch 1 '<')
       (Pair 2 (Star 3 (Choice 4 [(Ch 5 'A'),(Ch 6 'B')]))
               (Ch 7 '>'))
g6 = [(3::Int, r1)]
{-
w2 = (A)
r7 = .*<(A|B)*>.*
there are still some space for pruning
sortBy (\x y -> compareREnv (snd x) (snd y) ) (ref' [(g7,r7, IM.empty)] "(A)") !! 0
sortBy (\x y -> compareREnv (snd x) (snd y) ) (ref' [(g7,r7, IM.empty)] "(A)") !! 1
sortBy (\x y -> compareREnv (snd x) (snd y) ) (ref' [(g7,r7, IM.empty)] "(A)") !! 2
sortBy (\x y -> compareREnv (snd x) (snd y) ) (ref' [(g7,r7, IM.empty)] "(A)") !! 3
sortBy (\x y -> compareREnv (snd x) (snd y) ) (ref' [(g7,r7, IM.empty)] "(A)") !! 4
-}
-- fixture: w2 = (A) against r7 = .*<(A|B)*>.*
-- (negative labels mark the surrounding padding sub-patterns)
w2 = "(A)"
r7 = Pair (-10)
       (Star (-20) (Choice (-21) [Ch (-22) 'A', Ch (-23) 'B', Ch (-24) '<', Ch (-25) '>']))
       (Pair (-30)
          (Pair 0
             (Ch 1 '<')
             (Pair 2 (Star 3 (Choice 4 [(Ch 5 'A'),(Ch 6 'B')]))
                     (Ch 7 '>')))
          (Star (-40) (Choice (-41) [Ch (-42) 'A', Ch (-43) 'B', Ch (-44) '<', Ch (-45) '>']))
       )
g7 = [(3::Int, r1)]
-- | Does any refinement op anywhere in the environment carry 'Strong' level?
hasStrong :: REnv -> Bool
hasStrong renv = any isStrong (concat (IM.elems renv))
{-
main :: IO ()
main = do
[si] <- getArgs
let i = read si
print $ pretty r5
print $ pretty $ (refine g5 r5 w) !! i
-}
{-
New idea: refinement algo takes ureq re pair and the input words returns a set of
```
----------------------------------------------------------------------------------------------------------------------------------- (Eps)
\overline{ \gamma, r, \Psi } |= \epsilon: { \Psi | (\gamma, r, \Psi) \in \overline{ \gamma, r, \Psi }, \epsilon \in \Psi(r) } ++
{ \Psi . (i -> +\epsilon, s) } | (\gamma, r, \Psi) \in \overline{ \gamma, r, \Psi } }
\overline{ \gamma, r, \Psi } / l = \overline { \gamma', r', \Psi' }
\overline { \gamma', r', \Psi o \Psi' } |= w: \overline {\Psi''}
----------------------------------------------------------------------------------------------------------------------------------- (Ind)
\overline{ \gamma, r, \Psi } |= lw : {\Psi''}
```
Note that from (Ind) the refinement environment \Psi is passed along
-}
-- | Refinement environment: maps a source-location label to the
-- refinement operations suggested for it.
type REnv = IM.IntMap [ROp]
-- | A single refinement suggestion, tagged with a recommendation level.
data ROp = RATr Re RLevel -- add transition
         | RASt Re RLevel -- add state
         | RMkFin RLevel -- make final
         | RNoCh RLevel -- no change
  deriving (Eq,Show)
-- | The regex payload of a refinement op, when it carries one.
reInROp :: ROp -> Maybe Re
reInROp rop = case rop of
  RATr re _ -> Just re
  RASt re _ -> Just re
  RMkFin _  -> Nothing
  RNoCh _   -> Nothing
-- | All regex payloads carried by a list of refinement ops: 'RATr' and
-- 'RASt' contribute their regex; 'RNoCh' and 'RMkFin' carry none.
-- Rewritten from manual list recursion to the idiomatic 'mapMaybe'.
resInROps :: [ROp] -> [Re]
resInROps = mapMaybe payload
  where
    payload (RATr re _) = Just re
    payload (RASt re _) = Just re
    payload _           = Nothing
-- | All regex payloads carried anywhere in a refinement environment.
resInREnv :: REnv -> [Re]
resInREnv renv = resInROps (concat (IM.elems renv))
-- | Ordering used to rank candidate refinement environments: ascending by
-- total size, with ties broken lexicographically on per-category counts
-- (strong no-change, strong make-final, strong add-transition, strong
-- add-state, weak no-change, weak make-final, weak add-transition).
-- NOTE(review): the parameters are named r2/r1 in that order and the
-- numeric suffixes in the tie-breakers refer to the SECOND/FIRST argument
-- respectively — verify the intended comparison direction. Also wSt1/wSt2
-- (weak add-state) are computed but never compared.
compareREnv :: REnv -> REnv -> Ordering
compareREnv r2 r1 =
  let c1 = renvSize r1
      c2 = renvSize r2
      sTr1 = countStrongATr r1
      sTr2 = countStrongATr r2
      sSt1 = countStrongASt r1
      sSt2 = countStrongASt r2
      sNC1 = countStrongNoCh r1
      sNC2 = countStrongNoCh r2
      sMF1 = countStrongMkFin r1
      sMF2 = countStrongMkFin r2
      wTr1 = countWeakATr r1
      wTr2 = countWeakATr r2
      wSt1 = countWeakASt r1
      wSt2 = countWeakASt r2
      wNC1 = countWeakNoCh r1
      wNC2 = countWeakNoCh r2
      wMF1 = countWeakMkFin r1
      wMF2 = countWeakMkFin r2
  in case compare c2 c1 of
       { EQ -> case compare sNC1 sNC2 of
           { EQ -> case compare sMF1 sMF2 of
               { EQ -> case compare sTr1 sTr2 of
                   { EQ -> case compare sSt1 sSt2 of
                       { EQ -> case compare wNC1 wNC2 of
                           { EQ -> case compare wMF1 wMF2 of
                               { EQ -> compare wTr1 wTr2
                               ; others -> others }
                           ; others -> others }
                       ; others -> others }
                   ; others -> others }
               ; others -> others }
           ; others -> others }
       ; others -> others }
-- count the number of ROps in renv
-- | Total number of refinement ops across all labels.
renvSize :: REnv -> Int
renvSize = sum . map length . IM.elems

-- | Number of 'Strong'-level ops in the environment.
renvStrong :: REnv -> Int
renvStrong = sum . map (length . filter isStrong) . IM.elems

-- | Number of 'Weak'-level ops in the environment.
renvWeak :: REnv -> Int
renvWeak = sum . map (length . filter isWeak) . IM.elems

-- | Number of add-state ops in the environment.
renvRASt :: REnv -> Int
renvRASt = sum . map (length . filter isRASt) . IM.elems
-- | Count the ops in the environment satisfying a predicate. Factored out
-- of the eight counters below, which previously duplicated this exact
-- traversal verbatim.
countROps :: (ROp -> Bool) -> REnv -> Int
countROps p = sum . map (length . filter p) . IM.elems

-- | Number of 'Strong' add-transition ops.
countStrongATr :: REnv -> Int
countStrongATr = countROps (\x -> isStrong x && isRATr x)

-- | Number of 'Strong' add-state ops.
countStrongASt :: REnv -> Int
countStrongASt = countROps (\x -> isStrong x && isRASt x)

-- | Number of 'Strong' no-change ops.
countStrongNoCh :: REnv -> Int
countStrongNoCh = countROps (\x -> isStrong x && isRNoCh x)

-- | Number of 'Strong' make-final ops.
countStrongMkFin :: REnv -> Int
countStrongMkFin = countROps (\x -> isStrong x && isMkFin x)

-- | Number of 'Weak' add-transition ops.
countWeakATr :: REnv -> Int
countWeakATr = countROps (\x -> isWeak x && isRATr x)

-- | Number of 'Weak' add-state ops.
countWeakASt :: REnv -> Int
countWeakASt = countROps (\x -> isWeak x && isRASt x)

-- | Number of 'Weak' no-change ops.
countWeakNoCh :: REnv -> Int
countWeakNoCh = countROps (\x -> isWeak x && isRNoCh x)

-- | Number of 'Weak' make-final ops.
countWeakMkFin :: REnv -> Int
countWeakMkFin = countROps (\x -> isWeak x && isMkFin x)
-- | Does the op carry the 'Strong' recommendation level?
isStrong :: ROp -> Bool
isStrong (RATr _ lvl) = lvl == Strong
isStrong (RASt _ lvl) = lvl == Strong
isStrong (RNoCh lvl)  = lvl == Strong
isStrong (RMkFin lvl) = lvl == Strong
-- | Does the op carry the 'Weak' recommendation level?
isWeak :: ROp -> Bool
isWeak (RATr _ lvl) = lvl == Weak
isWeak (RASt _ lvl) = lvl == Weak
isWeak (RNoCh lvl)  = lvl == Weak
isWeak (RMkFin lvl) = lvl == Weak
-- | Is the op an add-transition suggestion?
isRATr :: ROp -> Bool
isRATr rop = case rop of
  RATr _ _ -> True
  _        -> False

-- | Is the op an add-state suggestion?
isRASt :: ROp -> Bool
isRASt rop = case rop of
  RASt _ _ -> True
  _        -> False

-- | Is the op a no-change marker?
isRNoCh :: ROp -> Bool
isRNoCh rop = case rop of
  RNoCh _ -> True
  _       -> False

-- | Is the op a make-final suggestion?
isMkFin :: ROp -> Bool
isMkFin rop = case rop of
  RMkFin _ -> True
  _        -> False

-- | Recommendation strength attached to each refinement op.
data RLevel = Strong | Weak deriving (Ord, Eq, Show)
{-
renv_1 \entails renv_2 iff
\forall l in renv_2 s.t. renv_1(l) superseteq renv_2(l)
note the equality among ROp we ignore the loc of the Re
-}
-- | @entail r1 r2@: every label of r2 is present in r1 and its ops there
-- subsume r2's ops at that label.
entail :: REnv -> REnv -> Bool
entail r1 r2 = all covered (IM.keys r2)
  where
    covered k =
      case (IM.lookup k r1, IM.lookup k r2) of
        (Just ops1, Just ops2) -> ops1 `ropSubsume` ops2
        (_, _)                 -> False

-- | Every op of the second list occurs (by 'Eq') in the first.
ropSubsume :: [ROp] -> [ROp] -> Bool
ropSubsume ops1 ops2 = all (`elem` ops1) ops2
-- top level function
-- | Refine a pattern so it accepts the word under the user requirement:
-- run 'ref' to collect candidate refinement environments, rank them with
-- 'compareREnv' (deduplicated), and apply each to the original regex.
refine :: UReq -> Re -> [Char] -> [Re]
refine ureq r cs =
  let renvs = nub $ sortBy compareREnv (ref [(ureq, r, IM.empty)] cs)
      -- debug trace; forced via `seq` only when the comment below is enabled
      io = renvs `seq` logger ("refine "++ (show $ map (\renv -> " | " ++ show renv ) renvs))
  in {- io `seq` -}
     map (\renv -> apply_ renv r) renvs
-- the main routine
-- calling urepderiv to apply the R|-^L r/l => { R,r,\sigma } over l1,...,ln
-- the \sigma(s) are propagated by urepderiv
-- we also prune away redundant states
--
-- | Consume the input word character by character, extending/pruning the
-- candidate (requirement, regex, refinement) states; at the end, return
-- the environments of accepting states plus make-final repairs for
-- non-accepting states whose requirement would allow the empty word.
ref :: [(UReq, Re, REnv)] -> [Char] -> [REnv]
ref urs [] =
  let io = logger ("ref [] "++ (show $ map (\(_,r,renv) -> pretty r ++ " | " {- ++ show renv -}) urs))
  in {- io `seq` -}
     -- states already accepting: keep their environments as-is
     [ renv | (ureq, r, renv) <- urs, posEmpty {- (renv `apply_` r)-} r ] ++
     [ renv' `combineEnv` renv -- try to fix those states which are non-final?
     | (ureq, r, renv) <- urs
     , renv' <- mkFin r
     , not (posEmpty {- (renv `apply_` r) -} r)
     , any (\i -> case lookupUR i ureq of
                    { Nothing -> False
                    ; Just t -> posEmpty t }) (getLabel r) ]
ref urs (l:w) =
  let urs' = concatMap (\ (ur,r,renv) -> prune3 r $ prune4 $ urePDeriv (ur, r, renv) l) urs
      -- the `seq` on io forces the (cheap) progress trace each step
      io = logger $ length (l:w) -- logger ("ref " ++ (l:w) ++ (show $ map (\(_,r,renv) -> pretty r ++ "|" ++ show renv) urs) )
  in io `seq` ref urs' w
-- a debugging function: like 'ref' but returns the regex alongside each
-- refinement environment, and logs each environment combination
ref' :: [(UReq, Re, REnv)] -> [Char] -> [(Re,REnv)]
ref' urs [] = [ (r,renv) | (ureq, r, renv) <- urs ]
ref' urs (l:w) = let
    urs' = concatMap (\ (ur,r,renv) ->
             let urs'' = urePDeriv (ur, r, renv) l
             in prune3 r $ prune4 $ map (\(ur', r', renv') ->
                  let io = logger $ ("combining " ++ show renv ++ " with " ++ show renv' ++ " yielding " ++ (show $ combineEnv renv renv') )
                  in (ur', r', combineEnv renv renv')) urs'') urs
  in ref' urs' w
{-
pruning by checking for entailment among REnvs, looking for local optimal
this pruning is not working. see
(A+B)*(B)(C*)
matching "DDC"
where UReq = [(3, C*)]
rops_1 = [(0, ATr D), (0, ATr C)]
rops_2 = [(0, ATr D), (1, ATr D)]
rops_2 should be favored because C is matched with 3 which is under the UReq
however, with this pruning scheme, rops_2 is pruned at the 2nd D input character, before the 'C' is considered.
as rops_1 = [(0, ATr D) ] is entailed by rops_2 = [(0, ATr D), (1, ATr D)]
-}
-- | Drop candidates whose refinement environment is entailed by a later
-- (smaller) candidate; candidates are first sorted by descending size.
prune :: [(UReq, Re, REnv)] -> [(UReq, Re, REnv)]
prune cands = prune' (sortBy bySizeDesc cands)
  where
    bySizeDesc (_,_,e1) (_,_,e2) = compare (renvSize e2) (renvSize e1)

-- | One left-to-right elimination pass over the sorted candidates.
prune' :: [(UReq, Re, REnv)] -> [(UReq, Re, REnv)]
prune' [] = []
prune' (c:cs)
  | any (\d -> trd c `entail` trd d) cs = prune' cs
  | otherwise                           = c : prune' cs

-- | Third component of a triple.
trd :: (a,b,c) -> c
trd (_,_,z) = z
{-
prune by semantic equivalent
too expensive!
-}
-- | Drop candidates whose applied refinement is semantically equivalent
-- to (and ranked no better than) a later candidate's. (Expensive: full
-- language-equivalence checks.)
prune2 :: Re -> [(UReq, Re, REnv)] -> [(UReq, Re, REnv)]
prune2 _ [] = []
prune2 r (c:cs)
  | any dominated cs = prune2 r cs
  | otherwise        = c : prune2 r cs
  where
    dominated d =
      let ec = trd c
          ed = trd d
      in (apply_ ec r) `equiv` (apply_ ed r) && (compareREnv ec ed >= EQ)
{-
prune by isomorphism.
Let r be a regular expression, (l1, rops1) and (l2, rops2) Label-ROp pairs, they are considered iso w.r.t to r iff
1) l1 == l2 and rops1 == rops2 or
2) l1 and l2 are labels of the two alternatives of the choice sub-exp in r
e.g. r = (Choice [1] (Ch [2] 'A') (Ch [3] 'B'))
(2, ATr (Ch [4] 'C')) and (3, ATr (Ch [5] 'C')) are considered iso w.r.t to r.
-}
-- Rationale: applying the above two operations lead to the semantically equivalent regex
-- | Prune candidates whose refinement environment is isomorphic (w.r.t.
-- r) to a later candidate's: same ops at the same labels, or the same ops
-- at sibling labels of a common choice sub-expression ('choiceAlts').
prune3 :: Re -> [(UReq, Re, REnv)] -> [(UReq, Re, REnv)]
prune3 r [] = []
prune3 r (x:xs)
  | any (\y -> iso r (trd x) (trd y)) xs = prune3 r xs
  | otherwise = x:(prune3 r xs)
  where
    -- environments of different sizes can never be isomorphic
    iso r renv_x renv_y
      | (IM.size renv_x /= IM.size renv_y) = False
      | otherwise =
          let ps1 = IM.toList renv_x
              ps2 = IM.toList renv_y
          in isoPairs r ps1 ps2
    -- walk both association lists in lockstep
    isoPairs r [] [] = True
    isoPairs r _ [] = False
    isoPairs r [] _ = False
    isoPairs r ((lx,x):xs) ((ly,y):ys) -- assumption, IM.toList sort by the keys
      | lx == ly = (x == y) && (isoPairs r xs ys)
      | choiceAlts r lx ly = (x == y) && (isoPairs r xs ys)
      | otherwise = False
-- | Drop candidates whose REnv contains duplicate (non-RNoCh) repair ops.
prune4 :: [(UReq, Re, REnv)] -> [(UReq, Re, REnv)]
prune4 [] = []
prune4 (x@(_, _, renv) : rest)
  | hasDupROps renv = prune4 rest
  | otherwise       = x : prune4 rest
-- | True iff some source location carries duplicate repair ops,
-- ignoring the no-change (RNoCh) ops.
hasDupROps :: REnv -> Bool
hasDupROps renv = any dupAt (IM.toList renv)
  where
    dupAt (_, rops) =
      let real = filter (not . isRNoCh) rops
      in length (nub real) /= length real
-- check whether the two labels are siblings under the choice sub-exp in r
-- | Check whether the two labels are siblings under some choice sub-expression
-- of r (i.e. both occur among the labels of the alternatives of one Choice).
choiceAlts :: Re -> Int -> Int -> Bool
choiceAlts re x y = case re of
  Choice _ rs  -> let ls = concatMap getLabels rs
                  in x `elem` ls && y `elem` ls
  Pair _ r1 r2 -> choiceAlts r1 x y || choiceAlts r2 x y
  Star _ r     -> choiceAlts r x y
  _            -> False -- Ch / Eps / Any / Phi have no choice alternatives
-- | One partial-derivative step lifted to a (user-requirement, regex,
-- repair-environment) triple: runs 'urPDeriv' in a fresh 'Env' seeded with
-- the largest label currently in scope (so fresh labels never collide),
-- then simplifies each result and filters out redundant repairs.
urePDeriv :: (UReq, Re, REnv) -> Char -> [(UReq, Re, REnv)]
urePDeriv (ur, r, psi) l =
  let
     -- seed for fresh-label generation: max over r's labels and the labels
     -- inside the repair ops already accumulated in psi
     max_i = maximum $ (getLabels r) ++ (concatMap getLabels $ resInREnv psi)
     (t,e) = run (Env max_i) (urPDeriv (ur, r) l Weak)
     io = logger ("urePDeriv: " ++ show ur ++ "|" ++ pretty r ++ "|" ++ show l ++ "|" ++ show t)
  in {- io `seq` -} [ (ur', r''', psi'' `combineEnv` psi) | (ur', r', psi') <- t, -- let r''' = r', let psi'' = psi' ]
       let r'' = r', -- run_ e (psi' `apply` r'), -- we can only apply simplification after we apply psi' to r'
       let io = logger ("simpl " ++ show r'' ++ " with " ++ show psi'),
       let (r''',psi'') = {- io `seq` -} simpl r'' psi' ,
       not (redundantREnv psi'' r) ]
  -- not (isRedundant psi r psi'' r''' ) ] -- e.g. adding 'a' to (a|b), since there already an 'a' -- can't just check whether r \equiv r''' , because there are RNoCh rops
-- check whether REnv is redundant w.r.t to r
-- | Check whether an REnv is redundant w.r.t. r: some location's op list
-- contains an add-transition that is already present in r.
redundantREnv :: REnv -> Re -> Bool
redundantREnv renv r =
  any (\(src, ops) -> redundantOps src ops r) (IM.toList renv)
  where
    -- only the add-transition (RATr) ops can be redundant here; the
    -- original also collected RASt/RMkFin but never used them
    redundantOps :: SrcLoc -> [ROp] -> Re -> Bool
    redundantOps i ops re =
      let aTrOps = [ op | op@(RATr _ _) <- ops ]
      in any (\aTrOp -> redundantRATr i aTrOp re) aTrOps
-- check whether (i, {RATr l}) is redundant
-- since i is a leaf node in r
-- the rop is redundant if l is already a sibling under the choice (+) operator w.r.t. i in r.
-- e.g. (2, RAtr (3:a)) is redundant w.r.t to (1:a+2:b)
-- | Is (i, RATr t) redundant in r? Yes when t already occurs among the
-- choice siblings of i. (Partial: only defined on RATr ops, as before.)
redundantRATr :: SrcLoc -> ROp -> Re -> Bool
redundantRATr i (RATr t _) r = t `elem` choiceSiblings r i
-- | The alternatives of the innermost Choice that directly carries label i;
-- empty when i is not directly under any Choice.
choiceSiblings :: Re -> SrcLoc -> [Re]
choiceSiblings re i = case re of
  Choice _ rs
    | i `elem` (concatMap getLabel rs) -> rs -- TODO: choice can be nested.
    | otherwise -> concatMap (\r -> choiceSiblings r i) rs
  Pair _ r1 r2 -> choiceSiblings r1 i ++ choiceSiblings r2 i
  Star _ r     -> choiceSiblings r i
  _            -> []
{-
isRedundant :: REnv -> Re -> REnv -> Re -> Bool
isRedundant renv1 r1 renv2 r2 =
let diff = diffREnv renv2 renv1
in -- (not (all isRNoCh diff)) && (r1 `equiv` r2)
-- checking for (r1 `equiv` r2) is expensive
(any (\(k, rops) -> any (not . isRNoCh) rops) (IM.toList diff)) && bogusDiff diff r1
where bogusDiff diff (Choice (l:_) rs) = -- the diff is bogus if it is applied to r1, it does not change the semantics e.g. adding 'a' to (a+b)
case IM.lookup l diff of
{ Just rops -> any (\(RATr r _) -> r `elem` rs) rops
; Nothing -> False }
bogusDiff diff (Pair _ r1 r2) = bogusDiff diff r1 || bogusDiff diff r2
bogusDiff diff (Star _ r) = bogusDiff diff r
bogusDiff diff (Ch _ _) = False
bogusDiff diff (Any _) = False
bogusDiff diff (Eps _) = False
bogusDiff diff Phi = False
diffREnv :: REnv -> REnv -> REnv -- rops in r2 but not in r1
diffREnv r2 r1 =
let ps = IM.toList r2
ps' = foldl (\acc (k,rops) -> case IM.lookup k r1 of
{ Nothing -> acc ++ [(k,rops)]
; Just rops' -> acc ++ [(k,filter (\rop -> not (rop `elem` rops')) rops)] } ) [] ps
in IM.fromList ps'
-}
-- finding the maximal among two RLevels
-- | Join of two RLevels: 'Strong' dominates 'Weak'.
maximal :: RLevel -> RLevel -> RLevel
maximal a b = case (a, b) of
  (Strong, _) -> Strong
  (_, Strong) -> Strong
  _           -> Weak
{-
newtype State s a = State { runState :: (s -> (a,s)) }
instance Monad (State s) where
return a = State (\s -> (a,s))
(State x) >>= f = State (\s -> let (a,s') = x s
stb = f a
in (runState stb) s')
-}
-- | Run a State computation from an initial state, returning result and
-- final state (argument order flipped relative to 'runState').
run :: s -> State s a -> (a, s)
run s action = runState action s
-- | Like 'run' but discard the final state.
run_ :: s -> State s a -> a
run_ s = fst . run s
-- | Label-generation environment: records the largest label id handed out
-- so far, so fresh labels can be minted without collisions.
data Env = Env { maxId :: Int
               } deriving Show
-- | Overwrite the stored maximum label id.
setMaxId :: Int -> State Env ()
setMaxId i = state $ \env -> ((), env { maxId = i })
-- | Read the stored maximum label id.
getMaxId :: State Env Int
getMaxId = state $ \e -> (maxId e, e)
-- | Mint a fresh label id: bump the stored maximum and return the new value.
incMaxId :: State Env Int
incMaxId = do
  cur <- getMaxId
  let fresh = cur + 1
  setMaxId fresh
  return fresh
-- | Partial derivative of a regex w.r.t. an input character, under a user
-- requirement, producing repair environments (REnv) describing the edits
-- (add-state, add-transition, no-change) needed for the regex to consume
-- the character. Runs in 'State Env' to mint fresh labels. The RLevel
-- records whether a repair is forced ('Strong') or optional ('Weak').
urPDeriv :: (UReq, Re) -> Char -> RLevel -> State Env [(UReq, Re, REnv)]
-- Eps: the regex is exhausted; repairing means appending a new state.
urPDeriv (ur, Eps i) l rlvl
   -- label is under a user requirement: the appended state is Strong and
   -- the requirement is advanced by the pderiv of its regex
   | i `inUR` ur = do
     { next_i <- incMaxId
     -- let next_i = i
     ; return [ ((updateUR i r' ur), Eps next_i, IM.singleton i [RASt (Ch next_i l) Strong])
              | let r = fromJust (lookupUR i ur), r' <- pderiv r l ]
     }
   | otherwise = do
     { next_i <- incMaxId
     -- let next_i = i
     ; return [ (ur, Eps next_i, IM.singleton i [RASt (Ch next_i l) (maximal rlvl Weak)]) ]
     }
-- Ch: either the letter matches (no change) or a new transition is added.
urPDeriv (ur, (Ch i l)) l' rlvl =
  case lookup i ur of
   { Just r | l == l' -> do
        { -- next_i <- incMaxId
        ; return [ ((updateUR i r' ur), (Eps i), IM.singleton i [RNoCh Strong] )
                 | r' <- pderiv r l ]
        }
            | l /= l' -> do
        { -- next_i <- incMaxId
        ; next_i2 <- incMaxId
        ; return [ ((updateUR i r' ur), (Eps i), IM.singleton i [RATr (Ch next_i2 l') Strong]) | r' <- pderiv r l]
        }
   ; Nothing | l == l' -> do
        { -- next_i <- incMaxId
        ; return [ (ur, Eps i, IM.singleton i [RNoCh (maximal rlvl Weak)] ) ]
        }
             | l /= l' -> do
        { -- next_i <- incMaxId
        ; next_i2 <- incMaxId
        ; return [ (ur, Eps i, IM.singleton i [RATr (Ch next_i2 l') (maximal rlvl Weak)] ) ]
        }
   }
-- Any: always matches; only the requirement (if present) is advanced.
urPDeriv (ur, (Any i)) l rlvl =
  case lookup i ur of
   { Just r ->
       return [ ((updateUR i r' ur), (Eps i), IM.singleton i [RNoCh Strong] ) | r' <- pderiv r l ]
   ; Nothing -> return [ (ur, Eps i, IM.singleton i [RNoCh (maximal rlvl Weak)] ) ]
   }
-- Pair: derive the left component; when the left is nullable (or can be
-- repaired to be final via mkFin) also derive the right component.
urPDeriv (ur, Pair i r1 r2) l rlvl =
  case lookup i ur of
   { Just p ->
       case pderiv p l of
        { [] -> return []
        ; ps | posEmpty r1 -> do
             { let ur2 = ur `limit` fv r2
             ; t1 <- urPDeriv (ur `limit` (fv r1), r1) l Strong
             ; t2 <- urPDeriv (ur2, r2) l Strong
             ; return $ [ ((ur' ++ ur2 ++ [(i, Choice dontcare ps)]) , (Pair i r1' r2), renv) | (ur', r1', renv) <- t1 ] ++ t2
             }
             | otherwise -> do
             { let ur2 = ur `limit` fv r2
             ; t1 <- urPDeriv (ur `limit` (fv r1), r1) l Strong
             ; t2 <- urPDeriv (ur2, r2) l Strong
             ; return $ [ ((ur' ++ ur2 ++ [(i, Choice dontcare ps)]) , (Pair i r1' r2), renv) | (ur', r1', renv) <- t1] ++ [ (ur', r2', renv `combineEnv` renv') | (ur', r2', renv) <- t2, renv' <- mkFin r1 ]
             }
        }
   ; Nothing | posEmpty r1 -> do
        { let ur2 = ur `limit` fv r2
        ; t1 <- urPDeriv (ur `limit` (fv r1), r1) l rlvl
        ; t2 <- urPDeriv (ur2, r2) l rlvl
        ; return $ [ ((ur' ++ ur2) , (Pair i r1' r2), renv) | (ur', r1', renv) <- t1 ] ++ t2
        }
             | otherwise -> do
        { let ur2 = ur `limit` fv r2
        ; t1 <- urPDeriv (ur `limit` (fv r1), r1) l rlvl
        ; t2 <- urPDeriv (ur2, r2) l rlvl
        ; return $ [ ((ur' ++ ur `limit` (fv r2)) , (Pair i r1' r2), renv) | (ur', r1', renv) <- t1 ] ++ [ (ur', r2', renv `combineEnv` renv') | (ur', r2', renv) <- t2, renv' <- mkFin r1 ]
        }
   }
-- Choice: derive every alternative and concatenate the results.
urPDeriv (ur, Choice i rs) l rlvl =
  case lookup i ur of
   { Just p ->
       case pderiv p l of
        { [] -> return []
        ; ps -> do
            { let ur' = updateUR i (Choice dontcare ps) ur
            ; ts <- mapM (\ r -> urPDeriv (ur', r) l Strong) rs
            ; return $ concat ts
            -- todo:move i:is to each r
            }
        }
   ; Nothing -> do
       { ts <- mapM (\ r -> urPDeriv (ur, r) l rlvl) rs
       ; return $ concat ts
       }
   }
-- Star: unfold one iteration (Pair of the derivative and the star itself).
urPDeriv (ur, Star i r) l rlvl =
  case lookup i ur of
   { Just p ->
       case pderiv p l of
        { [] -> return []
        ; ps -> do
            { let ur' = updateUR i (Choice dontcare ps) ur
            ; t <- urPDeriv (ur',r) l Strong
            ; return [ (ur'', Pair i r' (Star i r), renv)
                     | (ur'', r', renv) <- t ]
            }
        }
   ; Nothing -> do
       { t <- urPDeriv (ur,r) l rlvl
       ; return [ (ur', Pair i r' (Star i r), renv)
                | (ur', r', renv) <- t ]
       }
   }
-- Phi (and anything unforeseen) is an error: Phi accepts nothing.
urPDeriv ur c rlvl = error $ "unhandled input: " ++ (show ur) ++ "/" ++ (show c)
-- make a non-empty regex accept epsilon, structurally -- todo: shall we consider the ureq?
-- | Compute the repair environments that would make a regex accept epsilon,
-- structurally: leaves (Ch/Any) are marked with RMkFin, Eps and Star need
-- nothing, a Pair needs both components fixed, and a Choice needs any one
-- alternative fixed. Phi (and anything else) cannot be repaired.
mkFin :: Re -> [REnv]
mkFin (Eps _)        = [IM.empty]
mkFin (Ch i _)       = [IM.singleton i [RMkFin Weak]]
mkFin (Any i)        = [IM.singleton i [RMkFin Weak]]
mkFin (Pair _ r1 r2) = [ renv1 `combineEnv` renv2 | renv1 <- mkFin r1, renv2 <- mkFin r2 ]
-- BUG FIX: this clause referred to unbound r1/r2; a choice accepts epsilon
-- when any single alternative does, so collect the repairs of every branch.
mkFin (Choice _ rs)  = concatMap mkFin rs
mkFin (Star _ _)     = [IM.empty]
mkFin _              = []
-- return all label annotations of a re
-- | Collect every label annotation occurring in a regex (pre-order).
fv :: Re -> [Int]
fv re = case re of
  Eps i        -> [i]
  Ch i _       -> [i]
  Any i        -> [i]
  Pair i r1 r2 -> i : (fv r1 ++ fv r2)
  Choice i rs  -> i : concatMap fv rs
  Star i r     -> i : fv r
  _            -> [] -- Phi carries no label
-- restrict the user requirement to a given set of labels
-- | Keep only the requirement entries whose label is in the given set.
limit :: UReq -> [Int] -> UReq
limit ur is = [ p | p@(i, _) <- ur, i `elem` is ]
-- applying REnv to a Re
-- | Pure wrapper around 'apply': seeds the fresh-label counter with the
-- maximum label found in either the regex or the repair ops, then applies
-- the repair environment and discards the final Env.
apply_ :: REnv -> Re -> Re
apply_ renv r = let is = concatMap getLabels $ resInROps (concatMap snd (IM.toList renv))
                    max_i = maximum $ (getLabels r) ++ is
                    io = logger "apply_ \n ============================================================================\n ============================================================================\n ============================================================================\n ============================================================================"
                in {- io `seq` -} run_ (Env max_i) (apply renv r)
-- note that for all (i,ROp) \in renv, i is a label to leaf node
-- | Apply a repair environment to a regex: at each labelled leaf, realize
-- the accumulated ops — RASt appends states (sequenced after the leaf),
-- RATr adds transitions (as choice alternatives), RMkFin unions in epsilon.
-- Inner nodes just recurse. The input is first simplified via 'simpl'.
-- Note that for all (i, ROp) in renv, i is a label of a leaf node.
apply :: REnv -> Re -> State Env Re
apply renv r =
  let io = logger ("applying " ++ pretty r)
      (s,renv') = {- io `seq` -} simpl r renv
      -- todo: this changes the renv' but it seems faster, not sure about correctness
      -- more thoughts, maybe we shall split the simp into choice simplification and others non-choice simplification, because choice simplification is the only one that change the REnv.
  in case s of
    { (Eps i) ->
       case IM.lookup i renv' of
         { Just ops -> do
            { let (trans, states, eps) =
                    -- NOTE(review): unlike the Ch/Any folds below, this fold
                    -- has no catch-all; an RNoCh op here would be a pattern
                    -- match failure — TODO confirm RNoCh never targets Eps.
                    foldl (\(ts,ss,es) op -> case op of
                             { (RATr t _) -> (ts++[t], ss, es)
                             ; (RASt t _) -> (ts, ss ++ [t], es)
                             ; (RMkFin _) -> (ts, ss, True)
                             } ) ([],[],False) ops
            -- create a sequence concatenation out of the add-states ops
            ; ss' <- mkSeqS =<< mapM (apply renv') states
            -- create a choice out of the add transitions ops
            ; tt' <- mkChoiceS trans
            -- union the eps with ss' and tt'
            ; case (trans,states) of
               { ([], []) -> return s
               ; ([], (_:_)) -> mkChoiceS [ss',s]
               ; ((_:_),[] ) -> mkChoiceS [tt',s]
               ; (_, _) -> mkChoiceS [ss',tt',s]
               }
            }
         ; Nothing -> return s
         }
    ; (Ch i c) ->
       case IM.lookup i renv' of
         { Just ops -> do
            { let (trans, states, eps) =
                    foldl (\(ts,ss,es) op -> case op of
                             { (RATr t _) -> (ts++[t], ss, es)
                             ; (RASt t _) -> (ts, ss ++ [t], es)
                             ; (RMkFin _) -> (ts, ss, True)
                             ; _ -> (ts, ss, es)
                             } ) ([],[],False) ops
            -- create a sequence concatenation out of the add-states ops
            ; ss' <- mkSeqS =<< mapM (apply renv') states
            -- append ss' to s
            ; ss'' <- mkSeqS [s, ss']
            -- create a choice out of the add transitions ops
            ; tt' <- mkChoiceS trans
            -- union tt' and eps with ss'' if there is mkFin, otherwise, just union tt' with ss''
            ; if eps
              then do
                { e <- mkEpsS
                ; case (trans,states) of
                   { ([], []) -> mkChoiceS [e, s]
                   ; ([], (_:_)) -> mkChoiceS [e,ss'']
                   ; ((_:_), []) -> mkChoiceS [e, tt',s]
                   ; (_, _) -> mkChoiceS [e, tt',ss'']
                   }
                }
              else
                case (trans,states) of
                 { ([], []) -> return s
                 ; ([], (_:_)) -> return ss''
                 ; ((_:_), []) -> mkChoiceS [tt',s]
                 ; (_, _) -> mkChoiceS [tt',ss'']
                 }
            }
         ; Nothing -> return s
         }
    ; (Any i) ->
       case IM.lookup i renv' of
         { Just ops -> do
            { let (trans, states, eps) =
                    -- Any already matches every letter, so RATr ops are
                    -- collected but not used below; only RASt/RMkFin matter.
                    foldl (\(ts,ss,es) op -> case op of
                             { (RATr t _) -> (ts++[t], ss, es)
                             ; (RASt t _) -> (ts, ss ++ [t], es)
                             ; (RMkFin _) -> (ts, ss, True)
                             ; _ -> (ts,ss,es)
                             } ) ([],[],False) ops
            ; ss' <- mkSeqS =<< mapM (apply renv') states
            -- append ss' to s
            ; ss'' <- mkSeqS [s, ss']
            ; case states of
               { [] | eps -> do
                    { e <- mkEpsS
                    ; mkChoiceS [e,s]
                    }
                    | otherwise -> return s
               ; (_:_) | eps -> do
                    { e <- mkEpsS
                    ; mkChoiceS [e,ss'']
                    }
                       | otherwise -> return ss''
               }
            }
         ; Nothing -> return s
         }
    -- inner nodes: recurse into the children
    ; (Choice is rs) -> do
       { rs' <- mapM (apply renv') rs
       ; return (Choice is rs')
       }
    ; (Pair is r1 r2) -> do
       { r1' <- apply renv' r1
       ; r2' <- apply renv' r2
       ; return (Pair is r1' r2')
       }
    ; (Star is r') -> do
       { r'' <- apply renv' r'
       ; return (Star is r'')
       }
    ; others -> return s
    }
-- | Left-fold a list of regexes into a sequence, minting a fresh label for
-- each Pair node; the empty list yields Phi.
mkSeqS :: [Re] -> State Env Re
mkSeqS []       = return Phi
mkSeqS (r : rs) = foldM step r rs
  where
    step acc nxt = do
      fresh <- incMaxId
      return (Pair fresh acc nxt)
-- | Build a choice from a list of regexes with a fresh label; the empty
-- list yields a fresh Eps, a singleton is returned unchanged.
mkChoiceS :: [Re] -> State Env Re
mkChoiceS []  = fmap Eps incMaxId
mkChoiceS [r] = return r
mkChoiceS rs  = do
  fresh <- incMaxId
  return (Choice fresh rs)
-- | A fresh-labelled Eps.
mkEpsS :: State Env Re
mkEpsS = fmap Eps incMaxId
{-
apply :: REnv -> Re -> State Env Re
apply renv' s =
let (r,renv) = simpl s renv' -- todo: this changes the renv' but it seems faster, not sure about correctness
-- more thoughts, maybe we shall split the simp into choice simplification and others non-choice simplification, because choice simplification is the only one that change the REnv.
-- r = simp s
-- renv = renv'
in case getLabel r of
{ (i:is) -> -- The first one is always the orginal label annotated to the regexp. The tail could contain those being collapsed because of pderiv op
case IM.lookup i renv of
{ Just rs -> do
{ r' <- apply' renv r
; let adds = map (\ (RATr t _ ) -> t) $ filter isRATr rs
rs' = filter isRASt rs
; apps <- mapM (\ (RASt t _ ) -> apply renv t) rs'
; let r'' = app r' apps
; case adds of
{ (_:_) -> do
{ next_i <- incMaxId
; return $ Choice (i:is) ((annotate [next_i] r''):adds)
}
; [] -> return r'' }
}
; Nothing -> apply' renv r
}
; [] -> error ("apply: getLabel is applied to a regular ex which has no label. " ++ (show r))
}
apply' :: REnv -> Re -> State Env Re
apply' renv (Pair is r1 r2) = do { r1' <- apply renv r1
; r2' <- apply renv r2
; return $ Pair is r1' r2' }
apply' renv (Choice is rs) = do { rs' <- mapM (apply renv) rs
; return $ Choice is rs'
}
apply' renv (Star is r) = do { r' <- apply renv r
; return $ Star is r' }
apply' _ r = return r
app :: Re -> [Re] -> Re
app r [] = r
app r (t:ts) = let is = getLabel r
in app (Pair is r (annotate is t)) ts
-}
-- | Merge two repair environments, concatenating op lists per label.
-- Deliberately no 'nub': duplicate ops are pruned elsewhere.
combineEnv :: REnv -> REnv -> REnv
combineEnv = IM.unionWith (++)
-- | Record an add-transition op at the first (original) label of the list,
-- unless the identical op is already present there. (Partial: only defined
-- for RATr ops, as before.)
extend :: REnv -> [Int] -> ROp -> REnv
extend renv [] _ = renv
extend renv (i:_) e@(RATr r lvl) = -- we only care about the original label
  case IM.lookup i renv of
    Nothing -> IM.insert i [RATr r lvl] renv
    Just rs
      | e `elem` rs -> renv
      | otherwise   -> IM.adjust (const (rs ++ [RATr r lvl])) i renv
-- | Print each element on its own line, separated by blank lines.
showL :: Show a => [a] -> IO ()
showL = mapM_ (\x -> print x >> putStrLn "")
{- cheaper hack
parse :: String -> Maybe Re
parse s =
case parsePat s of
Left err -> Nothing
Right pat -> Just (patToRe pat)
patToRe :: Pat -> Re
patToRe (PVar i _ p) =
let r = patToRe p
in r -- todo relabel r using i
patToRe (PPair p1 p2) = Pair dontcare (patToRe p1) (patToRe p2)
patToRe (PChoice p1 p2 _) = Choice dontcare [patToRe p1, patToRe p2]
patToRe (PStar p _) = Star dontcare (patToRe p)
patToRe (PE r) = reToRe r
patToRe p = error $ "patToRe: unhandle pattern " ++ (show p)
reToRe :: R.RE -> Re
reToRe R.Empty = Eps dontcare
reToRe (R.L c) = Ch dontcare c
reToRe (R.Choice r1 r2 _) = Choice dontcare [reToRe r1, reToRe r2]
reToRe (R.Seq r1 r2) = Pair dontcare (reToRe r1) (reToRe r2)
reToRe (R.Star r _ ) = Star dontcare (reToRe r)
reToRe R.Any = Any dontcare
reToRe r = error $ "reToRe: unhandle pattern " ++ (show r)
-}
-- | Parse a pattern string into a labelled regex, collapsing the singleton
-- choices the parser introduces; Nothing on parse failure.
parse :: String -> Maybe Re
parse s =
  case parseEPat s of
    Left _          -> Nothing -- binder renamed: the old name shadowed Prelude's 'error'
    Right (epat, _) -> Just (rmSingletonChoice (internalize epat))
-- | Translation state used while internalizing an 'EPat' into a 'Re'.
data TState = TState { ngi :: NGI          -- ^ next non-group label (negative, counts down)
                     , gi :: GI            -- ^ next group label (positive, counts up)
                     , anchorStart :: Bool -- ^ has a leading '^' anchor been consumed?
                     , anchorEnd :: Bool   -- ^ has a trailing '$' anchor been consumed?
                     }
type NGI = Int -- the non group index
type GI = Int -- the group index
-- | Initial translation state: non-group labels start at -3 and count down,
-- group labels start at 1 and count up, no anchors seen yet.
initTState = TState { ngi = -3, gi = 1, anchorStart = False, anchorEnd = False }
-- | Read the current non-group index.
getNGI :: State TState NGI
getNGI = fmap ngi get
-- | Return the current non-group index, then step it downwards by one.
getIncNGI :: State TState NGI
getIncNGI = do
  st <- get
  let cur = ngi st
  put st { ngi = cur - 1 }
  return cur
-- | Read the current group index.
getGI :: State TState GI
getGI = fmap gi get
-- | Return the current group index, then step it upwards by one.
getIncGI :: State TState GI
getIncGI = do
  st <- get
  let cur = gi st
  put st { gi = cur + 1 }
  return cur
-- | Has a start anchor ('^') been consumed already?
getAnchorStart :: State TState Bool
getAnchorStart = fmap anchorStart get
-- | Record that the start anchor has been consumed.
setAnchorStart :: State TState ()
setAnchorStart = get >>= \st -> put st { anchorStart = True }
-- | Has an end anchor ('$') been consumed already?
getAnchorEnd :: State TState Bool
getAnchorEnd = fmap anchorEnd get
-- | Record that the end anchor has been consumed.
setAnchorEnd :: State TState ()
setAnchorEnd = get >>= \st -> put st { anchorEnd = True }
-- | Translate a parsed pattern into a labelled regex, starting from
-- 'initTState'; the final translation state is discarded (todo).
internalize :: EPat -> Re
internalize epat = fst (runState (intern epat) initTState)
-- | Alias for 'p_intern' (group-aware translation is always used).
intern :: EPat -> State TState Re
intern = p_intern
{-
| hasGroup epat = p_intern epat
| otherwise = do
{ r <- r_intern epat
; return r
}
-}
-- | Translate each parsed-pattern constructor into a labelled 'Re':
-- groups take positive labels ('getIncGI'), everything else takes fresh
-- negative labels ('getIncNGI'). Anchors degenerate to Eps (first
-- occurrence) or a literal character (subsequent '^').
--
-- BUG FIX (EBound _ low (Just high) branch): the original discarded the
-- result of the second 'case r2s of' and then scrutinized unbound names
-- r1/r2; both intermediate regexes are now properly bound.
p_intern :: EPat -> State TState Re
p_intern epat =
  case epat of
   { EEmpty -> do
      { i <- getIncNGI
      ; return (Eps i)
      }
   ; EGroup e -> do
      { i <- getIncGI
      ; r <- intern e
      ; return (relabel i r)
      }
   ; EGroupNonMarking e -> intern e
   ; EOr es -> do
      { i <- getIncNGI
      ; rs <- mapM intern es
      ; return (Choice i rs)
      }
   ; EConcat es -> do
      { rs <- mapM intern es
      -- fold from the right so the sequence is right-nested
      ; case reverse rs of
         { (r':rs') ->
             foldM (\xs x -> do
               { i <- getIncNGI
               ; return (Pair i x xs) }) r' rs'
         ; [] -> error "an empty sequence encountered."
         }
      }
   ; EOpt e _ -> do
      { i <- getIncNGI
      ; j <- getIncNGI
      ; r <- intern e
      ; return (Choice i [(Eps j), r])
      }
   ; EPlus e _ -> do
      { i <- getIncNGI
      ; j <- getIncNGI
      -- e is interned twice on purpose, so the two copies get distinct labels
      ; r <- intern e
      ; r' <- intern e
      ; return (Pair i r (Star j r'))
      }
   ; EStar e _ -> do
      { i <- getIncNGI
      ; r <- intern e
      ; return (Star i r)
      }
   ; EBound e low (Just high) _ -> do
      { r <- intern e
      -- r1: the 'low' mandatory copies, sequenced (Eps when low == 0)
      ; let r1s = take low $ repeat r
      ; r1 <- case r1s of
          { [] -> do
              { i <- getIncNGI
              ; return (Eps i)
              }
          ; (r1':r1s'') ->
              foldM (\xs x -> do
                { i <- getIncNGI
                ; return (Pair i xs x)}) r1' r1s''
          }
      ; i <- getIncNGI
      -- r2: the (high - low) optional copies, each an (r | eps) choice
      ; let r2s = take (high - low) $ repeat (Choice i [r, Eps i])
      ; r2 <- case r2s of
          { [] -> do
              { i <- getIncNGI
              ; return (Eps i)
              }
          ; (r2':r2s'') ->
              foldM (\xs x -> do
                { i <- getIncNGI
                ; return (Pair i xs x)}) r2' r2s''
          }
      -- drop degenerate Eps halves before pairing
      ; case (r1,r2) of
         { (Eps _, Eps _) -> return r1
         ; (Eps _, _ ) -> return r2
         ; (_, Eps _ ) -> return r1
         ; (_ , _ ) -> do
             { i <- getIncNGI
             ; return (Pair i r1 r2)
             }
         }
      }
   ; EBound e low Nothing _ -> do
      { r <- intern e
      -- 'low' mandatory copies followed by an unbounded star
      ; let r1s = take low $ repeat r
      ; r1s' <- case r1s of
          { (r1':r1s'') ->
              foldM (\xs x -> do
                { i <- getIncNGI
                ; return (Pair i xs x)}) r1' r1s''
          ; [] -> do
              { i <- getIncNGI
              ; return (Eps i)
              }
          }
      ; i <- getIncNGI
      ; j <- getIncNGI
      ; return (Pair i r1s' (Star j r))
      }
   ; ECarat -> do
      -- the first '^' is the anchor (becomes Eps); later ones are literal
      { notFirst <- getAnchorStart
      ; if notFirst
        then do
          { i <- getIncNGI
          ; return (Ch i '^')
          }
        else do
          { setAnchorStart
          ; i <- getIncNGI
          ; return (Eps i)
          }
      }
   ; EDollar -> do
      { f <- getAnchorEnd
      ; if f
        then return ()
        else setAnchorEnd
      ; i <- getIncNGI
      ; return (Eps i)
      }
   ; EDot -> do
      { i <- getIncNGI
      ; return (Any i)
      }
   ; EAny cs -> do
      -- character class: a choice over one Ch per member
      { i <- getIncNGI
      ; rs <- mapM (\x -> do
          { i <- getIncNGI
          ; return (Ch i x) }) cs
      ; return (Choice i rs)
      }
   ; ENoneOf cs -> error "unable to handle NoneOf yet"
   ; EEscape c -> do
      { i <- getIncNGI
      ; return (Ch i c)
      }
   ; EChar c -> do
      { i <- getIncNGI
      ; return (Ch i c)
      }
   }
-- ^ relabel by taking the max src loc; this assumes
-- either (i < 0 and j < 0) or (i > 0 or j > 0)
-- | Replace a node's label with the max of its own label and @i@
-- (children are left untouched).
relabel :: SrcLoc -> Re -> Re
relabel i (Eps j) = Eps (max i j)
relabel i (Ch j c) = Ch (max i j) c
relabel i (Pair j r1 r2) = Pair (max i j) r1 r2
relabel i (Choice j rs) = Choice (max i j) rs
relabel i (Any j) = Any (max i j)
relabel i (Star j r) = Star (max i j) r
-- FIX: previously missing, making 'relabel' partial; Phi has no label.
relabel _ Phi = Phi
-- remove the singleton choice generated by the parser
--
-- | Collapse the singleton choices the parser generates, recursively;
-- the collapsed choice's label is merged into the child via 'relabel'.
rmSingletonChoice :: Re -> Re
rmSingletonChoice re = case re of
  Choice i [r]  -> relabel i (rmSingletonChoice r)
  Choice i rs   -> Choice i (map rmSingletonChoice rs)
  Pair i r1 r2  -> Pair i (rmSingletonChoice r1) (rmSingletonChoice r2)
  Star i r      -> Star i (rmSingletonChoice r)
  other         -> other
-- | Parse the pattern, refine it against the word under requirement @g@,
-- and pretty-print the first refinement. Returns Nothing when the pattern
-- fails to parse or when refinement yields no candidate (the previous
-- version crashed via @(!! 0)@ on an empty result list).
test :: UReq -> String -> String -> Maybe String
test g pat_s word =
  case parse pat_s of
    Nothing -> Nothing
    Just r  -> case refine g r word of
                 []       -> Nothing
                 (r' : _) -> Just (pretty r')
| luzhuomi/pddebug | Text/Regex/PDeriv/Debug/Refine5.hs | bsd-3-clause | 66,513 | 4 | 29 | 23,269 | 17,905 | 9,420 | 8,485 | 918 | 29 |
{-# LANGUAGE PatternGuards #-}
module Idris.ProofSearch(trivial, trivialHoles, proofSearch) where
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Core.CaseTree
import Idris.Core.Typecheck
import Idris.AbsSyntax
import Idris.Delaborate
import Idris.Error
import Control.Applicative ((<$>))
import Control.Monad
import Control.Monad.State.Strict
import qualified Data.Set as S
import Data.List
import Debug.Trace
-- Pass in a term elaborator to avoid a cyclic dependency with ElabTerm
-- | 'trivialHoles' with no proof-search name restriction and no
-- per-argument hole allowances.
trivial :: (PTerm -> ElabD ()) -> IState -> ElabD ()
trivial = trivialHoles [] []
-- | Try to close the goal trivially: first by reflexivity (applying the
-- equality constructor), otherwise by some binding from the local
-- environment whose type is hole-free (up to the allowances in @ok@).
-- @psnames@, when non-empty, restricts which locals may be used.
trivialHoles :: [Name] -> [(Name, Int)] -> (PTerm -> ElabD ()) -> IState -> ElabD ()
trivialHoles psnames ok elab ist
                = try' (do elab (PApp (fileFC "prf") (PRef (fileFC "prf") [] eqCon) [pimp (sUN "A") Placeholder False, pimp (sUN "x") Placeholder False])
                           return ())
                       (do env <- get_env
                           g <- goal
                           tryAll env
                           return ()) True
      where
        tryAll [] = fail "No trivial solution"
        tryAll ((x, b):xs)
           = do -- if type of x has any holes in it, move on
                hs <- get_holes
                let badhs = hs -- filter (flip notElem holesOK) hs
                g <- goal
                -- anywhere but the top is okay for a hole, if holesOK set
                if -- all (\n -> not (n `elem` badhs)) (freeNames (binderTy b))
                   holesOK hs (binderTy b) && (null psnames || x `elem` psnames)
                   then try' (elab (PRef (fileFC "prf") [] x))
                             (tryAll xs) True
                   else tryAll xs
        -- a type is usable when it mentions no unsolved holes, except at
        -- argument positions explicitly allowed by @ok@
        holesOK hs ap@(App _ _ _)
            | (P _ n _, args) <- unApply ap
                 = holeArgsOK hs n 0 args
        holesOK hs (App _ f a) = holesOK hs f && holesOK hs a
        holesOK hs (P _ n _) = not (n `elem` hs)
        holesOK hs (Bind n b sc) = holesOK hs (binderTy b) &&
                                   holesOK hs sc
        holesOK hs _ = True
        holeArgsOK hs n p [] = True
        holeArgsOK hs n p (a : as)
           | (n, p) `elem` ok = holeArgsOK hs n (p + 1) as
           | otherwise = holesOK hs a && holeArgsOK hs n (p + 1) as
-- | Fail elaboration with a CantSolveGoal error that reports the current
-- goal together with the local environment (names and binder types).
cantSolveGoal :: ElabD a
cantSolveGoal = do
  g <- goal
  env <- get_env
  lift $ tfail $ CantSolveGoal g [ (n, binderTy b) | (n, b) <- env ]
proofSearch :: Bool -> -- recursive search (False for 'refine')
               Bool -> -- invoked from a tactic proof. If so, making
                       -- new metavariables is meaningless, and there should
                       -- be an error reported instead.
               Bool -> -- ambiguity ok
               Bool -> -- defer on failure
               Int -> -- maximum depth
               (PTerm -> ElabD ()) -> Maybe Name -> Name ->
               [Name] ->
               [Name] ->
               IState -> ElabD ()
-- Non-recursive case ('refine' with a single hint): try every overloading
-- of the given name, falling back to a new metavariable.
proofSearch False fromProver ambigok deferonfail depth elab _ nroot psnames [fn] ist
       = do -- get all possible versions of the name, take the first one that
            -- works
            let all_imps = lookupCtxtName fn (idris_implicits ist)
            tryAllFns all_imps
  where
    -- if nothing worked, make a new metavariable
    tryAllFns [] | fromProver = cantSolveGoal
    tryAllFns [] = do attack; defer [] nroot; solve
    tryAllFns (f : fs) = try' (tryFn f) (tryAllFns fs) True
    -- apply one candidate, then turn each newly created hole into a
    -- metavariable (unless invoked from the prover, where that is an error)
    tryFn (f, args) = do let imps = map isImp args
                         ps <- get_probs
                         hs <- get_holes
                         args <- map snd <$> try' (apply (Var f) imps)
                                                  (match_apply (Var f) imps) True
                         ps' <- get_probs
                         -- when (length ps < length ps') $ fail "Can't apply constructor"
                         -- Make metavariables for new holes
                         hs' <- get_holes
                         ptm <- get_term
                         if fromProver then cantSolveGoal
                           else do
                             mapM_ (\ h -> do focus h
                                              attack; defer [] nroot; solve)
                                   (hs' \\ hs)
                             -- (filter (\ (x, y) -> not x) (zip (map fst imps) args))
                             solve
    isImp (PImp p _ _ _ _) = (True, p)
    isImp arg = (True, priority arg) -- try to get all of them by unification
-- General case: recursive depth-bounded search over constructors, hints
-- and local variables.
proofSearch rec fromProver ambigok deferonfail maxDepth elab fn nroot psnames hints ist
       = do compute
            ty <- goal
            hs <- get_holes
            env <- get_env
            tm <- get_term
            argsok <- conArgsOK ty
            if ambigok || argsok then
               case lookupCtxt nroot (idris_tyinfodata ist) of
                    [TISolution ts] -> findInferredTy ts
                    _ -> psRec rec maxDepth [] S.empty
               else do ptm <- get_term
                       autoArg (sUN "auto") -- not enough info in the type yet
  where
    findInferredTy (t : _) = elab (delab ist (toUN t))
    -- is the goal's head a data type whose constructors are worth trying?
    conArgsOK ty
       = let (f, as) = unApply ty in
             case f of
                  P _ n _ ->
                    let autohints = case lookupCtxtExact n (idris_autohints ist) of
                                         Nothing -> []
                                         Just hs -> hs in
                        case lookupCtxtExact n (idris_datatypes ist) of
                             Just t -> do rs <- mapM (conReady as)
                                                     (autohints ++ con_names t)
                                          return (and rs)
                             Nothing -> -- local variable, go for it
                                        return True
                  TType _ -> return True
                  _ -> fail "Not a data type"
    conReady :: [Term] -> Name -> ElabD Bool
    conReady as n
       = case lookupTyExact n (tt_ctxt ist) of
              Just ty -> do let (_, cs) = unApply (getRetTy ty)
                            -- if any metavariables in 'as' correspond to
                            -- a constructor form in 'cs', then we're not
                            -- ready to run auto yet. Otherwise, go for it
                            hs <- get_holes
                            return $ and (map (notHole hs) (zip as cs))
              Nothing -> fail "Can't happen"
    notHole hs (P _ n _, c)
       | (P _ cn _, _) <- unApply c,
         n `elem` hs && isConName cn (tt_ctxt ist) = False
       | Constant _ <- c = not (n `elem` hs)
    notHole _ _ = True
    -- strip machine-generated name wrappers (keep underscore-prefixed ones)
    toUN t@(P nt (MN i n) ty)
       | ('_':xs) <- str n = t
       | otherwise = P nt (UN n) ty
    toUN (App s f a) = App s (toUN f) (toUN a)
    toUN t = t
    -- psRec counts depth and the local variable applications we're under
    -- (so we don't try a pointless application of something to itself,
    -- which obviously won't work anyway but might lead us on a wild
    -- goose chase...)
    -- Also keep track of the types we've proved so far in this branch
    -- (if we get back to one we've been to before, we're just in a cycle and
    -- that's no use)
    psRec :: Bool -> Int -> [Name] -> S.Set Type -> ElabD ()
    psRec _ 0 locs tys | fromProver = cantSolveGoal
    psRec rec 0 locs tys = do attack; defer [] nroot; solve --fail "Maximum depth reached"
    psRec False d locs tys = tryCons d locs tys hints
    psRec True d locs tys
         = do compute
              ty <- goal
              when (S.member ty tys) $ fail "Been here before"
              let tys' = S.insert ty tys
              try' (trivialHoles psnames [] elab ist)
                   (try' (try' (resolveByCon (d - 1) locs tys')
                               (resolveByLocals (d - 1) locs tys')
                               True)
                         -- if all else fails, make a new metavariable
                         (if fromProver
                             then fail "cantSolveGoal"
                             else do attack; defer [] nroot; solve) True) True
    -- only add the top-level function itself as a hint near the top of
    -- the search (to avoid trivially-looping recursive calls)
    getFn d Nothing = []
    getFn d (Just f) | d < maxDepth-1 = [f]
                     | otherwise = []
    resolveByCon d locs tys
        = do t <- goal
             let (f, _) = unApply t
             case f of
                  P _ n _ ->
                     do let autohints = case lookupCtxtExact n (idris_autohints ist) of
                                             Nothing -> []
                                             Just hs -> hs
                        case lookupCtxtExact n (idris_datatypes ist) of
                             Just t -> tryCons d locs tys
                                               (hints ++
                                                con_names t ++
                                                autohints ++
                                                getFn d fn)
                             Nothing -> fail "Not a data type"
                  _ -> fail "Not a data type"
    -- if there are local variables which have a function type, try
    -- applying them too
    resolveByLocals d locs tys
        = do env <- get_env
             tryLocals d locs tys env
    tryLocals d locs tys [] = fail "Locals failed"
    tryLocals d locs tys ((x, t) : xs)
       | x `elem` locs || x `notElem` psnames = tryLocals d locs tys xs
       | otherwise = try' (tryLocal d (x : locs) tys x t)
                          (tryLocals d locs tys xs) True
    tryCons d locs tys cs = do when (not fromProver) -- in interactive mode,
                                    -- don't just guess (fine for 'auto',
                                    -- since that's part of the point...)
                                    $ checkDisjoint ist [] cs
                               tryCons' d locs tys cs
    tryCons' d locs tys [] = fail "Constructors failed"
    tryCons' d locs tys (c : cs)
        = try' (tryCon d locs tys c) (tryCons' d locs tys cs) True
    -- apply a local of arity n, searching recursively for each argument
    tryLocal d locs tys n t
         = do let a = getPArity (delab ist (binderTy t))
              tryLocalArg d locs tys n a
    tryLocalArg d locs tys n 0 = elab (PRef (fileFC "prf") [] n)
    tryLocalArg d locs tys n i
        = simple_app False (tryLocalArg d locs tys n (i - 1))
                     (psRec True d locs tys) "proof search local apply"
    -- Like type class resolution, but searching with constructors
    tryCon d locs tys n =
         do ty <- goal
            let imps = case lookupCtxtExact n (idris_implicits ist) of
                            Nothing -> []
                            Just args -> map isImp args
            ps <- get_probs
            hs <- get_holes
            args <- map snd <$> try' (apply (Var n) imps)
                                     (match_apply (Var n) imps) True
            ps' <- get_probs
            hs' <- get_holes
            when (length ps < length ps') $ fail "Can't apply constructor"
            -- recursively search for each explicit argument
            mapM_ (\ (_, h) -> do focus h
                                  aty <- goal
                                  psRec True d locs tys)
                  (filter (\ (x, y) -> not x) (zip (map fst imps) args))
            solve
    isImp (PImp p _ _ _ _) = (True, p)
    isImp arg = (False, priority arg)
-- Fails if any of the given constructor/function names have the same
-- return type (ignoring local variables - we need to be able to distinguish
-- by constructor)
checkDisjoint :: IState -> [Type] -> [Name] -> ElabD ()
checkDisjoint ist ts [] = return ()
checkDisjoint ist ts (n : ns) =
    -- NOTE(review): there is no 'Nothing' alternative below, so this
    -- assumes every name resolves in the context; an unknown name would be
    -- a pattern-match failure. TODO confirm callers guarantee this.
    case lookupTyExact n (tt_ctxt ist) of
         Just t -> if any (matchTypes (getRetTy t)) ts
                      then fail "Overlapping constructor types"
                      else checkDisjoint ist (getRetTy t : ts) ns
  where
    -- structural comparison: any de Bruijn variable matches any other,
    -- applications and binders are compared component-wise, everything
    -- else falls back to (==)
    matchTypes (V _) (V _) = True
    matchTypes (App _ f a) (App _ f' a') = matchTypes f f' && matchTypes a a'
    matchTypes (Bind _ t sc) (Bind _ t' sc')
         = matchTypes (binderTy t) (binderTy t') && matchTypes sc sc'
    matchTypes x y = x == y
| uwap/Idris-dev | src/Idris/ProofSearch.hs | bsd-3-clause | 12,296 | 35 | 20 | 5,320 | 3,536 | 1,766 | 1,770 | 219 | 28 |
module Sexy.Data.Either (
Either(..)
, either'
, fromLeft'
, fromRight'
) where
data Either a b = Left a | Right b
-- | Case analysis on 'Either': apply the first function to a 'Left'
-- payload, the second to a 'Right' payload.
either' :: (a -> c) -> (b -> c) -> Either a b -> c
either' onLeft onRight e = case e of
  Left a  -> onLeft a
  Right b -> onRight b
-- | Extract the 'Left' payload, or return the fallback for a 'Right'.
fromLeft' :: a -> Either a b -> a
fromLeft' fallback e = case e of
  Left x  -> x
  Right _ -> fallback
-- | Extract the 'Right' payload, or return the fallback for a 'Left'.
fromRight' :: b -> Either a b -> b
fromRight' fallback e = case e of
  Right x -> x
  Left _  -> fallback
| DanBurton/sexy | src/Sexy/Data/Either.hs | bsd-3-clause | 413 | 0 | 8 | 114 | 219 | 116 | 103 | 15 | 1 |
{-# LANGUAGE FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.VMath.VFloating
-- Copyright : Copyright (c) 2010, Patrick Perry <patperry@gmail.com>
-- License : BSD3
-- Maintainer : Patrick Perry <patperry@gmail.com>
-- Stability : experimental
--
-- Vector Floating operations.
--
module Foreign.VMath.VFloating (
VFloating(..)
) where
import Foreign( Ptr, Storable, peek, poke, advancePtr )
import Data.Complex( Complex(..) )
import Foreign.VMath.VFractional
import Foreign.VMath.Double
import Foreign.VMath.Zomplex
-- | Types with vectorized 'Floating' operations.
--
-- Each method has the shape @op n src dst@ ('vPow' takes two source
-- pointers): apply the operation elementwise to @n@ values read from the
-- source pointer(s), writing the results through @dst@.
class (VFractional a, Floating a) => VFloating a where
    vExp :: Int -> Ptr a -> Ptr a -> IO ()
    vSqrt :: Int -> Ptr a -> Ptr a -> IO ()
    vLog :: Int -> Ptr a -> Ptr a -> IO ()
    vPow :: Int -> Ptr a -> Ptr a -> Ptr a -> IO ()
    vSin :: Int -> Ptr a -> Ptr a -> IO ()
    vCos :: Int -> Ptr a -> Ptr a -> IO ()
    vTan :: Int -> Ptr a -> Ptr a -> IO ()
    vASin :: Int -> Ptr a -> Ptr a -> IO ()
    vACos :: Int -> Ptr a -> Ptr a -> IO ()
    vATan :: Int -> Ptr a -> Ptr a -> IO ()
    vSinh :: Int -> Ptr a -> Ptr a -> IO ()
    vCosh :: Int -> Ptr a -> Ptr a -> IO ()
    vTanh :: Int -> Ptr a -> Ptr a -> IO ()
    vASinh :: Int -> Ptr a -> Ptr a -> IO ()
    vACosh :: Int -> Ptr a -> Ptr a -> IO ()
    vATanh :: Int -> Ptr a -> Ptr a -> IO ()

    -- Default implementations: scalar loops over the 'Floating' methods.
    vExp = vop exp
    vSqrt = vop sqrt
    vLog = vop log
    vPow = vop2 (**)
    vSin = vop sin
    vCos = vop cos
    vTan = vop tan
    vASin = vop asin
    vACos = vop acos
    vATan = vop atan
    vSinh = vop sinh
    vCosh = vop cosh
    vTanh = vop tanh
    vASinh = vop asinh
    vACosh = vop acosh
    vATanh = vop atanh
-- | Scalar fallback loop: apply @f@ to @n@ elements read from the source
-- pointer, writing each result through the destination pointer.
vop :: (Storable a, Storable b)
    => (a -> b) -> Int -> Ptr a -> Ptr b -> IO ()
vop f = loop
  where
    loop n src dst
      | n <= 0    = return ()
      | otherwise = do
          x <- peek src
          poke dst (f x)
          loop (n - 1) (src `advancePtr` 1) (dst `advancePtr` 1)
{-# INLINE vop #-}
-- | Scalar fallback loop for binary operations: combine @n@ element pairs
-- read from the two source pointers, writing each result through @dst@.
vop2 :: (Storable a1, Storable a2, Storable b)
     => (a1 -> a2 -> b) -> Int -> Ptr a1 -> Ptr a2 -> Ptr b -> IO ()
vop2 f = loop
  where
    loop n srcA srcB dst
      | n <= 0    = return ()
      | otherwise = do
          x <- peek srcA
          y <- peek srcB
          poke dst (f x y)
          loop (n - 1) (srcA `advancePtr` 1) (srcB `advancePtr` 1)
               (dst `advancePtr` 1)
{-# INLINE vop2 #-}
-- | Doubles use the C implementations imported from "Foreign.VMath.Double".
instance VFloating Double where
    vExp = vdExp
    {-# INLINE vExp #-}
    vSqrt = vdSqrt
    {-# INLINE vSqrt #-}
    vLog = vdLog
    {-# INLINE vLog #-}
    vPow = vdPow
    {-# INLINE vPow #-}
    vSin = vdSin
    {-# INLINE vSin #-}
    vCos = vdCos
    {-# INLINE vCos #-}
    vTan = vdTan
    {-# INLINE vTan #-}
    vASin = vdASin
    {-# INLINE vASin #-}
    vACos = vdACos
    {-# INLINE vACos #-}
    vATan = vdATan
    {-# INLINE vATan #-}
    vSinh = vdSinh
    {-# INLINE vSinh #-}
    vCosh = vdCosh
    {-# INLINE vCosh #-}
    vTanh = vdTanh
    {-# INLINE vTanh #-}
    vASinh = vdASinh
    {-# INLINE vASinh #-}
    vACosh = vdACosh
    {-# INLINE vACosh #-}
    vATanh = vdATanh
    {-# INLINE vATanh #-}
-- | Complex doubles use the C implementations only for the operations whose
-- branch cuts agree with Haskell's; everything else falls back to the
-- scalar class defaults.
instance VFloating (Complex Double) where
    vSqrt = vzSqrt
    {-# INLINE vSqrt #-}
    vLog = vzLog
    {-# INLINE vLog #-}

    {- These functions have branch cuts in the wrong places
    vExp = vzExp
    {-# INLINE vExp #-}
    vPow = vzPow
    {-# INLINE vPow #-}
    vSin = vzSin
    {-# INLINE vSin #-}
    vCos = vzCos
    {-# INLINE vCos #-}
    vTan = vzTan
    {-# INLINE vTan #-}
    vASin = vzASin
    {-# INLINE vASin #-}
    vACos = vzACos
    {-# INLINE vACos #-}
    vATan = vzATan
    {-# INLINE vATan #-}
    vSinh = vzSinh
    {-# INLINE vSinh #-}
    vCosh = vzCosh
    {-# INLINE vCosh #-}
    vTanh = vzTanh
    {-# INLINE vTanh #-}
    vASinh = vzASinh
    {-# INLINE vASinh #-}
    vACosh = vzACosh
    {-# INLINE vACosh #-}
    vATanh = vzATanh
    {-# INLINE vATanh #-}
    -}
| patperry/hs-linear-algebra | lib/Foreign/VMath/VFloating.hs | bsd-3-clause | 4,207 | 0 | 12 | 1,489 | 1,168 | 601 | 567 | 97 | 1 |
-- |Â Number component of intervals.
module Music.Pitch.Common.Number
(
Number,
HasNumber(..),
unison,
prime,
second,
third,
fourth,
fifth,
sixth,
seventh,
octave,
ninth,
tenth,
eleventh,
twelfth,
thirteenth,
fourteenth,
fifteenth,
diatonicSteps,
) where
import Control.Lens
import Music.Pitch.Common.Types
instance HasNumber Number where number = id

-- Named synonyms for the interval numbers 1..15.

-- | A synonym for @1@.
unison :: Number
unison = 1

-- | A synonym for @1@.
prime :: Number
prime = 1

-- | A synonym for @2@.
second :: Number
second = 2

-- | A synonym for @3@.
third :: Number
third = 3

-- | A synonym for @4@.
fourth :: Number
fourth = 4

-- | A synonym for @5@.
fifth :: Number
fifth = 5

-- | A synonym for @6@.
sixth :: Number
sixth = 6

-- | A synonym for @7@.
seventh :: Number
seventh = 7

-- | A synonym for @8@.
octave :: Number
octave = 8

-- | A synonym for @9@.
ninth :: Number
ninth = 9

-- | A synonym for @10@.
tenth :: Number
tenth = 10

-- | A synonym for @11@.
eleventh :: Number
eleventh = 11

-- | A synonym for @12@.
twelfth :: Number
twelfth = 12

-- | A synonym for @12@.
-- NOTE(review): unlike the other synonyms, 'duodecim' is not in the module
-- export list; it duplicates 'twelfth'.
duodecim :: Number
duodecim = 12

-- | A synonym for @13@.
thirteenth :: Number
thirteenth = 13

-- | A synonym for @14@.
fourteenth :: Number
fourteenth = 14

-- | A synonym for @15@.
fifteenth :: Number
fifteenth = 15
-- | Types from which an interval number can be extracted.
class HasNumber a where
  -- |
  -- Returns the number portion of an interval.
  --
  -- The interval number is negative if and only if the interval is negative.
  --
  -- See also 'quality', 'octaves' and 'semitones'.
  --
  number :: a -> Number
-- TODO rename numberDiatonicSteps
-- | Isomorphism between 1-based interval 'Number's (which have no zero)
-- and 0-based 'DiatonicSteps'.
diatonicSteps :: Iso' Number DiatonicSteps
diatonicSteps = iso toSteps fromSteps
  where
    toSteps n
      | n > 0     = fromIntegral (n - 1)
      | n < 0     = fromIntegral (n + 1)
      | otherwise = error "diatonicSteps: Invalid number 0"
    fromSteps m
      | m >= 0    = fromIntegral (m + 1)
      | otherwise = fromIntegral (m - 1)
| music-suite/music-pitch | src/Music/Pitch/Common/Number.hs | bsd-3-clause | 2,203 | 0 | 10 | 740 | 466 | 273 | 193 | 67 | 4 |
{-# LANGUAGE GeneralizedNewtypeDeriving
, ScopedTypeVariables
#-}
module Editor where
import System.IO
import Buffer
import Control.Exception
import Control.Monad.State
import Control.Applicative
import Control.Arrow (first, second)
import Data.Char
import Data.List
-- Editor commands
-- | One command entered at the editor prompt.
data Command = View        -- ^ show the lines around the cursor
             | Edit        -- ^ replace the current line
             | Load String -- ^ load the named file into the buffer
             | Line Int    -- ^ jump to a line number
             | Next        -- ^ move to the next line
             | Prev        -- ^ move to the previous line
             | Quit        -- ^ leave the editor
             | Help        -- ^ list the available commands
             | Noop        -- ^ unrecognised input; do nothing
  deriving (Eq, Show, Read)
-- | Printable names of the single-letter commands; 'getCommand' matches
-- user input against the first character of each.
commands :: [String]
commands = map show [View, Edit, Next, Prev, Quit]
-- Editor monad

instance Applicative (Editor b) where
    pure = return
    (<*>) = ap

-- | The editor monad: 'StateT' over a pair of (buffer, current line).
newtype Editor b a = Editor (StateT (b,Int) IO a)
    deriving (Functor, Monad, MonadIO, MonadState (b,Int))
-- | Run an editor action over an initial buffer, starting at line 0.
runEditor :: Buffer b => Editor b a -> b -> IO a
runEditor (Editor e) b = evalStateT e (b,0)

-- | The current cursor line.
getCurLine :: Editor b Int
getCurLine = gets snd

-- | Set the cursor line (no bounds check; see 'modCurLine').
setCurLine :: Int -> Editor b ()
setCurLine = modify . second . const

-- | Project a value out of the buffer.
onBuffer :: (b -> a) -> Editor b a
onBuffer f = gets (f . fst)

-- | The whole buffer.
getBuffer :: Editor b b
getBuffer = onBuffer id

-- | Apply a pure transformation to the buffer.
modBuffer :: (b -> b) -> Editor b ()
modBuffer = modify . first

-- | Shorthand for 'liftIO'.
io :: MonadIO m => IO a -> m a
io = liftIO
-- Utility functions

-- | Total version of 'read': 'Just' the value when the input has exactly
-- one parse (any trailing input is ignored), 'Nothing' otherwise.
readMay :: Read a => String -> Maybe a
readMay str
  | [(val, _)] <- reads str = Just val
  | otherwise               = Nothing
-- Main editor loop

-- | Top-level loop: disable stdout buffering so the prompt appears
-- immediately, then repeatedly prompt, read a command, and execute it
-- until 'Quit' is entered.
editor :: Buffer b => Editor b ()
editor = io (hSetBuffering stdout NoBuffering) >> loop
  where loop = do prompt
                  cmd <- getCommand
                  when (cmd /= Quit) (doCommand cmd >> loop)
-- | Print the prompt, showing the buffer's current 'value'.
prompt :: Buffer b => Editor b ()
prompt = do
    s <- onBuffer value
    io $ putStr (show s ++ "> ")
-- | Read one input line and interpret it as a 'Command': a leading digit
-- is a 'Line' jump, a leading @l@/@L@ loads the named file, @?@ is 'Help',
-- and any other character is matched case-insensitively against the first
-- letter of the names in 'commands'.  Unrecognised input yields 'Noop'.
getCommand :: Editor b Command
getCommand = io $ readCom <$> getLine
  where
    readCom "" = Noop
    readCom inp@(c:cs) | isDigit c = maybe Noop Line (readMay inp)
                       | toUpper c == 'L' = Load (unwords $ words cs)
                       | c == '?' = Help
                       | otherwise = maybe Noop read $
                                     find ((== toUpper c) . head) commands
-- | Execute a single editor command.
doCommand :: Buffer b => Command -> Editor b ()
-- Show a five-line window around the cursor, marking the current line
-- with '*'; lines outside the buffer ('Nothing') are skipped.
doCommand View = do
    cur <- getCurLine
    let ls = [(cur - 2) .. (cur + 2)]
    ss <- mapM (\l -> onBuffer $ line l) ls
    zipWithM_ (showL cur) ls ss
  where
    showL _ _ Nothing = return ()
    showL l n (Just s) = io $ putStrLn (m ++ show n ++ ": " ++ s)
      where m | n == l = "*"
              | otherwise = " "
-- Prompt for replacement text for the current line.
doCommand Edit = do
    l <- getCurLine
    io $ putStr $ "Replace line " ++ show l ++ ": "
    new <- io getLine
    modBuffer $ replaceLine l new
-- Read a file (forced to UTF-8) into the buffer; on any IOException
-- report "File not found." and leave the buffer unchanged.
doCommand (Load filename) = do
    mstr <- io $ handle (\(_ :: IOException) ->
                          putStrLn "File not found." >> return Nothing
                        ) $ do
              h <- openFile filename ReadMode
              hSetEncoding h utf8
              Just <$> hGetContents h
    maybe (return ()) (modBuffer . const . fromString) mstr
-- Cursor movement commands all redisplay the window afterwards.
doCommand (Line n) = modCurLine (const n) >> doCommand View
doCommand Next = modCurLine (+1) >> doCommand View
doCommand Prev = modCurLine (subtract 1) >> doCommand View
doCommand Quit = return () -- do nothing, main loop notices this and quits
doCommand Help = io . putStr . unlines $
    [ "v --- view the current location in the document"
    , "n --- move to the next line"
    , "p --- move to the previous line"
    , "l --- load a file into the editor"
    , "e --- edit the current line"
    , "q --- quit"
    , "? --- show this list of commands"
    ]
doCommand Noop = return ()
-- | Is the given line number a valid index into the buffer?
inBuffer :: Buffer b => Int -> Editor b Bool
inBuffer n = do
    total <- onBuffer numLines
    return (0 <= n && n < total)
-- | Apply a function to the cursor line, clamping the result into the
-- buffer's valid range.
modCurLine :: Buffer b => (Int -> Int) -> Editor b ()
modCurLine f = do
    cur <- getCurLine
    total <- onBuffer numLines
    setCurLine (max 0 (min (total - 1) (f cur)))
| wangwangwar/cis194 | src/ch7/Editor.hs | bsd-3-clause | 3,871 | 0 | 14 | 1,214 | 1,444 | 726 | 718 | 108 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Irc(runIrcClient) where
import Data.Conduit
import Control.Concurrent.Async (concurrently)
import Control.Monad (void)
import Data.Conduit.Network
import Data.Conduit.Attoparsec
import Data.Conduit.Text (encode, decode, utf8)
import IrcParser
import Control.Monad.Trans.Class (lift)
import Data.Text
-- | Identity conduit that prints every element as it passes through.
debug :: Show a => Conduit a IO a
debug = awaitForever $ \item -> do
    lift (print item)
    yield item
-- | Connect to the IRC server at 192.168.33.10:6667, send a fixed
-- NICK/USER registration line, and concurrently pipe the server's output
-- through decode/parse/handle stages and back into the socket.
runIrcClient :: IO ()
runIrcClient = runTCPClient (clientSettings 6667 "192.168.33.10") $ \server ->
    void $ concurrently
        ((yield "Nick asdf\nUSER asdf 0 * :asdf\n") $$ appSink server)
        (appSource server $= decode utf8 =$= debug =$= parseMessage =$= debug =$= action =$= encode utf8 $$ appSink server)
-- | Parse the incoming text stream into 'IrcEvent's, discarding the
-- position information that 'conduitParser' attaches to each result.
parseMessage :: Conduit Text IO IrcEvent
parseMessage = conduitParser parseIrc =$= awaitForever (yield . snd)
-- | Respond to every parsed event with a fixed JOIN message.
-- NOTE(review): the incoming event itself is ignored.
action :: Conduit IrcEvent IO Text
action = awaitForever $ \m ->
    yield ":asdf JOIN #technik\n"
| UnrealQuester/irc-bot | lib/Irc.hs | bsd-3-clause | 1,043 | 0 | 17 | 216 | 321 | 170 | 151 | 28 | 1 |
module Tokenizer (tokenize, Token(..), TokenKind(..)
, isStringToken, isCharToken, isIntToken, isFloatToken, isKeywordToken
, isOperatorToken, isReservedOperatorToken, isIdentToken, isTypeIdentToken
, reconstructSpan, reservedOperators) where
import Prelude hiding (span)
import Data.Maybe
import Control.Applicative ((<$>), (<*>), (<*), (*>), (<$))
import Numeric
import Data.Char
import Data.List hiding (span)
import Text.ParserCombinators.Parsec hiding (parseFromFile)
import Text.ParserCombinators.Parsec.Expr
import Text.ParserCombinators.Parsec.Error
import Text.ParserCombinators.Parsec.Pos
import Span
-- | The lexical category (and payload) of a token.
data TokenKind =
    TString String           -- ^ string literal
  | TChar Char               -- ^ character literal
  | TInt Integer             -- ^ integer literal
  | TFloat Double            -- ^ floating-point literal
  | TKeyword String          -- ^ reserved word (see 'keywords')
  | TOperator String         -- ^ user-defined operator
  | TReservedOperator String -- ^ built-in operator (see 'reservedOperators')
  | TIdent String            -- ^ lower-case identifier
  | TTypeIdent String        -- ^ upper-case (type) identifier
  deriving (Show, Eq)
-- | A token together with the source span it was lexed from.
data Token =
    Token { tokenKind :: TokenKind
          , tokenSpan :: Span }
    deriving (Show, Eq)
-- Predicates on token kinds, used to classify tokens when parsing.
-- (Type signatures added: every top-level binding should carry one.)

-- | Is the token a string literal?
isStringToken :: Token -> Bool
isStringToken (Token (TString _) _) = True
isStringToken _ = False

-- | Is the token a character literal?
isCharToken :: Token -> Bool
isCharToken (Token (TChar _) _) = True
isCharToken _ = False

-- | Is the token an integer literal?
isIntToken :: Token -> Bool
isIntToken (Token (TInt _) _) = True
isIntToken _ = False

-- | Is the token a floating-point literal?
isFloatToken :: Token -> Bool
isFloatToken (Token (TFloat _) _) = True
isFloatToken _ = False

-- | Is the token a user-defined operator?
isOperatorToken :: Token -> Bool
isOperatorToken (Token (TOperator _) _) = True
isOperatorToken _ = False

-- | Is the token a lower-case identifier?
isIdentToken :: Token -> Bool
isIdentToken (Token (TIdent _) _) = True
isIdentToken _ = False

-- | Is the token an upper-case (type) identifier?
isTypeIdentToken :: Token -> Bool
isTypeIdentToken (Token (TTypeIdent _) _) = True
isTypeIdentToken _ = False

-- | Is the token exactly the given reserved operator?
isReservedOperatorToken :: String -> Token -> Bool
isReservedOperatorToken x (Token (TReservedOperator o) _) = x == o
isReservedOperatorToken _ _ = False

-- | Is the token exactly the given keyword?
isKeywordToken :: String -> Token -> Bool
isKeywordToken x (Token (TKeyword k) _) = x == k
isKeywordToken _ _ = False
instance Spanable Token where
    spanOf = tokenSpan

-- | Run a parser and apply its result to the source span it consumed.
span :: Parser (Span -> a) -> Parser a
span p = do
    st <- getPosition
    x <- p
    en <- getPosition
    return $ x (charPosToSpan st en)
-- | Interpret a string of hex digits as a number; raises 'error' on any
-- input that 'readHex' cannot fully consume.
hexToNum s
  | [(value, "")] <- readHex s = value
  | otherwise = error $ "Failed to parse hex value: " ++ s
-- | Parse a backslash escape sequence: literal backslash or quote, the
-- named escapes @\n@ @\r@ @\t@, a two-digit hex escape (@\xNN@), or a
-- four-digit unicode escape (@\uNNNN@).
escapeSeq :: Parser Char
escapeSeq =
  char '\\' *> (char '\\'
                <|> char '\''
                <|> char '\"'
                <|> nlEsc
                <|> crEsc
                <|> tpEsc
                <|> hexEsc
                <|> unicodeEsc)
  where
    nlEsc = char 'n' *> return '\n'
    crEsc = char 'r' *> return '\r'
    tpEsc = char 't' *> return '\t'
    -- \xNN: two hex digits decoded via 'hexToNum'.
    hexEsc =
      char 'x' *> ((chr. hexToNum) <$> ((\x y -> x:[y]) <$> hexDigit <*> hexDigit))
    -- \uNNNN: four hex digits decoded via 'hexToNum'.
    unicodeEsc =
      char 'u' *> ((chr . hexToNum) <$> ((\x y z æ -> x:y:z:[æ]) <$> hexDigit
                                         <*> hexDigit
                                         <*> hexDigit
                                         <*> hexDigit))
-- | A single character inside a literal delimited by @s@: an escape
-- sequence, or any character other than backslash, the delimiter, and
-- whitespace.
stringChar :: Char -> Parser Char
stringChar s = escapeSeq <|> noneOf ('\\' : s : whiteSpaceChars)

-- | A double-quoted string literal token.
tkString =
  span $ Token <$> TString
               <$> between (char '"') (char '"') (many $ stringChar '\"')

-- | A single-quoted character literal token.
tkChar =
  span $ Token <$> TChar
               <$> between (char '\'') (char '\'') (stringChar '\'')
-- | A numeric literal token: optional leading '-', one or more digits,
-- and an optional fractional part.  Produces a 'TFloat' when a fraction
-- is present, otherwise a 'TInt'.
tkNum =
  span theNum
  where
    theNum = do
      -- Record whether a '-' sign was consumed; negation is applied after
      -- decoding the unsigned digit string.
      sign <- isJust <$> optionMaybe (char '-' *> return ())
      inte <- many1 digit
      flte <- optionMaybe (char '.' *> many1 digit)
      case flte of
        Just f -> return $ Token $ TFloat $ rFloat sign $ inte ++ '.' : f
        Nothing -> return $ Token $ TInt $ rInt sign inte
    rInt s v =
      case readDec v of
        [(r, "")] -> r * si
        _ -> error $ "Failed to parse integer value: " ++ v
      where
        si = if s then -1 else 1
    -- NOTE(review): the error message below says "integer" for the float
    -- case as well (copy-paste from rInt).
    rFloat s v =
      case readSigned readFloat v of
        [(r, "")] -> r * si
        _ -> error $ "Failed to parse integer value: " ++ v
      where
        si = if s then -1 else 1
-- | First character of an identifier: a lower-case letter.
identStart = lower
-- | Subsequent identifier characters: alphanumerics and underscore.
identLetter = (alphaNum <|> char '_')

-- | Words lexed as 'TKeyword' rather than 'TIdent'.
keywords = [ "and", "or", "not", "if", "then", "else", "fn", "case", "data"
           , "alias", "return", "where", "of", "in", "instance", "trait"
           , "impl", "has", "let", "mut" ]

-- | First character of a type identifier: an upper-case letter.
typeIdentStart = upper
-- | Subsequent type identifier characters.
typeIdentLetter = alphaNum
-- | An upper-case (type) identifier token.
tkTypeIdent =
  span $ Token <$>
    TTypeIdent <$> ((:) <$> typeIdentStart <*> many typeIdentLetter)
-- | A lower-case identifier token, reclassified as a keyword when the
-- spelled name appears in 'keywords'.
tkKwOrIdent = span $ do
  name <- (:) <$> identStart <*> many identLetter
  return $ Token $ if name `elem` keywords
                     then TKeyword name
                     else TIdent name
-- | Operator spellings lexed as 'TReservedOperator' rather than 'TOperator'.
reservedOperators = [ ":", "*", "/", "%", "+", "-", "=", "&", "|", "!"
                    , "==", "<", ">", "!=", ">=", "<=", "<|", "|>", "->"
                    , "()", "_", ".", "::", ",", "@", ";", "(", ")", "["
                    , "]", "{", "}", "=>" ]

-- | Bracket characters: each forms a one-character reserved operator.
operatorLetterSingles = oneOf "[]{}()"
-- | Characters that may start a multi-character operator.
operatorStartLetter = (oneOf "`~!@$%^&*-+|=;:<>.,_/\\?")
-- | Characters that may continue a multi-character operator.
operatorLetter = (oneOf "`~!@$%^&*-+|=:<>./\\?")
-- | An operator token.  Bracket characters form single-character reserved
-- operators on their own; any other run of operator characters (or the
-- special spelling @()@) is reclassified as reserved when it appears in
-- 'reservedOperators'.
tkOperator = span $ do
  single <- optionMaybe operatorLetterSingles
  case single of
    Just op -> return $ Token $ TReservedOperator $ op:""
    Nothing -> combination
  where
    combination = do
      op <- (try $ string "()")
            <|> ((:) <$> operatorStartLetter <*> many operatorLetter)
      if op `elem` reservedOperators
        then return $ Token $ TReservedOperator op
        else return $ Token $ TOperator op
-- | Skip a comment: either a @//@ line comment (terminated by newline or
-- end of input) or a @/* ... */@ block comment.  Block comments nest, and
-- may also appear inside line comments.
comment :: Parser ()
comment =
  lineComment <|> blockComment
  where
    lineComment = do
      try $ string "//"
      lineCommentBody
    lineCommentBody = do
      optional blockComment
      end <- ((== '\n') <$> anyChar)
             <|> (eof *> return True)
      if end
        then return ()
        else lineCommentBody
    blockComment = do
      try $ string "/*"
      blockCommentBody
    blockCommentBody = do
      -- Allow nested block comments before looking for the terminator.
      optional blockComment
      end <- option False $ try $ string "*/" *> return True
      if end
        then return ()
        else anyChar *> blockCommentBody
             <?> "terminating '*/'"
-- | Characters treated as whitespace between tokens.
whiteSpaceChars = " \t\n\r"
whiteSpaceChar = (oneOf whiteSpaceChars) *> return ()

-- | Skip any run of whitespace and comments.
ignore :: Parser ()
ignore = many (whiteSpaceChar <|> comment) *> return ()
-- | Lex a whole input: tokens separated by whitespace/comments, to EOF.
-- Alternatives are ordered so that operators are tried first.
pTokens :: Parser [Token]
pTokens =
  ignore *> (many (tk <* ignore)) <* eof
  where
    tk = tkOperator <|> tkTypeIdent
         <|> tkKwOrIdent
         <|> tkNum
         <|> tkChar
         <|> tkString
-- | Lex a source string (the 'SourceName' is used in error messages).
tokenize :: String -> SourceName -> Either ParseError [Token]
tokenize src nm = parse pTokens nm src
-- | Render a 'ParseError' as a "Syntax error" message followed by the
-- source span of the token whose span encloses the error position.
reconstructSpan :: String -> [Token] -> ParseError -> String
reconstructSpan src tokens err =
  "Syntax error: " ++ (intercalate "\n" $ map showError $ mergeErrors $ errorMessages err)
  ++ '\n' : (showSpan errTok src)
  where
    inner (SysUnExpect what) = what
    inner (UnExpect what) = what
    inner (Expect what) = what
    inner (Message what) = what
    -- Drop empty messages and duplicates.
    mergeErrors errs =
      nub $ filter ((/= "") . inner) errs
    showError (SysUnExpect what) = "got unexpected " ++ what
    showError (UnExpect what) = "got unexpected " ++ what
    showError (Expect what) = "expected " ++ what
    showError (Message errMsg) = errMsg
    -- NOTE(review): 'head' is partial - this crashes when no token's span
    -- encloses the error position; confirm every error position is
    -- covered by some token.
    errTok = head $ filter (\(Token _ s) -> enclosedBySpan s) $ tokens
    pos = errorPos err
    enclosedBySpan spn =
      ((sourceLine pos) <= (startLine spn))
      && ((sourceColumn pos) <= (startColumn spn))
| nulldatamap/bastet | src/Tokenizer.hs | bsd-3-clause | 7,433 | 0 | 19 | 2,386 | 2,437 | 1,293 | 1,144 | 196 | 7 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.EXT.PalettedTexture
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the EXT_paletted_texture extension, see
-- <http://www.opengl.org/registry/specs/EXT/paletted_texture.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.EXT.PalettedTexture (
-- * Functions
glColorTable,
glColorSubTable,
glGetColorTable,
glGetColorTableParameteriv,
glGetColorTableParameterfv,
-- * Tokens
gl_COLOR_INDEX1,
gl_COLOR_INDEX2,
gl_COLOR_INDEX4,
gl_COLOR_INDEX8,
gl_COLOR_INDEX12,
gl_COLOR_INDEX16,
gl_COLOR_TABLE_FORMAT,
gl_COLOR_TABLE_WIDTH,
gl_COLOR_TABLE_RED_SIZE,
gl_COLOR_TABLE_GREEN_SIZE,
gl_COLOR_TABLE_BLUE_SIZE,
gl_COLOR_TABLE_ALPHA_SIZE,
gl_COLOR_TABLE_LUMINANCE_SIZE,
gl_COLOR_TABLE_INTENSITY_SIZE,
gl_TEXTURE_INDEX_SIZE,
gl_TEXTURE_1D,
gl_TEXTURE_2D,
gl_TEXTURE_3D,
gl_TEXTURE_CUBE_MAP,
gl_PROXY_TEXTURE_1D,
gl_PROXY_TEXTURE_2D,
gl_PROXY_TEXTURE_3D,
gl_PROXY_TEXTURE_CUBE_MAP
) where
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility
import Graphics.Rendering.OpenGL.Raw.Core32
-- Token values taken from the EXT_paletted_texture extension registry.

gl_COLOR_INDEX1 :: GLenum
gl_COLOR_INDEX1 = 0x80E2

gl_COLOR_INDEX2 :: GLenum
gl_COLOR_INDEX2 = 0x80E3

gl_COLOR_INDEX4 :: GLenum
gl_COLOR_INDEX4 = 0x80E4

gl_COLOR_INDEX8 :: GLenum
gl_COLOR_INDEX8 = 0x80E5

gl_COLOR_INDEX12 :: GLenum
gl_COLOR_INDEX12 = 0x80E6

gl_COLOR_INDEX16 :: GLenum
gl_COLOR_INDEX16 = 0x80E7

gl_TEXTURE_INDEX_SIZE :: GLenum
gl_TEXTURE_INDEX_SIZE = 0x80ED
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/EXT/PalettedTexture.hs | bsd-3-clause | 1,816 | 0 | 4 | 253 | 199 | 136 | 63 | 45 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving,MultiParamTypeClasses,FlexibleInstances #-}
module SetWriter (SetWriterT(), runSetWriterT, runSetWriter, SetWriter) where
import Control.Monad.Writer.Class
import Control.Monad.Trans
import Control.Monad.State
import Control.Monad.Identity
import Data.Set (Set)
import qualified Data.Set as S
import Data.Map (Map)
import qualified Data.Map as M
-- | A writer-like transformer whose accumulator is carried as state so
-- that 'tell' can merge output via the container's union.
newtype SetWriterT w m a =
  SetWriterT { unWriterT :: StateT w m a } deriving (Monad,Functor,MonadTrans)
instance (Ord el, Monad m) => MonadWriter (Set el) (SetWriterT (Set el) m) where
  -- Accumulate by set union (duplicates collapse).
  tell set = SetWriterT $ modify (`S.union` set)
  -- NOTE(review): unlike the standard writer, this returns the whole
  -- accumulated set so far, not just the output produced by 'm'.
  listen m = SetWriterT $ do
    -- in StateT (Set el) m a
    -- unWriterT m is too
    a <- unWriterT m
    w <- get
    return (a,w)
  -- Apply the returned function to the whole accumulated set.
  pass m = SetWriterT $ do
    (a,f) <- unWriterT m
    modify f
    return a
instance (Ord el, Monad m) => MonadWriter (Map el a) (SetWriterT (Map el a) m) where
  -- Accumulate by left-biased map union.
  tell w = SetWriterT $ modify (`M.union` w)
  -- NOTE(review): as with the Set instance, this returns the whole
  -- accumulated map so far, not just the output produced by 'm'.
  listen m = SetWriterT $ do
    -- in StateT (Map el a) m a
    -- unWriterT m is too
    a <- unWriterT m
    w <- get
    return (a,w)
  -- Apply the returned function to the whole accumulated map.
  pass m = SetWriterT $ do
    (a,f) <- unWriterT m
    modify f
    return a
-- | Non-transformer form over 'Identity'.
type SetWriter el = SetWriterT el Identity

-- | Run with an initial accumulator, returning the result and the final
-- accumulated container.
runSetWriterT :: s -> SetWriterT s m a -> m (a, s)
runSetWriterT s m = runStateT (unWriterT m) s

-- | Pure version of 'runSetWriterT'.
runSetWriter :: s -> SetWriter s a -> (a,s)
runSetWriter s m = runIdentity (runSetWriterT s m)
| olsner/m3 | SetWriter.hs | bsd-3-clause | 1,423 | 0 | 10 | 310 | 534 | 286 | 248 | 40 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
module Astro.Celestrak.Parser (parseEOPFile) where
import Astro.Celestrak
import Data.Time
import Text.Parsec
import Text.Parsec.String
--import Text.Parsec.ByteString.Lazy
import qualified Text.Parsec.Token as P
import Text.Parsec.Language (haskellDef)
import Numeric.Units.Dimensional.Prelude
import qualified Prelude
-- Haskell-style token parser, used for its integer and float lexers.
lexer = P.makeTokenParser haskellDef
integer = P.integer lexer
float = P.float lexer
-- | Parse a float, skipping leading whitespace and accepting an optional
-- leading '-' or '+' sign (parsec's 'float' itself is unsigned).
float' = do
  many space
  applySign <- signParser
  fmap applySign float
  where
    signParser = choice [ char '-' >> return Prelude.negate
                        , char '+' >> return id
                        , return id ]
-- | Parse an integer as a modified Julian day.
mjd :: Parser Day
mjd = fmap ModifiedJulianDay integer

-- | Parse a signed decimal as an angle in arcseconds.
arcsec :: Parser (Angle Double)
arcsec = fmap (*~arcsecond) float'

-- | Parse a signed decimal as a time in seconds.
secs :: Parser (Time Double)
secs = fmap (*~second) float'
{-
# FORMAT(I4,I3,I3,I6,2F10.6,2F11.7,4F10.6,I4)
# ----------------------------------------------------------------------------------------------------
# Date    MJD      x          y        UT1-UTC       LOD       dPsi    dEpsilon     dX        dY    DAT
# (0h UTC)         "          "           s           s          "        "          "         "     s
# ----------------------------------------------------------------------------------------------------
# yy dd mm nnnnn +n.nnnnnn +n.nnnnnn +n.nnnnnnn +n.nnnnnnn +n.nnnnnn +n.nnnnnn +n.nnnnnn +n.nnnnnn nnn
# ----------------------------------------------------------------------------------------------------
#
NUM_OBSERVED_POINTS 2116
BEGIN OBSERVED
2003 01 01 52640 -0.088474  0.188235 -0.2894287  0.0004278 -0.055412 -0.000565 -0.000054  0.000103  32
-}
-- | Parse one EOP data line in the format above: the three date fields
-- are read and discarded, then MJD, polar motion x/y, UT1-UTC, length of
-- day, the four nutation/pole corrections, and finally delta-AT.
line :: Parser (Day, EOPData Double)
line = do
  integer >> integer >> integer
  d <- mjd
  x <- arcsec
  y <- arcsec
  ut1MinusUTC <- secs
  lod <- secs
  dPsi <- arcsec
  dEpsilon <- arcsec
  dX <- arcsec
  dY <- arcsec
  deltaAT <- integer -- Seems to consume trailing newline!?
  --newline
  return (d, EOPData { x, y, ut1MinusUTC, lod, dPsi, dEpsilon, dX, dY, deltaAT })
--parseEOPFile :: String -> EOPList a
-- | Parse a Celestrak EOP file from disk.
parseEOPFile = parseFromFile parser

-- | Parse the PREDICTED section of the file.
-- NOTE(review): the sample header above shows BEGIN OBSERVED, but this
-- parser looks for the PREDICTED markers - confirm which section is
-- intended.
parser :: Parser (EOPList Double)
parser = between (string "BEGIN PREDICTED" >> newline) (string "END PREDICTED" >> newline) (many line)
-- Ad-hoc test inputs for interactive use in GHCi.
testLine = "2003 01 01 52640 -0.088474  0.188235 -0.2894287  0.0004278 -0.055412 -0.000565 -0.000054  0.000103  32"
test = parse line "bub" testLine
test2 = parseEOPFile "tmp.bb"
| bjornbm/astro | src/Astro/Celestrak/Parser.hs | bsd-3-clause | 2,482 | 0 | 11 | 568 | 485 | 258 | 227 | 44 | 1 |
module Main where
import Prelude hiding (Either(..))
import GameInput
import GameState
import GameRenderer
main :: IO ()
main = do
    -- Start in the 'Init' state with an empty board.
    let gs = GameStatus {runStatus = Init, board = emptyBoard}
    initializePainter "Game of Life"
    drawGame gs inputInstructions
    gameLoop gs
-- | Main loop: read one input and dispatch it; 'Quit' ends the program.
gameLoop :: GameStatus -> IO ()
gameLoop st = do
    cmd <- getInput
    case cmd of
      Quit -> quitWithMessage
      _    -> handleInput st cmd
-- | Dispatch a non-'Quit' input to its handler; anything unrecognised
-- just loops.
handleInput :: GameStatus -> Input -> IO ()
handleInput gs input = case input of
    Up    -> upAction gs
    Down  -> downAction gs
    Right -> rightAction gs
    Left  -> leftAction gs
    Liven -> livenAction gs
    Dead  -> deadAction gs
    Start -> runSimulation gs
    _     -> gameLoop gs
-- The four cursor-movement commands were four copies of the same body;
-- they now share 'moveAction'.

upAction :: GameStatus -> IO ()
upAction = moveAction curUp moveUp

downAction :: GameStatus -> IO ()
downAction = moveAction curDown moveDown

rightAction :: GameStatus -> IO ()
rightAction = moveAction curRight moveForward

leftAction :: GameStatus -> IO ()
leftAction = moveAction curLeft moveBackward

-- | One movement step: run the screen-cursor action, apply the matching
-- board transformation, and re-enter the main loop with the new state.
moveAction cursorIO step gs = do
    cursorIO
    let updatedBoard = step (board gs)
    gameLoop gs {board = updatedBoard}
-- The liven/dead commands were two copies of the same body; they now
-- share 'cellAction'.

livenAction :: GameStatus -> IO ()
livenAction = cellAction drawLiveCell makeLive

deadAction :: GameStatus -> IO ()
deadAction = cellAction drawDeadCell makeDead

-- | Paint the cell, apply the given update at the cursor position,
-- advance the cursor forward, and re-enter the main loop.
cellAction draw setCell gs = do
    draw
    let b = board gs
        updatedBoard = setCell b (currentPos b)
        ub = moveForward updatedBoard
    gameLoop gs {board = ub}
-- | Print a goodbye message and exit.
quitWithMessage :: IO ()
quitWithMessage = quit "Thanks for playing!!"
-- | Run the simulation: redraw, pause one tick, step the board, and
-- repeat - unless 'q' is pressed, which quits.
runSimulation :: GameStatus -> IO ()
runSimulation gs = do
    drawGame gs inputInstructions
    pause
    let nb = stepBoard (board gs)
    let ngs = gs {board = nb}
    handleCharPress 'q' quitWithMessage (runSimulation ngs)
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Test.Set2 where
import qualified Data.ByteString as B
import Disorder.Core
import Disorder.Core.IO
import P
import Set1
import Set2
import System.IO (readFile)
import Test.QuickCheck
import Test.QuickCheck.Instances ()
-- | PKCS#7-padding a 16-byte input to 20-byte blocks appends four 0x04
-- bytes (run 'once' - the property has no random inputs).
prop_pkcs7_unit = once $ pkcs7Blocks 20 inp === exp
  where
    inp = "YELLOW SUBMARINE"
    exp = "YELLOW SUBMARINE\EOT\EOT\EOT\EOT"
-- | The padded output's length is always a multiple of the block size.
prop_pkcs7_length bs k =
  B.length (pkcs7Blocks (getPositive k) bs) `mod` (getPositive k) === 0
-- | An input whose length is already an exact multiple of the block size
-- is left unchanged by 'pkcs7Blocks'.
-- NOTE(review): standard PKCS#7 would add a full block of padding in this
-- case - confirm the intended semantics of 'pkcs7Blocks'.
prop_pkcs7_noop b k i =
  pkcs7Blocks (getPositive k) inp === inp
  where
    inp = B.replicate (getPositive i * getPositive k) b
-- The empty Template Haskell splice forces the prop_* declarations above
-- into scope before 'quickCheckAll' collects them.
return []
tests = $quickCheckAll
| thumphries/cryptopals | test/Test/Set2.hs | bsd-3-clause | 912 | 0 | 11 | 219 | 206 | 114 | 92 | 25 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.Bits52.Kf727da8aa8ad (Bits52(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
import qualified Test.ZM.ADT.Bit.K65149ce3b366
-- | A record of 52 individual 'Bit' fields.
-- NOTE(review): the content-hash module names (K...) suggest this file is
-- machine-generated ZM/ADT code; prefer regenerating over hand edits.
data Bits52 = Bits52 {bit0 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit1 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit2 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit3 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit4 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit5 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit6 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit7 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit8 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit9 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit10 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit11 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit12 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit13 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit14 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit15 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit16 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit17 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit18 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit19 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit20 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit21 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit22 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit23 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit24 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit25 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit26 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit27 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit28 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit29 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit30 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit31 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit32 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit33 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit34 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit35 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit36 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit37 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit38 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit39 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit40 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit41 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit42 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit43 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit44 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit45 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit46 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit47 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit48 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit49 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit50 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                      bit51 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit}
  deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)

instance Data.Model.Model Bits52
| tittoassini/typed | test/Test/ZM/ADT/Bits52/Kf727da8aa8ad.hs | bsd-3-clause | 3,932 | 0 | 9 | 1,426 | 737 | 539 | 198 | 62 | 0 |
{-# Language DataKinds #-}
{-# Language FlexibleContexts #-}
{-# Language FlexibleInstances #-}
{-# Language GADTs #-}
{-# Language MultiParamTypeClasses #-}
{-# Language OverloadedStrings #-}
{-# Language PatternSynonyms #-}
{-# Language RankNTypes #-}
{-# Language ScopedTypeVariables #-}
{-# Language TemplateHaskell #-}
{-# Language TypeApplications #-}
{-# Language TypeOperators #-}
module Mir.Compositional.Override
where
import Control.Applicative ((<|>))
import Control.Lens (makeLenses, (^.), (^..), (^?), (%=), use, ix, each, to)
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.State
import qualified Data.BitVector.Sized as BV
import qualified Data.ByteString as BS
import Data.Foldable
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Parameterized.Context (pattern Empty, pattern (:>), Assignment)
import qualified Data.Parameterized.Context as Ctx
import Data.Parameterized.Some
import Data.Parameterized.TraversableFC
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified Data.Vector as V
import GHC.Stack (HasCallStack)
import qualified What4.Expr.Builder as W4
import qualified What4.Interface as W4
import qualified What4.Partial as W4
import What4.ProgramLoc
import Lang.Crucible.Backend
import Lang.Crucible.FunctionHandle
import Lang.Crucible.Simulator
import Lang.Crucible.Types
import qualified Verifier.SAW.Prelude as SAW
import qualified Verifier.SAW.Recognizer as SAW
import qualified Verifier.SAW.SharedTerm as SAW
import qualified Verifier.SAW.Term.Functor as SAW
import qualified Verifier.SAW.TypedTerm as SAW
import qualified SAWScript.Crucible.Common.MethodSpec as MS
import qualified SAWScript.Crucible.Common.Override as MS
import Mir.Generator
import Mir.Intrinsics hiding (MethodSpec)
import qualified Mir.Mir as M
import Mir.Compositional.Clobber
import Mir.Compositional.Convert
import Mir.Compositional.MethodSpec
-- | 'MS.OverrideMatcher'' specialised to the MIR backend, running over
-- 'OverrideSim' (universally quantified in the simulator indices).
type MirOverrideMatcher sym a = forall p rorw rtp args ret.
    MS.OverrideMatcher' sym MIR rorw (OverrideSim (p sym) sym MIR rtp args ret) a
-- | A MIR method spec bundled with the collection state it was built
-- against (needed when enabling it as an override).
data MethodSpec = MethodSpec
    { _msCollectionState :: CollectionState
    , _msSpec :: MIRMethodSpec
    }

makeLenses ''MethodSpec
-- Wire the concrete 'MethodSpec' operations into the class interface.
instance (IsSymInterface sym, sym ~ W4.ExprBuilder t st fs) => MethodSpecImpl sym MethodSpec where
    msPrettyPrint = printSpec
    msEnable = enable
-- | Pretty-print a MethodSpec. This wraps `ppMethodSpec` and returns the
-- result as a Rust string.
printSpec ::
    (IsSymInterface sym, sym ~ W4.ExprBuilder t st fs) =>
    MethodSpec ->
    OverrideSim (p sym) sym MIR rtp args ret (RegValue sym (MirSlice (BVType 8)))
printSpec ms = do
    let str = show $ MS.ppMethodSpec (ms ^. msSpec)
    let bytes = Text.encodeUtf8 $ Text.pack str
    sym <- getSymInterface
    -- Build a MIR byte slice: a vector of byte bitvectors plus a pointer
    -- to its first element and the length.
    len <- liftIO $ W4.bvLit sym knownRepr (BV.mkBV knownRepr $ fromIntegral $ BS.length bytes)
    byteVals <- forM (BS.unpack bytes) $ \b -> do
        liftIO $ W4.bvLit sym (knownNat @8) (BV.mkBV knownRepr $ fromIntegral b)
    let vec = MirVector_Vector $ V.fromList byteVals
    let vecRef = newConstMirRef sym knownRepr vec
    -- Point at index 0 of the fresh constant vector.
    ptr <- subindexMirRefSim knownRepr vecRef =<<
        liftIO (W4.bvLit sym knownRepr (BV.zero knownRepr))
    return $ Empty :> RV ptr :> RV len
-- | Enable a MethodSpec. This installs an override, so for the remainder of
-- the current test, calls to the subject function will be replaced with
-- `runSpec`.
enable ::
    (IsSymInterface sym, sym ~ W4.ExprBuilder t st fs) =>
    MethodSpec ->
    OverrideSim (p sym) sym MIR rtp args ret ()
enable ms = do
    let funcName = ms ^. msSpec . MS.csMethod
    -- Look up the function handle for the spec's subject method.
    MirHandle _name _sig mh <- case cs ^? handleMap . ix funcName of
        Just x -> return x
        Nothing -> error $ "MethodSpec has bad method name " ++
            show (ms ^. msSpec . MS.csMethod) ++ "?"

    -- TODO: handle multiple specs for the same function
    bindFnHandle mh $ UseOverride $ mkOverride' (handleName mh) (handleReturnType mh) $
        runSpec cs mh (ms ^. msSpec)
  where
    cs = ms ^. msCollectionState
-- | "Run" a MethodSpec: assert its preconditions, create fresh symbolic
-- variables for its outputs, and assert its postconditions.
runSpec :: forall sym p t st fs args ret rtp.
(IsSymInterface sym, sym ~ W4.ExprBuilder t st fs) =>
CollectionState -> FnHandle args ret -> MIRMethodSpec ->
OverrideSim (p sym) sym MIR rtp args ret (RegValue sym ret)
runSpec cs mh ms = ovrWithBackend $ \bak ->
do let col = cs ^. collection
sym <- getSymInterface
-- Actual arguments of the intercepted call, keyed by position so they
-- can be matched against the spec's argument bindings below.
RegMap argVals <- getOverrideArgs
let argVals' = Map.fromList $ zip [0..] $ MS.assignmentToList argVals
loc <- liftIO $ W4.getCurrentProgramLoc sym
-- Pre-state fresh variables: these must all receive bindings during
-- argument matching (checked in the validity section below).
let freeVars = Set.fromList $
ms ^.. MS.csPreState . MS.csFreshVars . each . to SAW.tecExt . to SAW.ecVarIndex
-- Private SAWCore context used for all what4 <-> SAWCore conversions.
sc <- liftIO $ SAW.mkSharedContext
liftIO $ SAW.scLoadPreludeModule sc
-- `eval` converts `W4.Expr`s to `SAW.Term`s. We take what4 exprs from the
-- context (e.g., in the actual arguments passed to the override) and
-- convert them to SAWCore terms for use in the OverrideMatcher macro.
-- Later, we need to convert some SAWCore terms back to what4, so during
-- this conversion, we also build up a mapping from SAWCore variables
-- (`SAW.ExtCns`) to what4 ones (`W4.ExprBoundVar`).
w4VarMapRef <- liftIO $ newIORef Map.empty
let eval :: forall tp. W4.Expr t tp -> IO SAW.Term
eval x = exprToTerm sym sc w4VarMapRef x
-- Generate fresh variables for use in postconditions and result. The
-- result, `postFreshTermSub`, maps MethodSpec `VarIndex`es to `Term`s
-- (really just `ExtCns`s). Those `Term`s are produced by `eval`
-- (conversion from what4 to SAW), just like everything else that we put on
-- the RHS of the OverrideMatcher's `termSub`.
--
-- We could allocate these later (it only needs to happen before we process
-- post-state PointsTos and conditions) but it's easier to do it up-front
-- so we don't need to split up our `runOverrideMatcher` call into multiple
-- blocks.
let postFresh = ms ^. MS.csPostState . MS.csFreshVars
postFreshTermSub <- liftM Map.fromList $ forM postFresh $ \tec -> do
let ec = SAW.tecExt tec
let nameStr = Text.unpack $ SAW.toShortName $ SAW.ecName ec
let nameSymbol = W4.safeSymbol nameStr
Some btpr <- liftIO $ termToType sym sc (SAW.ecType ec)
expr <- liftIO $ W4.freshConstant sym nameSymbol btpr
-- Record the variable's creation so it shows up in counterexamples.
let ev = CreateVariableEvent loc nameStr btpr expr
liftIO $ addAssumptions bak (singleEvent ev)
term <- liftIO $ eval expr
return (SAW.ecVarIndex ec, term)
-- Accesses to globals should go through the underlying OverrideSim monad,
-- rather than using OverrideMatcher's `readGlobal`/`writeGlobal` methods.
let sgs = error "tried to access SimGlobalState through OverrideMatcher"
result <- MS.runOverrideMatcher sym sgs mempty postFreshTermSub freeVars loc $ do
-- Match the override's inputs against the MethodSpec inputs. This
-- sets up the `termSub` (symbolic variable bindings) and
-- `setupValueSub` (allocation bindings) in the OverrideMatcher state.
-- Match argument SetupValues against argVals.
forM_ (Map.toList $ ms ^. MS.csArgBindings) $ \(i, (_, sv)) -> do
ty <- case ms ^. MS.csArgs ^? ix (fromIntegral i) of
Nothing -> error $ "wrong number of args for " ++ show (ms ^. MS.csMethod) ++
": no arg at index " ++ show i
Just x -> return x
AnyValue tpr rv <- case argVals' ^? ix i of
Nothing -> error $ "wrong number of args for " ++ show (ms ^. MS.csMethod) ++
": no arg at index " ++ show i
Just x -> return x
let shp = tyToShapeEq col ty tpr
matchArg sym sc eval (ms ^. MS.csPreState . MS.csAllocs) shp rv sv
-- Match PointsTo SetupValues against accessible memory.
--
-- We assume the PointsTos are stored in reversed top-down order (which
-- is what `builderAddArg` does), so if we walk over them in reverse,
-- we'll always see the argument or PointsTo that binds a MirReference
-- to allocation `alloc` before we see the PointsTo for `alloc` itself.
-- This ensures we can obtain a MirReference for each PointsTo that we
-- see.
forM_ (reverse $ ms ^. MS.csPreState . MS.csPointsTos) $ \(MirPointsTo alloc svs) -> do
allocSub <- use MS.setupValueSub
Some ptr <- case Map.lookup alloc allocSub of
Just x -> return x
Nothing -> error $
"PointsTos are out of order: no ref is available for " ++ show alloc
(ty, len) <- case ms ^? MS.csPreState . MS.csAllocs . ix alloc of
Just (Some allocSpec) -> return $ (allocSpec ^. maMirType, allocSpec ^. maLen)
Nothing -> error $
"impossible: alloc mentioned in csPointsTo is absent from csAllocs?"
-- Read each element the PointsTo covers and match it structurally.
forM_ (zip svs [0 .. len - 1]) $ \(sv, i) -> do
iSym <- liftIO $ W4.bvLit sym knownNat $ BV.mkBV knownNat $ fromIntegral i
ref' <- lift $ mirRef_offsetSim (ptr ^. mpType) (ptr ^. mpRef) iSym
rv <- lift $ readMirRefSim (ptr ^. mpType) ref'
let shp = tyToShapeEq col ty (ptr ^. mpType)
matchArg sym sc eval (ms ^. MS.csPreState . MS.csAllocs) shp rv sv
-- Validity checks
-- All pre-state and post-state fresh vars must be bound.
termSub <- use MS.termSub
let allFresh = ms ^. MS.csPreState . MS.csFreshVars ++
ms ^. MS.csPostState . MS.csFreshVars
forM_ allFresh $ \tec -> do
let var = SAW.ecVarIndex $ SAW.tecExt tec
when (not $ Map.member var termSub) $ do
error $ "argument matching failed to produce a binding for " ++
show (MS.ppTypedExtCns tec)
-- All pre-state allocs must be bound.
allocSub <- use MS.setupValueSub
forM_ (Map.toList $ ms ^. MS.csPreState . MS.csAllocs) $ \(alloc, info) -> do
when (not $ Map.member alloc allocSub) $ do
error $ "argument matching failed to produce a binding for " ++
show alloc ++ " (info: " ++ show info ++ ")"
-- All references in `allocSub` must point to disjoint memory regions.
liftIO $ checkDisjoint bak (Map.toList allocSub)
-- TODO: see if we need any other assertions from LLVM OverrideMatcher
-- Handle preconditions and postconditions.
-- Convert preconditions to `osAsserts`
forM_ (ms ^. MS.csPreState . MS.csConditions) $ \cond -> do
term <- condTerm sc cond
w4VarMap <- liftIO $ readIORef w4VarMapRef
pred <- liftIO $ termToPred sym sc w4VarMap term
MS.addAssert pred $
SimError loc (AssertFailureSimError (show $ W4.printSymExpr pred) "")
-- Convert postconditions to `osAssumes`
forM_ (ms ^. MS.csPostState . MS.csConditions) $ \cond -> do
term <- condTerm sc cond
w4VarMap <- liftIO $ readIORef w4VarMapRef
pred <- liftIO $ termToPred sym sc w4VarMap term
MS.addAssume pred
-- A matching failure (Left) aborts the whole override; on success we
-- replay the collected asserts/assumes into the real backend.
((), os) <- case result of
Left err -> error $ show err
Right x -> return x
forM_ (os ^. MS.osAsserts) $ \lp ->
liftIO $ addAssertion bak lp
forM_ (os ^. MS.osAssumes) $ \p ->
liftIO $ addAssumption bak (GenericAssumption loc "methodspec postcondition" p)
-- Allocations that appear only in the post state get fresh references.
let preAllocMap = os ^. MS.setupValueSub
let postAllocDefs = filter (\(k,_v) -> not $ Map.member k preAllocMap) $
Map.toList $ ms ^. MS.csPostState . MS.csAllocs
postAllocMap <- liftM Map.fromList $ forM postAllocDefs $ \(alloc, Some allocSpec) -> do
ref <- newMirRefSim (allocSpec ^. maType)
return (alloc, Some $ MirPointer (allocSpec ^. maType) ref)
let allocMap = preAllocMap <> postAllocMap
-- Handle return value and post-state PointsTos
let retTy = maybe (M.TyTuple []) id $ ms ^. MS.csRet
let retTpr = handleReturnType mh
let retShp = tyToShapeEq col retTy retTpr
w4VarMap <- liftIO $ readIORef w4VarMapRef
let termSub = os ^. MS.termSub
retVal <- case ms ^. MS.csRetValue of
Just sv -> liftIO $ setupToReg sym sc termSub w4VarMap allocMap retShp sv
Nothing -> case testEquality retTpr UnitRepr of
Just Refl -> return ()
Nothing -> error $ "no return value, but return type is " ++ show retTpr
-- For every post-state PointsTo, write the RHS value into the LHS pointer.
--
-- We assume any memory not mentioned in a post-state PointsTo is left
-- unchanged by the subject function. `builderAddArg` is responsible for
-- figuring out which memory is accessible and mutable and thus needs to be
-- clobbered, and for adding appropriate fresh variables and `PointsTo`s to
-- the post state.
forM_ (ms ^. MS.csPostState . MS.csPointsTos) $ \(MirPointsTo alloc svs) -> do
Some ptr <- case Map.lookup alloc allocMap of
Just x -> return x
Nothing -> error $ "post PointsTos are out of order: no ref for " ++ show alloc
-- The alloc may come from either the post state or the pre state.
let optAlloc = (ms ^? MS.csPostState . MS.csAllocs . ix alloc) <|>
(ms ^? MS.csPreState . MS.csAllocs . ix alloc)
(ty, len) <- case optAlloc of
Just (Some allocSpec) -> return $ (allocSpec ^. maMirType, allocSpec ^. maLen)
Nothing -> error $
"impossible: alloc mentioned in post csPointsTo is absent from csAllocs?"
let shp = tyToShapeEq col ty (ptr ^. mpType)
forM_ (zip svs [0 .. len - 1]) $ \(sv, i) -> do
iSym <- liftIO $ W4.bvLit sym knownNat $ BV.mkBV knownNat $ fromIntegral i
ref' <- mirRef_offsetSim (ptr ^. mpType) (ptr ^. mpRef) iSym
rv <- liftIO $ setupToReg sym sc termSub w4VarMap allocMap shp sv
writeMirRefSim (ptr ^. mpType) ref' rv
-- Clobber all globals. We don't yet support mentioning globals in specs.
-- However, we also don't prevent the subject function from modifying
-- globals. Since we have no idea what the subject function might do to
-- globals during a normal call, we conservatively clobber all globals as
-- part of the spec override.
clobberGlobals sym loc "run_spec_clobber_globals" cs
return retVal
-- | Match argument RegValue `rv` against SetupValue pattern `sv`. On success,
-- this may update `termSub` and `setupValueSub` with new bindings for the
-- MethodSpec's symbolic variables and allocations.
matchArg ::
forall sym t st fs tp.
(IsSymInterface sym, sym ~ W4.ExprBuilder t st fs, HasCallStack) =>
sym ->
SAW.SharedContext ->
(forall tp'. W4.Expr t tp' -> IO SAW.Term) ->
Map MS.AllocIndex (Some MirAllocSpec) ->
TypeShape tp -> RegValue sym tp -> MS.SetupValue MIR ->
MirOverrideMatcher sym ()
matchArg sym sc eval allocSpecs shp rv sv = go shp rv sv
where
-- Structural recursion over the TypeShape: each case pairs one shape
-- constructor with the SetupValue form it can legally match.
go :: forall tp. TypeShape tp -> RegValue sym tp -> MS.SetupValue MIR ->
MirOverrideMatcher sym ()
go (UnitShape _) () (MS.SetupStruct () False []) = return ()
go (PrimShape _ _btpr) expr (MS.SetupTerm tt) = do
loc <- use MS.osLocation
exprTerm <- liftIO $ eval expr
case SAW.asExtCns $ SAW.ttTerm tt of
Just ec -> do
-- A spec variable: bind it to the argument, rejecting a second
-- binding of the same variable (nonlinear patterns).
let var = SAW.ecVarIndex ec
sub <- use MS.termSub
when (Map.member var sub) $
MS.failure loc MS.NonlinearPatternNotSupported
MS.termSub %= Map.insert var exprTerm
Nothing -> do
-- If the `TypedTerm` is a constant, we want to assert that the
-- argument `expr` matches the constant.
--
-- For now, this is the case that fires for the length fields
-- of slices. This means the slice length must exactly match
-- the length used in the MethodSpec, or else the spec must
-- specifically handle symbolic lengths in some range. It
-- would be nice to allow any longer slice length, but it's not
-- clear how to do that soundly (the function might branch on
-- the length of the slice, for instance).
Some val <- liftIO $ termToExpr sym sc mempty (SAW.ttTerm tt)
Refl <- case testEquality (W4.exprType expr) (W4.exprType val) of
Just x -> return x
Nothing -> error $ "type mismatch: concrete argument type " ++
show (W4.exprType expr) ++ " doesn't match SetupValue type " ++
show (W4.exprType val)
eq <- liftIO $ W4.isEq sym expr val
MS.addAssert eq $ SimError loc $
AssertFailureSimError
("mismatch on " ++ show (W4.exprType expr) ++ ": expected " ++
show (W4.printSymExpr val))
""
go (TupleShape _ _ flds) rvs (MS.SetupStruct () False svs) = goFields flds rvs svs
go (ArrayShape _ _ shp) vec (MS.SetupArray () svs) = case vec of
MirVector_Vector v -> zipWithM_ (\x y -> go shp x y) (toList v) svs
MirVector_PartialVector pv -> forM_ (zip (toList pv) svs) $ \(p, sv) -> do
rv <- liftIO $ readMaybeType sym "vector element" (shapeType shp) p
go shp rv sv
MirVector_Array _ -> error $ "matchArg: MirVector_Array NYI"
go (StructShape _ _ flds) (AnyValue tpr rvs) (MS.SetupStruct () False svs)
| Just Refl <- testEquality tpr shpTpr = goFields flds rvs svs
| otherwise = error $ "matchArg: type error: expected " ++ show shpTpr ++
", but got Any wrapping " ++ show tpr
where shpTpr = StructRepr $ fmapFC fieldShapeType flds
go (TransparentShape _ shp) rv sv = go shp rv sv
go (RefShape refTy _ tpr) ref (MS.SetupVar alloc) =
goRef refTy tpr ref alloc 0
go (RefShape refTy _ tpr) ref (MS.SetupElem () (MS.SetupVar alloc) idx) =
goRef refTy tpr ref alloc idx
go shp _ sv = error $ "matchArg: type error: bad SetupValue " ++
show (MS.ppSetupValue sv) ++ " for " ++ show (shapeType shp)
-- Match the fields of a tuple/struct. Note the SetupValue list is
-- matched back-to-front against the Assignment (hence the `reverse`),
-- mirroring the snoc order of `Assignment`.
goFields :: forall ctx. Assignment FieldShape ctx -> Assignment (RegValue' sym) ctx ->
[MS.SetupValue MIR] -> MirOverrideMatcher sym ()
goFields flds rvs svs = loop flds rvs (reverse svs)
where
loop :: forall ctx. Assignment FieldShape ctx -> Assignment (RegValue' sym) ctx ->
[MS.SetupValue MIR] -> MirOverrideMatcher sym ()
loop Empty Empty [] = return ()
loop (flds :> fld) (rvs :> RV rv) (sv : svs) = do
case fld of
ReqField shp -> go shp rv sv
OptField shp -> do
rv' <- liftIO $ readMaybeType sym "field" (shapeType shp) rv
go shp rv' sv
loop flds rvs svs
loop _ rvs svs = error $ "matchArg: type error: got RegValues for " ++
show (Ctx.sizeInt $ Ctx.size rvs) ++ " fields, but got " ++
show (length svs) ++ " SetupValues"
goRef :: forall tp'.
M.Ty ->
TypeRepr tp' ->
MirReferenceMux sym tp' ->
MS.AllocIndex ->
-- | The expected offset of `ref` past the start of the allocation.
Int ->
MirOverrideMatcher sym ()
goRef refTy tpr ref alloc refOffset = do
-- Concretize the reference's offset and the allocation's length;
-- symbolic values for either are not supported.
partIdxLen <- lift $ mirRef_indexAndLenSim ref
optIdxLen <- liftIO $ readPartExprMaybe sym partIdxLen
let (optIdx, optLen) =
(BV.asUnsigned <$> (W4.asBV =<< (fst <$> optIdxLen)),
BV.asUnsigned <$> (W4.asBV =<< (snd <$> optIdxLen)))
idx <- case optIdx of
Just x -> return $ fromIntegral x
Nothing -> error $ "unsupported: reference has symbolic offset within allocation " ++
"(for a ref of type " ++ show refTy ++ ")"
len <- case optLen of
Just x -> return $ fromIntegral x
Nothing -> error $ "unsupported: memory allocation has symbolic size " ++
"(for a ref of type " ++ show refTy ++ ")"
-- Offset backward by `idx` to get a pointer to the start of the accessible
-- allocation.
--offsetSym <- liftIO $ W4.bvLit sym knownNat $ BV.mkBV knownNat $ fromIntegral $ negate idx
--startRef <- lift $ mirRef_offsetWrapSim tpr ref offsetSym
-- Check the allocation is big enough on both sides of the reference.
when (idx < refOffset) $ error $
"matchArg: expected at least " ++ show refOffset ++ " accessible elements " ++
"before reference, but only got " ++ show idx
Some allocSpec <- return $ case Map.lookup alloc allocSpecs of
Just x -> x
Nothing -> error $ "no such alloc " ++ show alloc
let numAfter = allocSpec ^. maLen - refOffset
when (len - idx < numAfter) $ error $
"matchArg: expected at least " ++ show numAfter ++ " accessible elements " ++
"after reference, but only got " ++ show (len - idx)
-- Offset backward by `idx` to get a pointer to the start of the accessible
-- allocation.
offsetSym <- liftIO $ W4.bvLit sym knownNat $ BV.mkBV knownNat $
fromIntegral $ negate refOffset
ref' <- lift $ mirRef_offsetWrapSim tpr ref offsetSym
-- If this alloc was already bound, assert the two references are equal
-- rather than rebinding.
m <- use MS.setupValueSub
case Map.lookup alloc m of
Nothing -> return ()
Just (Some ptr)
| Just Refl <- testEquality tpr (ptr ^. mpType) -> do
eq <- lift $ ovrWithBackend $ \bak ->
liftIO $ mirRef_eqIO bak ref' (ptr ^. mpRef)
let loc = mkProgramLoc "matchArg" InternalPos
MS.addAssert eq $
SimError loc (AssertFailureSimError ("mismatch on " ++ show alloc) "")
| otherwise -> error $ "mismatched types for " ++ show alloc ++ ": " ++
show tpr ++ " does not match " ++ show (ptr ^. mpType)
MS.setupValueSub %= Map.insert alloc (Some $ MirPointer tpr ref')
-- | Convert a SetupValue to a RegValue. This is used for MethodSpec outputs,
-- namely the return value and any post-state PointsTos.
setupToReg :: forall sym t st fs tp.
(IsSymInterface sym, sym ~ W4.ExprBuilder t st fs, HasCallStack) =>
sym ->
SAW.SharedContext ->
-- | `termSub`: maps `VarIndex`es in the MethodSpec's namespace to `Term`s
-- in the context's namespace.
Map SAW.VarIndex SAW.Term ->
-- | `regMap`: maps `VarIndex`es in the context's namespace to the
-- corresponding W4 variables in the context's namespace.
Map SAW.VarIndex (Some (W4.Expr t)) ->
Map MS.AllocIndex (Some (MirPointer sym)) ->
TypeShape tp ->
MS.SetupValue MIR ->
IO (RegValue sym tp)
setupToReg sym sc termSub regMap allocMap shp sv = go shp sv
where
-- Structural recursion over the TypeShape, building the RegValue that
-- corresponds to each SetupValue form.
go :: forall tp. TypeShape tp -> MS.SetupValue MIR -> IO (RegValue sym tp)
go (UnitShape _) (MS.SetupStruct _ False []) = return ()
go (PrimShape _ btpr) (MS.SetupTerm tt) = do
-- Substitute spec variables, then convert back to a what4 expression.
term <- liftIO $ SAW.scInstantiateExt sc termSub $ SAW.ttTerm tt
Some expr <- termToExpr sym sc regMap term
Refl <- case testEquality (W4.exprType expr) btpr of
Just x -> return x
Nothing -> error $ "setupToReg: expected " ++ show btpr ++ ", but got " ++
show (W4.exprType expr)
return expr
go (TupleShape _ _ flds) (MS.SetupStruct _ False svs) = goFields flds svs
go (ArrayShape _ _ shp) (MS.SetupArray _ svs) = do
rvs <- mapM (go shp) svs
return $ MirVector_Vector $ V.fromList rvs
go (StructShape _ _ flds) (MS.SetupStruct _ False svs) =
AnyValue (StructRepr $ fmapFC fieldShapeType flds) <$> goFields flds svs
go (TransparentShape _ shp) sv = go shp sv
go (RefShape _ _ tpr) (MS.SetupVar alloc) = case Map.lookup alloc allocMap of
Just (Some ptr) -> case testEquality tpr (ptr ^. mpType) of
Just Refl -> return $ ptr ^. mpRef
Nothing -> error $ "setupToReg: type error: bad reference type for " ++ show alloc ++
": got " ++ show (ptr ^. mpType) ++ " but expected " ++ show tpr
Nothing -> error $ "setupToReg: no definition for " ++ show alloc
go shp sv = error $ "setupToReg: type error: bad SetupValue for " ++ show (shapeType shp) ++
": " ++ show (MS.ppSetupValue sv)
-- Build the fields of a tuple/struct. As in `matchArg`, the SetupValue
-- list is consumed back-to-front (note the `reverse`) to mirror the
-- snoc order of `Assignment`.
goFields :: forall ctx. Assignment FieldShape ctx -> [MS.SetupValue MIR] ->
IO (Assignment (RegValue' sym) ctx)
goFields shps svs = loop shps (reverse svs)
where
loop :: forall ctx. Assignment FieldShape ctx -> [MS.SetupValue MIR] ->
IO (Assignment (RegValue' sym) ctx)
loop Empty [] = return Empty
loop (shps :> shp) (sv : svs) = do
rv <- case shp of
ReqField shp' -> go shp' sv
OptField shp' -> W4.justPartExpr sym <$> go shp' sv
rvs <- loop shps svs
return $ rvs :> RV rv
loop shps svs = error $ "setupToReg: type error: got TypeShapes for " ++
show (Ctx.sizeInt $ Ctx.size shps) ++ " fields, but got " ++
show (length svs) ++ " SetupValues"
-- | Convert a `SetupCondition` from the MethodSpec into a boolean `SAW.Term`
-- referencing variables from the override's context. This uses the current
-- `termSub` to perform the necessary substitution.
--
-- Only `SetupCond_Pred` is supported; equality and ghost conditions raise.
condTerm ::
    (IsSymInterface sym, sym ~ W4.ExprBuilder t st fs) =>
    SAW.SharedContext ->
    MS.SetupCondition MIR ->
    MirOverrideMatcher sym SAW.Term
condTerm _sc (MS.SetupCond_Equal _loc _sv1 _sv2) =
    error "learnCond: SetupCond_Equal NYI" -- TODO
condTerm sc (MS.SetupCond_Pred _loc tt) = do
    -- Instantiate the spec's variables with their current bindings and
    -- return the substituted predicate directly (no redundant re-bind).
    sub <- use MS.termSub
    liftIO $ SAW.scInstantiateExt sc sub $ SAW.ttTerm tt
condTerm _ (MS.SetupCond_Ghost _ _ _ _) =
    error "learnCond: SetupCond_Ghost is not supported"
-- | Assert pairwise disjointness of all allocation references: for every
-- unordered pair, add a proof obligation that the two refs do not overlap.
checkDisjoint ::
    (sym ~ W4.ExprBuilder t st fs, IsSymBackend sym bak) =>
    bak ->
    [(MS.AllocIndex, Some (MirPointer sym))] ->
    IO ()
checkDisjoint bak = pairwise
  where
    sym = backendGetSym bak
    -- Check the head against every later entry, then recurse on the tail,
    -- so each unordered pair is visited exactly once.
    pairwise [] = return ()
    pairwise ((alloc, Some ptr) : later) = do
        forM_ later $ \(alloc', Some ptr') -> do
            overlaps <- mirRef_overlapsIO bak (ptr ^. mpRef) (ptr' ^. mpRef)
            disjoint <- W4.notPred sym overlaps
            assert bak disjoint $ GenericSimError $
                "references " ++ show alloc ++ " and " ++ show alloc' ++ " must not overlap"
        pairwise later
| GaloisInc/saw-script | crux-mir-comp/src/Mir/Compositional/Override.hs | bsd-3-clause | 26,900 | 0 | 27 | 7,581 | 7,280 | 3,611 | 3,669 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- Disable this warning so we can still test deprecated functionality.
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
import Crypto.Random
import Network.BSD
import Network.Socket (socket, Family(..), SocketType(..), close, SockAddr(..), bind, listen, accept, iNADDR_ANY)
import qualified Network.Socket as S
import Network.TLS
import Network.TLS.Extra.Cipher
import System.Console.GetOpt
import System.IO
import System.Timeout
import qualified Data.ByteString.Lazy.Char8 as LC
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString as B
import Control.Monad
import System.Environment
import System.Exit
import System.X509
import Data.X509.CertificateStore
import Data.Default.Class
import Data.IORef
import Data.Monoid
import Data.List (find)
import Data.Maybe (isJust, mapMaybe)
import Common
import HexDump
-- Default number of bytes to transfer in benchmark mode (1 MiB).
defaultBenchAmount = 1024 * 1024
-- Default per-recv timeout, in milliseconds.
defaultTimeout = 2000
-- Build a fake cipher entry carrying the given ID (body copied from
-- AES128-SHA1); used to probe how peers react to unknown cipher suites.
bogusCipher cid = cipher_AES128_SHA1 { cipherID = cid }
-- | Create a TLS context over an accepted socket, wire up logging hooks
-- according to the two debug switches, and hand the context to `f`.
runTLS debug ioDebug params cSock f = do
ctx <- contextNew cSock params
contextHookSetLogging ctx getLogging
f ctx
where getLogging = ioLogging $ packetLogging $ def
-- When `debug` is set, print each decoded TLS packet sent/received.
packetLogging logging
| debug = logging { loggingPacketSent = putStrLn . ("debug: >> " ++)
, loggingPacketRecv = putStrLn . ("debug: << " ++)
}
| otherwise = logging
-- When `ioDebug` is set, hexdump the raw bytes on the wire.
ioLogging logging
| ioDebug = logging { loggingIOSent = mapM_ putStrLn . hexdump ">>"
, loggingIORecv = \hdr body -> do
putStrLn ("<< " ++ show hdr)
mapM_ putStrLn $ hexdump "<<" body
}
| otherwise = logging
-- | A 'SessionManager' backed by a single IORef that remembers only the
-- most recently established session. Resumption succeeds only when the
-- requested session id matches the stored one; invalidation is a no-op.
sessionRef ref = SessionManager
    { sessionEstablish  = \sid sdata -> writeIORef ref (sid, sdata)
    , sessionResume     = \sid -> do
        (storedId, storedData) <- readIORef ref
        return (if storedId == sid then Just storedData else Nothing)
    , sessionInvalidate = \_ -> return ()
    }
-- | Build the 'ServerParams' for a connection from the parsed command-line
-- flags, the system certificate store, the session storage ref, and the
-- server credential.
getDefaultParams :: [Flag] -> CertificateStore -> IORef (SessionID, SessionData) -> Credential -> IO ServerParams
getDefaultParams flags store sStorage cred = do
dhParams <- case getDHParams flags of
Nothing -> return Nothing
Just name -> readDHParams name
return ServerParams
{ serverWantClientCert = False
, serverCACertificates = []
, serverDHEParams = dhParams
, serverShared = def { sharedSessionManager = sessionRef sStorage
, sharedCAStore = store
, sharedValidationCache = validateCache
, sharedCredentials = Credentials [cred]
}
, serverHooks = def
, serverSupported = def { supportedVersions = supportedVers
, supportedCiphers = myCiphers
, supportedClientInitiatedRenegotiation = allowRenegotiation }
, serverDebug = def { debugSeed = foldl getDebugSeed Nothing flags
, debugPrintSeed = if DebugPrintSeed `elem` flags
then (\seed -> putStrLn ("seed: " ++ show (seedToInteger seed)))
else (\_ -> return ())
}
}
where
-- With --no-validation, short-circuit the validation cache so every
-- certificate passes.
validateCache
| validateCert = def
| otherwise = ValidationCache (\_ _ _ -> return ValidationCachePass)
(\_ _ _ -> return ())
-- Selected ciphers plus any --bogocipher ids that parse as numbers.
myCiphers = foldl accBogusCipher getSelectedCiphers flags
where accBogusCipher acc (BogusCipher c) =
case reads c of
[(v, "")] -> acc ++ [bogusCipher v]
_ -> acc
accBogusCipher acc _ = acc
getUsedCipherIDs = foldl f [] flags
where f acc (UseCipher am) =
case readCiphers am of
Just l -> l ++ acc
Nothing -> acc
f acc _ = acc
-- No --use-cipher flags means the library's default suite.
getSelectedCiphers =
case getUsedCipherIDs of
[] -> ciphersuite_default
l -> mapMaybe (\cid -> find ((== cid) . cipherID) ciphersuite_all) l
getDHParams opts = foldl accf Nothing opts
where accf _ (DHParams file) = Just file
accf acc _ = acc
getDebugSeed :: Maybe Seed -> Flag -> Maybe Seed
getDebugSeed _ (DebugSeed seed) = seedFromInteger `fmap` readNumber seed
getDebugSeed acc _ = acc
-- Highest protocol version implied by the flags; TLS 1.2 by default.
tlsConnectVer
| Tls12 `elem` flags = TLS12
| Tls11 `elem` flags = TLS11
| Ssl3 `elem` flags = SSL3
| Tls10 `elem` flags = TLS10
| otherwise = TLS12
-- Unless downgrade is forbidden, also offer every version at or below
-- the selected one.
supportedVers
| NoVersionDowngrade `elem` flags = [tlsConnectVer]
| otherwise = filter (<= tlsConnectVer) allVers
allVers = [SSL3, TLS10, TLS11, TLS12]
validateCert = not (NoValidateCert `elem` flags)
allowRenegotiation = AllowRenegotiation `elem` flags
-- | Command-line flags accepted by the server. Most constructors are plain
-- switches; the String-carrying ones hold their option's argument verbatim
-- and are decoded later (see the folds in `runOn` and `getDefaultParams`).
data Flag = Verbose | Debug | IODebug | NoValidateCert | Session | Http11
| Ssl3 | Tls10 | Tls11 | Tls12
| NoVersionDowngrade
| AllowRenegotiation
| Output String
| Timeout String
| BogusCipher String
| BenchSend
| BenchRecv
| BenchData String
| UseCipher String
| ListCiphers
| ListDHParams
| Certificate String
| Key String
| DHParams String
| DebugSeed String
| DebugPrintSeed
| Help
deriving (Show,Eq)
-- | Command-line option descriptors; the third field of each 'Option' is
-- the help text printed by 'usageInfo'.
options :: [OptDescr Flag]
options =
    [ Option ['v'] ["verbose"] (NoArg Verbose) "verbose output on stdout"
    , Option ['d'] ["debug"] (NoArg Debug) "TLS debug output on stdout"
    , Option [] ["io-debug"] (NoArg IODebug) "TLS IO debug output on stdout"
    , Option ['s'] ["session"] (NoArg Session) "try to resume a session"
    , Option ['O'] ["output"] (ReqArg Output "stdout") "output to file" -- was the dangling text "output "
    , Option ['t'] ["timeout"] (ReqArg Timeout "timeout") "timeout in milliseconds (2s by default)"
    , Option [] ["no-validation"] (NoArg NoValidateCert) "disable certificate validation"
    , Option [] ["http1.1"] (NoArg Http11) "use http1.1 instead of http1.0"
    , Option [] ["ssl3"] (NoArg Ssl3) "use SSL 3.0"
    , Option [] ["tls10"] (NoArg Tls10) "use TLS 1.0"
    , Option [] ["tls11"] (NoArg Tls11) "use TLS 1.1"
    , Option [] ["tls12"] (NoArg Tls12) "use TLS 1.2 (default)"
    , Option [] ["bogocipher"] (ReqArg BogusCipher "cipher-id") "add a bogus cipher id for testing"
    , Option ['x'] ["no-version-downgrade"] (NoArg NoVersionDowngrade) "do not allow version downgrade"
    , Option [] ["allow-renegotiation"] (NoArg AllowRenegotiation) "allow client-initiated renegotiation"
    , Option ['h'] ["help"] (NoArg Help) "request help"
    , Option [] ["bench-send"] (NoArg BenchSend) "benchmark send path. only with compatible server"
    , Option [] ["bench-recv"] (NoArg BenchRecv) "benchmark recv path. only with compatible server"
    , Option [] ["bench-data"] (ReqArg BenchData "amount") "amount of data to benchmark with"
    , Option [] ["use-cipher"] (ReqArg UseCipher "cipher-id") "use a specific cipher"
    , Option [] ["list-ciphers"] (NoArg ListCiphers) "list all ciphers supported and exit"
    , Option [] ["list-dhparams"] (NoArg ListDHParams) "list all DH parameters supported and exit"
    , Option [] ["certificate"] (ReqArg Certificate "certificate") "certificate file"
    , Option [] ["debug-seed"] (ReqArg DebugSeed "debug-seed") "debug: set a specific seed for randomness"
    , Option [] ["debug-print-seed"] (NoArg DebugPrintSeed) "debug: print the seed used for randomness" -- was a copy-paste of --debug-seed's text
    , Option [] ["key"] (ReqArg Key "key") "key file" -- was "certificate file" (copy-paste error)
    , Option [] ["dhparams"] (ReqArg DHParams "dhparams") "DH parameters (name or file)"
    ]
-- | Load the server credential from a key file and a certificate file,
-- aborting with a descriptive error when either path is absent or the
-- files fail to parse.
loadCred (Just key) (Just cert) =
    either (\err -> error ("cannot load certificate: " ++ err)) return
        =<< credentialLoadX509 cert key
loadCred Nothing _ =
    error "missing credential key"
loadCred _ Nothing =
    error "missing credential certificate"
-- | Bind a listening socket on the given port and serve connections
-- according to the flags: benchmark send/recv mode, or normal TLS echo
-- (optionally a second connection to exercise session resumption).
runOn (sStorage, certStore) flags port = do
sock <- socket AF_INET Stream defaultProtocol
S.setSocketOption sock S.ReuseAddr 1
let sockaddr = SockAddrInet port iNADDR_ANY
bind sock sockaddr
listen sock 10
runOn' sock
close sock
where
-- Dispatch on the benchmark flags; the default mode handles one
-- connection (two when --session is given, to test resumption).
runOn' sock
| BenchSend `elem` flags = runBench True sock
| BenchRecv `elem` flags = runBench False sock
| otherwise = do
--certCredRequest <- getCredRequest
doTLS sock
when (Session `elem` flags) $ doTLS sock
-- Accept one connection and stream getBenchAmount bytes in the chosen
-- direction, then shut the session down cleanly.
runBench isSend sock = do
(cSock, cAddr) <- accept sock
putStrLn ("connection from " ++ show cAddr)
cred <- loadCred getKey getCertificate
params <- getDefaultParams flags certStore sStorage cred
runTLS False False params cSock $ \ctx -> do
handshake ctx
if isSend
then loopSendData getBenchAmount ctx
else loopRecvData getBenchAmount ctx
bye ctx
close cSock
where
dataSend = BC.replicate 4096 'a'
loopSendData bytes ctx
| bytes <= 0 = return ()
| otherwise = do
sendData ctx $ LC.fromChunks [(if bytes > B.length dataSend then dataSend else BC.take bytes dataSend)]
loopSendData (bytes - B.length dataSend) ctx
loopRecvData bytes ctx
| bytes <= 0 = return ()
| otherwise = do
d <- recvData ctx
loopRecvData (bytes - B.length d) ctx
-- Accept one connection, handshake, and copy received data to stdout
-- (or the --output file) until EOF or timeout.
doTLS sock = do
(cSock, cAddr) <- accept sock
putStrLn ("connection from " ++ show cAddr)
out <- maybe (return stdout) (flip openFile AppendMode) getOutput
cred <- loadCred getKey getCertificate
params <- getDefaultParams flags certStore sStorage cred
runTLS (Debug `elem` flags)
(IODebug `elem` flags)
params cSock $ \ctx -> do
handshake ctx
when (Verbose `elem` flags) $ printHandshakeInfo ctx
loopRecv out ctx
--sendData ctx $ query
bye ctx
return ()
close cSock
when (isJust getOutput) $ hClose out
loopRecv out ctx = do
d <- timeout (timeoutMs * 1000) (recvData ctx) -- 2s per recv
case d of
Nothing -> when (Debug `elem` flags) (hPutStrLn stderr "timeout") >> return ()
Just b | BC.null b -> return ()
| otherwise -> BC.hPutStrLn out b >> loopRecv out ctx
{-
getCredRequest =
case clientCert of
Nothing -> return Nothing
Just s -> do
case break (== ':') s of
(_ ,"") -> error "wrong format for client-cert, expecting 'cert-file:key-file'"
(cert,':':key) -> do
ecred <- credentialLoadX509 cert key
case ecred of
Left err -> error ("cannot load client certificate: " ++ err)
Right cred -> do
let certRequest _ = return $ Just cred
return $ Just (Credentials [cred], certRequest)
(_ ,_) -> error "wrong format for client-cert, expecting 'cert-file:key-file'"
-}
-- The folds below extract the last-resort defaults or the flag values
-- from the (unordered) flag list.
getOutput = foldl f Nothing flags
where f _ (Output o) = Just o
f acc _ = acc
-- NOTE(review): `read t` is partial and will crash on a non-numeric
-- --timeout argument.
timeoutMs = foldl f defaultTimeout flags
where f _ (Timeout t) = read t
f acc _ = acc
getKey = foldl f Nothing flags
where f _ (Key key) = Just key
f acc _ = acc
getCertificate = foldl f Nothing flags
where f _ (Certificate cert) = Just cert
f acc _ = acc
getBenchAmount = foldl f defaultBenchAmount flags
where f acc (BenchData am) = case readNumber am of
Nothing -> acc
Just i -> i
f acc _ = acc
-- | Print the GetOpt-generated usage text for all options.
printUsage =
    putStrLn (usageInfo "usage: simpleserver [opts] [port]\n\n\t(port default to: 443)\noptions:\n" options)
-- | Parse the command line, handle the informational flags, then start
-- the server on the requested port (443 when none is given).
main :: IO ()
main = do
    args <- getArgs
    let (opts, other, errs) = getOpt Permute options args
    -- Reject malformed/unknown options up front.
    unless (null errs) $ do
        print errs
        exitFailure
    when (Help `elem` opts) $ do
        printUsage
        exitSuccess
    when (ListCiphers `elem` opts) $ do
        printCiphers
        exitSuccess
    when (ListDHParams `elem` opts) $ do
        printDHParams
        exitSuccess
    certStore <- getSystemCertificateStore
    -- The session slot is only written once a session is established;
    -- reading it before then is a bug, hence the poisoned initial value.
    sStorage <- newIORef (error "storage ioref undefined")
    case other of
        []     -> runOn (sStorage, certStore) opts 443
        [port] -> runOn (sStorage, certStore) opts (fromInteger $ read port)
        _      -> printUsage >> exitFailure
| erikd/hs-tls | debug/src/SimpleServer.hs | bsd-3-clause | 13,986 | 0 | 20 | 5,199 | 3,483 | 1,790 | 1,693 | 264 | 10 |
module VideoCore4.QPU.Instruction.Register
(
Register
, ABRW(..)
, rwab
, ra0
, rb0
, ra1
, rb1
, ra2
, rb2
, ra3
, rb3
, ra4
, rb4
, ra5
, rb5
, ra6
, rb6
, ra7
, rb7
, ra8
, rb8
, ra9
, rb9
, ra10
, rb10
, ra11
, rb11
, ra12
, rb12
, ra13
, rb13
, ra14
, rb14
, ra15
, rb15
, ra16
, rb16
, ra17
, rb17
, ra18
, rb18
, ra19
, rb19
, ra20
, rb20
, ra21
, rb21
, ra22
, rb22
, ra23
, rb23
, ra24
, rb24
, ra25
, rb25
, ra26
, rb26
, ra27
, rb27
, ra28
, rb28
, ra29
, rb29
, ra30
, rb30
, ra31
, rb31
, uniform_read
, r0
, r1
, r2
, r3
, tmu_noswap
, r5
, element_number
, qpu_number
, host_int
, nop
, uniforms_address
, vpm_read
, vpm_write
, vpm_ld_busy
, vpm_st_busy
, vpmvcd_rd_setup
, vpmvcd_wr_setup
, vpm_ld_wait
, vpm_st_wait
, vpm_ld_addr
, vpm_st_addr
, mutex_acquire
, mutex_release
, sfu_recip
, sfu_recipsqrt
, sfu_exp
, sfu_log
, tmu0s
, tmu0t
, tmu0r
, tmu0b
, tmu1s
, tmu1t
, tmu1r
, tmu1b
) where
import Data.Bits
import Data.Typeable
import Data.Word
import VideoCore4.QPU.Instruction.Types
-- | How a register address may be accessed: read or write, through
-- register file A or register file B.
data ABRW = AR -- ^ readable via register file A
| BR -- ^ readable via register file B
| AW -- ^ writable via register file A
| BW -- ^ writable via register file B
deriving (Eq, Show, Typeable)
-- | A QPU register: its raw address byte plus the access modes under which
-- that address is meaningful. Only the low 6 bits of the address are used
-- when encoding (see the `To64` instance).
data Register = Register { registerAddr :: Word8
, registerABRW :: [ABRW]
} deriving (Eq, Show, Typeable)
-- | Encode a register for instruction packing. Only the low 6 bits of the
-- address are significant, so mask before widening; `fromIntegral` replaces
-- the original's equivalent but roundabout `toEnum . fromEnum`.
instance To64 Register where
  to64 = fromIntegral . (.&. 0x3F) . registerAddr
-- | The access modes (read/write via register file A/B) of a register.
rwab :: Register -> [ABRW]
rwab = registerABRW
-- General-purpose register file entries. Each address 0-31 exists in both
-- register file A (raN, read/write via A) and register file B (rbN,
-- read/write via B).
ra0 :: Register
ra0 = Register 0 [AR,AW]
rb0 :: Register
rb0 = Register 0 [BR,BW]
ra1 :: Register
ra1 = Register 1 [AR,AW]
rb1 :: Register
rb1 = Register 1 [BR,BW]
ra2 :: Register
ra2 = Register 2 [AR,AW]
rb2 :: Register
rb2 = Register 2 [BR,BW]
ra3 :: Register
ra3 = Register 3 [AR,AW]
rb3 :: Register
rb3 = Register 3 [BR,BW]
ra4 :: Register
ra4 = Register 4 [AR,AW]
rb4 :: Register
rb4 = Register 4 [BR,BW]
ra5 :: Register
ra5 = Register 5 [AR,AW]
rb5 :: Register
rb5 = Register 5 [BR,BW]
ra6 :: Register
ra6 = Register 6 [AR,AW]
rb6 :: Register
rb6 = Register 6 [BR,BW]
ra7 :: Register
ra7 = Register 7 [AR,AW]
rb7 :: Register
rb7 = Register 7 [BR,BW]
ra8 :: Register
ra8 = Register 8 [AR,AW]
rb8 :: Register
rb8 = Register 8 [BR,BW]
ra9 :: Register
ra9 = Register 9 [AR,AW]
rb9 :: Register
rb9 = Register 9 [BR,BW]
ra10 :: Register
ra10 = Register 10 [AR,AW]
rb10 :: Register
rb10 = Register 10 [BR,BW]
ra11 :: Register
ra11 = Register 11 [AR,AW]
rb11 :: Register
rb11 = Register 11 [BR,BW]
ra12 :: Register
ra12 = Register 12 [AR,AW]
rb12 :: Register
rb12 = Register 12 [BR,BW]
ra13 :: Register
ra13 = Register 13 [AR,AW]
rb13 :: Register
rb13 = Register 13 [BR,BW]
ra14 :: Register
ra14 = Register 14 [AR,AW]
rb14 :: Register
rb14 = Register 14 [BR,BW]
ra15 :: Register
ra15 = Register 15 [AR,AW]
rb15 :: Register
rb15 = Register 15 [BR,BW]
ra16 :: Register
ra16 = Register 16 [AR,AW]
rb16 :: Register
rb16 = Register 16 [BR,BW]
ra17 :: Register
ra17 = Register 17 [AR,AW]
rb17 :: Register
rb17 = Register 17 [BR,BW]
ra18 :: Register
ra18 = Register 18 [AR,AW]
rb18 :: Register
rb18 = Register 18 [BR,BW]
ra19 :: Register
ra19 = Register 19 [AR,AW]
rb19 :: Register
rb19 = Register 19 [BR,BW]
ra20 :: Register
ra20 = Register 20 [AR,AW]
rb20 :: Register
rb20 = Register 20 [BR,BW]
ra21 :: Register
ra21 = Register 21 [AR,AW]
rb21 :: Register
rb21 = Register 21 [BR,BW]
ra22 :: Register
ra22 = Register 22 [AR,AW]
rb22 :: Register
rb22 = Register 22 [BR,BW]
ra23 :: Register
ra23 = Register 23 [AR,AW]
rb23 :: Register
rb23 = Register 23 [BR,BW]
ra24 :: Register
ra24 = Register 24 [AR,AW]
rb24 :: Register
rb24 = Register 24 [BR,BW]
ra25 :: Register
ra25 = Register 25 [AR,AW]
rb25 :: Register
rb25 = Register 25 [BR,BW]
ra26 :: Register
ra26 = Register 26 [AR,AW]
rb26 :: Register
rb26 = Register 26 [BR,BW]
ra27 :: Register
ra27 = Register 27 [AR,AW]
rb27 :: Register
rb27 = Register 27 [BR,BW]
ra28 :: Register
ra28 = Register 28 [AR,AW]
rb28 :: Register
rb28 = Register 28 [BR,BW]
ra29 :: Register
ra29 = Register 29 [AR,AW]
rb29 :: Register
rb29 = Register 29 [BR,BW]
ra30 :: Register
ra30 = Register 30 [AR,AW]
rb30 :: Register
rb30 = Register 30 [BR,BW]
ra31 :: Register
ra31 = Register 31 [AR,AW]
rb31 :: Register
rb31 = Register 31 [BR,BW]
-- Special register addresses (32-63). The same address can mean different
-- things on read vs. write and via file A vs. file B, which is why the
-- ABRW lists differ between the names below. Names follow the VideoCore IV
-- architecture reference (VPM = vertex pipe memory, SFU = special function
-- unit, TMU = texture/memory lookup unit) -- confirm details against it.
uniform_read :: Register
uniform_read = Register 32 [AR,BR]
-- Writing addresses 32-35 and 37 targets the accumulators r0-r3 and r5.
r0 :: Register
r0 = Register 32 [AW,BW]
r1 :: Register
r1 = Register 33 [AW,BW]
r2 :: Register
r2 = Register 34 [AW,BW]
r3 :: Register
r3 = Register 35 [AW,BW]
tmu_noswap :: Register
tmu_noswap = Register 36 [AW,BW]
r5 :: Register
r5 = Register 37 [AW,BW]
-- Address 38 reads differently per file: element number via A, QPU number
-- via B; writing it raises a host interrupt.
element_number :: Register
element_number = Register 38 [AR]
qpu_number :: Register
qpu_number = Register 38 [BR]
host_int :: Register
host_int = Register 38 [AW,BW]
nop :: Register
nop = Register 39 [AR,BR,AW,BW]
uniforms_address :: Register
uniforms_address = Register 40 [AW,BW]
-- VPM data access and VCD (DMA) setup/handshake registers.
vpm_read :: Register
vpm_read = Register 48 [AR,BR]
vpm_write :: Register
vpm_write = Register 48 [AW,BW]
vpm_ld_busy :: Register
vpm_ld_busy = Register 49 [AR]
vpm_st_busy :: Register
vpm_st_busy = Register 49 [BR]
vpmvcd_rd_setup :: Register
vpmvcd_rd_setup = Register 49 [AW]
vpmvcd_wr_setup :: Register
vpmvcd_wr_setup = Register 49 [BW]
vpm_ld_wait :: Register
vpm_ld_wait = Register 50 [AR]
vpm_st_wait :: Register
vpm_st_wait = Register 50 [BR]
vpm_ld_addr :: Register
vpm_ld_addr = Register 50 [AW]
vpm_st_addr :: Register
vpm_st_addr = Register 50 [BW]
-- Hardware mutex: read to acquire, write to release.
mutex_acquire :: Register
mutex_acquire = Register 51 [AR,BR]
mutex_release :: Register
mutex_release = Register 51 [AW,BW]
-- SFU operand registers.
sfu_recip :: Register
sfu_recip = Register 52 [AW,BW]
sfu_recipsqrt :: Register
sfu_recipsqrt = Register 53 [AW,BW]
sfu_exp :: Register
sfu_exp = Register 54 [AW,BW]
sfu_log :: Register
sfu_log = Register 55 [AW,BW]
-- TMU coordinate registers (s, t, r, lod-bias) for the two TMUs.
tmu0s :: Register
tmu0s = Register 56 [AW,BW]
tmu0t :: Register
tmu0t = Register 57 [AW,BW]
tmu0r :: Register
tmu0r = Register 58 [AW,BW]
tmu0b :: Register
tmu0b = Register 59 [AW,BW]
tmu1s :: Register
tmu1s = Register 60 [AW,BW]
tmu1t :: Register
tmu1t = Register 61 [AW,BW]
tmu1r :: Register
tmu1r = Register 62 [AW,BW]
tmu1b :: Register
tmu1b = Register 63 [AW,BW]
| notogawa/VideoCore4 | src/VideoCore4/QPU/Instruction/Register.hs | bsd-3-clause | 6,765 | 0 | 9 | 1,908 | 2,537 | 1,481 | 1,056 | 321 | 1 |
module GDITypes
{- -- still incomplete
( POINT, marshall_point, unmarshall_point
, ListPOINT, marshall_ListPOINT_
, ListLenPOINT, marshall_ListLenPOINT_
, RECT, marshall_rect, unmarshall_rect
, SIZE, marshall_size, unmarshall_size
, nullAddr
, HBITMAP , MbHBITMAP
, HFONT , MbHFONT
, HCURSOR , MbHCURSOR
, HICON , MbHICON
, HRGN , MbHRGN
, HPALETTE , MbHPALETTE
, HBRUSH , MbHBRUSH
, HPEN , MbHPEN
, HACCEL --, MbHACCEL
, HDC , MbHDC
, HDWP , MbHDWP
, HWND , MbHWND
, HMENU , MbHMENU
, PolyFillMode
, ArcDirection
, MbArcDirection
, GraphicsMode
, MbGraphicsMode
, BackgroundMode
, HatchStyle
, StretchBltMode
, COLORREF
, TextAlignment
, ClippingMode
, RegionType
)
-}
where
import StdDIS
import Win32Types
import Monad( zipWithM_ )
import IOExts
import Foreign
----------------------------------------------------------------
--
----------------------------------------------------------------
type POINT =
( LONG -- x
, LONG -- y
)
type RECT =
( LONG -- left
, LONG -- top
, LONG -- right
, LONG -- bottom
)
type SIZE =
( LONG -- cx
, LONG -- cy
)
----------------------------------------------------------------
-- | Marshal a list of points into a freshly allocated contiguous array of
-- C POINT structures and return its base address.  The caller owns the
-- returned memory.
marshall_listPOINT_ :: [POINT] -> IO Addr
marshall_listPOINT_ pts = do
    arr <- mallocPOINTs (length pts)
    zipWithM_ (setPOINT arr) [0..] pts
    return arr
-- | Like 'marshall_listPOINT_', but also return the element count, for
-- Win32 calls that take an (array, length) pair.
marshall_listLenPOINT_ :: [POINT] -> IO (Addr, Int)
marshall_listLenPOINT_ pts = do
    let n = length pts
    arr <- mallocPOINTs n
    zipWithM_ (setPOINT arr) [0..] pts
    return (arr, n)
-- | Allocate room for the given number of POINT structures on the C side.
-- The primitive returns (pointer, failure flag, failure message); a
-- non-zero flag is converted into an 'IOError' built from the C message.
mallocPOINTs :: Int -> IO Addr
mallocPOINTs arg1 =
    prim_GDITypes_cpp_mallocPOINTs arg1 >>= \ (ps,gc_failed,gc_failstring) ->
    if ( gc_failed /= (0::Int))
        then unmarshall_string_ gc_failstring >>= ioError . userError
        else (return (ps))
-- Hugs/GreenCard FFI entry point implemented in GDITypes.cpp.
primitive prim_GDITypes_cpp_mallocPOINTs :: Int -> IO (Addr,Int,Addr)
-- | Write one POINT (x,y) into slot @i@ of an array previously allocated
-- with 'mallocPOINTs'.  Each LONG coordinate is narrowed with
-- 'fromIntegral' to the Int width expected by the primitive.
setPOINT :: Addr -> Int -> POINT -> IO ()
setPOINT ps i gc_arg1 =
    case gc_arg1 of { (gc_arg2,gc_arg4) ->
    case ( fromIntegral gc_arg2) of { gc_arg3 ->
    case ( fromIntegral gc_arg4) of { gc_arg5 ->
    prim_GDITypes_cpp_setPOINT ps i gc_arg3 gc_arg5}}}
-- Hugs/GreenCard FFI entry point implemented in GDITypes.cpp.
primitive prim_GDITypes_cpp_setPOINT :: Addr -> Int -> Int -> Int -> IO ()
type LPRECT = Addr
type MbLPRECT = Maybe LPRECT
-- | Read a RECT structure from the given pointer, yielding its
-- (left, top, right, bottom) fields as a tuple of LONGs.
getRECT :: LPRECT -> IO RECT
getRECT r =
    prim_GDITypes_cpp_getRECT r >>= \ (gc_res2,gc_res4,gc_res6,gc_res8) ->
    let gc_res1 = ( fromIntegral (gc_res2)) in
    let gc_res3 = ( fromIntegral (gc_res4)) in
    let gc_res5 = ( fromIntegral (gc_res6)) in
    let gc_res7 = ( fromIntegral (gc_res8)) in
    (return ((gc_res1,gc_res3,gc_res5,gc_res7)))
-- Hugs/GreenCard FFI entry point implemented in GDITypes.cpp.
primitive prim_GDITypes_cpp_getRECT :: Addr -> IO (Int,Int,Int,Int)
----------------------------------------------------------------
-- (GDI related) Handles
----------------------------------------------------------------
type HBITMAP = Addr
type MbHBITMAP = Maybe HBITMAP
type HFONT = Addr
type MbHFONT = Maybe HFONT
type HCURSOR = Addr
type MbHCURSOR = Maybe HCURSOR
type HICON = Addr
type MbHICON = Maybe HICON
-- This is not the only handle / resource that should be
-- finalised for you, but it's a start.
-- ToDo.
type HRGN = ForeignObj
type MbHRGN = Maybe HRGN
type HPALETTE = Addr
type MbHPALETTE = Maybe HPALETTE
type HBRUSH = Addr
type MbHBRUSH = Maybe HBRUSH
type HPEN = Addr
type MbHPEN = Maybe HPEN
type HACCEL = Addr
type HDC = Addr
type MbHDC = Maybe HDC
type HDWP = Addr
type MbHDWP = Maybe HDWP
type HWND = Addr
type MbHWND = Maybe HWND
hWND_BOTTOM :: HWND
hWND_BOTTOM =
unsafePerformIO(
prim_GDITypes_cpp_hWND_BOTTOM >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_hWND_BOTTOM :: IO (Addr)
hWND_NOTOPMOST :: HWND
hWND_NOTOPMOST =
unsafePerformIO(
prim_GDITypes_cpp_hWND_NOTOPMOST >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_hWND_NOTOPMOST :: IO (Addr)
hWND_TOP :: HWND
hWND_TOP =
unsafePerformIO(
prim_GDITypes_cpp_hWND_TOP >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_hWND_TOP :: IO (Addr)
hWND_TOPMOST :: HWND
hWND_TOPMOST =
unsafePerformIO(
prim_GDITypes_cpp_hWND_TOPMOST >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_hWND_TOPMOST :: IO (Addr)
type HMENU = Addr
type MbHMENU = Maybe HMENU
----------------------------------------------------------------
-- COLORREF
----------------------------------------------------------------
type COLORREF = DWORD
-- | Pack red, green and blue components into a COLORREF, mirroring the
-- Win32 RGB macro.  The 'unsafePerformIO' is the code generator's way of
-- exposing a pure C macro; the underlying call has no side effects.
rgb :: BYTE -> BYTE -> BYTE -> COLORREF
rgb gc_arg1 gc_arg2 gc_arg3 =
    unsafePerformIO(
    case ( fromIntegral gc_arg1) of { arg1 ->
    case ( fromIntegral gc_arg2) of { arg2 ->
    case ( fromIntegral gc_arg3) of { arg3 ->
    prim_GDITypes_cpp_rgb arg1 arg2 arg3 >>= \ (res1) ->
    (return (res1))}}})
primitive prim_GDITypes_cpp_rgb :: Word32 -> Word32 -> Word32 -> IO (Word32)
-- | Extract the red component of a COLORREF (Win32 GetRValue macro).
getRValue :: COLORREF -> BYTE
getRValue arg1 =
    unsafePerformIO(
    prim_GDITypes_cpp_getRValue arg1 >>= \ (res1) ->
    let gc_res1 = ( fromIntegral (res1)) in
    (return (gc_res1)))
primitive prim_GDITypes_cpp_getRValue :: Word32 -> IO (Word32)
-- | Extract the green component of a COLORREF (Win32 GetGValue macro).
getGValue :: COLORREF -> BYTE
getGValue arg1 =
    unsafePerformIO(
    prim_GDITypes_cpp_getGValue arg1 >>= \ (res1) ->
    let gc_res1 = ( fromIntegral (res1)) in
    (return (gc_res1)))
primitive prim_GDITypes_cpp_getGValue :: Word32 -> IO (Word32)
-- | Extract the blue component of a COLORREF (Win32 GetBValue macro).
getBValue :: COLORREF -> BYTE
getBValue arg1 =
    unsafePerformIO(
    prim_GDITypes_cpp_getBValue arg1 >>= \ (res1) ->
    let gc_res1 = ( fromIntegral (res1)) in
    (return (gc_res1)))
primitive prim_GDITypes_cpp_getBValue :: Word32 -> IO (Word32)
----------------------------------------------------------------
-- Miscellaneous enumerations
----------------------------------------------------------------
type PolyFillMode = WORD
aLTERNATE :: PolyFillMode
aLTERNATE =
unsafePerformIO(
prim_GDITypes_cpp_aLTERNATE >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_aLTERNATE :: IO (Word32)
wINDING :: PolyFillMode
wINDING =
unsafePerformIO(
prim_GDITypes_cpp_wINDING >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_wINDING :: IO (Word32)
----------------------------------------------------------------
type ArcDirection = WORD
type MbArcDirection = Maybe ArcDirection
aD_COUNTERCLOCKWISE :: ArcDirection
aD_COUNTERCLOCKWISE =
unsafePerformIO(
prim_GDITypes_cpp_aD_COUNTERCLOCKWISE >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_aD_COUNTERCLOCKWISE :: IO (Word32)
aD_CLOCKWISE :: ArcDirection
aD_CLOCKWISE =
unsafePerformIO(
prim_GDITypes_cpp_aD_CLOCKWISE >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_aD_CLOCKWISE :: IO (Word32)
----------------------------------------------------------------
type GraphicsMode = DWORD
type MbGraphicsMode = Maybe GraphicsMode
gM_COMPATIBLE :: GraphicsMode
gM_COMPATIBLE =
unsafePerformIO(
prim_GDITypes_cpp_gM_COMPATIBLE >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_gM_COMPATIBLE :: IO (Word32)
gM_ADVANCED :: GraphicsMode
gM_ADVANCED =
unsafePerformIO(
prim_GDITypes_cpp_gM_ADVANCED >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_gM_ADVANCED :: IO (Word32)
----------------------------------------------------------------
type BackgroundMode = UINT
tRANSPARENT :: BackgroundMode
tRANSPARENT =
unsafePerformIO(
prim_GDITypes_cpp_tRANSPARENT >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tRANSPARENT :: IO (Word32)
oPAQUE :: BackgroundMode
oPAQUE =
unsafePerformIO(
prim_GDITypes_cpp_oPAQUE >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_oPAQUE :: IO (Word32)
----------------------------------------------------------------
type HatchStyle = WORD
hS_HORIZONTAL :: HatchStyle
hS_HORIZONTAL =
unsafePerformIO(
prim_GDITypes_cpp_hS_HORIZONTAL >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_hS_HORIZONTAL :: IO (Word32)
hS_VERTICAL :: HatchStyle
hS_VERTICAL =
unsafePerformIO(
prim_GDITypes_cpp_hS_VERTICAL >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_hS_VERTICAL :: IO (Word32)
hS_FDIAGONAL :: HatchStyle
hS_FDIAGONAL =
unsafePerformIO(
prim_GDITypes_cpp_hS_FDIAGONAL >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_hS_FDIAGONAL :: IO (Word32)
hS_BDIAGONAL :: HatchStyle
hS_BDIAGONAL =
unsafePerformIO(
prim_GDITypes_cpp_hS_BDIAGONAL >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_hS_BDIAGONAL :: IO (Word32)
hS_CROSS :: HatchStyle
hS_CROSS =
unsafePerformIO(
prim_GDITypes_cpp_hS_CROSS >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_hS_CROSS :: IO (Word32)
hS_DIAGCROSS :: HatchStyle
hS_DIAGCROSS =
unsafePerformIO(
prim_GDITypes_cpp_hS_DIAGCROSS >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_hS_DIAGCROSS :: IO (Word32)
----------------------------------------------------------------
type StretchBltMode = UINT
bLACKONWHITE :: StretchBltMode
bLACKONWHITE =
unsafePerformIO(
prim_GDITypes_cpp_bLACKONWHITE >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_bLACKONWHITE :: IO (Word32)
wHITEONBLACK :: StretchBltMode
wHITEONBLACK =
unsafePerformIO(
prim_GDITypes_cpp_wHITEONBLACK >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_wHITEONBLACK :: IO (Word32)
cOLORONCOLOR :: StretchBltMode
cOLORONCOLOR =
unsafePerformIO(
prim_GDITypes_cpp_cOLORONCOLOR >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_cOLORONCOLOR :: IO (Word32)
hALFTONE :: StretchBltMode
hALFTONE =
unsafePerformIO(
prim_GDITypes_cpp_hALFTONE >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_hALFTONE :: IO (Word32)
sTRETCH_ANDSCANS :: StretchBltMode
sTRETCH_ANDSCANS =
unsafePerformIO(
prim_GDITypes_cpp_sTRETCH_ANDSCANS >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_sTRETCH_ANDSCANS :: IO (Word32)
sTRETCH_ORSCANS :: StretchBltMode
sTRETCH_ORSCANS =
unsafePerformIO(
prim_GDITypes_cpp_sTRETCH_ORSCANS >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_sTRETCH_ORSCANS :: IO (Word32)
sTRETCH_DELETESCANS :: StretchBltMode
sTRETCH_DELETESCANS =
unsafePerformIO(
prim_GDITypes_cpp_sTRETCH_DELETESCANS >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_sTRETCH_DELETESCANS :: IO (Word32)
----------------------------------------------------------------
type TextAlignment = UINT
tA_NOUPDATECP :: TextAlignment
tA_NOUPDATECP =
unsafePerformIO(
prim_GDITypes_cpp_tA_NOUPDATECP >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_NOUPDATECP :: IO (Word32)
tA_UPDATECP :: TextAlignment
tA_UPDATECP =
unsafePerformIO(
prim_GDITypes_cpp_tA_UPDATECP >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_UPDATECP :: IO (Word32)
tA_LEFT :: TextAlignment
tA_LEFT =
unsafePerformIO(
prim_GDITypes_cpp_tA_LEFT >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_LEFT :: IO (Word32)
tA_RIGHT :: TextAlignment
tA_RIGHT =
unsafePerformIO(
prim_GDITypes_cpp_tA_RIGHT >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_RIGHT :: IO (Word32)
tA_CENTER :: TextAlignment
tA_CENTER =
unsafePerformIO(
prim_GDITypes_cpp_tA_CENTER >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_CENTER :: IO (Word32)
tA_TOP :: TextAlignment
tA_TOP =
unsafePerformIO(
prim_GDITypes_cpp_tA_TOP >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_TOP :: IO (Word32)
tA_BOTTOM :: TextAlignment
tA_BOTTOM =
unsafePerformIO(
prim_GDITypes_cpp_tA_BOTTOM >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_BOTTOM :: IO (Word32)
tA_BASELINE :: TextAlignment
tA_BASELINE =
unsafePerformIO(
prim_GDITypes_cpp_tA_BASELINE >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_tA_BASELINE :: IO (Word32)
----------------------------------------------------------------
type ClippingMode = UINT
rGN_AND :: ClippingMode
rGN_AND =
unsafePerformIO(
prim_GDITypes_cpp_rGN_AND >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_rGN_AND :: IO (Word32)
rGN_OR :: ClippingMode
rGN_OR =
unsafePerformIO(
prim_GDITypes_cpp_rGN_OR >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_rGN_OR :: IO (Word32)
rGN_XOR :: ClippingMode
rGN_XOR =
unsafePerformIO(
prim_GDITypes_cpp_rGN_XOR >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_rGN_XOR :: IO (Word32)
rGN_DIFF :: ClippingMode
rGN_DIFF =
unsafePerformIO(
prim_GDITypes_cpp_rGN_DIFF >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_rGN_DIFF :: IO (Word32)
rGN_COPY :: ClippingMode
rGN_COPY =
unsafePerformIO(
prim_GDITypes_cpp_rGN_COPY >>= \ (res1) ->
(return (res1)))
primitive prim_GDITypes_cpp_rGN_COPY :: IO (Word32)
----------------------------------------------------------------
type RegionType = WORD
eRROR :: RegionType
eRROR =
unsafePerformIO(
prim_GDITypes_cpp_eRROR >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_eRROR :: IO (Word32)
nULLREGION :: RegionType
nULLREGION =
unsafePerformIO(
prim_GDITypes_cpp_nULLREGION >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_nULLREGION :: IO (Word32)
sIMPLEREGION :: RegionType
sIMPLEREGION =
unsafePerformIO(
prim_GDITypes_cpp_sIMPLEREGION >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_sIMPLEREGION :: IO (Word32)
cOMPLEXREGION :: RegionType
cOMPLEXREGION =
unsafePerformIO(
prim_GDITypes_cpp_cOMPLEXREGION >>= \ (res1) ->
let gc_res1 = ( fromIntegral (res1)) in
(return (gc_res1)))
primitive prim_GDITypes_cpp_cOMPLEXREGION :: IO (Word32)
----------------------------------------------------------------
-- End
----------------------------------------------------------------
needPrims_hugs 2
| OS2World/DEV-UTIL-HUGS | libraries/win32/GDITypes.hs | bsd-3-clause | 14,607 | 104 | 21 | 2,547 | 3,804 | 2,065 | 1,739 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TupleSections #-}
-- |
-- Module: $HEADER$
-- Description: TODO
-- Copyright: (c) 2016 Peter Trško
-- License: BSD3
--
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- TODO
module Data.DHT.DKS.Internal
where
import Control.Concurrent (ThreadId)
import Control.Exception (throwIO)
import Control.Monad (Monad((>>=)), (>>), forever, return)
import Control.Monad.IO.Class (liftIO)
import Data.Bool (otherwise)
import Data.Either (Either(Left, Right))
import Data.Eq (Eq((==)))
import Data.Function (($), (.))
import Data.Functor (Functor(fmap), {-(<$), -}(<$>))
import Data.Maybe (Maybe(Nothing), maybe)
import System.IO (IO)
import Text.Show (Show(showsPrec), showChar, shows, showString)
import Control.Concurrent.Chan.Unagi
( InChan
, newChan
, readChan
, writeChan
)
import Data.DHT (DhtKey, Encoding)
import Data.LogStr.Formatting ((%), shown)
import System.Lumberjack.Backend (pushLogStrLn)
import Data.DHT.DKS.Internal.Monad
( DksM
, DksMonadEnv(DksMonadEnv)
, dksState
, hash
, logf
, mkBoxedThreadState
-- , registerOnInsertDoneCallback
, registerOnJoinCallback
, registerOnLeaveCallback
-- , registerOnLookupResultCallback
, runDksM
, send_
)
import qualified Data.DHT.DKS.Internal.Monad as DksMonadEnv
( DksMonadEnv
( _logger
, _mutableState
, _self
, _send
, _yield
)
)
import Data.DHT.DKS.Internal.Operation
( DksOperation
( JoinOp
, LeaveOp
, LookupOp
, InsertOp
, ProcessMessageOp
, GetStateOp
)
, OnDone
, OnJoin
, OnLeave
, OnResult
, handleGrantLeave
, handleJoin
, handleJoinDone
, handleJoinPoint
, handleJoinRequest
, handleJoinRetry
, handleLeave
, handleLeaveDone
, handleLeavePoint
, handleLeaveRequest
, handleLeaveRetry
, handleNewSuccessor
, handleNewSuccessorAck
, handleUpdateSuccessor
, handleUpdateSuccessorAck
)
import Data.DHT.DKS.Type.State (DksState)
import Data.DHT.DKS.Type.EVar
( EVarIO
, failure
, success
)
import Data.DHT.DKS.Type.Hash (DksHash)
import Data.DHT.DKS.Type.Message
( DksMessage(DksMessage, _body, _header)
, DksMessageBody
( GrantLeaveBody
, JoinDoneBody
, JoinPointBody
, JoinRequestBody
, JoinRetryBody
, LeaveDoneBody
, LeavePointBody
, LeaveRequestBody
, LeaveRetryBody
, NewSuccessorAckBody
, NewSuccessorBody
, UpdateSuccessorAckBody
, UpdateSuccessorBody
)
, DksMessageHeader(_from, _to)
)
import Data.DHT.DKS.Type.MessageChannel
( DksMessageChannel
( registerReceiveMessage
, sendMessage
)
)
import Data.DHT.DKS.Type.Params
( DksParams
( DksParams
, _discovery
, _logging
, _runThread
, _singleton
, _yield
)
)
-- {{{ Handle -----------------------------------------------------------------
-- | Opaque handle to one local DKS node.  Operations are not executed
-- directly by callers; they are serialised through '_sendOp' into the
-- node's single main processing thread (see 'newDksHandle').
data DksHandle = DksHandle
    { _options :: !DksParams
      -- ^ Configuration this node was created with.
    , _self :: !DksHash
      -- ^ Identifier (hash) of this node in the overlay.
    , _mainThread :: !ThreadId
      -- ^ Thread running the node's main operation loop.
    , _sendOp :: !(DksOperation -> IO ())
      -- ^ Enqueue an operation for the main loop to process.
    }
-- | Render only the node identifier; the callbacks and thread id carried
-- by the handle have no useful textual form.
instance Show DksHandle where
    showsPrec _ h = prefix . shows (_self h) . close
      where
        prefix = showString "DhtHandle{implementation = DKS, self = "
        close = showChar '}'
-- | Smart constructor for 'DksHandle'.
--
-- Pure record assembly: the handle's '_sendOp' simply writes into the
-- supplied unagi 'InChan'; the caller is responsible for having started
-- the thread that drains the matching out-channel.
mkDksHandle
    :: DksParams          -- ^ Node configuration, stored for later queries.
    -> DksHash            -- ^ This node's overlay identifier.
    -> InChan DksOperation -- ^ Write end of the operation queue.
    -> ThreadId           -- ^ Main-loop thread draining the queue.
    -> DksHandle
mkDksHandle opts self inch mainTid = DksHandle
    { _options = opts
    , _self = self
    , _mainThread = mainTid
    , _sendOp = writeChan inch
    }
-- | Create a new DKS node: spin up its main processing thread, wire it to
-- an operation queue, and register the node as a receiver on the message
-- channel so incoming 'DksMessage's are fed into the same queue.
newDksHandle :: DksMessageChannel chan => chan -> DksParams -> DksHash -> IO DksHandle
newDksHandle msgChan opts self = do
    (inChan, outChan) <- newChan
    env <- mkDksMonadEnv <$> mkBoxedThreadState opts
    let DksParams{_runThread = run} = opts
    h <- mkDksHandle opts self inChan <$> run (mainLoop outChan env)
    registerReceiveMessage msgChan self (receiveDksMessage h)
    return h
  where
    -- Drain the queue forever, handing each operation to 'threadMain'.
    -- TODO: Finalizer; exception handling.
    mainLoop outChan env = forever $ do
        readChan outChan >>= runHandler env (threadMain self)
    -- If exception makes its way up here, then it should crash
    -- the whole node.  (Uncaught errors in the DksM stack are rethrown
    -- via 'throwIO'; the log line's "Processig" typo is in a runtime
    -- string and is preserved here.)
    runHandler env f op = runDksM throwIO env $ do
        logf (hash % ": Processig DKS operation: " % shown) self op
        f op
    -- Adapter: a message from the channel becomes a ProcessMessageOp on
    -- the queue; enqueueing itself always reports success.
    receiveDksMessage :: DksHandle -> DksMessage -> EVarIO ()
    receiveDksMessage h = fmap success . _sendOp h . ProcessMessageOp . success
    -- Environment seen by every operation handler run in DksM.
    mkDksMonadEnv s = DksMonadEnv
        { DksMonadEnv._self = self
        , DksMonadEnv._logger = pushLogStrLn (_logging opts)
        , DksMonadEnv._send = sendMessage msgChan
        , DksMonadEnv._yield = _yield opts
        , DksMonadEnv._mutableState = s
        }
-- | Single-step handler executed by the node's main loop for each queued
-- 'DksOperation'.  Local operations register their callbacks and invoke
-- the matching protocol handler; received messages are either dispatched
-- by body type (when addressed to this node) or forwarded unchanged.
threadMain :: DksHash -> DksOperation -> DksM ()
threadMain self = \case
    JoinOp onJoin possiblyEntryNode ->
        registerOnJoinCallback onJoin >> handleJoin possiblyEntryNode
    LeaveOp possiblyOnLeave -> do
        maybe (return ()) registerOnLeaveCallback possiblyOnLeave
        handleLeave
    -- Lookup/insert are stubs for now; the intended wiring is kept in the
    -- commented-out lines below.
    LookupOp _onResult _key ->
        -- registerOnLookupResultCallback key onResult >> handleLookup key
        logf (hash % ": LookupOp: Not implemented.") self
    InsertOp _onDone _key _encoding ->
        -- registerOnInsertDoneCallback key onDone >> handleInsert key encoding
        logf (hash % ": InsertOp: Not implemented.") self
    ProcessMessageOp (Right msg@DksMessage{_header = hdr})
        | _to hdr == self -> do
            logf (hash % ": Processing received message: " % shown) self msg
            processMessage (_from hdr) (_body msg)
        | otherwise -> do
            -- Not addressed to us: forward along the overlay.
            logf (hash % ": Message not for us; resending it: " % shown) self msg
            send_ msg
    -- A Left (failed reception) is silently dropped.
    ProcessMessageOp _ -> return ()
    GetStateOp onState -> dksState >>= liftIO . onState
  where
    -- Dispatch one message body to its protocol handler; only the
    -- join-request, leave-retry and grant-leave handlers need the sender.
    processMessage from = \case
        JoinRequestBody msg -> handleJoinRequest from msg
        JoinRetryBody msg -> handleJoinRetry msg
        JoinPointBody msg -> handleJoinPoint msg
        NewSuccessorBody msg -> handleNewSuccessor msg
        NewSuccessorAckBody msg -> handleNewSuccessorAck msg
        JoinDoneBody msg -> handleJoinDone msg
        LeaveRequestBody msg -> handleLeaveRequest msg
        LeaveRetryBody msg -> handleLeaveRetry from msg
        GrantLeaveBody msg -> handleGrantLeave from msg
        LeavePointBody msg -> handleLeavePoint msg
        UpdateSuccessorBody msg -> handleUpdateSuccessor msg
        UpdateSuccessorAckBody msg -> handleUpdateSuccessorAck msg
        LeaveDoneBody msg -> handleLeaveDone msg
-- }}} Handle -----------------------------------------------------------------
-- {{{ DHT Operations ---------------------------------------------------------
-- | Join the overlay.  A singleton node skips discovery and joins as the
-- first member; otherwise the configured discovery action supplies the
-- entry node.  Discovery failure is reported straight to the callback
-- without ever enqueueing a join operation.
dksJoin :: OnJoin -> DksHandle -> IO ()
dksJoin callback handle@DksHandle{_options = opts} = do
    entryNode <- discoverEntryNode
    case entryNode of
        Left e -> callback (failure e)
        Right entry -> _sendOp handle (JoinOp callback entry)
  where
    discoverEntryNode
        | _singleton opts = return (Right Nothing)
        | otherwise = _discovery opts
-- | Leave the overlay; the optional callback fires when the protocol
-- completes.
dksLeave :: Maybe OnLeave -> DksHandle -> IO ()
dksLeave mbOnLeave handle = _sendOp handle (LeaveOp mbOnLeave)

-- | Look up a key; the result is delivered to the callback.
dksLookup :: OnResult Encoding -> DksHandle -> DhtKey -> IO ()
dksLookup onResult handle key = _sendOp handle (LookupOp onResult key)

-- | Insert a value under a key; the optional callback fires when done.
dksInsert :: Maybe OnDone -> DksHandle -> DhtKey -> Encoding -> IO ()
dksInsert mbOnDone handle key value = _sendOp handle (InsertOp mbOnDone key value)

-- | Snapshot the node state and hand it to the callback.
dksGetState :: (DksState -> IO ()) -> DksHandle -> IO ()
dksGetState onState handle = _sendOp handle (GetStateOp onState)
-- }}} DHT Operations ---------------------------------------------------------
| FPBrno/dht-dks | src/Data/DHT/DKS/Internal.hs | bsd-3-clause | 8,015 | 0 | 14 | 1,961 | 1,837 | 1,023 | 814 | 206 | 19 |
--------------------------------------------------------------
-- Module for demonstrating the work of the library
----------------------------------------------------------------
module WebUI.Demo.LibDemo
( testDemo
) where
-- Импорт модулей
import qualified Text.Blaze.Html5 as H
import Text.Hamlet
import Text.Julius
import WebUI.HFitUI
import WebUI.Scripts.JavaScript.HJavaScript
import WebUI.Themes.SolarizedUITheme
-- | Demo test: build the demo widget tree with 'testOne' and print the
-- resulting markup to stdout.  (@print@ replaces the former
-- @putStrLn $ show res@ -- same behaviour, standard idiom.)
testDemo :: IO ()
testDemo = testOne >>= print
-- | Build the demo UI: a shell with a scrollable root panel plus two
-- script widgets, one of which is generated from the embedded
-- JavaScript-building DSL below.
testOne :: UI H.Html
testOne =
    widgetLayoutUI $ do
        -- Basic widgets
        shell <- shellExtUI "TestGL (Dyn)" []
        -- NOTE(review): idRoot is bound but never used below -- confirm
        -- whether it is needed or can be discarded.
        (root, idRoot, _) <- expandWUI $ wuiPanel <#> boundsBRDNum (44, 0, 0, 0)
            <#> overflowHiddenX
            <#> overflowAutoY
        scriptsOutside <- wuiScriptSrc []
        -- Generate a <script> body through the HJavaScript DSL; each DSL
        -- statement below emits one JS statement into the script text.
        jsScript <- wuiScriptTextJS $ runScript defaultHBConfig {hbc_entryLine = "\n"} $ do
            var_b <- newVarInt "b" 10
            var_c <- newVarInt "c" 5
            var_cm <- newVar "cm" $ mathACos var_c
            -- (//) emits a single-line JS comment into the output.
            (//) "Test variable"
            var_testPer <- newVar "testPer" HJsEmpty
            var_st <- newVarStr "st" "TestSt"
            flag_1 <- newVarBool "flag_1" True
            var_res <- newVar "res" HJsNull
            -- NOTE(review): the repeated `var_res <-` bindings shadow one
            -- another on the Haskell side while emitting successive JS
            -- assignments; apparently intentional for the demo.
            var_res <- eql var_res $ (var_b + var_c) * var_st / var_b
            var_res <- eql var_res var_b
            -- Splice in raw JS from a template file and an inline julius
            -- quasiquote.
            hjs $(juliusFile "templates/test/TestScript.julius")
            hjs $[julius|
function veryTest (){
console.log("VeryTest");
}
|]
            -- (***) emits a multi-line JS comment; (+-+) joins its lines.
            (***) $ "The first multi-line comment" +-+
                "before the myFunc function"
            vatFn_myFunc <- functJS "myFunc" [] $ do
                return endH
            call vatFn_myFunc []
            var_res_myFunc <- eqlMT (varJS "res_myFunc") $ call vatFn_myFunc []
            -- Anonymous function assigned to a variable, then called both
            -- plainly and via `new`.
            var_testFunc <- eqlMT (varJS "testFunc") $ functJS "" [] $ do
                var_bbb <- newVarInt "bbb" 23
                returnJS thisJS
            call var_testFunc []
            var_res_testFunc <- eqlMT (varJS "res_testFunc") $ call var_testFunc [5, var_b, valStr "qwe", valInt 19, valBool True]
            var_res_new_testFunc <- eqlMT (varJS "res_new_testFunc") $ newJS $ call var_testFunc [5, var_b, valStr "qwe", valInt 19, valBool True]
            jsFinish
        shell `addWUIs` [ root
                        , scriptsOutside
                        , jsScript
                        ]
| iqsf/HFitUI | src/WebUI/Demo/LibDemo.hs | bsd-3-clause | 3,673 | 0 | 17 | 1,956 | 623 | 306 | 317 | -1 | -1 |
{-# LINE 1 "Data.Char.hs" #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Char
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : stable
-- Portability : portable
--
-- The Char type and associated operations.
--
-----------------------------------------------------------------------------
module Data.Char
(
Char
-- * Character classification
-- | Unicode characters are divided into letters, numbers, marks,
-- punctuation, symbols, separators (including spaces) and others
-- (including control characters).
, isControl, isSpace
, isLower, isUpper, isAlpha, isAlphaNum, isPrint
, isDigit, isOctDigit, isHexDigit
, isLetter, isMark, isNumber, isPunctuation, isSymbol, isSeparator
-- ** Subranges
, isAscii, isLatin1
, isAsciiUpper, isAsciiLower
-- ** Unicode general categories
, GeneralCategory(..), generalCategory
-- * Case conversion
, toUpper, toLower, toTitle
-- * Single digit characters
, digitToInt
, intToDigit
-- * Numeric representations
, ord
, chr
-- * String representations
, showLitChar
, lexLitChar
, readLitChar
) where
import GHC.Base
import GHC.Char
import GHC.Real (fromIntegral)
import GHC.Show
import GHC.Read (readLitChar, lexLitChar)
import GHC.Unicode
import GHC.Num
-- $setup
-- Allow the use of Prelude in doctests.
-- >>> import Prelude
-- | Convert a single digit 'Char' to the corresponding 'Int'.  The
-- argument must satisfy 'isHexDigit'; both upper- and lower-case
-- hexadecimal digits are accepted (@\'0\'@..@\'9\'@, @\'a\'@..@\'f\'@,
-- @\'A\'@..@\'F\'@ map to @0..15@).  Any other character raises an
-- error.
--
-- >>> map digitToInt ['0'..'9']
-- [0,1,2,3,4,5,6,7,8,9]
-- >>> map digitToInt ['a'..'f']
-- [10,11,12,13,14,15]
-- >>> map digitToInt ['A'..'F']
-- [10,11,12,13,14,15]
digitToInt :: Char -> Int
digitToInt c
    | dec >= 0 && dec <= 9 = dec
    | hexl >= 0 && hexl <= 5 = hexl + 10
    | hexu >= 0 && hexu <= 5 = hexu + 10
    | otherwise = errorWithoutStackTrace ("Char.digitToInt: not a digit " ++ show c)
  where
    -- Offsets from the start of each accepted digit range; a candidate is
    -- valid exactly when its offset falls inside 0..9 or 0..5.
    dec = ord c - ord '0'
    hexl = ord c - ord 'a'
    hexu = ord c - ord 'A'
-- derived character classifiers
-- | Selects alphabetic Unicode characters (lower-case, upper-case and
-- title-case letters, plus letters of caseless scripts and modifier
-- letters).  Equivalent to 'Data.Char.isAlpha'.
--
-- Returns 'True' exactly when the character's 'GeneralCategory' is one
-- of 'UppercaseLetter', 'LowercaseLetter', 'TitlecaseLetter',
-- 'ModifierLetter' or 'OtherLetter', as defined by the Unicode
-- Character Database.
--
-- >>> isLetter 'a'
-- True
-- >>> isLetter '0'
-- False
isLetter :: Char -> Bool
isLetter c =
    gc == UppercaseLetter
        || gc == LowercaseLetter
        || gc == TitlecaseLetter
        || gc == ModifierLetter
        || gc == OtherLetter
  where
    gc = generalCategory c
-- | Selects Unicode mark characters, for example accents and the like,
-- which combine with preceding characters.
--
-- Returns 'True' exactly when the character's 'GeneralCategory' is one
-- of 'NonSpacingMark', 'SpacingCombiningMark' or 'EnclosingMark', as
-- defined by the Unicode Character Database.
--
-- >>> isMark 'a'
-- False
isMark :: Char -> Bool
isMark c =
    gc == NonSpacingMark
        || gc == SpacingCombiningMark
        || gc == EnclosingMark
  where
    gc = generalCategory c
-- | Selects Unicode numeric characters, including digits from various
-- scripts, Roman numerals, et cetera.
--
-- Returns 'True' exactly when the character's 'GeneralCategory' is one
-- of 'DecimalNumber', 'LetterNumber' or 'OtherNumber', as defined by
-- the Unicode Character Database.
--
-- >>> isNumber '3'
-- True
-- >>> isNumber 'a'
-- False
isNumber :: Char -> Bool
isNumber c =
    gc == DecimalNumber
        || gc == LetterNumber
        || gc == OtherNumber
  where
    gc = generalCategory c
-- | Selects Unicode space and separator characters.
--
-- Returns 'True' exactly when the character's 'GeneralCategory' is one
-- of 'Space', 'LineSeparator' or 'ParagraphSeparator', as defined by
-- the Unicode Character Database.  Note that newline and tab are
-- control characters, not separators.
--
-- >>> isSeparator ' '
-- True
-- >>> isSeparator '\n'
-- False
isSeparator :: Char -> Bool
isSeparator c =
    gc == Space
        || gc == LineSeparator
        || gc == ParagraphSeparator
  where
    gc = generalCategory c
| phischu/fragnix | builtins/base/Data.Char.hs | bsd-3-clause | 7,563 | 0 | 10 | 1,633 | 732 | 493 | 239 | 62 | 6 |
{-# LANGUAGE DeriveGeneric #-}
module Network.Telegraphs.Video where
import Data.Aeson
import GHC.Generics
import Network.Telegraphs.PhotoSize
-- | A video file record.
--
-- NOTE: the field names double as the JSON object keys, because the
-- 'FromJSON'/'ToJSON' instances are derived generically; renaming a
-- field would change the wire format.  The shape mirrors the Telegram
-- Bot API @Video@ object -- TODO confirm against the API spec.
data Video =
    Video { file_id :: String -- ^ Unique identifier for this file.
          , width :: Integer -- ^ Video width.
          , height :: Integer -- ^ Video height.
          , duration :: Integer -- ^ Duration (presumably seconds -- verify).
          , thumb :: Maybe PhotoSize -- ^ Optional thumbnail.
          , mime_type :: Maybe String -- ^ Optional MIME type.
          , file_size :: Maybe Integer -- ^ Optional file size.
          , caption :: Maybe String } -- ^ Optional caption text.
    deriving (Read, Show, Generic)
instance FromJSON Video
instance ToJSON Video
| l-a-i-n/Telegraphs | src/Network/Telegraphs/Video.hs | mit | 512 | 0 | 9 | 144 | 122 | 71 | 51 | 17 | 0 |
module Cardano.Wallet.API.V1.Handlers.Info (handlers) where
import Universum
import Servant
import Cardano.Wallet.API.Response (APIResponse, single)
import qualified Cardano.Wallet.API.V1.Info as Info
import Cardano.Wallet.API.V1.Types (ForceNtpCheck, NodeInfo)
import Cardano.Wallet.WalletLayer (ActiveWalletLayer)
import qualified Cardano.Wallet.WalletLayer as WalletLayer
-- | Servant server for the node-info API; 'getNodeInfo' is currently the
-- only endpoint.
handlers :: ActiveWalletLayer IO -> ServerT Info.API Handler
handlers = getNodeInfo
-- | Query the underlying wallet layer for node information (optionally
-- forcing an NTP check) and wrap the result as a single-item API
-- response.
getNodeInfo
    :: ActiveWalletLayer IO
    -> ForceNtpCheck
    -> Handler (APIResponse NodeInfo)
getNodeInfo w forceNtp =
    liftIO (fmap single (WalletLayer.getNodeInfo w forceNtp))
| input-output-hk/pos-haskell-prototype | wallet/src/Cardano/Wallet/API/V1/Handlers/Info.hs | mit | 697 | 0 | 9 | 132 | 157 | 95 | 62 | 16 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Unison.DocView where
import Control.Comonad.Cofree (Cofree(..), unwrap) -- (:<)
import Control.Monad.IO.Class
import Data.Maybe (fromMaybe)
import Data.Semigroup ((<>))
import Data.Text (Text)
import Data.Word (Word)
import Reflex.Dom
import Unison.Doc (Box, Doc, Layout)
import Unison.Dom (Dom)
import Unison.Dimensions (X(..), Y(..), Width(..), Height(..))
import Unison.Path (Path)
import qualified Data.Text as Text
import qualified GHCJS.DOM.Document as Document
import qualified GHCJS.DOM.Element as Element
import qualified Unison.Dimensions as Dimensions
import qualified Unison.Doc as Doc
import qualified Unison.Dom as Dom
import qualified Unison.HTML as HTML
import qualified Unison.UI as UI
-- | Spatial queries over a laid-out document: translate screen
-- coordinates and regions to document paths and back.  Each field is a
-- closure over the computed layout (they delegate to 'Doc.at',
-- 'Doc.contains', 'Doc.intersects' and 'Doc.regions' in 'widget').
data DocView p = DocView
  { at :: (X,Y) -> [p]                        -- ^ paths at a point
  , contains :: (X,Y,Width,Height) -> [p]     -- ^ paths in a region (per 'Doc.contains')
  , intersects :: (X,Y,Width,Height) -> [p]   -- ^ paths overlapping a region
  , regions :: [p] -> [(X,Y,Width,Height)] }  -- ^ regions for the given paths
-- | Render a 'Doc' into the DOM at the given available width, returning
-- the created element plus a 'DocView' for hit-testing against the
-- computed layout.  Layout measures each leaf by inserting it into a
-- temporary element and asking the browser for its preferred size.
widget :: (Path p, Eq p, MonadWidget t m) => Width -> Doc Text p -> m (El t, DocView p)
widget available d =
  let
    -- NOTE(review): the replacement string appears to be a non-breaking
    -- space (U+00A0); confirm it was not mangled to a plain space.
    leaf txt = Text.replace " " " " txt
    width (_, (w,_)) = w
    -- measure every leaf, lay the doc out at the available width, and
    -- record bounding boxes for the DocView queries
    box = Doc.bounds snd . Doc.box . Doc.layout width available <$> Doc.etraverse layout d
    layout txt = do
      node <- runDom (Dom.el "div" [("class", "docwidget")] [Dom.raw (leaf txt)])
      -- todo, this method of computing preferred dimensions seems pretty slow,
      -- try just using canvas measureText function, see
      -- http://stackoverflow.com/questions/118241/calculate-text-width-with-javascript/21015393#21015393
      (w,h) <- liftIO $ UI.preferredDimensions (Element.castToElement node)
      pure (txt, (w,h))
    view box = DocView (Doc.at box) (Doc.contains box) (Doc.intersects box) (Doc.regions box)
    -- turn the abstract box tree into nested flexbox divs
    interpret b = Dom.el "div" [("class","docwidget")] [dom]
      where
        dom = fromMaybe (HTML.hbox []) . Doc.einterpret go $ b'
        b' = Doc.emap (\(txt, (Width w, Height h)) -> Just $ Dom.el "div" (fixDims w h) [Dom.raw (leaf txt)])
                      b -- (Doc.rewrite collapse b)
        -- pin each leaf to its measured pixel dimensions
        fixDims w h = [( "style","width:" <> (Text.pack . show $ w) <> "px;height:" <>
                                 (Text.pack . show $ h) <> "px;")]
        go b = case b of
          Doc.BEmpty -> Nothing
          Doc.BEmbed dom -> dom
          Doc.BFlow dir bs -> case [ b | Just b <- bs ] of
            [] -> Nothing
            bs -> Just $ flexbox dir bs
            where flexbox Doc.Horizontal = HTML.hbox
                  flexbox Doc.Vertical = HTML.vbox
  in do
    b <- box
    node <- runDom $ interpret (Doc.flatten b)
    e <- el "div" $ unsafePlaceElement (Dom.unsafeAsHTMLElement node)
    pure $ (e, view b)
-- | Execute a 'Dom' action against the document of the current
-- reflex widget context.
runDom :: MonadWidget t m => Dom a -> m a
runDom action = do
  document <- askDocument
  liftIO (Dom.run action (Document.toDocument document))
| CGenie/platform | editor/src/Unison/DocView.hs | mit | 2,802 | 0 | 20 | 627 | 1,064 | 587 | 477 | 61 | 5 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module ErrorMessages
(
tests
) where
import Prelude.Compat
import Data.Aeson (FromJSON(..), Value, json)
import Data.Aeson.Types (Parser)
import Data.Aeson.Parser (eitherDecodeWith)
import Data.Aeson.Internal (formatError, iparse)
import Data.Algorithm.Diff (PolyDiff (..), getGroupedDiff)
import Data.Proxy (Proxy(..))
import Data.Semigroup ((<>))
import Data.Sequence (Seq)
import Instances ()
import Numeric.Natural (Natural)
import Test.Tasty (TestTree, TestName)
import Test.Tasty.Golden.Advanced (goldenTest)
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.HashMap.Strict as HM
import Encoders
import Types
-- | Golden tests comparing the generated error messages against the
-- expected-output files, exercising the Generic and Template Haskell
-- derivations separately.
tests :: [TestTree]
tests =
  [ aesonGoldenTest "simple" "tests/golden/simple.expected" output
  , aesonGoldenTest "generic" "tests/golden/generic.expected" (outputGeneric G)
  -- fix: this entry previously reused the name "generic"; it checks the
  -- Template Haskell derivation against th.expected, so name it "th"
  , aesonGoldenTest "th" "tests/golden/th.expected" (outputGeneric TH)
  ]
-- | Invalid JSON inputs for stock 'FromJSON' instances; the resulting
-- error messages (or parses) are compared against simple.expected.
output :: Output
output = concat
  [ testFor "Int" (Proxy :: Proxy Int)
    [ "\"\""
    , "[]"
    , "{}"
    , "null"
    ]
  , testFor "Integer" (Proxy :: Proxy Integer)
    [ "44.44"
    ]
  , testFor "Natural" (Proxy :: Proxy Natural)
    [ "44.44"
    , "-50"
    ]
  , testFor "String" (Proxy :: Proxy String)
    [ "1"
    , "[]"
    , "{}"
    , "null"
    ]
  , testFor "HashMap" (Proxy :: Proxy (HM.HashMap String Int))
    [ "\"\""
    , "[]"
    ]
  -- issue #356
  , testFor "Either" (Proxy :: Proxy (Int, Either (Int, Bool) ()))
    [ "[1,{\"Left\":[2,3]}]"
    ]
  -- issue #358
  , testFor "Seq" (Proxy :: Proxy (Seq Int))
    [ "[0,1,true]"
    ]
  ]
-- | Which derivation mechanism to exercise: Template Haskell or Generic.
data Choice = TH | G
-- | Invalid (and some valid) inputs for the derived parsers in
-- "Encoders"; 'select' picks the TH or Generic variant of each parser.
outputGeneric :: Choice -> Output
outputGeneric choice = concat
  [ testWith "OneConstructor"
      (select
        thOneConstructorParseJSONDefault
        gOneConstructorParseJSONDefault)
    [ "\"X\""
    , "[0]"
    ]
  , testWith "Nullary"
      (select
        thNullaryParseJSONString
        gNullaryParseJSONString)
    [ "\"X\""
    , "[]"
    ]
  , testWithSomeType "SomeType (tagged)"
      (select
        thSomeTypeParseJSONTaggedObject
        gSomeTypeParseJSONTaggedObject)
    [ "{\"tag\": \"unary\", \"contents\": true}"
    , "{\"tag\": \"unary\"}"
    , "{\"tag\": \"record\"}"
    , "{\"tag\": \"record\", \"testone\": true, \"testtwo\": null, \"testthree\": null}"
    , "{\"tag\": \"X\"}"
    , "{}"
    , "[]"
    ]
  , testWithSomeType "SomeType (single-field)"
      (select
        thSomeTypeParseJSONObjectWithSingleField
        gSomeTypeParseJSONObjectWithSingleField)
    [ "{\"unary\": {}}"
    , "{\"unary\": []}"
    , "{\"X\": []}"
    , "{\"record\": {}, \"W\":{}}"
    , "{}"
    , "[]"
    , "{\"unary\""
    , "{\"unary\":"
    , "{\"unary\":1"
    ]
  , testWithSomeType "SomeType (two-element array)"
      (select
        thSomeTypeParseJSON2ElemArray
        gSomeTypeParseJSON2ElemArray)
    [ "[\"unary\", true]"
    , "[\"record\", null]"
    , "[\"X\", 0]"
    , "[null, 0]"
    , "[]"
    , "{}"
    , "[1"
    , "[1,"
    ]
  , testWithSomeType "SomeType (reject unknown fields)"
      (select
        thSomeTypeParseJSONRejectUnknownFields
        gSomeTypeParseJSONRejectUnknownFields)
    [ "{\"tag\": \"record\", \"testone\": 1.0, \"testZero\": 1}"
    , "{\"testZero\": 1}"
    , "{\"tag\": \"record\", \"testone\": true, \"testtwo\": null, \"testthree\": null}"
    ]
  , testWithFoo "Foo (reject unknown fields)"
      (select
        thFooParseJSONRejectUnknownFields
        gFooParseJSONRejectUnknownFields)
    [ "{\"tag\": \"foo\"}"
    ]
  , testWithFoo "Foo (reject unknown fields, tagged single)"
      (select
        thFooParseJSONRejectUnknownFieldsTagged
        gFooParseJSONRejectUnknownFieldsTagged)
    [ "{\"tag\": \"foo\", \"unknownField\": 0}"
    ]
  , testWith "EitherTextInt"
      (select
        thEitherTextIntParseJSONUntaggedValue
        gEitherTextIntParseJSONUntaggedValue)
    [ "\"X\""
    , "[]"
    ]
  , testWith "Product2 Int Bool"
      (select
        thProduct2ParseJSON
        gProduct2ParseJSON)
    [ "[1, null]"
    , "[]"
    , "{}"
    ]
  ]
  where
    -- pick the TH or Generic parser according to 'choice'
    select a b = case choice of
      TH -> a
      G  -> b
-- Test infrastructure
-- | A test's textual output, one element per line.
type Output = [String]

-- | Lift a single line into an 'Output'.
outputLine :: String -> Output
outputLine line = [line]
-- | A golden test: the produced 'Output' is compared line-by-line
-- against the reference file; on mismatch a unified-style diff is
-- reported, and --accept updates the reference file.
aesonGoldenTest :: TestName -> FilePath -> Output -> TestTree
aesonGoldenTest name ref out = goldenTest name (L.readFile ref) act cmp upd
  where
    act = pure (L.pack (unlines out))
    upd = L.writeFile ref
    -- Nothing means "equal"; otherwise render a grouped diff
    cmp x y | x == y = return Nothing
    cmp x y = return $ Just $ unlines $
        concatMap f (getGroupedDiff (L.lines x) (L.lines y))
      where
        f (First xs)  = map (cons3 '-' . L.unpack) xs
        f (Second ys) = map (cons3 '+' . L.unpack) ys
        -- we print unchanged lines too. It shouldn't be a problem while we have
        -- reasonably small examples
        f (Both xs _) = map (cons3 ' ' . L.unpack) xs
        -- we add three characters, so the changed lines are easier to spot
        cons3 c cs = c : c : c : ' ' : cs
-- | Run the given parser over each input document, emitting the
-- formatted error on failure or the 'show'n value on success,
-- preceded by a header line naming the test group.
testWith :: Show a => String -> (Value -> Parser a) -> [L.ByteString] -> Output
testWith name parser ts =
  outputLine name <>
  foldMap (\s ->
    case eitherDecodeWith json (iparse parser) s of
      Left err -> outputLine $ uncurry formatError err
      Right a  -> outputLine $ show a) ts
-- | 'testWith' specialised to the stock 'parseJSON' of the proxied type.
testFor :: forall a proxy. (FromJSON a, Show a)
        => String -> proxy a -> [L.ByteString] -> Output
testFor name _ = testWith name (parseJSON :: Value -> Parser a)

-- | Monomorphic aliases of 'testWith'; they only pin the parser's
-- result type so the derived parsers above need no annotations.
testWithSomeType :: String -> (Value -> Parser (SomeType Int)) -> [L.ByteString] -> Output
testWithSomeType = testWith

testWithFoo :: String -> (Value -> Parser Foo) -> [L.ByteString] -> Output
testWithFoo = testWith
| dmjio/aeson | tests/ErrorMessages.hs | bsd-3-clause | 5,915 | 0 | 13 | 1,568 | 1,410 | 776 | 634 | 164 | 4 |
-- | Possibly convenient facilities for constructing constants.
module Futhark.Representation.AST.Attributes.Constants
(
IsValue (..)
, constant
, intConst
, floatConst
)
where
import Futhark.Representation.AST.Syntax.Core
-- | If a Haskell type is an instance of 'IsValue', it means that a
-- value of that type can be converted to a Futhark 'PrimValue'.
-- This is intended to cut down on boilerplate when writing compiler
-- code - for example, you'll quickly grow tired of writing @Constant
-- (LogVal True) loc@.
class IsValue a where
  value :: a -> PrimValue

-- Plain 'Int' is mapped to the default 32-bit integer type.
instance IsValue Int where
  value = IntValue . Int32Value . fromIntegral

instance IsValue Int8 where
  value = IntValue . Int8Value

instance IsValue Int16 where
  value = IntValue . Int16Value

instance IsValue Int32 where
  value = IntValue . Int32Value

instance IsValue Int64 where
  value = IntValue . Int64Value

instance IsValue Double where
  value = FloatValue . Float64Value

instance IsValue Float where
  value = FloatValue . Float32Value

instance IsValue Bool where
  value = BoolValue

-- Already-constructed values pass through unchanged.
instance IsValue PrimValue where
  value = id

instance IsValue IntValue where
  value = IntValue

instance IsValue FloatValue where
  value = FloatValue
-- | Create a 'Constant' 'SubExp' containing the given value.
constant :: IsValue v => v -> SubExp
constant v = Constant (value v)

-- | Utility definition for reasons of type ambiguity.
intConst :: IntType -> Integer -> SubExp
intConst t = constant . intValue t

-- | Utility definition for reasons of type ambiguity.
floatConst :: FloatType -> Double -> SubExp
floatConst t = constant . floatValue t
| CulpaBS/wbBach | src/Futhark/Representation/AST/Attributes/Constants.hs | bsd-3-clause | 1,684 | 0 | 7 | 344 | 326 | 181 | 145 | 37 | 1 |
-- trac #2806
{-# LANGUAGE MagicHash, UnboxedTuples, BangPatterns #-}
module Tcfail203a where
import GHC.Base
-- Deliberately ill-formed (trac #2806): a strict (bang) pattern is not
-- allowed on a where-clause binding here, so this definition is
-- expected to be rejected by the type checker.  Do not "fix" it.
fail10 = 'a'
    where !(b, ~(c, (I# x))) = (True, (False, 5))
| rahulmutt/ghcvm | tests/suite/typecheck/fail/tcfail203/Tcfail203a.hs | bsd-3-clause | 177 | 0 | 12 | 34 | 56 | 34 | 22 | 5 | 1 |
{-|
Module : Idris.DataOpts
Description : Optimisations for Idris code i.e. Forcing, detagging and collapsing.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards #-}
module Idris.DataOpts(applyOpts) where
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.Core.TT
import Control.Applicative
import Data.List
import Data.Maybe
import Debug.Trace
-- | Terms whose subterms can be rewritten by the optimiser.
class Optimisable term where
    applyOpts :: term -> Idris term

-- Structural instances: optimise each component independently.
instance (Optimisable a, Optimisable b) => Optimisable (a, b) where
    applyOpts (x, y) = (,) <$> applyOpts x <*> applyOpts y

-- The first component (e.g. a name/binder list) is left untouched.
instance (Optimisable a, Optimisable b) => Optimisable (vs, a, b) where
    applyOpts (v, x, y) = (,,) v <$> applyOpts x <*> applyOpts y

instance Optimisable a => Optimisable [a] where
    applyOpts = mapM applyOpts

instance Optimisable a => Optimisable (Either a (a, a)) where
    applyOpts (Left t)  = Left <$> applyOpts t
    applyOpts (Right t) = Right <$> applyOpts t
-- Raw is for compile time optimisation (before type checking)
-- Term is for run time optimisation (after type checking, collapsing allowed)

-- Compile time: no collapsing
instance Optimisable Raw where
    applyOpts t@(RApp f a)
        -- keep a fully-applied head intact, optimising only the arguments
        | (Var n, args) <- raw_unapply t -- MAGIC HERE
            = raw_apply (Var n) <$> mapM applyOpts args
        | otherwise = RApp <$> applyOpts f <*> applyOpts a
    applyOpts (RBind n b t) = RBind n <$> applyOpts b <*> applyOpts t
    applyOpts t = return t

-- Erase types (makes ibc smaller, and we don't need them)
instance Optimisable (Binder (TT Name)) where
    applyOpts (Let t v) = Let <$> return Erased <*> applyOpts v
    applyOpts b = return (b { binderTy = Erased })

instance Optimisable (Binder Raw) where
    applyOpts b = do t' <- applyOpts (binderTy b)
                     return (b { binderTy = t' })
-- Run-time: do everything
-- Namespace [Nat, Prelude], used below to recognise Prelude.Nat
-- operations that are rewritten to big-integer primitives.
prel = [txt "Nat", txt "Prelude"]
instance Optimisable (TT Name) where
    -- Rewrite Prelude.Nat arithmetic to big-integer primitives.
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "plus" && mod == prel
            = return (P Ref (sUN "prim__addBigInt") Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "mult" && mod == prel
            = return (P Ref (sUN "prim__mulBigInt") Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "divNat" && mod == prel
            = return (P Ref (sUN "prim__sdivBigInt") Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "modNat" && mod == prel
            = return (P Ref (sUN "prim__sremBigInt") Erased)
    -- Nat/Integer conversions become no-ops (identity) at run time.
    applyOpts (App _ (P _ (NS (UN fn) mod) _) x)
        | fn == txt "fromIntegerNat" && mod == prel
            = applyOpts x
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "fromIntegerNat" && mod == prel
            = return (App Complete (P Ref (sNS (sUN "id") ["Basics","Prelude"]) Erased) Erased)
    applyOpts (P _ (NS (UN fn) mod) _)
        | fn == txt "toIntegerNat" && mod == prel
            = return (App Complete (P Ref (sNS (sUN "id") ["Basics","Prelude"]) Erased) Erased)
    -- Data constructors go through applyDataOptRT, saturated or not.
    applyOpts c@(P (DCon t arity uniq) n _)
        = return $ applyDataOptRT n t arity uniq []
    applyOpts t@(App s f a)
        | (c@(P (DCon t arity uniq) n _), args) <- unApply t
            = applyDataOptRT n t arity uniq <$> mapM applyOpts args
        | otherwise = App s <$> applyOpts f <*> applyOpts a
    applyOpts (Bind n b t) = Bind n <$> applyOpts b <*> applyOpts t
    applyOpts (Proj t i) = Proj <$> applyOpts t <*> pure i
    applyOpts t = return t
-- | Need to saturate arguments first to ensure that optimisation happens uniformly
-- Under-applied constructors are eta-expanded with fresh \"sat\"
-- lambdas before the Nat rewrites are applied.
applyDataOptRT :: Name -> Int -> Int -> Bool -> [Term] -> Term
applyDataOptRT n tag arity uniq args
    | length args == arity = doOpts n args
    | otherwise = let extra = satArgs (arity - length args)
                      tm = doOpts n (args ++ map (\n -> P Bound n Erased) extra)
                  in bind extra tm
  where
    satArgs n = map (\i -> sMN i "sat") [1..n]
    bind [] tm = tm
    bind (n:ns) tm = Bind n (Lam RigW Erased) (pToV n (bind ns tm))
    -- Nat special cases
    -- TODO: Would be nice if this was configurable in idris source!
    -- Issue #1597 https://github.com/idris-lang/Idris-dev/issues/1597
    -- Z becomes the literal 0 ...
    doOpts (NS (UN z) [nat, prelude]) []
        | z == txt "Z" && nat == txt "Nat" && prelude == txt "Prelude"
            = Constant (BI 0)
    -- ... and S k becomes k + 1 on big integers.
    doOpts (NS (UN s) [nat, prelude]) [k]
        | s == txt "S" && nat == txt "Nat" && prelude == txt "Prelude"
            = App Complete (App Complete (P Ref (sUN "prim__addBigInt") Erased) k) (Constant (BI 1))
    doOpts n args = mkApp (P (DCon tag arity uniq) n Erased) args
| bravit/Idris-dev | src/Idris/DataOpts.hs | bsd-3-clause | 4,582 | 0 | 16 | 1,197 | 1,832 | 903 | 929 | 81 | 4 |
{-# OPTIONS_GHC -O0 #-}
{- |
The ghcjs-boot program installs the libraries and runtime system for GHCJS
There are two types of installation:
- release (default):
install ghcjs-boot and shims from the tar cache archives included
in the package
- development:
install ghcjs-boot and shims from their git repository
You can customize the boot configuration in boot.yaml and override some
of the options on the command line.
If you want to install to a different directory, set the ghcjs and ghcjs-pkg
programs to wrapper scripts that pass the correct -B flag to the executable
(see lib/etc/ghcjs.sh and lib/etc/ghcjs-pkg.sh in the GHCJS data dir)
-}
{-# LANGUAGE CPP, ExtendedDefaultRules, OverloadedStrings, ScopedTypeVariables,
TemplateHaskell, LambdaCase, FlexibleInstances, DeriveDataTypeable,
GeneralizedNewtypeDeriving, NoMonomorphismRestriction, FlexibleContexts,
ImpredicativeTypes, TupleSections
#-}
module Main where
import Prelude hiding (FilePath, forM_, elem, mapM, mapM_, any, all, concat, concatMap)
import qualified Distribution.Simple.Utils as Cabal
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import Control.Applicative
import qualified Control.Exception as Ex
import Control.Lens hiding ((<.>))
import Control.Monad (void, when, unless, mplus, join)
import Control.Monad.Reader (MonadReader, ReaderT(..), MonadIO, ask, local, lift, liftIO)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Char
import Data.Data
import Data.Data.Lens
import Data.Foldable
import qualified Data.HashMap.Strict as HM
import Data.List (intercalate, transpose)
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import Data.Time.Clock
import Data.Traversable
import Data.Typeable
import qualified Data.Vector as V
import Data.Yaml ((.:))
import qualified Data.Yaml as Yaml
import Filesystem (getWorkingDirectory, getModified, getSize
,canonicalizePath)
import Filesystem.Path hiding ((<.>), (</>), null, concat)
import Filesystem.Path.CurrentOS (encodeString)
import GHC.IO.Encoding (setLocaleEncoding, setForeignEncoding, utf8)
import qualified Network.Browser as Br
import Network.HTTP (mkRequest, RequestMethod(..), Response(..))
import Network.URI (parseURI, URI(..))
import Options.Applicative hiding (info, (&))
import qualified Options.Applicative as O
import System.Directory (findExecutable)
import System.Environment (getEnvironment, getArgs)
import System.Environment.Executable (getExecutablePath)
import System.Exit (exitSuccess, exitFailure, ExitCode(..))
import qualified System.FilePath
import System.IO (hSetBuffering, stdout, BufferMode(..))
import System.PosixCompat.Files (setFileMode)
import System.Process (readProcessWithExitCode)
import Shelly ((<.>),{-(</>),-} fromText)
import qualified Shelly as Sh
import Text.Read (readEither, readMaybe)
--
import Compiler.GhcjsProgram (printVersion)
import qualified Compiler.Info as Info
import Compiler.Utils as Utils
default (Text)
isWindows :: Bool
#ifdef WINDOWS
isWindows = True
#else
isWindows = False
#endif
-- | Message verbosity, 0..3 (2 is the command-line default).
newtype Verbosity = Verbosity Int deriving (Eq, Ord, Data, Typeable)
-- Named levels, most to least verbose:
trace = Verbosity 3
info = Verbosity 2
warn = Verbosity 1
err = Verbosity 0
data BootSettings = BootSettings { _bsClean :: Bool -- ^ remove existing tree first
, _bsShowVersion :: Bool -- ^ show the version and exit
, _bsQuick :: Bool -- ^ don't install the Cabal library and stage2 packages
, _bsDev :: Bool -- ^ do a development boot
, _bsJobs :: Maybe Int -- ^ number of parallel jobs
, _bsDebug :: Bool -- ^ build debug version of the libraries (GHCJS records the STG in the object files for easier inspection)
, _bsProf :: Bool -- ^ build profiling version of the libraries
, _bsHaddock :: Bool -- ^ build documentation
, _bsVerbosity :: Verbosity -- ^ verbosity level 0..3, 2 is default
, _bsIconvInclude :: Maybe Text -- ^ directory containing iconv.h
, _bsIconvLib :: Maybe Text -- ^ directory containing iconv library
, _bsGmpInclude :: Maybe Text -- ^ directory containing gmp.h
, _bsGmpLib :: Maybe Text -- ^ directory containing gmp library
, _bsGmpFramework :: Bool -- ^ with-gmp-framework-preferred
, _bsGmpInTree :: Bool -- ^ force using the in-tree GMP
, _bsWithCabal :: Maybe Text -- ^ location of cabal (cabal-install) executable, must have GHCJS support
, _bsWithGhcjsBin :: Maybe Text -- ^ bin directory for GHCJS programs
, _bsWithGhcjs :: Maybe Text -- ^ location of GHCJS compiler
, _bsWithGhcjsPkg :: Maybe Text -- ^ location of ghcjs-pkg program
, _bsWithGhcjsRun :: Maybe Text -- ^ location of ghcjs-run program
, _bsWithGhc :: Maybe Text -- ^ location of GHC compiler (must have a GHCJS-compatible Cabal library installed. ghcjs-boot copies some files from this compiler)
, _bsWithGhcPkg :: Maybe Text -- ^ location of ghc-pkg program
, _bsWithNode :: Maybe Text -- ^ location of the node.js program
, _bsWithDataDir :: Maybe Text -- ^ override data dir
, _bsWithConfig :: Maybe Text -- ^ installation source configuration (default: lib/etc/boot-sources.yaml in data dir)
, _bsShimsDevRepo :: Maybe Text -- ^ override shims repository
, _bsShimsDevBranch :: Maybe Text -- ^ override shims branch or commit
, _bsBootDevRepo :: Maybe Text -- ^ override ghcjs-boot repository
, _bsBootDevBranch :: Maybe Text -- ^ override ghcjs-boot branch or commit
, _bsStage1Unbooted :: Bool -- ^ build stage1 (like --quick) but leave the compiler in unbooted state with the Cabal package still registered
} deriving (Ord, Eq, Data, Typeable)
{- | locations to get installation files from
files may have multiple locations, they're tried in order until one succeeds
locations are typically read from boot-sources.yaml, customize the defaults
in lib/etc/boot-sources.yaml in the installed data dir, or use
the --sources or --datadir options
-}
data BootSources = BootSources { _bsrcShims :: [Text]
, _bsrcBoot :: [Text]
, _bsrcTest :: [Text]
, _bsrcEtc :: [Text]
, _bsrcDoc :: [Text]
, _bsrcGhcjsPrim :: [Text]
, _bsrcInclude :: [Text]
, _bsrcShimsDev :: [Text]
, _bsrcShimsDevBranch :: Text
, _bsrcBootDev :: [Text]
, _bsrcBootDevBranch :: Text
, _bsrcBuildtoolsWindows :: [Text]
, _bsrcBuildtoolsBootWindows :: [Text]
} deriving (Data, Typeable)
{- | Stage configuration file: packages to install in each stage
see boot.yaml for more information
-}
data BootStages = BootStages { _bstStage1a :: Stage
, _bstStage1b :: Stage
, _bstStage2 :: Stage
, _bstPretend :: [Package] -- ^ packages we pretend to have in stage one, but actually hand off to GHC
, _bstCabal :: Package -- ^ installed between 1b and 2, only when doing a full boot
, _bstGhcjsPrim :: Package -- ^ installed between 1a and 1b
, _bstGhcPrim :: Package -- ^ installed before stage 1a
} deriving (Data, Typeable)
type Stage = [CondPackage]
type Package = Text -- ^ just the package name, can be a directory name
-- (starting with ./ relative to the ghcjs-boot root),
-- a url or a plain package name
data PlatformCond = Windows | Unix deriving (Eq, Ord, Enum, Data, Typeable)
data BootTypeCond = Full | Quick deriving (Eq, Ord, Enum, Data, Typeable)
data CondPackage = CondPackage { _cpPlatform :: Maybe PlatformCond
, _cpBootType :: Maybe BootTypeCond
, _cpPackage :: Package
} deriving (Data, Typeable)
data BootLocations = BootLocations { _blGhcjsTopDir :: FilePath -- ^ install to here
, _blGhcjsLibDir :: FilePath
, _blGhcLibDir :: FilePath -- ^ copy GHC files from here
, _blGlobalDB :: FilePath -- ^ global package database
, _blUserDBDir :: Maybe FilePath -- ^ user package database location
, _blNativeToo :: Bool -- ^ build/install native code too
} deriving (Data, Typeable)
data Program a = Program { _pgmName :: Text -- ^ program name for messages
, _pgmSearch :: Text -- ^ name searched for when configuring the program (from command line or config file)
, _pgmVersion :: Maybe Text -- ^ version if known
, _pgmLoc :: Maybe FilePath -- ^ absolute path to the program
, _pgmArgs :: [Text] -- ^ extra arguments to pass to the program
} deriving (Data, Typeable)
data Required = Required deriving (Data, Typeable)
data Optional = Optional deriving (Data, Typeable)
-- | Whether a configured 'Program' must be present for boot to proceed.
class MaybeRequired a where isRequired :: a -> Bool
instance MaybeRequired (Program Optional) where isRequired = const False
instance MaybeRequired (Program Required) where isRequired = const True
-- | configured programs, fail early if any of the required programs is missing
data BootPrograms = BootPrograms { _bpGhcjs :: Program Required
, _bpGhcjsPkg :: Program Required
, _bpGhcjsRun :: Program Required
, _bpGhc :: Program Required
, _bpGhcPkg :: Program Required
, _bpCabal :: Program Required
, _bpNode :: Program Required
, _bpHaddock :: Program Required
, _bpGit :: Program Optional
, _bpAlex :: Program Optional
, _bpHappy :: Program Optional
, _bpTar :: Program Optional
, _bpCpp :: Program Optional
, _bpBash :: Program Optional
, _bpAutoreconf :: Program Optional
, _bpMake :: Program Optional
} deriving (Data, Typeable)
data BootEnv = BootEnv { _beSettings :: BootSettings
, _beSources :: BootSources
, _beLocations :: BootLocations
, _bePrograms :: BootPrograms
, _beStages :: BootStages
}
data BootConfigFile = BootConfigFile BootStages BootSources BootPrograms
deriving (Data, Typeable)
makeLenses ''Program
makeLenses ''CondPackage
makeLenses ''BootSettings
makeLenses ''BootSources
makeLenses ''BootLocations
makeLenses ''BootPrograms
makeLenses ''BootStages
makeLenses ''BootEnv
-- | Select the packages applicable to this platform and boot type
-- (quick vs. full), dropping those whose conditions do not match.
resolveConds :: Bool -> [CondPackage] -> [Package]
resolveConds quick = map (view cpPackage) . filter keep
  where
    keep cp = cp ^. cpPlatform /= Just wrongPlatform &&
              cp ^. cpBootType /= Just wrongBootType
    wrongPlatform = if isWindows then Unix else Windows
    wrongBootType = if quick then Full else Quick
-- | all packages that can be built on this host
resolveCondsHost :: [CondPackage] -> [Package]
resolveCondsHost = map (view cpPackage) . filter buildable
  where
    buildable cp = cp ^. cpPlatform /= Just otherPlatform
    otherPlatform = if isWindows then Unix else Windows
-- | all packages from all stages that can be built on this machine
-- (the three special packages plus the host-buildable stage packages)
allPackages :: B [Package]
allPackages = p <$> view beStages
  where
    p s = [s ^. bstGhcjsPrim, s ^. bstCabal, s ^. bstGhcPrim] ++
          resolveCondsHost ((s ^. bstStage1a) ++ (s ^. bstStage1b) ++ (s ^. bstStage2))
-- | Entry point: parse settings, build the boot environment, then run
-- the installation pipeline under Shelly/ReaderT.  Any exception from
-- the pipeline is rethrown after Shelly shuts down.
main :: IO ()
main = do
    -- temporary warning
    whenM ((==["--init"]) <$> getArgs) (putStrLn "ghcjs-boot has been updated. see README.\nUse `ghcjs-boot --dev' for a development build (if you installed GHCJS from a Git repo) or `ghcjs-boot' for a release build" >> exitFailure)
    settings <- adjustDefaultSettings <$> execParser optParser'
    when (settings ^. bsShowVersion) (printVersion >> exitSuccess)
    hSetBuffering stdout LineBuffering
    setLocaleEncoding utf8
    setForeignEncoding utf8
    env <- initBootEnv settings
    printBootEnvSummary False env
    -- run the pipeline, capturing any exception so it can be rethrown
    -- outside the Shelly context
    r <- Sh.shelly $ runReaderT ((actions >> pure Nothing) `catchAny` (pure . Just)) env
    maybe exitSuccess Ex.throwIO r
  where
    -- the full boot sequence; checkpoints allow resuming a failed boot
    actions :: B ()
    actions = verbosely . tracing False $ do
      e <- ask
      when (e ^. beSettings . bsClean) cleanTree
      removeCompleted
      mapM_ addCheckpoint ["ghcjs-boot checkpoints file", "init"]
      installBuildTools
      -- fetch sources from git (dev) or the bundled archives (release)
      bool (e ^. beSettings . bsDev) installDevelopmentTree installReleaseTree
      initPackageDB
      cleanCache
      installRts
      installEtc
      installDocs
      installTests
      copyGhcjsPrim
      copyIncludes
      let base = e ^. beLocations . blGhcjsLibDir
      setenv "CFLAGS" $ "-I" <> toTextI (base </> "include")
      -- register fake stage-1 packages, build stage 1, then (unless
      -- left unbooted) swap in the real packages and build stage 2
      installFakes
      installStage1
      unless (e ^. beSettings . bsStage1Unbooted) $ do
        removeFakes
        unless (e ^. beSettings . bsQuick) installStage2
        when (e ^. beSettings . bsHaddock) buildDocIndex
      liftIO . printBootEnvSummary True =<< ask
      unless (e ^. beSettings . bsStage1Unbooted) addCompleted
-- | Remove the installation tree, but only if the \"init\" checkpoint
-- proves a previous boot created it — refusing to delete a directory
-- that does not look like a GHCJS installation.
cleanTree :: B ()
cleanTree = do
  topDir <- view (beLocations . blGhcjsTopDir)
  msg info ("cleaning installation tree " <> toTextI topDir)
  hasCheckpoint "init" >>= cond (rm_rf topDir)
    (failWith ("directory to clean might not be a GHCJS installation directory: " <> toTextI topDir <> ", not cleaning"))
instance Yaml.FromJSON BootSources where
  parseJSON (Yaml.Object v) = BootSources
    <$> v ..: "shims" <*> v ..: "boot" <*> v ..: "test"
    <*> v ..: "etc" <*> v ..: "doc"
    <*> v ..: "ghcjs-prim" <*> v ..: "include"
    <*> v ..: "shims-dev" <*> v .: "shims-dev-branch"
    <*> v ..: "ghcjs-boot-dev" <*> v .: "ghcjs-boot-dev-branch"
    <*> v ..: "buildtools-windows" <*> v ..: "buildtools-boot-windows"
    where
      -- accept either a non-empty list of locations or a single
      -- location, normalising to a singleton list
      o ..: p = (nonempty =<< o .: p) <|> ((:[]) <$> o .: p)
      nonempty xs = if null xs then mempty else return xs
  parseJSON _ = mempty
instance Yaml.FromJSON BootPrograms where
  parseJSON (Yaml.Object v) = BootPrograms
    <$> v ..: "ghcjs" <*> v ..: "ghcjs-pkg" <*> v ..: "ghcjs-run"
    <*> v ..: "ghc" <*> v ..: "ghc-pkg"
    <*> v ..: "cabal" <*> v ..: "node" <*> v ..: "haddock-ghcjs"
    <*> v ..: "git" <*> v ..: "alex"
    <*> v ..: "happy" <*> v ..: "tar"
    <*> v ..: "cpp" <*> v ..: "bash" <*> v ..: "autoreconf"
    <*> v ..: "make"
    where
      -- a program is either a plain search name or a one-entry object
      -- mapping the search name to extra arguments
      o ..: p = ((\t -> Program p t Nothing Nothing []) <$> o .: p) <|> (withArgs p =<< o .: p)
      withArgs :: Text -> Yaml.Value -> Yaml.Parser (Program a)
      withArgs p (Yaml.Object o) | [(k,v)] <- HM.toList o = Program p k Nothing Nothing <$> Yaml.parseJSON v
      withArgs _ _ = mempty
  parseJSON _ = mempty
instance Yaml.FromJSON BootStages where
  parseJSON (Yaml.Object v) = BootStages
    <$> v ..: "stage1a" <*> v ..: "stage1b" <*> v ..: "stage2"
    <*> v .:: "stage1PretendToHave" <*> v .: "cabal" <*> v .: "ghcjs-prim"
    <*> v .: "ghc-prim"
    where
      -- single value or list of values
      o .:: p = ((:[])<$>o.:p) <|> o.:p
      o ..: p = pkgs Nothing Nothing =<< o .: p
      -- walk the stage description, accumulating platform/boot-type
      -- conditions from the If* wrapper objects
      pkgs plc btc (Yaml.Object o) | [(k,v)] <- HM.toList o = matchCond plc btc k v
      pkgs plc btc (Yaml.String t) = pure [CondPackage plc btc t]
      pkgs plc btc (Yaml.Array v)  = concat <$> mapM (pkgs plc btc) (V.toList v)
      pkgs _   _   _               = mempty
      -- contradictory nesting (e.g. IfWindows inside IfUnix) yields no packages
      matchCond plc btc k v
        | k == "IfWindows" && plc /= Just Unix    = pkgs (Just Windows) btc v
        | k == "IfUnix"    && plc /= Just Windows = pkgs (Just Unix) btc v
        | k == "IfQuick"   && btc /= Just Full    = pkgs plc (Just Quick) v
        | k == "IfFull"    && btc /= Just Quick   = pkgs plc (Just Full) v
        | otherwise                               = mempty
  parseJSON _ = mempty
-- Top-level boot.yaml structure: packages, sources, programs.
instance Yaml.FromJSON BootConfigFile where
  parseJSON (Yaml.Object v) = BootConfigFile
    <$> v .: "packages" <*> v .: "sources" <*> v .: "programs"
  parseJSON _ = mempty
-- | Platform-specific defaults: on Windows, fall back to the in-tree
-- GMP unless the user pointed at an external GMP install.
adjustDefaultSettings :: BootSettings -> BootSettings
adjustDefaultSettings s
  | isWindows && isNothing (s ^. bsGmpInclude) && isNothing (s ^. bsGmpLib) = s & bsGmpInTree .~ True
  | otherwise = s
{-
We install some build tools automatically if we're on Windows
-}
-- | On Windows, copy the MingW toolchain from the GHC installation
-- (guarded by checkpoints so it runs once).  No-op elsewhere.  The
-- larger buildtools/environment setup below is currently disabled.
installBuildTools :: B ()
installBuildTools
  | not isWindows = return ()
  | otherwise = instBt -- >> setBuildEnv
  where
    instBt = checkpoint' "buildtools" "buildtools already installed" $ do
      subTop $ do
        checkpoint' "mingw" "MingW installation already copied" $ do
          msg info "MingW installation not found, copying from GHC"
          flip cp_r ".." <^> beLocations . blGhcLibDir . to (</> (".." </> "mingw"))
{-
      subTop $ do
        p <- absPath =<< pwd
        checkpoint' "buildtools" "Buildtools already installed" $ do
          checkpoint' "buildtools-boot" "Buildtools bootstrap archive already installed" $
            install' "Windows buildtools bootstrap archive" "buildtools-boot" <^> beSources . bsrcBuildtoolsBootWindows
          prependPathEnv [p </> "buildtools-boot" </> "bin"]
          install' "Windows buildtools" "buildtools" <^> beSources . bsrcBuildtoolsWindows
    setBuildEnv = do
      libDir <- view (beLocations . blGhcjsLibDir)
      cd libDir
      p <- absPath =<< pwd
      let bt = p </> "buildtools"
      mw <- canonicalize (p </> ".." </> "mingw")
      prependPathEnv [ mw </> "bin"
                     , bt </> "bin"
                     , bt </> "msys" </> "1.0" </> "bin"
                     , bt </> "git" </> "bin"
                     ]
      setenv "MINGW_HOME" (toTextI mw)
      setenv "PERL5LIB" (msysPath $ bt </> "share" </> "autoconf")
      mkdir_p (bt </> "etc")
      mkdir_p (bt </> "msys" </> "1.0" </> "mingw")
      writefile (bt </> "msys" </> "1.0" </> "etc" </> "fstab") $ T.unlines
        [ escapePath bt <> " /mingw"
        , escapePath (bt </> "msys" </> "1.0" </> "bin") <> " /bin"
        ]
-}
-- | Prepend the given directories to the search path, updating both
-- the "Path" and "PATH" spellings of the variable (Windows vs. Unix).
prependPathEnv :: [FilePath] -> B ()
prependPathEnv xs = do
  path1 <- get_env "Path"
  path2 <- get_env "PATH"
  let path = maybe "" (";"<>) (path1 <> path2)
      newPath = T.intercalate ";" (map toTextI xs) <> path
  setenv "Path" newPath
  setenv "PATH" newPath
-- convert C:\x\y to /c/x/y (only on Windows)
msysPath :: FilePath -> Text
msysPath p
| isWindows = let p' = toTextI p
backToForward '\\' = '/'
backToForward x = x
isRel = "." `T.isPrefixOf` p' -- fixme
in bool isRel "" "/" <> T.map backToForward (T.filter (/=':') p')
| otherwise = toTextI p
-- | Escape a path for use in an MSYS fstab file: spaces are
-- backslash-escaped and backslashes become forward slashes.
escapePath :: FilePath -> Text
escapePath = T.concatMap esc . toTextI
  where
    esc ' '  = "\\ "
    esc '\\' = "/"
    esc c    = T.singleton c
-- | Full command line parser, wrapping 'optParser' with @--help@
-- support and the program header/description.
optParser' :: ParserInfo BootSettings
optParser' = O.info (helper <*> optParser)
  (fullDesc <>
    header "GHCJS booter, build base libraries for the compiler" <>
    progDesc description
  )
-- | One-line program description shown by @--help@
-- (newline-terminated, as @unlines@ produced before).
description :: String
description =
  "ghcjs-boot builds an initial set of libraries for GHCJS." ++ "\n"
-- | Command line options, applied in the field order of the
-- @BootSettings@ record.
optParser :: Parser BootSettings
optParser = BootSettings
  <$> switch ( long "clean" <> short 'c' <>
        help "clean the installation directory first" )
  <*> switch ( long "version" <>
        help "show the ghcjs-boot version" )
  <*> switch ( long "quick" <> short 'q' <>
        help "quick boot (no Cabal or ghcjs-base, but enough to compile basic tests)" )
  <*> switch ( long "dev" <> short 'd' <>
        help "fetch development sources (requires more build tools)" )
  <*> (optional . option auto) ( long "jobs" <> short 'j' <> metavar "JOBS" <>
        help "number of jobs to run in parallel" )
  -- no short option here: -d already belongs to --dev; the previous
  -- duplicate short 'd' made the parser ambiguous
  <*> switch ( long "debug" <>
        help "build debug libraries with extra checks" )
  <*> fmap not (switch ( long "no-prof" <>
        help "don't generate profiling version of the libraries" ))
  <*> fmap not (switch ( long "no-haddock" <>
        help "don't generate documentation" ))
  <*> (fmap Verbosity . option auto) ( long "verbosity" <> short 'v' <> value 2 <>
        help "verbose output" )
  <*> (optional . fmap T.pack . strOption) ( long "with-iconv-includes" <> metavar "DIR" <>
        help "directory containing iconv.h" )
  <*> (optional . fmap T.pack . strOption) ( long "with-iconv-libraries" <> metavar "DIR" <>
        help "directory containing iconv library" )
  <*> (optional . fmap T.pack . strOption) ( long "with-gmp-includes" <> metavar "DIR" <>
        help "directory containing gmp.h" )
  <*> (optional . fmap T.pack . strOption) ( long "with-gmp-libraries" <> metavar "DIR" <>
        help "directory containing gmp library" )
  <*> switch ( long "with-gmp-framework-preferred" <>
        help "on OSX, prefer the GMP framework to the gmp lib" )
  <*> switch ( long "with-intree-gmp" <>
        help "force using the in-tree GMP" )
  <*> (optional . fmap T.pack . strOption) ( long "with-cabal" <> metavar "PROGRAM" <>
        help "cabal program to use" )
  <*> (optional . fmap T.pack . strOption) ( long "with-ghcjs-bin" <> metavar "DIR" <>
        help "bin directory for GHCJS programs" )
  <*> (optional . fmap T.pack . strOption) ( long "with-ghcjs" <> metavar "PROGRAM" <>
        help "ghcjs program to use" )
  <*> (optional . fmap T.pack . strOption ) ( long "with-ghcjs-pkg" <> metavar "PROGRAM" <>
        help "ghcjs-pkg program to use" )
  <*> (optional . fmap T.pack . strOption ) ( long "with-ghcjs-run" <> metavar "PROGRAM" <>
        help "ghcjs-run program to use" )
  <*> (optional . fmap T.pack . strOption) ( long "with-ghc" <> metavar "PROGRAM" <>
        help "ghc program to use" )
  <*> (optional . fmap T.pack . strOption) ( long "with-ghc-pkg" <> metavar "PROGRAM" <>
        help "ghc-pkg program to use" )
  <*> (optional . fmap T.pack . strOption) ( long "with-node" <> metavar "PROGRAM" <>
        help "node.js program to use" )
  <*> (optional . fmap T.pack . strOption) ( long "with-datadir" <> metavar "DIR" <>
        help "data directory with libraries and configuration files" )
  <*> (optional . fmap T.pack . strOption) ( long "with-config" <> metavar "FILE" <>
        help "boot configuration file (default: boot.yaml in datadir)" )
  <*> (optional . fmap T.pack . strOption ) ( long "shims-dev-repo" <> metavar "REPOSITORY" <>
        help "override shims repository location" )
  <*> (optional . fmap T.pack . strOption ) ( long "shims-dev-branch" <> metavar "BRANCH" <>
        help "override shims branch or commit to check out" )
  <*> (optional . fmap T.pack . strOption ) ( long "ghcjs-boot-dev-repo" <> metavar "REPOSITORY" <>
        help "override ghcjs-boot repository location" )
  <*> (optional . fmap T.pack . strOption ) ( long "ghcjs-boot-dev-branch" <> metavar "BRANCH" <>
        help "override ghcjs-boot branch or commit to check out" )
  <*> switch ( long "build-stage1-unbooted" <>
        help "build stage1 packages but leave the compiler in unbooted state (for testing only)" )
-- | (Re)create the global and, when configured, user package databases.
initPackageDB :: B ()
initPackageDB = do
  msg info "creating package databases"
  initDB "--global" <^> beLocations . blGlobalDB
  traverseOf_ _Just initUser <^> beLocations . blUserDBDir
  where
    initUser dir = rm_f (dir </> "package.conf") >> initDB "--user" (dir </> "package.conf.d")
    initDB dbName db = do
      rm_rf db >> mkdir_p db
      -- ghcjs-pkg init can fail (e.g. db dir already present); ignore
      -- the error and rely on the recache below
      ghcjs_pkg_ ["init", toTextI db] `catchAny_` return ()
      ghcjs_pkg_ ["recache", dbName]
-- | Remove the per-user GHCJS cache directory, ignoring any errors
-- (best effort; the directory may not exist).
cleanCache :: B ()
cleanCache =
  liftIO Info.getUserCacheDir >>= \case
    Just p -> rm_rf (fromString p) `catchAny_` return ()
    Nothing -> return ()
-- | human-readable description of the ghcjs-boot tree, for progress output
bootDescr :: Text
bootDescr = "boot libraries"
-- | human-readable description of the shims tree, for progress output
shimsDescr :: Text
shimsDescr = "shims, runtime system and support libraries"
-- | Development mode: clone the ghcjs-boot and shims trees from git,
-- cleaning up partial clones left by an interrupted run.
installDevelopmentTree :: B ()
installDevelopmentTree = subTop $ do
  p <- pwd
  msgD info $ "preparing development boot tree"
  checkpoint' "ghcjs-boot-git" "ghcjs-boot repository already cloned and prepared" $ do
    testGit "ghcjs-boot" >>= \case
      Just False -> failWith "ghcjs-boot already exists and is not a git repository"
      Just True -> do
        msg info "ghcjs-boot repository already exists but checkpoint not reached, cleaning first, then cloning"
        rm_rf "ghcjs-boot"
        initGhcjsBoot
      Nothing -> do
        msgD info "cloning ghcjs-boot git repository"
        initGhcjsBoot
  checkpoint' "shims-git" "shims repository already cloned" $ do
    testGit "shims" >>= \case
      Just False -> failWith "shims already exists and is not a git repository"
      Just True -> do
        msgD info "shims repository already exists but checkpoint not reached, cleaning first, then cloning"
        rm_rf "shims"
        cloneGit shimsDescr "shims" bsrcShimsDevBranch bsrcShimsDev
      Nothing -> do
        msgD info "cloning shims git repository"
        cloneGit shimsDescr "shims" bsrcShimsDevBranch bsrcShimsDev
  where
    -- clone ghcjs-boot, initialize submodules, apply our patches and
    -- prepare the primop/GMP build inputs
    initGhcjsBoot = sub $ do
      cloneGit bootDescr "ghcjs-boot" bsrcBootDevBranch bsrcBootDev
      cd "ghcjs-boot"
      git_ ["submodule", "update", "--init", "--recursive"]
      mapM_ patchPackage =<< allPackages
      preparePrimops
      buildGenPrim
      cleanGmp
    -- Nothing: directory does not exist; Just b: exists, b = has .git
    testGit d = cond (Just<$>test_d (d</>".git")) (pure Nothing) =<< test_d d
    cloneGit descr repoName branch srcs = do
      msgD info ("cloning git repository for " <> descr)
      cloneGitSrcs descr <^> beSources . srcs
      branch' <- view (beSources . branch)
      sub $ do
        cd repoName
        git_ ["checkout", branch']
    -- try each configured source URL until one clone succeeds
    cloneGitSrcs d [] = failWith ("could not clone " <> d <> ", no available sources")
    cloneGitSrcs d (x:xs) = git_ ["clone", x] `catchAny_`
      (msgD warn "clone failed, trying next source" >> cloneGitSrcs d xs)
-- | Release mode: unpack the ghcjs-boot and shims archives shipped
-- with the release (checkpointed).
installReleaseTree :: B ()
installReleaseTree = subTop $ do
  msgD info "preparing release boot tree"
  checkpoint' "ghcjs-boot-release" "ghcjs-boot tree already installed" $ do
    whenM (test_d "ghcjs-boot") (msgD warn "existing ghcjs-boot tree found from incomplete installation")
    install' bootDescr "ghcjs-boot" <^> beSources . bsrcBoot
    preparePrimops
    buildGenPrim
  checkpoint' "shims-release" "shims tree already installed" $ do
    whenM (test_d "shims") (msgD warn "existing shims tree found from incomplete installation")
    install' shimsDescr "shims" <^> beSources . bsrcShims
-- | preprocess primops.txt.pp, one version for the JS platform
-- one for native
preparePrimops :: B ()
preparePrimops = subTop' ("ghcjs-boot" </> "data") . checkpoint' "primops" "primops already prepared" $ do
  msg info "preparing primops"
  mkdir_p "native"
  ghcLibDir <- view (beLocations . blGhcLibDir)
  -- headers needed to preprocess primops.txt.pp for the native platform
  cp (ghcLibDir </> "include" </> "MachDeps.h") "native"
  cp (ghcLibDir </> "include" </> "ghcautoconf.h") "native"
  cp (ghcLibDir </> "include" </> "ghcplatform.h") ("native" </> "ghc_boot_platform.h")
  silently $ do
    primopsJs <- cpp ["-P", "-Ijs", "primops.txt.pp"]
    writefile "primops-js.txt" primopsJs
    primopsNative <- cpp ["-P", "-Inative", "primops.txt.pp"]
    writefile "primops-native.txt" primopsNative
-- | build the genprimopcode tool, this requires alex and happy
buildGenPrim :: B ()
buildGenPrim = subTop' ("ghcjs-boot" </> "utils" </> "genprimopcode") $ do
  make "genprimopcode" [] $ do
    -- generate the lexer and parser first, then compile the tool
    make "Lexer.hs" ["Lexer.x"] (alex_ ["Lexer.x"])
    make "Parser.hs" ["Parser.y"] (happy_ ["Parser.y"])
    ghc_ ["-o", "genprimopcode", "-O", "Main.hs", "+RTS", "-K128M"]
-- fixme this hardcodes the location of integer-gmp
-- | location of the in-tree integer-gmp package, relative to the top dir
integerGmp :: FilePath
integerGmp = "ghcjs-boot" </> "boot" </> "integer-gmp"
-- | Remove all build products of the in-tree GMP.
cleanGmp :: B ()
cleanGmp = subTop' integerGmp $ do
  rm_rf ("gmp" </> "intree")
  rm_f ("mkGmpDerivedConstants" </> exe "mkGmpDerivedConstants")
  rm_f "GmpDerivedConstants.h"
-- | Unpack and build the in-tree GMP (when enabled and not built yet),
-- then generate the GMP derived constants header (checkpointed).
prepareGmp :: B ()
prepareGmp = subTop' integerGmp . checkpoint' "gmp" "in-tree gmp already prepared" $ do
  intreeInstalled <- test_f ("gmp" </> "intree" </> "include" </> "gmp.h")
  gmpInTree <- view (beSettings . bsGmpInTree)
  sub $ when (gmpInTree && not intreeInstalled) $ do
    cd "gmp"
    -- remove leftover unpacked gmp-* directories from earlier runs
    lsFilter "." isGmpSubDir rm_rf
    msg info "unpacking in-tree GMP"
    lsFilter "tarball" (return . isTarball) (installArchive False "in-tree libgmp" ".")
    d <- pwd
    ad <- absPath d
    lsFilter "." isGmpSubDir $ \dir -> do
      -- patch has already been applied
      cd dir
      adir <- absPath dir
      msgD info "building GMP"
      configure_ ["--prefix=" <> msysPath (ad </> "intree")]
      runMake_ []
      runMake_ ["install"]
  make "GmpGeneratedConstants.h" [] $ do
    gmpIncl <- view (beSettings . bsGmpInclude)
    p <- absPath =<< pwd
    -- prefer a user-supplied include dir, fall back to the in-tree one
    buildGmpConstants (gmpIncl `mplus` bj gmpInTree (toTextI $ p </> "gmp" </> "intree" </> "include"))
  where
    lsFilter :: FilePath -> (FilePath -> B Bool) -> (FilePath -> B ()) -> B ()
    lsFilter dir p a = ls dir >>= mapM_ (\x -> p x >>= flip when (a x))
    isTarball file = any (`T.isSuffixOf` toTextI file) [".tar", ".tar.bz2"]
    isGmpSubDir dir = (("gmp-" `T.isPrefixOf`) . toTextI) <$> relativeTo "." dir
-- | Compile and run the mkGmpDerivedConstants helper to produce
-- GmpDerivedConstants.h; @includeDir@ optionally adds a -I path.
buildGmpConstants :: Maybe Text -> B ()
buildGmpConstants includeDir = subTop' integerGmp $ do
  msg info "generating GMP derived constants"
  cd "mkGmpDerivedConstants"
  ghc_ $ maybe [] (\d -> ["-I" <> d]) includeDir ++
    ["-fforce-recomp", "-no-hs-main", "-o", "mkGmpDerivedConstants", "mkGmpDerivedConstants.c"]
  p <- pwd
  -- run the freshly built executable and capture its output
  constants <- run (Program "" "" Nothing (Just $ p </> "mkGmpDerivedConstants") []) []
  writefile "GmpDerivedConstants.h" constants
-- | Apply the patch from the patches directory to a local package
-- (paths starting with "./"), if a patch file exists for it.
patchPackage :: Package -> B ()
patchPackage pkg
  | Just pkg' <- T.stripPrefix "./" (T.strip pkg) =
      let pkgName = last (T.splitOn "/" pkg')
          p = "patches" </> fromText pkgName <.> "patch"
          applyPatch = do
            msg info ("applying patch: " <> toTextI p)
            cd (fromText pkg')
            when isWindows (git_ ["config", "core.filemode", "false"])
            -- workaround for Windows MSYS2 git not liking our absolute paths
            git_ ["apply", T.replicate (1 + T.count "/" pkg') "../" <>
                  "patches/" <> pkgName <> ".patch"]
      in sub $ cond applyPatch (msg info $ "no patch for package " <> pkgName <> " found") =<< test_f p
  | otherwise = return ()
-- | Set up the GHCJS RTS: register the builtin_rts package, copy
-- include files, settings and helper executables from the host GHC.
installRts :: B ()
installRts = subTop' "ghcjs-boot" $ do
  msg info "installing RTS"
  globalDB <- view (beLocations . blGlobalDB)
  ghcLib <- view (beLocations . blGhcLibDir)
  ghcjsLib <- view (beLocations . blGhcjsLibDir)
  -- NOTE(review): ghcjsTop is bound but not used below
  ghcjsTop <- view (beLocations . blGhcjsTopDir)
  let inc = ghcjsLib </> "include"
      incNative = ghcjsLib </> "include_native"
#if __GLASGOW_HASKELL__ >= 709
      rtsLib = ghcjsLib </> "rts"
#else
      rtsLib = ghcjsLib </> "rts-1.0"
#endif
  -- reuse GHC's rts package description, with our own paths patched in
  rtsConf <- readfile (ghcLib </> "package.conf.d" </> "builtin_rts.conf")
  writefile (globalDB </> "builtin_rts.conf") (fixRtsConf (toTextI inc) (toTextI rtsLib) rtsConf)
  ghcjs_pkg_ ["recache", "--global", "--no-user-package-db"]
  forM_ [ghcjsLib, inc, incNative] mkdir_p
  sub $ cd (ghcLib </> "include") >> cp_r "." incNative
#if __GLASGOW_HASKELL__ >= 709
  sub $ cd (ghcLib </> "rts") >> cp_r "." rtsLib
#else
  sub $ cd (ghcLib </> "rts-1.0") >> cp_r "." rtsLib
#endif
  sub $ cd ("data" </> "include") >> installPlatformIncludes inc incNative
  cp (ghcLib </> "settings") (ghcjsLib </> "settings")
  cp (ghcLib </> "platformConstants") (ghcjsLib </> "platformConstants")
  let unlitDest = ghcjsLib </> exe "unlit"
      ghcjsRunDest = ghcjsLib </> exe "ghcjs-run"
  ghcjsRunSrc <- view (bePrograms . bpGhcjsRun . pgmLoc . to fromJust)
  cp (ghcLib </> exe "unlit") unlitDest
  cp ghcjsRunSrc ghcjsRunDest
  mapM_ (liftIO . Cabal.setFileExecutable . toStringI) [unlitDest, ghcjsRunDest]
  writefile (ghcjsLib </> "node") <^> bePrograms . bpNode . pgmLoc . to (maybe "-" toTextI)
  when (not isWindows) $ do
    let runSh = ghcjsLib </> "run" <.> "sh"
    writefile runSh "#!/bin/sh\nCOMMAND=$1\nshift\n\"$COMMAND\" \"$@\"\n"
    liftIO . Cabal.setFileExecutable . toStringI =<< absPath runSh
  -- required for integer-gmp
  whenM (view (beLocations . blNativeToo)) $ do
    prepareGmp
    cp ("boot" </> "integer-gmp" </> "mkGmpDerivedConstants" </> "GmpDerivedConstants.h") inc
  subTop $ do
    -- compile an empty C file (presumably to prime the native
    -- toolchain) -- TODO confirm why this is needed
    writefile "empty.c" ""
    ghc_ ["-c", "empty.c"]
  when isWindows $ cp (ghcLib </> exe "touchy") (ghcjsLib </> exe "touchy")
  msg info "RTS prepared"
-- | Install the headers from the current directory into @inc@; for
-- every header that exists only in @incNative@, write a wrapper into
-- @inc@ that defers to the native version when not compiling for JS.
installPlatformIncludes :: FilePath -> FilePath -> B ()
installPlatformIncludes inc incNative = do
  -- (removed an unused `pw <- pwd` binding here)
  cp_r "." inc
  nativeHeaders <- findWhen (return . isHeaderFile) incNative
  forM_ nativeHeaders $ \h -> do
    h' <- relativeTo incNative h
    e <- test_f ("." </> h')
    when (not e) $ do
      mkdir_p (directory $ inc </> h')
      writefile (inc </> h') (wrappedHeader h')
  where
    isHeaderFile = (`hasExtension` "h")
    -- wrapper with enough "../" components to reach include_native
    -- from the wrapper's own location
    wrappedHeader file =
      let pathParts = length (splitDirectories file)
          included = iterate (".." </>) ("include_native" </> file) !! pathParts
      in T.unlines [ "#ifndef ghcjs_HOST_OS"
                   , "#include \"" <> toTextI included <> "\""
                   , "#endif"
                   ]
-- | Append the @.exe@ extension on Windows; identity elsewhere.
exe :: FilePath -> FilePath
exe p
  | isWindows = p <.> "exe"
  | otherwise = p
-- | Copy the ghcjs-prim package sources into the library directory.
copyGhcjsPrim :: B ()
copyGhcjsPrim = checkpoint' "ghcjs-prim" "ghcjs-prim" $
  install' "ghcjs-prim package sources" <$^> beLocations . blGhcjsLibDir . to (</> "ghcjs-prim") <<*^> beSources . bsrcGhcjsPrim
-- | Copy the GHCJS RTS include files into the library directory.
copyIncludes :: B ()
copyIncludes = checkpoint' "includes" "includes" $
  install' "ghcjs rts include files" <$^> beLocations . blGhcjsLibDir . to (</> "include") <<*^> beSources . bsrcInclude
-- | Install additional configuration files; on Windows also compile
-- the runner's resource file with windres.
installEtc :: B ()
installEtc = checkpoint' "additional configuration files" "etc" $ do
  install' "additional configuration files" <$^> beLocations . blGhcjsLibDir <<*^> beSources . bsrcEtc
#ifdef WINDOWS
  -- compile the resources we need for the runner to prevent Windows from trying to detect
  -- programs that require elevated privileges
  ghcjsTop <- view (beLocations . blGhcjsTopDir)
  let windres = Program "windres" "windres" Nothing
        (Just $ ghcjsTop </> ".." </> "mingw" </> "bin" </> "windres.exe") []
  subTop $ run_ windres ["runner.rc", "-o", "runner-resources.o"]
#endif
-- | Install documentation into the library's doc directory.
installDocs :: B ()
installDocs = checkpoint' "documentation" "doc" $
  install' "documentation" <$^> beLocations . blGhcjsLibDir . to (</>"doc") <<*^> beSources . bsrcDoc
-- | Install the test suite; failure is non-fatal (we only warn and
-- continue without it).
installTests :: B ()
installTests = unlessM (hasCheckpoint "tests") $ do
  msg info "installing test suite"
  (install False "test suite" <$^> beLocations . blGhcjsLibDir . to (</>"test") <<*^> beSources . bsrcTest) >>=
    cond (addCheckpoint "tests") (msg warn "test suite could not be installed, continuing without")
-- | Build a combined haddock contents/index page over all installed
-- .haddock interface files.
buildDocIndex :: B ()
buildDocIndex = subTop' "doc" $ do
  haddockFiles <- findWhen (return . flip hasExtension "haddock") "."
  haddock_ $ ["--gen-contents", "--gen-index", "-o", "html", "--title=GHCJS Libraries"] ++
    map (\p -> "--read-interface=../" <> toTextI (directory p) <> "," <> toTextI p) haddockFiles
-- | Install the real Cabal library (replacing the fakes) and then the
-- stage 2 packages with the now-booted GHCJS.
installStage2 :: B ()
installStage2 = subTop' "ghcjs-boot" $ do
  msg info "installing Cabal library"
  removeFakes
  cabalPkg <- view (beStages . bstCabal)
  preparePackage cabalPkg
  cabalInstall [cabalPkg]
  msg info "installing stage 2 packages"
  stage2 <- stagePackages bstStage2
  forM_ stage2 preparePackage
  cabalInstall stage2
-- | Install the ghcjs-prim package in stage 1 (boot) mode.
installGhcjsPrim :: B ()
installGhcjsPrim = do
  msg info "installing ghcjs-prim"
  prim <- view (beStages . bstGhcjsPrim)
  preparePackage prim
  cabalStage1 [prim]
-- | Install the stage 1 package sets (0, 1a, ghcjs-prim, 1b), fixing
-- up ghc-prim's exposed modules and optionally the in-tree GMP.
installStage1 :: B ()
installStage1 = subTop' "ghcjs-boot" $ do
  prim <- view (beStages . bstGhcPrim)
  installStage "0" [prim]
  fixGhcPrim
  installStage "1a" =<< stagePackages bstStage1a
  s <- ask
  when (s ^. beSettings . bsGmpInTree && s ^. beLocations . blNativeToo) installInTreeGmp
  installGhcjsPrim
  installStage "1b" =<< stagePackages bstStage1b
  resolveWiredInPackages
  where
    -- re-register ghc-prim with GHC.Prim added to its exposed modules
    fixGhcPrim = do
      descr <- T.lines <$> ghcjs_pkg ["describe", "ghc-prim", "--no-user-package-db"]
      setStdin (T.unlines $ map fixGhcPrimDescr descr)
      ghcjs_pkg_ ["update", "-", "--global", "--no-user-package-db"]
    -- add GHC.Prim to exposed-modules
    fixGhcPrimDescr line
      | "GHC.PrimopWrappers" `T.isInfixOf` line = line <> " GHC.Prim"
      | otherwise = line
    installStage name s = do
      msg info ("installing stage " <> name)
      forM_ s preparePackage >> cabalStage1 s
-- | Look up the installed key/id of each wired-in package listed in
-- wiredinpkgs.yaml and write the resolved mapping to wiredinkeys.yaml.
resolveWiredInPackages :: B ()
resolveWiredInPackages = subTop $ do
  wips <- readBinary ("wiredinpkgs" <.> "yaml")
  case Yaml.decodeEither wips of
    Left err -> failWith ("error parsing wired-in packages file wiredinpkgs.yaml\n" <> T.pack err)
    Right pkgs -> do
      pkgs' <- forM pkgs $ \p ->
        (p,) . T.strip <$> ghcjs_pkg [ "--simple-output"
                                     , "field"
                                     , p
#if __GLASGOW_HASKELL__ >= 709
                                     , "key"
#else
                                     , "id"
#endif
                                     ]
      writefile ("wiredinkeys" <.> "yaml") $
        T.unlines ("# resolved wired-in packages" :
                   map (\(p,k) -> p <> ": " <> k) pkgs')
-- fixme: urk, this is probably not how it's supposed to be done
-- | Copy the in-tree libgmp.a into integer-gmp's library directory and
-- re-register the package with gmp added to extra-libraries.
installInTreeGmp :: B ()
installInTreeGmp = subTop' integerGmp $ do
  p <- absPath =<< pwd
  let gmpLib = p </> "gmp" </> "intree" </> "lib" </> "libgmp.a"
  libPath <- ghcjs_pkg ["field", "integer-gmp", "library-dirs", "--simple-output", "--no-user-package-db"]
  libPath' <- canonic (fromText $ T.strip libPath)
  msg info $ "installing in-tree gmp: " <> toTextI gmpLib <> " -> " <> toTextI libPath'
  cp gmpLib libPath'
  descr <- T.lines <$> ghcjs_pkg ["describe", "integer-gmp", "--no-user-package-db"]
  let updateLine line | "extra-libraries:" `T.isPrefixOf` line = line <> " gmp"
                      | otherwise = line
  setStdin (T.unlines $ map updateLine descr)
  ghcjs_pkg_ ["update", "-", "--global", "--no-user-package-db"]
-- | For local packages (paths starting with ./ or ../): regenerate the
-- configure script in dev mode and remove stale dist build output.
-- No-op for hackage package names.
preparePackage :: Package -> B ()
preparePackage pkg
  | "./" `T.isPrefixOf` pkg || "../" `T.isPrefixOf` pkg = sub $ do
      msg trace ("preparing package " <> pkg)
      cd (fromText pkg)
      e <- ask
      when (e ^. beSettings . bsDev) $
        whenM (test_f "configure.ac") $
          make "configure" ["configure.ac"]
            (msg info ("generating configure script for " <> pkg) >> autoreconf_)
      rm_rf "dist"
  | otherwise = return ()
-- | Rewrite the @library-dirs@ and @include-dirs@ fields of an RTS
-- package description so they point at the GHCJS install locations;
-- all other lines pass through unchanged.
fixRtsConf :: Text -> Text -> Text -> Text
fixRtsConf incl lib = T.unlines . map fix . T.lines
  where
    libKey  = T.pack "library-dirs:"
    inclKey = T.pack "include-dirs:"
    fix line
      | libKey  `T.isPrefixOf` line = T.pack "library-dirs: " <> lib
      | inclKey `T.isPrefixOf` line = T.pack "include-dirs: " <> incl
      | otherwise                   = line
-- | register fake, empty packages to be able to build packages
-- that depend on Cabal
installFakes :: B ()
installFakes = silently $ do
  installed <- T.words <$> ghc_pkg ["list", "--simple-output"]
  dumped <- T.lines <$> ghc_pkg ["dump"]
  fakes <- view (beStages . bstPretend)
  forM_ fakes $ \pkg ->
    -- take the last matching entry; NOTE(review): assumes ghc-pkg
    -- lists versions in ascending order -- TODO confirm
    case reverse (filter ((==pkg<>"-") . fst . T.breakOnEnd "-") installed) of
      [] -> failWith ("required package " <> pkg <> " not found in host GHC")
      (x:_) -> do
        let version = T.drop 1 (T.dropWhile (/='-') x)
        case findPkgId dumped pkg version of
          Nothing -> failWith ("cannot find package id of " <> pkg <> "-" <> version)
          Just pkgId -> do
            globalDB <- view (beLocations . blGlobalDB)
            libDir <- view (beLocations . blGhcjsLibDir)
            let conf = fakeConf libDir libDir pkg version pkgId
            writefile (globalDB </> fromText pkgId <.> "conf") conf
            ghcjs_pkg_ ["recache", "--global", "--no-user-package-db"]
-- | Find the full package id whose prefix is @pkg-version-@ in the
-- output lines of @ghc-pkg dump@.
findPkgId :: [Text] -> Text -> Text -> Maybe Text
findPkgId dump pkg version =
  listToMaybe [ i | i <- ids, prefix `T.isPrefixOf` i ]
  where
    dash   = T.pack "-"
    prefix = pkg <> dash <> version <> dash
    ids    = [ T.dropWhile isSpace (T.drop 3 l)
             | l <- dump, T.pack "id:" `T.isPrefixOf` l ]
-- | A minimal fake package description, used to pretend a package is
-- installed while booting (see 'installFakes').
fakeConf :: FilePath -> FilePath -> Text -> Text -> Text -> Text
fakeConf incl lib name version pkgId = T.unlines fields
  where
    fields =
      [ "name: " <> name
      , "version: " <> version
      , "id: " <> pkgId
      , "license: BSD3"
      , "maintainer: stegeman@gmail.com"
      , "import-dirs: " <> toTextI incl
      , "include-dirs: " <> toTextI incl
      , "library-dirs: " <> toTextI lib
      , "exposed: False"
      ]
-- | remove the fakes after we're done with them
removeFakes :: B ()
removeFakes = do
  fakes <- map (<>"-") <$> view (beStages . bstPretend)
  pkgs <- T.words <$> ghcjs_pkg ["list", "--simple-output", "--no-user-package-db"]
  -- unregister every installed package whose name matches a fake
  forM_ pkgs $ \p -> when (any (`T.isPrefixOf` p) fakes)
    (msg info ("unregistering " <> p) >> ghcjs_pkg_ ["unregister", p, "--no-user-package-db"])
-- | subshell in path relative to top installation dir
-- | subshell in path relative to top installation dir
subTop' :: FilePath -> B a -> B a
subTop' p a = subTop (cd p >> a)
-- | subshell starting in the top installation dir
subTop :: B a -> B a
subTop a = sub (view (beLocations . blGhcjsTopDir) >>= cd >> a)
-- | Write a lazy ByteString to a file, resolving the path to an
-- absolute one first.
writeBinary :: FilePath -> BL.ByteString -> B ()
writeBinary file bs = do
  msgD info ("writing file " <> toTextI file)
  file' <- absPath file
  liftIO $ BL.writeFile (toStringI file') bs
-- | unpack a tar file (does not support compression)
-- only supports files, does not try to emulate symlinks;
-- performs security checks: no absolute paths, no "..", and all
-- entries must share one top-level directory (no tar bombs)
unpackTar :: Bool -- ^ strip the first directory component?
          -> FilePath -- ^ destination to unpack to
          -> FilePath -- ^ the tar file
          -> B ()
unpackTar stripFirst dest tarFile = do
  mkdir_p dest
  entries <- Tar.read . BL.fromStrict <$> readBinary tarFile
  -- fold threads the expected top-level component (Maybe) through the entries
  void $ Tar.foldEntries (\e -> (>>=checkExtract e)) (return Nothing) (\e -> failWith $ "error unpacking tar: " <> showT e) entries
  where
    -- number of leading path components stripped from every entry
    dropComps = if stripFirst then 1 else 0
    failSec e msg = failWith $ "tar security check, " <> msg <> ": " <> T.pack (Tar.entryPath e)
    -- the first entry fixes the expected top-level path component
    checkExtract e Nothing
      | (p:_) <- System.FilePath.splitDirectories (Tar.entryPath e)
          = checkExtract e (Just p)
      | otherwise = failSec e "no path"
    checkExtract e je@(Just expected)
      | System.FilePath.isAbsolute ep = failSec e "absolute path"
      | any (=="..") epd = failSec e "'..' in path"
      | listToMaybe epd /= je && isSupportedEntry (Tar.entryContent e)
          = failSec e ("tar bomb, expected path component: " <> T.pack expected)
      | otherwise = do
          view (beSettings . bsVerbosity) >>= \v ->
            -- this gets chatty, reduce verbosity for file writes / directory creates here unless we're at trace level
            (if v < trace then quieter warn else id)
              (extractEntry e $ dest </> fromString (System.FilePath.joinPath (drop dropComps epd)))
          return je
      where ep = Tar.entryPath e
            epd = System.FilePath.splitDirectories ep
    isSupportedEntry (Tar.NormalFile{}) = True
    isSupportedEntry (Tar.Directory{}) = True
    isSupportedEntry _ = False
    extractEntry e tgt
      | Tar.NormalFile bs _size <- Tar.entryContent e = do
          mkdir_p (directory tgt)
          writeBinary tgt bs
          setPermissions (Tar.entryPermissions e) tgt
      | Tar.Directory <- Tar.entryContent e = do
          mkdir_p tgt
          setPermissions (Tar.entryPermissions e) tgt
      | otherwise =
          msg warn ("ignoring unexpected entry type in tar. only normal files and directories (no links) are supported:\n " <> toTextI tgt)
    -- renamed locals (previously shadowed the tgt parameter); strips a
    -- trailing path separator before chmod. NOTE(review): last/init
    -- assume a non-empty path -- holds for tar entry paths here
    setPermissions mode tgt = do
      absTgt <- absPath tgt
      msgD trace ("setting permissions of " <> toTextI tgt <> " to " <> showT mode)
      let s = toStringI absTgt
          s' = bool (last s `elem` ['/','\\']) (init s) s
      liftIO (setFileMode s' mode)
-- shortcuts for running the configured external programs;
-- the _ suffixed variants discard the program's output
ghc_ = runE_ bpGhc
ghc_pkg = runE bpGhcPkg
ghcjs_pkg = runE bpGhcjsPkg
ghcjs_pkg_ = runE_ bpGhcjsPkg
alex_ = runE_ bpAlex
happy_ = runE_ bpHappy
haddock_ = runE_ bpHaddock
tar_ = runE_ bpTar
git_ = runE_ bpGit
cpp = runE bpCpp
cabal = runE bpCabal
cabal_ = runE_ bpCabal
-- run the program selected by getter g from the boot environment
runE g a = view (bePrograms . g) >>= flip run a
runE_ g a = view (bePrograms . g) >>= flip run_ a
-- | stage 1 cabal install: boot mode, hand off to GHC if GHCJS cannot yet compile it
cabalStage1 :: [Text] -> B ()
cabalStage1 pkgs = sub $ do
  ghc <- requirePgmLoc =<< view (bePrograms . bpGhc)
  s <- view beSettings
  p <- pwd
  setenv "GHCJS_BOOTING" "1"
  setenv "GHCJS_BOOTING_STAGE1" "1"
  setenv "GHCJS_WITH_GHC" (toTextI ghc)
  let configureOpts = catMaybes $ [("--with-iconv-includes=" <>)<$>s^.bsIconvInclude
                                  ,("--with-iconv-libraries=" <>)<$>s^.bsIconvLib
                                  ,("--with-gmp-includes=" <>)<$>(s^.bsGmpInclude<>inTreePath p "include")
                                  -- no space after '=': the value must directly
                                  -- follow the equals sign (a stray space here
                                  -- previously produced a malformed option)
                                  ,("--with-gmp-libraries=" <>)<$>s^.bsGmpLib
                                  ] ++ gmpOpts
      -- fixme this hardcodes the location of integer-gmp
      inTreePath p sub =
        bj (s^.bsGmpInTree) (toTextI (p </> "boot" </> "integer-gmp" </> "gmp" </> "intree" </> sub))
      gmpOpts = [bj (s^.bsGmpFramework) "--with-gmp-framework-preferred"
                ,bj (s^.bsGmpInTree) "--with-intree-gmp"
                ]
  globalFlags <- cabalGlobalFlags
  flags <- cabalInstallFlags (length pkgs == 1)
  let args = globalFlags ++ ("install" : pkgs) ++
             [ "--solver=topdown" -- the modular solver refuses to install stage1 packages
             ] ++ map ("--configure-option="<>) configureOpts ++ flags
  checkInstallPlan pkgs args
  cabal_ args
-- | regular cabal install for GHCJS; verifies the install plan first
cabalInstall [] = do
  msg info "cabal-install: no packages, nothing to do"
  return ()
cabalInstall pkgs = do
  globalFlags <- cabalGlobalFlags
  flags <- cabalInstallFlags (length pkgs == 1)
  setenv "GHCJS_BOOTING" "1"
  let args = globalFlags ++ "install" : pkgs ++ flags
  checkInstallPlan pkgs args
  cabal_ args
-- check that Cabal is only going to install the packages we specified
-- uses somewhat fragile parsing of --dry-run output, find a better way
checkInstallPlan :: [Package] -> [Text] -> B ()
checkInstallPlan pkgs opts = do
  plan <- cabal (opts ++ ["-v2", "--dry-run"])
  when (hasReinstalls plan || hasUnexpectedInstalls plan || hasNewVersion plan) (err plan)
  where
    hasReinstalls = T.isInfixOf "(reinstall)" -- reject reinstalls
    hasNewVersion = T.isInfixOf "(new version)" -- only allow one version of each package during boot
    hasUnexpectedInstalls plan =
      let ls = filter ("(new package)" `T.isInfixOf`) (T.lines plan)
      in length ls /= length pkgs || not (all isExpected ls)
    -- a plan line is expected when its package name (version suffix
    -- stripped) occurs in the requested package list
    isExpected l
      | (w:_) <- T.words l, ps@(_:_) <- T.splitOn "-" w =
          any (T.intercalate "-" (init ps) `T.isInfixOf`) pkgs
      | otherwise = False
    err plan = failWith $ "unacceptable install plan, expecting exactly the following list of packages to be installed,\n" <>
                          "without reinstalls and only one version of each package in the database:\n\n" <>
                          T.unlines (map (" - " <>) pkgs) <> "\nbut got:\n\n" <> plan
-- | Global cabal flags: use our own boot config file, ignore sandboxes.
cabalGlobalFlags :: B [Text]
cabalGlobalFlags = do
  instDir <- view (beLocations . blGhcjsTopDir)
  return [ "--config-file", toTextI (instDir </> "cabalBootConfig")
         , "--ignore-sandbox"
         ]
-- | Per-install cabal flags, derived from the boot settings.
-- @parmakeGhcjs@: when installing a single package, pass -j to GHCJS
-- itself instead of to cabal.
cabalInstallFlags :: Bool -> B [Text]
cabalInstallFlags parmakeGhcjs = do
  debug <- view (beSettings . bsDebug)
  v <- view (beSettings . bsVerbosity)
  j <- view (beSettings . bsJobs)
  ghcjs <- view (bePrograms . bpGhcjs)
  ghcjsPkg <- view (bePrograms . bpGhcjsPkg)
  instDir <- view (beLocations . blGhcjsTopDir)
  prof <- view (beSettings . bsProf)
  haddock <- view (beSettings . bsHaddock)
  return $ [ "--global"
           , "--ghcjs"
           , "--one-shot"
           , "--avoid-reinstalls"
           , "--builddir", "dist"
           , "--with-compiler", ghcjs ^. pgmLocText
           , "--with-hc-pkg", ghcjsPkg ^. pgmLocText
           , "--prefix", toTextI instDir
           , bool haddock "--enable-documentation" "--disable-documentation"
           , "--haddock-html"
           -- workaround for hoogle support being broken in haddock for GHC 7.10RC1
#if !(__GLASGOW_HASKELL__ >= 709)
           , "--haddock-hoogle"
#endif
           , "--haddock-hyperlink-source"
           -- don't slow down Windows builds too much, on other platforms we get this more
           -- or less for free, thanks to dynamic-too
#ifndef WINDOWS
           , "--enable-shared"
#endif
           , bool prof "--enable-library-profiling" "--disable-library-profiling"
           ] ++
           bool isWindows [] ["--root-cmd", toTextI (instDir </> "run" <.> "sh")] ++
           -- workaround for Cabal bug?
           bool isWindows ["--disable-executable-stripping", "--disable-library-stripping"] [] ++
           catMaybes [ (((bool parmakeGhcjs "--ghcjs-options=-j" "-j")<>) . showT) <$> j
                     , bj debug "--ghcjs-options=-debug"
                     , bj (v > info) "-v2"
                     ]
-- run the ./configure script in the current working directory
configure_ = run_ (Program "configure" "./configure" Nothing (Just "./configure") [])
#ifdef WINDOWS
-- on Windows, autoreconf is invoked through bash
autoreconf_ = runE_ bpBash ["autoreconf"]
#else
autoreconf_ = runE_ bpAutoreconf []
#endif
runMake_ = runE_ bpMake
-- log and swallow any synchronous exception from the action
ignoreExcep a = a `catchAny` (\e -> msg info $ "ignored exception: " <> showT e)
-- | Resolve a stage's conditional package list against the current
-- quick-boot setting.
stagePackages :: Getter BootStages Stage -> B [Package]
stagePackages l = do
  quick <- view (beSettings . bsQuick)
  condPkgs <- view (beStages . l)
  return (resolveConds quick condPkgs)
-- | Run the action only when the monadic condition yields 'True'.
whenM :: Monad m => m Bool -> m () -> m ()
whenM c m = do
  b <- c
  when b m
-- | Run the action only when the monadic condition yields 'False'.
unlessM :: Monad m => m Bool -> m () -> m ()
unlessM c m = do
  b <- c
  unless b m
make :: FilePath -- ^ target, build this file if not exists
     -> [FilePath] -- ^ also build if any of these is newer than the target (ignored if they don't exist)
     -> B () -- ^ action to run for building
     -> B ()
make tgt deps m = mtime tgt >>= \case
  Nothing -> m
  -- rebuild when any existing dependency is newer than the target
  Just tm -> whenM (any (>tm) . catMaybes <$> mapM mtime deps) m
-- | Print a fatal error message and terminate the process.
failWith :: MonadIO m => Text -> m a
failWith err = liftIO (T.putStrLn ("fatal: " <> err) >> exitFailure)
-- | Modification time of a file, or 'Nothing' when it cannot be read.
mtime :: FilePath -> B (Maybe UTCTime)
mtime p = do
  p' <- absPath p
  fmap Just (liftIO $ getModified p') `catchAny_` return Nothing
-- | Size of a file in bytes.
filesize :: FilePath -> B Integer
filesize file = absPath file >>= liftIO . getSize
-- | like 'install', but required: exits with a fatal error when no
-- source could be installed
install' :: Text -> FilePath -> [Text] -> B ()
install' descr dest srcs = void (install True descr dest srcs)
-- | install some files, from multiple sources with fallback
install :: Bool -- ^ install is required, exit with a panic if it didn't succeed
        -> Text -- ^ description, for progress output
        -> FilePath -- ^ destination
        -> [Text] -- ^ sources, can be tar files, directories, tar.gz / tar.xz works with external tools
        -> B Bool -- ^ whether installation was succesful
install req descr dest []
  | req = failWith ("cannot install " <> descr <> " to " <> toTextI dest <> " , no more sources")
  | otherwise = return False
install req descr dest (s:ss)
  -- http source: download to a temp dir, then retry with the local file
  | "http://" `T.isPrefixOf` s = withTmpDir $
      \t -> let file = t </> fromText (last $ T.split (=='/') s)
            in fetch descr file s >>= cond
                 (install req descr dest (toTextI file:ss))
                 (msg warn ("could not fetch " <> s <> ", trying next source") >> install req descr dest ss)
  | otherwise = do
      let s' = fromText s
      d <- test_d s'
      -- directory source: copy its contents recursively
      if d then do
        msg info ("installing " <> descr <> ", copying directory: " <> s <> " -> " <> toTextI dest)
        mkdir_p dest >> ls s' >>= mapM_ (\file -> cp_r (s' </> filename file) (dest </> filename file))
        return True
      else do
        f <- test_f s'
        if f then do
          size <- filesize s'
          -- empty archives are placeholders (dev checkouts); skip them
          if size == 0
            then do
              isDev <- view (beSettings . bsDev)
              if isDev
                then msg info ("source " <> s <> " for " <> descr <> " is empty, trying next")
                else msg warn $ T.unlines
                  [ "Archive file " <> s <> " for " <> descr <> " is empty."
                  , "You might be missing the required cache archives for doing a release build."
                  , "Use `ghcjs-boot --dev' if you installed GHCJS from a Git repository."
                  ]
              install req descr dest ss
            else installArchive True descr dest s' >> return True
        else do
          msg trace ("source " <> s <> " for " <> descr <> " does not exist, trying next")
          install req descr dest ss
-- | install files from an archive
installArchive :: Bool
-> Text
-> FilePath
-> FilePath
-> B ()
installArchive stripFirst descr dest src
| suff ".tar" = do
msg info ("installing " <> descr <> " unpacking tar (internal) " <> s <> " -> " <> d)
unpackTar stripFirst dest src
| suff ".tar.gz" = m "tar.gz" >> untar "-xzf"
| suff ".tar.bz2" = m "tar.bz2" >> untar "-xjf"
| suff ".tar.xz" = m "tar.xz" >> untar "-xJf"
| otherwise = failWith ("unknown archive type installing " <> descr <> ": " <> s)
where
m e = msg info ("installing " <> descr <> " unpacking " <> e <> " " <> s <> " -> " <> d)
suff e = e `T.isSuffixOf` s
d = toTextI dest
s = toTextI src
str = bool stripFirst ["--strip-components=1"] []
untar o = sub (absPath src >>= \as -> mkdir_p dest >> cd dest >> tar_ ([o, msysPath as] ++ str))
-- | download a file over HTTP
fetch :: Text -- ^ description
-> FilePath -- ^ target
-> Text -- ^ url to download
-> B Bool -- ^ True if the file was downloaded succesfully
fetch descr dest url
| Just u <- parseURI (T.unpack url) = do
msg info ("installing " <> descr <> ", downloading " <> url <> " -> " <> toTextI dest)
liftIO (download u) >>= \case
Nothing -> return False
Just r | (2,_,_) <- rspCode r -> do
msg info ("finished downloading, status " <> (T.pack . show . rspCode $ r) <> " writing file")
writeBinary dest (BL.fromStrict $ rspBody r)
return True
| otherwise -> do
msg info ("file not downloaded, status " <> (T.pack . show . rspCode $ r))
return False
| otherwise = return False
where
download :: URI -> IO (Maybe (Response B.ByteString))
download u =
(Just . snd <$> (Br.browse $ Br.setAllowRedirects True >> Br.request (mkRequest GET u)))
`Ex.catch` \(Ex.SomeException _) -> return Nothing
-- | initialize our boot environment by reading the configuration files, finding all programs
initBootEnv :: BootSettings -> IO BootEnv
initBootEnv bs = do
  dataDir <- bootDataDir bs
  env <- (traverse . both %~ T.pack) <$> getEnvironment
  -- substitute some values in our config files
  let subst = [ ("datadir", toTextI dataDir)
              , ("version", T.pack Info.getCompilerVersion)
              ]
      substituteConfig c = c & template %~ substText
                             & template . iso toTextI fromText %~ substText
      substText = Utils.substPatterns subst env
  BootConfigFile stgs srcs pgms1 <- substituteConfig <$> readBootConfigFile bs
  let srcs' = configureBootSources bs srcs
  pgms2 <- configureBootPrograms bs srcs' pgms1
  locs <- configureBootLocations bs pgms2
  return (BootEnv bs srcs' locs pgms2 stgs)
-- | configure the sources
--   command-line settings override the repository/branch defaults
--   from the configuration file
configureBootSources :: BootSettings -> BootSources -> BootSources
configureBootSources bs srcs =
  srcs & bsrcShimsDev %~ override (const . (:[])) bsShimsDevRepo
       & bsrcShimsDevBranch %~ override const bsShimsDevBranch
       & bsrcBootDev %~ override (const . (:[])) bsBootDevRepo
       & bsrcBootDevBranch %~ override const bsBootDevBranch
  -- only override when the command line setting is present
  where override f l = maybe id f (bs^.l)
-- | configure the locations
--   queries the configured ghc/ghcjs executables for their directories
configureBootLocations :: BootSettings
                       -> BootPrograms
                       -> IO BootLocations
configureBootLocations bs pgms = do
  ghcLibDir <- fromText . T.strip <$> run' bs (pgms ^. bpGhc) ["--print-libdir"]
  ghcjsLibDir <- fromText . T.strip <$> run' bs (pgms ^. bpGhcjs) ["--ghcjs-booting-print", "--print-libdir"]
  ghcjsTopDir <- fromText . T.strip <$> run' bs (pgms ^. bpGhcjs) ["--ghcjs-booting-print", "--print-topdir"]
  globalDB <- fromText . T.strip <$> run' bs (pgms ^. bpGhcjs) ["--ghcjs-booting-print", "--print-global-db"]
  userDBT <- T.strip <$> run' bs (pgms ^. bpGhcjs) ["--ghcjs-booting-print", "--print-user-db-dir"]
  nativeToo <- (=="True") . T.strip <$> run' bs (pgms ^. bpGhcjs) ["--ghcjs-booting-print", "--print-native-too"]
  when (T.null (toTextI ghcjsLibDir)) $
    failWith ("Could not determine GHCJS library installation path.\n" <>
              "Make sure that the ghcjs wrapper script (or options file on Windows) " <>
              "has been set up correctly.")
  return $ BootLocations ghcjsTopDir ghcjsLibDir ghcLibDir globalDB
             -- "<none>" means no user package db ('bool' is condition-first)
             (bool (userDBT == "<none>") Nothing (Just $ fromText userDBT))
             nativeToo
-- | build the program configuration and do some sanity checks
configureBootPrograms :: BootSettings -- ^ command line settings
                      -> BootSources
                      -> BootPrograms -- ^ default programs from config file
                      -> IO BootPrograms -- ^ configured programs
configureBootPrograms bs srcs pgms0 = do
  -- first replace all defaults with the overrides from the command line
  let r l = maybe id (pgmSearch .~) (bs ^. l)
      -- traversals over all optional / required programs
      tpo = template :: Traversal' BootPrograms (Program Optional)
      tpr = template :: Traversal' BootPrograms (Program Required)
      -- prefix the ghcjs executables with the --with-ghcjs-bin directory
      binPrefix pgms pfx =
        let addBin p = p . pgmSearch . iso fromText toTextI %~ (pfx</>)
        in pgms & addBin bpGhcjs
                & addBin bpGhcjsPkg
                & addBin bpGhcjsRun
      pgms1 = maybe pgms0 (binPrefix pgms0 . fromText) (bs ^. bsWithGhcjsBin)
      pgms2 = pgms1 & bpGhcjs %~ r bsWithGhcjs
                    & bpGhcjsPkg %~ r bsWithGhcjsPkg
                    & bpGhcjsRun %~ r bsWithGhcjsRun
                    & bpGhc %~ r bsWithGhc
                    & bpGhcPkg %~ r bsWithGhcPkg
                    & bpCabal %~ r bsWithCabal
                    & bpNode %~ r bsWithNode
  -- resolve all programs
  pgms3 <- mapMOf tpo (resolveProgram bs) =<< mapMOf tpr (resolveProgram bs) pgms2
  traverseOf_ tpr (reportProgramLocation bs) pgms3
  traverseOf_ tpo (reportProgramLocation bs) pgms3
  pgms4 <- checkProgramVersions bs pgms3
  checkCabalSupport bs pgms4
  return pgms4
-- | resolves program
--   searches for the configured executable name; a relative search path
--   with more than one component is additionally tried relative to the
--   current working directory. Required programs that cannot be found
--   abort the boot.
resolveProgram :: MaybeRequired (Program a) => BootSettings -> Program a -> IO (Program a)
resolveProgram bs pgm = do
  let search' = pgm ^. pgmSearch . to fromText
  absSearch <- (</> search') <$> getWorkingDirectory
  let searchPaths = catMaybes [ Just search'
                              , bj (relative search' && length (splitDirectories search') > 1) absSearch
                              ]
  fmap catMaybes (mapM (findExecutable . encodeString) searchPaths) >>= \case
    -- first hit wins; canonicalization is intentionally disabled
    (p':_) -> (\cp -> pgm & pgmLoc .~ Just cp) <$> {- canonicalizePath -} return (fromString p')
    _ | isRequired pgm -> failWith ("program " <> pgm ^. pgmName <>
                                    " is required but could not be found at " <> pgm ^. pgmSearch)
      | otherwise -> return (pgm & pgmLoc .~ Nothing)
-- | report location of a configured program
reportProgramLocation :: BootSettings -> Program a -> IO ()
reportProgramLocation bs p
  | Just l <- p ^. pgmLoc = msg' bs info ("program " <> p ^. pgmName <> " found at " <> toTextI l)
  | otherwise = msg' bs info ("program " <> p ^. pgmName <> " NOT found, searched for " <> p ^. pgmSearch)
-- | check that the GHC, ghcjs and ghcjs-pkg we're using are the correct version
checkProgramVersions :: BootSettings -> BootPrograms -> IO BootPrograms
checkProgramVersions bs pgms = do
  -- each entry: (program lens, version flag, expected version, store result in pgmVersion?)
  pgms' <- foldrM verifyVersion pgms
    [ (bpGhcjs, "--numeric-version", Just Info.getCompilerVersion, True)
    , (bpGhcjs, "--numeric-ghc-version", Just Info.getGhcCompilerVersion, False)
    , (bpGhc, "--numeric-version", Just Info.getGhcCompilerVersion, True)
    , (bpGhcjsPkg, "--numeric-ghcjs-version", Nothing, True)
    , (bpGhcjsPkg, "--numeric-ghc-version", Just Info.getGhcCompilerVersion, False)
    , (bpCabal, "--numeric-version", Nothing, True)
    , (bpNode, "--version", Nothing, True)
    ]
  verifyNotProfiled
  verifyNodeVersion pgms'
  where
    -- check currently disabled, see commented-out code below
    verifyNotProfiled :: IO ()
    verifyNotProfiled = return ()
    -- res <- T.strip <$> run' bs (pgms ^. bpGhcjs) ["--ghcjs-booting-print", "--print-rts-profiled"]
    -- when (res /= "False") $ failWith ("GHCJS program " <> pgms ^. bpGhcjs . pgmLocText <>
    -- " has been installed with executable profiling.\n" <>
    -- "You need a non-profiled executable to boot")
    verifyVersion :: (Lens' BootPrograms (Program a), Text, Maybe String, Bool) -> BootPrograms -> IO BootPrograms
    verifyVersion (l, arg :: Text, expected :: Maybe String, update :: Bool) ps = do
      res <- T.strip <$> run' bs (ps ^. l) [arg]
      case expected of
        Nothing -> return ()
        -- note: 'exp' shadows Prelude.exp here
        Just exp -> when (res /= T.pack exp) $
          failWith ("version mismatch for program " <> ps ^. l . pgmName <> " at " <> ps ^. l . pgmLocText
                    <> ", expected " <> T.pack exp <> " but got " <> res)
      return $ (if update then (l . pgmVersion .~ Just res) else id) ps
    verifyNodeVersion pgms = do
      let verTxt = fromMaybe "-" (pgms ^. bpNode . pgmVersion)
          -- parse "v1.2.3-pre" style strings into Just [1,2,3]
          v = mapM (readMaybe . T.unpack . T.dropWhile (== 'v')) . T.splitOn "." . T.takeWhile (/='-') $ verTxt :: Maybe [Integer]
      case v of
        Just (x:y:z:_)
          -- minimum supported node.js version is 0.10.28
          | x > 0 || y > 10 || (y == 10 && z >= 28) -> return pgms
          | otherwise -> failWith ("minimum required version for node.js is 0.10.28, found: " <> verTxt)
        _ -> failWith ("unrecognized version for node.js: " <> verTxt)
-- | check that cabal-install supports GHCJS and that our boot-GHC has a Cabal library that supports GHCJS
checkCabalSupport :: BootSettings -> BootPrograms -> IO ()
checkCabalSupport bs pgms = do
  cbl <- run' bs (pgms ^. bpCabal) ["install", "--help"]
  when (not $ "--ghcjs" `T.isInfixOf` cbl) $
    failWith ("cabal-install program " <> pgms ^. bpCabal . pgmLocText <> " does not support GHCJS")
  -- probe the Cabal library by parsing the GHCJS CompilerFlavor with ghc -e;
  -- a failing evaluation means the installed Cabal predates GHCJS support
  void (run' bs (pgms ^. bpGhc) ["-e", "either error id (Text.Read.readEither \"GHCJS\" :: Either String Distribution.Simple.CompilerFlavor)"]) `Ex.catch`
    \(Ex.SomeException _) -> failWith
      ("GHC program " <> pgms ^. bpGhc . pgmLocText <> " does not have a Cabal library that supports GHCJS\n" <>
       "(note that the Cabal library is not the same as the cabal-install program, you need a compatible version for both)")
-- | read the boot configuration yaml file
--   aborts the boot when the file cannot be parsed
readBootConfigFile :: BootSettings -> IO BootConfigFile
readBootConfigFile bs = do
  bf <- bootConfigFile bs
  msgD' bs trace ("reading file " <> toTextI bf)
  b <- B.readFile (toStringI bf)
  case Yaml.decodeEither b of
    Left err -> failWith ("error parsing boot configuration file " <> toTextI bf <> "\n" <> T.pack err)
    Right bss -> return bss
-- | print a human-readable summary of the boot environment to stdout;
--   the 'after' flag only changes the wording (will install / has installed)
printBootEnvSummary :: Bool -> BootEnv -> IO ()
printBootEnvSummary after be = do
  section "Boot libraries installation for GHCJS" $ do
    bootLoc <- getExecutablePath
    bootMod <- getModified (fromString bootLoc)
    bootConf <- bootConfigFile (be ^. beSettings)
    ghcjsMod <- maybe (return "<unknown>") (fmap show . getModified) (be ^. bePrograms . bpGhcjs . pgmLoc)
    curDir <- getWorkingDirectory
    -- 'bool' is condition-first: first list when 'after', second otherwise
    p $ bool after
      ["ghcjs-boot has installed the libraries and runtime system for GHCJS"]
      ["ghcjs-boot will install the libraries and runtime system for GHCJS"]
    h "boot program"
    t "rl" [["ghcjs-boot program version", Info.getCompilerVersion]
           ,["file location", bootLoc]
           ,["last modified", show bootMod],[]
           ,["using configuration file", toStringI bootConf]
           ,["current directory", toStringI curDir]
           ]
    h "boot configuration"
    t "rl" [["installation directory", path $ beLocations . blGhcjsTopDir]
           ,["global package DB", path $ beLocations . blGlobalDB]
           ,["user package DB location", path $ beLocations . blUserDBDir . to (fromMaybe "<none>")],[]
           ,["GHCJS version", ver "<unknown>" bpGhcjs]
           ,["program location", loc bpGhcjs]
           ,["library path", path $ beLocations . blGhcjsLibDir]
           ,["last modified", ghcjsMod],[]
           ,["GHC version", ver "<unknown>" bpGhc]
           ,["location", loc bpGhc]
           ,["library path", path $ beLocations . blGhcLibDir],[]
           ,["cabal-install version", ver "<unknown>" bpCabal]
           ,["location", loc bpCabal],[]
           ,["ghcjs-pkg version", ver "<unknown>" bpGhcjsPkg]
           ,["location", loc bpGhcjsPkg],[]
           ,["quick boot", y isQuick]
           ,["clean tree first", be ^. beSettings . bsClean . to y]
           ,["development boot", y isDev]
           ,["native too", be ^. beLocations . blNativeToo . to y]
           ]
    h "packages"
    p ["stage 1a"] >> l (stg bstStage1a)
    p ["ghcjs-prim: " ++ be ^. beStages . bstGhcjsPrim . to str]
    p ["stage 1b"] >> l (stg bstStage1b)
    when (not isQuick) $ do
      p ["Cabal: " ++ be ^. beStages . bstCabal . to str]
      p ["stage 2"] >> l (stg bstStage2)
  section "Configured programs" $ do
    t "hlll" $ ["program", "version", "location"] :
      be ^.. bePrograms . (template :: Traversal' BootPrograms (Program Required)) . to pgm ++
      be ^.. bePrograms . (template :: Traversal' BootPrograms (Program Optional)) . to pgm
  section "Installation sources" $ do
    t "rl" $ concatMap (\(t,l) -> [t,""] : be ^.. beSources . l . traverse . to (\x->["",str x]) ++ [["",""]])
      [("shims (runtime system)", bsrcShims), ("boot libraries", bsrcBoot), ("test suite", bsrcTest), ("configuration files", bsrcEtc), ("documentation", bsrcDoc)] ++
      [["bootstrap GHC library path",""],["", path $ beLocations . blGhcLibDir]]
    when isWindows $ do
      h "Windows development tools"
      t "rl" $ ["development tools",""] : be ^.. beSources . bsrcBuildtoolsWindows . traverse . to (\x->["",str x]) ++
        ["bootstrap package",""] : be ^.. beSources . bsrcBuildtoolsBootWindows . traverse . to (\x->["",str x])
    when (isDev) $ do
      h "development source repositories"
      p ["shims (" ++ be ^. beSources . bsrcShimsDevBranch . to str ++ ")"]
      l (be ^.. beSources . bsrcShimsDev . traverse . to str)
      p ["ghcjs-boot (" ++ be ^. beSources . bsrcBootDevBranch . to str ++ ")"]
      l (be ^.. beSources . bsrcBootDev . traverse . to str)
  where
    stg s = be ^.. beStages . s . to (resolveConds isQuick) . traverse . to str
    isDev = be ^. beSettings . bsDev
    isQuick = be ^. beSettings . bsQuick
    -- h = heading, p = paragraph, l = bullet list, b = blank line
    h xs = b >> mapM_ (putStrLn . indent 2) [xs, replicate (length xs) '-'] >> b
    p xs = mapM_ (putStrLn . indent 3) xs >> b
    l xs = mapM_ (putStrLn . indent 3 . ("- "++)) xs >> b
    -- render a table; alignment string: optional leading 'h' marks a header
    -- row, then one 'l'/'r' character per column
    t :: String -> [[String]] -> IO ()
    t aln xxs = let colWidths = map (foldl' (\m xs -> max m (length xs)) 0) (transpose xxs)
                    (colAlign,hdr) = case aln of
                      ('h':a) -> (a, True)
                      a -> (a, False)
                    colSep = replicate 3 ' ' -- NOTE(review): unused, cols uses sp 3 directly
                    cell w a xs = let pad = sp (w - length xs) in if a == 'r' then pad ++ xs else xs ++ pad
                    cols xs = sp 3 ++ intercalate (sp 3) xs
                    row xs = cols (zipWith3 cell colWidths colAlign xs)
                in case (xxs, hdr) of
                  (x:ys, True) -> putStrLn (row x) >> putStrLn (cols $ map sp colWidths) >> mapM_ (putStrLn . row) ys >> b
                  _ -> mapM_ (putStrLn . row) xxs
    b = putStrLn ""
    sp n = replicate n ' '
    indent n xs = sp n ++ xs
    sep = putStrLn (replicate 75 '=')
    y b = if b then "Yes" else "No"
    section :: String -> IO () -> IO ()
    section t a = b >> b >> sep >> b >> p [t] >> sep >> b >> a >> b
    ver d l = be ^. bePrograms . l . pgmVersion . to (maybe d T.unpack)
    loc l = be ^. bePrograms . l . pgmLocString
    path l = be ^. l . to toStringI
    str = T.unpack
    pgm x = [x ^. pgmName . to str, maybe "-" T.unpack (x ^. pgmVersion) , x ^. pgmLocString]
-- | boot.yaml
--   location of the boot configuration file; the command line setting
--   takes precedence, otherwise it lives under the data directory
bootConfigFile :: BootSettings -> IO FilePath
bootConfigFile bs
  | Just bsf <- bs ^. bsWithConfig = return (fromText bsf)
  | otherwise = (</> ("lib" </> "etc" </> "boot" <.> "yaml")) <$> bootDataDir bs
-- | data directory for ghcjs-boot, overridable from the command line
bootDataDir :: BootSettings -> IO FilePath
bootDataDir bs
  | Just dd <- bs ^. bsWithDataDir = return (fromText dd)
  | otherwise = fromString <$> Info.ghcjsBootDefaultDataDir
-- | our boot monad, we wrap around shelly but with a config environment
-- shelly commands are wrapped with logging
type B = ReaderT BootEnv Sh.Sh
-- | run a 'B' action with the given boot environment
runB :: BootEnv -> B a -> Sh.Sh a
runB e b = runReaderT b e
-- | log a message to stdout; IO version for use before the B monad exists
msg' :: BootSettings -> Verbosity -> Text -> IO ()
msg' bs v t = when (bs ^. bsVerbosity >= v) (T.putStrLn t)
-- | log a message at the given verbosity level
msg :: Verbosity -> Text -> B ()
msg v t =
  view beSettings >>= \s -> when (s ^. bsVerbosity >= v) $ lift (Sh.echo t)
-- | log a message printing the current directory
msgD :: Verbosity -> Text -> B ()
msgD v t = pwd >>= \p -> msg v (toTextI p <> "$ " <> t)
-- | IO version of 'msgD'
msgD' :: BootSettings -> Verbosity -> Text -> IO ()
msgD' bs v t = getWorkingDirectory >>= \p -> msg' bs v (toTextI p <> "$ " <> t)
-- | path concatenation, shadowing the Prelude-visible operator with shelly's
(</>) :: FilePath -> FilePath -> FilePath
(</>) = (Sh.</>)
{-
   lifted versions of the shelly operations we need. everything that
   makes externally visible changes is logged at the info (-v2)
   verbosity level.
   internal changes and file reads are logged at trace (-v3) level.
-}
-- filesystem operations, logged at info level (externally visible changes)
ls = lift . Sh.ls
mkdir p = msgD info ("mkdir " <> toTextI p) >> lift (Sh.mkdir p)
mkdir_p p = msgD info ("mkdir_p " <> toTextI p) >> lift (Sh.mkdir_p p)
cp f t = msgD info ("cp " <> toTextI f <> " -> " <> toTextI t) >> lift (Sh.cp f t)
cp_r f t = msgD info ("cp_r " <> toTextI f <> " -> " <> toTextI t) >> lift (Sh.cp_r f t)
rm_f p = msgD info ("rm_f " <> toTextI p) >> lift (Sh.rm_f p)
rm_rf p = msgD info ("rm_rf " <> toTextI p) >> lift (Sh.rm_rf p)
cd p = msgD trace ("cd " <> toTextI p) >> lift (Sh.cd p)
sub = liftE Sh.sub
test_d = lift . Sh.test_d
test_f = lift . Sh.test_f
test_s = lift . Sh.test_s
-- running programs: prepend the program's configured arguments to xs
run p xs = msgD info (traceRun p xs) >> requirePgmLoc p >>= \loc -> lift (Sh.run loc (p ^. pgmArgs ++ xs))
run_ p xs = msgD info (traceRun p xs) >> requirePgmLoc p >>= \loc -> lift (Sh.run_ loc (p ^. pgmArgs ++ xs))
readBinary p = msgD trace ("reading " <> toTextI p) >> lift (Sh.readBinary p)
canonic = lift . Sh.canonic
absPath = lift . Sh.absPath
pwd = lift Sh.pwd
silently = liftE Sh.silently
verbosely = liftE Sh.verbosely
tracing b = liftE (Sh.tracing b)
findWhen f p = ask >>= \e -> lift (Sh.findWhen (runB e . f) p)
errorExit = lift . Sh.errorExit
writefile p t = msgD info ("writing " <> toTextI p) >> lift (Sh.writefile p t)
appendfile p t = msgD info ("appending " <> toTextI p) >> lift (Sh.appendfile p t)
readfile p = msgD trace ("reading " <> toTextI p) >> lift (Sh.readfile p)
withTmpDir = liftE2 Sh.withTmpDir
-- exception handling threaded through the reader environment
catchAny a h = ask >>= \e -> lift (Sh.catchany_sh (runReaderT a e) (\ex -> runReaderT (h ex) e))
catchAny_ a h = catchAny a (\_ -> h)
setenv e v = lift (Sh.setenv e v)
get_env = lift . Sh.get_env
setStdin = lift . Sh.setStdin
canonicalize = lift . Sh.canonicalize
relativeTo e = lift . (Sh.relativeTo e)
-- | lift a shelly action transformer into the B monad
liftE :: (Sh.Sh a -> Sh.Sh a) -> B a -> B a
liftE s m = ask >>= \e -> lift (s $ runB e m)
-- | lift a shelly continuation-style action into the B monad
liftE2 :: ((a -> Sh.Sh b) -> Sh.Sh b) -> (a -> B b) -> B b
liftE2 s f = ask >>= \e -> lift (s $ runB e . f)
-- | render a program invocation for the log
traceRun :: Program a -> [Text] -> Text
traceRun p xs = "[" <> p ^. pgmName <> "]: " <> p ^. pgmLocText <> " " <> T.intercalate " " (map (showT . T.unpack) xs)
-- | add a checkpoint to the file
addCheckpoint :: Text -> B ()
addCheckpoint name = unlessM (hasCheckpoint name) $ do
  mkdir_p =<< view (beLocations . blGhcjsTopDir)
  flip appendfile (name <> "\n") =<< checkpointFile
-- | check whether we have passed a checkpoint. this reads the
-- whole checkpoints file so use sparingly
hasCheckpoint :: Text -> B Bool
hasCheckpoint name =
  -- a missing/unreadable checkpoint file just means "no checkpoint"
  (((name `elem`) . map T.strip . T.lines) <$> (readfile =<< checkpointFile)) `catchAny`
  \e -> msg warn ("no checkpoint " <> name <> " because of " <> showT e) >> return False
-- | perform the action if the checkpoint does not exist,
-- add the checkpoint when the action completes without exceptions
checkpoint :: Text -> B () -> B ()
checkpoint name m = unlessM (hasCheckpoint name) (m <* addCheckpoint name)
-- | like 'checkpoint' but logs 'txt' when the checkpoint already exists
checkpoint' name txt m = hasCheckpoint name >>= cond (msg info txt) (m <* addCheckpoint name)
-- | location of the checkpoints file, under the GHCJS top directory
checkpointFile :: B FilePath
checkpointFile =
  absPath . (</> ("ghcjs_boot" <.> "charlie")) =<< view (beLocations . blGhcjsTopDir)
-- | write the completed marker file, recording whether this was a
--   quick or a full boot
addCompleted :: B ()
addCompleted = do
  t <- cond "quick" "full" <$^> beSettings . bsQuick
  f <- completedFile
  writefile f t
-- | remove the completed marker
removeCompleted :: B ()
removeCompleted = rm_f =<< completedFile
-- | location of the completed marker file
completedFile :: B FilePath
completedFile =
  absPath . (</> ("ghcjs_boot" <.> "completed")) =<< view (beLocations . blGhcjsTopDir)
-- | resolved location of a program, failing with a diagnostic message
--   when the program was not found during configuration
requirePgmLoc :: Program a -> B FilePath
requirePgmLoc p
  | Just loc <- p ^. pgmLoc = return loc
  | otherwise = do
      -- search in original path, where we configured the programs. the shelly path might be local
      path <- fromMaybe "" <$> liftIO (Utils.getEnvMay "PATH")
      failWith $ "program " <> p ^. pgmName <> " is required but was not found\n" <>
                 "  name searched for (from boot.yaml or command line): " <> p ^. pgmSearch <> "\n" <>
                 "  searched in PATH:\n" <> T.pack path
-- | run a program and capture its stdout; IO version used during
--   configuration, before the B monad environment exists
run' :: BootSettings -> Program a -> [Text] -> IO Text
run' bs p xs = do
  msgD' bs info (traceRun p xs)
  (e, out, _err) <- readProcessWithExitCode (p ^. pgmLocString) (map T.unpack xs) ""
  when (e /= ExitSuccess) (failWith $ "program " <> p ^. pgmLocText <> " returned a nonzero exit code")
  return (T.pack out)
-- | reduces verbosity of the action to the specified level
--   ('min v' only caps the verbosity, it never raises it)
quieter :: Verbosity -> B a -> B a
quieter v = local $ over (beSettings . bsVerbosity) (min v)
-- | FilePath to Text, ignoring encoding errors
toTextI :: FilePath -> Text
toTextI = Sh.toTextIgnore
-- | build a FilePath from a plain String
fromString :: String -> FilePath
fromString = fromText . T.pack
toStringI :: FilePath -> String
toStringI = T.unpack . toTextI
-- | program location as Text, "<not found>" when unresolved
pgmLocText :: Getter (Program a) Text
pgmLocText = pgmLoc . to (maybe "<not found>" toTextI)
pgmLocString :: Getter (Program a) String
pgmLocString = pgmLocText . to T.unpack
showT :: Show a => a -> Text
showT = T.pack . show
-- | Condition-first selection: yields the first alternative when the
-- condition holds, the second otherwise. (Note the argument order differs
-- from Data.Bool.bool, which takes the condition last.)
bool :: Bool -> a -> a -> a
bool True t _ = t
bool False _ f = f
-- | Selection with the condition last, convenient for point-free use
-- after a monadic bind: @action >>= cond onTrue onFalse@.
cond :: a -> a -> Bool -> a
cond t _ True = t
cond _ f False = f
-- | \"bool-just\": wrap the value in 'Just' when the condition holds,
-- 'Nothing' otherwise.
bj :: Bool -> a -> Maybe a
bj True v = Just v
bj False _ = Nothing
infixl 2 <^>
-- | bind a monadic function to a value read from the environment
(<^>) :: MonadReader s m => (a -> m b) -> Getting a s a -> m b
(<^>) m l = m =<< view l
infixl 3 <*^>
-- | apply an effectful function to a value read from the environment
(<*^>) :: (Applicative m, MonadReader s m) => (m (a -> b)) -> Getting a s a -> m b
(<*^>) f l = f <*> view l
infixl 3 <<*^>
-- | like '<*^>' but the function returns a monadic result, which is joined
(<<*^>) :: (Applicative m, MonadReader s m) => (m (a -> m b)) -> Getting a s a -> m b
(<<*^>) f l = join (f <*> view l)
infixl 4 <$^>
-- | map a pure function over a value read from the environment
(<$^>) :: (Functor m, MonadReader s m) => (a -> b) -> Getting a s a -> m b
(<$^>) f l = f <$> view l
| tavisrudd/ghcjs | src-bin/Boot.hs | mit | 81,369 | 1,314 | 39 | 24,649 | 21,299 | 11,163 | 10,136 | -1 | -1 |
{-| Implementation of Utility functions for storage
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Storage.Utils
( getStorageUnitsOfNode
, nodesWithValidConfig
) where
import Ganeti.Config
import Ganeti.Objects
import Ganeti.Types
import qualified Ganeti.Types as T
import Control.Monad
import Data.List (nub)
import Data.Maybe
-- | Get the cluster's default storage unit for a given disk template
--
-- DRBD and plain disks use the cluster volume group; file storage uses
-- the cluster file storage directory; all other templates have no
-- default storage key.
getDefaultStorageKey :: ConfigData -> DiskTemplate -> Maybe StorageKey
getDefaultStorageKey cfg T.DTDrbd8 = clusterVolumeGroupName $ configCluster cfg
getDefaultStorageKey cfg T.DTPlain = clusterVolumeGroupName $ configCluster cfg
getDefaultStorageKey cfg T.DTFile =
  Just (clusterFileStorageDir $ configCluster cfg)
getDefaultStorageKey _ _ = Nothing
-- | Get the cluster's default spindle storage unit
getDefaultSpindleSU :: ConfigData -> (StorageType, Maybe StorageKey)
getDefaultSpindleSU cfg =
    (T.StorageLvmPv, clusterVolumeGroupName $ configCluster cfg)
-- | Get the cluster's storage units from the configuration
--
-- One unit per enabled disk template (deduplicated) plus the spindle
-- unit; units without a storage key are dropped by 'foldSUs'.
getClusterStorageUnitRaws :: ConfigData -> [StorageUnitRaw]
getClusterStorageUnitRaws cfg =
  foldSUs (nub (maybe_units ++ [spindle_unit]))
  where disk_templates = clusterEnabledDiskTemplates $ configCluster cfg
        storage_types = map diskTemplateToStorageType disk_templates
        maybe_units = zip storage_types (map (getDefaultStorageKey cfg)
          disk_templates)
        spindle_unit = getDefaultSpindleSU cfg
-- | Fold the storage unit list by sorting out the ones without keys:
-- pairs whose key is 'Nothing' are dropped, the others become raw
-- storage units (input order is preserved).
foldSUs :: [(StorageType, Maybe StorageKey)] -> [StorageUnitRaw]
foldSUs units = [SURaw st sk | (st, Just sk) <- units]
-- | Gets the value of the 'exclusive storage' flag of the node
--
-- Returns 'Nothing' when the node's ndparams cannot be resolved.
-- ('fmap' replaces the legacy 'liftM', which is just its monad-specific
-- synonym; behavior is unchanged.)
getExclusiveStorage :: ConfigData -> Node -> Maybe Bool
getExclusiveStorage cfg n = fmap ndpExclusiveStorage (getNodeNdParams cfg n)
-- | Determines whether a node's config contains an 'exclusive storage' flag
hasExclusiveStorageFlag :: ConfigData -> Node -> Bool
hasExclusiveStorageFlag cfg = isJust . getExclusiveStorage cfg
-- | Filter for nodes with a valid config
-- (i.e. nodes whose ndparams resolve, see 'getExclusiveStorage')
nodesWithValidConfig :: ConfigData -> [Node] -> [Node]
nodesWithValidConfig cfg = filter (hasExclusiveStorageFlag cfg)
-- | Get the storage units of the node
--
-- Precondition: the node must have a valid config (filter with
-- 'nodesWithValidConfig' first) -- 'fromJust' is partial and errors out
-- for nodes whose ndparams cannot be resolved.
getStorageUnitsOfNode :: ConfigData -> Node -> [StorageUnit]
getStorageUnitsOfNode cfg n =
  let clusterSUs = getClusterStorageUnitRaws cfg
      es = fromJust (getExclusiveStorage cfg n)
  in map (addParamsToStorageUnit es) clusterSUs
| onponomarev/ganeti | src/Ganeti/Storage/Utils.hs | bsd-2-clause | 3,811 | 0 | 11 | 606 | 545 | 290 | 255 | 42 | 2 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE PatternGuards, Safe #-}
module Cryptol.TypeCheck.Solver.Selector (tryHasGoal) where
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.InferTypes
import Cryptol.TypeCheck.Monad( InferM, unify, newGoals, lookupNewtype
, newType, applySubst, addHasGoal, solveHasGoal
)
import Cryptol.TypeCheck.Subst(listSubst,apSubst)
import Cryptol.Utils.PP(text,pp,ordinal,(<+>))
import Cryptol.Utils.Panic(panic)
import Control.Monad(forM,guard)
-- | A fresh record type: one unification variable per label.
recordType :: [Name] -> InferM Type
recordType labels =
  do fields <- forM labels $ \l ->
        do t <- newType (text "record field" <+> pp l) KType
           return (l,t)
     return (TRec fields)
-- | A fresh tuple type of the given width, one unification variable
-- per component.
tupleType :: Int -> InferM Type
tupleType n =
  do fields <- mapM (\x -> newType (ordinal x <+> text "tuple field") KType)
               [ 0 .. (n-1) ]
     return (tTuple fields)
-- | A fresh finite-sequence type of the given length with an unknown
-- element type.
listType :: Int -> InferM Type
listType n =
  do elems <- newType (text "sequence element type") KType
     return (tSeq (tNum n) elems)
-- | Unify the selected-from type with the shape implied by the selector
-- (record with given labels / tuple of given width / sequence of given
-- length, when that information is present in the selector). Returns the
-- coercion to apply to the selected expression: identity when unification
-- succeeded outright, a cast when it produced residual constraints.
improveSelector :: Selector -> Type -> InferM (Expr -> Expr)
improveSelector sel outerT =
  case sel of
    RecordSel _ mb -> cvt recordType mb
    TupleSel _ mb -> cvt tupleType mb
    ListSel _ mb -> cvt listType mb
  where
  cvt _ Nothing = return id
  cvt f (Just a) = do ty <- f a
                      cs <- unify ty outerT
                      case cs of
                        [] -> return id
                        _ -> do newGoals CtExactType cs
                                return (`ECast` ty)
{- | Compute the type of a field based on the selector.
The given type should be "zonked" (i.e., substitution was applied to it),
and (outermost) type synonyms have been expanded.
-}
solveSelector :: Selector -> Type -> InferM (Maybe Type)
solveSelector sel outerT =
  case (sel, outerT) of
    (RecordSel l _, ty) ->
      case ty of
        TRec fs -> return (lookup l fs)
        -- selection distributes pointwise over sequences and
        -- over the result of functions
        TCon (TC TCSeq) [len,el] -> liftSeq len el
        TCon (TC TCFun) [t1,t2] -> liftFun t1 t2
        -- newtypes: look up the field after substituting the type
        -- parameters, and emit the newtype's constraints as goals
        TCon (TC (TCNewtype (UserTC x _))) ts ->
          do mb <- lookupNewtype x
             case mb of
               Nothing -> return Nothing
               Just nt ->
                 case lookup l (ntFields nt) of
                   Nothing -> return Nothing
                   Just t ->
                     do let su = listSubst (zip (map tpVar (ntParams nt)) ts)
                        newGoals (CtPartialTypeFun $ UserTyFun x)
                          $ apSubst su $ ntConstraints nt
                        return $ Just $ apSubst su t
        _ -> return Nothing
    (TupleSel n _, ty) ->
      case ty of
        TCon (TC (TCTuple m)) ts ->
          -- out-of-range tuple index yields Nothing
          return $ do guard (0 <= n && n < m)
                      return $ ts !! n
        TCon (TC TCSeq) [len,el] -> liftSeq len el
        TCon (TC TCFun) [t1,t2] -> liftFun t1 t2
        _ -> return Nothing
    (ListSel n _, TCon (TC TCSeq) [l,t]) ->
      -- the sequence must be long enough: l + 1 >= n
      do newGoals CtSelector [ (l .+. tNum (1::Int)) >== tNum n ]
         return (Just t)
    _ -> return Nothing
  where
  liftSeq len el =
    do mb <- solveSelector sel el
       return $ do el' <- mb
                   return (TCon (TC TCSeq) [len,el'])
  liftFun t1 t2 =
    do mb <- solveSelector sel t2
       return $ do t2' <- mb
                   return (TCon (TC TCFun) [t1,t2'])
-- | Solve has-constraints.
--   Improves the outer type from the selector shape, computes the field
--   type, and records the selection (with any necessary casts) as the
--   goal's solution; if the field type cannot be determined yet the goal
--   is put back for later.
tryHasGoal :: HasGoal -> InferM ()
tryHasGoal has
  | TCon (PC (PHas sel)) [ th, ft ] <- goal (hasGoal has) =
    do outerCast <- improveSelector sel th
       outerT <- tNoUser `fmap` applySubst th
       mbInnerT <- solveSelector sel outerT
       case mbInnerT of
         Nothing -> addHasGoal has
         Just innerT ->
           do cs <- unify innerT ft
              innerCast <- case cs of
                             [] -> return id
                             _ -> do newGoals CtExactType cs
                                     return (`ECast` ft)
              solveHasGoal (hasName has) (innerCast . (`ESel` sel) . outerCast)
  | otherwise = panic "hasGoalSolved"
                  [ "Unexpected selector proposition:"
                  , show (hasGoal has)
                  ]
| iblumenfeld/cryptol | src/Cryptol/TypeCheck/Solver/Selector.hs | bsd-3-clause | 4,402 | 0 | 30 | 1,589 | 1,477 | 731 | 746 | 97 | 13 |
module MonadIn1 where
-- NOTE(review): this looks like a refactoring-tool test fixture; 'x' and
-- 'ys' are deliberately unused, and the lazy pattern cannot fail here
-- because the literal [1,2] always matches -- confirm before changing.
f :: Monad m => m Int
f = do
  let x@(y:ys) = [1,2]
  return y
| kmate/HaRe | old/testing/simplifyExpr/MonadIn1_TokOut.hs | bsd-3-clause | 99 | 0 | 12 | 37 | 56 | 29 | 27 | 5 | 1 |
module Main where
import Control.Monad
import System.Environment
import Language.C
import Language.C.System.GCC
-- | Parse the C file named by the single command line argument (using
--   gcc as the preprocessor) and pretty-print the resulting AST.
main = do
  input <- getArgs >>= \args ->
    case args of
      [f] -> return f
      _ -> error "Usage: ./Test.hs c-file"
  ast <- parseCFile (newGCC "gcc") Nothing [] input
  case ast of
    Left err -> error (show err)
    -- note: the inner 'ast' shadows the outer binding
    Right ast -> print (pretty ast)
| micknelso/language-c | test/harness/bug22_file_permission_cpp/Test.hs | bsd-3-clause | 436 | 0 | 13 | 151 | 139 | 70 | 69 | 14 | 3 |
module TypeSigs where
-- two bindings sharing one type signature
-- NOTE(review): appears to be a type-signature refactoring test fixture
sq,anotherFun :: Int -> Int
sq 0 = 0
sq z = z^2
anotherFun x = x^2
-- three bindings sharing a multi-parameter signature; bodies are stubs
a,b,c::Int->Integer->Char
a x y = undefined
b x y = undefined
c x y = undefined
| RefactoringTools/HaRe | test/testdata/TypeUtils/TypeSigs.hs | bsd-3-clause | 174 | 0 | 6 | 41 | 94 | 52 | 42 | 9 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Typeable
-- Copyright : (c) The University of Glasgow, CWI 2001--2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- The 'Typeable' class reifies types to some extent by associating type
-- representations to types. These type representations can be compared,
-- and one can in turn define a type-safe cast operation. To this end,
-- an unsafe cast is guarded by a test for type (representation)
-- equivalence. The module "Data.Dynamic" uses Typeable for an
-- implementation of dynamics. The module "Data.Data" uses Typeable
-- and type-safe cast (but not dynamics) to support the \"Scrap your
-- boilerplate\" style of generic programming.
--
-- == Compatibility Notes
--
-- Since GHC 7.8, 'Typeable' is poly-kinded. The changes required for this might
-- break some old programs involving 'Typeable'. More details on this, including
-- how to fix your code, can be found on the
-- <https://ghc.haskell.org/trac/ghc/wiki/GhcKinds/PolyTypeable PolyTypeable wiki page>
--
-----------------------------------------------------------------------------
module Data.Typeable
(
-- * The Typeable class
Typeable,
typeRep,
-- * Propositional equality
(:~:)(Refl),
-- * For backwards compatibility
typeOf, typeOf1, typeOf2, typeOf3, typeOf4, typeOf5, typeOf6, typeOf7,
Typeable1, Typeable2, Typeable3, Typeable4, Typeable5, Typeable6,
Typeable7,
-- * Type-safe cast
cast,
eqT,
gcast, -- a generalisation of cast
-- * Generalized casts for higher-order kinds
gcast1, -- :: ... => c (t a) -> Maybe (c (t' a))
gcast2, -- :: ... => c (t a b) -> Maybe (c (t' a b))
-- * A canonical proxy type
Proxy (..),
-- * Type representations
TypeRep, -- abstract, instance of: Eq, Show, Typeable
typeRepFingerprint,
rnfTypeRep,
showsTypeRep,
TyCon, -- abstract, instance of: Eq, Show, Typeable
tyConFingerprint,
tyConString,
tyConPackage,
tyConModule,
tyConName,
rnfTyCon,
-- * Construction of type representations
-- mkTyCon, -- :: String -> TyCon
mkTyCon3, -- :: String -> String -> String -> TyCon
mkTyConApp, -- :: TyCon -> [TypeRep] -> TypeRep
mkAppTy, -- :: TypeRep -> TypeRep -> TypeRep
mkFunTy, -- :: TypeRep -> TypeRep -> TypeRep
-- * Observation of type representations
splitTyConApp, -- :: TypeRep -> (TyCon, [TypeRep])
funResultTy, -- :: TypeRep -> TypeRep -> Maybe TypeRep
typeRepTyCon, -- :: TypeRep -> TyCon
typeRepArgs, -- :: TypeRep -> [TypeRep]
) where
import Data.Typeable.Internal hiding (mkTyCon)
import Data.Type.Equality
import Unsafe.Coerce
import Data.Maybe
import GHC.Base
-------------------------------------------------------------
--
-- Type-safe cast
--
-------------------------------------------------------------
-- | The type-safe cast operation: succeeds exactly when the runtime
-- type representations of @a@ and @b@ coincide, in which case the
-- 'unsafeCoerce' is justified by the matching representations.
cast :: forall a b. (Typeable a, Typeable b) => a -> Maybe b
cast x
  | typeRep (Proxy :: Proxy a) == typeRep (Proxy :: Proxy b) = Just (unsafeCoerce x)
  | otherwise = Nothing
-- | Extract a witness of equality of two types
--
-- Yields @'Just' 'Refl'@ exactly when the runtime representations of
-- @a@ and @b@ coincide; the 'unsafeCoerce' is justified by that check.
--
-- @since 4.7.0.0
eqT :: forall a b. (Typeable a, Typeable b) => Maybe (a :~: b)
eqT = if typeRep (Proxy :: Proxy a) == typeRep (Proxy :: Proxy b)
      then Just $ unsafeCoerce Refl
      else Nothing
-- | A flexible variation parameterised in a type constructor
-- (returns @Just x@ at the new type only when 'eqT' produces a witness)
gcast :: forall a b c. (Typeable a, Typeable b) => c a -> Maybe (c b)
gcast x = fmap (\Refl -> x) (eqT :: Maybe (a :~: b))
-- | Cast over @k1 -> k2@
gcast1 :: forall c t t' a. (Typeable t, Typeable t')
       => c (t a) -> Maybe (c (t' a))
gcast1 x = fmap (\Refl -> x) (eqT :: Maybe (t :~: t'))
-- | Cast over @k1 -> k2 -> k3@
gcast2 :: forall c t t' a b. (Typeable t, Typeable t')
       => c (t a b) -> Maybe (c (t' a b))
gcast2 x = fmap (\Refl -> x) (eqT :: Maybe (t :~: t'))
| jtojnar/haste-compiler | libraries/ghc-7.10/base/Data/Typeable.hs | bsd-3-clause | 4,552 | 0 | 12 | 1,192 | 688 | 424 | 264 | 63 | 2 |
module Network.Gazelle.Types.Error (
GazelleError(..)
) where
import Data.Aeson
import Network.API.Builder
import Data.Text (Text)
-- | Error type for Gazelle API responses; the JSON payload only signals
-- that a request failed, hence a single generic constructor.
data GazelleError = GenericGazelleError
    deriving Show
-- decode API errors from the JSON response body
instance ErrorReceivable GazelleError where
    receiveError = useErrorFromJSON
-- | A response object whose "status" field is "failure" parses as a
-- 'GazelleError'; any other status is not an error payload, so the
-- parser fails.
instance FromJSON GazelleError where
    parseJSON = withObject "GazelleError" $ \o -> do
        statusValue <- o .: "status"
        if (statusValue :: Text) == "failure"
            then return GenericGazelleError
            else fail "Not an error"
| mr/gazelle | src/Network/Gazelle/Types/Error.hs | mit | 549 | 0 | 12 | 131 | 130 | 72 | 58 | 16 | 0 |
module Unused.ResponseFilterSpec
( main
, spec
) where
import Data.List (find)
import Test.Hspec
import Unused.ResponseFilter
import Unused.ResultsClassifier
import Unused.Types
(TermMatch(..), TermResults, resultsFromMatches)
-- | Entry point for running this spec file in isolation.
main :: IO ()
main = hspec spec

-- | Exercises the automatic low-likelihood classification for each bundled
-- language configuration (Rails, Phoenix/Elixir, Haskell), plus the generic
-- 'autoLowLikelihood' behaviour with an empty matcher list.
spec :: Spec
spec =
    parallel $ do
        -- Rails: framework-generated artifacts (controllers, helpers,
        -- migrations) are expected to be low-likelihood; hand-written code
        -- (service objects, methods) is not.
        describe "railsAutoLowLikelihood" $ do
            it "allows controllers" $ do
                let match =
                        TermMatch
                            "ApplicationController"
                            "app/controllers/application_controller.rb"
                            Nothing
                            1
                let result = resultsFromMatches [match]
                railsAutoLowLikelihood result `shouldBe` True
            it "allows helpers" $ do
                let match =
                        TermMatch "ApplicationHelper" "app/helpers/application_helper.rb" Nothing 1
                let result = resultsFromMatches [match]
                railsAutoLowLikelihood result `shouldBe` True
            it "allows migrations" $ do
                let match =
                        TermMatch
                            "CreateUsers"
                            "db/migrate/20160101120000_create_users.rb"
                            Nothing
                            1
                let result = resultsFromMatches [match]
                railsAutoLowLikelihood result `shouldBe` True
            it "disallows service objects" $ do
                let match =
                        TermMatch
                            "CreatePostWithNotifications"
                            "app/services/create_post_with_notifications.rb"
                            Nothing
                            1
                let result = resultsFromMatches [match]
                railsAutoLowLikelihood result `shouldBe` False
            it "disallows methods" $ do
                let match =
                        TermMatch
                            "my_method"
                            "app/services/create_post_with_notifications.rb"
                            Nothing
                            1
                let result = resultsFromMatches [match]
                railsAutoLowLikelihood result `shouldBe` False
            it "disallows models that occur in migrations" $ do
                let model = TermMatch "User" "app/models/user.rb" Nothing 1
                let migration =
                        TermMatch "User" "db/migrate/20160101120000_create_users.rb" Nothing 1
                let result = resultsFromMatches [model, migration]
                railsAutoLowLikelihood result `shouldBe` False
            it "allows matches intermixed with other results" $ do
                let appToken =
                        TermMatch "ApplicationHelper" "app/helpers/application_helper.rb" Nothing 1
                let testToken =
                        TermMatch
                            "ApplicationHelper"
                            "spec/helpers/application_helper_spec.rb"
                            Nothing
                            10
                let result = resultsFromMatches [appToken, testToken]
                railsAutoLowLikelihood result `shouldBe` True
        -- Phoenix/Elixir: note the asymmetry with Rails — controllers are
        -- NOT automatically low-likelihood here, while views/tests are.
        describe "elixirAutoLowLikelihood" $ do
            it "disallows controllers" $ do
                let match =
                        TermMatch "PageController" "web/controllers/page_controller.rb" Nothing 1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` False
            it "allows views" $ do
                let match = TermMatch "PageView" "web/views/page_view.rb" Nothing 1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` True
            it "allows migrations" $ do
                let match =
                        TermMatch
                            "CreateUsers"
                            "priv/repo/migrations/20160101120000_create_users.exs"
                            Nothing
                            1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` True
            it "allows tests" $ do
                let match = TermMatch "UserTest" "test/models/user_test.exs" Nothing 1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` True
            it "allows Mixfile" $ do
                let match = TermMatch "Mixfile" "mix.exs" Nothing 1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` True
            it "allows __using__" $ do
                let match = TermMatch "__using__" "web/web.ex" Nothing 1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` True
            it "disallows service modules" $ do
                let match =
                        TermMatch
                            "CreatePostWithNotifications"
                            "web/services/create_post_with_notifications.ex"
                            Nothing
                            1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` False
            it "disallows functions" $ do
                let match =
                        TermMatch
                            "my_function"
                            "web/services/create_post_with_notifications.ex"
                            Nothing
                            1
                let result = resultsFromMatches [match]
                elixirAutoLowLikelihood result `shouldBe` False
            it "allows matches intermixed with other results" $ do
                let appToken = TermMatch "UserView" "web/views/user_view.ex" Nothing 1
                let testToken = TermMatch "UserView" "test/views/user_view_test.exs" Nothing 10
                let result = resultsFromMatches [appToken, testToken]
                elixirAutoLowLikelihood result `shouldBe` True
        describe "haskellAutoLowLikelihood" $ do
            it "allows instance" $ do
                let match = TermMatch "instance" "src/Lib/Types.hs" Nothing 1
                let result = resultsFromMatches [match]
                haskellAutoLowLikelihood result `shouldBe` True
            it "allows items in the *.cabal file" $ do
                let match = TermMatch "Lib.SomethingSpec" "lib.cabal" Nothing 1
                let result = resultsFromMatches [match]
                haskellAutoLowLikelihood result `shouldBe` True
        -- A configuration whose low-likelihood entry has no matchers must
        -- never classify anything as low-likelihood.
        describe "autoLowLikelihood" $
            it "doesn't qualify as low when no matchers are present in a language config" $ do
                let match = TermMatch "AwesomeThing" "app/foo/awesome_thing.rb" Nothing 1
                let result = resultsFromMatches [match]
                let languageConfig =
                        LanguageConfiguration
                            "Bad"
                            []
                            [LowLikelihoodMatch "Match with empty matchers" [] False]
                            []
                autoLowLikelihood languageConfig result `shouldBe` False
-- | Look up a bundled 'LanguageConfiguration' by its name.
--
-- The previous implementation used partial pattern bindings
-- (@(Right config) = loadConfig@ / @(Just config') = find ...@), which die
-- with an uninformative irrefutable-pattern failure when the config cannot
-- be loaded or the name is unknown. Fail loudly with a clear message
-- instead; this is a test helper, so 'error' on misconfiguration is fine.
configByName :: String -> LanguageConfiguration
configByName s =
    case loadConfig of
        Left _ -> error ("configByName: unable to load the bundled language configurations")
        Right config ->
            case find ((==) s . lcName) config of
                Just config' -> config'
                Nothing -> error ("configByName: no language configuration named " ++ s)
-- | 'autoLowLikelihood' specialised to the bundled Rails configuration.
railsAutoLowLikelihood :: TermResults -> Bool
railsAutoLowLikelihood = autoLowLikelihood (configByName "Rails")

-- | 'autoLowLikelihood' specialised to the bundled Phoenix (Elixir) configuration.
elixirAutoLowLikelihood :: TermResults -> Bool
elixirAutoLowLikelihood = autoLowLikelihood (configByName "Phoenix")

-- | 'autoLowLikelihood' specialised to the bundled Haskell configuration.
haskellAutoLowLikelihood :: TermResults -> Bool
haskellAutoLowLikelihood = autoLowLikelihood (configByName "Haskell")
| joshuaclayton/unused | test/Unused/ResponseFilterSpec.hs | mit | 7,851 | 0 | 18 | 3,191 | 1,401 | 658 | 743 | 157 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Functionality.Explicit.Server where
import Control.Applicative ((<$>))
import Data.Foldable (for_)
import Data.Function (($), (.))
import Data.List (length)
import Data.UUID (toString)
import qualified Database.Couch.Explicit.Server as Server (activeTasks, allDbs,
meta, restart, stats,
uuids)
import Database.Couch.Response as Response (asUUID)
import Database.Couch.Types (Context)
import Functionality.Util (runTests, serverContext,
testAgainstSchema,
testAgainstSchemaAndValue)
import Network.HTTP.Client (Manager)
import System.IO (IO)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit ((@=?))
-- | Entry point for running just this module's tests.
_main :: IO ()
_main = runTests tests

-- We specifically don't use makeTests here because we want no-database-selected context
tests :: IO Manager -> TestTree
tests manager = testGroup "Tests of the server interface" $
  ($ serverContext manager) <$> [serverMeta, activeTasks, allDbs, stats, uuids]
  -- NOTE(review): 'restart' is defined below but deliberately (?) absent from
  -- this list — restarting the server mid-suite would disrupt other tests.
  -- Confirm that the omission is intentional.

-- Server-oriented functions

-- | Validate the server's root metadata response against its JSON schema.
serverMeta :: IO Context -> TestTree
serverMeta = testAgainstSchema "Get server meta information" Server.meta "get--.json"

-- | Validate the active-tasks listing against its JSON schema.
activeTasks :: IO Context -> TestTree
activeTasks = testAgainstSchema "Get list of active tasks" Server.activeTasks "get--_active_tasks.json"

-- | Validate the all-databases listing against its JSON schema.
allDbs :: IO Context -> TestTree
allDbs = testAgainstSchema "Retrieve list of all dbs (should be empty)" Server.allDbs "get--_all_dbs.json"

-- This fails when run alone, so it's commented out; I need to have it
-- able to run some other thing concurrently to provoke some actual
-- content
-- dbUpdates :: IO Context -> TestTree
-- dbUpdates getContext = testCaseSteps "Retrieve list of database updates" $ do
--   res <- getContext >>= Server.dbUpdates
--   checkRequestSuccess res
--   assertBool "should have an array of objects" $ allOf (_Right._1.each) (has _Object) res
--   assertBool "should have pids for all tasks" $ allOf (_Right._1.each) (has (key "pid")) res
--   checkEmptyCookieJar res

-- | Validate the restart response against its JSON schema (not in 'tests').
restart :: IO Context -> TestTree
restart = testAgainstSchema "Restart server" Server.restart "post--_restart.json"

-- | Validate the statistics response against its JSON schema.
stats :: IO Context -> TestTree
stats = testAgainstSchema "Retrieve statistics" Server.stats "get--_stats.json"
-- | Check UUID retrieval against the @_uuids@ endpoint schema and also
-- verify the decoded value: requesting one UUID must yield exactly one
-- item of 36 characters (the canonical textual form of a UUID).
--
-- Fix: this test previously validated against @get--_stats.json@ — the
-- schema for the @_stats@ endpoint, almost certainly a copy-paste slip from
-- 'stats'. Every other test in this module uses the schema file named after
-- its own endpoint, so the @_uuids@ response is checked against
-- @get--_uuids.json@.
uuids :: IO Context -> TestTree
uuids = testAgainstSchemaAndValue "Retrieve UUIDs" (Server.uuids 1) "get--_uuids.json" Response.asUUID $ \step val -> do
  step "Check length of list"
  length val @=? 1
  step "Check lengths of items"
  for_ val $ \u -> (length . toString) u @=? 36
| mdorman/couch-simple | test/Functionality/Explicit/Server.hs | mit | 3,023 | 0 | 14 | 873 | 499 | 285 | 214 | 41 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE EmptyDataDecls #-}
module SqliteTest (specs) where
import Control.Monad.IO.Class
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Map as Map
import qualified Data.Text as T
import Database.Persist.Sqlite
import Init
import MigrationSql
-- Test lower case names.
-- Template Haskell splice: generates the 'LowerCaseTable' and 'RefTable'
-- entities plus the "lowerCaseMigrate" migration. The quasiquote body also
-- declares extra blocks (Triggers, Indexes) that are surfaced via
-- 'entityExtra' and exercised below.
share [mkPersist sqlSettings, mkMigrate "lowerCaseMigrate"] [persistLowerWithSql|
LowerCaseTable id=my_id
    fullName Text
    Triggers
        tableIdTrig AFTER INSERT
        tableTrig BEFORE DELETE
    Indexes
        tableIndex full_name
RefTable
    someVal Int sql=something_else
    lct LowerCaseTableId
    UniqueRefTable someVal
|]

-- | Three checks: the extra blocks from the quasiquote round-trip through
-- 'entityExtra'; the migration (including the additional SQL) runs; and the
-- AFTER INSERT trigger actually fires, inserting a row into @ref_table@.
specs :: Spec
specs = describe "rename specs" $ do
    it "Check extra blocks" $ do
        entityExtra (entityDef (Nothing :: Maybe LowerCaseTable)) @?=
            Map.fromList
                [ ( "Indexes"
                  , map T.words ["tableIndex full_name"])
                , ( "Triggers"
                  , map T.words [ "tableIdTrig AFTER INSERT"
                                , "tableTrig BEFORE DELETE"])
                ]
    it "Create the tables and run additional migration" $ asIO $ do
        runConn' (getSqlCode,sql) $ do
            runMigration lowerCaseMigrate
    it "Activates the insertion trigger" $ asIO $ do
        runConn' (getSqlCode,sql) $ do
            C.runResourceT $
                rawExecute "INSERT INTO lower_case_table (full_name) VALUES ('abc');" [] C.$$ CL.sinkNull
            -- drop 1 discards the id column; the trigger must have written
            -- something_else = 1 referencing the inserted row.
            value <- C.runResourceT $
                rawQuery "SELECT something_else, lct FROM ref_table ORDER BY id DESC LIMIT 1" []
                    C.$$ CL.consume
            liftIO $ map (drop 1) value `shouldBe` [[PersistInt64 1]]

-- | Pin an action's type to IO (helps type inference in the specs above).
asIO :: IO a -> IO a
asIO = id
| jcristovao/migrationplus | test/SqliteTest.hs | mit | 1,930 | 0 | 18 | 472 | 364 | 199 | 165 | 42 | 1 |
{-|
Module : Test.Make.Instances
Description : Tests the Instances Make modules
Copyright : (c) Andrew Burnett 2014-2015
Maintainer : andyburnett88@gmail.com
Stability : experimental
Portability : Unknown
Contains the test hierarchy for the Make Instances modules
-}
module Test.Make.Instances (
tests -- TestTree
) where
import qualified Test.Make.Instances.CNF as CNF
import qualified Test.Make.Instances.Common as Common
import TestUtils
-- | Name of this branch of the test hierarchy.
name :: String
name = "Instances"

-- | All tests for the Make Instances modules, grouped under 'name'.
tests :: TestTree
tests = testGroup name [CNF.tests, Common.tests]
| aburnett88/HSat | tests-src/Test/Make/Instances.hs | mit | 590 | 0 | 7 | 119 | 72 | 48 | 24 | 12 | 1 |
-- | A minimalistic DOM
module Hom.DOM
( Elem
, newElem
, appendChild
, newTextElem
, getBody
)
where
import GHCJS.Types
import GHCJS.Foreign
import Data.Text
-- | A DOM element, held as an opaque JavaScript reference.
newtype Elem = Elem (JSRef Elem)

foreign import javascript unsafe "document.createElement($1)"
  js_newElem :: JSString -> IO Elem

-- | Create a new, detached element with the given tag name.
newElem :: Text -> IO Elem
newElem tag = js_newElem (toJSString tag)

foreign import javascript unsafe "document.createTextNode($1)"
  js_newTextElem :: JSString -> IO Elem

-- | Create a new text node containing the given text.
newTextElem :: Text -> IO Elem
newTextElem txt = js_newTextElem (toJSString txt)

foreign import javascript unsafe "document.body"
  js_body :: IO Elem

-- | The document's @body@ element.
getBody :: IO Elem
getBody = js_body

foreign import javascript unsafe "$1.appendChild($2)"
  js_appendChild :: Elem -> Elem -> IO Elem

-- | Append the second element as a child of the first, returning the
-- appended child (as @appendChild@ does in JavaScript).
appendChild :: Elem -> Elem -> IO Elem
appendChild = js_appendChild
| arianvp/ghcjs-hom | src/Hom/DOM.hs | mit | 817 | 17 | 7 | 140 | 217 | 117 | 100 | 26 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TupleSections #-}
-- |
-- Module : Text.XML.Mapping.Internal.Parser
-- Copyright : (c) Joseph Abrahamson 2013
-- License : MIT
-- .
-- Maintainer : me@jspha.com
-- Stability : experimental
-- Portability : non-portable
-- .
-- Low-level definition of the parser.
-- .
module Text.XML.Mapping.Internal.Parser where
import Control.Applicative
import qualified Data.Attoparsec as A
import qualified Data.ByteString as S
import Data.Semigroup
import Text.XML.Mapping.Internal.Class (El (El), X (..))
import Text.XML.Mapping.Internal.Err
import Text.XML.Mapping.Internal.Level
import Text.XML.Mapping.Internal.ParseError
import Text.XML.Mapping.Types
-- | The parser: given the current 'Level' context and the remaining sibling
-- 'Tag's, produce either a located 'ParseError' or a value, together with
-- the tags left unconsumed.
newtype Parser a = P {
  unP :: Level -> [Tag] -> (Err ParseError a, [Tag])
  } deriving Functor

-- | Sequencing threads the unconsumed-tag list from the first parser into
-- the second; errors from both sides are merged via the 'Err' applicative
-- and re-annotated with the current level ('ate').
instance Applicative Parser where
  pure a = P $ \l ts -> (ate l $ pure a, ts)
  P pf <*> P px = P $ \l ts ->
    let
      (errf, ts' ) = pf l ts
      (errx, ts'') = px l ts'
    in
      (ate l $ perE errf <*> perE errx, ts'')

-- | Choice runs both alternatives on the SAME input tags and keeps the
-- remaining tags of whichever branch the underlying 'Err' alternative
-- selects; if both fail, the original tag list is restored.
instance Alternative Parser where
  empty = P $ \l ts -> (ate l empty, ts)
  P p1 <|> P p2 = P $ \l ts ->
    let
      (err1, ts1) = p1 l ts
      (err2, ts2) = p2 l ts
    in
      uncarry ts . ate l $ carry ts1 err1 <|> carry ts2 err2
    where
      -- The carry/uncarry functions store the remaining tags of each
      -- branch of (<|>) along with the result and then unfold it at
      -- the end (along with a "don't consume anything" default if
      -- needed).
      carry :: [Tag] -> Err ParseError a -> Err ParseErrorR (a, [Tag])
      carry ts = perE . fmap (,ts)
      uncarry :: [Tag] -> Err ParseError (a, [Tag]) -> (Err ParseError a, [Tag])
      uncarry ts (Err e) = (Err e, ts)
      uncarry _ (Ok (a, ts)) = (Ok a, ts)

-- | '<>' is parser choice, matching the 'Alternative' instance.
instance Semigroup (Parser a) where
  (<>) = (<|>)

-- | The always-failing parser is the identity of choice.
instance Monoid (Parser a) where
  mempty = empty
  mappend = (<|>)
-- | Run an attoparsec parser over a complete 'S.ByteString', converting an
-- attoparsec failure message into a 'ParseError' located at the given level.
tryAtto :: Level -> A.Parser a -> S.ByteString -> Err ParseError a
tryAtto l atto bs =
  either (\attoReason -> Err (simpleFail attoReason +++ l)) Ok (A.parseOnly atto bs)
-- | Lift a single-tag handler into the parser shape: skip 'ignorable' tags,
-- apply the handler to the first meaningful tag, and only consume it on
-- success (on failure the tag is pushed back).
withOne :: (Level -> Tag -> Err ParseError a) -> Level -> [Tag] -> (Err ParseError a, [Tag])
withOne _ l [] = (Err $ exhausted +++ l, [])
withOne go l (t:ts)
  | ignorable t = withOne go l ts
  | otherwise =
    case go l t of
      Err pe -> (Err pe, t:ts)
      Ok a -> (Ok a , ts)

-- | The core combinators: attributes are read from the current level without
-- consuming tags; text and elements each consume exactly one tag via
-- 'withOne'.
instance X Parser where
  pAttr atto qn = P $ \l ts -> (go l, ts) where
    go l = case getAttr l qn of
      Nothing -> Err $ noAttr qn +++ l
      Just bs -> tryAtto l atto bs
  pText atto = P (withOne go) where
    go l t = case rawText t of
      Nothing -> Err (expectingText +++ l)
      Just bs -> tryAtto l atto bs
  pElem checkQN pf = P (withOne go) where
    go l t =
      -- Build a new element context
      case step t l of
        Left levelE -> Err (levelError levelE +++ l)
        Right l' ->
          let Just qn = name l'
          -- Check that the element matches
          in if not (checkQN qn)
             then Err (wrongElement +++ l')
             -- Run the inner parser on the element children
             else case unP pf l' (children t) of
               (Err pe, _) -> Err pe
               (Ok a , leftovers)
                 -- Fail if there are leftovers
                 | not (null leftovers) -> Err (leftoverElements +++ l')
                 | otherwise -> Ok (El qn a)

-- | Convert a 'LevelError' result into a located 'Err' 'ParseError'.
errLevelError :: Level -> Either LevelError a -> Err ParseError a
errLevelError l = either (\le -> Err $ levelError le +++ l) Ok
-- | Run a parser against a single root tag from the empty ('level0')
-- context, discarding any unconsumed trailing tags and collapsing the
-- result into a plain 'Either'.
runParser :: Parser a -> Tag -> Either ParseError a
runParser p t = errEither (fst (unP p level0 [t]))
| tel/xml-mapping | src/Text/XML/Mapping/Internal/Parser.hs | mit | 3,845 | 0 | 22 | 1,199 | 1,367 | 715 | 652 | 80 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Blame (BlameModule (BlameModule)) where
import Prelude hiding (mapM_, sum)
import GHC.IO.Handle (Handle, hIsEOF)
import Control.Applicative ((<$>))
import Control.Monad (unless, when)
import Pipes
import qualified Pipes.Prelude as P
import System.Process
import System.IO (hGetLine)
import System.Directory (doesFileExist)
import System.FilePath.Posix ((</>), takeFileName)
import Data.Foldable
import Data.List (isPrefixOf, filter, sortBy, intercalate, dropWhile)
import Data.Function (on)
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as Map
import qualified IrcBot (Message)
import IrcBot
-- | IRC bot module answering "blame <file>": paths to the git binary and to
-- the repository working copy.
data BlameModule = BlameModule FilePath FilePath

instance BotModule BlameModule where
  initForConnection m = return (return (), consumer m)

-- | Consume IRC messages forever; on each "blame" command, run git blame
-- and reply to the originating channel with the author breakdown.
consumer :: BlameModule -> Consumer IrcBot.Message Bot ()
consumer mod@(BlameModule gitPath repoPath) = do
  m <- await
  case parseCommand "blame" m of
    Just (chan, body) -> do
      o <- B.pack <$> liftIO (blame gitPath repoPath $ B.unpack body)
      lift $ privmsg chan o
    Nothing -> return ()
  consumer mod

-- | Summarise git blame for a file as "<file>: Author (NN%), ..." for the
-- top three authors by line count, or "No such file".
blame :: FilePath -> FilePath -> String -> IO String
blame gitPath repoPath file = do
  -- NOTE(review): this strips EVERY '.' from the requested name, not just
  -- ".." — it blocks path traversal, but also removes extension dots
  -- ("Foo.hs" becomes "Foohs"). Presumably callers pass extension-less
  -- names; confirm.
  let sfile = filter (/='.') file
  e <- doesFileExist (repoPath </> sfile)
  if e then do
      -- Authors sorted by descending line count.
      m <- sortBy (flip compare `on` snd) . Map.toList <$> blameMap sfile
      let total = sum $ map snd m
      -- Percentages are integer-truncated; zero-percent authors are dropped.
      let percentage = filter ((>0) . snd) $ map (\(s, k) -> (s, k * 100 `div` total)) m
      let formatted = map (\(s, k) -> s ++ " (" ++ show k ++ "%)") percentage
      return $ sfile ++ ": " ++ intercalate ", " (take 3 formatted)
    else return "No such file"
  where
    -- Count one line for the given author.
    addAuthor :: Map.Map String Int -> String -> Map.Map String Int
    addAuthor m a = Map.insertWith (+) a 1 m
    -- Fold the per-line author stream into author -> line-count.
    blameMap :: FilePath -> IO (Map.Map String Int)
    blameMap sfile = P.fold addAuthor Map.empty id (blameProducer sfile gitPath repoPath >-> extractAuthors)
    -- Keep only "author " lines of the porcelain output, minus the prefix.
    extractAuthors :: Pipe String String IO ()
    extractAuthors = do s <- await
                        when ("author " `isPrefixOf` s) (yield $ drop 7 s)
                        extractAuthors

-- | Stream the stdout of @git blame --line-porcelain@ for the given file,
-- run from inside the repository directory.
blameProducer :: FilePath -> FilePath -> FilePath -> Producer String IO ()
blameProducer f git repo = do
  (_, hout, _, _) <- lift $ runInteractiveProcess git ["blame", "--line-porcelain", repo </> f] (Just repo) Nothing
  P.fromHandle hout
| EDmitry/ircbot-hs | Blame.hs | mit | 2,396 | 0 | 19 | 502 | 895 | 475 | 420 | 57 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
module IHaskell.Display.Widgets.Box.Box (
-- * The Box widget
Box,
-- * Constructor
mkBox) where
-- To keep `cabal repl` happy when running from the ihaskell repo
import Prelude
import Data.Aeson
import Data.IORef (newIORef)
import IHaskell.Display
import IHaskell.Eval.Widgets
import IHaskell.IPython.Message.UUID as U
import IHaskell.Display.Widgets.Types
-- | A 'Box' represents a Box widget from IPython.html.widgets.
type Box = IPythonWidget BoxType

-- | Create a new box: fresh UUID, default widget state in an 'IORef', and a
-- comm opened with the kernel carrying the initial state.
mkBox :: IO Box
mkBox = do
  -- Default properties, with a random uuid
  uuid <- U.random
  let widgetState = WidgetState $ defaultBoxWidget "BoxView" "BoxModel"
  stateIO <- newIORef widgetState
  let box = IPythonWidget uuid stateIO
  -- Open a comm for this widget, and store it in the kernel state
  widgetSendOpen box $ toJSON widgetState
  -- Return the widget
  return box

-- | Displaying a box asks the frontend to render a view; nothing is
-- produced inline, hence the empty 'Display'.
instance IHaskellDisplay Box where
  display b = do
    widgetSendView b
    return $ Display []

-- | The widget is identified to the frontend by its comm UUID.
instance IHaskellWidget Box where
  getCommUUID = uuid
| sumitsahrawat/IHaskell | ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Box/Box.hs | mit | 1,239 | 0 | 11 | 286 | 213 | 117 | 96 | 29 | 1 |
-- Project Euler, Problem 3:
-- The prime factors of 13195 are 5, 7, 13 and 29.
-- What is the largest prime factor of the number 600851475143?
-- (The original header quoted Problem 1's statement, but the code below
-- computes Problem 3.)

-- | True when x is evenly divisible by y.
is_divisible :: Integer -> Integer -> Bool
is_divisible x y = x `mod` y == 0

-- | Prime factorisation of @n@ by trial division with candidate divisors
-- from @max k 2@ upwards. Because each divisor is divided out before the
-- candidate advances, factors are emitted in non-decreasing order and every
-- emitted factor is prime. Edge cases: @get_factors 0 _ = []@ and
-- @get_factors 1 _ = [1]@.
get_factors :: Integer -> Integer -> [Integer]
get_factors n 1 = get_factors n 2
get_factors 0 _ = []
get_factors 1 _ = [1]
get_factors n k
    | k >= n           = [k]
    | is_divisible n k = k : get_factors (n `div` k) k
    | otherwise        = get_factors n (k + 1)

-- | Primality via factorisation: @n@ is prime iff its factor list is just
-- itself. (Note: reports 1 as prime since @get_factors 1 _ = [1]@; harmless
-- here because 1 never appears in a factorisation of n > 1.)
is_prime :: Integer -> Bool
is_prime n = get_factors n 1 == [n]

-- | The largest prime factor of 600851475143. The 'filter' is redundant
-- (all emitted factors are prime) but kept for clarity; 'last' picks the
-- largest since factors are non-decreasing.
ans :: Integer
ans = last (filter is_prime (get_factors 600851475143 1))
ans = last $ filter (is_prime) (get_factors 600851475143 1) | stefan-j/ProjectEuler | q3.hs | mit | 629 | 8 | 9 | 170 | 216 | 112 | 104 | 12 | 2 |
module ProjectEuler.Problem016 (solve) where
import ProjectEuler.Prelude (digits)
-- | Sum the decimal digits of the argument (digit extraction is delegated
-- to 'digits' from the project Prelude).
solve :: Integer -> Integer
solve n = sum (digits n)
| hachibu/project-euler | src/ProjectEuler/Problem016.hs | mit | 133 | 0 | 5 | 19 | 39 | 23 | 16 | 4 | 1 |
module Data.Geometry.Geos.TopologySpec
( topologySpec
)
where
import Test.Hspec
import qualified Data.Vector as V
import Data.Either hiding (fromRight)
import Helpers
import Data.Geometry.Geos.Geometry
import Data.Geometry.Geos.Topology
-- Fixtures. All geometries are built via 'fromRight', so a malformed
-- literal fails at evaluation time rather than being silently skipped.

-- A simple (convex-ish) polygon with a spike at (1.5, 1.5).
poly1 = fromRight $ makePolygonGeo [[(0, 0), (0, 1), (1, 1), (1.5, 1.5), (1, 0), (0, 0)]]
-- The spike corner of poly1 on its own (expected difference poly1 - poly3).
cornerOnlyPoly = fromRight $ makePolygonGeo [[(1.0, 1.0), (1.5, 1.5), (1.0, 0.0), (1.0, 1.0)]]
-- poly1 with a triangular hole.
poly2 = fromRight $ makePolygonGeo
    [ [(0, 0), (0, 1), (1, 1), (1.5, 1.5), (1, 0), (0, 0)]
    , [(0.1, 0.1), (0.1, 0.9), (0.9, 0.1), (0.1, 0.1)]
    ]
-- Expected convex hull of poly2 (hole is irrelevant to the hull).
poly2Hull =
    fromRight $ makePolygonGeo [[(0.0, 0.0), (0.0, 1.0), (1.5, 1.5), (1.0, 0.0), (0.0, 0.0)]]
-- The unit square and its ring as a linestring.
poly3 =
    fromRight $ makePolygonGeo [[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0), (0.0, 0.0)]]
poly3Boundary = fromRight $ makeLineStringGeo [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0), (0.0, 0.0)]
-- Unit square touching poly3 only at the single point (1, 1).
poly4 =
    fromRight $ makePolygonGeo [
      [(1.0, 1.0), (1.0, 2.0), (2.0, 2.0), (2.0, 1.0), (1.0, 1.0)]
    ]
-- Expected union of poly1 and poly4.
poly5 =
    fromRight $ makePolygonGeo [[(0.0, 0.0),(0.0, 1.0),(1.0, 1.0),(1.0, 2.0),(2.0, 2.0),(2.0, 1.0),(1.3333333333333333, 1.0),(1.0, 0.0),(0.0, 0.0)]]
-- Axis-aligned bounding box shared by poly1 and poly2.
env1 =
    fromRight $ makePolygonGeo [[(0.0, 0.0), (1.5, 0.0), (1.5, 1.5), (0.0, 1.5), (0.0, 0.0)]]
-- An L-shaped linestring and its expected convex hull.
lineString1 = fromRight $ makeLineStringGeo [(0, 0), (0, 1), (1, 1)]
lineString1Hull = fromRight $ makePolygonGeo [[(0, 0), (0, 1), (1, 1), (0, 0)]]
point1 = makePointGeo (0.5, 1.0)
-- | Exercises the GEOS topology operations (envelope, intersection, convex
-- hull, difference, boundary, union, centroid, triangulation, Voronoi,
-- polygonize, minimum rotated rectangle, minimum clearance) against the
-- fixtures above. Expected values are spelled out literally, including
-- GEOS's floating-point output.
topologySpec = describe "Topology" $ do
  describe "envelope" $ do
    it "should create an envelope for a plain polygon" $ (envelope poly1 >>= ensurePolygon) `shouldBe` Just env1
    it "should create an envelope for a polygon with holes" $ (envelope poly2 >>= ensurePolygon) `shouldBe` Just env1
    it "should create an envelope for a linestring" $ (envelope lineString1 >>= ensurePolygon) `shouldBe` Just poly3
  describe "intersection" $ do
    it "should create a point for polygons with a single-point intersection" $ (intersection poly3 poly4 >>= ensurePoint) `shouldBe` (Just $ makePointGeo (1.0, 1.0))
  describe "convexHull" $ do
    it "should make a convex hull from a polygon" $ convexHull poly2 `shouldBe` Just poly2Hull
    it "should make a convex hull from a linestring" $ convexHull lineString1 `shouldBe` Just lineString1Hull
    it "should not make a convex hull from a point" $ convexHull point1 `shouldBe` Nothing
  describe "difference" $ do
    it "should compute the difference between polygons" $ (difference poly1 poly3 >>= ensurePolygon) `shouldBe` Just cornerOnlyPoly
    it "should compute the difference between a polygon and a linestring" $ do
      (difference poly4 lineString1 >>= ensurePolygon) `shouldBe` Just poly4
  describe "boundary" $ do
    it "should compute the boundary of a polygon" $ do
      (boundary poly3 >>= ensureLineString) `shouldBe` Just poly3Boundary
  describe "union" $ do
    it "should compute the union of two polygons" $ do
      (union poly1 poly4) `shouldBe` (Just $ Some poly5)
  describe "unaryUnion" $ do
    it "should compute the unary union of a multi polygon" $
      let pgons = fmap (\case (PolygonGeometry p _) -> p) [poly1, poly4]
          multiPgon = MultiPolygonGeometry (multiPolygon $ V.fromList pgons) Nothing
      in (unaryUnion multiPgon) `shouldBe` (Just $ Some poly5)
  describe "centroid" $ do
    it "should compute the centroid of a polygon" $
      centroid poly1 `shouldBe` (Just $ PointGeometry (point $ coordinate2 0.6333333333333333 0.5666666666666667) Nothing)
  describe "delaunayTriangulation" $ do
    it "should compute the delaunay triangulation of a simple polygon" $ do
      let ls1 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(0.0, 1.0),(1.5, 1.5) ] )
          ls2 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(0.0, 0.0),(0.0, 1.0) ] )
          ls3 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(0.0, 0.0),(1.0, 0.0) ] )
          ls4 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(1.0, 0.0),(1.5, 1.5) ] )
          ls5 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(1.0, 0.0),(1.0, 1.0) ] )
          ls6 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(1.0, 1.0),(1.5, 1.5) ] )
          ls7 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(0.0, 1.0),(1.0, 1.0) ] )
          ls8 = lineString $ V.map (uncurry coordinate2) ( V.fromList [(0.0, 1.0),(1.0, 0.0) ] )
          lineStrings = multiLineString $ V.fromList $ rights [
            ls1, ls2, ls3, ls4, ls5, ls6, ls7, ls8 ]
          in (delaunayTriangulation poly1 0.1 True >>= ensureMultiLineString) `shouldBe` (Just $ MultiLineStringGeometry lineStrings Nothing)
  describe "voronoiDiagram" $ do
    it "should compute the voronoi diagram of a polygon" $
      let p1 = makePolygonGeo [[(0.5, 0.5), (0.5, 2.0), (2.0, 0.5), (0.5, 0.5)]]
          p2 = makePolygonGeo [[(-1.5, 0.5), (-1.5, 3.0), (0.16666666666666652, 3.0), (0.5, 2.0), (0.5, 0.5), (-1.5, 0.5)]]
          p3 = makePolygonGeo [[(0.5, -1.5), (-1.5, -1.5), (-1.5, 0.5), (0.5, 0.5), (0.5, -1.5)]]
          p4 = makePolygonGeo [[(3.0, 0.16666666666666663), (3.0, -1.5), (0.5, -1.5), (0.5, 0.5), (2.0, 0.5), (3.0, 0.16666666666666663)]]
          p5 = makePolygonGeo [[(0.16666666666666652, 3.0), (3.0, 3.0), (3.0, 0.16666666666666663), (2.0, 0.5), (0.5, 2.0), (0.16666666666666652, 3.0)]]
          collection = geometryCollection $ V.fromList $ Some <$> rights [p1, p2, p3, p4, p5 ]
      in ( ensureGeometryCollection $ voronoiDiagram poly1 Nothing 0.1 False) `shouldBe` (Just $ CollectionGeometry collection Nothing)
  describe "polygonize" $ do
    it "should polygonize a set of polygons" $ do
      let result = fromRight $ makePolygonGeo $ [[(0.1, 0.1), (0.1, 0.9), (0.9, 0.1), (0.1, 0.1)]]
      (polygonize $ V.fromList [poly1, poly2]) `shouldBe` Just result
  describe "minimumRotatedRectangle" $ do
    it "should create a minumum rotated rectangle from a polygon" $
      let poly = fromRight $ makePolygonGeo [[(-0.3, 0.9), (9.999999999999999e-2, -0.3), (1.9, 0.3), (1.5, 1.5), (-0.3, 0.9)]]
      in minimumRotatedRectangle poly1 `shouldBe` Just poly
  describe "minimumClearance" $ do
    it "should compute the minimum clearance for a polygon" $ do
      minimumClearance poly1 `shouldBe` Just 0.316227766016838
| ewestern/geos | tests/Data/Geometry/Geos/TopologySpec.hs | mit | 6,382 | 0 | 23 | 1,343 | 2,484 | 1,412 | 1,072 | -1 | -1 |
module Graphics.Oedel.Color where
-- | @c@ is a color, containing brightness information, but not transparency.
class Color c where
-- | Gets the best approximation of the given RGB color. Components may
-- range from @0.0@ to @1.0@.
rgb :: Double -> Double -> Double -> c
-- | The closest approximation to black.
black :: (Color c) => c
black = rgb 0 0 0
-- | Gets the closest approximation to a gray with the given relative
-- luminosity between @0.0@ and @1.0@.
gray :: (Color c) => Double -> c
gray l = rgb l l l
-- | The closest approximation to white.
white :: (Color c) => c
white = rgb 1 1 1
-- | The closest approximation to red.
red :: (Color c) => c
red = rgb 1 0 0
-- | The closest approximation to green.
green :: (Color c) => c
green = rgb 0 1 0
-- | The closest approximation to blue.
blue :: (Color c) => c
blue = rgb 0 0 1
-- | The closest approximation to yellow.
yellow :: (Color c) => c
yellow = rgb 1 1 0
-- | The closest approximation to magenta.
magenta :: (Color c) => c
magenta = rgb 1 0 1
-- | The closest approximation to cyan.
cyan :: (Color c) => c
cyan = rgb 0 1 1
| dzamkov/Oedel | src/Graphics/Oedel/Color.hs | mit | 1,163 | 0 | 9 | 301 | 287 | 160 | 127 | 21 | 1 |
module Test where
-- | A tiny single-field record used for experimentation.
data MarkusTest = MarkusTest
    { markusName :: String
    }

-- | Build a 'MarkusTest' with the name "Test" and return that name in IO.
justATest :: IO String
justATest = return (markusName (MarkusTest { markusName = "Test" }))
| MarkusBa/StockHaskell | Test.hs | mit | 173 | 0 | 8 | 37 | 48 | 28 | 20 | 5 | 1 |
module Ithkuil.Core
( {- Catigories -}
Pattern(..)
, Stem(..)
, Configuration(..)
, Affiliation(..)
, Perspective(..)
, Extension(..)
, Essence(..)
, Context(..)
, Designation(..)
, Case(..)
, Function(..)
, Mood(..)
, Illocution(..)
, Relation(..)
, Phase(..)
, Sanction(..)
, Valence(..)
, Version(..)
, Validation(..)
, Aspect(..)
, Bias(..)
, Modality(..)
, Level(..)
, Format(..)
{- Words -}
, Formative(..)
) where
--------------------------------------------------------------------------------
-- Patterns & Stems ------------------------------------------------------------
--------------------------------------------------------------------------------
-- | The three patterns (P1-P3); together with 'Stem' this selects one of
-- the nine stems derivable from a root.
data Pattern = Pattern1 -- P1
             | Pattern2 -- P2
             | Pattern3 -- P3
             deriving(Eq,Ord,Enum,Show,Read)
-- | The three stems (S1-S3) within a pattern.
data Stem = Stem1 -- S1
          | Stem2 -- S2
          | Stem3 -- S3
          deriving(Eq,Ord,Enum,Show,Read)
--------------------------------------------------------------------------------
-- Basic Catigories Section ----------------------------------------------------
--------------------------------------------------------------------------------
data Configuration = Uniplex -- UNI
| Duplex -- DPX
| Discrete -- DCT
| Aggregative -- AGG
| Segmentative -- SEG
| Componential -- CPN
| Coherent -- COH
| Composite -- CST
| Multiform -- MLT
deriving(Eq,Ord,Show,Enum,Read)
data Affiliation = Consolidative -- CSL
| Associative -- ASO
| Variative -- VAR
| Coalescent -- COA
deriving(Eq,Ord,Enum,Show,Read)
data Perspective = Monadic -- M
| Unbounded -- U
| Nomic -- N
| Abstract -- A
deriving(Eq,Ord,Enum,Show,Read)
data Extension = Delimitive -- DEL
| Proximal -- PRX
| Inceptive -- ICP
| Terminative -- TRM
| Depletive -- DPL
| Graduative -- GRA
deriving(Eq,Ord,Enum,Show,Read)
data Essence = Normal -- NRM
| Representative -- RPV
deriving(Eq,Ord,Enum,Show,Read)
data Context = Existential -- EXS
| Functional -- FNC
| Representational -- RPS
| Amalgamate -- AMG
deriving(Eq,Ord,Enum,Show,Read)
data Designation = Informal -- IFL
| Formal -- FML
deriving(Eq,Ord,Enum,Show,Read)
--------------------------------------------------------------------------------
-- Case Catigories Section -----------------------------------------------------
--------------------------------------------------------------------------------
data TransrelativeCase = Oblique -- OBL
| Inductive -- IND
| Absolutive -- ABS
| Ergative -- ERG
| Effectuative -- EFF
| Affective -- AFF
| Dative -- DAT
| Instrumental -- INS
| Activative -- ACT
| Derivative -- DER
| Situative -- SIT
deriving(Eq,Ord,Enum,Show,Read)
data PosessiveCase = Possessive -- POS
| Proprietive -- PRP
| Genitive -- GEN
| Attributive -- ATT
| Productive -- PDC
| Interpretative -- ITP
| Originatuve -- OGN
deriving(Eq,Ord,Enum,Show,Read)
data AssociativeCase = Partitive -- PAR
| Contrastive -- CRS
| Compositive -- CPS
| Predicative -- PRD
| Mediative -- MED
| Applicative -- APL
| Purposive -- PUR
| Considerative -- CSD
| Essive -- ESS
| Assimilative -- ASI
| Functive -- FUN
| Transformative -- TFM
| Referential -- REF
| Classificative -- CLA
| Conductive -- CNV
| Interdependent -- IDP
| Benefactive -- BEN
| Transpositive -- TSP
| Commutative -- CMM
| Comitative -- COM
| Conjunctive -- CNJ
| Utilitative -- UTL
| Abessive -- ABE
| Conversive -- CVS
| Correlative -- COR
| Dependent -- DEP
| Provisional -- PVS
| Postulative -- PTL
| Concessive -- CON
| Exceptive -- EXC
| Aversive -- AVR
| Comparative -- CMP
deriving(Eq,Ord,Enum,Show,Read)
data TemporalCase = Simultaneitive -- SML
| Assessive -- ASS
| Concursive -- CNR
| Accessive -- ACS
| Diffusive -- DFF
| Periodic -- PER
| Prolapsive -- PRO
| Precursive -- PCV
| Postcursive -- PCR
| Elapsive -- ELP
| Allapsive -- ALP
| Interpolative -- INP
| Episodic -- EPS
| Prolimitive -- PLM
| Limitative -- LIM
deriving(Eq,Ord,Enum,Show,Read)
data SpatialCase = Locative -- LOC
| Orientative -- ORI
| Procursive -- PSV
| Allative -- ALL
| Ablative -- ABL
| Navigative -- NAV
deriving(Eq,Ord,Enum,Show,Read)
data VocativeCase = Vocative -- VOC
deriving(Eq,Ord,Enum,Show,Read)
-- | Umbrella category for grammatical case.
--
-- NOTE(review): these constructors are nullary and merely REUSE the names of
-- the case-group types declared above — they do not carry a
-- 'TransrelativeCase', 'PosessiveCase', etc. as a payload (type and
-- constructor names live in separate namespaces, so this compiles). If a
-- 'Case' is meant to embed a concrete case value, constructors of the shape
-- @Transrelative TransrelativeCase@ would be needed — confirm intent.
data Case = TransrelativeCase
          | PosessiveCase
          | AssociativeCase
          | TemporalCase
          | SpatialCase
          | VocativeCase
          deriving(Eq,Ord,Enum,Show,Read)
--------------------------------------------------------------------------------
-- Verb Catigories Section -----------------------------------------------------
--------------------------------------------------------------------------------
data Function = Stative
| Dynamic
| Manifestive
| Descriptive
deriving(Eq,Ord,Enum,Show,Read)
data Mood = Factual
| Subjunctive
| Assumptive
| Speculative
| Counterfactive
| Hypothetical
| Implicative
| Ascriptive
deriving(Eq,Ord,Enum,Show,Read)
data Illocution = Assertive
| Directive
| Interrogative
| Admonitive
| Horative
| Declarative
deriving(Eq,Ord,Enum,Show,Read)
data Relation = Framed
| Unframed
deriving(Eq,Ord,Enum,Show,Read)
data Phase = Contextual
| Punctual
| Iterative
| Repititive
| Intermittent
| Recurrent
| Frequentative
| Fragmentative
| Fluctuative
deriving(Eq,Ord,Enum,Show,Read)
-- | Sanction: the discourse status claimed for the proposition.
data Sanction
    = Propositional
    | Epistemic
    | Allegative
    | Imputative
    | Refutative
    | Rebuttative
    | Theoretical
    | Expatiative
    | Axiomatic
    deriving (Eq, Ord, Enum, Show, Read)

-- | Valence: how the actions of two participants interrelate.
data Valence
    = Monoactive
    | Parallel
    | Corollary
    | Reciprocal
    | Complementary
    | Nonrelational
    | Duplicative
    | Demonstrative
    | Resistive
    | Imitative
    | Contingent
    | Participative
    | Indicative
    | Mutual
    deriving (Eq, Ord, Enum, Show, Read)

-- | Version: the intent/outcome orientation of the act.
data Version
    = Processual
    | Completive
    | Ineffectual
    | Incompletive
    | Positive
    | Effective
    deriving (Eq, Ord, Enum, Show, Read)

-- | Validation: the evidential basis for the statement.
-- NOTE(review): @Presumptive2@/@Purportive2@ encode the second degree
-- of the like-named categories; kept as-is for compatibility.
data Validation
    = Confirmative
    | Affirmative
    | Reportive
    | Inferential
    | Intuitive
    | Presumptive
    | Presumptive2
    | Purportive
    | Purportive2
    | Conjectural
    | Dubitative
    | Tentative
    | Putative
    | Improbable
    deriving (Eq, Ord, Enum, Show, Read)
-- | Aspect: fine-grained temporal structure of the act (32 values).
data Aspect
    = Retrospective
    | Prospective
    | Habitual
    | Progressive
    | Imminent
    | Precessive
    | Regulative
    | Experiential
    | Resumptive
    | Cessative
    | Recessative
    | Pausal
    | Regressive
    | Preclusive
    | Continuative
    | Incessative
    | Preemptive
    | Climactic
    | Protractive
    | Temporary
    | Motive
    | Consequential
    | Sequential
    | Expeditive
    | Disclusive
    | Conclusive
    | Culminative
    | Intermediative
    | Tardative
    | Transitional
    | Intercommutative
    | Consumptive
    deriving (Eq, Ord, Enum, Show, Read)

-- | Bias: the speaker's emotional attitude toward the utterance.
data BiasValue
    = Assurative
    | Hyperbolic
    | Coincidental
    | Acceptive
    | Reactive
    | Stupefactive
    | Contemplative
    | Desperative
    | Revelative
    | Gratificative
    | Solicitive
    | Selective
    | Ironic
    | Exasperative
    | Literal
    | Corrective
    | Ephemistic
    | Skeptical
    | Cynical
    | Contemptive
    | Dismissive
    | Indignative
    | Suggestive
    | Propositive
    deriving (Eq, Ord, Enum, Show, Read)

-- | Intensity flag for a bias.
-- NOTE(review): type name @BiasIntence@ (sic) is preserved — renaming
-- would break its uses in 'Bias' and elsewhere.
data BiasIntence
    = Nonintensive
    | Intensive
    deriving (Eq, Ord, Enum, Show, Read)

-- | A bias value paired with its intensity.
data Bias = Bias
    { biasValue   :: BiasValue
    , biasIntence :: BiasIntence
    } deriving (Show)
-- | Modality: the modal colouring of the verb (wish, permission, …).
data Modality
    = Desiderative
    | Aspirative
    | Expectative
    | Credential
    | Requisitive
    | Exhortative
    | Opportunitive
    | Capasitative
    | Permissive
    | Potential
    | Compulsory
    | Obligative
    | Impositive
    | Advocative
    | Intentive
    | Anticipative
    | Dispositive
    | Preparative
    | Necessitative
    | Decisive
    | Proclivitive
    | Voluntative
    | Accordative
    | Inclinative
    | Complusive
    | Divertive
    | Devotive
    | Preferential
    | Impressional
    | Promissory
    deriving (Eq, Ord, Enum, Show, Read)

-- | Level: degree of comparison.
data Level
    = Equative
    | Surpassive
    | Deficient
    | Optimal
    | Minimal
    | Superlative
    | Inferior
    | Superequative
    | Subequative
    deriving (Eq, Ord, Enum, Show, Read)

-- | Format of an incorporated root.
data Format
    = Schematic
    | Instrumentative
    | Objective
    | Authoritive
    | Preccurent
    | Resultative
    | Subsequent
    | Concommitant
    | Affinitive
    deriving (Eq, Ord, Enum, Show, Read)

-- | Degree slot of a suffix.
-- NOTE(review): there is no @Degree7@ — the sequence jumps from 6 to 8,
-- so 'Enum'/'Ord' positions after @Degree6@ are shifted.  Confirm
-- whether the omission is deliberate before relying on 'toEnum'.
data SuffixDegree
    = Degree1
    | Degree2
    | Degree3
    | Degree4
    | Degree5
    | Degree6
    | Degree8
    | Degree9
    deriving (Eq, Ord, Enum, Show, Read)

-- | Suffix type (I–III).
data SuffixType
    = TypeI
    | TypeII
    | TypeIII
    deriving (Eq, Ord, Enum, Show, Read)

-- | A suffix: its type and degree.  (A @suffixValue@ field existed at
-- some point and remains commented out pending a @SuffixValue@ type.)
data Suffix = Suffix
    { suffixType   :: SuffixType
    {- , suffixValue :: SuffixValue -}
    , suffixDegree :: SuffixDegree
    } deriving (Show)
-- | A full formative: one inflected word carrying the root plus every
-- grammatical category slot.  The trailing numbers preserve the slot
-- numbering of the original declaration.
data Formative = Formative
    { root          :: Root           -- 1
    , incorporate   :: Root           -- 2
    , pattern       :: Pattern        -- 3
    , stem          :: Stem           -- 4
    , configuration :: Configuration  -- 5
    , affiliation   :: Affiliation    -- 6
    , perspective   :: Perspective    -- 7
    , extension     :: Extension      -- 8
    , essence       :: Essence        -- 9
    , context       :: Context        -- 10
    , designation   :: Designation    -- 11
    , case_         :: Case           -- 12; underscore avoids the "case" keyword
    , funcion       :: Function       -- 13; NOTE(review): misspelled ("function"?),
                                      --     kept because renaming breaks callers
    , mood          :: Mood           -- 14
    , illocution    :: Illocution     -- 15
    , relation      :: Relation       -- 16
    , phase         :: Phase          -- 17
    , sanction      :: Sanction       -- 18
    , valence       :: Valence        -- 19
    , version       :: Version        -- 20
    , validation    :: Validation     -- 21
    , aspect        :: Aspect         -- 22
    , bias          :: Bias           -- 23
    , modality      :: Modality       -- 24
    , level         :: Level          -- 25
    , format        :: Maybe Format   -- 26
    , suffix1       :: Maybe Suffix   -- 27
    , suffix2       :: Maybe Suffix   -- 28
    , suffix3       :: Maybe Suffix   -- 29
    } deriving (Show)

-- | A (partial) Ithkuil utterance: a sequence of formatives.
type IthkuilString = [Formative]
| hina-ichigo/haskell-ithkuil | core.hs | mit | 9,975 | 138 | 9 | 2,376 | 2,545 | 1,592 | 953 | 402 | 0 |
{-# LANGUAGE Rank2Types #-}
{- |
Module : Summoner.Tui.Validation
Copyright : (c) 2018-2022 Kowainik
SPDX-License-Identifier : MPL-2.0
Maintainer : Kowainik <xrom.xkov@gmail.com>
Stability : Stable
Portability : Portable
This module contains function to validate Form fields.
-}
module Summoner.Tui.Validation
( ctrlD
, summonFormValidation
, formErrorMessages
, handleAutofill
, projectDescNewLine
) where
import Brick.Forms (formState, invalidFields, setFieldValid, setFormFocus)
import Lens.Micro (Lens', (%~), (.~), (^.))
import Validation (Validation (..), failureIf)
import Summoner.Text (moduleNameValid, packageNameValid, packageToModule)
import Summoner.Tui.Form (KitForm, SummonForm (..), getCurrentFocus, mkForm)
import Summoner.Tui.Kit
import qualified Data.Text as T
-- | Handler for the @Ctrl+d@ key combination: resets the currently
-- focused field to its empty value (empty 'Text', or @[]@ for the GHC
-- list).  At most one of the chained 'clearField's fires, since only
-- one field can have focus.
ctrlD :: KitForm e -> KitForm e
ctrlD =
    clearField "" UserFullName (user . fullName)
    . clearField "" UserEmail (user . email)
    . clearField "" ProjectName (project . repo)
    . clearField "" ProjectDesc (project . desc)
    . clearField "" ProjectCat (project . category)
    . clearField "" CustomPreludeName (projectMeta . preludeName)
    . clearField "" CustomPreludeModule (projectMeta . preludeModule)
    . clearField [] Ghcs (projectMeta . ghcs)
    . clearField "" UserOwner (user . owner)
  where
    -- Reset one field to @emptyValue@, but only when it has focus.
    clearField :: a -> SummonForm -> Lens' SummonKit a -> KitForm e -> KitForm e
    clearField emptyValue field fieldLens form
        | getCurrentFocus form == Just field =
            setFormFocus field $ mkForm $ formState form & fieldLens .~ emptyValue
        | otherwise = form
-- | Autofill hook: while the user edits the custom prelude /package/
-- name, keep the prelude /module/ field in sync by deriving it via
-- 'packageToModule'.  Any other focused field leaves the form as-is.
handleAutofill :: KitForm e -> KitForm e
handleAutofill form
    | getCurrentFocus form == Just CustomPreludeName =
        let pkgName  = formState form ^. projectMeta . preludeName
            newState = formState form
                & projectMeta . preludeModule .~ packageToModule pkgName
        in  setFormFocus CustomPreludeName $ mkForm newState
    | otherwise = form
-- | Appends a paragraph break to the project description, but only
-- when the description field currently has focus; otherwise the form
-- is returned unchanged.
projectDescNewLine :: KitForm e -> KitForm e
projectDescNewLine form = case getCurrentFocus form of
    Just ProjectDesc ->
        setFormFocus ProjectDesc $ mkForm $
            formState form & project . desc %~ (<> "\n\n")
    _otherFocus -> form
-- | Validates the main @new@ command form: runs 'validateKit' on the
-- current form state and marks every field implicated in a failure as
-- invalid (and all others as valid) via 'setFieldValid'.
summonFormValidation :: forall e . [FilePath] -> KitForm e -> KitForm e
summonFormValidation dirs kitForm = foldr setValidation kitForm universe
  where
    kit :: SummonKit
    kit = formState kitForm
    -- All fields mentioned by at least one validation error.
    wrongFields :: [SummonForm]
    wrongFields = case validateKit dirs kit of
        Success _ -> []
        Failure errors -> concatMap (toList . errorToInvalidFields) errors
    -- A field is valid iff no error implicates it.
    setValidation :: SummonForm -> KitForm e -> KitForm e
    setValidation field = setFieldValid (field `notElem` wrongFields) field
-- | This data type represents all possible errors that can happen during
-- validation of form input fields.  Rendered for the user by
-- 'showFormError'; mapped back to the offending fields by
-- 'errorToInvalidFields'.
data FormError
    -- | List of empty fields that shouldn't be empty.
    = EmptyFields !(NonEmpty SummonForm)
    -- | List of fields that should be exactly one word.
    | OneWord !(NonEmpty SummonForm)
    -- | A project with this name already exists on disk.
    | ProjectExist
    -- | At least one build tool should be chosen.
    | CabalOrStack
    -- | At least library or executable should be selected.
    | LibOrExe
    -- | Prelude package name should only contain letters, numbers
    -- and hyphens.
    | PreludePackageError
    -- | Prelude module name restrictions check. See 'moduleNameValid'.
    | PreludeModuleError
-- | Render a 'FormError' as a human-readable message for the TUI.
showFormError :: FormError -> String
showFormError formError = case formError of
    EmptyFields fields  -> "These fields must not be empty: " ++ joinFields fields
    OneWord fields      -> "These fields should contain exactly one word: " ++ joinFields fields
    ProjectExist        -> "Directory with such name already exists"
    CabalOrStack        -> "Choose at least one: Cabal or Stack"
    LibOrExe            -> "Choose at least one: Library or Executable"
    PreludePackageError -> "Prelude package should only contain letters, numbers and hyphens"
    PreludeModuleError  -> "Prelude module name could only contain dot-separated capitalized letter/numeral fragments. Ex: This.Is.Valid1"
  where
    -- Comma-separated labels of the offending fields.
    joinFields :: NonEmpty SummonForm -> String
    joinFields fields = intercalate ", " (mapMaybe showField (toList fields))
    -- Human-readable label of a mandatory field; non-mandatory fields
    -- have no label and are filtered out of the message.
    showField :: SummonForm -> Maybe String
    showField field = case field of
        UserOwner           -> Just "Owner"
        UserFullName        -> Just "Full name"
        UserEmail           -> Just "Email"
        ProjectName         -> Just "Name"
        ProjectDesc         -> Just "Description"
        ProjectCat          -> Just "Category"
        CustomPreludeName   -> Just "Prelude name"
        CustomPreludeModule -> Just "Module"
        _nonMandatoryFields -> Nothing
-- | All form fields that should be highlighted as invalid for a given
-- validation error.
errorToInvalidFields :: FormError -> NonEmpty SummonForm
errorToInvalidFields (EmptyFields fields) = fields
errorToInvalidFields (OneWord fields)     = fields
errorToInvalidFields ProjectExist         = one ProjectName
errorToInvalidFields CabalOrStack         = CabalField :| [StackField]
errorToInvalidFields LibOrExe             = Lib :| [Exe]
errorToInvalidFields PreludePackageError  = one CustomPreludeName
errorToInvalidFields PreludeModuleError   = one CustomPreludeModule
-- | Validates 'SummonKit' and returns list of all possible errors or success.
-- All checks run via 'Validation', so every failing check contributes an
-- error (no short-circuiting).
validateKit :: [FilePath] -> SummonKit -> Validation (NonEmpty FormError) ()
validateKit dirs kit =
    validateEmpty
    *> validateOneWord
    *> validateProjectExist
    *> validateBuildTools
    *> validateLibOrExe
    *> validatePreludePackage
    *> validatePreludeModule
  where
    -- Wrap a field-level validation's error into a single 'FormError'.
    liftValidation
        :: (e -> FormError)
        -> Validation e ()
        -> Validation (NonEmpty FormError) ()
    liftValidation mkError = first (one . mkError)
    -- Mandatory text fields must be non-blank; the prelude module is
    -- only mandatory when a prelude package name was entered.
    validateEmpty :: Validation (NonEmpty FormError) ()
    validateEmpty = liftValidation EmptyFields validateFields
      where
        validateFields :: Validation (NonEmpty SummonForm) ()
        validateFields =
            checkField (user . owner) UserOwner
            *> checkField (user . fullName) UserFullName
            *> checkField (user . email) UserEmail
            *> checkField (project . repo) ProjectName
            *> checkField (project . desc) ProjectDesc
            *> failureIf isEmptyPrelude CustomPreludeModule
        checkField :: Lens' SummonKit Text -> SummonForm -> Validation (NonEmpty SummonForm) ()
        checkField textL = failureIf $ isEmpty $ kit ^. textL
        -- Whitespace-only counts as empty.
        isEmpty :: Text -> Bool
        isEmpty t = T.strip t == ""
        isEmptyPrelude :: Bool
        isEmptyPrelude =
            not (isEmpty $ kit ^. projectMeta . preludeName)
            && isEmpty (kit ^. projectMeta . preludeModule)
    -- These fields must contain at most one word (empty is allowed here;
    -- emptiness is reported separately by 'validateEmpty').
    validateOneWord :: Validation (NonEmpty FormError) ()
    validateOneWord = liftValidation OneWord validateFields
      where
        validateFields :: Validation (NonEmpty SummonForm) ()
        validateFields =
            checkField (user . owner) UserOwner
            *> checkField (user . email) UserEmail
            *> checkField (project . repo) ProjectName
            *> checkField (projectMeta . preludeName) CustomPreludeName
            *> checkField (projectMeta . preludeModule) CustomPreludeModule
        checkField :: Lens' SummonKit Text -> SummonForm -> Validation (NonEmpty SummonForm) ()
        checkField textL = failureIf $ case words $ kit ^. textL of
            [] -> False
            [_x] -> False
            _x:_ -> True
    -- The target directory must not already exist.
    validateProjectExist :: Validation (NonEmpty FormError) ()
    validateProjectExist = failureIf
        (toString (kit ^. project . repo) `elem` dirs)
        ProjectExist
    validateBuildTools :: Validation (NonEmpty FormError) ()
    validateBuildTools = failureIf
        (not $ kit ^. cabal || kit ^. stack)
        CabalOrStack
    validateLibOrExe :: Validation (NonEmpty FormError) ()
    validateLibOrExe = failureIf
        (not $ kit ^. projectMeta . lib || kit ^. projectMeta . exe)
        LibOrExe
    -- An empty prelude package name is fine; a non-empty one must be a
    -- syntactically valid package name.
    validatePreludePackage :: Validation (NonEmpty FormError) ()
    validatePreludePackage = failureIf
        (not $ T.null packageName || packageNameValid packageName)
        PreludePackageError
      where
        packageName :: Text
        packageName = kit ^. projectMeta . preludeName
    -- Same scheme for the prelude module name.
    validatePreludeModule :: Validation (NonEmpty FormError) ()
    validatePreludeModule = failureIf
        (not $ T.null moduleName || moduleNameValid moduleName)
        PreludeModuleError
      where
        moduleName :: Text
        moduleName = kit ^. projectMeta . preludeModule
-- | Returns list of error messages according to all invalid fields:
-- the rendered 'validateKit' failures plus a special-cased message for
-- the GHC versions field.
formErrorMessages :: [FilePath] -> KitForm e -> [String]
formErrorMessages dirs kitForm = validatedErrorMessages ++ ghcErrorMessage
  where
    validatedErrorMessages :: [String]
    validatedErrorMessages = case validateKit dirs $ formState kitForm of
        Success _ -> []
        Failure errs -> map showFormError (toList errs)
    -- Hack because input field for GHC versions uses custom @editField@ with its own validation
    ghcErrorMessage :: [String]
    ghcErrorMessage =
        ["Some GHC versions failed to parse: use space-separated valid GHC versions"
        | Ghcs `elem` invalidFields kitForm
        ]
| vrom911/hs-init | summoner-tui/src/Summoner/Tui/Validation.hs | mit | 9,509 | 0 | 16 | 2,392 | 2,098 | 1,081 | 1,017 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Y2017.M11.D30.Exercise where
{--
Today is the dual of yesterday's problem: we have the recommended articles to
be published in the database already, now, from some source article id, we need
to read out the associated recommended article ids and their ranks, and then
materialize and return a set of article briefs-as-JSON.
--}
import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString.Lazy.Char8 as BL
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
-- below imports available via 1HaskellADay git repository
import Store.SQL.Connection
import Store.SQL.Util.Indexed
import Y2017.M11.D01.Exercise -- for SpecialCharTable
import Y2017.M11.D03.Exercise -- for Strength-type
import Y2017.M11.D06.Exercise -- for Value-type
import Y2017.M11.D21.Exercise -- for Brief-type
import Y2017.M11.D24.Exercise -- for loading Briefs from the database
import Y2017.M11.D29.Exercise -- for publish recommend article info
-- | SQL to fetch all publication recommendations for one source
-- article: (source id, recommended id, rank) triples.
fetchToBePublishedStmt :: Query
fetchToBePublishedStmt =
   [sql|SELECT source_article_id,recommended_article_id,rank
        FROM recommendation_publish WHERE source_article_id=?|]

-- | Load the 'Publish' rows for the given source article id.
-- (exercise stub: to be implemented)
fetchPublish :: Connection -> Integer -> IO [Publish]
fetchPublish conn srcId = undefined
-- and to-be-published info contains the kernel of article briefs we'll return
-- Oh, and P.S.: don't forget the brief of the source article
-- so we have to define Publish -> IxValue (Value Strength) where the rank is
-- the strength and the source article has value QRY
-- | Convert one recommendation row into an indexed strength value
-- (rank becomes the strength).  (exercise stub: to be implemented)
pub2Val :: Publish -> IxValue (Value Strength)
pub2Val pub = undefined
-- with that we can use Y2017.M11.D24.Exercise.articleData to get our briefs
-- (we have to load the special character table along the way ...)

-- | Materialize article briefs for the recommended articles (and the
-- source article itself) from the publish rows.
-- (exercise stub: to be implemented)
--
-- Fix: the original bound its first argument as @pubs@, although by the
-- signature that argument is the 'SpecialCharTable'; the binders now
-- match the argument types so the stub cannot be filled in wrongly.
briefs :: SpecialCharTable -> Connection -> [Publish] -> IO [Brief]
briefs charTable conn pubs = undefined
-- don't forget the source article!
{-- BONUS -----------------------------------------------------------------
Create an app that, from a source article id, outputs the briefs of the
recommended articles to be published with it as JSON
--}
-- | App entry point: expects a source article id in @args@ and prints
-- the recommended-article briefs as JSON.
-- (exercise stub: to be implemented)
main' :: [String] -> IO ()
main' args = undefined
| geophf/1HaskellADay | exercises/HAD/Y2017/M11/D30/Exercise.hs | mit | 2,177 | 0 | 9 | 320 | 243 | 156 | 87 | 25 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-ec2fleet-fleetlaunchtemplateconfigrequest.html
module Stratosphere.ResourceProperties.EC2EC2FleetFleetLaunchTemplateConfigRequest where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.EC2EC2FleetFleetLaunchTemplateSpecificationRequest
import Stratosphere.ResourceProperties.EC2EC2FleetFleetLaunchTemplateOverridesRequest
-- | Full data type definition for
-- EC2EC2FleetFleetLaunchTemplateConfigRequest. See
-- 'ec2EC2FleetFleetLaunchTemplateConfigRequest' for a more convenient
-- constructor.  Both properties are optional, matching the
-- CloudFormation resource specification.
data EC2EC2FleetFleetLaunchTemplateConfigRequest =
  EC2EC2FleetFleetLaunchTemplateConfigRequest
  { _eC2EC2FleetFleetLaunchTemplateConfigRequestLaunchTemplateSpecification :: Maybe EC2EC2FleetFleetLaunchTemplateSpecificationRequest
  , _eC2EC2FleetFleetLaunchTemplateConfigRequestOverrides :: Maybe [EC2EC2FleetFleetLaunchTemplateOverridesRequest]
  } deriving (Show, Eq)

-- Serializes to the CloudFormation JSON object, omitting keys whose
-- value is 'Nothing' (via 'catMaybes').
instance ToJSON EC2EC2FleetFleetLaunchTemplateConfigRequest where
  toJSON EC2EC2FleetFleetLaunchTemplateConfigRequest{..} =
    object $
    catMaybes
    [ fmap (("LaunchTemplateSpecification",) . toJSON) _eC2EC2FleetFleetLaunchTemplateConfigRequestLaunchTemplateSpecification
    , fmap (("Overrides",) . toJSON) _eC2EC2FleetFleetLaunchTemplateConfigRequestOverrides
    ]

-- | Constructor for 'EC2EC2FleetFleetLaunchTemplateConfigRequest' containing
-- required fields as arguments.  (There are none; both fields start as
-- 'Nothing' and are set through the lenses below.)
ec2EC2FleetFleetLaunchTemplateConfigRequest
  :: EC2EC2FleetFleetLaunchTemplateConfigRequest
ec2EC2FleetFleetLaunchTemplateConfigRequest =
  EC2EC2FleetFleetLaunchTemplateConfigRequest
  { _eC2EC2FleetFleetLaunchTemplateConfigRequestLaunchTemplateSpecification = Nothing
  , _eC2EC2FleetFleetLaunchTemplateConfigRequestOverrides = Nothing
  }

-- | Lens for the launch template specification property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-ec2fleet-fleetlaunchtemplateconfigrequest.html#cfn-ec2-ec2fleet-fleetlaunchtemplateconfigrequest-launchtemplatespecification
ececffltcrLaunchTemplateSpecification :: Lens' EC2EC2FleetFleetLaunchTemplateConfigRequest (Maybe EC2EC2FleetFleetLaunchTemplateSpecificationRequest)
ececffltcrLaunchTemplateSpecification = lens _eC2EC2FleetFleetLaunchTemplateConfigRequestLaunchTemplateSpecification (\s a -> s { _eC2EC2FleetFleetLaunchTemplateConfigRequestLaunchTemplateSpecification = a })

-- | Lens for the per-template overrides property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-ec2fleet-fleetlaunchtemplateconfigrequest.html#cfn-ec2-ec2fleet-fleetlaunchtemplateconfigrequest-overrides
ececffltcrOverrides :: Lens' EC2EC2FleetFleetLaunchTemplateConfigRequest (Maybe [EC2EC2FleetFleetLaunchTemplateOverridesRequest])
ececffltcrOverrides = lens _eC2EC2FleetFleetLaunchTemplateConfigRequestOverrides (\s a -> s { _eC2EC2FleetFleetLaunchTemplateConfigRequestOverrides = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/EC2EC2FleetFleetLaunchTemplateConfigRequest.hs | mit | 2,995 | 0 | 12 | 206 | 260 | 153 | 107 | 29 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Character.Database (
getPlayers,
selectPlayer,
updatePlayer
) where
import Control.Applicative ((<$>), (<*>))
import Database.SQLite.Simple (FromRow(..),
Connection(..),
Query(..),
Only(..),
field,
open,
query,
execute,
close)
import Data.Map (Map, fromList, lookup)
import Prelude hiding (lookup)
import Data.Text (pack)
import Character.Util (readMaybe)
import Character.Types
import Instances.Classes
import Instances.Races
-- | List the names of all stored characters.
--
-- Fixes: the connection is now closed after the query (the original
-- leaked it), and the eta-expanded @\x -> name x@ is simplified to
-- plain 'name'.
getPlayers :: IO [String]
getPlayers = do
    conn <- open "dungeons.db"
    players <- query conn "SELECT * from character" () :: IO [DBPlayer]
    close conn
    return $ map name players
-- | Load a full 'Player' by character name, materializing feats, magic
-- items, powers, skills and inventory from their join tables.
-- Returns 'Nothing' when the name matches zero or multiple rows.
--
-- NOTE(review): the connection is only closed on the single-row path;
-- the fall-through branch returns without 'close' — confirm and fix.
selectPlayer :: String -> IO (Maybe Player)
selectPlayer inputName = do
    conn <- open "dungeons.db"
    dbps <- ((query conn "SELECT * from character WHERE name = ?" (Only inputName)) :: IO [DBPlayer])
    case dbps of
        [dbp] -> do
            let p = pid dbp
            -- one sub-select per join table, resolving ids to full rows
            descIds <- (subSelect "featsToCharacters" conn p :: IO [AToB])
            feats <- getDescribedList descIds "feats" conn
            miIds <- (subSelect "magicItemsToCharacters" conn p :: IO [AToB])
            mis <- getDescribedList miIds "magicItems" conn
            powerIds <- (subSelect "powersToCharacters" conn p :: IO [AToB])
            powers <- getDescribedList powerIds "powers" conn
            skillIds <- (subSelect "skillsToCharacters" conn p :: IO [AToB])
            items <- (subSelect "itemsToCharacters" conn p :: IO [Counted])
            close conn
            return $ buildPlayer dbp feats mis powers skillIds items
        _ -> return Nothing
-- | Fetch all rows of a join table that reference the given character id.
-- NOTE(review): @tableName@ is spliced into the SQL by concatenation, so
-- it must only ever be a trusted, hard-coded table name (as at the call
-- sites in this module) — never user input.
subSelect :: FromRow r => [Char] -> Connection -> Int -> IO [r]
subSelect tableName conn p = query conn (Query $ pack $ "SELECT * FROM " ++ tableName ++ " WHERE charId = ?") (Only p)
-- | Assemble a 'Player' from the raw DB pieces.  Yields 'Nothing' when
-- any of race/class/armor/weapon/skills fails to parse.
buildPlayer :: DBPlayer -> [Described] -> [Described] -> [Described] -> [AToB] -> [Counted] -> Maybe Player
buildPlayer dbp dbfeats dbmis dbpowers dbskills dbitems = do
    r <- parseRace (race dbp)
    c <- parseClass (class_ dbp) (classSpec dbp)
    a <- parseArmor (armor dbp)
    w <- parseWeapon (weapon dbp)
    sk <- parseSkills dbskills
    return $ newPlayer (name dbp) (buildFeats dbfeats) (level dbp) (xp dbp) r c a w (buildMagicItems dbmis) (buildItems dbitems) (buildPowers dbpowers) sk (buildSkills dbp)
-- | Apply a single key/value update (decoded from request parameters by
-- 'getKeyVal') to the character with the given name.  Silently does
-- nothing when the parameters are malformed or the name is ambiguous.
--
-- NOTE(review): the connection opened here is never closed — confirm
-- and fix.
updatePlayer :: Map String String -> String -> IO ()
updatePlayer params playerName = do
    conn <- open "dungeons.db"
    case getKeyVal params of
        Just (key, val) -> do
            dbPlayers <- ((query conn "SELECT * FROM character WHERE name = ?" (Only playerName)) :: IO [DBPlayer])
            case dbPlayers of
                [dbPlayer] -> updatePlayerParser key val conn $ pid dbPlayer
                _ -> return ()
        Nothing -> return ()
-- | Extract the @("key", numeric "value")@ pair from request
-- parameters.  Yields 'Nothing' when either parameter is missing or
-- the value is not a readable 'Int'.
getKeyVal :: (Map String String) -> Maybe (String, Int)
getKeyVal ps =
    case (lookup "key" ps, lookup "value" ps) of
        (Just key, Just val) -> (,) key <$> (readMaybe val :: Maybe Int)
        _missing             -> Nothing
-- | Apply one key/value update to the character with id @p@.
--
-- @"xp"@ updates the character's experience directly; any other key is
-- treated as an inventory item name whose count is inserted, updated,
-- or (when the new count is 0) deleted in @itemsToCharacters@.
--
-- Fix: all statements now bind @key@ and @val@ as SQL parameters
-- instead of splicing their 'show'n values into the query string.  The
-- key originates from request parameters ('getKeyVal'), so the old
-- concatenation was an SQL-injection vector.
updatePlayerParser :: String -> Int -> Connection -> Int -> IO ()
updatePlayerParser key val conn p = do
    case key of
        "xp" ->
            execute conn "UPDATE character SET xp = ? WHERE charId = ?" (val, p)
        _ -> do
            itemCount <- ((query conn "SELECT * FROM itemsToCharacters WHERE name = ? AND charId = ?" (key, p)) :: IO [Counted])
            case itemCount of
                -- no row yet: create it with the requested count
                [] -> execute conn "INSERT INTO itemsToCharacters VALUES (?, ?, ?)" (key, val, p)
                _ -> case val of
                    -- count dropped to zero: remove the row entirely
                    0 -> execute conn "DELETE FROM itemsToCharacters WHERE name = ? AND charId = ?" (key, p)
                    _ -> execute conn "UPDATE itemsToCharacters SET count = ? WHERE name = ? AND charId = ?" (val, key, p)
-- DB Data Structures
-- | Row of the @character@ table; field order must match the column
-- order expected by the 'FromRow' instance below.
data DBPlayer = DBPlayer { pid :: Int          -- ^ primary key
                         , name :: String
                         , level :: Int
                         , xp :: Int
                         , class_ :: String    -- ^ parsed by 'parseClass'
                         , classSpec :: String
                         , race :: String      -- ^ parsed by 'parseRace'
                         , str :: Int
                         , con :: Int
                         , dex :: Int
                         , int :: Int
                         , wis :: Int
                         , cha :: Int
                         , armor :: String     -- ^ parsed by 'parseArmor'
                         , weapon :: String } deriving (Show)
instance FromRow DBPlayer where
    -- 15 columns, consumed in the record-field order above.
    fromRow = DBPlayer <$>
        field <*> field <*> field <*> field <*>
        field <*> field <*> field <*> field <*>
        field <*> field <*> field <*> field <*>
        field <*> field <*> field
-- | Generic (id, name, description) row shared by the @feats@,
-- @magicItems@ and @powers@ tables.
data Described = Described { descId :: String
                           , descName :: String
                           , descDescription :: String }
instance FromRow Described where
    fromRow = Described <$> field <*> field <*> field
-- | Row of @itemsToCharacters@: an item name, its count, and the
-- owning character's id.
data Counted = Counted { countedName :: String
                       , countedCount :: Int
                       , countedCharId :: Int }
instance FromRow Counted where
    fromRow = Counted <$> field <*> field <*> field
-- | Generic join-table row: some entity id paired with a character id.
data AToB = AToB { aToBDesc :: String
                 , aToBChar :: Int }
instance FromRow AToB where
    fromRow = AToB <$> field <*> field
-- | Resolve each join-table row to its full 'Described' record by id.
--
-- Fix: the original took 'head' of each per-id query result and
-- crashed on a dangling reference (id present in the join table but
-- missing from @tableName@).  Missing rows are now skipped instead.
getDescribedList :: [AToB] -> String -> Connection -> IO [Described]
getDescribedList [] _ _ = return []
getDescribedList (x:xs) tableName conn = do
    -- tableName is always a hard-coded, trusted table name at call sites
    let selectStatement = Query $ pack $ "SELECT * from " ++ tableName ++ " WHERE id = ?"
    rows <- ((query conn selectStatement (Only $ aToBDesc x)) :: IO [Described])
    rest <- getDescribedList xs tableName conn
    case rows of
        (row:_) -> return (row : rest)
        []      -> return rest
-- DB -> Internal Data Structure Translators
-- | The six ability scores of a DB row as a map keyed by 'Ability'.
buildSkills :: DBPlayer -> (Map Ability Int)
buildSkills dbp = fromList [
    (Str, str dbp),
    (Con, con dbp),
    (Dex, dex dbp),
    (Int, int dbp),
    (Wis, wis dbp),
    (Cha, cha dbp)]
-- | Convert @feats@ rows to 'Feat' values.  (Manual recursion replaced
-- with 'map'.)
buildFeats :: [Described] -> [Feat]
buildFeats = map (\d -> Feat (descName d) (descDescription d))

-- | Convert @magicItems@ rows to 'MagicItem' values.
buildMagicItems :: [Described] -> [MagicItem]
buildMagicItems = map (\d -> MagicItem (descName d) (descDescription d))

-- | Convert @powers@ rows to 'Power' values.
buildPowers :: [Described] -> [Power]
buildPowers = map (\d -> Power (descName d) (descDescription d))
-- | Inventory rows as a map from 'Item' to count.
buildItems :: [Counted] -> (Map Item Int)
buildItems items = fromList $ buildItems_ items

-- | One (item, count) pair per row.  (Manual recursion replaced with
-- 'map'; the original's redundant doubled parentheses are gone.)
buildItems_ :: [Counted] -> [(Item, Int)]
buildItems_ = map (\c -> (Item $ countedName c, countedCount c))
-- | Parse every join-table row into a 'Skill'.  Fails as a whole
-- ('Nothing') if any single row fails to parse — exactly the
-- short-circuiting that 'mapM' over 'Maybe' provides.
parseSkills :: [AToB] -> Maybe [Skill]
parseSkills = mapM (readMaybe . aToBDesc)
-- | Parse the stored armor name; only @"light"@ is recognised so far.
parseArmor :: String -> Maybe Armor
parseArmor "light" = Just $ Armor Light 1
parseArmor _ = Nothing
-- | Parse the stored weapon name; only @"staff"@ is recognised so far.
parseWeapon :: String -> Maybe Weapons
parseWeapon "staff" = Just $ TwoHandedWeapon $ TwoHander Staff 8
parseWeapon _ = Nothing
-- | Parse the stored race name; only @"halfling"@ is recognised so far.
parseRace :: String -> Maybe Race
parseRace "halfling" = Just halfling
parseRace _ = Nothing
-- | Parse class plus specialisation; only druid/primalPredator so far.
parseClass :: String -> String -> Maybe Class
parseClass "druid" "primalPredator" = Just $ druid primalPredator
parseClass _ _ = Nothing
| quintenpalmer/dungeons | server/src/Character/Database.hs | mit | 7,567 | 0 | 25 | 2,340 | 2,488 | 1,293 | 1,195 | 165 | 4 |
-- | Annotating type checker
module TypeCheckerA where
import Control.Monad(mapAndUnzipM,zipWithM)
import Debug.Trace(trace)
import AbsCPP
import PrintCPP
import ErrM
import BuiltIns
import Environment
-- 2) do type-checking and annotation
-- check EApp f es
-- (ts, t) <- lookUp f
-- smart way is to use zip:
-- let ets = zip es ts - but will truncate the longer list!
-- mapM (\(e,t) -> check e t) ets
-- | Type-check a whole program and return it annotated with types.
typecheck :: Program -> Err Program
typecheck (Prog defs) = do
    -- first pass: collect all function signatures (incl. built-ins)
    env <- buildFunTable emptyEnvT (defs ++ builtInFunctions)
    -- second pass: check bodies and annotate the AST
    (defs',_) <- checkDefs env defs
    return (Prog defs')
-- | Builds a symbol table for functions in the environment: each
-- definition's id is mapped to its argument types and return type.
buildFunTable :: EnvT -> [Def] -> Err EnvT -- or just SigTab
buildFunTable env [] = return env
buildFunTable env (d:ds) =
    case d of
        Fun ftype id args _ -> do
            env' <- updateFunT env id (map argType args, ftype)
            buildFunTable env' ds
        _ -> fail "Bad function definition, buildFunTable"
-- | Type-checks a list of function definitions, threading the
-- environment left-to-right and collecting the annotated definitions.
checkDefs :: EnvT -> [Def] -> Err ([Def],())
checkDefs env [] = return ([],())
checkDefs env (d:ds) = do
    (d' ,env' ) <- checkDef env d
    (ds',env'') <- checkDefs env' ds
    return(d':ds', env'')
-- | Type-checks a function definition: opens a scope, binds the
-- arguments, then checks the body statements.
checkDef :: EnvT -> Def -> Err (Def, EnvT)
checkDef env (Fun t id args stms) = do
    env' <- addArgsT (addScopeT env) args
    -- Since 'return' is a reserved word there will never be a variable
    -- with that id, so the slot is used to carry the function's return
    -- type into every scope (consumed by the SReturn case of checkStm).
    env'' <- updateVarT env' (Id "return") t
    (stms',env''') <- checkStms env'' stms
    return (Fun t id args stms',env''')
-- | Type-checks a sequence of statements, threading the environment
-- through declarations.
checkStms :: EnvT -> [Stm] -> Err ([Stm],EnvT)
checkStms env [] = return ([],env)
checkStms env (st:stms) = do
    (st' ,env' ) <- checkStm env st
    (stms',env'') <- checkStms env' stms
    return (st':stms',env'')
-- | Type-checks a single statement, returning the annotated statement
-- together with the (possibly extended) environment.
checkStm :: EnvT -> Stm -> Err (Stm,EnvT)
checkStm env s =
    case s of
      -- single declaration: just extend the environment
      SDecl t x -> do
          env' <- updateVarT env x t
          return(s,env')
      -- multi-declaration: declare each id in turn
      SDecls t ids -> do
          (_,env') <- checkOne env t ids
          return (SDecls t ids, env')
        where
          checkOne :: EnvT -> Type -> [Id] -> Err (Stm,EnvT)
          checkOne env t (id:ids) = do
              (_,env') <- checkStm env (SDecl t id)
              checkOne env' t ids
          checkOne env t [] = return (SDecls t [],env)
      -- assignment: expression must have the variable's declared type
      SAss x e -> do
          t <- lookupVarT env x
          e' <- checkExp env e t
          return (SAss x e',env)
      SInit t id exp -> do
          -- first declare
          (_,env') <- checkStm env (SDecl t id)
          -- then check assignment
          (SAss id exp',env'') <- checkStm env' (SAss id exp)
          return (SInit t id exp', env'')
      -- block: check inside a fresh scope, restore the outer env after
      SBlock stms -> do
          (stms',_) <- checkStms (addScopeT env) stms
          return (SBlock stms',env) -- or env'?
      -- return: must match the function type stored under "return"
      SReturn exp -> do
          retType <- lookupVarT env (Id "return")
          exp' <- checkExp env exp retType
          return (SReturn exp', env)
      SExp exp -> do
          (exp',t) <- inferExp env exp
          return (SExp exp', env)
      -- conditionals: condition must be boolean
      SIfElse exp s1 s2 -> do
          exp' <- checkExp env exp TBool
          (s1',env' ) <- checkStm env s1
          (s2',env'') <- checkStm env s2
          return (SIfElse exp' s1' s2', env'')
      SWhile exp stm -> do
          exp' <- checkExp env exp TBool
          (stm',env') <- checkStm env stm
          return (SWhile exp' stm', env')
      --updateVars env ids typ
      _ -> error ("Case not exhaustive in checkstm \n"
                  ++ show s ++ " \n " ++ printTree s)
-- | Checks that expression @e@ has type @t@: infers its type and, on a
-- match, returns the annotated expression; otherwise fails with a
-- descriptive message.
checkExp :: EnvT -> Exp -> Type -> Err Exp
checkExp env e t = do
    (e', t') <- inferExp env e
    if t' == t
        then return e'
        else fail (printTree e ++ " has type " ++ printTree t'
                   ++ ", expected: " ++ printTree t)
-- | Infers the type of the expression argument, returning the
-- 'ETyped'-annotated expression together with its type.  Comparison
-- and arithmetic families are desugared through their "base case"
-- (ELtEq resp. EPlus) so the operand checks live in one place.
inferExp :: EnvT -> Exp -> Err (Exp,Type)
inferExp env e =
    case e of
      -- variable declaration and assignment
      EId x -> do
          t <- lookupVarT env x
          return (ETyped t e, t)
      EAss e1 e2 -> do
          (e1',t1) <- inferExp env e1
          (e2',t2) <- inferExp env e2
          if t1 == t2
            then return (ETyped t1 (EAss e1' e2'), t1)
            else fail ("Assignment type mismatch: \n" ++
                       "left-hand side " ++ printTree e1 ++
                       " has type " ++ printTree t1 ++
                       " but right-hand side " ++ printTree e2 ++
                       " has type " ++ printTree t2)
      -- literals
      EInt _ -> return (ETyped TInt e, TInt)
      EDouble _ -> return (ETyped TDouble e, TDouble)
      ETrue -> return (ETyped TBool ETrue, TBool)
      EFalse -> return (ETyped TBool EFalse, TBool)
      -- comparison expressions type-check similarly to ELtEq
      ENEq e1 e2 -> do
          (ETyped TBool (ELtEq e1' e2'), TBool) <- inferExp env (ELtEq e1 e2)
          return (ETyped TBool (ENEq e1' e2'), TBool)
      EEq e1 e2 -> do
          (ETyped TBool (ELtEq e1' e2'), TBool) <- inferExp env (ELtEq e1 e2)
          return (ETyped TBool (EEq e1' e2'), TBool)
      EGt e1 e2 -> do
          (ETyped TBool (ELtEq e1' e2'), TBool) <- inferExp env (ELtEq e1 e2)
          return (ETyped TBool (EGt e1' e2'), TBool)
      ELt e1 e2 -> do
          (ETyped TBool (ELtEq e1' e2'), TBool) <- inferExp env (ELtEq e1 e2)
          return (ETyped TBool (ELt e1' e2'), TBool)
      EGtEq e1 e2 -> do
          (ETyped TBool (ELtEq e1' e2'), TBool) <- inferExp env (ELtEq e1 e2)
          return (ETyped TBool (EGtEq e1' e2'), TBool)
      ---- 'base case': operands must share a non-void type
      ELtEq e1 e2 -> do
          (e1',t1) <- inferExp env e1
          (e2',t2) <- inferExp env e2
          if t1 == t2
            then case t1 of
                   TVoid -> fail "Comparison of void values"
                   _ -> return (ETyped TBool (ELtEq e1' e2'), TBool)
            else fail ("Type mismatch: \n" ++
                       printTree e1 ++ " has type " ++
                       printTree t1 ++ " but " ++
                       printTree e2 ++ " has type " ++
                       printTree t2)
      -- arithmetic expressions type-check similarly to EPlus
      EDiv e1 e2 -> do
          (ETyped t (EPlus e1' e2'), t') <- inferExp env (EPlus e1 e2)
          return (ETyped t (EDiv e1' e2'), t)
      ETimes e1 e2 -> do
          (ETyped t (EPlus e1' e2'), t') <- inferExp env (EPlus e1 e2)
          return (ETyped t (ETimes e1' e2'), t)
      EMinus e1 e2 -> do
          (ETyped t (EPlus e1' e2'), t') <- inferExp env (EPlus e1 e2)
          return (ETyped t (EMinus e1' e2'), t)
      ---- 'base case': operands must share a numeric type
      EPlus e1 e2 -> do
          (e1',t1) <- inferExp env e1
          (e2',t2) <- inferExp env e2
          if t1 == t2
            then case t1 of
                   TBool -> fail "Arithmetic operation on bool values"
                   TVoid -> fail "Arithmetic operation on void values"
                   _ -> return (ETyped t1 (EPlus e1' e2'), t1)
            else fail ("Type mismatch: \n" ++
                       printTree e1 ++ " has type " ++
                       printTree t1 ++ " but " ++
                       printTree e2 ++ " has type " ++
                       printTree t2)
      -- conjunction and disjunction: both operands must be boolean
      EOr e1 e2 -> do
          e1' <- checkExp env e1 TBool
          e2' <- checkExp env e2 TBool
          return (ETyped TBool (EOr e1' e2'), TBool)
      EAnd e1 e2 -> do
          e1' <- checkExp env e1 TBool
          e2' <- checkExp env e2 TBool
          return (ETyped TBool (EAnd e1' e2'), TBool)
      -- increments and decrements type-check similarly to EIncr
      EPDecr e -> do
          (ETyped t (EIncr e'),t') <- inferExp env (EIncr e)
          return (ETyped t (EPDecr e'), t)
      EPIncr e -> do
          (ETyped t (EIncr e'),t') <- inferExp env (EIncr e)
          return (ETyped t (EPIncr e'), t)
      EDecr e -> do
          (ETyped t (EIncr e'),t') <- inferExp env (EIncr e)
          return (ETyped t (EDecr e'), t)
      ---- 'base case': only numeric values may be incremented
      EIncr e -> do
          (e',t) <- inferExp env e
          case t of
            TBool -> fail "Increment of boolean value"
            TVoid -> fail "Increment of void value"
            _ -> return (ETyped t (EIncr e'),t)
      -- function call
      EApp id exps -> inferFun env e
      _ -> fail ("inferExp has a non exhaustive case pattern \n"
                 ++ show e ++ " \n " ++ printTree e)
-- | Annotates return type of a function
-- and infers types of its argument expressions, checking each argument
-- against the declared parameter type.
-- NOTE(review): intentionally partial — only the 'EApp' shape is
-- matched; 'inferExp' is the sole caller and only passes 'EApp'.
inferFun :: EnvT -> Exp -> Err (Exp,Type)
inferFun env (EApp id exps) = do
    (types, ftype) <- lookupFunT env id
    if length exps == length types
      then do
          exps' <- zipWithM (checkExp env) exps types
          return (ETyped ftype (EApp id exps'),ftype)
      else
          fail "Incorrect no. arguments in function call"
-- inferFunHelper :: Env -> [Exp] -> [Type] -> Err Exp
-- inferFunHelper env e:[] t:[] = return ()
-- inferFunHelper env exps types
-- inferFunHelper env (e:es) (t:ts) = do etyp <- inferExp env e
-- if etyp == t
-- then inferFunHelper env es ts
-- else fail "type error in argument of function call"
-- inferFunHelper _ _ _ = fail "inferFunHelper has non exhaustive case pattern"
| izimbra/PLT2014 | Lab2/TypeCheckerA.hs | gpl-2.0 | 11,490 | 0 | 20 | 5,297 | 3,210 | 1,577 | 1,633 | 167 | 32 |
--------------------------------------------------------------------------------
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Patat.Theme
( Theme (..)
, defaultTheme
, Style (..)
, SyntaxHighlighting (..)
, defaultSyntaxHighlighting
, syntaxHighlight
) where
--------------------------------------------------------------------------------
import Control.Monad (forM_, mplus)
import qualified Data.Aeson as A
import qualified Data.Aeson.TH.Extended as A
import Data.Char (toLower, toUpper)
import Data.Colour.SRGB (RGB (..), sRGB24reads, toSRGB24)
import Data.List (intercalate, isPrefixOf, isSuffixOf)
import qualified Data.Map as M
import Data.Maybe (mapMaybe, maybeToList)
import qualified Data.Text as T
import Numeric (showHex)
import Prelude
import qualified Skylighting as Skylighting
import qualified System.Console.ANSI as Ansi
import Text.Read (readMaybe)
--------------------------------------------------------------------------------
-- | A (partial) presentation theme.  Every setting is optional ('Maybe')
-- so that themes can be layered: see the 'Semigroup' instance below,
-- where the left theme's settings take precedence over the right's.
data Theme = Theme
    { themeBorders           :: !(Maybe Style)
    , themeHeader            :: !(Maybe Style)
    , themeCodeBlock         :: !(Maybe Style)
    , themeBulletList        :: !(Maybe Style)
    , themeBulletListMarkers :: !(Maybe T.Text)
    , themeOrderedList       :: !(Maybe Style)
    , themeBlockQuote        :: !(Maybe Style)
    , themeDefinitionTerm    :: !(Maybe Style)
    , themeDefinitionList    :: !(Maybe Style)
    , themeTableHeader       :: !(Maybe Style)
    , themeTableSeparator    :: !(Maybe Style)
    , themeLineBlock         :: !(Maybe Style)
    , themeEmph              :: !(Maybe Style)
    , themeStrong            :: !(Maybe Style)
    , themeUnderline         :: !(Maybe Style)
    , themeCode              :: !(Maybe Style)
    , themeLinkText          :: !(Maybe Style)
    , themeLinkTarget        :: !(Maybe Style)
    , themeStrikeout         :: !(Maybe Style)
    , themeQuoted            :: !(Maybe Style)
    , themeMath              :: !(Maybe Style)
    , themeImageText         :: !(Maybe Style)
    , themeImageTarget       :: !(Maybe Style)
    , themeSyntaxHighlighting :: !(Maybe SyntaxHighlighting)
    } deriving (Show)
--------------------------------------------------------------------------------
-- | Left-biased merge: for each setting the left theme wins whenever it
-- is set ('mplus' on 'Maybe'); the syntax-highlighting maps are instead
-- combined with 'mappend' so both sides' token styles survive.
instance Semigroup Theme where
    l <> r = Theme
        { themeBorders           = mplusOn themeBorders
        , themeHeader            = mplusOn themeHeader
        , themeCodeBlock         = mplusOn themeCodeBlock
        , themeBulletList        = mplusOn themeBulletList
        , themeBulletListMarkers = mplusOn themeBulletListMarkers
        , themeOrderedList       = mplusOn themeOrderedList
        , themeBlockQuote        = mplusOn themeBlockQuote
        , themeDefinitionTerm    = mplusOn themeDefinitionTerm
        , themeDefinitionList    = mplusOn themeDefinitionList
        , themeTableHeader       = mplusOn themeTableHeader
        , themeTableSeparator    = mplusOn themeTableSeparator
        , themeLineBlock         = mplusOn themeLineBlock
        , themeEmph              = mplusOn themeEmph
        , themeStrong            = mplusOn themeStrong
        , themeUnderline         = mplusOn themeUnderline
        , themeCode              = mplusOn themeCode
        , themeLinkText          = mplusOn themeLinkText
        , themeLinkTarget        = mplusOn themeLinkTarget
        , themeStrikeout         = mplusOn themeStrikeout
        , themeQuoted            = mplusOn themeQuoted
        , themeMath              = mplusOn themeMath
        , themeImageText         = mplusOn themeImageText
        , themeImageTarget       = mplusOn themeImageTarget
        , themeSyntaxHighlighting = mappendOn themeSyntaxHighlighting
        }
      where
        mplusOn   f = f l `mplus` f r
        mappendOn f = f l `mappend` f r
--------------------------------------------------------------------------------
-- | 'mempty' is the fully-unspecified theme.  NOTE: the 24 positional
-- 'Nothing's below must match the number of fields in 'Theme'; keep this
-- in sync when adding or removing a field.
instance Monoid Theme where
    mappend = (<>)
    mempty  = Theme
        Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
        Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
        Nothing Nothing Nothing Nothing Nothing Nothing
--------------------------------------------------------------------------------
-- | The built-in theme: the styles used when the user's theme does not
-- override a setting.
defaultTheme :: Theme
defaultTheme = Theme
    { themeBorders           = fgDull Ansi.Yellow
    , themeHeader            = fgDull Ansi.Blue
    , themeCodeBlock         = fgDull Ansi.White <> bgDull Ansi.Black
    , themeBulletList        = fgDull Ansi.Magenta
    , themeBulletListMarkers = Just "-*"
    , themeOrderedList       = fgDull Ansi.Magenta
    , themeBlockQuote        = fgDull Ansi.Green
    , themeDefinitionTerm    = fgDull Ansi.Blue
    , themeDefinitionList    = fgDull Ansi.Magenta
    , themeTableHeader       = fgDull Ansi.Blue
    , themeTableSeparator    = fgDull Ansi.Magenta
    , themeLineBlock         = fgDull Ansi.Magenta
    , themeEmph              = fgDull Ansi.Green
    , themeStrong            = fgDull Ansi.Red <> boldStyle
    , themeUnderline         = fgDull Ansi.Red <> underlined
    , themeCode              = fgDull Ansi.White <> bgDull Ansi.Black
    , themeLinkText          = fgDull Ansi.Green
    , themeLinkTarget        = fgDull Ansi.Cyan <> underlined
    , themeStrikeout         = bgDull Ansi.Red
    , themeQuoted            = fgDull Ansi.Green
    , themeMath              = fgDull Ansi.Green
    , themeImageText         = fgDull Ansi.Green
    , themeImageTarget       = fgDull Ansi.Cyan <> underlined
    , themeSyntaxHighlighting = Just defaultSyntaxHighlighting
    }
  where
    -- Dull foreground/background colours plus the two text attributes
    -- used above.
    fgDull c   = Just $ Style [Ansi.SetColor Ansi.Foreground Ansi.Dull c]
    bgDull c   = Just $ Style [Ansi.SetColor Ansi.Background Ansi.Dull c]
    boldStyle  = Just $ Style [Ansi.SetConsoleIntensity Ansi.BoldIntensity]
    underlined = Just $ Style [Ansi.SetUnderlining Ansi.SingleUnderline]
--------------------------------------------------------------------------------
-- | A terminal style: a list of ANSI SGR commands applied together.  The
-- newtype-derived 'Semigroup'/'Monoid' concatenate the command lists.
newtype Style = Style {unStyle :: [Ansi.SGR]}
    deriving (Monoid, Semigroup, Show)
--------------------------------------------------------------------------------
-- | Serialise a style as the list of its known style names; SGR commands
-- for which 'sgrToString' has no name are silently dropped.
instance A.ToJSON Style where
    toJSON (Style sgrs) = A.toJSON (mapMaybe sgrToString sgrs)
--------------------------------------------------------------------------------
-- | Parse a style from a JSON list of style names (e.g. @["bold",
-- "dullRed"]@); fails with a message listing every accepted name.
instance A.FromJSON Style where
    parseJSON val = do
        names <- A.parseJSON val
        sgrs <- mapM toSgr names
        return $! Style sgrs
      where
        -- Resolve one style name, or fail the whole parse.
        toSgr name = case stringToSgr name of
            Just sgr -> return sgr
            Nothing -> fail $!
                "Unknown style: " ++ show name ++ ". Known styles are: " ++
                intercalate ", " (map show $ M.keys namedSgrs) ++
                ", or \"rgb#RrGgBb\" and \"onRgb#RrGgBb\" where 'Rr', " ++
                "'Gg' and 'Bb' are hexadecimal bytes (e.g. \"rgb#f08000\")."
--------------------------------------------------------------------------------
-- | Resolve a configuration style name to an SGR command: either an
-- explicit @rgb#@/@onRgb#@ colour, or one of the generated names in
-- 'namedSgrs'.
stringToSgr :: String -> Maybe Ansi.SGR
stringToSgr str
    | "onRgb#" `isPrefixOf` str = rgbToSgr Ansi.Background (drop 6 str)
    | "rgb#" `isPrefixOf` str   = rgbToSgr Ansi.Foreground (drop 4 str)
    | otherwise                 = M.lookup str namedSgrs
--------------------------------------------------------------------------------
-- | Interpret a string of six hex digits (@RrGgBb@) as a 24-bit colour
-- for the given layer; 'Nothing' unless the whole string parses uniquely.
rgbToSgr :: Ansi.ConsoleLayer -> String -> Maybe Ansi.SGR
rgbToSgr layer hexDigits =
    case sRGB24reads hexDigits of
        [(colour, "")] -> Just (Ansi.SetRGBColor layer colour)
        _              -> Nothing
--------------------------------------------------------------------------------
-- | The inverse of 'stringToSgr': render an SGR command as its
-- configuration-file name.  Returns 'Nothing' for SGR commands that have
-- no textual representation (these cannot appear in a theme file).
sgrToString :: Ansi.SGR -> Maybe String
sgrToString (Ansi.SetColor layer intensity color) = Just $
    (\str -> case layer of
        Ansi.Foreground -> str
        Ansi.Background -> "on" ++ capitalize str) $
    (case intensity of
        Ansi.Dull  -> "dull"
        Ansi.Vivid -> "vivid") ++
    (case color of
        Ansi.Black   -> "Black"
        Ansi.Red     -> "Red"
        Ansi.Green   -> "Green"
        Ansi.Yellow  -> "Yellow"
        Ansi.Blue    -> "Blue"
        Ansi.Magenta -> "Magenta"
        Ansi.Cyan    -> "Cyan"
        Ansi.White   -> "White")
sgrToString (Ansi.SetUnderlining Ansi.SingleUnderline) = Just "underline"
sgrToString (Ansi.SetConsoleIntensity Ansi.BoldIntensity) = Just "bold"
sgrToString (Ansi.SetItalicized True) = Just "italic"
sgrToString (Ansi.SetRGBColor layer color) = Just $
    (\str -> case layer of
        Ansi.Foreground -> str
        Ansi.Background -> "on" ++ capitalize str) $
    "rgb#" ++ toRGBHex (toSRGB24 color)
  where
    -- Render each channel as exactly two lowercase hex digits.
    toRGBHex (RGB r g b) = concatMap toHexByte [r, g, b]
    toHexByte x = showHex2 x ""
    showHex2 x | x <= 0xf  = ("0" ++) . showHex x
               | otherwise = showHex x
sgrToString _ = Nothing
--------------------------------------------------------------------------------
-- | Map from configuration style name to SGR command, built by
-- enumerating candidate SGR commands and keeping the ones that
-- 'sgrToString' can name.
namedSgrs :: M.Map String Ansi.SGR
namedSgrs = M.fromList
    [ (name, sgr)
    | sgr <- knownSgrs
    , name <- maybeToList (sgrToString sgr)
    ]
  where
    -- It doesn't really matter if we generate "too much" SGRs here since
    -- 'sgrToString' will only pick the ones we support.
    knownSgrs =
        [ Ansi.SetColor l i c
        | l <- [minBound .. maxBound]
        , i <- [minBound .. maxBound]
        , c <- [minBound .. maxBound]
        ] ++
        [Ansi.SetUnderlining u | u <- [minBound .. maxBound]] ++
        [Ansi.SetConsoleIntensity c | c <- [minBound .. maxBound]] ++
        [Ansi.SetItalicized i | i <- [minBound .. maxBound]]
--------------------------------------------------------------------------------
-- | Styles for syntax highlighting, keyed by token type name (see
-- 'nameForTokenType').  The newtype-derived 'Monoid' merges the maps.
newtype SyntaxHighlighting = SyntaxHighlighting
    { unSyntaxHighlighting :: M.Map String Style
    } deriving (Monoid, Semigroup, Show, A.ToJSON)
--------------------------------------------------------------------------------
-- | Parse a token-type-to-style map, rejecting unknown token type names.
instance A.FromJSON SyntaxHighlighting where
    parseJSON val = do
        styleMap <- A.parseJSON val
        mapM_ checkKey (M.keys styleMap)
        return (SyntaxHighlighting styleMap)
      where
        checkKey k = case nameToTokenType k of
            Just _  -> return ()
            Nothing -> fail $ "Unknown token type: " ++ show k
--------------------------------------------------------------------------------
-- | The colour scheme used for code when the user's theme does not
-- override a token style (referenced from 'defaultTheme').
defaultSyntaxHighlighting :: SyntaxHighlighting
defaultSyntaxHighlighting = mkSyntaxHighlighting
    [ (Skylighting.KeywordTok,        dull Ansi.Yellow)
    , (Skylighting.ControlFlowTok,    dull Ansi.Yellow)
    , (Skylighting.DataTypeTok,       dull Ansi.Green)
    , (Skylighting.DecValTok,         dull Ansi.Red)
    , (Skylighting.BaseNTok,          dull Ansi.Red)
    , (Skylighting.FloatTok,          dull Ansi.Red)
    , (Skylighting.ConstantTok,       dull Ansi.Red)
    , (Skylighting.CharTok,           dull Ansi.Red)
    , (Skylighting.SpecialCharTok,    dull Ansi.Red)
    , (Skylighting.StringTok,         dull Ansi.Red)
    , (Skylighting.VerbatimStringTok, dull Ansi.Red)
    , (Skylighting.SpecialStringTok,  dull Ansi.Red)
    , (Skylighting.CommentTok,        dull Ansi.Blue)
    , (Skylighting.DocumentationTok,  dull Ansi.Blue)
    , (Skylighting.AnnotationTok,     dull Ansi.Blue)
    , (Skylighting.CommentVarTok,     dull Ansi.Blue)
    , (Skylighting.ImportTok,         dull Ansi.Cyan)
    , (Skylighting.OperatorTok,       dull Ansi.Cyan)
    , (Skylighting.FunctionTok,       dull Ansi.Cyan)
    , (Skylighting.PreprocessorTok,   dull Ansi.Cyan)
    ]
  where
    -- Dull foreground colour, as in 'defaultTheme'.
    dull c = Style [Ansi.SetColor Ansi.Foreground Ansi.Dull c]
    -- Key the table by the token type's configuration name.
    mkSyntaxHighlighting ls = SyntaxHighlighting $
        M.fromList [(nameForTokenType tt, s) | (tt, s) <- ls]
--------------------------------------------------------------------------------
-- | Render a skylighting token type as the name used in theme files:
-- drop the @Tok@ suffix of the 'show'n constructor and lower-case the
-- first letter (e.g. @KeywordTok@ becomes @"keyword"@).
nameForTokenType :: Skylighting.TokenType -> String
nameForTokenType tokenType = unCap (dropSuffix (show tokenType))
  where
    unCap (c : cs) = toLower c : cs
    unCap []       = []
    dropSuffix :: String -> String
    dropSuffix str
        | "Tok" `isSuffixOf` str = take (length str - 3) str
        | otherwise              = str
--------------------------------------------------------------------------------
-- | Inverse of 'nameForTokenType': re-append the @Tok@ suffix,
-- re-capitalize, and parse with 'readMaybe'.
nameToTokenType :: String -> Maybe Skylighting.TokenType
nameToTokenType name = readMaybe (capitalize (name ++ "Tok"))
--------------------------------------------------------------------------------
-- | Upper-case the first character of a string; the empty string is
-- returned unchanged.
capitalize :: String -> String
capitalize str = case str of
    []       -> []
    (c : cs) -> toUpper c : cs
--------------------------------------------------------------------------------
-- | Look up the style for a token type in the theme's syntax
-- highlighting map, if the theme provides one.
syntaxHighlight :: Theme -> Skylighting.TokenType -> Maybe Style
syntaxHighlight theme tokenType =
    themeSyntaxHighlighting theme >>=
        M.lookup (nameForTokenType tokenType) . unSyntaxHighlighting
--------------------------------------------------------------------------------
$(A.deriveJSON A.dropPrefixOptions ''Theme)
| jaspervdj/patat | lib/Patat/Theme.hs | gpl-2.0 | 13,219 | 0 | 18 | 3,566 | 3,085 | 1,653 | 1,432 | 286 | 11 |
{-
Auf wie viele Weisen kann man Spielsteine auf ein 3 x 10-Spielbrett setzen,
sodass keine zwei Steine horizontal, vertikal oder diagonal benachbart sind?
(Quelle: Preisaufgabe bei LSGM-Wochenendseminar 2011 in Bennewitz
http://www.lsgm.de/tiki-index.php?page=Seminare.2011-09)
BUILD: ghc --make Placement
RUN : ./Placement 3 10
-}
import OBDD (OBDD)
import qualified OBDD
import Control.Monad ( guard, forM_ )
import System.Environment ( getArgs )
import qualified Data.Set
-- | A board coordinate: (column, row), both counted from 1.
type Position = (Int,Int)

-- | All cells of a @width@ x @height@ board, in column-major order.
positions :: Int -> Int -> [ Position ]
positions width height =
    [ (col, row) | col <- [1 .. width], row <- [1 .. height] ]

-- | Whether two cells touch horizontally, vertically or diagonally.
-- (Note that a cell is also "adjacent" to itself.)
adjacent :: Position -> Position -> Bool
adjacent (a, b) (c, d) = abs (a - c) <= 1 && abs (b - d) <= 1
-- | Entry point: the board dimensions may be given as two command line
-- arguments; with no arguments the original 3 x 10 board is used.
main :: IO ()
main = do
    args <- getArgs
    case map read args :: [Int] of
        []                -> mainf 3 10
        [ width, height ] -> mainf width height
        -- Previously any other argument count crashed with a
        -- pattern-match failure; report a usage error instead.
        _                 -> error "usage: Placement [WIDTH HEIGHT]"
-- Build a BDD over one boolean variable per board cell ("a stone is
-- placed here"), constrain every adjacent pair of distinct cells so that
-- at least one of the two is empty, and count the satisfying models.
mainf width height = do
    let ps = positions width height
    print $ OBDD.number_of_models ( Data.Set.fromList ps )
          $ OBDD.and $ do
        p <- ps
        q <- ps
        guard $ p < q
        guard $ adjacent p q
        return $ OBDD.or [ OBDD.unit p False, OBDD.unit q False ]
| jwaldmann/haskell-obdd | examples/Placement.hs | gpl-2.0 | 1,208 | 1 | 14 | 328 | 383 | 194 | 189 | 28 | 2 |
{-| Implementation of the LUXI loader.
-}
{-
Copyright (C) 2009, 2010, 2011 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.Luxi
(
loadData
, parseData
) where
import qualified Control.Exception as E
import Text.JSON.Types
import qualified Text.JSON
import qualified Ganeti.Luxi as L
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.HTools.Utils (fromJVal, annotateResult, tryFromObj, asJSObject)
-- * Utility functions
-- | Ensure a given JSValue is actually a JSArray.
toArray :: (Monad m) => JSValue -> m [JSValue]
toArray (JSArray arr) = return arr
toArray other = fail ("Invalid input, expected array but got " ++ show other)
-- | Annotate errors when converting values with owner/attribute for
-- better debugging.
genericConvert :: (Text.JSON.JSON a) =>
                  String    -- ^ The object type
               -> String    -- ^ The object name
               -> String    -- ^ The attribute we're trying to convert
               -> JSValue   -- ^ The value we try to convert
               -> Result a  -- ^ The annotated result
genericConvert otype oname oattr val =
    annotateResult msg (fromJVal val)
  where
    msg = otype ++ " '" ++ oname ++ "', attribute '" ++ oattr ++ "'"
-- * Data querying functionality
-- | The input data for node query.
queryNodesMsg :: L.LuxiOp
queryNodesMsg =
L.QueryNodes [] ["name", "mtotal", "mnode", "mfree", "dtotal", "dfree",
"ctotal", "offline", "drained", "vm_capable",
"group.uuid"] False
-- | The input data for instance query.
queryInstancesMsg :: L.LuxiOp
queryInstancesMsg =
L.QueryInstances [] ["name", "disk_usage", "be/memory", "be/vcpus",
"status", "pnode", "snodes", "tags", "oper_ram"] False
-- | The input data for cluster query.
queryClusterInfoMsg :: L.LuxiOp
queryClusterInfoMsg = L.QueryClusterInfo
-- | The input data for node group query.
queryGroupsMsg :: L.LuxiOp
queryGroupsMsg =
L.QueryGroups [] ["uuid", "name", "alloc_policy"] False
-- | Wrapper over callMethod doing node query.
queryNodes :: L.Client -> IO (Result JSValue)
queryNodes = L.callMethod queryNodesMsg
-- | Wrapper over callMethod doing instance query.
queryInstances :: L.Client -> IO (Result JSValue)
queryInstances = L.callMethod queryInstancesMsg
-- | Wrapper over callMethod doing cluster info query.
queryClusterInfo :: L.Client -> IO (Result JSValue)
queryClusterInfo = L.callMethod queryClusterInfoMsg
-- | Wrapper over callMethod doing group query.
queryGroups :: L.Client -> IO (Result JSValue)
queryGroups = L.callMethod queryGroupsMsg
-- | Parse an instance list in JSON format.
getInstances :: NameAssoc
             -> JSValue
             -> Result [(String, Instance.Instance)]
getInstances ktn v = toArray v >>= mapM (parseInstance ktn)
-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
              -> JSValue
              -> Result (String, Instance.Instance)
parseInstance ktn (JSArray [ name, disk, mem, vcpus
                           , status, pnode, snodes, tags, oram ]) = do
  xname <- annotateResult "Parsing new instance" (fromJVal name)
  let convert a = genericConvert "Instance" xname a
  xdisk <- convert "disk_usage" disk
  -- Prefer the live memory usage (oper_ram) when the query returned a
  -- number for it; otherwise fall back to the configured be/memory.
  xmem <- (case oram of
             JSRational _ _ -> convert "oper_ram" oram
             _ -> convert "be/memory" mem)
  xvcpus <- convert "be/vcpus" vcpus
  xpnode <- convert "pnode" pnode >>= lookupNode ktn xname
  xsnodes <- convert "snodes" snodes::Result [JSString]
  -- Only the first secondary node (if any) is kept.
  snode <- (if null xsnodes then return Node.noSecondary
            else lookupNode ktn xname (fromJSString $ head xsnodes))
  xrunning <- convert "status" status
  xtags <- convert "tags" tags
  let inst = Instance.create xname xmem xdisk xvcpus
             xrunning xtags xpnode snode
  return (xname, inst)
parseInstance _ v = fail ("Invalid instance query result: " ++ show v)
-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> JSValue -> Result [(String, Node.Node)]
getNodes ktg v = toArray v >>= mapM (parseNode ktg)
-- | Construct a node from a JSON object.
parseNode :: NameAssoc -> JSValue -> Result (String, Node.Node)
parseNode ktg (JSArray [ name, mtotal, mnode, mfree, dtotal, dfree
                       , ctotal, offline, drained, vm_capable, g_uuid ])
    = do
  xname <- annotateResult "Parsing new node" (fromJVal name)
  let convert a = genericConvert "Node" xname a
  xoffline <- convert "offline" offline
  xdrained <- convert "drained" drained
  xvm_capable <- convert "vm_capable" vm_capable
  xgdx <- convert "group.uuid" g_uuid >>= lookupGroup ktg xname
  -- Offline/drained/non-VM-capable nodes are represented with zeroed
  -- resources and the offline flag set; only usable nodes get their
  -- real memory/disk/cpu figures parsed.
  node <- (if xoffline || xdrained || not xvm_capable
           then return $ Node.create xname 0 0 0 0 0 0 True xgdx
           else do
             xmtotal  <- convert "mtotal" mtotal
             xmnode   <- convert "mnode" mnode
             xmfree   <- convert "mfree" mfree
             xdtotal  <- convert "dtotal" dtotal
             xdfree   <- convert "dfree" dfree
             xctotal  <- convert "ctotal" ctotal
             return $ Node.create xname xmtotal xmnode xmfree
                    xdtotal xdfree xctotal False xgdx)
  return (xname, node)
parseNode _ v = fail ("Invalid node query result: " ++ show v)
-- | Extract the cluster tags from the cluster info JSON object.
getClusterTags :: JSValue -> Result [String]
getClusterTags v = do
  let errmsg = "Parsing cluster info"
  obj <- annotateResult errmsg $ asJSObject v
  tryFromObj errmsg (fromJSObject obj) "tags"
-- | Parse a node group list in JSON format.
getGroups :: JSValue -> Result [(String, Group.Group)]
getGroups arr = toArray arr >>= mapM parseGroup
-- | Construct a node group from a JSON object.
parseGroup :: JSValue -> Result (String, Group.Group)
parseGroup (JSArray [ uuid, name, apol ]) = do
  xname <- annotateResult "Parsing new group" (fromJVal name)
  let convert attr = genericConvert "Group" xname attr
  xuuid <- convert "uuid" uuid
  xapol <- convert "alloc_policy" apol
  return (xuuid, Group.create xname xuuid xapol)
parseGroup v = fail ("Invalid group query result: " ++ show v)
-- * Main loader functionality
-- | Builds the cluster data by querying the master daemon over the
-- given LUXI Unix socket.  'E.bracket' guarantees the client is closed
-- even if one of the queries fails.
readData :: String -- ^ Unix socket to use as source
         -> IO (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
readData master =
  E.bracket
    (L.getClient master)
    L.closeClient
    (\s -> do
       nodes <- queryNodes s
       instances <- queryInstances s
       cinfo <- queryClusterInfo s
       groups <- queryGroups s
       return (groups, nodes, instances, cinfo)
    )
-- | Converts the raw query responses into 'ClusterData', resolving node
-- group names for nodes and node names for instances via the
-- intermediate name associations.
parseData :: (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
          -> Result ClusterData
parseData (groups, nodes, instances, cinfo) = do
  group_data <- groups >>= getGroups
  let (group_names, group_idx) = assignIndices group_data
  node_data <- nodes >>= getNodes group_names
  let (node_names, node_idx) = assignIndices node_data
  inst_data <- instances >>= getInstances node_names
  let (_, inst_idx) = assignIndices inst_data
  ctags <- cinfo >>= getClusterTags
  return (ClusterData group_idx node_idx inst_idx ctags)
-- | Top level function for data loading: query the master daemon over
-- the given socket and parse the responses.
loadData :: String -- ^ Unix socket to use as source
         -> IO (Result ClusterData)
loadData master = fmap parseData (readData master)
| ganeti/htools | Ganeti/HTools/Luxi.hs | gpl-2.0 | 7,976 | 0 | 14 | 1,835 | 1,950 | 1,000 | 950 | 142 | 3 |
module Language.Java.Jdi.Event
( J.Event
, thread
, J.EventKind(..)
, J.eventKind
, referenceType
, location
) where
import Language.Java.Jdi.Impl
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Error (ErrorT, runErrorT, MonadError(..), Error(..))
import qualified Language.Java.Jdwp as J
-- | Retrieve the code 'Location' carried by an event.
--
-- NOTE(review): only 'J.BreakpointEvent' and 'J.StepEvent' are handled;
-- calling this on any other event kind is a pattern-match failure.
-- Presumably callers are expected to check 'J.eventKind' first --
-- confirm before relying on it.
location :: (Error e, MonadIO m, MonadError e m)
         => J.Event -> VirtualMachine m Location
location (J.BreakpointEvent _ _ javaLocation) =
    locationFromJavaLocation javaLocation
location (J.StepEvent _ _ javaLocation) =
    locationFromJavaLocation javaLocation
| VictorDenisov/jdi | src/Language/Java/Jdi/Event.hs | gpl-2.0 | 577 | 0 | 8 | 83 | 179 | 106 | 73 | 17 | 1 |
{-# LANGUAGE BangPatterns, TypeSynonymInstances, DeriveDataTypeable #-}
module Data.Ephys.EphysDefs where
-- | A voltage sample.
type Voltage = Double
-- | Identifier of a place cell.
type PlaceCellName = Int
-- | Identifier of a tetrode.
type TrodeName = Int
type ExperimentTime = Double -- TODO: How to record the wall clock time of "0 :: ExperimentTime"
                             -- Is 0 the time that the system was turned on?  Clock reset?
                             -- How to ensure that data that straddle a clock reset aren't combined?
                             -- Possibly ExperimentTime should be (StartUpUTCTime, ClockResetsSinceStartup, FloatTimeSinceReset)
| imalsogreg/tetrode-ephys | lib/Data/Ephys/EphysDefs.hs | gpl-3.0 | 509 | 0 | 4 | 77 | 36 | 26 | 10 | 6 | 0 |
instance Functor ((->) r) where
fmap f g = f . g | hmemcpy/milewski-ctfp-pdf | src/content/1.7/code/haskell/snippet22.hs | gpl-3.0 | 52 | 0 | 8 | 15 | 32 | 16 | 16 | 2 | 0 |
import Control.Applicative ((<$>), (<*>))
import Control.Monad (join, liftM2)
import Data.List (group, inits, nub, permutations, sort, tails)
import Data.Maybe (catMaybes)
-- | a data structure for machine-readable arithmetic expressions
data AExpr = IntCon !Integer
           | ABin !ABinOp !AExpr !AExpr

-- | an auxiliary data structure for representing binary ops in AExpr
data ABinOp = Add | Sub | Mul | Div deriving Eq

-- | Evaluate an expression to 'Just' its rational value, or 'Nothing'
-- if a division by zero occurs anywhere inside it.
evalAExpr :: AExpr -> Maybe Rational
evalAExpr (IntCon n)    = Just (fromInteger n)
evalAExpr (ABin op l r) =
    case op of
        Add -> liftM2 (+) lv rv
        Sub -> liftM2 (-) lv rv
        Mul -> liftM2 (*) lv rv
        Div -> join (liftM2 safeDiv lv rv)
  where
    lv = evalAExpr l
    rv = evalAExpr r
    -- Division is the only partial operation: x / 0 has no value.
    safeDiv _ 0 = Nothing
    safeDiv a b = Just (a / b)
opInsert :: [Integer] -> [AExpr]
-- ^ returns list of all AExprs that use each member of the given list,
--   in the order given, exactly once
--
-- NOTE(review): 'opInsert []' raises an exception ('init' of an empty
-- list inside 'splits'); callers must pass non-empty lists.
opInsert [x] = [IntCon x]
opInsert xs = do
    -- Try every proper split of the list and every operator at the root,
    -- recursing on both halves.
    (ls, rs) <- splits xs
    ABin <$> [Add, Sub, Mul, Div] <*> opInsert ls <*> opInsert rs
  where
    -- All ways to break xs into two non-empty halves, in order.
    splits xs = init . tail $ zip (inits xs) (tails xs)
-- | All AExprs that use each member of the given list, in any order,
-- exactly once.
allAExprs :: [Integer] -> [AExpr]
allAExprs xs = concatMap opInsert (permutations xs)
infixl 9 #
(#) :: (a -> b) -> (b -> c) -> a -> c
-- ^ forward (left-to-right) function composition:
--   @(f # g) x == g (f x)@
f # g = g . f
targets :: [Integer] -> Int -> [Integer]
-- ^ returns the strictly increasing list of all target integers that
--   are obtainable from the given list of integers
targets = allAExprs
        # map evalAExpr
        # catMaybes
        # filter (\x -> x == (fromInteger . round $ x)) -- keep whole values
        # map round
        # sort
        # group    -- sort + group replaces the former quadratic 'nub';
        # map head -- groups are never empty, so 'head' is safe here
result :: [Integer] -> Int
-- ^ returns the largest positive sequential integer obtainable from the
--   input list
--
-- The positive targets come out of 'targets' in increasing order, so
-- pairing them with @[1..]@ and counting the prefix where they agree
-- measures how far the run 1, 2, 3, ... extends without a gap.
result = targets
       # filter (> 0)
       # zip [1..]
       # takeWhile (uncurry (==))
       # length
main :: IO ()
-- The first stdin line is read and discarded; the second line holds the
-- whitespace-separated integers to combine.
main = getLine >> getLine >>= words # map read # result # print
| friedbrice/euler93 | euler93.hs | gpl-3.0 | 2,153 | 21 | 11 | 565 | 729 | 374 | 355 | 53 | 2 |
{- NewmanGirvan
Gregory W. Schwartz
Collections the functions pertaining to calculating the Newman-Girvan modularity
-}
{-# LANGUAGE QuasiQuotes #-}
module NewmanGirvan
( ngModularity
) where
-- Standard
-- Cabal
import qualified Foreign.R as R
import Foreign.R (SEXP, SEXPTYPE)
import Language.R.Instance as R
import Language.R.QQ
-- Local
import Types
-- | Finds the Newman Girvan modularity from the B matrix and its partitioning
ngModularity :: R.SomeSEXP s -> R.SomeSEXP s -> R s (R.SomeSEXP s)
ngModularity mat part =
[r| partt = matrix(rep(1, times=nrow(mat_hs)))
part1 = part_hs
part2 = abs(part_hs - 1)
l = (t(t(mat_hs) %*% partt) %*% (t(mat_hs) %*% partt)) - nrow(mat_hs)
l1 = (t(t(mat_hs) %*% part1) %*% (t(mat_hs) %*% partt)) - sum(part1)
l2 = (t(t(mat_hs) %*% part2) %*% (t(mat_hs) %*% partt)) - sum(part2)
o11 = (t(t(mat_hs) %*% part1) %*% (t(mat_hs) %*% part1)) - sum(part1)
o22 = (t(t(mat_hs) %*% part2) %*% (t(mat_hs) %*% part2)) - sum(part2)
modularity = ((o11 / l) - ((l1 / l) ^ 2)) + ((o22 / l) - ((l2 / l) ^ 2))
modularity[1,1]
|]
| GregorySchwartz/scan | src/NewmanGirvan.hs | gpl-3.0 | 1,151 | 0 | 10 | 268 | 103 | 64 | 39 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
--------------------------------------------------------------------------------
-- |
-- Module : Dhek.AppUtil
--
-- This module declares utilities related to application management,
-- in env which is neither Darwin (Mac OS X) nor Windows (assuming Unix).
--
--------------------------------------------------------------------------------
module Dhek.AppUtil where
--------------------------------------------------------------------------------
import Data.Text (Text)
import qualified Graphics.UI.Gtk as Gtk
--------------------------------------------------------------------------------
import Dhek.I18N
--------------------------------------------------------------------------------
-- | Hook called once the UI is built.  On generic Unix no extra
-- platform setup is needed, so the main window is returned unchanged.
uiLoaded :: (DhekMessage -> String) -> Gtk.Window -> IO Gtk.Window
uiLoaded _ mainWin = return mainWin
--------------------------------------------------------------------------------
-- | Platform-specific application teardown: a no-op on generic Unix.
appTerminate :: IO ()
appTerminate = return ()
--------------------------------------------------------------------------------
-- | Given @String@ must be a valid URL
--
-- NOTE(review): this Unix implementation is a stub -- it does not
-- actually open a browser.
browserOpen :: String -> IO ()
browserOpen _ = return ()
--------------------------------------------------------------------------------
-- | Returns true if the given key name is one of the expected modifier
-- keys (left/right Control on generic Unix).
isKeyModifier :: Text -> Bool
isKeyModifier name = name `elem` ["Control_L", "Control_R"]
--------------------------------------------------------------------------------
-- | Display name of the platform modifier key (see 'isKeyModifier').
keyModifierName :: String
keyModifierName = "CTRL"
--------------------------------------------------------------------------------
-- | Whether the given key / modifier combination should close the
-- application: never, on generic Unix.
closeKeystrokes :: Text -> [Gtk.Modifier] -> Bool
closeKeystrokes _ _ = False
| cchantep/dhek | unix/Dhek/AppUtil.hs | gpl-3.0 | 1,723 | 0 | 8 | 179 | 207 | 120 | 87 | 19 | 1 |
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2012 John Millikin <jmillikin@gmail.com>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
module DBusTests.Message (test_Message) where
import Test.Chell
import DBus
test_Message :: Suite
test_Message = suite "Message"
test_MethodErrorMessage
test_MethodErrorMessage :: Test
test_MethodErrorMessage = assertions "methodErrorMessage" $ do
let emptyError = methodError firstSerial (errorName_ "com.example.Error")
$expect (equal
"(no error message)"
(methodErrorMessage emptyError
{ methodErrorBody = []
}))
$expect (equal
"(no error message)"
(methodErrorMessage emptyError
{ methodErrorBody = [toVariant True]
}))
$expect (equal
"(no error message)"
(methodErrorMessage emptyError
{ methodErrorBody = [toVariant ""]
}))
$expect (equal
"error"
(methodErrorMessage emptyError
{ methodErrorBody = [toVariant "error"]
}))
| tmishima/haskell-dbus | tests/DBusTests/Message.hs | gpl-3.0 | 1,525 | 37 | 18 | 277 | 254 | 137 | 117 | 26 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdSense.Reports.Saved.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List all saved reports in this AdSense account.
--
-- /See:/ <https://developers.google.com/adsense/management/ AdSense Management API Reference> for @adsense.reports.saved.list@.
module Network.Google.Resource.AdSense.Reports.Saved.List
(
-- * REST Resource
ReportsSavedListResource
-- * Creating a Request
, reportsSavedList
, ReportsSavedList
-- * Request Lenses
, rslPageToken
, rslMaxResults
) where
import Network.Google.AdSense.Types
import Network.Google.Prelude
-- | A resource alias for @adsense.reports.saved.list@ method which the
-- 'ReportsSavedList' request conforms to.
type ReportsSavedListResource =
"adsense" :>
"v1.4" :>
"reports" :>
"saved" :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Int32) :>
QueryParam "alt" AltJSON :> Get '[JSON] SavedReports
-- | List all saved reports in this AdSense account.
--
-- /See:/ 'reportsSavedList' smart constructor.
data ReportsSavedList = ReportsSavedList'
    { _rslPageToken :: !(Maybe Text)
      -- ^ Continuation token for paging (see 'rslPageToken').
    , _rslMaxResults :: !(Maybe (Textual Int32))
      -- ^ Maximum number of saved reports per page (see 'rslMaxResults').
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReportsSavedList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rslPageToken'
--
-- * 'rslMaxResults'
reportsSavedList
:: ReportsSavedList
reportsSavedList =
ReportsSavedList'
{ _rslPageToken = Nothing
, _rslMaxResults = Nothing
}
-- | A continuation token, used to page through saved reports. To retrieve
-- the next page, set this parameter to the value of \"nextPageToken\" from
-- the previous response.
rslPageToken :: Lens' ReportsSavedList (Maybe Text)
rslPageToken
= lens _rslPageToken (\ s a -> s{_rslPageToken = a})
-- | The maximum number of saved reports to include in the response, used for
-- paging.
rslMaxResults :: Lens' ReportsSavedList (Maybe Int32)
rslMaxResults
= lens _rslMaxResults
(\ s a -> s{_rslMaxResults = a})
. mapping _Coerce
-- | Wires the request to the generated servant route: the response
-- type, the OAuth scopes it may be authorised with, and the client
-- builder that fills in the query parameters.
instance GoogleRequest ReportsSavedList where
        type Rs ReportsSavedList = SavedReports
        type Scopes ReportsSavedList =
             '["https://www.googleapis.com/auth/adsense",
               "https://www.googleapis.com/auth/adsense.readonly"]
        requestClient ReportsSavedList'{..}
          = go _rslPageToken _rslMaxResults (Just AltJSON)
              adSenseService
          where go
                  = buildClient
                      (Proxy :: Proxy ReportsSavedListResource)
                      mempty
| rueshyna/gogol | gogol-adsense/gen/Network/Google/Resource/AdSense/Reports/Saved/List.hs | mpl-2.0 | 3,434 | 0 | 14 | 782 | 416 | 248 | 168 | 63 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.ZoneOperations.Wait
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Waits for the specified Operation resource to return as \`DONE\` or for
-- the request to approach the 2 minute deadline, and retrieves the
-- specified Operation resource. This method differs from the \`GET\`
-- method in that it waits for no more than the default deadline (2
-- minutes) and then returns the current state of the operation, which
-- might be \`DONE\` or still in progress. This method is called on a
-- best-effort basis. Specifically: - In uncommon cases, when the server is
-- overloaded, the request might return before the default deadline is
-- reached, or might return after zero seconds. - If the default deadline
-- is reached, there is no guarantee that the operation is actually done
-- when the method returns. Be prepared to retry if the operation is not
-- \`DONE\`.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.zoneOperations.wait@.
module Network.Google.Resource.Compute.ZoneOperations.Wait
(
-- * REST Resource
ZoneOperationsWaitResource
-- * Creating a Request
, zoneOperationsWait
, ZoneOperationsWait
-- * Request Lenses
, zowProject
, zowOperation
, zowZone
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.zoneOperations.wait@ method which the
-- 'ZoneOperationsWait' request conforms to.
--
-- The request path is built from three captures — project, zone and
-- operation — ending in the literal @wait@ segment, and issues a POST
-- returning an 'Operation'.
type ZoneOperationsWaitResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "zones" :>
               Capture "zone" Text :>
                 "operations" :>
                   Capture "operation" Text :>
                     "wait" :>
                       QueryParam "alt" AltJSON :> Post '[JSON] Operation
-- | Waits for the specified Operation resource to return as \`DONE\` or for
-- the request to approach the 2 minute deadline, and retrieves the
-- specified Operation resource. This method differs from the \`GET\`
-- method in that it waits for no more than the default deadline (2
-- minutes) and then returns the current state of the operation, which
-- might be \`DONE\` or still in progress. This method is called on a
-- best-effort basis. Specifically: - In uncommon cases, when the server is
-- overloaded, the request might return before the default deadline is
-- reached, or might return after zero seconds. - If the default deadline
-- is reached, there is no guarantee that the operation is actually done
-- when the method returns. Be prepared to retry if the operation is not
-- \`DONE\`.
--
-- /See:/ 'zoneOperationsWait' smart constructor.
data ZoneOperationsWait =
  ZoneOperationsWait'
    { _zowProject :: !Text -- ^ Project ID for this request (see 'zowProject').
    , _zowOperation :: !Text -- ^ Name of the Operations resource to return (see 'zowOperation').
    , _zowZone :: !Text -- ^ Name of the zone for this request (see 'zowZone').
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ZoneOperationsWait' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'zowProject'
--
-- * 'zowOperation'
--
-- * 'zowZone'
zoneOperationsWait
    :: Text -- ^ 'zowProject'
    -> Text -- ^ 'zowOperation'
    -> Text -- ^ 'zowZone'
    -> ZoneOperationsWait
zoneOperationsWait project operation zone =
  ZoneOperationsWait'
    {_zowProject = project, _zowOperation = operation, _zowZone = zone}
-- | Project ID for this request.
zowProject :: Lens' ZoneOperationsWait Text
zowProject = lens _zowProject (\rec v -> rec {_zowProject = v})

-- | Name of the Operations resource to return.
zowOperation :: Lens' ZoneOperationsWait Text
zowOperation = lens _zowOperation (\rec v -> rec {_zowOperation = v})

-- | Name of the zone for this request.
zowZone :: Lens' ZoneOperationsWait Text
zowZone = lens _zowZone (\rec v -> rec {_zowZone = v})
-- Wires the request record onto 'ZoneOperationsWaitResource'.
instance GoogleRequest ZoneOperationsWait where
        type Rs ZoneOperationsWait = Operation
        type Scopes ZoneOperationsWait =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        -- NOTE: zone is passed before operation, matching the capture order
        -- in 'ZoneOperationsWaitResource' (project, zone, operation).
        requestClient ZoneOperationsWait'{..}
          = go _zowProject _zowZone _zowOperation
              (Just AltJSON)
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy ZoneOperationsWaitResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/ZoneOperations/Wait.hs | mpl-2.0 | 5,218 | 0 | 17 | 1,163 | 492 | 302 | 190 | 77 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.CloudPrivateCatalog.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.CloudPrivateCatalog.Types.Product where
import Network.Google.CloudPrivateCatalog.Types.Sum
import Network.Google.Prelude
-- | Response message for PrivateCatalog.SearchCatalogs.
--
-- Both fields are optional on the wire; see 'gcpvscrNextPageToken' and
-- 'gcpvscrCatalogs' for their meanings.
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1SearchCatalogsResponse' smart constructor.
data GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse =
  GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse'
    { _gcpvscrNextPageToken :: !(Maybe Text)
    , _gcpvscrCatalogs :: !(Maybe [GoogleCloudPrivatecatalogV1beta1Catalog])
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvscrNextPageToken'
--
-- * 'gcpvscrCatalogs'
googleCloudPrivatecatalogV1beta1SearchCatalogsResponse
    :: GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse
googleCloudPrivatecatalogV1beta1SearchCatalogsResponse =
  -- Positional construction; field order: nextPageToken, catalogs.
  GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse' Nothing Nothing
-- | A pagination token returned from a previous call to SearchCatalogs that
-- indicates from where listing should continue. This field is optional.
gcpvscrNextPageToken :: Lens' GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse (Maybe Text)
gcpvscrNextPageToken =
  lens _gcpvscrNextPageToken (\rec v -> rec {_gcpvscrNextPageToken = v})

-- | The \`Catalog\`s computed from the resource context.
gcpvscrCatalogs :: Lens' GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse [GoogleCloudPrivatecatalogV1beta1Catalog]
gcpvscrCatalogs =
  lens _gcpvscrCatalogs (\rec v -> rec {_gcpvscrCatalogs = v})
    . _Default
    . _Coerce
-- Decoding: both keys are optional; a missing "catalogs" key falls back to
-- 'mempty' via '.!='.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse'
                   <$>
                   (o .:? "nextPageToken") <*>
                     (o .:? "catalogs" .!= mempty))
-- Encoding: absent fields are dropped from the object via 'catMaybes'.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse
         where
        toJSON
          GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse'{..}
          = object
              (catMaybes
                 [("nextPageToken" .=) <$> _gcpvscrNextPageToken,
                  ("catalogs" .=) <$> _gcpvscrCatalogs])
-- | Output only. The display metadata to describe the product. The JSON
-- schema of the metadata differs by Product.asset_type. When the type is
-- \`google.deploymentmanager.Template\`, the schema is as follows: \`\`\`
-- \"$schema\": http:\/\/json-schema.org\/draft-04\/schema# type: object
-- properties: name: type: string minLength: 1 maxLength: 64 description:
-- type: string minLength: 1 maxLength: 2048 tagline: type: string
-- minLength: 1 maxLength: 100 support_info: type: string minLength: 1
-- maxLength: 2048 creator: type: string minLength: 1 maxLength: 100
-- documentation: type: array items: type: object properties: url: type:
-- string pattern:
-- \"^(https?):\/\/[-a-zA-Z0-9+&\'#\/%?=~_|!:,.;]*[-a-zA-Z0-9+&\'#\/%=~_|]\"
-- title: type: string minLength: 1 maxLength: 64 description: type: string
-- minLength: 1 maxLength: 2048 required: - name - description
-- additionalProperties: false \`\`\` When the asset type is
-- \`google.cloudprivatecatalog.ListingOnly\`, the schema is as follows:
-- \`\`\` \"$schema\": http:\/\/json-schema.org\/draft-04\/schema# type:
-- object properties: name: type: string minLength: 1 maxLength: 64
-- description: type: string minLength: 1 maxLength: 2048 tagline: type:
-- string minLength: 1 maxLength: 100 support_info: type: string minLength:
-- 1 maxLength: 2048 creator: type: string minLength: 1 maxLength: 100
-- documentation: type: array items: type: object properties: url: type:
-- string pattern:
-- \"^(https?):\/\/[-a-zA-Z0-9+&\'#\/%?=~_|!:,.;]*[-a-zA-Z0-9+&\'#\/%=~_|]\"
-- title: type: string minLength: 1 maxLength: 64 description: type: string
-- minLength: 1 maxLength: 2048 signup_url: type: string pattern:
-- \"^(https?):\/\/[-a-zA-Z0-9+&\'#\/%?=~_|!:,.;]*[-a-zA-Z0-9+&\'#\/%=~_|]\"
-- required: - name - description - signup_url additionalProperties: false
-- \`\`\`
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1ProductDisplayMetadata' smart constructor.
-- NOTE: the field name "Addtional" is a misspelling baked into the
-- generated public interface; it cannot be renamed without breaking callers.
newtype GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata =
  GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata'
    { _gcpvpdmAddtional :: HashMap Text JSONValue -- ^ Properties of the object (see 'gcpvpdmAddtional').
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvpdmAddtional'
googleCloudPrivatecatalogV1beta1ProductDisplayMetadata
    :: HashMap Text JSONValue -- ^ 'gcpvpdmAddtional'
    -> GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata
googleCloudPrivatecatalogV1beta1ProductDisplayMetadata properties =
  GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata'
    (_Coerce # properties)
-- | Properties of the object.
gcpvpdmAddtional :: Lens' GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata (HashMap Text JSONValue)
gcpvpdmAddtional =
  lens _gcpvpdmAddtional (\rec v -> rec {_gcpvpdmAddtional = v}) . _Coerce
-- Free-form object: the entire JSON object becomes the HashMap payload.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata'
                   <$> (parseJSONObject o))
-- Serializes as the raw properties map, with no wrapping key.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata
         where
        toJSON = toJSON . _gcpvpdmAddtional
-- | The readonly representation of a catalog computed with a given resource
-- context.
--
-- All fields are optional and default to 'Nothing' in the smart
-- constructor; see the corresponding lenses for field semantics.
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1Catalog' smart constructor.
data GoogleCloudPrivatecatalogV1beta1Catalog =
  GoogleCloudPrivatecatalogV1beta1Catalog'
    { _gcpvcUpdateTime :: !(Maybe DateTime')
    , _gcpvcName :: !(Maybe Text)
    , _gcpvcDisplayName :: !(Maybe Text)
    , _gcpvcDescription :: !(Maybe Text)
    , _gcpvcCreateTime :: !(Maybe DateTime')
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1Catalog' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvcUpdateTime'
--
-- * 'gcpvcName'
--
-- * 'gcpvcDisplayName'
--
-- * 'gcpvcDescription'
--
-- * 'gcpvcCreateTime'
googleCloudPrivatecatalogV1beta1Catalog
    :: GoogleCloudPrivatecatalogV1beta1Catalog
googleCloudPrivatecatalogV1beta1Catalog =
  -- All five fields (updateTime, name, displayName, description,
  -- createTime) start out unset.
  GoogleCloudPrivatecatalogV1beta1Catalog'
    Nothing
    Nothing
    Nothing
    Nothing
    Nothing
-- | Output only. The time when the catalog was last updated.
gcpvcUpdateTime :: Lens' GoogleCloudPrivatecatalogV1beta1Catalog (Maybe UTCTime)
gcpvcUpdateTime =
  lens _gcpvcUpdateTime (\rec v -> rec {_gcpvcUpdateTime = v})
    . mapping _DateTime

-- | Output only. The resource name of the target catalog, in the format of
-- \`catalogs\/{catalog_id}\'.
gcpvcName :: Lens' GoogleCloudPrivatecatalogV1beta1Catalog (Maybe Text)
gcpvcName = lens _gcpvcName (\rec v -> rec {_gcpvcName = v})

-- | Output only. The descriptive name of the catalog as it appears in UIs.
gcpvcDisplayName :: Lens' GoogleCloudPrivatecatalogV1beta1Catalog (Maybe Text)
gcpvcDisplayName =
  lens _gcpvcDisplayName (\rec v -> rec {_gcpvcDisplayName = v})

-- | Output only. The description of the catalog.
gcpvcDescription :: Lens' GoogleCloudPrivatecatalogV1beta1Catalog (Maybe Text)
gcpvcDescription =
  lens _gcpvcDescription (\rec v -> rec {_gcpvcDescription = v})

-- | Output only. The time when the catalog was created.
gcpvcCreateTime :: Lens' GoogleCloudPrivatecatalogV1beta1Catalog (Maybe UTCTime)
gcpvcCreateTime =
  lens _gcpvcCreateTime (\rec v -> rec {_gcpvcCreateTime = v})
    . mapping _DateTime
-- Decoding: every key is optional ('.:?'); absent keys become 'Nothing'.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1Catalog
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1Catalog"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1Catalog' <$>
                   (o .:? "updateTime") <*> (o .:? "name") <*>
                     (o .:? "displayName")
                     <*> (o .:? "description")
                     <*> (o .:? "createTime"))
-- Encoding: absent fields are dropped from the object via 'catMaybes'.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1Catalog
         where
        toJSON GoogleCloudPrivatecatalogV1beta1Catalog'{..}
          = object
              (catMaybes
                 [("updateTime" .=) <$> _gcpvcUpdateTime,
                  ("name" .=) <$> _gcpvcName,
                  ("displayName" .=) <$> _gcpvcDisplayName,
                  ("description" .=) <$> _gcpvcDescription,
                  ("createTime" .=) <$> _gcpvcCreateTime])
-- | Response message for PrivateCatalog.SearchProducts.
--
-- Both fields are optional on the wire; see 'gcpvsprNextPageToken' and
-- 'gcpvsprProducts' for their meanings.
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1SearchProductsResponse' smart constructor.
data GoogleCloudPrivatecatalogV1beta1SearchProductsResponse =
  GoogleCloudPrivatecatalogV1beta1SearchProductsResponse'
    { _gcpvsprNextPageToken :: !(Maybe Text)
    , _gcpvsprProducts :: !(Maybe [GoogleCloudPrivatecatalogV1beta1Product])
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1SearchProductsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvsprNextPageToken'
--
-- * 'gcpvsprProducts'
googleCloudPrivatecatalogV1beta1SearchProductsResponse
    :: GoogleCloudPrivatecatalogV1beta1SearchProductsResponse
googleCloudPrivatecatalogV1beta1SearchProductsResponse =
  -- Positional construction; field order: nextPageToken, products.
  GoogleCloudPrivatecatalogV1beta1SearchProductsResponse' Nothing Nothing
-- | A pagination token returned from a previous call to SearchProducts that
-- indicates from where listing should continue. This field is optional.
gcpvsprNextPageToken :: Lens' GoogleCloudPrivatecatalogV1beta1SearchProductsResponse (Maybe Text)
gcpvsprNextPageToken =
  lens _gcpvsprNextPageToken (\rec v -> rec {_gcpvsprNextPageToken = v})

-- | The \`Product\` resources computed from the resource context.
gcpvsprProducts :: Lens' GoogleCloudPrivatecatalogV1beta1SearchProductsResponse [GoogleCloudPrivatecatalogV1beta1Product]
gcpvsprProducts =
  lens _gcpvsprProducts (\rec v -> rec {_gcpvsprProducts = v})
    . _Default
    . _Coerce
-- Decoding: both keys are optional; a missing "products" key falls back to
-- 'mempty' via '.!='.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1SearchProductsResponse
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1SearchProductsResponse"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1SearchProductsResponse'
                   <$>
                   (o .:? "nextPageToken") <*>
                     (o .:? "products" .!= mempty))
-- Encoding: absent fields are dropped from the object via 'catMaybes'.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1SearchProductsResponse
         where
        toJSON
          GoogleCloudPrivatecatalogV1beta1SearchProductsResponse'{..}
          = object
              (catMaybes
                 [("nextPageToken" .=) <$> _gcpvsprNextPageToken,
                  ("products" .=) <$> _gcpvsprProducts])
-- | Output only. The asset which has been validated and is ready to be
-- provisioned. See
-- google.cloud.privatecatalogproducer.v1beta.Version.asset for details.
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1VersionAsset' smart constructor.
newtype GoogleCloudPrivatecatalogV1beta1VersionAsset =
  GoogleCloudPrivatecatalogV1beta1VersionAsset'
    { _gcpvvaAddtional :: HashMap Text JSONValue -- ^ Properties of the object (see 'gcpvvaAddtional').
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1VersionAsset' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvvaAddtional'
googleCloudPrivatecatalogV1beta1VersionAsset
    :: HashMap Text JSONValue -- ^ 'gcpvvaAddtional'
    -> GoogleCloudPrivatecatalogV1beta1VersionAsset
googleCloudPrivatecatalogV1beta1VersionAsset properties =
  GoogleCloudPrivatecatalogV1beta1VersionAsset' (_Coerce # properties)
-- | Properties of the object.
gcpvvaAddtional :: Lens' GoogleCloudPrivatecatalogV1beta1VersionAsset (HashMap Text JSONValue)
gcpvvaAddtional =
  lens _gcpvvaAddtional (\rec v -> rec {_gcpvvaAddtional = v}) . _Coerce
-- Free-form object: the entire JSON object becomes the HashMap payload.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1VersionAsset
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1VersionAsset"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1VersionAsset' <$>
                   (parseJSONObject o))
-- Serializes as the raw properties map, with no wrapping key.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1VersionAsset
         where
        toJSON = toJSON . _gcpvvaAddtional
-- | The consumer representation of a version which is a child resource under
-- a \`Product\` with asset data.
--
-- All fields are optional and default to 'Nothing' in the smart
-- constructor; see the corresponding lenses for field semantics.
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1Version' smart constructor.
data GoogleCloudPrivatecatalogV1beta1Version =
  GoogleCloudPrivatecatalogV1beta1Version'
    { _gcpvvAsset :: !(Maybe GoogleCloudPrivatecatalogV1beta1VersionAsset)
    , _gcpvvUpdateTime :: !(Maybe DateTime')
    , _gcpvvName :: !(Maybe Text)
    , _gcpvvDescription :: !(Maybe Text)
    , _gcpvvCreateTime :: !(Maybe DateTime')
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1Version' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvvAsset'
--
-- * 'gcpvvUpdateTime'
--
-- * 'gcpvvName'
--
-- * 'gcpvvDescription'
--
-- * 'gcpvvCreateTime'
googleCloudPrivatecatalogV1beta1Version
    :: GoogleCloudPrivatecatalogV1beta1Version
googleCloudPrivatecatalogV1beta1Version =
  -- All five fields (asset, updateTime, name, description, createTime)
  -- start out unset.
  GoogleCloudPrivatecatalogV1beta1Version'
    Nothing
    Nothing
    Nothing
    Nothing
    Nothing
-- | Output only. The asset which has been validated and is ready to be
-- provisioned. See
-- google.cloud.privatecatalogproducer.v1beta.Version.asset for details.
gcpvvAsset :: Lens' GoogleCloudPrivatecatalogV1beta1Version (Maybe GoogleCloudPrivatecatalogV1beta1VersionAsset)
gcpvvAsset = lens _gcpvvAsset (\rec v -> rec {_gcpvvAsset = v})

-- | Output only. The time when the version was last updated.
gcpvvUpdateTime :: Lens' GoogleCloudPrivatecatalogV1beta1Version (Maybe UTCTime)
gcpvvUpdateTime =
  lens _gcpvvUpdateTime (\rec v -> rec {_gcpvvUpdateTime = v})
    . mapping _DateTime

-- | Output only. The resource name of the version, in the format
-- \`catalogs\/{catalog_id}\/products\/{product_id}\/versions\/a-z*[a-z0-9]\'.
-- A unique identifier for the version under a product.
gcpvvName :: Lens' GoogleCloudPrivatecatalogV1beta1Version (Maybe Text)
gcpvvName = lens _gcpvvName (\rec v -> rec {_gcpvvName = v})

-- | Output only. The user-supplied description of the version. Maximum of
-- 256 characters.
gcpvvDescription :: Lens' GoogleCloudPrivatecatalogV1beta1Version (Maybe Text)
gcpvvDescription =
  lens _gcpvvDescription (\rec v -> rec {_gcpvvDescription = v})

-- | Output only. The time when the version was created.
gcpvvCreateTime :: Lens' GoogleCloudPrivatecatalogV1beta1Version (Maybe UTCTime)
gcpvvCreateTime =
  lens _gcpvvCreateTime (\rec v -> rec {_gcpvvCreateTime = v})
    . mapping _DateTime
-- Decoding: every key is optional ('.:?'); absent keys become 'Nothing'.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1Version
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1Version"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1Version' <$>
                   (o .:? "asset") <*> (o .:? "updateTime") <*>
                     (o .:? "name")
                     <*> (o .:? "description")
                     <*> (o .:? "createTime"))
-- Encoding: absent fields are dropped from the object via 'catMaybes'.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1Version
         where
        toJSON GoogleCloudPrivatecatalogV1beta1Version'{..}
          = object
              (catMaybes
                 [("asset" .=) <$> _gcpvvAsset,
                  ("updateTime" .=) <$> _gcpvvUpdateTime,
                  ("name" .=) <$> _gcpvvName,
                  ("description" .=) <$> _gcpvvDescription,
                  ("createTime" .=) <$> _gcpvvCreateTime])
-- | Response message for PrivateCatalog.SearchVersions.
--
-- Both fields are optional on the wire; see 'gcpvsvrNextPageToken' and
-- 'gcpvsvrVersions' for their meanings.
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1SearchVersionsResponse' smart constructor.
data GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse =
  GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse'
    { _gcpvsvrNextPageToken :: !(Maybe Text)
    , _gcpvsvrVersions :: !(Maybe [GoogleCloudPrivatecatalogV1beta1Version])
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvsvrNextPageToken'
--
-- * 'gcpvsvrVersions'
googleCloudPrivatecatalogV1beta1SearchVersionsResponse
    :: GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse
googleCloudPrivatecatalogV1beta1SearchVersionsResponse =
  -- Positional construction; field order: nextPageToken, versions.
  GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse' Nothing Nothing
-- | A pagination token returned from a previous call to SearchVersions that
-- indicates from where the listing should continue. This field is
-- optional.
gcpvsvrNextPageToken :: Lens' GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse (Maybe Text)
gcpvsvrNextPageToken =
  lens _gcpvsvrNextPageToken (\rec v -> rec {_gcpvsvrNextPageToken = v})

-- | The \`Version\` resources computed from the resource context.
gcpvsvrVersions :: Lens' GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse [GoogleCloudPrivatecatalogV1beta1Version]
gcpvsvrVersions =
  lens _gcpvsvrVersions (\rec v -> rec {_gcpvsvrVersions = v})
    . _Default
    . _Coerce
-- Decoding: both keys are optional; a missing "versions" key falls back to
-- 'mempty' via '.!='.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse'
                   <$>
                   (o .:? "nextPageToken") <*>
                     (o .:? "versions" .!= mempty))
-- Encoding: absent fields are dropped from the object via 'catMaybes'.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse
         where
        toJSON
          GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse'{..}
          = object
              (catMaybes
                 [("nextPageToken" .=) <$> _gcpvsvrNextPageToken,
                  ("versions" .=) <$> _gcpvsvrVersions])
-- | The readonly representation of a product computed with a given resource
-- context.
--
-- All fields are optional and default to 'Nothing' in the smart
-- constructor; see the corresponding lenses for field semantics.
--
-- /See:/ 'googleCloudPrivatecatalogV1beta1Product' smart constructor.
data GoogleCloudPrivatecatalogV1beta1Product =
  GoogleCloudPrivatecatalogV1beta1Product'
    { _gcpvpIconURI :: !(Maybe Text)
    , _gcpvpUpdateTime :: !(Maybe DateTime')
    , _gcpvpDisplayMetadata :: !(Maybe GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata)
    , _gcpvpName :: !(Maybe Text)
    , _gcpvpAssetType :: !(Maybe Text)
    , _gcpvpCreateTime :: !(Maybe DateTime')
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GoogleCloudPrivatecatalogV1beta1Product' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcpvpIconURI'
--
-- * 'gcpvpUpdateTime'
--
-- * 'gcpvpDisplayMetadata'
--
-- * 'gcpvpName'
--
-- * 'gcpvpAssetType'
--
-- * 'gcpvpCreateTime'
googleCloudPrivatecatalogV1beta1Product
    :: GoogleCloudPrivatecatalogV1beta1Product
googleCloudPrivatecatalogV1beta1Product =
  -- All six fields (iconUri, updateTime, displayMetadata, name, assetType,
  -- createTime) start out unset.
  GoogleCloudPrivatecatalogV1beta1Product'
    Nothing
    Nothing
    Nothing
    Nothing
    Nothing
    Nothing
-- | Output only. The icon URI of the product.
gcpvpIconURI :: Lens' GoogleCloudPrivatecatalogV1beta1Product (Maybe Text)
gcpvpIconURI = lens _gcpvpIconURI (\rec v -> rec {_gcpvpIconURI = v})

-- | Output only. The time when the product was last updated.
gcpvpUpdateTime :: Lens' GoogleCloudPrivatecatalogV1beta1Product (Maybe UTCTime)
gcpvpUpdateTime =
  lens _gcpvpUpdateTime (\rec v -> rec {_gcpvpUpdateTime = v})
    . mapping _DateTime
-- | Output only. The display metadata to describe the product. The JSON
-- schema of the metadata differs by Product.asset_type
-- (\`google.deploymentmanager.Template\` versus
-- \`google.cloudprivatecatalog.ListingOnly\`); the full schema for both
-- asset types is documented on
-- 'GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata'.
gcpvpDisplayMetadata :: Lens' GoogleCloudPrivatecatalogV1beta1Product (Maybe GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata)
gcpvpDisplayMetadata =
  lens _gcpvpDisplayMetadata (\rec v -> rec {_gcpvpDisplayMetadata = v})
-- | Output only. The resource name of the target product, in the format of
-- \`products\/a-z*[a-z0-9]\'. A unique identifier for the product under a
-- catalog.
gcpvpName :: Lens' GoogleCloudPrivatecatalogV1beta1Product (Maybe Text)
gcpvpName = lens _gcpvpName (\rec v -> rec {_gcpvpName = v})

-- | Output only. The type of the product asset. It can be one of the
-- following values: * \`google.deploymentmanager.Template\` *
-- \`google.cloudprivatecatalog.ListingOnly\`
gcpvpAssetType :: Lens' GoogleCloudPrivatecatalogV1beta1Product (Maybe Text)
gcpvpAssetType =
  lens _gcpvpAssetType (\rec v -> rec {_gcpvpAssetType = v})

-- | Output only. The time when the product was created.
gcpvpCreateTime :: Lens' GoogleCloudPrivatecatalogV1beta1Product (Maybe UTCTime)
gcpvpCreateTime =
  lens _gcpvpCreateTime (\rec v -> rec {_gcpvpCreateTime = v})
    . mapping _DateTime
-- Decoding: every key is optional ('.:?'); note the wire key is "iconUri"
-- while the Haskell field is _gcpvpIconURI.
instance FromJSON
         GoogleCloudPrivatecatalogV1beta1Product
         where
        parseJSON
          = withObject
              "GoogleCloudPrivatecatalogV1beta1Product"
              (\ o ->
                 GoogleCloudPrivatecatalogV1beta1Product' <$>
                   (o .:? "iconUri") <*> (o .:? "updateTime") <*>
                     (o .:? "displayMetadata")
                     <*> (o .:? "name")
                     <*> (o .:? "assetType")
                     <*> (o .:? "createTime"))
-- Encoding: absent fields are dropped from the object via 'catMaybes'.
instance ToJSON
         GoogleCloudPrivatecatalogV1beta1Product
         where
        toJSON GoogleCloudPrivatecatalogV1beta1Product'{..}
          = object
              (catMaybes
                 [("iconUri" .=) <$> _gcpvpIconURI,
                  ("updateTime" .=) <$> _gcpvpUpdateTime,
                  ("displayMetadata" .=) <$> _gcpvpDisplayMetadata,
                  ("name" .=) <$> _gcpvpName,
                  ("assetType" .=) <$> _gcpvpAssetType,
                  ("createTime" .=) <$> _gcpvpCreateTime])
| brendanhay/gogol | gogol-cloudprivatecatalog/gen/Network/Google/CloudPrivateCatalog/Types/Product.hs | mpl-2.0 | 25,834 | 0 | 16 | 5,104 | 3,143 | 1,832 | 1,311 | 403 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Subnetworks.AggregatedList
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves an aggregated list of subnetworks.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.subnetworks.aggregatedList@.
module Network.Google.Resource.Compute.Subnetworks.AggregatedList
(
-- * REST Resource
SubnetworksAggregatedListResource
-- * Creating a Request
, subnetworksAggregatedList
, SubnetworksAggregatedList
-- * Request Lenses
, salIncludeAllScopes
, salReturnPartialSuccess
, salOrderBy
, salProject
, salFilter
, salPageToken
, salMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.subnetworks.aggregatedList@ method which the
-- 'SubnetworksAggregatedList' request conforms to.
--
-- Captures the project in the path and exposes the standard
-- aggregated-list query parameters (scope inclusion, partial success,
-- ordering, filtering and pagination).
type SubnetworksAggregatedListResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "aggregated" :>
               "subnetworks" :>
                 QueryParam "includeAllScopes" Bool :>
                   QueryParam "returnPartialSuccess" Bool :>
                     QueryParam "orderBy" Text :>
                       QueryParam "filter" Text :>
                         QueryParam "pageToken" Text :>
                           QueryParam "maxResults" (Textual Word32) :>
                             QueryParam "alt" AltJSON :>
                               Get '[JSON] SubnetworkAggregatedList
-- | Retrieves an aggregated list of subnetworks.
--
-- Each field is documented on its corresponding lens.
--
-- /See:/ 'subnetworksAggregatedList' smart constructor.
data SubnetworksAggregatedList =
  SubnetworksAggregatedList'
    { _salIncludeAllScopes :: !(Maybe Bool)
    , _salReturnPartialSuccess :: !(Maybe Bool)
    , _salOrderBy :: !(Maybe Text)
    , _salProject :: !Text -- ^ Required; the only argument of the smart constructor.
    , _salFilter :: !(Maybe Text)
    , _salPageToken :: !(Maybe Text)
    , _salMaxResults :: !(Textual Word32) -- ^ Defaults to 500 in the smart constructor.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SubnetworksAggregatedList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'salIncludeAllScopes'
--
-- * 'salReturnPartialSuccess'
--
-- * 'salOrderBy'
--
-- * 'salProject'
--
-- * 'salFilter'
--
-- * 'salPageToken'
--
-- * 'salMaxResults'
subnetworksAggregatedList
    :: Text -- ^ 'salProject'
    -> SubnetworksAggregatedList
subnetworksAggregatedList project =
  SubnetworksAggregatedList'
    Nothing -- includeAllScopes
    Nothing -- returnPartialSuccess
    Nothing -- orderBy
    project
    Nothing -- filter
    Nothing -- pageToken
    500     -- maxResults (API default page size)
-- | Indicates whether every visible scope for each scope type (zone, region,
-- global) should be included in the response. For new resource types added
-- after this field, the flag has no effect as new resource types will
-- always include every visible scope for each scope type in response. For
-- resource types which predate this field, if this flag is omitted or
-- false, only scopes of the scope types where the resource type is
-- expected to be found will be included.
salIncludeAllScopes :: Lens' SubnetworksAggregatedList (Maybe Bool)
salIncludeAllScopes =
  lens _salIncludeAllScopes (\rec v -> rec {_salIncludeAllScopes = v})

-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
salReturnPartialSuccess :: Lens' SubnetworksAggregatedList (Maybe Bool)
salReturnPartialSuccess =
  lens _salReturnPartialSuccess
       (\rec v -> rec {_salReturnPartialSuccess = v})

-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`, which returns the newest result
-- first. Currently, only sorting by \`name\` or \`creationTimestamp desc\`
-- is supported.
salOrderBy :: Lens' SubnetworksAggregatedList (Maybe Text)
salOrderBy = lens _salOrderBy (\rec v -> rec {_salOrderBy = v})

-- | Project ID for this request.
salProject :: Lens' SubnetworksAggregatedList Text
salProject = lens _salProject (\rec v -> rec {_salProject = v})
-- | A filter expression that filters resources listed in the response. An
-- expression has the form \`field operator value\`, where the value is a
-- string, a number, or a boolean, and the operator is one of \`=\`,
-- \`!=\`, \`>\`, or \`\<\` — for example \`name != example-instance\`.
-- Nested fields are supported, e.g. \`scheduling.automaticRestart =
-- false\`, which also allows filtering on resource labels. To combine
-- multiple expressions, parenthesize each one; expressions are \`AND\`-ed
-- by default, and explicit \`AND\`\/\`OR\` keywords may be used, for
-- example: \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform =
-- \"Intel Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
salFilter :: Lens' SubnetworksAggregatedList (Maybe Text)
salFilter = lens _salFilter (\rec v -> rec {_salFilter = v})

-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
salPageToken :: Lens' SubnetworksAggregatedList (Maybe Text)
salPageToken = lens _salPageToken (\rec v -> rec {_salPageToken = v})

-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
salMaxResults :: Lens' SubnetworksAggregatedList Word32
salMaxResults =
  lens _salMaxResults (\rec v -> rec {_salMaxResults = v}) . _Coerce
-- Wires the request record onto 'SubnetworksAggregatedListResource': the
-- path capture first, then each query parameter in declaration order, with
-- alt=json always set.
instance GoogleRequest SubnetworksAggregatedList
         where
        type Rs SubnetworksAggregatedList =
             SubnetworkAggregatedList
        type Scopes SubnetworksAggregatedList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        requestClient SubnetworksAggregatedList'{..}
          = go _salProject _salIncludeAllScopes
              _salReturnPartialSuccess
              _salOrderBy
              _salFilter
              _salPageToken
              (Just _salMaxResults)
              (Just AltJSON)
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy SubnetworksAggregatedListResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Subnetworks/AggregatedList.hs | mpl-2.0 | 8,225 | 0 | 20 | 1,764 | 842 | 503 | 339 | 122 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Blogger.Pages.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Update a page.
--
-- /See:/ <https://developers.google.com/blogger/docs/3.0/getting_started Blogger API Reference> for @blogger.pages.update@.
module Network.Google.Resource.Blogger.Pages.Update
(
-- * REST Resource
PagesUpdateResource
-- * Creating a Request
, pagesUpdate
, PagesUpdate
-- * Request Lenses
, puuBlogId
, puuPageId
, puuPayload
, puuRevert
, puuPublish
) where
import Network.Google.Blogger.Types
import Network.Google.Prelude
-- | A resource alias for @blogger.pages.update@ method which the
-- 'PagesUpdate' request conforms to.
--
-- Encodes a PUT to @blogger\/v3\/blogs\/{blogId}\/pages\/{pageId}@ with
-- the optional @revert@ and @publish@ query flags and a JSON 'Page'
-- request body, returning the updated 'Page'.
type PagesUpdateResource =
     "blogger" :>
       "v3" :>
         "blogs" :>
           Capture "blogId" Text :>
             "pages" :>
               Capture "pageId" Text :>
                 QueryParam "revert" Bool :>
                   QueryParam "publish" Bool :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON] Page :> Put '[JSON] Page
-- | Update a page.
--
-- /See:/ 'pagesUpdate' smart constructor.
--
-- All fields are strict; the two optional request flags are wrapped in
-- 'Maybe' and omitted from the request when 'Nothing'.
data PagesUpdate = PagesUpdate'
    { _puuBlogId :: !Text
    , _puuPageId :: !Text
    , _puuPayload :: !Page
    , _puuRevert :: !(Maybe Bool)
    , _puuPublish :: !(Maybe Bool)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PagesUpdate' with the minimum fields required
-- to make a request; the optional flags start unset.
--
-- Use the lenses 'puuBlogId', 'puuPageId', 'puuPayload', 'puuRevert'
-- and 'puuPublish' to modify fields afterwards.
pagesUpdate
    :: Text -- ^ 'puuBlogId'
    -> Text -- ^ 'puuPageId'
    -> Page -- ^ 'puuPayload'
    -> PagesUpdate
pagesUpdate blogId pageId payload =
    PagesUpdate'
    { _puuBlogId = blogId
    , _puuPageId = pageId
    , _puuPayload = payload
    , _puuRevert = Nothing
    , _puuPublish = Nothing
    }
-- | The ID of the Blog.
puuBlogId :: Lens' PagesUpdate Text
puuBlogId = lens _puuBlogId (\ record val -> record{_puuBlogId = val})

-- | The ID of the Page.
puuPageId :: Lens' PagesUpdate Text
puuPageId = lens _puuPageId (\ record val -> record{_puuPageId = val})

-- | Multipart request metadata.
puuPayload :: Lens' PagesUpdate Page
puuPayload = lens _puuPayload (\ record val -> record{_puuPayload = val})

-- | Whether a revert action should be performed when the page is
-- updated (default: false).
puuRevert :: Lens' PagesUpdate (Maybe Bool)
puuRevert = lens _puuRevert (\ record val -> record{_puuRevert = val})

-- | Whether a publish action should be performed when the page is
-- updated (default: false).
puuPublish :: Lens' PagesUpdate (Maybe Bool)
puuPublish = lens _puuPublish (\ record val -> record{_puuPublish = val})
-- | The positional arguments fed to @go@ mirror the captures and query
-- parameters of 'PagesUpdateResource', in order.
instance GoogleRequest PagesUpdate where
        type Rs PagesUpdate = Page
        type Scopes PagesUpdate =
             '["https://www.googleapis.com/auth/blogger"]
        requestClient PagesUpdate'{..}
          = go _puuBlogId _puuPageId _puuRevert _puuPublish
              (Just AltJSON)
              _puuPayload
              bloggerService
          where go
                  = buildClient (Proxy :: Proxy PagesUpdateResource)
                      mempty
| rueshyna/gogol | gogol-blogger/gen/Network/Google/Resource/Blogger/Pages/Update.hs | mpl-2.0 | 3,934 | 0 | 17 | 995 | 623 | 367 | 256 | 91 | 1 |
{-# LANGUAGE OverloadedStrings, MultiParamTypeClasses #-}
{-
Bustle.Loader.Pcap: loads logs out of pcap files
Copyright © 2011–2012 Collabora Ltd.
Copyright © 2017–2018 Will Thompson
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-}
{-# LANGUAGE PatternGuards, FlexibleContexts #-}
module Bustle.Loader.Pcap
( readPcap
, convert
)
where
import Data.Maybe (fromMaybe)
import Data.Either (partitionEithers)
import Data.List (isSuffixOf)
import qualified Data.Map as Map
import Data.Map (Map)
import Control.Exception (try, tryJust)
import Control.Monad.State
import System.IO.Error ( mkIOError
, userErrorType
, isUserError
, ioeGetErrorString
, ioeSetErrorString
)
import Network.Pcap
import DBus
import qualified Data.ByteString as BS
import qualified Bustle.Types as B
import Bustle.Translation (__)
-- Conversions from dbus-core's types into Bustle's more stupid types. This
-- whole section is pretty upsetting.

-- | Tag a bus name as a unique (connection) name or a well-known one.
stupifyBusName :: BusName
               -> B.TaggedBusName
stupifyBusName n =
    if isUnique n
      then B.U (B.UniqueName n)
      else B.O (B.OtherName n)
-- | Whether a bus name is a unique connection name (begins with @:@).
--
-- Total: pattern matching replaces the previous partial 'head', so an
-- empty formatted name (which should not occur for a valid 'BusName')
-- yields 'False' instead of crashing.
isUnique :: BusName -> Bool
isUnique n = case formatBusName n of
    (':':_) -> True
    _       -> False
-- | Tag a possibly-missing bus name, substituting the given fallback
-- string when the name is absent.
convertBusName :: String
               -> Maybe BusName
               -> B.TaggedBusName
convertBusName fallback maybeName =
    stupifyBusName (fromMaybe (busName_ fallback) maybeName)
-- | Build a 'B.Member' from accessor functions for the object path,
-- interface and member name of some message type.
convertMember :: (a -> ObjectPath)
              -> (a -> Maybe InterfaceName)
              -> (a -> MemberName)
              -> a
              -> B.Member
convertMember getPath getIface getName msg =
    B.Member (getPath msg) (getIface msg) (getName msg)
-- | Method calls whose replies we have not yet seen, keyed by the
-- caller's bus name (if known) and the call's serial number.
type PendingMessages = Map (Maybe BusName, Serial)
                           (MethodCall, B.Detailed B.Message)
-- | Remove and return the pending method call matching the given sender
-- name and serial, if any.  When the keyed lookup fails but a name was
-- supplied, retries with an unknown ('Nothing') sender, since calls made
-- by the logger itself are recorded without a name.
popMatchingCall :: (MonadState PendingMessages m)
                => Maybe BusName
                -> Serial
                -> m (Maybe (MethodCall, B.Detailed B.Message))
popMatchingCall name serial = do
    ret <- tryPop (name, serial)

    case (ret, name) of
        -- If we don't get an answer, but we know a destination, this may be
        -- because we didn't know the sender's bus name because it was the
        -- logger itself. So try looking up pending replies whose sender is
        -- Nothing.
        (Nothing, Just _) -> tryPop (Nothing, serial)
        _                 -> return ret
  where
    -- Look the key up and delete it in one state pass.
    tryPop key = do
        call <- gets $ Map.lookup key
        modify $ Map.delete key
        return call
-- | Record a method call as awaiting a reply, keyed on its (possibly
-- unknown) sender and serial.
insertPending :: MonadState PendingMessages m
              => Maybe BusName
              -> Serial
              -> MethodCall
              -> B.Detailed B.Message
              -> m ()
insertPending sender serial rawCall detailed =
    modify (Map.insert (sender, serial) (rawCall, detailed))
-- | If the signal is the bus daemon's NameOwnerChanged, extract the
-- (name, old owner, new owner) triple; otherwise 'Nothing'.
-- A signal with no sender can never match.
isNOC :: Maybe BusName -> Signal -> Maybe (BusName, Maybe BusName, Maybe BusName)
isNOC (Just sender) s | looksLikeNOC =
    case names of
        [Just n, old, new] -> Just (n, old, new)
        _                  -> Nothing
  where
    names :: [Maybe BusName]
    names = map fromVariant $ signalBody s

    looksLikeNOC =
        (sender == B.dbusName) &&
        (signalInterface s == B.dbusInterface) &&
        (formatMemberName (signalMember s) == "NameOwnerChanged")

isNOC _ _ = Nothing
-- | Convert a raw NameOwnerChanged triple into Bustle's 'B.NOC' events.
-- Calls 'error' on combinations the bus daemon should never emit (a
-- unique name being stolen, or a change with neither owner).
bustlifyNOC :: (BusName, Maybe BusName, Maybe BusName)
            -> B.NOC
bustlifyNOC ns@(name, oldOwner, newOwner)
    | isUnique name =
        case (oldOwner, newOwner) of
            (Nothing, Just _) -> B.Connected (uniquify name)
            (Just _, Nothing) -> B.Disconnected (uniquify name)
            _                 -> error $ "wtf: NOC" ++ show ns
    | otherwise = B.NameChanged (otherify name) $
        case (oldOwner, newOwner) of
            (Just old, Nothing)  -> B.Released (uniquify old)
            (Just old, Just new) -> B.Stolen (uniquify old) (uniquify new)
            (Nothing, Just new)  -> B.Claimed (uniquify new)
            (Nothing, Nothing)   -> error $ "wtf: NOC" ++ show ns
  where
    uniquify = B.UniqueName
    otherify = B.OtherName
-- | If the given pending call is a GetNameOwner call, translate its
-- reply into a name-ownership event.
--
-- A failed pattern match in the Maybe monad yields 'Nothing', so a call
-- or reply with an empty body no longer crashes (this addresses half of
-- the original FIXME; checking that the service really is the bus
-- daemon is still TODO).
tryBustlifyGetNameOwnerReply :: Maybe (MethodCall, a)
                             -> MethodReturn
                             -> Maybe B.NOC
tryBustlifyGetNameOwnerReply maybeCall mr = do
    -- FIXME: check that the service really is the bus daemon.
    (rawCall, _) <- maybeCall
    guard (formatMemberName (methodCallMember rawCall) == "GetNameOwner")
    (ownedNameV:_) <- Just (methodCallBody rawCall)
    ownedName <- fromVariant ownedNameV
    let newOwner = case methodReturnBody mr of
                       (v:_) -> fromVariant v
                       _     -> Nothing
    return $ bustlifyNOC ( ownedName
                         , Nothing
                         , newOwner
                         )
-- | Convert one received D-Bus message into a detailed Bustle event.
--
-- Method calls are remembered in the pending-call map so later returns
-- and errors can be paired with them; a return from the bus daemon's
-- GetNameOwner call is translated into a name-ownership event instead.
bustlify :: MonadState PendingMessages m
         => B.Microseconds
         -> Int
         -> ReceivedMessage
         -> m B.DetailedEvent
bustlify µs bytes m = do
    event <- buildEvent
    return (B.Detailed µs event bytes m)
  where
    sender = receivedMessageSender m
    -- FIXME: can we do away with the un-Maybe-ing and just push that Nothing
    -- means 'the monitor' downwards? Or skip the message if sender is Nothing.
    wrappedSender = convertBusName "sen.der" sender

    buildEvent = case m of
        ReceivedMethodCall serial mc -> do
            let call = B.MethodCall
                    { B.serial = serialValue serial
                    , B.sender = wrappedSender
                    , B.destination = convertBusName "method.call.destination" $
                                      methodCallDestination mc
                    , B.member = convertMember methodCallPath
                                               methodCallInterface
                                               methodCallMember mc
                    }
            -- FIXME: we shouldn't need to construct almost the same thing here
            -- and just above, maybe?
            insertPending sender serial mc (B.Detailed µs call bytes m)
            return (B.MessageEvent call)

        ReceivedMethodReturn _serial mr -> do
            call <- popMatchingCall (methodReturnDestination mr)
                                    (methodReturnSerial mr)
            return $ case tryBustlifyGetNameOwnerReply call mr of
                Just noc -> B.NOCEvent noc
                Nothing -> B.MessageEvent B.MethodReturn
                    { B.inReplyTo = fmap snd call
                    , B.sender = wrappedSender
                    , B.destination = convertBusName "method.return.destination" $
                                      methodReturnDestination mr
                    }

        ReceivedMethodError _serial e -> do
            call <- popMatchingCall (methodErrorDestination e)
                                    (methodErrorSerial e)
            return $ B.MessageEvent B.Error
                { B.inReplyTo = fmap snd call
                , B.sender = wrappedSender
                , B.destination = convertBusName "method.error.destination" $
                                  methodErrorDestination e
                }

        ReceivedSignal _serial sig
            | Just names <- isNOC sender sig ->
                return (B.NOCEvent (bustlifyNOC names))
            | otherwise -> return $ B.MessageEvent B.Signal
                { B.sender = wrappedSender
                , B.member = convertMember signalPath
                                           (Just . signalInterface)
                                           signalMember sig
                , B.signalDestination = stupifyBusName <$> signalDestination sig
                }

        _ -> error "woah there! someone added a new message type."
-- | Unmarshal one raw message body into a Bustle event, turning
-- unparseable messages into an error string rather than failing.
convert :: MonadState PendingMessages m
        => B.Microseconds
        -> BS.ByteString
        -> m (Either String B.DetailedEvent)
convert µs body =
    either (return . Left . unmarshalErrorMessage)
           (fmap Right . bustlify µs (BS.length body))
           (unmarshal body)
-- | Outcome of reading one packet: either end-of-capture, or a packet
-- payload (whose conversion may itself have failed, hence the Either).
data Result e a =
    EOF
  | Packet (Either e a)
  deriving Show
-- | Read and convert a single packet, using the capture header's
-- timestamp.  A zero-length capture record signals end of file.
-- NOTE(review): hdrTime is assumed to already be in microseconds —
-- confirm against the pcap bindings.
readOne :: (MonadState s m, MonadIO m)
        => PcapHandle
        -> (B.Microseconds -> BS.ByteString -> m (Either e a))
        -> m (Result e a)
readOne p f = do
    (hdr, body) <- liftIO $ nextBS p
    -- No really, nextBS just returns null packets when you hit the end of the
    -- file.
    --
    -- It occurs to me that we could stream by just polling this every second
    -- or something?
    if hdrCaptureLength hdr == 0
      then return EOF
      else Packet <$> f (fromIntegral (hdrTime hdr)) body
-- This shows up as the biggest thing on the heap profile. Which is kind of a
-- surprise. It's supposedly the list.

-- | Convert every packet in the capture, in order, until EOF.
mapBodies :: (MonadState s m, MonadIO m)
          => PcapHandle
          -> (B.Microseconds -> BS.ByteString -> m (Either e a))
          -> m [Either e a]
mapBodies p f = do
    result <- readOne p f
    case result of
        EOF -> return []
        Packet converted -> (converted :) <$> mapBodies p f
-- | Open a pcap capture file and convert its packets to Bustle events,
-- returning unparseable packets' error strings alongside the successes.
-- Rejects captures whose link-layer type is not one we understand, via
-- an 'IOError' captured by 'try'.
readPcap :: MonadIO m
         => FilePath
         -> m (Either IOError ([String], [B.DetailedEvent]))
readPcap path = liftIO $ try $ do
    p <- openOffline path
    dlt <- datalink p
    -- DLT_NULL for extremely old logs.
    -- DLT_DBUS is missing: https://github.com/bos/pcap/pull/8
    unless (dlt `elem` [DLT_NULL, DLT_UNKNOWN 231]) $ do
      let message = "Incorrect link type " ++ show dlt
      ioError $ mkIOError userErrorType message Nothing (Just path)
    partitionEithers <$> evalStateT (mapBodies p convert) Map.empty
| wjt/bustle | Bustle/Loader/Pcap.hs | lgpl-2.1 | 10,213 | 0 | 20 | 3,235 | 2,409 | 1,228 | 1,181 | 186 | 6 |
module EKG.A169855 (a169855) where
import Helpers.EKGBuilder (buildEKG)
-- | The n-th term (1-based indexing) of OEIS sequence A169855.
-- NOTE(review): '!!' makes this partial for n < 1; callers are assumed
-- to pass positive indices.
a169855 :: Int -> Integer
a169855 n = a169855_list !! (n - 1)

-- | The lazily built term list, produced by the project's EKG builder
-- seeded with 12.
a169855_list :: [Integer]
a169855_list = buildEKG [12]
| peterokagey/haskellOEIS | src/EKG/A169855.hs | apache-2.0 | 191 | 0 | 7 | 29 | 68 | 39 | 29 | 6 | 1 |
import Data.Char
-- | A parser consumes a 'String' and yields a value plus the
-- unconsumed remainder.
type Parser a = String -> (a, String)

-- | Run a parser repeatedly until the input is exhausted, collecting
-- the results.
-- NOTE(review): assumes the parser always consumes input on non-empty
-- strings; otherwise this loops forever — confirm with callers.
several :: Parser a -> String -> [a]
several _ "" = []
several parse input =
    let (value, rest) = parse input
    in value : several parse rest

-- | Parse a leading run of digits as an 'Int', then skip the spaces
-- that follow.
-- NOTE(review): 'read' errors out if the input does not start with a
-- digit.
num :: Parser Int
num input =
    let (digits, afterDigits) = span isDigit input
        (_, afterSpaces) = span isSpace afterDigits
    in (read digits, afterSpaces)
-- | Demo entry point: parse the numbers in a sample string and print them.
main = print $ several num "12 4 128"
| cbare/Etudes | haskell/several.hs | apache-2.0 | 387 | 2 | 9 | 124 | 183 | 90 | 93 | 11 | 1 |
module Marvin.API.Preprocess.FeatureScalingSpec (spec) where
import Test.Hspec
import Test.QuickCheck hiding (vector)
import Marvin.API
import Marvin.API.Preprocess.FeatureScaling
import Marvin.Test.TestUtils
-- | Top-level spec grouping the two feature-scaling suites.
spec :: Spec
spec = minMaxScaleDesc >> standarizeDesc
-- | Properties of column standardization (zero mean, unit standard
-- deviation), plus the constant-column no-op case.
standarizeDesc = describe "standardize" $ do
  it "standardizes column" $
    standardizeColumn original original `shouldSatisfy` isAround expectedStd
  it "ignores standardization on column with constant elements" $
    property $ \col -> standardizeColumn constColumn col `shouldBe` col
  it "makes the elements' avg 0" $ property $
    \v -> notCornerCase v ==> let Right col = fromList v in
      isAround (0 :: Double) $ avg $ toList $ standardizeColumn col col
  it "makes the elements' standard deviation 1" $ property $
    \v -> notCornerCase v ==> let Right col = fromList v in
      isAround (1 :: Double) $ stdDeviation $ toList $ standardizeColumn col col
-- | Properties of min-max scaling (results span [0, 1]), plus the
-- constant-column no-op case.
minMaxScaleDesc = describe "minMaxScale" $ do
  it "scales column by min-max" $
    minMaxScaleColumn original original `shouldSatisfy` isAround expectedMinMax
  it "ignores scaling on column with constant elements" $
    property $ \col -> minMaxScaleColumn constColumn col `shouldBe` col
  it "makes the minimum 0" $ property $
    \v -> notCornerCase v ==> let Right col = fromList v in
      isAround (0 :: Double) $ minimum $ toList $ minMaxScaleColumn col col
  it "makes the maximum 1" $ property $
    \v -> notCornerCase v ==> let Right col = fromList v in
      isAround (1 :: Double) $ maximum $ toList $ minMaxScaleColumn col col
-- | Arithmetic mean of a list (partial: undefined for an empty list).
avg xs = sum xs / fromIntegral (length xs)

-- | Population standard deviation (biased: divides by n, not n - 1).
stdDeviation xs = sqrt (sumSqDev / fromIntegral (length xs))
  where
    mean = avg xs
    sumSqDev = sum (map (\x -> (x - mean) ^ 2) xs)
-- | Guard for the QuickCheck properties above: rejects vectors on which
-- scaling is numerically degenerate (empty input, maximum near 0, or
-- zero standard deviation).
notCornerCase :: [Double] -> Bool
notCornerCase v =
  length v >= 1 &&
  not (isAround 0.0 (maximum v)) &&
  stdDeviation v /= 0
-- | A column of identical values; scaling must leave such input alone.
Right constColumn = fromList
  [8,8,8,8,8]
-- | Input column used by the expected-value tests above.
Right original = fromList
  [19,9,4,10,23]
-- | 'original' scaled to [0, 1] by min-max.
Right expectedMinMax = fromList
  [0.7894736842105,0.2631578947368,0.0,0.3157894736842,1.0]
-- | 'original' standardized to zero mean and unit standard deviation.
Right expectedStd = fromList
  [0.862439361864104,-0.574959574576069,-1.29365904279616,-0.431219680932052,1.43739893644017]
| gaborhermann/marvin | test-suite/Marvin/API/Preprocess/FeatureScalingSpec.hs | apache-2.0 | 2,159 | 0 | 17 | 409 | 732 | 364 | 368 | 47 | 1 |
{- #############################################################################
Sample code from:
Simon Thompson - Haskell: the Craft of Functional Programming, 2011
++++ Addison-Wesley ++++
http://www.haskellcraft.com/craft3e/Home.html
############################################################################# -}
module Craft3e.HsGame where
import Data.List hiding (cycle)
import Test.QuickCheck hiding (Result)
import Prelude hiding (cycle)
-- | The three moves of rock-paper-scissors.
data Move = Rock | Paper | Scissors
      deriving (Eq, Ord, Show, Read, Enum)

-- | Random moves are drawn uniformly from the three constructors.
instance Arbitrary Move where
  arbitrary = elements [Rock, Paper, Scissors]
-- | Outcome of one round (or a whole tournament) for one player.
data Result = Win | Draw | Lose
      deriving (Eq, Ord, Show, Read, Enum)

-- | Random results are drawn uniformly from the three constructors.
instance Arbitrary Result where
  arbitrary = elements [Win, Draw, Lose]
-- | A tournament is the pair of the two players' move histories.
-- NOTE(review): the strategies below treat the head of a history as the
-- most recent move — confirm callers build lists newest-first.
type Tournament = ([Move], [Move])

-- | A small sample tournament.
game :: Tournament
game = ([Rock,Rock,Paper], [Scissors,Paper,Rock])

-- | A sample flat move list.
moves :: [Move]
moves = [Rock,Rock,Paper, Scissors,Paper,Rock]

-- | Default opening move used by history-based strategies.
initial :: Move
initial = Rock
-- | A strategy chooses the next move from the opponent's move history
-- (most recent move first).
type Strategy = [Move] -> Move

-- | Constant strategies that ignore the history entirely.
rock, paper, scissors :: Strategy
rock = const Rock
paper = const Paper
scissors = const Scissors
-- | Cycle Rock -> Paper -> Scissors based on how many moves have been
-- played so far.
-- NOTE(review): the case is non-exhaustive as written; it is total only
-- because `rem` of a non-negative length is always 0, 1 or 2.
cycleMoves :: Strategy
cycleMoves moves = case (length moves) `rem` 3 of
        0 -> Rock
        1 -> Paper
        2 -> Scissors
-- | Repeat the opponent's most recent move; open with 'initial'.
echo :: Strategy
echo [] = initial
echo (lastMove:_) = lastMove
-- | Play the move that beats the opponent's last move; open with
-- 'initial'.
beatLast :: Strategy
beatLast (lastMove:_) = beat lastMove
beatLast [] = initial

-- | Play the move that loses to the opponent's last move; open with
-- 'initial'.
loseLast :: Strategy
loseLast (lastMove:_) = lose lastMove
loseLast [] = initial
-- | Play a move based on the frequency of the opponent's history.
-- NOTE(review): sorting ascending and taking the head selects the LEAST
-- frequent move, and plays that move itself rather than its counter —
-- confirm this is intended; the name suggests otherwise.
byFrequency :: Strategy
byFrequency [] = initial
byFrequency ms = let movesFreq = sort $ map (\ ms -> (length ms, head ms)) $ group $ sort ms
                 in snd $ head $ movesFreq
-- | Score a round from the first player's perspective:
-- 1 for a win, 0 for a draw, -1 for a loss.
--
-- All nine move combinations are covered; the original definition
-- omitted five of them and crashed on e.g. @score Paper Paper@.
score :: Move -> Move -> Int
score Rock Rock = 0
score Rock Paper = -1
score Rock Scissors = 1
score Paper Rock = 1
score Paper Paper = 0
score Paper Scissors = -1
score Scissors Rock = -1
score Scissors Paper = 1
score Scissors Scissors = 0
-- | The move that defeats the given move.
beat :: Move -> Move
beat m = case m of
  Rock     -> Paper
  Paper    -> Scissors
  Scissors -> Rock

-- | The move that is defeated by the given move.
lose :: Move -> Move
lose m = case m of
  Rock  -> Scissors
  Paper -> Rock
  _     -> Paper
-- | QuickCheck property: 'beat' undoes 'lose' for every move.
moves_prop1 :: Move -> Bool
moves_prop1 m = (beat . lose) m == m
-- | Result of one round from the first player's perspective.
outcome :: Move -> Move -> Result
outcome m1 m2
  | m1 == m2      = Draw
  | m1 == beat m2 = Win
  | otherwise     = Lose
-- | QuickCheck property: 'outcome' is antisymmetric — swapping the
-- players swaps Win and Lose and preserves Draw.
outcome_prop1 :: Move -> Move -> Bool
outcome_prop1 m n = let o1 = outcome m n
                        o2 = outcome n m
                    in compareOutcome o1 o2
  where compareOutcome Draw Draw = True
        compareOutcome Win Lose = True
        compareOutcome Lose Win = True
        compareOutcome _ _ = False
-- | Overall result of a tournament, as (player A's result, player B's
-- result).  Drawn rounds are discarded; whoever wins more rounds wins.
tournamentOutcome :: Tournament -> (Result, Result)
tournamentOutcome ([], []) = (Draw, Draw)
tournamentOutcome t = let (ma, mb) = t
                          outcomes = filter (/= Draw) $ map (toOutcome) $ zip ma mb
                          results = map (\r -> (head r, length r)) $ group $ sort outcomes
                      in toResult results
  where toOutcome (m, n) = outcome m n
        -- The grouped list is sorted and draws are filtered out, so any
        -- Win group precedes any Lose group: the patterns below are
        -- exhaustive for what this function can actually produce.
        toResult [] = (Draw, Draw)
        toResult [(Win,w),(Lose,l)] | w > l = (Win, Lose)
                                    | w < l = (Lose, Win)
                                    | otherwise = (Draw, Draw)
        toResult [(Win,_)] = (Win, Lose)
        toResult [(Lose,_)] = (Lose, Win)
module Main (main) where
-- | Entry point; intentionally does nothing.
main :: IO ()
main = return ()
| osa1/criterion | app/App.hs | bsd-2-clause | 62 | 0 | 8 | 16 | 32 | 17 | 15 | 4 | 1 |
{-# LANGUAGE FlexibleContexts, ScopedTypeVariables, CPP #-}
{-| Utility functions. -}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Utils
( debug
, debugFn
, debugXy
, sepSplit
, findFirst
, stdDev
, if'
, select
, applyIf
, commaJoin
, ensureQuoted
, divideList
, balancedSum
, tryRead
, readMaybe
, formatTable
, printTable
, parseUnit
, parseUnitAssumeBinary
, plural
, niceSort
, niceSortKey
, exitIfBad
, exitErr
, exitWhen
, exitUnless
, logWarningIfBad
, logAndBad
, rStripSpace
, newUUID
, isUUID
, getCurrentTime
, getCurrentTimeUSec
, clockTimeToString
, clockTimeToCTime
, clockTimeToUSec
, cTimeToClockTime
, diffClockTimes
, chompPrefix
, warn
, wrap
, trim
, defaultHead
, exitIfEmpty
, splitEithers
, recombineEithers
, resolveAddr
, monadicThe
, setOwnerAndGroupFromNames
, setOwnerWGroupR
, formatOrdinal
, tryAndLogIOError
, withDefaultOnIOError
, lockFile
, FStat
, nullFStat
, getFStat
, getFStatSafe
, needsReload
, watchFile
, watchFileBy
, safeRenameFile
, FilePermissions(..)
, ensurePermissions
, ordNub
, isSubsequenceOf
, maxBy
, threadDelaySeconds
, monotoneFind
, iterateJust
) where
import Prelude ()
import Ganeti.Prelude
import Control.Concurrent
import Control.Exception (try, bracket)
import Control.Monad
import qualified Data.Attoparsec.ByteString as A
import qualified Data.ByteString.UTF8 as UTF8
import Data.Char (toUpper, isAlphaNum, isDigit, isSpace)
import qualified Data.Either as E
import Data.Function (on)
import Data.IORef
#if MIN_VERSION_base(4,8,0)
import Data.List hiding (isSubsequenceOf)
#else
import Data.List ( intercalate
, find
, foldl'
, transpose
, sortBy
, isPrefixOf
, maximumBy)
#endif
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import Foreign.C.Types (CTime(..))
import Numeric (showOct)
import System.Directory (renameFile, createDirectoryIfMissing)
import System.FilePath.Posix (takeDirectory)
import System.INotify
import System.Posix.Types
import Debug.Trace
import Network.Socket
import Ganeti.BasicTypes
import qualified Ganeti.ConstantUtils as ConstantUtils
import Ganeti.Logging
import Ganeti.Runtime
import System.IO
import System.Exit
import System.Posix.Files
import System.Posix.IO
import System.Time (ClockTime(..), getClockTime, TimeDiff(..))
import qualified System.Time as STime
-- * Debug functions

-- | To be used only for debugging, breaks referential integrity:
-- prints the value via 'traceShow' and returns it unchanged.
debug :: Show a => a -> a
debug x = traceShow x x

-- | Displays a modified form of the second parameter before returning
-- it.
debugFn :: Show b => (a -> b) -> a -> a
debugFn fn x = let shown = debug (fn x)
               in shown `seq` x

-- | Show the first parameter before returning the second one.
debugXy :: Show a => a -> b -> b
debugXy x y = debug x `seq` y
-- * Miscellaneous

-- | Apply the function when the condition holds, otherwise return the
-- value unchanged.
applyIf :: Bool -> (a -> a) -> a -> a
applyIf True f x = f x
applyIf False _ x = x

-- | Join a list of strings with commas (no trailing separator).
commaJoin :: [String] -> String
commaJoin strs = intercalate "," strs
-- | Split a list on a separator and return a list of chunks.  Empty
-- chunks are kept, so a trailing separator yields a final empty list;
-- an empty input yields no chunks at all.
sepSplit :: Eq a => a -> [a] -> [[a]]
sepSplit sep str
  | null str = []
  | null rest = [chunk]
  | null remainder = [chunk, []]
  | otherwise = chunk : sepSplit sep remainder
  where (chunk, rest) = break (== sep) str
        remainder = drop 1 rest
-- | Finds the first unused element in a set starting from a given base.
-- E.g. with base 0 and set {0, 1, 3} the result is 2; when the run
-- starting at @base@ is contiguous, returns one past the set's maximum.
findFirst :: (Ord a, Enum a) => a -> S.Set a -> a
findFirst base xs =
  case S.splitMember base xs of
    (_, False, _) -> base
    (_, True, ys) -> fromMaybe (succ base) $
      (fmap fst . find (uncurry (<)) . zip [succ base..] . S.toAscList $ ys)
      `mplus` fmap (succ . fst) (S.maxView ys)
-- | Simple pluralize helper: pick the singular form for a count of
-- exactly one, the plural form otherwise.
plural :: Int -> String -> String -> String
plural count singular pluralForm
  | count == 1 = singular
  | otherwise = pluralForm

-- | Quote a value with single quotes unless it consists solely of
-- alphanumeric characters and dots.
ensureQuoted :: String -> String
ensureQuoted v
  | all (\c -> isAlphaNum c || c == '.') v = v
  | otherwise = '\'' : v ++ "'"
-- | Delay the calling thread for the given number of seconds
-- (converted to the microseconds 'threadDelay' expects).
threadDelaySeconds :: Int -> IO ()
threadDelaySeconds seconds = threadDelay (seconds * 1000000)
-- | Split a list into two lists of approximately the same length by
-- dealing elements alternately to the two halves.
divideList :: [a] -> ([a], [a])
divideList (x:y:rest) = let (ls, rs) = divideList rest
                        in (x:ls, y:rs)
divideList [x] = ([x], [])
divideList [] = ([], [])

-- * Mathematical functions

-- | Compute the sum of a list of numbers, all about the same value,
-- summing the two interleaved halves recursively so that values of
-- similar magnitude are added together (limits floating-point
-- inaccuracy from adding very different magnitudes).
balancedSum :: Num a => [a] -> a
balancedSum [] = 0
balancedSum [x] = x
balancedSum more = let (lefts, rights) = divideList more
                   in balancedSum lefts + balancedSum rights

-- Simple and slow statistical functions, please replace with better
-- versions

-- | Population standard deviation of a list of doubles (partial:
-- undefined for an empty list).
stdDev :: [Double] -> Double
stdDev values =
  let n = fromIntegral (length values)
      mean = balancedSum values / n
      sqDist x = let d = x - mean in d * d
      variance = balancedSum (map sqDist values) / n
  in sqrt variance
-- * Logical functions

-- Avoid syntactic sugar and enhance readability. These functions are proposed
-- by some for inclusion in the Prelude, and at the moment they are present
-- (with various definitions) in the utility-ht package. Some rationale and
-- discussion is available at <http://www.haskell.org/haskellwiki/If-then-else>

-- | \"if\" as a function, rather than as syntactic sugar.
if' :: Bool -- ^ condition
    -> a    -- ^ \"then\" result
    -> a    -- ^ \"else\" result
    -> a    -- ^ \"then\" or \"else\" result depending on the condition
if' cond whenTrue whenFalse = if cond then whenTrue else whenFalse
-- * Parsing utility functions

-- | Interpret a result list from 'reads'/'readsPrec': exactly one full
-- parse succeeds; a partial parse or an ambiguous/failed parse calls
-- 'fail' with a message naming the caller.
parseChoices :: Monad m => String -> String -> [(a, String)] -> m a
parseChoices _ _ [(v, "")] = return v
parseChoices name s [(_, e)] =
  fail $ name ++ ": leftover characters when parsing '"
       ++ s ++ "': '" ++ e ++ "'"
parseChoices name s _ = fail $ name ++ ": cannot parse string '" ++ s ++ "'"

-- | Safe 'read' function returning data encapsulated in a Result.
-- The first argument names the caller for error messages.
tryRead :: (Monad m, Read a) => String -> String -> m a
tryRead name s = parseChoices name s $ reads s
-- | Parse a string using the 'Read' instance.
-- Succeeds only if there is exactly one valid result that consumes the
-- whole input.
--
-- /Backport from Text.Read introduced in base-4.6.0.0/
readMaybe :: Read a => String -> Maybe a
readMaybe s =
  case reads s of
    [(value, "")] -> Just value
    _             -> Nothing
-- | Format a table of strings to maintain consistent column widths.
--
-- Every cell in a column is padded with spaces to the width of the
-- column's longest cell.  Columns flagged 'True' in @numpos@ are
-- right-aligned (numeric style); the rest are left-aligned.
formatTable :: [[String]] -> [Bool] -> [[String]]
formatTable vals numpos =
    transpose (map padColumn (zip3 columns numpos widths))
  where
    -- Work column-wise: transpose so each inner list is one column.
    columns = transpose vals
    widths = map (maximum . map length) columns
    padColumn (cells, isnum, width) = map (pad isnum width) cells
    pad isnum width val
      | delta <= 0 = val
      | isnum      = filler ++ val
      | otherwise  = val ++ filler
      where delta = width - length val
            filler = replicate delta ' '
-- | Constructs a printable table from given header and rows.
--
-- Each output line is the prefix @lp@ followed by a space and the
-- space-separated, width-normalised ('formatTable') cells of one row;
-- the header becomes the first row.
printTable :: String -> [String] -> [[String]] -> [Bool] -> String
printTable lp header rows isnum =
  unlines . map ((++) lp . (:) ' ' . unwords) $
  formatTable (header:rows) isnum
-- | Converts a unit (e.g. m or GB) into a scaling factor relative to
-- MiB.  Lower-case single letters and @*IB@ suffixes denote binary
-- units; upper-case single letters and @*B@ suffixes denote SI
-- (decimal) units, unless @noDecimal@ is set, in which case all kilos
-- are treated as binary.
parseUnitValue :: (Monad m) => Bool -> String -> m Rational
parseUnitValue noDecimal unit
  -- binary conversions first
  | null unit = return 1
  | unit == "m" || upper == "MIB" = return 1
  | unit == "g" || upper == "GIB" = return kbBinary
  | unit == "t" || upper == "TIB" = return $ kbBinary * kbBinary
  -- SI conversions
  | unit == "M" || upper == "MB" = return mbFactor
  | unit == "G" || upper == "GB" = return $ mbFactor * kbDecimal
  | unit == "T" || upper == "TB" = return $ mbFactor * kbDecimal * kbDecimal
  | otherwise = fail $ "Unknown unit '" ++ unit ++ "'"
  where upper = map toUpper unit
        kbBinary = 1024 :: Rational
        kbDecimal = if noDecimal then kbBinary else 1000
        decToBin = kbDecimal / kbBinary -- factor for 1K conversion
        mbFactor = decToBin * decToBin -- twice the factor for just 1K
-- | Tries to extract number and scale from the given string.
--
-- Input must be in the format NUMBER+ SPACE* [UNIT]. If no unit is
-- specified, it defaults to MiB. Return value is always an integral
-- value in MiB; if the first argument is True, all kilos are binary.
parseUnitEx :: (Monad m, Integral a, Read a) => Bool -> String -> m a
parseUnitEx noDecimal str =
  -- TODO: enhance this by splitting the unit parsing code out and
  -- accepting floating-point numbers
  case (reads str::[(Int, String)]) of
    [(v, suffix)] ->
      let unit = dropWhile (== ' ') suffix
      in do
          scaling <- parseUnitValue noDecimal unit
          return $ truncate (fromIntegral v * scaling)
    _ -> fail $ "Can't parse string '" ++ str ++ "'"
-- | Tries to extract number and scale from the given string.
--
-- Input must be in the format NUMBER+ SPACE* [UNIT]. If no unit is
-- specified, it defaults to MiB. Return value is always an integral
-- value in MiB.  See 'parseUnitEx'.
parseUnit :: (Monad m, Integral a, Read a) => String -> m a
parseUnit = parseUnitEx False

-- | Tries to extract a number and scale from a given string, taking
-- all kilos to be binary.  See 'parseUnitEx'.
parseUnitAssumeBinary :: (Monad m, Integral a, Read a) => String -> m a
parseUnitAssumeBinary = parseUnitEx True
-- | Unwraps a 'Result', exiting the program if it is a 'Bad' value,
-- otherwise returning the actual contained value.  The message argument
-- is prefixed to the error string.
exitIfBad :: String -> Result a -> IO a
exitIfBad msg (Bad s) = exitErr (msg ++ ": " ++ s)
exitIfBad _ (Ok v) = return v
-- | Print an error message on stderr and exit immediately with status 1.
exitErr :: String -> IO a
exitErr errmsg = do
  hPutStrLn stderr ("Error: " ++ errmsg)
  exitWith (ExitFailure 1)

-- | Exit with an error message if the given condition is 'True'.
exitWhen :: Bool -> String -> IO ()
exitWhen cond msg
  | cond = exitErr msg
  | otherwise = return ()

-- | Exit with an error message /unless/ the given condition is 'True';
-- the opposite of 'exitWhen'.
exitUnless :: Bool -> String -> IO ()
exitUnless cond = exitWhen (not cond)
-- | Unwraps a 'Result', logging a warning message and then returning a default
-- value if it is a 'Bad' value, otherwise returning the actual contained value.
logWarningIfBad :: String -> a -> Result a -> IO a
logWarningIfBad msg defVal (Bad s) = do
  logWarning $ msg ++ ": " ++ s
  return defVal
logWarningIfBad _ _ (Ok v) = return v

-- | Log a message at notice level and return a 'Bad' result carrying
-- the same message.
logAndBad :: String -> IO (Result a)
logAndBad msg = do
  logNotice msg
  return $ Bad msg
-- | Try an IO interaction, log errors and unfold as a 'Result'.
-- On an 'IOError', the message argument and the exception text are
-- combined, logged at error level, and returned as 'Bad'; on success
-- the continuation maps the value into a 'Result'.
tryAndLogIOError :: IO a -> String -> (a -> Result b) -> IO (Result b)
tryAndLogIOError io msg okfn =
 try io >>= either
   (\ e -> do
       let combinedmsg = msg ++ ": " ++ show (e :: IOError)
       logError combinedmsg
       return . Bad $ combinedmsg)
   (return . okfn)
-- | Try an IO interaction and return a default value if the interaction
-- throws an IOError (other exception types propagate).
withDefaultOnIOError :: a -> IO a -> IO a
withDefaultOnIOError dflt io =
  try io >>= either (\ (_ :: IOError) -> return dflt) return
-- | Print a warning to stderr, but do not exit.
warn :: String -> IO ()
warn msg = hPutStrLn stderr ("Warning: " ++ msg)
-- | Helper for 'niceSort'.  Splits the remaining string into maximal
-- runs of digits (converted to @Left@ integers) and non-digits
-- (@Right@ strings), prepending each run to the accumulated, reversed
-- key; the final key is returned in reading order.
extractKey :: [Either Integer String]  -- ^ Current (partial) key, reversed
           -> String                   -- ^ Remaining string
           -> ([Either Integer String], String)
extractKey acc [] = (reverse acc, [])
extractKey acc str@(c:_) =
    extractKey (toKeyPart run : acc) rest
  where
    (isRunChar, toKeyPart) = if isDigit c
                               then (isDigit, Left . read)
                               else (not . isDigit, Right)
    (run, rest) = span isRunChar str
{-| Sort a list of strings based on digit and non-digit groupings.
Given a list of names @['a1', 'a10', 'a11', 'a2']@ this function
will sort the list in the logical order @['a1', 'a2', 'a10', 'a11']@.
The sort algorithm breaks each name in groups of either only-digits or
no-digits, and sorts based on each group.
Internally, this is not implemented via regexes (like the Python
version), but via actual splitting of the string in sequences of
either digits or everything else, and converting the digit sequences
in /Left Integer/ and the non-digit ones in /Right String/, at which
point sorting becomes trivial due to the built-in 'Either' ordering;
we only need one extra step of dropping the key at the end.
-}
niceSort :: [String] -> [String]
niceSort = niceSortKey id
-- | Key-version of 'niceSort'. We use 'sortBy' and @compare `on` fst@
-- since we don't want to add an ordering constraint on the /a/ type,
-- hence the need to only compare the first element of the /(key, a)/
-- tuple.
niceSortKey :: (a -> String) -> [a] -> [a]
niceSortKey keyfn =
map snd . sortBy (compare `on` fst) .
map (\s -> (fst . extractKey [] $ keyfn s, s))
-- | Strip trailing space characters (including newlines).  As this
-- walks the string twice via 'reverse', it should only be run on
-- small strings.
rStripSpace :: String -> String
rStripSpace str = reverse (dropWhile isSpace (reverse str))
-- | Returns a random UUID.
-- This is a Linux-specific method as it uses the /proc filesystem.
-- The 'take 128' bounds how much of the (lazily read) file is
-- consumed; the '$!' forces the stripped result so the read happens
-- here rather than at some later use site.
newUUID :: IO String
newUUID = do
  contents <- readFile ConstantUtils.randomUuidFile
  return $! rStripSpace $ take 128 contents
-- | Parser that doesn't fail on a valid UUIDs (same as
-- "Ganeti.Constants.uuidRegex").
-- Accepts only lowercase hex digits in the canonical 8-4-4-4-12
-- grouping, with literal dashes between the groups.
uuidCheckParser :: A.Parser ()
uuidCheckParser = do
  -- Not using Attoparsec.Char8 because "all attempts to use characters
  -- above code point U+00FF will give wrong answers" and we don't
  -- want such things to be accepted as UUIDs.
  let lowerHex = A.satisfy (\c -> (48 <= c && c <= 57) || -- 0-9
                                  (97 <= c && c <= 102)) -- a-f
      hx n = A.count n lowerHex
      d = A.word8 45 -- '-'
  void $ hx 8 >> d >> hx 4 >> d >> hx 4 >> d >> hx 4 >> d >> hx 12
-- | Checks if the string is a valid UUID as in "Ganeti.Constants.uuidRegex".
-- The 'endOfInput' ensures no trailing garbage is accepted.
isUUID :: String -> Bool
isUUID =
  isRight . A.parseOnly (uuidCheckParser <* A.endOfInput) . UTF8.fromString
-- | Returns the current time as an 'Integer' representing the number
-- of seconds from the Unix epoch.
getCurrentTime :: IO Integer
getCurrentTime = do
  TOD ctime _ <- getClockTime
  return ctime
-- | Returns the current time as an 'Integer' representing the number
-- of microseconds from the Unix epoch (hence the need for 'Integer').
getCurrentTimeUSec :: IO Integer
getCurrentTimeUSec = liftM clockTimeToUSec getClockTime
-- | Convert a ClockTime into a (seconds-only) timestamp string;
-- the picosecond component is discarded.
clockTimeToString :: ClockTime -> String
clockTimeToString (TOD t _) = show t
-- | Convert a ClockTime into a (seconds-only) 'EpochTime' (AKA @time_t@);
-- the picosecond component is discarded.
clockTimeToCTime :: ClockTime -> EpochTime
clockTimeToCTime (TOD secs _) = fromInteger secs
-- | Convert a ClockTime into the number of microseconds since the epoch.
clockTimeToUSec :: ClockTime -> Integer
clockTimeToUSec (TOD ctime pico) =
  -- pico: 10^-12, micro: 10^-6, so we have to shift seconds left and
  -- picoseconds right
  ctime * 1000000 + pico `div` 1000000
-- | Convert an 'EpochTime' (AKA @time_t@) back into a ClockTime,
-- with a zero picosecond component.
cTimeToClockTime :: EpochTime -> ClockTime
cTimeToClockTime (CTime timet) = TOD (toInteger timet) 0
-- | A version of `diffClockTimes` that works around ghc bug #2519.
-- If the underlying difference has a negative picosecond field, one
-- second is borrowed so the result is normalised to
-- @0 <= tdPicosec < 10^12@.
diffClockTimes :: ClockTime -> ClockTime -> TimeDiff
diffClockTimes t1 t2 =
  let delta = STime.diffClockTimes t1 t2
      secondInPicoseconds = 1000000000000
  in if tdPicosec delta < 0
       then delta { tdSec = tdSec delta - 1
                  , tdPicosec = tdPicosec delta + secondInPicoseconds
                  }
       else delta
-- | Strip a prefix from a string, allowing the last character of the
-- prefix (which is assumed to be a separator) to be absent from the
-- string if the string terminates there.
--
-- > chompPrefix "foo:bar:" "foo:bar:baz" == Just "baz"
-- > chompPrefix "foo:bar:" "foo:bar:"    == Just ""
-- > chompPrefix "foo:bar:" "foo:bar"     == Just ""
-- > chompPrefix "foo:bar:" "a:b:c"       == Nothing
-- > chompPrefix "foo:bar:" "foo:barbaz"  == Nothing
chompPrefix :: String -> String -> Maybe String
chompPrefix pfx str
  | pfx `isPrefixOf` str || str == init pfx =
      -- in the separator-less case the drop simply yields ""
      Just (drop (length pfx) str)
  | otherwise = Nothing
-- | Breaks a string in lines with length \<= maxWidth.
--
-- NOTE: The split is OK if:
--
-- * It doesn't break a word, i.e. the next line begins with space
--   (@isSpace . head $ rest@) or the current line ends with space
--   (@null revExtra@);
--
-- * It breaks a very big word that doesn't fit anyway (@null revLine@).
wrap :: Int      -- ^ maxWidth
     -> String   -- ^ string that needs wrapping
     -> [String] -- ^ string \"broken\" in lines
wrap maxWidth = filter (not . null) . map trim . wrap0
  where wrap0 :: String -> [String]
        wrap0 text
          | length text <= maxWidth = [text]
          | isSplitOK = line : wrap0 rest
          | otherwise = line' : wrap0 rest'
          -- 'line'/'rest' is the naive split at maxWidth; if that cuts
          -- a word in half, 'line''/'rest'' backs up to the last space
          -- in the line (found by breaking the reversed line) and
          -- pushes the cut-off word fragment back onto the remainder.
          where (line, rest) = splitAt maxWidth text
                (revExtra, revLine) = break isSpace . reverse $ line
                (line', rest') = (reverse revLine, reverse revExtra ++ rest)
                isSplitOK =
                  null revLine || null revExtra || startsWithSpace rest
                startsWithSpace (x:_) = isSpace x
                startsWithSpace _ = False
-- | Removes leading and trailing whitespace.  Traverses the string
-- several times, so should only be used on small strings.
trim :: String -> String
trim str = reverse (dropWhile isSpace (reverse (dropWhile isSpace str)))
-- | A total variant of 'head' that falls back to a default value on
-- the empty list.
defaultHead :: a -> [a] -> a
defaultHead dflt xs =
  case xs of
    []      -> dflt
    (y : _) -> y
-- | A 'head' version in the I/O monad, for validating parameters
-- without which we cannot continue.
-- On an empty list the process is aborted via 'exitErr' with the
-- given message.
exitIfEmpty :: String -> [a] -> IO a
exitIfEmpty _ (x:_) = return x
exitIfEmpty s [] = exitErr s
-- | Obtain the unique element of a list in an arbitrary monad,
-- failing (via 'fail') with the given message on an empty list or
-- when the elements are not all equal.
monadicThe :: (Eq a, Monad m) => String -> [a] -> m a
monadicThe s [] = fail s
monadicThe s (x:xs) = if all (== x) xs
                        then return x
                        else fail s
-- | Split an 'Either' list into the 'Left' elements, the 'Right'
-- elements, and a \"trail\" of booleans recording, per input element,
-- whether it came from the right ('True') or the left ('False') side.
--
-- The counterpart is 'recombineEithers'.  The sum of the two value
-- lists' lengths equals the input length, the trail has the input's
-- length, and all three outputs are reversed with respect to the
-- input order.
splitEithers :: [Either a b] -> ([a], [b], [Bool])
splitEithers = foldl' step ([], [], [])
  where step (ls, rs, trail) (Left x)  = (x : ls, rs, False : trail)
        step (ls, rs, trail) (Right y) = (ls, y : rs, True : trail)
-- | Recombines two \"left\" and \"right\" lists using a \"trail\"
-- list into a single 'Either' list.
--
-- This is the counterpart to 'splitEithers'. It does the opposite
-- transformation, and the output list will be the reverse of the
-- input lists. Since 'splitEithers' also reverses the lists, calling
-- these together will result in the original list.
--
-- Mismatches in the structure of the lists (e.g. inconsistent
-- lengths) are represented via 'Bad'; normally this function should
-- not fail, if lists are passed as generated by 'splitEithers'.
recombineEithers :: (Show a, Show b) =>
                    [a] -> [b] -> [Bool] -> Result [Either a b]
recombineEithers lefts rights trail =
  foldM recombiner ([], lefts, rights) trail >>= checker
  -- the checker verifies both source lists were fully consumed
  where checker (eithers, [], []) = Ok eithers
        checker (_, lefts', rights') =
          Bad $ "Inconsistent results after recombination, l'=" ++
                show lefts' ++ ", r'=" ++ show rights'
        -- each trail entry pops one element from the matching list;
        -- the fall-through clause fires when that list is exhausted
        recombiner (es, l:ls, rs) False = Ok (Left l:es, ls, rs)
        recombiner (es, ls, r:rs) True = Ok (Right r:es, ls, rs)
        recombiner (_, ls, rs) t = Bad $ "Inconsistent trail log: l=" ++
                                   show ls ++ ", r=" ++ show rs ++ ",t=" ++
                                   show t
-- | Default hints for the resolver.
-- The numeric-only flags mean no DNS lookup is ever performed; only
-- literal addresses and ports are accepted.
resolveAddrHints :: Maybe AddrInfo
resolveAddrHints =
  Just defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV] }
-- | Resolves a numeric address, returning the address family together
-- with the socket address of the first (best) result.
resolveAddr :: Int -> String -> IO (Result (Family, SockAddr))
resolveAddr port str = do
  resolved <- getAddrInfo resolveAddrHints (Just str) (Just (show port))
  return $ case resolved of
             [] -> Bad "Invalid results from lookup?"
             best:_ -> Ok (addrFamily best, addrAddress best)
-- | Set the owner and the group of a file (given as names, not numeric id).
-- Aborts the process (via 'exitIfBad') if the runtime entities cannot
-- be resolved.
setOwnerAndGroupFromNames :: FilePath -> GanetiDaemon -> GanetiGroup -> IO ()
setOwnerAndGroupFromNames filename daemon dGroup = do
  -- TODO: it would be nice to rework this (or getEnts) so that runtimeEnts
  -- is read only once per daemon startup, and then cached for further usage.
  runtimeEnts <- runResultT getEnts
  ents <- exitIfBad "Can't find required user/groups" runtimeEnts
  -- note: we use directly ! as lookup failures shouldn't happen, due
  -- to the map construction
  let uid = reUserToUid ents M.! daemon
  let gid = reGroupToGid ents M.! dGroup
  setOwnerAndGroup filename uid gid
-- | Resets permissions so that the owner can read/write and the group only
-- read. All other permissions are cleared.
setOwnerWGroupR :: FilePath -> IO ()
setOwnerWGroupR path = setFileMode path mode
  where mode = foldl unionFileModes nullFileMode
               [ownerReadMode, ownerWriteMode, groupReadMode]
-- | Formats an integral number, appending the English ordinal suffix
-- (\"st\", \"nd\", \"rd\" or \"th\").
--
-- The teens (11-13, and 111-113, 211-213, ...) always take \"th\",
-- which is why we inspect @num \`mod\` 100@ and not just the last
-- digit: the previous implementation only special-cased 11-19 and
-- therefore produced e.g. \"111st\" instead of \"111th\".
formatOrdinal :: (Integral a, Show a) => a -> String
formatOrdinal num
  | lastTwo >= 11 && lastTwo <= 13 = suffix "th"
  | lastOne == 1 = suffix "st"
  | lastOne == 2 = suffix "nd"
  | lastOne == 3 = suffix "rd"
  | otherwise = suffix "th"
  where lastTwo = num `mod` 100  -- catches the teen exception
        lastOne = num `mod` 10
        suffix s = show num ++ s
-- | Attempt, in a non-blocking way, to obtain a lock on a given file; report
-- back success.
-- Returns the file descriptor so that the lock can be released by closing.
-- NOTE(review): errors from 'setLock' (e.g. lock contention) presumably
-- surface as a 'Bad' via the surrounding 'runResultT' -- confirm against
-- the ResultT instance used here.
lockFile :: FilePath -> IO (Result Fd)
lockFile path = runResultT . liftIO $ do
  handle <- openFile path WriteMode
  fd <- handleToFd handle
  setLock fd (WriteLock, AbsoluteSeek, 0, 0)
  return fd
-- | File stat identifier: modification time, inode number and size.
-- Two files with equal 'FStat's are assumed unchanged.
type FStat = (EpochTime, FileID, FileOffset)
-- | Null 'FStat' value, used as a sentinel for \"no valid stat\"
-- (e.g. when the file does not exist).
nullFStat :: FStat
nullFStat = (-1, -1, -1)
-- | Computes the file cache data from a FileStatus structure.
buildFileStatus :: FileStatus -> FStat
buildFileStatus ofs =
  let modt = modificationTime ofs
      inum = fileID ofs
      fsize = fileSize ofs
  in (modt, inum, fsize)
-- | Wrapper over 'buildFileStatus'. This reads the data from the
-- filesystem and then builds our cache structure.
getFStat :: FilePath -> IO FStat
getFStat p = liftM buildFileStatus (getFileStatus p)
-- | Safe version of 'getFStat', that ignores IOErrors and returns
-- 'nullFStat' in their place.
getFStatSafe :: FilePath -> IO FStat
getFStatSafe fpath = liftM (either (const nullFStat) id)
                       ((try $ getFStat fpath) :: IO (Either IOError FStat))
-- | Check if the file needs reloading.
-- Returns @Just newstat@ (the value to cache) when the on-disk stat
-- differs from the remembered one, and @Nothing@ otherwise.
needsReload :: FStat -> FilePath -> IO (Maybe FStat)
needsReload oldstat path = do
  newstat <- getFStat path
  return $ if newstat /= oldstat
             then Just newstat
             else Nothing
-- | Until the given point in time (useconds since the epoch), wait
-- for the output of a given method to change and return the new value;
-- make use of the promise that the output only changes if the reference
-- has a value different than the given one.
-- Polls roughly every 100ms (the 'threadDelay' below); once the
-- deadline has passed, the current value of @read_fn@ is returned
-- unconditionally.
watchFileEx :: (Eq b) => Integer -> b -> IORef b -> (a -> Bool) -> IO a -> IO a
watchFileEx endtime base ref check read_fn = do
  current <- getCurrentTimeUSec
  if current > endtime then read_fn else do
    val <- readIORef ref
    if val /= base
      then do
        new <- read_fn
        -- the reference changed, but the produced value may still not
        -- satisfy the predicate; keep polling against the new baseline
        if check new then return new else do
          logDebug "Observed change not relevant"
          threadDelay 100000
          watchFileEx endtime val ref check read_fn
      else do
        threadDelay 100000
        watchFileEx endtime base ref check read_fn
-- | Within the given timeout (in seconds), wait for for the output
-- of the given method to satisfy a given predicate and return the new value;
-- make use of the promise that the method will only change its value, if
-- the given file changes on disk. If the file does not exist on disk, return
-- immediately.
-- An inotify watch (Modify/Delete) updates an 'FStat' reference; the
-- actual waiting/polling is delegated to 'watchFileEx'.  The inotify
-- instance is released via 'bracket' on all exit paths.
watchFileBy :: FilePath -> Int -> (a -> Bool) -> IO a -> IO a
watchFileBy fpath timeout check read_fn = do
  current <- getCurrentTimeUSec
  let endtime = current + fromIntegral timeout * 1000000
  fstat <- getFStatSafe fpath
  ref <- newIORef fstat
  bracket initINotify killINotify $ \inotify -> do
    let do_watch e = do
          logDebug $ "Notified of change in " ++ fpath
                       ++ "; event: " ++ show e
          -- an Ignored event means the watch was dropped (e.g. the
          -- file was deleted), so re-register it
          when (e == Ignored)
            (addWatch inotify [Modify, Delete] fpath do_watch
               >> return ())
          fstat' <- getFStatSafe fpath
          writeIORef ref fstat'
    _ <- addWatch inotify [Modify, Delete] fpath do_watch
    -- re-check after registering the watch, to close the race between
    -- the initial stat and the watch setup
    newval <- read_fn
    if check newval
      then do
        logDebug $ "File " ++ fpath ++ " changed during setup of inotify"
        return newval
      else watchFileEx endtime fstat ref check read_fn
-- | Within the given timeout (in seconds), wait for for the output
-- of the given method to change and return the new value; make use of
-- the promise that the method will only change its value, if
-- the given file changes on disk. If the file does not exist on disk, return
-- immediately.
-- Thin wrapper over 'watchFileBy' with \"different from the old
-- value\" as the predicate.
watchFile :: Eq a => FilePath -> Int -> a -> IO a -> IO a
watchFile fpath timeout old = watchFileBy fpath timeout (/= old)
-- | Type describing ownership and permissions of newly generated
-- directories and files. All parameters are optional, with nothing
-- meaning that the default value should be left untouched.
data FilePermissions = FilePermissions { fpOwner :: Maybe GanetiDaemon
                                       , fpGroup :: Maybe GanetiGroup
                                       , fpPermissions :: FileMode
                                       }
-- | Ensure that a given file or directory has the permissions, and
-- possibly ownerships, as required.
-- Each of the three adjustments (owner, group, mode) is attempted
-- independently and only when it differs from the current state; all
-- collected IOErrors are reported together in a single 'Bad'.
ensurePermissions :: FilePath -> FilePermissions -> IO (Result ())
ensurePermissions fpath perms = do
  -- Fetch the list of entities
  runtimeEnts <- runResultT getEnts
  ents <- exitIfBad "Can't determine user/group ids" runtimeEnts
  -- Get the existing file properties
  eitherFileStatus <- try $ getFileStatus fpath
                        :: IO (Either IOError FileStatus)
  -- And see if any modifications are needed
  (flip $ either (return . Bad . show)) eitherFileStatus $ \fstat -> do
    ownertry <- case fpOwner perms of
      Nothing -> return $ Right ()
      Just owner -> try $ do
        let ownerid = reUserToUid ents M.! owner
        unless (ownerid == fileOwner fstat) $ do
          logDebug $ "Changing owner of " ++ fpath ++ " to " ++ show owner
          -- (-1) leaves the group untouched, per chown(2) semantics
          setOwnerAndGroup fpath ownerid (-1)
    grouptry <- case fpGroup perms of
      Nothing -> return $ Right ()
      Just grp -> try $ do
        let groupid = reGroupToGid ents M.! grp
        unless (groupid == fileGroup fstat) $ do
          logDebug $ "Changing group of " ++ fpath ++ " to " ++ show grp
          setOwnerAndGroup fpath (-1) groupid
    let fp = fpPermissions perms
    permtry <- if fileMode fstat == fp
      then return $ Right ()
      else try $ do
        logInfo $ "Changing permissions of " ++ fpath ++ " to "
                    ++ showOct fp ""
        setFileMode fpath fp
    let errors = E.lefts ([ownertry, grouptry, permtry] :: [Either IOError ()])
    if null errors
      then return $ Ok ()
      else return . Bad $ show errors
-- | Safely rename a file, creating the target directory, if needed.
-- The direct rename is attempted first; only on failure is the target
-- directory created (with the given permissions) and the rename
-- retried.
safeRenameFile :: FilePermissions -> FilePath -> FilePath -> IO (Result ())
safeRenameFile perms from to = do
  directtry <- try $ renameFile from to
  case (directtry :: Either IOError ()) of
    Right () -> return $ Ok ()
    Left _ -> do
      result <- try $ do
        let dir = takeDirectory to
        createDirectoryIfMissing True dir
        -- the result of the permission fixup is deliberately ignored;
        -- the rename below is the operation that must succeed
        _ <- ensurePermissions dir perms
        renameFile from to
      return $ either (Bad . show) Ok (result :: Either IOError ())
-- | Removes duplicates while preserving the order of first
-- occurrence.  Runs in /O(n log n)/ using a 'S.Set' of seen elements,
-- unlike the quadratic 'Data.List.nub'.
ordNub :: (Ord a) => [a] -> [a]
ordNub = loop S.empty
  where loop _ [] = []
        loop seen (y:ys)
          | y `S.member` seen = loop seen ys
          | otherwise         = y : loop (S.insert y seen) ys
-- | @isSubsequenceOf a b@: checks whether all elements of @a@ appear
-- in @b@, in the same relative order (not necessarily contiguously).
isSubsequenceOf :: (Eq a) => [a] -> [a] -> Bool
isSubsequenceOf [] _ = True
isSubsequenceOf _ [] = False
isSubsequenceOf needle@(n:ns) (h:hs)
  | n == h    = isSubsequenceOf ns hs
  | otherwise = isSubsequenceOf needle hs
-- | Compute the maximum of two elements by a given order.  Guaranteed
-- total, unlike 'maximumBy' on a possibly-empty list.  On 'EQ' (and
-- 'LT') the second argument is returned, matching
-- @maximumBy ord [a, b]@.
maxBy :: (a -> a -> Ordering) -> a -> a -> a
maxBy ord a b = case ord a b of
                  GT -> a
                  _  -> b
-- | Given a predicate that is monotone on a list, find the
-- first list entry where it holds, if any. Use the monotonicity
-- property to evaluate the property at as few places as possible,
-- guided by the heuristics provided.
-- If the heuristic probe point satisfies the predicate, the earlier
-- prefix may still contain the first hit, so it is searched
-- recursively (with the probe itself as fallback via 'mplus');
-- otherwise monotonicity guarantees the hit, if any, is in the tail.
monotoneFind :: ([a] -> Int) -> (a -> Bool) -> [a] -> Maybe a
monotoneFind heuristics p xs =
  let count = heuristics xs
  in case () of
       _ | x:xs' <- drop count xs
           -> if p x
                then (`mplus` Just x) . monotoneFind heuristics p
                       $ take count xs
                else monotoneFind heuristics p xs'
       -- heuristic pointed past the end: fall back to a linear scan
       _ | x:xs' <- xs
           -> if p x
                then Just x
                else monotoneFind heuristics p xs'
       _ -> Nothing
-- | Iterate a function as long as it returns 'Just' values, collecting
-- the starting value and every 'Just' payload obtained along the way.
iterateJust :: (a -> Maybe a) -> a -> [a]
iterateJust f = go
  where go x = x : case f x of
                     Nothing -> []
                     Just y  -> go y
| leshchevds/ganeti | src/Ganeti/Utils.hs | bsd-2-clause | 32,390 | 0 | 24 | 8,158 | 7,521 | 3,949 | 3,572 | 540 | 5 |
{-# LANGUAGE CPP, GADTs, RankNTypes #-}
-----------------------------------------------------------------------------
--
-- Cmm utilities.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module CmmUtils(
-- CmmType
primRepCmmType, primRepForeignHint,
typeCmmType, typeForeignHint,
-- CmmLit
zeroCLit, mkIntCLit,
mkWordCLit, packHalfWordsCLit,
mkByteStringCLit,
mkDataLits, mkRODataLits,
mkStgWordCLit,
-- CmmExpr
mkIntExpr, zeroExpr,
mkLblExpr,
cmmRegOff, cmmOffset, cmmLabelOff, cmmOffsetLit, cmmOffsetExpr,
cmmRegOffB, cmmOffsetB, cmmLabelOffB, cmmOffsetLitB, cmmOffsetExprB,
cmmRegOffW, cmmOffsetW, cmmLabelOffW, cmmOffsetLitW, cmmOffsetExprW,
cmmIndex, cmmIndexExpr, cmmLoadIndex, cmmLoadIndexW,
cmmNegate,
cmmULtWord, cmmUGeWord, cmmUGtWord, cmmSubWord,
cmmNeWord, cmmEqWord, cmmOrWord, cmmAndWord,
cmmUShrWord, cmmAddWord, cmmMulWord, cmmQuotWord,
cmmToWord,
isTrivialCmmExpr, hasNoGlobalRegs,
-- Statics
blankWord,
-- Tagging
cmmTagMask, cmmPointerMask, cmmUntag, cmmIsTagged,
cmmConstrTag1,
-- Liveness and bitmaps
mkLiveness,
-- * Operations that probably don't belong here
modifyGraph,
ofBlockMap, toBlockMap, insertBlock,
ofBlockList, toBlockList, bodyToBlockList,
toBlockListEntryFirst, toBlockListEntryFirstFalseFallthrough,
foldGraphBlocks, mapGraphNodes, postorderDfs, mapGraphNodes1,
analFwd, analBwd, analRewFwd, analRewBwd,
dataflowPassFwd, dataflowPassBwd, dataflowAnalFwd, dataflowAnalBwd,
dataflowAnalFwdBlocks,
-- * Ticks
blockTicks
) where
#include "HsVersions.h"
import TyCon ( PrimRep(..), PrimElemRep(..) )
import Type ( UnaryType, typePrimRep )
import SMRep
import Cmm
import BlockId
import CLabel
import Outputable
import Unique
import UniqSupply
import DynFlags
import Util
import Data.Word
import Data.Maybe
import Data.Bits
import Hoopl
---------------------------------------------------
--
-- CmmTypes
--
---------------------------------------------------
-- | Map a 'PrimRep' to the 'CmmType' used to represent it.
-- 'VoidRep' has no runtime representation, hence the panic; pointer-
-- and word-sized reps depend on the target word size via 'DynFlags'.
primRepCmmType :: DynFlags -> PrimRep -> CmmType
primRepCmmType _      VoidRep          = panic "primRepCmmType:VoidRep"
primRepCmmType dflags PtrRep           = gcWord dflags
primRepCmmType dflags IntRep           = bWord dflags
primRepCmmType dflags WordRep          = bWord dflags
primRepCmmType _      Int64Rep         = b64
primRepCmmType _      Word64Rep        = b64
primRepCmmType dflags AddrRep          = bWord dflags
primRepCmmType _      FloatRep         = f32
primRepCmmType _      DoubleRep        = f64
primRepCmmType _      (VecRep len rep) = vec len (primElemRepCmmType rep)
-- | Map a vector element representation to its 'CmmType'; a plain
-- width table, independent of the target platform.
primElemRepCmmType :: PrimElemRep -> CmmType
primElemRepCmmType rep = case rep of
  Int8ElemRep    -> b8
  Int16ElemRep   -> b16
  Int32ElemRep   -> b32
  Int64ElemRep   -> b64
  Word8ElemRep   -> b8
  Word16ElemRep  -> b16
  Word32ElemRep  -> b32
  Word64ElemRep  -> b64
  FloatElemRep   -> f32
  DoubleElemRep  -> f64
-- | The 'CmmType' of a (unary) source-level type, via its 'PrimRep'.
typeCmmType :: DynFlags -> UnaryType -> CmmType
typeCmmType dflags ty = primRepCmmType dflags (typePrimRep ty)
-- | The FFI hint (signedness/addressness) attached to each 'PrimRep'.
primRepForeignHint :: PrimRep -> ForeignHint
primRepForeignHint VoidRep      = panic "primRepForeignHint:VoidRep"
primRepForeignHint PtrRep       = AddrHint
primRepForeignHint IntRep       = SignedHint
primRepForeignHint WordRep      = NoHint
primRepForeignHint Int64Rep     = SignedHint
primRepForeignHint Word64Rep    = NoHint
primRepForeignHint AddrRep      = AddrHint -- NB! AddrHint, but NonPtrArg
primRepForeignHint FloatRep     = NoHint
primRepForeignHint DoubleRep    = NoHint
primRepForeignHint (VecRep {})  = NoHint
-- | The FFI hint of a (unary) source-level type.
typeForeignHint :: UnaryType -> ForeignHint
typeForeignHint = primRepForeignHint . typePrimRep
---------------------------------------------------
--
-- CmmLit
--
---------------------------------------------------
-- XXX: should really be Integer, since Int doesn't necessarily cover
-- the full range of target Ints.
-- | An 'Int' literal at the target's word width.
mkIntCLit :: DynFlags -> Int -> CmmLit
mkIntCLit dflags i = CmmInt (toInteger i) (wordWidth dflags)
-- | As 'mkIntCLit', but wrapped into a (strictly built) expression.
mkIntExpr :: DynFlags -> Int -> CmmExpr
mkIntExpr dflags i = CmmLit $! mkIntCLit dflags i
-- | The word-sized literal zero.
zeroCLit :: DynFlags -> CmmLit
zeroCLit dflags = CmmInt 0 (wordWidth dflags)
-- | The word-sized expression zero.
zeroExpr :: DynFlags -> CmmExpr
zeroExpr dflags = CmmLit (zeroCLit dflags)
-- | An 'Integer' literal at the target's word width.
mkWordCLit :: DynFlags -> Integer -> CmmLit
mkWordCLit dflags wd = CmmInt wd (wordWidth dflags)
mkByteStringCLit :: Unique -> [Word8] -> (CmmLit, GenCmmDecl CmmStatics info stmt)
-- We have to make a top-level decl for the string,
-- and return a literal pointing to it
mkByteStringCLit uniq bytes
  = (CmmLabel lbl, CmmData ReadOnlyData $ Statics lbl [CmmString bytes])
  where
    lbl = mkStringLitLabel uniq
mkDataLits :: Section -> CLabel -> [CmmLit] -> GenCmmDecl CmmStatics info stmt
-- Build a data-segment data block
mkDataLits section lbl lits
  = CmmData section (Statics lbl $ map CmmStaticLit lits)
mkRODataLits :: CLabel -> [CmmLit] -> GenCmmDecl CmmStatics info stmt
-- Build a read-only data block
mkRODataLits lbl lits
  = mkDataLits section lbl lits
  where
    -- label-referencing literals need load-time relocation, so they
    -- go into a separate (writable-at-load) read-only section
    section | any needsRelocation lits = RelocatableReadOnlyData
            | otherwise                = ReadOnlyData
    needsRelocation (CmmLabel _)      = True
    needsRelocation (CmmLabelOff _ _) = True
    needsRelocation _                 = False
-- | An 'StgWord' literal at the target's word width.
mkStgWordCLit :: DynFlags -> StgWord -> CmmLit
mkStgWordCLit dflags wd = CmmInt (fromStgWord wd) (wordWidth dflags)
packHalfWordsCLit :: DynFlags -> StgHalfWord -> StgHalfWord -> CmmLit
-- Make a single word literal in which the lower_half_word is
-- at the lower address, and the upper_half_word is at the
-- higher address
-- ToDo: consider using half-word lits instead
-- but be careful: that's vulnerable when reversed
packHalfWordsCLit dflags lower_half_word upper_half_word
   = if wORDS_BIGENDIAN dflags
     then mkWordCLit dflags ((l `shiftL` hALF_WORD_SIZE_IN_BITS dflags) .|. u)
     else mkWordCLit dflags (l .|. (u `shiftL` hALF_WORD_SIZE_IN_BITS dflags))
    where l = fromStgHalfWord lower_half_word
          u = fromStgHalfWord upper_half_word
---------------------------------------------------
--
-- CmmExpr
--
---------------------------------------------------
-- | An expression denoting the address of a label.
mkLblExpr :: CLabel -> CmmExpr
mkLblExpr lbl = CmmLit (CmmLabel lbl)
cmmOffsetExpr :: DynFlags -> CmmExpr -> CmmExpr -> CmmExpr
-- assumes base and offset have the same CmmType
-- (a literal offset is folded via 'cmmOffset' instead of emitting an add)
cmmOffsetExpr dflags e (CmmLit (CmmInt n _)) = cmmOffset dflags e (fromInteger n)
cmmOffsetExpr dflags e byte_off = CmmMachOp (MO_Add (cmmExprWidth dflags e)) [e, byte_off]
-- | Add a constant byte offset to an expression, constant-folding
-- where possible.  NB: the clauses are ordered from most to least
-- specific; the final clause emits a generic add.
cmmOffset :: DynFlags -> CmmExpr -> Int -> CmmExpr
cmmOffset _ e                 0        = e
cmmOffset _ (CmmReg reg)      byte_off = cmmRegOff reg byte_off
cmmOffset _ (CmmRegOff reg m) byte_off = cmmRegOff reg (m+byte_off)
cmmOffset _ (CmmLit lit)      byte_off = CmmLit (cmmOffsetLit lit byte_off)
cmmOffset _ (CmmStackSlot area off) byte_off
  = CmmStackSlot area (off - byte_off)
  -- note stack area offsets increase towards lower addresses
cmmOffset _ (CmmMachOp (MO_Add rep) [expr, CmmLit (CmmInt byte_off1 _rep)]) byte_off2
  = CmmMachOp (MO_Add rep)
              [expr, CmmLit (CmmInt (byte_off1 + toInteger byte_off2) rep)]
cmmOffset dflags expr byte_off
  = CmmMachOp (MO_Add width) [expr, CmmLit (CmmInt (toInteger byte_off) width)]
  where
    width = cmmExprWidth dflags expr
-- Smart constructor for CmmRegOff. Same caveats as cmmOffset above.
cmmRegOff :: CmmReg -> Int -> CmmExpr
cmmRegOff reg 0        = CmmReg reg
cmmRegOff reg byte_off = CmmRegOff reg byte_off
-- | Add a byte offset to a literal; only label/int literals support
-- this, anything else is a panic.
cmmOffsetLit :: CmmLit -> Int -> CmmLit
cmmOffsetLit (CmmLabel l)      byte_off = cmmLabelOff l byte_off
cmmOffsetLit (CmmLabelOff l m) byte_off = cmmLabelOff l (m+byte_off)
cmmOffsetLit (CmmLabelDiffOff l1 l2 m) byte_off
                                        = CmmLabelDiffOff l1 l2 (m+byte_off)
cmmOffsetLit (CmmInt m rep)    byte_off = CmmInt (m + fromIntegral byte_off) rep
cmmOffsetLit _                 byte_off = pprPanic "cmmOffsetLit" (ppr byte_off)
cmmLabelOff :: CLabel -> Int -> CmmLit
-- Smart constructor for CmmLabelOff
cmmLabelOff lbl 0        = CmmLabel lbl
cmmLabelOff lbl byte_off = CmmLabelOff lbl byte_off
-- | Useful for creating an index into an array, with a staticaly known offset.
-- The type is the element type; used for making the multiplier
cmmIndex :: DynFlags
         -> Width       -- Width w
         -> CmmExpr     -- Address of vector of items of width w
         -> Int         -- Which element of the vector (0 based)
         -> CmmExpr     -- Address of i'th element
cmmIndex dflags width base idx = cmmOffset dflags base (idx * widthInBytes width)
-- | Useful for creating an index into an array, with an unknown offset.
cmmIndexExpr :: DynFlags
             -> Width           -- Width w
             -> CmmExpr         -- Address of vector of items of width w
             -> CmmExpr         -- Which element of the vector (0 based)
             -> CmmExpr         -- Address of i'th element
cmmIndexExpr dflags width base (CmmLit (CmmInt n _)) = cmmIndex dflags width base (fromInteger n)
cmmIndexExpr dflags width base idx =
  cmmOffsetExpr dflags base byte_off
  where
    idx_w = cmmExprWidth dflags idx
    -- scale the index by shifting with log2 of the element width
    byte_off = CmmMachOp (MO_Shl idx_w) [idx, mkIntExpr dflags (widthInLog width)]
-- | Load the i'th element of an array of the given element type.
cmmLoadIndex :: DynFlags -> CmmType -> CmmExpr -> Int -> CmmExpr
cmmLoadIndex dflags ty expr ix = CmmLoad (cmmIndex dflags (typeWidth ty) expr ix) ty
-- The "B" variants take byte offsets
-- (plain aliases of the functions above, with more precise types)
cmmRegOffB :: CmmReg -> ByteOff -> CmmExpr
cmmRegOffB = cmmRegOff
cmmOffsetB :: DynFlags -> CmmExpr -> ByteOff -> CmmExpr
cmmOffsetB = cmmOffset
cmmOffsetExprB :: DynFlags -> CmmExpr -> CmmExpr -> CmmExpr
cmmOffsetExprB = cmmOffsetExpr
cmmLabelOffB :: CLabel -> ByteOff -> CmmLit
cmmLabelOffB = cmmLabelOff
cmmOffsetLitB :: CmmLit -> ByteOff -> CmmLit
cmmOffsetLitB = cmmOffsetLit
-----------------------
-- The "W" variants take word offsets
-- (they scale by the target word size before delegating to the byte
-- variants)
cmmOffsetExprW :: DynFlags -> CmmExpr -> CmmExpr -> CmmExpr
-- The second arg is a *word* offset; need to change it to bytes
cmmOffsetExprW dflags e (CmmLit (CmmInt n _)) = cmmOffsetW dflags e (fromInteger n)
cmmOffsetExprW dflags e wd_off = cmmIndexExpr dflags (wordWidth dflags) e wd_off
cmmOffsetW :: DynFlags -> CmmExpr -> WordOff -> CmmExpr
cmmOffsetW dflags e n = cmmOffsetB dflags e (wordsToBytes dflags n)
cmmRegOffW :: DynFlags -> CmmReg -> WordOff -> CmmExpr
cmmRegOffW dflags reg wd_off = cmmRegOffB reg (wordsToBytes dflags wd_off)
cmmOffsetLitW :: DynFlags -> CmmLit -> WordOff -> CmmLit
cmmOffsetLitW dflags lit wd_off = cmmOffsetLitB lit (wordsToBytes dflags wd_off)
cmmLabelOffW :: DynFlags -> CLabel -> WordOff -> CmmLit
cmmLabelOffW dflags lbl wd_off = cmmLabelOffB lbl (wordsToBytes dflags wd_off)
-- | Load the word at the given word offset from the base address.
cmmLoadIndexW :: DynFlags -> CmmExpr -> Int -> CmmType -> CmmExpr
cmmLoadIndexW dflags base off ty = CmmLoad (cmmOffsetW dflags base off) ty
-----------------------
-- Word-sized comparison and arithmetic operators, wrapping the
-- corresponding machine ops at the target word width.
cmmULtWord, cmmUGeWord, cmmUGtWord, cmmSubWord,
  cmmNeWord, cmmEqWord, cmmOrWord, cmmAndWord,
  cmmUShrWord, cmmAddWord, cmmMulWord, cmmQuotWord
  :: DynFlags -> CmmExpr -> CmmExpr -> CmmExpr
cmmOrWord dflags e1 e2   = CmmMachOp (mo_wordOr dflags)  [e1, e2]
cmmAndWord dflags e1 e2  = CmmMachOp (mo_wordAnd dflags) [e1, e2]
cmmNeWord dflags e1 e2   = CmmMachOp (mo_wordNe dflags)  [e1, e2]
cmmEqWord dflags e1 e2   = CmmMachOp (mo_wordEq dflags)  [e1, e2]
cmmULtWord dflags e1 e2  = CmmMachOp (mo_wordULt dflags) [e1, e2]
cmmUGeWord dflags e1 e2  = CmmMachOp (mo_wordUGe dflags) [e1, e2]
cmmUGtWord dflags e1 e2  = CmmMachOp (mo_wordUGt dflags) [e1, e2]
--cmmShlWord dflags e1 e2 = CmmMachOp (mo_wordShl dflags) [e1, e2]
cmmUShrWord dflags e1 e2 = CmmMachOp (mo_wordUShr dflags) [e1, e2]
cmmAddWord dflags e1 e2  = CmmMachOp (mo_wordAdd dflags) [e1, e2]
cmmSubWord dflags e1 e2  = CmmMachOp (mo_wordSub dflags) [e1, e2]
cmmMulWord dflags e1 e2  = CmmMachOp (mo_wordMul dflags) [e1, e2]
cmmQuotWord dflags e1 e2 = CmmMachOp (mo_wordUQuot dflags) [e1, e2]
-- | Arithmetic negation, constant-folded for integer literals.
cmmNegate :: DynFlags -> CmmExpr -> CmmExpr
cmmNegate _      (CmmLit (CmmInt n rep)) = CmmLit (CmmInt (-n) rep)
cmmNegate dflags e                       = CmmMachOp (MO_S_Neg (cmmExprWidth dflags e)) [e]
-- | One word of uninitialised static data.
blankWord :: DynFlags -> CmmStatic
blankWord dflags = CmmUninitialised (wORD_SIZE dflags)
-- | Zero-extend an expression to the target word width (identity if
-- it is already word-sized).
cmmToWord :: DynFlags -> CmmExpr -> CmmExpr
cmmToWord dflags e
  | w == word  = e
  | otherwise  = CmmMachOp (MO_UU_Conv w word) [e]
  where
    w = cmmExprWidth dflags e
    word = wordWidth dflags
---------------------------------------------------
--
-- CmmExpr predicates
--
---------------------------------------------------
-- | Whether the expression is \"trivial\", i.e. a literal or (offset)
-- register reference involving no memory access or computation.
-- Stack slots should have been eliminated before this is called,
-- hence the panic.
isTrivialCmmExpr :: CmmExpr -> Bool
isTrivialCmmExpr (CmmLoad _ _)      = False
isTrivialCmmExpr (CmmMachOp _ _)    = False
isTrivialCmmExpr (CmmLit _)         = True
isTrivialCmmExpr (CmmReg _)         = True
isTrivialCmmExpr (CmmRegOff _ _)    = True
isTrivialCmmExpr (CmmStackSlot _ _) = panic "isTrivialCmmExpr CmmStackSlot"
-- | Whether the expression mentions only local registers (recursing
-- through loads and machine operations); any global register makes
-- this 'False'.
hasNoGlobalRegs :: CmmExpr -> Bool
hasNoGlobalRegs expr = case expr of
  CmmLoad e _              -> hasNoGlobalRegs e
  CmmMachOp _ es           -> all hasNoGlobalRegs es
  CmmLit _                 -> True
  CmmReg (CmmLocal _)      -> True
  CmmRegOff (CmmLocal _) _ -> True
  _                        -> False
---------------------------------------------------
--
-- Tagging
--
---------------------------------------------------
-- Tag bits mask
--cmmTagBits = CmmLit (mkIntCLit tAG_BITS)
-- | Masks selecting, respectively, the tag bits and the pointer bits
-- of a tagged closure pointer.
cmmTagMask, cmmPointerMask :: DynFlags -> CmmExpr
cmmTagMask dflags = mkIntExpr dflags (tAG_MASK dflags)
cmmPointerMask dflags = mkIntExpr dflags (complement (tAG_MASK dflags))
-- Used to untag a possibly tagged pointer
-- A static label need not be untagged
cmmUntag :: DynFlags -> CmmExpr -> CmmExpr
cmmUntag _ e@(CmmLit (CmmLabel _)) = e
-- Default case
cmmUntag dflags e = cmmAndWord dflags e (cmmPointerMask dflags)
-- Test if a closure pointer is untagged
cmmIsTagged :: DynFlags -> CmmExpr -> CmmExpr
cmmIsTagged dflags e = cmmNeWord dflags (cmmAndWord dflags e (cmmTagMask dflags)) (zeroExpr dflags)
cmmConstrTag1 :: DynFlags -> CmmExpr -> CmmExpr
-- Get constructor tag, but one based.
cmmConstrTag1 dflags e = cmmAndWord dflags e (cmmTagMask dflags)
--------------------------------------------
--
-- mkLiveness
--
---------------------------------------------
-- | Build a liveness bitmap for the given register slots: one bit per
-- word, 'True' meaning \"non-pointer\" (not to be followed by the GC).
-- A 'Nothing' slot takes one word; a register takes as many words as
-- its type occupies, rounded up.
mkLiveness :: DynFlags -> [Maybe LocalReg] -> Liveness
mkLiveness _ [] = []
mkLiveness dflags (reg:regs)
  = take sizeW bits ++ mkLiveness dflags regs
  where
    sizeW = case reg of
              Nothing -> 1
              Just r  -> (widthInBytes (typeWidth (localRegType r)) + wORD_SIZE dflags - 1)
                           `quot` wORD_SIZE dflags
                         -- number of words, rounded up
    bits = repeat $ is_non_ptr reg -- True <=> Non Ptr
    is_non_ptr Nothing    = True
    is_non_ptr (Just reg) = not $ isGcPtrType (localRegType reg)
-- ============================================== -
-- ============================================== -
-- ============================================== -
---------------------------------------------------
--
-- Manipulating CmmGraphs
--
---------------------------------------------------
modifyGraph :: (Graph n C C -> Graph n' C C) -> GenCmmGraph n -> GenCmmGraph n'
modifyGraph f g = CmmGraph {g_entry=g_entry g, g_graph=f (g_graph g)}
-- | Project the body (the map from label to block) out of a graph.
-- A closed/closed hoopl graph always has the @GMany NothingO body NothingO@
-- shape, so the single pattern is exhaustive here.
toBlockMap :: CmmGraph -> BlockEnv CmmBlock
toBlockMap (CmmGraph {g_graph=GMany NothingO body NothingO}) = body
-- | Rebuild a graph from an entry label and a block map (inverse of 'toBlockMap').
ofBlockMap :: BlockId -> BlockEnv CmmBlock -> CmmGraph
ofBlockMap entry bodyMap = CmmGraph {g_entry=entry, g_graph=GMany NothingO bodyMap NothingO}
-- | Insert a block into a block map, keyed by its entry label.
-- The assertion guards against silently replacing an existing block
-- that has the same label.
insertBlock :: CmmBlock -> BlockEnv CmmBlock -> BlockEnv CmmBlock
insertBlock block map =
  ASSERT(isNothing $ mapLookup id map)
  mapInsert id block map
  where id = entryLabel block
-- | All blocks of a graph, in no specified order.
toBlockList :: CmmGraph -> [CmmBlock]
toBlockList = mapElems . toBlockMap
-- | like 'toBlockList', but the entry block always comes first
toBlockListEntryFirst :: CmmGraph -> [CmmBlock]
toBlockListEntryFirst g
  | mapNull m = []
  | otherwise = entry_block : others
  where
    m = toBlockMap g
    entry_id = g_entry g
    -- Lazy (irrefutable) pattern: only forced when the map is non-empty,
    -- in which case the entry label is assumed to be present in the map.
    Just entry_block = mapLookup entry_id m
    -- All remaining blocks, with the entry filtered out so it is not listed twice.
    others = filter ((/= entry_id) . entryLabel) (mapElems m)
-- | Like 'toBlockListEntryFirst', but we strive to ensure that we order blocks
-- so that the false case of a conditional jumps to the next block in the output
-- list of blocks. This matches the way OldCmm blocks were output since in
-- OldCmm the false case was a fallthrough, whereas in Cmm conditional branches
-- have both true and false successors. Block ordering can make a big difference
-- in performance in the LLVM backend. Note that we rely crucially on the order
-- of successors returned for CmmCondBranch by the NonLocal instance for CmmNode
-- defined in cmm/CmmNode.hs. -GBM
toBlockListEntryFirstFalseFallthrough :: CmmGraph -> [CmmBlock]
toBlockListEntryFirstFalseFallthrough g
  | mapNull m = []
  | otherwise = dfs setEmpty [entry_block]
  where
    m = toBlockMap g
    entry_id = g_entry g
    -- Lazy pattern, only forced for non-empty graphs.
    Just entry_block = mapLookup entry_id m
    -- Depth-first walk over the work list; 'visited' stops a block from being
    -- emitted twice when it is reachable along several paths.
    dfs :: LabelSet -> [CmmBlock] -> [CmmBlock]
    dfs _ [] = []
    dfs visited (block:bs)
      | id `setMember` visited = dfs visited bs
      | otherwise = block : dfs (setInsert id visited) bs'
      where id = entryLabel block
            -- Push this block's successors ahead of the remaining work, in
            -- successor order, so the first successor is laid out next.
            bs' = foldr add_id bs (successors block)
            -- Successors that are not in the map (out-of-graph targets) are skipped.
            add_id id bs = case mapLookup id m of
                              Just b -> b : bs
                              Nothing -> bs
-- | Build a graph from an entry label and a list of blocks.
ofBlockList :: BlockId -> [CmmBlock] -> CmmGraph
ofBlockList entry blocks = CmmGraph { g_entry = entry
                                    , g_graph = GMany NothingO body NothingO }
  where body = foldr addBlock emptyBody blocks
-- | Flatten a graph body into its list of blocks (order unspecified).
bodyToBlockList :: Body CmmNode -> [CmmBlock]
bodyToBlockList body = mapElems body
-- | Map a triple of functions (one per node shape: entry, middle, last) over
-- every node of a graph.  The entry function is additionally applied to a
-- fresh 'CmmEntry' carrying the graph's entry id, so a function that relabels
-- entries also changes the graph's entry point.
mapGraphNodes :: ( CmmNode C O -> CmmNode C O
                 , CmmNode O O -> CmmNode O O
                 , CmmNode O C -> CmmNode O C)
              -> CmmGraph -> CmmGraph
mapGraphNodes funs@(mf,_,_) g =
  ofBlockMap (entryLabel $ mf $ CmmEntry (g_entry g) GlobalScope) $
  mapMap (mapBlock3' funs) $ toBlockMap g
-- | Map a single, shape-polymorphic function over every node of a graph.
mapGraphNodes1 :: (forall e x. CmmNode e x -> CmmNode e x) -> CmmGraph -> CmmGraph
mapGraphNodes1 f = modifyGraph (mapGraph f)
-- | Fold over every block of a graph, in unspecified order.
foldGraphBlocks :: (CmmBlock -> a -> a) -> a -> CmmGraph -> a
foldGraphBlocks k z g = mapFold k z $ toBlockMap g
-- | Blocks in depth-first postorder starting from the entry
-- (wrapped in an SCC annotation for cost-centre profiling).
postorderDfs :: CmmGraph -> [CmmBlock]
postorderDfs g = {-# SCC "postorderDfs" #-} postorder_dfs_from (toBlockMap g) (g_entry g)
-------------------------------------------------
-- Running dataflow analysis and/or rewrites
-- Constructing forward and backward analysis-only pass
-- | Analysis-only forward pass: the given transfer function, no rewriting.
analFwd :: DataflowLattice f -> FwdTransfer n f -> FwdPass UniqSM n f
-- | Analysis-only backward pass: the given transfer function, no rewriting.
analBwd :: DataflowLattice f -> BwdTransfer n f -> BwdPass UniqSM n f
analFwd lat xfer = analRewFwd lat xfer noFwdRewrite
analBwd lat xfer = analRewBwd lat xfer noBwdRewrite
-- Constructing forward and backward analysis + rewrite pass
-- | Package a lattice, a transfer function and a rewrite into a forward pass.
analRewFwd :: DataflowLattice f -> FwdTransfer n f
           -> FwdRewrite UniqSM n f
           -> FwdPass UniqSM n f
-- | Package a lattice, a transfer function and a rewrite into a backward pass.
analRewBwd :: DataflowLattice f
           -> BwdTransfer n f
           -> BwdRewrite UniqSM n f
           -> BwdPass UniqSM n f
analRewFwd lat xfer rew = FwdPass {fp_lattice = lat, fp_transfer = xfer, fp_rewrite = rew}
analRewBwd lat xfer rew = BwdPass {bp_lattice = lat, bp_transfer = xfer, bp_rewrite = rew}
-- Running forward and backward dataflow analysis + optional rewrite
-- | Run a forward dataflow pass (analysis plus whatever rewrites the pass
-- carries) over a graph, seeding the fact base with the given
-- (BlockId, fact) pairs.  Returns the (possibly rewritten) graph together
-- with the final fact base.
dataflowPassFwd :: NonLocal n =>
                   GenCmmGraph n -> [(BlockId, f)]
                -> FwdPass UniqSM n f
                -> UniqSM (GenCmmGraph n, BlockEnv f)
dataflowPassFwd (CmmGraph {g_entry=entry, g_graph=graph}) facts fwd = do
  -- The monadic pattern bind also matches the exit-fact shape (NothingO);
  -- note that 'graph' and 'facts' are deliberately shadowed by the results.
  (graph, facts, NothingO) <- analyzeAndRewriteFwd fwd (JustC [entry]) graph (mkFactBase (fp_lattice fwd) facts)
  return (CmmGraph {g_entry=entry, g_graph=graph}, facts)
-- | Forward dataflow analysis only (no rewriting); returns the final fact base.
dataflowAnalFwd :: NonLocal n =>
                   GenCmmGraph n -> [(BlockId, f)]
                -> FwdPass UniqSM n f
                -> BlockEnv f
dataflowAnalFwd (CmmGraph {g_entry=entry, g_graph=graph}) facts fwd =
  analyzeFwd fwd (JustC [entry]) graph (mkFactBase (fp_lattice fwd) facts)
-- | Like 'dataflowAnalFwd' but via 'analyzeFwdBlocks'; stays in 'UniqSM'
-- for interface compatibility even though the analysis itself needs no
-- uniques (see the retained, commented-out rewrite-based variant below).
dataflowAnalFwdBlocks :: NonLocal n =>
                         GenCmmGraph n -> [(BlockId, f)]
                      -> FwdPass UniqSM n f
                      -> UniqSM (BlockEnv f)
dataflowAnalFwdBlocks (CmmGraph {g_entry=entry, g_graph=graph}) facts fwd = do
  -- (graph, facts, NothingO) <- analyzeAndRewriteFwd fwd (JustC [entry]) graph (mkFactBase (fp_lattice fwd) facts)
  -- return facts
  return (analyzeFwdBlocks fwd (JustC [entry]) graph (mkFactBase (fp_lattice fwd) facts))
-- | Backward dataflow analysis only (no rewriting); returns the final fact base.
dataflowAnalBwd :: NonLocal n =>
                   GenCmmGraph n -> [(BlockId, f)]
                -> BwdPass UniqSM n f
                -> BlockEnv f
dataflowAnalBwd (CmmGraph {g_entry=entry, g_graph=graph}) facts bwd =
  analyzeBwd bwd (JustC [entry]) graph (mkFactBase (bp_lattice bwd) facts)
-- | Backward analogue of 'dataflowPassFwd': run a backward pass (analysis
-- plus rewrites) and return the possibly rewritten graph with the final
-- fact base.
dataflowPassBwd :: NonLocal n =>
                   GenCmmGraph n -> [(BlockId, f)]
                -> BwdPass UniqSM n f
                -> UniqSM (GenCmmGraph n, BlockEnv f)
dataflowPassBwd (CmmGraph {g_entry=entry, g_graph=graph}) facts bwd = do
  (graph, facts, NothingO) <- analyzeAndRewriteBwd bwd (JustC [entry]) graph (mkFactBase (bp_lattice bwd) facts)
  return (CmmGraph {g_entry=entry, g_graph=graph}, facts)
-------------------------------------------------
-- Tick utilities
-- | Extract all tick annotations from the given block.
-- The fold conses each tick onto the accumulator, producing them
-- back-to-front; the final 'reverse' restores fold order.
blockTicks :: Block CmmNode C C -> [CmmTickish]
blockTicks b = reverse $ foldBlockNodesF goStmt b []
  where goStmt :: CmmNode e x -> [CmmTickish] -> [CmmTickish]
        goStmt (CmmTick t) ts = t:ts
        goStmt _other ts = ts
| gcampax/ghc | compiler/cmm/CmmUtils.hs | bsd-3-clause | 22,166 | 0 | 18 | 4,722 | 5,765 | 3,033 | 2,732 | 361 | 3 |
module Main (main) where
import Control.Concurrent (threadDelay)
import Control.Exception (IOException, catch)
import Control.Monad (unless, when)
import Data.Aeson
import qualified Data.ByteString.Lazy as LBS
import Data.Either
import Data.List (isSuffixOf)
import Data.Maybe
import Network.HTTP.Client hiding (path)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Text.Printf (hPrintf)
import Options.Applicative
import System.Directory
import System.FilePath
import System.Environment (getEnv)
import System.Exit
import System.IO
import Resource
exceptIO :: a -> IOException -> a
-- ^Helper `catch` handler combinator: restricts a `catch` to IOExceptions
-- and simply returns the supplied fallback, ignoring the exception itself.
exceptIO fallback _ = fallback
-- ARGUMENTS for command-line parsing
-- | Top-level command line: a global dry-run flag plus one subcommand.
data Arguments = Arguments Bool Command
-- | The two subcommands: @save DEST [ID...]@ pulls monitor configuration
-- from Datadog into DEST; @load [--force] SOURCE...@ pushes local
-- configuration files to Datadog.
data Command = SaveCommand String [Int]
             | LoadCommand Bool [String]
-- | Parser for the @save@ subcommand's positional arguments.
saveCommand :: Parser Command
saveCommand =
  SaveCommand <$>
  strArgument (metavar "DEST" <>
               help "File/Directory in which to store configuration"
              ) <*>
  many (argument auto (metavar "ID..." <>
                       help "Datadog monitor ID from which to pull configuration"
                      ))
-- | Parser for the @load@ subcommand: a --force switch plus one or more paths.
loadCommand :: Parser Command
loadCommand =
  LoadCommand <$>
  switch (long "force" <> short 'f' <> help "Update monitors regardless of update status") <*>
  some (strArgument (metavar "SOURCE..." <>
                     help "File/Directory from which to read configuration"
                    ))
-- | Combine the global --dry-run switch with the subcommand parsers.
arguments :: Parser Arguments
arguments =
  Arguments <$>
  switch (long "dry-run" <> short 'd' <> help "Do not perform any changes") <*>
  subparser (
    command "save" (info (helper <*> saveCommand)
                    (progDesc "Sync Datadog configurations to the local filesystem")) <>
    command "load" (info (helper <*> loadCommand)
                    (progDesc "Sync local filesystem configurations to Datadog")))
-- | Program-level parser with --help support.
parser :: ParserInfo Arguments
parser = info (helper <*> arguments)
          (progDesc "Synchronize between Datadog and the local filesystem")
-- LOCAL FILESYSTEM LOADING functions for loading monitors
loadLocalConfigFromFile :: FilePath -> IO [Either String (FilePath,Monitor)]
-- ^Blindly attempt to read a monitor(s) from a file.
-- Tries, in order: a single new-style monitor, a list of new-style
-- monitors, a single old-style monitor, a list of old-style monitors;
-- the first decoder to succeed wins (the trailing Left is the fallback
-- error).  Files without a @.json@ suffix yield no results at all — note
-- the file is still read before the extension check.
loadLocalConfigFromFile path = do
  contents <- LBS.readFile path
  let decodedSingle = (:[]) <$> eitherDecode contents :: Either String [Monitor]
  let decodedMulti = eitherDecode contents :: Either String [Monitor]
  let decodedSingleOld = (\(OldMonitor (DatadogMonitor m)) -> [m]) <$> eitherDecode contents
  let decodedMultiOld = map (\(OldMonitor (DatadogMonitor m)) -> m) <$> eitherDecode contents
  let decoded = decodedSingle <|> decodedMulti <|> decodedSingleOld <|> decodedMultiOld <|> Left ("Could not decode to a monitor: " ++ path)
  return $ if ".json" `isSuffixOf` path
             then either ((:[]) . Left) (map (\x -> Right (path,x))) decoded
             else []
loadLocalConfigFromDir :: FilePath -> IO [Either String (FilePath,Monitor)]
-- ^Blindly attempt to read a monitor(s) from a directory.
-- Hidden entries (names starting with '.', which also covers "." and "..")
-- are skipped; the remaining entries are loaded via 'loadLocalConfig' and
-- their results concatenated.
loadLocalConfigFromDir path = do
  entries <- getDirectoryContents path
  -- Total pattern match instead of the previous partial 'head': an empty
  -- entry name (on which 'head' would crash) is simply skipped.
  let visible = [ path </> name | name@(c:_) <- entries, c /= '.' ]
  concat <$> mapM loadLocalConfig visible
loadLocalConfig :: FilePath -> IO [Either String (FilePath,Monitor)]
-- ^Attempt to read a monitor(s) from a path on the filesystem.
-- First tries the path as a regular file; on an IOException falls back to
-- reading it as a directory; if that fails too, reports a single access
-- error.  'exceptIO' restricts each fallback to IOExceptions only.
loadLocalConfig path =
  catch (loadLocalConfigFromFile path) $ exceptIO $
  catch (loadLocalConfigFromDir path) $ exceptIO $
  return [Left ("Cannot access file for reading: " ++ path)]
-- REMOTE DATADOG LOADING functions for loading monitors from Datadog
-- | Fetch monitors from Datadog: 'Just' an ID fetches that single monitor
-- (as a one-element list), 'Nothing' fetches all of them.  The (api,app)
-- pair carries the API and application keys.
loadRemoteMonitors :: Manager -> (String,String) -> Maybe Int -> IO [(Int,Monitor)]
loadRemoteMonitors manager (api,app) = maybe loadAll loadOne
  where loadOne = fmap (:[]) . getFromDatadog manager (api,app)
        loadAll = getAllFromDatadog manager (api,app)
loadRemoteConfig :: Manager -> (String,String) -> [Int] -> IO (Either String [(Int,Monitor)])
-- ^Attempt to load monitors from Datadog.
-- An empty ID list means "fetch every monitor"; otherwise each listed ID
-- is fetched individually and the results combined.  Any HTTP failure
-- turns the whole result into a Left with a descriptive message.
loadRemoteConfig manager (api,app) [] =
  catch (Right <$> loadRemoteMonitors manager (api,app) Nothing)
        (\e -> return (Left ("Failure loading all monitors: " ++ show (e :: HttpException))))
loadRemoteConfig manager (api,app) xs = do
  -- Fetch each monitor separately so a failure names the offending ID.
  -- (The former dedicated single-ID clause mistakenly reported "Failure
  -- loading all monitors"; routing singletons through this path fixes that.)
  lrs <- mapM (\x -> catch (Right <$> loadRemoteMonitors manager (api,app) (Just x))
                           (\e -> return (Left ("Failure loading monitor " ++ show x ++ ": " ++ show (e :: HttpException))))) xs
  return (concat <$> sequence lrs)
-- LOADING FUNCTIONS
gatherMonitors :: Manager -> (String,String) -> [FilePath] -> [Int] -> IO ([(String,Monitor)],[(Int,Monitor)])
-- ^Attempt to collect all the monitors from the local filesystem and Datadog.
-- Exits the whole program (after printing diagnostics to stderr) if any
-- local file fails to parse, the remote fetch fails, or duplicate monitors
-- are found on either side.
gatherMonitors manager (api,app) localPaths remoteIDs = do
  (localErrors, localMonitors) <- (partitionEithers . concat) <$>
                                  mapM loadLocalConfig localPaths
  unless (null localErrors) (mapM_ (hPutStrLn stderr . ("ERROR: "++)) localErrors >> exitFailure)
  remoteMonitors <- either (\l -> hPutStrLn stderr ("ERROR: " ++ l) >> exitFailure) return =<<
                    loadRemoteConfig manager (api,app) remoteIDs
  -- pairSimilar (imported via Resource, presumably) yields pairs of
  -- duplicate monitors; any duplicates are fatal.
  let similarLocal = pairSimilar localMonitors
  let similarRemote = pairSimilar remoteMonitors
  mapM_ (\((ia,ra),(ib,rb)) ->
          hPrintf stderr
                  "ERROR: Monitor %s in file %s duplicates monitor %s in file %s\n"
                  (show ra)
                  ia
                  (show rb)
                  ib
        ) similarLocal
  mapM_ (\((ia,ra),(ib,rb)) ->
          hPrintf stderr
                  "ERROR: Monitor %s (%s) duplicates monitor %s (%s)\n"
                  (show ia)
                  (show ra)
                  (show ib)
                  (show rb)
        ) similarRemote
  when (length similarLocal + length similarRemote > 0) exitFailure
  return (localMonitors, remoteMonitors)
-- LOCAL FILESYSTEM SAVING functions for saving monitors
writeToPathFileDry :: FilePath -> (Maybe Int,Monitor) -> IO ()
-- ^Simulate a successful monitor file write.
-- Monitors without a Datadog ID ('Nothing') are silently skipped.
writeToPathFileDry _ (Nothing,_) = return ()
writeToPathFileDry path (Just c,monitor) =
  hPrintf stdout
          "INFO: Would have written monitor %s (%d) to file: %s\n"
          (show monitor) c path
writeToPathDirDry :: FilePath -> (Maybe Int,Monitor) -> IO ()
-- ^Simulate a successful monitor directory write (ID-less monitors skipped).
writeToPathDirDry _ (Nothing,_) = return ()
writeToPathDirDry path (Just c,monitor) =
  hPrintf stdout
          "INFO: Would have written monitor %s (%d) to new file in directory: %s\n"
          (show monitor) c path
writeToPathsDry :: [(String,[(Maybe Int,Monitor)])] -> IO Bool
-- ^Simulate writing monitors to their respective files.
-- Always reports success; the directory-vs-file choice mirrors 'writeToPaths'.
writeToPathsDry [] = return True
writeToPathsDry ((_,[]):xs) = writeToPathsDry xs
writeToPathsDry ((path,monitors):xs) = do
  isDir <- doesDirectoryExist path
  mapM_ ((if isDir then writeToPathDirDry else writeToPathFileDry) path) monitors
  writeToPathsDry xs
writeToPathFile :: FilePath -> [(Maybe Int, Monitor)] -> IO ()
-- ^Attempt to write monitors to a file.
-- May raise IOException
writeToPathFile path monitors = do
  let bytes = encodePrettyMonitor $ map snd monitors
  LBS.writeFile path (LBS.snoc bytes 10) -- append newline ('\n')
  let message = "INFO: Monitor %s (%s) written to %s\n"
  -- Only monitors that carry a Datadog ID are reported on stdout.
  mapM_ (\(mi,r) -> maybe (return ()) (\i -> hPrintf stdout message (show i) (show r) path) mi) monitors
writeToPathDir :: FilePath -> [(Maybe Int, Monitor)] -> IO Bool
-- ^Attempt to write monitors each to their own file within a directory.
-- Files are named 1.json, 2.json, ... in list order; returns False if any
-- individual write fails, but keeps writing the rest.
writeToPathDir path monitors = do
  let actionable = filter (isJust . fst) monitors
  let tryFile (c,(mi,r)) = let fpath = path </> c <.> "json"
                           in catch (writeToPathFile fpath [(mi,r)] >> return True)
                                    (exceptIO (hPutStrLn stderr ("ERROR: Could not write to file: " ++ fpath) >> return False))
  fmap and $ mapM tryFile $ zip (map show [(1::Int)..]) actionable
writeToPaths :: [(String,[(Maybe Int, Monitor)])] -> IO Bool
-- ^Attempt to write monitors to their respective files.
-- Each path is tried as a single file first, then as a directory; only
-- IOExceptions (via 'exceptIO') trigger each fallback.
writeToPaths [] = return True
writeToPaths ((_,[]):xs) = writeToPaths xs
writeToPaths ((path,monitors):xs) = do
  -- tryFile/tryDir are 'catch' partially applied to the attempted action,
  -- each awaiting its handler; they are chained together below.
  let tryFile = catch (writeToPathFile path monitors >> return True)
  let tryDir = catch (writeToPathDir path monitors)
  -- Only catch IOExceptions by using a type cast
  success <- tryFile $ exceptIO $ tryDir $ exceptIO (hPutStrLn stderr ("ERROR: Could not write to path: " ++ path) >> return False)
  (success &&) <$> writeToPaths xs
-- REMOTE DATADOG SAVING functions for saving monitors
writeToDatadogDry :: [(Maybe Int,(FilePath,Monitor))] -> IO Bool
-- ^Simulate writing monitors to Datadog (always reports success).
-- A 'Just' ID means an update of that monitor; 'Nothing' means a creation.
writeToDatadogDry [] = return True
writeToDatadogDry ((mc,(path,monitor)):xs) = do
  let createMessage = hPrintf stdout
                              "INFO: Would have created new monitor %s from %s in Datadog\n"
                              (show monitor) path
  let updateMessage c = hPrintf stdout
                                "INFO: Would have updated Datadog ID %d with monitor %s from %s\n"
                                c (show monitor) path
  maybe createMessage updateMessage mc
  writeToDatadogDry xs
writeToDatadog :: Manager -> (String,String) -> [(Maybe Int,(FilePath,Monitor))] -> IO Bool
-- ^Attempt to write monitors to Datadog, starting with a backoff factor of 1.
writeToDatadog manager (api,app) xs = writeToDatadogBackoff manager (api,app) xs 1
writeToDatadogBackoff :: Manager -> (String,String) -> [(Maybe Int,(FilePath,Monitor))] -> Int -> IO Bool
-- ^Write monitors to Datadog one at a time, retrying each failed send with a
-- linearly growing delay (wait * 256ms) and giving up on a monitor after the
-- fourth attempt.  Returns True only if every monitor was sent successfully.
writeToDatadogBackoff _ _ [] _ = return True
writeToDatadogBackoff manager (api,app) monitors@((mc,(path,monitor)):xs) wait = do
  -- Both messages are hPrintf partially applied: the remaining %d hole is
  -- filled by the monitor ID that sendToDatadog returns.
  let createMessage = hPrintf stdout
                              "INFO: Created new monitor %s from %s as %d in Datadog\n"
                              (show monitor) path
  let updateMessage c = hPrintf stdout
                                "INFO: Updated Datadog ID %d with monitor %s from %s\n"
                                c (show monitor) path
  let message = if isNothing mc then createMessage else updateMessage
  let errorMessage e = hPrintf stderr
                               "ERROR: Could not send monitor %s from %s to Datadog: %s\n"
                               (show monitor) path (show (e :: HttpException))
  -- Just True / Just False: definitive outcome; Nothing: retry this monitor.
  attempt <- catch (sendToDatadog manager (api,app) mc monitor >>= message >> return (Just True))
                   (\e -> if wait < 4
                            then return Nothing
                            else errorMessage e >> return (Just False))
  case attempt of
    -- Retry only the current monitor.  (Previously the retry path re-sent the
    -- entire remaining list inside the handler and then the continuation sent
    -- it again, duplicating every monitor after a transient failure.)
    Nothing -> threadDelay (wait * 256000) >>
               writeToDatadogBackoff manager (api,app) monitors (wait + 1)
    -- Continue with the rest, resetting the backoff via 'writeToDatadog'.
    Just success -> (success &&) <$> writeToDatadog manager (api,app) xs
-- MAIN
loadKeysFromEnv :: IO (String,String)
-- ^Read the Datadog API and application keys from the environment.
-- Throws (via 'getEnv') if DATADOG_API_KEY or DATADOG_APP_KEY is unset.
loadKeysFromEnv =
  (,) <$> getEnv "DATADOG_API_KEY" <*> getEnv "DATADOG_APP_KEY"
run :: Arguments -> IO Bool
-- ^Dispatch a parsed command line; returns True on overall success.
run (Arguments dryrun (SaveCommand localPath remoteIDs)) = do
  manager <- newManager tlsManagerSettings
  apiapp <- loadKeysFromEnv
  (localMonitors, remoteMonitors) <- gatherMonitors manager apiapp [localPath] remoteIDs
  -- Decide which remote monitors end up in which local file.
  let actions = groupToFilePath localPath remoteMonitors localMonitors
  (if dryrun then writeToPathsDry else writeToPaths) actions
run (Arguments dryrun (LoadCommand force localPaths)) = do
  manager <- newManager tlsManagerSettings
  apiapp <- loadKeysFromEnv
  -- An empty ID list makes gatherMonitors fetch every remote monitor.
  let allRemoteIDs = []
  (localMonitors, remoteMonitors) <- gatherMonitors manager apiapp localPaths allRemoteIDs
  let actions = (if force then groupToForce else groupToRemote) localMonitors remoteMonitors
  (if dryrun then writeToDatadogDry else writeToDatadog manager apiapp) actions
main :: IO ()
-- ^Parse the command line, run the chosen command, and map its Bool
-- result onto the process exit code.
main = do
  args <- execParser parser
  ok <- run args
  if ok then exitSuccess else exitFailure
| thumbtack/datadog-petshop | src/Main.hs | bsd-3-clause | 11,727 | 0 | 21 | 2,492 | 3,548 | 1,855 | 1,693 | 208 | 4 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.S3
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- A convenience module, combining all raw modules containing S3 extensions.
--
--------------------------------------------------------------------------------
module Graphics.GL.S3 (
  -- * Re-exported S3 extension modules
  module Graphics.GL.S3.S3TC
) where
import Graphics.GL.S3.S3TC
| haskell-opengl/OpenGLRaw | src/Graphics/GL/S3.hs | bsd-3-clause | 540 | 0 | 5 | 80 | 37 | 30 | 7 | 3 | 0 |
{-# LANGUAGE TupleSections #-}
module FromID (convertOutput) where
import Parser
import Output
import Control.Applicative
import Data.Maybe
-- | Look up the value for an ID in a textual table: each line of the table
-- is parsed by 'parseItemList' into a key/value pair for the 'lookup'.
fromID :: String -> String -> Maybe String
fromID tbl i = lookup i (map parseItemList $ lines tbl)
-- | Replace the ID component of an 'Output' pair with its table entry.
-- NOTE(review): 'fromJust' makes this partial — it crashes if the ID is
-- absent from the table; confirm callers guarantee membership.
convertOutput :: String -> Output -> Output
convertOutput tbl (d, i) = fromJust $ (d ,) <$> fromID tbl i
| YoshikuniJujo/forest | subprojects/schedevr/src/mkschd/FromID.hs | bsd-3-clause | 348 | 0 | 8 | 59 | 116 | 63 | 53 | 10 | 1 |
-- | This module encodes what we know about GHC, including existing/supported versions.
module HaskellCI.Compiler (
-- * Compiler version
CompilerVersion (..),
maybeGHC,
isGHCJS,
maybeGHCJS,
previewGHC,
-- ** Selectors
compilerKind,
compilerVersion,
-- * Compiler version range
CompilerRange (..),
compilerWithinRange,
invertCompilerRange,
-- * Known versions
knownGhcVersions,
knownGhcjsVersions,
-- * Showing
dispGhcVersion,
dispGhcVersionShort,
dispCabalVersion,
-- * Cabal version
correspondingCabalVersion,
-- * Misc
ghcMajVer,
) where
import HaskellCI.Prelude
import Distribution.Version (hasUpperBound, invertVersionRange, versionNumbers, withinRange)
import qualified Data.Set as S
import qualified Distribution.Pretty as C
-------------------------------------------------------------------------------
-- CompilerVersion
-------------------------------------------------------------------------------
-- | A concrete compiler to build with: GHC HEAD, a released GHC, or a GHCJS.
-- The derived 'Ord' follows constructor order ('GHCHead' sorts first).
data CompilerVersion
    = GHCHead
    | GHC Version
    | GHCJS Version
  deriving (Eq, Ord, Show)
-- | Case analysis for released GHCs: apply the function to a 'GHC' version,
-- otherwise ('GHCHead', 'GHCJS') return the default.
maybeGHC :: a -> (Version -> a) -> CompilerVersion -> a
maybeGHC def f cv = case cv of
    GHC v -> f v
    _     -> def
-- | Is this compiler a GHCJS?
isGHCJS :: CompilerVersion -> Bool
isGHCJS cv = case cv of
    GHCJS _ -> True
    _       -> False
-- | The version of a GHCJS compiler, or 'Nothing' for GHC and GHC HEAD.
maybeGHCJS :: CompilerVersion -> Maybe Version
maybeGHCJS cv = case cv of
    GHCJS v -> Just v
    _       -> Nothing
-------------------------------------------------------------------------------
-- String selectors
-------------------------------------------------------------------------------
-- | Lowercase kind of the compiler: @"ghc"@ (also for HEAD) or @"ghcjs"@.
compilerKind :: CompilerVersion -> String
compilerKind cv = case cv of
    GHCJS _ -> "ghcjs"
    _       -> "ghc"
-- | The version part rendered as a string; GHC HEAD renders as @"head"@.
compilerVersion :: CompilerVersion -> String
compilerVersion cv = case cv of
    GHCHead -> "head"
    GHC v   -> C.prettyShow v
    GHCJS v -> C.prettyShow v
-------------------------------------------------------------------------------
-- CompilerRange
-------------------------------------------------------------------------------
-- | A symbolic set of compilers: version ranges, kind filters, explicit
-- point sets, and intersections/unions thereof (interpreted by
-- 'compilerWithinRange').
data CompilerRange
    = Range VersionRange
    | RangeGHC
    | RangeGHCJS
    | RangePoints (Set CompilerVersion)
    | RangeInter CompilerRange CompilerRange
    | RangeUnion CompilerRange CompilerRange
  deriving (Show)
-- | Meet is intersection, join is union.
instance Lattice CompilerRange where
    (/\) = RangeInter
    (\/) = RangeUnion
-- | The empty set of compilers is the bottom element.
instance BoundedJoinSemiLattice CompilerRange where
    bottom = RangePoints S.empty
-- | The set of all known compilers is the top element.
instance BoundedMeetSemiLattice CompilerRange where
    top = RangePoints allCompilerVersions
-- | Membership test interpreting a 'CompilerRange' over concrete compilers.
-- Note that a plain 'Range' applies its version bounds to GHC and GHCJS
-- alike, and GHC HEAD lies inside a 'Range' iff the range has no upper bound.
compilerWithinRange :: CompilerVersion -> CompilerRange -> Bool
compilerWithinRange v (RangeInter a b) = compilerWithinRange v a /\ compilerWithinRange v b
compilerWithinRange v (RangeUnion a b) = compilerWithinRange v a \/ compilerWithinRange v b
compilerWithinRange (GHC v) (Range vr) = withinRange v vr
compilerWithinRange (GHCJS v) (Range vr) = withinRange v vr
compilerWithinRange GHCHead (Range vr) = not (hasUpperBound vr)
compilerWithinRange (GHC _) RangeGHC = True
compilerWithinRange GHCHead RangeGHC = True
compilerWithinRange (GHCJS _) RangeGHC = False
compilerWithinRange (GHC _) RangeGHCJS = False
compilerWithinRange GHCHead RangeGHCJS = False
compilerWithinRange (GHCJS _) RangeGHCJS = True
compilerWithinRange v (RangePoints vs) = S.member v vs
-- | Complement of a range (with respect to 'allCompilerVersions'), computed
-- structurally: De Morgan on inter/union, the GHC and GHCJS kinds swap, and
-- point sets use an explicit set difference.
invertCompilerRange :: CompilerRange -> CompilerRange
invertCompilerRange (Range vr) = Range (invertVersionRange vr)
invertCompilerRange RangeGHC = RangeGHCJS
invertCompilerRange RangeGHCJS = RangeGHC
invertCompilerRange (RangeInter a b) = RangeUnion (invertCompilerRange a) (invertCompilerRange b)
invertCompilerRange (RangeUnion a b) = RangeInter (invertCompilerRange a) (invertCompilerRange b)
invertCompilerRange (RangePoints vs) = RangePoints (S.difference allCompilerVersions vs)
-------------------------------------------------------------------------------
-- Known versions
-------------------------------------------------------------------------------
-- | Every released GHC version this tool knows about, oldest first.
-- Newly released versions must be added here before they can be selected.
knownGhcVersions :: [Version]
knownGhcVersions = fmap mkVersion
    [ [7,0,1], [7,0,2], [7,0,3], [7,0,4]
    , [7,2,1], [7,2,2]
    , [7,4,1], [7,4,2]
    , [7,6,1], [7,6,2], [7,6,3]
    , [7,8,1], [7,8,2], [7,8,3], [7,8,4]
    , [7,10,1], [7,10,2], [7,10,3]
    , [8,0,1], [8,0,2]
    , [8,2,1], [8,2,2]
    , [8,4,1], [8,4,2], [8,4,3], [8,4,4]
    , [8,6,1], [8,6,2], [8,6,3], [8,6,4], [8,6,5]
    , [8,8,1], [8,8,2], [8,8,3], [8,8,4]
    , [8,10,1], [8,10,2], [8,10,3], [8,10,4], [8,10,5]
    , [9,0,1]
    , [9,2,0,20210422] -- four components: presumably a pre-release snapshot
    ]
-- | Every known GHCJS version (major.minor only).
knownGhcjsVersions :: [Version]
knownGhcjsVersions = fmap mkVersion
    [ [8,4]
    ]
-- | Every compiler this tool knows about, GHC HEAD included.
allCompilerVersions :: Set CompilerVersion
allCompilerVersions = S.fromList $
    GHCHead : map GHC knownGhcVersions ++ map GHCJS knownGhcjsVersions
-------------------------------------------------------------------------------
-- Combinators
-------------------------------------------------------------------------------
-- | The cabal-install version to use for a compiler.
-- No preference ('Nothing') or GHC HEAD yields 'Nothing' (meaning "head");
-- GHCJS pins 3.4; released GHCs clamp the preference up to a minimum
-- (3.2 for GHC >= 8.10, otherwise 3.0).
correspondingCabalVersion
    :: Maybe Version     -- ^ Preferred Cabal Version
    -> CompilerVersion   -- ^ GHC Version
    -> Maybe Version
correspondingCabalVersion Nothing _ = Nothing
correspondingCabalVersion (Just _) GHCHead = Nothing
correspondingCabalVersion (Just _) (GHCJS _) = Just (mkVersion [3,4])
correspondingCabalVersion (Just cv) (GHC gv)
    | gv >= mkVersion [8,10] = Just $ max (mkVersion [3,2]) cv
    | otherwise = Just $ max (mkVersion [3,0]) cv
-- | Full compiler name, e.g. @ghc-8.10.4@, @ghcjs-8.4@, @ghc-head@:
-- the compiler kind and version joined with a dash.
dispGhcVersion :: CompilerVersion -> String
dispGhcVersion cv = compilerKind cv ++ "-" ++ compilerVersion cv
-- | Like 'dispGhcVersion', but released GHCs are shown as a bare version.
dispGhcVersionShort :: CompilerVersion -> String
dispGhcVersionShort cv = case cv of
    GHCHead -> "ghc-head"
    GHC v   -> C.prettyShow v
    GHCJS v -> "ghcjs-" ++ C.prettyShow v
-- | Show a cabal-install version, with 'Nothing' meaning HEAD.
dispCabalVersion :: Maybe Version -> String
dispCabalVersion Nothing  = "head"
dispCabalVersion (Just v) = C.prettyShow v
-- | Alphas, RCs and HEAD.
-- A compiler counts as a preview when it is HEAD, lies inside the given
-- head.hackage version range, or is a GHC with an odd minor component
-- (odd minors are development snapshots by GHC convention).
previewGHC
    :: VersionRange      -- ^ head.hackage range
    -> CompilerVersion
    -> Bool
previewGHC _vr GHCHead = True
previewGHC vr (GHC v) = withinRange v vr || odd (snd (ghcMajVer v))
previewGHC _vr (GHCJS _) = False
-- | The first two components of a GHC version, e.g. 8.10.4 -> (8,10).
-- Calls 'error' on versions with fewer than two components.
ghcMajVer :: Version -> (Int,Int)
ghcMajVer v
    | x:y:_ <- versionNumbers v = (x,y)
    | otherwise = error $ "panic: ghcMajVer called with " ++ show v
| hvr/multi-ghc-travis | src/HaskellCI/Compiler.hs | bsd-3-clause | 6,506 | 0 | 10 | 1,219 | 1,980 | 1,113 | 867 | 133 | 1 |
module Data.Enum.Num where
-- | 'fromEnum' generalised to any numeric result type.
fromEnum' :: (Enum a, Num b) => a -> b
fromEnum' x = fromIntegral (fromEnum x)
-- | 'toEnum' generalised to any integral argument type.
toEnum' :: (Enum a, Integral b) => b -> a
toEnum' n = toEnum (fromIntegral n)
| abbradar/MySDL | src/Data/Enum/Num.hs | bsd-3-clause | 178 | 0 | 6 | 35 | 73 | 41 | 32 | 5 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_NV_linear_color_attachment - device extension
--
-- == VK_NV_linear_color_attachment
--
-- [__Name String__]
-- @VK_NV_linear_color_attachment@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 431
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Contact__]
--
-- - sourav parmar
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_NV_linear_color_attachment] @souravpNV%0A<<Here describe the issue or question you have about the VK_NV_linear_color_attachment extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2021-12-02
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 VK_KHR_format_feature_flags2>
--
-- [__Contributors__]
--
-- - Pat Brown, NVIDIA
--
-- - Piers Daniell, NVIDIA
--
-- - Sourav Parmar, NVIDIA
--
-- == Description
--
-- This extension expands support for using
-- 'Vulkan.Core10.Enums.ImageTiling.IMAGE_TILING_LINEAR' images as color
-- attachments when all the color attachments in the render pass instance
-- have 'Vulkan.Core10.Enums.ImageTiling.IMAGE_TILING_LINEAR' tiling. This
-- extension adds a new flag bit
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV'
-- that extends the existing
-- 'Vulkan.Extensions.VK_KHR_format_feature_flags2.FormatFeatureFlagBits2KHR'
-- bits. This flag /can/ be set for renderable color formats in the
-- 'Vulkan.Extensions.VK_KHR_format_feature_flags2.FormatProperties3KHR'::@linearTilingFeatures@
-- format properties structure member. Formats with the
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV'
-- flag /may/ be used as color attachments as long as all the color
-- attachments in the render pass instance have
-- 'Vulkan.Core10.Enums.ImageTiling.IMAGE_TILING_LINEAR' tiling, and the
-- formats their images views are created with have
-- 'Vulkan.Extensions.VK_KHR_format_feature_flags2.FormatProperties3KHR'::@linearTilingFeatures@
-- which include
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV'.
-- This extension supports both dynamic rendering and traditional render
-- passes.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceLinearColorAttachmentFeaturesNV'
--
-- == New Enum Constants
--
-- - 'NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME'
--
-- - 'NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV'
--
-- If
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 VK_KHR_format_feature_flags2>
-- is supported:
--
-- - Extending
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FormatFeatureFlagBits2':
--
-- - 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV'
--
-- == Version History
--
-- - Revision 1, 2021-11-29 (sourav parmar)
--
-- - Initial draft
--
-- == See Also
--
-- 'PhysicalDeviceLinearColorAttachmentFeaturesNV'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_linear_color_attachment Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_NV_linear_color_attachment ( PhysicalDeviceLinearColorAttachmentFeaturesNV(..)
, NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION
, pattern NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION
, NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME
, pattern NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV))
-- | VkPhysicalDeviceLinearColorAttachmentFeaturesNV - Structure describing
-- whether
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#glossary Linear Color Attachment>
-- rendering is supported by the implementation
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceLinearColorAttachmentFeaturesNV' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceLinearColorAttachmentFeaturesNV' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_linear_color_attachment VK_NV_linear_color_attachment>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- NOTE: generated binding (see the module header); fixes belong in the
-- vulkan code generator, not in this file.
data PhysicalDeviceLinearColorAttachmentFeaturesNV = PhysicalDeviceLinearColorAttachmentFeaturesNV
  { -- | #features-linearColorAttachment# @linearColorAttachment@ indicates
    -- whether the implementation supports renderable
    -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#glossary Linear Color Attachment>
    linearColorAttachment :: Bool }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceLinearColorAttachmentFeaturesNV)
#endif
deriving instance Show PhysicalDeviceLinearColorAttachmentFeaturesNV
-- C-side layout written by the pokes below (24 bytes, 8-byte aligned):
--   offset 0:  sType (always the _FEATURES_NV structure type tag)
--   offset 8:  pNext pointer (always null here)
--   offset 16: VkBool32 linearColorAttachment
instance ToCStruct PhysicalDeviceLinearColorAttachmentFeaturesNV where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PhysicalDeviceLinearColorAttachmentFeaturesNV{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (linearColorAttachment))
    f
  cStructSize = 24
  cStructAlignment = 8
  -- Same layout with the payload zeroed (feature flag false).
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    f
-- | Read back from the C layout; only the payload at offset 16 is decoded
-- (sType and pNext are not inspected).
instance FromCStruct PhysicalDeviceLinearColorAttachmentFeaturesNV where
  peekCStruct p = do
    linearColorAttachment <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
    pure $ PhysicalDeviceLinearColorAttachmentFeaturesNV
             (bool32ToBool linearColorAttachment)
-- | 'Storable' in terms of the CStruct marshalling above.
instance Storable PhysicalDeviceLinearColorAttachmentFeaturesNV where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
-- | All-zero value: the feature flag disabled.
instance Zero PhysicalDeviceLinearColorAttachmentFeaturesNV where
  zero = PhysicalDeviceLinearColorAttachmentFeaturesNV
           zero
type NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION = 1
-- | Specification version of the @VK_NV_linear_color_attachment@ extension.
pattern NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION :: forall a . Integral a => a
pattern NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION = 1
type NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME = "VK_NV_linear_color_attachment"
-- | Name string of the @VK_NV_linear_color_attachment@ extension, as
-- reported by the Vulkan loader.
pattern NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME = "VK_NV_linear_color_attachment"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_NV_linear_color_attachment.hs | bsd-3-clause | 9,218 | 0 | 14 | 1,322 | 982 | 618 | 364 | -1 | -1 |
-- Copyright 2009 Mikael Vejdemo Johansson <mik@stanford.edu>
-- Released under a BSD license
module Math.Operad (module Math.Operad.PPrint,
module Math.Operad.MapOperad,
module Math.Operad.OrderedTree,
module Math.Operad.OperadGB,
m12_3,
m13_2,
m1_23,
m2,
m3,
yTree,
lgb,
Tree,
FreeOperad) where
import Math.Operad.OperadGB
import Math.Operad.OrderedTree
import Math.Operad.PPrint
import Math.Operad.MapOperad
-- | Decorated trees with 'Integer' labels; the basic tree type used by
-- the examples below.
type Tree = DecoratedTree Integer
-- | Elements of the free operad with labels of type @a@, 'Rational'
-- coefficients, and the 'PathPerm' monomial order.
type FreeOperad a = OperadElement a Rational PathPerm
-- ** Examples and useful predefined operad elements.
-- In all of these, @corolla 2 [1,2]@ is the binary generator m2 with
-- leaves labelled 1 and 2; composition grafts one corolla into a slot
-- of another.
-- | The element m2(m2(1,2),3)
m12_3 :: DecoratedTree Integer
m12_3 = symmetricCompose 1 [1,2,3] (corolla 2 [1,2]) (corolla 2 [1,2])
-- | The element m2(m2(1,3),2)
m13_2 :: DecoratedTree Integer
m13_2 = symmetricCompose 1 [1,3,2] (corolla 2 [1,2]) (corolla 2 [1,2])
-- | The element m2(1,m2(2,3))
m1_23 :: DecoratedTree Integer
m1_23 = symmetricCompose 2 [1,2,3] (corolla 2 [1,2]) (corolla 2 [1,2])
-- | The element m2(1,2)
m2 :: DecoratedTree Integer
m2 = corolla 2 [1,2]
-- | The element m3(1,2,3)
m3 :: DecoratedTree Integer
m3 = corolla 3 [1,2,3]
-- | The element m2(m2(1,2),m2(3,4)), built with non-symmetric composition.
yTree :: DecoratedTree Integer
yTree = nsCompose 1 (nsCompose 2 m2 m2) m2
-- The Lie operad example computation
-- | 'm12_3' lifted to a free-operad element.
lo1 :: OperadElement Integer Rational PathPerm
lo1 = oet m12_3
-- | 'm13_2' lifted to a free-operad element.
lo2 :: OperadElement Integer Rational PathPerm
lo2 = oet m13_2
-- | 'm1_23' lifted to a free-operad element.
lo3 :: OperadElement Integer Rational PathPerm
lo3 = oet m1_23
-- | The list of operad elements consisting of 'm12_3'-'m13_2'-'m1_23'. This generates the
-- ideal of relations for the operad Lie.
lgb :: [OperadElement Integer Rational PathPerm]
lgb = [lo1 - lo2 - lo3]
| Dronte/Operads | Math/Operad.hs | bsd-3-clause | 1,909 | 0 | 8 | 499 | 471 | 273 | 198 | 39 | 1 |
module Jerimum.Tests.Unit.PostgreSQL.Types.Int32ArrayTest
( tests
) where
import Jerimum.PostgreSQL.Types.Int32Array
import Jerimum.Tests.Unit.PostgreSQL.Types.Helpers
import Test.Tasty
import Test.Tasty.QuickCheck
-- | All unit tests for the Int32Array codecs, grouped under one tree.
tests :: TestTree
tests = testGroup groupName [testCborCodec, testTextCodec]
  where
    groupName = "PostgreSQL.Types.Int32Array"
-- | Round-trip property for the CBOR codec: decoding an encoded array
-- must reproduce the original value.
testCborCodec :: TestTree
testCborCodec = testGroup "cbor codec" [identityProp]
  where
    identityProp = testProperty "identity" $ \values ->
      Right values ==
        runDecoder int32ArrayDecoderV0 (runEncoder (int32ArrayEncoderV0 values))
-- | Round-trip property for the textual codec: parsing a formatted
-- array must reproduce the original value.
testTextCodec :: TestTree
testTextCodec = testGroup "text codec" [identityProp]
  where
    identityProp = testProperty "identity" $ \values ->
      Just values === parseInt32Array (formatInt32Array values)
| dgvncsz0f/nws | test/Jerimum/Tests/Unit/PostgreSQL/Types/Int32ArrayTest.hs | bsd-3-clause | 707 | 0 | 13 | 128 | 161 | 91 | 70 | 21 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module AI.Trainer
( Trainer(..)
, CostFunction
, CostFunction'
, TrainingData
, Selection
, StopCondition
, quadraticCost
, quadraticCost'
, softmax
, softmaxCost
, softmaxCost'
, minibatch
, online
, trainNTimes
, trainUntilErrorLessThan
, trainUntil
) where
import AI.Layer
import AI.Network
import AI.Neuron
import Data.List.Split (chunksOf)
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.Data (size)
import System.Random
import System.Random.Shuffle (shuffle')
-- | Trainer is a typeclass for all trainer types - a trainer will take in
-- an instance of itself, a network, a list of training data, and return a
-- new network trained on the data.
class (Network n) => Trainer a n where
  -- | Train the network on the data, partitioned by the 'Selection',
  -- returning the updated network.
  fit :: Selection -> a -> n -> [TrainingData] -> n
  -- | Score the network's error on a single training example
  -- (used for the mean error in 'networkErrorLessThan').
  evaluate :: a -> n -> TrainingData -> Double
-- | A CostFunction is used for evaluating a network's performance on a given
-- input: arguments are (expected output, actual output), result is the cost.
type CostFunction = Vector Double -> Vector Double -> Double
-- | A CostFunction' (derivative) is used in backPropagation
type CostFunction' = Vector Double -> Vector Double -> Vector Double
-- | A tuple of (input, expected output)
type TrainingData = (Vector Double, Vector Double)
-- | A selection function for performing gradient descent: partitions the
-- training set into the batches fed to each update step.
type Selection = [TrainingData] -> [[TrainingData]]
-- | A predicate (given a network, trainer, a list of training
-- data, and the number of [fit]s performed) that
-- tells the trainer to stop training
type StopCondition t n = n -> t -> [TrainingData] -> Int -> Bool
-- | The quadratic cost function: (1/2) * sum ((a - y) ^ 2)
quadraticCost :: Vector Double -> Vector Double -> Double
quadraticCost y a = sumElements $ 0.5 * (a - y) ** 2
-- | The derivative of the quadratic cost with respect to the output: a - y
quadraticCost' :: Vector Double -> Vector Double -> Vector Double
quadraticCost' y a = a - y
-- | The softmax function: exp a / sum (exp a).
-- Subtracts the maximum element before computing 'exp' to avoid
-- numerical overflow; this does not change the result.
softmax :: Vector Double -> Vector Double
softmax a = (1 / sumElements a') `scale` a'
    where a' = cmap (\ x -> exp $ x - maxElement a) a
-- | The softmax (cross-entropy) cost: - y . log (softmax a) / n
softmaxCost :: Vector Double -> Vector Double -> Double
softmaxCost y a = - (y <.> log (softmax a)) / fromIntegral (size y)
-- | The derivative of the softmax cost function: (a - y) / n
softmaxCost' :: Vector Double -> Vector Double -> Vector Double
softmaxCost' y a = (1.0 / fromIntegral (size y)) `scale` (a - y)
-- | The minibatch function becomes a Selection when partially applied
-- with the minibatch size (the last chunk may be shorter).
minibatch :: Int -> [TrainingData] -> [[TrainingData]]
minibatch = chunksOf
-- | Online training: every example is its own batch of size one.
online :: [TrainingData] -> [[TrainingData]]
online = minibatch 1
-- | This function returns true if the error of the network is less than
-- a given error value, given a network, a trainer, a list of
-- training data, and a counter (should start with 0).
-- The iteration counter is ignored; only the mean per-example error
-- matters.  NOTE(review): divides by @length errors@ — an empty training
-- list would yield NaN; confirm callers never pass one.
networkErrorLessThan :: (Trainer t n) => Double -> n -> t -> [TrainingData] -> Int -> Bool
networkErrorLessThan err network trainer dat _ = meanError < err
    where meanError = sum errors / fromIntegral (length errors)
          errors = map (evaluate trainer network) dat
-- | Train the network on the training data exactly @n@ times, shuffling
-- the data between passes (delegates to 'trainUntil' with a stop
-- condition that fires once the iteration count reaches @n@).
trainNTimes :: (Trainer t n, RandomGen g) => g -> n -> t -> Selection -> [TrainingData] -> Int -> n
trainNTimes g network trainer s dat n =
    trainUntil g network trainer s dat stopAfterN 0
  where
    stopAfterN _ _ _ iteration = iteration == n
-- | Given a network, a trainer, a list of training data,
-- and an error value, this function trains the network with the list of
-- training data until the error of the network (calculated
-- by averaging the errors of each training data) is less than
-- the given error value
trainUntilErrorLessThan :: (Trainer t n, RandomGen g) => g -> n -> t -> Selection -> [TrainingData] -> Double -> n
trainUntilErrorLessThan g network trainer s dat err =
    trainUntil g network trainer s dat (networkErrorLessThan err) 0
-- | This function trains a network until a given StopCondition
-- is satisfied.  Each pass: check the stop condition, fit one round,
-- reshuffle the data with a fresh generator, and recurse with the
-- iteration counter incremented.  The generator is 'split' so the
-- shuffle and the recursive call use independent streams.
trainUntil :: (Trainer t n, RandomGen g) => g -> n -> t -> Selection -> [TrainingData] -> StopCondition t n -> Int -> n
trainUntil g network trainer s dat completion n =
  if completion network trainer dat n
    then network
    else trainUntil g' network' trainer s (shuffle' dat (length dat) g'') completion (n+1)
  where network' = fit s trainer network dat
        (g', g'') = split g
| jbarrow/LambdaNet | AI/Trainer.hs | mit | 4,875 | 0 | 12 | 1,033 | 1,105 | 604 | 501 | 68 | 2 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Dampf.Postgres.Connect where
import Control.Lens
import Control.Monad.Catch (MonadThrow, throwM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Text (Text)
import qualified Data.Text as T
import Database.PostgreSQL.Simple
import Dampf.Types
-- | Fetch the configured password for the given database user.
-- Fails loudly (via 'error') when the user has no entry in @.dampf.cfg@,
-- matching the original behaviour.
lookupPassword :: (HasPostgresConfig c) => Text -> c -> String
lookupPassword name cfg =
    maybe missingUser T.unpack (cfg ^. users . at name)
  where
    missingUser = error $ "no password for user "++ T.unpack name ++ " in .dampf.cfg"
-- | Connect to the server-level @postgres@ database as the given user
-- (for administrative operations that need superuser rights).
createSuperUserConn :: (MonadIO m, MonadThrow m)
    => Text -> DampfT m Connection
createSuperUserConn name = createConn name spec
  where
    spec = DatabaseSpec Nothing "postgres" []
-- | Like 'createSuperUserConn' but fixed to the @postgres@ user itself.
createSuperUserPostgresConn :: (MonadIO m, MonadThrow m)
    => DampfT m Connection
createSuperUserPostgresConn = createConn "postgres" spec
  where
    spec = DatabaseSpec Nothing "postgres" []
-- | Open a connection to database @name@ using the host/port from the
-- app's postgres config and the user/password from the 'DatabaseSpec'.
-- Throws 'NoDatabaseServer' when no postgres section is configured.
createConn :: (MonadIO m, MonadThrow m)
    => Text -> DatabaseSpec -> DampfT m Connection
createConn name spec = view (config . postgres) >>= \case
    Just s -> liftIO $ connect ConnectInfo
        { connectHost = s ^. host . to T.unpack
        , connectPort = s ^. port . to fromIntegral
        , connectUser = spec ^. user . to T.unpack
        , connectPassword = lookupPassword (spec ^. user) s
        , connectDatabase = T.unpack name
        }
    Nothing -> throwM NoDatabaseServer
-- | Close a previously opened database connection.
destroyConn :: (MonadIO m) => Connection -> DampfT m ()
destroyConn conn = liftIO (close conn)
| diffusionkinetics/open | dampf/lib/Dampf/Postgres/Connect.hs | mit | 1,686 | 0 | 15 | 440 | 470 | 250 | 220 | -1 | -1 |
{-# LANGUAGE OverloadedStrings,ExtendedDefaultRules, QuasiQuotes #-}
module Main where
import Lucid
import Lucid.Html5
import Graphics.Plotly hiding (text)
import Graphics.Plotly.Lucid
import qualified Graphics.Plotly.GoG as GG
import Lucid.Bootstrap
import Data.Monoid ((<>))
import NeatInterpolation
import Data.Aeson
import Lens.Micro
import Numeric.Datasets.Iris
import Data.Text (Text)
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
-- | Name/value pairs rendered by the horizontal bar chart example.
hbarData :: [(Text, Double)]
hbarData = zip ["Simon", "Joe", "Dorothy"] [14.5, 18.9, 16.2]
-- | (x, y) sample points shared by the scatter examples.
pointsData :: [(Double, Double)]
pointsData = [(1, 500), (2, 3000), (3, 700), (4, 200)]
-- | A Grammar-of-Graphics style scatter trace: x is taken from 'fst'
-- and y from 'snd' of each pair in 'pointsData'.
myTrace
  = points (aes & x .~ fst
                & y .~ snd) pointsData
-- | Generate the static example gallery at @../docs/index.html@.
-- Each section pairs a rendered Plotly chart with the (quasiquoted)
-- Haskell source text that produces it.  The page pulls Bootstrap CSS
-- and the Plotly.js CDN bundle in from its head.
main = T.writeFile "../docs/index.html" $ renderText $ doctypehtml_ $ do
  head_ $ do meta_ [charset_ "utf-8"]
             link_ [rel_ "stylesheet",
                    href_ "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css"]
             link_ [rel_ "stylesheet",
                    href_ "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap-theme.min.css"]
             plotlyCDN
  body_ $ do container_ $ do
               -- Page introduction.
               row_ $ do
                 h2_ "Plotly.js Haskell bindings examples"
                 p_ $ "This web pages shows plots generated with the plotlyhs packages, along with the " <>
                      "Haskell code that generated the plots. To use the plots on the generate page, "<>
                      "the Plotly.js source code should first be including by adding this tag to your HTML "<>
                      "header:"
                 pre_ $ code_ $ "<script src=\"https://cdn.plot.ly/plotly-latest.min.js\"></script>"
                 p_ $ "Alternatively, this tag can be included in an HTML page using the "<>code_ "plotlyCDN"<>
                      " function in "<>code_ "Graphics.Plotly.Lucid"<>" (when using Lucid) or "<>
                      code_ "Graphics.Plotly.Blaze" <> " (when using blaze-html)."
               row_ $ h4_ "A complete & minimal example"
               -- Each example row: quasiquoted source on the left,
               -- live chart on the right.
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
{-# LANGUAGE OverloadedStrings #-}
import Lucid
import Lucid.Html5
import Graphics.Plotly
import Graphics.Plotly.Lucid
import Lens.Micro
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
main =
  T.writeFile "test.html" $ renderText $ doctypehtml_ $ do
     head_ $ do meta_ [charset_ "utf-8"]
                plotlyCDN
     body_ $ toHtml $ plotly "myDiv" [myTrace]
myTrace = scatter & x ?~ [1,2,3,4]
                  & y ?~ [500,3000,700,200] |]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "p0" [myTrace]
               row_ $ p_ $ "In the examples below, we omit all of the imports, main function, html header and focus only"<>
                    " on the "<> code_ "Plotly" <> " value (the argument to "<> code_ "toHtml"<>"). The "<>
                    code_ "Plotly" <> " value can be constructed with the function "<> code_ "plotly" <>
                    " which takes two arguments: the element id of the "<> code_ "<div>" <>
                    " for the plot (this element will be created if you call toHtml on the "<> code_ "Plotly" <>
                    " value) and a list of traces."
               row_ $ h4_ "A simple plot"
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
let myTrace
      = scatter & x ?~ [1,2,3,4]
                & y ?~ [500,3000,700,200]
in plotly "div1" [myTrace] |]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "p1" [myTrace]
               row_ $ p_ $ "Note that Plotlyjs considers a line plot to be a kind of scatter plot, which may not "<>
                    "be the terminology you are used to. The above is quite unbearably sized & padded for this tutorial, so let's fix the " <>
                    "margins and the plot height"
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
let myTrace
      = scatter & x ?~ [1,2,3,4]
                & y ?~ [500,3000,700,200]
in plotly "div2" [myTrace] & layout . margin ?~ thinMargins
                           & layout . height ?~ 300 |]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "p2" [myTrace] & layout . margin ?~ thinMargins
                                                                          & layout . height ?~ 300
               row_ $ h4_ "Lines and Markers"
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
let myTrace
      = scatter & x ?~ [1,2,3,4]
                & y ?~ [500,3000,700,200]
in plotly "div3"
     [myTrace & mode ?~ [Markers]]
     & layout . margin ?~ thinMargins
     & layout . height ?~ 300 |]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "div3" [myTrace & mode ?~ [Markers]]
                                 & layout . margin ?~ thinMargins
                                 & layout . height ?~ 300
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
let myTrace
      = scatter & x ?~ [1,2,3,4]
                & y ?~ [500,3000,700,200]
in plotly "div4"
     [myTrace & mode ?~ [Lines]]
     & layout . margin ?~ thinMargins
     & layout . height ?~ 300 |]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "div4" [myTrace & mode ?~ [Lines]]
                                 & layout . margin ?~ thinMargins
                                 & layout . height ?~ 300
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
let myTrace
      = scatter & x ?~ [1,2,3,4]
                & y ?~ [500,3000,700,200]
in plotly "div5"
     [myTrace & mode ?~ [Lines,Markers]]
     & layout . margin ?~ thinMargins
     & layout . height ?~ 300 |]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "div5" [myTrace & mode ?~ [Lines,Markers]]
                                 & layout . margin ?~ thinMargins
                                 & layout . height ?~ 300
               row_ $ h4_ "Iris plots"
               row_ $ p_ "This plot uses the iris value from the datasets package"
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
plotly "div6"
  [scatter & x ?~ map sepalLength iris
           & y ?~ map sepalWidth iris
           & marker ?~
              (defMarker
                & markercolor ?~ catColors (map irisClass
                                                iris))
           & mode ?~ [Markers]]
  & layout . margin ?~ thinMargins
  & layout . height ?~ 300 |]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "div6"
                        [scatter & x ?~ map (toJSON . sepalLength) iris
                                 & y ?~ map (toJSON . sepalWidth) iris
                                 & marker ?~ (defMarker & markercolor ?~ (catColors (map irisClass iris)))
                                 & mode ?~ [Markers]]
                        & layout . margin ?~ thinMargins
                        & layout . height ?~ 300
               row_ $ p_ "Grammar of Graphics-style interface"
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
plotly "div6gg"
  [points (aes & x .~ sepalLength
               & y .~ sepalWidth
               & color ?~ fromEnum . irisClass) iris]
  & layout . margin ?~ thinMargins
  & layout . height ?~ 300 |]
                 {-let irisClassN Setosa = 1
                     irisClassN Versicolor = 2
                     irisClassN Virginica = 3 -}
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "div6gg"
                        [GG.points (GG.aes & GG.x .~ sepalLength
                                           & GG.y .~ sepalWidth
                                           & GG.color ?~ fromEnum . irisClass) iris]
                        & layout . margin ?~ thinMargins
                        & layout . height ?~ 300
               row_ $ h4_ "Horizontal bar plots"
               row_ $ do
                 div_ [class_ "col-md-6"] $ pre_ $ code_ $ toHtml
                   [text|
let hbarData :: [(Text, Double)]
    hbarData = [("Simon", 14.5), ("Joe", 18.9), ("Dorothy", 16.2)]
in plotly "div7"
     [bars & ytext ?~ map fst hbarData
           & x ?~ map snd hbarData
           & orientation ?~ Horizontal]
     & layout . margin ?~ thinMargins
     & layout . height ?~ 300|]
                 div_ [class_ "col-md-6"] $ toHtml $ plotly "div7"
                        [bars & y ?~ map (toJSON . fst) hbarData
                              & x ?~ map (toJSON . snd) hbarData
                              & orientation ?~ Horizontal]
                        & layout . margin ?~ thinMargins
                        & layout . height ?~ 300
             -- jQuery and Bootstrap JS, loaded at the end of <body>.
             script_ [src_ "https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"] ""
             script_ [src_ "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"] ""
| diffusionkinetics/open | plotlyhs/gendoc/GenDoc.hs | mit | 11,095 | 0 | 35 | 5,429 | 1,579 | 794 | 785 | -1 | -1 |
{-# LANGUAGE DeriveAnyClass #-}
module Commands.Plugins.Spiros.Emacs.Types where
import Commands.Plugins.Spiros.Extra.Types
import Commands.Plugins.Spiros.Phrase.Types
-- | An Emacs Lisp s-expression, kept as raw source text.
type ElispSexp = String
-- -- type ElispSexp = Sexp String String
-- | An Emacs action: either call a function or evaluate an expression,
-- each optionally taking a dictated 'Phrase' as argument.
data Emacs
 = EmacsFunction (Maybe Phrase)
 | EmacsExpression (Maybe Phrase)
 -- TODO | EmacsKeyriff Keyriff
 deriving (Show,Read,Eq,Ord,Generic,Data,NFData)
| sboosali/commands-spiros | config/Commands/Plugins/Spiros/Emacs/Types.hs | gpl-2.0 | 418 | 0 | 8 | 70 | 91 | 56 | 35 | 9 | 0 |
{- Copyright 2013 Gabriel Gonzalez
This file is part of the Suns Search Engine
The Suns Search Engine is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or (at your
option) any later version.
The Suns Search Engine is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
the Suns Search Engine. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE ScopedTypeVariables #-}
{-| Fast serialization and deserialization specialized to 'Handle's. The \'H\'
in 'HSerialize' stands for 'Handle'
I do not use @binary@ or @cereal@ because they were both really slow the
last time I checked. So I wrote a serialization interface specialized to
interacting with 'Handle's in 'IO'. This improved performance 5-fold the
last time I measured it, but those libraries may have improved since then si
this solution may be obsolete.
-}
module HSerialize.Core (
-- * The HSerialize Class
HSerialize(get, put)
-- * Encoding and Decoding
, encodeFile
, decodeFile
-- * Default Storable Instance
, Store(Store, unStore)
) where
import Control.Applicative ((<$>), (<*>))
import Control.Exception (throwIO)
import Control.Monad (replicateM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Reader (ReaderT(ReaderT, runReaderT))
import Data.Array (Array, Ix, listArray, bounds, elems)
import System.IO as IO
import Foreign.Safe as F
-- | Types that can be read from / written to an open 'IO.Handle'.
-- 'get' and 'put' must agree on the wire format for each type.
class HSerialize t where
    get :: ReaderT IO.Handle IO t
    put :: t -> ReaderT IO.Handle IO ()
-- | Serialize a value to the given file, truncating any existing content.
encodeFile :: (HSerialize t) => FilePath -> t -> IO ()
encodeFile file t = IO.withFile file IO.WriteMode $ runReaderT (put t)
-- | Deserialize a value from the given file.
-- ('IO.ReadMode' is now written qualified, matching 'encodeFile''s use
-- of 'IO.WriteMode' — a consistency-only change.)
decodeFile :: (HSerialize t) => FilePath -> IO t
decodeFile file = IO.withFile file IO.ReadMode $ runReaderT get
-- | Wrapper giving any 'F.Storable' type an 'HSerialize' instance that
-- writes the raw in-memory representation.
newtype Store a = Store { unStore :: a }
-- NOTE(review): this writes native machine representation (size and byte
-- order of the host), so the files are presumably not portable across
-- architectures — confirm before sharing serialized data.
instance (F.Storable a) => HSerialize (Store a) where
    -- Copy sizeOf a bytes straight from a temporary allocation to the handle.
    put n = ReaderT $ \h -> F.with (unStore n) $ \p ->
        IO.hPutBuf h p (F.sizeOf (undefined :: a))
    -- Read sizeOf a bytes into a scratch buffer and peek the value out.
    -- NOTE(review): the byte count returned by hGetBuf is discarded, so a
    -- short read at EOF is not detected here.
    get = ReaderT $ \h -> alloca $ \p -> do
        _ <- IO.hGetBuf h p (F.sizeOf (undefined :: a))
        fmap Store $ F.peek p
-- The primitive instances below all route through the 'Store' wrapper,
-- i.e. they use the raw Storable representation of each type.
instance HSerialize Char where
    put = put . Store
    get = fmap unStore get
instance HSerialize Word8 where
    put = put . Store
    get = fmap unStore get
instance HSerialize Int where
    put = put . Store
    get = fmap unStore get
-- Wire format for lists, written as a sequence of chunks:
--   tag 0 (Word8)            -> end of list
--   tag 1, length n (Int),   -> n elements follow, then the encoding of
--      n elements               the remaining suffix (recursively)
-- Chunks hold at most 100 elements so the length prefix never requires
-- traversing the whole (possibly long) list up front.
instance (HSerialize a) => HSerialize [a] where
    put as = do
        if (null as)
            then put (0 :: Word8)
            else do
                put (1 :: Word8)
                let chunkSize = 100 :: Int
                    (prefix, suffix) = splitAt chunkSize as
                if (null suffix)
                    then put (length prefix)
                    else put chunkSize
                mapM_ put prefix
                put suffix
    get = do
        b <- get :: ReaderT IO.Handle IO Word8
        case b of
            0 -> return []
            1 -> do
                n <- get :: ReaderT IO.Handle IO Int
                prefix <- replicateM n get
                fmap (prefix ++) get
            -- Any other tag byte means the stream is corrupt.
            _ -> lift
               $ throwIO
               $ userError "HSerialize [a]: get - Invalid format"
-- Pairs are encoded as the first component followed by the second.
instance (HSerialize a, HSerialize b) => HSerialize (a, b) where
    put (x, y) = put x >> put y
    get = do
        x <- get
        y <- get
        return (x, y)
-- Triples are encoded componentwise, left to right.
instance (HSerialize a, HSerialize b, HSerialize c)
    => HSerialize (a, b, c) where
    put (x, y, z) = put x >> put y >> put z
    get = do
        x <- get
        y <- get
        z <- get
        return (x, y, z)
-- Arrays are encoded as their (bounds, element list) pair and rebuilt
-- with 'listArray' on the way in.
instance (Ix i, HSerialize i, HSerialize e) => HSerialize (Array i e) where
    put arr = put (bounds arr, elems arr)
    get = fmap (uncurry listArray) get
| Gabriel439/suns-search | src/HSerialize/Core.hs | gpl-3.0 | 4,053 | 0 | 16 | 1,175 | 1,021 | 543 | 478 | 81 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Category.TypedGraphRule.Limit where
import Abstract.Category
import Abstract.Category.Limit
import Abstract.Rewriting.DPO
import Category.TypedGraph
import Category.TypedGraph.CommutingSquares
import Category.TypedGraphRule.Category
instance Complete (RuleMorphism a b) where
  -- TODO: implement missing methods of Config for RuleMorphism
  -- Pullback of rule morphisms, computed componentwise on the L, K and R
  -- graphs and glued back into a rule X with commuting l/r morphisms:
  -- @
  --        g'
  --     X──────▶A
  --     │       │
  --  f' │       │ f
  --     ▼       ▼
  --     B──────▶C
  --        g
  -- @
  calculatePullback (RuleMorphism fA _ fL fK fR) (RuleMorphism gB _ gL gK gR) = (f',g')
    where
      -- Componentwise pullbacks on the three graphs of the rules.
      (f'L, g'L) = calculatePullback fL gL
      (f'K, g'K) = calculatePullback fK gK
      (f'R, g'R) = calculatePullback fR gR
      -- Left/right morphisms of the pullback rule, obtained as the unique
      -- morphisms making the squares commute.
      l = commutingMorphism
            (leftMorphism gB <&> f'K) f'L
            (leftMorphism fA <&> g'K) g'L
      r = commutingMorphism
            (rightMorphism gB <&> f'K) f'R
            (rightMorphism fA <&> g'K) g'R
      x = Production l r []
      f' = RuleMorphism x gB f'L f'K f'R
      g' = RuleMorphism x fA g'L g'K g'R
instance Cocomplete (RuleMorphism a b) where
  -- Coequalizer of two parallel rule morphisms, computed componentwise
  -- on L, K and R and glued into a new rule.
  calculateCoequalizer (RuleMorphism _ ruleB fL fK fR) (RuleMorphism _ _ gL gK gR) =
    RuleMorphism ruleB coequalizerRule eqL eqK eqR
    where
      eqL = coequalizerTGM fL gL
      eqK = coequalizerTGM fK gK
      eqR = coequalizerTGM fR gR
      -- NOTE(review): both argument pairs passed to
      -- commutingMorphismSameDomain are identical — confirm this is the
      -- intended use of that helper rather than a copy/paste slip.
      l = commutingMorphismSameDomain eqK (eqL <&> leftMorphism ruleB) eqK (eqL <&> leftMorphism ruleB)
      r = commutingMorphismSameDomain eqK (eqR <&> rightMorphism ruleB) eqK (eqR <&> rightMorphism ruleB)
      coequalizerRule = Production l r []
  calculateNCoequalizer = error "calculateNCoequalizer for Second-order not implemented"
  -- Binary coproduct of rules: coproducts of the component graphs,
  -- with induced left/right morphisms.
  calculateCoproduct rule1 rule2 = (m1,m2)
    where
      (l1,l2) = calculateCoproduct (leftObject rule1) (leftObject rule2)
      (k1,k2) = calculateCoproduct (interfaceObject rule1) (interfaceObject rule2)
      (r1,r2) = calculateCoproduct (rightObject rule1) (rightObject rule2)
      l = commutingMorphismSameDomain k1 (l1 <&> leftMorphism rule1) k2 (l2 <&> leftMorphism rule2)
      r = commutingMorphismSameDomain k1 (r1 <&> rightMorphism rule1) k2 (r2 <&> rightMorphism rule2)
      coproductRule = Production l r []
      m1 = RuleMorphism rule1 coproductRule l1 k1 r1
      m2 = RuleMorphism rule2 coproductRule l2 k2 r2
  calculateNCoproduct = error "calculateNCoproduct for Second-order not implemented"
  -- The initial rule is built over the initial object of the left graph.
  initialObject = initialRule . leftMorphism . domain
  morphismFromInitialTo rule =
    RuleMorphism (initialRule $ leftMorphism rule) rule
      (morphismFromInitialTo $ leftObject rule)
      (morphismFromInitialTo $ interfaceObject rule)
      (morphismFromInitialTo $ rightObject rule)
-- | The rule whose left and right morphisms are both the identity on
-- the initial object of the given morphism's category.
initialRule :: TypedGraphMorphism a b -> TypedGraphRule a b
initialRule morph =
    let emptyIdentity = identity (initialObject morph)
    in  Production emptyIdentity emptyIdentity []
-- | Coequalizer of two typed-graph morphisms; a monomorphic alias for
-- the generic 'calculateCoequalizer'.
coequalizerTGM :: TypedGraphMorphism a b -> TypedGraphMorphism a b -> TypedGraphMorphism a b
coequalizerTGM = calculateCoequalizer
-- | Coproduct of the codomains of two morphisms.
coproductCod :: TypedGraphMorphism a b -> TypedGraphMorphism a b -> (TypedGraphMorphism a b, TypedGraphMorphism a b)
coproductCod a b = calculateCoproduct (codomain a) (codomain b)
-- | Coproduct of the domains of two morphisms.
coproductDom :: TypedGraphMorphism a b -> TypedGraphMorphism a b -> (TypedGraphMorphism a b, TypedGraphMorphism a b)
coproductDom a b = calculateCoproduct (domain a) (domain b)
| Verites/verigraph | src/library/Category/TypedGraphRule/Limit.hs | apache-2.0 | 3,537 | 0 | 11 | 817 | 983 | 505 | 478 | 58 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.