code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
#!/usr/bin/env runhaskell
import PGF
import System.Environment
import Control.Applicative
import Data.Maybe
import Data.List
-- Type aliases documenting the roles of plain strings in the stream API.
type Sentence = String
-- NOTE(review): 'Word' clashes with the 'Word' type exported by Prelude
-- on modern base (GHC >= 7.8); consider renaming to avoid the conflict.
type Word = String
-- | Entry point.  With two arguments (PGF file, language) start an
-- interactive read/parse loop; with three arguments (PGF file,
-- language, sentence) disambiguate the sentence once and print it.
-- Fix: the original called 'getArgs' a second time inside each branch
-- and pattern-crashed on any other argument count; we match the already
-- read argument list and print a usage hint otherwise.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [p, l] -> do
      pgf <- readPGF p
      -- 'fromMaybe id': if the language is unknown, echo input unchanged
      loop $ fromMaybe id $ startStuff pgf <$> readLanguage l
    [p, l, s] -> do
      pgf <- readPGF p
      putStrLn $ fromMaybe "" $ startStuff pgf <$> readLanguage l <*> pure s
    _ -> putStrLn "usage: <pgf-file> <language> [sentence]"
-- | Extract surface forms from an Apertium-style stream.  The flag
-- tracks whether we are inside a word: '^' enters a word, '/' leaves it
-- and emits a separating space; characters are kept only inside words.
getWord :: Bool -> String -> String
getWord inWord input = case input of
  '^' : rest -> getWord True rest
  '/' : rest -> ' ' : getWord False rest
  c : rest
    | inWord    -> c : getWord True rest
    | otherwise -> getWord False rest
  ""           -> ""
-- | Split an Apertium stream into its surface-form words.
parseSentence :: String -> [String]
parseSentence input = words (getWord False input)
-- | Interactive loop: read a line, print its parse, repeat.
-- The literal input "quit" says goodbye and terminates.
loop :: (String -> String) -> IO ()
loop parse = do
  line <- getLine
  case line of
    "quit" -> putStrLn "bye"
    _      -> putStrLn (parse line) >> loop parse
-- | All morphological analyses of a word in the given language.
getMorph :: PGF -> Language -> Word -> [(Lemma, Analysis)]
getMorph p l = lookupMorpho (buildMorpho p l)
-- | Open an Apertium-style stream unit for one word: "^word" followed
-- by its analyses, or "^word/*word$ " when the morphology has no entry
-- (the "/*" marks an unknown word).
-- Fix: replaced the non-idiomatic (and O(n)) 'length morph > 0' test
-- with 'null'.
initStream :: PGF -> Language -> Sentence -> [Tree] -> Morpho -> Word -> String
initStream p l orig pt m s
  | null morph = "^" ++ s ++ "/*" ++ s ++ "$ "
  | otherwise  = "^" ++ s ++ buildStream p l morph orig pt
  where morph = lookupMorpho m s
-- | Emit "/lemma<tag1><tag2>..." for every analysis that is valid with
-- respect to the parse trees, closing the unit with "$ ".
buildStream :: PGF -> Language -> [(Lemma, Analysis)] -> Sentence -> [Tree] -> String
buildStream _ _ [] _ _ = "$ "
buildStream p ln ((l, a) : rest) s pt =
  let restStream = buildStream p ln rest s pt
  in if isValid pt l
       then "/" ++ show l ++ buildTags (words a) ++ restStream
       else restStream
-- | Does the lemma occur in the first parse tree that is not a string
-- literal node ("LStr")?
-- Fix: the original used partial 'head' and crashed when every tree was
-- an "LStr" node (or the list was empty); now that case yields False.
isValid :: [Tree] -> Lemma -> Bool
isValid pt l =
  case filter (not . isInfixOf "LStr" . show) pt of
    []      -> False
    (t : _) -> isInfixOf (show l) (show t)
-- | Wrap every tag in angle brackets and concatenate: ["n","sg"]
-- becomes "<n><sg>".
buildTags :: [String] -> String
buildTags = concatMap (\ tag -> "<" ++ tag ++ ">")
-- | Build the full Apertium stream for a sentence: one stream unit per
-- whitespace-separated word.
-- Fix: 'foldl (\acc x -> acc ++ x) "" (map f xs)' is just 'concatMap f xs'
-- (and avoids quadratic left-nested appends); dead commented-out
-- variant removed.
parseString :: PGF -> Language -> [Tree] -> Morpho -> Sentence -> String
parseString p l pt m s = concatMap (initStream p l s pt m) (words s)
-- | Parse the sentence, build the morphology, and produce the annotated
-- stream for it.
startStuff :: PGF -> Language -> Sentence -> String
startStuff p l s = parseString p l trees morpho s
  where
    trees  = parse p l (startCat p) s
    morpho = buildMorpho p l
| vinit-ivar/apertium-gf | GhettoDisambiguator.hs | gpl-2.0 | 2,312 | 0 | 15 | 589 | 1,017 | 515 | 502 | 52 | 2 |
{- |
Module : ./Static/WACocone.hs
Description : heterogeneous signatures colimits approximations
Copyright : (c) Mihai Codescu, and Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : mcodescu@informatik.uni-bremen.de
Stability : provisional
Portability : non-portable
Heterogeneous version of weakly_amalgamable_cocones.
Needs some improvements (see TO DO).
-}
module Static.WACocone (isConnected,
isAcyclic,
isThin,
removeIdentities,
hetWeakAmalgCocone,
initDescList,
dijkstra,
buildStrMorphisms,
weakly_amalgamable_colimit
) where
import Control.Monad
import Data.List (nub)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Graph.Inductive.Graph as Graph
import Common.Lib.Graph as Tree
import Common.ExtSign
import Common.Result
import Common.LogicT
import Logic.Logic
import Logic.Comorphism
import Logic.Modification
import Logic.Grothendieck
import Logic.Coerce
import Static.GTheory
import Comorphisms.LogicGraph
-- | Compute a colimit of the signature diagram.  The amalgamability
-- check is currently bypassed (see note below), so this simply
-- delegates to 'signature_colimit'.
-- Fix: removed the redundant '(sig, sink) <- ...; return (sig, sink)'
-- re-wrapping of an already-monadic value.
weakly_amalgamable_colimit :: StaticAnalysis lid
        basic_spec sentence symb_items symb_map_items
        sign morphism symbol raw_symbol
        => lid -> Tree.Gr sign (Int, morphism)
        -> Result (sign, Map.Map Int morphism)
weakly_amalgamable_colimit l diag = signature_colimit l diag
{- until amalgamability check is fixed, just return a colimit
get (commented out) code from rev:11881 -}
-- | Checks whether a graph is connected (via BFS over 'neighbors',
-- i.e. ignoring edge direction).
-- Fix: the original took 'head' of the node list unconditionally and
-- crashed on the empty graph; an empty graph is now vacuously connected.
isConnected :: Gr a b -> Bool
isConnected graph
  | null nodeList = True
  | otherwise =
      -- connected iff BFS from an arbitrary root marks every node False
      not $ any (\ x -> Map.findWithDefault (error "iscon 2") x
                          (bfs [root] availNodes)) nodeList
  where
    nodeList = nodes graph
    root = head nodeList          -- safe: guarded by the null check
    -- True = not yet visited
    availNodes = Map.fromList $ zip nodeList (repeat True)
    bfs queue avail = case queue of
      [] -> avail
      n : ns -> let
          avail1 = Map.insert n False avail
          -- enqueue only neighbours not yet visited
          nbs = filter (\ x -> Map.findWithDefault (error "isconnected") x avail)
                  $ neighbors graph n
        in bfs (ns ++ nbs) avail1
-- | A graph is thin iff there is at most one path between any two
-- nodes; delegates to 'checkThinness' starting from an empty path map.
isThin :: Gr a b -> Bool
isThin graph = checkThinness Map.empty (edges graph)
-- | Helper for 'isThin': process edges one by one while recording, for
-- every node pair, that a path between them is known.  Discovering a
-- second path (or a duplicate edge) between the same pair refutes
-- thinness.
checkThinness :: Map.Map Edge Int -> [Edge] -> Bool
checkThinness paths eList =
  case eList of
    [] -> True
    (sn, tn) : eList' ->
      -- an edge duplicating an already-known (sn, tn) path is a violation
      (sn, tn) `notElem` Map.keys paths &&
      -- multiple paths between (sn, tn)
      let -- all recorded paths that end in sn; each extends to tn via
          -- the new edge
          pathsToS = filter (\ (_, y) -> y == sn) $ Map.keys paths
          -- record the extended paths; Nothing signals a second path
          -- between some pair (x, dest)
          updatePaths pathF dest pList =
            case pList of
              [] -> Just pathF
              (x, _) : pList' ->
                if (x, dest) `elem` Map.keys pathF then Nothing
                else updatePaths (Map.insert (x, dest) 1 pathF) dest pList'
      in case updatePaths paths tn pathsToS of
           Nothing -> False
           Just paths' -> checkThinness (Map.insert (sn, tn) 1 paths') eList'
-- | Checks whether a graph is acyclic using Kahn's algorithm:
-- repeatedly remove nodes with no incoming edges; the graph is acyclic
-- iff no edges are left once the queue of such nodes is exhausted.
isAcyclic :: (Eq b) => Gr a b -> Bool
isAcyclic graph = let
  -- nodes of gr with in-degree 0 among the given candidates
  filterIns gr = filter (\ x -> indeg gr x == 0)
  queue = filterIns graph $ nodes graph
  topologicalSort q gr = case q of
    -- leftover edges witness a cycle
    [] -> null $ edges gr
    n : ns -> let
      oEdges = lsuc gr n
      -- delete all outgoing edges of n
      graph1 = foldl (flip Graph.delLEdge) gr
        $ map (\ (y, label) -> (n, y, label)) oEdges
      -- successors of n that have just become sources
      succs = filterIns graph1 $ suc gr n
      in topologicalSort (ns ++ succs) graph1
  in topologicalSort queue graph
-- | Remove all loop edges (edges whose source equals their target) from
-- a graph, keeping every node.
-- Fix: replaced the hand-rolled recursive edge insertion with the
-- idiomatic 'filter' + 'insEdges' pipeline.
removeIdentities :: Gr a b -> Gr a b
removeIdentities graph =
  insEdges properEdges $ insNodes (labNodes graph) Graph.empty
  where
    -- keep only edges between two distinct nodes
    properEdges = filter (\ (sn, tn, _) -> sn /= tn) $ labEdges graph
{- | Assigns to every node all of its proper "descendants": the
labelled nodes from which it can be reached, computed as the transitive
closure of 'pre' excluding the node itself.  (In the development-graph
orientation used here, predecessors play the role of descendants.) -}
initDescList :: (Eq a, Eq b) => Gr a b -> Map.Map Node [(Node, a)]
initDescList graph = let
  descsOf n = let
    -- direct predecessors, without self-loops
    nodeList = filter (n /=) $ pre graph n
    -- visited map: False = collected but not yet expanded
    f = Map.fromList $ zip nodeList (repeat False)
    -- worklist closure: nList = frontier, nList' = accumulated result
    precs nList nList' avail =
      case nList of
        [] -> nList'
        _ -> let
          -- new predecessors not seen before
          nList'' = concatMap (\ y -> filter
            (\ x -> x `notElem` Map.keys avail ||
                    Map.findWithDefault (error "iDL") x avail) $
            filter (y /=) $ pre graph y) nList
          avail' = Map.union avail $
                   Map.fromList $ zip nList'' (repeat False)
          in precs (nub nList'') (nub $ nList' ++ nList'') avail'
    in precs nodeList nodeList f
  -- pair every node with the labelled versions of its descendants
  in Map.fromList $ map (\ node -> (node, filter (\ x -> fst x `elem`
                                                         descsOf node)
                                          $ labNodes graph )) $ nodes graph
-- | All common lower bounds of two nodes: labelled nodes appearing in
-- the descendant lists of both.
commonBounds :: (Eq a) => Map.Map Node [(Node, a)] -> Node -> Node -> [(Node, a)]
commonBounds funDesc n1 n2 =
  [ d | d <- nub (descs1 ++ descs2), d `elem` descs1, d `elem` descs2 ]
  where
    descs1 = funDesc Map.! n1
    descs2 = funDesc Map.! n2
-- | Returns the greatest lower bound of two maximal nodes, if it
-- exists: the unique common bound that dominates all other common
-- bounds.
glb :: (Eq a) => Map.Map Node [(Node, a)] -> Node -> Node -> Maybe (Node, a)
glb funDesc n1 n2 = let
  cDescs = commonBounds funDesc n1 n2
  -- subList xs l2 <=> every element of xs occurs in l2
  subList [] _ = True
  subList (x : xs) l2 = x `elem` l2 && subList xs l2
  -- keep candidates whose descendant list covers every other common bound
  glbList = filter (\ (n, x) -> subList
    (filter (\ (n0, x0) -> (n, x) /= (n0, x0)) cDescs) (funDesc Map.! n)
    ) cDescs
  {- a node n is glb of n1 and n2 iff
  all common bounds of n1 and n2 are also descendants of n -}
  in case glbList of
      [] -> Nothing
      x : _ -> Just x -- because if it exists, there can be only one
-- | If no greatest lower bound exists, compute all maximal common
-- bounds of the two nodes: those common bounds that are not a
-- descendant of any other common bound.
maxBounds :: (Eq a) => Map.Map Node [(Node, a)] -> Node -> Node -> [(Node, a)]
maxBounds funDesc n1 n2 = let
  cDescs = commonBounds funDesc n1 n2
  -- is (n, y) among the recorded descendants of n0?
  isDesc n0 (n, y) = (n, y) `elem` funDesc Map.! n0
  noDescs (n, y) = not $ any (\ (n0, _) -> isDesc n0 (n, y)) cDescs
  in filter noDescs cDescs
-- | Dijkstra's algorithm for finding the shortest path between two
-- nodes of the diagram (all edges count 1), composing the edge
-- morphisms found along the way into one 'GMorphism'.
dijkstra :: GDiagram -> Node -> Node -> Result GMorphism
dijkstra graph source target = do
  let
    -- initial distances: 0 for the source, an unreachable upper bound
    -- (2 * #edges) for everything else
    dist = Map.insert source 0 $ Map.fromList $
           zip (nodes graph) $ repeat $ 2 * length (edges graph)
    prev = if source == target then Map.insert source source Map.empty
           else Map.empty
    q = nodes graph
    -- per-node morphisms; the source carries the identity on its signature
    com = case lab graph source of
      Nothing -> Map.empty -- shouldnt be the case
      Just gt -> Map.insert source (ide $ signOf gt) Map.empty
    -- pop a node with minimal tentative distance from the queue
    -- NOTE(review): 'head' is safe only while the queue is non-empty,
    -- which holds as long as the target is reachable -- confirm callers.
    extractMin queue dMap = let
      u = head $
          filter (\ x -> Map.findWithDefault (error "dijkstra") x dMap ==
                   minimum
                   (map (\ x1 -> Map.findWithDefault (error "dijkstra") x1 dMap)
                    queue))
          queue
      in ( Set.toList $ Set.difference (Set.fromList queue) (Set.fromList [u]) , u)
    -- relax every outgoing edge of u, recording distance, predecessor
    -- and the edge morphism of improved nodes
    updateNeighbors d p c u gr = let
      outEdges = out gr u
      upNeighbor dMap pMap cMap uNode edgeList = case edgeList of
        [] -> (dMap, pMap, cMap)
        (_, v, (_, gmor)) : edgeL -> let
          alt = Map.findWithDefault (error "dijkstra") uNode dMap + 1
          in
          if alt >= Map.findWithDefault (error "dijsktra") v dMap then
            upNeighbor dMap pMap cMap uNode edgeL
          else let
            d1 = Map.insert v alt dMap
            p1 = Map.insert v uNode pMap
            c1 = Map.insert v gmor cMap
            in upNeighbor d1 p1 c1 uNode edgeL
      in upNeighbor d p c u outEdges
    -- for each neighbor of u, if d(u)+1 < d(v), modify p(v) = u, d(v) = d(u)+1
    mainloop gr sn tn qL d p c = let
      (q1, u) = extractMin qL d
      (d1, p1, c1) = updateNeighbors d p c u gr
      in if u == tn then shortPath sn p1 c1 [] tn
         else mainloop gr sn tn q1 d1 p1 c1
    -- walk the predecessor map back from the target to the source,
    -- collecting the node sequence in order
    shortPath sn p1 c s u =
      if u `notElem` Map.keys p1 then fail "path not found"
      else let
        x = Map.findWithDefault (error $ show u) u p1 in
        if x == sn then return (u : s, c)
        else shortPath sn p1 c (u : s) x
  (nodeList, com1) <- mainloop graph source target q dist prev com
  -- compose the morphisms along the path, starting from the source's
  foldM comp ((Map.!) com1 source) . map ((Map.!) com1) $ nodeList
{- | Builds the arrows from the nodes of the original graph
to the unique maximal node of the obtained graph, by shortest-path
composition of the new graph's edge morphisms. -}
buildStrMorphisms :: GDiagram -> GDiagram
  -> Result (G_theory, Map.Map Node GMorphism)
buildStrMorphisms initGraph newGraph = do
  -- NOTE(review): 'head' is partial; this assumes newGraph has at least
  -- one node of out-degree 0 (its maximal node) -- confirm at call sites.
  let (maxNode, sigma) = head $ filter (\ (node, _) -> outdeg newGraph node == 0) $
                         labNodes newGraph
      -- accumulate one morphism per node of the initial graph
      buildMor pairList solList =
        case pairList of
          (n, _) : pairs -> do nMor <- dijkstra newGraph n maxNode
                               buildMor pairs (solList ++ [(n, nMor)])
          [] -> return solList
  morList <- buildMor (labNodes initGraph) []
  return (sigma, Map.fromList morList)
-- | Computes the colimit of the span (n <- n1, n2 ->) in the common
-- logic and inserts the resulting node into the graph.  If other
-- maximal common descendants remain ('maxNodes' non-empty), their
-- coequalizers are computed as well via 'computeCoeqs'.
addNodeToGraph :: GDiagram -> G_theory -> G_theory -> G_theory -> Int -> Int
  -> Int -> GMorphism -> GMorphism
  -> Map.Map Node [(Node, G_theory)] -> [(Int, G_theory)]
  -> Result (GDiagram, Map.Map Node [(Node, G_theory)])
addNodeToGraph oldGraph
  (G_theory lid _ extSign _ _ _)
  gt1@(G_theory lid1 _ extSign1 idx1 _ _)
  gt2@(G_theory lid2 _ extSign2 idx2 _ _)
  n
  n1
  n2
  (GMorphism cid1 ss1 _ mor1 _)
  (GMorphism cid2 ss2 _ mor2 _)
  funDesc maxNodes = do
  let newNode = 1 + maximum (nodes oldGraph) -- get a new node
  -- move both signatures and morphisms into the logic 'lid' of the apex
  s1 <- coerceSign lid1 lid "addToNodeGraph" extSign1
  s2 <- coerceSign lid2 lid "addToNodeGraph" extSign2
  m1 <- coerceMorphism (targetLogic cid1) lid "addToNodeGraph" mor1
  m2 <- coerceMorphism (targetLogic cid2) lid "addToNodeGraph" mor2
  -- homogeneous span n -> n1, n -> n2 in logic 'lid'
  let spanGr = Graph.mkGraph
        [(n, plainSign extSign), (n1, plainSign s1), (n2, plainSign s2)]
        [(n, n1, (1, m1)), (n, n2, (1, m2))]
  (sig, morMap) <- weakly_amalgamable_colimit lid spanGr
  -- must coerce here
  m11 <- coerceMorphism lid (targetLogic cid1) "addToNodeGraph" $
         morMap Map.! n1
  m22 <- coerceMorphism lid (targetLogic cid2) "addToNodeGraph" $
         morMap Map.! n2
  let gth = noSensGTheory lid (mkExtSign sig) startSigId
      gmor1 = GMorphism cid1 ss1 idx1 m11 startMorId
      gmor2 = GMorphism cid2 ss2 idx2 m22 startMorId
  case maxNodes of
    [] -> do
      -- single common descendant: just insert the colimit node and
      -- record its descendants as the union of those of n1 and n2
      let newGraph = insEdges [(n1, newNode, (1, gmor1)), (n2, newNode, (1, gmor2))] $
                     insNode (newNode, gth) oldGraph
          funDesc1 = Map.insert newNode
            (nub $ (Map.!) funDesc n1 ++ (Map.!) funDesc n2 ) funDesc
      return (newGraph, funDesc1)
    _ -> computeCoeqs oldGraph funDesc (n1, gt1) (n2, gt2)
         (newNode, gth) gmor1 gmor2 maxNodes
{- | For each node in the list, check whether the coequalizer can be
computed; if so, modify the maximal node of the graph and the edges to
it from n1 and n2.
Fix: 'com2' was a copy-paste duplicate of 'com1'
(compComorphism (Comorphism cid1) (Comorphism cid3) twice), which made
the 'com1 /= com2' coequalizer-existence check vacuously pass; it now
composes the second path's comorphisms cid2/cid4 as intended. -}
computeCoeqs :: GDiagram -> Map.Map Node [(Node, G_theory)]
  -> (Node, G_theory) -> (Node, G_theory) -> (Node, G_theory)
  -> GMorphism -> GMorphism -> [(Node, G_theory)] ->
  Result (GDiagram, Map.Map Node [(Node, G_theory)])
-- base case: no descendants left, insert the final node and its edges
computeCoeqs oldGraph funDesc (n1, _) (n2, _) (newN, newGt) gmor1 gmor2 [] = do
  let newGraph = insEdges [(n1, newN, (1, gmor1)), (n2, newN, (1, gmor2))] $
                 insNode (newN, newGt) oldGraph
      descFun1 = Map.insert newN
        (nub $ (Map.!) funDesc n1 ++ (Map.!) funDesc n2 ) funDesc
  return (newGraph, descFun1)
computeCoeqs graph funDesc (n1, gt1) (n2, gt2)
  (newN, _newGt@(G_theory tlid _ tsign _ _ _))
  _gmor1@(GMorphism cid1 sig1 idx1 mor1 _ )
  _gmor2@(GMorphism cid2 sig2 idx2 mor2 _ ) ((n, gt) : descs) = do
  -- paths from the common descendant n to each maximal node
  _rho1@(GMorphism cid3 _ _ mor3 _) <- dijkstra graph n n1
  _rho2@(GMorphism cid4 _ _ mor4 _) <- dijkstra graph n n2
  -- the two composite comorphisms must coincide for the coequalizer
  -- to exist in a single logic
  com1 <- compComorphism (Comorphism cid1) (Comorphism cid3)
  com2 <- compComorphism (Comorphism cid2) (Comorphism cid4)
  if com1 /= com2 then fail "Unable to compute coequalizer" else do
    _gtM@(G_theory lidM _ signM _idxM _ _) <- mapG_theory com1 gt
    s1 <- coerceSign lidM tlid "coequalizers" signM
    mor3' <- coerceMorphism (targetLogic cid3) (sourceLogic cid1) "coeqs" mor3
    mor4' <- coerceMorphism (targetLogic cid4) (sourceLogic cid2) "coeqs" mor4
    m1 <- map_morphism cid1 mor3'
    m2 <- map_morphism cid2 mor4'
    phi1' <- comp m1 mor1
    phi2' <- comp m2 mor2
    phi1 <- coerceMorphism (targetLogic cid1) tlid "coeqs" phi1'
    phi2 <- coerceMorphism (targetLogic cid2) tlid "coeqs" phi2'
    -- build the double arrow for computing the coequalizers
    let doubleArrow = Graph.mkGraph
          [(n, plainSign s1), (newN, plainSign tsign)]
          [(n, newN, (1, phi1)), (n, newN, (1, phi2))]
    (colS, colM) <- weakly_amalgamable_colimit tlid doubleArrow
    let newGt1 = noSensGTheory tlid (mkExtSign colS) startSigId
    -- compose the old edge morphisms with the coequalizer morphism
    mor11' <- coerceMorphism tlid (targetLogic cid1) "coeqs" $ (Map.!) colM newN
    mor11 <- comp mor1 mor11'
    mor22' <- coerceMorphism tlid (targetLogic cid2) "coeqs" $ (Map.!) colM newN
    mor22 <- comp mor2 mor22'
    let gMor11 = GMorphism cid1 sig1 idx1 mor11 startMorId
        gMor22 = GMorphism cid2 sig2 idx2 mor22 startMorId
    computeCoeqs graph funDesc (n1, gt1) (n2, gt2) (newN, newGt1)
      gMor11 gMor22 descs
-- | Nondeterministically pick a maximal node (out-degree 0) of the
-- graph; each candidate is offered as a separate alternative.
pickMaxNode :: (MonadPlus t) => Gr a b -> t (Node, a)
pickMaxNode graph =
  msum [ return ln | ln@(n, _) <- labNodes graph, outdeg graph n == 0 ]
{- | Returns the common descendants of two maximal nodes: the single
greatest lower bound when one exists, otherwise all maximal common
descendants. -}
commonDesc :: Map.Map Node [(Node, G_theory)] -> Node -> Node
  -> [(Node, G_theory)]
commonDesc funDesc n1 n2 =
  maybe (maxBounds funDesc n1 n2) (: []) (glb funDesc n1 n2)
-- | Returns a weakly amalgamable square of lax triangles for the two
-- path morphisms, offering the candidates nondeterministically.
pickSquare :: (MonadPlus t) => Result GMorphism -> Result GMorphism -> t Square
pickSquare (Result _ (Just phi1@(GMorphism cid1 _ _ _ _)))
           (Result _ (Just phi2@(GMorphism cid2 _ _ _ _))) =
  if isHomogeneous phi1 && isHomogeneous phi2 then
     return $ mkIdSquare $ Logic $ sourceLogic cid1
     -- since they have the same target, both homogeneous implies same logic
  else do
    {- if one of them is homogeneous, build the square
    with identity modification of the other comorphism -}
    let defaultSquare
          | isHomogeneous phi1 = [mkDefSquare $ Comorphism cid2]
          | isHomogeneous phi2 = [mirrorSquare $ mkDefSquare $ Comorphism cid1]
          | otherwise = []
    -- squares registered in the logic graph take precedence; the
    -- defaults are kept as fallback alternatives
    case maybeResult $ lookupSquare_in_LG (Comorphism cid1) (Comorphism cid2) of
      Nothing -> msum $ map return defaultSquare
      Just sqList -> msum $ map return $ sqList ++ defaultSquare
-- either path morphism failed to compute: no square
pickSquare (Result _ Nothing) _ = fail "Error computing comorphisms"
pickSquare _ (Result _ Nothing) = fail "Error computing comorphisms"
-- | Builds the (heterogeneous) span for which the colimit is computed:
-- the common descendant's theory and the two maximal theories are all
-- translated along the square's comorphisms into comparable logics,
-- the span legs are assembled, and 'addNodeToGraph' inserts the
-- colimit node.
buildSpan :: GDiagram ->
  Map.Map Node [(Node, G_theory)] ->
  AnyComorphism ->
  AnyComorphism ->
  AnyComorphism ->
  AnyComorphism ->
  AnyComorphism ->
  AnyModification ->
  AnyModification ->
  G_theory ->
  G_theory ->
  G_theory ->
  GMorphism ->
  GMorphism ->
  Int -> Int -> Int ->
  [(Int, G_theory)] ->
  Result (GDiagram, Map.Map Node [(Node, G_theory)])
buildSpan graph
          funDesc
          d@(Comorphism _cidD)
          e1@(Comorphism cidE1)
          e2@(Comorphism cidE2)
          _d1@(Comorphism _cidD1)
          _d2@(Comorphism _cidD2)
          _m1@(Modification cidM1)
          _m2@(Modification cidM2)
          gt@(G_theory lid _ sign _ _ _)
          gt1@(G_theory lid1 _ sign1 _ _ _)
          gt2@(G_theory lid2 _ sign2 _ _ _)
          _phi1@(GMorphism cid1 _ _ mor1 _)
          _phi2@(GMorphism cid2 _ _ mor2 _)
          n n1 n2
          maxNodes
 = do
  -- translate the three theories along the square's comorphisms
  sig@(G_theory _lid0 _ _sign0 _ _ _) <- mapG_theory d gt -- phi^d(Sigma)
  sig1 <- mapG_theory e1 gt1 -- phi^e1(Sigma1)
  sig2 <- mapG_theory e2 gt2 -- phi^e2(Sigma2)
  -- first leg: translate mor1 and precompose with the modification
  mor1' <- coerceMorphism (targetLogic cid1) (sourceLogic cidE1) "buildSpan" mor1
  eps1 <- map_morphism cidE1 mor1' -- phi^e1(sigma1)
  sign' <- coerceSign lid (sourceLogic $ sourceComorphism cidM1) "buildSpan" sign
  tau1 <- tauSigma cidM1 (plainSign sign') -- I^u1_Sigma
  tau1' <- coerceMorphism (targetLogic $ sourceComorphism cidM1)
           (targetLogic cidE1) "buildSpan" tau1
  rho1 <- comp tau1' eps1
  -- second leg, symmetrically
  mor2' <- coerceMorphism (targetLogic cid2) (sourceLogic cidE2) "buildSpan" mor2
  eps2 <- map_morphism cidE2 mor2' -- phi^e2(sigma2)
  sign'' <- coerceSign lid (sourceLogic $ sourceComorphism cidM2) "buildSpan" sign
  tau2 <- tauSigma cidM2 (plainSign sign'') -- I^u2_Sigma
  tau2' <- coerceMorphism (targetLogic $ sourceComorphism cidM2)
           (targetLogic cidE2) "buildSpan" tau2
  rho2 <- comp tau2' eps2
  signE1 <- coerceSign lid1 (sourceLogic cidE1) " " sign1
  signE2 <- coerceSign lid2 (sourceLogic cidE2) " " sign2
  -- insert the colimit of the assembled span into the graph
  (graph1, funDesc1) <- addNodeToGraph graph sig sig1 sig2 n n1 n2
    (GMorphism cidE1 signE1 startSigId rho1 startMorId)
    (GMorphism cidE2 signE2 startSigId rho2 startMorId)
    funDesc maxNodes
  return (graph1, funDesc1)
-- | Offer each common descendant as a nondeterministic alternative.
pickMaximalDesc :: (MonadPlus t) => [(Node, G_theory)] -> t (Node, G_theory)
pickMaximalDesc = msum . map return
-- | Number of maximal nodes (out-degree 0) in the graph.
nrMaxNodes :: Gr a b -> Int
nrMaxNodes graph = length [ n | n <- nodes graph, outdeg graph n == 0 ]
-- | Backtracking function for heterogeneous weakly amalgamable
-- cocones: repeatedly pick a pair of maximal nodes, a common
-- descendant, and a square of lax triangles, build the span colimit,
-- and recurse until a single maximal node remains.  'mzero' triggers
-- backtracking over the nondeterministic choices; 'once' keeps only
-- the first successful cocone.
hetWeakAmalgCocone :: (Monad m, LogicT t, MonadPlus (t m)) =>
  GDiagram -> Map.Map Int [(Int, G_theory)] -> t m GDiagram
hetWeakAmalgCocone graph funDesc =
  if nrMaxNodes graph == 1 then return graph
  else once $ do
    (n1, gt1) <- pickMaxNode graph
    (n2, gt2) <- pickMaxNode graph
    guard (n1 < n2) -- to consider each pair of maximal nodes only once
    let descList = commonDesc funDesc n1 n2
    case length descList of
      0 -> mzero -- no common descendants for n1 and n2
      _ -> do {- just one common descendant implies greatest lower bound
              for several, the tail is not empty and we compute coequalizers -}
        (n, gt) <- pickMaximalDesc descList
        -- path morphisms from the descendant to the two maximal nodes
        let phi1 = dijkstra graph n n1
            phi2 = dijkstra graph n n2
        square <- pickSquare phi1 phi2
        -- decompose the chosen square into its comorphisms/modifications
        let d = laxTarget $ leftTriangle square
            e1 = laxFst $ leftTriangle square
            d1 = laxSnd $ leftTriangle square
            e2 = laxFst $ rightTriangle square
            d2 = laxSnd $ rightTriangle square
            m1 = laxModif $ leftTriangle square
            m2 = laxModif $ rightTriangle square
        case maybeResult phi1 of
          Nothing -> mzero
          Just phi1' -> case maybeResult phi2 of
            Nothing -> mzero
            Just phi2' -> do
              -- build the span colimit; remaining descendants are
              -- handled via coequalizers inside buildSpan
              let mGraph = buildSpan graph funDesc d e1 e2 d1 d2 m1 m2 gt gt1 gt2
                           phi1' phi2' n n1 n2 $ filter (\ (nx, _) -> nx /= n) descList
              case maybeResult mGraph of
                Nothing -> mzero
                Just (graph1, funDesc1) -> hetWeakAmalgCocone graph1 funDesc1
| gnn/Hets | Static/WACocone.hs | gpl-2.0 | 19,591 | 269 | 27 | 5,550 | 6,318 | 3,353 | 2,965 | 384 | 8 |
-- udisksevt source file
-- Copyright (C) Vladimir Matveev, 2010
-- Disk structure related functions
module UDisksEvt.Disk where
import Control.Monad
import Control.Concurrent.STM
import DBus.Types
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Text as B
import UDisksEvt.Datatypes
-- Render a D-Bus ObjectPath as a plain String
objectPathToString :: ObjectPath -> String
objectPathToString path = B.unpack (objectPathText path)
-- Look up one property of a cached device; Nothing if the device or
-- the property is unknown
getDevicePropertyCached :: (?st :: UState) => ObjectPath -> String -> IO (Maybe Variant)
getDevicePropertyCached obj pname = do
  minfo <- getDeviceInfoCached obj
  return $ minfo >>= M.lookup pname . diProperties
-- Fetch the whole property map of a cached device, if present
getDevicePropertyMapCached :: (?st :: UState) => ObjectPath -> IO (Maybe (M.Map String Variant))
getDevicePropertyMapCached obj =
  liftM (fmap diProperties) (getDeviceInfoCached obj)
-- Read the device info structure from the runtime state's TVar cache
getDeviceInfoCached :: (?st :: UState) => ObjectPath -> IO (Maybe DeviceInfo)
getDeviceInfoCached obj = atomically $ do
  devs <- readTVar (uDevices ?st)
  return (M.lookup (objectPathToString obj) devs)
-- Retrieve the first of the device's mount points, if any; runs in the
-- Maybe monad so each decoding step may fail
getDeviceMountPoint :: DeviceInfo -> Maybe String
getDeviceMountPoint di = do
  raw <- M.lookup "DeviceMountPaths" (diProperties di)
  arr <- fromVariant raw
  paths <- mapM fromVariant (arrayItems arr)
  listToMaybe paths
| netvl/udisksevt | UDisksEvt/Disk.hs | gpl-2.0 | 1,454 | 0 | 12 | 245 | 340 | 182 | 158 | -1 | -1 |
{- -----------------------------------------------------------------------------
PTrader is a Personal Stock Trader Toolbox.
Copyright (C) 2012 Luis Cabellos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
----------------------------------------------------------------------------- -}
module PTrader.Graph( GraphConfig(..), runGraph) where
-- -----------------------------------------------------------------------------
import Control.Arrow( (***) )
import Control.Concurrent( threadDelay )
import Control.Monad( forM_, when, foldM_ )
import Data.List( transpose )
import qualified Graphics.Rendering.Cairo as Cr
-- -----------------------------------------------------------------------------
-- | Margin around the plotting area, as a fraction of the image width.
border :: Double
border = 0.02
-- -----------------------------------------------------------------------------
-- | An RGB colour; components are passed straight to Cairo's
-- 'setSourceRGB', which expects values in [0, 1].
data Color = Color !Double !Double !Double
-- | Make the colour the current Cairo source.
setColor :: Color -> Cr.Render()
setColor (Color r g b) = Cr.setSourceRGB r g b
-- -----------------------------------------------------------------------------
-- | Infinite palette for the data series: seven distinct colours,
-- repeated cyclically so any number of series can be drawn.
styles :: [Color]
styles = cycle [ Color 1 0 0, Color 0 0.6 0, Color 0 0 1
               , Color 1 0.5 0, Color 1 0 1, Color 0 0.7 1
               , Color 0.1 0.1 0.1 ]
-- -----------------------------------------------------------------------------
-- | Paint the whole chart: white background, one polyline per data
-- series (with its reference value), the surrounding frame, and the
-- series labels along the bottom edge.
render :: Double -> Double -> [String] -> [Double] -> [[Double]] -> Cr.Render ()
render w h names refvals xxs = do
  -- white background
  Cr.setSourceRGB 1 1 1
  Cr.rectangle 0 0 w h
  Cr.fill
  -- one line per series: colour, reference value, indexed points
  forM_ (zip3 styles refvals series) $ \ (col, refval, pts) ->
    renderLine w h col refval pts
  -- frame around the plotting area
  Cr.setLineCap Cr.LineCapSquare
  Cr.setLineWidth 2
  Cr.setSourceRGB 0 0 0
  Cr.moveTo (border*w) (border*w)
  Cr.lineTo (border*w) (h - 2*border*w)
  Cr.lineTo (w - border*w) (h - 2*border*w)
  Cr.lineTo (w - border*w) (border*w)
  Cr.lineTo (border*w) (border*w)
  Cr.stroke
  -- series labels, laid out left to right
  foldM_ (renderLabel (h - 0.6*border*w)) (border*w) $ zip styles names
  where
    -- transpose the per-sample rows into per-series columns and pair
    -- each value with its x index
    series = map (zip [0..]) (transpose xxs)
-- -----------------------------------------------------------------------------
-- | Draw one series label at (x, y) in its series colour and return
-- the x position for the next label.
renderLabel :: Double -> Double -> (Color,String) -> Cr.Render Double
renderLabel y x (col,label) = do
  setColor col
  Cr.moveTo x y
  -- measure with two leading spaces so labels get padding between them
  extents <- Cr.textExtents (' ':' ':label)
  Cr.showText label
  Cr.stroke
  return $! x + Cr.textExtentsXadvance extents
-- -----------------------------------------------------------------------------
-- | Draw one data series as a solid polyline plus a dashed horizontal
-- line at its reference value.  The y range is derived from the data
-- and the reference value via 'yLimits'.
renderLine :: Double -> Double -> Color -> Double -> [(Double,Double)]
  -> Cr.Render ()
renderLine _ _ _ _ [] = return ()
renderLine w h col refval (x:xs) = do
  let (miny, maxy) = yLimits $ refval : map snd (x:xs)
      offx = border*w
      -- at least 5 slots so short series don't stretch across the plot
      maxx = fromIntegral $ max 5 (length xs)
      -- map data coordinates to pixel coordinates
      -- (y:ys is safe: the input list x:xs is non-empty)
      y:ys = map (transx offx w maxx *** transy offx h miny maxy) (x:xs)
      tval = transy offx h miny maxy refval
      ylast = transx offx w maxx maxx
  setColor col
  -- solid polyline through the data points
  uncurry Cr.moveTo y
  mapM_ (uncurry Cr.lineTo) ys
  Cr.setDash [] 0
  Cr.setLineWidth 0.5
  Cr.stroke
  -- dashed reference line from the first point to the right edge
  Cr.setLineWidth 0.2
  Cr.setDash [6,4] 0
  Cr.moveTo (fst y) tval
  Cr.lineTo ylast tval
  Cr.stroke
  Cr.setDash [] 0
-- -----------------------------------------------------------------------------
-- | Map a data value into the vertical pixel range of the plot: miny
-- lands on the bottom axis, maxy near the top margin (pixel y grows
-- downwards, hence the subtraction).
transy :: Double -> Double -> Double -> Double -> Double -> Double
transy lo l miny maxy y =
  let bottom = l - 2 * lo                   -- pixel y of the bottom axis
      height = l - 3 * lo                   -- drawable height in pixels
      frac   = (y - miny) / (maxy - miny)   -- relative position in range
  in bottom - height * frac
-- -----------------------------------------------------------------------------
-- | Map a sample index into the horizontal pixel range of the plot,
-- offset by the left margin.
transx :: Double -> Double -> Double -> Double -> Double
transx lo l maxx y = (l - 2 * lo) * (y / maxx) + lo
-- -----------------------------------------------------------------------------
-- | Runtime configuration of the graph loop.
data GraphConfig = GraphConfig
  -- graphIters: number of remaining iterations; Nothing = run forever
  { graphIters :: Maybe Int
  -- graphSleep: delay between samples, in seconds
  , graphSleep :: ! Int }
  deriving( Show )
-- -----------------------------------------------------------------------------
-- | Render the chart into an ARGB surface of the given size and write
-- it out as a PNG file.
genImage :: FilePath -> Int -> Int -> [String] -> [Double] -> [[Double]] -> IO ()
genImage fn w h xs vals ys =
  Cr.withImageSurface Cr.FormatARGB32 w h $ \ srf -> do
    let scene = render (fromIntegral w) (fromIntegral h) xs vals ys
    Cr.renderWith srf scene
    Cr.surfaceWriteToPNG srf fn
-- -----------------------------------------------------------------------------
-- | Start the graphing loop with an empty sample history.  The IO
-- action produces one data point per series on each iteration.
runGraph :: GraphConfig -> [String] -> [Double] -> IO [Double] -> IO ()
runGraph c names vals = graphLoop c names vals []
-- | One iteration: sample a fresh data point, regenerate "test.png",
-- sleep for the configured interval, and recurse until the iteration
-- counter (if any) is exhausted.
-- Fix: merged the two duplicated 'case graphIters conf' analyses
-- (newConf / notEnded) into a single case and dropped the redundant
-- '_ <-' binding of 'threadDelay'.
graphLoop :: GraphConfig -> [String] -> [Double] -> [[Double]] -> IO [Double]
          -> IO ()
graphLoop conf names vals xs f = do
  x <- f
  -- append to keep the full history in chronological order
  let newxs = xs ++ [x]
  genImage "test.png" 600 300 names vals newxs
  threadDelay (graphSleep conf * 10 ^ (6 :: Int))
  case graphIters conf of
    -- no limit configured: loop forever with the same config
    Nothing -> graphLoop conf names vals newxs f
    -- limited: stop once the counter hits zero, otherwise decrement
    Just n  -> when (n > 0) $
      graphLoop conf { graphIters = Just (n - 1) } names vals newxs f
-- -----------------------------------------------------------------------------
-- | Integral y-axis bounds enclosing all values; a degenerate range
-- (floor and ceiling closer than 1) is widened to one unit so the plot
-- never has zero height.  Precondition: non-empty input.
yLimits :: [Double] -> (Double, Double)
yLimits xs
  | abs (lo - hi) < 1.0 = (lo, lo + 1)
  | otherwise           = (lo, hi)
  where
    lo = fromInteger . floor $ minimum xs
    hi = fromInteger . ceiling $ maximum xs
-- -----------------------------------------------------------------------------
| zhensydow/ptrader | src/PTrader/Graph.hs | gpl-3.0 | 5,813 | 2 | 14 | 1,112 | 1,817 | 925 | 892 | 107 | 3 |
module TB.Useless.Permutations (
carvePerms,
dupPerms
) where
-- | carvePerms
--
-- All non-empty sequences over the given alphabet of length at most n,
-- in order of increasing length.
--
-- >>> carvePerms 4 [0,1]
-- [[0],[1],[0,0],[0,1],[1,0],[1,1],[0,0,0],[0,0,1],[0,1,0],[0,1,1],[1,0,0],[1,0,1],[1,1,0],[1,1,1],[0,0,0,0],[0,0,0,1],[0,0,1,0],[0,0,1,1],[0,1,0,0],[0,1,0,1],[0,1,1,0],[0,1,1,1],[1,0,0,0],[1,0,0,1],[1,0,1,0],[1,0,1,1],[1,1,0,0],[1,1,0,1],[1,1,1,0],[1,1,1,1]]
carvePerms :: Int -> [a] -> [[a]]
carvePerms n codes = takeWhile (\ xs -> length xs <= n) (dupPerms codes)
-- | Infinite list of all non-empty sequences over the alphabet,
-- shortest first (empty for an empty alphabet).
dupPerms :: [a] -> [[a]]
dupPerms = flip dupPerms' [[]]
-- | Worker: extend every accumulated sequence by one leading symbol,
-- emit that generation, and recurse on it.  Produces an infinite list
-- for any non-empty alphabet.
dupPerms' :: [a] -> [[a]] -> [[a]]
dupPerms' [] _ = []
dupPerms' codes acc = next ++ dupPerms' codes next
  where
    next = [ c : perm | c <- codes, perm <- acc ]
| adarqui/ToyBox | haskell/adarqui/useless/src/TB/Useless/Permutations.hs | gpl-3.0 | 713 | 0 | 11 | 101 | 207 | 115 | 92 | 11 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE FlexibleInstances #-}
module Helper.EnumHelper where
import Data.Bits(shiftL,(.&.),(.|.),Bits)
-- | Every value of a bounded enumeration, from 'minBound' to 'maxBound'.
-- NOTE(review): this name shadows 'Prelude.all' and the module has no
-- export list, so importers should use a qualified import.
all ::(Enum a, Bounded a) => [a]
all = [minBound .. maxBound]
-- | Single-bit mask for an enum value: bit number 'fromEnum x' is set.
-- E.g. the first constructor maps to 1, the second to 2, and so on.
enumToMask :: (Enum a, Num b, Bits b) => a -> b
enumToMask x = fromInteger (1 `shiftL` fromEnum x)
| shinjiro-itagaki/shinjirecs | shinjirecs-api/src/Helper/EnumHelper.hs | gpl-3.0 | 350 | 0 | 7 | 53 | 120 | 72 | 48 | 9 | 1 |
-- Tests.hs
-- Copyright 2015 Remy E. Goldschmidt <taktoa@gmail.com>
-- This file is part of hskpipe.
-- hskpipe is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- hskpipe is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY-- without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with hskpipe. If not, see <http://www.gnu.org/licenses/>.
module Tests where
-- import Control.Monad (liftM, liftM2, replicateM)
-- import Data.Functor ((<$>))
-- import Data.Map.Strict (empty)
-- import Data.Ratio (denominator, numerator)
-- import Data.Text (Text, pack, unpack)
-- import Debug.Trace (trace)
import Distribution.TestSuite
-- import Distribution.TestSuite.QuickCheck
-- import Eval
-- import Expr
-- import Parse
-- import Test.QuickCheck
-- import Text.Parsec (ParseError, parse)
-- -- Helper functions
-- ratToInt :: Rational -> Integer
-- ratToInt r = numerator r `div` denominator r
-- showR :: Rational -> String
-- showR r = show (numerator r) ++ "%" ++ show (denominator r)
-- exprParser :: String -> Either ParseError Expr
-- exprParser = parse exprParse "test" . pack
-- evaluate :: String -> Rational
-- evaluate s = case either perr eval $ exprParser s of
-- ERat i -> i
-- e -> eerr e
-- where
-- throw m e = error $ m ++ show e
-- perr = throw "Parse error in evaluate: "
-- eerr = throw "Evaluation error in evaluate: "
-- applyStr :: String -> [String] -> String
-- applyStr f as = "(" ++ f ++ concatMap (' ':) as ++ ")"
-- applyRat :: String -> [Rational] -> String
-- applyRat f rs = applyStr f (map showR rs)
-- (<=>) :: Rational -> String -> Property
-- a <=> b = a === evaluate b
-- -- Instances
-- alphaFreqList :: [(Int, Gen Char)]
-- alphaFreqList =
-- [ (26, choose ('a', 'z'))
-- , (26, choose ('A', 'Z'))]
-- letter :: Gen Char
-- letter = frequency alphaFreqList
-- identifier :: Gen Text
-- identifier = pack <$> (liftM2 (:) letter $ resize 1 $ sized (`replicateM` letter))
-- instance Arbitrary Name where
-- arbitrary = liftM Name identifier
-- newtype BExpr = BExpr Expr deriving (Show, Eq)
-- newtype AExpr = AExpr Expr deriving (Show, Eq)
-- newtype LExpr = LExpr Expr deriving (Show, Eq)
-- unAExpr :: AExpr -> Expr
-- unAExpr (AExpr a) = a
-- unLExpr :: LExpr -> Expr
-- unLExpr (LExpr a) = a
-- aexprTree :: (Ord a, Num a) => a -> Gen AExpr
-- aexprTree 0 = AExpr <$> liftM ERat arbitrary
-- aexprTree n
-- | n > 0 = oneof
-- [ AExpr <$> liftM ENeg subtree
-- , AExpr <$> liftM2 EAdd subtree subtree
-- , AExpr <$> liftM ERcp subtree
-- , AExpr <$> liftM2 EMul subtree subtree
-- ]
-- where
-- subtree = unAExpr <$> aexprTree (n - 1)
-- varName :: Int -> Gen Name
-- varName a = Name . pack . (\m -> "x" ++ show m) <$> elements [0..a]
-- lexprTree' :: Int -> Int -> Int -> Gen LExpr
-- lexprTree' a _ 0 = LExpr . ERef <$> varName a
-- lexprTree' a 0 _ = LExpr . ERef <$> varName a
-- lexprTree' a b n
-- | n > 0 = oneof [ LExpr <$> liftM2 ELam v t
-- , LExpr <$> liftM2 EApp t' t'' ]
-- where
-- v = varName a
-- t = unLExpr <$> lexprTree' (a + 1) (b + 1) (n - 1)
-- t' = unLExpr <$> lexprTree' a (b - 1) (n - 1)
-- t'' = unLExpr <$> lexprTree' a b (n - 1)
-- lexprTree :: Int -> Gen LExpr
-- lexprTree x = lexprTree' 0 x x
-- instance Arbitrary AExpr where
-- arbitrary = sized aexprTree
-- -- Count
-- memoize :: (Int -> a) -> (Int -> a)
-- memoize f = (map f [0 ..] !!)
-- memoize2 :: (Int -> Int -> v) -> (Int -> Int -> v)
-- memoize2 v = memoize (memoize . v)
-- memoFix2 :: ((Int -> Int -> v) -> Int -> Int -> v) -> (Int -> Int -> v)
-- memoFix2 ff = f where f = memoize2 (ff f)
-- count' :: (Int -> Int -> Integer) -> Int -> Int -> Integer
-- count' _ 0 _ = 0
-- count' _ 1 f = fromIntegral f
-- count' cnt n f
-- | even n = theSum endE + cnt (n - 1) (f + 1)
-- | otherwise = extra + theSum endO + cnt (n - 1) (f + 1)
-- where
-- endE = (n - 2) `div` 2
-- endO = (n - 3) `div` 2
-- sqr x = x * x
-- extra = sqr $ cnt ((n - 1) `div` 2) f
-- theSum e = 2 * sigma 1 e (\i -> cnt i f * cnt (n - 1 - i) f)
-- sigma a b e = if a < b then e a + sigma (a + 1) b e else e b
-- count :: Int -> Int -> Integer
-- count = memoFix2 count'
-- --count n f = trace ("calling count with: " ++ show n ++ ", " ++ show f) $ memoFix2 count' n f
-- countLams :: Int -> Int -> Integer
-- countLams n f = count (n - 1) (f + 1)
-- countApps :: Int -> Int -> Integer
-- countApps n f = count n f - countLams n f
-- genTerm :: Int -> Gen LExpr
-- genTerm n = gen n 0
-- gen :: Int -> Int -> Gen LExpr
-- gen n f
-- | n == 1 && f > 0 = LExpr . ERef <$> varName (f - 1)
-- | n == 2 = genLamTerm n f
-- | otherwise = genOther n f
-- genOther :: Int -> Int -> Gen LExpr
-- genOther n f = do
-- r1 <- elements [0 .. count n f]
-- r2 <- elements [1 .. (n - 2)]
-- r3 <- elements [0 .. countApps n f]
-- if r1 < countApps n f
-- then genAppTerm n f r2 r3
-- else genLamTerm n f
-- genAppTerm :: Int -> Int -> Int -> Integer -> Gen LExpr
-- genAppTerm n f i r = trace t $ LExpr <$> liftM2 EApp a1 a2
-- where
-- t = ("i: " ++ show i ++ ", c1: " ++ show c1 ++ ", c2: " ++ show c2)
-- a1 = unLExpr <$> gen n_1 f
-- a2 = unLExpr <$> gen (n - 1 - n_1) f
-- sigma a b e = if a < b then e a + sigma (a + 1) b e else e b
-- c0 = (count 1 f * count (n - 2) f) - 1
-- c1 = sigma 1 (i - 1) (\j -> count j f * count (n - 1 - j) f)
-- c2 = sigma 1 i (\j -> count j f * count (n - 1 - j) f)
-- -- c2 = c1 + (count i f * count (n - 1 - i) f) - 1
-- -- i = n - 2 -- PROBLEM AREA --
-- bracket a b1 b2 = (b1 < a) && (a < b2)
-- n_1
-- | bracket r 0 c0 = 1
-- | bracket r c1 c2 = i
-- -- | otherwise = i
-- genLamTerm :: Int -> Int -> Gen LExpr
-- genLamTerm n f = LExpr . ELam (Name $ pack $ ('x':) $ show f) . unLExpr <$> gen (n - 1) (f + 1)
-- --genAppTerm
-- instance Arbitrary LExpr where
-- arbitrary = sized genTerm
-- -- Tests
-- propEvalFac :: Positive Integer -> Property
-- propEvalFac (Positive r) = factorial r <=> facStr
-- where
-- factorial n = toRational $ product [1 .. n]
-- facStr = "(app (mu f (lam x (if (<= x 1) 1 (* x (app f (+ x (- 1))))))) " ++ show r ++ ")"
-- propEvalRcp :: NonZero Rational -> Property
-- propEvalRcp (NonZero r) = recip r <=> applyRat "~" [r]
-- propEvalDiv2 :: Rational -> NonZero Rational -> Property
-- propEvalDiv2 r1 (NonZero r2) = (r1 / r2) <=> applyStr "*" [showR r1, applyRat "~" [r2]]
-- propEvalMulN :: NonEmptyList Rational -> Property
-- propEvalMulN (NonEmpty rs) = product rs <=> applyRat "*" rs
-- propEvalAddN :: NonEmptyList Rational -> Property
-- propEvalAddN (NonEmpty rs) = sum rs <=> applyRat "+" rs
-- propEvalMul2 :: Rational -> Rational -> Property
-- propEvalMul2 r1 r2 = propEvalMulN $ NonEmpty [r1, r2]
-- propEvalAdd2 :: Rational -> Rational -> Property
-- propEvalAdd2 r1 r2 = propEvalAddN $ NonEmpty [r1, r2]
-- -- propEvalStep :: LExpr -> Property
-- -- propEvalStep e = eval' ce === eval' (step ce) where ce = (empty, e)
-- propEvalStep :: LExpr -> Property
-- propEvalStep (LExpr e) = eval' ce === eval' (step ce) where ce = return e
-- propEvalArith :: AExpr -> Property
-- propEvalArith (AExpr a) = case eval' $ return a of
-- c@(Clsr _ _ i@(ERat _)) -> c === return i
-- _ -> property False
-- propEvalGroup :: [Test]
-- propEvalGroup =
-- [ testProperty "Add n numbers" propEvalAddN
-- , testProperty "Add two numbers" propEvalAdd2
-- , testProperty "Multiply n numbers" propEvalMulN
-- , testProperty "Multiply two numbers" propEvalMul2
-- , testProperty "Reciprocal nonzero number" propEvalRcp
-- , testProperty "Divide two numbers" propEvalDiv2
-- , testProperty "Factorial of a positive integer" propEvalFac
-- -- , testProperty "Step idempotency on evaluated values" propEvalStep
-- ]
-- tests :: IO [Test]
-- tests = return [ testGroup "Evaluator tests" propEvalGroup ]
-- | Cabal test-suite entry point. Every property group in this file is
-- currently commented out, so no tests are reported; restore the
-- @propEvalGroup@ wiring above once the evaluator modules are available
-- again.
tests :: IO [Test]
tests = return []
| taktoa/hskpipe | src/Tests.hs | gpl-3.0 | 8,958 | 0 | 6 | 2,604 | 227 | 214 | 13 | 4 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DeleteVpnGateway
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes the specified virtual private gateway. We recommend that before you
-- delete a virtual private gateway, you detach it from the VPC and delete the
-- VPN connection. Note that you don't need to delete the virtual private
-- gateway if you plan to delete and recreate the VPN connection between your
-- VPC and your network.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DeleteVpnGateway.html>
module Network.AWS.EC2.DeleteVpnGateway
(
-- * Request
DeleteVpnGateway
-- ** Request constructor
, deleteVpnGateway
-- ** Request lenses
, dvgDryRun
, dvgVpnGatewayId
-- * Response
, DeleteVpnGatewayResponse
-- ** Response constructor
, deleteVpnGatewayResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request payload for the EC2 @DeleteVpnGateway@ action.
data DeleteVpnGateway = DeleteVpnGateway
    { _dvgDryRun :: Maybe Bool -- ^ permission-check-only flag (see 'dvgDryRun')
    , _dvgVpnGatewayId :: Text -- ^ gateway to delete (see 'dvgVpnGatewayId')
    } deriving (Eq, Ord, Read, Show)
-- | 'DeleteVpnGateway' smart constructor. Only the gateway ID is required;
-- the dry-run flag starts unset.
--
-- Lens-accessible fields:
--
-- * 'dvgDryRun' @::@ 'Maybe' 'Bool' (defaults to 'Nothing')
--
-- * 'dvgVpnGatewayId' @::@ 'Text' (required)
--
deleteVpnGateway :: Text -- ^ 'dvgVpnGatewayId'
                 -> DeleteVpnGateway
deleteVpnGateway gatewayId = DeleteVpnGateway
    { _dvgDryRun       = Nothing
    , _dvgVpnGatewayId = gatewayId
    }
-- | Checks whether you have the required permissions for the action without
-- actually making the request. When the flag is set, the call fails with
-- 'DryRunOperation' if you are authorised and 'UnauthorizedOperation'
-- otherwise.
dvgDryRun :: Lens' DeleteVpnGateway (Maybe Bool)
dvgDryRun = lens _dvgDryRun (\req v -> req { _dvgDryRun = v })
-- | The ID of the virtual private gateway being deleted.
dvgVpnGatewayId :: Lens' DeleteVpnGateway Text
dvgVpnGatewayId = lens _dvgVpnGatewayId (\req v -> req { _dvgVpnGatewayId = v })
-- | Empty response type: a successful @DeleteVpnGateway@ call returns no
-- payload.
data DeleteVpnGatewayResponse = DeleteVpnGatewayResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeleteVpnGatewayResponse' constructor. The response carries no fields;
-- success is indicated by the absence of an error.
deleteVpnGatewayResponse :: DeleteVpnGatewayResponse
deleteVpnGatewayResponse = DeleteVpnGatewayResponse
-- | EC2 query-protocol requests all post to the service root path.
instance ToPath DeleteVpnGateway where
    toPath = const "/"
-- | Serialise the request into EC2 query-string parameters.
instance ToQuery DeleteVpnGateway where
    toQuery (DeleteVpnGateway dryRun gatewayId) = mconcat
        [ "DryRun"       =? dryRun
        , "VpnGatewayId" =? gatewayId
        ]
-- | Default headers only; nothing request-specific is added.
instance ToHeaders DeleteVpnGateway
-- | Bind the request to the EC2 service: POST with the action name, and an
-- empty-body response decoded as 'DeleteVpnGatewayResponse'.
instance AWSRequest DeleteVpnGateway where
    type Sv DeleteVpnGateway = EC2
    type Rs DeleteVpnGateway = DeleteVpnGatewayResponse

    request  = post "DeleteVpnGateway"
    response = nullResponse DeleteVpnGatewayResponse
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DeleteVpnGateway.hs | mpl-2.0 | 3,737 | 0 | 9 | 793 | 401 | 246 | 155 | 51 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AndroidEnterprise.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AndroidEnterprise.Types
(
-- * Service Configuration
androidEnterpriseService
-- * OAuth Scopes
, androidEnterpriseScope
-- * GroupLicense
, GroupLicense
, groupLicense
, glNumProvisioned
, glNumPurchased
, glApproval
, glPermissions
, glProductId
, glAcquisitionKind
-- * StoreLayoutPagesListResponse
, StoreLayoutPagesListResponse
, storeLayoutPagesListResponse
, slplrPage
-- * EnterpriseAccount
, EnterpriseAccount
, enterpriseAccount
, eaAccountEmail
-- * AppRestrictionsSchemaRestrictionRestrictionValue
, AppRestrictionsSchemaRestrictionRestrictionValue
, appRestrictionsSchemaRestrictionRestrictionValue
, arsrrvValueMultiselect
, arsrrvValueBool
, arsrrvValueInteger
, arsrrvType
, arsrrvValueString
-- * AdministratorWebTokenSpecPlaySearch
, AdministratorWebTokenSpecPlaySearch
, administratorWebTokenSpecPlaySearch
, awtspsEnabled
, awtspsApproveApps
-- * DeviceState
, DeviceState
, deviceState
, dsAccountState
-- * AppRestrictionsSchemaRestrictionRestrictionType
, AppRestrictionsSchemaRestrictionRestrictionType (..)
-- * ProductPermissionState
, ProductPermissionState (..)
-- * GroupLicenseUsersListResponse
, GroupLicenseUsersListResponse
, groupLicenseUsersListResponse
, glulrUser
-- * TokenPagination
, TokenPagination
, tokenPagination
, tpNextPageToken
, tpPreviousPageToken
-- * AdministratorWebTokenSpecWebApps
, AdministratorWebTokenSpecWebApps
, administratorWebTokenSpecWebApps
, awtswaEnabled
-- * ProductDistributionChannel
, ProductDistributionChannel (..)
-- * ApprovalURLInfo
, ApprovalURLInfo
, approvalURLInfo
, auiApprovalURL
-- * ManagedConfigurationsSettingsListResponse
, ManagedConfigurationsSettingsListResponse
, managedConfigurationsSettingsListResponse
, mcslrManagedConfigurationsSettings
-- * ManagedProperty
, ManagedProperty
, managedProperty
, mpValueStringArray
, mpValueBool
, mpKey
, mpValueBundle
, mpValueInteger
, mpValueBundleArray
, mpValueString
-- * StoreLayoutClustersListResponse
, StoreLayoutClustersListResponse
, storeLayoutClustersListResponse
, slclrCluster
-- * ManagedConfiguration
, ManagedConfiguration
, managedConfiguration
, mcManagedProperty
, mcKind
, mcConfigurationVariables
, mcProductId
-- * AutoInstallConstraintDeviceIdleStateConstraint
, AutoInstallConstraintDeviceIdleStateConstraint (..)
-- * StoreCluster
, StoreCluster
, storeCluster
, scName
, scOrderInPage
, scId
, scProductId
-- * AdministratorWebTokenSpec
, AdministratorWebTokenSpec
, administratorWebTokenSpec
, awtsParent
, awtsZeroTouch
, awtsPrivateApps
, awtsPlaySearch
, awtsWebApps
, awtsPermission
, awtsStoreBuilder
, awtsManagedConfigurations
-- * ProductContentRating
, ProductContentRating (..)
-- * ProductVisibility
, ProductVisibility
, productVisibility
, pvTracks
, pvTrackIds
, pvProductId
-- * EntitlementReason
, EntitlementReason (..)
-- * Notification
, Notification
, notification
, nEnterpriseId
, nNewPermissionsEvent
, nProductApprovalEvent
, nProductAvailabilityChangeEvent
, nAppUpdateEvent
, nInstallFailureEvent
, nNotificationType
, nAppRestrictionsSchemaChangeEvent
, nNewDeviceEvent
, nTimestampMillis
, nDeviceReportUpdateEvent
-- * PageInfo
, PageInfo
, pageInfo
, piResultPerPage
, piTotalResults
, piStartIndex
-- * ProductAvailabilityChangeEventAvailabilityStatus
, ProductAvailabilityChangeEventAvailabilityStatus (..)
-- * ProductPermission
, ProductPermission
, productPermission
, ppState
, ppPermissionId
-- * AutoInstallConstraintNetworkTypeConstraint
, AutoInstallConstraintNetworkTypeConstraint (..)
-- * StoreLayoutStoreLayoutType
, StoreLayoutStoreLayoutType (..)
-- * NewPermissionsEvent
, NewPermissionsEvent
, newPermissionsEvent
, npeRequestedPermissions
, npeApprovedPermissions
, npeProductId
-- * ProductAvailabilityChangeEvent
, ProductAvailabilityChangeEvent
, productAvailabilityChangeEvent
, paceAvailabilityStatus
, paceProductId
-- * ProductApprovalEvent
, ProductApprovalEvent
, productApprovalEvent
, paeApproved
, paeProductId
-- * UserAccountType
, UserAccountType (..)
-- * Device
, Device
, device
, dReport
, dPolicy
, dManagementType
, dAndroidId
-- * WebAppDisplayMode
, WebAppDisplayMode (..)
-- * AutoInstallConstraint
, AutoInstallConstraint
, autoInstallConstraint
, aicChargingStateConstraint
, aicDeviceIdleStateConstraint
, aicNetworkTypeConstraint
-- * ServiceAccountKey
, ServiceAccountKey
, serviceAccountKey
, sakData
, sakId
, sakType
, sakPublicData
-- * InstallsListResponse
, InstallsListResponse
, installsListResponse
, ilrInstall
-- * AppRestrictionsSchemaRestriction
, AppRestrictionsSchemaRestriction
, appRestrictionsSchemaRestriction
, arsrRestrictionType
, arsrEntry
, arsrKey
, arsrEntryValue
, arsrDefaultValue
, arsrTitle
, arsrDescription
, arsrNestedRestriction
-- * ProductPolicy
, ProductPolicy
, productPolicy
, ppTracks
, ppManagedConfiguration
, ppTrackIds
, ppAutoUpdateMode
, ppAutoInstallPolicy
, ppProductId
-- * Administrator
, Administrator
, administrator
, aEmail
-- * UsersListResponse
, UsersListResponse
, usersListResponse
, ulrUser
-- * NewDeviceEventManagementType
, NewDeviceEventManagementType (..)
-- * AdministratorWebTokenSpecStoreBuilder
, AdministratorWebTokenSpecStoreBuilder
, administratorWebTokenSpecStoreBuilder
, awtssbEnabled
-- * AuthenticationToken
, AuthenticationToken
, authenticationToken
, atToken
-- * ProductAvailableTracksItem
, ProductAvailableTracksItem (..)
-- * ManagedConfigurationsSettings
, ManagedConfigurationsSettings
, managedConfigurationsSettings
, mcsLastUpdatedTimestampMillis
, mcsMcmId
, mcsName
-- * AppVersion
, AppVersion
, appVersion
, avTrack
, avVersionCode
, avVersionString
, avTrackId
, avIsProduction
-- * AdministratorWebTokenSpecPermissionItem
, AdministratorWebTokenSpecPermissionItem (..)
-- * AppState
, AppState
, appState
, asPackageName
, asKeyedAppState
-- * EnterprisesPullNotificationSetRequestMode
, EnterprisesPullNotificationSetRequestMode (..)
-- * DeviceReport
, DeviceReport
, deviceReport
, drLastUpdatedTimestampMillis
, drAppState
-- * PolicyAutoUpdatePolicy
, PolicyAutoUpdatePolicy (..)
-- * ManagedPropertyBundle
, ManagedPropertyBundle
, managedPropertyBundle
, mpbManagedProperty
-- * GroupLicensesListResponse
, GroupLicensesListResponse
, groupLicensesListResponse
, gllrGroupLicense
-- * PolicyDeviceReportPolicy
, PolicyDeviceReportPolicy (..)
-- * ProductPolicyAutoUpdateMode
, ProductPolicyAutoUpdateMode (..)
-- * AutoInstallConstraintChargingStateConstraint
, AutoInstallConstraintChargingStateConstraint (..)
-- * InstallFailureEventFailureReason
, InstallFailureEventFailureReason (..)
-- * ProductSet
, ProductSet
, productSet
, psProductVisibility
, psProductSetBehavior
, psProductId
-- * Install
, Install
, install
, iVersionCode
, iInstallState
, iProductId
-- * MaintenanceWindow
, MaintenanceWindow
, maintenanceWindow
, mwDurationMs
, mwStartTimeAfterMidnightMs
-- * ServiceAccountKeysListResponse
, ServiceAccountKeysListResponse
, serviceAccountKeysListResponse
, saklrServiceAccountKey
-- * TrackInfo
, TrackInfo
, trackInfo
, tiTrackAlias
, tiTrackId
-- * User
, User
, user
, uAccountIdentifier
, uDisplayName
, uId
, uPrimaryEmail
, uManagementType
, uAccountType
-- * AppVersionTrack
, AppVersionTrack (..)
-- * AppRestrictionsSchemaRestrictionRestrictionValueType
, AppRestrictionsSchemaRestrictionRestrictionValueType (..)
-- * ProductSetProductSetBehavior
, ProductSetProductSetBehavior (..)
-- * ManagedConfigurationsForDeviceListResponse
, ManagedConfigurationsForDeviceListResponse
, managedConfigurationsForDeviceListResponse
, mcfdlrManagedConfigurationForDevice
-- * ProductsGenerateApprovalURLResponse
, ProductsGenerateApprovalURLResponse
, productsGenerateApprovalURLResponse
, pgaurURL
-- * StorePage
, StorePage
, storePage
, spLink
, spName
, spId
-- * ProductVisibilityTracksItem
, ProductVisibilityTracksItem (..)
-- * EnterprisesSendTestPushNotificationResponse
, EnterprisesSendTestPushNotificationResponse
, enterprisesSendTestPushNotificationResponse
, estpnrTopicName
, estpnrMessageId
-- * ServiceAccount
, ServiceAccount
, serviceAccount
, saKey
, saName
-- * VariableSet
, VariableSet
, variableSet
, vsUserValue
, vsPlaceholder
-- * AppUpdateEvent
, AppUpdateEvent
, appUpdateEvent
, aueProductId
-- * GroupLicensePermissions
, GroupLicensePermissions (..)
-- * EnterprisesListResponse
, EnterprisesListResponse
, enterprisesListResponse
, elrEnterprise
-- * NotificationSet
, NotificationSet
, notificationSet
, nsNotificationSetId
, nsNotification
-- * InstallInstallState
, InstallInstallState (..)
-- * AppRestrictionsSchema
, AppRestrictionsSchema
, appRestrictionsSchema
, arsKind
, arsRestrictions
-- * UserManagementType
, UserManagementType (..)
-- * PolicyProductAvailabilityPolicy
, PolicyProductAvailabilityPolicy (..)
-- * WebAppIcon
, WebAppIcon
, webAppIcon
, waiImageData
-- * LocalizedText
, LocalizedText
, localizedText
, ltText
, ltLocale
-- * Xgafv
, Xgafv (..)
-- * AdministratorWebTokenSpecPrivateApps
, AdministratorWebTokenSpecPrivateApps
, administratorWebTokenSpecPrivateApps
, awtspaEnabled
-- * ProductPolicyTracksItem
, ProductPolicyTracksItem (..)
-- * AdministratorWebTokenSpecZeroTouch
, AdministratorWebTokenSpecZeroTouch
, administratorWebTokenSpecZeroTouch
, awtsztEnabled
-- * DevicesListResponse
, DevicesListResponse
, devicesListResponse
, dlrDevice
-- * ProductSigningCertificate
, ProductSigningCertificate
, productSigningCertificate
, pscCertificateHashSha256
, pscCertificateHashSha1
-- * Enterprise
, Enterprise
, enterprise
, eAdministrator
, ePrimaryDomain
, eName
, eId
-- * GroupLicenseAcquisitionKind
, GroupLicenseAcquisitionKind (..)
-- * ProductsApproveRequestApprovedPermissions
, ProductsApproveRequestApprovedPermissions (..)
-- * InstallFailureEvent
, InstallFailureEvent
, installFailureEvent
, ifeFailureReason
, ifeFailureDetails
, ifeUserId
, ifeDeviceId
, ifeProductId
-- * ManagedConfigurationsForUserListResponse
, ManagedConfigurationsForUserListResponse
, managedConfigurationsForUserListResponse
, mcfulrManagedConfigurationForUser
-- * ConfigurationVariables
, ConfigurationVariables
, configurationVariables
, cvMcmId
, cvVariableSet
-- * StoreLayout
, StoreLayout
, storeLayout
, slStoreLayoutType
, slHomepageId
-- * AppRestrictionsSchemaChangeEvent
, AppRestrictionsSchemaChangeEvent
, appRestrictionsSchemaChangeEvent
, arsceProductId
-- * NotificationNotificationType
, NotificationNotificationType (..)
-- * ProductProductPricing
, ProductProductPricing (..)
-- * NewDeviceEvent
, NewDeviceEvent
, newDeviceEvent
, ndeUserId
, ndeDpcPackageName
, ndeDeviceId
, ndeManagementType
-- * Policy
, Policy
, policy
, pProductAvailabilityPolicy
, pProductPolicy
, pMaintenanceWindow
, pDeviceReportPolicy
, pAutoUpdatePolicy
-- * KeyedAppState
, KeyedAppState
, keyedAppState
, kasStateTimestampMillis
, kasData
, kasSeverity
, kasKey
, kasMessage
-- * AdministratorWebToken
, AdministratorWebToken
, administratorWebToken
, awtToken
-- * SignupInfo
, SignupInfo
, signupInfo
, siCompletionToken
, siKind
, siURL
-- * DeviceManagementType
, DeviceManagementType (..)
-- * Product
, Product
, product
, pScreenshotURLs
, pLastUpdatedTimestampMillis
, pSmallIconURL
, pAuthorName
, pAppTracks
, pWorkDetailsURL
, pRequiresContainerApp
, pCategory
, pAppVersion
, pProductPricing
, pDistributionChannel
, pMinAndroidSdkVersion
, pAvailableCountries
, pFeatures
, pAvailableTracks
, pIconURL
, pPermissions
, pTitle
, pSigningCertificate
, pContentRating
, pProductId
, pRecentChanges
, pDescription
, pDetailsURL
-- * GroupLicenseApproval
, GroupLicenseApproval (..)
-- * EntitlementsListResponse
, EntitlementsListResponse
, entitlementsListResponse
, elrEntitlement
-- * KeyedAppStateSeverity
, KeyedAppStateSeverity (..)
-- * EnterprisesGetServiceAccountKeyType
, EnterprisesGetServiceAccountKeyType (..)
-- * ProductPermissions
, ProductPermissions
, productPermissions
, ppsPermission
, ppsProductId
-- * AdministratorWebTokenSpecManagedConfigurations
, AdministratorWebTokenSpecManagedConfigurations
, administratorWebTokenSpecManagedConfigurations
, awtsmcEnabled
-- * Permission
, Permission
, permission
, perName
, perDescription
, perPermissionId
-- * ServiceAccountKeyType
, ServiceAccountKeyType (..)
-- * AutoInstallPolicyAutoInstallMode
, AutoInstallPolicyAutoInstallMode (..)
-- * DeviceReportUpdateEvent
, DeviceReportUpdateEvent
, deviceReportUpdateEvent
, drueReport
, drueUserId
, drueDeviceId
-- * ProductApprovalEventApproved
, ProductApprovalEventApproved (..)
-- * WebAppsListResponse
, WebAppsListResponse
, webAppsListResponse
, walrWebApp
-- * ProductsApproveRequest
, ProductsApproveRequest
, productsApproveRequest
, parApprovalURLInfo
, parApprovedPermissions
-- * AutoInstallPolicy
, AutoInstallPolicy
, autoInstallPolicy
, aipAutoInstallConstraint
, aipAutoInstallPriority
, aipAutoInstallMode
, aipMinimumVersionCode
-- * ProductFeaturesItem
, ProductFeaturesItem (..)
-- * Entitlement
, Entitlement
, entitlement
, eReason
, eProductId
-- * ProductsListResponse
, ProductsListResponse
, productsListResponse
, plrTokenPagination
, plrPageInfo
, plrProduct
-- * DeviceStateAccountState
, DeviceStateAccountState (..)
-- * WebApp
, WebApp
, webApp
, waWebAppId
, waVersionCode
, waIcons
, waStartURL
, waDisplayMode
, waIsPublished
, waTitle
) where
import Network.Google.AndroidEnterprise.Types.Product
import Network.Google.AndroidEnterprise.Types.Sum
import Network.Google.Prelude
-- | Default request configuration for version @v1@ of the Google Play EMM
-- API: the service id plus the host used as the starting point when
-- constructing service requests.
androidEnterpriseService :: ServiceConfig
androidEnterpriseService
  = defaultService (ServiceId "androidenterprise:v1")
      "androidenterprise.googleapis.com"
-- | OAuth scope granting management of corporate Android devices.
androidEnterpriseScope :: Proxy '["https://www.googleapis.com/auth/androidenterprise"]
androidEnterpriseScope = Proxy
| brendanhay/gogol | gogol-android-enterprise/gen/Network/Google/AndroidEnterprise/Types.hs | mpl-2.0 | 16,883 | 0 | 7 | 3,995 | 1,759 | 1,232 | 527 | 474 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Orphans() where
import Control.Applicative
import Data.Binary
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Short as BSS
import qualified Data.Text as T
import qualified Test.QuickCheck as QC
import qualified Test.SmallCheck.Series as SC
-- | Serialise 'BSS.ShortByteString' by round-tripping through the strict
-- 'BS.ByteString' representation, which already has a 'Binary' instance.
instance Binary BSS.ShortByteString where
  put = put . BSS.fromShort
  get = fmap BSS.toShort get
-- | QuickCheck generators: pack an arbitrary list of bytes/chars into the
-- corresponding packed representation.
instance QC.Arbitrary BS.ByteString where
  arbitrary = fmap BS.pack QC.arbitrary

instance QC.Arbitrary BSL.ByteString where
  arbitrary = fmap BSL.pack QC.arbitrary

instance QC.Arbitrary BSS.ShortByteString where
  -- capped at 256 bytes to keep generated cases small
  arbitrary = fmap (BSS.pack . take 256) QC.arbitrary

instance QC.Arbitrary T.Text where
  arbitrary = fmap T.pack QC.arbitrary
-- | SmallCheck series for bytestrings plus the Word instances the library
-- lacks; each Word series enumerates @[0 .. depth]@.
instance Monad m => SC.Serial m BSL.ByteString where
  series = fmap BSL.pack SC.series

instance Monad m => SC.Serial m BS.ByteString where
  series = fmap BS.pack SC.series

instance Monad m => SC.Serial m Word8 where
  series = SC.generate (\depth -> [0 .. fromIntegral depth])

instance Monad m => SC.Serial m Word16 where
  series = SC.generate (\depth -> [0 .. fromIntegral depth])

instance Monad m => SC.Serial m Word32 where
  series = SC.generate (\depth -> [0 .. fromIntegral depth])

instance Monad m => SC.Serial m BSS.ShortByteString where
  series = fmap BSS.pack SC.series
| bsummer4/ogz | src/Orphans.hs | agpl-3.0 | 1,411 | 0 | 9 | 274 | 442 | 243 | 199 | -1 | -1 |
-- Author: Eric Kalosa-Kenyon
module Tictactoe where
import Data.Matrix
import Control.Monad
-- | Cell contents: 'X' and 'O' are the two players, 'E' marks an empty cell.
data Player = X | O | E deriving (Show, Read, Eq)
-- | The game grid; dimensions come from 'dim' (3x3).
type Board = Matrix Player
-- Initial game configuration.
-- NOTE: 'dim' should be treated as hardcoded - the win-checking
-- functions below rely on a 3x3 board.
dim :: (Int, Int)
dim = (3, 3)

-- A board of the configured size with every cell empty.
-- ('repeat E' replaces the original 'iterate id E', which is the same
-- infinite list spelled obscurely.)
initial_board = fromList (fst dim) (snd dim) (repeat E)
-- board status calculation subroutines
-- | Place @player@ on @board@ at @pos@ (1-based (row, column)).
put :: Board -> (Int, Int) -> Player -> Board
put board pos player = setElem player pos board
-- | True when no empty ('E') cells remain on the board.
-- ('not . any (== E)' rewritten to the direct 'all (/= E)' form.)
full :: Board -> Bool
full = all (/= E)
-- endgame conditions
-- | Did player @p@ win on board @b@ via any row, column, or diagonal?
won, wonMainDiag, wonAltDiag, wonRows, wonCols :: Player -> Board -> Bool
winconds = [wonMainDiag, wonAltDiag, wonRows, wonCols]
-- 'any (== True)' is just 'or'.
won p b = or [wincond p b | wincond <- winconds]
wonMainDiag p b = all (== p) (getDiag b)
-- The anti-diagonal is the main diagonal of the board with its outer
-- columns swapped (valid because the board is 3x3).
wonAltDiag p = wonMainDiag p . switchCols 1 3
wonRows p b = or [wonRow p b i | i <- [1 .. fst dim]]
-- Column indices range over the column count ('snd dim'); the original used
-- 'fst dim', which only coincided because the board is square.
wonCols p b = or [wonCol p b i | i <- [1 .. snd dim]]
wonRow, wonCol :: Player -> Board -> Int -> Bool
wonRow p b i = all (== p) (getRow i b)
wonCol p b i = all (== p) (getCol i b)
-- user input handling, IO monad stuff
-- | Parse a board position from text such as @"(1,2)"@.
-- NOTE(review): 'read' is partial - malformed input raises an exception;
-- consider 'readMaybe' plus a re-prompt loop.
getIndex :: String -> (Int, Int)
getIndex = read

-- | Read a board position from stdin.
getUserIndex :: IO (Int, Int)
getUserIndex = fmap getIndex getLine

-- Render a board to stdout ('putStrLn . show' is just 'print').
boardOutIO = print
-- | Main game loop: show the board, prompt for a move, place an X at the
-- entered position, and recurse with the updated board forever.
-- NOTE(review): there is no input validation, no occupied-cell check, and
-- no win/draw detection yet (see the TODOs below); malformed input makes
-- 'getUserIndex' throw via 'read'.
playGame b = boardOutIO b >>
    putStrLn "Place your X with format: (Int,Int)" >>
    getUserIndex >>=
    return . (\x -> put b x X) >>= -- returns Monad m :: m Board (IO Board)
    -- TODO: check whether board is valid before updating
    -- TODO: check whether anyone won
    -- return $ (\b -> -- case b of
    --    | won X b -> strPutLn "X won!" >> initial_board
    --    | won O b -> strPutLn "O won!" >> initial_board
    --    | otherwise -> >> b
    --    ) >>=
    playGame
-- | Entry point: start an interactive game on an empty board.
main = playGame initial_board
| ekalosak/haskell-practice | tictactoe/Tictactoe.hs | lgpl-3.0 | 1,863 | 0 | 11 | 459 | 567 | 312 | 255 | 29 | 1 |
import System.Environment
import System.IO
import Network.HTTP.Proxy
import Network.TShot.Parser
import Network.TShot.Remote
-- | Placeholder entry point: just prints the program name; the parser and
-- remote modules imported above are not wired up yet.
main = putStrLn "tshot"
| crab2313/tshot | Main.hs | artistic-2.0 | 151 | 0 | 5 | 16 | 37 | 22 | 15 | 6 | 1 |
module Handler.Plans where
import Import
import Util
import Util.Angular
-- | Create a plan for the authenticated user from the JSON request body and
-- echo the persisted entity back as JSON.
postPlansR :: Handler RepJson
postPlansR = do
  uid <- requireAuthIdPreventingXsrf
  newPlan <- parseJsonBody_ -- TODO error page is HTML, not friendly!
  jsonToRepJson =<< runDB (createPlan uid newPlan)
-- | Mark one of the current user's plans as done, stamping it with the
-- current time, and return the updated entity as JSON.
postCompletePlanR :: PlanId -> Handler RepJson
postCompletePlanR planId = do
  Entity _ existing <- authedPlan planId
  completedAt <- now
  runDB (update planId [PlanDoneAt =. Just completedAt])
  jsonToRepJson (Entity planId existing { planDoneAt = Just completedAt })
-- | Replace a plan with the JSON request body. Responds 404 if the plan is
-- not the user's, or if it disappeared during the update.
putPlanR :: PlanId -> Handler RepJson
putPlanR planId = do
  _ <- authedPlan planId
  editedPlan <- parseJsonBody_ -- TODO error page is HTML, not friendly!
  (_, mUpdated) <- runDB (updatePlan editedPlan planId)
  maybe notFound (jsonToRepJson . Entity planId) mUpdated
-- | Delete a plan owned by the current user; responds with a JSON flag.
deletePlanR :: PlanId -> Handler RepJson
deletePlanR planId = do
  _ <- authedPlan planId
  runDB (delete planId)
  jsonToRepJson (object ["deleted" .= True])
-- | Fetch a plan only if it belongs to the authenticated user; 404
-- otherwise (which also covers plans that do not exist at all).
authedPlan :: PlanId -> Handler (Entity Plan)
authedPlan planId = do
  uid <- requireAuthIdPreventingXsrf
  mPlan <- runDB (selectFirst [PlanId ==. planId, PlanUser ==. uid] [])
  maybe notFound return mPlan
| samstokes/yesodoro-reboot | Handler/Plans.hs | bsd-2-clause | 1,307 | 0 | 11 | 248 | 392 | 186 | 206 | 36 | 2 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QCompleter.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QCompleter (
CompletionMode, ePopupCompletion, eUnfilteredPopupCompletion, eInlineCompletion
, ModelSorting, eUnsortedModel, eCaseSensitivelySortedModel, eCaseInsensitivelySortedModel
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- | Phantom-tagged wrapper representing the C++ @QCompleter::CompletionMode@
-- enum (file is machine generated; comments only added here).
data CCompletionMode a = CCompletionMode a
type CompletionMode = QEnum(CCompletionMode Int)

-- | Internal: build a 'CompletionMode' from its raw C enum value.
ieCompletionMode :: Int -> CompletionMode
ieCompletionMode x = QEnum (CCompletionMode x)

-- | Conversions between 'CompletionMode' and its raw 'Int' form, plus
-- helpers that lift results of C calls into the enum type.
instance QEnumC (CCompletionMode Int) where
  qEnum_toInt (QEnum (CCompletionMode x)) = x
  qEnum_fromInt x = QEnum (CCompletionMode x)
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til
-- | Connect a Qt signal carrying an int to a Haskell slot taking a
-- 'CompletionMode'. The C-side wrapper converts the int argument and, once
-- the receiving QObject is gone, frees both the stable pointer to the
-- handler and the foreign function pointer.
instance Qcs (QObject c -> CompletionMode -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
        funptr <- wrapSlotHandler_int slotHandlerWrapper_int
        stptr <- newStablePtr (Wrap _handler)
        withObjectPtr _qsig_obj $ \cobj_sig ->
          withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
              withCWString _qslt_nam $ \cstr_slt ->
                qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
        return ()
    where
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             -- A null QObject means the receiver died: release resources
             -- instead of invoking the handler.
             if (objectIsNull qobj)
               then do when (stptr/=ptrNull)
                         (freeStablePtr (castPtrToStablePtr stptr))
                       when (funptr/=ptrNull)
                         (freeHaskellFunPtr (castPtrToFunPtr funptr))
               else _handler qobj (qEnum_fromInt hint)
             return ()
ePopupCompletion :: CompletionMode
ePopupCompletion
= ieCompletionMode $ 0
eUnfilteredPopupCompletion :: CompletionMode
eUnfilteredPopupCompletion
= ieCompletionMode $ 1
eInlineCompletion :: CompletionMode
eInlineCompletion
= ieCompletionMode $ 2
data CModelSorting a = CModelSorting a
type ModelSorting = QEnum(CModelSorting Int)
ieModelSorting :: Int -> ModelSorting
ieModelSorting x = QEnum (CModelSorting x)
instance QEnumC (CModelSorting Int) where
qEnum_toInt (QEnum (CModelSorting x)) = x
qEnum_fromInt x = QEnum (CModelSorting x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> ModelSorting -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eUnsortedModel :: ModelSorting
eUnsortedModel
= ieModelSorting $ 0
eCaseSensitivelySortedModel :: ModelSorting
eCaseSensitivelySortedModel
= ieModelSorting $ 1
eCaseInsensitivelySortedModel :: ModelSorting
eCaseInsensitivelySortedModel
= ieModelSorting $ 2
| uduki/hsQt | Qtc/Enums/Gui/QCompleter.hs | bsd-2-clause | 4,469 | 0 | 18 | 961 | 1,142 | 570 | 572 | 101 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QGraphicsSceneDragDropEvent.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:16
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QGraphicsSceneDragDropEvent (
setPossibleActions
,setProposedAction
,qGraphicsSceneDragDropEvent_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Enums.Core.QEvent
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
instance QacceptProposedAction (QGraphicsSceneDragDropEvent a) (()) where
acceptProposedAction x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_acceptProposedAction cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_acceptProposedAction" qtc_QGraphicsSceneDragDropEvent_acceptProposedAction :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO ()
instance Qbuttons (QGraphicsSceneDragDropEvent a) (()) (IO (MouseButtons)) where
buttons x0 ()
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_buttons cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_buttons" qtc_QGraphicsSceneDragDropEvent_buttons :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO CLong
instance QdropAction (QGraphicsSceneDragDropEvent a) (()) where
dropAction x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_dropAction cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_dropAction" qtc_QGraphicsSceneDragDropEvent_dropAction :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO CLong
instance QmimeData (QGraphicsSceneDragDropEvent a) (()) where
mimeData x0 ()
= withQMimeDataResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_mimeData cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_mimeData" qtc_QGraphicsSceneDragDropEvent_mimeData :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO (Ptr (TQMimeData ()))
instance Qmodifiers (QGraphicsSceneDragDropEvent a) (()) where
modifiers x0 ()
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_modifiers cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_modifiers" qtc_QGraphicsSceneDragDropEvent_modifiers :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO CLong
instance Qpos (QGraphicsSceneDragDropEvent a) (()) (IO (PointF)) where
pos x0 ()
= withPointFResult $ \cpointf_ret_x cpointf_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_pos_qth cobj_x0 cpointf_ret_x cpointf_ret_y
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_pos_qth" qtc_QGraphicsSceneDragDropEvent_pos_qth :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr CDouble -> Ptr CDouble -> IO ()
instance Qqpos (QGraphicsSceneDragDropEvent a) (()) (IO (QPointF ())) where
qpos x0 ()
= withQPointFResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_pos cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_pos" qtc_QGraphicsSceneDragDropEvent_pos :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO (Ptr (TQPointF ()))
instance QpossibleActions (QGraphicsSceneDragDropEvent a) (()) where
possibleActions x0 ()
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_possibleActions cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_possibleActions" qtc_QGraphicsSceneDragDropEvent_possibleActions :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO CLong
instance QproposedAction (QGraphicsSceneDragDropEvent a) (()) where
proposedAction x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_proposedAction cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_proposedAction" qtc_QGraphicsSceneDragDropEvent_proposedAction :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO CLong
instance QscenePos (QGraphicsSceneDragDropEvent a) (()) where
scenePos x0 ()
= withPointFResult $ \cpointf_ret_x cpointf_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_scenePos_qth cobj_x0 cpointf_ret_x cpointf_ret_y
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_scenePos_qth" qtc_QGraphicsSceneDragDropEvent_scenePos_qth :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr CDouble -> Ptr CDouble -> IO ()
instance QqscenePos (QGraphicsSceneDragDropEvent a) (()) where
qscenePos x0 ()
= withQPointFResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_scenePos cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_scenePos" qtc_QGraphicsSceneDragDropEvent_scenePos :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO (Ptr (TQPointF ()))
instance QscreenPos (QGraphicsSceneDragDropEvent a) (()) where
screenPos x0 ()
= withPointResult $ \cpoint_ret_x cpoint_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_screenPos_qth cobj_x0 cpoint_ret_x cpoint_ret_y
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_screenPos_qth" qtc_QGraphicsSceneDragDropEvent_screenPos_qth :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QqscreenPos (QGraphicsSceneDragDropEvent a) (()) where
qscreenPos x0 ()
= withQPointResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_screenPos cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_screenPos" qtc_QGraphicsSceneDragDropEvent_screenPos :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO (Ptr (TQPoint ()))
instance QsetButtons (QGraphicsSceneDragDropEvent a) ((MouseButtons)) where
setButtons x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_setButtons cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setButtons" qtc_QGraphicsSceneDragDropEvent_setButtons :: Ptr (TQGraphicsSceneDragDropEvent a) -> CLong -> IO ()
instance QsetDropAction (QGraphicsSceneDragDropEvent a) ((DropAction)) where
setDropAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_setDropAction cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setDropAction" qtc_QGraphicsSceneDragDropEvent_setDropAction :: Ptr (TQGraphicsSceneDragDropEvent a) -> CLong -> IO ()
instance QsetMimeData (QGraphicsSceneDragDropEvent a) ((QMimeData t1)) where
setMimeData x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneDragDropEvent_setMimeData cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setMimeData" qtc_QGraphicsSceneDragDropEvent_setMimeData :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr (TQMimeData t1) -> IO ()
instance QsetModifiers (QGraphicsSceneDragDropEvent a) ((KeyboardModifiers)) where
setModifiers x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_setModifiers cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setModifiers" qtc_QGraphicsSceneDragDropEvent_setModifiers :: Ptr (TQGraphicsSceneDragDropEvent a) -> CLong -> IO ()
instance QsetPos (QGraphicsSceneDragDropEvent a) ((PointF)) where
setPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPointF x1 $ \cpointf_x1_x cpointf_x1_y ->
qtc_QGraphicsSceneDragDropEvent_setPos_qth cobj_x0 cpointf_x1_x cpointf_x1_y
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setPos_qth" qtc_QGraphicsSceneDragDropEvent_setPos_qth :: Ptr (TQGraphicsSceneDragDropEvent a) -> CDouble -> CDouble -> IO ()
instance QqsetPos (QGraphicsSceneDragDropEvent a) ((QPointF t1)) where
qsetPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneDragDropEvent_setPos cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setPos" qtc_QGraphicsSceneDragDropEvent_setPos :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr (TQPointF t1) -> IO ()
setPossibleActions :: QGraphicsSceneDragDropEvent a -> ((DropActions)) -> IO ()
setPossibleActions x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_setPossibleActions cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setPossibleActions" qtc_QGraphicsSceneDragDropEvent_setPossibleActions :: Ptr (TQGraphicsSceneDragDropEvent a) -> CLong -> IO ()
setProposedAction :: QGraphicsSceneDragDropEvent a -> ((DropAction)) -> IO ()
setProposedAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_setProposedAction cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setProposedAction" qtc_QGraphicsSceneDragDropEvent_setProposedAction :: Ptr (TQGraphicsSceneDragDropEvent a) -> CLong -> IO ()
instance QsetScenePos (QGraphicsSceneDragDropEvent a) ((PointF)) where
setScenePos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPointF x1 $ \cpointf_x1_x cpointf_x1_y ->
qtc_QGraphicsSceneDragDropEvent_setScenePos_qth cobj_x0 cpointf_x1_x cpointf_x1_y
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setScenePos_qth" qtc_QGraphicsSceneDragDropEvent_setScenePos_qth :: Ptr (TQGraphicsSceneDragDropEvent a) -> CDouble -> CDouble -> IO ()
instance QqsetScenePos (QGraphicsSceneDragDropEvent a) ((QPointF t1)) where
qsetScenePos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneDragDropEvent_setScenePos cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setScenePos" qtc_QGraphicsSceneDragDropEvent_setScenePos :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr (TQPointF t1) -> IO ()
instance QsetScreenPos (QGraphicsSceneDragDropEvent a) ((Point)) where
setScreenPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QGraphicsSceneDragDropEvent_setScreenPos_qth cobj_x0 cpoint_x1_x cpoint_x1_y
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setScreenPos_qth" qtc_QGraphicsSceneDragDropEvent_setScreenPos_qth :: Ptr (TQGraphicsSceneDragDropEvent a) -> CInt -> CInt -> IO ()
instance QqsetScreenPos (QGraphicsSceneDragDropEvent a) ((QPoint t1)) where
qsetScreenPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneDragDropEvent_setScreenPos cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setScreenPos" qtc_QGraphicsSceneDragDropEvent_setScreenPos :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr (TQPoint t1) -> IO ()
instance QsetSource (QGraphicsSceneDragDropEvent a) ((QWidget t1)) where
setSource x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneDragDropEvent_setSource cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_setSource" qtc_QGraphicsSceneDragDropEvent_setSource :: Ptr (TQGraphicsSceneDragDropEvent a) -> Ptr (TQWidget t1) -> IO ()
instance Qsource (QGraphicsSceneDragDropEvent a) (()) (IO (QWidget ())) where
source x0 ()
= withQWidgetResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_source cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_source" qtc_QGraphicsSceneDragDropEvent_source :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO (Ptr (TQWidget ()))
qGraphicsSceneDragDropEvent_delete :: QGraphicsSceneDragDropEvent a -> IO ()
qGraphicsSceneDragDropEvent_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneDragDropEvent_delete cobj_x0
foreign import ccall "qtc_QGraphicsSceneDragDropEvent_delete" qtc_QGraphicsSceneDragDropEvent_delete :: Ptr (TQGraphicsSceneDragDropEvent a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QGraphicsSceneDragDropEvent.hs | bsd-2-clause | 11,982 | 0 | 12 | 1,476 | 2,847 | 1,447 | 1,400 | -1 | -1 |
module TriangleKata.Day10Spec (spec) where
import Test.Hspec
import TriangleKata.Day10 (triangle, TriangleType(..))
-- | Hspec suite for the day-10 triangle kata.  'triangle' classifies a
-- triple of side lengths; the cases below cover every 'TriangleType'
-- constructor, including each rotation of the two-equal-sides and
-- degenerate (illegal) cases.
spec :: Spec
spec = do
    -- Equilateral: all three sides equal.
    it "equilateral triangle has all sides equal" $ do
        triangle (10, 10, 10) `shouldBe` Equilateral
    -- Isosceles: exactly two sides equal, in each of the three positions.
    it "isosceles triangle has first two sides equal" $ do
        triangle (7, 7, 10) `shouldBe` Isosceles
    it "isosceles triangle has last two sides equal" $ do
        triangle (10, 7, 7) `shouldBe` Isosceles
    it "isosceles triangle has the first and the last sides equal" $ do
        triangle (8, 11, 8) `shouldBe` Isosceles
    -- Scalene: no two sides equal.
    it "scalene triangle has no equal sides" $ do
        triangle (9, 10, 5) `shouldBe` Scalene
    -- Illegal: the triangle inequality is violated — some pair of sides
    -- sums to less than or equal to the remaining side.  Checked for
    -- each choice of the "remaining" side, both strict and equal cases.
    it "illegal triangle has sum of first two sides less or equal to the third one" $ do
        triangle (1, 9, 10) `shouldBe` Illegal
        triangle (2, 7, 10) `shouldBe` Illegal
    it "illegal triangle has sum of last two sides less or equal to the first one" $ do
        triangle (12, 5, 7) `shouldBe` Illegal
        triangle (12, 5, 6) `shouldBe` Illegal
    it "illegal triangle has sum of the first and the last sides less or equal to the second one" $ do
        triangle (5, 11, 6) `shouldBe` Illegal
        triangle (5, 10, 5) `shouldBe` Illegal
    -- Degenerate all-zero triple is also illegal.
    it "illegal triangle has all sides equal to zero" $ do
        triangle (0, 0, 0) `shouldBe` Illegal
| Alex-Diez/haskell-tdd-kata | old-katas/test/TriangleKata/Day10Spec.hs | bsd-3-clause | 1,457 | 0 | 12 | 449 | 385 | 207 | 178 | 26 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
module Sequence.Alignment
( Substitution, Gap, Alignment (..)
, alignment
, mkGlobal, mkLocal, mkSemiglobal, mkEditDistance
) where
import Data.Array (array, range, (!))
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import Sequence.Alignment.Instances
import Sequence.Alignment.Type
-- | Run the alignment algorithm described by @sa@ on the two input
-- sequences, returning the best score together with the pair of gapped
-- alignment strings.  All variability (scoring, start cell, traceback
-- step predicates) comes from the 'Alignment' instance, so the same
-- driver serves global, local, semiglobal and edit-distance alignment.
alignment :: Alignment a => a -> ByteString -> ByteString -> (Int, (ByteString, ByteString))
alignment sa s t = (score, trace sm sn si ti)
  where (m, n) = (B.length s, B.length t)
        -- DP table is indexed over (0..m, 0..n).
        bounds = ((0, 0), (m, n))
        -- The instance selects the traceback start cell (sm, sn) and
        -- whether trailing gaps must be appended after it.
        (needGaps, (sm, sn)) = selector sa matrix
        (si, ti) = if needGaps then tails else ([], [])
        score = matrix ! (sm, sn)
        gapSymbol = '-'
        -- Trailing suffix for whichever sequence was not fully consumed
        -- by the time the selected cell is reached.
        -- NOTE(review): partial — no guard matches when sm < m and
        -- sn < n; presumably 'selector' only returns such interior cells
        -- with needGaps == False.  Confirm against the instances.
        tails :: (String, String)
        tails | m == sm = (gaps (n - sn), B.unpack (B.drop sn t))
              | n == sn = (B.unpack (B.drop sm s), gaps (m - sm))
        gaps :: Int -> String
        gaps size = replicate size gapSymbol
        -- Recurrence for one cell; first row and column come from the
        -- instance's 'inits'.  'additional' provides a floor (e.g. 0 for
        -- local alignment).
        distance :: Int -> Int -> Int
        distance i 0 = inits sa i
        distance 0 j = inits sa j
        distance i j = maximum [ matrix ! (i - 1, j - 1) + sub i j
                               , matrix ! (i - 1, j) + g
                               , matrix ! (i, j - 1) + g
                               , additional sa
                               ]
          where !g = gap sa
        sub :: Substitution Int
        sub = subIJ sa s t
        -- The whole table is a lazy self-referential array, so it is
        -- filled in a single pass on demand.
        matrix :: Matrix
        !matrix = array bounds [(ij, uncurry distance ij) | ij <- range bounds]
        -- Walk backwards from (sm, sn), prepending matched characters or
        -- gap symbols according to the instance's step predicates.
        trace :: Int -> Int -> String -> String -> (ByteString, ByteString)
        trace i j s' t' | isStop matrix s t i j = (B.pack s', B.pack t')
                        | isVert matrix s t i j = trace (i - 1) j (addToS i) (gapSymbol:t')
                        | isHoriz matrix s t i j = trace i (j - 1) (gapSymbol:s') (addToT j)
                        | isDiag matrix s t i j = trace (i - 1) (j - 1) (addToS i) (addToT j)
          where addToS i = (s `B.index` (i - 1)):s'
                addToT j = (t `B.index` (j - 1)):t'
        -- Bring the instance's predicate record fields into scope.
        Conditions {..} = conditions sa
| zmactep/zero-aligner | src/Sequence/Alignment.hs | bsd-3-clause | 2,248 | 0 | 13 | 861 | 912 | 491 | 421 | 44 | 4 |
module Test.Themis.Test.Arbitrary (
alpha
, alphaNum
, num
, Interval(..)
, module Test.QuickCheck.Arbitrary
) where
-- Reimports the quickcheck's arbitrary module.
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
-- | Generator for a lowercase latin letter.
alpha :: Gen Char
alpha = elements ['a' .. 'z']

-- | Generator for a decimal digit character.
num :: Gen Char
num = elements ['0' .. '9']

-- | Generator for either a lowercase letter or a decimal digit,
-- chosen with equal probability between the two classes.
alphaNum :: Gen Char
alphaNum = oneof [alpha, num]
-- | Types whose values can be generated uniformly from a closed range.
class Interval n where
  -- | @interval l u@ generates a value between @l@ and @u@ inclusive.
  interval :: n -> n -> Gen n
-- | Uniform 'Double' drawn from [l, u].
--
-- Fixes two defects of the previous implementation: the result
-- @(u - l) * (m / n)@ was never offset by @l@, so it always fell in
-- [0, u - l] (compare the 'Int' instance, which does add @l@), and a
-- randomly drawn denominator of 0 could yield NaN.  Scaling a single
-- uniform ratio and shifting by the lower bound avoids both.
instance Interval Double where
  interval l u = do
    r <- choose (0, 1)          -- uniform ratio in [0, 1]
    return (l + (u - l) * r)
-- | Uniform 'Int' in [l, u]: draw an offset in [0, u - l], shift by l.
instance Interval Int where
  interval l u = fmap (l +) (choose (0, u - l))
| andorp/themis | src/Test/Themis/Test/Arbitrary.hs | bsd-3-clause | 612 | 0 | 12 | 140 | 243 | 134 | 109 | 20 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : FRP.UISF.UIMonad
-- Copyright : (c) Daniel Winograd-Cort 2014
-- License : see the LICENSE file in the distribution
--
-- Maintainer : dwc@cs.yale.edu
-- Stability : experimental
{-# LANGUAGE RecursiveDo #-}
module FRP.UISF.UITypes (
-- * UI Types
-- $uitypes
TerminationProc(..), nullTP, mergeTP,
-- * Rendering Context
CTX(..), Flow(..),
-- * UI Layout
makeLayout, LayoutType(..), nullLayout, Layout(..),
-- * Context and Layout Functions
divideCTX, mergeLayout,
-- * Graphics
mergeGraphics,
-- * System State
DirtyBit, Focus, WidgetID, FocusInfo(..),
-- * UIEvent
UIEvent(..),
Key(..),
SpecialKey(..),
MouseButton(..),
-- * Key State Checks
hasShiftModifier, hasCtrlModifier, hasAltModifier,
isKeyPressed,
-- * Framework Connections
-- $frameworkconnections
updateKeyState
) where
import FRP.UISF.Graphics
import Data.IORef
import Data.List (delete)
import System.IO.Unsafe (unsafePerformIO)
------------------------------------------------------------
-- * UI Types
------------------------------------------------------------
{- $uitypes
In this module, we will declare the various types to make creating the
overall UI possible. We will discuss the ideas for widgets in some
detail, but for specifics on the type of a widget (the 'UISF' type),
see the UISF type in "FRP.UISF.UISF", and for information on specific
widgets, see "FRP.UISF.Widget".
Widgets are arrows that map multiple inputs to multiple outputs.
Additionally, they have a relatively static layout argument that,
while it can change over time, is not dependent on any of its
inputs at any given moment.
On the input end, a widget will accept:
- a graphical context,
- some information about which widget is in focus (for the purposes
of routing key presses and mouse clicks and potentially for drawing
the widget differently),
- and the current time.
- an event with data relating to UI actions.
On the output end, a widget will produce from these inputs:
- an indicator of whether the widget needs to be redrawn,
- any focus information that needs to be conveyed to future widgets,
- the graphics to render to display this widget,
- and a procedure to run upon termination (for proper shutdown when finished).
Additionally, as widgets are generic arrows, there will be a parameterized
input and output types.
-}
------------------------------------------------------------
-- * Control Data
------------------------------------------------------------
-- | The termination procedure is simply a potential IO action.
type TerminationProc = Maybe (IO ())
-- | The null termination procedure is no action.
nullTP :: TerminationProc
nullTP = Nothing
-- | Combine two termination procedures into one.  When both sides have
-- an action, the left one runs before the right; when only one side has
-- an action, it is kept unchanged.
mergeTP :: TerminationProc -> TerminationProc -> TerminationProc
mergeTP a b = case (a, b) of
  (Nothing, _)      -> b
  (_,      Nothing) -> a
  (Just l, Just r)  -> Just (l >> r)
------------------------------------------------------------
-- * Rendering Context
------------------------------------------------------------
-- | A rendering context specifies the following:
data CTX = CTX
{ flow :: Flow
-- ^ A layout direction to flow widgets.
, bounds :: Rect
-- ^ A rectangle bound of current drawing area to render a UI
-- component. It specifies the max size of a widget, not the
-- actual size. It's up to each individual widget to decide
-- where in this bound to put itself.
, isConjoined :: Bool
-- ^ A flag to tell whether we are in a conjoined state or not.
-- A conjoined context will duplicate itself for subcomponents
-- rather than splitting. This can be useful for making compound
-- widgets when one widget takes up space and the other performs
-- some side effect having to do with that space.
} deriving Show
-- | Flow determines widget ordering.
data Flow = TopDown | BottomUp | LeftRight | RightLeft deriving (Eq, Show)
------------------------------------------------------------
-- * UI Layout
------------------------------------------------------------
-- $ The layout of a widget provides data to calculate its actual size
-- in a given context.
-- Layout calculation makes use of lazy evaluation to do everything in one pass.
-- Although the UI function maps from Context to Layout, all of the fields of
-- Layout must be independent of the Context so that they are avaiable before
-- the UI function is even evaluated.
-- | Layouts for individual widgets typically come in a few standard flavors,
-- so we have this convenience function for their creation.
-- This function takes layout information for first the horizontal
-- dimension and then the vertical.
makeLayout :: LayoutType -- ^ Horizontal Layout information
           -> LayoutType -- ^ Vertical Layout information
           -> Layout
makeLayout horiz vert = Layout ws hs wf hf wm hm 0
  where
    -- Each dimension contributes (stretch units, fixed pixels, min pixels).
    (ws, wf, wm) = dims horiz
    (hs, hf, hm) = dims vert
    dims (Fixed s)    = (0, s, 0)
    dims (Stretchy s) = (1, 0, s)
-- | A dimension can either be:
data LayoutType =
Stretchy { minSize :: Int }
-- ^ Stretchy with a minimum size in pixels
| Fixed { fixedSize :: Int }
-- ^ Fixed with a size measured in pixels
-- | The null layout is useful for \"widgets\" that do not appear or
-- take up space on the screen.
nullLayout :: Layout
nullLayout = NullLayout
-- | More complicated layouts can be manually constructed with direct
-- access to the Layout data type.
--
-- 1. wStretch and hStretch specify how much stretching space (in comparative
-- units) in the width and height should be allocated for this widget.
--
-- 2. wFixed and hFixed specify how much non-stretching space (in pixels)
-- of width and height should be allocated for this widget.
--
-- 3. wMin and hMin specify minimum values (in pixels) of width and height
-- for the widget's stretchy dimensions.
--
-- 4. lFill specifies how much expanding space (in comparative units) this
-- widget should fill out in excess space that would otherwise be unused.
data Layout = NullLayout | Layout
{ wStretch :: Int
, hStretch :: Int
, wFixed :: Int
, hFixed :: Int
, wMin :: Int
, hMin :: Int
, lFill :: Int
} deriving (Eq, Show)
------------------------------------------------------------
-- * Context and Layout Functions
------------------------------------------------------------
---------------
-- divideCTX --
---------------
-- | Divides the CTX among the two given layouts.
divideCTX :: CTX -> Layout -> Layout -> (CTX, CTX)
divideCTX ctx@(CTX a ((x, y), (w, h)) c) l1 l2 = if c then (ctx,ctx) else case (l1,l2) of
(NullLayout, _) -> (CTX a ((0,0),(0,0)) c, ctx)
(_, NullLayout) -> (ctx, CTX a ((0,0),(0,0)) c)
((Layout wStretch hStretch wFixed hFixed wMin hMin lFill),
(Layout wStretch' hStretch' wFixed' hFixed' wMin' hMin' lFill')) ->
case a of
TopDown -> (CTX a ((x, y), (w1T, h1T)) c,
CTX a ((x, y + h1T), (w2T, h2T)) c)
BottomUp -> (CTX a ((x, y + h - h1T), (w1T, h1T)) c,
CTX a ((x, y + h - h1T - h2T), (w2T, h2T)) c)
LeftRight -> (CTX a ((x, y), (w1L, h1L)) c,
CTX a ((x + w1L, y), (w2L, h2L)) c)
RightLeft -> (CTX a ((x + w - w1L, y), (w1L, h1L)) c,
CTX a ((x + w - w1L - w2L, y), (w2L, h2L)) c)
where
(w1L,w2L,w1T,w2T) = calc w wStretch wStretch' wFixed wFixed' wMin wMin' lFill lFill'
(h1T,h2T,h1L,h2L) = calc h hStretch hStretch' hFixed hFixed' hMin hMin' lFill lFill'
calc len stretch stretch' fixed fixed' lmin lmin' fill fill' = (st1, st2, fi1, fi2) where
portion s = div' (s * (len - fixed - fixed')) (stretch + stretch')
(st1,st2) = let u = min len $ fixed + max lmin (portion stretch)
v = fixed' + max lmin' (portion stretch')
por f = div' (f * (len - u - v)) (fill + fill')
in if u+v > len then (u, len-u) else (u + por fill, v + por fill')
fi1 = if fill > 0 then len else max lmin (if stretch == 0 then fixed else len)
fi2 = if fill' > 0 then len else max lmin' (if stretch' == 0 then fixed' else len)
div' b 0 = 0
div' b d = div b d
-----------------
-- mergeLayout --
-----------------
-- | Merge two layouts into one.
mergeLayout :: Flow -> Layout -> Layout -> Layout
mergeLayout _ NullLayout lay = lay
mergeLayout _ lay NullLayout = lay
mergeLayout flow (Layout ws hs wf hf wm hm fl) (Layout ws' hs' wf' hf' wm' hm' fl')
  | vertical  = Layout (orStretch ws ws') (hs + hs') (max wf wf') (hf + hf')
                       (max wm wm') (hm + hm') fills
  | otherwise = Layout (ws + ws') (orStretch hs hs') (wf + wf') (max hf hf')
                       (wm + wm') (max hm hm') fills
  where
    -- Widgets stack along the flow axis (dimensions add) and share the
    -- cross axis (dimensions take the max).  Cross-axis stretchiness is
    -- capped at one comparative unit as soon as either side stretches.
    vertical = flow == TopDown || flow == BottomUp
    orStretch 0 0 = 0
    orStretch _ _ = 1
    fills = fl + fl'
------------------------------------------------------------
-- * Graphics
------------------------------------------------------------
-- | Merging two graphics can be achieved with overGraphic, but
-- the mergeGraphic function additionally constrains the graphics
-- based on their layouts and the context.
-- TODO: Make sure this works as well as it should
mergeGraphics :: CTX -> (Graphic, Layout) -> (Graphic, Layout) -> Graphic
mergeGraphics _ (g1, l1) (g2, l2)
  | null1 && null2 = nullGraphic
  | null1          = g2
  | null2          = g1
  | otherwise      = overGraphic g2 g1
  where
    -- A widget with a null layout contributes no graphic of its own.
    null1 = l1 == NullLayout
    null2 = l2 == NullLayout
------------------------------------------------------------
-- * System State
------------------------------------------------------------
-- $ The DirtyBit and Focus types are for system state.
-- | The dirty bit is a bit to indicate if the widget needs to be redrawn.
type DirtyBit = Bool
-- | The Focus type helps focusable widgets communicate with each
-- other about which widget is in focus. It consists of a WidgetID
-- and a FocusInfo.
type Focus = (WidgetID, FocusInfo)
-- | The WidgetID for any given widget is dynamic based
-- on how many focusable widgets are active at the moment. It is designed
-- basically as a counter that focusable widgets will automatically (via the
-- focusable function) increment.
type WidgetID = Int
-- | The FocusInfo means one of the following:
data FocusInfo =
HasFocus
-- ^ Indicates that this widget is a subwidget of
-- a widget that is in focus. Thus, this widget too is in focus, and
-- this widget should pass HasFocus forward.
| NoFocus
-- ^ Indicates that there is no focus information to
-- communicate between widgets.
| SetFocusTo WidgetID
-- ^ Indicates that the widget whose id is given
-- should take focus. That widget should then pass NoFocus onward.
| DenyFocus
-- ^ Any widget that sees this value should recognize that
-- they are no longer in focus. This is useful for nested focus.
deriving (Show, Eq)
------------------------------------------------------------
-- * UIEvent
------------------------------------------------------------
-- | The UIEvent data type captures the various types of events that
-- the UI can produce. These are covered by regular keys, special
-- keys, mouse button presses, and mouse movement. Any key event
-- is accompanied by a list of 'Key's that were down when the given
-- event took place.
data UIEvent =
-- | A Key UIEvent indicates that the user has typed a regular key
-- on his/her keyboard. These will either be upper or lowercase
-- characters.
Key {
char :: Char,
modifiers :: [Key],
isDown :: Bool
}
-- | A SKey UIEvent indicates that the user has typed a special
-- key. These are Enter, Backspace, Tab, Delete, etc. See
-- 'SpecialKey' for more.
| SKey {
skey :: SpecialKey,
modifiers :: [Key],
isDown :: Bool
}
-- | A Button UIEvent indicates that the user has pressed a mouse
-- button.
| Button {
pt :: Point,
mbutton :: MouseButton,
isDown :: Bool
}
-- | Every time the mouse moves, a MouseMove UIEvent will fire.
| MouseMove {
pt :: Point
}
-- | The NoUIEvent fires when nothing else is going on. It is
-- important that this happens to allow interaction-independent
-- processing to continue (e.g. timers, animations, etc.).
| NoUIEvent
deriving (Eq,Show)
-------------------
-- Key state
-------------------
{- $frameworkconnections
The 'updateKeyState' function is for use by the GUI framework. It is
not intended for use unless one wants to build their own framework.
The key state is kept around so that it is easy to check if a given
key or button is currently pressed down. Unfortunately, I've coded it
as a global IORef, which means I'm using unsafePerformIO.
-}
-- | The global IORef storing the state of all current key presses.
-- The NOINLINE pragma is required for the top-level 'unsafePerformIO'
-- idiom: without it, GHC is free to inline the call and create more
-- than one independent state ref.
{-# NOINLINE keyState #-}
keyState :: IORef [Key]
keyState = unsafePerformIO $ newIORef []
-- | This should be called by the GUI engine (GLUT) whenever the user
-- presses or releases a key/button. As long as it is called every
-- time, it will keep an accurate key state.
updateKeyState :: Key -- ^ The Key pressed/released.
               -> Bool -- ^ True if pressed, False if released.
               -> IO [Key] -- ^ The updated key state.
updateKeyState k pressed = atomicModifyIORef keyState (dup . adjust)
  where
    -- Insert on press (without duplicates), remove on release.
    adjust ks
      | pressed   = if k `elem` ks then ks else k : ks
      | otherwise = delete k ks
    -- atomicModifyIORef wants (newState, result); we return the new state.
    dup x = (x, x)
-- | This is a convenience function that tests whether either of the
-- right or left shift keys is in the given list.
hasShiftModifier :: [Key] -> Bool
hasShiftModifier ks = any (`elem` ks) [SpecialKey KeyShiftL, SpecialKey KeyShiftR]
-- | This is a convenience function that tests whether either of the
-- right or left control keys is in the given list.
hasCtrlModifier :: [Key] -> Bool
hasCtrlModifier ks = any (`elem` ks) [SpecialKey KeyCtrlL, SpecialKey KeyCtrlR]
-- | This is a convenience function that tests whether either of the
-- right or left alt keys is in the given list.
hasAltModifier :: [Key] -> Bool
hasAltModifier ks = any (`elem` ks) [SpecialKey KeyAltL, SpecialKey KeyAltR]
-- | Checks the global key state to determine whether the given key is
-- currently pressed down.
isKeyPressed :: Key -> IO Bool
isKeyPressed k = fmap (k `elem`) (readIORef keyState)
-- | A Key can either be a character, a special key, or a mouse button.
data Key
= Char Char
| SpecialKey SpecialKey
| MouseButton MouseButton
deriving ( Eq, Ord, Show )
-- | A special key is any non-standard character key. According to
-- GLUT, 'KeyUnknown' should never be used, probably because it will
-- be treated as a weird Char instead of a SpecialKey.
data SpecialKey
= KeyF1
| KeyF2
| KeyF3
| KeyF4
| KeyF5
| KeyF6
| KeyF7
| KeyF8
| KeyF9
| KeyF10
| KeyF11
| KeyF12
| KeyLeft
| KeyUp
| KeyRight
| KeyDown
| KeyPageUp
| KeyPageDown
| KeyHome
| KeyEnd
| KeyInsert
| KeyNumLock
| KeyBegin
| KeyDelete
| KeyShiftL
| KeyShiftR
| KeyCtrlL
| KeyCtrlR
| KeyAltL
| KeyAltR
| KeyEnter
| KeyTab
| KeyEsc
| KeyBackspace
| KeyUnknown Int
deriving ( Eq, Ord, Show )
-- | The standard mouse buttons are represented, but for specialty mice,
-- one can also use the 'AdditionalButton' value.
data MouseButton
= LeftButton
| MiddleButton
| RightButton
| WheelUp
| WheelDown
| AdditionalButton Int
deriving ( Eq, Ord, Show )
| dwincort/UISF | FRP/UISF/UITypes.hs | bsd-3-clause | 16,358 | 0 | 22 | 3,932 | 2,868 | 1,679 | 1,189 | 195 | 13 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TypeFamilies #-}
-- ---------------------------------------------------------------------------
-- |
-- Module : Data.Vector.Algorithms.Merge
-- Copyright : (c) 2008-2011 Dan Doel
-- Maintainer : Dan Doel <dan.doel@gmail.com>
-- Stability : Experimental
-- Portability : Portable
--
-- This module implements a simple top-down merge sort. The temporary buffer
-- is preallocated to 1/2 the size of the input array, and shared through
-- the entire sorting process to ease the amount of allocation performed in
-- total. This is a stable sort.
module Data.Vector.Algorithms.Merge
( sort
, sortBy
, Comparison
) where
import Prelude hiding (read, length)
import Control.Monad.Primitive
import Data.Bits
import Data.Vector.Generic.Mutable
import Data.Vector.Algorithms.Common (Comparison, copyOffset)
import qualified Data.Vector.Algorithms.Optimal as O
import qualified Data.Vector.Algorithms.Insertion as I
-- | Sorts an array using the default comparison.
-- Delegates to 'sortBy' with 'compare'; the sort is stable (see 'merge').
sort :: (PrimMonad m, MVector v e, Ord e) => v (PrimState m) e -> m ()
sort = sortBy compare
{-# INLINE sort #-}
-- | Sorts an array using a custom comparison.
-- Arrays of length <= 4 are dispatched to specialized optimal sorters;
-- anything larger allocates a scratch buffer and runs the merge sort.
sortBy :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> m ()
sortBy cmp vec
  | len <= 1 = return ()
  | len == 2 = O.sort2ByOffset cmp vec 0
  | len == 3 = O.sort3ByOffset cmp vec 0
  | len == 4 = O.sort4ByOffset cmp vec 0
  -- NOTE(review): the module header says the temporary buffer is 1/2 the
  -- input size, but 'new len' allocates a full-length buffer; 'merge' only
  -- ever slices the first half of it — confirm whether this can be halved.
  | otherwise = do buf <- new len
                   mergeSortWithBuf cmp vec buf
 where
 len = length vec
{-# INLINE sortBy #-}
-- | Top-down merge sort of @src@ using @buf@ as scratch space.
-- Recursively splits the index range [l, u) at the midpoint, sorts both
-- halves, then merges them in place; ranges shorter than 'threshold' are
-- handed to insertion sort instead.
mergeSortWithBuf :: (PrimMonad m, MVector v e)
                 => Comparison e -> v (PrimState m) e -> v (PrimState m) e -> m ()
mergeSortWithBuf cmp src buf = loop 0 (length src)
 where
 loop l u
   | len < threshold = I.sortByBounds cmp src l u
   | otherwise = do loop l mid
                    loop mid u
                    -- merge the two sorted halves of the slice [l, u)
                    merge cmp (unsafeSlice l len src) buf (mid - l)
  where len = u - l
        mid = (u + l) `shiftR` 1
{-# INLINE mergeSortWithBuf #-}
-- | Merges the two sorted runs @[0, mid)@ and @[mid, length src)@ of @src@
-- in place.  The lower run is first copied into @buf@ (as @tmp@), then the
-- two runs are zipped back into @src@.  Ties (EQ and GT from @cmp@) take the
-- element from the lower run first, which makes the sort stable.
merge :: (PrimMonad m, MVector v e)
      => Comparison e -> v (PrimState m) e -> v (PrimState m) e
      -> Int -> m ()
merge cmp src buf mid = do unsafeCopy tmp lower
                           eTmp <- unsafeRead tmp 0
                           eUpp <- unsafeRead upper 0
                           loop tmp 0 eTmp upper 0 eUpp 0
 where
 lower = unsafeSlice 0 mid src
 upper = unsafeSlice mid (length src - mid) src
 tmp = unsafeSlice 0 mid buf
 -- Continuation after writing an element of the upper run: if the upper run
 -- is exhausted, bulk-copy the rest of the lower run back into src.
 wroteHigh low iLow eLow high iHigh iIns
   | iHigh >= length high = unsafeCopy (unsafeSlice iIns (length low - iLow) src)
                                       (unsafeSlice iLow (length low - iLow) low)
   | otherwise = do eHigh <- unsafeRead high iHigh
                    loop low iLow eLow high iHigh eHigh iIns
 -- Continuation after writing an element of the lower run: if the lower run
 -- is exhausted, the remaining upper elements are already in place.
 wroteLow low iLow high iHigh eHigh iIns
   | iLow >= length low = return ()
   | otherwise = do eLow <- unsafeRead low iLow
                    loop low iLow eLow high iHigh eHigh iIns
 -- Main merge loop; bang patterns keep the indices and cached elements strict.
 loop !low !iLow !eLow !high !iHigh !eHigh !iIns = case cmp eHigh eLow of
     LT -> do unsafeWrite src iIns eHigh
              wroteHigh low iLow eLow high (iHigh + 1) (iIns + 1)
     _ -> do unsafeWrite src iIns eLow
             wroteLow low (iLow + 1) high iHigh eHigh (iIns + 1)
{-# INLINE merge #-}
-- | Ranges shorter than this are sorted with insertion sort instead of
-- recursing further (see 'mergeSortWithBuf').
threshold :: Int
threshold = 25
{-# INLINE threshold #-}
| tolysz/vector-algorithms | src/Data/Vector/Algorithms/Merge.hs | bsd-3-clause | 3,471 | 0 | 14 | 1,032 | 1,051 | 523 | 528 | 65 | 2 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
import Test.Hspec
import Data.Conduit.Shell hiding (ignore) -- https://github.com/fpco/stackage/issues/2355#issue-212177275
import Data.Conduit.Shell.PATH (true, false)
import Data.Conduit.Shell.Segments (strings, ignore)
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Binary as CB
import qualified Data.ByteString.Char8 as S8
import Control.Applicative
import Data.ByteString
import Data.Char (toUpper)
import Data.Either (isRight, isLeft)
import Control.Exception (try)
-- | Integration test suite for the shell-conduit DSL.  These specs shell out
-- to real commands (ls, echo, mkdir, ...), so they require a POSIX
-- environment; the CPP branches account for BSD echo on macOS treating
-- @-e@ as an ordinary argument.
main :: IO ()
main =
  hspec $
  do describe "SHELL path functions" $
       do it "false" $
            do val <- run $ strings (false <|> echo "failed")
               val `shouldBe` ["failed"]
          it "true" $
            do val <- run $ strings (true <|> echo "passed")
               val `shouldBe` []
     describe "ls" $
       do it "home directory check" $
            do val <- run $ strings (ls "/")
               val `shouldContain` ["home"]
          it "long option" $
            do val <- run $ strings (ls "-a" ["/"])
               val `shouldContain` ["home"]
     describe "multiple string usage" $
       do it "make two directory" $
            do val <-
                 run $
                 do ignore $ mkdir "-p" "mtest1" "mtest2" "mtest3"
                    strings $ ls "."
               run $ rmdir ["mtest1", "mtest2", "mtest3"]
               val `shouldContain` ["mtest1", "mtest2", "mtest3"]
     describe "list usage in variadic" $
       do it "two directory" $
            do val <-
                 run $
                 do ignore $ mkdir "-p" ["test1", "test2"]
                    strings $ ls "."
               run $ rmdir ["test1", "test2"]
               val `shouldContain` ["test1", "test2"]
     describe "shell calls" $
       do it "shell ls" $
            do val <- run $ do strings $ shell "ls /"
               val `shouldContain` ["home"]
     describe "ordering of arguments" $
       do it "echo -e" $
            do val <- run $ do strings $ echo "-e" "hello\n" "haskell"
#ifdef darwin_HOST_OS
               val `shouldBe` ["-e hello", " haskell"]
#else
               val `shouldBe` ["hello", " haskell"]
#endif
          it "mixed variant" $
            do val <- run $ strings $ echo "-e" ["hello\n", "haskell"]
#ifdef darwin_HOST_OS
               val `shouldBe` ["-e hello", " haskell"]
#else
               val `shouldBe` ["hello", " haskell"]
#endif
          it "list variant" $
            do val <- run $ strings $ echo ["-e", "hello\n", "haskell"]
#ifdef darwin_HOST_OS
               val `shouldBe` ["-e hello", " haskell"]
#else
               val `shouldBe` ["hello", " haskell"]
#endif
          it "list mixed variant - 1" $
            do val <- run $ strings $ echo "-e" ["hello\n", "haskell"]
#ifdef darwin_HOST_OS
               val `shouldBe` ["-e hello", " haskell"]
#else
               val `shouldBe` ["hello", " haskell"]
#endif
          it "list mixed variant - 2" $
            do val <- run $ strings $ echo "-e" ["hello\n", "haskell\n"] "world"
#ifdef darwin_HOST_OS
               val `shouldBe` ["-e hello", " haskell", " world"]
#else
               val `shouldBe` ["hello", " haskell", " world"]
#endif
          it "list mixed variant - 3" $
            do val <- run $ strings $ echo "-e" ["hello\n", "haskell\n"] "world\n" ["planet"]
#ifdef darwin_HOST_OS
               val `shouldBe` ["-e hello", " haskell", " world", " planet"]
#else
               val `shouldBe` ["hello", " haskell", " world", " planet"]
#endif
     describe "cd" $
       do it "cd /" $
            do val <-
                 run $
                 do ignore $ cd "/"
                    strings pwd
               val `shouldBe` ["/"]
          it "cd /home" $
            do val <-
                 run $
                 do ignore $ cd ["/home", undefined]
                    strings pwd
               val `shouldBe` ["/home"]
     describe "Piping" $
       do it "basic piping" $
            do (val :: [String]) <-
                 run $ strings (echo "hello" $| conduit (CL.map (S8.map toUpper)))
               val `shouldBe` ["HELLO"]
          it "piping of commands - example 1" $
            do val <- run $ strings (ls "/" $| grep "etc")
               val `shouldBe` ["etc"]
          it "piping of commands - example 2" $
            do val <- run $ strings (echo "hello" $| tr "[a-z]" "[A-Z]")
               val `shouldBe` ["HELLO"]
     describe "Exception" $
       do it "Basic exception handling - success" $
            do (val :: Either ProcessException () ) <- try $ run (ls "/bin")
               val `shouldSatisfy` isRight
          it "Basic exception handling - failure" $
            do (val :: Either ProcessException () ) <- try $ run (ls "/non_existent_directory")
               val `shouldSatisfy` isLeft
          it "Basic exception handling with <|> - success" $
            do (val :: Either ProcessException () ) <- try $ run (ls "/non_existent_directory" <|> ls "/bin")
               val `shouldSatisfy` isRight
          it "Basic exception handling with <|> - failure" $
            do (val :: Either ProcessException () ) <- try $ run (ls "/non_existent_directory" <|> ls "/non_existent_directory")
               val `shouldSatisfy` isLeft
          it "Basic exception handling with <|> - first success" $
            do (val :: Either ProcessException () ) <- try $ run (ls "/bin" <|> ls "/non_existent_directory")
               val `shouldSatisfy` isRight
| chrisdone/shell-conduit | test/Spec.hs | bsd-3-clause | 5,583 | 0 | 24 | 1,925 | 1,525 | 764 | 761 | 111 | 1 |
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-}
module Language.C.Simple.CType.Types where
import Data.Data
import Data.Typeable
import GHC.Generics
-- |Primitive C Types
-- One constructor per C scalar type (char family, fixed/pointer-sized
-- integers, time types, floats, and void).
data PrimitiveType = TChar
                   | TSChar
                   | TUChar
                   | TShort
                   | TUShort
                   | TInt
                   | TUInt
                   | TLong
                   | TULong
                   | TPtrdiff
                   | TSize
                   | TWchar
                   | TSigAtomic
                   | TLLong
                   | TULLong
                   | TIntPtr
                   | TUIntPtr
                   | TIntMax
                   | TUIntMax
                   | TClock
                   | TTime
                   | TUSeconds
                   | TSUSeconds
                   | TFloat
                   | TDouble
                   | TVoid
   deriving(Show, Eq, Read, Ord, Data, Typeable, Generic)
-- |A simplified C data type AST
data CType = TStruct String [CType]      -- ^ struct with name and members
           | TUnion String [CType]       -- ^ union with name and members
           | TPrimitive PrimitiveType    -- ^ a scalar type
           | TArray Int CType            -- ^ fixed-size array
           | TPointer CType              -- ^ pointer to a type
           | TVariable CType             -- ^ a typed variable
           | TMember String CType        -- ^ named struct/union member
           | TNamed String               -- ^ reference to a named type
           | TFuncPointer [CType]        -- ^ function pointer, argument types
           | TEnum String [String]       -- ^ enum with name and tags
   deriving(Show, Eq, Read, Ord, Data, Typeable, Generic)
-- | A helper type for conversion
data FixedArray n a = FixedArray n [a]
   deriving(Eq, Show, Read, Ord, Data, Typeable, Generic)
| jfischoff/simple-c-type | src/Language/C/Simple/CType/Types.hs | bsd-3-clause | 1,574 | 0 | 7 | 787 | 295 | 177 | 118 | 45 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Temperature.ZH.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Locale
import Duckling.Resolve
import Duckling.Temperature.Types
import Duckling.Testing.Types
-- | Chinese-locale temperature corpus: test context plus all examples.
corpus :: Corpus
corpus = (testContext {locale = makeLocale ZH Nothing}, testOptions, allExamples)
-- | Expected parses.  Each group pairs one resolved temperature with the
-- surface forms that should produce it; both simplified (摄氏/华氏) and
-- traditional (攝氏/華氏) spellings are covered, with the scale marker
-- before or after the number.
allExamples :: [Example]
allExamples = concat
  [ examples (simple Celsius 37)
             [ "37°C"
             , "摄氏37°"
             , "攝氏37°"
             , "摄氏37度"
             , "攝氏37度"
             , "37摄氏°"
             , "37攝氏°"
             , "37摄氏度"
             , "37攝氏度"
             ]
  , examples (simple Fahrenheit 70)
             [ "70°F"
             , "华氏70°"
             , "華氏70°"
             , "华氏70度"
             , "華氏70度"
             , "70华氏°"
             , "70華氏°"
             , "70华氏度"
             , "70華氏度"
             ]
  , examples (simple Degree 45)
             [ "45°"
             , "45度"
             ]
  ]
| facebookincubator/duckling | Duckling/Temperature/ZH/Corpus.hs | bsd-3-clause | 1,291 | 0 | 9 | 451 | 205 | 126 | 79 | 36 | 1 |
{-# LANGUAGE BangPatterns #-}
module Network.HPACK.Huffman.Tree (
-- * Huffman decoding
HTree(..)
, eosInfo
, toHTree
, showTree
, printTree
, flatten
) where
import Control.Arrow (second)
import Data.List (partition)
import Network.HPACK.Huffman.Bit
import Network.HPACK.Huffman.Params
----------------------------------------------------------------
-- | 'Just' the step count from the root at which the EOS path passes
-- through a node, or 'Nothing' if the node is not on the EOS path
-- (set by 'mark').
type EOSInfo = Maybe Int
-- | Type for Huffman decoding.
data HTree = Tip
               EOSInfo -- EOS info from 1
               {-# UNPACK #-} !Int -- Decoded value. Essentially Word8
           | Bin
               EOSInfo -- EOS info from 1
               {-# UNPACK #-} !Int -- Sequence no from 0
               HTree -- Left
               HTree -- Right
           deriving Show
-- | Extract the EOS annotation of the root node.
eosInfo :: HTree -> EOSInfo
eosInfo (Tip mx _) = mx
eosInfo (Bin mx _ _ _) = mx
----------------------------------------------------------------
-- | Render the tree as an indented multi-line string (for debugging).
showTree :: HTree -> String
showTree = showTree' ""
-- | Worker for 'showTree'; the first argument is the indentation prefix,
-- which grows by two spaces per level.
showTree' :: String -> HTree -> String
showTree' _ (Tip _ i) = show i ++ "\n"
showTree' pref (Bin _ n l r) = "No " ++ show n ++ "\n"
                            ++ pref ++ "+ " ++ showTree' pref' l
                            ++ pref ++ "+ " ++ showTree' pref' r
  where
    pref' = "  " ++ pref
-- | Print the rendering of 'showTree' to stdout.
printTree :: HTree -> IO ()
printTree = putStr . showTree
----------------------------------------------------------------
-- | Creating 'HTree'.
-- Builds the decoding tree from the code table (one bit string per symbol,
-- indexed 0..'idxEos'), numbers the internal nodes in construction order,
-- and finally marks the EOS path with 'mark'.
toHTree :: [Bits] -> HTree
toHTree bs = mark 1 eos $ snd $ build 0 $ zip [0..idxEos] bs
  where
    eos = bs !! idxEos
-- | Recursively split the (symbol, remaining-bits) pairs on the next bit:
-- F goes left, T goes right.  The counter threads the next sequence number
-- for internal nodes; a single pair with no bits left becomes a 'Tip'.
build :: Int -> [(Int,Bits)] -> (Int, HTree)
build !cnt0 [(v,[])] = (cnt0,Tip Nothing v)
build !cnt0 xs = let (cnt1,l) = build (cnt0 + 1) fs
                     (cnt2,r) = build cnt1 ts
                 in (cnt2, Bin Nothing cnt0 l r)
  where
    (fs',ts') = partition ((==) F . head . snd) xs
    fs = map (second tail) fs'
    ts = map (second tail) ts'
-- | Marking the EOS path
-- Walks the tree along the EOS bit string, annotating every node on the
-- path with its 1-based depth; any other pattern is a construction bug.
mark :: Int -> Bits -> HTree -> HTree
mark i [] (Tip Nothing v) = Tip (Just i) v
mark i (F:bs) (Bin Nothing n l r) = Bin (Just i) n (mark (i+1) bs l) r
mark i (T:bs) (Bin Nothing n l r) = Bin (Just i) n l (mark (i+1) bs r)
mark _ _ _ = error "mark"
----------------------------------------------------------------
-- | Collect every internal ('Bin') node of the tree in preorder;
-- 'Tip' leaves are not included.
flatten :: HTree -> [HTree]
flatten t = case t of
    Tip _ _     -> []
    Bin _ _ l r -> t : (flatten l ++ flatten r)
| bergmark/http2 | Network/HPACK/Huffman/Tree.hs | bsd-3-clause | 2,440 | 0 | 13 | 748 | 843 | 451 | 392 | 54 | 1 |
{-
- Hacq (c) 2013 NEC Laboratories America, Inc. All rights reserved.
-
- This file is part of Hacq.
- Hacq is distributed under the 3-clause BSD license.
- See the LICENSE file for more details.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module Control.Monad.Quantum.PhaseShift.Binary.Class (module Control.Monad.Quantum.Class,
MonadBinaryPhaseShift(..), applyBinaryPhaseShiftConst) where
import Control.Monad.Reader (ReaderT)
import Control.Monad.Trans (MonadTrans, lift)
import Data.Sequence (Seq)
import Control.Monad.Memo.Null
import Control.Monad.Quantum.Class
-- | Quantum monads that support binary (power-of-two) phase shifts.
class MonadQuantum w m => MonadBinaryPhaseShift w m | m -> w where
  -- |@applyGlobalBinaryPhase k m@ applies the global phase e^{2πim/2^k}.
  applyGlobalBinaryPhase :: Int -> Integer -> m ()
  -- |@applyBinaryPhaseShift k m@ maps |m> to e^{2πim/2^k}|m>.
  --
  -- The first qubit in @m@ is LSB, and @m@ is considered as unsigned.
  applyBinaryPhaseShift :: Int -> Seq (Bit w) -> m ()
-- |@applyBinaryPhaseShiftConst k m w@ maps |0> to |0> and |1> to e^{2πim/2^k}|1>.
-- Implemented as the global phase controlled on @w@.
applyBinaryPhaseShiftConst :: MonadBinaryPhaseShift w m => Int -> Integer -> Bit w -> m ()
applyBinaryPhaseShiftConst k m w =
  control w $ applyGlobalBinaryPhase k m
-- The following instance requires UndecidableInstances.
-- Lifts both operations through a 'ReaderT' layer unchanged.
instance MonadBinaryPhaseShift w m => MonadBinaryPhaseShift w (ReaderT r m) where
  applyGlobalBinaryPhase k m =
    lift $ applyGlobalBinaryPhase k m
  {-# INLINABLE applyGlobalBinaryPhase #-}
  applyBinaryPhaseShift k m =
    lift $ applyBinaryPhaseShift k m
  {-# INLINABLE applyBinaryPhaseShift #-}
-- The following instance requires UndecidableInstances.
-- Lifts both operations through a 'MemoNullT' layer unchanged.
instance MonadBinaryPhaseShift w m => MonadBinaryPhaseShift w (MemoNullT k m) where
  applyGlobalBinaryPhase k m =
    lift $ applyGlobalBinaryPhase k m
  {-# INLINABLE applyGlobalBinaryPhase #-}
  applyBinaryPhaseShift k m =
    lift $ applyBinaryPhaseShift k m
  {-# INLINABLE applyBinaryPhaseShift #-}
| ti1024/hacq | src/Control/Monad/Quantum/PhaseShift/Binary/Class.hs | bsd-3-clause | 1,998 | 0 | 11 | 331 | 353 | 193 | 160 | 27 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
module Jerimum.Storage.PostgreSQL.SqlMonad
( SqlMonadError(..)
, SqlMonad
, performIO
, performSQL
, performTxn
, catchError
, throwError
, runSqlM
) where
import Control.Exception
import Control.Monad.Except
import Control.Monad.Reader
import qualified Database.PostgreSQL.Simple as PQ
-- | Errors that can escape a 'SqlMonad' action.  The existential
-- constructors carry any 'Show'-able payload for diagnostics.
data SqlMonadError
  = SomeSqlError PQ.SqlError          -- ^ any other SQL error
  | CheckViolationError PQ.SqlError   -- ^ CHECK-constraint violation
  | forall a. Show a =>
              StaleError a            -- ^ stale/outdated data detected
  | forall a. Show a =>
              SystemError a           -- ^ non-SQL internal failure
-- | SQL actions: 'ExceptT' for errors over a 'ReaderT' carrying the
-- database connection, in 'IO'.
type SqlMonad a = ExceptT SqlMonadError (ReaderT PQ.Connection IO) a
-- | Run a 'SqlMonad' action against the given connection.
runSqlM :: PQ.Connection -> SqlMonad a -> IO (Either SqlMonadError a)
runSqlM conn exec = runReaderT (runExceptT exec) conn
-- | Lift a plain 'IO' action into 'SqlMonad' (through both layers).
performIO :: IO a -> SqlMonad a
performIO = lift . lift
-- | Run an 'IO' action that needs the connection, converting a thrown
-- 'PQ.SqlError' into a 'SqlMonadError'.  SQLSTATE \"23514\" is
-- PostgreSQL's @check_violation@ code and gets its own constructor.
performSQL :: (PQ.Connection -> IO a) -> SqlMonad a
performSQL task = do
  conn <- ask
  mans <- performIO $ try (task conn)
  case mans of
    Left e
      | PQ.sqlState e == "23514" -> throwError $ CheckViolationError e
      | otherwise -> throwError $ SomeSqlError e
    Right ans -> pure ans
-- | Like 'performSQL', but wraps the action in a database transaction.
performTxn :: (PQ.Connection -> IO a) -> SqlMonad a
performTxn task = performSQL $ \conn -> PQ.withTransaction conn (task conn)
-- | Human-readable rendering; each constructor is shown with a short tag
-- followed by its payload.
instance Show SqlMonadError where
  show err = case err of
    StaleError m          -> "StaleError " ++ show m
    CheckViolationError e -> "CheckViolationError " ++ show e
    SomeSqlError e        -> "SQL " ++ show e
    SystemError m         -> "SystemError " ++ show m
| dgvncsz0f/nws | src/Jerimum/Storage/PostgreSQL/SqlMonad.hs | bsd-3-clause | 1,513 | 0 | 15 | 361 | 486 | 249 | 237 | 42 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module BitData where
import Control.Monad
import Ivory.Language
import Ivory.Compile.C.CmdlineFrontend
import BitDataTypes
import Ivory.Language.BitData.Array (ArraySize)
import Ivory.Language.BitData.Bits (BitSize)
import Ivory.Language.BitData.BitData (BitType)
-- | Ivory bit-data declarations, parsed at compile time by the 'ivory'
-- quasiquoter: the SPI control registers (with and without an explicit
-- layout clause), an interrupt set-enable register modelled as a bit
-- array, and a test type with an array of 4-bit fields.
[ivory|
 bitdata SPI_CR1 :: Bits 16 = spi_cr1
   { spi_cr1_bidimode :: Bit
   , spi_cr1_bidioe   :: Bit
   , spi_cr1_crcen    :: Bit
   , spi_cr1_crcnext  :: Bit
   , spi_cr1_dff      :: Bit
   , spi_cr1_rxonly   :: Bit
   , spi_cr1_ssm      :: Bit
   , spi_cr1_ssi      :: Bit
   , spi_cr1_lsbfirst :: Bit
   , spi_cr1_spe      :: Bit
   , spi_cr1_br       :: SPIBaud
   , spi_cr1_mstr     :: Bit
   , spi_cr1_cpol     :: Bit
   , spi_cr1_cpha     :: Bit
   }

 -- The "SPI_CR2" register defined using a layout clause.
 bitdata SPI_CR2 :: Bits 16 = spi_cr2
   { spi_cr2_txeie     :: Bit
   , spi_cr2_rxneie    :: Bit
   , spi_cr2_errie     :: Bit
   , spi_cr2_frf       :: Bit
   , spi_cr2_ssoe      :: Bit
   , spi_cr2_txdmaen   :: Bit
   , spi_cr2_rxdmaen   :: Bit
   } as 8b0 # spi_cr2_txeie # spi_cr2_rxneie # spi_cr2_errie # spi_cr2_frf
     # 1b0 # spi_cr2_ssoe # spi_cr2_txdmaen # spi_cr2_rxdmaen

 -- The "SPI_CR2" register defined using the default layout and
 -- padding fields.
 bitdata Alt_SPI_CR2 :: Bits 16 = alt_spi_cr2
   { _                   :: Bits 8
   , alt_spi_cr2_txeie   :: Bit
   , alt_spi_cr2_rxneie  :: Bit
   , alt_spi_cr2_errie   :: Bit
   , alt_spi_cr2_frf     :: Bit
   , _                   :: Bit
   , alt_spi_cr2_ssoe    :: Bit
   , alt_spi_cr2_txdmaen :: Bit
   , alt_spi_cr2_rxdmaen :: Bit
   }

 -- The "NVIC_ISER" register is an array of 32 bits.
 --
 -- We will want to access the array both at Ivory run-time using an
 -- "Ix 32" and at code generation time using a Haskell integer.
 bitdata NVIC_ISER :: Bits 32 = nvic_iser
   { nvic_iser_setena :: BitArray 32 Bit
   }

 -- A bit data type with an array of 4-bit integers.
 bitdata ArrayTest :: Bits 32 = array_test
   { at_4bits :: BitArray 8 (Bits 4)
   }
|]
-- | Read-modify-write of an SPI_CR1 value: clear CPHA, set CPOL, and set
-- the baud-rate field to div-8, returning the updated raw register word.
test1 :: Def ('[Uint16] ':-> Uint16)
test1 = proc "test1" $ \x -> body $
  ret $ withBits x $ do
    clearBit spi_cr1_cpha
    setBit spi_cr1_cpol
    setField spi_cr1_br spi_baud_div_8
-- | Interpret the argument as NVIC_ISER and return bit 0 of the SETENA
-- array (as a 0/1 byte).
test2 :: Def ('[Uint32] ':-> Uint8)
test2 = proc "test2" $ \x -> body $ do
  let d = fromRep x :: NVIC_ISER
  ret $ toRep (d #. nvic_iser_setena #! 0)
-- | Iterate over the elements of a bit array.
-- The loop runs over a Haskell list of indices, so it is unrolled at code
-- generation time rather than producing a loop in the generated code.
forBitArray_ ::
  ( ANat n
  , BitCast (BitRep (ArraySize n a)) (BitRep (BitSize (BitType a)))
  , IvoryStore (BitRep (ArraySize n a))
  , IvoryOrd (BitRep (ArraySize n a))
  , IvoryZeroVal (BitRep (ArraySize n a))
  , IvoryInit (BitRep (ArraySize n a))
  , BitData a
  , ANat (BitSize a)
  , ANat (ArraySize n a)
  ) => BitArray n a -> (a -> Ivory eff ()) -> Ivory eff ()
forBitArray_ arr f =
  forM_ [0 .. bitLength arr - 1] $ \i ->
    f (arr #! i)
-- | Test looping over the elements of a bit array:
-- sums the eight 4-bit fields of an ArrayTest value into a local
-- accumulator and returns the total.
test3 :: Def ('[Uint32] ':-> Uint32)
test3 = proc "test3" $ \x -> body $ do
  let d = fromRep x
  total <- local (ival 0)
  forBitArray_ (d #. at_4bits) $ \i -> do
    x' <- deref total
    let y = safeCast (toRep i)
    store total (x' + y)
  ret =<< deref total
-- | Extract the baud-rate field of an SPI_CR1 register word.
get_baud :: Def ('[Uint16] ':-> Uint8)
get_baud = proc "get_baud" $ \x -> body $ do
  let d = fromRep x
  ret (toRep (d #. spi_cr1_br))
-- | Examples from Ivory paper:
-- a UART control register with TX/RX enables and a baud-rate field,
-- positioned in the low bits by the explicit layout clause.
[ivory|
 bitdata CtrlReg :: Bits 8 = ctrl_reg
   { ctrl_tx_enable :: Bit
   , ctrl_rx_enable :: Bit
   , ctrl_baud_rate :: BaudRate
   } as 0b0000 # ctrl_tx_enable # ctrl_rx_enable # ctrl_baud_rate
|]
-- | The compiled module bundling all example procedures.
cmodule :: Module
cmodule = package "BitData" $ do
  incl get_baud
  incl test1
  incl test2
  incl test3
-- | Generate C for 'cmodule' with constant folding enabled, writing to
-- the default output location ('outDir' = Nothing).
main :: IO ()
main = runCompiler [cmodule] [] (initialOpts {outDir = Nothing, constFold = True})
| GaloisInc/ivory | ivory-examples/examples/BitData.hs | bsd-3-clause | 4,094 | 0 | 19 | 1,014 | 789 | 408 | 381 | 64 | 1 |
module Control.ConstraintClasses.KeyFoldableFunctor
(
-- * Constraint KeyFoldableFunctor
CKeyFoldableFunctor
) where
import Control.ConstraintClasses.Domain
import Control.ConstraintClasses.Key
import Control.ConstraintClasses.KeyFoldable
import Control.ConstraintClasses.KeyFunctor
import Data.Key
-- base
import Data.Functor.Product
import Data.Functor.Sum
import Data.Functor.Compose
-- vector
import qualified Data.Vector as Vector
import qualified Data.Vector.Storable as VectorStorable
import qualified Data.Vector.Unboxed as VectorUnboxed
--------------------------------------------------------------------------------
-- CLASS
--------------------------------------------------------------------------------
-- | Empty combining class: a type is a 'CKeyFoldableFunctor' exactly when
-- it is both a 'CKeyFunctor' and a 'CKeyFoldable'; no extra methods.
class (CKeyFunctor f, CKeyFoldable f) => CKeyFoldableFunctor f
--------------------------------------------------------------------------------
-- INSTANCES
--------------------------------------------------------------------------------
-- base
-- vector
-- All three vector flavours (boxed, storable, unboxed) qualify.
instance CKeyFoldableFunctor Vector.Vector
instance CKeyFoldableFunctor VectorStorable.Vector
instance CKeyFoldableFunctor VectorUnboxed.Vector
| guaraqe/constraint-classes | src/Control/ConstraintClasses/KeyFoldableFunctor.hs | bsd-3-clause | 1,137 | 0 | 6 | 94 | 145 | 93 | 52 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Visualize a force-directed network from connectivity.
module Web.Lightning.Plots.Force
(
ForcePlot(..)
, Visualization (..)
, forcePlot
)
where
--------------------------------------------------------------------------------
import Control.Monad.Reader
import Data.Aeson
import Data.Default.Class
import qualified Data.Text as T
import qualified Web.Lightning.Routes as R
import Web.Lightning.Types.Lightning
import Web.Lightning.Types.Visualization (Visualization (..))
import Web.Lightning.Utilities
--------------------------------------------------------------------------------
-- | Force plot parameters
-- All optional ('Maybe') fields are serialized only when present; see the
-- 'ToJSON' instance, which drops null entries.
data ForcePlot =
  ForcePlot { fpConn :: [[Double]]
              -- ^ Matrix that defines the connectivity of the plot. The
              -- dimensions of the matrix can be (n, n), (n, 2) or (n, 3).
              -- Matrix can be binary or continuous valued. Links should
              -- contain either 2 elements per link (source, target) or
              -- 3 elements (source, target, value).
            , fpValues :: Maybe [Double]
              -- ^ Values to set node colors via a linear scale.
            , fpLabels :: Maybe [T.Text]
              -- ^ List of text labels to set as tooltips.
            , fpColor :: Maybe [Int]
              -- ^ Single RGB value or list to set node colors.
            , fpGroup :: Maybe [Int]
              -- ^ Single integer or list to set node colors via groups.
            , fpColorMap :: Maybe T.Text
              -- ^ Specification of color map, only colorbrewer types supported.
            , fpSize :: Maybe [Int]
              -- ^ Single size or list to set node sizes.
            , fpToolTips :: Maybe Bool
              -- ^ Whether or not to show tooltips.
            , fpZoom :: Maybe Bool
              -- ^ Whether or not to allow zooming.
            , fpBrush :: Maybe Bool
              -- ^ Whether or not to support brushing.
            }
  deriving (Show, Eq)
-- | Defaults (positional): empty connectivity, no optional styling, and
-- tooltips, zoom, and brush all enabled ('Just' 'True').
instance Default ForcePlot where
  def = ForcePlot [[]] Nothing Nothing Nothing Nothing Nothing
                  Nothing (Just True) (Just True) (Just True)
-- | Serializes the plot for the lightning-viz API.  The connectivity
-- matrix is split into link and node lists; 'omitNulls' drops entries
-- whose value is 'Nothing'.
instance ToJSON ForcePlot where
  toJSON (ForcePlot conn vs lbs cs gs cm ss tt z b) =
    omitNulls [ "links" .= getLinks conn
              , "nodes" .= getNodes conn
              , "values" .= vs
              , "labels" .= lbs
              , "color" .= cs
              , "group" .= gs
              , "colormap" .= cm
              , "size" .= ss
              , "tooltips" .= tt
              , "zoom" .= z
              , "brush" .= b
              ]
-- | Validates the fields that have constraints (connectivity matrix,
-- color, color map, size); the remaining fields pass through unchanged.
instance ValidatablePlot ForcePlot where
  validatePlot (ForcePlot conn vl lbl c grp cm s tt z b) = do
    conn' <- validateConn conn
    c' <- validateColor c
    cm' <- validateColorMap cm
    s' <- validateSize s
    return $ ForcePlot conn' vl lbl c' grp cm' s' tt z b
-- | Submits a request to the specified lightning-viz server to create a
-- force-directed network visualization from connectivity.
--
-- <http://lightning-viz.org/visualizations/force/ Force-Directed Network Visualization>
forcePlot :: Monad m => ForcePlot
                     -- ^ Force plot to create.
          -> LightningT m Visualization
                     -- ^ Transformer stack with created visualization.
forcePlot forcePlt = do
  -- the server base URL is carried in the Reader environment
  url <- ask
  viz <- sendPlot "force" forcePlt R.plot
  -- record the base URL on the returned visualization handle
  return $ viz { vizBaseUrl = Just url }
| cmoresid/lightning-haskell | src/Web/Lightning/Plots/Force.hs | bsd-3-clause | 3,606 | 0 | 11 | 1,224 | 608 | 342 | 266 | 55 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Arbitrary instances for Delegation types.
module Test.Pos.Chain.Delegation.Arbitrary
( genDlgPayload
) where
import Universum
import qualified Data.HashMap.Strict as HM
import Test.QuickCheck (Arbitrary (..), Gen, listOf)
import Test.QuickCheck.Arbitrary.Generic (genericArbitrary,
genericShrink)
import Pos.Chain.Delegation (DlgPayload (..), DlgUndo (..),
HeavyDlgIndex (..), LightDlgIndices (..))
import Pos.Core (EpochIndex)
import Pos.Crypto (ProtocolMagic, ProxySecretKey (..), createPsk)
import Test.Pos.Core.Arbitrary ()
-- | Generate a 'DlgPayload' for the given epoch: a list of heavyweight
-- proxy secret keys, deduplicated by issuer public key (via the
-- intermediate 'HM.fromList').
genDlgPayload :: ProtocolMagic -> EpochIndex -> Gen DlgPayload
genDlgPayload pm epoch =
    UnsafeDlgPayload . toList . HM.fromList . map convert <$> listOf genPSK
  where
    convert psk = (pskIssuerPk psk, psk)
    genPSK = createPsk pm <$> arbitrary <*> arbitrary <*> pure (HeavyDlgIndex epoch)
-- | Delegates to 'genDlgPayload' with arbitrary magic and epoch.
instance Arbitrary DlgPayload where
    arbitrary = do
        pm <- arbitrary
        ei <- arbitrary
        genDlgPayload pm ei
    shrink = genericShrink
-- | Purely generic generation/shrinking.
instance Arbitrary DlgUndo where
    arbitrary = genericArbitrary
    shrink = genericShrink
-- | Wraps an arbitrary epoch index.
instance Arbitrary HeavyDlgIndex where
    arbitrary = HeavyDlgIndex <$> arbitrary
    shrink = genericShrink
-- | Generates a well-ordered pair of epoch indices (low, high).
instance Arbitrary LightDlgIndices where
    arbitrary = do
        a <- arbitrary
        b <- arbitrary
        pure $ LightDlgIndices (min a b, max a b)
    shrink = genericShrink
| input-output-hk/pos-haskell-prototype | chain/test/Test/Pos/Chain/Delegation/Arbitrary.hs | mit | 1,601 | 0 | 10 | 403 | 385 | 221 | 164 | 37 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{- |
Module : ./CASL/Amalgamability.hs
Description : Amalgamability analysis for CASL.
Copyright : (c) Maciek Makowski, Warsaw University 2004-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : till@informatik.uni-bremen.de
Stability : provisional
Portability : portable
Amalgamability analysis for CASL.
Follows the algorithm outlined in MFCS 2001 (LNCS 2136, pp. 451-463,
Springer 2001) paper.
-}
module CASL.Amalgamability
( CASLDiag
, ensuresAmalgamability
) where
import CASL.AS_Basic_CASL
import CASL.Morphism
import CASL.Sign
import Common.Amalgamate
import Common.Doc
import Common.DocUtils
import Common.Id
import Common.Result
import Data.Graph.Inductive.Graph
import Data.List
import Data.Maybe
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Graph as Tree
import qualified Common.Lib.Rel as Rel
import qualified Data.Map as Map
import qualified Data.Set as Set
-- Miscellaneous types
-- | A CASL diagram: nodes carry signatures, edges carry indexed morphisms.
type CASLDiag = Tree.Gr CASLSign (Int, CASLMor)
-- | A sort tagged with the diagram node it comes from (disjoint union).
type DiagSort = (Node, SORT)
-- | An operation symbol tagged with its diagram node.
type DiagOp = (Node, (Id, OpType))
-- | A predicate symbol tagged with its diagram node.
type DiagPred = (Node, (Id, PredType))
-- | An embedding at a node: (node, source sort, target sort).
type DiagEmb = (Node, SORT, SORT)
-- | A composition of embeddings.
type DiagEmbWord = [DiagEmb]
-- | equivalence classes are represented as lists of elements
type EquivClass a = [a]
-- | equivalence relations are represented as lists of equivalence classes
type EquivRel a = [EquivClass a]
-- | or, sometimes, as lists of pairs (element, equiv. class tag)
type EquivRelTagged a b = [(a, b)]
-- Pretty instance (for diagnostic output)
-- Shows the labelled nodes, and for edges only the morphism component
-- (the Int index is dropped).
instance (Pretty a, Pretty b) => Pretty (Tree.Gr a (Int, b)) where
    pretty diag =
        text "nodes:"
        <+> pretty (labNodes diag)
        $+$ text "edges:"
        <+> pretty (map (\ (_, _, label) -> snd label) $ labEdges diag)
-- | Look up a key that is expected to be present; calls 'error' with a
-- module-qualified message when it is missing.
findInMap :: Ord k => k -> Map.Map k a -> a
findInMap k m = fromMaybe (error "Amalgamability.findInMap") (Map.lookup k m)
{- | Compute the Sorts set -- a disjoint union of all the sorts
   in the diagram. -}
sorts :: CASLDiag -- ^ the diagram to get the sorts from
      -> [DiagSort]
-- The original built the list with a left fold over (++), which is
-- quadratic in the number of nodes; this comprehension produces the very
-- same list (nodes in 'labNodes' order, sorts in ascending 'Set.toList'
-- order) in linear time.
sorts diag =
    [ (n, srt) | (n, sig) <- labNodes diag, srt <- Set.toList $ sortSet sig ]
{- | Compute the Ops set -- a disjoint union of all the operation symbols
   in the diagram. -}
-- The per-node ordering of symbols follows the fold direction of
-- 'Map.foldWithKey' and 'Set.fold' combined with the trailing appends;
-- callers should not rely on a particular order.
-- NOTE(review): 'Map.foldWithKey' is deprecated in newer containers
-- releases in favour of 'Map.foldrWithKey' — confirm before migrating,
-- since the append pattern makes the result order fold-direction
-- sensitive.
ops :: CASLDiag -- ^ the diagram to get the ops from
    -> [DiagOp]
ops diag =
    let mkNodeOp n opId opType ol = ol ++ [(n, (opId, opType))]
        mkNodeOps n opId opTypes ol =
            ol ++ Set.fold (mkNodeOp n opId) [] opTypes
        appendOps ol (n, Sign { opMap = m }) =
            ol ++ Map.foldWithKey (mkNodeOps n) [] (MapSet.toMap m)
    in foldl appendOps [] (labNodes diag)
{- | Compute the Preds set -- a disjoint union of all the predicate symbols
   in the diagram. -}
-- Mirrors 'ops', but over the predicate map of each node's signature.
preds :: CASLDiag -- ^ the diagram to get the preds from
      -> [DiagPred]
preds diag =
    let mkNodePred n predId predType pl = pl ++ [(n, (predId, predType))]
        mkNodePreds n predId predTypes pl =
            pl ++ Set.fold (mkNodePred n predId) [] predTypes
        appendPreds pl (n, Sign { predMap = m }) =
            pl ++ Map.foldWithKey (mkNodePreds n) [] (MapSet.toMap m)
    in foldl appendPreds [] (labNodes diag)
{- | Convert the relation representation from list of pairs
   (val, equiv. class tag) to a list of equivalence classes. -}
-- First builds a map from every tag to an empty class, then prepends each
-- value to its tag's class; the classes come out in ascending tag order
-- ('Map.toList').
taggedValsToEquivClasses :: Ord b
    => EquivRelTagged a b -- ^ a list of (value,tag) pairs
    -> EquivRel a
taggedValsToEquivClasses [] = []
taggedValsToEquivClasses rel' =
    let {- prepMap: create a map with all the equivalence class tags mapped to
        empty lists -}
        prepMap =
            foldl (\ m k -> Map.insert (snd k) [] m) Map.empty
        -- conv: perform actual conversion
        convert [] m = map snd (Map.toList m)
        convert ((ds, ect) : dsps) m =
            let m' = Map.update (\ ec -> Just (ds : ec)) ect m
            in convert dsps m'
    in convert rel' (prepMap rel')
{- | Convert the relation representation from list of
   equivalence classes to list of (value, tag) pairs. -}
-- Each class is tagged with its own first element.  The original built the
-- result with a left fold over (++), quadratic in the number of classes;
-- 'concatMap' yields exactly the same list in linear time.
equivClassesToTaggedVals :: Ord a
                         => EquivRel a
                         -> EquivRelTagged a a
equivClassesToTaggedVals = concatMap eqClToList
    where eqClToList [] = []
          eqClToList eqcl@(ft : _) = map (\ x -> (x, ft)) eqcl
{- the old, n^3 version of mergeEquivClassesBy:
-- | Merge the equivalence classes for elements fulfilling given condition.
mergeEquivClassesBy :: Eq b
=> (a -> a -> Bool)
-- ^ the condition stating when two elements are in relation
-> EquivRelTagged a b -- ^ the input relation
-> EquivRelTagged a b
-- ^ returns the input relation with equivalence classes merged according to
-- the condition.
mergeEquivClassesBy cond rel =
-- Starting with the first element in the list an element (elem, tag) is taken
-- and cond is subsequently applied to it and all the elements
-- following it in the list. Whenever an element (elem', tag')
-- that is in relation with the chosen one is found, all the equivalence
-- class tags in the list that are equal to tag' are updated to tag.
let merge rel pos | pos >= length rel = rel
merge rel pos | otherwise =
let mergeWith cmpl _ [] = cmpl
mergeWith cmpl vtp@(elem, ec) toCmpl@((elem', ec') : _) =
let (cmpl', toCmpl') = if ec /= ec' && (cond elem elem')
then let upd (elem'', ec'') =
if ec'' == ec'
then (elem'', ec)
else (elem'', ec'')
in (map upd cmpl,
map upd toCmpl)
else (cmpl, toCmpl)
in mergeWith (cmpl' ++ [head toCmpl']) vtp (tail toCmpl')
(cmpl, (vtp : vtps)) = splitAt pos rel
rel' = mergeWith (cmpl ++ [vtp]) vtp vtps
in merge rel' (pos + 1)
in merge rel 0
-}
-- | Union-find style cell used while merging: a tag either owns its
-- equivalence class ('Eqcl') or points at another tag ('TagRef').
data TagEqcl a b = Eqcl [a] | TagRef b
                   deriving Show
-- | Merge the equivalence classes for elements fulfilling given condition.
mergeEquivClassesBy :: (Ord b)
    => (a -> a -> Bool)
    -- ^ the condition stating when two elements are in relation
    -> EquivRelTagged a b -- ^ the input relation
    -> EquivRelTagged a b
{- ^ returns the input relation with equivalence classes merged according to
the condition. -}
mergeEquivClassesBy cond rel =
    {- Starting with the first element in the list an element (elem,
    tag) is taken and cond is subsequently applied to it and all the
    elements following it in the list. Whenever an element (elem',
    tag') that is in relation with the chosen one is found, the
    equivalence classes in tagMap for tag and tag' are merged: tag in
    tagMap points to the merged equivalence class and tag' in tagMap
    is a reference to tag. -}
    -- This is the union-find replacement for the quadratic version kept in
    -- the comment above; 'TagEqcl' cells live in a Map keyed by tag.
    let -- create the initial map mapping tags to equivalence classes
        initialTagMap =
            let insEl tagMap (val, tag) =
                    Map.insertWith (++) tag [val] tagMap
            in Map.map Eqcl $ foldl insEl Map.empty rel
        -- merge equivalence classes tagged with t1 and t2
        mergeInMap inTagMap t1 t2 =
            let {- find the tag and equivalence class that corresponds
                to the given tag performing path compression while
                traversing the referneces. -}
                findEqcl t tagMap =
                    case findInMap t tagMap of
                    Eqcl eqcl -> (t, eqcl, tagMap)
                    TagRef t' -> let
                        (rt, eqcl, tagMap') = findEqcl t' tagMap
                        tagMap'' = if rt == t' then tagMap' else
                            Map.update (\ _ -> Just (TagRef rt)) t tagMap'
                        in (rt, eqcl, tagMap'')
                (rt1, eqcl1, tagMap1) = findEqcl t1 inTagMap
                (rt2, eqcl2, tagMap2) = findEqcl t2 tagMap1
            in if rt1 == rt2 then tagMap2
               else -- the smaller root tag wins; the larger becomes a ref
                    let (nrt1, nrt2) = if rt1 > rt2 then (rt2, rt1)
                                       else (rt1, rt2)
                        tagMap3 = Map.update
                            (\ _ -> Just (Eqcl (eqcl1 ++ eqcl2))) nrt1 tagMap2
                        tagMap4 = Map.update
                            (\ _ -> Just (TagRef nrt1)) nrt2 tagMap3
                    in tagMap4
        {- iterate through the relation merging equivalence classes of
        appropriate elements -}
        merge tagMap' rel' pos | pos >= length rel' = tagMap'
        merge tagMap' rel' pos =
            let mergeWith tagMap _ [] = tagMap
                mergeWith tagMap vtp@(elem1, ec) toCmpl@((elem2, ec') : _) =
                    let tagMap1 = if ec /= ec' && cond elem1 elem2
                                  then mergeInMap tagMap ec ec'
                                  else tagMap
                    in mergeWith tagMap1 vtp (tail toCmpl)
                (_, vtp' : vtps) = splitAt pos rel'
                tagMap'' = mergeWith tagMap' vtp' vtps
            in merge tagMap'' rel' (pos + 1)
        -- append given equivalence class to the list of (value, tag) pairs
        tagMapToRel rel' (_, TagRef _) = rel'
        tagMapToRel rel' (tag, Eqcl eqcl) =
            foldl (\ l v -> (v, tag) : l) rel' eqcl
        myTagMap = merge initialTagMap rel 0
    in foldl tagMapToRel [] (Map.toList myTagMap)
-- | Merge the equivalence classes for given tags.
--
-- Every element carrying the second tag is re-tagged with the first one;
-- when both tags coincide the relation is returned untouched.
mergeEquivClasses :: Eq b
  => EquivRelTagged a b
  -> b -- ^ tag 1
  -> b -- ^ tag 2
  -> EquivRelTagged a b
mergeEquivClasses rel tag1 tag2
  | tag1 == tag2 = rel
  | otherwise = map retag rel
  where
    retag pair@(el, tag)
      | tag == tag2 = (el, tag1)
      | otherwise = pair
{- | Return true if there is an edge between srcNode and targetNode
and the morphism with which it's labelled maps srcSort to targetSort -}
isMorphSort :: CASLDiag
  -> DiagSort
  -> DiagSort
  -> Bool
isMorphSort diag (srcNode, srcSort) (targetNode, targetSort) =
  any mapsAlongEdge (out diag srcNode)
  where
    -- does this outgoing edge reach targetNode and map the sort as required?
    mapsAlongEdge (sn, tn, (_, Morphism { sort_map = sm })) =
      sn == srcNode
        && tn == targetNode
        && mapSort sm srcSort == targetSort
{- | Return true if there is an edge between srcNode and targetNode
and the morphism with which it's labelled maps srcOp to targetOp -}
isMorphOp :: CASLDiag
  -> DiagOp
  -> DiagOp
  -> Bool
isMorphOp diag (srcNode, srcOp) (targetNode, targetOp) =
  any mapsAlongEdge (out diag srcNode)
  where
    -- does this outgoing edge reach targetNode and map the op symbol?
    mapsAlongEdge (sn, tn, (_, Morphism { sort_map = sm, op_map = fm })) =
      sn == srcNode
        && tn == targetNode
        && mapOpSym sm fm srcOp == targetOp
{- | Return true if there is an edge between srcNode and targetNode
and the morphism with which it's labelled maps srcPred to targetPred -}
isMorphPred :: CASLDiag
  -> DiagPred
  -> DiagPred
  -> Bool
isMorphPred diag (srcNode, srcPred) (targetNode, targetPred) =
  any mapsAlongEdge (out diag srcNode)
  where
    -- does this outgoing edge reach targetNode and map the predicate symbol?
    mapsAlongEdge (sn, tn, (_, Morphism { sort_map = sm, pred_map = pm })) =
      sn == srcNode
        && tn == targetNode
        && mapPredSym sm pm srcPred == targetPred
-- | Compute the simeq relation for given diagram.
--
-- The relation is built by tagging every diagram sort with itself and then
-- merging the classes of sorts connected (in either direction) by a
-- diagram morphism.
simeq :: CASLDiag -- ^ the diagram for which the relation should be created
  -> EquivRel DiagSort
  {- ^ returns the relation represented as a list of equivalence
  classes (each represented as a list of diagram ops) -}
simeq diag =
  let related x y = isMorphSort diag x y || isMorphSort diag y x
      tagged = [ (ds, ds) | ds <- sorts diag ]
  in taggedValsToEquivClasses (mergeEquivClassesBy related tagged)
-- | Compute the simeq^op relation for given diagram.
--
-- Analogous to 'simeq', but on diagram operation symbols.
simeqOp :: CASLDiag -- ^ the diagram for which the relation should be created
  -> EquivRel DiagOp
  {- ^ returns the relation represented as a list of equivalence
  classes (each represented as a list of diagram ops) -}
simeqOp diag =
  let related x y = isMorphOp diag x y || isMorphOp diag y x
      tagged = [ (dop, dop) | dop <- ops diag ]
  in taggedValsToEquivClasses (mergeEquivClassesBy related tagged)
-- | Compute the simeq^pred relation for given diagram.
--
-- Analogous to 'simeq', but on diagram predicate symbols.
simeqPred :: CASLDiag
  -- ^ the diagram for which the relation should be created
  -> EquivRel DiagPred
  {- ^ returns the relation represented as a list of equivalence
  classes (each represented as a list of diagram preds) -}
simeqPred diag =
  let related x y = isMorphPred diag x y || isMorphPred diag y x
      tagged = [ (dp, dp) | dp <- preds diag ]
  in taggedValsToEquivClasses (mergeEquivClassesBy related tagged)
-- | Compute the simeq_tau relation for given diagram.
simeqTau :: [(Node, CASLMor)]
  -> EquivRel DiagSort
simeqTau sink =
  let {- tagEdge: for given morphism m create a list of pairs
      (a, b) where a is DiagSort from the source signature that
      is mapped by m to b -}
      tagEdge (sn, Morphism { msource = src, sort_map = sm }) =
        map (\ ss -> ((sn, ss), mapSort sm ss))
          (Set.toList $ sortSet src)
      -- concatMap keeps the original ordering and avoids the quadratic
      -- left-fold (++) of the previous version
      rel = concatMap tagEdge sink
  in taggedValsToEquivClasses rel
-- | Compute the simeq^op_tau relation for given diagram.
simeqOpTau :: [(Node, CASLMor)]
  -> EquivRel DiagOp
simeqOpTau sink =
  let {- tagEdge: for given morphism m create a list of pairs
      (a, b) where a is DiagOp from the source signature that
      is mapped by m to b -}
      tagEdge (sn, Morphism { msource = src, sort_map = sm, op_map = fm }) =
        map (\ srcOp -> ((sn, srcOp), mapOpSym sm fm srcOp))
          (mapSetToList $ opMap src)
      -- concatMap keeps the original ordering and avoids the quadratic
      -- left-fold (++) of the previous version
      rel = concatMap tagEdge sink
  in taggedValsToEquivClasses rel
-- | Compute the simeq^pred_tau relation for given diagram.
simeqPredTau :: [(Node, CASLMor)]
  -> EquivRel DiagPred
simeqPredTau sink =
  let {- tagEdge: for given morphism m create a list of pairs
      (a, b) where a is DiagPred from the source signature that
      is mapped by m to b -}
      tagEdge (sn, Morphism { msource = src, sort_map = sm, pred_map = pm }) =
        map (\ srcPred -> ((sn, srcPred), mapPredSym sm pm srcPred))
          (mapSetToList $ predMap src)
      -- concatMap keeps the original ordering and avoids the quadratic
      -- left-fold (++) of the previous version
      rel = concatMap tagEdge sink
  in taggedValsToEquivClasses rel
{- | Check that one equivalence relation is a subset of another.
The relations are represented as a lists of equivalence classes,
where equivalence classes are lists of elements. -}
subRelation :: Eq a
  => EquivRel a -- ^ the relation that is supposed to be a subset
  -> EquivRel a -- ^ the relation that is supposed to be a superset
  -> Maybe (a, a)
  {- ^ returns a pair of elements that are in the same equivalence class of the
  first relation but are not in the same equivalence class of the second
  relation or Nothing the first relation is a subset of the second one. -}
subRelation [] _ = Nothing
subRelation ([] : rest) sup = subRelation rest sup
-- the empty-class case above should never occur in practice
subRelation (cls@(rep : _) : rest) sup =
  let -- the (first) superset class containing the representative, or []
      supClass = case filter (elem rep) sup of
        (c : _) -> c
        [] -> []
      -- first member of cls missing from that superset class, if any
      offender = find (`notElem` supClass) cls
  in case offender of
       Nothing -> subRelation rest sup
       Just bad -> Just (rep, bad)
-- | Compute the set of sort embeddings defined in the diagram.
embs :: CASLDiag
  -> [DiagEmb]
embs diag = concatMap nodeEmbs (labNodes diag)
  where
    -- all proper (irreflexive) subsort pairs declared at one node
    nodeEmbs (n, sig) =
      [ (n, s1, s2)
      | (s1, s2) <- Rel.toList (Rel.irreflex (sortRel sig)) ]
{- | Compute the set of sort embeddings (relations on sorts) defined
in the source nodes of the sink. -}
sinkEmbs :: CASLDiag -- ^ the diagram
  -> [(Node, CASLMor)] -- ^ the sink
  -> [DiagEmb]
sinkEmbs _ [] = []
sinkEmbs diag ((srcNode, _) : rest) =
  let (_, _, sig, _) = context diag srcNode
      pairs = Rel.toList . Rel.irreflex $ sortRel sig
  in [ (srcNode, s1, s2) | (s1, s2) <- pairs ]
       ++ sinkEmbs diag rest
-- | Check if the two given elements are in the given relation.
--
-- NOTE: two equal elements are only reported as related when the relation
-- has at least one class (this mirrors the historical behaviour of the
-- clause ordering: the empty relation always yields False).
inRel :: Eq a
  => EquivRel a -- ^ the relation
  -> a -- ^ the first element
  -> a -- ^ the second element
  -> Bool
inRel [] _ _ = False
inRel (cls : rest) a b
  | a == b = True
  | a `elem` cls = b `elem` cls
  | otherwise = inRel rest a b
{- | Check if two embeddings can occur subsequently in a word
given the simeq relation on sorts. -}
admissible :: EquivRel DiagSort -- ^ the \simeq relation
  -> DiagEmb -- ^ the first embedding
  -> DiagEmb -- ^ the second embedding
  -> Bool
admissible rel (n1, s1, _) (n2, _, s2) =
  -- the domain of the first must be equivalent to the codomain of the second
  inRel rel (n1, s1) (n2, s2)
{- | Compute the set of all the loopless, admissible
words over given set of embeddings. Paper section 6 -}
looplessWords :: [DiagEmb] -- ^ the embeddings
  -> EquivRel DiagSort
  -- ^ the \simeq relation that defines admissibility
  -> [DiagEmbWord]
looplessWords embs1 simeq1 =
  let {- generate the list of all loopless words over given alphabet
      with given suffix -}
      -- non-empty suffix: try every remaining letter as an extension;
      -- each letter is removed from the alphabet once used (looplessness)
      looplessWords' suff@(e : _) embs2 pos
        | pos >= length embs2 = [suff]
        | otherwise =
            let emb = embs2 !! pos
                embs' = embs2 \\ [emb]
                ws = if admissible simeq1 emb e
                     then looplessWords' (emb : suff) embs' 0
                     else []
            in ws ++ looplessWords' suff embs2 (pos + 1)
      -- empty suffix: start a fresh word with every available letter
      looplessWords' [] embs2 pos
        | pos >= length embs2 = []
        | otherwise =
            let emb = embs2 !! pos
                embs' = embs2 \\ [emb]
            in looplessWords' [emb] embs' 0 ++
               looplessWords' [] embs2 (pos + 1)
  in looplessWords' [] embs1 0
-- | Return the codomain of an embedding path.
--
-- Words are stored with the last-applied embedding first, so the codomain
-- comes from the head of the list.
wordCod :: DiagEmbWord
  -> DiagSort
wordCod [] = error "wordCod"
wordCod ((n, _, s2) : _) = (n, s2)
-- | Return the domain of an embedding path.
--
-- The first-applied embedding sits at the end of the list, so the domain
-- is taken from the last element.
wordDom :: DiagEmbWord
  -> DiagSort
wordDom [] = error "wordDom"
wordDom w = case last w of
  (n, s1, _) -> (n, s1)
-- | Find an equivalence class tag for given element.
--
-- A tagged relation is an association list keyed by the element, so this
-- is exactly Prelude 'lookup' with its arguments flipped.
findTag :: Eq a
  => EquivRelTagged a b
  -> a
  -> Maybe b
findTag rel w = lookup w rel
-- | Compute the left-cancellable closure of a relation on words.
--
-- For every pair of related words sharing a common prefix, the equivalence
-- classes of the corresponding suffixes are merged (w1 ~ w2 and l:w1' = w1,
-- l:w2' = w2 implies w1' ~ w2').
leftCancellableClosure :: EquivRelTagged DiagEmbWord DiagEmbWord
  -> EquivRelTagged DiagEmbWord DiagEmbWord
leftCancellableClosure rel1 =
  let {- checkPrefixes: for each common prefix of two given words
      merge the equivalence classes of the suffixes -}
      checkPrefixes [] _ rel = rel
      checkPrefixes _ [] rel = rel
      checkPrefixes w1@(l1 : suf1) w2@(l2 : suf2) rel
        | w1 == w2 = rel
        | l1 /= l2 = rel
        | otherwise =
            let tag1 = fromMaybe (error "checkPrefixes: tag1")
                  $ findTag rel suf1
                tag2 = fromMaybe (error "checkPrefixes: tag2")
                  $ findTag rel suf2
                -- merge the two suffix classes by retagging tag2 as tag1
                rel' = if tag1 == tag2 then rel
                       else let upd (w, t) | t == tag2 = (w, tag1)
                                           | otherwise = (w, t)
                            in map upd rel
            in checkPrefixes suf1 suf2 rel'
      -- iterateWord1: for each pair of related words call checkPrefixes
      iterateWord1 rel pos
        | pos >= length rel = rel
        | otherwise =
            let iterateWord2 wtp1@(w1, t1) rel2 pos2
                  | pos2 >= length rel2 = rel2
                  | otherwise =
                      let _wtp2@(w2, t2) = rel2 !! pos2
                          -- only pairs in the same class are considered
                          rel3 = if t1 == t2 then checkPrefixes w1 w2 rel2
                                 else rel2
                      in iterateWord2 wtp1 rel3 (pos2 + 1)
                wtp = rel !! pos
                rel' = iterateWord2 wtp rel 0
            in iterateWord1 rel' (pos + 1)
  in iterateWord1 rel1 0
{- | Compute the congruence closure of an equivalence R: two pairs of
elements (1, 3) and (2, 4) are chosen such that 1 R 2 and 3 R 4. It is
then checked that elements 1, 3 and 2, 4 are in relation supplied and
if so equivalence classes for (op 1 3) and (op 1 4) in R are merged.
This function should be applied to the relation until a fixpoint is
reached. -}
-- The four nested iterations enumerate, by position, the two related
-- pairs (w1, w2) and (w3, w4); only combinations passing 'check' trigger
-- a merge of the classes of (op w1 w3) and (op w2 w4).
congruenceClosure :: (Eq a, Eq b)
  => (a -> a -> Bool)
  -- ^ the check to be performed on elements 1, 3 and 2, 4
  -> (a -> a -> a)
  -- ^ the operation to be performed on elements 1, 3 and 2, 4
  -> EquivRelTagged a b
  -> EquivRelTagged a b
congruenceClosure check op rel =
  let -- iterateWord1
      iterateWord1 rel1 pos1
        | pos1 >= length rel1 = rel1
        | otherwise =
            let -- iterateWord2
                iterateWord2 wtp1@(_, t1) rel2 pos2
                  | pos2 >= length rel2 = rel2
                  | otherwise =
                      let -- iterateWord3
                          iterateWord3 wtp1'@(w1', _) wtp2' rel3 pos3
                            | pos3 >= length rel3 = rel3
                            | otherwise =
                                let -- iterateWord4
                                    iterateWord4 wtp1''@(w1, _) wtp2''@(w2, _) wtp3'@(w3, t3) rel4 pos4
                                      | pos4 >= length rel4 = rel4
                                      | otherwise =
                                          let (w4, t4) = rel4 !! pos4
                                              rel4' =
                                                if t3 /= t4 || not (check w2 w4) then rel4 else
                                                  let mct1 = findTag rel (op w1 w3)
                                                      mct2 = findTag rel (op w2 w4)
                                                  in case (mct1, mct2) of
                                                       (Nothing, _) -> rel4 -- w3w1 is not in the domain of rel
                                                       (_, Nothing) -> rel4 -- w4w2 is not in the domain of rel
                                                       (Just ct1, Just ct2) -> mergeEquivClasses rel4 ct1 ct2
                                          in iterateWord4 wtp1'' wtp2'' wtp3' rel4' (pos4 + 1)
                                    wtp3@(w3', _) = rel3 !! pos3
                                    rel3' = if check w1' w3' {- inRel here is usually much more efficient
                                            than findTag rel (w3 ++ w1) -}
                                            then iterateWord4 wtp1' wtp2' wtp3 rel3 0
                                            else rel3
                                in iterateWord3 wtp1' wtp2 rel3' (pos3 + 1)
                          wtp2@(_, t2) = rel2 !! pos2
                          rel2' = if t1 /= t2 then rel2 else iterateWord3 wtp1 wtp2 rel2 0
                      in iterateWord2 wtp1 rel2' (pos2 + 1)
                wtp = rel1 !! pos1
                rel' = iterateWord2 wtp rel1 0
            in iterateWord1 rel' (pos1 + 1)
  in iterateWord1 rel 0
-- | Compute the cong_tau relation for given diagram and sink.
congTau :: CASLDiag -- ^ the diagram
  -> [(Node, CASLMor)] -- ^ the sink
  -> EquivRel DiagSort -- ^ the \simeq_tau relation
  -> EquivRel DiagEmbWord
congTau diag sink st =
  let -- two words are merged when both their domains and their codomains
      -- are related by \simeq_tau
      sameEnds w1 w2 =
        inRel st (wordDom w1) (wordDom w2)
          && inRel st (wordCod w1) (wordCod w2)
      sinkWords = looplessWords (sinkEmbs diag sink) st
      tagged = [ (w, w) | w <- sinkWords ]
  in taggedValsToEquivClasses (mergeEquivClassesBy sameEnds tagged)
{- | Compute the finite representation of cong_0 relation for given diagram.
The representation consists only of equivalence classes that
contain more than one element. -}
cong0 :: CASLDiag
  -> EquivRel DiagSort -- ^ the \simeq relation
  -> EquivRel DiagEmbWord
-- Comp rule is not applied
cong0 diag simeq' =
  let -- diagRule: the Diag rule (only defined on one-letter words)
      diagRule [(n1, s11, s12)] [(n2, s21, s22)] =
        isMorphSort diag (n1, s11) (n2, s21)
          && isMorphSort diag (n1, s12) (n2, s22)
          || isMorphSort diag (n2, s21) (n1, s11)
          && isMorphSort diag (n2, s22) (n1, s12)
      diagRule _ _ = False
      -- addToRel: add given word to given relation
      -- NOTE(review): a word whose dom/cod matches no existing class is
      -- silently dropped (the [] case returns []) -- confirm this is the
      -- intended behaviour for the finite representation.
      addToRel [] _ = []
      addToRel ([] : _) _ = error "addToRel"
      addToRel (eqcl@(refw : _) : eqcls) w =
        if wordDom w == wordDom refw && wordCod w == wordCod refw
        then (w : eqcl) : eqcls
        else eqcl : addToRel eqcls w
      -- words2: generate all the admissible 2-letter words over given alphabet
      words2 _ [] _ = []
      words2 alph (_ : embs1) [] = words2 alph embs1 alph
      words2 alph embs1@(emb1 : _) (emb2 : embs2) =
        let ws = words2 alph embs1 embs2
        in if admissible simeq' emb1 emb2
           then [emb1, emb2] : ws else ws
      -- compute the relation
      em = embs diag
      rel = map (\ e -> ([e], [e])) em
      rel' = mergeEquivClassesBy diagRule rel
      rel'' = taggedValsToEquivClasses rel'
      w2s = words2 em em em
      rel''' = foldl addToRel rel'' w2s
  in rel'''
-- | Compute the set Adm_\simeq if it's finite.
--
-- Infinity is detected when extending a word would repeat an embedding
-- already present in the suffix: an admissible cycle means arbitrarily
-- long words exist, so Nothing is returned.
finiteAdmSimeq :: [DiagEmb] -- ^ the embeddings
  -> EquivRel DiagSort
  -- ^ the \simeq relation that defines admissibility
  -> Maybe [DiagEmbWord]
  -- ^ returns the computed set or Nothing if it's infinite
finiteAdmSimeq embs' simeq' =
  let {- generate the list of the words over given alphabet
      with given suffix -}
      embWords' suff@(e : _) embs1 pos
        | pos >= length embs1 = Just [suff]
        | otherwise =
            let emb = embs1 !! pos
                -- a repeated letter in an admissible extension => cycle
                mws1 = if admissible simeq' emb e
                       then if elem emb suff
                            then Nothing
                            else embWords' (emb : suff) embs1 0
                       else Just []
                -- Nothing propagates: the rest is only explored on success
                mws2 = case mws1 of
                  Nothing -> Nothing
                  Just _ -> embWords' suff embs1 (pos + 1)
            in case mws1 of
                 Nothing -> Nothing
                 Just ws1 -> case mws2 of
                   Nothing -> Nothing
                   Just ws2 -> Just (ws1 ++ ws2)
      embWords' [] embs1 pos | pos >= length embs1 = Just []
      embWords' [] embs1 pos =
        let emb = embs1 !! pos
            mws1 = embWords' [emb] embs1 0
            mws2 = case mws1 of
              Nothing -> Nothing
              Just _ -> embWords' [] embs1 (pos + 1)
        in case mws1 of
             Nothing -> Nothing
             Just ws1 -> case mws2 of
               Nothing -> Nothing
               Just ws2 -> Just (ws1 ++ ws2)
  in embWords' [] embs' 0
-- | Check if the colimit is thin.
colimitIsThin :: EquivRel DiagSort -- ^ the simeq relation
  -> [DiagEmb] -- ^ the set of diagram embeddings
  -> EquivRel DiagEmbWord -- ^ the cong_0 relation
  -> Bool
colimitIsThin simeq' embs' c0 =
  let -- sortsC: a list of colimit sorts (one representative per class)
      sortsC = foldl (\ ls eqcl -> head eqcl : ls) [] simeq'
      simeqT = equivClassesToTaggedVals simeq'
      -- ordMap: map representing the topological order on sorts in the colimit
      ordMap =
        let sortClasses' m [] = m
            sortClasses' m ((n, s1, s2) : embs1) =
              let c1 = fromMaybe (error "sortClasses:s1")
                    $ findTag simeqT (n, s1)
                  c2 = fromMaybe (error "sortClasses:s2")
                    $ findTag simeqT (n, s2)
              in sortClasses' (Map.update (Just . Set.insert c2) c1 m)
                   embs1
            ordMap' = foldl (\ m cl -> Map.insert cl Set.empty m)
                        Map.empty sortsC
        in sortClasses' ordMap' embs'
      -- larger: return a list of colimit sorts larger than given sort
      larger srt =
        let dl = Set.toList (findInMap srt ordMap)
        in srt : foldl (\ l so -> l ++ larger so) [] dl
      -- s: the map representing sets S_{\geq s1,s2}
      s = let compS m (s1, s2) =
                let ls1 = Set.fromList (larger s1)
                    ls2 = Set.fromList (larger s2)
                in Map.insert (s1, s2) (Set.intersection ls1 ls2) m
          in foldl compS Map.empty [(s1, s2) | s1 <- sortsC, s2 <- sortsC]
      -- b: the map representing sets B_{s1,s2}
      b = let compB m sp =
                let sim' s' s'' = not (Set.null (findInMap (s', s'') s))
                    rel = map (\ x -> (x, x)) (Set.toList (findInMap sp s))
                    rel' = mergeEquivClassesBy sim' rel
                in Map.insert sp (taggedValsToEquivClasses rel') m
          in foldl compB Map.empty [(s1, s2) | s1 <- sortsC, s2 <- sortsC]
      -- colimit sort of an embedding's domain / codomain
      embDomS (n, dom, _) = fromMaybe (error "embDomS")
        $ findTag simeqT (n, dom)
      embCodS (n, _, cod) = fromMaybe (error "embCodS")
        $ findTag simeqT (n, cod)
      -- checkAllSorts: check the C = B condition for all colimit sorts
      checkAllSorts m
        | Map.null m = {- trace "CT: Yes" -} True
        | otherwise =
            let -- checkSort: check if for given colimit sort C = B
                checkSort chs =
                  let embsCs = filter (\ e -> embDomS e == chs) embs'
                      c = foldl (\ ma ep -> Map.insert ep [] ma) Map.empty
                            [(d, e) | d <- embsCs, e <- embsCs]
                      c' =
                        let updC c1 (d, e) =
                              let s1 = embCodS d
                                  s2 = embCodS e
                              in Map.update (\ _ -> Just (findInMap (s1, s2) b)) (d, e) c1
                        in foldl updC c
                             [(d, e) | d <- embsCs, e <- embsCs, inRel c0 [d] [e]]
                      c'' =
                        let -- NOTE(review): the negative branch returns the
                            -- outer 'c' (the initial empty map) instead of the
                            -- accumulator 'c1', discarding earlier updates of
                            -- this fold -- looks suspicious; confirm intent.
                            updC c1 (d@(n1, _, cod1), e@(n2, _, cod2)) =
                              let s1 = embCodS d
                                  s2 = embCodS e
                              in if not (any (\ (n, dom, cod) -> (n, dom) == (n1, cod1)
                                               && (n, cod) == (n2, cod2)) embs')
                                 then c else
                                   let [absCls] = filter (elem s2) (findInMap (s1, s2) b)
                                   in foldl (flip $ Map.update (\ l -> Just
                                        (l ++ [absCls]))) c1 [(d, e), (e, d)]
                        in foldl updC c' [(d, e) | d <- embsCs,
                             e <- embsCs, wordDom [d] == wordDom [e]]
                      fixUpdRule cFix =
                        let updC c1 (e1, e2, e3) =
                              let updC' c2 (b12, b23, b13) =
                                    let sb12 = Set.fromList b12
                                        sb23 = Set.fromList b23
                                        sb13 = Set.fromList b13
                                        comm = Set.intersection sb12 (Set.intersection sb23 sb13)
                                    -- NOTE(review): after inserting b13 into
                                    -- (e1, e3) the second membership test is
                                    -- always true, so the (e3, e1) update
                                    -- below appears unreachable -- confirm.
                                    in if Set.null comm then c2 else
                                         let c2' = if elem b13 (findInMap (e1, e3) c2)
                                                   then c2
                                                   else Map.update (\ l -> Just (l ++ [b13])) (e1, e3) c2
                                         in if elem b13 (findInMap (e1, e3) c2')
                                            then c2'
                                            else Map.update (\ l -> Just (l ++ [b13])) (e3, e1) c2'
                                  s1 = embCodS e1
                                  s3 = embCodS e3
                              in foldl updC' c1 [ (b12, b23, b13)
                                                | b12 <- findInMap (e1, e2) c1
                                                , b23 <- findInMap (e2, e3) c1
                                                , b13 <- findInMap (s1, s3) b ]
                            cFix' = foldl updC cFix [(e1, e2, e3) |
                                      e1 <- embsCs, e2 <- embsCs, e3 <- embsCs]
                        in if cFix' == cFix then cFix else fixUpdRule cFix'
                      c3 = fixUpdRule c''
                      checkIncl [] = True
                      checkIncl ((e1, e2) : embprs) =
                        let s1 = embCodS e1
                            s2 = embCodS e2
                            res = isNothing (subRelation (findInMap (s1, s2) b)
                                    (findInMap (e1, e2) c3)) && checkIncl embprs
                        in res
                  in checkIncl [(e1, e2) | e1 <- embsCs, e2 <- embsCs]
                {- cs: next colimit sort to process
                m1: the order map with cs removed -}
                (cs, m1) =
                  let [(cs', _)] = take 1 (filter (\ (_, lt) -> Set.null lt)
                        (Map.toList m))
                      m' = Map.delete cs' m
                      m'' = foldl (flip $ Map.update (Just . Set.delete cs))
                              m' (Map.keys m')
                  in (cs', m'')
            in checkSort cs && checkAllSorts m1
  in checkAllSorts ordMap
{- the old, unoptimised version of cong:
-- | Compute the \cong relation given its (finite) domain
cong :: CASLDiag
-> [DiagEmbWord] -- ^ the Adm_\simeq set (the domain of \cong relation)
-> EquivRel DiagSort -- ^ the \simeq relation
-> EquivRel DiagEmbWord
cong diag adm simeq =
-- domCodEqual: check that domains and codomains of given words are equal
let domCodEqual w1 w2 =
wordDom w1 == wordDom w2 && wordCod w1 == wordCod w2
-- diagRule: the Diag rule
diagRule [(n1, s11, s12)] [(n2, s21, s22)] =
isMorphSort diag (n1, s11) (n2, s21) && isMorphSort diag (n1, s12)
(n2, s22) ||
isMorphSort diag (n2, s21) (n1, s11) && isMorphSort diag (n2, s22)
(n1, s12)
diagRule _ _ = False
-- compRule: the Comp rule works for words 1 and 2-letter long
-- with equal domains and codomains
compRule w1@[_] w2@[_, _] = domCodEqual w1 w2
compRule w1@[_, _] w2@[_] = domCodEqual w1 w2
compRule _ _ = False
-- fixCongLc: apply Cong and Lc rules until a fixpoint is reached
fixCongLc rel =
let rel' = (leftCancellableClosure . congruenceClosure simeq) rel
in if rel == rel' then rel else fixCongLc rel'
-- compute the relation
rel = map (\w -> (w, w)) adm
rel' = mergeEquivClassesBy diagRule rel
rel'' = mergeEquivClassesBy compRule rel'
rel''' = fixCongLc rel''
in taggedValsToEquivClasses rel'''
-}
{- | Compute the (optimised) \cong relation given its (finite) domain
and \sim relation. Optimised \cong is supposed to contain only words
composed of canonical embeddings; we also use a (CompDiag) rule
instead of (Comp) and (Diag) rules. -}
cong :: CASLDiag
  -> [DiagEmbWord] -- ^ the Adm_\simeq set (the domain of \cong relation)
  -> EquivRel DiagSort -- ^ the \simeq relation
  -> EquivRel DiagEmb -- ^ the \sim relation
  -> EquivRel DiagEmbWord
cong diag adm simeq' sim' =
  -- domCodEqual: check that domains and codomains of given words are equal
  -- (kept, together with _diagRule, for reference; both are unused here)
  let _domCodEqual w1 w2 =
        wordDom w1 == wordDom w2 && wordCod w1 == wordCod w2
      -- diagRule: the Diag rule
      _diagRule [(n1, s11, s12)] [(n2, s21, s22)] =
        isMorphSort diag (n1, s11) (n2, s21)
          && isMorphSort diag (n1, s12) (n2, s22)
          || isMorphSort diag (n2, s21) (n1, s11)
          && isMorphSort diag (n2, s22) (n1, s12)
      _diagRule _ _ = False
      -- compDiagRule: the combination of Comp and Diag rules
      compDiagRule w1@[_] w2@[_, _] = compDiagRule w2 w1
      compDiagRule [e1, e2] [d] =
        let findSim e3 = filter (\ l -> let e : _ = l in e == e3) sim'
            [ec1] = findSim e1
            [ec2] = findSim e2
            matches' [] = False
            matches' (((n1, _, s12), (n2, s21, _)) : eps) =
              n1 == n2 && inRel sim' d (n1, s21, s12)
                || matches' eps
        in matches' [(me1, me2) | me1 <- ec1, me2 <- ec2]
      compDiagRule _ _ = False
      -- fixCongLc: apply Cong and Lc rules until a fixpoint is reached
      fixCongLc rel1 =
        let rel2 = (leftCancellableClosure .
                    congruenceClosure (\ w1 w2 ->
                      inRel simeq' (wordCod w1) (wordDom w2))
                      (flip (++))) rel1
        in if rel1 == rel2 then rel1 else fixCongLc rel2
      -- compute the relation
      rel = map (\ w -> (w, w)) adm
      rel' = mergeEquivClassesBy compDiagRule rel
      rel'' = fixCongLc rel'
  in taggedValsToEquivClasses rel''
-- | Compute the \cong^R relation.
--
-- This is 'cong' restricted to loopless words built from canonical
-- embeddings only.
congR :: CASLDiag
  -> EquivRel DiagSort -- ^ the \simeq relation
  -> EquivRel DiagEmb -- ^ the \sim relation
  -> EquivRel DiagEmbWord
congR diag simeq' sim' =
  let canonicalWords = looplessWords (canonicalEmbs sim') simeq'
  in cong diag canonicalWords simeq' sim'
-- | Compute the \sim relation
--
-- Embeddings are first saturated under the Cong rule (composition of
-- composable, existing embeddings) and then merged under the Diag rule.
sim :: CASLDiag
  -> [DiagEmb]
  -> EquivRel DiagEmb
sim diag embs' =
  let -- diagRule: the Diag rule
      diagRule (n1, s11, s12) (n2, s21, s22) =
        isMorphSort diag (n1, s11) (n2, s21)
          && isMorphSort diag (n1, s12) (n2, s22)
          || isMorphSort diag (n2, s21) (n1, s11)
          && isMorphSort diag (n2, s22) (n1, s12)
      -- the check for congruenceClosure: composable at the same node and
      -- the composed embedding actually exists in the diagram
      check (p, s11, s12) (q, s21, s22) =
        not (p /= q || s12 /= s21) &&
        any (\ (n, s1, s2) -> n == p && s1 == s11 && s2 == s22) embs'
      -- the op for congruence closure: compose the two embeddings
      op (p, s1, _) (_, _, s2) = (p, s1, s2)
      -- fixCong: apply Cong rule until a fixpoint is reached
      fixCong rel1 =
        let rel2 = congruenceClosure check op rel1
        in if rel1 == rel2 then rel1 else fixCong rel2
      rel = map (\ e -> (e, e)) embs'
      rel' = fixCong rel
      rel'' = mergeEquivClassesBy diagRule rel'
  in taggedValsToEquivClasses rel''
-- | Compute the CanonicalEmbs(D) set given \sim relation.
--
-- One representative (the head) is taken from every \sim class; the
-- resulting list is in reverse class order, which callers do not rely on.
canonicalEmbs :: EquivRel DiagEmb
  -> [DiagEmb]
canonicalEmbs = foldl (\ acc (e : _) -> e : acc) []
{- | Convert given \cong_\tau relation to the canonical form
w.r.t. given \sim relation -}
canonicalCongTau :: EquivRel DiagEmbWord
  -> EquivRel DiagEmb
  -> EquivRel DiagEmbWord
canonicalCongTau ct sim' =
  let -- replace an embedding by the representative of its \sim class
      canonical e = case find (elem e) sim' of
        Just (ce : _) -> ce
        _ -> error "canonicalCongTau: embedding not in \\sim"
  in map (map (map canonical)) ct
-- | Convert a word to a list of sorts that are embedded.
--
-- The word stores the last embedding first, so the intermediate source
-- sorts are emitted in reverse, followed by the head's own source and
-- target sorts.
wordToEmbPath :: DiagEmbWord
  -> [SORT]
wordToEmbPath [] = []
wordToEmbPath ((_, s1, s2) : rest) =
  reverse [ s | (_, s, _) <- rest ] ++ [s1, s2]
-- | True iff the 'Cell' option occurs among the given options.
hasCellCaslAmalgOpt :: [CASLAmalgOpt] -> Bool
hasCellCaslAmalgOpt = any isCell
  where
    isCell Cell = True
    isCell _ = False
-- | True iff the 'ColimitThinness' option occurs among the given options.
hasColimitThinnessOpt :: [CASLAmalgOpt] -> Bool
hasColimitThinnessOpt = any isColimitThinness
  where
    isColimitThinness ColimitThinness = True
    isColimitThinness _ = False
-- | The amalgamability checking function for CASL.
--
-- The checks are layered from cheap to expensive; each failed inclusion
-- short-circuits with either a counterexample message or a DontKnow.
ensuresAmalgamability :: [CASLAmalgOpt] -- ^ program options
  -> CASLDiag -- ^ the diagram to be checked
  -> [(Node, CASLMor)] -- ^ the sink
  -> Tree.Gr String String
  -- ^ the diagram containing descriptions of nodes and edges
  -> Result Amalgamates
ensuresAmalgamability opts diag sink desc =
  if null opts then return (DontKnow "Skipping amalgamability check")
  else
    let -- aux. functions that help printing out diagnostics
        getNodeSig _ [] = emptySign () -- this should never be the case
        getNodeSig n ((n1, sig) : nss) = if n == n1 then sig else getNodeSig n nss
        lns = labNodes diag
        formatOp (idt, t) = showDoc idt " :" ++ showDoc t ""
        formatPred (idt, t) = showDoc idt " : " ++ showDoc t ""
        formatSig n = case find (\ (n', d) -> n' == n && d /= "") (labNodes desc) of
          Just (_, d) -> d
          Nothing -> showDoc (getNodeSig n lns) ""
        -- and now the relevant stuff
        s = simeq diag
        st = simeqTau sink
    {- 2. Check the inclusion (*). If it doesn't hold, the
    specification is incorrect. -}
    in case subRelation st s of
         Just (ns1, ns2) ->
           let sortString1 = showDoc (snd ns1) " in\n\n" ++ formatSig (fst ns1)
                 ++ "\n\n"
               sortString2 = showDoc (snd ns2) " in\n\n" ++ formatSig (fst ns2)
                 ++ "\n\n"
           in return (NoAmalgamation ("\nsorts " ++ sortString1
                ++ "and " ++ sortString2 ++ "might be different"))
         Nothing ->
           let sop = simeqOp diag
               sopt = simeqOpTau sink
           {- 2a. Check sharing of operations. If the check
           fails, the specification is incorrect -}
           in case subRelation sopt sop of
                Just (nop1, nop2) ->
                  let opString1 = formatOp (snd nop1) ++
                        " in\n\n" ++ formatSig (fst nop1) ++ "\n\n"
                      opString2 = formatOp (snd nop2) ++
                        " in\n\n" ++ formatSig (fst nop2) ++ "\n\n"
                  in return (NoAmalgamation ("\noperations "
                       ++ opString1 ++ "and " ++ opString2
                       ++ "might be different"))
                Nothing ->
                  let spred = simeqPred diag
                      spredt = simeqPredTau sink
                  {- 2b. Check sharing of predicates. If the
                  check fails, the specification is incorrect -}
                  in case subRelation spredt spred of
                       Just (np1, np2) ->
                         let pString1 = formatPred (snd np1) ++
                               " in\n\n" ++ formatSig (fst np1) ++ "\n\n"
                             pString2 = formatPred (snd np2) ++
                               " in\n\n" ++ formatSig (fst np2) ++ "\n\n"
                         in return (NoAmalgamation ("\npredicates "
                              ++ pString1 ++ "and " ++ pString2
                              ++ "might be different"))
                       Nothing ->
                         if not (hasCellCaslAmalgOpt opts
                                 || hasColimitThinnessOpt opts)
                         then return defaultDontKnow
                         else
                           let ct = congTau diag sink st
                               {- As we will be using a finite representation
                               of \cong_0 that may not contain some of the
                               equivalence classes with only one element
                               it's sufficient to check that the subrelation
                               ct0 of ct that has only non-reflexive
                               elements is a subrelation of \cong_0.
                               Section 4.1 in the paper -}
                               ct0 = -- trace ("ct:" ++ show ct ++ "\n") $
                                 filter (\ l -> length l > 1) ct
                               c0 = cong0 diag s
                           {- 3. Check the simple case: \cong_0 \in
                           \cong, so if \cong_\tau \in \cong_0 the
                           specification is correct. -}
                           in case subRelation ct0 c0 of
                                Nothing -> return Amalgamates
                                Just _ ->
                                  -- note: 'cem' refers to 'si' defined two
                                  -- lines below; laziness makes this legal
                                  let em = embs diag
                                      cem = canonicalEmbs si
                                      mas = finiteAdmSimeq cem s
                                      si = sim diag em
                                      cct = canonicalCongTau ct si
                                  -- 4. Check if the set Adm_\simeq is finite.
                                  in case mas of
                                       Just cas ->
                                         {- 5. check the colimit thinness. If
                                         the colimit is thin then the
                                         specification is correct. -}
                                         if hasColimitThinnessOpt opts && colimitIsThin s em c0
                                         then return Amalgamates
                                         else
                                           let c = cong diag cas s si
                                           {- c = cong diag as s
                                           6. Check the cell condition in its full generality. -}
                                           in if hasCellCaslAmalgOpt opts
                                              then case subRelation cct c of
                                                     Just (w1, w2) ->
                                                       let rendEmbPath [] = []
                                                           rendEmbPath (h : w) = foldl (\ t srt -> t ++ " < "
                                                               ++ showDoc srt "")
                                                             (showDoc h "") w
                                                           word1 = rendEmbPath (wordToEmbPath w1)
                                                           word2 = rendEmbPath (wordToEmbPath w2)
                                                       in return (NoAmalgamation ("embedding paths \n "
                                                            ++ word1 ++ "\nand\n " ++ word2
                                                            ++ "\nmight be different"))
                                                     Nothing -> return Amalgamates
                                              else return defaultDontKnow
                                       Nothing ->
                                         let cR = congR diag s si
                                         {- 7. Check the restricted cell condition. If it holds
                                         then the specification is correct. Otherwise proof
                                         obligations need to be generated. -}
                                         in if hasCellCaslAmalgOpt opts
                                            then case subRelation cct cR of
                                                   Just _ -> return defaultDontKnow
                                                   -- TODO: 8 generate proof obligations
                                                   Nothing -> return Amalgamates
                                            else return defaultDontKnow
| spechub/Hets | CASL/Amalgamability.hs | gpl-2.0 | 47,803 | 24 | 48 | 17,773 | 11,160 | 5,839 | 5,321 | 746 | 17 |
module Rewriting.DPO.TypedGraphRule.NacManipulationSpec where
import Data.Maybe (fromMaybe)
import Test.Hspec
import Abstract.Category
import Abstract.Category.FindMorphism
import Abstract.Rewriting.DPO
import Data.TypedGraph.Morphism
import Rewriting.DPO.TypedGraphRule.NacManipulation
import qualified XML.GGXReader as XML
-- | Grammar fixture exercised by this spec.
fileName :: String
fileName = "tests/grammars/NacManipulation.ggx"
-- | Morphism configuration used by the tests: restricts matches to monos.
dpoConf :: Category morph => MorphismsConfig morph
dpoConf = MorphismsConfig monic
-- | Top-level Hspec entry point of this module.
spec :: Spec
spec = context "NAC Manipulation Test" nacmanipTest
-- | Loads the fixture grammar and runs the NAC-manipulation assertions.
nacmanipTest :: Spec
nacmanipTest =
  it "create/delete the expected number of NACs" $ do
    (grammar, _, _) <- XML.readGrammar fileName False dpoConf
    checkNacManipulation grammar
-- | Checks if the NAC manipulations functions create/delete the
-- expected number of NACs
--
-- NOTE(review): 'gg' is presumably the grammar read from
-- NacManipulation.ggx -- confirm against the caller.
checkNacManipulation gg =
  do
    let find :: TypedGraphMorphism a b -> TypedGraphMorphism a b -> [TypedGraphMorphism a b]
        find x y = findAllMorphisms (codomain x) (codomain y)
        -- Creation
        creation_modeledNACs_rule = getRule "creation_modeledNACs" gg
        creation_concreteNACs_rule = getRule "creation_concreteNACs" gg
        -- first morphism between the two rules' left-hand sides
        match = head (find (leftMorphism creation_modeledNACs_rule) (leftMorphism creation_concreteNACs_rule))
        creation_modeledNACs = nacs creation_modeledNACs_rule
        createDisable = createStep DisableCreate match creation_modeledNACs
        createPO = createStep Pushout match creation_modeledNACs
        createShift = createStep ShiftNACs match creation_modeledNACs
        -- Deletion
        deletion_modeledNACs_rule = getRule "deletion_modeledNACs" gg
        deletion_concreteNACs_rule = getRule "deletion_concreteNACs" gg
        deletion_modeledNACs = nacs deletion_modeledNACs_rule
        deletion_concreteNACs = nacs deletion_concreteNACs_rule
        deleteDisable = deleteStep DisableDelete deletion_modeledNACs deletion_concreteNACs
        deleteMono = deleteStep Monomorphisms deletion_modeledNACs deletion_concreteNACs
        deleteIPO = deleteStep InitialPushouts deletion_modeledNACs deletion_concreteNACs
    -- expected NAC counts per creation/deletion strategy
    length createDisable `shouldBe` 0
    length createPO `shouldBe` 1
    length createShift `shouldBe` 3
    length deleteDisable `shouldBe` 3
    length deleteMono `shouldBe` 0
    length deleteIPO `shouldBe` 2
-- | Look up a production by name, failing with a descriptive error.
--
-- The previous error message was copy-pasted from another test module and
-- referred to "secondOrderMatchTest.ggx", which misled on failure.
getRule str gg =
  fromMaybe
    (error ("getRule: rule " ++ str ++ " is not in " ++ fileName))
    (lookup str (productions gg))
| rodrigo-machado/verigraph | tests/Rewriting/DPO/TypedGraphRule/NacManipulationSpec.hs | gpl-3.0 | 2,589 | 0 | 14 | 574 | 508 | 265 | 243 | 47 | 1 |
{-# LANGUAGE PackageImports #-}
import "12Factor" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
-- | Development entry point: start the Warp server on the port chosen by
-- 'getApplicationDev' in a background thread, then poll for the
-- terminate marker file in this thread.
main :: IO ()
main = do
    putStrLn "Starting devel application"
    (port, app) <- getApplicationDev
    -- Run the server off the main thread so 'loop' can watch for
    -- "dist/devel-terminate".
    forkIO $ runSettings defaultSettings
        { settingsPort = port
        } app
    loop
-- | Poll every 0.1 seconds for the devel-terminate marker file; shut
-- down as soon as it appears.
loop :: IO ()
loop =
    threadDelay 100000 >>
    doesFileExist "dist/devel-terminate" >>= \stopRequested ->
        if stopRequested then terminateDevel else loop
-- | Shut the devel process down cleanly.
terminateDevel :: IO ()
terminateDevel = exitSuccess
| beni55/12Factor | devel.hs | bsd-2-clause | 702 | 0 | 10 | 123 | 186 | 101 | 85 | 23 | 2 |
-- |
-- Module : Foundation.List.DList
-- License : BSD-style
-- Maintainer : Nicolas Di Prima <nicolas@primetype.co.uk>
-- Stability   : stable
-- Portability : portable
--
-- Data structure for optimised operations (append, cons, snoc) on list
--
module Foundation.List.DList
( DList
) where
import Basement.Compat.Base
import Basement.Compat.Semigroup
import Basement.Compat.Bifunctor
import Foundation.Collection
-- | A difference list: a list represented as a function that prepends
-- its elements, giving O(1) append, cons and snoc.
newtype DList a = DList { unDList :: [a] -> [a] }
  deriving (Typeable)
-- | Equality is decided on the materialised list.
instance Eq a => Eq (DList a) where
    dl1 == dl2 = toList dl1 == toList dl2

-- | Ordering is decided on the materialised list.
instance Ord a => Ord (DList a) where
    compare dl1 dl2 = toList dl1 `compare` toList dl2

-- | Shown as the materialised list.
instance Show a => Show (DList a) where
    show dl = show (toList dl)
instance IsList (DList a) where
    type Item (DList a) = a
    -- 'fromList xs' builds the prepend function (xs <>).
    fromList = DList . (Basement.Compat.Semigroup.<>)
    -- Materialise by applying the prepend function to the empty list.
    toList = flip unDList []
-- | Append is composition of the underlying prepend functions.
instance Semigroup (DList a) where
    DList f <> DList g = DList (f . g)

-- | The empty difference list is the identity function.
instance Monoid (DList a) where
    mempty = DList id
    mappend (DList f) (DList g) = DList (f . g)
instance Functor DList where
    -- Rebuild the list, consing the image of each element.
    fmap f = foldr (cons . f) mempty

instance Applicative DList where
    pure = singleton
    -- Defined through the Monad instance below.
    (<*>) m1 m2 = m1 >>= \x1 -> m2 >>= \x2 -> return (x1 x2)

instance Monad DList where
    -- Concatenate the lists produced for every element.
    (>>=) m k = foldr (mappend . k) mempty m
    return = singleton
type instance Element (DList a) = a

instance Foldable (DList a) where
    -- Folds go through the materialised list.
    foldr f b = foldr f b . toList
    foldl' f b = foldl' f b . toList

instance Collection (DList a) where
    -- All queries are delegated to the materialised list, so each is O(n).
    null = null . toList
    length = length . toList
    elem a = elem a . toList
    -- NOTE(review): 'nonEmpty_' is an unchecked conversion — maximum and
    -- minimum keep the usual non-empty precondition.
    maximum = maximum . nonEmpty_ . toList
    minimum = minimum . nonEmpty_ . toList
    all f = all f . toList
    any f = any f . toList
instance Sequential (DList a) where
    -- Most operations materialise the list, delegate to the list
    -- implementation and convert back; only cons, snoc and singleton
    -- work directly on the function representation (and are O(1)).
    take n = fromList . take n . toList
    revTake n = fromList . revTake n . toList
    drop n = fromList . drop n . toList
    revDrop n = fromList . revDrop n . toList
    splitAt n = bimap fromList fromList . splitAt n . toList
    splitOn f = fmap fromList . splitOn f . toList
    break f = bimap fromList fromList . break f . toList
    breakEnd f = bimap fromList fromList . breakEnd f . toList
    breakElem e = bimap fromList fromList . breakElem e . toList
    intersperse e = fromList . intersperse e . toList
    intercalate e = intercalate e . toList
    span f = bimap fromList fromList . span f . toList
    spanEnd f = bimap fromList fromList . spanEnd f . toList
    filter f = fromList . filter f . toList
    partition f = bimap fromList fromList . partition f . toList
    reverse = fromList . reverse . toList
    uncons dl = second fromList <$> uncons (toList dl)
    unsnoc dl = first fromList <$> unsnoc (toList dl)
    cons e dl = DList $ (:) e . unDList dl
    snoc dl e = DList $ unDList dl . (:) e
    find f = find f . toList
    sortBy comp = fromList . sortBy comp . toList
    singleton = DList . (:)
    replicate n e = fromList $ replicate n e
| vincenthz/hs-foundation | foundation/Foundation/List/DList.hs | bsd-3-clause | 3,001 | 0 | 12 | 755 | 1,147 | 581 | 566 | -1 | -1 |
module L10.Compose where
import Control.Applicative
-- Exactly one of these exercises will not be possible to achieve.
-- | Composition of two type constructors: @Compose f g a@ wraps @f (g a)@.
newtype Compose f g a =
  Compose (f (g a))
-- Exercise 1
-- Implement a Functor instance for Compose
instance (Functor f, Functor g) =>
  Functor (Compose f g) where
  -- Exercise 1: map under both layers (hint: fmap . fmap).
  fmap f (Compose k) =
    error "todo"

instance (Applicative f, Applicative g) =>
  Applicative (Compose f g) where
-- Exercise 2
-- Implement the pure function for an Applicative instance for Compose
  pure =
    error "todo"
-- Exercise 3
-- Implement the (<*>) function for an Applicative instance for Compose
  Compose f <*> Compose a =
    error "todo"

instance (Monad f, Monad g) =>
  Monad (Compose f g) where
-- Exercise 4
-- Implement the return function for a Monad instance for Compose
  return =
    error "todo"
-- Exercise 5
-- Implement the (>>=) function for a Monad instance for Compose
-- NOTE(review): per the module header, exactly one exercise is impossible;
-- this bind is the likely candidate, as monads do not compose in general.
  Compose a >>= f =
    error "todo"
| juretta/course | src/L10/Compose.hs | bsd-3-clause | 939 | 0 | 9 | 205 | 210 | 112 | 98 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Prelude hiding ( mod, id, mapM )
import GHC
--import Packages
import HscTypes ( isBootSummary )
import Digraph ( flattenSCCs )
import DriverPhases ( isHaskellSrcFilename )
import HscTypes ( msHsFilePath )
import Name ( getOccString )
--import ErrUtils ( printBagOfErrors )
import Panic ( panic )
import DynFlags ( defaultFatalMessager, defaultFlushOut )
import Bag
import Exception
import FastString
import MonadUtils ( liftIO )
import SrcLoc
import Distribution.Simple.GHC ( componentGhcOptions )
import Distribution.Simple.Configure ( getPersistBuildConfig )
import Distribution.Simple.Program.GHC ( renderGhcOptions )
import Distribution.PackageDescription ( library, libBuildInfo )
import Distribution.Simple.LocalBuildInfo
import qualified Distribution.Verbosity as V
import Control.Monad hiding (mapM)
import System.Environment
import System.Console.GetOpt
import System.Exit
import System.IO
import Data.List as List hiding ( group )
import Data.Traversable (mapM)
import Data.Map ( Map )
import qualified Data.Map as M
--import UniqFM
--import Debug.Trace
-- search for definitions of things
-- we do this by parsing the source and grabbing top-level definitions
-- We generate both CTAGS and ETAGS format tags files
-- The former is for use in most sensible editors, while EMACS uses ETAGS
----------------------------------
---- CENTRAL DATA TYPES ----------
type FileName = String

type ThingName = String -- name of a defined entity in a Haskell program

-- A definition we have found (we know its containing module, name, and location)
data FoundThing = FoundThing ModuleName ThingName RealSrcLoc

-- Data we have obtained from a file: the things we found, plus the
-- file's source lines keyed by line number (used for ETAGS output).
data FileData = FileData FileName [FoundThing] (Map Int String)

--- invariant (not checked): every found thing has a source location in that file?
------------------------------
-------- MAIN PROGRAM --------
-- | Parse command-line flags, open the requested tags file(s), start a
-- GHC session and emit tags for every named Haskell source file.
main :: IO ()
main = do
    progName <- getProgName
    let usageString =
          "Usage: " ++ progName ++ " [OPTION...] [-- GHC OPTION... --] [files...]"
    args <- getArgs
    -- Arguments between "--" pairs go straight to GHC.
    let (ghcArgs', ourArgs, unbalanced) = splitArgs args
    let (flags, filenames, errs) = getOpt Permute options ourArgs
    let (hsfiles, otherfiles) = List.partition isHaskellSrcFilename filenames
    let ghc_topdir = case [ d | FlagTopDir d <- flags ] of
                       [] -> ""
                       (x:_) -> x
    mapM_ (\n -> putStr $ "Warning: ignoring non-Haskellish file " ++ n ++ "\n")
          otherfiles
    -- Bail out with a usage message on bad or missing arguments.
    if unbalanced || errs /= [] || elem FlagHelp flags || hsfiles == []
      then do
        putStr $ unlines errs
        putStr $ usageInfo usageString options
        exitWith (ExitFailure 1)
      else return ()
    -- Optionally prepend the GHC options Cabal would use for this package.
    ghcArgs <- case [ d | FlagUseCabalConfig d <- flags ] of
                 [distPref] -> do
                    cabalOpts <- flagsFromCabal distPref
                    return (cabalOpts ++ ghcArgs')
                 [] ->
                    return ghcArgs'
                 _ -> error "Too many --use-cabal-config flags"
    print ghcArgs
    let modes = getMode flags
    let openFileMode = if elem FlagAppend flags
                       then AppendMode
                       else WriteMode
    -- Open "tags" and/or "TAGS" depending on which formats were requested.
    ctags_hdl <- if CTags `elem` modes
                 then Just `liftM` openFile "tags" openFileMode
                 else return Nothing
    etags_hdl <- if ETags `elem` modes
                 then Just `liftM` openFile "TAGS" openFileMode
                 else return Nothing
    GHC.defaultErrorHandler defaultFatalMessager defaultFlushOut $
      runGhc (Just ghc_topdir) $ do
        --liftIO $ print "starting up session"
        dflags <- getSessionDynFlags
        (pflags, unrec, warns) <- parseDynamicFlags dflags{ verbosity=1 }
                                          (map noLoc ghcArgs)
        unless (null unrec) $
          liftIO $ putStrLn $ "Unrecognised options:\n" ++ show (map unLoc unrec)
        liftIO $ mapM_ putStrLn (map unLoc warns)
        -- HscNothing: we only need the AST, no code generation.
        let dflags2 = pflags { hscTarget = HscNothing } -- don't generate anything
        -- liftIO $ print ("pkgDB", case (pkgDatabase dflags2) of Nothing -> 0
        --                                                        Just m  -> sizeUFM m)
        _ <- setSessionDynFlags dflags2
        --liftIO $ print (length pkgs)
        GHC.defaultCleanupHandler dflags2 $ do
          targetsAtOneGo hsfiles (ctags_hdl,etags_hdl)
          mapM_ (mapM (liftIO . hClose)) [ctags_hdl, etags_hdl]
----------------------------------------------
---------- ARGUMENT PROCESSING --------------
data Flag
  = FlagETags
  | FlagCTags
  | FlagBoth
  | FlagAppend
  | FlagHelp
  | FlagTopDir FilePath
  | FlagUseCabalConfig FilePath
  | FlagFilesFromCabal
  deriving (Ord, Eq, Show)
  -- ^Represents options passed to the program

-- | Which tags-file format(s) to emit.
data Mode = ETags | CTags deriving Eq
-- | Work out which tag formats to produce. With no format flag (or with
-- --both) we emit both; duplicate requests collapse to one.
getMode :: [Flag] -> [Mode]
getMode fs =
  case nub (concatMap asModes fs) of
    []  -> [ETags, CTags]
    [m] -> [m]
    ms  -> ms
  where
    asModes FlagETags = [ETags]
    asModes FlagCTags = [CTags]
    asModes FlagBoth  = [ETags, CTags]
    asModes _         = []
-- | Separate GHC's arguments from our own. Every "--" swaps the two
-- accumulators, so text inside "-- ... --" brackets lands in the GHC
-- list; the Bool reports an unbalanced (odd) number of "--" markers.
splitArgs :: [String] -> ([String], [String], Bool)
splitArgs = go [] [] False
  where
    go accA accB flipped ("--" : rest) = go accB accA (not flipped) rest
    go accA accB flipped (arg : rest)  = go accA (arg : accB) flipped rest
    go accA accB flipped []            = (reverse accA, reverse accB, flipped)
-- | Command-line option table for System.Console.GetOpt.
options :: [OptDescr Flag]
-- supports getopt
options = [ Option "" ["topdir"]
            (ReqArg FlagTopDir "DIR") "root of GHC installation (optional)"
          , Option "c" ["ctags"]
            (NoArg FlagCTags) "generate CTAGS file (ctags)"
          , Option "e" ["etags"]
            (NoArg FlagETags) "generate ETAGS file (etags)"
          , Option "b" ["both"]
            (NoArg FlagBoth) ("generate both CTAGS and ETAGS")
          , Option "a" ["append"]
            (NoArg FlagAppend) ("append to existing CTAGS and/or ETAGS file(s)")
          , Option "" ["use-cabal-config"]
            (ReqArg FlagUseCabalConfig "DIR") "use local cabal configuration from dist dir"
          , Option "" ["files-from-cabal"]
            (NoArg FlagFilesFromCabal) "use files from cabal"
          , Option "h" ["help"] (NoArg FlagHelp) "This help"
          ]
-- | Read the saved Cabal configuration from the given dist directory and
-- render the GHC options Cabal would use for the library component.
-- Fails with 'error' for packages without a library.
flagsFromCabal :: FilePath -> IO [String]
flagsFromCabal distPref = do
  lbi <- getPersistBuildConfig distPref
  let pd = localPkgDescr lbi
      -- Pick out the library component's build configuration, if any.
      findLibraryConfig [] = Nothing
      findLibraryConfig ((CLibName, clbi, _) : _) = Just clbi
      findLibraryConfig (_ : xs) = findLibraryConfig xs
      mLibraryConfig = findLibraryConfig (componentsConfigs lbi)
  case (library pd, mLibraryConfig) of
    (Just lib, Just clbi) ->
      let bi = libBuildInfo lib
          odir = buildDir lbi
          opts = componentGhcOptions V.normal lbi bi clbi odir
      in return $ renderGhcOptions (compiler lbi) opts
    _ -> error "no library"
----------------------------------------------------------------
--- LOADING HASKELL SOURCE
--- (these bits actually run the compiler and produce abstract syntax)
safeLoad :: LoadHowMuch -> Ghc SuccessFlag
-- like GHC.load, but does not stop process on exception
safeLoad mode = do
    _dflags <- getSessionDynFlags
    -- Catch both ordinary exceptions and GHC source errors, printing
    -- them and returning Failed instead of propagating.
    ghandle (\(e :: SomeException) -> liftIO (print e) >> return Failed ) $
      handleSourceError (\e -> printException e >> return Failed) $
        load mode
targetsAtOneGo :: [FileName] -> (Maybe Handle, Maybe Handle) -> Ghc ()
-- load a list of targets
targetsAtOneGo hsfiles handles = do
    targets <- mapM (\f -> guessTarget f Nothing) hsfiles
    setTargets targets
    modgraph <- depanal [] False
    -- Visit modules in dependency order.
    let mods = flattenSCCs $ topSortModuleGraph False modgraph Nothing
    graphData mods handles

fileTarget :: FileName -> Target
-- NOTE(review): not referenced anywhere in this file's visible code.
fileTarget filename = Target (TargetFile filename Nothing) True Nothing
---------------------------------------------------------------
----- CRAWLING ABSTRACT SYNTAX TO SNAFFLE THE DEFINITIONS -----
graphData :: ModuleGraph -> (Maybe Handle, Maybe Handle) -> Ghc ()
-- Parse, typecheck and load each module summary, then emit tags from
-- its renamed source.
graphData graph handles = do
    mapM_ foundthings graph
  where foundthings ms
          -- NOTE(review): pattern-guard assumes msHsFilePath yields a
          -- Maybe in this GHC version — modules without a source file
          -- are skipped.
          | Just filename <- msHsFilePath ms =
            let modname = moduleName $ ms_mod ms
            in handleSourceError (\e -> do
                                    printException e
                                    liftIO $ exitWith (ExitFailure 1)) $
               do liftIO $ putStrLn ("loading " ++ filename)
                  mod <- loadModule =<< typecheckModule =<< parseModule ms
                  case mod of
                    -- hs-boot files carry nothing worth tagging.
                    _ | isBootSummary ms -> return ()
                    _ | Just s <- renamedSource mod ->
                      liftIO (writeTagsData handles =<< fileData filename modname s)
                    _otherwise ->
                      liftIO $ exitWith (ExitFailure 1)
          | otherwise = return ()
-- | Build a 'FileData' for one module: its top-level definitions plus a
-- map from line number to source line (needed for ETAGS output).
fileData :: FileName -> ModuleName -> RenamedSource -> IO FileData
fileData filename modname (group, _imports, _lie, _doc) = do
    -- lie is related to type checking and so is irrelevant
    -- imports contains import declarations and no definitions
    -- doc and haddock seem haddock-related; let's hope to ignore them
    ls <- lines `fmap` readFile filename
    let line_map = M.fromAscList $ zip [1..] ls
    -- NOTE(review): 'evaluate' forces only the Map to WHNF, and
    -- 'readFile' is lazy I/O, so the handle may stay open until the map
    -- is fully demanded — confirm whether this matters in practice.
    line_map' <- evaluate line_map
    return $ FileData filename (boundValues modname group) line_map'
boundValues :: ModuleName -> HsGroup Name -> [FoundThing]
-- ^Finds all the top-level definitions in a module: value bindings,
-- type/class declaration binders, and foreign imports.
boundValues mod group =
  let vals = case hs_valds group of
               ValBindsOut nest _sigs ->
                   [ x | (_rec, binds) <- nest
                       , bind <- bagToList binds
                       , x <- boundThings mod bind ]
               _other -> error "boundValues"
      tys = [ n | ns <- map hsLTyClDeclBinders (tyClGroupConcat (hs_tyclds group))
                , n <- map found ns ]
      -- Foreign imports bind a Haskell name; foreign exports do not.
      fors = concatMap forBound (hs_fords group)
        where forBound lford = case unLoc lford of
                                 ForeignImport n _ _ _ -> [found n]
                                 ForeignExport { } -> []
  in vals ++ tys ++ fors
  where found = foundOfLName mod
-- | Start location of a located thing; panics on an 'UnhelpfulSpan'
-- (i.e. when there is no real source location).
startOfLocated :: Located a -> RealSrcLoc
startOfLocated lHs = case getLoc lHs of
    RealSrcSpan l -> realSrcSpanStart l
    UnhelpfulSpan _ -> panic "startOfLocated UnhelpfulSpan"

-- | Make a 'FoundThing' from a located name.
foundOfLName :: ModuleName -> Located Name -> FoundThing
foundOfLName mod id = FoundThing mod (getOccString $ unLoc id) (startOfLocated id)
-- | Names bound by a single binding: the function name, every variable
-- bound inside a pattern binding, or a pattern-synonym name.
boundThings :: ModuleName -> LHsBind Name -> [FoundThing]
boundThings modname lbinding =
  case unLoc lbinding of
    FunBind { fun_id = id } -> [thing id]
    PatBind { pat_lhs = lhs } -> patThings lhs []
    VarBind { var_id = id } -> [FoundThing modname (getOccString id) (startOfLocated lbinding)]
    AbsBinds { } -> [] -- nothing interesting in a type abstraction
    PatSynBind PSB{ psb_id = id } -> [thing id]
  where thing = foundOfLName modname
        -- Walk a pattern, consing every variable it binds onto 'tl'.
        patThings lpat tl =
          let loc = startOfLocated lpat
              lid id = FoundThing modname (getOccString id) loc
          in case unLoc lpat of
               WildPat _ -> tl
               VarPat name -> lid name : tl
               LazyPat p -> patThings p tl
               AsPat id p -> patThings p (thing id : tl)
               ParPat p -> patThings p tl
               BangPat p -> patThings p tl
               ListPat ps _ _ -> foldr patThings tl ps
               TuplePat ps _ _ -> foldr patThings tl ps
               PArrPat ps _ -> foldr patThings tl ps
               ConPatIn _ conargs -> conArgs conargs tl
               ConPatOut{ pat_args = conargs } -> conArgs conargs tl
               LitPat _ -> tl
               NPat _ _ _ -> tl -- form of literal pattern?
               NPlusKPat id _ _ _ -> thing id : tl
               SigPatIn p _ -> patThings p tl
               SigPatOut p _ -> patThings p tl
               _ -> error "boundThings"
        -- Constructor arguments: prefix, record, or infix form.
        conArgs (PrefixCon ps) tl = foldr patThings tl ps
        conArgs (RecCon (HsRecFields { rec_flds = flds })) tl
          = foldr (\(L _ f) tl' -> patThings (hsRecFieldArg f) tl') tl flds
        conArgs (InfixCon p1 p2) tl = patThings p1 $ patThings p2 tl
-- stuff for dealing with ctags output format
-- | Write one file's tags to whichever of the CTAGS/ETAGS handles are open.
writeTagsData :: (Maybe Handle, Maybe Handle) -> FileData -> IO ()
writeTagsData (mb_ctags_hdl, mb_etags_hdl) fd = do
    case mb_ctags_hdl of
        Nothing  -> return ()
        Just hdl -> writectagsfile hdl fd
    case mb_etags_hdl of
        Nothing  -> return ()
        Just hdl -> writeetagsfile hdl fd
-- | Emit CTAGS entries: first every name unqualified, then each again
-- qualified by its module.
writectagsfile :: Handle -> FileData -> IO ()
writectagsfile ctagsfile filedata = do
    let things = getfoundthings filedata
        emit qualified thing = hPutStrLn ctagsfile (dumpthing qualified thing)
    mapM_ (emit False) things
    mapM_ (emit True) things

-- | The tags collected for one file.
getfoundthings :: FileData -> [FoundThing]
getfoundthings (FileData _filename things _src_lines) = things
-- | One CTAGS line: name, file, and line number, tab-separated. When
-- 'showmod' is set the name is qualified with its module.
dumpthing :: Bool -> FoundThing -> String
dumpthing showmod (FoundThing modname name loc) =
    concat [fullname, "\t", filename, "\t", show line]
  where
    line = srcLocLine loc
    filename = unpackFS (srcLocFile loc)
    fullname
      | showmod   = moduleNameString modname ++ "." ++ name
      | otherwise = name
-- stuff for dealing with etags output format
-- | Emit the ETAGS section for one file.
writeetagsfile :: Handle -> FileData -> IO ()
writeetagsfile etagsfile = hPutStr etagsfile . e_dumpfiledata
-- | ETAGS section for one file: the "\x0c" section marker, then a header
-- of "filename,<byte count of the entries>", then the entries themselves.
e_dumpfiledata :: FileData -> String
e_dumpfiledata (FileData filename things line_map) =
  "\x0c\n" ++ filename ++ "," ++ (show thingslength) ++ "\n" ++ thingsdump
  where
    thingsdump = concatMap (e_dumpthing line_map) things
    -- The header records the length in bytes of the entry block.
    thingslength = length thingsdump
-- | Two ETAGS entries per name: one unqualified, one module-qualified.
-- "\x7f" and "\x01" are the ETAGS field separators.
e_dumpthing :: Map Int String -> FoundThing -> String
e_dumpthing src_lines (FoundThing modname name loc) =
    tagline name ++ tagline (moduleNameString modname ++ "." ++ name)
  where tagline n = src_code ++ "\x7f"
                    ++ n ++ "\x01"
                    ++ (show line) ++ "," ++ (show $ column) ++ "\n"
        line = srcLocLine loc
        column = srcLocCol loc
        -- Source text up to and past the name, for the editor to match on;
        -- fall back to the bare name when the line is missing from the map.
        src_code = case M.lookup line src_lines of
            Just l -> take (column + length name) l
            Nothing -> --trace (show ("not found: ", moduleNameString modname, name, line, column))
                       name
| ml9951/ghc | utils/ghctags/Main.hs | bsd-3-clause | 14,667 | 0 | 20 | 4,251 | 3,935 | 1,992 | 1,943 | 274 | 23 |
-- These types follow the format of Twitter search results, as can be
-- found in the benchmarks/json-data directory.
--
-- For uses of these types, see the Twitter subdirectory.
--
-- There is one deviation for the sake of convenience: the Geo field
-- named "type_" is really named "type" in Twitter's real feed. I
-- renamed "type" to "type_" in the *.json files, to avoid overlap
-- with a Haskell reserved keyword.
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Twitter
(
Metadata(..)
, Geo(..)
, Story(..)
, Result(..)
) where
import Prelude ()
import Prelude.Compat
import Control.DeepSeq
import Data.Data (Typeable, Data)
import Data.Int (Int64)
import Data.Text (Text)
import GHC.Generics (Generic)
import Prelude hiding (id)
-- | Search metadata attached to each story.
data Metadata = Metadata {
    result_type :: Text
  } deriving (Eq, Show, Typeable, Data, Generic)

instance NFData Metadata

-- | Geotag. The field is called "type_" here but "type" in Twitter's
-- real feed (renamed to avoid the Haskell keyword; see module header).
data Geo = Geo {
    type_       :: Text
  , coordinates :: (Double, Double)
  } deriving (Eq, Show, Typeable, Data, Generic)

instance NFData Geo

-- | A single tweet ("story") from the search results.
data Story = Story {
    from_user_id_str  :: Text
  , profile_image_url :: Text
  , created_at        :: Text -- ZonedTime
  , from_user         :: Text
  , id_str            :: Text
  , metadata          :: Metadata
  , to_user_id        :: Maybe Int64
  , text              :: Text
  , id_               :: Int64
  , from_user_id      :: Int64
  , geo               :: Maybe Geo
  , iso_language_code :: Text
  , to_user_id_str    :: Maybe Text
  , source            :: Text
  } deriving (Show, Typeable, Data, Generic)

instance NFData Story

-- | One page of search results plus its paging bookkeeping.
data Result = Result {
    results          :: [Story]
  , max_id           :: Int64
  , since_id         :: Int64
  , refresh_url      :: Text
  , next_page        :: Text
  , results_per_page :: Int
  , page             :: Int
  , completed_in     :: Double
  , since_id_str     :: Text
  , max_id_str       :: Text
  , query            :: Text
  } deriving (Show, Typeable, Data, Generic)

instance NFData Result
| tolysz/prepare-ghcjs | spec-lts8/aeson/examples/Twitter.hs | bsd-3-clause | 2,032 | 0 | 9 | 586 | 448 | 275 | 173 | 56 | 0 |
-- (c) The University of Glasgow 2006
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module Eta.Utils.Digraph(
Graph, graphFromVerticesAndAdjacency, graphFromEdgedVertices,
SCC(..), Node, flattenSCC, flattenSCCs,
stronglyConnCompG,
topologicalSortG, dfsTopSortG,
verticesG, edgesG, hasVertexG,
reachableG, reachablesG, transposeG,
outdegreeG, indegreeG,
vertexGroupsG, emptyG,
componentsG,
findCycle,
-- For backwards compatability with the simpler version of Digraph
stronglyConnCompFromEdgedVertices, stronglyConnCompFromEdgedVerticesR,
-- No friendly interface yet, not used but exported to avoid warnings
tabulate, preArr,
components, undirected,
back, cross, forward,
path,
bcc, do_label, bicomps, collect
) where
#include "HsVersions.h"
------------------------------------------------------------------------------
-- A version of the graph algorithms described in:
--
-- ``Lazy Depth-First Search and Linear IntGraph Algorithms in Haskell''
-- by David King and John Launchbury
--
-- Also included is some additional code for printing tree structures ...
------------------------------------------------------------------------------
import Eta.Utils.Util ( minWith, count )
import Eta.Utils.Outputable
import Eta.Utils.Maybes ( expectJust )
import Eta.Utils.MonadUtils ( allM )
-- Extensions
import Control.Monad ( filterM, liftM, liftM2 )
import Control.Monad.ST
-- std interfaces
import Data.Maybe
import Data.Array
import Data.List hiding (transpose)
import Data.Array.ST
import qualified Data.Map as Map
import qualified Data.Set as Set
{-
************************************************************************
* *
* Graphs and Graph Construction
* *
************************************************************************
Note [Nodes, keys, vertices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* A 'node' is a big blob of client-stuff
* Each 'node' has a unique (client) 'key', but the latter
is in Ord and has fast comparison
* Digraph then maps each 'key' to a Vertex (Int) which is
arranged densely in 0.n
-}
-- | A graph over client nodes: the dense Int graph plus the two
-- translations between vertices and nodes (see Note [Nodes, keys, vertices]).
data Graph node = Graph {
    gr_int_graph      :: IntGraph,
    gr_vertex_to_node :: Vertex -> node,
    gr_node_to_vertex :: node -> Maybe Vertex
  }

-- | A directed edge between two client nodes.
data Edge node = Edge node node

type Node key payload = (payload, key, [key])
     -- The payload is user data, just carried around in this module
     -- The keys are ordered
     -- The [key] are the dependencies of the node;
     --    it's ok to have extra keys in the dependencies that
     --    are not the key of any Node in the graph
-- | The graph with no vertices; looking up a vertex is an error.
emptyGraph :: Graph a
emptyGraph = Graph (array (1, 0) []) (error "emptyGraph") (const Nothing)

graphFromVerticesAndAdjacency
        :: Ord key
        => [(node, key)]
        -> [(key, key)]  -- First component is source vertex key,
                         -- second is target vertex key (thing depended on)
                         -- Unlike the other interface I insist they correspond to
                         -- actual vertices because the alternative hides bugs. I can't
                         -- do the same thing for the other one for backcompat reasons.
        -> Graph (node, key)
graphFromVerticesAndAdjacency []       _     = emptyGraph
graphFromVerticesAndAdjacency vertices edges = Graph graph vertex_node (key_vertex . key_extractor)
  where key_extractor = snd
        (bounds, vertex_node, key_vertex, _) = reduceNodesIntoVertices vertices key_extractor
        -- Both endpoints must name real vertices; failure is a bug in the caller.
        key_vertex_pair (a, b) = (expectJust "graphFromVerticesAndAdjacency" $ key_vertex a,
                                  expectJust "graphFromVerticesAndAdjacency" $ key_vertex b)
        reduced_edges = map key_vertex_pair edges
        graph = buildG bounds reduced_edges

graphFromEdgedVertices
        :: Ord key          -- We only use Ord for efficiency,
                            -- it doesn't effect the result, so
                            -- it can be safely used with Unique's.
        => [Node key payload]           -- The graph; its ok for the
                                        -- out-list to contain keys which arent
                                        -- a vertex key, they are ignored
        -> Graph (Node key payload)
graphFromEdgedVertices []             = emptyGraph
graphFromEdgedVertices edged_vertices = Graph graph vertex_fn (key_vertex . key_extractor)
  where key_extractor (_, k, _) = k
        (bounds, vertex_fn, key_vertex, numbered_nodes) = reduceNodesIntoVertices edged_vertices key_extractor
        graph = array bounds [ (v, sort $ mapMaybe key_vertex ks)
                             | (v, (_, _, ks)) <- numbered_nodes]
                -- We normalize outgoing edges by sorting on node order, so
                -- that the result doesn't depend on the order of the edges

-- | Number the nodes densely from 0, returning the array bounds, both
-- translation functions, and the numbered node list.
reduceNodesIntoVertices
        :: Ord key
        => [node]
        -> (node -> key)
        -> (Bounds, Vertex -> node, key -> Maybe Vertex, [(Vertex, node)])
reduceNodesIntoVertices nodes key_extractor = (bounds, (!) vertex_map, key_vertex, numbered_nodes)
  where
    max_v           = length nodes - 1
    bounds          = (0, max_v) :: (Vertex, Vertex)
    -- Keep the order intact to make the result depend on input order
    -- instead of key order
    numbered_nodes  = zip [0..] nodes
    vertex_map      = array bounds numbered_nodes
    key_map         = Map.fromList
      [ (key_extractor node, v) | (v, node) <- numbered_nodes ]
    key_vertex k    = Map.lookup k key_map
{-
************************************************************************
* *
* SCC
* *
************************************************************************
-}
type WorkItem key payload
  = (Node key payload,  -- Tip of the path
     [payload])         -- Rest of the path;
                        --  [a,b,c] means c depends on b, b depends on a

-- | Find a reasonably short cycle a->b->c->a, in a strongly
-- connected component.  The input nodes are presumed to be
-- a SCC, so you can start anywhere.
findCycle :: forall payload key. Ord key
          => [Node key payload]     -- The nodes.  The dependencies can
                                    -- contain extra keys, which are ignored
          -> Maybe [payload]        -- A cycle, starting with node
                                    -- so each depends on the next
findCycle graph
  = go Set.empty (new_work root_deps []) []
  where
    env :: Map.Map key (Node key payload)
    env = Map.fromList [ (key, node) | node@(_, key, _) <- graph ]

    -- Find the node with fewest dependencies among the SCC modules
    -- This is just a heuristic to find some plausible root module
    root :: Node key payload
    root = fst (minWith snd [ (node, count (`Map.member` env) deps)
                            | node@(_,_,deps) <- graph ])
    (root_payload,root_key,root_deps) = root

    -- 'go' implements Dijkstra's algorithm, more or less
    go :: Set.Set key   -- Visited
       -> [WorkItem key payload]        -- Work list, items length n
       -> [WorkItem key payload]        -- Work list, items length n+1
       -> Maybe [payload]               -- Returned cycle
       -- Invariant: in a call (go visited ps qs),
       --            visited = union (map tail (ps ++ qs))
    go _       [] [] = Nothing  -- No cycles
    go visited [] qs = go visited qs []
    go visited (((payload,key,deps), path) : ps) qs
       | key == root_key           = Just (root_payload : reverse path)
       | key `Set.member` visited  = go visited ps qs
       | key `Map.notMember` env   = go visited ps qs
       | otherwise                 = go (Set.insert key visited)
                                        ps (new_qs ++ qs)
       where
         new_qs = new_work deps (payload : path)

    -- Extend the path by every dependency that is actually in the SCC.
    new_work :: [key] -> [payload] -> [WorkItem key payload]
    new_work deps path = [ (n, path) | Just n <- map (`Map.lookup` env) deps ]
{-
************************************************************************
* *
* SCC
* *
************************************************************************
-}
-- | A strongly connected component: a lone vertex, or a genuine cycle.
data SCC vertex = AcyclicSCC vertex
                | CyclicSCC  [vertex]
-- | Map over the payload(s) of a component.
instance Functor SCC where
    fmap f (AcyclicSCC v) = AcyclicSCC (f v)
    fmap f (CyclicSCC vs) = CyclicSCC (map f vs)

-- | All vertices of a list of components, in order.
flattenSCCs :: [SCC a] -> [a]
flattenSCCs sccs = concatMap flattenSCC sccs

-- | The vertices of a single component.
flattenSCC :: SCC a -> [a]
flattenSCC scc = case scc of
    AcyclicSCC v -> [v]
    CyclicSCC vs -> vs
instance Outputable a => Outputable (SCC a) where
   -- "NONREC" for a single vertex, "REC" for a cycle.
   ppr (AcyclicSCC v) = text "NONREC" $$ (nest 3 (ppr v))
   ppr (CyclicSCC vs) = text "REC" $$ (nest 3 (vcat (map ppr vs)))
{-
************************************************************************
* *
* Strongly Connected Component wrappers for Graph
* *
************************************************************************
Note: the components are returned topologically sorted: later components
depend on earlier ones, but not vice versa i.e. later components only have
edges going from them to earlier ones.
-}
stronglyConnCompG :: Graph node -> [SCC node]
stronglyConnCompG graph = decodeSccs graph forest
  where forest = {-# SCC "Digraph.scc" #-} scc (gr_int_graph graph)

-- | Translate a DFS forest of vertices back into SCCs over client nodes.
decodeSccs :: Graph node -> Forest Vertex -> [SCC node]
decodeSccs Graph { gr_int_graph = graph, gr_vertex_to_node = vertex_fn } forest
  = map decode forest
  where
    -- A singleton tree is only cyclic if the vertex points at itself.
    decode (Node v []) | mentions_itself v = CyclicSCC [vertex_fn v]
                       | otherwise         = AcyclicSCC (vertex_fn v)
    decode other = CyclicSCC (dec other [])
      where dec (Node v ts) vs = vertex_fn v : foldr dec vs ts
    mentions_itself v = v `elem` (graph ! v)

-- The following two versions are provided for backwards compatibility:
stronglyConnCompFromEdgedVertices
        :: Ord key
        => [Node key payload]
        -> [SCC payload]
stronglyConnCompFromEdgedVertices
  = map (fmap get_node) . stronglyConnCompFromEdgedVerticesR
  where get_node (n, _, _) = n

-- The "R" interface is used when you expect to apply SCC to
-- (some of) the result of SCC, so you don't want to lose the dependency info
stronglyConnCompFromEdgedVerticesR
        :: Ord key
        => [Node key payload]
        -> [SCC (Node key payload)]
stronglyConnCompFromEdgedVerticesR = stronglyConnCompG . graphFromEdgedVertices
{-
************************************************************************
* *
* Misc wrappers for Graph
* *
************************************************************************
-}
-- | Vertices in topological order (dependees before dependers).
topologicalSortG :: Graph node -> [node]
topologicalSortG graph = map (gr_vertex_to_node graph) result
  where result = {-# SCC "Digraph.topSort" #-} topSort (gr_int_graph graph)

-- | DFS forest of the graph, visited in topological order.
dfsTopSortG :: Graph node -> [[node]]
dfsTopSortG graph =
  map (map (gr_vertex_to_node graph) . flattenTree) $ dfs g (topSort g)
  where
    g = gr_int_graph graph

-- | All nodes reachable from the given node (which must be in the graph).
reachableG :: Graph node -> node -> [node]
reachableG graph from = map (gr_vertex_to_node graph) result
  where from_vertex = expectJust "reachableG" (gr_node_to_vertex graph from)
        result = {-# SCC "Digraph.reachable" #-} reachable (gr_int_graph graph) [from_vertex]

-- | All nodes reachable from any of the given nodes; unknown nodes are skipped.
reachablesG :: Graph node -> [node] -> [node]
reachablesG graph froms = map (gr_vertex_to_node graph) result
  where result = {-# SCC "Digraph.reachable" #-}
                 reachable (gr_int_graph graph) vs
        vs = [ v | Just v <- map (gr_node_to_vertex graph) froms ]

hasVertexG :: Graph node -> node -> Bool
hasVertexG graph node = isJust $ gr_node_to_vertex graph node

verticesG :: Graph node -> [node]
verticesG graph = map (gr_vertex_to_node graph) $ vertices (gr_int_graph graph)

edgesG :: Graph node -> [Edge node]
edgesG graph = map (\(v1, v2) -> Edge (v2n v1) (v2n v2)) $ edges (gr_int_graph graph)
  where v2n = gr_vertex_to_node graph

-- | The graph with all edges reversed; node translations are unchanged.
transposeG :: Graph node -> Graph node
transposeG graph = Graph (transpose (gr_int_graph graph)) (gr_vertex_to_node graph) (gr_node_to_vertex graph)

outdegreeG :: Graph node -> node -> Maybe Int
outdegreeG = degreeG outdegree

indegreeG :: Graph node -> node -> Maybe Int
indegreeG = degreeG indegree

-- | Shared implementation for 'outdegreeG'/'indegreeG'; Nothing when the
-- node is not in the graph.
degreeG :: (IntGraph -> Table Int) -> Graph node -> node -> Maybe Int
degreeG degree graph node = let table = degree (gr_int_graph graph)
                            in fmap ((!) table) $ gr_node_to_vertex graph node

vertexGroupsG :: Graph node -> [[node]]
vertexGroupsG graph = map (map (gr_vertex_to_node graph)) result
  where result = vertexGroups (gr_int_graph graph)

emptyG :: Graph node -> Bool
emptyG g = graphEmpty (gr_int_graph g)

-- | Weakly connected components, ignoring edge direction.
componentsG :: Graph node -> [[node]]
componentsG graph = map (map (gr_vertex_to_node graph) . flattenTree) $ components (gr_int_graph graph)
{-
************************************************************************
* *
* Showing Graphs
* *
************************************************************************
-}
instance Outputable node => Outputable (Graph node) where
    -- Render the full vertex and edge lists.
    ppr graph = vcat [
                  hang (text "Vertices:") 2 (vcat (map ppr $ verticesG graph)),
                  hang (text "Edges:") 2 (vcat (map ppr $ edgesG graph))
                ]

instance Outputable node => Outputable (Edge node) where
    ppr (Edge from to) = ppr from <+> text "->" <+> ppr to
{-
************************************************************************
* *
* IntGraphs
* *
************************************************************************
-}
type Vertex  = Int
type Table a = Array Vertex a
-- | Adjacency-list representation: each vertex maps to its successors.
type IntGraph = Table [Vertex]
type Bounds  = (Vertex, Vertex)
type IntEdge = (Vertex, Vertex)

vertices :: IntGraph -> [Vertex]
vertices  = indices

edges :: IntGraph -> [IntEdge]
edges g = [ (v, w) | v <- vertices g, w <- g!v ]

-- | Map a function over every table entry, passing the index too.
mapT :: (Vertex -> a -> b) -> Table a -> Table b
mapT f t = array (bounds t) [ (v, f v (t ! v)) | v <- indices t ]

-- | Build an adjacency table from an edge list; successors accumulate
-- in reverse insertion order ('flip (:)').
buildG :: Bounds -> [IntEdge] -> IntGraph
buildG bounds edges = accumArray (flip (:)) [] bounds edges

transpose :: IntGraph -> IntGraph
transpose g = buildG (bounds g) (reverseE g)

reverseE :: IntGraph -> [IntEdge]
reverseE g = [ (w, v) | (v, w) <- edges g ]

outdegree :: IntGraph -> Table Int
outdegree = mapT numEdges
  where numEdges _ ws = length ws

indegree :: IntGraph -> Table Int
indegree = outdegree . transpose

-- | True iff the array bounds describe an empty index range.
graphEmpty :: IntGraph -> Bool
graphEmpty g = lo > hi
  where (lo, hi) = bounds g
{-
************************************************************************
* *
* Trees and forests
* *
************************************************************************
-}
-- | A rose tree: a root value together with a list of subtrees.
data Tree a = Node a (Forest a)
type Forest a = [Tree a]
-- | Map a function over every value in the tree.
mapTree :: (a -> b) -> (Tree a -> Tree b)
mapTree f (Node x ts) = Node (f x) (map (mapTree f) ts)
-- | Preorder flattening of a tree into a list.
flattenTree :: Tree a -> [a]
flattenTree (Node x ts) = x : concatMap flattenTree ts
-- | Trees are shown via the ASCII rendering of 'showTree'.
instance Show a => Show (Tree a) where
  showsPrec _ t s = showTree t ++ s
-- | Render a tree of showable values as ASCII art.
showTree :: Show a => Tree a -> String
showTree = drawTree . mapTree show
-- | Render a tree of strings as ASCII art, one line per node.
drawTree :: Tree String -> String
drawTree = unlines . draw
draw :: Tree String -> [String]
draw (Node x ts) = grp this (space (length this)) (stLoop ts)
where this = s1 ++ x ++ " "
space n = replicate n ' '
stLoop [] = [""]
stLoop [t] = grp s2 " " (draw t)
stLoop (t:ts) = grp s3 s4 (draw t) ++ [s4] ++ rsLoop ts
rsLoop [] = []
rsLoop [t] = grp s5 " " (draw t)
rsLoop (t:ts) = grp s6 s4 (draw t) ++ [s4] ++ rsLoop ts
grp fst rst = zipWith (++) (fst:repeat rst)
[s1,s2,s3,s4,s5,s6] = ["- ", "--", "-+", " |", " `", " +"]
{-
************************************************************************
* *
* Depth first search
* *
************************************************************************
-}
-- | Mutable set of vertices, represented as a boolean array in ST.
type Set s = STArray s Vertex Bool
-- | A fresh set containing no vertices.
mkEmpty :: Bounds -> ST s (Set s)
mkEmpty bnds = newArray bnds False
-- | Membership test.
contains :: Set s -> Vertex -> ST s Bool
contains m v = readArray m v
-- | Insert a vertex into the set.
include :: Set s -> Vertex -> ST s ()
include m v = writeArray m v True
-- | Depth-first forest covering every vertex of the graph.
dff :: IntGraph -> Forest Vertex
dff g = dfs g (vertices g)
-- | Depth-first forest rooted at the given vertices, in order.
dfs :: IntGraph -> [Vertex] -> Forest Vertex
dfs g vs = prune (bounds g) (map (generate g) vs)
-- | The (possibly infinite) tree of all paths from a vertex; vertices
-- visited more than once are removed lazily by 'prune'.
generate :: IntGraph -> Vertex -> Tree Vertex
generate g v = Node v (map (generate g) (g!v))
-- | Discard subtrees rooted at already-visited vertices, using a
-- mutable visited-set in ST.
prune :: Bounds -> Forest Vertex -> Forest Vertex
prune bnds ts = runST (mkEmpty bnds >>= \m ->
                       chop m ts)
-- | Walk a forest left to right, keeping each vertex the first time it
-- is seen and discarding any subtree rooted at a vertex already in the
-- visited set.
chop :: Set s -> Forest Vertex -> ST s (Forest Vertex)
chop _ [] = return []
chop m (Node v ts : us) = do
  visited <- contains m v
  if visited
    then chop m us
    else do
      include m v
      kept <- chop m ts
      rest <- chop m us
      return (Node v kept : rest)
{-
************************************************************************
* *
* Algorithms
* *
************************************************************************
------------------------------------------------------------
-- Algorithm 1: depth first search numbering
------------------------------------------------------------
-}
-- | Preorder traversal of a tree: root first, then each subtree.
preorder :: Tree a -> [a]
preorder (Node a ts) = a : preorderF ts
-- | Preorder traversal of a forest, left to right.
-- (Uses 'concatMap' rather than the equivalent @concat (map …)@.)
preorderF :: Forest a -> [a]
preorderF ts = concatMap preorder ts
-- | Number the given vertices 1,2,3,... in list order.
tabulate :: Bounds -> [Vertex] -> Table Int
tabulate bnds vs = array bnds (zip vs [1..])
-- | Table of preorder (DFS discovery) numbers for a forest.
preArr :: Bounds -> Forest Vertex -> Table Int
preArr bnds = tabulate bnds . preorderF
{-
------------------------------------------------------------
-- Algorithm 2: topological sorting
------------------------------------------------------------
-}
-- | Postorder traversal in difference-list style: subtrees first, then
-- the root, prepended to the supplied tail.
postorder :: Tree a -> [a] -> [a]
postorder (Node a ts) = postorderF ts . (a :)
postorderF :: Forest a -> [a] -> [a]
postorderF ts = foldr (.) id $ map postorder ts
-- | All vertices of the graph in DFS postorder.
postOrd :: IntGraph -> [Vertex]
postOrd g = postorderF (dff g) []
-- | Topological sort: reverse postorder (meaningful for acyclic graphs).
topSort :: IntGraph -> [Vertex]
topSort = reverse . postOrd
{-
------------------------------------------------------------
-- Algorithm 3: connected components
------------------------------------------------------------
-}
-- | Connected components, treating every edge as undirected.
components :: IntGraph -> Forest Vertex
components = dff . undirected
-- | Add the reversal of every edge, making the graph symmetric.
undirected :: IntGraph -> IntGraph
undirected g = buildG (bounds g) (edges g ++ reverseE g)
{-
------------------------------------------------------------
-- Algorithm 4: strongly connected components
------------------------------------------------------------
-}
-- | Strongly connected components (Kosaraju's algorithm): DFS the graph
-- in reverse postorder of its transpose; each resulting tree is one SCC.
scc :: IntGraph -> Forest Vertex
scc g = dfs g (reverse (postOrd (transpose g)))
{-
------------------------------------------------------------
-- Algorithm 5: Classifying edges
------------------------------------------------------------
-}
-- | Back edges: those whose target has a larger postorder number.
back :: IntGraph -> Table Int -> IntGraph
back g post = mapT select g
  where select v ws = [ w | w <- ws, post!v < post!w ]
-- | Cross edges: target has both a smaller postorder and a smaller
-- preorder number than the source.
cross :: IntGraph -> Table Int -> Table Int -> IntGraph
cross g pre post = mapT select g
  where select v ws = [ w | w <- ws, post!v > post!w, pre!v > pre!w ]
-- | Forward edges: target has a larger preorder number, excluding the
-- DFS tree edges themselves.
forward :: IntGraph -> IntGraph -> Table Int -> IntGraph
forward g tree pre = mapT select g
  where select v ws = [ w | w <- ws, pre!v < pre!w ] \\ tree!v
{-
------------------------------------------------------------
-- Algorithm 6: Finding reachable vertices
------------------------------------------------------------
-}
-- | All vertices reachable from any of the given start vertices.
reachable :: IntGraph -> [Vertex] -> [Vertex]
reachable g vs = preorderF (dfs g vs)
-- | Is there a directed path from the first vertex to the second?
path :: IntGraph -> Vertex -> Vertex -> Bool
path g v w = w `elem` (reachable g [v])
{-
------------------------------------------------------------
-- Algorithm 7: Biconnected components
------------------------------------------------------------
-}
-- | Biconnected components.  Each DFS tree is labelled with discovery
-- numbers and low-points ('do_label'), then split into components
-- ('bicomps' / 'collect').
bcc :: IntGraph -> Forest [Vertex]
bcc g = (concat . map bicomps . map (do_label g dnum)) forest
  where forest = dff g
        dnum = preArr (bounds g) forest
-- | Annotate each vertex with its discovery number and its low-point:
-- the smallest discovery number reachable from its subtree via at most
-- one non-tree edge.
do_label :: IntGraph -> Table Int -> Tree Vertex -> Tree (Vertex,Int,Int)
do_label g dnum (Node v ts) = Node (v,dnum!v,lv) us
  where us = map (do_label g dnum) ts
        lv = minimum ([dnum!v] ++ [dnum!w | w <- g!v]
                      ++ [lu | Node (_,_,lu) _ <- us])
-- | Split a labelled DFS tree rooted at an articulation point into its
-- biconnected components.
bicomps :: Tree (Vertex, Int, Int) -> Forest [Vertex]
bicomps (Node (v,_,_) ts)
  = [ Node (v:vs) us | (_,Node vs us) <- map collect ts]
-- | Gather the component containing the root of a subtree; children
-- whose low-point reaches above the root are merged into it, the rest
-- start new components.
collect :: Tree (Vertex, Int, Int) -> (Int, Tree [Vertex])
collect (Node (v,dv,lv) ts) = (lv, Node (v:vs) cs)
  where collected = map collect ts
        vs = concat [ ws | (lw, Node ws _) <- collected, lw<dv]
        cs = concat [ if lw<dv then us else [Node (v:ws) us]
                        | (lw, Node ws us) <- collected ]
{-
------------------------------------------------------------
-- Algorithm 8: Total ordering on groups of vertices
------------------------------------------------------------
The plan here is to extract a list of groups of elements of the graph
such that each group has no dependence except on nodes in previous
groups (i.e. in particular they may not depend on nodes in their own
group) and is maximal such group.
Clearly we cannot provide a solution for cyclic graphs.
We proceed by iteratively removing elements with no outgoing edges
and their associated edges from the graph.
This probably isn't very efficient and certainly isn't very clever.
-}
-- | Partition the vertices into groups such that each group depends
-- only on vertices in earlier groups.  Calls 'error' on cyclic graphs
-- (see 'vertexGroupsS').
vertexGroups :: IntGraph -> [[Vertex]]
vertexGroups g = runST (mkEmpty (bounds g) >>= \provided -> vertexGroupsS provided g next_vertices)
  where next_vertices = noOutEdges g
-- | Vertices with no outgoing edges, i.e. no dependencies.
noOutEdges :: IntGraph -> [Vertex]
noOutEdges g = [ v | v <- vertices g, null (g!v)]
-- | Iteratively peel off groups of vertices whose dependencies have all
-- been provided.  When no more vertices can be provided, either every
-- vertex has been covered (done) or the graph is cyclic (error).
vertexGroupsS :: Set s -> IntGraph -> [Vertex] -> ST s [[Vertex]]
vertexGroupsS provided g to_provide
  = if null to_provide
    then do {
            all_provided <- allM (provided `contains`) (vertices g)
          ; if all_provided
            then return []
            else error "vertexGroup: cyclic graph"
          }
    else do {
            mapM_ (include provided) to_provide
          ; to_provide' <- filterM (vertexReady provided g) (vertices g)
          ; rest <- vertexGroupsS provided g to_provide'
          ; return $ to_provide : rest
          }
-- | A vertex is ready when it has not yet been provided but all of its
-- dependencies have been.
vertexReady :: Set s -> IntGraph -> Vertex -> ST s Bool
vertexReady provided g v = liftM2 (&&) (liftM not $ provided `contains` v) (allM (provided `contains`) (g!v))
| rahulmutt/ghcvm | compiler/Eta/Utils/Digraph.hs | bsd-3-clause | 23,836 | 0 | 17 | 6,926 | 6,099 | 3,221 | 2,878 | 332 | 5 |
{-# LANGUAGE Haskell98, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
{-# LINE 1 "Control/Monad/RWS/Class.hs" #-}
{-# LANGUAGE UndecidableInstances #-}
-- Search for UndecidableInstances to see why this is needed
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.RWS.Class
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (multi-param classes, functional dependencies)
--
-- Declaration of the MonadRWS class.
--
-- Inspired by the paper
-- /Functional Programming with Overloading and Higher-Order Polymorphism/,
-- Mark P Jones (<http://web.cecs.pdx.edu/~mpj/>)
-- Advanced School of Functional Programming, 1995.
-----------------------------------------------------------------------------
module Control.Monad.RWS.Class (
MonadRWS,
module Control.Monad.Reader.Class,
module Control.Monad.State.Class,
module Control.Monad.Writer.Class,
) where
import Control.Monad.Reader.Class
import Control.Monad.State.Class
import Control.Monad.Writer.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Error(Error, ErrorT)
import Control.Monad.Trans.Except(ExceptT)
import Control.Monad.Trans.Maybe(MaybeT)
import Control.Monad.Trans.Identity(IdentityT)
import Control.Monad.Trans.RWS.Lazy as Lazy (RWST)
import qualified Control.Monad.Trans.RWS.Strict as Strict (RWST)
import Data.Monoid
-- | 'MonadRWS' bundles reader, writer and state capabilities into one
-- constraint; it declares no methods of its own.  The functional
-- dependencies pin @r@, @w@ and @s@ to the monad @m@.
class (Monoid w, MonadReader r m, MonadWriter w m, MonadState s m)
   => MonadRWS r w s m | m -> r, m -> w, m -> s
-- The canonical instances: the lazy and strict RWST transformers.
instance (Monoid w, Monad m) => MonadRWS r w s (Lazy.RWST r w s m)
instance (Monoid w, Monad m) => MonadRWS r w s (Strict.RWST r w s m)
---------------------------------------------------------------------------
-- Instances for other mtl transformers
--
-- All of these instances need UndecidableInstances,
-- because they do not satisfy the coverage condition.
instance MonadRWS r w s m => MonadRWS r w s (ExceptT e m)
instance (Error e, MonadRWS r w s m) => MonadRWS r w s (ErrorT e m)
instance MonadRWS r w s m => MonadRWS r w s (IdentityT m)
instance MonadRWS r w s m => MonadRWS r w s (MaybeT m)
| phischu/fragnix | tests/packages/scotty/Control.Monad.RWS.Class.hs | bsd-3-clause | 2,382 | 0 | 8 | 382 | 466 | 279 | 187 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.PackageDump
( Line
, eachSection
, eachPair
, DumpPackage (..)
, conduitDumpPackage
, ghcPkgDump
, ghcPkgDescribe
, newInstalledCache
, loadInstalledCache
, saveInstalledCache
, addProfiling
, addHaddock
, addSymbols
, sinkMatching
, pruneDeps
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Exception.Safe (tryIO)
import Control.Monad (liftM)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger)
import Control.Monad.Trans.Control
import Data.Attoparsec.Args
import Data.Attoparsec.Text as P
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
import Data.Either (partitionEithers)
import Data.IORef
import Data.List (isPrefixOf)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes, listToMaybe)
import Data.Maybe.Extra (mapMaybeM)
import qualified Data.Set as Set
import Data.Store.VersionTagged
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable (Typeable)
import qualified Distribution.License as C
import qualified Distribution.System as OS
import qualified Distribution.Text as C
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Prelude -- Fix AMP warning
import Stack.GhcPkg
import Stack.Types.Compiler
import Stack.Types.GhcPkgId
import Stack.Types.PackageDump
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Version
import System.Directory (getDirectoryContents, doesFileExist)
import System.Process.Read
-- | Call ghc-pkg dump with appropriate flags and stream to the given @Sink@, for a single database
ghcPkgDump
    :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
    => EnvOverride
    -> WhichCompiler
    -> [Path Abs Dir] -- ^ if empty, use global
    -> Sink Text IO a
    -> m a
ghcPkgDump = ghcPkgCmdArgs ["dump"]
-- | Call ghc-pkg describe with appropriate flags and stream to the given @Sink@, for a single database
ghcPkgDescribe
    :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
    => PackageName
    -> EnvOverride
    -> WhichCompiler
    -> [Path Abs Dir] -- ^ if empty, use global
    -> Sink Text IO a
    -> m a
ghcPkgDescribe pkgName = ghcPkgCmdArgs ["describe", "--simple-output", packageNameString pkgName]
-- | Call ghc-pkg and stream to the given @Sink@, for a single database
ghcPkgCmdArgs
    :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
    => [String]
    -> EnvOverride
    -> WhichCompiler
    -> [Path Abs Dir] -- ^ if empty, use global
    -> Sink Text IO a
    -> m a
ghcPkgCmdArgs cmd menv wc mpkgDbs sink = do
    -- Ensure the last-listed (innermost) package database exists before
    -- querying it.
    case reverse mpkgDbs of
        (pkgDb:_) -> createDatabase menv wc pkgDb -- TODO maybe use some retry logic instead?
        _ -> return ()
    sinkProcessStdout Nothing menv (ghcPkgExeName wc) args sink'
  where
    args = concat
        [ case mpkgDbs of
              [] -> ["--global", "--no-user-package-db"]
              _ -> ["--user", "--no-user-package-db"] ++
                   concatMap (\pkgDb -> ["--package-db", toFilePathNoTrailingSep pkgDb]) mpkgDbs
        , cmd
        , ["--expand-pkgroot"]
        ]
    -- ghc-pkg emits bytes; decode to Text before handing to the caller.
    sink' = CT.decodeUtf8 =$= sink
-- | Create a new, empty @InstalledCache@
-- The cache is an IORef so it can be updated as packages are inspected.
newInstalledCache :: MonadIO m => m InstalledCache
newInstalledCache = liftIO $ InstalledCache <$> newIORef (InstalledCacheInner Map.empty)
-- | Load a @InstalledCache@ from disk, swallowing any errors and returning an
-- empty cache.
loadInstalledCache :: (MonadLogger m, MonadIO m, MonadBaseControl IO m)
                   => Path Abs File -> m InstalledCache
loadInstalledCache path = do
    m <- $(versionedDecodeOrLoad installedCacheVC) path (return $ InstalledCacheInner Map.empty)
    liftIO $ InstalledCache <$> newIORef m
-- | Save a @InstalledCache@ to disk
saveInstalledCache :: (MonadLogger m, MonadIO m) => Path Abs File -> InstalledCache -> m ()
saveInstalledCache path (InstalledCache ref) =
    liftIO (readIORef ref) >>= $(versionedEncodeFile installedCacheVC) path
-- | Prune a list of possible packages down to those whose dependencies are met.
--
-- * id uniquely identifies an item
--
-- * There can be multiple items per name
pruneDeps
    :: (Ord name, Ord id)
    => (id -> name) -- ^ extract the name from an id
    -> (item -> id) -- ^ the id of an item
    -> (item -> [id]) -- ^ get the dependencies of an item
    -> (item -> item -> item) -- ^ choose the desired of two possible items
    -> [item] -- ^ input items
    -> Map name item
pruneDeps getName getId getDepends chooseBest =
    Map.fromList
  . fmap (getName . getId &&& id)
  . loop Set.empty Set.empty []
  where
    -- Fixpoint: each round keeps the items whose dependencies are all
    -- already found (Left), carries unresolved items forward (Right
    -- Just), and drops items whose name is already used (Right
    -- Nothing).  Stops when a round resolves nothing new; anything
    -- still unresolved at that point is silently discarded.
    loop foundIds usedNames foundItems dps =
        case partitionEithers $ map depsMet dps of
            ([], _) -> foundItems
            (s', dps') ->
                let foundIds' = Map.fromListWith chooseBest s'
                    foundIds'' = Set.fromList $ map getId $ Map.elems foundIds'
                    usedNames' = Map.keysSet foundIds'
                    foundItems' = Map.elems foundIds'
                 in loop
                        (Set.union foundIds foundIds'')
                        (Set.union usedNames usedNames')
                        (foundItems ++ foundItems')
                        (catMaybes dps')
      where
        depsMet dp
            | name `Set.member` usedNames = Right Nothing
            | all (`Set.member` foundIds) (getDepends dp) = Left (name, dp)
            | otherwise = Right $ Just dp
          where
            id' = getId dp
            name = getName id'
-- | Find the package IDs matching the given constraints with all dependencies installed.
-- Packages not mentioned in the provided @Map@ are allowed to be present too.
sinkMatching :: Monad m
             => Bool -- ^ require profiling?
             -> Bool -- ^ require haddock?
             -> Bool -- ^ require debugging symbols?
             -> Map PackageName Version -- ^ allowed versions
             -> Consumer (DumpPackage Bool Bool Bool)
                         m
                         (Map PackageName (DumpPackage Bool Bool Bool))
sinkMatching reqProfiling reqHaddock reqSymbols allowed = do
    -- Keep only packages that satisfy the version map and the requested
    -- profiling/haddock/symbols flags, then prune to those whose
    -- dependency closure is also present.
    dps <- CL.filter (\dp -> isAllowed (dpPackageIdent dp) &&
                             (not reqProfiling || dpProfiling dp) &&
                             (not reqHaddock || dpHaddock dp) &&
                             (not reqSymbols || dpSymbols dp))
       =$= CL.consume
    return $ Map.fromList $ map (packageIdentifierName . dpPackageIdent &&& id) $ Map.elems $ pruneDeps
        id
        dpGhcPkgId
        dpDepends
        const -- Could consider a better comparison in the future
        dps
  where
    -- A package is allowed unless the map pins its name to a different
    -- version.
    isAllowed (PackageIdentifier name version) =
        case Map.lookup name allowed of
            Just version' | version /= version' -> False
            _ -> True
-- | Add profiling information to the stream of @DumpPackage@s
addProfiling :: MonadIO m
             => InstalledCache
             -> Conduit (DumpPackage a b c) m (DumpPackage Bool b c)
addProfiling (InstalledCache ref) =
    CL.mapM go
  where
    go dp = liftIO $ do
        InstalledCacheInner m <- readIORef ref
        let gid = dpGhcPkgId dp
        p <- case Map.lookup gid m of
            -- Cached answer available.
            Just installed -> return (installedCacheProfiling installed)
            -- No libraries means nothing to profile; count as profiled.
            Nothing | null (dpLibraries dp) -> return True
            -- Otherwise scan the lib dirs for a lib<name>_p* entry.
            Nothing -> do
                let loop [] = return False
                    loop (dir:dirs) = do
                        econtents <- tryIO $ getDirectoryContents dir
                        let contents = either (const []) id econtents
                        if or [isProfiling content lib
                              | content <- contents
                              , lib <- dpLibraries dp
                              ] && not (null contents)
                            then return True
                            else loop dirs
                loop $ dpLibDirs dp
        return dp { dpProfiling = p }
-- | Does a directory entry look like the profiling build of the given
-- library, i.e. does it start with @lib<name>_p@?
isProfiling :: FilePath -- ^ entry in directory
            -> Text -- ^ name of library
            -> Bool
isProfiling entry lib =
    profPrefix `T.isPrefixOf` T.pack entry
  where
    profPrefix = T.pack "lib" <> lib <> T.pack "_p"
-- | Add haddock information to the stream of @DumpPackage@s
addHaddock :: MonadIO m
           => InstalledCache
           -> Conduit (DumpPackage a b c) m (DumpPackage a Bool c)
addHaddock (InstalledCache ref) =
    CL.mapM go
  where
    go dp = liftIO $ do
        InstalledCacheInner m <- readIORef ref
        let gid = dpGhcPkgId dp
        h <- case Map.lookup gid m of
            -- Cached answer available.
            Just installed -> return (installedCacheHaddock installed)
            -- No exposed modules means no docs to generate.
            Nothing | not (dpHasExposedModules dp) -> return True
            -- Otherwise check whether any declared interface file exists.
            Nothing -> do
                let loop [] = return False
                    loop (ifc:ifcs) = do
                        exists <- doesFileExist ifc
                        if exists
                            then return True
                            else loop ifcs
                loop $ dpHaddockInterfaces dp
        return dp { dpHaddock = h }
-- | Add debugging symbol information to the stream of @DumpPackage@s
addSymbols :: MonadIO m
           => InstalledCache
           -> Conduit (DumpPackage a b c) m (DumpPackage a b Bool)
addSymbols (InstalledCache ref) =
    CL.mapM go
  where
    go dp = do
        InstalledCacheInner m <- liftIO $ readIORef ref
        let gid = dpGhcPkgId dp
        s <- case Map.lookup gid m of
            -- Cached answer available.
            Just installed -> return (installedCacheSymbols installed)
            -- No libraries: nothing to inspect, count as having symbols.
            Nothing | null (dpLibraries dp) -> return True
            Nothing -> do
                -- 'head' is safe here: the null case is handled by the
                -- guard on the previous alternative.
                let lib = T.unpack . head $ dpLibraries dp
                liftM or . mapM (\dir -> liftIO $ hasDebuggingSymbols dir lib) $ dpLibDirs dp
        return dp { dpSymbols = s }
-- | Check whether a static library archive contains debugging symbols,
-- by invoking the platform's DWARF inspection tool and scanning its
-- output.  Returns False if the archive does not exist or the platform
-- is unsupported.
hasDebuggingSymbols :: FilePath -- ^ library directory
                    -> String -- ^ name of library
                    -> IO Bool
hasDebuggingSymbols dir lib = do
    let path = concat [dir, "/lib", lib, ".a"]
    exists <- doesFileExist path
    if not exists then return False
        else case OS.buildOS of
            OS.OSX -> liftM (any (isPrefixOf "0x") . lines) $
                readProcess "dwarfdump" [path] ""
            OS.Linux -> liftM (any (isPrefixOf "Contents") . lines) $
                readProcess "readelf" ["--debug-dump=info", "--dwarf-depth=1", path] ""
            OS.FreeBSD -> liftM (any (isPrefixOf "Contents") . lines) $
                readProcess "readelf" ["--debug-dump=info", "--dwarf-depth=1", path] ""
            OS.Windows -> return False -- No support, so it can't be there.
            _ -> return False
-- | Dump information for a single package
-- The @profiling@/@haddock@/@symbols@ type parameters are () until the
-- corresponding add* conduit has filled them in with a Bool.
data DumpPackage profiling haddock symbols = DumpPackage
    { dpGhcPkgId :: !GhcPkgId
    , dpPackageIdent :: !PackageIdentifier
    , dpLicense :: !(Maybe C.License)
    , dpLibDirs :: ![FilePath]
    , dpLibraries :: ![Text]
    , dpHasExposedModules :: !Bool
    , dpDepends :: ![GhcPkgId]
    , dpHaddockInterfaces :: ![FilePath]
    , dpHaddockHtml :: !(Maybe FilePath)
    , dpProfiling :: !profiling
    , dpHaddock :: !haddock
    , dpSymbols :: !symbols
    , dpIsExposed :: !Bool
    }
    deriving (Show, Eq)
-- | Errors that can arise while interpreting ghc-pkg output.
data PackageDumpException
    = MissingSingleField Text (Map Text [Line])
    | Couldn'tParseField Text [Line]
    deriving Typeable
instance Exception PackageDumpException
instance Show PackageDumpException where
    show (MissingSingleField name values) = unlines $
        return (concat
            [ "Expected single value for field name "
            , show name
            , " when parsing ghc-pkg dump output:"
            ]) ++ map (\(k, v) -> "    " ++ show (k, v)) (Map.toList values)
    show (Couldn'tParseField name ls) =
        "Couldn't parse the field " ++ show name ++ " from lines: " ++ show ls
-- | Convert a stream of bytes into a stream of @DumpPackage@s
-- Each @---@-separated section of ghc-pkg output is parsed into its
-- key/value pairs; the builtin RTS pseudo-package is skipped (Nothing).
conduitDumpPackage :: MonadThrow m
                   => Conduit Text m (DumpPackage () () ())
conduitDumpPackage = (=$= CL.catMaybes) $ eachSection $ do
    pairs <- eachPair (\k -> (k, ) <$> CL.consume) =$= CL.consume
    let m = Map.fromList pairs
    -- parseS: a field that must appear exactly once.
    let parseS k =
            case Map.lookup k m of
                Just [v] -> return v
                _ -> throwM $ MissingSingleField k m
        -- Can't fail: if not found, same as an empty list. See:
        -- https://github.com/fpco/stack/issues/182
        parseM k = Map.findWithDefault [] k m
        -- parseDepend: drop the builtin RTS from dependency lists,
        -- whether it appears alone or glued to another id.
        parseDepend :: MonadThrow m => Text -> m (Maybe GhcPkgId)
        parseDepend "builtin_rts" = return Nothing
        parseDepend bs =
            liftM Just $ parseGhcPkgId bs'
          where
            (bs', _builtinRts) =
                case stripSuffixText " builtin_rts" bs of
                    Nothing ->
                        case stripPrefixText "builtin_rts " bs of
                            Nothing -> (bs, False)
                            Just x -> (x, True)
                    Just x -> (x, True)
    case Map.lookup "id" m of
        Just ["builtin_rts"] -> return Nothing
        _ -> do
            name <- parseS "name" >>= parsePackageName
            version <- parseS "version" >>= parseVersion
            ghcPkgId <- parseS "id" >>= parseGhcPkgId
            -- if a package has no modules, these won't exist
            let libDirKey = "library-dirs"
                libraries = parseM "hs-libraries"
                exposedModules = parseM "exposed-modules"
                exposed = parseM "exposed"
                license =
                    case parseM "license" of
                        [licenseText] -> C.simpleParse (T.unpack licenseText)
                        _ -> Nothing
            depends <- mapMaybeM parseDepend $ concatMap T.words $ parseM "depends"
            -- Fields whose values are shell-style quoted lists of paths.
            let parseQuoted key =
                    case mapM (P.parseOnly (argsParser NoEscaping)) val of
                        Left{} -> throwM (Couldn'tParseField key val)
                        Right dirs -> return (concat dirs)
                  where
                    val = parseM key
            libDirPaths <- parseQuoted libDirKey
            haddockInterfaces <- parseQuoted "haddock-interfaces"
            haddockHtml <- parseQuoted "haddock-html"
            return $ Just DumpPackage
                { dpGhcPkgId = ghcPkgId
                , dpPackageIdent = PackageIdentifier name version
                , dpLicense = license
                , dpLibDirs = libDirPaths
                , dpLibraries = T.words $ T.unwords libraries
                , dpHasExposedModules = not (null libraries || null exposedModules)
                , dpDepends = depends
                , dpHaddockInterfaces = haddockInterfaces
                , dpHaddockHtml = listToMaybe haddockHtml
                , dpProfiling = ()
                , dpHaddock = ()
                , dpSymbols = ()
                , dpIsExposed = exposed == ["True"]
                }
-- | Strip a prefix from a 'Text', yielding 'Nothing' when the prefix is
-- absent.  Delegates to 'T.stripPrefix' instead of re-implementing it
-- with @isPrefixOf@ plus length arithmetic.
stripPrefixText :: Text -> Text -> Maybe Text
stripPrefixText = T.stripPrefix
-- | Strip a suffix from a 'Text', yielding 'Nothing' when the suffix is
-- absent.  Delegates to 'T.stripSuffix' instead of re-implementing it
-- with @isSuffixOf@ plus length arithmetic.
stripSuffixText :: Text -> Text -> Maybe Text
stripSuffixText = T.stripSuffix
-- | A single line of input, not including line endings
type Line = Text
-- | Apply the given Sink to each section of output, broken by a single line containing ---
eachSection :: Monad m
            => Sink Line m a
            -> Conduit Text m a
eachSection inner =
    CL.map (T.filter (/= '\r')) =$= CT.lines =$= start
  where
    -- Skip empty lines, then peek at the next non-empty one without
    -- consuming it.
    peekText = await >>= maybe (return Nothing) (\bs ->
        if T.null bs
            then peekText
            else leftover bs >> return (Just bs))
    start = peekText >>= maybe (return ()) (const go)
    -- Feed lines up to (but not including) the "---" separator to the
    -- inner sink, drop the separator, then recurse for the next section.
    go = do
        x <- toConsumer $ takeWhileC (/= "---") =$= inner
        yield x
        CL.drop 1
        start
-- | Grab each key/value pair
-- A value may span multiple lines; continuation lines are recognised by
-- their indentation relative to the key line.
eachPair :: Monad m
         => (Text -> Sink Line m a)
         -> Conduit Line m a
eachPair inner =
    start
  where
    start = await >>= maybe (return ()) start'
    start' bs1 =
        toConsumer (valSrc =$= inner key) >>= yield >> start
      where
        -- Split "key: value"; indent is the column where the value text
        -- begins, used to detect continuation lines.
        (key, bs2) = T.break (== ':') bs1
        (spaces, bs3) = T.span (== ' ') $ T.drop 1 bs2
        indent = T.length key + 1 + T.length spaces
        valSrc
            | T.null bs3 = noIndent
            | otherwise = yield bs3 >> loopIndent indent
    -- Key line carried no value: take the indentation of the first
    -- following indented line as the continuation indent.
    noIndent = do
        mx <- await
        case mx of
            Nothing -> return ()
            Just bs -> do
                let (spaces, val) = T.span (== ' ') bs
                if T.length spaces == 0
                    then leftover val
                    else do
                        yield val
                        loopIndent (T.length spaces)
    -- Yield lines indented by exactly i spaces; the first line that is
    -- not is pushed back for the next key/value pair.
    loopIndent i =
        loop
      where
        loop = await >>= maybe (return ()) go
        go bs
            | T.length spaces == i && T.all (== ' ') spaces =
                yield val >> loop
            | otherwise = leftover bs
          where
            (spaces, val) = T.splitAt i bs
-- | General purpose utility: stream values downstream while the
-- predicate holds; the first failing value is pushed back as leftover
-- and the conduit stops.
takeWhileC :: Monad m => (a -> Bool) -> Conduit a m a
takeWhileC p = go
  where
    go = await >>= maybe (return ()) step
    step x
        | p x = yield x >> go
        | otherwise = leftover x
| mrkkrp/stack | src/Stack/PackageDump.hs | bsd-3-clause | 18,082 | 0 | 27 | 6,044 | 4,738 | 2,417 | 2,321 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
module SDL.Event
( Event(..)
, EventPayload(..)
, KeyMotion(..)
, KeyState(..)
, MouseButton(..)
, MouseMotion(..)
, WindowID
, pollEvent
, mapEvents
, Raw.pumpEvents
, waitEvent
, waitEventTimeout
) where
import Control.Applicative
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Data (Data)
import Data.Maybe (catMaybes)
import Data.Text (Text)
import Data.Typeable
import Foreign
import Foreign.C
import GHC.Generics (Generic)
import Linear
import Linear.Affine (Point(P))
import SDL.Input.Keyboard
import SDL.Input.Mouse
import SDL.Internal.Numbered
import SDL.Internal.Types (WindowID(WindowID))
import qualified Data.ByteString.Char8 as BSC8
import qualified Data.Text.Encoding as Text
import qualified SDL.Exception as SDLEx
import qualified SDL.Raw as Raw
-- | An SDL event together with the timestamp at which it occurred.
data Event = Event
  { eventTimestamp :: Word32
  , eventPayload :: EventPayload
  } deriving (Eq, Ord, Generic, Show, Typeable)
-- | Direction of a key transition in a keyboard event.
data KeyMotion = KeyUp | KeyDown
  deriving (Bounded, Enum, Eq, Ord, Read, Data, Generic, Show, Typeable)
-- | Current state of a key as reported by SDL.
data KeyState = KeyPressed | KeyReleased
  deriving (Bounded, Enum, Eq, Ord, Read, Data, Generic, Show, Typeable)
instance FromNumber KeyState Word8 where
  -- NOTE(review): non-exhaustive for Word8 values other than
  -- SDL_PRESSED/SDL_RELEASED — relies on SDL only ever reporting those
  -- two; confirm.
  fromNumber n' = case n' of
    Raw.SDL_PRESSED -> KeyPressed
    Raw.SDL_RELEASED -> KeyReleased
-- | The data carried by an 'Event', one constructor per SDL event type.
-- Raw SDL identifiers (window, joystick, touch ids) are preserved in
-- the corresponding fields.
-- NOTE(review): 'dollagGestureEventPos' looks like a typo for
-- "dollarGestureEventPos", but the accessor name is part of the public
-- API and cannot be renamed here without breaking callers.
data EventPayload
  = WindowShown
    { windowEventWindowID :: WindowID
    }
  | WindowHidden
    { windowEventWindowID :: WindowID
    }
  | WindowExposed
    { windowEventWindowID :: WindowID
    }
  | WindowMoved
    { windowEventWindowID :: WindowID
    , windowEventPosition :: Point V2 Int32
    }
  | WindowResized
    { windowEventWindowID :: WindowID
    , windowEventSize :: V2 Int32
    }
  | WindowSizeChanged
    { windowEventWindowID :: WindowID
    }
  | WindowMinimized
    { windowEventWindowID :: WindowID
    }
  | WindowMaximized
    { windowEventWindowID :: WindowID
    }
  | WindowRestored
    { windowEventWindowID :: WindowID
    }
  | WindowGainedMouseFocus
    { windowEventWindowID :: WindowID
    }
  | WindowLostMouseFocus
    { windowEventWindowID :: WindowID
    }
  | WindowGainedKeyboardFocus
    { windowEventWindowID :: WindowID
    }
  | WindowLostKeyboardFocus
    { windowEventWindowID :: WindowID
    }
  | WindowClosed
    { windowEventWindowID :: WindowID
    }
  | KeyboardEvent
    { keyboardEventWindowID :: WindowID
    , keyboardEventKeyMotion :: KeyMotion
    , keyboardEventState :: KeyState
    , keyboardEventRepeat :: Bool
    , keyboardEventKeysym :: Keysym
    }
  | TextEditingEvent
    { textEditingEventWindowID :: WindowID
    , textEditingEventText :: Text
    , textEditingEventStart :: Int32
    , textEditingEventLength :: Int32
    }
  | TextInputEvent
    { textInputEventWindowID :: WindowID
    , textInputEventText :: Text
    }
  | MouseMotionEvent
    { mouseMotionEventWindowID :: WindowID
    , mouseMotionEventWhich :: MouseDevice
    , mouseMotionEventState :: [MouseButton]
    , mouseMotionEventPos :: Point V2 Int32
    , mouseMotionEventRelMotion :: V2 Int32
    }
  | MouseButtonEvent
    { mouseButtonEventWindowID :: WindowID
    , mouseButtonEventMotion :: MouseMotion
    , mouseButtonEventWhich :: MouseDevice
    , mouseButtonEventButton :: MouseButton
    , mouseButtonEventState :: Word8
    , mouseButtonEventClicks :: Word8
    , mouseButtonEventPos :: Point V2 Int32
    }
  | MouseWheelEvent
    { mouseWheelEventWindowID :: WindowID
    , mouseWheelEventWhich :: MouseDevice
    , mouseWheelEventPos :: V2 Int32
    }
  | JoyAxisEvent
    { joyAxisEventWhich :: Raw.JoystickID
    , joyAxisEventAxis :: Word8
    , joyAxisEventValue :: Int16
    }
  | JoyBallEvent
    { joyBallEventWhich :: Raw.JoystickID
    , joyBallEventBall :: Word8
    , joyBallEventRelMotion :: V2 Int16
    }
  | JoyHatEvent
    { joyHatEventWhich :: Raw.JoystickID
    , joyHatEventHat :: Word8
    , joyHatEventValue :: Word8
    }
  | JoyButtonEvent
    { joyButtonEventWhich :: Raw.JoystickID
    , joyButtonEventButton :: Word8
    , joyButtonEventState :: Word8
    }
  | JoyDeviceEvent
    { joyDeviceEventWhich :: Int32
    }
  | ControllerAxisEvent
    { controllerAxisEventWhich :: Raw.JoystickID
    , controllerAxisEventAxis :: Word8
    , controllerAxisEventValue :: Int16
    }
  | ControllerButtonEvent
    { controllerButtonEventWhich :: Raw.JoystickID
    , controllerButtonEventButton :: Word8
    , controllerButtonEventState :: Word8
    }
  | ControllerDeviceEvent
    { controllerDeviceEventWhich :: Int32
    }
  | QuitEvent
  | UserEvent
    { userEventWindowID :: WindowID
    , userEventCode :: Int32
    , userEventData1 :: Ptr ()
    , userEventData2 :: Ptr ()
    }
  | SysWMEvent
    { sysWMEventMsg :: Raw.SysWMmsg
    }
  | TouchFingerEvent
    { touchFingerEventTouchID :: Raw.TouchID
    , touchFingerEventFingerID :: Raw.FingerID
    , touchFingerEventPos :: Point V2 CFloat
    , touchFingerEventRelMotion :: V2 CFloat
    , touchFingerEventPressure :: CFloat
    }
  | MultiGestureEvent
    { multiGestureEventTouchID :: Raw.TouchID
    , multiGestureEventDTheta :: CFloat
    , multiGestureEventDDist :: CFloat
    , multiGestureEventPos :: Point V2 CFloat
    , multiGestureEventNumFingers :: Word16
    }
  | DollarGestureEvent
    { dollarGestureEventTouchID :: Raw.TouchID
    , dollarGestureEventGestureID :: Raw.GestureID
    , dollarGestureEventNumFingers :: Word32
    , dollarGestureEventError :: CFloat
    , dollagGestureEventPos :: Point V2 CFloat
    }
  | DropEvent
    { dropEventFile :: CString
    }
  | ClipboardUpdateEvent
  | UnknownEvent
    { unknownEventType :: Word32
    }
  deriving (Eq, Ord, Show, Typeable, Generic)
-- | Decode a CChar buffer (as delivered in SDL text events) into 'Text'.
-- NOTE(review): bytes are reinterpreted via Char8 packing and then
-- decoded as UTF-8; 'Text.decodeUtf8' throws on invalid UTF-8 — assumes
-- SDL always delivers valid UTF-8 here, confirm.
ccharStringToText :: [CChar] -> Text
ccharStringToText = Text.decodeUtf8 . BSC8.pack . map castCCharToChar
-- | Translate a raw SDL keysym into the high-level 'Keysym', converting
-- the scancode, keycode and modifier fields via 'fromNumber'.
fromRawKeysym :: Raw.Keysym -> Keysym
fromRawKeysym (Raw.Keysym rawScancode rawKeycode rawModifier) =
  Keysym (fromNumber rawScancode)
         (fromNumber rawKeycode)
         (fromNumber (fromIntegral rawModifier))
-- | Convert a 'Raw.Event' coming from the C layer into the high-level 'Event'
-- type. Each equation handles one raw constructor; the timestamp @ts@ is
-- carried through unchanged, and numeric fields are converted with
-- 'fromNumber' / wrapped in 'WindowID' as appropriate.
convertRaw :: Raw.Event -> Event
-- Window events: dispatch on the window-event subtype @b@; unknown subtypes
-- fall through to 'UnknownEvent'.
convertRaw (Raw.WindowEvent t ts a b c d) = Event ts $
  let w' = WindowID a in case b of
    Raw.SDL_WINDOWEVENT_SHOWN -> WindowShown w'
    Raw.SDL_WINDOWEVENT_HIDDEN -> WindowHidden w'
    Raw.SDL_WINDOWEVENT_EXPOSED -> WindowExposed w'
    Raw.SDL_WINDOWEVENT_MOVED -> WindowMoved w' (P (V2 c d))
    Raw.SDL_WINDOWEVENT_RESIZED -> WindowResized w' (V2 c d)
    Raw.SDL_WINDOWEVENT_SIZE_CHANGED -> WindowSizeChanged w'
    Raw.SDL_WINDOWEVENT_MINIMIZED -> WindowMinimized w'
    Raw.SDL_WINDOWEVENT_MAXIMIZED -> WindowMaximized w'
    Raw.SDL_WINDOWEVENT_RESTORED -> WindowRestored w'
    Raw.SDL_WINDOWEVENT_ENTER -> WindowGainedMouseFocus w'
    Raw.SDL_WINDOWEVENT_LEAVE -> WindowLostMouseFocus w'
    Raw.SDL_WINDOWEVENT_FOCUS_GAINED -> WindowGainedKeyboardFocus w'
    Raw.SDL_WINDOWEVENT_FOCUS_LOST -> WindowLostKeyboardFocus w'
    Raw.SDL_WINDOWEVENT_CLOSE -> WindowClosed w'
    _ -> UnknownEvent t
-- Keyboard: @c /= 0@ is the "repeat" flag from the raw struct.
convertRaw (Raw.KeyboardEvent Raw.SDL_KEYDOWN ts a b c d) =
  Event ts (KeyboardEvent (WindowID a) KeyDown (fromNumber b) (c /= 0) (fromRawKeysym d))
convertRaw (Raw.KeyboardEvent Raw.SDL_KEYUP ts a b c d) =
  Event ts (KeyboardEvent (WindowID a) KeyUp (fromNumber b) (c /= 0) (fromRawKeysym d))
convertRaw (Raw.TextEditingEvent _ ts a b c d) = Event ts (TextEditingEvent (WindowID a) (ccharStringToText b) c d)
convertRaw (Raw.TextInputEvent _ ts a b) = Event ts (TextInputEvent (WindowID a) (ccharStringToText b))
-- Mouse motion: decode the button-state bitmask @c@ into the list of buttons
-- currently held; a mask bit that is set yields @Just button@, otherwise the
-- entry is dropped by 'catMaybes'.
convertRaw (Raw.MouseMotionEvent _ ts a b c d e f g)
  = let buttons = catMaybes
          [ (Raw.SDL_BUTTON_LMASK `test` c) ButtonLeft
          , (Raw.SDL_BUTTON_RMASK `test` c) ButtonRight
          , (Raw.SDL_BUTTON_MMASK `test` c) ButtonMiddle
          , (Raw.SDL_BUTTON_X1MASK `test` c) ButtonX1
          , (Raw.SDL_BUTTON_X2MASK `test` c) ButtonX2 ]
    in Event ts (MouseMotionEvent (WindowID a) (fromNumber b) buttons (P (V2 d e)) (V2 f g))
  where mask `test` x = if mask .&. x /= 0 then Just else const Nothing
-- NOTE(review): the @motion@ guards below have no @otherwise@ case, so a
-- MouseButtonEvent whose type @t@ is neither SDL_MOUSEBUTTONUP nor
-- SDL_MOUSEBUTTONDOWN would raise a pattern-match failure at runtime —
-- confirm the raw layer can only deliver those two types here.
convertRaw (Raw.MouseButtonEvent t ts a b c d e f g)
  = let motion | t == Raw.SDL_MOUSEBUTTONUP = MouseButtonUp
               | t == Raw.SDL_MOUSEBUTTONDOWN = MouseButtonDown
        button | c == Raw.SDL_BUTTON_LEFT = ButtonLeft
               | c == Raw.SDL_BUTTON_MIDDLE = ButtonMiddle
               | c == Raw.SDL_BUTTON_RIGHT = ButtonRight
               | c == Raw.SDL_BUTTON_X1 = ButtonX1
               | c == Raw.SDL_BUTTON_X2 = ButtonX2
               | otherwise = ButtonExtra $ fromIntegral c
    in Event ts (MouseButtonEvent (WindowID a) motion (fromNumber b) button d e (P (V2 f g)))
convertRaw (Raw.MouseWheelEvent _ ts a b c d) = Event ts (MouseWheelEvent (WindowID a) (fromNumber b) (V2 c d))
-- Joystick, controller and the remaining event families are direct
-- field-for-field translations.
convertRaw (Raw.JoyAxisEvent _ ts a b c) = Event ts (JoyAxisEvent a b c)
convertRaw (Raw.JoyBallEvent _ ts a b c d) = Event ts (JoyBallEvent a b (V2 c d))
convertRaw (Raw.JoyHatEvent _ ts a b c) = Event ts (JoyHatEvent a b c)
convertRaw (Raw.JoyButtonEvent _ ts a b c) = Event ts (JoyButtonEvent a b c)
convertRaw (Raw.JoyDeviceEvent _ ts a) = Event ts (JoyDeviceEvent a)
convertRaw (Raw.ControllerAxisEvent _ ts a b c) = Event ts (ControllerAxisEvent a b c)
convertRaw (Raw.ControllerButtonEvent _ ts a b c) = Event ts (ControllerButtonEvent a b c)
convertRaw (Raw.ControllerDeviceEvent _ ts a) = Event ts (ControllerDeviceEvent a)
convertRaw (Raw.QuitEvent _ ts) = Event ts QuitEvent
convertRaw (Raw.UserEvent _ ts a b c d) = Event ts (UserEvent (WindowID a) b c d)
convertRaw (Raw.SysWMEvent _ ts a) = Event ts (SysWMEvent a)
convertRaw (Raw.TouchFingerEvent _ ts a b c d e f g) = Event ts (TouchFingerEvent a b (P (V2 c d)) (V2 e f) g)
convertRaw (Raw.MultiGestureEvent _ ts a b c d e f) = Event ts (MultiGestureEvent a b c (P (V2 d e)) f)
convertRaw (Raw.DollarGestureEvent _ ts a b c d e f) = Event ts (DollarGestureEvent a b c d (P (V2 e f)))
convertRaw (Raw.DropEvent _ ts a) = Event ts (DropEvent a)
convertRaw (Raw.ClipboardUpdateEvent _ ts) = Event ts ClipboardUpdateEvent
convertRaw (Raw.UnknownEvent t ts) = Event ts (UnknownEvent t)
-- | Poll the SDL event queue once. Returns 'Nothing' when no event is
-- pending (the raw call reports 0), otherwise the converted event.
pollEvent :: MonadIO m => m (Maybe Event)
pollEvent = liftIO $ alloca $ \ptr -> do
  status <- Raw.pollEvent ptr
  case status of
    0 -> return Nothing
    _ -> fmap (Just . convertRaw) (peek ptr)
-- | Drain the event queue, running the handler on each pending event.
-- Stops as soon as 'pollEvent' yields 'Nothing'.
mapEvents :: MonadIO m => (Event -> m ()) -> m ()
mapEvents handler =
  pollEvent >>= maybe (return ()) (\e -> handler e >> mapEvents handler)
-- | Block until the next event arrives and return it converted. Failure of
-- the underlying call is turned into an exception via 'SDLEx.throwIfNeg_'
-- (tagged with the calling function and the raw C entry point).
waitEvent :: MonadIO m => m Event
waitEvent = liftIO $ alloca $ \e -> do
  SDLEx.throwIfNeg_ "SDL.Events.waitEvent" "SDL_WaitEvent" $
    Raw.waitEvent e
  convertRaw <$> peek e
-- | Like 'waitEvent', but give up after @timeout@ (the raw call reporting 0
-- yields 'Nothing'; any other result is a delivered event).
waitEventTimeout :: MonadIO m => CInt -> m (Maybe Event)
waitEventTimeout timeout = liftIO $ alloca $ \ptr ->
  Raw.waitEventTimeout ptr timeout >>= \n ->
    if n == 0
      then return Nothing
      else Just . convertRaw <$> peek ptr
| svenkeidel/sdl2 | src/SDL/Event.hs | bsd-3-clause | 11,071 | 0 | 15 | 2,356 | 3,304 | 1,772 | 1,532 | 265 | 16 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SDB.CreateDomain
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | The 'CreateDomain' operation creates a new domain. The domain name should be
-- unique among the domains associated with the Access Key ID provided in the
-- request. The 'CreateDomain' operation may take 10 or more seconds to complete.
--
-- The client can create up to 100 domains per account.
--
-- If the client requires additional domains, go to <http://aws.amazon.com/contact-us/simpledb-limit-request/ http://aws.amazon.com/contact-us/simpledb-limit-request/>.
--
-- <http://docs.aws.amazon.com/AmazonSimpleDB/latest/DeveloperGuide/SDB_API_CreateDomain.html>
module Network.AWS.SDB.CreateDomain
(
-- * Request
CreateDomain
-- ** Request constructor
, createDomain
-- ** Request lenses
, cdDomainName
-- * Response
, CreateDomainResponse
-- ** Response constructor
, createDomainResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.SDB.Types
import qualified GHC.Exts
-- | Request type for the CreateDomain operation; a newtype over the domain
-- name, so 'Monoid' and 'IsString' are simply inherited from 'Text'.
newtype CreateDomain = CreateDomain
    { _cdDomainName :: Text
    } deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'CreateDomain' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cdDomainName' @::@ 'Text'
--
createDomain :: Text -- ^ 'cdDomainName'
             -> CreateDomain
-- The newtype has a single field, so the constructor applies directly.
createDomain = CreateDomain
-- | The name of the domain to create. The name can range between 3 and 255
-- characters and can contain the following characters: a-z, A-Z, 0-9, '_', '-',
-- and '.'.
--
-- Lens over the single '_cdDomainName' field of 'CreateDomain'.
cdDomainName :: Lens' CreateDomain Text
cdDomainName = lens _cdDomainName (\s a -> s { _cdDomainName = a })
-- | Response type for CreateDomain. The service returns no payload, so this
-- is a single nullary constructor.
data CreateDomainResponse = CreateDomainResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'CreateDomainResponse' constructor.
createDomainResponse :: CreateDomainResponse
createDomainResponse = CreateDomainResponse
-- Requests are issued against the service root path.
instance ToPath CreateDomain where
    toPath = const "/"
-- The domain name is serialised as the "DomainName" query parameter.
instance ToQuery CreateDomain where
    toQuery CreateDomain{..} = mconcat
        [ "DomainName" =? _cdDomainName
        ]
-- No headers beyond the defaults.
instance ToHeaders CreateDomain
-- POST with action "CreateDomain"; the response body is ignored and the
-- nullary 'CreateDomainResponse' is returned.
instance AWSRequest CreateDomain where
    type Sv CreateDomain = SDB
    type Rs CreateDomain = CreateDomainResponse
    request = post "CreateDomain"
    response = nullResponse CreateDomainResponse
| romanb/amazonka | amazonka-sdb/gen/Network/AWS/SDB/CreateDomain.hs | mpl-2.0 | 3,314 | 0 | 9 | 701 | 337 | 210 | 127 | 45 | 1 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Data.Unique (module M) where
import "base" Data.Unique as M
| Ye-Yong-Chi/codeworld | codeworld-base/src/Data/Unique.hs | apache-2.0 | 737 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
module Graphics.Vty.Error where
-- | The type of exceptions specific to vty.
--
-- These have fully qualified names by default since, IMO, exception
-- handling requires this.
data VtyException
    -- The only constructor at present; no instances are derived here.
    = VtyFailure String -- ^ Uncategorized failure specific to vty.
| jtdaugherty/vty | src/Graphics/Vty/Error.hs | bsd-3-clause | 264 | 0 | 6 | 46 | 21 | 15 | 6 | 3 | 0 |
{-
(c) The GRASP Project, Glasgow University, 1994-1998
\section[TysWiredIn]{Wired-in knowledge about {\em non-primitive} types}
-}
{-# LANGUAGE CPP #-}
-- | This module is about types that can be defined in Haskell, but which
-- must be wired into the compiler nonetheless. C.f module TysPrim
module TysWiredIn (
-- * All wired in things
wiredInTyCons, isBuiltInOcc_maybe,
-- * Bool
boolTy, boolTyCon, boolTyCon_RDR, boolTyConName,
trueDataCon, trueDataConId, true_RDR,
falseDataCon, falseDataConId, false_RDR,
promotedBoolTyCon, promotedFalseDataCon, promotedTrueDataCon,
-- * Ordering
ltDataCon, ltDataConId,
eqDataCon, eqDataConId,
gtDataCon, gtDataConId,
promotedOrderingTyCon,
promotedLTDataCon, promotedEQDataCon, promotedGTDataCon,
-- * Char
charTyCon, charDataCon, charTyCon_RDR,
charTy, stringTy, charTyConName,
-- * Double
doubleTyCon, doubleDataCon, doubleTy, doubleTyConName,
-- * Float
floatTyCon, floatDataCon, floatTy, floatTyConName,
-- * Int
intTyCon, intDataCon, intTyCon_RDR, intDataCon_RDR, intTyConName,
intTy,
-- * Word
wordTyCon, wordDataCon, wordTyConName, wordTy,
-- * List
listTyCon, nilDataCon, nilDataConName, consDataCon, consDataConName,
listTyCon_RDR, consDataCon_RDR, listTyConName,
mkListTy, mkPromotedListTy,
-- * Tuples
mkTupleTy, mkBoxedTupleTy,
tupleTyCon, tupleCon,
promotedTupleTyCon, promotedTupleDataCon,
unitTyCon, unitDataCon, unitDataConId, pairTyCon,
unboxedUnitTyCon, unboxedUnitDataCon,
unboxedSingletonTyCon, unboxedSingletonDataCon,
unboxedPairTyCon, unboxedPairDataCon,
-- * Unit
unitTy,
-- * Kinds
typeNatKindCon, typeNatKind, typeSymbolKindCon, typeSymbolKind,
-- * Parallel arrays
mkPArrTy,
parrTyCon, parrFakeCon, isPArrTyCon, isPArrFakeCon,
parrTyCon_RDR, parrTyConName,
-- * Equality predicates
eqTyCon_RDR, eqTyCon, eqTyConName, eqBoxDataCon,
coercibleTyCon, coercibleDataCon, coercibleClass,
mkWiredInTyConName -- This is used in TcTypeNats to define the
-- built-in functions for evaluation.
) where
#include "HsVersions.h"
import {-# SOURCE #-} MkId( mkDataConWorkId )
-- friends:
import PrelNames
import TysPrim
-- others:
import Constants ( mAX_TUPLE_SIZE )
import Module ( Module )
import Type ( mkTyConApp )
import DataCon
import ConLike
import Var
import TyCon
import Class ( Class, mkClass )
import TypeRep
import RdrName
import Name
import BasicTypes ( TupleSort(..), tupleSortBoxity,
Arity, RecFlag(..), Boxity(..) )
import ForeignCall
import Unique ( incrUnique, mkTupleTyConUnique,
mkTupleDataConUnique, mkPArrDataConUnique )
import Data.Array
import FastString
import Outputable
import Util
import BooleanFormula ( mkAnd )
-- Singleton type-variable / type lists shared by several wired-in
-- definitions below (e.g. the list type constructor).
alpha_tyvar :: [TyVar]
alpha_tyvar = [alphaTyVar]
alpha_ty :: [Type]
alpha_ty = [alphaTy]
{-
************************************************************************
* *
\subsection{Wired in type constructors}
* *
************************************************************************
If you change which things are wired in, make sure you change their
names in PrelNames, so they use wTcQual, wDataQual, etc
-}
-- This list is used only to define PrelInfo.wiredInThings. That in turn
-- is used to initialise the name environment carried around by the renamer.
-- This means that if we look up the name of a TyCon (or its implicit binders)
-- that occurs in this list that name will be assigned the wired-in key we
-- define here.
--
-- Because of their infinite nature, this list excludes tuples, Any and implicit
-- parameter TyCons. Instead, we have a hack in lookupOrigNameCache to deal with
-- these names.
--
-- See also Note [Known-key names]
-- | The wired-in type constructors; excludes tuples, Any and
-- implicit-parameter TyCons, which are handled specially in
-- lookupOrigNameCache because the families are infinite.
wiredInTyCons :: [TyCon]
wiredInTyCons = [ unitTyCon -- Not treated like other tuples, because
                            -- it's defined in GHC.Base, and there's only
                            -- one of it. We put it in wiredInTyCons so
                            -- that it'll pre-populate the name cache, so
                            -- the special case in lookupOrigNameCache
                            -- doesn't need to look out for it
              , boolTyCon
              , charTyCon
              , doubleTyCon
              , floatTyCon
              , intTyCon
              , wordTyCon
              , listTyCon
              , parrTyCon
              , eqTyCon
              , coercibleTyCon
              , typeNatKindCon
              , typeSymbolKindCon
              ]
-- | Make the 'Name' of a wired-in TyCon; the TyCon itself is stored in the
-- Name so lookups can recover it without an environment.
mkWiredInTyConName :: BuiltInSyntax -> Module -> FastString -> Unique -> TyCon -> Name
mkWiredInTyConName built_in modu fs unique tycon
  = mkWiredInName modu (mkTcOccFS fs) unique
                  (ATyCon tycon) -- Relevant TyCon
                  built_in
-- | As 'mkWiredInTyConName', but for a wired-in DataCon.
mkWiredInDataConName :: BuiltInSyntax -> Module -> FastString -> Unique -> DataCon -> Name
mkWiredInDataConName built_in modu fs unique datacon
  = mkWiredInName modu (mkDataOccFS fs) unique
                  (AConLike (RealDataCon datacon)) -- Relevant DataCon
                  built_in
-- See Note [Kind-changing of (~) and Coercible]
eqTyConName, eqBoxDataConName :: Name
eqTyConName = mkWiredInTyConName BuiltInSyntax gHC_TYPES (fsLit "~") eqTyConKey eqTyCon
eqBoxDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "Eq#") eqBoxDataConKey eqBoxDataCon
-- See Note [Kind-changing of (~) and Coercible]
coercibleTyConName, coercibleDataConName :: Name
coercibleTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Coercible") coercibleTyConKey coercibleTyCon
coercibleDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "MkCoercible") coercibleDataConKey coercibleDataCon
charTyConName, charDataConName, intTyConName, intDataConName :: Name
charTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Char") charTyConKey charTyCon
charDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "C#") charDataConKey charDataCon
intTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Int") intTyConKey intTyCon
intDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "I#") intDataConKey intDataCon
boolTyConName, falseDataConName, trueDataConName :: Name
boolTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Bool") boolTyConKey boolTyCon
falseDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "False") falseDataConKey falseDataCon
trueDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "True") trueDataConKey trueDataCon
-- Note: "[]" and ":" are BuiltInSyntax — they are never "in scope" and are
-- resolved via isBuiltInOcc_maybe instead.
listTyConName, nilDataConName, consDataConName :: Name
listTyConName = mkWiredInTyConName BuiltInSyntax gHC_TYPES (fsLit "[]") listTyConKey listTyCon
nilDataConName = mkWiredInDataConName BuiltInSyntax gHC_TYPES (fsLit "[]") nilDataConKey nilDataCon
consDataConName = mkWiredInDataConName BuiltInSyntax gHC_TYPES (fsLit ":") consDataConKey consDataCon
wordTyConName, wordDataConName, floatTyConName, floatDataConName, doubleTyConName, doubleDataConName :: Name
wordTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Word") wordTyConKey wordTyCon
wordDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "W#") wordDataConKey wordDataCon
floatTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Float") floatTyConKey floatTyCon
floatDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "F#") floatDataConKey floatDataCon
doubleTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Double") doubleTyConKey doubleTyCon
doubleDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "D#") doubleDataConKey doubleDataCon
-- Kinds
typeNatKindConName, typeSymbolKindConName :: Name
typeNatKindConName = mkWiredInTyConName UserSyntax gHC_TYPELITS (fsLit "Nat") typeNatKindConNameKey typeNatKindCon
typeSymbolKindConName = mkWiredInTyConName UserSyntax gHC_TYPELITS (fsLit "Symbol") typeSymbolKindConNameKey typeSymbolKindCon
parrTyConName, parrDataConName :: Name
parrTyConName = mkWiredInTyConName BuiltInSyntax
                  gHC_PARR' (fsLit "[::]") parrTyConKey parrTyCon
parrDataConName = mkWiredInDataConName UserSyntax
                    gHC_PARR' (fsLit "PArr") parrDataConKey parrDataCon
-- RdrName views of the wired-in Names above, for use by the renamer.
boolTyCon_RDR, false_RDR, true_RDR, intTyCon_RDR, charTyCon_RDR,
    intDataCon_RDR, listTyCon_RDR, consDataCon_RDR, parrTyCon_RDR, eqTyCon_RDR :: RdrName
boolTyCon_RDR = nameRdrName boolTyConName
false_RDR = nameRdrName falseDataConName
true_RDR = nameRdrName trueDataConName
intTyCon_RDR = nameRdrName intTyConName
charTyCon_RDR = nameRdrName charTyConName
intDataCon_RDR = nameRdrName intDataConName
listTyCon_RDR = nameRdrName listTyConName
consDataCon_RDR = nameRdrName consDataConName
parrTyCon_RDR = nameRdrName parrTyConName
eqTyCon_RDR = nameRdrName eqTyConName
{-
************************************************************************
* *
\subsection{mkWiredInTyCon}
* *
************************************************************************
-}
-- Helpers used to build the wired-in TyCons and DataCons defined below.
pcNonRecDataTyCon :: Name -> Maybe CType -> [TyVar] -> [DataCon] -> TyCon
-- Not an enumeration, not promotable
pcNonRecDataTyCon = pcTyCon False NonRecursive False
-- This function assumes that the types it creates have all parameters at
-- Representational role!
pcTyCon :: Bool -> RecFlag -> Bool -> Name -> Maybe CType -> [TyVar] -> [DataCon] -> TyCon
pcTyCon is_enum is_rec is_prom name cType tyvars cons
  = buildAlgTyCon name
                  tyvars
                  (map (const Representational) tyvars)
                  cType
                  [] -- No stupid theta
                  (DataTyCon cons is_enum)
                  is_rec
                  is_prom
                  False -- Not in GADT syntax
                  NoParentTyCon
-- | Build a wired-in DataCon with prefix (non-infix) syntax.
pcDataCon :: Name -> [TyVar] -> [Type] -> TyCon -> DataCon
pcDataCon = pcDataConWithFixity False
pcDataConWithFixity :: Bool -> Name -> [TyVar] -> [Type] -> TyCon -> DataCon
pcDataConWithFixity infx n = pcDataConWithFixity' infx n (incrUnique (nameUnique n))
-- The Name's unique is the first of two free uniques;
-- the first is used for the datacon itself,
-- the second is used for the "worker name"
--
-- To support this the mkPreludeDataConUnique function "allocates"
-- one DataCon unique per pair of Ints.
pcDataConWithFixity' :: Bool -> Name -> Unique -> [TyVar] -> [Type] -> TyCon -> DataCon
-- The Name should be in the DataName name space; it's the name
-- of the DataCon itself.
pcDataConWithFixity' declared_infix dc_name wrk_key tyvars arg_tys tycon
  = data_con
  where
    data_con = mkDataCon dc_name declared_infix
                (map (const HsNoBang) arg_tys)
                [] -- No labelled fields
                tyvars
                [] -- No existential type variables
                [] -- No equality spec
                [] -- No theta
                arg_tys (mkTyConApp tycon (mkTyVarTys tyvars))
                tycon
                [] -- No stupid theta
                (mkDataConWorkId wrk_name data_con)
                NoDataConRep -- Wired-in types are too simple to need wrappers
    modu = ASSERT( isExternalName dc_name )
           nameModule dc_name
    wrk_occ = mkDataConWorkerOcc (nameOccName dc_name)
    wrk_name = mkWiredInName modu wrk_occ wrk_key
                             (AnId (dataConWorkId data_con)) UserSyntax
{-
************************************************************************
* *
Kinds
* *
************************************************************************
-}
-- TyCons for the kinds of type-level literals; both have no tyvars and no
-- data constructors.
typeNatKindCon, typeSymbolKindCon :: TyCon
-- data Nat
-- data Symbol
typeNatKindCon = pcTyCon False NonRecursive True typeNatKindConName Nothing [] []
typeSymbolKindCon = pcTyCon False NonRecursive True typeSymbolKindConName Nothing [] []
-- The kinds themselves are the promoted forms of the TyCons above.
typeNatKind, typeSymbolKind :: Kind
typeNatKind = TyConApp (promoteTyCon typeNatKindCon) []
typeSymbolKind = TyConApp (promoteTyCon typeSymbolKindCon) []
{-
************************************************************************
* *
Stuff for dealing with tuples
* *
************************************************************************
Note [How tuples work] See also Note [Known-key names] in PrelNames
~~~~~~~~~~~~~~~~~~~~~~
* There are three families of tuple TyCons and corresponding
DataCons, (boxed, unboxed, and constraint tuples), expressed by the
type BasicTypes.TupleSort.
* DataCons (and workers etc) for BoxedTuple and ConstraintTuple have
- distinct Uniques
- the same OccName
Using the same OccName means (hack!) that a single copy of the
runtime library code (info tables etc) works for both.
* When looking up an OccName in the original-name cache
(IfaceEnv.lookupOrigNameCache), we spot the tuple OccName to make sure
we get the right wired-in name. This guy can't tell the difference
betweeen BoxedTuple and ConstraintTuple (same OccName!), so tuples
are not serialised into interface files using OccNames at all.
-}
isBuiltInOcc_maybe :: OccName -> Maybe Name
-- Built in syntax isn't "in scope" so these OccNames
-- map to wired-in Names with BuiltInSyntax
isBuiltInOcc_maybe occ
  = case occNameString occ of
        "[]" -> choose_ns listTyCon nilDataCon
        ":" -> Just consDataConName
        "[::]" -> Just parrTyConName
        "(##)" -> choose_ns unboxedUnitTyCon unboxedUnitDataCon
        "()" -> choose_ns unitTyCon unitDataCon
        '(':'#':',':rest -> parse_tuple UnboxedTuple 2 rest
        '(':',':rest -> parse_tuple BoxedTuple 2 rest
        _other -> Nothing
  where
    ns = occNameSpace occ
    -- Count commas to determine the tuple arity, then check the closing
    -- bracket matches the sort (")" boxed, "#)" unboxed).
    parse_tuple sort n rest
      | (',' : rest2) <- rest = parse_tuple sort (n+1) rest2
      | tail_matches sort rest = choose_ns (tupleTyCon sort n)
                                           (tupleCon sort n)
      | otherwise = Nothing
    tail_matches BoxedTuple ")" = True
    tail_matches UnboxedTuple "#)" = True
    tail_matches _ _ = False
    -- Pick the right entity for the OccName's namespace: the TyCon for a
    -- type/class occurrence, the DataCon for a data occurrence, and the
    -- worker Id otherwise.
    choose_ns tc dc
      | isTcClsNameSpace ns = Just (getName tc)
      | isDataConNameSpace ns = Just (getName dc)
      | otherwise = Just (getName (dataConWorkId dc))
-- | Build the 'OccName' for a tuple of the given sort and arity in the given
-- namespace: e.g. @(,,)@ for a boxed 3-tuple, @(#,,#)@ for an unboxed one.
-- Constraint tuples deliberately reuse the boxed spelling (see the comment
-- below about fact tuples).
mkTupleOcc :: NameSpace -> TupleSort -> Arity -> OccName
mkTupleOcc ns sort ar = mkOccName ns str
  where
    -- No need to cache these, the caching is done in mk_tuple
    str = case sort of
            UnboxedTuple -> '(' : '#' : commas ++ "#)"
            BoxedTuple -> '(' : commas ++ ")"
            ConstraintTuple -> '(' : commas ++ ")"
    -- An n-tuple spelling contains n-1 commas; 'replicate' is the direct
    -- idiom for "take n of a repeated element".
    commas = replicate (ar-1) ','
-- Cute hack: we reuse the standard tuple OccNames (and hence code)
-- for fact tuples, but give them different Uniques so they are not equal.
--
-- You might think that this will go wrong because isBuiltInOcc_maybe won't
-- be able to tell the difference between boxed tuples and constraint tuples. BUT:
-- 1. Constraint tuples never occur directly in user code, so it doesn't matter
-- that we can't detect them in Orig OccNames originating from the user
-- programs (or those built by setRdrNameSpace used on an Exact tuple Name)
-- 2. Interface files have a special representation for tuple *occurrences*
-- in IfaceTyCons, their workers (in IfaceSyn) and their DataCons (in case
-- alternatives). Thus we don't rely on the OccName to figure out what kind
-- of tuple an occurrence was trying to use in these situations.
-- 3. We *don't* represent tuple data type declarations specially, so those
-- are still turned into wired-in names via isBuiltInOcc_maybe. But that's OK
-- because we don't actually need to declare constraint tuples thanks to this hack.
--
-- So basically any OccName like (,,) flowing to isBuiltInOcc_maybe will always
-- refer to the standard boxed tuple. Cool :-)
-- Lookup of tuple TyCons/DataCons: arities up to mAX_TUPLE_SIZE come from
-- memoising arrays; anything larger is built on demand by mk_tuple.
tupleTyCon :: TupleSort -> Arity -> TyCon
tupleTyCon sort i | i > mAX_TUPLE_SIZE = fst (mk_tuple sort i) -- Build one specially
tupleTyCon BoxedTuple i = fst (boxedTupleArr ! i)
tupleTyCon UnboxedTuple i = fst (unboxedTupleArr ! i)
tupleTyCon ConstraintTuple i = fst (factTupleArr ! i)
promotedTupleTyCon :: TupleSort -> Arity -> TyCon
promotedTupleTyCon sort i = promoteTyCon (tupleTyCon sort i)
promotedTupleDataCon :: TupleSort -> Arity -> TyCon
promotedTupleDataCon sort i = promoteDataCon (tupleCon sort i)
tupleCon :: TupleSort -> Arity -> DataCon
tupleCon sort i | i > mAX_TUPLE_SIZE = snd (mk_tuple sort i) -- Build one specially
tupleCon BoxedTuple i = snd (boxedTupleArr ! i)
tupleCon UnboxedTuple i = snd (unboxedTupleArr ! i)
tupleCon ConstraintTuple i = snd (factTupleArr ! i)
-- Memoising arrays, one per tuple sort, indexed by arity 0..mAX_TUPLE_SIZE.
boxedTupleArr, unboxedTupleArr, factTupleArr :: Array Int (TyCon,DataCon)
boxedTupleArr = listArray (0,mAX_TUPLE_SIZE) [mk_tuple BoxedTuple i | i <- [0..mAX_TUPLE_SIZE]]
unboxedTupleArr = listArray (0,mAX_TUPLE_SIZE) [mk_tuple UnboxedTuple i | i <- [0..mAX_TUPLE_SIZE]]
factTupleArr = listArray (0,mAX_TUPLE_SIZE) [mk_tuple ConstraintTuple i | i <- [0..mAX_TUPLE_SIZE]]
-- | Build the TyCon/DataCon pair for one tuple of the given sort and arity,
-- knot-tying the names, kinds and (for boxed tuples) the promoted TyCon.
mk_tuple :: TupleSort -> Int -> (TyCon,DataCon)
mk_tuple sort arity = (tycon, tuple_con)
  where
    tycon = mkTupleTyCon tc_name tc_kind arity tyvars tuple_con sort prom_tc
    -- Only boxed tuples are promotable.
    prom_tc = case sort of
      BoxedTuple -> Just (mkPromotedTyCon tycon (promoteKind tc_kind))
      UnboxedTuple -> Nothing
      ConstraintTuple -> Nothing
    modu = mkTupleModule sort
    tc_name = mkWiredInName modu (mkTupleOcc tcName sort arity) tc_uniq
                            (ATyCon tycon) BuiltInSyntax
    tc_kind = mkArrowKinds (map tyVarKind tyvars) res_kind
    -- The result kind depends on the sort: *, #, or Constraint.
    res_kind = case sort of
      BoxedTuple -> liftedTypeKind
      UnboxedTuple -> unliftedTypeKind
      ConstraintTuple -> constraintKind
    tyvars = take arity $ case sort of
      BoxedTuple -> alphaTyVars
      UnboxedTuple -> openAlphaTyVars
      ConstraintTuple -> tyVarList constraintKind
    tuple_con = pcDataCon dc_name tyvars tyvar_tys tycon
    tyvar_tys = mkTyVarTys tyvars
    dc_name = mkWiredInName modu (mkTupleOcc dataName sort arity) dc_uniq
                            (AConLike (RealDataCon tuple_con)) BuiltInSyntax
    tc_uniq = mkTupleTyConUnique sort arity
    dc_uniq = mkTupleDataConUnique sort arity
-- Convenience aliases for the small tuples used throughout the compiler.
unitTyCon :: TyCon
unitTyCon = tupleTyCon BoxedTuple 0
unitDataCon :: DataCon
unitDataCon = head (tyConDataCons unitTyCon)
unitDataConId :: Id
unitDataConId = dataConWorkId unitDataCon
pairTyCon :: TyCon
pairTyCon = tupleTyCon BoxedTuple 2
unboxedUnitTyCon :: TyCon
unboxedUnitTyCon = tupleTyCon UnboxedTuple 0
unboxedUnitDataCon :: DataCon
unboxedUnitDataCon = tupleCon UnboxedTuple 0
unboxedSingletonTyCon :: TyCon
unboxedSingletonTyCon = tupleTyCon UnboxedTuple 1
unboxedSingletonDataCon :: DataCon
unboxedSingletonDataCon = tupleCon UnboxedTuple 1
unboxedPairTyCon :: TyCon
unboxedPairTyCon = tupleTyCon UnboxedTuple 2
unboxedPairDataCon :: DataCon
unboxedPairDataCon = tupleCon UnboxedTuple 2
{-
************************************************************************
* *
\subsection[TysWiredIn-boxed-prim]{The ``boxed primitive'' types (@Char@, @Int@, etc)}
* *
************************************************************************
-}
-- | The (~) equality TyCon: kind-polymorphic, all parameters Nominal.
eqTyCon :: TyCon
eqTyCon = mkAlgTyCon eqTyConName
            (ForAllTy kv $ mkArrowKinds [k, k] constraintKind)
            [kv, a, b]
            [Nominal, Nominal, Nominal]
            Nothing
            [] -- No stupid theta
            (DataTyCon [eqBoxDataCon] False)
            NoParentTyCon
            NonRecursive
            False
            Nothing -- No parent for constraint-kinded types
  where
    kv = kKiVar
    k = mkTyVarTy kv
    a:b:_ = tyVarList k
-- | The Eq# box: wraps a primitive (~#) proof into the lifted (~).
eqBoxDataCon :: DataCon
eqBoxDataCon = pcDataCon eqBoxDataConName args [TyConApp eqPrimTyCon (map mkTyVarTy args)] eqTyCon
  where
    kv = kKiVar
    k = mkTyVarTy kv
    a:b:_ = tyVarList k
    args = [kv, a, b]
-- | Coercible: same kind as (~), but its type parameters are Representational.
coercibleTyCon :: TyCon
coercibleTyCon = mkClassTyCon
    coercibleTyConName kind tvs [Nominal, Representational, Representational]
    rhs coercibleClass NonRecursive
  where kind = (ForAllTy kv $ mkArrowKinds [k, k] constraintKind)
        kv = kKiVar
        k = mkTyVarTy kv
        a:b:_ = tyVarList k
        tvs = [kv, a, b]
        rhs = DataTyCon [coercibleDataCon] False
-- | MkCoercible: wraps a primitive representational-equality proof.
coercibleDataCon :: DataCon
coercibleDataCon = pcDataCon coercibleDataConName args [TyConApp eqReprPrimTyCon (map mkTyVarTy args)] coercibleTyCon
  where
    kv = kKiVar
    k = mkTyVarTy kv
    a:b:_ = tyVarList k
    args = [kv, a, b]
-- | The Coercible class itself: no superclasses, methods or associated types.
coercibleClass :: Class
coercibleClass = mkClass (tyConTyVars coercibleTyCon) [] [] [] [] [] (mkAnd []) coercibleTyCon
-- The "boxed primitive" types: each is a single-constructor wrapper around
-- the corresponding unboxed primitive, with a CType annotation for the FFI.
charTy :: Type
charTy = mkTyConTy charTyCon
charTyCon :: TyCon
charTyCon = pcNonRecDataTyCon charTyConName
                (Just (CType "" Nothing (fsLit "HsChar")))
                [] [charDataCon]
charDataCon :: DataCon
charDataCon = pcDataCon charDataConName [] [charPrimTy] charTyCon
stringTy :: Type
stringTy = mkListTy charTy -- convenience only
intTy :: Type
intTy = mkTyConTy intTyCon
intTyCon :: TyCon
intTyCon = pcNonRecDataTyCon intTyConName
                (Just (CType "" Nothing (fsLit "HsInt"))) []
                [intDataCon]
intDataCon :: DataCon
intDataCon = pcDataCon intDataConName [] [intPrimTy] intTyCon
wordTy :: Type
wordTy = mkTyConTy wordTyCon
wordTyCon :: TyCon
wordTyCon = pcNonRecDataTyCon wordTyConName
                (Just (CType "" Nothing (fsLit "HsWord"))) []
                [wordDataCon]
wordDataCon :: DataCon
wordDataCon = pcDataCon wordDataConName [] [wordPrimTy] wordTyCon
floatTy :: Type
floatTy = mkTyConTy floatTyCon
floatTyCon :: TyCon
floatTyCon = pcNonRecDataTyCon floatTyConName
                (Just (CType "" Nothing (fsLit "HsFloat"))) []
                [floatDataCon]
floatDataCon :: DataCon
floatDataCon = pcDataCon floatDataConName [] [floatPrimTy] floatTyCon
doubleTy :: Type
doubleTy = mkTyConTy doubleTyCon
doubleTyCon :: TyCon
doubleTyCon = pcNonRecDataTyCon doubleTyConName
                (Just (CType "" Nothing (fsLit "HsDouble"))) []
                [doubleDataCon]
doubleDataCon :: DataCon
doubleDataCon = pcDataCon doubleDataConName [] [doublePrimTy] doubleTyCon
{-
************************************************************************
* *
\subsection[TysWiredIn-Bool]{The @Bool@ type}
* *
************************************************************************
An ordinary enumeration type, but deeply wired in. There are no
magical operations on @Bool@ (just the regular Prelude code).
{\em BEGIN IDLE SPECULATION BY SIMON}
This is not the only way to encode @Bool@. A more obvious coding makes
@Bool@ just a boxed up version of @Bool#@, like this:
\begin{verbatim}
type Bool# = Int#
data Bool = MkBool Bool#
\end{verbatim}
Unfortunately, this doesn't correspond to what the Report says @Bool@
looks like! Furthermore, we get slightly less efficient code (I
think) with this coding. @gtInt@ would look like this:
\begin{verbatim}
gtInt :: Int -> Int -> Bool
gtInt x y = case x of I# x# ->
case y of I# y# ->
case (gtIntPrim x# y#) of
b# -> MkBool b#
\end{verbatim}
Notice that the result of the @gtIntPrim@ comparison has to be turned
into an integer (here called @b#@), and returned in a @MkBool@ box.
The @if@ expression would compile to this:
\begin{verbatim}
case (gtInt x y) of
MkBool b# -> case b# of { 1# -> e1; 0# -> e2 }
\end{verbatim}
I think this code is a little less efficient than the previous code,
but I'm not certain. At all events, corresponding with the Report is
important. The interesting thing is that the language is expressive
enough to describe more than one alternative; and that a type doesn't
necessarily need to be a straightforwardly boxed version of its
primitive counterpart.
{\em END IDLE SPECULATION BY SIMON}
-}
-- Bool: a wired-in enumeration with nullary constructors False and True.
boolTy :: Type
boolTy = mkTyConTy boolTyCon
boolTyCon :: TyCon
boolTyCon = pcTyCon True NonRecursive True boolTyConName
                    (Just (CType "" Nothing (fsLit "HsBool")))
                    [] [falseDataCon, trueDataCon]
falseDataCon, trueDataCon :: DataCon
falseDataCon = pcDataCon falseDataConName [] [] boolTyCon
trueDataCon = pcDataCon trueDataConName [] [] boolTyCon
-- Worker Ids for the Bool constructors.
falseDataConId, trueDataConId :: Id
falseDataConId = dataConWorkId falseDataCon
trueDataConId = dataConWorkId trueDataCon
-- Ordering: another wired-in enumeration (LT, EQ, GT).
orderingTyCon :: TyCon
orderingTyCon = pcTyCon True NonRecursive True orderingTyConName Nothing
                        [] [ltDataCon, eqDataCon, gtDataCon]
ltDataCon, eqDataCon, gtDataCon :: DataCon
ltDataCon = pcDataCon ltDataConName [] [] orderingTyCon
eqDataCon = pcDataCon eqDataConName [] [] orderingTyCon
gtDataCon = pcDataCon gtDataConName [] [] orderingTyCon
-- Worker Ids for the Ordering constructors.
ltDataConId, eqDataConId, gtDataConId :: Id
ltDataConId = dataConWorkId ltDataCon
eqDataConId = dataConWorkId eqDataCon
gtDataConId = dataConWorkId gtDataCon
{-
************************************************************************
* *
\subsection[TysWiredIn-List]{The @List@ type (incl ``build'' magic)}
* *
************************************************************************
Special syntax, deeply wired in, but otherwise an ordinary algebraic
data types:
\begin{verbatim}
data [] a = [] | a : (List a)
data () = ()
data (,) a b = (,,) a b
...
\end{verbatim}
-}
-- | Build the list type @[ty]@.
mkListTy :: Type -> Type
mkListTy ty = mkTyConApp listTyCon [ty]
-- The list TyCon is Recursive (a : [a] mentions itself) and promotable.
listTyCon :: TyCon
listTyCon = pcTyCon False Recursive True
                    listTyConName Nothing alpha_tyvar [nilDataCon, consDataCon]
-- | Build the promoted (kind-level) list type.
mkPromotedListTy :: Type -> Type
mkPromotedListTy ty = mkTyConApp promotedListTyCon [ty]
promotedListTyCon :: TyCon
promotedListTyCon = promoteTyCon listTyCon
nilDataCon :: DataCon
nilDataCon = pcDataCon nilDataConName alpha_tyvar [] listTyCon
consDataCon :: DataCon
consDataCon = pcDataConWithFixity True {- Declared infix -}
               consDataConName
               alpha_tyvar [alphaTy, mkTyConApp listTyCon alpha_ty] listTyCon
-- Interesting: polymorphic recursion would help here.
-- We can't use (mkListTy alphaTy) in the defn of consDataCon, else mkListTy
-- gets the over-specific type (Type -> Type)
{-
************************************************************************
* *
\subsection[TysWiredIn-Tuples]{The @Tuple@ types}
* *
************************************************************************
The tuple types are definitely magic, because they form an infinite
family.
\begin{itemize}
\item
They have a special family of type constructors, of type @TyCon@
These contain the tycon arity, but don't require a Unique.
\item
They have a special family of constructors, of type
@Id@. Again these contain their arity but don't need a Unique.
\item
There should be a magic way of generating the info tables and
entry code for all tuples.
But at the moment we just compile a Haskell source
file\srcloc{lib/prelude/...} containing declarations like:
\begin{verbatim}
data Tuple0 = Tup0
data Tuple2 a b = Tup2 a b
data Tuple3 a b c = Tup3 a b c
data Tuple4 a b c d = Tup4 a b c d
...
\end{verbatim}
The print-names associated with the magic @Id@s for tuple constructors
``just happen'' to be the same as those generated by these
declarations.
\item
The instance environment should have a magic way to know
that each tuple type is an instance of classes @Eq@, @Ix@, @Ord@ and
so on. \ToDo{Not implemented yet.}
\item
There should also be a way to generate the appropriate code for each
of these instances, but (like the info tables and entry code) it is
done by enumeration\srcloc{lib/prelude/InTup?.hs}.
\end{itemize}
-}
-- | Build a tuple type of the given sort from the component types.
mkTupleTy :: TupleSort -> [Type] -> Type
-- Special case for *boxed* 1-tuples, which are represented by the type itself
mkTupleTy sort [ty] | Boxed <- tupleSortBoxity sort = ty
mkTupleTy sort tys = mkTyConApp (tupleTyCon sort (length tys)) tys
-- | Build the type of a small tuple that holds the specified type of thing
mkBoxedTupleTy :: [Type] -> Type
mkBoxedTupleTy tys = mkTupleTy BoxedTuple tys
-- | The unit type @()@, i.e. the nullary boxed tuple.
unitTy :: Type
unitTy = mkTupleTy BoxedTuple []
{-
************************************************************************
* *
\subsection[TysWiredIn-PArr]{The @[::]@ type}
* *
************************************************************************
Special syntax for parallel arrays needs some wired in definitions.
-}
-- | Construct a type representing the application of the parallel array constructor
mkPArrTy :: Type -> Type
mkPArrTy ty = mkTyConApp parrTyCon [ty]
-- | Represents the type constructor of parallel arrays
--
-- * This must match the definition in @PrelPArr@
--
-- NB: Although the constructor is given here, it will not be accessible in
-- user code as it is not in the environment of any compiled module except
-- @PrelPArr@.
--
parrTyCon :: TyCon
parrTyCon = pcNonRecDataTyCon parrTyConName Nothing alpha_tyvar [parrDataCon]
-- | The data constructor of parallel arrays: a length plus a primitive array.
parrDataCon :: DataCon
parrDataCon = pcDataCon
                parrDataConName
                alpha_tyvar           -- forall'ed type variables
                [intTy,               -- 1st argument: Int
                 mkTyConApp           -- 2nd argument: Array# a
                   arrayPrimTyCon
                   alpha_ty]
                parrTyCon
-- | Check whether a type constructor is the constructor for parallel arrays
isPArrTyCon :: TyCon -> Bool
isPArrTyCon tc = tyConName tc == parrTyConName
-- | Fake array constructors
--
-- * These constructors are never really used to represent array values;
-- however, they are very convenient during desugaring (and, in particular,
-- in the pattern matching compiler) to treat array pattern just like
-- yet another constructor pattern
--
parrFakeCon :: Arity -> DataCon
parrFakeCon i | i > mAX_TUPLE_SIZE = mkPArrFakeCon i -- build one specially
parrFakeCon i = parrFakeConArr!i                     -- cached, see below
-- pre-defined (memoised) set of constructors for arities 0..mAX_TUPLE_SIZE
--
parrFakeConArr :: Array Int DataCon
parrFakeConArr = array (0, mAX_TUPLE_SIZE) [(i, mkPArrFakeCon i)
                                           | i <- [0..mAX_TUPLE_SIZE]]
-- build a fake parallel array constructor for the given arity
--
mkPArrFakeCon :: Int -> DataCon
mkPArrFakeCon arity = con
  where
    -- Knot-tied: the wired-in Name refers back to the constructor itself.
    con      = pcDataCon con_name [tv] arg_tys parrTyCon
    tv       = head alphaTyVars
    arg_tys  = replicate arity (mkTyVarTy tv)
    occ_fs   = mkFastString ("MkPArr" ++ show arity)
    con_name = mkWiredInName gHC_PARR' (mkDataOccFS occ_fs) uniq
                             (AConLike (RealDataCon con)) UserSyntax
    uniq     = mkPArrDataConUnique arity
-- | Checks whether a data constructor is a fake constructor for parallel arrays
isPArrFakeCon :: DataCon -> Bool
isPArrFakeCon dcon = dcon == parrFakeCon (dataConSourceArity dcon)
-- Promoted Booleans: kind-level versions of Bool and its constructors.
promotedBoolTyCon, promotedFalseDataCon, promotedTrueDataCon :: TyCon
promotedBoolTyCon = promoteTyCon boolTyCon
promotedTrueDataCon = promoteDataCon trueDataCon
promotedFalseDataCon = promoteDataCon falseDataCon
-- Promoted Ordering: kind-level versions of Ordering and its constructors.
promotedOrderingTyCon
  , promotedLTDataCon
  , promotedEQDataCon
  , promotedGTDataCon
  :: TyCon
promotedOrderingTyCon = promoteTyCon orderingTyCon
promotedLTDataCon = promoteDataCon ltDataCon
promotedEQDataCon = promoteDataCon eqDataCon
promotedGTDataCon = promoteDataCon gtDataCon
| forked-upstream-packages-for-ghcjs/ghc | compiler/prelude/TysWiredIn.hs | bsd-3-clause | 33,431 | 0 | 14 | 8,709 | 5,157 | 2,826 | 2,331 | 444 | 10 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[FloatOut]{Float bindings outwards (towards the top level)}
``Long-distance'' floating of bindings towards the top level.
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module FloatOut ( floatOutwards ) where
import CoreSyn
import CoreUtils
import MkCore
import CoreArity ( etaExpand )
import CoreMonad ( FloatOutSwitches(..) )
import DynFlags
import ErrUtils ( dumpIfSet_dyn )
import Id ( Id, idArity, isBottomingId )
import Var ( Var )
import SetLevels
import UniqSupply ( UniqSupply )
import Bag
import Util
import Maybes
import Outputable
import FastString
import qualified Data.IntMap as M
#include "HsVersions.h"
{-
-----------------
Overall game plan
-----------------
The Big Main Idea is:
To float out sub-expressions that can thereby get outside
a non-one-shot value lambda, and hence may be shared.
To achieve this we may need to do two things:
a) Let-bind the sub-expression:
f (g x) ==> let lvl = f (g x) in lvl
Now we can float the binding for 'lvl'.
b) More than that, we may need to abstract wrt a type variable
\x -> ... /\a -> let v = ...a... in ....
Here the binding for v mentions 'a' but not 'x'. So we
abstract wrt 'a', to give this binding for 'v':
vp = /\a -> ...a...
v = vp a
Now the binding for vp can float out unimpeded.
I can't remember why this case seemed important enough to
deal with, but I certainly found cases where important floats
didn't happen if we did not abstract wrt tyvars.
With this in mind we can also achieve another goal: lambda lifting.
We can make an arbitrary (function) binding float to top level by
abstracting wrt *all* local variables, not just type variables, leaving
a binding that can be floated right to top level. Whether or not this
happens is controlled by a flag.
Random comments
~~~~~~~~~~~~~~~
At the moment we never float a binding out to between two adjacent
lambdas. For example:
@
\x y -> let t = x+x in ...
===>
\x -> let t = x+x in \y -> ...
@
Reason: this is less efficient in the case where the original lambda
is never partially applied.
But there's a case I've seen where this might not be true. Consider:
@
elEm2 x ys
= elem' x ys
where
elem' _ [] = False
elem' x (y:ys) = x==y || elem' x ys
@
It turns out that this generates a subexpression of the form
@
\deq x ys -> let eq = eqFromEqDict deq in ...
@
which might usefully be separated to
@
\deq -> let eq = eqFromEqDict deq in \xy -> ...
@
Well, maybe. We don't do this at the moment.
************************************************************************
* *
\subsection[floatOutwards]{@floatOutwards@: let-floating interface function}
* *
************************************************************************
-}
floatOutwards :: FloatOutSwitches
              -> DynFlags
              -> UniqSupply
              -> CoreProgram -> IO CoreProgram
-- Entry point of the float-out pass: annotate binders with their target
-- levels (SetLevels), float every top-level binding, then emit the
-- debug dumps / statistics requested by the DynFlags.
floatOutwards float_sws dflags us pgm
  = do {
        let { annotated_w_levels = setLevels float_sws pgm us ;
              (fss, binds_s')    = unzip (map floatTopBind annotated_w_levels)
            } ;
        dumpIfSet_dyn dflags Opt_D_verbose_core2core "Levels added:"
                  (vcat (map ppr annotated_w_levels));
        let { (tlets, ntlets, lams) = get_stats (sum_stats fss) };
        dumpIfSet_dyn dflags Opt_D_dump_simpl_stats "FloatOut stats:"
                (hcat [ int tlets,  ptext (sLit " Lets floated to top level; "),
                        int ntlets, ptext (sLit " Lets floated elsewhere; from "),
                        int lams,   ptext (sLit " Lambda groups")]);
        return (bagToList (unionManyBags binds_s'))
    }
floatTopBind :: LevelledBind -> (FloatStats, Bag CoreBind)
-- Float one top-level binding.  All floats from its RHS end up at top
-- level too; for a Rec they are spliced into the same Rec group.
floatTopBind bind
  = case (floatBind bind) of { (fs, floats, bind') ->
    let float_bag = flattenTopFloats floats
    in case bind' of
      Rec prs   -> (fs, unitBag (Rec (addTopFloatPairs float_bag prs)))
      NonRec {} -> (fs, float_bag `snocBag` bind') }
{-
************************************************************************
* *
\subsection[FloatOut-Bind]{Floating in a binding (the business end)}
* *
************************************************************************
-}
floatBind :: LevelledBind -> (FloatStats, FloatBinds, CoreBind)
-- Float the RHS(s) of one binding, returning the escaping floats
-- separately from the residual binding.
floatBind (NonRec (TB var _) rhs)
  = case (floatExpr rhs) of { (fs, rhs_floats, rhs') ->
        -- A tiresome hack:
        -- see Note [Bottoming floats: eta expansion] in SetLevels
    let rhs'' | isBottomingId var = etaExpand (idArity var) rhs'
              | otherwise         = rhs'
    in (fs, rhs_floats, NonRec var rhs'') }
floatBind (Rec pairs)
  = case floatList do_pair pairs of { (fs, rhs_floats, new_pairs) ->
    (fs, rhs_floats, Rec (concat new_pairs)) }
  where
    do_pair (TB name spec, rhs)
      | isTopLvl dest_lvl -- See Note [floatBind for top level]
      = case (floatExpr rhs) of { (fs, rhs_floats, rhs') ->
        (fs, emptyFloats, addTopFloatPairs (flattenTopFloats rhs_floats) [(name, rhs')])}
      | otherwise         -- Note [Floating out of Rec rhss]
      = case (floatExpr rhs) of { (fs, rhs_floats, rhs') ->
        case (partitionByLevel dest_lvl rhs_floats) of { (rhs_floats', heres) ->
        case (splitRecFloats heres) of { (pairs, case_heres) ->
        (fs, rhs_floats', (name, installUnderLambdas case_heres rhs') : pairs) }}}
      where
        dest_lvl = floatSpecLevel spec
splitRecFloats :: Bag FloatBind -> ([(Id,CoreExpr)], Bag FloatBind)
-- Peel leading FloatLets off the bag; everything from the first
-- non-let float (i.e. a FloatCase) onwards is returned untouched.
-- See Note [Floating out of Rec rhss]
splitRecFloats float_bag
  = peel [] (bagToList float_bag)
  where
    peel acc (FloatLet (NonRec b r) : rest) = peel ((b, r) : acc) rest
    peel acc (FloatLet (Rec more)   : rest) = peel (more ++ acc) rest
    peel acc rest                           = (acc, listToBag rest)
installUnderLambdas :: Bag FloatBind -> CoreExpr -> CoreExpr
-- Push the given floats underneath the leading lambdas of the
-- expression; a no-op when the bag is empty.
-- Note [Floating out of Rec rhss]
installUnderLambdas float_bag expr
  | isEmptyBag float_bag = expr
  | otherwise            = push expr
  where
    push (Lam b body) = Lam b (push body)
    push body         = install float_bag body
---------------
floatList :: (a -> (FloatStats, FloatBinds, b)) -> [a] -> (FloatStats, FloatBinds, [b])
-- Map a float-producing function over a list, accumulating the
-- statistics and floated bindings of every element.
floatList _ []     = (zeroStats, emptyFloats, [])
floatList f (x:xs) = case f x of { (stats1, floats1, y) ->
                     case floatList f xs of { (stats2, floats2, ys) ->
                     (stats1 `add_stats` stats2, floats1 `plusFloats` floats2, y:ys) }}
{-
Note [Floating out of Rec rhss]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider Rec { f<1,0> = \xy. body }
From the body we may get some floats. The ones with level <1,0> must
stay here, since they may mention f. Ideally we'd like to make them
part of the Rec block pairs -- but we can't if there are any
FloatCases involved.
Nor is it a good idea to dump them in the rhs, but outside the lambda
f = case x of I# y -> \xy. body
because now f's arity might get worse, which is Not Good. (And if
there's an SCC around the RHS it might not get better again.
See Trac #5342.)
So, gruesomely, we split the floats into
* the outer FloatLets, which can join the Rec, and
* an inner batch starting in a FloatCase, which are then
pushed *inside* the lambdas.
This loses full-laziness in the rare situation where there is a
FloatCase and a Rec interacting.
Note [floatBind for top level]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We may have a *nested* binding whose destination level is (FloatMe tOP_LEVEL), thus
letrec { foo <0,0> = .... (let bar<0,0> = .. in ..) .... }
The binding for bar will be in the "tops" part of the floating binds,
and thus not partitioned by floatBody.
We could perhaps get rid of the 'tops' component of the floating binds,
but this case works just as well.
************************************************************************
\subsection[FloatOut-Expr]{Floating in expressions}
* *
************************************************************************
-}
floatBody :: Level
          -> LevelledExpr
          -> (FloatStats, FloatBinds, CoreExpr)
-- Float an expression sitting directly under a binder at the given
-- level (used for rec rhss and case-alternative rhss): floats destined
-- for this level or deeper are installed here, the rest keep going.
floatBody lvl body
  = case floatExpr body of { (stats, all_floats, body') ->
    case partitionByLevel lvl all_floats of { (escaping, here) ->
    (stats, escaping, install here body') }}
-----------------
floatExpr :: LevelledExpr
          -> (FloatStats, FloatBinds, CoreExpr)
-- The business end: float one expression, returning the floated-out
-- bindings separately from the residual expression.
floatExpr (Var v)       = (zeroStats, emptyFloats, Var v)
floatExpr (Type ty)     = (zeroStats, emptyFloats, Type ty)
floatExpr (Coercion co) = (zeroStats, emptyFloats, Coercion co)
floatExpr (Lit lit)     = (zeroStats, emptyFloats, Lit lit)
floatExpr (App e a)
  = case (floatExpr e) of { (fse, floats_e, e') ->
    case (floatExpr a) of { (fsa, floats_a, a') ->
    (fse `add_stats` fsa, floats_e `plusFloats` floats_a, App e' a') }}
floatExpr lam@(Lam (TB _ lam_spec) _)
  = let (bndrs_w_lvls, body) = collectBinders lam
        bndrs = [b | TB b _ <- bndrs_w_lvls]
        bndr_lvl = floatSpecLevel lam_spec
        -- All the binders have the same level
        -- See SetLevels.lvlLamBndrs
    in
    case (floatBody bndr_lvl body) of { (fs, floats, body') ->
    (add_to_stats fs floats, floats, mkLams bndrs body') }
floatExpr (Tick tickish expr)
  | tickish `tickishScopesLike` SoftScope -- not scoped, can just float
  = case (floatExpr expr) of { (fs, floating_defns, expr') ->
    (fs, floating_defns, Tick tickish expr') }
  | not (tickishCounts tickish) || tickishCanSplit tickish
  = case (floatExpr expr) of { (fs, floating_defns, expr') ->
    let -- Annotate bindings floated outwards past an scc expression
        -- with the cc. We mark that cc as "duplicated", though.
        annotated_defns = wrapTick (mkNoCount tickish) floating_defns
    in
    (fs, annotated_defns, Tick tickish expr') }
  | otherwise
  = pprPanic "floatExpr tick" (ppr tickish)
floatExpr (Cast expr co)
  = case (floatExpr expr) of { (fs, floating_defns, expr') ->
    (fs, floating_defns, Cast expr' co) }
floatExpr (Let bind body)
  = case bind_spec of
      FloatMe dest_lvl  -- the binding itself floats to dest_lvl
        -> case (floatBind bind) of { (fsb, bind_floats, bind') ->
           case (floatExpr body) of { (fse, body_floats, body') ->
           ( add_stats fsb fse
           , bind_floats `plusFloats` unitLetFloat dest_lvl bind'
                         `plusFloats` body_floats
           , body') }}
      StayPut bind_lvl -- See Note [Avoiding unnecessary floating]
        -> case (floatBind bind) of { (fsb, bind_floats, bind') ->
           case (floatBody bind_lvl body) of { (fse, body_floats, body') ->
           ( add_stats fsb fse
           , bind_floats `plusFloats` body_floats
           , Let bind' body') }}
  where
    bind_spec = case bind of
                  NonRec (TB _ s) _     -> s
                  Rec ((TB _ s, _) : _) -> s
                  Rec []                -> panic "floatExpr:rec"
floatExpr (Case scrut (TB case_bndr case_spec) ty alts)
  = case case_spec of
      FloatMe dest_lvl -- Case expression moves
        | [(con@(DataAlt {}), bndrs, rhs)] <- alts
        -> case floatExpr scrut of { (fse, fde, scrut') ->
           case floatExpr rhs of { (fsb, fdb, rhs') ->
           let
             float = unitCaseFloat dest_lvl scrut'
                          case_bndr con [b | TB b _ <- bndrs]
           in
           (add_stats fse fsb, fde `plusFloats` float `plusFloats` fdb, rhs') }}
        | otherwise
        -> pprPanic "Floating multi-case" (ppr alts)
      StayPut bind_lvl -- Case expression stays put
        -> case floatExpr scrut of { (fse, fde, scrut') ->
           case floatList (float_alt bind_lvl) alts of { (fsa, fda, alts') ->
           (add_stats fse fsa, fda `plusFloats` fde, Case scrut' case_bndr ty alts')
           }}
  where
    float_alt bind_lvl (con, bs, rhs)
      = case (floatBody bind_lvl rhs) of { (fs, rhs_floats, rhs') ->
        (fs, rhs_floats, (con, [b | TB b _ <- bs], rhs')) }
{-
Note [Avoiding unnecessary floating]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we want to avoid floating a let unnecessarily, because
it might worsen strictness:
let
x = ...(let y = e in y+y)....
Here y is demanded. If we float it outside the lazy 'x=..' then
we'd have to zap its demand info, and it may never be restored.
So at a 'let' we leave the bindings right where they are unless
the binding will escape a value lambda, e.g.
(\x -> let y = fac 100 in y)
That's what the partitionByMajorLevel does in the floatExpr (Let ...)
case.
Notice, though, that we must take care to drop any bindings
from the body of the let that depend on the staying-put bindings.
We used instead to do the partitionByMajorLevel on the RHS of an '=',
in floatRhs. But that was quite tiresome. We needed to test for
values or trivial rhss, because (in particular) we don't want to insert
new bindings between the "=" and the "\". E.g.
f = \x -> let <bind> in <body>
We do not want
f = let <bind> in \x -> <body>
(a) The simplifier will immediately float it further out, so we may
as well do so right now; in general, keeping rhss as manifest
values is good
(b) If a float-in pass follows immediately, it might add yet more
bindings just after the '='. And some of them might (correctly)
be strict even though the 'let f' is lazy, because f, being a value,
gets its demand-info zapped by the simplifier.
And even all that turned out to be very fragile, and broke
altogether when profiling got in the way.
So now we do the partition right at the (Let..) itself.
************************************************************************
* *
\subsection{Utility bits for floating stats}
* *
************************************************************************
I didn't implement this with unboxed numbers. I don't want to be too
strict in this stuff, as it is rarely turned on. (WDP 95/09)
-}
-- | Counters reported under \"FloatOut stats:\" (see 'floatOutwards').
data FloatStats
  = FlS Int -- Number of top-floats * lambda groups they've been past
        Int -- Number of non-top-floats * lambda groups they've been past
        Int -- Number of lambda (groups) seen
-- | Project the three counters out of a 'FloatStats'.
get_stats :: FloatStats -> (Int, Int, Int)
get_stats (FlS top_floats other_floats lam_groups)
  = (top_floats, other_floats, lam_groups)
-- | All counters zero; identity for 'add_stats'.
zeroStats :: FloatStats
zeroStats = FlS 0 0 0
-- | Combine a list of statistics pointwise (empty list gives 'zeroStats').
sum_stats :: [FloatStats] -> FloatStats
sum_stats = foldr add_stats zeroStats
-- | Pointwise sum of two statistics records.
add_stats :: FloatStats -> FloatStats -> FloatStats
add_stats (FlS a1 b1 c1) (FlS a2 b2 c2)
  = FlS (a1 + a2) (b1 + b2) (c1 + c2)
-- | Record one more lambda group, counting the floats that passed it.
add_to_stats :: FloatStats -> FloatBinds -> FloatStats
add_to_stats (FlS a b c) (FB tops others)
  = FlS (a + lengthBag tops) (b + lengthBag (flattenMajor others)) (c + 1)
{-
************************************************************************
* *
\subsection{Utility bits for floating}
* *
************************************************************************
Note [Representation of FloatBinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The FloatBinds types is somewhat important. We can get very large numbers
of floating bindings, often all destined for the top level. A typical example
is x = [4,2,5,2,5, .... ]
Then we get lots of small expressions like (fromInteger 4), which all get
lifted to top level.
The trouble is that
(a) we partition these floating bindings *at every binding site*
(b) SetLevels introduces a new bindings site for every float
So we had better not look at each binding at each binding site!
That is why MajorEnv is represented as a finite map.
We keep the bindings destined for the *top* level separate, because
we float them out even if they don't escape a *value* lambda; see
partitionByMajorLevel.
-}
type FloatLet = CoreBind        -- INVARIANT: a FloatLet is always lifted
type MajorEnv = M.IntMap MinorEnv          -- Keyed by major level
type MinorEnv = M.IntMap (Bag FloatBind)   -- Keyed by minor level
-- Two-level map so partitioning at a level need not inspect every float.
data FloatBinds = FB !(Bag FloatLet)       -- Destined for top level
                     !MajorEnv             -- Levels other than top
     -- See Note [Representation of FloatBinds]
instance Outputable FloatBinds where
  ppr (FB fbs defs)
    = ptext (sLit "FB") <+> (braces $ vcat
         [ ptext (sLit "tops =") <+> ppr fbs
         , ptext (sLit "non-tops =") <+> ppr defs ])
flattenTopFloats :: FloatBinds -> Bag CoreBind
-- Precondition (debug-checked): only top-level floats remain.
flattenTopFloats (FB tops defs)
  = ASSERT2( isEmptyBag (flattenMajor defs), ppr defs )
    tops
addTopFloatPairs :: Bag CoreBind -> [(Id,CoreExpr)] -> [(Id,CoreExpr)]
-- Splice the bindings in the bag, pair-by-pair, onto the given list.
addTopFloatPairs float_bag pairs0
  = foldrBag add_bind pairs0 float_bag
  where
    add_bind (NonRec b r) pairs = (b, r) : pairs
    add_bind (Rec prs)    pairs = prs ++ pairs
flattenMajor :: MajorEnv -> Bag FloatBind
-- Collect every binding at every (major, minor) level into one bag.
-- NB: 'M.fold' is a deprecated alias of 'foldr' in Data.IntMap;
-- use the canonical name.
flattenMajor = M.foldr (unionBags . flattenMinor) emptyBag
flattenMinor :: MinorEnv -> Bag FloatBind
-- Collect the bindings of all minor levels into one bag.
flattenMinor = M.foldr unionBags emptyBag
-- | A 'FloatBinds' with no bindings at any level.
emptyFloats :: FloatBinds
emptyFloats = FB emptyBag M.empty
unitCaseFloat :: Level -> CoreExpr -> Id -> AltCon -> [Var] -> FloatBinds
-- A singleton 'FloatBinds' holding one case-float at the given level.
unitCaseFloat (Level major minor) scrut bndr con alt_bndrs
  = FB emptyBag (M.singleton major (M.singleton minor one_float))
  where
    one_float = unitBag (FloatCase scrut bndr con alt_bndrs)
unitLetFloat :: Level -> FloatLet -> FloatBinds
-- A singleton 'FloatBinds' holding one let-float; a top-level binding
-- goes straight into the dedicated top bag.
unitLetFloat lvl@(Level major minor) bind
  | isTopLvl lvl = FB (unitBag bind) M.empty
  | otherwise    = FB emptyBag
                      (M.singleton major (M.singleton minor (unitBag (FloatLet bind))))
-- | Union of two 'FloatBinds', merging the per-level bags.
plusFloats :: FloatBinds -> FloatBinds -> FloatBinds
plusFloats (FB t1 l1) (FB t2 l2)
  = FB (t1 `unionBags` t2) (l1 `plusMajor` l2)
-- | Merge two major-level maps, combining matching minor maps.
plusMajor :: MajorEnv -> MajorEnv -> MajorEnv
plusMajor = M.unionWith plusMinor
-- | Merge two minor-level maps, unioning matching bags.
plusMinor :: MinorEnv -> MinorEnv -> MinorEnv
plusMinor = M.unionWith unionBags
install :: Bag FloatBind -> CoreExpr -> CoreExpr
-- Wrap every floated binding in the bag around the expression.
install float_bag expr = foldrBag wrapFloat expr float_bag
partitionByLevel
        :: Level                -- Partitioning level
        -> FloatBinds           -- Defns to be divided into 2 piles...
        -> (FloatBinds,         -- Defns with level strictly < partition level,
            Bag FloatBind)      -- The rest
{-
-- ----  partitionByMajorLevel ----
-- Float it if we escape a value lambda,
--     *or* if we get to the top level
--     *or* if it's a case-float and its minor level is < current
--
-- If we can get to the top level, say "yes" anyway. This means that
--      x = f e
-- transforms to
--    lvl = e
--    x = f lvl
-- which is as it should be
partitionByMajorLevel (Level major _) (FB tops defns)
  = (FB tops outer, heres `unionBags` flattenMajor inner)
  where
    (outer, mb_heres, inner) = M.splitLookup major defns
    heres = case mb_heres of
               Nothing -> emptyBag
               Just h  -> flattenMinor h
-}
-- Split at (major, minor): strictly-outer levels keep floating (first
-- component); everything at this level or deeper is dumped here (second
-- component).  Top-level floats always pass through untouched.
partitionByLevel (Level major minor) (FB tops defns)
  = (FB tops (outer_maj `plusMajor` M.singleton major outer_min),
     here_min `unionBags` flattenMinor inner_min
              `unionBags` flattenMajor inner_maj)
  where
    (outer_maj, mb_here_maj, inner_maj) = M.splitLookup major defns
    (outer_min, mb_here_min, inner_min) = case mb_here_maj of
                                            Nothing        -> (M.empty, Nothing, M.empty)
                                            Just min_defns -> M.splitLookup minor min_defns
    here_min = mb_here_min `orElse` emptyBag
wrapTick :: Tickish Id -> FloatBinds -> FloatBinds
-- Wrap the tick around every floated binding, so cost attribution
-- survives the binding's move past the tick.
wrapTick t (FB tops defns)
  = FB (mapBag wrap_bind tops) (M.map (M.map wrap_defns) defns)
  where
    wrap_defns = mapBag wrap_one
    wrap_bind (NonRec binder rhs) = NonRec binder (maybe_tick rhs)
    wrap_bind (Rec pairs)         = Rec (mapSnd maybe_tick pairs)
    wrap_one (FloatLet bind)      = FloatLet (wrap_bind bind)
    wrap_one (FloatCase e b c bs) = FloatCase (maybe_tick e) b c bs
    maybe_tick e | exprIsHNF e = tickHNFArgs t e
                 | otherwise   = mkTick t e
      -- we don't need to wrap a tick around an HNF when we float it
      -- outside a tick: that is an invariant of the tick semantics
      -- Conversely, inlining of HNFs inside an SCC is allowed, and
      -- indeed the HNF we're floating here might well be inlined back
      -- again, and we don't want to end up with duplicate ticks.
| green-haskell/ghc | compiler/simplCore/FloatOut.hs | bsd-3-clause | 20,987 | 1 | 27 | 5,706 | 4,008 | 2,156 | 1,852 | 241 | 5 |
#!/usr/bin/runhaskell
import Distribution.Simple
-- | Standard Cabal build entry point; no custom build steps.
main = defaultMain
| jordanemedlock/fungen | Setup.hs | bsd-3-clause | 68 | 0 | 4 | 6 | 12 | 7 | 5 | 2 | 1 |
{-# LANGUAGE TypeApplications #-}
module E where
import A
import B
import C
import D
-- 'c = id' only type-checks if @F (a, C)@ reduces to 'Bool'
-- (presumably via an instance in one of the imported modules A-D;
-- this looks like a recompilation-test driver -- TODO confirm).
c :: F (a, C) -> Bool
c = id
-- Forces the instance at type 'C' via a visible type application.
e :: () -> Bool
e = c . b @ C
| ezyang/ghc | testsuite/tests/driver/recomp017/E.hs | bsd-3-clause | 144 | 0 | 7 | 37 | 65 | 38 | 27 | 10 | 1 |
module Language.Atom.MSP430 (
module Language.Atom,
module Language.Atom.MSP430.Watchdog,
module Language.Atom.MSP430.DigitalIO,
module Language.Atom.MSP430.TimerA,
module Language.Atom.MSP430.Interrupts,
module Language.Atom.MSP430.Compile
) where
import Language.Atom
import Language.Atom.MSP430.Watchdog
import Language.Atom.MSP430.DigitalIO
import Language.Atom.MSP430.TimerA
import Language.Atom.MSP430.Interrupts
import Language.Atom.MSP430.Compile
| eightyeight/atom-msp430 | Language/Atom/MSP430.hs | mit | 482 | 0 | 5 | 56 | 95 | 68 | 27 | 13 | 0 |
-- Counting Change Combinations
-- http://www.codewars.com/kata/541af676b589989aed0009e7/
module Change where
import Data.List (sortBy)
countChange :: Integer -> [Integer] -> Integer
-- Number of ways to assemble @amount@ from the given denominations
-- (order of coins does not matter).  Denominations are tried
-- largest-first; the single-denomination case is a divisibility test.
countChange amount denoms = go amount (sortBy (flip compare) denoms)
  where
    go 0 _  = 1
    go _ [] = 0
    go m [c]
      | m `mod` c == 0 = 1
      | otherwise      = 0
    go m (c:cs) = sum [ go (m - used) cs
                      | used <- takeWhile (<= m) (0 : [c, 2*c ..]) ]
-- Compiler Toolkit: compiler state management basics
--
-- Author : Manuel M. T. Chakravarty
-- Created: 7 November 97
--
-- Copyright (C) [1997..1999] Manuel M. T. Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides basic types and services used to realize the state
-- management of the compiler.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
-- * The monad `PreCST' is an instance of `STB' where the base state is fixed.
-- However, the base state itself is parametrized by an extra state
-- component that can be instantiated by the compiler that uses the toolkit
-- (to store information like compiler switches) -- this is the reason for
-- adding the prefix `Pre'.
--
-- * The module exports the details of the `BaseState' etc as they have to be
-- known by `State'. The latter ensures the necessary abstraction for
-- modules that do not belong to the state management.
--
-- * Due to this module, the state management modules can share internal
-- information about the data types hidden to the rest of the system.
--
-- * The following state components are maintained:
--
-- + errorsBS (type `ErrorState') -- keeps track of raised errors
-- + namesBS (type `NameSupply') -- provides unique names
-- + extraBS (generic type) -- extra compiler-dependent state
-- information, e.g., for compiler
-- switches
--
--- TODO ----------------------------------------------------------------------
--
module Control.StateBase (PreCST(..), ErrorState(..), BaseState(..),
unpackCST, readCST, writeCST, transCST, liftIO)
where
import Control.StateTrans (STB, readGeneric, writeGeneric, transGeneric)
import qualified Control.StateTrans as StateTrans (liftIO)
import Data.Errors (ErrorLevel(..), Error)
import Language.C.Data.Name
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
-- state used in the whole compiler
-- --------------------------------
-- | form of the error state
--
-- * when no error was raised yet, the error level is the lowest possible one
--
data ErrorState = ErrorState ErrorLevel -- worst error level that was raised
                             Int        -- number of errors (excl warnings)
                             [Error]    -- already raised errors
-- | base state
--
-- Shared by the whole compiler; the type parameter @e@ is the
-- compiler-specific extra component (e.g. switches).
data BaseState e = BaseState {
                     errorsBS :: ErrorState, -- errors raised so far
                     supplyBS :: [Name],     -- unique names
                     extraBS  :: e           -- extra state
                   }
-- | the compiler state transformer
--
-- @e@: extra base-state component; @s@: generic state; @a@: result
newtype PreCST e s a = CST (STB (BaseState e) s a)
instance Functor (PreCST e s) where
  fmap = liftM  -- derived from the Monad instance
instance Applicative (PreCST e s) where
  -- Canonical AMP form: define 'pure' directly rather than as
  -- 'pure = return' (which triggers -Wnoncanonical-monad-instances
  -- on modern GHC).  'yield' is exactly the unit of this monad.
  pure  = yield
  (<*>) = ap
instance Monad (PreCST e s) where
  return = yield        -- implemented by 'yield' below
  (>>=) = (+>=)         -- implemented by '(+>=)' below
-- | Unwrap a 'PreCST' computation to the underlying 'STB' transformer.
--
unpackCST :: PreCST e s a -> STB (BaseState e) s a
unpackCST (CST m) = m
-- monad operations
-- ----------------
-- | the monad's unit: inject a pure value into 'PreCST'
--
yield :: a -> PreCST e s a
yield = CST . return
-- | the monad's bind: sequence two 'PreCST' computations
--
(+>=) :: PreCST e s a -> (a -> PreCST e s b) -> PreCST e s b
(CST m) +>= k = CST (m >>= unpackCST . k)
-- generic state manipulation
-- --------------------------
-- | lift a state-reader function into the CST monad
--
readCST :: (s -> a) -> PreCST e s a
readCST = CST . readGeneric
-- | replace the state carried by the CST monad
--
writeCST :: s -> PreCST e s ()
writeCST = CST . writeGeneric
-- | lift a state-transformer function into the CST monad
--
transCST :: (s -> (s, a)) -> PreCST e s a
transCST = CST . transGeneric
-- interaction with the encapsulated 'IO' monad
-- --------------------------------------------
-- | lift an 'IO' action into 'CST'
--
liftIO :: IO a -> PreCST e s a
liftIO = CST . StateTrans.liftIO
| ian-ross/c2hs-macos-test | c2hs-0.26.1/src/Control/StateBase.hs | mit | 4,685 | 0 | 10 | 1,157 | 683 | 415 | 268 | 38 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html
module Stratosphere.Resources.ElastiCacheCacheCluster where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.Tag
-- | Full data type definition for ElastiCacheCacheCluster. See
-- 'elastiCacheCacheCluster' for a more convenient constructor.
--
-- Generated-style record: each field mirrors one CloudFormation property.
-- 'Maybe' fields are optional properties; plain 'Val'/'ValList' fields are
-- required.
data ElastiCacheCacheCluster =
  ElastiCacheCacheCluster
  { _elastiCacheCacheClusterAZMode :: Maybe (Val Text)
  , _elastiCacheCacheClusterAutoMinorVersionUpgrade :: Maybe (Val Bool)
  , _elastiCacheCacheClusterCacheNodeType :: Val Text
  , _elastiCacheCacheClusterCacheParameterGroupName :: Maybe (Val Text)
  , _elastiCacheCacheClusterCacheSecurityGroupNames :: Maybe (ValList Text)
  , _elastiCacheCacheClusterCacheSubnetGroupName :: Maybe (Val Text)
  , _elastiCacheCacheClusterClusterName :: Maybe (Val Text)
  , _elastiCacheCacheClusterEngine :: Val Text
  , _elastiCacheCacheClusterEngineVersion :: Maybe (Val Text)
  , _elastiCacheCacheClusterNotificationTopicArn :: Maybe (Val Text)
  , _elastiCacheCacheClusterNumCacheNodes :: Val Integer
  , _elastiCacheCacheClusterPort :: Maybe (Val Integer)
  , _elastiCacheCacheClusterPreferredAvailabilityZone :: Maybe (Val Text)
  , _elastiCacheCacheClusterPreferredAvailabilityZones :: Maybe (ValList Text)
  , _elastiCacheCacheClusterPreferredMaintenanceWindow :: Maybe (Val Text)
  , _elastiCacheCacheClusterSnapshotArns :: Maybe (ValList Text)
  , _elastiCacheCacheClusterSnapshotName :: Maybe (Val Text)
  , _elastiCacheCacheClusterSnapshotRetentionLimit :: Maybe (Val Integer)
  , _elastiCacheCacheClusterSnapshotWindow :: Maybe (Val Text)
  , _elastiCacheCacheClusterTags :: Maybe [Tag]
  , _elastiCacheCacheClusterVpcSecurityGroupIds :: Maybe (ValList Text)
  } deriving (Show, Eq)
-- Serialise the record into CloudFormation resource properties. Optional
-- ('Maybe') fields are dropped when 'Nothing' via 'fmap' + 'catMaybes';
-- required fields are always emitted.
instance ToResourceProperties ElastiCacheCacheCluster where
  toResourceProperties ElastiCacheCacheCluster{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::ElastiCache::CacheCluster"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("AZMode",) . toJSON) _elastiCacheCacheClusterAZMode
        , fmap (("AutoMinorVersionUpgrade",) . toJSON) _elastiCacheCacheClusterAutoMinorVersionUpgrade
        , (Just . ("CacheNodeType",) . toJSON) _elastiCacheCacheClusterCacheNodeType
        , fmap (("CacheParameterGroupName",) . toJSON) _elastiCacheCacheClusterCacheParameterGroupName
        , fmap (("CacheSecurityGroupNames",) . toJSON) _elastiCacheCacheClusterCacheSecurityGroupNames
        , fmap (("CacheSubnetGroupName",) . toJSON) _elastiCacheCacheClusterCacheSubnetGroupName
        , fmap (("ClusterName",) . toJSON) _elastiCacheCacheClusterClusterName
        , (Just . ("Engine",) . toJSON) _elastiCacheCacheClusterEngine
        , fmap (("EngineVersion",) . toJSON) _elastiCacheCacheClusterEngineVersion
        , fmap (("NotificationTopicArn",) . toJSON) _elastiCacheCacheClusterNotificationTopicArn
        , (Just . ("NumCacheNodes",) . toJSON) _elastiCacheCacheClusterNumCacheNodes
        , fmap (("Port",) . toJSON) _elastiCacheCacheClusterPort
        , fmap (("PreferredAvailabilityZone",) . toJSON) _elastiCacheCacheClusterPreferredAvailabilityZone
        , fmap (("PreferredAvailabilityZones",) . toJSON) _elastiCacheCacheClusterPreferredAvailabilityZones
        , fmap (("PreferredMaintenanceWindow",) . toJSON) _elastiCacheCacheClusterPreferredMaintenanceWindow
        , fmap (("SnapshotArns",) . toJSON) _elastiCacheCacheClusterSnapshotArns
        , fmap (("SnapshotName",) . toJSON) _elastiCacheCacheClusterSnapshotName
        , fmap (("SnapshotRetentionLimit",) . toJSON) _elastiCacheCacheClusterSnapshotRetentionLimit
        , fmap (("SnapshotWindow",) . toJSON) _elastiCacheCacheClusterSnapshotWindow
        , fmap (("Tags",) . toJSON) _elastiCacheCacheClusterTags
        , fmap (("VpcSecurityGroupIds",) . toJSON) _elastiCacheCacheClusterVpcSecurityGroupIds
        ]
    }
-- | Constructor for 'ElastiCacheCacheCluster' containing required fields as
-- arguments.
--
-- Every optional property defaults to 'Nothing'; override them afterwards
-- with the @eccc*@ lenses below.
elastiCacheCacheCluster
  :: Val Text -- ^ 'ecccCacheNodeType'
  -> Val Text -- ^ 'ecccEngine'
  -> Val Integer -- ^ 'ecccNumCacheNodes'
  -> ElastiCacheCacheCluster
elastiCacheCacheCluster cacheNodeTypearg enginearg numCacheNodesarg =
  ElastiCacheCacheCluster
  { _elastiCacheCacheClusterAZMode = Nothing
  , _elastiCacheCacheClusterAutoMinorVersionUpgrade = Nothing
  , _elastiCacheCacheClusterCacheNodeType = cacheNodeTypearg
  , _elastiCacheCacheClusterCacheParameterGroupName = Nothing
  , _elastiCacheCacheClusterCacheSecurityGroupNames = Nothing
  , _elastiCacheCacheClusterCacheSubnetGroupName = Nothing
  , _elastiCacheCacheClusterClusterName = Nothing
  , _elastiCacheCacheClusterEngine = enginearg
  , _elastiCacheCacheClusterEngineVersion = Nothing
  , _elastiCacheCacheClusterNotificationTopicArn = Nothing
  , _elastiCacheCacheClusterNumCacheNodes = numCacheNodesarg
  , _elastiCacheCacheClusterPort = Nothing
  , _elastiCacheCacheClusterPreferredAvailabilityZone = Nothing
  , _elastiCacheCacheClusterPreferredAvailabilityZones = Nothing
  , _elastiCacheCacheClusterPreferredMaintenanceWindow = Nothing
  , _elastiCacheCacheClusterSnapshotArns = Nothing
  , _elastiCacheCacheClusterSnapshotName = Nothing
  , _elastiCacheCacheClusterSnapshotRetentionLimit = Nothing
  , _elastiCacheCacheClusterSnapshotWindow = Nothing
  , _elastiCacheCacheClusterTags = Nothing
  , _elastiCacheCacheClusterVpcSecurityGroupIds = Nothing
  }
-- Lens accessors: one per CloudFormation property, generated-style. Each
-- doc link points at the corresponding property's AWS documentation.
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-azmode
ecccAZMode :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccAZMode = lens _elastiCacheCacheClusterAZMode (\s a -> s { _elastiCacheCacheClusterAZMode = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-autominorversionupgrade
ecccAutoMinorVersionUpgrade :: Lens' ElastiCacheCacheCluster (Maybe (Val Bool))
ecccAutoMinorVersionUpgrade = lens _elastiCacheCacheClusterAutoMinorVersionUpgrade (\s a -> s { _elastiCacheCacheClusterAutoMinorVersionUpgrade = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-cachenodetype
ecccCacheNodeType :: Lens' ElastiCacheCacheCluster (Val Text)
ecccCacheNodeType = lens _elastiCacheCacheClusterCacheNodeType (\s a -> s { _elastiCacheCacheClusterCacheNodeType = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-cacheparametergroupname
ecccCacheParameterGroupName :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccCacheParameterGroupName = lens _elastiCacheCacheClusterCacheParameterGroupName (\s a -> s { _elastiCacheCacheClusterCacheParameterGroupName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-cachesecuritygroupnames
ecccCacheSecurityGroupNames :: Lens' ElastiCacheCacheCluster (Maybe (ValList Text))
ecccCacheSecurityGroupNames = lens _elastiCacheCacheClusterCacheSecurityGroupNames (\s a -> s { _elastiCacheCacheClusterCacheSecurityGroupNames = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-cachesubnetgroupname
ecccCacheSubnetGroupName :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccCacheSubnetGroupName = lens _elastiCacheCacheClusterCacheSubnetGroupName (\s a -> s { _elastiCacheCacheClusterCacheSubnetGroupName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-clustername
ecccClusterName :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccClusterName = lens _elastiCacheCacheClusterClusterName (\s a -> s { _elastiCacheCacheClusterClusterName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-engine
ecccEngine :: Lens' ElastiCacheCacheCluster (Val Text)
ecccEngine = lens _elastiCacheCacheClusterEngine (\s a -> s { _elastiCacheCacheClusterEngine = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-engineversion
ecccEngineVersion :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccEngineVersion = lens _elastiCacheCacheClusterEngineVersion (\s a -> s { _elastiCacheCacheClusterEngineVersion = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-notificationtopicarn
ecccNotificationTopicArn :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccNotificationTopicArn = lens _elastiCacheCacheClusterNotificationTopicArn (\s a -> s { _elastiCacheCacheClusterNotificationTopicArn = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-numcachenodes
ecccNumCacheNodes :: Lens' ElastiCacheCacheCluster (Val Integer)
ecccNumCacheNodes = lens _elastiCacheCacheClusterNumCacheNodes (\s a -> s { _elastiCacheCacheClusterNumCacheNodes = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-port
ecccPort :: Lens' ElastiCacheCacheCluster (Maybe (Val Integer))
ecccPort = lens _elastiCacheCacheClusterPort (\s a -> s { _elastiCacheCacheClusterPort = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-preferredavailabilityzone
ecccPreferredAvailabilityZone :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccPreferredAvailabilityZone = lens _elastiCacheCacheClusterPreferredAvailabilityZone (\s a -> s { _elastiCacheCacheClusterPreferredAvailabilityZone = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-preferredavailabilityzones
ecccPreferredAvailabilityZones :: Lens' ElastiCacheCacheCluster (Maybe (ValList Text))
ecccPreferredAvailabilityZones = lens _elastiCacheCacheClusterPreferredAvailabilityZones (\s a -> s { _elastiCacheCacheClusterPreferredAvailabilityZones = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-preferredmaintenancewindow
ecccPreferredMaintenanceWindow :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccPreferredMaintenanceWindow = lens _elastiCacheCacheClusterPreferredMaintenanceWindow (\s a -> s { _elastiCacheCacheClusterPreferredMaintenanceWindow = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-snapshotarns
ecccSnapshotArns :: Lens' ElastiCacheCacheCluster (Maybe (ValList Text))
ecccSnapshotArns = lens _elastiCacheCacheClusterSnapshotArns (\s a -> s { _elastiCacheCacheClusterSnapshotArns = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-snapshotname
ecccSnapshotName :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccSnapshotName = lens _elastiCacheCacheClusterSnapshotName (\s a -> s { _elastiCacheCacheClusterSnapshotName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-snapshotretentionlimit
ecccSnapshotRetentionLimit :: Lens' ElastiCacheCacheCluster (Maybe (Val Integer))
ecccSnapshotRetentionLimit = lens _elastiCacheCacheClusterSnapshotRetentionLimit (\s a -> s { _elastiCacheCacheClusterSnapshotRetentionLimit = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-snapshotwindow
ecccSnapshotWindow :: Lens' ElastiCacheCacheCluster (Maybe (Val Text))
ecccSnapshotWindow = lens _elastiCacheCacheClusterSnapshotWindow (\s a -> s { _elastiCacheCacheClusterSnapshotWindow = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-tags
ecccTags :: Lens' ElastiCacheCacheCluster (Maybe [Tag])
ecccTags = lens _elastiCacheCacheClusterTags (\s a -> s { _elastiCacheCacheClusterTags = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-cache-cluster.html#cfn-elasticache-cachecluster-vpcsecuritygroupids
ecccVpcSecurityGroupIds :: Lens' ElastiCacheCacheCluster (Maybe (ValList Text))
ecccVpcSecurityGroupIds = lens _elastiCacheCacheClusterVpcSecurityGroupIds (\s a -> s { _elastiCacheCacheClusterVpcSecurityGroupIds = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/ElastiCacheCacheCluster.hs | mit | 13,261 | 0 | 15 | 1,252 | 2,008 | 1,131 | 877 | 128 | 1 |
-- | Stub module with an empty export list; presumably a generated binding
-- placeholder — TODO confirm against the binding generator.
module GHCJS.DOM.SVGFEDiffuseLightingElement (
  ) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/SVGFEDiffuseLightingElement.hs | mit | 57 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
module Main where
import Test.Hspec.Runner
import Test.Hspec.Formatters
import qualified Spec
-- | Run the test suite with the spec-doc formatter and colours forced on.
main :: IO ()
main = hspecWith config Spec.spec
  where
    config =
      defaultConfig
        { configFormatter = Just specdoc
        , configColorMode = ColorAlways
        }
| wayofthepie/emu-mos-6502 | test/Main.hs | mit | 224 | 0 | 8 | 37 | 59 | 35 | 24 | 9 | 1 |
module Oczor.Compiler.CommandLine where
import ClassyPrelude
import Data.Version (showVersion)
import Options.Applicative
import Oczor.Compiler.Compiler
import Oczor.Compiler.State
import Oczor.Utl hiding (argument)
import Paths_oczor (version)
-- | Parsed command-line options: target language, output directory,
-- whether to list the names a module defines, source directories, and the
-- file/module argument.
data Options = Options
  { lng :: String
  , output :: String
  , showMdl :: Bool
  , srcDirList :: [String]
  , moduleName :: String}
-- | optparse-applicative parser; the combinator order here determines the
-- generated @--help@ output, so keep it stable.
options :: Parser Options
options = Options
  <$> strOption ( long "lang" <> short 'l' <> value "js" <> showDefault <> metavar "LANGUAGE" <> help "target language (js, lua, rb, el)")
  <*> strOption ( long "output" <> short 'o' <> value "output" <> showDefault <> metavar "DIRECTORY" <> help "output directory" )
  <*> switch ( long "browse" <> short 'b' <> help "display the names defined by module" )
  <*> many (strOption ( long "src" <> short 's' <> metavar "DIRECTORIES..." <> help "source file directories" ))
  <*> argument str (metavar "FILE")
-- Translate parsed CLI options into the compiler's initial state via lens
-- updates. NOTE(review): no top-level type signature; the type comes from
-- 'initState'. The bound 'moduleName' shadows the field accessor and is
-- unused here (the caller extracts it separately).
optionsToState (Options lng output showMdl srcDirList moduleName) = initState
  & outputDir .~ output
  & srcDirs .~ srcDirList
  & showModule .~ showMdl
  & combine .~ True
  & lang .~ lng
-- | Build the compiler state from the options and compile the module named
-- by the FILE argument, printing compiler output.
runWith :: Options -> IO ()
runWith x@Options {} = runCompilerPrint (optionsToState x) $ compileAndWrite (fileToModuleName (moduleName x))
-- Program description shown in --help; includes the cabal-derived version.
desc = unwords ["Oczor compiler", showVersion version]
-- | Entry point: parse the command line and hand off to 'runWith'.
run :: IO ()
run = execParser opts >>= runWith
  where
    opts = info (helper <*> options)
      ( fullDesc
      <> progDesc desc
      <> header desc )
| ptol/oczor | src/Oczor/Compiler/CommandLine.hs | mit | 1,476 | 0 | 17 | 276 | 477 | 244 | 233 | 36 | 1 |
-- |
-- Module : System.AtomicWrite.Writer.LazyText
-- Copyright : © 2015-2019 Stack Builders Inc.
-- License : MIT
--
-- Maintainer : Stack Builders <hackage@stackbuilders.com>
-- Stability : experimental
-- Portability : portable
--
-- Provides functionality to dump the contents of a Text
-- to a file.
module System.AtomicWrite.Writer.LazyText (atomicWriteFile, atomicWriteFileWithMode) where
import System.AtomicWrite.Internal (atomicWriteFileMaybeModeText)
import Data.Text.Lazy (Text)
import Data.Text.Lazy.IO (hPutStr)
import System.Posix.Types (FileMode)
-- | Creates a file atomically on POSIX-compliant systems while preserving
-- permissions: no mode is supplied, so the existing mode is kept.
atomicWriteFile ::
  FilePath -- ^ The path where the file will be updated or created
  -> Text -- ^ The content to write to the file
  -> IO ()
atomicWriteFile = atomicWriteFileMaybeMode Nothing
-- | Creates or modifies a file atomically on POSIX-compliant systems and
-- updates permissions to the supplied mode.
atomicWriteFileWithMode ::
  FileMode -- ^ The mode to set the file to
  -> FilePath -- ^ The path where the file will be updated or created
  -> Text -- ^ The content to write to the file
  -> IO ()
atomicWriteFileWithMode mode = atomicWriteFileMaybeMode (Just mode)
-- Shared worker: delegates to the internal text writer, using lazy Text's
-- 'hPutStr' as the handle-writing callback.
atomicWriteFileMaybeMode ::
  Maybe FileMode -- ^ The mode to set the file to, if any
  -> FilePath -- ^ The path where the file will be updated or created
  -> Text -- ^ The content to write to the file
  -> IO ()
atomicWriteFileMaybeMode mmode fp = atomicWriteFileMaybeModeText mmode fp hPutStr
| stackbuilders/atomic-write | src/System/AtomicWrite/Writer/LazyText.hs | mit | 1,651 | 0 | 9 | 377 | 184 | 113 | 71 | 24 | 1 |
{-# LANGUAGE BangPatterns #-}
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC8
import Data.List.Split
import Data.Bits
import Data.Word
import Prelude
-- Input file: comma-separated decimal byte values of the cipher text.
filename = "p059_cipher.txt"
-- Marker word expected to appear in the correctly decrypted text.
word :: BS.ByteString
word = BSC8.pack "chapter"
-- All possible key combinations: three lowercase letters
-- (97..122 are ASCII 'a'..'z').
combinations :: [(Word8, Word8, Word8)]
combinations = [(x,y,z) | x <- [97..122], y <- [97..122], z <- [97..122]]
-- | XOR every cipher byte with the three-byte key repeated cyclically.
-- XOR is an involution, so the same function both encrypts and decrypts.
--
-- Replaces a hand-rolled three-at-a-time recursion that rebuilt its
-- accumulator with '++' on every step (accidentally quadratic) with a
-- linear 'zipWith' over the cycled key; the output is identical,
-- including for inputs whose length is not a multiple of three.
applyKey :: (Word8, Word8, Word8) -> [Word8] -> BS.ByteString
applyKey (x, y, z) cipherText =
  BS.pack $ zipWith xor cipherText (cycle [x, y, z])
-- | Read the comma-separated decimal byte values from the cipher file.
-- NOTE(review): uses partial 'read'; a malformed field crashes when the
-- value is forced. Acceptable for a one-off puzzle script.
getCipherText :: IO [Word8]
getCipherText = do
  contents <- readFile filename
  return $ map read $ splitOn "," contents
-- | Return the first candidate key whose decryption of the cipher text
-- contains the marker 'word', or 'Nothing' when no key matches. Laziness
-- stops the search at the first hit, as the original recursion did.
findKey :: [(Word8, Word8, Word8)] -> [Word8] -> Maybe (Word8, Word8, Word8)
findKey keys cipherText =
  case [key | key <- keys, word `BS.isInfixOf` applyKey key cipherText] of
    (k:_) -> Just k
    [] -> Nothing
-- Project Euler 59 driver: brute-force the key, then print the key, the
-- sum of the decrypted bytes (the puzzle answer), and the plaintext.
-- The bang forces the whole search before any output is printed.
euler59 = do
  cipherText <- getCipherText
  let !key = findKey combinations cipherText
  putStrLn "Key:"
  print key
  case key of
    Just k -> do
      let originalText = applyKey k cipherText
      putStrLn "Answer:"
      let answer = BS.foldl' (\a b -> a + (fromIntegral b) :: Int) 0 originalText
      print answer
      putStrLn "Text:"
      BSC8.putStrLn originalText
    Nothing -> return ()
| RossMeikleham/Project-Euler-Haskell | 59.hs | mit | 1,805 | 0 | 20 | 475 | 714 | 387 | 327 | 45 | 4 |
module Hasgel.Game.Movement (
maxSpeed, tryMove
) where
import Data.List (findIndex)
import Data.Maybe (fromMaybe, isNothing)
import Control.Lens ((^.))
import qualified Linear as L
import Hasgel.Transform (Transform (..), translate)
-- | Maximum speed of movement allowed when checking collisions; 'tryMove'
-- breaks larger velocities into steps of at most this magnitude per axis.
maxSpeed :: Float
maxSpeed = 0.25
-- | Returns the new position and the index of the colliding object when moving
-- for the given velocity.
--
-- The velocity is clamped per-component to ±'maxSpeed'; any clipped excess
-- is returned as a remainder and retried recursively, so long moves are
-- taken in short sub-steps that cannot tunnel past a blocker thinner than
-- one step. On collision the ORIGINAL transform is returned together with
-- the blocker's index.
tryMove :: [Transform] -> Transform -> L.V3 Float -> (Transform, Maybe Int)
tryMove [] mobj speed = (translate mobj speed, Nothing)
tryMove blockers mobj speed =
  case clampSpeed of
    -- Speed is below maximum, move to destination.
    (speed', Nothing) -> tryMove' speed'
    -- Speed is above maximum, move incrementally.
    (speed', Just remSpeed) ->
      case tryMove' speed' of
        -- No collision, continue moving.
        (mobj', Nothing) -> tryMove blockers mobj' remSpeed
        -- Collision, return the result.
        colRes -> colRes
  where clampSpeed = let L.V3 (x, mx) (y, my) (z, mz) = clampComp <$> speed
                         -- Remainder speed if any.
                         rv = if all isNothing [mx, my, mz]
                              then Nothing
                              else let [rx, ry, rz] = fromMaybe 0 <$> [mx, my, mz]
                                   in Just $ L.V3 rx ry rz
                     in (L.V3 x y z, rv)
        -- Clamp one component to ±maxSpeed; snd is the clipped excess.
        clampComp c
          | c < 0, c < -maxSpeed = (-maxSpeed, Just $ maxSpeed + c)
          | c > 0, c > maxSpeed = (maxSpeed, Just $ c - maxSpeed)
          | otherwise = (c, Nothing)
        -- Attempt a single sub-step; on overlap keep the old transform.
        tryMove' dv
          | Just i <- checkPosition (translate mobj dv) blockers = (mobj, Just i)
          | otherwise = (translate mobj dv, Nothing)
-- | Index of the first blocker whose bounding volume overlaps the given
-- transform, if any.
checkPosition :: Transform -> [Transform] -> Maybe Int
checkPosition mobj blockers = findIndex (bvOverlaps mobj) blockers
-- | True when the axis-aligned bounding volumes (position ± scale per
-- component) of the two transforms overlap on every axis.
bvOverlaps :: Transform -> Transform -> Bool
bvOverlaps a b = all overlapsOn [L._x, L._y, L._z]
  where
    -- Kept in negated form to match the original guard logic exactly.
    overlapsOn axis = not (aMin > bMax || aMax < bMin)
      where
        (aMin, aMax) = bounds a
        (bMin, bMax) = bounds b
        bounds t = let scale = transformScale t ^. axis
                       pos = transformPosition t ^. axis
                   in (pos - scale, pos + scale)
| Th30n/hasgel | src/Hasgel/Game/Movement.hs | mit | 2,579 | 13 | 24 | 777 | 738 | 419 | 319 | 46 | 4 |
{-# htermination addListToFM :: FiniteMap Char b -> [(Char,b)] -> FiniteMap Char b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_addListToFM_3.hs | mit | 104 | 0 | 3 | 16 | 5 | 3 | 2 | 1 | 0 |
-- |
-- Module : Occlusion.TH
-- Description : Experimental Template Haskell module
-- Copyright : (c) Jonatan H Sundqvist, 2015
-- License : MIT
-- Maintainer : Jonatan H Sundqvist
-- Stability : experimental|stable
-- Portability : POSIX (not sure)
--
-- Created Jonatan H Sundqvist 2015
-- Based on https://github.com/ekmett/lens/blob/ec19f31617d8c826f4f1bb0196b3ba94bf94c0cc/src/Control/Lens/Internal/FieldTH.hs
-- TODO | -
-- -
-- SPEC | -
-- -
--------------------------------------------------------------------------------------------------------------------------------------------
-- GHC Pragmas
--------------------------------------------------------------------------------------------------------------------------------------------
{-# LANGUAGE TemplateHaskell #-}
--------------------------------------------------------------------------------------------------------------------------------------------
-- API
--------------------------------------------------------------------------------------------------------------------------------------------
module Occlusion.TH where
--------------------------------------------------------------------------------------------------------------------------------------------
-- We'll need these
--------------------------------------------------------------------------------------------------------------------------------------------
import Language.Haskell.TH
--------------------------------------------------------------------------------------------------------------------------------------------
-- Functions
--------------------------------------------------------------------------------------------------------------------------------------------
-- | Generate field optics for the named type constructor.
--
-- Fixes the error branch: the original returned a bare 'String' where a
-- 'Q' action is required (a type error); we now abort in 'Q' via 'fail'.
-- NOTE(review): 'DescQ' is not defined in this module as seen here, and
-- the TyConI branch returns 'DescQ' where 'Q DescQ' is expected — the
-- module appears experimental/incomplete; only the error branch is fixed.
makeLenses :: Name -> Q DescQ
makeLenses name = do
  info <- reify name
  case info of
    TyConI dec -> makeFieldOpticsFor dec
    _ -> fail "makeLenses: expected a type constructor name"
-- | Dispatch on the declaration form and hand the constructors to
-- 'makeFieldOpticsForDec''.
--
-- Fixes from the original: the parameter was bound as @desc@ but the case
-- scrutinised an undefined @descr@; the newtype case bound @cons@ but used
-- an undefined @con@; and the TH constructors are spelled 'NewtypeD' /
-- 'NewtypeInstD' (not @NewTypeD@/@NewTypeInstD@).
-- NOTE(review): the pattern arities follow this file's (older) TH API;
-- confirm against the template-haskell version in use.
makeFieldOpticsFor :: Dec -> DescQ
makeFieldOpticsFor dec = case dec of
  DataD _ tyName vars cons _ -> makeFieldOpticsForDec' tyName (mkS tyName vars) cons
  NewtypeD _ tyName vars con _ -> makeFieldOpticsForDec' tyName (mkS tyName vars) [con]
  DataInstD _ tyName args cons _ -> makeFieldOpticsForDec' tyName (tyName `conAppsT` args) cons
  NewtypeInstD _ tyName args con _ -> makeFieldOpticsForDec' tyName (tyName `conAppsT` args) [con]
  where
    -- Saturate the type constructor with its bound type variables.
    mkS tyName vars = tyName `conAppsT` map VarT (toListOf typeVars vars)
-- | Core worker: normalise constructors, collect field names, and build
-- one definition per optic.
-- NOTE(review): this function is visibly unfinished — @allfields@ (lower
-- case) is undefined (the binding above is @allFields@), the classy
-- branch is a typed hole @_@, and the @Nothing@ branch's @do@ block is
-- truncated. Left byte-identical pending completion.
makeFieldOpticsForDec' :: Name -> Type -> [Con] -> DescQ
makeFieldOpticsForDec' tyName s cons = do
  fieldCons <- traverse normalizeConstructor cons
  let allFields = toListOf (folded . _2 . folded . _1 . folded) fieldCons
  let defCons = over normFieldLabels (expandName allfields) fieldCons
      allDefs = setOf (normFieldLabels . folded) defCons
  perDef <- T.sequenceA (fromSet (buildScaffold s defCons) allDefs)
  let defs = Map.toList perDef
  case _classyLenses tyName of
    Just (className, methodName) -> _
    Nothing -> do
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module FlagsSpec
( flagsSpec
) where
import DarkSky.Response.Flags
import DarkSky.Unit
import Data.Aeson
import Data.ByteString.Lazy as BL
import Test.Hspec
import Text.RawString.QQ
-- | FromJSON round-trip specs for 'Flags': both populated and minimal
-- payloads decode to the expected values, and invalid JSON decodes to
-- 'Nothing'.
flagsSpec :: IO ()
flagsSpec = hspec $
  describe "Flags" $
    describe "FromJSON" $ do
      describe "it can parse valid JSON" $ do
        it "full" $ decode sampleFlagsFullJSON `shouldBe` Just sampleFlagsFull
        it "empty" $ decode sampleFlagsEmptyJSON `shouldBe` Just sampleFlagsEmpty
      it "doesn't parse invalid JSON" $
        decode invalidSampleFlagsJSON `shouldBe` (Nothing :: Maybe Flags)
-- | A fully-populated Flags payload (raw-string quasiquote; the JSON text
-- is runtime data and must stay byte-exact).
sampleFlagsFullJSON :: BL.ByteString
sampleFlagsFullJSON =
  [r|{
  "darksky-unavailable": "unavailable",
  "metno-license": "license",
  "sources": ["source1", "source2"],
  "units": "us"
}|]
-- | Expected decoding of 'sampleFlagsFullJSON'.
sampleFlagsFull :: Flags
sampleFlagsFull =
  Flags
  { darkSkyUnavailable = Just "unavailable"
  , metnoLicense = Just "license"
  , sources = ["source1", "source2"]
  , units = UnitedStates
  }
-- | A minimal payload carrying only the fields exercised by the "empty"
-- case (runtime data; byte-exact).
sampleFlagsEmptyJSON :: BL.ByteString
sampleFlagsEmptyJSON =
  [r|{
  "units": "us",
  "sources": []
}|]
-- | Expected decoding of 'sampleFlagsEmptyJSON': optional fields absent.
sampleFlagsEmpty :: Flags
sampleFlagsEmpty =
  Flags
  { darkSkyUnavailable = Nothing
  , metnoLicense = Nothing
  , sources = []
  , units = UnitedStates
  }
-- | An empty object; the spec expects it NOT to decode — presumably some
-- Flags fields are mandatory in the FromJSON instance (defined elsewhere).
invalidSampleFlagsJSON :: BL.ByteString
invalidSampleFlagsJSON = "{}"
| peterstuart/dark-sky | test/FlagsSpec.hs | mit | 1,397 | 0 | 13 | 263 | 291 | 167 | 124 | 43 | 1 |
module Display where
import Graphics.Rendering.OpenGL as GL
import Graphics.UI.GLFW as GLFW
import Graphics.Rendering.OpenGL (($=))
import DataStructures as DS
import Data.List hiding (concat, foldl, foldl')
import Numeric.Units.Dimensional.Prelude ((*~), meter)
import qualified Data.Sequence as Seq
import Data.Foldable
import Prelude hiding (concat, foldl, mapM_)
-- Global draw-scale factor applied to world positions before rendering.
-- NOTE(review): no type signature; hand-tuned magic number.
sfactor = 5e-9 -- 5e-9
-- | Render one frame. The tuple carries: 1-based index of the object the
-- view follows, the zoom exponent (scale = 5 * 2^zoomLevel), the objects
-- to draw, the camera position, and an unused slot.
-- NOTE(review): the camera parameter is currently dead — drawPos is built
-- only from the followed object (see the commented-out alternatives).
display :: (Int, Int, Seq.Seq Object, ObjPosition,a) -> IO ()
display (viewObjNum,zoomLevel,objects,cam@(Vector3 x y z),_) = do
  fpsLimiter
  GL.clear [GL.ColorBuffer, GL.DepthBuffer]
  GL.color $ color3 1 0 0
  GL.preservingMatrix $ do
    -- GL.rotate (90 :: GLdouble) (Vector3 0 1 0)
    GL.translate drawPos -- (vectorDeMeterize cam)
    -- foldl' chains the per-object draw actions into a single IO action.
    foldl' (\acc obj -> colorFromMass obj >> renderBody adjustedZoom obj >> acc) (return ()) objects
    -- renderLayer (Vector3 (100) (-200) (100))
  GLFW.swapBuffers
  where drawPos = vectorDeMeterize earthPos -- cam -- $ vectorAdd cam earthPos
        earthPos = (\vec -> vectorTimesScalar vec (DS.num (-adjustedZoom))) . getPos . (flip Seq.index (viewObjNum-1)) $ objects
        adjustedZoom = 5 * (2^^zoomLevel)
--display :: (a, [Point], DS.Sphere, ObjPosition) -> IO ()
--display (_,lines,sphere,cam@(Vector3 x y z)) = do
-- fpsLimiter
-- GL.clear [GL.ColorBuffer]
-- GL.preservingMatrix $ do
-- GL.translate $ (vectorDeMeterize cam)
-- renderLines lines
-- renderSphere sphere
-- renderLayer (Vector3 (100) (-200) (100))
-- GLFW.swapBuffers
-- Choose a draw colour from the object's mass (units per 'deMass' —
-- presumably kg; confirm against DataStructures): >1e27 yellow,
-- >1e24 blue, >1e23 red, otherwise grey.
colorFromMass (Object _ _ _ _ m)
  | dm > 1e27 = GL.color $ color3 1 1 0
  | dm > 1e24 = GL.color $ color3 0 0 1
  | dm > 1e23 = GL.color $ color3 1 0 0
  | otherwise = GL.color $ color3 (0.5) (0.5) (0.5)
  where dm = deMass m
-- Crude frame limiter: sleep 10 ms per frame.
fpsLimiter = GLFW.sleep 0.01
-- Draw the points as GL line segments (consecutive pairs form segments).
renderLines :: [Point] -> IO ()
renderLines l = do
  GL.renderPrimitive GL.Lines $ mapM_
    (\ (Vector3 x y z) -> GL.vertex (GL.Vertex3 x y z)) l
-- Shorthand for an RGB colour of GLdoubles.
color3 :: GLdouble -> GLdouble -> GLdouble -> GL.Color3 GLdouble
color3 = GL.Color3
-- Draw a 100x100 grid of points (41 steps per side), centred in x/z on the
-- given position, rendered through renderLines (so point pairs become
-- segments) — appears to be a debugging aid.
renderLayer (Vector3 x y z) = renderLines points
  where points = [(Vector3 (a-(0.5*x)) y (b-(0.5*z))) | a <- widthPoints, b <- heightPoints]
        widthPoints = map (*(width/precision)) [0..precision]
        heightPoints = map (*(height/precision)) [0..precision]
        height = 100
        width = 100
        precision = 40
--this function is bugged but cooooool looking
-- NOTE(review): author-acknowledged broken sphere tessellation, kept for
-- its visual effect; the commented-out 'where' lines are earlier attempts.
renderSphere :: DS.Sphere -> IO()
renderSphere (DS.Sphere obj r) = renderLines circleAsLines
--where circleAsLines = totalSphere100
--where circleAsLines = map (\(Vector3 a b c) -> (Vector3 (x + (a ur)) (y + (b ur)) (z + (c ur)))) totalSphere
--where circleAsLines = map (\(Vector3 a b c) -> (Vector3 (a x ur) (b y ur) (c z ur))) wrongSphere
  where circleAsLines = sphere
        sphere = concat . map (\(theta,circle) -> map (\(Vector3 x y z) -> (Vector3 (x + ur * sin theta) y (z + ur*cos theta))) circle) . map (\x -> (x,circle)) $ [1..precision]
        circle = map (\theta -> (Vector3 (sizeFactor*x + ur*sin theta) (sizeFactor*y + ur*cos theta) (sizeFactor*z + ur * sin theta))) thetaList
        thetaList = map (*(2*glpi/precision)) [1..precision]
        precision = 40
        ur = deMeterize r
        Vector3 x y z = vectorDeMeterize . getPos $ obj
        sizeFactor = sfactor
-- Draw an object as a fixed-radius (ur = 50) wireframe blob translated to
-- the object's scaled position; uses the same circle-sweep construction as
-- renderSphere (and shares its acknowledged tessellation quirks).
renderBody sizeFactor obj = renderLines $ map(\(Vector3 a b c) -> (Vector3 (a+sizeFactor*x) (b+sizeFactor*y) (c+sizeFactor*z))) body
  where Vector3 x y z = vectorDeMeterize . getPos $ obj
        body = sphere
          where sphere = concat . map (\(theta,circle) -> map (\(Vector3 x y z) -> (Vector3 (x + ur * sin theta) y (z + ur*cos theta))) circle) . map (\x -> (x,circle)) $ [1..precision]
                circle = map (\theta -> (Vector3 (ur*sin theta) (ur*cos theta) (ur * sin theta))) thetaList
                thetaList = map (*(2*glpi/precision)) [1..precision]
                precision = 40
                ur = 50
--wrongSphere = sphere
-- where sphere = concat . map (\(theta,circle) -> foo theta circle) . map (\x -> (x,circle)) $ [1..precision]
-- foo theta vec = map (\(Vector3 x y z) -> (Vector3 (\a b -> a + (x b + b * sin theta)) (\a b -> a + (y b + b * cos theta)) (\a b -> a+z b))) vec
-- circle = map (\theta -> (Vector3 (*sin theta) (*cos theta) (*0))) thetaList
-- thetaList = map (*(2*glpi/precision)) [1..precision]
-- precision = 80
--totalSphere100 = map (\(Vector3 x y z) -> (Vector3 (x 100) (y 100) (z 100))) totalSphere
--totalSphere = circle
-- where sphere = concat . map (\(theta, circles) -> foo theta circles) . map (\x -> (x,circle)) $ [1..precision]
-- foo theta = map (\(Vector3 x y z) -> (Vector3 (\a -> x $ a * cos theta) (\a -> y $ a * sin theta) (\a -> z a)))
-- circle = map (\theta -> (Vector3 (*cos theta) (*cos theta) (*sin theta))) thetaList
-- thetaList = map (*(2*glpi/precision)) [1..precision]
-- precision = 40
--weirdBody = thing
-- where thing = concat . map (\(theta,circle) -> map (\(Vector3 x y z) -> (Vector3 (x + ur * sin theta) y (z + ur*cos theta))) circle) . map (\x -> (x,circle)) $ [1..precision]
-- circle = map (\theta -> (Vector3 (x + ur*sin theta) (y + ur*cos theta) (z + ur * sin theta))) thetaList
-- thetaList = map (*(2*glpi/precision)) [1..precision]
-- precision = 40
-- ur = 100
-- Vector3 x y z = (Vector3 0 0 0)
glpi = 3.14 :: GLdouble | Stratege/NBody-Simulation | display.hs | mit | 5,178 | 30 | 21 | 1,006 | 1,401 | 780 | 621 | 62 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.NGH.Formats.Fasta
( writeSeq
) where
import Data.Convertible
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 () -- import IsString instance
import qualified Data.ByteString.Lazy as L
-- | writeSeq renders one FASTA record: a \">\"-prefixed header line
-- followed by the sequence wrapped to the requested line width.
writeSeq :: Int -- ^ Width of the lines to use
  -> L.ByteString -- ^ Header
  -> L.ByteString -- ^ The DNA sequence
  -> L.ByteString -- ^ The output
writeSeq lw header s =
  L.fromChunks $ ">" : L.toChunks header ++ "\n" : breakup lw s
-- | Split a lazy ByteString into strict chunks of at most @n@ bytes,
-- appending a newline chunk after every piece (including the last).
-- A negative width disables wrapping: the raw chunks are returned with no
-- newlines added.
--
-- Uses the standard 'fromIntegral' for the Int -> Int64 widening instead
-- of Data.Convertible's 'convert' (same semantics, no extra dependency),
-- and builds the newline with 'S.singleton' rather than an
-- OverloadedStrings literal. Behaviour is otherwise unchanged — including
-- the pre-existing corner case that @n == 0@ on non-empty input yields an
-- infinite (lazily productive) list.
breakup :: Int -> L.ByteString -> [S.ByteString]
breakup n s
  | n < 0 = L.toChunks s
  | otherwise = go s
  where
    newline = S.singleton 0x0a -- '\n'
    go t =
      let (piece, rest) = L.splitAt (fromIntegral n) t
      in L.toChunks piece ++ newline : (if L.null rest then [] else go rest)
| luispedro/NGH | Data/NGH/Formats/Fasta.hs | mit | 796 | 0 | 11 | 176 | 244 | 136 | 108 | 16 | 2 |
import Text.CSV
import Data.List
import Data.Function
import DBGrader.Flags
import DBGrader.Questions
import DBGrader.Types
import DBGrader.Config
-- CSV file to parse for grades.
-- The csv columns are as follows:
-- name, view, db, matches, notes, flags, query, errors
-- (Downstream code indexes column 4 for flags and column 5 for the query.)
csvFile :: String
csvFile = "studentGrades.csv"
-- | Read the grade CSV, group its rows per student (dropping the header
-- group), discard groups with a blank name, and write one summary sheet
-- per student.
main :: IO ()
main = do
  grades <- parseCSVFromFile csvFile
  case grades of
    Left _ -> print "Error"
    Right x -> do
      -- Get list of student answers
      let stugrades = filterBlankLines $ groupByName x
      -- Create a summary file for each
      mapM_ createFile stugrades
  where
    -- Group consecutive rows by their first cell (the student name).
    -- 'take 1' is total where the original's 'head' crashed on an empty
    -- row, and 'drop 1' is total where 'tail' crashed on an empty list;
    -- the dropped first group is the CSV header row.
    groupByName = drop 1 . groupBy ((==) `on` take 1)
    -- Keep only groups whose first row has a non-empty name cell;
    -- degenerate empty groups/rows are dropped instead of crashing.
    filterBlankLines = filter nonBlank
      where
        nonBlank ((name:_):_) = name /= ""
        nonBlank _ = False
-- Given a list of answers for a single student, create a summary sheet
-- in ./gradesheets/studentname.md
createFile :: [[Field]] -> IO ()
createFile g = writeFile ("gradesheets/" ++ rmvSpcAndCmm nme ++ ".md") tem
  where
    -- Remove characters we don't want in file name (spaces and commas).
    rmvSpcAndCmm = (\\ " ,")
    -- Given a character and a list of flags, produces
    -- the note associated with flag (empty string for unknown flags).
    getFlagNote :: [Flag] -> Char -> String
    getFlagNote xs c = case find (\(Flag ch _ _) -> ch == c) xs of
                        Nothing -> ""
                        Just (Flag _ _ n) -> n
    -- Given a list of chars produces notes.
    charsToNotes = concatMap (getFlagNote flags)
    -- Name of student (column 0 of the first row).
    nme = head $ head g
    -- Queries made (column 5 of each row).
    -- NOTE(review): the file header comment lists the columns as
    -- "name, view, db, matches, notes, flags, query, errors", which puts
    -- flags at index 5 and query at index 6 -- the indices used here
    -- (4 for flag chars, 5 for query) don't line up; confirm against the CSV.
    queries = map (!!5) g
    -- Notes: column 4 is read as flag characters and expanded to note text.
    nts = map (charsToNotes . (!!4)) g
    -- Correct Solutions
    sol = map solution questions
    -- List of "Correct" or "Incorrect". All questions with notes are incorrect.
    cor = map corOrInc g
    -- Check if question is correct or incorrect
    corOrInc x | x!!4 == "" || x!!4 == " " = "Correct"
               | otherwise = "Incorrect"
    -- String to write to file
    tem = template nme (zipWith3 solutionTemplate sol queries nts) cor
-- Template for printing the correct solution for an incorrect answer.
-- | Render the review section for one answer in Markdown: the grading notes,
-- the student's SQL, and one possible correct SQL solution.
solutionTemplate :: String -- ^ correct solution
                 -> String -- ^ student's solution
                 -> String -- ^ grading notes
                 -> String
solutionTemplate cor sol nt = concat
    [ "\n Notes: \n", nt, "\n"
    , "\n Your Solution:\n"
    , "```SQL\n", sol, "\n```\n"
    , " Possible Solution:\n"
    , "```SQL\n", cor, "\n```\n"
    ]
-- Given the name of the student, the list of questions, and a list of
-- "Correct"/"Incorrect" strings, this function produces the final string
-- to print to the file.
-- | Question texts and their verdicts are indexed in lockstep, so @cor@
-- must be at least as long as @ques@ (the caller builds both from the same
-- CSV rows).
template :: String -> [String] -> [String] -> String
template nme ques cor =
    summarySheetHeader
        ++ "\n Student: " ++ nme
        ++ concatMap entry [1 .. length ques]
  where
    -- One numbered line per question; incorrect answers get the full
    -- solution section appended, correct ones just a blank line.
    entry :: Int -> String
    entry n = show n ++ ". " ++ verdict ++ "\n" ++ rest
      where
        verdict = cor !! (n - 1)
        rest | verdict == "Correct" = "\n"
             | otherwise = ques !! (n - 1)
| GarrisonJ/DBGrader | CreateSummarySheets.hs | mit | 3,321 | 0 | 17 | 1,169 | 750 | 392 | 358 | 54 | 2 |
{-# LANGUAGE RankNTypes #-}
{- |
Module : $Header$
Copyright : (c) 2005, Amr Sabry, Chung-chieh Shan, Oleg Kiselyov
and Daniel P. Friedman
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : non-portable (RankNTypes)
Implementation of LogicT based on the two-continuation model of streams
-}
module Common.SFKT
( SFKT
, runM
, observe
) where
import Control.Monad
import Control.Monad.Trans
import Common.LogicT
{- Monad with success, failure continuations, mzero, and mplus
We can also split computations using msplit
Cf. Hinze's ICFP00 paper, Fig. 8: CPS implementation of BACKTR -}
{- The extra `r' is just to be compatible with the SRReifT.hs
type SG r m a = SFKT m a -}
newtype SFKT m a =
SFKT { unSFKT :: (forall ans . SK (m ans) a -> FK (m ans) -> m ans) }
type FK ans = ans
type SK ans a = a -> FK ans -> ans
{- the success continuation gets one answer(value) and a computation
to run to get more answers -}
instance Monad m => Monad (SFKT m) where
return e = SFKT (\ sk -> sk e)
m >>= f = SFKT (\ sk -> unSFKT m (\ a -> unSFKT (f a) sk))
instance Monad m => MonadPlus (SFKT m) where
mzero = SFKT (\ _ fk -> fk)
m1 `mplus` m2 = SFKT (\ sk fk -> unSFKT m1 sk (unSFKT m2 sk fk))
instance MonadTrans SFKT where
-- Hinze's promote
lift m = SFKT (\ sk fk -> m >>= (`sk` fk))
instance (MonadIO m) => MonadIO (SFKT m) where
liftIO = lift . liftIO
-- But this is not in Hinze's paper
instance LogicT SFKT where
msplit m = lift $ unSFKT m ssk (return Nothing)
where ssk a fk = return $ Just (a, lift fk >>= reflect)
-- This is a poly-answer `observe' function of Hinze
-- | Poly-answer observation: run a backtracking computation and collect its
-- answers.  'Nothing' collects every answer; @'Just' n@ collects at most @n@
-- (a non-positive @n@ yields none).
runM :: (Monad m) => Maybe Int -> SFKT m a -> m [a]
-- unbounded: the success continuation conses each answer onto whatever the
-- failure continuation (the rest of the search) produces
runM Nothing (SFKT m) = m (\ a -> liftM (a :)) (return [])
runM (Just n) (SFKT _m) | n <= 0 = return []
-- exactly one answer wanted: discard the failure continuation immediately
runM (Just 1) (SFKT m) = m (\ a _fk -> return [a]) (return [])
-- general bounded case: split off one answer, then recurse with n - 1
runM (Just n) m = unSFKT (msplit m) runM' (return [])
  where runM' Nothing _ = return []
        runM' (Just (a, m')) _ = liftM (a :) (runM (Just (n - 1)) m')
-- | Return only the first answer; 'fail's in the underlying monad when the
-- computation produces no answer at all.
observe :: Monad m => SFKT m a -> m a
observe m = unSFKT m (\ a _fk -> return a) (fail "no answer")
| nevrenato/Hets_Fork | Common/SFKT.hs | gpl-2.0 | 2,217 | 0 | 14 | 540 | 760 | 398 | 362 | 34 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Snippets
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
module Yi.Snippets where
import GHC.Generics (Generic)
import Control.Applicative (some)
import Control.Arrow (second)
import Lens.Micro.Platform (use, (.=))
import Control.Monad.RWS (MonadPlus (mplus), MonadReader (ask),
MonadState, MonadTrans (..),
MonadWriter (tell), RWST, evalRWST,
filterM, forM, forM_, liftM2, unless,
when, (<>))
import Data.Binary (Binary)
import Data.Char (isSpace)
import Data.Default (Default, def)
import Data.Foldable (find)
import Data.List (foldl', groupBy, intersperse, nub, sort)
import Data.Maybe (catMaybes)
import qualified Data.Text as T (Text)
import Data.Typeable (Typeable)
import Yi.Buffer
import Yi.Editor (withCurrentBuffer)
import Yi.Keymap (Action)
import Yi.Keymap.Keys
import qualified Yi.Rope as R
import Yi.TextCompletion (resetComplete, wordCompleteString')
import Yi.Types (YiVariable)
type SnippetCmd = RWST (Int, Int) [MarkInfo] () BufferM
data SnippetMark = SimpleMark !Int
| ValuedMark !Int R.YiString
| DependentMark !Int
data MarkInfo = SimpleMarkInfo { userIndex :: !Int
, startMark :: !Mark }
| ValuedMarkInfo { userIndex :: !Int
, startMark :: !Mark
, endMark :: !Mark }
| DependentMarkInfo { userIndex :: !Int
, startMark :: !Mark
, endMark :: !Mark }
deriving (Eq, Show, Generic)
instance Binary MarkInfo
newtype BufferMarks = BufferMarks { bufferMarks :: [MarkInfo] }
deriving (Eq, Show, Monoid, Typeable, Binary)
newtype DependentMarks = DependentMarks { marks :: [[MarkInfo]] }
deriving (Eq, Show, Monoid, Typeable, Binary)
instance Default BufferMarks where
def = BufferMarks []
instance Default DependentMarks where
def = DependentMarks []
instance YiVariable BufferMarks
instance YiVariable DependentMarks
instance Ord MarkInfo where
a `compare` b = userIndex a `compare` userIndex b
cursor :: Int -> SnippetMark
cursor = SimpleMark
cursorWith :: Int -> R.YiString -> SnippetMark
cursorWith = ValuedMark
dep :: Int -> SnippetMark
dep = DependentMark
-- | True exactly for 'DependentMarkInfo'; simple and valued marks are not
-- dependent.
isDependentMark :: MarkInfo -> Bool
isDependentMark m = case m of
  DependentMarkInfo{} -> True
  _                   -> False
-- | All buffer 'Mark's owned by a 'MarkInfo': only the start mark for a
-- simple mark, start and end marks for valued and dependent ones.
bufferMarkers :: MarkInfo -> [Mark]
bufferMarkers mi = case mi of
  SimpleMarkInfo _ s -> [s]
  _                  -> [startMark mi, endMark mi]
-- used to translate a datatype into a snippet cmd for
-- freely combining data with '&'
class MkSnippetCmd a b | a -> b where
mkSnippetCmd :: a -> SnippetCmd b
instance MkSnippetCmd String () where
mkSnippetCmd = text . R.fromString
instance MkSnippetCmd R.YiString () where
mkSnippetCmd = text
instance MkSnippetCmd T.Text () where
mkSnippetCmd = text . R.fromText
instance MkSnippetCmd (SnippetCmd a) a where
mkSnippetCmd = id
-- mkSnippetCmd for 'cursor...'-functions
instance MkSnippetCmd SnippetMark () where
mkSnippetCmd (SimpleMark i) = do
mk <- mkMark
tell [SimpleMarkInfo i mk]
mkSnippetCmd (ValuedMark i str) = do
start <- mkMark
lift $ insertN str
end <- mkMark
tell [ValuedMarkInfo i start end]
mkSnippetCmd (DependentMark i) = do
start <- mkMark
end <- mkMark
tell [DependentMarkInfo i start end]
-- create a mark at current position
mkMark :: MonadTrans t => t BufferM Mark
mkMark = lift $ do p <- pointB
newMarkB $ MarkValue p Backward
-- Indentation support has been temporarily removed
-- | Insert a (possibly multi-line) snippet text at point.  Every line after
-- the first is re-indented to the indentation captured in the snippet
-- environment, and hard tabs are expanded to spaces when the buffer's
-- indent settings request it.
text :: R.YiString -> SnippetCmd ()
text txt = do
  (_, indent) <- ask
  indentSettings <- lift indentSettingsB
  lift . foldl' (>>) (return ()) .
    intersperse (newlineB >> indentToB indent) .
    map (if expandTabs indentSettings
         then insertN . expand indentSettings ""
         else insertN) $ lines' txt
  where
    -- Like R.lines, but a trailing newline yields an explicit empty final
    -- line so the closing 'newlineB' is emitted.  (The fallback branch
    -- previously referenced the shadowed outer 'txt' instead of the
    -- parameter 'txt'' -- harmless at the single call site, but a latent
    -- bug; fixed here.)
    lines' txt' = case R.last txt' of
      Just '\n' -> R.lines txt' <> [mempty]
      _ -> R.lines txt'
    -- Tab expansion over a reversed accumulator: 'str' holds the processed
    -- prefix reversed, 'rst' the remaining input.
    expand :: IndentSettings -> R.YiString -> R.YiString -> R.YiString
    expand is str rst = case R.head rst of
      Nothing -> R.reverse str
      Just '\t' -> let t = R.replicateChar (tabSize is) ' ' <> str
                   in expand is t (R.drop 1 rst)
      Just s -> expand is (s `R.cons` str) rst
-- Unfortunately, data converted to snippets are not monads, but '&' is
-- very similar to '>>' and '&>' is similar to '>>=', since
-- SnippetCmd's can be used monadically.
infixr 5 &
(&) :: (MkSnippetCmd a any , MkSnippetCmd b c) => a -> b -> SnippetCmd c
str & rst = mkSnippetCmd str >> mkSnippetCmd rst
(&>) :: (MkSnippetCmd a b, MkSnippetCmd c d) => a -> (b -> c) -> SnippetCmd d
str &> rst = mkSnippetCmd str >>= mkSnippetCmd . rst
runSnippet :: Bool -> SnippetCmd a -> BufferM a
runSnippet deleteLast s = do
line <- lineOf =<< pointB
indent <- indentOfCurrentPosB
(a, markInfo) <- evalRWST s (line, indent) ()
unless (null markInfo) $ do
let newMarks = sort $ filter (not . isDependentMark) markInfo
let newDepMarks = filter (not . len1) $
groupBy belongTogether $
sort markInfo
getBufferDyn >>= putBufferDyn.(BufferMarks newMarks `mappend`)
unless (null newDepMarks) $
getBufferDyn >>= putBufferDyn.(DependentMarks newDepMarks `mappend`)
moveToNextBufferMark deleteLast
return a
where
len1 (_:[]) = True
len1 _ = False
belongTogether a b = userIndex a == userIndex b
updateUpdatedMarks :: [Update] -> BufferM ()
updateUpdatedMarks upds = findEditedMarks upds >>=
mapM_ updateDependents
findEditedMarks :: [Update] -> BufferM [MarkInfo]
findEditedMarks upds = fmap (nub . concat) (mapM findEditedMarks' upds)
where
findEditedMarks' :: Update -> BufferM [MarkInfo]
findEditedMarks' upd = do
let p = updatePoint upd
ms <- return . nub . concat . marks =<< getBufferDyn
ms' <- forM ms $ \m ->do
r <- adjMarkRegion m
return $ if (updateIsDelete upd && p `nearRegion` r)
|| p `inRegion` r
then Just m
else Nothing
return . catMaybes $ ms'
-- | All marks in the same dependency group as the given mark, excluding the
-- mark itself; empty when the mark belongs to no group.
dependentSiblings :: MarkInfo -> [[MarkInfo]] -> [MarkInfo]
dependentSiblings mark deps =
  maybe [] (filter (/= mark)) (find (elem mark) deps)
updateDependents :: MarkInfo -> BufferM ()
updateDependents m = getBufferDyn >>= updateDependents' m . marks
updateDependents' :: MarkInfo -> [[MarkInfo]] -> BufferM ()
updateDependents' mark deps =
case dependentSiblings mark deps of
[] -> return ()
deps' -> do
txt <- markText mark
forM_ deps' $ \d -> do
dTxt <- markText d
when (txt /= dTxt) $ setMarkText txt d
markText :: MarkInfo -> BufferM R.YiString
markText m = markRegion m >>= readRegionB
setMarkText :: R.YiString -> MarkInfo -> BufferM ()
setMarkText txt (SimpleMarkInfo _ start) = do
p <- use $ markPointA start
c <- readAtB p
if isSpace c
then insertNAt txt p
else do
r <- regionOfPartNonEmptyAtB unitViWordOnLine Forward p
modifyRegionB (const txt) r
setMarkText txt mi = do
start <- use $ markPointA $ startMark mi
end <- use $ markPointA $ endMark mi
let r = mkRegion start end
modifyRegionB (const txt) r
when (start == end) $
markPointA (endMark mi) .= end + Point (R.length txt)
withSimpleRegion :: MarkInfo -> (Region -> BufferM Region) -> BufferM Region
withSimpleRegion (SimpleMarkInfo _ s) f = do
p <- use $ markPointA s
c <- readAtB p
if isSpace c
then return $ mkRegion p p -- return empty region
else f =<< regionOfPartNonEmptyAtB unitViWordOnLine Forward p
withSimpleRegion r _ = error $ "withSimpleRegion: " <> show r
markRegion :: MarkInfo -> BufferM Region
markRegion m@SimpleMarkInfo{} = withSimpleRegion m $ \r -> do
os <- findOverlappingMarksWith safeMarkRegion concat True r m
rOs <- mapM safeMarkRegion os
return . mkRegion (regionStart r) $ foldl' minEnd (regionEnd r) rOs
where
minEnd end r = if regionEnd r < end
then end
else min end $ regionStart r
markRegion m = liftM2 mkRegion
(use $ markPointA $ startMark m)
(use $ markPointA $ endMark m)
safeMarkRegion :: MarkInfo -> BufferM Region
safeMarkRegion m@(SimpleMarkInfo _ _) = withSimpleRegion m return
safeMarkRegion m = markRegion m
adjMarkRegion :: MarkInfo -> BufferM Region
adjMarkRegion s@(SimpleMarkInfo _ _) = markRegion s
adjMarkRegion m = do
s <- use $ markPointA $ startMark m
e <- use $ markPointA $ endMark m
c <- readAtB e
when (isWordChar c) $ do adjustEnding e
repairOverlappings e
e' <- use $ markPointA $ endMark m
s' <- adjustStart s e'
return $ mkRegion s' e'
where
adjustEnding end = do
r' <- regionOfPartNonEmptyAtB unitViWordOnLine Forward end
markPointA (endMark m) .= (regionEnd r')
adjustStart s e = do
txt <- readRegionB (mkRegion s e)
let sP = s + (Point . R.length $ R.takeWhile isSpace txt)
when (sP > s) $
markPointA (startMark m) .= sP
return sP
-- test if we generated overlappings and repair
repairOverlappings origEnd = do overlappings <- allOverlappingMarks True m
unless (null overlappings) $
markPointA (endMark m) .= origEnd
findOverlappingMarksWith :: (MarkInfo -> BufferM Region)
-> ([[MarkInfo]] -> [MarkInfo])
-> Bool -> Region -> MarkInfo -> BufferM [MarkInfo]
findOverlappingMarksWith fMarkRegion flattenMarks border r m =
let markFilter = filter (m /=) . flattenMarks . marks
regOverlap = fmap (regionsOverlap border r) . fMarkRegion
in fmap markFilter getBufferDyn >>= filterM regOverlap
findOverlappingMarks :: ([[MarkInfo]] -> [MarkInfo]) -> Bool -> Region ->
MarkInfo -> BufferM [MarkInfo]
findOverlappingMarks = findOverlappingMarksWith markRegion
regionsOverlappingMarks :: Bool -> Region -> MarkInfo -> BufferM [MarkInfo]
regionsOverlappingMarks = findOverlappingMarks concat
overlappingMarks :: Bool -> Bool -> MarkInfo -> BufferM [MarkInfo]
overlappingMarks border belongingTogether mark = do
r <- markRegion mark
findOverlappingMarks (if belongingTogether
then dependentSiblings mark
else concat)
border
r
mark
allOverlappingMarks :: Bool -> MarkInfo -> BufferM [MarkInfo]
allOverlappingMarks border = overlappingMarks border False
dependentOverlappingMarks :: Bool -> MarkInfo -> BufferM [MarkInfo]
dependentOverlappingMarks border = overlappingMarks border True
-- | Pop the next snippet mark from the buffer's mark queue.
-- Returns 'Nothing' when the queue is empty.  Otherwise the head mark is
-- returned and the stored queue becomes either just the tail (when
-- 'deleteLast' is set, consuming the mark) or the tail with the head
-- rotated to the back, so repeated calls cycle through all marks.
nextBufferMark :: Bool -> BufferM (Maybe MarkInfo)
nextBufferMark deleteLast = do
  BufferMarks ms <- getBufferDyn
  if null ms
    then return Nothing
    else do
      -- 'mks' is applied to [head ms]: 'const' drops it, '<>' appends it.
      let mks = if deleteLast then const $ tail ms else (tail ms <>)
      putBufferDyn . BufferMarks . mks $ [head ms]
      return . Just $ head ms
isDependentMarker :: (MonadState FBuffer m, Functor m) => Mark -> m Bool
isDependentMarker bMark = do
DependentMarks ms <- getBufferDyn
return . elem bMark . concatMap bufferMarkers . concat $ ms
safeDeleteMarkB :: Mark -> BufferM ()
safeDeleteMarkB m = do
b <- isDependentMarker m
unless b (deleteMarkB m)
moveToNextBufferMark :: Bool -> BufferM ()
moveToNextBufferMark deleteLast = nextBufferMark deleteLast >>= \case
Just p -> mv p
Nothing -> return ()
where
mv (SimpleMarkInfo _ m) = do
moveTo =<< use (markPointA m)
when deleteLast $ safeDeleteMarkB m
mv (ValuedMarkInfo _ s e) = do
sp <- use $ markPointA s
ep <- use $ markPointA e
deleteRegionB (mkRegion sp ep)
moveTo sp
when deleteLast $ do
safeDeleteMarkB s
safeDeleteMarkB e
mv r = error $ "moveToNextBufferMark.mv: " <> show r
-- Keymap support
newtype SupertabExt = Supertab (R.YiString -> Maybe (BufferM ()))
instance Monoid SupertabExt where
mempty = Supertab $ const Nothing
(Supertab f) `mappend` (Supertab g) =
Supertab $ \s -> f s `mplus` g s
superTab :: (MonadInteract m Action Event) => Bool -> SupertabExt -> m ()
superTab caseSensitive (Supertab expander) =
some (spec KTab ?>>! doSuperTab) >> deprioritize >>! resetComplete
where
doSuperTab = do canExpand <- withCurrentBuffer $ do
sol <- atSol
ws <- hasWhiteSpaceBefore
return $ sol || ws
if canExpand
then insertTab
else runCompleter
insertTab = withCurrentBuffer $ mapM_ insertB =<< tabB
runCompleter = do w <- withCurrentBuffer readPrevWordB
case expander w of
Just cmd -> withCurrentBuffer $ bkillWordB >> cmd
_ -> autoComplete
autoComplete = wordCompleteString' caseSensitive >>=
withCurrentBuffer . (bkillWordB >>) . (insertN . R.fromText)
-- | Convert snippet description list into a SuperTab extension
fromSnippets :: Bool -> [(R.YiString, SnippetCmd ())] -> SupertabExt
fromSnippets deleteLast snippets =
Supertab $ \str -> lookup str $ map (second $ runSnippet deleteLast) snippets
snippet :: MkSnippetCmd a b => a -> SnippetCmd b
snippet = mkSnippetCmd
| Hi-Angel/yi | yi-core/src/Yi/Snippets.hs | gpl-2.0 | 14,849 | 308 | 17 | 4,416 | 4,205 | 2,222 | 1,983 | -1 | -1 |
{-# OPTIONS -w -O0 #-}
{- |
Module : Adl/ATC_Adl.der.hs
Description : generated Typeable, ShATermConvertible instances
Copyright : (c) DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable(overlapping Typeable instances)
Automatic derivation of instances via DrIFT-rule Typeable, ShATermConvertible
for the type(s):
'Adl.As.Concept'
'Adl.As.RelType'
'Adl.As.Relation'
'Adl.As.UnOp'
'Adl.As.MulOp'
'Adl.As.Rule'
'Adl.As.Prop'
'Adl.As.RangedProp'
'Adl.As.Object'
'Adl.As.KeyAtt'
'Adl.As.KeyDef'
'Adl.As.RuleKind'
'Adl.As.RuleHeader'
'Adl.As.Pair'
'Adl.As.Plugin'
'Adl.As.PatElem'
'Adl.As.Context'
'Adl.Sign.Sign'
'Adl.Sign.Symbol'
'Adl.Sign.RawSymbol'
'Adl.Sign.Sen'
-}
{-
Generated by 'genRules' (automatic rule generation for DrIFT). Don't touch!!
dependency files:
Adl/As.hs
Adl/Sign.hs
-}
module Adl.ATC_Adl () where
import ATC.AS_Annotation
import ATerm.Lib
import Adl.As
import Adl.Print ()
import Adl.Sign
import Common.AS_Annotation
import Common.Doc
import Common.DocUtils
import Common.Id
import Common.Keywords
import Common.Result
import Data.Char
import Data.List (sortBy)
import Data.Typeable
import qualified Common.Lib.Rel as Rel
import qualified Data.Map as Map
import qualified Data.Set as Set
{-! for Adl.As.Concept derive : Typeable !-}
{-! for Adl.As.RelType derive : Typeable !-}
{-! for Adl.As.Relation derive : Typeable !-}
{-! for Adl.As.UnOp derive : Typeable !-}
{-! for Adl.As.MulOp derive : Typeable !-}
{-! for Adl.As.Rule derive : Typeable !-}
{-! for Adl.As.Prop derive : Typeable !-}
{-! for Adl.As.RangedProp derive : Typeable !-}
{-! for Adl.As.Object derive : Typeable !-}
{-! for Adl.As.KeyAtt derive : Typeable !-}
{-! for Adl.As.KeyDef derive : Typeable !-}
{-! for Adl.As.RuleKind derive : Typeable !-}
{-! for Adl.As.RuleHeader derive : Typeable !-}
{-! for Adl.As.Pair derive : Typeable !-}
{-! for Adl.As.Plugin derive : Typeable !-}
{-! for Adl.As.PatElem derive : Typeable !-}
{-! for Adl.As.Context derive : Typeable !-}
{-! for Adl.Sign.Sign derive : Typeable !-}
{-! for Adl.Sign.Symbol derive : Typeable !-}
{-! for Adl.Sign.RawSymbol derive : Typeable !-}
{-! for Adl.Sign.Sen derive : Typeable !-}
{-! for Adl.As.Concept derive : ShATermConvertible !-}
{-! for Adl.As.RelType derive : ShATermConvertible !-}
{-! for Adl.As.Relation derive : ShATermConvertible !-}
{-! for Adl.As.UnOp derive : ShATermConvertible !-}
{-! for Adl.As.MulOp derive : ShATermConvertible !-}
{-! for Adl.As.Rule derive : ShATermConvertible !-}
{-! for Adl.As.Prop derive : ShATermConvertible !-}
{-! for Adl.As.RangedProp derive : ShATermConvertible !-}
{-! for Adl.As.Object derive : ShATermConvertible !-}
{-! for Adl.As.KeyAtt derive : ShATermConvertible !-}
{-! for Adl.As.KeyDef derive : ShATermConvertible !-}
{-! for Adl.As.RuleKind derive : ShATermConvertible !-}
{-! for Adl.As.RuleHeader derive : ShATermConvertible !-}
{-! for Adl.As.Pair derive : ShATermConvertible !-}
{-! for Adl.As.Plugin derive : ShATermConvertible !-}
{-! for Adl.As.PatElem derive : ShATermConvertible !-}
{-! for Adl.As.Context derive : ShATermConvertible !-}
{-! for Adl.Sign.Sign derive : ShATermConvertible !-}
{-! for Adl.Sign.Symbol derive : ShATermConvertible !-}
{-! for Adl.Sign.RawSymbol derive : ShATermConvertible !-}
{-! for Adl.Sign.Sen derive : ShATermConvertible !-}
| nevrenato/Hets_Fork | Adl/ATC_Adl.der.hs | gpl-2.0 | 3,423 | 0 | 5 | 466 | 157 | 117 | 40 | 19 | 0 |
{-# language TypeFamilies, FlexibleContexts #-}
module Numeric.LinearAlgebra.EigenSolvers.Experimental where
import Control.Exception.Common
import Control.Iterative
import Data.Sparse.Common
import Control.Monad.Catch
import Control.Monad.State.Strict
-- | `eigRayleigh n mm` performs `n` iterations of the Rayleigh algorithm on matrix `mm` and returns the eigenpair closest to the initialization. It displays cubic-order convergence, but it also requires an educated guess on the initial eigenpair.
-- eigRayleigh nitermax debq prntf m = untilConvergedGM "eigRayleigh" config (const True) (rayStep m)
-- where
-- ii = eye (nrows m)
-- config = IterConf nitermax debq fst prntf
-- rayStep aa (b, mu) = do
-- nom <- (m ^-^ (mu `matScale` ii)) <\> b
-- let b' = normalize2' nom
-- mu' = (b' <.> (aa #> b')) / (b' <.> b')
-- return (b', mu')
-- | Golub-Kahan-Lanczos bidiagonalization (see "Restarted Lanczos Bidiagonalization for the SVD", SLEPc STR-8, http://slepc.upv.es/documentation/manual.htm )
-- | Golub-Kahan-Lanczos bidiagonalization (see "Restarted Lanczos
-- Bidiagonalization for the SVD", SLEPc STR-8,
-- http://slepc.upv.es/documentation/manual.htm ).
-- Starting from the right vector @q1nn@ (which must have as many entries as
-- @aa@ has columns), produces @(pp, bb, qq)@ with @bb@ upper bidiagonal.
-- Throws 'MatVecSizeMismatchException' on a size mismatch; the exception
-- previously carried the label \"hhBidiag\", which named the wrong routine.
gklBidiag aa q1nn
  | dim q1nn == n = return (pp, bb, qq)
  | otherwise = throwM (MatVecSizeMismatchException "gklBidiag" (dim aa) (dim q1nn))
  where
  (m, n) = (nrows aa, ncols aa)
  aat = transpose aa
  -- assemble the bidiagonal matrix from the accumulated (row, col, value) list
  bb = fromListSM (n, n) bl
  -- iterate the Lanczos step until the counter reaches n
  (_, _, _, _, pp, bl, qq) = execState (modifyUntil tf bidiagStep) bidiagInit
  tf (_, _, _, i, _, _, _) = i == n
  -- the first iteration, unrolled to seed the state with alpha1 / beta1
  bidiagInit = (q2n, beta1, p1n, 1 :: Int, pp, bb', qq)
    where
    q1 = normalize2' q1nn
    p1 = aa #> q1
    alpha1 = norm2' p1
    p1n = p1 ./ alpha1
    q2 = (aat #> p1) ^-^ (alpha1 .* q1)
    beta1 = norm2' q2
    q2n = q2 ./ beta1
    pp = insertCol (zeroSM m n) p1n 0
    qq = insertCol (zeroSM n n) q2n 0
    bb' = [(0, 0, alpha1)]
  -- one Lanczos step: extend P and Q by one column, and B by one diagonal
  -- entry and one superdiagonal entry
  bidiagStep (qj, betajm, pjm, j, pp, bb, qq) =
    (qjp, betaj, pj, succ j, pp', bb', qq')
    where
      u = (aa #> qj) ^-^ (betajm .* pjm)
      alphaj = norm2' u
      pj = u ./ alphaj
      v = (aat #> pj) ^-^ (alphaj .* qj)
      betaj = norm2' v
      qjp = v ./ betaj
      pp' = insertCol pp pj j
      bb' = [(j - 1, j, betaj),
             (j, j, alphaj)] ++ bb
      qq' = insertCol qq qjp j
| ocramz/sparse-linear-algebra | src/Numeric/LinearAlgebra/EigenSolvers/Experimental.hs | gpl-3.0 | 2,178 | 23 | 11 | 576 | 500 | 307 | 193 | 37 | 1 |
{-| Module : PhaseImport
License : GPL
Maintainer : helium@cs.uu.nl
Stability : experimental
Portability : portable
-}
module Helium.Main.PhaseImport(phaseImport) where
import Helium.ModuleSystem.GatherImports
import Helium.Main.CompileUtils
import Helium.ModuleSystem.CoreToImportEnv(getImportEnvironment)
import Helium.Syntax.UHA_Syntax
import qualified Lvm.Core.Expr as Core
{-
import qualified Lvm.Core.Expr as Core
import qualified Lvm.Core.Utils as Core
import Lvm.Common.Id(Id, stringFromId, idFromString, dummyId)
import Lvm.Common.IdSet(IdSet, elemSet)
import Helium.Syntax.UHA_Utils
import Lvm.Path(searchPath)
import qualified Helium.ModuleSystem.ExtractImportDecls as EID-}
-- | Import-resolution phase: adds the implicit HeliumLang and Prelude
-- imports to the module, chases every imported module on the lvm search
-- path, and returns the indirection declarations together with one import
-- environment per imported module.
phaseImport :: String -> Module -> [String] -> [Option] ->
                    IO ([Core.CoreDecl], [(Name, ImportEnvironment, ModuleDecls)])
phaseImport fullName module_ lvmPath options = do
    enterNewPhase "Importing" options
    let (_, baseName, _) = splitFilePath fullName
    -- Add HeliumLang and Prelude import
    let moduleWithExtraImports = addImplicitImports module_
    -- Chase imports
    chasedImpsList <- chaseImports lvmPath moduleWithExtraImports
    -- Each chased entry is (module name, core decls, module decls).
    let indirectionDecls = concatMap (\(_,x,_) -> x) chasedImpsList
        importEnvs =
            map (\(name,decls,moddecls) -> (name, getImportEnvironment baseName decls, moddecls)) chasedImpsList
    return (indirectionDecls, importEnvs)
module Lamdu.Sugar.Convert.Annotation
( makeAnnotation, makeTypeAnnotation
) where
import Control.Monad.Unit (Unit)
import Control.Monad.Transaction (MonadTransaction)
import qualified Lamdu.Annotations as Annotations
import qualified Lamdu.Calc.Type as T
import Lamdu.Sugar.Annotations
import Lamdu.Sugar.Internal
import Lamdu.Sugar.Internal.EntityId (EntityId)
import qualified Lamdu.Sugar.Internal.EntityId as EntityId
import Lamdu.Sugar.OrderTags (orderType)
import Lamdu.Sugar.Convert.Type (convertType)
import qualified Lamdu.Sugar.Types as Sugar
import Lamdu.Prelude
makeAnnotation ::
MonadTransaction n m =>
Annotations.Mode ->
(ShowAnnotation, EvalPrep) ->
m (Sugar.Annotation EvalPrep InternalName)
makeAnnotation annMode (showAnn, x) =
case annMode of
_ | showAnn ^. showTypeAlways -> typeAnnotationFromEvalRes x
Annotations.Types | showAnn ^. showInTypeMode -> typeAnnotationFromEvalRes x
Annotations.Evaluation | showAnn ^. showInEvalMode -> Sugar.AnnotationVal x & pure
_ -> pure Sugar.AnnotationNone
typeAnnotationFromEvalRes ::
MonadTransaction n f => EvalPrep -> f (Sugar.Annotation v InternalName)
typeAnnotationFromEvalRes x =
makeTypeAnnotation (x ^. eEvalId) (x ^. eType) <&> Sugar.AnnotationType
makeTypeAnnotation ::
MonadTransaction n m =>
EntityId -> Pure # T.Type -> m (Annotated EntityId # Sugar.Type InternalName Unit)
makeTypeAnnotation e t = convertType (EntityId.ofTypeOf e) t >>= orderType
| lamdu/lamdu | src/Lamdu/Sugar/Convert/Annotation.hs | gpl-3.0 | 1,567 | 0 | 12 | 299 | 406 | 222 | 184 | -1 | -1 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>Help</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">JavaHelpSearch</data>
</view>
</helpset>
| senbox-org/snap-desktop | snap-product-library-ui-v2/src/main/resources/org/esa/snap/product/library/ui/v2/docs/help.hs | gpl-3.0 | 761 | 54 | 44 | 164 | 278 | 141 | 137 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Accounts.Updatelabels
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates labels that are assigned to the Merchant Center account by CSS
-- user.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.accounts.updatelabels@.
module Network.Google.Resource.Content.Accounts.Updatelabels
(
-- * REST Resource
AccountsUpdatelabelsResource
-- * Creating a Request
, accountsUpdatelabels
, AccountsUpdatelabels
-- * Request Lenses
, ausXgafv
, ausMerchantId
, ausUploadProtocol
, ausAccessToken
, ausUploadType
, ausPayload
, ausAccountId
, ausCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.accounts.updatelabels@ method which the
-- 'AccountsUpdatelabels' request conforms to.
type AccountsUpdatelabelsResource =
"content" :>
"v2.1" :>
Capture "merchantId" (Textual Word64) :>
"accounts" :>
Capture "accountId" (Textual Word64) :>
"updatelabels" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] AccountsUpdateLabelsRequest :>
Post '[JSON] AccountsUpdateLabelsResponse
-- | Updates labels that are assigned to the Merchant Center account by CSS
-- user.
--
-- /See:/ 'accountsUpdatelabels' smart constructor.
data AccountsUpdatelabels =
AccountsUpdatelabels'
{ _ausXgafv :: !(Maybe Xgafv)
, _ausMerchantId :: !(Textual Word64)
, _ausUploadProtocol :: !(Maybe Text)
, _ausAccessToken :: !(Maybe Text)
, _ausUploadType :: !(Maybe Text)
, _ausPayload :: !AccountsUpdateLabelsRequest
, _ausAccountId :: !(Textual Word64)
, _ausCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsUpdatelabels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ausXgafv'
--
-- * 'ausMerchantId'
--
-- * 'ausUploadProtocol'
--
-- * 'ausAccessToken'
--
-- * 'ausUploadType'
--
-- * 'ausPayload'
--
-- * 'ausAccountId'
--
-- * 'ausCallback'
accountsUpdatelabels
:: Word64 -- ^ 'ausMerchantId'
-> AccountsUpdateLabelsRequest -- ^ 'ausPayload'
-> Word64 -- ^ 'ausAccountId'
-> AccountsUpdatelabels
accountsUpdatelabels pAusMerchantId_ pAusPayload_ pAusAccountId_ =
AccountsUpdatelabels'
{ _ausXgafv = Nothing
, _ausMerchantId = _Coerce # pAusMerchantId_
, _ausUploadProtocol = Nothing
, _ausAccessToken = Nothing
, _ausUploadType = Nothing
, _ausPayload = pAusPayload_
, _ausAccountId = _Coerce # pAusAccountId_
, _ausCallback = Nothing
}
-- | V1 error format.
ausXgafv :: Lens' AccountsUpdatelabels (Maybe Xgafv)
ausXgafv = lens _ausXgafv (\ s a -> s{_ausXgafv = a})
-- | The ID of the managing account.
ausMerchantId :: Lens' AccountsUpdatelabels Word64
ausMerchantId
= lens _ausMerchantId
(\ s a -> s{_ausMerchantId = a})
. _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ausUploadProtocol :: Lens' AccountsUpdatelabels (Maybe Text)
ausUploadProtocol
= lens _ausUploadProtocol
(\ s a -> s{_ausUploadProtocol = a})
-- | OAuth access token.
ausAccessToken :: Lens' AccountsUpdatelabels (Maybe Text)
ausAccessToken
= lens _ausAccessToken
(\ s a -> s{_ausAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ausUploadType :: Lens' AccountsUpdatelabels (Maybe Text)
ausUploadType
= lens _ausUploadType
(\ s a -> s{_ausUploadType = a})
-- | Multipart request metadata.
ausPayload :: Lens' AccountsUpdatelabels AccountsUpdateLabelsRequest
ausPayload
= lens _ausPayload (\ s a -> s{_ausPayload = a})
-- | The ID of the account whose labels are updated.
ausAccountId :: Lens' AccountsUpdatelabels Word64
ausAccountId
= lens _ausAccountId (\ s a -> s{_ausAccountId = a})
. _Coerce
-- | JSONP
ausCallback :: Lens' AccountsUpdatelabels (Maybe Text)
ausCallback
= lens _ausCallback (\ s a -> s{_ausCallback = a})
instance GoogleRequest AccountsUpdatelabels where
type Rs AccountsUpdatelabels =
AccountsUpdateLabelsResponse
type Scopes AccountsUpdatelabels =
'["https://www.googleapis.com/auth/content"]
requestClient AccountsUpdatelabels'{..}
= go _ausMerchantId _ausAccountId _ausXgafv
_ausUploadProtocol
_ausAccessToken
_ausUploadType
_ausCallback
(Just AltJSON)
_ausPayload
shoppingContentService
where go
= buildClient
(Proxy :: Proxy AccountsUpdatelabelsResource)
mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Accounts/Updatelabels.hs | mpl-2.0 | 5,850 | 0 | 20 | 1,412 | 903 | 522 | 381 | 131 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ExistentialQuantification #-}
module Affection.MessageBus.Class
( Participant(..)
, genUUID
, UUID
) where
import Affection.MessageBus.Message
import Affection.Types
import Control.Monad.IO.Class (liftIO)
import Data.UUID
import Data.UUID.V4
import Data.String as S (fromString)
import Affection.Logging
-- | This typeclass defines the behaviour of a participant in the message system
class (Message (Mesg prt), Show (Mesg prt)) => Participant prt where
  -- | Message datatype
  type Mesg prt :: *
  -- | Function to get the list of subscribers from the participant
  partSubscribers
    :: prt
    -- ^ the 'Participant''s subscriber storage
    -> Affection [Mesg prt -> Affection ()]
    -- ^ List of Subscriber functions
  -- | Subscribe to the 'Participant''s events
  partSubscribe
    :: prt
    -- ^ The 'Participant''s subscriber storage
    -> (Mesg prt -> Affection ())
    -- ^ What to do in case of a 'Message'
    -- (Subscriber function)
    -> Affection UUID
    -- ^ 'UUID' of the registered subscriber Function
  -- | Unsubscribe a Subscriber function from Participant
  partUnSubscribe
    :: prt
    -- ^ The 'Participant''s subscriber storage to unsubscribe from
    -> UUID
    -- ^ The subscriber function's 'UUID'
    -> Affection ()
  -- | Get the 'Participant' to emit a 'Message' on all of its subscribers
  partEmit
    :: prt
    -- ^ The 'Participant''s subscriber storage
    -> Mesg prt
    -- ^ The 'Message' to emit
    -> Affection ()
  -- Default: log the message at Verbose level, then invoke every
  -- registered subscriber function with it, in subscription order.
  partEmit p m = do
    liftIO $ logIO Verbose $ "Emitting message: " <> S.fromString (show m)
    l <- partSubscribers p
    mapM_ ($ m) l
-- | Helper function to generate new 'UUID's.
-- Uses 'Data.UUID.V4.nextRandom', i.e. random (version 4) UUIDs.
genUUID :: Affection UUID
genUUID = liftIO nextRandom
| nek0/affection | src/Affection/MessageBus/Class.hs | lgpl-3.0 | 1,882 | 0 | 12 | 406 | 312 | 176 | 136 | 39 | 1 |
-- | A variable name: a base character, primed zero or more times
-- (each 'Crtica' wrapper adds one prime when shown).
data Slovo = Samo Char | Crtica Slovo
  deriving (Eq)
instance Show Slovo where
  show (Samo c) = [c]
  show (Crtica s) = (show s) ++ "'"
-- | Untyped lambda-calculus terms: variables, applications and
-- abstractions.
data Lambda = Varijabla Slovo
            | Aplikacija Lambda Lambda
            | Apstrakcija Slovo Lambda
instance Show Lambda where
  show (Varijabla s) = show s
  show (Aplikacija funkcija argument) = show funkcija ++ " " ++ show argument
  show (Apstrakcija slovo povratna_vrijednost) =
    "(lambda " ++ show slovo ++ " . " ++ show povratna_vrijednost ++ ")"
-- | Abstraction smart constructor: @lam x body@ builds (lambda x . body).
-- Partial: the first argument must be a 'Varijabla'.
lam :: Lambda -> Lambda -> Lambda
lam (Varijabla v) izraz = Apstrakcija v izraz
-- | Infix application: @f # arg@ is the application of @f@ to @arg@.
(#) :: Lambda -> Lambda -> Lambda
f # arg = Aplikacija f arg
-- | Capture-avoiding substitution: @supst t (Varijabla v) cime@ replaces
-- free occurrences of @v@ in @t@ with @cime@, renaming a bound variable
-- (via 'novi') when it would capture a variable occurring in @cime@.
-- Partial: the second argument must be a 'Varijabla'.
supst :: Lambda -> Lambda -> Lambda -> Lambda
supst (Varijabla c) (Varijabla v) cime | c == v = cime
                                       | otherwise = (Varijabla c)
supst (Aplikacija f arg) sto@(Varijabla v) cime =
  Aplikacija (supst f sto cime) (supst arg sto cime)
supst (Apstrakcija x f) sto@(Varijabla v) cime
  -- substituting for the bound variable itself: nothing is free, stop
  | x == v = Apstrakcija x f
  -- the binder x occurs in the replacement: rename x to a fresh name
  -- inside the body first, to avoid capturing it
  | x `sePojavljujeU` cime = let
      nova_apstrakcija = Apstrakcija (novi x f) (supst f (Varijabla x) (Varijabla (novi x f)))
    in supst nova_apstrakcija sto cime
  | otherwise = Apstrakcija x (supst f sto cime)
-- | A variable name based on @x@ that does not occur in @f@, obtained
-- by adding primes ('Crtica') until the name is fresh.
novi :: Slovo -> Lambda -> Slovo
novi x f | x `sePojavljujeU` f = novi (Crtica x) f
         | otherwise = x
-- | @x `sePojavljujeU` t@ (\"x appears in t\"): True when the name @x@
-- occurs anywhere in @t@, whether bound or free.
sePojavljujeU :: Slovo -> Lambda -> Bool
x `sePojavljujeU` Varijabla v = x == v
x `sePojavljujeU` Aplikacija f arg = x `sePojavljujeU` f || x `sePojavljujeU` arg
x `sePojavljujeU` Apstrakcija y f = x == y || x `sePojavljujeU` f
-- (\ x -> x + y) [y |-> x] does not work as it should with naive
-- substitution; this example exercises the capture-avoidance machinery
problem = beta (lam y (lam x (f # x # y)) # x)
-- it should yield (\ x' -> x' + x)
-- | One beta-reduction step.  Partial: only defined for a redex
-- (an application whose head is an abstraction).
beta (Aplikacija (Apstrakcija x f) y) = supst f (Varijabla x) y
-- Named variables used by the examples (primes via 'Crtica').
x = Varijabla (Samo 'x')
x' = Varijabla (Crtica (Samo 'x'))
x'' = Varijabla (Crtica (Crtica (Samo 'x')))
y = Varijabla (Samo 'y')
y' = Varijabla (Crtica (Samo 'y'))
z = Varijabla (Samo 'z')
a = Varijabla (Samo 'a')
b = Varijabla (Samo 'b')
f = Varijabla (Samo 'f')
-- Example reductions exercising capture avoidance.
test1 = problem
test2 = beta (lam y (lam x (f # x # x')) # x)
| vedgar/mlr | 2016 Kolokvij/Z5.hs | unlicense | 2,050 | 90 | 13 | 554 | 900 | 456 | 444 | 45 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Test where
import qualified FAM (test)
import qualified FM_DebasishG_2013_01_a_language_and_its_interpretation as FM_D (test)
import qualified FM_GG_2012_07_purify_code as FM_GG (test)
import qualified FM_MX_Redis as FM_MX (test)
import qualified HA_Operational_Monad_Tutorial as HA_Op (test)
-- | Run every example module's test action in sequence; their output
-- interleaves on stdout in this order.
allTests = do
  FAM.test
  FM_D.test
  FM_GG.test
  FM_MX.test
  HA_Op.test
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/fix-free/free/src/Test.hs | unlicense | 584 | 0 | 7 | 212 | 88 | 55 | 33 | 13 | 1 |
module ShapeTest1 where
import Test.HUnit
-- | A circle (centre x, centre y, radius) or an axis-aligned rectangle
-- given by two opposite corners.
data Shape = Circle Float Float Float | Rectangle Float Float Float Float deriving Show

-- | Area of a shape.
surface :: Shape -> Float
surface shape = case shape of
  Circle _ _ r          -> pi * r ^ 2
  Rectangle x1 y1 x2 y2 -> abs (x2 - x1) * abs (y2 - y1)
-- | HUnit's 'assertEqual' takes the /expected/ value before the
-- /actual/ one; the original had them swapped, which garbles the
-- failure message (\"expected: <actual>\") without affecting pass/fail.
circleSurfaceTest :: Test
circleSurfaceTest = TestCase $ assertEqual
  "Should be pi" pi (surface $ Circle 1 1 1)
rectSurfaceTest :: Test
rectSurfaceTest = TestCase $ assertEqual
  "" 1 (surface $ Rectangle 1 0 0 1)
-- | All test cases bundled for the runner.
testcases :: Test
testcases = TestList [circleSurfaceTest, rectSurfaceTest]
main :: IO Counts
main = runTestTT $ testcases | dnvriend/study-category-theory | haskell/learn_a_haskell/ch8/ShapeTest1.hs | apache-2.0 | 627 | 0 | 9 | 127 | 226 | 121 | 105 | 16 | 1 |
module Presentation where
import System.IO (hFlush, stdout)
-- | Print @msg@ indented two spaces, framed above and below by a
-- dashed rule whose width matches the indented message line.
banner :: String -> IO ()
banner msg = do
  putStrLn line
  putStrLn $ "  " ++ msg
  putStrLn line
  where len = length msg + 2
        line = replicate len '-'
-- | Print @msg@, then a parenthesised status indicator followed by a
-- cursor, flush stdout (the cursor line has no newline), and read the
-- user's reply.
prompt :: String -> String -> IO String
prompt msg status = do
  putStrLn msg
  putStr (concat ["(", status, ") > "])
  hFlush stdout
  getLine
-- | Like 'prompt', but an empty reply yields @defaultValue@ instead.
-- The default is shown to the user as the status text.
promptWithDefault :: String -> String -> IO String
promptWithDefault msg defaultValue = do
  answer <- prompt msg ("default: " ++ defaultValue)
  return (if null answer then defaultValue else answer)
-- | Print the game-over banner followed by a closing message.
gameover :: String -> IO ()
gameover msg = do
  putStrLn "G A M E  O V E R"
  putStrLn msg
| tr00per/adventure | src/main/haskell/Presentation.hs | bsd-2-clause | 714 | 0 | 10 | 209 | 241 | 114 | 127 | 25 | 2 |
{-# LANGUAGE PackageImports #-}
import "yosogr" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort, settingsHost)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
-- | Development entry point: build the application, serve it on all
-- interfaces in a background thread, then block in 'loop' watching for
-- the terminate marker file.
main :: IO ()
main = do
    putStrLn "Starting devel application"
    (port, app) <- getApplicationDev
    forkIO $ runSettings defaultSettings
        { settingsPort = port
        , settingsHost = "0.0.0.0"
        } app
    loop
-- | Poll every 0.1 s for the marker file the build tooling writes when
-- the devel session should shut down.
loop :: IO ()
loop = do
  threadDelay 100000
  e <- doesFileExist "dist/devel-terminate"
  if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess | yogsototh/yosogr | devel.hs | bsd-2-clause | 748 | 0 | 10 | 135 | 196 | 107 | 89 | 24 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PackageImports #-}
module PkgStore(PkgStore(..)
--,serlN2
,Store,getSize
,sd) where -- Store,serializeF,deserializeF) where
import Control.Exception
import Data.ByteString as B
import Data.ByteString.Lazy as L
import Data.Store
import Test.Data
import Test.Data.Values
import Types
-- | Get the number of bytes needed to store the given value. See
-- 'size'.  Delegates to 'getSizeWith' using the type's 'size'
-- descriptor from its 'Store' instance.
getSize :: Store a => a -> Int
getSize = getSizeWith size
{-# INLINE getSize #-}
-- | Given a 'Size' descriptor and a value of type @a@, return the
-- value's size in bytes: a 'VarSize' computes it from the value, a
-- 'ConstSize' ignores the value entirely.
getSizeWith :: Size a -> a -> Int
getSizeWith sz x =
  case sz of
    VarSize f   -> f x
    ConstSize n -> n
{-# INLINE getSizeWith #-}
-- | Wrapper giving benchmark types a 'Data.Store'-backed 'Serialize'
-- instance.
data PkgStore a = PkgStore a deriving (Eq,Show)
instance Arbitrary a => Arbitrary (PkgStore a) where arbitrary = fmap PkgStore arbitrary
instance Store a => Serialize PkgStore a where
  serialize (PkgStore a) = serializeF a
  deserialize = (PkgStore <$>) . deserializeF
-- NOTE(review): 'pkg' and 'unpkg' lack top-level type signatures.
pkg = PkgStore
unpkg (PkgStore a) = a
-- | Benchmark descriptor tuple: (name, name, encoder, decoder).
sd = ("store4","store4",serializeF,deserializeF)
-- | Encode strictly via 'Data.Store.encode', exposed lazily.
serializeF = L.fromStrict . encode
-- | Decode from a lazy ByteString.
-- NOTE(review): @Left . error . show@ buries the decode failure inside
-- a thunk that explodes only when the Left payload is forced.
deserializeF = either (Left . error . show) Right . decode . L.toStrict
-- Generic-derived Store instances for the benchmark data types.
instance Store Various
instance Store N
instance Store a => Store (List a)
instance Store Car
instance Store Acceleration
instance Store Consumption
instance Store CarModel
instance Store OptionalExtra
instance Store Engine
-- instance Store a => Store (Tree a)
-- Specialised instances
instance {-# OVERLAPPABLE #-} Store a => Store (Tree a)
instance {-# OVERLAPPING #-} Store (Tree N)
instance {-# OVERLAPPING #-} Store (Tree (N,N,N))
--instance {-# OVERLAPPING #-} Store [N]
instance {-# OVERLAPPING #-} Store (N,N,N)
-- Ad-hoc REPL helpers: raw encoded bytes of sample values.
s = B.unpack $ encode $ lN2
f = B.unpack $ encode $ (22.33::Float,44.55::Double)
| tittoassini/flat | benchmarks/PkgStore.hs | bsd-3-clause | 1,939 | 0 | 10 | 419 | 520 | 283 | 237 | 45 | 1 |
module Sound.Fluidsynth.Event
( Channel
, Key
, Velocity
, Duration
, BankNum
, PresetNum
, SoundFontID
, Value
, Control
, ClientID
, Pitch
, SeqEventType(..)
, SeqEvent(..)
, seqEventType
, seqEventSource
, seqEventDestination
) where
import Sound.Fluidsynth.Internal.Event
| projedi/fluidsynth-hs-complete | src/Sound/Fluidsynth/Event.hs | bsd-3-clause | 328 | 0 | 5 | 90 | 72 | 49 | 23 | 18 | 0 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.EXT.TextureFilterAnisotropic
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/EXT/texture_filter_anisotropic.txt EXT_texture_filter_anisotropic> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.EXT.TextureFilterAnisotropic (
-- * Enums
gl_MAX_TEXTURE_MAX_ANISOTROPY_EXT,
gl_TEXTURE_MAX_ANISOTROPY_EXT
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/EXT/TextureFilterAnisotropic.hs | bsd-3-clause | 745 | 0 | 4 | 81 | 40 | 33 | 7 | 4 | 0 |
import Control.Monad (filterM, forM_)
import qualified Data.Vector.Unboxed.Mutable as MV
import qualified Data.Vector.Unboxed as V
import Data.Vector.Unboxed ((!))
-- Problem parameters: answer modulus, cycle length, target index, and
-- a fixed list of primes (presumably the prime factorisation of n --
-- TODO confirm).
m = 10^9
k = 4321
n = 1234567898765
pd = [5, 41, 25343, 237631]
-- | Products of every subset of 'pd', i.e. all divisors composed of
-- those primes; @filterM (const [False, True])@ in the list monad
-- enumerates the powerset.
divisor = map product xs where
  xs = filterM (const [False, True]) pd
-- | Cyclic convolution of two length-@k@ coefficient vectors: indices
-- wrap mod @k@ and coefficients are reduced mod @m@.  O(k^2); built
-- with a mutable vector inside 'V.create'.
conv :: V.Vector Int -> V.Vector Int -> V.Vector Int
conv a b = V.create $ do
  ret <- MV.replicate k 0
  forM_ [0 .. k - 1] $ \i -> do
    forM_ [0 .. k - 1] $ \j -> do
      let k' = (i + j) `mod` k
      let v = ((a!i) * (b!j)) `mod` m
      v' <- MV.read ret k'
      MV.write ret k' $ (v + v') `mod` m
  return ret
-- | Raise a length-@k@ coefficient vector to the @p@-th power under the
-- cyclic convolution 'conv', by binary (square-and-multiply)
-- exponentiation.
--
-- Fixes over the original: the accumulator's ``ret `seq` ret`` was a
-- denotational no-op and is dropped, and the identity element no longer
-- shadows 'Prelude.init'.
power :: V.Vector Int -> Int -> V.Vector Int
power a p = go a p identity
  where
    -- Multiplicative identity: the polynomial 1 (coefficients 1,0,0,..).
    identity = V.fromListN k (1 : repeat 0)
    go _ 0 acc = acc
    go b q acc
      | odd q     = go b2 q2 (conv acc b)
      | otherwise = go b2 q2 acc
      where
        b2 = conv b b
        q2 = q `div` 2
-- | Answer: coefficient @n `mod` k@ of 'base' raised to the n-th power
-- under cyclic convolution, where 'base' counts, per residue class
-- mod k, the divisors d with @k - (d `mod` k)@ in that class.
solve = poly ! (n `mod` k) where
  d = map (\x -> k - (x `mod` k)) divisor
  base = V.fromList $ map (\x -> count x d) [0 .. k - 1] where
    count x xs = length $ filter (== x) xs
  poly = power base n
main = print solve
| foreverbell/project-euler-solutions | src/511.hs | bsd-3-clause | 1,205 | 36 | 9 | 388 | 582 | 317 | 265 | 35 | 3 |
module Main where
import Dropbox ( DropboxSession(..)
, getSession
, accountInfo
, metadata)
-- | Interactive smoke test: obtain a Dropbox session, show the
-- authorisation URL, wait for the user, then print account info.
main = do
  session <- getSession Nothing
  -- res <- getAccountInfo sID
  -- NOTE(review): 'getAuthorizationUrl' is not in the import list
  -- above; presumably exported by another import -- confirm it compiles.
  let url = getAuthorizationUrl session
  putStrLn url
  putStrLn "Continue?"
  -- NOTE(review): the read line is unused; the prompt merely blocks
  -- until the user has authorised the app in the browser.
  inputStr <- getLine
  info <- accountInfo session
  putStrLn $ show info
| tinkhaven/haskell-dropbox-api | test/DropboxTest.hs | bsd-3-clause | 380 | 0 | 10 | 122 | 92 | 45 | 47 | 13 | 1 |
{-# LANGUAGE DeriveGeneric, DeriveAnyClass #-}
{-# LANGUAGE OverloadedStrings #-}
module Hop.Apps.Juno.Ledger (
dirtyPickOutAccount50a
,runQuery
,convertQuery
,Transaction(..)
,SwiftAPI(..)
,AcctRole(..)
,LedgerQuery(..)
,QueryResult(..)
) where
import Data.Either ()
import Control.Lens
import Control.Applicative ((<|>))
import Data.Aeson
import GHC.Natural
import Data.Text (Text, intercalate)
import qualified Data.Text as Text
import Data.Set (Set)
import qualified Data.Set as Set
import GHC.Generics
import Data.Map.Strict (Map)
import Data.Ratio
import qualified Data.Map.Strict as Map
import Hop.Hoplite.Eval (TransactionId(..), OpId(..), OrderedOp, Cmd(..))
import Hop.Schwifty.Swift.M105.Types
-- | Wire-format view of a single ledger operation.
data Transaction = Transaction {
  transId :: Int
  ,opId :: Int
  ,from :: Text
  ,to :: Text
  ,amount :: Double
  } deriving (Eq, Show, Generic, ToJSON, FromJSON)
-- | Flattened wire-format view of a SWIFT message.
data SwiftAPI = SwiftAPI {
  ref :: Text
  ,opCode :: Text
  ,orderingAcct :: Text
  ,orderingAcctDescription :: Text
  ,beneficiaryAcct :: Text
  ,beneficiaryAcctDescription :: Text
  ,settled :: Double
  ,currency :: Text
  ,valueDate :: Text
  ,details :: Text
  } deriving (Show, Eq, Generic, ToJSON, FromJSON)
-- | Which side of a transfer an account query refers to.
data AcctRole = Sender | Receiver | Both deriving (Show, Eq, Generic, ToJSON, FromJSON)
-- | Query language over the ledger; 'And' composes filters.
data LedgerQuery = BySwiftId Integer
                 | ByAcctName AcctRole Text
                 | And [LedgerQuery]
                 -- | Or [LedgerQuery] -- If we need this, we'll add it
  deriving (Show, Eq, Generic, ToJSON, FromJSON)
-- | Query hits: SWIFTs, flattened transactions, and raw inputs, all
-- keyed by stringified transaction id where applicable.
data QueryResult = QueryResult {
  swifts :: Map String SwiftAPI
  ,trans :: [Transaction]
  ,inputs :: Map String Text
  } deriving (Show, Eq, Generic, ToJSON)
-- these are here solely for convenience
type HopperLog = ([(TransactionId, [OrderedOp])], Map TransactionId SWIFT,Map TransactionId Text)
type TransLog = [(TransactionId, [OrderedOp])]
--type SwiftLog = Map TransactionId SWIFT
-- | Run a ledger query against the full log and convert the surviving
-- entries to the wire-format 'QueryResult'.
runQuery :: LedgerQuery -> HopperLog -> QueryResult
runQuery q = convertQuery . execQuery q
-- | Interpret a 'LedgerQuery' as successive narrowing filters over the
-- log; 'And' applies its sub-queries left to right.
execQuery :: LedgerQuery -> HopperLog -> HopperLog
execQuery (BySwiftId i) lss = filterById (fromInteger i) lss
execQuery (ByAcctName r i) lss = filterByAcct r i lss
execQuery (And []) _ = ([],Map.empty,mempty) -- if there's nothing to query, return nothing... technically an error
execQuery (And [x]) lss = execQuery x lss
execQuery (And (x:xs)) lss = execQuery (And xs) (execQuery x lss)
-- | Keep only the transaction with the given id, across all three log
-- components.
filterById :: Natural -> HopperLog -> HopperLog
filterById i (l, ss, ts) = (l', ss', ts')
  where
    l' = filter (\(TransactionId i', _) -> i' == i) l
    ss' = Map.filterWithKey (\k _ -> k == TransactionId i) ss
    ts' = Map.filterWithKey (\k _ -> k == TransactionId i) ts
-- | Keep only transactions where account @a@ plays role @r@; the SWIFT
-- and input maps are narrowed to the surviving transaction ids.
filterByAcct :: AcctRole -> Text -> HopperLog -> HopperLog
filterByAcct r a (l, ss, ts) = (l', ss', ts')
  where
    l' = filter (acctInvolved r a . snd) l
    ss' = Map.filterWithKey (\k _ -> Set.member k (associatedSwiftIds l')) ss
    ts' = Map.filterWithKey (\k _ -> Set.member k (associatedSwiftIds l')) ts
-- | Does account @a@ play role @r@ in the given operation?
--
-- The original bound 'Cmd''s first field as @to'@ and second as @from'@,
-- the opposite of how 'convertTrans' reads them (first = source,
-- second = destination); the matched positions were nevertheless
-- correct, so only the misleading names are fixed here.
-- NOTE(review): field order assumed per 'convertTrans' -- confirm
-- against Hop.Hoplite.Eval.Cmd.
acctInTrans :: AcctRole -> Text -> OrderedOp -> Bool
acctInTrans Sender   a (_, Cmd from' _   _ _) = from' == a
acctInTrans Receiver a (_, Cmd _   to'   _ _) = to' == a
acctInTrans Both     a (_, Cmd from' to' _ _) = from' == a || to' == a
-- | True when account @a@ plays role @r@ in any op of the transaction.
acctInvolved :: AcctRole -> Text -> [OrderedOp] -> Bool
acctInvolved r a = any (acctInTrans r a)
-- | The set of transaction ids present in a transaction log.
associatedSwiftIds :: TransLog -> Set TransactionId
associatedSwiftIds = Set.fromList . fmap fst
-- | Convert the internal log triple to the wire-format 'QueryResult',
-- stringifying the transaction-id map keys.
convertQuery :: HopperLog -> QueryResult
convertQuery (l, ss, ts) = QueryResult ss' l' ts'
  where
    ss' = Map.map convertSWIFT $ Map.mapKeys (\(TransactionId i) -> show i) ss
    l' = convertTrans l
    ts' = Map.mapKeys (\(TransactionId i) -> show i) ts
-- | Flatten a SWIFT message into the API's wire representation
-- (each field's target is noted in the trailing comments).
convertSWIFT :: SWIFT -> SwiftAPI
convertSWIFT m = SwiftAPI
  (m ^. sCode20 . unSendersRef) -- ref :: Text
  (Text.pack $ show $ m ^. sCode23B) -- opCode :: Text
  (dirtyPickOutAccount50a m) -- orderingAcct :: Text
  (orderingAcctFreetext m) -- orderingAcctDescription :: Text
  (m ^. sCode59a . bcAccount) -- beneficiaryAcct :: Text
  (beneficiaryAcctFreetext m) -- beneficiaryAcctDescription :: Text
  (convertAmount m) -- settled :: Double
  (m ^. sCode32A . vcsCurrency) -- currency :: Text
  (Text.pack $ m ^. sCode32A . vcsValueDate . unTime ) -- valueDate :: Text
  (Text.pack $ show $ m ^. sCode71A) -- details :: Text
-- | Combine the whole and fractional parts of the settlement amount
-- into one 'Double', going through 'Rational' to avoid intermediate
-- rounding.
convertAmount :: SWIFT -> Double
convertAmount m = fromRational $ wholeDollars + cents
  where
    wholeDollars :: Rational
    wholeDollars = fromIntegral $ m ^. sCode32A . vcsSettlementAmount . vWhole
    stupidCents :: Ratio Int
    stupidCents = m ^. sCode32A . vcsSettlementAmount . vPart
    cents :: Rational
    cents = fromIntegral (numerator stupidCents) % fromIntegral (denominator stupidCents)
-- | Extract the ordering account from whichever Code50a variant
-- (A, K, F-account, F-party-id) is populated, tried in that order.
-- Fails hard when none is present, since such a message is considered
-- corrupt upstream.  Rewritten from a four-deep case ladder to an
-- 'Alternative' chain, matching the style of 'orderingAcctFreetext'.
dirtyPickOutAccount50a :: SWIFT -> Text
dirtyPickOutAccount50a s =
  maybe (error "Invariant Error: invalid swift detected, no Code50a account") id $
        (s ^? (sCode50a . ocA_Account . unAccount))
    <|> (s ^? (sCode50a . ocK_Account . unAccount))
    <|> (s ^? (sCode50a . ocF_Account . unF50F_Account . unAccount))
    <|> (s ^? (sCode50a . ocF_Account . unF50F_PartyId . piIdentifier))
-- | Free-text lines attached to whichever Code50a variant is present,
-- joined with newlines; empty when none.
orderingAcctFreetext :: SWIFT -> Text
orderingAcctFreetext s = maybe "" (intercalate "\n") $
  (s ^? sCode50a . ocA_remainder . _Just) <|>
  (s ^? sCode50a . ocF_remainder . _Just) <|>
  (s ^? sCode50a . ocK_remainder . _Just)
-- | Beneficiary free-text details.
beneficiaryAcctFreetext :: SWIFT -> Text
beneficiaryAcctFreetext s = s ^. sCode59a . bcDetails
-- | Flatten every (transaction, ops) pair into wire-format
-- 'Transaction's.
convertTrans :: TransLog -> [Transaction]
convertTrans t = concat $ convertEntry <$> t
  where
    convertOp :: TransactionId -> OrderedOp -> Transaction
    convertOp (TransactionId tId) (OpId oId, Cmd from' to' amt' _) =
      Transaction (fromIntegral tId) (fromIntegral oId) from' to' (fromRational amt')
    convertEntry :: (TransactionId, [OrderedOp]) -> [Transaction]
    convertEntry (tId, oOps) = convertOp tId <$> oOps
| haroldcarr/juno | z-no-longer-used/src/Hop/Apps/Juno/Ledger.hs | bsd-3-clause | 6,009 | 1 | 20 | 1,186 | 2,006 | 1,104 | 902 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, DeriveGeneric #-}
module Lib
( someFunc
) where
import Data.Text as T
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Base64 as B64
import Data.ByteString as BS hiding (putStrLn)
import Data.ByteString.Char8 as Ch8 (pack)
import Data.ByteString.Lazy as BSL hiding (putStrLn)
import Codec.Binary.UTF8.String as UTF8 (decode)
import Network.Wreq
import Control.Lens
import Data.Configurator
import Prelude hiding (putStrLn, lookup)
import Data.String.Class (putStrLn)
import Data.Aeson.Lens
import Data.Aeson
import Data.Time.Clock (UTCTime)
import GHC.Generics
-- | A search result: the list of matching tweets.
data Timeline = Timeline { statuses :: [Tweet]
  } deriving (Generic)
-- | One tweet: its body text and creation timestamp, both raw strings
-- as returned by the Twitter search API.
data Tweet = Tweet { text :: String,
                     created_at :: String
  } deriving (Generic)
instance Show Timeline where
  -- Render each tweet followed by a blank-line separator.  Note: a
  -- trailing separator is emitted after the last tweet, matching the
  -- original recursive definition.
  show (Timeline ts) = concatMap (\t -> show t ++ "\n\n") ts
instance Show Tweet where
  -- A tweet renders as its body, a newline, then its creation date.
  show (Tweet body posted) = body ++ "\n" ++ posted
-- Aeson instances derived via Generic; field names match the JSON keys.
instance FromJSON Tweet
instance ToJSON Tweet
instance FromJSON Timeline
instance ToJSON Timeline
-- | Decode the HTTP response body into a 'Timeline', returning aeson's
-- error message on failure.
getTweetsFromResponse :: Response BSL.ByteString -> Either String Timeline
getTweetsFromResponse r = eitherDecode $ r ^. responseBody
-- | Twitter app credentials read from @config.local@.
data Credentials = Credentials { consumerKey :: String, consumerSecret :: String }
-- | Read the consumer key and secret from @config.local@; 'Nothing'
-- when either key is absent or the file is missing.  The two lookups
-- are independent, so they are combined applicatively instead of with
-- the original nested case analysis.
getCredentials :: IO (Maybe Credentials)
getCredentials = do
  (config, _) <- autoReload autoConfig [("config.local")]
  mKey <- lookup config "consumerKey"
  mSecret <- lookup config "consumerSecret"
  return (Credentials <$> mKey <*> mSecret)
-- | Base64-encode @key:secret@ for the OAuth2 Basic authorisation
-- header used by the client-credentials token request.
getBase64BearerCredentials :: Credentials -> C.ByteString
getBase64BearerCredentials Credentials {consumerKey = a, consumerSecret = b} = B64.encode $ C.pack (a ++ ":" ++ b)
-- | Search Twitter for @keyword@: obtain an app-only bearer token via
-- the OAuth2 client-credentials flow, then fetch and print matching
-- tweets (or the failure reason at each step).
someFunc :: String -> IO ()
someFunc keyword = do
  credentials <- getCredentials
  case credentials of
    Nothing -> putStrLn ("config.local doesn't exist or has invalid format" :: String)
    Just cr -> do
      -- Exchange consumer key/secret for a bearer token.
      let opts = defaults & header "Authorization" .~ [C.pack $ "Basic " ++ C.unpack (getBase64BearerCredentials cr)] & header "Content-Type" .~ ["application/x-www-form-urlencoded;charset=UTF-8"]
      r <- postWith opts "https://api.twitter.com/oauth2/token" (C.pack "grant_type=client_credentials")
      case access_token r of
        Nothing -> putStrLn ("Nothing 1" :: String)
        Just token -> do
          -- Authenticated search using the bearer token.
          let n_opts = defaults & header "Authorization" .~ [C.pack $ "Bearer " ++ T.unpack token]
          d <- getWith n_opts ("https://api.twitter.com/1.1/search/tweets.json?q=" ++ keyword)
          case getTweetsFromResponse d of
            Left str -> putStrLn str
            Right xs -> putStrLn $ show xs
  where
    -- Pull the access_token string out of the token-endpoint JSON.
    access_token _r = _r ^? responseBody . key "access_token" . _String
| ashgaliyev/tweeter-viewer | src/Lib.hs | bsd-3-clause | 3,026 | 0 | 25 | 636 | 892 | 475 | 417 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-|
Module : $Header$
Copyright : (c) 2016 Deakin Software & Technology Innovation Lab
License : BSD3
Maintainer : Shannon Pace <shannon.pace@deakin.edu.au>
Stability : unstable
Portability : portable
Utilities for testing Eclogues functionality.
-}
module TestUtils where
import Eclogues.Prelude
import Eclogues.API (AbsoluteURI, JobError (..), parseAbsoluteURI)
import qualified Eclogues.Job as Job
import Eclogues.Monitoring.Cluster (Cluster, NodeResources(..))
import Eclogues.State.Types
import qualified Eclogues.State.Monad as ES
import Eclogues.State (createBox, createJob)
import Control.Lens (at, only)
import Control.Monad.State (State)
import Data.Default.Generics (def)
import qualified Data.HashMap.Strict as HM
import Data.Scientific.Suspicious (Sustific)
import qualified Data.Set as Set
import Data.UUID (nil)
import Units.Micro (of')
import Units.Micro.SI (Byte (..), Hertz (..), Mega (..), Second (..))
import Test.Hspec (Expectation, shouldSatisfy)
-- | A scheduler action run against a transitionary state, which may
-- fail with a 'JobError'.
type Scheduler a = ExceptT JobError (State ES.TransitionaryState) a
-- | The ways a test run can fail or mismatch expectations.
data TestError = EncounteredError JobError
               | JobNotFound Job.Name
               | RevDepNotFound Job.Name
               | ExpectedError JobError
               | UnexpectedError TestError
               deriving (Show)
type EitherError a = Either TestError a
-- | Promote a 'Maybe' to an 'Either', using the first argument as the
-- 'Left' value when the 'Maybe' is empty.
maybeToEither :: a -> Maybe b -> Either a b
maybeToEither _ (Just v) = Right v
maybeToEither fallback Nothing = Left fallback
-- | Build 'Job.Resources' from disk (MB), ram (MB), cpu (MHz) and time
-- (seconds); errors on invalid combinations since tests control inputs.
mkResources :: Sustific -> Sustific -> Sustific -> Sustific -> Job.Resources
mkResources d r c t = fromMaybe (error "invalid test resources") $
  Job.mkResources (d `of'` Mega Byte) (r `of'` Mega Byte) (c `of'` Mega Hertz) (t `of'` Second)
-- Canned resource profiles used throughout the tests.
halfResources, fullResources, overResources :: Job.Resources
halfResources = mkResources 5000 1024 1 1
fullResources = mkResources 10000 2048 2 1
overResources = mkResources 20000 4096 4 1
-- | View a job's resource demands as node capacity.
nodeResources :: Job.Resources -> NodeResources
nodeResources res = NodeResources (res ^. Job.disk) (res ^. Job.ram) (res ^. Job.cpu)
-- | Default app state pre-seeded with the echo container under the nil
-- UUID.
defWithContainer :: AppState
defWithContainer = def & containers .~ HM.singleton echoContainer nil
-- | Run a scheduler action from 'defWithContainer', capturing either
-- the error or the final transitionary state.
schedule :: Scheduler a -> EitherError ES.TransitionaryState
schedule = packageResult . ES.runState defWithContainer . runExceptT
-- | Discard the action's value: keep the state on success, wrap the
-- 'JobError' on failure.
packageResult :: (Either JobError a, ES.TransitionaryState) -> EitherError ES.TransitionaryState
packageResult (res, st) = bimap EncounteredError (const st) res
-- | Hspec assertion: the result is a 'Right' for which the predicate
-- returns @Right True@.
shouldHave :: (HasAppState s, Show s) => EitherError s -> (EitherError s -> EitherError Bool) -> Expectation
shouldHave result f = result `shouldSatisfy` either (const False) id . f
-- | Create a job under the nil UUID with no cluster.
createJob' :: Job.Spec -> Scheduler ()
createJob' = createJob nil Nothing
-- | Create a job under the nil UUID against the given cluster.
createWithCluster :: Cluster -> Job.Spec -> Scheduler ()
createWithCluster cluster = createJob nil (Just cluster)
-- | Create a box from just a name.
createBox' :: Job.Name -> Scheduler ()
createBox' = createBox . Job.mkBoxSpec
-- | Container id shared by every test job spec.
echoContainer :: Job.ContainerId
echoContainer = Job.ContainerId $ forceName "echo"
-- | A dependency-free job with the standard half-node resources.
isolatedJob' :: Job.Name -> Job.Spec
isolatedJob' x = Job.mkSpec x echoContainer [] halfResources False mempty
-- | A dependency-free job with explicit resources.
isolatedJob :: Job.Name -> Job.Resources -> Job.Spec
isolatedJob x res = Job.mkSpec x echoContainer [] res False mempty
-- | A half-resource job depending on the named jobs (dependency flag
-- False -- semantics per 'Job.Dependency').
dependentJob' :: Job.Name -> [Job.Name] -> Job.Spec
dependentJob' name deps = dependentJob name ((`Job.Dependency` False) <$> deps) halfResources
-- | A job with explicit dependencies and resources.
dependentJob :: Job.Name -> [Job.Dependency] -> Job.Resources -> Job.Spec
dependentJob name deps res = isolatedJob name res & Job.dependsOn .~ listToDeps deps
-- | True when both effect collections of the transitionary state
-- (presumably scheduler commands and messages -- confirm field meaning
-- in 'ES.TransitionaryState') are empty.
noSideEffects :: EitherError ES.TransitionaryState -> EitherError Bool
noSideEffects = fmap $ \(ES.TransitionaryState _ cs mp) -> null cs && null mp
-- | True when the first effect collection alone is empty.
noSchedulingSideEffects :: EitherError ES.TransitionaryState -> EitherError Bool
noSchedulingSideEffects = fmap $ \(ES.TransitionaryState _ cs _) -> null cs
-- | Look up a job's status by name, if present.
getJob :: (HasAppState s) => Job.Name -> s -> Maybe Job.Status
getJob jName aState = aState ^? job jName
-- | A reusable predicate over any state carrying an 'AppState'.
type StatePredicate = forall s. (HasAppState s) => EitherError s -> EitherError Bool
-- | No node of the given name exists.
noEntity :: Job.Name -> StatePredicate
noEntity n = fmap (not . has (nodes . ix n))
-- | The named job exists and is in exactly the given stage.
jobInStage :: Job.Name -> Job.Stage -> StatePredicate
jobInStage jName jState result = has (Job.stage . only jState) <$> (eitherGetJob jName =<< result)
-- | 'getJob' lifted into 'EitherError', failing with 'JobNotFound'.
eitherGetJob :: HasAppState s => Job.Name -> s -> Either TestError Job.Status
eitherGetJob n = maybeToEither (JobNotFound n) . getJob n
-- | The named job depends on exactly the given set of names.
jobWithDeps :: Job.Name -> [Job.Name] -> StatePredicate
jobWithDeps n ds result = has (Job.dependsOn . Job.dependenciesMap . to (Set.fromList . HM.keys) . only (Set.fromList ds)) <$> (eitherGetJob n =<< result)
-- | The named job has exactly the given reverse dependencies; an empty
-- list asserts there is no entry at all.
jobWithRevDeps :: Job.Name -> [Job.Name] -> StatePredicate
jobWithRevDeps jName rds = fmap . has $ revDeps . at jName . only (bool (Just $ namesToDeps rds) Nothing $ null rds)
-- | Build a dependency map from a dependency list.
listToDeps :: [Job.Dependency] -> Job.Dependencies
listToDeps = Job.Dependencies . HM.fromList . fmap (\(Job.Dependency n o) -> (n, o))
-- | Dependencies on the given names with the flag set to False.
namesToDeps :: [Job.Name] -> Job.Dependencies
namesToDeps = listToDeps . fmap (`Job.Dependency` False)
-- | No reverse-dependency entry for the name.
noRevDep :: Job.Name -> StatePredicate
noRevDep n = jobWithRevDeps n []
-- | No container of the given id exists.
noContainer :: Job.ContainerId -> StatePredicate
noContainer n = fmap (not . has (containers . ix n))
-- | The run failed with exactly the given 'JobError'; any other
-- failure or a success is surfaced as a descriptive 'TestError'.
producedError :: JobError -> StatePredicate
producedError jError (Left e) = case e of
  EncounteredError ex -> Right (ex == jError)
  ex -> Left (UnexpectedError ex)
producedError jError (Right _) = Left (ExpectedError jError)
-- | The named job has the given satisfiability.
satisfiability :: Job.Name -> Job.Satisfiability -> StatePredicate
satisfiability jName jSatis aState = has (job jName . Job.satis . only jSatis) <$> aState
-- | The named job lists the given file.
file :: Job.Name -> Job.FileId -> StatePredicate
file jName fileId = fmap . has $ nodes . ix jName . Job.files . ix fileId
-- | The named box is in the given sealed state.
boxInStage :: Job.Name -> Job.Sealed -> StatePredicate
boxInStage name st = fmap . has $ nodes . ix name . _ABox . Job.sealed . only st
-- | Partial: make a 'Job.Name' from text, erroring on invalid input
-- (acceptable in test code where names are literals).
forceName :: Text -> Job.Name
forceName jName = fromMaybe (error $ "invalid test name " ++ show jName) $ Job.mkName jName
-- | A known-good absolute URI for tests.
exampleBase :: AbsoluteURI
exampleBase = fromMaybe (error "bad example base") $ parseAbsoluteURI "http://example.com"
| rimmington/eclogues | eclogues-impl/test/TestUtils.hs | bsd-3-clause | 6,117 | 0 | 13 | 996 | 2,032 | 1,072 | 960 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
module SnapshotVersions.Output where
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.State
import Data.Monoid
import SnapshotVersions.CmdLine
import System.IO
-- | Mutable bits of the output pipeline: current indentation prefix,
-- selected output format, and whether debug output is enabled.
data OutputState =
  OutputState { osIndent :: String
              , osOutputType :: OutputType
              , osDebug :: Bool
              }
-- | Output actions: a 'StateT' carrying 'OutputState' over the
-- underlying monad.
newtype OutputMonad m a =
  OutputMonad { om :: StateT OutputState m a }
  deriving (Applicative, Functor, Monad, MonadIO, MonadState OutputState, MonadTrans)
-- | Position of an element within a list: first, interior, or last.
data ListPos = ListStart
             | ListMiddle
             | ListEnd
             deriving (Show, Eq)

-- | Classify index @i@ within a list of length @len@.  Index 0 is
-- always 'ListStart', even in a singleton list (matches the original).
toListPos :: Int -> Int -> ListPos
toListPos i len
  | i == 0      = ListStart
  | i < len - 1 = ListMiddle
  | otherwise   = ListEnd
-- | Run an 'OutputMonad' action with the given output type and debug
-- flag, starting from an empty indent; the final state is discarded.
withOutput :: Monad m => OutputType -> Bool -> OutputMonad m () -> m ()
withOutput typ dbg fn = do
  _ <- runStateT (om fn) $ OutputState { osIndent = ""
                                       , osOutputType = typ
                                       , osDebug = dbg
                                       }
  return ()
-- | Operations available to code emitting user-facing output.
class Monad m => MonadOutput m where
  -- | Run the action with output indented one level deeper.
  indented :: m a -> m a
  -- | Emit only when debugging is enabled.
  debug :: String -> m ()
  info :: String -> m ()
  logError :: String -> m ()
  logWarning :: String -> m ()
  -- | Bracket the result listing; 'result' emits one entry.
  resultStart :: m ()
  resultEnd :: m ()
  result :: ListPos -> String -> String -> m ()
instance (Monad m, MonadIO m) => MonadOutput (OutputMonad m) where
  -- Widen the indent for the duration of the action, then restore the
  -- saved state (so nested 'indented' calls unwind correctly).
  indented fn = do
    saved <- get
    modify (\os -> os { osIndent = (osIndent os) <> "  " })
    r <- fn
    put saved
    return r
  debug s = get >>= \OutputState{..} ->
    if osDebug
    then liftIO $ putStrLn $ osIndent <> s
    else return ()
  -- Informational text goes to stderr unless the plain Default format
  -- is selected, keeping machine-readable stdout clean.
  info s = get >>= \OutputState{..} ->
    case osOutputType of
      Default -> liftIO $ putStrLn $ osIndent <> s
      _ -> liftIO $ hPutStrLn stderr $ osIndent <> s
  logError s = get >>= \OutputState{..} ->
    liftIO $ hPutStrLn stderr $ osIndent <> s
  logWarning s = get >>= \OutputState{..} ->
    liftIO $ hPutStrLn stderr $ osIndent <> "Warning: " <> s
  resultStart = get >>= \OutputState{..} ->
    case osOutputType of
      Default -> liftIO $ putStrLn "Results:"
      CabalConstraints -> liftIO $ putStr "constraints: "
      _ -> return ()
  resultEnd = return ()
  -- One package/version entry, formatted per output type; the
  -- CabalConstraints format needs list position to manage separators
  -- and continuation-line indentation.
  result pos name ver = get >>= \OutputState{..} ->
    case osOutputType of
      Default -> liftIO $ putStrLn $ name <> ": " <> ver
      StackYaml -> liftIO $ putStrLn $ name <> "-" <> ver
      CabalConstraints ->
        case pos of
          ListStart -> liftIO $ putStrLn $ name <> " ==" <> ver <> ","
          ListMiddle -> liftIO $ putStrLn $ "\t\t\t " <> name <> " ==" <> ver <> ","
          ListEnd -> liftIO $ putStrLn $ "\t\t\t " <> name <> " ==" <> ver
| vigoo/snapshot-versions | src/SnapshotVersions/Output.hs | bsd-3-clause | 2,940 | 0 | 19 | 935 | 958 | 494 | 464 | 75 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Raven.DataBase
( ensureUsers
, checkUser
, getAllUsers
, Raven.DataBase.addUser
, deleteUser
, updateUsersAccess
, updateUsersPassword
)where
import Database.MongoDB
import Crypto.Hash
import Data.Text (Text)
import qualified Data.Text as Text
import Data.List (foldl')
-- |Default root info
root :: Document
root = [ "username" := String "root"
, "password" :=
String (Text.pack (show (hashlazy "entry" :: Digest SHA3_512)))
, "rootAccess" := Bool True
]
-- |Makes sure there is a root user.
-- If there isn't, it is created
ensureUsers :: Pipe -> IO ()
ensureUsers p = access p master "raven"
(findOne (select ["username" := String "root"] "users") >>=
(\u -> case u of
Nothing -> insert_ "users" root
_ -> return ()))
-- |Checks if a user exists, return rootAccess and id if so.
-- Returns Nothing if user not fount, Just Right if an error occurs
checkUser :: Pipe -> Text -> Text -> IO (Maybe (Either (Text,Bool) String))
checkUser p name pass = access p master "raven"
(findOne (select ["username" := String name,"password" := String pass]
"users") >>=
(\doc -> case doc of
Just doc' -> case (doc' !? "_id",doc' !? "rootAccess") of
(Just (Oid _ id'),Just acc) -> return $ Just $ Left (Text.pack (show id'),acc)
_ -> return $ Just $ Right $
"User data corrupted for " ++ Text.unpack name
++ ", Data: " ++ show doc
_ -> return Nothing))
-- |Get all user information (formatted)
getAllUsers :: Pipe -> IO String
getAllUsers p = access p master "raven"
(find (select [] "users") >>= rest >>=
return . foldl' handleUser "" >>=
return . Text.unpack)
where
handleUser acc vl =
case (vl !? "username",vl !? "rootAccess") of
(Just name,Just (rootAcc :: Bool)) ->
Text.concat [ name
, ": "
, Text.pack $ show rootAcc
, "\n"
, acc
]
_ -> Text.concat [ "User data corrupted\n",acc]
-- |Add a user to the database and returns outcome
addUser :: Pipe -> Text -> Text -> Bool -> IO String
addUser p name pswd rootAcc = access p master "raven"
(find (select ["username" := String name] "users") >>= rest >>=
(\us -> if null us
then insert "users" [ "username" := String name
, "password" := String pswd
, "rootAccess" := Bool rootAcc
] >>
return "User created"
else return "User already exists"))
-- |Delete a user from the database and return the id if successful
deleteUser :: Pipe -> Text -> IO (Maybe Text)
deleteUser p name = access p master "raven"
(findOne (select ["username" := String name] "users") >>=
(\user -> case user of
Just user' -> delete (select ["username" := String name] "users") >>
case user' !? "_id" of
Just (Oid _ id') -> return $ Just $ Text.pack $ show id'
_ -> return Nothing
_ -> return Nothing))
-- |Change a user's root access and return the id if successful
updateUsersAccess :: Pipe -> Text -> Bool -> IO (Maybe Text)
updateUsersAccess p name rAcc = access p master "raven"
(findOne (select ["username" := String name] "users") >>=
(\user -> case user of
Just user' ->
case (user' !? "_id",user' !? "password") of
(Just i@(Oid _ id'),Just pswd) ->
replace (select [ "_id" := ObjId i] "users")
[ "_id" := ObjId i
, "username" := String name
, "password" := String pswd
, "rootAccess" := Bool rAcc
] >>
return (Just (Text.pack (show id')))
_ -> return Nothing
_ -> return Nothing))
-- |Change a user's password and return Just if successful (use username)
updateUsersPassword :: Pipe -> Text -> Text -> IO (Maybe ())
updateUsersPassword p name pswd = access p master "raven"
(findOne (select ["username" := String name] "users") >>=
(\user -> case user of
Just user' ->
case (user' !? "_id",user' !? "rootAccess") of
(Just id',Just rAcc) ->
replace (select [ "_id" := ObjId id'] "users")
[ "_id" := ObjId id'
, "username" := String name
, "password" := String pswd
, "rootAccess" := Bool rAcc
] >>
return (Just ())
_ -> return Nothing
_ -> return Nothing))
| denumerate/raven | src/Raven/DataBase.hs | bsd-3-clause | 4,609 | 0 | 23 | 1,435 | 1,451 | 740 | 711 | 101 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
-- |
-- Module : Crypto.PubKey.ECC.types
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : Experimental
-- Portability : Excellent
--
-- references:
-- <https://tools.ietf.org/html/rfc5915>
--
module Crypto.PubKey.ECC.Types
( Curve(..)
, Point(..)
, PublicPoint
, PrivateNumber
, CurveBinary(..)
, CurvePrime(..)
, common_curve
, ecc_fx
, ecc_p
, CurveCommon(..)
-- * recommended curves definition
, CurveName(..)
, getCurveByName
) where
import Data.Data
import Crypto.Internal.Imports
-- | Define either a binary curve or a prime curve.
data Curve = CurveF2m CurveBinary -- ^ 𝔽(2^m)
           | CurveFP CurvePrime   -- ^ 𝔽p
           deriving (Show,Read,Eq,Data,Typeable)

-- | ECC Public Point
type PublicPoint = Point

-- | ECC Private Number
type PrivateNumber = Integer

-- | Define a point on a curve.
-- NOTE(review): coordinates appear to be plain (x, y) integers — presumably
-- affine coordinates; confirm against the arithmetic module.
data Point = Point Integer Integer
           | PointO -- ^ Point at Infinity
           deriving (Show,Read,Eq,Data,Typeable)

instance NFData Point where
    -- Forcing both coordinates fully evaluates a point ('Integer' has no
    -- deeper structure to force).
    rnf (Point x y) = x `seq` y `seq` ()
    rnf PointO = ()
-- | Define an elliptic curve in 𝔽(2^m).
-- The first parameter is the Integer representation of the irreducible polynomial f(x).
data CurveBinary = CurveBinary Integer CurveCommon
                 deriving (Show,Read,Eq,Data,Typeable)

instance NFData CurveBinary where
    rnf (CurveBinary i cc) = i `seq` cc `seq` ()

-- | Define an elliptic curve in 𝔽p.
-- The first parameter is the Prime Number.
data CurvePrime = CurvePrime Integer CurveCommon
                deriving (Show,Read,Eq,Data,Typeable)

-- | Added for consistency: 'CurveBinary' already had an 'NFData' instance
-- while 'CurvePrime' (and therefore 'CurveFP' curves) did not.
instance NFData CurvePrime where
    rnf (CurvePrime p cc) = p `seq` cc `seq` ()
-- | Parameters in common between binary and prime curves.
common_curve :: Curve -> CurveCommon
common_curve curve =
    case curve of
        CurveF2m (CurveBinary _ cc) -> cc
        CurveFP  (CurvePrime  _ cc) -> cc
-- | Irreducible polynomial representing the characteristic of a CurveBinary.
ecc_fx :: CurveBinary -> Integer
ecc_fx (CurveBinary poly _) = poly

-- | Prime number representing the characteristic of a CurvePrime.
ecc_p :: CurvePrime -> Integer
ecc_p (CurvePrime prime _) = prime
-- | Define common parameters in a curve definition
-- of the form: y^2 = x^3 + ax + b.
--
-- NOTE(review): binary (𝔽(2^m)) curves conventionally use a different
-- Weierstrass form; these fields are presumably shared structurally by both
-- curve kinds — confirm against the curve arithmetic.
data CurveCommon = CurveCommon
    { ecc_a :: Integer -- ^ curve parameter a
    , ecc_b :: Integer -- ^ curve parameter b
    , ecc_g :: Point   -- ^ base point
    , ecc_n :: Integer -- ^ order of G
    , ecc_h :: Integer -- ^ cofactor
    } deriving (Show,Read,Eq,Data,Typeable)
-- | Define names for known recommended curves.
--
-- Constructor order is significant for the derived 'Enum' and 'Ord'
-- instances; append new names at the end rather than inserting.
data CurveName =
    SEC_p112r1
  | SEC_p112r2
  | SEC_p128r1
  | SEC_p128r2
  | SEC_p160k1
  | SEC_p160r1
  | SEC_p160r2
  | SEC_p192k1
  | SEC_p192r1 -- aka prime192v1
  | SEC_p224k1
  | SEC_p224r1
  | SEC_p256k1
  | SEC_p256r1 -- aka prime256v1
  | SEC_p384r1
  | SEC_p521r1
  | SEC_t113r1
  | SEC_t113r2
  | SEC_t131r1
  | SEC_t131r2
  | SEC_t163k1
  | SEC_t163r1
  | SEC_t163r2
  | SEC_t193r1
  | SEC_t193r2
  | SEC_t233k1 -- aka NIST K-233
  | SEC_t233r1
  | SEC_t239k1
  | SEC_t283k1
  | SEC_t283r1
  | SEC_t409k1
  | SEC_t409r1
  | SEC_t571k1
  | SEC_t571r1
  deriving (Show,Read,Eq,Ord,Enum,Data,Typeable)
{-
curvesOIDs :: [ (CurveName, [Integer]) ]
curvesOIDs =
[ (SEC_p112r1, [1,3,132,0,6])
, (SEC_p112r2, [1,3,132,0,7])
, (SEC_p128r1, [1,3,132,0,28])
, (SEC_p128r2, [1,3,132,0,29])
, (SEC_p160k1, [1,3,132,0,9])
, (SEC_p160r1, [1,3,132,0,8])
, (SEC_p160r2, [1,3,132,0,30])
, (SEC_p192k1, [1,3,132,0,31])
, (SEC_p192r1, [1,2,840,10045,3,1,1])
, (SEC_p224k1, [1,3,132,0,32])
, (SEC_p224r1, [1,3,132,0,33])
, (SEC_p256k1, [1,3,132,0,10])
, (SEC_p256r1, [1,2,840,10045,3,1,7])
, (SEC_p384r1, [1,3,132,0,34])
, (SEC_p521r1, [1,3,132,0,35])
, (SEC_t113r1, [1,3,132,0,4])
, (SEC_t113r2, [1,3,132,0,5])
, (SEC_t131r1, [1,3,132,0,22])
, (SEC_t131r2, [1,3,132,0,23])
, (SEC_t163k1, [1,3,132,0,1])
, (SEC_t163r1, [1,3,132,0,2])
, (SEC_t163r2, [1,3,132,0,15])
, (SEC_t193r1, [1,3,132,0,24])
, (SEC_t193r2, [1,3,132,0,25])
, (SEC_t233k1, [1,3,132,0,26])
, (SEC_t233r1, [1,3,132,0,27])
, (SEC_t239k1, [1,3,132,0,3])
, (SEC_t283k1, [1,3,132,0,16])
, (SEC_t283r1, [1,3,132,0,17])
, (SEC_t409k1, [1,3,132,0,36])
, (SEC_t409r1, [1,3,132,0,37])
, (SEC_t571k1, [1,3,132,0,38])
, (SEC_t571r1, [1,3,132,0,39])
]
-}
-- | Get the curve definition associated with a recommended known curve name.
--
-- NOTE(review): the constants below are transcribed domain parameters
-- (presumably from the SEC 2 recommended-curves document, given the names) —
-- any edit must be checked digit-for-digit against the specification.

-- Prime-field (𝔽p) curves: 'CurveFP'.
getCurveByName SEC_p112r1 = CurveFP $ CurvePrime
    0xdb7c2abf62e35e668076bead208b
    (CurveCommon
        { ecc_a = 0xdb7c2abf62e35e668076bead2088
        , ecc_b = 0x659ef8ba043916eede8911702b22
        , ecc_g = Point 0x09487239995a5ee76b55f9c2f098
                        0xa89ce5af8724c0a23e0e0ff77500
        , ecc_n = 0xdb7c2abf62e35e7628dfac6561c5
        , ecc_h = 1
        })
getCurveByName SEC_p112r2 = CurveFP $ CurvePrime
    0xdb7c2abf62e35e668076bead208b
    (CurveCommon
        { ecc_a = 0x6127c24c05f38a0aaaf65c0ef02c
        , ecc_b = 0x51def1815db5ed74fcc34c85d709
        , ecc_g = Point 0x4ba30ab5e892b4e1649dd0928643
                        0xadcd46f5882e3747def36e956e97
        , ecc_n = 0x36df0aafd8b8d7597ca10520d04b
        , ecc_h = 4
        })
getCurveByName SEC_p128r1 = CurveFP $ CurvePrime
    0xfffffffdffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0xfffffffdfffffffffffffffffffffffc
        , ecc_b = 0xe87579c11079f43dd824993c2cee5ed3
        , ecc_g = Point 0x161ff7528b899b2d0c28607ca52c5b86
                        0xcf5ac8395bafeb13c02da292dded7a83
        , ecc_n = 0xfffffffe0000000075a30d1b9038a115
        , ecc_h = 1
        })
getCurveByName SEC_p128r2 = CurveFP $ CurvePrime
    0xfffffffdffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0xd6031998d1b3bbfebf59cc9bbff9aee1
        , ecc_b = 0x5eeefca380d02919dc2c6558bb6d8a5d
        , ecc_g = Point 0x7b6aa5d85e572983e6fb32a7cdebc140
                        0x27b6916a894d3aee7106fe805fc34b44
        , ecc_n = 0x3fffffff7fffffffbe0024720613b5a3
        , ecc_h = 4
        })
getCurveByName SEC_p160k1 = CurveFP $ CurvePrime
    0x00fffffffffffffffffffffffffffffffeffffac73
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000007
        , ecc_g = Point 0x003b4c382ce37aa192a4019e763036f4f5dd4d7ebb
                        0x00938cf935318fdced6bc28286531733c3f03c4fee
        , ecc_n = 0x0100000000000000000001b8fa16dfab9aca16b6b3
        , ecc_h = 1
        })
getCurveByName SEC_p160r1 = CurveFP $ CurvePrime
    0x00ffffffffffffffffffffffffffffffff7fffffff
    (CurveCommon
        { ecc_a = 0x00ffffffffffffffffffffffffffffffff7ffffffc
        , ecc_b = 0x001c97befc54bd7a8b65acf89f81d4d4adc565fa45
        , ecc_g = Point 0x004a96b5688ef573284664698968c38bb913cbfc82
                        0x0023a628553168947d59dcc912042351377ac5fb32
        , ecc_n = 0x0100000000000000000001f4c8f927aed3ca752257
        , ecc_h = 1
        })
getCurveByName SEC_p160r2 = CurveFP $ CurvePrime
    0x00fffffffffffffffffffffffffffffffeffffac73
    (CurveCommon
        { ecc_a = 0x00fffffffffffffffffffffffffffffffeffffac70
        , ecc_b = 0x00b4e134d3fb59eb8bab57274904664d5af50388ba
        , ecc_g = Point 0x0052dcb034293a117e1f4ff11b30f7199d3144ce6d
                        0x00feaffef2e331f296e071fa0df9982cfea7d43f2e
        , ecc_n = 0x0100000000000000000000351ee786a818f3a1a16b
        , ecc_h = 1
        })
getCurveByName SEC_p192k1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffffffffffeffffee37
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000003
        , ecc_g = Point 0xdb4ff10ec057e9ae26b07d0280b7f4341da5d1b1eae06c7d
                        0x9b2f2f6d9c5628a7844163d015be86344082aa88d95e2f9d
        , ecc_n = 0xfffffffffffffffffffffffe26f2fc170f69466a74defd8d
        , ecc_h = 1
        })
getCurveByName SEC_p192r1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffeffffffffffffffff
    (CurveCommon
        { ecc_a = 0xfffffffffffffffffffffffffffffffefffffffffffffffc
        , ecc_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
        , ecc_g = Point 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
                        0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
        , ecc_n = 0xffffffffffffffffffffffff99def836146bc9b1b4d22831
        , ecc_h = 1
        })
getCurveByName SEC_p224k1 = CurveFP $ CurvePrime
    0x00fffffffffffffffffffffffffffffffffffffffffffffffeffffe56d
    (CurveCommon
        { ecc_a = 0x0000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x0000000000000000000000000000000000000000000000000000000005
        , ecc_g = Point 0x00a1455b334df099df30fc28a169a467e9e47075a90f7e650eb6b7a45c
                        0x007e089fed7fba344282cafbd6f7e319f7c0b0bd59e2ca4bdb556d61a5
        , ecc_n = 0x010000000000000000000000000001dce8d2ec6184caf0a971769fb1f7
        , ecc_h = 1
        })
getCurveByName SEC_p224r1 = CurveFP $ CurvePrime
    0xffffffffffffffffffffffffffffffff000000000000000000000001
    (CurveCommon
        { ecc_a = 0xfffffffffffffffffffffffffffffffefffffffffffffffffffffffe
        , ecc_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
        , ecc_g = Point 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
                        0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34
        , ecc_n = 0xffffffffffffffffffffffffffff16a2e0b8f03e13dd29455c5c2a3d
        , ecc_h = 1
        })
getCurveByName SEC_p256k1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
    (CurveCommon
        { ecc_a = 0x0000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x0000000000000000000000000000000000000000000000000000000000000007
        , ecc_g = Point 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
                        0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
        , ecc_n = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
        , ecc_h = 1
        })
getCurveByName SEC_p256r1 = CurveFP $ CurvePrime
    0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0xffffffff00000001000000000000000000000000fffffffffffffffffffffffc
        , ecc_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
        , ecc_g = Point 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
                        0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5
        , ecc_n = 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551
        , ecc_h = 1
        })
getCurveByName SEC_p384r1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000ffffffff
    (CurveCommon
        { ecc_a = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000fffffffc
        , ecc_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
        , ecc_g = Point 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
                        0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f
        , ecc_n = 0xffffffffffffffffffffffffffffffffffffffffffffffffc7634d81f4372ddf581a0db248b0a77aecec196accc52973
        , ecc_h = 1
        })
getCurveByName SEC_p521r1 = CurveFP $ CurvePrime
    0x01ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0x01fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc
        , ecc_b = 0x0051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
        , ecc_g = Point 0x00c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
                        0x011839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650
        , ecc_n = 0x01fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409
        , ecc_h = 1
        })
-- Binary-field (𝔽(2^m)) curves: 'CurveF2m'; the first argument is the
-- irreducible polynomial f(x) (see 'CurveBinary').
getCurveByName SEC_t113r1 = CurveF2m $ CurveBinary
    0x020000000000000000000000000201
    (CurveCommon
        { ecc_a = 0x003088250ca6e7c7fe649ce85820f7
        , ecc_b = 0x00e8bee4d3e2260744188be0e9c723
        , ecc_g = Point 0x009d73616f35f4ab1407d73562c10f
                        0x00a52830277958ee84d1315ed31886
        , ecc_n = 0x0100000000000000d9ccec8a39e56f
        , ecc_h = 2
        })
getCurveByName SEC_t113r2 = CurveF2m $ CurveBinary
    0x020000000000000000000000000201
    (CurveCommon
        { ecc_a = 0x00689918dbec7e5a0dd6dfc0aa55c7
        , ecc_b = 0x0095e9a9ec9b297bd4bf36e059184f
        , ecc_g = Point 0x01a57a6a7b26ca5ef52fcdb8164797
                        0x00b3adc94ed1fe674c06e695baba1d
        , ecc_n = 0x010000000000000108789b2496af93
        , ecc_h = 2
        })
getCurveByName SEC_t131r1 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000010d
    (CurveCommon
        { ecc_a = 0x07a11b09a76b562144418ff3ff8c2570b8
        , ecc_b = 0x0217c05610884b63b9c6c7291678f9d341
        , ecc_g = Point 0x0081baf91fdf9833c40f9c181343638399
                        0x078c6e7ea38c001f73c8134b1b4ef9e150
        , ecc_n = 0x0400000000000000023123953a9464b54d
        , ecc_h = 2
        })
getCurveByName SEC_t131r2 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000010d
    (CurveCommon
        { ecc_a = 0x03e5a88919d7cafcbf415f07c2176573b2
        , ecc_b = 0x04b8266a46c55657ac734ce38f018f2192
        , ecc_g = Point 0x0356dcd8f2f95031ad652d23951bb366a8
                        0x0648f06d867940a5366d9e265de9eb240f
        , ecc_n = 0x0400000000000000016954a233049ba98f
        , ecc_h = 2
        })
getCurveByName SEC_t163k1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000c9
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000001
        , ecc_b = 0x000000000000000000000000000000000000000001
        , ecc_g = Point 0x02fe13c0537bbc11acaa07d793de4e6d5e5c94eee8
                        0x0289070fb05d38ff58321f2e800536d538ccdaa3d9
        , ecc_n = 0x04000000000000000000020108a2e0cc0d99f8a5ef
        , ecc_h = 2
        })
getCurveByName SEC_t163r1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000c9
    (CurveCommon
        { ecc_a = 0x07b6882caaefa84f9554ff8428bd88e246d2782ae2
        , ecc_b = 0x0713612dcddcb40aab946bda29ca91f73af958afd9
        , ecc_g = Point 0x0369979697ab43897789566789567f787a7876a654
                        0x00435edb42efafb2989d51fefce3c80988f41ff883
        , ecc_n = 0x03ffffffffffffffffffff48aab689c29ca710279b
        , ecc_h = 2
        })
getCurveByName SEC_t163r2 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000c9
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000001
        , ecc_b = 0x020a601907b8c953ca1481eb10512f78744a3205fd
        , ecc_g = Point 0x03f0eba16286a2d57ea0991168d4994637e8343e36
                        0x00d51fbc6c71a0094fa2cdd545b11c5c0c797324f1
        , ecc_n = 0x040000000000000000000292fe77e70c12a4234c33
        , ecc_h = 2
        })
getCurveByName SEC_t193r1 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000008001
    (CurveCommon
        { ecc_a = 0x0017858feb7a98975169e171f77b4087de098ac8a911df7b01
        , ecc_b = 0x00fdfb49bfe6c3a89facadaa7a1e5bbc7cc1c2e5d831478814
        , ecc_g = Point 0x01f481bc5f0ff84a74ad6cdf6fdef4bf6179625372d8c0c5e1
                        0x0025e399f2903712ccf3ea9e3a1ad17fb0b3201b6af7ce1b05
        , ecc_n = 0x01000000000000000000000000c7f34a778f443acc920eba49
        , ecc_h = 2
        })
getCurveByName SEC_t193r2 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000008001
    (CurveCommon
        { ecc_a = 0x0163f35a5137c2ce3ea6ed8667190b0bc43ecd69977702709b
        , ecc_b = 0x00c9bb9e8927d4d64c377e2ab2856a5b16e3efb7f61d4316ae
        , ecc_g = Point 0x00d9b67d192e0367c803f39e1a7e82ca14a651350aae617e8f
                        0x01ce94335607c304ac29e7defbd9ca01f596f927224cdecf6c
        , ecc_n = 0x010000000000000000000000015aab561b005413ccd4ee99d5
        , ecc_h = 2
        })
getCurveByName SEC_t233k1 = CurveF2m $ CurveBinary
    0x020000000000000000000000000000000000000004000000000000000001
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x017232ba853a7e731af129f22ff4149563a419c26bf50a4c9d6eefad6126
                        0x01db537dece819b7f70f555a67c427a8cd9bf18aeb9b56e0c11056fae6a3
        , ecc_n = 0x008000000000000000000000000000069d5bb915bcd46efb1ad5f173abdf
        , ecc_h = 4
        })
getCurveByName SEC_t233r1 = CurveF2m $ CurveBinary
    0x020000000000000000000000000000000000000004000000000000000001
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x0066647ede6c332c7f8c0923bb58213b333b20e9ce4281fe115f7d8f90ad
        , ecc_g = Point 0x00fac9dfcbac8313bb2139f1bb755fef65bc391f8b36f8f8eb7371fd558b
                        0x01006a08a41903350678e58528bebf8a0beff867a7ca36716f7e01f81052
        , ecc_n = 0x01000000000000000000000000000013e974e72f8a6922031d2603cfe0d7
        , ecc_h = 2
        })
getCurveByName SEC_t239k1 = CurveF2m $ CurveBinary
    0x800000000000000000004000000000000000000000000000000000000001
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x29a0b6a887a983e9730988a68727a8b2d126c44cc2cc7b2a6555193035dc
                        0x76310804f12e549bdb011c103089e73510acb275fc312a5dc6b76553f0ca
        , ecc_n = 0x2000000000000000000000000000005a79fec67cb6e91f1c1da800e478a5
        , ecc_h = 4
        })
getCurveByName SEC_t283k1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000000000000000000000000000000010a1
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x0503213f78ca44883f1a3b8162f188e553cd265f23c1567a16876913b0c2ac2458492836
                        0x01ccda380f1c9e318d90f95d07e5426fe87e45c0e8184698e45962364e34116177dd2259
        , ecc_n = 0x01ffffffffffffffffffffffffffffffffffe9ae2ed07577265dff7f94451e061e163c61
        , ecc_h = 4
        })
getCurveByName SEC_t283r1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000000000000000000000000000000010a1
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x027b680ac8b8596da5a4af8a19a0303fca97fd7645309fa2a581485af6263e313b79a2f5
        , ecc_g = Point 0x05f939258db7dd90e1934f8c70b0dfec2eed25b8557eac9c80e2e198f8cdbecd86b12053
                        0x03676854fe24141cb98fe6d4b20d02b4516ff702350eddb0826779c813f0df45be8112f4
        , ecc_n = 0x03ffffffffffffffffffffffffffffffffffef90399660fc938a90165b042a7cefadb307
        , ecc_h = 2
        })
getCurveByName SEC_t409k1 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000001
    (CurveCommon
        { ecc_a = 0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x0060f05f658f49c1ad3ab1890f7184210efd0987e307c84c27accfb8f9f67cc2c460189eb5aaaa62ee222eb1b35540cfe9023746
                        0x01e369050b7c4e42acba1dacbf04299c3460782f918ea427e6325165e9ea10e3da5f6c42e9c55215aa9ca27a5863ec48d8e0286b
        , ecc_n = 0x007ffffffffffffffffffffffffffffffffffffffffffffffffffe5f83b2d4ea20400ec4557d5ed3e3e7ca5b4b5c83b8e01e5fcf
        , ecc_h = 4
        })
getCurveByName SEC_t409r1 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000001
    (CurveCommon
        { ecc_a = 0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x0021a5c2c8ee9feb5c4b9a753b7b476b7fd6422ef1f3dd674761fa99d6ac27c8a9a197b272822f6cd57a55aa4f50ae317b13545f
        , ecc_g = Point 0x015d4860d088ddb3496b0c6064756260441cde4af1771d4db01ffe5b34e59703dc255a868a1180515603aeab60794e54bb7996a7
                        0x0061b1cfab6be5f32bbfa78324ed106a7636b9c5a7bd198d0158aa4f5488d08f38514f1fdf4b4f40d2181b3681c364ba0273c706
        , ecc_n = 0x010000000000000000000000000000000000000000000000000001e2aad6a612f33307be5fa47c3c9e052f838164cd37d9a21173
        , ecc_h = 2
        })
getCurveByName SEC_t571k1 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000425
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x026eb7a859923fbc82189631f8103fe4ac9ca2970012d5d46024804801841ca44370958493b205e647da304db4ceb08cbbd1ba39494776fb988b47174dca88c7e2945283a01c8972
                        0x0349dc807f4fbf374f4aeade3bca95314dd58cec9f307a54ffc61efc006d8a2c9d4979c0ac44aea74fbebbb9f772aedcb620b01a7ba7af1b320430c8591984f601cd4c143ef1c7a3
        , ecc_n = 0x020000000000000000000000000000000000000000000000000000000000000000000000131850e1f19a63e4b391a8db917f4138b630d84be5d639381e91deb45cfe778f637c1001
        , ecc_h = 4
        })
getCurveByName SEC_t571r1 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000425
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x02f40e7e2221f295de297117b7f3d62f5c6a97ffcb8ceff1cd6ba8ce4a9a18ad84ffabbd8efa59332be7ad6756a66e294afd185a78ff12aa520e4de739baca0c7ffeff7f2955727a
        , ecc_g = Point 0x0303001d34b856296c16c0d40d3cd7750a93d1d2955fa80aa5f40fc8db7b2abdbde53950f4c0d293cdd711a35b67fb1499ae60038614f1394abfa3b4c850d927e1e7769c8eec2d19
                        0x037bf27342da639b6dccfffeb73d69d78c6c27a6009cbbca1980f8533921e8a684423e43bab08a576291af8f461bb2a8b3531d2f0485c19b16e2f1516e23dd3c1a4827af1b8ac15b
        , ecc_n = 0x03ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe661ce18ff55987308059b186823851ec7dd9ca1161de93d5174d66e8382e9bb2fe84e47
        , ecc_h = 2
        })
| nomeata/cryptonite | Crypto/PubKey/ECC/Types.hs | bsd-3-clause | 23,857 | 0 | 10 | 4,732 | 2,549 | 1,494 | 1,055 | 380 | 1 |
module Database.DSH.Backend.Sql.Opt.Properties.Empty where
import Database.Algebra.Table.Lang
import Database.DSH.Backend.Sql.Opt.Properties.Types
-- | A literal table is empty exactly when it carries no rows; a reference
-- to a database table is never assumed empty.
inferEmptyNullOp :: NullOp -> Empty
inferEmptyNullOp op =
    case op of
        LitTable (rows, _) -> null rows
        TableRef _         -> False
-- | Unary operators propagate the emptiness property of their input.
-- NOTE(review): in SQL, an ungrouped aggregate over an empty input yields
-- one row — confirm 'Aggr' here always groups, or that this rule is intended.
inferEmptyUnOp :: Empty -> UnOp -> Empty
inferEmptyUnOp childEmpty op =
    case op of
        WinFun _    -> childEmpty
        RowNum _    -> childEmpty
        RowRank _   -> childEmpty
        Rank _      -> childEmpty
        Project _   -> childEmpty
        Select _    -> childEmpty
        Distinct _  -> childEmpty
        Aggr _      -> childEmpty
        Serialize _ -> childEmpty
-- | Infer emptiness of a binary operator's output from its operands.
inferEmptyBinOp :: Empty -> Empty -> BinOp -> Empty
inferEmptyBinOp leftEmpty rightEmpty op =
    case op of
        Cross _         -> eitherEmpty
        ThetaJoin _     -> eitherEmpty
        -- NOTE(review): a left outer join with an empty right side normally
        -- still keeps the (null-padded) left rows — confirm this rule.
        LeftOuterJoin _ -> eitherEmpty
        SemiJoin _      -> leftEmpty
        AntiJoin _      -> False
        DisjUnion _     -> False
        Difference _    -> False
  where
    eitherEmpty = leftEmpty || rightEmpty
| ulricha/dsh-sql | src/Database/DSH/Backend/Sql/Opt/Properties/Empty.hs | bsd-3-clause | 1,155 | 0 | 9 | 394 | 328 | 171 | 157 | 30 | 9 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Ordinal.EN.Tests
( tests
) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.EN.Corpus
import Duckling.Testing.Asserts
-- | Corpus-driven tests for English ordinals.
tests :: TestTree
tests = testGroup "EN Tests" [makeCorpusTest [This Ordinal] corpus]
| rfranek/duckling | tests/Duckling/Ordinal/EN/Tests.hs | bsd-3-clause | 602 | 0 | 9 | 98 | 80 | 51 | 29 | 11 | 1 |
-- | This module contains operations to form function stack frame and access
-- its elements.
module Toy.X86.Frame
( Frame
, mkFrame
, resolveMemRefs
, evalStackShift
) where
import Control.Applicative ((<|>))
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import Formatting (build, sformat, (%))
import Universum hiding (Const)
import Toy.Base (Var)
import Toy.X86.Data (Inst, Operand (..), traverseOperands)
import Toy.X86.SymStack (SymStackSpace, regSymStack)
---------------------------
------ Frame layout -------
---------------------------
-- Argument n
-- ...
-- Argument 1
-- --- <return address> ---
-- Registers backup space
-- Local variables
-- Symbolic stack
---------------------------
-- inspired by @wotopul
-- | Stack frame description; see the layout sketch above.
data Frame = Frame
    { fArgs :: M.Map Var Int   -- ^ argument name -> zero-based index
    , fVars :: M.Map Var Int   -- ^ local variable name -> zero-based slot
    , fSym  :: SymStackSpace   -- ^ space size for symbolic stack
    }
-- | Build a frame from argument names, local variable names and the
-- symbolic-stack space requirement.
mkFrame :: [Var] -> S.Set Var -> SymStackSpace -> Frame
mkFrame args vars symSpace = Frame
    { fArgs = M.fromList (zip args [0..])
    , fVars = M.fromList (zip locals [0..])
    , fSym  = symSpace
    }
  where
    -- Names that are arguments are not allocated local slots as well.
    locals = S.toList (foldr S.delete vars args)
-- | Rewrite the abstract operands ('Stack', 'Backup', 'Local') in a batch of
-- instructions into concrete 'Mem' offsets, following the frame layout
-- sketched above (symbolic stack, then locals, then register backup, then
-- the return address, then arguments).
resolveMemRefs :: Traversable f => Frame -> f Inst -> f Inst
resolveMemRefs Frame{..} = fmap $ traverseOperands %~ \case
    Stack i  -> Mem i
    -- Register backup area lies above the symbolic stack and the locals.
    Backup i -> Mem (stSymSize + varsNum + i)
    Local n ->
        let noVar = error $ sformat ("No such variable / argument: '"%build%"'") n
            asVar i = Mem (stSymSize + i)
            -- The extra +1 skips the saved return address that sits between
            -- the frame and the caller-pushed arguments.
            asArg i = Mem (stSymSize + varsNum + backupSize + 1 + i)
        -- A name resolves as a local first, then as an argument.
        in fromMaybe noVar $ asVar <$> M.lookup n fVars
                         <|> asArg <$> M.lookup n fArgs
    o@HardMem{} -> o
    o@HeapMem{} -> o
    o@HeapMemExt{} -> o
    -- Concrete 'Mem' operands must not appear before resolution.
    Mem _ -> error "Resolving Mem reference??"
    o@Reg{} -> o
    o@Const{} -> o
  where
    backupSize = length regSymStack
    stSymSize = fromIntegral fSym
    varsNum = M.size fVars
-- | Number of stack slots the frame occupies: symbolic stack space plus
-- local variables plus the register backup area.
evalStackShift :: Frame -> Int
evalStackShift frame =
    fromIntegral (fSym frame) + M.size (fVars frame) + length regSymStack
| Martoon-00/toy-compiler | src/Toy/X86/Frame.hs | bsd-3-clause | 2,273 | 0 | 18 | 664 | 646 | 352 | 294 | -1 | -1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
module Spec.Flavor where
import Relude
-- | The supported specification flavors.
data SpecFlavor
  = SpecVk -- ^ the \"VK\"-prefixed spec (see 'flavorPrefixCaps')
  | SpecXr -- ^ the \"XR\"-prefixed spec

-- | Singleton witness tying a type-level 'SpecFlavor' to a runtime value.
data SSpecFlavor t where
  SSpecVk ::SSpecFlavor SpecVk
  SSpecXr ::SSpecFlavor SpecXr

-- | Pass the flavor singleton implicitly via a class constraint.
class KnownSpecFlavor (t :: SpecFlavor) where
  sSpecFlavor :: SSpecFlavor t

instance KnownSpecFlavor SpecVk where
  sSpecFlavor = SSpecVk

instance KnownSpecFlavor SpecXr where
  sSpecFlavor = SSpecXr
-- | Demote the type-level flavor @t@ to its term-level 'SpecFlavor'.
specFlavor :: forall t . KnownSpecFlavor t => SpecFlavor
specFlavor = case sSpecFlavor @t of
  SSpecVk -> SpecVk
  SSpecXr -> SpecXr

-- | The upper-case API prefix associated with the flavor @t@.
flavorPrefixCaps :: forall t . KnownSpecFlavor t => ByteString
flavorPrefixCaps = case sSpecFlavor @t of
  SSpecVk -> "VK"
  SSpecXr -> "XR"
| expipiplus1/vulkan | generate-new/src/Spec/Flavor.hs | bsd-3-clause | 692 | 0 | 7 | 129 | 176 | 95 | 81 | -1 | -1 |
module Rumpus.Systems.Script where
import PreludeExtra
import Rumpus.Systems.PlayPause
import Rumpus.Systems.Shared
import Rumpus.Systems.CodeEditor
import Rumpus.Systems.Knobs
import qualified Data.HashMap.Strict as Map
-- | Begin playback once every entity with a start-code file is ready:
-- either its compiled Start handler is attached, or its code editor has
-- produced a result (even an error result).
checkIfReadyToStart :: ECSMonad ()
checkIfReadyToStart = do
    startEntities <- Map.keys <$> getComponentMap myStartCodeFile
    readiness <- forM startEntities $ \entityID -> do
        started <- entityHasComponent entityID myStart
        if started
            then return True
            else do
                -- A compilation error still counts as "started" so that a
                -- single broken script cannot leave the scene dead forever.
                mCodeEditor <- inEntity entityID getStartCodeEditor
                return (maybe True (^. cedHasResult) mCodeEditor)
    when (and readiness) $
        setWorldPlaying True
-- | Per-frame entry point: run user scripts while playing; otherwise keep
-- polling until all start code is ready.
tickScriptSystem :: ECSMonad ()
tickScriptSystem = do
    playing <- getWorldPlaying
    if playing
        then runScripts
        else checkIfReadyToStart
-- | Run user scripts for one frame: each pending Start handler exactly once,
-- then every Update handler.
-- NOTE(review): 'runUserScriptsWithTimeout_' presumably bounds how long user
-- code may run — confirm its semantics.
runScripts :: ECSMonad ()
runScripts = runUserScriptsWithTimeout_ $ do
    forEntitiesWithComponent myStart $
        \(entityID, onStart) -> inEntity entityID $ do
            --putStrLnIO ("Running Start for " ++ show entityID)
            -- Automatically remove children when start runs.
            -- This should probably be configurable but it's what
            -- I always find myself doing so I'm hardcoding it for now.
            -- FIXME:
            -- This has an undesirable side-effect:
            -- Children added programmatically outside myStart
            -- will get removed too. So we really only want to do this
            -- when new code is received from the CodeEditor.
            -- (e.g., when doing this:
            -- fooID <- spawnEntity $ myStart ==> animateSizeInFrom0 0.3
            -- inEntity barID $ setParent fooID
            -- bar will be immediately deleted.
            removeChildren
            removeComponent myKnobDefs
            -- Only call Start once: drop the component after it has run.
            runUserFunctionProtected myStart onStart
            removeComponent myStart
    forEntitiesWithComponent myUpdate $
        \(entityID, update) -> do
            inEntity entityID $
                runUserFunctionProtected myUpdate update
-- | Run an action on this entity's script state when the stored 'Dynamic'
-- holds a value of the expected type; otherwise log a diagnostic.
withState :: (Typeable a, MonadIO m, MonadState ECS m, MonadReader EntityID m)
          => (a -> m ()) -> m ()
withState action =
    withComponent_ myState $ \dynState ->
        case fromDynamic dynState of
            Just typedState -> action typedState
            Nothing -> do
                entityID <- ask
                -- Stored state and handler disagree about the state type.
                putStrLnIO $
                    "withState: Attempted to use entityID " ++ show entityID
                    ++ "'s script data of type " ++ show dynState
                    ++ " with a function that accepts a different type."
-- | Fetch this entity's script state, falling back to the supplied default
-- when no state is stored or it was stored at a different type.
getState :: (Typeable a, MonadIO m, MonadState ECS m, MonadReader EntityID m)
         => a -> m a
getState defaultValue = do
    storedState <- getComponent myState
    return (fromMaybe defaultValue (fromDynamic =<< storedState))
-- | Apply a monadic transformation to this entity's script state and store
-- the result back (does nothing but log when the stored type differs).
editState :: (Typeable a, MonadIO m, MonadState ECS m, MonadReader EntityID m)
          => (a -> m a) -> m ()
editState transform = withState (\current -> transform current >>= setState)
-- | Store (replace) this entity's script state, forcing it to WHNF before
-- boxing it as a 'Dynamic' (note '$!' evaluates only the outermost
-- constructor, not the full structure).
setState :: (Typeable a, MonadIO m, MonadState ECS m, MonadReader EntityID m)
    => a -> m ()
setState scriptState = myState ==> (toDyn $! scriptState)
-- FIXME not everything is NFData, so need to figure out
-- (from API perspective) how to allow
-- non NFData (e.g. TVars) while still encouraging NFData
--setState scriptState = myState ==> (toDyn $!! scriptState)
| lukexi/rumpus | src/Rumpus/Systems/Script.hs | bsd-3-clause | 3,819 | 0 | 19 | 1,139 | 723 | 366 | 357 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Helpers.Auth where
import Snap.Snaplet
import Snap.Core
import Snap.Snaplet.Auth
import qualified Data.ByteString as B
import Application
import State.Accounts
import Helpers.Errors
-- | Run a handler with the current user's account; redirect to login when
-- not authenticated, or report an error when the user/account is malformed.
withAccount :: (Account -> AppHandler ()) -> AppHandler ()
withAccount handler = do
  mUser <- with auth currentUser
  case mUser of
    Nothing -> loginRedirect
    Just user ->
      case userId user of
        Nothing -> failureError "No id on user account." Nothing
        Just uid -> do
          mAccount <- getAccount uid
          case mAccount of
            Nothing -> failureError "No account for user." (Just (unUid uid))
            Just account -> handler account
-- | Redirect to the login page, carrying the current URL so the user can be
-- sent back after authenticating.
loginRedirect :: AppHandler ()
loginRedirect = do
  currentUrl <- rqURI <$> getRequest
  redirect $ B.concat [rootUrl, "/auth/login", "?redirect=", urlEncode currentUrl]
| dbp/analyze | src/Helpers/Auth.hs | bsd-3-clause | 846 | 0 | 21 | 199 | 246 | 124 | 122 | 26 | 4 |
{-# LANGUAGE NoImplicitPrelude #-}
-------------------------------------------------------------------
-- |
-- Module : Irreverent.Bitbucket.Http
-- Copyright : (C) 2017 Irreverent Pixel Feats
-- License : BSD-style (see the file /LICENSE.md)
-- Maintainer : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Http (module X) where
import Irreverent.Bitbucket.Http.Repositories.List as X
import Irreverent.Bitbucket.Http.Repositories.New as X
import Irreverent.Bitbucket.Http.Repositories.Pipelines.AddEnvironmentVariable as X
import Irreverent.Bitbucket.Http.Repositories.Pipelines.GetConfig as X
import Irreverent.Bitbucket.Http.Repositories.Pipelines.UpdateConfig as X
| irreverent-pixel-feats/bitbucket | bitbucket-http-client/src/Irreverent/Bitbucket/Http.hs | bsd-3-clause | 747 | 0 | 4 | 77 | 75 | 61 | 14 | 7 | 0 |
{-- snippet all --}
-- | Total variant of 'tail': 'Nothing' on the empty list instead of a crash.
safeTail :: [a] -> Maybe [a]
safeTail list =
    case list of
        []       -> Nothing
        _ : rest -> Just rest
{-- /snippet all --}
| binesiyu/ifl | examples/ch19/safetail.hs | mit | 118 | 0 | 7 | 23 | 49 | 26 | 23 | 3 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
-- API server logic
module Pos.Explorer.Web.Server
( explorerServeImpl
, explorerApp
, explorerHandlers
-- pure functions
, getBlockDifficulty
, roundToBlockPage
-- api functions
, getBlocksTotal
, getBlocksPagesTotal
, getBlocksPage
, getEpochSlot
, getEpochPage
-- function useful for socket-io server
, topsortTxsOrFail
, getMempoolTxs
, getBlocksLastPage
, getEpochPagesOrThrow
, cAddrToAddr
) where
import Universum hiding (id)
import Control.Lens (at)
import qualified Data.ByteString as BS
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import Data.Maybe (fromMaybe)
import qualified Data.Vector as V
import Formatting (build, int, sformat, (%))
import Network.Wai (Application)
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import qualified Serokell.Util.Base64 as B64
import Servant.API.Generic (toServant)
import Servant.Server (Server, ServerT, err405, errReasonPhrase,
serve)
import Servant.Server.Generic (AsServerT)
import Pos.Crypto (WithHash (..), hash, redeemPkBuild, withHash)
import Pos.DB.Block (getBlund, resolveForwardLink)
import Pos.DB.Class (MonadDBRead)
import Pos.Infra.Diffusion.Types (Diffusion)
import Pos.Binary.Class (biSize)
import Pos.Chain.Block (Block, Blund, HeaderHash, MainBlock, Undo,
gbHeader, gbhConsensus, mainBlockSlot, mainBlockTxPayload,
mcdSlot, headerHash)
import Pos.Chain.Genesis as Genesis (Config (..), GenesisHash,
configEpochSlots)
import Pos.Chain.Txp (Tx (..), TxAux, TxId, TxIn (..), TxMap,
TxOutAux (..), mpLocalTxs, taTx, topsortTxs, txOutAddress,
txOutValue, txpTxs, _txOutputs)
import Pos.Core (AddrType (..), Address (..), Coin, EpochIndex,
SlotCount, Timestamp, coinToInteger, difficultyL,
getChainDifficulty, isUnknownAddressType,
makeRedeemAddress, siEpoch, siSlot, sumCoins,
timestampToPosix, unsafeAddCoin, unsafeIntegerToCoin,
unsafeSubCoin)
import Pos.Core.Chrono (NewestFirst (..))
import Pos.Core.NetworkMagic (NetworkMagic, makeNetworkMagic)
import Pos.DB.Txp (MonadTxpMem, getFilteredUtxo, getLocalTxs,
getMemPool, withTxpLocalData)
import Pos.Infra.Slotting (MonadSlots (..), getSlotStart)
import Pos.Util (divRoundUp, maybeThrow)
import Pos.Util.Wlog (logDebug)
import Pos.Web (serveImpl)
import Pos.Explorer.Aeson.ClientTypes ()
import Pos.Explorer.Core (TxExtra (..))
import Pos.Explorer.DB (Page)
import qualified Pos.Explorer.DB as ExDB
import Pos.Explorer.ExplorerMode (ExplorerMode)
import Pos.Explorer.ExtraContext (HasExplorerCSLInterface (..),
HasGenesisRedeemAddressInfo (..))
import Pos.Explorer.Web.Api (ExplorerApi, ExplorerApiRecord (..),
explorerApi)
import Pos.Explorer.Web.ClientTypes (Byte, CAda (..), CAddress (..),
CAddressSummary (..), CAddressType (..),
CAddressesFilter (..), CBlockEntry (..),
CBlockSummary (..), CByteString (..),
CGenesisAddressInfo (..), CGenesisSummary (..), CHash,
CTxBrief (..), CTxEntry (..), CTxId (..), CTxSummary (..), CBlockRange (..),
CUtxo (..), TxInternal (..), convertTxOutputs,
convertTxOutputsMB, fromCAddress, fromCHash, fromCTxId,
getEpochIndex, getSlotIndex, mkCCoin, mkCCoinMB,
tiToTxEntry, toBlockEntry, toBlockSummary, toCAddress,
toCHash, toCTxId, toTxBrief)
import Pos.Explorer.Web.Error (ExplorerError (..))
import qualified Data.Map as M
import Pos.Configuration (explorerExtendedApi)
----------------------------------------------------------------
-- Top level functionality
----------------------------------------------------------------
type MainBlund = (MainBlock, Undo)
-- | Serve the explorer application over the network, with per-request
-- logging to stdout. Binds on all interfaces ("*") at the given port.
explorerServeImpl
    :: ExplorerMode ctx m
    => m Application -- ^ action producing the Wai application to serve
    -> Word16        -- ^ TCP port to bind
    -> m ()
explorerServeImpl app port = serveImpl loggingApp "*" port Nothing Nothing Nothing
  where
    -- Wrap the application with the dev request logger.
    loggingApp = logStdoutDev <$> app
-- | Turn a served handler set into a runnable Wai 'Application'.
explorerApp :: ExplorerMode ctx m => m (Server ExplorerApi) -> m Application
explorerApp serv = fmap (serve explorerApi) serv
----------------------------------------------------------------
-- Handlers
----------------------------------------------------------------
explorerHandlers
:: forall ctx m. ExplorerMode ctx m
=> Genesis.Config -> Diffusion m -> ServerT ExplorerApi m
explorerHandlers genesisConfig _diffusion =
toServant (ExplorerApiRecord
{ _totalAda = getTotalAda
, _blocksPages = getBlocksPage epochSlots
, _dumpBlockRange = getBlockRange genesisConfig
, _blocksPagesTotal = getBlocksPagesTotal
, _blocksSummary = getBlockSummary genesisConfig
, _blocksTxs = getBlockTxs genesisHash
, _txsLast = getLastTxs
, _txsSummary = getTxSummary genesisHash
, _addressSummary = getAddressSummary nm genesisHash
, _addressUtxoBulk = getAddressUtxoBulk nm
, _epochPages = getEpochPage epochSlots
, _epochSlots = getEpochSlot epochSlots
, _genesisSummary = getGenesisSummary
, _genesisPagesTotal = getGenesisPagesTotal
, _genesisAddressInfo = getGenesisAddressInfo
, _statsTxs = getStatsTxs genesisConfig
}
:: ExplorerApiRecord (AsServerT m))
where
nm :: NetworkMagic
nm = makeNetworkMagic $ configProtocolMagic genesisConfig
--
epochSlots = configEpochSlots genesisConfig
--
genesisHash = configGenesisHash genesisConfig
----------------------------------------------------------------
-- API Functions
----------------------------------------------------------------
-- | Total ADA derived from the DB's tracked utxo sum. The sum is scaled
-- down by 1e6 (presumably smallest-unit -> ADA — TODO confirm).
getTotalAda :: ExplorerMode ctx m => m CAda
getTotalAda = do
    utxoSum <- ExDB.getUtxoSum
    validateUtxoSum utxoSum
    pure $ CAda $ fromInteger utxoSum / 1e6
  where
    -- Sanity check: the tracked sum must lie in [0, maxBound :: Coin].
    validateUtxoSum :: ExplorerMode ctx m => Integer -> m ()
    validateUtxoSum n
        | n < 0 = throwM $ Internal $
            sformat ("Internal tracker of utxo sum has a negative value: "%build) n
        | n > coinToInteger (maxBound :: Coin) = throwM $ Internal $
            sformat ("Internal tracker of utxo sum overflows: "%build) n
        | otherwise = pure ()
-- | Get the total number of blocks/slots currently available.
-- Total number of main blocks = difficulty of the topmost (tip) header.
-- Total number of anchor blocks = current epoch + 1
getBlocksTotal
    :: ExplorerMode ctx m
    => m Integer
getBlocksTotal = getBlockDifficulty <$> getTipBlockCSLI
-- | Get last blocks with a page parameter. This enables easier paging on the
-- client side and should enable a simple and thin client logic.
-- Currently the pages are in chronological order.
getBlocksPage
:: ExplorerMode ctx m
=> SlotCount
-> Maybe Word -- ^ Page number
-> Maybe Word -- ^ Page size
-> m (Integer, [CBlockEntry])
getBlocksPage epochSlots mPageNumber mPageSize = do
let pageSize = toPageSize mPageSize
-- Get total pages from the blocks.
totalPages <- getBlocksPagesTotal mPageSize
-- Initially set on the last page number if page number not defined.
let pageNumber = fromMaybe totalPages $ toInteger <$> mPageNumber
-- Make sure the parameters are valid.
when (pageNumber <= 0) $
throwM $ Internal "Number of pages must be greater than 0."
when (pageNumber > totalPages) $
throwM $ Internal "Number of pages exceeds total pages number."
-- TODO: Fix in the future.
when (pageSize /= fromIntegral ExDB.defaultPageSize) $
throwM $ Internal "We currently support only page size of 10."
when (pageSize > 1000) $
throwM $ Internal "The upper bound for pageSize is 1000."
-- Get pages from the database
-- TODO: Fix this Int / Integer thing once we merge repositories
pageBlocksHH <- getPageHHsOrThrow $ fromIntegral pageNumber
blunds <- forM pageBlocksHH getBlundOrThrow
cBlocksEntry <- forM (blundToMainBlockUndo blunds) (toBlockEntry epochSlots)
-- Return total pages and the blocks. We start from page 1.
pure (totalPages, reverse cBlocksEntry)
where
blundToMainBlockUndo :: [Blund] -> [(MainBlock, Undo)]
blundToMainBlockUndo blund = [(mainBlock, undo) | (Right mainBlock, undo) <- blund]
-- Either get the @HeaderHash@es from the @Page@ or throw an exception.
getPageHHsOrThrow
:: ExplorerMode ctx m
=> Int
-> m [HeaderHash]
getPageHHsOrThrow pageNumber =
-- Then let's fetch blocks for a specific page from it and raise exception if not
-- found.
getPageBlocksCSLI pageNumber >>= maybeThrow (Internal errMsg)
where
errMsg :: Text
errMsg = sformat ("No blocks on page "%build%" found!") pageNumber
-- | Get total pages from blocks. Calculated from
-- pageSize we pass to it.
getBlocksPagesTotal
    :: ExplorerMode ctx m
    => Maybe Word -- ^ page size; defaults via 'toPageSize'
    -> m Integer
getBlocksPagesTotal mPageSize = do
    let pageSize = toPageSize mPageSize
    -- Get total blocks in the blockchain. Get the blocks total using this mode.
    blocksTotal <- toInteger <$> getBlocksTotal
    -- Make sure the parameters are valid.
    when (blocksTotal < 1) $
        throwM $ Internal "There are currently no block to display."
    when (pageSize < 1) $
        throwM $ Internal "Page size must be greater than 1 if you want to display blocks."
    -- We start from page 1.
    let pagesTotal = roundToBlockPage blocksTotal
    pure pagesTotal
-- | Get the last page from the blockchain. We use the default 10
-- for the page size since this is called from __explorer only__.
getBlocksLastPage
    :: ExplorerMode ctx m
    => SlotCount -> m (Integer, [CBlockEntry])
getBlocksLastPage epochSlots =
    -- Passing 'Nothing' for the page number makes 'getBlocksPage'
    -- default to the last page.
    getBlocksPage epochSlots Nothing (Just defaultPageSizeWord)
-- | Get last transactions from the blockchain.
getLastTxs
:: ExplorerMode ctx m
=> m [CTxEntry]
getLastTxs = do
mempoolTxs <- getMempoolTxs
blockTxsWithTs <- getBlockchainLastTxs
-- We take the mempool txs first, then topsorted blockchain ones.
let newTxs = mempoolTxs <> blockTxsWithTs
pure $ tiToTxEntry <$> newTxs
where
-- Get last transactions from the blockchain.
getBlockchainLastTxs
:: ExplorerMode ctx m
=> m [TxInternal]
getBlockchainLastTxs = do
mLastTxs <- ExDB.getLastTransactions
let lastTxs = fromMaybe [] mLastTxs
let lastTxsWH = map withHash lastTxs
forM lastTxsWH toTxInternal
where
-- Convert transaction to TxInternal.
toTxInternal
:: (MonadThrow m, MonadDBRead m)
=> WithHash Tx
-> m TxInternal
toTxInternal (WithHash tx txId) = do
extra <- ExDB.getTxExtra txId >>=
maybeThrow (Internal "No extra info for tx in DB!")
pure $ TxInternal extra tx
-- | Get block summary.
getBlockSummary
    :: ExplorerMode ctx m
    => Genesis.Config
    -> CHash -- ^ client-side hash of the block to summarise
    -> m CBlockSummary
getBlockSummary genesisConfig cHash = do
    -- Decode the client hash; malformed input throws 'Internal'.
    hh <- unwrapOrThrow $ fromCHash cHash
    -- Only main blocks can be summarised; genesis blocks are rejected
    -- inside 'getMainBlund'.
    mainBlund <- getMainBlund (configGenesisHash genesisConfig) hh
    toBlockSummary (configEpochSlots genesisConfig) mainBlund
-- | Get transactions from a block.
getBlockTxs
    :: ExplorerMode ctx m
    => GenesisHash
    -> CHash      -- ^ block whose transactions are listed
    -> Maybe Word -- ^ limit; defaults to 'defaultPageSizeWord'
    -> Maybe Word -- ^ skip (offset); defaults to 0
    -> m [CTxBrief]
getBlockTxs genesisHash cHash mLimit mSkip = do
    let limit = fromIntegral $ fromMaybe defaultPageSizeWord mLimit
    let skip = fromIntegral $ fromMaybe 0 mSkip
    txs <- getMainBlockTxs genesisHash cHash
    -- Paginate, then pair each tx with its persisted 'TxExtra'; every
    -- in-block transaction is expected to have extra info in the DB.
    forM (take limit . drop skip $ txs) $ \tx -> do
        extra <- ExDB.getTxExtra (hash tx) >>=
                 maybeThrow (Internal "In-block transaction doesn't \
                                      \have extra info in DB")
        pure $ makeTxBrief tx extra
-- | Get address summary. Can return several addresses.
-- @PubKeyAddress@, @ScriptAddress@, @RedeemAddress@ and finally
-- @UnknownAddressType@.
getAddressSummary
:: ExplorerMode ctx m
=> NetworkMagic
-> GenesisHash
-> CAddress
-> m CAddressSummary
getAddressSummary nm genesisHash cAddr = do
addr <- cAddrToAddr nm cAddr
when (isUnknownAddressType addr) $
throwM $ Internal "Unknown address type"
balance <- mkCCoin . fromMaybe minBound <$> ExDB.getAddrBalance addr
txIds <- getNewestFirst <$> ExDB.getAddrHistory addr
let nTxs = length txIds
-- FIXME [CBR-119] Waiting for design discussion
when (nTxs > 1000) $
throwM $ Internal $ "Response too large: no more than 1000 transactions"
<> " can be returned at once. This issue is known and being worked on"
transactions <- forM txIds $ \id -> do
extra <- getTxExtraOrFail id
tx <- getTxMain genesisHash id extra
pure $ makeTxBrief tx extra
pure CAddressSummary {
caAddress = cAddr,
caType = getAddressType addr,
caTxNum = fromIntegral $ length transactions,
caBalance = balance,
caTxList = transactions
}
where
getAddressType :: Address -> CAddressType
getAddressType Address {..} =
case addrType of
ATPubKey -> CPubKeyAddress
ATScript -> CScriptAddress
ATRedeem -> CRedeemAddress
ATUnknown {} -> CUnknownAddress
getAddressUtxoBulk
:: (ExplorerMode ctx m)
=> NetworkMagic
-> [CAddress]
-> m [CUtxo]
getAddressUtxoBulk nm cAddrs = do
unless explorerExtendedApi $
throwM err405
{ errReasonPhrase = "Explorer extended API is disabled by configuration!"
}
let nAddrs = length cAddrs
when (nAddrs > 10) $
throwM err405
{ errReasonPhrase = "Maximum number of addresses you can send to fetch Utxo in bulk is 10!"
}
addrs <- mapM (cAddrToAddr nm) cAddrs
utxo <- getFilteredUtxo addrs
pure . map futxoToCUtxo . M.toList $ utxo
where
futxoToCUtxo :: (TxIn, TxOutAux) -> CUtxo
futxoToCUtxo ((TxInUtxo txInHash txInIndex), txOutAux) = CUtxo {
cuId = toCTxId txInHash,
cuOutIndex = fromIntegral txInIndex,
cuAddress = toCAddress . txOutAddress . toaOut $ txOutAux,
cuCoins = mkCCoin . txOutValue . toaOut $ txOutAux
}
futxoToCUtxo ((TxInUnknown tag bs), _) = CUtxoUnknown {
cuTag = fromIntegral tag,
cuBs = CByteString bs
}
getBlockRange
:: ExplorerMode ctx m
=> Genesis.Config
-> CHash
-> CHash
-> m CBlockRange
getBlockRange genesisConfig start stop = do
startHeaderHash <- unwrapOrThrow $ fromCHash start
stopHeaderHash <- unwrapOrThrow $ fromCHash stop
let
getTxSummaryFromBlock
:: (ExplorerMode ctx m)
=> MainBlock
-> Tx
-> m CTxSummary
getTxSummaryFromBlock mb tx = do
let txId = hash tx
txExtra <- getTxExtraOrFail txId
blkSlotStart <- getBlkSlotStart mb
let
blockTime = timestampToPosix <$> blkSlotStart
inputOutputsMB = map (fmap toaOut) $ NE.toList $ teInputOutputs txExtra
txOutputs = convertTxOutputs . NE.toList $ _txOutputs tx
totalInputMB = unsafeIntegerToCoin . sumCoins . map txOutValue <$> sequence inputOutputsMB
totalOutput = unsafeIntegerToCoin $ sumCoins $ map snd txOutputs
-- Verify that strange things don't happen with transactions
whenJust totalInputMB $ \totalInput -> when (totalOutput > totalInput) $
throwM $ Internal "Detected tx with output greater than input"
pure $ CTxSummary
{ ctsId = toCTxId txId
, ctsTxTimeIssued = timestampToPosix <$> teReceivedTime txExtra
, ctsBlockTimeIssued = blockTime
, ctsBlockHeight = Nothing
, ctsBlockEpoch = Nothing
, ctsBlockSlot = Nothing
, ctsBlockHash = Just $ toCHash $ headerHash mb
, ctsRelayedBy = Nothing
, ctsTotalInput = mkCCoinMB totalInputMB
, ctsTotalOutput = mkCCoin totalOutput
, ctsFees = mkCCoinMB $ (`unsafeSubCoin` totalOutput) <$> totalInputMB
, ctsInputs = map (fmap (second mkCCoin)) $ convertTxOutputsMB inputOutputsMB
, ctsOutputs = map (second mkCCoin) txOutputs
}
genesisHash = configGenesisHash genesisConfig
go :: ExplorerMode ctx m => HeaderHash -> CBlockRange -> m CBlockRange
go hh state1 = do
maybeBlund <- getBlund genesisHash hh
newState <- case maybeBlund of
Just (Right blk', undo) -> do
let
txs :: [Tx]
txs = blk' ^. mainBlockTxPayload . txpTxs
blockSum <- toBlockSummary (configEpochSlots genesisConfig) (blk',undo)
let
state2 = state1 { cbrBlocks = blockSum : (cbrBlocks state1) }
iterateTx :: ExplorerMode ctx m => CBlockRange -> Tx -> m CBlockRange
iterateTx stateIn tx = do
txSummary <- getTxSummaryFromBlock blk' tx
pure $ stateIn { cbrTransactions = txSummary : (cbrTransactions stateIn) }
foldM iterateTx state2 txs
_ -> pure state1
if hh == stopHeaderHash then
pure newState
else do
nextHh <- resolveForwardLink hh
case nextHh of
Nothing -> do
pure newState
Just nextHh' -> go nextHh' newState
backwards <- go startHeaderHash (CBlockRange [] [])
pure $ CBlockRange
{ cbrBlocks = reverse $ cbrBlocks backwards
, cbrTransactions = reverse $ cbrTransactions backwards
}
-- | Get transaction summary from transaction id. Looks at both the database
-- and the memory (mempool) for the transaction. What we have at the mempool
-- are transactions that have to be written in the blockchain.
getTxSummary
:: ExplorerMode ctx m
=> GenesisHash
-> CTxId
-> m CTxSummary
getTxSummary genesisHash cTxId = do
-- There are two places whence we can fetch a transaction: MemPool and DB.
-- However, TxExtra should be added in the DB when a transaction is added
-- to MemPool. So we start with TxExtra and then figure out whence to fetch
-- the rest.
txId <- cTxIdToTxId cTxId
-- Get from database, @TxExtra
txExtra <- ExDB.getTxExtra txId
-- If we found @TxExtra@ that means we found something saved on the
-- blockchain and we don't have to fetch @MemPool@. But if we don't find
-- anything on the blockchain, we go searching in the @MemPool@.
if isJust txExtra
then getTxSummaryFromBlockchain cTxId
else getTxSummaryFromMemPool cTxId
where
-- Get transaction from blockchain (the database).
getTxSummaryFromBlockchain
:: (ExplorerMode ctx m)
=> CTxId
-> m CTxSummary
getTxSummaryFromBlockchain cTxId' = do
txId <- cTxIdToTxId cTxId'
txExtra <- getTxExtraOrFail txId
-- Return transaction extra (txExtra) fields
let mBlockchainPlace = teBlockchainPlace txExtra
blockchainPlace <- maybeThrow (Internal "No blockchain place.") mBlockchainPlace
let headerHashBP = fst blockchainPlace
let txIndexInBlock = snd blockchainPlace
mb <- getMainBlock genesisHash headerHashBP
blkSlotStart <- getBlkSlotStart mb
let blockHeight = fromIntegral $ mb ^. difficultyL
let receivedTime = teReceivedTime txExtra
let blockTime = timestampToPosix <$> blkSlotStart
-- Get block epoch and slot index
let blkHeaderSlot = mb ^. mainBlockSlot
let epochIndex = getEpochIndex $ siEpoch blkHeaderSlot
let slotIndex = getSlotIndex $ siSlot blkHeaderSlot
let blkHash = toCHash headerHashBP
tx <- maybeThrow (Internal "TxExtra return tx index that is out of bounds") $
atMay (toList $ mb ^. mainBlockTxPayload . txpTxs) (fromIntegral txIndexInBlock)
let inputOutputsMB = map (fmap toaOut) $ NE.toList $ teInputOutputs txExtra
let txOutputs = convertTxOutputs . NE.toList $ _txOutputs tx
let totalInputMB = unsafeIntegerToCoin . sumCoins . map txOutValue <$> sequence inputOutputsMB
let totalOutput = unsafeIntegerToCoin $ sumCoins $ map snd txOutputs
-- Verify that strange things don't happen with transactions
whenJust totalInputMB $ \totalInput -> when (totalOutput > totalInput) $
throwM $ Internal "Detected tx with output greater than input"
pure $ CTxSummary
{ ctsId = cTxId'
, ctsTxTimeIssued = timestampToPosix <$> receivedTime
, ctsBlockTimeIssued = blockTime
, ctsBlockHeight = Just blockHeight
, ctsBlockEpoch = Just epochIndex
, ctsBlockSlot = Just slotIndex
, ctsBlockHash = Just blkHash
, ctsRelayedBy = Nothing
, ctsTotalInput = mkCCoinMB totalInputMB
, ctsTotalOutput = mkCCoin totalOutput
, ctsFees = mkCCoinMB $ (`unsafeSubCoin` totalOutput) <$> totalInputMB
, ctsInputs = map (fmap (second mkCCoin)) $ convertTxOutputsMB inputOutputsMB
, ctsOutputs = map (second mkCCoin) txOutputs
}
-- Get transaction from mempool (the memory).
getTxSummaryFromMemPool
:: (ExplorerMode ctx m)
=> CTxId
-> m CTxSummary
getTxSummaryFromMemPool cTxId' = do
txId <- cTxIdToTxId cTxId'
tx <- fetchTxFromMempoolOrFail txId
let inputOutputs = NE.toList . _txOutputs $ taTx tx
let txOutputs = convertTxOutputs inputOutputs
let totalInput = unsafeIntegerToCoin $ sumCoins $ map txOutValue inputOutputs
let totalOutput = unsafeIntegerToCoin $ sumCoins $ map snd txOutputs
-- Verify that strange things don't happen with transactions
when (totalOutput > totalInput) $
throwM $ Internal "Detected tx with output greater than input"
pure $ CTxSummary
{ ctsId = cTxId'
, ctsTxTimeIssued = Nothing
, ctsBlockTimeIssued = Nothing
, ctsBlockHeight = Nothing
, ctsBlockEpoch = Nothing
, ctsBlockSlot = Nothing
, ctsBlockHash = Nothing
, ctsRelayedBy = Nothing
, ctsTotalInput = mkCCoin totalInput
, ctsTotalOutput = mkCCoin totalOutput
, ctsFees = mkCCoin $ unsafeSubCoin totalInput totalOutput
, ctsInputs = map (Just . second mkCCoin) $ convertTxOutputs inputOutputs
, ctsOutputs = map (second mkCCoin) txOutputs
}
-- | Accumulator used by 'getGenesisSummary'. For a single address
-- 'gsiNumRedeemed' is abused as a 0/1 "is redeemed" flag (see the note in
-- 'getRedeemAddressInfo'); folded over all addresses it becomes a count.
data GenesisSummaryInternal = GenesisSummaryInternal
    { gsiNumRedeemed            :: !Int  -- ^ redeemed-address count (or 0/1 flag)
    , gsiRedeemedAmountTotal    :: !Coin -- ^ total amount already redeemed
    , gsiNonRedeemedAmountTotal :: !Coin -- ^ total amount still unredeemed
    }
getGenesisSummary
:: ExplorerMode ctx m
=> m CGenesisSummary
getGenesisSummary = do
grai <- getGenesisRedeemAddressInfo
redeemAddressInfo <- V.mapM (uncurry getRedeemAddressInfo) grai
let GenesisSummaryInternal {..} =
V.foldr folder (GenesisSummaryInternal 0 minBound minBound)
redeemAddressInfo
let numTotal = length grai
pure CGenesisSummary
{ cgsNumTotal = numTotal
, cgsNumRedeemed = gsiNumRedeemed
, cgsNumNotRedeemed = numTotal - gsiNumRedeemed
, cgsRedeemedAmountTotal = mkCCoin gsiRedeemedAmountTotal
, cgsNonRedeemedAmountTotal = mkCCoin gsiNonRedeemedAmountTotal
}
where
getRedeemAddressInfo
:: MonadDBRead m
=> Address -> Coin -> m GenesisSummaryInternal
getRedeemAddressInfo address initialBalance = do
currentBalance <- fromMaybe minBound <$> ExDB.getAddrBalance address
if currentBalance > initialBalance then
throwM $ Internal $ sformat
("Redeem address "%build%" had "%build%" at genesis, but now has "%build)
address initialBalance currentBalance
else
-- Abusing gsiNumRedeemed here. We'd like to keep
-- only one wrapper datatype, so we're storing an Int
-- with a 0/1 value in a field that we call isRedeemed.
let isRedeemed = if currentBalance == minBound then 1 else 0
redeemedAmount = initialBalance `unsafeSubCoin` currentBalance
amountLeft = currentBalance
in pure $ GenesisSummaryInternal isRedeemed redeemedAmount amountLeft
folder
:: GenesisSummaryInternal
-> GenesisSummaryInternal
-> GenesisSummaryInternal
folder
(GenesisSummaryInternal isRedeemed redeemedAmount amountLeft)
(GenesisSummaryInternal numRedeemed redeemedAmountTotal nonRedeemedAmountTotal) =
GenesisSummaryInternal
{ gsiNumRedeemed = numRedeemed + isRedeemed
, gsiRedeemedAmountTotal = redeemedAmountTotal `unsafeAddCoin` redeemedAmount
, gsiNonRedeemedAmountTotal = nonRedeemedAmountTotal `unsafeAddCoin` amountLeft
}
-- | An address counts as redeemed once its current balance is zero
-- ('minBound' for 'Coin'); a missing balance is treated as zero.
isAddressRedeemed :: MonadDBRead m => Address -> m Bool
isAddressRedeemed address = do
    mBalance <- ExDB.getAddrBalance address
    pure $ fromMaybe minBound mBalance == minBound
getFilteredGrai :: ExplorerMode ctx m => CAddressesFilter -> m (V.Vector (Address, Coin))
getFilteredGrai addrFilt = do
grai <- getGenesisRedeemAddressInfo
case addrFilt of
AllAddresses ->
pure grai
RedeemedAddresses ->
V.filterM (isAddressRedeemed . fst) grai
NonRedeemedAddresses ->
V.filterM (isAddressNotRedeemed . fst) grai
where
isAddressNotRedeemed :: MonadDBRead m => Address -> m Bool
isAddressNotRedeemed = fmap not . isAddressRedeemed
getGenesisAddressInfo
:: (ExplorerMode ctx m)
=> Maybe Word -- ^ pageNumber
-> Maybe Word -- ^ pageSize
-> CAddressesFilter
-> m [CGenesisAddressInfo]
getGenesisAddressInfo mPage mPageSize addrFilt = do
filteredGrai <- getFilteredGrai addrFilt
let pageNumber = fromMaybe 1 $ fmap fromIntegral mPage
pageSize = fromIntegral $ toPageSize mPageSize
skipItems = (pageNumber - 1) * pageSize
requestedPage = V.slice skipItems pageSize filteredGrai
V.toList <$> V.mapM toGenesisAddressInfo requestedPage
where
toGenesisAddressInfo :: ExplorerMode ctx m => (Address, Coin) -> m CGenesisAddressInfo
toGenesisAddressInfo (address, coin) = do
cgaiIsRedeemed <- isAddressRedeemed address
-- Commenting out RSCoin address until it can actually be displayed.
-- See comment in src/Pos/Explorer/Web/ClientTypes.hs for more information.
pure CGenesisAddressInfo
{ cgaiCardanoAddress = toCAddress address
-- , cgaiRSCoinAddress = toCAddress address
, cgaiGenesisAmount = mkCCoin coin
, ..
}
-- | Number of pages needed to list all (filtered) genesis redeem
-- addresses at the requested page size.
getGenesisPagesTotal
    :: ExplorerMode ctx m
    => Maybe Word
    -> CAddressesFilter
    -> m Integer
getGenesisPagesTotal mPageSize addrFilt = do
    addrInfo <- getFilteredGrai addrFilt
    -- Ceiling division over the filtered address count.
    let pageSize = fromIntegral (toPageSize mPageSize)
        numAddrs = length addrInfo
    pure $ fromIntegral $ (numAddrs + pageSize - 1) `div` pageSize
-- | Search the blocks by epoch and slot.
getEpochSlot
:: ExplorerMode ctx m
=> SlotCount
-> EpochIndex
-> Word16
-> m [CBlockEntry]
getEpochSlot epochSlots epochIndex slotIndex = do
-- The slots start from 0 so we need to modify the calculation of the index.
let page = fromIntegral $ (slotIndex `div` 10) + 1
-- Get pages from the database
-- TODO: Fix this Int / Integer thing once we merge repositories
epochBlocksHH <- getPageHHsOrThrow epochIndex page
blunds <- forM epochBlocksHH getBlundOrThrow
forM (getEpochSlots slotIndex (blundToMainBlockUndo blunds)) (toBlockEntry epochSlots)
where
blundToMainBlockUndo :: [Blund] -> [(MainBlock, Undo)]
blundToMainBlockUndo blund = [(mainBlock, undo) | (Right mainBlock, undo) <- blund]
-- Get epoch slot block that's being searched or return all epochs if
-- the slot is @Nothing@.
getEpochSlots
:: Word16
-> [MainBlund]
-> [MainBlund]
getEpochSlots slotIndex' blunds = filter filterBlundsBySlotIndex blunds
where
getBlundSlotIndex
:: MainBlund
-> Word16
getBlundSlotIndex blund = getSlotIndex $ siSlot $ fst blund ^. mainBlockSlot
filterBlundsBySlotIndex
:: MainBlund
-> Bool
filterBlundsBySlotIndex blund = getBlundSlotIndex blund == slotIndex'
-- Either get the @HeaderHash@es from the @Epoch@ or throw an exception.
getPageHHsOrThrow
:: (HasExplorerCSLInterface m, MonadThrow m)
=> EpochIndex
-> Int
-> m [HeaderHash]
getPageHHsOrThrow epoch page =
getEpochBlocksCSLI epoch page >>= maybeThrow (Internal errMsg)
where
errMsg :: Text
errMsg = sformat ("No blocks on epoch "%build%" page "%build%" found!") epoch page
-- | Search the blocks by epoch and epoch page number.
getEpochPage
:: ExplorerMode ctx m
=> SlotCount
-> EpochIndex
-> Maybe Int
-> m (Int, [CBlockEntry])
getEpochPage epochSlots epochIndex mPage = do
-- Get the page if it exists, return first page otherwise.
let page = fromMaybe 1 mPage
-- We want to fetch as many pages as we have in this @Epoch@.
epochPagesNumber <- getEpochPagesOrThrow epochIndex
-- Get pages from the database
-- TODO: Fix this Int / Integer thing once we merge repositories
epochBlocksHH <- getPageHHsOrThrow epochIndex page
blunds <- forM epochBlocksHH getBlundOrThrow
let sortedBlunds = sortBlocksByEpochSlots blunds
let sortedMainBlocks = blundToMainBlockUndo sortedBlunds
cBlocksEntry <- forM sortedMainBlocks (toBlockEntry epochSlots)
pure (epochPagesNumber, cBlocksEntry)
where
blundToMainBlockUndo :: [Blund] -> [(MainBlock, Undo)]
blundToMainBlockUndo blund = [(mainBlock, undo) | (Right mainBlock, undo) <- blund]
-- Either get the @HeaderHash@es from the @Epoch@ or throw an exception.
getPageHHsOrThrow
:: (HasExplorerCSLInterface m, MonadThrow m)
=> EpochIndex
-> Int
-> m [HeaderHash]
getPageHHsOrThrow epoch page' =
getEpochBlocksCSLI epoch page' >>= maybeThrow (Internal errMsg)
where
errMsg :: Text
errMsg = sformat ("No blocks on epoch "%build%" page "%build%" found!") epoch page'
-- | Sorting. Newest first: blocks are ordered by descending in-epoch index.
sortBlocksByEpochSlots
    :: [(Block, Undo)]
    -> [(Block, Undo)]
sortBlocksByEpochSlots blocks = sortOn (Down . getBlockIndex . fst) blocks
  where
    -- | Get the block index number. We start with the index 1 for the
    -- genesis block and add 1 for the main blocks since they start with 1
    -- as well.
    getBlockIndex :: Block -> Int
    getBlockIndex (Left _) = 1
    getBlockIndex (Right block) =
        fromIntegral $ (+1) $ getSlotIndex $ siSlot $ block ^. mainBlockSlot
getStatsTxs
:: forall ctx m. ExplorerMode ctx m
=> Genesis.Config
-> Maybe Word
-> m (Integer, [(CTxId, Byte)])
getStatsTxs genesisConfig mPageNumber = do
-- Get blocks from the requested page
blocksPage <- getBlocksPage (configEpochSlots genesisConfig)
mPageNumber
(Just defaultPageSizeWord)
getBlockPageTxsInfo blocksPage
where
getBlockPageTxsInfo
:: (Integer, [CBlockEntry])
-> m (Integer, [(CTxId, Byte)])
getBlockPageTxsInfo (blockPageNumber, cBlockEntries) = do
blockTxsInfo <- blockPageTxsInfo
pure (blockPageNumber, blockTxsInfo)
where
cHashes :: [CHash]
cHashes = cbeBlkHash <$> cBlockEntries
blockPageTxsInfo :: m [(CTxId, Byte)]
blockPageTxsInfo = concatForM cHashes getBlockTxsInfo
getBlockTxsInfo
:: CHash
-> m [(CTxId, Byte)]
getBlockTxsInfo cHash = do
txs <- getMainBlockTxs (configGenesisHash genesisConfig) cHash
pure $ txToTxIdSize <$> txs
where
txToTxIdSize :: Tx -> (CTxId, Byte)
txToTxIdSize tx = (toCTxId $ hash tx, biSize tx)
--------------------------------------------------------------------------------
-- Helpers
--------------------------------------------------------------------------------
-- | A pure calculation of the page number.
-- Get total pages from the blocks. And we want the page
-- with the example, the page size 10,
-- to start with 10 + 1 == 11, not with 10 since with
-- 10 we'll have an empty page.
-- Could also be `((blocksTotal - 1) `div` pageSizeInt) + 1`.
roundToBlockPage :: Integer -> Integer
roundToBlockPage blocksTotal = divRoundUp blocksTotal pageSizeInt
  where
    -- The DB's default page size, widened to 'Integer'.
    pageSizeInt :: Integer
    pageSizeInt = fromIntegral ExDB.defaultPageSize
-- | A pure function that return the number of blocks.
-- Per the note on 'getBlocksTotal': total number of main blocks equals
-- the chain difficulty of the tip header.
getBlockDifficulty :: Block -> Integer
getBlockDifficulty tipBlock = fromIntegral $ getChainDifficulty $ tipBlock ^. difficultyL
-- | The DB's default page size ('ExDB.defaultPageSize') as a 'Word'.
defaultPageSizeWord :: Word
defaultPageSizeWord = fromIntegral ExDB.defaultPageSize
-- | Resolve an optional client-supplied page size, falling back to
-- 'defaultPageSizeWord' when absent.
toPageSize :: Maybe Word -> Integer
toPageSize mPageSize = fromIntegral (fromMaybe defaultPageSizeWord mPageSize)
-- | Fetch a main block by its client-side hash and return its transactions
-- in dependency order ('topsortTxsOrFail').
getMainBlockTxs :: ExplorerMode ctx m => GenesisHash -> CHash -> m [Tx]
getMainBlockTxs genesisHash cHash = do
    hash' <- unwrapOrThrow $ fromCHash cHash
    blk <- getMainBlock genesisHash hash'
    topsortTxsOrFail withHash $ toList $ blk ^. mainBlockTxPayload . txpTxs
-- | Pair a transaction with its extra info and render the brief client view.
makeTxBrief :: Tx -> TxExtra -> CTxBrief
makeTxBrief tx extra = toTxBrief (TxInternal extra tx)
-- | Lift an 'Either' error message into the monad, throwing 'Internal'
-- on the 'Left' side.
unwrapOrThrow :: ExplorerMode ctx m => Either Text a -> m a
unwrapOrThrow eValue = case eValue of
    Left msg    -> throwM (Internal msg)
    Right value -> pure value
-- | Get transaction from memory (STM) or throw exception.
fetchTxFromMempoolOrFail :: ExplorerMode ctx m => TxId -> m TxAux
fetchTxFromMempoolOrFail txId = do
    memPoolTxs <- localMemPoolTxs
    let memPoolTxsSize = HM.size memPoolTxs
    logDebug $ sformat ("Mempool size "%int%" found!") memPoolTxsSize
    -- Look the tx up in the snapshot; absence is reported as 'Internal'.
    let maybeTxAux = memPoolTxs ^. at txId
    maybeThrow (Internal "Transaction missing in MemPool!") maybeTxAux
  where
    -- type TxMap = HashMap TxId TxAux
    -- Snapshot of the local mempool's transaction map.
    localMemPoolTxs
        :: (MonadIO m, MonadTxpMem ext ctx m)
        => m TxMap
    localMemPoolTxs = do
        memPool <- withTxpLocalData getMemPool
        pure $ memPool ^. mpLocalTxs
-- | All local mempool transactions that already have 'TxExtra' persisted,
-- in reversed topological order; txs without extra info are dropped.
getMempoolTxs :: ExplorerMode ctx m => m [TxInternal]
getMempoolTxs = do
    localTxs <- fmap reverse $ topsortTxsOrFail mkWhTx =<< tlocalTxs
    fmap catMaybes . forM localTxs $ \(id, txAux) -> do
        mextra <- ExDB.getTxExtra id
        forM mextra $ \extra -> pure $ TxInternal extra (taTx txAux)
  where
    -- Snapshot of the node's local (mempool) transactions.
    tlocalTxs :: (MonadIO m, MonadTxpMem ext ctx m) => m [(TxId, TxAux)]
    tlocalTxs = withTxpLocalData getLocalTxs

    -- Attach the tx hash so 'topsortTxs' can resolve dependencies.
    mkWhTx :: (TxId, TxAux) -> WithHash Tx
    mkWhTx (txid, txAux) = WithHash (taTx txAux) txid
-- | Wall-clock start time of the slot a main block belongs to, if known.
getBlkSlotStart :: MonadSlots ctx m => MainBlock -> m (Maybe Timestamp)
getBlkSlotStart blk = getSlotStart $ blk ^. gbHeader . gbhConsensus . mcdSlot
-- | Topologically sort transactions by dependency, throwing 'Internal'
-- when a dependency cycle makes sorting impossible.
topsortTxsOrFail :: (MonadThrow m, Eq a) => (a -> WithHash Tx) -> [a] -> m [a]
topsortTxsOrFail f =
    maybeThrow (Internal "Dependency loop in txs set") .
    topsortTxs f
-- Either get the block from the @HeaderHash@ or throw an exception.
getBlundOrThrow
    :: ExplorerMode ctx m
    => HeaderHash
    -> m Blund
getBlundOrThrow hh =
    getBlundFromHHCSLI hh >>= maybeThrow (Internal "Blund with hash cannot be found!")
-- | Deserialize Cardano or RSCoin address and convert it to Cardano address.
-- Throw exception on failure.
cAddrToAddr :: MonadThrow m => NetworkMagic -> CAddress -> m Address
cAddrToAddr nm cAddr@(CAddress rawAddrText) =
    -- Try decoding address as base64. If both decoders succeed,
    -- the output of the first one is returned
    let mDecodedBase64 =
            rightToMaybe (B64.decode rawAddrText) <|>
            rightToMaybe (B64.decodeUrl rawAddrText)
    in case mDecodedBase64 of
        Just addr -> do
            -- the decoded address can be both the RSCoin address and the Cardano address.
            -- > RSCoin address == 32 bytes
            -- > Cardano address >= 34 bytes
            if (BS.length addr == 32)
                then pure $ makeRedeemAddress nm $ redeemPkBuild addr
                else either badCardanoAddress pure (fromCAddress cAddr)
        Nothing ->
            -- cAddr is in Cardano address format or it's not valid
            either badCardanoAddress pure (fromCAddress cAddr)
  where
    -- Any Cardano-side decode failure collapses to one opaque error.
    badCardanoAddress = const $ throwM $ Internal "Invalid Cardano address!"
-- | Deserialize transaction ID.
-- Throw exception on failure.
cTxIdToTxId :: MonadThrow m => CTxId -> m TxId
cTxIdToTxId cTxId = case fromCTxId cTxId of
    Left _     -> throwM $ Internal "Invalid transaction id!"
    Right txId -> pure txId
-- | Fetch the blund for a header hash, requiring it to be a main block;
-- throws 'Internal' when the block is absent or is a genesis block.
getMainBlund :: ExplorerMode ctx m => GenesisHash -> HeaderHash -> m MainBlund
getMainBlund genesisHash h = do
    (blk, undo) <- getBlund genesisHash h >>= maybeThrow (Internal "No block found")
    either (const $ throwM $ Internal "Block is genesis block") (pure . (,undo)) blk
-- | Like 'getMainBlund' but discards the undo data.
getMainBlock :: ExplorerMode ctx m => GenesisHash -> HeaderHash -> m MainBlock
getMainBlock genesisHash hh = fst <$> getMainBlund genesisHash hh
-- | Get transaction extra from the database, and if you don't find it
-- throw an exception.
getTxExtraOrFail :: MonadDBRead m => TxId -> m TxExtra
getTxExtraOrFail txId =
    maybeThrow (Internal "Transaction not found") =<< ExDB.getTxExtra txId
-- | Resolve a transaction body from its 'TxExtra': from the mempool when
-- it has no blockchain place yet, otherwise from its containing main block.
getTxMain :: ExplorerMode ctx m => GenesisHash -> TxId -> TxExtra -> m Tx
getTxMain genesisHash id TxExtra {..} = case teBlockchainPlace of
    Nothing -> taTx <$> fetchTxFromMempoolOrFail id
    Just (hh, idx) -> do
        mb <- getMainBlock genesisHash hh
        -- The stored index must point inside the block's tx list.
        maybeThrow (Internal "TxExtra return tx index that is out of bounds") $
            atMay (toList $ mb ^. mainBlockTxPayload . txpTxs) $ fromIntegral idx
-- | Get @Page@ numbers from an @Epoch@ or throw an exception.
getEpochPagesOrThrow
    :: (HasExplorerCSLInterface m, MonadThrow m)
    => EpochIndex
    -> m Page
getEpochPagesOrThrow epochIndex =
    getEpochPagesCSLI epochIndex >>= maybeThrow (Internal "No epoch pages.")
-- Silly name for a list index-lookup function.
-- Total, zero-based indexing: 'Nothing' for negative or out-of-range
-- indices, otherwise the element at position @n@.
atMay :: [a] -> Int -> Maybe a
atMay items idx
    | idx < 0   = Nothing
    | otherwise = go items idx
  where
    go []       _ = Nothing
    go (y : _)  0 = Just y
    go (_ : ys) k = go ys (k - 1)
| input-output-hk/pos-haskell-prototype | explorer/src/Pos/Explorer/Web/Server.hs | mit | 40,447 | 0 | 28 | 11,399 | 8,530 | 4,436 | 4,094 | -1 | -1 |
{- Copyright 2013 Gabriel Gonzalez
This file is part of the Suns Search Engine
The Suns Search Engine is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or (at your
option) any later version.
The Suns Search Engine is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
the Suns Search Engine. If not, see <http://www.gnu.org/licenses/>.
-}
{-| 'meld' is a small hack to work around the fact that users can build search
queries from search results, which will give rise to overlapping atoms in
the query which are supposed to be the same atom.
For example, if a user searches for a carboxylic acid originating from an
aspartate, there will be several valid linkers to choose from that connect
to that carboxylic acid. However, if the user selects an aspartic acid
linker, the search query will have two Cγs: one from the linker and one from
the original carboxylic acid. If you tokenize the query, the tokenizer will
match the linker twice: once for each Cγ. 'meld' filters out overlapping
atoms which are probably duplicates (i.e. same residue, same name, and
within 1.0 Å of each other.
There is no need to remove atoms that have different residue or atom names
since these will not generate duplicate matches for motifs. In fact, you
don't want to remove these because then their correspond motifs will no
longer be matched. For example, if you searched for a carboxylic acid from
an aspartate, but then connected a linker from a glutamate, this would
generate an overlapping Cδ from the glutamate and Cγ from the carboxylic
acid, neither of which share the same residue or atom name. You wouldn't
want to remove either of these because then one of the two motifs would no
longer be matched by the tokenizer.
-}
module Meld
( -- * Meld Atoms
meld
) where
import Atom(Atom(name), distSqA)
import Data.List (tails)
{- I chose this cutoff assuming no hydrogen atoms. The smallest non-hydrogen
   bonds are C-C bonds of 1.2 Angstroms, so I rounded down to 1.0 to be safe -}
cutoff :: Double
cutoff = 1.0 -- Angstroms

-- | Squared cutoff, precomputed so 'meld' can compare against squared
-- distances ('distSqA') without taking square roots.
cutoffSq :: Double
cutoffSq = cutoff * cutoff
{-| Remove clashing atoms that share the same 'AtomName' and are within 1.0
    Å of each other. 'meld' arbitrarily keeps the second atom (in list order)
    every time it detects a clash.
-}
meld :: [Atom] -> [Atom]
meld query =
    [ atom
    | (atom : laterAtoms) <- tails query
    , all (keepable atom) laterAtoms
    ]
  where
    -- An atom survives only if no later atom duplicates it, i.e. every
    -- later atom is either far enough away or differently named.
    keepable a1 a2 = distSqA a1 a2 > cutoffSq || name a1 /= name a2
| Gabriel439/suns-search | src/Meld.hs | gpl-3.0 | 2,987 | 0 | 16 | 670 | 170 | 96 | 74 | 15 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hi-IN">
<title>Bug Tracker</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/bugtracker/src/main/javahelp/org/zaproxy/zap/extension/bugtracker/resources/help_hi_IN/helpset_hi_IN.hs | apache-2.0 | 957 | 79 | 66 | 157 | 409 | 207 | 202 | -1 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.