code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
-----------------------------------------------------------------------------
-- |
-- Module : Diagrams.TwoD.Apollonian
-- Copyright : (c) 2011 Brent Yorgey
-- License : BSD-style (see LICENSE)
-- Maintainer : byorgey@cis.upenn.edu
--
-- Generation of Apollonian gaskets. Any three mutually tangent
-- circles uniquely determine exactly two others which are mutually
-- tangent to all three. This process can be repeated, generating a
-- fractal circle packing.
--
-- See J. Lagarias, C. Mallows, and A. Wilks, \"Beyond the Descartes
-- circle theorem\", /Amer. Math. Monthly/ 109 (2002), 338--361.
-- <http://arxiv.org/abs/math/0101066>.
--
-- A few examples:
--
-- > import Diagrams.TwoD.Apollonian
-- > apollonian1 = apollonianGasket 0.01 2 2 2
--
-- <<diagrams/src_Diagrams_TwoD_Apollonian_apollonian1.svg#diagram=apollonian1&width=400>>
--
-- > import Diagrams.TwoD.Apollonian
-- > apollonian2 = apollonianGasket 0.01 2 3 3
--
-- <<diagrams/src_Diagrams_TwoD_Apollonian_apollonian2.svg#diagram=apollonian2&width=400>>
--
-- > import Diagrams.TwoD.Apollonian
-- > apollonian3 = apollonianGasket 0.01 2 4 7
--
-- <<diagrams/src_Diagrams_TwoD_Apollonian_apollonian3.svg#diagram=apollonian3&width=400>>
--
-----------------------------------------------------------------------------
module Diagrams.TwoD.Apollonian
( -- * Circles
Circle(..), mkCircle, center, radius
-- * Descartes' Theorem
, descartes, other, initialConfig
-- * Apollonian gasket generation
, apollonian
-- * Diagram generation
, drawCircle
, drawGasket
, apollonianGasket
) where
import Data.Complex
import qualified Data.Foldable as F
import Diagrams.Prelude hiding (center, radius)
import Control.Arrow (second)
------------------------------------------------------------
-- Circles
------------------------------------------------------------
-- | Representation for circles that lets us quickly compute an
-- Apollonian gasket: a circle is stored by its /bend/ (signed
-- curvature) together with the product of bend and center.  This
-- bend/bend-center representation follows the paper cited in the
-- module header, where both quantities satisfy Descartes' Theorem.
data Circle n = Circle { bend :: n
                         -- ^ The bend is the reciprocal of signed
                         -- radius: a negative radius means the
                         -- outside and inside of the circle are
                         -- switched. The bends of any four mutually
                         -- tangent circles satisfy Descartes'
                         -- Theorem.
                       , cb :: Complex n
                         -- ^ /Product/ of bend and center represented
                         -- as a complex number. Amazingly, these
                         -- products also satisfy the equation of
                         -- Descartes' Theorem.
                       }
  deriving (Eq, Show)
-- | Create a @Circle@ given a signed radius and a location for its center.
mkCircle :: Fractional n =>
            n       -- ^ signed radius
         -> P2 n    -- ^ center
         -> Circle n
mkCircle r p = Circle k (k * x :+ k * y)
  where
    k      = recip r   -- the bend (reciprocal of the signed radius)
    (x, y) = unp2 p
-- | Get the center of a circle, recovered by dividing the stored
-- bend-center product by the bend.
center :: Fractional n => Circle n -> P2 n
center c = p2 (x / b, y / b)
  where
    b        = bend c
    (x :+ y) = cb c
-- | Get the (unsigned) radius of a circle: the absolute value of the
-- reciprocal of its bend.
radius :: Fractional n => Circle n -> n
radius c = abs (1 / bend c)
-- | Lift a unary 'Floating' operation elementwise over both fields of
-- a 'Circle' (the bend and the bend-center product).
liftF :: RealFloat n => (forall a. Floating a => a -> a) -> Circle n -> Circle n
liftF f (Circle b c) = Circle (f b) (f c)

-- | Lift a binary 'Floating' operation elementwise over both fields
-- of two 'Circle's.
liftF2 :: RealFloat n => (forall a. Floating a => a -> a -> a) ->
          Circle n -> Circle n -> Circle n
liftF2 f (Circle b1 cb1) (Circle b2 cb2) = Circle (f b1 b2) (f cb1 cb2)

-- NOTE: the instances below are deliberately partial (e.g. no
-- 'signum', no 'fromRational', only 'sqrt' for Floating); the
-- OPTIONS_GHC -fno-warn-missing-methods pragma at the top of the file
-- silences the warnings.  Only the operations needed by 'descartes'
-- and 'other' are defined.
instance RealFloat n => Num (Circle n) where
  (+) = liftF2 (+)
  (-) = liftF2 (-)
  (*) = liftF2 (*)
  negate = liftF negate
  abs = liftF abs
  fromInteger n = Circle (fromInteger n) (fromInteger n)

instance RealFloat n => Fractional (Circle n) where
  (/) = liftF2 (/)
  recip = liftF recip

-- | The @Num@, @Fractional@, and @Floating@ instances for @Circle@
-- (all simply lifted elementwise over @Circle@'s fields) let us use
-- Descartes' Theorem directly on circles.
instance RealFloat n => Floating (Circle n) where
  sqrt = liftF sqrt
------------------------------------------------------------
-- Descartes' Theorem
------------------------------------------------------------
-- | Descartes' Theorem states that if @b1@, @b2@, @b3@ and @b4@ are
-- the bends of four mutually tangent circles, then
--
-- @
-- b1^2 + b2^2 + b3^2 + b4^2 = 1/2 * (b1 + b2 + b3 + b4)^2.
-- @
--
-- Surprisingly, if we replace each of the @bi@ with the /product/
-- of @bi@ and the center of the corresponding circle (represented
-- as a complex number), the equation continues to hold! (See the
-- paper referenced at the top of the module.)
--
-- @descartes [b1,b2,b3]@ solves for @b4@, returning both solutions.
-- Notably, @descartes@ works for any instance of @Floating@, which
-- includes both @Double@ (for bends), @Complex Double@ (for
-- bend/center product), and @Circle@ (for both at once).
-- Solve Descartes' equation for the fourth bend given the three
-- mutually tangent bends; both solutions are returned.
descartes :: Floating n => [n] -> [n]
descartes [b1, b2, b3] = [s + r, s - r]
  where
    s = b1 + b2 + b3
    r = 2 * sqrt (b1*b2 + b1*b3 + b2*b3)
descartes _ = error "descartes must be called on a list of length 3"
-- | If we have /four/ mutually tangent circles we can choose one of
-- them to replace; the remaining three determine exactly one other
-- circle which is mutually tangent. However, in this situation
-- there is no need to apply 'descartes' again, since the two
-- solutions @b4@ and @b4'@ satisfy
--
-- @
-- b4 + b4' = 2 * (b1 + b2 + b3)
-- @
--
-- Hence, to replace @b4@ with its dual, we need only sum the other
-- three, multiply by two, and subtract @b4@. Again, this works for
-- bends as well as bend/center products.
-- | Replace the fourth circle: its dual is twice the sum of the other
-- three minus itself (see the derivation in the comment above).
other :: Num n => [n] -> n -> n
other xs x = negate x + 2 * sum xs
-- | Generate an initial configuration of four mutually tangent
--   circles, given just the signed bends of three of them.
--
--   The first circle is centered at the origin (its bend-center
--   product is 0); the second is placed on the positive x-axis at
--   distance @1/b1 + 1/b2@ (the sum of the radii, so they are
--   tangent).  The center of the third is recovered from the three
--   pairwise center distances @a@, @b@, @c@ below, and the fourth is
--   the first of the two solutions produced by 'descartes' (which
--   works on whole 'Circle's thanks to the lifted numeric instances).
initialConfig :: RealFloat n => n -> n -> n -> [Circle n]
initialConfig b1 b2 b3 = cs ++ [c4]
  where cs = [Circle b1 0, Circle b2 ((b2/b1 + 1) :+ 0), Circle b3 cb3]
        -- pairwise distances between centers (sums of signed radii)
        a = 1/b1 + 1/b2
        b = 1/b1 + 1/b3
        c = 1/b2 + 1/b3
        -- coordinates of the third center, from the two distance
        -- constraints x^2 + y^2 = b^2 and (x - a)^2 + y^2 = c^2
        x = (b*b + a*a - c*c)/(2*a)
        y = sqrt (b*b - x*x)
        cb3 = b3*x :+ b3*y
        [c4,_] = descartes cs
------------------------------------------------------------
-- Gasket generation
------------------------------------------------------------
-- | All ways of selecting one element from a list, paired with the
-- remaining elements (kept in their original relative order).
select :: [a] -> [(a, [a])]
select = go []
  where
    -- 'pre' holds the already-visited elements in reverse order.
    go _   []     = []
    go pre (x:xs) = (x, reverse pre ++ xs) : go (x:pre) xs
-- | Given a threshold radius and a list of /four/ mutually tangent
-- circles, generate the Apollonian gasket containing those circles.
-- Stop the recursion when encountering a circle with an (unsigned)
-- radius smaller than the threshold.
apollonian :: RealFloat n => n -> [Circle n] -> [Circle n]
apollonian thresh cs
  = cs
    -- For each of the four circles, replace it with its dual relative
    -- to the other three (via 'other') and recurse.  'concatMap'
    -- replaces the original 'concat . map' (same semantics, the
    -- standard idiom).
    ++ concatMap (\(c,cs') -> apollonian' thresh (other cs' c) cs') (select cs)
-- | Generate the portion of the gasket determined by one freshly
-- generated circle @cur@ and the three circles @others@ it was
-- generated from: emit @cur@, then recurse on every way of swapping
-- one of the three parents for its dual with respect to @cur@ and the
-- remaining two.  Recursion stops once @cur@ is smaller than the
-- threshold.  ('concatMap' replaces the original @concat $ map@ —
-- same semantics, the standard idiom.)
apollonian' :: RealFloat n => n -> Circle n -> [Circle n] -> [Circle n]
apollonian' thresh cur others
  | radius cur < thresh = []
  | otherwise
  = cur : concatMap
            (\(c, cs') -> apollonian' thresh (other (cur:cs') c) (cur:cs'))
            (select others)
------------------------------------------------------------
-- Diagram generation
------------------------------------------------------------
-- | Draw a circle with the given line width, transparent fill, at its
-- proper location.
drawCircle :: (Renderable (Path V2 n) b, TypeableFloat n) =>
              n -> Circle n -> QDiagram b V2 n Any
drawCircle w c = circle (radius c) # moveTo (center c)
                                   # lwG w # fcA transparent

-- | Draw a generated gasket, using a line width 0.003 times the
-- radius of the largest circle.
--
-- NOTE(review): 'maximum' is partial, so this diverges on an empty
-- list; lists produced by 'apollonian' always contain the four seed
-- circles, so this is safe in practice.
drawGasket :: (Renderable (Path V2 n) b, TypeableFloat n) =>
              [Circle n] -> QDiagram b V2 n Any
drawGasket cs = F.foldMap (drawCircle w) cs
  where w = (*0.003) . maximum . map radius $ cs

-- | Draw an Apollonian gasket: the first argument is the threshold;
-- the recursion will stop upon reaching circles with radii less than
-- it.  The next three arguments are bends of three circles.
apollonianGasket :: (Renderable (Path V2 n) b, TypeableFloat n)
                    => n -> n -> n -> n -> QDiagram b V2 n Any
apollonianGasket thresh b1 b2 b3 = drawGasket . apollonian thresh $ (initialConfig b1 b2 b3)
|
wherkendell/diagrams-contrib
|
src/Diagrams/TwoD/Apollonian.hs
|
bsd-3-clause
| 8,789
| 0
| 16
| 2,291
| 1,770
| 981
| 789
| 91
| 1
|
{-# OPTIONS -w #-}
module Plugin.Free.Type where
import Control.Monad
import Plugin.Free.Parse
import Data.List
import Plugin.Free.Util
-- | Type variable names.
type TyVar = String
-- | Type constructor names.
type TyName = String

-- | Abstract syntax for Haskell-like types.
data Type
    = TyForall TyVar Type   -- ^ @forall v. t@
    | TyArr Type Type       -- ^ @t1 -> t2@
    | TyTuple [Type]        -- ^ @(t1, ..., tn)@, including @()@
    | TyCons TyName [Type]  -- ^ constructor application, e.g. @Maybe a@
    | TyVar TyVar           -- ^ a type variable
    deriving (Eq, Show)

-- | Pretty-printing precedences: type application binds tighter than
-- the function arrow.
precTYAPP, precARROW :: Int
precTYAPP = 11
precARROW = 10
-- Precedence-aware pretty-printing: 'prettyParen' inserts parentheses
-- only when the surrounding precedence 'p' requires them.
instance Pretty Type where
    prettyP p (TyForall v t)
        = prettyParen (p > 0) (
            text "forall" <+> text v <> text "." <+> prettyP 0 t
          )
    prettyP p (TyArr t1 t2)
        = prettyParen (p > precARROW) (
            -- the argument is printed at a higher precedence than the
            -- result, making '->' print right-associatively
            prettyP (precARROW+1) t1 <+> text "->" <+> prettyP precARROW t2
          )
    prettyP _ (TyTuple [])
        = parens empty
    prettyP _ (TyTuple (t:ts))
        = parens (prettyP 0 t <> prettyTs 0 (text ",") ts)
    -- list types get their special bracket syntax: [t]
    prettyP _ (TyCons "[]" [t])
        = lbrack <> prettyP 0 t <> rbrack
    prettyP p (TyCons cons ts)
        = prettyParen (p > precTYAPP) (
            text cons <> prettyTs (precTYAPP+1) empty ts
          )
    prettyP _ (TyVar v)
        = text v

-- | Print a list of types at precedence @p@, preceding each one with
-- the separator document @c@.
prettyTs :: Int -> Doc -> [Type] -> Doc
prettyTs p c [] = empty
prettyTs p c (t:ts) = c <+> prettyP p t <> prettyTs p c ts
-- | Parse a type from the token stream and normalise the result
-- (see 'normaliseType').
parseType :: ParseS Type
parseType = liftM normaliseType parseType'
-- | Parse a type expression from the token stream: an optional
-- @forall v1 ... vn.@ prefix followed by an arrow type.
parseType' :: ParseS Type
parseType'
    = do
        t <- peekToken
        case t of
            Just IdForall -> getToken >> parseForall
            _ -> parseArrType
    where
    -- After the 'forall' keyword: collect bound variables up to '.'.
    parseForall
        = do
            t <- getToken
            case t of
                Just (QVarId v)
                    -> parseForall >>= \t -> return (TyForall v t)
                Just (QVarSym ".")
                    -> parseType'
                _ -> fail "Expected variable or '.'"
    -- Right-associative function arrows: b1 -> b2 -> ... -> bn.
    parseArrType
        = do
            t1 <- parseBType
            t <- peekToken
            case t of
                Just OpArrow
                    -> getToken >> parseType' >>= \t2 ->
                           return (TyArr t1 t2)
                _ -> return t1
    -- Type application: a constructor followed by argument types.
    parseBType
        = do
            t1 <- parseAType
            case t1 of
                TyCons c ts
                    -> do
                        ts' <- parseBTypes
                        return (TyCons c (ts ++ ts'))
                _ -> return t1
    -- Zero or more argument types (greedy, with 'mplus' backtracking).
    parseBTypes
        = (parseBType >>= \t -> parseBTypes >>= \ts -> return (t:ts))
          `mplus` return []
    -- An atomic type: a type constructor, or something parenthesised,
    -- bracketed, or a variable.
    parseAType
        = parseQTyCon `mplus` parseOtherAType
    -- Special and named constructors: (), (->), (,..,), [], T.
    parseQTyCon
        = do
            t <- getToken
            case t of
                Just OpenParen
                    -> do
                        t <- getToken
                        case t of
                            Just CloseParen
                                -> return (TyCons "()" [])
                            Just OpArrow
                                -> match CloseParen
                                   >> return (TyCons "->" [])
                            Just Comma
                                -> parseQTyConTuple 1
                            _ -> fail "Badly formed type constructor"
                Just OpenBracket
                    -> match CloseBracket >> return (TyCons "[]" [])
                Just (QConId v)
                    -> return (TyCons v [])
                _ -> fail "Badly formed type constructor"
    -- Tuple constructor "(,..,)"; the count passed around is the
    -- arity (it starts at 1 and each comma adds one).
    parseQTyConTuple :: Int -> ParseS Type
    parseQTyConTuple i
        = do
            t <- getToken
            case t of
                Just Comma
                    -> parseQTyConTuple (i+1)
                Just CloseParen
                    -> return (TyCons ("(" ++ take i (repeat ',') ++ ")") [])
                _ -> fail "Badly formed type constructor"
    -- Parenthesised types, list types, and type variables.
    parseOtherAType
        = do
            t1 <- getToken
            case t1 of
                Just OpenParen
                    -> do
                        t <- parseType'
                        parseTuple [t]
                Just OpenBracket
                    -> parseType' >>= \t -> match CloseBracket
                       >> return (TyCons "[]" [t])
                Just (QVarId v)
                    -> return (TyVar v)
                _ -> fail "Badly formed type"
    -- Inside parentheses: either close (single type or tuple), or keep
    -- collecting comma-separated components.  'ts' is accumulated in
    -- reverse order.
    parseTuple ts
        = do
            t1 <- getToken
            case t1 of
                Just CloseParen
                    -> case ts of
                           [t] -> return t
                           _ -> return (TyTuple (reverse ts))
                Just Comma
                    -> do
                        t <- parseType'
                        parseTuple (t:ts)
                -- FIX: previously this case was non-exhaustive, so an
                -- unexpected token (or end of input) inside parentheses
                -- crashed with a pattern-match failure instead of
                -- producing a parse error.
                _ -> fail "Badly formed type"
-- | Normalise a parsed type: close it under a @forall@ binder for
-- each free variable (in first-occurrence order, via 'nub'), and
-- rewrite applications of the built-in constructors @->@ and the
-- tuple constructors into 'TyArr' and 'TyTuple' nodes.
normaliseType :: Type -> Type
normaliseType t
    = let (fvs,nt) = normaliseType' t
      in foldr TyForall nt (nub fvs)
    where
      -- Returns the free variables (with duplicates, in occurrence
      -- order) alongside the rewritten type.
      normaliseType' t@(TyVar v)
          = ([v],t)
      normaliseType' (TyForall v t')
          = let (fvs,t) = normaliseType' t'
            in (filter (/=v) fvs, TyForall v t)
      normaliseType' (TyArr t1 t2)
          = let
              (fvs1,t1') = normaliseType' t1
              (fvs2,t2') = normaliseType' t2
            in
              (fvs1++fvs2, TyArr t1' t2')
      normaliseType' (TyTuple ts)
          = let
              fvsts = map normaliseType' ts
              fvs = concat (map fst fvsts)
              ts' = map snd fvsts
            in (fvs, TyTuple ts')
      normaliseType' (TyCons c ts)
          = let
              fvsts = map normaliseType' ts
              fvs = concat (map fst fvsts)
              ts' = map snd fvsts
            in case c of
                -- a saturated (->) application becomes a TyArr node
                "->" -> case ts' of
                    [t1,t2] -> (fvs, TyArr t1 t2)
                    _ -> error "Arrow type should have 2 arguments"
                _ -> case checkTuple c of
                    Just i
                        -> if i == length ts'
                           then (fvs, TyTuple ts')
                           else error "Tuple type has the wrong number of arguments"
                    Nothing
                        -> (fvs, TyCons c ts')
      -- Recognise tuple constructor names and return their arity:
      -- "()" has arity 0, "(,..,)" has arity (number of commas) + 1.
      -- NOTE(review): the first clause ignores anything after "()",
      -- which looks harmless here because the parser only ever
      -- produces "()" exactly — confirm if constructor names can come
      -- from elsewhere.
      checkTuple ('(':')':cs)
          = Just 0
      checkTuple ('(':cs)
          = checkTuple' 1 cs
      checkTuple _
          = Nothing
      checkTuple' k ")"
          = Just k
      checkTuple' k (',':cs)
          = checkTuple' (k+1) cs
      checkTuple' _ _
          = Nothing

-- | Parse a 'Type' from a string; calls 'error' on a parse failure or
-- when tokens remain after a successful parse.
readType :: String -> Type
readType s
    = case parse parseType (lexer s) of
        ParseSuccess t [] -> t
        ParseSuccess t _ -> error "Extra stuff at end of type"
        ParseError msg -> error msg
-- vim: ts=4:sts=4:expandtab:ai
|
zeekay/lambdabot
|
Plugin/Free/Type.hs
|
mit
| 7,270
| 0
| 21
| 3,801
| 1,942
| 959
| 983
| 192
| 19
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Network.Wai.Handler.Warp.RequestHeader (
parseHeaderLines
) where
import Control.Exception (throwIO)
import Control.Monad (when)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as B (unpack)
import Data.ByteString.Internal (ByteString(..), memchr)
import qualified Data.CaseInsensitive as CI
import Data.Word (Word8)
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Ptr (Ptr, plusPtr, minusPtr, nullPtr)
import Foreign.Storable (peek)
import qualified Network.HTTP.Types as H
import Network.Wai.Handler.Warp.Types
-- $setup
-- >>> :set -XOverloadedStrings
----------------------------------------------------------------
-- | Parse a request's header lines: the first line is the request
-- line, the remaining lines are header fields.  Throws
-- 'NotEnoughLines' when the list is empty, and whatever
-- 'parseRequestLine' throws on a malformed first line.
parseHeaderLines :: [ByteString]
                 -> IO (H.Method
                       ,ByteString -- Path
                       ,ByteString -- Path, parsed
                       ,ByteString -- Query
                       ,H.HttpVersion
                       ,H.RequestHeaders
                       )
parseHeaderLines [] = throwIO $ NotEnoughLines []
parseHeaderLines (firstLine:otherLines) = do
    (method, path', query, httpversion) <- parseRequestLine firstLine
    -- path' is the raw path as sent; path is its parsed form
    let path = H.extractPath path'
        hdr = map parseHeader otherLines
    return (method, path', path, query, httpversion, hdr)
----------------------------------------------------------------
-- |
--
-- >>> parseRequestLine "GET / HTTP/1.1"
-- ("GET","/","",HTTP/1.1)
-- >>> parseRequestLine "POST /cgi/search.cgi?key=foo HTTP/1.0"
-- ("POST","/cgi/search.cgi","?key=foo",HTTP/1.0)
-- >>> parseRequestLine "GET "
-- *** Exception: Warp: Invalid first line of request: "GET "
-- >>> parseRequestLine "GET /NotHTTP UNKNOWN/1.1"
-- *** Exception: Warp: Request line specified a non-HTTP request
-- >>> parseRequestLine "PRI * HTTP/2.0"
-- ("PRI","*","",HTTP/2.0)
parseRequestLine :: ByteString
                 -> IO (H.Method
                       ,ByteString -- Path
                       ,ByteString -- Query
                       ,H.HttpVersion)
parseRequestLine requestLine@(PS fptr off len) = withForeignPtr fptr $ \ptr -> do
    -- Shortest valid request line is "GET / HTTP/1.1" (14 bytes).
    when (len < 14) $ throwIO baderr
    let methodptr = ptr `plusPtr` off
        limptr = methodptr `plusPtr` len
        lim0 = fromIntegral len
    -- first SP separates the method from the path
    pathptr0 <- memchr methodptr 32 lim0 -- ' '
    -- after that SP there must be room for a one-byte path plus
    -- " HTTP/x.y" (1 + 1 + 1 + 8 = 11 bytes from the SP)
    when (pathptr0 == nullPtr || (limptr `minusPtr` pathptr0) < 11) $
        throwIO baderr
    let pathptr = pathptr0 `plusPtr` 1
        lim1 = fromIntegral (limptr `minusPtr` pathptr0)
    -- second SP separates the path(+query) from the HTTP version
    httpptr0 <- memchr pathptr 32 lim1 -- ' '
    when (httpptr0 == nullPtr || (limptr `minusPtr` httpptr0) < 9) $
        throwIO baderr
    let httpptr = httpptr0 `plusPtr` 1
        -- search window for '?' is limited to the path segment, so it
        -- cannot run into the version string
        lim2 = fromIntegral (httpptr0 `minusPtr` pathptr)
    checkHTTP httpptr
    !hv <- httpVersion httpptr
    queryptr <- memchr pathptr 63 lim2 -- '?'
    -- slices below share the original buffer; no copying happens
    let !method = bs ptr methodptr pathptr0
        !path
          | queryptr == nullPtr = bs ptr pathptr httpptr0
          | otherwise = bs ptr pathptr queryptr
        !query
          | queryptr == nullPtr = S.empty
          | otherwise = bs ptr queryptr httpptr0
    return (method,path,query,hv)
  where
    baderr = BadFirstLine $ B.unpack requestLine
    -- Compare a single byte at offset n; throws NonHttp on mismatch.
    check :: Ptr Word8 -> Int -> Word8 -> IO ()
    check p n w = do
        w0 <- peek $ p `plusPtr` n
        when (w0 /= w) $ throwIO NonHttp
    -- Validate the literal "HTTP/" prefix and the '.' separator.
    checkHTTP httpptr = do
        check httpptr 0 72 -- 'H'
        check httpptr 1 84 -- 'T'
        check httpptr 2 84 -- 'T'
        check httpptr 3 80 -- 'P'
        check httpptr 4 47 -- '/'
        check httpptr 6 46 -- '.'
    -- Bytes 5 and 7 are the major/minor version digits ('1' == 49,
    -- '2' == 50, '0' == 48); anything unrecognised maps to HTTP/1.0.
    httpVersion httpptr = do
        major <- peek (httpptr `plusPtr` 5) :: IO Word8
        minor <- peek (httpptr `plusPtr` 7) :: IO Word8
        let version
              | major == 49 = if minor == 49 then H.http11 else H.http10
              | major == 50 && minor == 48 = H.HttpVersion 2 0
              | otherwise = H.http10
        return version
    -- Zero-copy slice [p0, p1) of the original buffer.
    bs ptr p0 p1 = PS fptr o l
      where
        o = p0 `minusPtr` ptr
        l = p1 `minusPtr` p0
----------------------------------------------------------------
-- |
--
-- >>> parseHeader "Content-Length:47"
-- ("Content-Length","47")
-- >>> parseHeader "Accept-Ranges: bytes"
-- ("Accept-Ranges","bytes")
-- >>> parseHeader "Host: example.com:8080"
-- ("Host","example.com:8080")
-- >>> parseHeader "NoSemiColon"
-- ("NoSemiColon","")
-- | Split a raw header line at the first colon into a
-- case-insensitive field name and its value, stripping any leading
-- spaces and tabs from the value.  A line with no colon yields an
-- empty value.
parseHeader :: ByteString -> H.Header
parseHeader raw = (CI.mk name, value)
  where
    (name, rest) = S.break (== 58) raw              -- 58 == ':'
    value        = S.dropWhile isLWS (S.drop 1 rest)
    isLWS w      = w == 32 || w == 9                -- SP or HTAB
|
utdemir/wai
|
warp/Network/Wai/Handler/Warp/RequestHeader.hs
|
mit
| 4,646
| 0
| 17
| 1,247
| 1,197
| 643
| 554
| 88
| 2
|
module LF.ComorphFram
( mapTheory
, mapMorphism
, mapSen
, mapSymb
) where
import LF.Morphism
import LF.Sign
import Common.Result
import Common.DocUtils
import Common.AS_Annotation
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Maybe (fromMaybe, isNothing)
-- | Translate a theory (signature plus named sentences) along a
-- morphism.
mapTheory :: Morphism -> (Sign, [Named EXP]) -> Result (Sign, [Named EXP])
mapTheory mor (s1, ne) =
  let s2 = mapSign mor s1
      ne2 = map (mapNamedSent mor) ne
  in return (s2, ne2)

-- | Translate the sentence inside a named sentence, keeping its
-- annotations untouched.
mapNamedSent :: Morphism -> Named EXP -> Named EXP
mapNamedSent mor ne = ne {sentence = mapSent mor $ sentence ne}

-- | Translate a morphism @m1@ along @mor@: the target signature is
-- mapped, and the symbol map is rebuilt from the value-less
-- definitions (declarations) of @m1@'s source.
mapMorphism :: Morphism -> Morphism -> Result Morphism
mapMorphism mor m1 =
  let s1 = source m1
      t1 = target m1
      t2 = mapSign mor t1
      -- only declarations (definitions without a value) induce
      -- entries in the symbol map
      defs1 = filter (\ (Def _ _ v) -> isNothing v) $ getDefs s1
      (defs2, m2) = makeSigMap ([], Map.empty) $ map (mapMorphH mor)
                    (mkPairs defs1 $ symMap m1)
      s2 = Sign gen_base gen_module defs2
  in return $ Morphism gen_base gen_module gen_morph s2 t2 Unknown m2

-- | Map one (definition, image) pair.  If the mapped symbol already
-- has a value in the target signature, that value is stored in the
-- definition and the pair is dropped from the symbol map (Nothing);
-- otherwise the translated image is kept for the map.
mapMorphH :: Morphism -> (DEF, EXP) -> (DEF, Maybe EXP)
mapMorphH mor (Def sym stp _ , e) =
  case mapSymbH mor sym stp of
    Right sym2 -> let stp2 = mapSent mor stp
                      e2 = mapSent mor e
                      sval2 = takeSymValue sym2 $ getDefs $ target mor
                  in case sval2 of
                       Nothing -> (Def sym2 stp2 Nothing, Just e2)
                       _ -> (Def sym2 stp2 sval2, Nothing)
    Left err -> error $ show err
-- | Pair each definition with the image of its symbol under the
-- symbol map; a symbol missing from the map is a fatal error.
mkPairs :: [DEF] -> Map.Map Symbol EXP -> [(DEF, EXP)]
mkPairs defs m = case defs of
  [] -> []
  Def s t v : ds -> case Map.lookup s m of
    Just e -> (Def s t v, e) : mkPairs ds m
    Nothing -> error $ "mkPairs : " ++
               -- "The symbol "
               show (pretty s) ++
               " is not in the map.\n"

-- | Fold mapped pairs into an accumulated (definitions, symbol map)
-- pair.  Note: definitions are accumulated in reverse order.
makeSigMap :: ([DEF], Map.Map Symbol EXP) -> [(DEF, Maybe EXP)]
           -> ([DEF], Map.Map Symbol EXP)
makeSigMap dms defs = case defs of
  [] -> dms
  (Def s t v, e) : des -> let (ds, m) = dms
                          in case e of
                               Just e' -> makeSigMap (Def s t v : ds,
                                                      Map.insert s e' m) des
                               Nothing -> makeSigMap (Def s t v : ds, m) des

-- | Translate a signature: map its declarations (value-less
-- definitions) and rebuild the signature around them.
mapSign :: Morphism -> Sign -> Sign
mapSign mor sig =
  let ds = filter (\ (Def _ _ v) -> isNothing v) $ getDefs sig
      defs = mapDefs mor ds
  in Sign gen_base gen_module defs

-- | Translate a list of definitions: resolve each symbol in the
-- target signature and pick up its value there, if any.
mapDefs :: Morphism -> [DEF] -> [DEF]
mapDefs mor defs = case defs of
  [] -> []
  Def s t _ : ds -> case mapSymbH mor s t of
    Right s2 -> let t2 = mapSent mor t
                    sval = takeSymValue s2 $ getDefs $ target mor
                in Def s2 t2 sval : mapDefs mor ds
    Left err -> error $ show err
-- | Look up the value attached to a symbol's definition, if any;
-- 'Nothing' both when the symbol has no definition in the list and
-- when its definition carries no value.
takeSymValue :: Symbol -> [DEF] -> Maybe EXP
takeSymValue sym = go
  where
    go [] = Nothing
    go (Def s _ v : rest)
      | s == sym = v
      | otherwise = go rest
-- | Map a symbol to the (singleton) set containing its image.
mapSymb :: Morphism -> Sign -> Symbol -> Set.Set Symbol
mapSymb mor sig sym = Set.singleton $ mapSymb' mor sig sym

-- | Map a symbol, calling 'error' when it is not in the signature or
-- cannot be resolved in the target (see 'mapSymbH').
mapSymb' :: Morphism -> Sign -> Symbol -> Symbol
mapSymb' mor sig sym =
  let symType = getSymType sym sig
  in case symType of
       Just symT -> case mapSymbH mor sym symT of
         Right s -> s
         Left err -> error $ "mapSymb' : " ++ show err
       Nothing -> error $ "mapSymb' : The symbol " ++
                  show (pretty sym) ++
                  " is not in the signature.\n"

-- | Resolve a symbol in the target signature via the translated type
-- of the symbol:
--
--   * if the symbol itself has that type in the target, keep it;
--   * otherwise, if exactly one target symbol has the type, take it;
--   * with several candidates, restrict to the target's local symbols
--     and retry;
--   * no candidate, or still several, is reported as 'Left'.
mapSymbH :: Morphism -> Symbol -> EXP -> Either String Symbol
mapSymbH mor sym symType' =
  let sig2 = target mor
      symType = mapSent mor symType'
  in if Just symType == getSymType sym sig2
     then Right sym
     else let syms = getSymsOfType symType sig2
          in case Set.toList syms of
               [s'] -> Right s'
               [] -> Left $ noSymError sym symType
               _ -> let locals = getLocalSyms sig2
                        inter = Set.intersection syms locals
                    in case Set.toList inter of
                         [s'] -> Right s'
                         [] -> Left $ noSymError sym symType
                         _ -> Left $ manySymError sym symType

-- | Translate a sentence, wrapped in 'Result' (total wrapper around
-- 'mapSent').
mapSen :: Morphism -> EXP -> Result EXP
mapSen mor ex = return $ mapSent mor ex
-- | Translate a sentence along a morphism; calls 'error' when the
-- underlying 'translate' fails.
--
-- FIX: the two concatenated error-message literals were missing a
-- separating space ("morphismcould not").
mapSent :: Morphism -> EXP -> EXP
mapSent m e =
  let re = translate m e
  in fromMaybe (error $ "The sentence morphism " ++
                "could not be performed.\n") re
{-
-- mapSent synM lmod_target lmod_source modM e
mapSent :: Morphism -> Morphism -> Morphism -> Morphism -> EXP -> Result EXP
mapSent synM lmodTarget lmodSource modM e =
let route1 = compMorph synM lmodTarget
route2 = compMorph lmodSource modM
em1 = translate route1 e
em2 = translate route2 e
if (em1 == em2)
then let re = translate synM e
in case re of
Nothing -> fail $ "The sentence morphism" ++
"could not be performed.\n"
Just ex -> ex
else fail $
-}
-- ERROR MESSAGES

-- | No target symbol has the required type/kind.
noSymError :: Symbol -> EXP -> String
noSymError s t = "Symbol " ++ show (pretty s) ++
  " cannot be mapped to anything as the target signature contains" ++
  " no symbols of type/kind " ++ show (pretty t) ++ "."

-- | More than one target symbol has the required type/kind.
manySymError :: Symbol -> EXP -> String
manySymError s t = "Symbol " ++ show (pretty s) ++
  " cannot be mapped to anything as the target signature contains" ++
  " more than one symbol of type/kind " ++ show (pretty t) ++ "."
|
mariefarrell/Hets
|
LF/ComorphFram.hs
|
gpl-2.0
| 5,985
| 0
| 19
| 2,305
| 1,818
| 909
| 909
| 119
| 6
|
module PackageTests.BuildDeps.InternalLibrary0.Check where
import Control.Monad
import Data.Version
import PackageTests.PackageTester
import System.FilePath
import Test.Tasty.HUnit
-- | Build the InternalLibrary0 test package and assert that the build
-- fails; from Cabal 1.7 onwards the output must additionally mention
-- the internal library.
suite :: Version -> FilePath -> Assertion
suite cabalVersion ghcPath = do
    let spec = PackageSpec
            { directory = "PackageTests" </> "BuildDeps" </> "InternalLibrary0"
            , configOpts = []
            , distPref = Nothing
            }
    result <- cabal_build spec ghcPath
    assertBuildFailed result
    when (cabalVersion >= Version [1, 7] []) $ do
        let sb = "library which is defined within the same package."
        -- In 1.7 it should tell you how to enable the desired behaviour.
        assertOutputContains sb result
|
Helkafen/cabal
|
Cabal/tests/PackageTests/BuildDeps/InternalLibrary0/Check.hs
|
bsd-3-clause
| 742
| 0
| 13
| 180
| 162
| 86
| 76
| 17
| 1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="el-GR">
<title>GraalVM JavaScript</title>
<maps>
<homeID>graaljs</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/graaljs/src/main/javahelp/help_el_GR/helpset_el_GR.hs
|
apache-2.0
| 967
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
-- NOTE(review): this module appears to be refactoring-tool output (a
-- "remove constructor" test result); the 'error' calls stand in for
-- the removed zero constructor.  Confirm before editing further.
module Pat2 where
data P = S P deriving Show
zero :: P
zero = (error "Z no longer defined for P at line: 4")
two :: P
two = S ( S (error "Z no longer defined for P at line: 4"))
|
SAdams601/HaRe
|
old/testing/removeCon/Pat2_TokOut.hs
|
bsd-3-clause
| 183
| 0
| 9
| 48
| 57
| 32
| 25
| 6
| 1
|
{-# LANGUAGE BangPatterns, TemplateHaskell #-}
module Distribution.Server.Packages.Index (
writeIncremental,
TarIndexEntry(..),
legacyExtras,
writeLegacy,
) where
import qualified Codec.Archive.Tar as Tar
( write )
import qualified Codec.Archive.Tar.Entry as Tar
( Entry(..), fileEntry, toTarPath, Ownership(..) )
import Distribution.Server.Packages.PackageIndex (PackageIndex)
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Distribution.Server.Framework.MemSize
import Distribution.Server.Packages.Types
( CabalFileText(..), PkgInfo(..)
, pkgLatestCabalFileText, pkgLatestUploadInfo
)
import Distribution.Server.Packages.Metadata
import Distribution.Server.Users.Users
( Users, userIdToName )
import Distribution.Server.Users.Types
( UserName(..) )
import Distribution.Text
( display )
import Distribution.Package
( Package, PackageId, PackageName(..), packageName, packageVersion )
import Data.Time.Clock
( UTCTime )
import Data.Time.Clock.POSIX
( utcTimeToPOSIXSeconds )
import Data.Int (Int64)
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Vector as Vec
import Data.ByteString.Lazy (ByteString)
import System.FilePath.Posix
import Data.Maybe (catMaybes)
-- | Entries used to construct the contents of the hackage index tarball
--
data TarIndexEntry =
    -- | Package cabal files
    --
    -- We keep a copy of the UserName because usernames can change (and if they
    -- do, old entries in the index should remain the same); similarly, we
    -- keep a copy of the upload time because the upload time of a package
    -- can also be changed (this is used during mirroring, for instance).
    --
    -- The UTCTime and userName are used as file metadata in the tarball.
    CabalFileEntry !PackageId !RevisionNo !UTCTime !UserName

    -- | Package metadata
    --
    -- We add these whenever a new package tarball is uploaded.
    --
    -- This metadata entry can be for any revision of the packages
    -- (not necessarily the latest) so we need to know the revision number.
    --
    -- We do not currently allow the upload time of package tarballs to be
    -- changed (though it is not clear why not, TODO), and it is conceivable
    -- we may change this, so we record the original upload time.
  | MetadataEntry !PackageId !RevisionNo !UTCTime

    -- | Additional entries that we add to the tarball
    --
    -- This is currently used for @preferred-versions@.
  | ExtraEntry !FilePath !ByteString !UTCTime
  deriving (Eq, Show)

-- | Index of a cabal-file revision within a package's revision history.
type RevisionNo = Int

instance MemSize TarIndexEntry where
  memSize (CabalFileEntry a b c d) = memSize4 a b c d
  memSize (MetadataEntry a b c) = memSize3 a b c
  memSize (ExtraEntry a b c) = memSize3 a b c

deriveSafeCopy 0 'base ''TarIndexEntry
-- Construct, with the specified user database, extra top-level files, and
-- a package index, an index tarball. This tarball has the modification times
-- and uploading users built-in.
writeIncremental :: PackageIndex PkgInfo -> [TarIndexEntry] -> ByteString
writeIncremental pkgs =
    Tar.write . catMaybes . map mkTarEntry
  where
    -- This should never return Nothing, it'd be an internal error but just
    -- in case we'll skip them
    mkTarEntry :: TarIndexEntry -> Maybe Tar.Entry
    mkTarEntry (CabalFileEntry pkgid revno timestamp username) = do
      -- the package and the requested cabal-file revision must still
      -- exist in the index
      pkginfo <- PackageIndex.lookupPackageId pkgs pkgid
      cabalfile <- fmap (cabalFileByteString . fst) $
                   pkgMetadataRevisions pkginfo Vec.!? revno
      tarPath <- either (const Nothing) Just $
                 Tar.toTarPath False fileName
      -- bang pattern forces the stamped entry eagerly
      let !tarEntry = addTimestampAndOwner timestamp username $
                      Tar.fileEntry tarPath cabalfile
      return tarEntry
      where
        PackageName pkgname = packageName pkgid
        -- layout: <pkg>/<version>/<pkg>.cabal
        fileName = pkgname </> display (packageVersion pkgid)
                           </> pkgname <.> "cabal"
    mkTarEntry (MetadataEntry pkgid revno timestamp) = do
      pkginfo <- PackageIndex.lookupPackageId pkgs pkgid
      let (filePath, content) = computePkgMetadata pkginfo revno
      tarPath <- either (const Nothing) Just $ Tar.toTarPath False filePath
      -- metadata entries are attributed to the "Hackage" user
      let !tarEntry = addTimestampAndOwner timestamp (UserName "Hackage") $
                      Tar.fileEntry tarPath content
      return tarEntry
    mkTarEntry (ExtraEntry fileName content timestamp) = do
      tarPath <- either (const Nothing) Just $
                 Tar.toTarPath False fileName
      let !tarEntry = addTimestampAndOwner timestamp (UserName "Hackage") $
                      Tar.fileEntry tarPath content
      return tarEntry
    -- Stamp a tar entry with the given modification time and owner.
    addTimestampAndOwner timestamp (UserName username) entry =
      entry {
        Tar.entryTime = utcToUnixTime timestamp,
        Tar.entryOwnership = Tar.Ownership {
          Tar.ownerName = username,
          Tar.groupName = "Hackage",
          Tar.ownerId = 0,
          Tar.groupId = 0
        }
      }
-- | Convert a UTC time to whole seconds since the Unix epoch
-- (fractional seconds are truncated toward zero).
utcToUnixTime :: UTCTime -> Int64
utcToUnixTime t = truncate (utcTimeToPOSIXSeconds t)
-- | Extract the extra files (e.g. @preferred-versions@) from an
-- update log, keyed by path.  Later entries in the log override
-- earlier ones; this is intentional.
legacyExtras :: [TarIndexEntry] -> Map String (ByteString, UTCTime)
legacyExtras entries = go entries Map.empty
  where
    go :: [TarIndexEntry]
       -> Map String (ByteString, UTCTime)
       -> Map String (ByteString, UTCTime)
    go [] acc = acc
    go (e : rest) acc = case e of
      -- a later insert for the same path wins (override semantics)
      ExtraEntry fp bs time -> go rest (Map.insert fp (bs, time) acc)
      -- CabalFileEntry simply points into the package DB; the legacy
      -- index extracts the latest cabal files from the DB directly.
      CabalFileEntry{} -> go rest acc
      -- The legacy index does not contain the TUF metadata.
      MetadataEntry{} -> go rest acc
-- | Write tarball in legacy format
--
-- "Legacy format" here refers to prior to the introduction of the incremental
-- index, and contains the packages in order of packages/versions (for better
-- compression), contains at most one preferred-version per package (important
-- because of a bug in cabal which would otherwise merge all preferred-versions
-- files for a package), and does not contain the TUF files.
writeLegacy :: Users -> Map String (ByteString, UTCTime) -> PackageIndex PkgInfo -> ByteString
writeLegacy users =
    writeLegacyAux (cabalFileByteString . pkgLatestCabalFileText) setModTime
  . extraEntries
  where
    -- Stamp a package's entry with its latest upload time and user.
    setModTime pkgInfo entry =
      let (utime, uuser) = pkgLatestUploadInfo pkgInfo in
      entry {
        Tar.entryTime = utcToUnixTime utime,
        Tar.entryOwnership = Tar.Ownership {
          Tar.ownerName = userName uuser,
          Tar.groupName = "Hackage",
          Tar.ownerId = 0,
          Tar.groupId = 0
        }
      }
    userName = display . userIdToName users
    -- Turn the extras map into tar entries.  Paths that do not fit the
    -- tar format are silently dropped: the failed 'Right' pattern
    -- match in the list monad yields [].
    extraEntries emap = do
      (path, (entry, mtime)) <- Map.toList emap
      Right tarPath <- return $ Tar.toTarPath False path
      return $ (Tar.fileEntry tarPath entry) { Tar.entryTime = utcToUnixTime mtime }
-- | Create an uncompressed tar repository index file as a 'ByteString'.
--
-- Takes a couple functions to turn a package into a tar entry. Extra
-- entries are also accepted.
--
-- This used to live in Distribution.Server.Util.Index.
writeLegacyAux :: Package pkg
               => (pkg -> ByteString)              -- ^ render the package's cabal file
               -> (pkg -> Tar.Entry -> Tar.Entry)  -- ^ post-process each entry
               -> [Tar.Entry]                      -- ^ extra entries, prepended
               -> PackageIndex pkg
               -> ByteString
writeLegacyAux externalPackageRep updateEntry extras =
    Tar.write . (extras++) . map entry . PackageIndex.allPackages
  where
    entry pkg = updateEntry pkg
              . Tar.fileEntry tarPath
              $ externalPackageRep pkg
      where
        -- NOTE(review): partial match — a 'toTarPath' failure (path too
        -- long for the tar format) crashes here, unlike in
        -- 'writeIncremental' where such entries are skipped.
        Right tarPath = Tar.toTarPath False fileName
        PackageName name = packageName pkg
        fileName = name </> display (packageVersion pkg)
                        </> name <.> "cabal"
|
ocharles/hackage-server
|
Distribution/Server/Packages/Index.hs
|
bsd-3-clause
| 8,302
| 0
| 15
| 2,071
| 1,649
| 892
| 757
| 151
| 4
|
module Zipper () where
import Prelude hiding (reverse)
import Data.Set
-- | A cursor into a non-empty collection: one focused element plus
-- the elements to its left ('up') and right ('down').
data Stack a = Stack { focus :: !a -- focused thing in this set
                     , up :: [a] -- jokers to the left
                     , down :: [a] } -- jokers to the right
-- LiquidHaskell alias: a duplicate-free list not containing N.
{-@ type UListDif a N = {v:[a] | ((not (Set_mem N (listElts v))) && (Set_emp (listDup v)))} @-}
-- Refined data declaration: neither side contains the focus.
{-@
data Stack a = Stack { focus :: a
                     , up :: UListDif a focus
                     , down :: UListDif a focus }
@-}
-- Measure: the set of elements occurring more than once in a list.
{-@
  measure listDup :: [a] -> (Set a)
  listDup([]) = {v | Set_emp v }
  listDup(x:xs) = {v | v = if (Set_mem x (listElts xs)) then (Set_cup (Set_sng x) (listDup xs)) else (listDup xs) }
  @-}
-- A "unique" stack: up and down share no elements.
{-@ type UStack a = {v:Stack a |(Set_emp (Set_cap (listElts (getUp v)) (listElts (getDown v))))}@-}
-- Measures projecting the three fields of a Stack.
{-@ measure getFocus :: forall a. (Stack a) -> a
    getFocus (Stack focus up down) = focus
  @-}
{-@ measure getUp :: forall a. (Stack a) -> [a]
    getUp (Stack focus up down) = up
  @-}
{-@ measure getDown :: forall a. (Stack a) -> [a]
    getDown (Stack focus up down) = down
  @-}
-- QUALIFIERS
-- Stub bindings: only their refinement signatures matter, seeding
-- LiquidHaskell's qualifier inference; the values are never run.
{-@ q :: x:a -> {v:[a] |(not (Set_mem x (listElts v)))} @-}
q :: a -> [a]
q = undefined
{-@ q1 :: x:a -> {v:[a] |(Set_mem x (listElts v))} @-}
q1 :: a -> [a]
q1 = undefined
{-@ q0 :: x:a -> {v:[a] |(Set_emp(listDup v))} @-}
q0 :: a -> [a]
q0 = undefined
{-@ focusUp :: UStack a -> UStack a @-}
-- | Move focus one element "up"; when the up-list is empty, wrap
-- around by reversing focus-plus-down and focusing its head.
focusUp :: Stack a -> Stack a
focusUp (Stack t [] rs) = Stack x xs [] where (x:xs) = reverse (t:rs)
focusUp (Stack t (l:ls) rs) = Stack l ls (t:rs)
{-@ focusDown :: UStack a -> UStack a @-}
-- | Dual of 'focusUp': flip the stack, move up, flip back.
focusDown :: Stack a -> Stack a
focusDown = reverseStack . focusUp . reverseStack
-- | reverse a stack: up becomes down and down becomes up.
{-@ reverseStack :: UStack a -> UStack a @-}
reverseStack :: Stack a -> Stack a
reverseStack (Stack t ls rs) = Stack t rs ls
-- TODO ASSUMES
-- Assumed (left 'undefined'): the refinement promises that reversing
-- a duplicate-free list yields a duplicate-free list.
{-@ reverse :: {v:[a] | (Set_emp (listDup v))} -> {v:[a]|(Set_emp (listDup v))} @-}
reverse :: [a] -> [a]
reverse = undefined
|
mightymoose/liquidhaskell
|
tests/pos/zipper0.hs
|
bsd-3-clause
| 2,049
| 0
| 9
| 564
| 323
| 185
| 138
| 23
| 1
|
-- !!! ds020 -- lazy patterns (in detail)
--
{-# LANGUAGE NPlusKPatterns #-}
module ShouldCompile where
-- Lazy (irrefutable) function-argument patterns: matching is
-- deferred until a bound variable is demanded.
a ~([],[],[]) = []
a ~(~[],~[],~[]) = []
b ~(x:xs:ys) = []
b ~(~x: ~xs: ~ys) = []
c ~x ~ _ ~11111 ~3.14159265 = x
-- n+k patterns (NPlusKPatterns) under laziness.
d 11 = 4
d 12 = 3
d ~(n+4) = 2
d ~(n+43) = 1
d ~(n+999) = 0
-- As-patterns combined with lazy patterns.
f ~(x@[]) = []
f x@(~[]) = []
-- Nested lazy patterns collapse into a single deferred match.
g ~(~(~(~([])))) = []
-- pattern bindings (implicitly lazy)
([],[],[]) = ([],[],[])
(~[],~[],~[]) = ([],[],[])
(x1: xs1: ys1) = []
(~x: ~xs: ~ys) = []
-- Guarded pattern bindings: which alternative applies is only fixed
-- when a bound variable is forced.
(x2 : xs2: ys2) | eq2 = []
                | eq3 = [x2]
                | eq4 = [x2]
                | True = []
  where
    eq2 = (2::Int) == (4::Int)
    eq3 = (3::Int) == (3::Int)
    eq4 = (4::Int) == (2::Int)
(x3,y3) | x3 > 3 = (4, 5)
        | x3 <= 3 = (2, 3)
-- above: x & y should both be \bottom.
(x4,(y4,(z4,a4))) | eq2 = ('a',('a',('a','a')))
                  | eq3 = ('b',('b',('b','b')))
                  | eq4 = ('c',('c',('c','c')))
                  | True = ('d',('d',('d','d')))
  where
    eq2 = (2::Int) == (4::Int)
    eq3 = (3::Int) == (3::Int)
    eq4 = (4::Int) == (2::Int)
|
siddhanathan/ghc
|
testsuite/tests/deSugar/should_compile/ds020.hs
|
bsd-3-clause
| 1,070
| 9
| 14
| 336
| 752
| 407
| 345
| 35
| 1
|
module Data.Source.ByteString where
import qualified Data.ByteString as BS
import Data.Monoid
import Data.Source hiding ( head )
import Data.Word
import Prelude hiding ( head, null, take )
-- | Yield the first byte of the stream together with a source
-- positioned just past it; empty chunks are skipped.
head :: Monad m => Transducer m c BS.ByteString (Word8, Source m c BS.ByteString)
head = whenChunk f
  where
    f a sa | BS.null a = head sa -- skip empty chunks
           | otherwise = let h = BS.head a
                             t = BS.tail a
                         in Source $ pure $ Chunk (h, prepend t sa)
                                                  (head $ prepend t sa)
-- | Split off the first @i@ bytes of the stream, yielding them
-- together with the remaining source.
take :: (Monad m, Integral i) => i -> Transducer m c BS.ByteString (BS.ByteString, Source m c BS.ByteString)
take i = whenChunk f
  where
    -- Per chunk, three cases: shorter than, exactly, or longer than
    -- the requested count.
    f a sa | j < i = whenChunk g $ take (i - j) sa
           | j == i = Source $ pure $ Chunk (a, sa) (take i sa)
           | otherwise = Source
                       $ pure
                       $ Chunk
                           (ta, prepend da sa)
                           (take i $ Source $ pure $ Chunk da $ prepend da sa)
      where
        j = fromIntegral $ BS.length a
        ta = BS.take (fromIntegral i) a -- requested prefix
        da = BS.drop (fromIntegral i) a -- surplus pushed back upstream
        -- Chunk was short: splice it onto whatever the recursive
        -- 'take' collected from the rest of the stream.
        g (b, sb) _ = Source $ pure $ Chunk (a <> b, sb) (take i sb)
|
lpeterse/haskell-source
|
src/Data/Source/ByteString.hs
|
mit
| 1,263
| 0
| 15
| 518
| 500
| 254
| 246
| 26
| 1
|
module HaskellMistakes where
import Control.Monad.State
-- often put an equals by mistake after
-- fib n =
-- |
--fib :: Int -> Int
--fib n
-- | n <= 1 = 1
-- | otherwise =
-- fib (n - 1) + fib (n - 2)
-- | The Fibonacci numbers as an infinite list, expressed as a
-- running sum over the sequence itself.
fibs :: [Integer]
fibs = 0 : scanl (+) 1 fibs
-- | Iterative Fibonacci: step the pair (a, b) to (b, a + b) n times
-- inside the State monad, then read off the first component.
fib n = evalState (do
  forM_ [0..(n-1)] $ \_ -> modify (\(a, b) -> (b, a + b))
  gets fst) (0, 1)
|
NickAger/LearningHaskell
|
ParallelConcurrent/ForkExperiments.hsproj/HaskellMistakes.hs
|
mit
| 396
| 0
| 15
| 127
| 143
| 80
| 63
| 9
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Biri.Backend.Routes.Automaton
( Automaton(..)
, transform
) where
import Control.Applicative (pure, (<$>), (<*>))
import Control.Arrow (first, second, (&&&))
import Control.Exception (assert)
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict as IM
import Data.IntSet (IntSet)
import qualified Data.IntSet as IS
import Data.List (foldl', nub, partition)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe)
import Data.Ord (comparing)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Vector (Vector)
import qualified Data.Vector as V
import Biri.Language
-- | Transforms a list of resources into an automaton.
--
-- The overall structure is as follows:
-- 1. The resources a grouped by common prefixes, which yields
-- a tree representing the branching points.
-- This ensures that each segment is compiled once.
-- 2. The tree is then converted into an automaton.
-- 3. The automaton is then turned into a deterministic automaton.
transform :: [Resource] -> Automaton
-- Pipeline of the three stages documented above: prefix tree,
-- nondeterministic automaton, then subset construction.
transform = determinize . fromTree . mkTree
-- | A representation of an automaton.
--
-- The set of states of an automaton with @n@ states equals {0,...,n-1}.
data Automaton = Automaton
  { states :: Int -- ^ number of states
  , delta :: V.Vector (Map Char [Int]) -- ^ per-state transitions; edges on @'\0'@ serve as epsilon moves
  , matching :: IntSet -- ^ accepting states
  , handlers :: IntMap [Handler] -- ^ handlers attached to accepting states
  } deriving (Show, Eq, Ord)
-- | Returns true iff the given state is a matching state.
isMatching :: Int -> Automaton -> Bool
isMatching k aut = k `IS.member` matching aut
-- | Returns the list of handlers associated with given state
-- (empty when none are attached).
getHandlers :: Int -> Automaton -> [Handler]
getHandlers k aut = IM.findWithDefault [] k (handlers aut)
-- | A tree represents a branching point: each child pairs the
-- automaton for a shared segment with the subtree that follows it.
data Tree = Tree [(Automaton, Tree)]
  deriving (Show, Eq, Ord)
-- | Compiles a segment into an automaton.
--
-- Currently, the following patterns can be compiled: Int, String.
-- NOTE(review): other 'Pattern' constructor names fall through this
-- case without a match — confirm that callers never pass them.
mkAutomaton :: Segment -> Automaton
mkAutomaton segment = case segment of
  -- A literal: a chain of states, one transition per character.
  Fixed str -> Automaton
    { states = T.length str + 1
    , delta = V.fromList (zipWith M.singleton (T.unpack str) (map (:[]) [1..]) ++ [M.empty])
    , matching = IS.singleton (T.length str)
    , handlers = IM.empty
    }
  Pattern (Constructor typ) -> case typ of
    -- Int: either a single '0', or a nonzero digit followed by
    -- arbitrarily many digits (no leading zeros).
    "Int" -> Automaton
      { states = 3
      , delta = V.fromList
          [ M.insert '0' [1] (M.fromList $ zip ['1'..'9'] (repeat [2]))
          , M.empty
          , M.fromList $ zip ['0'..'9'] (repeat [2])
          ]
      , matching = IS.fromList [1,2]
      , handlers = IM.empty
      }
    -- String: a single self-looping state accepting any 7-bit
    -- character except the segment separator '/'.
    "String" -> Automaton
      { states = 1
      , delta = V.singleton . M.delete '/' . M.fromList $ zip (take 128 ['\0'..]) (repeat [0])
      , matching = IS.singleton 0
      , handlers = IM.empty
      }
-- | @concats [a1, ..., an]@ represents the concatenation of the automata @a1, ..., an@.
concats :: [Automaton] -> Automaton
concats auts
  | null auts = error "Biri.Routes.Automaton.concats: empty list"
  | null (tail auts) = head auts
  | otherwise = Automaton
      { states = last offsets
      , delta = V.concat (zipWith cross auts offsets)
      , matching = let finalAut = last auts
                   in IS.map (subtract (states finalAut) . (+ last offsets)) (matching finalAut)
      , handlers = IM.unions (zipWith (IM.mapKeys . (+)) offsets (map handlers auts))
      }
  where
    -- Running start offset of each component in the combined
    -- state numbering.
    offsets :: [Int]
    offsets = scanl (+) 0 (map states auts)
    -- Shift one component's transitions by its offset and add
    -- epsilon ('\0') edges from its matching states into the next
    -- component's start state.
    cross :: Automaton -> Int -> V.Vector (Map Char [Int])
    cross aut offset = V.imap (\i -> epsilon i . M.map (map (+ offset))) (delta aut)
      where
        epsilon :: Int -> Map Char [Int] -> Map Char [Int]
        epsilon i | offset + states aut >= last offsets = id
                  | IS.member i (matching aut) = M.insertWith (++) '\0' [offset + states aut]
                  | otherwise = id
-- | @unions [a1, ..., an]@ represents the union of the automata @a1, ..., an@.
unions :: [Automaton] -> Automaton
unions auts
  | null auts = error "Biri.Routes.Automaton.unions: empty list"
  | null (tail auts) = head auts
  | otherwise = Automaton
      { states = last offsets
      , delta = V.concat (bifurcation : deltas)
      , matching = IS.unions (zipWith (IS.map . (+)) offsets (map matching auts))
      , handlers = IM.unions (zipWith (IM.mapKeys . (+)) offsets (map handlers auts))
      }
  where
    -- Offsets start at 1 because state 0 is a fresh start state.
    offsets :: [Int]
    offsets = scanl (+) 1 (map states auts)
    -- Epsilon ('\0') edges from the fresh start state into every
    -- component's (shifted) start state.
    bifurcation :: V.Vector (Map Char [Int])
    bifurcation = V.singleton (M.singleton '\0' (init offsets))
    deltas :: [V.Vector (Map Char [Int])]
    deltas = zipWith (V.map . M.map . map . (+)) offsets (map delta auts)
-- | Turns a list of resources into a tree by grouping common prefixes.
mkTree :: [Resource] -> Tree
mkTree = uncurry root . first (concatMap (\(Resource _ hndls) -> hndls))
       . partition (\(Resource (URI segs) _) -> null segs)
  where
    -- The root always matches "/" and carries the handlers of any
    -- resource whose URI had no segments at all.
    root :: [Handler] -> [Resource] -> Tree
    root hndls = Tree . (:[]) . (,) (install (mkAutomaton (Fixed "/")) hndls) . go
    go :: [Resource] -> Tree
    go = Tree . map (\(seg, (hndls, rs)) -> (install (mkAutomaton seg) hndls, go rs))
       . M.toList . subsume
    -- Attach handlers to every matching state of the automaton.
    install :: Automaton -> [Handler] -> Automaton
    install aut hndls
      | null hndls = aut
      | otherwise = aut { handlers = IM.fromAscList . map (flip (,) hndls) $ IS.toAscList (matching aut) }
    -- Group resources by their first segment. A strict fold
    -- ('foldl'', already imported) avoids accumulating a chain of
    -- thunked map updates over long resource lists.
    subsume :: [Resource] -> Map Segment ([Handler], [Resource])
    subsume = foldl' (\m -> M.insertWith merge <$> key <*> value <*> pure m) M.empty
      where
        merge :: ([a],[b]) -> ([a],[b]) -> ([a],[b])
        merge (xs1, ys1) (xs2, ys2) = (xs1 ++ xs2, ys1 ++ ys2)
        key :: Resource -> Segment
        key (Resource (URI segs) _)
          | null segs = error "Biri.Routes.Automaton.mkTree: empty list"
          | otherwise = head segs
        value :: Resource -> ([Handler], [Resource])
        value (Resource (URI segs) hndls)
          | null segs = error "Biri.Routes.Automaton.mkTree: empty list"
          | null (tail segs) = (hndls, [])
          | otherwise = ([], [Resource (URI (tail segs)) hndls])
-- | Unfolds the branching structure the resources.
-- NOTE(review): only the single-child root produced by 'mkTree' is
-- handled; any other shape hits 'undefined'.
fromTree :: Tree -> Automaton
fromTree (Tree rs) = case rs of
  [(aut, Tree list)] -> concats [aut, unions (map (uncurry go) list)]
  _ -> undefined
  where
    -- Concatenate each node's automaton with a "/" separator and the
    -- union of its children; leaves are used as-is.
    go :: Automaton -> Tree -> Automaton
    go aut (Tree list) = case list of
      [] -> aut
      xs -> concats [aut, mkAutomaton (Fixed "/"), unions (map (uncurry go) xs)]
-- | Converts an automaton into a deterministic automaton.
-- Subset construction: states of the result are epsilon-closed sets
-- of source states, discovered from the closure of {0}.
determinize :: Automaton -> Automaton
determinize (Automaton { delta = delta, handlers = handlers }) = convert (go (closure (IS.singleton 0)) M.empty)
  where
    -- Renumber the discovered state sets (in ascending order) and
    -- rebuild the record fields from the collected transitions.
    convert :: Map IntSet (Map Char IntSet) -> Automaton
    convert deltas = Automaton
      { states = M.size deltas
      , delta = V.fromList . map (M.map (return . toState) . snd) $ M.toAscList deltas
      , matching = IM.keysSet hndls
      , handlers = hndls
      }
      where
        hndls :: IntMap [Handler]
        hndls = IM.fromAscList . filter (not . null . snd) . map (toState &&& collectHandlers) $ M.keys deltas
        toState :: IntSet -> Int
        toState = (M.!) (M.fromAscList $ zip (map fst (M.toAscList deltas)) [0..])
        collectHandlers :: IntSet -> [Handler]
        collectHandlers = concatMap (flip (IM.findWithDefault []) handlers) . IS.toList
    -- Explore from one state set: record its transition table, then
    -- recurse into every target set not yet visited.
    -- Note: the local 'traverse' shadows the Prelude function.
    go :: IntSet -> Map IntSet (Map Char IntSet) -> Map IntSet (Map Char IntSet)
    go state deltas = foldl' (flip traverse) (M.insert state table deltas) (nub . map snd $ M.toList table)
      where
        traverse :: IntSet -> Map IntSet (Map Char IntSet) -> Map IntSet (Map Char IntSet)
        traverse state' deltas' | M.notMember state' deltas' = go state' deltas'
                                | otherwise = deltas'
        table :: Map Char IntSet
        table = M.fromList (filter (not . IS.null . snd) $ zip chars (map (flip step state) chars))
          where
            -- All 7-bit characters except '\0' (reserved for epsilon).
            chars :: [Char]
            chars = tail (take 128 ['\0'..])
    -- One symbol step followed by epsilon closure.
    step :: Char -> IntSet -> IntSet
    step c = closure . IS.fromList . concatMap (M.findWithDefault [] c . (V.!) delta) . IS.toList
    -- Epsilon closure: follow '\0' edges until no new state appears.
    closure :: IntSet -> IntSet
    closure = go <*> IS.toList
      where
        go :: IntSet -> [Int] -> IntSet
        go state list = case list of
          [] -> state
          x:xs -> let new = filter (flip IS.notMember state) (M.findWithDefault [] '\0' (delta V.! x))
                  in go (IS.union state (IS.fromList new)) (xs ++ new)
|
am-/biri-lang
|
src/Biri/Backend/Routes/Automaton.hs
|
mit
| 9,215
| 0
| 21
| 2,757
| 3,140
| 1,677
| 1,463
| 156
| 3
|
module Feed where
------------------------------------------------------------------------------
import Snap.Snaplet
import Snap.Snaplet.PostgresqlSimple
------------------------------------------------------------------------------
import Application
import Users
import Errors
------------------------------------------------------------------------------
-- | Fetch every follower/followed pair from the relationships table.
getFollows :: ExceptT Error AppHandler [Follow]
getFollows = lift $ with pg $ query_ "SELECT follower_id,followed_id FROM relationships"
-- | All follow relationships whose follower is the given user.
--
-- NOTE(review): this filters the whole table in Haskell; a SQL WHERE
-- clause on follower_id would push the work to the database.
getFollowedsById :: User -> ExceptT Error AppHandler [Follow]
getFollowedsById user = do
  follows <- getFollows
  -- 'lift . return' was a redundant round trip through the base
  -- monad; 'pure' in ExceptT is equivalent and direct.
  pure $ filter (\follow -> follower_id follow == uid user) follows
|
lkania/Haskitter
|
src/Feed.hs
|
mit
| 716
| 0
| 13
| 91
| 138
| 75
| 63
| 12
| 1
|
import Data.Maybe (isNothing)
import Utils.Common
import Utils.Hierarchy (getChildren)
import Utils.Misc (load)
-- | A record for display output: holds a config name and the child
-- records for the configs derived from it.
data Record = Record Name [Record] deriving (Show)
main :: IO ()
main = do
  -- Load the active config names and the parent/child relations,
  -- then print the indented tree.
  st <- load statusFile :: IO [Name]
  hc <- load hierarchyFile :: IO [Relation]
  let model = build hc st
  let view = format model
  putStr view
-- | Assemble the display tree: keep each name whose recorded parent
-- matches the current one, recursing over that name's children.
build :: [Relation] -> [Name] -> [Record]
build = build' Nothing
  where build' :: Maybe Parent -> [Relation] -> [Name] -> [Record]
        build' parent rels names =
          [ Record name children
          | name <- names
          , (name `lookup` rels) == parent
          , let children = build' (Just name) rels (getChildren rels name)
          ]
-- | Render the record tree one name per line, indenting three spaces
-- per nesting level.
format :: [Record] -> String
format = unlines . go 0
  where go :: Int -> [Record] -> [String]
        go depth = concatMap render
          where render (Record name children) =
                  (replicate (depth * 3) ' ' ++ name) : go (depth + 1) children
|
kahless/netm
|
src/Cmd/Status/Main.hs
|
mit
| 1,191
| 0
| 14
| 335
| 403
| 210
| 193
| 27
| 1
|
module Main where
import LI11718
import qualified Tarefa1_2017li1g180 as T1
import System.Environment
import Text.Read
main :: IO ()
main = do
  args <- getArgs
  case args of
    ["constroi"] -> do
      str <- getContents
      -- Parse the track description from stdin; reject bad input.
      case readMaybe str of
        Nothing -> error "caminho invalido"
        Just c -> print $ T1.constroi c
    ["testes"] -> print $ T1.testesT1
    -- The original wrote 'otherwise ->' here, which in a case
    -- alternative is just a wildcard binding shadowing
    -- Prelude.otherwise; '_' says what is meant.
    _ -> error "RunT1 argumentos invalidos"
|
hpacheco/HAAP
|
examples/plab/svn/2017li1g180/src/RunT1.hs
|
mit
| 492
| 0
| 17
| 162
| 128
| 65
| 63
| 16
| 4
|
module Input where
import Data.Maybe
import System.IO
import Text.Read (readMaybe)

import Direction
import Utils
-- | Prompt for an integer coordinate on the given axis and re-prompt
-- until the input parses and lies in [0, cap).
_getInput :: Char -> Int -> IO Int
_getInput coord cap = do
  putStr outString
  hFlush stdout
  n <- getLine
  -- 'readMaybe' replaces the unguarded 'read': if the old digit
  -- check accepted an empty line, 'read ""' would throw; parse
  -- failures now simply re-prompt.
  case readMaybe n of
    Just rn | rn >= 0 && rn < cap -> return rn
    _ -> _getInput coord cap
  where outString = "Enter " ++ [coord] ++ "-coordinate (" ++ [coord] ++ " >= 0 & " ++ [coord] ++ " < " ++ show cap ++ ") : "
-- | Map a direction word (case-insensitive) to a 'Direction';
-- 'Nothing' for unrecognised input.
_toDirection :: String -> IO (Maybe Direction)
_toDirection s = return $ lookup (allLower s) directions
  where directions = [ ("up",    Upwards)
                     , ("down",  Downwards)
                     , ("left",  Leftwards)
                     , ("right", Rightwards)
                     ]
-- | Prompt for an x-coordinate in [0, n).
getX :: Int -> IO Int
getX n = _getInput 'x' n
-- | Prompt for a y-coordinate in [0, n).
getY :: Int -> IO Int
getY n = _getInput 'y' n
-- | Prompt until the user enters a valid direction word.
getDirection :: IO Direction
getDirection = do
  putStr "Enter direction (up, down, left, right): "
  hFlush stdout
  dirs <- getLine
  dir <- _toDirection dirs
  -- Pattern match instead of '== Nothing' plus 'fromJust': same
  -- control flow, but total and without the Eq constraint.
  case dir of
    Just d  -> return d
    Nothing -> getDirection
|
crockeo/ballgame
|
src/Input.hs
|
mit
| 1,134
| 0
| 15
| 298
| 431
| 209
| 222
| 38
| 3
|
module IgrepCashbook2
( CashbookLine(..)
, dateRegex
, priceRegex
, parseWithoutDate
)
where
-- for new style cashbook
import qualified IgrepCashbook as Old ( Item, isCommentLine, parseItemLine )
import Data.String.Utils (join)
import Data.Char (isDigit)
import Text.Regex.Posix
-- | One parsed cashbook item.
data CashbookLine =
  -- Comment String
  -- | Item
  Item
    { getDate :: Maybe String -- parse-without-date path passes Nothing
    , getName :: String
    , getPrice :: Int
    , isIncome :: Bool -- True when the price was written with a '+'
    , getGroup :: String }
-- | Parse a cashbook file into items, keeping original line numbers
-- for error reporting; comment and date lines are skipped.
parseWithoutDate :: String -> [ Either String CashbookLine ]
parseWithoutDate c = map parseLineWithoutDate numberedItems
  where
    numberedItems = filter (isItemLine . snd) (zip [1 ..] (lines c))
-- | Parse one (line number, line text) pair with no date attached.
parseLineWithoutDate :: (Int, String) -> Either String CashbookLine
parseLineWithoutDate (n, x) = itemFromLine Nothing n x
-- | A date line: optional "YY/" prefix, then "MM/DD".
dateRegex :: String
dateRegex = "^([0-9][0-9]/)?[0-9][0-9]/[0-9][0-9]$"
-- | A price: optional '+', a nonzero leading digit, then digits or
-- the separators '_' and ','.
priceRegex :: String
priceRegex = "^\\+?[1-9][_,0-9]*$"
-- | An item line is anything that is neither comment nor date.
isItemLine :: String -> Bool
isItemLine x = not $ Old.isCommentLine x || isDateLine x
isDateLine :: String -> Bool
isDateLine x = x =~ dateRegex
-- | Delegate to the old-style parser, tolerating one leading space.
parseItemLine :: String -> Old.Item
parseItemLine (' ':s) = Old.parseItemLine s
parseItemLine s = Old.parseItemLine s
-- Error-message templates used by 'itemFromLine'.
emptyItem :: String
emptyItem = "invalid item: empty item. no data given"
noPriceAndGroup :: String
noPriceAndGroup = "invalid item: neither price nor group given"
noGroup :: String
noGroup = "invalid item: no group given"
invalidPrice :: String
invalidPrice = "invalid item: malformed price"
-- | Validate the fields of one item line and build a 'CashbookLine',
-- or produce an error message mentioning the line number.
itemFromLine :: Maybe String -> Int -> String -> Either String CashbookLine
itemFromLine d n x = validate $ parseItemLine x
  where
    -- Need at least name, price and group; price must match the
    -- price regex.
    validate [] = Left emptyItem
    validate [name] = Left $ mkMsg noPriceAndGroup name
    validate [name, signedPrice] = Left $ mkMsg noGroup ( name ++ " " ++ signedPrice )
    validate i@(name:signedPrice:group:_)
      | signedPrice =~ priceRegex = Right $ mkItem d name signedPrice group
      | otherwise = Left $ mkMsg invalidPrice $ join " " i
    -- NOTE(review): the two adjacent literals produce a double space
    -- before "at line" in the rendered message.
    mkMsg :: String -> String -> String
    mkMsg e c = e ++ " \"" ++ c ++ "\" " ++ " at line " ++ show n
    mkItem :: Maybe String -> String -> String -> String -> CashbookLine
    mkItem d n s g = Item d n p ip g
      where
        p = mkPrice s
        ip = isIncomePrice s
-- | Read a price, ignoring non-digit separators such as ',' and '_'.
mkPrice :: String -> Int
mkPrice x = read [ c | c <- x, isDigit c ]
-- | A price written with a leading '+' is income.
-- 'take 1' instead of 's !! 0' keeps this total: the empty string
-- now yields False instead of crashing.
isIncomePrice :: String -> Bool
isIncomePrice s = take 1 s == "+"
|
igrep/igrep-cashbook
|
hs/IgrepCashbook2.hs
|
mit
| 2,383
| 0
| 12
| 494
| 749
| 400
| 349
| 61
| 4
|
-- Copyright © 2012 Julian Blake Kongslie <jblake@omgwallhack.org>
-- Licensed under the MIT license.
{-# LANGUAGE Arrows #-}
module Source.FanfictionNet
( fetchFanfictionNet
, infoFanfictionNet
)
where
import Data.DateTime
import Data.Char
import Data.List
import Text.Regex.Posix
import Text.XML.HXT.Cache
import Text.XML.HXT.Core
import Text.XML.HXT.HTTP
-- | Download a complete story and assemble a FictionBook (FB2)
-- document: metadata from the first chapter's page, then one
-- <section> per chapter in the <body>.
fetchFanfictionNet :: String -> IOStateArrow s b XmlTree
fetchFanfictionNet storyID = proc _ -> do
  (title, author, date) <- fetchAuxilliary storyID -< ()
  sections <- listA $ fetchChapter storyID 1 -< ()
  root []
    [ mkelem "FictionBook" [sattr "xmlns" "http://www.gribuser.ru/xml/fictionbook/2.0"]
      [ selem "description"
        [ selem "title-info"
          [ selem "author"
            [ selem "nickname"
              [ txt author
              ]
            ]
          , selem "book-title"
            [ txt title
            ]
          ]
        , selem "document-info"
          [ selem "author"
            [ selem "nickname"
              [ txt "jblake"
              ]
            ]
          , selem "date"
            [ txt $ formatDateTime "%F" date
            ]
          , selem "program-used"
            [ txt "ff"
            ]
          ]
        ]
      , eelem "body" >>> setChildren sections
      ]
    ] -<< ()
-- | Story metadata only: (title, author, last update date).
infoFanfictionNet :: String -> IOStateArrow s b (String, String, DateTime)
infoFanfictionNet = fetchAuxilliary
-- | Fetch one chapter page from the mobile site as parsed HTML.
downloadXML :: String -> Int -> IOStateArrow s b XmlTree
downloadXML storyID chapter = proc _ -> do
  readDocument
    -- We use a cache because the auxilliary would otherwise require a separate fetch.
    [ withCache "cache" 3600 False
    , withHTTP []
    , withParseHTML True
    , withStrictInput True
    , withWarnings False
    ] $ uri -< ()
  where
    uri = "http://m.fanfiction.net/s/" ++ storyID ++ "/" ++ show chapter
-- | Scrape title, author and date from the first chapter's page.
fetchAuxilliary :: String -> IOStateArrow s b (String, String, DateTime)
fetchAuxilliary storyID = proc _ -> do
  xml <- downloadXML storyID 1 -< ()
  title <- storyTitle -< xml
  author <- storyAuthor -< xml
  date <- updateDate -< xml
  returnA -< (title, author, date)
-- | Fetch one chapter as an FB2 <section>, then recurse while a
-- "next chapter" link exists, yielding each section in turn.
fetchChapter :: String -> Int -> IOStateArrow s b XmlTree
fetchChapter storyID chapter = proc _ -> do
  xml <- downloadXML storyID chapter -< ()
  -- Fall back to "Chapter N" when no title can be scraped.
  title <- single $ chapterTitle <+> constA ("Chapter " ++ show chapter) -< xml
  titleE <- selem "title" [ selem "p" [ mkText ] ] -< title
  body <- bodyHTML -< xml
  bodyEs <- listA $ processBottomUp formatBody -< body
  section <- eelem "section" >>> setChildren (titleE : bodyEs) -<< ()
  remainder <- ifA nextChapterLink (constA () >>> (listA $ fetchChapter storyID $ chapter + 1)) (constA []) -< xml
  unlistA -< section : remainder
-- | Story title: bold text centered inside the content div.
storyTitle :: IOStateArrow s XmlTree String
storyTitle = single $ deepest $
  hasName "div" >>> hasAttrValue "id" (== "content") />
  hasName "center" />
  hasName "b" />
  getText
-- | Author name: the first link inside the centered content block.
storyAuthor :: IOStateArrow s XmlTree String
storyAuthor = single $ deepest $
  hasName "div" >>> hasAttrValue "id" (== "content") />
  hasName "center" />
  hasName "a" >>> hasAttr "href" />
  getText
-- | Prefer the "Updated:" stamp, falling back to "Published:".
updateDate :: IOStateArrow s XmlTree DateTime
updateDate = single $ deepest $
  hasName "div" >>> hasAttrValue "id" (== "content") />
  getText >>> (matchUpdated <+> matchPublished)
-- | Shared parser for a @"<label>: M-D-Y"@ date stamp in the page
-- text. Years are two digits: values below 90 are taken as 20xx,
-- the rest as 19xx. Deduplicates the formerly copy-pasted bodies of
-- 'matchUpdated' and 'matchPublished'.
matchDate :: String -> IOStateArrow s String DateTime
matchDate label = proc text -> do
  res <- arr (\t -> getAllTextSubmatches $ t =~ (label ++ ": ([0-9]{1,2})-([0-9]{1,2})-([0-9]{1,2})")) -< text
  case res of
    [_,month,day,year] -> returnA -< fromGregorian' (windowYear (read year)) (read month) (read day)
    _ -> none -< ()
  where
    windowYear y = if y < 90 then y + 2000 else y + 1900

matchUpdated :: IOStateArrow s String DateTime
matchUpdated = matchDate "Updated"

matchPublished :: IOStateArrow s String DateTime
matchPublished = matchDate "Published"
-- This could probably be made more robust. Look for the last text node in that block, maybe?
-- | Chapter title: trimmed content-div text starting with "Chapter".
chapterTitle :: IOStateArrow s XmlTree String
chapterTitle = single $ deepest $
  hasName "div" >>> hasAttrValue "id" (== "content") />
  getText >>> arr (reverse . dropWhile isSpace . reverse . dropWhile isSpace) >>> guardsP (isPrefixOf "Chapter") returnA
-- | The div holding the chapter prose.
bodyHTML :: IOStateArrow s XmlTree XmlTree
bodyHTML = single $ deep $
  hasName "div" >>> hasAttrValue "id" (== "storycontent")
-- | Succeeds iff the page links to a following chapter.
nextChapterLink :: IOStateArrow s XmlTree ()
nextChapterLink = single $ deepest $
  hasName "a" >>> hasAttr "href" />
  hasText (== "Next »") >>> constA ()
-- | Passes the node through iff it contains any text at all.
isNonEmpty :: (ArrowChoice a, ArrowXml a) => a XmlTree XmlTree
isNonEmpty = proc n -> do
  textNodes <- listA $ deep getText -< n
  let allText = concat textNodes
  case allText of
    [] -> zeroArrow -< n
    _ -> returnA -< n
-- | Passes the node through iff its text contains no whitespace.
hasNoWhitespace :: (ArrowChoice a, ArrowXml a) => a XmlTree XmlTree
hasNoWhitespace = proc n -> do
  textNodes <- listA $ deep getText -< n
  let allText = concat textNodes
  let whitespace = filter isSpace allText
  case whitespace of
    [] -> returnA -< n
    _ -> zeroArrow -< n
-- | Rewrite one HTML node into its FB2 equivalent; unrecognized
-- nodes become a comment plus their processed children.
formatBody :: IOStateArrow s XmlTree XmlTree
formatBody = proc n -> do
  res <- listA $ catA
    -- Primitive nodes that we copy directly.
    [ isBlob
    , isCdata
    , isCharRef
    , isEntityRef
    , isText
    -- Horizontal rules are replaced.
    , hasName "hr" >>> proc _ -> eelem "empty-line" -< ()
    -- Heuristic to detect fake horizontal rules.
    -- Specifically, I am searching for lines with style="text-align:center;" that are nonempty but contain no whitespace.
    , hasName "p" >>> hasAttrValue "style" (== "text-align:center;") >>> isNonEmpty >>> hasNoWhitespace >>> proc _ -> eelem "empty-line" -< ()
    -- Markup nodes that we rewrite as a different type.
    , hasName "b" >>> listA getChildren >>> proc cs -> eelem "strong" >>> setChildren cs -<< ()
    , hasName "i" >>> listA getChildren >>> proc cs -> eelem "emphasis" >>> setChildren cs -<< ()
    , hasName "p" >>> listA getChildren >>> proc cs -> eelem "p" >>> setChildren cs -<< ()
    -- Markup nodes that we just ditch and use the children from.
    , hasName "div" >>> getChildren
    ]
    -< n
  -- If we didn't get any acceptable result from processing this node, then ditch it and try to process its children.
  case res of
    [] -> (getName >>> arr ("unrecognized node: " ++) >>> mkCmt) <+> getChildren -< n
    _ -> unlistA -< res
|
jblake/fanfiction
|
src/Source/FanfictionNet.hs
|
mit
| 6,523
| 14
| 21
| 1,574
| 1,981
| 988
| 993
| 135
| 3
|
{-|
Module : Database.Orville.PostgreSQL.Internal.QueryCache
Copyright : Flipstone Technology Partners 2016-2018
License : MIT
-}
{-# LANGUAGE FlexibleContexts #-}
module Database.Orville.PostgreSQL.Internal.QueryCache
( QueryCached
, runQueryCached
, selectCached
, selectFirstCached
, findRecordCached
, findRecordsCached
, findRecordsByCached
, unsafeLift
) where
import Control.Monad.Catch (MonadThrow)
import Control.Monad.Trans
import Control.Monad.Trans.State
import qualified Data.Map as Map
import qualified Data.Map.Helpers as Map
import Data.Maybe
import Data.String (fromString)
import Database.Orville.PostgreSQL.Internal.MappendCompat ((<>))
import Database.Orville.PostgreSQL.Internal.Expr
import Database.Orville.PostgreSQL.Internal.FromSql
import Database.Orville.PostgreSQL.Internal.Monad
import Database.Orville.PostgreSQL.Internal.PrimaryKey
import Database.Orville.PostgreSQL.Internal.QueryKey
import Database.Orville.PostgreSQL.Internal.SelectOptions
import Database.Orville.PostgreSQL.Internal.TableDefinition
import Database.Orville.PostgreSQL.Internal.Types
import Database.Orville.PostgreSQL.Select
-- | Cache of raw result sets, keyed by the query that produced them.
type QueryCache = Map.Map QueryKey ResultSet
-- | A computation with access to a per-run query cache.
-- NOTE(review): deriving Functor/Applicative/Monad on a newtype
-- needs GeneralizedNewtypeDeriving — presumably enabled via the
-- package's default-extensions; confirm against the cabal file.
newtype QueryCached m a =
  QueryCached (StateT QueryCache m a)
  deriving (Functor, Applicative, Monad)
-- | Run a cached computation starting from an empty cache; the cache
-- is discarded at the end.
runQueryCached :: Monad m => QueryCached m a -> m a
runQueryCached (QueryCached statet) = evalStateT statet Map.empty
-- | Return the cached result for the key if present; otherwise run
-- the action, remember its result under the key, and return it.
cached ::
     Monad m => QueryKey -> QueryCached m ResultSet -> QueryCached m ResultSet
cached key action = do
  cache <- QueryCached get
  maybe (compute cache) pure (Map.lookup key cache)
  where
    -- Cache miss: run the action, then store its result alongside
    -- the entries present at lookup time (the original snapshot
    -- semantics).
    compute cache = do
      result <- action
      QueryCached $ put (Map.insert key result cache)
      pure result
-- | Select all columns of the table, caching the raw rows under a
-- key derived from the table definition and the select options.
selectCachedRows ::
     (MonadThrow m, MonadOrville conn m)
  => TableDefinition readEntity writeEntity key
  -> SelectOptions
  -> QueryCached m ResultSet
selectCachedRows tableDef opts =
  cached key $
  unsafeLift $
  runSelect $ selectQueryRows selects (fromClauseTable tableDef) opts
  where
    selects = expr . selectColumn . fromString <$> tableColumnNames tableDef
    key = mconcat [queryKey tableDef, queryKey opts]
-- | Cached select, decoding each raw row into a read entity.
selectCached ::
     (MonadThrow m, MonadOrville conn m)
  => TableDefinition readEntity writeEntity key
  -> SelectOptions
  -> QueryCached m [readEntity]
selectCached tableDef opts = do
  rows <- selectCachedRows tableDef opts
  unsafeLift $ decodeSqlRows (tableFromSql tableDef) rows
-- | Cached select of at most one row ('limit 1' is prepended to the
-- caller's options).
selectFirstCached ::
     (MonadThrow m, MonadOrville conn m)
  => TableDefinition readEntity writeEntity key
  -> SelectOptions
  -> QueryCached m (Maybe readEntity)
selectFirstCached tableDef opts =
  listToMaybe <$> selectCached tableDef (limit 1 <> opts)
-- | Fetch the records with the given primary keys, returned as a map
-- from key to record (absent keys are simply missing from the map).
findRecordsCached ::
     (MonadThrow m, MonadOrville conn m, Ord key)
  => TableDefinition readEntity writeEntity key
  -> [key]
  -> QueryCached m (Map.Map key readEntity)
findRecordsCached tableDef keys = do
  let
    primKey = tablePrimaryKey tableDef
    mkEntry record = (tableGetKey tableDef record, record)
  recordList <- selectCached tableDef (where_ $ primaryKeyIn primKey keys)
  pure $ Map.fromList (map mkEntry recordList)
-- | Fetch at most one record by primary key, via the cache.
findRecordCached ::
     (MonadThrow m, MonadOrville conn m)
  => TableDefinition readEntity writeEntity key
  -> key
  -> QueryCached m (Maybe readEntity)
findRecordCached tableDef key =
  selectFirstCached tableDef (where_ $ primaryKeyEquals primKey key)
  where
    primKey = tablePrimaryKey tableDef
-- | Fetch records and group them by the value of the given field.
findRecordsByCached ::
     (Ord fieldValue, MonadThrow m, MonadOrville conn m)
  => TableDefinition readEntity writeEntity key
  -> FieldDefinition nullability fieldValue
  -> SelectOptions
  -> QueryCached m (Map.Map fieldValue [readEntity])
findRecordsByCached tableDef field opts = do
  -- Decode each row as (field value, entity) so grouping is a pure
  -- post-processing step.
  let builder = (,) <$> fieldFromSql field <*> tableFromSql tableDef
  rows <- selectCachedRows tableDef opts
  Map.groupBy' id <$> unsafeLift (decodeSqlRows builder rows)
-- this is unsafe in the sense that it does not provide
-- any guarantees that the action won't change values in
-- the database, rendering the cache incorrect. It is not
-- exposed publicly, but all usages of it here need to
-- be examined for correctness manually.
--
unsafeLift :: Monad m => m a -> QueryCached m a
unsafeLift = QueryCached . lift
|
flipstone/orville
|
orville-postgresql/src/Database/Orville/PostgreSQL/Internal/QueryCache.hs
|
mit
| 4,290
| 0
| 16
| 703
| 1,093
| 572
| 521
| 102
| 2
|
import qualified Data.Binary.Put as P
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Internal as I
-- Read Teste.txt, build its character-frequency table, and write it
-- in binary form to Teste.bin.
-- NOTE(review): lazy 'readFile' defers I/O errors and keeps the
-- handle open until the contents are consumed.
escrita = do
  txt <- readFile "Teste.txt"
  let xs = freq txt
  let bs = P.runPut (put xs)
  L.writeFile "Teste.bin" bs
-- | Frequency table in order of first occurrence. Quadratic, which
-- matches the original's behavior and cost.
freq :: Eq a => [a] -> [(a, Int)]
freq [] = []
freq (x:xs) = (x, 1 + length duplicates) : freq remainder
  where
    duplicates = [ y | y <- xs, y == x ]
    remainder  = [ y | y <- xs, y /= x ]
-- Serialize the table: one byte for the character followed by a
-- big-endian 32-bit count; flush when done.
put [] = P.flush
put ((c,f) : xs) = do
  P.putWord8 (I.c2w c)
  P.putWord32be (toEnum f)
  put xs
|
AndressaUmetsu/PapaEhPop
|
huffman/stringToBin.hs
|
mit
| 451
| 0
| 12
| 105
| 225
| 117
| 108
| 15
| 1
|
module GhcPkg where
import Control.Applicative
import Control.Monad
import Data.Char
import Data.Maybe
import qualified Data.Version as Version
import Data.Version hiding (parseVersion)
import Text.ParserCombinators.ReadP
import System.Process
import System.Exit
import System.IO
import System.Environment
-- | A package identified by name and version.
data Package = Package {
  packageName :: String
, packageVersion :: Version
} deriving (Eq, Show, Ord)
-- | Query ghc-pkg for the dependencies of a package; dies with the
-- failing command line if the output cannot be parsed.
dependencies :: Package -> IO [Package]
dependencies p = do
  r <- uncurry readProcess c ""
  let err = die ("Could not parse the output of `" ++ showCommand c ++ "`!")
  maybe err return (parseDependencies r)
  where
    c = ("ghc-pkg", ["field", showPackage p, "depends"])
    showCommand = unwords . uncurry (:)
-- | Parse the "depends:" field printed by ghc-pkg, dropping the
-- builtin RTS entry and each package's trailing fingerprint.
parseDependencies :: String -> Maybe [Package]
parseDependencies xs = case xs of
  'd':'e':'p':'e':'n':'d':'s':':':ys ->
    mapM (stripPackageFingerprint >=> parsePackage) (dropBoring ys)
  _ -> Nothing
  where
    dropBoring = filter (/= "builtin_rts") . map strip . lines
-- | Drop the trailing "-<fingerprint>" component of a package id,
-- e.g. "foo-1.0-abcd1234" yields "foo-1.0"; 'Nothing' when there is
-- no '-' at all.
stripPackageFingerprint :: String -> Maybe String
stripPackageFingerprint xs =
  case break (== '-') (reverse xs) of
    (_, '-' : rest) -> Just (reverse rest)
    _               -> Nothing
-- | Ask ghc-pkg for the latest installed version of a package name;
-- dies when the reply is not a valid package id.
latest :: String -> IO Package
latest name = do
  p <- strip <$> readProcess "ghc-pkg" ["latest", name] ""
  let err = die (show p ++ " is not a valid package name!")
  maybe err return (parsePackage p)
-- | Parse "name-version" into a 'Package'; splits at the LAST '-'
-- so dashes inside the name are preserved.
parsePackage :: String -> Maybe Package
parsePackage xs = do
  (name, version) <- splitVersion xs
  Package name <$> parseVersion version
  where
    splitVersion ys = case (break (== '-') . reverse) ys of
      (version, '-' : name) -> Just (reverse name, reverse version)
      _ -> Nothing
-- | Render a package as "name-version".
showPackage :: Package -> String
showPackage (Package name version) = concat [name, "-", showVersion version]
-- | Parse a version string, accepting only a complete parse (no
-- leftover input).
parseVersion :: String -> Maybe Version
parseVersion = fmap fst . listToMaybe . filter ((== "") . snd) . readP_to_S Version.parseVersion
-- | Remove leading and trailing whitespace.
strip :: String -> String
strip = dropEnd . dropStart
  where
    dropStart = dropWhile isSpace
    dropEnd = reverse . dropWhile isSpace . reverse
-- | Print "<progname>: <err>" to stderr and exit with failure.
-- NOTE(review): System.Exit also exports 'die' on base >= 4.8 and is
-- imported unqualified here — check for an ambiguity with newer GHCs.
die :: String -> IO a
die err = do
  name <- getProgName
  hPutStrLn stderr (name ++ ": " ++ err)
  exitFailure
|
sol/depends
|
src/GhcPkg.hs
|
mit
| 2,222
| 0
| 15
| 498
| 771
| 397
| 374
| 56
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module HlAdif
( adifLogParser
, writeTag
, writeRecord
, writeLog
) where
import Control.Applicative
import Data.Attoparsec.ByteString.Char8
import Data.Maybe
import Data.Monoid
import Data.String
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import HlLog
import Prelude hiding (take, takeWhile)
-- | Parse one ADIF tag: @<name>@, @<name:len>data@, or
-- @<name:len:type>data@. Text after the data up to the next '<' is
-- discarded.
parseTag :: Parser Tag
parseTag = do
    _ <- char '<'
    tName <- takeWhile (\x -> x /= ':' && x /= '>')
    n1 <- peekChar
    (tDataType, tData) <- case n1 of
        Nothing -> fail "Unexpected end of tag"
        -- A ':' after the name introduces the data length (and,
        -- optionally, a second ':' with a data type).
        Just ':' -> do
            _ <- take 1 -- Drop :
            len <- decimal
            n2 <- peekChar
            tDataType <- case n2 of
                Nothing -> fail "Uexpected end of tag"
                Just ':' -> do
                    _ <- take 1 -- Drop :
                    Just <$> takeWhile (/='>')
                Just '>' -> do
                    return Nothing
                Just c -> fail $ "Unexpected character: " ++ [c]
            _ <- take 1 -- Drop >
            tData <- Just <$> take len
            return (tDataType, tData)
        -- Bare tag with neither length nor data, e.g. <EOR>.
        Just '>' -> do
            _ <- take 1 -- Drop >
            return (Nothing, Nothing)
        Just c -> fail $ "Unexpected character: " ++ [c]
    _ <- takeWhile (/='<') -- Drop extra characters after useful data
    return $ toTag tName tData tDataType
-- | Parse a whole ADIF log: free header text up to the first '<',
-- then tags split at the EOH marker into header and records.
parseLog :: Parser Log
parseLog = do
    hTxt <- takeWhile $ (/=) '<'
    (headerTags, bodyTags) <- break isEOH <$> many parseTag
    return $ case (headerTags, bodyTags) of
        (hts, []) -> toLog hTxt [] (records hts) -- No EOH tag, no header
        (hts, bts) -> toLog hTxt hts (records $ drop 1 bts) -- Use tags after the EOH tag
-- | Parse a complete ADIF document from a strict 'ByteString'.
adifLogParser :: ByteString -> Either String Log
adifLogParser = parseOnly parseLog
-- | Render one tag as @<name[:len[:type]]>data@; absent parts are
-- simply omitted.
writeTag :: Tag -> ByteString
writeTag t = B.concat
    [ "<"
    , tagName t
    , maybe "" (\len -> ":" <> B.pack (show len)) (tagDataLength t)
    , maybe "" (":" <>) (tagDataType t)
    , ">"
    , fromMaybe "" (tagData t)
    ]
-- | Render a record: one tag per line, terminated by <EOR>.
writeRecord :: Record -> ByteString
writeRecord r = B.intercalate "\n" (map writeTag $ fromRecord r) <> "\n<EOR>\n"
-- | Render a whole log: header text, header tags, <EOH>, records.
writeLog :: Log -> ByteString
writeLog l =
    B.concat [ logHeaderTxt l
             , B.intercalate "\n" $ map writeTag $ logHeaderTags l
             , "<EOH>\n"
             , B.concat $ map writeRecord $ logRecords l
             ]
|
netom/hamloghs
|
src/HlAdif.hs
|
mit
| 2,473
| 0
| 21
| 806
| 774
| 394
| 380
| 68
| 7
|
module Main where
import Lib
import System.Environment
import Control.Monad (liftM)
import Repl (runRepl, runOne)
-- | Entry point: no arguments starts the REPL, exactly one argument is
-- evaluated as a single expression, anything else prints usage.
--
-- Fixes: the partial 'args !! 0' is replaced by a pattern match, and
-- the misleading 'otherwise' case pattern (a wildcard binding that
-- shadows 'Prelude.otherwise') is replaced by '_'.
main :: IO ()
main = do
  args <- getArgs
  case args of
    []    -> runRepl
    [arg] -> runOne arg
    _     -> putStrLn "Program takes only 0 or 1 argument"
|
vaibhav276/scheme-compiler
|
app/Main.hs
|
mit
| 305
| 0
| 11
| 83
| 94
| 50
| 44
| 12
| 3
|
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_rollenKurator (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- Select the exception type matching the compiler/base version in use;
-- on any modern base this resolves to 'Exception.IOException'.
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
-- Run an IO action, handling IO exceptions with the given handler.
catchIO = Exception.catch
-- | Package version (0.1.0.0) as recorded by the build system.
version :: Version
version = Version [0,1,0,0] []
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
-- Compiled-in absolute install paths (cabal sandbox layout); the
-- getters below allow each to be overridden via an environment
-- variable at runtime.
bindir = "/mnt/legacy/development/haskell/rollenKurator/.cabal-sandbox/bin"
libdir = "/mnt/legacy/development/haskell/rollenKurator/.cabal-sandbox/lib/x86_64-linux-ghc-7.10.3/rollenKurator-0.1.0.0-CjBiD4FGcJJDcGAOkpmTum"
datadir = "/mnt/legacy/development/haskell/rollenKurator/.cabal-sandbox/share/x86_64-linux-ghc-7.10.3/rollenKurator-0.1.0.0"
libexecdir = "/mnt/legacy/development/haskell/rollenKurator/.cabal-sandbox/libexec"
sysconfdir = "/mnt/legacy/development/haskell/rollenKurator/.cabal-sandbox/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
-- Each getter prefers an environment-variable override and falls back
-- to the compiled-in path when the variable is unset or unreadable.
getBinDir = getEnv "rollenKurator_bindir" `catchIO` (\_ -> return bindir)
getLibDir = getEnv "rollenKurator_libdir" `catchIO` (\_ -> return libdir)
getDataDir = getEnv "rollenKurator_datadir" `catchIO` (\_ -> return datadir)
getLibexecDir = getEnv "rollenKurator_libexecdir" `catchIO` (\_ -> return libexecdir)
getSysconfDir = getEnv "rollenKurator_sysconfdir" `catchIO` (\_ -> return sysconfdir)
-- | Resolve a data file name relative to the (possibly overridden)
-- data directory.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = fmap (\dir -> dir ++ "/" ++ name) getDataDir
|
flajann2/rollenKurator
|
dist/dist-sandbox-96085f3c/build/autogen/Paths_rollenKurator.hs
|
mit
| 1,905
| 0
| 10
| 223
| 371
| 215
| 156
| 31
| 1
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGFESpotLightElement
(js_getX, getX, js_getY, getY, js_getZ, getZ, js_getPointsAtX,
getPointsAtX, js_getPointsAtY, getPointsAtY, js_getPointsAtZ,
getPointsAtZ, js_getSpecularExponent, getSpecularExponent,
js_getLimitingConeAngle, getLimitingConeAngle,
SVGFESpotLightElement, castToSVGFESpotLightElement,
gTypeSVGFESpotLightElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- Raw JavaScript FFI binding reading the element's @x@ property.
foreign import javascript unsafe "$1[\"x\"]" js_getX ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @x@ attribute; the 'Maybe' comes from 'fromJSRef'
-- marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.x Mozilla SVGFESpotLightElement.x documentation>
getX ::
     (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getX self
  = liftIO ((js_getX (unSVGFESpotLightElement self)) >>= fromJSRef)
-- Raw JavaScript FFI binding reading the element's @y@ property.
foreign import javascript unsafe "$1[\"y\"]" js_getY ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @y@ attribute; the 'Maybe' comes from 'fromJSRef'
-- marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.y Mozilla SVGFESpotLightElement.y documentation>
getY ::
     (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getY self
  = liftIO ((js_getY (unSVGFESpotLightElement self)) >>= fromJSRef)
-- Raw JavaScript FFI binding reading the element's @z@ property.
foreign import javascript unsafe "$1[\"z\"]" js_getZ ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @z@ attribute; the 'Maybe' comes from 'fromJSRef'
-- marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.z Mozilla SVGFESpotLightElement.z documentation>
getZ ::
     (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getZ self
  = liftIO ((js_getZ (unSVGFESpotLightElement self)) >>= fromJSRef)
-- Raw JavaScript FFI binding reading the element's @pointsAtX@ property.
foreign import javascript unsafe "$1[\"pointsAtX\"]"
        js_getPointsAtX ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @pointsAtX@ attribute; the 'Maybe' comes from 'fromJSRef'
-- marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.pointsAtX Mozilla SVGFESpotLightElement.pointsAtX documentation>
getPointsAtX ::
             (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getPointsAtX self
  = liftIO
      ((js_getPointsAtX (unSVGFESpotLightElement self)) >>= fromJSRef)
-- Raw JavaScript FFI binding reading the element's @pointsAtY@ property.
foreign import javascript unsafe "$1[\"pointsAtY\"]"
        js_getPointsAtY ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @pointsAtY@ attribute; the 'Maybe' comes from 'fromJSRef'
-- marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.pointsAtY Mozilla SVGFESpotLightElement.pointsAtY documentation>
getPointsAtY ::
             (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getPointsAtY self
  = liftIO
      ((js_getPointsAtY (unSVGFESpotLightElement self)) >>= fromJSRef)
-- Raw JavaScript FFI binding reading the element's @pointsAtZ@ property.
foreign import javascript unsafe "$1[\"pointsAtZ\"]"
        js_getPointsAtZ ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @pointsAtZ@ attribute; the 'Maybe' comes from 'fromJSRef'
-- marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.pointsAtZ Mozilla SVGFESpotLightElement.pointsAtZ documentation>
getPointsAtZ ::
             (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getPointsAtZ self
  = liftIO
      ((js_getPointsAtZ (unSVGFESpotLightElement self)) >>= fromJSRef)
-- Raw JavaScript FFI binding reading the element's @specularExponent@ property.
foreign import javascript unsafe "$1[\"specularExponent\"]"
        js_getSpecularExponent ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @specularExponent@ attribute; the 'Maybe' comes from
-- 'fromJSRef' marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.specularExponent Mozilla SVGFESpotLightElement.specularExponent documentation>
getSpecularExponent ::
                    (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getSpecularExponent self
  = liftIO
      ((js_getSpecularExponent (unSVGFESpotLightElement self)) >>=
         fromJSRef)
-- Raw JavaScript FFI binding reading the element's @limitingConeAngle@ property.
foreign import javascript unsafe "$1[\"limitingConeAngle\"]"
        js_getLimitingConeAngle ::
        JSRef SVGFESpotLightElement -> IO (JSRef SVGAnimatedNumber)
 
-- | Read the @limitingConeAngle@ attribute; the 'Maybe' comes from
-- 'fromJSRef' marshalling.  <https://developer.mozilla.org/en-US/docs/Web/API/SVGFESpotLightElement.limitingConeAngle Mozilla SVGFESpotLightElement.limitingConeAngle documentation>
getLimitingConeAngle ::
                     (MonadIO m) => SVGFESpotLightElement -> m (Maybe SVGAnimatedNumber)
getLimitingConeAngle self
  = liftIO
      ((js_getLimitingConeAngle (unSVGFESpotLightElement self)) >>=
         fromJSRef)
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/SVGFESpotLightElement.hs
|
mit
| 5,072
| 48
| 11
| 749
| 1,035
| 581
| 454
| 82
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-|
Copyright: (c) Guilherme Azzi, 2014
License: MIT
Maintainer: ggazzi@inf.ufrgs.br
Stability: experimental
Provides the core definitions of the public API for Widgets. The internal
structure of widgets, defined in the hidden module
'Reactive.Banana.Dzen.Widget', is /NOT/ exported here.
Therefore, other packages are free to build widgets by combining the basic ones
defined by this package, without the risk of breaking our invariants.
-}
module Reactive.Banana.Dzen.Widget
( Widget
, label
, string
, wshow
) where
import Control.Applicative
import Data.String
import Reactive.Banana
import Reactive.Banana.Dzen.Internal.Widget
-- | Lets string literals denote constant widgets (with
-- OverloadedStrings enabled at use sites), delegating to 'label'.
instance IsString (Widget t) where
  fromString = label
-- | Displays an invariant string.
--
-- Special characters are escaped first, so the rendered text can never
-- be interpreted as a dzen command.
label :: String -> Widget t
label s = Widget (pure (tellString (escape s)))
-- | Displays a time-varying string.
--
-- Special characters are escaped first, so the rendered text can never
-- be interpreted as a dzen command.
string :: Behavior t String -> Widget t
string behavior = Widget ((tellString . escape) <$> behavior)
-- | Displays a time-varying value via its 'Show' instance.
--
-- Special characters are escaped first, so the rendered text can never
-- be interpreted as a dzen command.
wshow :: Show a => Behavior t a -> Widget t
wshow behavior = Widget ((tellString . escape . show) <$> behavior)
-- | Escape every \'^\' by doubling it, leaving all other characters
-- unchanged, so the output never starts a dzen command sequence.
escape :: String -> String
escape = concatMap escapeChar
  where
    escapeChar '^' = "^^"
    escapeChar c   = [c]
|
ggazzi/hzen
|
src/Reactive/Banana/Dzen/Widget.hs
|
mit
| 1,509
| 0
| 9
| 266
| 251
| 141
| 110
| 22
| 1
|
{-# LANGUAGE TypeOperators, TypeFamilies, DataKinds, PolyKinds, GADTs #-}
module Tuples where
--import Data.HList
-- | Empty marker type (unused by the promoted-list instances below).
data Z
-- | Value-level cons-like pair; @x :. y@ pairs a head with a tail.
data x :. y = x :. y
-- | Map a type constructor @f@ over a promoted type-level list.
type family FMap (f :: * -> *) (a :: [*]) :: [*]
{-
type instance FMap f Z = Z
type instance FMap f (x :. y) = (f x) :. (FMap f y)
type instance FMap f (x, y) = (f x, f y)
type instance FMap f (x, y, z) = (f x, f y, f z)
type instance FMap f (x, y, z, w) = (f x, f y, f z, f w)
-}
-- Base case: the empty type-level list maps to itself.
type instance FMap f '[] = '[]
-- Inductive case: apply @f@ to the head, recurse on the tail.
type instance FMap f (e ': l) = (f e) ': (FMap f l)
--stype instance FMap f (`[]` :: *) = (`[]` :: *)
--stype instance FMap f (`[]` :: *) = (`[]` :: *)
|
vladfi1/hs-misc
|
Tuples.hs
|
mit
| 575
| 0
| 7
| 152
| 118
| 71
| 47
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-cluster-kerberosattributes.html
module Stratosphere.ResourceProperties.EMRClusterKerberosAttributes where
import Stratosphere.ResourceImports
-- | Full data type definition for EMRClusterKerberosAttributes. See
-- 'emrClusterKerberosAttributes' for a more convenient constructor.
-- The three 'Maybe' fields are optional in the CloudFormation schema;
-- KdcAdminPassword and Realm are required.
data EMRClusterKerberosAttributes =
  EMRClusterKerberosAttributes
  { _eMRClusterKerberosAttributesADDomainJoinPassword :: Maybe (Val Text)
  , _eMRClusterKerberosAttributesADDomainJoinUser :: Maybe (Val Text)
  , _eMRClusterKerberosAttributesCrossRealmTrustPrincipalPassword :: Maybe (Val Text)
  , _eMRClusterKerberosAttributesKdcAdminPassword :: Val Text
  , _eMRClusterKerberosAttributesRealm :: Val Text
  } deriving (Show, Eq)
-- | Serialise to the CloudFormation JSON shape; optional fields that
-- are 'Nothing' are dropped from the object entirely.
instance ToJSON EMRClusterKerberosAttributes where
  toJSON EMRClusterKerberosAttributes{..} = object (catMaybes fields)
    where
      fields =
        [ fmap (("ADDomainJoinPassword",) . toJSON) _eMRClusterKerberosAttributesADDomainJoinPassword
        , fmap (("ADDomainJoinUser",) . toJSON) _eMRClusterKerberosAttributesADDomainJoinUser
        , fmap (("CrossRealmTrustPrincipalPassword",) . toJSON) _eMRClusterKerberosAttributesCrossRealmTrustPrincipalPassword
        , Just (("KdcAdminPassword",) (toJSON _eMRClusterKerberosAttributesKdcAdminPassword))
        , Just (("Realm",) (toJSON _eMRClusterKerberosAttributesRealm))
        ]
-- | Constructor for 'EMRClusterKerberosAttributes' taking only the
-- required fields; every optional field starts out as 'Nothing'.
emrClusterKerberosAttributes
  :: Val Text -- ^ 'emrckaKdcAdminPassword'
  -> Val Text -- ^ 'emrckaRealm'
  -> EMRClusterKerberosAttributes
emrClusterKerberosAttributes kdcAdminPassword' realm' =
  EMRClusterKerberosAttributes
  { _eMRClusterKerberosAttributesADDomainJoinPassword = Nothing
  , _eMRClusterKerberosAttributesADDomainJoinUser = Nothing
  , _eMRClusterKerberosAttributesCrossRealmTrustPrincipalPassword = Nothing
  , _eMRClusterKerberosAttributesKdcAdminPassword = kdcAdminPassword'
  , _eMRClusterKerberosAttributesRealm = realm'
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-cluster-kerberosattributes.html#cfn-elasticmapreduce-cluster-kerberosattributes-addomainjoinpassword
emrckaADDomainJoinPassword :: Lens' EMRClusterKerberosAttributes (Maybe (Val Text))
emrckaADDomainJoinPassword =
  lens _eMRClusterKerberosAttributesADDomainJoinPassword
       (\record newVal -> record { _eMRClusterKerberosAttributesADDomainJoinPassword = newVal })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-cluster-kerberosattributes.html#cfn-elasticmapreduce-cluster-kerberosattributes-addomainjoinuser
emrckaADDomainJoinUser :: Lens' EMRClusterKerberosAttributes (Maybe (Val Text))
emrckaADDomainJoinUser =
  lens _eMRClusterKerberosAttributesADDomainJoinUser
       (\record newVal -> record { _eMRClusterKerberosAttributesADDomainJoinUser = newVal })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-cluster-kerberosattributes.html#cfn-elasticmapreduce-cluster-kerberosattributes-crossrealmtrustprincipalpassword
emrckaCrossRealmTrustPrincipalPassword :: Lens' EMRClusterKerberosAttributes (Maybe (Val Text))
emrckaCrossRealmTrustPrincipalPassword =
  lens _eMRClusterKerberosAttributesCrossRealmTrustPrincipalPassword
       (\record newVal -> record { _eMRClusterKerberosAttributesCrossRealmTrustPrincipalPassword = newVal })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-cluster-kerberosattributes.html#cfn-elasticmapreduce-cluster-kerberosattributes-kdcadminpassword
emrckaKdcAdminPassword :: Lens' EMRClusterKerberosAttributes (Val Text)
emrckaKdcAdminPassword =
  lens _eMRClusterKerberosAttributesKdcAdminPassword
       (\record newVal -> record { _eMRClusterKerberosAttributesKdcAdminPassword = newVal })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-cluster-kerberosattributes.html#cfn-elasticmapreduce-cluster-kerberosattributes-realm
emrckaRealm :: Lens' EMRClusterKerberosAttributes (Val Text)
emrckaRealm =
  lens _eMRClusterKerberosAttributesRealm
       (\record newVal -> record { _eMRClusterKerberosAttributesRealm = newVal })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/EMRClusterKerberosAttributes.hs
|
mit
| 4,324
| 0
| 13
| 364
| 538
| 304
| 234
| 44
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-| DRBD proc file parser
This module holds the definition of the parser that extracts status
information from the DRBD proc file.
-}
{-
Copyright (C) 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Storage.Drbd.Parser (drbdStatusParser, commaIntParser) where
import Control.Applicative ((<*>), (*>), (<*), (<$>), (<|>), pure)
import qualified Data.Attoparsec.Text as A
import qualified Data.Attoparsec.Combinator as AC
import Data.Attoparsec.Text (Parser)
import Data.List
import Data.Maybe
import Data.Text (Text, unpack)
import Ganeti.Storage.Drbd.Types
-- | Our own space-skipping function, because A.skipSpace also skips
-- newline characters.  It skips ZERO or more spaces, so it never
-- fails, even when there are no spaces at all.
skipSpaces :: Parser ()
skipSpaces = A.takeWhile A.isHorizontalSpace *> pure ()
-- | Skips horizontal spaces and the given literal string, then runs a
-- parser and returns its result.
skipSpacesAndString :: Text -> Parser a -> Parser a
skipSpacesAndString s parser = do
  skipSpaces
  _ <- A.string s
  parser
-- | True for NUL as well as the regular end-of-line characters, to
-- tolerate malformed line terminators.
isBadEndOfLine :: Char -> Bool
isBadEndOfLine c = c == '\0' || A.isEndOfLine c
-- | Wrap a parser's result in 'Maybe'.  The resulting parser never
-- fails: it yields 'Nothing' when the inner parser cannot match.
optional :: Parser a -> Parser (Maybe a)
optional parser = fmap Just parser <|> pure Nothing
-- | The parser for a whole DRBD status file: a version-info header
-- followed by device blocks until the end of input.
drbdStatusParser :: [DrbdInstMinor] -> Parser DRBDStatus
drbdStatusParser instMinor = do
  versionInfo <- versionInfoParser
  devices <- deviceParser instMinor `AC.manyTill` A.endOfInput
  _ <- A.endOfInput
  return (DRBDStatus versionInfo devices)
-- | The parser for the version information lines.  Every individual
-- field is optional, but the parser fails when none of them could be
-- recognised at all.
versionInfoParser :: Parser VersionInfo
versionInfoParser = do
  versionF <- optional versionP
  apiF <- optional apiP
  protoF <- optional protoP
  srcVersionF <- optional srcVersionP
  ghF <- fmap unpack <$> optional ghP
  builderF <- fmap unpack <$> optional builderP
  -- Reject the degenerate case where nothing was recognised.
  if and [ isNothing versionF
         , isNothing apiF
         , isNothing protoF
         , isNothing srcVersionF
         , isNothing ghF
         , isNothing builderF
         ]
    then fail "versionInfo"
    else pure $ VersionInfo versionF apiF protoF srcVersionF ghF builderF
  where
    versionP =
      A.string "version:"
      *> skipSpaces
      *> fmap unpack (A.takeWhile (not . A.isHorizontalSpace))
    apiP =
      skipSpacesAndString "(api:" . fmap unpack $ A.takeWhile (/= '/')
    protoP =
      A.string "/proto:"
      *> fmap unpack (A.takeWhile (/= ')'))
      <* A.takeTill A.isEndOfLine
      <* A.endOfLine
    srcVersionP =
      A.string "srcversion:"
      *> AC.skipMany1 A.space
      *> fmap unpack (A.takeTill A.isEndOfLine)
      <* A.endOfLine
    ghP =
      A.string "GIT-hash:"
      *> skipSpaces
      *> A.takeWhile (not . A.isHorizontalSpace)
    builderP =
      skipSpacesAndString "build by" $
        skipSpaces
        *> A.takeTill A.isEndOfLine
        <* A.endOfLine
-- | The parser for a (multi-line) string representing a device.
deviceParser :: [DrbdInstMinor] -> Parser DeviceInfo
deviceParser instMinor = do
  additionalEOL
  deviceNum <- skipSpaces *> A.decimal <* A.char ':'
  connState <- skipSpacesAndString "cs:" connStateParser
  if connState == Unconfigured
    then parseUnconfigured deviceNum
    else parseConfigured deviceNum connState
  where
    -- An unconfigured device carries no further information.
    parseUnconfigured deviceNum = do
      additionalEOL
      return (UnconfiguredDevice deviceNum)
    -- A configured device: roles, disk states, protocol, I/O flags,
    -- performance counters and the optional sync/resync/act_log parts.
    parseConfigured deviceNum connState = do
      roles <- skipSpaces *> skipRoleString *> localRemoteParser roleParser
      diskStates <- skipSpacesAndString "ds:" (localRemoteParser diskStateParser)
      replicProtocol <- A.space *> A.anyChar
      ioFlags <- skipSpaces *> ioFlagsParser <* A.skipWhile isBadEndOfLine
      perf <- perfIndicatorsParser
      syncS <- conditionalSyncStatusParser connState
      resyncInfo <- optional resyncParser
      actLogInfo <- optional actLogParser
      additionalEOL
      let inst = find ((deviceNum ==) . dimMinor) instMinor
          instName = fmap dimInstName inst
      return $ DeviceInfo deviceNum connState roles diskStates replicProtocol
                 ioFlags perf syncS resyncInfo actLogInfo instName
    -- Sync status lines only appear while a resync is in flight.
    conditionalSyncStatusParser SyncSource = Just <$> syncStatusParser
    conditionalSyncStatusParser SyncTarget = Just <$> syncStatusParser
    conditionalSyncStatusParser _          = pure Nothing
    skipRoleString = A.string "ro:" <|> A.string "st:"
    resyncParser = skipSpacesAndString "resync:" additionalInfoParser
    actLogParser = skipSpacesAndString "act_log:" additionalInfoParser
    additionalEOL = A.skipWhile A.isEndOfLine
-- | The parser for the connection state.
--
-- Fix: the \"Disconnecting\" state was misspelt (\"Disconnectiog\"),
-- so the real DRBD state name could never be parsed.  The alternative
-- ordering of the original is preserved.
connStateParser :: Parser ConnState
connStateParser = AC.choice
  [ A.string "StandAlone"     *> pure StandAlone
  , A.string "Disconnecting"  *> pure Disconnecting
  , A.string "Unconnected"    *> pure Unconnected
  , A.string "Timeout"        *> pure Timeout
  , A.string "BrokenPipe"     *> pure BrokenPipe
  , A.string "NetworkFailure" *> pure NetworkFailure
  , A.string "ProtocolError"  *> pure ProtocolError
  , A.string "TearDown"       *> pure TearDown
  , A.string "WFConnection"   *> pure WFConnection
  , A.string "WFReportParams" *> pure WFReportParams
  , A.string "Connected"      *> pure Connected
  , A.string "StartingSyncS"  *> pure StartingSyncS
  , A.string "StartingSyncT"  *> pure StartingSyncT
  , A.string "WFBitMapS"      *> pure WFBitMapS
  , A.string "WFBitMapT"      *> pure WFBitMapT
  , A.string "WFSyncUUID"     *> pure WFSyncUUID
  , A.string "SyncSource"     *> pure SyncSource
  , A.string "SyncTarget"     *> pure SyncTarget
  , A.string "PausedSyncS"    *> pure PausedSyncS
  , A.string "PausedSyncT"    *> pure PausedSyncT
  , A.string "VerifyS"        *> pure VerifyS
  , A.string "VerifyT"        *> pure VerifyT
  , A.string "Unconfigured"   *> pure Unconfigured
  ]
-- | Parse two values of the same type separated by a '/'; the first is
-- considered local, the second remote.
localRemoteParser :: Parser a -> Parser (LocalRemote a)
localRemoteParser parser = do
  localVal <- parser
  _ <- A.char '/'
  remoteVal <- parser
  return (LocalRemote localVal remoteVal)
-- | The parser for resource roles.
roleParser :: Parser Role
roleParser = AC.choice
  [ A.string "Primary"   *> pure Primary
  , A.string "Secondary" *> pure Secondary
  , A.string "Unknown"   *> pure Unknown
  ]
-- | The parser for disk states (original alternative order preserved).
diskStateParser :: Parser DiskState
diskStateParser = AC.choice
  [ A.string "Diskless"     *> pure Diskless
  , A.string "Attaching"    *> pure Attaching
  , A.string "Failed"       *> pure Failed
  , A.string "Negotiating"  *> pure Negotiating
  , A.string "Inconsistent" *> pure Inconsistent
  , A.string "Outdated"     *> pure Outdated
  , A.string "DUnknown"     *> pure DUnknown
  , A.string "Consistent"   *> pure Consistent
  , A.string "UpToDate"     *> pure UpToDate
  ]
-- | Collect the I/O flags: everything up to the (possibly malformed)
-- end of line.
ioFlagsParser :: Parser String
ioFlagsParser = unpack <$> A.takeWhile (not . isBadEndOfLine)
-- | The parser for the performance-indicator counters; the last three
-- (ep, wo, oos) are optional as they are absent in older DRBD output.
perfIndicatorsParser :: Parser PerfIndicators
perfIndicatorsParser = do
  nsV <- skipSpacesAndString "ns:" A.decimal
  nrV <- skipSpacesAndString "nr:" A.decimal
  dwV <- skipSpacesAndString "dw:" A.decimal
  drV <- skipSpacesAndString "dr:" A.decimal
  alV <- skipSpacesAndString "al:" A.decimal
  bmV <- skipSpacesAndString "bm:" A.decimal
  loV <- skipSpacesAndString "lo:" A.decimal
  peV <- skipSpacesAndString "pe:" A.decimal
  uaV <- skipSpacesAndString "ua:" A.decimal
  apV <- skipSpacesAndString "ap:" A.decimal
  epV <- optional (skipSpacesAndString "ep:" A.decimal)
  woV <- optional (skipSpacesAndString "wo:" A.anyChar)
  oosV <- optional (skipSpacesAndString "oos:" A.decimal)
  skipSpaces
  _ <- A.endOfLine
  return (PerfIndicators nsV nrV dwV drV alV bmV loV peV uaV apV epV woV oosV)
-- | The parser for the synchronization status: progress bar, percent
-- done, done/total sizes, estimated finish time, speed and (optionally)
-- the wanted speed.
syncStatusParser :: Parser SyncStatus
syncStatusParser = do
  _ <- statusBarParser
  percent <- skipSpacesAndString "sync'ed:" (skipSpaces *> A.double <* A.char '%')
  partDone <- skipSpaces *> A.char '(' *> A.decimal
  totalToDo <- A.char '/' *> A.decimal <* A.char ')'
  unit <- sizeUnitParser <* optional A.endOfLine
  finishTime <- skipSpacesAndString "finish:" (skipSpaces *> timeParser)
  speed <- skipSpacesAndString "speed:" $
             skipSpaces
             *> commaIntParser
             <* skipSpaces
             <* A.char '('
             <* commaIntParser
             <* A.char ')'
  -- The "want:" field is not always present; fall back to Nothing.
  wanted <- skipSpacesAndString "want:" (skipSpaces *> (Just <$> commaIntParser))
              <|> pure Nothing
  speedSizeUnit <- skipSpaces *> sizeUnitParser
  speedTimeUnit <- A.char '/' *> timeUnitParser
  _ <- A.endOfLine
  return $ SyncStatus percent partDone totalToDo unit finishTime speed wanted
             speedSizeUnit speedTimeUnit
-- | Recognise (and discard) the sync status bar, e.g. @[===>....]@.
statusBarParser :: Parser ()
statusBarParser = do
  skipSpaces
  _ <- A.char '['
  A.skipWhile (== '=')
  A.skipWhile (== '>')
  A.skipWhile (== '.')
  _ <- A.char ']'
  pure ()
-- | Recognise data size units (only the ones actually found in DRBD
-- files are implemented).
sizeUnitParser :: Parser SizeUnit
sizeUnitParser = AC.choice
  [ A.string "K" *> pure KiloByte
  , A.string "M" *> pure MegaByte
  ]
-- | Recognise a time value in @hh:mm:ss@ form.
timeParser :: Parser Time
timeParser = do
  hours <- A.decimal :: Parser Int
  _ <- A.char ':'
  minutes <- A.decimal :: Parser Int
  _ <- A.char ':'
  seconds <- A.decimal :: Parser Int
  return (Time hours minutes seconds)
-- | Recognise time units (only the ones actually found in DRBD files
-- are implemented).
timeUnitParser :: Parser TimeUnit
timeUnitParser = A.string "sec" *> pure Second
-- | Haskell does not recognise ',' as the thousands separator every 3
-- digits but DRBD uses it, so we need an ad-hoc parser.
-- If a number beginning with more than 3 digits without a comma is
-- parsed, only the first 3 digits are considered to be valid, the rest
-- is not consumed, and left for further parsing.
--
-- Fix: drop the redundant @x <- m; pure x@ pattern of the original.
commaIntParser :: Parser Int
commaIntParser = do
  leading <- AC.count 3 A.digit <|> AC.count 2 A.digit <|> AC.count 1 A.digit
  -- 'read' is total here: 'leading' is one to three decimal digits.
  commaIntHelper (read leading)
-- | Helper for 'commaIntParser': consume further @,ddd@ triplets,
-- folding them into the accumulator, and stop at the first
-- non-triplet, returning what has been accumulated so far.
commaIntHelper :: Int -> Parser Int
commaIntHelper acc = nextTriplet <|> pure acc
  where
    nextTriplet = do
      _ <- A.char ','
      triplet <- AC.count 3 A.digit
      commaIntHelper (acc * 1000 + (read triplet :: Int))
-- | Parser for the additional information provided by DRBD <= 8.0
-- (the resync / act_log statistics lines).
additionalInfoParser :: Parser AdditionalInfo
additionalInfoParser = do
  usedV <- skipSpacesAndString "used:" A.decimal
  totalV <- A.char '/' *> A.decimal
  hitsV <- skipSpacesAndString "hits:" A.decimal
  missesV <- skipSpacesAndString "misses:" A.decimal
  starvingV <- skipSpacesAndString "starving:" A.decimal
  dirtyV <- skipSpacesAndString "dirty:" A.decimal
  changedV <- skipSpacesAndString "changed:" A.decimal
  _ <- A.endOfLine
  return (AdditionalInfo usedV totalV hitsV missesV starvingV dirtyV changedV)
|
ribag/ganeti-experiments
|
src/Ganeti/Storage/Drbd/Parser.hs
|
gpl-2.0
| 13,128
| 0
| 26
| 3,135
| 2,911
| 1,432
| 1,479
| 266
| 4
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Darcs.Patch.V1 ( Patch ) where
import Darcs.Patch.Matchable ( Matchable )
import Darcs.Patch.Patchy ( Patchy )
import Darcs.Patch.Prim ( PrimPatch )
import Darcs.Patch.RepoPatch ( RepoPatch )
import Darcs.Patch.V1.Apply ()
import Darcs.Patch.V1.Commute ()
import Darcs.Patch.V1.Core ( Patch )
import Darcs.Patch.V1.Read ()
import Darcs.Patch.V1.Show ()
import Darcs.Patch.V1.Viewing ()
-- Empty instance heads: the method definitions presumably come from
-- the orphan-instance modules imported above (Apply, Commute, Read,
-- Show, Viewing) and/or class defaults -- NOTE(review): confirm
-- against the class definitions.
instance PrimPatch prim => Patchy (Patch prim)
instance PrimPatch prim => Matchable (Patch prim)
instance PrimPatch prim => RepoPatch (Patch prim)
|
DavidAlphaFox/darcs
|
src/Darcs/Patch/V1.hs
|
gpl-2.0
| 581
| 0
| 7
| 74
| 179
| 107
| 72
| 15
| 0
|
module Bot.Commands where
import Bot.Types
import Bot.Methods
import Control.Monad
import Network.HTTP.Client
import System.Process (readProcess)
import Data.Time.Clock.POSIX
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>))
#endif
-- | Seconds since the Unix epoch (compared against 'getPOSIXTime').
type Posix = Int
-- | Canned description of the bot's commands.
helpText :: String
helpText = "I can grant access to h4h with the command `/door` or `/gate`"
-- | A chat is authorised exactly when it is the configured group chat.
isAuthenticated :: Config -> Chat -> Bool
isAuthenticated cfg chat = cfgGroup cfg == chatId chat
-- | True when the message timestamp is within the configured timeout
-- of the current wall-clock time.
isTimely :: Config -> Posix -> IO Bool
isTimely cfg t0 = do
  now <- round <$> getPOSIXTime
  return (now - t0 < cfgTimeout cfg)
-- | Dispatch one Telegram update; messages older than the configured
-- timeout are ignored silently.
-- NOTE(review): the pattern match only covers Update wrapping a
-- Message -- confirm no other Update shapes can reach this handler.
handleUpdate :: Config -> Manager -> Update -> IO ()
handleUpdate cfg http (Update _ (Message _ _ t chat mtext)) = do
  timely <- isTimely cfg t
  when timely (dispatch mtext)
  where
    dispatch (Just "/start")       = respond helpText
    dispatch (Just "/help")        = respond helpText
    dispatch (Just "/settings")    = respond "settings text"
    dispatch (Just "/door")        = tryDoor
    dispatch (Just "/door@h4hBot") = tryDoor
    dispatch (Just "/gate")        = tryGate
    dispatch (Just "/gate@h4hBot") = tryGate
    dispatch (Just "/hostname")    = hostname
    dispatch _                     = respond "I'm the h4h access bot"
    respond msg = void $ sendMessage cfg http (chatId chat) msg
    tryDoor
      | isAuthenticated cfg chat = do
          respond "Attempting to open door..."
          access http (cfgDoor cfg)
      | otherwise = respond "Sorry, only works from the House4Hack Access group."
    tryGate
      | isAuthenticated cfg chat = do
          respond "Attempting to open gate..."
          access http (cfgGate cfg)
      | otherwise = respond "Sorry, only works from the House4Hack Access group."
    hostname = do
      s <- readProcess "hostname" ["-I"] []
      respond $ "IP addresses: " ++ take (length s - 2) s
-- | Fire an HTTP request at the given URL, discarding the response;
-- 'parseUrlThrow' makes non-2xx statuses raise an exception.
access :: Manager -> String -> IO ()
access http url = do
  req <- parseUrlThrow url
  _ <- httpLbs req http
  return ()
|
house4hack/h4h-bot
|
src/Bot/Commands.hs
|
gpl-3.0
| 1,942
| 0
| 19
| 522
| 543
| 265
| 278
| 45
| 11
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
module System.Console.Quickterm.CanMarshall
( CanMarshall (..)
) where
import Text.Regex.Base hiding (empty)
import Text.Regex.TDFA hiding (empty)
import System.Console.Quickterm.Deserializer
import System.Console.Quickterm.Help
-- |Handles marshaling from a cmd-line argument to a Haskell data type.
-- 'asInput' renders a value back into the argument form that
-- 'deserializer' is expected to accept.
class CanMarshall a where
  -- |A default value for the generic atomic operation 'param'.
  defaultM :: a
  -- |A help description for the generic atomic operation 'param'.
  helpU :: a -> Int -> String
  -- |A deserializer declaration for the generic atomic operation 'param'.
  deserializer :: Deserializer a
  -- |A conversion of a value to the predicted input.
  asInput :: a -> String
-- | Marshal 'Int' arguments.
--
-- Fix: the regex is now anchored (@^...$@).  The unanchored original
-- accepted any string merely /containing/ a digit (e.g. @"a1"@) and
-- then crashed in the partial 'read'.
instance CanMarshall Int where
  defaultM = 0
  helpU _ = indent "<Integer>"
  deserializer = tryConvert $ \st ->
    if st =~ "^(0|1|2|3|4|5|6|7|8|9)+$"
      then [(read st,0)]
      else [(0,length st * 2)]
  asInput = show
-- | Marshal 'String' arguments.
instance CanMarshall String where
  defaultM = "<value>"
  helpU _ = indent "<String>"
  -- NOTE(review): the regex is unanchored, so any string containing at
  -- least one non-'-' character is accepted in full (including any
  -- '-' characters it contains) -- confirm this is intended.
  deserializer = tryConvert $ \st ->
    if st =~ "([^-]+)"
      then [(st,0)]
      else [("str",length st * 2)]
  asInput = id
|
SamuelSchlesinger/Quickterm
|
src/lib/System/Console/Quickterm/CanMarshall.hs
|
gpl-3.0
| 1,322
| 0
| 12
| 345
| 269
| 159
| 110
| 29
| 0
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Test.Language.Verilog.Syntax.Number where
import Data.Bits
import Test.Invariant
import Test.Tasty (TestTree, testGroup)
import qualified Test.Tasty.QuickCheck as QC
import qualified Test.Tasty.SmallCheck as SC
import Test.QuickCheck (Arbitrary, Gen, arbitrary, choose, frequency, shrink)
import Test.SmallCheck.Series
import Language.Verilog.Syntax.Number
import Language.Verilog.Syntax.Number.Value
import qualified Test.Language.Verilog.Syntax.Number.Value as TV
import Test.Num (Known (..))
import qualified Test.Num as TN
-- | Generate a list of exactly @m@ values drawn from the given
-- generator.
arbitraryValues :: Arbitrary a => Gen a -> Int -> Gen [a]
arbitraryValues _ 0 = return []
arbitraryValues gen m = (:) <$> gen <*> arbitraryValues gen (m - 1)
-- | Uniform random sign; shrinking prefers 'Unsigned'.
instance Arbitrary Sign where
  arbitrary = toSign <$> choose (True, False)
    where toSign True  = Unsigned
          toSign False = Signed
  shrink Unsigned = []
  shrink Signed = [Unsigned]
-- | Sized generator: width @n@, random sign; digit values come from
-- the known (0/1) generator with weight 7 and from the full 'Value'
-- generator with weight 2.  Shrinking shrinks the digit list and
-- keeps the recorded width consistent with it.
instance Arbitrary Number where
  arbitrary = QC.sized $ \n -> do
    sign <- arbitrary :: Gen Sign
    known <- frequency [ (7, return 0)
                       , (2, return 1)
                       ]
    -- 'known' is only ever 0 or 1, so this case is exhaustive in
    -- practice despite the missing catch-all.
    let gen = case known of
          0 -> (arbitrary :: Gen (Known Value)) >>= \(Known a) -> return a
          1 -> arbitrary :: Gen Value
    val <- arbitraryValues gen n
    return $ Number sign n val
  shrink (Number sign _ val) = do
    v <- shrink val
    return $ Number sign (length v) v
-- | Like the 'Number' generator, but every digit is drawn from the
-- known-value generator, so the whole number is fully known.
instance Arbitrary (Known Number) where
  arbitrary = QC.sized $ \n -> do
    sign <- arbitrary :: Gen Sign
    let gen = (arbitrary :: Gen (Known Value)) >>= \(Known a) -> return a
    val <- arbitraryValues gen n
    return . Known $ Number sign n val
  shrink (Known n) = Known <$> shrink n
-- | SmallCheck enumeration of signs: 'Unsigned' first, then 'Signed'.
instance Monad m => Serial m Sign where
  series = cons0 Unsigned \/ cons0 Signed
-- | Enumerate all signed and unsigned 'Number's of width @d@ whose
-- digits are drawn from the given value set.
numbers :: [Value] -> Depth -> [Number]
numbers set d = map signed values ++ map unsigned values
  where unsigned = Number Unsigned d
        signed = Number Signed d
        values = build d [[]]
        -- Extend every partial digit list by one digit, d times.
        build :: Depth -> [[Value]] -> [[Value]]
        build 0 vs = vs
        build n vs = build (n - 1) ((:) <$> set <*> vs)
-- | Numbers over the full four-valued alphabet (bits may be X or Z).
instance Monad m => Serial m Number where
  series = generate (numbers [Zero, One, Unknown, HighZ])

-- | Fully known numbers: bits restricted to 'Zero' and 'One'.
instance Monad m => Serial m (Known Number) where
  series = Known <$> generate (numbers [Zero, One])
-- | Top-level test tree for the 'Num' instance of 'Number'.
properties :: TestTree
properties = testGroup "Number Num instance"
  [smallNum]
-- | Num properties: SmallCheck coverage of the addition and
-- multiplication laws (sign-related properties are currently disabled).
smallNum :: TestTree
smallNum = testGroup "Num Number properties (SmallCheck)"
  [ smallPropsAdd
  , smallPropsMul
  -- , propsSign
  ]
-- | Cap the SmallCheck search depth at @n@ — here the depth corresponds
-- to the number of bits — regardless of the depth the runner asked for.
limitBits :: SC.Testable m a => SC.Depth -> a -> SC.Property m
limitBits n = SC.changeDepth (const n)
-- | Addition laws for 'Number', exhaustively checked on small widths.
smallPropsAdd :: TestTree
smallPropsAdd = testGroup "Addition properties"
  [ SC.testProperty "commutativity"
      $ limitBits 4 (TN.commutativeAdd :: Number -> Number -> Bool)
  , SC.testProperty "associativity"
      $ limitBits 3 (TN.associativeAdd :: Number -> Number -> Number -> Bool)
  , SC.testProperty "adding zero"
      -- Unsigned Zero actually changes the expression type to Unsigned;
      -- Signed Zero is neutral in that respect, so it serves as the unit.
      $ limitBits 4 (knownUnitAdd (Number Signed 1 [Zero]))
  ]
-- | Multiplication laws for 'Number', plus distributivity over addition.
smallPropsMul :: TestTree
smallPropsMul = testGroup "Multiplication properties"
  [ SC.testProperty "commutativity"
      $ limitBits 4 (TN.commutativeMul :: Number -> Number -> Bool)
  , SC.testProperty "associativity"
      $ limitBits 3 (TN.associativeMul :: Number -> Number -> Number -> Bool)
  , SC.testProperty "multiply by 1"
      -- The literal 1 goes through the 'Num Number' instance under test.
      $ limitBits 4 (knownUnitMul 1)
  , SC.testProperty "multiply by 0"
      $ limitBits 4 (knownAnihilatorMul $ Number Unsigned 1 [Zero])
  , SC.testProperty "distributivity"
      $ limitBits 3 (TN.distributive :: Number -> Number -> Number -> Bool)
  ]
-- | Unwrap 'Known' and delegate to 'TN.unitAdd': checks that the given
-- value behaves as the additive unit for fully known numbers.
knownUnitAdd :: Number -> Known Number -> Bool
knownUnitAdd zero (Known x) = TN.unitAdd zero x

-- | Unwrap 'Known' and delegate to 'TN.unitMul': checks that the given
-- value behaves as the multiplicative unit for fully known numbers.
knownUnitMul :: Number -> Known Number -> Bool
knownUnitMul one (Known x) = TN.unitMul one x

-- | Unwrap both 'Known' arguments and delegate to 'TN.anihilatorMul'
-- (spelling inherited from "Test.Num"): the given value should
-- annihilate under multiplication.
knownAnihilatorMul :: Number -> Known Number -> Known Number -> Bool
knownAnihilatorMul zero (Known x) (Known y) = TN.anihilatorMul zero x y
|
quivade/screwdriver
|
test/Test/Language/Verilog/Syntax/Number.hs
|
gpl-3.0
| 4,123
| 0
| 20
| 901
| 1,428
| 750
| 678
| 95
| 2
|
module UnitConversions.Tests where
import Data.Tuple (swap)
import Instances ()
import Test.HUnit ((@?=))
import Test.QuickCheck ((===), (==>))
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (testCase)
import Test.Tasty.QuickCheck (testProperty)
import Types
import UnitConversions
-- | Whether a unit belongs to the metric system.  Exactly 'Ml', 'L'
-- and 'G' count as metric; every other constructor does not.
isMetric :: Unit -> Bool
isMetric Ml = True
isMetric L  = True
isMetric G  = True
isMetric _  = False
-- | All unit-conversion tests bundled into a single tree.
test :: TestTree
test =
  testGroup
    "ConversionTests"
    [ ingredientTests,
      temperatureUnitTests,
      findingTemperaturesUnitTests
    ]
-- | Converting an ingredient that is already in the target system must
-- leave it unchanged (the '==>' precondition restricts each property to
-- ingredients already expressed in that system).
ingredientTests :: TestTree
ingredientTests =
  testGroup
    "IngredientConversions"
    [ testProperty "testToMetric" $
        \x ->
          isMetric (unit x)
            ==> convertIngredientToMetric x === x,
      testProperty "testToImperial" $
        \x ->
          not (isMetric (unit x))
            ==> convertIngredientToImperial x === x
    ]
-- | Spot checks for temperature conversion embedded in free text.
temperatureUnitTests :: TestTree
temperatureUnitTests =
  testGroup
    "TemperatureConversions"
    [ testCase "Converting °F to °C in a text" $
        convertTemperatureToMetric "preheat to 110°F" @?= "preheat to 43°C",
      testCase "Converting °C to °F in a text" $
        -- because of rounding we get 109°F when converting 110° F -> °C -> °F
        convertTemperatureToImperial "preheat to 43°C" @?= "preheat to 109°F"
    ]
-- | Table-driven cases: each pair maps an input string to the
-- temperatures that must be recognised in it.  The input string doubles
-- as the test-case name.  (The previous version swapped each pair and
-- bound the components to names shadowing Prelude's 'exp' and the
-- top-level 'test'.)
findingTemperaturesUnitTests :: TestTree
findingTemperaturesUnitTests =
  testGroup "recognizing/ignoring temperatures in a test" $
    let getTemperatures = map snd . findReplacements
        mkCase (input, expected) =
          testCase input (getTemperatures input @?= expected)
        cases =
          [ ("250°C", [Temperature 250 C]),
            ("10 °C", [Temperature 10 C]),
            ("-10 °C", [Temperature (-10) C]),
            ("abc 15 C def", [Temperature 15 C]),
            ("5 C", [Temperature 5 C]),
            ("250C", [Temperature 250 C]),
            ("999°F", [Temperature 999 F]),
            ("34F", [Temperature 34 F]),
            ("250°C.", [Temperature 250 C]),
            ("250°C!", [Temperature 250 C]),
            ("250°C?", [Temperature 250 C]),
            ("10C 15F", [Temperature 10 C, Temperature 15 F]),
            ("0 Frites", []),
            ("250Crabs", []),
            ("250 Crabs", [])
          ] ::
            [(String, [Temperature])]
     in map mkCase cases
|
JackKiefer/herms
|
test/UnitConversions/Tests.hs
|
gpl-3.0
| 2,366
| 0
| 16
| 658
| 648
| 367
| 281
| 66
| 4
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AdSenseHost.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AdSenseHost.Types.Product where
import Network.Google.AdSenseHost.Types.Sum
import Network.Google.Prelude
--
-- /See:/ 'adClients' smart constructor.
--
-- NOTE(review): auto-generated binding — a list-response wrapper holding
-- one page of 'AdClient' items plus paging\/caching metadata.
data AdClients =
  AdClients'
    { _acEtag :: !(Maybe Text)
    , _acNextPageToken :: !(Maybe Text)
    , _acKind :: !Text
    , _acItems :: !(Maybe [AdClient])
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdClients' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acEtag'
--
-- * 'acNextPageToken'
--
-- * 'acKind'
--
-- * 'acItems'
adClients
    :: AdClients
adClients =
  AdClients'
    { _acEtag = Nothing
    , _acNextPageToken = Nothing
    , _acKind = "adsensehost#adClients"
    , _acItems = Nothing
    }

-- | ETag of this response for caching purposes.
acEtag :: Lens' AdClients (Maybe Text)
acEtag = lens _acEtag (\ s a -> s{_acEtag = a})

-- | Continuation token used to page through ad clients. To retrieve the next
-- page of results, set the next request\'s \"pageToken\" value to this.
acNextPageToken :: Lens' AdClients (Maybe Text)
acNextPageToken
  = lens _acNextPageToken
      (\ s a -> s{_acNextPageToken = a})

-- | Kind of list this is, in this case adsensehost#adClients.
acKind :: Lens' AdClients Text
acKind = lens _acKind (\ s a -> s{_acKind = a})

-- | The ad clients returned in this list response.
--
-- The '_Default' optic presents the underlying @Maybe [AdClient]@ as a
-- plain list — an absent field behaves as empty, mirroring the
-- @.!= mempty@ default used in 'parseJSON'; '_Coerce' adapts the
-- element representation.
acItems :: Lens' AdClients [AdClient]
acItems
  = lens _acItems (\ s a -> s{_acItems = a}) . _Default
      . _Coerce

-- | Decoder tolerating absent optional fields; \"kind\" falls back to
-- its constant and \"items\" to the empty list.
instance FromJSON AdClients where
        parseJSON
          = withObject "AdClients"
              (\ o ->
                 AdClients' <$>
                   (o .:? "etag") <*> (o .:? "nextPageToken") <*>
                     (o .:? "kind" .!= "adsensehost#adClients")
                     <*> (o .:? "items" .!= mempty))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AdClients where
        toJSON AdClients'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _acEtag,
                  ("nextPageToken" .=) <$> _acNextPageToken,
                  Just ("kind" .= _acKind), ("items" .=) <$> _acItems])
--
-- /See:/ 'associationSession' smart constructor.
--
-- NOTE(review): auto-generated binding for one publisher-association
-- flow session.
data AssociationSession =
  AssociationSession'
    { _asStatus :: !(Maybe Text)
    , _asKind :: !Text
    , _asWebsiteLocale :: !(Maybe Text)
    , _asUserLocale :: !(Maybe Text)
    , _asAccountId :: !(Maybe Text)
    , _asProductCodes :: !(Maybe [Text])
    , _asId :: !(Maybe Text)
    , _asWebsiteURL :: !(Maybe Text)
    , _asRedirectURL :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AssociationSession' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asStatus'
--
-- * 'asKind'
--
-- * 'asWebsiteLocale'
--
-- * 'asUserLocale'
--
-- * 'asAccountId'
--
-- * 'asProductCodes'
--
-- * 'asId'
--
-- * 'asWebsiteURL'
--
-- * 'asRedirectURL'
associationSession
    :: AssociationSession
associationSession =
  AssociationSession'
    { _asStatus = Nothing
    , _asKind = "adsensehost#associationSession"
    , _asWebsiteLocale = Nothing
    , _asUserLocale = Nothing
    , _asAccountId = Nothing
    , _asProductCodes = Nothing
    , _asId = Nothing
    , _asWebsiteURL = Nothing
    , _asRedirectURL = Nothing
    }

-- | Status of the completed association, available once the association
-- callback token has been verified. One of ACCEPTED, REJECTED, or ERROR.
asStatus :: Lens' AssociationSession (Maybe Text)
asStatus = lens _asStatus (\ s a -> s{_asStatus = a})

-- | Kind of resource this is, in this case adsensehost#associationSession.
asKind :: Lens' AssociationSession Text
asKind = lens _asKind (\ s a -> s{_asKind = a})

-- | The locale of the user\'s hosted website.
asWebsiteLocale :: Lens' AssociationSession (Maybe Text)
asWebsiteLocale
  = lens _asWebsiteLocale
      (\ s a -> s{_asWebsiteLocale = a})

-- | The preferred locale of the user themselves when going through the
-- AdSense association flow.
asUserLocale :: Lens' AssociationSession (Maybe Text)
asUserLocale
  = lens _asUserLocale (\ s a -> s{_asUserLocale = a})

-- | Hosted account id of the associated publisher after association. Present
-- if status is ACCEPTED.
asAccountId :: Lens' AssociationSession (Maybe Text)
asAccountId
  = lens _asAccountId (\ s a -> s{_asAccountId = a})

-- | The products to associate with the user. Options: AFC, AFG, AFV, AFS
-- (deprecated), AFMC (deprecated)
asProductCodes :: Lens' AssociationSession [Text]
asProductCodes
  = lens _asProductCodes
      (\ s a -> s{_asProductCodes = a})
      . _Default
      . _Coerce

-- | Unique identifier of this association session.
asId :: Lens' AssociationSession (Maybe Text)
asId = lens _asId (\ s a -> s{_asId = a})

-- | The URL of the user\'s hosted website.
asWebsiteURL :: Lens' AssociationSession (Maybe Text)
asWebsiteURL
  = lens _asWebsiteURL (\ s a -> s{_asWebsiteURL = a})

-- | Redirect URL of this association session. Used to redirect users into
-- the AdSense association flow.
asRedirectURL :: Lens' AssociationSession (Maybe Text)
asRedirectURL
  = lens _asRedirectURL
      (\ s a -> s{_asRedirectURL = a})

-- NOTE(review): the wire keys are "websiteUrl" / "redirectUrl"
-- (lower-case "rl") even though the Haskell names capitalise URL.
instance FromJSON AssociationSession where
        parseJSON
          = withObject "AssociationSession"
              (\ o ->
                 AssociationSession' <$>
                   (o .:? "status") <*>
                     (o .:? "kind" .!= "adsensehost#associationSession")
                     <*> (o .:? "websiteLocale")
                     <*> (o .:? "userLocale")
                     <*> (o .:? "accountId")
                     <*> (o .:? "productCodes" .!= mempty)
                     <*> (o .:? "id")
                     <*> (o .:? "websiteUrl")
                     <*> (o .:? "redirectUrl"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AssociationSession where
        toJSON AssociationSession'{..}
          = object
              (catMaybes
                 [("status" .=) <$> _asStatus,
                  Just ("kind" .= _asKind),
                  ("websiteLocale" .=) <$> _asWebsiteLocale,
                  ("userLocale" .=) <$> _asUserLocale,
                  ("accountId" .=) <$> _asAccountId,
                  ("productCodes" .=) <$> _asProductCodes,
                  ("id" .=) <$> _asId,
                  ("websiteUrl" .=) <$> _asWebsiteURL,
                  ("redirectUrl" .=) <$> _asRedirectURL])
--
-- /See:/ 'accounts' smart constructor.
--
-- NOTE(review): auto-generated binding — list response of 'Account'
-- items (no paging token for this resource).
data Accounts =
  Accounts'
    { _aEtag :: !(Maybe Text)
    , _aKind :: !Text
    , _aItems :: !(Maybe [Account])
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Accounts' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aEtag'
--
-- * 'aKind'
--
-- * 'aItems'
accounts
    :: Accounts
accounts =
  Accounts'
    {_aEtag = Nothing, _aKind = "adsensehost#accounts", _aItems = Nothing}

-- | ETag of this response for caching purposes.
aEtag :: Lens' Accounts (Maybe Text)
aEtag = lens _aEtag (\ s a -> s{_aEtag = a})

-- | Kind of list this is, in this case adsensehost#accounts.
aKind :: Lens' Accounts Text
aKind = lens _aKind (\ s a -> s{_aKind = a})

-- | The accounts returned in this list response.
aItems :: Lens' Accounts [Account]
aItems
  = lens _aItems (\ s a -> s{_aItems = a}) . _Default .
      _Coerce

-- | Decoder tolerating absent optional fields.
instance FromJSON Accounts where
        parseJSON
          = withObject "Accounts"
              (\ o ->
                 Accounts' <$>
                   (o .:? "etag") <*>
                     (o .:? "kind" .!= "adsensehost#accounts")
                     <*> (o .:? "items" .!= mempty))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON Accounts where
        toJSON Accounts'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _aEtag, Just ("kind" .= _aKind),
                  ("items" .=) <$> _aItems])
--
-- /See:/ 'adUnits' smart constructor.
--
-- NOTE(review): auto-generated binding — one page of 'AdUnit' items
-- with paging\/caching metadata.
data AdUnits =
  AdUnits'
    { _auEtag :: !(Maybe Text)
    , _auNextPageToken :: !(Maybe Text)
    , _auKind :: !Text
    , _auItems :: !(Maybe [AdUnit])
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdUnits' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'auEtag'
--
-- * 'auNextPageToken'
--
-- * 'auKind'
--
-- * 'auItems'
adUnits
    :: AdUnits
adUnits =
  AdUnits'
    { _auEtag = Nothing
    , _auNextPageToken = Nothing
    , _auKind = "adsensehost#adUnits"
    , _auItems = Nothing
    }

-- | ETag of this response for caching purposes.
auEtag :: Lens' AdUnits (Maybe Text)
auEtag = lens _auEtag (\ s a -> s{_auEtag = a})

-- | Continuation token used to page through ad units. To retrieve the next
-- page of results, set the next request\'s \"pageToken\" value to this.
auNextPageToken :: Lens' AdUnits (Maybe Text)
auNextPageToken
  = lens _auNextPageToken
      (\ s a -> s{_auNextPageToken = a})

-- | Kind of list this is, in this case adsensehost#adUnits.
auKind :: Lens' AdUnits Text
auKind = lens _auKind (\ s a -> s{_auKind = a})

-- | The ad units returned in this list response.
auItems :: Lens' AdUnits [AdUnit]
auItems
  = lens _auItems (\ s a -> s{_auItems = a}) . _Default
      . _Coerce

-- | Decoder tolerating absent optional fields.
instance FromJSON AdUnits where
        parseJSON
          = withObject "AdUnits"
              (\ o ->
                 AdUnits' <$>
                   (o .:? "etag") <*> (o .:? "nextPageToken") <*>
                     (o .:? "kind" .!= "adsensehost#adUnits")
                     <*> (o .:? "items" .!= mempty))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AdUnits where
        toJSON AdUnits'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _auEtag,
                  ("nextPageToken" .=) <$> _auNextPageToken,
                  Just ("kind" .= _auKind), ("items" .=) <$> _auItems])
--
-- /See:/ 'urlChannels' smart constructor.
--
-- NOTE(review): auto-generated binding — one page of 'URLChannel' items
-- with paging\/caching metadata.
data URLChannels =
  URLChannels'
    { _ucEtag :: !(Maybe Text)
    , _ucNextPageToken :: !(Maybe Text)
    , _ucKind :: !Text
    , _ucItems :: !(Maybe [URLChannel])
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'URLChannels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ucEtag'
--
-- * 'ucNextPageToken'
--
-- * 'ucKind'
--
-- * 'ucItems'
urlChannels
    :: URLChannels
urlChannels =
  URLChannels'
    { _ucEtag = Nothing
    , _ucNextPageToken = Nothing
    , _ucKind = "adsensehost#urlChannels"
    , _ucItems = Nothing
    }

-- | ETag of this response for caching purposes.
ucEtag :: Lens' URLChannels (Maybe Text)
ucEtag = lens _ucEtag (\ s a -> s{_ucEtag = a})

-- | Continuation token used to page through URL channels. To retrieve the
-- next page of results, set the next request\'s \"pageToken\" value to
-- this.
ucNextPageToken :: Lens' URLChannels (Maybe Text)
ucNextPageToken
  = lens _ucNextPageToken
      (\ s a -> s{_ucNextPageToken = a})

-- | Kind of list this is, in this case adsensehost#urlChannels.
ucKind :: Lens' URLChannels Text
ucKind = lens _ucKind (\ s a -> s{_ucKind = a})

-- | The URL channels returned in this list response.
ucItems :: Lens' URLChannels [URLChannel]
ucItems
  = lens _ucItems (\ s a -> s{_ucItems = a}) . _Default
      . _Coerce

-- | Decoder tolerating absent optional fields.
instance FromJSON URLChannels where
        parseJSON
          = withObject "URLChannels"
              (\ o ->
                 URLChannels' <$>
                   (o .:? "etag") <*> (o .:? "nextPageToken") <*>
                     (o .:? "kind" .!= "adsensehost#urlChannels")
                     <*> (o .:? "items" .!= mempty))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON URLChannels where
        toJSON URLChannels'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _ucEtag,
                  ("nextPageToken" .=) <$> _ucNextPageToken,
                  Just ("kind" .= _ucKind), ("items" .=) <$> _ucItems])
--
-- /See:/ 'customChannels' smart constructor.
--
-- NOTE(review): auto-generated binding — one page of 'CustomChannel'
-- items with paging\/caching metadata.
data CustomChannels =
  CustomChannels'
    { _ccEtag :: !(Maybe Text)
    , _ccNextPageToken :: !(Maybe Text)
    , _ccKind :: !Text
    , _ccItems :: !(Maybe [CustomChannel])
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'CustomChannels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccEtag'
--
-- * 'ccNextPageToken'
--
-- * 'ccKind'
--
-- * 'ccItems'
customChannels
    :: CustomChannels
customChannels =
  CustomChannels'
    { _ccEtag = Nothing
    , _ccNextPageToken = Nothing
    , _ccKind = "adsensehost#customChannels"
    , _ccItems = Nothing
    }

-- | ETag of this response for caching purposes.
ccEtag :: Lens' CustomChannels (Maybe Text)
ccEtag = lens _ccEtag (\ s a -> s{_ccEtag = a})

-- | Continuation token used to page through custom channels. To retrieve the
-- next page of results, set the next request\'s \"pageToken\" value to
-- this.
ccNextPageToken :: Lens' CustomChannels (Maybe Text)
ccNextPageToken
  = lens _ccNextPageToken
      (\ s a -> s{_ccNextPageToken = a})

-- | Kind of list this is, in this case adsensehost#customChannels.
ccKind :: Lens' CustomChannels Text
ccKind = lens _ccKind (\ s a -> s{_ccKind = a})

-- | The custom channels returned in this list response.
ccItems :: Lens' CustomChannels [CustomChannel]
ccItems
  = lens _ccItems (\ s a -> s{_ccItems = a}) . _Default
      . _Coerce

-- | Decoder tolerating absent optional fields.
instance FromJSON CustomChannels where
        parseJSON
          = withObject "CustomChannels"
              (\ o ->
                 CustomChannels' <$>
                   (o .:? "etag") <*> (o .:? "nextPageToken") <*>
                     (o .:? "kind" .!= "adsensehost#customChannels")
                     <*> (o .:? "items" .!= mempty))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON CustomChannels where
        toJSON CustomChannels'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _ccEtag,
                  ("nextPageToken" .=) <$> _ccNextPageToken,
                  Just ("kind" .= _ccKind), ("items" .=) <$> _ccItems])
--
-- /See:/ 'adUnit' smart constructor.
--
-- NOTE(review): auto-generated binding for a single ad unit resource.
data AdUnit =
  AdUnit'
    { _auuStatus :: !(Maybe Text)
    , _auuMobileContentAdsSettings :: !(Maybe AdUnitMobileContentAdsSettings)
    , _auuKind :: !Text
    , _auuCustomStyle :: !(Maybe AdStyle)
    , _auuName :: !(Maybe Text)
    , _auuContentAdsSettings :: !(Maybe AdUnitContentAdsSettings)
    , _auuCode :: !(Maybe Text)
    , _auuId :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdUnit' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'auuStatus'
--
-- * 'auuMobileContentAdsSettings'
--
-- * 'auuKind'
--
-- * 'auuCustomStyle'
--
-- * 'auuName'
--
-- * 'auuContentAdsSettings'
--
-- * 'auuCode'
--
-- * 'auuId'
adUnit
    :: AdUnit
adUnit =
  AdUnit'
    { _auuStatus = Nothing
    , _auuMobileContentAdsSettings = Nothing
    , _auuKind = "adsensehost#adUnit"
    , _auuCustomStyle = Nothing
    , _auuName = Nothing
    , _auuContentAdsSettings = Nothing
    , _auuCode = Nothing
    , _auuId = Nothing
    }

-- | Status of this ad unit. Possible values are: NEW: Indicates that the ad
-- unit was created within the last seven days and does not yet have any
-- activity associated with it. ACTIVE: Indicates that there has been
-- activity on this ad unit in the last seven days. INACTIVE: Indicates
-- that there has been no activity on this ad unit in the last seven days.
auuStatus :: Lens' AdUnit (Maybe Text)
auuStatus
  = lens _auuStatus (\ s a -> s{_auuStatus = a})

-- | Settings specific to WAP mobile content ads (AFMC - deprecated).
auuMobileContentAdsSettings :: Lens' AdUnit (Maybe AdUnitMobileContentAdsSettings)
auuMobileContentAdsSettings
  = lens _auuMobileContentAdsSettings
      (\ s a -> s{_auuMobileContentAdsSettings = a})

-- | Kind of resource this is, in this case adsensehost#adUnit.
auuKind :: Lens' AdUnit Text
auuKind = lens _auuKind (\ s a -> s{_auuKind = a})

-- | Custom style information specific to this ad unit.
auuCustomStyle :: Lens' AdUnit (Maybe AdStyle)
auuCustomStyle
  = lens _auuCustomStyle
      (\ s a -> s{_auuCustomStyle = a})

-- | Name of this ad unit.
auuName :: Lens' AdUnit (Maybe Text)
auuName = lens _auuName (\ s a -> s{_auuName = a})

-- | Settings specific to content ads (AFC) and highend mobile content ads
-- (AFMC - deprecated).
auuContentAdsSettings :: Lens' AdUnit (Maybe AdUnitContentAdsSettings)
auuContentAdsSettings
  = lens _auuContentAdsSettings
      (\ s a -> s{_auuContentAdsSettings = a})

-- | Identity code of this ad unit, not necessarily unique across ad clients.
auuCode :: Lens' AdUnit (Maybe Text)
auuCode = lens _auuCode (\ s a -> s{_auuCode = a})

-- | Unique identifier of this ad unit. This should be considered an opaque
-- identifier; it is not safe to rely on it being in any particular format.
auuId :: Lens' AdUnit (Maybe Text)
auuId = lens _auuId (\ s a -> s{_auuId = a})

-- | Decoder tolerating absent optional fields.
instance FromJSON AdUnit where
        parseJSON
          = withObject "AdUnit"
              (\ o ->
                 AdUnit' <$>
                   (o .:? "status") <*>
                     (o .:? "mobileContentAdsSettings")
                     <*> (o .:? "kind" .!= "adsensehost#adUnit")
                     <*> (o .:? "customStyle")
                     <*> (o .:? "name")
                     <*> (o .:? "contentAdsSettings")
                     <*> (o .:? "code")
                     <*> (o .:? "id"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AdUnit where
        toJSON AdUnit'{..}
          = object
              (catMaybes
                 [("status" .=) <$> _auuStatus,
                  ("mobileContentAdsSettings" .=) <$>
                    _auuMobileContentAdsSettings,
                  Just ("kind" .= _auuKind),
                  ("customStyle" .=) <$> _auuCustomStyle,
                  ("name" .=) <$> _auuName,
                  ("contentAdsSettings" .=) <$> _auuContentAdsSettings,
                  ("code" .=) <$> _auuCode, ("id" .=) <$> _auuId])
--
-- /See:/ 'report' smart constructor.
--
-- NOTE(review): auto-generated binding for a generated report: headers,
-- rows, totals\/averages, and warnings.
data Report =
  Report'
    { _rKind :: !Text
    , _rAverages :: !(Maybe [Text])
    , _rWarnings :: !(Maybe [Text])
    , _rRows :: !(Maybe [[Text]])
    , _rTotals :: !(Maybe [Text])
    , _rHeaders :: !(Maybe [ReportHeadersItem])
    -- 'Textual' suggests the API transmits this Int64 as a JSON
    -- string; the 'rTotalMatchedRows' lens hides the wrapper.
    , _rTotalMatchedRows :: !(Maybe (Textual Int64))
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Report' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rKind'
--
-- * 'rAverages'
--
-- * 'rWarnings'
--
-- * 'rRows'
--
-- * 'rTotals'
--
-- * 'rHeaders'
--
-- * 'rTotalMatchedRows'
report
    :: Report
report =
  Report'
    { _rKind = "adsensehost#report"
    , _rAverages = Nothing
    , _rWarnings = Nothing
    , _rRows = Nothing
    , _rTotals = Nothing
    , _rHeaders = Nothing
    , _rTotalMatchedRows = Nothing
    }

-- | Kind this is, in this case adsensehost#report.
rKind :: Lens' Report Text
rKind = lens _rKind (\ s a -> s{_rKind = a})

-- | The averages of the report. This is the same length as any other row in
-- the report; cells corresponding to dimension columns are empty.
rAverages :: Lens' Report [Text]
rAverages
  = lens _rAverages (\ s a -> s{_rAverages = a}) .
      _Default
      . _Coerce

-- | Any warnings associated with generation of the report.
rWarnings :: Lens' Report [Text]
rWarnings
  = lens _rWarnings (\ s a -> s{_rWarnings = a}) .
      _Default
      . _Coerce

-- | The output rows of the report. Each row is a list of cells; one for each
-- dimension in the request, followed by one for each metric in the
-- request. The dimension cells contain strings, and the metric cells
-- contain numbers.
rRows :: Lens' Report [[Text]]
rRows
  = lens _rRows (\ s a -> s{_rRows = a}) . _Default .
      _Coerce

-- | The totals of the report. This is the same length as any other row in
-- the report; cells corresponding to dimension columns are empty.
rTotals :: Lens' Report [Text]
rTotals
  = lens _rTotals (\ s a -> s{_rTotals = a}) . _Default
      . _Coerce

-- | The header information of the columns requested in the report. This is a
-- list of headers; one for each dimension in the request, followed by one
-- for each metric in the request.
rHeaders :: Lens' Report [ReportHeadersItem]
rHeaders
  = lens _rHeaders (\ s a -> s{_rHeaders = a}) .
      _Default
      . _Coerce

-- | The total number of rows matched by the report request. Fewer rows may
-- be returned in the response due to being limited by the row count
-- requested or the report row limit.
rTotalMatchedRows :: Lens' Report (Maybe Int64)
rTotalMatchedRows
  = lens _rTotalMatchedRows
      (\ s a -> s{_rTotalMatchedRows = a})
      . mapping _Coerce

-- | Decoder tolerating absent optional fields.
instance FromJSON Report where
        parseJSON
          = withObject "Report"
              (\ o ->
                 Report' <$>
                   (o .:? "kind" .!= "adsensehost#report") <*>
                     (o .:? "averages" .!= mempty)
                     <*> (o .:? "warnings" .!= mempty)
                     <*> (o .:? "rows" .!= mempty)
                     <*> (o .:? "totals" .!= mempty)
                     <*> (o .:? "headers" .!= mempty)
                     <*> (o .:? "totalMatchedRows"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON Report where
        toJSON Report'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _rKind),
                  ("averages" .=) <$> _rAverages,
                  ("warnings" .=) <$> _rWarnings,
                  ("rows" .=) <$> _rRows, ("totals" .=) <$> _rTotals,
                  ("headers" .=) <$> _rHeaders,
                  ("totalMatchedRows" .=) <$> _rTotalMatchedRows])
-- | The font which is included in the style.
--
-- /See:/ 'adStyleFont' smart constructor.
data AdStyleFont =
  AdStyleFont'
    { _asfSize :: !(Maybe Text)
    , _asfFamily :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdStyleFont' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asfSize'
--
-- * 'asfFamily'
adStyleFont
    :: AdStyleFont
adStyleFont = AdStyleFont' {_asfSize = Nothing, _asfFamily = Nothing}

-- | The size of the font. Possible values are: ACCOUNT_DEFAULT_SIZE,
-- ADSENSE_DEFAULT_SIZE, SMALL, MEDIUM and LARGE.
asfSize :: Lens' AdStyleFont (Maybe Text)
asfSize = lens _asfSize (\ s a -> s{_asfSize = a})

-- | The family of the font. Possible values are: ACCOUNT_DEFAULT_FAMILY,
-- ADSENSE_DEFAULT_FAMILY, ARIAL, TIMES and VERDANA.
asfFamily :: Lens' AdStyleFont (Maybe Text)
asfFamily
  = lens _asfFamily (\ s a -> s{_asfFamily = a})

-- | Decoder tolerating absent optional fields.
instance FromJSON AdStyleFont where
        parseJSON
          = withObject "AdStyleFont"
              (\ o ->
                 AdStyleFont' <$> (o .:? "size") <*> (o .:? "family"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AdStyleFont where
        toJSON AdStyleFont'{..}
          = object
              (catMaybes
                 [("size" .=) <$> _asfSize,
                  ("family" .=) <$> _asfFamily])
--
-- /See:/ 'account' smart constructor.
--
-- NOTE(review): auto-generated binding for a single hosted account.
data Account =
  Account'
    { _accStatus :: !(Maybe Text)
    , _accKind :: !Text
    , _accName :: !(Maybe Text)
    , _accId :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Account' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'accStatus'
--
-- * 'accKind'
--
-- * 'accName'
--
-- * 'accId'
account
    :: Account
account =
  Account'
    { _accStatus = Nothing
    , _accKind = "adsensehost#account"
    , _accName = Nothing
    , _accId = Nothing
    }

-- | Approval status of this account. One of: PENDING, APPROVED, DISABLED.
accStatus :: Lens' Account (Maybe Text)
accStatus
  = lens _accStatus (\ s a -> s{_accStatus = a})

-- | Kind of resource this is, in this case adsensehost#account.
accKind :: Lens' Account Text
accKind = lens _accKind (\ s a -> s{_accKind = a})

-- | Name of this account.
accName :: Lens' Account (Maybe Text)
accName = lens _accName (\ s a -> s{_accName = a})

-- | Unique identifier of this account.
accId :: Lens' Account (Maybe Text)
accId = lens _accId (\ s a -> s{_accId = a})

-- | Decoder tolerating absent optional fields.
instance FromJSON Account where
        parseJSON
          = withObject "Account"
              (\ o ->
                 Account' <$>
                   (o .:? "status") <*>
                     (o .:? "kind" .!= "adsensehost#account")
                     <*> (o .:? "name")
                     <*> (o .:? "id"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON Account where
        toJSON Account'{..}
          = object
              (catMaybes
                 [("status" .=) <$> _accStatus,
                  Just ("kind" .= _accKind), ("name" .=) <$> _accName,
                  ("id" .=) <$> _accId])
-- | Settings specific to WAP mobile content ads (AFMC - deprecated).
--
-- /See:/ 'adUnitMobileContentAdsSettings' smart constructor.
data AdUnitMobileContentAdsSettings =
  AdUnitMobileContentAdsSettings'
    { _aumcasSize :: !(Maybe Text)
    , _aumcasScriptingLanguage :: !(Maybe Text)
    , _aumcasMarkupLanguage :: !(Maybe Text)
    , _aumcasType :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdUnitMobileContentAdsSettings' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aumcasSize'
--
-- * 'aumcasScriptingLanguage'
--
-- * 'aumcasMarkupLanguage'
--
-- * 'aumcasType'
adUnitMobileContentAdsSettings
    :: AdUnitMobileContentAdsSettings
adUnitMobileContentAdsSettings =
  AdUnitMobileContentAdsSettings'
    { _aumcasSize = Nothing
    , _aumcasScriptingLanguage = Nothing
    , _aumcasMarkupLanguage = Nothing
    , _aumcasType = Nothing
    }

-- | Size of this ad unit.
aumcasSize :: Lens' AdUnitMobileContentAdsSettings (Maybe Text)
aumcasSize
  = lens _aumcasSize (\ s a -> s{_aumcasSize = a})

-- | The scripting language to use for this ad unit.
aumcasScriptingLanguage :: Lens' AdUnitMobileContentAdsSettings (Maybe Text)
aumcasScriptingLanguage
  = lens _aumcasScriptingLanguage
      (\ s a -> s{_aumcasScriptingLanguage = a})

-- | The markup language to use for this ad unit.
aumcasMarkupLanguage :: Lens' AdUnitMobileContentAdsSettings (Maybe Text)
aumcasMarkupLanguage
  = lens _aumcasMarkupLanguage
      (\ s a -> s{_aumcasMarkupLanguage = a})

-- | Type of this ad unit.
aumcasType :: Lens' AdUnitMobileContentAdsSettings (Maybe Text)
aumcasType
  = lens _aumcasType (\ s a -> s{_aumcasType = a})

-- | Decoder tolerating absent optional fields.
instance FromJSON AdUnitMobileContentAdsSettings
         where
        parseJSON
          = withObject "AdUnitMobileContentAdsSettings"
              (\ o ->
                 AdUnitMobileContentAdsSettings' <$>
                   (o .:? "size") <*> (o .:? "scriptingLanguage") <*>
                     (o .:? "markupLanguage")
                     <*> (o .:? "type"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AdUnitMobileContentAdsSettings where
        toJSON AdUnitMobileContentAdsSettings'{..}
          = object
              (catMaybes
                 [("size" .=) <$> _aumcasSize,
                  ("scriptingLanguage" .=) <$>
                    _aumcasScriptingLanguage,
                  ("markupLanguage" .=) <$> _aumcasMarkupLanguage,
                  ("type" .=) <$> _aumcasType])
-- | The colors included in the style. These are represented as six
-- hexadecimal characters, similar to HTML color codes, but without the
-- leading hash.
--
-- /See:/ 'adStyleColors' smart constructor.
--
-- NOTE(review): the odd casing of 'BOrder' \/ 'BackgRound' comes from
-- the code generator's renamer; the JSON keys are plain \"border\" \/
-- \"background\" (see the instances below).
data AdStyleColors =
  AdStyleColors'
    { _ascText :: !(Maybe Text)
    , _ascURL :: !(Maybe Text)
    , _ascBOrder :: !(Maybe Text)
    , _ascTitle :: !(Maybe Text)
    , _ascBackgRound :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdStyleColors' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ascText'
--
-- * 'ascURL'
--
-- * 'ascBOrder'
--
-- * 'ascTitle'
--
-- * 'ascBackgRound'
adStyleColors
    :: AdStyleColors
adStyleColors =
  AdStyleColors'
    { _ascText = Nothing
    , _ascURL = Nothing
    , _ascBOrder = Nothing
    , _ascTitle = Nothing
    , _ascBackgRound = Nothing
    }

-- | The color of the ad text.
ascText :: Lens' AdStyleColors (Maybe Text)
ascText = lens _ascText (\ s a -> s{_ascText = a})

-- | The color of the ad url.
ascURL :: Lens' AdStyleColors (Maybe Text)
ascURL = lens _ascURL (\ s a -> s{_ascURL = a})

-- | The color of the ad border.
ascBOrder :: Lens' AdStyleColors (Maybe Text)
ascBOrder
  = lens _ascBOrder (\ s a -> s{_ascBOrder = a})

-- | The color of the ad title.
ascTitle :: Lens' AdStyleColors (Maybe Text)
ascTitle = lens _ascTitle (\ s a -> s{_ascTitle = a})

-- | The color of the ad background.
ascBackgRound :: Lens' AdStyleColors (Maybe Text)
ascBackgRound
  = lens _ascBackgRound
      (\ s a -> s{_ascBackgRound = a})

-- | Decoder tolerating absent optional fields.
instance FromJSON AdStyleColors where
        parseJSON
          = withObject "AdStyleColors"
              (\ o ->
                 AdStyleColors' <$>
                   (o .:? "text") <*> (o .:? "url") <*> (o .:? "border")
                     <*> (o .:? "title")
                     <*> (o .:? "background"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AdStyleColors where
        toJSON AdStyleColors'{..}
          = object
              (catMaybes
                 [("text" .=) <$> _ascText, ("url" .=) <$> _ascURL,
                  ("border" .=) <$> _ascBOrder,
                  ("title" .=) <$> _ascTitle,
                  ("background" .=) <$> _ascBackgRound])
-- | The backup option to be used in instances where no ad is available.
--
-- /See:/ 'adUnitContentAdsSettingsBackupOption' smart constructor.
data AdUnitContentAdsSettingsBackupOption =
  AdUnitContentAdsSettingsBackupOption'
    { _aucasboColor :: !(Maybe Text)
    , _aucasboURL :: !(Maybe Text)
    , _aucasboType :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdUnitContentAdsSettingsBackupOption' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aucasboColor'
--
-- * 'aucasboURL'
--
-- * 'aucasboType'
adUnitContentAdsSettingsBackupOption
    :: AdUnitContentAdsSettingsBackupOption
adUnitContentAdsSettingsBackupOption =
  AdUnitContentAdsSettingsBackupOption'
    {_aucasboColor = Nothing, _aucasboURL = Nothing, _aucasboType = Nothing}

-- | Color to use when type is set to COLOR. These are represented as six
-- hexadecimal characters, similar to HTML color codes, but without the
-- leading hash.
aucasboColor :: Lens' AdUnitContentAdsSettingsBackupOption (Maybe Text)
aucasboColor
  = lens _aucasboColor (\ s a -> s{_aucasboColor = a})

-- | URL to use when type is set to URL.
aucasboURL :: Lens' AdUnitContentAdsSettingsBackupOption (Maybe Text)
aucasboURL
  = lens _aucasboURL (\ s a -> s{_aucasboURL = a})

-- | Type of the backup option. Possible values are BLANK, COLOR and URL.
aucasboType :: Lens' AdUnitContentAdsSettingsBackupOption (Maybe Text)
aucasboType
  = lens _aucasboType (\ s a -> s{_aucasboType = a})

-- | Decoder tolerating absent optional fields.
instance FromJSON
           AdUnitContentAdsSettingsBackupOption
         where
        parseJSON
          = withObject "AdUnitContentAdsSettingsBackupOption"
              (\ o ->
                 AdUnitContentAdsSettingsBackupOption' <$>
                   (o .:? "color") <*> (o .:? "url") <*> (o .:? "type"))

-- | Encoder omitting fields that are 'Nothing'.
instance ToJSON AdUnitContentAdsSettingsBackupOption
         where
        toJSON AdUnitContentAdsSettingsBackupOption'{..}
          = object
              (catMaybes
                 [("color" .=) <$> _aucasboColor,
                  ("url" .=) <$> _aucasboURL,
                  ("type" .=) <$> _aucasboType])
--
-- /See:/ 'adClient' smart constructor.
data AdClient =
  AdClient'
    { _adKind :: !Text -- ^ Always present; defaults to adsensehost#adClient.
    , _adArcOptIn :: !(Maybe Bool)
    , _adSupportsReporting :: !(Maybe Bool)
    , _adId :: !(Maybe Text)
    , _adProductCode :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdClient' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'adKind'
--
-- * 'adArcOptIn'
--
-- * 'adSupportsReporting'
--
-- * 'adId'
--
-- * 'adProductCode'
adClient
    :: AdClient
adClient =
  AdClient'
    { _adKind = "adsensehost#adClient"
    , _adArcOptIn = Nothing
    , _adSupportsReporting = Nothing
    , _adId = Nothing
    , _adProductCode = Nothing
    }

-- | Kind of resource this is, in this case adsensehost#adClient.
adKind :: Lens' AdClient Text
adKind = lens _adKind (\ s a -> s{_adKind = a})

-- | Whether this ad client is opted in to ARC.
adArcOptIn :: Lens' AdClient (Maybe Bool)
adArcOptIn
  = lens _adArcOptIn (\ s a -> s{_adArcOptIn = a})

-- | Whether this ad client supports being reported on.
adSupportsReporting :: Lens' AdClient (Maybe Bool)
adSupportsReporting
  = lens _adSupportsReporting
      (\ s a -> s{_adSupportsReporting = a})

-- | Unique identifier of this ad client.
adId :: Lens' AdClient (Maybe Text)
adId = lens _adId (\ s a -> s{_adId = a})

-- | This ad client\'s product code, which corresponds to the PRODUCT_CODE
-- report dimension.
adProductCode :: Lens' AdClient (Maybe Text)
adProductCode
  = lens _adProductCode
      (\ s a -> s{_adProductCode = a})

-- Decoding: a missing \"kind\" falls back to \"adsensehost#adClient\"
-- via '.!=', mirroring the smart constructor's default.
instance FromJSON AdClient where
        parseJSON
          = withObject "AdClient"
              (\ o ->
                 AdClient' <$>
                   (o .:? "kind" .!= "adsensehost#adClient") <*>
                     (o .:? "arcOptIn")
                     <*> (o .:? "supportsReporting")
                     <*> (o .:? "id")
                     <*> (o .:? "productCode"))

-- Encoding: \"kind\" is always emitted ('Just'); optional fields only
-- when present.
instance ToJSON AdClient where
        toJSON AdClient'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _adKind),
                  ("arcOptIn" .=) <$> _adArcOptIn,
                  ("supportsReporting" .=) <$> _adSupportsReporting,
                  ("id" .=) <$> _adId,
                  ("productCode" .=) <$> _adProductCode])
--
-- /See:/ 'reportHeadersItem' smart constructor.
data ReportHeadersItem =
  ReportHeadersItem'
    { _rhiName :: !(Maybe Text)
    , _rhiCurrency :: !(Maybe Text) -- ^ Only for METRIC_CURRENCY headers.
    , _rhiType :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ReportHeadersItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rhiName'
--
-- * 'rhiCurrency'
--
-- * 'rhiType'
reportHeadersItem
    :: ReportHeadersItem
reportHeadersItem =
  ReportHeadersItem'
    {_rhiName = Nothing, _rhiCurrency = Nothing, _rhiType = Nothing}

-- | The name of the header.
rhiName :: Lens' ReportHeadersItem (Maybe Text)
rhiName = lens _rhiName (\ s a -> s{_rhiName = a})

-- | The currency of this column. Only present if the header type is
-- METRIC_CURRENCY.
rhiCurrency :: Lens' ReportHeadersItem (Maybe Text)
rhiCurrency
  = lens _rhiCurrency (\ s a -> s{_rhiCurrency = a})

-- | The type of the header; one of DIMENSION, METRIC_TALLY, METRIC_RATIO, or
-- METRIC_CURRENCY.
rhiType :: Lens' ReportHeadersItem (Maybe Text)
rhiType = lens _rhiType (\ s a -> s{_rhiType = a})

-- All fields optional on both decode ('.:?') and encode ('catMaybes').
instance FromJSON ReportHeadersItem where
        parseJSON
          = withObject "ReportHeadersItem"
              (\ o ->
                 ReportHeadersItem' <$>
                   (o .:? "name") <*> (o .:? "currency") <*>
                     (o .:? "type"))
instance ToJSON ReportHeadersItem where
        toJSON ReportHeadersItem'{..}
          = object
              (catMaybes
                 [("name" .=) <$> _rhiName,
                  ("currency" .=) <$> _rhiCurrency,
                  ("type" .=) <$> _rhiType])
--
-- /See:/ 'adStyle' smart constructor.
data AdStyle =
  AdStyle'
    { _assCorners :: !(Maybe Text) -- ^ Deprecated: never populated.
    , _assKind :: !Text -- ^ Always present; defaults to adsensehost#adStyle.
    , _assFont :: !(Maybe AdStyleFont)
    , _assColors :: !(Maybe AdStyleColors)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdStyle' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'assCorners'
--
-- * 'assKind'
--
-- * 'assFont'
--
-- * 'assColors'
adStyle
    :: AdStyle
adStyle =
  AdStyle'
    { _assCorners = Nothing
    , _assKind = "adsensehost#adStyle"
    , _assFont = Nothing
    , _assColors = Nothing
    }

-- | The style of the corners in the ad (deprecated: never populated,
-- ignored).
assCorners :: Lens' AdStyle (Maybe Text)
assCorners
  = lens _assCorners (\ s a -> s{_assCorners = a})

-- | Kind this is, in this case adsensehost#adStyle.
assKind :: Lens' AdStyle Text
assKind = lens _assKind (\ s a -> s{_assKind = a})

-- | The font which is included in the style.
assFont :: Lens' AdStyle (Maybe AdStyleFont)
assFont = lens _assFont (\ s a -> s{_assFont = a})

-- | The colors included in the style. These are represented as six
-- hexadecimal characters, similar to HTML color codes, but without the
-- leading hash.
assColors :: Lens' AdStyle (Maybe AdStyleColors)
assColors
  = lens _assColors (\ s a -> s{_assColors = a})

-- Decoding: missing \"kind\" defaults to \"adsensehost#adStyle\" via '.!='.
instance FromJSON AdStyle where
        parseJSON
          = withObject "AdStyle"
              (\ o ->
                 AdStyle' <$>
                   (o .:? "corners") <*>
                     (o .:? "kind" .!= "adsensehost#adStyle")
                     <*> (o .:? "font")
                     <*> (o .:? "colors"))

-- Encoding: \"kind\" always emitted; other fields only when present.
instance ToJSON AdStyle where
        toJSON AdStyle'{..}
          = object
              (catMaybes
                 [("corners" .=) <$> _assCorners,
                  Just ("kind" .= _assKind), ("font" .=) <$> _assFont,
                  ("colors" .=) <$> _assColors])
--
-- /See:/ 'customChannel' smart constructor.
data CustomChannel =
  CustomChannel'
    { _cKind :: !Text -- ^ Always present; defaults to adsensehost#customChannel.
    , _cName :: !(Maybe Text)
    , _cCode :: !(Maybe Text)
    , _cId :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'CustomChannel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cKind'
--
-- * 'cName'
--
-- * 'cCode'
--
-- * 'cId'
customChannel
    :: CustomChannel
customChannel =
  CustomChannel'
    { _cKind = "adsensehost#customChannel"
    , _cName = Nothing
    , _cCode = Nothing
    , _cId = Nothing
    }

-- | Kind of resource this is, in this case adsensehost#customChannel.
cKind :: Lens' CustomChannel Text
cKind = lens _cKind (\ s a -> s{_cKind = a})

-- | Name of this custom channel.
cName :: Lens' CustomChannel (Maybe Text)
cName = lens _cName (\ s a -> s{_cName = a})

-- | Code of this custom channel, not necessarily unique across ad clients.
cCode :: Lens' CustomChannel (Maybe Text)
cCode = lens _cCode (\ s a -> s{_cCode = a})

-- | Unique identifier of this custom channel. This should be considered an
-- opaque identifier; it is not safe to rely on it being in any particular
-- format.
cId :: Lens' CustomChannel (Maybe Text)
cId = lens _cId (\ s a -> s{_cId = a})

-- Decoding: missing \"kind\" defaults via '.!='; other keys optional.
instance FromJSON CustomChannel where
        parseJSON
          = withObject "CustomChannel"
              (\ o ->
                 CustomChannel' <$>
                   (o .:? "kind" .!= "adsensehost#customChannel") <*>
                     (o .:? "name")
                     <*> (o .:? "code")
                     <*> (o .:? "id"))

-- Encoding: \"kind\" always emitted; 'catMaybes' omits unset fields.
instance ToJSON CustomChannel where
        toJSON CustomChannel'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _cKind), ("name" .=) <$> _cName,
                  ("code" .=) <$> _cCode, ("id" .=) <$> _cId])
--
-- /See:/ 'urlChannel' smart constructor.
data URLChannel =
  URLChannel'
    { _urlcKind :: !Text -- ^ Always present; defaults to adsensehost#urlChannel.
    , _urlcId :: !(Maybe Text)
    , _urlcURLPattern :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'URLChannel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'urlcKind'
--
-- * 'urlcId'
--
-- * 'urlcURLPattern'
urlChannel
    :: URLChannel
urlChannel =
  URLChannel'
    { _urlcKind = "adsensehost#urlChannel"
    , _urlcId = Nothing
    , _urlcURLPattern = Nothing
    }

-- | Kind of resource this is, in this case adsensehost#urlChannel.
urlcKind :: Lens' URLChannel Text
urlcKind = lens _urlcKind (\ s a -> s{_urlcKind = a})

-- | Unique identifier of this URL channel. This should be considered an
-- opaque identifier; it is not safe to rely on it being in any particular
-- format.
urlcId :: Lens' URLChannel (Maybe Text)
urlcId = lens _urlcId (\ s a -> s{_urlcId = a})

-- | URL Pattern of this URL channel. Does not include \"http:\/\/\" or
-- \"https:\/\/\". Example: www.example.com\/home
urlcURLPattern :: Lens' URLChannel (Maybe Text)
urlcURLPattern
  = lens _urlcURLPattern
      (\ s a -> s{_urlcURLPattern = a})

-- Decoding: missing \"kind\" defaults via '.!='.
instance FromJSON URLChannel where
        parseJSON
          = withObject "URLChannel"
              (\ o ->
                 URLChannel' <$>
                   (o .:? "kind" .!= "adsensehost#urlChannel") <*>
                     (o .:? "id")
                     <*> (o .:? "urlPattern"))

-- Encoding: \"kind\" always emitted; optional fields omitted when unset.
instance ToJSON URLChannel where
        toJSON URLChannel'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _urlcKind), ("id" .=) <$> _urlcId,
                  ("urlPattern" .=) <$> _urlcURLPattern])
--
-- /See:/ 'adCode' smart constructor.
data AdCode =
  AdCode'
    { _aaKind :: !Text -- ^ Always present; defaults to adsensehost#adCode.
    , _aaAdCode :: !(Maybe Text) -- ^ The ad code snippet itself.
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdCode' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aaKind'
--
-- * 'aaAdCode'
adCode
    :: AdCode
adCode = AdCode' {_aaKind = "adsensehost#adCode", _aaAdCode = Nothing}

-- | Kind this is, in this case adsensehost#adCode.
aaKind :: Lens' AdCode Text
aaKind = lens _aaKind (\ s a -> s{_aaKind = a})

-- | The ad code snippet.
aaAdCode :: Lens' AdCode (Maybe Text)
aaAdCode = lens _aaAdCode (\ s a -> s{_aaAdCode = a})

-- Decoding: missing \"kind\" defaults via '.!='; \"adCode\" is optional.
instance FromJSON AdCode where
        parseJSON
          = withObject "AdCode"
              (\ o ->
                 AdCode' <$>
                   (o .:? "kind" .!= "adsensehost#adCode") <*>
                     (o .:? "adCode"))

-- Encoding: \"kind\" always emitted; \"adCode\" only when present.
instance ToJSON AdCode where
        toJSON AdCode'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _aaKind),
                  ("adCode" .=) <$> _aaAdCode])
-- | Settings specific to content ads (AFC) and highend mobile content ads
-- (AFMC - deprecated).
--
-- /See:/ 'adUnitContentAdsSettings' smart constructor.
data AdUnitContentAdsSettings =
  AdUnitContentAdsSettings'
    { _aucasBackupOption :: !(Maybe AdUnitContentAdsSettingsBackupOption)
    , _aucasSize :: !(Maybe Text) -- ^ SIZE_{width}_{height} form.
    , _aucasType :: !(Maybe Text) -- ^ TEXT, TEXT_IMAGE, IMAGE or LINK.
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AdUnitContentAdsSettings' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aucasBackupOption'
--
-- * 'aucasSize'
--
-- * 'aucasType'
adUnitContentAdsSettings
    :: AdUnitContentAdsSettings
adUnitContentAdsSettings =
  AdUnitContentAdsSettings'
    {_aucasBackupOption = Nothing, _aucasSize = Nothing, _aucasType = Nothing}

-- | The backup option to be used in instances where no ad is available.
aucasBackupOption :: Lens' AdUnitContentAdsSettings (Maybe AdUnitContentAdsSettingsBackupOption)
aucasBackupOption
  = lens _aucasBackupOption
      (\ s a -> s{_aucasBackupOption = a})

-- | Size of this ad unit. Size values are in the form SIZE_{width}_{height}.
aucasSize :: Lens' AdUnitContentAdsSettings (Maybe Text)
aucasSize
  = lens _aucasSize (\ s a -> s{_aucasSize = a})

-- | Type of this ad unit. Possible values are TEXT, TEXT_IMAGE, IMAGE and
-- LINK.
aucasType :: Lens' AdUnitContentAdsSettings (Maybe Text)
aucasType
  = lens _aucasType (\ s a -> s{_aucasType = a})

-- All fields optional on both decode ('.:?') and encode ('catMaybes');
-- \"backupOption\" delegates to the nested type's own instances.
instance FromJSON AdUnitContentAdsSettings where
        parseJSON
          = withObject "AdUnitContentAdsSettings"
              (\ o ->
                 AdUnitContentAdsSettings' <$>
                   (o .:? "backupOption") <*> (o .:? "size") <*>
                     (o .:? "type"))
instance ToJSON AdUnitContentAdsSettings where
        toJSON AdUnitContentAdsSettings'{..}
          = object
              (catMaybes
                 [("backupOption" .=) <$> _aucasBackupOption,
                  ("size" .=) <$> _aucasSize,
                  ("type" .=) <$> _aucasType])
|
brendanhay/gogol
|
gogol-adsense-host/gen/Network/Google/AdSenseHost/Types/Product.hs
|
mpl-2.0
| 45,616
| 0
| 19
| 12,142
| 9,755
| 5,608
| 4,147
| 1,053
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.FirebaseRules.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.FirebaseRules.Types
(
-- * Service Configuration
firebaseRulesService
-- * OAuth Scopes
, firebaseScope
, cloudPlatformScope
, firebaseReadOnlyScope
-- * SourcePosition
, SourcePosition
, sourcePosition
, spCurrentOffSet
, spLine
, spEndOffSet
, spColumn
, spFileName
-- * ExpressionReport
, ExpressionReport
, expressionReport
, erSourcePosition
, erValues
, erChildren
-- * TestCase
, TestCase
, testCase
, tcExpressionReportLevel
, tcPathEncoding
, tcResource
, tcExpectation
, tcFunctionMocks
, tcRequest
-- * VisitedExpression
, VisitedExpression
, visitedExpression
, veSourcePosition
, veValue
-- * Empty
, Empty
, empty
-- * FunctionMock
, FunctionMock
, functionMock
, fmArgs
, fmFunction
, fmResult
-- * ProjectsReleasesGetExecutableExecutableVersion
, ProjectsReleasesGetExecutableExecutableVersion (..)
-- * TestCaseExpressionReportLevel
, TestCaseExpressionReportLevel (..)
-- * TestResultState
, TestResultState (..)
-- * FunctionCall
, FunctionCall
, functionCall
, fcArgs
, fcFunction
-- * ListReleasesResponse
, ListReleasesResponse
, listReleasesResponse
, lrrNextPageToken
, lrrReleases
-- * Result
, Result
, result
, rValue
, rUndefined
-- * TestRulesetResponse
, TestRulesetResponse
, testRulesetResponse
, trrTestResults
, trrIssues
-- * Release
, Release
, release
, rRulesetName
, rUpdateTime
, rName
, rCreateTime
-- * Arg
, Arg
, arg
, aAnyValue
, aExactValue
-- * Ruleset
, Ruleset
, ruleset
, rulName
, rulMetadata
, rulSource
, rulCreateTime
-- * GetReleaseExecutableResponse
, GetReleaseExecutableResponse
, getReleaseExecutableResponse
, grerExecutable
, grerRulesetName
, grerUpdateTime
, grerSyncTime
, grerExecutableVersion
, grerLanguage
-- * TestCasePathEncoding
, TestCasePathEncoding (..)
-- * TestResult
, TestResult
, testResult
, trState
, trExpressionReports
, trFunctionCalls
, trVisitedExpressions
, trErrorPosition
, trDebugMessages
-- * Xgafv
, Xgafv (..)
-- * IssueSeverity
, IssueSeverity (..)
-- * Metadata
, Metadata
, metadata
, mServices
-- * Source
, Source
, source
, sFiles
-- * TestCaseExpectation
, TestCaseExpectation (..)
-- * ValueCount
, ValueCount
, valueCount
, vcValue
, vcCount
-- * TestSuite
, TestSuite
, testSuite
, tsTestCases
-- * TestRulesetRequest
, TestRulesetRequest
, testRulesetRequest
, trrSource
, trrTestSuite
-- * File
, File
, file
, fFingerprint
, fContent
, fName
-- * GetReleaseExecutableResponseExecutableVersion
, GetReleaseExecutableResponseExecutableVersion (..)
-- * GetReleaseExecutableResponseLanguage
, GetReleaseExecutableResponseLanguage (..)
-- * ListRulesetsResponse
, ListRulesetsResponse
, listRulesetsResponse
, lRulesets
, lNextPageToken
-- * Issue
, Issue
, issue
, iSourcePosition
, iSeverity
, iDescription
-- * UpdateReleaseRequest
, UpdateReleaseRequest
, updateReleaseRequest
, urrUpdateMask
, urrRelease
) where
import Network.Google.FirebaseRules.Types.Product
import Network.Google.FirebaseRules.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v1' of the Firebase Rules API. This contains the host and root path used as a starting point for constructing service requests.
firebaseRulesService :: ServiceConfig
firebaseRulesService
  = defaultService (ServiceId "firebaserules:v1")
      "firebaserules.googleapis.com"

-- The three 'Proxy' values below carry their OAuth scope URLs at the type
-- level only; they have no runtime content.

-- | View and administer all your Firebase data and settings
firebaseScope :: Proxy '["https://www.googleapis.com/auth/firebase"]
firebaseScope = Proxy

-- | See, edit, configure, and delete your Google Cloud Platform data
cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"]
cloudPlatformScope = Proxy

-- | View all your Firebase data and settings
firebaseReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/firebase.readonly"]
firebaseReadOnlyScope = Proxy
|
brendanhay/gogol
|
gogol-firebase-rules/gen/Network/Google/FirebaseRules/Types.hs
|
mpl-2.0
| 4,986
| 0
| 7
| 1,270
| 577
| 398
| 179
| 148
| 1
|
module Example.Eg19 (eg19) where
import Graphics.Radian
import ExampleUtils
-- | Example 19: logarithmic axes.
--
-- Samples @1.1 + sin (1 \/ t)@ on a fine grid over (0.01, 1.00] and plots
-- it with a log transform on both the data and UI x axes.
eg19 :: IO Html
eg19 = do
  let x = [0.01, 0.01 + (1.00 - 0.01) / 10000 .. 1.00]
      os = [height.=300, aspect.=3, strokeWidth.=1,
            zoomX.=True, uiAxisYTransform.=True,
            axisXTransform.=AxisLog, uiAxisXTransform.=AxisLog]
      -- BUG FIX: the original passed the partially applied @map f@ to
      -- 'Lines' without ever supplying the sample points, so no y values
      -- were computed; apply the map to @x@ (and use a fresh binder @t@
      -- to avoid shadowing @x@).
      ys = map (\t -> 1.1 + sin (1.0 / t)) x
      ls = Lines x ys # [stroke.="red"]
      plot = Plot [ls] # os
      source = exampleSource "Eg19.hs"
  return [shamlet|
<h3>
  Example 19 (log axes)
^{plot}
^{source}
|]
|
openbrainsrc/hRadian
|
examples/Example/Eg19.hs
|
mpl-2.0
| 540
| 3
| 19
| 137
| 204
| 112
| 92
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Projects.StoredInfoTypes.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a stored infoType. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.projects.storedInfoTypes.get@.
module Network.Google.Resource.DLP.Projects.StoredInfoTypes.Get
(
-- * REST Resource
ProjectsStoredInfoTypesGetResource
-- * Creating a Request
, projectsStoredInfoTypesGet
, ProjectsStoredInfoTypesGet
-- * Request Lenses
, psitgXgafv
, psitgUploadProtocol
, psitgAccessToken
, psitgUploadType
, psitgName
, psitgCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- | A resource alias for @dlp.projects.storedInfoTypes.get@ method which the
-- 'ProjectsStoredInfoTypesGet' request conforms to.
--
-- Servant-style route: GET \/v2\/{name}?alt=json plus the standard Google
-- API query parameters.
type ProjectsStoredInfoTypesGetResource =
     "v2" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     Get '[JSON] GooglePrivacyDlpV2StoredInfoType

-- | Gets a stored infoType. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ 'projectsStoredInfoTypesGet' smart constructor.
data ProjectsStoredInfoTypesGet =
  ProjectsStoredInfoTypesGet'
    { _psitgXgafv :: !(Maybe Xgafv)
    , _psitgUploadProtocol :: !(Maybe Text)
    , _psitgAccessToken :: !(Maybe Text)
    , _psitgUploadType :: !(Maybe Text)
    , _psitgName :: !Text -- ^ The only required field.
    , _psitgCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ProjectsStoredInfoTypesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'psitgXgafv'
--
-- * 'psitgUploadProtocol'
--
-- * 'psitgAccessToken'
--
-- * 'psitgUploadType'
--
-- * 'psitgName'
--
-- * 'psitgCallback'
projectsStoredInfoTypesGet
    :: Text -- ^ 'psitgName'
    -> ProjectsStoredInfoTypesGet
projectsStoredInfoTypesGet pPsitgName_ =
  ProjectsStoredInfoTypesGet'
    { _psitgXgafv = Nothing
    , _psitgUploadProtocol = Nothing
    , _psitgAccessToken = Nothing
    , _psitgUploadType = Nothing
    , _psitgName = pPsitgName_
    , _psitgCallback = Nothing
    }

-- | V1 error format.
psitgXgafv :: Lens' ProjectsStoredInfoTypesGet (Maybe Xgafv)
psitgXgafv
  = lens _psitgXgafv (\ s a -> s{_psitgXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
psitgUploadProtocol :: Lens' ProjectsStoredInfoTypesGet (Maybe Text)
psitgUploadProtocol
  = lens _psitgUploadProtocol
      (\ s a -> s{_psitgUploadProtocol = a})

-- | OAuth access token.
psitgAccessToken :: Lens' ProjectsStoredInfoTypesGet (Maybe Text)
psitgAccessToken
  = lens _psitgAccessToken
      (\ s a -> s{_psitgAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
psitgUploadType :: Lens' ProjectsStoredInfoTypesGet (Maybe Text)
psitgUploadType
  = lens _psitgUploadType
      (\ s a -> s{_psitgUploadType = a})

-- | Required. Resource name of the organization and storedInfoType to be
-- read, for example
-- \`organizations\/433245324\/storedInfoTypes\/432452342\` or
-- projects\/project-id\/storedInfoTypes\/432452342.
psitgName :: Lens' ProjectsStoredInfoTypesGet Text
psitgName
  = lens _psitgName (\ s a -> s{_psitgName = a})

-- | JSONP
psitgCallback :: Lens' ProjectsStoredInfoTypesGet (Maybe Text)
psitgCallback
  = lens _psitgCallback
      (\ s a -> s{_psitgCallback = a})

-- Builds the actual client call: the argument order passed to 'go' must
-- match the parameter order of 'ProjectsStoredInfoTypesGetResource', and
-- \"alt\" is pinned to JSON.
instance GoogleRequest ProjectsStoredInfoTypesGet
         where
        type Rs ProjectsStoredInfoTypesGet =
             GooglePrivacyDlpV2StoredInfoType
        type Scopes ProjectsStoredInfoTypesGet =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient ProjectsStoredInfoTypesGet'{..}
          = go _psitgName _psitgXgafv _psitgUploadProtocol
              _psitgAccessToken
              _psitgUploadType
              _psitgCallback
              (Just AltJSON)
              dLPService
          where go
                  = buildClient
                      (Proxy :: Proxy ProjectsStoredInfoTypesGetResource)
                      mempty
|
brendanhay/gogol
|
gogol-dlp/gen/Network/Google/Resource/DLP/Projects/StoredInfoTypes/Get.hs
|
mpl-2.0
| 5,185
| 0
| 15
| 1,103
| 702
| 413
| 289
| 105
| 1
|
{-
Copyright (C) 2013–2014 Albert Krewinkel <tarleb@moltkeplatz.de>
This file is part of ZeitLinse.
ZeitLinse is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation, either version 3 of the License, or (at your option)
any later version.
ZeitLinse is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
License for more details.
You should have received a copy of the GNU Affero General Public License
along with ZeitLinse. If not, see <http://www.gnu.org/licenses/>.
-}
{-# Language DeriveFunctor #-}
{-# Language GeneralizedNewtypeDeriving #-}
{-# Language TemplateHaskell #-}
-- | Types for a time-dependend scoring system.
module ZeitLinse.Core.Types where
import Control.Lens
import Data.Function (on)
import Data.Time.Clock
import Data.Time.LocalTime()
import Data.Time.Format
-- The point of time lenses is to rate the importance of an item based on how
-- high other sources rank the item.

-- | Score on the importance of an entry.  The numeric instances let scores
-- be combined arithmetically without unwrapping.
newtype Score = Score { _fromScore :: Double }
              deriving (Eq, Floating, Fractional, Num, Ord, Show)

-- | The time at which an entry was submitted.
newtype SubmissionTime = SubmissionTime { _fromSubmissionTime :: UTCTime }
                       deriving (Eq, Ord, Read, ParseTime, FormatTime, Show)

-- | A timedScore is a score and the time at which the score was given.
data TimedRating = TimedRating
  { _timedRatingScore :: Score
  , _timedRatingTime  :: SubmissionTime
  } deriving (Eq, Show)

-- Ratings are ordered by score alone; submission time is ignored.
instance Ord TimedRating where
  -- FIXME: Should include the time of submission
  compare = compare `on` _timedRatingScore

-- | A (possibly time dependent) timedScore of an item is a TimeSpot
data TimeSpot a = TimeSpot
  { _timeSpotRating :: TimedRating
  , _timeSpotFocus  :: a
  } deriving (Eq, Functor, Show)

-- Template Haskell: generate lenses for the record fields above
-- (e.g. 'fromScore', 'timedRatingScore', 'timeSpotFocus').
makeLenses ''Score
makeLenses ''SubmissionTime
makeLenses ''TimedRating
makeLenses ''TimeSpot
|
tarleb/zeitlinse
|
src/ZeitLinse/Core/Types.hs
|
agpl-3.0
| 2,138
| 0
| 8
| 393
| 255
| 150
| 105
| 27
| 0
|
module DecBin where
-- | Convert a non-negative decimal number to its binary digits,
-- least-significant bit first (e.g. @decBin 10 == [0,1,0,1]@).
decBin :: Int -> [Int]
decBin n
  | n < 2     = [n]
  | otherwise = n `mod` 2 : decBin (n `div` 2)
|
tonilopezmr/Learning-Haskell
|
Exercises/2/Exercise_1.hs
|
apache-2.0
| 115
| 2
| 9
| 31
| 66
| 39
| 27
| 4
| 2
|
import Data.List (genericIndex)
-- | Least prime factor of n (OEIS A020639-style): walk the primes; if p^2
-- exceeds n before any prime divides it, n itself is returned (n prime,
-- or n == 1).
a020639 n = spf a000040_list where
   spf (p:ps) | n < p^2 = n
              | mod n p == 0 = p
              | otherwise = spf ps

-- | The n-th prime, 1-indexed.
a000040 n = genericIndex a000040_list (n - 1)

-- | Infinite list of primes: a hard-coded base, then trial division over
-- candidates produced by a wheel built from the base primes.
a000040_list = base ++ larger where
    base = [2, 3, 5, 7, 11, 13, 17]
    larger = p : filter prime more
    -- Trial division by earlier members of 'larger', up to sqrt n.
    prime n = all ((> 0) . mod n) $ takeWhile (\x -> x*x <= n) larger
    -- Drop the wheel's leading 1; 'p' is the first candidate > base.
    _ : p : more = roll $ makeWheels base
    -- Unroll a wheel into its infinite candidate stream.
    roll (Wheel n rs) = [n * k + r | k <- [0..], r <- rs]
    makeWheels = foldl nextSize (Wheel 1 [1])
    -- Grow the wheel by one prime, keeping only residues coprime to p.
    nextSize (Wheel size bs) p = Wheel (size * p) [r | k <- [0..p-1], b <- bs, let r = size*k+b, mod r p > 0]

-- | A wheel: a circumference and the residues to keep on each turn.
data Wheel = Wheel Integer [Integer]

-- | n-th term (1-indexed) of the sequence below.
a008365 n = a008365_list !! (n-1)

-- | 1 followed by the numbers whose least prime factor exceeds 11
-- (13-rough numbers).
a008365_list = 1 : filter ((> 11) . a020639) [1..]
|
peterokagey/haskellOEIS
|
src/Sandbox/Rudiger.hs
|
apache-2.0
| 759
| 0
| 14
| 214
| 442
| 229
| 213
| 17
| 1
|
module IntLib where
import System.Directory
-- | 'IO' action that checks whether a file named \"Exe.hs\" exists in the
-- current working directory.  NOTE(review): the name @directory@ is
-- misleading for a file-existence check — confirm intent with callers.
directory = doesFileExist "Exe.hs"

-- | Plain marker constant exported by this test-fixture module.
intlib = 1
|
DanielG/cabal-helper
|
tests/exeintlib/intlib/IntLib.hs
|
apache-2.0
| 93
| 0
| 5
| 15
| 22
| 13
| 9
| 4
| 1
|
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
module TensorFlow.Gradient
( gradients
) where
import Control.Monad (forM, zipWithM)
import Control.Monad.State.Strict (State, evalState, gets, modify)
import Data.ByteString (ByteString)
import Data.Complex (Complex)
import Data.Default (def)
import Data.Int (Int32, Int64)
import Data.Foldable (foldlM)
import Data.List (foldl', sortBy)
import Data.Map.Strict (Map)
import Data.Maybe (fromMaybe, maybeToList, mapMaybe)
import Data.Ord (comparing)
import Data.ProtoLens.TextFormat (showMessage)
import Data.Set (Set)
import Data.Text (Text)
import Data.Tuple (swap)
import Lens.Family2 (Lens', view, (&), (^.), (.~), (%~))
import Lens.Family2.State.Strict (uses)
import Lens.Family2.Stock (at, intAt)
import Lens.Family2.Unchecked (lens, iso)
import Prelude hiding (sum)
import Text.Printf (printf)
import qualified Data.Graph.Inductive.Basic as FGL
import qualified Data.Graph.Inductive.Graph as FGL
import qualified Data.Graph.Inductive.PatriciaTree as FGL
import qualified Data.Graph.Inductive.Query.DFS as FGL
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified TensorFlow.GenOps.Core as CoreOps
import TensorFlow.Build
( MonadBuild
, Build
, build
, renderedNodeDefs
, opDef
, opAttr
, opInputs
)
import TensorFlow.BuildOp
import TensorFlow.Ops
( addN
, broadcastGradientArgs
, expandDims
, fill
, matMul
, matMul'
, reducedShape
, reluGrad
, reshape
, scalar
, shape
, softmaxCrossEntropyWithLogits
, sum
, scalarize
, vector
, zerosLike
)
import TensorFlow.Output
( NodeName(..)
, Output(..)
, OutputIx(..)
, outputIndex
)
import TensorFlow.Tensor
( Tensor(..)
, Value
, render
, expr
, Rendered
, tensorNodeName
, renderedOutput
, renderValue
)
import TensorFlow.Types (Attribute, OneOf, TensorType, attrLens)
import Proto.Tensorflow.Core.Framework.NodeDef
(NodeDef, attr, input, op, name)
-- | Constraint alias: element types for which gradients can be computed.
type GradientCompatible a =
    -- TODO(fmayle): MaxPoolGrad doesn't support Double for some reason.
    (Num a, OneOf '[ Float, Complex Float, Complex Double ] a)
-- TODO(fmayle): Support control flow.
-- TODO(fmayle): Support gate_gradients-like option to avoid race conditions.
-- TODO(fmayle): Do we need to consider control inputs? See _PendingCount in
-- tensorflow/python/ops/gradients.py.
-- TODO(fmayle): Maybe store the gradient functions and numOutputs on the OpDef.

-- | Gradient of @y@ w.r.t. each element of @xs@.
gradients :: forall a v1 v2 m . (MonadBuild m
                                , Rendered v2
                                , GradientCompatible a
                                )
          => Tensor v1 a  -- ^ The output of the graph.
          -> [Tensor v2 a]  -- ^ Tensors for which gradients are computed.
          -> m [Tensor Value a]
gradients y xs = build $ do
    -- The gradients are computed using "reverse accumulation", similarly to
    -- what is described here:
    -- https://en.wikipedia.org/wiki/Automatic_differentiation#The_chain_rule.2C_forward_and_reverse_accumulation
    --
    -- The code is summarised as follows:
    --
    -- 1. Create an fgl graph of the relevant nodes (ops) and edges (tensors).
    -- 2. Initialize the gradient of y to 1 (∂y/∂y = 1) and the rest of tensor's
    --    gradients to nothing.
    -- 3. Process the nodes in reverse topological order (i.e. each node comes
    --    after all of its outputs so that the output gradients for a node have
    --    been completely calculated before it is processed):
    --      a. Record the gradient for each of the node's output tensors (∂y/∂w
    --         for each output tensor w).
    --      b. Calculate the gradient of y w.r.t. each of the node's input
    --         tensors using the gradients of the node's output tensors.
    --
    --         Written differently, for each output tensor w and input tensor v:
    --           ∂y/∂w = ... (calculated in previous steps)
    --           ∂w/∂v = ... (op specific)
    --           ∂y/∂v = ∂y/∂w * ∂w/∂v (technically, if tensor v is an input
    --                                  to multiple nodes, then this is only
    --                                  part of ∂y/∂v)
    --
    -- 4. Lookup the recorded gradient for each x in xs.
    y' <- renderValue y
    let yName = tensorNodeName y'
    -- Seed: a tensor of ones with y's shape (∂y/∂y = 1 elementwise).
    yOne <- render $ fill (shape y') (scalar 1)
    -- TODO(fmayle): Move this into Build.hs and call it unsafeNodeDefFromName?
    nodeDefLookup :: (NodeName -> NodeDef) <- uses renderedNodeDefs $
        (\f x -> fromMaybe (error $ "no NodeDef found for " ++ show x) (f x))
        . flip Map.lookup
    let (gr, nodeMap) = createGraph yName nodeDefLookup
    -- Set gradient of y to one.
    -- TODO: nicer
    let initPending :: Map.Map FGL.Node (PendingGradients a)
            = Map.empty & (at (nodeMap Map.! yName)
                                . nonEmpty
                                . outputIxAt (outputIndex $ renderedOutput y')
                                . nonEmpty
                                .~ [yOne]
                                )
    -- Calculate the gradients of y w.r.t. each node in the graph.
    gradientMap <- graphGrads gr initPending
    -- Lookup the gradients for each x.  If x has no recorded gradient (it
    -- does not influence y), its gradient is all zeros ('zerosLike').
    forM xs $ \x ->
        let xName = tensorNodeName x
        in maybe (render $ zerosLike x) return $ do
            n <- nodeMap ^. at xName
            let i = outputIndex $ renderedOutput x
            gradientMap ^. at n . nonEmpty . outputIxAt i
-- | Lens focusing the entry of an 'IntMap.IntMap' keyed by an 'OutputIx'.
outputIxAt :: OutputIx -> Lens' (IntMap.IntMap v) (Maybe v)
outputIxAt ix = intAt (unOutputIx ix)
-- | Incomplete gradients of a node's outputs.
--
-- The lists represent partial sums. The key is an OutputIx sans newtype.
type PendingGradients a = IntMap.IntMap [Tensor Value a]

-- | Gradients of a node's outputs. The key is an OutputIx sans newtype.
-- TODO: precache the rendering?
type Gradients a = IntMap.IntMap (Tensor Value a)

-- | Graph of TensorFlow operations.
type Graph = FGL.Gr NodeDef EdgeLabel

-- | Data associated with an edge.
--
-- Pair of
--  1. Output index of a tensor from the source node.
--  2. Input index that the tensor connects to on the destination node.
type EdgeLabel = (OutputIx, OutputIx)
-- | State used for calculating gradients.
data GradientsState a = GradientsState
    -- | Partial gradient sums still being accumulated per node.
    { _gradientsPending :: !(Map FGL.Node (PendingGradients a))
    -- | Finished (summed) gradients per node.
    , _gradientsResult :: !(Map FGL.Node (Gradients a))
    }
-- | Lens for '_gradientsPending'.
gradientsPending :: Lens' (GradientsState a) (Map FGL.Node (PendingGradients a))
gradientsPending = lens _gradientsPending (\x y -> x { _gradientsPending = y })
-- | Lens for '_gradientsResult'.
gradientsResult :: Lens' (GradientsState a) (Map FGL.Node (Gradients a))
gradientsResult = lens _gradientsResult (\x y -> x { _gradientsResult = y })
-- TODO(fmayle): Use something like Data.List.Safe.
-- | Total version of list indexing: @xs `safeIndex` i@ returns the element
-- at position @i@, or 'Nothing' when @i@ is negative or past the end.
safeIndex :: [a] -> Int -> Maybe a
safeIndex xs i
    | i < 0     = Nothing
    | otherwise = go xs i
  where
    go []     _ = Nothing
    go (y:_)  0 = Just y
    go (_:ys) k = go ys (k - 1)
-- Copy of http://hackage.haskell.org/package/lens-3.9.0.2/docs/Control-Lens-Iso.html#v%3anon
-- | Treat 'Nothing' as the default value @a@; writing back a value that
-- satisfies @p@ collapses it to 'Nothing' again. Only a lawful lens when
-- @p a@ holds (as it does for the uses below).
anon :: a -> (a -> Bool) -> Lens' (Maybe a) a
anon a p = iso (fromMaybe a) go where
  go b | p b = Nothing
       | otherwise = Just b
-- | 'anon' specialised to equality with the default value.
non :: Eq a => a -> Lens' (Maybe a) a
non a = anon a (a==)
-- | Lens that defaults Nothing to mempty.
nonEmpty :: (Monoid (t v), Foldable t) => Lens' (Maybe (t v)) (t v)
nonEmpty = anon mempty null
-- TODO: strictness (e.g., foldlM')
-- | Calculate the gradients for every node in a graph.
--
-- Walks the graph in reverse topological order, so every node's output
-- gradients are fully accumulated before that node is processed.
graphGrads :: forall a. GradientCompatible a
           => Graph
           -> Map FGL.Node (PendingGradients a)
           -- ^ Initial gradients (usually just 1 for the node of interest).
           -> Build (Map FGL.Node (Gradients a))
graphGrads gr initPending = view gradientsResult <$> foldlM go initState nodeOrder
  where
    initState = GradientsState initPending Map.empty
    -- Reverse topological sort.
    -- TODO(fmayle): Filter out nodes that are not successors of any x in xs to
    -- avoid calculating gradients that won't be used.
    nodeOrder = FGL.topsort $ FGL.grev gr
    go :: GradientsState a -> Int -> Build (GradientsState a)
    go state node = do
        -- Aggregate the accumulated gradients for this node.
        outputGrads <-
            sumPendingGradient (state ^. gradientsPending . at node . nonEmpty)
        -- Nodes with no accumulated gradient don't affect y; skip them
        -- (and leave nothing in gradientsResult for them).
        if null outputGrads
           then pure state
           else do
                let ctx = FGL.context gr node
                inputGrads <- calculateInputGrads ctx outputGrads gr
                -- Calculate the gradients for each of the node's inputs.
                let nextState = state & gradientsResult %~ Map.insert node outputGrads
                pure $ updatePendingGradients ctx inputGrads nextState
-- | Reduce accumulated gradients for each output to one Tensor.
--
-- Outputs with no pending gradients are dropped from the map entirely;
-- a single pending gradient is reused as-is, and multiple partial sums
-- are combined with 'addN'.
sumPendingGradient :: GradientCompatible a
                   => PendingGradients a -> Build (Gradients a)
sumPendingGradient = sequence . IntMap.mapMaybe combine
  where
    combine grads =
        case grads of
            []  -> Nothing
            [g] -> Just (pure g)
            gs  -> Just (render (addN gs))
-- | Calculate the gradients of a node's input tensors.
--
-- This is mostly just a wrapper around opGrad.
--
-- Fills in zero tensors for any outputs missing a gradient before
-- dispatching to the op-specific gradient function.
calculateInputGrads :: forall a. GradientCompatible a
                    => FGL.Context NodeDef EdgeLabel
                    -> Gradients a  -- ^ Output gradients of the node.
                    -> Graph
                    -> Build [Maybe (Tensor Value a)]
calculateInputGrads (inputEdges, _, nodeDef, _) outputGrads gr = do
    fullOutGrads <- fullOutputGrads (numOutputs nodeDef) (nodeDefName nodeDef)
                        outputGrads
    traverse (traverse render) $ opGrad (nodeDef ^. op) nodeDef inputTensors fullOutGrads
  where
    -- Create a tensor from an edge (technically an Output, but it seems less
    -- confusing to refer to it as a tensor here).
    edgeToTensor :: (EdgeLabel, FGL.Node) -> Output
    edgeToTensor ((i, _), n) =
        case FGL.lab gr n of
            Just edgeNodeDef -> Output i (NodeName $ edgeNodeDef ^. name)
            Nothing -> error $ "calculateInputGrads: missing input node for "
                               ++ Text.unpack (nodeDef ^. name)
    -- Input tensors, sorted by input index.
    inputTensors = map edgeToTensor $ sortBy (comparing (snd . fst)) inputEdges
-- | Convert a Map of gradients to a list, with zeros for missing outputs.
--
-- The resulting list has exactly @n@ entries, indexed by 'OutputIx'.
fullOutputGrads :: (TensorType a, Num a)
                => OutputIx  -- ^ Number of outputs.
                -> NodeName
                -> Gradients a
                -> Build [Tensor Value a]
fullOutputGrads n o gs =
    mapM (\i -> maybe (render $ zero i) return (gs ^. outputIxAt i)) [0..n-1]
  where
    -- A tensor of zeros with the same shape as the i'th output.
    zero i = zerosLike $ toT (Output i o)
-- | Update the pending gradients of a node's inputs.
--
-- For each input edge carrying a (non-Nothing) gradient, prepend that
-- gradient to the pending list of the corresponding output slot on the
-- upstream node; 'sumPendingGradient' later reduces those lists.
updatePendingGradients :: forall a. (TensorType a, Num a)
                       => FGL.Context NodeDef EdgeLabel
                       -> [Maybe (Tensor Value a)]
                       -- ^ Gradient of each input tensor.
                       -> GradientsState a
                       -> GradientsState a
updatePendingGradients (inputEdges, _, nodeDef, _) inputGrads initState =
    foldl' go initState inputEdges
  where
    go :: GradientsState a -> (EdgeLabel, FGL.Node) -> GradientsState a
    go state ((outIndex, OutputIx inIndex), node) =
        case maybeGradient of
            Nothing -> state
            Just g ->
                -- Add to the list of pending gradients for this tensor.
                state & gradientsPending
                      . at node
                      . nonEmpty
                      . outputIxAt outIndex
                      . nonEmpty
                      %~ (g:)
      where
        badSizeErr = error $ printf "updatePendingGradients: bad input index \
                \%d for inputGrads of length %d in %s"
                inIndex (length inputGrads)
                (show (nodeDef ^. name))
        -- The outer Maybe (from safeIndex) is a programming error if
        -- Nothing; the inner Maybe means "no gradient for this input".
        maybeGradient = fromMaybe badSizeErr (safeIndex inputGrads inIndex)
-- | Create a graph that includes a node and its transitive dependencies.
--
-- Returns both the FGL graph (nodes labelled with their NodeDefs, edges
-- labelled with (source output index, destination input index)) and the
-- mapping from node names to FGL node IDs.
createGraph :: NodeName -> (NodeName -> NodeDef)
            -> (Graph, Map NodeName FGL.Node)
createGraph nodeName nodeDefLookup = (FGL.nmap nodeDefLookup graph, nodeMap)
  where
    -- Parse a tensor name.
    parseTensorName :: Text -> Maybe (NodeName, OutputIx)
    parseTensorName n
        | Text.null n        = error "parseTensorName: empty name"
        | Text.head n == '^' = Nothing  -- Control edge
        | otherwise          =
            let (nm, indexStr) = Text.breakOn ":" n
                index
                    | Text.null indexStr = 0
                    | otherwise = read $ Text.unpack $ Text.tail indexStr
            in Just (NodeName nm, OutputIx index)

    -- Build a map from node name to outward edges.
    --
    -- The state is the set of visited nodes.
    collect :: Maybe (NodeName, OutputIx, OutputIx)
            -> NodeName
            -> State (Set NodeName)
                     (Map NodeName [(NodeName, OutputIx, OutputIx)])
    collect outgoingEdge nm = do
        let nextLookup = Map.singleton nm (maybeToList outgoingEdge)
        seen <- gets (Set.member nm)
        modify (Set.insert nm)
        if seen
            then pure nextLookup
            else do
                let inputs = nodeDefLookup nm ^. input
                    recurse inIndex (parentName, outIndex) =
                        collect (Just (nm, outIndex, inIndex)) parentName
                subEdgeLookups <-
                    zipWithM recurse [0..] $ mapMaybe parseTensorName inputs
                pure $ Map.unionsWith (++) (nextLookup:subEdgeLookups)

    edgeLookup = evalState (collect Nothing nodeName) Set.empty
    -- Associate an ID with each node name.
    nodeMap = Map.fromList $ zip (Map.keys edgeLookup) [0..]
    -- Create the graph.
    graph = FGL.mkGraph (swap <$> Map.toList nodeMap)
                        [ (nodeMap Map.! n, nodeMap Map.! m, (i, j))
                        | (n, edges) <- Map.toList edgeLookup
                        , (m, i, j) <- edges
                        ]
-- | Function to compute the gradient of y w.r.t. each input.
--
-- Let y be an arbitrary tensor
-- and [w_0, ..., w_n] be the output tensors of a node
-- and [v_0, ..., v_n] be the input tensors of the same node.
--
-- Given [∂y/∂w_0, ..., ∂y/∂w_n] and [v_0, ..., v_n], a GradientFunc computes
-- [∂y/∂v_0, ..., ∂y/∂v_n] for a particular op type.
--
-- A Nothing gradient is equivalent to zero (but allows for short circuiting
-- computation when all the gradients for something are Nothing).
type GradientFunc a = NodeDef
-> [Output]
-- ^ Input tensors.
-> [Tensor Value a]
-- ^ Gradient of y w.r.t. each output tensor.
-> [Maybe (Tensor Build a)]
-- ^ Gradient of y w.r.t. each input tensor.
-- TODO(fmayle): Assert the type is correct.
-- | Create a Tensor from an Output.
--
-- NOTE(review): no runtime type check happens here; the element type @a@
-- is whatever the calling context demands.
toT :: Output -> Tensor Build a
toT = Tensor . pure
-- | Wrapper around `TensorFlow.GenOps.Core.slice` that builds vectors from scalars for
-- simple slicing operations.
flatSlice :: forall v1 t . TensorType t
          => Tensor v1 t  -- ^ __input__
          -> Int32        -- ^ __begin__: specifies the offset into the first dimension of
                          -- 'input' to slice from.
          -> Int32        -- ^ __size__: specifies the number of elements of the first dimension
                          -- of 'input' to slice. If size is -1, all remaining elements in the dimension
                          -- are included in the slice (i.e. this is equivalent to setting
                          -- size = input.dim_size(0) - begin).
          -> Tensor Build t  -- ^ __output__
flatSlice t begin size = CoreOps.slice t beginVec sizeVec
  where
    -- Lift the scalar offsets into rank-1 tensors, as slice expects.
    beginVec = vector [begin]
    sizeVec  = vector [size]
-- | Extract a node's name from its 'NodeDef', wrapped as a 'NodeName'.
nodeDefName :: NodeDef -> NodeName
nodeDefName nodeDef = NodeName (nodeDef ^. name)
-- | The gradient function for an op type.
--
-- These implementations should match their python counterparts in:
-- third_party/tensorflow/python/ops/*_grad.py
opGrad :: forall a . GradientCompatible a => Text -> GradientFunc a
opGrad "Abs" _ [toT -> x] [dz] = [Just $ expr dz * signum x]
opGrad "Neg" _ [_] [dz] = [Just $ negate $ expr dz]
opGrad "Relu" _ [toT -> x] [dz] = [Just $ reluGrad dz x]
opGrad "Square" _ [toT -> x] [dz] =
    -- TODO(fmayle): Handle complex numbers.
    -- TODO(fmayle): The python code makes dz a control dependency of the 2*x
    -- (for performance reasons?). Will need to put these functions in the Build
    -- monad to replicate that.
    [Just $ dz `CoreOps.mul` (2 * x)]
opGrad "Gather" _ [toT -> x, toT -> indices] [dz] =
    -- TODO(fmayle): The python version uses a better performance implementation
    -- when the shape is known without having to run the graph.
    -- TODO(fmayle): We shouldn't convert the result to a dense tensor. Sparse
    -- tensor support will require some thinking.
    [ Just $ CoreOps.unsortedSegmentSum values indices' numRows
    , Nothing
    ]
  where
    -- TODO(gnezdo): Use colocateWith but it requires Build monad.
    denseShape = shape (x :: Tensor Build a)
    numRows = scalarize $ flatSlice denseShape 0 1
    valuesShape = CoreOps.concat 0 [ allDimensions
                                   , flatSlice denseShape 1 (-1)
                                   ]
    values = reshape dz valuesShape
    -- TODO(fmayle): This could be either Int32 or Int64.
    indices' = reshape indices allDimensions :: Tensor Build Int32
-- The gradient of Max flows only to the positions that attained the
-- maximum; ties share credit via the indicator/numSelected normalization.
opGrad "Max" _ [toT -> x, toT -> indices] [dz] =
    [Just $ indicators `CoreOps.div` numSelected * dz', Nothing]
  where
    sx = shape (x :: Tensor Build a)
    outputShapeKeptDims = reducedShape sx (indices :: Tensor Build Int32)
    y = CoreOps.max x indices
    y' = reshape y outputShapeKeptDims
    dz' = reshape dz outputShapeKeptDims
    indicators = CoreOps.cast $ CoreOps.equal y' x
    numSelected = reshape (sum indicators indices) outputShapeKeptDims
-- Min and Max have identical gradient implementations.
opGrad "Min" u v w = opGrad "Max" u v w
opGrad "Sum" _ [toT -> x, toT -> indices] [dz] =
    [ Just $ CoreOps.tile grad tileScaling, Nothing ]
  where
    -- TODO(gnezdo): Implement the fast-path from math_grad._SumGrad.
    sx = shape (x :: Tensor Build a)
    outputShapeKeptDims = reducedShape sx (indices :: Tensor Build Int32)
    tileScaling = safeShapeDiv sx outputShapeKeptDims
    grad = reshape dz outputShapeKeptDims
-- Mean = Sum / count: reuse the Sum gradient and divide by the ratio of
-- input elements to output elements.
opGrad "Mean" u v@[toT -> x, _] w =
    [Just $ dz `CoreOps.div` CoreOps.cast factor, Nothing]
  where
    [Just dz, Nothing] = opGrad "Sum" u v w
    inputShape = shape (x :: Tensor Build a)
    outputShape = shape (dz :: Tensor Build a)
    -- TODO(fmayle): Add fast path when shape is known.
    inputSize = CoreOps.prod inputShape $ rangeOfRank inputShape
    outputSize = CoreOps.prod outputShape $ rangeOfRank outputShape
    factor = safeShapeDiv inputSize outputSize
opGrad "Add" _ [toT -> x, toT -> y] [dz] =
    [ Just $ reshape (sum dz rx) sx
    , Just $ reshape (sum dz ry) sy ]
  where
    sx = shape (x :: Tensor Build a)
    sy = shape (y :: Tensor Build a)
    (rx, ry) = broadcastGradientArgs sx sy
opGrad "Sub" u v w =
    [Just x, Just (-y)]
  where
    [Just x, Just y] = opGrad "Add" u v w
opGrad "SoftmaxCrossEntropyWithLogits" _ [toT -> x, toT -> y] [dz, _] =
    [ Just $ expandDims dz (-1) * snd (softmaxCrossEntropyWithLogits x y)
    , Nothing ]
opGrad "Mul" _ [toT -> x, toT -> y] [dz] =
    -- TODO(fmayle): Handle complex numbers.
    [ Just $ reshape (sum (dz `CoreOps.mul` y) rx) sx
    , Just $ reshape (sum (x `CoreOps.mul` dz) ry) sy ]
  where
    sx = shape (x :: Tensor Build a)
    sy = shape (y :: Tensor Build a)
    (rx, ry) = broadcastGradientArgs sx sy
opGrad "Div" _ [toT -> x, toT -> y] [dz] =
    -- TODO(fmayle): Handle complex numbers.
    -- TODO(gnezdo): Provide Fractional instance and use '/' instead of div.
    [ Just $ reshape (sum (dz `CoreOps.div` y) rx) sx
    , Just $ reshape (sum (dz `CoreOps.mul` (negate x `CoreOps.div` (y * y)))
                     ry)
                sy
    ]
  where
    sx = shape (x :: Tensor Build a)
    sy = shape (y :: Tensor Build a)
    (rx, ry) = broadcastGradientArgs sx sy
opGrad "MatMul" nodeDef [toT -> x, toT -> y] [dz] =
    let transposeA = lookupAttr nodeDef "transpose_a"
        transposeB = lookupAttr nodeDef "transpose_b"
        transAttrs a b =
            (opAttr "transpose_a" .~ a) . (opAttr "transpose_b" .~ b)
    in case (transposeA, transposeB) of
       (False, False) ->
           [ Just $ matMul' (transAttrs False True) dz y
           , Just $ matMul' (transAttrs True False) x dz]
       (False, True) ->
           [ Just $ matMul dz y
           , Just $ matMul' (transAttrs True False) x dz]
       (True, False) ->
           [ Just $ matMul' (transAttrs False True) dz y
           , Just $ matMul x dz]
       (True, True) ->
           [ Just $ matMul' (transAttrs True True) dz y
           , Just $ matMul' (transAttrs True True) x dz]
opGrad "Transpose" _ [_, toT -> p] [dz] =
    [ Just $ CoreOps.transpose dz
            (CoreOps.invertPermutation p :: Tensor Build Int32)
    , Nothing
    ]
opGrad "Conv2D" nodeDef [toT -> x, toT -> y] [dz] =
    [ Just $ CoreOps.conv2DBackpropInput'
          ((opAttr "strides" .~ strides)
              . (opAttr "padding" .~ padding)
              . (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
              . (opAttr "data_format" .~ dataFormat))
          (shape x) y dz
    , Just $ CoreOps.conv2DBackpropFilter'
          ((opAttr "strides" .~ strides)
              . (opAttr "padding" .~ padding)
              . (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
              . (opAttr "data_format" .~ dataFormat))
          x (shape y) dz
    ]
  where
    strides = lookupAttr nodeDef "strides" :: [Int64]
    padding = lookupAttr nodeDef "padding" :: ByteString
    useCudnnOnGpu = lookupAttr nodeDef "use_cudnn_on_gpu" :: Bool
    dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "MaxPool" nodeDef [toT -> x] [dz] =
    [ Just $ CoreOps.maxPoolGrad'
          ((opAttr "ksize" .~ ksize)
              . (opAttr "strides" .~ strides)
              . (opAttr "padding" .~ padding)
              . (opAttr "data_format" .~ dataFormat))
          x output dz
    ]
  where
    output :: Tensor Build a
    output = toT $ Output 0 (nodeDefName nodeDef)
    ksize = lookupAttr nodeDef "ksize" :: [Int64]
    strides = lookupAttr nodeDef "strides" :: [Int64]
    padding = lookupAttr nodeDef "padding" :: ByteString
    dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "Reshape" _ [toT -> x, _] [dz] =
    [Just $ reshape dz $ shape (x :: Tensor Build a), Nothing]
opGrad "OneHot" _ _ _ = [Nothing, Nothing, Nothing, Nothing]
opGrad "TruncatedNormal" _ _ _ = [Nothing]
opGrad "RefIdentity" _ _ [dz] = [Just $ expr dz]
opGrad "Cast" nodeDef _ [dz] = [Just reverseCast]
  where
    -- TODO(gnezdo): too permissive, python only allows float types as src_type.
    -- Build a Cast op with the source/destination types swapped.
    reverseCast =
        pureOp [] $ pure (opDef "Cast"
                 & opAttr "DstT" .~ (lookupAttr nodeDef "SrcT" :: ByteString)
                 & opAttr "SrcT" .~ (lookupAttr nodeDef "DstT" :: ByteString)
                 & opInputs .~ [renderedOutput dz])
opGrad "DynamicStitch" nodeDef inputs [dz] =
    replicate halfLen Nothing ++ valuesGrads
  where
    -- DynamicStitch takes N index inputs followed by N value inputs; only
    -- the value inputs get gradients.
    halfLen =
        let len = length inputs
            half = len `div` 2
        in if 2 * half == len
           then half
           else error ("Uneven input size " ++ show (len, showMessage nodeDef))
    valuesGrads = [ Just $ CoreOps.gather dz (toT idx :: Tensor Build Int32)
                  | idx <- take halfLen inputs
                  ]
opGrad "DynamicPartition" nodeDef [toT -> xs, toT -> indices] dz =
    [ Just reconstructed, Nothing ]
  where
    -- Invert the partition: stitch the per-partition gradients back
    -- together using the original element positions.
    reconstructed = CoreOps.reshape stitched
                    (CoreOps.shape (xs :: Tensor Build a) :: Tensor Build Int32)
    stitched = CoreOps.dynamicStitch partitionedIndices dz
    partitionedIndices = CoreOps.dynamicPartition np originalIndices indices
    np = lookupAttr nodeDef "num_partitions" :: Int64
    originalIndices =
        CoreOps.reshape (CoreOps.range 0 (CoreOps.size indices) 1) prefixShape
    prefixShape = shapeInt32 indices
    shapeInt32 t = CoreOps.shape t :: Tensor Build Int32
opGrad "Select" _ [toT -> c, toT -> x, _] [dz] =
    [ Nothing
    , Just $ CoreOps.select c dz zeros
    , Just $ CoreOps.select c zeros dz
    ]
  where zeros = CoreOps.zerosLike x
-- TODO(gnezdo): Unlike Python, no control dependency on dz.
opGrad "Log" _ [toT -> x] [dz] = [ Just $ dz `CoreOps.mul` CoreOps.inv x ]
-- TODO(gnezdo): Reuse the output instead of doing another exp,
-- though, it is probably CSE'd away anyway.
opGrad "Exp" _ [toT -> x] [dz] = [ Just $ dz `CoreOps.mul` CoreOps.exp x ]
opGrad "SparseSegmentSum" _ [toT -> x, toT -> y, toT -> t] [dz] =
    [ Just $ CoreOps.unsortedSegmentSum
             (CoreOps.gather dz (t :: Tensor Build Int32))
             (y :: Tensor Build Int32) inputRows
    , Nothing
    , Nothing
    ]
  where inputRows = flatSlice (shape (x :: Tensor Build a)) 0 1
opGrad "LabelClasses" _ _ _ = [Nothing, Nothing]
opGrad "LabelWeights" _ _ _ = [Nothing]
opGrad "Size" _ _ _ = [Nothing]
opGrad "ZerosLike" _ _ _ = [Nothing]
-- TODO(fmayle): These can go away if we properly prune the graph.
opGrad "Const" _ _ _ = [Nothing, Nothing]
opGrad "Placeholder" _ _ _ = []
opGrad "Variable" _ _ _ = []
-- Any op type not handled above is a hard error: silently returning
-- zeros would produce wrong gradients.
opGrad n nodeDef ins grads =
    error $ "no gradient implemented for " ++
            show (n, length ins, length grads, showMessage nodeDef, ins)
-- | The number of outputs for an op type.
--
-- Must be kept in sync with the set of op types 'opGrad' handles; any op
-- not listed here is a hard error. Most ops have a single output;
-- \"DynamicPartition\" produces one output per partition and
-- \"SoftmaxCrossEntropyWithLogits\" produces (loss, backprop).
numOutputs :: NodeDef -> OutputIx
numOutputs o =
    case o ^. op of
        "Abs" -> 1
        "Add" -> 1
        "Cast" -> 1
        "Const" -> 1
        "Conv2D" -> 1
        "Div" -> 1
        "DynamicStitch" -> 1
        "DynamicPartition" ->
            fromIntegral (lookupAttr o "num_partitions" :: Int64)
        "Exp" -> 1
        "Gather" -> 1
        "LabelClasses" -> 1
        "LabelWeights" -> 1
        "Log" -> 1
        "MatMul" -> 1
        "Max" -> 1
        "MaxPool" -> 1
        "Mean" -> 1
        "Min" -> 1
        "Mul" -> 1
        "Neg" -> 1
        "Placeholder" -> 1
        "OneHot" -> 1
        "RefIdentity" -> 1
        "Relu" -> 1
        "Reshape" -> 1
        "Select" -> 1
        "Size" -> 1
        "SoftmaxCrossEntropyWithLogits" -> 2
        "Square" -> 1
        "SparseSegmentSum" -> 1
        "Sub" -> 1
        "Sum" -> 1
        "Transpose" -> 1
        "TruncatedNormal" -> 1
        "Variable" -> 1
        "ZerosLike" -> 1
        -- Fixed typo in the error message ("numOuputs" -> "numOutputs").
        _ -> error $ "numOutputs not implemented for " ++ show (o ^. op)
-- | Element-wise shape division @x / y@, assuming @x, y >= 0@ and treating
-- @0 / 0 = 0@: the divisor is clamped up to at least one, so a zero
-- divisor (which only occurs alongside a zero dividend here) yields zero.
safeShapeDiv :: Tensor v1 Int32 -> Tensor v2 Int32 -> Tensor Build Int32
safeShapeDiv x y = CoreOps.div x (CoreOps.maximum y 1)
-- | The shape vector @[-1]@, which flattens a tensor when used with reshape.
allDimensions :: Tensor Build Int32
allDimensions = vector [-1 :: Int32]
-- | The axis indices @[0 .. rank x - 1]@, i.e. every dimension of @x@.
rangeOfRank :: forall v1 t. TensorType t => Tensor v1 t -> Tensor Build Int32
rangeOfRank x = CoreOps.range 0 (CoreOps.rank x) 1
-- | Read a typed attribute from a NodeDef, falling back to the attribute's
-- default value when it is absent.
lookupAttr :: Attribute a1 => NodeDef -> Text -> a1
lookupAttr nodeDef attrName = nodeDef ^. attr . at attrName . non def . attrLens
|
cem3394/haskell
|
tensorflow-ops/src/TensorFlow/Gradient.hs
|
apache-2.0
| 28,748
| 0
| 19
| 8,305
| 7,150
| 3,834
| 3,316
| -1
| -1
|
module HaskHOL.Lib.IndTypesPre.Base where
import HaskHOL.Core
import HaskHOL.Deductive
import HaskHOL.Lib.Nums
import HaskHOL.Lib.Arith
import HaskHOL.Lib.WF
import HaskHOL.Lib.CalcNum
-- | @!x1 y1 x2 y2. (NUMPAIR x1 y1 = NUMPAIR x2 y2) ==> (x1 = x2)@
--
-- Injectivity of NUMPAIR in its first argument. The proof unfolds the
-- definition and discriminates by parity (@EVEN@) after induction.
thmNUMPAIR_INJ_LEMMA :: WFCtxt thry => HOL cls thry HOLThm
thmNUMPAIR_INJ_LEMMA = cacheProof "thmNUMPAIR_INJ_LEMMA" ctxtWF $
    prove [txt| !x1 y1 x2 y2. (NUMPAIR x1 y1 = NUMPAIR x2 y2) ==> (x1 = x2) |] $
      tacREWRITE [defNUMPAIR] `_THEN`
      _REPEAT (tacINDUCT `_THEN` tacGEN) `_THEN`
      tacASM_REWRITE [ defEXP, ruleGSYM thmMULT_ASSOC, thmARITH
                     , thmEQ_MULT_LCANCEL
                     , thmNOT_SUC, ruleGSYM thmNOT_SUC, thmSUC_INJ ] `_THEN`
      _DISCH_THEN (tacMP . ruleAP_TERM [txt| EVEN |]) `_THEN`
      tacREWRITE [thmEVEN_MULT, thmEVEN_ADD, thmARITH]
-- | @!b1 x1 b2 x2. (NUMSUM b1 x1 = NUMSUM b2 x2) <=> (b1 = b2) /\ (x1 = x2)@
--
-- Injectivity of NUMSUM, again argued via parity after unfolding the
-- definition and splitting on the boolean tag.
thmNUMSUM_INJ :: WFCtxt thry => HOL cls thry HOLThm
thmNUMSUM_INJ = cacheProof "thmNUMSUM_INJ" ctxtWF $
    prove [txt| !b1 x1 b2 x2. (NUMSUM b1 x1 = NUMSUM b2 x2) <=>
                              (b1 = b2) /\ (x1 = x2) |] $
      _REPEAT tacGEN `_THEN` tacEQ `_THEN` tacDISCH `_THEN`
      tacASM_REWRITE_NIL `_THEN`
      _POP_ASSUM (tacMP . ruleREWRITE [defNUMSUM]) `_THEN`
      _DISCH_THEN (\ th -> tacMP th `_THEN`
                           tacMP (ruleAP_TERM [txt| EVEN |] th)) `_THEN`
      _REPEAT tacCOND_CASES `_THEN`
      tacREWRITE [defEVEN, thmEVEN_DOUBLE] `_THEN`
      tacREWRITE [thmSUC_INJ, thmEQ_MULT_LCANCEL, thmARITH]
-- | @!x1 y1 x2 y2. (NUMPAIR x1 y1 = NUMPAIR x2 y2) <=> (x1 = x2) /\ (y1 = y2)@
--
-- Full injectivity of NUMPAIR, built on 'thmNUMPAIR_INJ_LEMMA' for the
-- first component and cancellation lemmas for the second.
thmNUMPAIR_INJ :: WFCtxt thry => HOL cls thry HOLThm
thmNUMPAIR_INJ = cacheProof "thmNUMPAIR_INJ" ctxtWF $
    prove [txt| !x1 y1 x2 y2. (NUMPAIR x1 y1 = NUMPAIR x2 y2) <=>
                              (x1 = x2) /\ (y1 = y2) |] $
      _REPEAT tacGEN `_THEN` tacEQ `_THEN` tacDISCH `_THEN`
      tacASM_REWRITE_NIL `_THEN`
      _FIRST_ASSUM (tacSUBST_ALL . ruleMATCH_MP thmNUMPAIR_INJ_LEMMA) `_THEN`
      _POP_ASSUM tacMP `_THEN` tacREWRITE [defNUMPAIR] `_THEN`
      tacREWRITE [thmEQ_MULT_LCANCEL, thmEQ_ADD_RCANCEL, thmEXP_EQ_0, thmARITH]
-- | Any injective binary function @P@ has left inverses for both arguments.
--
-- The witnesses are chosen with Hilbert's epsilon operator (@\@@), and
-- uniqueness is discharged via @thmSELECT_UNIQUE@.
thmINJ_INVERSE2 :: WFCtxt thry => HOL cls thry HOLThm
thmINJ_INVERSE2 = cacheProof "thmINJ_INVERSE2" ctxtWF $
    prove [txt| !P:A->B->C.
                (!x1 y1 x2 y2. (P x1 y1 = P x2 y2) <=> (x1 = x2) /\ (y1 = y2))
                ==> ?X Y. !x y. (X(P x y) = x) /\ (Y(P x y) = y) |] $
      tacGEN `_THEN` tacDISCH `_THEN`
      tacEXISTS [txt| \z:C. @x:A. ?y:B. P x y = z |] `_THEN`
      tacEXISTS [txt| \z:C. @y:B. ?x:A. P x y = z |] `_THEN`
      _REPEAT tacGEN `_THEN` tacASM_REWRITE [thmBETA] `_THEN`
      tacCONJ `_THEN` tacMATCH_MP thmSELECT_UNIQUE `_THEN` tacGEN `_THEN`
      tacBETA `_THEN` tacEQ `_THEN` tacSTRIP `_THEN` tacASM_REWRITE_NIL `_THEN`
      (\ g@(Goal _ w) -> tacEXISTS
                           (rand =<< liftM snd (destExists w)) g) `_THEN` tacREFL
|
ecaustin/haskhol-math
|
src/HaskHOL/Lib/IndTypesPre/Base.hs
|
bsd-2-clause
| 2,758
| 0
| 21
| 706
| 679
| 395
| 284
| -1
| -1
|
module Data.GitParser.Types where
import Data.Time.LocalTime
import Data.Time.Format
import Control.Monad.State
type Name = String
type SHA = String
data GitBranch = GitBranch { getBranchName :: Name
, getBranchHEAD :: SHA }
deriving Show
newtype RemoteRepo = RemoteRepo String deriving (Show)
data GitRepo = GitRepo { getLocalPath :: FilePath
, getRemotes :: [RemoteRepo]
, getBranches :: [GitBranch]
, getRepoHEAD :: SHA }
deriving Show
data ParseField = SHA SHA | Author String | Date String | Message String
deriving Show
data CommitAuthor = CommitAuthor { getName :: String, getEmail :: String } deriving (Show,Read,Eq,Ord)
data GitCommit = GitCommit { getSHA :: SHA
, getAuthor :: CommitAuthor
, getDate :: Maybe LocalTime
, getMessage :: String } deriving (Show,Read)
-- | A placeholder commit: empty SHA and message, unknown author, no date.
emptyCommit :: GitCommit
emptyCommit = GitCommit "" (CommitAuthor "Unknown" "Unknown") Nothing ""
data ParserState = PS { psLeft :: [String]
, psRight :: [String]
, psBlocks :: [ParseField] }
deriving (Show)
type GitParser a = State ParserState a
-- | Build the initial 'ParserState' from raw log output: nothing consumed
-- yet, all lines pending, and no parsed fields.
mkParserState :: String -> ParserState
mkParserState c = PS [] (lines c) []
|
jamessanders/gitparser
|
src/Data/GitParser/Types.hs
|
bsd-2-clause
| 1,354
| 0
| 9
| 479
| 334
| 200
| 134
| 29
| 1
|
{-# LANGUAGE DeriveDataTypeable, OverloadedStrings, RecordWildCards #-}
-- |
-- Module : Statistics.Resampling.Bootstrap
-- Copyright : (c) 2009, 2011 Bryan O'Sullivan
-- License : BSD3
--
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : portable
--
-- The bootstrap method for statistical inference.
module Statistics.Resampling.Bootstrap
(
Estimate(..)
, bootstrapBCA
, scale
-- * References
-- $references
) where
import Control.DeepSeq (NFData)
import Control.Exception (assert)
import Control.Monad.Par (runPar, parMap)
import Data.Data (Data)
import Data.Typeable (Typeable)
import Data.Vector.Unboxed ((!))
import Statistics.Distribution (cumulative, quantile)
import Statistics.Distribution.Normal
import Statistics.Resampling (Resample(..), jackknife)
import Statistics.Sample (mean)
import Statistics.Types (Estimator, Sample)
import qualified Data.Vector.Unboxed as U
-- | A point and interval estimate computed via an 'Estimator'.
data Estimate = Estimate {
estPoint :: {-# UNPACK #-} !Double
-- ^ Point estimate.
, estLowerBound :: {-# UNPACK #-} !Double
-- ^ Lower bound of the estimate interval (i.e. the lower bound of
-- the confidence interval).
, estUpperBound :: {-# UNPACK #-} !Double
-- ^ Upper bound of the estimate interval (i.e. the upper bound of
-- the confidence interval).
, estConfidenceLevel :: {-# UNPACK #-} !Double
-- ^ Confidence level of the confidence intervals.
} deriving (Eq, Show, Typeable, Data)
instance NFData Estimate
-- | Multiply the point, lower bound, and upper bound in an 'Estimate'
-- by the given value. The confidence level is left untouched.
scale :: Double               -- ^ Value to multiply by.
      -> Estimate -> Estimate
scale f est = est
    { estPoint      = f * estPoint est
    , estLowerBound = f * estLowerBound est
    , estUpperBound = f * estUpperBound est
    }
-- | Smart constructor for 'Estimate' enforcing basic invariants: the lower
-- bound must not exceed the upper bound, and the confidence level must lie
-- strictly between 0 and 1. Violations trigger an assertion failure.
estimate :: Double -> Double -> Double -> Double -> Estimate
estimate pt lb ub cl =
    assert (lb <= ub) .
    assert (cl > 0 && cl < 1) $
    Estimate { estPoint = pt
             , estLowerBound = lb
             , estUpperBound = ub
             , estConfidenceLevel = cl
             }
data T = {-# UNPACK #-} !Double :< {-# UNPACK #-} !Double
infixl 2 :<
-- | Bias-corrected accelerated (BCA) bootstrap. This adjusts for both
-- bias and skewness in the resampled distribution.
--
-- Each (estimator, resample) pair is processed in parallel via
-- 'parMap'. NOTE(review): each resample is assumed to be sorted so that
-- indexing at @lo@/@hi@ yields the interval endpoints — confirm against
-- the 'Resample' producer.
bootstrapBCA :: Double          -- ^ Confidence level
             -> Sample          -- ^ Sample data
             -> [Estimator]     -- ^ Estimators
             -> [Resample]      -- ^ Resampled data
             -> [Estimate]
bootstrapBCA confidenceLevel sample estimators resamples =
    assert (confidenceLevel > 0 && confidenceLevel < 1)
    runPar $ parMap (uncurry e) (zip estimators resamples)
  where
    e est (Resample resample)
      -- A singleton sample admits only a degenerate (point) interval.
      | U.length sample == 1 = estimate pt pt pt confidenceLevel
      | otherwise =
          estimate pt (resample ! lo) (resample ! hi) confidenceLevel
      where
        pt    = est sample
        -- Bias- and acceleration-adjusted interval endpoints, clamped
        -- into the valid index range of the resample vector.
        lo    = max (cumn a1) 0
          where a1 = bias + b1 / (1 - accel * b1)
                b1 = bias + z1
        hi    = min (cumn a2) (ni - 1)
          where a2 = bias + b2 / (1 - accel * b2)
                b2 = bias - z1
        z1    = quantile standard ((1 - confidenceLevel) / 2)
        cumn  = round . (*n) . cumulative standard
        -- Bias correction: fraction of resampled estimates below pt.
        bias  = quantile standard (probN / n)
          where probN = fromIntegral . U.length . U.filter (<pt) $ resample
        ni    = U.length resample
        n     = fromIntegral ni
        -- Acceleration from the jackknife skewness estimate.
        accel = sumCubes / (6 * (sumSquares ** 1.5))
          where (sumSquares :< sumCubes) = U.foldl' f (0 :< 0) jack
                f (s :< c) j = s + d2 :< c + d2 * d
                    where d  = jackMean - j
                          d2 = d * d
                jackMean = mean jack
        jack  = jackknife est sample
-- $references
--
-- * Davison, A.C; Hinkley, D.V. (1997) Bootstrap methods and their
-- application. <http://statwww.epfl.ch/davison/BMA/>
|
00tau/statistics
|
Statistics/Resampling/Bootstrap.hs
|
bsd-2-clause
| 4,149
| 0
| 14
| 1,263
| 951
| 539
| 412
| 73
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Monad(
ServerEnv(..)
, ServerM
, newServerEnv
, runServerM
, runServerMIO
, serverMtoHandler
, AuthM(..)
, runAuth
) where
import Control.Monad.Base
import Control.Monad.Catch (MonadCatch, MonadThrow)
import Control.Monad.Except
import Control.Monad.Logger
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Data.Acid
import Data.Monoid
import Data.Text (unpack)
import Database.LevelDB.MonadResource
import Servant.Server
import Servant.Server.Auth.Token.Config
import Servant.Server.Auth.Token.LevelDB
import Servant.Server.Auth.Token.Model
import Config
-- | Server private environment
data ServerEnv = ServerEnv {
-- | Configuration used to create the server
envConfig :: !ServerConfig
-- | Configuration of auth server
, envAuthConfig :: !AuthConfig
-- | DB state
, envDB :: !LevelDBEnv
}
-- | Create new server environment
newServerEnv :: MonadIO m => ServerConfig -> m ServerEnv
newServerEnv cfg = do
let authConfig = defaultAuthConfig
dbEnv <- liftIO . runResourceT $ do
db <- open (unpack $ serverDbPath cfg) defaultOptions { createIfMissing = True }
dbEnv <- newLevelDBEnv db defaultReadOptions defaultWriteOptions
-- ensure default admin if missing one
_ <- runLevelDBBackendT authConfig dbEnv $ ensureAdmin 17 "admin" "123456" "admin@localhost"
return dbEnv
let env = ServerEnv {
envConfig = cfg
, envAuthConfig = authConfig
, envDB = dbEnv
}
return env
-- | Server monad that holds internal environment
newtype ServerM a = ServerM { unServerM :: ReaderT ServerEnv (LoggingT Handler) a }
deriving (Functor, Applicative, Monad, MonadIO, MonadBase IO, MonadReader ServerEnv
, MonadLogger, MonadLoggerIO, MonadThrow, MonadCatch, MonadError ServantErr)
newtype StMServerM a = StMServerM { unStMServerM :: StM (ReaderT ServerEnv (LoggingT Handler)) a }
instance MonadBaseControl IO ServerM where
type StM ServerM a = StMServerM a
liftBaseWith f = ServerM $ liftBaseWith $ \q -> f (fmap StMServerM . q . unServerM)
restoreM = ServerM . restoreM . unStMServerM
-- | Lift a computation from the base servant 'Handler' monad into
-- 'ServerM' (through the logging and reader layers).
liftHandler :: Handler a -> ServerM a
liftHandler h = ServerM (lift (lift h))
-- | Execution of 'ServerM': peel the reader layer with the given
-- environment and route log output to stdout.
runServerM :: ServerEnv -> ServerM a -> Handler a
runServerM e = runStdoutLoggingT . flip runReaderT e . unServerM
-- | Execution of 'ServerM' in IO monad. Any servant error escaping the
-- handler is turned into a 'fail' (i.e. an IO exception).
runServerMIO :: ServerEnv -> ServerM a -> IO a
runServerMIO env m = do
  ea <- runExceptT $ runServerM env m
  case ea of
    Left e -> fail $ "runServerMIO: " <> show e
    Right a -> return a
-- | Transformation to Servant 'Handler': natural transformation wrapper
-- around 'runServerM' for use with servant's 'enter'.
serverMtoHandler :: ServerEnv -> ServerM :~> Handler
serverMtoHandler e = Nat (runServerM e)
-- | Special monad for authorisation actions
newtype AuthM a = AuthM { unAuthM :: LevelDBBackendT IO a }
deriving (Functor, Applicative, Monad, MonadIO, MonadError ServantErr, HasAuthConfig, HasStorage)
-- | Execution of authorisation actions that require 'AuthHandler' context:
-- runs the LevelDB-backed auth action with the environment's auth config
-- and database, lifting the result into 'ServerM'.
runAuth :: AuthM a -> ServerM a
runAuth m = do
  cfg <- asks envAuthConfig
  db <- asks envDB
  liftHandler $ ExceptT $ runLevelDBBackendT cfg db $ unAuthM m
|
ivan-m/servant-auth-token
|
example/leveldb/src/Monad.hs
|
bsd-3-clause
| 3,221
| 0
| 15
| 599
| 834
| 447
| 387
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Mismi.SQS.Data (
QueueName(..)
, Queue(..)
, QueueUrl(..)
, MessageId(..)
, SQSError(..)
, sqsErrorRender
, module Mismi.SQS.Core.Data
) where
import Control.Exception.Base
import Data.Text
import Data.Typeable
import Mismi.SQS.Core.Data
import P
data SQSError =
Invariant Text
deriving (Typeable)
instance Exception SQSError
instance Show SQSError where
show = unpack . sqsErrorRender
-- | Render an 'SQSError' as human-readable text.
sqsErrorRender :: SQSError -> Text
sqsErrorRender err =
  case err of
    Invariant e -> "[Mismi internal error] - " <> e
|
ambiata/mismi
|
mismi-sqs/src/Mismi/SQS/Data.hs
|
bsd-3-clause
| 683
| 0
| 7
| 161
| 151
| 94
| 57
| 25
| 1
|
{-# LANGUAGE BangPatterns,RankNTypes,OverloadedStrings #-}
{-# LANGUAGE CPP, DeriveDataTypeable, FlexibleContexts,
GeneralizedNewtypeDeriving, MultiParamTypeClasses,
TemplateHaskell, TypeFamilies, RecordWildCards #-}
module Plow.Service.Alarm where
-- import Plow.Service.Alarm.Internal
import Plow.Service.Alarm.Types
import Plow.Service.Alarm.Acid
{-| Various naming conventions for things here, which I will try to stick to:
 Controller -> looks at the current state and decides what should happen based upon it
 Handler -> takes an incoming state and some parameters and returns a new state
 Rxer -> receives some piece of data from an outside source
 Sender -> sends a piece of data to an outside source.
 Store -> put a piece of data into the local storage
 Get -> get a piece of data from the local storage
|-}
|
smurphy8/alarm-service
|
src/Plow/Service/Alarm.hs
|
bsd-3-clause
| 826
| 0
| 4
| 134
| 25
| 19
| 6
| 7
| 0
|
module Spring13.Week7.StringBufEditor where
import Spring13.Week7.StringBuffer
import Spring13.Week7.Editor
-- | Run the buffer editor seeded with a short scratch-buffer banner.
-- Added the missing top-level type signature.
main :: IO ()
main = runEditor editor $ unlines
         [ "This buffer is for notes you don't want to save, and for"
         , "evaluation of steam valve coefficients."
         , "To load a different file, type the character L followed"
         , "by the name of the file."
         ]
|
bibaijin/cis194
|
src/Spring13/Week7/StringBufEditor.hs
|
bsd-3-clause
| 385
| 0
| 7
| 98
| 46
| 28
| 18
| 8
| 1
|
{-# OPTIONS -fno-warn-missing-methods #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE EmptyDataDecls #-}
-- | A limited subset of the time package.
module Data.Time
(-- * Compatible with the time package
getCurrentTime
,fromGregorian
,UTCTime
,Day
,utctDay
-- * Incompatible Fay-specific helpers
,showTime
,showDay)
where
import Data.Data
import Data.Text
import FFI
import Prelude (Show,Eq,Ord,Int)
-- | Date representation (internally represented as milliseconds from Epoch).
-- Empty declaration (EmptyDataDecls): values only exist on the JS side.
data UTCTime
  deriving (Typeable)
-- We provide no methods, this is just to satisfy type-safety. No
-- methods work in Fay anyway. (-fno-warn-missing-methods silences GHC.)
instance Data UTCTime
instance Show UTCTime
instance Eq UTCTime
instance Ord UTCTime
-- | Day representation (internally represented as milliseconds from Epoch).
data Day
  deriving (Typeable)
-- We provide no methods, this is just to satisfy type-safety. No
-- methods work in Fay anyway.
instance Data Day
instance Show Day
instance Eq Day
instance Ord Day
-- | Get the current time.
-- JS 'getTime' returns milliseconds since Epoch, matching 'UTCTime''s encoding.
getCurrentTime :: Fay UTCTime
getCurrentTime = ffi "(new Date()).getTime()"
-- | Convert from proleptic Gregorian calendar. First argument is
-- year, second month number (1-12), third day (1-31).
-- JavaScript months are 0-based, hence the @%2-1@ in the FFI expression.
fromGregorian :: Int -- ^ Year.
              -> Int -- ^ Month.
              -> Int -- ^ Day.
              -> Day
fromGregorian = ffi "Date.UTC(%1,%2-1,%3)"
-- | Extract the day from the time.
-- Identity on the JS side: both types are milliseconds-from-Epoch.
utctDay :: UTCTime -> Day
utctDay = ffi "%1"
-- | Show a time. Meant for debugging purposes, not production presentation.
showTime :: UTCTime -> Text
showTime = ffi "new Date(%1).toString()"
-- | Show a day. Meant for debugging purposes, not production presentation.
-- NOTE(review): this FFI expression references @date@ and @showMonth@
-- rather than the @%1@ placeholder used by every other wrapper in this
-- module — presumably it relies on JS globals; verify it actually works.
showDay :: Day -> Text
showDay =
  ffi "date.getUTCFullYear() + ' ' + showMonth(date) + ' ' + (date.getUTCDate() + 1)"
|
beni55/fay
|
fay-base/src/Data/Time.hs
|
bsd-3-clause
| 1,815
| 0
| 7
| 351
| 248
| 143
| 105
| -1
| -1
|
module BoxFieldsSpec (spec) where
import Control.Exception (evaluate)
import Data.ByteString.IsoBaseFileFormat.Boxes
import Data.ByteString.IsoBaseFileFormat.ReExports
import Data.ByteString.IsoBaseFileFormat.Util.BoxContent
import Data.ByteString.IsoBaseFileFormat.Util.BoxFields
import qualified Data.ByteString.Lazy as BL
import Test.Hspec
-- import qualified Data.ByteString.Builder as B import qualified Data.ByteString.Lazy as BL import
-- qualified Data.Binary.Get as Binary
-- | Hspec suite for the box-field primitives: 'mkLanguage' validation and
-- rendering, plus size/render behaviour of composed scalar/array contents.
spec :: Spec
spec =
  do
    -- Language codes must be exactly three characters in 'a'..'z'.
    describe "mkLanguage" $
      do
        it "throws runtime exceptions when the code is too short or too long" $
          do
            evaluate (mkLanguage "") `shouldThrow` anyException
            evaluate (mkLanguage "a") `shouldThrow` anyException
            evaluate (mkLanguage "aa") `shouldThrow` anyException
            evaluate (mkLanguage "bbbb") `shouldThrow` anyException
            evaluate (mkLanguage "bbbbb") `shouldThrow` anyException
        it "throws runtime exceptions when the code is not in aaa .. zzz" $
          do
            evaluate (mkLanguage "! 3") `shouldThrow` anyException
            evaluate (mkLanguage "AAA") `shouldThrow` anyException
        -- Letters pack as 5-bit values (c - 0x60) into a Word16:
        -- 'd'=4, 'e'=5, 'u'=21  =>  4*1024 + 5*32 + 21.
        it "renders lower case three-letter strings correctly" $
          let actual =
                BL.unpack (toLazyByteString (boxBuilder (mkLanguage "deu")))
              expected =
                BL.unpack (toLazyByteString (word16BE (4 * 1024 + 5 * 32 + 21)))
           in actual `shouldBe` expected
    describe "Scalar, Constant, Template and ScalarArry composition" $
      describe "IsBoxContent instances" $
        do
          describe "example1" $
            do
              -- Two single-byte scalars + 3 Int64s + 7 Word64s.
              it "boxSize reports the correct size" $
                boxSize example1
                  `shouldBe` (1 + 1 + (3 * 8) + (7 * 8))
              -- example1' supplies 3 elements where the type demands 7.
              it "crashes during rendering because of the invalid number of array elements for \"baz\"" $
                evaluate (renderBox example1')
                  `shouldThrow` anyException
          describe "example2" $
            do
              it "boxSize reports the correct size" $
                boxSize example2
                  `shouldBe` (2 * 4 + 2 + 2 + 2 + 2 + 9 * 4 + 4 + 4)
              -- Expected byte dump of the full big-endian serialisation.
              it "it renders the expected content" $
                renderBox example2
                  `shouldBe` BL.pack
                    [ 0,
                      0,
                      0,
                      0,
                      0,
                      0,
                      0,
                      0,
                      0,
                      65,
                      0,
                      66,
                      1,
                      0,
                      0,
                      0,
                      0,
                      0,
                      0,
                      67,
                      0,
                      0,
                      0,
                      68,
                      0,
                      0,
                      0,
                      69,
                      0,
                      0,
                      0,
                      70,
                      0,
                      0,
                      0,
                      71,
                      0,
                      0,
                      0,
                      72,
                      0,
                      0,
                      0,
                      73,
                      0,
                      0,
                      0,
                      74,
                      0,
                      0,
                      0,
                      75,
                      0,
                      0,
                      0,
                      76,
                      0,
                      0,
                      0,
                      77
                    ]
-- | Serialise any box content to a lazy 'BL.ByteString' via its builder.
renderBox ::
  IsBoxContent c =>
  c ->
  BL.ByteString
renderBox content = toLazyByteString (boxBuilder content)
-- | Fixture content: two byte scalars, a templated 3-element Int64 array
-- and a 7-element Word64 array, composed with ':+'.
type ExampleContent =
  Scalar Word8 "bla"
    :+ Constant (Scalar Word8 "blub") 123
    :+ Template (ScalarArray "foos" 3 Int64) '[1, 2, 3]
    :+ ScalarArray "baz" 7 Word64
-- | Same shape as 'ExampleContent' via the abbreviated field aliases.
type ExampleContentShort =
  U8 "bla"
    :+ Constant (U8 "blub") 123
    :+ Template (I64Arr "foos" 3) '[1, 2, 3]
    :+ U64Arr "baz" 7
example1 :: ExampleContent
example1 = Scalar 100 :+ Constant :+ Template :+ u64Arr [1, 2, 3, 4, 5, 6, 7]
-- | Deliberately ill-formed: only 3 array elements where 7 are required,
-- so rendering must fail at runtime (exercised by the spec above).
example1' :: ExampleContentShort
example1' = Scalar 100 :+ Constant :+ Template :+ u64Arr [6, 6, 6]
-- | Track-header-like layout; the volume template depends on @isAudio@.
type Example2 isAudio =
  Constant (I32Arr "reserved" 2) '[0, 0]
    :+ Template (I16 "layer") 0
    :+ Template (I16 "alternate_group") 0
    :+ Template (I16 "volume") (If isAudio 256 0)
    :+ Constant (I16 "reserved") 0
    :+ Template (I32Arr "matrix" 9) '[65536, 0, 0, 0, 65536, 0, 0, 0, 1073741824]
    :+ I32 "width"
    :+ I32 "height"
example2 :: Example2 'True
example2 =
  Constant :+ Custom 65 :+ Custom 66 :+ Template :+ Constant
    :+ Custom (i32Arr [67 .. 75])
    :+ i32 76
    :+ i32 77
|
sheyll/isobmff-builder
|
spec/BoxFieldsSpec.hs
|
bsd-3-clause
| 5,009
| 0
| 24
| 2,378
| 1,165
| 628
| 537
| -1
| -1
|
{-# LANGUAGE ScopedTypeVariables, MonoPatBinds, MagicHash #-}
-- |
-- Copyright : (c) 2010 Simon Meier
--
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Stability : experimental
-- Portability : non-portable (uses unsafeCoerce)
--
-- 'Encoding's for encoding floating point numbers represented as 'Float' or
-- 'Double' values using big-endian, little-endian, and host-endian encodings.
--
module Codec.Bounded.Encoding.Floating
(
-- * Big-endian encodings
floatBE
, doubleBE
-- * Little-endian encodings
, floatLE
, doubleLE
-- * Host-endian encodings
, floatHost
, doubleHost
) where
import Codec.Bounded.Encoding.Internal (Encoding, writeStorable, (#.) )
import Codec.Bounded.Encoding.Word (word32BE, word32LE, word64BE, word64LE)
import Foreign
-- | Coerce a 'Float' to a 'Word32' as-is.
-- Bit pattern is preserved exactly (see 'fromFloat'); no numeric conversion.
{-# INLINE coerceFloatToWord32 #-}
coerceFloatToWord32 :: Float -> Word32
coerceFloatToWord32 = fromFloat
-- | Coerce a 'Double' to a 'Word64' as-is.
-- Bit pattern is preserved exactly (see 'fromFloat'); no numeric conversion.
{-# INLINE coerceDoubleToWord64 #-}
coerceDoubleToWord64 :: Double -> Word64
coerceDoubleToWord64 = fromFloat
-- The implementation of the following function is based on
--
-- http://hackage.haskell.org/package/data-binary-ieee754-0.4.2.1
--
-- Module: Data.Binary.IEEE754
-- Copyright: 2010 John Millikin <jmillikin@gmail.com>
-- License: MIT
--
-- | Reinterpret the bits of a floating point value as a word of the same
-- width by writing it to a temporary buffer and reading it back as @w@.
-- Requires an IEEE representation and matching sizes; otherwise it aborts,
-- because a bit-level reinterpretation would be meaningless.
fromFloat :: forall w f. (Storable w, Storable f, RealFloat f) => f -> w
fromFloat x
  | isIEEE x && sizeOf (undefined :: f) == sizeOf (undefined :: w) =
      -- unsafePerformIO is safe here: the buffer is local, the write
      -- happens before the read, and the result is pure in @x@.
      unsafePerformIO $ alloca $ \buf -> do
        poke (castPtr buf) x
        peek buf
  | otherwise = error
      -- Fixed module name in the diagnostic (was "Coded.", module is "Codec.").
      "Codec.Bounded.Encoding.Floating: missing support for encoding floating point numbers on your platform!"
{- The speed of the above implementation is not great. The plan is to use the
implementations below for real speed once the following ticket is solved:
See http://hackage.haskell.org/trac/ghc/ticket/4092
-- | Coerce a 'Float' to a 'Word32'; i.e., interpret the 32-bit 'Float' value
-- as an unsigned 32-bit 'Int.
--
-- FIXME: Check with GHC developers if this is really safe. Does the register
-- allocater handle such a case correctly, if the 'Float' is in an FPU
-- register?
{-# INLINE coerceFloatToWord32 #-}
coerceFloatToWord32 :: Float -> Word32
coerceFloatToWord32 = unsafeCoerce
-- | Coerce a 'Double' to a 'Word64'.
{-# INLINE coerceDoubleToWord64 #-}
coerceDoubleToWord64 :: Double -> Word64
coerceDoubleToWord64 = unsafeCoerce
-}
-- Each encoder below reuses the fixed-width word encoders by precomposing
-- (via '#.') the bit-preserving coercion; the byte order therefore comes
-- entirely from the underlying word encoder.
-- | Encode a 'Float' in big endian format.
{-# INLINE floatBE #-}
floatBE :: Encoding Float
floatBE = word32BE #. coerceFloatToWord32
-- | Encode a 'Float' in little endian format.
{-# INLINE floatLE #-}
floatLE :: Encoding Float
floatLE = word32LE #. coerceFloatToWord32
-- | Encode a 'Double' in big endian format.
{-# INLINE doubleBE #-}
doubleBE :: Encoding Double
doubleBE = word64BE #. coerceDoubleToWord64
-- | Encode a 'Double' in little endian format.
{-# INLINE doubleLE #-}
doubleLE :: Encoding Double
doubleLE = word64LE #. coerceDoubleToWord64
-- | Encode a 'Float' in native host order and host endianness. Values written
-- this way are not portable to different endian machines, without conversion.
--
{-# INLINE floatHost #-}
floatHost :: Encoding Float
floatHost = writeStorable
-- | Encode a 'Double' in native host order and host endianness.
{-# INLINE doubleHost #-}
doubleHost :: Encoding Double
doubleHost = writeStorable
|
meiersi/system-io-write
|
src/Codec/Bounded/Encoding/Floating.hs
|
bsd-3-clause
| 3,480
| 0
| 12
| 606
| 361
| 219
| 142
| 44
| 1
|
{-# LANGUAGE CPP, GeneralizedNewtypeDeriving, FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts, MultiParamTypeClasses, RankNTypes #-}
{-# LANGUAGE TypeFamilies, UndecidableInstances, RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Language.Haskell.GhcMod.Monad (
-- * Monad Types
GhcModT
, IOish
-- ** Environment, state and logging
, GhcModEnv(..)
, newGhcModEnv
, GhcModState(..)
, defaultState
, CompilerMode(..)
, GhcModLog
, GhcModError(..)
-- * Monad utilities
, runGhcModT
, runGhcModT'
, hoistGhcModT
-- ** Accessing 'GhcModEnv' and 'GhcModState'
, gmsGet
, gmsPut
, options
, cradle
, getCompilerMode
, setCompilerMode
, withOptions
, withTempSession
, overrideGhcUserOptions
-- ** Re-exporting convenient stuff
, liftIO
, module Control.Monad.Reader.Class
, module Control.Monad.Journal.Class
) where
#if __GLASGOW_HASKELL__ < 708
-- 'CoreMonad.MonadIO' and 'Control.Monad.IO.Class.MonadIO' are different
-- classes before ghc 7.8
#define DIFFERENT_MONADIO 1
-- RWST doen't have a MonadIO instance before ghc 7.8
#define MONADIO_INSTANCES 1
#endif
import Language.Haskell.GhcMod.Types
import Language.Haskell.GhcMod.Error
import Language.Haskell.GhcMod.Cradle
import Language.Haskell.GhcMod.DynFlags
import Language.Haskell.GhcMod.GhcPkg
import Language.Haskell.GhcMod.CabalApi
import qualified Language.Haskell.GhcMod.Gap as Gap
import DynFlags
import GHC
import qualified GHC as G
import GHC.Paths (libdir)
import GhcMonad hiding (withTempSession)
#if __GLASGOW_HASKELL__ <= 702
import HscTypes
#endif
-- MonadUtils of GHC 7.6 or earlier defines its own MonadIO.
-- RWST does not automatically become an instance of MonadIO.
-- MonadUtils of GHC 7.8 or later imports MonadIO in Monad.Control.IO.Class.
-- So, RWST automatically becomes an instance of MonadIO.
import MonadUtils
#if DIFFERENT_MONADIO
import Control.Monad.Trans.Class (lift)
import qualified Control.Monad.IO.Class
import Data.Monoid (Monoid)
#endif
import Control.Applicative (Alternative)
import Control.Arrow (first)
import Control.Monad (MonadPlus, void, liftM)
import Control.Monad.Base (MonadBase, liftBase)
-- Monad transformer stuff
import Control.Monad.Trans.Control (MonadBaseControl(..), StM, liftBaseWith,
control, liftBaseOp, liftBaseOp_)
import Control.Monad.Trans.Class
import Control.Monad.Reader.Class
import Control.Monad.Writer.Class (MonadWriter)
import Control.Monad.State.Class (MonadState(..))
import Control.Monad.Error (ErrorT, runErrorT)
import Control.Monad.Reader (ReaderT, runReaderT)
import Control.Monad.State.Strict (StateT, runStateT)
import Control.Monad.Trans.Journal (JournalT, runJournalT)
#ifdef MONADIO_INSTANCES
import Control.Monad.Trans.Maybe (MaybeT)
import Control.Monad.Error (Error(..))
#endif
import Control.Monad.Journal.Class
import Data.Maybe (isJust)
import Data.IORef (IORef, readIORef, writeIORef, newIORef)
import System.Directory (getCurrentDirectory)
----------------------------------------------------------------
-- | Read-only environment for a ghc-mod run: the (mutable) GHC session,
-- the user-supplied options and the project 'Cradle'.
data GhcModEnv = GhcModEnv {
      gmGhcSession :: !(IORef HscEnv)  -- ^ shared so the session can be swapped/restored
    , gmOptions :: Options
    , gmCradle :: Cradle
    }
-- | Log type for the 'JournalT' layer; currently unused, hence unit.
type GhcModLog = ()
-- | Mutable state threaded through a run (just the compiler mode for now).
data GhcModState = GhcModState {
      gmCompilerMode :: CompilerMode
    } deriving (Eq,Show,Read)
data CompilerMode = Simple | Intelligent deriving (Eq,Show,Read)
defaultState :: GhcModState
defaultState = GhcModState Simple
----------------------------------------------------------------
-- | This is basically a newtype wrapper around 'StateT', 'ErrorT', 'JournalT'
-- and 'ReaderT' with custom instances for 'GhcMonad' and it's constraints that
-- means you can run (almost) all functions from the GHC API on top of 'GhcModT'
-- transparently.
--
-- The inner monad @m@ should have instances for 'MonadIO' and
-- 'MonadBaseControl' 'IO', in the common case this is simply 'IO'. Most @mtl@
-- monads already have 'MonadBaseControl' 'IO' instances, see the
-- @monad-control@ package.
--
-- Layer order (outermost first): StateT -> ErrorT -> JournalT -> ReaderT.
newtype GhcModT m a = GhcModT {
      unGhcModT :: StateT GhcModState
                     (ErrorT GhcModError
                       (JournalT GhcModLog
                         (ReaderT GhcModEnv m) ) ) a
    } deriving ( Functor
               , Applicative
               , Alternative
               , Monad
               , MonadPlus
#if DIFFERENT_MONADIO
               , Control.Monad.IO.Class.MonadIO
#endif
               , MonadReader GhcModEnv -- TODO: make MonadReader instance
                                       -- pass-through like MonadState
               , MonadWriter w
               , MonadError GhcModError
               )
-- | Lift IO while translating exceptions: 'IOError's and 'GhcModError's
-- thrown by the action are re-raised through 'throwError' (the ErrorT
-- layer) so callers can handle them; anything else is rethrown as-is.
instance MonadIO m => MonadIO (GhcModT m) where
    liftIO action = do
      -- Four 'liftIO's: one per transformer layer in the stack.
      res <- GhcModT . liftIO . liftIO . liftIO . liftIO $ try action
      case res of
        Right a -> return a
        Left e | isIOError e ->
                   throwError $ GMEIOException (fromEx e :: IOError)
        Left e | isGhcModError e ->
                   throwError $ (fromEx e :: GhcModError)
        Left e -> throw e
     where
       fromEx :: Exception e => SomeException -> e
       -- Partial ('Just' pattern): safe only because every call is guarded
       -- by 'isIOError' / 'isGhcModError' first.
       fromEx se = let Just e = fromException se in e
       isIOError se =
           case fromException se of
             Just (_ :: IOError) -> True
             Nothing -> False
       isGhcModError se =
           case fromException se of
             Just (_ :: GhcModError) -> True
             Nothing -> False
-- | Lift through all four transformer layers at once.
instance MonadTrans (GhcModT) where
    lift = GhcModT . lift . lift . lift . lift
-- | Pass 'MonadState' through to the *inner* monad's state (three lifts
-- skip StateT/ErrorT/JournalT), so ghc-mod's own state stays untouched.
instance MonadState s m => MonadState s (GhcModT m) where
    get = GhcModT $ lift $ lift $ lift get
    put = GhcModT . lift . lift . lift . put
    state = GhcModT . lift . lift . lift . state
#if MONADIO_INSTANCES
-- Orphan MonadIO instances for GHC < 7.8, where CoreMonad.MonadIO and
-- Control.Monad.IO.Class.MonadIO are distinct classes (see CPP above).
instance MonadIO m => MonadIO (StateT s m) where
    liftIO = lift . liftIO
instance MonadIO m => MonadIO (ReaderT r m) where
    liftIO = lift . liftIO
instance (Monoid w, MonadIO m) => MonadIO (JournalT w m) where
    liftIO = lift . liftIO
instance (Error e, MonadIO m) => MonadIO (ErrorT e m) where
    liftIO = lift . liftIO
instance MonadIO m => MonadIO (MaybeT m) where
    liftIO = lift . liftIO
#endif
----------------------------------------------------------------
-- | Initialize the 'DynFlags' relating to the compilation of a single
-- file or GHC session according to the 'Cradle' and 'Options'
-- provided.
initializeFlagsWithCradle :: (IOish m, GhcMonad m, MonadError GhcModError m)
                          => Options
                          -> Cradle
                          -> m ()
initializeFlagsWithCradle opt c
  | cabal = withCabal
  | otherwise = withSandbox
  where
    mCabalFile = cradleCabalFile c
    cabal = isJust mCabalFile
    ghcopts = ghcUserOptions opt
    withCabal = do
      -- Partial 'Just' pattern is safe: this branch only runs when
      -- @cabal = isJust mCabalFile@ holds.
      let Just cabalFile = mCabalFile
      pkgDesc <- parseCabalFile c cabalFile
      compOpts <- getCompilerOptions ghcopts c pkgDesc
      initSession CabalPkg opt compOpts
    withSandbox = initSession SingleFile opt compOpts
      where
        -- Fallback search dirs when no package DB stack options exist.
        importDirs = [".","..","../..","../../..","../../../..","../../../../.."]
        pkgOpts = ghcDbStackOpts $ cradlePkgDbStack c
        compOpts
          | null pkgOpts = CompilerOptions ghcopts importDirs []
          | otherwise = CompilerOptions (ghcopts ++ pkgOpts) [wdir,rdir] []
        wdir = cradleCurrentDir c
        rdir = cradleRootDir c
-- | Apply a pipeline of DynFlags transformations (simple mode, include
-- dirs, build env, silent logger, package flags) and install the result
-- in the current GHC session.
initSession :: GhcMonad m
            => Build
            -> Options
            -> CompilerOptions
            -> m ()
initSession build Options {..} CompilerOptions {..} = do
    df <- G.getSessionDynFlags
    void $ G.setSessionDynFlags =<< addCmdOpts ghcOptions
      ( setModeSimple
      $ Gap.setFlags
      $ setIncludeDirs includeDirs
      $ setBuildEnv build
      $ setEmptyLogger
      $ Gap.addPackageFlags depPackages df)
----------------------------------------------------------------
-- | Build a 'GhcModEnv' for @dir@: finds the cradle and allocates the
-- session IORef. The ref starts as an 'error' placeholder and is only
-- valid after 'initGhcMonad' fills it in (see 'runGhcModT'').
newGhcModEnv :: Options -> FilePath -> IO GhcModEnv
newGhcModEnv opt dir = do
      session <- newIORef (error "empty session")
      c <- findCradle' dir
      return GhcModEnv {
          gmGhcSession = session
        , gmOptions = opt
        , gmCradle = c
        }
-- | Tear down the 'Cradle' resources held by a 'GhcModEnv'.
cleanupGhcModEnv :: GhcModEnv -> IO ()
cleanupGhcModEnv = cleanupCradle . gmCradle
-- | Run a @GhcModT m@ computation.
-- Builds the environment from the current directory, initialises flags
-- from the cradle, runs the action, then cleans up the cradle.
-- @first (fst <$>)@ drops the final 'GhcModState' from the result pair.
-- NOTE(review): cleanup is sequenced after the action, not bracketed —
-- it appears to be skipped if the action throws; confirm intended.
runGhcModT :: IOish m
           => Options
           -> GhcModT m a
           -> m (Either GhcModError a, GhcModLog)
runGhcModT opt action = do
    env <- liftBase $ newGhcModEnv opt =<< getCurrentDirectory
    r <- first (fst <$>) <$> (runGhcModT' env defaultState $ do
        dflags <- getSessionDynFlags
        defaultCleanupHandler dflags $ do
            initializeFlagsWithCradle opt (gmCradle env)
            action)
    liftBase $ cleanupGhcModEnv env
    return r
-- | @hoistGhcModT result@. Embed a GhcModT computation's result into a GhcModT
-- computation. Note that if the computation that returned @result@ modified the
-- state part of GhcModT this cannot be restored.
hoistGhcModT :: IOish m
             => (Either GhcModError a, GhcModLog)
             -> GhcModT m a
hoistGhcModT (r,l) = do
    -- Replay the captured log, then re-raise or return the result.
    GhcModT (lift $ lift $ journal l) >> case r of
      Left e -> throwError e
      Right a -> return a
-- | Run a computation inside @GhcModT@ providing the RWST environment and
-- initial state. This is a low level function, use it only if you know what to
-- do with 'GhcModEnv' and 'GhcModState'.
--
-- The GHC session is initialised (via 'initGhcMonad') before the supplied
-- action runs; the result carries the final 'GhcModState' and the log.
--
-- You should probably look at 'runGhcModT' instead.
runGhcModT' :: IOish m
           => GhcModEnv
           -> GhcModState
           -> GhcModT m a
           -> m (Either GhcModError (a, GhcModState), GhcModLog)
runGhcModT' r s a =
    -- Previously bound the result pair and immediately 'return'ed it
    -- (@do (res, w') <- …; return (res, w')@); that redundant re-wrap
    -- is removed — behaviour is identical by the monad laws.
    flip runReaderT r $ runJournalT $ runErrorT $
      runStateT (unGhcModT $ initGhcMonad (Just libdir) >> a) s
----------------------------------------------------------------
-- | Make a copy of the 'gmGhcSession' IORef, run the action and restore the
-- original 'HscEnv'.
-- NOTE(review): the restore is plain sequencing, not an exception-safe
-- bracket — if @action@ throws, the saved session is apparently never
-- written back; confirm whether that is acceptable here.
withTempSession :: IOish m => GhcModT m a -> GhcModT m a
withTempSession action = do
  session <- gmGhcSession <$> ask
  savedHscEnv <- liftIO $ readIORef session
  a <- action
  liftIO $ writeIORef session savedHscEnv
  return a
-- | This is a very ugly workaround don't use it.
-- Re-initialises the session with the user's GHC options stripped, hands
-- the stripped options to @action@, and relies on 'withTempSession' to
-- restore the original session afterwards.
overrideGhcUserOptions :: IOish m => ([GHCOption] -> GhcModT m b) -> GhcModT m b
overrideGhcUserOptions action = withTempSession $ do
  env <- ask
  opt <- options
  let ghcOpts = ghcUserOptions opt
      opt' = opt { ghcUserOptions = [] }
  initializeFlagsWithCradle opt' (gmCradle env)
  action ghcOpts
----------------------------------------------------------------
-- Small accessors over the environment ('gmeAsk') and ghc-mod state.
gmeAsk :: IOish m => GhcModT m GhcModEnv
gmeAsk = ask
gmsGet :: IOish m => GhcModT m GhcModState
gmsGet = GhcModT get
gmsPut :: IOish m => GhcModState -> GhcModT m ()
gmsPut = GhcModT . put
options :: IOish m => GhcModT m Options
options = gmOptions <$> gmeAsk
cradle :: IOish m => GhcModT m Cradle
cradle = gmCradle <$> gmeAsk
getCompilerMode :: IOish m => GhcModT m CompilerMode
getCompilerMode = gmCompilerMode <$> gmsGet
-- | Record-update the stored mode; other state fields are preserved.
setCompilerMode :: IOish m => CompilerMode -> GhcModT m ()
setCompilerMode mode = (\s -> gmsPut s { gmCompilerMode = mode } ) =<< gmsGet
----------------------------------------------------------------
-- | Run an action with locally modified 'Options'; the change is visible
-- only inside @action@ and the environment is untouched afterwards.
withOptions :: IOish m => (Options -> Options) -> GhcModT m a -> GhcModT m a
withOptions changeOpt action =
    local (\env -> env { gmOptions = changeOpt (gmOptions env) }) action
----------------------------------------------------------------
-- | Lift base-monad actions through the whole stack.
instance (MonadBaseControl IO m) => MonadBase IO (GhcModT m) where
    liftBase = GhcModT . liftBase
-- | Monadic-state capture/restore for monad-control; 'StM' packages the
-- full four-layer state so computations can be run in the base monad.
instance (MonadBaseControl IO m) => MonadBaseControl IO (GhcModT m) where
    newtype StM (GhcModT m) a = StGhcMod {
          unStGhcMod :: StM (StateT GhcModState
                              (ErrorT GhcModError
                                (JournalT GhcModLog
                                  (ReaderT GhcModEnv m) ) ) ) a }
    liftBaseWith f = GhcModT . liftBaseWith $ \runInBase ->
        f $ liftM StGhcMod . runInBase . unGhcModT
    restoreM = GhcModT . restoreM . unStGhcMod
    {-# INLINE liftBaseWith #-}
    {-# INLINE restoreM #-}
-- GHC cannot prove the following instances to be decidable automatically using
-- the FlexibleContexts extension as they violate the second Paterson Condition,
-- namely that: The assertion has fewer constructors and variables (taken
-- together and counting repetitions) than the head. Specifically the
-- @MonadBaseControl IO m@ constraint is causing this violation.
--
-- Proof of termination:
--
-- Assuming all constraints containing the variable `m' exist and are decidable
-- we show termination by manually replacing the current set of constraints with
-- their own set of constraints and show that this, after a finite number of
-- steps, results in the empty set, i.e. not having to check any more
-- constraints.
--
-- We start by setting the constraints to be those immediate constraints of the
-- instance declaration which cannot be proven decidable automatically for the
-- type under consideration.
--
-- @
-- { MonadBaseControl IO m }
-- @
--
-- Classes used:
--
-- * @class MonadBase b m => MonadBaseControl b m@
--
-- @
-- { MonadBase IO m }
-- @
--
-- Classes used:
--
-- * @class (Applicative b, Applicative m, Monad b, Monad m) => MonadBase b m@
--
-- @
-- { Applicative IO, Applicative m, Monad IO, Monad m }
-- @
--
-- Classes used:
--
-- * @class Monad m@
-- * @class Applicative f => Functor f@
--
-- @
-- { Functor m }
-- @
--
-- Classes used:
--
-- * @class Functor f@
--
-- @
-- { }
-- @
-- ∎
-- | The GHC session lives in the environment's IORef, so get/set just
-- read/write through it (see the termination proof in the comment above
-- for why UndecidableInstances is justified here).
instance (Functor m, MonadIO m, MonadBaseControl IO m)
      => GhcMonad (GhcModT m) where
    getSession = (liftIO . readIORef) . gmGhcSession =<< ask
    setSession a = (liftIO . flip writeIORef a) . gmGhcSession =<< ask
#if __GLASGOW_HASKELL__ >= 706
instance (Functor m, MonadIO m, MonadBaseControl IO m)
      => HasDynFlags (GhcModT m) where
    getDynFlags = getSessionDynFlags
#endif
-- | Exception handling is delegated to the base monad via monad-control.
instance (MonadIO m, MonadBaseControl IO m)
      => ExceptionMonad (GhcModT m) where
    gcatch act handler = control $ \run ->
        run act `gcatch` (run . handler)
    gmask = liftBaseOp gmask . liftRestore
     where liftRestore f r = f $ liftBaseOp_ r
|
phaazon/ghc-mod
|
Language/Haskell/GhcMod/Monad.hs
|
bsd-3-clause
| 14,268
| 0
| 18
| 3,269
| 3,126
| 1,700
| 1,426
| 263
| 2
|
{-# OPTIONS_GHC -fno-warn-tabs #-}
module Base64
( base64Decode
, base64Encode
) where
import Common
import Word6
import Word24
import qualified Data.Map as M
import qualified Data.Vector.Generic as V
import Data.Word
-- | Look up a key that is expected to be present.
--
-- Still aborts on a missing key (callers rely on that for malformed
-- input), but now with a descriptive 'error' instead of the opaque
-- pattern-match failure the old @(\\(Just x) -> x)@ lambda produced.
mapLookup :: Ord k => M.Map k v -> k -> v
mapLookup m k =
	case M.lookup k m of
		Just v -> v
		Nothing -> error "Base64.mapLookup: key not found"
-- | Map a 6-bit value (0..63) to its base64 alphabet character.
base64ToChar :: Word6 -> Char
base64ToChar = mapLookup bcMap
	where
		bcMap = M.fromList $ zip [0..63] $ ['A'..'Z']++['a'..'z']++['0'..'9']++['+','/']
-- | Inverse of 'base64ToChar'; crashes on characters outside the alphabet.
charToBase64 :: Char -> Word6
charToBase64 = mapLookup cbMap
	where
		cbMap = M.fromList $ zip (['A'..'Z']++['a'..'z']++['0'..'9']++['+','/']) [0..63]
-- | Pack exactly four 6-bit groups into a 24-bit word, then slice it
-- into three bytes (offsets 16/8/0).
sixBitsToEightBits :: [Word6] -> [Word8]
sixBitsToEightBits ss
	| length ss /= 4 = error "Wrong number of six-bits"
	| otherwise = splitBitsAt [16, 8, 0] combined
	where
		combined = foldl appendBits 0 ss :: Word24
-- | Pack exactly three bytes into a 24-bit word, then slice it into four
-- 6-bit groups (offsets 18/12/6/0). Inverse of 'sixBitsToEightBits'.
eightBitsToSixBits :: [Word8] -> [Word6]
eightBitsToSixBits es
	| length es /= 3 = error "Wrong number of eight-bits"
	| otherwise = splitBitsAt [18, 12, 6, 0] combined
	where
		combined = foldl appendBits 0 es :: Word24
-- | Decode a base64 string into bytes; '=' padding is stripped first.
-- Trailing groups of 2/3 chars are zero-extended to a full quad, decoded,
-- and truncated to the 1/2 meaningful bytes they carry.
-- NOTE(review): a single leftover char is not valid base64, yet it is
-- decoded to one byte here rather than rejected — confirm that's intended.
base64Decode :: String -> ByteVector
base64Decode s = V.fromList $ go $ map charToBase64 $ filter (/= '=') s
	where
		go [] = []
		go (c1:[]) = take 1 $ go [c1, 0, 0, 0]
		go (c1:c2:[]) = take 1 $ go [c1, c2, 0, 0]
		go (c1:c2:c3:[]) = take 2 $ go [c1, c2, c3, 0]
		go cs = (sixBitsToEightBits $ take 4 cs) ++ (go $ drop 4 cs)
-- | Encode bytes as base64 with trailing '=' padding.
base64Encode :: ByteVector -> String
base64Encode bytes = base64Str ++ padding
	where
		base64Str = map base64ToChar $ go $ V.toList bytes
		padding = replicate padCount '='
		-- @- (n `mod` (-3))@ maps (length mod 3) of 0/1/2 to 0/2/1 pad
		-- chars: Haskell's `mod` with a negative divisor yields a result
		-- in (-3,0], so negating gives exactly the needed pad count.
		padCount = - (V.length bytes `mod` (-3))
		go [] = []
		go (b1:[]) = take 2 $ go [b1, 0, 0]
		go (b1:b2:[]) = take 3 $ go [b1, b2, 0]
		go bs = (eightBitsToSixBits $ take 3 bs) ++ (go $ drop 3 bs)
|
andrewcchen/matasano-cryptopals-solutions
|
modules/Base64.hs
|
bsd-3-clause
| 1,740
| 0
| 13
| 338
| 844
| 449
| 395
| 44
| 5
|
{-# LANGUAGE DeriveFoldable, DeriveFunctor, DeriveTraversable,
GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module Supercompile.Split (MonadStatics(..), split, generalise) where
import Evaluator.Deeds
import Evaluator.Evaluate (normalise)
import Evaluator.FreeVars
import Evaluator.Residualise
import Evaluator.Syntax
import Name
import Renaming
import StaticFlags
import Utilities hiding (tails)
import Algebra.Lattice
import qualified Data.Foldable as Foldable
import qualified Data.IntSet as IS
import Data.List (mapAccumL, zipWith4)
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.Traversable as Traversable
import Core.FreeVars
import Core.Renaming
import Core.Syntax
import Termination.Generaliser (Generaliser (..))
-- | Monads that can residualise floated h-function bindings whose free
-- variables are about to be captured (see the notes on binding captured
-- floats below). The simple form defaults to the CPS variant.
class Monad m => MonadStatics m where
    bindCapturedFloats :: FreeVars -> m a -> m (Out [(Var, FVedTerm)], a)
    bindCapturedFloats fvs mx = bindCapturedFloats' fvs mx (\hes x -> return (hes, x))
    bindCapturedFloats' :: FreeVars -> m a -> (Out [(Var, FVedTerm)] -> a -> m r) -> m r
--
-- == Gathering entry information for the splitter ==
--
-- | How many times a binding was entered during evaluation.
data Entered = Once Id -- ^ The Id is a context identifier: if a binding is Entered twice from the same context it's really a single Entrance
             | Many -- ^ A result of anything other than Once (or None, represented implicitly) is uninteresting for optimisation purposes
             deriving (Eq, Show)
instance Pretty Entered where
    pPrint = text . show
-- | 'join' combines entry evidence; see 'plusEntered' for the semantics.
instance JoinSemiLattice Entered where
    join = plusEntered
-- | Was the binding entered at most once (i.e. from a single context)?
isOnce :: Entered -> Bool
isOnce e = case e of
  Once _ -> True
  Many   -> False
-- | Combine two pieces of entry evidence: only identical single-context
-- entries remain 'Once'; every other combination collapses to 'Many'.
plusEntered :: Entered -> Entered -> Entered
plusEntered e1 e2 =
  case (e1, e2) of
    (Once i1, Once i2) | i1 == i2 -> Once i1
    _ -> Many
-- | Per-variable entry information for the residualised output.
type EnteredEnv = M.Map (Out Var) Entered
-- | Give every free variable the same 'Entered' annotation.
mkEnteredEnv :: Entered -> FreeVars -> EnteredEnv
mkEnteredEnv = setToMap
--
-- == The splitter ==
-- Note [Phantom variables and bindings introduced by scrutinisation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- If we never introduced bindings from scrutinisation, the world of phantom bindings would be relatively
-- simple. In such a world, we would have this property:
--
-- The free variables of h-functions generated while supercompiling some term would never have
-- more free variables than the term being supercompiled
--
-- Unfortunately, this is not true in the real world. What can happen is this. We supercompile:
-- h1 x = case x of True -> e1; False -> e2
--
-- Which leads to the two recursively-supercompiled components:
-- h2 = let <x = True> in e1
-- h3 = let <x = False> in e2
--
-- Note that x was not static (free) in h1, but it is static (free) in h2. Thus, h-functions generated
-- during supercompilation (h2, h3) have more free variables than the term from which they were generated (h1).
--
--
-- Note [When to bind captured floats]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Ordinarily, we only need to check to see if we residualise some floating h-functions when we produce
-- a residual let-binding. This is because in the normal course of things any binding that was originally
-- introduced as a lambda/alt-binding will never be made into a free variable of a final h-function. However,
-- there are two situations which break this invariant:
-- 1. We might choose to create a LetBound heap binding when driving the branches of a residual case expression
-- that scrutinises a free variable. This changes a LambdaBound thing into a LetBound one, so we need to be
-- careful to residualise the resulting h-function under that lambda-binder.
--
-- In fact, we used to do this but don't any more - see Note [Phantom variables and bindings introduced by scrutinisation]
-- 2. More relevantly, we might implement an optimisation that prevents h-functions from being lambda-abstracted
-- over anything lambda-bound above a let-binding that we can see will trap the h-function under a let. For example,
-- when driving:
--
-- \x -> let f = \y -> ...
-- in D[<x |-> \lambda{}, f |-> l{\y -> ...} | ... f ... x ...>]
--
-- There is no point lambda-abstracting over x because we're going to have to drop the h-function under the f
-- binding anyway. To implement this we might drive with (x |-> l{}) instead, but once again this converts a
-- lambda-binding to a let-binding.
--
-- For this reason, we are careful to use bindCapturedFloats even when driving the arms of case expressions/bodies of lambdas.
--
--
-- Note [Bind captured floats fixed point]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Because bound h-functions (e.g. h2 or h3) may be referred to by other h-functions (e.g. h1) which do not
-- refer to any of the free variables of the h-functions we are about to bind, we have a fixed point in bindCapturedFloats.
-- This fixed point ensures we bind those h-functions that have as free variables any h-functions we are about to bind.
{-# INLINE split #-}
-- | Split a 'State' into residual syntax plus sub-states that are driven
-- recursively via @opt@. An ungeneralised split: passes 'bottom' as the
-- "what to generalise" argument. The scrutinee variable is recorded when
-- the focus is a 'Question' (see splitQA for the focus bracketing).
split :: MonadStatics m
      => State
      -> (State -> m (Deeds, Out FVedTerm))
      -> m (Deeds, Out FVedTerm)
split (deeds, Heap h ids, k, (rn, qa)) opt
  = generaliseSplit opt bottom deeds (Heap h ids1, [0..] `zip` k, (case annee qa of Question x -> [rename rn x]; Answer _ -> [], splitQA ids2 (rn, annee qa)))
  where (ids1, ids2) = splitIdSupply ids
{-# INLINE generalise #-}
-- | Choose stack frames and heap bindings to generalise away, guided by
-- @gen@ and the 'gENERALISATION' strategy flag; returns 'Nothing' when no
-- admissible generalisation exists, otherwise a continuation that performs
-- the corresponding split.
generalise :: MonadStatics m
           => Generaliser
           -> State
           -> Maybe ((State -> m (Deeds, Out FVedTerm)) -> m (Deeds, Out FVedTerm))
generalise gen (deeds, Heap h ids, k, (rn, qa)) = do
    let named_k = [0..] `zip` k
    -- Each strategy yields (stack-frame indices, heap binder names) to drop.
    (gen_kfs, gen_xs') <- case gENERALISATION of
        NoGeneralisation -> Nothing
        AllEligible -> guard (not (IS.null gen_kfs) || not (S.null gen_xs')) >> return (gen_kfs, gen_xs')
          where gen_kfs = IS.fromList [i | (i, kf) <- named_k, generaliseStackFrame gen kf]
                gen_xs' = S.fromList [x' | (x', hb) <- M.toList h, generaliseHeapBinding gen x' hb, assertRender ("Bad generalisation", x', hb, heapBindingTag hb) (not (howBound hb == LambdaBound && isNothing (heapBindingTerm hb))) True]
        StackFirst -> (guard (not (IS.null gen_kfs)) >> return (gen_kfs, S.empty)) `mplus`
                      (guard (not (S.null gen_xs')) >> return (IS.empty, gen_xs'))
          where gen_kfs = IS.fromList [i | (i, kf) <- named_k, generaliseStackFrame gen kf]
                gen_xs' = S.fromList [x' | (x', hb) <- M.toList h, generaliseHeapBinding gen x' hb, assertRender ("Bad generalisation", x', hb, heapBindingTag hb) (not (howBound hb == LambdaBound && isNothing (heapBindingTerm hb))) True]
        DependencyOrder want_first -> listToMaybe ((if want_first then id else reverse) possibilities)
          where -- We consider possibilities starting from the root of the term -- i.e. the bottom of the stack.
                -- This is motivated by how this interacts with subgraph generalisation for TreeFlip/TreeSum.
                -- FIXME: explain in more detail if this turns out to be sane.
                possibilities = findGeneralisable False S.empty (reverse named_k) h
                -- Walks outward from the root, one stack frame at a time,
                -- pulling in the heap bindings reachable at each step.
                findGeneralisable :: Bool -> FreeVars -> NamedStack -> PureHeap -> [(IS.IntSet, S.Set (Out Var))]
                findGeneralisable done_qa pending_xs' unreached_kfs unreached_hbs
                   | done_qa && null pending_kfs && M.null pending_hbs
                   = []
                   | otherwise
                   = [(gen_kf_is, gen_xs') | not (IS.null gen_kf_is) || not (S.null gen_xs')] ++
                     findGeneralisable done_qa' reached_xs' unreached_kfs' unreached_hbs'
                  where
                    (done_qa', extra_pending_xs') = if done_qa || not (null unreached_kfs) then (done_qa, S.empty) else (True, inFreeVars annedFreeVars (rn, qa))
                    (pending_kfs, unreached_kfs') = splitAt 1 unreached_kfs
                    (pending_hbs, unreached_hbs') = M.partitionWithKey (\x' _hb -> x' `S.member` (pending_xs' `S.union` extra_pending_xs')) unreached_hbs
                    gen_kf_is = IS.fromList [i | (i, kf) <- pending_kfs, generaliseStackFrame gen kf]
                    gen_xs' = S.fromList [x' | (x', hb) <- M.toList pending_hbs, generaliseHeapBinding gen x' hb, assertRender ("Bad generalisation", x', hb, heapBindingTag hb) (not (howBound hb == LambdaBound && isNothing (heapBindingTerm hb))) True]
                    reached_xs' = M.foldrWithKey (\_x' hb fvs -> heapBindingFreeVars hb `S.union` fvs)
                                                 (S.unions (map (stackFrameFreeVars . tagee . snd) pending_kfs))
                                                 pending_hbs
    -- If we can find some fraction of the stack or heap to drop that looks like it will be admissable, just residualise those parts and continue
    traceRender ("gen_kfs", gen_kfs, "gen_xs'", gen_xs') $ return ()
    let (ids', ctxt_id) = stepIdSupply ids
    return $ \opt -> generaliseSplit opt (gen_kfs, gen_xs') deeds (Heap h ids', named_k, ([], oneBracketed (Once ctxt_id, \ids -> (0, Heap M.empty ids, [], (rn, fmap qaToAnnedTerm' qa)))))
{-# INLINE generaliseSplit #-}
-- | Carry out a generalisation: split the prepared state along the supplied
-- boundary (the stack-frame indexes and heap binders that must be
-- residualised), then drive the resulting residual heap bindings and focus
-- with the given optimiser.
generaliseSplit :: MonadStatics m
                => (State -> m (Deeds, Out FVedTerm))
                -> (IS.IntSet, S.Set (Out Var))
                -> Deeds
                -> (Heap, NamedStack, ([Out Var], Bracketed (Entered, IdSupply -> UnnormalisedState)))
                -> m (Deeds, Out FVedTerm)
generaliseSplit opt split_from deeds prepared_state
  = case splitt split_from deeds prepared_state of
      (deeds', bracketeds_heap, bracketed_focus) -> optimiseSplit opt deeds' bracketeds_heap bracketed_focus
-- Discard dead bindings:
-- let x = ...
-- in 1
-- ==>
-- 1
--
-- But include transitively needed ones:
-- let w = ...
-- x = ...
-- y = ... x ...
-- z = ... y ...
-- in z
-- ==>
-- let z = let x = ...
-- y = ... x ...
-- in ... y ...
-- in z
--
-- Inline values and linear things into residual bindings:
-- let x = ... y ...
-- y = ...
-- in \_ -> ... x ...
-- ===>
-- let x = let y = ...
-- in ... y ...
-- in \_ -> ... x ...
--
-- Inline values into residual non-linear things:
-- let x = (y:ys)
-- in \_ -> ... x ...
-- ==>
-- \_ -> let x = (y:ys)
-- in ... x ...
--
-- Do NOT inline linear things into non-linear things:
-- let x = (y:ys)
-- y = ...
-- in \_ -> ... x ...
-- =/=>
-- \_ -> let x = let y = ...
-- in (y:ys)
-- in ... x ...
-- ===>
-- let y = ...
-- in \_ -> let x = (y:ys)
-- in ... x ...
--
-- Inline things that are (apparently) used a non-linear number of times into linear things:
-- let w = ...
-- x = ... w ...
-- y = ... w ...
-- z = (x, y)
-- in Just z
-- ===>
-- let z = let w = ...
-- x = ... w ...
-- y = ... w ...
-- in (x, y)
-- in Just z
--
-- Treat non-linearity due only to |case| branches as linearity:
-- let x = ...
-- in case unk of C -> ... x ...; D -> ... x ...
-- ===>
-- case unk of C -> let x = ... in ... x ...
-- D -> let x = ... in ... x ...
--
-- Let-float things to trivialise them:
-- let x = let y = ... in (y:xs)
-- in \_ -> ... x ...
-- ===>
-- let y = ....
-- \_ -> let x = (y:xs) in ... x ...
--
-- Note [EC binds something we need to refer to above]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- let z = f x
-- y = unk + z
-- x = case y of _ -> 2
-- in x + 2
--
-- After splitting:
-- let x = 2
-- in x + 2
--
-- That's fine, but how are we going to get a reference to the "x" when residualising the y binding above?
-- let z = f x
-- y = unk + z
-- in case y of _ -> h0
--
-- Lacking extra language features, our only option is to under-specialise the floats by inlining less
-- evaluation context.
-- | A residual term with zero or more holes in it. Each hole will eventually
-- be plugged with a driven version of one of the 'fillers'. We keep enough
-- summary information around (free variables, per-hole bound variables, tail
-- positions) to reason about the bracketed thing without rebuilding it.
data Bracketed a = Bracketed {
    rebuild :: [Out FVedTerm] -> Out FVedTerm, -- ^ Rebuild the full output term given outputs to plug into each hole
    extraFvs :: FreeVars,                      -- ^ Maximum free variables added by the residual wrapped around the holes
    extraBvs :: [BoundVars],                   -- ^ Maximum bound variables added at each hole by the residual wrapped around the holes
    fillers :: [a],                            -- ^ Hole-fillers themselves. Usually State
    tails :: Maybe [Int]                       -- ^ The indexes of all holes in tail position. If this is not Nothing, this is an *exhaustive* list of possible tail positions.
  } deriving (Functor, Foldable.Foldable, Traversable.Traversable)
-- | Accumulating traversal visits the hole-fillers only: the residual wrapper
-- contains no elements of type @a@ to visit.
instance Accumulatable Bracketed where
    mapAccumTM f acc b = liftM (\(acc', fillers') -> (acc', b { fillers = fillers' })) $ mapAccumTM f acc (fillers b)
-- | A 'Bracketed' thing with no holes at all: rebuilding simply returns the
-- given residual term, whose free variables are therefore the only extra free
-- variables of the result.
noneBracketed :: Out FVedTerm -> Bracketed a
noneBracketed a = Bracketed (\[] -> a) (freeVars a) [] [] Nothing
-- | A 'Bracketed' thing that is nothing but a single hole: rebuilding returns
-- whatever the hole was plugged with, no extra variables are introduced, and
-- the lone hole (index 0) is trivially in tail position.
oneBracketed :: a -> Bracketed a
oneBracketed x = Bracketed (\[e] -> e) S.empty [S.empty] [x] (Just [0])
-- | Combine several 'Bracketed' things into one. The caller supplies functions
-- saying how to combine the rebuilt terms, the extra free variables, the
-- per-hole bound variables and the tail-position information. The fillers of
-- the result are the concatenation of the inputs' fillers, and the hole
-- indexes in the tails information are shifted to match.
zipBracketeds :: ([Out FVedTerm] -> Out FVedTerm) -- ^ Combine the rebuilt subterms into a single output term
              -> ([FreeVars] -> FreeVars)         -- ^ Combine the extra free variables contributed by each input
              -> [BoundVars -> BoundVars]         -- ^ Per-input adjustment of the bound variables at each of its holes
              -> ([Maybe [Int]] -> Maybe [Int])   -- ^ Combine the (already index-shifted) tail information of each input
              -> [Bracketed a]
              -> Bracketed a
zipBracketeds a b c d bracketeds = Bracketed {
    -- Split the incoming hole-fillings back up according to how many holes each input had:
    rebuild = \(splitManyBy xss -> ess') -> a (zipWith rebuild bracketeds ess'),
    extraFvs = b (map extraFvs bracketeds),
    extraBvs = concat $ zipWith (\c_fn extra_bvs -> map c_fn extra_bvs) c (map extraBvs bracketeds),
    fillers = concat xss,
    -- Offset each input's tail indexes by the number of holes contributed by the inputs before it:
    tails = d $ snd $ foldl (\(i, tailss) bracketed -> (i + length (fillers bracketed), tailss ++ [fmap (map (+ i)) (tails bracketed)])) (0, []) bracketeds
  }
  where xss = map fillers bracketeds
-- | Free variables of a 'Bracketed' thing: those of the residual wrapper
-- itself, plus those of every hole-filler minus whatever the wrapper binds
-- around that particular hole.
bracketedFreeVars :: (a -> FreeVars) -> Bracketed a -> FreeVars
bracketedFreeVars fvs bracketed
  = S.unions (extraFvs bracketed : zipWith (\filler_fvs hole_bvs -> filler_fvs S.\\ hole_bvs)
                                           (map fvs (fillers bracketed))
                                           (extraBvs bracketed))
-- | Reclaim the deeds held by every hole-filler of a 'Bracketed' thing.
releaseBracketedDeeds :: (a -> Deeds) -> Bracketed a -> Deeds
releaseBracketedDeeds release = sumMap release . fillers
-- | Apply a transformation to the hole-fillers of a 'Bracketed' thing, leaving
-- the residual wrapper (rebuild function, extra free/bound variables, and tail
-- information) untouched. NB: record-update syntax cannot be used here because
-- the filler type changes from @a@ to @b@.
modifyFillers :: ([a] -> [b]) -> Bracketed a -> Bracketed b
modifyFillers f bracketed
  = Bracketed (rebuild bracketed)
              (extraFvs bracketed)
              (extraBvs bracketed)
              (f (fillers bracketed))
              (tails bracketed)
-- | Apply a transformation to exactly those hole-fillers that sit in tail
-- position, threading out an extra result. Returns 'Nothing' when the tail
-- positions of the 'Bracketed' thing are unknown.
modifyTails :: ([a] -> (b, [a])) -> Bracketed a -> Maybe (b, Bracketed a)
modifyTails f bracketed = fmap transform (tails bracketed)
  where
    transform is = (b, bracketed { fillers = fillIndexes (is `zip` fillers') (fillers bracketed) })
      where (b, fillers') = f (takeIndexes is (fillers bracketed))
-- | Project out the elements of the list at the given (0-based) indexes, in
-- the order the indexes are listed. Partial: each index must be in range.
takeIndexes :: [Int] -> [a] -> [a]
takeIndexes is xs = [xs !! i | i <- is]
-- | Replace the elements of the list at the given (0-based) indexes with the
-- paired replacement values, applied left-to-right (so a later pair for the
-- same index wins). Partial: each index must be in range.
fillIndexes :: [(Int, a)] -> [a] -> [a]
fillIndexes ixs xs0 = foldl fill_one xs0 ixs
  where fill_one xs (i, x') = case splitAt i xs of (xs_l, _:xs_r) -> xs_l ++ x' : xs_r
-- | Optimise a list of things in sequence, threading the available deeds
-- through from each optimisation to the next.
optimiseMany :: Monad m
             => ((Deeds, a) -> m (Deeds, Out FVedTerm))
             -> (Deeds, [a])
             -> m (Deeds, [Out FVedTerm])
optimiseMany opt (deeds, xs) = mapAccumLM (\deeds' x -> opt (deeds', x)) deeds xs
-- | Drive each hole-filler of a 'Bracketed' state with the supplied optimiser
-- (threading the deeds through), then plug the resulting terms back into the
-- residual wrapper.
optimiseBracketed :: MonadStatics m
                  => (State -> m (Deeds, Out FVedTerm))
                  -> (Deeds, Bracketed State)
                  -> m (Deeds, Out FVedTerm)
optimiseBracketed opt (deeds, b) = liftM (second (rebuild b)) $ optimiseMany optimise_one (deeds, extraBvs b `zip` fillers b)
  where optimise_one (deeds, (extra_bvs, (s_deeds, s_heap, s_k, s_e))) = liftM (\(xes, (deeds, e)) -> (deeds, letRecSmart xes e)) $ bindCapturedFloats extra_bvs $ opt (deeds + s_deeds, s_heap, s_k, s_e)
        -- Because h-functions might potentially refer to the lambda/case-alt bound variables around this hole,
        -- we use bindCapturedFloats to residualise such bindings within exactly this context.
        -- See Note [When to bind captured floats]
-- | Apply a length-preserving transformation to the concatenation of one flat
-- list and one list of lists, then carve the transformed output back up into
-- the original two structures. Used to e.g. apportion deeds across several
-- list-shaped containers in a single pass.
transformWholeList :: ([a] -> [b]) -- Transformer of concatenated lists -- must be length-preserving!
                   -> [a] -> [[a]] -- Unconcatenated list structures to transform
                   -> ([b], [[b]]) -- Unconcatenated result of transformation
transformWholeList f xs yss = (xs', yss')
  where ys = concat yss
        zs0 = f (xs ++ ys)
        -- Peel off the transformed images of xs and ys in turn; the pattern
        -- ([]) asserts that f really was length-preserving:
        (xs', zs1) = splitBy xs zs0
        (ys', []) = splitBy ys zs1
        yss' = splitManyBy yss ys'
-- TODO: when driving a residual binding:
-- let x = D[e]
-- in ..
--
-- Arjan Boeijink suggested driving the following instead of D[e]:
-- D[< | e | update x>]
--
-- This can help propagate more positive information, e.g. if e contains an occurrence of x itself
--
-- I'm not doing this right now because I'm wary about the termination issues. We should also be careful that we
-- don't create loops as a result...
-- | Residualise a split: apportion the available deeds between the heap
-- bindings and the focus, drive the focus, then repeatedly drive exactly those
-- heap 'Bracketed' bindings that have become free in what has been
-- residualised so far, finally wrapping the surviving let-bindings around the
-- driven focus.
optimiseSplit :: MonadStatics m
              => (State -> m (Deeds, Out FVedTerm))
              -> Deeds
              -> M.Map (Out Var) (Bracketed State)
              -> Bracketed State
              -> m (Deeds, Out FVedTerm)
optimiseSplit opt deeds bracketeds_heap bracketed_focus = do
    -- 0) The "process tree" splits at this point. We can choose to distribute the deeds between the children in a number of ways
    let bracketSizes = map stateSize . fillers
        (heap_xs, bracketeds_heap_elts) = unzip (M.toList bracketeds_heap)
        -- NB: it is *very important* that the list supplied to apportion contains at least one element and at least one non-zero weight, or some
        -- deeds will vanish in a puff of digital smoke. We deal with this in the proportional case by padding the input list with a 1
        (deeds_initial:deeds_focus, deedss_heap)
          | Proportional <- dEEDS_POLICY = transformWholeList (apportion deeds) (1 : bracketSizes bracketed_focus) (map bracketSizes bracketeds_heap_elts)
          | otherwise = (deeds : [0 | _ <- bracketSizes bracketed_focus], [[0 | _ <- bracketSizes b] | b <- bracketeds_heap_elts])
        bracketeds_deeded_heap = M.fromList (heap_xs `zip` zipWith (\deeds_heap -> modifyFillers (zipWith addStateDeeds deeds_heap)) deedss_heap bracketeds_heap_elts)
        bracketed_deeded_focus = modifyFillers (zipWith addStateDeeds deeds_focus) bracketed_focus
    -- Sanity check: the deed apportionment above must neither create nor destroy deeds
    assertRenderM (text "optimiseSplit: deeds lost or gained!", deeds, (deeds_initial, deeds_focus, deedss_heap))
                  (noChange (sumMap (releaseBracketedDeeds releaseStateDeed) bracketeds_heap + releaseBracketedDeeds releaseStateDeed bracketed_focus + deeds)
                            (sumMap (releaseBracketedDeeds releaseStateDeed) bracketeds_deeded_heap + releaseBracketedDeeds releaseStateDeed bracketed_deeded_focus + deeds_initial))
    -- 1) Recursively drive the focus itself
    let extra_statics = M.keysSet bracketeds_heap
    bindCapturedFloats' extra_statics (optimiseBracketed opt (deeds_initial, bracketed_deeded_focus)) $ \hes (leftover_deeds, e_focus) -> do
        -- 2) We now need to think about how we are going to residualise the letrec. In fact, we need to loop adding
        -- stuff to the letrec because it might be the case that:
        --  * One of the hes from above refers to some heap binding that is not referred to by the let body
        --  * So after we do withStatics above we need to drive some element of the bracketeds_heap
        --  * And after driving that we find in our new hes a new h function referring to a new free variable
        --    that refers to some binding that is as yet unbound...
        (leftover_deeds, bracketeds_deeded_heap, xes, _fvs) <- go hes extra_statics leftover_deeds bracketeds_deeded_heap [] (fvedTermFreeVars e_focus)
        -- 3) Combine the residualised let bindings with the let body
        return (sumMap (releaseBracketedDeeds releaseStateDeed) bracketeds_deeded_heap + leftover_deeds,
                letRecSmart xes e_focus)
  where
    -- TODO: clean up this incomprehensible loop
    -- TODO: investigate the possibility of just fusing in the optimiseLetBinds loop with this one
    go hes extra_statics leftover_deeds bracketeds_deeded_heap xes fvs = do
        let extra_statics' = extra_statics `S.union` S.fromList (map fst hes) -- NB: the statics already include all the binders from bracketeds_deeded_heap, so no need to add xes stuff
        -- TODO: no need to get FVs in this way (they are in Promise)
        bindCapturedFloats' extra_statics' (optimiseLetBinds opt leftover_deeds bracketeds_deeded_heap (fvs `S.union` S.unions (map (fvedTermFreeVars . snd) hes))) $ \hes' (leftover_deeds, bracketeds_deeded_heap, fvs, xes') -> do
            -- Loop until no new h-functions are floated out; only retain those floated bindings that are actually free downstream
            (if null hes' then (\a b c d -> return (a,b,c,d)) else go hes' extra_statics') leftover_deeds bracketeds_deeded_heap (xes ++ [(x', e') | (x', e') <- hes, x' `S.member` fvs] ++ xes') fvs
-- We only want to drive (and residualise) as much as we actually refer to. This loop does this: it starts
-- by residualising the free variables of the focus residualisation (or whatever is in the let body),
-- and then transitively inlines any bindings whose corresponding binders become free.
optimiseLetBinds :: MonadStatics m
                 => (State -> m (Deeds, Out FVedTerm))
                 -> Deeds
                 -> M.Map (Out Var) (Bracketed State)
                 -> FreeVars
                 -> m (Deeds, M.Map (Out Var) (Bracketed State), FreeVars, Out [(Var, FVedTerm)])
optimiseLetBinds opt leftover_deeds bracketeds_heap fvs' = -- traceRender ("optimiseLetBinds", M.keysSet bracketeds_heap, fvs') $
    go leftover_deeds bracketeds_heap [] fvs'
  where
    -- Loop invariant: xes_resid are the already-driven residual bindings, and
    -- resid_fvs are all the free variables demanded by anything driven so far.
    go leftover_deeds bracketeds_deeded_heap_not_resid xes_resid resid_fvs
      | M.null h_resid = return (leftover_deeds, bracketeds_deeded_heap_not_resid, resid_fvs, xes_resid)
      | otherwise = {- traceRender ("optimiseSplit", xs_resid') $ -} do
        -- Recursively drive the new residuals arising from the need to bind the resid_fvs
        (leftover_deeds, es_resid') <- optimiseMany (optimiseBracketed opt) (leftover_deeds, bracks_resid)
        let extra_resid_fvs' = S.unions (map fvedTermFreeVars es_resid')
        -- Recurse, because we might now need to residualise and drive even more stuff (as we have added some more FVs and BVs)
        go leftover_deeds bracketeds_deeded_heap_not_resid'
           (xes_resid ++ zip xs_resid' es_resid')
           (resid_fvs `S.union` extra_resid_fvs')
      where
        -- When assembling the final list of things to drive, ensure that we exclude already-driven things
        (h_resid, bracketeds_deeded_heap_not_resid') = M.partitionWithKey (\x _br -> x `S.member` resid_fvs) bracketeds_deeded_heap_not_resid
        (xs_resid', bracks_resid) = unzip $ M.toList h_resid
-- FIXME: I could use the improved entered info that comes from the final FVs to adjust the split and float more stuff inwards..
type NamedStack = [(Int, Tagged StackFrame)]
-- | The core of the splitter. Given a generalisation decision (the stack
-- frames and heap binders that must be residualised), the available 'Deeds'
-- and the prepared state, compute the residual "let" bindings and body as
-- 'Bracketed' states. The correct split is found as a least fixed point over
-- the set of binders that it is safe *not* to residualise.
splitt :: (IS.IntSet, S.Set (Out Var))
       -> Deeds
       -> (Heap, NamedStack, ([Out Var], Bracketed (Entered, IdSupply -> UnnormalisedState))) -- ^ The thing to split, and the Deeds we have available to do it
       -> (Deeds,                             -- ^ The Deeds still available after splitting
           M.Map (Out Var) (Bracketed State), -- ^ The residual "let" bindings
           Bracketed State)                   -- ^ The residual "let" body
splitt (gen_kfs, gen_xs) old_deeds (cheapifyHeap old_deeds -> (deeds, Heap h (splitIdSupply -> (ids_brack, ids))), named_k, (scruts, bracketed_qa))
    = snd $ split_step split_fp
      -- Once we have the correct fixed point, go back and grab the associated information computed in the process
      -- of obtaining the fixed point. That is what we are interested in, not the fixed point itself!
      -- TODO: eliminate redundant recomputation here?
  where
    -- We compute the correct way to split as a least fixed point, slowly building up a set of variables
    -- (bound by heap bindings and update frames) that it is safe *not* to residualise.
    --
    -- Note that as an optimisation, optimiseSplit will only actually create those residual bindings if the
    -- corresponding variables are free *after driving*. Of course, we have no way of knowing which bindings
    -- will get this treatment here, so just treat resid_xs as being exactly the set of residualised stuff.
    split_fp = lfpFrom (S.empty, S.empty) (fst . split_step)
    -- Simultaneously computes the next fixed-point step and some artifacts computed along the way,
    -- which happen to correspond to exactly what I need to return from splitt.
    split_step (safe_not_resid_xs, deeds_resid_xs) = -- let pPrintBracketedState = map pPrintFullState . fillers in traceRender ("split_step", (not_resid_xs, bound_xs S.\\ not_resid_xs), pureHeapBoundVars h_not_residualised, pureHeapBoundVars h_residualised, M.map pPrintBracketedState bracketeds_heap', pPrintBracketedState bracketed_focus') $
                                                     ((safe_not_resid_xs', deeds_resid_xs'), (deeds4, bracketeds_heap', bracketed_focus'))
      where
        -- 0) Compute the set of variables that I can *actually* get away without residualising, once deeds are accounted for
        -- See Note [Deeds and splitting] for further details on this.
        not_resid_xs = safe_not_resid_xs S.\\ deeds_resid_xs
        -- 1) Build a candidate splitting for the Stack and QA components
        -- When creating the candidate stack split, we ensure that we create a residual binding
        -- for any variable in the resid_xs set, as we're not going to inline it to continue.
        --
        -- We also take this opportunity to fill in the IdSupply required by each prospective new State.
        -- We can use the same one for each context because there is no danger of shadowing.
        fill_ids :: Bracketed (Entered, IdSupply -> UnnormalisedState) -> Bracketed (Entered, UnnormalisedState) -- NB: do NOT normalise at this stage because in transitiveInline we assume that State heaps are droppable!
        fill_ids = fmap (\(ent, f) -> (ent, f ids_brack))
        (deeds1a, bracketeds_updated, bracketed_focus)
          = (\(a, b, c) -> (a, M.map fill_ids b, fill_ids c)) $
            pushStack ids deeds scruts [(need_not_resid_kf i kf, kf) | (i, kf) <- named_k] bracketed_qa
        need_not_resid_kf i kf
          | i `IS.member` gen_kfs
          = False
          | Update x' <- tagee kf -- We infer the stack frames we're not residualising based on the *variables* we're not residualising
          = x' `S.member` not_resid_xs
          | otherwise
          = True
        -- 2) Build a splitting for those elements of the heap we propose to residualise not in not_resid_xs
        -- TODO: I should residualise those Unfoldings whose free variables have become interesting due to intervening scrutinisation
        (h_not_residualised, h_residualised) = M.partitionWithKey (\x' _ -> x' `S.member` not_resid_xs) h
        bracketeds_nonupdated0 = M.mapMaybeWithKey (\x' hb -> do { guard (howBound hb == InternallyBound); return $ case heapBindingTerm hb of Nothing -> (error "Unimplemented: no tag for undefined", noneBracketed (fvedTerm (Var (name "undefined")))); Just in_e@(_, e) -> (annedTag e, fill_ids $ oneBracketed (Once (fromJust (name_id x')), \ids -> (0, Heap M.empty ids, [], in_e))) }) h_residualised
        -- An idea from Arjan, which is sort of the dual of positive information propagation:
        -- FIXME: this is too dangerous presently: we often end up adding an Update at the end just after we generalised it away, building ourselves a nice little loop :(
        -- I have tried to work around this by only introducing Update frames for those things that don't presently have one... but that also doesn't work because if we start
        -- with (let x = v in x) then we reduce to (let x = v in \underbar{x}) and then split to (let x = v in x)
        --(deeds1, bracketeds_nonupdated) = M.mapAccumWithKey (\deeds x' (update_tg, brack) -> modifyTails (\states -> case claimDeeds deeds (length states) of Nothing -> (deeds, states); Just deeds -> (deeds, map (\(entered, (deeds, heap, k, in_e)) -> (entered, (deeds, heap, k ++ [Tagged update_tg (Update x')], in_e))) states)) brack `orElse` (deeds, brack)) deeds1a bracketeds_nonupdated0
        (deeds1, bracketeds_nonupdated) = (deeds1a, M.map snd bracketeds_nonupdated0)
        -- For every heap binding we ever need to deal with, contains a version of that heap binding as a concrete Bracketed thing
        bracketeds_heap = bracketeds_updated `M.union` bracketeds_nonupdated
        -- 3) Inline as much of the Heap as possible into the candidate splitting
        -- 3a) Release deeds
        -- In order to make the Deeds-based stuff less conservative, my first action here is to release our claims to those deeds
        -- which we do *not* intend to create a residual let binding for here and now. This will let us always inline a heap-bound
        -- thing into *at least one* context (unless it really is referred to by the residual code).
        --
        -- The equivalent process is done for the stack in splitStack itself: we just subtract 1 from the number of deeds we need to
        -- claim when duplicating a stack frame.
        deeds2 = releasePureHeapDeeds deeds1 h_not_residualised
        -- 3b) Work out which part of the heap is admissable for inlining
        -- * We are allowed to inline concrete things which are duplicatable or are not residualised right here and now
        -- * Non-concrete stuff should be inlined if and only if it is not explicitly residualised by the caller. The motivation is that
        --   if we generalise away a term, we want to generalise away the staticness as well. Furthermore, it is clear that if we are
        --   just generalising away staticness itself we certainly should not push the corresponding non-concrete binding down.
        -- * We take this opportunity to mark all residualised things as static (being careful to not override actual definitions in h_cheap).
        --   It is important that we do not mark residualised things as phantoms just because they are in bracketeds_heap. If we did, it would mean
        --   that *concrete residualised stuff* is recorded as a phantom even if it was explicitly residualised in the initial iteration (since
        --   anything residualised in the first iteration is certainly in bracketeds_heap).
        -- * If we are inlining a value (into a non-linear context), we are careful to only inline an *indirection* to that value. That
        --   allows us to prevent duplicating the allocation of such values. NB: we still duplicate allocation of cheap non-values, but never mind...
        --
        -- Inlineable things are either:
        -- 1) Heap bindings from the input (i.e from the heap and update frames) that have not been residualised for work duplication reasons
        -- 2) Concrete values and cheap expressions from the input, in a form that is suitable for pushing down (i.e. values have been turned into indirections).
        -- 3) Phantom versions of phantom input heap bindings (just copied verbatim).
        -- 4) Phantom versions of concrete input heap bindings
        -- The range of this heap is lte that of bracketeds_heap. We explicitly EXCLUDE those bindings that we are residualising based on the generalisation heuristic.
        -- We prefer input heap bindings to everything else, and concrete values/cheap expressions to phantoms. For example, even if a value is residualised, we would
        -- like to push down *some* version of it, hence the h_cheap full of indirections. And even if a concrete term is residualised we'd like a phantom version of it.
        --
        -- Basically the idea of this heap is "stuff we want to make available to push down"
        h_updated_phantoms = M.fromDistinctAscList [(x', lambdaBound) | x' <- M.keys bracketeds_updated] -- TODO: move this into h_cheap_and_phantoms?
        h_inlineable = setToMap lambdaBound gen_xs `M.union` -- The exclusion just makes sure we don't inline explicitly generalised bindings (even phantom ones)
                       (h_not_residualised `M.union`   -- Take any non-residualised bindings from the input heap/stack...
                        h_cheap_and_phantom `M.union`  -- ...failing which, take concrete definitions for cheap heap bindings (even if they are also residualised) or phantom definitions for expensive ones...
                        h_updated_phantoms)            -- ...failing which, take phantoms for things bound by update frames (if supercompilation couldn't turn these into values, GHC is unlikely to get anything good from seeing defs)
        -- Generalising the final proposed floats may cause some bindings that we *thought* were going to be inlined to instead be
        -- residualised. We need to account for this in the Entered information (for work-duplication purposes), and in that we will
        -- also require any FVs of the new residualised things that are bound in the stack to residualise more frames.
        inlineHeapT :: Accumulatable t
                    => (Deeds -> a -> (Deeds, EnteredEnv, b))
                    -> Deeds -> t a -> (Deeds, EnteredEnv, t b)
        inlineHeapT f deeds b = (deeds', entered', b')
          where ((deeds', entered'), b') = mapAccumT (\(deeds, entered) s -> case f deeds s of (deeds, entered', s) -> ((deeds, entered `join` entered'), s)) (deeds, bottom) b
        -- Like inlineHeapT, but removes from the EnteredEnv any mention of the actual binder being analysed, so we push more stuff down
        -- NB: this would be subsumed if we found a way to push an Update frame for such a thing into its Bracketed, since then it wouldn't even be a FV
        inlineHeapWithKey :: (Deeds -> a -> (Deeds, EnteredEnv, b))
                          -> Deeds -> M.Map (Out Var) a -> (Deeds, EnteredEnv, M.Map (Out Var) b)
        inlineHeapWithKey f deeds b = (deeds', entered', b')
          where ((deeds', entered'), b') = M.mapAccumWithKey (\(deeds, entered) x' brack -> case f deeds brack of (deeds, entered', brack) -> ((deeds, entered `join` M.delete x' entered'), brack)) (deeds, bottom) b
        -- Inline what we can of the heap, and compute the Entered information for the resulting thing.
        -- See Note [transitiveInline and entered information] for the story about Entered information.
        --
        -- TODO: I (probably) need to transfer the EnteredEnv safely out of Bracketed things, taking account of bound variables
        -- over the holes. However, I think it's probably safe to ignore that for now because those bound variables will have been
        -- renamed so as not to coincide with any of the heap/stack bindings above that we actually care about the entered information for.
        -- So the outgoing entered envs will have a bit of junk in them, but who cares?
        inlineBracketHeap :: Deeds -> Bracketed (Entered, UnnormalisedState) -> (Deeds, EnteredEnv, Bracketed State)
        inlineBracketHeap = inlineHeapT inline_one
          where
            inline_one deeds (ent, state) = (deeds', mkEnteredEnv ent $ stateFreeVars state', (0, heap', k', in_e'))
              where
                -- The elements of the Bracketed may contain proposed heap bindings gathered from Case frames.
                -- However, we haven't yet claimed deeds for them :-(.
                --
                -- This is OK, because transitiveInline treats the heap of its state "specially". NB: the correctness
                -- of this relies on the fact that only "optional" bindings that shadow something bound above are ever
                -- present in this heap.
                --
                -- We do the normalisation immediately afterwards - we can't do it before transitiveInline, precisely
                -- because of the above hack (normalisation might add bindings to the heap).
                state'@(deeds', heap', k', in_e') = normalise $ transitiveInline h_inlineable (deeds `addStateDeeds` state)
        -- 3c) Actually do the inlining of as much of the heap as possible into the proposed floats
        -- We also take this opportunity to strip out the Entered information from each context.
        (deeds3, entered_focus, bracketed_focus') = inlineBracketHeap deeds2 bracketed_focus
        (deeds4, entered_heap, bracketeds_heap') = inlineHeapWithKey inlineBracketHeap deeds3 bracketeds_heap
        -- 4) Construct the next element of the fixed point process:
        -- a) We should residualise:
        --    * Any x in the extraFvs of a bracketed thing, because we need to be able to refer to it right here, whatever happens
        --    * Anything explicitly generalised
        must_resid_xs = extraFvs bracketed_focus' `S.union` S.unions (map extraFvs (M.elems bracketeds_heap'))
                          `S.union` gen_xs
        -- b) We should *stop* residualising bindings that got Entered only once in the proposal.
        -- I once thought that we should only add a single variable to non_resid_xs' every time around the loop, because I worried
        -- that choosing not to residualise some binding would cause some other bindings to stop being candidates (i.e. would increase
        -- the number of times they were entered).
        --
        -- However, I've revised my opinion and decided to add all candidate variables every time. This is because if we inline a binding
        -- into a context where it is still evaluated Once, anything it refers to is still evaluated Once. So the existing Entered information
        -- does not appear to be invalidated when we decide not to residualise an additional binding.
        entered = entered_focus `join` entered_heap
        safe_not_resid_xs' = -- traceRender ("candidates", onces, must_resid_xs, not_resid_xs, candidates S.\\ not_resid_xs) $
                             safe_not_resid_xs `S.union` (onces S.\\ must_resid_xs)
          where onces = S.filter (\x' -> maybe True isOnce (M.lookup x' entered)) bound_xs
        -- c) We should *start* residualising those bindings we thought were safe to inline but we actually couldn't inline because
        -- deeds issues prevented us from inlining them into *all* contexts that needed them. See also Note [Deeds and splitting]
        --
        -- This should also deal with residualising any InternallyBound stuff that we decided to instead let/lambda bound to e.g. prevent
        -- value duplication, because the names of such bound things will be free in the proposed states.
        deeds_resid_xs' = deeds_resid_xs `S.union` (safe_not_resid_xs `S.intersection` (bracketedFreeVars stateFreeVars bracketed_focus' `S.union`
                                                                                        S.unions (map (bracketedFreeVars stateFreeVars) (M.elems bracketeds_heap'))))
        -- Bound variables: those variables that I am interested in making a decision about whether to residualise or not
        bound_xs = pureHeapBoundVars h `S.union` stackBoundVars (map snd named_k)
        -- Heap full of cheap expressions and any phantom stuff from the input heap but NOT from update frames
        -- Used within the main loop in the process of computing h_inlineable -- see comments there for the full meaning of this stuff.
        extract_cheap_hb hb
          -- We better not try to push down any bindings that would introduce work-duplication issues
          | InternallyBound <- howBound hb
          , Just (_, e) <- heapBindingTerm hb
          = if isCheap (annee e)
            then hb { howBound = howToBindCheap e } -- Use binding heuristics to determine how to refer to the cheap thing
            else hb { heapBindingMeaning = Left Nothing, howBound = LambdaBound } -- GHC is unlikely to get any benefit from seeing the binding sites for non-cheap things
          -- Inline phantom/unfolding stuff verbatim: there is no work duplication issue (the caller would not have created the bindings unless they were safe-for-duplication)
          | otherwise
          = hb
        h_cheap_and_phantom = M.map extract_cheap_hb h
-- | Decide how a cheap heap-bound term should be made available to contexts it
-- is pushed into: inlined outright ('InternallyBound'), residualised with the
-- binding site kept visible ('LetBound'), or provided only as an unfolding
-- ('LambdaBound'). Driven by heuristics about what GHC can exploit.
howToBindCheap :: AnnedTerm -> HowBound
howToBindCheap e
  | not lOCAL_TIEBACKS = InternallyBound
  | dUPLICATE_VALUES_SPLITTER = InternallyBound
  | Value v <- annee e = case v of
    Lambda _ _ -> LetBound -- Heuristic: GHC would lose too much if we cut the connection between the definition and use sites
    Data _ xs | null xs   -> InternallyBound -- Heuristic: GHC will actually statically allocate data with no arguments (this also has the side effect of preventing tons of type errors due to [] getting shared)
              | otherwise -> LambdaBound
    Literal _ -> InternallyBound -- No allocation duplication since GHC will float them (and common them up, if necessary)
    Indirect _ -> InternallyBound -- Always eliminated by GHC
  -- GHC is unlikely to get anything useful from seeing the definition of cheap non-values, so we'll have them as unfoldings
  | otherwise = LambdaBound
-- Note [Deeds and splitting]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Some heap bindings are safe to inline (from a work-duplication perspective), but bad to inline from a deeds perspective
-- because it can prove impossible to get enough deeds to actually inline them. We apply a rather unsubtle (but safe)
-- heuristic to deal with this situation, by monotonically growing a set of variables that we should *not* attempt
-- to inline even though they appear in the safe_not_resid_xs set.
--
-- This really is extremely conservative, but if we're running out of deeds bad things will happen anyway, so who cares?
--
-- If we did not do this, then the bracketed_heap outgoing from splitt may not bind some of the variables free in what
-- it intends to drive, because bracketeds_heap only contains those bindings that splitt decided should be residualised.
-- Note [Residualisation of things referred to in extraFvs]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- We need to residualise stuff like the a and b in this:
-- <a |-> 1, b |-> 2 | (a, b) | >
--
-- But *not* the x in this:
-- < | foo | case foo of \Delta, update x, [_] + 1 >
--
-- How the hell do we accomplish that? The trick is to change how update frames get split. After splitting an
-- update frame for x, I continue splitting the rest of the stack with a oneBracketed rather than a noneBracketed in
-- the focus.
--
-- If I didn't do this, the extraFvs of the bracket would indicate that x was free in a created residual term, so I
-- would be forced to residualise the binding just as if e.g. "Just x" had been in the focus of the state. Since I don't,
-- x doesn't appear in the extraFvs, and I can compute Entered information for it with transitiveInline. If this says x
-- was entered Once in aggregate I can stop residualising the update frame! Beautiful!
--
-- Note [Entered information]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Consider:
-- expensive |-> fact 100
-- a |-> Just expensive
-- b |-> Just expensive
-- (a, b)
--
-- We need to residualise expensive, but what is the mechanism for doing so? Both a and b will get residualised
-- by the rule above because they are FVs of the focus.
--
-- We gather Entered information from each proposed Bracketed to collect Entered information for each free variable.
-- This reports how many times a variable would (in the worst case) get reevaluated if the binding was made available
-- for inlining and thus pushed into that context. So in this case, Entered information for the a and b bindings report
-- that expensive would get evaluated Once in each context, which joins together to make Many times.
--
-- This is the basis on which we choose to residualise expensive.
-- We are going to use this helper function to inline any eligible inlinings to produce the expressions for driving.
--
-- WARNING! We treat bindings in the incoming Heap very specially we assume that we haven't yet claimed any deeds for them
--
-- This is a consequence of the fact that this heap is only non-empty in the splitter for states originating from the
-- branch of some residual case expression.
-- | Inline as many bindings as possible from the supplied heap into the given
-- state, iterating 'heap_worker' to a fixed point of the state's free-variable
-- set.  Bindings we cannot afford (no deeds) are weakened by 'makeFreeForDeeds'
-- rather than dropped outright.
transitiveInline :: PureHeap -- ^ What to inline. We have not claimed deeds for any of this.
                 -> UnnormalisedState -- ^ What to inline into
                 -> UnnormalisedState
transitiveInline init_h_inlineable _state@(deeds, Heap h ids, k, in_e)
    = -- (if not (S.null not_inlined_vs') then traceRender ("transitiveInline: generalise", not_inlined_vs') else id) $
      -- traceRender ("transitiveInline", "had bindings for", pureHeapBoundVars init_h_inlineable, "FVs were", state_fvs, "so inlining", pureHeapBoundVars h') $
      -- Sanity check: after inlining, the resulting state must not mention any
      -- variable for which we have no binding.
      assertRender ("transitiveInline", M.keysSet h_inlineable, pPrintFullUnnormalisedState _state, pPrintFullUnnormalisedState state', stateUncoveredVars state', M.keysSet h', live') (S.null (stateUncoveredVars state'))
      state'
  where
    state' = (deeds', Heap h' ids, k, in_e)
    -- Seed the iteration with the free variables of the state *without* its heap.
    (live', deeds', h') = heap_worker 0 deeds M.empty (stateFreeVars (deeds, Heap M.empty ids, k, in_e)) S.empty

    -- NB: we prefer bindings from h to those from init_h_inlineable if there is any conflict. This is motivated by
    -- the fact that bindings from case branches are usually more informative than e.g. a phantom binding for the scrutinee.
    h_inlineable = h `M.union` init_h_inlineable

    -- This function is rather performance critical: I originally benchmarked transitiveInline as taking 59.2% of runtime for DigitsOfE2!
    heap_worker :: Int -> Deeds -> PureHeap -> FreeVars -> FreeVars -> (FreeVars, Deeds, PureHeap)
    heap_worker n deeds h_output live live_in_let
      = -- traceRender ("go", n, M.keysSet h_inlineable, M.keysSet h_output, fvs) $
        if live == live'
        then (live', deeds', neutraliseLetLives live_in_let' h_output') -- NB: it's important we use the NEW versions of h_output/deeds, because we might have inlined extra stuff even though live hasn't changed!
        else heap_worker (n + 1) deeds' h_output' live' live_in_let'
      where
        -- Each pass only considers bindings that are live but not yet emitted.
        (deeds', h_output', live', live_in_let') = M.foldrWithKey consider_inlining (deeds, h_output, live, live_in_let) ((h_inlineable `restrict` live) M.\\ h_output)

        -- NB: we rely here on the fact that our caller will still be able to fill in bindings for stuff from h_inlineable
        -- even if we choose not to inline it into the State, and that such bindings will not be evaluated until they are
        -- actually demanded (or we could get work duplication by inlining into only *some* Once contexts).
        --
        -- NB: we also rely here on the fact that the original h contains "optional" bindings in the sense that they are shadowed
        -- by something bound above - i.e. it just tells us how to unfold case scrutinees within a case branch.
        consider_inlining x' hb (deeds, h_output, live, live_in_let)
          = (deeds', M.insert x' inline_hb h_output, live `S.union` fvs, if howBound inline_hb == LetBound then live_in_let `S.union` fvs else live_in_let)
          where fvs = heapBindingFreeVars inline_hb
                (deeds', inline_hb) = case claimDeeds deeds (heapBindingSize hb) of -- Do we have enough deeds to inline an unmodified version?
                    Just deeds' -> (deeds', hb)
                    Nothing     -> trace (render $ text "inline-deeds:" <+> pPrint x') (deeds, makeFreeForDeeds hb)

    -- Given a HeapBinding that costs some deeds, return one that costs no deeds (and so can be inlined unconditionally)
    makeFreeForDeeds (HB InternallyBound (Right in_e))
      | not lOCAL_TIEBACKS     = lambdaBound -- Without local tiebacks, we just lose information here
      | termIsCheap (snd in_e) = HB how (Right in_e) -- With local tiebacks, we can keep the RHS (perhaps we can use it in the future?) but have to make it be able to pass it in from the caller somehow
      | otherwise              = lambdaBound -- All non-cheap things
      where how | termIsValue (snd in_e) = LetBound -- Heuristic: only refer to *values* via a free variable, as those are the ones GHC will get some benefit from. TODO: make data/function distinction here?
                | otherwise              = LambdaBound
    makeFreeForDeeds hb = panic "howToBind: should only be needed for internally bound things with a term" (pPrint hb)

    -- Enforce the invariant that anything referred to by a LetBound thing cannot be LambdaBound
    neutraliseLetLives live_in_let = M.mapWithKey (\x' hb -> if howBound hb == LambdaBound && x' `S.member` live_in_let then hb { howBound = LetBound } else hb)
-- TODO: replace with a genuine evaluator. However, think VERY hard about the termination implications of this!
-- I think we can only do it when the splitter is being invoked by a non-whistling invocation of sc.
-- | Rewrite the RHSs of internally-bound heap bindings into "cheap" form,
-- floating the displaced work out as fresh internally-bound bindings.
-- When sPECULATION is enabled the heap is returned untouched — presumably
-- the speculating evaluator has already done this job (NOTE(review): confirm).
cheapifyHeap :: Deeds -> Heap -> (Deeds, Heap)
cheapifyHeap deeds heap | sPECULATION = (deeds, heap)
cheapifyHeap deeds (Heap h (splitIdSupply -> (ids, ids'))) = (deeds', Heap (M.fromList [(x', internallyBound in_e) | (x', in_e) <- floats] `M.union` h') ids')
  where
    -- Thread (deeds, id supply, floated bindings) through every binding in the
    -- heap; only internally-bound bindings with a term RHS are cheapified.
    ((deeds', _, floats), h') = M.mapAccum (\(deeds, ids, floats0) hb -> case hb of HB InternallyBound (Right in_e) -> (case cheapify deeds ids in_e of (deeds, ids, floats1, in_e') -> ((deeds, ids, floats0 ++ floats1), HB InternallyBound (Right in_e'))); _ -> ((deeds, ids, floats0), hb)) (deeds, ids, []) h
-- TODO: make cheapification more powerful (i.e. deal with case bindings)
-- | Cheapify a single term: letrec bindings are floated out (after renaming)
-- and both the bound RHSs and the body are recursively cheapified.  Any other
-- term is already cheap enough and is returned unchanged.
cheapify :: Deeds -> IdSupply -> In AnnedTerm -> (Deeds, IdSupply, [(Out Var, In AnnedTerm)], In AnnedTerm)
cheapify deeds0 ids0 (rn, (annee -> LetRec xes e)) = (deeds3, ids3, zip in_xs in_es' ++ floats0 ++ floats1, in_e')
  where -- Claim back one deed for the letrec constructor we are eliminating
        deeds1 = deeds0 + 1
        ( ids1, rn', unzip -> (in_xs, in_es)) = renameBounds (\_ x' -> x') ids0 rn xes
        (deeds2, ids2, floats0, in_es') = cheapifyMany deeds1 ids1 in_es
        (deeds3, ids3, floats1, in_e') = cheapify deeds2 ids2 (rn', e)
cheapify deeds ids in_e = (deeds, ids, [], in_e)
-- | Cheapify a list of terms, threading the deed pool and id supply
-- left-to-right and concatenating the floated bindings in order.
cheapifyMany :: Deeds -> IdSupply -> [In AnnedTerm] -> (Deeds, IdSupply, [(Out Var, In AnnedTerm)], [In AnnedTerm])
cheapifyMany deeds ids []             = (deeds, ids, [], [])
cheapifyMany deeds ids (in_e:in_rest) = (deeds'', ids'', floats_here ++ floats_rest, in_e' : in_rest')
  where (deeds',  ids',  floats_here, in_e')    = cheapify deeds ids in_e
        (deeds'', ids'', floats_rest, in_rest') = cheapifyMany deeds' ids' in_rest
-- TODO: I have a clever idea. Currently, if we supercompile:
-- D[ < H | if x then y else z | K > ]
--
-- And we don't know anything about y or z we get:
-- if x
-- then K(True/x)[y]
-- else K(False/x)[z]
--
-- This is not too bad, but I suspect that it is common that K doesn't actually depend on x, in which case we could
-- instead produce:
-- let $j it = K[it]
-- in if x then $j y else $j z
--
-- This is an improvement because we get code sharing. Furthermore, $j won't be causing extra allocation because it's
-- pretty much guaranteed to be a let-no-escape.
--
-- The best part is that making this happen isn't really much much work (I think). One option would be to actually add
-- a (JoinPoint Var) stack frame, and introduce them (along with their corresponding bindings) in the splitter. The reduction
-- rule would be:
-- < H | v | $j [_], K > --> < H, x |-> v | e | K >
-- \x.e = deref(H, $j)
--
-- If we said join points were LetBound this would also let us delay inlining them (and hence consuming deeds) until we
-- were sure we could get some benefit from it.
--
-- The major issue is exactly what *should* be bound up into a join point. We could create one per stack frame, but that
-- might lead to quite a lot of code bloat. I think that ideally we want to create one per shared stack suffix: there is no
-- point creating join points that are only used in one place! But how to detect that?? After all, because h-functions can
-- be tied back to at any later point it looks like we should create one for every possible prefix as they might be useful
-- for guys in the future.
-- | Walk down the stack, trying to push each frame into the tail positions of
-- the bracketed hole ('pushStackFrame'); frames that cannot (or may not) be
-- pushed are split off as residual syntax around the hole ('splitStackFrame').
-- Returns the remaining deeds, the bracketed heap bindings arising from split
-- update frames, and the final bracketed focus.
pushStack :: IdSupply
          -> Deeds
          -> [Out Var] -- ^ Scrutinee variables available for binding in case branches
          -> [(Bool, Tagged StackFrame)] -- ^ Stack frames paired with "may we push this one?"
          -> Bracketed (Entered, IdSupply -> UnnormalisedState)
          -> (Deeds,
              M.Map (Out Var) (Bracketed (Entered, IdSupply -> UnnormalisedState)),
              Bracketed (Entered, IdSupply -> UnnormalisedState))
pushStack _   deeds _      []                 bracketed_hole = (deeds, M.empty, bracketed_hole)
pushStack ids deeds scruts ((may_push, kf):k) bracketed_hole = second3 (`M.union` bracketed_heap') $ pushStack ids2 deeds' scruts' k bracketed_hole'
  where
    (ids1, ids2) = splitIdSupply ids

    -- If we have access to hole tail positions, we should try to inline this stack frame into that tail position.
    -- If we do not have access to the tail positions of the hole, all we can do is rebuild a bit of residual syntax around the hole.
    (deeds', (scruts', bracketed_heap', bracketed_hole'))
      = (guard may_push >> fmap (\(deeds', bracketed_hole') -> (deeds', ([], M.empty, bracketed_hole'))) (pushStackFrame kf deeds bracketed_hole)) `orElse`
        (deeds, splitStackFrame ids1 kf scruts bracketed_hole)
-- | Try to push one stack frame into every tail position of the hole.
-- Fails ('Nothing') if the hole exposes no tail positions or if the deed
-- claim inside 'push' fails; otherwise returns the reduced deed pool and
-- the hole with the frame appended to each filler's stack.
pushStackFrame :: Tagged StackFrame
               -> Deeds
               -> Bracketed (Entered, IdSupply -> UnnormalisedState)
               -> Maybe (Deeds, Bracketed (Entered, IdSupply -> UnnormalisedState))
pushStackFrame kf deeds bracketed_hole = do
    -- The pattern match fails (in Maybe) when 'push' could not claim deeds.
    (Just deeds', bracketed_hole') <- modifyTails push bracketed_hole
    return (deeds', bracketed_hole')
  where
    -- Inline parts of the evaluation context into each branch only if we can get that many deeds for duplication
    push fillers = case claimDeeds deeds (stackFrameSize (tagee kf) * (branch_factor - 1)) of -- NB: subtract one because one occurrence is already "paid for". It is OK if the result is negative (i.e. branch_factor 0)!
            Nothing    -> trace (render $ text "pushStack-deeds" <+> pPrint branch_factor) (Nothing, fillers)
            Just deeds -> (Just deeds, map (\(ent, f) -> (ent, third4 (++ [kf]) . f)) fillers)
      where branch_factor = length fillers
-- | Split a single (unpushable) stack frame into residual syntax around the
-- bracketed hole.  Update frames are delegated to 'splitUpdate'; Apply,
-- Scrutinise and PrimApply frames are rebuilt via 'zipBracketeds'.
splitStackFrame :: IdSupply
                -> Tagged StackFrame
                -> [Out Var]
                -> Bracketed (Entered, IdSupply -> UnnormalisedState)
                -> ([Out Var],
                    M.Map (Out Var) (Bracketed (Entered, IdSupply -> UnnormalisedState)),
                    Bracketed (Entered, IdSupply -> UnnormalisedState))
splitStackFrame ids kf scruts bracketed_hole
  | Update x' <- tagee kf = splitUpdate (tag kf) scruts x' bracketed_hole
  | otherwise = ([], M.empty, case tagee kf of
    -- Application to a variable: just rebuild the application around the hole.
    Apply x2' -> zipBracketeds (\[e] -> e `app` x2') (\[fvs] -> S.insert x2' fvs) [id] (\_ -> Nothing) [bracketed_hole]
    Scrutinise (rn, unzip -> (alt_cons, alt_es)) -> -- (if null k_remaining then id else traceRender ("splitStack: FORCED SPLIT", M.keysSet entered_hole, [x' | Tagged _ (Update x') <- k_remaining])) $
                          -- (if not (null k_not_inlined) then traceRender ("splitStack: generalise", k_not_inlined) else id) $
                          zipBracketeds (\(e_hole:es_alts) -> case_ e_hole (alt_cons' `zip` es_alts)) (\(fvs_hole:fvs_alts) -> fvs_hole `S.union` S.unions (zipWith (S.\\) fvs_alts alt_bvss)) (id:[\bvs -> bvs `S.union` alt_bvs | alt_bvs <- alt_bvss]) (\(_tails_hole:tailss_alts) -> liftM concat (sequence tailss_alts)) (bracketed_hole : bracketed_alts)
      where -- 0) Manufacture context identifier
            (ids', state_ids) = splitIdSupply ids
            ctxt_id = idFromSupply state_ids

            -- 1) Construct the floats for each case alternative
            (_alt_ids', alt_rns, alt_cons') = unzip3 $ map (renameAltCon ids' rn) alt_cons
            -- Bind something to the case scrutinee (if possible). This means that:
            --  let y = (\z -> case z of C -> ...) unk
            --  in y
            -- ===>
            --  case x of C -> let unk = C; z = C in ...
            alt_in_es = alt_rns `zip` alt_es
            alt_hs = zipWith4 (\alt_rn alt_con alt_bvs alt_tg -> setToMap lambdaBound alt_bvs `M.union` M.fromList (do { Just scrut_v <- [altConToValue alt_con]; scrut_e <- [annedTerm alt_tg (Value scrut_v)]; scrut <- scruts; return (scrut, HB (howToBindCheap scrut_e) (Right (alt_rn, scrut_e))) })) alt_rns alt_cons alt_bvss (map annedTag alt_es) -- NB: don't need to grab deeds for these just yet, due to the funny contract for transitiveInline
            alt_bvss = map (\alt_con' -> fst $ altConOpenFreeVars alt_con' (S.empty, S.empty)) alt_cons'
            bracketed_alts = zipWith (\alt_h alt_in_e -> oneBracketed (Once ctxt_id, \ids -> (0, Heap alt_h ids, [], alt_in_e))) alt_hs alt_in_es
    -- Primop application: every already-evaluated value and pending argument
    -- becomes its own bracketed sub-state, with the hole in the middle.
    PrimApply pop in_vs in_es -> zipBracketeds (primOp pop) S.unions (repeat id) (\_ -> Nothing) (bracketed_vs ++ bracketed_hole : bracketed_es)
      where -- 0) Manufacture context identifier
            (ids', state_idss) = accumL splitIdSupply ids (length in_es)
            ctxt_ids = map idFromSupply state_idss

            -- 1) Split every value and expression remaining apart
            bracketed_vs = map (splitValue ids' . fmap annee) in_vs
            bracketed_es = zipWith (\ctxt_id in_e -> oneBracketed (Once ctxt_id, \ids -> (0, Heap M.empty ids, [], in_e))) ctxt_ids in_es)
  where
    -- Recover the value a case alternative scrutinee must have matched, if any.
    altConToValue :: AltCon -> Maybe (ValueF ann)
    altConToValue (DataAlt dc xs) = Just $ Data dc xs
    altConToValue (LiteralAlt l)  = Just $ Literal l
    altConToValue (DefaultAlt _)  = Nothing
-- I'm making use of a clever trick: after splitting an update frame for x, instead of continuing to split the stack with a
-- noneBracketed for x in the focus, I split the stack with a oneBracketed for it in the focus.
--
-- You might think this is utterly worthless, since by definition the splitter will never be able to push the actual definition of
-- x into this hole in the bracketed. However, the fact that we do this is *critical* to the algorithm I use to ensure that
-- we can make variables bound by update frames as non-residualised: see Note [Residualisation of things referred to in extraFvs]
-- | Split an Update frame for @x'@: @x'@ becomes an extra scrutinee, the old
-- hole becomes the bracketed heap binding for @x'@, and the new focus is a
-- oneBracketed reference to @x'@ (see the trick described above).
splitUpdate :: Tag -> [Out Var] -> Var -> Bracketed (Entered, IdSupply -> UnnormalisedState)
            -> ([Out Var], M.Map (Out Var) (Bracketed (Entered, IdSupply -> UnnormalisedState)), Bracketed (Entered, IdSupply -> UnnormalisedState))
splitUpdate tg_kf scruts x' bracketed_hole = (x' : scruts, M.singleton x' bracketed_hole,
                                              oneBracketed (Once ctxt_id, \ids -> (0, Heap M.empty ids, [], (mkIdentityRenaming [x'], annedTerm tg_kf (Var x')))))
  where
    -- The context identifier is derived from the updated variable's own name.
    -- NOTE(review): 'fromJust' assumes every update-frame binder carries an id
    -- — this is an invariant of the evaluator, presumably; confirm.
    ctxt_id = fromJust (name_id x')
-- | Split a value in the focus.  A lambda body becomes a oneBracketed state
-- entered 'Many' (the lambda may be applied any number of times) with the
-- binder lambda-bound in its heap; any other value is residualised directly.
splitValue :: IdSupply -> In AnnedValue -> Bracketed (Entered, IdSupply -> UnnormalisedState)
splitValue ids (rn, Lambda x e) = zipBracketeds (\[e'] -> lambda x' e') (\[fvs'] -> fvs') [S.insert x'] (\_ -> Nothing) [oneBracketed (Many, \ids -> (0, Heap (M.singleton x' lambdaBound) ids, [], (rn', e)))]
  where (_ids', rn', x') = renameBinder ids rn x
splitValue ids in_v = noneBracketed (value (detagAnnedValue' $ renameIn (renameAnnedValue' ids) in_v))
-- | Split the focus when it is a question (a variable, residualised as-is)
-- or an answer (a value, delegated to 'splitValue').
splitQA :: IdSupply -> In QA -> Bracketed (Entered, IdSupply -> UnnormalisedState)
splitQA ids (rn, qa) = case qa of
    Question x -> noneBracketed (var (rename rn x))
    Answer v   -> splitValue ids (rn, v)
|
osa1/chsc
|
Supercompile/Split.hs
|
bsd-3-clause
| 60,122
| 1
| 27
| 14,408
| 10,410
| 5,800
| 4,610
| -1
| -1
|
{-# LINE 1 "GHC.Event.Manager.hs" #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE BangPatterns
, CPP
, ExistentialQuantification
, NoImplicitPrelude
, RecordWildCards
, TypeSynonymInstances
, FlexibleInstances
#-}
-- |
-- The event manager supports event notification on fds. Each fd may
-- have multiple callbacks registered, each listening for a different
-- set of events. Registrations may be automatically deactivated after
-- the occurrence of an event ("one-shot mode") or active until
-- explicitly unregistered.
--
-- If an fd has only one-shot registrations then we use one-shot
-- polling if available. Otherwise we use multi-shot polling.
module GHC.Event.Manager
( -- * Types
EventManager
-- * Creation
, new
, newWith
, newDefaultBackend
-- * Running
, finished
, loop
, step
, shutdown
, release
, cleanup
, wakeManager
-- * State
, callbackTableVar
, emControl
-- * Registering interest in I/O events
, Lifetime (..)
, Event
, evtRead
, evtWrite
, IOCallback
, FdKey(keyFd)
, FdData
, registerFd
, unregisterFd_
, unregisterFd
, closeFd
, closeFd_
) where
------------------------------------------------------------------------
-- Imports
import Control.Concurrent.MVar (MVar, newMVar, putMVar,
tryPutMVar, takeMVar, withMVar)
import Control.Exception (onException)
import Data.Bits ((.&.))
import Data.Foldable (forM_)
import Data.Functor (void)
import Data.IORef (IORef, atomicModifyIORef', mkWeakIORef, newIORef, readIORef,
writeIORef)
import Data.Maybe (maybe)
import Data.OldList (partition)
import GHC.Arr (Array, (!), listArray)
import GHC.Base
import GHC.Conc.Sync (yield)
import GHC.List (filter, replicate)
import GHC.Num (Num(..))
import GHC.Real (fromIntegral)
import GHC.Show (Show(..))
import GHC.Event.Control
import GHC.Event.IntTable (IntTable)
import GHC.Event.Internal (Backend, Event, evtClose, evtRead, evtWrite,
Lifetime(..), EventLifetime, Timeout(..))
import GHC.Event.Unique (Unique, UniqueSource, newSource, newUnique)
import System.Posix.Types (Fd)
import qualified GHC.Event.IntTable as IT
import qualified GHC.Event.Internal as I
import qualified GHC.Event.EPoll as EPoll
------------------------------------------------------------------------
-- Types
-- | One callback registration on a file descriptor.
data FdData = FdData {
      fdKey       :: {-# UNPACK #-} !FdKey          -- ^ Cookie identifying this registration
    , fdEvents    :: {-# UNPACK #-} !EventLifetime  -- ^ Events and lifetime registered for
    , _fdCallback :: !IOCallback                    -- ^ Callback to run when an event fires
    }
-- | A file descriptor registration cookie.
data FdKey = FdKey {
      keyFd     :: {-# UNPACK #-} !Fd      -- ^ The registered file descriptor
    , keyUnique :: {-# UNPACK #-} !Unique  -- ^ Distinguishes registrations on the same fd
    } deriving (Eq, Show)
-- | Callback invoked on I/O events.  The 'FdKey' is the cookie returned by
-- 'registerFd'; the 'Event' describes which events actually fired.
type IOCallback = FdKey -> Event -> IO ()
-- | Lifecycle state of an event manager: 'Created' until its loop starts,
-- 'Running' while polling, 'Dying'/'Releasing' when shutdown or lock handoff
-- has been requested, and 'Finished' once resources are closed.
data State = Created
           | Running
           | Dying
           | Releasing
           | Finished
             deriving (Eq, Show)
-- | The event manager state.
data EventManager = EventManager
    { emBackend      :: !Backend                 -- ^ Polling backend (epoll in this build)
    , emFds          :: {-# UNPACK #-} !(Array Int (MVar (IntTable [FdData])))
      -- ^ Hashed buckets of per-fd registrations; see 'callbackTableVar'
    , emState        :: {-# UNPACK #-} !(IORef State)   -- ^ Current lifecycle 'State'
    , emUniqueSource :: {-# UNPACK #-} !UniqueSource    -- ^ Source of registration cookies
    , emControl      :: {-# UNPACK #-} !Control         -- ^ Control/wakeup pipes
    , emLock         :: {-# UNPACK #-} !(MVar ())       -- ^ Held by the thread running 'loop'
    }
-- Number of buckets in 'emFds'.  Must be a power of 2 so that 'hashFd'
-- can select a bucket with a bit mask instead of 'mod'.
callbackArraySize :: Int
callbackArraySize = 32
-- | Map a file descriptor to its slot in the callback table.  Because
-- 'callbackArraySize' is a power of two, masking with (size - 1) is the
-- fd taken modulo the table size.
hashFd :: Fd -> Int
hashFd = (.&. (callbackArraySize - 1)) . fromIntegral
{-# INLINE hashFd #-}
-- | The 'MVar'-protected callback-table bucket responsible for the given fd.
callbackTableVar :: EventManager -> Fd -> MVar (IntTable [FdData])
callbackTableVar mgr fd = emFds mgr ! hashFd fd
{-# INLINE callbackTableVar #-}
-- | Whether one-shot backend arming may be used.  Always 'True' in this
-- build; NOTE(review): the Mac workaround comment near 'wakeManager'
-- suggests some builds disable one-shot — confirm for other platforms.
haveOneShot :: Bool
{-# INLINE haveOneShot #-}
haveOneShot = True
------------------------------------------------------------------------
-- Creation
-- | React to a message on one of the manager's control fds.  Reading the
-- message drains the pipe; only a die request changes state — a plain
-- wakeup (and anything else) needs no further action.
handleControlEvent :: EventManager -> Fd -> Event -> IO ()
handleControlEvent mgr fd _evt =
    readControlMessage (emControl mgr) fd >>= \message ->
        case message of
          CMsgDie -> writeIORef (emState mgr) Finished
          _       -> return ()
-- | Create the default polling backend for this platform (epoll here).
newDefaultBackend :: IO Backend
newDefaultBackend = EPoll.new
-- | Create a new event manager backed by the platform's default backend.
new :: IO EventManager
new = newWith =<< newDefaultBackend
-- | Create a new 'EventManager' with the given polling backend.
-- Allocates the hashed callback table, control pipes, state ref and loop
-- lock, and registers the control fds so the loop can be interrupted.
newWith :: Backend -> IO EventManager
newWith be = do
  iofds <- fmap (listArray (0, callbackArraySize-1)) $
           replicateM callbackArraySize (newMVar =<< IT.new 8)
  ctrl <- newControl False
  state <- newIORef Created
  us <- newSource
  -- Finalizer: if the state IORef becomes unreachable before the manager
  -- was shut down normally, close the backend and control pipes ourselves.
  _ <- mkWeakIORef state $ do
               st <- atomicModifyIORef' state $ \s -> (Finished, s)
               when (st /= Finished) $ do
                 I.delete be
                 closeControl ctrl
  lockVar <- newMVar ()
  let mgr = EventManager { emBackend = be
                         , emFds = iofds
                         , emState = state
                         , emUniqueSource = us
                         , emControl = ctrl
                         , emLock = lockVar
                         }
  -- Watch the control and wakeup pipe read ends so 'loop' reacts to them.
  registerControlFd mgr (controlReadFd ctrl) evtRead
  registerControlFd mgr (wakeupReadFd ctrl) evtRead
  return mgr
  where
    replicateM n x = sequence (replicate n x)
-- | Run a backend action that reports success as 'Bool' and abort with a
-- descriptive error if it failed (e.g. the fd kind is unsupported by the
-- polling backend).
failOnInvalidFile :: String -> Fd -> IO Bool -> IO ()
failOnInvalidFile loc fd m = do
  succeeded <- m
  if succeeded
    then return ()
    else errorWithoutStackTrace
           ("Failed while attempting to modify registration of file " ++
            show fd ++ " at location " ++ loc)
-- Register one of the manager's internal control fds with the backend,
-- aborting loudly if the backend rejects the fd.
registerControlFd :: EventManager -> Fd -> Event -> IO ()
registerControlFd mgr fd evs =
  failOnInvalidFile "registerControlFd" fd $
  I.modifyFd (emBackend mgr) fd mempty evs
-- | Asynchronously shuts down the event manager, if running.
shutdown :: EventManager -> IO ()
shutdown mgr = do
  state <- atomicModifyIORef' (emState mgr) $ \s -> (Dying, s)
  -- Only a running loop needs its blocking poll interrupted; in any other
  -- state there is no poll in flight.
  when (state == Running) $ sendDie (emControl mgr)
-- | Asynchronously tell the thread executing the event
-- manager loop to exit.
release :: EventManager -> IO ()
release EventManager{..} = do
  state <- atomicModifyIORef' emState $ \s -> (Releasing, s)
  -- A wakeup (not a die) suffices: the loop sees 'Releasing', hands back
  -- 'emLock' and exits without closing any resources.
  when (state == Running) $ sendWakeup emControl
-- | Has this event manager been fully torn down?
finished :: EventManager -> IO Bool
finished mgr = fmap (== Finished) (readIORef (emState mgr))
-- | Mark the manager 'Finished' and release its resources: the loop lock
-- (so a waiting 'loop' caller is not left blocked), the polling backend,
-- and the control pipes.
cleanup :: EventManager -> IO ()
cleanup EventManager{..} = do
  writeIORef emState Finished
  void $ tryPutMVar emLock ()
  I.delete emBackend
  closeControl emControl
------------------------------------------------------------------------
-- Event loop
-- | Start handling events.  This function loops until told to stop,
-- using 'shutdown'.
--
-- /Note/: This loop can only be run once per 'EventManager', as it
-- closes all of its control resources when it finishes.
loop :: EventManager -> IO ()
loop mgr@EventManager{..} = do
  -- Take the loop lock: only one thread may run the poll loop at a time.
  void $ takeMVar emLock
  state <- atomicModifyIORef' emState $ \s -> case s of
    Created   -> (Running, s)
    Releasing -> (Running, s)
    _         -> (s, s)
  case state of
    Created   -> go `onException` cleanup mgr
    Releasing -> go `onException` cleanup mgr
    Dying     -> cleanup mgr
    -- While a poll loop is never forked when the event manager is in the
    -- 'Finished' state, its state could read 'Finished' once the new thread
    -- actually runs. This is not an error, just an unfortunate race condition
    -- in Thread.restartPollLoop. See #8235
    Finished  -> return ()
    _         -> do cleanup mgr
                    errorWithoutStackTrace $ "GHC.Event.Manager.loop: state is already " ++
                        show state
  where
    -- Poll repeatedly while 'Running'; on 'Releasing' hand the lock to the
    -- next loop owner; any other state tears everything down.
    go = do state <- step mgr
            case state of
              Running   -> yield >> go
              Releasing -> putMVar emLock ()
              _         -> cleanup mgr
-- | To make a step, we first do a non-blocking poll, in case
-- there are already events ready to handle. This improves performance
-- because we can make an unsafe foreign C call, thereby avoiding
-- forcing the current Task to release the Capability and forcing a context switch.
-- If the poll fails to find events, we yield, putting the poll loop thread at
-- end of the Haskell run queue. When it comes back around, we do one more
-- non-blocking poll, in case we get lucky and have ready events.
-- If that also returns no events, then we do a blocking poll.
step :: EventManager -> IO State
step mgr@EventManager{..} = do
  waitForIO
  state <- readIORef emState
  -- Force the state before returning so callers never retain the IORef read.
  state `seq` return state
  where
    waitForIO = do
      n1 <- I.poll emBackend Nothing (onFdEvent mgr)
      when (n1 <= 0) $ do
        yield
        n2 <- I.poll emBackend Nothing (onFdEvent mgr)
        when (n2 <= 0) $ do
          _ <- I.poll emBackend (Just Forever) (onFdEvent mgr)
          return ()
------------------------------------------------------------------------
-- Registering interest in I/O events
-- | Register interest in the given events, without waking the event
-- manager thread. The 'Bool' return value indicates whether the
-- event manager ought to be woken.
--
-- Note that the event manager is generally implemented in terms of the
-- platform's @select@ or @epoll@ system call, which tend to vary in
-- what sort of fds are permitted. For instance, waiting on regular files
-- is not allowed on many platforms.
registerFd_ :: EventManager -> IOCallback -> Fd -> Event -> Lifetime
            -> IO (FdKey, Bool)
registerFd_ mgr@(EventManager{..}) cb fd evs lt = do
  u <- newUnique emUniqueSource
  let fd'  = fromIntegral fd
      reg  = FdKey fd u
      el   = I.eventLifetime evs lt
      !fdd = FdData reg el cb
  (modify,ok) <- withMVar (callbackTableVar mgr fd) $ \tbl -> do
    -- Optimistically insert the registration, then try to arm the backend;
    -- on backend failure we roll back with IT.reset.
    oldFdd <- IT.insertWith (++) fd' [fdd] tbl
    let prevEvs :: EventLifetime
        prevEvs = maybe mempty eventsOf oldFdd
        el' :: EventLifetime
        el' = prevEvs `mappend` el
    case I.elLifetime el' of
      -- All registrations want one-shot semantics and this is supported
      OneShot | haveOneShot -> do
        ok <- I.modifyFdOnce emBackend fd (I.elEvent el')
        if ok
          then return (False, True)
          else IT.reset fd' oldFdd tbl >> return (False, False)
      -- We don't want or don't support one-shot semantics
      _ -> do
        -- Only touch the backend when the combined event set changed.
        let modify = prevEvs /= el'
        ok <- if modify
              then let newEvs = I.elEvent el'
                       oldEvs = I.elEvent prevEvs
                   in I.modifyFd emBackend fd oldEvs newEvs
              else return True
        if ok
          then return (modify, True)
          else IT.reset fd' oldFdd tbl >> return (False, False)
  -- this simulates behavior of old IO manager:
  -- i.e. just call the callback if the registration fails.
  when (not ok) (cb reg evs)
  return (reg,modify)
{-# INLINE registerFd_ #-}
-- | @registerFd mgr cb fd evs lt@ registers interest in the events @evs@
-- on the file descriptor @fd@ for lifetime @lt@. @cb@ is called for
-- each event that occurs. Returns a cookie that can be handed to
-- 'unregisterFd'.
registerFd :: EventManager -> IOCallback -> Fd -> Event -> Lifetime -> IO FdKey
registerFd mgr cb fd evs lt = do
  (r, wake) <- registerFd_ mgr cb fd evs lt
  -- Wake the polling thread so the new registration takes effect promptly.
  when wake $ wakeManager mgr
  return r
{-# INLINE registerFd #-}
{-
  Building GHC with parallel IO manager on Mac freezes when
  compiling the dph libraries in the phase 2. As workaround, we
  don't use oneshot and we wake up an IO manager on Mac every time
  when we register an event.

  For more information, please read:
     http://ghc.haskell.org/trac/ghc/ticket/7651
-}
-- | Wake up the event manager.  A no-op in this build — NOTE(review):
-- presumably the epoll backend does not need an explicit wakeup here,
-- unlike the Mac configuration described above; confirm.
wakeManager :: EventManager -> IO ()
wakeManager _ = return ()
-- | Combine the event lifetimes of a list of registrations.  The
-- single-element case is kept separate so it avoids going through
-- 'mconcat'/'mempty'.
eventsOf :: [FdData] -> EventLifetime
eventsOf registrations =
    case registrations of
      [only] -> fdEvents only
      _      -> mconcat (map fdEvents registrations)
-- | Drop a previous file descriptor registration, without waking the
-- event manager thread. The return value indicates whether the event
-- manager ought to be woken.
unregisterFd_ :: EventManager -> FdKey -> IO Bool
unregisterFd_ mgr@(EventManager{..}) (FdKey fd u) =
  withMVar (callbackTableVar mgr fd) $ \tbl -> do
    -- Keep only registrations whose unique differs from the cookie's;
    -- an emptied list removes the table entry entirely (nullToNothing).
    let dropReg = nullToNothing . filter ((/= u) . keyUnique . fdKey)
        fd' = fromIntegral fd
        -- Pair the event set before removal with the event set after it.
        pairEvents :: [FdData] -> IO (EventLifetime, EventLifetime)
        pairEvents prev = do
          r <- maybe mempty eventsOf `fmap` IT.lookup fd' tbl
          return (eventsOf prev, r)
    (oldEls, newEls) <- IT.updateWith dropReg fd' tbl >>=
                        maybe (return (mempty, mempty)) pairEvents
    -- Only touch the backend if the effective event set actually changed.
    let modify = oldEls /= newEls
    when modify $ failOnInvalidFile "unregisterFd_" fd $
      case I.elLifetime newEls of
        OneShot | I.elEvent newEls /= mempty, haveOneShot ->
          I.modifyFdOnce emBackend fd (I.elEvent newEls)
        _ ->
          I.modifyFd emBackend fd (I.elEvent oldEls) (I.elEvent newEls)
    return modify
-- | Drop a previous file descriptor registration, waking the event
-- manager thread if the backend registration changed.
unregisterFd :: EventManager -> FdKey -> IO ()
unregisterFd mgr reg =
    unregisterFd_ mgr reg >>= \shouldWake ->
        when shouldWake (wakeManager mgr)
-- | Close a file descriptor in a race-safe way.
-- The fd is closed while the callback-table bucket lock is held, so no
-- concurrent registration can observe a half-closed fd; afterwards every
-- displaced callback is invoked with 'evtClose' added to its events.
closeFd :: EventManager -> (Fd -> IO ()) -> Fd -> IO ()
closeFd mgr close fd = do
  fds <- withMVar (callbackTableVar mgr fd) $ \tbl -> do
    prev <- IT.delete (fromIntegral fd) tbl
    case prev of
      Nothing  -> close fd >> return []
      Just fds -> do
        let oldEls = eventsOf fds
        -- Deregister from the backend before closing, then wake the poll loop.
        when (I.elEvent oldEls /= mempty) $ do
          _ <- I.modifyFd (emBackend mgr) fd (I.elEvent oldEls) mempty
          wakeManager mgr
        close fd
        return fds
  -- Notify the owners of the dropped registrations outside the lock.
  forM_ fds $ \(FdData reg el cb) -> cb reg (I.elEvent el `mappend` evtClose)
-- | Close a file descriptor in a race-safe way.
-- It assumes the caller will update the callback tables and that the caller
-- holds the callback table lock for the fd. It must hold this lock because
-- this command executes a backend command on the fd.
-- Returns an action that invokes the displaced callbacks (with 'evtClose'
-- added), to be run by the caller after releasing the lock.
closeFd_ :: EventManager
         -> IntTable [FdData]
         -> Fd
         -> IO (IO ())
closeFd_ mgr tbl fd = do
  prev <- IT.delete (fromIntegral fd) tbl
  case prev of
    Nothing  -> return (return ())
    Just fds -> do
      let oldEls = eventsOf fds
      -- NOTE(review): this compares the whole EventLifetime to mempty,
      -- whereas 'closeFd' compares only the Event part (I.elEvent oldEls).
      -- Presumably equivalent in practice — confirm this is intentional.
      when (oldEls /= mempty) $ do
        _ <- I.modifyFd (emBackend mgr) fd (I.elEvent oldEls) mempty
        wakeManager mgr
      return $
        forM_ fds $ \(FdData reg el cb) ->
          cb reg (I.elEvent el `mappend` evtClose)
------------------------------------------------------------------------
-- Utilities
-- | Call the callbacks corresponding to the given file descriptor.
-- Control/wakeup fds are handled specially; for ordinary fds the triggered
-- registrations are removed from the table (multishot ones reinserted),
-- the backend is re-armed as needed, and the callbacks run outside the lock.
onFdEvent :: EventManager -> Fd -> Event -> IO ()
onFdEvent mgr fd evs
  | fd == controlReadFd (emControl mgr) || fd == wakeupReadFd (emControl mgr) =
    handleControlEvent mgr fd evs
  | otherwise = do
    fdds <- withMVar (callbackTableVar mgr fd) $ \tbl ->
      IT.delete (fromIntegral fd) tbl >>= maybe (return []) (selectCallbacks tbl)
    forM_ fdds $ \(FdData reg _ cb) -> cb reg evs
  where
    -- | Here we look through the list of registrations for the fd of interest
    -- and sort out which match the events that were triggered. We,
    --
    --   1. re-arm the fd as appropriate
    --   2. reinsert registrations that weren't triggered and multishot
    --      registrations
    --   3. return a list containing the callbacks that should be invoked.
    selectCallbacks :: IntTable [FdData] -> [FdData] -> IO [FdData]
    selectCallbacks tbl fdds = do
      let -- figure out which registrations have been triggered
          matches :: FdData -> Bool
          matches fd' = evs `I.eventIs` I.elEvent (fdEvents fd')
          (triggered, notTriggered) = partition matches fdds

          -- sort out which registrations we need to retain
          isMultishot :: FdData -> Bool
          isMultishot fd' = I.elLifetime (fdEvents fd') == MultiShot
          saved = notTriggered ++ filter isMultishot triggered

          savedEls = eventsOf saved
          allEls = eventsOf fdds

      -- Reinsert multishot registrations.
      -- We deleted the table entry for this fd above so there isn't a preexisting entry
      _ <- IT.insertWith (\_ _ -> saved) (fromIntegral fd) saved tbl

      case I.elLifetime allEls of
        -- we previously armed the fd for multiple shots, no need to rearm
        MultiShot | allEls == savedEls ->
          return ()
        -- either we previously registered for one shot or the
        -- events of interest have changed, we must re-arm
        _ ->
          case I.elLifetime savedEls of
            OneShot | haveOneShot ->
              -- if there are no saved events and we registered with one-shot
              -- semantics then there is no need to re-arm
              unless (OneShot == I.elLifetime allEls
                      && mempty == I.elEvent savedEls) $ do
                void $ I.modifyFdOnce (emBackend mgr) fd (I.elEvent savedEls)
            _ ->
              -- we need to re-arm with multi-shot semantics
              void $ I.modifyFd (emBackend mgr) fd
                       (I.elEvent allEls) (I.elEvent savedEls)

      return triggered
-- | Wrap a list in 'Just', collapsing the empty list to 'Nothing'.
nullToNothing :: [a] -> Maybe [a]
nullToNothing xs = if null xs then Nothing else Just xs
-- | Run the action only when the condition is 'False'
-- (local copy of 'Control.Monad.unless').
unless :: Monad m => Bool -> m () -> m ()
unless cond act = if cond then pure () else act
|
phischu/fragnix
|
builtins/base/GHC.Event.Manager.hs
|
bsd-3-clause
| 17,536
| 0
| 24
| 4,769
| 4,161
| 2,153
| 2,008
| 330
| 9
|
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Main bdo server.
module Main where
import Bdo
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Aeson
import Data.List
import Data.Maybe
import Data.Monoid
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
import Http
import Network
import Network.URL
import Paths_bdo
import Prelude hiding (catch)
import System.Environment
import System.Exit (die)
import System.IO
import Text.Read (readMaybe)
-- | Main entry point.
--
-- Usage: @bdo PORT@.  Reads the listen port from the first command-line
-- argument, switches stdout to unbuffered so output appears promptly,
-- and starts the bdo server.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (portStr:_)
      -- Parse defensively: the original partial pattern and 'read' crashed
      -- with unhelpful errors on missing or malformed arguments.
      | Just port <- readMaybe portStr -> do
          hSetBuffering stdout NoBuffering
          startServer port
      | otherwise -> die ("bdo: invalid port: " ++ portStr)
    [] -> die "usage: bdo PORT"
|
chrisdone/bdo
|
src/Main.hs
|
bsd-3-clause
| 798
| 0
| 9
| 242
| 158
| 96
| 62
| 26
| 1
|
-- | 256 bit Skein as a stream cipher, as specified in the Skein 1.3 paper.
module Crypto.Threefish.Skein.StreamCipher (
Key256, Nonce256, Block256,
encrypt, decrypt, toBlock, fromBlock
) where
import Crypto.Threefish.Skein (Nonce256)
import Crypto.Threefish.UBI
import Crypto.Threefish.Threefish256
import Crypto.Threefish
import Crypto.Threefish.Skein.Internal
import Data.ByteString.Unsafe
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.Marshal.Alloc
import System.IO.Unsafe
import Data.Bits (xor)
-- Initialise a Skein-256 context for stream-cipher use: the key goes in via
-- the MAC-style init call and the nonce is fed in as a Nonce UBI block.
-- NOTE(review): 0xffffffffffffffff presumably requests unlimited output
-- length — confirm against the Skein 1.3 paper / skein256_init contract.
-- unsafePerformIO is assumed safe here because the result depends only on
-- the (pure) key and nonce bytes — TODO confirm the FFI calls never observe
-- external state.
init256 :: Key256 -> Nonce256 -> Skein256Ctx
init256 (Block256 k) (Block256 n) =
    unsafePerformIO $ do
      c <- mallocForeignPtrBytes 64
      withForeignPtr c $ \ctx -> do
        unsafeUseAsCString k $ \key -> do
          unsafeUseAsCString n $ \nonce -> do
            skein256_init ctx (castPtr key) 0xffffffffffffffff
            skein256_update ctx 3 (type2int Nonce) len (castPtr nonce)
            return (Skein256Ctx c)
  where
    len = fromIntegral $ BS.length n
-- Produce the (infinite) keystream as lazily generated 1024-byte chunks.
-- NOTE(review): the counter @n@ appears to index 32-byte output blocks, so
-- the range (n, n+32) yields 32*32 = 1024 bytes per call — confirm against
-- skein256_output's contract.
stream256 :: Skein256Ctx -> [BS.ByteString]
stream256 (Skein256Ctx c) =
    unsafePerformIO $ withForeignPtr c $ go 0
  where
    -- unsafeInterleaveIO defers each chunk's generation until it is demanded,
    -- which is what makes the infinite list usable.
    go n ctx = unsafeInterleaveIO $ do
      bs <- allocaBytes 1024 $ \ptr -> do
        skein256_output ctx n (n+32) ptr
        BS.packCStringLen (castPtr ptr, 1024)
      bss <- go (n+32) ctx
      return $ bs : bss
-- | The keystream for a given key and nonce: an initialised context fed
-- through 'stream256'.
keystream256 :: Key256 -> Nonce256 -> [BS.ByteString]
keystream256 key nonce = stream256 ctx
  where ctx = init256 key nonce
-- | Encrypt a lazy ByteString using 256 bit Skein as a stream cipher.
-- The plaintext is consumed in 1024-byte chunks, each XORed with the
-- matching keystream chunk; laziness means only demanded output is computed.
encrypt :: Key256 -> Nonce256 -> BSL.ByteString -> BSL.ByteString
encrypt k n plaintext =
    BSL.fromChunks $ go (keystream256 k n) plaintext
  where
    -- unsafePerformIO/unsafeInterleaveIO keep the chunk production lazy;
    -- the computation itself is pure given the keystream and message.
    go (ks:kss) msg = unsafePerformIO . unsafeInterleaveIO $ do
      case BSL.splitAt 1024 msg of
        (chunk, rest)
          | BSL.null chunk ->
            return []
          | otherwise ->
            let chunk' = BSL.toStrict chunk
            in return $ (BS.pack $ BS.zipWith xor ks chunk') : go kss rest
    go _ _ =
      error "The key stream is infinite, so this will never happen."
-- | Decrypt a lazy ByteString using 256 bit Skein as a stream cipher.
-- Encryption and decryption are the same operation for a stream cipher, but
-- we may want to have a function called decrypt for clarity.
decrypt :: Key256 -> Nonce256 -> BSL.ByteString -> BSL.ByteString
decrypt = encrypt
|
valderman/threefish
|
Crypto/Threefish/Skein/StreamCipher.hs
|
bsd-3-clause
| 2,378
| 0
| 22
| 543
| 687
| 356
| 331
| 53
| 2
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.AmountOfMoney.AR.Tests
( tests
) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.AmountOfMoney.AR.Corpus
import Duckling.Dimensions.Types
import Duckling.Testing.Asserts
-- | Corpus-driven test tree for the Arabic (AR) AmountOfMoney dimension.
tests :: TestTree
tests = testGroup "AR Tests"
  [ makeCorpusTest [Seal AmountOfMoney] corpus
  ]
|
facebookincubator/duckling
|
tests/Duckling/AmountOfMoney/AR/Tests.hs
|
bsd-3-clause
| 524
| 0
| 9
| 80
| 79
| 50
| 29
| 11
| 1
|
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_swapchain_mutable_format - device extension
--
-- == VK_KHR_swapchain_mutable_format
--
-- [__Name String__]
-- @VK_KHR_swapchain_mutable_format@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 201
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_swapchain@
--
-- - Requires @VK_KHR_maintenance2@
--
-- - Requires @VK_KHR_image_format_list@
--
-- [__Contact__]
--
-- - Daniel Rakos
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_swapchain_mutable_format] @drakos-arm%0A<<Here describe the issue or question you have about the VK_KHR_swapchain_mutable_format extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-03-28
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Jason Ekstrand, Intel
--
-- - Jan-Harald Fredriksen, ARM
--
-- - Jesse Hall, Google
--
-- - Daniel Rakos, AMD
--
-- - Ray Smith, ARM
--
-- == Description
--
-- This extension allows processing of swapchain images as different
-- formats to that used by the window system, which is particularly useful
-- for switching between sRGB and linear RGB formats.
--
-- It adds a new swapchain creation flag that enables creating image views
-- from presentable images with a different format than the one used to
-- create the swapchain.
--
-- == New Enum Constants
--
-- - 'KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME'
--
-- - 'KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Extensions.VK_KHR_swapchain.SwapchainCreateFlagBitsKHR':
--
-- - 'Vulkan.Extensions.VK_KHR_swapchain.SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR'
--
-- == Issues
--
-- 1) Are there any new capabilities needed?
--
-- __RESOLVED__: No. It is expected that all implementations exposing this
-- extension support swapchain image format mutability.
--
-- 2) Do we need a separate @VK_SWAPCHAIN_CREATE_EXTENDED_USAGE_BIT_KHR@?
--
-- __RESOLVED__: No. This extension requires @VK_KHR_maintenance2@ and
-- presentable images of swapchains created with
-- 'Vulkan.Extensions.VK_KHR_swapchain.SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR'
-- are created internally in a way equivalent to specifying both
-- 'Vulkan.Core10.Enums.ImageCreateFlagBits.IMAGE_CREATE_MUTABLE_FORMAT_BIT'
-- and
-- 'Vulkan.Extensions.VK_KHR_maintenance2.IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR'.
--
-- 3) Do we need a separate structure to allow specifying an image format
-- list for swapchains?
--
-- __RESOLVED__: No. We simply use the same
-- 'Vulkan.Extensions.VK_KHR_image_format_list.ImageFormatListCreateInfoKHR'
-- structure introduced by @VK_KHR_image_format_list@. The structure is
-- required to be included in the @pNext@ chain of
-- 'Vulkan.Extensions.VK_KHR_swapchain.SwapchainCreateInfoKHR' for
-- swapchains created with
-- 'Vulkan.Extensions.VK_KHR_swapchain.SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR'.
--
-- == Version History
--
-- - Revision 1, 2018-03-28 (Daniel Rakos)
--
-- - Internal revisions.
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_swapchain_mutable_format Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_swapchain_mutable_format ( KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION
, pattern KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION
, KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME
, pattern KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME
, SwapchainCreateFlagBitsKHR(..)
, SwapchainCreateFlagsKHR
) where
import Data.String (IsString)
import Vulkan.Extensions.VK_KHR_swapchain (SwapchainCreateFlagBitsKHR(..))
import Vulkan.Extensions.VK_KHR_swapchain (SwapchainCreateFlagsKHR)
-- Extension revision number, mirrored from the Vulkan registry.
type KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION"
pattern KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION = 1
-- Canonical extension name string, mirrored from the Vulkan registry.
type KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME = "VK_KHR_swapchain_mutable_format"
-- No documentation found for TopLevel "VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME"
pattern KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME = "VK_KHR_swapchain_mutable_format"
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_KHR_swapchain_mutable_format.hs
|
bsd-3-clause
| 5,040
| 0
| 8
| 989
| 267
| 209
| 58
| -1
| -1
|
module Main where
import System.Random.MWC
import Control.Monad.Primitive
-- | Vocabulary used to generate random sentences.  Explicit type
-- signatures added (top-level bindings should always carry them).
nouns :: [String]
nouns = ["man", "ball", "woman", "table"]

verbs :: [String]
verbs = ["hit", "took", "saw", "liked"]

articles :: [String]
articles = ["a", "the"]
-- Grammatical categories, each a newtype around its surface string so
-- the generators below cannot mix them up.
newtype Noun = Noun String
  deriving Eq
newtype Article = Article String
  deriving Eq
newtype Verb = Verb String
  deriving Eq
-- 'Show' renders the wrapped word without quotes, so sentences print
-- as plain text.
instance Show Noun where
  show (Noun s) = s
instance Show Article where
  show (Article s) = s
instance Show Verb where
  show (Verb s) = s
-- A noun phrase is an article followed by a noun.
newtype NounPhrase = NounPhrase (Article,Noun)
  deriving (Eq)
instance Show NounPhrase where
  show (NounPhrase (a,n)) = (show a) ++ " " ++ (show n)
-- A verb phrase is a verb followed by a noun phrase.
newtype VerbPhrase = VerbPhrase (Verb,NounPhrase)
  deriving Eq
instance Show VerbPhrase where
  show (VerbPhrase (v, np)) = (show v) ++ " " ++ (show np)
-- A sentence is a noun phrase followed by a verb phrase.
data Sentence = Sentence (NounPhrase,VerbPhrase)
  deriving Eq
instance Show Sentence where
  show (Sentence (np,vp)) = (show np) ++ " " ++ (show vp)
-- | Draw one word uniformly at random from @list@ and wrap it with
-- @datacon@.
--
-- Rewritten as a single 'fmap' instead of the do/return pattern.
-- NOTE(review): assumes @list@ is non-empty — an empty list would give
-- the range (0, -1) and '(!!)' would fail; all call sites in this module
-- pass the fixed, non-empty vocabularies above.
uniformW :: PrimMonad m => [String] -> (String -> a) -> Gen (PrimState m) -> m a
uniformW list datacon st = datacon . (list !!) <$> uniformR (0, length list - 1) st
{- A possibly stupid way of generating random sentences is making all of them instances of Variate type class.
Then, we are able to create sentences.
Only problem: uniformR is not defined for our type.
-}
-- | Lift a data constructor over a uniformly generated component value.
--
-- @do v <- uniform st; return (datacon v)@ is just 'fmap'.
uniformP :: (PrimMonad m , Variate b) => (b -> a) -> Gen (PrimState m) -> m a
uniformP datacon st = datacon <$> uniform st
-- Variate instances so mwc-random can generate whole grammar values.
-- 'uniformR' is deliberately left 'undefined' — a "range" of words or
-- phrases has no meaning here (see the comment above 'uniformP').
instance Variate Noun where
  uniform = uniformW nouns Noun
  uniformR = undefined
instance Variate Verb where
  uniform = uniformW verbs Verb
  uniformR = undefined
instance Variate Article where
  uniform = uniformW articles Article
  uniformR = undefined
instance Variate NounPhrase where
  uniform = uniformP NounPhrase
  uniformR = undefined
instance Variate VerbPhrase where
  uniform = uniformP VerbPhrase
  uniformR = undefined
instance Variate Sentence where
  uniform = uniformP Sentence
  uniformR = undefined
-- | Generate one random sentence using a freshly seeded system RNG.
--
-- The redundant @sentence <- …; return sentence@ is removed: the final
-- action's result is the result of the do-block.
genSentence :: IO Sentence
genSentence = do
  randomState <- createSystemRandom
  uniform randomState
|
kapilash/dc
|
src/Chap2/SentenceGen.hs
|
bsd-3-clause
| 2,223
| 0
| 12
| 526
| 712
| 380
| 332
| 61
| 1
|
module Gradualize.DynamicRuntime where
import Utils.Utils
import TypeSystem
import Data.Char (toUpper)
import Data.Maybe
import Changer.Changes
import Control.Monad
-- | Derive the grammar additions needed to "dynamize" a type system:
-- for every given relation symbol, generate its proof BNF rules and wrap
-- them as new syntax rules in a 'Changes' record named "Dynamic Runtimed".
-- Fails (Left) if any relation is missing or not binary.
dynamized :: TypeSystem -> [Symbol] -> Either String Changes
dynamized ts proofsFor
  = do proofRules <- proofsFor |> generateRelationProof ts
         & allRight |> concat
       let proofRules' = proofRules
             |> (\(n, bnf) -> New n (bnf, IgnoreWS, False))
       return $ Changes "Dynamic Runtimed" proofRules' [] [] []
-- | BNF rule for the tuple syntax @< l , r >@ over two type names.
-- The rule is named "tuple" followed by both type names with their first
-- letter capitalised.
generateTupleRule :: (TypeName, TypeName) -> (TypeName, [BNF])
generateTupleRule (tnl, tnr)
  = ("tuple"++onHead toUpper tnl++onHead toUpper tnr, [BNFSeq [Literal "<", BNFRuleCall tnl, Literal ",", BNFRuleCall tnr, Literal ">"]])
-- | Build the proof BNF rules for one (binary) relation: the tuple rule
-- over its two argument types plus a proof rule referencing it.  The
-- proof rule is named after the relation's pronunciation (camelCased,
-- prefixed with "proof"), falling back to the raw symbol.
generateRelationProof :: TypeSystem -> Symbol -> Either String [(TypeName, [BNF])]
generateRelationProof ts nm
  = do rel <- checkRelationExists ts nm
       let types = get relTypesModes rel |> fst
       -- Guard first: the irrefutable [tnl, tnr] pattern below is safe
       -- only because of this length check.
       unless (length types == 2) $ Left $ "Expected exactly two arguments to "++show nm++", this is not a tuple. Proofs can only be constructed for stuff like equality, is subtype of, ..."
       let [tnl, tnr] = types
       let t = generateTupleRule (tnl, tnr)
       let name = get relPronounce rel |> ("proof "++) |> camelCase & fromMaybe nm
       let bnf = [BNFRuleCall $ fst t]
       return [t, (name, bnf)]
|
pietervdvn/ALGT
|
src/Gradualize/DynamicRuntime.hs
|
bsd-3-clause
| 1,316
| 64
| 9
| 249
| 478
| 270
| 208
| 27
| 1
|
{-|
Module : PP.Builders.Nfa
Description : Builder for NFA
Copyright : (c) 2017 Patrick Champion
License : see LICENSE file
Maintainer : chlablak@gmail.com
Stability : provisional
Portability : portable
-}
module PP.Builders.Nfa
( combineNfa
) where
import qualified Data.Char as C
import qualified Data.Graph.Inductive.Graph as Gr
import qualified Data.List as L
import PP.Builder
import PP.Grammar
import PP.Grammars.Lexical
-- |Build a NFA from a RegExpr
-- Dragon Book (2nd edition, fr), page 146, algorithm 3.23
--
-- NOTE(review): 'RegExpr' lists are combined with 'union' and 'Choice'
-- lists with 'concatenate' — confirm against "PP.Grammars.Lexical" that
-- this mapping of constructors to operations is intended.
instance NfaBuilder RegExpr where
  buildNfa re = buildNfa' (stringify re) re
  buildNfa' n (RegExpr []) = buildSym n NfaEmpty
  buildNfa' n (RegExpr [x]) = buildNfa' n x
  buildNfa' n (RegExpr xs) = union n $ map (buildNfa' n) xs
  buildNfa' n (Choice []) = buildSym n NfaEmpty
  buildNfa' n (Choice [x]) = buildNfa' n x
  buildNfa' n (Choice xs) = foldl1 concatenate $ map (buildNfa' n) xs
  buildNfa' n (Many0 x) = kleeneStar $ buildNfa' n x
  buildNfa' n (Many1 x) = kleenePlus $ buildNfa' n x
  buildNfa' n (Option x) = option $ buildNfa' n x
  buildNfa' n (Group x) = buildNfa' n x
  buildNfa' n (Value c) = buildSym n $ NfaValue c
  -- Remaining constructors (Class/Interval/Any) are normalised first.
  buildNfa' n classes = buildNfa' n $ buildClasses classes
-- |Build a simple NFA
-- Two nodes (initial 0, final 1) joined by a single edge labelled @s@.
buildSym :: String -> NfaSymbol -> NfaGraph
buildSym n s = Gr.mkGraph [(0,NfaInitial),(1,NfaFinal n)] [(0,1,s)]
-- |Extract values from a class
-- Normalises class-like constructors into a flat 'RegExpr' of 'Value's.
-- Deliberately partial: only reached (via 'buildNfa'') for Class,
-- Interval, Any and Value.
buildClasses :: RegExpr -> RegExpr
buildClasses (Class xs) = RegExpr $ L.nub [ c
                                          | x <- xs
                                          , let (RegExpr cs)= buildClasses x
                                          , c <- cs]
buildClasses (Interval a b) = RegExpr [Value c | c <- [a..b]]
-- Any matches every ASCII character.
buildClasses Any = RegExpr [ Value c
                           | c <- [minBound..maxBound]
                           , C.isAscii c]
buildClasses v@(Value _) = RegExpr [v]
-- |Concatenate two NFA
-- The final node of @a@ is demoted to a plain node and identified with
-- the initial node of @b@; all of @b@'s node/edge ids are shifted by
-- @a@'s final index to keep them disjoint.
concatenate :: NfaGraph -> NfaGraph -> NfaGraph
concatenate a b = Gr.mkGraph (an2 ++ bn) (ae ++ be)
  where
    an2 = map (\n@(i, _) -> if i == final then (i, NfaNode) else n) an
    bn = map (\(i, n) -> (i + final, n)) $ filter isNotInitial $ Gr.labNodes b
    ae = Gr.labEdges a
    be = map (\(i, j, e) -> (i + final, j + final, e)) $ Gr.labEdges b
    final = ifinal a
    an = Gr.labNodes a
-- |Union a list of NFA
-- Classic Thompson union: a fresh initial node 0 and a fresh final node
-- are added, with epsilon (NfaEmpty) edges to every sub-NFA's initial
-- node and from every sub-NFA's final node.  Sub-NFA ids are shifted by
-- cumulative offsets ('diff') so they stay disjoint.
union :: String -> [NfaGraph] -> NfaGraph
union n gs = Gr.mkGraph (nodesU ++ nodes3) (edgesU ++ edges2)
  where
    nodes3 = map (\(i, _) -> (i, NfaNode)) nodes2
    nodesU = [(0,NfaInitial),(final,NfaFinal n)]
    edgesU = [ (i,j,NfaEmpty)
             | n <- nodes2
             , isNotNode n
             , let (i,j) = getIJ n]
    nodes2 = concat $ add $ zip diff nodes
    edges2 = concat $ adde $ zip diff edges
    nodes = map Gr.labNodes gs
    edges = map Gr.labEdges gs
    -- Epsilon edge endpoints for initial/final nodes of sub-NFAs.
    getIJ (j, NfaInitial) = (0, j)
    getIJ (i, NfaFinal _) = (i, final)
    final = last diff
    -- diff: starting offset of each sub-NFA, plus one extra entry used
    -- as the fresh final node's id.
    diff = diff' nodes 1
    diff' [] d = [d]
    diff' (x:xs) d = d : diff' xs (d + length x)
    add = map add'
    add' (d, xs) = map (add'' d) xs
    add'' d (i, n) = (i + d, n)
    adde = map adde'
    adde' (d, xs) = map (adde'' d) xs
    adde'' d (i, j, n) = (i + d, j + d, n)
-- |For a NFA `x`, returns the NFA for `x*` (Kleene star)
-- Shifts all ids up by one, wraps the NFA in fresh initial/final nodes,
-- and adds the three epsilon edges of the Thompson star construction
-- plus the back edge final->initial.
-- NOTE(review): the let-patterns below are partial — they require the
-- input NFA to have exactly one initial and one final node, which the
-- builders in this module guarantee.
kleeneStar :: NfaGraph -> NfaGraph
kleeneStar g = Gr.mkGraph (nodes2 ++ nodesK) (edges2 ++ edgesK)
  where
    nodesK = [(initial-1,NfaInitial),(final+1,NfaFinal finalN)]
    edgesK = [(initial-1,initial,NfaEmpty),
              (final,final+1,NfaEmpty),
              (initial-1,final+1,NfaEmpty)]
    nodes2 = map (\(i, _) -> (i, NfaNode)) nodes
    edges2 = (final,initial,NfaEmpty) : edges
    final = let [(i, _)] = filter isFinal nodes in i
    finalN = let [(_, NfaFinal n)] = filter isFinal nodes in n
    initial = let [(i, _)] = filter isInitial nodes in i
    nodes = map (\(i, n) -> (i + 1, n)) $ Gr.labNodes g
    edges = map (\(i, j, e) -> (i + 1, j + 1, e)) $ Gr.labEdges g
-- |For a NFA `x`, returns the NFA for `x+` (Kleene plus)
-- Built from the star NFA by deleting the epsilon edge that allows
-- skipping the inner automaton entirely (initial -> final).
kleenePlus :: NfaGraph -> NfaGraph
kleenePlus g =
  let starred = kleeneStar g
  in Gr.delEdge (iinitial starred, ifinal starred) starred
-- |For a NFA `x`, returns the NFA for `x?`
-- Built from the star NFA by deleting the back edge (old final -> old
-- initial), so the inner automaton can run at most once.
option :: NfaGraph -> NfaGraph
option g =
  let starred = kleeneStar g
  in Gr.delEdge (ifinal starred - 1, iinitial starred + 1) starred
-- |Combine multiple NFA in one
-- Like 'union' but keeps each sub-NFA's final node (so every token
-- retains its own accepting state) and only adds one fresh initial node
-- with epsilon edges to each sub-NFA's initial node.
combineNfa :: [NfaGraph] -> NfaGraph
combineNfa gs = Gr.mkGraph (nodesU ++ nodes3) (edgesU ++ edges2)
  where
    nodes3 = map (\n@(i, _) -> if isFinal n then n else (i, NfaNode)) nodes2
    nodesU = [(0,NfaInitial)]
    edgesU = [ (i,j,NfaEmpty)
             | n <- nodes2
             , isInitial n
             , let (i,j) = getIJ n]
    nodes2 = concat $ add $ zip diff nodes
    edges2 = concat $ adde $ zip diff edges
    nodes = map Gr.labNodes gs
    edges = map Gr.labEdges gs
    getIJ (j, NfaInitial) = (0, j)
    -- No trailing offset here (unlike 'union'): no fresh final node.
    diff = diff' nodes 1
    diff' [] d = []
    diff' (x:xs) d = d : diff' xs (d + length x)
    add = map add'
    add' (d, xs) = map (add'' d) xs
    add'' d (i, n) = (i + d, n)
    adde = map adde'
    adde' (d, xs) = map (adde'' d) xs
    adde'' d (i, j, n) = (i + d, j + d, n)
-- Utilities
-- iinitial/ifinal: id of the unique initial/final node.  Partial: the
-- let-patterns require exactly one such node, which holds for every NFA
-- built in this module.
iinitial g = let [(i, _)] = filter isInitial (Gr.labNodes g) in i
ifinal g = let [(i, _)] = filter isFinal (Gr.labNodes g) in i
-- Node-label predicates used by the builders above.
isFinal (_, NfaFinal _) = True
isFinal _ = False
isInitial (_, NfaInitial) = True
isInitial _ = False
isNotNode (_, NfaNode) = False
isNotNode _ = True
isNotInitial (_, NfaInitial) = False
isNotInitial _ = True
|
chlablak/platinum-parsing
|
src/PP/Builders/Nfa.hs
|
bsd-3-clause
| 5,700
| 0
| 13
| 1,717
| 2,369
| 1,272
| 1,097
| 117
| 3
|
{- This module provides type-level finite maps.
   The implementation is similar to that shown in the paper
   "Embedding effect systems in Haskell" (Orchard & Petricek, 2014). -}
{-# LANGUAGE TypeOperators, PolyKinds, DataKinds, KindSignatures,
TypeFamilies, UndecidableInstances, MultiParamTypeClasses,
FlexibleInstances, GADTs, FlexibleContexts, ScopedTypeVariables,
ConstraintKinds #-}
{-# LANGUAGE TypeInType #-}
module Data.Type.Test.BiMap where
import Data.Type.BiMap
|
AaronFriel/eff-experiments
|
src/Data/Type/Test/BiMap.hs
|
bsd-3-clause
| 510
| 0
| 4
| 95
| 17
| 13
| 4
| 7
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Radio.Application where
import Prelude hiding (div)
import Data.Monoid
import Data.Maybe
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Coroutine
import Control.Monad.Coroutine.SuspensionFunctors
import Control.Applicative
import Radio.Field
import Radio.Task
import Radio.Util
import Radio.Genetic
import Radio.Plot
import Radio.Config
import Radio.Tower
import System.Random
import Haste.HPlay.View hiding (head)
import Haste
import Genetic.Options
import Genetic.Solve
import Genetic.State
import Genetic.Coroutine
-- | The three screens of the application: configuring input, running the
-- genetic algorithm (with plot history, GA state and an optional paused
-- coroutine), or showing the final results.
data ApplicationState = AppConfigure Input
  | AppCalculate Input PlotState (GeneticState TowersIndivid) (Maybe (Pauseable (GeneticState TowersIndivid)))
  | AppShow Input PlotState Output
-- | Navigation targets between the three screens.
data Route = RouteConfig | RouteCalculate | RouteShow
  deriving (Enum, Show)
-- | The application starts on the configuration screen.
initialState :: ApplicationState
initialState = AppConfigure initialInput
-- | Top-level widget loop: renders the widget for the current screen
-- alongside the navigation buttons, and transitions between screens
-- according to the chosen 'Route' (Right) or the screen's own state
-- update (Left).
runApplication :: ApplicationState -> Widget ()
runApplication state = wloop state go
  where
    go :: ApplicationState -> Widget ApplicationState
    -- Configuration screen: only "start evolution" is a valid route.
    go localState@(AppConfigure input) = do
      update <- eitherWidget (fieldConfigWidget input) $ routeWidget localState
      case update of
        Right route -> case route of
          RouteCalculate -> do
            geneticState <- liftIO initialGeneticState
            return $ AppCalculate input initialPlotState geneticState Nothing
          _ -> fail $ "invalid route in config state " ++ show route
        Left newInput -> return $ AppConfigure newInput
    -- Calculation screen: timers are cleared before leaving; the GA may
    -- also finish on its own, which jumps straight to the results.
    go localState@(AppCalculate input plotState geneticState coroutine) = do
      update <- eitherWidget (geneticWidget input geneticState plotState coroutine) $ routeWidget localState
      case update of
        Right route -> do
          liftIO $ clearTimers
          case route of
            RouteConfig -> return $ AppConfigure input
            RouteShow -> return $ AppShow input plotState $ extractSolution input geneticState
            _ -> fail $ "invalid route in config state " ++ show route
        Left (newGeneticState, newPlotState, newCoroutine) -> return $ if isGeneticFinished newGeneticState
          then AppShow input newPlotState $ extractSolution input newGeneticState
          else AppCalculate input newPlotState newGeneticState newCoroutine
    -- Results screen: restart from scratch or re-run the calculation.
    go localState@(AppShow input plotState output) = do
      update <- eitherWidget (showResultsWidget input plotState output) $ routeWidget localState
      case update of
        Right route -> case route of
          RouteConfig -> return $ AppConfigure input
          RouteCalculate -> do
            geneticState <- liftIO initialGeneticState
            return $ AppCalculate input initialPlotState geneticState Nothing
          _ -> fail $ "invalid route in show state " ++ show route
        Left _ -> return localState
-- | Run two widgets side by side, tagging whichever fires first with
-- 'Left' or 'Right'.
--
-- @return . Left =<< wa@ is just @Left \<$\> wa@ (monad/functor laws),
-- so the tagging is done with plain 'fmap'.
eitherWidget :: Widget a -> Widget b -> Widget (Either a b)
eitherWidget wa wb = (Left <$> wa) <|> (Right <$> wb)
-- | Navigation buttons for the current screen, centered in a Bootstrap
-- row.  Each screen offers only the routes that make sense from it.
routeWidget :: ApplicationState -> Widget Route
routeWidget state = div ! atr "class" "row"
  <<< div ! atr "class" "col-md-4 col-md-offset-4"
  <<< go state
  where
    go (AppConfigure {}) = bigBtn RouteCalculate "Начать эволюцию"
    go (AppCalculate {}) = bigBtn RouteConfig "Назад" <|> bigBtn RouteShow "Остановить"
    go (AppShow {}) = bigBtn RouteConfig "Начать с начала" <|> bigBtn RouteCalculate "Перерасчитать"
    bigBtn v s = cbutton v s <! [atr "class" "btn btn-primary btn-lg"]
-- | One step of the calculation screen: appends the current best fitness
-- to the plot, renders the plot and a summary panel, then resumes (or
-- starts) the GA coroutine with a 100ms budget and returns the updated
-- GA state, plot state and (possibly paused) coroutine.
geneticWidget :: Input -> GeneticState TowersIndivid -> PlotState -> Maybe (Pauseable (GeneticState TowersIndivid)) -> Widget (GeneticState TowersIndivid, PlotState, Maybe (Pauseable (GeneticState TowersIndivid)))
geneticWidget input geneticState plotState coroutine = do
  --wprint $ show $ geneticCurrentBest geneticState
  -- Only extend the plot once populations exist.
  let newPlotState = if null $ geneticPopulations geneticState
        then plotState
        else plotState
          {
            values = values plotState ++ [
              ( geneticCurrentGen geneticState,
                fromMaybe 0 $ fst <$> geneticCurrentBest geneticState
              )]
          }
  (dwidth, dheight) <- liftIO $ getDocumentSize
  div ! atr "class" "col-md-6" <<< plotWidget newPlotState "Поколение" "Фитнес" ( 0.4 * fromIntegral dwidth, fromIntegral dheight / 2)
  let towersUsed = length $ maybe [] (filterTowers input) $ snd <$> geneticCurrentBest geneticState
  wraw $ div ! atr "class" "col-md-6" $ panel "Текущий результат" $ mconcat [
      labelRow 4 "Лучший фитнес: " $ show $ maybe 0 fst $ geneticCurrentBest geneticState
    , labelRow 4 "Башен использовано: " $ show $ towersUsed
    , labelRow 4 "Башен всего: " $ show $ length $ inputTowers input
    , labelRow 4 "Лучшее покрытие: " $ maybe "" show $ calcCoverage input . snd <$> geneticCurrentBest geneticState
    ]
  -- Resume the paused coroutine if there is one, otherwise start solving.
  corRes <- timeout 100 $ liftIO $ case coroutine of
    Nothing -> resume $ solve (length $ inputTowers input) (fitness input) (inputGeneticOptions input) geneticState
    Just cr -> resume cr
  -- Yield means the GA paused mid-run; Right means it completed a step.
  (newGeneticState, newCoroutine) <- case corRes of
    Left (Yield _ paused) -> return (geneticState, Just paused)
    Right genst -> return (genst, Nothing)
  return (newGeneticState, newPlotState, newCoroutine)
-- | Results screen: renders the solved field next to the fitness plot,
-- followed by four summary panels (input, GA options, output, misc).
showResultsWidget :: Input -> PlotState -> Output -> Widget ()
showResultsWidget input plotState output = do
  (dwidth, dheight) <- liftIO $ getDocumentSize
  -- Cell size scales the field rendering to ~45% of the window width.
  let (xsize, ysize) = inputFieldSize input
      cellSize = fromIntegral dwidth * 0.45 / fromIntegral xsize
  div ! atr "class" "row" <<< do
    div ! atr "class" "col-md-6" <<< fieldShow input output cellSize
    div ! atr "class" "col-md-6" <<< do
      plotWidget plotState "Поколение" "Фитнес" (fromIntegral dwidth / 2, fromIntegral dheight / 2)
  wraw $ div ! atr "class" "row-fluid" $ mconcat [
      div ! atr "class" "col-md-6" $ inputInfo
    , div ! atr "class" "col-md-6" $ optionsInfo
    , div ! atr "class" "col-md-6" $ outputInfo
    , div ! atr "class" "col-md-6" $ otherInfo
    ]
  noWidget
  where
    opts = inputGeneticOptions input
    -- Tower pretty-printers, with and without radius.
    showTower t = "x: " ++ show (towerX t) ++ " y: " ++ show (towerY t) ++ " r: " ++ show (towerRadius t)
    showTower' t = "x: " ++ show (towerX t) ++ " y: " ++ show (towerY t)
    inputInfo = panel "Входные данные" $ mconcat [
        labelRow 4 "Размер поля:" $ show $ inputFieldSize input
      , labelRow 4 "Возможные башни:" $ show $ showTower <$> inputTowers input
      ]
    optionsInfo = panel "Настройки эволюции" $ mconcat [
        labelRow 4 "Шанс мутации: " $ show $ mutationChance opts
      , labelRow 4 "Часть элиты: " $ show $ elitePart opts
      , labelRow 4 "Максимальное число поколений: " $ show $ maxGeneration opts
      , labelRow 4 "Кол-во популяций: " $ show $ popCount opts
      , labelRow 4 "Кол-во индивидов в популяции: " $ show $ indCount opts
      ]
    outputInfo = panel "Результаты эволюции" $ mconcat [
        labelRow 4 "Лучший фитнес: " $ show $ outputFitness output
      , labelRow 4 "Лучшее решение: " $ show $ showTower' <$> outputTowers output
      ]
    otherInfo = panel "Другая информация" $ mconcat [
        labelRow 4 "Башен использовано: " $ show $ length $ outputTowers output
      , labelRow 4 "Башен всего: " $ show $ length $ inputTowers input
      , labelRow 4"Лучшее покрытие: " $ show $ calcCoverage' input $ outputTowers output
      ]
|
Teaspot-Studio/bmstu-radio-problem-haste
|
Radio/Application.hs
|
bsd-3-clause
| 7,899
| 0
| 19
| 1,789
| 2,157
| 1,053
| 1,104
| 132
| 12
|
module Day10
( parseInstr
, nextState
, initState
, runUntil
, Instr(..)
, Dest(..)
) where
import qualified Data.Map.Strict as Map
import qualified Data.List as List
-- | Where a chip goes: to another bot or into an output bin.
data Dest = Bot Int | Output Int deriving(Eq, Show)

-- | A puzzle instruction: either an initial value handed to a bot, or a
-- command describing where a bot's low and high chips go.
data Instr = Value { bot :: Int, val :: Int }
           | Cmd { bot :: Int
                 , lo :: Dest
                 , hi :: Dest }
           deriving(Eq, Show)

-- Chips currently held, keyed by bot / output-bin id.
type BotState = Map.Map Int [Int]
type OutState = Map.Map Int [Int]
-- Pending command per bot id.
type InstrMap = Map.Map Int Instr

-- | Parse one puzzle input line, e.g.
-- @value 5 goes to bot 2@ or
-- @bot 2 gives low to bot 1 and high to output 0@.
-- Calls 'error' on any other shape (fixed the typo in the message).
parseInstr :: String -> Instr
parseInstr s =
  let wrds = words s
  in case wrds of
       ("value":v:_:_:_:b:[]) -> Value { bot = read b, val = read v }
       ("bot":b:_:_:_:ldest:ld:_:_:_:hdest:hd:[]) ->
         Cmd { bot = read b, lo = (parseDest ldest ld), hi = (parseDest hdest hd) }
         where parseDest name dest = if name == "bot" then Bot (read dest) else Output (read dest)
       _ -> error ("couldn't parse string: " ++ s)
-- | True exactly for initial 'Value' assignments (as opposed to 'Cmd's).
isValue :: Instr -> Bool
isValue instr = case instr of
  Value _ _ -> True
  _ -> False
-- | Build the initial simulation state: value instructions become the
-- bots' starting chip holdings, command instructions become the pending
-- instruction map, and the output bins start empty.
initState :: [Instr] -> (BotState, OutState, InstrMap)
initState instrs =
  let (vals, cmds) = List.partition isValue instrs
      botstate = foldr (\v bs -> Map.insertWith (\nv ov -> nv ++ ov) (bot v) [val v] bs) Map.empty vals
      outstate = Map.empty
      instrmap = foldr (\cmd im -> Map.insert (bot cmd) cmd im) Map.empty cmds
  in (botstate, outstate, instrmap)
-- | Execute one bot command: the bot's minimum chip goes to its low
-- destination and its maximum to its high destination.
-- NOTE(review): uses 'Map.!', so the bot must already appear in the bot
-- state ('nextState' guarantees this); the giving bot's own entry is not
-- cleared — the command is instead removed from the instruction map.
execCmd :: Instr -> (BotState, OutState) -> (BotState, OutState)
execCmd cmd (bs, os) =
  let botvals = bs Map.! (bot cmd)
      lval = List.minimum botvals
      hval = List.maximum botvals
      (bs', os') =
        case (lo cmd) of
          Bot b -> (Map.insertWith (\nv ov -> nv ++ ov) b [lval] bs, os)
          Output o -> (bs, Map.insertWith(\nv ov -> nv ++ ov) o [lval] os)
  in case (hi cmd) of
       Bot b -> (Map.insertWith (\nv ov -> nv ++ ov) b [hval] bs', os')
       Output o -> (bs', Map.insertWith(\nv ov -> nv ++ ov) o [hval] os')
-- | One simulation step: run every bot that currently holds exactly two
-- chips and still has a pending command, then remove those bots'
-- commands so each fires only once.
nextState :: (BotState, OutState, InstrMap) -> (BotState, OutState, InstrMap)
nextState (bs, os, imap) =
  let cmds = Map.elems $ Map.filterWithKey (\k _ -> (Map.member k bs) && (2 == (length $ bs Map.! k))) imap
      (nbs, nos) = List.foldr execCmd (bs, os) cmds
      resolvedbots = List.map (bot) cmds
      nimap = Map.filterWithKey (\k _ -> k `List.notElem` resolvedbots) imap
  in (nbs, nos, nimap)
-- | Iterate 'nextState' until the predicate holds, returning the first
-- satisfying state.
runUntil :: ((BotState, OutState, InstrMap) -> Bool) -> (BotState, OutState, InstrMap) -> (BotState, OutState, InstrMap)
runUntil fin state
  | fin state = state
  | otherwise = runUntil fin (nextState state)
|
reidwilbur/aoc2016
|
src/Day10.hs
|
bsd-3-clause
| 2,580
| 0
| 23
| 674
| 1,183
| 650
| 533
| 61
| 4
|
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.SHFSTM.Internal.Debug
-- Copyright : (c) D. Sabel, Goethe-University, Frankfurt a.M., Germany
-- License : BSD-style
--
-- Maintainer : sabel <at> ki.cs.uni-frankfurt.de
-- Stability : experimental
-- Portability : non-portable (needs GHC and extensions)
--
--
-- This module implements the interface to the STM implementation.
-----------------------------------------------------------------------------
module Control.Concurrent.SHFSTM.Internal.Debug (
sPutStrLn
) where
import Control.Exception
import Control.Concurrent
import System.IO.Unsafe
-- | 'sPutStrLn' can be used to print exclusively to stdout, if all print-operations
-- use this primitive. It is used for debugging purposes.
--
-- The empty 'MVar' 'printv' acts as a lock: filling it acquires, emptying
-- it releases.  'bracket_' both masks async exceptions around
-- acquire/release and guarantees the lock is released even if 'putStrLn'
-- throws (e.g. on a closed stdout) — the previous 'mask_'-only version
-- would hold the lock forever in that case.
sPutStrLn :: String -> IO ()
sPutStrLn str =
  bracket_ (putMVar printv ())
           (takeMVar printv)
           (putStrLn str)

-- | Global print lock used by 'sPutStrLn'; starts empty (unlocked).
{-# NOINLINE printv #-}
printv :: MVar ()
printv = unsafePerformIO $ newEmptyMVar
|
cornell-pl/HsAdapton
|
stmshf/Control/Concurrent/SHFSTM/Internal/Debug.hs
|
bsd-3-clause
| 1,050
| 0
| 9
| 180
| 113
| 67
| 46
| 15
| 1
|
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses #-}
module DataFamilies where
-- Open data family: each element type picks its own representation.
data family Vector a
-- Boolean vectors are represented as a plain list.
newtype instance Vector Bool = V_Bool [Bool]
-- Forces the V_Bool representation via pattern matching (test fixture).
f :: Vector Bool -> ()
f (V_Bool v) = ()
-- Minimal generic-vector class exercising a data-family instance head.
class GVector v a where
  basicLength :: v a -> Int
instance GVector Vector Bool where
  basicLength (V_Bool v) = length v
|
phischu/fragnix
|
tests/quick/DataFamilies/DataFamilies.hs
|
bsd-3-clause
| 319
| 0
| 8
| 66
| 106
| 56
| 50
| 10
| 1
|
{-# LANGUAGE TupleSections #-}
module EDSL.Monad.Instructions.Constant where
--import Prelude hiding (error)
import EDSL.Monad.EdslT
import Data.BitCode.LLVM.Classes.HasType
import Data.BitCode.LLVM.Value (Value (Function, TRef, Constant), Named(Unnamed), Symbol, symbolValue, mkUnnamed)
import Data.BitCode.LLVM.Types (BasicBlockId)
import Data.BitCode.LLVM.Type (Ty)
import Data.BitCode.LLVM.Function (BlockInst)
import Data.BitCode.LLVM.Util hiding (lift)
import Data.BitCode.LLVM.Instruction (TailCallKind)
import Data.BitCode.LLVM.CallingConv (CallingConv)
import Data.BitCode.LLVM.RMWOperations (RMWOperations)
import Data.BitCode.LLVM.Codes.AtomicOrdering (AtomicOrdering)
import Data.BitCode.LLVM.Codes.SynchronizationScope (AtomicSynchScope)
import Data.BitCode.LLVM.Opcodes.Cast (CastOp)
import qualified Data.BitCode.LLVM.Instruction as Inst
import qualified Data.BitCode.LLVM.Value as Const (Const(..))
import qualified Data.BitCode.LLVM.Type as Ty
import qualified Data.BitCode.LLVM.Cmp as CmpOp
import qualified Data.BitCode.LLVM.Opcodes.Binary as BinOp
import qualified Data.BitCode.LLVM.Opcodes.Cast as CastOp
import qualified Data.BitCode.LLVM.CallingConv as CConv
import qualified Data.BitCode.LLVM.Util as Util
import qualified Data.BitCode.LLVM.RMWOperations as RMWOp
import Data.BitCode.LLVM.Pretty as P
import Text.PrettyPrint as P
import Control.Monad.Trans.Class (MonadTrans, lift)
import Control.Monad.Trans.Except (ExceptT(..), throwE, runExceptT)
import Control.Monad ((<=<), (>=>))
import Data.Functor.Identity (Identity)
import Data.BitCode.LLVM.Value (trace)
import GHC.Stack
import Data.Word (Word64)
-- ** Constant Cast Op

-- | Constant-expression cast helpers, one per LLVM cast opcode.
truncC, zextC, sextC, fpToUiC, fpToSiC, uiToFpC, siToFpC, fpTruncC, fpExtC, ptrToIntC, intToPtrC, bitcastC, addrSpCastC
  :: (Monad m, HasCallStack) => Ty -> Symbol -> EdslT m Symbol

-- | Wrap a symbol in a constant 'Const.Cast' node of the target type and
-- register the resulting unnamed constant via 'tellConst'.
mkConstCast :: (Monad m, HasCallStack) => CastOp -> Ty -> Symbol -> EdslT m Symbol
mkConstCast op t = tellConst . mkUnnamed t . Constant t . Const.Cast t op . trace ("[mkConstCast] accessing symbol for " ++ show op)

-- Each helper is simply 'mkConstCast' partially applied to its opcode;
-- the explicit signatures above keep the eta-reduced forms polymorphic.
truncC      = mkConstCast CastOp.TRUNC
zextC       = mkConstCast CastOp.ZEXT
sextC       = mkConstCast CastOp.SEXT
fpToUiC     = mkConstCast CastOp.FPTOUI
fpToSiC     = mkConstCast CastOp.FPTOSI
uiToFpC     = mkConstCast CastOp.UITOFP
siToFpC     = mkConstCast CastOp.SITOFP
fpTruncC    = mkConstCast CastOp.FPTRUNC
fpExtC      = mkConstCast CastOp.FPEXT
ptrToIntC   = mkConstCast CastOp.PTRTOINT
intToPtrC   = mkConstCast CastOp.INTTOPTR
bitcastC    = mkConstCast CastOp.BITCAST
addrSpCastC = mkConstCast CastOp.ADDRSPACECAST
-- ** Constant Binary Op

-- | Constant-expression binary operators, one per LLVM binary opcode.
-- The result takes the type of the left operand.
addC, subC, mulC, udivC, sdivC, uremC, sremC, shlC, lshrC, ashrC, andC, orC, xorC
  :: (Monad m, HasCallStack) => Symbol -> Symbol -> EdslT m Symbol

mkConstBinOp :: (Monad m, HasCallStack) => BinOp.BinOp -> Symbol -> Symbol -> EdslT m Symbol
-- TODO: verify that both are Constants!
mkConstBinOp op lhs rhs = tellConst . mkUnnamed (ty lhs) . Constant (ty lhs) $ Const.BinOp op (trace ("accessing lhs for binop " ++ show op) lhs)
                                                                                              (trace ("accessing rhs for binop " ++ show op) rhs)

-- Each helper is simply 'mkConstBinOp' partially applied to its opcode;
-- the explicit signatures above keep the eta-reduced forms polymorphic.
addC  = mkConstBinOp BinOp.ADD
subC  = mkConstBinOp BinOp.SUB
mulC  = mkConstBinOp BinOp.MUL
udivC = mkConstBinOp BinOp.UDIV
sdivC = mkConstBinOp BinOp.SDIV
uremC = mkConstBinOp BinOp.UREM
sremC = mkConstBinOp BinOp.SREM
shlC  = mkConstBinOp BinOp.SHL
lshrC = mkConstBinOp BinOp.LSHR
ashrC = mkConstBinOp BinOp.ASHR
andC  = mkConstBinOp BinOp.AND
orC   = mkConstBinOp BinOp.OR
xorC  = mkConstBinOp BinOp.XOR
|
angerman/data-bitcode-edsl
|
src/EDSL/Monad/Instructions/Constant.hs
|
bsd-3-clause
| 3,931
| 0
| 11
| 711
| 1,159
| 675
| 484
| -1
| -1
|
module DFS where
-- | A simple binary tree: empty, or a node carrying a value and two
-- subtrees.
data Tree a = Empty | Node a (Tree a) (Tree a) deriving (Show)

-- | Pre-order depth-first traversal: the node's value first, then the
-- left subtree, then the right subtree.
traverseDFS :: Tree a -> [a]
traverseDFS t = case t of
  Empty      -> []
  Node v l r -> v : traverseDFS l ++ traverseDFS r
|
manikTharaka/al-go-rithms
|
graphsearch/depth-first-search/Haskell/DFS.hs
|
mit
| 227
| 0
| 8
| 48
| 105
| 57
| 48
| 5
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DataPipeline.DeactivatePipeline
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deactivates the specified running pipeline. The pipeline is set to the
-- 'DEACTIVATING' state until the deactivation process completes.
--
-- To resume a deactivated pipeline, use ActivatePipeline. By default, the
-- pipeline resumes from the last completed execution. Optionally, you can
-- specify the date and time to resume the pipeline.
--
-- /See:/ <http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_DeactivatePipeline.html AWS API Reference> for DeactivatePipeline.
module Network.AWS.DataPipeline.DeactivatePipeline
(
-- * Creating a Request
deactivatePipeline
, DeactivatePipeline
-- * Request Lenses
, dCancelActive
, dPipelineId
-- * Destructuring the Response
, deactivatePipelineResponse
, DeactivatePipelineResponse
-- * Response Lenses
, drsResponseStatus
) where
import Network.AWS.DataPipeline.Types
import Network.AWS.DataPipeline.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Contains the parameters for DeactivatePipeline.
--
-- /See:/ 'deactivatePipeline' smart constructor.
data DeactivatePipeline = DeactivatePipeline'
{ _dCancelActive :: !(Maybe Bool)
, _dPipelineId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeactivatePipeline' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dCancelActive'
--
-- * 'dPipelineId'
deactivatePipeline
:: Text -- ^ 'dPipelineId'
-> DeactivatePipeline
deactivatePipeline pPipelineId_ =
DeactivatePipeline'
{ _dCancelActive = Nothing
, _dPipelineId = pPipelineId_
}
-- | Indicates whether to cancel any running objects. The default is true,
-- which sets the state of any running objects to 'CANCELED'. If this value
-- is false, the pipeline is deactivated after all running objects finish.
dCancelActive :: Lens' DeactivatePipeline (Maybe Bool)
dCancelActive = lens _dCancelActive (\ s a -> s{_dCancelActive = a});
-- | The ID of the pipeline.
dPipelineId :: Lens' DeactivatePipeline Text
dPipelineId = lens _dPipelineId (\ s a -> s{_dPipelineId = a});
instance AWSRequest DeactivatePipeline where
type Rs DeactivatePipeline =
DeactivatePipelineResponse
request = postJSON dataPipeline
response
= receiveEmpty
(\ s h x ->
DeactivatePipelineResponse' <$> (pure (fromEnum s)))
instance ToHeaders DeactivatePipeline where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("DataPipeline.DeactivatePipeline" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DeactivatePipeline where
toJSON DeactivatePipeline'{..}
= object
(catMaybes
[("cancelActive" .=) <$> _dCancelActive,
Just ("pipelineId" .= _dPipelineId)])
instance ToPath DeactivatePipeline where
toPath = const "/"
instance ToQuery DeactivatePipeline where
toQuery = const mempty
-- | Contains the output of DeactivatePipeline.
--
-- /See:/ 'deactivatePipelineResponse' smart constructor.
newtype DeactivatePipelineResponse = DeactivatePipelineResponse'
{ _drsResponseStatus :: Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeactivatePipelineResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'drsResponseStatus'
deactivatePipelineResponse
:: Int -- ^ 'drsResponseStatus'
-> DeactivatePipelineResponse
deactivatePipelineResponse pResponseStatus_ =
DeactivatePipelineResponse'
{ _drsResponseStatus = pResponseStatus_
}
-- | The response status code.
drsResponseStatus :: Lens' DeactivatePipelineResponse Int
drsResponseStatus = lens _drsResponseStatus (\ s a -> s{_drsResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-datapipeline/gen/Network/AWS/DataPipeline/DeactivatePipeline.hs
|
mpl-2.0
| 4,797
| 0
| 13
| 1,015
| 589
| 356
| 233
| 78
| 1
|
module Propellor.Spin (
commitSpin,
spin,
update,
gitPushHelper,
mergeSpin,
) where
import Data.List
import System.Exit
import System.PosixCompat
import System.Posix.IO
import System.Posix.Directory
import Control.Concurrent.Async
import qualified Data.ByteString as B
import qualified Data.Set as S
import Network.Socket (getAddrInfo, defaultHints, AddrInfo(..), AddrInfoFlag(..), SockAddr)
import Propellor
import Propellor.Protocol
import Propellor.PrivData.Paths
import Propellor.Git
import Propellor.Ssh
import Propellor.Gpg
import Propellor.Bootstrap
import Propellor.Types.CmdLine
import qualified Propellor.Shim as Shim
import Utility.FileMode
import Utility.SafeCommand
-- | Commit all local changes (even when there are none, via
-- --allow-empty) using 'spinCommitMessage', in preparation for
-- spinning a host.
commitSpin :: IO ()
commitSpin = do
	void $ actionMessage "Git commit" $
		gitCommit [Param "--allow-empty", Param "-a", Param "-m", Param spinCommitMessage]
	-- Push to central origin repo first, if possible.
	-- The remote propellor will pull from there, which avoids
	-- us needing to send stuff directly to the remote host.
	whenM hasOrigin $
		void $ actionMessage "Push to central git repository" $
			boolSystem "git" [Param "push"]
spin :: HostName -> Maybe HostName -> Host -> IO ()
spin target relay hst = do
cacheparams <- if viarelay
then pure ["-A"]
else toCommand <$> sshCachingParams hn
when viarelay $
void $ boolSystem "ssh-add" []
sshtarget <- ("root@" ++) <$> case relay of
Just r -> pure r
Nothing -> getSshTarget target hst
-- Install, or update the remote propellor.
updateServer target relay hst
(proc "ssh" $ cacheparams ++ [sshtarget, shellWrap probecmd])
(proc "ssh" $ cacheparams ++ [sshtarget, shellWrap updatecmd])
-- And now we can run it.
unlessM (boolSystem "ssh" (map Param $ cacheparams ++ ["-t", sshtarget, shellWrap runcmd])) $
error $ "remote propellor failed"
where
hn = fromMaybe target relay
relaying = relay == Just target
viarelay = isJust relay && not relaying
probecmd = intercalate " ; "
[ "if [ ! -d " ++ localdir ++ "/.git ]"
, "then (" ++ intercalate " && "
[ installGitCommand
, "echo " ++ toMarked statusMarker (show NeedGitClone)
] ++ ") || echo " ++ toMarked statusMarker (show NeedPrecompiled)
, "else " ++ updatecmd
, "fi"
]
updatecmd = intercalate " && "
[ "cd " ++ localdir
, bootstrapPropellorCommand
, if viarelay
then "./propellor --continue " ++
shellEscape (show (Relay target))
-- Still using --boot for back-compat...
else "./propellor --boot " ++ target
]
runcmd = "cd " ++ localdir ++ " && ./propellor " ++ cmd
cmd = if viarelay
then "--serialized " ++ shellEscape (show (Spin [target] (Just target)))
else "--continue " ++ shellEscape (show (SimpleRun target))
-- Check if the Host contains an IP address that matches one of the IPs
-- in the DNS for the HostName. If so, the HostName is used as-is,
-- but if the DNS is out of sync with the Host config, or doesn't have
-- the host in it at all, use one of the Host's IPs instead.
getSshTarget :: HostName -> Host -> IO String
getSshTarget target hst
| null configips = return target
| otherwise = go =<< tryIO (dnslookup target)
where
go (Left e) = useip (show e)
go (Right addrinfos) = do
configaddrinfos <- catMaybes <$> mapM iptoaddr configips
if any (`elem` configaddrinfos) (map addrAddress addrinfos)
then return target
else useip ("DNS lookup did not return any of the expected addresses " ++ show configips)
dnslookup h = getAddrInfo (Just $ defaultHints { addrFlags = [AI_CANONNAME] }) (Just h) Nothing
-- Convert a string containing an IP address into a SockAddr.
iptoaddr :: String -> IO (Maybe SockAddr)
iptoaddr ip = catchDefaultIO Nothing $ headMaybe . map addrAddress
<$> getAddrInfo (Just $ defaultHints { addrFlags = [AI_NUMERICHOST] }) (Just ip) Nothing
useip why = case headMaybe configips of
Nothing -> return target
Just ip -> do
-- If we're being asked to run on the local host,
-- ignore DNS.
s <- takeWhile (/= '\n') <$> readProcess "hostname" ["-f"]
if s == target
then return target
else do
warningMessage $ "DNS seems out of date for " ++ target ++ " (" ++ why ++ "); using IP address from configuration instead."
return ip
configips = map fromIPAddr $ mapMaybe getIPAddr $
S.toList $ _dns $ hostInfo hst
-- Update the privdata, repo url, and git repo over the ssh
-- connection, talking to the user's local propellor instance which is
-- running the updateServer
update :: Maybe HostName -> IO ()
update forhost = do
whenM hasGitRepo $
req NeedRepoUrl repoUrlMarker setRepoUrl
makePrivDataDir
createDirectoryIfMissing True (takeDirectory privfile)
req NeedPrivData privDataMarker $
writeFileProtected privfile
whenM hasGitRepo $
req NeedGitPush gitPushMarker $ \_ -> do
hin <- dup stdInput
hout <- dup stdOutput
hClose stdin
hClose stdout
-- Not using git pull because git 2.5.0 badly
-- broke its option parser.
unlessM (boolSystem "git" (pullparams hin hout)) $
errorMessage "git fetch from client failed"
unlessM (boolSystem "git" [Param "merge", Param "FETCH_HEAD"]) $
errorMessage "git merge from client failed"
where
pullparams hin hout =
[ Param "fetch"
, Param "--progress"
, Param "--upload-pack"
, Param $ "./propellor --gitpush " ++ show hin ++ " " ++ show hout
, Param "."
]
-- When --spin --relay is run, get a privdata file
-- to be relayed to the target host.
privfile = maybe privDataLocal privDataRelay forhost
updateServer
:: HostName
-> Maybe HostName
-> Host
-> CreateProcess
-> CreateProcess
-> IO ()
updateServer target relay hst connect haveprecompiled =
withIOHandles createProcessSuccess connect go
where
hn = fromMaybe target relay
relaying = relay == Just target
go (toh, fromh) = do
let loop = go (toh, fromh)
let restart = updateServer hn relay hst connect haveprecompiled
let done = return ()
v <- (maybe Nothing readish <$> getMarked fromh statusMarker)
case v of
(Just NeedRepoUrl) -> do
sendRepoUrl toh
loop
(Just NeedPrivData) -> do
sendPrivData hn hst toh relaying
loop
(Just NeedGitClone) -> do
hClose toh
hClose fromh
sendGitClone hn
restart
(Just NeedPrecompiled) -> do
hClose toh
hClose fromh
sendPrecompiled hn
updateServer hn relay hst haveprecompiled (error "loop")
(Just NeedGitPush) -> do
sendGitUpdate hn fromh toh
hClose fromh
hClose toh
done
Nothing -> done
-- | Reply to a NeedRepoUrl request: send the configured repository url
-- over the handle, or the empty string when none is configured.
sendRepoUrl :: Handle -> IO ()
sendRepoUrl toh = do
	u <- getRepoUrl
	sendMarked toh repoUrlMarker (fromMaybe "" u)
sendPrivData :: HostName -> Host -> Handle -> Bool -> IO ()
sendPrivData hn hst toh relaying = do
privdata <- getdata
void $ actionMessage ("Sending privdata (" ++ show (length privdata) ++ " bytes) to " ++ hn) $ do
sendMarked toh privDataMarker privdata
return True
where
getdata
| relaying = do
let f = privDataRelay hn
d <- readFileStrictAnyEncoding f
nukeFile f
return d
| otherwise = show . filterPrivData hst <$> decryptPrivData
sendGitUpdate :: HostName -> Handle -> Handle -> IO ()
sendGitUpdate hn fromh toh =
void $ actionMessage ("Sending git update to " ++ hn) $ do
sendMarked toh gitPushMarker ""
(Nothing, Nothing, Nothing, h) <- createProcess p
(==) ExitSuccess <$> waitForProcess h
where
p = (proc "git" ["upload-pack", "."])
{ std_in = UseHandle fromh
, std_out = UseHandle toh
}
-- Initial git clone, used for bootstrapping.
sendGitClone :: HostName -> IO ()
sendGitClone hn = void $ actionMessage ("Clone git repository to " ++ hn) $ do
branch <- getCurrentBranch
cacheparams <- sshCachingParams hn
withTmpFile "propellor.git" $ \tmp _ -> allM id
[ boolSystem "git" [Param "bundle", Param "create", File tmp, Param "HEAD"]
, boolSystem "scp" $ cacheparams ++ [File tmp, Param ("root@"++hn++":"++remotebundle)]
, boolSystem "ssh" $ cacheparams ++ [Param ("root@"++hn), Param $ unpackcmd branch]
]
where
remotebundle = "/usr/local/propellor.git"
unpackcmd branch = shellWrap $ intercalate " && "
[ "git clone " ++ remotebundle ++ " " ++ localdir
, "cd " ++ localdir
, "git checkout -b " ++ branch
, "git remote rm origin"
, "rm -f " ++ remotebundle
]
-- Send a tarball containing the precompiled propellor, and libraries.
-- This should be reasonably portable, as long as the remote host has the
-- same architecture as the build host.
sendPrecompiled :: HostName -> IO ()
sendPrecompiled hn = void $ actionMessage ("Uploading locally compiled propellor as a last resort") $ do
bracket getWorkingDirectory changeWorkingDirectory $ \_ ->
withTmpDir "propellor" go
where
go tmpdir = do
cacheparams <- sshCachingParams hn
let shimdir = takeFileName localdir
createDirectoryIfMissing True (tmpdir </> shimdir)
changeWorkingDirectory (tmpdir </> shimdir)
me <- readSymbolicLink "/proc/self/exe"
createDirectoryIfMissing True "bin"
unlessM (boolSystem "cp" [File me, File "bin/propellor"]) $
errorMessage "failed copying in propellor"
let bin = "bin/propellor"
let binpath = Just $ localdir </> bin
void $ Shim.setup bin binpath "."
changeWorkingDirectory tmpdir
withTmpFile "propellor.tar." $ \tarball _ -> allM id
[ boolSystem "strip" [File me]
, boolSystem "tar" [Param "czf", File tarball, File shimdir]
, boolSystem "scp" $ cacheparams ++ [File tarball, Param ("root@"++hn++":"++remotetarball)]
, boolSystem "ssh" $ cacheparams ++ [Param ("root@"++hn), Param unpackcmd]
]
remotetarball = "/usr/local/propellor.tar"
unpackcmd = shellWrap $ intercalate " && "
[ "cd " ++ takeDirectory remotetarball
, "tar xzf " ++ remotetarball
, "rm -f " ++ remotetarball
]
-- Shim for git push over the propellor ssh channel.
-- Reads from stdin and sends it to hout;
-- reads from hin and sends it to stdout.
gitPushHelper :: Fd -> Fd -> IO ()
gitPushHelper hin hout = void $ fromstdin `concurrently` tostdout
  where
	-- stdin -> hout copy, run concurrently with the hin -> stdout copy.
	fromstdin = do
		h <- fdToHandle hout
		connect stdin h
	tostdout = do
		h <- fdToHandle hin
		connect h stdout
	-- Shuttle bytes from one handle to the other in chunks of up to
	-- 40960 bytes, flushing after each write; close both handles at EOF.
	connect fromh toh = do
		hSetBinaryMode fromh True
		hSetBinaryMode toh True
		b <- B.hGetSome fromh 40960
		if B.null b
			then do
				hClose fromh
				hClose toh
			else do
				B.hPut toh b
				hFlush toh
				connect fromh toh
-- | Collapse the automatic spin commits made since the last
-- human-authored commit into a single merge commit on the current
-- branch.
mergeSpin :: IO ()
mergeSpin = do
	branch <- getCurrentBranch
	branchref <- getCurrentBranchRef
	old_head <- getCurrentGitSha1 branch
	old_commit <- findLastNonSpinCommit
	-- Reset back to the last non-spin commit, re-commit the work tree,
	-- then merge the old head with strategy "ours" (gpg-signed when
	-- configured).
	rungit "reset" [Param old_commit]
	rungit "commit" [Param "-a", Param "--allow-empty"]
	rungit "merge" =<< gpgSignParams [Param "-s", Param "ours", Param old_head]
	current_commit <- getCurrentGitSha1 branch
	rungit "update-ref" [Param branchref, Param current_commit]
	rungit "checkout" [Param branch]
  where
	-- Run a git subcommand, erroring out when it fails.
	rungit cmd ps = unlessM (boolSystem "git" (Param cmd:ps)) $
		error ("git " ++ cmd ++ " failed")
-- | The sha of the most recent commit whose message is not the
-- automatic 'spinCommitMessage'.  Errors out when every commit in the
-- log carries the spin message.
findLastNonSpinCommit :: IO String
findLastNonSpinCommit = do
	out <- readProcess "git" ["log", "--oneline", "--no-abbrev-commit"]
	let entries = map (separate (== ' ')) (lines out)
	let isspin (_, msg) = msg == spinCommitMessage
	case dropWhile isspin entries of
		((sha, _):_) -> return sha
		_ -> error $ "Did not find any previous commit that was not a " ++ show spinCommitMessage
-- | Commit message used for the commits propellor makes automatically
-- when spinning a host; 'findLastNonSpinCommit' and 'mergeSpin' match
-- on it to identify those commits later.
spinCommitMessage :: String
spinCommitMessage = "propellor spin"
|
sjfloat/propellor
|
src/Propellor/Spin.hs
|
bsd-2-clause
| 11,280
| 128
| 19
| 2,233
| 3,429
| 1,690
| 1,739
| 268
| 6
|
{-# LANGUAGE CPP #-}
#ifndef MIN_VERSION_base
#define MIN_VERSION_base(x,y,z) 1
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Trans.TH
-- Copyright : (C) 2008-2013 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : MPTCs, fundeps
--
-- Automatic generation of free monadic actions.
--
----------------------------------------------------------------------------
module Control.Monad.Free.TH
(
-- * Free monadic actions
makeFree,
makeFree_,
makeFreeCon,
makeFreeCon_,
-- * Documentation
-- $doc
-- * Examples
-- $examples
) where
import Control.Arrow
import Control.Monad
import Data.Char (toLower)
import Language.Haskell.TH
#if !(MIN_VERSION_base(4,8,0))
import Control.Applicative
#endif
-- | A constructor argument: either one that mentions the data type's
-- final parameter ('Captured', paired with the expression used to build
-- the operation's result) or a plain parameter that does not ('Param').
data Arg
  = Captured Type Exp
  | Param Type
  deriving (Show)

-- | The types of the plain (non-captured) arguments, in order.
params :: [Arg] -> [Type]
params as = [t | Param t <- as]

-- | The type/expression pairs of the captured arguments, in order.
captured :: [Arg] -> [(Type, Exp)]
captured as = [(t, e) | Captured t e <- as]

-- | Interleave parameter expressions and captured expressions back into
-- the original argument order described by the 'Arg' list.
zipExprs :: [Exp] -> [Exp] -> [Arg] -> [Exp]
zipExprs pexps cexps (arg : rest) = case arg of
  Param _      | (p:ps) <- pexps -> p : zipExprs ps cexps rest
  Captured _ _ | (c:cs) <- cexps -> c : zipExprs pexps cs rest
  _ -> []
zipExprs _ _ [] = []
-- | The name bound by a type-variable binder, ignoring any kind
-- annotation.
tyVarBndrName :: TyVarBndr -> Name
tyVarBndrName tvb = case tvb of
  PlainTV n    -> n
  KindedTV n _ -> n
-- | Resolve a type name in the current scope, failing with a clear
-- message when it is not visible.
findTypeOrFail :: String -> Q Name
findTypeOrFail s = do
  found <- lookupTypeName s
  case found of
    Just n  -> return n
    Nothing -> fail (s ++ " is not in scope")
-- | Resolve a value name in the current scope, failing with a clear
-- message when it is not visible.
findValueOrFail :: String -> Q Name
-- Bug fix: the original message was 's ++ "is not in scope"', which
-- printed e.g. "liftFis not in scope"; add the missing space, matching
-- 'findTypeOrFail'.
findValueOrFail s = lookupValueName s >>= maybe (fail $ s ++ " is not in scope") return
-- | Pick a name for an operation.
-- For normal constructors it lowers the first letter
-- (e.g. @FooBar@ becomes @fooBar@); for infix ones it drops the
-- leading @:@ (e.g. @:+@ becomes @+@).
mkOpName :: String -> Q String
mkOpName s = case s of
  ':' : rest -> return rest
  c : rest   -> return (toLower c : rest)
  []         -> fail "null constructor name"
-- | Check whether the given type variable occurs free in a type.
-- A 'ForallT' shadows the variable when it rebinds the same name.
usesTV :: Name -> Type -> Bool
usesTV n ty = case ty of
  VarT v         -> v == n
  AppT t1 t2     -> usesTV n t1 || usesTV n t2
  SigT t _       -> usesTV n t
  ForallT bs _ t -> usesTV n t && n `notElem` map tyVarBndrName bs
  _              -> False
-- | Analyze constructor argument.
--
-- An argument that never mentions the data type's final parameter @n@
-- stays a plain 'Param'.  One that does mention it becomes 'Captured',
-- paired with the tuple type it contributes to the operation's return
-- value and the lambda that packs the supplied values into that tuple.
mkArg :: Name -> Type -> Q Arg
mkArg n t
  | usesTV n t =
    case t of
      -- if parameter is used as is, the return type should be ()
      -- as well as the corresponding expression
      VarT _ -> return $ Captured (TupleT 0) (TupE [])
      -- if argument is of type (a1 -> ... -> aN -> param) then the
      -- return type is N-tuple (a1, ..., aN) and the corresponding
      -- expression is an N-tuple section (,...,).
      AppT (AppT ArrowT _) _ -> do
        (ts, name) <- arrowsToTuple t
        when (name /= n) $ fail "return type is not the parameter"
        let tup = foldl AppT (TupleT $ length ts) ts
        xs <- mapM (const $ newName "x") ts
        return $ Captured tup (LamE (map VarP xs) (TupE (map VarE xs)))
      _ -> fail "don't know how to make Arg"
  | otherwise = return $ Param t
  where
    -- Peel (t1 -> t2 -> ... -> VarT name) into ([t1, t2, ...], name);
    -- fails when the final result is not a bare type variable.
    arrowsToTuple (AppT (AppT ArrowT t1) (VarT name)) = return ([t1], name)
    arrowsToTuple (AppT (AppT ArrowT t1) t2) = do
      (ts, name) <- arrowsToTuple t2
      return (t1:ts, name)
    arrowsToTuple _ = fail "return type is not a variable"
-- | Apply a transformation to the body of a (possibly nested) lambda,
-- keeping its parameter list intact; on a non-lambda, apply it
-- directly.  E.g. @mapRet Just (\x y z -> x + y * z)@ becomes
-- @\x y z -> Just (x + y * z)@.
mapRet :: (Exp -> Exp) -> Exp -> Exp
mapRet f e = case e of
  LamE pats body -> LamE pats (mapRet f body)
  other          -> f other
-- | Unification of two types.
-- @next@ with @a -> next@ gives @Maybe a@ return type
-- @a -> next@ with @b -> next@ gives @Either a b@ return type
unifyT :: (Type, Exp) -> (Type, Exp) -> Q (Type, [Exp])
unifyT (TupleT 0, _) (TupleT 0, _) = fail "can't accept 2 mere parameters"
unifyT (TupleT 0, _) (t, e) = do
maybe' <- ConT <$> findTypeOrFail "Maybe"
nothing' <- ConE <$> findValueOrFail "Nothing"
just' <- ConE <$> findValueOrFail "Just"
return (AppT maybe' t, [nothing', mapRet (AppE just') e])
unifyT x y@(TupleT 0, _) = second reverse <$> unifyT y x
unifyT (t1, e1) (t2, e2) = do
either' <- ConT <$> findTypeOrFail "Either"
left' <- ConE <$> findValueOrFail "Left"
right' <- ConE <$> findValueOrFail "Right"
return (AppT (AppT either' t1) t2, [mapRet (AppE left') e1, mapRet (AppE right') e2])
-- | Unify a list of captured types (possibly refining their
-- expressions).  The supplied name is used as a fresh return-type
-- variable when nothing was captured; more than two captures cannot be
-- unified.
unifyCaptured :: Name -> [(Type, Exp)] -> Q (Type, [Exp])
unifyCaptured fresh cs = case cs of
  []       -> return (VarT fresh, [])
  [(t, e)] -> return (t, [e])
  [x, y]   -> unifyT x y
  _        -> fail "can't unify more than 2 arguments that use type parameter"
liftCon' :: Bool -> [TyVarBndr] -> Cxt -> Type -> Name -> [Name] -> Name -> [Type] -> Q [Dec]
liftCon' typeSig tvbs cx f n ns cn ts = do
-- prepare some names
opName <- mkName <$> mkOpName (nameBase cn)
m <- newName "m"
a <- newName "a"
monadFree <- findTypeOrFail "MonadFree"
liftF <- findValueOrFail "liftF"
-- look at the constructor parameters
args <- mapM (mkArg n) ts
let ps = params args -- these are not using type parameter
cs = captured args -- these capture it somehow
-- based on cs we get return type and refined expressions
-- (e.g. with Nothing/Just or Left/Right tags)
(retType, es) <- unifyCaptured a cs
-- operation type is (a1 -> a2 -> ... -> aN -> m r)
let opType = foldr (AppT . AppT ArrowT) (AppT (VarT m) retType) ps
-- picking names for the implementation
xs <- mapM (const $ newName "p") ps
let pat = map VarP xs -- this is LHS
exprs = zipExprs (map VarE xs) es args -- this is what ctor would be applied to
fval = foldl AppE (ConE cn) exprs -- this is RHS without liftF
q = tvbs ++ map PlainTV (qa ++ m : ns)
qa = case retType of VarT b | a == b -> [a]; _ -> []
f' = foldl AppT f (map VarT ns)
return $ concat
[ if typeSig
#if MIN_VERSION_template_haskell(2,10,0)
then [ SigD opName (ForallT q (cx ++ [ConT monadFree `AppT` f' `AppT` VarT m]) opType) ]
#else
then [ SigD opName (ForallT q (cx ++ [ClassP monadFree [f', VarT m]]) opType) ]
#endif
else []
, [ FunD opName [ Clause pat (NormalB $ AppE (VarE liftF) fval) [] ] ] ]
-- | Provide free monadic actions for a single value constructor.
--
-- Dispatches on the constructor's shape to collect its field types and
-- hands off to 'liftCon''.  Existentially quantified constructors are
-- unwrapped by accumulating their binders and context first.
liftCon :: Bool -> [TyVarBndr] -> Cxt -> Type -> Name -> [Name] -> Con -> Q [Dec]
liftCon typeSig ts cx f n ns con =
  case con of
    NormalC cName fields -> liftCon' typeSig ts cx f n ns cName $ map snd fields
    RecC cName fields -> liftCon' typeSig ts cx f n ns cName $ map (\(_, _, ty) -> ty) fields
    InfixC (_,t1) cName (_,t2) -> liftCon' typeSig ts cx f n ns cName [t1, t2]
    ForallC ts' cx' con' -> liftCon typeSig (ts ++ ts') (cx ++ cx') f n ns con'
-- | Provide free monadic actions for a type declaration.
liftDec :: Bool -- ^ Include type signature?
-> Maybe [Name] -- ^ Include only mentioned constructor names. Use all constructors when @Nothing@.
-> Dec -- ^ Data type declaration.
-> Q [Dec]
liftDec typeSig onlyCons (DataD _ tyName tyVarBndrs cons _)
| null tyVarBndrs = fail $ "Type " ++ show tyName ++ " needs at least one free variable"
| otherwise = concat <$> mapM (liftCon typeSig [] [] con nextTyName (init tyNames)) cons'
where
cons' = case onlyCons of
Nothing -> cons
Just ns -> filter (\c -> constructorName c `elem` ns) cons
tyNames = map tyVarBndrName tyVarBndrs
nextTyName = last tyNames
con = ConT tyName
liftDec _ _ dec = fail $ "liftDec: Don't know how to lift " ++ show dec
-- | Get a constructor's name, looking through any existential
-- quantification.
constructorName :: Con -> Name
constructorName con = case con of
  NormalC name _  -> name
  RecC name _     -> name
  InfixC _ name _ -> name
  ForallC _ _ c   -> constructorName c
-- | Generate monadic actions for a data type.
genFree :: Bool -- ^ Include type signature?
        -> Maybe [Name] -- ^ Include only mentioned constructor names. Use all constructors when @Nothing@.
        -> Name -- ^ Type name.
        -> Q [Dec] -- ^ Generated declarations.
genFree typeSig cnames tyCon = reify tyCon >>= fromInfo
  where
    -- Only plain type constructor declarations can be lifted.
    fromInfo (TyConI dec) = liftDec typeSig cnames dec
    fromInfo _ = fail "makeFree expects a type constructor"
-- | Generate monadic action for a single constructor of a data type.
genFreeCon :: Bool -- ^ Include type signature?
-> Name -- ^ Constructor name.
-> Q [Dec] -- ^ Generated declarations.
genFreeCon typeSig cname = do
info <- reify cname
case info of
DataConI _ _ tname _ -> genFree typeSig (Just [cname]) tname
_ -> fail "makeFreeCon expects a data constructor"
-- | @$('makeFree' ''T)@ provides free monadic actions for the
-- constructors of the given data type @T@.
makeFree :: Name -> Q [Dec]
makeFree = genFree True Nothing
-- | Like 'makeFreeCon', but does not provide type signatures.
-- This can be used to attach Haddock comments to individual arguments
-- for each generated function.
--
-- @
-- data LangF x = Output String x
--
-- makeFree_ 'LangF
--
-- -- | Output a string.
-- output :: MonadFree LangF m =>
-- String -- ^ String to output.
-- -> m () -- ^ No result.
-- @
--
-- 'makeFree_' must be called *before* the explicit type signatures.
makeFree_ :: Name -> Q [Dec]
makeFree_ = genFree False Nothing
-- | @$('makeFreeCon' 'Con)@ provides free monadic action for a data
-- constructor @Con@. Note that you can attach Haddock comment to the
-- generated function by placing it before the top-level invocation of
-- 'makeFreeCon':
--
-- @
-- -- | Output a string.
-- makeFreeCon 'Output
-- @
makeFreeCon :: Name -> Q [Dec]
makeFreeCon = genFreeCon True
-- | Like 'makeFreeCon', but does not provide a type signature.
-- This can be used to attach Haddock comments to individual arguments.
--
-- @
-- data LangF x = Output String x
--
-- makeFreeCon_ 'Output
--
-- -- | Output a string.
-- output :: MonadFree LangF m =>
-- String -- ^ String to output.
-- -> m () -- ^ No result.
-- @
--
-- 'makeFreeCon_' must be called *before* the explicit type signature.
makeFreeCon_ :: Name -> Q [Dec]
makeFreeCon_ = genFreeCon False
{- $doc
To generate free monadic actions from a @Type@, it must be a @data@
declaration (maybe GADT) with at least one free variable. For each constructor of the type, a
new function will be declared.
Consider the following generalized definitions:
> data Type a1 a2 … aN param = …
> | FooBar t1 t2 t3 … tJ
> | (:+) t1 t2 t3 … tJ
> | t1 :* t2
> | t1 `Bar` t2
> | Baz { x :: t1, y :: t2, …, z :: tJ }
> | forall b1 b2 … bN. cxt => Qux t1 t2 … tJ
> | …
where each of the constructor arguments @t1, …, tJ@ is either:
1. A type, perhaps depending on some of the @a1, …, aN@.
2. A type dependent on @param@, of the form @s1 -> … -> sM -> param@, M ≥ 0.
At most 2 of the @t1, …, tJ@ may be of this form. And, out of these two,
at most 1 of them may have @M == 0@; that is, be of the form @param@.
For each constructor, a function will be generated. First, the name
of the function is derived from the name of the constructor:
* For prefix constructors, the name of the constructor with the first
letter in lowercase (e.g. @FooBar@ turns into @fooBar@).
-- * For infix constructors, the name of the constructor with the first
-- character (a colon @:@) removed (e.g. @:+@ turns into @+@).
Then, the type of the function is derived from the arguments to the constructor:
> …
> fooBar :: (MonadFree Type m) => t1' -> … -> tK' -> m ret
> (+) :: (MonadFree Type m) => t1' -> … -> tK' -> m ret
> bar :: (MonadFree Type m) => t1 -> … -> tK' -> m ret
> baz :: (MonadFree Type m) => t1' -> … -> tK' -> m ret
> qux :: (MonadFree Type m, cxt) => t1' -> … -> tK' -> m ret
> …
The @t1', …, tK'@ are those @t1@ … @tJ@ that only depend on the
@a1, …, aN@.
The type @ret@ depends on those constructor arguments that reference the
@param@ type variable:
1. If no arguments to the constructor depend on @param@, @ret ≡ a@, where
@a@ is a fresh type variable.
2. If only one argument in the constructor depends on @param@, then
@ret ≡ (s1, …, sM)@. In particular, if @M == 0@, then @ret ≡ ()@; if @M == 1@, @ret ≡ s1@.
3. If two arguments depend on @param@, (e.g. @u1 -> … -> uL -> param@ and
@v1 -> … -> vM -> param@, then @ret ≡ Either (u1, …, uL) (v1, …, vM)@.
Note that @Either a ()@ and @Either () a@ are both isomorphic to @Maybe a@.
Because of this, when @L == 0@ or @M == 0@ in case 3., the type of
@ret@ is simplified:
* @ret ≡ Either (u1, …, uL) ()@ is rewritten to @ret ≡ Maybe (u1, …, uL)@.
* @ret ≡ Either () (v1, …, vM)@ is rewritten to @ret ≡ Maybe (v1, …, vM)@.
-}
{- $examples
<examples/Teletype.lhs Teletype> (regular data type declaration)
<examples/RetryTH.hs Retry> (GADT declaration)
-}
|
Chobbes/free
|
src/Control/Monad/Free/TH.hs
|
bsd-3-clause
| 13,673
| 0
| 20
| 3,480
| 2,977
| 1,554
| 1,423
| 158
| 5
|
{-# LANGUAGE CPP, GADTs, ViewPatterns #-}
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
----------------------------------------------------------------------
-- |
-- Module : LambdaCCC.OddEvenSort
-- Copyright : (c) 2014 Tabula, Inc.
--
-- Maintainer : conal@tabula.com
-- Stability : experimental
--
-- Batcher's even-odd merge sort
----------------------------------------------------------------------
module LambdaCCC.OddEvenSort where
-- TODO: explicit exports
import Data.Functor ((<$>))
import Data.Foldable (toList)
import Data.Traversable (Traversable)
import Control.Arrow (first)
import TypeUnary.TyNat (Z,S)
import TypeUnary.TyNat (N1,N2,N3,N4)
import TypeUnary.Nat (Nat(..),IsNat(..))
import Circat.Misc (Unop,transpose,inTranspose)
import Circat.Pair (Pair(..),sortP)
import Circat.RTree
import Circat.Shift (shiftL,shiftR)
-- | Batcher's merge sort over a depth-indexed binary tree, driven by
-- the tree depth witness from 'nat'.
msort :: (IsNat n, Ord a, Bounded a) => Unop (RTree n a)
msort = msort' nat

-- | Worker: a depth-0 tree is already sorted; otherwise sort both
-- subtrees recursively, then merge them.
msort' :: (IsNat n, Ord a, Bounded a) => Nat n -> Unop (RTree n a)
msort' Zero = id
msort' (Succ m) = inB (merge . fmap (msort' m))

-- NOTE(review): 'merge' is still a placeholder ('undefined'); calling
-- 'msort' on any tree of depth >= 1 will crash until it is implemented
-- (a draft lives in the #if 0 block below).
merge :: (Ord a, Bounded a) => Unop (Pair (RTree n a))
merge = undefined
#if 0
msort = msort' nat
msort' :: (Ord a, Bounded a) => Nat n -> Unop (RTree n a)
msort' Zero = id
msort' (Succ m) = B . merge m . fmap (msort' m) . unB
merge :: (Ord a, Bounded a) => Nat n -> Pair (LTree n a) -> LTree n (Pair a)
merge Zero = \ (L a :# L b) -> L (sortP (a :# b))
merge (Succ m) = tweak . transpose . fmap (merge m)
-- merge (Succ m) = inB (fmap sortP . (inTranspose.fmap) (merge m))
#if 0
B ::
transpose :: Pair (LTree (S m) a) -> LTree (S m) (Pair a)
unB :: LTree (S m) a -> LTree m (Pair a)
transpose :: LTree m (Pair a) -> Pair (LTree m a)
fmap (merge m) :: Pair (LTree m a) -> Pair (LTree m a)
transpose :: Pair (LTree m a) -> LTree m (Pair a)
tweak :: LTree m (Pair a) -> LTree m (Pair a)
B :: LTree m (Pair a) -> LTree (S m) a
#endif
-- Oops! I have to compare odd/even, not even/odd.
tweak :: (Bounded a, Ord a) => Unop (LTree n (Pair a))
tweak ps = unB (fst (shiftL (bot,B (sortP <$> ps'))))
where
(bot,unB -> ps') = shiftR (B ps,maxBound)
#if 0
ps :: LTree n (Pair a)
B ps :: LTree (S n) a
(B ps, maxBound) :: (LTree (S n) a, a)
shiftR (B ps, maxBound) :: (a, LTree (S n) a)
bot :: a
ps' :: LTree n (Pair a)
sortP <$> ps' :: LTree n (Pair a)
B (sortP <$> ps') :: LTree (S n) a
(bot,B (sortP <$> ps')) :: (a,LTree (S n) a)
fst (shiftL (bot,B (sortP <$> ps'))) :: LTree (S n) a
unB (fst (shiftL (bot,B (sortP <$> ps')))) :: LTree n (Pair a)
#endif
#endif
ps0 :: RTree N1 (Pair Int)
ps0 = fromList [(1 :# 4),(3 :# 5)]
-- tweak = fmap sortP
{--------------------------------------------------------------------
Tests
--------------------------------------------------------------------}
test :: (IsNat n, Ord a, Bounded a) => RTree n a -> [a]
test = toList . msort
_t1 :: RTree N1 Int
_t1 = tree1 4 3
_t2 :: RTree N2 Int
_t2 = tree2 4 3 1 5
_t3 :: RTree N3 Int
_t3 = tree3 4 3 7 1 9 5 2 6
_t4 :: RTree N4 Int
_t4 = tree4 4 12 3 16 8 11 15 7 1 10 9 14 5 13 2 6
|
conal/lambda-ccc
|
src/LambdaCCC/OddEvenSort.hs
|
bsd-3-clause
| 3,491
| 28
| 15
| 972
| 1,425
| 751
| 674
| 34
| 1
|
module Test001 where
import Kask.Time
import Prelude (print)
import RIO
-- | Sum a ten-million element list while logging roughly how long the
-- computation took, then print the logged value.
test1 :: IO ()
test1 = do
  let coll = [1 .. 10000000] :: [Integer]
  let s = sum coll
  -- Fixed typo in the log message: "sthing" -> "something".
  value <- logging "Computations took something like " (withMsecs s) " msecs"
  print value
|
kongra/kask-base
|
app/Test001.hs
|
bsd-3-clause
| 285
| 0
| 10
| 89
| 94
| 49
| 45
| 10
| 1
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Init
-- Copyright : (c) Brent Yorgey 2009
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Implementation of the 'cabal init' command, which creates an initial .cabal
-- file for a project.
--
-----------------------------------------------------------------------------
module Distribution.Client.Init (
-- * Commands
initCabal
) where
import System.IO
( hSetBuffering, stdout, BufferMode(..) )
import System.Directory
( getCurrentDirectory, doesDirectoryExist, doesFileExist, copyFile
, getDirectoryContents )
import System.FilePath
( (</>), (<.>), takeBaseName )
import Data.Time
( getCurrentTime, utcToLocalTime, toGregorian, localDay, getCurrentTimeZone )
import Data.Char
( toUpper )
import Data.List
( intercalate, nub, groupBy, (\\) )
import Data.Maybe
( fromMaybe, isJust, catMaybes )
import Data.Function
( on )
import qualified Data.Map as M
import Data.Traversable
( traverse )
import Control.Applicative
( (<$>) )
import Control.Monad
( when, unless, (>=>), join )
import Control.Arrow
( (&&&), (***) )
import Text.PrettyPrint hiding (mode, cat)
import Data.Version
( Version(..) )
import Distribution.Version
( orLaterVersion, earlierVersion, intersectVersionRanges, VersionRange )
import Distribution.Verbosity
( Verbosity )
import Distribution.ModuleName
( ModuleName, fromString ) -- And for the Text instance
import Distribution.InstalledPackageInfo
( InstalledPackageInfo, sourcePackageId, exposed )
import qualified Distribution.Package as P
import Language.Haskell.Extension ( Language(..) )
import Distribution.Client.Init.Types
( InitFlags(..), PackageType(..), Category(..) )
import Distribution.Client.Init.Licenses
( bsd3, gplv2, gplv3, lgpl2, lgpl3, agplv3, apache20 )
import Distribution.Client.Init.Heuristics
( guessPackageName, guessAuthorNameMail, SourceFileEntry(..),
scanForModules, neededBuildPrograms )
import Distribution.License
( License(..), knownLicenses )
import Distribution.ReadE
( runReadE, readP_to_E )
import Distribution.Simple.Setup
( Flag(..), flagToMaybe )
import Distribution.Simple.Configure
( getInstalledPackages )
import Distribution.Simple.Compiler
( PackageDBStack, Compiler )
import Distribution.Simple.Program
( ProgramConfiguration )
import Distribution.Simple.PackageIndex
( PackageIndex, moduleNameIndex )
import Distribution.Text
( display, Text(..) )
-- | Entry point for @cabal init@: complete the user-supplied
-- 'InitFlags' (by guessing or interactive prompting), then generate
-- the LICENSE, Setup.hs and .cabal files and print any warnings.
initCabal :: Verbosity
          -> PackageDBStack
          -> Compiler
          -> ProgramConfiguration
          -> InitFlags
          -> IO ()
initCabal verbosity packageDBs comp conf initFlags = do
  installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
  -- Unbuffered so prompts appear before we block on user input.
  hSetBuffering stdout NoBuffering
  initFlags' <- extendFlags installedPkgIndex initFlags
  writeLicense initFlags'
  writeSetupFile initFlags'
  success <- writeCabalFile initFlags'
  when success $ generateWarnings initFlags'
---------------------------------------------------------------------------
-- Flag acquisition -----------------------------------------------------
---------------------------------------------------------------------------
-- | Fill in more details by guessing, discovering, or prompting the
-- user.
extendFlags :: PackageIndex -> InitFlags -> IO InitFlags
extendFlags pkgIx =
  -- One Kleisli step per group of fields; the order matters (e.g. the
  -- source dir must be settled before modules/deps are scanned).
      getPackageName
  >=> getVersion
  >=> getLicense
  >=> getAuthorInfo
  >=> getHomepage
  >=> getSynopsis
  >=> getCategory
  >=> getExtraSourceFiles
  >=> getLibOrExec
  >=> getLanguage
  >=> getGenComments
  >=> getSrcDir
  >=> getModulesBuildToolsAndDeps pkgIx
-- | Combine two actions which may return a value, preferring the first. That
-- is, run the second action only if the first doesn't return a value.
infixr 1 ?>>
(?>>) :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a)
-- Keep the first hit; only fall through to the second action when the
-- first yields Nothing.
f ?>> g = f >>= maybe g (return . Just)
-- | Witness one direction of the isomorphism between 'Maybe' and
-- 'Flag': 'Nothing' maps to 'NoFlag', 'Just' to 'Flag'.
maybeToFlag :: Maybe a -> Flag a
maybeToFlag Nothing  = NoFlag
maybeToFlag (Just x) = Flag x
-- | Get the package name: use the package directory (supplied, or the current
-- directory by default) as a guess.
getPackageName :: InitFlags -> IO InitFlags
getPackageName flags = do
  -- Guess from the supplied package directory, or failing that, from
  -- the current working directory.
  guess    <- traverse guessPackageName (flagToMaybe $ packageDir flags)
              ?>> Just `fmap` (getCurrentDirectory >>= guessPackageName)
  -- Prefer an explicitly given name, then a prompt, then the guess.
  pkgName' <- return (flagToMaybe $ packageName flags)
              ?>> maybePrompt flags (promptStr "Package name" guess)
              ?>> return guess
  return $ flags { packageName = maybeToFlag pkgName' }
-- | Package version: use 0.1.0.0 as a last resort, but try prompting the user
-- if possible.
getVersion :: InitFlags -> IO InitFlags
getVersion flags = do
  -- 0.1.0.0 is both the prompt default and the non-interactive fallback.
  let v = Just $ Version { versionBranch = [0,1,0,0], versionTags = [] }
  v' <- return (flagToMaybe $ version flags)
        ?>> maybePrompt flags (prompt "Package version" v)
        ?>> return v
  return $ flags { version = maybeToFlag v' }
-- | Choose a license.
getLicense :: InitFlags -> IO InitFlags
getLicense flags = do
  -- A free-form ('other') answer is wrapped in 'UnknownLicense'.
  lic <- return (flagToMaybe $ license flags)
         ?>> fmap (fmap (either UnknownLicense id) . join)
             (maybePrompt flags
               (promptListOptional "Please choose a license" listedLicenses))
  return $ flags { license = maybeToFlag lic }
  where
    -- Version-parameterised licenses without a version, and
    -- OtherLicense, make no sense as menu entries.
    listedLicenses =
      knownLicenses \\ [GPL Nothing, LGPL Nothing, AGPL Nothing
                       , Apache Nothing, OtherLicense]
-- | The author's name and email. Prompt, or try to guess from an existing
-- darcs repo.
getAuthorInfo :: InitFlags -> IO InitFlags
getAuthorInfo flags = do
  -- Both guesses come back as Flags; strip them to Maybe for ?>>.
  (authorName, authorEmail) <-
    (flagToMaybe *** flagToMaybe) `fmap` guessAuthorNameMail
  authorName'  <- return (flagToMaybe $ author flags)
                  ?>> maybePrompt flags (promptStr "Author name" authorName)
                  ?>> return authorName
  authorEmail' <- return (flagToMaybe $ email flags)
                  ?>> maybePrompt flags (promptStr "Maintainer email" authorEmail)
                  ?>> return authorEmail
  return $ flags { author = maybeToFlag authorName'
                 , email  = maybeToFlag authorEmail'
                 }
-- | Prompt for a homepage URL.
getHomepage :: InitFlags -> IO InitFlags
getHomepage flags = do
  -- queryHomepage currently always yields Nothing (see below), so the
  -- prompt default is empty.
  hp  <- queryHomepage
  hp' <- return (flagToMaybe $ homepage flags)
         ?>> maybePrompt flags (promptStr "Project homepage URL" hp)
         ?>> return hp
  return $ flags { homepage = maybeToFlag hp' }
-- | Right now this does nothing, but it could be changed to do some
-- intelligent guessing.
queryHomepage :: IO (Maybe String)
-- Always Nothing for now; could e.g. look up a default remote darcs repo.
queryHomepage = return Nothing
-- | Prompt for a project synopsis.
getSynopsis :: InitFlags -> IO InitFlags
getSynopsis flags = do
  -- No guessing here: either the flag was given or the user is asked.
  syn <- return (flagToMaybe $ synopsis flags)
         ?>> maybePrompt flags (promptStr "Project synopsis" Nothing)
  return $ flags { synopsis = maybeToFlag syn }
-- | Prompt for a package category.
-- Note that it should be possible to do some smarter guessing here too, i.e.
-- look at the name of the top level source directory.
getCategory :: InitFlags -> IO InitFlags
getCategory flags = do
  -- [Codec ..] enumerates every known Category; free-form answers are
  -- kept as a Left String inside Category's Either-like encoding.
  cat <- return (flagToMaybe $ category flags)
         ?>> fmap join (maybePrompt flags
                         (promptListOptional "Project category" [Codec ..]))
  return $ flags { category = maybeToFlag cat }
-- | Try to guess extra source files (don't prompt the user).
getExtraSourceFiles :: InitFlags -> IO InitFlags
getExtraSourceFiles flags = do
  -- Never prompts; only guesses when the flag is unset.
  extraSrcFiles <- return (extraSrc flags)
                   ?>> Just `fmap` guessExtraSourceFiles flags
  return $ flags { extraSrc = extraSrcFiles }
-- | Try to guess things to include in the extra-source-files field.
-- For now, we just look for things in the root directory named
-- 'readme', 'changes', or 'changelog', with any sort of
-- capitalization and any extension.
guessExtraSourceFiles :: InitFlags -> IO [FilePath]
guessExtraSourceFiles flags = do
  dir <-
    maybe getCurrentDirectory return . flagToMaybe $ packageDir flags
  files <- getDirectoryContents dir
  return $ filter isExtra files
  where
    -- Case-insensitive match on the base name; any file extension.
    isExtra = (`elem` ["README", "CHANGES", "CHANGELOG"])
            . map toUpper
            . takeBaseName
-- | Ask whether the project builds a library or executable.
getLibOrExec :: InitFlags -> IO InitFlags
getLibOrExec flags = do
  -- Free-form answers fall back to Library ('const Library'); Library
  -- is also the non-interactive default.
  isLib <- return (flagToMaybe $ packageType flags)
           ?>> maybePrompt flags (either (const Library) id `fmap`
                                  promptList "What does the package build"
                                             [Library, Executable]
                                             Nothing display False)
           ?>> return (Just Library)
  return $ flags { packageType = maybeToFlag isLib }
-- | Ask for the base language of the package.
getLanguage :: InitFlags -> IO InitFlags
getLanguage flags = do
  -- Free-form answers are wrapped in 'UnknownLanguage'; Haskell2010 is
  -- both prompt default and non-interactive fallback.
  lang <- return (flagToMaybe $ language flags)
          ?>> maybePrompt flags
                (either UnknownLanguage id `fmap`
                 promptList "What base language is the package written in"
                            [Haskell2010, Haskell98]
                            (Just Haskell2010) display True)
          ?>> return (Just Haskell2010)
  return $ flags { language = maybeToFlag lang }
-- | Ask whether to generate explanatory comments.
getGenComments :: InitFlags -> IO InitFlags
getGenComments flags = do
  -- The stored flag is negated ('noComments'), hence the two 'not's:
  -- once to present the positive question, once to store the answer.
  genComments <- return (not <$> flagToMaybe (noComments flags))
          ?>> maybePrompt flags (promptYesNo promptMsg (Just False))
          ?>> return (Just False)
  return $ flags { noComments = maybeToFlag (fmap not genComments) }
  where
    promptMsg = "Include documentation on what each field means (y/n)"
-- | Try to guess the source root directory (don't prompt the user).
getSrcDir :: InitFlags -> IO InitFlags
getSrcDir flags = do
  -- Never prompts; only guesses when the flag is unset.
  srcDirs <- return (sourceDirs flags)
             ?>> Just `fmap` guessSourceDirs flags
  return $ flags { sourceDirs = srcDirs }
-- | Try to guess source directories. Could try harder; for the
-- moment just looks to see whether there is a directory called 'src'.
-- | Guess source directories: report @["src"]@ exactly when a
-- directory called @src@ exists under the package directory (or the
-- current directory when no package directory was given).
guessSourceDirs :: InitFlags -> IO [String]
guessSourceDirs flags = do
  pkgDir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags
  hasSrc <- doesDirectoryExist (pkgDir </> "src")
  return ["src" | hasSrc]
-- | Get the list of exposed modules and extra tools needed to build them.
getModulesBuildToolsAndDeps :: PackageIndex -> InitFlags -> IO InitFlags
getModulesBuildToolsAndDeps pkgIx flags = do
  dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags
  -- XXX really should use guessed source roots.
  sourceFiles <- scanForModules dir
  -- The 'Just mods' pattern is total here: the right operand of ?>>
  -- always produces a Just.
  Just mods <- return (exposedModules flags)
           ?>> (return . Just . map moduleName $ sourceFiles)
  tools <- return (buildTools flags)
           ?>> (return . Just . neededBuildPrograms $ sourceFiles)
  deps <- return (dependencies flags)
          ?>> Just <$> importsToDeps flags
                       (fromString "Prelude" :  -- to ensure we get base as a dep
                         ( nub -- only need to consider each imported package once
                         . filter (`notElem` mods) -- don't consider modules from
                                                   -- this package itself
                         . concatMap imports
                         $ sourceFiles
                         )
                       )
                       pkgIx
  exts <- return (otherExts flags)
          ?>> (return . Just . nub . concatMap extensions $ sourceFiles)
  return $ flags { exposedModules = Just mods
                 , buildTools     = tools
                 , dependencies   = deps
                 , otherExts      = exts
                 }
-- | Turn a list of imported modules into package dependencies by
-- looking each module up in the installed-package index (exposed
-- packages only) and letting 'chooseDep' resolve each hit.
importsToDeps :: InitFlags -> [ModuleName] -> PackageIndex -> IO [P.Dependency]
importsToDeps flags mods pkgIx = do
  let modMap :: M.Map ModuleName [InstalledPackageInfo]
      modMap  = M.map (filter exposed) $ moduleNameIndex pkgIx
      modDeps :: [(ModuleName, Maybe [InstalledPackageInfo])]
      modDeps = map (id &&& flip M.lookup modMap) mods
  message flags "\nGuessing dependencies..."
  nub . catMaybes <$> mapM (chooseDep flags) modDeps
-- Given a module and a list of installed packages providing it,
-- choose a dependency (i.e. package + version range) to use for that
-- module.
chooseDep :: InitFlags -> (ModuleName, Maybe [InstalledPackageInfo])
          -> IO (Maybe P.Dependency)
-- No provider at all (module absent from the index, or present with no
-- exposed packages): warn and return no dependency.
chooseDep flags (m, Nothing)
  = message flags ("\nWarning: no package found providing " ++ display m ++ ".")
    >> return Nothing
chooseDep flags (m, Just [])
  = message flags ("\nWarning: no package found providing " ++ display m ++ ".")
    >> return Nothing
-- We found some packages: group them by name.
chooseDep flags (m, Just ps)
  = case pkgGroups of
      -- if there's only one group, i.e. multiple versions of a single package,
      -- we make it into a dependency, choosing the latest-ish version (see toDep).
      [grp] -> Just <$> toDep grp
      -- otherwise, we refuse to choose between different packages and make the user
      -- do it.
      grps  -> do message flags ("\nWarning: multiple packages found providing "
                                 ++ display m
                                 ++ ": " ++ intercalate ", " (map (display . P.pkgName . head) grps))
                  message flags "You will need to pick one and manually add it to the Build-depends: field."
                  return Nothing
  where
    pkgGroups = groupBy ((==) `on` P.pkgName) (map sourcePackageId ps)
    -- Given a list of available versions of the same package, pick a dependency.
    toDep :: [P.PackageIdentifier] -> IO P.Dependency
    -- If only one version, easy. We change e.g. 0.4.2 into 0.4.*
    toDep [pid] = return $ P.Dependency (P.pkgName pid) (pvpize . P.pkgVersion $ pid)
    -- Otherwise, choose the latest version and issue a warning.
    toDep pids  = do
      message flags ("\nWarning: multiple versions of " ++ display (P.pkgName . head $ pids) ++ " provide " ++ display m ++ ", choosing the latest.")
      return $ P.Dependency (P.pkgName . head $ pids)
                            (pvpize . maximum . map P.pkgVersion $ pids)
-- | Turn a concrete version into a PVP-style range: e.g. 0.4.2
-- becomes @>= 0.4 && < 0.5@ (truncated to two components).
pvpize :: Version -> VersionRange
pvpize v = orLaterVersion v'
           `intersectVersionRanges`
           earlierVersion (incVersion 1 v')
  where v' = (v { versionBranch = take 2 (versionBranch v) })
-- | Increment the version component at index @n@ of the version
-- branch, dropping everything after it; missing components are padded
-- with zeros.  Tags are passed through unchanged.
incVersion :: Int -> Version -> Version
incVersion n (Version vlist tags) = Version (bump n vlist) tags
  where
    bump m vs = case (m, vs) of
      (0, [])      -> [1]
      (0, v : _)   -> [v + 1]
      (_, [])      -> replicate m 0 ++ [1]
      (_, v : vs') -> v : bump (m - 1) vs'
---------------------------------------------------------------------------
-- Prompting/user interaction -------------------------------------------
---------------------------------------------------------------------------
-- | Run a prompt or not based on the nonInteractive flag of the
-- InitFlags structure.
maybePrompt :: InitFlags -> IO t -> IO (Maybe t)
maybePrompt flags p =
  case nonInteractive flags of
    Flag True -> return Nothing      -- non-interactive mode: never prompt
    _         -> Just `fmap` p
-- | Create a prompt with optional default value that returns a
-- String.
promptStr :: String -> Maybe String -> IO String
-- Strings always "parse", so the parser is simply Just.
promptStr = promptDefault' Just id
-- | Create a yes/no prompt with optional default value.  Accepts
-- exactly @y@/@Y@ and @n@/@N@; anything else re-prompts.
promptYesNo :: String -> Maybe Bool -> IO Bool
promptYesNo =
  promptDefault' parseYN renderYN
  where
    parseYN s
      | s `elem` ["y", "Y"] = Just True
      | s `elem` ["n", "N"] = Just False
      | otherwise           = Nothing
    renderYN True  = "y"
    renderYN False = "n"
-- | Create a prompt with optional default value that returns a value
-- of some Text instance.
prompt :: Text t => String -> Maybe t -> IO t
prompt = promptDefault'
         -- Parse failures (Left) become Nothing, triggering a re-prompt.
         (either (const Nothing) Just . runReadE (readP_to_E id parse))
         display
-- | Create a prompt with an optional default value.
promptDefault' :: (String -> Maybe t) -- ^ parser
               -> (t -> String)       -- ^ pretty-printer
               -> String              -- ^ prompt message
               -> Maybe t             -- ^ optional default value
               -> IO t
promptDefault' parser pretty pr def = do
  putStr $ mkDefPrompt pr (pretty `fmap` def)
  inp <- getLine
  case (inp, def) of
    -- Empty input selects the default when there is one.
    ("", Just d) -> return d
    _  -> case parser inp of
            Just t  -> return t
            -- Unparsable input: explain and loop until valid input
            -- (or an accepted default).
            Nothing -> do putStrLn $ "Couldn't parse " ++ inp ++ ", please try again!"
                          promptDefault' parser pretty pr def
-- | Render a prompt string, appending a @[default: ...]@ hint when a
-- default value representation is supplied.
mkDefPrompt :: String -> Maybe String -> String
mkDefPrompt pr def =
  pr ++ "?" ++ maybe " " (\d -> " [default: " ++ d ++ "] ") def
-- | Like 'promptList' but with a \"(none)\" entry (the default):
-- choosing it yields 'Nothing', a free-form answer yields
-- @Just (Left str)@, a listed choice yields @Just (Right t)@.
promptListOptional :: (Text t, Eq t)
                   => String   -- ^ prompt
                   -> [t]      -- ^ choices
                   -> IO (Maybe (Either String t))
promptListOptional pr choices =
  fmap rearrange
  $ promptList pr (Nothing : map Just choices) (Just Nothing)
               (maybe "(none)" display) True
  where
    rearrange = either (Just . Left) (fmap Right)
-- | Create a prompt from a list of items.
promptList :: Eq t
           => String        -- ^ prompt
           -> [t]           -- ^ choices
           -> Maybe t       -- ^ optional default value
           -> (t -> String) -- ^ show an item
           -> Bool          -- ^ whether to allow an 'other' option
           -> IO (Either String t)
promptList pr choices def displayItem other = do
  putStrLn $ pr ++ ":"
  -- Pair each choice with whether it is the default (shown with a *),
  -- then number the menu, appending the 'other' entry when allowed.
  let options1 = map (\c -> (Just c == def, displayItem c)) choices
      options2 = zip ([1..]::[Int])
                     (options1 ++ [(False, "Other (specify)") | other])
  mapM_ (putStrLn . \(n,(i,s)) -> showOption n i ++ s) options2
  promptList' displayItem (length options2) choices def other
  -- Extra padding keeps one- and two-digit numbers aligned.
  where showOption n i | n < 10    = " " ++ star i ++ " " ++ rest
                       | otherwise = " " ++ star i ++ rest
          where rest = show n ++ ") "
                star True  = "*"
                star False = " "
-- | Read and validate a numeric menu choice for 'promptList',
-- re-prompting on invalid input.
promptList' :: (t -> String) -> Int -> [t] -> Maybe t -> Bool -> IO (Either String t)
promptList' displayItem numChoices choices def other = do
  putStr $ mkDefPrompt "Your choice" (displayItem `fmap` def)
  inp <- getLine
  case (inp, def) of
    ("", Just d) -> return $ Right d     -- empty input picks the default
    _  -> case readMaybe inp of
            Nothing -> invalidChoice inp
            Just n  -> getChoice n
  where invalidChoice inp = do putStrLn $ inp ++ " is not a valid choice."
                               promptList' displayItem numChoices choices def other
        -- When 'other' is enabled, the last menu slot is not an index
        -- into choices but a request for a free-form (Left) answer.
        getChoice n | n < 1 || n > numChoices = invalidChoice (show n)
                    | n < numChoices ||
                      (n == numChoices && not other)
                                = return . Right $ choices !! (n-1)
                    | otherwise = Left `fmap` promptStr "Please specify" Nothing
-- | Total replacement for 'read': succeeds only when the whole input
-- parses unambiguously, with no trailing characters.
readMaybe :: (Read a) => String -> Maybe a
readMaybe str
  | [(val, "")] <- reads str = Just val
  | otherwise                = Nothing
---------------------------------------------------------------------------
-- File generation ------------------------------------------------------
---------------------------------------------------------------------------
-- | Write a LICENSE file when we have bundled text for the chosen
-- license; otherwise warn the user to supply one themselves.
writeLicense :: InitFlags -> IO ()
writeLicense flags = do
  message flags "\nGenerating LICENSE..."
  year <- getYear
  -- License texts are bundled only for the exact versions matched
  -- below; anything else falls through to Nothing.
  let licenseFile =
        case license flags of
          -- BSD3 text is parameterised by author ("???" if unknown) and year.
          Flag BSD3 -> Just $ bsd3 (fromMaybe "???"
                                    . flagToMaybe
                                    . author
                                    $ flags)
                                   (show year)
          Flag (GPL (Just (Version {versionBranch = [2]})))
            -> Just gplv2
          Flag (GPL (Just (Version {versionBranch = [3]})))
            -> Just gplv3
          Flag (LGPL (Just (Version {versionBranch = [2]})))
            -> Just lgpl2
          Flag (LGPL (Just (Version {versionBranch = [3]})))
            -> Just lgpl3
          Flag (AGPL (Just (Version {versionBranch = [3]})))
            -> Just agplv3
          Flag (Apache (Just (Version {versionBranch = [2, 0]})))
            -> Just apache20
          _ -> Nothing
  case licenseFile of
    Just licenseText -> writeFileSafe flags "LICENSE" licenseText
    Nothing -> message flags "Warning: unknown license type, you must put a copy in LICENSE yourself."
-- | The current year in the local time zone, used in the LICENSE
-- copyright line.
getYear :: IO Integer
getYear = do
  u <- getCurrentTime
  z <- getCurrentTimeZone
  let l = utcToLocalTime z u
      (y, _, _) = toGregorian $ localDay l
  return y
-- | Write the stock two-line Setup.hs for the Simple build type.
writeSetupFile :: InitFlags -> IO ()
writeSetupFile flags = do
  message flags "Generating Setup.hs..."
  writeFileSafe flags "Setup.hs" setupFile
  where
    setupFile = unlines
      [ "import Distribution.Simple"
      , "main = defaultMain"
      ]
-- | Write @\<pkg\>.cabal@; returns False (and writes nothing) when no
-- package name was determined.
writeCabalFile :: InitFlags -> IO Bool
writeCabalFile flags@(InitFlags{packageName = NoFlag}) = do
  message flags "Error: no package name provided."
  return False
writeCabalFile flags@(InitFlags{packageName = Flag p}) = do
  let cabalFileName = p ++ ".cabal"
  message flags $ "Generating " ++ cabalFileName ++ "..."
  writeFileSafe flags cabalFileName (generateCabalFile cabalFileName flags)
  return True
-- | Write a file \"safely\", backing up any existing version (unless
-- the overwrite flag is set).
writeFileSafe :: InitFlags -> FilePath -> String -> IO ()
writeFileSafe flags fileName content = do
  -- Back up any existing file first (unless --overwrite was given).
  moveExistingFile flags fileName
  writeFile fileName content
-- | Move an existing file, if there is one, and the overwrite flag is
-- not set.
moveExistingFile :: InitFlags -> FilePath -> IO ()
moveExistingFile flags fileName =
  unless (overwrite flags == Flag True) $ do
    e <- doesFileExist fileName
    when e $ do
      -- Note: the old version is copied, not renamed, so the original
      -- path still exists until writeFileSafe overwrites it.
      newName <- findNewName fileName
      message flags $ "Warning: " ++ fileName ++ " already exists, backing up old version in " ++ newName
      copyFile fileName newName
-- | Find an unused backup name by appending @.save0@, @.save1@, ...
-- to the original path until a non-existing candidate is found.
findNewName :: FilePath -> IO FilePath
findNewName oldName = go (0 :: Integer)
  where
    go :: Integer -> IO FilePath
    go n = do
      let candidate = oldName <.> ("save" ++ show n)
      exists <- doesFileExist candidate
      if exists then go (n + 1) else return candidate
-- | Generate a .cabal file from an InitFlags structure. NOTE: this
-- is rather ad-hoc! What we would REALLY like is to have a
-- standard low-level AST type representing .cabal files, which
-- preserves things like comments, and to write an *inverse*
-- parser/pretty-printer pair between .cabal files and this AST.
-- Then instead of this ad-hoc code we could just map an InitFlags
-- structure onto a low-level AST structure and use the existing
-- pretty-printing code to generate the file.
generateCabalFile :: String -> InitFlags -> String
generateCabalFile fileName c =
  renderStyle style { lineLength = 79, ribbonsPerLine = 1.1 } $
  -- Leading header comment, omitted in --minimal mode.
  (if minimal c /= Flag True
    then showComment (Just $ "Initial " ++ fileName ++ " generated by cabal "
                          ++ "init. For further documentation, see "
                          ++ "http://haskell.org/cabal/users-guide/")
         $$ text ""
    else empty)
  $$
  vcat [ fieldS "name" (packageName c)
                (Just "The name of the package.")
                True
       , field  "version" (version c)
                (Just $ "The package version. See the Haskell package versioning policy (PVP) for standards guiding when and how versions should be incremented.\nhttp://www.haskell.org/haskellwiki/Package_versioning_policy\n"
                     ++ "PVP summary: +-+------- breaking API changes\n"
                     ++ " | | +----- non-breaking API additions\n"
                     ++ " | | | +--- code changes with no API change")
                True
       , fieldS "synopsis" (synopsis c)
                (Just "A short (one-line) description of the package.")
                True
       , fieldS "description" NoFlag
                (Just "A longer description of the package.")
                True
       , fieldS "homepage" (homepage c)
                (Just "URL for the project homepage or repository.")
                False
       , fieldS "bug-reports" NoFlag
                (Just "A URL where users can report bugs.")
                False
       , field  "license" (license c)
                (Just "The license under which the package is released.")
                True
       , fieldS "license-file" (Flag "LICENSE")
                (Just "The file containing the license text.")
                True
       , fieldS "author" (author c)
                (Just "The package author(s).")
                True
       , fieldS "maintainer" (email c)
                (Just "An email address to which users can send suggestions, bug reports, and patches.")
                True
       , fieldS "copyright" NoFlag
                (Just "A copyright notice.")
                True
       , fieldS "category" (either id display `fmap` category c)
                Nothing
                True
       , fieldS "build-type" (Flag "Simple")
                Nothing
                True
       , fieldS "extra-source-files" (listFieldS (extraSrc c))
                (Just "Extra files to be distributed with the package, such as examples or a README.")
                True
       , field  "cabal-version" (Flag $ orLaterVersion (Version [1,10] []))
                (Just "Constraint on the version of Cabal needed to build this package.")
                False
       -- The library or executable stanza, depending on packageType.
       , case packageType c of
           Flag Executable ->
             text "\nexecutable" <+> text (fromMaybe "" . flagToMaybe $ packageName c) $$ nest 2 (vcat
               [ fieldS "main-is" NoFlag (Just ".hs or .lhs file containing the Main module.") True
               , generateBuildInfo Executable c
               ])
           Flag Library -> text "\nlibrary" $$ nest 2 (vcat
               [ fieldS "exposed-modules" (listField (exposedModules c))
                        (Just "Modules exported by the library.")
                        True
               , generateBuildInfo Library c
               ])
           _ -> empty
       ]
  where
    -- Build-info fields shared by the library and executable stanzas.
    generateBuildInfo :: PackageType -> InitFlags -> Doc
    generateBuildInfo pkgtype c' = vcat
      [ fieldS "other-modules" (listField (otherModules c'))
               (Just $ case pkgtype of
                  Library    -> "Modules included in this library but not exported."
                  Executable -> "Modules included in this executable, other than Main.")
               True
      , fieldS "other-extensions" (listField (otherExts c'))
               (Just "LANGUAGE extensions used by modules in this package.")
               True
      , fieldS "build-depends" (listField (dependencies c'))
               (Just "Other library packages from which modules are imported.")
               True
      , fieldS "hs-source-dirs" (listFieldS (sourceDirs c'))
               (Just "Directories containing source files.")
               True
      , fieldS "build-tools" (listFieldS (buildTools c'))
               (Just "Extra tools (e.g. alex, hsc2hs, ...) needed to build the source.")
               False
      , field  "default-language" (language c')
               (Just "Base language which the package is written in.")
               True
      ]
    -- Render a list of Text values as one comma-separated field body.
    listField :: Text s => Maybe [s] -> Flag String
    listField = listFieldS . fmap (map display)
    listFieldS :: Maybe [String] -> Flag String
    listFieldS = Flag . maybe "" (intercalate ", ")
    -- A field whose value is rendered via its Text instance.
    field :: Text t => String -> Flag t -> Maybe String -> Bool -> Doc
    field s f = fieldS s (fmap display f)
    fieldS :: String        -- ^ Name of the field
           -> Flag String   -- ^ Field contents
           -> Maybe String  -- ^ Comment to explain the field
           -> Bool          -- ^ Should the field be included (commented out) even if blank?
           -> Doc
    -- Blank fields disappear entirely unless forced, and always in
    -- minimal mode.
    fieldS _ NoFlag _ inc | not inc || (minimal c == Flag True) = empty
    fieldS _ (Flag "") _ inc | not inc || (minimal c == Flag True) = empty
    fieldS s f com _ = case (isJust com, noComments c, minimal c) of
                         -- The case yields a Doc transformer (comment
                         -- prefix / trailing blank line) that is then
                         -- applied, via $, to the rendered field below.
                         (_, _, Flag True) -> id
                         (_, Flag True, _) -> id
                         (True, _, _) -> (showComment com $$) . ($$ text "")
                         (False, _, _) -> ($$ text "")
                       $
                       comment f <> text s <> colon
                                 <> text (replicate (20 - length s) ' ')
                                 <> text (fromMaybe "" . flagToMaybe $ f)
    -- Fields with no content are emitted commented out.
    comment NoFlag    = text "-- "
    comment (Flag "") = text "-- "
    comment _         = text ""
    -- Re-flow a comment to 76 columns and prefix each line with "-- ".
    showComment :: Maybe String -> Doc
    showComment (Just t) = vcat
                         . map (text . ("-- "++)) . lines
                         . renderStyle style {
                             lineLength = 76,
                             ribbonsPerLine = 1.05
                           }
                         . vcat
                         . map (fcat . map text . breakLine)
                         . lines
                         $ t
    showComment Nothing  = text ""
    -- Split a line into alternating word/space chunks so the
    -- pretty-printer can re-break long comment lines.
    breakLine  [] = []
    breakLine  cs = case break (==' ') cs of (w,cs') -> w : breakLine' cs'
    breakLine' [] = []
    breakLine' cs = case span (==' ') cs of (w,cs') -> w : breakLine cs'
-- | Generate warnings for missing fields etc.
generateWarnings :: InitFlags -> IO ()
generateWarnings flags = do
  message flags ""   -- blank separator line
  when (synopsis flags `elem` [NoFlag, Flag ""])
       (message flags "Warning: no synopsis given. You should edit the .cabal file and add one.")
  message flags "You may want to edit the .cabal file and add a Description field."
-- | Possibly generate a message to stdout, taking into account the
-- --quiet flag.
-- | Print a line to stdout unless the --quiet flag is set.
message :: InitFlags -> String -> IO ()
message flags s =
  case quiet flags of
    Flag True -> return ()
    _         -> putStrLn s
|
jwiegley/ghc-release
|
libraries/Cabal/cabal-install/Distribution/Client/Init.hs
|
gpl-3.0
| 30,868
| 0
| 22
| 9,129
| 7,401
| 3,793
| 3,608
| 571
| 15
|
-- | The Manager Process - Manages the torrents and controls them
module Process.TorrentManager (
-- * Types
TorrentManagerMsg(..)
-- * Channels
, TorrentMgrChan
-- * Interface
, start
)
where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad.State
import Control.Monad.Reader
import qualified Data.ByteString as B
import Prelude hiding (log)
import Protocol.BCode as BCode
import Process
import qualified Process.Status as Status
import qualified Process.PeerMgr as PeerMgr
import qualified Process.FS as FSP
import qualified Process.PieceMgr as PieceMgr (start, createPieceDb)
import qualified Process.ChokeMgr as ChokeMgr (ChokeMgrChannel)
import qualified Process.Tracker as Tracker
import Channels
import FS
import Supervisor
import Torrent
-- | Events the torrent manager reacts to: a torrent file appeared or
-- disappeared.
data TorrentManagerMsg = AddedTorrent FilePath
                       | RemovedTorrent FilePath
  deriving (Eq, Show)
-- | Channel on which batches of torrent events arrive.
type TorrentMgrChan = TChan [TorrentManagerMsg]
-- | Static process configuration: the channels and identifiers needed
-- to talk to the status, peer-manager and choke-manager processes.
data CF = CF { tCh        :: TorrentMgrChan
             , tStatusCh  :: Status.StatusChannel
             , tStatusTV  :: TVar [Status.PStat]
             , tPeerId    :: PeerId
             , tPeerMgrCh :: PeerMgr.PeerMgrChannel
             , tChokeCh   :: ChokeMgr.ChokeMgrChannel
             }
instance Logging CF where
  logName _ = "Process.TorrentManager"
-- | Mutable process state: events not yet acted upon.
data ST = ST { workQueue :: [TorrentManagerMsg] }
start :: TorrentMgrChan -- ^ Channel to watch for changes to torrents
      -> Status.StatusChannel
      -> TVar [Status.PStat]
      -> ChokeMgr.ChokeMgrChannel
      -> PeerId
      -> PeerMgr.PeerMgrChannel
      -> SupervisorChannel
      -> IO ThreadId
start chan statusC stv chokeC pid peerC supC =
    spawnP (CF chan statusC stv pid peerC chokeC) (ST [])
           ({-# SCC "TorrentManager" #-} catchP pgm (defaultStopHandler supC))
  where
    -- Main loop: drain the work queue, then block for the next batch.
    pgm = startStop >> dirMsg >> pgm
    -- Block until a batch of messages arrives; append it to the queue.
    dirMsg = do
      c <- asks tCh
      ls <- liftIO . atomically $ readTChan c
      modify (\s -> s { workQueue = ls ++ workQueue s })
    -- Process queued messages one at a time until the queue is empty.
    startStop = do
      q <- gets workQueue
      case q of
        [] -> return ()
        (AddedTorrent fp : rest) -> do
          debugP $ "Adding torrent file: " ++ fp
          _ <- startTorrent fp
          modify (\s -> s { workQueue = rest })
          startStop
        -- NOTE(review): removal is unimplemented and stops the whole
        -- manager process, dropping anything left in the queue.
        (RemovedTorrent _ : _) -> do
          errorP "Removal of torrents not yet supported :P"
          stopP
-- | Read and bdecode a torrent file; on decode failure, print the
-- error and stop this process.
readTorrent :: FilePath -> Process CF ST BCode
readTorrent fp = do
    torrent <- liftIO $ B.readFile fp
    let bcoded = BCode.decode torrent
    case bcoded of
      Left err -> do liftIO $ print err
                     stopP
      Right bc -> return bc
-- | Start a torrent unless its info-hash is already registered with
-- the status process; returns the supervisor ThreadId when started.
startTorrent :: FilePath -> Process CF ST (Maybe ThreadId)
startTorrent fp = do
  bc <- readTorrent fp
  ti <- liftIO $ mkTorrentInfo bc
  -- Ask the status process for all known torrents, synchronously via
  -- a fresh TMVar.
  sts <- do v <- liftIO newEmptyTMVarIO
            statusC <- asks tStatusCh
            liftIO . atomically $ writeTChan statusC (Status.RequestAllTorrents v)
            liftIO . atomically $ takeTMVar v
  case lookup (infoHash ti) sts of
    Nothing -> Just `fmap` startTorrent' fp bc ti
    Just _x -> return Nothing   -- already running, nothing to do
-- | Spin up the process tree for one torrent: file-system, piece
-- manager and tracker workers under an all-for-one supervisor, then
-- register the torrent with the status and peer-manager processes and
-- start the tracker.
--
-- Signature now says 'FilePath' (same type as '[Char]') to match
-- 'startTorrent'.
startTorrent' :: FilePath -> BCode -> TorrentInfo -> Process CF ST ThreadId
startTorrent' fp bc ti = do
    fspC      <- liftIO newTChanIO
    trackerC  <- liftIO newTChanIO
    supC      <- liftIO newTChanIO
    pieceMgrC <- liftIO newTChanIO
    chokeC    <- asks tChokeCh
    statusC   <- asks tStatusCh
    stv       <- asks tStatusTV
    pid       <- asks tPeerId
    pmC       <- asks tPeerMgrCh
    (handles, haveMap, pieceMap) <- liftIO $ openAndCheckFile bc
    -- Bytes still missing, reported to the status process below.
    let left = bytesLeft haveMap pieceMap
    pieceDb <- PieceMgr.createPieceDb haveMap pieceMap
    (tid, _) <- liftIO $ allForOne ("TorrentSup - " ++ fp)
                  [ Worker $ FSP.start handles pieceMap fspC
                  , Worker $ PieceMgr.start pieceMgrC fspC chokeC statusC pieceDb (infoHash ti)
                  , Worker $ Tracker.start (infoHash ti) ti pid defaultPort statusC trackerC pmC
                  ] supC
    liftIO . atomically $ writeTChan statusC $ Status.InsertTorrent (infoHash ti) left trackerC
    -- Reuse the peer-manager channel bound above instead of asking for
    -- it a second time (the original re-read tPeerMgrCh here).
    liftIO . atomically $ writeTChan pmC $ PeerMgr.NewTorrent (infoHash ti)
                            (PeerMgr.TorrentLocal pieceMgrC fspC stv pieceMap)
    liftIO . atomically $ writeTChan trackerC Start
    return tid
|
jlouis/combinatorrent
|
src/Process/TorrentManager.hs
|
bsd-2-clause
| 4,506
| 0
| 18
| 1,358
| 1,252
| 634
| 618
| 107
| 3
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
-- | Test suite for Stack.Dot
module Stack.DotSpec where
import Data.Functor.Identity
import Data.List ((\\))
import qualified Data.Map as Map
import qualified Data.Set as Set
import Distribution.License (License (BSD3))
import qualified RIO.Text as T
import Stack.Prelude hiding (pkgName)
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (forAll,choose,Gen)
import Stack.Dot
-- | Payload attached to every stub graph node: fixed version 0.0.0.0,
-- BSD3 license, no third field.
dummyPayload :: DotPayload
dummyPayload = DotPayload (parseVersion "0.0.0.0") (Just (Right BSD3)) Nothing
-- | Specs for the dependency-graph operations in "Stack.Dot".
spec :: Spec
spec = do
  -- Seed graph: two requested packages with overlapping dependencies.
  let graph =
        Map.mapKeys pkgName
        . fmap (\p -> (Set.map pkgName p, dummyPayload))
        . Map.fromList $ [("one",Set.fromList ["base","free"])
                         ,("two",Set.fromList ["base","free","mtl","transformers","one"])
                         ]
  describe "Stack.Dot" $ do
    it "does nothing if depth is 0" $
      resolveDependencies (Just 0) graph stubLoader `shouldBe` return graph
    it "with depth 1, more dependencies are resolved" $ do
      let graph' = Map.insert (pkgName "cycle")
                              (Set.singleton (pkgName "cycle"), dummyPayload)
                              graph
          resultGraph  = runIdentity (resolveDependencies (Just 0) graph stubLoader)
          resultGraph' = runIdentity (resolveDependencies (Just 1) graph' stubLoader)
      Map.size resultGraph < Map.size resultGraph' `shouldBe` True
    it "cycles are ignored" $ do
      let graph' = Map.insert (pkgName "cycle")
                              (Set.singleton (pkgName "cycle"), dummyPayload)
                              graph
          resultGraph  = resolveDependencies Nothing graph stubLoader
          resultGraph' = resolveDependencies Nothing graph' stubLoader
      fmap Map.size resultGraph' `shouldBe` fmap ((+1) . Map.size) resultGraph
    -- True when the package appears as a dependency of some node.
    let graphElem e = Set.member e . Set.unions . Map.elems
    prop "requested packages are pruned" $ do
      let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader)
          allPackages g = Map.keysSet g `Set.union` foldMap fst g
      forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune ->
        let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph
        in Set.null (allPackages pruned `Set.intersection` Set.fromList toPrune)
    -- Description typo fixed: "orhpans" -> "orphans".
    prop "pruning removes orphans" $ do
      let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader)
          allPackages g = Map.keysSet g `Set.union` foldMap fst g
          orphans g = Map.filterWithKey (\k _ -> not (graphElem k g)) g
      forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune ->
        let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph
        in null (Map.keys (orphans (fmap fst pruned)) \\ [pkgName "one", pkgName "two"])
{- Helper functions below -}
-- Backport from QuickCheck 2.8 to 2.7.6: generate a random sublist,
-- keeping each element independently with probability 1/2.
sublistOf :: [a] -> Gen [a]
sublistOf = filterM (\_ -> choose (False, True))
-- Unsafe internal helper to create a package name.  Calls 'error' on a
-- parse failure, which is acceptable only inside this test suite.
pkgName :: Text -> PackageName
pkgName = fromMaybe failure . parsePackageName . T.unpack
  where
    failure = error "Internal error during package name creation in DotSpec.pkgName"
-- Stub, simulates the function to load package dependencies
-- | Pure stand-in for the dependency loader: maps a package name to a
-- hard-coded dependency set (empty for unknown names) plus the dummy
-- payload, inside Identity.
-- NOTE(review): several keys below look truncated relative to real
-- package names (e.g. "cont", "ghc", "unordered", "integer",
-- "template", "prelude") -- confirm against the inputs that exercise
-- this stub before relying on those entries.
stubLoader :: PackageName -> Identity (Set PackageName, DotPayload)
stubLoader name = return . (, dummyPayload) . Set.fromList . map pkgName $ case show name of
  "StateVar"     -> ["stm","transformers"]
  "array"        -> []
  "bifunctors"   -> ["semigroupoids","semigroups","tagged"]
  "binary"       -> ["array","bytestring","containers"]
  "bytestring"   -> ["deepseq","ghc-prim","integer-gmp"]
  "comonad"      -> ["containers","contravariant","distributive"
                    ,"semigroups","tagged","transformers","transformers-compat"
                    ]
  "cont"         -> ["StateVar","semigroups","transformers","transformers-compat","void"]
  "containers"   -> ["array","deepseq","ghc-prim"]
  "deepseq"      -> ["array"]
  "distributive" -> ["ghc-prim","tagged","transformers","transformers-compat"]
  "free"         -> ["bifunctors","comonad","distributive","mtl"
                    ,"prelude-extras","profunctors","semigroupoids"
                    ,"semigroups","template-haskell","transformers"
                    ]
  "ghc"          -> []
  "hashable"     -> ["bytestring","ghc-prim","integer-gmp","text"]
  "integer"      -> []
  "mtl"          -> ["transformers"]
  "nats"         -> []
  "one"          -> ["free"]
  "prelude"      -> []
  "profunctors"  -> ["comonad","distributive","semigroupoids","tagged","transformers"]
  "semigroupoids" -> ["comonad","containers","contravariant","distributive"
                     ,"semigroups","transformers","transformers-compat"
                     ]
  "semigroups"   -> ["bytestring","containers","deepseq","hashable"
                    ,"nats","text","unordered-containers"
                    ]
  "stm"          -> ["array"]
  "tagged"       -> ["template-haskell"]
  "template"     -> []
  "text"         -> ["array","binary","bytestring","deepseq","ghc-prim","integer-gmp"]
  "transformers" -> []
  "two"          -> ["free","mtl","one","transformers"]
  "unordered"    -> ["deepseq","hashable"]
  "void"         -> ["ghc-prim","hashable","semigroups"]
  _              -> []
|
juhp/stack
|
src/test/Stack/DotSpec.hs
|
bsd-3-clause
| 5,358
| 0
| 24
| 1,204
| 1,500
| 811
| 689
| 98
| 30
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.PixelRectangles.Rasterization
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to a part of section 3.6.4 (Rasterization of Pixel
-- Rectangles) of the OpenGL 2.1 specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.PixelRectangles.Rasterization (
PixelData(..), PixelFormat(..), drawPixels, pixelZoom
) where
import Control.Monad
import Graphics.Rendering.OpenGL.GL.CoordTrans
import Graphics.Rendering.OpenGL.GL.PixelData
import Graphics.Rendering.OpenGL.GL.PixelFormat
import Graphics.Rendering.OpenGL.GL.QueryUtils
import Graphics.Rendering.OpenGL.GL.StateVar
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
-- | Write a @w@ x @h@ rectangle of pixels at the current raster
-- position: unpacks the 'PixelData' and hands its format, type and
-- pointer to 'glDrawPixels'.
drawPixels :: Size -> PixelData a -> IO ()
drawPixels (Size w h) pd = withPixelData pd $ glDrawPixels w h
--------------------------------------------------------------------------------
-- | The pixel zoom factors in x and y as a read/write 'StateVar'.
-- Reading queries the GL state via 'GetZoomX'\/'GetZoomY'; writing
-- calls 'glPixelZoom' with the pair's components.
pixelZoom :: StateVar (GLfloat, GLfloat)
pixelZoom =
   makeStateVar
      (liftM2 (,) (getFloat1 id GetZoomX) (getFloat1 id GetZoomY))
      (uncurry glPixelZoom)
|
hesiod/OpenGL
|
src/Graphics/Rendering/OpenGL/GL/PixelRectangles/Rasterization.hs
|
bsd-3-clause
| 1,403
| 0
| 9
| 165
| 210
| 132
| 78
| 16
| 1
|
-----------------------------------------------------------------------------
-- $Id: DriverPhases.hs,v 1.38 2005/05/17 11:01:59 simonmar Exp $
--
-- GHC Driver
--
-- (c) The University of Glasgow 2002
--
-----------------------------------------------------------------------------
module DriverPhases (
HscSource(..), isHsBoot, hscSourceString,
Phase(..),
happensBefore, eqPhase, anyHsc, isStopLn,
startPhase,
phaseInputExt,
isHaskellishSuffix,
isHaskellSrcSuffix,
isObjectSuffix,
isCishSuffix,
isExtCoreSuffix,
isDynLibSuffix,
isHaskellUserSrcSuffix,
isSourceSuffix,
isHaskellishFilename,
isHaskellSrcFilename,
isObjectFilename,
isCishFilename,
isExtCoreFilename,
isDynLibFilename,
isHaskellUserSrcFilename,
isSourceFilename
) where
#include "HsVersions.h"
import {-# SOURCE #-} DynFlags
import Outputable
import Platform
import System.FilePath
-----------------------------------------------------------------------------
-- Phases
{-
Phase of the | Suffix saying | Flag saying | (suffix of)
compilation system | ``start here''| ``stop after''| output file
literate pre-processor | .lhs | - | -
C pre-processor (opt.) | - | -E | -
Haskell compiler | .hs | -C, -S | .hc, .s
C compiler (opt.) | .hc or .c | -S | .s
assembler | .s or .S | -c | .o
linker | other | - | a.out
-}
-- | The kind of Haskell source being compiled: an ordinary source
-- file, an @hs-boot@ stub (used to break module import loops), or an
-- external-core file.
data HscSource
   = HsSrcFile | HsBootFile | ExtCoreFile
     deriving( Eq, Ord, Show )
        -- Ord needed for the finite maps we build in CompManager

-- | Short tag shown in progress messages (e.g. @[boot]@); ordinary
-- source files get the empty string.
hscSourceString :: HscSource -> String
hscSourceString HsSrcFile   = ""
hscSourceString HsBootFile  = "[boot]"
hscSourceString ExtCoreFile = "[ext core]"

-- | Is this an @hs-boot@ file?
isHsBoot :: HscSource -> Bool
isHsBoot HsBootFile = True
isHsBoot _          = False
-- | A phase of the compilation pipeline.  Constructors carrying an
-- 'HscSource' remember what kind of file is being compiled, because
-- that determines the file suffixes involved (see 'phaseInputExt').
data Phase
        = Unlit HscSource
        | Cpp   HscSource
        | HsPp  HscSource
        | Hsc   HscSource
        | Ccpp
        | Cc
        | Cobjc
        | Cobjcpp
        | HCc           -- Haskellised C (as opposed to vanilla C) compilation
        | Splitter      -- Assembly file splitter (part of '-split-objs')
        | SplitAs       -- Assembler for split assembly files (part of '-split-objs')
        | As Bool       -- Assembler for regular assembly files (Bool: with-cpp)
        | LlvmOpt       -- Run LLVM opt tool over llvm assembly
        | LlvmLlc       -- LLVM bitcode to native assembly
        | LlvmMangle    -- Fix up TNTC by processing assembly produced by LLVM
        | CmmCpp        -- pre-process Cmm source
        | Cmm           -- parse & compile Cmm code
        | MergeStub     -- merge in the stub object file

        -- The final phase is a pseudo-phase that tells the pipeline to stop.
        -- There is no runPhase case for it.
        | StopLn        -- Stop, but linking will follow, so generate .o file
  deriving (Eq, Show)

-- | Phases are printed via their derived 'Show' instance.
instance Outputable Phase where
    ppr p = text (show p)

-- | A wildcard 'Hsc' phase used only for 'eqPhase' comparisons; its
-- 'HscSource' field is bottom and must never be forced.
anyHsc :: Phase
anyHsc = Hsc (panic "anyHsc")

-- | Is this the terminal pseudo-phase 'StopLn'?
isStopLn :: Phase -> Bool
isStopLn StopLn = True
isStopLn _      = False
-- | Constructor-level equality on phases.  Unlike the derived 'Eq'
-- this ignores the 'HscSource' argument (which may be bottom, see
-- 'anyHsc'); the with-cpp flag of 'As' is still compared.
eqPhase :: Phase -> Phase -> Bool
-- Equality of constructors, ignoring the HscSource field
-- NB: the HscSource field can be 'bot'; see anyHsc above
eqPhase (Unlit _)   (Unlit _)   = True
eqPhase (Cpp   _)   (Cpp   _)   = True
eqPhase (HsPp  _)   (HsPp  _)   = True
eqPhase (Hsc   _)   (Hsc   _)   = True
eqPhase Ccpp        Ccpp        = True
eqPhase Cc          Cc          = True
eqPhase Cobjc       Cobjc       = True
eqPhase Cobjcpp     Cobjcpp     = True
eqPhase HCc         HCc         = True
eqPhase Splitter    Splitter    = True
eqPhase SplitAs     SplitAs     = True
eqPhase (As x)      (As y)      = x == y
eqPhase LlvmOpt     LlvmOpt     = True
eqPhase LlvmLlc     LlvmLlc     = True
eqPhase LlvmMangle  LlvmMangle  = True
eqPhase CmmCpp      CmmCpp      = True
eqPhase Cmm         Cmm         = True
eqPhase MergeStub   MergeStub   = True
eqPhase StopLn      StopLn      = True
eqPhase _           _           = False
-- Partial ordering on phases: we want to know which phases will occur before
-- which others. This is used for sanity checking, to ensure that the
-- pipeline will stop at some point (see DriverPipeline.runPipeline).

-- | @happensBefore dflags p1 p2@ is 'True' when @p1@ occurs strictly
-- before @p2@ in the pipeline.  Terminates because every 'nextPhase'
-- chain ends at 'StopLn'.
happensBefore :: DynFlags -> Phase -> Phase -> Bool
happensBefore dflags p1 p2 = p1 `happensBefore'` p2
    where StopLn `happensBefore'` _ = False
          x      `happensBefore'` y = after_x `eqPhase` y
                                   || after_x `happensBefore'` y
              where after_x = nextPhase dflags x

-- | The phase that (conservatively) follows @p@; used only by
-- 'happensBefore'.  On unregisterised targets Haskell compiles via C,
-- hence the 'maybeHCc' choice after 'Hsc' and 'Cmm'.
nextPhase :: DynFlags -> Phase -> Phase
nextPhase dflags p
    -- A conservative approximation to the next phase, used in happensBefore
    = case p of
      Unlit sf   -> Cpp  sf
      Cpp   sf   -> HsPp sf
      HsPp  sf   -> Hsc  sf
      Hsc   _    -> maybeHCc
      Splitter   -> SplitAs
      LlvmOpt    -> LlvmLlc
      LlvmLlc    -> LlvmMangle
      LlvmMangle -> As False
      SplitAs    -> MergeStub
      As _       -> MergeStub
      Ccpp       -> As False
      Cc         -> As False
      Cobjc      -> As False
      Cobjcpp    -> As False
      CmmCpp     -> Cmm
      Cmm        -> maybeHCc
      HCc        -> As False
      MergeStub  -> StopLn
      StopLn     -> panic "nextPhase: nothing after StopLn"
    where maybeHCc = if platformUnregisterised (targetPlatform dflags)
                     then HCc
                     else As False
-- the first compilation phase for a given file is determined
-- by its suffix.

-- | Map a file suffix (without the dot) to the phase at which its
-- compilation starts.  Unknown suffixes map to 'StopLn': the file is
-- passed straight through to the linker.
startPhase :: String -> Phase
startPhase "lhs"      = Unlit HsSrcFile
startPhase "lhs-boot" = Unlit HsBootFile
startPhase "hs"       = Cpp   HsSrcFile
startPhase "hs-boot"  = Cpp   HsBootFile
startPhase "hscpp"    = HsPp  HsSrcFile
startPhase "hspp"     = Hsc   HsSrcFile
startPhase "hcr"      = Hsc   ExtCoreFile
startPhase "hc"       = HCc
startPhase "c"        = Cc
startPhase "cpp"      = Ccpp
startPhase "C"        = Cc
startPhase "m"        = Cobjc
startPhase "M"        = Cobjcpp
startPhase "mm"       = Cobjcpp
startPhase "cc"       = Ccpp
startPhase "cxx"      = Ccpp
startPhase "split_s"  = Splitter
startPhase "s"        = As False
startPhase "S"        = As True
startPhase "ll"       = LlvmOpt
startPhase "bc"       = LlvmLlc
startPhase "lm_s"     = LlvmMangle
startPhase "o"        = StopLn
startPhase "cmm"      = CmmCpp
startPhase "cmmcpp"   = Cmm
startPhase _          = StopLn     -- all unknown file types
-- This is used to determine the extension for the output from the
-- current phase (if it generates a new file). The extension depends
-- on the next phase in the pipeline.

-- | The file suffix (without the dot) that the given phase takes as
-- input; roughly an inverse of 'startPhase'.
phaseInputExt :: Phase -> String
phaseInputExt (Unlit HsSrcFile)   = "lhs"
phaseInputExt (Unlit HsBootFile)  = "lhs-boot"
phaseInputExt (Unlit ExtCoreFile) = "lhcr"
phaseInputExt (Cpp   _)           = "lpp"   -- intermediate only
phaseInputExt (HsPp  _)           = "hscpp" -- intermediate only
phaseInputExt (Hsc   _)           = "hspp"  -- intermediate only
        -- NB: as things stand, phaseInputExt (Hsc x) must not evaluate x
        -- because runPipeline uses the StopBefore phase to pick the
        -- output filename. That could be fixed, but watch out.
phaseInputExt HCc                 = "hc"
phaseInputExt Ccpp                = "cpp"
phaseInputExt Cobjc               = "m"
phaseInputExt Cobjcpp             = "mm"
phaseInputExt Cc                  = "c"
phaseInputExt Splitter            = "split_s"
phaseInputExt (As True)           = "S"
phaseInputExt (As False)          = "s"
phaseInputExt LlvmOpt             = "ll"
phaseInputExt LlvmLlc             = "bc"
phaseInputExt LlvmMangle          = "lm_s"
phaseInputExt SplitAs             = "split_s"
phaseInputExt CmmCpp              = "cmm"
phaseInputExt Cmm                 = "cmmcpp"
phaseInputExt MergeStub           = "o"
phaseInputExt StopLn              = "o"
-- | Suffix tables backing the @is*Suffix@ predicates below.
haskellish_src_suffixes, haskellish_suffixes, cish_suffixes,
    extcoreish_suffixes, haskellish_user_src_suffixes
 :: [String]
haskellish_src_suffixes      = haskellish_user_src_suffixes ++
                               [ "hspp", "hscpp", "hcr", "cmm", "cmmcpp" ]
haskellish_suffixes          = haskellish_src_suffixes ++ ["hc", "raw_s"]
cish_suffixes                = [ "c", "cpp", "C", "cc", "cxx", "s", "S", "ll", "bc", "lm_s", "m", "M", "mm" ]
extcoreish_suffixes          = [ "hcr" ]
-- Will not be deleted as temp files:
haskellish_user_src_suffixes = [ "hs", "lhs", "hs-boot", "lhs-boot" ]

objish_suffixes :: Platform -> [String]
-- Use the appropriate suffix for the system on which
-- the GHC-compiled code will run
objish_suffixes platform = case platformOS platform of
    OSMinGW32 -> [ "o", "O", "obj", "OBJ" ]
    _         -> [ "o" ]

-- | Shared-library suffixes for the target OS.
dynlib_suffixes :: Platform -> [String]
dynlib_suffixes platform = case platformOS platform of
    OSMinGW32 -> ["dll", "DLL"]
    OSDarwin  -> ["dylib", "so"]
    _         -> ["so"]
-- | Membership tests over the suffix tables above.
isHaskellishSuffix, isHaskellSrcSuffix, isCishSuffix, isExtCoreSuffix,
    isHaskellUserSrcSuffix
 :: String -> Bool
isHaskellishSuffix     s = s `elem` haskellish_suffixes
isHaskellSrcSuffix     s = s `elem` haskellish_src_suffixes
isCishSuffix           s = s `elem` cish_suffixes
isExtCoreSuffix        s = s `elem` extcoreish_suffixes
isHaskellUserSrcSuffix s = s `elem` haskellish_user_src_suffixes

isObjectSuffix, isDynLibSuffix :: Platform -> String -> Bool
isObjectSuffix platform s = s `elem` objish_suffixes platform
isDynLibSuffix platform s = s `elem` dynlib_suffixes platform

-- | A suffix GHC accepts as compilable source (Haskellish or C-ish).
isSourceSuffix :: String -> Bool
isSourceSuffix suff = isHaskellishSuffix suff || isCishSuffix suff

-- | The same predicates lifted to whole filenames.
isHaskellishFilename, isHaskellSrcFilename, isCishFilename,
    isExtCoreFilename, isHaskellUserSrcFilename, isSourceFilename
 :: FilePath -> Bool
-- takeExtension return .foo, so we drop 1 to get rid of the .
isHaskellishFilename     f = isHaskellishSuffix     (drop 1 $ takeExtension f)
isHaskellSrcFilename     f = isHaskellSrcSuffix     (drop 1 $ takeExtension f)
isCishFilename           f = isCishSuffix           (drop 1 $ takeExtension f)
isExtCoreFilename        f = isExtCoreSuffix        (drop 1 $ takeExtension f)
isHaskellUserSrcFilename f = isHaskellUserSrcSuffix (drop 1 $ takeExtension f)
isSourceFilename         f = isSourceSuffix         (drop 1 $ takeExtension f)

isObjectFilename, isDynLibFilename :: Platform -> FilePath -> Bool
isObjectFilename platform f = isObjectSuffix platform (drop 1 $ takeExtension f)
isDynLibFilename platform f = isDynLibSuffix platform (drop 1 $ takeExtension f)
|
lukexi/ghc-7.8-arm64
|
compiler/main/DriverPhases.hs
|
bsd-3-clause
| 10,595
| 0
| 10
| 3,035
| 2,089
| 1,132
| 957
| 209
| 20
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Distribution.Types.BenchmarkType (
BenchmarkType(..),
knownBenchmarkTypes,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Version
import Distribution.Text
import Text.PrettyPrint as Disp
-- | The \"benchmark-type\" field in the benchmark stanza.
--
data BenchmarkType = BenchmarkTypeExe Version
                     -- ^ \"type: exitcode-stdio-x.y\"
                   | BenchmarkTypeUnknown String Version
                     -- ^ Some unknown benchmark type e.g. \"type: foo\"
    deriving (Generic, Show, Read, Eq, Typeable, Data)

instance Binary BenchmarkType

-- | The benchmark interfaces this code knows about: currently only
-- @exitcode-stdio-1.0@.
knownBenchmarkTypes :: [BenchmarkType]
knownBenchmarkTypes = [ BenchmarkTypeExe (mkVersion [1,0]) ]

-- | Pretty-printing and parsing of the @type:@ field.  Unrecognised
-- names round-trip through 'BenchmarkTypeUnknown'.
instance Text BenchmarkType where
  disp (BenchmarkTypeExe ver)          = text "exitcode-stdio-" <<>> disp ver
  disp (BenchmarkTypeUnknown name ver) = text name <<>> char '-' <<>> disp ver

  parse = stdParse $ \ver name -> case name of
      "exitcode-stdio" -> BenchmarkTypeExe ver
      _                -> BenchmarkTypeUnknown name ver
|
mydaum/cabal
|
Cabal/Distribution/Types/BenchmarkType.hs
|
bsd-3-clause
| 1,129
| 0
| 11
| 247
| 241
| 133
| 108
| 22
| 1
|
-----------------------------------------------------------------------------
--
-- GHC Extra object linking code
--
-- (c) The GHC Team 2017
--
-----------------------------------------------------------------------------
module SysTools.ExtraObj (
mkExtraObj, mkExtraObjToLinkIntoBinary, mkNoteObjsToLinkIntoBinary,
checkLinkInfo, getLinkInfo, getCompilerInfo,
ghcLinkInfoSectionName, ghcLinkInfoNoteName, platformSupportsSavingLinkOpts,
haveRtsOptsFlags
) where
import AsmUtils
import ErrUtils
import DynFlags
import Packages
import Platform
import Outputable
import SrcLoc ( noSrcSpan )
import Module
import Elf
import Util
import GhcPrelude
import Control.Monad
import Data.Maybe
import Control.Monad.IO.Class
import FileCleanup
import SysTools.Tasks
import SysTools.Info
-- | Write the snippet @xs@ to a temporary source file with suffix
-- @extn@ (\"c\" or \"s\"), compile it with the C compiler, and return
-- the path of the resulting temporary object file.
mkExtraObj :: DynFlags -> Suffix -> String -> IO FilePath
mkExtraObj dflags extn xs
 = do cFile <- newTempName dflags TFL_CurrentModule extn
      oFile <- newTempName dflags TFL_GhcSession "o"
      writeFile cFile xs
      ccInfo <- liftIO $ getCompilerInfo dflags
      runCc dflags
            ([Option "-c",
              FileOption "" cFile,
              Option "-o",
              FileOption "" oFile]
             ++ if extn /= "s"
                   then cOpts
                   else asmOpts ccInfo)
      return oFile
    where
      -- Pass a different set of options to the C compiler depending one whether
      -- we're compiling C or assembler. When compiling C, we pass the usual
      -- set of include directories and PIC flags.
      cOpts = map Option (picCCOpts dflags)
              ++ map (FileOption "-I")
                     (includeDirs $ getPackageDetails dflags rtsUnitId)

      -- When compiling assembler code, we drop the usual C options, and if the
      -- compiler is Clang, we add an extra argument to tell Clang to ignore
      -- unused command line options. See trac #11684.
      asmOpts ccInfo =
            if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
                then [Option "-Qunused-arguments"]
                else []
-- When linking a binary, we need to create a C main() function that
-- starts everything off. This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
-- On Windows, when making a shared library we also may need a DllMain.
--

-- | Generate and compile the C @main()@ (or, for Windows DLLs, a
-- @DllMain@) stub, returning the path of the resulting object file.
-- With @-no-hs-main@ the generated source is empty.
mkExtraObjToLinkIntoBinary :: DynFlags -> IO FilePath
mkExtraObjToLinkIntoBinary dflags = do
   -- -rtsopts is meaningless when foreign code initialises the RTS.
   when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $ do
      putLogMsg dflags NoReason SevInfo noSrcSpan
          (defaultUserStyle dflags)
          (text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
           text " Call hs_init_ghc() from your main() function to set these options.")

   mkExtraObj dflags "c" (showSDoc dflags main)
 where
  main
   | gopt Opt_NoHsMain dflags = Outputable.empty
   | otherwise
       = case ghcLink dflags of
           LinkDynLib -> if platformOS (targetPlatform dflags) == OSMinGW32
                         then dllMain
                         else Outputable.empty
           _          -> exeMain

  -- C main() that configures the RTS from the compile-time settings
  -- and calls hs_main with the Haskell Main.main closure.
  exeMain = vcat [
      text "#include \"Rts.h\"",
      text "extern StgClosure ZCMain_main_closure;",
      text "int main(int argc, char *argv[])",
      char '{',
      text " RtsConfig __conf = defaultRtsConfig;",
      text " __conf.rts_opts_enabled = "
          <> text (show (rtsOptsEnabled dflags)) <> semi,
      text " __conf.rts_opts_suggestions = "
          <> text (if rtsOptsSuggestions dflags
                   then "true"
                   else "false") <> semi,
      case rtsOpts dflags of
         Nothing   -> Outputable.empty
         Just opts -> text " __conf.rts_opts= " <>
                      text (show opts) <> semi,
      text " __conf.rts_hs_main = true;",
      text " return hs_main(argc,argv,&ZCMain_main_closure,__conf);",
      char '}',
      char '\n' -- final newline, to keep gcc happy
     ]

  -- Trivial DllMain for Windows shared libraries.
  dllMain = vcat [
      text "#include \"Rts.h\"",
      text "#include <windows.h>",
      text "#include <stdbool.h>",
      char '\n',
      text "bool",
      text "WINAPI",
      text "DllMain ( HINSTANCE hInstance STG_UNUSED",
      text " , DWORD reason STG_UNUSED",
      text " , LPVOID reserved STG_UNUSED",
      text " )",
      text "{",
      text " return true;",
      text "}",
      char '\n' -- final newline, to keep gcc happy
     ]
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present

-- | Build (at most one) tiny object file carrying the \"link info\"
-- ELF note; returns @[]@ on platforms that cannot save link options.
mkNoteObjsToLinkIntoBinary :: DynFlags -> [InstalledUnitId] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary dflags dep_packages = do
   link_info <- getLinkInfo dflags dep_packages

   if (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
     then fmap (:[]) $ mkExtraObj dflags "s" (showSDoc dflags (link_opts link_info))
     else return []

  where
    link_opts info = hcat [
      -- "link info" section (see Note [LinkInfo section])
      makeElfNote ghcLinkInfoSectionName ghcLinkInfoNoteName 0 info,

      -- ALL generated assembly must have this section to disable
      -- executable stacks. See also
      -- compiler/nativeGen/AsmCodeGen.hs for another instance
      -- where we need to do this.
      if platformHasGnuNonexecStack (targetPlatform dflags)
        then text ".section .note.GNU-stack,\"\","
             <> sectionType "progbits" <> char '\n'
        else Outputable.empty
      ]
-- | Return the "link info" string
--
-- See Note [LinkInfo section]
getLinkInfo :: DynFlags -> [InstalledUnitId] -> IO String
getLinkInfo dflags dep_packages = do
   package_link_opts <- getPackageLinkOpts dflags dep_packages
   pkg_frameworks <- if platformUsesFrameworks (targetPlatform dflags)
                     then getPackageFrameworks dflags dep_packages
                     else return []
   let extra_ld_inputs = ldInputs dflags
   let
      -- Everything that influenced the link, bundled into one
      -- Show-able tuple; its 'show' is the saved string.
      link_info = (package_link_opts,
                   pkg_frameworks,
                   rtsOpts dflags,
                   rtsOptsEnabled dflags,
                   gopt Opt_NoHsMain dflags,
                   map showOpt extra_ld_inputs,
                   getOpts dflags opt_l)
   --
   return (show link_info)

-- | Saving link options in an ELF note requires an ELF target
-- (Solaris excluded, see #5382).
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os
  | os == OSSolaris2 = False -- see #5382
  | otherwise        = osElfTarget os

-- See Note [LinkInfo section]
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
   -- if we use the ".debug" prefix, then strip will strip it by default

-- Identifier for the note (see Note [LinkInfo section])
ghcLinkInfoNoteName :: String
ghcLinkInfoNoteName = "GHC link info"
-- Returns 'False' if it was, and we can avoid linking, because the
-- previous binary was linked with "the same options".

-- | Compare the link info embedded in @exe_file@ with the one we
-- would use now.  'True' means it differs (or could not be read), so
-- re-linking is required.
checkLinkInfo :: DynFlags -> [InstalledUnitId] -> FilePath -> IO Bool
checkLinkInfo dflags pkg_deps exe_file
 | not (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
 -- ToDo: Windows and OS X do not use the ELF binary format, so
 -- readelf does not work there. We need to find another way to do
 -- this.
 = return False -- conservatively we should return True, but not
                -- linking in this case was the behaviour for a long
                -- time so we leave it as-is.
 | otherwise
 = do
   link_info <- getLinkInfo dflags pkg_deps
   debugTraceMsg dflags 3 $ text ("Link info: " ++ link_info)
   m_exe_link_info <- readElfNoteAsString dflags exe_file
                          ghcLinkInfoSectionName ghcLinkInfoNoteName
   let sameLinkInfo = (Just link_info == m_exe_link_info)
   debugTraceMsg dflags 3 $ case m_exe_link_info of
     Nothing -> text "Exe link info: Not found"
     Just s
       | sameLinkInfo -> text ("Exe link info is the same")
       | otherwise    -> text ("Exe link info is different: " ++ s)
   return (not sameLinkInfo)
{- Note [LinkInfo section]
~~~~~~~~~~~~~~~~~~~~~~~
The "link info" is a string representing the parameters of the link. We save
this information in the binary, and the next time we link, if nothing else has
changed, we use the link info stored in the existing binary to decide whether
to re-link or not.
The "link info" string is stored in a ELF section called ".debug-ghc-link-info"
(see ghcLinkInfoSectionName) with the SHT_NOTE type. For some time, it used to
not follow the specified record-based format (see #11022).
-}
-- | 'True' when the user supplied @-with-rtsopts@, or enabled
-- @-rtsopts@ beyond the safe-only default.
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags = explicitOpts || enabledBeyondSafe
  where
    explicitOpts = isJust (rtsOpts dflags)
    enabledBeyondSafe =
      case rtsOptsEnabled dflags of
        RtsOptsSafeOnly -> False
        _               -> True
|
ezyang/ghc
|
compiler/main/SysTools/ExtraObj.hs
|
bsd-3-clause
| 9,211
| 0
| 16
| 2,530
| 1,472
| 753
| 719
| 153
| 5
|
module C7 (C7.myFringe) where
import D7
-- | Leaf values of a 'Tree', left to right.  The left subtree is
-- handled by this (recursive) definition; the right subtree is passed
-- to 'D7.myFringe' imported from module D7.
-- NOTE(review): the module lives under testing/renaming, so the
-- qualified self-reference is the point of the fixture.
myFringe :: (Tree a) -> [a]
myFringe (Leaf x) = [x]
myFringe (Branch left right)
  = (C7.myFringe left) ++ (D7.myFringe right)
|
kmate/HaRe
|
old/testing/renaming/C7_AstOut.hs
|
bsd-3-clause
| 172
| 0
| 8
| 34
| 85
| 46
| 39
| 6
| 1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GADTs #-}
module T12163 where
-- GADT whose constructor refines the first type argument
-- (Mk :: Int -> b -> T Int b) while deriving Functor over the last.
-- NOTE(review): file path says deriving/should_fail, so GHC is
-- expected to reject this deriving clause — do not "fix" it.
data T a b where
    Mk :: Int -> b -> T Int b
    deriving (Functor)
|
ezyang/ghc
|
testsuite/tests/deriving/should_fail/T12163.hs
|
bsd-3-clause
| 150
| 0
| 8
| 40
| 37
| 23
| 14
| 6
| 0
|
/* Copyright (C) 2007 Free Software Foundation
   Contributed by Ollie Wild <aaw@google.com> */

/* Initialises a static from __COUNTER__; part of a GCC precompiled
   header (pch) test exercising the counter's saved state.  */
static int i = __COUNTER__;
|
SanDisk-Open-Source/SSD_Dashboard
|
uefi/gcc/gcc-4.6.3/gcc/testsuite/gcc.dg/pch/counter-2.hs
|
gpl-2.0
| 125
| 5
| 7
| 22
| 46
| 25
| 21
| 1
| 1
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeFamilies #-}
module Nix.Common (
module ClassyPrelude,
module Control.Applicative,
module Control.Exception,
-- module Control.Exception.ErrorList,
module Control.Monad,
module Control.Monad.Except,
module Control.Monad.Identity,
module Control.Monad.State.Strict,
module Control.Monad.Reader,
module Control.Monad.Writer,
module Control.Monad.Trans,
module Data.Char,
module Data.HashMap.Strict,
module Data.Either,
module Data.Maybe,
module Data.List,
module Data.String.Utils,
module Filesystem.Path.CurrentOS,
module GHC.Exts,
module GHC.IO.Exception,
module Text.Render,
Name, Record,
tuple, tuple3, fromRight, pathToText,
putStrsLn, putStrs, maybeIf,
joinBy, mapJoinBy
) where
import ClassyPrelude hiding (assert, asList, find, FilePath, bracket,
maximum, maximumBy, try)
import qualified Prelude as P
import Control.Monad (when)
import Control.Monad.Trans (MonadIO(..), lift)
import Control.Monad.Reader (ReaderT(..), MonadReader(..), (<=<), (>=>), ask,
asks, runReaderT)
import Control.Monad.Writer (WriterT(..), MonadWriter(..), runWriterT)
import Control.Monad.State.Strict (MonadState, StateT, State, get, gets,
modify, put, liftM, liftIO, runState,
runStateT, execState, execStateT,
evalState, evalStateT)
import Control.Monad.Except (ExceptT, MonadError(..), throwError, runExceptT)
-- import Control.Exception.ErrorList
import Control.Monad.Identity (Identity(..))
import Control.Applicative hiding (empty, optional)
import Data.Char (isDigit, isAlpha)
import Data.List (maximum, maximumBy)
import Data.HashMap.Strict (HashMap, (!))
import qualified Data.HashMap.Strict as H
import Data.Maybe (fromJust, isJust, isNothing)
import Data.Either (isRight, isLeft)
import Data.String.Utils hiding (join)
import qualified Data.Text as T
import GHC.Exts (IsList)
import GHC.IO.Exception
import Control.Exception (bracket)
import Text.Render hiding (renderParens)
import Filesystem.Path.CurrentOS (FilePath, fromText, toText, collapse)
-- | Indicates that the text is some identifier.
type Name = Text
-- | A record is a lookup table with string keys.
type Record = HashMap Name
-- | Run two applicative actions left to right and pair their results.
tuple :: Applicative f => f a -> f b -> f (a, b)
tuple fa fb = liftA2 (,) fa fb

-- | Run three applicative actions left to right and collect their
-- results in a 3-tuple.
tuple3 :: Applicative f => f a -> f b -> f c -> f (a, b, c)
tuple3 fa fb fc = liftA3 (,,) fa fb fc
-- | Create a new hashmap by applying @f@ to every key.  If @f@ is not
-- injective, colliding keys are resolved arbitrarily by 'H.fromList'.
alterKeys :: (Eq k, Hashable k, Eq k', Hashable k') =>
             (k -> k') -> HashMap k v -> HashMap k' v
-- A pure value: plain function composition, no monadic do-block of
-- lets needed (the original dressed this up in `do`/`let` syntax).
alterKeys f = H.fromList . P.map (\(k, v) -> (f k, v)) . H.toList
-- | Extract the 'Right' value.
-- WARNING: partial — a 'Left' argument crashes via 'error'.
fromRight :: Either a b -> b
fromRight (Right x) = x
-- The Left payload was previously bound as `err` but never used
-- (it cannot be shown without a Show constraint); use a wildcard.
fromRight (Left _)  = error "Expected `Right` value"
-- | Concatenate the chunks and print them, ending with a newline.
putStrsLn :: MonadIO m => [Text] -> m ()
putStrsLn chunks = putStrLn (concat chunks)

-- | Concatenate the chunks and print them, with no trailing newline.
putStrs :: MonadIO m => [Text] -> m ()
putStrs chunks = putStr (concat chunks)
-- | @maybeIf cond x@ is @Just x@ when @cond@ holds, else @Nothing@.
maybeIf :: Bool -> a -> Maybe a
maybeIf cond x
  | cond      = Just x
  | otherwise = Nothing
-- | Look up a key that is assumed to be present.
-- WARNING: partial — uses 'fromJust', so a missing key crashes with
-- an uninformative error.  Callers must guarantee membership.
grab :: (Hashable k, Eq k) => k -> HashMap k v -> v
grab k = fromJust . H.lookup k
-- | Join text chunks with the given separator.
joinBy :: Text -> [Text] -> Text
joinBy sep = T.intercalate sep

-- | Render each element to text, then join with the separator.
mapJoinBy :: Text -> (a -> Text) -> [a] -> Text
mapJoinBy sep render xs = joinBy sep (map render xs)
-- | Convert a 'FilePath' to 'Text'.  'toText' yields 'Left' with an
-- approximate rendering when the path is not valid Unicode; we accept
-- the approximation rather than failing.
pathToText :: FilePath -> Text
pathToText pth = case toText pth of
  Left p -> p
  Right p -> p
|
adnelson/simple-nix
|
src/Nix/Common.hs
|
mit
| 4,014
| 0
| 14
| 832
| 1,173
| 694
| 479
| 95
| 2
|
/*Owner & Copyrights: Vance King Saxbe. A.*//* Copyright (c) <2014> Author Vance King Saxbe. A, and contributors Power Dominion Enterprise, Precieux Consulting and other contributors. Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager.*/-- {-# Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting, GoldSax Money, GoldSax Treasury, GoldSax Finance, GoldSax Banking and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. This Engagement sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager. LANGUAGE TemplateHaskell #-}
module GoldSaxMachineModule6.ReaderWriter where
import Control.Monad.Reader
import Control.Monad.Writer
-- import Control.Lens
import Data.Char
import GoldSaxMachineModule6.Lens2
import GoldSaxMachineModule6.Vector
import GoldSaxMachineModule6.KMeans
-- | Configuration for a k-means run: an initialisation function,
-- the number of clusters @k@, the convergence threshold, and the
-- requesting user.
data Settings e v = Settings { i :: Int -> [e] -> [v], k :: Int, th :: Double, user :: Person }

-- | Run 'kMeans' over the points, reading every parameter from the
-- 'Settings' carried by the 'Reader' environment.
kMeansMain :: (Vector v, Vectorizable e v) => [e] -> Reader (Settings e v) [v]
kMeansMain points = do i' <- asks i
                       k' <- asks k
                       t' <- asks th
                       return $ kMeans i' k' points t'
-- | Pretend to persist centroids for the current user.  Demonstrates
-- 'local': the nested action sees a user whose first name has been
-- upper-cased, while the outer environment is untouched.
saveKMeans :: Vector v => [v] -> Reader (Settings e v) ()
saveKMeans centroids =
  do u <- asks user
     printString $ "Saving for user: " ++ show u
     local (\s -> let Person f l = user s
                  in s { user = Person (map toUpper f) l }) $
       do u' <- asks user
          saveDatabase centroids u'
          return ()

-- | Cluster the same points twice — the second time with @k+1@
-- clusters via 'local' — and return both results for comparison.
compareClusters :: (Vector v, Vectorizable e v) => [e] -> Reader (Settings e v) ([v], [v])
compareClusters points = do c1 <- kMeansMain points
                            c2 <- local (\s -> s { k = k s + 1 })
                                        (kMeansMain points)
                            return (c1, c2)
-- | Stub: a real application would log to the console here.
printString :: String -> Reader (Settings e v) ()
printString _ = return ()

-- | Stub: a real application would write to a database here.
saveDatabase :: Vector v => [v] -> Person -> Reader (Settings e v) ()
saveDatabase _ _ = return ()

-- | Stub data source for the 'Writer' example below.
readInformation :: Writer String [String]
readInformation = return []

-- | Stub computation for the 'Writer' example below.
computeValue :: [String] -> Writer String ()
computeValue _ = return ()

-- | Example of accumulating a log with 'tell'.  Note the log type is
-- a plain 'String', so successive messages concatenate without any
-- separator.
accessDatabase :: Writer String ()
accessDatabase = do tell "Start database access"
                    info <- readInformation
                    computeValue info
                    tell "Finish database access"
/*email to provide support at vancekingsaxbe@powerdominionenterprise.com, businessaffairs@powerdominionenterprise.com, For donations please write to fundraising@powerdominionenterprise.com*/
|
VanceKingSaxbeA/GoldSaxMachineStore
|
GoldSaxMachineModule6/src/Chapter6/ReaderWriter.hs
|
mit
| 2,890
| 23
| 17
| 690
| 894
| 449
| 445
| 40
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module FD (
-- Types
FD, -- Monad for finite domain constraint solver
FDConstraint, -- Constraint
FDVar, -- Finite domain solver variable
FDExpr, -- Constraint expression
-- Functions on FDVars
runFD, -- Run the monad and return a list of solutions.
newVar, -- Create a new FDVar
newVars, -- Create multiple FDVars
hasValue, -- Constrain a FDVar to a specific value
same, -- Constrain two FDVars to be the same
different, -- Constrain two FDVars to be different
varsAllDifferent, -- Constrain a list of FDVars to be different
varsLabelling, -- Backtracking search for all solutions
solutions,
dump,
-- Functions on FDExprs
(#==),
(#\=),
(#<),
fromInt,
fromVar,
new,
news,
allDifferent,
labelling
) where
import Prelude hiding (lookup)
import Control.Monad
import Control.Applicative
import Control.Monad.Trans.Class
import Control.Monad.Trans.State.Lazy
import qualified Data.Map as Map
import Data.Map ((!), Map)
import Control.Lens
import Data.Map.Lens
import Data.Maybe
import Domain
-- FD variables

-- | An opaque solver variable, identified by an 'Int'.
newtype FDVar = FDVar { _unwrapFDVar :: Int } deriving (Ord, Eq)

type VarSupply = FDVar

-- | Per-variable bookkeeping: the constraints to re-fire whenever the
-- variable's domain shrinks, plus the current domain itself.
data VarInfo = VarInfo { _delayedConstraints :: !FDConstraint, _domain :: !Domain }

type VarMap = Map FDVar VarInfo

data FDState = FDState { _varSupply :: !VarSupply, _varMap :: !VarMap }

-- The FD monad

-- | Solver monad: state threaded over the list monad, so failure is
-- the empty list and alternative choices backtrack.
type FD a = StateT FDState [] a

type FDConstraint = FD ()

makeLenses ''FDVar
makeLenses ''FDState
makeLenses ''VarIn

-- Run the FD monad and produce a lazy list of possible solutions.
runFD :: FD a -> [a]
runFD fd = evalStateT fd initState

initState :: FDState
initState = FDState { _varSupply = FDVar 0, _varMap = Map.empty }

initVarInfo :: VarInfo
initVarInfo = VarInfo { _delayedConstraints = return (), _domain = maxDomain }

-- | Combining two infos sequences their delayed constraints and
-- merges their domains via the 'Domain' semigroup.
-- NOTE(review): 'mappend' is defined with no separate Semigroup
-- instance — pre-GHC-8.4 style; confirm the targeted base version.
instance Monoid VarInfo where
    mempty = initVarInfo
    mappend vi0 vi = vi0 & delayedConstraints %~ (>> vi ^. delayedConstraints)
                         & domain <>~ (vi ^. domain)
-- Get a new FDVar

-- | Allocate a fresh variable with initial domain @d@, registering it
-- in the variable map and bumping the supply counter.
newVar :: ToDomain a => a -> FD FDVar
newVar d = do
    v <- use varSupply
    varSupply . unwrapFDVar += 1
    let vi = initVarInfo & domain .~ toDomain d
    varMap . at v ?= vi
    return v

-- | Allocate @n@ fresh variables, all with domain @d@.
newVars :: ToDomain a => Int -> a -> FD [FDVar]
newVars n d = replicateM n (newVar d)

-- Look up the current domain of a variable.
lookup :: FDVar -> FD Domain
lookup x =
    use $ varMap . ix x . domain
-- Update the domain of a variable and fire all delayed constraints
-- associated with that variable.
update :: FDVar -> Domain -> FDConstraint
update x i = do
    vi <- use $ varMap . ix x
    varMap . ix x . domain .= i
    vi ^. delayedConstraints  -- re-run the constraints captured before the write

-- Add a new constraint for a variable to the constraint store.
addConstraint :: FDVar -> FDConstraint -> FDConstraint
addConstraint x constraint = do
    varMap . ix x . delayedConstraints %= (>> constraint)

-- Useful helper function for adding binary constraints between FDVars.
type BinaryConstraint = FDVar -> FDVar -> FDConstraint

-- | Run a binary constraint once immediately, then attach it to both
-- variables so it re-fires whenever either domain changes.
addBinaryConstraint :: BinaryConstraint -> BinaryConstraint
addBinaryConstraint f x y = do
    let constraint = f x y
    constraint
    addConstraint x constraint
    addConstraint y constraint
-- Constrain a variable to a particular value.
hasValue :: FDVar -> Int -> FDConstraint
var `hasValue` val = do
    vals <- lookup var
    guard $ val `member` vals  -- backtrack if val is outside the domain
    let i = singleton val
    when (i /= vals) $ update var i

-- Constrain two variables to have the same value.
same :: FDVar -> FDVar -> FDConstraint
same = addBinaryConstraint $ \x y -> do
    xv <- lookup x
    yv <- lookup y
    let i = xv `intersection` yv
    guard $ not $ Domain.null i
    when (i /= xv) $ update x i
    when (i /= yv) $ update y i

-- Constrain two variables to have different values.
different :: FDVar -> FDVar -> FDConstraint
different = addBinaryConstraint $ \x y -> do
    xv <- lookup x
    yv <- lookup y
    -- fail only when both are the same singleton; otherwise prune the
    -- other variable's domain once one side becomes a singleton
    guard $ not (isSingleton xv) || not (isSingleton yv) || xv /= yv
    when (isSingleton xv && xv `isSubsetOf` yv) $
        update y (yv `difference` xv)
    when (isSingleton yv && yv `isSubsetOf` xv) $
        update x (xv `difference` yv)

-- Constrain a list of variables to all have different values.
-- (Posts pairwise 'different' constraints: O(n^2) of them.)
varsAllDifferent :: [FDVar] -> FDConstraint
varsAllDifferent (x:xs) = do
    mapM_ (different x) xs
    varsAllDifferent xs
varsAllDifferent _ = return ()
-- Constrain one variable to have a value less than the value of another
-- variable.
lessThan :: FDVar -> FDVar -> FDConstraint
lessThan = addBinaryConstraint $ \x y -> do
    xv <- lookup x
    yv <- lookup y
    -- bounds-consistency pruning: x below y's max, y above x's min
    let xv' = filterLessThan (findMax yv) xv
    let yv' = filterGreaterThan (findMin xv) yv
    guard $ not $ Domain.null xv'
    guard $ not $ Domain.null yv'
    when (xv /= xv') $ update x xv'
    when (yv /= yv') $ update y yv'

-- Get all solutions for a constraint without actually updating the
-- constraint store.
solutions :: FD a -> FD [a]
solutions constraint = get <&> evalStateT constraint

-- Label variables using a depth-first left-to-right search.
varsLabelling :: [FDVar] -> FD [Int]
varsLabelling = mapM label where
    label var = do
        vals <- lookup var
        val <- lift $ elems vals  -- nondeterministic choice in the list monad
        var `hasValue` val
        return val

-- | Current domains of the given variables (debugging aid).
dump :: [FDVar] -> FD [Domain]
dump = mapM lookup
-- | Constraint expressions over solver variables and integer
-- literals; built via the 'Num' instance below.
data FDExpr
    = Int !Int
    | Var !FDVar
    | Plus !FDExpr !FDExpr
    | Minus !FDExpr !FDExpr
    | Times !FDExpr !FDExpr
    | Negate !FDExpr
    | Abs !FDExpr
    | Signum !FDExpr

-- | Numeric literals and arithmetic simply build expression trees.
instance Num FDExpr where
    (+) = Plus
    (-) = Minus
    (*) = Times
    negate = Negate
    abs = Abs
    signum = Signum
    fromInteger = Int . fromInteger

-- | Only 'Int' literals support 'fromEnum'.
instance Enum FDExpr where
    toEnum = Int
    fromEnum (Int n) = n
    -- Warning: partial function! But should be sufficient for ranges such as [1..] to work.

fromInt :: Int -> FDExpr
fromInt = Int

fromVar :: FDVar -> FDExpr
fromVar = Var

-- | Allocate a fresh variable and wrap it as an expression.
new :: ToDomain a => a -> FD FDExpr
new d = newVar d <&> fromVar

-- | Allocate @n@ fresh expression variables with domain @d@.
news :: ToDomain a => Int -> a -> FD [FDExpr]
news n d = replicateM n $ new d
interpret :: FDExpr -> FD FDVar
interpret (Var v) = return v
interpret (Int i) = newVar (i, i)
interpret (Plus e0 e1) = interpretBinary (+) e0 e1
interpret (Minus e0 e1) = interpretBinary (-) e0 e1
interpret (Times e0 e1) = interpretBinary (*) e0 e1
interpret (Negate e) = interpretUnary negate e
interpret (Abs e) = interpretUnary abs e
interpret (Signum e) = interpretUnary signum e -- XXX could be implemented more efficiently if required.
-- | Interpret a binary operator: create a result variable whose
-- initial domain is the pointwise image of the operand domains, then
-- install propagators so narrowing any of the three variables
-- re-narrows the others.
interpretBinary :: (Int -> Int -> Int) -> FDExpr -> FDExpr -> FD FDVar
interpretBinary op e0 e1 = do
    v0 <- interpret e0
    v1 <- interpret e1
    d0 <- lookup v0
    d1 <- lookup v1
    -- Result domain: every combination of current operand values.
    v <- newVar [n0 `op` n1 | n0 <- elems d0, n1 <- elems d1]
    let pc = constrainBinary (\n n0 n1 -> n == n0 `op` n1) v v0 v1
        nc0 = constrainBinary (\n0 n n1 -> n == n0 `op` n1) v0 v v1
        nc1 = constrainBinary (\n1 n n0 -> n == n0 `op` n1) v1 v v0
    -- Operand change: re-narrow the result and the other operand.
    addConstraint v0 $ pc >> nc1
    addConstraint v1 $ pc >> nc0
    -- Result change: re-narrow both operands.
    addConstraint v $ nc0 >> nc1
    return v

-- | Narrow @v@ to the values consistent, under @pred@, with some pair
-- of values from @v0@ and @v1@.  Fails (empty domain => 'guard') when
-- no consistent value remains.
constrainBinary :: (Int -> Int -> Int -> Bool) -> FDVar -> FDVar -> FDVar -> FDConstraint
constrainBinary pred v v0 v1 = do
    d <- lookup v
    d0 <- lookup v0
    d1 <- lookup v1
    let d' = toDomain [n | n <- elems d, n0 <- elems d0, n1 <- elems d1, pred n n0 n1]
    guard $ not $ Domain.null d'
    -- Only write back (and wake dependent constraints) on real change.
    when (d' /= d) $ update v d'
-- | Interpret a unary operator: fresh result variable over the image
-- of the operand domain, plus propagators in both directions.
interpretUnary :: (Int -> Int) -> FDExpr -> FD FDVar
interpretUnary op e0 = do
    v0 <- interpret e0
    d0 <- lookup v0
    v <- newVar [op n0 | n0 <- elems d0]
    addConstraint v0 $ constrainUnary (\n n0 -> n == op n0) v v0
    addConstraint v $ constrainUnary (\n0 n -> n == op n0) v0 v
    return v

-- | Narrow @v@ to the values consistent, under @pred@, with some
-- value of @v0@; fails when the narrowed domain would be empty.
constrainUnary :: (Int -> Int -> Bool) -> FDVar -> FDVar -> FDConstraint
constrainUnary pred v v0 = do
    d <- lookup v
    d0 <- lookup v0
    let d' = toDomain [n | n <- elems d, n0 <- elems d0, pred n n0]
    guard $ not $ Domain.null d'
    when (d' /= d) $ update v d'
infix 4 #==
-- | Constrain two expressions to be equal.
(#==) :: FDExpr -> FDExpr -> FDConstraint
a #== b = interpret a >>= \u -> interpret b >>= \v -> u `same` v

infix 4 #\=
-- | Constrain two expressions to take different values.
(#\=) :: FDExpr -> FDExpr -> FDConstraint
a #\= b = interpret a >>= \u -> interpret b >>= \v -> u `different` v

infix 4 #<
-- | Constrain the first expression to be strictly below the second.
(#<) :: FDExpr -> FDExpr -> FDConstraint
a #< b = interpret a >>= \u -> interpret b >>= \v -> u `lessThan` v

-- | Constrain all expressions to take pairwise distinct values.
allDifferent :: [FDExpr] -> FDConstraint
allDifferent es = mapM interpret es >>= varsAllDifferent

-- | Interpret the expressions, then search for a concrete assignment.
labelling :: [FDExpr] -> FD [Int]
labelling es = mapM interpret es >>= varsLabelling
|
dmoverton/finite-domain
|
src/FD.hs
|
mit
| 8,676
| 0
| 15
| 2,189
| 2,978
| 1,513
| 1,465
| -1
| -1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
module PrettyPrint where
import Syntax
import Text.PrettyPrint.ANSI.Leijen
import qualified Data.Text as T
-- | Things that can be rendered as an ANSI-colored 'Doc'.
class Display a where
    display :: a -> Doc

-- Rendered via 'show', so the output carries surrounding quotes.
-- NOTE(review): if unquoted text is wanted this should be
-- 'text . T.unpack' — confirm intent.
instance Display T.Text where
    display = text . show

instance Display String where
    display = text

-- | A type-checking result: a red error message, or a term/type pair.
instance Display (Either Doc (Term, Ty)) where
    display (Left err) = red (text "Error: ") <> err <> hardline
    display (Right res) = display res <> hardline

-- | Term annotated with its type, e.g. @true : Bool@.
instance Display (Term, Ty) where
    display (term, ty) = display term <> bold (text " : ") <> display ty

instance Display Term where
    display TmTrue = text "true"
    display TmFalse = text "false"
    display TmError = text "error"
    display (TmNumber n) = text $ show n
    -- Fallback: derived Show for any other constructor.
    display x = text $ show x

instance Display Ty where
    display TyBool = green $ text "Bool"
    display TyNum = blue $ text "Num"
    -- Arrow types print without parentheses, so nested arrows
    -- render ambiguously.
    display (TyArr l r) = display l <> dullyellow (text "→") <> display r
|
kellino/TypeSystems
|
fullError/PrettyPrint.hs
|
mit
| 1,029
| 0
| 10
| 229
| 354
| 180
| 174
| 28
| 0
|
module Problems where
import Test.Hspec
import Test.QuickCheck
import Control.Monad
import Control.Arrow ((&&&))
import Control.Applicative
-- | Last element of a non-empty list.  Crashes on the empty list.
myLast :: [a] -> a
myLast [x] = x
myLast (_:rest) = myLast rest

-- | Second-to-last element.  Needs at least two elements.
myButLast :: [a] -> a
myButLast [x, _] = x
myButLast (_:rest) = myButLast rest

-- | 1-indexed lookup.  Crashes when the index is out of range.
elementAt :: [a] -> Int -> a
elementAt (x:_) 1 = x
elementAt (_:rest) n = elementAt rest (n - 1)
-- | Length by structural recursion.
myLength :: [a] -> Int
myLength [] = 0
myLength (_:xs) = 1 + myLength xs

-- | Reverse a list with an accumulator (O(n)).
-- Fixed: the helper previously used the partial functions 'head' and
-- 'tail' behind a 'null' guard; total pattern matching is idiomatic
-- and behaves identically.
myReverse :: [a] -> [a]
myReverse xs = go xs []
  where
    go [] acc = acc
    go (y:ys) acc = go ys (y : acc)

-- | A list is a palindrome when it equals its own reverse.
isPalindrome :: Eq a => [a] -> Bool
isPalindrome xs = xs == myReverse xs
-- | A list whose elements may themselves be nested lists.
data NestedList a = Elem a
                  | List [NestedList a]
                  deriving (Show, Eq)

-- | Flatten arbitrary nesting into a single flat list.
flatten :: NestedList a -> [a]
flatten (Elem x) = [x]
flatten (List xs) = concatMap flatten xs
-- | Drop consecutive duplicates, keeping one representative per run.
compress :: Eq a => [a] -> [a]
compress [] = []
compress (x:xs) = x : compress (dropWhile (== x) xs)

-- | Group consecutive equal elements into sublists.
pack :: Eq a => [a] -> [[a]]
pack [] = []
pack (x:xs) = (x : same) : pack rest
  where (same, rest) = span (== x) xs

-- | Run-length encode: each group becomes (run length, element).
encode :: Eq a => [a] -> [(Int,a)]
encode xs = [(length g, c) | g@(c:_) <- pack xs]

-- | Run-length token: a run of several equal elements, or a lone one.
data CompressToken a = Multiple Int a
                     | Single a
                     deriving (Eq,Show)

-- | Run-length encode, collapsing singleton runs to 'Single'.
encodeModified :: Eq a => [a] -> [CompressToken a]
encodeModified xs = [toToken g | g <- pack xs]
  where
    toToken [c] = Single c
    toToken g = Multiple (length g) (head g)

-- | Run-length encode without building intermediate groups.
encodeDirect :: Eq a => [a] -> [CompressToken a]
encodeDirect [] = []
encodeDirect (x:xs) = token : encodeDirect rest
  where
    (same, rest) = span (== x) xs
    token | null same = Single x
          | otherwise = Multiple (1 + length same) x

-- | Inverse of 'encodeModified'.
decodeModified :: Eq a => [CompressToken a] -> [a]
decodeModified tokens = concat [expand t | t <- tokens]
  where
    expand (Single c) = [c]
    expand (Multiple n c) = replicate n c
-- | Repeat every element twice, in place.
duplicate :: [a] -> [a]
duplicate = concatMap (\x -> [x, x])

-- | Repeat every element n times, preserving order.
repli :: [a] -> Int -> [a]
repli xs n = concatMap expand xs
  where
    expand x = rep n
      where
        rep 0 = []
        rep k = x : rep (k - 1)
-- | Drop every n-th element (positions counted from 1).
dropEvery :: [a] -> Int -> [a]
dropEvery xs n = [x | (x, i) <- zip xs [1 ..], i `mod` n /= 0]

-- | Split a list at the given position (like 'splitAt').
split :: [a] -> Int -> ([a],[a])
split [] _ = ([], [])
split whole@(x:xs) n
  | n <= 0 = ([], whole)
  | otherwise = let (front, back) = split xs (n - 1)
                in (x : front, back)
-- | Extract the elements between positions s and e, 1-indexed and
-- inclusive.
-- Fixed: the inner loop had no case for the empty list, so an end
-- index past the end of the input crashed with a pattern-match
-- failure (e.g. @slice "abc" 2 10@); it now returns what is there.
slice :: [a] -> Int -> Int -> [a]
slice xs s e = go xs 1
  where
    go [] _ = []
    go (y:ys) c
      | c < s = go ys (c + 1)
      | c <= e = y : go ys (c + 1)  -- c >= s holds here
      | otherwise = []
-- | Rotate a list n places to the left (negative n rotates right).
-- Fixed: the previous implementation walked an infinite @endless@
-- list and (a) looped forever on the empty list and (b) looped
-- forever for n < -length xs, since the countdown never reached 0.
-- Using 'mod' (non-negative for a positive divisor in Haskell)
-- normalizes any offset; results for previously-working inputs are
-- unchanged.
rotate :: [a] -> Int -> [a]
rotate [] _ = []
rotate xs n = drop k xs ++ take k xs
  where k = n `mod` length xs
-- | Remove the element at the given 1-indexed position, returning it
-- together with the remaining list.  Indices <= 1 remove the head.
-- Fixed: the empty-list case was a bare 'undefined'; it now raises a
-- descriptive 'error' (still bottom, so callers are unaffected).
removeAt :: Int -> [a] -> (a,[a])
removeAt _ [] = error "removeAt: index past the end of the list"
removeAt n (x:xs)
  | n <= 1 = (x, xs)
  | otherwise = (y, x : ys)
  where (y, ys) = removeAt (n - 1) xs
-- | Hspec suite exercising the solutions above (examples plus a few
-- QuickCheck properties).
main :: IO ()
main = hspec $ do
  describe "List functions" $ do
    it "should find the last element of the list" $ do
      myLast [1,2,3,4] `shouldBe` 4
    -- NOTE(review): 'shouldThrow' expects an IO action; a pure
    -- 'myLast []' likely needs 'evaluate' here — confirm this compiles.
    it "last of an empty list should be an error" $ do
      myLast [] `shouldThrow` anyException
    it "should find last but one element of the list" $ do
      myButLast [1,2,3,4] `shouldBe` 3
    it "element at finds the nth element of the list" $ do
      elementAt [1,2,3] 2 `shouldBe` 2
    it "finds the length of a list" $ do
      myLength "Hello, world!" `shouldBe` 13
    it "myReverse reverses a string" $ do
      myReverse [1,2,3,4] `shouldBe` [4,3,2,1]
    it "checks for palindomes (positive)" $ do
      isPalindrome [1,2,3,2,1]
    it "checks for palindrome (negative)" $ do
      not (isPalindrome [1,2,3])
    it "flattens nested lists" $ do
      flatten (Elem 5) `shouldBe` [5]
    it "really does flatten lists" $ do
      flatten (List [List [Elem 5]]) `shouldBe` [5]
    it "eliminates consecutive duplicates" $ do
      compress "aaabbbcdddeee" `shouldBe` "abcde"
    it "packs consecutive duplicates into sublists" $ do
      pack "aaaabbbccd" `shouldBe` ["aaaa","bbb","cc","d"]
    it "performs run length encoding" $ do
      encode "aaaabbbccd" `shouldBe` [(4,'a'),(3,'b'),(2,'c'),(1,'d')]
    it "eliminates redundancy during compress" $ do
      encodeModified "aaaabbbccd" `shouldBe` [Multiple 4 'a'
                                             ,Multiple 3 'b'
                                             ,Multiple 2 'c'
                                             ,Single 'd']
    it "decode modified" $ do
      decodeModified [Multiple 4 'a'
                     ,Multiple 3 'b'
                     ,Multiple 2 'c'
                     ,Single 'd'] `shouldBe` "aaaabbbccd"
    -- Round-trip property: decode is a left inverse of encode.
    it "decode modified and encode modified work the same" $ property $
      \xs -> decodeModified (encodeModified xs) == (xs :: [Int])
    it "eliminates redundancy during compress (efficient)" $ do
      encodeDirect "aaaabbbccd" `shouldBe` [Multiple 4 'a'
                                           ,Multiple 3 'b'
                                           ,Multiple 2 'c'
                                           ,Single 'd']
    it "duplicate the items in a list" $ do
      duplicate [1,2,3] `shouldBe` [1,1,2,2,3,3]
    it "duplicate doubles the length" $ property $
      \xs -> length (duplicate xs) == 2 * length (xs :: [Int])
    it "replicate elements" $ do
      repli [1,2,3] 3 `shouldBe` [1,1,1,2,2,2,3,3,3]
    it "dropEvery" $ do
      dropEvery "abcdefghik" 3 `shouldBe` "abdeghk"
    it "split" $ do
      split "abcdefghik" 3 `shouldBe` ("abc", "defghik")
    it "split is equivalent to splitAt" $ property $
      \xs n -> splitAt n xs == split (xs :: [Char]) (n :: Int)
    it "slice" $ do
      slice ['a','b','c','d','e','f','g','h','i','k'] 3 7 `shouldBe` "cdefg"
    it "rotate 1" $ do
      rotate ['a','b','c','d','e','f','g','h'] 3 `shouldBe` "defghabc"
    it "rotate 2" $ do
      rotate ['a','b','c','d','e','f','g','h'] (-2) `shouldBe` "ghabcdef"
    it "remove at" $ do
      removeAt 2 "abcd" `shouldBe` ('b',"acd")
|
fffej/codekatas
|
99Problems/1-20/Problems.hs
|
mit
| 6,736
| 0
| 21
| 2,173
| 3,178
| 1,645
| 1,533
| 179
| 4
|
-- Copyright (c) 2009 Takashi Yamamiya
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-- copies of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
-- The above copyright notice and this permission notice shall be included in
-- all copies or substantial portions of the Software.
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-- THE SOFTWARE.
-- | A prolog interpreter.
module Prolog
(-- * Data structures
Term(..), Clause(..),
-- * Utility constructors for debugging
w, s, cons,
-- * Reader
parse, parse',
atom, variable, struct, list, nil, terms, arguments, term, clause, clauses, query,
-- * Printer
display,
-- * Unification
unify, unifyList, applyTerm,
-- * Solver
prove, rename,
-- * Testing
solveString, start) where
import Text.ParserCombinators.Parsec
import Data.Maybe (maybeToList)
import Char (isUpper)
infix 6 :-
-- | Prolog terms: logic variables (name plus a renaming level used to
-- split namespaces per resolution step) and compound terms; atoms are
-- 'Struct's with an empty argument list.
data Term = Var String Int | Struct String [Term] deriving (Show, Eq)
-- | A clause: head ':-' body goals (empty body for a fact).
data Clause = Term :- [Term] deriving (Show, Eq)
-- | Commands accepted by the REPL.
data Command = Fact Clause | Query [Term] | ShowAll | Noop
-- | A program is simply a list of clauses.
type Rules = [Clause]
-- Utility constructors for debugging
-- Leading upper case makes a variable, otherwise a constant atom.
-- Partial: fails on the empty string.
w :: String -> Term
w s@(x:xs) | isUpper x = Var s 0
           | otherwise = Struct s []
-- Build a compound term from a functor name and arguments.
s :: String -> [Term] -> Term
s n xs = Struct n xs
-- Build a "cons" cell from an atom/variable name and a tail term.
cons s cdr = (Struct "cons" [w s, cdr])
---- Unification ----
-- | A substitution is an association list from variables to terms.
type Substitution = [(Term, Term)]
-- | The empty substitution (trivially true).
true = []
-- | > apply [(w"X", w"Y"), (w"Y", w"Z")] [(w"X"), (w"Y")] == [(w"Z"), (w"Z")]
apply :: Substitution -> [Term] -> [Term]
apply s ts = [applyTerm s t | t <- ts]
-- Apply a substitution to one term, chasing chained variable bindings.
-- NOTE(review): every binding's left-hand side is assumed to be a
-- 'Var'; a 'Struct' key with a 'Var' term would hit a pattern-match
-- failure — confirm callers only build Var keys (unify does).
applyTerm [] (Var y n) = Var y n
applyTerm ((Var x i, t):s) (Var y j) | x == y && i == j = applyTerm s t
                                     | otherwise = applyTerm s (Var y j)
applyTerm s (Struct n ts) = Struct n (apply s ts)
-- | > unify (w"X") (w"apple") == Just [(w"X", w"apple")]
-- Returns the unifying substitution, or Nothing on a functor/arity
-- clash.  NOTE(review): no occurs check is performed, so cyclic
-- bindings like X = f(X) are accepted — typical for toy Prologs.
unify :: Term -> Term -> Maybe Substitution
unify (Var x n) (Var y m) = Just [(Var x n, Var y m)]
unify (Var x n) y = Just [(Var x n, y)]
unify x (Var y m) = Just [(Var y m, x)]
unify (Struct a xs) (Struct b ys)
    | a == b = unifyList xs ys
    | otherwise = Nothing

-- | Unify argument lists pairwise, threading the substitution through
-- the remaining arguments before unifying them.
unifyList :: [Term] -> [Term] -> Maybe Substitution
unifyList [] [] = Just true
unifyList [] _ = Nothing
unifyList _ [] = Nothing
unifyList (x:xs) (y:ys) = do s <- unify x y
                             s' <- unifyList (apply s xs) (apply s ys)
                             return (s ++ s')
---- Solver ----
-- | Prove a list of goals against the rules, returning one
-- substitution per solution (lazily; may be infinite for recursive
-- programs).
prove :: Rules -> [Term] -> [Substitution]
prove rules goals = find rules 1 goals
-- Depth first search
-- > find (parse' clauses "p(X):-q(X). q(a).") 1 [parse' term "p(X)"]
-- The Int is the renaming level: each resolution step renames the
-- program's variables so they cannot clash with the goal's.
find :: Rules -> Int -> [Term] -> [Substitution]
find rules i [] = [true]
find rules i goals = do let rules' = rename rules i
                        (s, goals') <- branch rules' goals
                        solution <- find rules (i + 1) goals'
                        return (s ++ solution)
-- Find next branches. A branch is a pair of substitution and next goals.
-- > branch (parse' clauses "n(z). n(s(X)):-n(X).") (parse' query "?-n(X).")
-- Tries every rule whose head unifies with the first goal; the rule
-- body (instantiated) is prepended to the remaining goals.
branch :: Rules -> [Term] -> [(Substitution, [Term])]
branch rules (goal:goals) = do head :- body <- rules
                               s <- maybeToList (unify goal head)
                               return (s, apply s (body ++ goals))
-- | Rename all variables in the rules to split namespaces.
rename :: Rules -> Int -> Rules
rename rules i = [ renameVar head :- renameVars body | head :- body <- rules]
  where renameVar (Var s _) = Var s i
        renameVar (Struct s ts) = Struct s (renameVars ts)
        renameVars ts = [renameVar t | t <- ts]
---- Reader ----
-- Spaces are always consumed with the previous token.
-- | Run a parser assuming success.  Partial (irrefutable Right);
-- intended for trusted input such as tests.
parse' parser s = result where Right result = parse parser "" s
nil = Struct "nil" []
-- | Parse one character plus trailing whitespace.
schar c = char c >> spaces
special = oneOf ":;+=-*&$#@/.~!" <|> digit
-- | Atom: lower-case identifier, or a run of special characters.
atom = (lower >>= \x -> many alphaNum >>= \xs -> spaces >> return (x:xs)) <|>
       (many1 special >>= \x -> spaces >> return x)
-- | Variable: upper-case identifier; renaming level starts at 0.
variable = upper >>= \x -> many alphaNum >>= \xs -> spaces >> return (Var (x:xs) 0)
-- | Compound term; a constant when the argument list is empty.
struct = atom >>= \name -> arguments >>= \ls -> return (Struct name ls)
arguments = ((schar '(' >> terms >>= \ls -> schar ')' >> return ls)) <|>
            (spaces >> return [])
-- | List syntax desugared to cons/nil structs; supports [H|T] tails.
list = schar '[' >> terms >>= \ts -> listTail >>= \t -> return (makeList ts t)
  where makeList [] cdr = cdr
        makeList (x:xs) cdr = Struct "cons" [x, makeList xs cdr]
listTail = (schar '|' >> term >>= \t -> schar ']' >> return t) <|>
           (schar ']' >> return nil)
term = (variable <|> struct <|> list) >>= \t -> return t
terms = sepBy term (schar ',')
-- | Fact ("head.") or rule ("head :- goals.").
clause = struct >>= \head -> ((schar '.' >> return (head :- [])) <|>
                              (query >>= \goals -> return (head :- goals)))
clauses = many clause
arrow = (char '?' <|> char ':') >> schar '-'
query = arrow >> terms >>= \goals -> schar '.' >> return goals
-- | '%' comments and end-of-input are no-ops.
noop = (char '%' >> skipMany anyToken) <|> eof
-- | One REPL input: fact, query, show-all ("??"), or no-op.
command :: Parser Command
command = spaces >>
          ((clause >>= \c -> return (Fact c)) <|>
           try (query >>= \ts -> return (Query ts)) <|>
           (string "??" >> return (ShowAll)) <|>
           (noop >> return (Noop)))
-- parse atom "" "atom1234"
-- parse variable "" "Variable1234"
-- parse struct "" "father ( masuo , tara ) "
-- parse arguments "" "( orange , Apple , banana ) "
-- parse list "" "[]"
-- parse list "" "[ 1 , 2 | 3 ] "
-- parse terms "" "orange , apple , banana "
-- parse term "" "someAtom "
-- parse clause "" "child ( X , Y) :- mother( Y, X ) . "
-- parse query "" "?- apple ."
---- Printer ----
-- | Difference-list based pretty printing (in the style of 'ShowS').
class Display a where
    displays :: a -> String -> String
    display :: a -> String
    display x = displays x ""
instance Display Term where
    displays (Var s 0) = showString s
    -- Renamed variables show their renaming level, e.g. X_1.
    displays (Var s n) = showString s . showChar '_' . shows n
    displays (Struct "nil" []) = showString "[]"
    -- cons/nil structures are printed back in list syntax.
    displays (Struct "cons" [h, t]) = showChar '[' . displays h . displaysTail t . showChar ']'
    displays (Struct s []) = showString s
    displays (Struct s xs) = showString s . showChar '(' . displays xs . showChar ')'
-- Tail of a list: nothing for nil, ",x" for cons, "|x" for improper tails.
displaysTail (Struct "nil" []) = id
displaysTail (Struct "cons" [h, t]) = showChar ',' . displays h . displaysTail t
displaysTail x = showChar '|' . displays x
instance Display Clause where
    displays (head :- []) = displays head . showChar '.'
    displays (head :- bodies) = displays head . showString " :- " . displays bodies . showChar '.'
-- | Lists print comma-separated, without brackets.
instance Display a => Display [a] where
    displays [] = id
    displays [x] = displays x
    displays (x:xs) = displays x . showChar ',' . displays xs
-- | Substitution pairs print as var=term.
instance (Display a, Display b) => Display (a, b) where
    displays (x, y) = displays x . showChar '=' . displays y
-- | Render each element on its own line.
-- Fixed: the recursive call was 'display xs' (the list instance,
-- which joins with commas) instead of 'displayLines xs', so only the
-- first element was newline-terminated.
displayLines [] = ""
displayLines (x:xs) = display x ++ "\n" ++ displayLines xs
-- display (s"cons" [w"1", (s"cons" [w"2", nil])])
-- display ((s"child" [w"X",w"Y"]) :- [s"mother" [w"Y",w"X"]])
---- REPL --
-- | Entry point: a pure String -> String interaction driven by
-- 'interact'.
main = interact start
-- | Print the banner, then enter the loop with an empty rule base.
start = writeStr ("food(apple). -- Add a clause.\n" ++
                  "?- food(X). -- Query.\n" ++
                  "?? -- Show all.\n\n") (loop [])
-- | Read one line, parse it as a command, and execute it.
loop :: Rules -> String -> String
loop rules = readLine (exec rules . parse command "")
-- | Dispatch on the parsed command; facts are appended to the rules.
exec :: Rules -> Either ParseError Command -> String -> String
exec rules (Right (Fact c)) = writeStr ("=> " ++ display c ++ "\n" ) (loop (rules ++ [c]))
exec rules (Right (Query q)) = answer q (prove rules q) rules
exec rules (Right ShowAll) = writeStr (showAll rules) (loop rules)
exec rules (Right Noop) = loop rules
exec rules (Left e) = writeStr (show e ++ "\n") (loop rules)
-- | Show one solution (or "No" when none remain), then offer more.
answer :: [Term] -> [Substitution] -> Rules -> String -> String
answer q [] rules = writeStr "No\n" (loop rules)
answer q (c:cs) rules = writeStr ("=> " ++ result ++ "\n") (more q cs rules)
  where result = display (apply c q)
-- | A line starting with ';' asks for the next solution; anything
-- else accepts the current one.
more :: [Term] -> [Substitution] -> Rules -> String -> String
more q cs rules = readLine f
  where f (';':_) = answer q cs rules
        f x = writeStr "Yes\n" (loop rules)
-- | Render every rule, one per line.
showAll rules = [line | r <- rules, line <- "=> " ++ display r ++ "\n" ]
-- Interactive library
-- Its arguments are a string to be written and next process.
writeStr :: String -> (String -> String) -> (String -> String)
writeStr output proc input = output ++ proc input
-- Its argument is a process which receives a line.
readLine :: (String -> (String -> String)) -> (String -> String)
readLine proc input = case (nextLine input) of
    ("", []) -> "" -- End of file
    (line, rest) -> proc line rest
-- Split off the first line; the newline stays with the line.
nextLine "" = ("","")
nextLine ('\n':xs) = ("\n", xs)
nextLine (x:xs) = (x:ys, zs) where (ys, zs) = nextLine xs
---- Testing ----
-- | Test function
--
-- >>> solveString "p:-q. q:-r. r." "?-p."
-- > [[]]
-- >>> solveString' "p(X):-q(X).q(a)." "?-p(X)."
-- > ["X=X_1,X_1=a"]
-- Parses a program and a query, then proves the query.  Uses the
-- partial 'parse'' helper, so malformed input crashes.
solveString :: String -> String -> [Substitution]
solveString rules q =
    let rules' = parse' clauses rules
        q' = parse' query q
    in prove rules' q'
-- | Like 'solveString' but renders each substitution for reading.
solveString' rules q = [display s | s <- solveString rules q]
|
propella/prolog
|
Prolog.hs
|
mit
| 10,059
| 0
| 16
| 2,690
| 3,356
| 1,744
| 1,612
| 153
| 2
|
-- | Helper functions
module Dissent.Internal.Util where
import Control.Concurrent
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Resource
-- | Forks a function that returns a ResourceT, and returns the result
-- in an MVar primitive.
--
-- The handler runs on a thread forked with 'resourceForkIO'; the
-- caller can 'takeMVar' the returned MVar to wait for its result.
-- NOTE(review): resource lifetime across the fork follows
-- resourceForkIO's reference-counting semantics — confirm against the
-- resourcet documentation if resources are shared with the parent.
forkResource :: ResourceT IO a -> ResourceT IO (MVar a)
forkResource handler = do
    sync <- liftIO newEmptyMVar
    _ <- resourceForkIO $ do
        res <- handler
        liftIO $ putMVar sync res
    return sync
|
solatis/dissent
|
src/Dissent/Internal/Util.hs
|
mit
| 497
| 0
| 12
| 120
| 113
| 59
| 54
| 11
| 1
|
import XMonad
-- import XMonad.Actions.Volume
import XMonad.Hooks.DynamicLog
import Data.Monoid
import System.Exit
import XMonad.Util.Run
import XMonad.Hooks.SetWMName
import qualified XMonad.StackSet as W
import qualified Data.Map as M
import XMonad.Hooks.ManageDocks
import XMonad.Layout.NoBorders
-- | Terminal emulator launched by the mod+shift+return binding below.
myTerminal = "xterm"
-- | Three named workspaces followed by numbered ones 4..9.
myWorkspaces = ["Internet", "Music", "Code" ] ++ map show [4..9]
------------------------------------------------------------------------
-- Key bindings. Add, modify or remove key bindings here.
--
-- | Key bindings, keyed on (modifier mask, keysym).  Built from the
-- explicit list below plus generated workspace- and screen-switching
-- bindings.
myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList $
    -- launch a terminal
    [ ((modm .|. shiftMask, xK_Return), spawn $ XMonad.terminal conf)
    -- launch dmenu
    , ((modm, xK_p ), spawn "exe=`dmenu_path | dmenu` && eval \"exec $exe\"")
    -- launch gmrun
    , ((modm .|. shiftMask, xK_p ), spawn "gmrun")
    -- close focused window
    , ((modm .|. shiftMask, xK_c ), kill)
    -- Lock the screen
    , ((modm .|. shiftMask, xK_l ), spawn "xscreensaver-command -lock")
    -- Rotate through the available layout algorithms
    , ((modm, xK_space ), sendMessage NextLayout)
    -- Reset the layouts on the current workspace to default
    , ((modm .|. shiftMask, xK_space ), setLayout $ XMonad.layoutHook conf)
    -- Resize viewed windows to the correct size
    , ((modm, xK_n ), refresh)
    -- Move focus to the next window
    , ((modm, xK_Tab ), windows W.focusDown)
    -- Move focus to the next window
    , ((modm, xK_j ), windows W.focusDown)
    -- Move focus to the previous window
    , ((modm, xK_k ), windows W.focusUp )
    -- Move focus to the master window
    , ((modm, xK_m ), windows W.focusMaster )
    -- Swap the focused window and the master window
    , ((modm, xK_Return), windows W.swapMaster)
    -- Swap the focused window with the next window
    , ((modm .|. shiftMask, xK_j ), windows W.swapDown )
    -- Swap the focused window with the previous window
    , ((modm .|. shiftMask, xK_k ), windows W.swapUp )
    -- Shrink the master area
    , ((modm, xK_h ), sendMessage Shrink)
    -- Expand the master area
    , ((modm, xK_l ), sendMessage Expand)
    -- Push window back into tiling
    , ((modm, xK_t ), withFocused $ windows . W.sink)
    -- Increment the number of windows in the master area
    , ((modm , xK_comma ), sendMessage (IncMasterN 1))
    -- Deincrement the number of windows in the master area
    , ((modm , xK_period), sendMessage (IncMasterN (-1)))
    -- Toggle the status bar gap
    -- Use this binding with avoidStruts from Hooks.ManageDocks.
    -- See also the statusBar function from Hooks.DynamicLog.
    --
    -- , ((modm , xK_b ), sendMessage ToggleStruts)
    -- Quit xmonad
    , ((modm .|. shiftMask, xK_q ), io (exitWith ExitSuccess))
    -- Restart xmonad
    , ((modm , xK_q ), spawn "xmonad --recompile; xmonad --restart")
    ]
    ++
    --
    -- mod-[1..9], Switch to workspace N
    --
    -- mod-[1..9], Switch to workspace N
    -- mod-shift-[1..9], Move client to workspace N
    --
    -- (zip pairs each workspace name with a number key)
    [((m .|. modm, k), windows $ f i)
        | (i, k) <- zip (XMonad.workspaces conf) [xK_1 .. xK_9]
        , (f, m) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
    ++
    --
    -- mod-{w,e,r}, Switch to physical/Xinerama screens 1, 2, or 3
    -- mod-shift-{w,e,r}, Move client to screen 1, 2, or 3
    --
    [((m .|. modm, key), screenWorkspace sc >>= flip whenJust (windows . f))
        | (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]
        , (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
-- | Available layouts: master/stack split, its mirror, and fullscreen.
myLayout = tiled ||| Mirror tiled ||| Full
  where
    tiled = Tall nmaster delta ratio
    nmaster = 1      -- windows in the master pane
    delta = 3 / 100  -- resize step
    ratio = 3 / 5    -- master pane fraction
-- | Wire the pieces together and hand off to xmonad, wrapping the
-- default config with an xmobar status bar.
main = xmonad =<< xmobar defaultConfig {
    terminal = myTerminal,
    workspaces = myWorkspaces,
    keys = myKeys,
    startupHook = setWMName "LG3D", -- compatibility hack for Java GUIs
    layoutHook = lessBorders OnlyFloat $ avoidStruts $ myLayout
    , normalBorderColor = "#000000" -- black
    , focusedBorderColor = "#ff3f3f" -- reddish
    }
|
melloc/dotfiles
|
xmonad/xmonad.hs
|
mit
| 4,313
| 0
| 14
| 1,294
| 967
| 588
| 379
| 53
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Types.Instances where
-- Prelude.
import ClassyPrelude
import Data.Aeson
import Data.Aeson.Types (Value (String))
import qualified Data.ByteString.Char8 as B8
import Data.UUID (UUID)
import qualified Data.UUID as UUID
import Database.Persist.Sql
--------------------------------------------------------------------------------
-- | Persistent instances for @UUID@.
instance PersistField UUID where
    -- Stored in its canonical textual form in a DB-specific column.
    toPersistValue uuid = PersistDbSpecific . B8.pack . UUID.toString $ uuid
    fromPersistValue (PersistDbSpecific uuidB8) =
        case UUID.fromString $ B8.unpack uuidB8 of
            Just uuid -> Right uuid
            Nothing -> Left "Invalid UUID"
    fromPersistValue _ = Left "Not PersistDBSpecific"
-- | Map to the database's native @uuid@ column type.
instance PersistFieldSql UUID where
    sqlType _ = SqlOther "uuid"
--------------------------------------------------------------------------------
-- | Aeson @FromJSON@ and @ToJSON@ instances for @UUID@.
instance FromJSON UUID where
    -- Accepts only JSON strings in canonical textual UUID form.
    parseJSON = withText "UUID" $ \uuidStr ->
        case UUID.fromText uuidStr of
            Just uuid -> pure uuid
            Nothing -> fail "Failed to parse UUID"
instance ToJSON UUID where
    toJSON = String . UUID.toText
|
jkachmar/servant-persistent-realworld
|
src/Types/Instances.hs
|
mit
| 1,266
| 0
| 11
| 268
| 254
| 137
| 117
| 25
| 0
|
module LC where
import Data.Map (fromListWith, toList)
-- | True when any value occurs more than once in the input.
containsDuplicates :: [Int] -> Bool
containsDuplicates xs = any ((> 1) . snd) (frequency xs)
  where frequency ys = toList (fromListWith (+) [(y, 1 :: Int) | y <- ys])
|
AriaFallah/leetcode
|
haskell/containsDuplicates.hs
|
mit
| 245
| 0
| 12
| 46
| 113
| 64
| 49
| 5
| 1
|
module System.Conscript (conscript) where
import System.Process
import System.Exit
import Control.Monad
import Control.Concurrent
-- | Supervise the command given by @args@: a background loop keeps it
-- running, and the returned action kills (and thereby restarts) the
-- process once per element of its argument list.
conscript :: [String] -> IO ([String] -> IO ())
conscript args = do
    blocker <- newEmptyMVar
    running <- newEmptyMVar
    void $ forkIO $ forever $ starter args blocker running
    return $ mapM_ $ killer blocker running
-- | Terminate the current process, then wait on the blocker token
-- until the supervisor loop has come around and restarted it.
killer :: MVar () -> MVar ProcessHandle -> String -> IO ()
killer blocker running _input = void $ takeMVar running >>= terminateProcess >> takeMVar blocker
-- | One supervision round: publish the blocker token, start the
-- process, publish its handle for 'killer', then wait for exit and
-- report unexpected failures.
starter :: [String] -> MVar () -> MVar ProcessHandle -> IO ()
starter args blocker running = do
    putMVar blocker ()
    p <- startProcess args
    putMVar running p
    code <- waitForProcess p
    case code of ExitFailure 15 -> return () -- Killed! (SIGTERM exit status)
                 ExitFailure i -> putStrLn $ "Process [" ++ unwords args ++ "] failed with exit-status [" ++ show i ++ "]"
                 ExitSuccess -> return ()
-- | Spawn a process from command + arguments, returning its handle.
-- Explicitly errors on an empty argument list.
startProcess :: [String] -> IO ProcessHandle
startProcess (h:t) = (\(_,_,_,ph) -> ph) `fmap` createProcess (proc h t)
startProcess [] = error "startProcess must accept at least one argument"
|
sordina/Conscript
|
System/Conscript.hs
|
mit
| 1,115
| 0
| 14
| 232
| 412
| 202
| 210
| 25
| 3
|
module Exploration.Basics
(
-- Combinators
flip,
const,
-- Data types
Bool(..),
Int(..),
Integer(..),
Float(..),
Double(..),
module Data.Word
) where
import Prelude (Integer(..), Bool(..), Int(..), Float(..), Double(..))
import Data.Word
-- | Swap the first two arguments of a function.
flip :: (a -> b -> c) -> b -> a -> c
flip f b a = f a b

-- | Return the first argument, ignoring the second.
const :: a -> b -> a
const a _ = a
infixr 0 $
-- | Function application operator (local re-definition of Prelude's).
($) :: (a -> b) -> a -> b
f $ x = f x
infixr 0 $!
-- | Strict application: forces the argument to WHNF before applying.
-- NOTE(review): the bang pattern needs the BangPatterns extension,
-- which is not visible in this chunk — confirm it is enabled.
($!) :: (a -> b) -> a -> b
f $! x = let !x' = x in f x'
|
SamuelSchlesinger/Exploration
|
Exploration/Basics.hs
|
mit
| 422
| 0
| 9
| 110
| 228
| 137
| 91
| -1
| -1
|
module Helpers where
import Data.Set (fromList, toList)
import Config
-- | A host configuration passes when every individual check passes.
-- Improvement: @all (==True) xs@ is the redundant spelling of
-- @and xs@ (flagged by hlint); behavior is identical.
checkHost :: Config -> Bool
checkHost c =
    and [ checkFreeMem vms
        , checkVolumes vms
        , checkNetwork nets ]
  where vms = envVMs c
        nets = envNets c
-- | Placeholder: network validation is not implemented; always passes.
checkNetwork :: [Network] -> Bool
checkNetwork ns = True
-- | Placeholder: collects the distinct volume templates but performs
-- no validation yet; always passes.
checkVolumes :: [VM] -> Bool
checkVolumes vms =
    let tmpls = dedup $ map volTmpl vms
    in True
-- | Placeholder: free-memory check is not implemented; always passes.
checkFreeMem :: [VM] -> Bool
checkFreeMem vms = True
-- | Remove duplicates by round-tripping through a Set; the result
-- comes back in ascending order.
dedup :: Ord a => [a] -> [a]
dedup xs = toList (fromList xs)
|
gvnkd/os_dep_jen
|
Helpers.hs
|
mit
| 548
| 0
| 10
| 155
| 193
| 104
| 89
| 20
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.