code
stringlengths 2
1.05M
| repo_name
stringlengths 5
101
| path
stringlengths 4
991
| language
stringclasses 3
values | license
stringclasses 5
values | size
int64 2
1.05M
|
|---|---|---|---|---|---|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{- | some internal definitions. To use default persistence, import
@Data.TCache.DefaultPersistence@ instead -}
module Data.TCache.Defs where
import Control.Concurrent
import Control.Concurrent.STM (STM, TVar)
import Control.Exception as Exception
import Control.Monad (when, replicateM)
import Control.Monad.Reader (MonadReader, ask, ReaderT(ReaderT), runReaderT)
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.HashTable.IO as H
import Data.IORef
import Data.List (elemIndices, isInfixOf, stripPrefix)
import qualified Data.Map as Map
import Data.Maybe (fromJust, catMaybes)
import Data.Typeable
import System.Directory
(
createDirectoryIfMissing
, getDirectoryContents
, removeFile
)
import System.IO
(
openFile
, IOMode(ReadMode)
, hPutStrLn
, hClose
, hFileSize
, stderr
)
import System.IO.Unsafe
import System.IO.Error
import System.Mem.Weak
--import Debug.Trace
--(!>) = flip trace
-- | Time of last access to a cached element.
type AccessTime = Integer
-- | Time of last modification of a cached element.
type ModifTime = Integer

-- | The read state of a cache slot.
data Status a
  = NotRead       -- ^ Not yet read from the persistent store.
  | DoNotExist    -- ^ Known to be absent from the persistent store.
  | Exist a       -- ^ Present, with the cached payload.
  deriving (Typeable)

-- | A cached value together with its access and modification times.
data Elem a
  = Elem !a !AccessTime !ModifTime
  deriving (Typeable)

-- | Transactional slot holding the (possibly not-yet-read) cached element.
type TPVar a
  = TVar (Status (Elem a))

-- | A typed reference to a persistent object: its string key plus the
-- transactional slot holding the cached value.
data DBRef a
  = DBRef !String !(TPVar a)
  deriving (Typeable)
-- | Cast between two 'Typeable' types, failing with a descriptive runtime
-- error when the two types differ.  The extra 'String' argument is appended
-- to the error message to give the caller some context.
castErr :: forall a b. (Typeable a, Typeable b) => String -> a -> b
castErr ctx value = maybe typeError id (cast value)
  where
    typeError = error $ concat
      [ "Type error: ", show $ typeOf value
      , " does not match ", show $ typeOf (undefined :: b)
      , "\nThis means that objects of these two types have the same key"
      , "\nor the retrieved object type is not the previously stored one for the same key."
      , "\n", ctx
      ]
{- | Indexable is an utility class used to derive instances of IResource
Example:
@data Person= Person{ pname :: String, cars :: [DBRef Car]} deriving (Show, Read, Typeable)
data Car= Car{owner :: DBRef Person , cname:: String} deriving (Show, Read, Eq, Typeable)
@
Since Person and Car are instances of 'Read' and 'Show', defining the 'Indexable' instance
will implicitly define the IResource instance for file persistence:
@
instance Indexable Person where key Person{pname=n} = \"Person \" ++ n
instance Indexable Car where key Car{cname= n} = \"Car \" ++ n
@
-}
class Indexable a where
  -- | Unique string key used to identify (and persist) a value.
  key :: a -> String

--instance IResource a => Indexable a where
-- key x = keyResource x

-- | A 'String' is its own key.
instance Indexable String where
  key = id

instance Indexable Int where
  key = show

instance Indexable Integer where
  key = show

-- | The unit value maps to a fixed key.
instance Indexable () where
  key () = "void"
{- | Serialize is an alternative to the IResource class for defining persistence in TCache.
The deserialization must be as lazy as possible.
serialization/deserialization are not performance critical in TCache
Read, Show, instances are implicit instances of Serializable
> serialize = pack . show
> deserialize= read . unpack
Since writes and reads to/from disk by the cache are not very frequent,
the performance of serialization is not critical.
-}
class Serializable a where
  -- | Encode a value for the persistent store.
  serialize :: a -> B.ByteString
  -- | Decode a value.  The default rejects with a runtime error, so any
  -- instance whose values are ever read back must override it.
  deserialize :: B.ByteString -> a
  deserialize = error "No deserialization defined for your data"
  -- | Decode a value when its key is also known; defaults to 'deserialize'.
  deserialKey :: String -> B.ByteString -> a
  deserialKey _ v = deserialize v
  -- | Per-type persistence mechanism.
  persist :: Proxy a -> Maybe Persist -- ^ `defaultPersist` if Nothing
  persist = const Nothing
-- | Keys are plain strings.
type Key
  = String

-- | The constraints a type must satisfy to be cached and persisted.
type IResource a = (Typeable a, Indexable a, Serializable a)

-- there are two references to the DBRef here
-- The Maybe one keeps it alive until the cache releases it for *Resources
-- calls, which do not reference dbrefs explicitly
-- The weak reference keeps the dbref alive until it is no longer referenced elsewhere
data CacheElem
  = forall a. (IResource a, Typeable a) => CacheElem (Maybe (DBRef a)) (Weak (DBRef a))

-- | Hash table from key to cache entry.
type Ht
  = H.BasicHashTable String CacheElem

-- | One hash table per cached type.
type Hts
  = Map.Map TypeRep Ht

-- Contains the hashtable and last sync time.
type Cache
  = IORef (Hts,Integer)

-- | Whether an entry should be (re)inserted into the hash table.
data CheckTPVarFlags
  = AddToHash
  | NoAddToHash
-- | Set the cache. This is useful for hot loaded modules that will update an existing cache. Experimental
-- setCache :: Cache -> IO ()
-- setCache ref = readIORef ref >>= \ch -> writeIORef refcache ch
-- | The cache holder. Established by default
-- refcache :: Cache
-- refcache = unsafePerformIO $ newCache >>= newIORef
-- | Creates a new cache: empty per-type tables and last-sync time 0.
-- Experimental.
newCache :: IO (Hts,Integer)
newCache = return (Map.empty,0)
-- | A cache-modification trigger: an 'STM' callback keyed by type.
-- NOTE(review): the 'Maybe a' argument presumably carries the new value
-- ('Nothing' on deletion) — confirm at the call sites that run triggers.
data CMTrigger
  = forall a. (Typeable a) => CMTrigger !((DBRef a) -> Maybe a -> STM ())

-- | A persistence mechanism has to implement these primitives.
-- 'filePersist' is the default file persistence.
data Persist = Persist
  { readByKey :: Key -> IO (Maybe B.ByteString) -- ^ read by key. It must be strict.
  , write :: Key -> B.ByteString -> IO () -- ^ write. It must be strict.
  , delete :: Key -> IO () -- ^ delete
  , listByType :: forall t. (Typeable t)
      => Proxy t -> IO [Key] -- ^ List keys of objects of the given type.
  , cmtriggers :: IORef [(TypeRep, [CMTrigger])] -- ^ Registered triggers, per type.
  , cache :: Cache -- ^ Cached values.
  , persistName :: String -- ^ For showing.
  }
-- | A 'Persist' displays as its 'persistName'.
instance Show Persist where
  show = persistName
-- | Implements default persistence of objects in files with their keys as filenames,
-- inside the given directory (created here if missing).
filePersist :: FilePath -> IO Persist
filePersist dir = do
  t <- newIORef []            -- no triggers registered yet
  c <- newCache >>= newIORef  -- fresh, empty cache
  createDirectoryIfMissing True dir
  return $ Persist
    { readByKey = defaultReadByKey dir
    , write = defaultWrite dir
    , delete = defaultDelete dir
    , listByType = defaultListByType dir
    , cmtriggers = t
    , cache = c
    , persistName = "File persist in " ++ show dir
    }
-- | An 'STM' computation with read access to a 'Persist' store.
newtype DB a
  = DB (ReaderT Persist STM a)
  deriving (Functor, Applicative, Monad, MonadReader Persist)

-- | Run a 'DB' action against the given store, yielding a plain 'STM' action.
runDB :: Persist -> DB a -> STM a
runDB s (DB h) = runReaderT h s

-- | Lift a store-consuming 'STM' function into 'DB'.
db :: (Persist -> STM a) -> DB a
db = DB . ReaderT

-- | Lift a plain 'STM' action into 'DB' (the store is ignored).
stm :: STM a -> DB a
stm = db . const
-- | Read the serialized object stored for key @k@ under directory @dir@.
-- Returns 'Nothing' when the file does not exist.
-- NOTE(review): a busy file is retried immediately with no delay, so this
-- spins until the file becomes available.
defaultReadByKey :: FilePath -> String -> IO (Maybe B.ByteString)
defaultReadByKey dir k = handle handler $ do
    s <- readFileStrict $ dir ++ "/" ++ k
    return $ Just s -- `debug` ("read "++ filename)
  where
    handler :: IOError -> IO (Maybe B.ByteString)
    handler e
      | isAlreadyInUseError e = defaultReadByKey dir k
      | isDoesNotExistError e = return Nothing
      -- "invalid" in the error string means the key cannot be used as a
      -- file name; anything else is treated as transient and retried.
      -- NOTE(review): the message below appends a closing quote that has
      -- no matching opening quote.
      | otherwise = if "invalid" `isInfixOf` ioeGetErrorString e
          then error $ "defaultReadByKey: " ++ show e
            ++ " defPath and/or keyResource are not suitable for a file path:\n" ++ k ++ "\""
          else defaultReadByKey dir k
-- | Write the serialized object for key @k@ into directory @dir@.
defaultWrite :: FilePath -> String -> B.ByteString -> IO ()
defaultWrite dir k x = safeWrite (dir ++ "/" ++ k) x

-- | Write @str@ to @filename@, creating the parent directory on demand and
-- retrying on transient I/O errors.
safeWrite :: FilePath -> B.ByteString -> IO ()
safeWrite filename str = handle handler $ B.writeFile filename str -- !> ("write "++filename)
  where
    handler :: IOError -> IO ()
    handler e
      | isDoesNotExistError e = do
          -- Maybe the path does not exist: create the directory part
          -- (everything up to and including the last '/') and retry.
          -- Matching on elemIndices avoids the partial 'last', which
          -- crashed when the path contained no slash at all.
          case elemIndices '/' filename of
            [] -> return ()
            is -> createDirectoryIfMissing True $ take (1 + last is) filename
          safeWrite filename str
      | otherwise = if ("invalid" `isInfixOf` ioeGetErrorString e)
          then
            -- The key cannot be used as a file name; give up loudly.
            error $ "defaultWriteResource: " ++ show e ++ " defPath and/or keyResource are not suitable for a file path: " ++ filename
          else do
            -- Transient error: report and retry (no delay between attempts).
            hPutStrLn stderr $ "defaultWriteResource: " ++ show e ++ " in file: " ++ filename ++ " retrying"
            safeWrite filename str
-- | Delete the file storing key @k@ under directory @dir@.  A missing file
-- is not an error; busy files and other failures are reported and retried
-- (currently with no delay — see the commented-out 'threadDelay's).
defaultDelete :: FilePath -> String -> IO ()
defaultDelete dir k = handle (handler filename) $ removeFile filename where
  filename = dir ++ "/" ++ k
  handler :: String -> IOException -> IO ()
  -- NOTE(review): the 'file' parameter is unused; the closed-over
  -- 'filename' is used instead.
  handler file e
    | isDoesNotExistError e = return () --`debug` "isDoesNotExistError"
    | isAlreadyInUseError e = do
        hPutStrLn stderr $ "defaultDelResource: busy in file: " ++ filename ++ " retrying"
        -- threadDelay 100000 --`debug`"isAlreadyInUseError"
        defaultDelete dir k
    | otherwise = do
        hPutStrLn stderr $ "defaultDelResource: " ++ show e ++ " in file: " ++ filename ++ " retrying"
        -- threadDelay 100000 --`debug` ("otherwise " ++ show e)
        defaultDelete dir k
-- | List the keys of every stored object of type @t@ in directory @dir@.
-- Files are named @\"TypeName-key\"@, so we keep the suffix of every
-- directory entry that starts with this type's prefix.
defaultListByType :: forall t. (Typeable t) => FilePath -> Proxy t -> IO [Key]
defaultListByType dir _ = do
    entries <- getDirectoryContents dir
    return [ k | Just k <- map (stripPrefix prefix) entries ]
  where
    prefix = show (typeOf (undefined :: t)) ++ "-"
-- | Strict read from file, needed for default file persistence: the whole
-- contents are read before the handle is closed, so no lazy-I/O handle leak
-- and no deferred read errors.
readFileStrict :: FilePath -> IO B.ByteString
readFileStrict f = openFile f ReadMode >>= \ h -> readIt h `finally` hClose h
  where
    readIt h = do
      s <- hFileSize h
      -- 'hFileSize' yields an Integer but 'B.hGet' wants an Int;
      -- files larger than maxBound :: Int would be truncated.
      B.hGet h (fromIntegral s)
-- | Read one object of type @t@ from the store, deserialising with
-- 'deserialKey'.  'evaluate' forces the result to WHNF (i.e. the 'Maybe'
-- constructor) so a failed read surfaces here rather than at a later use
-- site; the payload itself stays lazy.
readResourceByKey :: forall t. (Indexable t, Serializable t, Typeable t)
  => Persist -> Key -> IO (Maybe t)
readResourceByKey store k = readByKey store (typedFile pr k)
    >>= evaluate . fmap (deserialKey k)
  where
    pr = Proxy :: Proxy t

-- | Read many objects by key; one 'Maybe' per requested key, in order.
readResourcesByKey :: forall t. (Indexable t, Serializable t, Typeable t)
  => Persist -> [Key] -> IO [Maybe t]
readResourcesByKey store = mapM (readResourceByKey store)
-- | Serialise @s@ and write it to the store under its type-prefixed key.
writeResource :: forall t. (Indexable t, Serializable t, Typeable t)
  => Persist -> t -> IO ()
writeResource store s = write store (typedFile pr $ key s) $ serialize s
  where
    pr = Proxy :: Proxy t

-- | Delete the stored representation of @s@; only its key is consulted.
delResource :: forall t. (Indexable t, Serializable t, Typeable t)
  => Persist -> t -> IO ()
delResource store s = delete store $ typedFile pr (key s)
  where
    pr = Proxy :: Proxy t

-- | List the keys of all stored objects of the proxied type.
listResources :: forall t. (Serializable t, Indexable t, Typeable t)
  => Persist -> Proxy t -> IO [Key]
listResources = listByType
-- | Storage name for a value of type @t@: the shown 'TypeRep' and the key,
-- joined by a dash (e.g. @\"Int-42\"@).
typedFile :: forall t. (Indexable t, Typeable t) => Proxy t -> Key -> FilePath
typedFile _ k = show (typeOf (undefined :: t)) ++ "-" ++ k
|
ariep/TCache
|
src/Data/TCache/Defs.hs
|
Haskell
|
bsd-3-clause
| 10,379
|
{-# LANGUAGE CPP #-}
--
-- (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
--
--------------------------------------------------------------
-- Converting Core to STG Syntax
--------------------------------------------------------------
-- And, as we have the info in hand, we may convert some lets to
-- let-no-escapes.
module CoreToStg ( coreToStg, coreExprToStg ) where
#include "HsVersions.h"
import CoreSyn
import CoreUtils ( exprType, findDefault )
import CoreArity ( manifestArity )
import StgSyn
import Type
import TyCon
import MkId ( coercionTokenId )
import Id
import IdInfo
import DataCon
import CostCentre ( noCCS )
import VarSet
import VarEnv
import Module
import Name ( getOccName, isExternalName, nameOccName )
import OccName ( occNameString, occNameFS )
import BasicTypes ( Arity )
import TysWiredIn ( unboxedUnitDataCon )
import Literal
import Outputable
import MonadUtils
import FastString
import Util
import DynFlags
import ForeignCall
import Demand ( isUsedOnce )
import PrimOp ( PrimCall(..) )
import Data.Maybe (isJust)
import Control.Monad (liftM, ap)
-- Note [Live vs free]
-- ~~~~~~~~~~~~~~~~~~~
--
-- The actual Stg datatype is decorated with live variable information, as well
-- as free variable information. The two are not the same. Liveness is an
-- operational property rather than a semantic one. A variable is live at a
-- particular execution point if it can be referred to directly again. In
-- particular, a dead variable's stack slot (if it has one):
--
-- - should be stubbed to avoid space leaks, and
-- - may be reused for something else.
--
-- There ought to be a better way to say this. Here are some examples:
--
-- let v = [q] \[x] -> e
-- in
-- ...v... (but no q's)
--
-- Just after the `in', v is live, but q is dead. If the whole of that
-- let expression was enclosed in a case expression, thus:
--
-- case (let v = [q] \[x] -> e in ...v...) of
-- alts[...q...]
--
-- (ie `alts' mention `q'), then `q' is live even after the `in'; because
-- we'll return later to the `alts' and need it.
--
-- Let-no-escapes make this a bit more interesting:
--
-- let-no-escape v = [q] \ [x] -> e
-- in
-- ...v...
--
-- Here, `q' is still live at the `in', because `v' is represented not by
-- a closure but by the current stack state. In other words, if `v' is
-- live then so is `q'. Furthermore, if `e' mentions an enclosing
-- let-no-escaped variable, then its free variables are also live if `v' is.
-- Note [Collecting live CAF info]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- In this pass we also collect information on which CAFs are live for
-- constructing SRTs (see SRT.hs).
--
-- A top-level Id has CafInfo, which is
--
-- - MayHaveCafRefs, if it may refer indirectly to
-- one or more CAFs, or
-- - NoCafRefs if it definitely doesn't
--
-- The CafInfo has already been calculated during the CoreTidy pass.
--
-- During CoreToStg, we then pin onto each binding and case expression, a
-- list of Ids which represents the "live" CAFs at that point. The meaning
-- of "live" here is the same as for live variables, see above (which is
-- why it's convenient to collect CAF information here rather than elsewhere).
--
-- The later SRT pass takes these lists of Ids and uses them to construct
-- the actual nested SRTs, and replaces the lists of Ids with (offset,length)
-- pairs.
-- Note [Interaction of let-no-escape with SRTs]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Consider
--
-- let-no-escape x = ...caf1...caf2...
-- in
-- ...x...x...x...
--
-- where caf1,caf2 are CAFs. Since x doesn't have a closure, we
-- build SRTs just as if x's defn was inlined at each call site, and
-- that means that x's CAF refs get duplicated in the overall SRT.
--
-- This is unlike ordinary lets, in which the CAF refs are not duplicated.
--
-- We could fix this loss of (static) sharing by making a sort of pseudo-closure
-- for x, solely to put in the SRTs lower down.
-- Note [What is a non-escaping let]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Consider:
--
-- let x = fvs \ args -> e
-- in
-- if ... then x else
-- if ... then x else ...
--
-- `x' is used twice (so we probably can't unfold it), but when it is
-- entered, the stack is deeper than it was when the definition of `x'
-- happened. Specifically, if instead of allocating a closure for `x',
-- we saved all `x's fvs on the stack, and remembered the stack depth at
-- that moment, then whenever we enter `x' we can simply set the stack
-- pointer(s) to these remembered (compile-time-fixed) values, and jump
-- to the code for `x'.
--
-- All of this is provided x is:
-- 1. non-updatable - it must have at least one parameter (see Note
-- [Join point abstraction]);
-- 2. guaranteed to be entered before the stack retreats -- ie x is not
-- buried in a heap-allocated closure, or passed as an argument to
-- something;
-- 3. all the enters have exactly the right number of arguments,
-- no more no less;
-- 4. all the enters are tail calls; that is, they return to the
-- caller enclosing the definition of `x'.
--
-- Under these circumstances we say that `x' is non-escaping.
--
-- An example of when (4) does not hold:
--
-- let x = ...
-- in case x of ...alts...
--
-- Here, `x' is certainly entered only when the stack is deeper than when
-- `x' is defined, but here it must return to ...alts... So we can't just
-- adjust the stack down to `x''s recalled points, because that would lose
-- alts' context.
--
-- Things can get a little more complicated. Consider:
--
-- let y = ...
-- in let x = fvs \ args -> ...y...
-- in ...x...
--
-- Now, if `x' is used in a non-escaping way in ...x..., and `y' is used in a
-- non-escaping way in ...y..., then `y' is non-escaping.
--
-- `x' can even be recursive! Eg:
--
-- letrec x = [y] \ [v] -> if v then x True else ...
-- in
-- ...(x b)...
-- --------------------------------------------------------------
-- Setting variable info: top-level, binds, RHSs
-- --------------------------------------------------------------
-- | Convert a whole Core program to a list of STG bindings.
coreToStg :: DynFlags -> Module -> CoreProgram -> IO [StgBinding]
coreToStg dflags this_mod pgm =
  let (_env, _fvs, stg_binds) = coreTopBindsToStg dflags this_mod emptyVarEnv pgm
  in return stg_binds
-- | Convert a single Core expression to STG, starting from an empty
-- environment and discarding the free-variable/escape information.
coreExprToStg :: CoreExpr -> StgExpr
coreExprToStg expr = stg_expr
  where
    (stg_expr, _fvs, _escs) = initLne emptyVarEnv (coreToStgExpr expr)
-- Convert all top-level bindings, threading the environment downwards and
-- the free-variable information upwards.
coreTopBindsToStg
  :: DynFlags
  -> Module
  -> IdEnv HowBound -- environment for the bindings
  -> CoreProgram
  -> (IdEnv HowBound, FreeVarsInfo, [StgBinding])
coreTopBindsToStg _ _ env [] = (env, emptyFVInfo, [])
coreTopBindsToStg dflags this_mod env (b:bs)
  = (env2, fvs2, b':bs')
  where
    -- Notice the mutually-recursive "knot" here:
    -- env accumulates down the list of binds,
    -- fvs accumulates upwards
    -- Both where-bindings are lazy pattern matches, so this terminates
    -- provided converting b' does not force fvs1.
    (env1, fvs2, b' ) = coreTopBindToStg dflags this_mod env fvs1 b
    (env2, fvs1, bs') = coreTopBindsToStg dflags this_mod env1 bs
-- Convert one top-level binding (non-recursive or recursive group).
coreTopBindToStg
  :: DynFlags
  -> Module
  -> IdEnv HowBound
  -> FreeVarsInfo -- Info about the body
  -> CoreBind
  -> (IdEnv HowBound, FreeVarsInfo, StgBinding)

coreTopBindToStg dflags this_mod env body_fvs (NonRec id rhs)
  = let
      env' = extendVarEnv env id how_bound
      -- ($!) forces the manifest arity eagerly so it is not retained
      -- as a thunk in the environment.
      how_bound = LetBound TopLet $! manifestArity rhs

      (stg_rhs, fvs') =
        initLne env $ do
          (stg_rhs, fvs') <- coreToTopStgRhs dflags this_mod body_fvs (id,rhs)
          return (stg_rhs, fvs')

      bind = StgNonRec id stg_rhs
    in
    ASSERT2(consistentCafInfo id bind, ppr id )
    -- NB: previously the assertion printed 'rhs' and 'bind'
    -- as well as 'id', but that led to a black hole
    -- where printing the assertion error tripped the
    -- assertion again!
    (env', fvs' `unionFVInfo` body_fvs, bind)

coreTopBindToStg dflags this_mod env body_fvs (Rec pairs)
  = ASSERT( not (null pairs) )
    let
      binders = map fst pairs

      -- All binders go into the environment before any RHS is converted,
      -- since the RHSs may mention any binder of the group.
      extra_env' = [ (b, LetBound TopLet $! manifestArity rhs)
                   | (b, rhs) <- pairs ]
      env' = extendVarEnvList env extra_env'

      (stg_rhss, fvs')
        = initLne env' $ do
            (stg_rhss, fvss') <- mapAndUnzipM (coreToTopStgRhs dflags this_mod body_fvs) pairs
            let fvs' = unionFVInfos fvss'
            return (stg_rhss, fvs')

      bind = StgRec (zip binders stg_rhss)
    in
    ASSERT2(consistentCafInfo (head binders) bind, ppr binders)
    (env', fvs' `unionFVInfo` body_fvs, bind)
-- Assertion helper: this checks that the CafInfo on the Id matches
-- what CoreToStg has figured out about the binding's SRT. The
-- CafInfo will be exact in all cases except when CorePrep has
-- floated out a binding, in which case it will be approximate.
consistentCafInfo :: Id -> GenStgBinding Var Id -> Bool
consistentCafInfo id bind
  = WARN( not (exact || is_sat_thing) , ppr id <+> ppr id_marked_caffy <+> ppr binding_is_caffy )
    safe
  where
    -- Over-approximation (Id marked CAFfy, binding isn't) is safe;
    -- the reverse would be a genuine bug.
    safe = id_marked_caffy || not binding_is_caffy
    exact = id_marked_caffy == binding_is_caffy
    id_marked_caffy = mayHaveCafRefs (idCafInfo id)
    binding_is_caffy = stgBindHasCafRefs bind
    -- CorePrep-introduced "sat" bindings are exempt from the
    -- exactness warning (their info is only approximate).
    is_sat_thing = occNameFS (nameOccName (idName id)) == fsLit "sat"
-- Convert the RHS of one top-level binding, checking that the arity of
-- the generated STG RHS agrees with the Id's recorded arity.
coreToTopStgRhs
  :: DynFlags
  -> Module
  -> FreeVarsInfo -- Free var info for the scope of the binding
  -> (Id,CoreExpr)
  -> LneM (StgRhs, FreeVarsInfo)

coreToTopStgRhs dflags this_mod scope_fv_info (bndr, rhs)
  = do { (new_rhs, rhs_fvs, _) <- coreToStgExpr rhs
       ; lv_info <- freeVarsToLiveVars rhs_fvs

       ; let stg_rhs = mkTopStgRhs dflags this_mod rhs_fvs (mkSRT lv_info) bndr bndr_info new_rhs
             stg_arity = stgRhsArity stg_rhs
       ; return (ASSERT2( arity_ok stg_arity, mk_arity_msg stg_arity) stg_rhs,
                 rhs_fvs) }
  where
    bndr_info = lookupFVInfo scope_fv_info bndr

    -- It's vital that the arity on a top-level Id matches
    -- the arity of the generated STG binding, else an importing
    -- module will use the wrong calling convention
    -- (Trac #2844 was an example where this happened)
    -- NB1: we can't move the assertion further out without
    -- blocking the "knot" tied in coreTopBindsToStg
    -- NB2: the arity check is only needed for Ids with External
    -- Names, because they are externally visible. The CorePrep
    -- pass introduces "sat" things with Local Names and does
    -- not bother to set their Arity info, so don't fail for those
    arity_ok stg_arity
      | isExternalName (idName bndr) = id_arity == stg_arity
      | otherwise = True
    id_arity = idArity bndr
    mk_arity_msg stg_arity
      = vcat [ppr bndr,
              text "Id arity:" <+> ppr id_arity,
              text "STG arity:" <+> ppr stg_arity]
-- Build the STG RHS for a top-level binding.  A constructor application
-- is treated as updatable exactly when it is a dynamic (DLL) one.
mkTopStgRhs :: DynFlags -> Module -> FreeVarsInfo
            -> SRT -> Id -> StgBinderInfo -> StgExpr
            -> StgRhs
mkTopStgRhs dflags this_mod = mkStgRhs' is_dll_con_app
  where
    is_dll_con_app con args = isDllConApp dflags this_mod con args
-- ---------------------------------------------------------------------------
-- Expressions
-- ---------------------------------------------------------------------------
-- Convert one Core expression to STG, also computing its free variables
-- and the subset of those that escape.
coreToStgExpr
  :: CoreExpr
  -> LneM (StgExpr,       -- Decorated STG expr
           FreeVarsInfo,  -- Its free vars (NB free, not live)
           EscVarsSet)    -- Its escapees, a subset of its free vars;
                          -- also a subset of the domain of the envt
                          -- because we are only interested in the escapees
                          -- for vars which might be turned into
                          -- let-no-escaped ones.

-- The second and third components can be derived in a simple bottom up pass, not
-- dependent on any decisions about which variables will be let-no-escaped or
-- not. The first component, that is, the decorated expression, may then depend
-- on these components, but it in turn is not scrutinised as the basis for any
-- decisions. Hence no black holes.

-- No LitInteger's should be left by the time this is called. CorePrep
-- should have converted them all to a real core representation.
coreToStgExpr (Lit (LitInteger {})) = panic "coreToStgExpr: LitInteger"
coreToStgExpr (Lit l) = return (StgLit l, emptyFVInfo, emptyVarSet)
-- A bare variable (or coercion token) is a nullary application.
coreToStgExpr (Var v) = coreToStgApp Nothing v [] []
coreToStgExpr (Coercion _) = coreToStgApp Nothing coercionTokenId [] []

coreToStgExpr expr@(App _ _)
  = coreToStgApp Nothing f args ticks
  where
    (f, args, ticks) = myCollectArgs expr

coreToStgExpr expr@(Lam _ _)
  = let
      (args, body) = myCollectBinders expr
      args' = filterStgBinders args   -- drop type/coercion binders
    in
    extendVarEnvLne [ (a, LambdaBound) | a <- args' ] $ do
      (body, body_fvs, body_escs) <- coreToStgExpr body
      let
          fvs = args' `minusFVBinders` body_fvs
          escs = body_escs `delVarSetList` args'
          -- A lambda with no value binders is just its body.
          result_expr | null args' = body
                      | otherwise = StgLam args' body
      return (result_expr, fvs, escs)

coreToStgExpr (Tick tick expr)
  = do case tick of
         HpcTick{} -> return ()
         ProfNote{} -> return ()
         SourceNote{} -> return ()
         Breakpoint{} -> panic "coreToStgExpr: breakpoint should not happen"
       (expr2, fvs, escs) <- coreToStgExpr expr
       return (StgTick tick expr2, fvs, escs)

-- Casts are dropped at this stage.
coreToStgExpr (Cast expr _)
  = coreToStgExpr expr

-- Cases require a little more real work.

coreToStgExpr (Case scrut _ _ [])
  = coreToStgExpr scrut
-- See Note [Empty case alternatives] in CoreSyn If the case
-- alternatives are empty, the scrutinee must diverge or raise an
-- exception, so we can just dive into it.
--
-- Of course this may seg-fault if the scrutinee *does* return. A
-- belt-and-braces approach would be to move this case into the
-- code generator, and put a return point anyway that calls a
-- runtime system error function.

coreToStgExpr (Case scrut bndr _ alts) = do
    (alts2, alts_fvs, alts_escs)
      <- extendVarEnvLne [(bndr, LambdaBound)] $ do
           (alts2, fvs_s, escs_s) <- mapAndUnzip3M vars_alt alts
           return ( alts2,
                    unionFVInfos fvs_s,
                    unionVarSets escs_s )
    let
        -- Determine whether the default binder is dead or not
        -- This helps the code generator to avoid generating an assignment
        -- for the case binder (in extremely rare cases) ToDo: remove.
        bndr' | bndr `elementOfFVInfo` alts_fvs = bndr
              | otherwise = bndr `setIdOccInfo` IAmDead

        -- Don't consider the default binder as being 'live in alts',
        -- since this is from the point of view of the case expr, where
        -- the default binder is not free.
        alts_fvs_wo_bndr = bndr `minusFVBinder` alts_fvs
        alts_escs_wo_bndr = alts_escs `delVarSet` bndr

    alts_lv_info <- freeVarsToLiveVars alts_fvs_wo_bndr

    -- We tell the scrutinee that everything
    -- live in the alts is live in it, too.
    (scrut2, scrut_fvs, _scrut_escs, scrut_lv_info)
      <- setVarsLiveInCont alts_lv_info $ do
           (scrut2, scrut_fvs, scrut_escs) <- coreToStgExpr scrut
           scrut_lv_info <- freeVarsToLiveVars scrut_fvs
           return (scrut2, scrut_fvs, scrut_escs, scrut_lv_info)

    return (
      StgCase scrut2 (getLiveVars scrut_lv_info)
              (getLiveVars alts_lv_info)
              bndr'
              (mkSRT alts_lv_info)
              (mkStgAltType bndr alts)
              alts2,
      scrut_fvs `unionFVInfo` alts_fvs_wo_bndr,
      alts_escs_wo_bndr `unionVarSet` getFVSet scrut_fvs
      -- You might think we should have scrut_escs, not
      -- (getFVSet scrut_fvs), but actually we can't call, and
      -- then return from, a let-no-escape thing.
      )
  where
    vars_alt (con, binders, rhs)
      | DataAlt c <- con, c == unboxedUnitDataCon
      = -- This case is a bit smelly.
        -- See Note [Nullary unboxed tuple] in Type.hs
        -- where a nullary tuple is mapped to (State# World#)
        ASSERT( null binders )
        do { (rhs2, rhs_fvs, rhs_escs) <- coreToStgExpr rhs
           ; return ((DEFAULT, [], [], rhs2), rhs_fvs, rhs_escs) }
      | otherwise
      = let -- Remove type variables
            binders' = filterStgBinders binders
        in
        extendVarEnvLne [(b, LambdaBound) | b <- binders'] $ do
          (rhs2, rhs_fvs, rhs_escs) <- coreToStgExpr rhs
          let
              -- Records whether each param is used in the RHS
              good_use_mask = [ b `elementOfFVInfo` rhs_fvs | b <- binders' ]
          return ( (con, binders', good_use_mask, rhs2),
                   binders' `minusFVBinders` rhs_fvs,
                   rhs_escs `delVarSetList` binders' )
          -- ToDo: remove the delVarSet;
          -- since escs won't include any of these binders

-- Lets not only take quite a bit of work, but this is where we convert
-- then to let-no-escapes, if we wish.
-- (Meanwhile, we don't expect to see let-no-escapes...)
coreToStgExpr (Let bind body) = do
  -- 'mfix' ties a knot: whether any binder escapes (computed from the
  -- converted let) feeds back into how the let itself is converted.
  (new_let, fvs, escs, _)
    <- mfix (\ ~(_, _, _, no_binder_escapes) ->
              coreToStgLet no_binder_escapes bind body
            )
  return (new_let, fvs, escs)

coreToStgExpr e = pprPanic "coreToStgExpr" (ppr e)
-- Decide how the code generator should treat the alternatives of a case,
-- from the representation type of the case binder: primitive, algebraic,
-- unboxed tuple, or fully polymorphic.
mkStgAltType :: Id -> [CoreAlt] -> AltType
mkStgAltType bndr alts = case repType (idType bndr) of
  UnaryRep rep_ty -> case tyConAppTyCon_maybe rep_ty of
    Just tc | isUnliftedTyCon tc -> PrimAlt tc
            | isAbstractTyCon tc -> look_for_better_tycon
            | isAlgTyCon tc -> AlgAlt tc
            | otherwise -> ASSERT2( _is_poly_alt_tycon tc, ppr tc )
                           PolyAlt
    Nothing -> PolyAlt

  UbxTupleRep rep_tys -> UbxTupAlt (length rep_tys)
  -- NB Nullary unboxed tuples have UnaryRep, and generate a PrimAlt
  where
    _is_poly_alt_tycon tc
      = isFunTyCon tc
        || isPrimTyCon tc -- "Any" is lifted but primitive
        || isFamilyTyCon tc -- Type family; e.g. Any, or arising from strict
                            -- function application where argument has a
                            -- type-family type

    -- Sometimes, the TyCon is a AbstractTyCon which may not have any
    -- constructors inside it. Then we may get a better TyCon by
    -- grabbing the one from a constructor alternative
    -- if one exists.
    look_for_better_tycon
      | ((DataAlt con, _, _) : _) <- data_alts =
          AlgAlt (dataConTyCon con)
      | otherwise =
          ASSERT(null data_alts)
          PolyAlt
      where
        (data_alts, _deflt) = findDefault alts
-- ---------------------------------------------------------------------------
-- Applications
-- ---------------------------------------------------------------------------
-- Convert an application (or bare variable) to STG, classifying it as a
-- constructor application, primop/foreign call, or ordinary StgApp.
coreToStgApp
  :: Maybe UpdateFlag -- Just upd <=> this application is
                      -- the rhs of a thunk binding
                      -- x = [...] \upd [] -> the_app
                      -- with specified update flag
  -> Id -- Function
  -> [CoreArg] -- Arguments
  -> [Tickish Id] -- Debug ticks
  -> LneM (StgExpr, FreeVarsInfo, EscVarsSet)

coreToStgApp _ f args ticks = do
    (args', args_fvs, ticks') <- coreToStgArgs args
    how_bound <- lookupVarLne f

    let
        n_val_args = valArgCount args
        not_letrec_bound = not (isLetBound how_bound)
        fun_fvs = singletonFVInfo f how_bound fun_occ
        -- e.g. (f :: a -> int) (x :: a)
        -- Here the free variables are "f", "x" AND the type variable "a"
        -- coreToStgArgs will deal with the arguments recursively

        -- Mostly, the arity info of a function is in the fn's IdInfo
        -- But new bindings introduced by CoreSat may have no
        -- arity info; it would do us no good anyway. For example:
        -- let f = \ab -> e in f
        -- No point in having correct arity info for f!
        -- Hence the hasArity stuff below.
        -- NB: f_arity is only consulted for LetBound things
        f_arity = stgArity f how_bound
        saturated = f_arity <= n_val_args

        fun_occ
          | not_letrec_bound = noBinderInfo -- Uninteresting variable
          | f_arity > 0 && saturated = stgSatOcc -- Saturated or over-saturated function call
          | otherwise = stgUnsatOcc -- Unsaturated function or thunk

        fun_escs
          | not_letrec_bound = emptyVarSet -- Only letrec-bound escapees are interesting
          | f_arity == n_val_args = emptyVarSet -- A function *or thunk* with an exactly
                                                -- saturated call doesn't escape
                                                -- (let-no-escape applies to 'thunks' too)
          | otherwise = unitVarSet f -- Inexact application; it does escape

        -- At the moment of the call:
        -- either the function is *not* let-no-escaped, in which case
        -- nothing is live except live_in_cont
        -- or the function *is* let-no-escaped in which case the
        -- variables it uses are live, but still the function
        -- itself is not. PS. In this case, the function's
        -- live vars should already include those of the
        -- continuation, but it does no harm to just union the
        -- two regardless.

        res_ty = exprType (mkApps (Var f) args)
        app = case idDetails f of
          DataConWorkId dc | saturated -> StgConApp dc args'

          -- Some primitive operator that might be implemented as a library call.
          PrimOpId op -> ASSERT( saturated )
                         StgOpApp (StgPrimOp op) args' res_ty

          -- A call to some primitive Cmm function.
          FCallId (CCall (CCallSpec (StaticTarget _ lbl (Just pkgId) True)
                                    PrimCallConv _))
            -> ASSERT( saturated )
               StgOpApp (StgPrimCallOp (PrimCall lbl pkgId)) args' res_ty

          -- A regular foreign call.
          FCallId call -> ASSERT( saturated )
                          StgOpApp (StgFCallOp call (idUnique f)) args' res_ty

          TickBoxOpId {} -> pprPanic "coreToStg TickBox" $ ppr (f,args')
          _other -> StgApp f args'
        fvs = fun_fvs `unionFVInfo` args_fvs
        vars = fun_escs `unionVarSet` (getFVSet args_fvs)
        -- All the free vars of the args are disqualified
        -- from being let-no-escaped.

        tapp = foldr StgTick app (ticks ++ ticks')

    -- Forcing these fixes a leak in the code generator, noticed while
    -- profiling for trac #4367
    app `seq` fvs `seq` seqVarSet vars `seq` return (
      tapp,
      fvs,
      vars
      )
-- ---------------------------------------------------------------------------
-- Argument lists
-- This is the guy that turns applications into A-normal form
-- ---------------------------------------------------------------------------
-- Convert an argument list to atomic STG arguments, collecting floated
-- ticks along the way.
coreToStgArgs :: [CoreArg] -> LneM ([StgArg], FreeVarsInfo, [Tickish Id])
coreToStgArgs []
  = return ([], emptyFVInfo, [])

-- Type arguments are erased.
coreToStgArgs (Type _ : args) = do -- Type argument
  (args', fvs, ts) <- coreToStgArgs args
  return (args', fvs, ts)

coreToStgArgs (Coercion _ : args) -- Coercion argument; replace with place holder
  = do { (args', fvs, ts) <- coreToStgArgs args
       ; return (StgVarArg coercionTokenId : args', fvs, ts) }

-- Non-code ticks are floated off the argument position and collected.
coreToStgArgs (Tick t e : args)
  = ASSERT( not (tickishIsCode t) )
    do { (args', fvs, ts) <- coreToStgArgs (e : args)
       ; return (args', fvs, t:ts) }

coreToStgArgs (arg : args) = do -- Non-type argument
    (stg_args, args_fvs, ticks) <- coreToStgArgs args
    (arg', arg_fvs, _escs) <- coreToStgExpr arg
    let
        fvs = args_fvs `unionFVInfo` arg_fvs

        -- After CorePrep an argument converts to a variable, a nullary
        -- constructor application, or a literal; anything else is a panic.
        (aticks, arg'') = stripStgTicksTop tickishFloatable arg'
        stg_arg = case arg'' of
          StgApp v [] -> StgVarArg v
          StgConApp con [] -> StgVarArg (dataConWorkId con)
          StgLit lit -> StgLitArg lit
          _ -> pprPanic "coreToStgArgs" (ppr arg)

    -- WARNING: what if we have an argument like (v `cast` co)
    -- where 'co' changes the representation type?
    -- (This really only happens if co is unsafe.)
    -- Then all the getArgAmode stuff in CgBindery will set the
    -- cg_rep of the CgIdInfo based on the type of v, rather
    -- than the type of 'co'.
    -- This matters particularly when the function is a primop
    -- or foreign call.
    -- Wanted: a better solution than this hacky warning

    let
        arg_ty = exprType arg
        stg_arg_ty = stgArgType stg_arg
        bad_args = (isUnliftedType arg_ty && not (isUnliftedType stg_arg_ty))
                || (map typePrimRep (flattenRepType (repType arg_ty))
                    /= map typePrimRep (flattenRepType (repType stg_arg_ty)))
        -- In GHCi we coerce an argument of type BCO# (unlifted) to HValue (lifted),
        -- and pass it to a function expecting an HValue (arg_ty). This is ok because
        -- we can treat an unlifted value as lifted. But the other way round
        -- we complain.
        -- We also want to check if a pointer is cast to a non-ptr etc

    WARN( bad_args, text "Dangerous-looking argument. Probable cause: bad unsafeCoerce#" $$ ppr arg )
      return (stg_arg : stg_args, fvs, ticks ++ aticks)
-- ---------------------------------------------------------------------------
-- The magic for lets:
-- ---------------------------------------------------------------------------
-- | Translate a Core let(rec) binding plus its body into an STG let.
--
-- Uses 'mfix' to tie a knot: the free variables of the body are needed
-- while processing the bindings (to decide what is live), and the
-- bindings' results feed back into the body's environment.  The lazy
-- pattern in the 'mfix' callback is essential — forcing any component
-- other than @rec_body_fvs@ before the knot is tied would loop.
coreToStgLet
:: Bool -- True <=> yes, we are let-no-escaping this let
-> CoreBind -- bindings
-> CoreExpr -- body
-> LneM (StgExpr, -- new let
FreeVarsInfo, -- variables free in the whole let
EscVarsSet, -- variables that escape from the whole let
Bool) -- True <=> none of the binders in the bindings
-- is among the escaping vars
coreToStgLet let_no_escape bind body = do
(bind2, bind_fvs, bind_escs, bind_lvs,
body2, body_fvs, body_escs, body_lvs)
<- mfix $ \ ~(_, _, _, _, _, rec_body_fvs, _, _) -> do
-- Do the bindings, setting live_in_cont to empty if
-- we ain't in a let-no-escape world
live_in_cont <- getVarsLiveInCont
( bind2, bind_fvs, bind_escs, bind_lv_info, env_ext)
<- setVarsLiveInCont (if let_no_escape
then live_in_cont
else emptyLiveInfo)
(vars_bind rec_body_fvs bind)
-- Do the body
extendVarEnvLne env_ext $ do
(body2, body_fvs, body_escs) <- coreToStgExpr body
body_lv_info <- freeVarsToLiveVars body_fvs
return (bind2, bind_fvs, bind_escs, getLiveVars bind_lv_info,
body2, body_fvs, body_escs, getLiveVars body_lv_info)
-- Compute the new let-expression
let
new_let | let_no_escape = StgLetNoEscape live_in_whole_let bind_lvs bind2 body2
| otherwise = StgLet bind2 body2
free_in_whole_let
= binders `minusFVBinders` (bind_fvs `unionFVInfo` body_fvs)
live_in_whole_let
= bind_lvs `unionVarSet` (body_lvs `delVarSetList` binders)
real_bind_escs = if let_no_escape then
bind_escs
else
getFVSet bind_fvs
-- Everything escapes which is free in the bindings
let_escs = (real_bind_escs `unionVarSet` body_escs) `delVarSetList` binders
all_escs = bind_escs `unionVarSet` body_escs -- Still includes binders of
-- this let(rec)
no_binder_escapes = isEmptyVarSet (set_of_binders `intersectVarSet` all_escs)
-- Debugging code as requested by Andrew Kennedy
checked_no_binder_escapes
| debugIsOn && not no_binder_escapes && any is_join_var binders
= pprTrace "Interesting! A join var that isn't let-no-escaped" (ppr binders)
False
| otherwise = no_binder_escapes
-- Mustn't depend on the passed-in let_no_escape flag, since
-- no_binder_escapes is used by the caller to derive the flag!
return (
new_let,
free_in_whole_let,
let_escs,
checked_no_binder_escapes
)
where
set_of_binders = mkVarSet binders
binders = bindersOf bind
-- How a binder of this let is recorded in the environment: always
-- NestedLet; its live set includes the whole binding group's live
-- info when we are let-no-escaping, otherwise just the binder itself.
mk_binding bind_lv_info binder rhs
= (binder, LetBound (NestedLet live_vars) (manifestArity rhs))
where
live_vars | let_no_escape = addLiveVar bind_lv_info binder
| otherwise = unitLiveVar binder
-- c.f. the invariant on NestedLet
-- Translate the binding group itself.  For a Rec group this ties a
-- second, inner knot (the RHS free variables feed their own scope).
vars_bind :: FreeVarsInfo -- Free var info for body of binding
-> CoreBind
-> LneM (StgBinding,
FreeVarsInfo,
EscVarsSet, -- free vars; escapee vars
LiveInfo, -- Vars and CAFs live in binding
[(Id, HowBound)]) -- extension to environment
vars_bind body_fvs (NonRec binder rhs) = do
(rhs2, bind_fvs, bind_lv_info, escs) <- coreToStgRhs body_fvs [] (binder,rhs)
let
env_ext_item = mk_binding bind_lv_info binder rhs
return (StgNonRec binder rhs2,
bind_fvs, escs, bind_lv_info, [env_ext_item])
vars_bind body_fvs (Rec pairs)
= mfix $ \ ~(_, rec_rhs_fvs, _, bind_lv_info, _) ->
let
rec_scope_fvs = unionFVInfo body_fvs rec_rhs_fvs
binders = map fst pairs
env_ext = [ mk_binding bind_lv_info b rhs
| (b,rhs) <- pairs ]
in
extendVarEnvLne env_ext $ do
(rhss2, fvss, lv_infos, escss)
<- mapAndUnzip4M (coreToStgRhs rec_scope_fvs binders) pairs
let
bind_fvs = unionFVInfos fvss
bind_lv_info = foldr unionLiveInfo emptyLiveInfo lv_infos
escs = unionVarSets escss
return (StgRec (binders `zip` rhss2),
bind_fvs, escs, bind_lv_info, env_ext)
is_join_var :: Id -> Bool
-- A hack (used only for compiler debugging) to tell if
-- a variable started life as a join point ($j)
is_join_var j = occNameString (getOccName j) == "$j"
-- | Translate the right-hand side of one binding into an 'StgRhs'.
--
-- The @binders@ argument lists the binders of the enclosing binding
-- group; they are subtracted from the RHS's free variables before
-- computing liveness (they are not free in the group as a whole).
-- The binder-occurrence info for the bound Id is looked up in the
-- free-var info of the *scope* of the binding, not of the RHS.
coreToStgRhs :: FreeVarsInfo -- Free var info for the scope of the binding
-> [Id]
-> (Id,CoreExpr)
-> LneM (StgRhs, FreeVarsInfo, LiveInfo, EscVarsSet)
coreToStgRhs scope_fv_info binders (bndr, rhs) = do
(new_rhs, rhs_fvs, rhs_escs) <- coreToStgExpr rhs
lv_info <- freeVarsToLiveVars (binders `minusFVBinders` rhs_fvs)
return (mkStgRhs rhs_fvs (mkSRT lv_info) bndr bndr_info new_rhs,
rhs_fvs, lv_info, rhs_escs)
where
bndr_info = lookupFVInfo scope_fv_info bndr
-- | Build an 'StgRhs' from a translated right-hand side:
-- lambdas become re-entrant closures, saturated constructor
-- applications become 'StgRhsCon', and everything else becomes a
-- thunk whose update flag depends on the binder's demand info.
mkStgRhs :: FreeVarsInfo -> SRT -> Id -> StgBinderInfo -> StgExpr -> StgRhs
mkStgRhs = mkStgRhs' con_updateable
where con_updateable _ _ = False
-- | Worker for 'mkStgRhs', parameterised over the predicate that decides
-- whether a constructor application must be treated as updateable
-- (and hence allocated as a closure rather than a static 'StgRhsCon').
mkStgRhs' :: (DataCon -> [StgArg] -> Bool)
-> FreeVarsInfo -> SRT -> Id -> StgBinderInfo -> StgExpr -> StgRhs
mkStgRhs' con_updateable rhs_fvs srt bndr binder_info rhs
| StgLam bndrs body <- rhs
= StgRhsClosure noCCS binder_info
(getFVs rhs_fvs)
ReEntrant
srt bndrs body
| StgConApp con args <- unticked_rhs
, not (con_updateable con args)
= StgRhsCon noCCS con args
| otherwise
= StgRhsClosure noCCS binder_info
(getFVs rhs_fvs)
upd_flag srt [] rhs
where
-- Look through non-code ticks when testing for a constructor RHS.
(_, unticked_rhs) = stripStgTicksTop (not . tickishIsCode) rhs
-- A thunk demanded at most once need not be updated after evaluation.
upd_flag | isUsedOnce (idDemandInfo bndr) = SingleEntry
| otherwise = Updatable
{-
SDM: disabled. Eval/Apply can't handle functions with arity zero very
well; and making these into simple non-updatable thunks breaks other
assumptions (namely that they will be entered only once).
upd_flag | isPAP env rhs = ReEntrant
| otherwise = Updatable
-- Detect thunks which will reduce immediately to PAPs, and make them
-- non-updatable. This has several advantages:
--
-- - the non-updatable thunk behaves exactly like the PAP,
--
-- - the thunk is more efficient to enter, because it is
-- specialised to the task.
--
-- - we save one update frame, one stg_update_PAP, one update
-- and lots of PAP_enters.
--
-- - in the case where the thunk is top-level, we save building
-- a black hole and furthermore the thunk isn't considered to
-- be a CAF any more, so it doesn't appear in any SRTs.
--
-- We do it here, because the arity information is accurate, and we need
-- to do it before the SRT pass to save the SRT entries associated with
-- any top-level PAPs.
isPAP env (StgApp f args) = listLengthCmp args arity == LT -- idArity f > length args
where
arity = stgArity f (lookupBinding env f)
isPAP env _ = False
-}
{- ToDo:
upd = if isOnceDem dem
then (if isNotTop toplev
then SingleEntry -- HA! Paydirt for "dem"
else
(if debugIsOn then trace "WARNING: SE CAFs unsupported, forcing UPD instead" else id) $
Updatable)
else Updatable
-- For now we forbid SingleEntry CAFs; they tickle the
-- ASSERT in rts/Storage.c line 215 at newCAF() re mut_link,
-- and I don't understand why. There's only one SE_CAF (well,
-- only one that tickled a great gaping bug in an earlier attempt
-- at ClosureInfo.getEntryConvention) in the whole of nofib,
-- specifically Main.lvl6 in spectral/cryptarithm2.
-- So no great loss. KSW 2000-07.
-}
-- ---------------------------------------------------------------------------
-- A little monad for this let-no-escaping pass
-- ---------------------------------------------------------------------------
-- There's a lot of stuff to pass around, so we use this LneM monad to
-- help. All the stuff here is only passed *down*.
-- | Reader-style monad for the pass: an environment mapping Ids to how
-- they are bound, plus the liveness of the current continuation.
newtype LneM a = LneM
{ unLneM :: IdEnv HowBound
-> LiveInfo -- Vars and CAFs live in continuation
-> a
}
type LiveInfo = (StgLiveVars, -- Dynamic live variables;
-- i.e. ones with a nested (non-top-level) binding
CafSet) -- Static live variables;
-- i.e. top-level variables that are CAFs or refer to them
type EscVarsSet = IdSet
type CafSet = IdSet
-- | How an in-scope identifier was bound; this drives liveness and
-- escape analysis throughout the pass.
data HowBound
= ImportBound -- Used only as a response to lookupBinding; never
-- exists in the range of the (IdEnv HowBound)
| LetBound -- A let(rec) in this module
LetInfo -- Whether top level or nested
Arity -- Its arity (local Ids don't have arity info at this point)
| LambdaBound -- Used for both lambda and case
data LetInfo
= TopLet -- top level things
| NestedLet LiveInfo -- For nested things, what is live if this
-- thing is live? Invariant: the binder
-- itself is always a member of
-- the dynamic set of its own LiveInfo
-- | True for anything bound by a let(rec) in this module.
isLetBound :: HowBound -> Bool
isLetBound (LetBound _ _) = True
isLetBound _ = False
-- | True for imports and top-level lets; False for nested/lambda/case binders.
topLevelBound :: HowBound -> Bool
topLevelBound ImportBound = True
topLevelBound (LetBound TopLet _) = True
topLevelBound _ = False
-- For a let(rec)-bound variable, x, we record LiveInfo, the set of
-- variables that are live if x is live. This LiveInfo comprises
-- (a) dynamic live variables (ones with a non-top-level binding)
-- (b) static live variables (CAFs or things that refer to CAFs)
--
-- For "normal" variables (a) is just x alone. If x is a let-no-escaped
-- variable then x is represented by a code pointer and a stack pointer
-- (well, one for each stack). So all of the variables needed in the
-- execution of x are live if x is, and are therefore recorded in the
-- LetBound constructor; x itself *is* included.
--
-- The set of dynamic live variables is guaranteed to have no further
-- let-no-escaped variables in it.
emptyLiveInfo :: LiveInfo
emptyLiveInfo = (emptyVarSet,emptyVarSet)
unitLiveVar :: Id -> LiveInfo
unitLiveVar lv = (unitVarSet lv, emptyVarSet)
unitLiveCaf :: Id -> LiveInfo
unitLiveCaf caf = (emptyVarSet, unitVarSet caf)
addLiveVar :: LiveInfo -> Id -> LiveInfo
addLiveVar (lvs, cafs) id = (lvs `extendVarSet` id, cafs)
-- | Pointwise union of the dynamic and static components.
unionLiveInfo :: LiveInfo -> LiveInfo -> LiveInfo
unionLiveInfo (lv1,caf1) (lv2,caf2) = (lv1 `unionVarSet` lv2, caf1 `unionVarSet` caf2)
-- | The SRT of a binding is exactly its static (CAF) live set.
mkSRT :: LiveInfo -> SRT
mkSRT (_, cafs) = SRTEntries cafs
getLiveVars :: LiveInfo -> StgLiveVars
getLiveVars (lvs, _) = lvs
-- The std monad functions:
-- | Run an 'LneM' computation with the given binding environment and an
-- empty continuation-liveness.
initLne :: IdEnv HowBound -> LneM a -> a
initLne env m = unLneM m env emptyLiveInfo
{-# INLINE thenLne #-}
{-# INLINE returnLne #-}
-- | 'pure' for 'LneM': ignore both pieces of context.
returnLne :: a -> LneM a
returnLne e = LneM $ \_ _ -> e
-- | '>>=' for 'LneM': thread the same environment and continuation
-- liveness through both computations (reader-style, passed down only).
thenLne :: LneM a -> (a -> LneM b) -> LneM b
thenLne m k = LneM $ \env lvs_cont
-> unLneM (k (unLneM m env lvs_cont)) env lvs_cont
instance Functor LneM where
fmap = liftM
instance Applicative LneM where
pure = returnLne
(<*>) = ap
instance Monad LneM where
return = pure
(>>=) = thenLne
-- MonadFix is what enables the knot-tying in coreToStgLet/vars_bind.
instance MonadFix LneM where
mfix expr = LneM $ \env lvs_cont ->
let result = unLneM (expr result) env lvs_cont
in result
-- Functions specific to this monad:
-- | Read the liveness of the current continuation.
getVarsLiveInCont :: LneM LiveInfo
getVarsLiveInCont = LneM $ \_env lvs_cont -> lvs_cont
-- | Run a computation with a replacement continuation-liveness.
setVarsLiveInCont :: LiveInfo -> LneM a -> LneM a
setVarsLiveInCont new_lvs_cont expr
= LneM $ \env _lvs_cont
-> unLneM expr env new_lvs_cont
-- | Run a computation with extra (Id, HowBound) pairs in scope.
extendVarEnvLne :: [(Id, HowBound)] -> LneM a -> LneM a
extendVarEnvLne ids_w_howbound expr
= LneM $ \env lvs_cont
-> unLneM expr (extendVarEnvList env ids_w_howbound) lvs_cont
lookupVarLne :: Id -> LneM HowBound
lookupVarLne v = LneM $ \env _lvs_cont -> lookupBinding env v
-- | Look up how an Id is bound; anything absent from the environment
-- must be an imported (global) Id, asserted in debug builds.
lookupBinding :: IdEnv HowBound -> Id -> HowBound
lookupBinding env v = case lookupVarEnv env v of
Just xx -> xx
Nothing -> ASSERT2( isGlobalId v, ppr v ) ImportBound
-- The result of lookupLiveVarsForSet, a set of live variables, is
-- only ever tacked onto a decorated expression. It is never used as
-- the basis of a control decision, which might give a black hole.
-- | Turn free-variable info into liveness: each free Id contributes
-- according to how it is bound, and the continuation's liveness is
-- unioned in as the base.
freeVarsToLiveVars :: FreeVarsInfo -> LneM LiveInfo
freeVarsToLiveVars fvs = LneM freeVarsToLiveVars'
where
freeVarsToLiveVars' _env live_in_cont = live_info
where
live_info = foldr unionLiveInfo live_in_cont lvs_from_fvs
lvs_from_fvs = map do_one (allFreeIds fvs)
do_one (v, how_bound)
= case how_bound of
ImportBound -> unitLiveCaf v -- Only CAF imports are
-- recorded in fvs
LetBound TopLet _
| mayHaveCafRefs (idCafInfo v) -> unitLiveCaf v
| otherwise -> emptyLiveInfo
LetBound (NestedLet lvs) _ -> lvs -- lvs already contains v
-- (see the invariant on NestedLet)
_lambda_or_case_binding -> unitLiveVar v -- Bound by lambda or case
-- ---------------------------------------------------------------------------
-- Free variable information
-- ---------------------------------------------------------------------------
type FreeVarsInfo = VarEnv (Var, HowBound, StgBinderInfo)
-- The Var is so we can gather up the free variables
-- as a set.
--
-- The HowBound info just saves repeated lookups;
-- we look up just once when we encounter the occurrence.
-- INVARIANT: Any ImportBound Ids are HaveCafRef Ids
-- Imported Ids without CAF refs are simply
-- not put in the FreeVarsInfo for an expression.
-- See singletonFVInfo and freeVarsToLiveVars
--
-- StgBinderInfo records how it occurs; notably, we
-- are interested in whether it only occurs in saturated
-- applications, because then we don't need to build a
-- curried version.
-- If f is mapped to noBinderInfo, that means
-- that f *is* mentioned (else it wouldn't be in the
-- IdEnv at all), but perhaps in an unsaturated application.
--
-- All case/lambda-bound things are also mapped to
-- noBinderInfo, since we aren't interested in their
-- occurrence info.
--
-- For ILX we track free var info for type variables too;
-- hence VarEnv not IdEnv
emptyFVInfo :: FreeVarsInfo
emptyFVInfo = emptyVarEnv
singletonFVInfo :: Id -> HowBound -> StgBinderInfo -> FreeVarsInfo
-- Don't record non-CAF imports at all, to keep free-var sets small
singletonFVInfo id ImportBound info
| mayHaveCafRefs (idCafInfo id) = unitVarEnv id (id, ImportBound, info)
| otherwise = emptyVarEnv
singletonFVInfo id how_bound info = unitVarEnv id (id, how_bound, info)
-- | Union two infos; entries for the same Id are merged with 'plusFVInfo'.
unionFVInfo :: FreeVarsInfo -> FreeVarsInfo -> FreeVarsInfo
unionFVInfo fv1 fv2 = plusVarEnv_C plusFVInfo fv1 fv2
unionFVInfos :: [FreeVarsInfo] -> FreeVarsInfo
unionFVInfos fvs = foldr unionFVInfo emptyFVInfo fvs
-- | Remove the given binders from the free-var info.
minusFVBinders :: [Id] -> FreeVarsInfo -> FreeVarsInfo
minusFVBinders vs fv = foldr minusFVBinder fv vs
minusFVBinder :: Id -> FreeVarsInfo -> FreeVarsInfo
minusFVBinder v fv = fv `delVarEnv` v
-- When removing a binder, remember to add its type variables
-- c.f. CoreFVs.delBinderFV
elementOfFVInfo :: Id -> FreeVarsInfo -> Bool
elementOfFVInfo id fvs = isJust (lookupVarEnv fvs id)
lookupFVInfo :: FreeVarsInfo -> Id -> StgBinderInfo
-- Find how the given Id is used.
-- Externally visible things may be used any old how
lookupFVInfo fvs id
| isExternalName (idName id) = noBinderInfo
| otherwise = case lookupVarEnv fvs id of
Nothing -> noBinderInfo
Just (_,_,info) -> info
allFreeIds :: FreeVarsInfo -> [(Id,HowBound)] -- Both top level and non-top-level Ids
allFreeIds fvs = ASSERT( all (isId . fst) ids ) ids
where
ids = [(id,how_bound) | (id,how_bound,_) <- varEnvElts fvs]
-- Non-top-level things only, both type variables and ids
getFVs :: FreeVarsInfo -> [Var]
getFVs fvs = [id | (id, how_bound, _) <- varEnvElts fvs,
not (topLevelBound how_bound) ]
getFVSet :: FreeVarsInfo -> VarSet
getFVSet fvs = mkVarSet (getFVs fvs)
-- | Merge two free-var entries for the same variable: the binder info
-- is combined; the Id and HowBound must agree (asserted in debug builds).
plusFVInfo :: (Var, HowBound, StgBinderInfo)
-> (Var, HowBound, StgBinderInfo)
-> (Var, HowBound, StgBinderInfo)
plusFVInfo (id1,hb1,info1) (id2,hb2,info2)
= ASSERT(id1 == id2 && hb1 `check_eq_how_bound` hb2)
(id1, hb1, combineStgBinderInfo info1 info2)
-- The HowBound info for a variable in the FVInfo should be consistent
check_eq_how_bound :: HowBound -> HowBound -> Bool
check_eq_how_bound ImportBound ImportBound = True
check_eq_how_bound LambdaBound LambdaBound = True
check_eq_how_bound (LetBound li1 ar1) (LetBound li2 ar2) = ar1 == ar2 && check_eq_li li1 li2
check_eq_how_bound _ _ = False
-- Constructor-level equality on LetInfo; NestedLet live sets are not compared.
check_eq_li :: LetInfo -> LetInfo -> Bool
check_eq_li (NestedLet _) (NestedLet _) = True
check_eq_li TopLet TopLet = True
check_eq_li _ _ = False
-- Misc.

-- | Keep only term-level binders, dropping type variables.
filterStgBinders :: [Var] -> [Var]
filterStgBinders = filter isId

-- | Peel off leading lambdas, looking through any casts interleaved
-- with them; returns the binders in source order plus the body.
myCollectBinders :: Expr Var -> ([Var], Expr Var)
myCollectBinders = peel []
  where
    peel acc (Lam b e)  = peel (b : acc) e
    peel acc (Cast e _) = peel acc e
    peel acc e          = (reverse acc, e)
-- | Decompose an application into its head variable, argument list, and
-- any ticks encountered on the spine.  Panics if the head is not a
-- variable (by this stage of the pipeline it always should be).
myCollectArgs :: CoreExpr -> (Id, [CoreArg], [Tickish Id])
-- We assume that we only have variables
-- in the function position by now
myCollectArgs expr
= go expr [] []
where
go (Var v) as ts = (v, as, ts)
go (App f a) as ts = go f (a:as) ts
go (Tick t e) as ts = ASSERT( all isTypeArg as )
go e as (t:ts) -- ticks can appear in type apps
go (Cast e _) as ts = go e as ts
go (Lam b e) as ts
| isTyVar b = go e as ts -- Note [Collect args]
go _ _ _ = pprPanic "CoreToStg.myCollectArgs" (ppr expr)
-- Note [Collect args]
-- ~~~~~~~~~~~~~~~~~~~
--
-- This big-lambda case occurred following a rather obscure eta expansion.
-- It all seems a bit yukky to me.
-- | The arity of an identifier as seen by STG: let-bound things carry
-- their recorded arity, imports use 'idArity', and lambda/case binders
-- have arity zero.
stgArity :: Id -> HowBound -> Arity
stgArity f how_bound = case how_bound of
  LetBound _ arity -> arity
  ImportBound      -> idArity f
  LambdaBound      -> 0
|
GaloisInc/halvm-ghc
|
compiler/stgSyn/CoreToStg.hs
|
Haskell
|
bsd-3-clause
| 46,809
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
@Uniques@ are used to distinguish entities in the compiler (@Ids@,
@Classes@, etc.) from each other. Thus, @Uniques@ are the basic
comparison key in the compiler.
If there is any single operation that needs to be fast, it is @Unique@
comparison. Unsurprisingly, there is quite a bit of huff-and-puff
directed to that end.
Some of the other hair in this code is to be able to use a
``splittable @UniqueSupply@'' if requested/possible (not standard
Haskell).
-}
{-# LANGUAGE CPP, BangPatterns, MagicHash #-}
module Unique (
-- * Main data types
Unique, Uniquable(..),
uNIQUE_BITS,
-- ** Constructors, destructors and operations on 'Unique's
hasKey,
pprUniqueAlways,
mkUniqueGrimily, -- Used in UniqSupply only!
getKey, -- Used in Var, UniqFM, Name only!
mkUnique, unpkUnique, -- Used in GHC.Iface.Binary only
eqUnique, ltUnique,
incrUnique,
newTagUnique, -- Used in CgCase
initTyVarUnique,
initExitJoinUnique,
nonDetCmpUnique,
isValidKnownKeyUnique, -- Used in PrelInfo.knownKeyNamesOkay
-- ** Making built-in uniques
-- now all the built-in Uniques (and functions to make them)
-- [the Oh-So-Wonderful Haskell module system wins again...]
mkAlphaTyVarUnique,
mkPrimOpIdUnique, mkPrimOpWrapperUnique,
mkPreludeMiscIdUnique, mkPreludeDataConUnique,
mkPreludeTyConUnique, mkPreludeClassUnique,
mkCoVarUnique,
mkVarOccUnique, mkDataOccUnique, mkTvOccUnique, mkTcOccUnique,
mkRegSingleUnique, mkRegPairUnique, mkRegClassUnique, mkRegSubUnique,
mkCostCentreUnique,
mkBuiltinUnique,
mkPseudoUniqueD,
mkPseudoUniqueE,
mkPseudoUniqueH,
-- ** Deriving uniques
-- *** From TyCon name uniques
tyConRepNameUnique,
-- *** From DataCon name uniques
dataConWorkerUnique, dataConTyRepNameUnique,
-- ** Local uniques
-- | These are exposed exclusively for use by 'VarEnv.uniqAway', which
-- has rather peculiar needs. See Note [Local uniques].
mkLocalUnique, minLocalUnique, maxLocalUnique
) where
#include "HsVersions.h"
#include "Unique.h"
import GhcPrelude
import BasicTypes
import FastString
import Outputable
import Util
-- just for implementing a fast [0,61) -> Char function
import GHC.Exts (indexCharOffAddr#, Char(..), Int(..))
import Data.Char ( chr, ord )
import Data.Bits
{-
************************************************************************
* *
\subsection[Unique-type]{@Unique@ type and operations}
* *
************************************************************************
The @Chars@ are ``tag letters'' that identify the @UniqueSupply@.
Fast comparison is everything on @Uniques@:
-}
-- | Unique identifier.
--
-- The type of unique identifiers that are used in many places in GHC
-- for fast ordering and equality tests. You should generate these with
-- the functions from the 'UniqSupply' module
--
-- These are sometimes also referred to as \"keys\" in comments in GHC.
newtype Unique = MkUnique Int
{-# INLINE uNIQUE_BITS #-}
-- | Number of bits available for the unique's index; the remaining
-- high bits (UNIQUE_TAG_BITS, a CPP constant from Unique.h) hold the
-- tag character.
uNIQUE_BITS :: Int
uNIQUE_BITS = finiteBitSize (0 :: Int) - UNIQUE_TAG_BITS
{-
Now come the functions which construct uniques from their pieces, and vice versa.
The stuff about unique *supplies* is handled further down this module.
-}
unpkUnique :: Unique -> (Char, Int) -- The reverse
mkUniqueGrimily :: Int -> Unique -- A trap-door for UniqSupply
getKey :: Unique -> Int -- for Var
incrUnique :: Unique -> Unique
stepUnique :: Unique -> Int -> Unique
newTagUnique :: Unique -> Char -> Unique
mkUniqueGrimily = MkUnique
{-# INLINE getKey #-}
getKey (MkUnique x) = x
incrUnique (MkUnique i) = MkUnique (i + 1)
stepUnique (MkUnique i) n = MkUnique (i + n)
-- Local uniques use the 'X' tag; see Note [Local uniques] referenced
-- in the export list.
mkLocalUnique :: Int -> Unique
mkLocalUnique i = mkUnique 'X' i
minLocalUnique :: Unique
minLocalUnique = mkLocalUnique 0
maxLocalUnique :: Unique
maxLocalUnique = mkLocalUnique uniqueMask
-- newTagUnique changes the "domain" of a unique to a different char
newTagUnique u c = mkUnique c i where (_,i) = unpkUnique u
-- | Mask selecting the index part of a unique: the low 'uNIQUE_BITS'
-- bits (everything except the tag character).
uniqueMask :: Int
uniqueMask = (1 `shiftL` uNIQUE_BITS) - 1
-- pop the Char in the top 8 bits of the Unique(Supply)
-- No 64-bit bugs here, as long as we have at least 32 bits. --JSM
-- and as long as the Char fits in 8 bits, which we assume anyway!
mkUnique :: Char -> Int -> Unique -- Builds a unique from pieces
-- NOT EXPORTED, so that we can see all the Chars that
-- are used in this one module
mkUnique c i
= MkUnique (tag .|. bits)
where
tag = ord c `shiftL` uNIQUE_BITS
bits = i .&. uniqueMask
unpkUnique (MkUnique u)
= let
-- as long as the Char may have its eighth bit set, we
-- really do need the logical right-shift here!
tag = chr (u `shiftR` uNIQUE_BITS)
i = u .&. uniqueMask
in
(tag, i)
-- | The interface file symbol-table encoding assumes that known-key uniques fit
-- in 30-bits; verify this.
--
-- See Note [Symbol table representation of names] in GHC.Iface.Binary for details.
isValidKnownKeyUnique :: Unique -> Bool
isValidKnownKeyUnique u =
case unpkUnique u of
(c, x) -> ord c < 0xff && x <= (1 `shiftL` 22)
{-
************************************************************************
* *
\subsection[Uniquable-class]{The @Uniquable@ class}
* *
************************************************************************
-}
-- | Class of things that we can obtain a 'Unique' from
class Uniquable a where
getUnique :: a -> Unique
-- | Test whether a thing's unique equals the given key.
hasKey :: Uniquable a => a -> Unique -> Bool
x `hasKey` k = getUnique x == k
-- FastStrings already carry a unique Int; reuse it directly.
instance Uniquable FastString where
getUnique fs = mkUniqueGrimily (uniqueOfFS fs)
instance Uniquable Int where
getUnique i = mkUniqueGrimily i
{-
************************************************************************
* *
\subsection[Unique-instances]{Instance declarations for @Unique@}
* *
************************************************************************
And the whole point (besides uniqueness) is fast equality. We don't
use `deriving' because we want {\em precise} control of ordering
(equality on @Uniques@ is v common).
-}
-- Note [Unique Determinism]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- The order of allocated @Uniques@ is not stable across rebuilds.
-- The main reason for that is that typechecking interface files pulls
-- @Uniques@ from @UniqSupply@ and the interface file for the module being
-- currently compiled can, but doesn't have to exist.
--
-- It gets more complicated if you take into account that the interface
-- files are loaded lazily and that building multiple files at once has to
-- work for any subset of interface files present. When you add parallelism
-- this makes @Uniques@ hopelessly random.
--
-- As such, to get deterministic builds, the order of the allocated
-- @Uniques@ should not affect the final result.
-- see also wiki/deterministic-builds
--
-- Note [Unique Determinism and code generation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- The goal of the deterministic builds (wiki/deterministic-builds, #4012)
-- is to get ABI compatible binaries given the same inputs and environment.
-- The motivation behind that is that if the ABI doesn't change the
-- binaries can be safely reused.
-- Note that this is weaker than bit-for-bit identical binaries and getting
-- bit-for-bit identical binaries is not a goal for now.
-- This means that we don't care about nondeterminism that happens after
-- the interface files are created, in particular we don't care about
-- register allocation and code generation.
-- To track progress on bit-for-bit determinism see #12262.
-- | Equality on the underlying 'Int' key.
eqUnique :: Unique -> Unique -> Bool
eqUnique (MkUnique u1) (MkUnique u2) = u1 == u2

-- | Strict less-than on the underlying 'Int' key.
ltUnique :: Unique -> Unique -> Bool
ltUnique (MkUnique u1) (MkUnique u2) = u1 < u2

-- Provided here to make it explicit at the call-site that it can
-- introduce non-determinism.
-- See Note [Unique Determinism]
-- See Note [No Ord for Unique]
nonDetCmpUnique :: Unique -> Unique -> Ordering
nonDetCmpUnique (MkUnique u1) (MkUnique u2) = compare u1 u2
{-
Note [No Ord for Unique]
~~~~~~~~~~~~~~~~~~~~~~~~~~
As explained in Note [Unique Determinism] the relative order of Uniques
is nondeterministic. To prevent from accidental use the Ord Unique
instance has been removed.
This makes it easier to maintain deterministic builds, but comes with some
drawbacks.
The biggest drawback is that Maps keyed by Uniques can't directly be used.
The alternatives are:
1) Use UniqFM or UniqDFM, see Note [Deterministic UniqFM] to decide which
2) Create a newtype wrapper based on Unique ordering where nondeterminism
is controlled. See Module.ModuleEnv
3) Change the algorithm to use nonDetCmpUnique and document why it's still
deterministic
4) Use TrieMap as done in GHC.Cmm.CommonBlockElim.groupByLabel
-}
-- Hand-written rather than derived: see the surrounding commentary on
-- why Unique deliberately has Eq but no Ord instance.
instance Eq Unique where
a == b = eqUnique a b
a /= b = not (eqUnique a b)
instance Uniquable Unique where
getUnique u = u
-- We do sometimes make strings with @Uniques@ in them:
-- | Render a unique as its tag character followed by the index in
-- base-62 (with a special short form for 't'-tagged type variables).
showUnique :: Unique -> String
showUnique uniq
= case unpkUnique uniq of
(tag, u) -> finish_show tag u (iToBase62 u)
finish_show :: Char -> Int -> String -> String
finish_show 't' u _pp_u | u < 26
= -- Special case to make v common tyvars, t1, t2, ...
-- come out as a, b, ... (shorter, easier to read)
[chr (ord 'a' + u)]
finish_show tag _ pp_u = tag : pp_u
pprUniqueAlways :: Unique -> SDoc
-- The "always" means regardless of -dsuppress-uniques
-- It replaces the old pprUnique to remind callers that
-- they should consider whether they want to consult
-- Opt_SuppressUniques
pprUniqueAlways u
= text (showUnique u)
instance Outputable Unique where
ppr = pprUniqueAlways
instance Show Unique where
show uniq = showUnique uniq
{-
************************************************************************
* *
\subsection[Utils-base62]{Base-62 numbers}
* *
************************************************************************
A character-stingy way to read/write numbers (notably Uniques).
The ``62-its'' are \tr{[0-9a-zA-Z]}. We don't handle negative Ints.
Code stolen from Lennart.
-}
-- | Render a non-negative Int in base 62 using digits [0-9a-zA-Z].
-- Negative inputs are rejected (assertion in debug builds).
iToBase62 :: Int -> String
iToBase62 n_
= ASSERT(n_ >= 0) go n_ ""
where
go n cs | n < 62
= let !c = chooseChar62 n in c : cs
| otherwise
= go q (c : cs) where (!q, r) = quotRem n 62
!c = chooseChar62 r
-- Index into the static digit table with a primitive unchecked read;
-- callers guarantee 0 <= n < 62.
chooseChar62 :: Int -> Char
{-# INLINE chooseChar62 #-}
chooseChar62 (I# n) = C# (indexCharOffAddr# chars62 n)
chars62 = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"#
{-
************************************************************************
* *
\subsection[Uniques-prelude]{@Uniques@ for wired-in Prelude things}
* *
************************************************************************
Allocation of unique supply characters:
v,t,u : for renumbering value-, type- and usage- vars.
B: builtin
C-E: pseudo uniques (used in native-code generator)
X: uniques from mkLocalUnique
_: unifiable tyvars (above)
0-9: prelude things below
(no numbers left any more..)
:: (prelude) parallel array data constructors
other a-z: lower case chars for unique supplies. Used so far:
d desugarer
f AbsC flattener
g SimplStg
k constraint tuple tycons
m constraint tuple datacons
n Native codegen
r Hsc name cache
s simplifier
z anonymous sums
-}
-- Constructors for the wired-in unique domains.  Each domain is
-- identified by its tag character; see the allocation table in the
-- comment block above.
mkAlphaTyVarUnique :: Int -> Unique
mkPreludeClassUnique :: Int -> Unique
mkPreludeTyConUnique :: Int -> Unique
mkPreludeDataConUnique :: Arity -> Unique
mkPrimOpIdUnique :: Int -> Unique
-- See Note [Primop wrappers] in PrimOp.hs.
mkPrimOpWrapperUnique :: Int -> Unique
mkPreludeMiscIdUnique :: Int -> Unique
mkCoVarUnique :: Int -> Unique
mkAlphaTyVarUnique i = mkUnique '1' i
mkCoVarUnique i = mkUnique 'g' i
mkPreludeClassUnique i = mkUnique '2' i
--------------------------------------------------
-- Wired-in type constructor keys occupy *two* slots:
-- * u: the TyCon itself
-- * u+1: the TyConRepName of the TyCon
mkPreludeTyConUnique i = mkUnique '3' (2*i)
tyConRepNameUnique :: Unique -> Unique
tyConRepNameUnique u = incrUnique u
-- Data constructor keys occupy *two* slots. The first is used for the
-- data constructor itself and its wrapper function (the function that
-- evaluates arguments as necessary and calls the worker). The second is
-- used for the worker function (the function that builds the constructor
-- representation).
--------------------------------------------------
-- Wired-in data constructor keys occupy *three* slots:
-- * u: the DataCon itself
-- * u+1: its worker Id
-- * u+2: the TyConRepName of the promoted TyCon
-- Prelude data constructors are too simple to need wrappers.
mkPreludeDataConUnique i = mkUnique '6' (3*i) -- Must be alphabetic
--------------------------------------------------
dataConTyRepNameUnique, dataConWorkerUnique :: Unique -> Unique
dataConWorkerUnique u = incrUnique u
dataConTyRepNameUnique u = stepUnique u 2
--------------------------------------------------
-- Primop and its wrapper share a pair of consecutive slots.
mkPrimOpIdUnique op = mkUnique '9' (2*op)
mkPrimOpWrapperUnique op = mkUnique '9' (2*op+1)
mkPreludeMiscIdUnique i = mkUnique '0' i
-- The "tyvar uniques" print specially nicely: a, b, c, etc.
-- See pprUnique for details
initTyVarUnique :: Unique
initTyVarUnique = mkUnique 't' 0
mkPseudoUniqueD, mkPseudoUniqueE, mkPseudoUniqueH,
mkBuiltinUnique :: Int -> Unique
mkBuiltinUnique i = mkUnique 'B' i
mkPseudoUniqueD i = mkUnique 'D' i -- used in NCG for getUnique on RealRegs
mkPseudoUniqueE i = mkUnique 'E' i -- used in NCG spiller to create spill VirtualRegs
mkPseudoUniqueH i = mkUnique 'H' i -- used in NCG spiller to create spill VirtualRegs
mkRegSingleUnique, mkRegPairUnique, mkRegSubUnique, mkRegClassUnique :: Int -> Unique
mkRegSingleUnique = mkUnique 'R'
mkRegSubUnique = mkUnique 'S'
mkRegPairUnique = mkUnique 'P'
mkRegClassUnique = mkUnique 'L'
mkCostCentreUnique :: Int -> Unique
mkCostCentreUnique = mkUnique 'C'
mkVarOccUnique, mkDataOccUnique, mkTvOccUnique, mkTcOccUnique :: FastString -> Unique
-- See Note [The Unique of an OccName] in OccName
mkVarOccUnique fs = mkUnique 'i' (uniqueOfFS fs)
mkDataOccUnique fs = mkUnique 'd' (uniqueOfFS fs)
mkTvOccUnique fs = mkUnique 'v' (uniqueOfFS fs)
mkTcOccUnique fs = mkUnique 'c' (uniqueOfFS fs)
initExitJoinUnique :: Unique
initExitJoinUnique = mkUnique 's' 0
|
sdiehl/ghc
|
compiler/basicTypes/Unique.hs
|
Haskell
|
bsd-3-clause
| 15,888
|
{-# OPTIONS_GHC -Wall #-}
module DFS where
import Types
import Unify
import Control.Monad (forM, liftM)
import Control.Monad.Error (strMsg, throwError)
import Control.Monad.State (gets)
import Data.Maybe (catMaybes)
--import Debug.Trace
-- | Find all rules whose head matches the given compound term:
-- same functor name, same arity, and whose (freshly instantiated)
-- head arguments unify with the call's arguments.  Returns each
-- matching rule paired with the unifying substitution.
getMatchingRules :: Compound -> Manti [(Rule, Substs)]
--getMatchingRules c | trace ("getMatchingRule " ++ show c) False = undefined
getMatchingRules (Compound fName args) = do
rls <- gets rules
let rls' = lookupRules fName (length args) rls
rinsts <- mapM instantiate rls'
return $ catMaybes $ flip map rinsts $ \r@(Rule (RHead _ rargs) _) ->
case unifyArgs nullSubst args rargs of
Nothing -> Nothing
Just ss -> Just (r, ss)
where
-- Select rules by functor name and arity.
lookupRules :: Atom -> Int -> [Rule] -> [Rule]
lookupRules name arity =
filter (\(Rule (RHead fname rargs) _) -> name == fname && length rargs == arity)
-- Unify two argument lists pairwise, threading the substitution.
-- NB: only defined for equal-length lists; lookupRules guarantees
-- the arities match before this is called.
unifyArgs :: Substs -> [Term] -> [Term] -> Maybe Substs
unifyArgs ss [] [] = Just ss
unifyArgs ss (t1:t1r) (t2:t2r) =
case unify ss (apply ss t1) (apply ss t2) of
Left _ -> Nothing
Right ss' -> unifyArgs ss' t1r t2r
-- | Depth-first resolution of a conjunction of goals, returning one
-- substitution per solution.  An empty goal list is trivially solved.
-- Negation-as-failure: @not G@ succeeds iff G has no solutions;
-- its argument must be a compound term, otherwise an error is thrown.
solve :: [Query] -> Manti [Substs]
solve [] = return [nullSubst]
solve (Query (Compound (Atom "not") [arg]):gs) =
case arg of
TComp comp -> do
ss <- solve [Query comp]
--trace ("ss in `not': " ++ show ss) (return ())
if null ss then solve gs else return []
term -> throwError . strMsg $ "not arg is not compound: " ++ show term
solve goals = do
bs <- branch goals
--trace ("bs: " ++ show bs) (return ())
-- Explore each branch in turn, composing each branch's substitution
-- with the substitutions of the remaining goals' solutions.
liftM concat $ forM bs $ \(s, goals') -> do
solutions <- solve goals'
mapM (unionSubsts' s) solutions
-- | Expand the first goal into one branch per matching rule: each
-- branch carries the unifying substitution and a new goal list formed
-- from the rule body (with the substitution applied) followed by the
-- remaining goals.
branch :: [Query] -> Manti [(Substs, [Query])]
branch [] = return []
branch (Query c:rest) = do
rls <- getMatchingRules c
--trace ("matching rules: " ++ show rls) (return ())
--trace ("rest: " ++ show rest) (return ())
return $ flip map rls $ \(Rule _ (RBody conjs), ss) -> (ss, apply ss $ map Query conjs ++ rest)
|
osa1/MANTI
|
src/DFS.hs
|
Haskell
|
bsd-3-clause
| 2,117
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified Data.Text as Text
import Graphics.Blank
import Paths_blank_canvas_examples (getDataDir)
-- A bare-bones demonstration of loading and playing/pausing an audio file.
-- | Entry point: serve the canvas on port 3000, listening for mouse clicks,
-- with static files served from the package's data directory.
main :: IO ()
main = do
  dataDir <- getDataDir
  let options = 3000 { events = ["mousedown"], root = dataDir }
  -- The Audio data type works with both local files and URLs, e.g.
  -- "http://upload.wikimedia.org/wikipedia/en/d/df/Florence_Foster_Jenkins_H%C3%B6lle_Rache.ogg"
  blankCanvas options $ \context ->
    startLoop context "music/sonata.ogg"
-- | Whether the audio is currently in the playing or the paused state.
data Play = Playing | Paused
  deriving (Eq,Ord,Show)

-- | Toggle between the two playback states.
swap :: Play -> Play
swap state = case state of
  Playing -> Paused
  Paused  -> Playing
-- Load the audio file and enter the draw/event loop, starting in the
-- 'Playing' state.
-- NOTE(review): no type signature in the original; presumably
-- startLoop :: DeviceContext -> Text -> IO () — confirm against blank-canvas.
startLoop context filename = do
  music <- send context $ newAudio filename
  loop context music Playing
-- | Redraw the screen, sync the audio with the current state, then block on
-- a mouse click and toggle play/pause.
loop :: DeviceContext -> CanvasAudio -> Play -> IO ()
loop context audio play = do
  send context $ do
    let (w,h) = (width context, height context)
    clearRect (0,0,w,h)
    lineWidth 1
    font "30pt Calibri"
    -- This music sure is loud, better make it a bit softer.
    setVolumeAudio(audio,0.9)
    -- Everyone likes faster music, let's make it twice as fast
    setPlaybackRateAudio(audio,2.0)
    -- Play/pause the audio depending on which state it's in.
    -- NOTE(review): when the state is 'Playing' the audio is paused and the
    -- prompt invites the user to play (and vice versa), so 'Play' reads as
    -- "what the next click will do" rather than "what is happening now" —
    -- confirm this is intended.
    if (play == Playing)
      then do
        fillText("Click screen to play audio",50,50)
        pauseAudio audio
      else do
        fillText("Click screen to pause audio",50,50)
        playAudio audio
    -- display the current time
    -- TODO: use threading so that the current time can continuously update while
    -- waiting for a mouse click
    time <- currentTimeAudio audio
    fillText(Text.append "Current Time: " (Text.pack $ show time),50,90)
  -- wait for mouse click
  event <- wait context
  case ePageXY event of
    -- if no mouse location, ignore, and redraw
    Nothing -> loop context audio play
    Just (_,_) -> loop context audio (swap play)
|
ku-fpg/blank-canvas
|
examples/audio/Main.hs
|
Haskell
|
bsd-3-clause
| 1,972
|
module CF where
-- http://rosettacode.org/wiki/Continued_fraction/Arithmetic/G(matrix_NG,_Contined_Fraction_N)#NG
-- represents the homographic transform
-- a1*z + a
-- f(z) --> --------
-- b1*z + b
import Data.Natural
import Data.List (inits)
data NG = NG {a1 :: Natural, a :: Natural, b1 :: Natural, b :: Natural} deriving Show
-- | Can the transform emit its next term? True when the integer quotients
-- of the two coefficient columns agree. Errors out on zero denominators.
output :: NG -> Bool
output ng
  | b1 ng == 0 = error "b1 == 0"
  | b ng == 0 = error "b == 0"
  | otherwise = div (a ng) (b ng) == div (a1 ng) (b1 ng)
-- | The next continued-fraction term the transform would emit: div a b.
-- Errors out when the denominator coefficient b is zero.
term :: NG -> Natural
term ng
  | b ng == 0 = error "b == 0"
  | otherwise = a ng `div` b ng
-- | Absorb one input term t into the transform (substitute z -> t + 1/z).
ingress :: NG -> Natural -> NG
ingress (NG p q r s) t = NG (q + t * p) p (s + t * r) r

-- | Absorb an "infinite" input term: both columns collapse onto the z
-- coefficients. Used when the input continued fraction is exhausted.
inf_ingress :: NG -> NG
inf_ingress ng = NG (a1 ng) (a1 ng) (b1 ng) (b1 ng)
-- | Remove an emitted output term t from the transform (the continuation
-- after producing t). The guards keep the Natural subtractions from
-- underflowing; callers are expected to pass t = 'term'.
egress :: NG -> Natural -> NG
egress (NG a1 a b1 b) t
  | t * b > a = error "t * b > a"
  | t * b1 > a1 = error "t * b1 > a1"
  | otherwise = NG b1 b (a1 - t * b1) (a - t * b)
-- | A (regular) continued fraction, represented by its list of terms.
type CF = [Natural]

-- | Apply a homographic transform to a continued fraction, lazily producing
-- the transformed continued fraction. Clause order matters: the zero-
-- denominator cases must be handled before 'output'/'term' are consulted,
-- since those error out on zero denominators.
ng_apply :: NG -> CF -> CF
-- Both denominator coefficients zero: nothing more can be produced.
ng_apply (NG _ _ 0 0) _ = []
-- Input exhausted with one zero denominator coefficient: feed in an
-- "infinite" term first so 'output' never divides by zero.
ng_apply op@(NG a1 a b1 0) [] = ng_apply (inf_ingress op) []
ng_apply op@(NG a1 a 0 b) [] = ng_apply (inf_ingress op) []
-- Input exhausted: emit a term when the quotients agree, otherwise absorb
-- another infinite term.
ng_apply op@(NG a1 a b1 b) []
  | output op = let t = term op in t : (ng_apply (inf_ingress (egress op t)) [])
  | otherwise = ng_apply (inf_ingress op) []
-- A zero denominator coefficient with input remaining: ingest the next term.
ng_apply op@(NG a1 a b1 0) (x : xs) = ng_apply (ingress op x) xs
ng_apply op@(NG a1 a 0 b) (x : xs) = ng_apply (ingress op x) xs
-- General case: emit when possible, otherwise ingest the next input term.
ng_apply op@(NG a1 a b1 b) (x : xs)
  | output op = let t = term op in t : (ng_apply (ingress (egress op t) x) xs)
  | otherwise = ng_apply (ingress op x) xs
-- | Continued fraction of sqrt 2: [1; 2, 2, 2, ...].
sqrt2 = 1 : (repeat 2)

-- | Continued fraction of e: [2; 1, 2, 1, 1, 4, 1, 1, 6, ...].
e_constant = 2 : 1 : (e_pattern 2)
  where e_pattern n = n : 1 : 1 : (e_pattern (2 + n))

-- | First terms of the continued fraction of pi (no simple pattern exists).
pi_constant :: [Natural]
pi_constant = [3, 7, 15, 1, 292, 1, 1, 1, 2, 1, 3, 1, 14, 2, 1, 1, 2, 2, 2, 2, 1, 84, 2, 1, 1, 15, 3, 13, 1, 4, 2, 6, 6, 99, 1, 2, 2, 6, 3, 5, 1, 1, 6, 8, 1, 7, 1, 2, 3, 7, 1, 2, 1, 1, 12, 1, 1, 1, 3, 1, 1, 8, 1, 1, 2, 1, 6, 1, 1, 5, 2, 2, 3, 1, 2, 4, 4, 16, 1, 161, 45, 1, 22, 1, 2, 2, 1, 4, 1, 2, 24, 1, 2, 1, 3, 1, 2, 1]
-- | Continued fraction of the rational n/d, via the Euclidean algorithm.
r2cf :: Natural -> Natural -> CF
r2cf _ 0 = []
r2cf n d =
  let (q, r) = divMod n d
  in q : r2cf d r
-- | Determinant of the transform's coefficient matrix.
-- NOTE(review): the products are converted to Int before subtracting, which
-- could overflow for very large coefficients — confirm the intended ranges.
det :: NG -> Int
det (NG a1 a b1 b) = (fromIntegral (a1*b)) - (fromIntegral (a*b1))

-- | All intermediate transforms obtained by ingesting successive prefixes of
-- the input terms (debugging helper). Uses lazy foldl; fine for the short
-- lists used here.
lau :: NG -> [Natural] -> [NG]
lau m xs = (map (foldl ingress m) (inits xs))
-- | The golden ratio as a Double, for eyeball comparison with CF output.
phi :: Double
phi = 0.5*(1 + sqrt 5)

-- z/4 applied to sqrt 2.
test0 = ng_apply (NG 1 0 0 4) sqrt2
-- (2z + 1) / 2 applied to 13/11.
test1 = ng_apply (NG 2 1 0 2) (r2cf 13 11)
test2 = ng_apply (NG 1 0 1 2) (1 : tail e_constant) -- (e-1)/(e+1)
|
rzil/CF-Agda
|
CF.hs
|
Haskell
|
bsd-3-clause
| 2,476
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Network.Hawk.Internal.Types where
import Control.Applicative
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as BL
import Data.Text (Text)
import Data.Time.Clock.POSIX (POSIXTime)
import Network.HTTP.Types.Method (Method)
import Network.Hawk.Algo
-- | Identifies a particular client so that their credentials can be
-- looked up.
type ClientId = Text

-- | Extension data included in verification hash. This can be
-- anything or nothing, depending on what the application needs.
type ExtData = ByteString

-- | Struct for attributes which will be encoded in the Hawk
-- @Authorization@ header and included in the verification. The
-- terminology (and spelling) come from the original Javascript
-- implementation of Hawk. All fields are carried verbatim; nothing here is
-- validated at construction time.
data HeaderArtifacts = HeaderArtifacts
  { haMethod :: Method -- ^ Signed request method.
  -- fixme: replace host/port/resource with SplitURL
  , haHost :: ByteString -- ^ Request host.
  , haPort :: Maybe Int -- ^ Request port.
  , haResource :: ByteString -- ^ Request path and query params.
  , haId :: ClientId -- ^ Client identifier.
  , haTimestamp :: POSIXTime -- ^ Time of request.
  , haNonce :: ByteString -- ^ Nonce value.
  , haMac :: ByteString -- ^ Entire header hash.
  , haHash :: Maybe ByteString -- ^ Payload hash.
  , haExt :: Maybe ExtData -- ^ Optional application-specific data.
  , haApp :: Maybe Text -- ^ Oz application, Iron-encoded.
  , haDlg :: Maybe Text -- ^ Oz delegated-by application.
  } deriving (Show, Eq)

----------------------------------------------------------------------------
-- | Value of @Content-Type@ HTTP headers.
type ContentType = ByteString -- fixme: CI ByteString

-- | Payload data and content type bundled up for convenience.
data PayloadInfo = PayloadInfo
  { payloadContentType :: ContentType
  , payloadData :: BL.ByteString
  } deriving Show
----------------------------------------------------------------------------
-- | Authorization attributes for a Hawk message. This is generated by
-- 'Network.Hawk.Client.message' and verified by
-- 'Network.Hawk.Server.authenticateMessage'.
data MessageAuth = MessageAuth
  { msgId :: ClientId -- ^ User identifier.
  , msgTimestamp :: POSIXTime -- ^ Message time.
  , msgNonce :: ByteString -- ^ Nonce string.
  , msgHash :: ByteString -- ^ Message hash.
  , msgMac :: ByteString -- ^ Hash of all message parameters.
  } deriving (Show, Eq)

----------------------------------------------------------------------------
-- | Represents the @WWW-Authenticate@ header which the server uses to
-- respond when the client isn't authenticated. The optional ts/tsm pair is
-- presumably for client clock-skew recovery — confirm against the Hawk spec.
data WwwAuthenticateHeader = WwwAuthenticateHeader
  { wahError :: ByteString -- ^ Error message
  , wahTs :: Maybe POSIXTime -- ^ Server's timestamp
  , wahTsm :: Maybe ByteString -- ^ Timestamp mac
  } deriving (Show, Eq)

-- | Represents the @Server-Authorization@ header which the server
-- sends back to the client.
data ServerAuthorizationHeader = ServerAuthorizationHeader
  { sahMac :: ByteString -- ^ Hash of all response parameters.
  , sahHash :: Maybe ByteString -- ^ Optional payload hash.
  , sahExt :: Maybe ExtData -- ^ Optional application-specific data.
  } deriving (Show, Eq)
|
rvl/hsoz
|
src/Network/Hawk/Internal/Types.hs
|
Haskell
|
bsd-3-clause
| 3,669
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
module Main (main) where
import Codec.Compression.Zlib (compress)
import Control.Applicative ((<$>))
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar
import qualified Control.Exception as C
import Control.Monad (forM, when)
import qualified Data.Attoparsec.ByteString.Char8 as A
import Data.Attoparsec.ByteString.Lazy (parse, Result(..))
import Data.Binary.Put
import Data.Bits
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as LC
import qualified Data.ByteString.Lazy.Internal as I
import Data.Digest.Pure.SHA (bytestringDigest, sha1)
import Data.Int
import Data.IORef
import Data.List (groupBy, nubBy, sortBy)
import Data.Thyme.Clock (UTCTime)
import Data.Thyme.Format (formatTime)
import Data.Thyme.Time () -- For instance Num POSIXTime (a.k.a. NominalDiffTime)
import Data.Thyme.Time.Core (posixSecondsToUTCTime)
import Data.Word
import System.Directory (canonicalizePath, createDirectoryIfMissing, doesDirectoryExist)
import System.Environment (getArgs)
import System.Exit (ExitCode(..))
import System.FilePath (joinPath, splitDirectories, (</>))
import System.IO
( hClose, hFlush, hGetContents, hPutStr, hSeek, hSetFileSize, openFile
, Handle, IOMode(..), SeekMode(AbsoluteSeek)
)
import qualified System.IO.Streams as S
import System.IO.Unsafe (unsafeInterleaveIO)
import System.Locale (defaultTimeLocale)
import System.Process
( createProcess, proc, readProcessWithExitCode, waitForProcess
, CreateProcess(..), StdStream(..)
)
----------------------------------------------------------------------
-- Command-line
----------------------------------------------------------------------
-- | Command-line dispatch. Sub-commands: init, ensure, test, ls, cat, pack,
-- fast-export and fast-import. Repository paths are canonicalized before use.
main :: IO ()
main = do
  args <- getArgs
  case args of
    ["init", gitDir_] -> do
      gitDir <- canonicalizePath gitDir_
      e <- doesDirectoryExist gitDir
      if e
        then error $ "directory " ++ gitDir ++ " already exist"
        else initRepository gitDir
    ["ensure", gitDir_] -> do
      gitDir <- canonicalizePath gitDir_
      ensureGitDir gitDir
    -- Smoke test against hard-coded refs in the current repository.
    ["test"] -> do
      gitDir <- canonicalizePath ".git"
      readRefs gitDir Nothing >>= print
      readHeads gitDir Nothing >>= print
      readObjects gitDir
        [ Ref "32ab47487f560b11fdc758eedd9b43ee7aeb8684" -- blob
        , Ref "740fc0e4923e9f1ee5e0488cb1e7877c990a3f69" -- tree
        , Ref "a5b6d23259c76b66c933ba9940f6afcdf1bf3fff" -- commit
        ] >>= print
      revList gitDir (Ref "a5b6d23259c76b66c933ba9940f6afcdf1bf3fff") >>= print
      readRevs gitDir (Ref "a5b6d23259c76b66c933ba9940f6afcdf1bf3fff") >>= print
    ["ls", gitDir_] -> do
      gitDir <- canonicalizePath gitDir_
      putStrLn "blob" -- access logical blobs, not chunks
      putStrLn "branch"
      putStrLn "chunk" -- access all blobs
      putStrLn "commit"
      putStrLn "time" -- same as commit but using the bup format YY-MM-DD-hhmmss instead of a hash
    ["ls", gitDir_, path] -> do
      gitDir <- canonicalizePath gitDir_
      -- NOTE(review): paths not starting with "branch" fall through this
      -- inner case and crash with a pattern-match failure.
      case splitDirectories path of
        ["branch"] -> do
          hds <- readHeads gitDir Nothing
          mapM_ (BC.putStrLn . fst) hds
        ["branch", branch] -> do
          enterBranch gitDir branch
        "branch" : branch : commit : rest -> enterBranchCommit gitDir ls branch commit rest
    ["cat", gitDir_, path] -> do
      gitDir <- canonicalizePath gitDir_
      -- NOTE(review): same non-exhaustive inner case as "ls" above.
      case splitDirectories path of
        "branch" : branch : commit : rest -> enterBranchCommit gitDir cat branch commit rest
    -- Build a chain of example packfiles, each commit on top of the previous.
    ["pack", gitDir_] -> do
      gitDir <- canonicalizePath gitDir_
      (sha, tree) <- repack gitDir "objects/pack/p1.pack"
        [L "README.md" $ Blob 15 "Nice isn't it?\n"]
        Nothing Nothing
        "Initial commit."
        "refs/heads/master"
      (sha2, tree2) <- repack gitDir "objects/pack/p2.pack"
        [L "new.txt" $ Blob 6 "hello\n"]
        (Just sha) (Just tree)
        "Added new.txt."
        "refs/heads/new-branch"
      (sha3, tree3) <- repack gitDir "objects/pack/p3.pack"
        [T "a" [T "b" [L "c.txt" $ Blob 7 "Super.\n"]]]
        (Just sha2) (Just tree2)
        "Added a/b/c.txt."
        "refs/heads/branch-3"
      _ <- repack gitDir "objects/pack/p4.pack"
        [L "new.txt" $ Blob 4 "bye\n"]
        (Just sha3) (Just tree3)
        "Changed hello to bye."
        "refs/heads/branch-4"
      let blob bs = Blob (L.length bs) bs
      _ <- repack gitDir "objects/pack/p5.pack"
        (groupBlobs
          [ ("README.md", blob "Pack 5\n")
          , ("bin/script.hs", blob "main = putStrLn \"Hello, world!\"\n")
          , ("tests/data/set-1/file-00.txt", blob "10\n")
          , ("tests/data/set-1/file-01.txt", blob "11\n")
          , ("tests/data/EMPTY", blob "")
          , ("tests/data/set-2/file-00.txt", blob "20\n")
          , ("tests/data/set-1/file-02.txt", blob "12\n")
          ])
        Nothing Nothing
        "Initial commit."
        "refs/heads/branch-5"
      return ()
    -- `buh fast-export` is meant to be used with `buh fast-import --files` i.e.
    -- the commit command is not issued.
    -- Example:
    --   buh fast-export .git branch/develop/latest/static/css | buh fast-import --files barerepo
    -- This will create a css directory at the root of the bare repo.
    ["fast-export", gitDir_, path] -> do
      gitDir <- canonicalizePath gitDir_
      checkRepository gitDir
      case splitDirectories path of
        "branch" : branch : commit : rest -> enterBranchCommit gitDir export branch commit rest
    ["fast-import", gitDir_] -> do
      ensureGitDir gitDir_
      gitDir <- canonicalizePath gitDir_
      gitFastImport gitDir False
    ["fast-import", "--files", gitDir_] -> do
      ensureGitDir gitDir_
      gitDir <- canonicalizePath gitDir_
      gitFastImport gitDir True
    xs -> error $ "TODO " ++ show xs
-- | Create a bare Git repository at `path` by shelling out to `git init`.
initRepository path = do
  (_, _, _, p) <- createProcess (proc "git"
    [ "init", "--bare", path
    ])
  _ <- waitForProcess p
  return ()

-- | Error out unless `path` looks like a (bare) Git repository, i.e. has
-- objects/ and refs/ subdirectories.
-- | TODO Make the check more comprehensive.
checkRepository path = do
  e <- doesDirectoryExist path
  e' <- doesDirectoryExist $ path </> "objects"
  e'' <- doesDirectoryExist $ path </> "refs"
  if e && e' && e''
    then return ()
    else error "Not a Git repository."

-- | Create and initialize the repository directory if it does not exist yet.
ensureGitDir gitDir = do
  e <- doesDirectoryExist gitDir
  if e
    then return ()
    else do
      createDirectoryIfMissing True gitDir
      initRepository gitDir
-- | Pipe stdin into `git fast-import`.
-- If doCommit is True, the commit command is automatically sent before the
-- rest of the input stream.
gitFastImport gitDir doCommit = do
  is <- if doCommit
    then do
      -- If the ref already exists, then continue the commit from it.
      refs <- readHeads gitDir $ Just $ Ref "refs/heads/fast-import"
      commit <- if length refs == 1
        then S.fromLazyByteString . runPut $ feCommit (Just "refs/heads/fast-import")
        else S.fromLazyByteString . runPut $ feCommit Nothing
      S.appendInputStream commit S.stdin
    else return S.stdin
  -- Spawn git with only its stdin piped; GIT_DIR selects the repository.
  (Just hin, _, _, p) <- createProcess (proc "git"
    [ "fast-import", "--date-format=now"
    ]) { env = Just [("GIT_DIR", gitDir)]
       , std_in = CreatePipe
       }
  sIn <- S.handleToOutputStream hin >>=
         S.atEndOfOutput (hClose hin)
  S.connect is sIn
  _ <- waitForProcess p
  return ()
-- | Print the (timestamp-named) commits reachable from a branch head.
enterBranch :: FilePath -> String -> IO ()
enterBranch gitDir branch = do
  hds <- readHeads gitDir Nothing
  Sha sha <- lookupPath (BC.pack branch) hds
  cs <- readRevs gitDir (Ref sha)
  mapM_ (BC.putStrLn . fst) cs

-- | Resolve branch name + commit label + path components, then run the
-- continuation `f` on the object found at that path inside the commit's tree.
enterBranchCommit :: FilePath -> (FilePath -> String -> Object -> IO a) -> String -> String -> [String] -> IO a
enterBranchCommit gitDir f branch commit path = do
  hds <- readHeads gitDir Nothing
  Sha sha <- lookupPath (BC.pack branch) hds
  cs <- readRevs gitDir (Ref sha)
  Ref sha' <- lookupPath (BC.pack commit) cs
  Commit (Just tree) _ _ <- readCommit gitDir $ Ref sha'
  enter gitDir f "/" path tree
-- | Directory-listing continuation for 'enterBranchCommit': print the entry
-- names of a tree, or the path itself for a blob.
ls :: FilePath -> String -> Object -> IO ()
ls _ _ (Commit _ _ _) = error "resolve to a commit"
ls _ _ (Tree entries) = mapM_ (BC.putStrLn . fst) (treeToRefs entries)
ls _ path (Blob _ _) = putStrLn path

-- | Cat continuation for 'enterBranchCommit': write a blob's bytes to stdout.
cat :: FilePath -> String -> Object -> IO ()
cat _ _ (Commit _ _ _) = error "resolve to a commit"
cat _ _ (Tree _) = error "is a directory"
cat _ _ (Blob _ contents) = L.putStr contents
-- | Fast-export continuation: emit a blob (or a whole tree, recursively) in
-- `git fast-import` format on stdout.
export :: FilePath -> String -> Object -> IO ()
export gitDir p o = export' gitDir p ps o
  where ps = if p == "/" then [] else [p]

-- | Worker for 'export'; `ps` is the reversed path accumulated so far.
export' :: FilePath -> String -> [String] -> Object -> IO ()
export' gitDir p ps o = case o of
  Commit _ _ _ -> error "resolve to a commit"
  Tree es -> do
    let refs = treeToRefs es
        f (p', ref) = do
          o <- readObject gitDir ref
          export' gitDir (BC.unpack p') (BC.unpack p':ps) o
    mapM_ f refs
  -- Blob _ bs -> putStrLn . ("Exporting " ++) . joinPath $ reverse ps
  Blob _ _ -> L.putStr . runPut $ fileModify (BC.pack . joinPath $ reverse ps, o)
-- | Walk the path components `ps` down the object graph starting at `ref`,
-- then apply the continuation `f` to the object found.
enter :: FilePath -> (FilePath -> String -> Object -> IO a) -> String -> [String] -> Ref -> IO a
enter gitDir f p ps ref = do
  o <- readObject gitDir ref
  case ps of
    p':ps' -> case o of
      Blob _ _ -> error $ "no file '" ++ p' ++ "'"
      Tree es -> do
        ref' <- lookupPath (BC.pack p') $ treeToRefs es
        enter gitDir f p' ps' ref'
      Commit _ _ _ -> error "Deref the tree ?"
    [] -> f gitDir p o

-- | Association-list lookup that errors out on a missing key.
lookupPath :: ByteString -> [(ByteString, a)] -> IO a
lookupPath k es = do
  case lookup k es of
    Just v -> return v
    _ -> error $ "no file '" ++ BC.unpack k ++ "'" -- TODO exitFailure
-- TODO Also make it possible to specify the mode of each entry.
-- | Write the `T`/`L` tree description into the pack, layered on top of an
-- existing tree (`mref`, Nothing meaning the empty tree), returning the ref
-- of the resulting tree object.
rewrite gitDir packer xs mref = do
  o <- maybe (return $ Tree []) (readObject gitDir) mref
  case o of
    Blob _ _ -> error $ "file exists"
    Tree es -> do
      es' <- forM xs $ \x -> do
        case x of
          L name blob -> do
            sha <- pack packer blob
            return (normalFile, name, sha)
          T name ys -> do
            sha <- rewrite gitDir packer ys $ lookup name $ treeToRefs es
            return (subdirectory, name, sha)
      -- New entries shadow old ones with the same name: nubBy keeps the
      -- first occurrence and es' is prepended.
      pack packer $ Tree . nubBy (\(_, a, _) (_, b, _) -> a == b) $ es' ++ es
    Commit _ _ _ -> error "expected tree is a commit"

-- | A rose tree describing a directory hierarchy: T nodes are directories,
-- L leaves are files.
data T p a = T p [T p a] | L p a
  deriving Show
-- Hand-written fixtures for exercising groupBlobs': triples of
-- ([directory components], name, blob).
input1 =
  [ ([], "a", 1)
  ]

input2 =
  [ ([], "a", 1)
  , ([], "b", 2)
  ]

input3 =
  [ ([], "a", 1)
  , ([], "b", 2)
  , (["e"], "c", 3)
  ]

input4 =
  [ ([], "a", 1)
  , ([], "b", 2)
  , (["e"], "c", 3)
  , (["f"], "d", 4)
  ]

input5 =
  [ ([], "a", 1)
  , ([], "b", 2)
  , (["e"], "c", 3)
  , (["f", "g"], "d", 4)
  , (["f"], "i", 6)
  , ([], "h", 5)
  ]
-- | Normalise (path, blob) pairs into ([dirs], filename, blob) triples,
-- keeping only the LAST occurrence of each duplicated path.
listBlobs = map listBlob . reverse . nubBy samePath . reverse
  where samePath (p1, _) (p2, _) = p1 == p2

-- | Split one path into its directory components and filename.
listBlob (path, blob) =
  case splitDirectories (BC.unpack path) of
    [] -> error "at least a filename must be given"
    parts -> (map BC.pack (init parts), BC.pack (last parts), blob)
-- | Group blobs into a tree.
groupBlobs = groupBlobs' . listBlobs

-- | Build T/L nodes from ([dirs], name, blob) triples: entries with no
-- directory prefix become leaves; entries sharing a leading directory are
-- grouped into a subtree and recursed on.
groupBlobs' blobs = map (\(_, a, b) -> L a b) direct ++ rest
  where
  rest = map (\(d, bs) -> T d $ groupBlobs' bs) $ map pops $ groupBy f indirect
  -- Factor out the shared leading directory of one group.
  pops es@((x:_, _, _):_) = (x, map pop es)
  pops [] = error "can't happen" -- groupBy returns non-empty lists
  pop (_:xs, b, c) = (xs, b, c)
  pop _ = error "can't happen" -- pop is called only on the indirect elements
  -- After sorting, the prefix-free ("direct") entries come first.
  (direct, indirect) = span isDirect $ sortBlobs blobs
  isDirect ([], _, _) = True
  isDirect _ = False
  f ([], _, _) ([], _, _) = True -- unused, f is called on the indirect elements
  f (x:_, _, _) (y:_, _, _) = x == y
-- | This is used to group blobs by path, only to arrange them in trees within
-- `groupBlobs`. The order is not the same as Git's Tree object. I.e. objects
-- will be written in the packfile in a slightly different order than they are
-- referenced in the Tree object.
sortBlobs :: Ord p => [([p], p, a)] -> [([p], p, a)]
sortBlobs = sortBy cmp
  where
  -- Lexicographic on (directory components, name); blob payload ignored.
  cmp (ps1, n1, _) (ps2, n2, _) = compare (ps1, n1) (ps2, n2)
-- | `repack _ fn` creates a new packfile stored at `fn` containg a tree of blobs
-- shadowing an optional tree. The resulting is referenced by commit, which
-- can receive an optional parent.
repack gitDir fn blobs msha mtree msg branch = do
  packer <- newPack fn
  tree' <- rewrite gitDir packer blobs mtree
  Ref sha' <- pack packer $ Commit (Just tree') msha msg
  completePack packer
  -- NOTE(review): this writes relative to the current directory, not gitDir
  -- — confirm callers always run from inside the repository.
  B.writeFile branch $ sha' `BC.append` "\n" -- TODO Use git-update-ref.
  return (Ref sha', tree')
----------------------------------------------------------------------
-- Read git objects and refs
----------------------------------------------------------------------
-- | Convert Tree entries (mode, name, ref) to (name, ref) pairs, matching
-- the shape returned by readHeads.
treeToRefs :: [(ByteString, ByteString, Ref)] -> [(ByteString, Ref)]
treeToRefs entries = [ (name, ref) | (_, name, ref) <- entries ]

-- | A 40-character hexadecimal SHA1 as printed by git.
newtype Sha = Sha ByteString
  deriving (Eq, Show)

-- | A reference to an object: a hex SHA1 or a symbolic name.
newtype Ref = Ref { unRef :: ByteString } -- TODO Can it actually be non-ascii ?
  deriving Show

-- | The three git object kinds this tool manipulates.
data Object =
    Blob Int64 L.ByteString
  | Tree [(ByteString, ByteString, Ref)] -- ^ Mode, name, sha
  | Commit (Maybe Ref) (Maybe Ref) ByteString -- ^ Tree ref, parent ref, message.
  deriving Show
-- | `git show-ref`: list refs (optionally filtered by one ref pattern) as
-- (ref name, sha) pairs.
readRefs :: FilePath -> Maybe Ref -> IO [(Ref, Sha)]
readRefs gitDir mref = do
  (code, out, _) <- readProcessWithExitCode' "git"
    ([ "show-ref", "--" ] ++ maybe [] s mref)
    [("GIT_DIR", gitDir)]
    ""
  if code == ExitSuccess
    then return . map (p . words) $ lines out
    -- git show-ref returns a exit code 1 when there is no ref.
    -- TODO Differentiate between no ref and other non-zero exit codes.
    else return []
  where s (Ref r) = [BC.unpack r]
        -- Output lines are "<sha> <refname>".
        p [sha, r] = (Ref $ BC.pack r, Sha $ BC.pack sha)
        p _ = error "unexpected git-show-ref output"

-- | Like readRefs, but return only those matching `refs/heads`.
readHeads :: FilePath -> Maybe Ref -> IO [(ByteString, Sha)]
readHeads gitDir mref = do
  refs <- readRefs gitDir mref
  return $ map unref $ filter (prefix . fst) refs
  where unref (Ref r, sha) = (BC.drop 11 r, sha) -- drop "refs/heads/"
        prefix (Ref r) = BC.isPrefixOf "refs/heads/" r
-- | `git cat-file --batch`: feed each ref on the child's stdin and parse the
-- corresponding object off its stdout.
-- TODO Keep the process `git cat-file` around and query it instead of
-- respawning it again and again.
readObjects :: FilePath -> [Ref] -> IO [Object]
readObjects gitDir refs = do
  (Just pIn, Just pOut, _, p) <- createProcess (proc "git"
    [ "cat-file", "--batch"
    ])
    { std_in = CreatePipe
    , std_out = CreatePipe
    , env = Just [("GIT_DIR", gitDir)]
    }
  let putRef (Ref r) = do
        BC.hPutStrLn pIn r
        hFlush pIn
      -- Each response is "<sha> <type> <size>\n<size bytes>\n".
      readOne = do
        ws <- BC.words <$> BC.hGetLine pOut
        case ws of
          [sha, typ, size_] | Just (size, _) <- BC.readInteger size_ -> do
            -- TODO hGet takes an Int, maybe we should read again if the Int64
            -- is really useful.
            o <- L.hGet pOut (fromInteger size)
            nl <- BC.hGetLine pOut
            when (nl /= "") $ error "unexpected git-cat-file output (1)"
            case typ of
              "blob" -> return $ Blob (fromInteger size) o
              "tree" -> do
                -- Raw tree entries: "<mode> <name>\0<20 raw sha bytes>",
                -- repeated until the payload is exhausted.
                let loop xs s = do
                      if L.null s
                        then return . Tree $ reverse xs
                        else do
                          -- Maybe rewrite this with attoparsec.
                          let (a, b) = LC.span (/= ' ') s
                              c = LC.drop 1 b
                              (d, d') = LC.span (/= '\0') c
                              e = LC.drop 1 d'
                              (f, g) = LC.splitAt 20 e
                          loop ((toStrict a, toStrict d, Ref . B16.encode $ toStrict f) : xs) g
                loop [] o
              "commit" -> return $ parseCommit o
              _ -> error "unexpected git-cat-file output (2)"
          x -> error $ "unexpected git-cat-file output (3)" ++ show x
  os <- mapM (\r -> putRef r >> readOne) refs
  hClose pIn
  _ <- waitForProcess p
  return os
-- | Convert a lazy ByteString to a strict one.
toStrict :: L.ByteString -> ByteString
toStrict = L.toStrict
-- | Similar to `readObjects` with a single ref.
readObject :: FilePath -> Ref -> IO Object
readObject gitDir ref = do
  os <- readObjects gitDir [ref]
  case os of
    [o] -> return o
    _ -> error $ "can't happen"

-- | Similar to `readObjects` (with a single ref), and error out if the result
-- is not a blob.
readBlob :: FilePath -> Ref -> IO Object
readBlob gitDir ref = do
  o <- readObject gitDir ref
  case o of
    Blob _ _ -> return o
    _ -> error $ "expected blob object"

-- | Similar to `readObjects` (with a single ref), and error out if the result
-- is not a commit.
readCommit :: FilePath -> Ref -> IO Object
readCommit gitDir ref = do
  o <- readObject gitDir ref
  case o of
    Commit _ _ _ -> return o
    _ -> error $ "expected commit object"

-- | Similar to `readObjects` (with a single ref), and error out if the result
-- is not a tree.
readTree :: FilePath -> Ref -> IO Object
readTree gitDir ref = do
  o <- readObject gitDir ref
  case o of
    Tree _ -> return o
    _ -> error $ "expected tree object"
-- | Parse the tree ref out of a raw commit object. Parent and message are
-- not yet extracted (TODO): the rest of the body is consumed and discarded.
parseCommit :: L.ByteString -> Object
parseCommit bs = case parse p bs of
  Fail _ _ err -> error err
  Done _ r -> r
  where
  p = do
    _ <- A.string "tree " -- TODO Optional.
    treeSha <- A.takeWhile isSha
    -- NOTE(review): `error` inside the parser bypasses attoparsec's failure
    -- handling — consider `fail` instead.
    when (B.length treeSha /= 40) $ error "unexpected tree ref length"
    _ <- A.char '\n'
    -- TODO
    _ <- A.takeByteString
    A.endOfInput
    return $ Commit (Just $ Ref treeSha) Nothing ""
  -- Hexadecimal digit, either case.
  isSha c = (c >= '0' && c <= '9') ||
            (c >= 'a' && c <= 'f') ||
            (c >= 'A' && c <= 'F')
-- | `git rev-list --pretty=format:%at`: commits reachable from `ref`, each
-- paired with its author timestamp.
revList :: FilePath -> Ref -> IO [(Ref, UTCTime)]
revList gitDir ref = do
  (code, out, _) <- readProcessWithExitCode' "git"
    [ "rev-list", "--pretty=format:%at", BC.unpack $ unRef ref ]
    [("GIT_DIR", gitDir)]
    ""
  if code == ExitSuccess
    then return . p [] $ lines out
    else error "git failed"
  where
  -- Output alternates "commit <sha>" / "<timestamp>" lines. A commit line
  -- immediately followed by another commit line is given timestamp 0.
  -- TODO read
  p xs (l1:l2:rest) | "commit":_ <- words l2 =
    p ((Ref . BC.pack $ drop 7 l1, posixSecondsToUTCTime . fromInteger $ 0) : xs) (l2:rest)
  p xs (l1:l2:rest) =
    p ((Ref . BC.pack $ drop 7 l1, posixSecondsToUTCTime . fromInteger $ read l2) : xs) rest
  p xs [l1] | "commit":_ <- words l1 =
    p ((Ref . BC.pack $ drop 7 l1, posixSecondsToUTCTime . fromInteger $ 0) : xs) []
  p xs [] = reverse xs
  p _ _ = error "unexpected line from git-rev-list"
-- | Similar to `revList` but the result type matches `readHeads`: commits
-- keyed by a bup-style timestamp, with an extra "latest" alias first.
readRevs :: FilePath -> Ref -> IO [(ByteString, Ref)]
readRevs gitDir ref = do
  refs <- revList gitDir ref
  return . latest $ map f refs
  where
  f (r, t) = (BC.pack $ formatTime locale format t, r)
  locale = defaultTimeLocale
  format = "%Y-%m-%d-%H%M%S"
  latest (x@(_, r) : xs) = ("latest", r) : x : xs
  latest _ = []
----------------------------------------------------------------------
-- Write packfile
--
-- A packfile can be verified with `git verify-pack`. It needs a corresponding
-- `.idx` file which be can generated with `git index-pack`. E.g.:
--
-- > git index-pack t.pack
-- > git verify-pack -v t.pack
-- 32ab47487f560b11fdc758eedd9b43ee7aeb8684 blob 749 349 12
-- non delta: 1 object
-- t.pack: ok
--
-- Those two commands don't need a a Git repository to work. On the other
-- hand, to use a command such as `git cat-file`, a real Git repository must
-- be provided:
--
-- > git init --bare repo
-- > cp t.{idx,pack} repo/objects/pack/
-- > cd repo
-- > echo 32ab47487f560b11fdc758eedd9b43ee7aeb8684 | git cat-file --batch
-- 32ab47487f560b11fdc758eedd9b43ee7aeb8684 blob 749
-- ... blob content ...
--
-- If the packfile contain a commit, we can pretend HEAD points to it, inspect
-- it, or even do a checkout:
--
-- > echo 709149cd69d4e13c8740e5bb3d832f97fcb08878 > refs/heads/master
-- > git log
-- commit 709149cd69d4e13c8740e5bb3d832f97fcb08878
--
-- > mkdir ../work
-- > GIT_WORK_TREE=../work git checkout master
-- Already on 'master'
--
----------------------------------------------------------------------
-- | Incrementally build a pack. It also builds the index. TODO Build the index
-- as the packfile is built, not afterwards.
newPack :: FilePath -> IO Packer
newPack fn = do
  h <- openFile fn ReadWriteMode
  hSetFileSize h 0 -- TODO Instead use a temporary (and thus new) file,
                   -- moving it to the correct path when it is complete.
  -- 8-byte header ("PACK", version 2) followed by a 4-byte placeholder
  -- object count. The number of objects will be set in `completePack`.
  BC.hPut h "PACK\0\0\0\2\0\0\0\0"
  counter <- newIORef 0
  return Packer
    { packerPack = \o -> do
        modifyIORef counter succ
        let (sha, bs) = runPutM $ putObject o
        L.hPut h bs
        return sha
    , packerComplete = do
        -- Patch the object count at offset 8, then append the SHA1 of the
        -- whole file and build the .idx via git index-pack.
        hSeek h AbsoluteSeek 8
        n <- readIORef counter
        L.hPut h . runPut . putWord32be $ n
        hSeek h AbsoluteSeek 0
        content <- hReadAll h
        let sha = bytestringDigest $ sha1 content
        L.hPut h sha
        hClose h
        indexPack fn
    }
-- | Run `git index-pack` to generate the .idx file for the given packfile.
indexPack :: String -> IO ()
indexPack path = do
  (_, _, _, ph) <- createProcess $ proc "git" ["index-pack", "-v", path]
  _ <- waitForProcess ph
  return ()
-- | This is the function hGetContentsN from the bytestring package, minus the
-- handle closing bit of code. Reads the handle lazily, chunk by chunk.
hReadAll :: Handle -> IO L.ByteString
hReadAll h = lazyRead -- TODO close on exceptions
  where
  lazyRead = unsafeInterleaveIO loop
  loop = do
    c <- B.hGetSome h I.defaultChunkSize -- only blocks if there is no data available
    if B.null c
      then return I.Empty
      else do
        cs <- lazyRead
        return $ I.Chunk c cs
-- | Serialize one object into the pack, returning its loose-format SHA1 ref.
pack :: Packer -> Object -> IO Ref
pack = packerPack

-- | Like 'pack', discarding the resulting ref.
pack_ :: Packer -> Object -> IO ()
pack_ packer o = do
  _ <- packerPack packer o
  return ()

-- | Finalize the packfile (fix up the header, write the trailer, index it).
completePack :: Packer -> IO ()
completePack packer = packerComplete packer

-- | Handle on an incrementally-built packfile.
data Packer = Packer
  { packerPack :: Object -> IO Ref
  , packerComplete :: IO ()
  }
-- | Write a packfile. The content of the packfile is provided as a
-- `Data.Binary.Put` serializer. The number of objects must be provided
-- explicitely.
writePack :: FilePath -> Int -> Put -> IO ()
writePack fn n os = L.writeFile fn p
  -- TODO Compute the SHA1 sum on-the-fly.
  where p_ = runPut $ buildPack n os
        sha = bytestringDigest $ sha1 p_
        -- The pack trailer is the SHA1 of everything that precedes it.
        p = p_ `L.append` sha

-- | Build a packfile, minus its SHA1 sum.
buildPack :: Int -> Put -> Put
buildPack n os = do
  -- "PACK" magic + version 2, then the object count, then the objects.
  putByteString "PACK\0\0\0\2"
  putWord32be . fromIntegral $ n
  os
-- | Serialize an object, using the packfile format: a type+size header
-- ('putLength', types 1/2/3 = commit/tree/blob) followed by the
-- zlib-compressed payload. Returns the object's loose-format SHA1.
putObject :: Object -> PutM Ref
putObject o = case o of
  Blob size bs -> do
    putLength 3 size -- Assume that L.length bs == size.
    putLazyByteString $ compress bs
    return $ computeSha o
  Tree es -> do
    let bs = runPut $ putTree es
    putLength 2 $ L.length bs
    putLazyByteString $ compress bs
    return $ computeSha o
  Commit mtree mparent msg -> do
    let bs = runPut $ putCommit mtree mparent Nothing Nothing msg
    putLength 1 $ L.length bs
    putLazyByteString $ compress bs
    return $ computeSha o
-- | Each object stored in a packfile still retain its loose object SHA1 sum.
-- The returned ref is hex-encoded.
computeSha :: Object -> Ref
computeSha o =
  Ref . B16.encode . toStrict . bytestringDigest . sha1 . runPut $ putLoose o
-- | Serialize an object using the loose format (but not yet zlib
-- compressed): "<type> <size>\0<payload>".
putLoose :: Object -> Put
putLoose obj = case obj of
  Blob size bs ->
    emit "blob " (BC.pack $ show size) bs
  Tree es ->
    let bs = runPut $ putTree es
    in emit "tree " (BC.pack . show $ L.length bs) bs
  Commit mtree mparent msg ->
    let bs = runPut $ putCommit mtree mparent Nothing Nothing msg
    in emit "commit " (BC.pack . show $ L.length bs) bs
  where
  -- Shared header/payload layout for all three object kinds.
  emit tag len payload = do
    putByteString tag
    putByteString len
    putWord8 0
    putLazyByteString payload
-- | Variable length unsigned integer encoding, used in the packfile format.
-- The type of the object is included.
putLength :: Word8 -> Int64 -> Put
putLength t n = loop size b
  where
  -- Object type is in the three last bits of the first nibble
  -- The first bit (not yet set) is the "continue" bit.
  -- /  / The second nibble contains the size.
  b = (shiftL t 4) .|. (fromIntegral n .&. 0x0f)
  size = shiftR n 4
  loop sz c =
    if sz /= 0
      then do
        putWord8 $ c .|. 0x80 -- set the "continue"
        loop (shiftR sz 7) (fromIntegral sz .&. 0x7f) -- and continue with the next 7 bits
      else putWord8 c
-- | Write triple (Mode, name, sha) as a `tree` object in the packfile format.
putTree :: [(ByteString, ByteString, Ref)] -> Put
putTree es = mapM_ putEntry es'
  where
  -- Git sorts tree entries by name, with directory names compared as if
  -- suffixed by "/".
  es' = sortBy filenames es
  filenames (mode1, n1, _) (mode2, n2, _) = compare (f mode1 n1) (f mode2 n2)
    where f mode n = if mode == subdirectory then n `B.append` "/" else n
  -- Entry layout: "<mode> <name>\0" followed by the raw (20-byte) sha.
  putEntry (mode, name, Ref sha) = do
    putByteString mode
    putWord8 32 -- that is ' '
    putByteString name
    putWord8 0
    case B16.decode sha of
      (sha', rest) | B.null rest -> putByteString sha'
      _ -> error "putEntry: invalid sha"
-- | Serialize a commit object (tree, optional parent, message). The author
-- and committer arguments are accepted but not yet written (TODO).
putCommit :: Maybe Ref -> Maybe Ref -> Maybe (ByteString, UTCTime)
  -> Maybe (ByteString, UTCTime) -> ByteString -> Put
putCommit mtree mparent mauthor mcommitter msg = do
  -- Emit "<label> <value>\n" when the Maybe is present.
  let opt s f m = do
        maybe (return ()) (\v -> do
          putByteString s
          putByteString $ f v
          putWord8 10 -- that is '\n'
          ) m
  opt "tree " unRef mtree
  opt "parent " unRef mparent
  -- TODO
  putWord8 10
  putByteString msg
-- | Git mode bits for a regular, non-executable file.
normalFile :: ByteString
normalFile = "100644"

-- | Git mode bits for a directory (tree) entry.
subdirectory :: ByteString
subdirectory = "040000"
----------------------------------------------------------------------
-- serialization to `git fast-import` format
--
-- Example usage:
-- buh fast-export | git fast-import --date-format=now
----------------------------------------------------------------------
-- | Stream the fast-export representation of @files@ to stdout.
-- NOTE(review): the @gitDir@ argument is unused here -- confirm whether
-- callers still need to pass it.
fastExport gitDir mfrom files = L.putStr $ toFastExport mfrom files

-- | Render one commit (optionally chained onto the ref @mfrom@) plus its
-- file modifications in `git fast-import` syntax.
toFastExport mfrom files = runPut (feCommit mfrom >> feFiles files)

-- | Emit the commit header for the fast-import stream.
feCommit (mfrom) = do
  putByteString "commit refs/heads/fast-import\n"
  -- mark?
  -- author?
  putByteString "committer Vo Minh Thu <noteed@gmail.com> now\n" -- TODO git date format
  putByteString "data 0\n"
  -- Chain onto an existing ref so history is appended rather than replaced.
  maybe (return ()) (\ref -> putByteString $ B.concat ["from ", ref, "^0\n"]) mfrom
-- | Emit one filemodify command per (path, blob) pair.
feFiles = mapM_ fileModify

-- | Emit a fast-import `M` (filemodify) command with inline blob data.
-- NOTE(review): only the 'Blob' constructor is matched; a 'Tree' or
-- 'Commit' here is a pattern-match failure -- confirm callers only ever
-- supply blobs.
fileModify (path, Blob n bs) = do
  putByteString "M "
  putByteString normalFile
  putByteString " inline "
  putByteString path
  putByteString "\ndata "
  putByteString . BC.pack $ show n
  putByteString "\n"
  putLazyByteString bs
  putByteString "\n"
-- | Same as System.Process.readProcessWithExitCode but allow to pass an
-- environment.
--
-- stdout and stderr are each drained by a forked thread while stdin is
-- written; draining concurrently prevents the child from blocking on a
-- full pipe (the classic createProcess deadlock).
readProcessWithExitCode'
  :: FilePath -- ^ command to run
  -> [String] -- ^ any arguments
  -> [(String, String)] -- ^ environment
  -> String -- ^ standard input
  -> IO (ExitCode,String,String) -- ^ exitcode, stdout, stderr
readProcessWithExitCode' cmd args env input = do
  (Just inh, Just outh, Just errh, pid) <-
    createProcess (proc cmd args)
      { std_in = CreatePipe
      , std_out = CreatePipe
      , std_err = CreatePipe
      , env = Just env
      }
  outMVar <- newEmptyMVar
  -- fork off a thread to start consuming stdout
  out <- hGetContents outh
  forkIO $ C.evaluate (length out) >> putMVar outMVar ()
  -- fork off a thread to start consuming stderr
  err <- hGetContents errh
  forkIO $ C.evaluate (length err) >> putMVar outMVar ()
  -- now write and flush any input
  when (not (null input)) $ do hPutStr inh input; hFlush inh
  hClose inh -- done with stdin
  -- wait on the output
  -- (one take per drainer thread; completion order does not matter)
  takeMVar outMVar
  takeMVar outMVar
  hClose outh
  -- wait on the process
  ex <- waitForProcess pid
  return (ex, out, err)
|
noteed/buh
|
bin/buh.hs
|
Haskell
|
bsd-3-clause
| 28,165
|
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2015 Dimitri Sabadie
-- License : BSD3
--
-- Maintainer : Dimitri Sabadie <dimitri.sabadie@gmail.com>
-- Stability : experimental
-- Portability : portable
--
-- Position in space is a 3-float vector.
----------------------------------------------------------------------------
module Quaazar.Geometry.Position (
-- * Position
Position(..)
, pos
) where
import Data.Aeson
import Data.Aeson.Types ( typeMismatch )
import Linear ( V3(..) )
import Quaazar.Render.GL.Shader ( Uniformable(..) )
-- |Position in space a.k.a. space coordinates.
-- A newtype over @'V3' 'Float'@ so positions get their own instances.
newtype Position = Position { unPosition :: V3 Float } deriving (Eq,Ord,Show)
-- |Decode a 'Position' from a JSON array of exactly three numbers;
-- anything else is reported as a type mismatch.
instance FromJSON Position where
  parseJSON v = do
    components <- parseJSON v
    case components of
      [x,y,z] -> return (pos x y z)
      _       -> typeMismatch "position" v
-- |A 'Position' is sent to a shader as its underlying @'V3' 'Float'@.
instance Uniformable Position where
  sendUniform l = sendUniform l . unPosition
-- |Build a 'Position' from /x/, /y/ and /z/ components.
-- Total; no validation is performed on the components.
pos :: Float -> Float -> Float -> Position
pos x y z = Position (V3 x y z)
|
phaazon/quaazar
|
src/Quaazar/Geometry/Position.hs
|
Haskell
|
bsd-3-clause
| 1,124
|
module Tutorial where
import MFlow.Wai.Blaze.Html.All
import Data.Monoid
-- widget signature : View rendering monad returnValue
-- flow aignature: FlowM rendering monad returnValue
-- page : View v m a -> FlowM v m a
main = runNavigation "" . step $ do
r <- page $ h3 << "Basic example with inputs and links"
++> getString (Just "text input")
<* submitButton "OK"
page $ b << ("you typed: "++ r)
++> p << "wlink's return typed values to the flow"
++> p << "in MFlow a page is a widget made of widgets"
++> wlink () << p << "next"
r <- page $ h3 << "Operators"
++> p << " The operator (++>) prepend HTML to a widget"
++> p << "in this case, a field that expect an Int"
++> getInt Nothing
<! [("placeholder","This is an attribute for the getInt form")]
<* submitButton "OK"
<++ p << "the operator (<<) add text to a blaze-html tag in this case"
<> p << "If the OverloadedStrings extension is used, this is not necessary"
<> p << " Note that form tags are added automatically"
page $ b << ("you entered: "++ show r) ++> wlink () << p << "next"
r <- page $ h3 << "Here the alternative operator is used to choose between two options"
++> wlink True << b << "True" <++ br
<|> wlink False << b << "False"
page $ b << ("you entered: "++ show r) ++> wlink () << p << "next"
-- |+| <+>
r <- page $ ul << h3 << "More operators"
++> li << " (<<<) embed a widget in a tag"
++> li << " (|*>) intersperse a widget within a list of others"
++> ul << li << "in this case, a link is interspersed within a list of input fields"
++> ((h3 <<< wlink "next" << b << "next")
|*> [getString Nothing <![("placeholder","enter field "++ show i)] | i <- [1..3]])
<** submitButton "OK"
case r of
(Just _, Nothing) -> return ()
(Nothing, Just s) -> do
page $ p << ("you entered " ++ s ++ " in some box")
++> wlink " next" << b << "next"
return()
page $ pageFlow "first" $ do
r <- h3 << "Page flows: run within the View monad, within a page"
++> p << "The call \"pageFlow\" creates a set of unique identifiers and stores\
\the data entered during the steps of the pageflow to replay the computation\
\each time the page is refreshed."
++> p << "until the last monadic statement does not validate, the page Flow will execute again and again"
++> wlink True << b << "True" <++ br
<|> wlink False << b << "False"
p << "Until the first line is not validated, the second does not execute"
++> p << ("you entered: "++ show r) ++> wlink () << p << "click"
page $ pageFlow "second" $ do
h2 << "Field validators" ++> wlink () << p << "next"
r <- p << "this field expect a string less than 5 characters. Otherwise it present a alert message"
++> getString Nothing
`validate`
(\r -> if length r > 5 then return . Just $ script << "alert ('length must be lsst than five chars ')"
else return Nothing)
<* submitButton "OK"
p << ("you entered: "++ show r) ++> wlink () << p << "click"
page $ pageFlow "third" $ do
h2 << "A select/option box" ++> wlink () << b << "click" <++ br
r <- getSelect
(setSelectedOption "" << p << "select a option" <|>
setOption "red" << b << "red" <|>
setOption "blue" << b << "blue" <|>
setOption "Green" << b << "Green")
<! [("onchange","this.form.submit()")]
p << (r ++ " selected") ++> wlink () << p << "next"
let colors= getCheckBoxes( (setCheckBox False "Red" <++ b << "red")
<> (setCheckBox False "Green" <++ b << "green")
<> (setCheckBox False "blue" <++ b << "blue"))
page $ pageFlow "four" $ do
h2 << "checkboxes" ++> wlink () << b << "click" <++ br
r <- colors <** submitButton "submit"
p << (show r ++ " selected") ++> wlink () << p << " menu"
page $ (do
h3 << "A mix of applicative and monadic operators can be used to create multiple pageflows within a page"
++> p << "here two pageflows are composed with an alternative operator"
++> p << "the button triggers the presentation of the changes in both elements."
++> p << "they are part of the same HTML form, but they change their display depending on the input"
++> wlink "" << p << "click here"
(pageFlow "colors" (do
r <- colors <++ br
p << (show r ++ "selected") ++> noWidget <++ br)
<|>
pageFlow "input" (do
r <- getString Nothing <++ br
p << (show r ++"entered") ++> noWidget <++ br)
<* submitButton "OK" ))
<|> br ++> wlink "next" << b << "skip to the next"
<++ p << "this link is alternative (<|>) to the rest of the page, so it can be pressed\
\to skip it at any moment"
return ()
main= runNavigation "" .step . page (pageFlow "s" ( do
n <- getInt Nothing <++ "first"
n' <- getInt (Just n) <++ "second"
p << (n+n') ++> wlink () "click to repeat")
<** submitButton "Send")
|
agocorona/MFlow
|
Demos/Tutorial 1.hs
|
Haskell
|
bsd-3-clause
| 5,648
|
module Blog.BackEnd.RefererStream where
import qualified System.Log.Logger as L
import qualified Blog.FrontEnd.Views as V
import qualified Blog.Constants as C
import qualified Control.Monad as CM
import qualified Data.Map as DM
import Data.List ( isPrefixOf )
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Concurrent.MVar
-- | Handle to the referer-tracking service: a single request channel read
-- by the worker thread started in 'boot'.
data RefererStream = RefererStream { control :: Chan Request}

-- | Messages understood by the worker: record one referral, or hand the
-- accumulated table back through the supplied 'MVar'.
data Request = AddReferer { view_url :: String, referring_url :: String }
             | GetReferers { handback :: MVar Referers }
-- | Start the referer service: spawn the worker loop over an empty table
-- and hand back the stream used to talk to it.
boot :: IO RefererStream
boot = do
  c <- newChan
  let rs = RefererStream c
  _ <- forkIO (referer_loop rs empty_referers)
  return rs
-- | Referer table: view URL -> (referring URL -> hit count).
type Referers = DM.Map String (DM.Map String Int)

-- | Logger name used by 'referer_loop'.
log_handle :: String
log_handle = "RefererStream"

-- | The empty referer table.
empty_referers :: Referers
empty_referers = DM.empty
-- | Asynchronously record that @e@ referred a visitor to @view@.
send_referer :: (V.Viewable v) => RefererStream -> v -> String -> IO ()
send_referer rs view e =
  writeChan (control rs) (AddReferer (V.url view) e)
-- | Fetch the referring-URL counts recorded for the given view; an empty
-- map if nothing has been recorded for it yet.
get_referers :: (V.Viewable v) => RefererStream -> v -> IO (DM.Map String Int)
get_referers rs v = do
  h <- newEmptyMVar
  writeChan (control rs) (GetReferers h)
  r <- takeMVar h
  return (DM.findWithDefault DM.empty (V.url v) r)
-- | Fold one request into the referer table.
--
-- Referrals that originate from the blog itself are ignored.  Requests
-- other than 'AddReferer' leave the table unchanged: the original
-- definition was partial and crashed on 'GetReferers'.  The previous
-- member/adjust/insert dance (using the deprecated @insertWith'@) is
-- collapsed into a single 'DM.insertWith' with 'DM.unionWith', which is
-- equivalent: a fresh key gets @singleton r 1@, an existing key has the
-- inner count for @r@ incremented.
add_referer :: Request -> Referers -> Referers
add_referer (AddReferer v r) m
  | C.blog_root `isPrefixOf` r = m
  | otherwise = DM.insertWith (DM.unionWith (+)) v (DM.singleton r 1) m
add_referer _ m = m
-- | Worker loop: fold 'AddReferer' requests into the table and answer
-- 'GetReferers' queries with the current table.
--
-- NOTE(review): this logs the whole accumulated table (@r@), not the
-- incoming request -- presumably a debugging aid; confirm intent.
referer_loop :: RefererStream -> Referers -> IO ()
referer_loop rs r = do { req <- readChan . control $ rs
                       ; L.infoM log_handle $ show r
                       ; case req of
                           AddReferer _ _ ->
                               referer_loop rs $ add_referer req r
                           GetReferers h ->
                               putMVar h r >> referer_loop rs r }
-- | DDL for the on-disk referer table.
create_referrers_tables_sql :: String
create_referrers_tables_sql =
    concat
      [ "CREATE TABLE referers ( permatitle TEXT PRIMARY KEY NOT NULL, "
      , "referring_uri TEXT NOT NULL, "
      , "first_hit INTEGER NOT NULL, "
      , "most_recent_hit INTEGER NOT NULL, "
      , "count INTEGER NOT NULL DEFAULT 0 )"
      ]
|
prb/perpubplat
|
src/Blog/BackEnd/RefererStream.hs
|
Haskell
|
bsd-3-clause
| 2,355
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE NoForeignFunctionInterface #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-- |
-- Module : Data.Array.Accelerate.CUDA.Execute
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2009..2014] Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Trevor L. McDonell <tmcdonell@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.CUDA.Execute (
-- * Execute a computation under a CUDA environment
executeAcc, executeAfun1
) where
-- friends
import Data.Array.Accelerate.CUDA.AST
import Data.Array.Accelerate.CUDA.State
import Data.Array.Accelerate.CUDA.FullList ( FullList(..), List(..) )
import Data.Array.Accelerate.CUDA.Array.Data
import Data.Array.Accelerate.CUDA.Array.Sugar
import Data.Array.Accelerate.CUDA.Foreign.Import ( canExecuteAcc )
import Data.Array.Accelerate.CUDA.CodeGen.Base ( Name, namesOfArray, groupOfInt )
import Data.Array.Accelerate.CUDA.Execute.Event ( Event )
import Data.Array.Accelerate.CUDA.Execute.Stream ( Stream )
import qualified Data.Array.Accelerate.CUDA.Array.Prim as Prim
import qualified Data.Array.Accelerate.CUDA.Debug as D
import qualified Data.Array.Accelerate.CUDA.Execute.Event as Event
import qualified Data.Array.Accelerate.CUDA.Execute.Stream as Stream
import Data.Array.Accelerate.Error
import Data.Array.Accelerate.Tuple
import Data.Array.Accelerate.Interpreter ( evalPrim, evalPrimConst, evalPrj )
import Data.Array.Accelerate.Array.Data ( ArrayElt, ArrayData )
import Data.Array.Accelerate.Array.Representation ( SliceIndex(..) )
import qualified Data.Array.Accelerate.Array.Representation as R
-- standard library
import Prelude hiding ( exp, sum, iterate )
import Control.Applicative hiding ( Const )
import Control.Monad ( join, when, liftM )
import Control.Monad.Reader ( asks )
import Control.Monad.State ( gets )
import Control.Monad.Trans ( MonadIO, liftIO )
import System.IO.Unsafe ( unsafeInterleaveIO )
import Data.Int
import Data.Word
import Data.Maybe
import Foreign.CUDA.Analysis.Device ( computeCapability, Compute(..) )
import qualified Foreign.CUDA.Driver as CUDA
import qualified Data.HashMap.Strict as Map
-- Asynchronous kernel execution
-- -----------------------------
-- Arrays with an associated CUDA Event that will be signalled once the
-- computation has completed.
--
data Async a = Async {-# UNPACK #-} !Event !a

-- Valuation for an environment of asynchronous array computations
--
data Aval env where
  Aempty :: Aval ()
  Apush  :: Aval env -> Async t -> Aval (env, t)

-- Projection of a value from a valuation using a de Bruijn index.
-- Total over well-typed indices; the catch-all is unreachable by
-- construction and reported as an internal error.
--
aprj :: Idx env t -> Aval env -> Async t
aprj ZeroIdx       (Apush _   x) = x
aprj (SuccIdx idx) (Apush val _) = aprj idx val
aprj _             _             = $internalError "aprj" "inconsistent valuation"
-- All work submitted to the given stream will occur after the asynchronous
-- event for the given array has been fulfilled. Synchronisation is performed
-- efficiently on the device. This function returns immediately.
--
after :: MonadIO m => Stream -> Async a -> m a
after stream (Async event arr) = liftIO $ Event.after event stream >> return arr

-- Block the calling thread until the event for the given array computation
-- is recorded.
--
wait :: MonadIO m => Async a -> m a
wait (Async e x) = liftIO $ Event.block e >> return x

-- Execute the given computation in a unique execution stream.
-- The continuation receives the result paired with an event, so later work
-- can synchronise against the stream's completion.
--
streaming :: (Stream -> CIO a) -> (Async a -> CIO b) -> CIO b
streaming first second = do
  context   <- asks activeContext
  reservoir <- gets streamReservoir
  Stream.streaming context reservoir first (\e a -> second (Async e a))
-- Array expression evaluation
-- ---------------------------
-- Computations are evaluated by traversing the AST bottom-up, and for each node
-- distinguishing between three cases:
--
-- 1. If it is a Use node, return a reference to the device memory holding the
-- array data
--
-- 2. If it is a non-skeleton node, such as a let-binding or shape conversion,
-- this is executed directly by updating the environment or similar
--
-- 3. If it is a skeleton node, the associated binary object is retrieved,
-- memory allocated for the result, and the kernel(s) that implement the
-- skeleton are invoked
--
-- Run a closed array computation in a fresh stream, then block until the
-- result is available.
executeAcc :: Arrays a => ExecAcc a -> CIO a
executeAcc !acc = streaming (executeOpenAcc acc Aempty) wait

-- Apply a unary array function: upload the argument arrays in their own
-- stream, then run the function body gated on the upload's event.
executeAfun1 :: (Arrays a, Arrays b) => ExecAfun (a -> b) -> a -> CIO b
executeAfun1 !afun !arrs = do
  streaming (useArrays (arrays arrs) (fromArr arrs))
            (\(Async event ()) -> executeOpenAfun1 afun Aempty (Async event arrs))
  where
    -- Walk the representation of the argument tuple, issuing an async
    -- upload for every array leaf.
    useArrays :: ArraysR arrs -> arrs -> Stream -> CIO ()
    useArrays ArraysRunit         ()       _  = return ()
    useArrays (ArraysRpair r1 r0) (a1, a0) st = useArrays r1 a1 st >> useArrays r0 a0 st
    useArrays ArraysRarray        arr      st = useArrayAsync arr (Just st)

executeOpenAfun1 :: PreOpenAfun ExecOpenAcc aenv (a -> b) -> Aval aenv -> Async a -> CIO b
executeOpenAfun1 (Alam (Abody f)) aenv x = streaming (executeOpenAcc f (aenv `Apush` x)) wait
executeOpenAfun1 _                _    _ = error "the sword comes out after you swallow it, right?"
-- Evaluate an open array computation
--
-- 'kernel' is the principal compiled kernel for this node; 'more' carries
-- any additional variants (multi-block reduction phases, stencil
-- specialisations) that some of the skeletons below pattern-match on.
--
executeOpenAcc
    :: forall aenv arrs.
       ExecOpenAcc aenv arrs
    -> Aval aenv
    -> Stream
    -> CIO arrs
executeOpenAcc EmbedAcc{} _ _
  = $internalError "execute" "unexpected delayed array"
executeOpenAcc (ExecAcc (FL () kernel more) !gamma !pacc) !aenv !stream
  = case pacc of

      -- Array introduction
      Use arr                   -> return (toArr arr)
      Unit x                    -> newArray Z . const =<< travE x

      -- Environment manipulation
      Avar ix                   -> after stream (aprj ix aenv)
      Alet bnd body             -> streaming (executeOpenAcc bnd aenv) (\x -> executeOpenAcc body (aenv `Apush` x) stream)
      Apply f a                 -> streaming (executeOpenAcc a aenv) (executeOpenAfun1 f aenv)
      Atuple tup                -> toTuple <$> travT tup
      Aprj ix tup               -> evalPrj ix . fromTuple <$> travA tup
      Acond p t e               -> travE p >>= \x -> if x then travA t else travA e
      Awhile p f a              -> awhile p f =<< travA a

      -- Foreign
      Aforeign ff afun a        -> fromMaybe (executeAfun1 afun) (canExecuteAcc ff) =<< travA a

      -- Producers
      Map _ a                   -> executeOp =<< extent a
      Generate sh _             -> executeOp =<< travE sh
      Transform sh _ _ _        -> executeOp =<< travE sh
      Backpermute sh _ _        -> executeOp =<< travE sh
      Reshape sh a              -> reshapeOp <$> travE sh <*> travA a

      -- Consumers
      Fold _ _ a                -> foldOp =<< extent a
      Fold1 _ a                 -> fold1Op =<< extent a
      FoldSeg _ _ a s           -> join $ foldSegOp <$> extent a <*> extent s
      Fold1Seg _ a s            -> join $ foldSegOp <$> extent a <*> extent s
      Scanl1 _ a                -> scan1Op =<< extent a
      Scanr1 _ a                -> scan1Op =<< extent a
      Scanl' _ _ a              -> scan'Op =<< extent a
      Scanr' _ _ a              -> scan'Op =<< extent a
      Scanl _ _ a               -> scanOp True =<< extent a
      Scanr _ _ a               -> scanOp False =<< extent a
      Permute _ d _ a           -> join $ permuteOp <$> extent a <*> travA d
      Stencil _ _ a             -> stencilOp =<< travA a
      Stencil2 _ _ a1 _ a2      -> join $ stencil2Op <$> travA a1 <*> travA a2

      -- Removed by fusion
      Replicate _ _ _           -> fusionError
      Slice _ _ _               -> fusionError
      ZipWith _ _ _             -> fusionError

  where
    fusionError = $internalError "executeOpenAcc" "unexpected fusible matter"

    -- term traversals
    travA :: ExecOpenAcc aenv a -> CIO a
    travA !acc = executeOpenAcc acc aenv stream

    travE :: ExecExp aenv t -> CIO t
    travE !exp = executeExp exp aenv stream

    travT :: Atuple (ExecOpenAcc aenv) t -> CIO t
    travT NilAtup          = return ()
    travT (SnocAtup !t !a) = (,) <$> travT t <*> travA a

    awhile :: PreOpenAfun ExecOpenAcc aenv (a -> Scalar Bool) -> PreOpenAfun ExecOpenAcc aenv (a -> a) -> a -> CIO a
    awhile p f a = do
      nop <- liftIO Event.create -- record event never call, so this is a functional no-op
      r   <- executeOpenAfun1 p aenv (Async nop a)
      ok  <- indexArray r 0 -- TLM TODO: memory manager should remember what is already on the host
      if ok then awhile p f =<< executeOpenAfun1 f aenv (Async nop a)
            else return a

    -- get the extent of an embedded array
    extent :: Shape sh => ExecOpenAcc aenv (Array sh e) -> CIO sh
    extent ExecAcc{}     = $internalError "executeOpenAcc" "expected delayed array"
    extent (EmbedAcc sh) = travE sh

    -- Skeleton implementation
    -- -----------------------

    -- Execute a skeleton that has no special requirements: thread decomposition
    -- is based on the given shape.
    --
    executeOp :: (Shape sh, Elt e) => sh -> CIO (Array sh e)
    executeOp !sh = do
      out <- allocateArray sh
      execute kernel gamma aenv (size sh) out stream
      return out

    -- Change the shape of an array without altering its contents. This does not
    -- execute any kernel programs.
    --
    reshapeOp :: Shape sh => sh -> Array sh' e -> Array sh e
    reshapeOp sh (Array sh' adata)
      = $boundsCheck "reshape" "shape mismatch" (size sh == R.size sh')
      $ Array (fromElt sh) adata

    -- Executing fold operations depend on whether we are recursively collapsing
    -- to a single value using multiple thread blocks, or a multidimensional
    -- single-pass reduction where there is one block per inner dimension.
    --
    fold1Op :: (Shape sh, Elt e) => (sh :. Int) -> CIO (Array sh e)
    fold1Op !sh@(_ :. sz)
      = $boundsCheck "fold1" "empty array" (sz > 0)
      $ foldCore sh

    foldOp :: (Shape sh, Elt e) => (sh :. Int) -> CIO (Array sh e)
    foldOp !(!sh :. sz)
      = foldCore ((listToShape . map (max 1) . shapeToList $ sh) :. sz)

    foldCore :: (Shape sh, Elt e) => (sh :. Int) -> CIO (Array sh e)
    foldCore !(!sh :. sz)
      | dim sh > 0 = executeOp sh
      | otherwise
      = let !numElements     = size sh * sz
            (_,!numBlocks,_) = configure kernel numElements
        in do
          out <- allocateArray (sh :. numBlocks)
          execute kernel gamma aenv numElements out stream
          foldRec out

    -- Recursive step(s) of a multi-block reduction
    --
    foldRec :: (Shape sh, Elt e) => Array (sh:.Int) e -> CIO (Array sh e)
    foldRec arr@(Array _ !adata)
      | Cons _ rec _ <- more
      = let sh :. sz         = shape arr
            !numElements     = size sh * sz
            (_,!numBlocks,_) = configure rec numElements
        in if sz <= 1
              then return $ Array (fromElt sh) adata
              else do
                out <- allocateArray (sh :. numBlocks)
                execute rec gamma aenv numElements (out, arr) stream
                foldRec out
      | otherwise
      = $internalError "foldRec" "missing phase-2 kernel module"

    -- Segmented reduction. Subtract one from the size of the segments vector as
    -- this is the result of an exclusive scan to calculate segment offsets.
    --
    foldSegOp :: (Shape sh, Elt e) => (sh :. Int) -> (Z :. Int) -> CIO (Array (sh :. Int) e)
    foldSegOp (!sh :. _) !(Z :. sz) = executeOp (sh :. sz - 1)

    -- Scans, all variations on a theme.
    --
    scanOp :: Elt e => Bool -> (Z :. Int) -> CIO (Vector e)
    scanOp !left !(Z :. numElements) = do
      arr@(Array _ adata) <- allocateArray (Z :. numElements + 1)
      out                 <- devicePtrsOfArrayData adata
      let (!body, !sum)
            | left      = (out, advancePtrsOfArrayData adata numElements out)
            | otherwise = (advancePtrsOfArrayData adata 1 out, out)
      --
      scanCore numElements arr body sum
      return arr

    scan1Op :: forall e. Elt e => (Z :. Int) -> CIO (Vector e)
    scan1Op !(Z :. numElements) = do
      arr@(Array _ adata) <- allocateArray (Z :. numElements + 1) :: CIO (Vector e)
      body                <- devicePtrsOfArrayData adata
      let sum {- to fix type -} = advancePtrsOfArrayData adata numElements body
      --
      scanCore numElements arr body sum
      return (Array ((),numElements) adata)

    scan'Op :: forall e. Elt e => (Z :. Int) -> CIO (Vector e, Scalar e)
    scan'Op !(Z :. numElements) = do
      vec@(Array _ ad_vec) <- allocateArray (Z :. numElements) :: CIO (Vector e)
      sum@(Array _ ad_sum) <- allocateArray Z                  :: CIO (Scalar e)
      d_vec                <- devicePtrsOfArrayData ad_vec
      d_sum                <- devicePtrsOfArrayData ad_sum
      --
      scanCore numElements vec d_vec d_sum
      return (vec, sum)

    scanCore
        :: forall e. Elt e
        => Int
        -> Vector e -- to fix Elt vs. EltRepr
        -> Prim.DevicePtrs (EltRepr e)
        -> Prim.DevicePtrs (EltRepr e)
        -> CIO ()
    scanCore !numElements (Array _ !adata) !body !sum
      | Cons _ !upsweep1 (Cons _ !upsweep2 _) <- more
      = let (_,!numIntervals,_) = configure kernel numElements
            !d_body             = marshalDevicePtrs adata body
            !d_sum              = marshalDevicePtrs adata sum
        in do
          blk <- allocateArray (Z :. numIntervals) :: CIO (Vector e)
          -- Phase 1: Split the array over multiple thread blocks and calculate
          --          the final scan result from each interval.
          --
          when (numIntervals > 1) $ do
            execute upsweep1 gamma aenv numElements blk stream
            execute upsweep2 gamma aenv numIntervals (blk, blk, d_sum) stream
          -- Phase 2: Re-scan the input using the carry-in value from each
          --          interval sum calculated in phase 1.
          --
          execute kernel gamma aenv numElements (numElements, d_body, blk, d_sum) stream
      | otherwise
      = $internalError "scanOp" "missing multi-block kernel module(s)"

    -- Forward permutation
    --
    permuteOp :: forall sh sh' e. (Shape sh, Shape sh', Elt e) => sh -> Array sh' e -> CIO (Array sh' e)
    permuteOp !sh !dfs = do
      let sh' = shape dfs
          n'  = size sh'
      out           <- allocateArray sh'
      Array _ locks <- allocateArray sh'           :: CIO (Array sh' Int32)
      ((), d_locks) <- devicePtrsOfArrayData locks :: CIO ((), CUDA.DevicePtr Int32)
      liftIO $ CUDA.memsetAsync d_locks n' 0 (Just stream) -- TLM: overlap these two operations?
      copyArrayAsync dfs out (Just stream)
      execute kernel gamma aenv (size sh) (out, d_locks) stream
      return out

    -- Stencil operations. NOTE: the arguments to 'namesOfArray' must be the
    -- same as those given in the function 'mkStencil[2]'.
    --
    stencilOp :: forall sh a b. (Shape sh, Elt a, Elt b) => Array sh a -> CIO (Array sh b)
    stencilOp !arr = do
      let sh = shape arr
      out <- allocateArray sh
      dev <- asks deviceProperties
      if computeCapability dev < Compute 2 0
         then marshalAccTex (namesOfArray "Stencil" (undefined :: a)) kernel arr >>
              execute kernel gamma aenv (size sh) (out, sh) stream
         else execute kernel gamma aenv (size sh) (out, arr) stream
      --
      return out

    stencil2Op :: forall sh a b c. (Shape sh, Elt a, Elt b, Elt c)
               => Array sh a -> Array sh b -> CIO (Array sh c)
    stencil2Op !arr1 !arr2
      | Cons _ spec _ <- more
      = let sh1 = shape arr1
            sh2 = shape arr2
            (sh, op)
              | fromElt sh1 == fromElt sh2 = (sh1, spec)
              | otherwise                  = (sh1 `intersect` sh2, kernel)
        in do
          out <- allocateArray sh
          dev <- asks deviceProperties
          if computeCapability dev < Compute 2 0
             then marshalAccTex (namesOfArray "Stencil1" (undefined :: a)) op arr1 >>
                  marshalAccTex (namesOfArray "Stencil2" (undefined :: b)) op arr2 >>
                  execute op gamma aenv (size sh) (out, sh1, sh2) stream
             else execute op gamma aenv (size sh) (out, arr1, arr2) stream
          --
          return out
      | otherwise
      = $internalError "stencil2Op" "missing stencil specialisation kernel"
-- Scalar expression evaluation
-- ----------------------------

-- Evaluate a closed scalar expression in the given array environment.
executeExp :: ExecExp aenv t -> Aval aenv -> Stream -> CIO t
executeExp !exp !aenv !stream = executeOpenExp exp Empty aenv stream

-- Evaluate an open scalar expression by structural recursion over the AST.
executeOpenExp :: forall env aenv exp. ExecOpenExp env aenv exp -> Val env -> Aval aenv -> Stream -> CIO exp
executeOpenExp !rootExp !env !aenv !stream = travE rootExp
  where
    travE :: ExecOpenExp env aenv t -> CIO t
    travE exp = case exp of
      Var ix                -> return (prj ix env)
      Let bnd body          -> travE bnd >>= \x -> executeOpenExp body (env `Push` x) aenv stream
      Const c               -> return (toElt c)
      PrimConst c           -> return (evalPrimConst c)
      PrimApp f x           -> evalPrim f <$> travE x
      Tuple t               -> toTuple <$> travT t
      Prj ix e              -> evalPrj ix . fromTuple <$> travE e
      Cond p t e            -> travE p >>= \x -> if x then travE t else travE e
      While p f x           -> while p f =<< travE x
      IndexAny              -> return Any
      IndexNil              -> return Z
      IndexCons sh sz       -> (:.) <$> travE sh <*> travE sz
      IndexHead sh          -> (\(_ :. ix) -> ix) <$> travE sh
      IndexTail sh          -> (\(ix :. _) -> ix) <$> travE sh
      IndexSlice ix slix sh -> indexSlice ix <$> travE slix <*> travE sh
      IndexFull ix slix sl  -> indexFull ix <$> travE slix <*> travE sl
      ToIndex sh ix         -> toIndex <$> travE sh <*> travE ix
      FromIndex sh ix       -> fromIndex <$> travE sh <*> travE ix
      Intersect sh1 sh2     -> intersect <$> travE sh1 <*> travE sh2
      ShapeSize sh          -> size <$> travE sh
      Shape acc             -> shape <$> travA acc
      Index acc ix          -> join $ index <$> travA acc <*> travE ix
      LinearIndex acc ix    -> join $ indexArray <$> travA acc <*> travE ix
      Foreign _ f x         -> foreign f x

    -- Helpers
    -- -------

    travT :: Tuple (ExecOpenExp env aenv) t -> CIO t
    travT tup = case tup of
      NilTup        -> return ()
      SnocTup !t !e -> (,) <$> travT t <*> travE e

    travA :: ExecOpenAcc aenv a -> CIO a
    travA !acc = executeOpenAcc acc aenv stream

    -- Foreign scalar functions are closed, hence the empty environments.
    foreign :: ExecFun () (a -> b) -> ExecOpenExp env aenv a -> CIO b
    foreign (Lam (Body f)) x = travE x >>= \e -> executeOpenExp f (Empty `Push` e) Aempty stream
    foreign _              _ = error "I bless the rains down in Africa"

    travF1 :: ExecOpenFun env aenv (a -> b) -> a -> CIO b
    travF1 (Lam (Body f)) x = executeOpenExp f (env `Push` x) aenv stream
    travF1 _              _ = error "Gonna take some time to do the things we never have"

    while :: ExecOpenFun env aenv (a -> Bool) -> ExecOpenFun env aenv (a -> a) -> a -> CIO a
    while !p !f !x = do
      ok <- travF1 p x
      if ok then while p f =<< travF1 f x
            else return x

    indexSlice :: (Elt slix, Elt sh, Elt sl)
               => SliceIndex (EltRepr slix) (EltRepr sl) co (EltRepr sh)
               -> slix
               -> sh
               -> sl
    indexSlice !ix !slix !sh = toElt $! restrict ix (fromElt slix) (fromElt sh)
      where
        restrict :: SliceIndex slix sl co sh -> slix -> sh -> sl
        restrict SliceNil              ()        ()       = ()
        restrict (SliceAll   sliceIdx) (slx, ()) (sl, sz) = (restrict sliceIdx slx sl, sz)
        restrict (SliceFixed sliceIdx) (slx,  _) (sl,  _) = restrict sliceIdx slx sl

    indexFull :: (Elt slix, Elt sh, Elt sl)
              => SliceIndex (EltRepr slix) (EltRepr sl) co (EltRepr sh)
              -> slix
              -> sl
              -> sh
    indexFull !ix !slix !sl = toElt $! extend ix (fromElt slix) (fromElt sl)
      where
        extend :: SliceIndex slix sl co sh -> slix -> sl -> sh
        extend SliceNil              ()        ()       = ()
        extend (SliceAll   sliceIdx) (slx, ()) (sh, sz) = (extend sliceIdx slx sh, sz)
        extend (SliceFixed sliceIdx) (slx, sz) sh       = (extend sliceIdx slx sh, sz)

    index :: (Shape sh, Elt e) => Array sh e -> sh -> CIO e
    index !arr !ix = indexArray arr (toIndex (shape arr) ix)
-- Marshalling data
-- ----------------

-- Data which can be marshalled as function arguments to a kernel invocation.
--
class Marshalable a where
  marshal :: a -> CIO [CUDA.FunParam]

instance Marshalable () where
  marshal () = return []

instance Marshalable CUDA.FunParam where
  marshal !x = return [x]

instance ArrayElt e => Marshalable (ArrayData e) where
  marshal !ad = marshalArrayData ad

-- Shape components are reversed before marshalling -- presumably to match
-- the dimension order the generated kernels expect; confirm against CodeGen.
instance Shape sh => Marshalable sh where
  marshal !sh = marshal (reverse (shapeToList sh))

instance Marshalable a => Marshalable [a] where
  marshal = concatMapM marshal

-- An array is marshalled as its shape followed by its payload.
instance (Marshalable sh, Elt e) => Marshalable (Array sh e) where
  marshal !(Array sh ad) = (++) <$> marshal (toElt sh :: sh) <*> marshal ad

instance (Marshalable a, Marshalable b) => Marshalable (a, b) where
  marshal (!a, !b) = (++) <$> marshal a <*> marshal b

instance (Marshalable a, Marshalable b, Marshalable c) => Marshalable (a, b, c) where
  marshal (!a, !b, !c)
    = concat <$> sequence [marshal a, marshal b, marshal c]

instance (Marshalable a, Marshalable b, Marshalable c, Marshalable d)
    => Marshalable (a, b, c, d) where
  marshal (!a, !b, !c, !d)
    = concat <$> sequence [marshal a, marshal b, marshal c, marshal d]
#define primMarshalable(ty) \
instance Marshalable (ty) where { \
marshal !x = return [CUDA.VArg x] }
primMarshalable(Int)
primMarshalable(Int8)
primMarshalable(Int16)
primMarshalable(Int32)
primMarshalable(Int64)
primMarshalable(Word)
primMarshalable(Word8)
primMarshalable(Word16)
primMarshalable(Word32)
primMarshalable(Word64)
primMarshalable(Float)
primMarshalable(Double)
primMarshalable(CUDA.DevicePtr a)
-- Note [Array references in scalar code]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- All CUDA devices have between 6-8KB of read-only texture memory per
-- multiprocessor. Since all arrays in Accelerate are immutable, we can always
-- access input arrays through the texture cache to reduce global memory demand
-- when accesses do not follow the regular patterns required for coalescing.
--
-- This is great for older 1.x series devices, but newer devices have a
-- dedicated L2 cache (device dependent, 256KB-1.5MB), as well as a configurable
-- L1 cache combined with shared memory (16-48KB).
--
-- For older 1.x series devices, we pass free array variables as texture
-- references, but for new devices we pass them as standard array arguments so
-- as to use the larger available caches.
--
-- Bind every free array of the kernel as texture references (the pre-Fermi
-- path described in Note [Array references in scalar code]) and marshal
-- each array's shape as an ordinary argument.
marshalAccEnvTex :: AccKernel a -> Aval aenv -> Gamma aenv -> Stream -> CIO [CUDA.FunParam]
marshalAccEnvTex !kernel !aenv (Gamma !gamma) !stream
  = flip concatMapM (Map.toList gamma)
  $ \(Idx_ !(idx :: Idx aenv (Array sh e)), i) ->
      do arr <- after stream (aprj idx aenv)
         marshalAccTex (namesOfArray (groupOfInt i) (undefined :: e)) kernel arr
         marshal (shape arr)

-- Bind one array's payload to the named texture references of the module.
marshalAccTex :: (Name,[Name]) -> AccKernel a -> Array sh e -> CIO ()
marshalAccTex (_, !arrIn) (AccKernel _ _ !mdl _ _ _ _) (Array !sh !adata)
  = marshalTextureData adata (R.size sh) =<< liftIO (sequence' $ map (CUDA.getTex mdl) (reverse arrIn))

-- Marshal the free arrays as plain kernel arguments (Compute >= 2.0 path).
marshalAccEnvArg :: Aval aenv -> Gamma aenv -> Stream -> CIO [CUDA.FunParam]
marshalAccEnvArg !aenv (Gamma !gamma) !stream
  = concatMapM (\(Idx_ !idx) -> marshal =<< after stream (aprj idx aenv)) (Map.keys gamma)
-- A lazier version of 'Control.Monad.sequence': each tail of the result
-- list is produced on demand via 'unsafeInterleaveIO' instead of eagerly.
--
sequence' :: [IO a] -> IO [a]
sequence' []     = return []
sequence' (m:ms) = do
  x  <- m
  xs <- unsafeInterleaveIO (sequence' ms)
  return (x : xs)
-- Generalise concatMap for teh monadz
--
concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b]
concatMapM f xs = do
  yss <- mapM f xs
  return (concat yss)
-- Kernel execution
-- ----------------

-- What launch parameters should we use to execute the kernel with a number of
-- array elements?
--
-- Returns (threads per block, grid size for @n@ elements, shared memory per
-- block), in the order 'launch' expects.
configure :: AccKernel a -> Int -> (Int, Int, Int)
configure (AccKernel _ _ _ _ !cta !smem !grid) !n = (cta, grid n, smem)
-- Marshal the kernel arguments. For older 1.x devices this binds free arrays to
-- texture references, and for newer devices adds the parameters to the front of
-- the argument list
--
arguments :: Marshalable args
          => AccKernel a
          -> Aval aenv
          -> Gamma aenv
          -> args
          -> Stream
          -> CIO [CUDA.FunParam]
arguments !kernel !aenv !gamma !a !stream = do
  dev <- asks deviceProperties
  -- Strategy selection by compute capability; see
  -- Note [Array references in scalar code].
  let marshaller | computeCapability dev < Compute 2 0 = marshalAccEnvTex kernel
                 | otherwise                           = marshalAccEnvArg
  --
  (++) <$> marshaller aenv gamma stream <*> marshal a
-- Link the binary object implementing the computation, configure the kernel
-- launch parameters, and initiate the computation. This also handles lifting
-- and binding of array references from scalar expressions.
--
-- | Link the binary object implementing the computation, configure the
-- kernel launch parameters, and initiate the computation.  This also
-- handles lifting and binding of array references from scalar expressions.
execute :: Marshalable args
        => AccKernel a          -- The binary module implementing this kernel
        -> Gamma aenv           -- variables of arrays embedded in scalar expressions
        -> Aval aenv            -- the environment
        -> Int                  -- a "size" parameter, typically number of elements in the output
        -> args                 -- arguments to marshal to the kernel function
        -> Stream               -- Compute stream to execute in
        -> CIO ()
execute !kernel !gamma !aenv !n !a !stream =
  arguments kernel aenv gamma a stream >>= \params ->
    launch kernel (configure kernel n) params stream
-- Execute a device function, with the given thread configuration and function
-- parameters. The tuple contains (threads per block, grid size, shared memory)
--
-- | Execute a device function with the given thread configuration and
-- function parameters.  The configuration tuple is (threads per block,
-- grid size, dynamic shared memory in bytes), as produced by 'configure'.
launch :: AccKernel a -> (Int,Int,Int) -> [CUDA.FunParam] -> Stream -> CIO ()
launch (AccKernel entry !fn _ _ _ _ _) !(cta, grid, smem) !args !stream
  = D.timed D.dump_exec msg (Just stream)
  $ liftIO $ CUDA.launchKernel fn (grid,1,1) (cta,1,1) smem (Just stream) args
  where
    -- Trace message, e.g. "exec: foo<<< g, c, s >>> ..."
    msg gpuTime cpuTime = concat
      [ "exec: ", entry, "<<< "
      , show grid, ", ", show cta, ", ", show smem
      , " >>> ", D.elapsed gpuTime cpuTime
      ]
|
kumasento/accelerate-cuda
|
Data/Array/Accelerate/CUDA/Execute.hs
|
Haskell
|
bsd-3-clause
| 28,128
|
-- | Bot that spits out core.
module Corebot where
|
chrisdone/corebot
|
src/Corebot.hs
|
Haskell
|
bsd-3-clause
| 52
|
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
-- | A demo of annotations
module Text.PrettyPrint.Final.Demos.ListDemo () where
import Control.Monad
import Control.Applicative
import Control.Monad.Identity
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.State
import Control.Monad.RWS
import Data.List
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as T
import System.Console.ANSI
import Text.PrettyPrint.Final
import Text.PrettyPrint.Final.Rendering.Console
-- Constructor names or built-in syntax
data HsAnn = Ctor | Stx
  deriving (Eq, Ord, Show)

-- | Default pretty-printing environment: 80-column page, 60-column ribbon,
-- break-mode layout, and no formatting or annotation mapping in effect.
env0 :: Monoid fmt => PEnv Int a fmt
env0 = PEnv
  { maxWidth = 80
  , maxRibbon = 60
  , layout = Break
  , failure = CantFail
  , nesting = 0
  , formatting = mempty
  , formatAnn = const mempty
  }

-- | Initial pretty-printer state: no output on the current line.
state0 :: PState Int ()
state0 = PState
  { curLine = []
  }
-- For plain text pretty printing
-- | The document monad: RWST over 'Maybe', reading a 'PEnv', accumulating
-- a 'POut', and threading a 'PState'.  'Maybe' provides failure.
newtype DocM a = DocM { unDocM :: RWST (PEnv Int HsAnn ()) (POut Int HsAnn) (PState Int ()) Maybe a }
  deriving
    ( Functor, Applicative, Monad
    , MonadReader (PEnv Int HsAnn ()), MonadWriter (POut Int HsAnn), MonadState (PState Int ()), Alternative
    )

instance MonadPretty Int HsAnn () DocM

-- String literals denote literal document text.
instance IsString (DocM ()) where
  fromString = text . fromString
-- | Run a document computation with the given environment and initial
-- state, reordering the RWST result to (final state, output, value).
runDocM :: PEnv Int HsAnn () -> PState Int () -> DocM a -> Maybe (PState Int (), POut Int HsAnn, a)
runDocM e s d = fmap reorder (runRWST (unDocM d) e s)
  where
    reorder (a, s', o) = (s', o, a)
-- | Render a document to output, producing an error atom if the
-- underlying 'Maybe' computation fails.
execDoc :: Doc -> POut Int HsAnn
execDoc d =
  case runDocM env0 state0 d of
    Nothing         -> PAtom $ AChunk $ CText "<internal pretty printing error>"
    Just (_, o, ()) -> o
-- | A document is a unit-valued document computation.
type Doc = DocM ()

-- Document sequencing is concatenation.
instance Monoid Doc where
  mempty = return ()
  mappend = (>>)

class Pretty a where
  pretty :: a -> Doc

instance Pretty Doc where
  pretty = id

-- Delegates to the pure measurement of the Identity-based printer.
instance Measure Int () DocM where
  measure = return . runIdentity . measure

-- Text renders via 'show', annotated as a constructor.
instance Pretty Text where
  pretty = annotate Ctor . text . T.pack . show

-- Lists render as bracketed, comma-separated collections with the
-- brackets and separators annotated as syntax.
instance (Pretty a) => Pretty [a] where
  pretty = collection (annotate Stx "[") (annotate Stx "]") (annotate Stx ",") . map pretty
-- | Console rendering for each annotation: both are bold; constructors
-- are vivid red, syntax is vivid black.
toSGR :: HsAnn -> [SGR]
toSGR ann = case ann of
  Ctor -> [SetConsoleIntensity BoldIntensity, SetColor Foreground Vivid Red]
  Stx  -> [SetConsoleIntensity BoldIntensity, SetColor Foreground Vivid Black]
-- | Re-apply the console style implied by the current annotation stack
-- (outermost annotation first, hence the 'reverse').
updateColor :: forall ann . StateT [HsAnn] IO ()
updateColor = do
  anns <- get
  lift (setSGR (mconcat (map toSGR (reverse anns))))
-- | Push an annotation onto the stack and refresh the console style.
openTag :: HsAnn -> StateT [HsAnn] IO ()
openTag ann = modify (ann:) >> updateColor

-- | Pop the innermost annotation and refresh the console style.
closeTag :: HsAnn -> StateT [HsAnn] IO ()
closeTag _ = modify tail >> updateColor

-- | Bracket a rendering action with open/close of its annotation.
renderAnnotation :: HsAnn -> StateT [HsAnn] IO () -> StateT [HsAnn] IO ()
renderAnnotation a o = openTag a >> o >> closeTag a

-- | Render a document to the console with colour annotations.
dumpList :: Doc -> IO ()
dumpList = dumpDoc toSGR renderAnnotation . execDoc
---------------
-- Test docs --
---------------

-- A small example that fits within the page width.
shortList :: [[Text]]
shortList = [["a", "b", "c"], [], ["longer"]]

-- A larger example that forces line breaking.
longList :: [[Text]]
longList = [map (T.pack . show) [1..10], [], map (T.pack . flip replicate 'a') [1..10]]
-- To try, eval dumpDoc (pretty shortList) or dumpDoc (pretty longList) in console GHCI
|
david-christiansen/final-pretty-printer
|
Text/PrettyPrint/Final/Demos/ListDemo.hs
|
Haskell
|
mit
| 3,428
|
{-# LANGUAGE NoMonomorphismRestriction, FlexibleContexts, DeriveDataTypeable #-}
-- | Diagrams-AST provides a data oriented interface to the <http://hackage.haskell.org/package/diagrams> package.
module Diagrams.AST (
-- Functions
outputImage,
runImage,
getAngleFraction,
getAngleRadians,
getAngleDegrees,
-- Data Types
Image (..),
Modifier (..),
Images (..),
Path (..),
Shape (..),
ColorData (..),
Fill (..),
Alignment (..),
Angle (..)
)
where
-- Diagram Imports
import qualified Diagrams.Prelude as D
import qualified Diagrams.Path as P
import qualified Diagrams.TwoD.Path as P2
import qualified Diagrams.TwoD.Arc as A
import qualified Diagrams.TwoD.Align as L
import qualified Diagrams.Backend.Cairo as C
import qualified Diagrams.TwoD.Text as T
import qualified Graphics.Rendering.Diagrams.Points as P3
import Diagrams.Prelude ((|||), (===))
-- Data Imports
import Data.Monoid
import Data.List (foldl')
-- Meta
import Data.Generics.Uniplate.Data
import Data.Data
--- Data Types

-- | The scene description: empty, a primitive shape, a modified image,
-- or a combination of images.
data Image = Blank
           | Shape Shape
           | Modifier Modifier Image
           | Images Images deriving (Show, Eq, Ord, Data, Typeable)

-- | Attribute and transform modifiers applied to an 'Image'.
data Modifier = Foreground ColorData
              | LineColor ColorData
              | LineWidth Double
              | Dashing [Double] Double
              | Translate Double Double
              | Scale Double Double
              | Rotate Angle
              | Pad Double
              | Freeze
              | Origin
              | Align Alignment
              | Changes [Modifier] deriving (Show, Eq, Ord, Data, Typeable)

-- | Ways of combining images.
data Images = Atop Image Image
            | NextTo Image Image
            | Above Image Image
            | Layers [Image]
            | Horizontal [Image]
            | Vertical [Image] deriving (Show, Eq, Ord, Data, Typeable)

-- | Primitive shapes; circles and squares are unit-sized (see 'runShape').
data Shape = Text String | Circle | Square | Path Fill Path deriving (Show, Eq, Ord, Data, Typeable)

-- | Path descriptions: relative offsets, absolute points, or an arc
-- between two angles.
data Path = Offsets [(Double,Double)]
          | Points [(Double,Double)]
          | Arc Angle Angle deriving (Show, Eq, Ord, Data, Typeable)

data ColorData = RGBA Double Double Double Double -- ^ Red, Green, Blue, Alpha
               | RAA Double Angle Angle Double -- ^ Radius, Blue\/Green, (Blue\/Green)\/Red
  deriving (Show, Eq, Ord, Data, Typeable)

-- | Whether a path is closed before stroking.
data Fill = Closed | Open deriving (Show, Eq, Ord, Data, Typeable)

-- | Alignment of the origin of an 'Image'.
data Alignment = L -- ^ Left
               | R -- ^ Right
               | T -- ^ Top
               | B -- ^ Bottom
               | TL -- ^ Top-Left
               | TR -- ^ Top-Right
               | BL -- ^ Bottom-Left
               | BR -- ^ Bottom-Right
               | C -- ^ Center
               | CX -- ^ X-Centered
               | CY -- ^ Y-Centered
               | X Double -- ^ X-Proportion (Fraction -1 to 1)
               | Y Double -- ^ Y-Proportion (Fraction -1 to 1)
  deriving (Show, Eq, Ord, Data, Typeable)

-- | Angles are instances of Num. 'fromInteger' interprets its argument as a fraction of a full circle.
data Angle = Fraction Double | Radians Double | Degrees Double deriving (Show, Eq, Ord, Data, Typeable)
-- | Arithmetic on angles, working in the circle-fraction representation.
--
-- FIX: the original instance defined neither 'negate' nor '(-)'.  The
-- Prelude's default definitions are mutually recursive ('x - y = x +
-- negate y' and 'negate x = fromInteger 0 - x'), so evaluating either on
-- an 'Angle' looped forever.  Defining 'negate' explicitly also yields a
-- terminating '(-)' via its default.
instance Num Angle
  where
    fromInteger x = Fraction $ fromInteger x
    x + y = Fraction $ (getAngleFraction x) + (getAngleFraction y)
    x * y = Fraction $ (getAngleFraction x) * (getAngleFraction y)
    negate x = Fraction $ negate (getAngleFraction x)
    abs x = Fraction $ abs (getAngleFraction x)
    -- signum yields -1, 0 or 1 as a circle fraction.
    signum x
      | x' > 0 = 1
      | x' < 0 = -1
      | otherwise = 0
      where x' = getAngleFraction x
-- Conversion to and from the diagrams circle-fraction representation.
instance D.Angle Angle where
  toCircleFrac a = D.CircleFrac $ getAngleFraction a
  fromCircleFrac (D.CircleFrac d) = Fraction d
-- | 'getAngleFraction' returns the fraction of a full circle for any angle.
getAngleFraction :: Angle -> Double
getAngleFraction (Fraction x) = x
getAngleFraction (Radians x) = x / (2*pi)    -- full circle = 2*pi radians
getAngleFraction (Degrees x) = x / 360       -- full circle = 360 degrees
-- | Convert any 'Angle' to radians.
--
-- FIX: added the missing top-level type signatures ('getAngleFraction'
-- already fixes the result to 'Double', so these are not restrictive).
getAngleRadians :: Angle -> Double
getAngleRadians = (* 2) . (* pi) . getAngleFraction

-- | Convert any 'Angle' to degrees.
getAngleDegrees :: Angle -> Double
getAngleDegrees = (* 360) . getAngleFraction
--- Instances

-- RGBA passes through unchanged; RAA (spherical-style radius/angle/angle)
-- is converted to RGBA with the angles taken in radians.
instance D.Color ColorData where
  colorToRGBA (RGBA r g b a) = (r, g, b, a)
  colorToRGBA (RAA r g e a) = ( r * cos g' * cos e',
                                r * cos g' * sin e',
                                r * sin g',
                                a ) where [g', e'] = map getAngleRadians [g, e]
---- Run ADT Functions

-- | 'outputImage' renders a PNG to the file supplied.
outputImage :: String -> Int -> Int -> Image -> IO ()
outputImage name width height image = do
  -- Is a Result type in Cairo a pair a la State?
  -- No idea why I should need to call fst otherwise
  -- D.renderDia :: b -> Options b v -> AnnDiagram b v m -> Result b v
  fst $ D.renderDia C.Cairo (C.CairoOptions name (C.PNG (width, height))) (runImage image)
--- Main runner
{-
runImage :: (D.Renderable Diagrams.TwoD.Ellipse.Ellipse b, D.Renderable (P.Path D.R2) b, D.Backend b D.R2) =>
  Image -> D.Diagram b D.R2
-}
-- Interpret an 'Image' AST into a diagrams value by dispatching on the
-- constructor.  (Signature left inferred; see the commented one above.)
runImage (Shape s) = runShape s
runImage (Modifier m i) = runModifier m (runImage i)
runImage (Images c) = runCombiner c
runImage Blank = mempty
--- Internal runners

-- Combine sub-images according to the 'Images' constructor.
runCombiner (Atop l r) = runImage l `D.atop` runImage r
runCombiner (NextTo l r) = runImage l ||| runImage r
runCombiner (Above t b) = runImage t === runImage b
runCombiner (Layers l) = mconcat . map runImage $ l
runCombiner (Horizontal l) = D.hcat (map runImage l)
runCombiner (Vertical l) = D.vcat (map runImage l)

-- Primitive shapes; circles and squares are unit-sized.
runShape (Text s) = T.text s
runShape Circle = D.circle 1
runShape Square = D.square 1
runShape (Path Closed p) = P2.stroke $ P.close $ runPath p
runShape (Path Open p) = P2.stroke $ P.open $ runPath p

-- Map each 'Modifier' onto the corresponding diagrams combinator.
runModifier (Foreground c) = D.fillColor c
runModifier (LineColor c) = D.lineColor c
runModifier (LineWidth w) = D.lw w
runModifier (Dashing a w) = D.dashing a w
runModifier (Translate x y) = D.translate (x, y)
runModifier (Rotate a) = D.rotateBy (D.CircleFrac $ getAngleFraction a)
runModifier (Scale x y) = D.scaleX x . D.scaleY y
runModifier (Pad r) = D.pad r
runModifier (Align a) = runAlign a
-- 'Changes' applies the listed modifiers left-to-right.
runModifier (Changes l) = foldl' (flip (.)) id . map runModifier $ l
runModifier Origin = D.showOrigin
runModifier Freeze = D.freeze

-- Build a diagrams path from offsets, vertices, or an arc.
runPath (Offsets l) = P.fromOffsets l
runPath (Points l) = (P.fromVertices . map P3.P) l
runPath (Arc b e) = A.arc b e

-- Translate our 'Alignment' to the diagrams alignment combinators.
runAlign L = L.alignL
runAlign R = L.alignR
runAlign T = L.alignT
runAlign B = L.alignB
runAlign TL = L.alignTL
runAlign TR = L.alignTR
runAlign BL = L.alignBL
runAlign BR = L.alignBR
runAlign C = L.centerXY
runAlign CX = L.centerX
runAlign CY = L.centerY
runAlign (X x) = L.alignX x
runAlign (Y y) = L.alignY y
|
beni55/Diagrams-AST
|
src/Diagrams/AST.hs
|
Haskell
|
bsd-3-clause
| 7,010
|
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Control.Monad.ProofNames.Class(
MonadProofNames(..)
) where
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Language.Salt.Core.Syntax
-- | A monad class for names used by the proof checker.
class Monad m => MonadProofNames sym m where
  -- | The term representing the implies proposition.
  impliesProp :: m (Term sym sym)
  -- | The symbol for the name "premise", an argument to the implies
  -- function.
  premiseName :: m sym
  -- | The symbol for the name "consequence", an argument to the
  -- implies function.
  consequenceName :: m sym
-- Pass-through instances for the standard transformer stack: every
-- method simply lifts through the transformer.
instance MonadProofNames sym m => MonadProofNames sym (ReaderT s m) where
  impliesProp = lift impliesProp
  premiseName = lift premiseName
  consequenceName = lift consequenceName

instance MonadProofNames sym m => MonadProofNames sym (StateT s m) where
  impliesProp = lift impliesProp
  premiseName = lift premiseName
  consequenceName = lift consequenceName

-- WriterT requires a 'Monoid' log for its 'Monad' instance.
instance (Monoid s, MonadProofNames sym m) =>
         MonadProofNames sym (WriterT s m) where
  impliesProp = lift impliesProp
  premiseName = lift premiseName
  consequenceName = lift consequenceName
|
emc2/saltlang
|
src/salt/Control/Monad/ProofNames/Class.hs
|
Haskell
|
bsd-3-clause
| 2,847
|
{-# LANGUAGE GADTs, RecordWildCards, MagicHash, ScopedTypeVariables, CPP,
UnboxedTuples #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
-- |
-- Execute GHCi messages.
--
-- For details on Remote GHCi, see Note [Remote GHCi] in
-- compiler/ghci/GHCi.hs.
--
module GHCi.Run
( run, redirectInterrupts
) where
import Prelude -- See note [Why do we import Prelude here?]
import GHCi.CreateBCO
import GHCi.InfoTable
import GHCi.FFI
import GHCi.Message
import GHCi.ObjLink
import GHCi.RemoteTypes
import GHCi.TH
import GHCi.BreakArray
import GHCi.StaticPtrTable
import Control.Concurrent
import Control.DeepSeq
import Control.Exception
import Control.Monad
import Data.Binary
import Data.Binary.Get
import Data.ByteString (ByteString)
import qualified Data.ByteString.Unsafe as B
import GHC.Exts
import GHC.Exts.Heap
import GHC.Stack
import Foreign hiding (void)
import Foreign.C
import GHC.Conc.Sync
import GHC.IO hiding ( bracket )
import System.Mem.Weak ( deRefWeak )
import Unsafe.Coerce
-- -----------------------------------------------------------------------------
-- Implement messages

-- Revert all CAFs in the RTS to their unevaluated state.
foreign import ccall "revertCAFs" rts_revertCAFs :: IO ()
  -- Make it "safe", just in case
-- | Execute a single GHCi 'Message' and return its result.  This is the
-- main dispatch of the remote-GHCi server: linker operations, bytecode
-- creation, evaluation, and breakpoint management all pass through here.
run :: Message a -> IO a
run m = case m of
  InitLinker -> initObjLinker RetainCAFs
  RtsRevertCAFs -> rts_revertCAFs
  LookupSymbol str -> fmap toRemotePtr <$> lookupSymbol str
  LookupClosure str -> lookupClosure str
  LoadDLL str -> loadDLL str
  LoadArchive str -> loadArchive str
  LoadObj str -> loadObj str
  UnloadObj str -> unloadObj str
  AddLibrarySearchPath str -> toRemotePtr <$> addLibrarySearchPath str
  RemoveLibrarySearchPath ptr -> removeLibrarySearchPath (fromRemotePtr ptr)
  ResolveObjs -> resolveObjs
  FindSystemLibrary str -> findSystemLibrary str
  CreateBCOs bcos -> createBCOs (concatMap (runGet get) bcos)
  FreeHValueRefs rs -> mapM_ freeRemoteRef rs
  AddSptEntry fpr r -> localRef r >>= sptAddEntry fpr
  EvalStmt opts r -> evalStmt opts r
  ResumeStmt opts r -> resumeStmt opts r
  AbandonStmt r -> abandonStmt r
  EvalString r -> evalString r
  EvalStringToString r s -> evalStringToString r s
  EvalIO r -> evalIO r
  MkCostCentres mod ccs -> mkCostCentres mod ccs
  CostCentreStackInfo ptr -> ccsToStrings (fromRemotePtr ptr)
  -- Breakpoint management: break arrays live on this side; the client
  -- holds remote references to them.
  NewBreakArray sz -> mkRemoteRef =<< newBreakArray sz
  EnableBreakpoint ref ix b -> do
    arr <- localRef ref
    _ <- if b then setBreakOn arr ix else setBreakOff arr ix
    return ()
  BreakpointStatus ref ix -> do
    arr <- localRef ref; r <- getBreak arr ix
    case r of
      Nothing -> return False
      Just w -> return (w /= 0)
  GetBreakpointVar ref ix -> do
    aps <- localRef ref
    mapM mkRemoteRef =<< getIdValFromApStack aps ix
  MallocData bs -> mkString bs
  MallocStrings bss -> mapM mkString0 bss
  PrepFFI conv args res -> toRemotePtr <$> prepForeignCall conv args res
  FreeFFI p -> freeForeignCallInfo (fromRemotePtr p)
  MkConInfoTable ptrs nptrs tag ptrtag desc ->
    toRemotePtr <$> mkConInfoTable ptrs nptrs tag ptrtag desc
  StartTH -> startTH
  GetClosure ref -> do
    clos <- getClosureData =<< localRef ref
    mapM (\(Box x) -> mkRemoteRef (HValue x)) clos
  Seq ref -> tryEval (void $ evaluate =<< localRef ref)
  -- Remaining message types are not expected to reach this server.
  _other -> error "GHCi.Run.run"
-- | Evaluate a statement in the sandbox, returning remote references to
-- the values it binds.
evalStmt :: EvalOpts -> EvalExpr HValueRef -> IO (EvalStatus [HValueRef])
evalStmt opts expr = do
  io <- mkIO expr
  sandboxIO opts $ do
    rs <- unsafeCoerce io :: IO [HValue]
    mapM mkRemoteRef rs
  where
    -- Reassemble the IO action from the remote expression tree: either a
    -- direct reference, or an application of two sub-expressions.
    mkIO (EvalThis href) = localRef href
    mkIO (EvalApp l r) = do
      l' <- mkIO l
      r' <- mkIO r
      return ((unsafeCoerce l' :: HValue -> HValue) r')
-- | Run a remote @IO ()@ action, capturing any exception it raises.
evalIO :: HValueRef -> IO (EvalResult ())
evalIO r = localRef r >>= \io -> tryEval (unsafeCoerce io :: IO ())
-- | Run a remote @IO String@ action, fully forcing the result before
-- returning so that it can be serialised.
evalString :: HValueRef -> IO (EvalResult String)
evalString r = do
  act <- localRef r
  tryEval ((unsafeCoerce act :: IO String) >>= evaluate . force)
-- | Apply a remote @String -> IO String@ function to the given argument,
-- fully forcing the result before returning.
evalStringToString :: HValueRef -> String -> IO (EvalResult String)
evalStringToString r str = do
  fun <- localRef r
  tryEval ((unsafeCoerce fun :: String -> IO String) str >>= evaluate . force)
-- When running a computation, we redirect ^C exceptions to the running
-- thread.  ToDo: we might want a way to continue even if the target
-- thread doesn't die when it receives the exception... "this thread
-- is not responding".
--
-- Careful here: there may be ^C exceptions flying around, so we start the new
-- thread blocked (forkIO inherits mask from the parent, #1048), and unblock
-- only while we execute the user's code.  We can't afford to lose the final
-- putMVar, otherwise deadlock ensues. (#1583, #1922, #1946)

-- | Run a user computation under breakpoint handling and allocation
-- accounting, optionally in a separate (sandbox) thread.
sandboxIO :: EvalOpts -> IO a -> IO (EvalStatus a)
sandboxIO opts io = do
  -- We are running in uninterruptibleMask
  breakMVar <- newEmptyMVar
  statusMVar <- newEmptyMVar
  withBreakAction opts breakMVar statusMVar $ do
    let runIt = measureAlloc $ tryEval $ rethrow opts $ clearCCS io
    if useSandboxThread opts
      then do
        tid <- forkIO $ do unsafeUnmask runIt >>= putMVar statusMVar
                        -- empty: can't block
        redirectInterrupts tid $ unsafeUnmask $ takeMVar statusMVar
      else
        -- GLUT on OS X needs to run on the main thread. If you
        -- try to use it from another thread then you just get a
        -- white rectangle rendered. For this, or anything else
        -- with such restrictions, you can turn the GHCi sandbox off
        -- and things will be run in the main thread.
        --
        -- BUT, note that the debugging features (breakpoints,
        -- tracing, etc.) need the expression to be running in a
        -- separate thread, so debugging is only enabled when
        -- using the sandbox.
        runIt
-- We want to turn ^C into a break when -fbreak-on-exception is on,
-- but it's an async exception and we only break for sync exceptions.
-- Idea: if we catch and re-throw it, then the re-throw will trigger
-- a break.  Great - but we don't want to re-throw all exceptions, because
-- then we'll get a double break for ordinary sync exceptions (you'd have
-- to :continue twice, which looks strange).  So if the exception is
-- not "Interrupted", we unset the exception flag before throwing.
--
rethrow :: EvalOpts -> IO a -> IO a
rethrow EvalOpts{..} io =
  catch io $ \se -> do
    -- If -fbreak-on-error, we break unconditionally,
    --  but with care of not breaking twice
    if breakOnError && not breakOnException
      then poke exceptionFlag 1
      else case fromException se of
             -- If it is a "UserInterrupt" exception, we allow
             --  a possible break by way of -fbreak-on-exception
             Just UserInterrupt -> return ()
             -- In any other case, we don't want to break
             _ -> poke exceptionFlag 0
    throwIO se
--
-- While we're waiting for the sandbox thread to return a result, if
-- the current thread receives an asynchronous exception we re-throw
-- it at the sandbox thread and continue to wait.
--
-- This is for two reasons:
--
--  * So that ^C interrupts runStmt (e.g. in GHCi), allowing the
--    computation to run its exception handlers before returning the
--    exception result to the caller of runStmt.
--
--  * clients of the GHC API can terminate a runStmt in progress
--    without knowing the ThreadId of the sandbox thread (#1381)
--
-- NB. use a weak pointer to the thread, so that the thread can still
-- be considered deadlocked by the RTS and sent a BlockedIndefinitely
-- exception.  A symptom of getting this wrong is that conc033(ghci)
-- will hang.
--
redirectInterrupts :: ThreadId -> IO a -> IO a
redirectInterrupts target wait = do
  wtid <- mkWeakThreadId target
  wait `catch` \e -> do
    m <- deRefWeak wtid
    case m of
      -- Target already gone: just keep waiting.
      Nothing -> wait
      Just target -> do throwTo target (e :: SomeException); wait
-- | Run an evaluation and report how much the thread allocated, by
-- starting the allocation counter at 'maxBound' and reading how far it
-- counted down afterwards.
measureAlloc :: IO (EvalResult a) -> IO (EvalStatus a)
measureAlloc io = do
  setAllocationCounter maxBound
  a <- io
  ctr <- getAllocationCounter
  let allocs = fromIntegral (maxBound::Int64) - fromIntegral ctr
  return (EvalComplete allocs a)
-- Exceptions can't be marshaled because they're dynamically typed, so
-- everything becomes a String.
tryEval :: IO a -> IO (EvalResult a)
tryEval io =
  either (EvalException . toSerializableException) EvalSuccess <$> try io
-- This function sets up the interpreter for catching breakpoints, and
-- resets everything when the computation has stopped running.  This
-- is a not-very-good way to ensure that only the interactive
-- evaluation should generate breakpoints.
withBreakAction :: EvalOpts -> MVar () -> MVar (EvalStatus b) -> IO a -> IO a
withBreakAction opts breakMVar statusMVar act
  = bracket setBreakAction resetBreakAction (\_ -> act)
  where
    -- Install the breakpoint callback and raise the step/exception
    -- flags as requested by the options.
    setBreakAction = do
      stablePtr <- newStablePtr onBreak
      poke breakPointIOAction stablePtr
      when (breakOnException opts) $ poke exceptionFlag 1
      when (singleStep opts) $ setStepFlag
      return stablePtr

    -- Breaking on exceptions is not enabled by default, since it
    -- might be a bit surprising.  The exception flag is turned off
    -- as soon as it is hit, or in resetBreakAction below.

    -- Called by the RTS when a breakpoint fires: publish an EvalBreak
    -- status with a resume context, then block until resumed.
    onBreak :: BreakpointCallback
    onBreak ix# uniq# is_exception apStack = do
      tid <- myThreadId
      let resume = ResumeContext
            { resumeBreakMVar = breakMVar
            , resumeStatusMVar = statusMVar
            , resumeThreadId = tid }
      resume_r <- mkRemoteRef resume
      apStack_r <- mkRemoteRef apStack
      ccs <- toRemotePtr <$> getCCSOf apStack
      putMVar statusMVar $ EvalBreak is_exception apStack_r (I# ix#) (I# uniq#) resume_r ccs
      takeMVar breakMVar

    -- Restore the no-op callback, clear all flags, and free the
    -- stable pointer created in setBreakAction.
    resetBreakAction stablePtr = do
      poke breakPointIOAction noBreakStablePtr
      poke exceptionFlag 0
      resetStepFlag
      freeStablePtr stablePtr
-- | Resume a computation stopped at a breakpoint: wake the stopped
-- thread and wait for its next status, redirecting interrupts to it.
resumeStmt
  :: EvalOpts -> RemoteRef (ResumeContext [HValueRef])
  -> IO (EvalStatus [HValueRef])
resumeStmt opts hvref = do
  ResumeContext{..} <- localRef hvref
  withBreakAction opts resumeBreakMVar resumeStatusMVar $
    mask_ $ do
      putMVar resumeBreakMVar () -- this awakens the stopped thread...
      redirectInterrupts resumeThreadId $ takeMVar resumeStatusMVar

-- when abandoning a computation we have to
--  (a) kill the thread with an async exception, so that the
--      computation itself is stopped, and
--  (b) fill in the MVar.  This step is necessary because any
--      thunks that were under evaluation will now be updated
--      with the partial computation, which still ends in takeMVar,
--      so any attempt to evaluate one of these thunks will block
--      unless we fill in the MVar.
--  (c) wait for the thread to terminate by taking its status MVar.  This
--      step is necessary to prevent race conditions with
--      -fbreak-on-exception (see #5975).
-- See test break010.
abandonStmt :: RemoteRef (ResumeContext [HValueRef]) -> IO ()
abandonStmt hvref = do
  ResumeContext{..} <- localRef hvref
  killThread resumeThreadId      -- (a)
  putMVar resumeBreakMVar ()     -- (b)
  _ <- takeMVar resumeStatusMVar -- (c)
  return ()
-- RTS flags controlling single-stepping and break-on-exception.
foreign import ccall "&rts_stop_next_breakpoint" stepFlag :: Ptr CInt
foreign import ccall "&rts_stop_on_exception" exceptionFlag :: Ptr CInt

-- | Ask the RTS to stop at the next breakpoint.
setStepFlag :: IO ()
setStepFlag = poke stepFlag 1

-- | Clear the single-step request.
resetStepFlag :: IO ()
resetStepFlag = poke stepFlag 0
-- | The callback invoked by the RTS when a breakpoint fires.
type BreakpointCallback
     = Int#    -- the breakpoint index
    -> Int#    -- the module uniq
    -> Bool    -- exception?
    -> HValue  -- the AP_STACK, or exception
    -> IO ()

foreign import ccall "&rts_breakpoint_io_action"
  breakPointIOAction :: Ptr (StablePtr BreakpointCallback)

-- Default callback, installed whenever no break action is active.
noBreakStablePtr :: StablePtr BreakpointCallback
noBreakStablePtr = unsafePerformIO $ newStablePtr noBreakAction

noBreakAction :: BreakpointCallback
noBreakAction _ _ False _ = putStrLn "*** Ignoring breakpoint"
noBreakAction _ _ True _ = return () -- exception: just continue
-- Malloc and copy the bytes.  We don't have any way to monitor the
-- lifetime of this memory, so it just leaks.

-- | Copy a 'ByteString' into freshly malloc'd (leaked) memory.
mkString :: ByteString -> IO (RemotePtr ())
mkString bs = B.unsafeUseAsCStringLen bs $ \(cstr,len) -> do
  ptr <- mallocBytes len
  copyBytes ptr cstr len
  return (castRemotePtr (toRemotePtr ptr))

-- | Like 'mkString', but NUL-terminated.
mkString0 :: ByteString -> IO (RemotePtr ())
mkString0 bs = B.unsafeUseAsCStringLen bs $ \(cstr,len) -> do
  ptr <- mallocBytes (len+1)
  copyBytes ptr cstr len
  pokeElemOff (ptr :: Ptr CChar) len 0   -- terminating NUL
  return (castRemotePtr (toRemotePtr ptr))
-- | Create cost centres for the given module and (declaration, srcspan)
-- pairs.  Only meaningful in profiled builds; otherwise returns none.
mkCostCentres :: String -> [(String,String)] -> IO [RemotePtr CostCentre]
#if defined(PROFILING)
mkCostCentres mod ccs = do
  c_module <- newCString mod
  mapM (mk_one c_module) ccs
  where
    mk_one c_module (decl_path,srcspan) = do
      c_name <- newCString decl_path
      c_srcspan <- newCString srcspan
      toRemotePtr <$> c_mkCostCentre c_name c_module c_srcspan

foreign import ccall unsafe "mkCostCentre"
  c_mkCostCentre :: Ptr CChar -> Ptr CChar -> Ptr CChar -> IO (Ptr CostCentre)
#else
mkCostCentres _ _ = return []
#endif

-- | Fetch the value at the given depth in an AP_STACK closure, if any.
getIdValFromApStack :: HValue -> Int -> IO (Maybe HValue)
getIdValFromApStack apStack (I# stackDepth) = do
  case getApStackVal# apStack stackDepth of
    (# ok, result #) ->
      case ok of
        0# -> return Nothing -- AP_STACK not found
        _ -> return (Just (unsafeCoerce# result))
|
sdiehl/ghc
|
libraries/ghci/GHCi/Run.hs
|
Haskell
|
bsd-3-clause
| 13,446
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE EmptyDataDecls #-}
module Text.Css where
import Data.List (intersperse, intercalate)
import Data.Text.Lazy.Builder (Builder, singleton, toLazyText, fromLazyText, fromString)
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TLB
import Data.Monoid (Monoid, mconcat, mappend, mempty)
import Data.Text (Text)
import qualified Data.Text as T
import Language.Haskell.TH.Syntax
import System.IO.Unsafe (unsafePerformIO)
import Text.ParserCombinators.Parsec (Parser, parse)
import Text.Shakespeare.Base hiding (Scope)
import Language.Haskell.TH
import Control.Applicative ((<$>), (<*>))
import Control.Arrow ((***), second)
import Text.IndentToBrace (i2b)
import Data.Functor.Identity (runIdentity)
import Text.Shakespeare (VarType (..))
#if MIN_VERSION_base(4,5,0)
import Data.Monoid ((<>))
#else
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
{-# INLINE (<>) #-}
#endif
-- | A CSS template parameterised by a URL rendering function.
type CssUrl url = (url -> [(T.Text, T.Text)] -> T.Text) -> Css

-- | Difference list, for cheap appends.
type DList a = [a] -> [a]

-- FIXME great use case for data kinds
-- Phantom tags tracking whether variables and mixins have been resolved.
data Resolved
data Unresolved

-- Selectors are rendered text once resolved, raw contents before.
type family Selector a
type instance Selector Resolved = Builder
type instance Selector Unresolved = [Contents]

-- Nested child blocks only exist before resolution (they are flattened).
type family ChildBlocks a
type instance ChildBlocks Resolved = ()
type instance ChildBlocks Unresolved = [(HasLeadingSpace, Block Unresolved)]

type HasLeadingSpace = Bool

type family Str a
type instance Str Resolved = Builder
type instance Str Unresolved = Contents

-- Mixin references only exist before resolution (they are spliced in).
type family Mixins a
type instance Mixins Resolved = ()
type instance Mixins Unresolved = [Deref]
-- | A CSS rule block: selector, attributes, nested child blocks and
-- mixin references (the latter two only before resolution).
data Block a = Block
  { blockSelector :: !(Selector a)
  , blockAttrs :: ![Attr a]
  , blockBlocks :: !(ChildBlocks a)
  , blockMixins :: !(Mixins a)
  }

-- | A reusable set of resolved attributes and blocks.
data Mixin = Mixin
  { mixinAttrs :: ![Attr Resolved]
  , mixinBlocks :: ![Block Resolved]
  }

-- Mixins combine by concatenating attributes and blocks.
instance Monoid Mixin where
  mempty = Mixin mempty mempty
  mappend (Mixin a x) (Mixin b y) = Mixin (a ++ b) (x ++ y)
-- | A top-level stylesheet entry, parameterised by resolution state.
data TopLevel a where
  TopBlock :: !(Block a) -> TopLevel a
  TopAtBlock :: !String -- name e.g., media
             -> !(Str a) -- selector
             -> ![Block a]
             -> TopLevel a
  TopAtDecl :: !String -> !(Str a) -> TopLevel a
  -- Variable bindings only exist before resolution.
  TopVar :: !String -> !String -> TopLevel Unresolved

-- | A single @key: value@ attribute.
data Attr a = Attr
  { attrKey :: !(Str a)
  , attrVal :: !(Str a)
  }

-- | A fully resolved stylesheet; the constructor records whether
-- whitespace should be emitted when rendering.
data Css = CssWhitespace ![TopLevel Resolved]
         | CssNoWhitespace ![TopLevel Resolved]

-- | A piece of template content before variable resolution.
data Content = ContentRaw String
             | ContentVar Deref
             | ContentUrl Deref
             | ContentUrlParam Deref
             | ContentMixin Deref
  deriving (Show, Eq)

type Contents = [Content]

-- | Runtime values substituted for template variables.
data CDData url = CDPlain Builder
                | CDUrl url
                | CDUrlParam (url, [(Text, Text)])
                | CDMixin Mixin
pack :: String -> Text
pack = T.pack
#if !MIN_VERSION_text(0, 11, 2)
-- NOTE(review): NOINLINE only on old text versions — presumably working
-- around a rewrite-rule/fusion issue there; confirm before removing.
{-# NOINLINE pack #-}
#endif

fromText :: Text -> Builder
fromText = TLB.fromText
{-# NOINLINE fromText #-}

-- | Types that render directly to a CSS builder.
class ToCss a where
  toCss :: a -> Builder

instance ToCss [Char] where toCss = fromLazyText . TL.pack
instance ToCss Text where toCss = fromText
instance ToCss TL.Text where toCss = fromLazyText
-- | Determine which identifiers are used by the given template, useful for
-- creating systems like yesod devel.
cssUsedIdentifiers :: Bool -- ^ perform the indent-to-brace conversion
                   -> Parser [TopLevel Unresolved]
                   -> String
                   -> [(Deref, VarType)]
cssUsedIdentifiers toi2b parseBlocks s' =
    concat $ runIdentity $ mapM (getVars scope0) contents
  where
    s = if toi2b then i2b s' else s'
    a = either (error . show) id $ parse parseBlocks s s
    (scope0, contents) = go a

    -- Flatten the parsed top-levels into a variable scope plus a single
    -- stream of 'Content' for the identifier scan.
    go :: [TopLevel Unresolved]
       -> (Scope, [Content])
    go [] = ([], [])
    go (TopAtDecl dec cs:rest) =
        (scope, rest'')
      where
        (scope, rest') = go rest
        -- Reconstitute "@dec ...;" as raw content around the selector.
        rest'' =
            ContentRaw ('@' : dec ++ " ")
          : cs
          ++ ContentRaw ";"
          : rest'
    go (TopAtBlock _ _ blocks:rest) =
        (scope1 ++ scope2, rest1 ++ rest2)
      where
        (scope1, rest1) = go (map TopBlock blocks)
        (scope2, rest2) = go rest
    go (TopBlock (Block x y z mixins):rest) =
        (scope1 ++ scope2, rest0 ++ rest1 ++ rest2 ++ restm)
      where
        -- Selectors, attributes, nested blocks, then mixin references.
        rest0 = intercalate [ContentRaw ","] x ++ concatMap go' y
        (scope1, rest1) = go (map (TopBlock . snd) z)
        (scope2, rest2) = go rest
        restm = map ContentMixin mixins
    go (TopVar k v:rest) =
        ((k, v):scope, rest')
      where
        (scope, rest') = go rest
    go' (Attr k v) = k ++ v
-- | Template Haskell splice for debug-mode CSS: records which identifiers
-- the template uses so the file can be re-parsed and re-rendered at
-- runtime (via @cssRuntime@) instead of being compiled in.
cssFileDebug :: Bool -- ^ perform the indent-to-brace conversion
             -> Q Exp
             -> Parser [TopLevel Unresolved]
             -> FilePath
             -> Q Exp
cssFileDebug toi2b parseBlocks' parseBlocks fp = do
    s <- fmap TL.unpack $ qRunIO $ readUtf8File fp
#ifdef GHC_7_4
    qAddDependentFile fp
#endif
    let vs = cssUsedIdentifiers toi2b parseBlocks s
    c <- mapM vtToExp vs
    cr <- [|cssRuntime toi2b|]
    parseBlocks'' <- parseBlocks'
    return $ cr `AppE` parseBlocks'' `AppE` (LitE $ StringL fp) `AppE` ListE c
-- | Cartesian product of parent and child selector lists: every parent
-- selector is prefixed onto every child selector, inserting a descendant
-- space when the child requires one.
combineSelectors :: HasLeadingSpace
                 -> [Contents]
                 -> [Contents]
                 -> [Contents]
combineSelectors hsl parents children =
    [ parent ++ leading child | parent <- parents, child <- children ]
  where
    leading
        | hsl = (ContentRaw " " :)
        | otherwise = id
-- | Runtime (debug-mode) resolution of a single block: resolves the
-- selector, attributes, nested blocks and mixins against the supplied data
-- list, producing a difference list of resolved blocks.  'Left' carries a
-- lookup/shape error message.
blockRuntime :: [(Deref, CDData url)]
             -> (url -> [(Text, Text)] -> Text)
             -> Block Unresolved
             -> Either String (DList (Block Resolved))
-- FIXME share code with blockToCss
blockRuntime cd render' (Block x attrs z mixinsDerefs) = do
    mixins <- mapM getMixin mixinsDerefs
    x' <- mapM go' $ intercalate [ContentRaw ","] x
    attrs' <- mapM resolveAttr attrs
    z' <- mapM (subGo x) z -- FIXME use difflists again
    -- Prepend this block, then fold the children's difference lists onto
    -- the remainder.
    Right $ \rest -> Block
        { blockSelector = mconcat x'
        , blockAttrs = concat $ attrs' : map mixinAttrs mixins
        , blockBlocks = ()
        , blockMixins = ()
        } : foldr ($) rest z'
    {-
    (:) (Css' (mconcat $ map go' $ intercalate [ContentRaw "," ] x) (map go'' y))
    . foldr (.) id (map (subGo x) z)
    -}
  where
    go' = contentToBuilderRT cd render'
    getMixin d =
        case lookup d cd of
            Nothing -> Left $ "Mixin not found: " ++ show d
            Just (CDMixin m) -> Right m
            Just _ -> Left $ "For " ++ show d ++ ", expected Mixin"
    resolveAttr :: Attr Unresolved -> Either String (Attr Resolved)
    resolveAttr (Attr k v) = Attr <$> (mconcat <$> mapM go' k) <*> (mconcat <$> mapM go' v)
    subGo :: [Contents] -- ^ parent selectors
          -> (HasLeadingSpace, Block Unresolved)
          -> Either String (DList (Block Resolved))
    subGo x' (hls, Block a b c d) =
        blockRuntime cd render' (Block a' b c d)
      where
        a' = combineSelectors hls x' a
-- | Resolve one content piece against the runtime data list, rendering URLs
-- with the supplied render function.  'Left' when the identifier is missing
-- or bound to the wrong 'CDData' constructor; mixins are never valid here.
contentToBuilderRT :: [(Deref, CDData url)]
                   -> (url -> [(Text, Text)] -> Text)
                   -> Content
                   -> Either String Builder
contentToBuilderRT _ _ (ContentRaw s) = Right $ fromText $ pack s
contentToBuilderRT cd _ (ContentVar d) =
    case lookup d cd of
        Just (CDPlain s) -> Right s
        _ -> Left $ show d ++ ": expected CDPlain"
contentToBuilderRT cd render' (ContentUrl d) =
    case lookup d cd of
        Just (CDUrl u) -> Right $ fromText $ render' u []
        _ -> Left $ show d ++ ": expected CDUrl"
contentToBuilderRT cd render' (ContentUrlParam d) =
    case lookup d cd of
        Just (CDUrlParam (u, p)) ->
            Right $ fromText $ render' u p
        _ -> Left $ show d ++ ": expected CDUrlParam"
contentToBuilderRT _ _ ContentMixin{} = Left "contentToBuilderRT ContentMixin"
-- | Runtime (debug-mode) renderer: re-reads and re-parses the template file
-- and resolves it against the supplied data.  Uses 'unsafePerformIO' so a
-- pure 'Css' can be produced from file I/O — any parse failure surfaces as
-- 'error' when the result is forced.
cssRuntime :: Bool -- ^ i2b?
           -> Parser [TopLevel Unresolved]
           -> FilePath
           -> [(Deref, CDData url)]
           -> (url -> [(Text, Text)] -> Text)
           -> Css
cssRuntime toi2b parseBlocks fp cd render' = unsafePerformIO $ do
    s' <- fmap TL.unpack $ qRunIO $ readUtf8File fp
    let s = if toi2b then i2b s' else s'
    let a = either (error . show) id $ parse parseBlocks s s
    return $ CssWhitespace $ goTop [] a
  where
    goTop :: [(String, String)] -- ^ scope
          -> [TopLevel Unresolved]
          -> [TopLevel Resolved]
    goTop _ [] = []
    goTop scope (TopAtDecl dec cs':rest) =
        TopAtDecl dec cs : goTop scope rest
      where
        cs = either error mconcat $ mapM (contentToBuilderRT cd render') cs'
    goTop scope (TopBlock b:rest) =
        map TopBlock (either error ($[]) $ blockRuntime (addScope scope) render' b) ++
        goTop scope rest
    goTop scope (TopAtBlock name s' b:rest) =
        TopAtBlock name s (foldr (either error id . blockRuntime (addScope scope) render') [] b) :
        goTop scope rest
      where
        s = either error mconcat $ mapM (contentToBuilderRT cd render') s'
    goTop scope (TopVar k v:rest) = goTop ((k, v):scope) rest
    -- Template-local @var bindings shadow/extend the caller-supplied data.
    addScope scope = map (DerefIdent . Ident *** CDPlain . fromString) scope ++ cd
-- | Lift a (deref, usage) pair into a tuple expression pairing the lifted
-- deref with the runtime value wrapped in the matching 'CDData' constructor.
vtToExp :: (Deref, VarType) -> Q Exp
vtToExp (d, vt) = do
    d' <- lift d
    c' <- c vt
    -- NOTE(review): TupE's argument became [Maybe Exp] in template-haskell
    -- 2.16 (GHC 8.10) — verify against the supported GHC range.
    return $ TupE [d', c' `AppE` derefToExp [] d]
  where
    c :: VarType -> Q Exp
    c VTPlain = [|CDPlain . toCss|]
    c VTUrl = [|CDUrl|]
    c VTUrlParam = [|CDUrlParam|]
    c VTMixin = [|CDMixin|]
-- | Report the identifier (if any) one content piece needs at runtime,
-- unless it is bound in the template-local scope.  A scope-bound name used
-- as a URL/URLParam/Mixin is an error, raised through 'fail'.
-- NOTE(review): relies on 'fail' from Monad; on modern GHC this requires a
-- MonadFail constraint — verify the supported compiler range.
getVars :: Monad m => [(String, String)] -> Content -> m [(Deref, VarType)]
getVars _ ContentRaw{} = return []
getVars scope (ContentVar d) =
    case lookupD d scope of
        Just _ -> return []
        Nothing -> return [(d, VTPlain)]
getVars scope (ContentUrl d) =
    case lookupD d scope of
        Nothing -> return [(d, VTUrl)]
        Just s -> fail $ "Expected URL for " ++ s
getVars scope (ContentUrlParam d) =
    case lookupD d scope of
        Nothing -> return [(d, VTUrlParam)]
        Just s -> fail $ "Expected URLParam for " ++ s
getVars scope (ContentMixin d) =
    case lookupD d scope of
        Nothing -> return [(d, VTMixin)]
        Just s -> fail $ "Expected Mixin for " ++ s
-- | If the deref is a plain identifier bound in the scope, return its name;
-- complex derefs are never scope-bound.
lookupD :: Deref -> [(String, b)] -> Maybe String
lookupD (DerefIdent (Ident s)) scope = fmap (const s) (lookup s scope)
lookupD _ _ = Nothing
-- | Push 'compressBlock' through a top-level item; declarations and
-- variable bindings are left untouched.
compressTopLevel :: TopLevel Unresolved
                 -> TopLevel Unresolved
compressTopLevel tl =
    case tl of
        TopBlock b -> TopBlock (compressBlock b)
        TopAtBlock name s bs -> TopAtBlock name s (map compressBlock bs)
        TopAtDecl{} -> tl
        TopVar{} -> tl
-- | Recursively merge adjacent 'ContentRaw' pieces throughout a block's
-- selector, attributes and sub-blocks.
compressBlock :: Block Unresolved
              -> Block Unresolved
compressBlock (Block sel attrs subs mixins) =
    Block (map mergeRaws sel)
          (map compressAttr attrs)
          (map (second compressBlock) subs)
          mixins
  where
    compressAttr (Attr k v) = Attr (mergeRaws k) (mergeRaws v)
    -- Fuse runs of raw text into single pieces.
    mergeRaws (ContentRaw a : ContentRaw b : cs) = mergeRaws (ContentRaw (a ++ b) : cs)
    mergeRaws (c : cs) = c : mergeRaws cs
    mergeRaws [] = []
-- | Compile a block into a 'Mixin' expression.  Only the attribute list
-- (plus the attributes of nested mixins) is carried; sub-blocks are not
-- supported yet — see the FIXME and the commented-out code below.
blockToMixin :: Name
             -> Scope
             -> Block Unresolved
             -> Q Exp
blockToMixin r scope (Block _sel props subblocks mixins) =
    [|Mixin
        { mixinAttrs = concat
            $ $(listE $ map go props)
            : map mixinAttrs $mixinsE
        -- FIXME too many complications to implement sublocks for now...
        , mixinBlocks = [] -- foldr (.) id $(listE $ map subGo subblocks) []
        }|]
    {-
      . foldr (.) id $(listE $ map subGo subblocks)
      . (concatMap mixinBlocks $mixinsE ++)
      |]
    -}
  where
    mixinsE = return $ ListE $ map (derefToExp []) mixins
    go (Attr x y) = conE 'Attr
        `appE` (contentsToBuilder r scope x)
        `appE` (contentsToBuilder r scope y)
    subGo (Block sel' b c d) = blockToCss r scope $ Block sel' b c d
-- | Compile a block to a difference-list expression over @[Block Resolved]@:
-- prepends this block, then its sub-blocks (selectors combined with the
-- parent's), then the blocks contributed by mixins.
blockToCss :: Name
           -> Scope
           -> Block Unresolved
           -> Q Exp
blockToCss r scope (Block sel props subblocks mixins) =
    [|((Block
        { blockSelector = $(selectorToBuilder r scope sel)
        , blockAttrs = concat
            $ $(listE $ map go props)
            : map mixinAttrs $mixinsE
        , blockBlocks = ()
        , blockMixins = ()
        } :: Block Resolved):)
      . foldr (.) id $(listE $ map subGo subblocks)
      . (concatMap mixinBlocks $mixinsE ++)
      |]
  where
    mixinsE = return $ ListE $ map (derefToExp []) mixins
    go (Attr x y) = conE 'Attr
        `appE` (contentsToBuilder r scope x)
        `appE` (contentsToBuilder r scope y)
    subGo (hls, Block sel' b c d) =
        blockToCss r scope $ Block sel'' b c d
      where
        sel'' = combineSelectors hls sel sel'
-- | Join the comma-separated selector alternatives and compile them to one
-- Builder expression.
selectorToBuilder :: Name -> Scope -> [Contents] -> Q Exp
selectorToBuilder r scope =
    contentsToBuilder r scope . intercalate [ContentRaw ","]
-- | Compile a content list to an expression that mconcats the individual
-- pieces' Builders.
contentsToBuilder :: Name -> Scope -> [Content] -> Q Exp
contentsToBuilder r scope =
    appE [|mconcat|] . listE . map (contentToBuilder r scope)
-- | Compile-time translation of one content piece to a Builder expression.
-- Scope-bound variables are inlined as literal text; URLs go through the
-- render function named by @r@; mixins must have been eliminated earlier.
contentToBuilder :: Name -> Scope -> Content -> Q Exp
contentToBuilder _ _ (ContentRaw x) =
    [|fromText . pack|] `appE` litE (StringL x)
contentToBuilder _ scope (ContentVar d) =
    case d of
        DerefIdent (Ident s)
            | Just val <- lookup s scope -> [|fromText . pack|] `appE` litE (StringL val)
        _ -> [|toCss|] `appE` return (derefToExp [] d)
contentToBuilder r _ (ContentUrl u) =
    [|fromText|] `appE`
    (varE r `appE` return (derefToExp [] u) `appE` listE [])
contentToBuilder r _ (ContentUrlParam u) =
    [|fromText|] `appE`
    ([|uncurry|] `appE` varE r `appE` return (derefToExp [] u))
contentToBuilder _ _ ContentMixin{} = error "contentToBuilder on ContentMixin"
-- | Template-local variable bindings introduced by @\@name: value@ lines.
type Scope = [(String, String)]
-- | Compile-time entry point: builds a lambda over the URL render function
-- producing a compact ('CssNoWhitespace') stylesheet.  @\@var@ bindings are
-- threaded through the scope and produce no output themselves.
topLevelsToCassius :: [TopLevel Unresolved]
                   -> Q Exp
topLevelsToCassius a = do
    r <- newName "_render"
    lamE [varP r] $ appE [|CssNoWhitespace . foldr ($) []|] $ fmap ListE $ go r [] a
  where
    go _ _ [] = return []
    go r scope (TopBlock b:rest) = do
        e <- [|(++) $ map TopBlock ($(blockToCss r scope b) [])|]
        es <- go r scope rest
        return $ e : es
    go r scope (TopAtBlock name s b:rest) = do
        let s' = contentsToBuilder r scope s
        e <- [|(:) $ TopAtBlock $(lift name) $(s') $(blocksToCassius r scope b)|]
        es <- go r scope rest
        return $ e : es
    go r scope (TopAtDecl dec cs:rest) = do
        e <- [|(:) $ TopAtDecl $(lift dec) $(contentsToBuilder r scope cs)|]
        es <- go r scope rest
        return $ e : es
    go r scope (TopVar k v:rest) = go r ((k, v) : scope) rest
-- | Compile a list of blocks into one expression that folds their
-- difference lists into a resolved block list.
blocksToCassius :: Name
                -> Scope
                -> [Block Unresolved]
                -> Q Exp
blocksToCassius r scope blocks =
    appE [|foldr ($) []|] (listE (map (blockToCss r scope) blocks))
-- | Render a 'Css' AST to lazy text.  The constructor selects the style:
-- 'CssWhitespace' pretty-prints with newlines/indentation, 'CssNoWhitespace'
-- emits compact output.
renderCss :: Css -> TL.Text
renderCss css =
    toLazyText $ mconcat $ map go tops
  where
    (haveWhiteSpace, tops) =
        case css of
            CssWhitespace x -> (True, x)
            CssNoWhitespace x -> (False, x)
    go (TopBlock x) = renderBlock haveWhiteSpace mempty x
    go (TopAtBlock name s x) =
        fromText (pack $ concat ["@", name, " "]) `mappend`
        s `mappend`
        startBlock `mappend`
        foldr mappend endBlock (map (renderBlock haveWhiteSpace (fromString "    ")) x)
    go (TopAtDecl dec cs) = fromText (pack $ concat ["@", dec, " "]) `mappend`
                            cs `mappend`
                            endDecl
    startBlock
        | haveWhiteSpace = fromString " {\n"
        | otherwise = singleton '{'
    endBlock
        | haveWhiteSpace = fromString "}\n"
        | otherwise = singleton '}'
    endDecl
        | haveWhiteSpace = fromString ";\n"
        | otherwise = singleton ';'
-- | Render one resolved block at the given indentation.  Blocks with no
-- attributes render to nothing at all.
renderBlock :: Bool -- ^ have whitespace?
            -> Builder -- ^ indentation
            -> Block Resolved
            -> Builder
renderBlock haveWhiteSpace indent (Block sel attrs () ())
    | null attrs = mempty
    | otherwise = startSelect
               <> sel
               <> startBlock
               <> mconcat (intersperse endDecl $ map renderAttr attrs)
               <> endBlock
  where
    renderAttr (Attr k v) = startDecl <> k <> colon <> v
    colon
        | haveWhiteSpace = fromString ": "
        | otherwise = singleton ':'
    startSelect
        | haveWhiteSpace = indent
        | otherwise = mempty
    startBlock
        | haveWhiteSpace = fromString " {\n"
        | otherwise = singleton '{'
    endBlock
        | haveWhiteSpace = fromString ";\n" `mappend` indent `mappend` fromString "}\n"
        | otherwise = singleton '}'
    startDecl
        | haveWhiteSpace = indent `mappend` fromString "    "
        | otherwise = mempty
    endDecl
        | haveWhiteSpace = fromString ";\n"
        | otherwise = singleton ';'
-- | TH lifting for mixins and attributes, so parsed templates can be
-- embedded into generated code.
instance Lift Mixin where
    lift (Mixin a b) = [|Mixin a b|]
instance Lift (Attr Unresolved) where
    lift (Attr k v) = [|Attr k v :: Attr Unresolved |]
instance Lift (Attr Resolved) where
    lift (Attr k v) = [|Attr $(liftBuilder k) $(liftBuilder v) :: Attr Resolved |]
-- | Lift a Builder by round-tripping through a String literal.
liftBuilder :: Builder -> Q Exp
liftBuilder b = [|fromText $ pack $(lift $ TL.unpack $ toLazyText b)|]
-- | TH lifting for content pieces and whole blocks (resolved blocks go
-- through 'liftBuilder' for their text fields).
instance Lift Content where
    lift (ContentRaw s) = [|ContentRaw s|]
    lift (ContentVar d) = [|ContentVar d|]
    lift (ContentUrl d) = [|ContentUrl d|]
    lift (ContentUrlParam d) = [|ContentUrlParam d|]
    lift (ContentMixin m) = [|ContentMixin m|]
instance Lift (Block Unresolved) where
    lift (Block a b c d) = [|Block a b c d|]
instance Lift (Block Resolved) where
    lift (Block a b () ()) = [|Block $(liftBuilder a) b () ()|]
|
psibi/shakespeare
|
Text/Css.hs
|
Haskell
|
mit
| 17,915
|
-- | A script for generating unrolled versions of three R2 benchmarks
-- For example, `makeCoinBias 42` will generate a file `CoinBias42.hs`
-- containing an unrolled version of the "coinBias" model that uses an
-- array of length 42
module Unroll where
import Text.PrettyPrint (parens, space, (<+>), punctuate, int, vcat,
Doc(..), text, render, ($$), (<>), nest)
-- Helpers
----------------------------------------------------------------------
-- | @makeNVars n var@ produces the docs @var0, var1, ..., var(n-1)@.
makeNVars :: Int -> String -> [Doc]
makeNVars n var = map mkVar [0 .. n - 1]
  where mkVar i = text var <> int i
-- | Prefix a keyword onto the first doc and stack the second below it.
makePair :: String -> Doc -> Doc -> Doc
makePair kw top bottom = (text kw <+> top) $$ bottom
-- | Hakaru value-level pair.
pair :: Doc -> Doc -> Doc
pair = makePair "pair"
-- | Hakaru type-level pair.
hPair :: Doc -> Doc -> Doc
hPair = makePair "HPair"
-- | Right-nest a binary combiner over the docs, parenthesising each level.
-- NOTE: partial on the empty list ('foldr1'); callers always pass n >= 1.
nested :: (Doc -> Doc -> Doc) -> [Doc] -> Doc
nested combine vars = foldr1 wrap (punctuate space vars)
  where wrap a acc = parens (combine a acc)
-- | Final 'dirac' expression: the nested tuple of observation variables
-- paired with the returned quantity @b@.
lastLine :: [Doc] -> String -> Doc
lastLine vars b = text "dirac" <+>
                  parens (pair (nested pair vars) (text b))
-- | @Model a b@ type application, with @b@ stacked on the next line.
model :: Doc -> Doc -> Doc
model a b = text "Model" <+> a $$ b
-- | @n@ copies of the same type name, as docs.
makeNTypes :: Int -> String -> [Doc]
makeNTypes n = replicate n . text
-- | Type signature (@name :: Model (HPair a .. a) b@) followed by the
-- opening @name =@ line of the definition.
decl :: String -> Int -> String -> String -> Doc
decl name n a b =
    let typeDecl = text name <+> text "::" <+>
                   model (nested hPair (makeNTypes n a))
                         (text b)
        nameDecl = text name <+> text "="
    in typeDecl $$ nameDecl
-- | Literal @->@ token.
arrow :: Doc
arrow = text "->"
-- | Hakaru natural literal, e.g. @(nat_ 3)@.
nat :: Int -> Doc
nat i = parens $ text "nat_" <+> int i
-- | A @where@ clause with each definition indented to line up one column
-- past the keyword.
--
-- Improvement: fused @map (nest l) (map text defns)@ into a single
-- @map (nest l . text)@ pass (hlint's map/map idiom); behavior unchanged.
whereDef :: [String] -> Doc
whereDef defns = text "where" $$ vcat (map (nest l . text) defns)
  where l = length "where" + 1
-- Models
----------------------------------------------------------------------
-- | Unrolled coinBias model: a beta prior on the bias followed by @n@
-- bernoulli tosses, returning the tossed values paired with the bias.
coinBias :: Int -> Doc
coinBias n =
    let firstTwo = decl "coinBias" n "HBool" "'HProb"
        vars = makeNVars n "tossResult"
        prog = vcat $
               [text $ "beta (prob_ 2) (prob_ 5) >>= \\bias ->"] ++
               [text "bern bias >>= \\" <> v <+> arrow | v <- vars] ++
               [lastLine vars "bias"]
    in firstTwo $$ nest 4 prog
-- | Unrolled digitRecognition model: a categorical label prior and @n@
-- bernoulli pixel observations, with the data arrays bound as free
-- variables in a where clause.
digitRecognition :: Int -> Doc
digitRecognition n =
    let firstTwo = decl "digitRecognition" n "HBool" "'HNat"
        vars = makeNVars n "x"
        prog = vcat $
               [text $ "categorical dataPrior >>= \\y ->"] ++
               [text "bern ((dataParams ! y) !" <+> nat i <> text ") >>= \\" <>
                (vars !! i) <+> arrow | i <- [0..n-1]] ++
               [lastLine vars "y"] ++
               [whereDef ["dataPrior = var (Variable \"dataPrior\" 73 (SArray SProb))",
                          "dataParams = var (Variable \"dataParams\" 41 (SArray (SArray SProb)))"]]
    in firstTwo $$ nest 4 prog
-- | Unrolled linearRegression model: normal priors on slope/intercept, a
-- gamma prior on the inverse noise, then @n@ normal observations of the
-- (free) dataX array.
linearRegression :: Int -> Doc
linearRegression n =
    let firstTwo = decl "linearRegression" n "'HReal" "HUnit"
        vars = makeNVars n "y"
        prog = vcat $
               [text "normal (real_ 0) (prob_ 1) >>= \\a ->",
                text "normal (real_ 5) (prob_ 1.825) >>= \\b ->",
                text "gamma (prob_ 1) (prob_ 1) >>= \\invNoise ->"] ++
               [text "normal (a * (dataX !" <+> nat i <>
                text ")) (recip (sqrt invNoise)) >>= \\" <>
                (vars !! i) <+> arrow | i <- [0..n-1]] ++
               [lastLine vars "unit"] ++
               [whereDef ["dataX = var (Variable \"dataX\" 73 (SArray SReal))"]]
    in firstTwo $$ nest 4 prog
-- Make files
----------------------------------------------------------------------
-- | LANGUAGE pragma header for every generated file.
pragmas :: Doc
pragmas = text "{-# LANGUAGE DataKinds, TypeOperators, OverloadedStrings #-}\n"
-- | @module <name> where@ header line.
moduleName :: String -> Doc
moduleName name = text "module" <+> text name <+> text "where\n"
-- | Import block shared by all generated benchmark files.
imports :: Doc
imports = vcat $
          [text "import Prelude (print, length, IO)",
           text "import Language.Hakaru.Syntax.Prelude",
           text "import Language.Hakaru.Disintegrate",
           text "import Language.Hakaru.Syntax.ABT",
           text "import Language.Hakaru.Syntax.AST",
           text "import Language.Hakaru.Types.DataKind",
           text "import Language.Hakaru.Types.Sing\n"]
-- | Type synonyms emitted into each generated file (Model and Cond).
synonyms :: Doc
synonyms = text "type Model a b = TrivialABT Term '[] ('HMeasure (HPair a b))"
           $$ text "type Cond a b = TrivialABT Term '[] (a ':-> 'HMeasure b)\n"
-- | The generated @main@: prints the number of disintegration results for
-- the named model.
mainCall :: String -> Doc
mainCall name = vcat
    [ text "main :: IO ()"
    , text "main =" <+> text "print (length (disintegrate" <+> text name <> text "))\n"
    ]
-- | Write @CoinBias<n>.hs@: the unrolled model plus a main that
-- disintegrates it.
makeCoinBias :: Int -> IO ()
makeCoinBias n =
  let name = "CoinBias" ++ show n
      doc = pragmas $$
            moduleName name $$
            imports $$
            synonyms $$
            coinBias n <> text "\n" $$
            mainCall "coinBias"
  in writeFile (name ++ ".hs") (render doc)
-- | Write @DigitRecognition<n>.hs@ (same layout as 'makeCoinBias').
makeDigitRecognition :: Int -> IO ()
makeDigitRecognition n =
  let name = "DigitRecognition" ++ show n
      doc = pragmas $$
            moduleName name $$
            imports $$
            synonyms $$
            digitRecognition n <> text "\n" $$
            mainCall "digitRecognition"
  in writeFile (name ++ ".hs") (render doc)
-- | Write @LinearRegression<n>.hs@ (same layout as 'makeCoinBias').
makeLinearRegression :: Int -> IO ()
makeLinearRegression n =
  let name = "LinearRegression" ++ show n
      doc = pragmas $$
            moduleName name $$
            imports $$
            synonyms $$
            linearRegression n <> text "\n" $$
            mainCall "linearRegression"
  in writeFile (name ++ ".hs") (render doc)
-- | Generate the two CoinBias benchmark sizes used by default.
main :: IO ()
main = mapM_ makeCoinBias [5, 500]
|
zaxtax/hakaru
|
haskell/Tests/Unroll/Unroll.hs
|
Haskell
|
bsd-3-clause
| 5,683
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module T17296 where
import Data.Foldable
import Data.Kind
import Language.Haskell.TH hiding (Type)
import System.IO
-- Arity-fixed data family: every instance must be applied to one Type.
data family Foo1 :: Type -> Type
data instance Foo1 Bool = Foo1Bool
data instance Foo1 (Maybe a)
-- Poly-kinded data family; instances may fix the kind in different ways,
-- including via kind signatures and empty GADT syntax.
data family Foo2 :: k -> Type
data instance Foo2 Bool = Foo2Bool
data instance Foo2 (Maybe a)
data instance Foo2 :: Char -> Type
data instance Foo2 :: (Char -> Char) -> Type where
-- Fully poly-kinded data family, including a nullary instance.
data family Foo3 :: k
data instance Foo3
data instance Foo3 Bool = Foo3Bool
data instance Foo3 (Maybe a)
data instance Foo3 :: Char -> Type
data instance Foo3 :: (Char -> Char) -> Type where
-- Reify each data family and print its pretty-printed Info to stderr; this
-- is the observable output of the T17296 regression test.
$(do let test :: Name -> Q ()
         test n = do i <- reify n
                     runIO $ do hPutStrLn stderr $ pprint i
                                hPutStrLn stderr ""
                                hFlush stderr
     traverse_ test [''Foo1, ''Foo2, ''Foo3]
     pure [])
|
sdiehl/ghc
|
testsuite/tests/th/T17296.hs
|
Haskell
|
bsd-3-clause
| 1,017
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hu-HU">
<title>Alert Filters | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Keresés</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
denniskniep/zap-extensions
|
addOns/alertFilters/src/main/javahelp/org/zaproxy/zap/extension/alertFilters/resources/help_hu_HU/helpset_hu_HU.hs
|
Haskell
|
apache-2.0
| 976
|
module SortProps where
import Sort
assert S0 = {sort []} ::: []
assert S1 = All x . {sort [x]} === {[x]}
assert S2 = All x1 . All x2 . {sort [x1,x2]} === {[x1,x2]}
\/ {sort [x1,x2]} === {[x2,x1]}
--assert S = S0 /\ S1 /\ S2
-- | True when the list is non-decreasing (vacuously true for 0/1 elements).
ordered xs = and (zipWith (<=) xs (drop 1 xs))
-- | True when @x@ is a lower bound of every element of the list.
lteAll x = all (x<=)
-- | 'ordered' specialised to Int lists (used by the properties below).
orderedInt :: [Int] -> Bool
orderedInt = ordered
--property IsTrue = {| x | x===True |}
--property IsOrdered = {|xs | IsTrue {orderedInt xs} |}
property IsOrdered = !ordered
assert InsertProp = All x . All xs . IsOrdered xs ==> IsOrdered {insert x xs}
assert SortProp1 = All xs . IsOrdered {sort xs}
property AllElems P = Gfp X . [] \/ P:X
property Minimal x = AllElems (!((x::Int)<=))
property Or1 P Q = {| x | (x:::P) \/ (x:::Q) |}
property Or2 P Q = P \/ Q
property IsOrdered2 =
Lfp X . {| xs | (xs:::[]) \/ (Minimal {head xs} xs /\ ({tail xs}:::X)) |}
|
forste/haReFork
|
tools/hs2alfa/tests/SortProps.hs
|
Haskell
|
bsd-3-clause
| 931
|
-- Test the classic "\SOH" ambiguity
module Main(main) where
-- Exercise the classic "\SOH" read ambiguity: "\SOH" must parse as the
-- single control character SOH, while "\SOx" must parse as SO followed by
-- a literal 'x'; the lengths (1 and 2) make the distinction observable.
main = do { print soh ; print (length (fst (head soh))) ;
            print so ; print (length (fst (head so))) }
  where
    so, soh :: [(String,String)]
    soh = reads "\"\\SOH\"" -- Should read \SOH
    so = reads "\"\\SOx\"" -- Should read \SO followed by x
|
ezyang/ghc
|
libraries/base/tests/reads001.hs
|
Haskell
|
bsd-3-clause
| 343
|
-- Check that the record selector for maskMB unfolds in the body of f
-- At one stage it didn't because the implicit unfolding looked too big
-- Trac #2581
module ShouldCompile where
import Data.Array.Base
-- | Mutable bloom filter; all fields are strict and unpacked, which is what
-- made the 'maskMB' selector's implicit unfolding look too big (Trac #2581).
data MBloom s a = MB {
    shiftMB :: {-# UNPACK #-} !Int
  , maskMB :: {-# UNPACK #-} !Int
  , bitArrayMB :: {-# UNPACK #-} !(STUArray s Int Int)
  }
-- Construct-then-select: with the selector unfolded this should simplify to
-- a case on @b@ alone — the point of the compile-only test.
f a b c = case maskMB (MB a b c) of
            3 -> True
            _ -> False
|
bitemyapp/ghc
|
testsuite/tests/eyeball/record1.hs
|
Haskell
|
bsd-3-clause
| 453
|
{-# OPTIONS_GHC -F -pgmF hspec-discover -optF --no-main #-}
|
AndrewRademacher/twitter-conduit
|
tests/Spec.hs
|
Haskell
|
bsd-2-clause
| 60
|
-- Test for trac #2141
module Foo where
-- Deliberately invalid: 'foo' is a function, not a record field, so the
-- record-update syntax below must be rejected (regression test for #2141).
foo :: () -> ()
foo x = x { foo = 1 }
|
wxwxwwxxx/ghc
|
testsuite/tests/rename/should_fail/rnfail054.hs
|
Haskell
|
bsd-3-clause
| 80
|
module Annfail13 where
-- Testing that brackets are mandatory in the ANN syntax
-- Both ANN pragmas deliberately omit the required brackets/parentheses
-- around the expression, so compilation must fail here.
{-# ANN f id 1 #-}
{-# ANN f 1 :: Int #-}
f x = x
|
urbanslug/ghc
|
testsuite/tests/annotations/should_fail/annfail13.hs
|
Haskell
|
bsd-3-clause
| 130
|
module Main where
import Flight
-- | Parse one constraint per line of input.txt and print the result of
-- 'maxScore' at t = 2503 (semantics of both defined in module Flight).
main = do
  input <- readFile "input.txt"
  let constraints = map parseConstraint (lines input)
  print $ maxScore 2503 constraints
|
corajr/adventofcode2015
|
14/Main.hs
|
Haskell
|
mit
| 166
|
module Rebase.GHC.MVar
(
module GHC.MVar
)
where
import GHC.MVar
|
nikita-volkov/rebase
|
library/Rebase/GHC/MVar.hs
|
Haskell
|
mit
| 68
|
-- | Insert a value into an already-sorted list, keeping it sorted.
insert x [] = [x]
insert x ys@(y:rest)
  | x < y     = x : ys
  | otherwise = y : insert x rest
-- | Insertion sort: fold every element into an initially-empty sorted list.
isort xs = foldr insert [] xs
-- | Demo: insertion-sort a fixed list and print it.
main = do
    print $ isort [4, 6, 9, 8, 3, 5, 1, 7, 2]
|
shigemk2/haskell_abc
|
insert.hs
|
Haskell
|
mit
| 203
|
module Options
( Command(..)
, Options(..)
, BuildParams(..)
, AuthCreds(..)
, parseOptions
) where
import Options.Applicative
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as BS
import Data.Maybe (mapMaybe)
import Jenkins.Types
-- | Jenkins job identifier.
type JobId = T.Text
-- | Build parameters, as raw key/value byte-string pairs.
newtype BuildParams = BuildParams {
  fromBuildParams :: [(BS.ByteString, BS.ByteString)]
} deriving (Show, Eq)
-- | HTTP basic-auth (user, password) pair.
newtype AuthCreds = AuthCreds {
  fromAuthCreds :: (BS.ByteString, BS.ByteString)
} deriving (Show, Eq)
-- | The CLI sub-commands.
data Command = JobStatuses
             | JobStatus JobId
             | RunBuild JobId BuildParams
             | BuildLog JobId (Maybe BuildNum)
             deriving (Show, Eq)
-- | Parsed top-level options: server base URL, optional credentials, and
-- the selected sub-command.
data Options = Options
  { optsBaseUri :: String
  , optsAuth :: Maybe AuthCreds
  , optsCommand :: Command
  } deriving (Show, Eq)
-- | Wrap a sub-command parser with --help support and a description.
parserInfo :: Parser Command -> String -> ParserInfo Command
parserInfo cmd desc = info (helper <*> cmd) (progDesc desc)
-- | Read a job identifier argument (never fails).
jobIdParser :: String -> ReadM JobId
jobIdParser = return . T.pack
-- | Parse whitespace-separated PARAM=VALUE pairs; malformed pairs are
-- silently dropped (via 'mapMaybe').
buildParamParser :: String -> ReadM BuildParams
buildParamParser = return . BuildParams . mapMaybe parseParam . BS.words . BS.pack
-- | Split a single KEY=VALUE token at the first '='.  Tokens without an
-- '=' yield Nothing; everything after the first '=' is the value.
parseParam :: BS.ByteString -> Maybe (BS.ByteString, BS.ByteString)
parseParam s = split <$> BS.elemIndex '=' s
  where
    split i = (BS.take i s, BS.drop (i + 1) s)
-- | Split "user:password" credentials.
-- NOTE(review): 'splitWith' splits on every ':', so a password containing a
-- colon produces more than two fields and is rejected — confirm intended.
authCredsParser :: String -> ReadM (Maybe AuthCreds)
authCredsParser s = do
  return $ case BS.splitWith ((==) ':') (BS.pack s) of
    (user:pass:[]) -> Just (AuthCreds (user, pass))
    _ -> Nothing
-- | Read an optional build number.
-- NOTE(review): 'read' is partial — a non-numeric BUILD_NUM crashes;
-- consider readMaybe.
buildNumParser :: String -> ReadM (Maybe BuildNum)
buildNumParser = pure . Just . BuildNum . read
-- | JOB_ID positional argument.
jobStatusParser :: Parser Command
jobStatusParser = JobStatus
  <$> argument (str >>= jobIdParser) ( metavar "JOB_ID" )
-- | JOB_ID plus an optional PARAM=VALUE list (defaults to no parameters).
runBuildParser :: Parser Command
runBuildParser = RunBuild
  <$> argument (str >>= jobIdParser) ( metavar "JOB_ID" )
  <*> argument (str >>= buildParamParser) ( metavar "PARAM=VALUE .."
                                          <> value (BuildParams [])
                                          <> help "List of parameter/value pairs"
                                          )
-- | JOB_ID plus an optional BUILD_NUM ('Nothing' when omitted).
buildLogParser :: Parser Command
buildLogParser = BuildLog
  <$> argument (str >>= jobIdParser) ( metavar "JOB_ID" )
  <*> argument (str >>= buildNumParser) ( metavar "BUILD_NUM"
                                        <> value Nothing
                                        <> help "Build number"
                                        )
-- | Top-level parser: -s base URL (required), optional -u credentials, and
-- one of the four sub-commands.
parseOptions :: Parser Options
parseOptions = Options
  <$> strOption ( short 's'
                <> metavar "JENKINS_URL"
                <> help "Jenkins base URL"
                )
  <*> option (str >>= authCredsParser ) ( short 'u'
                                        <> metavar "HTTP_AUTH"
                                        <> value Nothing
                                        <> help "http authentication credentials (i.e. user:password)"
                                        )
  <*> subparser
      ( command "jobs" jobStatusesParserInfo
      <> command "job" jobStatusParserInfo
      <> command "build" runBuildParserInfo
      <> command "log" buildLogParserInfo
      )
-- | Help-text wrappers for the four sub-commands.
jobStatusesParserInfo :: ParserInfo Command
jobStatusesParserInfo =
  parserInfo (pure JobStatuses) "display all jobs' status"
jobStatusParserInfo :: ParserInfo Command
jobStatusParserInfo =
  parserInfo jobStatusParser "list recent builds for a given job"
runBuildParserInfo :: ParserInfo Command
runBuildParserInfo =
  parserInfo runBuildParser "build a given job"
buildLogParserInfo :: ParserInfo Command
buildLogParserInfo =
  parserInfo buildLogParser "stream build log to standard output"
|
afiore/jenkins-tty.hs
|
src/Options.hs
|
Haskell
|
mit
| 3,617
|
import Control.Monad.Reader.Class (ask)
import XMonad.Main (xmonad)
import XMonad.Core
( X
, spawn
, ManageHook
, layoutHook
, startupHook
, manageHook
, logHook
, modMask
, terminal
, borderWidth
, normalBorderColor
, focusedBorderColor
, workspaces
)
import XMonad.ManageHook
( composeAll
, (<||>), (=?), (-->), (<+>)
, className, doF, stringProperty )
import XMonad.Config ( defaultConfig )
import XMonad.Hooks.DynamicLog
( dynamicLogWithPP
, ppOutput
, ppTitle
, ppLayout
, ppVisible
, ppCurrent
, xmobarPP
, xmobarColor
, shorten
)
import XMonad.Hooks.ManageDocks ( avoidStruts, docks, manageDocks)
import XMonad.Util.Run (spawnPipe)
import XMonad.Util.EZConfig (additionalKeys)
import XMonad.Layout.Spacing (smartSpacing)
import XMonad.Layout.NoBorders (smartBorders)
import XMonad.StackSet (sink)
import Graphics.X11.Types
import Data.Bits ( (.|.) )
import System.IO (hPutStrLn)
----------------------- Colors ----------------------
-- Color palette used by the bar pretty-printer and border settings below.
magenta = "#FF14E5"
blue1 = "#29acff"
green1 = "#60ff45"
white1 = "#FFFFFF"
gray1 = "#3D3D3D"
gray2 = "#808080"
gray3 = "#CCCCCC"
yellow1 = "#FFF500"
yellow2 = "#ffff66"
-- | Force GIMP's toolbox/image windows and MPlayer back into the tiled
-- layer (un-float them via 'sink').
myManageHook :: ManageHook
myManageHook = composeAll [ (role =? "gimp-toolbox" <||> role =? "gimp-image-window") -->(unfloat)
                          , className =? "MPlayer" --> (unfloat) ]
  where unfloat = ask >>= doF . sink
        role = stringProperty "WM_WINDOW_ROLE"
-- | Default layouts with a 5-unit gap between windows and space reserved
-- for docked bars (struts).
myLayoutHook = smartSpacing 5 $ avoidStruts $ layoutHook defaultConfig
-- | Extra keybindings: screen lock, file manager, browser, and screenshots
-- (Ctrl-Print = interactive region, Print = full screen).
myKeys = [ ((mod4Mask .|. shiftMask, xK_z), spawn "xscreensaver-command -lock")
         , ((mod4Mask .|. shiftMask, xK_o), spawn "urxvt -e ranger")
         , ((mod4Mask .|. shiftMask, xK_f), spawn "firefox")
         , ((controlMask, xK_Print), spawn "sleep 0.2; scrot -s")
         , ((0, xK_Print), spawn "scrot")
         ]
-- | Feed workspace/title status into the xmobar pipe @h@, colorizing the
-- current workspace and truncating long titles.
myLogHook h = dynamicLogWithPP xmobarPP { ppOutput = hPutStrLn h
                                        , ppTitle = xmobarColor blue1 "" . shorten 70
                                        , ppCurrent = xmobarColor blue1 ""
                                        , ppVisible = xmobarColor white1 ""
                                        --, ppHiddenNoWindows = xmobarColor magenta ""
                                        , ppLayout = xmobarColor gray2 ""
                                        }
------------------------ Bottom bar stuff ----------------------
-- | Launch the bottom xmobar (currently unused — see the commented-out
-- startupHook in main).
myStartupHook :: X ()
myStartupHook = do spawn xmobarBottom
-- Paths to the xmobar binary and its two config files (NixOS layout).
bin_xmobar = "/run/current-system/sw/bin/xmobar"
rc_xmobarTop = "/home/bernie/.xmobarrc.hs"
rc_xmobarBottom = "/home/bernie/.xmobarrc_bottom.hs"
xmobarTop = bin_xmobar ++ " " ++ rc_xmobarTop
xmobarBottom = bin_xmobar ++ " " ++ rc_xmobarBottom
------------------------------------------------------------------
-- | Spawn the top xmobar, then start xmonad with the custom hooks, Super as
-- the mod key, urxvt as terminal, and nine named workspaces.
main :: IO ()
main = do
  xmproc <- spawnPipe xmobarTop
  xmonad $ docks defaultConfig
    { manageHook = manageDocks <+> myManageHook <+> manageHook defaultConfig
    , layoutHook = myLayoutHook
    , logHook = myLogHook xmproc
--  , startupHook = myStartupHook
    , modMask = mod4Mask
    , terminal = "urxvt"
    , borderWidth = 1
    , normalBorderColor = gray1
    , focusedBorderColor = blue1
    , workspaces = ["sh1","sh2","sh3","gimp4","pdf5","com6","mpc7","web8","web9"]
    } `additionalKeys` myKeys
|
iambernie/dotfiles
|
.xmonad/xmonad.hs
|
Haskell
|
mit
| 3,536
|
-- | Project Euler #6: difference between the square of the sum and the sum
-- of the squares of the first @boundary@ naturals.
--
-- Fix: the declared @boundary@ was unused — the bound 100 was hard-coded
-- twice in the ranges. Both ranges now use @boundary@, so the constant
-- lives in one place.
sumSquareDifference :: Int
sumSquareDifference =
  let boundary = 100
      sumSquare = sum [i ^ 2 | i <- [1 .. boundary]]
      squareSum = sum [1 .. boundary] ^ 2
  in squareSum - sumSquare
|
samidarko/euler
|
problem006.hs
|
Haskell
|
mit
| 187
|
import qualified Data.Map as Map
-- | Record-syntax demo type (LYAH chapter 7).
data Person = Person { firstName :: String
                     , lastName :: String
                     , age :: Int
                     , height :: Float
                     , phoneNumber :: String
                     , flavor :: String } deriving (Show)
-- | A car, by maker, model and model year.
data Car = Car { company :: String
               , model :: String
               , year :: Int } deriving (Show)

-- | Human-readable one-line description of a 'Car'.
tellCar :: Car -> String
tellCar car =
    "This " ++ company car ++ " " ++ model car ++ " was made in " ++ show (year car)
-- | A 3-component vector over any numeric type.
data Vector a = Vector a a a deriving (Show)

-- | Component-wise addition.
vplus :: (Num a) => Vector a -> Vector a -> Vector a
vplus (Vector a1 a2 a3) (Vector b1 b2 b3) =
    Vector (a1 + b1) (a2 + b2) (a3 + b3)

-- | Scalar (dot) product.
dotProd :: (Num a) => Vector a -> Vector a -> a
dotProd (Vector a1 a2 a3) (Vector b1 b2 b3) =
    a1 * b1 + a2 * b2 + a3 * b3

-- | Multiply every component by a scalar.
vmult :: (Num a) => Vector a -> a -> Vector a
vmult (Vector a1 a2 a3) s = Vector (a1 * s) (a2 * s) (a3 * s)
-- | Minimal person record with Eq/Read in addition to Show.
data UniquePerson = UniquePerson { u_firstName :: String
                                 , u_lastName :: String
                                 , u_age :: Int } deriving (Eq, Show, Read)
-- | Days of the week; derives the full set of standard classes.
data Day = Sunday | Monday | Tuesday | Wednesday | Thursday | Friday | Saturday
           deriving (Eq, Ord, Show, Read, Bounded, Enum)
-- | Phone-book entries as a plain association list.
type PhoneNumber = String
type Name = String
type PhoneBook = [(Name, PhoneNumber)]

-- | Example phone book.
phoneBook :: PhoneBook
phoneBook =
    [ ("betty", "111-2222")
    , ("bonnie", "222-3333")
    , ("patsy", "333-4444")
    , ("lucille", "444-5555")
    , ("wendy", "555-6666")
    , ("penny", "666-7777")
    ]

-- | True when exactly this (name, number) pair appears in the book.
inPhoneBook :: Name -> PhoneNumber -> PhoneBook -> Bool
inPhoneBook name number = elem (name, number)
-- | Whether a locker is occupied.
data LockerState = Taken | Free
    deriving (Show, Eq)
-- | A locker's access code.
type Code = String
-- | Locker number mapped to its state and code.
type LockerMap = Map.Map Int (LockerState, Code)

-- | Return the code for a free locker, or an error message when the locker
-- is unknown or already taken.
lockerLookup :: Int -> LockerMap -> Either String Code
lockerLookup number lockerMap =
    case Map.lookup number lockerMap of
        Nothing -> Left ("Locker " ++ show number ++ " doesn't exist!")
        Just (Taken, _) -> Left ("Locker " ++ show number ++ " is already taken!")
        Just (Free, code) -> Right code
-- | Example locker inventory.
lockers :: LockerMap
lockers = Map.fromList
    [(100, (Taken, "AB111"))
    ,(101, (Free, "BC222"))
    ,(102, (Free, "CD333"))
    ,(105, (Free, "DE444"))
    ,(107, (Taken, "EF555"))
    ,(109, (Taken, "FG666"))]
{- data List a = Empty | Cons a (List a) -}
{- deriving (Show, Read, Eq, Ord) -}
infixr 5 ^++
-- | Hand-rolled list append, identical to (++).
(^++) :: [a] -> [a] -> [a]
xs ^++ ys = foldr (:) ys xs
-- | Unbalanced binary search tree.
data Tree a = EmptyTree | Node a (Tree a) (Tree a) deriving (Show)

-- | A one-node tree.
singleton :: a -> Tree a
singleton v = Node v EmptyTree EmptyTree

-- | BST insert; an equal key replaces the stored value.
treeInsert :: (Ord a) => a -> Tree a -> Tree a
treeInsert v EmptyTree = singleton v
treeInsert v (Node root left right) =
    case compare v root of
        EQ -> Node v left right
        LT -> Node root (treeInsert v left) right
        GT -> Node root left (treeInsert v right)

-- | BST membership test.
treeElem :: (Ord a) => a -> Tree a -> Bool
treeElem _ EmptyTree = False
treeElem v (Node root left right) =
    case compare v root of
        EQ -> True
        LT -> treeElem v left
        GT -> treeElem v right
{- let nums = [8,6,4,1,7,3,5] -}
{- let numsTree = foldr treeInsert EmptyTree nums -}
{- numsTree -}
{- 8 `treeElem` numsTree -}
{- class Eq a where -}
{- (==) :: a -> a -> Bool -}
{- (/=) :: a -> a -> Bool -}
{- x == y = not (x /= y) -}
{- x /= y = not (x == y) -}
-- | Hand-written (not derived) Eq and Show instances, as a typeclass demo.
data TrafficLight = Red | Yellow | Green
instance Eq TrafficLight where
    Red == Red = True
    Green == Green = True
    Yellow == Yellow = True
    _ == _ = False
instance Show TrafficLight where
    show Red = "Red Light"
    show Yellow = "Yellow Light"
    show Green = "Green Light"
-- | JavaScript-style truthiness: zero / empty / False are "no".
class YesNo a where
    yesno :: a -> Bool
instance YesNo Int where
    yesno 0 = False
    yesno _ = True
instance YesNo [a] where
    yesno [] = False
    yesno _ = True
instance YesNo Bool where
    yesno = id
-- | 'Just' is truthy, 'Nothing' is falsy.
-- BUG FIX: the instance head read @instance YesNo [Maybe a]@, which cannot
-- match the clauses below (they pattern-match on 'Maybe', not on a list)
-- and would also overlap the @YesNo [a]@ instance above.  The head must be
-- @Maybe a@, matching the usage examples (e.g. @yesno $ Just 0@).
instance YesNo (Maybe a) where
    yesno (Just _) = True
    yesno Nothing = False
-- | An empty tree is falsy; a red light is falsy.
instance YesNo (Tree a) where
    yesno EmptyTree = False
    yesno _ = True
instance YesNo TrafficLight where
    yesno Red = False
    yesno _ = True
{- yesno $ length [] -}
{- yesno $ "haha" -}
{- yesno $ "" -}
{- yesno $ Just 0 -}
{- yesno $ Tree -}
{- yesno EmptyTree -}
{- yesno [] -}
{- yesno [0,0,0] -}
-- | Branch on the truthiness of any 'YesNo' value.
yesnoIf :: (YesNo y) => y -> a -> a -> a
yesnoIf cond yesResult noResult =
    if yesno cond then yesResult else noResult
{- yesnoIf [] "YEAH!" "NO!" -}
{- class Functor f where -}
{- fmap :: (a -> b) -> f a -> f b -}
{- instance Functor [] where -}
{- fmap = map -}
{- instance Functor Maybe where -}
{- fmap f (Just x) = Just (f x) -}
{- fmap f Nothing = Nothing -}
|
yhoshino11/learning_haskell
|
ch7/ch7.hs
|
Haskell
|
mit
| 4,769
|
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
module Language.Jass.Codegen.Type(
toLLVMType
, toLLVMType'
, defaultValue
, jassArray
, arraySize
, sizeOfType
, codeTypeSize
, codeTypeStruct
, pointerSize
, getFunctionType
, getFunctionArgumentsTypes
, getFunctionReturnType
, isIntegralType
, isStringType
, getReference
, getCallableLLVMType
, getTypeId
, getTypeFromId
, TypesMap
, getTypeMap
, module SemType
) where
import Language.Jass.JassType
import Language.Jass.Parser.AST.Parameter as AST
import Language.Jass.Codegen.Context
import Language.Jass.Semantic.Type as SemType
import LLVM.General.AST as LLVM
import LLVM.General.AST.Type as LLVMType
import LLVM.General.AST.Constant as LLVM
import LLVM.General.AST.Float
import LLVM.General.AST.DataLayout
import LLVM.General.AST.AddrSpace
import Control.Arrow
import Control.Monad.Except
import Language.Jass.Semantic.Callable
import Language.Jass.Semantic.Variable
import qualified Data.Map.Lazy as ML
import qualified Data.HashMap.Strict as HM
-- | Default array size
-- Every jass array is allocated with this fixed element count (2^16).
arraySize :: Num a => a
arraySize = 65536
-- | Returns reference to local or global variable
-- Looks the name up in the codegen context and yields the variable's LLVM
-- pointer type paired with an operand referencing it: a 'GlobalReference'
-- for globals, a 'LocalReference' otherwise.  Array variables are typed as
-- pointers to the whole array type.  Throws an ICE semantic error when the
-- name is unknown.
getReference :: String -> Codegen (LLVM.Type, LLVM.Operand)
getReference name = do
  mvar <- getVariable name
  case mvar of
    Just var -> do
      varType <- ptr <$> if isVarArray var then toLLVMType (JArray $ getVarType var) else toLLVMType (getVarType var)
      return (varType, if isGlobalVariable var then LLVM.ConstantOperand $ LLVM.GlobalReference varType (LLVM.Name name) else LLVM.LocalReference varType (LLVM.Name name))
    Nothing -> throwError $ unplacedSemError $ "ICE: cannot find variable " ++ name
-- | Builds the LLVM function type for a callable: converts the (possibly
-- absent, i.e. void) return type and every declared parameter type.
-- The resulting function type is never varargs.
getCallableLLVMType :: Callable -> Codegen LLVM.Type
getCallableLLVMType callable = do
  retTy  <- toLLVMType' (getCallableReturnType callable)
  argTys <- traverse (toLLVMType . getParamType) (getCallableParameters callable)
  return $ FunctionType retTy argTys False
-- | Converts jass type to LLVM type
-- Builtins map to fixed-width scalars; strings are C-style @i8*@, code
-- values point at 'codeTypeStruct', arrays are fixed at 'arraySize'
-- elements, and user-defined types are resolved to their builtin root.
toLLVMType :: JassType -> Codegen Type
toLLVMType JInteger = return i32
toLLVMType JReal = return float
toLLVMType JBoolean = return i1
toLLVMType JString = return $ ptr i8
toLLVMType JHandle = return i64
toLLVMType JCode = return $ ptr codeTypeStruct
toLLVMType (JArray et) = ArrayType arraySize <$> toLLVMType et
toLLVMType t@(JUserDefined _) = toLLVMType =<< getRootType t
toLLVMType JNull = throwError $ unplacedSemError "ICE: cannot generate code for special type JNull"
-- | Size in bytes of a value of the given jass type; arrays weigh
-- 'arraySize' times their element size, pointers weigh 'pointerSize'.
sizeOfType :: JassType -> Codegen Int
sizeOfType JInteger = return 4
sizeOfType JReal = return 4
sizeOfType JBoolean = return 1
sizeOfType JString = return pointerSize
sizeOfType JHandle = return 8
sizeOfType JCode = return codeTypeSize
sizeOfType (JArray et) = (arraySize *) <$> sizeOfType et
sizeOfType t@(JUserDefined _) = sizeOfType =<< getRootType t
sizeOfType JNull = throwError $ unplacedSemError "ICE: cannot generate code for special type JNull"
-- | Internal representation of code value
-- Layout must stay in sync with 'codeTypeSize' below.
codeTypeStruct :: Type
codeTypeStruct = StructureType {
  LLVMType.isPacked = False,
  elementTypes = [
    ptr i8 -- function pointer
  , i32 -- id of return type, 0 for nothing
  , i32 -- count of arguments
  , ptr i32 -- array of arguments types as ids
  ]
  }
-- | Returns size of code value internal representation
-- Sum of the four fields of 'codeTypeStruct' (two pointers, two i32s);
-- does not account for any struct padding.
codeTypeSize :: Int
codeTypeSize = pointerSize + 4 + 4 + pointerSize
-- | Size of pointer in jass code
-- Taken from the target data layout for address space 0.
pointerSize :: Int
pointerSize = fromIntegral $ fst (pointerLayouts jassDataLayout ML.! AddrSpace 0)
-- | Ditto, including void type
-- 'Nothing' (no return type) becomes LLVM 'VoidType'.
toLLVMType' :: Maybe JassType -> Codegen Type
toLLVMType' = maybe (return VoidType) toLLVMType
-- | Returns default value for a type
-- Numerics default to zero, strings and arrays to null; code values have
-- no default and JNull is not a generatable type, so both are ICE errors.
defaultValue :: JassType -> Codegen Constant
defaultValue JInteger = return $ Int 32 0
defaultValue JReal = return $ Float (Single 0.0)
defaultValue JBoolean = return $ Int 1 0
defaultValue JString = return $ Null (ptr i8)
defaultValue JHandle = return $ Int 64 0
defaultValue JCode = throwError $ unplacedSemError "ICE: no default value for code value"
defaultValue t@(JArray _) = Null <$> toLLVMType t
defaultValue t@(JUserDefined _) = defaultValue =<< getRootType t
defaultValue JNull = throwError $ unplacedSemError "ICE: cannot generate code for special type JNull"
-- | Generates array type from element LLVM type
-- Fixed-length LLVM array of 'arraySize' elements.
jassArray :: Type -> Type
jassArray = ArrayType arraySize
-- | Returns LLVM type of a function
-- Looks the callable up by name; unknown names are ICE errors.  The
-- resulting type is never varargs.
getFunctionType :: String -> Codegen LLVM.Type
getFunctionType name = do
  callable <- getCallable name
  case callable of
    Nothing -> throwError $ unplacedSemError $ "ICE: cannot find function " ++ name
    Just fn -> do
      retType <- toLLVMType' $ getCallableReturnType fn
      pars <- mapM convertPars $ getCallableParameters fn
      return $ FunctionType retType pars False
  where
    -- Only the parameter's type matters for the LLVM signature.
    convertPars (AST.Parameter _ t _) = toLLVMType t
-- | Converts callable arguments types to LLVM types
getFunctionArgumentsTypes :: String -> Codegen [LLVM.Type]
getFunctionArgumentsTypes name = do
  callable <- getCallable name
  case callable of
    Nothing -> throwError $ unplacedSemError $ "ICE: cannot find function " ++ name
    Just fn -> mapM convertPars $ getCallableParameters fn
  where
    convertPars (AST.Parameter _ t _) = toLLVMType t
-- | Returns function return type in LLVM typesystem
-- A callable without a declared return type yields 'VoidType'.
getFunctionReturnType :: String -> Codegen LLVM.Type
getFunctionReturnType name = do
  callable <- getCallable name
  case callable of
    Nothing -> throwError $ unplacedSemError $ "ICE: cannot find function " ++ name
    Just fn -> maybe (return VoidType) toLLVMType $ getCallableReturnType fn
-- | True iff the LLVM type is an integer type of any bit width.
isIntegralType :: LLVM.Type -> Bool
isIntegralType ty = case ty of
  IntegerType _ -> True
  _             -> False
-- | True iff the LLVM type is the representation used for jass strings:
-- a pointer to i8 (the pointer's address-space field is ignored).
isStringType :: LLVM.Type -> Bool
isStringType ty = case ty of
  PointerType (IntegerType 8) _ -> True
  _                             -> False
-- | Returns jass type id, custom types should be registered before the function is called
-- Id encoding: 0 = no type (void), 1..6 = builtins, 256+n = array of the
-- type with id n, 512+n = user-defined type with custom id n.
getTypeId :: Maybe JassType -> Codegen Int
getTypeId Nothing = return 0
getTypeId (Just JInteger) = return 1
getTypeId (Just JReal) = return 2
getTypeId (Just JBoolean) = return 3
getTypeId (Just JString) = return 4
getTypeId (Just JHandle) = return 5
getTypeId (Just JCode) = return 6
getTypeId (Just (JArray et)) = (256 +) <$> getTypeId (Just et)
getTypeId (Just (JUserDefined n)) = (512 +) <$> getCustomTypeId n
getTypeId (Just JNull) = throwError $ unplacedSemError "ICE: cannot generate code for special type JNull"
-- | Returns jass type by runtime id, custom types should be registered before the function is called
-- Inverse of 'getTypeId'; unknown ids in (6, 256] raise an ICE error.
getTypeFromId :: Int -> Codegen (Maybe JassType)
getTypeFromId 0 = return Nothing
getTypeFromId 1 = return $ Just JInteger
getTypeFromId 2 = return $ Just JReal
getTypeFromId 3 = return $ Just JBoolean
getTypeFromId 4 = return $ Just JString
getTypeFromId 5 = return $ Just JHandle
getTypeFromId 6 = return $ Just JCode
getTypeFromId n
  | n > 512 = Just . JUserDefined <$> getCustomTypeFromId (n - 512)
  | n > 256 = fmap JArray <$> getTypeFromId (n - 256)
  | otherwise = throwError $ unplacedSemError $ "ICE: unknown id of type '" ++ show n ++ "'"
-- | Two-way lookup between runtime ids and jass types: (id -> type, type -> id).
type TypesMap = (HM.HashMap Int JassType, HM.HashMap JassType Int)
-- | Builds both id\<->type maps for the builtin types plus all currently
-- registered user-defined types.
getTypeMap :: Codegen TypesMap
getTypeMap = do
  let ts = [JInteger, JReal, JBoolean, JString, JHandle, JCode]
  basic <- mapM getTypeId (fmap Just ts)
  custom <- second (HM.fromList . fmap (first JUserDefined) . HM.toList) <$> first (fmap JUserDefined) <$> getCustomTypes
  return $ (
    HM.fromList (basic `zip` ts) `HM.union` (fst custom),
    HM.fromList (ts `zip` basic) `HM.union` (snd custom))
|
NCrashed/hjass
|
src/library/Language/Jass/Codegen/Type.hs
|
Haskell
|
mit
| 7,842
|
{-# LANGUAGE RecordWildCards #-}
module Interactive.Pipeline
( run_pipeline, Params' (..), Params, pos )
where
-- Modules
--import Browser
import Document.Document
import Documentation.SummaryGen
import Utilities.Config hiding ( wait )
import Interactive.Observable
import Interactive.Serialize
import Logic.Expr
import UnitB.UnitB
import Z3.Z3
( discharge
, Validity ( .. ) )
-- Libraries
import Control.DeepSeq
import Control.Concurrent
import Control.Concurrent.STM
import Control.Lens
import Control.Exception
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.Either
import Control.Monad.Trans.State
import Control.Precondition
import Data.Char
import qualified Data.List as L
import Data.Map as M
( insert, keys
, toList, unions )
import qualified Data.Map as M
import GHC.Generics (Generic)
import System.Console.ANSI
import System.Directory
import System.Process
import Text.Printf.TH
import Utilities.Syntactic
import Utilities.TimeIt
-- The pipeline is made of three processes:
-- o the parser
-- o the prover
-- o the display
--
-- The prover and the parser _share_ a map of proof obligations
-- The prover and the parser _share_ a list of PO labels
-- The
-- | State shared between the pipeline's concurrent processes (parser,
-- prover, display, serializer, ...).  Mutable fields are 'Observable's so
-- each process can block on changes published by the others.
data Shared = Shared
        { working :: Observable Int            -- ^ count of discharge tasks in flight
        , system :: Observable System          -- ^ last successfully parsed system
        , error_list :: Observable [Error]     -- ^ parse/semantic errors to display
        , pr_obl :: Observable (M.Map Key (Seq,Maybe Bool))
            -- ^ proof obligations; Nothing = not yet tried, Just b = result
        , fname :: FilePath                    -- ^ source file being watched
        , exit_code :: MVar ()
        , parser_state :: Observable ParserState
        , focus :: Observable (Maybe String)   -- ^ optional filter on displayed POs
        , dump_cmd :: Observable (Maybe DumpCmd)
        , redraw :: Observable Bool            -- ^ toggled to force a repaint
        }
-- | Whether the parser is currently running; 'Idle' carries the duration
-- (in seconds) of the last parse.
data ParserState = Idle Double | Parsing
    deriving Eq
type Params = Params' (M.Map Label (M.Map Label (Bool,Seq)))
-- | Command-line / invocation parameters for 'run_pipeline'.
data Params' pos = Params
        { path :: FilePath
        , verbose :: Bool
        , continuous :: Bool
        , no_dump :: Bool
        , no_verif :: Bool       -- ^ when True the prover process is not started
        , reset :: Bool
        , _pos :: pos
        , init_focus :: Maybe String
        } deriving (Generic)
makeLenses ''Params'
instance NFData Params where
instance Show ParserState where
    show (Idle x) = [s|Idle %sms|] $ show $ round $ x * 1000
    show Parsing = "Parsing"
-- | Parser process: parses the watched file once, then polls its
-- modification time every 250ms and re-parses on change.  On success it
-- publishes the system, clears the error list and merges the new proof
-- obligations into 'pr_obl' (keeping previous results for unchanged
-- sequents); on failure it publishes the errors.
parser :: Shared
       -> IO (IO ())
parser (Shared { .. }) = return $ do
        t <- getModificationTime fname
        write_obs parser_state Parsing
        (dt,()) <- timeItT $ parse
        write_obs parser_state (Idle dt)
        -- State is the last seen modification time.
        evalStateT (forever $ do
            liftIO $ do
                threadDelay 250000
            t0 <- get
            t1 <- liftIO $ getModificationTime fname
            if t0 == t1 then return ()
            else do
                put t1
                liftIO $ do
                    write_obs parser_state Parsing
                    (t,()) <- timeItT parse
                    write_obs parser_state (Idle t)
            ) t
    where
        -- Key each machine's POs by (machine name, PO label).
        f m = return $ M.mapKeys (g $ _name m) $ proof_obligation m
        -- return $ fromList $ map (g $ _name m) $ toList $ x
        g lbl x = (lbl,x)
        h lbl (x,y) = ((lbl,x),y)
        parse = do
            xs <- liftIO $ runEitherT $ do
                s <- EitherT $ parse_system fname
                ms <- hoistEither $ mapM f $ M.elems $ s!.machines
                pos <- hoistEither $ mapM theory_po $ M.elems $ s!.theories
                let cs = M.fromList $ map (uncurry h) $ do
                        (x,ys) <- zip (map label (s!.theories.to keys)) pos
                        y <- toList ys
                        return (x,y)
                -- Force the results now so errors surface here, not later.
                liftIO $ evaluate (ms, cs, s)
            case xs of
                Right (ms,cs,s) -> do
                    let new_pos = unions (cs : map (M.mapKeys $ over _1 as_label) ms) :: M.Map Key Seq
                        -- Keep the old result when the sequent is unchanged,
                        -- otherwise take the new (untried) sequent.
                        f (s0,b0) (s1,b1)
                            | s0 == s1 = (s0,b0)
                            | otherwise = (s1,b1)
                        g s = (s, Nothing)
                    write_obs_fast system s
                    write_obs error_list []
                    modify_obs_fast pr_obl $ \pos -> do
                        evaluate $ M.unionWith f (pos `M.intersection` new_pos) (M.map g new_pos)
                    return ()
                Left es -> do
                    write_obs error_list es
                    return ()
-- | Prover process: starts 40 pinned worker threads fed by a bounded
-- queue, then, whenever 'pr_obl' changes, enqueues every obligation whose
-- result is still 'Nothing'.  Workers discharge sequents with Z3 and write
-- the verdict back into 'pr_obl'; the 'working' counter tracks activity.
prover :: Shared -> IO (IO ())
prover (Shared { .. }) = do
        tok <- newEmptyMVar
        observe pr_obl tok
        -- req <- newEmptyMVar
        req <- newTBQueueIO 20
        forM_ [1..40] $ \p -> forkOn p $ worker req
        return $ forever $ do
            takeMVar tok
            inc 1
            po <- read_obs pr_obl
            forM_ (keys po) $ \k -> do
                -- Re-read per key: a worker may have finished it meanwhile.
                po <- reads_obs pr_obl $ M.lookup k
                case po of
                    Just (po,Nothing) -> do
                        liftIO $ atomically $ writeTBQueue req (k,po)
                    -- liftIO $ putMVar req (k,po)
                    _ -> return ()
            dec 1
    where
        inc x = modify_obs working (return . (+x))
        dec x = modify_obs working (return . (+ (-x)))
        -- handler ::
        -- On an ErrorCall from the prover, request a dump of the offending
        -- PO and re-raise as an IO failure.
        handler lbl@(_,x) (ErrorCall msg) = do
                write_obs dump_cmd $ Just $ Only x
                fail ([s|During %s: %s|] (pretty lbl) msg)
        worker req = forever $ do
            -- (k,po) <- takeMVar req
            (k,po) <- atomically $ readTBQueue req
            let k' = uncurry (</>) k
            inc 1
            r <- catch (discharge k' po) (handler k)
            dec 1
            modify_obs pr_obl $ return . insert k (po,Just $ r == Valid)
-- | Render the failing proof obligations (plus errors and a working
-- indicator) as display lines; successes are hidden.
proof_report :: Maybe String
    -> M.Map Key (Seq,Maybe Bool)
    -> [Error] -> Bool
    -> [String]
proof_report = proof_report' False
-- | Like 'proof_report' but optionally including successful POs.  The
-- optional pattern restricts the listing to matching PO names and adds a
-- header/footer with a hidden-count summary.
proof_report' :: Bool
    -> Maybe String
    -> M.Map Key (Seq,Maybe Bool)
    -> [Error] -> Bool
    -> [String]
proof_report' showSuccess pattern outs es isWorking =
        header ++
        ys ++
        ( if null es then []
          else "> errors" : map report es ) ++
        footer ++
        [ if isWorking
          then "> working ..."
          else " "
        ]
    where
        -- NB: 'head'/'foot' here are local helpers, shadowing Prelude.head.
        header = maybe [] head pattern
        footer = maybe [] foot pattern
        head pat =
            [ "#"
            , "# Restricted to " ++ pat
            , "#"
            ]
        foot _ =
            [ [s|# hidden: %d failures|] (length xs - length ys)
            ]
        -- xs: indexed failures (or everything when showSuccess);
        -- ys: the subset whose PO label matches the focus pattern.
        xs = filter (failure . snd) (zip [0..] $ M.toAscList outs)
        ys = map f $ filter (match . snd) xs
        match xs = maybe True (\f -> f `L.isInfixOf` map toLower (show $ snd $ fst xs)) pattern
        failure :: (a,(b,Maybe Bool)) -> Bool
        failure x
            | showSuccess = True
            | otherwise   = maybe False not $ snd $ snd x
        f (n,((m,lbl),(_,_))) = [s| x %s - %s (%d)|] (pretty m) (pretty lbl) n
-- | Execute each set-up action in order, then fork every worker loop it
-- produced onto its own thread, collecting the spawned thread ids.
run_all :: [IO (IO ())] -> IO [ThreadId]
run_all setups = do
        loops <- sequence setups
        forM loops forkIO
-- | Display process: repaints the terminal report whenever any observed
-- piece of state changes (and at most every 500ms).  Uses ANSI cursor
-- movement to redraw in place rather than clearing the whole screen.
display :: Shared
        -> IO (IO ())
display (Shared { .. }) = do
        tok <- newEmptyMVar
        observe pr_obl tok
        observe error_list tok
        observe working tok
        observe parser_state tok
        observe focus tok
        observe redraw tok
        observe dump_cmd tok
        clearScreen
        return $ forever $ do
            outs <- read_obs pr_obl
            es <- read_obs error_list
            w <- read_obs working
            fil <- read_obs focus
            let ys = proof_report fil outs es (w /= 0)
            cursorUpLine $ length ys
            clearFromCursorToScreenBeginning
            forM_ ys $ \x -> do
                let lns = lines x
                forM_ lns $ \x -> do
                    putStr x
                    -- Clear the tail of the line so shorter lines do not
                    -- leave stale characters from the previous frame.
                    clearFromCursorToLineEnd
                    putStrLn ""
            -- Status line: untried = POs with no verdict yet.
            let u = M.size $ M.filter (isNothing.snd) outs
            st <- read_obs parser_state
            du <- isJust `liftM` read_obs dump_cmd
            putStr $ [s|number of workers: %d; untried: %d; parser: %s; dumping: %s|] w u (show st) (show du)
            clearFromCursorToLineEnd
            -- hFlush stdout
            putStrLn ""
            -- cursorDown 1
            -- putStr "-salut-"
            threadDelay 500000
            takeMVar tok
-- | Serializer process: every time the obligations change (throttled to
-- one write per 10s) rewrites the "<fname>.report" file with a full
-- report including successes.
serialize :: Shared -> IO (IO ())
serialize (Shared { .. }) = do
        tok <- newEmptyMVar
        observe pr_obl tok
        return $ forever $ do
            threadDelay 10000000
            takeMVar tok
            pos <- read_obs pr_obl
            let out = pos
            -- (pos@(out,_),es) <- takeMVar ser
            es <- read_obs error_list
            -- dump_pos fname pos
            writeFile
                (fname ++ ".report")
                (unlines $ proof_report' True Nothing out es False)
-- | Dump process: when a dump command is requested, writes the selected
-- proof obligations out as Z3 files and resets the command to Nothing.
dump :: Shared -> IO (IO b)
dump (Shared { .. }) = do
        tok <- newEmptyMVar
        observe dump_cmd tok
        return $ forever $ do
            takeMVar tok
            pat <- read_obs dump_cmd
            case pat of
                Just pat -> do
                    pos <- read_obs pr_obl
                    dump_z3 pat fname pos
                    write_obs dump_cmd Nothing
                Nothing -> return ()
-- | PDF process: re-runs pdflatex on the source file (at most every 5s)
-- whenever the system or error list changes.
pdfLatex :: Shared -> IO (IO ())
pdfLatex (Shared { .. }) = do
        v <- newEmptyMVar
        observe system v
        observe error_list v
        return $ forever $ do
            threadDelay 5000000
            takeMVar v
            readProcess "pdflatex" [fname] ""
-- | Summary process: regenerates the documentation summaries (at most
-- every 10s) whenever the parsed system changes.
summary :: Shared -> IO (IO ())
summary (Shared { .. }) = do
        v <- newEmptyMVar
        observe system v
        return $ forever $ do
            threadDelay 10000000
            takeMVar v
            s <- read_obs system
            produce_summaries fname s
-- | Interactive command loop; reads one command per line and loops until
-- "quit".  Commands: goto, resetall, retry, unfocus, dump <n>, dumpfail,
-- dumpall, focus <pat>.
-- NOTE(review): most commands compare against the lower-cased input @xs'@,
-- but "dumpfail"/"dumpall" compare against the raw @xs@ — so those two are
-- case-sensitive; confirm whether that is intentional.
keyboard :: Shared -> IO ()
keyboard sh@(Shared { .. }) = do
        -- Toggle redraw so the display repaints around the prompt.
        modify_obs redraw $ return . not
        xs <- getLine
        let xs' = map toLower xs
            ws = words xs'
        if xs' == "quit"
        then return ()
        else do
            if xs' == "goto" then do
                -- Open an editor at the location of the first error.
                xs <- read_obs error_list
                case xs of
                    (Error _ (LI fn i _)):_ -> do
                        open_at i fn
                    (MLError _ ((_,LI fn i _):|_)):_ -> do
                        open_at i fn
                    [] -> return ()
            else if xs' == "resetall" then do
                -- Forget every verdict so all POs are re-proved.
                modify_obs pr_obl $ \m ->
                    return $ M.map (\(x,_) -> (x,Nothing)) m
            else if xs' == "retry" then do
                -- Forget only the failed verdicts.
                let f (Just False) = Nothing
                    f x = x
                modify_obs pr_obl $ \m ->
                    return $ m & traverse._2 %~ f
            else if xs' == "unfocus" then do
                write_obs focus Nothing
            else if take 1 ws == ["dump"]
                    && length ws == 2
                    && all isDigit (ws ! 1) then do
                -- Dump the n-th PO (by key order), unless a dump is pending.
                modify_obs dump_cmd $ \st -> do
                    if isNothing st then do
                        pos <- read_obs pr_obl
                        return $ Just $ Only $ snd $ keys pos ! (read $ ws ! 1)
                    else return Nothing
            else if xs == "dumpfail" then do
                modify_obs dump_cmd $ \st -> do
                    if isNothing st then
                        return $ Just AllFailed
                    else return st
            else if xs == "dumpall" then do
                modify_obs dump_cmd $ \st -> do
                    if isNothing st then
                        return $ Just All
                    else return st
            else if take 1 ws == ["focus"] && length ws == 2 then do
                write_obs focus $ Just (ws ! 1)
            else do
                putStrLn $ [s|Invalid command: '%s'|] xs
            keyboard sh
-- | Entry point: builds the shared state, spawns all pipeline processes,
-- runs the keyboard loop until "quit", then kills the workers and saves
-- the proof-obligation results back to disk.
run_pipeline :: FilePath -> Params -> IO ()
run_pipeline fname (Params {..}) = do
        system <- new_obs empty_system
        working <- new_obs 0
        error_list <- new_obs []
        exit_code <- newEmptyMVar
        -- Reload previously saved PO results, if any.
        m <- load_pos fname M.empty
        pr_obl <- new_obs m
        parser_state <- new_obs (Idle 0)
        focus <- new_obs init_focus
        dump_cmd <- new_obs Nothing
        redraw <- new_obs True
        -- NOTE(review): capability count is hard-coded to 8 here even
        -- though 'prover' pins 40 workers — confirm this is deliberate.
        setNumCapabilities 8
        let sh = Shared { .. }
        ts <- run_all $
            [ summary sh
            -- , prover sh -- (M.map f m)
            , serialize sh
            , parser sh
            , dump sh
            , display sh
            , pdfLatex sh
            ] ++
            -- The prover is only started unless --no-verif was given.
            (guard (not no_verif) >> [prover sh])
        keyboard sh
        putStrLn "received a 'quit' command"
        mapM_ killThread ts
        pos <- read_obs pr_obl
        dump_pos fname pos
        -- return sh
--type Verifier = StablePtr (Shared)
--
----run_verifier :: CString -> IO Verifier
----run_verifier fname = do
---- fname <- peekCString fname
---- sh <- run_pipeline fname
---- newStablePtr sh
--
---- merr <- gets error_msg
---- mpos <- gets failed_po
---- liftIO $ swapMVar mpos $ concatMap g $ toList res
---- g ((x,y),(p,b))
---- | not b = [Ref fname (show y) (1,1)]
---- | otherwise = []
--
--get_error_list :: Verifier -> IO CErrList
--get_error_list v = do
-- Shared { .. } <- deRefStablePtr v
-- err <- read_obs error_list
-- let xs = map f err
-- r <- newIORef (RL [] xs)
-- newStablePtr r
-- where
-- f (Error x (LI fname i j)) = Ref fname x (i,j)
--
--get_proof_obligations :: Verifier -> IO CErrList
--get_proof_obligations v = do
-- Shared { .. } <- deRefStablePtr v
-- pos <- read_obs pr_obl
-- let ys = concatMap (g fname) $ toList pos
-- r <- newIORef (RL [] ys)
-- newStablePtr r
-- where
-- g fname ((_,y),(_,b))
-- | b == Just False = [Ref fname (show y) (1,1)]
-- | otherwise = []
|
literate-unitb/literate-unitb
|
src/Interactive/Pipeline.hs
|
Haskell
|
mit
| 14,380
|
module System.Flannel.CommandSpec
( spec
) where
import System.Flannel.Command
import qualified System.Flannel.Params as P
import Test.Hspec
-- | Hspec suite for System.Flannel.Command: exercises each reader
-- combinator (isSet, getOption, getArg, getRemaining) against params
-- built with the System.Flannel.Params setters, plus the IO runner.
spec :: Spec
spec = do
    describe "runCommand" $ do
        it "executes the command" $ do
            result <- runCommand P.defaultParams $ isSet "no set"
            result `shouldBe` False
    describe "isSet" $ do
        let params = P.setFlag "good" P.defaultParams
        context "when the flag is set" $ do
            it "returns True" $ do
                result <- runCommand params $ isSet "good"
                result `shouldBe` True
        context "when the flag is not set" $ do
            it "returns False" $ do
                result <- runCommand params $ isSet "bad"
                result `shouldBe` False
    describe "getOption" $ do
        let params = P.setOption "good" "alpha" P.defaultParams
        context "when the option is set" $ do
            it "returns the value" $ do
                result <- runCommand params $ getOption "good"
                result `shouldBe` Just "alpha"
        context "when the option is not set" $ do
            it "returns Nothing" $ do
                result <- runCommand params $ getOption "bad"
                result `shouldBe` Nothing
    describe "getArg" $ do
        let params = P.setArg "good" "1" P.defaultParams
        context "when the arg is set" $ do
            it "returns the value" $ do
                result <- runCommand params $ getArg "good"
                result `shouldBe` Just "1"
        context "when the arg is not set" $ do
            it "returns Nothing" $ do
                result <- runCommand params $ getArg "bad"
                result `shouldBe` Nothing
    describe "getRemaining" $ do
        let params = P.addRemaining ["1", "2"] P.defaultParams
        it "returns the remaining arguments" $ do
            result <- runCommand params getRemaining
            result `shouldBe` ["1", "2"]
    describe "run" $ do
        it "executes the IO action" $ do
            -- Reads the repo's own LICENSE file; requires running from the
            -- project root.
            res <- runCommand P.defaultParams . run $ do
                fmap (head . lines) $ readFile "LICENSE"
            res `shouldBe` "The MIT License (MIT)"
|
nahiluhmot/flannel
|
spec/System/Flannel/CommandSpec.hs
|
Haskell
|
mit
| 2,205
|
{-# LANGUAGE OverloadedStrings #-}
module PillsHs.Config where
import Clay
-- the maximum width of the container in pixels
pillsMaxWidth :: Size Abs
pillsMaxWidth = px 1024
-- maximum width of the "wide" container variant, in pixels
pillsWideMaxWidth :: Size Abs
pillsWideMaxWidth = px 1180
-- maximum width of the "wider" container variant, in pixels
pillsWiderMaxWidth :: Size Abs
pillsWiderMaxWidth = px 1366
-- the minimum width of the container in pixels, before it switches to a mobile friendly display
pillsMinWidth :: Size Abs
pillsMinWidth = px 599
-- the padding that will be applied to both sides of a column in pixels, also known as gutter
pillsPaddingWidth :: Double
pillsPaddingWidth = 10
|
polo2ro/pillshs
|
src/PillsHs/Config.hs
|
Haskell
|
mit
| 588
|
module Lambency.UI (
UIWire, WidgetEvent(..), WidgetState(..), Widget(..), screen,
animatedSpriteRenderer, spriteRenderer, colorRenderer,
textRenderer, dynamicTextRenderer,
combineRenderers,
hbox, vbox, glue
) where
--------------------------------------------------------------------------------
import Control.Monad.Reader
import Control.Wire hiding ((.))
import Data.Word
import FRP.Netwire.Input
import qualified Graphics.UI.GLFW as GLFW
import Lambency.Font
import Lambency.GameObject
import Lambency.Sprite
import Lambency.Types
import Linear hiding (trace, identity)
import Prelude hiding (id)
import qualified Yoga as Y
--------------------------------------------------------------------------------
-- | A UI wire is a game wire that additionally receives the widget's
-- computed Yoga layout alongside the ordinary input value.
type UIWire a b = GameWire (Y.LayoutInfo, a) b
-- | Events a widget can react to; each carries the wire to step while the
-- event condition holds.
data WidgetEvent a b
  = WidgetEvent'OnMouseOver {
    eventLogic :: UIWire a b
  }
  | WidgetEvent'OnMouseDown {
    _eventMouseButton :: GLFW.MouseButton,
    eventLogic :: UIWire a b
  }
  | WidgetEvent'OnKeyDown {
    _eventKey :: GLFW.Key,
    eventLogic :: UIWire a b
  }
-- | A widget's behavior: the wire to run when no event fires, plus the
-- list of event handlers (tried before the idle wire).
data WidgetState a b = WidgetState {
  idleLogic :: UIWire a b,
  eventHandlers :: [WidgetEvent a b]
}
-- | State that does nothing: an idle wire that constantly produces
-- 'mempty' and no event handlers.
blankState :: Monoid b => WidgetState a b
blankState = WidgetState (ignoreFst $ mkConst (Right mempty)) []
-- | A widget is a Yoga layout tree whose nodes carry widget state.
newtype Widget a b = Widget { getWidgetLayout :: Y.Layout (WidgetState a b) }
-- | Per-node render step used by 'widgetWire': runs every event handler
-- for this widget, combining the outputs of all that fire with 'mappend';
-- when none fires, falls back to the idle wire.  Returns the produced
-- value together with the updated widget state (stepped wires).
widgetRenderFn :: Monoid b =>
                  TimeStep -> a -> Y.LayoutInfo -> WidgetState a b ->
                  GameMonad (b, WidgetState a b)
widgetRenderFn dt input lytInfo widgetState =
  let -- Wrap an event's logic wire with its triggering condition.
      eventWire :: WidgetEvent a b -> UIWire a b
      eventWire (WidgetEvent'OnMouseDown mb uiw) =
        second (mousePressed mb) >>> uiw
      eventWire (WidgetEvent'OnKeyDown key uiw) =
        second (keyPressed key) >>> uiw
      eventWire e@(WidgetEvent'OnMouseOver uiw) = mkGen $ \dt' (lyt, ipt) -> do
        -- NOTE(review): partial pattern — assumes mouseCursor never
        -- inhibits; confirm against netwire-input's behavior.
        (Right (mx, my), _) <- stepWire mouseCursor dt' $ Right undefined
        (V2 wx wy) <- windowSize <$> ask
        -- Widget bounds and cursor position, both normalized to [0,1].
        let bx0 = Y.nodeLeft lytInfo / fromIntegral wx
            bx1 = (bx0 + Y.nodeWidth lytInfo) / fromIntegral wx
            by0 = Y.nodeTop lytInfo / fromIntegral wy
            by1 = (by0 + Y.nodeHeight lytInfo) / fromIntegral wy
            x = (mx + 1.0) * 0.5
            y = (my + 1.0) * 0.5
        if x >= bx0 && x <= bx1 && y >= by0 && y <= by1 then
          do
            (result, uiw') <- stepWire uiw dt' $ Right (lyt, ipt)
            return (result, eventWire (WidgetEvent'OnMouseOver uiw'))
          else return (Left "Mouse out of bounds", eventWire e)
      -- Step one handler; Nothing when its condition did not fire.
      handleEvent :: WidgetEvent a b -> (Y.LayoutInfo, a) ->
                     GameMonad (Maybe (b, WidgetEvent a b))
      handleEvent event arg = do
        (result, uiw') <- stepWire (eventWire event) dt $ Right arg
        case result of
          Left _ -> return Nothing
          Right x -> return $ Just (x, event { eventLogic = uiw' })
      -- Fold over all handlers, mappending the outputs of those that fired.
      -- (Handlers accumulate in reverse list order here.)
      handleEvents :: Monoid b => [WidgetEvent a b] -> (Y.LayoutInfo, a) ->
                      GameMonad (Maybe b, [WidgetEvent a b])
      handleEvents events arg =
        let eventFn (res, evts) event = do
              result <- handleEvent event arg
              case result of
                Nothing -> return (res, event : evts)
                Just (x, e) -> case res of
                  Nothing -> return (Just x, e : evts)
                  Just r -> return (Just $ r `mappend` x, e : evts)
        in foldM eventFn (Nothing, []) events
      wireArg = (lytInfo, input)
  in do
    (eventResults, events) <- handleEvents (eventHandlers widgetState) wireArg
    case eventResults of
      Nothing -> do
        -- No handler fired: run the idle wire instead.
        (result, uiw') <- stepWire (idleLogic widgetState) dt $ Right wireArg
        let newState = widgetState { idleLogic = uiw', eventHandlers = events }
        case result of
          Right x -> return (x, newState)
          Left _ -> error "UI wire inhibited?"
      Just result -> return (result, widgetState { eventHandlers = events })
-- | Adapt a wire to a pair input by discarding the first component and
-- feeding only the second to the wrapped wire.
ignoreFst :: GameWire b c -> GameWire (a, b) c
ignoreFst w = mkGen $ \dt (_, x) -> do
  (res, w') <- stepWire w dt (Right x)
  return (res, ignoreFst w')
-- | Turn a widget tree into an ordinary game wire: each step folds
-- 'widgetRenderFn' over the Yoga layout and carries the updated tree
-- forward.
widgetWire :: Monoid b => Widget a b -> GameWire a b
widgetWire (Widget lyt) = mkGen $ \dt input -> do
  (result, newLyt) <- Y.foldRender lyt (widgetRenderFn dt input)
  return (Right result, widgetWire $ Widget newLyt)
-- | Build a full-window root widget (vbox of children) sized to the
-- current window dimensions.
screenPrg :: Monoid b => [Widget a b] -> GameMonad (Widget a b)
screenPrg children = do
  (V2 wx wy) <- windowSize <$> ask
  return . Widget $
    ($ blankState) $
    Y.withDimensions (fromIntegral wx) (fromIntegral wy) $
    Y.vbox (Y.startToEnd $ getWidgetLayout <$> children)
-- | Top-level UI wire: runs the widget tree, rebuilding it from scratch
-- whenever the window dimensions change (so the layout is re-measured).
screen :: Monoid b => [Widget a b] -> GameWire a b
screen children = wireFrom (windowSize <$> ask) $ runScreen $
                  wireFrom (screenPrg children) widgetWire
  where
    runScreen ui_wire oldWinDims =
      let getUIWire False = wireFrom (screenPrg children) widgetWire
          getUIWire True = ui_wire
      in mkGen $ \dt input -> do
        winDims <- windowSize <$> ask
        let ui = getUIWire (winDims == oldWinDims)
        (result, next_wire') <- stepWire ui dt $ Right input
        return (result, runScreen next_wire' winDims)
-- | Draw a sprite filling the node's layout rectangle.  Yoga's origin is
-- top-left while rendering expects bottom-left, hence the @wy - y - h@
-- flip of the vertical coordinate.
renderSpriteAt :: Sprite -> Y.LayoutInfo -> GameMonad ()
renderSpriteAt sprite lytInfo = do
  let (x, y, w, h) = (
        Y.nodeLeft lytInfo,
        Y.nodeTop lytInfo,
        Y.nodeWidth lytInfo,
        Y.nodeHeight lytInfo)
  (V2 _ wy) <- windowSize <$> ask
  renderUISpriteWithSize sprite (V2 x (fromIntegral wy - y - h)) (V2 w h)
-- | Draw a string at the node's top-left corner, flipping the vertical
-- coordinate by the rendered string height (same top-left/bottom-left
-- conversion as 'renderSpriteAt').
renderStringAt :: Font -> String -> Y.LayoutInfo -> GameMonad()
renderStringAt font str lytInfo = do
  let (x, y) = (Y.nodeLeft lytInfo, Y.nodeTop lytInfo)
  (V2 _ wy) <- windowSize <$> ask
  renderUIString font str $ V2 x (fromIntegral wy - y - stringHeight font str)
-- | Renderer driven by a sprite-producing wire: each frame draws the
-- current sprite and passes the input through the logic wire.  The sprite
-- wire is stepped a second time with the same inputs purely to probe
-- whether the animation has ended; once it inhibits, the renderer freezes
-- into a static 'spriteRenderer' on the last produced frame.
-- NOTE(review): the probe step assumes re-stepping spriteWire' is
-- effect-free and deterministic — confirm for the wires used here.
animatedRenderer :: Monoid b => GameWire a Sprite -> GameWire a b -> UIWire a b
animatedRenderer spriteWire logic = mkGen $ \dt (lytInfo, val) -> do
  (spriteResult, spriteWire') <- stepWire spriteWire dt $ Right val
  (logicResult, logic') <- stepWire logic dt $ Right val
  case spriteResult of
    Right nextSprite -> do
      renderSpriteAt nextSprite lytInfo
      (nextSpriteResult, _) <- stepWire spriteWire' dt $ Right val
      case nextSpriteResult of
        Right _ -> return (logicResult, animatedRenderer spriteWire' logic')
        Left _ -> return (logicResult, spriteRenderer nextSprite logic')
    Left _ -> error "Should never get here"
-- | Widget renderer that plays a sprite animation of the given type,
-- delegating the produced value to the supplied logic wire.
animatedSpriteRenderer :: Monoid b =>
                          Sprite -> SpriteAnimationType -> GameWire a b ->
                          UIWire a b
animatedSpriteRenderer sprite animType =
  animatedRenderer (animatedWire sprite animType)
-- | Renderer that draws a fixed sprite over the widget's rectangle every
-- frame, stepping the logic wire for the output value.
spriteRenderer :: Monoid b => Sprite -> GameWire a b -> UIWire a b
spriteRenderer s logic = mkGen $ \dt (lytInfo, val) -> do
  renderSpriteAt s lytInfo
  (result, logic') <- stepWire logic dt $ Right val
  return (result, spriteRenderer s logic')
-- | Renderer that fills the widget with a solid colour (a tinted copy of
-- the ambient simple sprite).
-- NOTE(review): the continuation is 'spriteRenderer s', not
-- 'colorRenderer' — the tinted sprite is built once on the first frame
-- and reused thereafter; presumably an intentional caching optimization.
colorRenderer :: Monoid b => V4 Word8 -> GameWire a b -> UIWire a b
colorRenderer color logic = mkGen $ \dt (lytInfo, val) -> do
  let byteColor = fromIntegral <$> color
  s <- changeSpriteColor byteColor <$> simpleSprite <$> ask
  renderSpriteAt s lytInfo
  (result, logic') <- stepWire logic dt $ Right val
  return (result, spriteRenderer s logic')
-- | Renderer that draws a fixed string with the given font each frame.
textRenderer :: Monoid b => Font -> String -> GameWire a b -> UIWire a b
textRenderer font str logic = mkGen $ \dt (lytInfo, val) -> do
  renderStringAt font str lytInfo
  (result, logic') <- stepWire logic dt $ Right val
  return (result, textRenderer font str logic')
-- | Renderer whose text comes from the logic wire itself: the wire yields
-- both the output value and the string to draw.  When the wire inhibits,
-- nothing is drawn and the inhibition is propagated.
dynamicTextRenderer :: Monoid b => Font -> GameWire a (b, String) -> UIWire a b
dynamicTextRenderer font logic = mkGen $ \dt (lytInfo, val) -> do
  (wireResult, logic') <- stepWire logic dt $ Right val
  result <- case wireResult of
    (Right (bVal, str)) -> do
      renderStringAt font str lytInfo
      return $ Right bVal
    (Left e) -> return (Left e)
  return (result, dynamicTextRenderer font logic')
-- | Run several renderers over the same widget, mappending their outputs.
combineRenderers :: Monoid b => [UIWire a b] -> UIWire a b
combineRenderers = mconcat
-- | Horizontal container: children laid out left-to-right with space
-- distributed between them; the box stretches to fill its parent.
hbox :: Monoid b => [Widget a b] -> Widget a b
hbox widgets = Widget
             $ ($ blankState)
             $ Y.stretched
             $ Y.hbox (Y.spaceBetween (getWidgetLayout <$> widgets))
-- | Vertical container: like 'hbox' but top-to-bottom.
vbox :: Monoid b => [Widget a b] -> Widget a b
vbox widgets = Widget
             $ ($ blankState)
             $ Y.stretched
             $ Y.vbox (Y.spaceBetween (getWidgetLayout <$> widgets))
-- | Invisible spacer that greedily grows (growth factor 2) to absorb
-- leftover space in a box.
glue :: Monoid b => Widget a b
glue = Widget
     $ ($ blankState)
     $ Y.growable 2.0 (Y.Min 0.0) (Y.Min 0.0)
     $ Y.exact 1.0 1.0
|
Mokosha/Lambency
|
lib/Lambency/UI.hs
|
Haskell
|
mit
| 8,644
|
module DotName where
-- Example from Data.Function
-- uses an infix operator as a pattern variable
-- I think the only solution is to rename to normal variable and make all uses prefix
-- | 'on' as in Data.Function, deliberately written with the operator
-- section @(.*.)@ bound as an infix pattern variable (this unusual
-- binding form is the point of this translation test case).
on :: (b -> b -> c) -> (a -> b) -> a -> a -> c
(.*.) `on` f = \x y -> f x .*. f y
|
antalsz/hs-to-coq
|
examples/tests/DotName.hs
|
Haskell
|
mit
| 270
|
import Control.Concurrent (threadDelay)
import Control.Exception
import Control.Monad
import qualified Graphics.Rendering.OpenGL as GL
import Graphics.Rendering.OpenGL (($=))
import qualified Graphics.UI.GLFW as GLFW
import Prelude hiding (catch)
-- | Set up GLFW and OpenGL state, then run the render loop; the window
-- is always closed and GLFW terminated via 'finally'.
main = do
  -- initialize has to come first. If it doesn't return True,
  -- this crashes with a pattern match error.
  True <- GLFW.initialize
  -- Set the RGB bits to get a color window.
  -- See the GLFW-b docs for all the options
  True <- GLFW.openWindow GLFW.defaultDisplayOptions
          { GLFW.displayOptions_numRedBits = 8
          , GLFW.displayOptions_numGreenBits = 8
          , GLFW.displayOptions_numBlueBits = 8
          , GLFW.displayOptions_numDepthBits = 1
          , GLFW.displayOptions_width = 640
          , GLFW.displayOptions_height = 480
          }
  GLFW.setWindowSizeCallback $ resize
  -- Use `$=` for assigning to GL values, `get` to read them.
  -- These functions basically hide IORefs.
  GL.depthFunc $= Just GL.Less
  -- Use `finally` so that `quit` is called whether or
  -- not `mainLoop` throws an exception
  finally mainLoop quit
-- | Resize the viewport and set the projection matrix
-- Installed as the GLFW window-size callback; rebuilds a 45-degree
-- perspective projection for the new aspect ratio, then returns the
-- matrix mode to Modelview for normal drawing.
resize w h = do
  -- These are all analogous to the standard OpenGL functions
  GL.viewport $= (GL.Position 0 0, GL.Size (fromIntegral w) (fromIntegral h))
  GL.matrixMode $= GL.Projection
  GL.loadIdentity
  GL.perspective 45 (fromIntegral w / fromIntegral h) 1 100
  GL.matrixMode $= GL.Modelview 0
-- | Close the window and terminate GLFW
quit = GLFW.closeWindow >> GLFW.terminate
-- | This will print and clear the OpenGL errors
printErrors = GL.get GL.errors >>= mapM_ print
-- | Draw the window and handle input
-- Loops until Esc is pressed or the window is closed, sleeping off the
-- remainder of each frame to cap the rate at 60 fps.
mainLoop = do
  now <- GLFW.getTime
  draw now
  -- Input is polled each time swapBuffers is called
  esc <- GLFW.keyIsPressed GLFW.KeyEsc
  isClosed <- fmap not GLFW.windowIsOpen
  unless (esc || isClosed) $ do
    -- Sleep for the rest of the frame
    frameLeft <- fmap (spf + now -) GLFW.getTime
    when (frameLeft > 0) $
      threadDelay (truncate $ 1000000 * frameLeft)
    mainLoop
  where
    -- maximum frame rate
    fps = 60
    -- seconds per frame
    spf = recip fps
-- | Draw a frame
-- Renders a spinning textured-less unit quad, scaled by 10 and rotated
-- by an angle derived from the elapsed time (one revolution per second).
draw :: Double -> IO ()
draw t = do
  -- Again, the functions in GL almost all map to standard OpenGL functions
  GL.clear [GL.ColorBuffer, GL.DepthBuffer]
  GL.loadIdentity
  GL.translate $ GL.Vector3 0 0 (-50 :: GL.GLfloat)
  GL.scale 10 10 (1 :: GL.GLfloat)
  GL.rotate theta axis
  -- renderPrimitive wraps the supplied action with glBegin and glEnd.
  -- We'll stop using this when we switch to shaders and vertex buffers.
  GL.renderPrimitive GL.Quads $
    -- Draw a unit square centered on the origin
    forM_ [(0, 0), (1, 0), (1, 1), (0, 1)] $ \(x, y) ->
      -- Note that we have to explicitly type Vertex* and Vector*, because
      -- they are polymorphic in number field.
      let vtx = GL.Vertex3 (x - 0.5) (y - 0.5) 0 :: GL.Vertex3 GL.GLfloat
      in GL.vertex vtx
  printErrors
  GL.flush
  GLFW.swapBuffers
  where
    -- GL.rotate takes the angle in degrees, not radians
    theta = realToFrac t * 360
    axis = GL.Vector3 0 1 0 :: GL.Vector3 GL.GLfloat
|
spetz911/progames
|
GLFW/glfw01.hs
|
Haskell
|
mit
| 3,192
|
module HandBrake.T.Encode
( tests )
where
-- base --------------------------------
import Control.Applicative ( pure )
import Data.Function ( ($), (&) )
import Data.List.NonEmpty ( NonEmpty( (:|) ) )
import Data.Tuple ( fst )
import System.Exit ( ExitCode )
import System.IO ( IO )
-- base-unicode-symbols ----------------
import Data.Function.Unicode ( (∘) )
-- fpath -------------------------------
import FPath.AbsDir ( absdir, root )
import FPath.AbsFile ( absfile )
import FPath.PathComponent ( pc )
-- more-unicode ------------------------
import Data.MoreUnicode.Functor ( (⊳) )
import Data.MoreUnicode.Lens ( (⊢), (⊩) )
import Data.MoreUnicode.Maybe ( pattern 𝕵, pattern 𝕹 )
import Data.MoreUnicode.Natural ( ℕ )
import Data.MoreUnicode.String ( 𝕊 )
import Data.MoreUnicode.Text ( 𝕋 )
-- range -------------------------------
import Data.Range ( Bound( Bound ), BoundType( Inclusive )
, Range( LowerBoundRange, SingletonRange ), (+=+) )
-- stdmain -----------------------------
import StdMain.UsageError ( UsageFPathIOError, usageError )
-- tasty -------------------------------
import Test.Tasty ( TestTree, testGroup )
-- tasty-hunit -------------------------
import Test.Tasty.HUnit ( testCase )
-- tasty-plus --------------------------
import TastyPlus ( (≟), assertLeft, assertListEqR
, runTestsP, runTestsReplay, runTestTree )
------------------------------------------------------------
-- local imports --
------------------------------------------------------------
import HandBrake.Encode ( AudioTracks( AudioTracks )
, Chapters( Chapters )
, Numbering( NoNumber, Number, Series )
, Profile( ProfileH265_576P, ProfileH265_720P
, Profile_DeadVideo )
, SubtitleTracks( SubtitleTracks )
, TwoPass( NoTwoPass )
, audios, audioEncoder, chapters, encodeArgs
, encodeRequest, input, name, numbering, outputDir
, outputName, profile, quality, subtitles, titleID
, twoPass
)
--------------------------------------------------------------------------------
-- | Unit tests for HandBrake encode-argument generation: each case builds
--   an encode request (perturbing 'base_req' through lenses) and checks
--   the exact CLI argument list — or the usage error — that 'encodeArgs'
--   produces for it.
tests ∷ TestTree
tests =
  testGroup "Encode" $
    let
      -- Assert that encoding `req` yields exactly the argument list `exp`.
      -- (NB: `exp` here shadows Prelude's exponential function.)
      testEncode nm req exp =
        testGroup nm $
          assertListEqR nm (fst ⊳ encodeArgs @UsageFPathIOError req) exp
      -- Baseline request: input /nonesuch, output to /, title 3, name "bob",
      -- audio track 2, subtitle tracks 3 & 4.
      base_req = encodeRequest [absfile|/nonesuch|] root 3 (𝕵 "bob")
                   (AudioTracks $ pure 2)
                 & subtitles ⊢ (SubtitleTracks [3,4])
      -- Assert that encoding `req` fails with usage-error text `txt`.
      usage_error nm txt req =
        testCase nm $
          assertLeft (usageError @𝕋 @UsageFPathIOError txt ≟) (encodeArgs req)
    in
      [ testEncode "base_req" base_req
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder", "copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "inputOffset 2" (base_req & numbering ⊢ Number 2)
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/05-bob.mkv"
          ]
      , usage_error "inputOffset -3"
                    "output number 0 (3+(-3)) < 0"
                    (base_req & numbering ⊢ Number (-3))
      , testEncode "NoNumber" (base_req & numbering ⊢ NoNumber)
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/bob.mkv"
          ]
      , testEncode "Series S 5" (base_req & numbering ⊢ Series (5,"S") 0)
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/S - 05x03 - bob.mkv"
          ]
      , testEncode "Series S 6, no name"
                   (base_req & numbering ⊢ Series (6,"S") 1 & name ⊢ 𝕹)
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/S - 06x04.mkv"
          ]
      , testEncode "chapters 6,7" (base_req & chapters
                                   ⊢ (Chapters $ 𝕵 (6 +=+ 7)))
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--chapters", "6-7"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "chapters 5"
                   (base_req & chapters ⊢ (Chapters ∘ 𝕵 $ SingletonRange 5))
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--chapters", "5"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "no two pass" (base_req & twoPass ⊢ NoTwoPass)
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "profile 576" (base_req & profile ⊢ ProfileH265_576P)
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 576p25"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "audios 8,9" (base_req & audios ⊢ AudioTracks (8 :| [9]) )
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "8,9"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "no subs" (base_req & subtitles ⊢ SubtitleTracks [] )
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--quality" , "26.0"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "quality 22.5" (base_req & quality ⊢ 22.5 )
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder","copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "22.5"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "no audio copy" (base_req & audioEncoder ⊢ 𝕵 "mp3")
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder", "mp3"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/03-bob.mkv"
          ]
      , testEncode "no name" (base_req & name ⊢ 𝕹)
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder", "copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/03.mkv"
          ]
      , usage_error "no name, no number"
                    "no number & no title"
                    (base_req & name ⊢ 𝕹 & numbering ⊢ NoNumber)
      , usage_error "illegal range"
                    "illegal range «[7-»"
                    (base_req & chapters ⊢ Chapters (𝕵 $ LowerBoundRange (Bound 7 Inclusive)))
      , testEncode "outputDir " (base_req & outputDir ⊢ [absdir|/out/|])
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder", "copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/out/03-bob.mkv"
          ]
      , testEncode "outputName " (base_req & outputName ⊩ [pc|output.mkv|])
          [ "--input" , "/nonesuch"
          , "--title" , "3"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--two-pass", "--turbo"
          , "--preset", "H.265 MKV 2160p60"
          , "--aencoder", "copy"
          , "--audio" , "2"
          , "--subtitle", "3,4", "--subtitle-default", "0"
          , "--quality" , "26.0"
          , "--output" , "/output.mkv"
          ]
      -- every lens perturbed at once
      , testEncode "altogether now"
                   (base_req & input ⊢ [absfile|/not-here|]
                             & titleID ⊢ 5
                             & numbering ⊢ Series (7,"T") 1
                             & name ⊢ 𝕹
                             & chapters ⊢ Chapters (𝕵 $ 7 +=+ 9)
                             & twoPass ⊢ NoTwoPass
                             & profile ⊢ ProfileH265_720P
                             & audios ⊢ AudioTracks (2 :| [1])
                             & subtitles ⊢ SubtitleTracks []
                             & quality ⊢ 26
                             & audioEncoder ⊢ 𝕵 "flac24,av_aac"
                             & outputDir ⊢ [absdir|/outdir/|]
                             & outputName ⊩ [pc|out.mkv|])
          [ "--input" , "/not-here"
          , "--title" , "5"
          , "--markers"
          , "--deinterlace"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--preset", "H.265 MKV 720p30"
          , "--aencoder", "flac24,av_aac"
          , "--audio" , "2,1"
          , "--quality" , "26.0"
          , "--chapters", "7-9"
          , "--output" , "/outdir/out.mkv"
          ]
      -- the dead-video profile also drops the deinterlace flag
      , testEncode "dead video"
                   (base_req & input ⊢ [absfile|/not-here|]
                             & titleID ⊢ 5
                             & numbering ⊢ Series (7,"T") 1
                             & name ⊢ 𝕹
                             & chapters ⊢ Chapters (𝕵 $ 7 +=+ 9)
                             & profile ⊢ Profile_DeadVideo
                             & audios ⊢ AudioTracks (2 :| [1])
                             & subtitles ⊢ SubtitleTracks []
                             & quality ⊢ 26
                             & audioEncoder ⊢ 𝕵 "flac24,av_aac"
                             & outputDir ⊢ [absdir|/outdir/|]
                             & outputName ⊩ [pc|out.mkv|])
          [ "--input" , "/not-here"
          , "--title" , "5"
          , "--markers"
          , "--audio-copy-mask", "aac,ac3,eac3,truehd,dts,dtshd,mp3,flac"
          , "--preset", "H.265 MKV 480p30"
          , "--aencoder", "flac24,av_aac"
          , "--audio" , "2,1"
          , "--quality" , "26.0"
          , "--chapters", "7-9"
          , "--output" , "/outdir/out.mkv"
          ]
      ]
----------------------------------------
-- | Run the whole test tree once (for interactive use from ghci).
_test ∷ IO ExitCode
_test = runTestTree tests
--------------------
-- | Run only the tests whose names match the given pattern
--   (interactive use from ghci).
_tests ∷ 𝕊 → IO ExitCode
_tests pat = runTestsP tests pat
-- | Replay a test run: select tests by pattern and re-use a previous
--   random seed (interactive use from ghci).
_testr ∷ 𝕊 → ℕ → IO ExitCode
_testr pat seed = runTestsReplay tests pat seed
-- that's all, folks! ----------------------------------------------------------
|
sixears/handbrake
|
src/HandBrake/T/Encode.hs
|
Haskell
|
mit
| 16,599
|
import Diagrams.Prelude
import Diagrams.Backend.Cairo
import Data.Default
import Control.Lens ((^.))
import Data.List
import Data.Ord
import Diagrams.Plots
-- | Project a 3d point to 2d using the standard camera-transform
-- perspective-projection formula: translate into camera space, rotate by
-- the three camera angles, then scale by the display-surface distance.
projection :: (Double, Double, Double) -- ^ position of camera
           -> (Double, Double, Double) -- ^ orientation of camera
           -> (Double, Double, Double) -- ^ display-surface position relative to the camera (per the standard formula) — TODO confirm
           -> (Double, Double, Double) -- ^ 3d point to be projected
           -> (Double, Double)
projection (camX, camY, camZ) (rotX, rotY, rotZ) (ex, ey, ez) (ax, ay, az) =
    (ez / dz * dx - ex, ez / dz * dy - ey)
  where
    -- point relative to the camera
    x = ax - camX
    y = ay - camY
    z = az - camZ
    -- camera-space coordinates after rotating by the three angles
    dx = cy * (sz * y + cz * x) - sy * z
    dy = sx * (cy * z + sy * (sz * y + cz * x)) + cx * (cz * y - sz * x)
    dz = cx * (cy * z + sy * (sz * y + cz * x)) - sx * (cz * y - sz * x)
    sx = sin rotX
    sy = sin rotY
    sz = sin rotZ
    cx = cos rotX
    cy = cos rotY
    cz = cos rotZ
-- | Corners of a 3x3 square at height z = 2.
ps :: [(Double, Double, Double)]
ps = [(2,2,2), (2,5,2), (5,5,2), (5,2,2)]
-- | Corners of a smaller, 1x1 square at height z = 1.
ps' :: [(Double, Double, Double)]
ps' = [(3,3,1), (3,4,1), (4,4,1), (4,3,1)]
-- | Graph nodes: the corners of both squares projected to 2d and numbered
-- consecutively from 0.
nodes = zipWith mkNode [0 ..] projected
  where
    projected = map (projection (0, 0, 0) (0, 0, 0) (0, 0, 1)) (ps ++ ps')
    mkNode i (x, y) = def { _nodeId = i, _nodeX = x, _nodeY = y }
{-
nodes = [ def { _nodeId=0, _nodeX=2, _nodeY=2, _nodeSize=0.1, _nodeOpacity=0.5, _nodeColor=red }
, def { _nodeId=1, _nodeX=2, _nodeY=5, _nodeSize=0.03 }
, def { _nodeId=2, _nodeX=5, _nodeY=5, _nodeColor=green }
, def { _nodeId=3, _nodeX=5, _nodeY=2, _nodeColor=green }
]
-}
-- | Edges joining consecutive corners of each square: nodes 0-3 form the
-- first ring, nodes 4-7 the second.
-- NOTE(review): edge ids run 0,1,3,4,... — id 2 is skipped; presumably
-- harmless if ids are mere labels, but worth confirming.
edges = [ def { _edgeId = 0, _edgeFrom = 0, _edgeTo = 1 }
        , def { _edgeId = 1, _edgeFrom = 1, _edgeTo = 2 }
        , def { _edgeId = 3, _edgeFrom = 2, _edgeTo = 3 }
        , def { _edgeId = 4, _edgeFrom = 3, _edgeTo = 0 }
        , def { _edgeId = 5, _edgeFrom = 4, _edgeTo = 5 }
        , def { _edgeId = 6, _edgeFrom = 5, _edgeTo = 6 }
        , def { _edgeId = 7, _edgeFrom = 6, _edgeTo = 7 }
        , def { _edgeId = 8, _edgeFrom = 7, _edgeTo = 4 }
        ]
-- | Render the node/edge graph to @1.png@ (480x480), with real axes sized
-- to the extent of the projected node coordinates.
main = renderCairo "1.png" (Dims 480 480) (showPlot picture)
  where
    picture = area <+ (graph, BL)
    graph   = drawGraph (nodes, edges)
    area    = plotArea 5.5 4.8 (yAxis, emptyAxis, emptyAxis, xAxis)
    xAxis   = realAxis (xMin, xMax) 0.2 def
    yAxis   = realAxis (yMin, yMax) 0.2 def
    -- axis bounds follow the extreme node positions
    xMin    = _nodeX (minimumBy (comparing _nodeX) nodes)
    xMax    = _nodeX (maximumBy (comparing _nodeX) nodes)
    yMin    = _nodeY (minimumBy (comparing _nodeY) nodes)
    yMax    = _nodeY (maximumBy (comparing _nodeY) nodes)
|
kaizhang/haskell-plot
|
Test.hs
|
Haskell
|
mit
| 2,558
|
{-# LANGUAGE CPP, FlexibleContexts, JavaScriptFFI, OverloadedStrings #-}
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad
import Control.Monad.State
import Data.IORef
import qualified Data.Serialize as S
import qualified Data.Text as T
import System.Random
import Flaw.App
import Flaw.Asset
import Flaw.Asset.FolderAssetPack
import Flaw.Asset.RemapAssetPack
import Flaw.Book
import Flaw.Graphics
import Flaw.Graphics.Program
import Flaw.Graphics.Sampler
import Flaw.Math
import Flaw.Math.Geometry
import Flaw.Input
import Flaw.Input.Mouse
import Flaw.Input.Keyboard
import Flaw.Visual.Geometry
import Flaw.Window
#if defined(ghcjs_HOST_OS)
import GHCJS.Types
import GHCJS.Foreign.Callback
import GHCJS.Marshal.Pure
#endif
-- | Complete state of one game session, threaded through the per-frame
-- step via 'StateT'.
data GameState = GameState
  { gsPhase :: GamePhase          -- ^ battle in progress or finished
  , gsCameraAlpha :: Float        -- ^ camera azimuth angle, radians
  , gsCameraBeta :: Float         -- ^ camera elevation angle, radians (clamped to 0.1..1.5 by input handling)
  , gsCameraDistance :: Float     -- ^ camera distance from the origin (clamped to 2.5..12.7)
  , gsLightAngle :: Float         -- ^ phase of the orbiting light source
  , gsActors :: [Actor]           -- ^ every live actor on the field
  , gsFirstCursor :: Maybe ((Int, Int), (Int, Int))
                                  -- ^ (press position, latest position) of an in-progress left-button drag
  , gsUserActorType :: ActorType  -- ^ which side the user plays
  , gsUserGun :: GunState         -- ^ user's spawn cooldown
  , gsComputerGun :: GunState     -- ^ computer's spawn cooldown
  , gsDamages :: [Damage]         -- ^ impacts queued to be applied next step
  , gsBeaverLives :: Int          -- ^ beaver side's remaining lives
  , gsPekaLives :: Int            -- ^ peka side's remaining lives
  , gsUserSpawn :: Maybe Float2   -- ^ user-requested spawn target, if any
  } deriving Show
-- | Whether the battle is still running or has been decided.
data GamePhase = GameBattle | GameFinish deriving (Eq, Show)
-- | Per-side spawn cooldown; the gun may fire only when the timer has
-- counted down to zero or below.
data GunState = GunState
  { gunStateTime :: Float -- ^ remaining cooldown (see 'gunCoolDown')
  } deriving Show
-- | One creature on the field.  Movement interpolates from
-- 'actorStartPosition' to 'actorFinishPosition' as 'actorTime' advances
-- toward 'actorTotalTime' (see 'calcActorPosition').
data Actor = Actor
  { actorType :: ActorType          -- ^ owning faction
  , actorStartPosition :: !Float2   -- ^ where the current movement phase began
  , actorFinishPosition :: !Float2  -- ^ where the current movement phase ends
  , actorTime :: Float              -- ^ elapsed time within the current phase
  , actorTotalTime :: Float         -- ^ total duration of the current phase
  , actorState :: ActorState        -- ^ current life-cycle phase
  , actorAngle :: Float             -- ^ rolling angle accumulated while running (used when rendering Peka)
  } deriving Show
-- | The two opposing factions.
data ActorType = Peka | Beaver deriving (Eq, Show)
-- | Launch speed used in the ballistic computations ('spawnActor').
actorFlySpeed :: Float
actorFlySpeed = 11.3
-- | Movement speed of a running actor.
actorGroundSpeed :: Float
actorGroundSpeed = 1.27
-- | Angular speed while running — ground speed over 'actorOffset'
-- (rolling motion, presumably; used for Peka's rendering).
actorAngleSpeed :: Float
actorAngleSpeed = actorGroundSpeed / actorOffset
-- | Gravitational acceleration for the ballistic arcs.
gravity :: Float
gravity = 22.5
-- | Height of an actor's center above the ground.
actorOffset :: Float
actorOffset = 0.127
-- | Time between shots for either side's gun.
gunCoolDown :: Float
gunCoolDown = 0.5
-- | How long a dead actor stays on the field before being removed.
actorDeadTime :: Float
actorDeadTime = 5
-- | Duration of the explode animation.
actorExplodeTime :: Float
actorExplodeTime = 0.5
-- | Distance an exploding actor drifts during its animation.
actorExplodeDistance :: Float
actorExplodeDistance = 0.127
-- | Vertical rise of a winning actor during its animation.
actorWinningOffset :: Float
actorWinningOffset = 1.27
-- | Final scale factor reached by a winning actor.
actorWinningScale :: Float
actorWinningScale = 5
-- | Duration of the winning animation.
actorWinningTime :: Float
actorWinningTime = 1
-- | Starting number of lives per side.
livesAmount :: Int
livesAmount = 50
-- | Cursor travel (pixels) below which press+release counts as a click
-- (spawn request) rather than a camera drag.
moveClickThreshold :: Num a => a
moveClickThreshold = 50
-- | Actor life-cycle phase; 'ActorFlying' carries the launch angle.
data ActorState = ActorFlying Float | ActorRunning | ActorDead | ActorExplode | ActorWinning deriving (Eq, Show)
-- | Current world position of an actor: x/y interpolate linearly from
-- start to finish as @t/tt@ goes 0→1; flying actors additionally get a
-- ballistic height term; dead/winning actors sit at their start position.
calcActorPosition :: Actor -> Float3
calcActorPosition Actor
  { actorStartPosition = Vec2 sx sy
  , actorFinishPosition = Vec2 fx fy
  , actorTime = t
  , actorTotalTime = tt
  , actorState = as
  } = case as of
  ActorFlying angle -> Vec3 (sx * (1 - k) + fx * k) (sy * (1 - k) + fy * k) z where
    k = t / tt
    -- standard ballistic arc: vertical launch speed is v * sin angle
    z = actorOffset + actorFlySpeed * (sin angle) * t - gravity * t * t / 2
  ActorRunning -> Vec3 (sx * (1 - k) + fx * k) (sy * (1 - k) + fy * k) actorOffset where
    k = t / tt
  ActorDead -> Vec3 sx sy actorOffset
  ActorExplode -> Vec3 (sx * (1 - k) + fx * k) (sy * (1 - k) + fy * k) actorOffset where
    k = t / tt
  ActorWinning -> Vec3 sx sy actorOffset
-- | Launch an actor of type @at@ from @s@ toward @f@ as a ballistic
-- projectile.  Yields 'Nothing' when the target is out of range
-- (@sin 2θ@ would exceed 1) or lies within 1.27 of the spawning side's
-- own castle.
spawnActor :: ActorType -> Float2 -> Float2 -> Maybe Actor
spawnActor at s f = maybeActor where
  -- from the projectile range formula R = v^2 * sin(2θ) / g
  sin2angle = (norm $ s - f) * gravity / (actorFlySpeed * actorFlySpeed)
  -- take the steeper of the two solutions, for a lobbed trajectory
  angle = 0.5 * (pi - asin sin2angle)
  maybeActor = if sin2angle >= 1 || norm (castlePosition at - f) < 1.27 then Nothing else Just Actor
    { actorType = at
    , actorStartPosition = s
    , actorFinishPosition = f
    , actorTime = 0
    -- total flight time of the arc: 2 * v * sin θ / g
    , actorTotalTime = 2 * actorFlySpeed * (sin angle) / gravity
    , actorState = ActorFlying angle
    , actorAngle = 0
    }
-- | World position of each faction's castle (mirrored across the x axis).
castlePosition :: ActorType -> Float2
castlePosition Peka   = Vec2 0 5
castlePosition Beaver = Vec2 0 (-5)
-- | Y coordinate of the line in front of each faction's castle where
-- landed projectiles start running.
castleLine :: ActorType -> Float
castleLine Peka   = 3.8
castleLine Beaver = -3.8
-- | Half-width of the battlefield along x; computer spawn targets range
-- over ±'fieldWidth'.
fieldWidth :: Float
fieldWidth = 2.5
-- | The opposing faction.
enemyActor :: ActorType -> ActorType
enemyActor Peka   = Beaver
enemyActor Beaver = Peka
-- | A pending area hit: the attacking faction and the impact point.
data Damage = Damage ActorType Float2 deriving Show
-- | State at the start of a battle: full lives, empty field, default
-- camera placement; 'gsUserActorType' and 'gsCameraAlpha' are overridden
-- by the caller when a game is actually started.
initialGameState :: GameState
initialGameState = GameState
  { gsPhase = GameBattle
  , gsCameraAlpha = 0
  , gsCameraBeta = 0.35
  , gsCameraDistance = 10
  , gsLightAngle = 0
  , gsActors = []
  , gsFirstCursor = Nothing
  , gsUserActorType = Peka
  , gsUserGun = GunState
    { gunStateTime = 0
    }
  , gsComputerGun = GunState
    { gunStateTime = 0
    }
  , gsDamages = []
  , gsBeaverLives = livesAmount
  , gsPekaLives = livesAmount
  , gsUserSpawn = Nothing
  }
-- | Recover the world-space point mapping to a given screen-space point
-- under the matrix.  The per-component equations are rearranged into a
-- 3x3 linear system (coefficients @aij@) which is then solved with
-- Cramer's rule: @d@ is the system determinant, @dx@/@dy@/@dz@ the
-- numerator determinants.
getFrontScreenPoint :: (Vectorized a, Fractional a) => Mat4x4 a -> Vec3 a -> Vec3 a
getFrontScreenPoint (Mat4x4
  m11 m12 m13 m14
  m21 m22 m23 m24
  m31 m32 m33 m34
  m41 m42 m43 m44
  ) (Vec3 sx sy sz) = Vec3 (dx / d) (dy / d) (dz / d) where
  a11 = m11 - sx * m41
  a12 = m12 - sx * m42
  a13 = m13 - sx * m43
  a14 = sx * m44 - m14
  a21 = m21 - sy * m41
  a22 = m22 - sy * m42
  a23 = m23 - sy * m43
  a24 = sy * m44 - m24
  a31 = m31 - sz * m41
  a32 = m32 - sz * m42
  a33 = m33 - sz * m43
  a34 = sz * m44 - m34
  d = a11 * (a22 * a33 - a23 * a32) - a12 * (a21 * a33 - a23 * a31) + a13 * (a21 * a32 - a22 * a31)
  dx = a14 * (a22 * a33 - a23 * a32) - a12 * (a24 * a33 - a23 * a34) + a13 * (a24 * a32 - a22 * a34)
  dy = a11 * (a24 * a33 - a23 * a34) - a14 * (a21 * a33 - a23 * a31) + a13 * (a21 * a34 - a24 * a31)
  dz = a11 * (a22 * a34 - a24 * a32) - a12 * (a21 * a34 - a24 * a31) + a14 * (a21 * a32 - a22 * a31)
-- | Intersect the ray @a + t*d@ with the plane @dot p n == nq@.
intersectRay :: (Vectorized a, Fractional a) => Vec3 a -> Vec3 a -> Vec3 a -> a -> Vec3 a
intersectRay a d n nq = a + d * vecFromScalar ((nq - dot a n) / (dot d n))
-- | Affine transform placing an actor at @position@ with its local y axis
-- pointing at @target@; @direction@ is the reference axis used to
-- complete the orthonormal frame.
affineActorLookAt :: (Vectorized a, Floating a) => Vec3 a -> Vec3 a -> Vec3 a -> Mat4x4 a
affineActorLookAt position@(Vec3 px py pz) target direction = r where
  y@(Vec3 yx yy yz) = normalize $ target - position
  x@(Vec3 xx xy xz) = normalize $ cross y direction
  Vec3 zx zy zz = cross x y
  -- column-wise frame (x | y | z) with translation in the last column
  r = Mat4x4
    xx yx zx px
    xy yy zy py
    xz yz zz pz
    0 0 0 1
main :: IO ()
main = handle errorHandler $ withApp appConfig
{ appConfigTitle = "PEKABEAVER"
, appConfigNeedDepthBuffer = True
} $ \window device context presenter inputManager -> withBook $ \bk -> do
setWindowMouseCursor window MouseCursorHand
--setWindowMouseLock window True
-- run detection of closed window
windowLoopVar <- newEmptyMVar
windowEventsChan <- atomically $ chanWindowEvents window
_ <- forkIO $ do
let loop = do
event <- atomically $ readTChan windowEventsChan
case event of
DestroyWindowEvent -> putMVar windowLoopVar True
_ -> loop
loop
-- run input processing thread
keyboardChan <- atomically $ chanInputEvents inputManager
mouseChan <- atomically $ chanInputEvents inputManager
-- initial input states
keyboardState <- atomically initialInputState
mouseState <- atomically initialInputState
-- load asset pack
assetPack <- do
Right assetPackContainer <- S.decode <$> loadAsset (FolderAssetPack "") "pack.bin"
return $ loadRemapAssetPack assetPackContainer (FolderAssetPack "assetpack/") :: IO (RemapAssetPack FolderAssetPack T.Text)
let loadTextureAsset = createNativeTexture
-- load field
Geometry
{ geometryVertexBuffer = vbField
, geometryIndexBuffer = ibField
, geometryIndicesCount = icField
} <- book bk (loadGeometryAsset device =<< loadAsset assetPack "field.bin")
tField <- book bk $ loadTextureAsset device defaultSamplerStateInfo =<< loadAsset assetPack "castle.jpg"
-- load beaver
Geometry
{ geometryVertexBuffer = vbBeaver
, geometryIndexBuffer = ibBeaver
, geometryIndicesCount = icBeaver
} <- book bk (loadGeometryAsset device =<< loadAsset assetPack "beaver.bin")
tBeaver <- book bk $ loadTextureAsset device defaultSamplerStateInfo =<< loadAsset assetPack "beaver.jpg"
-- load peka
Geometry
{ geometryVertexBuffer = vbPeka
, geometryIndexBuffer = ibPeka
, geometryIndicesCount = icPeka
} <- book bk (loadGeometryAsset device =<< loadAsset assetPack "peka.bin")
tPeka <- book bk $ loadTextureAsset device defaultSamplerStateInfo =<< loadAsset assetPack "peka.png"
let samplerState = nullSamplerState
-- program
ubsCamera <- uniformBufferSlot 0
uViewProj <- uniform ubsCamera
uCameraPosition <- uniform ubsCamera
ubsLight <- uniformBufferSlot 1
uLightPosition <- uniform ubsLight
--ubsMaterial <- uniformBufferSlot 2
--uDiffuseColor <- uniform ubsMaterial
ubsObject <- uniformBufferSlot 3
uWorld <- uniform ubsObject
usCamera <- book bk $ createUniformStorage device ubsCamera
usLight <- book bk $ createUniformStorage device ubsLight
--usMaterial <- book bk $ createUniformStorage device ubsMaterial
usObject <- book bk $ createUniformStorage device ubsObject
program <- book bk $ createProgram device $ do
aPosition <- attribute 0 0 0 (AttributeVec3 AttributeFloat32)
aNormal <- attribute 0 12 0 (AttributeVec3 AttributeFloat32)
aTexcoord <- attribute 0 24 0 (AttributeVec2 AttributeFloat32)
worldPosition <- temp $ mul uWorld $ cvec31 aPosition (constf 1)
worldNormal <- temp $ mul uWorld $ cvec31 aNormal (constf 0)
rasterize (mul uViewProj worldPosition) $ do
let toLight = normalize $ (xyz__ worldPosition) - uLightPosition
--diffuse <- temp $ max_ 0 $ dot toLight $ xyz__ worldNormal
diffuse <- temp $ min_ (constf 1) $ constf 0.5 + (abs $ dot toLight $ normalize $ xyz__ worldNormal)
diffuseColor <- temp $ sample (sampler2D3f 0) aTexcoord
colorTarget 0 $ cvec31 (diffuseColor * vecFromScalar diffuse) (constf 1)
let
gameStep :: Float -> StateT GameState IO ()
gameStep frameTime = do
-- check exit
#if !defined(ghcjs_HOST_OS)
loop <- liftIO $ tryTakeMVar windowLoopVar
case loop of
Just True -> liftIO $ exitApp
_ -> return ()
#endif
cameraPosition <- do
s <- get
let alpha = gsCameraAlpha s
let beta = gsCameraBeta s
let distance = gsCameraDistance s
return $ Vec3 (distance * (cos alpha * cos beta)) (distance * (sin alpha * cos beta)) (distance * sin beta)
rs <- get
(viewProj, viewportWidth, viewportHeight) <- liftIO $ render context $ do
present presenter $ do
renderClearColor 0 (Vec4 0.5 0.5 0.5 1)
renderClearDepth 0
renderDepthTestFunc DepthTestFuncGreater
renderProgram program
Vec4 viewportLeft viewportTop viewportRight viewportBottom <- renderGetViewport
let viewportWidth = viewportRight - viewportLeft
let viewportHeight = viewportBottom - viewportTop
let aspect = (fromIntegral viewportWidth) / (fromIntegral viewportHeight)
let view = affineLookAt cameraPosition (Vec3 0 0 0) (Vec3 0 0 1)
let proj = projectionPerspectiveFov (pi / 4) aspect (-50 :: Float) (-0.01)
let viewProj = mul proj view
renderUniform usCamera uViewProj viewProj
renderUniform usCamera uCameraPosition cameraPosition
renderUploadUniformStorage usCamera
renderUniformStorage usCamera
renderUniform usLight uLightPosition $ let
angle = gsLightAngle rs
in Vec3 (2 * cos angle) (2 * sin angle) 2
renderUploadUniformStorage usLight
renderUniformStorage usLight
--renderUniform usMaterial uDiffuseColor $ Vec3 1 0 0
--renderUploadUniformStorage usMaterial
--renderUniformStorage usMaterial
-- render field
renderUniform usObject uWorld $ affineTranslation ((Vec3 0 0 0) :: Float3)
renderUploadUniformStorage usObject
renderUniformStorage usObject
renderVertexBuffer 0 vbField
renderIndexBuffer ibField
renderSampler 0 tField samplerState
renderDraw icField
-- render actors
forM_ (gsActors rs) $ \actor@Actor
{ actorType = at
, actorState = as
, actorFinishPosition = Vec2 fx fy
, actorTime = t
, actorTotalTime = tt
, actorAngle = aa
} -> do
let (vb, ib, ic, tex) = case at of
Peka -> (vbPeka, ibPeka, icPeka, tPeka)
Beaver -> (vbBeaver, ibBeaver, icBeaver, tBeaver)
let k = t / tt
let position = calcActorPosition actor
let translation = affineActorLookAt position (Vec3 fx fy actorOffset) (Vec3 0 0 1)
let world = case as of
ActorFlying _ -> mul translation $ affineFromQuat $ affineAxisRotation (Vec3 (-1) 0 0) $ k * pi * 2
ActorRunning -> if at == Peka then mul translation $ affineFromQuat $ affineAxisRotation (Vec3 (-1) 0 0) aa else translation
ActorDead -> mul translation $ mul (affineTranslation $ Vec3 0 0 $ 0.05 - actorOffset) $ affineScaling (Vec3 1.5 1.5 (0.1 :: Float))
ActorExplode ->
--mul translation $ mul (affineTranslation $ Vec3 0 0 $ k * 10) $ affineScaling $ vecFromScalar $ 1 + k * 0.5
mul translation $ affineScaling $ Vec3 1 (1 * (1 - k) + 0.1 * k) 1
ActorWinning -> mul translation $ mul (affineTranslation $ Vec3 0 0 $ k * actorWinningOffset) $ affineScaling $ vecFromScalar $ 1 - k + actorWinningScale * k
renderUniform usObject uWorld world
renderUploadUniformStorage usObject
renderUniformStorage usObject
renderVertexBuffer 0 vb
renderIndexBuffer ib
renderSampler 0 tex samplerState
renderDraw ic
return (viewProj, viewportWidth, viewportHeight)
-- process input
do
let getMousePoint = do
(cursorX, cursorY) <- liftIO $ atomically $ getMouseCursor mouseState
let frontPoint = getFrontScreenPoint viewProj $ Vec3
((fromIntegral cursorX) / (fromIntegral viewportWidth) * 2 - 1)
(1 - (fromIntegral cursorY) / (fromIntegral viewportHeight) * 2)
0
return $ intersectRay cameraPosition (normalize (frontPoint - cameraPosition)) (Vec3 0 0 1) 0
let
processKeyboard = readTChan keyboardChan >>= \keyboardEvent -> return $ do
case keyboardEvent of
KeyDownEvent KeyEscape -> liftIO $ putMVar windowLoopVar True
_ -> return ()
liftIO $ atomically $ applyInputEvent keyboardState keyboardEvent
processEvent
processMouse = readTChan mouseChan >>= \mouseEvent -> return $ do
case mouseEvent of
MouseDownEvent LeftMouseButton -> do
cursor <- liftIO $ atomically $ getMouseCursor mouseState
state $ \s -> ((), s
{ gsFirstCursor = Just (cursor, cursor)
})
MouseUpEvent LeftMouseButton -> do
(cursorX, cursorY) <- liftIO $ atomically $ getMouseCursor mouseState
s1 <- get
case gsFirstCursor s1 of
Just ((firstCursorX, firstCursorY), _) -> do
if (abs $ cursorX - firstCursorX) < moveClickThreshold && (abs $ cursorY - firstCursorY) < moveClickThreshold then do
(Vec3 fx fy _) <- getMousePoint
state $ \s -> ((), s
{ gsUserSpawn = Just $ Vec2 fx fy
})
else return ()
state $ \s -> ((), s
{ gsFirstCursor = Nothing
})
Nothing -> return ()
CursorMoveEvent cursorX cursorY -> do
s <- get
case gsFirstCursor s of
Just (firstCursor@(firstCursorX, firstCursorY), (moveCursorX, moveCursorY)) -> do
if (abs $ cursorX - firstCursorX) >= moveClickThreshold || (abs $ cursorY - firstCursorY) >= moveClickThreshold then do
put $ s
{ gsCameraAlpha = gsCameraAlpha s - (fromIntegral $ cursorX - moveCursorX) * 0.005
, gsCameraBeta = gsCameraBeta s + (fromIntegral $ cursorY - moveCursorY) * 0.01
, gsFirstCursor = Just (firstCursor, (cursorX, cursorY))
}
else
put $ s
{ gsFirstCursor = Just (firstCursor, (cursorX, cursorY))
}
Nothing -> return ()
RawMouseMoveEvent _dx _dy dz -> state $ \s -> ((), s
{ gsCameraDistance = max 2.5 $ min 12.7 $ dz * (-0.0025) + gsCameraDistance s
})
_ -> return ()
liftIO $ atomically $ applyInputEvent mouseState mouseEvent
processEvent
processEvent = do
action <- liftIO $ atomically $ orElse (orElse processKeyboard processMouse) (return $ return ())
action
processEvent
-- process camera rotation
do
up <- liftIO $ atomically $ getKeyState keyboardState KeyUp
down <- liftIO $ atomically $ getKeyState keyboardState KeyDown
left <- liftIO $ atomically $ getKeyState keyboardState KeyLeft
right <- liftIO $ atomically $ getKeyState keyboardState KeyRight
state $ \s -> ((), s
{ gsCameraAlpha = gsCameraAlpha s + ((if right then 1 else 0) - (if left then 1 else 0)) * frameTime
, gsCameraBeta = max 0.1 $ min 1.5 $ gsCameraBeta s + ((if up then 1 else 0) - (if down then 1 else 0)) * frameTime
})
-- step actors
let stepActor actor@Actor
{ actorState = as
, actorFinishPosition = f
, actorType = at
, actorTime = t
, actorTotalTime = tt
} = do
case as of
ActorFlying _ -> if t >= tt then do
let finishPosition = Vec2 (x_ f) $ castleLine $ enemyActor at
state $ \s -> ((), s
{ gsDamages = (Damage at f) : gsDamages s
})
return [actor
{ actorTime = 0
, actorTotalTime = norm (finishPosition - f) / actorGroundSpeed
, actorStartPosition = f
, actorFinishPosition = finishPosition
, actorState = ActorRunning
, actorAngle = 0
}]
else return [actor
{ actorTime = t + frameTime
}]
ActorRunning -> do
if t >= tt then do
let finishPosition = castlePosition $ enemyActor at
let len = norm $ finishPosition - f
if len < 0.25 then do
state $ \s -> ((), case at of
Beaver -> s { gsPekaLives = gsPekaLives s - 1 }
Peka -> s { gsBeaverLives = gsBeaverLives s - 1 }
)
return [actor
{ actorTime = 0
, actorTotalTime = actorWinningTime
, actorState = ActorWinning
, actorStartPosition = finishPosition
, actorFinishPosition = finishPosition
}]
else return [actor
{ actorTime = 0
, actorTotalTime = len / actorGroundSpeed
, actorStartPosition = f
, actorFinishPosition = finishPosition
, actorState = ActorRunning
, actorAngle = 0
}]
else return [actor
{ actorTime = t + frameTime
, actorAngle = actorAngle actor + actorAngleSpeed * frameTime
}]
ActorDead -> do
if t >= tt then return []
else return [actor
{ actorTime = t + frameTime
}]
ActorExplode -> do
if t >= tt then return []
else return [actor
{ actorTime = t + frameTime
}]
ActorWinning -> do
if t >= tt then return []
else return [actor
{ actorTime = t + frameTime
}]
do
s1 <- get
newActors <- liftM concat $ mapM stepActor $ gsActors s1
state $ \s -> ((), s
{ gsActors = newActors
})
-- apply damages
do
s <- get
let applyDamages (Damage eat ep) actors = map (\actor@Actor
{ actorType = at
, actorState = as
} -> let
Vec3 px py _pz = calcActorPosition actor
in
if at == eat || as == ActorDead || norm (ep - (Vec2 px py)) > 0.5 then actor
else actor
{ actorState = ActorDead
, actorTime = 0
, actorTotalTime = actorDeadTime
, actorStartPosition = Vec2 px py
}) actors
put s
{ gsActors = foldr applyDamages (gsActors s) $ gsDamages s
, gsDamages = []
}
-- annigilate ground actors
state $ \s -> let actors = gsActors s in ((), s
{ gsActors = map (\actor -> let
p@(Vec3 px py _pz) = calcActorPosition actor
at = actorType actor
keep = actorState actor /= ActorRunning || all (\actor2 -> let
p2 = calcActorPosition actor2
at2 = actorType actor2
as2 = actorState actor2
in at == at2 || as2 /= ActorRunning || norm (p - p2) > 0.25
) actors
in if keep then actor
else let startPosition = Vec2 px py in actor
{ actorState = ActorExplode
, actorTime = 0
, actorTotalTime = actorExplodeTime
, actorStartPosition = startPosition
, actorFinishPosition = startPosition + normalize (actorFinishPosition actor - startPosition) * vecFromScalar actorExplodeDistance
}
) actors
})
-- process user's gun
do
s1 <- get
if gsPhase s1 == GameBattle && gunStateTime (gsUserGun s1) <= 0 then do
case gsUserSpawn s1 of
Just position -> do
let at = gsUserActorType s1
case spawnActor at (castlePosition at) position of
Just actor -> state $ \s -> ((), s
{ gsActors = actor : gsActors s
, gsUserGun = (gsUserGun s)
{ gunStateTime = gunCoolDown
}
})
Nothing -> return ()
state $ \s -> ((), s
{ gsUserSpawn = Nothing
})
Nothing -> return ()
else return ()
-- process computer's gun
do
s <- get
if gsPhase s == GameBattle && gunStateTime (gsComputerGun s) <= 0 then do
let minx = -fieldWidth
let maxx = fieldWidth
let at = enemyActor $ gsUserActorType s
let cl = castleLine at
let miny = if cl > 0 then 0 else cl
let maxy = if cl > 0 then cl else 0
x <- liftIO $ getStdRandom $ randomR (minx, maxx)
y <- liftIO $ getStdRandom $ randomR (miny, maxy)
case spawnActor at (castlePosition at) (Vec2 x y) of
Just actor -> put s
{ gsActors = actor : gsActors s
, gsComputerGun = (gsComputerGun s)
{ gunStateTime = gunCoolDown
}
}
Nothing -> return ()
else return ()
-- process gun cooldowns
let processGun gs = gs { gunStateTime = gunStateTime gs - frameTime }
state $ \s -> ((), s
{ gsUserGun = processGun $ gsUserGun s
, gsComputerGun = processGun $ gsComputerGun s
})
-- step light
state $ \s -> ((), s
{ gsLightAngle = gsLightAngle s + frameTime * 3
})
#if defined(ghcjs_HOST_OS)
-- update lives
get >>= \s -> liftIO $ do
if gsPhase s == GameBattle then do
js_setStyleWidth (pToJSVal $ T.pack "beaver_lives") $ pToJSVal $ T.pack $ show $ (fromIntegral $ gsBeaverLives s) * (100 :: Float) / fromIntegral livesAmount
js_setStyleWidth (pToJSVal $ T.pack "peka_lives") $ pToJSVal $ T.pack $ show $ (fromIntegral $ gsPekaLives s) * (100 :: Float) / fromIntegral livesAmount
else return ()
-- check end
get >>= \s -> do
if gsPhase s == GameBattle then do
let beaverLives = gsBeaverLives s
let pekaLives = gsPekaLives s
if beaverLives <= 0 || pekaLives <= 0 then do
put s { gsPhase = GameFinish }
let beaverWon = beaverLives > 0
let userWon = beaverWon == (gsUserActorType s == Beaver)
liftIO $ js_end (pToJSVal $ T.pack $ if beaverWon then "beaver" else "peka") $ pToJSVal $ T.pack $ if userWon then "You won!" else "You lose!"
else return ()
else return ()
-- register start functions
gameStateVar <- liftIO $ newEmptyMVar
do
let start at alpha = do
js_start
putMVar gameStateVar initialGameState
{ gsUserActorType = at
, gsCameraAlpha = alpha
}
beaverStart <- syncCallback ThrowWouldBlock $ start Beaver $ (-pi) / 2
pekaStart <- syncCallback ThrowWouldBlock $ start Peka $ pi / 2
js_registerStart beaverStart pekaStart
-- main loop
runApp $ \frameTime -> modifyMVar_ gameStateVar $ execStateT (gameStep frameTime)
-- JavaScript FFI shims (GHCJS build only).
-- | Set an element's CSS width; $2 is interpreted as a percentage.
foreign import javascript unsafe "document.getElementById($1).style.width=$2+'%'" js_setStyleWidth :: JSVal -> JSVal -> IO ()
-- | Attach the two given callbacks as click handlers of the
-- start-beaver / start-peka buttons.
foreign import javascript unsafe "document.getElementById('start-beaver').addEventListener('click', $1, false);document.getElementById('start-peka').addEventListener('click', $2, false);" js_registerStart :: Callback (IO ()) -> Callback (IO ()) -> IO ()
-- | Hide the start screen.
foreign import javascript unsafe "document.getElementById('start').style.display='none';" js_start :: IO ()
-- | Show the end screen for side $1 ("beaver"/"peka") with title text $2.
foreign import javascript unsafe "document.getElementById('end-'+$1).style.display='block'; document.getElementById('end-title').innerText=$2; document.getElementById('end').style.display='block';" js_end :: JSVal -> JSVal -> IO ()
#else
-- main loop
do
gameStateRef <- newIORef $ initialGameState
{ gsUserActorType = Peka
, gsCameraAlpha = pi / 2
}
runApp $ \frameTime -> writeIORef gameStateRef =<< execStateT (gameStep frameTime) =<< readIORef gameStateRef
#endif
-- | Top-level exception handler: dump the exception to stdout.
-- 'print' is the idiomatic spelling of @putStrLn . show@ (hlint: "Use print").
errorHandler :: SomeException -> IO ()
errorHandler = print
|
quyse/pekabeaver
|
Main.hs
|
Haskell
|
mit
| 24,202
|
module Problem021 where
import Data.List (group)
-- | Project Euler 21: print the sum of all amicable numbers below 10000.
main =
  print (sum [n | n <- [2 .. 10000], amicable n])
-- | A number is amicable when its proper-divisor sum points at a *different*
-- number whose proper-divisor sum points back.
amicable n
  | partner == n = False              -- perfect numbers don't count
  | otherwise    = sumd partner == n
  where partner = sumd n
-- | Sum of the proper divisors of @n@, via the multiplicative
-- sigma formula over the prime factorisation: sigma(n) - n.
sumd n = product [ (p ^ (k + 1) - 1) `div` (p - 1) | (p, k) <- pks ]
       - product [ p ^ k | (p, k) <- pks ]
  where -- (prime, multiplicity) pairs; 'group' runs are never empty
        pks = [ (head run, length run) | run <- group (factors n) ]
-- | Prime factorisation of @x@ in non-decreasing order.
--
-- Fixes two wrinkles of the original: the accumulator argument was dead
-- (only ever @[]@), and after dividing out a prime the original called
-- @factors q@, restarting the trial division from 2 for every factor.
-- Here we keep our position in the prime list and retry the same prime.
factors x = go x primes
  where
    go 1 _ = []
    go n pps@(p:ps)
      | r == 0    = p : go q pps   -- p may divide n more than once
      | otherwise = go n ps
      where (q, r) = n `quotRem` p
    go _ [] = error "factors: exhausted the infinite prime list"
-- | The infinite list of primes: 2, 3, then every odd candidate that no
-- smaller prime up to its square root divides (self-referential trial
-- division — laziness makes the recursion well-founded).
primes = 2 : 3 : filter isPrime [5, 7 ..]
  where
    isPrime n = check primes
      where check (p:ps) = p * p > n || (n `rem` p /= 0 && check ps)
|
vasily-kartashov/playground
|
euler/problem-021.hs
|
Haskell
|
apache-2.0
| 647
|
-- Copyright 2016 Google Inc. All Rights Reserved.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE TemplateHaskell, GADTs, KindSignatures, TypeSynonymInstances, FlexibleInstances #-}
import Data.List
import qualified Data.Map.Strict as M
import Control.Monad
import Control.Monad.Operational
import Control.Monad.Loops
import Lens.Micro
import Lens.Micro.TH
import qualified Test.QuickCheck as Q
import qualified Test.QuickCheck.Gen as Q
import Debug.Trace
{- Actors Library -}
-- | Process identifiers are plain integers (indices into the actor list).
type ProcessId = Int
-- | A message paired with a process id (the sender, as delivered by
-- 'stepActor').
type Message m = (ProcessId, m)
-- | Instruction set of an actor program, interpreted with the
-- @operational@ package's 'Program'.
data ActorInstruction s m :: * -> * where
  -- Read the actor's private state.
  GetState :: ActorInstruction s m s
  -- Replace the actor's private state.
  SetState :: s -> ActorInstruction s m ()
  -- Send a message to another process.
  SendMessage :: ProcessId -> m -> ActorInstruction s m ()
  -- Block until a message is available (semantics in 'stepActor').
  WaitForMessage :: ActorInstruction s m (Message m)
-- | An actor program is a sequence of 'ActorInstruction's.
type ActorProgram s m a = Program (ActorInstruction s m) a
-- | Per-sender inbound message queues.
type Queues m = M.Map ProcessId [m]
-- | A running actor: its remaining program, current state, and inbox.
data Actor s m =
  Actor {
    _actorProgram :: ActorProgram s m (),
    _actorState :: s,
    _actorQueues :: Queues m
  }
-- Generates lenses actorProgram / actorState / actorQueues.
makeLenses ''Actor
-- | Append a message to the back of the queue for the given process,
-- creating the queue if it does not exist yet (FIFO order preserved).
enqueue :: ProcessId -> m -> Queues m -> Queues m
enqueue pid msg = M.alter (Just . maybe [msg] (++ [msg])) pid
-- | Pop the front message of the given process's queue, if any.
-- An emptied queue is removed from the map rather than left as @[]@.
dequeue :: ProcessId -> Queues m -> Maybe (m, Queues m)
dequeue pid queues =
  case M.lookup pid queues of
    Just (msg : rest) -> Just (msg, remaining rest)
    _                 -> Nothing
  where
    remaining []   = M.delete pid queues
    remaining msgs = M.insert pid msgs queues
-- | Execute one instruction of an actor. @sid@ is the process whose queue
-- is consulted if the next instruction is 'WaitForMessage'. Returns the
-- updated actor plus, for 'SendMessage', the outgoing message tagged with
-- its recipient. 'view' here is @operational@'s program deconstructor.
stepActor :: ProcessId -> Actor s m -> (Actor s m, Maybe (Message m))
stepActor sid (Actor program state queues) = stepActor' (view program)
  where stepActor' (GetState :>>= is) = (Actor (is state) state queues, Nothing)
        stepActor' (SetState x :>>= is) = (Actor (is ()) x queues, Nothing)
        stepActor' (SendMessage pid m :>>= is) = (Actor (is ()) state queues, Just (pid, m))
        stepActor' (WaitForMessage :>>= is)
          | Just (m, queues') <- dequeue sid queues = (Actor (is (sid, m)) state queues', Nothing)
        -- Covers 'Return' and a receive with no pending message from sid:
        -- the actor is left untouched (i.e. it stays blocked).
        stepActor' _ = (Actor program state queues, Nothing)
-- | Advance actor @pid@ by one instruction, receiving (if applicable) from
-- sender @sid@. An emitted message is appended to the recipient's queue
-- under the sender's pid.
step :: (ProcessId, ProcessId) -> [Actor s m] -> [Actor s m]
step (pid, sid) actors | Just (rid, m) <- message = actors' & ix rid . actorQueues %~ enqueue pid m
                       | otherwise = actors'
  where (actor', message) = stepActor sid (actors !! pid)
        actors' = actors & ix pid .~ actor'
-- | Run a whole schedule, keeping every intermediate snapshot; per 'scanl'
-- the first element is the initial configuration.
run :: [(ProcessId, ProcessId)] -> [Actor s m] -> [[Actor s m]]
run pids actors = scanl (flip step) actors pids
-- | Like 'run', but builds the actors from (program, initial state) pairs
-- with empty inboxes.
simpleRun :: [(ProcessId, ProcessId)] -> [(ActorProgram s m (), s)] -> [[Actor s m]]
simpleRun pids pairs = run pids [Actor p s M.empty | (p, s) <- pairs]
{- Helper Functions for writing ActorPrograms -}
-- Lift each instruction into a one-step 'ActorProgram'.
getState = singleton GetState
setState s = singleton (SetState s)
sendMessage pid m = singleton (SendMessage pid m)
waitForMessage = singleton WaitForMessage
-- | Apply a pure function to the actor state.
modify f = do { s <- getState; setState (f s) }
-- | Read one field of the state through a lens.
get l = do { s <- getState; return (s ^. l) }
infix 4 .=, %=, +=, ++=
-- Lens-flavoured state operators: set, modify, add, append.
l .= x = modify (l .~ x)
l %= f = modify (l %~ f)
l += x = l %= (+x)
l ++= x = l %= (++x)
{- Pretty-Printing -}
-- | Render a single pending instruction for the trace output.
instance (Show s, Show m) => Show (ActorInstruction s m a) where
  show GetState = "_ <- getState"
  show (SetState x) = "setState " ++ show x
  show (SendMessage pid m) = "sendMessage " ++ show pid ++ " " ++ show m
  show WaitForMessage = "_ <- waitForMessage"
-- | Show only the head instruction of a program view; the continuation is
-- elided as "...".
instance (Show s, Show m, Show a) => Show (ProgramView (ActorInstruction s m) a) where
  show (Return x) = "return " ++ show x
  show (i :>>= is) = show i ++ "; ..."
-- | Types that can contribute an extra per-process debug line to traces.
class Debuggable a where
  debug :: ProcessId -> a -> String
-- removes boring steps from the trace
-- A step is "boring" when the advanced actor's next instruction is a bare
-- GetState. The final snapshot is re-appended so the last state always
-- survives the filtering.
cleanup :: [(ProcessId, ProcessId)] -> [[Actor s m]] -> ([(ProcessId, ProcessId)], [[Actor s m]])
cleanup pids steps = let (pids', steps') = unzip (filter keep (zip pids steps)) in (pids', steps' ++ [last steps])
  where keep ((pid, sid), actors) | (GetState :>>= _) <- view ((actors !! pid) ^. actorProgram) = False
        keep _ = True
-- | Render a trace: the initial configuration followed by one
-- banner-separated snapshot per scheduling step.
pretty :: (Debuggable s, Show s, Show m) => [(ProcessId, ProcessId)] -> [[Actor s m]] -> String
-- Totality fix: the original only matched (t:ts) and crashed on an empty
-- trace. 'run' always yields a non-empty list, but stay total anyway.
pretty _ [] = ""
pretty pids (t:ts) = unlines (prettyActors t : zipWith prettyStep (pids) ts)
  where prettyStep pid actors = banner ("advance " ++ show pid) ++ "\n\n" ++ prettyActors actors
        banner x = "=============\n" ++ x ++ "\n============="
        prettyActors actors = unlines (zipWith prettyActor [0..] actors)
        -- One paragraph per actor: program position, state, debug, queues.
        prettyActor pid (Actor program state queues) =
          unlines [ "pid " ++ show pid ++ ":"
                  , "  program: " ++ show (view program)
                  , "  state: " ++ show state
                  , "  debug: " ++ debug pid state
                  , "  queues: " ++ prettyQueues (M.toList queues) ]
        prettyQueues queues = "{" ++ intercalate ", " (map prettyQueue queues) ++ "}"
        prettyQueue (pid, queue) = show pid ++ ": [" ++ intercalate ", " (map show queue) ++ "]"
-- | 'pretty' after 'cleanup': drop uninteresting steps, keep final state.
prettyClean pids snaphots = uncurry pretty (cleanup pids snaphots)
{- Example -}
-- | Messages of Lamport's distributed mutual-exclusion algorithm.
data Msg = RequestResource | ReleaseResource | Ack
  deriving Show
-- | Per-process state: a Lamport clock, the newest timestamp seen from
-- each peer, and the queue of outstanding resource requests.
data State = State {
  _clock :: Int,
  _lastSeenTimestamps :: M.Map ProcessId Int,
  _requestQueue :: [(Int, ProcessId)] -- always sorted
}
  deriving Show
makeLenses ''State
-- | A process owns the resource iff its request heads the (sorted) request
-- queue and every peer has already been heard from *after* that request's
-- timestamp (so no earlier request can still be in flight).
ownsTheResource me queue seen =
  case queue of
    []               -> False
    (ts, holder) : _ -> holder == me && all (> ts) (M.elems seen)
-- | Show ownership status on the trace's per-process debug line.
instance Debuggable State where
  debug pid state = "owns = " ++ show (ownsTheResource pid rq lastSeen)
    where rq = state ^. requestQueue
          lastSeen = state ^. lastSeenTimestamps
-- | Insert a request, keeping the queue sorted by (timestamp, pid).
addToRequestQueue ts pid = requestQueue %= insert (ts,pid)
-- | Drop every queued request made by the given process.
rmFromRequestQueue pid = requestQueue %= filter isNotPid
  where isNotPid (_, qid) = qid /= pid
-- | Receive a message while maintaining the Lamport clock: tick once for
-- the receive event, then raise the clock to the *message's* timestamp if
-- that is larger (Lamport's receive rule: clock := max(clock, msg ts)).
--
-- Bug fix: the original bound the tuple as @(ts, m)@ and compared @ts@ —
-- but the first component of 'waitForMessage' is the sender pid (see
-- 'stepActor' and the caller, which binds @(pid, (ots, msg))@), so the
-- clock was being merged with a process id instead of a timestamp.
waitForMessage' = do
  clock += 1
  (pid, msg@(mts, _)) <- waitForMessage
  c <- get clock
  when (mts > c) (clock .= mts)
  return (pid, msg)
-- | Does this process currently own the resource, per its local view?
iOwnTheResource myPid =
  ownsTheResource myPid <$> get requestQueue <*> get lastSeenTimestamps
-- | Tick the Lamport clock and return the new value.
incrementClock = do
  c <- get clock
  clock .= c + 1
  return (c + 1)
-- | Lamport's mutual-exclusion algorithm for one process. Forever:
-- if we own the resource, release it (dequeue ourselves, broadcast
-- ReleaseResource); otherwise request it (enqueue ourselves, broadcast
-- RequestResource) and process incoming messages until we own it.
program :: ProcessId -> [ProcessId] -> ActorProgram State (Int, Msg) ()
program myPid otherPids = forever $ do
  iOwn <- iOwnTheResource myPid
  if iOwn
    then do
      rmFromRequestQueue myPid
      ts <- incrementClock
      forM_ otherPids $ \pid -> do
        sendMessage pid (ts, ReleaseResource)
    else do
      ts <- incrementClock
      addToRequestQueue ts myPid
      forM_ otherPids $ \pid -> do
        sendMessage pid (ts, RequestResource)
      whileM_ (not <$> iOwnTheResource myPid) $ do
        (pid, (ots, msg)) <- waitForMessage'
        -- Record the newest timestamp seen from this peer: ownership
        -- requires having heard from everyone after our own request.
        lastSeenTimestamps %= M.insert pid ots
        case msg of
          RequestResource -> do
            addToRequestQueue ots pid
            ts <- incrementClock
            -- Ack so the requester eventually hears from us.
            sendMessage pid (ts, Ack)
          ReleaseResource -> do
            rmFromRequestQueue pid
          _ -> return ()
-- | Three processes running the algorithm. Clocks start at 0, peers'
-- last-seen timestamps at -1, and every queue starts with request (-2, 0),
-- i.e. all processes initially agree that process 0 holds the resource.
actors = [
  (program 0 [1,2], State 0 (M.fromList [(1,-1), (2,-1)]) [(-2,0)]),
  (program 1 [0,2], State 0 (M.fromList [(0,-1), (2,-1)]) [(-2,0)]),
  (program 2 [0,1], State 0 (M.fromList [(0,-1), (1,-1)]) [(-2,0)])]
-- We define an alias for lists of pairs of pids that represent execution
-- sequences. (The first element of a pair is the id of the process to advance,
-- the second element is the process it should receive a message from if such a
-- message is waiting and the next execution step is to wait for a message.) It
-- allows us to define custom instances for Arbitrary and Show.
-- | Wrapper for schedules so they get custom Arbitrary/Show instances.
newtype ExecutionSequence = ExecutionSequence [(ProcessId, ProcessId)]
instance Q.Arbitrary ExecutionSequence where
  arbitrary = ExecutionSequence <$> Q.listOf ((,) <$> pids <*> pids)
    where pids = Q.elements [0,1,2]
  shrink (ExecutionSequence pids) = ExecutionSequence <$> Q.shrink pids
-- | Failing schedules print both the raw pid pairs and the cleaned trace.
instance Show ExecutionSequence where
  show (ExecutionSequence ns) = unlines [show ns, prettyClean ns (simpleRun ns actors)]
-- | Safety: in no snapshot do two processes both believe they own the
-- resource.
property1 (ExecutionSequence pids) = all property' (simpleRun pids actors)
  where property' actors = length (filter id (zipWith owns [0..] actors)) <= 1
        owns pid actor = ownsTheResource pid (actor ^. actorState.requestQueue) (actor ^. actorState.lastSeenTimestamps)
-- | No global blocking: never are all processes simultaneously waiting on
-- an empty inbox.
property2 (ExecutionSequence pids) = all property' (simpleRun pids actors)
  where property' actors = not (all stuck actors)
        stuck actor = M.null (actor ^. actorQueues) && isWaiting (view (actor ^. actorProgram))
        isWaiting (WaitForMessage :>>= _) = True
        isWaiting _ = False
-- Infinite round-robin of process ids 0,1,2.
allpids = 0:1:2:allpids
-- | A fair schedule: each process repeatedly gets a chance to receive from
-- every other process.
fairExecutionSequence = concatMap (\pid -> [(pid, 0), (pid, 1), (pid, 2)]) allpids
fairTrace = simpleRun fairExecutionSequence actors
-- | Who (if anyone) owns the resource in each snapshot of the fair trace.
owners = map extractOwner fairTrace
  where extractOwner actors = fst <$> find isOwner (zip [0..] actors)
        isOwner (pid, actor) = ownsTheResource pid (actor ^. actorState.requestQueue) (actor ^. actorState.lastSeenTimestamps)
-- | Liveness under the fair schedule: each process eventually owns the
-- resource. ('owners' is infinite; 'elem' terminates only on success.)
property3 = and [Just n `elem` owners | n <- [0,1,2]]
-- | QuickCheck the safety and no-global-blocking properties over random
-- schedules, then evaluate the fair-schedule liveness check.
main = do
  Q.quickCheckWith args property1
  Q.quickCheckWith args property2
  print property3
  where args = Q.stdArgs { Q.maxSuccess = 1000, Q.maxSize = 1000 }
|
polux/snippets
|
Lamport.hs
|
Haskell
|
apache-2.0
| 9,470
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE ViewPatterns #-}
import Control.Monad
import System.Log.Logger (Priority(DEBUG),rootLoggerName,setLevel,updateGlobalLogger)
import System.Random (randomRIO)
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Path,Test)
import Test.QuickCheck.Instances ()
import Test.QuickCheck.Property (ioProperty)
import LogicGrowsOnTrees.Examples.MapColoring
-- | Turn on verbose logging, then hand control to the test runner.
main :: IO ()
main = updateGlobalLogger rootLoggerName (setLevel DEBUG) >> defaultMain tests
-- | Randomised map-colouring checks: every colouring produced by the
-- solver is proper (no two neighbouring countries share a colour), and
-- the solver's solution count matches a brute-force enumeration.
tests :: [Test]
tests =
  [testGroup "LogicGrowsOnTrees.Examples"
    [testProperty name . ioProperty $ do
      number_of_colors ← randomRIO (2,5)
      number_of_countries ← randomRIO (3,7)
      neighbor_probability ← randomRIO (0,1::Float)
      -- Build a random symmetric neighbour relation over country pairs.
      -- NOTE(review): an edge is added when outcome > neighbor_probability,
      -- i.e. with probability 1 - neighbor_probability — the name suggests
      -- the opposite; harmless for the test, but worth confirming.
      neighbors ← fmap (concat . concat) $
        forM [1..number_of_countries] $ \x →
          forM [x+1..number_of_countries] $ \y → do
            outcome ← randomRIO (0,1)
            return $
              if outcome > neighbor_probability
                then [(x,y),(y,x)]
                else []
      let solutions =
            computeSolutions
              number_of_colors
              number_of_countries
              (\x y → (x,y) `elem` neighbors)
      -- Every reported solution must be a proper colouring.
      forM_ solutions $ \solution →
        forM_ solution $ \(country_1,color_1) →
          forM_ solution $ \(country_2,color_2) →
            when ((country_1,country_2) `elem` neighbors) $
              assertBool "neighbors have different colors" $ color_1 /= color_2
      -- Brute force: count all colour assignments that satisfy every
      -- neighbour constraint (the list monad's 'guard' prunes bad ones).
      let correct_count = sum $ do
            solution ← zip [1..] <$> replicateM (fromIntegral number_of_countries) [1..number_of_colors]
            forM_ solution $ \(country_1,color_1) →
              forM_ solution $ \(country_2,color_2) →
                when ((country_1,country_2) `elem` neighbors) $
                  guard $ color_1 /= color_2
            return 1
      computeCount number_of_colors solutions @?= correct_count
      return True
    -- The same property is run over both solution generators; the second
    -- one counts each unique solution weighted by its colour permutations.
    | (name,computeSolutions,computeCount) ←
      [("coloringSolutions",coloringSolutions,curry (fromIntegral . length . snd))
      ,("coloringUniqueSolutions",coloringUniqueSolutions,
        \number_of_colors →
          sum
          .
          map (\solution →
            let number_of_colors_used = maximum . fmap snd $ solution
            in product [number_of_colors-number_of_colors_used+1..number_of_colors]
          )
       )
      ]
    ]
  ]
|
gcross/LogicGrowsOnTrees
|
LogicGrowsOnTrees/tests/test-Examples.hs
|
Haskell
|
bsd-2-clause
| 3,236
|
{-# LANGUAGE TemplateHaskell, TypeSynonymInstances, FlexibleInstances,
OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Objects
( testObjects
, Node(..)
, genConfigDataWithNetworks
, genDisk
, genDiskWithChildren
, genEmptyCluster
, genInst
, genInstWithNets
, genValidNetwork
, genBitStringMaxLen
) where
import Test.QuickCheck
import qualified Test.HUnit as HUnit
import Control.Applicative
import Control.Monad
import Data.Char
import qualified Data.List as List
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import qualified Data.Set as Set
import Data.Word (Word32)
import GHC.Exts (IsString(..))
import System.Time (ClockTime(..))
import qualified Text.JSON as J
import Test.Ganeti.Query.Language ()
import Test.Ganeti.SlotMap (genSlotLimit)
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Types ()
import qualified Ganeti.Constants as C
import qualified Ganeti.ConstantUtils as CU
import Ganeti.Network
import Ganeti.Objects as Objects
import qualified Ganeti.Objects.BitArray as BA
import Ganeti.JSON
import Ganeti.Types
-- * Arbitrary instances
-- | One 'DataCollectorConfig' entry per known data collector name.
instance Arbitrary (Container DataCollectorConfig) where
  arbitrary = do
    let names = CU.toList C.dataCollectorNames
    activations <- vector $ length names
    timeouts <- vector $ length names
    let configs = zipWith DataCollectorConfig activations timeouts
    return GenericContainer {
      fromContainer = Map.fromList $ zip names configs }
$(genArbitrary ''PartialNDParams)
-- | Nodes with FQDN-shaped name/address fields and random tags.
instance Arbitrary Node where
  arbitrary = Node <$> genFQDN <*> genFQDN <*> genFQDN
              <*> arbitrary <*> arbitrary <*> arbitrary <*> genFQDN
              <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
              <*> arbitrary <*> arbitrary <*> genFQDN <*> arbitrary
              <*> (Set.fromList <$> genTags)
$(genArbitrary ''BlockDriver)
$(genArbitrary ''DiskMode)
instance Arbitrary LogicalVolume where
  arbitrary = LogicalVolume <$> validName <*> validName
    where
      validName = -- we intentionally omit '.' and '-' to avoid forbidden names
        listOf1 $ elements (['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] ++ "+_")
-- | Covers every constructor of the manually-serialised logical-id type.
instance Arbitrary DiskLogicalId where
  arbitrary = oneof [ LIDPlain <$> arbitrary
                    , LIDDrbd8 <$> genFQDN <*> genFQDN <*> arbitrary
                               <*> arbitrary <*> arbitrary <*> arbitrary
                    , LIDFile <$> arbitrary <*> arbitrary
                    , LIDBlockDev <$> arbitrary <*> arbitrary
                    , LIDRados <$> arbitrary <*> arbitrary
                    ]
-- | 'Disk' 'arbitrary' instance. Since we don't test disk hierarchy
-- properties, we only generate disks with no children (FIXME), as
-- generating recursive datastructures is a bit more work.
instance Arbitrary Disk where
  arbitrary =
    -- Real disks are twice as likely as forthcoming ones; both get an
    -- empty children list (the 'pure []').
    frequency [ (2, liftM RealDisk $ RealDiskData <$> arbitrary
                  <*> pure [] <*> arbitrary
                  <*> arbitrary <*> arbitrary <*> arbitrary
                  <*> arbitrary <*> arbitrary <*> arbitrary
                  <*> arbitrary <*> arbitrary <*> arbitrary
                  <*> arbitrary)
              , (1, liftM ForthcomingDisk $ ForthcomingDiskData <$> arbitrary
                  <*> pure [] <*> arbitrary
                  <*> arbitrary <*> arbitrary <*> arbitrary
                  <*> arbitrary <*> arbitrary <*> arbitrary
                  <*> arbitrary <*> arbitrary <*> arbitrary
                  <*> arbitrary)
              ]
-- FIXME: we should generate proper values, >=0, etc., but this is
-- hard for partial ones, where all must be wrapped in a 'Maybe'
-- Template-Haskell derived generators for the partial parameter types.
$(genArbitrary ''PartialBeParams)
$(genArbitrary ''AdminState)
$(genArbitrary ''AdminStateSource)
$(genArbitrary ''PartialNicParams)
$(genArbitrary ''PartialNic)
-- | Forthcoming instances: most fields are optional ('genMaybe'); exactly
-- five disks are generated, and the hv/os param containers stay empty.
instance Arbitrary ForthcomingInstanceData where
  arbitrary =
    ForthcomingInstanceData
      -- name
      <$> genMaybe genFQDN
      -- primary node
      <*> genMaybe genFQDN
      -- OS
      <*> genMaybe genFQDN
      -- hypervisor
      <*> arbitrary
      -- hvparams
      -- FIXME: add non-empty hvparams when they're a proper type
      <*> pure (GenericContainer Map.empty)
      -- beparams
      <*> arbitrary
      -- osparams
      <*> pure (GenericContainer Map.empty)
      -- osparams_private
      <*> pure (GenericContainer Map.empty)
      -- admin_state
      <*> genMaybe arbitrary
      -- admin_state_source
      <*> genMaybe arbitrary
      -- nics
      <*> arbitrary
      -- disks
      <*> vectorOf 5 arbitrary
      -- disks active
      <*> genMaybe arbitrary
      -- network port
      <*> arbitrary
      -- ts
      <*> arbitrary <*> arbitrary
      -- uuid
      <*> arbitrary
      -- serial
      <*> arbitrary
      -- tags
      <*> (Set.fromList <$> genTags)
-- | Real instances: same field order as the forthcoming variant, but the
-- identity fields are mandatory; exactly five disks are generated.
instance Arbitrary RealInstanceData where
  arbitrary =
    RealInstanceData
      -- name
      <$> genFQDN
      -- primary node
      <*> genFQDN
      -- OS
      <*> genFQDN
      -- hypervisor
      <*> arbitrary
      -- hvparams
      -- FIXME: add non-empty hvparams when they're a proper type
      <*> pure (GenericContainer Map.empty)
      -- beparams
      <*> arbitrary
      -- osparams
      <*> pure (GenericContainer Map.empty)
      -- osparams_private
      <*> pure (GenericContainer Map.empty)
      -- admin_state
      <*> arbitrary
      -- admin_state_source
      <*> arbitrary
      -- nics
      <*> arbitrary
      -- disks
      <*> vectorOf 5 arbitrary
      -- disks active
      <*> arbitrary
      -- network port
      <*> arbitrary
      -- ts
      <*> arbitrary <*> arbitrary
      -- uuid
      <*> arbitrary
      -- serial
      <*> arbitrary
      -- tags
      <*> (Set.fromList <$> genTags)
-- | Real instances are three times as likely as forthcoming ones.
instance Arbitrary Instance where
  arbitrary = frequency [ (1, ForthcomingInstance <$> arbitrary)
                        , (3, RealInstance <$> arbitrary)
                        ]
-- | Generates an instance that is connected to the given networks
-- and possibly some other networks
genInstWithNets :: [String] -> Gen Instance
genInstWithNets nets = do
  -- always start from a real instance: only those carry a NIC list
  plain_inst <- RealInstance <$> arbitrary
  enhanceInstWithNets plain_inst nets
-- | Generates an instance that is connected to some networks
genInst :: Gen Instance
genInst = genInstWithNets []
-- | Enhances a given instance with network information, by connecting it to the
-- given networks and possibly some other networks
enhanceInstWithNets :: Instance -> [String] -> Gen Instance
enhanceInstWithNets inst nets = do
  mac <- arbitrary
  ip <- arbitrary
  nicparams <- arbitrary
  name <- arbitrary
  uuid <- arbitrary
  -- generate some more networks than the given ones
  num_more_nets <- choose (0,3)
  more_nets <- vectorOf num_more_nets genUUID
  -- One NIC per distinct network; all NICs share mac/ip/params/name/uuid.
  let genNic net = PartialNic mac ip nicparams net name uuid
      partial_nics = map (genNic . Just)
                         (List.nub (nets ++ more_nets))
      -- Forthcoming instances are returned unchanged (no real NIC list).
      new_inst = case inst of
                   RealInstance rinst ->
                     RealInstance rinst { realInstNics = partial_nics }
                   ForthcomingInstance _ -> inst
  return new_inst
-- | Generate a real disk with the requested number of child disks
-- (children themselves are leaves).
genDiskWithChildren :: Int -> Gen Disk
genDiskWithChildren num_children = do
  logicalid <- arbitrary
  children <- vectorOf num_children (genDiskWithChildren 0)
  nodes <- arbitrary
  ivname <- genName
  size <- arbitrary
  mode <- arbitrary
  name <- genMaybe genName
  spindles <- arbitrary
  params <- arbitrary
  uuid <- genName
  serial <- arbitrary
  time <- arbitrary
  -- the same timestamp is used for both ctime and mtime
  return . RealDisk $
    RealDiskData logicalid children nodes ivname size mode name
      spindles params uuid serial time time
-- | A disk with three children.
genDisk :: Gen Disk
genDisk = genDiskWithChildren 3
-- | FIXME: This generates completely random data, without normal
-- validation rules.
$(genArbitrary ''PartialISpecParams)
-- | FIXME: This generates completely random data, without normal
-- validation rules.
$(genArbitrary ''PartialIPolicy)
$(genArbitrary ''FilledISpecParams)
$(genArbitrary ''MinMaxISpecs)
$(genArbitrary ''FilledIPolicy)
$(genArbitrary ''IpFamily)
$(genArbitrary ''FilledNDParams)
$(genArbitrary ''FilledNicParams)
$(genArbitrary ''FilledBeParams)
-- | No real arbitrary instance for 'ClusterHvParams' yet.
instance Arbitrary ClusterHvParams where
  arbitrary = return $ GenericContainer Map.empty
-- | No real arbitrary instance for 'OsHvParams' yet.
instance Arbitrary OsHvParams where
  arbitrary = return $ GenericContainer Map.empty
-- | No real arbitrary instance for 'GroupDiskParams' yet.
instance Arbitrary GroupDiskParams where
  arbitrary = return $ GenericContainer Map.empty
-- | A single NIC-parameter set stored under the default key.
instance Arbitrary ClusterNicParams where
  arbitrary = (GenericContainer . Map.singleton C.ppDefault) <$> arbitrary
instance Arbitrary OsParams where
  arbitrary = (GenericContainer . Map.fromList) <$> arbitrary
instance Arbitrary Objects.ClusterOsParamsPrivate where
  arbitrary = (GenericContainer . Map.fromList) <$> arbitrary
instance Arbitrary a => Arbitrary (Private a) where
  arbitrary = Private <$> arbitrary
instance Arbitrary ClusterOsParams where
  arbitrary = (GenericContainer . Map.fromList) <$> arbitrary
instance Arbitrary ClusterBeParams where
  arbitrary = (GenericContainer . Map.fromList) <$> arbitrary
instance Arbitrary TagSet where
  arbitrary = Set.fromList <$> genTags
instance Arbitrary IAllocatorParams where
  arbitrary = return $ GenericContainer Map.empty
$(genArbitrary ''Cluster)
-- | Configs are empty clusters enriched with random networks.
instance Arbitrary ConfigData where
  arbitrary = genEmptyCluster 0 >>= genConfigDataWithNetworks
instance Arbitrary AddressPool where
  arbitrary = AddressPool . BA.fromList <$> arbitrary
instance Arbitrary Network where
  arbitrary = genValidNetwork
-- | All filter actions; rate limits are bounded by the slot-map generator.
instance Arbitrary FilterAction where
  arbitrary = oneof
    [ pure Accept
    , pure Pause
    , pure Reject
    , pure Continue
    , RateLimit <$> genSlotLimit
    ]
instance Arbitrary FilterPredicate where
  arbitrary = oneof
    [ FPJobId <$> arbitrary
    , FPOpCode <$> arbitrary
    , FPReason <$> arbitrary
    ]
instance Arbitrary FilterRule where
  arbitrary = FilterRule <$> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> genUUID
-- | Maintenance data whose first field is strictly positive.
instance Arbitrary MaintenanceData where
  arbitrary = MaintenanceData <$> (fromPositive <$> arbitrary)
                              <*> arbitrary
                              <*> arbitrary
                              <*> arbitrary
                              <*> arbitrary
-- | Generates a network instance with minimum netmasks of /24. Generating
-- bigger networks slows down the tests, because long bit strings are generated
-- for the reservations.
genValidNetwork :: Gen Objects.Network
genValidNetwork = do
  -- generate netmask for the IPv4 network
  netmask <- fromIntegral <$> choose (24::Int, 30)
  name <- genName >>= mkNonEmpty
  mac_prefix <- genMaybe genName
  net <- arbitrary
  net6 <- genMaybe genIp6Net
  gateway <- genMaybe arbitrary
  gateway6 <- genMaybe genIp6Addr
  -- reservation bitmaps sized to the host count implied by the netmask
  res <- liftM Just (genBitString $ netmask2NumHosts netmask)
  ext_res <- liftM Just (genBitString $ netmask2NumHosts netmask)
  uuid <- arbitrary
  ctime <- arbitrary
  mtime <- arbitrary
  let n = Network name mac_prefix (mkIp4Network net netmask) net6 gateway
            gateway6 res ext_res uuid ctime mtime 0 Set.empty
  return n
-- | Generate an arbitrary string consisting of '0' and '1' of the given length.
genBitString :: Int -> Gen AddressPool
genBitString len = do
  bits <- vectorOf len (elements [False, True])
  return (AddressPool (BA.fromList bits))
-- | Generate an arbitrary string consisting of '0' and '1' of the maximum given
-- length.
genBitStringMaxLen :: Int -> Gen AddressPool
genBitStringMaxLen maxLen = do
  len <- choose (0, maxLen)
  genBitString len
-- | Generator for config data with an empty cluster (no instances),
-- with N defined nodes.
genEmptyCluster :: Int -> Gen ConfigData
genEmptyCluster ncount = do
  nodes <- vector ncount
  version <- arbitrary
  grp <- arbitrary
  let guuid = groupUuid grp
      -- Rename nodes to "<short-name>-<idx>" to guarantee unique names,
      -- and attach each node to the single generated group.
      nodes' = zipWith (\n idx ->
                          let newname = takeWhile (/= '.') (nodeName n)
                                        ++ "-" ++ show idx
                          in (newname, n { nodeGroup = guuid,
                                           nodeName = newname}))
               nodes [(1::Int)..]
      nodemap = Map.fromList nodes'
      -- Sanity check: the renaming above must not have collided.
      contnodes = if Map.size nodemap /= ncount
                    then error ("Inconsistent node map, duplicates in" ++
                                " node name list? Names: " ++
                                show (map fst nodes'))
                    else GenericContainer nodemap
      continsts = GenericContainer Map.empty
      networks = GenericContainer Map.empty
      disks = GenericContainer Map.empty
      filters = GenericContainer Map.empty
  maintenance <- arbitrary
  let contgroups = GenericContainer $ Map.singleton guuid grp
  serial <- arbitrary
  -- timestamp fields
  ctime <- arbitrary
  mtime <- arbitrary
  -- keep the cluster record itself small so tests stay fast
  cluster <- resize 8 arbitrary
  let c = ConfigData version cluster contnodes contgroups continsts networks
            disks filters ctime maintenance mtime serial
  return c
-- | FIXME: make an even simpler base version of creating a cluster.
-- | Generates config data with a couple of networks.
genConfigDataWithNetworks :: ConfigData -> Gen ConfigData
genConfigDataWithNetworks old_cfg = do
  num_nets <- choose (0, 3)
  -- generate a list of network names (no duplicates)
  net_names <- genUniquesList num_nets genName >>= mapM mkNonEmpty
  -- generate a random list of networks (possibly with duplicate names)
  nets <- vectorOf num_nets genValidNetwork
  -- use unique names for the networks
  let nets_unique = map ( \(name, net) -> net { networkName = name } )
                    (zip net_names nets)
      -- key the container by network uuid
      net_map = GenericContainer $ Map.fromList
                (map (\n -> (networkUuid n, n)) nets_unique)
      new_cfg = old_cfg { configNetworks = net_map }
  return new_cfg
-- * Test properties
-- | Tests that fillDict behaves correctly
-- Three cases: empty custom keeps defaults, empty defaults keeps custom,
-- and deleting every key empties the result.
prop_fillDict :: [(Int, Int)] -> [(Int, Int)] -> Property
prop_fillDict defaults custom =
  let d_map = Map.fromList defaults
      d_keys = map fst defaults
      c_map = Map.fromList custom
      c_keys = map fst custom
  in conjoin [ counterexample "Empty custom filling"
               (fillDict d_map Map.empty [] == d_map)
             , counterexample "Empty defaults filling"
               (fillDict Map.empty c_map [] == c_map)
             , counterexample "Delete all keys"
               (fillDict d_map c_map (d_keys++c_keys) == Map.empty)
             ]
-- | Logical-volume serialisation round-trips.
prop_LogicalVolume_serialisation :: LogicalVolume -> Property
prop_LogicalVolume_serialisation = testSerialisation
-- | Malformed or reserved volume names must be rejected on deserialisation.
prop_LogicalVolume_deserialisationFail :: Property
prop_LogicalVolume_deserialisationFail =
  conjoin . map (testDeserialisationFail (LogicalVolume "" "")) $
    [ J.JSArray []
    , J.JSString $ J.toJSString "/abc"
    , J.JSString $ J.toJSString "abc/"
    , J.JSString $ J.toJSString "../."
    , J.JSString $ J.toJSString "g/snapshot"
    , J.JSString $ J.toJSString "g/a_mimagex"
    , J.JSString $ J.toJSString "g/r;3"
    ]
-- | Test that the serialisation of 'DiskLogicalId', which is
-- implemented manually, is idempotent. Since we don't have a
-- standalone JSON instance for DiskLogicalId (it's a data type that
-- expands over two fields in a JSObject), we test this by actually
-- testing entire Disk serialisations. So this tests two things at
-- once, basically.
prop_Disk_serialisation :: Disk -> Property
prop_Disk_serialisation = testSerialisation
prop_Disk_array_serialisation :: Disk -> Property
prop_Disk_array_serialisation = testArraySerialisation
-- | Check that node serialisation is idempotent.
prop_Node_serialisation :: Node -> Property
prop_Node_serialisation = testSerialisation
-- | Check that instance serialisation is idempotent.
prop_Inst_serialisation :: Instance -> Property
prop_Inst_serialisation = testSerialisation
-- | Check that address pool serialisation is idempotent.
prop_AddressPool_serialisation :: AddressPool -> Property
prop_AddressPool_serialisation = testSerialisation
-- | Check that network serialisation is idempotent.
prop_Network_serialisation :: Network -> Property
prop_Network_serialisation = testSerialisation
-- | Check that filter action serialisation is idempotent.
prop_FilterAction_serialisation :: FilterAction -> Property
prop_FilterAction_serialisation = testSerialisation
-- | Check that filter predicate serialisation is idempotent.
prop_FilterPredicate_serialisation :: FilterPredicate -> Property
prop_FilterPredicate_serialisation = testSerialisation
-- | Check config serialisation.
-- Node count is capped at maxNodes/4 to keep the property fast.
prop_Config_serialisation :: Property
prop_Config_serialisation =
  forAll (choose (0, maxNodes `div` 4) >>= genEmptyCluster) testSerialisation
-- | Custom HUnit test to check the correspondence between Haskell-generated
-- networks and their Python decoded, validated and re-encoded version.
-- For the technical background of this unit test, check the documentation
-- of "case_py_compat_types" of test/hs/Test/Ganeti/Opcodes.hs
casePyCompatNetworks :: HUnit.Assertion
casePyCompatNetworks = do
  let num_networks = 500::Int
  networks <- genSample (vectorOf num_networks genValidNetwork)
  let networks_with_properties = map getNetworkProperties networks
      serialized = J.encode networks
  -- check for non-ASCII fields, usually due to 'arbitrary :: String'
  mapM_ (\net -> when (any (not . isAscii) (J.encode net)) .
                 HUnit.assertFailure $
                 "Network has non-ASCII fields: " ++ show net
        ) networks
  -- Round-trip every network through the Python objects layer; Python
  -- returns each network together with its free/reserved address counts.
  py_stdout <-
    runPython "from ganeti import network\n\
              \from ganeti import objects\n\
              \from ganeti import serializer\n\
              \import sys\n\
              \net_data = serializer.Load(sys.stdin.read())\n\
              \decoded = [objects.Network.FromDict(n) for n in net_data]\n\
              \encoded = []\n\
              \for net in decoded:\n\
              \ a = network.AddressPool(net)\n\
              \ encoded.append((a.GetFreeCount(), a.GetReservedCount(), \\\n\
              \ net.ToDict()))\n\
              \print serializer.Dump(encoded)" serialized
    >>= checkPythonResult
  let deserialised = J.decode py_stdout::J.Result [(Int, Int, Network)]
  decoded <- case deserialised of
               J.Ok ops -> return ops
               J.Error msg ->
                 HUnit.assertFailure ("Unable to decode networks: " ++ msg)
                 -- this already raised an exception, but we need it
                 -- for proper types
                 >> fail "Unable to decode networks"
  HUnit.assertEqual "Mismatch in number of returned networks"
    (length decoded) (length networks_with_properties)
  mapM_ (uncurry (HUnit.assertEqual "Different result after encoding/decoding")
        ) $ zip networks_with_properties decoded
-- | Creates a tuple of the given network combined with some of its properties
-- to be compared against the same properties generated by the python code.
getNetworkProperties :: Network -> (Int, Int, Network)
getNetworkProperties net =
  (getFreeCount net, getReservedCount net, net)
-- | Tests the compatibility between Haskell-serialized node groups and their
-- python-decoded and encoded version.
casePyCompatNodegroups :: HUnit.Assertion
casePyCompatNodegroups = do
  let num_groups = 500::Int
  groups <- genSample (vectorOf num_groups genNodeGroup)
  let serialized = J.encode groups
  -- check for non-ASCII fields, usually due to 'arbitrary :: String'
  mapM_ (\group -> when (any (not . isAscii) (J.encode group)) .
                   HUnit.assertFailure $
                   "Node group has non-ASCII fields: " ++ show group
        ) groups
  -- Round-trip through Python's objects.NodeGroup FromDict/ToDict.
  py_stdout <-
    runPython "from ganeti import objects\n\
              \from ganeti import serializer\n\
              \import sys\n\
              \group_data = serializer.Load(sys.stdin.read())\n\
              \decoded = [objects.NodeGroup.FromDict(g) for g in group_data]\n\
              \encoded = [g.ToDict() for g in decoded]\n\
              \print serializer.Dump(encoded)" serialized
    >>= checkPythonResult
  let deserialised = J.decode py_stdout::J.Result [NodeGroup]
  decoded <- case deserialised of
               J.Ok ops -> return ops
               J.Error msg ->
                 HUnit.assertFailure ("Unable to decode node groups: " ++ msg)
                 -- this already raised an exception, but we need it
                 -- for proper types
                 >> fail "Unable to decode node groups"
  HUnit.assertEqual "Mismatch in number of returned node groups"
    (length decoded) (length groups)
  mapM_ (uncurry (HUnit.assertEqual "Different result after encoding/decoding")
        ) $ zip groups decoded
-- | Generates a node group with up to 3 networks.
-- FIXME: the generated data is still mostly random, without the normal
-- validation rules being applied.
genNodeGroup :: Gen NodeGroup
genNodeGroup = do
  name <- genFQDN
  -- membership is resolved elsewhere, so start empty
  let members = []
  ndparams <- arbitrary
  alloc_policy <- arbitrary
  ipolicy <- arbitrary
  let diskparams = GenericContainer Map.empty
  num_networks <- choose (0, 3)
  net_uuid_list <- vectorOf num_networks (arbitrary::Gen String)
  nic_param_list <- vectorOf num_networks (arbitrary::Gen PartialNicParams)
  let net_map = GenericContainer . Map.fromList $
                zip net_uuid_list nic_param_list
  -- timestamp fields
  ctime <- arbitrary
  mtime <- arbitrary
  -- the UUID must differ from the name to mimic real clusters
  uuid <- genFQDN `suchThat` (/= name)
  serial <- arbitrary
  tags <- Set.fromList <$> genTags
  return $ NodeGroup name members ndparams alloc_policy ipolicy diskparams
             net_map ctime mtime uuid serial tags
-- | Delegates to the dedicated 'genNodeGroup' generator above.
instance Arbitrary NodeGroup where
  arbitrary = genNodeGroup
-- | Builds an address from four independently chosen octets in [0, 255].
instance Arbitrary Ip4Address where
  arbitrary =
      mkIp4Address <$> ((,,,) <$> octet <*> octet <*> octet <*> octet)
    where
      octet = choose (0, 255)
$(genArbitrary ''Ip4Network)

-- | Tests conversions of ip addresses from/to numbers.
prop_ip4AddressAsNum :: Ip4Address -> Property
prop_ip4AddressAsNum ip4 =
  ip4AddressFromNumber (ip4AddressToNumber ip4) ==? ip4
-- | Tests that the number produced by 'ip4AddressToNumber' has the correct
-- order of bytes: the most significant octet of the dotted form is octet 3.
prop_ip4AddressToNumber :: Word32 -> Property
prop_ip4AddressToNumber w =
  let octet :: Int -> Word32
      octet i = (w `div` (256 ^ i)) `mod` 256
      dotted = List.intercalate "." (map (show . octet) [3, 2, 1, 0])
  in ip4AddressToNumber <$> readIp4Address dotted
       ==? (return (toInteger w) :: Either String Integer)
-- | IsString instance for 'Ip4Address', to help write the tests.
-- NOTE: 'error' on a malformed literal is acceptable here because this
-- instance exists only for test literals.
instance IsString Ip4Address where
  fromString s =
    fromMaybe (error $ "Failed to parse address from " ++ s) (readIp4Address s)
-- | Tests a few simple cases of IPv4 next address, including octet
-- carry-over and wrap-around at the top of the address space.
caseNextIp4Address :: HUnit.Assertion
caseNextIp4Address =
  mapM_ (\(expected, input) ->
           HUnit.assertEqual "" expected (nextIp4Address input))
    [ ("0.0.0.1", "0.0.0.0")
    , ("0.0.0.0", "255.255.255.255")
    , ("1.2.3.5", "1.2.3.4")
    , ("1.3.0.0", "1.2.255.255")
    , ("1.2.255.63", "1.2.255.62")
    ]
-- | Tests the compatibility between Haskell-serialized instances and their
-- python-decoded and encoded version.
-- Note: this can be enhanced with logical validations on the decoded objects
casePyCompatInstances :: HUnit.Assertion
casePyCompatInstances = do
  let num_inst = 500::Int
  instances <- genSample (vectorOf num_inst genInst)
  let serialized = J.encode instances
  -- check for non-ASCII fields, usually due to 'arbitrary :: String'
  mapM_ (\inst -> when (any (not . isAscii) (J.encode inst)) .
                 HUnit.assertFailure $
                 "Instance has non-ASCII fields: " ++ show inst
        ) instances
  py_stdout <-
    -- decode and immediately re-encode each instance on the Python side
    runPython "from ganeti import objects\n\
              \from ganeti import serializer\n\
              \import sys\n\
              \inst_data = serializer.Load(sys.stdin.read())\n\
              \decoded = [objects.Instance.FromDict(i) for i in inst_data]\n\
              \encoded = [i.ToDict() for i in decoded]\n\
              \print serializer.Dump(encoded)" serialized
    >>= checkPythonResult
  let deserialised = J.decode py_stdout::J.Result [Instance]
  decoded <- case deserialised of
               J.Ok ops -> return ops
               J.Error msg ->
                 HUnit.assertFailure ("Unable to decode instance: " ++ msg)
                 -- this already raised an exception, but we need it
                 -- for proper types
                 >> fail "Unable to decode instances"
  HUnit.assertEqual "Mismatch in number of returned instances"
    (length decoded) (length instances)
  mapM_ (uncurry (HUnit.assertEqual "Different result after encoding/decoding")
        ) $ zip instances decoded
-- | A helper function for creating 'LIDPlain' values from a volume group
-- name and a logical volume name.
mkLIDPlain :: String -> String -> DiskLogicalId
mkLIDPlain vg lv = LIDPlain (LogicalVolume vg lv)
-- | Tests that the logical ID is correctly found in a plain disk
caseIncludeLogicalIdPlain :: HUnit.Assertion
caseIncludeLogicalIdPlain =
  let vg_name = "xenvg" :: String
      lv_name = "1234sdf-qwef-2134-asff-asd2-23145d.data" :: String
      lv = LogicalVolume vg_name lv_name
      time = TOD 0 0
      -- a single plain disk whose logical ID is exactly 'lv'
      d = RealDisk $
        RealDiskData (LIDPlain lv) [] ["node1.example.com"] "diskname"
          1000 DiskRdWr
          Nothing Nothing Nothing "asdfgr-1234-5123-daf3-sdfw-134f43"
          0 time time
  in
    HUnit.assertBool "Unable to detect that plain Disk includes logical ID" $
      includesLogicalId lv d
-- | Tests that the logical ID is correctly found in a DRBD disk, where the
-- matching 'LIDPlain' lives inside one of the DRBD disk's children.
caseIncludeLogicalIdDrbd :: HUnit.Assertion
caseIncludeLogicalIdDrbd =
  let vg_name = "xenvg" :: String
      lv_name = "1234sdf-qwef-2134-asff-asd2-23145d.data" :: String
      time = TOD 0 0
      d = RealDisk $
        RealDiskData
          (LIDDrbd8 "node1.example.com" "node2.example.com" 2000 1 5 "secret")
          [ RealDisk $ RealDiskData (mkLIDPlain "onevg" "onelv") []
              ["node1.example.com", "node2.example.com"] "disk1" 1000 DiskRdWr
              Nothing Nothing Nothing "145145-asdf-sdf2-2134-asfd-534g2x"
              0 time time
          , RealDisk $ RealDiskData (mkLIDPlain vg_name lv_name) []
              ["node1.example.com", "node2.example.com"] "disk2" 1000 DiskRdWr
              Nothing Nothing Nothing "6gd3sd-423f-ag2j-563b-dg34-gj3fse"
              0 time time
          ] ["node1.example.com", "node2.example.com"] "diskname" 1000 DiskRdWr
          Nothing Nothing Nothing
          "asdfgr-1234-5123-daf3-sdfw-134f43" 0 time time
  in
    -- the failure message previously said "plain Disk", copy-pasted from the
    -- plain-disk test above; this case exercises a DRBD disk
    HUnit.assertBool "Unable to detect that DRBD Disk includes logical ID" $
      includesLogicalId (LogicalVolume vg_name lv_name) d
-- | Tests that the logical ID is correctly NOT found in a plain disk whose
-- logical volume differs from the one being searched for.
caseNotIncludeLogicalIdPlain :: HUnit.Assertion
caseNotIncludeLogicalIdPlain =
  let vg_name = "xenvg" :: String
      lv_name = "1234sdf-qwef-2134-asff-asd2-23145d.data" :: String
      time = TOD 0 0
      d = RealDisk $
        RealDiskData (mkLIDPlain "othervg" "otherlv") [] ["node1.example.com"]
          "diskname" 1000 DiskRdWr Nothing Nothing Nothing
          "asdfgr-1234-5123-daf3-sdfw-134f43"
          0 time time
  in
    -- the failure message previously claimed "Unable to detect ... includes",
    -- which is the opposite of what this negative test asserts
    HUnit.assertBool "Incorrectly detected that plain Disk includes logical ID" $
      not (includesLogicalId (LogicalVolume vg_name lv_name) d)
-- Template Haskell wiring that collects all the properties and test cases
-- defined above into the "Objects" test suite.
testSuite "Objects"
  [ 'prop_fillDict
  , 'prop_LogicalVolume_serialisation
  , 'prop_LogicalVolume_deserialisationFail
  , 'prop_Disk_serialisation
  , 'prop_Disk_array_serialisation
  , 'prop_Inst_serialisation
  , 'prop_AddressPool_serialisation
  , 'prop_Network_serialisation
  , 'prop_Node_serialisation
  , 'prop_Config_serialisation
  , 'prop_FilterAction_serialisation
  , 'prop_FilterPredicate_serialisation
  , 'casePyCompatNetworks
  , 'casePyCompatNodegroups
  , 'casePyCompatInstances
  , 'prop_ip4AddressAsNum
  , 'prop_ip4AddressToNumber
  , 'caseNextIp4Address
  , 'caseIncludeLogicalIdPlain
  , 'caseIncludeLogicalIdDrbd
  , 'caseNotIncludeLogicalIdPlain
  ]
|
grnet/snf-ganeti
|
test/hs/Test/Ganeti/Objects.hs
|
Haskell
|
bsd-2-clause
| 29,828
|
import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)
import GameOfLife
main :: IO ()
main = hspec $ do
  describe "GameOfLife" $ do
    describe "NextGeneration" $ do
      -- a glider on a 5x5 grid advances one step of its period
      it "Create valid next glider" $ do
        nextGeneration (glider 5 5) `shouldBe` [
          [False,False,False,False,False],
          [True,False,True,False,False],
          [False,True,True,False,False],
          [False,True,False,False,False],
          [False,False,False,False,False]]
      -- two lone neighbours both die of underpopulation
      it "Should kill all" $ do
        nextGeneration (gameGrid (2, 2) [(1,1),(2,1)]) `shouldBe` [[False, False],[False, False]]
      -- an L-triomino gives birth to a fourth cell, forming a block
      it "Should born new cell" $ do
        nextGeneration (gameGrid (3, 3) [(1,1),(2,1), (1,2)]) `shouldBe` [[True,True,False],[True,True,False],[False,False,False]]
    describe "Show grid" $ do
      -- '@' marks live cells, '-' dead ones, rows end with newline
      it "Should render right" $ do
        showGrid (glider 5 5) `shouldBe` "-@---\n--@--\n@@@--\n-----\n-----\n"
      it "Should not fail on empty" $ do
        showGrid ([]) `shouldBe` ""
|
AM636E/HaskellGameOfLife
|
test/Spec.hs
|
Haskell
|
bsd-3-clause
| 1,165
|
-- jrc -- json-rpc request client
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy.Char8 as B8L
import qualified Data.Streaming.Network as NetStream
import qualified Options.Applicative as Opts
import qualified Network.Socket as Socket
import Options.Applicative ((<>))
import qualified JsonRpc.Request as Request
import qualified JsonRpc.Response as Response
-- | Parsed command-line options for the JSON-RPC client.
data Options = Options
  { optSocket :: String -- ^ Path to the server's Unix domain socket file.
  , optMethod :: String -- ^ JSON-RPC method name to invoke.
  } deriving (Show)
-- | Parse the command line: two positional arguments, SOCKET and METHOD.
-- Exits with usage information on parse failure or @--help@.
readCliOpts :: IO Options
readCliOpts =
  Opts.execParser $ Opts.info (Opts.helper <*> cliOpts)
    ( Opts.fullDesc
   <> Opts.header "jrc -- JSON-RPC request client"
   <> Opts.progDesc "Query a JSON-RPC server." )
  where
    cliOpts = Options
      <$> Opts.argument Opts.str
          ( Opts.metavar "SOCKET"
         <> Opts.help "Path to socket file." )
      <*> Opts.argument Opts.str
          ( Opts.metavar "METHOD"
         <> Opts.help "Method name to call." )
-- | Connect to the Unix socket, send one encoded request, read one response
-- chunk and print the decoded result.
--
-- NOTE(review): 'Socket.send' may send fewer bytes than given and the reply
-- is read with a single 4096-byte 'recv' -- larger responses would be
-- truncated; confirm the server's framing guarantees.
main :: IO ()
main = readCliOpts >>= \o ->
  let
    socketPath = optSocket o
    method = optMethod o
    request = Request.request method
    msgData = Aeson.encode request
  in do
    sock <- NetStream.getSocketUnix socketPath
    Socket.send sock $ B8L.unpack msgData
    responseData <- Socket.recv sock 4096
    Socket.close sock
    -- on decode failure we print an error string instead of the result
    let result = case Aeson.decode (B8L.pack responseData) of
          Just x -> Response.result x
          Nothing -> "Error parsing response: " ++ responseData
    putStrLn result
|
shmookey/bc-tools
|
src/jrc.hs
|
Haskell
|
bsd-3-clause
| 1,515
|
{-# LANGUAGE TypeOperators #-}
-- | Test of long-life for dynamics, via three rotations:
-- rotate 5 dynamic behaviors
-- rotate 6 inputs on left
-- rotate 7 inputs on right
-- for a total 210 combinations based on clock.
module Main
( main
) where
import Sirea.Prelude
import Sirea.Clock
import Sirea.Time
-- | Clock-driven source of three rotating indices: the time in nanoseconds,
-- sampled at 3 Hz, is bucketed into thirds of a second, and each bucket
-- number is reduced modulo 5, 6 and 7 respectively.
rotate567 :: (Partition p) => B (S p ()) (S p Int :&: S p Int :&: S p Int)
rotate567 = bclockOfFreq 3 >>> bfmap tmNanos >>> bfmap (`div` 333333333) >>>
            (bfmap (`mod` 5) &&& bfmap (`mod` 6) &&& bfmap (`mod` 7)) >>>
            (bfmap fromInteger *** bfmap fromInteger *** bfmap fromInteger)
-- | Five binary integer behaviors used as the dynamic payload; each zips the
-- two input signals with the corresponding arithmetic operation.
add, sub, mul, pow, ssq :: B (S p Int :&: S p Int) (S p Int)
add = bzipWith (+)
sub = bzipWith (-)
mul = bzipWith (*)
pow = bzipWith (^)
ssq = bzipWith (\x y -> x * x + y * y) -- sum of squares
-- | Select one of the five behaviors by index (0..4); any other index is a
-- programming error.
nToF :: Int -> B (S p Int :&: S p Int) (S p Int)
nToF n = case n of
  0 -> add
  1 -> sub
  2 -> pow
  3 -> mul
  4 -> ssq
  _ -> error "illegal behavior"
-- | Map the first signal to a dynamic behavior and evaluate it on the other
-- two; on evaluation failure the right branch substitutes the constant 999
-- before merging back into a single signal.
nToFEval :: B (S p Int :&: S p Int :&: S p Int) (S p Int)
nToFEval = bfirst (bfmap nToF) >>> bbeval 0 >>> bright (bconst 999) >>> bmerge
-- | Collapse a product of three signals into one signal of nested pairs.
zipAll :: B (S p a :&: S p b :&: S p c) (S p (a,(b,c)))
zipAll = bsecond bzip >>> bzip
-- | Run the app: print each (inputs, selected-function result) pair as the
-- three rotations cycle through their 210 combinations.
main :: IO ()
main = runSireaApp $
  rotate567 >>>
  (zipAll &&& nToFEval) >>>
  bzip >>>
  bprint
|
dmbarbour/Sirea
|
tst/RotDyn.hs
|
Haskell
|
bsd-3-clause
| 1,298
|
{-# LANGUAGE OverloadedStrings #-}
module Database.Tempodb.Methods.Series
( seriesCreate
, seriesGet
, seriesList
, seriesUpdate
, seriesDelete
)
where
import Control.Monad.Reader
import Data.Aeson as A
import Data.ByteString.Char8 as C8
import Data.ByteString.Lazy (fromStrict, toStrict)
import Data.Monoid
import Database.Tempodb.Types
import Database.Tempodb.Util
import Network.HTTP.Base (urlEncodeVars)
import Network.Http.Client
-- | Top-level API methods are:
--
-- 1. Series
-- 2. Read
-- 3. Write
-- 4. Increment
-- 5. Single
-- 6. Delete
--
-- TODO: Docs...

-- | Create a new series with the given key via @POST /series@, returning
-- the decoded series on success, 'Nothing' if the response fails to parse.
--
-- NOTE(review): the JSON body is built by string concatenation; a key
-- containing a quote or backslash would produce invalid JSON -- confirm
-- keys are validated upstream.
seriesCreate :: ByteString -> Tempodb (Maybe Series)
seriesCreate k = do
    let postdata = "{\"key\": \"" <> k <> "\"}"
    auth <- ask
    req <- liftIO . buildRequest $ do
        http POST $ rootpath <> "/series"
        setContentLength . fromIntegral $ C8.length postdata
        auth
    (_,result) <- liftIO . runRequest req $ Just postdata
    return . A.decode $ fromStrict result
-- | Fetch a single series by its id or key via @GET /series/id/...@ or
-- @GET /series/key/...@; 'Nothing' if the response does not decode.
seriesGet :: IdOrKey -> Tempodb (Maybe Series)
seriesGet q = do
    auth <- ask
    req <- liftIO . buildRequest $ do
        http GET path
        auth
    (_,result) <- liftIO $ runRequest req Nothing
    return . A.decode $ fromStrict result
  where
    ident (SeriesId i) = "/id/" <> i
    ident (SeriesKey k) = "/key/" <> k
    path = rootpath <> "/series" <> (ident q)
-- | List series matching the optional query arguments via @GET /series@;
-- 'Nothing' if the response does not decode.
seriesList :: Maybe QueryArgs -> Tempodb (Maybe [Series])
seriesList q = do
    req <- seriesCommon q GET
    (_,result) <- liftIO $ runRequest req Nothing
    return . A.decode $ fromStrict result
-- | Delete series matching the optional query arguments, returning the raw
-- status code and response body instead of a decoded value.
seriesDelete :: Maybe QueryArgs -> Tempodb (Int,ByteString)
seriesDelete q =
    seriesCommon q DELETE >>= \req -> liftIO (runRequest req Nothing)
-- | Build a body-less request against @/series@ with the given method,
-- URL-encoding any query arguments into the path.
seriesCommon :: Maybe QueryArgs -> Method -> Tempodb Request
seriesCommon q method = do
    auth <- ask
    liftIO . buildRequest $ do
        http method path
        -- explicit zero length because these requests carry no body
        setContentLength 0
        auth
  where
    root = rootpath <> "/series"
    path = case q of
        Nothing -> root
        Just qry -> root <> "?" <> (C8.pack $ urlEncodeVars qry)
-- | Update a series (addressed by id or key) with the JSON encoding of the
-- given 'Series' via @PUT@; 'Nothing' if the response does not decode.
seriesUpdate :: IdOrKey -> Series -> Tempodb (Maybe Series)
seriesUpdate q s = do
    let postdata = toStrict $ A.encode s
    auth <- ask
    req <- liftIO . buildRequest $ do
        http PUT path
        setContentLength . fromIntegral $ C8.length postdata
        auth
    (_,result) <- liftIO . runRequest req $ Just postdata
    return . A.decode $ fromStrict result
  where
    ident (SeriesId i) = "/id/" <> i
    ident (SeriesKey k) = "/key/" <> k
    path = rootpath <> "/series" <> (ident q)
|
ixmatus/hs-tempodb
|
src/Database/Tempodb/Methods/Series.hs
|
Haskell
|
bsd-3-clause
| 2,668
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Dhall.Test.TypeInference where
import Control.Exception (SomeException (..))
import Data.Text (Text)
import Prelude hiding (FilePath)
import Test.Tasty (TestTree)
import Turtle (FilePath, (</>))
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad
import qualified Data.Text as Text
import qualified Data.Text.IO as Text.IO
import qualified Dhall.Core as Core
import qualified Dhall.Import as Import
import qualified Dhall.Parser as Parser
import qualified Dhall.Test.Util as Test.Util
import qualified Dhall.TypeCheck as TypeCheck
import qualified System.FilePath as FilePath
import qualified Test.Tasty as Tasty
import qualified Test.Tasty.HUnit as Tasty.HUnit
import qualified Turtle
-- | Root of the shared dhall-lang type-inference test tree.
typeInferenceDirectory :: FilePath
typeInferenceDirectory = "./dhall-lang/tests/type-inference"
-- | Discover the success and failure test files on disk and assemble them
-- into one test tree.
getTests :: IO TestTree
getTests = do
    let successTestFiles = do
            path <- Turtle.lstree (typeInferenceDirectory </> "success")

            let skip =
                    -- These tests intermittently fails with:
                    -- "Error: Remote host not found"
                    [ typeInferenceDirectory </> "success/CacheImportsA.dhall"
                    , typeInferenceDirectory </> "success/CacheImportsCanonicalizeA.dhall"
                    ]

            Monad.guard (path `notElem` skip)

            return path

    successTests <- Test.Util.discover (Turtle.chars <* "A.dhall") successTest successTestFiles

    let failureTestFiles = Turtle.lstree (typeInferenceDirectory </> "failure")

    failureTests <- Test.Util.discover (Turtle.chars <* ".dhall") failureTest failureTestFiles

    let testTree = Tasty.testGroup "type-inference tests"
            [ successTests
            , failureTests
            ]

    return testTree
-- | For a prefix P, check that P@A.dhall@ resolves and type-checks to the
-- type written in P@B.dhall@; also re-type-check the annotated expression to
-- exercise the conversion-checking code path.
successTest :: Text -> TestTree
successTest prefix = do
    let expectedFailures =
            []
#ifdef WITH_HTTP
#else
            -- without HTTP support the import-caching test cannot run
            ++ [ typeInferenceDirectory </> "success/CacheImports"
               ]
#endif

    Test.Util.testCase prefix expectedFailures $ do
        let prefixFP = Text.unpack prefix

        actualCode <- Text.IO.readFile (prefixFP <> "A.dhall")

        actualExpr <- Core.throws (Parser.exprFromText mempty actualCode)

        tryResolvedExpr <-
            Exception.try
              (Test.Util.loadRelativeTo
                (FilePath.takeDirectory prefixFP)
                Import.IgnoreSemanticCache
                (Core.denote actualExpr))

        resolvedExpr <- case tryResolvedExpr of
            Left exception -> Tasty.HUnit.assertFailure (show (exception :: SomeException))
            Right resolvedExpr -> return resolvedExpr

        expectedTypeCode <- Text.IO.readFile (prefixFP <> "B.dhall")

        expectedType <- Core.throws (Parser.exprFromText mempty expectedTypeCode)

        resolvedExpectedType <- Import.assertNoImports (Core.denote expectedType)

        inferredType <- case TypeCheck.typeOf resolvedExpr of
            Left exception -> Tasty.HUnit.assertFailure (show exception)
            Right inferredType -> return inferredType

        let message = "The inferred type did not match the expected type"

        Tasty.HUnit.assertEqual message resolvedExpectedType inferredType

        -- We also add this to exercise the `Dhall.Eval.conv` code path, since
        -- it's easy to forget to update it when adding new syntax
        _ <- Core.throws (TypeCheck.typeOf (Core.Annot resolvedExpr resolvedExpectedType))

        return ()
-- | For a prefix P, check that P@.dhall@ parses but does NOT type-check.
failureTest :: Text -> TestTree
failureTest prefix = do
    let expectedFailures =
            [
            -- Duplicate fields are incorrectly caught during parsing:
            -- https://github.com/dhall-lang/dhall-haskell/issues/772
              typeInferenceDirectory </> "failure/unit/RecordTypeDuplicateFields"
            , typeInferenceDirectory </> "failure/unit/UnionTypeDuplicateVariants1"
            , typeInferenceDirectory </> "failure/unit/UnionTypeDuplicateVariants2"
            ]

    Test.Util.testCase prefix expectedFailures $ do
        let prefixFP = Text.unpack prefix

        code <- Text.IO.readFile (prefixFP <> ".dhall")

        expression <- case Parser.exprFromText mempty code of
            Left _ -> Tasty.HUnit.assertFailure (prefixFP <> " should have parsed")
            Right e -> return e

        resolved <- Import.assertNoImports expression

        case TypeCheck.typeOf resolved of
            Left _ -> return ()
            Right _ -> Tasty.HUnit.assertFailure (prefixFP <> " should not have type-checked")
|
Gabriel439/Haskell-Dhall-Library
|
dhall/tests/Dhall/Test/TypeInference.hs
|
Haskell
|
bsd-3-clause
| 4,759
|
{-# LANGUAGE RankNTypes #-}
-- This program must be called with GHC's libdir as the single command line
-- argument.
module Main where
-- import Data.Generics
import Data.Data
import Data.List
import System.IO
import GHC
import BasicTypes
import DynFlags
import MonadUtils
import Outputable
import Bag (filterBag,isEmptyBag)
import System.Directory (removeFile)
import System.Environment( getArgs )
import qualified Data.Map as Map
import Data.Dynamic ( fromDynamic,Dynamic )
-- | Entry point: expects exactly one argument (GHC's libdir); the partial
-- pattern match is intentional for this harness and crashes on misuse.
main::IO()
main = do
  [libdir] <- getArgs
  testOneFile libdir "AnnotationTuple"
-- Compile the named module with the GHC API, collect all tuple arguments
-- from the parsed source and print them together with the API annotations.
-- NOTE(review): no type signature; the bindings cs, ts and r are unused.
testOneFile libdir fileName = do
       ((anns,cs),p) <- runGhc (Just libdir) $ do
                        dflags <- getSessionDynFlags
                        setSessionDynFlags dflags
                        let mn =mkModuleName fileName
                        addTarget Target { targetId = TargetModule mn
                                         , targetAllowObjCode = True
                                         , targetContents = Nothing }
                        load LoadAllTargets
                        modSum <- getModSummary mn
                        p <- parseModule modSum
                        t <- typecheckModule p
                        d <- desugarModule t
                        l <- loadModule d
                        let ts=typecheckedSource l
                            r =renamedSource l
                        return (pm_annotations p,p)
       let tupArgs = gq (pm_parsed_source p)
       putStrLn (pp tupArgs)
       putStrLn (intercalate "\n" [showAnns anns])
    where
     -- query the whole parsed AST for tuple arguments
     gq ast = everything (++) ([] `mkQ` doLHsTupArg) ast
     doLHsTupArg :: LHsTupArg GhcPs -> [(SrcSpan,String,HsExpr GhcPs)]
     doLHsTupArg (L l arg@(Present {}))
       = [(l,"p",ExplicitTuple noExtField [L l arg] Boxed)]
     doLHsTupArg (L l arg@(Missing {}))
       = [(l,"m",ExplicitTuple noExtField [L l arg] Boxed)]

     showAnns anns = "[\n" ++ (intercalate "\n"
        $ map (\((s,k),v)
               -> ("(AK " ++ pp s ++ " " ++ show k ++" = " ++ pp v ++ ")\n"))
        $ Map.toList anns)
        ++ "]\n"

-- Pretty-print via the global DynFlags; kept pointed because it is used at
-- several different Outputable types (eta-reduction would monomorphise it).
pp a = showPpr unsafeGlobalDynFlags a
-- ---------------------------------------------------------------------
-- Copied from syb for the test
-- | Generic queries of type \"r\",
--   i.e., take any \"a\" and return an \"r\"
--
type GenericQ r = forall a. Data a => a -> r
-- | Make a generic query;
--   start from a type-specific case;
--   return a constant otherwise
--
mkQ :: ( Typeable a
       , Typeable b
       )
    => r
    -> (b -> r)
    -> a
    -> r
-- If the runtime type of the argument matches @b@, apply the specific
-- query; otherwise fall back to the constant default.
(r `mkQ` br) a = maybe r br (cast a)
-- | Summarise all nodes in top-down, left-to-right order
everything :: (r -> r -> r) -> GenericQ r -> GenericQ r
-- Start from this node's own summary, then fold in the recursive summaries
-- of each immediate subterm (gmapQ), reducing left to right.
everything combine query node =
  foldl combine (query node) (gmapQ (everything combine query) node)
|
sdiehl/ghc
|
testsuite/tests/ghc-api/annotations/parseTree.hs
|
Haskell
|
bsd-3-clause
| 3,062
|
{-# LANGUAGE FlexibleInstances, ScopedTypeVariables, TypeSynonymInstances #-}
-- |This module contains classes and functions to relate Haskell types
-- with OpenGL DataTypes (typically used to describe the values stored
-- in arrays) and VariableTypes (used as attributes and uniforms in
-- GLSL programs).
module GLUtil.TypeMapping where
import Data.Int
import Data.Word
import Foreign.Storable (Storable)
import qualified Graphics.Rendering.OpenGL as GL
import Graphics.Rendering.OpenGL
import Linear (V1, V2, V3, V4, M22, M33, M44)
-- | A mapping from Haskell types to values of 'VariableType'. This
-- defines how Haskell values may be mapped to values that may be
-- bound to GLSL variables.
class HasVariableType a where
  variableType :: a -> VariableType

-- scalar types
instance HasVariableType Float where variableType _ = Float'
instance HasVariableType Int32 where variableType _ = Int'
instance HasVariableType Word32 where variableType _ = UnsignedInt'

-- float vectors (V1 degenerates to the scalar type)
instance HasVariableType (V1 GLfloat) where variableType _ = Float'
instance HasVariableType (V2 GLfloat) where variableType _ = FloatVec2
instance HasVariableType (V3 GLfloat) where variableType _ = FloatVec3
instance HasVariableType (V4 GLfloat) where variableType _ = FloatVec4

-- signed integer vectors
instance HasVariableType (V1 Int32) where variableType _ = Int'
instance HasVariableType (V2 Int32) where variableType _ = IntVec2
instance HasVariableType (V3 Int32) where variableType _ = IntVec3
instance HasVariableType (V4 Int32) where variableType _ = IntVec4

-- unsigned integer vectors
instance HasVariableType (V1 Word32) where variableType _ = UnsignedInt'
instance HasVariableType (V2 Word32) where variableType _ = UnsignedIntVec2
instance HasVariableType (V3 Word32) where variableType _ = UnsignedIntVec3
instance HasVariableType (V4 Word32) where variableType _ = UnsignedIntVec4

-- square float matrices
instance HasVariableType (M22 GLfloat) where variableType _ = FloatMat2
instance HasVariableType (M33 GLfloat) where variableType _ = FloatMat3
instance HasVariableType (M44 GLfloat) where variableType _ = FloatMat4

-- a list has the variable type of its elements
instance forall t. HasVariableType t => HasVariableType [t] where
  variableType _ = variableType (undefined::t)
-- | Maps each 'VariableType' to its corresponding
-- 'DataType'. Typically this indicates the element type of composite
-- variable types (e.g. @variableDataType FloatVec2 = Float@). Note
-- that this is a partial mapping as we are primarily supporting the
-- use of these types as inputs to GLSL programs where types such as
-- Bool are not supported.
variableDataType :: VariableType -> DataType
variableDataType Float'          = GL.Float
variableDataType FloatVec2       = GL.Float
variableDataType FloatVec3       = GL.Float
variableDataType FloatVec4       = GL.Float
variableDataType Int'            = GL.Int
variableDataType IntVec2         = GL.Int
variableDataType IntVec3         = GL.Int
variableDataType IntVec4         = GL.Int
variableDataType UnsignedInt'    = GL.UnsignedInt
variableDataType UnsignedIntVec2 = GL.UnsignedInt
variableDataType UnsignedIntVec3 = GL.UnsignedInt
variableDataType UnsignedIntVec4 = GL.UnsignedInt
variableDataType FloatMat2       = GL.Float
variableDataType FloatMat3       = GL.Float
variableDataType FloatMat4       = GL.Float
variableDataType FloatMat2x3     = GL.Float
variableDataType FloatMat2x4     = GL.Float
variableDataType FloatMat3x2     = GL.Float
variableDataType FloatMat3x4     = GL.Float
variableDataType FloatMat4x2     = GL.Float
variableDataType FloatMat4x3     = GL.Float
-- Bool, samplers, etc. are deliberately unsupported
variableDataType _ = error "Unsupported variable type!"
-- |Open mapping from Haskell types to OpenGL types.
class Storable a => HasGLType a where
  glType :: a -> DataType

instance HasGLType GLint where glType _ = GL.Int
instance HasGLType Word8 where glType _ = GL.UnsignedByte
instance HasGLType Word16 where glType _ = GL.UnsignedShort
instance HasGLType Word32 where glType _ = GL.UnsignedInt
instance HasGLType Float where glType _ = GL.Float
|
coghex/abridgefaraway
|
src/GLUtil/TypeMapping.hs
|
Haskell
|
bsd-3-clause
| 3,818
|
{- Provides constants used by the CORDIC algorithm.
- `alistF` is a list of angles [ atan 1, atan (1/2), atan (1/4, ... ]
- `klistF` is a list of the scaling constants for each iteration
- Traditionally these would have been hard-coded for performance; they are
- generated programmatically here for simplicity.
-}
module Util
(
alistF,
klistF
) where
import Numeric.Fixed
-- |Fixed-point cast of alist.
-- An explicit top-level signature is added so the element type is not left
-- to inference/defaulting (and the monomorphism restriction is satisfied
-- deliberately).
alistF :: [Fixed]
alistF = map toFixed alist
-- |Fixed-point cast of klist.
-- An explicit top-level signature is added for the same reason as 'alistF'.
klistF :: [Fixed]
klistF = map toFixed klist
-- |Infinite list of angles with tangent ratios [1, 1/(2^i)]
alist :: [Double]
alist = map angle [0 ..]
  where
    angle e = atan (recip (2 ^ e))
-- |Infinite list of scaling factors
-- Each element is the running product of the per-iteration factors k 0,
-- k 1, ..., seeded with k 0 at index 0.
klist :: [Double]
klist = klist' 1 ( k 0 )
-- |Recursive generator for scaling factors: emit the accumulated product,
-- then multiply in the factor for the current iteration and recurse.
klist' :: Int -> Double -> [Double]
klist' step acc = acc : klist' (step + 1) (k step * acc)
-- |Scaling factor k at iteration i, i.e. 1 / sqrt (1 + 2^(-2i)).
k :: Int -> Double
k i = recip (sqrt (1 + ratio))
  where
    ratio = 2 ^^ ((-2) * i)
|
chronon-io/cordic
|
src/Util.hs
|
Haskell
|
bsd-3-clause
| 935
|
-- | RSS 2.0 feed source.
-- Items are produced from RSS 2.0 feeds.
-- The itmName, itmURL and itmDescription fields are filled from the
-- corresponding RSS fields.
-- The itmTags field is populated from the RSS tags.
--
-- TODO: support for other feed formats.
module Network.LambNyaa.Source.Feed (rssFeed, rssFeed') where
import Network.LambNyaa.Item
import Network.LambNyaa.Monad
import Network.LambNyaa.Log
import Text.RSS.Syntax
import Text.Feed.Types hiding (Item)
import Network.Download
import Control.Monad
-- Log helpers pre-tagged with this module's name.
warn' = warn "Source.Feed"
err'  = err  "Source.Feed"
-- | Create a Source from an RSS feed. The itmSource field of Items originating
--   from this Source will contain the URL of the feed.
rssFeed :: URL -> Nyaa [Item]
rssFeed url = rssFeed' url url
-- | Create a Source from a named RSS feed. The itmSource field of Items
--   originating from this Source will contain the given name.
--   Non-RSS feeds and download/parse failures are logged and yield no items.
rssFeed' :: String -> URL -> Nyaa [Item]
rssFeed' src url = source $ do
  ef <- openAsFeed url
  case ef of
    Right (RSSFeed rss) -> do
      let is = getItems src (rssChannel rss)
      -- an empty channel is suspicious enough to warn about
      when (null is) . warn' $ "No RSS items from feed " ++ url ++ "!"
      return is
    _ -> do
      err' $ "Unable to parse RSS feed from " ++ url ++ "!"
      return []
-- Convert every item of an RSS channel, tagging each with the source name.
getItems :: String -> RSSChannel -> [Item]
getItems src = map (getItem src) . rssItems

-- Build an Item from a single RSS entry. Missing title/link collapse to "".
-- NOTE(review): itmIdentifier and itmSeenBefore are left 'undefined' here --
-- presumably filled in later by the pipeline; forcing them earlier would
-- crash. Confirm against the consumer.
getItem :: String -> RSSItem -> Item
getItem src item = Item {
    itmIdentifier  = undefined,
    itmSeenBefore  = undefined,
    itmName        = maybe "" id (rssItemTitle item),
    itmURL         = maybe "" id (rssItemLink item),
    itmDescription = rssItemDescription item,
    itmSource      = src,
    itmTags        = map rssCategoryValue $ rssItemCategories item
  }
|
valderman/lambnyaa
|
Network/LambNyaa/Source/Feed.hs
|
Haskell
|
bsd-3-clause
| 1,739
|
module Data.TestPolynomial where
import Prelude hiding (Rational, gcd, negate, (*), (+), (-), (/), (^))
import Domains.Euclidean
import Domains.Ring
import Data.Polynomial
import Data.Rational
import Collins
import TestUtil
-- The indeterminate x and its powers up to x^8, used to write test
-- polynomials compactly.
x, x2, x3, x4, x5, x6, x7, x8 :: Polynomial Integer
x = Term 1 1 (Data.Polynomial.Const 0)
x2 = x * x
x3 = x2 * x
x4 = x3 * x
x5 = x4 * x
x6 = x5 * x
x7 = x6 * x
x8 = x7 * x
-- The polynomial x + 1.
p :: Polynomial Integer
p = Term 1 1 (Data.Polynomial.Const 1)

-- Promote an integer to a constant polynomial.
pc :: Integer -> Polynomial Integer
pc = promote
-- | Run the polynomial test cases: gcd computations, rational
-- simplification, and the Collins subresultant/resultant algorithm.
run :: IO ()
run
  -- print (Collins.gcd (pc 0) (pc 1))
  -- print (Collins.gcd (pc 1) (pc 0))
  -- print (Collins.gcd (pc 3) (pc 7))
  -- print (Collins.gcd (pc 3 * x + pc 1) (pc 7))
 = do
  test "gcd" (x - pc 1) (gcd (x2 - pc 1) (x - pc 1))
  putStrLn ("gcd " ++ show (gcd (pc 6 * (x2 - pc 1)) (pc 4 * (x - pc 1))))
  test "ratio " (rational (pc 3 * x + pc 3) (pc 2)) (rational (pc 6 * (x2 - pc 1)) (pc 4 * (x - pc 1)))
  test "gcd" (x - one) (gcd (x2 - pc 1) (x2 - pc 2 * x + pc 1))
  test "ratio " (rational (x + one) (x - one)) (rational (x2 - pc 1) (x2 - pc 2 * x + pc 1))
  test "gcd" one (gcd (pc 3 * x2 + pc 1) (pc 5 * x4 + x2 + pc 4))
  -- classic resultant example; expected value 260708 is the known result
  test
    "subresultant"
    260708
    (Collins.resultant
       (x8 + x6 - pc 3 * x4 - pc 3 * x3 + pc 8 * x2 + pc 2 * x - pc 5)
       (pc 3 * x6 + pc 5 * x4 - pc 4 * x2 - pc 9 * x + pc 21))
  putStrLn ("p = " ++ show (p * p * p))
|
pmilne/algebra
|
test/Data/TestPolynomial.hs
|
Haskell
|
bsd-3-clause
| 1,512
|
{-# LANGUAGE StandaloneDeriving #-}
module Data.Text.Index
(
Index
, lookupWord
, lookupPhrase
, Weight, toDouble
, size
, empty
, addDocument
, removeDocument
)where
import qualified Data.Search.Results as Results
import Data.Foldable (foldMap, for_)
import Data.Functor ((<$>))
import Data.List (foldl', sortOn, intercalate)
import Data.Monoid ((<>), Endo(Endo, appEndo))
import Data.Ord (Down(Down))
import qualified Data.OrdPSQ as PSQ
import qualified Data.Text as Text
import Data.Text (Text)
import qualified Data.Text.ICU as ICU
import qualified Data.Text.ICU.Char as ICU
import qualified Data.Text.ICU.Normalize as ICU
import qualified Data.TST as TST
import Data.TST (TST)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Prelude hiding (lookup, words, Word)
-- TODO: exact matches within long strings are not found right now (unless
-- the start of the match happens to coincide with a chunk boundary).
-- The index contains many (trimmed-down) variants of all source words.
-- | A ternary search tree keyed on the characters of word variants.
type Index id
  = TST Char (Entries id)
-- For a given variant (this is a leaf of the TST), we have a set
-- of original words, together with the source id, and its weight (indicating
-- how much the original word was trimmed).
-- | Priority-search queue per variant: document id -> (weight, original word).
type Entries id
  = PSQ.OrdPSQ id Weight Word
-- A 'Word' does not contain spaces.
type Word
  = Text
-- | Edit cost accumulated while trimming a word; combined additively.
-- Smaller means closer to the original word.
newtype Weight
  = Weight Double
  deriving (Eq, Show, Typeable)
-- | Unwrap a 'Weight'.
toDouble :: Weight -> Double
toDouble (Weight d) = d
-- | Weights combine additively: '<>' adds the costs and 'mempty' is zero
-- cost.  The explicit 'Semigroup' instance is required on GHC >= 8.4
-- (base >= 4.11), where 'Semigroup' is a superclass of 'Monoid';
-- defining only 'mappend' no longer compiles there.
instance Semigroup Weight where
  Weight a <> Weight b = Weight (a + b)

instance Monoid Weight where
  mappend = (<>)
  mempty = Weight 0
-- | Apply a function to the underlying cost value.
reweigh :: (Double -> Double) -> Weight -> Weight
reweigh f (Weight d) = Weight (f d)
-- Weights are ordered by their cost.
instance Ord Weight where
  compare (Weight a) (Weight b) = compare a b
-- Lookup.
-- | Look up a variant literally (the hole markers are stripped to form
-- the TST key); no results when the variant is absent.
lookupExact :: (Ord id) =>
  Index id -> Word -> Results.T id Weight Word
lookupExact index t =
  maybe Results.empty Results.fromPSQ (TST.lookup (realChars t) index)
-- | Look up a single word: generate every chopped/holed variant of the
-- normalised query, look each one up exactly, penalise each hit by the
-- variant's weight, and merge the results.
lookupWord :: (Ord id) =>
  Index id -> Word -> Results.T id Weight Word
lookupWord index query = Results.union
  [ Results.changeWeight (w <>) (lookupExact index variant)
  | (variant, w) <- deletions defaultWeights =<< chop (normalise query)
  ]
-- | Look up a whitespace-separated phrase: every word must match, so the
-- per-word results are intersected.
lookupPhrase :: (Ord id) => Index id -> Text -> Results.T id Weight Word
lookupPhrase index t
  -- Empty search term gives no results. Do we want a way
  -- to return all results?
  | null terms = Results.empty
  | otherwise = Results.intersection (map (lookupWord index) terms)
  where
    terms = words t
-- | Number of stored variants (leaves of the tree); O(n).
size :: Index id -> Int
size index = length (TST.toList index)
-- Creation.
-- | An index containing no documents.
empty :: Index id
empty = TST.empty
-- | Register one variant of @original@ for document @i@.  If the variant
-- is already present for this document, the entry with the smaller
-- (better) weight wins.
addVariant :: (Ord id) => id -> Word -> (Word, Weight) -> Index id -> Index id
addVariant i original (variant, w) = TST.insertWith
  -- On a key collision, discard the new singleton and alter the
  -- existing queue instead.
  (const $ snd . PSQ.alter f i)
  (realChars variant)
  (PSQ.singleton i w original)
  where
    -- Keep the existing entry when it is at least as good.
    f (Just (w', original'))
      | w' <= w = ((), Just (w', original'))
    f _ = ((), Just (w , original ))
-- | Add every variant of one word to the index for document @i@.
addWord :: (Ord id) => id -> Word -> Index id -> Index id
addWord i t index = foldr (addVariant i t) index (vary defaultWeights t)
-- | Index every word of a document under the given id.
addDocument :: (Ord id) => id -> Text -> Index id -> Index id
addDocument i t index = foldr (addWord i) index (words t)
-- | Remove a document: drop its id from every variant's entry queue.
-- The tree structure itself is not pruned.
removeDocument :: (Ord id) => id -> Index id -> Index id
removeDocument i index = PSQ.delete i <$> index
-- | Split text into words using ICU word-boundary analysis, discarding
-- breaks classified as 'ICU.Uncategorized' (whitespace, punctuation).
words :: Text -> [Text]
words = map ICU.brkBreak .
  filter ((/=) ICU.Uncategorized . ICU.brkStatus) .
  ICU.breaks (ICU.breakWord ICU.Current)
-- | Case-fold and apply Unicode NFKD normalisation so all variants are
-- compared in one canonical form.
normalise :: Text -> Text
normalise = ICU.normalize ICU.NFKD . Text.toCaseFold
-- | Total per-character cost of a text under the given weights.
weightedLength :: Weights -> Text -> Weight
weightedLength wts = foldMap (character wts) . Text.unpack
-- Generate all variants of a word.
-- Some of these contain holes, others are just shortened versions.
-- | Pipeline: normalise, chop long words into chunks, insert hole
-- ("deletion") variants, then shorten each variant at both ends.
vary :: Weights -> Word -> [(Word, Weight)]
vary wts t = shorten wts =<< deletions wts =<< chop =<< [normalise t]
-- | Produce prefix and suffix truncations of a variant.  A truncation's
-- added weight is the cost of the whole word minus the cost of the kept
-- part, i.e. the cost of what was cut off.
shorten :: Weights -> (Word, Weight) -> [(Word, Weight)]
shorten wts (t, w)
  -- Non-trivial deletion variants of a word are not further shortened.
  | w > Weight 0.9 = [(t, w)]
  | otherwise = (t, w) : concatMap
    (\ br -> weigh (ICU.brkPrefix br) ++ weigh (ICU.brkSuffix br))
    breaks
  where
    -- Break up a string in characters.
    breaks = ICU.breaks (ICU.breakCharacter ICU.Current) t
    weigh st
      | Text.null st = []
      | otherwise = [(st, w <> _N <> n)]
      where
        -- n is the (negated) cost of the kept part; _N the cost of the whole.
        n = reweigh negate $ weightedLength wts st
        _N = weightedLength wts t
-- | Generate variants of @t@ with characters replaced by 'hole',
-- introducing holes left-to-right (starting at position @start@) so each
-- combination is produced exactly once.  Generation is cut off once the
-- accumulated weight exceeds a budget derived from the word's own
-- weighted length.
deletions :: Weights -> (Word, Weight) -> [(Word, Weight)]
deletions wts (t, w) = step (t, w) where
  -- Maximum total weight any variant may accumulate.
  maximalWeight = budget wts $ weightedLength wts t
  step :: (Word, Weight) -> [(Word, Weight)]
  step (t, w) = go t w 1 where
    go t w start
      | w > maximalWeight = []
      | Text.null t = [(t, w)]
      | otherwise = (t, w) : concatMap
        (delete w)
        [(i, Text.splitAt i t) | i <- [start .. Text.length t]]
    -- Replace the character at position i by a hole and continue to the
    -- right of it.
    delete w (i, (leftC, right)) = go
      (Text.append left (Text.cons hole right))
      (w <> cost)
      (succ i)
      where
        c = Text.last leftC
        left = Text.init leftC
        -- A hole directly after another hole is discounted by the
        -- continuation factor.
        cost = reweigh (continuationCost *) characterCost
        continuationCost
          | not (Text.null left) && Text.last left == hole = continuation wts
          | otherwise = 1
        characterCost = character wts c
-- | Tuning parameters for variant generation.
data Weights
  = Weights
  { continuation :: Double       -- ^ Discount factor for a hole directly after another hole.
  , character :: Char -> Weight  -- ^ Deletion cost of a given character.
  , budget :: Weight -> Weight   -- ^ Deletion budget, given a word's weighted length.
  }
-- | Default tuning: combining marks are cheap to delete, "other"
-- punctuation fairly cheap, everything else costs 1; the budget grows
-- with word length but is capped at 2.
defaultWeights :: Weights
defaultWeights = Weights
  { continuation = 0.75
  , character = \ c -> case ICU.property ICU.GeneralCategory c of
      ICU.NonSpacingMark -> Weight 0.2
      ICU.EnclosingMark -> Weight 0.2
      ICU.CombiningSpacingMark -> Weight 0.2
      ICU.OtherPunctuation -> Weight 0.4
      _ -> Weight 1
  , budget = reweigh $ min 2 . \ l -> 0.5 + l / 5
  }
-- | Placeholder (U+FFFC, OBJECT REPLACEMENT CHARACTER) marking a deleted
-- character in a variant; stripped by 'realChars' before storage/lookup.
hole :: Char
hole = '\xFFFC'
-- | Render a variant for debugging, making hole markers visible as '□'.
showHoles :: Text -> Text
showHoles = Text.map substitute
  where
    substitute c
      | c == hole = '□'
      | otherwise = c
-- | The characters of a variant with the hole markers left out; this is
-- the key under which a variant lives in the TST.
realChars :: Word -> [Char]
realChars t = [ c | c <- Text.unpack t, c /= hole ]
-- Dealing with long words.
-- Chop up a word in overlapping chunks, of maximal length 15
-- and typical length 10.
-- | Chunks after the first carry weight 1, marking them as inexact;
-- consecutive chunks overlap by 5 characters.
chop :: Word -> [(Word, Weight)]
chop t
  | Text.length t <= maximalChunk = [(t, Weight 0)]
  | otherwise = (Text.take maximalChunk t, Weight 0) :
    (map (flip (,) $ Weight 1) . chop' $ Text.drop overlap t)
  where
    -- Repeatedly emit a chunk and advance by 'overlap'.
    chop' t
      | Text.length t <= maximalChunk = [t]
      | otherwise = Text.take typicalChunk t : chop' (Text.drop overlap t)
    maximalChunk = 3 * overlap
    typicalChunk = 2 * overlap
    overlap = 5
-- Helper functions and tests.
-- | Print one variant per line together with its weight, for manual
-- inspection.
printVariations :: [(Word, Weight)] -> IO ()
printVariations variants = for_ variants $ \ (t, w) ->
  putStrLn (Text.unpack (showHoles t) ++ " " ++ show w)
-- | Sample document (Dutch) for exercising the index by hand.
testDocument :: Text
testDocument = Text.pack "Dit is een hele zin, met allemaal woorden erin. Ook woorden met accent, als café. Daarnaast wat cijfers: 0 123 4.567. En natuurlijk symbolen ☺☹!"
-- | A long word, useful for testing 'chop'.
dr :: Text
dr = Text.pack "driehoeksongelijkheid"
|
ariep/text-index
|
src/Data/Text/Index.hs
|
Haskell
|
bsd-3-clause
| 7,323
|
module Callback.MouseMove where
import qualified Data.IORef as R
import Control.Applicative ((<$>))
import qualified Utils as LU
import qualified Graphics.UI.GLFW as GLFW
import qualified AppData as AP
-- | Build a GLFW cursor-position callback that forwards mouse movement
-- into the application state.
newMouseMoveCallback :: AP.AppDataRef -> GLFW.CursorPosCallback
newMouseMoveCallback appRef = callback
   where
      -- GLFW reports window coordinates as Doubles; round down to whole
      -- pixels, convert to level coordinates, then update the app state.
      callback win x y = do
         mp <- LU.windowToLevelCoords (floor x, floor y) <$> R.readIORef appRef
         R.modifyIORef appRef (AP.handleMouseMoved mp)
|
dan-t/layers
|
src/Callback/MouseMove.hs
|
Haskell
|
bsd-3-clause
| 480
|
module Network.XMPP.Address
( XMPPLocal
, localText
, localFromText
, XMPPDomain
, domainText
, domainFromText
, XMPPResource
, resourceText
, resourceFromText
, XMPPAddress(..)
, xmppAddress'
, xmppAddress
, addressToText
, addressBare
, BareJID(..)
, bareJidGet
, bareJidAddress
, bareJidToText
, FullJID(..)
, fullJidGet
, fullJidAddress
, fullJidToText
) where
import Data.Text (Text)
import Data.Aeson
import Data.Aeson.Types (toJSONKeyText)
import Control.Applicative
import Data.Attoparsec.Text
import Text.StringPrep
import Text.StringPrep.Profiles
import Network.XMPP.Utils
-- | The localpart of a JID, validated by the Nodeprep stringprep profile.
newtype XMPPLocal = XMPPLocal { localText :: Text }
                  deriving (Eq, Ord, Show)
-- | Validate a localpart; 'Nothing' if Nodeprep rejects it.
localFromText :: Text -> Maybe XMPPLocal
localFromText t = XMPPLocal <$> runStringPrep nodePrepProfile t
-- | The domainpart of a JID, validated by the Nameprep profile.
newtype XMPPDomain = XMPPDomain { domainText :: Text }
                   deriving (Eq, Ord, Show)
-- JSON (de)serialisation goes through the validated textual form.
instance FromJSON XMPPDomain where
  parseJSON = withText "XMPPDomain" $ \t -> case domainFromText t of
    Nothing -> fail "XMPPDomain"
    Just r -> return r
instance ToJSON XMPPDomain where
  toJSON = toJSON . domainText
-- | Validate a domainpart; 'Nothing' if Nameprep rejects it.
domainFromText :: Text -> Maybe XMPPDomain
domainFromText t = XMPPDomain <$> runStringPrep xmppNamePrepProfile t
-- | The resourcepart of a JID, validated by the Resourceprep profile.
newtype XMPPResource = XMPPResource { resourceText :: Text }
                     deriving (Eq, Ord, Show)
-- | Validate a resourcepart; 'Nothing' if Resourceprep rejects it.
resourceFromText :: Text -> Maybe XMPPResource
resourceFromText t = XMPPResource <$> runStringPrep resourcePrepProfile t
-- | A JID: optional localpart, mandatory domainpart, optional
-- resourcepart.
data XMPPAddress = XMPPAddress { addressLocal :: Maybe XMPPLocal
                               , addressDomain :: XMPPDomain
                               , addressResource :: Maybe XMPPResource
                               }
                 deriving (Eq, Ord)
-- Shown via the canonical textual rendering.
instance Show XMPPAddress where
  show = show . addressToText
-- JSON values and JSON object keys both use the textual JID form,
-- re-validated on parse.
instance FromJSON XMPPAddress where
  parseJSON = withText "XMPPAddress" $ \t -> case xmppAddress t of
    Left e -> fail e
    Right r -> return r
instance FromJSONKey XMPPAddress where
  fromJSONKey = FromJSONKeyTextParser $ \k -> case xmppAddress k of
    Left e -> fail e
    Right r -> return r
instance ToJSON XMPPAddress where
  toJSON = toJSON . addressToText
instance ToJSONKey XMPPAddress where
  toJSONKey = toJSONKeyText addressToText
-- | Extra characters prohibited in a localpart on top of the standard
-- stringprep tables; the ranges start at: quote, ampersand, apostrophe,
-- slash, colon, less-than, greater-than, at-sign.
nodeProhibited :: [Range]
nodeProhibited =
  [ range '\x0022' '\x0023'
  , range '\x0026' '\x0027'
  , range '\x0027' '\x0028'
  , range '\x002F' '\x0030'
  , range '\x003A' '\x003B'
  , range '\x003C' '\x003D'
  , range '\x003E' '\x003F'
  , range '\x0040' '\x0041'
  ]
-- | The Nodeprep stringprep profile used for localparts.
nodePrepProfile :: StringPrepProfile
nodePrepProfile =
  Profile { maps = [b1, b2]
          , shouldNormalize = True
          , prohibited = [a1, c11, c12, c21, c22, c3, c4, c5, c6, c7, c8, c9, nodeProhibited]
          , shouldCheckBidi = True
          }
-- | Nameprep profile for domainparts (see 'namePrepProfile').
xmppNamePrepProfile :: StringPrepProfile
xmppNamePrepProfile = namePrepProfile False
-- | The Resourceprep stringprep profile used for resourceparts.
resourcePrepProfile :: StringPrepProfile
resourcePrepProfile =
  Profile { maps = [b1]
          , shouldNormalize = True
          , prohibited = [a1, c12, c21, c22, c3, c4, c5, c6, c7, c8, c9]
          , shouldCheckBidi = True
          }
-- | Attoparsec parser for a JID in its textual form
-- (optional localpart, domainpart, optional resourcepart).
-- The resourcepart consumes all remaining input.  Record wildcards fill
-- the 'XMPPAddress' fields from the local bindings of the same names.
xmppAddress' :: Parser XMPPAddress
xmppAddress' = do
  -- Everything up to the first separator is either a localpart (before
  -- '@') or already the domainpart.
  first <- takeTill (\c -> c == '@' || c == '/')
  sep <- optional anyChar
  case sep of
    Just '@' -> do
      addressLocal <- Just <$> checkLocal first
      addressDomain <- takeTill (== '/') >>= checkDomain
      sep2 <- optional anyChar
      case sep2 of
        Just '/' -> do
          addressResource <- Just <$> (takeText >>= checkResource)
          return XMPPAddress { .. }
        Nothing -> return XMPPAddress { addressResource = Nothing
                                      , ..
                                      }
        -- takeTill (== '/') guarantees the next char can only be '/'.
        _ -> error "xmppAddress: impossible second separator"
    Just '/' -> do
      addressDomain <- checkDomain first
      addressResource <- Just <$> (takeText >>= checkResource)
      return XMPPAddress { addressLocal = Nothing
                         , ..
                         }
    Nothing -> do
      addressDomain <- checkDomain first
      return XMPPAddress { addressLocal = Nothing
                         , addressResource = Nothing
                         , ..
                         }
    -- takeTill only stops at '@' or '/'.
    _ -> error "xmppAddress: impossible first separator"
  where checkLocal = maybeFail "xmppAddress: localpart doesn't satisfy Nodeprep profile of stringprep" . localFromText
        checkDomain = maybeFail "xmppAddress: domainpart doesn't satisfy Nameprep profile of stringprep" . domainFromText
        checkResource = maybeFail "xmppAddress: resourcepart doesn't satisfy Resourceprep profile of stringprep" . resourceFromText
-- | Parse and validate a JID from text.
xmppAddress :: Text -> Either String XMPPAddress
xmppAddress = parseValue xmppAddress'
-- | Render a JID canonically: localpart, domainpart and resourcepart
-- joined with the usual separators; absent parts are omitted.
addressToText :: XMPPAddress -> Text
addressToText (XMPPAddress {..}) =
  maybe mempty ((<> "@") . localText) addressLocal
  <> domainText addressDomain
  <> maybe mempty (("/" <>) . resourceText) addressResource
-- | Strip the resourcepart, yielding the bare address.
addressBare :: XMPPAddress -> XMPPAddress
addressBare (XMPPAddress {..}) = XMPPAddress { addressResource = Nothing
                                             , ..
                                             }
-- | A JID guaranteed to have a localpart and no resourcepart.
data BareJID = BareJID { bareLocal :: XMPPLocal
                       , bareDomain :: XMPPDomain
                       }
             deriving (Eq, Ord)
-- | Convert back to a general 'XMPPAddress'.
bareJidAddress :: BareJID -> XMPPAddress
bareJidAddress (BareJID {..}) = XMPPAddress (Just bareLocal) bareDomain Nothing
-- | Render a bare JID as text.
bareJidToText :: BareJID -> Text
bareJidToText = addressToText . bareJidAddress
instance Show BareJID where
  show = show . bareJidToText
-- Parsed as a general address first, then narrowed via 'bareJidGet'.
instance FromJSON BareJID where
  parseJSON v = do
    addr <- parseJSON v
    case bareJidGet addr of
      Nothing -> fail "BareJID"
      Just r -> return r
instance ToJSON BareJID where
  toJSON = toJSON . bareJidAddress
-- | Downcast: succeeds only for an address with a localpart and without
-- a resourcepart.
bareJidGet :: XMPPAddress -> Maybe BareJID
bareJidGet XMPPAddress { addressLocal = Just bareLocal, addressDomain = bareDomain, addressResource = Nothing } = Just BareJID {..}
bareJidGet _ = Nothing
-- | A JID guaranteed to have both a localpart and a resourcepart.
data FullJID = FullJID { fullBare :: BareJID
                       , fullResource :: XMPPResource
                       }
             deriving (Eq, Ord)
-- | Convert back to a general 'XMPPAddress'.
fullJidAddress :: FullJID -> XMPPAddress
fullJidAddress (FullJID {..}) = XMPPAddress (Just $ bareLocal fullBare) (bareDomain fullBare) (Just fullResource)
-- | Render a full JID as text.
fullJidToText :: FullJID -> Text
fullJidToText = addressToText . fullJidAddress
instance Show FullJID where
  show = show . fullJidToText
-- Parsed as a general address first, then narrowed via 'fullJidGet'.
instance FromJSON FullJID where
  parseJSON v = do
    addr <- parseJSON v
    case fullJidGet addr of
      Nothing -> fail "FullJID"
      Just r -> return r
instance ToJSON FullJID where
  toJSON = toJSON . fullJidAddress
-- | Downcast: succeeds only when both localpart and resourcepart are
-- present.
fullJidGet :: XMPPAddress -> Maybe FullJID
fullJidGet XMPPAddress {..} = do
  bareLocal <- addressLocal
  fullResource <- addressResource
  return FullJID { fullBare = BareJID { bareDomain = addressDomain, .. }, .. }
|
abbradar/yaxmpp
|
src/Network/XMPP/Address.hs
|
Haskell
|
bsd-3-clause
| 7,050
|
{-# OPTIONS -fno-implicit-prelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.Marshal.Error
-- Copyright : (c) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : ffi@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Marshalling support: Handling of common error conditions
--
-----------------------------------------------------------------------------
module Foreign.Marshal.Error (
-- * Error utilities
-- |Throw an exception on specific return values
--
throwIf, -- :: (a -> Bool) -> (a -> String) -> IO a -> IO a
throwIf_, -- :: (a -> Bool) -> (a -> String) -> IO a -> IO ()
throwIfNeg, -- :: (Ord a, Num a)
-- => (a -> String) -> IO a -> IO a
throwIfNeg_, -- :: (Ord a, Num a)
-- => (a -> String) -> IO a -> IO ()
throwIfNull, -- :: String -> IO (Ptr a) -> IO (Ptr a)
-- Discard return value
--
void -- IO a -> IO ()
) where
import Foreign.Ptr
-- exported functions
-- ------------------
-- |Guard an 'IO' operation and throw an exception if the result meets the given
-- predicate
--
-- * the second argument computes an error message from the result of the 'IO'
--   operation
--
throwIf :: (a -> Bool) -> (a -> String) -> IO a -> IO a
throwIf isBad mkMsg act = do
  res <- act
  if isBad res
    then ioError (userError (mkMsg res))
    else return res
-- |Like 'throwIf', but discarding the result
--
throwIf_ :: (a -> Bool) -> (a -> String) -> IO a -> IO ()
throwIf_ isBad mkMsg act = do
  res <- act
  if isBad res
    then ioError (userError (mkMsg res))
    else return ()
-- |Guards against negative result values
--
throwIfNeg :: (Ord a, Num a) => (a -> String) -> IO a -> IO a
throwIfNeg mkMsg act = do
  res <- act
  if res < 0
    then ioError (userError (mkMsg res))
    else return res
-- |Like 'throwIfNeg', but discarding the result
--
throwIfNeg_ :: (Ord a, Num a) => (a -> String) -> IO a -> IO ()
throwIfNeg_ mkMsg act = do
  res <- act
  if res < 0
    then ioError (userError (mkMsg res))
    else return ()
-- |Guards against null pointers
--
throwIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
throwIfNull msg act = do
  p <- act
  if p == nullPtr
    then ioError (userError msg)
    else return p
-- |Discard the return value of an 'IO' action
--
-- NOTE(review): same name and behaviour as the later
-- 'Control.Monad.void' (restricted to 'IO'); kept for the FFI spec's
-- historical interface.
void :: IO a -> IO ()
void act = act >> return ()
|
OS2World/DEV-UTIL-HUGS
|
libraries/Foreign/Marshal/Error.hs
|
Haskell
|
bsd-3-clause
| 2,293
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
-- | Types used while planning how to build everything in a project.
--
-- Primarily this is the 'ElaboratedInstallPlan'.
--
module Distribution.Client.ProjectPlanning.Types (
SolverInstallPlan,
-- * Elaborated install plan types
ElaboratedInstallPlan,
ElaboratedConfiguredPackage(..),
elabDistDirParams,
elabExeDependencyPaths,
elabLibDependencies,
elabExeDependencies,
elabSetupDependencies,
elabPkgConfigDependencies,
ElaboratedPackageOrComponent(..),
ElaboratedComponent(..),
ElaboratedPackage(..),
pkgOrderDependencies,
ElaboratedPlanPackage,
ElaboratedSharedConfig(..),
ElaboratedReadyPackage,
BuildStyle(..),
CabalFileText,
-- * Build targets
PackageTarget(..),
ComponentTarget(..),
showComponentTarget,
SubComponentTarget(..),
-- * Setup script
SetupScriptStyle(..),
) where
import Distribution.Client.PackageHash
import Distribution.Client.Types
import Distribution.Client.InstallPlan
( GenericInstallPlan, GenericPlanPackage )
import Distribution.Client.SolverInstallPlan
( SolverInstallPlan )
import Distribution.Client.DistDirLayout
import Distribution.Types.ComponentEnabledSpec
import Distribution.Package
hiding (InstalledPackageId, installedPackageId)
import Distribution.System
import qualified Distribution.PackageDescription as Cabal
import Distribution.InstalledPackageInfo (InstalledPackageInfo)
import Distribution.Simple.Compiler
import qualified Distribution.Simple.BuildTarget as Cabal
import Distribution.Simple.Program.Db
import Distribution.ModuleName (ModuleName)
import Distribution.Simple.LocalBuildInfo (ComponentName(..))
import qualified Distribution.Simple.InstallDirs as InstallDirs
import Distribution.Simple.InstallDirs (PathTemplate)
import Distribution.Version
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.ComponentDeps (ComponentDeps)
import Distribution.Solver.Types.OptionalStanza
import Distribution.Compat.Graph (IsNode(..))
import Distribution.Simple.Utils (ordNub)
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.ByteString.Lazy as LBS
import Distribution.Compat.Binary
import GHC.Generics (Generic)
import qualified Data.Monoid as Mon
-- | The combination of an elaborated install plan plus a
-- 'ElaboratedSharedConfig' contains all the details necessary to be able
-- to execute the plan without having to make further policy decisions.
--
-- It does not include dynamic elements such as resources (such as http
-- connections).
--
type ElaboratedInstallPlan
   = GenericInstallPlan InstalledPackageInfo
                        ElaboratedConfiguredPackage
-- | A single node of the elaborated plan, parameterised the same way as
-- 'ElaboratedInstallPlan'.
type ElaboratedPlanPackage
   = GenericPlanPackage InstalledPackageInfo
                        ElaboratedConfiguredPackage
--TODO: [code cleanup] decide if we really need this, there's not much in it, and in principle
--      even platform and compiler could be different if we're building things
--      like a server + client with ghc + ghcjs
-- | Configuration shared by every package in the plan: the target
-- platform, the compiler, and the compiler's configured programs.
data ElaboratedSharedConfig
   = ElaboratedSharedConfig {
       pkgConfigPlatform :: Platform,
       pkgConfigCompiler :: Compiler, --TODO: [code cleanup] replace with CompilerInfo
       -- | The programs that the compiler configured (e.g. for GHC, the progs
       -- ghc & ghc-pkg). Once constructed, only the 'configuredPrograms' are
       -- used.
       pkgConfigCompilerProgs :: ProgramDb
     }
  deriving (Show, Generic)
--TODO: [code cleanup] no Eq instance
instance Binary ElaboratedSharedConfig
-- | Everything needed to configure and build one unit of the plan,
-- either a whole package or a single component (see 'elabPkgOrComp').
data ElaboratedConfiguredPackage
   = ElaboratedConfiguredPackage {
       -- | The 'UnitId' which uniquely identifies this item in a build plan
       elabUnitId :: UnitId,
       -- | The 'PackageId' of the originating package
       elabPkgSourceId :: PackageId,
       -- | Mapping from 'PackageName's to 'ComponentName', for every
       -- package that is overloaded with an internal component name
       elabInternalPackages :: Map PackageName ComponentName,
       -- | A total flag assignment for the package.
       -- TODO: Actually this can be per-component if we drop
       -- all flags that don't affect a component.
       elabFlagAssignment :: Cabal.FlagAssignment,
       -- | The original default flag assignment, used only for reporting.
       elabFlagDefaults :: Cabal.FlagAssignment,
       -- | The Cabal package description.
       elabPkgDescription :: Cabal.PackageDescription,
       -- | Where the package comes from, e.g. tarball, local dir etc. This
       --   is not the same as where it may be unpacked to for the build.
       elabPkgSourceLocation :: PackageLocation (Maybe FilePath),
       -- | The hash of the source, e.g. the tarball. We don't have this for
       -- local source dir packages.
       elabPkgSourceHash :: Maybe PackageSourceHash,
       -- | Is this package one of the ones specified by location in the
       -- project file? (As opposed to a dependency, or a named package pulled
       -- in)
       elabLocalToProject :: Bool,
       -- | Are we going to build and install this package to the store, or are
       -- we going to build it and register it locally.
       elabBuildStyle :: BuildStyle,
       -- | Another way of phrasing 'pkgStanzasAvailable'.
       elabEnabledSpec :: ComponentEnabledSpec,
       -- | Which optional stanzas (ie testsuites, benchmarks) can be built.
       -- This means the solver produced a plan that has them available.
       -- This doesn't necessary mean we build them by default.
       elabStanzasAvailable :: Set OptionalStanza,
       -- | Which optional stanzas the user explicitly asked to enable or
       -- to disable. This tells us which ones we build by default, and
       -- helps with error messages when the user asks to build something
       -- they explicitly disabled.
       --
       -- TODO: The 'Bool' here should be refined into an ADT with three
       -- cases: NotRequested, ExplicitlyRequested and
       -- ImplicitlyRequested. A stanza is explicitly requested if
       -- the user asked, for this *specific* package, that the stanza
       -- be enabled; it's implicitly requested if the user asked for
       -- all global packages to have this stanza enabled. The
       -- difference between an explicit and implicit request is
       -- error reporting behavior: if a user asks for tests to be
       -- enabled for a specific package that doesn't have any tests,
       -- we should warn them about it, but we shouldn't complain
       -- that a user enabled tests globally, and some local packages
       -- just happen not to have any tests. (But perhaps we should
       -- warn if ALL local packages don't have any tests.)
       elabStanzasRequested :: Map OptionalStanza Bool,
       -- Package database stacks used for the different phases.
       elabSetupPackageDBStack :: PackageDBStack,
       elabBuildPackageDBStack :: PackageDBStack,
       elabRegisterPackageDBStack :: PackageDBStack,
       -- | The package/component contains/is a library and so must be registered
       elabRequiresRegistration :: Bool,
       -- | Optional replacement text for the .cabal file.
       elabPkgDescriptionOverride :: Maybe CabalFileText,
       -- TODO: make per-component variants of these flags
       -- Build flavour and optimisation settings.
       elabVanillaLib :: Bool,
       elabSharedLib :: Bool,
       elabDynExe :: Bool,
       elabGHCiLib :: Bool,
       elabProfLib :: Bool,
       elabProfExe :: Bool,
       elabProfLibDetail :: ProfDetailLevel,
       elabProfExeDetail :: ProfDetailLevel,
       elabCoverage :: Bool,
       elabOptimization :: OptimisationLevel,
       elabSplitObjs :: Bool,
       elabStripLibs :: Bool,
       elabStripExes :: Bool,
       elabDebugInfo :: DebugInfoLevel,
       -- Program paths, arguments and configure options.
       elabProgramPaths :: Map String FilePath,
       elabProgramArgs :: Map String [String],
       elabProgramPathExtra :: [FilePath],
       elabConfigureScriptArgs :: [String],
       elabExtraLibDirs :: [FilePath],
       elabExtraFrameworkDirs :: [FilePath],
       elabExtraIncludeDirs :: [FilePath],
       elabProgPrefix :: Maybe PathTemplate,
       elabProgSuffix :: Maybe PathTemplate,
       elabInstallDirs :: InstallDirs.InstallDirs FilePath,
       -- Haddock options.
       elabHaddockHoogle :: Bool,
       elabHaddockHtml :: Bool,
       elabHaddockHtmlLocation :: Maybe String,
       elabHaddockExecutables :: Bool,
       elabHaddockTestSuites :: Bool,
       elabHaddockBenchmarks :: Bool,
       elabHaddockInternal :: Bool,
       elabHaddockCss :: Maybe FilePath,
       elabHaddockHscolour :: Bool,
       elabHaddockHscolourCss :: Maybe FilePath,
       elabHaddockContents :: Maybe PathTemplate,
       -- Setup.hs related things:
       -- | One of four modes for how we build and interact with the Setup.hs
       -- script, based on whether it's a build-type Custom, with or without
       -- explicit deps and the cabal spec version the .cabal file needs.
       elabSetupScriptStyle :: SetupScriptStyle,
       -- | The version of the Cabal command line interface that we are using
       -- for this package. This is typically the version of the Cabal lib
       -- that the Setup.hs is built against.
       elabSetupScriptCliVersion :: Version,
       -- Build time related:
       elabBuildTargets :: [ComponentTarget],
       elabReplTarget :: Maybe ComponentTarget,
       elabBuildHaddocks :: Bool,
       --pkgSourceDir ? -- currently passed in later because they can use temp locations
       --pkgBuildDir  ? -- but could in principle still have it here, with optional instr to use temp loc
       -- | Component/package specific information
       elabPkgOrComp :: ElaboratedPackageOrComponent
   }
  deriving (Eq, Show, Generic)
instance Package ElaboratedConfiguredPackage where
  packageId = elabPkgSourceId
instance HasConfiguredId ElaboratedConfiguredPackage where
  configuredId elab = ConfiguredId (packageId elab) (unitIdComponentId (elabUnitId elab))
instance HasUnitId ElaboratedConfiguredPackage where
  installedUnitId = elabUnitId
-- The plan graph is keyed by 'UnitId'; a node's neighbours are the
-- units it depends on.
instance IsNode ElaboratedConfiguredPackage where
    type Key ElaboratedConfiguredPackage = UnitId
    nodeKey = elabUnitId
    nodeNeighbors elab = case elabPkgOrComp elab of
      -- Important not to have duplicates: otherwise InstallPlan gets
      -- confused. NB: this DOES include setup deps.
      ElabPackage pkg -> ordNub (CD.flatDeps (pkgOrderDependencies pkg))
      ElabComponent comp -> compOrderDependencies comp
instance Binary ElaboratedConfiguredPackage
-- | Whether this plan unit carries whole-package or single-component
-- elaboration data.
data ElaboratedPackageOrComponent
    = ElabPackage ElaboratedPackage
    | ElabComponent ElaboratedComponent
  deriving (Eq, Show, Generic)
instance Binary ElaboratedPackageOrComponent
-- | Assemble the parameters that determine this unit's dist directory
-- layout from the unit itself plus the shared configuration.
elabDistDirParams :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> DistDirParams
elabDistDirParams shared elab = DistDirParams {
    distParamUnitId = installedUnitId elab,
    distParamPackageId = elabPkgSourceId elab,
    -- Only per-component builds have a component name here.
    distParamComponentName = case elabPkgOrComp elab of
        ElabComponent comp -> compComponentName comp
        ElabPackage _ -> Nothing,
    distParamCompilerId = compilerId (pkgConfigCompiler shared),
    distParamPlatform = pkgConfigPlatform shared,
    distParamOptimization = elabOptimization elab
  }
-- | The library dependencies (i.e., the libraries we depend on, NOT
-- the dependencies of the library), NOT including setup dependencies.
elabLibDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId]
elabLibDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage pkg }
    = ordNub (CD.nonSetupDeps (pkgLibDependencies pkg))
elabLibDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp }
    = compLibDependencies comp
-- | The executable dependencies, NOT including setup dependencies.
elabExeDependencies :: ElaboratedConfiguredPackage -> [ComponentId]
elabExeDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage pkg }
    = map confInstId (CD.nonSetupDeps (pkgExeDependencies pkg))
elabExeDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp }
    = compExeDependencies comp
-- | Install locations of the executable dependencies.
elabExeDependencyPaths :: ElaboratedConfiguredPackage -> [FilePath]
elabExeDependencyPaths ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage pkg }
    = CD.nonSetupDeps (pkgExeDependencyPaths pkg)
elabExeDependencyPaths ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp }
    = compExeDependencyPaths comp
-- | The setup-script dependencies only.
elabSetupDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId]
elabSetupDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage pkg }
    = CD.setupDeps (pkgLibDependencies pkg)
elabSetupDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp }
    = compSetupDependencies comp
-- | The pkg-config dependencies (name and, if known, version).
elabPkgConfigDependencies :: ElaboratedConfiguredPackage -> [(PackageName, Maybe Version)]
elabPkgConfigDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage pkg }
    = pkgPkgConfigDependencies pkg
elabPkgConfigDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp }
    = compPkgConfigDependencies comp
-- | Some extra metadata associated with an
-- 'ElaboratedConfiguredPackage' which indicates that the "package"
-- in question is actually a single component to be built. Arguably
-- it would be clearer if there were an ADT which branched into
-- package work items and component work items, but I've structured
-- it this way to minimize change to the existing code (which I
-- don't feel qualified to rewrite.)
data ElaboratedComponent
   = ElaboratedComponent {
    -- | The name of the component to be built according to the solver
    compSolverName :: CD.Component,
    -- | The name of the component to be built. Nothing if
    -- it's a setup dep.
    compComponentName :: Maybe ComponentName,
    -- | The library dependencies of this component.
    compLibDependencies :: [ConfiguredId],
    -- | The executable dependencies of this component.
    compExeDependencies :: [ComponentId],
    -- | The @pkg-config@ dependencies of the component
    compPkgConfigDependencies :: [(PackageName, Maybe Version)],
    -- | The paths all our executable dependencies will be installed
    -- to once they are installed.
    compExeDependencyPaths :: [FilePath],
    -- | The setup dependencies. TODO: Remove this when setups
    -- are components of their own.
    compSetupDependencies :: [ConfiguredId]
   }
  deriving (Eq, Show, Generic)
compOrderDependencies :: ElaboratedComponent -> [UnitId]
compOrderDependencies comp =
-- TODO: Change this with Backpack!
map (SimpleUnitId . confInstId) (compLibDependencies comp)
++ map SimpleUnitId (compExeDependencies comp)
++ map (SimpleUnitId . confInstId) (compSetupDependencies comp)
-- | Whole-package elaboration data: dependencies grouped per component.
data ElaboratedPackage
   = ElaboratedPackage {
       -- | The 'InstalledPackageId' for this package.
       pkgInstalledId :: InstalledPackageId,
       -- | The exact dependencies (on other plan packages)
       --
       pkgLibDependencies :: ComponentDeps [ConfiguredId],
       -- | Dependencies on executable packages.
       --
       pkgExeDependencies :: ComponentDeps [ConfiguredId],
       -- | Paths where executable dependencies live.
       --
       pkgExeDependencyPaths :: ComponentDeps [FilePath],
       -- | Dependencies on @pkg-config@ packages.
       -- NB: this is NOT per-component (although it could be)
       -- because Cabal library does not track per-component
       -- pkg-config depends; it always does them all at once.
       --
       pkgPkgConfigDependencies :: [(PackageName, Maybe Version)],
       -- | Which optional stanzas (ie testsuites, benchmarks) will actually
       -- be enabled during the package configure step.
       pkgStanzasEnabled :: Set OptionalStanza
     }
  deriving (Eq, Show, Generic)
instance Binary ElaboratedPackage
-- | All dependencies of the package as plan 'UnitId's, per component:
-- the library and executable dependency maps merged together.
pkgOrderDependencies :: ElaboratedPackage -> ComponentDeps [UnitId]
pkgOrderDependencies pkg =
    fmap (map (SimpleUnitId . confInstId)) (pkgLibDependencies pkg) `Mon.mappend`
    fmap (map (SimpleUnitId . confInstId)) (pkgExeDependencies pkg)
-- | This is used in the install plan to indicate how the package will be
-- built.
--
data BuildStyle =
    -- | The classic approach where the package is built, then the files
    -- installed into some location and the result registered in a package db.
    --
    -- If the package came from a tarball then it's built in a temp dir and
    -- the results discarded.
    BuildAndInstall
    -- | The package is built, but the files are not installed anywhere,
    -- rather the build dir is kept and the package is registered inplace.
    --
    -- Such packages can still subsequently be installed.
    --
    -- Typically 'BuildAndInstall' packages will only depend on other
    -- 'BuildAndInstall' style packages and not on 'BuildInplaceOnly' ones.
    --
  | BuildInplaceOnly
  deriving (Eq, Show, Generic)
instance Binary BuildStyle
-- | The raw textual contents of a .cabal file.
type CabalFileText = LBS.ByteString
-- | 'GenericReadyPackage' specialised to elaborated packages.
type ElaboratedReadyPackage = GenericReadyPackage ElaboratedConfiguredPackage
---------------------------
-- Build targets
--
-- | The various targets within a package. This is more of a high level
-- specification than a elaborated prescription.
--
data PackageTarget =
    -- | Build the default components in this package. This usually means
    -- just the lib and exes, but it can also mean the testsuites and
    -- benchmarks if the user explicitly requested them.
    BuildDefaultComponents
    -- | Build a specific component in this package.
  | BuildSpecificComponent ComponentTarget
  | ReplDefaultComponent
  | ReplSpecificComponent ComponentTarget
  | HaddockDefaultComponents
  deriving (Eq, Show, Generic)
-- | A specific component, possibly narrowed to a sub-component target.
data ComponentTarget = ComponentTarget ComponentName SubComponentTarget
  deriving (Eq, Ord, Show, Generic)
-- | Whole component, or a single module or file within it.
data SubComponentTarget = WholeComponent
                        | ModuleTarget ModuleName
                        | FileTarget FilePath
  deriving (Eq, Ord, Show, Generic)
instance Binary PackageTarget
instance Binary ComponentTarget
instance Binary SubComponentTarget
-- | Unambiguously render a 'ComponentTarget', e.g., to pass
-- to a Cabal Setup script.
showComponentTarget :: PackageId -> ComponentTarget -> String
showComponentTarget pkgid =
    Cabal.showBuildTarget pkgid . toBuildTarget
  where
    -- Translate our target representation into Cabal's 'BuildTarget'.
    toBuildTarget :: ComponentTarget -> Cabal.BuildTarget
    toBuildTarget (ComponentTarget cname subtarget) =
      case subtarget of
        WholeComponent -> Cabal.BuildTargetComponent cname
        ModuleTarget mname -> Cabal.BuildTargetModule cname mname
        FileTarget fname -> Cabal.BuildTargetFile cname fname
---------------------------
-- Setup.hs script policy
--
-- | There are four major cases for Setup.hs handling:
--
--  1. @build-type@ Custom with a @custom-setup@ section
--  2. @build-type@ Custom without a @custom-setup@ section
--  3. @build-type@ not Custom with @cabal-version > $our-cabal-version@
--  4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@
--
-- It's also worth noting that packages specifying @cabal-version: >= 1.23@
-- or later that have @build-type@ Custom will always have a @custom-setup@
-- section. Therefore in case 2, the specified @cabal-version@ will always be
-- less than 1.23.
--
-- In cases 1 and 2 we obviously have to build an external Setup.hs script,
-- while in case 4 we can use the internal library API. In case 3 we also have
-- to build an external Setup.hs script because the package needs a later
-- Cabal lib version than we can support internally.
--
data SetupScriptStyle = SetupCustomExplicitDeps   -- ^ case 1 above
                      | SetupCustomImplicitDeps   -- ^ case 2 above
                      | SetupNonCustomExternalLib -- ^ case 3 above
                      | SetupNonCustomInternalLib -- ^ case 4 above
  deriving (Eq, Show, Generic)
instance Binary SetupScriptStyle
|
sopvop/cabal
|
cabal-install/Distribution/Client/ProjectPlanning/Types.hs
|
Haskell
|
bsd-3-clause
| 20,460
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.NB.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Lang
import Duckling.Resolve
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
import Duckling.Testing.Types hiding (examples)
-- | Norwegian Bokmål time-expression corpus: the standard test context
-- with the language switched to NB, paired with every example below.
corpus :: Corpus
corpus = (testContext {lang = NB}, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (datetime (2013, 2, 12, 4, 30, 0) Second)
[ "nå"
, "akkurat nå"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "i dag"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "i går"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "i morgen"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "mandag"
, "man."
, "på mandag"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "Mandag den 18. februar"
, "Man, 18 februar"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "tirsdag"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "torsdag"
, "tors"
, "tors."
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "fredag"
, "fre"
, "fre."
]
, examples (datetime (2013, 2, 16, 0, 0, 0) Day)
[ "lørdag"
, "lør"
, "lør."
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "søndag"
, "søn"
, "søn."
]
, examples (datetime (2013, 3, 1, 0, 0, 0) Day)
[ "Den første mars"
, "1. mars"
, "Den 1. mars"
]
, examples (datetime (2013, 3, 3, 0, 0, 0) Day)
[ "3 mars"
, "den tredje mars"
, "den 3. mars"
]
, examples (datetime (2015, 3, 3, 0, 0, 0) Day)
[ "3 mars 2015"
, "tredje mars 2015"
, "3. mars 2015"
, "3-3-2015"
, "03-03-2015"
, "3/3/2015"
, "3/3/15"
, "2015-3-3"
, "2015-03-03"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "På den 15."
, "På den 15"
, "Den 15."
, "Den femtende"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "den 15. februar"
, "15. februar"
, "februar 15"
, "15-02"
, "15/02"
]
, examples (datetime (2013, 8, 8, 0, 0, 0) Day)
[ "8 Aug"
]
, examples (datetime (2014, 10, 0, 0, 0, 0) Month)
[ "Oktober 2014"
]
, examples (datetime (1974, 10, 31, 0, 0, 0) Day)
[ "31/10/1974"
, "31/10/74"
, "31-10-74"
]
, examples (datetime (2015, 4, 14, 0, 0, 0) Day)
[ "14april 2015"
, "April 14, 2015"
, "fjortende April 15"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "neste fredag igjen"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "neste mars"
]
, examples (datetime (2014, 3, 0, 0, 0, 0) Month)
[ "neste mars igjen"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "Søndag, 10 feb"
, "Søndag 10 Feb"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "Ons, Feb13"
, "Ons feb13"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "Mandag, Feb 18"
, "Man, februar 18"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Week)
[ "denne uken"
]
, examples (datetime (2013, 2, 4, 0, 0, 0) Week)
[ "forrige uke"
, "sist uke"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Week)
[ "neste uke"
]
, examples (datetime (2013, 1, 0, 0, 0, 0) Month)
[ "forrige måned"
, "sist måned"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "neste måned"
]
, examples (datetime (2013, 1, 1, 0, 0, 0) Quarter)
[ "dette kvartalet"
]
, examples (datetime (2013, 4, 1, 0, 0, 0) Quarter)
[ "neste kvartal"
]
, examples (datetime (2013, 7, 1, 0, 0, 0) Quarter)
[ "tredje kvartal"
, "3. kvartal"
]
, examples (datetime (2018, 10, 1, 0, 0, 0) Quarter)
[ "4. kvartal 2018"
, "fjerde kvartal 2018"
]
, examples (datetime (2012, 0, 0, 0, 0, 0) Year)
[ "forrige år"
, "sist år"
]
, examples (datetime (2012, 0, 0, 0, 0, 0) Year)
[ "i fjor"
]
, examples (datetime (2013, 0, 0, 0, 0, 0) Year)
[ "i år"
, "dette år"
]
, examples (datetime (2014, 0, 0, 0, 0, 0) Year)
[ "neste år"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "forrige søndag"
, "sist søndag"
, "søndag i forrige uke"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "forrige tirsdag"
, "sist tirsdag"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "neste tirsdag"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "neste onsdag"
]
, examples (datetime (2013, 2, 20, 0, 0, 0) Day)
[ "onsdag i neste uke"
, "onsdag neste uke"
, "neste onsdag igjen"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "neste fredag igjen"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "mandag denne uken"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "tirsdag denne uken"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "onsdag denne uken"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "i overimorgen"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "i forigårs"
]
, examples (datetime (2013, 3, 25, 0, 0, 0) Day)
[ "siste mandag i mars"
, "siste mandag i mars"
]
, examples (datetime (2014, 3, 30, 0, 0, 0) Day)
[ "siste søndag i mars 2014"
, "siste søndag i mars 2014"
]
, examples (datetime (2013, 10, 3, 0, 0, 0) Day)
[ "tredje dag i oktober"
, "tredje dag i Oktober"
]
, examples (datetime (2014, 10, 6, 0, 0, 0) Week)
[ "første uke i oktober 2014"
, "første uke i Oktober 2014"
]
, examples (datetime (2015, 10, 31, 0, 0, 0) Day)
[ "siste dag i oktober 2015"
, "siste dag i Oktober 2015"
]
, examples (datetime (2014, 9, 22, 0, 0, 0) Week)
[ "siste uke i september 2014"
, "siste uke i September 2014"
]
, examples (datetime (2013, 10, 1, 0, 0, 0) Day)
[ "første tirsdag i oktober"
, "første tirsdag i Oktober"
]
, examples (datetime (2014, 9, 16, 0, 0, 0) Day)
[ "tredje tirsdag i september 2014"
, "tredje tirsdag i September 2014"
]
, examples (datetime (2014, 10, 1, 0, 0, 0) Day)
[ "første onsdag i oktober 2014"
, "første onsdag i Oktober 2014"
]
, examples (datetime (2014, 10, 8, 0, 0, 0) Day)
[ "andre onsdag i oktober 2014"
, "andre onsdag i Oktober 2014"
]
, examples (datetime (2013, 2, 13, 3, 0, 0) Hour)
[ "klokken 3"
, "kl. 3"
]
, examples (datetime (2013, 2, 13, 3, 18, 0) Minute)
[ "3:18"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "klokken 15"
, "kl. 15"
, "15h"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "ca. kl. 15"
, "cirka kl. 15"
, "omkring klokken 15"
]
, examples (datetime (2013, 2, 13, 17, 0, 0) Hour)
[ "imorgen klokken 17 sharp"
, "imorgen kl. 17 presis"
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
[ "kvarter over 15"
, "kvart over 15"
, "15:15"
]
, examples (datetime (2013, 2, 12, 15, 20, 0) Minute)
[ "kl. 20 over 15"
, "klokken 20 over 15"
, "kl. 15:20"
, "15:20"
]
, examples (datetime (2013, 2, 12, 15, 30, 0) Minute)
[ "15:30"
]
, examples (datetime (2013, 2, 12, 15, 23, 24) Second)
[ "15:23:24"
]
, examples (datetime (2013, 2, 12, 11, 45, 0) Minute)
[ "kvarter på 12"
, "kvart på 12"
, "11:45"
]
, examples (datetime (2013, 2, 16, 9, 0, 0) Hour)
[ "klokken 9 på lørdag"
]
, examples (datetime (2014, 7, 18, 19, 0, 0) Minute)
[ "Fre, Jul 18, 2014 19:00"
]
, examples (datetime (2014, 7, 18, 0, 0, 0) Day)
[ "Fre, Jul 18"
, "Jul 18, Fre"
]
, examples (datetime (2014, 9, 20, 19, 30, 0) Minute)
[ "kl. 19:30, Lør, 20 sep"
]
, examples (datetime (2013, 2, 12, 4, 30, 1) Second)
[ "om 1 sekund"
, "om ett sekund"
, "om et sekund"
, "ett sekund fra nå"
, "et sekund fra nå"
]
, examples (datetime (2013, 2, 12, 4, 31, 0) Second)
[ "om 1 minutt"
, "om et minutt"
, "om ett minutt"
]
, examples (datetime (2013, 2, 12, 4, 32, 0) Second)
[ "om 2 minutter"
, "om to minutter"
, "om 2 minutter mer"
, "om to minutter mer"
, "2 minutter fra nå"
, "to minutter fra nå"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Second)
[ "om 60 minutter"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "om en halv time"
]
, examples (datetime (2013, 2, 12, 7, 0, 0) Second)
[ "om 2,5 time"
, "om 2 og en halv time"
, "om to og en halv time"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Minute)
[ "om én time"
, "om 1 time"
, "om 1t"
]
, examples (datetime (2013, 2, 12, 6, 30, 0) Minute)
[ "om et par timer"
]
, examples (datetime (2013, 2, 13, 4, 30, 0) Minute)
[ "om 24 timer"
]
, examples (datetime (2013, 2, 13, 4, 0, 0) Hour)
[ "om en dag"
]
, examples (datetime (2016, 2, 0, 0, 0, 0) Month)
[ "3 år fra i dag"
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "om 7 dager"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "om en uke"
, "om én uke"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "om ca. en halv time"
, "om cirka en halv time"
]
, examples (datetime (2013, 2, 5, 4, 0, 0) Hour)
[ "7 dager siden"
, "syv dager siden"
]
, examples (datetime (2013, 1, 29, 4, 0, 0) Hour)
[ "14 dager siden"
, "fjorten dager siden"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "en uke siden"
, "én uke siden"
, "1 uke siden"
]
, examples (datetime (2013, 1, 22, 0, 0, 0) Day)
[ "3 uker siden"
, "tre uker siden"
]
, examples (datetime (2012, 11, 12, 0, 0, 0) Day)
[ "3 måneder siden"
, "tre måneder siden"
]
, examples (datetime (2011, 2, 0, 0, 0, 0) Month)
[ "to år siden"
, "2 år siden"
]
, examples (datetime (1954, 0, 0, 0, 0, 0) Year)
[ "1954"
]
, examples (datetime (2013, 12, 0, 0, 0, 0) Month)
[ "et år etter julaften"
, "ett år etter julaften"
]
, examples (datetimeInterval ((2013, 6, 21, 0, 0, 0), (2013, 9, 24, 0, 0, 0)) Day)
[ "denne sommeren"
, "den her sommeren"
]
, examples (datetimeInterval ((2012, 12, 21, 0, 0, 0), (2013, 3, 21, 0, 0, 0)) Day)
[ "denne vinteren"
, "den her vinteren"
]
, examples (datetime (2013, 12, 25, 0, 0, 0) Day)
[ "1 juledag"
, "1. juledag"
, "første juledag"
]
, examples (datetime (2013, 12, 31, 0, 0, 0) Day)
[ "nyttårsaften"
]
, examples (datetime (2014, 1, 1, 0, 0, 0) Day)
[ "nyttårsdag"
]
, examples (datetimeInterval ((2013, 2, 12, 18, 0, 0), (2013, 2, 13, 0, 0, 0)) Hour)
[ "i kveld"
]
, examples (datetimeInterval ((2013, 2, 8, 18, 0, 0), (2013, 2, 11, 0, 0, 0)) Hour)
[ "forrige helg"
, "sist helg"
]
, examples (datetimeInterval ((2013, 2, 13, 18, 0, 0), (2013, 2, 14, 0, 0, 0)) Hour)
[ "i morgen kveld"
]
, examples (datetimeInterval ((2013, 2, 13, 12, 0, 0), (2013, 2, 13, 14, 0, 0)) Hour)
[ "i morgen middag"
]
, examples (datetimeInterval ((2013, 2, 11, 18, 0, 0), (2013, 2, 12, 0, 0, 0)) Hour)
[ "i går kveld"
]
, examples (datetimeInterval ((2013, 2, 15, 18, 0, 0), (2013, 2, 18, 0, 0, 0)) Hour)
[ "denne helgen"
, "denne helga"
, "i helga"
, "i helgen"
]
, examples (datetimeInterval ((2013, 2, 18, 4, 0, 0), (2013, 2, 18, 12, 0, 0)) Hour)
[ "mandag morgen"
]
, examples (datetimeInterval ((2013, 12, 24, 0, 0, 0), (2013, 12, 31, 0, 0, 0)) Day)
[ "i romjulen"
, "i romjula"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 29, 58), (2013, 2, 12, 4, 30, 0)) Second)
[ "siste 2 sekunder"
, "siste to sekunder"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 1), (2013, 2, 12, 4, 30, 4)) Second)
[ "neste 3 sekunder"
, "neste tre sekunder"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 28, 0), (2013, 2, 12, 4, 30, 0)) Minute)
[ "siste 2 minutter"
, "siste to minutter"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 31, 0), (2013, 2, 12, 4, 34, 0)) Minute)
[ "neste 3 minutter"
, "neste tre minutter"
]
, examples (datetimeInterval ((2013, 2, 12, 3, 0, 0), (2013, 2, 12, 4, 0, 0)) Hour)
[ "siste 1 time"
, "seneste 1 time"
]
, examples (datetimeInterval ((2013, 2, 12, 5, 0, 0), (2013, 2, 12, 8, 0, 0)) Hour)
[ "neste 3 timer"
, "neste tre timer"
]
, examples (datetimeInterval ((2013, 2, 10, 0, 0, 0), (2013, 2, 12, 0, 0, 0)) Day)
[ "siste 2 dager"
, "siste to dager"
, "seneste 2 dager"
]
, examples (datetimeInterval ((2013, 2, 13, 0, 0, 0), (2013, 2, 16, 0, 0, 0)) Day)
[ "neste 3 dager"
, "neste tre dager"
]
, examples (datetimeInterval ((2013, 1, 28, 0, 0, 0), (2013, 2, 11, 0, 0, 0)) Week)
[ "siste 2 uker"
, "siste to uker"
, "seneste to uker"
]
, examples (datetimeInterval ((2013, 2, 18, 0, 0, 0), (2013, 3, 11, 0, 0, 0)) Week)
[ "neste 3 uker"
, "neste tre uker"
]
, examples (datetimeInterval ((2012, 12, 0, 0, 0, 0), (2013, 2, 0, 0, 0, 0)) Month)
[ "siste 2 måneder"
, "siste to måneder"
, "seneste to måneder"
]
, examples (datetimeInterval ((2013, 3, 0, 0, 0, 0), (2013, 6, 0, 0, 0, 0)) Month)
[ "neste 3 måneder"
, "neste tre måneder"
]
, examples (datetimeInterval ((2011, 0, 0, 0, 0, 0), (2013, 0, 0, 0, 0, 0)) Year)
[ "siste 2 år"
, "siste to år"
, "seneste 2 år"
]
, examples (datetimeInterval ((2014, 0, 0, 0, 0, 0), (2017, 0, 0, 0, 0, 0)) Year)
[ "neste 3 år"
, "neste tre år"
]
, examples (datetimeInterval ((2013, 7, 13, 0, 0, 0), (2013, 7, 16, 0, 0, 0)) Day)
[ "13-15 juli"
, "13-15 Juli"
, "13 til 15 Juli"
, "13 juli til 15 juli"
]
, examples (datetimeInterval ((2013, 8, 8, 0, 0, 0), (2013, 8, 13, 0, 0, 0)) Day)
[ "8 Aug - 12 Aug"
, "8 Aug - 12 aug"
, "8 aug - 12 aug"
, "8 august - 12 august"
]
, examples (datetimeInterval ((2013, 2, 12, 9, 30, 0), (2013, 2, 12, 11, 1, 0)) Minute)
[ "9:30 - 11:00"
, "9:30 til 11:00"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 30, 0), (2013, 2, 14, 11, 1, 0)) Minute)
[ "fra 9:30 - 11:00 på torsdag"
, "fra 9:30 til 11:00 på torsdag"
, "mellom 9:30 og 11:00 på torsdag"
, "9:30 - 11:00 på torsdag"
, "9:30 til 11:00 på torsdag"
, "etter 9:30 men før 11:00 på torsdag"
, "torsdag fra 9:30 til 11:00"
, "torsdag mellom 9:30 og 11:00"
, "fra 9:30 til 11:00 på torsdag"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 0, 0), (2013, 2, 14, 12, 0, 0)) Hour)
[ "torsdag fra 9 til 11"
]
, examples (datetimeInterval ((2013, 2, 12, 11, 30, 0), (2013, 2, 12, 13, 31, 0)) Minute)
[ "11:30-13:30"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 2, 26, 0, 0, 0)) Second)
[ "innenfor 2 uker"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 14, 0, 0) Hour)
[ "innen kl. 14"
, "innen klokken 14"
]
, examples (datetime (2013, 2, 12, 13, 0, 0) Minute)
[ "16h CET"
, "kl. 16 CET"
, "klokken 16 CET"
]
, examples (datetime (2013, 2, 14, 6, 0, 0) Minute)
[ "torsdag kl. 8:00 GMT"
, "torsdag klokken 8:00 GMT"
, "torsdag 08:00 GMT"
]
, examples (datetime (2013, 2, 12, 14, 0, 0) Hour)
[ "idag kl. 14"
, "idag klokken 14"
, "kl. 14"
, "klokken 14"
]
, examples (datetime (2013, 4, 25, 16, 0, 0) Minute)
[ "25/4 kl. 16:00"
, "25/4 klokken 16:00"
, "25-04 klokken 16:00"
, "25-4 kl. 16:00"
]
, examples (datetime (2013, 2, 13, 15, 0, 0) Minute)
[ "15:00 i morgen"
, "kl. 15:00 i morgen"
, "klokken 15:00 i morgen"
]
, examples (datetimeOpenInterval After (2013, 2, 12, 14, 0, 0) Hour)
[ "etter kl. 14"
, "etter klokken 14"
]
, examples (datetimeOpenInterval After (2013, 2, 17, 4, 0, 0) Hour)
[ "etter 5 dager"
, "etter fem dager"
]
, examples (datetime (2013, 2, 17, 4, 0, 0) Hour)
[ "om 5 dager"
, "om fem dager"
]
, examples (datetimeOpenInterval After (2013, 2, 13, 14, 0, 0) Hour)
[ "etter i morgen kl. 14"
, "etter i morgen klokken 14"
, "i morgen etter kl. 14"
, "i morgen etter klokken 14"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 11, 0, 0) Hour)
[ "før kl. 11"
, "før klokken 11"
]
, examples (datetimeOpenInterval Before (2013, 2, 13, 11, 0, 0) Hour)
[ "i morgen før kl. 11"
, "i morgen før klokken 11"
]
, examples (datetimeInterval ((2013, 2, 12, 12, 0, 0), (2013, 2, 12, 19, 0, 0)) Hour)
[ "om ettermiddagen"
]
, examples (datetime (2013, 2, 12, 13, 30, 0) Minute)
[ "kl. 13:30"
, "klokken 13:30"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "om 15 minutter"
]
, examples (datetimeInterval ((2013, 2, 12, 13, 0, 0), (2013, 2, 12, 17, 0, 0)) Hour)
[ "etter frokost"
]
, examples (datetime (2013, 2, 12, 10, 30, 0) Minute)
[ "10:30"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 0, 0), (2013, 2, 12, 12, 0, 0)) Hour)
[ "denne morgen"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "neste mandag"
]
, examples (datetime (2014, 2, 9, 0, 0, 0) Day)
[ "morsdag"
]
, examples (datetime (2013, 11, 10, 0, 0, 0) Day)
[ "farsdag"
]
]
|
rfranek/duckling
|
Duckling/Time/NB/Corpus.hs
|
Haskell
|
bsd-3-clause
| 22,209
|
module Test.Property where
import Prelude hiding ((>>))
import Elm.Utils ((|>), (>>))
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Test.QuickCheck.IO ()
import Reporting.Annotation (stripRegion)
import qualified AST.Module
import qualified Data.Text as Text
import qualified Data.Maybe as Maybe
import qualified ElmFormat.Parse as Parse
import qualified ElmFormat.Render.Text as Render
import qualified ElmVersion
import qualified Test.Generators ()
import qualified Test.ElmSourceGenerators
-- | Round-trip check at the text level: parsing the given source and
-- re-rendering it must reproduce the input exactly.
assertStringToString :: String -> Assertion
assertStringToString source = assertEqual "" (Right input) rendered
  where
    input = Text.pack source
    rendered =
        fmap (Render.render ElmVersion.Elm_0_17)
            (Parse.toEither (Parse.parse input))
-- | Round-trip check at the AST level: rendering an AST and re-parsing
-- the result must yield an equivalent AST (source regions ignored).
astToAst :: AST.Module.Module -> Assertion
astToAst ast = assertEqual "" expected actual
  where
    expected = Right (stripRegion ast)
    actual =
        fmap stripRegion
            (Parse.toEither
                (Parse.parse (Render.render ElmVersion.Elm_0_17 ast)))
-- | A minimal known-good module AST used as a fixture by the tests.
-- The original definition had a non-exhaustive case (no 'Left' branch),
-- so a parser regression would surface as an opaque pattern-match
-- failure; we now fail loudly with a descriptive message instead.
simpleAst :: AST.Module.Module
simpleAst =
    case Parse.toEither $ Parse.parse $ Text.pack "module Main exposing (..)\n\n\nfoo =\n 8\n" of
        Right ast -> ast
        Left _ -> error "Test.Property.simpleAst: fixture module failed to parse"
-- | Build a human-readable counterexample report for an AST that failed
-- the round-trip property: shows both the re-parse result and the
-- rendered source that produced it.
reportFailedAst ast = concat
    [ "=== Parsed as:\n"
    , reparsed
    , "=== END OF parse\n"
    , "=== Rendering of failed AST:\n"
    , renderedSource
    , "=== END OF failed AST rendering\n"
    ]
  where
    -- Render once and reuse for both halves of the report (pure, so
    -- this is observationally identical to rendering twice).
    renderedText = Render.render ElmVersion.Elm_0_17 ast
    renderedSource = Text.unpack renderedText
    reparsed = show (fmap stripRegion (Parse.parse renderedText))
-- | Attach a rendered description of the failing input (via the first
-- argument) to a property, so QuickCheck failures print a useful report.
withCounterexample describe prop input =
    counterexample (describe input) (prop input)
-- | The elm-format round-trip suite: hand-written fixtures plus
-- QuickCheck properties over generated ASTs and generated Elm source.
propertyTests :: TestTree
propertyTests =
    testGroup "example test group"
        [ testCase "simple AST round trip" $
            astToAst simpleAst
        , testProperty "rendered AST should parse as equivalent AST"
            $ withCounterexample reportFailedAst astToAst
        , testGroup "valid Elm files"
            [ testProperty "should parse"
                $ forAll Test.ElmSourceGenerators.elmModule $ withCounterexample id
                $ Text.pack >> Parse.parse >> Parse.toMaybe >> Maybe.isJust
            , testProperty "should parse to the same AST after formatting"
                $ forAll Test.ElmSourceGenerators.elmModule $ withCounterexample id
                $ Text.pack >> Parse.parse >> Parse.toMaybe
                    >> fmap astToAst
                    >> Maybe.fromMaybe (assertFailure "failed to parse original")
            ]
        , testCase "simple round trip" $
            assertStringToString "module Main exposing (..)\n\n\nfoo =\n 8\n"
        -- NOTE(review): the next two cases share a label; consider
        -- renaming one so failures are unambiguous in the report.
        , testCase "simple round trip with comments" $
            assertStringToString "module Main exposing (..)\n\n\nfoo =\n ( {- A -} 3 {- B -}, {- C -} 4 {- D -} )\n"
        , testCase "simple round trip with comments" $
            assertStringToString "module Main exposing (..)\n\n\ncommentedLiterals =\n ( {- int -} 1, {- float -} 0.1, {- char -} \'c\', {- string -} \"str\", {- boolean -} True )\n"
        ]
|
nukisman/elm-format-short
|
tests/Test/Property.hs
|
Haskell
|
bsd-3-clause
| 3,229
|
{-# LANGUAGE PatternGuards #-}
module Main where
import Tests.Testframe
import System.Environment (getArgs)
import Data.Char (toLower)
import System.Console.GetOpt
import System.Exit (ExitCode(..), exitWith)
import System.IO (hPutStrLn, stderr)
-- | Command-line flag forms: an algorithm selector and a test-variant
-- selector.  NOTE(review): not referenced by 'main' in this file.
data Flag = Alg String | TestVar String
-- | Entry point.  Two invocation forms:
--
--   @\<anything\> e \<n\>@                 — run the extreme-length test of size n
--   @\<alg\> \<testCase\> \<byteAligned\>@ — run a KAT-file test
--
-- The original indexed @args@ unconditionally (@args !! 1@, @args !! 2@),
-- crashing with an opaque exception when too few arguments were given,
-- and its error message was missing a space after the test-case name.
main :: IO ()
main = do
  args <- getArgs
  case args of
    -- Extreme mode is selected by the *second* argument being "e";
    -- the third argument is the size.
    (_ : "e" : n : _) -> runExtreme (read n)
    (algArg : caseArg : alignedArg : _) -> do
      let alg         = map toLower algArg
          testCase    = map toLower caseArg
          byteAligned = read alignedArg
      case lookup testCase testOptions of
        Just filePath -> run alg filePath byteAligned
        Nothing -> putStrLn $ testCase ++ " is not a valid test file option. \n\nOptions: shortXXX, longXXX"
    _ -> do
      hPutStrLn stderr "usage: ALG TESTCASE BYTEALIGNED  |  ALG e SIZE"
      exitWith (ExitFailure 2)
-- | Names of the hash algorithms this test driver knows about.
-- (Type signature added for consistency with 'testOptions'.)
algorithms :: [String]
algorithms = ["jh","groestl"]
-- | Maps a lower-case test-case name (e.g. @"short224"@) to the KAT
-- file that drives it, covering the short- and long-message files for
-- each digest size.  Generated rather than listed so the two halves of
-- each pair cannot drift apart.
testOptions :: [(String,FilePath)]
testOptions =
    [ (map toLower kind ++ show bits, kind ++ "MsgKAT_" ++ show bits ++ ".txt")
    | kind <- ["Short", "Long"]
    , bits <- [224, 256, 384, 512 :: Int]
    ]
|
hakoja/SHA3
|
main.hs
|
Haskell
|
bsd-3-clause
| 1,174
|
module Main where
import Conifer.Types
import Control.Monad (when)
import System.Exit (exitFailure)
import Test.HUnit
-- | Run the HUnit suite and exit non-zero when any test errors or
-- fails, so CI can detect failure.
main = do
  Counts c t e f <- runTestTT tests
  when (e > 0 || f > 0) exitFailure
-- | The full suite: JSON-decoding cases plus input-merging cases.
tests = TestList [
    TestLabel "getUserDataFromJSON_None" $ TestCase getUserDataFromJSON_None
  , TestLabel "getUserDataFromJSON_Age" $ TestCase getUserDataFromJSON_Age
  , TestLabel "getUserDataFromJSON_All" $ TestCase getUserDataFromJSON_All
  , TestLabel "argsFromInput_NoChange" $ TestCase argsFromInput_NoChange
  , TestLabel "argsFromInput_ChangeAll" $ TestCase argsFromInput_ChangeAll
  ]
-- | An empty JSON object must decode to a 'UD' with every field Nothing.
getUserDataFromJSON_None = actual @?= expected
  where actual = getUserDataFromJSON json
        expected = Just $ UD {
            udAge = Nothing
          , udNeedles = Nothing
          , udTrunkLengthIncrementPerYear = Nothing
          , udTrunkBranchLengthRatio = Nothing
          , udTrunkBranchAngles = Nothing
          , udTrunkGirth = Nothing
          , udWhorlsPerYear = Nothing
          , udWhorlSize = Nothing
          , udBranchGirth = Nothing
          , udBranchBranchLengthRatio = Nothing
          , udBranchBranchLengthRatio2 = Nothing
          -- , udBranchBranchAngle :: Angle Double
          }
        json = "\
               \{}"
-- | A JSON object with only "age" must decode to a 'UD' where only
-- 'udAge' is populated.
getUserDataFromJSON_Age = actual @?= expected
  where actual = getUserDataFromJSON json
        expected = Just $ UD {
            udAge = Just 3
          , udNeedles = Nothing
          , udTrunkLengthIncrementPerYear = Nothing
          , udTrunkBranchLengthRatio = Nothing
          , udTrunkBranchAngles = Nothing
          , udTrunkGirth = Nothing
          , udWhorlsPerYear = Nothing
          , udWhorlSize = Nothing
          , udBranchGirth = Nothing
          , udBranchBranchLengthRatio = Nothing
          , udBranchBranchLengthRatio2 = Nothing
          -- , udBranchBranchAngle :: Angle Double
          }
        json = "\
               \{\"age\":3}"
-- | A JSON object supplying every field must decode to a fully
-- populated 'UD'.  Note the key names mix styles ("age"/"needles" vs
-- "udTrunkGirth"), matching the decoder's expected wire format.
getUserDataFromJSON_All = actual @?= expected
  where actual = getUserDataFromJSON json
        expected = Just $ UD {
            udAge = Just 3
          , udNeedles = Just False
          , udTrunkLengthIncrementPerYear = Just 1.4
          , udTrunkBranchLengthRatio = Just 0.6
          , udTrunkBranchAngles = Just [0.698, 0.898, 1.31 , 0.967]
          , udTrunkGirth = Just 5.0
          , udWhorlsPerYear = Just 9
          , udWhorlSize = Just 7
          , udBranchGirth = Just 1.0
          , udBranchBranchLengthRatio = Just 1.0
          , udBranchBranchLengthRatio2 = Just 1.0
          -- , udBranchBranchAngle :: Angle Double
          }
        json = "\
               \{\"udTrunkGirth\":5,\"udWhorlsPerYear\":9,\"udTrunkBranchAngles\":[0.698,0.898,1.31,0.967],\"udTrunkBranchLengthRatio\":0.6,\"udBranchGirth\":1,\"udWhorlSize\":7,\"udBranchBranchLengthRatio\":1,\"udBranchBranchLengthRatio2\":1,\"age\":3,\"needles\":false,\"udTrunkLengthIncrementPerYear\":1.4}"
-- | An all-Nothing 'UD' must leave the tree and age parameters
-- untouched and report no needles (the Bool in the result triple).
argsFromInput_NoChange = actual @?= expected
  where actual = argsFromInput ud tp ap
        expected = (tp, ap, False)
        ud = UD {
            udAge = Nothing
          , udNeedles = Nothing
          , udTrunkLengthIncrementPerYear = Nothing
          , udTrunkBranchLengthRatio = Nothing
          , udTrunkBranchAngles = Nothing
          , udTrunkGirth = Nothing
          , udWhorlsPerYear = Nothing
          , udWhorlSize = Nothing
          , udBranchGirth = Nothing
          , udBranchBranchLengthRatio = Nothing
          , udBranchBranchLengthRatio2 = Nothing
          -- , udBranchBranchAngle :: Angle Double
          }
        -- Baseline tree parameters; expected back unchanged.
        tp = TreeParams {
            tpTrunkLengthIncrementPerYear = 1.4
          , tpTrunkBranchLengthRatio = 0.6
          , tpTrunkBranchAngles = [0.698, 0.898, 1.31 , 0.967]
          , tpTrunkGirth = 5.0
          , tpWhorlsPerYear = 9
          , tpWhorlSize = 7
          , tpBranchGirth = 1.0
          , tpBranchBranchLengthRatio = 1.0
          , tpBranchBranchLengthRatio2 = 1.0
          , tpBranchBranchAngle = 0.698
          }
        -- Baseline age parameters; expected back unchanged.
        ap = AgeParams {
            apAge = 3
          , apTrunkBranchAngleIndex = 0
          , apWhorlPhase = 0
          }
-- | A fully populated 'UD' must override every tree parameter (except
-- tpBranchBranchAngle, which 'UD' does not carry), update the age, and
-- report needles = True.
argsFromInput_ChangeAll = actual @?= expected
  where actual = argsFromInput ud tp ap
        expected = (tp', ap', n)
        -- Overrides for every user-settable field.
        ud = UD {
            udAge = Just 5
          , udNeedles = Just True
          , udTrunkLengthIncrementPerYear = Just 1.5
          , udTrunkBranchLengthRatio = Just 0.7
          , udTrunkBranchAngles = Just [0.777]
          , udTrunkGirth = Just 6.0
          , udWhorlsPerYear = Just 10
          , udWhorlSize = Just 5
          , udBranchGirth = Just 1.2
          , udBranchBranchLengthRatio = Just 1.3
          , udBranchBranchLengthRatio2 = Just 1.4
          -- , udBranchBranchAngle :: Angle Double
          }
        -- Baseline parameters before applying the overrides.
        tp = TreeParams {
            tpTrunkLengthIncrementPerYear = 1.4
          , tpTrunkBranchLengthRatio = 0.6
          , tpTrunkBranchAngles = [0.698, 0.898, 1.31 , 0.967]
          , tpTrunkGirth = 5.0
          , tpWhorlsPerYear = 9
          , tpWhorlSize = 7
          , tpBranchGirth = 1.0
          , tpBranchBranchLengthRatio = 1.0
          , tpBranchBranchLengthRatio2 = 1.0
          , tpBranchBranchAngle = 0.698
          }
        ap = AgeParams {
            apAge = 3
          , apTrunkBranchAngleIndex = 0
          , apWhorlPhase = 0
          }
        -- Expected parameters after every override is applied; note
        -- tpBranchBranchAngle keeps its baseline value.
        tp' = TreeParams {
            tpTrunkLengthIncrementPerYear = 1.5
          , tpTrunkBranchLengthRatio = 0.7
          , tpTrunkBranchAngles = [0.777]
          , tpTrunkGirth = 6.0
          , tpWhorlsPerYear = 10
          , tpWhorlSize = 5
          , tpBranchGirth = 1.2
          , tpBranchBranchLengthRatio = 1.3
          , tpBranchBranchLengthRatio2 = 1.4
          , tpBranchBranchAngle = 0.698
          }
        ap' = AgeParams {
            apAge = 5
          , apTrunkBranchAngleIndex = 0
          , apWhorlPhase = 0
          }
        n = True
|
bobgru/conifer
|
test/unit-tests/parserTests.hs
|
Haskell
|
bsd-3-clause
| 8,654
|
-- |
-- Module : Crypto.Internal.DeepSeq
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- Simple abstraction module to allow compilation without deepseq
-- by defining our own NFData class if not compiling with deepseq
-- support.
--
{-# LANGUAGE CPP #-}
module Crypto.Internal.DeepSeq
( NFData(..)
) where
#ifdef WITH_DEEPSEQ_SUPPORT
import Control.DeepSeq
#else
import Data.Word
import Data.ByteArray
-- Minimal stand-in for deepseq's NFData when compiled without it.
-- For these flat types, evaluating to WHNF with 'seq' already reaches
-- normal form, so 'rnf' simply forces the value.
class NFData a where rnf :: a -> ()
instance NFData Word8 where rnf w = w `seq` ()
instance NFData Word16 where rnf w = w `seq` ()
instance NFData Word32 where rnf w = w `seq` ()
instance NFData Word64 where rnf w = w `seq` ()
-- NOTE(review): for Bytes/ScrubbedBytes WHNF is presumably sufficient
-- because the byte contents are stored unboxed — confirm against the
-- memory package's representation.
instance NFData Bytes where rnf b = b `seq` ()
instance NFData ScrubbedBytes where rnf b = b `seq` ()
#endif
|
nomeata/cryptonite
|
Crypto/Internal/DeepSeq.hs
|
Haskell
|
bsd-3-clause
| 849
|
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveGeneric #-}
module Account.Types where
import Database.Persist
import Database.Persist.Sqlite
import Database.Persist.TH
import Data.Aeson
import GHC.Generics
{--
IGNORE THE FOLLOWING TEMPLATE HASKELL BOILERPLATE.
USING IT IS BETTER THAN WRITING THE DB CODE BY HAND,
BUT IT'S REALLY UNIDIOMATIC
AND FRAGILE.
--}
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Account
name String
wins Int
losses Int
UniqueName name
deriving Show Generic
|]
-- Generic-derived JSON (de)serialization for the persistent entity.
instance ToJSON Account
instance FromJSON Account
|
octopuscabbage/UltimateTicTacToeServer
|
src/Account/Types.hs
|
Haskell
|
bsd-3-clause
| 948
|
{-# LANGUAGE TemplateHaskell #-}
module AWS.CloudWatch.Types
where
import AWS.Lib.FromText
-- | A CloudWatch metric: its dimensions, name and namespace.
data Metric = Metric
    { metricDimensions :: [Dimension]
    , metricName :: Text
    , metricNameSpace :: Text
    }
  deriving (Show, Eq)

-- | A single name\/value dimension attached to a metric.
data Dimension = Dimension
    { dimensionName :: Text
    , dimensionValue :: Text
    }
  deriving (Show, Eq)

-- | (name, value) pair used to filter metric listings.
type DimensionFilter = (Text, Text)

-- | One aggregated datapoint of a metric.  The Maybe statistics are
-- presumably absent when that statistic was not requested — confirm
-- against the ListMetrics\/GetMetricStatistics response handling.
data Datapoint = Datapoint
    { datapointTimestamp :: UTCTime
    , datapointSampleCount :: Maybe Double
    , datapointUnit :: Text
    , datapointMinimum :: Maybe Double
    , datapointMaximum :: Maybe Double
    , datapointSum :: Maybe Double
    , datapointAverage :: Maybe Double
    }
  deriving (Show, Eq)
-- | The statistic kinds CloudWatch can aggregate.
-- 'Enum'\/'Bounded' are derived so 'allStatistics' enumerates the
-- constructors mechanically and can never fall out of sync with this
-- declaration (adding instances is backward-compatible for callers).
data Statistic
    = StatisticAverage
    | StatisticSum
    | StatisticSampleCount
    | StatisticMaximum
    | StatisticMinimum
  deriving (Show, Eq, Read, Enum, Bounded)

deriveFromText "Statistic" ["Average", "Sum", "SampleCount", "Maximum", "Minimum"]

-- | Every 'Statistic', in declaration order.
allStatistics :: [Statistic]
allStatistics = [minBound .. maxBound]
-- | How to select alarms when describing them: all alarms, by name
-- prefix, or by an explicit list of names.
data AlarmNameSpec
    = AlarmSpecNothing
    | AlarmSpecNamePrefix Text
    | AlarmSpecNames [Text]
  deriving (Show, Eq)

-- | An alarm's current state.
data StateValue
    = StateValueOk
    | StateValueAlarm
    | StateValueInsufficientData
  deriving (Show, Eq, Read)

-- Wire names differ from constructor names ("OK", "ALARM", ...).
deriveFromText "StateValue" ["OK", "ALARM", "INSUFFICIENT_DATA"]

-- | Comparison applied between the metric statistic and the threshold.
data ComparisonOperator
    = GreaterThanOrEqualToThreshold
    | GreaterThanThreshold
    | LessThanThreshold
    | LessThanOrEqualToThreshold
  deriving (Show, Eq, Read)

deriveFromText "ComparisonOperator"
    [ "GreaterThanOrEqualToThreshold"
    , "GreaterThanThreshold"
    , "LessThanThreshold"
    , "LessThanOrEqualToThreshold"
    ]
data MetricAlarm = MetricAlarm
{ metricAlarmAlarmDescription :: Maybe Text
, metricAlarmStateUpdatedTimestamp :: UTCTime
, metricAlarmInsufficientDataActions :: [Text]
, metricAlarmStateReasonData :: Maybe Text
, metricAlarmAlarmArn :: Text
, metricAlarmConfigurationUpdatedTimestamp :: UTCTime
, metricAlarmAlarmName :: Text
, metricAlarmStateValue :: StateValue
, metricAlarmPeriod :: Int
, metricAlarmOKActions :: [Text]
, metricAlarmActionsEnabled :: Bool
, metricAlarmNamespace :: Text
, metricAlarmThreshold :: Double
, metricAlarmEvaluationPeriods :: Int
, metricAlarmStatistic :: Statistic
, metricAlarmAlarmActions :: [Text]
, metricAlarmUnit :: Maybe Text
, metricAlarmStateReason :: Maybe Text
, metricAlarmDimensions :: [Dimension]
, metricAlarmComparisonOperator :: ComparisonOperator
, metricAlarmMetricName :: Text
}
deriving (Show, Eq)
data PutMetricAlarmRequest = PutMetricAlarmRequest
{ putMetricAlarmActionsEnabled :: Maybe Bool
, putMetricAlarmAlarmActions :: [Text]
, putMetricAlarmAlarmDescription :: Maybe Text
, putMetricAlarmAlarmName :: Text
, putMetricAlarmComparisonOperator :: ComparisonOperator
, putMetricAlarmDimensions :: [Dimension]
, putMetricAlarmEvaluationPeriods :: Int
, putMetricAlarmInsufficientDataActions :: [Text]
, putMetricAlarmMetricName :: Text
, putMetricAlarmNamespace :: Text
, putMetricAlarmOKActions :: [Text]
, putMetricAlarmPeriod :: Int
, putMetricAlarmStatistic :: Statistic
, putMetricAlarmThreshold :: Double
, putMetricAlarmUnit :: Maybe Text
}
deriving (Show, Eq)
data AlarmHistory = AlarmHistory
{ alarmHistoryTimestamp :: UTCTime
, alarmHistoryHistoryItemType :: HistoryType
, alarmHistoryAlarmName :: Text
, alarmHistoryHistoryData :: Text
, alarmHistoryHistorySummary :: Text
}
deriving (Show, Eq)
data HistoryType
= HistoryTypeConfigurationUpdate
| HistoryTypeStateUpdate
| HistoryTypeAction
deriving (Show, Eq, Read)
deriveFromText "HistoryType" ["ConfigurationUpdate", "StateUpdate", "Action"]
data MetricDatum = MetricDatum
{ metricDatumDimensions :: [Dimension]
, metricDatumMetricName :: Text
, metricDatumTimestamp :: Maybe UTCTime
, metricDatumUnit :: Maybe Text
, metricDatumValue :: MetricDatumValue
}
deriving (Show, Eq)
data MetricDatumValue
= MetricDatumValue Double
| MetricDatumStatisticValues StatisticSet
deriving (Show, Eq)
data StatisticSet = StatisticSet
{ statisticSetMaximum :: Double
, statisticSetMinimum :: Double
, statisticSetSampleCount :: Double
, statisticSetSum :: Double
}
deriving (Show, Eq)
|
IanConnolly/aws-sdk-fork
|
AWS/CloudWatch/Types.hs
|
Haskell
|
bsd-3-clause
| 4,515
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Windows WebDrivers</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
secdec/zap-extensions
|
addOns/webdrivers/webdriverwindows/src/main/javahelp/org/zaproxy/zap/extension/webdriverwindows/resources/help_ru_RU/helpset_ru_RU.hs
|
Haskell
|
apache-2.0
| 963
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>TLS Debug | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Mga Nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Paghahanap</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/tlsdebug/src/main/javahelp/org/zaproxy/zap/extension/tlsdebug/resources/help_fil_PH/helpset_fil_PH.hs
|
Haskell
|
apache-2.0
| 985
|
{-
Copyright 2010-2012 Cognimeta Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing permissions and limitations under the License.
-}
{-# LANGUAGE TypeFamilies #-}
module Database.Perdure.AllocCopy (
AllocCopy(..),
allocCopyBits,
module Database.Perdure.WriteBits
) where
import Prelude ()
import Cgm.Prelude
import Cgm.Data.Word
import Cgm.Data.Len
import Database.Perdure.WriteBits
import Cgm.System.Endian
-- | Types whose bit representation can be copied from a 'BitSrc' into a
-- freshly allocated pinned array.  Instances exist only for word-sized
-- element types ('Word32', 'Word64', and the native 'Word').
class Endian w => AllocCopy w where
  -- | Allocate a pinned array and copy the bits between the two source
  -- positions into it, leaving 'skip' elements of headroom at the front
  -- (which the caller may later fill in, e.g. with a header).
  allocCopyBitsSkip :: (BitSrc d, SrcDestState d ~ RealWorld) => Len w Word -> d -> d -> ST RealWorld (STPrimArray RealWorld Pinned w)
-- | Copy the bits between the two source positions into a new immutable
-- pinned array, with no headroom.  Runs the 'ST' action via 'stToIO' and
-- freezes the buffer in place ('unsafeFreezeSTPrimArray'), which is safe
-- because the buffer never escapes before freezing.
allocCopyBits :: (BitSrc d, SrcDestState d ~ RealWorld, AllocCopy w) => d -> d -> IO (PrimArray Pinned w)
allocCopyBits start end = stToIO $ allocCopyBitsSkip 0 start end >>= unsafeFreezeSTPrimArray
-- NOTE(review): 'onWordConv' appears to select its first argument when the
-- native 'Word' is 32 bits and its second when it is 64 bits -- confirm
-- against Cgm.Data.Word.  Word32 therefore delegates to the 'Word'
-- instance on 32-bit platforms and is an error on 64-bit ones.
instance AllocCopy Word32 where
  allocCopyBitsSkip skip start end = onWordConv
    (apply wordConv1 <$> allocCopyBitsSkip (retract wordLenB skip) start end)
    (error "allocCopyBitsSkip for Word32 not implemented")
-- Mirror image of the Word32 instance: only supported when the native
-- 'Word' is 64 bits wide.
instance AllocCopy Word64 where
  allocCopyBitsSkip skip start end = onWordConv
    (error "allocCopyBitsSkip for Word64 not implemented")
    (apply wordConv1 <$> allocCopyBitsSkip (retract wordLenB skip) start end)
instance AllocCopy Word where
  -- Starts writing after the specified length 'skip', which can later be used to write a header.
  allocCopyBitsSkip skip start end = do
    -- Buffer holds the 'skip' headroom plus the copied span, rounded up
    -- to whole words by 'coarsenLen'.
    wBuf <- mkArray $ coarsenLen (addedBits end start) + skip
    -- Copy the source bits after the headroom, then let
    -- 'padIncompleteWord' complete the trailing partial word.
    _ <- copyBits end start (aligned $ CWordSeq wBuf skip) >>= padIncompleteWord
    return wBuf
|
bitemyapp/perdure
|
src/Database/Perdure/AllocCopy.hs
|
Haskell
|
apache-2.0
| 2,173
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE BangPatterns, MagicHash, CPP, TypeFamilies #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
-- Using TemplateHaskell in text unconditionally is unacceptable, as
-- it's a GHC boot library. TemplateHaskellQuotes was added in 8.0, so
-- this would seem to be a problem. However, GHC's policy of only
-- needing to be able to compile itself from the last few releases
-- allows us to use full-fat TH on older versions, while using THQ for
-- GHC versions that may be used for bootstrapping.
#if __GLASGOW_HASKELL__ >= 800
{-# LANGUAGE TemplateHaskellQuotes #-}
#else
{-# LANGUAGE TemplateHaskell #-}
#endif
-- |
-- Module : Data.Text.Lazy
-- Copyright : (c) 2009, 2010, 2012 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Portability : GHC
--
-- A time and space-efficient implementation of Unicode text using
-- lists of packed arrays.
--
-- /Note/: Read below the synopsis for important notes on the use of
-- this module.
--
-- The representation used by this module is suitable for high
-- performance use and for streaming large quantities of data. It
-- provides a means to manipulate a large body of text without
-- requiring that the entire content be resident in memory.
--
-- Some operations, such as 'concat', 'append', 'reverse' and 'cons',
-- have better time complexity than their "Data.Text" equivalents, due
-- to the underlying representation being a list of chunks. For other
-- operations, lazy 'Text's are usually within a few percent of strict
-- ones, but often with better heap usage if used in a streaming
-- fashion. For data larger than available memory, or if you have
-- tight memory constraints, this module will be the only option.
--
-- This module is intended to be imported @qualified@, to avoid name
-- clashes with "Prelude" functions. eg.
--
-- > import qualified Data.Text.Lazy as L
module Data.Text.Lazy
(
-- * Fusion
-- $fusion
-- * Acceptable data
-- $replacement
-- * Types
Text
-- * Creation and elimination
, pack
, unpack
, singleton
, empty
, fromChunks
, toChunks
, toStrict
, fromStrict
, foldrChunks
, foldlChunks
-- * Basic interface
, cons
, snoc
, append
, uncons
, unsnoc
, head
, last
, tail
, init
, null
, length
, compareLength
-- * Transformations
, map
, intercalate
, intersperse
, transpose
, reverse
, replace
-- ** Case conversion
-- $case
, toCaseFold
, toLower
, toUpper
, toTitle
-- ** Justification
, justifyLeft
, justifyRight
, center
-- * Folds
, foldl
, foldl'
, foldl1
, foldl1'
, foldr
, foldr1
-- ** Special folds
, concat
, concatMap
, any
, all
, maximum
, minimum
-- * Construction
-- ** Scans
, scanl
, scanl1
, scanr
, scanr1
-- ** Accumulating maps
, mapAccumL
, mapAccumR
-- ** Generation and unfolding
, repeat
, replicate
, cycle
, iterate
, unfoldr
, unfoldrN
-- * Substrings
-- ** Breaking strings
, take
, takeEnd
, drop
, dropEnd
, takeWhile
, takeWhileEnd
, dropWhile
, dropWhileEnd
, dropAround
, strip
, stripStart
, stripEnd
, splitAt
, span
, breakOn
, breakOnEnd
, break
, group
, groupBy
, inits
, tails
-- ** Breaking into many substrings
-- $split
, splitOn
, split
, chunksOf
-- , breakSubstring
-- ** Breaking into lines and words
, lines
, words
, unlines
, unwords
-- * Predicates
, isPrefixOf
, isSuffixOf
, isInfixOf
-- ** View patterns
, stripPrefix
, stripSuffix
, commonPrefixes
-- * Searching
, filter
, find
, elem
, breakOnAll
, partition
-- , findSubstring
-- * Indexing
, index
, count
-- * Zipping and unzipping
, zip
, zipWith
-- -* Ordered text
-- , sort
) where
import Prelude (Char, Bool(..), Maybe(..), String,
Eq(..), Ord(..), Ordering(..), Read(..), Show(..),
(&&), (||), (+), (-), (.), ($), (++),
error, flip, fmap, fromIntegral, not, otherwise, quot)
import qualified Prelude as P
import Control.DeepSeq (NFData(..))
import Data.Int (Int64)
import qualified Data.List as L
import Data.Char (isSpace)
import Data.Data (Data(gfoldl, toConstr, gunfold, dataTypeOf), constrIndex,
Constr, mkConstr, DataType, mkDataType, Fixity(Prefix))
import Data.Binary (Binary(get, put))
import Data.Monoid (Monoid(..))
#if MIN_VERSION_base(4,9,0)
import Data.Semigroup (Semigroup(..))
#endif
import Data.String (IsString(..))
import qualified Data.Text as T
import qualified Data.Text.Internal as T
import qualified Data.Text.Internal.Fusion.Common as S
import qualified Data.Text.Unsafe as T
import qualified Data.Text.Internal.Lazy.Fusion as S
import Data.Text.Internal.Fusion.Types (PairS(..))
import Data.Text.Internal.Lazy.Fusion (stream, unstream)
import Data.Text.Internal.Lazy (Text(..), chunk, empty, foldlChunks,
foldrChunks, smallChunkSize)
import Data.Text.Internal (firstf, safe, text)
import Data.Text.Lazy.Encoding (decodeUtf8', encodeUtf8)
import qualified Data.Text.Internal.Functions as F
import Data.Text.Internal.Lazy.Search (indices)
#if __GLASGOW_HASKELL__ >= 702
import qualified GHC.CString as GHC
#else
import qualified GHC.Base as GHC
#endif
#if MIN_VERSION_base(4,7,0)
import qualified GHC.Exts as Exts
#endif
import GHC.Prim (Addr#)
import qualified Language.Haskell.TH.Lib as TH
import qualified Language.Haskell.TH.Syntax as TH
#if MIN_VERSION_base(4,7,0)
import Text.Printf (PrintfArg, formatArg, formatString)
#endif
-- $fusion
--
-- Most of the functions in this module are subject to /fusion/,
-- meaning that a pipeline of such functions will usually allocate at
-- most one 'Text' value.
--
-- As an example, consider the following pipeline:
--
-- > import Data.Text.Lazy as T
-- > import Data.Text.Lazy.Encoding as E
-- > import Data.ByteString.Lazy (ByteString)
-- >
-- > countChars :: ByteString -> Int
-- > countChars = T.length . T.toUpper . E.decodeUtf8
--
-- From the type signatures involved, this looks like it should
-- allocate one 'ByteString' value, and two 'Text' values. However,
-- when a module is compiled with optimisation enabled under GHC, the
-- two intermediate 'Text' values will be optimised away, and the
-- function will be compiled down to a single loop over the source
-- 'ByteString'.
--
-- Functions that can be fused by the compiler are documented with the
-- phrase \"Subject to fusion\".
-- $replacement
--
-- A 'Text' value is a sequence of Unicode scalar values, as defined
-- in
-- <http://www.unicode.org/versions/Unicode5.2.0/ch03.pdf#page=35 §3.9, definition D76 of the Unicode 5.2 standard >.
-- As such, a 'Text' cannot contain values in the range U+D800 to
-- U+DFFF inclusive. Haskell implementations admit all Unicode code
-- points
-- (<http://www.unicode.org/versions/Unicode5.2.0/ch03.pdf#page=13 §3.4, definition D10 >)
-- as 'Char' values, including code points from this invalid range.
-- This means that there are some 'Char' values that are not valid
-- Unicode scalar values, and the functions in this module must handle
-- those cases.
--
-- Within this module, many functions construct a 'Text' from one or
-- more 'Char' values. Those functions will substitute 'Char' values
-- that are not valid Unicode scalar values with the replacement
-- character \"�\" (U+FFFD). Functions that perform this
-- inspection and replacement are documented with the phrase
-- \"Performs replacement on invalid scalar values\".
--
-- (One reason for this policy of replacement is that internally, a
-- 'Text' value is represented as packed UTF-16 data. Values in the
-- range U+D800 through U+DFFF are used by UTF-16 to denote surrogate
-- code points, and so cannot be represented. The functions replace
-- invalid scalar values, instead of dropping them, as a security
-- measure. For details, see
-- <http://unicode.org/reports/tr36/#Deletion_of_Noncharacters Unicode Technical Report 36, §3.5 >.)
-- $setup
-- >>> import Data.Text
-- >>> import qualified Data.Text as T
-- >>> :seti -XOverloadedStrings
-- | Chunk-aware equality for lazy 'Text', used by the 'Eq' instance.
-- Lengths here are measured in 'Word16' code units (the internal UTF-16
-- representation), not characters.  When the leading chunks differ in
-- length, the longer one is split with 'T.takeWord16'/'T.dropWord16' so
-- comparison proceeds chunkwise without concatenating anything.
equal :: Text -> Text -> Bool
equal Empty Empty = True
equal Empty _ = False
equal _ Empty = False
equal (Chunk a as) (Chunk b bs) =
    case compare lenA lenB of
      LT -> a == (T.takeWord16 lenA b) &&
            as `equal` Chunk (T.dropWord16 lenA b) bs
      EQ -> a == b && as `equal` bs
      GT -> T.takeWord16 lenB a == b &&
            Chunk (T.dropWord16 lenB a) as `equal` bs
  where lenA = T.lengthWord16 a
        lenB = T.lengthWord16 b
instance Eq Text where
    (==) = equal
    {-# INLINE (==) #-}
instance Ord Text where
    compare = compareText
-- | Chunk-aware lexicographic comparison, used by the 'Ord' instance.
-- Walks both chunk lists in lock step, decoding one 'Char' at a time
-- with 'T.iter'; when either chunk is exhausted, the unread remainder of
-- the other is re-wrapped with 'chunk' and the comparison recurses on
-- the tails.  Comparison is by 'Char', i.e. by Unicode code point.
compareText :: Text -> Text -> Ordering
compareText Empty Empty = EQ
compareText Empty _ = LT
compareText _ Empty = GT
compareText (Chunk a0 as) (Chunk b0 bs) = outer a0 b0
  where
    outer ta@(T.Text arrA offA lenA) tb@(T.Text arrB offB lenB) = go 0 0
      where
        -- i and j are Word16 offsets into the current chunks; di/dj are
        -- the code-unit widths of the characters just decoded.
        go !i !j
          | i >= lenA = compareText as (chunk (T.Text arrB (offB+j) (lenB-j)) bs)
          | j >= lenB = compareText (chunk (T.Text arrA (offA+i) (lenA-i)) as) bs
          | a < b = LT
          | a > b = GT
          | otherwise = go (i+di) (j+dj)
          where T.Iter a di = T.iter ta i
                T.Iter b dj = T.iter tb j
-- Show/Read go through String via 'unpack'/'pack', so both are /O(n)/.
instance Show Text where
    showsPrec p ps r = showsPrec p (unpack ps) r
instance Read Text where
    readsPrec p str = [(pack x,y) | (x,y) <- readsPrec p str]
#if MIN_VERSION_base(4,9,0)
-- | Non-orphan 'Semigroup' instance only defined for
-- @base-4.9.0.0@ and later; orphan instances for older GHCs are
-- provided by
-- the [semigroups](http://hackage.haskell.org/package/semigroups)
-- package
--
-- @since 1.2.2.0
instance Semigroup Text where
    (<>) = append
#endif
instance Monoid Text where
    mempty = empty
#if MIN_VERSION_base(4,9,0)
    mappend = (<>) -- future-proof definition
#else
    mappend = append
#endif
    mconcat = concat
instance IsString Text where
    fromString = pack
#if MIN_VERSION_base(4,7,0)
-- | @since 1.2.0.0
instance Exts.IsList Text where
    type Item Text = Char
    fromList = pack
    toList = unpack
#endif
-- Forcing the chunk spine forces the whole text: the strict chunks are
-- already fully evaluated by construction.
instance NFData Text where
    rnf Empty = ()
    rnf (Chunk _ ts) = rnf ts
-- | @since 1.2.1.0
-- Serialises via UTF-8; 'get' fails in the 'Get' monad when the bytes
-- do not decode.
instance Binary Text where
    put t = put (encodeUtf8 t)
    get = do
      bs <- get
      case decodeUtf8' bs of
        P.Left exn -> P.fail (P.show exn)
        P.Right a -> P.return a
-- | This instance preserves data abstraction at the cost of inefficiency.
-- We omit reflection services for the sake of data abstraction.
--
-- This instance was created by copying the updated behavior of
-- @"Data.Text".@'Data.Text.Text'
instance Data Text where
  -- The only constructor exposed to generic programming is 'pack',
  -- applied to the unpacked String.
  gfoldl f z txt = z pack `f` (unpack txt)
  toConstr _ = packConstr
  gunfold k z c = case constrIndex c of
    1 -> k (z pack)
    _ -> error "Data.Text.Lazy.Text.gunfold"
  dataTypeOf _ = textDataType
-- | This instance has similar considerations to the 'Data' instance:
-- it preserves abstraction at the cost of inefficiency.
--
-- @since 1.2.4.0
instance TH.Lift Text where
  -- Splices as (pack "literal contents").
  lift = TH.appE (TH.varE 'pack) . TH.stringE . unpack
#if MIN_VERSION_template_haskell(2,17,0)
  liftTyped = TH.unsafeCodeCoerce . TH.lift
#elif MIN_VERSION_template_haskell(2,16,0)
  liftTyped = TH.unsafeTExpCoerce . TH.lift
#endif
#if MIN_VERSION_base(4,7,0)
-- | Only defined for @base-4.7.0.0@ and later
--
-- @since 1.2.2.0
instance PrintfArg Text where
  formatArg txt = formatString $ unpack txt
#endif
-- Reflection support for the 'Data' instance above.
packConstr :: Constr
packConstr = mkConstr textDataType "pack" [] Prefix
textDataType :: DataType
textDataType = mkDataType "Data.Text.Lazy.Text" [packConstr]
-- | /O(n)/ Convert a 'String' into a 'Text'.
--
-- Subject to fusion. Performs replacement on invalid scalar values.
pack :: String -> Text
pack = unstream . S.streamList . L.map safe
{-# INLINE [1] pack #-}
-- | /O(n)/ Convert a 'Text' into a 'String'.
-- Subject to fusion.
unpack :: Text -> String
unpack t = S.unstreamList (stream t)
{-# INLINE [1] unpack #-}
-- | /O(n)/ Convert a literal string into a Text.
--
-- NOINLINE keeps this definition intact so the literal rewrite rules
-- below have a stable target.
unpackCString# :: Addr# -> Text
unpackCString# addr# = unstream (S.streamCString# addr#)
{-# NOINLINE unpackCString# #-}
-- Rewrite string literals (which GHC desugars through
-- 'GHC.unpackCString#' / 'GHC.unpackCStringUtf8#') into direct calls of
-- 'unpackCString#', bypassing the intermediate [Char].
{-# RULES "TEXT literal" forall a.
    unstream (S.streamList (L.map safe (GHC.unpackCString# a)))
      = unpackCString# a #-}
{-# RULES "TEXT literal UTF8" forall a.
    unstream (S.streamList (L.map safe (GHC.unpackCStringUtf8# a)))
      = unpackCString# a #-}
{-# RULES "LAZY TEXT empty literal"
    unstream (S.streamList (L.map safe []))
      = Empty #-}
-- Renamed from the duplicate "LAZY TEXT empty literal": this rule
-- matches one-character literals, not empty ones, and the duplicated
-- name made the pair indistinguishable in -ddump-rule-firings output.
{-# RULES "LAZY TEXT singleton literal" forall a.
    unstream (S.streamList (L.map safe [a]))
      = Chunk (T.singleton a) Empty #-}
-- | /O(1)/ Convert a character into a Text. Subject to fusion.
-- Performs replacement on invalid scalar values.
singleton :: Char -> Text
singleton c = Chunk (T.singleton c) Empty
{-# INLINE [1] singleton #-}
-- The [~1]/[1] pair converts to the fusible stream form early and back
-- to the direct form late if no fusion happened.
{-# RULES
"LAZY TEXT singleton -> fused" [~1] forall c.
    singleton c = unstream (S.singleton c)
"LAZY TEXT singleton -> unfused" [1] forall c.
    unstream (S.singleton c) = singleton c
 #-}
-- | /O(c)/ Assemble a lazy 'Text' from a list of strict chunks by
-- folding 'chunk' over the list from the right.
fromChunks :: [T.Text] -> Text
fromChunks = L.foldr chunk Empty
-- | /O(n)/ Flatten a lazy 'Text' into its list of strict chunks,
-- preserving chunk boundaries.
toChunks :: Text -> [T.Text]
toChunks = foldrChunks (:) []
-- | /O(n)/ Convert a lazy 'Text' into a strict 'T.Text'.
-- Copies all chunks into one contiguous strict text.
toStrict :: Text -> T.Text
toStrict t = T.concat (toChunks t)
{-# INLINE [1] toStrict #-}
-- | /O(c)/ Convert a strict 'T.Text' into a lazy 'Text'.
-- No copying: the strict text becomes the single chunk.
fromStrict :: T.Text -> Text
fromStrict t = chunk t Empty
{-# INLINE [1] fromStrict #-}
-- -----------------------------------------------------------------------------
-- * Basic functions
-- | /O(1)/ Adds a character to the front of a 'Text'. Subject to fusion.
-- The new character becomes its own one-character chunk.
cons :: Char -> Text -> Text
cons c t = Chunk (T.singleton c) t
{-# INLINE [1] cons #-}
infixr 5 `cons`
-- Each fused/unfused rule pair below converts to the stream form in
-- early simplifier phases ([~1]) and back to the direct form in phase 1
-- if fusion did not eliminate the intermediate stream.
{-# RULES
"LAZY TEXT cons -> fused" [~1] forall c t.
    cons c t = unstream (S.cons c (stream t))
"LAZY TEXT cons -> unfused" [1] forall c t.
    unstream (S.cons c (stream t)) = cons c t
 #-}
-- | /O(n)/ Adds a character to the end of a 'Text'. This copies the
-- entire array in the process, unless fused. Subject to fusion.
snoc :: Text -> Char -> Text
snoc t c = foldrChunks Chunk (singleton c) t
{-# INLINE [1] snoc #-}
{-# RULES
"LAZY TEXT snoc -> fused" [~1] forall t c.
    snoc t c = unstream (S.snoc (stream t) c)
"LAZY TEXT snoc -> unfused" [1] forall t c.
    unstream (S.snoc (stream t) c) = snoc t c
 #-}
-- | /O(n\/c)/ Appends one 'Text' to another. Subject to fusion.
-- Rebuilds only the chunk spine of the first argument; the chunks
-- themselves and the second argument are shared.
append :: Text -> Text -> Text
append xs ys = foldrChunks Chunk ys xs
{-# INLINE [1] append #-}
{-# RULES
"LAZY TEXT append -> fused" [~1] forall t1 t2.
    append t1 t2 = unstream (S.append (stream t1) (stream t2))
"LAZY TEXT append -> unfused" [1] forall t1 t2.
    unstream (S.append (stream t1) (stream t2)) = append t1 t2
 #-}
-- | /O(1)/ Split off the first character of a 'Text', returning
-- 'Nothing' for the empty text. Subject to fusion.
uncons :: Text -> Maybe (Char, Text)
uncons Empty = Nothing
uncons (Chunk t ts)
    | T.compareLength t 1 == EQ = Just (T.unsafeHead t, ts)
    | otherwise = Just (T.unsafeHead t, Chunk (T.unsafeTail t) ts)
{-# INLINE uncons #-}
-- | /O(1)/ Returns the first character of a 'Text', which must be
-- non-empty. Subject to fusion.
-- NOTE(review): partial -- delegates to the stream version, which is
-- expected to error on empty input; confirm against S.head.
head :: Text -> Char
head t = S.head (stream t)
{-# INLINE head #-}
-- | /O(1)/ Returns all characters after the head of a 'Text', which
-- must be non-empty. Subject to fusion.
tail :: Text -> Text
tail (Chunk t ts) = chunk (T.tail t) ts
tail Empty = emptyError "tail"
{-# INLINE [1] tail #-}
{-# RULES
"LAZY TEXT tail -> fused" [~1] forall t.
    tail t = unstream (S.tail (stream t))
"LAZY TEXT tail -> unfused" [1] forall t.
    unstream (S.tail (stream t)) = tail t
 #-}
-- | /O(n\/c)/ Returns all but the last character of a 'Text', which must
-- be non-empty. Subject to fusion.
init :: Text -> Text
init (Chunk t0 ts0) = go t0 ts0
    -- Walk to the final chunk; only that chunk is shortened, the rest
    -- of the spine is rebuilt with the chunks shared.
    where go t (Chunk t' ts) = Chunk t (go t' ts)
          go t Empty = chunk (T.init t) Empty
init Empty = emptyError "init"
{-# INLINE [1] init #-}
{-# RULES
"LAZY TEXT init -> fused" [~1] forall t.
    init t = unstream (S.init (stream t))
"LAZY TEXT init -> unfused" [1] forall t.
    unstream (S.init (stream t)) = init t
 #-}
-- | /O(n\/c)/ Split off the last character of a 'Text', returning
-- 'Nothing' for the empty text.
--
-- * It is no faster than using 'init' and 'last'.
--
-- @since 1.2.3.0
unsnoc :: Text -> Maybe (Text, Char)
unsnoc t = case t of
    Empty -> Nothing
    Chunk _ _ -> Just (init t, last t)
{-# INLINE unsnoc #-}
-- | /O(1)/ Tests whether a 'Text' is empty or not. Subject to
-- fusion.
null :: Text -> Bool
null Empty = True
null _ = False
{-# INLINE [1] null #-}
{-# RULES
"LAZY TEXT null -> fused" [~1] forall t.
    null t = S.null (stream t)
"LAZY TEXT null -> unfused" [1] forall t.
    S.null (stream t) = null t
 #-}
-- | /O(1)/ Tests whether a 'Text' contains exactly one character.
-- Subject to fusion.  (Not exported; used internally by 'replicate'.)
isSingleton :: Text -> Bool
isSingleton = S.isSingleton . stream
{-# INLINE isSingleton #-}
-- | /O(n\/c)/ Returns the last character of a 'Text', which must be
-- non-empty. Subject to fusion.
-- Walks the chunk spine to the final chunk and takes its last char.
last :: Text -> Char
last Empty = emptyError "last"
last (Chunk t ts) = go t ts
    where go _ (Chunk t' ts') = go t' ts'
          go t' Empty = T.last t'
{-# INLINE [1] last #-}
{-# RULES
"LAZY TEXT last -> fused" [~1] forall t.
    last t = S.last (stream t)
"LAZY TEXT last -> unfused" [1] forall t.
    S.last (stream t) = last t
 #-}
-- | /O(n)/ Returns the number of characters in a 'Text'.
-- Subject to fusion.  Sums the (character) lengths of the strict chunks.
length :: Text -> Int64
length = foldlChunks go 0
    where go l t = l + fromIntegral (T.length t)
{-# INLINE [1] length #-}
{-# RULES
"LAZY TEXT length -> fused" [~1] forall t.
    length t = S.length (stream t)
"LAZY TEXT length -> unfused" [1] forall t.
    S.length (stream t) = length t
 #-}
-- | /O(n)/ Compare the count of characters in a 'Text' to a number.
-- Subject to fusion.
--
-- This function gives the same answer as comparing against the result
-- of 'length', but can short circuit if the count of characters is
-- greater than the number, and hence be more efficient.
compareLength :: Text -> Int64 -> Ordering
compareLength t n = S.compareLengthI (stream t) n
{-# INLINE [1] compareLength #-}
-- We don't apply those otherwise appealing length-to-compareLength
-- rewrite rules here, because they can change the strictness
-- properties of code.
-- | /O(n)/ 'map' @f@ @t@ is the 'Text' obtained by applying @f@ to
-- each element of @t@. Subject to fusion. Performs replacement on
-- invalid scalar values ('safe' wraps the user function).
map :: (Char -> Char) -> Text -> Text
map f t = unstream (S.map (safe . f) (stream t))
{-# INLINE [1] map #-}
-- | /O(n)/ The 'intercalate' function takes a 'Text' and a list of
-- 'Text's and concatenates the list after interspersing the first
-- argument between each element of the list.
intercalate :: Text -> [Text] -> Text
intercalate t = concat . (F.intersperse t)
{-# INLINE intercalate #-}
-- | /O(n)/ The 'intersperse' function takes a character and places it
-- between the characters of a 'Text'. Subject to fusion. Performs
-- replacement on invalid scalar values.
intersperse :: Char -> Text -> Text
intersperse c t = unstream (S.intersperse (safe c) (stream t))
{-# INLINE intersperse #-}
-- | /O(n)/ Left-justify a string to the given length, using the
-- specified fill character on the right. Subject to fusion. Performs
-- replacement on invalid scalar values.
--
-- Examples:
--
-- > justifyLeft 7 'x' "foo"    == "fooxxxx"
-- > justifyLeft 3 'x' "foobar" == "foobar"
justifyLeft :: Int64 -> Char -> Text -> Text
justifyLeft k c t
    -- Already wide enough: return the input unchanged (shared).
    | len >= k = t
    | otherwise = t `append` replicateChar (k-len) c
    where len = length t
{-# INLINE [1] justifyLeft #-}
{-# RULES
"LAZY TEXT justifyLeft -> fused" [~1] forall k c t.
    justifyLeft k c t = unstream (S.justifyLeftI k c (stream t))
"LAZY TEXT justifyLeft -> unfused" [1] forall k c t.
    unstream (S.justifyLeftI k c (stream t)) = justifyLeft k c t
 #-}
-- | /O(n)/ Pad a 'Text' on the left with the given fill character so
-- its length is at least @k@; texts already long enough are returned
-- unchanged. Performs replacement on invalid scalar values.
--
-- Examples:
--
-- > justifyRight 7 'x' "bar"    == "xxxxbar"
-- > justifyRight 3 'x' "foobar" == "foobar"
justifyRight :: Int64 -> Char -> Text -> Text
justifyRight k c t =
    let len = length t
    in if len >= k
         then t
         else replicateChar (k-len) c `append` t
{-# INLINE justifyRight #-}
-- | /O(n)/ Pad a 'Text' on both sides with the fill character until it
-- is @k@ characters long; when the total padding is odd, the extra
-- character goes on the left. Performs replacement on invalid scalar
-- values.
--
-- Examples:
--
-- > center 8 'x' "HS" = "xxxHSxxx"
center :: Int64 -> Char -> Text -> Text
center k c t =
    if len >= k
      then t
      else pad left `append` t `append` pad right
  where
    len = length t
    gap = k - len
    right = gap `quot` 2
    left = gap - right
    pad m = replicateChar m c
{-# INLINE center #-}
-- | /O(n)/ Transpose the rows and columns of a list of 'Text's. Goes
-- through plain 'String's via 'unpack'/'T.pack', so it is not very
-- efficient.
transpose :: [Text] -> [Text]
transpose =
    L.map (\s -> Chunk (T.pack s) Empty) . L.transpose . L.map unpack
-- TODO: make this fast
-- | /O(n)/ 'reverse' @t@ returns the elements of @t@ in reverse order.
-- Each chunk is reversed and the chunk list is accumulated in reverse.
reverse :: Text -> Text
reverse = foldlChunks (\acc t -> Chunk (T.reverse t) acc) Empty
-- | /O(m+n)/ Replace every non-overlapping occurrence of the needle in
-- the haystack with the replacement.
--
-- This function behaves as though it was defined as follows:
--
-- @
-- replace needle replacement haystack =
--   'intercalate' replacement ('splitOn' needle haystack)
-- @
--
-- Each occurrence is replaced exactly once, so an occurrence of the
-- needle inside the replacement is never itself replaced recursively:
--
-- > replace "oo" "foo" "oo" == "foo"
--
-- Where several occurrences of the needle overlap, only the first one
-- is replaced:
--
-- > replace "ofo" "bar" "ofofo" == "barfo"
--
-- In (unlikely) bad cases, this function's time complexity degrades
-- towards /O(n*m)/.
replace :: Text
        -- ^ @needle@ to search for. If this string is empty, an
        -- error will occur.
        -> Text
        -- ^ @replacement@ to replace @needle@ with.
        -> Text
        -- ^ @haystack@ in which to search.
        -> Text
replace needle replacement haystack =
    intercalate replacement (splitOn needle haystack)
{-# INLINE replace #-}
-- ----------------------------------------------------------------------------
-- ** Case conversions (folds)
-- $case
--
-- With Unicode text, it is incorrect to use combinators like @map
-- toUpper@ to case convert each character of a string individually.
-- Instead, use the whole-string case conversion functions from this
-- module. For correctness in different writing systems, these
-- functions may map one input character to two or three output
-- characters.
-- | /O(n)/ Convert a string to folded case. Subject to fusion.
--
-- This function is mainly useful for performing caseless (or case
-- insensitive) string comparisons.
--
-- A string @x@ is a caseless match for a string @y@ if and only if:
--
-- @toCaseFold x == toCaseFold y@
--
-- The result string may be longer than the input string, and may
-- differ from applying 'toLower' to the input string. For instance,
-- the Armenian small ligature men now (U+FB13) is case folded to the
-- bigram men now (U+0574 U+0576), while the micro sign (U+00B5) is
-- case folded to the Greek small letter mu (U+03BC) instead of
-- itself.
toCaseFold :: Text -> Text
toCaseFold t = unstream (S.toCaseFold (stream t))
{-# INLINE toCaseFold #-}
-- | /O(n)/ Convert a string to lower case, using simple case
-- conversion. Subject to fusion.
--
-- The result string may be longer than the input string. For
-- instance, the Latin capital letter I with dot above (U+0130) maps
-- to the sequence Latin small letter i (U+0069) followed by combining
-- dot above (U+0307).
toLower :: Text -> Text
toLower t = unstream (S.toLower (stream t))
{-# INLINE toLower #-}
-- | /O(n)/ Convert a string to upper case, using simple case
-- conversion. Subject to fusion.
--
-- The result string may be longer than the input string. For
-- instance, the German eszett (U+00DF) maps to the two-letter
-- sequence SS.
toUpper :: Text -> Text
toUpper t = unstream (S.toUpper (stream t))
{-# INLINE toUpper #-}
-- | /O(n)/ Convert a string to title case, using simple case
-- conversion. Subject to fusion.
--
-- The first letter of the input is converted to title case, as is
-- every subsequent letter that immediately follows a non-letter.
-- Every letter that immediately follows another letter is converted
-- to lower case.
--
-- The result string may be longer than the input string. For example,
-- the Latin small ligature fl (U+FB02) is converted to the
-- sequence Latin capital letter F (U+0046) followed by Latin small
-- letter l (U+006C).
--
-- /Note/: this function does not take language or culture specific
-- rules into account. For instance, in English, different style
-- guides disagree on whether the book name \"The Hill of the Red
-- Fox\" is correctly title cased—but this function will
-- capitalize /every/ word.
--
-- @since 1.0.0.0
toTitle :: Text -> Text
toTitle t = unstream (S.toTitle (stream t))
{-# INLINE toTitle #-}
-- | /O(n)/ 'foldl', applied to a binary operator, a starting value
-- (typically the left-identity of the operator), and a 'Text',
-- reduces the 'Text' using the binary operator, from left to right.
-- Subject to fusion.
foldl :: (a -> Char -> a) -> a -> Text -> a
foldl f z t = S.foldl f z (stream t)
{-# INLINE foldl #-}
-- | /O(n)/ A strict version of 'foldl'.
-- Subject to fusion.
foldl' :: (a -> Char -> a) -> a -> Text -> a
foldl' f z t = S.foldl' f z (stream t)
{-# INLINE foldl' #-}
-- | /O(n)/ A variant of 'foldl' that has no starting value argument,
-- and thus must be applied to a non-empty 'Text'. Subject to fusion.
-- NOTE(review): partial on empty input (delegates to the stream fold).
foldl1 :: (Char -> Char -> Char) -> Text -> Char
foldl1 f t = S.foldl1 f (stream t)
{-# INLINE foldl1 #-}
-- | /O(n)/ A strict version of 'foldl1'. Subject to fusion.
foldl1' :: (Char -> Char -> Char) -> Text -> Char
foldl1' f t = S.foldl1' f (stream t)
{-# INLINE foldl1' #-}
-- | /O(n)/ 'foldr', applied to a binary operator, a starting value
-- (typically the right-identity of the operator), and a 'Text',
-- reduces the 'Text' using the binary operator, from right to left.
-- Subject to fusion.
foldr :: (Char -> a -> a) -> a -> Text -> a
foldr f z t = S.foldr f z (stream t)
{-# INLINE foldr #-}
-- | /O(n)/ A variant of 'foldr' that has no starting value argument,
-- and thus must be applied to a non-empty 'Text'. Subject to
-- fusion.
foldr1 :: (Char -> Char -> Char) -> Text -> Char
foldr1 f t = S.foldr1 f (stream t)
{-# INLINE foldr1 #-}
-- | /O(n)/ Concatenate a list of 'Text's.
-- 'go' splices one text's chunk spine onto the concatenation of the
-- rest; any 'Empty' texts in the list simply vanish, preserving the
-- invariant that no chunk is empty.
concat :: [Text] -> Text
concat = to
    where
      go Empty css = to css
      go (Chunk c cs) css = Chunk c (go cs css)
      to [] = Empty
      to (cs:css) = go cs css
{-# INLINE concat #-}
-- | /O(n)/ Map a function over a 'Text' that results in a 'Text', and
-- concatenate the results.
concatMap :: (Char -> Text) -> Text -> Text
concatMap f = concat . foldr ((:) . f) []
{-# INLINE concatMap #-}
-- | /O(n)/ 'any' @p@ @t@ determines whether any character in the
-- 'Text' @t@ satisfies the predicate @p@. Subject to fusion.
any :: (Char -> Bool) -> Text -> Bool
any p t = S.any p (stream t)
{-# INLINE any #-}
-- | /O(n)/ 'all' @p@ @t@ determines whether all characters in the
-- 'Text' @t@ satisfy the predicate @p@. Subject to fusion.
all :: (Char -> Bool) -> Text -> Bool
all p t = S.all p (stream t)
{-# INLINE all #-}
-- | /O(n)/ 'maximum' returns the maximum value from a 'Text', which
-- must be non-empty. Subject to fusion.
maximum :: Text -> Char
maximum t = S.maximum (stream t)
{-# INLINE maximum #-}
-- | /O(n)/ 'minimum' returns the minimum value from a 'Text', which
-- must be non-empty. Subject to fusion.
minimum :: Text -> Char
minimum t = S.minimum (stream t)
{-# INLINE minimum #-}
-- | /O(n)/ 'scanl' is similar to 'foldl', but returns a list of
-- successive reduced values from the left. Subject to fusion.
-- Performs replacement on invalid scalar values.
--
-- > scanl f z [x1, x2, ...] == [z, z `f` x1, (z `f` x1) `f` x2, ...]
--
-- Note that
--
-- > last (scanl f z xs) == foldl f z xs.
scanl :: (Char -> Char -> Char) -> Char -> Text -> Text
scanl f z t = unstream (S.scanl g z (stream t))
    -- 'safe' substitutes the replacement char for invalid scalar values.
    where g a b = safe (f a b)
{-# INLINE scanl #-}
-- | /O(n)/ 'scanl1' is a variant of 'scanl' with no starting value:
-- the first character of the input seeds the scan, and an empty input
-- yields an empty result. Performs replacement on invalid scalar
-- values.
--
-- > scanl1 f [x1, x2, ...] == [x1, x1 `f` x2, ...]
scanl1 :: (Char -> Char -> Char) -> Text -> Text
scanl1 f t0 =
    case uncons t0 of
      Just (hd, rest) -> scanl f hd rest
      Nothing -> empty
{-# INLINE scanl1 #-}
-- | /O(n)/ 'scanr' is the right-to-left dual of 'scanl'. Performs
-- replacement on invalid scalar values.
--
-- > scanr f v == reverse . scanl (flip f) v . reverse
--
-- Implemented literally as that double reversal, so it forces the
-- whole input and is not incremental.
scanr :: (Char -> Char -> Char) -> Char -> Text -> Text
scanr f v = reverse . scanl g v . reverse
    where g a b = safe (f b a)
-- | /O(n)/ 'scanr1' is a variant of 'scanr' with no starting value:
-- the last character of the input seeds the scan, and an empty input
-- yields an empty result. Performs replacement on invalid scalar
-- values.
scanr1 :: (Char -> Char -> Char) -> Text -> Text
scanr1 f t = if null t then empty else scanr f (last t) (init t)
-- | /O(n)/ Like a combination of 'map' and 'foldl''. Applies a
-- function to each element of a 'Text', passing an accumulating
-- parameter from left to right, and returns a final 'Text'. Performs
-- replacement on invalid scalar values.
mapAccumL :: (a -> Char -> (a,Char)) -> a -> Text -> (a, Text)
mapAccumL f = go
  where
    -- The accumulator is threaded chunk by chunk, left to right.
    go z (Chunk c cs) = (z'', Chunk c' cs')
        where (z', c') = T.mapAccumL f z c
              (z'', cs') = go z' cs
    go z Empty = (z, Empty)
{-# INLINE mapAccumL #-}
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- a strict 'foldr'; it applies a function to each element of a
-- 'Text', passing an accumulating parameter from right to left, and
-- returning a final value of this accumulator together with the new
-- 'Text'. Performs replacement on invalid scalar values.
mapAccumR :: (a -> Char -> (a,Char)) -> a -> Text -> (a, Text)
mapAccumR f = go
  where
    -- Note the lazy knot: z' comes from the recursive call on the
    -- tail, so the accumulator flows right to left.
    go z (Chunk c cs) = (z'', Chunk c' cs')
        where (z'', c') = T.mapAccumR f z' c
              (z', cs') = go z cs
    go z Empty = (z, Empty)
{-# INLINE mapAccumR #-}
-- | @'repeat' x@ is an infinite 'Text', with @x@ the value of every
-- element.
--
-- @since 1.2.0.5
repeat :: Char -> Text
-- A self-referential chunk list: one strict chunk of the character,
-- repeated at standard chunk size, is shared by every link.
repeat c = let t = Chunk (T.replicate smallChunkSize (T.singleton c)) t
           in t
-- | /O(n*m)/ 'replicate' @n@ @t@ is a 'Text' consisting of the input
-- @t@ repeated @n@ times.
replicate :: Int64 -> Text -> Text
replicate n t
    | null t || n <= 0 = empty
    -- Single-character input is rewritten to the cheaper, fusible form
    -- (see also the rewrite rules attached to 'replicateChar').
    | isSingleton t = replicateChar n (head t)
    -- Chunks of t are shared between all n copies.
    | otherwise = concat (rep 0)
    where rep !i | i >= n = []
                 | otherwise = t : rep (i+1)
{-# INLINE [1] replicate #-}
-- | 'cycle' ties a finite, non-empty 'Text' into a circular one, or
-- equivalently, the infinite repetition of the original 'Text'.
--
-- @since 1.2.0.5
cycle :: Text -> Text
cycle Empty = emptyError "cycle"
-- The knot: the tail of the rebuilt spine is the spine itself, so the
-- chunks are shared circularly and no new text is ever allocated.
cycle t = let t' = foldrChunks Chunk t' t
          in t'
-- | @'iterate' f x@ is the infinite 'Text' of repeated applications of
-- @f@ to @x@, one single-character chunk per element:
--
-- > iterate f x == [x, f x, f (f x), ...]
--
-- @since 1.2.0.5
iterate :: (Char -> Char) -> Char -> Text
iterate f = go
  where go x = Chunk (T.singleton x) (go (f x))
-- | /O(n)/ 'replicateChar' @n@ @c@ is a 'Text' of length @n@ with @c@ the
-- value of every element. Subject to fusion.
replicateChar :: Int64 -> Char -> Text
replicateChar n c = unstream (S.replicateCharI n (safe c))
{-# INLINE replicateChar #-}
{-# RULES
"LAZY TEXT replicate/singleton -> replicateChar" [~1] forall n c.
replicate n (singleton c) = replicateChar n c
"LAZY TEXT replicate/unstream/singleton -> replicateChar" [~1] forall n c.
replicate n (unstream (S.singleton c)) = replicateChar n c
#-}
-- | /O(n)/, where @n@ is the length of the result. The 'unfoldr'
-- function is analogous to the List 'L.unfoldr'. 'unfoldr' builds a
-- 'Text' from a seed value. The function takes the element and
-- returns 'Nothing' if it is done producing the 'Text', otherwise
-- 'Just' @(a,b)@. In this case, @a@ is the next 'Char' in the
-- string, and @b@ is the seed value for further production.
-- Subject to fusion.
-- Performs replacement on invalid scalar values.
unfoldr :: (a -> Maybe (Char,a)) -> a -> Text
unfoldr f s = unstream (S.unfoldr (firstf safe . f) s)
{-# INLINE unfoldr #-}
-- | /O(n)/ Like 'unfoldr', 'unfoldrN' builds a 'Text' from a seed
-- value. However, the length of the result should be limited by the
-- first argument to 'unfoldrN'. This function is more efficient than
-- 'unfoldr' when the maximum length of the result is known and
-- correct, otherwise its performance is similar to 'unfoldr'.
-- Subject to fusion.
-- Performs replacement on invalid scalar values.
unfoldrN :: Int64 -> (a -> Maybe (Char,a)) -> a -> Text
unfoldrN n f s = unstream (S.unfoldrN n (firstf safe . f) s)
{-# INLINE unfoldrN #-}
-- | /O(n)/ 'take' @n@, applied to a 'Text', returns the prefix of the
-- 'Text' of length @n@, or the 'Text' itself if @n@ is greater than
-- the length of the Text. Subject to fusion.
take :: Int64 -> Text -> Text
take i _ | i <= 0 = Empty
take i t0 = take' i t0
where take' 0 _ = Empty
take' _ Empty = Empty
take' n (Chunk t ts)
| n < len = Chunk (T.take (fromIntegral n) t) Empty
| otherwise = Chunk t (take' (n - len) ts)
where len = fromIntegral (T.length t)
{-# INLINE [1] take #-}
{-# RULES
"LAZY TEXT take -> fused" [~1] forall n t.
take n t = unstream (S.take n (stream t))
"LAZY TEXT take -> unfused" [1] forall n t.
unstream (S.take n (stream t)) = take n t
#-}
-- | /O(n)/ 'takeEnd' @n@ @t@ returns the suffix remaining after
-- taking @n@ characters from the end of @t@.
--
-- Examples:
--
-- > takeEnd 3 "foobar" == "bar"
--
-- @since 1.1.1.0
takeEnd :: Int64 -> Text -> Text
takeEnd n t0
| n <= 0 = empty
| otherwise = takeChunk n empty . L.reverse . toChunks $ t0
where takeChunk _ acc [] = acc
takeChunk i acc (t:ts)
| i <= l = chunk (T.takeEnd (fromIntegral i) t) acc
| otherwise = takeChunk (i-l) (Chunk t acc) ts
where l = fromIntegral (T.length t)
-- | /O(n)/ 'drop' @n@, applied to a 'Text', returns the suffix of the
-- 'Text' after the first @n@ characters, or the empty 'Text' if @n@
-- is greater than the length of the 'Text'. Subject to fusion.
drop :: Int64 -> Text -> Text
drop i t0
| i <= 0 = t0
| otherwise = drop' i t0
where drop' 0 ts = ts
drop' _ Empty = Empty
drop' n (Chunk t ts)
| n < len = Chunk (T.drop (fromIntegral n) t) ts
| otherwise = drop' (n - len) ts
where len = fromIntegral (T.length t)
{-# INLINE [1] drop #-}
{-# RULES
"LAZY TEXT drop -> fused" [~1] forall n t.
drop n t = unstream (S.drop n (stream t))
"LAZY TEXT drop -> unfused" [1] forall n t.
unstream (S.drop n (stream t)) = drop n t
#-}
-- | /O(n)/ 'dropEnd' @n@ @t@ returns the prefix remaining after
-- dropping @n@ characters from the end of @t@.
--
-- Examples:
--
-- > dropEnd 3 "foobar" == "foo"
--
-- @since 1.1.1.0
dropEnd :: Int64 -> Text -> Text
dropEnd n t0
| n <= 0 = t0
| otherwise = dropChunk n . L.reverse . toChunks $ t0
where dropChunk _ [] = empty
dropChunk m (t:ts)
| m >= l = dropChunk (m-l) ts
| otherwise = fromChunks . L.reverse $
T.dropEnd (fromIntegral m) t : ts
where l = fromIntegral (T.length t)
-- | /O(n)/ 'dropWords' @n@ returns the suffix with @n@ 'Word16'
-- values dropped, or the empty 'Text' if @n@ is greater than the
-- number of 'Word16' values present.
dropWords :: Int64 -> Text -> Text
-- NOTE: @n@ counts UTF-16 code units, not 'Char's.  Callers (e.g.
-- 'splitOn') are expected to pass counts that land on a character
-- boundary; within a chunk, the slice is taken directly on the
-- underlying array via 'text' without copying.
dropWords i t0
  | i <= 0 = t0
  | otherwise = drop' i t0
  where drop' 0 ts = ts
        drop' _ Empty = Empty
        drop' n (Chunk (T.Text arr off len) ts)
          | n < len' = chunk (text arr (off+n') (len-n')) ts
          | otherwise = drop' (n - len') ts
          where len' = fromIntegral len
                n' = fromIntegral n
-- | /O(n)/ 'takeWhile', applied to a predicate @p@ and a 'Text',
-- returns the longest prefix (possibly empty) of elements that
-- satisfy @p@. Subject to fusion.
takeWhile :: (Char -> Bool) -> Text -> Text
takeWhile p t0 = takeWhile' t0
where takeWhile' Empty = Empty
takeWhile' (Chunk t ts) =
case T.findIndex (not . p) t of
Just n | n > 0 -> Chunk (T.take n t) Empty
| otherwise -> Empty
Nothing -> Chunk t (takeWhile' ts)
{-# INLINE [1] takeWhile #-}
{-# RULES
"LAZY TEXT takeWhile -> fused" [~1] forall p t.
takeWhile p t = unstream (S.takeWhile p (stream t))
"LAZY TEXT takeWhile -> unfused" [1] forall p t.
unstream (S.takeWhile p (stream t)) = takeWhile p t
#-}
-- | /O(n)/ 'takeWhileEnd', applied to a predicate @p@ and a 'Text',
-- returns the longest suffix (possibly empty) of elements that
-- satisfy @p@.
-- Examples:
--
-- > takeWhileEnd (=='o') "foo" == "oo"
--
-- @since 1.2.2.0
takeWhileEnd :: (Char -> Bool) -> Text -> Text
-- Walks the chunk list from the end (via reverse of 'toChunks').  A
-- chunk that is only partially kept (its trimmed form @t'@ is shorter
-- than @t@, compared in Word16 units) marks the boundary: earlier
-- chunks cannot contribute, so recursion stops there.
takeWhileEnd p = takeChunk empty . L.reverse . toChunks
  where takeChunk acc [] = acc
        takeChunk acc (t:ts)
          | T.lengthWord16 t' < T.lengthWord16 t
                      = chunk t' acc
          | otherwise = takeChunk (Chunk t' acc) ts
          where t' = T.takeWhileEnd p t
{-# INLINE takeWhileEnd #-}
-- | /O(n)/ 'dropWhile' @p@ @t@ returns the suffix remaining after
-- 'takeWhile' @p@ @t@. Subject to fusion.
dropWhile :: (Char -> Bool) -> Text -> Text
dropWhile p t0 = dropWhile' t0
where dropWhile' Empty = Empty
dropWhile' (Chunk t ts) =
case T.findIndex (not . p) t of
Just n -> Chunk (T.drop n t) ts
Nothing -> dropWhile' ts
{-# INLINE [1] dropWhile #-}
{-# RULES
"LAZY TEXT dropWhile -> fused" [~1] forall p t.
dropWhile p t = unstream (S.dropWhile p (stream t))
"LAZY TEXT dropWhile -> unfused" [1] forall p t.
unstream (S.dropWhile p (stream t)) = dropWhile p t
#-}
-- | /O(n)/ 'dropWhileEnd' @p@ @t@ returns the prefix remaining after
-- dropping characters that satisfy the predicate @p@ from the end of
-- @t@.
--
-- Examples:
--
-- > dropWhileEnd (=='.') "foo..." == "foo"
dropWhileEnd :: (Char -> Bool) -> Text -> Text
dropWhileEnd p = go
  where go Empty = Empty
        -- Last chunk: trim it directly; drop the chunk entirely if
        -- nothing survives.
        go (Chunk t Empty) = if T.null t'
                             then Empty
                             else Chunk t' Empty
          where t' = T.dropWhileEnd p t
        -- Inner chunk: if everything after @t@ was dropped, @t@ is
        -- now effectively the last chunk and may itself need
        -- trimming, hence the retry with @Chunk t Empty@.
        go (Chunk t ts) = case go ts of
                            Empty -> go (Chunk t Empty)
                            ts' -> Chunk t ts'
{-# INLINE dropWhileEnd #-}
-- | /O(n)/ 'dropAround' @p@ @t@ returns the substring remaining after
-- dropping characters that satisfy the predicate @p@ from both the
-- beginning and end of @t@.
dropAround :: (Char -> Bool) -> Text -> Text
dropAround p = dropWhile p . dropWhileEnd p
{-# INLINE [1] dropAround #-}
-- | /O(n)/ Remove leading white space from a string. Equivalent to:
--
-- > dropWhile isSpace
stripStart :: Text -> Text
stripStart = dropWhile isSpace
{-# INLINE stripStart #-}
-- | /O(n)/ Remove trailing white space from a string. Equivalent to:
--
-- > dropWhileEnd isSpace
stripEnd :: Text -> Text
stripEnd = dropWhileEnd isSpace
{-# INLINE [1] stripEnd #-}
-- | /O(n)/ Remove leading and trailing white space from a string.
-- Equivalent to:
--
-- > dropAround isSpace
strip :: Text -> Text
strip = dropWhile isSpace . dropWhileEnd isSpace
{-# INLINE [1] strip #-}
-- | /O(n)/ 'splitAt' @n t@ returns a pair whose first element is a
-- prefix of @t@ of length @n@, and whose second is the remainder of
-- the string. It is equivalent to @('take' n t, 'drop' n t)@.
splitAt :: Int64 -> Text -> (Text, Text)
splitAt = loop
where loop _ Empty = (empty, empty)
loop n t | n <= 0 = (empty, t)
loop n (Chunk t ts)
| n < len = let (t',t'') = T.splitAt (fromIntegral n) t
in (Chunk t' Empty, Chunk t'' ts)
| otherwise = let (ts',ts'') = loop (n - len) ts
in (Chunk t ts', ts'')
where len = fromIntegral (T.length t)
-- | /O(n)/ 'splitAtWord' @n t@ returns a strict pair whose first
-- element is a prefix of @t@ whose chunks contain @n@ 'Word16'
-- values, and whose second is the remainder of the string.
-- NOTE: @x@ counts UTF-16 code units, not 'Char's; a within-chunk
-- split slices the shared underlying array via 'text' (no copying).
splitAtWord :: Int64 -> Text -> PairS Text Text
splitAtWord _ Empty = empty :*: empty
splitAtWord x (Chunk c@(T.Text arr off len) cs)
  | y >= len = let h :*: t = splitAtWord (x-fromIntegral len) cs
               in Chunk c h :*: t
  | otherwise = chunk (text arr off y) empty :*:
                chunk (text arr (off+y) (len-y)) cs
  where y = fromIntegral x
-- | /O(n+m)/ Find the first instance of @needle@ (which must be
-- non-'null') in @haystack@. The first element of the returned tuple
-- is the prefix of @haystack@ before @needle@ is matched. The second
-- is the remainder of @haystack@, starting with the match.
--
-- Examples:
--
-- > breakOn "::" "a::b::c" ==> ("a", "::b::c")
-- > breakOn "/" "foobar" ==> ("foobar", "")
--
-- Laws:
--
-- > append prefix match == haystack
-- > where (prefix, match) = breakOn needle haystack
--
-- If you need to break a string by a substring repeatedly (e.g. you
-- want to break on every instance of a substring), use 'breakOnAll'
-- instead, as it has lower startup overhead.
--
-- This function is strict in its first argument, and lazy in its
-- second.
--
-- In (unlikely) bad cases, this function's time complexity degrades
-- towards /O(n*m)/.
breakOn :: Text -> Text -> (Text, Text)
breakOn pat src
| null pat = emptyError "breakOn"
| otherwise = case indices pat src of
[] -> (src, empty)
(x:_) -> let h :*: t = splitAtWord x src
in (h, t)
-- | /O(n+m)/ Similar to 'breakOn', but searches from the end of the string.
--
-- The first element of the returned tuple is the prefix of @haystack@
-- up to and including the last match of @needle@. The second is the
-- remainder of @haystack@, following the match.
--
-- > breakOnEnd "::" "a::b::c" ==> ("a::b::", "c")
breakOnEnd :: Text -> Text -> (Text, Text)
breakOnEnd pat src = let (a,b) = breakOn (reverse pat) (reverse src)
in (reverse b, reverse a)
{-# INLINE breakOnEnd #-}
-- | /O(n+m)/ Find all non-overlapping instances of @needle@ in
-- @haystack@. Each element of the returned list consists of a pair:
--
-- * The entire string prior to the /k/th match (i.e. the prefix)
--
-- * The /k/th match, followed by the remainder of the string
--
-- Examples:
--
-- > breakOnAll "::" ""
-- > ==> []
-- > breakOnAll "/" "a/b/c/"
-- > ==> [("a", "/b/c/"), ("a/b", "/c/"), ("a/b/c", "/")]
--
-- This function is strict in its first argument, and lazy in its
-- second.
--
-- In (unlikely) bad cases, this function's time complexity degrades
-- towards /O(n*m)/.
--
-- The @needle@ parameter may not be empty.
breakOnAll :: Text -- ^ @needle@ to search for
-> Text -- ^ @haystack@ in which to search
-> [(Text, Text)]
breakOnAll pat src
| null pat = emptyError "breakOnAll"
| otherwise = go 0 empty src (indices pat src)
where
go !n p s (x:xs) = let h :*: t = splitAtWord (x-n) s
h' = append p h
in (h',t) : go x h' t xs
go _ _ _ _ = []
-- | /O(n)/ 'break' is like 'span', but the prefix returned is over
-- elements that fail the predicate @p@.
--
-- >>> T.break (=='c') "180cm"
-- ("180","cm")
break :: (Char -> Bool) -> Text -> (Text, Text)
break p t0 = break' t0
where break' Empty = (empty, empty)
break' c@(Chunk t ts) =
case T.findIndex p t of
Nothing -> let (ts', ts'') = break' ts
in (Chunk t ts', ts'')
Just n | n == 0 -> (Empty, c)
| otherwise -> let (a,b) = T.splitAt n t
in (Chunk a Empty, Chunk b ts)
-- | /O(n)/ 'span' @p t@ splits @t@ into the longest prefix whose
-- characters all satisfy @p@, paired with the remainder of the text.
--
-- >>> T.span (=='0') "000AB"
-- ("000","AB")
span :: (Char -> Bool) -> Text -> (Text, Text)
span p t = break (\c -> not (p c)) t
{-# INLINE span #-}
-- | The 'group' function takes a 'Text' and returns a list of 'Text's
-- such that the concatenation of the result is equal to the argument.
-- Moreover, each sublist in the result contains only equal elements.
-- For example,
--
-- > group "Mississippi" = ["M","i","ss","i","ss","i","pp","i"]
--
-- It is a special case of 'groupBy', which allows the programmer to
-- supply their own equality test.
group :: Text -> [Text]
group = groupBy (==)
{-# INLINE group #-}
-- | The 'groupBy' function is the non-overloaded version of 'group'.
groupBy :: (Char -> Char -> Bool) -> Text -> [Text]
groupBy _ Empty = []
groupBy eq (Chunk t ts) = cons x ys : groupBy eq zs
where (ys,zs) = span (eq x) xs
x = T.unsafeHead t
xs = chunk (T.unsafeTail t) ts
-- | /O(n)/ Return all initial segments of the given 'Text',
-- shortest first.
inits :: Text -> [Text]
inits = (Empty :) . inits'
where inits' Empty = []
inits' (Chunk t ts) = L.map (\t' -> Chunk t' Empty) (L.tail (T.inits t))
++ L.map (Chunk t) (inits' ts)
-- | /O(n)/ Return all final segments of the given 'Text', longest
-- first.
tails :: Text -> [Text]
tails Empty = Empty : []
tails ts@(Chunk t ts')
| T.length t == 1 = ts : tails ts'
| otherwise = ts : tails (Chunk (T.unsafeTail t) ts')
-- $split
--
-- Splitting functions in this library do not perform character-wise
-- copies to create substrings; they just construct new 'Text's that
-- are slices of the original.
-- | /O(m+n)/ Break a 'Text' into pieces separated by the first 'Text'
-- argument (which cannot be an empty string), consuming the
-- delimiter. An empty delimiter is invalid, and will cause an error
-- to be raised.
--
-- Examples:
--
-- > splitOn "\r\n" "a\r\nb\r\nd\r\ne" == ["a","b","d","e"]
-- > splitOn "aaa" "aaaXaaaXaaaXaaa" == ["","X","X","X",""]
-- > splitOn "x" "x" == ["",""]
--
-- and
--
-- > intercalate s . splitOn s == id
-- > splitOn (singleton c) == split (==c)
--
-- (Note: the string @s@ to split on above cannot be empty.)
--
-- This function is strict in its first argument, and lazy in its
-- second.
--
-- In (unlikely) bad cases, this function's time complexity degrades
-- towards /O(n*m)/.
splitOn :: Text
        -- ^ String to split on. If this string is empty, an error
        -- will occur.
        -> Text
        -- ^ Input text.
        -> [Text]
splitOn pat src
  | null pat = emptyError "splitOn"
  | isSingleton pat = split (== head pat) src
  | otherwise = go 0 (indices pat src) src
  where
    -- All arithmetic below is in UTF-16 code units: 'indices' yields
    -- match offsets in code units, @i@ tracks how many units have
    -- been consumed so far, and @l@ is the pattern's total length in
    -- code units (summed over its chunks), used to skip each match.
    go _ [] cs = [cs]
    go !i (x:xs) cs = let h :*: t = splitAtWord (x-i) cs
                      in h : go (x+l) xs (dropWords l t)
    l = foldlChunks (\a (T.Text _ _ b) -> a + fromIntegral b) 0 pat
{-# INLINE [1] splitOn #-}
{-# RULES
"LAZY TEXT splitOn/singleton -> split/==" [~1] forall c t.
    splitOn (singleton c) t = split (==c) t
  #-}
-- | /O(n)/ Splits a 'Text' into components delimited by separators,
-- where the predicate returns True for a separator element. The
-- resulting components do not contain the separators. Two adjacent
-- separators result in an empty component in the output. eg.
--
-- > split (=='a') "aabbaca" == ["","","bb","c",""]
-- > split (=='a') [] == [""]
split :: (Char -> Bool) -> Text -> [Text]
split _ Empty = [Empty]
split p (Chunk t0 ts0) = comb [] (T.split p t0) ts0
where comb acc (s:[]) Empty = revChunks (s:acc) : []
comb acc (s:[]) (Chunk t ts) = comb (s:acc) (T.split p t) ts
comb acc (s:ss) ts = revChunks (s:acc) : comb [] ss ts
comb _ [] _ = impossibleError "split"
{-# INLINE split #-}
-- | /O(n)/ Splits a 'Text' into components of length @k@. The last
-- element may be shorter than the other chunks, depending on the
-- length of the input. Examples:
--
-- > chunksOf 3 "foobarbaz" == ["foo","bar","baz"]
-- > chunksOf 4 "haskell.org" == ["hask","ell.","org"]
chunksOf :: Int64 -> Text -> [Text]
chunksOf k = go
where
go t = case splitAt k t of
(a,b) | null a -> []
| otherwise -> a : go b
{-# INLINE chunksOf #-}
-- | /O(n)/ Breaks a 'Text' up into a list of 'Text's at
-- newline 'Char's. The resulting strings do not contain newlines.
lines :: Text -> [Text]
lines Empty = []
lines t = let (l,t') = break ((==) '\n') t
in l : if null t' then []
else lines (tail t')
-- | /O(n)/ Splits a 'Text' into a list of words, i.e. the non-empty
-- components delimited by white-space characters.
words :: Text -> [Text]
words t = L.filter (\w -> not (null w)) (split isSpace t)
{-# INLINE words #-}
-- | /O(n)/ Appends a terminating newline to each 'Text' and
-- concatenates the results.
unlines :: [Text] -> Text
unlines ls = concat (L.map (\l -> snoc l '\n') ls)
{-# INLINE unlines #-}
-- | /O(n)/ Joins a list of words, placing a single space character
-- between adjacent elements.
unwords :: [Text] -> Text
unwords ts = intercalate (singleton ' ') ts
{-# INLINE unwords #-}
-- | /O(n)/ The 'isPrefixOf' function takes two 'Text's and returns
-- 'True' iff the first is a prefix of the second. Subject to fusion.
isPrefixOf :: Text -> Text -> Bool
isPrefixOf Empty _ = True
isPrefixOf _ Empty = False
isPrefixOf (Chunk x xs) (Chunk y ys)
| lx == ly = x == y && isPrefixOf xs ys
| lx < ly = x == yh && isPrefixOf xs (Chunk yt ys)
| otherwise = xh == y && isPrefixOf (Chunk xt xs) ys
where (xh,xt) = T.splitAt ly x
(yh,yt) = T.splitAt lx y
lx = T.length x
ly = T.length y
{-# INLINE [1] isPrefixOf #-}
{-# RULES
"LAZY TEXT isPrefixOf -> fused" [~1] forall s t.
isPrefixOf s t = S.isPrefixOf (stream s) (stream t)
"LAZY TEXT isPrefixOf -> unfused" [1] forall s t.
S.isPrefixOf (stream s) (stream t) = isPrefixOf s t
#-}
-- | /O(n)/ 'isSuffixOf' @x y@ returns 'True' iff @x@ is a suffix of
-- @y@, implemented by reversing both strings and testing the prefix.
isSuffixOf :: Text -> Text -> Bool
isSuffixOf x y = isPrefixOf (reverse x) (reverse y)
{-# INLINE isSuffixOf #-}
-- TODO: a better implementation
-- | /O(n+m)/ The 'isInfixOf' function takes two 'Text's and returns
-- 'True' iff the first is contained, wholly and intact, anywhere
-- within the second.
--
-- This function is strict in its first argument, and lazy in its
-- second.
--
-- In (unlikely) bad cases, this function's time complexity degrades
-- towards /O(n*m)/.
isInfixOf :: Text -> Text -> Bool
isInfixOf needle haystack
| null needle = True
| isSingleton needle = S.elem (head needle) . S.stream $ haystack
| otherwise = not . L.null . indices needle $ haystack
{-# INLINE [1] isInfixOf #-}
{-# RULES
"LAZY TEXT isInfixOf/singleton -> S.elem/S.stream" [~1] forall n h.
isInfixOf (singleton n) h = S.elem n (S.stream h)
#-}
-------------------------------------------------------------------------------
-- * View patterns
-- | /O(n)/ Return the suffix of the second string if its prefix
-- matches the entire first string.
--
-- Examples:
--
-- > stripPrefix "foo" "foobar" == Just "bar"
-- > stripPrefix "" "baz" == Just "baz"
-- > stripPrefix "foo" "quux" == Nothing
--
-- This is particularly useful with the @ViewPatterns@ extension to
-- GHC, as follows:
--
-- > {-# LANGUAGE ViewPatterns #-}
-- > import Data.Text.Lazy as T
-- >
-- > fnordLength :: Text -> Int
-- > fnordLength (stripPrefix "fnord" -> Just suf) = T.length suf
-- > fnordLength _ = -1
stripPrefix :: Text -> Text -> Maybe Text
stripPrefix p t
| null p = Just t
| otherwise = case commonPrefixes p t of
Just (_,c,r) | null c -> Just r
_ -> Nothing
-- | /O(n)/ Find the longest non-empty common prefix of two strings
-- and return it, along with the suffixes of each string at which they
-- no longer match.
--
-- If the strings do not have a common prefix or either one is empty,
-- this function returns 'Nothing'.
--
-- Examples:
--
-- > commonPrefixes "foobar" "fooquux" == Just ("foo","bar","quux")
-- > commonPrefixes "veeble" "fetzer" == Nothing
-- > commonPrefixes "" "baz" == Nothing
commonPrefixes :: Text -> Text -> Maybe (Text,Text,Text)
commonPrefixes Empty _ = Nothing
commonPrefixes _ Empty = Nothing
commonPrefixes a0 b0   = go a0 b0 []
  where
    -- Walk the two chunk lists in lockstep, accumulating matched
    -- prefix chunks (in reverse order) in @ps@.  When one chunk is
    -- exhausted, the leftover of the other is pushed back so matching
    -- continues across chunk boundaries.
    go t0@(Chunk x xs) t1@(Chunk y ys) ps
        = case T.commonPrefixes x y of
            Just (p,a,b)
              | T.null a  -> go xs (chunk b ys) (p:ps)
              | T.null b  -> go (chunk a xs) ys (p:ps)
              | otherwise -> result (p:ps) (chunk a xs) (chunk b ys)
            Nothing       -> result ps t0 t1
    go t0 t1 ps = result ps t0 t1
    -- BUG FIX: the previous version returned @Just (empty, a0, b0)@
    -- for non-empty inputs with no common prefix, contradicting the
    -- documented contract (e.g. @commonPrefixes "veeble" "fetzer" ==
    -- Nothing@).  An empty accumulator now yields 'Nothing'.
    result [] _  _  = Nothing
    result ps t0 t1 = Just (fromChunks (L.reverse ps), t0, t1)
-- | /O(n)/ Return the prefix of the second string if its suffix
-- matches the entire first string.
--
-- Examples:
--
-- > stripSuffix "bar" "foobar" == Just "foo"
-- > stripSuffix "" "baz" == Just "baz"
-- > stripSuffix "foo" "quux" == Nothing
--
-- This is particularly useful with the @ViewPatterns@ extension to
-- GHC, as follows:
--
-- > {-# LANGUAGE ViewPatterns #-}
-- > import Data.Text.Lazy as T
-- >
-- > quuxLength :: Text -> Int
-- > quuxLength (stripSuffix "quux" -> Just pre) = T.length pre
-- > quuxLength _ = -1
stripSuffix :: Text -> Text -> Maybe Text
stripSuffix p t = reverse `fmap` stripPrefix (reverse p) (reverse t)
-- | /O(n)/ Keep only those characters of a 'Text' that satisfy the
-- given predicate.
filter :: (Char -> Bool) -> Text -> Text
filter p = unstream . S.filter p . stream
{-# INLINE filter #-}
-- | /O(n)/ The 'find' function takes a predicate and a 'Text', and
-- returns the first element in matching the predicate, or 'Nothing'
-- if there is no such element. Subject to fusion.
find :: (Char -> Bool) -> Text -> Maybe Char
find p t = S.findBy p (stream t)
{-# INLINE find #-}
-- | /O(n)/ 'elem' @c t@ tests whether the character @c@ occurs
-- anywhere in the 'Text' @t@.
elem :: Char -> Text -> Bool
elem c t = S.any (c ==) (stream t)
{-# INLINE elem #-}
-- | /O(n)/ Split a 'Text' into the characters that satisfy the
-- predicate and those that do not; i.e.
--
-- > partition p t == (filter p t, filter (not . p) t)
partition :: (Char -> Bool) -> Text -> (Text, Text)
partition p t = (matching, rest)
  where matching = filter p t
        rest     = filter (\c -> not (p c)) t
{-# INLINE partition #-}
-- | /O(n)/ 'Text' index (subscript) operator, starting from 0.
-- Subject to fusion.
index :: Text -> Int64 -> Char
index t n = S.index (stream t) n
{-# INLINE index #-}
-- | /O(n+m)/ The 'count' function returns the number of times the
-- query string appears in the given 'Text'. An empty query string is
-- invalid, and will cause an error to be raised.
--
-- In (unlikely) bad cases, this function's time complexity degrades
-- towards /O(n*m)/.
count :: Text -> Text -> Int64
count pat src
| null pat = emptyError "count"
| otherwise = go 0 (indices pat src)
where go !n [] = n
go !n (_:xs) = go (n+1) xs
{-# INLINE [1] count #-}
{-# RULES
"LAZY TEXT count/singleton -> countChar" [~1] forall c t.
count (singleton c) t = countChar c t
#-}
-- | /O(n)/ The 'countChar' function returns the number of times the
-- query element appears in the given 'Text'. Subject to fusion.
countChar :: Char -> Text -> Int64
countChar c t = S.countChar c (stream t)
-- | /O(n)/ 'zip' takes two 'Text's and returns a list of
-- corresponding pairs of bytes. If one input 'Text' is short,
-- excess elements of the longer 'Text' are discarded. This is
-- equivalent to a pair of 'unpack' operations.
zip :: Text -> Text -> [(Char,Char)]
zip a b = S.unstreamList $ S.zipWith (,) (stream a) (stream b)
{-# INLINE [0] zip #-}
-- | /O(n)/ 'zipWith' generalises 'zip' by combining corresponding
-- characters with the supplied function rather than tupling them.
-- Performs replacement on invalid scalar values.
zipWith :: (Char -> Char -> Char) -> Text -> Text -> Text
zipWith f t1 t2 = unstream (S.zipWith combine (stream t1) (stream t2))
  where combine a b = safe (f a b)
{-# INLINE [0] zipWith #-}
-- | Build a lazy 'Text' from a list of strict chunks given in
-- reverse order, using the 'chunk' smart constructor.
revChunks :: [T.Text] -> Text
revChunks = L.foldl' (\ts t -> chunk t ts) Empty
-- | Raise an error reporting that @fun@ received invalid empty input.
emptyError :: String -> a
emptyError fun = P.error msg
  where msg = "Data.Text.Lazy." ++ fun ++ ": empty input"
-- | Raise an error reporting an internal invariant violation in @fun@.
impossibleError :: String -> a
impossibleError fun = P.error msg
  where msg = "Data.Text.Lazy." ++ fun ++ ": impossible case"
|
bgamari/text
|
src/Data/Text/Lazy.hs
|
Haskell
|
bsd-2-clause
| 58,598
|
{-# LANGUAGE TypeSynonymInstances #-}
module Foundation.Math.Trigonometry
( Trigonometry(..)
) where
import Basement.Compat.Base
import qualified Prelude
-- | Method to support basic trigonometric functions
-- NOTE(review): this class gathers the trigonometric subset of
-- Prelude's 'Floating' operations; the instances below delegate
-- directly to 'Prelude'.
class Trigonometry a where
    -- | the famous pi value
    pi :: a
    -- | sine
    sin :: a -> a
    -- | cosine
    cos :: a -> a
    -- | tan
    tan :: a -> a
    -- | inverse sine (sine-1)
    asin :: a -> a
    -- | inverse cosine (cosine-1)
    acos :: a -> a
    -- | inverse tangent (tangent-1)
    atan :: a -> a
    -- | hyperbolic sine
    sinh :: a -> a
    -- | hyperbolic cosine
    cosh :: a -> a
    -- | hyperbolic tangent
    tanh :: a -> a
    -- | inverse hyperbolic sine (sine-1)
    asinh :: a -> a
    -- | inverse hyperbolic cosine (cosine-1)
    acosh :: a -> a
    -- | inverse hyperbolic tangent (tangent-1)
    atanh :: a -> a
-- | Delegates every operation to the 'Prelude' implementation for 'Float'.
instance Trigonometry Float where
    pi = Prelude.pi
    sin = Prelude.sin
    cos = Prelude.cos
    tan = Prelude.tan
    asin = Prelude.asin
    acos = Prelude.acos
    atan = Prelude.atan
    sinh = Prelude.sinh
    cosh = Prelude.cosh
    tanh = Prelude.tanh
    asinh = Prelude.asinh
    acosh = Prelude.acosh
    atanh = Prelude.atanh
-- | Delegates every operation to the 'Prelude' implementation for 'Double'.
instance Trigonometry Double where
    pi = Prelude.pi
    sin = Prelude.sin
    cos = Prelude.cos
    tan = Prelude.tan
    asin = Prelude.asin
    acos = Prelude.acos
    atan = Prelude.atan
    sinh = Prelude.sinh
    cosh = Prelude.cosh
    tanh = Prelude.tanh
    asinh = Prelude.asinh
    acosh = Prelude.acosh
    atanh = Prelude.atanh
|
vincenthz/hs-foundation
|
foundation/Foundation/Math/Trigonometry.hs
|
Haskell
|
bsd-3-clause
| 1,487
|
{-# LANGUAGE PatternGuards, CPP, ForeignFunctionInterface #-}
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2004-2009.
--
-- Package management tool
--
-----------------------------------------------------------------------------
module Main (main) where
import Distribution.InstalledPackageInfo.Binary()
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.ModuleName hiding (main)
import Distribution.InstalledPackageInfo
import Distribution.Compat.ReadP
import Distribution.ParseUtils
import Distribution.Package hiding (depends)
import Distribution.Text
import Distribution.Version
import System.FilePath as FilePath
import qualified System.FilePath.Posix as FilePath.Posix
import System.Process
import System.Directory ( getAppUserDataDirectory, createDirectoryIfMissing,
getModificationTime )
import Text.Printf
import Prelude
import System.Console.GetOpt
import qualified Control.Exception as Exception
import Data.Maybe
import Data.Char ( isSpace, toLower )
import Data.Ord (comparing)
import Control.Applicative (Applicative(..))
import Control.Monad
import System.Directory ( doesDirectoryExist, getDirectoryContents,
doesFileExist, renameFile, removeFile,
getCurrentDirectory )
import System.Exit ( exitWith, ExitCode(..) )
import System.Environment ( getArgs, getProgName, getEnv )
import System.IO
import System.IO.Error
import Data.List
import Control.Concurrent
import qualified Data.ByteString.Lazy as B
import qualified Data.Binary as Bin
import qualified Data.Binary.Get as Bin
-- Haste-specific
import Haste.Environment
import Haste.Version
import System.Info (os)
import qualified Control.Shell as Sh
#if defined(mingw32_HOST_OS)
-- mingw32 needs these for getExecDir
import Foreign
import Foreign.C
#endif
#ifdef mingw32_HOST_OS
import GHC.ConsoleHandler
#else
import System.Posix hiding (fdToHandle)
#endif
#if defined(GLOB)
import qualified System.Info(os)
#endif
#if !defined(mingw32_HOST_OS) && !defined(BOOTSTRAPPING)
import System.Console.Terminfo as Terminfo
#endif
#ifdef mingw32_HOST_OS
# if defined(i386_HOST_ARCH)
# define WINDOWS_CCONV stdcall
# elif defined(x86_64_HOST_ARCH)
# define WINDOWS_CCONV ccall
# else
# error Unknown mingw32 arch
# endif
#endif
-- -----------------------------------------------------------------------------
-- Entry point
-- | Program entry point: the Haste-specific @relocate@ command is
-- handled before normal option parsing; everything else goes through
-- GetOpt and is dispatched by 'runit'.
main :: IO ()
main = do
  argv <- getArgs
  -- "relocate" bypasses flag handling entirely and exits on its own.
  case argv of
    ["relocate", pkg] ->
      Sh.shell (relocate packages pkg) >> exitWith ExitSuccess
    _ ->
      return ()
  case getOpt Permute (flags ++ deprecFlags) argv of
    (cli, _, [])
      | FlagHelp `elem` cli -> do
          prog <- getProgramName
          bye (usageInfo (usageHeader prog) flags)
      | FlagVersion `elem` cli ->
          bye ourCopyright
    (cli, nonopts, []) ->
      either die (\v -> runit v cli nonopts) (getVerbosity Normal cli)
    (_, _, errors) -> do
      prog <- getProgramName
      die (concat errors ++ shortUsage prog)
  where
    -- Package-db selection flags handed to the relocation shell action.
    packages = ["--global-package-db=" ++ pkgSysDir,
                "--package-db=" ++ pkgSysDir,
                "--package-db=" ++ pkgUserDir]
-- -----------------------------------------------------------------------------
-- Command-line syntax
-- | Command-line flags accepted by the tool; produced by GetOpt via
-- 'flags' and 'deprecFlags' and interpreted in 'main' / 'runit'.
data Flag
  = FlagUser                     -- ^ act on the user package database
  | FlagGlobal                   -- ^ act on the global package database
  | FlagHelp                     -- ^ print usage and exit
  | FlagVersion                  -- ^ print version banner and exit
  | FlagConfig FilePath          -- ^ use the given package database (file or dir)
  | FlagGlobalConfig FilePath    -- ^ location of the global package database
  | FlagForce                    -- ^ ignore missing deps, dirs and libraries
  | FlagForceFiles               -- ^ ignore missing dirs/libraries only
  | FlagAutoGHCiLibs             -- ^ build GHCi libs on register (deprecated)
  | FlagExpandEnvVars            -- ^ expand ${name}-style env vars in input
  | FlagExpandPkgroot            -- ^ expand ${pkgroot}-relative paths in output
  | FlagNoExpandPkgroot          -- ^ preserve ${pkgroot}-relative paths in output
  | FlagSimpleOutput             -- ^ easy-to-parse output for some commands
  | FlagNamesOnly                -- ^ with list --simple-output: names only
  | FlagIgnoreCase               -- ^ case-insensitive substring matching
  | FlagNoUserDb                 -- ^ never read the user package database
  | FlagVerbosity (Maybe String) -- ^ verbosity level (0-2)
  deriving Eq
-- | The advertised command-line options, in the order shown by --help.
flags :: [OptDescr Flag]
flags = [
  Option [] ["user"] (NoArg FlagUser)
        "use the current user's package database",
  Option [] ["global"] (NoArg FlagGlobal)
        "use the global package database",
  Option ['f'] ["package-db"] (ReqArg FlagConfig "FILE/DIR")
        "use the specified package database",
  Option [] ["package-conf"] (ReqArg FlagConfig "FILE/DIR")
        "use the specified package database (DEPRECATED)",
  Option [] ["global-package-db"] (ReqArg FlagGlobalConfig "DIR")
        "location of the global package database",
  Option [] ["no-user-package-db"] (NoArg FlagNoUserDb)
        "never read the user package database",
  Option [] ["no-user-package-conf"] (NoArg FlagNoUserDb)
        "never read the user package database (DEPRECATED)",
  Option [] ["force"] (NoArg FlagForce)
        "ignore missing dependencies, directories, and libraries",
  Option [] ["force-files"] (NoArg FlagForceFiles)
        "ignore missing directories and libraries only",
  Option ['g'] ["auto-ghci-libs"] (NoArg FlagAutoGHCiLibs)
        "automatically build libs for GHCi (with register)",
  Option [] ["expand-env-vars"] (NoArg FlagExpandEnvVars)
        "expand environment variables (${name}-style) in input package descriptions",
  Option [] ["expand-pkgroot"] (NoArg FlagExpandPkgroot)
        "expand ${pkgroot}-relative paths to absolute in output package descriptions",
  Option [] ["no-expand-pkgroot"] (NoArg FlagNoExpandPkgroot)
        "preserve ${pkgroot}-relative paths in output package descriptions",
  Option ['?'] ["help"] (NoArg FlagHelp)
        "display this help and exit",
  Option ['V'] ["version"] (NoArg FlagVersion)
        "output version information and exit",
  Option [] ["simple-output"] (NoArg FlagSimpleOutput)
        "print output in easy-to-parse format for some commands",
  Option [] ["names-only"] (NoArg FlagNamesOnly)
        "only print package names, not versions; can only be used with list --simple-output",
  Option [] ["ignore-case"] (NoArg FlagIgnoreCase)
        "ignore case for substring matching",
  Option ['v'] ["verbose"] (OptArg FlagVerbosity "Verbosity")
        "verbosity level (0-2, default 1)"
  ]
-- | Output verbosity, ordered quietest to loudest (the Ord instance is
-- relied on by comparisons like @verbosity > Normal@).
data Verbosity = Silent | Normal | Verbose
  deriving (Show, Eq, Ord)
-- | Fold the -v flags left to right (the rightmost occurrence wins).
-- A bare -v means Verbose; "0"/"1"/"2" map to the three levels; any
-- other argument is reported as an error.
getVerbosity :: Verbosity -> [Flag] -> Either String Verbosity
getVerbosity acc []            = Right acc
getVerbosity acc (flag : rest) =
  case flag of
    FlagVerbosity Nothing    -> getVerbosity Verbose rest
    FlagVerbosity (Just "0") -> getVerbosity Silent  rest
    FlagVerbosity (Just "1") -> getVerbosity Normal  rest
    FlagVerbosity (Just "2") -> getVerbosity Verbose rest
    FlagVerbosity v          -> Left ("Bad verbosity: " ++ show v)
    _                        -> getVerbosity acc rest
-- | Deprecated options: still parsed (for backwards compatibility) but
-- deliberately omitted from the --help listing.
deprecFlags :: [OptDescr Flag]
deprecFlags = [
        -- put deprecated flags here
  ]
-- | Banner printed for --version; the version shown is the GHC version
-- Haste was built against ('ghcVersion' from Haste.Version).
ourCopyright :: String
ourCopyright = "Haste package manager version " ++ showVersion ghcVersion ++ "\n"
-- | One-line pointer to --help, appended to error messages.
shortUsage :: String -> String
shortUsage prog = concat ["For usage information see '", prog, " --help'."]
-- | The long --help text.  Occurrences of \"$p\" are replaced with the
-- program name via 'substProg'.
--
-- Fix: the two \"$p dot\" example lines previously lacked trailing
-- \"\\n\", so three lines of help output ran together on one line.
usageHeader :: String -> String
usageHeader prog = substProg prog $
  "Usage:\n" ++
  "  $p init {path}\n" ++
  "    Create and initialise a package database at the location {path}.\n" ++
  "    Packages can be registered in the new database using the register\n" ++
  "    command with --package-db={path}. To use the new database with GHC,\n" ++
  "    use GHC's -package-db flag.\n" ++
  "\n" ++
  "  $p register {filename | -}\n" ++
  "    Register the package using the specified installed package\n" ++
  "    description. The syntax for the latter is given in the $p\n" ++
  "    documentation. The input file should be encoded in UTF-8.\n" ++
  "\n" ++
  "  $p update {filename | -}\n" ++
  "    Register the package, overwriting any other package with the\n" ++
  "    same name. The input file should be encoded in UTF-8.\n" ++
  "\n" ++
  "  $p unregister {pkg-id}\n" ++
  "    Unregister the specified package.\n" ++
  "\n" ++
  "  $p expose {pkg-id}\n" ++
  "    Expose the specified package.\n" ++
  "\n" ++
  "  $p hide {pkg-id}\n" ++
  "    Hide the specified package.\n" ++
  "\n" ++
  "  $p trust {pkg-id}\n" ++
  "    Trust the specified package.\n" ++
  "\n" ++
  "  $p distrust {pkg-id}\n" ++
  "    Distrust the specified package.\n" ++
  "\n" ++
  "  $p list [pkg]\n" ++
  "    List registered packages in the global database, and also the\n" ++
  "    user database if --user is given. If a package name is given\n" ++
  "    all the registered versions will be listed in ascending order.\n" ++
  "    Accepts the --simple-output flag.\n" ++
  "\n" ++
  "  $p dot\n" ++
  "    Generate a graph of the package dependencies in a form suitable\n" ++
  "    for input for the graphviz tools. For example, to generate a PDF\n" ++
  "    of the dependency graph: ghc-pkg dot | tred | dot -Tpdf >pkgs.pdf\n" ++
  "\n" ++
  "  $p find-module {module}\n" ++
  "    List registered packages exposing module {module} in the global\n" ++
  "    database, and also the user database if --user is given.\n" ++
  "    All the registered versions will be listed in ascending order.\n" ++
  "    Accepts the --simple-output flag.\n" ++
  "\n" ++
  "  $p latest {pkg-id}\n" ++
  "    Prints the highest registered version of a package.\n" ++
  "\n" ++
  "  $p check\n" ++
  "    Check the consistency of package dependencies and list broken packages.\n" ++
  "    Accepts the --simple-output flag.\n" ++
  "\n" ++
  "  $p describe {pkg}\n" ++
  "    Give the registered description for the specified package. The\n" ++
  "    description is returned in precisely the syntax required by $p\n" ++
  "    register.\n" ++
  "\n" ++
  "  $p field {pkg} {field}\n" ++
  "    Extract the specified field of the package description for the\n" ++
  "    specified package. Accepts comma-separated multiple fields.\n" ++
  "\n" ++
  "  $p dump\n" ++
  "    Dump the registered description for every package. This is like\n" ++
  "    \"ghc-pkg describe '*'\", except that it is intended to be used\n" ++
  "    by tools that parse the results, rather than humans. The output is\n" ++
  "    always encoded in UTF-8, regardless of the current locale.\n" ++
  "\n" ++
  "  $p recache\n" ++
  "    Regenerate the package database cache. This command should only be\n" ++
  "    necessary if you added a package to the database by dropping a file\n" ++
  "    into the database directory manually. By default, the global DB\n" ++
  "    is recached; to recache a different DB use --user or --package-db\n" ++
  "    as appropriate.\n" ++
  "\n" ++
  " Substring matching is supported for {module} in find-module and\n" ++
  " for {pkg} in list, describe, and field, where a '*' indicates\n" ++
  " open substring ends (prefix*, *suffix, *infix*).\n" ++
  "\n" ++
  "  When asked to modify a database (register, unregister, update,\n" ++
  "  hide, expose, and also check), ghc-pkg modifies the global database by\n" ++
  "  default. Specifying --user causes it to act on the user database,\n" ++
  "  or --package-db can be used to act on another database\n" ++
  "  entirely. When multiple of these options are given, the rightmost\n" ++
  "  one is used as the database to act upon.\n" ++
  "\n" ++
  "  Commands that query the package database (list, tree, latest, describe,\n" ++
  "  field) operate on the list of databases specified by the flags\n" ++
  "  --user, --global, and --package-db. If none of these flags are\n" ++
  "  given, the default is --global --user.\n" ++
  "\n" ++
  " The following optional flags are also accepted:\n"
-- | Replace every occurrence of the literal two-character sequence
-- \"$p\" in the template string with the program name.
substProg :: String -> String -> String
substProg prog = go
  where
    go ('$':'p':rest) = prog ++ go rest
    go (c:rest)       = c : go rest
    go []             = []
-- -----------------------------------------------------------------------------
-- Do the business
-- | How aggressively validation problems may be overridden, ordered
-- from "not at all" to "cannot be forced" (Ord is used for comparison).
data Force = NoForce | ForceFiles | ForceAll | CannotForce
  deriving (Eq,Ord)
-- | A package selector: an exact identifier, or a substring pattern
-- (kept for display) paired with its compiled match predicate.
data PackageArg = Id PackageIdentifier | Substring String (String->Bool)
-- | Dispatch a parsed command line: interpret mode flags, then match
-- the non-option arguments against the known commands and invoke the
-- corresponding implementation.  Missing or unknown commands die with
-- a short usage message.
runit :: Verbosity -> [Flag] -> [String] -> IO ()
runit verbosity cli nonopts = do
  installSignalHandlers -- catch ^C and clean up
  prog <- getProgramName
  let
        -- how forcefully to override validation problems
        force
          | FlagForce `elem` cli        = ForceAll
          | FlagForceFiles `elem` cli   = ForceFiles
          | otherwise                   = NoForce
        auto_ghci_libs = FlagAutoGHCiLibs `elem` cli
        expand_env_vars= FlagExpandEnvVars `elem` cli
        -- Nothing = no explicit preference; the rightmost flag wins.
        mexpand_pkgroot= foldl' accumExpandPkgroot Nothing cli
          where accumExpandPkgroot _ FlagExpandPkgroot   = Just True
                accumExpandPkgroot _ FlagNoExpandPkgroot = Just False
                accumExpandPkgroot x _                   = x
        -- split a comma-separated field list for the "field" command
        splitFields fields = unfoldr splitComma (',':fields)
          where splitComma "" = Nothing
                splitComma fs = Just $ break (==',') (tail fs)
        -- Interpret '*'-style substring patterns; Nothing means the
        -- argument is a plain name with no wildcard.
        substringCheck :: String -> Maybe (String -> Bool)
        substringCheck ""    = Nothing
        substringCheck "*"   = Just (const True)
        substringCheck [_]   = Nothing
        substringCheck (h:t) =
          case (h, init t, last t) of
            ('*',s,'*') -> Just (isInfixOf (f s) . f)
            ('*',_, _ ) -> Just (isSuffixOf (f t) . f)
            ( _ ,s,'*') -> Just (isPrefixOf (f (h:s)) . f)
            _           -> Nothing
          where f | FlagIgnoreCase `elem` cli = map toLower
                  | otherwise                 = id
#if defined(GLOB)
        glob x | System.Info.os=="mingw32" = do
          -- glob echoes its argument, after win32 filename globbing
          (_,o,_,_) <- runInteractiveCommand ("glob "++x)
          txt <- hGetContents o
          return (read txt)
        glob x | otherwise = return [x]
#endif
  --
  -- first, parse the command
  case nonopts of
#if defined(GLOB)
    -- dummy command to demonstrate usage and permit testing
    -- without messing things up; use glob to selectively enable
    -- windows filename globbing for file parameters
    -- register, update, FlagGlobalConfig, FlagConfig; others?
    ["glob", filename] -> do
        print filename
        glob filename >>= print
#endif
    ["init", filename] ->
        initPackageDB filename verbosity cli
    ["register", filename] ->
        registerPackage filename verbosity cli
                        auto_ghci_libs expand_env_vars False force
    ["update", filename] ->
        registerPackage filename verbosity cli
                        auto_ghci_libs expand_env_vars True force
    ["unregister", pkgid_str] -> do
        pkgid <- readGlobPkgId pkgid_str
        unregisterPackage pkgid verbosity cli force
    ["expose", pkgid_str] -> do
        pkgid <- readGlobPkgId pkgid_str
        exposePackage pkgid verbosity cli force
    ["hide", pkgid_str] -> do
        pkgid <- readGlobPkgId pkgid_str
        hidePackage pkgid verbosity cli force
    ["trust", pkgid_str] -> do
        pkgid <- readGlobPkgId pkgid_str
        trustPackage pkgid verbosity cli force
    ["distrust", pkgid_str] -> do
        pkgid <- readGlobPkgId pkgid_str
        distrustPackage pkgid verbosity cli force
    ["list"] -> do
        listPackages verbosity cli Nothing Nothing
    ["list", pkgid_str] ->
        case substringCheck pkgid_str of
          Nothing -> do pkgid <- readGlobPkgId pkgid_str
                        listPackages verbosity cli (Just (Id pkgid)) Nothing
          Just m -> listPackages verbosity cli (Just (Substring pkgid_str m)) Nothing
    ["dot"] -> do
        showPackageDot verbosity cli
    ["find-module", moduleName] -> do
        -- a pattern without wildcards is treated as an exact match
        let match = maybe (==moduleName) id (substringCheck moduleName)
        listPackages verbosity cli Nothing (Just match)
    ["latest", pkgid_str] -> do
        pkgid <- readGlobPkgId pkgid_str
        latestPackage verbosity cli pkgid
    ["describe", pkgid_str] -> do
        pkgarg <- case substringCheck pkgid_str of
          Nothing -> liftM Id (readGlobPkgId pkgid_str)
          Just m -> return (Substring pkgid_str m)
        describePackage verbosity cli pkgarg (fromMaybe False mexpand_pkgroot)
    ["field", pkgid_str, fields] -> do
        pkgarg <- case substringCheck pkgid_str of
          Nothing -> liftM Id (readGlobPkgId pkgid_str)
          Just m -> return (Substring pkgid_str m)
        describeField verbosity cli pkgarg
                      (splitFields fields) (fromMaybe True mexpand_pkgroot)
    ["check"] -> do
        checkConsistency verbosity cli
    ["dump"] -> do
        dumpPackages verbosity cli (fromMaybe False mexpand_pkgroot)
    ["recache"] -> do
        recache verbosity cli
    [] -> do
        die ("missing command\n" ++ shortUsage prog)
    (_cmd:_) -> do
        die ("command-line syntax error\n" ++ shortUsage prog)
-- | Run a 'ReadP' parser over an entire input string, requiring an
-- unambiguous parse: exactly one result whose leftover input is all
-- whitespace.  Anything else is a fatal error mentioning @what@.
parseCheck :: ReadP a a -> String -> String -> IO a
parseCheck parser str what =
  case complete of
    [x] -> return x
    _   -> die ("cannot parse \'" ++ str ++ "\' as a " ++ what)
  where
    complete = [ x | (x, leftover) <- readP_to_S parser str
                   , all isSpace leftover ]
-- | Parse a package identifier, additionally accepting the trailing
-- @-*@ version glob (see 'parseGlobPackageId'); dies on parse failure.
readGlobPkgId :: String -> IO PackageIdentifier
readGlobPkgId str = parseCheck parseGlobPackageId str "package identifier"
-- | Parser for a package identifier that also accepts @name-*@,
-- meaning "any version of name" (encoded with 'globVersion').
parseGlobPackageId :: ReadP r PackageIdentifier
parseGlobPackageId =
  parse
     +++
  (do n <- parse
      _ <- string "-*"
      return (PackageIdentifier{ pkgName = n, pkgVersion = globVersion }))
-- globVersion means "all versions"
-- | Sentinel 'Version' used by the @name-*@ glob syntax: empty branch,
-- tag \"*\".
globVersion :: Version
globVersion = Version{ versionBranch=[], versionTags=["*"] }
-- -----------------------------------------------------------------------------
-- Package databases
-- Some commands operate on a single database:
-- register, unregister, expose, hide, trust, distrust
-- however these commands also check the union of the available databases
-- in order to check consistency. For example, register will check that
-- dependencies exist before registering a package.
--
-- Some commands operate on multiple databases, with overlapping semantics:
-- list, describe, field
-- | One package database (file- or directory-style) together with its
-- parsed contents.
data PackageDB
  = PackageDB {
      location, locationAbsolute :: !FilePath,
      -- We need both possibly-relative and definitely-absolute package
      -- db locations. This is because the relative location is used as
      -- an identifier for the db, so it is important we do not modify it.
      -- On the other hand we need the absolute path in a few places
      -- particularly in relation to the ${pkgroot} stuff.

      packages :: [InstalledPackageInfo]
    }
type PackageDBStack = [PackageDB]
        -- A stack of package databases. Convention: head is the topmost
        -- in the stack.  (The global database sits at the bottom.)
-- | Flatten a database stack into a single package list, preserving
-- stack order (topmost database's packages first).
allPackagesInStack :: PackageDBStack -> [InstalledPackageInfo]
allPackagesInStack stack = [ pkg | db <- stack, pkg <- packages db ]
-- | Determine which package databases are in play.  Returns the full
-- stack (bottom = global), the database selected for modification (if
-- @modify@), and the stack read-only commands should consult.  The
-- precedence between GHC_PACKAGE_PATH, --user/--global/-f flags and
-- the built-in defaults is documented inline below.
getPkgDatabases :: Verbosity
                -> Bool    -- we are modifying, not reading
                -> Bool    -- read caches, if available
                -> Bool    -- expand vars, like ${pkgroot} and $topdir
                -> [Flag]
                -> IO (PackageDBStack,
                          -- the real package DB stack: [global,user] ++
                          -- DBs specified on the command line with -f.
                       Maybe FilePath,
                          -- which one to modify, if any
                       PackageDBStack)
                          -- the package DBs specified on the command
                          -- line, or [global,user] otherwise. This
                          -- is used as the list of package DBs for
                          -- commands that just read the DB, such as 'list'.
getPkgDatabases verbosity modify use_cache expand_vars my_flags = do
  -- first we determine the location of the global package config. On Windows,
  -- this is found relative to the ghc-pkg.exe binary, whereas on Unix the
  -- location is passed to the binary using the --global-package-db flag by the
  -- wrapper script.
  let err_msg = "missing --global-package-db option, location of global package database unknown\n"
  global_conf <-
     case [ f | FlagGlobalConfig f <- my_flags ] of
        [] -> do mb_dir <- getLibDir
                 case mb_dir of
                   Nothing  -> die err_msg
                   Just dir -> do
                     r <- lookForPackageDBIn dir
                     case r of
                       Nothing -> die ("Can't find package database in " ++ dir)
                       Just path -> return path
        fs -> return (last fs)

  -- The value of the $topdir variable used in some package descriptions
  -- Note that the way we calculate this is slightly different to how it
  -- is done in ghc itself. We rely on the convention that the global
  -- package db lives in ghc's libdir.
  top_dir <- absolutePath (takeDirectory global_conf)

  let no_user_db = FlagNoUserDb `elem` my_flags

  -- Haste: the user database is looked up in 'hasteUserDir'; if it does
  -- not exist yet we fall back to 'pkgUserDir' (marked non-existent).
  mb_user_conf <-
     if no_user_db then return Nothing else do
       r <- lookForPackageDBIn hasteUserDir
       case r of
         Nothing -> return (Just (pkgUserDir, False))
         Just f  -> return (Just (f, True))

  -- If the user database doesn't exist, and this command isn't a
  -- "modify" command, then we won't attempt to create or use it.
  let sys_databases
        | Just (user_conf,user_exists) <- mb_user_conf,
          modify || user_exists = [user_conf, global_conf]
        | otherwise             = [global_conf]

  e_pkg_path <- tryIO (System.Environment.getEnv "GHC_PACKAGE_PATH")
  let env_stack =
        case e_pkg_path of
          Left  _ -> sys_databases
          Right path
            -- a trailing separator means "append the default stack"
            | last cs == ""  -> init cs ++ sys_databases
            | otherwise      -> cs
            where cs = parseSearchPath path

      -- The "global" database is always the one at the bottom of the stack.
      -- This is the database we modify by default.
      virt_global_conf = last env_stack

  let db_flags = [ f | Just f <- map is_db_flag my_flags ]
         where is_db_flag FlagUser
                  | Just (user_conf, _user_exists) <- mb_user_conf
                  = Just user_conf
               is_db_flag FlagGlobal     = Just virt_global_conf
               is_db_flag (FlagConfig f) = Just f
               is_db_flag _              = Nothing

  let flag_db_names | null db_flags = env_stack
                    | otherwise     = reverse (nub db_flags)

  -- For a "modify" command, treat all the databases as
  -- a stack, where we are modifying the top one, but it
  -- can refer to packages in databases further down the
  -- stack.

  -- -f flags on the command line add to the database
  -- stack, unless any of them are present in the stack
  -- already.
  let final_stack = filter (`notElem` env_stack)
                       [ f | FlagConfig f <- reverse my_flags ]
                    ++ env_stack

  -- the database we actually modify is the one mentioned
  -- rightmost on the command-line.
  let to_modify
        | not modify    = Nothing
        | null db_flags = Just virt_global_conf
        | otherwise     = Just (last db_flags)

  db_stack <- sequence
    [ do db <- readParseDatabase verbosity mb_user_conf use_cache db_path
         if expand_vars then return (mungePackageDBPaths top_dir db)
                        else return db
    | db_path <- final_stack ]

  let flag_db_stack = [ db | db_name <- flag_db_names,
                        db <- db_stack, location db == db_name ]

  return (db_stack, to_modify, flag_db_stack)
-- | Probe a directory for a package database: prefer a directory-style
-- @package.conf.d@, fall back to a file-style @package.conf@, and
-- return 'Nothing' when neither exists.
lookForPackageDBIn :: FilePath -> IO (Maybe FilePath)
lookForPackageDBIn dir = do
  let dirStyle  = dir </> "package.conf.d"
      fileStyle = dir </> "package.conf"
  haveDir <- doesDirectoryExist dirStyle
  if haveDir
    then return (Just dirStyle)
    else do
      haveFile <- doesFileExist fileStyle
      return (if haveFile then Just fileStyle else Nothing)
-- | Read one package database, using the binary @package.cache@ when
-- it is present and at least as new as the directory's own timestamp.
-- A missing user database (only) yields an empty 'PackageDB'.
readParseDatabase :: Verbosity
                  -> Maybe (FilePath,Bool)
                  -> Bool -- use cache
                  -> FilePath
                  -> IO PackageDB
readParseDatabase verbosity mb_user_conf use_cache path
  -- the user database (only) is allowed to be non-existent
  | Just (user_conf,False) <- mb_user_conf, path == user_conf
  = mkPackageDB []
  | otherwise
  = do e <- tryIO $ getDirectoryContents path
       case e of
         -- not a directory: treat it as an old-style single-file db
         Left _ -> do
              pkgs <- parseMultiPackageConf verbosity path
              mkPackageDB pkgs
         Right fs
           | not use_cache -> ignore_cache (const $ return ())
           | otherwise -> do
              let cache = path </> cachefilename
              tdir <- getModificationTime path
              e_tcache <- tryIO $ getModificationTime cache
              case e_tcache of
                Left ex -> do
                     when (verbosity > Normal) $
                        warn ("warning: cannot read cache file " ++ cache ++ ": " ++ show ex)
                     ignore_cache (const $ return ())
                Right tcache -> do
                  -- verbose-only diagnostics comparing each file's
                  -- timestamp against the cache's
                  let compareTimestampToCache file =
                          when (verbosity >= Verbose) $ do
                              tFile <- getModificationTime file
                              compareTimestampToCache' file tFile
                      compareTimestampToCache' file tFile = do
                          let rel = case tcache `compare` tFile of
                                      LT -> " (NEWER than cache)"
                                      GT -> " (older than cache)"
                                      EQ -> " (same as cache)"
                          warn ("Timestamp " ++ show tFile
                             ++ " for " ++ file ++ rel)
                  when (verbosity >= Verbose) $ do
                      warn ("Timestamp " ++ show tcache ++ " for " ++ cache)
                      compareTimestampToCache' path tdir
                  if tcache >= tdir
                      then do
                          when (verbosity > Normal) $
                             infoLn ("using cache: " ++ cache)
                          pkgs <- myReadBinPackageDB cache
                          let pkgs' = map convertPackageInfoIn pkgs
                          mkPackageDB pkgs'
                      else do
                          when (verbosity >= Normal) $ do
                              warn ("WARNING: cache is out of date: "
                                 ++ cache)
                              warn "Use 'ghc-pkg recache' to fix."
                          ignore_cache compareTimestampToCache
           where
             -- Parse the individual .conf files in the directory,
             -- running checkTime on each (used by the verbose report).
             -- Note: 'fs' is the directory listing bound above.
             ignore_cache :: (FilePath -> IO ()) -> IO PackageDB
             ignore_cache checkTime = do
                 let confs = filter (".conf" `isSuffixOf`) fs
                     doFile f = do checkTime f
                                   parseSingletonPackageConf verbosity f
                 pkgs <- mapM doFile $ map (path </>) confs
                 mkPackageDB pkgs
  where
    mkPackageDB pkgs = do
      path_abs <- absolutePath path
      return PackageDB {
        location = path,
        locationAbsolute = path_abs,
        packages = pkgs
      }
-- read the package.cache file strictly, to work around a problem with
-- bytestring 0.9.0.x (fixed in 0.9.1.x) where the file wasn't closed
-- after it has been completely read, leading to a sharing violation
-- later.
myReadBinPackageDB :: FilePath -> IO [InstalledPackageInfoString]
myReadBinPackageDB filepath = do
  h <- openBinaryFile filepath ReadMode
  sz <- hFileSize h
  -- hGet with the full file size pulls the whole file in before we
  -- close the handle, so decoding below needs no open handle.
  b <- B.hGet h (fromIntegral sz)
  hClose h
  return $ Bin.runGet Bin.get b
-- | Parse an old-style single-file package database: a 'read'-able
-- list of package descriptions, converted via convertPackageInfoIn.
parseMultiPackageConf :: Verbosity -> FilePath -> IO [InstalledPackageInfo]
parseMultiPackageConf verbosity file = do
  when (verbosity > Normal) $ infoLn ("reading package database: " ++ file)
  str <- readUTF8File file
  let pkgs = map convertPackageInfoIn $ read str
  -- NOTE(review): 'evaluate' forces only to WHNF, so 'read' errors
  -- buried inside the list can escape this handler -- confirm whether
  -- that is intended.
  Exception.evaluate pkgs
    `catchError` \e->
       die ("error while parsing " ++ file ++ ": " ++ show e)
-- | Read and parse one .conf file from a directory-style database,
-- discarding any parse warnings.
parseSingletonPackageConf :: Verbosity -> FilePath -> IO InstalledPackageInfo
parseSingletonPackageConf verbosity file = do
  when (verbosity > Normal) $ infoLn ("reading package config: " ++ file)
  contents <- readUTF8File file
  (pkg, _warnings) <- parsePackageInfo contents
  return pkg
-- | Name of the binary cache file kept inside a directory-style
-- package database.
cachefilename :: FilePath
cachefilename = "package.cache"
-- | Apply 'mungePackagePaths' to every package in a database, using
-- the database's parent directory as the ${pkgroot}.
mungePackageDBPaths :: FilePath -> PackageDB -> PackageDB
mungePackageDBPaths top_dir db@PackageDB { packages = pkgs } =
    db { packages = map (mungePackagePaths top_dir pkgroot) pkgs }
  where
    pkgroot = takeDirectory (locationAbsolute db)
    -- It so happens that for both styles of package db ("package.conf"
    -- files and "package.conf.d" dirs) the pkgroot is the parent directory
    -- ${pkgroot}/package.conf or ${pkgroot}/package.conf.d/
-- TODO: This code is duplicated in compiler/main/Packages.lhs
mungePackagePaths :: FilePath -> FilePath
                  -> InstalledPackageInfo -> InstalledPackageInfo
-- Perform path/URL variable substitution as per the Cabal ${pkgroot} spec
-- (http://www.haskell.org/pipermail/libraries/2009-May/011772.html)
-- Paths/URLs can be relative to ${pkgroot} or ${pkgrooturl}.
-- The "pkgroot" is the directory containing the package database.
--
-- Also perform a similar substitution for the older GHC-specific
-- "$topdir" variable. The "topdir" is the location of the ghc
-- installation (obtained from the -B option).
mungePackagePaths top_dir pkgroot pkg =
    pkg {
      importDirs  = munge_paths (importDirs pkg),
      includeDirs = munge_paths (includeDirs pkg),
      libraryDirs = munge_paths (libraryDirs pkg),
      frameworkDirs = munge_paths (frameworkDirs pkg),
      haddockInterfaces = munge_paths (haddockInterfaces pkg),
      -- haddock-html is allowed to be either a URL or a file
      haddockHTMLs = munge_paths (munge_urls (haddockHTMLs pkg))
    }
  where
    munge_paths = map munge_path
    munge_urls  = map munge_url

    munge_path p
      | Just p' <- stripVarPrefix "${pkgroot}" p = pkgroot ++ p'
      | Just p' <- stripVarPrefix "$topdir"    p = top_dir ++ p'
      | otherwise                                = p

    munge_url p
      | Just p' <- stripVarPrefix "${pkgrooturl}" p = toUrlPath pkgroot p'
      | Just p' <- stripVarPrefix "$httptopdir"   p = toUrlPath top_dir p'
      | otherwise                                   = p

    toUrlPath r p = "file:///"
                 -- URLs always use posix style '/' separators:
                 ++ FilePath.Posix.joinPath
                        (r : -- We need to drop a leading "/" or "\\"
                             -- if there is one:
                             dropWhile (all isPathSeparator)
                                       (FilePath.splitDirectories p))

    -- We could drop the separator here, and then use </> above. However,
    -- by leaving it in and using ++ we keep the same path separator
    -- rather than letting FilePath change it to use \ as the separator
    stripVarPrefix var path = case stripPrefix var path of
                                Just [] -> Just []
                                Just cs@(c : _) | isPathSeparator c -> Just cs
                                _ -> Nothing
-- -----------------------------------------------------------------------------
-- Creating a new package DB
-- | Create a fresh, empty package database at @filename@, refusing to
-- overwrite an existing file or directory of the same name.
initPackageDB :: FilePath -> Verbosity -> [Flag] -> IO ()
initPackageDB filename verbosity _flags = do
  let alreadyThere = die ("cannot create: " ++ filename ++ " already exists")
  fileExists <- doesFileExist filename
  when fileExists alreadyThere
  dirExists <- doesDirectoryExist filename
  when dirExists alreadyThere
  filename_abs <- absolutePath filename
  -- Writing an empty db through changeDB creates the on-disk layout.
  changeDB verbosity []
    PackageDB { location         = filename
              , locationAbsolute = filename_abs
              , packages         = [] }
-- -----------------------------------------------------------------------------
-- Registering
-- | Implements both @register@ and @update@ (@update@ passes
-- update=True, allowing an existing package with the same source id to
-- be replaced).  Reads the description from a file or stdin, validates
-- the ${pkgroot}-expanded form against the truncated database stack,
-- and registers the unexpanded form.
registerPackage :: FilePath
                -> Verbosity
                -> [Flag]
                -> Bool              -- auto_ghci_libs
                -> Bool              -- expand_env_vars
                -> Bool              -- update
                -> Force
                -> IO ()
registerPackage input verbosity my_flags auto_ghci_libs expand_env_vars update force = do
  (db_stack, Just to_modify, _flag_dbs) <-
      getPkgDatabases verbosity True True False{-expand vars-} my_flags

  let
        db_to_operate_on = my_head "register" $
                           filter ((== to_modify).location) db_stack
  --
  -- NOTE(review): 'verbosity >= Silent' is trivially True (Silent is
  -- the minimum), so this warns whenever --auto-ghci-libs is given;
  -- possibly '> Silent' was intended -- confirm against upstream.
  when (auto_ghci_libs && verbosity >= Silent) $
    warn "Warning: --auto-ghci-libs is deprecated and will be removed in GHC 7.4"
  --
  s <-
    case input of
      "-" -> do
        when (verbosity >= Normal) $
            info "Reading package info from stdin ... "
        -- fix the encoding to UTF-8, since this is an interchange format
        hSetEncoding stdin utf8
        getContents
      f   -> do
        when (verbosity >= Normal) $
            info ("Reading package info from " ++ show f ++ " ... ")
        readUTF8File f

  expanded <- if expand_env_vars then expandEnvVars s force
                                 else return s

  (pkg, ws) <- parsePackageInfo expanded
  when (verbosity >= Normal) $
      infoLn "done."

  -- report any warnings from the parse phase
  _ <- reportValidateErrors [] ws
         (display (sourcePackageId pkg) ++ ": Warning: ") Nothing

  -- validate the expanded pkg, but register the unexpanded
  pkgroot <- absolutePath (takeDirectory to_modify)
  let top_dir = takeDirectory (location (last db_stack))
      pkg_expanded = mungePackagePaths top_dir pkgroot pkg

  let truncated_stack = dropWhile ((/= to_modify).location) db_stack
  -- truncate the stack for validation, because we don't allow
  -- packages lower in the stack to refer to those higher up.
  validatePackageConfig pkg_expanded verbosity truncated_stack auto_ghci_libs update force
  let
     -- drop any existing package with the same source id before adding
     removes = [ RemovePackage p
               | p <- packages db_to_operate_on,
                 sourcePackageId p == sourcePackageId pkg ]
  --
  changeDB verbosity (removes ++ [AddPackage pkg]) db_to_operate_on
-- | Parse an InstalledPackageInfo from its textual form.  Returns the
-- parsed info together with parser warnings (the harmless
-- "Unrecognized field pkgroot" warning is filtered out); a parse
-- failure is fatal, reported with its line number when available.
parsePackageInfo
  :: String
  -> IO (InstalledPackageInfo, [ValidateWarning])
parsePackageInfo str =
  case parseInstalledPackageInfo str of
    ParseOk warnings ok ->
      let interesting =
            [ msg | PWarning msg <- warnings
                  , not ("Unrecognized field pkgroot" `isPrefixOf` msg) ]
      in return (ok, interesting)
    ParseFailed err ->
      case locatedErrorMsg err of
        (Nothing, s) -> die s
        (Just l, s)  -> die (show l ++ ": " ++ s)
-- -----------------------------------------------------------------------------
-- Making changes to a package database
-- | A single edit applied to a package database by 'changeDB'.
data DBOp = RemovePackage InstalledPackageInfo
          | AddPackage InstalledPackageInfo
          | ModifyPackage InstalledPackageInfo
-- | Apply a list of operations to a database and persist the result:
-- a single-file database is rewritten wholesale, while a directory
-- database is updated file-by-file (creating the directory if needed).
changeDB :: Verbosity -> [DBOp] -> PackageDB -> IO ()
changeDB verbosity cmds db = do
  let updated = updateInternalDB db cmds
  fileBased <- doesFileExist (location db)
  if fileBased
    then writeNewConfig verbosity (location updated) (packages updated)
    else do
      createDirectoryIfMissing True (location db)
      changeDBDir verbosity cmds updated
-- | Pure half of 'changeDB': fold the operations over the in-memory
-- package list.  Removal matches on installed package id; modification
-- is remove-then-add.
updateInternalDB :: PackageDB -> [DBOp] -> PackageDB
updateInternalDB db cmds = db{ packages = foldl apply (packages db) cmds }
  where
    apply pkgs (RemovePackage p) =
      [ q | q <- pkgs, installedPackageId q /= installedPackageId p ]
    apply pkgs (AddPackage p) = p : pkgs
    apply pkgs (ModifyPackage p) =
      apply (apply pkgs (RemovePackage p)) (AddPackage p)
-- | Apply operations to a directory-style database by writing/removing
-- individual .conf files, then refresh the binary cache.
changeDBDir :: Verbosity -> [DBOp] -> PackageDB -> IO ()
changeDBDir verbosity cmds db = do
    mapM_ applyOp cmds
    updateDBCache verbosity db
  where
    -- the on-disk file backing one package's description
    confFile p = location db </> display (installedPackageId p) <.> "conf"
    applyOp (RemovePackage p) = do
      let file = confFile p
      when (verbosity > Normal) $ infoLn ("removing " ++ file)
      removeFileSafe file
    applyOp (AddPackage p) = do
      let file = confFile p
      when (verbosity > Normal) $ infoLn ("writing " ++ file)
      writeFileUtf8Atomic file (showInstalledPackageInfo p)
    applyOp (ModifyPackage p) =
      applyOp (AddPackage p)
-- | Rewrite the binary package.cache for a directory-style database.
-- On POSIX the database directory's timestamps are then set to match
-- the cache file's, so the freshly written cache is not judged stale
-- by the @tcache >= tdir@ check in 'readParseDatabase'.
updateDBCache :: Verbosity -> PackageDB -> IO ()
updateDBCache verbosity db = do
  let filename = location db </> cachefilename
  when (verbosity > Normal) $
      infoLn ("writing cache " ++ filename)
  writeBinaryFileAtomic filename (map convertPackageInfoOut (packages db))
    `catchIO` \e ->
      if isPermissionError e
      then die (filename ++ ": you don't have permission to modify this file")
      else ioError e
#ifndef mingw32_HOST_OS
  status <- getFileStatus filename
  setFileTimes (location db) (accessTime status) (modificationTime status)
#endif
-- -----------------------------------------------------------------------------
-- Exposing, Hiding, Trusting, Distrusting, Unregistering are all similar
-- Each of the following commands is a thin wrapper around
-- 'modifyPackage', differing only in the DBOp applied to each matching
-- package.
exposePackage :: PackageIdentifier -> Verbosity -> [Flag] -> Force -> IO ()
exposePackage = modifyPackage (\p -> ModifyPackage p{exposed=True})

hidePackage :: PackageIdentifier -> Verbosity -> [Flag] -> Force -> IO ()
hidePackage = modifyPackage (\p -> ModifyPackage p{exposed=False})

trustPackage :: PackageIdentifier -> Verbosity -> [Flag] -> Force -> IO ()
trustPackage = modifyPackage (\p -> ModifyPackage p{trusted=True})

distrustPackage :: PackageIdentifier -> Verbosity -> [Flag] -> Force -> IO ()
distrustPackage = modifyPackage (\p -> ModifyPackage p{trusted=False})

unregisterPackage :: PackageIdentifier -> Verbosity -> [Flag] -> Force -> IO ()
unregisterPackage = modifyPackage RemovePackage
-- | Common driver for expose/hide/trust/distrust/unregister: locate
-- the database containing packages matching @pkgid@, apply @fn@ to
-- each match, refuse (unless forced) when the change would newly break
-- other packages in the stack, then write the database back.
modifyPackage
  :: (InstalledPackageInfo -> DBOp)
  -> PackageIdentifier
  -> Verbosity
  -> [Flag]
  -> Force
  -> IO ()
modifyPackage fn pkgid verbosity my_flags force = do
  (db_stack, Just _to_modify, _flag_dbs) <-
      getPkgDatabases verbosity True{-modify-} True{-use cache-} False{-expand vars-} my_flags

  -- NOTE(review): 'head' here is partial -- if findPackagesByDB can
  -- return an empty list this crashes; presumably it dies on no match
  -- first -- confirm.
  (db, ps) <- fmap head $ findPackagesByDB db_stack (Id pkgid)
  let
      db_name = location db
      pkgs    = packages db

      pids = map sourcePackageId ps

      cmds = [ fn pkg | pkg <- pkgs, sourcePackageId pkg `elem` pids ]
      new_db = updateInternalDB db cmds

      -- compute the set of packages that would become broken, so we
      -- can warn (or refuse) before committing the change
      old_broken = brokenPackages (allPackagesInStack db_stack)
      rest_of_stack = filter ((/= db_name) . location) db_stack
      new_stack = new_db : rest_of_stack
      new_broken = map sourcePackageId (brokenPackages (allPackagesInStack new_stack))
      newly_broken = filter (`notElem` map sourcePackageId old_broken) new_broken
  --
  when (not (null newly_broken)) $
      dieOrForceAll force ("unregistering " ++ display pkgid ++
                           " would break the following packages: "
                              ++ unwords (map display newly_broken))

  changeDB verbosity cmds db
-- | Rebuild the package.cache of the database selected for
-- modification.  The cache is deliberately not read while re-parsing
-- the database (second argument False), and writing the db back with
-- an empty op list regenerates it.
recache :: Verbosity -> [Flag] -> IO ()
recache verbosity my_flags = do
  (db_stack, Just to_modify, _flag_dbs) <-
     getPkgDatabases verbosity True{-modify-} False{-no cache-} False{-expand vars-} my_flags
  let db_to_operate_on =
        my_head "recache" [ db | db <- db_stack, location db == to_modify ]
  changeDB verbosity [] db_to_operate_on
-- -----------------------------------------------------------------------------
-- Listing packages
-- | List the packages in every database of the stack, optionally filtered
-- to a single package or to packages exposing a given module.  Broken
-- packages print in braces, hidden ones in parens; output is colourised
-- when stdout is a tty (on non-Windows builds).
listPackages :: Verbosity -> [Flag] -> Maybe PackageArg
             -> Maybe (String->Bool)
             -> IO ()
listPackages verbosity my_flags mPackageName mModuleName = do
  let simple_output = FlagSimpleOutput `elem` my_flags
  (db_stack, _, flag_db_stack) <-
      getPkgDatabases verbosity False True{-use cache-} False{-expand vars-} my_flags

  let db_stack_filtered -- if a package is given, filter out all other packages
          | Just this <- mPackageName =
              [ db{ packages = filter (this `matchesPkg`) (packages db) }
              | db <- flag_db_stack ]
          | Just match <- mModuleName = -- packages which expose mModuleName
              [ db{ packages = filter (match `exposedInPkg`) (packages db) }
              | db <- flag_db_stack ]
          | otherwise = flag_db_stack

      -- packages within each database sorted by name, then version
      db_stack_sorted
          = [ db{ packages = sort_pkgs (packages db) }
            | db <- db_stack_filtered ]
          where sort_pkgs = sortBy cmpPkgIds
                cmpPkgIds pkg1 pkg2 =
                    case pkgName p1 `compare` pkgName p2 of
                      LT -> LT
                      GT -> GT
                      EQ -> pkgVersion p1 `compare` pkgVersion p2
                    where (p1,p2) = (sourcePackageId pkg1, sourcePackageId pkg2)

      -- databases print bottom-of-stack first
      stack = reverse db_stack_sorted

      match `exposedInPkg` pkg = any match (map display $ exposedModules pkg)

      pkg_map = allPackagesInStack db_stack
      broken = map sourcePackageId (brokenPackages pkg_map)

      -- plain (uncoloured) rendering of one database
      show_normal PackageDB{ location = db_name, packages = pkg_confs } =
          do hPutStrLn stdout (db_name ++ ":")
             if null pp_pkgs
               then hPutStrLn stdout " (no packages)"
               else hPutStrLn stdout $ unlines (map (" " ++) pp_pkgs)
          where
            -- Sort using instance Ord PackageId
            pp_pkgs = map pp_pkg . sortBy (comparing installedPackageId) $ pkg_confs
            pp_pkg p
              | sourcePackageId p `elem` broken = printf "{%s}" doc
              | exposed p = doc
              | otherwise = printf "(%s)" doc
              where doc | verbosity >= Verbose = printf "%s (%s)" pkg ipid
                        | otherwise = pkg
                      where
                        InstalledPackageId ipid = installedPackageId p
                        pkg = display (sourcePackageId p)

      show_simple = simplePackageList my_flags . allPackagesInStack

  when (not (null broken) && not simple_output && verbosity /= Silent) $ do
    prog <- getProgramName
    warn ("WARNING: there are broken packages. Run '" ++ prog ++ " check' for more details.")

  if simple_output then show_simple stack else do

#if defined(mingw32_HOST_OS) || defined(BOOTSTRAPPING)
    mapM_ show_normal stack
#else
    let
        -- colour rendering of one database: broken red, hidden blue
        show_colour withF db =
            mconcat $ map (<#> termText "\n") $
                (termText (location db) :
                   map (termText " " <#>) (map pp_pkg (packages db)))
          where
            pp_pkg p
              | sourcePackageId p `elem` broken = withF Red doc
              | exposed p = doc
              | otherwise = withF Blue doc
              where doc | verbosity >= Verbose
                          = termText (printf "%s (%s)" pkg ipid)
                        | otherwise
                          = termText pkg
                      where
                        InstalledPackageId ipid = installedPackageId p
                        pkg = display (sourcePackageId p)

    is_tty <- hIsTerminalDevice stdout
    if not is_tty
      then mapM_ show_normal stack
      else do tty <- Terminfo.setupTermFromEnv
              case Terminfo.getCapability tty withForegroundColor of
                Nothing -> mapM_ show_normal stack
                Just w -> runTermOutput tty $ mconcat $
                          map (show_colour w) stack
#endif
-- | Print the sorted package ids (or just names, with -names-only) on a
-- single space-separated line; prints nothing at all for an empty list.
simplePackageList :: [Flag] -> [InstalledPackageInfo] -> IO ()
simplePackageList my_flags pkgs = do
  let showPkg = if FlagNamesOnly `elem` my_flags then display . pkgName
                                                else display
      -- Sort using instance Ord PackageId
      strs = map showPkg $ sort $ map sourcePackageId pkgs
  -- 'unwords' is exactly 'concat . intersperse " "'; 'unless' replaces
  -- the 'when (not ...)' double negative
  unless (null pkgs) $
    hPutStrLn stdout $ unwords strs
-- | Emit the installed-package dependency graph on stdout in GraphViz
-- dot syntax; dangling dependency ids are silently skipped.
showPackageDot :: Verbosity -> [Flag] -> IO ()
showPackageDot verbosity myflags = do
  (_, _, flag_db_stack) <-
      getPkgDatabases verbosity False True{-use cache-} False{-expand vars-} myflags
  let all_pkgs = allPackagesInStack flag_db_stack
      ipix = PackageIndex.fromList all_pkgs
      quote s = '"' : s ++ "\""
      edges = [ (display (sourcePackageId p), display (sourcePackageId dep))
              | p <- all_pkgs
              , depid <- depends p
              , Just dep <- [PackageIndex.lookupInstalledPackageId ipix depid]
              ]
  putStrLn "digraph {"
  mapM_ (\(from, to) -> putStrLn (quote from ++ " -> " ++ quote to)) edges
  putStrLn "}"
-- -----------------------------------------------------------------------------
-- Prints the highest (hidden or exposed) version of a package
-- | Print the highest version (hidden or exposed) matching @pkgid@,
-- dying when there is no match at all.
latestPackage :: Verbosity -> [Flag] -> PackageIdentifier -> IO ()
latestPackage verbosity my_flags pkgid = do
  (_, _, flag_db_stack) <-
      getPkgDatabases verbosity False True{-use cache-} False{-expand vars-} my_flags
  ps <- findPackages flag_db_stack (Id pkgid)
  if null ps
    then die "no matches"
    else hPutStrLn stdout (display (maximum (map sourcePackageId ps)))
-- -----------------------------------------------------------------------------
-- Describe
-- | Dump the full InstalledPackageInfo of every matching package, each
-- paired with the absolute location of the database it came from.
describePackage :: Verbosity -> [Flag] -> PackageArg -> Bool -> IO ()
describePackage verbosity my_flags pkgarg expand_pkgroot = do
  (_, _, flag_db_stack) <-
      getPkgDatabases verbosity False True{-use cache-} expand_pkgroot my_flags
  dbs <- findPackagesByDB flag_db_stack pkgarg
  doDump expand_pkgroot
      [ (pkg, locationAbsolute db) | (db, pkgs) <- dbs, pkg <- pkgs ]
-- | Dump every package of every database visible given the flags.
dumpPackages :: Verbosity -> [Flag] -> Bool -> IO ()
dumpPackages verbosity my_flags expand_pkgroot = do
  (_, _, flag_db_stack) <-
      getPkgDatabases verbosity False True{-use cache-} expand_pkgroot my_flags
  doDump expand_pkgroot
      [ (pkg, locationAbsolute db) | db <- flag_db_stack, pkg <- packages db ]
-- | Render packages in InstalledPackageInfo syntax separated by "---"
-- lines.  Unless pkgroot expansion is on, each entry gains an explicit
-- pkgroot: field derived from its database's directory.
doDump :: Bool -> [(InstalledPackageInfo, FilePath)] -> IO ()
doDump expand_pkgroot pkgs = do
  -- fix the encoding to UTF-8, since this is an interchange format
  hSetEncoding stdout utf8
  let render (pkg, pkgloc)
        | expand_pkgroot = showInstalledPackageInfo pkg
        | otherwise = showInstalledPackageInfo pkg
                      ++ "pkgroot: " ++ show (takeDirectory pkgloc) ++ "\n"
  putStrLn $ intercalate "---\n" (map render pkgs)
-- The PackageId can have globVersion for the version
-- | All packages matching the argument, across every database in the
-- stack (dies, via 'findPackagesByDB', when nothing matches).
findPackages :: PackageDBStack -> PackageArg -> IO [InstalledPackageInfo]
findPackages db_stack pkgarg = do
  found <- findPackagesByDB db_stack pkgarg
  return (concatMap snd found)
-- | Group the matches for a package argument by the database containing
-- them, omitting databases with no match; dies when nothing matches.
findPackagesByDB :: PackageDBStack -> PackageArg
                 -> IO [(PackageDB, [InstalledPackageInfo])]
findPackagesByDB db_stack pkgarg =
  case found of
    [] -> die ("cannot find package " ++ pkg_msg pkgarg)
    ps -> return ps
  where
    found = [ (db, matched)
            | db <- db_stack
            , let matched = filter (pkgarg `matchesPkg`) (packages db)
            , not (null matched) ]
    pkg_msg (Id pkgid) = display pkgid
    pkg_msg (Substring pkgpat _) = "matching " ++ pkgpat
-- | Loose equality on package ids: names must agree, and versions must
-- agree unless the query's version is the glob (empty-branch) version.
matches :: PackageIdentifier -> PackageIdentifier -> Bool
matches pid pid' =
  pkgName pid == pkgName pid'
    && (not (realVersion pid) || pkgVersion pid == pkgVersion pid')

-- | A version is \"real\" when its branch is non-empty;
-- when versionBranch == [], this is a glob.
realVersion :: PackageIdentifier -> Bool
realVersion pkgid = not (null (versionBranch (pkgVersion pkgid)))

-- | Match a command-line package argument against an installed package.
matchesPkg :: PackageArg -> InstalledPackageInfo -> Bool
matchesPkg (Id pid) pkg = pid `matches` sourcePackageId pkg
matchesPkg (Substring _ m) pkg = m (display (sourcePackageId pkg))
-- -----------------------------------------------------------------------------
-- Field
-- | Print the requested fields of every matching package, one line per
-- field per package; an unknown field name is fatal.
describeField :: Verbosity -> [Flag] -> PackageArg -> [String] -> Bool -> IO ()
describeField verbosity my_flags pkgarg fields expand_pkgroot = do
  (_, _, flag_db_stack) <-
      getPkgDatabases verbosity False True{-use cache-} expand_pkgroot my_flags
  fns <- mapM toField fields
  ps <- findPackages flag_db_stack pkgarg
  mapM_ (\pinfo -> mapM_ (putStrLn . ($ pinfo)) fns) ps
  where
    showFun
      | FlagSimpleOutput `elem` my_flags = showSimpleInstalledPackageInfoField
      | otherwise = showInstalledPackageInfoField
    toField f = maybe (die ("unknown field: " ++ f)) return (showFun f)
-- -----------------------------------------------------------------------------
-- Check: Check consistency of installed packages
-- | @check@: validate every package in the stack, then report the ones
-- broken directly or (via 'closure') transitively; exits non-zero when
-- any breakage is found.
checkConsistency :: Verbosity -> [Flag] -> IO ()
checkConsistency verbosity my_flags = do
  (db_stack, _, _) <-
      getPkgDatabases verbosity True True{-use cache-} True{-expand vars-} my_flags
  -- check behaves like modify for the purposes of deciding which
  -- databases to use, because ordering is important.

  let simple_output = FlagSimpleOutput `elem` my_flags

  let pkgs = allPackagesInStack db_stack

      -- run the full validation suite on one package; yields [p] if broken
      checkPackage p = do
        (_,es,ws) <- runValidate $ checkPackageConfig p verbosity db_stack False True
        if null es
          then do when (not simple_output) $ do
                    _ <- reportValidateErrors [] ws "" Nothing
                    return ()
                  return []
          else do
            when (not simple_output) $ do
              reportError ("There are problems in package " ++ display (sourcePackageId p) ++ ":")
              _ <- reportValidateErrors es ws " " Nothing
              return ()
            return [p]

  broken_pkgs <- concat `fmap` mapM checkPackage pkgs

  let filterOut pkgs1 pkgs2 = filter not_in pkgs2
        where not_in p = sourcePackageId p `notElem` all_ps
              all_ps = map sourcePackageId pkgs1

  let not_broken_pkgs = filterOut broken_pkgs pkgs
      -- packages broken only because a dependency is broken
      (_, trans_broken_pkgs) = closure [] not_broken_pkgs
      all_broken_pkgs = broken_pkgs ++ trans_broken_pkgs

  when (not (null all_broken_pkgs)) $ do
    if simple_output
      then simplePackageList my_flags all_broken_pkgs
      else do
        reportError ("\nThe following packages are broken, either because they have a problem\n"++
                     "listed above, or because they depend on a broken package.")
        mapM_ (hPutStrLn stderr . display . sourcePackageId) all_broken_pkgs

  when (not (null all_broken_pkgs)) $ exitWith (ExitFailure 1)
-- | Least-fixed-point reachability over dependencies: grow the first
-- (available) set with packages whose dependencies all resolve into it,
-- until nothing more can be added; the second component is the remainder.
closure :: [InstalledPackageInfo] -> [InstalledPackageInfo]
        -> ([InstalledPackageInfo], [InstalledPackageInfo])
closure pkgs db_stack = go pkgs db_stack
 where
   go avail not_avail =
     case partition (depsAvailable avail) not_avail of
       ([], not_avail') -> (avail, not_avail')
       (new_avail, not_avail') -> go (new_avail ++ avail) not_avail'

   -- a package is OK when every 'depends' id is among the available ones
   depsAvailable :: [InstalledPackageInfo] -> InstalledPackageInfo
                 -> Bool
   depsAvailable pkgs_ok pkg = null dangling
     where dangling = filter (`notElem` pids) (depends pkg)
           pids = map installedPackageId pkgs_ok

   -- we want mutually recursive groups of package to show up
   -- as broken. (#1750)
-- | The packages whose dependency closure cannot be satisfied starting
-- from nothing (this also flags mutually-recursive groups).
brokenPackages :: [InstalledPackageInfo] -> [InstalledPackageInfo]
brokenPackages = snd . closure []
-- -----------------------------------------------------------------------------
-- Manipulating package.conf files
-- | Variant of InstalledPackageInfo with module names rendered as Strings.
type InstalledPackageInfoString = InstalledPackageInfo_ String

-- | Render the module-name fields to plain strings for textual output.
convertPackageInfoOut :: InstalledPackageInfo -> InstalledPackageInfoString
convertPackageInfoOut pkgconf =
  pkgconf { exposedModules = map display (exposedModules pkgconf)
          , hiddenModules = map display (hiddenModules pkgconf) }
-- | Parse the string module-name fields back into proper module names.
-- NOTE(review): 'fromJust' crashes on an unparseable name -- behaviour
-- inherited from the original.
convertPackageInfoIn :: InstalledPackageInfoString -> InstalledPackageInfo
convertPackageInfoIn pkgconf =
  pkgconf { exposedModules = convertAll (exposedModules pkgconf)
          , hiddenModules = convertAll (hiddenModules pkgconf) }
  where
    convertAll = map (fromJust . simpleParse)
-- | Atomically write the package database as a Haskell-show list of
-- 'InstalledPackageInfoString' records, creating parent directories as
-- needed; a permission failure is turned into a friendly 'die'.
writeNewConfig :: Verbosity -> FilePath -> [InstalledPackageInfo] -> IO ()
writeNewConfig verbosity filename ipis = do
  when (verbosity >= Normal) $
      info "Writing new package config file... "
  createDirectoryIfMissing True $ takeDirectory filename
  -- 'intercalate' (already used elsewhere in this file) replaces the
  -- hand-rolled 'concat $ intersperse' pipeline
  let shown = intercalate ",\n "
            $ map (show . convertPackageInfoOut) ipis
      fileContents = "[" ++ shown ++ "\n]"
  writeFileUtf8Atomic filename fileContents
    `catchIO` \e ->
      if isPermissionError e
        then die (filename ++ ": you don't have permission to modify this file")
        else ioError e
  when (verbosity >= Normal) $
      infoLn "done."
-----------------------------------------------------------------------------
-- Sanity-check a new package config, and automatically build GHCi libs
-- if requested.
-- | A validation error, tagged with the minimum Force level that is
-- allowed to override it.
type ValidateError = (Force,String)
type ValidateWarning = String

-- | Writer-like IO monad that accumulates validation errors and warnings
-- instead of aborting on the first problem.
newtype Validate a = V { runValidate :: IO (a, [ValidateError],[ValidateWarning]) }

instance Functor Validate where
    fmap = liftM

instance Applicative Validate where
    pure = return
    (<*>) = ap

instance Monad Validate where
   return a = V $ return (a, [], [])
   -- sequencing concatenates the diagnostics of both sides
   m >>= k = V $ do
     (a, es, ws) <- runValidate m
     (b, es', ws') <- runValidate (k a)
     return (b,es++es',ws++ws')
-- | Record a validation error overridable at the given Force level.
verror :: Force -> String -> Validate ()
verror f s = V $ return ((), [(f, s)], [])

-- | Record a non-fatal validation warning.
vwarn :: String -> Validate ()
vwarn s = V $ return ((), [], ["Warning: " ++ s])

-- | Run an IO action inside Validate, recording no diagnostics.
liftIO :: IO a -> Validate a
liftIO k = V $ fmap (\a -> (a, [], [])) k
-- returns False if we should die
-- | Print all warnings and errors; the result is True only when every
-- error was absent or overridden by the supplied Force level.
reportValidateErrors :: [ValidateError] -> [ValidateWarning]
                     -> String -> Maybe Force -> IO Bool
reportValidateErrors es ws prefix mb_force = do
  mapM_ (warn . (prefix++)) ws
  oks <- mapM report es
  return (and oks)
  where
    report (f,s)
      | Just force <- mb_force
      = if (force >= f)
          -- forced past this error: mention it and keep going
          then do reportError (prefix ++ s ++ " (ignoring)")
                  return True
          else if f < CannotForce
                 -- could have been forced, but wasn't
                 then do reportError (prefix ++ s ++ " (use --force to override)")
                         return False
                 -- CannotForce errors are always fatal
                 else do reportError err
                         return False
      | otherwise = do reportError err
                       return False
      where
        err = prefix ++ s
-- | Run the full package check suite and exit with failure unless every
-- reported error is overridden by the given Force level.
validatePackageConfig :: InstalledPackageInfo
                      -> Verbosity
                      -> PackageDBStack
                      -> Bool   -- auto-ghc-libs
                      -> Bool   -- update, or check
                      -> Force
                      -> IO ()
validatePackageConfig pkg verbosity db_stack auto_ghci_libs update force = do
  (_, es, ws) <- runValidate $
      checkPackageConfig pkg verbosity db_stack auto_ghci_libs update
  let prefix = display (sourcePackageId pkg) ++ ": "
  ok <- reportValidateErrors es ws prefix (Just force)
  unless ok $ exitWith (ExitFailure 1)
-- | The full validation suite for one package record: identifier and
-- duplicate checks first, then on-disk existence of every directory,
-- file, interface and library the record mentions.
checkPackageConfig :: InstalledPackageInfo
                   -> Verbosity
                   -> PackageDBStack
                   -> Bool   -- auto-ghc-libs
                   -> Bool   -- update, or check
                   -> Validate ()
checkPackageConfig pkg verbosity db_stack auto_ghci_libs update = do
  checkInstalledPackageId pkg db_stack update
  checkPackageId pkg
  checkDuplicates db_stack pkg update
  mapM_ (checkDep db_stack) (depends pkg)
  checkDuplicateDepends (depends pkg)
  -- import-dirs must exist; the rest are warn-only (first Bool)
  mapM_ (checkDir False "import-dirs") (importDirs pkg)
  mapM_ (checkDir True "library-dirs") (libraryDirs pkg)
  mapM_ (checkDir True "include-dirs") (includeDirs pkg)
  mapM_ (checkDir True "framework-dirs") (frameworkDirs pkg)
  mapM_ (checkFile True "haddock-interfaces") (haddockInterfaces pkg)
  mapM_ (checkDirURL True "haddock-html") (haddockHTMLs pkg)
  checkModules pkg
  mapM_ (checkHSLib verbosity (libraryDirs pkg) auto_ghci_libs) (hsLibraries pkg)
  -- ToDo: check these somehow?
  --    extra_libraries :: [String],
  --    c_includes :: [String],
-- | The id field must be non-empty and, unless updating, must not
-- already exist anywhere in the database stack.
checkInstalledPackageId :: InstalledPackageInfo -> PackageDBStack -> Bool
                        -> Validate ()
checkInstalledPackageId ipi db_stack update = do
  let ipid@(InstalledPackageId str) = installedPackageId ipi
  when (null str) $ verror CannotForce "missing id field"
  let dups = filter ((== ipid) . installedPackageId)
                    (allPackagesInStack db_stack)
  unless (update || null dups) $
    verror CannotForce $
      "package(s) with this id already exist: " ++
      unwords (map (display.packageId) dups)
-- When the package name and version are put together, sometimes we can
-- end up with a package id that cannot be parsed. This will lead to
-- difficulties when the user wants to refer to the package later, so
-- we check that the package id can be parsed properly here.
checkPackageId :: InstalledPackageInfo -> Validate ()
checkPackageId ipi =
  case parses of
    [_] -> return ()
    [] -> verror CannotForce ("invalid package identifier: " ++ str)
    _ -> verror CannotForce ("ambiguous package identifier: " ++ str)
  where
    str = display (sourcePackageId ipi)
    parses = [ x :: PackageIdentifier
             | (x, ys) <- readP_to_S parse str
             , all isSpace ys ]
-- | Reject (unless updating) a package whose source id -- or whose
-- case-folded rendering -- collides with one already in the top database.
checkDuplicates :: PackageDBStack -> InstalledPackageInfo -> Bool -> Validate ()
checkDuplicates db_stack pkg update = do
  let pkgid = sourcePackageId pkg
      pkgs = packages (head db_stack)
  --
  -- Check whether this package id already exists in this DB
  --
  when (not update && (pkgid `elem` map sourcePackageId pkgs)) $
    verror CannotForce $
      "package " ++ display pkgid ++ " is already installed"
  -- overlap modulo case is only a warning (ForceAll-overridable) for now
  let uncasep = map toLower . display
      dups = [ p | p <- map sourcePackageId pkgs, uncasep p == uncasep pkgid ]
  when (not update && not (null dups)) $ verror ForceAll $
    "Package names may be treated case-insensitively in the future.\n" ++
    "Package " ++ display pkgid ++
    " overlaps with: " ++ unwords (map display dups)
-- | Specialisations of 'checkPath': directories must exist, files must
-- exist, and URL-ish directory fields additionally accept http(s)://
-- and file:// forms.
checkDir, checkFile, checkDirURL :: Bool -> String -> FilePath -> Validate ()
checkDir = checkPath False True
checkFile = checkPath False False
checkDirURL = checkPath True True
-- | Shared implementation of the path checks.  Guard order matters:
-- URL forms are accepted (or unwrapped) before the relative-path and
-- on-disk existence checks run.
checkPath :: Bool -> Bool -> Bool -> String -> FilePath -> Validate ()
checkPath url_ok is_dir warn_only thisfield d
 | url_ok && ("http://" `isPrefixOf` d
           || "https://" `isPrefixOf` d) = return ()

 | url_ok
 , Just d' <- stripPrefix "file://" d
 = checkPath False is_dir warn_only thisfield d'

   -- Note: we don't check for $topdir/${pkgroot} here. We rely on these
   -- variables having been expanded already, see mungePackagePaths.

 | isRelative d = verror ForceFiles $
     thisfield ++ ": " ++ d ++ " is a relative path which "
     ++ "makes no sense (as there is nothing for it to be "
     ++ "relative to). You can make paths relative to the "
     ++ "package database itself by using ${pkgroot}."
        -- relative paths don't make any sense; #4134
 | otherwise = do
     there <- liftIO $ if is_dir then doesDirectoryExist d else doesFileExist d
     when (not there) $
       let msg = thisfield ++ ": " ++ d ++ " doesn't exist or isn't a "
                 ++ if is_dir then "directory" else "file"
       in
       -- some fields (haddock) only warrant a warning when missing
       if warn_only
         then vwarn msg
         else verror ForceFiles msg
-- | Every dependency id must resolve to an installed package somewhere
-- in the stack (overridable with --force).
checkDep :: PackageDBStack -> InstalledPackageId -> Validate ()
checkDep db_stack pkgid
  | found = return ()
  | otherwise = verror ForceAll ("dependency \"" ++ display pkgid
                                 ++ "\" doesn't exist")
  where
    found = pkgid `elem` map installedPackageId (allPackagesInStack db_stack)
-- | The depends list must not mention the same package id twice.
checkDuplicateDepends :: [InstalledPackageId] -> Validate ()
checkDuplicateDepends deps =
  unless (null dups) $
    verror ForceAll ("package has duplicate dependencies: " ++
                     unwords (map display dups))
  where
    dups = [ d | (d:_:_) <- group (sort deps) ]
-- | At least one build flavour (static .a, profiled .p_a, or shared) of
-- the named Haskell library must exist somewhere on the library path;
-- then possibly auto-build the GHCi (.o) flavour in that directory.
checkHSLib :: Verbosity -> [String] -> Bool -> String -> Validate ()
checkHSLib verbosity dirs auto_ghci_libs lib = do
  let batch_lib_file = "lib" ++ lib ++ ".a"
      filenames = ["lib" ++ lib ++ ".a",
                   "lib" ++ lib ++ ".p_a",
                   "lib" ++ lib ++ "-ghc" ++ showVersion ghcVersion ++ ".so",
                   "lib" ++ lib ++ "-ghc" ++ showVersion ghcVersion ++ ".dylib",
                   lib ++ "-ghc" ++ showVersion ghcVersion ++ ".dll"]
  m <- liftIO $ doesFileExistOnPath filenames dirs
  case m of
    Nothing -> verror ForceFiles ("cannot find any of " ++ show filenames ++
                                  " on library path")
    Just dir -> liftIO $ checkGHCiLib verbosity dir batch_lib_file lib auto_ghci_libs
-- | Search the given directories for any of the candidate file names and
-- return the first directory that contains one (Nothing when none does).
doesFileExistOnPath :: [FilePath] -> [FilePath] -> IO (Maybe FilePath)
doesFileExistOnPath filenames paths = search candidates
  where
    candidates = [ (dir, dir </> file) | file <- filenames, dir <- paths ]
    search [] = return Nothing
    search ((dir, full) : rest) = do
      found <- doesFileExist full
      if found then return (Just dir) else search rest
-- | Every exposed and hidden module must have an interface file (.hi,
-- .p_hi or .dyn_hi) in some import directory; GHC.Prim is magic and
-- deliberately exempt.
checkModules :: InstalledPackageInfo -> Validate ()
checkModules pkg = do
  mapM_ findModule (exposedModules pkg ++ hiddenModules pkg)
  where
    findModule modl =
      -- there's no interface file for GHC.Prim
      unless (modl == fromString "GHC.Prim") $ do
        let files = [ toFilePath modl <.> extension
                    | extension <- ["hi", "p_hi", "dyn_hi" ] ]
        m <- liftIO $ doesFileExistOnPath files (importDirs pkg)
        when (isNothing m) $
          verror ForceFiles ("cannot find any of " ++ show files)
-- | When auto-ghc-libs is enabled, build the GHCi-loadable (.o) flavour
-- of the batch library in place; otherwise do nothing.
checkGHCiLib :: Verbosity -> String -> String -> String -> Bool -> IO ()
checkGHCiLib verbosity batch_lib_dir batch_lib_file lib auto_build
  | auto_build =
      autoBuildGHCiLib verbosity batch_lib_dir batch_lib_file (lib <.> "o")
  | otherwise = return ()
-- automatically build the GHCi version of a batch lib,
-- using ld --whole-archive.
-- | Link lib<x>.a into a single relocatable <x>.o loadable by GHCi,
-- with a platform-specific ld invocation chosen at compile time; the
-- linker's failure exit status is propagated.
autoBuildGHCiLib :: Verbosity -> String -> String -> String -> IO ()
autoBuildGHCiLib verbosity dir batch_file ghci_file = do
  let ghci_lib_file = dir ++ '/':ghci_file
      batch_lib_file = dir ++ '/':batch_file
  when (verbosity >= Normal) $
    info ("building GHCi library " ++ ghci_lib_file ++ "...")
#if defined(darwin_HOST_OS)
  r <- rawSystem "ld" ["-r","-x","-o",ghci_lib_file,"-all_load",batch_lib_file]
#elif defined(mingw32_HOST_OS)
  execDir <- getLibDir
  r <- rawSystem (maybe "" (++"/gcc-lib/") execDir++"ld") ["-r","-x","-o",ghci_lib_file,"--whole-archive",batch_lib_file]
#else
  r <- rawSystem "ld" ["-r","-x","-o",ghci_lib_file,"--whole-archive",batch_lib_file]
#endif
  when (r /= ExitSuccess) $ exitWith r
  when (verbosity >= Normal) $
    infoLn (" done.")
-- -----------------------------------------------------------------------------
-- Searching for modules

-- NOTE(review): this whole section is compiled out ('not_yet' is never
-- defined) and is syntactically incomplete -- 'findModules' is missing a
-- 'do', the 'otherwise' guard lacks '=', and 'okInModuleName' has no
-- body.  Kept verbatim since the preprocessor discards it.
#if not_yet

findModules :: [FilePath] -> IO [String]
findModules paths =
  mms <- mapM searchDir paths
  return (concat mms)

searchDir path prefix = do
  fs <- getDirectoryEntries path `catchIO` \_ -> return []
  searchEntries path prefix fs

searchEntries path prefix [] = return []

searchEntries path prefix (f:fs)
  | looks_like_a_module = do
        ms <- searchEntries path prefix fs
        return (prefix `joinModule` f : ms)
  | looks_like_a_component = do
        ms <- searchDir (path </> f) (prefix `joinModule` f)
        ms' <- searchEntries path prefix fs
        return (ms ++ ms')
  | otherwise
        searchEntries path prefix fs

  where
        (base,suffix) = splitFileExt f
        looks_like_a_module =
                suffix `elem` haskell_suffixes &&
                all okInModuleName base
        looks_like_a_component =
                null suffix && all okInModuleName base

okInModuleName c
#endif
-- ---------------------------------------------------------------------------
-- expanding environment variables in the package configuration

-- | Expand every @${VAR}@ occurrence from the process environment.
-- ${pkgroot} and ${pkgrooturl} are deliberately left untouched; an
-- unknown variable dies unless overridden by --force.
expandEnvVars :: String -> Force -> IO String
expandEnvVars str0 force = go str0 ""
 where
   -- the accumulator is built reversed, hence the final 'reverse'
   go "" acc = return $! reverse acc
   go ('$':'{':str) acc | (var, '}':rest) <- break close str
        = do value <- lookupEnvVar var
             go rest (reverse value ++ acc)
          -- 'close' also stops at '\n', so an unterminated ${ cannot
          -- swallow the remainder of the input
        where close c = c == '}' || c == '\n' -- don't span newlines
   go (c:str) acc
        = go str (c:acc)

   lookupEnvVar :: String -> IO String
   lookupEnvVar "pkgroot" = return "${pkgroot}" -- these two are special,
   lookupEnvVar "pkgrooturl" = return "${pkgrooturl}" -- we don't expand them
   lookupEnvVar nm =
        catchIO (System.Environment.getEnv nm)
           (\ _ -> do dieOrForceAll force ("Unable to expand variable " ++
                                           show nm)
                      return "")
-----------------------------------------------------------------------------
-- | The name used in messages: argv[0] with any ".bin" suffix removed.
getProgramName :: IO String
getProgramName = fmap dropBin getProgName
  where
    dropBin str
      | ".bin" `isSuffixOf` str = take (length str - length ".bin") str
      | otherwise = str
-- | Print a message and exit successfully.
bye :: String -> IO a
bye s = putStr s >> exitWith ExitSuccess

-- | Report an error and exit with status 1.
die :: String -> IO a
die = dieWith 1

-- | Report an error prefixed with the program name, then exit with the
-- given status code.
dieWith :: Int -> String -> IO a
dieWith ec s = do
  prog <- getProgramName
  reportError (prog ++ ": " ++ s)
  exitWith (ExitFailure ec)

-- | Under --force-all, merely note the problem and continue; otherwise
-- die with a hint that --force can override.
dieOrForceAll :: Force -> String -> IO ()
dieOrForceAll ForceAll s = ignoreError s
dieOrForceAll _other s = dieForcible s

-- | Warnings go to stderr, like errors.
warn :: String -> IO ()
warn = reportError

-- send info messages to stdout
infoLn :: String -> IO ()
infoLn = putStrLn

info :: String -> IO ()
info = putStr

-- | Report a problem that is being deliberately skipped over.
ignoreError :: String -> IO ()
ignoreError s = reportError (s ++ " (ignoring)")

-- | Errors go to stderr; stdout is flushed first so interleaved
-- info/error output stays in order.
reportError :: String -> IO ()
reportError s = do
  hFlush stdout
  hPutStrLn stderr s

dieForcible :: String -> IO ()
dieForcible s = die (s ++ " (use --force to override)")
-- | 'head' with a caller-supplied panic message for the empty case.
my_head :: String -> [a] -> a
my_head msg xs =
  case xs of
    [] -> error msg
    (x : _) -> x
-- | Haste keeps its support libraries in a fixed system directory, so
-- this always succeeds with that path.
getLibDir :: IO (Maybe String)
getLibDir = return (Just hasteSysDir)
-----------------------------------------
-- Adapted from ghc/compiler/utils/Panic

-- | Turn SIGQUIT/SIGINT (or Windows console events) into an ErrorCall
-- thrown to the main thread, so normal cleanup still runs.
installSignalHandlers :: IO ()
installSignalHandlers = do
  threadid <- myThreadId
  let
      interrupt = Exception.throwTo threadid
                                    (Exception.ErrorCall "interrupted")
  --
#if !defined(mingw32_HOST_OS)
  _ <- installHandler sigQUIT (Catch interrupt) Nothing
  _ <- installHandler sigINT (Catch interrupt) Nothing
  return ()
#else
  -- GHC 6.3+ has support for console events on Windows
  -- NOTE: running GHCi under a bash shell for some reason requires
  -- you to press Ctrl-Break rather than Ctrl-C to provoke
  -- an interrupt. Ctrl-C is getting blocked somewhere, I don't know
  -- why --SDM 17/12/2004
  let sig_handler ControlC = interrupt
      sig_handler Break = interrupt
      sig_handler _ = return ()
  _ <- installHandler (Catch sig_handler)
  return ()
#endif
#if mingw32_HOST_OS || mingw32_TARGET_OS
-- | Typed rethrow helper (Windows-only; used by withFileAtomic).
throwIOIO :: Exception.IOException -> IO a
throwIOIO = Exception.throwIO
#endif

-- | 'Exception.catch' restricted to IOException, so other exception
-- types still propagate.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch

-- | Catch only ErrorCall (i.e. 'error'), passing its message on.
catchError :: IO a -> (String -> IO a) -> IO a
catchError io handler = io `Exception.catch` handler'
    where handler' (Exception.ErrorCall err) = handler err

-- | 'Exception.try' restricted to IOException.
tryIO :: IO a -> IO (Either Exception.IOException a)
tryIO = Exception.try
-- | Atomically replace @targetFile@ with the binary encoding of @obj@.
writeBinaryFileAtomic :: Bin.Binary a => FilePath -> a -> IO ()
writeBinaryFileAtomic targetFile obj =
  withFileAtomic targetFile $ \h ->
    hSetBinaryMode h True >> B.hPutStr h (Bin.encode obj)
-- | Atomically replace @targetFile@ with UTF-8 encoded text.
writeFileUtf8Atomic :: FilePath -> String -> IO ()
writeFileUtf8Atomic targetFile content =
  withFileAtomic targetFile $ \h ->
    hSetEncoding h utf8 >> hPutStr h content
-- copied from Cabal's Distribution.Simple.Utils, except that we want
-- to use text files here, rather than binary files.

-- | Write a file atomically: write into a fresh temp file in the
-- target's own directory, then rename over the target.  On Windows
-- rename cannot replace an existing file, so a (racy) delete-then-rename
-- fallback is used.  On any exception, the temp file is cleaned up.
withFileAtomic :: FilePath -> (Handle -> IO ()) -> IO ()
withFileAtomic targetFile write_content = do
  (newFile, newHandle) <- openNewFile targetDir template
  do  write_content newHandle
      hClose newHandle
#if mingw32_HOST_OS || mingw32_TARGET_OS
      renameFile newFile targetFile
        -- If the targetFile exists then renameFile will fail
        `catchIO` \err -> do
          exists <- doesFileExist targetFile
          if exists
            then do removeFileSafe targetFile
                    -- Big fat hairy race condition
                    renameFile newFile targetFile
                    -- If the removeFile succeeds and the renameFile fails
                    -- then we've lost the atomic property.
            else throwIOIO err
#else
      renameFile newFile targetFile
#endif
   `Exception.onException` do hClose newHandle
                              removeFileSafe newFile
  where
    template = targetName <.> "tmp"
    targetDir | null targetDir_ = "."
              | otherwise = targetDir_
    --TODO: remove this when takeDirectory/splitFileName is fixed
    -- to always return a valid dir
    (targetDir_,targetName) = splitFileName targetFile
-- | Create a uniquely-named, writable, text-mode file in @dir@.
openNewFile :: FilePath -> String -> IO (FilePath, Handle)
openNewFile dir template = do
  -- this was added to System.IO in 6.12.1
  -- we must use this version because the version below opens the file
  -- in binary mode.
  openTempFileWithDefaultPermissions dir template
-- | The function splits the given string to substrings
-- using 'isSearchPathSeparator'.
parseSearchPath :: String -> [FilePath]
parseSearchPath path = split path
  where
    split :: String -> [String]
    split s =
      case rest' of
        [] -> [chunk]
        _:rest -> chunk : split rest
      where
        chunk =
          case chunk' of
#ifdef mingw32_HOST_OS
            -- on Windows, strip a matched pair of surrounding quotes
            ('\"':xs@(_:_)) | last xs == '\"' -> init xs
#endif
            _ -> chunk'

        (chunk', rest') = break isSearchPathSeparator s
-- | Read a whole file decoded as UTF-8.
-- NOTE(review): uses lazy 'hGetContents', so the handle stays open until
-- the result is fully forced and decoding errors surface late.
readUTF8File :: FilePath -> IO String
readUTF8File file = do
  h <- openFile file ReadMode
  -- fix the encoding to UTF-8
  hSetEncoding h utf8
  hGetContents h
-- | Like 'removeFile', but a file that is already missing is not an
-- error; any other IOException is re-raised.
removeFileSafe :: FilePath -> IO ()
removeFileSafe fn =
  removeFile fn `catchIO` \e ->
    unless (isDoesNotExistError e) (ioError e)
-- | Interpret a path relative to the current working directory and
-- normalise the result.
absolutePath :: FilePath -> IO FilePath
absolutePath path = fmap (normalise . (</> path)) getCurrentDirectory
-- | Only global packages may be marked as relocatable!
-- May break horribly for general use, only reliable for Haste base packages.
--
-- Reads the package description via hastePkgBinary, rewrites its
-- path-valued fields to be rooted at ${pkgroot}, and re-registers the
-- result with --force --global.
relocate :: [String] -> String -> Sh.Shell ()
relocate packages pkg = do
    pi <- Sh.run hastePkgBinary (packages ++ ["describe", pkg]) ""
    Sh.run_ hastePkgBinary (packages ++ ["update", "-", "--force", "--global"])
            (reloc pi)
  where
    reloc = unlines . map fixPath . lines

    -- rewrite only the known path-valued fields; everything else passes
    -- through untouched
    fixPath s
      | isKey "library-dirs: " s = prefix s "library-dirs" importDir
      | isKey "import-dirs: " s = prefix s "import-dirs" importDir
      | isKey "haddock-interfaces: " s = prefix s "haddock-interfaces" importDir
      | isKey "haddock-html: " s = prefix s "haddock-html" importDir
      | isKey "include-dirs: " s = "include-dirs: " ++ includeDir
      | otherwise = s

    prefix s pfx path = pfx ++ ": " ++ path </> stripPrefix s

    -- keep only the last 2 path components (3 on darwin)
    -- NOTE(review): these case expressions are non-exhaustive; a value
    -- with fewer path components would crash.  Presumably impossible for
    -- Haste-installed packages -- confirm before wider use.  Also note
    -- this local 'stripPrefix' shadows Data.List.stripPrefix.
    stripPrefix s
      | os == "darwin" =
        case take 3 $ reverse $ splitPath s of
          [third, second, first] -> first </> second </> third
      | otherwise =
        case take 2 $ reverse $ splitPath s of
          [second, first] -> first </> second

    -- prefix test over the raw field line (an empty line never matches)
    isKey _ "" =
      False
    isKey key str =
      and $ zipWith (==) key str

    importDir
      | os == "linux" = "${pkgroot}" </> "libraries" </> "lib"
      | otherwise = "${pkgroot}" </> "libraries"
    includeDir = "${pkgroot}" </> "include"
|
akru/haste-compiler
|
utils/haste-pkg/haste-pkg.hs
|
Haskell
|
bsd-3-clause
| 71,975
|
--
-- Copyright © 2013-2014 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Marquise.IO.SpoolFile
( newRandomPointsSpoolFile
, newRandomContentsSpoolFile
) where
import Control.Applicative
import Control.Concurrent (threadDelay)
import Control.Monad.State
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy as LB
import Data.Maybe
import Marquise.Classes
import Marquise.IO.FFI
import Marquise.Types
import System.Directory
import System.FilePath.Posix
import System.IO
import System.IO.Unsafe
import System.Posix.Files
import System.Posix.IO (closeFd)
import System.Posix.Temp
import System.Posix.Types (Fd)
-- | File-backed spool implementation: namespaces live under
-- /var/spool/marquise/<ns> with separate points/contents trees, each
-- split into a "new" (incoming) and "cur" (being-consumed) directory.
instance MarquiseSpoolFileMonad IO where
    randomSpoolFiles sn =
        SpoolFiles <$> newRandomPointsSpoolFile sn
                   <*> newRandomContentsSpoolFile sn

    -- create all four spool subdirectories (and parents) up front
    createDirectories sn =
        mapM_ (createDirectoryIfMissing True . ($sn))
              [ newPointsDir
              , newContentsDir
              , curPointsDir
              , curContentsDir]

    appendPoints spools = doAppend (pointsSpoolFile spools)
    appendContents spools = doAppend (contentsSpoolFile spools)

    nextPoints sn = nextSpoolContents (newPointsDir sn) (curPointsDir sn)
    nextContents sn = nextSpoolContents (newContentsDir sn) (curContentsDir sn)

    -- flush OS buffers so spooled data survives a crash
    close _ = c_sync
-- | Create an empty, uniquely-named file from the given mkstemp
-- template/prefix and return its path (the handle is closed at once).
newRandomSpoolFile :: FilePath -> IO FilePath
newRandomSpoolFile prefix = do
  (spool_path, h) <- mkstemp prefix
  spool_path <$ hClose h
-- | Creates and returns a new points spool file from a namespace
newRandomPointsSpoolFile :: SpoolName -> IO FilePath
newRandomPointsSpoolFile sn = newRandomSpoolFile (newPointsDir sn)

-- | Creates and returns a new contents spool file from a namespace
newRandomContentsSpoolFile :: SpoolName -> IO FilePath
newRandomContentsSpoolFile sn = newRandomSpoolFile (newContentsDir sn)
-- | Grab the next available spool file, providing that file as a lazy
-- bytestring and an action to close it, wiping the file.
--
-- Returns Nothing after merely rotating new work into the current
-- directory; the caller is expected to poll again.
nextSpoolContents :: FilePath -> FilePath -> IO (Maybe (L.ByteString, IO ()))
nextSpoolContents new_dir cur_dir = do
    -- First check for any work already in the work spool dir.
    work <- tryCurDir cur_dir

    case work of
        Nothing ->
            -- No existing work, get some new work out of the spool
            -- directory then.
            rotate new_dir cur_dir >> return Nothing
        Just (fp, lock_fd) -> do
            threadDelay 100000 -- Ensure that any slow writes are done
            -- NOTE(review): lazy readFile -- the unlink in close_f
            -- relies on POSIX keeping the open file's data readable.
            contents <- LB.readFile fp
            let close_f = removeLink fp >> closeFd lock_fd
            return $ Just (contents, close_f)
-- | Check the work directory for any outstanding work, if there is a potential
-- candidate, lock it. If that fails, try the next.
tryCurDir :: FilePath -> IO (Maybe (FilePath, Fd))
tryCurDir cur_dir =
    listToMaybe . catMaybes <$> (getAbsoluteDirectoryFiles cur_dir
                                 >>= mapM lazyLock)
  where
    -- unsafeInterleaveIO defers each lock attempt until listToMaybe
    -- demands it, so no further files are locked after the first success
    lazyLock :: FilePath -> IO (Maybe (FilePath, Fd))
    lazyLock fp = unsafeInterleaveIO $ do
        lock <- tryLock fp
        case lock of
            Nothing -> return Nothing
            Just lock_fd -> return . Just $ (fp, lock_fd)
-- | Attempt to rotate all files from the src folder to the dst folder,
-- giving each a fresh unique name via mkstemp so writers never collide.
rotate :: FilePath -> FilePath -> IO ()
rotate src dst = do
    candidates <- getAbsoluteDirectoryFiles src
    -- mapM_ is already a no-op on an empty list, so the original
    -- 'unless (null candidates)' guard was redundant and is dropped
    mapM_ doMove candidates
  where
    doMove src_file = do
        (new_path, h) <- mkstemp dst
        hClose h
        renameFile src_file new_path
-- | The entries of a directory that are existing regular files, as
-- paths rooted at the argument directory.
getAbsoluteDirectoryFiles :: FilePath -> IO [FilePath]
getAbsoluteDirectoryFiles dir =
    getAbsoluteDirectoryContents dir >>= filterM doesFileExist
-- | Directory listing with every entry prefixed by the directory itself.
getAbsoluteDirectoryContents :: FilePath -> IO [FilePath]
getAbsoluteDirectoryContents fp =
    -- 'fp </> rel' is exactly 'joinPath [fp, rel]' for the relative
    -- names getDirectoryContents yields, without building a list per entry
    map (fp </>) <$> getDirectoryContents fp
-- | Append a raw chunk to the named spool file (strict ByteString
-- append; the file is created if it does not yet exist).
doAppend :: FilePath -> ByteString -> IO ()
doAppend fp bs = S.appendFile fp bs
-- | Spool layout: /var/spool/marquise/<ns>/{points,contents}/{new,cur}/
newPointsDir :: SpoolName -> FilePath
newPointsDir = specificSpoolDir ["points", "new"]

newContentsDir :: SpoolName -> FilePath
newContentsDir = specificSpoolDir ["contents", "new"]

curPointsDir :: SpoolName -> FilePath
curPointsDir = specificSpoolDir ["points", "cur"]

curContentsDir :: SpoolName -> FilePath
curContentsDir = specificSpoolDir ["contents", "cur"]

-- | A subdirectory chain under the namespace's spool root, rendered
-- with a trailing path separator.
specificSpoolDir :: [String] -> SpoolName -> FilePath
specificSpoolDir subdirs sn =
    addTrailingPathSeparator (joinPath (baseSpoolDir sn : subdirs))

-- | The root spool directory for a namespace.
baseSpoolDir :: SpoolName -> FilePath
baseSpoolDir (SpoolName sn) = "/var/spool/marquise/" </> sn
|
anchor/marquise
|
lib/Marquise/IO/SpoolFile.hs
|
Haskell
|
bsd-3-clause
| 4,886
|
module Blockchain.SampleTransactions where
import Prelude hiding (EQ)
import qualified Data.ByteString as B
import Network.Haskoin.Internals hiding (Address)
import Blockchain.Data.Address
import Blockchain.Data.Code
import Blockchain.Data.Transaction
import Blockchain.Constants
import Blockchain.ExtendedECDSA
import Blockchain.ExtWord
import Blockchain.JCommand
import Blockchain.VM.Code
import Blockchain.VM.Labels
import Blockchain.VM.Opcodes
--import Debug.Trace
-- | Build a signed contract-creation transaction with nonce 0 and a fixed
-- gas price of 0x9184e72a000 wei: @val@ is the endowment, @gl@ the gas
-- limit, @code@ the initialisation code.
createContract::Monad m=>Integer->Integer->Code->PrvKey->SecretT m Transaction
createContract val gl code prvKey =
    createContractCreationTX 0 0x9184e72a000 gl val code prvKey
-- | Build a signed message-call transaction to @toAddr@ carrying
-- @theData@, with the same fixed nonce (0) and gas price as
-- 'createContract'.
createMessage::Monad m=>Integer->Integer->Address->B.ByteString->PrvKey->SecretT m Transaction
createMessage val gl toAddr theData prvKey = createMessageTX 0 0x9184e72a000 gl toAddr val theData prvKey
----------------------
-- | Sample contract: stores the word 2 at memory offset 0 and returns
-- that 32-byte word as the deployed contract body.
simpleTX::Monad m=>PrvKey->SecretT m Transaction
simpleTX =
    createContract 0 550
    $ compile
        [
        PUSH [2],
        PUSH [0],
        MSTORE,
        PUSH [0x20],
        PUSH [0],
        RETURN
        ]
-- | Sample contract whose gas limit (522) is intended to be too small
-- for its init code to finish, exercising the out-of-gas path.
outOfGasTX::Monad m=>PrvKey->SecretT m Transaction
outOfGasTX =
    createContract 3 522
    $ compile
        [
        PUSH [1],
        PUSH [0],
        MSTORE
        ]
-- | Sample contract that writes the value 1 into permanent storage
-- slot 0 during initialisation.
simpleStorageTX::Monad m=>PrvKey->SecretT m Transaction
simpleStorageTX =
    createContract 3 1000
    $ compile
        [
        PUSH [1],
        PUSH [0],
        SSTORE
        ]
-- | Assemble init code from high-level 'JCommand's: run @initFunc@, then
-- return the compiled @contract@ body as the deployed code (nested
-- compile/jcompile passes lower JCommands to labelled opcodes to 'Code').
createInit::[JCommand]->[JCommand]->Code
createInit initFunc contract = -- trace (intercalate "-" $ show <$> contract) $
    -- trace (intercalate "\n    " $ fmap show $ snd $ jcompile $ initFunc ++ [ReturnCode contract]) $ do
    compile $ lcompile $ snd $ jcompile $ initFunc ++ [ReturnCode $ compile $ lcompile $ snd $ jcompile contract]
-- | Sample contract whose body copies call-data word 0 into permanent
-- storage slot 0 on every message call.
createContractTX::Monad m=>PrvKey->SecretT m Transaction
createContractTX =
    createContract (1000*finney) 1000
    $ createInit []
        [
        PermStorage (Number 0) :=: Input 0
        ]
-- | Sample message call sending the word 0x1234 to a fixed contract
-- address.
sendMessageTX::Monad m=>PrvKey->SecretT m Transaction
sendMessageTX =
    createMessage (1000*finney) 1000 (Address 0x9f840fe058ce3d84e319b8c747accc1e52f69426)
        (B.pack $ word256ToBytes 0x1234)
-- | Sample token-transfer contract: the creator's balance is initialised
-- to 1000; a call of the form (toAddr, val) moves @val@ from the
-- caller's storage slot to @toAddr@'s, only when the caller has enough.
paymentContract::Monad m=>PrvKey->SecretT m Transaction
paymentContract =
    createContract (1000*finney) 2000
    $ createInit
        [
        PermStorage Caller :=: Number 1000
        ]
        (
        let
            toAddr = Input (0*32)
            fromAddr = Caller
            val = Input (1*32)
        in
            [
            If (PermVal fromAddr :>=: val)
                [
                PermStorage fromAddr :=: PermVal fromAddr - val,
                PermStorage toAddr :=: PermVal toAddr + val
                ]
            ]
        )
-- | Sample call into 'paymentContract': transfer 500 units to address 1.
sendCoinTX::Monad m=>PrvKey->SecretT m Transaction
sendCoinTX =
    createMessage 0 2000 (Address 0x9f840fe058ce3d84e319b8c747accc1e52f69426)
        (B.pack $ word256ToBytes 0x1 ++ word256ToBytes 500)
-- | Sample key/value store contract: slot 69 records the owner (the
-- creator).  Only the owner may call; call data is consumed as
-- (key, value) word pairs and each pair is written to permanent storage.
keyValuePublisher::Monad m=>PrvKey->SecretT m Transaction
keyValuePublisher =
    createContract (1000*finney) 2000
    $ createInit
        [
        PermStorage 69 :=: Caller
        ]
        (
        let
            inputP = MemStorage (Number 0)
            inputPr = MemVal (Number 0)
        in
            [
            If (Caller :==: PermVal (Number 69))
                [
                While (inputPr :<: CallDataSize)
                    [
                    PermStorage (Input inputPr) :=: Input (inputPr + 32),
                    inputP :=: inputPr + 64
                    ]
                ]
            ]
        )
-- | Sample call into 'keyValuePublisher': publish the pairs
-- (1000, 2000) and (1234, 1).
sendKeyVal::Monad m=>PrvKey->SecretT m Transaction
sendKeyVal prvKey =
    createMessage 0 2000 (Address 0x9f840fe058ce3d84e319b8c747accc1e52f69426)
        (B.pack $ word256ToBytes 1000 ++ word256ToBytes 2000 ++ word256ToBytes 1234 ++ word256ToBytes 1)
        prvKey
-- LLL-style pseudo-code for the key/value publisher loop above:
{-
(when (= (caller) @@69)
      (for {} (< @i (calldatasize)) [i](+ @i 64)
           [[ (calldataload @i) ]] (calldataload (+ @i 32))
           )
      )
-}
mysteryCode::[Operation]
mysteryCode =
[
PUSH [3,144],
DUP1,
PUSH [0,14],
PUSH [0],
CODECOPY,
PUSH [3,158],
JUMP,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [32],
MSTORE,
PUSH [0],
PUSH [64],
MSTORE,
PUSH [1],
PUSH [96],
MSTORE,
PUSH [2],
PUSH [128],
MSTORE,
PUSH [3],
PUSH [160],
MSTORE,
PUSH [0],
PUSH [192],
MSTORE,
PUSH [1],
PUSH [224],
MSTORE,
PUSH [2],
PUSH [1,0],
MSTORE,
PUSH [0],
PUSH [1,32],
MSTORE,
PUSH [1],
PUSH [1,64],
MSTORE,
PUSH [2],
PUSH [1,96],
MSTORE,
PUSH [3],
PUSH [1,128],
MSTORE,
PUSH [3],
PUSH [1,160],
MSTORE,
PUSH [32],
CALLDATASIZE,
DIV,
PUSH [1,192],
MSTORE,
PUSH [1,160],
MLOAD,
PUSH [1,128],
MLOAD,
ADD,
PUSH [1,192],
MLOAD,
SLT,
ISZERO,
PUSH [0,136],
JUMPI,
PUSH [1,64],
MLOAD,
PUSH [1,224],
MSTORE,
PUSH [32],
PUSH [1,224],
CALLCODE,
JUMPDEST,
PUSH [0],
PUSH [1,128],
MLOAD,
PUSH [1,160],
MLOAD,
PUSH [1,192],
MLOAD,
SUB,
SMOD,
EQ,
ISZERO,
ISZERO,
PUSH [0,174],
JUMPI,
PUSH [1,64],
MLOAD,
PUSH [2,0],
MSTORE,
PUSH [32],
PUSH [2,0],
CALLCODE,
JUMPDEST,
PUSH [1,128],
MLOAD,
PUSH [1,160],
MLOAD,
PUSH [1,192],
MLOAD,
SUB,
SDIV,
PUSH [2,32],
MSTORE,
PUSH [0],
PUSH [2,64],
MSTORE,
PUSH [2],
PUSH [1,160],
MLOAD,
ADD,
PUSH [32],
MUL,
CALLDATALOAD,
PUSH [2,96],
MSTORE,
PUSH [0],
PUSH [2,128],
MSTORE,
JUMPDEST,
PUSH [2,32],
MLOAD,
PUSH [2,64],
MLOAD,
SLT,
ISZERO,
PUSH [1,155],
JUMPI,
PUSH [1],
PUSH [1,160],
MLOAD,
PUSH [1,128],
MLOAD,
PUSH [2,64],
MLOAD,
MUL,
ADD,
ADD,
PUSH [32],
MUL,
CALLDATALOAD,
PUSH [2,160],
MSTORE,
PUSH [2],
PUSH [1,160],
MLOAD,
PUSH [1,128],
MLOAD,
PUSH [2,64],
MLOAD,
MUL,
ADD,
ADD,
PUSH [32],
MUL,
CALLDATALOAD,
PUSH [2,192],
MSTORE,
PUSH [2,96],
MLOAD,
PUSH [2,192],
MLOAD,
EQ,
ISZERO,
ISZERO,
PUSH [1,80],
JUMPI,
PUSH [2,192],
MLOAD,
PUSH [2,96],
MSTORE,
PUSH [0],
PUSH [2,128],
MLOAD,
EQ,
ISZERO,
ISZERO,
PUSH [1,79],
JUMPI,
PUSH [1,96],
MLOAD,
PUSH [2,224],
MSTORE,
PUSH [32],
PUSH [2,224],
CALLCODE,
JUMPDEST,
JUMPDEST,
PUSH [2,160],
MLOAD,
PUSH [2,128],
MLOAD,
ADD,
PUSH [2,128],
MSTORE,
PUSH [1],
PUSH [2,32],
MLOAD,
SUB,
PUSH [2,64],
MLOAD,
EQ,
ISZERO,
PUSH [1,139],
JUMPI,
PUSH [0],
PUSH [2,128],
MLOAD,
EQ,
ISZERO,
ISZERO,
PUSH [1,138],
JUMPI,
PUSH [1,96],
MLOAD,
PUSH [3,0],
MSTORE,
PUSH [32],
PUSH [3,0],
CALLCODE,
JUMPDEST,
JUMPDEST,
PUSH [1],
PUSH [2,64],
MLOAD,
ADD,
PUSH [2,64],
MSTORE,
PUSH [0,220],
JUMP,
JUMPDEST,
PUSH [32],
MLOAD,
SLOAD,
PUSH [3,32],
MSTORE,
PUSH [1],
PUSH [32],
MLOAD,
SLOAD,
ADD,
PUSH [32],
MLOAD,
SSTORE,
PUSH [32],
CALLDATALOAD,
PUSH [3,64],
MSTORE,
PUSH [64],
CALLDATALOAD,
PUSH [3,96],
MSTORE,
PUSH [255,255,255,255,255,255,255,255],
PUSH [3,128],
MSTORE,
PUSH [3,64],
MLOAD,
PUSH [64],
MLOAD,
PUSH [1,0,0,0,0,0,0,0,0],
PUSH [3,128],
MLOAD,
MUL,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [3,32],
MLOAD,
MUL,
ADD,
ADD,
SSTORE,
PUSH [3,96],
MLOAD,
PUSH [96],
MLOAD,
PUSH [1,0,0,0,0,0,0,0,0],
PUSH [3,128],
MLOAD,
MUL,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [3,32],
MLOAD,
MUL,
ADD,
ADD,
SSTORE,
NUMBER,
PUSH [128],
MLOAD,
PUSH [1,0,0,0,0,0,0,0,0],
PUSH [3,128],
MLOAD,
MUL,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [3,32],
MLOAD,
MUL,
ADD,
ADD,
SSTORE,
TIMESTAMP,
PUSH [160],
MLOAD,
PUSH [1,0,0,0,0,0,0,0,0],
PUSH [3,128],
MLOAD,
MUL,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [3,32],
MLOAD,
MUL,
ADD,
ADD,
SSTORE,
PUSH [0],
PUSH [2,64],
MSTORE,
JUMPDEST,
PUSH [2,32],
MLOAD,
PUSH [2,64],
MLOAD,
SLT,
ISZERO,
PUSH [3,129],
JUMPI,
PUSH [1,160],
MLOAD,
PUSH [1,128],
MLOAD,
PUSH [2,64],
MLOAD,
MUL,
ADD,
PUSH [32],
MUL,
CALLDATALOAD,
PUSH [3,160],
MSTORE,
PUSH [1],
PUSH [1,160],
MLOAD,
PUSH [1,128],
MLOAD,
PUSH [2,64],
MLOAD,
MUL,
ADD,
ADD,
PUSH [32],
MUL,
CALLDATALOAD,
PUSH [2,160],
MSTORE,
PUSH [2],
PUSH [1,160],
MLOAD,
PUSH [1,128],
MLOAD,
PUSH [2,64],
MLOAD,
MUL,
ADD,
ADD,
PUSH [32],
MUL,
CALLDATALOAD,
PUSH [2,192],
MSTORE,
PUSH [3,160],
MLOAD,
PUSH [192],
MLOAD,
PUSH [1,0,0,0,0,0,0,0,0],
PUSH [2,64],
MLOAD,
MUL,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [3,32],
MLOAD,
MUL,
ADD,
ADD,
SSTORE,
PUSH [2,160],
MLOAD,
PUSH [224],
MLOAD,
PUSH [1,0,0,0,0,0,0,0,0],
PUSH [2,64],
MLOAD,
MUL,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [3,32],
MLOAD,
MUL,
ADD,
ADD,
SSTORE,
PUSH [2,192],
MLOAD,
PUSH [1,0],
MLOAD,
PUSH [1,0,0,0,0,0,0,0,0],
PUSH [2,64],
MLOAD,
MUL,
PUSH [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
PUSH [3,32],
MLOAD,
MUL,
ADD,
ADD,
SSTORE,
PUSH [1],
PUSH [2,64],
MLOAD,
ADD,
PUSH [2,64],
MSTORE,
PUSH [2,138],
JUMP,
JUMPDEST,
PUSH [1,32],
MLOAD,
PUSH [3,192],
MSTORE,
PUSH [32],
PUSH [3,192],
CALLCODE,
JUMPDEST,
PUSH [0],
CALLCODE
]
-- | Deploy 'mysteryCode' (a captured, hand-disassembled opcode listing)
-- with nonce 0, the standard fixed gas price, a gas limit of 8000 and
-- no endowment.
createMysteryContract::Monad m=>PrvKey->SecretT m Transaction
createMysteryContract prvKey =
    createContractCreationTX 0 0x9184e72a000 8000 0 (compile mysteryCode) prvKey
|
jamshidh/ethereum-vm
|
src/Blockchain/SampleTransactions.hs
|
Haskell
|
bsd-3-clause
| 11,275
|
{-# LANGUAGE GeneralizedNewtypeDeriving,
FlexibleInstances,
MultiParamTypeClasses,
TypeFamilies,
StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Network.Kontiki.Monad
-- Copyright : (c) 2013, Nicolas Trangez
-- License : BSD-like
--
-- Maintainer : ikke@nicolast.be
--
-- This module introduces the main monad transformer `TransitionT' in
-- which `Handler's, `MessageHandler's and `TimeoutHandler's live
-- and defines actions that can be performed by them.
-----------------------------------------------------------------------------
module Network.Kontiki.Monad where
import Prelude hiding (log)
import Control.Monad.RWS
import Data.ByteString (ByteString)
import Data.ByteString.Lazy.Builder (Builder, byteString)
import Control.Lens hiding (Index)
import Network.Kontiki.Log
import Network.Kontiki.Types
-- | kontiki monad in which `Handler's,
-- `MessageHandler's and `TimeoutHandler's live that adds the ability to
-- read `Config', issue `Command's and keep state `s' to an inner monad `m'.
--
-- Implemented as a newtype over 'RWST': reader = 'Config',
-- writer = emitted @['Command' a]@, state = @s@.
newtype TransitionT a s m r = TransitionT { unTransitionT :: RWST Config [Command a] s m r }
  deriving ( Functor
           , Applicative
           , Monad
           , MonadReader Config
           , MonadWriter [Command a]
           , MonadState s
           , MonadRWS Config [Command a] s
           , MonadTrans
           )
-- | Log access lifts straight through the transformer to the inner monad.
instance (Monad m, MonadLog m a) => MonadLog (TransitionT a f m) a where
    logEntry = lift . logEntry
    logLastEntry = lift logLastEntry
-- | Run a `TransitionT' action under the given `Config' and initial
-- state, producing the result, the final state and the emitted commands.
runTransitionT :: TransitionT a s m r -> Config -> s -> m (r, s, [Command a])
runTransitionT act cfg st = runRWST (unTransitionT act) cfg st
-- | Broadcast a message to all nodes by emitting a `CBroadcast' command.
broadcast :: (Monad m, IsMessage t a) => t -> TransitionT a f m ()
broadcast msg = tell [CBroadcast (toMessage msg)]

-- | Send a message to one specific node by emitting a `CSend' command.
send :: (Monad m, IsMessage t a) => NodeId -> t -> TransitionT a f m ()
send node msg = tell [CSend node (toMessage msg)]
-- | Reset the election timeout to a value between the configured
-- timeout and twice that (the spread is chosen by the interpreter of
-- the emitted command).
resetElectionTimeout :: Monad m => TransitionT a f m ()
resetElectionTimeout =
    view configElectionTimeout >>= \timeout ->
        tell [CResetElectionTimeout timeout (2 * timeout)]

-- | Reset the heartbeat timeout to the configured value.
resetHeartbeatTimeout :: Monad m => TransitionT a f m ()
resetHeartbeatTimeout =
    view configHeartbeatTimeout >>= \timeout ->
        tell [CResetHeartbeatTimeout timeout]
-- | Emit a log message built from this `Builder'.
log :: Monad m => Builder -> TransitionT a f m ()
log builder = tell [CLog builder]

-- | Emit a log message from a strict `ByteString'.
logS :: Monad m => ByteString -> TransitionT a f m ()
logS bs = log (byteString bs)
-- | Request truncation of the event log down to `Index' @idx@.
truncateLog :: Monad m => Index -> TransitionT a f m ()
truncateLog idx = tell [CTruncateLog idx]

-- | Request that the given entries be appended to the log.
logEntries :: Monad m => [Entry a] -> TransitionT a f m ()
logEntries entries = tell [CLogEntries entries]

-- | Request that the commit index be advanced to @idx@.
setCommitIndex :: Monad m => Index -> TransitionT a f m ()
setCommitIndex idx = tell [CSetCommitIndex idx]
-- | Handler of events.
type Handler a s m =
    Event a                                      -- ^ `Event' to handle
    -> TransitionT a (InternalState s) m SomeState  -- ^ new `TransitionT'

-- | Handler of messages.
type MessageHandler t a s m =
    NodeId                                       -- ^ sender of the message
    -> t                                         -- ^ the message
    -> TransitionT a (InternalState s) m SomeState  -- ^ new `TransitionT'

-- | Handler of timeouts.
type TimeoutHandler t a s m = TransitionT a (InternalState s) m SomeState
|
NicolasT/kontiki
|
src/Network/Kontiki/Monad.hs
|
Haskell
|
bsd-3-clause
| 3,757
|
{-
A simple homogeneous pair type with useful Functor, Applicative, and
Traversable instances.
-}
{-# LANGUAGE CPP #-}
module Pair ( Pair(..), unPair, toPair, swap ) where
#include "HsVersions.h"
import Outputable
import Control.Applicative
#if __GLASGOW_HASKELL__ < 709
import Data.Foldable
import Data.Monoid
import Data.Traversable
#endif
-- | A homogeneous pair: both components share one type @a@.
data Pair a = Pair { pFst :: a, pSnd :: a }
-- Note that Pair is a *unary* type constructor
-- whereas (,) is binary
-- The important thing about Pair is that it has a *homogeneous*
-- Functor instance, so you can easily apply the same function
-- to both components
-- | Apply one function to both components.
instance Functor Pair where
  fmap fn (Pair a b) = Pair (fn a) (fn b)

-- | Pointwise ("zippy") application of a pair of functions.
instance Applicative Pair where
  pure v = Pair v v
  (Pair fn gn) <*> (Pair a b) = Pair (fn a) (gn b)

-- | Combine the image of the first component with that of the second.
instance Foldable Pair where
  foldMap fn (Pair a b) = fn a `mappend` fn b

-- | Effects run left-to-right: first component, then second.
instance Traversable Pair where
  traverse fn (Pair a b) = Pair <$> fn a <*> fn b
-- | Render as @a ~ b@, matching the coercion-like notation used by
-- GHC's pretty-printer for Pair's main client (coercion kinds).
instance Outputable a => Outputable (Pair a) where
  ppr (Pair a b) = ppr a <+> char '~' <+> ppr b
-- | Convert a 'Pair' into an ordinary tuple.
unPair :: Pair a -> (a,a)
unPair (Pair a b) = (a, b)

-- | Build a 'Pair' from an ordinary tuple.
toPair :: (a,a) -> Pair a
toPair (a, b) = Pair a b

-- | Exchange the two components.
swap :: Pair a -> Pair a
swap (Pair a b) = Pair b a
|
green-haskell/ghc
|
compiler/utils/Pair.hs
|
Haskell
|
bsd-3-clause
| 1,191
|
{-# language DataKinds #-}
{-# language OverloadedLists #-}
{-# language OverloadedStrings #-}
{-# language QuasiQuotes #-}
{-# language Rank2Types #-}
{-# language TypeApplications #-}
{-# language TypeFamilies #-}
module Planetary.Core.Eval.Test (unitTests, runTest, mkLogger) where
import Control.Lens
import Control.Monad.Reader (asks)
import Control.Monad.IO.Class
import Data.Maybe (fromJust)
import Data.Text (Text)
import NeatInterpolation
import Network.IPLD (Cid, toIpld)
import Prelude hiding (not)
import EasyTest hiding (bool, run)
import Planetary.Core hiding (logIncomplete, logReturnState, logValue)
import Planetary.Support.Ids hiding (boolId) -- XXX fix this
import Planetary.Support.Parser (forceTm)
import Planetary.Support.Pretty
import Planetary.Library
import qualified Planetary.Library.FrankExamples as Frank
import Planetary.Library.HaskellForeign (mkForeignTm, haskellOracles, intOpsId)
import Data.Text.Prettyprint.Doc
-- Some more good examples here:
-- https://github.com/dhil/links/blob/master/examples/handlers/shallow_state.links
-- | Report a failed evaluation test: pretty-print the initial evaluator
-- state, the actual result and the expected result, then fail the test.
noteFailureState
  :: EvalState       -- ^ state the evaluator started from
  -> Either Err TmI  -- ^ what evaluation actually produced
  -> Either Err TmI  -- ^ what the test expected
  -> Test ()
noteFailureState initState result expected = do
  note $ layout $ vsep
    [ ""
    , annotate Error "fail with initial state:"
    , prettyEvalState initState
    , ""
    , annotate Error "got:"
    , either pretty (prettyTmPrec 11) result
    , ""
    , annotate Error "expected:"
    , either pretty (prettyTmPrec 11) expected
    ]
  fail "failure: see above"
-- | Global switch: when 'False' the logger built by 'mkLogger' discards
-- every message.  Flip to 'True' to see evaluator traces in test output.
putLogs :: Bool
putLogs = False

-- | Build a 'Logger' whose three channels render their payload to 'Text'
-- and hand it to @mkNote@ — or do nothing when 'putLogs' is off.
-- NOTE(review): @logReturnState@ appears to take an extra leading
-- argument (hence @helper . logReturnState@ vs @helper logIncomplete@) —
-- confirm against its definition in Planetary.Support.Pretty.
mkLogger :: (Text -> IO ()) -> Logger
mkLogger mkNote =
  let helper :: forall a. (a -> Text) -> a -> IO ()
      helper f = if putLogs then mkNote . f else const (pure ())
  in Logger (helper . logReturnState) (helper logIncomplete) (helper logValue)
-- | Run one evaluation test: evaluate @tm@ under the given ambient
-- handlers and value store, and compare the outcome against @expected@,
-- reporting a detailed failure via 'noteFailureState' on mismatch.
runTest
  :: Text              -- ^ test (scope) name
  -> AmbientHandlers   -- ^ foreign/ambient effect handlers
  -> ValueStore        -- ^ initial IPLD value store
  -> TmI               -- ^ term to evaluate
  -> Either Err TmI    -- ^ expected result
  -> Test ()
runTest name handlers store tm expected = scope name $ do
  let initState = initEvalState store tm
  logger <- mkLogger <$> asks note_
  result <- liftIO $ run handlers logger initState
  if result == expected
  then ok
  else noteFailureState initState result expected
-- | Cid of the @Bool@ data type from the Frank example declarations.
-- NOTE(review): 'fromJust' makes test startup partial — it crashes if
-- the declarations ever lose "Bool", which is acceptable in a test
-- module but worth knowing.
boolId :: Cid
(boolId, _) = fromJust $ namedData "Bool" Frank.resolvedDecls

-- | Build the @i@-th nullary @Bool@ constructor (0 = false, 1 = true).
bool :: Int -> TmI
bool i = DataConstructor boolId i []
unitTests :: Test ()
unitTests =
let
-- true, false :: forall a b. Tm Cid a b
false = bool 0
true = bool 1
not tm = Case tm
[ ([], true)
, ([], false)
]
evalEnvRunTest desc = runTest desc noAmbientHandlers emptyStore
in scope "evaluation" $ tests
[ let x = V"x"
-- tm = forceTm "(\y -> y) x"
lam = Lambda ["x"] x
in scope "functions" $ tests
-- [ evalEnvRunTest "application 1" (AppN lam [true])
-- (Right true)
[ evalEnvRunTest "application"
(AppT lam [true])
(Right true)
-- TODO: test further steps with bound variables
, evalEnvRunTest "nullary function call"
(AppT (Lambda [] true) [])
(Right true)
]
, scope "case" $ tests
[ evalEnvRunTest "not (1)" (not false) (Right true)
, evalEnvRunTest "not (2)" (not true) (Right false)
]
, let ty :: Polytype Cid
ty = Polytype [] (DataTy (UidTy boolId) [])
tm = Let false ty "x" (V"x")
in scope "let" $ evalEnvRunTest "let x = false in x" tm (Right false)
, scope "handle" $ do
let abortHandlerTm = forceTm [text|
handle x : [e , <Abort>]Int with
Abort:
| <aborting -> k> -> one
| v -> two
|]
sendHandlerTm = forceTm [text|
handle x : [e, <Send Int>]Int with
Send:
| <send n -> k> -> n
| v -> v
|]
-- TODO: this is duplicated in FrankExamples.Test
stateHandlerTm = forceTm [text|
letrec
state : forall S X. {S -> {X} -> X}
= \s x -> handle x! : X with
State:
| <get -> k> -> state s (\-> k s)
| <put s -> k> -> state s (\-> k <Unit.0>)
| y -> y
fst : forall X Y. {X -> Y -> X}
= \x y -> x
-- increment, return original value
next : forall. {[<State Int>]Int}
-- fst get! (put (get! + 1))
= \-> fst get! (put (add get! one))
statefulTm
: forall. {[<State Int>] Int}
= \-> let x : forall. Int = next! in
let y : forall. Int = next! in
let z : forall. Int = next! in z
in state zero statefulTm
|]
(zero, zeroVal) = mkForeignTm @Int intId [] 0
(one, oneVal) = mkForeignTm @Int intId [] 1
(two, twoVal) = mkForeignTm @Int intId [] 2
Right abortHandler <- pure $ resolve $ fst abortHandlerTm
Right sendHandler <- pure $ resolve $ fst sendHandlerTm
Right stateHandler <- pure $ resolve $ fst stateHandlerTm
Just abortCid <- pure $ Frank.resolvedDecls ^? globalCids . ix "Abort"
Just sendCid <- pure $ Frank.resolvedDecls ^? globalCids . ix "Send"
Just stateCid <- pure $ Frank.resolvedDecls ^? globalCids . ix "State"
let abortHandler' = substitute "one" one $
substitute "two" two
abortHandler
handleVal = substitute "x" zero abortHandler'
abort = AppN (Command abortCid 0) []
handleAbort = substitute "x" abort abortHandler'
handleSend = substitute "x"
(AppN (Command sendCid 0) [one])
sendHandler
get = Command stateCid 0
put = Command stateCid 1
add = Command intOpsId 0
handleNext = substituteAll
[ ("zero", zero)
, ("one", one)
, ("get", get)
, ("put", put)
, ("add", add)
]
stateHandler
numberStore = storeOf $ toIpld <$> [zeroVal, oneVal, twoVal]
tests
[ runTest "val" noAmbientHandlers emptyStore handleVal
(Right two)
, runTest "abort" noAmbientHandlers emptyStore
handleAbort (Right one)
, runTest "send" noAmbientHandlers emptyStore handleSend
(Right one)
, let handlers = AmbientHandlers haskellOracles
in runTest "handle state" handlers numberStore handleNext (Right two)
]
, scope "let x = false in let y = not x in not y" $ do
let
ty = Polytype [] (DataTy (UidTy boolId) [])
tm = Let false ty "x" $
Let (not (V"x")) ty "y" $
not (V"y")
-- both versions of tm should be equivalent
Right tm2 <- pure $ resolve $ fst $ forceTm [text|
let not: forall. {<Bool> -> <Bool>}
= \x -> case x of
| <False> -> <Bool.1>
| <True> -> <Bool.0>
in
let x: forall. Bool = false in
let y: forall. Bool = not x in
not y
|]
let tm2' = substitute "false" false tm2
tests
[ evalEnvRunTest "tm" tm (Right false)
, evalEnvRunTest "tm2" tm2' (Right false)
]
, scope "letrec" $ do
let evenodd = fst $ forceTm [text|
letrec
even : forall. {<Fix NatF> -> <Bool>}
= \n -> case n of
| <z> -> <Bool.1> -- true
| <succ n'> -> odd n'
odd : forall. {<Fix NatF> -> <Bool>}
= \m -> case m of
| <z> -> <Bool.0> -- false
| <succ m'> -> even m'
in body
|]
-- mkFix = Command fixOpsId 0
-- unFix = Command fixOpsId 1
Right evenodd' <- pure $ resolve evenodd
Just (natfId, _) <- pure $ namedData "NatF" Frank.resolvedDecls
let -- mkTm n = [| evenOdd n |]
mkTm :: Text -> Int -> TmI
mkTm fnName n =
let mkNat 0 = DataConstructor natfId 0 []
mkNat k = DataConstructor natfId 1 [mkNat (k - 1)]
tm = substitute "body"
(AppT (V fnName) [mkNat n])
evenodd'
in tm
handlers = AmbientHandlers haskellOracles
runTest' desc = runTest desc handlers emptyStore
tests
[ runTest' "even 0" (mkTm "even" 0) (Right true)
, runTest' "odd 0" (mkTm "odd" 0) (Right false)
, runTest' "even 1" (mkTm "even" 1) (Right false)
, runTest' "odd 1" (mkTm "odd" 1) (Right true)
, runTest' "even 7" (mkTm "even" 7) (Right false)
, runTest' "odd 7" (mkTm "odd" 7) (Right true)
, runTest' "even 10" (mkTm "even" 10) (Right true)
, runTest' "odd 10" (mkTm "odd" 10) (Right false)
, runTest' "odd 20" (mkTm "odd" 20) (Right false)
, runTest' "even 100" (mkTm "even" 100) (Right true)
]
, scope "closures" $ do
let tm = fst $ forceTm [text|
letrec
const
: forall. {<Text> -> {<Text> -> <Text>}}
= \x -> \y -> x
-- capture x, then shadow
actual1
: forall. {<Text>}
= \->
let foo' : forall. {<Text> -> <Text>} = const foo in
let x : forall. <Text> = bar in
foo' bar
expected1
: forall. {<Text>}
= \-> foo
in actual1!
|]
let tm2 = fst $ forceTm [text|
letrec
captureX
: forall. {<Text> -> <Pair {<Text> -> <Text>} <Text>>}
= \x -> <Pair.0 (\y -> x) x>
-- capture x, then shadow
actual1
: forall. {<Text>}
= \->
let pair : forall. <Pair {<Text> -> <Text>} <Text>> = captureX foo in
case pair of
| <pair f x> -> f bar
expected1
: forall. {<Text>}
= \-> foo
in actual1!
|]
Right tm' <- pure $ resolve tm
Right tm2' <- pure $ resolve tm2
let (foo, fooVal) = mkForeignTm @Text intId [] "foo"
(bar, barVal) = mkForeignTm @Text intId [] "bar"
(baz, bazVal) = mkForeignTm @Text intId [] "baz"
subs =
[ ("foo", foo)
, ("bar", bar)
, ("baz", baz)
]
let tm'' = substituteAll subs tm'
tm2'' = substituteAll subs tm2'
store = storeOf $ toIpld <$> [fooVal, barVal, bazVal]
tests
[ runTest "const" noAmbientHandlers store tm'' (Right foo)
, runTest "pair" noAmbientHandlers store tm2'' (Right foo)
]
]
|
joelburget/interplanetary-computation
|
src/Planetary/Core/Eval/Test.hs
|
Haskell
|
bsd-3-clause
| 11,459
|
{-# LANGUAGE CPP, MagicHash #-}
-- | Dynamically lookup up values from modules and loading them.
module DynamicLoading (
#ifdef GHCI
-- * Loading plugins
loadPlugins,
loadFrontendPlugin,
-- * Force loading information
forceLoadModuleInterfaces,
forceLoadNameModuleInterface,
forceLoadTyCon,
-- * Finding names
lookupRdrNameInModuleForPlugins,
-- * Loading values
getValueSafely,
getHValueSafely,
lessUnsafeCoerce
#else
pluginError,
#endif
) where
#ifdef GHCI
import Linker ( linkModule, getHValue )
import GHCi ( wormhole )
import SrcLoc ( noSrcSpan )
import Finder ( findPluginModule, cannotFindModule )
import TcRnMonad ( initTcInteractive, initIfaceTcRn )
import LoadIface ( loadPluginInterface )
import RdrName ( RdrName, ImportSpec(..), ImpDeclSpec(..)
, ImpItemSpec(..), mkGlobalRdrEnv, lookupGRE_RdrName
, gre_name, mkRdrQual )
import OccName ( OccName, mkVarOcc )
import RnNames ( gresFromAvails )
import DynFlags
import Plugins ( Plugin, FrontendPlugin, CommandLineOption )
import PrelNames ( pluginTyConName, frontendPluginTyConName )
import HscTypes
import GHCi.RemoteTypes ( HValue )
import Type ( Type, eqType, mkTyConTy, pprTyThingCategory )
import TyCon ( TyCon )
import Name ( Name, nameModule_maybe )
import Id ( idType )
import Module ( Module, ModuleName )
import Panic
import FastString
import ErrUtils
import Outputable
import Exception
import Hooks
import Data.Maybe ( mapMaybe )
import GHC.Exts ( unsafeCoerce# )
#else
import Module ( ModuleName, moduleNameString )
import Panic
import Data.List ( intercalate )
#endif
#ifdef GHCI
-- | Load every plugin named by @-fplugin@ in the session's 'DynFlags',
-- pairing each with the @-fplugin-opt@ options given for its module.
loadPlugins :: HscEnv -> IO [(ModuleName, Plugin, [CommandLineOption])]
loadPlugins hsc_env
  = do { plugins <- mapM (loadPlugin hsc_env) to_load
       ; return $ map attachOptions $ to_load `zip` plugins }
  where
    dflags  = hsc_dflags hsc_env
    to_load = pluginModNames dflags

    -- Pair a loaded plugin with the command-line options that were
    -- addressed to its module.
    attachOptions (mod_nm, plug) = (mod_nm, plug, options)
      where
        options = [ option | (opt_mod_nm, option) <- pluginModNameOpts dflags
                           , opt_mod_nm == mod_nm ]
-- | Load a compiler plugin: the module must export a value named
-- @plugin@ of type 'Plugin'.
loadPlugin :: HscEnv -> ModuleName -> IO Plugin
loadPlugin = loadPlugin' (mkVarOcc "plugin") pluginTyConName

-- | Load a frontend plugin: the module must export a value named
-- @frontendPlugin@ of type 'FrontendPlugin'.
loadFrontendPlugin :: HscEnv -> ModuleName -> IO FrontendPlugin
loadFrontendPlugin = loadPlugin' (mkVarOcc "frontendPlugin") frontendPluginTyConName
-- | Shared worker for 'loadPlugin' / 'loadFrontendPlugin': look up
-- @occ_name@ in @mod_name@, check it against the 'TyCon' named by
-- @plugin_name@, and coerce the linked value to the caller's result
-- type.  NOTE(review): the @IO a@ return relies on the caller fixing
-- @a@ to the type matching @plugin_name@ — the coercion itself is
-- unchecked beyond the 'getValueSafely' type test.
loadPlugin' :: OccName -> Name -> HscEnv -> ModuleName -> IO a
loadPlugin' occ_name plugin_name hsc_env mod_name
  = do { let plugin_rdr_name = mkRdrQual mod_name occ_name
             dflags = hsc_dflags hsc_env
       ; mb_name <- lookupRdrNameInModuleForPlugins hsc_env mod_name
                        plugin_rdr_name
       ; case mb_name of {
            Nothing ->
                throwGhcExceptionIO (CmdLineError $ showSDoc dflags $ hsep
                          [ text "The module", ppr mod_name
                          , text "did not export the plugin name"
                          , ppr plugin_rdr_name ]) ;
            Just name ->

     do { plugin_tycon <- forceLoadTyCon hsc_env plugin_name
        ; mb_plugin <- getValueSafely hsc_env name (mkTyConTy plugin_tycon)
        ; case mb_plugin of
            Nothing ->
                throwGhcExceptionIO (CmdLineError $ showSDoc dflags $ hsep
                          [ text "The value", ppr name
                          , text "did not have the type"
                          , ppr pluginTyConName, text "as required"])
            Just plugin -> return plugin } } }
-- | Force the interfaces for the given modules to be loaded. The 'SDoc'
-- parameter is used for debugging (@-ddump-if-trace@) only: it is shown
-- as the reason why the module is being loaded.
forceLoadModuleInterfaces :: HscEnv -> SDoc -> [Module] -> IO ()
forceLoadModuleInterfaces hsc_env doc modules = do
    _ <- initTcInteractive hsc_env $
             initIfaceTcRn $
             mapM_ (loadPluginInterface doc) modules
    return ()

-- | Force the interface for the module containing the name to be loaded
-- (a no-op for names without a module).  The 'SDoc' is the
-- @-ddump-if-trace@ reason, as for 'forceLoadModuleInterfaces'.
forceLoadNameModuleInterface :: HscEnv -> SDoc -> Name -> IO ()
forceLoadNameModuleInterface hsc_env reason name =
    forceLoadModuleInterfaces hsc_env reason (mapMaybe nameModule_maybe [name])
-- | Load the 'TyCon' associated with the given name, come hell or high water. Fails if:
--
-- * The interface could not be loaded
-- * The name is not that of a 'TyCon'
-- * The name did not exist in the loaded module
forceLoadTyCon :: HscEnv -> Name -> IO TyCon
forceLoadTyCon hsc_env con_name = do
    -- Make sure the defining interface is in the type environment first.
    forceLoadNameModuleInterface hsc_env (text "contains a name used in an invocation of loadTyConTy") con_name

    mb_con_thing <- lookupTypeHscEnv hsc_env con_name
    case mb_con_thing of
        Nothing -> throwCmdLineErrorS dflags $ missingTyThingError con_name
        Just (ATyCon tycon) -> return tycon
        -- Found something, but it isn't a type constructor.
        Just con_thing -> throwCmdLineErrorS dflags $ wrongTyThingError con_name con_thing
  where dflags = hsc_dflags hsc_env
-- | Loads the value corresponding to a 'Name' if that value has the given 'Type'. This only provides limited safety
-- in that it is up to the user to ensure that that type corresponds to the type you try to use the return value at!
--
-- If the value found was not of the correct type, returns @Nothing@. Any other condition results in an exception:
--
-- * If we could not load the names module
-- * If the thing being loaded is not a value
-- * If the Name does not exist in the module
-- * If the link failed
getValueSafely :: HscEnv -> Name -> Type -> IO (Maybe a)
getValueSafely hsc_env val_name expected_type = do
  -- The hook allows an embedder to replace the default linker-based
  -- lookup ('getHValueSafely') wholesale.
  mb_hval <- lookupHook getValueSafelyHook getHValueSafely dflags hsc_env val_name expected_type
  case mb_hval of
    Nothing   -> return Nothing
    Just hval -> do
      value <- lessUnsafeCoerce dflags "getValueSafely" hval
      return (Just value)
  where
    dflags = hsc_dflags hsc_env
-- | Default implementation behind 'getValueSafely': link the defining
-- module and fetch the 'HValue' for @val_name@, but only when its
-- interface type exactly matches @expected_type@ ('eqType'); returns
-- @Nothing@ on a type mismatch.
getHValueSafely :: HscEnv -> Name -> Type -> IO (Maybe HValue)
getHValueSafely hsc_env val_name expected_type = do
    forceLoadNameModuleInterface hsc_env (text "contains a name used in an invocation of getHValueSafely") val_name
    -- Now look up the names for the value and type constructor in the type environment
    mb_val_thing <- lookupTypeHscEnv hsc_env val_name
    case mb_val_thing of
        Nothing -> throwCmdLineErrorS dflags $ missingTyThingError val_name
        Just (AnId id) -> do
            -- Check the value type in the interface against the type recovered from the type constructor
            -- before finally casting the value to the type we assume corresponds to that constructor
            if expected_type `eqType` idType id
             then do
                -- Link in the module that contains the value, if it has such a module
                case nameModule_maybe val_name of
                    Just mod -> do linkModule hsc_env mod
                                   return ()
                    Nothing ->  return ()
                -- Find the value that we just linked in and cast it given that we have proved it's type
                hval <- getHValue hsc_env val_name >>= wormhole dflags
                return (Just hval)
             else return Nothing
        Just val_thing -> throwCmdLineErrorS dflags $ wrongTyThingError val_name val_thing
   where dflags = hsc_dflags hsc_env
-- | Coerce a value as usual, but:
--
-- 1) Evaluate it immediately to get a segfault early if the coercion was wrong
--
-- 2) Wrap it in some debug messages at verbosity 3 or higher so we can see what happened
--    if it /does/ segfault
lessUnsafeCoerce :: DynFlags -> String -> a -> IO b
lessUnsafeCoerce dflags context what = do
    debugTraceMsg dflags 3 $ (text "Coercing a value in") <+> (text context) <>
                             (text "...")
    -- 'evaluate' forces WHNF now, so an invalid coercion crashes here,
    -- inside the traced region, rather than at some distant use site.
    output <- evaluate (unsafeCoerce# what)
    debugTraceMsg dflags 3 (text "Successfully evaluated coercion")
    return output
-- | Finds the 'Name' corresponding to the given 'RdrName' in the
-- context of the 'ModuleName'. Returns @Nothing@ if no such 'Name'
-- could be found. Any other condition results in an exception:
--
-- * If the module could not be found
-- * If we could not determine the imports of the module
--
-- Can only be used for looking up names while loading plugins (and is
-- *not* suitable for use within plugins). The interface file is
-- loaded very partially: just enough that it can be used, without its
-- rules and instances affecting (and being linked from!) the module
-- being compiled. This was introduced by 57d6798.
--
-- Fix: the 'panic' and the if-trace reason previously named the old
-- function name @lookupRdrNameInModule@; they now name this function.
lookupRdrNameInModuleForPlugins :: HscEnv -> ModuleName -> RdrName -> IO (Maybe Name)
lookupRdrNameInModuleForPlugins hsc_env mod_name rdr_name = do
    -- First find the package the module resides in by searching exposed packages and home modules
    found_module <- findPluginModule hsc_env mod_name
    case found_module of
        Found _ mod -> do
            -- Find the exports of the module
            (_, mb_iface) <- initTcInteractive hsc_env $
                             initIfaceTcRn $
                             loadPluginInterface doc mod
            case mb_iface of
                Just iface -> do
                    -- Try and find the required name in the exports
                    let decl_spec = ImpDeclSpec { is_mod = mod_name, is_as = mod_name
                                                , is_qual = False, is_dloc = noSrcSpan }
                        imp_spec = ImpSpec decl_spec ImpAll
                        env = mkGlobalRdrEnv (gresFromAvails (Just imp_spec) (mi_exports iface))
                    case lookupGRE_RdrName rdr_name env of
                        [gre] -> return (Just (gre_name gre))
                        []    -> return Nothing
                        _     -> panic "lookupRdrNameInModuleForPlugins"
                Nothing -> throwCmdLineErrorS dflags $ hsep [text "Could not determine the exports of the module", ppr mod_name]
        err -> throwCmdLineErrorS dflags $ cannotFindModule dflags mod_name err
  where
    dflags = hsc_dflags hsc_env
    doc = text "contains a name used in an invocation of lookupRdrNameInModuleForPlugins"
-- | Error document for a name that resolved to a non-value 'TyThing'.
-- Fix: use 'text' instead of the legacy @ptext (sLit …)@ form, matching
-- the surrounding literals in the same 'hsep' and the rest of the file
-- ('text' and @ptext . sLit@ are equivalent).
wrongTyThingError :: Name -> TyThing -> SDoc
wrongTyThingError name got_thing = hsep [text "The name", ppr name, text "is not that of a value but rather a", pprTyThingCategory got_thing]

-- | Error document for a name absent from the type environment.
missingTyThingError :: Name -> SDoc
missingTyThingError name = hsep [text "The name", ppr name, text "is not in the type environment: are you sure it exists?"]
-- | Render an 'SDoc' with the given flags and raise it as a
-- command-line error.
throwCmdLineErrorS :: DynFlags -> SDoc -> IO a
throwCmdLineErrorS dflags sdoc = throwCmdLineError (showSDoc dflags sdoc)

-- | Raise a 'CmdLineError' carrying the given message.
throwCmdLineError :: String -> IO a
throwCmdLineError msg = throwGhcExceptionIO (CmdLineError msg)
#else
-- | Non-GHCI stub: any attempt to use plugins in a compiler built
-- without the interpreter is a command-line error listing the modules.
pluginError :: [ModuleName] -> a
pluginError modnames = throwGhcException (CmdLineError msg)
  where
    msg = "not built for interactive use - can't load plugins ("
            -- module names are not z-encoded
          ++ intercalate ", " (map moduleNameString modnames)
          ++ ")"
#endif
|
vTurbine/ghc
|
compiler/main/DynamicLoading.hs
|
Haskell
|
bsd-3-clause
| 11,451
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>JSON View</title>
<maps>
<homeID>jsonview</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/jsonview/src/main/javahelp/help_sr_CS/helpset_sr_CS.hs
|
Haskell
|
apache-2.0
| 959
|
-- | Put common type definitions here to break recursive module dependencies.
module RegAlloc.Linear.Base (
BlockAssignment,
Loc(..),
regsOfLoc,
-- for stats
SpillReason(..),
RegAllocStats(..),
-- the allocator monad
RA_State(..),
)
where
import GhcPrelude
import RegAlloc.Linear.StackMap
import RegAlloc.Liveness
import Reg
import DynFlags
import Outputable
import Unique
import UniqFM
import UniqSupply
-- | Used to store the register assignment on entry to a basic block.
-- We use this to handle join points, where multiple branch instructions
-- target a particular label. We have to insert fixup code to make
-- the register assignments from the different sources match up.
--
type BlockAssignment freeRegs
= BlockMap (freeRegs, RegMap Loc)
-- | Where a vreg is currently stored
-- A temporary can be marked as living in both a register and memory
-- (InBoth), for example if it was recently loaded from a spill location.
-- This makes it cheap to spill (no save instruction required), but we
-- have to be careful to turn this into InReg if the value in the
-- register is changed.
-- This is also useful when a temporary is about to be clobbered. We
-- save it in a spill location, but mark it as InBoth because the current
-- instruction might still want to read it.
--
data Loc
-- | vreg is in a register
= InReg !RealReg
-- | vreg is held in a stack slot
| InMem {-# UNPACK #-} !StackSlot
-- | vreg is held in both a register and a stack slot
| InBoth !RealReg
{-# UNPACK #-} !StackSlot
deriving (Eq, Show, Ord)
instance Outputable Loc where
ppr l = text (show l)
-- | Get the real registers referenced by this 'Loc'.
-- A value held only in memory ('InMem') references no register.
regsOfLoc :: Loc -> [RealReg]
regsOfLoc loc = case loc of
        InReg  r   -> [r]
        InBoth r _ -> [r]
        InMem  _   -> []
-- | Reasons why instructions might be inserted by the spiller.
-- Used when generating stats for -ddrop-asm-stats.
--
data SpillReason
-- | vreg was spilled to a slot so we could use its
-- current hreg for another vreg
= SpillAlloc !Unique
-- | vreg was moved because its hreg was clobbered
| SpillClobber !Unique
-- | vreg was loaded from a spill slot
| SpillLoad !Unique
-- | reg-reg move inserted during join to targets
| SpillJoinRR !Unique
-- | reg-mem move inserted during join to targets
| SpillJoinRM !Unique
-- | Used to carry interesting stats out of the register allocator.
data RegAllocStats
= RegAllocStats
{ ra_spillInstrs :: UniqFM [Int] }
-- | The register allocator state
data RA_State freeRegs
= RA_State
{
-- | the current mapping from basic blocks to
-- the register assignments at the beginning of that block.
ra_blockassig :: BlockAssignment freeRegs
-- | free machine registers
, ra_freeregs :: !freeRegs
-- | assignment of temps to locations
, ra_assig :: RegMap Loc
-- | current stack delta
, ra_delta :: Int
-- | free stack slots for spilling
, ra_stack :: StackMap
-- | unique supply for generating names for join point fixup blocks.
, ra_us :: UniqSupply
-- | Record why things were spilled, for -ddrop-asm-stats.
-- Just keep a list here instead of a map of regs -> reasons.
-- We don't want to slow down the allocator if we're not going to emit the stats.
, ra_spills :: [SpillReason]
, ra_DynFlags :: DynFlags }
|
ezyang/ghc
|
compiler/nativeGen/RegAlloc/Linear/Base.hs
|
Haskell
|
bsd-3-clause
| 3,779
|
-- | Negative parser test fixture: compiling this module must FAIL,
-- because a strictness annotation ('!') is only legal on the fields of
-- a @data@ declaration, never on a @newtype@ constructor argument.
module ShouldFail where
-- strictness annotations on the argument to a newtype constructor
-- are not allowed.
newtype N a = T ![a]
|
urbanslug/ghc
|
testsuite/tests/parser/should_fail/readFail008.hs
|
Haskell
|
bsd-3-clause
| 133
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Network.IPFS.Types (
IPFS (..),
Endpoint (..),
Multihash (..),
Key,
Data,
Template (..),
FileHash (..),
Link (..),
Node (..),
Object (..),
ID (..),
parseMultihash
) where
import Control.Monad (ap, liftM)
import Control.Monad.Fix (MonadFix, mfix)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Reader (ReaderT)
import Data.Aeson (FromJSON (..), (.:), (.:?))
import qualified Data.Aeson as JSON
import Data.ByteString.Lazy (ByteString)
import Data.ByteString.Lazy.UTF8 (fromString, toString)
import qualified Data.Multihash.Base as MB
import Data.Text (unpack)
import GHC.Generics (Generic)
import qualified Network.HTTP.Conduit as HTTP
import Network.Multiaddr (Multiaddr)
import Text.ParserCombinators.ReadP (ReadP, readP_to_S, munch1)
-- | An 'Endpoint' is an IPFS node that will execute an API request
data Endpoint = Endpoint HTTP.Manager String
newtype IPFS a = IPFS { unIPFS :: ReaderT Endpoint IO a }
instance Monad IPFS where
return = IPFS . return
m >>= f = IPFS (unIPFS m >>= unIPFS . f)
instance MonadFix IPFS where
mfix f = IPFS (mfix (unIPFS . f))
instance MonadIO IPFS where
liftIO = IPFS . liftIO
instance Functor IPFS where
fmap = liftM
instance Applicative IPFS where
pure = return
(<*>) = ap
newtype Multihash = Multihash { multihash :: ByteString } deriving (Generic, Eq)
instance Show Multihash where
show = toString . MB.encode MB.Base58 . multihash
instance Read Multihash where
readsPrec _ = readP_to_S parseMultihash
parseMultihash :: ReadP Multihash
parseMultihash = do
base58 <- munch1 (`elem` ("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" :: String)) -- base58 bitcoin alphabet
either fail (return . Multihash) . MB.decode MB.Base58 . fromString $ base58
-- | Decode a 'Multihash' from a base58-encoded JSON string.
-- Uses the total 'reads' (backed by 'parseMultihash' via the 'Read'
-- instance) instead of the partial 'read', so a malformed base58
-- string fails the Aeson parser with a useful message rather than
-- crashing with an imprecise @Prelude.read: no parse@ exception.
instance FromJSON Multihash where
    parseJSON (JSON.String s) = case reads (unpack s) of
        [(mh, "")] -> return mh
        _          -> fail "Expected a base58-encoded Multihash String"
    parseJSON _ = fail "Expected a Multihash String"
type Key = ByteString
type Data = ByteString
data Template = Unixfs | None deriving (Show, Eq)
data FileHash = FileHash {
fileName :: FilePath,
fileHash :: Multihash
} deriving (Generic, Show, Eq)
instance FromJSON FileHash where
parseJSON (JSON.Object o) = FileHash
<$> o .: "Name"
<*> o .: "Hash"
parseJSON _ = fail "Expected a FileHash"
data Link = Link {
hash :: Maybe Multihash,
name :: Maybe FilePath,
size :: Maybe Int
} deriving (Generic, Eq, Show)
instance FromJSON Link where
parseJSON (JSON.Object o) = Link
<$> o .:? "Hash"
<*> o .:? "Name"
<*> o .:? "Size"
parseJSON _ = fail "Expected a Link"
data Node = Node {
links :: [Link],
payload :: Maybe Data
} deriving (Generic, Show, Eq)
instance FromJSON Node where
parseJSON (JSON.Object o) = Node
<$> o .: "Links"
<*> o .:? "Data"
parseJSON _ = fail "Expected a Node"
data Object = Object {
objectHash :: Multihash,
objectPayload :: Data,
objectLinks :: [(String, Object)]
} deriving (Generic, Show, Eq)
data ID = ID {
idHash :: Multihash,
publicKey :: Key,
addresses :: [Multiaddr], -- TODO replace with multiaddresses ?
agentVersion :: String,
protocolVersion :: String
} deriving (Generic, Show, Eq)
instance FromJSON ID where
parseJSON (JSON.Object o) = ID
<$> o .: "ID"
<*> o .: "PublicKey"
<*> (map read <$> o .: "Addresses")
<*> o .: "AgentVersion"
<*> o .: "ProtocolVersion"
parseJSON _ = fail "Expected an ID"
instance FromJSON ByteString where
parseJSON = (fromString <$>) . parseJSON
|
basile-henry/hs-ipfs-api
|
src/Network/IPFS/Types.hs
|
Haskell
|
mit
| 4,247
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module IHaskell.Display.Widgets.Output
( -- * The Output Widget
OutputWidget
-- * Constructor
, mkOutput
-- * Using the output widget
, appendStdout
, appendStderr
, appendDisplay
, clearOutput
, clearOutput_
, replaceOutput
) where
-- To keep `cabal repl` happy when running from the ihaskell repo
import Prelude
import Data.Aeson
import Data.IORef (newIORef)
import Data.Text
import Data.Vinyl (Rec(..), (<+>))
import IHaskell.Display
import IHaskell.Eval.Widgets
import IHaskell.IPython.Types (StreamType(..))
import IHaskell.IPython.Message.UUID as U
import IHaskell.Display.Widgets.Types
import IHaskell.Display.Widgets.Common
import IHaskell.Display.Widgets.Layout.LayoutWidget
-- | An 'OutputWidget' represents a Output widget from IPython.html.widgets.
type OutputWidget = IPythonWidget 'OutputType
-- | Create a new output widget
mkOutput :: IO OutputWidget
mkOutput = do
-- Default properties, with a random uuid
wid <- U.random
layout <- mkLayout
let domAttrs = defaultDOMWidget "OutputView" "OutputModel" layout
outAttrs = (ViewModule =:! "@jupyter-widgets/output")
:& (ModelModule =:! "@jupyter-widgets/output")
:& (ViewModuleVersion =:! "1.0.0")
:& (ModelModuleVersion =:! "1.0.0")
:& (MsgID =:: "")
:& (Outputs =:: [])
:& RNil
widgetState = WidgetState $ domAttrs <+> outAttrs
stateIO <- newIORef widgetState
let widget = IPythonWidget wid stateIO
-- Open a comm for this widget, and store it in the kernel state
widgetSendOpen widget $ toJSON widgetState
-- Return the image widget
return widget
-- | Append one 'OutputStream' message for the given stream type to the
-- widget's accumulated 'Outputs' field.
appendStd :: StreamType -> OutputWidget -> Text -> IO ()
appendStd stream widget t = do
  previous <- getField widget Outputs
  setField widget Outputs (previous ++ [OutputStream stream t])
-- | Appends text to the stdout of an output widget
appendStdout :: OutputWidget -> Text -> IO ()
appendStdout = appendStd Stdout
-- | Appends text to the stderr of an output widget
appendStderr :: OutputWidget -> Text -> IO ()
appendStderr = appendStd Stderr
-- | Clears the output widget
clearOutput' :: OutputWidget -> IO ()
clearOutput' w = do
_ <- setField w Outputs []
_ <- setField w MsgID ""
return ()
-- | Appends anything displayable to an output widget
appendDisplay :: IHaskellDisplay a => OutputWidget -> a -> IO ()
appendDisplay o d = do
outputs <- getField o Outputs
disp <- display d
_ <- setField o Outputs $ outputs ++ [OutputData disp]
return ()
-- | Clear the output widget immediately
clearOutput :: OutputWidget -> IO ()
clearOutput widget = widgetClearOutput False >> clearOutput' widget
-- | Clear the output widget on next append
clearOutput_ :: OutputWidget -> IO ()
clearOutput_ widget = widgetClearOutput True >> clearOutput' widget
-- | Replace the currently displayed output for output widget
replaceOutput :: IHaskellDisplay a => OutputWidget -> a -> IO ()
replaceOutput widget d = do
disp <- display d
setField widget Outputs [OutputData disp]
instance IHaskellWidget OutputWidget where
getCommUUID = uuid
|
gibiansky/IHaskell
|
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Output.hs
|
Haskell
|
mit
| 3,538
|
{-# LANGUAGE ScopedTypeVariables #-}
module CNFList (tests) where
import BooleanLaws
import SimplifyLaws
import BooleanModelLaws
import FreeBoolean
import Test.Tasty
import Test.QuickCheck
import Control.Applicative
import Data.Function (on)
import Data.Algebra.Boolean.Negable (Negable, Neg(..))
import Data.Algebra.Boolean.CNF.List
-- | Generate both polarities of a literal. The original implementation
-- listed @Pos <$> arbitrary@ in BOTH branches of 'oneof', so 'Neg'
-- values were never generated and negation was untested.
instance Arbitrary a => Arbitrary (Neg a) where
  arbitrary = oneof [ Pos <$> arbitrary, Neg <$> arbitrary ]
instance (Negable a, Arbitrary a) => Arbitrary (CNF a) where
arbitrary = fromFreeBoolean <$> arbitrary
tests :: TestTree
tests = testGroup "CNF list implementation"
[ monotoneLaws eq
, simplifyLaws (undefined :: CNF (Either Bool Bool))
, booleanModelLaws (undefined :: CNF (Either Bool Bool))
]
eq :: CNF (Neg Int) -> CNF (Neg Int) -> Bool
eq = (==) `on` toBool
|
phadej/boolean-normal-forms
|
tests/CNFList.hs
|
Haskell
|
mit
| 824
|
Tips for using this guide (draft notes)
CodeWorld editor -> can link back to CodeWorld editor
program = drawingOf(wheel)
wheel = text("I Love Pandas!")
wheel = solidRectangle(8,4)
wheel = circle(5) --5 is radius
--multiple
program = drawingOf(design)
design = solidRectangle(4, 0.4)
& solidCircle(1.2)
& circle(2)
-- renders the last one on top?
-- make sure shapes don't overlap. I figured out my first object
-- was smaller than the second, so my first was completely covered
-- by the second
-- gallery of things people have made with CodeWorld?
-- the tree was super cool!
-- overlap
program = drawingOf(overlap)
overlap = colored(square, translucent(blue))
& colored(disk, translucent(green))
square = solidRectangle(5,5)
disk = solidCircle(3)
-- translated
program = drawingOf(forest)
forest = translated(tree, -5, 5)
& translated(tree, 0, 0)
& translated(tree, 5, -5)
tree = colored(leaves, green) & colored(trunk, brown)
leaves = sector(0, 180, 4)
trunk = solidRectangle(1, 4)
-- rotation
rotated(square, 45)
-- scale
-- scale (var to pass through, x, y)
-- circle(4) is an expression
-- colored(text("Help"), red) is also an expression
-- rectangle(1, 4) & circle(2) is an expression
-- leaves & trunk is an expression
-- rectangle is a function. It needs a width and a height and makes a picture
-- light is a function. It needs a color, and makes another color
-- that's the same name, but lighter
-- drawingOf is a function. It needs a picture, and makes a program to
-- draw that picture
-- scaled is a function. It needs a picture and two scaling factors
-- and makes a modified picture
-- name it all in one
program = drawingOf(diamond)
diamond = rotated(rectangle(2, 2),45)
-- or
program = drawingOf(rotated(rectangle(2, 2), 45))
-- functions
-- light is a function that needs a color and makes another Color
-- it has the type Color -> Color
-- circle is a function that needs a number (the radius) and makes
-- a picture. It has the type Number -> Picture
-- Rectangle is a function that needs two numbers, and makes a
-- Picture. It has the type (Number, Number) -> Picture
-- translated is a function that needs a picture and two numbers
-- (x and y distance) and makes a new picture. It has the type
-- (Picture, Number, Number) -> Picture
|
kammitama5/kammitama5.github.io
|
images/play_img/notes.hs
|
Haskell
|
mit
| 2,382
|
-- What's up next?
-- http://www.codewars.com/kata/542ebbdb494db239f8000046
module LazyNext where
-- | Return the element immediately following the first occurrence of
-- @item@, or 'Nothing' when @item@ is absent or is the last element.
--
-- Rewritten with a two-element pattern so no partial 'head' is needed:
-- the original called @head xs@ and was safe only because a separate
-- singleton clause happened to shield it.
next :: Eq a => a -> [a] -> Maybe a
next item (x:y:rest)
  | x == item = Just y
  | otherwise = next item (y:rest)
next _ _ = Nothing
|
gafiatulin/codewars
|
src/8 kyu/LazyNext.hs
|
Haskell
|
mit
| 249
|
module SuperUserSpark.Constants where
import Import
keywordCard :: String
keywordCard = "card"
keywordSpark :: String
keywordSpark = "spark"
keywordFile :: String
keywordFile = "file"
keywordInto :: String
keywordInto = "into"
keywordOutof :: String
keywordOutof = "outof"
keywordKindOverride :: String
keywordKindOverride = "kind"
keywordLink :: String
keywordLink = "link"
keywordCopy :: String
keywordCopy = "copy"
keywordAlternatives :: String
keywordAlternatives = "alternatives"
linkKindSymbol :: String
linkKindSymbol = "l->"
copyKindSymbol :: String
copyKindSymbol = "c->"
unspecifiedKindSymbol :: String
unspecifiedKindSymbol = "->"
bracesChars :: [Char]
bracesChars = ['{', '}']
linespaceChars :: [Char]
linespaceChars = [' ', '\t']
endOfLineChars :: [Char]
endOfLineChars = ['\n', '\r']
whitespaceChars :: [Char]
whitespaceChars = linespaceChars ++ endOfLineChars
lineDelimiter :: String
lineDelimiter = ";"
branchDelimiter :: String
branchDelimiter = ":"
quotesChar :: Char
quotesChar = '"'
lineCommentStr :: String
lineCommentStr = "#"
blockCommentStrs :: (String, String)
blockCommentStrs = ("[[", "]]")
sparkExtension :: String
sparkExtension = "sus"
|
NorfairKing/super-user-spark
|
src/SuperUserSpark/Constants.hs
|
Haskell
|
mit
| 1,189
|
{-# htermination enumFromThenTo :: () -> () -> () -> [()] #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_enumFromThenTo_2.hs
|
Haskell
|
mit
| 62
|
module Control.Monad.Classes.Except where
import qualified Control.Monad.Trans.Except as Exc
import qualified Control.Monad.Trans.Maybe as Mb
import qualified Control.Exception as E
import Control.Monad
import Control.Monad.Trans.Class
import GHC.Prim (Proxy#, proxy#)
import Control.Monad.Classes.Core
import Control.Monad.Classes.Effects
import Data.Peano (Peano (..))
type instance CanDo IO (EffExcept e) = True
type instance CanDo (Exc.ExceptT e m) eff = ExceptCanDo e eff
type instance CanDo (Mb.MaybeT m) eff = ExceptCanDo () eff
type family ExceptCanDo e eff where
ExceptCanDo e (EffExcept e) = True
ExceptCanDo e eff = False
class Monad m => MonadExceptN (n :: Peano) e m where
throwN :: Proxy# n -> (e -> m a)
instance Monad m => MonadExceptN Zero e (Exc.ExceptT e m) where
throwN _ = Exc.throwE
instance E.Exception e => MonadExceptN Zero e IO where
throwN _ = E.throwIO
instance Monad m => MonadExceptN Zero () (Mb.MaybeT m) where
throwN _ _ = mzero
instance (MonadTrans t, Monad (t m), MonadExceptN n e m, Monad m)
=> MonadExceptN (Succ n) e (t m)
where
throwN _ = lift . throwN (proxy# :: Proxy# n)
-- | The @'MonadExcept' e m@ constraint asserts that @m@ is a monad stack
-- that supports throwing exceptions of type @e@
type MonadExcept e m = MonadExceptN (Find (EffExcept e) m) e m
-- | Throw an exception
throw :: forall a e m . MonadExcept e m => e -> m a
throw = throwN (proxy# :: Proxy# (Find (EffExcept e) m))
runExcept :: Exc.ExceptT e m a -> m (Either e a)
runExcept = Exc.runExceptT
runMaybe :: Mb.MaybeT m a -> m (Maybe a)
runMaybe = Mb.runMaybeT
|
strake/monad-classes.hs
|
Control/Monad/Classes/Except.hs
|
Haskell
|
mit
| 1,605
|
-- | A utility module containing data definitions common to the rest of the
-- code. This file is forbidden from having dependencies on any other part
-- of the codebase, except for other libraries (such as
-- Graphics.Rendering.OpenGL.Monad and its cousins).
--
-- In all cases, the dependency graph should be acyclic.
module Util.Defs
where
import Prelewd
-- | The (x, y) coordinates of a point on the screen, measured from the bottom,
-- left corner.
type Coord = (Int, Int)
-- | If you were to draw an axis-aligned bounding box around an object,
-- Dimensions would represent the (x, y) lengths of the sides.
type Dimensions = (Int, Int)
|
bfops/Chess
|
src/Util/Defs.hs
|
Haskell
|
mit
| 661
|
-- Lists
alphabet = ['a'..'z']
-- This breaks out to the full alphabet
-- lists are lazy by default, so this pulls only the first 20
first20Mults x = take 20 [0,x..]
-- cycle creates a cycle between elements, generating a repeating sequence
-- repeat just repeats the same value over and over
first20MultsComp x = firstN x 20
firstN x n = [i * x | i <- [1..n]]
cartesianProduct x y = [(a,b)|a <- [1..x], b <- [1..y]]
-- zip only creates pairs as long as there is a matching index in both lists
zipped = zip [1..10] ['a'..'i']
rightTriangleWithPerim n = [(a,b,c) | c <- [1..n],
b <- [1..c],
a <- [1..b],
-- such that
(a^2)+(b^2) == (c^2),
a < b && b < c,
a + b + c == n]
|
ChrisCoffey/haskell_sandbox
|
c_1/first_func.hs
|
Haskell
|
mit
| 746
|
module Mck.AST where
newtype VarId = VarId String
newtype TypeId = TypeId String
newtype Agent = Agent String
newtype ProtocolId = ProtocolId String
newtype Label = Label String
newtype Constant = Const String
data JointProtocol = JointProtocol Environment [Protocol]
data Environment = Env [TypeDec] [VarDec] [Definition] (Maybe EnvInitCond) [EnvAgentDec] (Maybe TransitionBlock) [EnvFairness] [EnvSpec]
data TypeDec = TypeDec TypeId VarType
data Definition = Def Var Expr
data EnvAgentDec = EnvAgentDec Agent ProtocolId [Var]
data EnvInitCond = InitDec InitFromDec TransitionBlock
data EnvFairness = Fairness BoolTransitionExpr
data InitFromDec = Uniform | AllInit
data TransitionBlock = Block [TransitionStmt]
data TransitionStmt = TBlockStmt TransitionBlock
| TIfClause TransitionClause [TransitionClause] (Maybe TransitionStmt)
| TIfBool BoolTransitionExpr TransitionStmt TransitionStmt
| TSkip
| TAssign VarId Expr
| TRandom [Var] BoolExpr
data TransitionClause = TC BoolTransitionExpr TransitionStmt
data Protocol = Protocol ProtocolId EnvVarParams LocalVars [Definition] ProtocolBlock
data EnvVarParams = EV [VarDec]
data LocalVars = LV [VarDec] (Maybe LocalVarInitCond)
data LocalVarInitCond = LVAllInit | LVExpr Expr | LVInitDec InitFromDec TransitionBlock
data ProtocolBlock = PBlock [LabelledStmt]
data LabelledStmt = LS ProtocolStmt (Maybe Label)
data ProtocolStmt = PBlockStmt ProtocolBlock
| PIfClause Clause [Clause] (Maybe LabelledStmt)
| PDo Clause [Clause] (Maybe LabelledStmt) (Maybe LabelledStmt)
| PIfBool BoolExpr LabelledStmt LabelledStmt
| PWhile BoolExpr LabelledStmt
| PSkip
| PAssign VarId Expr
| PRandom [Var] BoolExpr
| PAction Action [ActionAssignment]
data Clause = Clause BoolExpr LabelledStmt
data Action = AConstant Constant
| AWrite Var Expr
| ARead VarId Var
data ActionAssignment = AAssign Var Expr
data BoolTransitionExpr = BTEVarPrime VarPrime
| BTEConstant Constant
| BTEObject LocalObject
| BTEIn Var VarType
| BTEAEq ArithExpr ArithExpr
| BTEANeq ArithExpr ArithExpr
| BTEEEq EnumExpr EnumExpr
| BTEENeq EnumExpr EnumExpr
| BTEARel RelOp ArithExpr ArithExpr
| BTEERel RelOp EnumExpr EnumExpr
| BTEBOp BoolOp BoolTransitionExpr BoolTransitionExpr
| BTENeg BoolTransitionExpr
| BTEParen BoolTransitionExpr
data Expr = BExpr BoolExpr | AExpr ArithExpr | EExpr EnumExpr
-- | Plain (non-transition) boolean expressions.
-- NOTE(review): 'BEBOp', 'BENeg' and 'BEParen' take 'BoolTransitionExpr'
-- children rather than recursing on 'BoolExpr'. This exactly mirrors the
-- corresponding constructors of 'BoolTransitionExpr' above and looks like
-- a copy-paste slip — confirm whether these were meant to be 'BoolExpr'.
-- (Not changed here: the constructor types are part of the public AST.)
data BoolExpr = BEVarPrime VarPrime
              | BEConstant Constant
              | BEIn Var VarType
              | BEAEq ArithExpr ArithExpr
              | BEANeq ArithExpr ArithExpr
              | BEEEq EnumExpr EnumExpr
              | BEENeq EnumExpr EnumExpr
              | BEARel RelOp ArithExpr ArithExpr
              | BEERel RelOp EnumExpr EnumExpr
              | BEBOp BoolOp BoolTransitionExpr BoolTransitionExpr
              | BENeg BoolTransitionExpr
              | BEParen BoolTransitionExpr
data ArithExpr = AEVarPrime VarPrime
| AEInt Integer
| AEOp ArithOp ArithExpr ArithExpr
| AEParen ArithExpr
data EnumExpr = EEVarPrime VarPrime
| EEConstant Constant
| EEPrev EnumExpr
| EENext EnumExpr
| EEParen EnumExpr
data VarType = EnumType [Constant]
| RangeType Integer Integer
data VarDec = VarDec VarId Type
data Type = Observable RawType | Plain RawType
data RawType = RawType TypeId | Array TypeId Integer
data Var = Var VarId | ArraySelf VarId | ArrayAccess VarId Integer
data VarPrime = Primed Var | UnPrimed Var
data LocalObject = LocalObject Constant Constant
data BoolOp = And | Or | Implies | Equiv | Xor
data ArithOp = Plus | Minus
data RelOp = Lt | Lte | Gt | Gte
data EnvSpec = Obs
|
thinkmoore/influence-checker
|
src/Mck/AST.hs
|
Haskell
|
mit
| 4,237
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-fsx-filesystem-windowsconfiguration.html
module Stratosphere.ResourceProperties.FSxFileSystemWindowsConfiguration where
import Stratosphere.ResourceImports
-- | Full data type definition for FSxFileSystemWindowsConfiguration. See
-- 'fSxFileSystemWindowsConfiguration' for a more convenient constructor.
data FSxFileSystemWindowsConfiguration =
FSxFileSystemWindowsConfiguration
{ _fSxFileSystemWindowsConfigurationActiveDirectoryId :: Maybe (Val Text)
, _fSxFileSystemWindowsConfigurationAutomaticBackupRetentionDays :: Maybe (Val Integer)
, _fSxFileSystemWindowsConfigurationCopyTagsToBackups :: Maybe (Val Bool)
, _fSxFileSystemWindowsConfigurationDailyAutomaticBackupStartTime :: Maybe (Val Text)
, _fSxFileSystemWindowsConfigurationThroughputCapacity :: Maybe (Val Integer)
, _fSxFileSystemWindowsConfigurationWeeklyMaintenanceStartTime :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON FSxFileSystemWindowsConfiguration where
toJSON FSxFileSystemWindowsConfiguration{..} =
object $
catMaybes
[ fmap (("ActiveDirectoryId",) . toJSON) _fSxFileSystemWindowsConfigurationActiveDirectoryId
, fmap (("AutomaticBackupRetentionDays",) . toJSON) _fSxFileSystemWindowsConfigurationAutomaticBackupRetentionDays
, fmap (("CopyTagsToBackups",) . toJSON) _fSxFileSystemWindowsConfigurationCopyTagsToBackups
, fmap (("DailyAutomaticBackupStartTime",) . toJSON) _fSxFileSystemWindowsConfigurationDailyAutomaticBackupStartTime
, fmap (("ThroughputCapacity",) . toJSON) _fSxFileSystemWindowsConfigurationThroughputCapacity
, fmap (("WeeklyMaintenanceStartTime",) . toJSON) _fSxFileSystemWindowsConfigurationWeeklyMaintenanceStartTime
]
-- | Constructor for 'FSxFileSystemWindowsConfiguration' containing required
-- fields as arguments.
fSxFileSystemWindowsConfiguration
:: FSxFileSystemWindowsConfiguration
fSxFileSystemWindowsConfiguration =
FSxFileSystemWindowsConfiguration
{ _fSxFileSystemWindowsConfigurationActiveDirectoryId = Nothing
, _fSxFileSystemWindowsConfigurationAutomaticBackupRetentionDays = Nothing
, _fSxFileSystemWindowsConfigurationCopyTagsToBackups = Nothing
, _fSxFileSystemWindowsConfigurationDailyAutomaticBackupStartTime = Nothing
, _fSxFileSystemWindowsConfigurationThroughputCapacity = Nothing
, _fSxFileSystemWindowsConfigurationWeeklyMaintenanceStartTime = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-fsx-filesystem-windowsconfiguration.html#cfn-fsx-filesystem-windowsconfiguration-activedirectoryid
fsfswcActiveDirectoryId :: Lens' FSxFileSystemWindowsConfiguration (Maybe (Val Text))
fsfswcActiveDirectoryId = lens _fSxFileSystemWindowsConfigurationActiveDirectoryId (\s a -> s { _fSxFileSystemWindowsConfigurationActiveDirectoryId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-fsx-filesystem-windowsconfiguration.html#cfn-fsx-filesystem-windowsconfiguration-automaticbackupretentiondays
fsfswcAutomaticBackupRetentionDays :: Lens' FSxFileSystemWindowsConfiguration (Maybe (Val Integer))
fsfswcAutomaticBackupRetentionDays = lens _fSxFileSystemWindowsConfigurationAutomaticBackupRetentionDays (\s a -> s { _fSxFileSystemWindowsConfigurationAutomaticBackupRetentionDays = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-fsx-filesystem-windowsconfiguration.html#cfn-fsx-filesystem-windowsconfiguration-copytagstobackups
fsfswcCopyTagsToBackups :: Lens' FSxFileSystemWindowsConfiguration (Maybe (Val Bool))
fsfswcCopyTagsToBackups = lens _fSxFileSystemWindowsConfigurationCopyTagsToBackups (\s a -> s { _fSxFileSystemWindowsConfigurationCopyTagsToBackups = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-fsx-filesystem-windowsconfiguration.html#cfn-fsx-filesystem-windowsconfiguration-dailyautomaticbackupstarttime
fsfswcDailyAutomaticBackupStartTime :: Lens' FSxFileSystemWindowsConfiguration (Maybe (Val Text))
fsfswcDailyAutomaticBackupStartTime = lens _fSxFileSystemWindowsConfigurationDailyAutomaticBackupStartTime (\s a -> s { _fSxFileSystemWindowsConfigurationDailyAutomaticBackupStartTime = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-fsx-filesystem-windowsconfiguration.html#cfn-fsx-filesystem-windowsconfiguration-throughputcapacity
fsfswcThroughputCapacity :: Lens' FSxFileSystemWindowsConfiguration (Maybe (Val Integer))
fsfswcThroughputCapacity = lens _fSxFileSystemWindowsConfigurationThroughputCapacity (\s a -> s { _fSxFileSystemWindowsConfigurationThroughputCapacity = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-fsx-filesystem-windowsconfiguration.html#cfn-fsx-filesystem-windowsconfiguration-weeklymaintenancestarttime
fsfswcWeeklyMaintenanceStartTime :: Lens' FSxFileSystemWindowsConfiguration (Maybe (Val Text))
fsfswcWeeklyMaintenanceStartTime = lens _fSxFileSystemWindowsConfigurationWeeklyMaintenanceStartTime (\s a -> s { _fSxFileSystemWindowsConfigurationWeeklyMaintenanceStartTime = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/FSxFileSystemWindowsConfiguration.hs
|
Haskell
|
mit
| 5,291
|
module Graphics.Render.Light(
module L
, enlightNormal
, enlightNormal'
, enlightSimple
) where
import Prelude as P hiding ((<*))
import Graphics.Light as L
import Graphics.GPipe
import Data.Vec as Vec
import Math.Vector
-- | Transforms color with alpha-as-intensity to GPU color
liftVec4Color :: Vec4 Float -> Vec3 (Fragment Float)
liftVec4Color vcpu = Vec.take n3 vRGBA `smult` Vec.get n3 vRGBA
where
vRGBA :: Vec4 (Fragment Float)
vRGBA = toGPU vcpu
v `smult` s = Vec.map (*s) v
enlightSimple ::
-- | Diffuse texture
Texture2D RGBAFormat
-- | Ambient color, alpha is entensity
-> Vec4 Float
-- | Additional color modifier, the fourth component is intensity
-> Vec4 Float
-- | Fragment uv pos
-> Vec2 (Fragment Float)
-- | Resulting color
-> Color RGBAFormat (Fragment Float)
enlightSimple tex ambientColor colorMod uv = RGBA fragColor fragColorA
where
(RGBA diffuseColor diffuseColorA) = sample (Sampler Point Wrap) tex uv
ambient = liftVec4Color ambientColor
modifier = liftVec4Color colorMod
itensity = ambient + 1
fragColor :: Vec3 (Fragment Float)
fragColor = (diffuseColor + modifier) * itensity
fragColorA = diffuseColorA
-- | Performs dynamic lighting of fragments with given set of lights
enlightNormal :: Vec2 Int -- ^ Resolution of screen
-- | Diffuse texture
-> Texture2D RGBAFormat
-- | Normal texture
-> Texture2D RGBAFormat
-- | Ambient color, alpha is entensity
-> Vec4 Float
-- | Additional color modifier, the fourth component is intensity
-> Vec4 Float
-- | Lights that are used to enlight
-> [Light Float]
-- | Inverse of VP matrix
-> Mat44 Float
-- | Fragment uv pos
-> Vec2 (Fragment Float)
-- | Resulting color
-> Color RGBAFormat (Fragment Float)
enlightNormal size tex ntex ambientColor colorMod lightsCPU vpInverse uv =
enlightNormal' size tex ntex ambientColor colorMod lightsCPU vpInverse uv uv
-- | More general function than enlightNormal, accepts different uvs for diffuse and normal textures
enlightNormal' :: Vec2 Int -- ^ Resolution of screen
-- | Diffuse texture
-> Texture2D RGBAFormat
-- | Normal texture
-> Texture2D RGBAFormat
-- | Ambient color, alpha is entensity
-> Vec4 Float
-- | Additional color modifier, the fourth component is intensity
-> Vec4 Float
-- | Lights that are used to enlight
-> [Light Float]
-- | Inverse of VP matrix
-> Mat44 Float
-- | Fragment uv pos for diffuse texture
-> Vec2 (Fragment Float)
-- | Fragment uv pos for normal texture
-> Vec2 (Fragment Float)
-- | Resulting color
-> Color RGBAFormat (Fragment Float)
enlightNormal' size tex ntex ambientColor colorMod lightsCPU vpInverse uvDiff uvNorm =
P.foldl combineLights (RGBA 0 0) $ enlightLight <$> lightsCPU
where
v `smult` s = Vec.map (*s) v
combineLights (RGBA accRGB accAlpha) (RGBA lightRGB lightAlpha) =
RGBA (accRGB + lightRGB)--(accRGB `smult` (1 - accAlpha) + lightRGB `smult` lightAlpha)
(accAlpha + (1 - accAlpha)*lightAlpha)
enlightLight lightCPU = RGBA fragColor fragColorA
where
(RGBA diffuseColor diffuseColorA) = sample (Sampler Point Wrap) tex uvDiff
(RGBA normalMap _) = sample (Sampler Point Wrap) ntex uvNorm
(xSize :. ySize :. ()) = toGPU $ Vec.map fromIntegral size
light :: Light (Fragment Float)
light = toGPU lightCPU
lightPos = lightPosition light
lColorRGB = lightColor light
power = lightPower light
powerLoss = lightPowerLoss light
(fx:.fy:.fz:.fw:.()) = toGPU vpInverse `multmv` (
(fragX/xSize * 2 - 1):.
(fragY/ySize * 2 - 1):.
(fragDepth * 2 - 1) :.
1:.())
fragmentPos :: Vec3 (Fragment Float)
fragmentPos = ((fx/fw) :. (fy/fw) :. (fz/fw) :. ())
lightDir = case light of
(DirectionalLight d _ _) -> d
_ -> lightPos - fragmentPos
dist = Vec.norm lightDir
n = Vec.normalize $ normalMap `smult` 2.0 - 1
l = Vec.normalize lightDir
diffuse :: Vec3 (Fragment Float)
diffuse = (lColorRGB `smult` power) `smult` maxB (Vec.dot n l) 0.0
ambient = liftVec4Color ambientColor
modifier = liftVec4Color colorMod
attenuation = case light of
(ConeLight _ coneDir conAngle _ _ _) ->
let da = (negate l) `angleBetweenVecs` coneDir
distAtten = 1.0 / (attentation powerLoss dist * (conAngle/2*pi))
in ifB (da <* conAngle) distAtten
-- Soft border, the formula is based on sigmoid function
(let x = da - conAngle
p = 0.99
k = 100
in distAtten / (1 + exp (k*x + log ( (1-p)/p ))) )
_ -> 1.0 / (attentation powerLoss dist)
itensity = ambient + diffuse `smult` attenuation
finalColor = (diffuseColor + modifier) * itensity
fragColor :: Vec3 (Fragment Float)
fragColor = finalColor
fragColorA = diffuseColorA
|
NCrashed/sinister
|
src/client/Graphics/Render/Light.hs
|
Haskell
|
mit
| 5,009
|
{-# OPTIONS -Wall #-}
import Data.Functor
import qualified Data.List as List
import qualified Data.Maybe as Maybe
import qualified Data.Set as Set
import Data.Text (Text)
import Helpers.Grid (Grid, (!), (//))
import qualified Helpers.Grid as Grid
import Helpers.Parse
import Helpers.Point (Point (..))
import Text.Parsec
-- | A sea cucumber occupies a grid cell and faces east or south
-- (Advent of Code 2021, day 25).
data SeaCucumber = FacingEast | FacingSouth
  deriving (Eq)

-- Render using the puzzle's glyphs (trailing space pads the grid cell).
instance Show SeaCucumber where
  show FacingEast = "> "
  show FacingSouth = "v "

-- | The sea floor: each cell optionally holds one sea cucumber.
type SeaFloor = Grid (Maybe SeaCucumber)
-- | Read the sea-floor grid from input and print the first step number
-- on which no sea cucumber moves (the simulation's fixed point).
main :: IO ()
main = do
  seaFloor <- Grid.fromList <$> parseLinesIO parser
  let steps = iterate step seaFloor
  -- Index of the first pair of equal consecutive states, +1 because the
  -- puzzle counts the step during which nothing moved.
  let answer = Maybe.fromJust (List.elemIndex True (zipWith (==) steps (tail steps))) + 1
  print answer
-- | One full simulation step: the east-facing herd moves first,
-- then the south-facing herd moves.
step :: SeaFloor -> SeaFloor
step seaFloor = stepSouth (stepEast seaFloor)
-- | Advance every east-facing cucumber one cell to the east,
-- wrapping around at the right edge of the grid.
stepEast :: SeaFloor -> SeaFloor
stepEast =
  stepInDirection
    (\(Point _ minX, Point _ maxX) (Point y x) -> Point y (inc minX maxX x))
    FacingEast
-- | Advance every south-facing cucumber one cell to the south,
-- wrapping around at the bottom edge of the grid.
stepSouth :: SeaFloor -> SeaFloor
stepSouth =
  stepInDirection
    (\(Point minY _, Point maxY _) (Point y x) -> Point (inc minY maxY y) x)
    FacingSouth
-- | Move every cucumber of the given kind whose destination cell — as
-- computed by the supplied wrap-around successor function from the grid
-- bounds — is currently empty. All moves happen simultaneously: the
-- vacated cells and the newly occupied cells are applied as one batch.
stepInDirection :: ((Point, Point) -> Point -> Point) -> SeaCucumber -> SeaFloor -> SeaFloor
stepInDirection updatePoint toMove seaFloor = seaFloor // updates
  where
    bounds = Grid.bounds seaFloor
    cells = [(p, seaFloor ! p) | p <- Grid.allPointsList seaFloor]
    -- Cells that are free *before* the step; legal move targets.
    available = Set.fromList [p | (p, v) <- cells, Maybe.isNothing v]
    -- Cucumbers of the requested kind whose target cell is free,
    -- paired with that target.
    movers =
      [ (p, updatePoint bounds p)
      | (p, v) <- cells
      , v == Just toMove
      , updatePoint bounds p `Set.member` available
      ]
    -- Clear the old cells, then fill the new ones.
    updates =
      [(old, Nothing) | (old, _) <- movers]
        ++ [(new, Just toMove) | (_, new) <- movers]
-- | Cyclic successor over the inclusive range @[lowest .. highest]@:
-- advance @value@ by one and wrap modulo the range size, offset back to
-- @lowest@. (Same formula as the original: the wrap is computed on
-- @value + 1@ directly, matching @lowest@-based grid coordinates.)
inc :: Int -> Int -> Int -> Int
inc lowest highest value =
  let size = highest - lowest + 1
   in (value + 1) `mod` size + lowest
-- | Parse one input line of the sea floor: @>@ is an east-facing
-- cucumber, @v@ a south-facing one, @.@ an empty cell.
parser :: Parsec Text () [Maybe SeaCucumber]
parser = many1 seaCucumber
  where
    seaCucumber =
      try (char '>' $> Just FacingEast)
        <|> try (char 'v' $> Just FacingSouth)
        <|> (char '.' $> Nothing)
|
SamirTalwar/advent-of-code
|
2021/AOC_25.hs
|
Haskell
|
mit
| 2,200
|
{-# LANGUAGE ForeignFunctionInterface, EmptyDataDecls, OverloadedStrings, DeriveGeneric, DeriveDataTypeable #-}
{-
JQuery bindings, loosely based on fay-jquery
-}
module JavaScript.JQuery ( JQuery(..)
, Event(..)
, EventType
, Selector
, Method(..)
, AjaxSettings(..)
, AjaxResult(..)
, ajax
, HandlerSettings(..)
, addClass
, animate
, getAttr
, setAttr
, hasClass
, getHtml
, setHtml
, getProp
, setProp
, removeAttr
, removeClass
, removeProp
, getVal
, setVal
, getText
, setText
, holdReady
, selectElement
, selectObject
, select
, selectEmpty
, selectWithContext
, getCss
, setCss
, getHeight
, setHeight
, getWidth
, setWidth
, getInnerHeight
, getOuterHeight
, getInnerWidth
, getOuterWidth
, getScrollLeft
, setScrollLeft
, getScrollTop
, setScrollTop
, click
, dblclick
, focusin
, focusout
, hover
, mousedown
, mouseenter
, mouseleave
, mousemove
, mouseup
, on
, one
, triggerHandler
, delegateTarget
, isDefaultPrevented
, isImmediatePropagationStopped
, isPropagationStopped
, namespace
, pageX
, pageY
, preventDefault
, stopPropagation
, stopImmediatePropagation
, target
, timeStamp
, eventType
, which
, blur
, change
, onFocus
, focus
, onSelect
, keydown
, keyup
, keypress
, after
, afterJQuery
, afterElem
, append
, appendJQuery
, appendElem
, appendTo
, appendToJQuery
, appendToElem
, before
, beforeJQuery
, beforeElem
, CloneType(..)
, clone
, detach
, detachSelector
, empty
, insertAfter
, insertAfterJQuery
, insertAfterElem
, insertBefore
, insertBeforeJQuery
, insertBeforeElem
, prepend
, prependJQuery
, prependElem
, prependTo
, prependToJQuery
, prependToElem
, remove
, removeSelector
, replaceAll
, replaceAllJQuery
, replaceAllElem
, replaceWith
, replaceWithJQuery
, replaceWithElem
, unwrap
, wrap
, wrapJQuery
, wrapElem
, wrapAll
, wrapAllJQuery
, wrapAllElem
, wrapInner
, wrapInnerJQuery
, wrapInnerElem
, addSelector
, addElement
, addHtml
, add
, andSelf
, children
, childrenMatching
, closestSelector
, closest
, closestElement
, contents
, end
, eq
, filter
, filterElement
, filterJQuery
, find
, findJQuery
, findElement
, first
, has
, hasElement
, is
, isJQuery
, isElement
, last
, next
, nextSelector
, nextAll
, nextAllSelector
, nextUntil
, nextUntilElement
, not
, notElement
, notJQuery
, offsetParent
, parent
, parentSelector
, parents
, parentsSelector
, parentsUntil
, parentsUntilElement
, prev
, prevSelector
, prevAll
, prevAllSelector
, prevUntil
, prevUntilElement
, siblings
, siblingsSelector
, slice
, sliceFromTo
, stop
) where
import Prelude hiding (filter, not, empty, last)
import GHCJS.Marshal
import GHCJS.Foreign (toJSBool, jsNull, jsFalse, jsTrue)
import GHCJS.Types
import GHCJS.DOM.Types ( ToJSString(..), FromJSString(..), toJSString, fromJSString
, Element(..), IsElement(..), toElement, unElement)
import qualified GHCJS.Foreign as F
import qualified GHCJS.Foreign.Callback as F
import GHCJS.Nullable
import qualified JavaScript.Object as F
import JavaScript.JQuery.Internal
import Control.Applicative hiding (empty)
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Monad
import Data.Default
import Data.Maybe
import Data.Text (Text)
import Data.Typeable
import Data.Coerce
import System.IO (fixIO)
-- | jQuery event name, e.g. @"click"@.
type EventType = Text

-- | jQuery/CSS selector string.
type Selector = Text

-- | HTTP method used by 'ajax'.
data Method = GET | POST | PUT | DELETE deriving (Eq, Ord, Enum, Show)

-- | Settings for an 'ajax' request; 'def' gives jQuery's usual defaults.
data AjaxSettings = AjaxSettings { asContentType :: Text -- ^ request Content-Type
                                 , asCache :: Bool -- ^ allow the browser to cache the response
                                 , asIfModified :: Bool -- ^ send If-Modified-Since
                                 , asMethod :: Method -- ^ HTTP method
                                 } deriving (Ord, Eq, Show, Typeable)

-- | Result of an 'ajax' call.
data AjaxResult = AjaxResult { arStatus :: Int -- ^ HTTP status (0 when unavailable)
                             , arData :: Maybe Text -- ^ response body, if any
                             } deriving (Ord, Eq, Show, Typeable)

instance Default AjaxSettings where
  -- Form-encoded GET, caching allowed, no conditional request.
  def = AjaxSettings "application/x-www-form-urlencoded; charset=UTF-8" True False GET

instance ToJSRef AjaxSettings where
  -- Marshal the record to a plain JS object carrying the field names
  -- jQuery.ajax expects; the response is always requested as text.
  toJSRef (AjaxSettings ct cache ifMod method) = do
    o <- F.create
    let (.=) :: Text -> JSRef -> IO ()
        p .= v = F.setProp p v o
    "method" .= toJSString method
    "ifModified" .= toJSBool ifMod
    "cache" .= toJSBool cache
    "contentType" .= toJSString ct
    "dataType" .= ("text" :: JSString)
    return o

-- NOTE(review): empty instance body — this relies on ToJSString having
-- a usable default method for Method; confirm against GHCJS.DOM.Types.
instance ToJSString Method

instance ToJSRef Method where
  toJSRef m = (toJSRef :: JSString -> JSRef) $ case m of
    GET -> "GET"
    POST -> "POST"
    PUT -> "PUT"
    DELETE -> "DELETE"

-- | Perform a jQuery AJAX request to the given URL with the given
-- key/value request data and settings.
ajax :: Text -> [(Text,Text)] -> AjaxSettings -> IO AjaxResult
ajax url d s = do
  o <- F.create
  forM_ d (\(k,v) -> F.setProp k (toJSString v) o)
  os <- toJSRef s
  -- Attach the data object as the "data" field of the settings object.
  F.setProp ("data"::Text) o os
  arr <- jq_ajax (toJSString url) os
  dat <- F.getProp ("data"::Text) arr
  -- NOTE(review): this 'd' shadows the request-data parameter above;
  -- it is the response body (Nothing when the JS side returned null).
  let d = if isNull dat then Nothing else Just (fromJSString dat)
  status <- fromMaybe 0 <$> (fromJSRef =<< F.getProp ("status"::Text) arr)
  return (AjaxResult status d)

-- | Per-handler behaviour for 'on' / 'one' and the event helpers.
data HandlerSettings = HandlerSettings { hsPreventDefault :: Bool
                                       , hsStopPropagation :: Bool
                                       , hsStopImmediatePropagation :: Bool
                                       , hsSynchronous :: Bool -- ^ run the handler synchronously
                                       , hsDescendantFilter :: Maybe Selector -- ^ delegate to matching descendants
                                       , hsHandlerData :: Maybe JSRef -- ^ extra data passed to the handler
                                       }

-- Flatten HandlerSettings to the positional arguments the FFI layer
-- expects, substituting JS null for the optional fields.
convertHandlerSettings :: HandlerSettings -> (Bool, Bool, Bool, JSString, JSRef)
convertHandlerSettings (HandlerSettings pd sp sip _ ds hd) =
  (pd, sp, sip, maybe jsNull toJSString ds, fromMaybe jsNull hd)

instance Default HandlerSettings where
  -- Asynchronous handler, no propagation control, no delegation.
  def = HandlerSettings False False False True Nothing Nothing
----------------------------------------------------------------------
-- Attribute / class / content / geometry accessors.
-- Thin wrappers over the jq_* FFI imports; Text is converted to
-- JSString on the way in and converted back on the way out.
----------------------------------------------------------------------

-- | Add a CSS class to the matched elements.
addClass :: Text -> JQuery -> IO JQuery
addClass c = jq_addClass (toJSString c)

animate :: F.Object -> F.Object -> JQuery -> IO JQuery
animate = jq_animate

getAttr :: Text -> JQuery -> IO Text
getAttr a jq = fromJSString <$> jq_getAttr (toJSString a) jq

setAttr :: Text -> Text -> JQuery -> IO JQuery
setAttr a v = jq_setAttr (toJSString a) (toJSString v)

hasClass :: Text -> JQuery -> IO Bool
hasClass c jq = jq_hasClass (toJSString c) jq

getHtml :: JQuery -> IO Text
getHtml jq = fromJSString <$> jq_getHtml jq

setHtml :: Text -> JQuery -> IO JQuery
setHtml t = jq_setHtml (toJSString t)

getProp :: Text -> JQuery -> IO Text
getProp p jq = fromJSString <$> jq_getProp (toJSString p) jq

-- fixme value can be Boolean or Number
setProp :: Text -> Text -> JQuery -> IO JQuery
setProp p v = jq_setProp (toJSString p) (toJSString v)

removeAttr :: Text -> JQuery -> IO JQuery
removeAttr a = jq_removeAttr (toJSString a)

removeClass :: Text -> JQuery -> IO JQuery
removeClass c = jq_removeClass (toJSString c)

removeProp :: Text -> JQuery -> IO JQuery
removeProp p = jq_removeProp (toJSString p)

-- toggleClass :: Text -> JQuery -> IO JQuery
-- toggleClass c = jq_toggleClass (toJSString c)

getVal :: JQuery -> IO Text
getVal jq = fromJSString <$> jq_getVal jq

setVal :: Text -> JQuery -> IO JQuery
setVal v = jq_setVal (toJSString v)

getText :: JQuery -> IO Text
getText jq = fromJSString <$> jq_getText jq

setText :: Text -> JQuery -> IO JQuery
setText t = jq_setText (toJSString t)

-- | Delay (True) or release (False) jQuery's ready event.
holdReady :: Bool -> IO ()
holdReady b = jq_holdReady b

-- | Wrap a DOM element in a jQuery object.
selectElement :: IsElement e => e -> IO JQuery
selectElement e = jq_selectElement (unElement (toElement e))

selectObject :: F.Object -> IO JQuery
selectObject a = jq_selectObject (coerce a)

-- | Select elements by CSS selector.
select :: Text -> IO JQuery
select q = jq_select (toJSString q)

-- | An empty jQuery set.
selectEmpty :: IO JQuery
selectEmpty = jq_selectEmpty

-- :: Text -> Either JQuery F.Object -> IO JQuery ?
selectWithContext :: Text -> F.Object -> IO JQuery
selectWithContext t o = jq_selectWithContext (toJSString t) (coerce o)

getCss :: Text -> JQuery -> IO Text
getCss t jq = fromJSString <$> jq_getCss (toJSString t) jq

setCss :: Text -> Text -> JQuery -> IO JQuery
setCss k v = jq_setCss (toJSString k) (toJSString v)

getHeight :: JQuery -> IO Double
getHeight = jq_getHeight

setHeight :: Double -> JQuery -> IO JQuery
setHeight = jq_setHeight

getWidth :: JQuery -> IO Double
getWidth = jq_getWidth

setWidth :: Double -> JQuery -> IO JQuery
setWidth = jq_setWidth

getInnerHeight :: JQuery -> IO Double
getInnerHeight = jq_getInnerHeight

getInnerWidth :: JQuery -> IO Double
getInnerWidth = jq_getInnerWidth

getOuterHeight :: Bool -- ^ include margin?
               -> JQuery
               -> IO Double
getOuterHeight b = jq_getOuterHeight b

getOuterWidth :: Bool -- ^ include margin?
              -> JQuery
              -> IO Double
getOuterWidth b = jq_getOuterWidth b

getScrollLeft :: JQuery -> IO Double
getScrollLeft = jq_getScrollLeft

setScrollLeft :: Double -> JQuery -> IO JQuery
setScrollLeft = jq_setScrollLeft

getScrollTop :: JQuery -> IO Double
getScrollTop = jq_getScrollTop

setScrollTop :: Double -> JQuery -> IO JQuery
setScrollTop = jq_setScrollTop
----------------------------------------------------------------------
-- Mouse-event helpers: each is 'on' specialised to a fixed event name;
-- the returned IO action unregisters the handler.
----------------------------------------------------------------------

click :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
click a = on a "click"

dblclick :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
dblclick a = on a "dblclick"

focusin :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
focusin a = on a "focusin"

focusout :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
focusout a = on a "focusout"

-- NOTE(review): jQuery removed the "hover" pseudo-event in 1.9; with a
-- modern jQuery this registers a literal "hover" event — verify.
hover :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
hover a = on a "hover"

mousedown :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
mousedown a = on a "mousedown"

mouseenter :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
mouseenter a = on a "mouseenter"

mouseleave :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
mouseleave a = on a "mouseleave"

mousemove :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
mousemove a = on a "mousemove"

-- NOTE(review): mouseout/mouseover are not in the export list — intended?
mouseout :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
mouseout a = on a "mouseout"

mouseover :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
mouseover a = on a "mouseover"

mouseup :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
mouseup a = on a "mouseup"
{- | Register an event handler. Use the returned IO action to remove the
handler.

Note that the handler will stay in memory until the returned IO action is
executed, even if the DOM nodes are removed.
-}
on :: (Event -> IO ()) -> EventType -> HandlerSettings -> JQuery -> IO (IO ())
on a et hs jq = do
  -- Synchronous handlers run on the calling JS thread (continuing
  -- asynchronously if they block); otherwise the callback is queued.
  cb <- if hsSynchronous hs
          then F.syncCallback1 F.ContinueAsync a
          else F.asyncCallback1 a
  jq_on cb et' ds hd sp sip pd jq
  -- Deregister with jQuery, then free the callback wrapper.
  return (jq_off cb et' ds jq >> F.releaseCallback cb)
  where
    et' = toJSString et
    (pd, sp, sip, ds, hd) = convertHandlerSettings hs
one :: (Event -> IO ()) -> EventType -> HandlerSettings -> JQuery -> IO (IO ())
one a et hs jq = do
cb <- fixIO $ \cb ->
let a' = \e -> F.releaseCallback cb >> a e
in if hsSynchronous hs
then F.syncCallback1 F.ContinueAsync a
else F.asyncCallback1 a
jq_one cb et' ds hd sp sip pd jq
return (jq_off cb et' ds jq >> F.releaseCallback cb)
where
et' = toJSString et
(pd, sp, sip, ds, hd) = convertHandlerSettings hs
-- | Fire the given event on the matched elements.
-- NOTE(review): 'trigger' is not in the export list — intended?
trigger :: EventType -> JQuery -> IO ()
trigger et jq = jq_trigger (toJSString et) jq

-- | Run the bound handlers without triggering the native event.
triggerHandler :: EventType -> JQuery -> IO ()
triggerHandler et jq = jq_triggerHandler (toJSString et) jq

----------------------------------------------------------------------
-- Event accessors: read-only views on a jQuery Event object.
----------------------------------------------------------------------

delegateTarget :: Event -> IO Element
delegateTarget ev = Element <$> jq_delegateTarget ev

isDefaultPrevented :: Event -> IO Bool
isDefaultPrevented e = jq_isDefaultPrevented e

isImmediatePropagationStopped :: Event -> IO Bool
isImmediatePropagationStopped e = jq_isImmediatePropagationStopped e

isPropagationStopped :: Event -> IO Bool
isPropagationStopped e = jq_isPropagationStopped e

namespace :: Event -> IO Text
namespace e = fromJSString <$> jq_namespace e

pageX :: Event -> IO Double
pageX = jq_pageX

pageY :: Event -> IO Double
pageY = jq_pageY

preventDefault :: Event -> IO ()
preventDefault = jq_preventDefault

stopPropagation :: Event -> IO ()
stopPropagation = jq_stopPropagation

stopImmediatePropagation :: Event -> IO ()
stopImmediatePropagation = jq_stopImmediatePropagation

target :: Event -> IO Element
target ev = Element <$> jq_target ev

timeStamp :: Event -> IO Double
timeStamp = jq_timeStamp

eventType :: Event -> IO Text
eventType e = fromJSString <$> jq_eventType e

-- | Key or mouse-button code of the event.
which :: Event -> IO Int
which = jq_eventWhich
----------------------------------------------------------------------
-- Form and keyboard event helpers. 'onFocus' / 'onSelect' avoid name
-- clashes with the 'focus' action and Prelude-style names.
----------------------------------------------------------------------

blur :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
blur a = on a "blur"

change :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
change a = on a "change"

onFocus :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
onFocus a = on a "focus"

-- | Give focus to the first matched element (the action, not the event).
focus :: JQuery -> IO JQuery
focus = jq_focus

onSelect :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
onSelect a = on a "select"

-- NOTE(review): 'submit' is not in the export list — intended?
submit :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
submit a = on a "submit"

keydown :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
keydown a = on a "keydown"

keyup :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
keyup a = on a "keyup"

keypress :: (Event -> IO ()) -> HandlerSettings -> JQuery -> IO (IO ())
keypress a = on a "keypress"

----------------------------------------------------------------------
-- DOM insertion. Each operation comes in three flavours — raw HTML
-- Text, a JQuery object, or a DOM element — all funnelled into the
-- same jq_* binding via 'coerce'.
----------------------------------------------------------------------

after :: Text -> JQuery -> IO JQuery
after h jq = jq_after (coerce $ toJSString h) jq

afterJQuery :: JQuery -> JQuery -> IO JQuery
afterJQuery j jq = jq_after (coerce j) jq

afterElem :: IsElement e => e -> JQuery -> IO JQuery
afterElem e jq = jq_after (coerce . unElement $ toElement e) jq

append :: Text -> JQuery -> IO JQuery
append h jq = jq_append (coerce $ toJSString h) jq

appendJQuery :: JQuery -> JQuery -> IO JQuery
appendJQuery j jq = jq_append (coerce j) jq

appendElem :: IsElement e => e -> JQuery -> IO JQuery
appendElem e jq = jq_append (coerce . unElement $ toElement e) jq

appendTo :: Text -> JQuery -> IO JQuery
appendTo h jq = jq_appendTo (coerce $ toJSString h) jq

appendToJQuery :: JQuery -> JQuery -> IO JQuery
appendToJQuery j jq = jq_appendTo (coerce j) jq

appendToElem :: IsElement e => e -> JQuery -> IO JQuery
appendToElem e jq = jq_appendTo (coerce . unElement $ toElement e) jq

before :: Text -> JQuery -> IO JQuery
before h jq = jq_before (coerce $ toJSString h) jq

beforeJQuery :: JQuery -> JQuery -> IO JQuery
beforeJQuery j jq = jq_before (coerce j) jq

beforeElem :: IsElement e => e -> JQuery -> IO JQuery
beforeElem e jq = jq_before (coerce . unElement $ toElement e) jq
-- | How much associated state to copy when cloning elements.
data CloneType = WithoutDataAndEvents
               | WithDataAndEvents
               | DeepWithDataAndEvents

-- | Clone the matched elements. The two flags passed to the FFI are
-- (withDataAndEvents, deepWithDataAndEvents).
clone :: CloneType -> JQuery -> IO JQuery
clone ct = case ct of
  WithoutDataAndEvents  -> jq_clone False False
  WithDataAndEvents     -> jq_clone True False
  DeepWithDataAndEvents -> jq_clone True True
----------------------------------------------------------------------
-- DOM removal, wrapping and set combination. A JS null selector means
-- "no selector filter" for the underlying jQuery call.
----------------------------------------------------------------------

-- | Remove the matched elements, keeping their data and events.
detach :: JQuery -> IO JQuery
detach = jq_detach jsNull

detachSelector :: Selector -> JQuery -> IO JQuery
detachSelector s = jq_detach (toJSString s)

-- | Remove all child nodes of the matched elements.
empty :: JQuery -> IO JQuery
empty = jq_empty

insertAfter :: Text -> JQuery -> IO JQuery
insertAfter h jq = jq_insertAfter (coerce $ toJSString h) jq

insertAfterJQuery :: JQuery -> JQuery -> IO JQuery
insertAfterJQuery j jq = jq_insertAfter (coerce j) jq

insertAfterElem :: IsElement e => e -> JQuery -> IO JQuery
insertAfterElem e jq = jq_insertAfter (coerce . unElement $ toElement e) jq

insertBefore :: Text -> JQuery -> IO JQuery
insertBefore h jq = jq_insertBefore (coerce $ toJSString h) jq

insertBeforeJQuery :: JQuery -> JQuery -> IO JQuery
insertBeforeJQuery j jq = jq_insertBefore (coerce j) jq

insertBeforeElem :: IsElement e => e -> JQuery -> IO JQuery
insertBeforeElem e jq = jq_insertBefore (coerce . unElement $ toElement e) jq

prepend :: Text -> JQuery -> IO JQuery
prepend h jq = jq_prepend (coerce $ toJSString h) jq

prependJQuery :: JQuery -> JQuery -> IO JQuery
prependJQuery j jq = jq_prepend (coerce j) jq

prependElem :: IsElement e => e -> JQuery -> IO JQuery
prependElem e jq = jq_prepend (coerce . unElement $ toElement e) jq

prependTo :: Text -> JQuery -> IO JQuery
prependTo h jq = jq_prependTo (coerce $ toJSString h) jq

prependToJQuery :: JQuery -> JQuery -> IO JQuery
prependToJQuery j jq = jq_prependTo (coerce j) jq

prependToElem :: IsElement e => e -> JQuery -> IO JQuery
prependToElem e jq = jq_prependTo (coerce . unElement $ toElement e) jq

-- | Remove the matched elements, discarding their data and events.
remove :: JQuery -> IO JQuery
remove = jq_remove jsNull

removeSelector :: Selector -> JQuery -> IO JQuery
removeSelector s = jq_remove (toJSString s)

replaceAll :: Text -> JQuery -> IO JQuery
replaceAll h jq = jq_replaceAll (coerce $ toJSString h) jq

replaceAllJQuery :: JQuery -> JQuery -> IO JQuery
replaceAllJQuery j jq = jq_replaceAll (coerce j) jq

replaceAllElem :: IsElement e => e -> JQuery -> IO JQuery
replaceAllElem e jq = jq_replaceAll (coerce . unElement $ toElement e) jq

replaceWith :: Text -> JQuery -> IO JQuery
replaceWith h jq = jq_replaceWith (coerce $ toJSString h) jq

replaceWithJQuery :: JQuery -> JQuery -> IO JQuery
replaceWithJQuery j jq = jq_replaceWith (coerce j) jq

replaceWithElem :: IsElement e => e -> JQuery -> IO JQuery
replaceWithElem e jq = jq_replaceWith (coerce . unElement $ toElement e) jq

unwrap :: JQuery -> IO JQuery
unwrap = jq_unwrap

wrap :: Text -> JQuery -> IO JQuery
wrap h jq = jq_wrap (coerce $ toJSString h) jq

wrapJQuery :: JQuery -> JQuery -> IO JQuery
wrapJQuery j jq = jq_wrap (coerce j) jq

wrapElem :: IsElement e => e -> JQuery -> IO JQuery
wrapElem e jq = jq_wrap (coerce . unElement $ toElement e) jq

wrapAll :: Text -> JQuery -> IO JQuery
wrapAll h jq = jq_wrapAll (coerce $ toJSString h) jq

wrapAllJQuery :: JQuery -> JQuery -> IO JQuery
wrapAllJQuery j jq = jq_wrapAll (coerce j) jq

wrapAllElem :: IsElement e => e -> JQuery -> IO JQuery
wrapAllElem e jq = jq_wrapAll (coerce . unElement $ toElement e) jq

wrapInner :: Text -> JQuery -> IO JQuery
wrapInner h jq = jq_wrapInner (coerce $ toJSString h) jq

wrapInnerJQuery :: JQuery -> JQuery -> IO JQuery
wrapInnerJQuery j jq = jq_wrapInner (coerce j) jq

wrapInnerElem :: IsElement e => e -> JQuery -> IO JQuery
wrapInnerElem e jq = jq_wrapInner (coerce . unElement $ toElement e) jq

addSelector :: Selector -> JQuery -> IO JQuery
addSelector s jq = jq_add (coerce $ toJSString s) jq

addElement :: IsElement e => e -> JQuery -> IO JQuery
addElement e jq = jq_add (coerce . unElement $ toElement e) jq

addHtml :: Text -> JQuery -> IO JQuery
addHtml h jq = jq_add (coerce $ toJSString h) jq

add :: JQuery -> JQuery -> IO JQuery
add j jq = jq_add (coerce j) jq

-- addSelectorWithContext :: Selector -> JQuery -> JQuery -> IO JQuery
-- addSelectorWithContext = undefined

andSelf :: JQuery -> IO JQuery
andSelf = jq_andSelf
----------------------------------------------------------------------
-- Traversal. As above, jsNull stands for "no selector filter".
----------------------------------------------------------------------

children :: JQuery -> IO JQuery
children = jq_children jsNull

childrenMatching :: Selector -> JQuery -> IO JQuery
childrenMatching s = jq_children (toJSString s)

closestSelector :: Selector -> JQuery -> IO JQuery
closestSelector s jq = jq_closest (coerce $ toJSString s) jq

-- closestWithContext :: Selector -> Selector -> JQuery -> IO JQuery
-- closestWithContext = undefined

closest :: JQuery -> JQuery -> IO JQuery
closest j jq = jq_closest (coerce j) jq

closestElement :: IsElement e => e -> JQuery -> IO JQuery
closestElement e jq = jq_closest (coerce . unElement $ toElement e) jq

contents :: JQuery -> IO JQuery
contents = jq_contents

-- This just isn't cool. Can't we all just use map?
-- each :: (Double -> Element -> Fay Bool) -> JQuery -> Fay JQuery
-- each = ffi "%2['each'](%1)"

end :: JQuery -> IO JQuery
end = jq_end

eq :: Int -> JQuery -> IO JQuery
eq = jq_eq

filter :: Selector -> JQuery -> IO JQuery
filter s = jq_filter (coerce $ toJSString s)

filterElement :: IsElement e => e -> JQuery -> IO JQuery
filterElement e = jq_filter (coerce . unElement $ toElement e)

filterJQuery :: JQuery -> JQuery -> IO JQuery
filterJQuery j = jq_filter (coerce j)

find :: Selector -> JQuery -> IO JQuery
find s = jq_find (coerce $ toJSString s)

findJQuery :: JQuery -> JQuery -> IO JQuery
findJQuery j = jq_find (coerce j)

findElement :: IsElement e => e -> JQuery -> IO JQuery
findElement e = jq_find (coerce . unElement $ toElement e)

first :: JQuery -> IO JQuery
first = jq_first

has :: Selector -> JQuery -> IO JQuery
has s = jq_has (coerce $ toJSString s)

hasElement :: IsElement e => e -> JQuery -> IO JQuery
hasElement e = jq_has (coerce . unElement $ toElement e)

is :: Selector -> JQuery -> IO Bool
is s = jq_is (coerce $ toJSString s)

isJQuery :: JQuery -> JQuery -> IO Bool
isJQuery j = jq_is (coerce j)

isElement :: IsElement e => e -> JQuery -> IO Bool
isElement e = jq_is (coerce . unElement $ toElement e)

last :: JQuery -> IO JQuery
last = jq_last

next :: JQuery -> IO JQuery
next = jq_next jsNull

nextSelector :: Selector -> JQuery -> IO JQuery
nextSelector s = jq_next (toJSString s)

nextAll :: JQuery -> IO JQuery
nextAll = jq_nextAll jsNull

nextAllSelector :: Selector -> JQuery -> IO JQuery
nextAllSelector s = jq_nextAll (toJSString s)

nextUntil :: Selector -> Maybe Selector -> JQuery -> IO JQuery
nextUntil s mf = jq_nextUntil (coerce $ toJSString s) (maybe jsNull toJSString mf)

nextUntilElement :: IsElement e => e -> Maybe Selector -> JQuery -> IO JQuery
nextUntilElement e mf = jq_nextUntil (coerce . unElement $ toElement e) (maybe jsNull toJSString mf)

not :: Selector -> JQuery -> IO JQuery
not s = jq_not (coerce $ toJSString s)

notElement :: IsElement e => e -> JQuery -> IO JQuery
notElement e = jq_not (coerce . unElement $ toElement e)

-- notElements :: [Element] -> JQuery -> IO JQuery
-- notElements = jq_notElements

notJQuery :: JQuery -> JQuery -> IO JQuery
notJQuery j = jq_not (coerce j)

offsetParent :: JQuery -> IO JQuery
offsetParent = jq_offsetParent

parent :: JQuery -> IO JQuery
parent = jq_parent jsNull

-- NOTE(review): signature uses String where siblings use Selector
-- (= Text); likely an oversight, but changing it would break callers.
parentSelector :: String -> JQuery -> IO JQuery
parentSelector s = jq_parent (toJSString s)

parents :: JQuery -> IO JQuery
parents = jq_parents jsNull

parentsSelector :: Selector -> JQuery -> IO JQuery
parentsSelector s = jq_parents (toJSString s)

parentsUntil :: Selector -> Maybe Selector -> JQuery -> IO JQuery
parentsUntil s mf = jq_parentsUntil (coerce $ toJSString s) (maybe jsNull (coerce . toJSString) mf)

parentsUntilElement :: IsElement e => e -> Maybe Selector -> JQuery -> IO JQuery
parentsUntilElement e mf = jq_parentsUntil (coerce . unElement $ toElement e) (maybe jsNull (coerce . toJSString) mf)

prev :: JQuery -> IO JQuery
prev = jq_prev jsNull

prevSelector :: Selector -> JQuery -> IO JQuery
prevSelector s = jq_prev (toJSString s)

prevAll :: JQuery -> IO JQuery
prevAll = jq_prevAll jsNull

-- NOTE(review): String vs Selector inconsistency, as with parentSelector.
prevAllSelector :: String -> JQuery -> IO JQuery
prevAllSelector s = jq_prevAll (toJSString s)

prevUntil :: Selector -> Maybe Selector -> JQuery -> IO JQuery
prevUntil s mf = jq_prevUntil (coerce $ toJSString s) (maybe jsNull toJSString mf)

prevUntilElement :: IsElement e => e -> Maybe Selector -> JQuery -> IO JQuery
prevUntilElement e mf = jq_prevUntil (coerce . unElement $ toElement e) (maybe jsNull toJSString mf)

-- Uses Nullable rather than jsNull — same effect, different encoding.
siblings :: JQuery -> IO JQuery
siblings = jq_siblings (maybeToNullable Nothing)

siblingsSelector :: Selector -> JQuery -> IO JQuery
siblingsSelector s = jq_siblings (maybeToNullable (Just (toJSString s)))

slice :: Int -> JQuery -> IO JQuery
slice = jq_slice

sliceFromTo :: Int -> Int -> JQuery -> IO JQuery
sliceFromTo = jq_sliceFromTo

stop :: Bool -- ^ clear the animation queue?
     -> JQuery -> IO JQuery
stop = jq_stop
|
mgsloan/ghcjs-jquery
|
JavaScript/JQuery.hs
|
Haskell
|
mit
| 28,244
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.