| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| string, length 5 to 1.03M | string, length 5 to 90 | string, length 4 to 158 | string, 15 classes | int64, 5 to 1.03M | int64, 0 to 53.9k | int64, 2 to 4.17k | int64, 0 to 365k | int64, 3 to 317k | int64, 1 to 171k | int64, 1 to 146k | int64, -1 to 37.3k | int64, -1 to 1.31k |
module Database.VCache.VTx
( VTx
, runVTx
, liftSTM
, markDurable
, markDurableIf
, getVTxSpace
) where
import Control.Monad
import Control.Monad.Trans.State.Strict
import Control.Concurrent.STM
import Control.Concurrent.MVar
import qualified Data.Map.Strict as Map
import Database.VCache.Types
-- | runVTx executes a transaction that may involve both STM TVars
-- (via liftSTM) and VCache PVars (via readPVar, writePVar).
runVTx :: VSpace -> VTx a -> IO a
runVTx vc action = do
mvWait <- newEmptyMVar
join (atomically (runVTx' vc mvWait action))
{-# INLINABLE runVTx #-}
runVTx' :: VSpace -> MVar () -> VTx a -> STM (IO a)
runVTx' vc mvWait action =
let s0 = VTxState vc Map.empty False in
runStateT (_vtx action) s0 >>= \ (r,s) ->
-- fast path for read-only, non-durable actions
let bWrite = not (Map.null (vtx_writes s)) in
let bSync = vtx_durable s in
let bDone = not (bWrite || bSync) in
if bDone then return (return r) else
-- otherwise, we update shared queue w/ potential conflicts
readTVar (vcache_writes vc) >>= \ w ->
let wdata' = updateLog (vtx_writes s) (write_data w) in
let wsync' = updateSync bSync mvWait (write_sync w) in
let w' = Writes { write_data = wdata', write_sync = wsync' } in
writeTVar (vcache_writes vc) w' >>= \ () ->
return $ do
w' `seq` signalWriter vc
when bSync (takeMVar mvWait)
return r
-- Signal the writer of work to do.
signalWriter :: VSpace -> IO ()
signalWriter vc = void (tryPutMVar (vcache_signal vc) ())
{-# INLINE signalWriter #-}
-- Record recent writes for each PVar.
updateLog :: WriteLog -> WriteLog -> WriteLog
updateLog = Map.union
{-# INLINE updateLog #-}
-- Track which threads are waiting on a commit signal.
updateSync :: Bool -> MVar () -> [MVar ()] -> [MVar ()]
updateSync bSync v = if bSync then (v:) else id
{-# INLINE updateSync #-}
-- | Durability for a VTx transaction is optional: it requires an
-- additional wait for the background thread to signal that it has
-- committed content to the persistence layer. Due to how writes
-- are batched, a durable transaction may share its wait with many
-- other transactions that occur at more or less the same time.
--
-- Developers should mark a transaction durable only if necessary
-- based on domain layer policies. E.g. for a shopping service,
-- normal updates and views of the virtual shopping cart might not
-- be durable while committing to a purchase is durable.
--
markDurable :: VTx ()
markDurable = VTx $ modify $ \ vtx ->
vtx { vtx_durable = True }
{-# INLINE markDurable #-}
-- | This variation of markDurable makes it easier to short-circuit
-- complex computations to decide durability when the transaction is
-- already durable. If durability is already marked, the boolean is
-- not evaluated.
markDurableIf :: Bool -> VTx ()
markDurableIf b = VTx $ modify $ \ vtx ->
let bDurable = vtx_durable vtx || b in
vtx { vtx_durable = bDurable }
{-# INLINE markDurableIf #-}
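-- Usage sketch (added for illustration; not part of the original module).
-- It assumes 'liftSTM :: STM a -> VTx a', as described in the runVTx
-- documentation above: read a flag from a TVar inside the transaction and
-- request durability only when it is set.
exampleCommit :: VSpace -> TVar Bool -> IO ()
exampleCommit vc important = runVTx vc $ do
    b <- liftSTM (readTVar important)
    markDurableIf b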
| dmbarbour/haskell-vcache | hsrc_lib/Database/VCache/VTx.hs | bsd-2-clause | 3,032 | 0 | 31 | 635 | 696 | 372 | 324 | 52 | 2 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Auto.Genome
Description : The genetic algorithm implementation
Copyright : (c) Bo Joel Svensson, 2015
Michael Vollmer, 2015
License : GPL-3
Maintainer :
Stability : experimental
Portability :
A typeclass for basic operations on genomes.
-}
module Auto.Genome (Genome(..)) where
import Control.Monad.Random
-- | A genome is parameterized by its representation and its
-- "return value" (i.e. what type of value it represents).
-- You can expect to mutate and cross over genomes.
class Genome a b where
-- | Convert a value to a genome.
toValue :: b -> a
-- | Randomly mutate a genome based on some probability.
mutate :: (RandomGen g, Fractional f, Ord f, Random f)
=> g -> f -> b -> b
-- | Cross over two genomes.
cross :: (RandomGen g) => g -> b -> b -> b
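-- Hypothetical instance sketch (added for illustration; not part of the
-- original module): a bit-string genome whose value is its count of set
-- bits. 'mutate' flips each bit independently with probability p, and
-- 'cross' performs a single-point crossover at a random index.
--
-- instance Genome Int [Bool] where
--   toValue = length . filter id
--   mutate g p = zipWith (\r b -> if r < p then not b else b) (randomRs (0, 1) g)
--   cross g xs ys = let (i, _) = randomR (0, length xs) g
--                   in take i xs ++ drop i ys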
| iu-parfunc/AutoObsidian | src/Auto/Genome.hs | bsd-3-clause | 874 | 0 | 10 | 208 | 119 | 69 | 50 | 8 | 0 |
{-# LANGUAGE PatternGuards #-}
module Idris.CaseSplit(splitOnLine, replaceSplits,
getClause, getProofClause,
mkWith,
nameMissing,
getUniq, nameRoot) where
-- splitting a variable in a pattern clause
import Idris.AbsSyntax
import Idris.AbsSyntaxTree (Idris, IState, PTerm)
import Idris.ElabDecls
import Idris.Delaborate
import Idris.Parser
import Idris.Error
import Idris.Output
import Idris.Elab.Value
import Idris.Elab.Term
import Idris.Core.TT
import Idris.Core.Typecheck
import Idris.Core.Evaluate
import Data.Maybe
import Data.Char
import Data.List (isPrefixOf, isSuffixOf)
import Control.Monad
import Control.Monad.State.Strict
import Text.Parser.Combinators
import Text.Parser.Char(anyChar)
import Text.Trifecta(Result(..), parseString)
import Text.Trifecta.Delta
import qualified Data.ByteString.UTF8 as UTF8
import Debug.Trace
{-
Given a pattern clause and a variable 'n', elaborate the clause and find the
type of 'n'.
Make new pattern clauses by replacing 'n' with all the possibly constructors
applied to '_', and replacing all other variables with '_' in order to
resolve other dependencies.
Finally, merge the generated patterns with the original, by matching.
Always take the "more specific" argument when there is a discrepancy, i.e.
names over '_', patterns over names, etc.
-}
-- Given a variable to split, and a term application, return a list of
-- variable updates
split :: Name -> PTerm -> Idris [[(Name, PTerm)]]
split n t'
= do ist <- getIState
-- Make sure all the names in the term are accessible
mapM_ (\n -> setAccessibility n Public) (allNamesIn t')
         -- ETyDecl rather than ELHS because there'll be explicit type
         -- matching
(tm, ty, pats) <- elabValBind recinfo ETyDecl True (addImplPat ist t')
-- ASSUMPTION: tm is in normal form after elabValBind, so we don't
-- need to do anything special to find out what family each argument
-- is in
logLvl 4 ("Elaborated:\n" ++ show tm ++ " : " ++ show ty ++ "\n" ++ show pats)
-- iputStrLn (show (delab ist tm) ++ " : " ++ show (delab ist ty))
-- iputStrLn (show pats)
let t = mergeUserImpl (addImplPat ist t') (delab ist tm)
let ctxt = tt_ctxt ist
case lookup n pats of
Nothing -> ifail $ show n ++ " is not a pattern variable"
Just ty ->
do let splits = findPats ist ty
iLOG ("New patterns " ++ showSep ", "
(map showTmImpls splits))
let newPats_in = zipWith (replaceVar ctxt n) splits (repeat t)
logLvl 4 ("Working from " ++ show t)
logLvl 4 ("Trying " ++ showSep "\n"
(map (showTmImpls) newPats_in))
newPats <- mapM elabNewPat newPats_in
logLvl 3 ("Original:\n" ++ show t)
logLvl 3 ("Split:\n" ++
(showSep "\n" (map show (mapMaybe id newPats))))
logLvl 3 "----"
let newPats' = mergeAllPats ist n t (mapMaybe id newPats)
iLOG ("Name updates " ++ showSep "\n"
(map (\ (p, u) -> show u ++ " " ++ show p) newPats'))
return (map snd newPats')
data MergeState = MS { namemap :: [(Name, Name)],
invented :: [(Name, Name)],
explicit :: [Name],
updates :: [(Name, PTerm)] }
addUpdate :: Name -> Idris.AbsSyntaxTree.PTerm -> State MergeState ()
addUpdate n tm = do ms <- get
put (ms { updates = ((n, stripNS tm) : updates ms) } )
inventName :: Idris.AbsSyntaxTree.IState -> Maybe Name -> Name -> State MergeState Name
inventName ist ty n =
do ms <- get
let supp = case ty of
Nothing -> []
Just t -> getNameHints ist t
let nsupp = case n of
MN i n | not (tnull n) && thead n == '_'
-> mkSupply (supp ++ varlist)
MN i n -> mkSupply (UN n : supp ++ varlist)
UN n | thead n == '_'
-> mkSupply (supp ++ varlist)
x -> mkSupply (x : supp)
let badnames = map snd (namemap ms) ++ map snd (invented ms) ++
explicit ms
case lookup n (invented ms) of
Just n' -> return n'
Nothing ->
do let n' = uniqueNameFrom nsupp badnames
put (ms { invented = (n, n') : invented ms })
return n'
mkSupply :: [Name] -> [Name]
mkSupply ns = mkSupply' ns (map nextName ns)
where mkSupply' xs ns' = xs ++ mkSupply ns'
varlist :: [Name]
varlist = map (sUN . (:[])) "xyzwstuv" -- EB's personal preference :)
stripNS :: Idris.AbsSyntaxTree.PTerm -> Idris.AbsSyntaxTree.PTerm
stripNS tm = mapPT dens tm where
dens (PRef fc n) = PRef fc (nsroot n)
dens t = t
mergeAllPats :: IState -> Name -> PTerm -> [PTerm] -> [(PTerm, [(Name, PTerm)])]
mergeAllPats ist cv t [] = []
mergeAllPats ist cv t (p : ps)
= let (p', MS _ _ _ u) = runState (mergePat ist t p Nothing)
(MS [] [] (filter (/=cv) (patvars t)) [])
ps' = mergeAllPats ist cv t ps in
((p', u) : ps')
where patvars (PRef _ n) = [n]
patvars (PApp _ _ as) = concatMap (patvars . getTm) as
patvars (PPatvar _ n) = [n]
patvars _ = []
mergePat :: IState -> PTerm -> PTerm -> Maybe Name -> State MergeState PTerm
-- If any names are unified, make sure they stay unified. Always prefer
-- user provided name (first pattern)
mergePat ist (PPatvar fc n) new t
= mergePat ist (PRef fc n) new t
mergePat ist old (PPatvar fc n) t
= mergePat ist old (PRef fc n) t
mergePat ist orig@(PRef fc n) new@(PRef _ n') t
| isDConName n' (tt_ctxt ist) = do addUpdate n new
return new
| otherwise
= do ms <- get
case lookup n' (namemap ms) of
Just x -> do addUpdate n (PRef fc x)
return (PRef fc x)
Nothing -> do put (ms { namemap = ((n', n) : namemap ms) })
return (PRef fc n)
mergePat ist (PApp _ _ args) (PApp fc f args') t
= do newArgs <- zipWithM mergeArg args (zip args' (argTys ist f))
return (PApp fc f newArgs)
where mergeArg x (y, t)
= do tm' <- mergePat ist (getTm x) (getTm y) t
case x of
(PImp _ _ _ _ _) ->
return (y { machine_inf = machine_inf x,
getTm = tm' })
_ -> return (y { getTm = tm' })
mergePat ist (PRef fc n) tm ty = do tm <- tidy ist tm ty
addUpdate n tm
return tm
mergePat ist x y t = return y
mergeUserImpl :: PTerm -> PTerm -> PTerm
mergeUserImpl x y = x
argTys :: IState -> PTerm -> [Maybe Name]
argTys ist (PRef fc n)
= case lookupTy n (tt_ctxt ist) of
[ty] -> map (tyName . snd) (getArgTys ty) ++ repeat Nothing
_ -> repeat Nothing
where tyName (Bind _ (Pi _ _ _) _) = Just (sUN "->")
tyName t | (P _ n _, _) <- unApply t = Just n
| otherwise = Nothing
argTys _ _ = repeat Nothing
tidy :: IState -> PTerm -> Maybe Name -> State MergeState PTerm
tidy ist orig@(PRef fc n) ty
= do ms <- get
case lookup n (namemap ms) of
Just x -> return (PRef fc x)
Nothing -> case n of
(UN _) -> return orig
_ -> do n' <- inventName ist ty n
return (PRef fc n')
tidy ist (PApp fc f args) ty
= do args' <- zipWithM tidyArg args (argTys ist f)
return (PApp fc f args')
where tidyArg x ty' = do tm' <- tidy ist (getTm x) ty'
return (x { getTm = tm' })
tidy ist tm ty = return tm
-- mapPT tidyVar tm
-- where tidyVar (PRef _ _) = Placeholder
-- tidyVar t = t
elabNewPat :: PTerm -> Idris (Maybe PTerm)
elabNewPat t = idrisCatch (do (tm, ty) <- elabVal recinfo ELHS t
i <- getIState
return (Just (delab i tm)))
(\e -> do i <- getIState
logLvl 5 $ "Not a valid split:\n" ++ pshow i e
return Nothing)
findPats :: IState -> Type -> [PTerm]
findPats ist t | (P _ n _, _) <- unApply t
= case lookupCtxt n (idris_datatypes ist) of
[ti] -> map genPat (con_names ti)
_ -> [Placeholder]
where genPat n = case lookupCtxt n (idris_implicits ist) of
[args] -> PApp emptyFC (PRef emptyFC n)
(map toPlaceholder args)
_ -> error $ "Can't happen (genPat) " ++ show n
toPlaceholder tm = tm { getTm = Placeholder }
findPats ist t = [Placeholder]
replaceVar :: Context -> Name -> PTerm -> PTerm -> PTerm
replaceVar ctxt n t (PApp fc f pats) = PApp fc f (map substArg pats)
where subst :: PTerm -> PTerm
subst orig@(PPatvar _ v) | v == n = t
| otherwise = Placeholder
subst orig@(PRef _ v) | v == n = t
| isDConName v ctxt = orig
subst (PRef _ _) = Placeholder
subst (PApp fc (PRef _ t) pats)
| isTConName t ctxt = Placeholder -- infer types
subst (PApp fc f pats) = PApp fc f (map substArg pats)
subst x = x
substArg arg = arg { getTm = subst (getTm arg) }
replaceVar ctxt n t pat = pat
splitOnLine :: Int -- ^ line number
-> Name -- ^ variable
-> FilePath -- ^ name of file
-> Idris [[(Name, PTerm)]]
splitOnLine l n fn = do
-- let (before, later) = splitAt (l-1) (lines inp)
-- i <- getIState
cl <- getInternalApp fn l
logLvl 3 ("Working with " ++ showTmImpls cl)
tms <- split n cl
-- iputStrLn (showSep "\n" (map show tms))
return tms -- "" -- not yet done...
replaceSplits :: String -> [[(Name, PTerm)]] -> Idris [String]
replaceSplits l ups = updateRHSs 1 (map (rep (expandBraces l)) ups)
where
rep str [] = str ++ "\n"
rep str ((n, tm) : ups) = rep (updatePat False (show n) (nshow tm) str) ups
updateRHSs i [] = return []
updateRHSs i (x : xs) = do (x', i') <- updateRHS i x
xs' <- updateRHSs i' xs
return (x' : xs')
updateRHS i ('?':'=':xs) = do (xs', i') <- updateRHS i xs
return ("?=" ++ xs', i')
updateRHS i ('?':xs) = do let (nm, rest) = span (not . isSpace) xs
(nm', i') <- getUniq nm i
return ('?':nm' ++ rest, i')
updateRHS i (x : xs) = do (xs', i') <- updateRHS i xs
return (x : xs', i')
updateRHS i [] = return ("", i)
-- TMP HACK: If there are Nats, we don't want to show as numerals since
-- this isn't supported in a pattern, so special case here
nshow (PRef _ (UN z)) | z == txt "Z" = "Z"
nshow (PApp _ (PRef _ (UN s)) [x]) | s == txt "S" =
"(S " ++ addBrackets (nshow (getTm x)) ++ ")"
nshow t = show t
-- if there's any {n} replace with {n=n}
-- but don't replace it in comments
expandBraces ('{' : '-' : xs) = '{' : '-' : xs
expandBraces ('{' : xs)
= let (brace, (_:rest)) = span (/= '}') xs in
if (not ('=' `elem` brace))
then ('{' : brace ++ " = " ++ brace ++ "}") ++
expandBraces rest
else ('{' : brace ++ "}") ++ expandBraces rest
expandBraces (x : xs) = x : expandBraces xs
expandBraces [] = []
updatePat start n tm [] = []
updatePat start n tm ('{':rest) =
let (space, rest') = span isSpace rest in
'{' : space ++ updatePat False n tm rest'
updatePat start n tm done@('?':rest) = done
updatePat True n tm xs@(c:rest) | length xs > length n
= let (before, after@(next:_)) = splitAt (length n) xs in
if (before == n && not (isAlphaNum next))
then addBrackets tm ++ updatePat False n tm after
else c : updatePat (not (isAlphaNum c)) n tm rest
updatePat start n tm (c:rest) = c : updatePat (not ((isAlphaNum c) || c == '_')) n tm rest
addBrackets tm | ' ' `elem` tm
, not (isPrefixOf "(" tm)
, not (isSuffixOf ")" tm) = "(" ++ tm ++ ")"
| otherwise = tm
getUniq :: (Show t, Num t) => [Char] -> t -> Idris ([Char], t)
getUniq nm i
= do ist <- getIState
let n = nameRoot [] nm ++ "_" ++ show i
case lookupTy (sUN n) (tt_ctxt ist) of
[] -> return (n, i+1)
_ -> getUniq nm (i+1)
nameRoot acc nm | all isDigit nm = showSep "_" acc
nameRoot acc nm =
case span (/='_') nm of
(before, ('_' : after)) -> nameRoot (acc ++ [before]) after
_ -> showSep "_" (acc ++ [nm])
getClause :: Int -- ^ line number that the type is declared on
-> Name -- ^ Function name
-> FilePath -- ^ Source file name
-> Idris String
getClause l fn fp
= do i <- getIState
case lookupCtxt fn (idris_classes i) of
[c] -> return (mkClassBodies i (class_methods c))
_ -> do ty <- getInternalApp fp l
ist <- get
let ap = mkApp ist ty []
return (show fn ++ " " ++ ap ++ "= ?"
++ show fn ++ "_rhs")
where mkApp :: IState -> PTerm -> [Name] -> String
mkApp i (PPi (Exp _ _ False) (MN _ _) _ ty sc) used
= let n = getNameFrom i used ty in
show n ++ " " ++ mkApp i sc (n : used)
mkApp i (PPi (Exp _ _ False) (UN n) _ ty sc) used
| thead n == '_'
= let n = getNameFrom i used ty in
show n ++ " " ++ mkApp i sc (n : used)
mkApp i (PPi (Exp _ _ False) n _ _ sc) used
= show n ++ " " ++ mkApp i sc (n : used)
mkApp i (PPi _ _ _ _ sc) used = mkApp i sc used
mkApp i _ _ = ""
getNameFrom i used (PPi _ _ _ _ _)
= uniqueNameFrom (mkSupply [sUN "f", sUN "g"]) used
getNameFrom i used (PApp fc f as) = getNameFrom i used f
getNameFrom i used (PRef fc f)
= case getNameHints i f of
[] -> uniqueName (sUN "x") used
ns -> uniqueNameFrom (mkSupply ns) used
getNameFrom i used _ = uniqueName (sUN "x") used
-- write method declarations, indent with 4 spaces
mkClassBodies :: IState -> [(Name, (FnOpts, PTerm))] -> String
mkClassBodies i ns
= showSep "\n"
(zipWith (\(n, (_, ty)) m -> " " ++
def (show (nsroot n)) ++ " "
++ mkApp i ty []
++ "= ?"
++ show fn ++ "_rhs_" ++ show m) ns [1..])
def n@(x:xs) | not (isAlphaNum x) = "(" ++ n ++ ")"
def n = n
getProofClause :: Int -- ^ line number that the type is declared
-> Name -- ^ Function name
-> FilePath -- ^ Source file name
-> Idris String
getProofClause l fn fp
= do ty <- getInternalApp fp l
return (mkApp ty ++ " = ?" ++ show fn ++ "_rhs")
where mkApp (PPi _ _ _ _ sc) = mkApp sc
mkApp rt = "(" ++ show rt ++ ") <== " ++ show fn
-- Purely syntactic - turn a pattern match clause into a with and a new
-- match clause
mkWith :: String -> Name -> String
mkWith str n = let str' = replaceRHS str "with (_)"
in str' ++ "\n" ++ newpat str
where replaceRHS [] str = str
replaceRHS ('?':'=': rest) str = str
replaceRHS ('=': rest) str
| not ('=' `elem` rest) = str
replaceRHS (x : rest) str = x : replaceRHS rest str
newpat ('>':patstr) = '>':newpat patstr
newpat patstr =
" " ++
replaceRHS patstr "| with_pat = ?" ++ show n ++ "_rhs"
-- Replace _ with names in missing clauses
nameMissing :: [PTerm] -> Idris [PTerm]
nameMissing ps = do ist <- get
newPats <- mapM nm ps
let newPats' = mergeAllPats ist (sUN "_") (base (head ps))
newPats
return (map fst newPats')
where
base (PApp fc f args) = PApp fc f (map (fmap (const (PRef fc (sUN "_")))) args)
base t = t
nm ptm = do mptm <- elabNewPat ptm
case mptm of
Nothing -> return ptm
Just ptm' -> return ptm'
| BartAdv/Idris-dev | src/Idris/CaseSplit.hs | bsd-3-clause | 17,302 | 19 | 25 | 6,627 | 6,083 | 3,049 | 3,034 | 332 | 17 |
{-# LANGUAGE FlexibleContexts #-}
module Language.Haskell.Liquid.Bare.SymSort (
txRefSort
) where
import Prelude hiding (error)
import qualified Data.List as L
import Data.Maybe (fromMaybe)
import TyCon (TyCon)
import Language.Fixpoint.Misc (fst3, snd3)
import Language.Fixpoint.Types (atLoc, meet, TCEmb)
import Language.Haskell.Liquid.Types.RefType (appRTyCon, strengthen)
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.GHC.Misc (fSrcSpan)
import Language.Haskell.Liquid.Misc (safeZipWithError)
import Language.Haskell.Liquid.Bare.Env
-- EFFECTS: TODO is this the SAME as addTyConInfo? No. `txRefSort`
-- (1) adds the _real_ sorts to RProp,
-- (2) gathers _extra_ RProp and turns them into refinements,
-- e.g. tests/pos/multi-pred-app-00.hs
txRefSort :: TCEnv -> TCEmb TyCon -> Located SpecType -> Located SpecType
txRefSort tyi tce t = atLoc t $ mapBot (addSymSort (fSrcSpan t) tce tyi) (val t)
addSymSort sp tce tyi (RApp rc@(RTyCon _ _ _) ts rs r)
= RApp rc ts (zipWith3 (addSymSortRef sp rc) pvs rargs [1..]) r'
where
rc' = appRTyCon tce tyi rc ts
pvs = rTyConPVs rc'
(rargs, rrest) = splitAt (length pvs) rs
r' = L.foldl' go r rrest
go r (RProp _ (RHole r')) = r' `meet` r
go r (RProp _ t' ) = let r' = fromMaybe mempty (stripRTypeBase t') in r `meet` r'
addSymSort _ _ _ t
= t
addSymSortRef sp rc p r i
| isPropPV p
= addSymSortRef' sp rc i p r
| otherwise
= panic Nothing "addSymSortRef: malformed ref application"
addSymSortRef' _ _ _ p (RProp s (RVar v r)) | isDummy v
= RProp xs t
where
t = ofRSort (pvType p) `strengthen` r
xs = spliceArgs "addSymSortRef 1" s p
addSymSortRef' sp rc i p (RProp _ (RHole r@(MkUReft _ (Pr [up]) _)))
| length xs == length ts
= RProp xts (RHole r)
| otherwise
= uError $ ErrPartPred sp (pprint rc) (pprint $ pname up) i (length xs) (length ts)
where
xts = safeZipWithError "addSymSortRef'" xs ts
xs = snd3 <$> pargs up
ts = fst3 <$> pargs p
addSymSortRef' _ _ _ _ (RProp s (RHole r))
= RProp s (RHole r)
addSymSortRef' _ _ _ p (RProp s t)
= RProp xs t
where
xs = spliceArgs "addSymSortRef 2" s p
spliceArgs msg s p = go (fst <$> s) (pargs p)
where
go [] [] = []
go [] ((s,x,_):as) = (x, s):go [] as
go (x:xs) ((s,_,_):as) = (x,s):go xs as
go xs [] = panic Nothing $ "spliceArgs: " ++ msg ++ "on XS=" ++ show xs
| ssaavedra/liquidhaskell | src/Language/Haskell/Liquid/Bare/SymSort.hs | bsd-3-clause | 2,536 | 0 | 17 | 653 | 995 | 521 | 474 | 53 | 4 |
module Data.Monoid.Difference (
-- ** Difference monoid
Endo(..),
improve, unimprove,
) where
import Data.Monoid
-- | Converting a normal monoid to a difference monoid.
improve :: Monoid a => a -> Endo a
improve a = Endo (mappend a)
{-# INLINE improve #-}
-- | Converting a difference monoid back to a normal monoid.
unimprove :: Monoid a => Endo a -> a
unimprove (Endo f) = f mempty
{-# INLINE unimprove #-}
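-- Usage sketch (added for illustration; not part of the original module):
-- 'Endo' composes the partially applied 'mappend's, so 'unimprove'
-- reassociates a chain of appends to the right.
--
-- >>> unimprove (improve [1,2] <> improve [3] <> improve [4,5]) :: [Int]
-- [1,2,3,4,5]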
| arkeet/difference | src/Data/Monoid/Difference.hs | bsd-3-clause | 422 | 0 | 7 | 86 | 105 | 58 | 47 | 10 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Audio.TagLib.Internal where
import Control.Monad.State
import Control.Applicative
import Data.Typeable (Typeable())
import Foreign.C.String (CString)
import Foreign.C.Types (CInt(..),CChar(..))
import Foreign.Ptr (Ptr)
import qualified Control.Exception as E
import qualified Data.Map as M
-- Types {{{
-- | Monad for performing TagLib operations
newtype TagLib a = TagLib { unTagLib :: StateT TagLibEnv IO a }
instance Functor TagLib where
fmap f (TagLib m) = TagLib $ fmap f m
instance Monad TagLib where
return = TagLib . return
(TagLib m) >>= f = TagLib $ m >>= unTagLib . f
instance Applicative TagLib where
pure = return
(<*>) = ap
-- | Internal representation of an open file
data TagLibFile = TagLibFile
{ filePtr :: Ptr File
, tagPtr :: Ptr Tag
, audioPropPtr :: Ptr AudioProperties
}
-- | A handle for an open file
newtype FileId = FileId Integer deriving (Eq,Ord)
-- | Abstract C Types
data File
data Tag
data AudioProperties
-- }}}
-- Env {{{
-- | A collection of open files, and a generator for unique file ID's
data TagLibEnv = TagLibEnv
{ taglibFilesOpen :: M.Map FileId TagLibFile
, taglibNextId :: Integer
}
-- | A fresh Env
initialEnv :: TagLibEnv
initialEnv = TagLibEnv M.empty 0
-- | Record modify for taglibFilesOpen
onFilesOpen :: (M.Map FileId TagLibFile -> M.Map FileId TagLibFile)
-> TagLibEnv -> TagLibEnv
onFilesOpen f e = e { taglibFilesOpen = f $ taglibFilesOpen e }
-- | Record modify for taglibNextId
onNextId :: (Integer -> Integer)
-> TagLibEnv -> TagLibEnv
onNextId f e = e { taglibNextId = f $ taglibNextId e }
-- }}}
-- Exceptions {{{
-- | Exceptions that might be thrown
data TagLibException
= NoSuchFileId
| InvalidFile FilePath
| UnableToOpen FilePath
| FileClosed
deriving (Show, Typeable)
instance E.Exception TagLibException
-- }}}
-- Monadic Operations {{{
-- | Put a new file into the Env
addNewFile :: FileId -> TagLibFile -> TagLib ()
addNewFile fid f = TagLib $ modify $ onFilesOpen $ M.insert fid f
-- | Get a fresh FileId, maintaining the internal generator
nextId :: TagLib FileId
nextId = do
i <- fromEnv taglibNextId
TagLib $ modify $ onNextId (+1)
return $ FileId i
-- | Get the list of currently opened files.
openFilePtrs :: TagLib [Ptr File]
openFilePtrs = fromEnv $ map filePtr . M.elems . taglibFilesOpen
-- | Call a function requiring the Env
fromEnv :: (TagLibEnv -> a) -> TagLib a
fromEnv f = TagLib $ gets f
-- | Call a function requiring a file.
-- Throws an exception should the FileId not point
-- to a currently open file.
fromFile :: (TagLibFile -> a) -> FileId -> TagLib a
fromFile acc fid = do
mf <- M.lookup fid <$> fromEnv taglibFilesOpen
case mf of
Just f -> return (acc f)
Nothing -> io $ E.throw NoSuchFileId
-- | Embed an IO action in the TagLib context.
io :: IO a -> TagLib a
io m = TagLib $ StateT $ \e -> (,) <$> m <*> pure e
-- }}}
-- File FFI {{{
foreign import ccall "taglib_file_new"
c_taglib_file_new :: CString -> IO (Ptr File)
foreign import ccall "taglib_file_free"
c_taglib_file_free :: Ptr File -> IO ()
foreign import ccall "taglib_file_save"
c_taglib_file_save :: Ptr File -> IO ()
foreign import ccall "taglib_file_is_valid"
c_taglib_file_is_valid :: Ptr File -> IO CInt
foreign import ccall "taglib_file_tag"
c_taglib_file_tag :: Ptr File -> IO (Ptr Tag)
foreign import ccall "taglib_file_audioproperties"
c_taglib_file_audioproperties :: Ptr File -> IO (Ptr AudioProperties)
foreign import ccall "taglib_tag_free_strings"
c_taglib_free_strings :: IO ()
-- }}}
-- Unmanaged Interface {{{
-- | Free all the strings that TagLib has allocated.
-- Use only when handling your own memory.
-- Otherwise, 'taglib' will take care of this for you.
freeTagLibStrings :: IO ()
freeTagLibStrings = c_taglib_free_strings
-- | Remove a file from the Env
removeFile :: FileId -> TagLib ()
removeFile fid = TagLib $ modify $ onFilesOpen $ M.delete fid
-- | Run a @TagLib@ action without managing allocated resources.
-- Reading tags from a file will work regardless of whether
-- 'cleanupFile' is used, but writing tags will not.
-- TagLib's strings must still be freed if a memory leak is to
-- be avoided.
runTagLib :: TagLibEnv -> TagLib a -> IO (a,TagLibEnv)
runTagLib env m = runStateT (unTagLib m) env
-- | Run an unmanaged @TagLib@ action, discarding the final Env.
evalTagLib :: TagLibEnv -> TagLib a -> IO a
evalTagLib env = fmap fst . runTagLib env
-- | Save and close a file, in case you want to manage your own memory.
-- TagLib's strings are still freed by 'taglib'.
closeFile :: FileId -> TagLib ()
closeFile fid = do
fptr <- fromFile filePtr fid
removeFile fid
io $ cleanupFile fptr
-- | The base IO action necessary to deallocate all resources
-- associated with a single file.
cleanupFile :: Ptr File -> IO ()
cleanupFile f = do
c_taglib_file_save f
c_taglib_file_free f
-- }}}
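-- Usage sketch (added for illustration; not part of the original module):
-- run an unmanaged 'TagLib' action against a fresh Env and free TagLib's
-- strings afterwards, as the documentation above requires.
runUnmanaged :: TagLib a -> IO a
runUnmanaged m = do
  r <- evalTagLib initialEnv m
  freeTagLibStrings
  return r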
| kylcarte/taglib-api | src/Audio/TagLib/Internal.hs | bsd-3-clause | 4,990 | 0 | 12 | 991 | 1,209 | 646 | 563 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-} -- required for the 'pattern' declarations below
module Network.HTTP.Types.AsHttp
( -- unqualified class re-export
I.QueryLike(I.toQuery)
, module Network.HTTP.Types.AsHttp
) where
-- generated by https://github.com/rvion/ride/tree/master/jetpack-gen
import qualified Network.HTTP.Types as I
-- http_hAccept :: HeaderName
http_hAccept = I.hAccept
-- http_hAcceptLanguage :: HeaderName
http_hAcceptLanguage = I.hAcceptLanguage
-- http_hAuthorization :: HeaderName
http_hAuthorization = I.hAuthorization
-- http_hCacheControl :: HeaderName
http_hCacheControl = I.hCacheControl
-- http_hConnection :: HeaderName
http_hConnection = I.hConnection
-- http_hContentEncoding :: HeaderName
http_hContentEncoding = I.hContentEncoding
-- http_hContentLength :: HeaderName
http_hContentLength = I.hContentLength
-- http_hContentMD5 :: HeaderName
http_hContentMD5 = I.hContentMD5
-- http_hContentType :: HeaderName
http_hContentType = I.hContentType
-- http_hCookie :: HeaderName
http_hCookie = I.hCookie
-- http_hDate :: HeaderName
http_hDate = I.hDate
-- http_hIfModifiedSince :: HeaderName
http_hIfModifiedSince = I.hIfModifiedSince
-- http_hIfRange :: HeaderName
http_hIfRange = I.hIfRange
-- http_hLastModified :: HeaderName
http_hLastModified = I.hLastModified
-- http_hLocation :: HeaderName
http_hLocation = I.hLocation
-- http_hRange :: HeaderName
http_hRange = I.hRange
-- http_hReferer :: HeaderName
http_hReferer = I.hReferer
-- http_hServer :: HeaderName
http_hServer = I.hServer
-- http_hUserAgent :: HeaderName
http_hUserAgent = I.hUserAgent
-- http_renderByteRange :: ByteRange -> ByteString
http_renderByteRange = I.renderByteRange
-- http_renderByteRangeBuilder :: ByteRange -> Builder
http_renderByteRangeBuilder = I.renderByteRangeBuilder
-- http_renderByteRanges :: ByteRanges -> ByteString
http_renderByteRanges = I.renderByteRanges
-- http_renderByteRangesBuilder :: ByteRanges -> Builder
http_renderByteRangesBuilder = I.renderByteRangesBuilder
-- http_methodConnect :: Method
http_methodConnect = I.methodConnect
-- http_methodDelete :: Method
http_methodDelete = I.methodDelete
-- http_methodGet :: Method
http_methodGet = I.methodGet
-- http_methodHead :: Method
http_methodHead = I.methodHead
-- http_methodOptions :: Method
http_methodOptions = I.methodOptions
-- http_methodPatch :: Method
http_methodPatch = I.methodPatch
-- http_methodPost :: Method
http_methodPost = I.methodPost
-- http_methodPut :: Method
http_methodPut = I.methodPut
-- http_methodTrace :: Method
http_methodTrace = I.methodTrace
-- http_parseMethod :: Method -> Either ByteString StdMethod
http_parseMethod = I.parseMethod
-- http_renderMethod :: Either ByteString StdMethod -> Method
http_renderMethod = I.renderMethod
-- http_renderStdMethod :: StdMethod -> Method
http_renderStdMethod = I.renderStdMethod
-- http_accepted202 :: Status
http_accepted202 = I.accepted202
-- http_badGateway502 :: Status
http_badGateway502 = I.badGateway502
-- http_badRequest400 :: Status
http_badRequest400 = I.badRequest400
-- http_conflict409 :: Status
http_conflict409 = I.conflict409
-- http_continue100 :: Status
http_continue100 = I.continue100
-- http_created201 :: Status
http_created201 = I.created201
-- http_expectationFailed417 :: Status
http_expectationFailed417 = I.expectationFailed417
-- http_forbidden403 :: Status
http_forbidden403 = I.forbidden403
-- http_found302 :: Status
http_found302 = I.found302
-- http_gatewayTimeout504 :: Status
http_gatewayTimeout504 = I.gatewayTimeout504
-- http_gone410 :: Status
http_gone410 = I.gone410
-- http_httpVersionNotSupported505 :: Status
http_httpVersionNotSupported505 = I.httpVersionNotSupported505
-- http_imATeaPot418 :: Status
http_imATeaPot418 = I.imATeaPot418
-- http_internalServerError500 :: Status
http_internalServerError500 = I.internalServerError500
-- http_lengthRequired411 :: Status
http_lengthRequired411 = I.lengthRequired411
-- http_methodNotAllowed405 :: Status
http_methodNotAllowed405 = I.methodNotAllowed405
-- http_mkStatus :: Int -> ByteString -> Status
http_mkStatus = I.mkStatus
-- http_movedPermanently301 :: Status
http_movedPermanently301 = I.movedPermanently301
-- http_multipleChoices300 :: Status
http_multipleChoices300 = I.multipleChoices300
-- http_noContent204 :: Status
http_noContent204 = I.noContent204
-- http_nonAuthoritative203 :: Status
http_nonAuthoritative203 = I.nonAuthoritative203
-- http_notAcceptable406 :: Status
http_notAcceptable406 = I.notAcceptable406
-- http_notFound404 :: Status
http_notFound404 = I.notFound404
-- http_notImplemented501 :: Status
http_notImplemented501 = I.notImplemented501
-- http_notModified304 :: Status
http_notModified304 = I.notModified304
-- http_ok200 :: Status
http_ok200 = I.ok200
-- http_partialContent206 :: Status
http_partialContent206 = I.partialContent206
-- http_paymentRequired402 :: Status
http_paymentRequired402 = I.paymentRequired402
-- http_preconditionFailed412 :: Status
http_preconditionFailed412 = I.preconditionFailed412
-- http_proxyAuthenticationRequired407 :: Status
http_proxyAuthenticationRequired407 = I.proxyAuthenticationRequired407
-- http_requestEntityTooLarge413 :: Status
http_requestEntityTooLarge413 = I.requestEntityTooLarge413
-- http_requestTimeout408 :: Status
http_requestTimeout408 = I.requestTimeout408
-- http_requestURITooLong414 :: Status
http_requestURITooLong414 = I.requestURITooLong414
-- http_requestedRangeNotSatisfiable416 :: Status
http_requestedRangeNotSatisfiable416 = I.requestedRangeNotSatisfiable416
-- http_resetContent205 :: Status
http_resetContent205 = I.resetContent205
-- http_seeOther303 :: Status
http_seeOther303 = I.seeOther303
-- http_serviceUnavailable503 :: Status
http_serviceUnavailable503 = I.serviceUnavailable503
-- http_status100 :: Status
http_status100 = I.status100
-- http_status101 :: Status
http_status101 = I.status101
-- http_status200 :: Status
http_status200 = I.status200
-- http_status201 :: Status
http_status201 = I.status201
-- http_status202 :: Status
http_status202 = I.status202
-- http_status203 :: Status
http_status203 = I.status203
-- http_status204 :: Status
http_status204 = I.status204
-- http_status205 :: Status
http_status205 = I.status205
-- http_status206 :: Status
http_status206 = I.status206
-- http_status300 :: Status
http_status300 = I.status300
-- http_status301 :: Status
http_status301 = I.status301
-- http_status302 :: Status
http_status302 = I.status302
-- http_status303 :: Status
http_status303 = I.status303
-- http_status304 :: Status
http_status304 = I.status304
-- http_status305 :: Status
http_status305 = I.status305
-- http_status307 :: Status
http_status307 = I.status307
-- http_status400 :: Status
http_status400 = I.status400
-- http_status401 :: Status
http_status401 = I.status401
-- http_status402 :: Status
http_status402 = I.status402
-- http_status403 :: Status
http_status403 = I.status403
-- http_status404 :: Status
http_status404 = I.status404
-- http_status405 :: Status
http_status405 = I.status405
-- http_status406 :: Status
http_status406 = I.status406
-- http_status407 :: Status
http_status407 = I.status407
-- http_status408 :: Status
http_status408 = I.status408
-- http_status409 :: Status
http_status409 = I.status409
-- http_status410 :: Status
http_status410 = I.status410
-- http_status411 :: Status
http_status411 = I.status411
-- http_status412 :: Status
http_status412 = I.status412
-- http_status413 :: Status
http_status413 = I.status413
-- http_status414 :: Status
http_status414 = I.status414
-- http_status415 :: Status
http_status415 = I.status415
-- http_status416 :: Status
http_status416 = I.status416
-- http_status417 :: Status
http_status417 = I.status417
-- http_status418 :: Status
http_status418 = I.status418
-- http_status500 :: Status
http_status500 = I.status500
-- http_status501 :: Status
http_status501 = I.status501
-- http_status502 :: Status
http_status502 = I.status502
-- http_status503 :: Status
http_status503 = I.status503
-- http_status504 :: Status
http_status504 = I.status504
-- http_status505 :: Status
http_status505 = I.status505
-- http_statusIsClientError :: Status -> Bool
http_statusIsClientError = I.statusIsClientError
-- http_statusIsInformational :: Status -> Bool
http_statusIsInformational = I.statusIsInformational
-- http_statusIsRedirection :: Status -> Bool
http_statusIsRedirection = I.statusIsRedirection
-- http_statusIsServerError :: Status -> Bool
http_statusIsServerError = I.statusIsServerError
-- http_statusIsSuccessful :: Status -> Bool
http_statusIsSuccessful = I.statusIsSuccessful
-- http_switchingProtocols101 :: Status
http_switchingProtocols101 = I.switchingProtocols101
-- http_temporaryRedirect307 :: Status
http_temporaryRedirect307 = I.temporaryRedirect307
-- http_unauthorized401 :: Status
http_unauthorized401 = I.unauthorized401
-- http_unsupportedMediaType415 :: Status
http_unsupportedMediaType415 = I.unsupportedMediaType415
-- http_useProxy305 :: Status
http_useProxy305 = I.useProxy305
-- http_decodePath :: ByteString -> ([Text], Query)
http_decodePath = I.decodePath
-- http_decodePathSegments :: ByteString -> [Text]
http_decodePathSegments = I.decodePathSegments
-- http_encodePath :: [Text] -> Query -> Builder
http_encodePath = I.encodePath
-- http_encodePathSegments :: [Text] -> Builder
http_encodePathSegments = I.encodePathSegments
-- http_encodePathSegmentsRelative :: [Text] -> Builder
http_encodePathSegmentsRelative = I.encodePathSegmentsRelative
-- http_extractPath :: ByteString -> ByteString
http_extractPath = I.extractPath
-- http_parseQuery :: ByteString -> Query
http_parseQuery = I.parseQuery
-- http_parseQueryText :: ByteString -> QueryText
http_parseQueryText = I.parseQueryText
-- http_parseSimpleQuery :: ByteString -> SimpleQuery
http_parseSimpleQuery = I.parseSimpleQuery
-- http_queryTextToQuery :: QueryText -> Query
http_queryTextToQuery = I.queryTextToQuery
-- http_queryToQueryText :: Query -> QueryText
http_queryToQueryText = I.queryToQueryText
-- http_renderQuery :: Bool -> Query -> ByteString
http_renderQuery = I.renderQuery
-- http_renderQueryBuilder :: Bool -> Query -> Builder
http_renderQueryBuilder = I.renderQueryBuilder
-- http_renderQueryText :: Bool -> QueryText -> Builder
http_renderQueryText = I.renderQueryText
-- http_renderSimpleQuery :: Bool -> SimpleQuery -> ByteString
http_renderSimpleQuery = I.renderSimpleQuery
-- http_simpleQueryToQuery :: SimpleQuery -> Query
http_simpleQueryToQuery = I.simpleQueryToQuery
-- http_urlDecode :: Bool -> ByteString -> ByteString
http_urlDecode = I.urlDecode
-- http_urlEncode :: Bool -> ByteString -> ByteString
http_urlEncode = I.urlEncode
-- http_urlEncodeBuilder :: Bool -> ByteString -> Builder
http_urlEncodeBuilder = I.urlEncodeBuilder
-- http_http09 :: HttpVersion
http_http09 = I.http09
-- http_http10 :: HttpVersion
http_http10 = I.http10
-- http_http11 :: HttpVersion
http_http11 = I.http11
type HttpByteRange = I.ByteRange
-- constructor :: Integer -> ByteRangeFrom
http_mk'ByteRangeFrom = I.ByteRangeFrom
pattern HttpByteRangeFrom a <- I.ByteRangeFrom a
-- constructor :: Integer -> Integer -> ByteRangeFromTo
http_mk'ByteRangeFromTo = I.ByteRangeFromTo
pattern HttpByteRangeFromTo a b <- I.ByteRangeFromTo a b
-- constructor :: Integer -> ByteRangeSuffix
http_mk'ByteRangeSuffix = I.ByteRangeSuffix
pattern HttpByteRangeSuffix a <- I.ByteRangeSuffix a
type HttpByteRanges = I.ByteRanges
type HttpHeader = I.Header
type HttpHeaderName = I.HeaderName
type HttpRequestHeaders = I.RequestHeaders
type HttpResponseHeaders = I.ResponseHeaders
type HttpMethod = I.Method
type HttpStdMethod = I.StdMethod
-- constructor :: GET
http_mk'GET = I.GET
pattern HttpGET <- I.GET
-- constructor :: POST
http_mk'POST = I.POST
pattern HttpPOST <- I.POST
-- constructor :: HEAD
http_mk'HEAD = I.HEAD
pattern HttpHEAD <- I.HEAD
-- constructor :: PUT
http_mk'PUT = I.PUT
pattern HttpPUT <- I.PUT
-- constructor :: DELETE
http_mk'DELETE = I.DELETE
pattern HttpDELETE <- I.DELETE
-- constructor :: TRACE
http_mk'TRACE = I.TRACE
pattern HttpTRACE <- I.TRACE
-- constructor :: CONNECT
http_mk'CONNECT = I.CONNECT
pattern HttpCONNECT <- I.CONNECT
-- constructor :: OPTIONS
http_mk'OPTIONS = I.OPTIONS
pattern HttpOPTIONS <- I.OPTIONS
-- constructor :: PATCH
http_mk'PATCH = I.PATCH
pattern HttpPATCH <- I.PATCH
type HttpStatus = I.Status
get_http_statusCode o = I.statusCode o
set_http_statusCode x o = o { I.statusCode = x}
get_http_statusMessage o = I.statusMessage o
set_http_statusMessage x o = o { I.statusMessage = x}
-- constructor :: Int -> ByteString -> Status
http_mk'Status = I.Status
pattern HttpStatus a b <- I.Status a b
type HttpQuery = I.Query
type HttpQueryItem = I.QueryItem
type HttpQueryText = I.QueryText
type HttpSimpleQuery = I.SimpleQuery
type HttpSimpleQueryItem = I.SimpleQueryItem
type HttpHttpVersion = I.HttpVersion
get_http_httpMajor o = I.httpMajor o
set_http_httpMajor x o = o { I.httpMajor = x}
get_http_httpMinor o = I.httpMinor o
set_http_httpMinor x o = o { I.httpMinor = x}
-- constructor :: Int -> Int -> HttpVersion
http_mk'HttpVersion = I.HttpVersion
pattern HttpHttpVersion a b <- I.HttpVersion a b
| rvion/ride | jetpack/src/Network/HTTP/Types/AsHttp.hs | bsd-3-clause | 13,267 | 1 | 7 | 1,662 | 1,745 | 1,053 | 692 | -1 | -1 |
module Air.Lab1 where
import Air.Data.Default
import Air.Env hiding (mod, read, length, drop, at)
import Air.Extra
import Air.Extra (now)
import Control.Concurrent
import Control.Concurrent.STM
import Data.ByteString.Lazy.Char8 (ByteString, pack, unpack)
import Data.IORef
import Data.List ( genericDrop, genericLength )
import Data.Maybe
import Data.StateVar
import Data.Time.Clock.POSIX
import Prelude ()
import System.Exit
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import System.IO (hGetContents)
import System.Process
import Text.JSON.Generic
import Text.JSON.String
import qualified Control.Monad as Monad
import qualified Data.StateVar as StateVar
import qualified Prelude as P
atom :: STM a -> IO a
atom = atomically
want :: (Monad m, Default b) => (Maybe a) -> (a -> m b) -> m b
want = flip - maybe (return def)
wantM :: (Monad m, Default b) => m (Maybe a) -> (a -> m b) -> m b
wantM x f = do
x' <- x
x'.want - f
append_maybe :: a -> Maybe [a] -> Maybe [a]
append_maybe x Nothing = Just [x]
append_maybe x (Just xs) = Just - x:xs
maybe_first :: [a] -> Maybe a
maybe_first xs = case xs of
[] -> Nothing
(y:_) -> Just y
update_list :: (a -> Bool) -> (a -> a) -> [a] -> [a]
update_list p f xs = do
x <- xs
let y = if p x then f x else x
return y
decode_json :: (Data a) => String -> Either String a
decode_json s =
case runGetJSON readJSValue s of
Left msg -> Left msg
Right j ->
case fromJSON j of
Error msg -> Left msg
Ok x -> Right x
mapT :: (HasGetter g, HasSetter g) => (a -> a) -> [g a] -> IO [g a]
mapT f xs = xs.mapM (\o -> do
o $~ f
return o
)
filterT :: (HasGetter g) => (a -> Bool) -> [g a] -> IO [g a]
filterT f xs = xs.mapM (\o -> do
object <- get o
if f object
then return - Just o
else return - Nothing
)
.fmap catMaybes
findT :: (HasGetter g) => (a -> Bool) -> [g a] -> IO (Maybe (g a))
findT f xs = filterT f xs ^ listToMaybe
find_or_fail_with_message_T :: (HasGetter g) => String -> (a -> Bool) -> [g a] -> IO (g a)
find_or_fail_with_message_T msg f xs = do
r <- findT f xs
case r of
Nothing -> error msg
Just _r -> return _r
now_in_micro_seconds :: IO Integer
now_in_micro_seconds =
now ^ (utcTimeToPOSIXSeconds > (* 1000000) > floor)
now_in_milli_seconds :: IO Integer
now_in_milli_seconds =
now ^ (utcTimeToPOSIXSeconds > (* 1000) > floor)
run_within :: (RealFrac a) => a -> String -> [String] -> IO (Maybe ExitCode)
run_within secs cmd args = do
(_, hOut, hError, pid) <- runInteractiveProcess cmd args Nothing Nothing
bomb_time <- now ^ t2f ^ (P.+ secs)
let { wait_loop = do
exit_code <- getProcessExitCode pid
case exit_code of
Just code -> do
hGetContents hOut >>= putStrLn
hGetContents hError >>= putStrLn
return - Just code
Nothing -> do
time_stamp <- now ^ t2f
if time_stamp >= bomb_time
then do
terminateProcess pid
return - Nothing
else do
-- putStrLn "sleeping ..."
sleep 0.1
wait_loop
}
wait_loop
as_string :: (String -> String) -> ByteString -> ByteString
as_string f x = x.unpack.f.pack
| nfjinjing/source-code-server | src/Air/Lab1.hs | bsd-3-clause | 3,276 | 1 | 21 | 869 | 1,334 | 691 | 643 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module Web.Telegram.API.Bot.API.Core
( -- * Types
Token (..)
, TelegramToken
, TelegramClient
, run
, run_
, runM
, asking
, runClient
, runClient'
, telegramBaseUrl
) where
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import Data.Text (Text)
import Network.HTTP.Client (Manager)
import Servant.API
import Servant.Client
-- | Telegram Bot's Token
newtype Token = Token Text
deriving (Show, Eq, Ord, ToHttpApiData, FromHttpApiData)
-- | Type for token
type TelegramToken = Capture ":token" Token
type TelegramClient a = ReaderT Token ClientM a
telegramBaseUrl :: BaseUrl
telegramBaseUrl = BaseUrl Https "api.telegram.org" 443 ""
-- | Allows to run 'TelegramClient' against arbitrary url
runClient' :: TelegramClient a -> Token -> ClientEnv -> IO (Either ServantError a)
runClient' tcm token = runClientM (runReaderT tcm token)
-- | Runs 'TelegramClient'
runClient :: TelegramClient a -> Token -> Manager -> IO (Either ServantError a)
runClient tcm token manager = runClient' tcm token (ClientEnv manager telegramBaseUrl)
asking :: Monad m => (t -> m b) -> ReaderT t m b
asking op = ask >>= \t -> lift $ op t
run :: BaseUrl -> (Token -> a -> ClientM b) -> Token -> a -> Manager -> IO (Either ServantError b)
run b e t r m = runClientM (e t r) (ClientEnv m b)
run_ :: Monad m => (a -> b -> m c) -> b -> ReaderT a m c
run_ act request = asking $ flip act request
runM :: (r -> TelegramClient a) -> Token -> r -> Manager -> IO (Either ServantError a)
runM tcm token request = runClient (tcm request) token
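-- Usage sketch (added for illustration; not part of the original module).
-- It assumes the http-client-tls package for 'newManager'/'tlsManagerSettings'
-- and a placeholder token; neither is provided by this module.
--
-- example :: TelegramClient a -> IO (Either ServantError a)
-- example client = do
--   manager <- newManager tlsManagerSettings
--   runClient client (Token "bot...") manager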
| cblp/haskell-telegram-api | src/Web/Telegram/API/Bot/API/Core.hs | bsd-3-clause | 1,837 | 0 | 12 | 451 | 554 | 296 | 258 | 40 | 1 |
{-# OPTIONS -F -pgmF lisk #-}
(module :fibs
(import :system.environment)
(:: main (:i-o ()))
(= main (>>= get-args (. print fib read head)))
(:: test (-> :string (, :int :string)))
(= test (, 1))
(:: fib (-> :int :int))
(= fib 0 0)
(= fib 1 1)
(= fib n (+ (fib (- n 1))
(fib (- n 2)))))
| aculich/lisk | examples/Foo.hs | bsd-3-clause | 321 | 22 | 12 | 93 | 197 | 111 | 86 | -1 | -1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.TextureFloat
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All tokens from the ARB_texture_float extension, see
-- <http://www.opengl.org/registry/specs/ARB/texture_float.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.TextureFloat (
-- * Tokens
gl_TEXTURE_RED_TYPE,
gl_TEXTURE_GREEN_TYPE,
gl_TEXTURE_BLUE_TYPE,
gl_TEXTURE_ALPHA_TYPE,
gl_TEXTURE_LUMINANCE_TYPE,
gl_TEXTURE_INTENSITY_TYPE,
gl_TEXTURE_DEPTH_TYPE,
gl_UNSIGNED_NORMALIZED,
gl_RGBA32F,
gl_RGB32F,
gl_ALPHA32F,
gl_INTENSITY32F,
gl_LUMINANCE32F,
gl_LUMINANCE_ALPHA32F,
gl_RGBA16F,
gl_RGB16F,
gl_ALPHA16F,
gl_INTENSITY16F,
gl_LUMINANCE16F,
gl_LUMINANCE_ALPHA16F
) where
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility
import Graphics.Rendering.OpenGL.Raw.Core32
gl_ALPHA32F :: GLenum
gl_ALPHA32F = 0x8816
gl_INTENSITY32F :: GLenum
gl_INTENSITY32F = 0x8817
gl_LUMINANCE32F :: GLenum
gl_LUMINANCE32F = 0x8818
gl_LUMINANCE_ALPHA32F :: GLenum
gl_LUMINANCE_ALPHA32F = 0x8819
gl_ALPHA16F :: GLenum
gl_ALPHA16F = 0x881C
gl_INTENSITY16F :: GLenum
gl_INTENSITY16F = 0x881D
gl_LUMINANCE16F :: GLenum
gl_LUMINANCE16F = 0x881E
gl_LUMINANCE_ALPHA16F :: GLenum
gl_LUMINANCE_ALPHA16F = 0x881F
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/TextureFloat.hs | bsd-3-clause | 1,565 | 0 | 4 | 219 | 184 | 125 | 59 | 39 | 1 |
module TicTacToeTest where
import TicTacToe
import Test.Tasty
import Test.Tasty.HUnit
testUndoOnlyOneMove :: IO ()
testUndoOnlyOneMove =
let f = NoMoves
partWay = (f >>== midCen)
in case partWay of
Unfinished g ->
let undone = undoMove g
expectedBefore = XMove (X ((1,1),ONone))
expectedAfter = NoMoves
in (assertEqual "Undoing the move" expectedAfter undone >>
assertEqual "Before undo" expectedBefore g)
_ -> assertFailure "Game should be Unfinished"
testUndo :: IO ()
testUndo =
let f = NoMoves
partWay = (f >>== midCen >>== topRight)
in case partWay of
Unfinished g ->
let undone = undoMove g
expectedUndo = Unfinished $ XMove (X ((1,1),ONone))
expectedBefore = OMove (O ((0,2),X ((1,1),ONone)))
in (assertEqual "Undoing the move" expectedUndo undone >>
assertEqual "Before undo" expectedBefore g)
_ -> assertFailure "Game should be Unfinished"
testWinning :: Assertion
testWinning =
let f = NoMoves
game = stringifyGame (f >>== midCen >>== topLeft >>== bottomCen >>== topCen >>== bottomRight >>== topRight)
expectedString = ["Player: O Won!",
"OOO",
" X ",
" XX"]
in (assertEqual "Player 0 Winning" expectedString game)
testDidWin :: Game -> Bool
testDidWin (Unfinished a) = didWin a
testDidWin (Finished a) = didWin a
testDidWin _ = False
testWinConditions :: Assertion
testWinConditions =
let f = NoMoves
topRowWin = testDidWin $ f >>== topLeft >>== bottomCen >>== topCen >>== bottomRight >>== topRight
midRowWin = testDidWin $ f >>== midLeft >>== bottomCen >>== midCen >>== bottomRight >>== midRight
bottomRowWin = testDidWin $ f >>== bottomLeft >>== topCen >>== bottomCen >>== midRight >>== bottomRight
rightWin = testDidWin $ f >>== bottomRight >>== topCen >>== midRight >>== bottomLeft >>== topRight
leftWin = testDidWin $ f >>== bottomLeft >>== topCen >>== midLeft >>== bottomRight >>== topLeft
cenWin = testDidWin $ f >>== bottomCen >>== topRight >>== midCen >>== bottomRight >>== topCen
noWin = testDidWin $ f >>== bottomCen >>== topRight >>== midLeft >>== bottomRight >>== topCen
diagLeft = testDidWin $ f >>== bottomLeft >>== topCen >>== midCen >>== bottomRight >>== topRight
diagRight = testDidWin $ f >>== bottomRight >>== topRight >>== midCen >>== bottomRight >>== topLeft
in (assertEqual "Top row win" True topRowWin)
>> (assertEqual "Mid row win" True midRowWin)
>> (assertEqual "Bottom row win" True bottomRowWin)
>> (assertEqual "Right win" True rightWin)
>> (assertEqual "left win" True leftWin)
>> (assertEqual "cen win" True cenWin)
>> (assertEqual "no win" False noWin)
>> (assertEqual "diag right" True diagRight)
>> (assertEqual "diag left" True diagLeft)
testUnfinished :: Assertion
testUnfinished =
let f = NoMoves
game = stringifyGame (f >>== midCen >>== topLeft >>== bottomCen >>== topCen >>== bottomRight)
expectedString = ["OO ",
" X ",
" XX"]
in (assertEqual "Game unfinished" expectedString game)
tests2 :: TestTree
tests2 = testGroup "TicTacToeTests"
[
testCase "undo game one move" testUndoOnlyOneMove,
testCase "undo game" testUndo,
testCase "game unfinished" testUnfinished,
testCase "winning game" testWinning,
testCase "Win conditions" testWinConditions
]
runner = defaultMain tests2
| steveshogren/haskell-katas | test/TicTacToeTest.hs | bsd-3-clause | 3,558 | 0 | 20 | 895 | 991 | 515 | 476 | 79 | 2 |
{-|
Module : Data.Array.BitArray.IO
Copyright : (c) Claude Heiland-Allen 2012
License : BSD3
Maintainer : claude@mathr.co.uk
Stability : unstable
Portability : portable
Unboxed mutable bit arrays in the 'IO' monad.
-}
module Data.Array.BitArray.IO
( IOBitArray()
-- * MArray-like interface.
, getBounds
, newArray
, newArray_
, newListArray
, readArray
, writeArray
, mapArray
, mapIndices
, getElems
, getAssocs
-- * Conversion to/from immutable bit arrays.
, freeze
, thaw
-- * Construction
, copy
, fill
-- * Short-circuiting reductions.
, or
, and
, isUniform
-- * Aggregate operations.
, fold
, map
, zipWith
-- * Unsafe.
, unsafeReadArray
, unsafeGetElems
, unsafeFreeze
, unsafeThaw
) where
import Prelude hiding (and, or, map, zipWith)
import Control.Monad (forM_, when)
import Data.Bits (shiftR, testBit, setBit, clearBit, (.&.), complement)
import Data.Ix (Ix, index, inRange, range, rangeSize)
import Data.List (foldl1')
import Data.Word (Word8, Word64)
import Foreign.ForeignPtr (withForeignPtr, touchForeignPtr)
import Foreign.Ptr (Ptr, plusPtr, castPtr)
import Foreign.Storable (poke, pokeByteOff, pokeElemOff, peekByteOff, peekElemOff)
import System.IO.Unsafe (unsafeInterleaveIO)
import Data.Bits.Bitwise (packWord8LE, mask)
import qualified Data.Bits.Bitwise as Bitwise
import Data.Array.BitArray.Internal
( IOBitArray(..)
, getBounds
, newArray_
, freeze
, unsafeFreeze
, thaw
, unsafeThaw
, copy
)
-- | Create a new array filled with an initial value.
{-# INLINE newArray #-}
newArray :: Ix i => (i, i) {- ^ bounds -} -> Bool {- ^ initial value -} -> IO (IOBitArray i)
newArray bs b = do
a <- newArray_ bs
fill a b
return a
-- | Create a new array filled with values from a list.
{-# INLINE newListArray #-}
newListArray :: Ix i => (i, i) {- ^ bounds -} -> [Bool] {- ^ elems -} -> IO (IOBitArray i)
newListArray bs es = do
a <- newArray_ bs
let byteBits = 8
writeBytes :: Ptr Word8 -> [Bool] -> IO ()
writeBytes p (b0:b1:b2:b3:b4:b5:b6:b7:rest) = do
poke p (packWord8LE b0 b1 b2 b3 b4 b5 b6 b7)
writeBytes (plusPtr p 1) rest
writeBytes _ [] = return ()
writeBytes p rest = writeBytes p (take byteBits (rest ++ repeat False))
withForeignPtr (iobData a) $ \p -> do
writeBytes (castPtr p) (take (byteBits * iobBytes a) es)
return a
-- | Read from an array at an index.
{-# INLINE readArray #-}
readArray :: Ix i => IOBitArray i -> i -> IO Bool
readArray a i = do
bs <- getBounds a
when (not (inRange bs i)) $ error "array index out of bounds"
readArrayRaw a (index bs i)
-- | Read from an array at an index without bounds checking. Unsafe.
{-# INLINE unsafeReadArray #-}
unsafeReadArray :: Ix i => IOBitArray i -> i -> IO Bool
unsafeReadArray a i = do
bs <- getBounds a
readArrayRaw a (index bs i)
{-# INLINE readArrayRaw #-}
readArrayRaw :: Ix i => IOBitArray i -> Int -> IO Bool
readArrayRaw a n = do
let byte = n `shiftR` 3
bit = n .&. 7
withForeignPtr (iobData a) $ \p -> do
b0 <- peekByteOff p byte
return (testBit (b0 :: Word8) bit)
-- | Write to an array at an index.
{-# INLINE writeArray #-}
writeArray :: Ix i => IOBitArray i -> i -> Bool -> IO ()
writeArray a i b = do
bs <- getBounds a
when (not (inRange bs i)) $ error "array index out of bounds"
let n = index bs i
byte = n `shiftR` 3
bit = n .&. 7
withForeignPtr (iobData a) $ \p -> do
b0 <- peekByteOff p byte
let b1 = (if b then setBit else clearBit) (b0 :: Word8) bit
pokeByteOff p byte b1
-- | Alias for 'map'.
{-# INLINE mapArray #-}
mapArray :: Ix i => (Bool -> Bool) -> IOBitArray i -> IO (IOBitArray i)
mapArray = map
-- unsafeInterleaveIO is used to avoid having to create the whole list in
-- memory before the function can return, but need to keep the ForeignPtr
-- alive to avoid GC stealing our data.
interleavedMapMThenTouch :: Ix i => IOBitArray i -> (a -> IO b) -> [a] -> IO [b]
interleavedMapMThenTouch a _ [] = touchForeignPtr (iobData a) >> return []
interleavedMapMThenTouch a f (x:xs) = unsafeInterleaveIO $ do
y <- f x
ys <- interleavedMapMThenTouch a f xs
return (y:ys)
-- | Create a new array by reading from another.
{-# INLINE mapIndices #-}
mapIndices :: (Ix i, Ix j) => (i, i) {- ^ new bounds -} -> (i -> j) {- ^ index transformation -} -> IOBitArray j {- ^ source array -} -> IO (IOBitArray i)
mapIndices bs h a = newListArray bs =<< interleavedMapMThenTouch a (readArray a . h) (range bs)
-- | Get a list of all elements of an array.
{-# INLINE getElems #-}
getElems :: Ix i => IOBitArray i -> IO [Bool]
getElems a = unsafeGetElems =<< copy a
-- | Get a list of all elements of an array. Unsafe when the source
-- array can be modified later.
{-# INLINE unsafeGetElems #-}
unsafeGetElems :: Ix i => IOBitArray i -> IO [Bool]
unsafeGetElems a' = do
bs <- getBounds a'
let r = rangeSize bs
count = (r + 7) `shiftR` 3
p <- withForeignPtr (iobData a') $ return
bytes <- interleavedMapMThenTouch a' (peekByteOff p) [0 .. count - 1]
return . take r . concatMap Bitwise.toListLE $ (bytes :: [Word8])
-- | Get a list of all (index, element) pairs.
{-# INLINE getAssocs #-}
getAssocs :: Ix i => IOBitArray i -> IO [(i, Bool)]
getAssocs a = do
bs <- getBounds a
zip (range bs) `fmap` getElems a
-- | Fill an array with a uniform value.
{-# INLINE fill #-}
fill :: Ix i => IOBitArray i -> Bool -> IO ()
fill a b = do
let count = iobBytes a `shiftR` 3
word :: Word64
word = if b then complement 0 else 0
withForeignPtr (iobData a) $ \p ->
forM_ [0 .. count - 1] $ \i ->
pokeElemOff p i word
-- | Short-circuit bitwise reduction: True when any bit is True.
{-# INLINE or #-}
or :: Ix i => IOBitArray i -> IO Bool
or a = do
bs <- getBounds a
let total = rangeSize bs
full = total .&. complement (mask 6)
count = full `shiftR` 6
loop :: Ptr Word64 -> Int -> IO Bool
loop p n
| n < count = do
w <- peekElemOff p n
if w /= (0 :: Word64) then return True else loop p (n + 1)
| otherwise = rest full
rest m
| m < total = do
b <- readArrayRaw a m
if b then return True else rest (m + 1)
| otherwise = return False
withForeignPtr (iobData a) $ \p -> loop p 0
-- | Short-circuit bitwise reduction: False when any bit is False.
{-# INLINE and #-}
and :: Ix i => IOBitArray i -> IO Bool
and a = do
bs <- getBounds a
let total = rangeSize bs
full = total .&. complement (mask 6)
count = full `shiftR` 6
loop :: Ptr Word64 -> Int -> IO Bool
loop p n
| n < count = do
w <- peekElemOff p n
if w /= (complement 0 :: Word64) then return False else loop p (n + 1)
| otherwise = rest full
rest m
| m < total = do
b <- readArrayRaw a m
if not b then return False else rest (m + 1)
| otherwise = return True
withForeignPtr (iobData a) $ \p -> loop p 0
-- | Short-circuit bitwise reduction: 'Nothing' when any bits differ,
-- 'Just' when all bits are the same.
{-# INLINE isUniform #-}
isUniform :: Ix i => IOBitArray i -> IO (Maybe Bool)
isUniform a = do
bs <- getBounds a
let total = rangeSize bs
full = total .&. complement (mask 6)
count = full `shiftR` 6
loop :: Ptr Word64 -> Int -> Bool -> Bool -> IO (Maybe Bool)
loop p n st sf
| n < count = do
w <- peekElemOff p n
let t = w /= (0 :: Word64) || st
f = w /= (complement 0) || sf
if t && f then return Nothing else loop p (n + 1) t f
| otherwise = rest full st sf
rest m st sf
| m < total = do
b <- readArrayRaw a m
let t = b || st
f = not b || sf
if t && f then return Nothing else rest (m + 1) t f
| st && not sf = return (Just True)
| not st && sf = return (Just False)
| otherwise = return Nothing
withForeignPtr (iobData a) $ \p -> loop p 0 False False
-- | Bitwise reduction with an associative commutative boolean operator.
-- Implementation lifts from 'Bool' to 'Bits' and folds large chunks
-- at a time. Each bit is used as a source exactly once.
{-# INLINE fold #-}
fold :: Ix i => (Bool -> Bool -> Bool) {- ^ operator -} -> IOBitArray i -> IO (Maybe Bool)
fold f a = do
bs <- getBounds a
let g = Bitwise.zipWith f
total = rangeSize bs
full = total .&. complement (mask 6)
count = full `shiftR` 6
loop :: Ptr Word64 -> Int -> Maybe Word64 -> IO (Maybe Bool)
loop p n mw
| n < count = do
w <- peekElemOff p n
case mw of
Nothing -> loop p (n + 1) (Just $! w)
Just w0 -> loop p (n + 1) (Just $! g w0 w)
| otherwise =
case mw of
Nothing -> rest full Nothing
Just w0 -> rest full (Just $! foldl1' f (Bitwise.toListLE w0))
rest m mb
| m < total = do
b <- readArrayRaw a m
case mb of
Nothing -> rest (m + 1) (Just $! b)
Just b0 -> rest (m + 1) (Just $! f b0 b)
| otherwise = return mb
withForeignPtr (iobData a) $ \p -> loop p 0 Nothing
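-- A small usage sketch of the chunked 'fold' (illustrative only, not part of
-- the original module; it assumes 'newListArray' is defined earlier in this
-- module): folding with (/=) computes the parity of the set bits.
foldDemo :: IO (Maybe Bool)
foldDemo = do
  a <- newListArray (0, 7 :: Int) [True, False, True, True, False, True, False, False]
  fold (/=) a -- four bits are set, so the expected result is Just False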
-- | Bitwise map. Implementation lifts from 'Bool' to 'Bits' and maps
-- large chunks at a time.
{-# INLINE map #-}
map :: Ix i => (Bool -> Bool) -> IOBitArray i -> IO (IOBitArray i)
map f a = do
bs <- getBounds a
b <- newArray_ bs
mapTo b f a
return b
{-# INLINE mapTo #-}
mapTo :: Ix i => IOBitArray i -> (Bool -> Bool) -> IOBitArray i -> IO ()
mapTo dst f src = do
-- {
sbs <- getBounds src
dbs <- getBounds dst
when (sbs /= dbs) $ error "mapTo mismatched bounds"
-- }
let count = iobBytes dst `shiftR` 3
g :: Word64 -> Word64
g = Bitwise.map f
withForeignPtr (iobData src) $ \sp ->
withForeignPtr (iobData dst) $ \dp ->
forM_ [0 .. count - 1] $ \n -> do
pokeElemOff dp n . g =<< peekElemOff sp n
-- | Bitwise zipWith. Implementation lifts from 'Bool' to 'Bits' and
-- combines large chunks at a time.
--
-- The bounds of the source arrays must be identical.
{-# INLINE zipWith #-}
zipWith :: Ix i => (Bool -> Bool -> Bool) -> IOBitArray i -> IOBitArray i -> IO (IOBitArray i)
zipWith f l r = do
lbs <- getBounds l
rbs <- getBounds r
when (lbs /= rbs) $ error "zipWith mismatched bounds"
c <- newArray_ lbs
zipWithTo c f l r
return c
{-# INLINE zipWithTo #-}
zipWithTo :: Ix i => IOBitArray i -> (Bool -> Bool -> Bool) -> IOBitArray i -> IOBitArray i -> IO ()
zipWithTo dst f l r = do
lbs <- getBounds l
rbs <- getBounds r
dbs <- getBounds dst
when (lbs /= rbs || dbs /= lbs || dbs /= rbs) $ error "zipWithTo mismatched bounds"
let count = iobBytes dst `shiftR` 3
g :: Word64 -> Word64 -> Word64
g = Bitwise.zipWith f
withForeignPtr (iobData l) $ \lp ->
withForeignPtr (iobData r) $ \rp ->
withForeignPtr (iobData dst) $ \dp ->
forM_ [0 .. count - 1] $ \n -> do
p <- peekElemOff lp n
q <- peekElemOff rp n
pokeElemOff dp n (g p q)
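-- A minimal sketch of 'zipWith' (illustrative only, not part of the original
-- module): it combines two equally-bounded arrays bitwise. The name
-- 'intersectDemo' is hypothetical, and the sketch assumes 'newListArray' is
-- defined earlier in this module and that the clashing Prelude names are
-- hidden, as this module's own definitions suggest.
intersectDemo :: IO [Bool]
intersectDemo = do
  x <- newListArray (0, 3 :: Int) [True, True, False, False]
  y <- newListArray (0, 3 :: Int) [True, False, True, False]
  getElems =<< zipWith (&&) x y -- expected: [True, False, False, False]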
|
ekmett/bitwise
|
src/Data/Array/BitArray/IO.hs
|
bsd-3-clause
| 11,227
| 0
| 20
| 3,156
| 4,010
| 1,997
| 2,013
| 274
| 4
|
module Main where
import Greet (putHello)
main :: IO ()
main = putHello "Fragnix!"
|
phischu/fragnix
|
tests/quick/HelloFragnix/Main.hs
|
bsd-3-clause
| 85
| 0
| 6
| 16
| 30
| 17
| 13
| 4
| 1
|
module Homework1.Hanoi
( hanoi
) where
type Peg = String
type Move = (Peg, Peg)
hanoi :: Integer -> Peg -> Peg -> Peg -> [Move]
hanoi 0 _ _ _ = []
hanoi n a b c = hanoi ( n - 1 ) a c b ++ [(a,b)] ++ hanoi ( n - 1 ) c b a
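-- A worked example (a sketch; 'exampleMoves' is not part of the original
-- module): moving two discs from peg "a" to peg "b" via "c" takes three moves.
exampleMoves :: [Move]
exampleMoves = hanoi 2 "a" "b" "c"
-- evaluates to [("a","c"), ("a","b"), ("c","b")]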
|
AlexaDeWit/haskell-exercises
|
src/Homework1/Hanoi.hs
|
bsd-3-clause
| 231
| 0
| 9
| 70
| 132
| 73
| 59
| 7
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TupleSections #-}
-- | The general Stack configuration that starts everything off. This should
-- be smart enough to fall back if there is no stack.yaml, instead relying on
-- whatever files are available.
--
-- If there is no stack.yaml, and there is a cabal.config, we
-- read in those constraints, and if there's a cabal.sandbox.config,
-- we read any constraints from there and also find the package
-- database from there, etc. And if there's nothing, we should
-- probably default to behaving like cabal, possibly spitting out
-- a warning that "you should run `stack init` to make things better".
module Stack.Config
(MiniConfig
,loadConfig
,loadConfigMaybeProject
,loadMiniConfig
,loadConfigYaml
,packagesParser
,getLocalPackages
,resolvePackageEntry
,getImplicitGlobalProjectDir
,getStackYaml
,getSnapshots
,makeConcreteResolver
,checkOwnership
,getInContainer
,getInNixShell
,defaultConfigYaml
,getProjectConfig
,LocalConfigStatus(..)
,removePathFromPackageEntry
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Zip as Zip
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Arrow ((***))
import Control.Exception (assert)
import Control.Monad (liftM, unless, when, filterM)
import Control.Monad.Catch (MonadThrow, MonadCatch, catchAll, throwM, catch)
import Control.Monad.Extra (firstJustM)
import Control.Monad.IO.Class
import Control.Monad.Logger hiding (Loc)
import Control.Monad.Reader (ask, runReaderT)
import Crypto.Hash (hashWith, SHA256(..))
import Data.Aeson.Extended
import qualified Data.ByteArray as Mem (convert)
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64.URL as B64URL
import qualified Data.ByteString.Lazy as L
import Data.Foldable (forM_)
import Data.IORef
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid.Extra
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.Yaml as Yaml
import Distribution.System (OS (..), Platform (..), buildPlatform, Arch(OtherArch))
import qualified Distribution.Text
import Distribution.Version (simplifyVersionRange)
import GHC.Conc (getNumProcessors)
import Lens.Micro (lens)
import Network.HTTP.Client (parseUrlThrow)
import Network.HTTP.Download (download)
import Network.HTTP.Simple (httpJSON, getResponseBody)
import Options.Applicative (Parser, strOption, long, help)
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.Find (findInParents)
import Path.IO
import qualified Paths_stack as Meta
import Stack.BuildPlan
import Stack.Config.Build
import Stack.Config.Docker
import Stack.Config.Nix
import Stack.Config.Urls
import Stack.Constants
import qualified Stack.Image as Image
import Stack.Types.BuildPlan
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.Docker
import Stack.Types.Internal
import Stack.Types.Nix
import Stack.Types.PackageIndex (IndexType (ITHackageSecurity), HackageSecurity (..))
import Stack.Types.Resolver
import Stack.Types.StackT
import Stack.Types.StringError
import Stack.Types.Urls
import Stack.Types.Version
import System.Environment
import System.IO
import System.PosixCompat.Files (fileOwner, getFileStatus)
import System.PosixCompat.User (getEffectiveUserID)
import System.Process.Read
import System.Process.Run
-- | If deprecated path exists, use it and print a warning.
-- Otherwise, return the new path.
tryDeprecatedPath
:: (MonadIO m, MonadLogger m)
=> Maybe T.Text -- ^ Description of file for warning (if Nothing, no deprecation warning is displayed)
-> (Path Abs a -> m Bool) -- ^ Test for existence
-> Path Abs a -- ^ New path
-> Path Abs a -- ^ Deprecated path
-> m (Path Abs a, Bool) -- ^ (Path to use, whether it already exists)
tryDeprecatedPath mWarningDesc exists new old = do
newExists <- exists new
if newExists
then return (new, True)
else do
oldExists <- exists old
if oldExists
then do
case mWarningDesc of
Nothing -> return ()
Just desc ->
$logWarn $ T.concat
[ "Warning: Location of ", desc, " at '"
, T.pack (toFilePath old)
, "' is deprecated; rename it to '"
, T.pack (toFilePath new)
, "' instead" ]
return (old, True)
else return (new, False)
-- | Get the location of the implicit global project directory.
-- If the directory already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getImplicitGlobalProjectDir
:: (MonadIO m, MonadLogger m)
=> Config -> m (Path Abs Dir)
getImplicitGlobalProjectDir config =
--TEST no warning printed
liftM fst $ tryDeprecatedPath
Nothing
doesDirExist
(implicitGlobalProjectDir stackRoot)
(implicitGlobalProjectDirDeprecated stackRoot)
where
stackRoot = configStackRoot config
-- | This is slightly more expensive than @'asks' ('bcStackYaml' '.' 'getBuildConfig')@
-- and should only be used when no 'BuildConfig' is at hand.
getStackYaml
:: (StackMiniM env m, HasConfig env)
=> m (Path Abs File)
getStackYaml = do
config <- view configL
case configMaybeProject config of
Just (_project, stackYaml) -> return stackYaml
Nothing -> liftM (</> stackDotYaml) (getImplicitGlobalProjectDir config)
-- | Download the 'Snapshots' value from stackage.org.
getSnapshots
:: (StackMiniM env m, HasConfig env)
=> m Snapshots
getSnapshots = do
latestUrlText <- askLatestSnapshotUrl
latestUrl <- parseUrlThrow (T.unpack latestUrlText)
$logDebug $ "Downloading snapshot versions file from " <> latestUrlText
result <- httpJSON latestUrl
$logDebug $ "Done downloading and parsing snapshot versions file"
return $ getResponseBody result
-- | Turn an 'AbstractResolver' into a 'Resolver'.
makeConcreteResolver
:: (StackMiniM env m, HasConfig env)
=> AbstractResolver
-> m Resolver
makeConcreteResolver (ARResolver r) = return r
makeConcreteResolver ar = do
snapshots <- getSnapshots
r <-
case ar of
ARResolver r -> assert False $ return r
ARGlobal -> do
config <- view configL
implicitGlobalDir <- getImplicitGlobalProjectDir config
let fp = implicitGlobalDir </> stackDotYaml
ProjectAndConfigMonoid project _ <-
loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp
return $ projectResolver project
ARLatestNightly -> return $ ResolverSnapshot $ Nightly $ snapshotsNightly snapshots
ARLatestLTSMajor x ->
case IntMap.lookup x $ snapshotsLts snapshots of
Nothing -> errorString $ "No LTS release found with major version " ++ show x
Just y -> return $ ResolverSnapshot $ LTS x y
ARLatestLTS
| IntMap.null $ snapshotsLts snapshots -> errorString "No LTS releases found"
| otherwise ->
let (x, y) = IntMap.findMax $ snapshotsLts snapshots
in return $ ResolverSnapshot $ LTS x y
$logInfo $ "Selected resolver: " <> resolverName r
return r
-- | Get the latest snapshot resolver available.
getLatestResolver :: (StackMiniM env m, HasConfig env) => m Resolver
getLatestResolver = do
snapshots <- getSnapshots
let mlts = do
(x,y) <- listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots)))
return (LTS x y)
snap = fromMaybe (Nightly (snapshotsNightly snapshots)) mlts
return (ResolverSnapshot snap)
-- | Create a 'Config' value when we're not using any local
-- configuration files (e.g., the script command)
configNoLocalConfig
:: (MonadLogger m, MonadIO m, MonadCatch m)
=> Path Abs Dir -- ^ stack root
-> Maybe AbstractResolver
-> ConfigMonoid
-> m Config
configNoLocalConfig _ Nothing _ = throwM NoResolverWhenUsingNoLocalConfig
configNoLocalConfig stackRoot (Just resolver) configMonoid = do
userConfigPath <- getFakeConfigPath stackRoot resolver
configFromConfigMonoid
stackRoot
userConfigPath
False
(Just resolver)
Nothing -- project
configMonoid
-- Interprets ConfigMonoid options.
configFromConfigMonoid
:: (MonadLogger m, MonadIO m, MonadCatch m)
=> Path Abs Dir -- ^ stack root, e.g. ~/.stack
-> Path Abs File -- ^ user config file path, e.g. ~/.stack/config.yaml
-> Bool -- ^ allow locals?
-> Maybe AbstractResolver
-> Maybe (Project, Path Abs File)
-> ConfigMonoid
-> m Config
configFromConfigMonoid
configStackRoot configUserConfigPath configAllowLocals mresolver
mproject ConfigMonoid{..} = do
-- If --stack-work is passed, prefer it. Otherwise, if STACK_WORK
-- is set, use that. If neither, use the default ".stack-work"
mstackWorkEnv <- liftIO $ lookupEnv stackWorkEnvVar
configWorkDir0 <- maybe (return $(mkRelDir ".stack-work")) parseRelDir mstackWorkEnv
let configWorkDir = fromFirst configWorkDir0 configMonoidWorkDir
-- This code is to handle the deprecation of latest-snapshot-url
configUrls <- case (getFirst configMonoidLatestSnapshotUrl, getFirst (urlsMonoidLatestSnapshot configMonoidUrls)) of
(Just url, Nothing) -> do
$logWarn "The latest-snapshot-url field is deprecated in favor of 'urls' configuration"
return (urlsFromMonoid configMonoidUrls) { urlsLatestSnapshot = url }
_ -> return (urlsFromMonoid configMonoidUrls)
let configConnectionCount = fromFirst 8 configMonoidConnectionCount
configHideTHLoading = fromFirst True configMonoidHideTHLoading
configPackageIndices = fromFirst
[PackageIndex
{ indexName = IndexName "Hackage"
, indexLocation = "https://s3.amazonaws.com/hackage.fpcomplete.com/"
, indexType = ITHackageSecurity HackageSecurity
{ hsKeyIds =
[ "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d"
, "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42"
, "280b10153a522681163658cb49f632cde3f38d768b736ddbc901d99a1a772833"
, "2a96b1889dc221c17296fcc2bb34b908ca9734376f0f361660200935916ef201"
, "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3"
, "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921"
, "772e9f4c7db33d251d5c6e357199c819e569d130857dc225549b40845ff0890d"
, "aa315286e6ad281ad61182235533c41e806e5a787e0b6d1e7eef3f09d137d2e9"
, "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0"
]
, hsKeyThreshold = 3
}
, indexDownloadPrefix = "https://s3.amazonaws.com/hackage.fpcomplete.com/package/"
, indexRequireHashes = False
}]
configMonoidPackageIndices
configGHCVariant0 = getFirst configMonoidGHCVariant
configGHCBuild = getFirst configMonoidGHCBuild
configInstallGHC = fromFirst False configMonoidInstallGHC
configSkipGHCCheck = fromFirst False configMonoidSkipGHCCheck
configSkipMsys = fromFirst False configMonoidSkipMsys
configExtraIncludeDirs = configMonoidExtraIncludeDirs
configExtraLibDirs = configMonoidExtraLibDirs
configOverrideGccPath = getFirst configMonoidOverrideGccPath
-- Only place in the codebase where platform is hard-coded. In theory
-- in the future, allow it to be configured.
(Platform defArch defOS) = buildPlatform
arch = fromMaybe defArch
$ getFirst configMonoidArch >>= Distribution.Text.simpleParse
os = defOS
configPlatform = Platform arch os
configRequireStackVersion = simplifyVersionRange (getIntersectingVersionRange configMonoidRequireStackVersion)
configImage = Image.imgOptsFromMonoid configMonoidImageOpts
configCompilerCheck = fromFirst MatchMinor configMonoidCompilerCheck
case arch of
OtherArch unk -> $logWarn $ "Warning: Unknown value for architecture setting: " <> T.pack (show unk)
_ -> return ()
configPlatformVariant <- liftIO $
maybe PlatformVariantNone PlatformVariant <$> lookupEnv platformVariantEnvVar
let configBuild = buildOptsFromMonoid configMonoidBuildOpts
configDocker <-
dockerOptsFromMonoid (fmap fst mproject) configStackRoot mresolver configMonoidDockerOpts
configNix <- nixOptsFromMonoid configMonoidNixOpts os
configSystemGHC <-
case (getFirst configMonoidSystemGHC, nixEnable configNix) of
(Just False, True) ->
throwM NixRequiresSystemGhc
_ ->
return
(fromFirst
(dockerEnable configDocker || nixEnable configNix)
configMonoidSystemGHC)
when (isJust configGHCVariant0 && configSystemGHC) $
throwM ManualGHCVariantSettingsAreIncompatibleWithSystemGHC
rawEnv <- liftIO getEnvironment
pathsEnv <- augmentPathMap configMonoidExtraPath
(Map.fromList (map (T.pack *** T.pack) rawEnv))
origEnv <- mkEnvOverride configPlatform pathsEnv
let configEnvOverride _ = return origEnv
configLocalProgramsBase <- case getFirst configMonoidLocalProgramsBase of
Nothing -> getDefaultLocalProgramsBase configStackRoot configPlatform origEnv
Just path -> return path
platformOnlyDir <- runReaderT platformOnlyRelDir (configPlatform, configPlatformVariant)
let configLocalPrograms = configLocalProgramsBase </> platformOnlyDir
configLocalBin <-
case getFirst configMonoidLocalBinPath of
Nothing -> do
localDir <- getAppUserDataDir "local"
return $ localDir </> $(mkRelDir "bin")
Just userPath ->
(case mproject of
-- Not in a project
Nothing -> resolveDir' userPath
-- Resolves to the project dir and appends the user path if it is relative
Just (_, configYaml) -> resolveDir (parent configYaml) userPath)
-- TODO: Either catch specific exceptions or add a
-- parseRelAsAbsDirMaybe utility and use it along with
-- resolveDirMaybe.
`catchAll`
const (throwM (NoSuchDirectory userPath))
configJobs <-
case getFirst configMonoidJobs of
Nothing -> liftIO getNumProcessors
Just i -> return i
let configConcurrentTests = fromFirst True configMonoidConcurrentTests
let configTemplateParams = configMonoidTemplateParameters
configScmInit = getFirst configMonoidScmInit
configGhcOptions = configMonoidGhcOptions
configSetupInfoLocations = configMonoidSetupInfoLocations
configPvpBounds = fromFirst (PvpBounds PvpBoundsNone False) configMonoidPvpBounds
configModifyCodePage = fromFirst True configMonoidModifyCodePage
configExplicitSetupDeps = configMonoidExplicitSetupDeps
configRebuildGhcOptions = fromFirst False configMonoidRebuildGhcOptions
configApplyGhcOptions = fromFirst AGOLocals configMonoidApplyGhcOptions
configAllowNewer = fromFirst False configMonoidAllowNewer
configDefaultTemplate = getFirst configMonoidDefaultTemplate
configDumpLogs = fromFirst DumpWarningLogs configMonoidDumpLogs
configSaveHackageCreds = fromFirst True configMonoidSaveHackageCreds
configAllowDifferentUser <-
case getFirst configMonoidAllowDifferentUser of
Just True -> return True
_ -> getInContainer
configPackageCaches <- liftIO $ newIORef Nothing
let configMaybeProject = mproject
return Config {..}
-- | Get the default location of the local programs directory.
getDefaultLocalProgramsBase :: MonadThrow m
=> Path Abs Dir
-> Platform
-> EnvOverride
-> m (Path Abs Dir)
getDefaultLocalProgramsBase configStackRoot configPlatform override =
let
defaultBase = configStackRoot </> $(mkRelDir "programs")
in
case configPlatform of
-- For historical reasons, on Windows a subdirectory of LOCALAPPDATA is
-- used instead of a subdirectory of STACK_ROOT. Unifying the defaults would
-- mean that Windows users would manually have to move data from the old
-- location to the new one, which is undesirable.
Platform _ Windows ->
case Map.lookup "LOCALAPPDATA" $ unEnvOverride override of
Just t ->
case parseAbsDir $ T.unpack t of
Nothing -> throwString ("Failed to parse LOCALAPPDATA environment variable (expected absolute directory): " ++ show t)
Just lad -> return $ lad </> $(mkRelDir "Programs") </> $(mkRelDir stackProgName)
Nothing -> return defaultBase
_ -> return defaultBase
-- | An environment with a subset of BuildConfig used for setup.
data MiniConfig = MiniConfig
{ mcGHCVariant :: !GHCVariant
, mcConfig :: !Config
}
instance HasConfig MiniConfig where
configL = lens mcConfig (\x y -> x { mcConfig = y })
instance HasPlatform MiniConfig
instance HasGHCVariant MiniConfig where
ghcVariantL = lens mcGHCVariant (\x y -> x { mcGHCVariant = y })
-- | Load the 'MiniConfig'.
loadMiniConfig :: Config -> MiniConfig
loadMiniConfig config =
let ghcVariant = fromMaybe GHCStandard (configGHCVariant0 config)
in MiniConfig ghcVariant config
-- | Load the configuration, using environment variables and defaults as
-- necessary.
loadConfigMaybeProject
:: StackM env m
=> ConfigMonoid
-- ^ Config monoid from parsed command-line arguments
-> Maybe AbstractResolver
-- ^ Override resolver
-> LocalConfigStatus (Project, Path Abs File, ConfigMonoid)
-- ^ Project config to use, if any
-> m (LoadConfig m)
loadConfigMaybeProject configArgs mresolver mproject = do
(stackRoot, userOwnsStackRoot) <- determineStackRootAndOwnership configArgs
let loadHelper mproject' = do
userConfigPath <- getDefaultUserConfigPath stackRoot
extraConfigs0 <- getExtraConfigs userConfigPath >>=
mapM (\file -> loadConfigYaml (parseConfigMonoid (parent file)) file)
let extraConfigs =
                -- a docker section in a non-project config file should never cause docker
                -- to default to enabled, so make it look like the section didn't exist
map (\c -> c {configMonoidDockerOpts =
(configMonoidDockerOpts c) {dockerMonoidDefaultEnable = Any False}})
extraConfigs0
configFromConfigMonoid
stackRoot
userConfigPath
True -- allow locals
mresolver
(fmap (\(x, y, _) -> (x, y)) mproject')
$ mconcat $ configArgs
: maybe id (\(_, _, projectConfig) -> (projectConfig:)) mproject' extraConfigs
config <-
case mproject of
LCSNoConfig -> configNoLocalConfig stackRoot mresolver configArgs
LCSProject project -> loadHelper $ Just project
LCSNoProject -> loadHelper Nothing
unless (fromCabalVersion Meta.version `withinRange` configRequireStackVersion config)
(throwM (BadStackVersionException (configRequireStackVersion config)))
let mprojectRoot = fmap (\(_, fp, _) -> parent fp) mproject
unless (configAllowDifferentUser config) $ do
unless userOwnsStackRoot $
throwM (UserDoesn'tOwnDirectory stackRoot)
forM_ mprojectRoot $ \dir ->
checkOwnership (dir </> configWorkDir config)
return LoadConfig
{ lcConfig = config
, lcLoadBuildConfig = loadBuildConfig mproject config mresolver
, lcProjectRoot =
case mprojectRoot of
LCSProject fp -> Just fp
LCSNoProject -> Nothing
LCSNoConfig -> Nothing
}
-- | Load the configuration, using current directory, environment variables,
-- and defaults as necessary. The passed @Maybe (Path Abs File)@ is an
-- override for the location of the project's stack.yaml.
loadConfig :: StackM env m
=> ConfigMonoid
-- ^ Config monoid from parsed command-line arguments
-> Maybe AbstractResolver
-- ^ Override resolver
-> StackYamlLoc (Path Abs File)
-- ^ Override stack.yaml
-> m (LoadConfig m)
loadConfig configArgs mresolver mstackYaml =
loadProjectConfig mstackYaml >>= loadConfigMaybeProject configArgs mresolver
-- | Load the build configuration, adding build-specific values to the config loaded by @loadConfig@.
loadBuildConfig :: StackM env m
=> LocalConfigStatus (Project, Path Abs File, ConfigMonoid)
-> Config
-> Maybe AbstractResolver -- override resolver
-> Maybe CompilerVersion -- override compiler
-> m BuildConfig
loadBuildConfig mproject config mresolver mcompiler = do
env <- ask
(project', stackYamlFP) <- case mproject of
LCSProject (project, fp, _) -> do
forM_ (projectUserMsg project) ($logWarn . T.pack)
return (project, fp)
LCSNoConfig -> do
p <- getEmptyProject
return (p, configUserConfigPath config)
LCSNoProject -> do
$logDebug "Run from outside a project, using implicit global project config"
destDir <- getImplicitGlobalProjectDir config
let dest :: Path Abs File
dest = destDir </> stackDotYaml
dest' :: FilePath
dest' = toFilePath dest
ensureDir destDir
exists <- doesFileExist dest
if exists
then do
ProjectAndConfigMonoid project _ <- loadConfigYaml (parseProjectAndConfigMonoid destDir) dest
when (view terminalL env) $
case mresolver of
Nothing ->
$logDebug ("Using resolver: " <> resolverName (projectResolver project) <>
" from implicit global project's config file: " <> T.pack dest')
Just aresolver -> do
let name =
case aresolver of
ARResolver resolver -> resolverName resolver
ARLatestNightly -> "nightly"
ARLatestLTS -> "lts"
ARLatestLTSMajor x -> T.pack $ "lts-" ++ show x
ARGlobal -> "global"
$logDebug ("Using resolver: " <> name <>
" specified on command line")
return (project, dest)
else do
$logInfo ("Writing implicit global project config file to: " <> T.pack dest')
$logInfo "Note: You can change the snapshot via the resolver field there."
p <- getEmptyProject
liftIO $ do
S.writeFile dest' $ S.concat
[ "# This is the implicit global project's config file, which is only used when\n"
, "# 'stack' is run outside of a real project. Settings here do _not_ act as\n"
, "# defaults for all projects. To change stack's default settings, edit\n"
, "# '", encodeUtf8 (T.pack $ toFilePath $ configUserConfigPath config), "' instead.\n"
, "#\n"
, "# For more information about stack's configuration, see\n"
, "# http://docs.haskellstack.org/en/stable/yaml_configuration/\n"
, "#\n"
, Yaml.encode p]
S.writeFile (toFilePath $ parent dest </> $(mkRelFile "README.txt")) $ S.concat
[ "This is the implicit global project, which is used only when 'stack' is run\n"
, "outside of a real project.\n" ]
return (p, dest)
resolver <-
case mresolver of
Nothing -> return $ projectResolver project'
Just aresolver ->
runReaderT (makeConcreteResolver aresolver) miniConfig
let project = project'
{ projectResolver = resolver
, projectCompiler = mcompiler <|> projectCompiler project'
}
(mbp0, loadedResolver) <- flip runReaderT miniConfig $
loadResolver (Just stackYamlFP) (projectResolver project)
let mbp = case projectCompiler project of
Just compiler -> mbp0 { mbpCompilerVersion = compiler }
Nothing -> mbp0
extraPackageDBs <- mapM resolveDir' (projectExtraPackageDBs project)
return BuildConfig
{ bcConfig = config
, bcResolver = loadedResolver
, bcWantedMiniBuildPlan = mbp
, bcGHCVariant = view ghcVariantL miniConfig
, bcPackageEntries = projectPackages project
, bcExtraDeps = projectExtraDeps project
, bcExtraPackageDBs = extraPackageDBs
, bcStackYaml = stackYamlFP
, bcFlags = projectFlags project
, bcImplicitGlobal =
case mproject of
LCSNoProject -> True
LCSProject _ -> False
LCSNoConfig -> False
}
where
miniConfig = loadMiniConfig config
getEmptyProject = do
r <- case mresolver of
Just aresolver -> do
r' <- runReaderT (makeConcreteResolver aresolver) miniConfig
$logInfo ("Using resolver: " <> resolverName r' <> " specified on command line")
return r'
Nothing -> do
r'' <- runReaderT getLatestResolver miniConfig
$logInfo ("Using latest snapshot resolver: " <> resolverName r'')
return r''
return Project
{ projectUserMsg = Nothing
, projectPackages = mempty
, projectExtraDeps = mempty
, projectFlags = mempty
, projectResolver = r
, projectCompiler = Nothing
, projectExtraPackageDBs = []
}
-- | Get packages from EnvConfig, downloading and cloning as necessary.
-- If the packages have already been downloaded, this uses a cached value.
getLocalPackages
:: (StackMiniM env m, HasEnvConfig env)
=> m (Map.Map (Path Abs Dir) TreatLikeExtraDep)
getLocalPackages = do
cacheRef <- view $ envConfigL.to envConfigPackagesRef
mcached <- liftIO $ readIORef cacheRef
case mcached of
Just cached -> return cached
Nothing -> do
menv <- getMinimalEnvOverride
root <- view projectRootL
entries <- view $ buildConfigL.to bcPackageEntries
liftM (Map.fromList . concat) $ mapM
(resolvePackageEntry menv root)
entries
-- | Resolve a PackageEntry into a list of paths, downloading and cloning as
-- necessary.
resolvePackageEntry
:: (StackMiniM env m, HasConfig env)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageEntry
-> m [(Path Abs Dir, TreatLikeExtraDep)]
resolvePackageEntry menv projRoot pe = do
entryRoot <- resolvePackageLocation menv projRoot (peLocation pe)
paths <-
case peSubdirs pe of
[] -> return [entryRoot]
subs -> mapM (resolveDir entryRoot) subs
extraDep <-
case peExtraDepMaybe pe of
Just e -> return e
Nothing ->
case peLocation pe of
PLFilePath _ ->
              -- we don't give a warning on a missing explicit
              -- value here; user intent is almost always
              -- the default for a local directory
return False
PLRemote url _ -> do
$logWarn $ mconcat
[ "No extra-dep setting found for package at URL:\n\n"
, url
, "\n\n"
, "This is usually a mistake, external packages "
, "should typically\nbe treated as extra-deps to avoid "
, "spurious test case failures."
]
return False
return $ map (, extraDep) paths
-- | Resolve a PackageLocation into a path, downloading and cloning as
-- necessary.
resolvePackageLocation
:: (StackMiniM env m, HasConfig env)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageLocation
-> m (Path Abs Dir)
resolvePackageLocation _ projRoot (PLFilePath fp) = resolveDir projRoot fp
resolvePackageLocation menv projRoot (PLRemote url remotePackageType) = do
workDir <- view workDirL
let nameBeforeHashing = case remotePackageType of
RPTHttp{} -> url
RPTGit commit -> T.unwords [url, commit]
RPTHg commit -> T.unwords [url, commit, "hg"]
-- TODO: dedupe with code for snapshot hash?
name = T.unpack $ decodeUtf8 $ S.take 12 $ B64URL.encode $ Mem.convert $ hashWith SHA256 $ encodeUtf8 nameBeforeHashing
root = projRoot </> workDir </> $(mkRelDir "downloaded")
fileExtension' = case remotePackageType of
RPTHttp -> ".http-archive"
_ -> ".unused"
fileRel <- parseRelFile $ name ++ fileExtension'
dirRel <- parseRelDir name
dirRelTmp <- parseRelDir $ name ++ ".tmp"
let file = root </> fileRel
dir = root </> dirRel
exists <- doesDirExist dir
unless exists $ do
ignoringAbsence (removeDirRecur dir)
let cloneAndExtract commandName cloneArgs resetCommand commit = do
ensureDir root
callProcessInheritStderrStdout Cmd
{ cmdDirectoryToRunIn = Just root
, cmdCommandToRun = commandName
, cmdEnvOverride = menv
, cmdCommandLineArguments =
"clone" :
cloneArgs ++
[ T.unpack url
, toFilePathNoTrailingSep dir
]
}
created <- doesDirExist dir
unless created $ throwM $ FailedToCloneRepo commandName
readProcessNull (Just dir) menv commandName
(resetCommand ++ [T.unpack commit, "--"])
`catch` \case
ex@ProcessFailed{} -> do
$logInfo $ "Please ensure that commit " <> commit <> " exists within " <> url
throwM ex
ex -> throwM ex
case remotePackageType of
RPTHttp -> do
let dirTmp = root </> dirRelTmp
ignoringAbsence (removeDirRecur dirTmp)
let fp = toFilePath file
req <- parseUrlThrow $ T.unpack url
_ <- download req file
let tryTar = do
$logDebug $ "Trying to untar " <> T.pack fp
liftIO $ withBinaryFile fp ReadMode $ \h -> do
lbs <- L.hGetContents h
let entries = Tar.read $ GZip.decompress lbs
Tar.unpack (toFilePath dirTmp) entries
tryZip = do
$logDebug $ "Trying to unzip " <> T.pack fp
archive <- fmap Zip.toArchive $ liftIO $ L.readFile fp
liftIO $ Zip.extractFilesFromArchive [Zip.OptDestination
(toFilePath dirTmp)] archive
err = throwM $ UnableToExtractArchive url file
catchAllLog goodpath handler =
catchAll goodpath $ \e -> do
$logDebug $ "Got exception: " <> T.pack (show e)
handler
tryTar `catchAllLog` tryZip `catchAllLog` err
renameDir dirTmp dir
-- Passes in --git-dir to git and --repository to hg, in order
-- to avoid the update commands being applied to the user's
-- repo. See https://github.com/commercialhaskell/stack/issues/2748
RPTGit commit -> cloneAndExtract "git" ["--recursive"] ["--git-dir=.git", "reset", "--hard"] commit
RPTHg commit -> cloneAndExtract "hg" [] ["--repository", ".", "update", "-C"] commit
case remotePackageType of
RPTHttp -> do
x <- listDir dir
case x of
([dir'], []) -> return dir'
(dirs, files) -> do
ignoringAbsence (removeFile file)
ignoringAbsence (removeDirRecur dir)
throwM $ UnexpectedArchiveContents dirs files
_ -> return dir
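-- A small illustrative helper (hypothetical, not used elsewhere in this
-- module): it mirrors the naming scheme above, turning the pre-hash location
-- text into the 12-character directory name used under the "downloaded" dir.
downloadedDirName :: T.Text -> String
downloadedDirName =
    T.unpack . decodeUtf8 . S.take 12 . B64URL.encode . Mem.convert
    . hashWith SHA256 . encodeUtf8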
-- | Remove a path from a package entry. If the package entry contains subdirs, then it removes
-- that subdir. If the package entry points to the path to remove, this function returns
-- Nothing. If the package entry doesn't mention the path to remove, it is returned unchanged.
removePathFromPackageEntry
:: (StackMiniM env m, HasConfig env)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> Path Abs Dir -- ^ path to remove
-> PackageEntry
-> m (Maybe PackageEntry)
-- ^ Nothing if the whole package entry should be removed, otherwise
-- it returns the updated PackageEntry
removePathFromPackageEntry menv projectRoot pathToRemove packageEntry = do
locationPath <- resolvePackageLocation menv projectRoot (peLocation packageEntry)
case peSubdirs packageEntry of
[] -> if locationPath == pathToRemove then return Nothing else return (Just packageEntry)
subdirPaths -> do
let shouldKeepSubdir path = do
resolvedPath <- resolveDir locationPath path
return (pathToRemove /= resolvedPath)
filteredSubdirs <- filterM shouldKeepSubdir subdirPaths
if null filteredSubdirs then return Nothing else return (Just packageEntry {peSubdirs = filteredSubdirs})
-- | Get the stack root, e.g. @~/.stack@, and determine whether the user owns it.
--
-- On Windows, the second value is always 'True'.
determineStackRootAndOwnership
:: (MonadIO m, MonadCatch m)
=> ConfigMonoid
-- ^ Parsed command-line arguments
-> m (Path Abs Dir, Bool)
determineStackRootAndOwnership clArgs = do
stackRoot <- do
case getFirst (configMonoidStackRoot clArgs) of
Just x -> return x
Nothing -> do
mstackRoot <- liftIO $ lookupEnv stackRootEnvVar
case mstackRoot of
Nothing -> getAppUserDataDir stackProgName
Just x -> case parseAbsDir x of
Nothing -> throwString ("Failed to parse STACK_ROOT environment variable (expected absolute directory): " ++ show x)
Just parsed -> return parsed
(existingStackRootOrParentDir, userOwnsIt) <- do
mdirAndOwnership <- findInParents getDirAndOwnership stackRoot
case mdirAndOwnership of
Just x -> return x
Nothing -> throwM (BadStackRoot stackRoot)
when (existingStackRootOrParentDir /= stackRoot) $
if userOwnsIt
then liftIO $ ensureDir stackRoot
else throwM $
Won'tCreateStackRootInDirectoryOwnedByDifferentUser
stackRoot
existingStackRootOrParentDir
stackRoot' <- canonicalizePath stackRoot
return (stackRoot', userOwnsIt)
-- | @'checkOwnership' dir@ throws 'UserDoesn'tOwnDirectory' if @dir@
-- isn't owned by the current user.
--
-- If @dir@ doesn't exist, its parent directory is checked instead.
-- If the parent directory doesn't exist either, @'NoSuchDirectory' ('parent' dir)@
-- is thrown.
checkOwnership :: (MonadIO m, MonadCatch m) => Path Abs Dir -> m ()
checkOwnership dir = do
mdirAndOwnership <- firstJustM getDirAndOwnership [dir, parent dir]
case mdirAndOwnership of
Just (_, True) -> return ()
Just (dir', False) -> throwM (UserDoesn'tOwnDirectory dir')
Nothing ->
(throwM . NoSuchDirectory) $ (toFilePathNoTrailingSep . parent) dir
-- | @'getDirAndOwnership' dir@ returns @'Just' (dir, 'True')@ when @dir@
-- exists and the current user owns it in the sense of 'isOwnedByUser'.
getDirAndOwnership
:: (MonadIO m, MonadCatch m)
=> Path Abs Dir
-> m (Maybe (Path Abs Dir, Bool))
getDirAndOwnership dir = forgivingAbsence $ do
ownership <- isOwnedByUser dir
return (dir, ownership)
-- | Check whether the current user (determined with 'getEffectiveUserID') is
-- the owner for the given path.
--
-- Will always return 'True' on Windows.
isOwnedByUser :: MonadIO m => Path Abs t -> m Bool
isOwnedByUser path = liftIO $ do
if osIsWindows
then return True
else do
fileStatus <- getFileStatus (toFilePath path)
user <- getEffectiveUserID
return (user == fileOwner fileStatus)
where
#ifdef WINDOWS
osIsWindows = True
#else
osIsWindows = False
#endif
-- | 'True' if we are currently running inside a Docker container.
getInContainer :: (MonadIO m) => m Bool
getInContainer = liftIO (isJust <$> lookupEnv inContainerEnvVar)
-- | 'True' if we are currently running inside a Nix shell.
getInNixShell :: (MonadIO m) => m Bool
getInNixShell = liftIO (isJust <$> lookupEnv inNixShellEnvVar)
-- | Determine the extra config file locations which exist.
--
-- Returns the most local first
getExtraConfigs :: (MonadIO m, MonadLogger m)
=> Path Abs File -- ^ use config path
-> m [Path Abs File]
getExtraConfigs userConfigPath = do
defaultStackGlobalConfigPath <- getDefaultGlobalConfigPath
liftIO $ do
env <- getEnvironment
mstackConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_CONFIG" env
mstackGlobalConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_GLOBAL_CONFIG" env
filterM doesFileExist
$ fromMaybe userConfigPath mstackConfig
: maybe [] return (mstackGlobalConfig <|> defaultStackGlobalConfigPath)
-- | Load and parse YAML from the given config file. Throws
-- 'ParseConfigFileException' when there's a decoding error.
loadConfigYaml
:: (MonadIO m, MonadLogger m)
=> (Value -> Yaml.Parser (WithJSONWarnings a)) -> Path Abs File -> m a
loadConfigYaml parser path = do
eres <- loadYaml parser path
case eres of
Left err -> liftIO $ throwM (ParseConfigFileException path err)
Right res -> return res
-- | Load and parse YAML from the given file.
loadYaml
:: (MonadIO m, MonadLogger m)
=> (Value -> Yaml.Parser (WithJSONWarnings a)) -> Path Abs File -> m (Either Yaml.ParseException a)
loadYaml parser path = do
eres <- liftIO $ Yaml.decodeFileEither (toFilePath path)
case eres of
Left err -> return (Left err)
Right val ->
case Yaml.parseEither parser val of
Left err -> return (Left (Yaml.AesonException err))
Right (WithJSONWarnings res warnings) -> do
logJSONWarnings (toFilePath path) warnings
return (Right res)
-- | Get the location of the project config file, if it exists.
getProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> StackYamlLoc (Path Abs File)
-- ^ Override stack.yaml
-> m (LocalConfigStatus (Path Abs File))
getProjectConfig (SYLOverride stackYaml) = return $ LCSProject stackYaml
getProjectConfig SYLDefault = do
env <- liftIO getEnvironment
case lookup "STACK_YAML" env of
Just fp -> do
$logInfo "Getting project config file from STACK_YAML environment"
liftM LCSProject $ resolveFile' fp
Nothing -> do
currDir <- getCurrentDir
maybe LCSNoProject LCSProject <$> findInParents getStackDotYaml currDir
where
getStackDotYaml dir = do
let fp = dir </> stackDotYaml
fp' = toFilePath fp
$logDebug $ "Checking for project config at: " <> T.pack fp'
exists <- doesFileExist fp
if exists
then return $ Just fp
else return Nothing
getProjectConfig SYLNoConfig = return LCSNoConfig
data LocalConfigStatus a
= LCSNoProject
| LCSProject a
| LCSNoConfig
deriving (Show,Functor,Foldable,Traversable)
-- | Find the project config file location, respecting environment variables
-- and otherwise traversing parents. If no config is found, we supply a default
-- based on current directory.
loadProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> StackYamlLoc (Path Abs File)
-- ^ Override stack.yaml
-> m (LocalConfigStatus (Project, Path Abs File, ConfigMonoid))
loadProjectConfig mstackYaml = do
mfp <- getProjectConfig mstackYaml
case mfp of
LCSProject fp -> do
currDir <- getCurrentDir
$logDebug $ "Loading project config file " <>
T.pack (maybe (toFilePath fp) toFilePath (stripDir currDir fp))
LCSProject <$> load fp
LCSNoProject -> do
$logDebug $ "No project config file found, using defaults."
return LCSNoProject
LCSNoConfig -> do
$logDebug "Ignoring config files"
return LCSNoConfig
where
load fp = do
ProjectAndConfigMonoid project config <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp
return (project, fp, config)
-- | Get the location of the default stack configuration file.
-- If a file already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getDefaultGlobalConfigPath
:: (MonadIO m, MonadLogger m)
=> m (Maybe (Path Abs File))
getDefaultGlobalConfigPath =
case (defaultGlobalConfigPath, defaultGlobalConfigPathDeprecated) of
(Just new,Just old) ->
liftM (Just . fst ) $
tryDeprecatedPath
(Just "non-project global configuration file")
doesFileExist
new
old
(Just new,Nothing) -> return (Just new)
_ -> return Nothing
-- | Get the location of the default user configuration file.
-- If a file already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getDefaultUserConfigPath
:: (MonadIO m, MonadLogger m)
=> Path Abs Dir -> m (Path Abs File)
getDefaultUserConfigPath stackRoot = do
(path, exists) <- tryDeprecatedPath
(Just "non-project configuration file")
doesFileExist
(defaultUserConfigPath stackRoot)
(defaultUserConfigPathDeprecated stackRoot)
unless exists $ do
ensureDir (parent path)
liftIO $ S.writeFile (toFilePath path) defaultConfigYaml
return path
-- | Get a fake configuration file location, used when doing a "no
-- config" run (the script command).
getFakeConfigPath
:: (MonadIO m, MonadThrow m)
=> Path Abs Dir -- ^ stack root
-> AbstractResolver
-> m (Path Abs File)
getFakeConfigPath stackRoot ar = do
asString <-
case ar of
ARResolver r -> return $ T.unpack $ resolverName r
_ -> throwM $ InvalidResolverForNoLocalConfig $ show ar
asDir <- parseRelDir asString
let full = stackRoot </> $(mkRelDir "script") </> asDir </> $(mkRelFile "config.yaml")
ensureDir (parent full)
return full
packagesParser :: Parser [String]
packagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
defaultConfigYaml :: S.ByteString
defaultConfigYaml = S.intercalate "\n"
[ "# This file contains default non-project-specific settings for 'stack', used"
, "# in all projects. For more information about stack's configuration, see"
, "# http://docs.haskellstack.org/en/stable/yaml_configuration/"
, ""
, "# The following parameters are used by \"stack new\" to automatically fill fields"
, "# in the cabal config. We recommend uncommenting them and filling them out if"
, "# you intend to use 'stack new'."
, "# See https://docs.haskellstack.org/en/stable/yaml_configuration/#templates"
, "templates:"
, " params:"
, "# author-name:"
, "# author-email:"
, "# copyright:"
, "# github-username:"
]
|
mrkkrp/stack
|
src/Stack/Config.hs
|
bsd-3-clause
| 46,940
| 0
| 30
| 14,317
| 9,127
| 4,585
| 4,542
| 871
| 14
|
{-|
Module : Diplomacy.Phase
Description : Definition of phases of play
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : aovieth@gmail.com
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
module Diplomacy.Phase (
Phase(..)
) where
data Phase where
Typical :: Phase
Retreat :: Phase
Adjust :: Phase
deriving instance Show Phase
deriving instance Eq Phase
deriving instance Ord Phase
deriving instance Enum Phase
deriving instance Bounded Phase
|
avieth/diplomacy
|
Diplomacy/Phase.hs
|
bsd-3-clause
| 610
| 0
| 5
| 120
| 78
| 46
| 32
| 14
| 0
|
-- |
-- Module : SysCmd
-- Copyright : (C) 2013-2015 Jens Petersen
--
-- Maintainer : Jens Petersen <petersen@fedoraproject.org>
-- Stability : alpha
-- Portability : portable
--
-- Explanation: Command line option processing for building RPM
-- packages.
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
module SysCmd (
cmd,
cmd_,
cmdBool,
cmdIgnoreErr,
cmdQuiet,
cmdSilent,
pkgInstall,
repoquery,
rpmInstall,
trySystem,
shell,
sudo,
(+-+)) where
import Control.Monad (unless, void, when)
import Data.Functor ((<$>))
import Data.List ((\\))
import Data.Maybe (fromMaybe, isJust, isNothing)
#if defined(MIN_VERSION_Cabal) && MIN_VERSION_Cabal(1,18,0)
import Distribution.Simple.Program.Find (defaultProgramSearchPath,
findProgramOnSearchPath)
import Distribution.Simple.Utils (die)
#else
import Distribution.Simple.Utils (die, findProgramLocation)
#endif
import Distribution.Verbosity (normal)
import System.Posix.User (getEffectiveUserID)
import System.Process (readProcess, readProcessWithExitCode, system, rawSystem)
import System.Exit (ExitCode(..))
findProgram :: FilePath -> IO (Maybe FilePath)
findProgram prog =
#if defined(MIN_VERSION_Cabal) && MIN_VERSION_Cabal(1,18,0)
findProgramOnSearchPath normal defaultProgramSearchPath prog
#if defined(MIN_VERSION_Cabal) && MIN_VERSION_Cabal(1,23,0)
>>= return . fmap fst
#endif
#else
findProgramLocation normal prog
#endif
requireProgram :: String -> IO ()
requireProgram c = do
mavail <- findProgram c
when (isNothing mavail) $ die (c ++ ": command not found")
optionalProgram :: String -> IO Bool
optionalProgram c = do
mavail <- findProgram c
return $ isJust mavail
cmd_ :: String -> [String] -> IO ()
cmd_ c args = do
requireProgram c
-- putStrLn $ "cmd_:" +-+ c +-+ unwords args
ret <- rawSystem c args
case ret of
ExitSuccess -> return ()
ExitFailure n -> die ("\"" ++ c +-+ unwords args ++ "\"" +-+ "failed with status" +-+ show n)
-- hide stderr
cmdQuiet :: String -> [String] -> IO String
cmdQuiet c args = do
requireProgram c
(ret, out, err) <- readProcessWithExitCode c args ""
case ret of
    ExitSuccess -> return $ removeTrailingNewline out
ExitFailure n -> die ("\"" ++ c +-+ unwords args ++ "\"" +-+ "failed with status" +-+ show n ++ "\n" ++ err)
-- hide stdout
cmdSilent :: String -> [String] -> IO ()
cmdSilent c args = do
requireProgram c
-- putStrLn $ "cmd_:" +-+ c +-+ unwords args
(ret, _, err) <- readProcessWithExitCode c args ""
case ret of
ExitSuccess -> return ()
ExitFailure n -> die ("\"" ++ c +-+ unwords args ++ "\"" +-+ "failed with status" +-+ show n ++ "\n" ++ err)
shell :: String -> IO ()
shell c = cmd_ "sh" ["-c", c]
sudo :: String -> [String] -> IO ()
sudo c as = do
requireProgram "sudo"
requireProgram c
putStrLn $ "sudo" +-+ c +-+ unwords as
cmd_ "sudo" (c:as)
trySystem :: String -> [String] -> IO ()
trySystem c args = do
requireProgram c
void $ rawSystem c args
cmdBool :: String -> IO Bool
cmdBool c = do
requireProgram $ head $ words c
ret <- system $ c +-+ ">/dev/null"
case ret of
ExitSuccess -> return True
ExitFailure _ -> return False
cmd :: FilePath -> [String] -> IO String
cmd c args = do
requireProgram c
removeTrailingNewline <$> readProcess c args ""
cmdIgnoreErr :: FilePath -> [String] -> String -> IO String
cmdIgnoreErr c args input = do
(_exit, out, _err) <- readProcessWithExitCode c args input
return out
removeTrailingNewline :: String -> String
removeTrailingNewline "" = ""
removeTrailingNewline str =
if last str == '\n'
then init str
else str
infixr 4 +-+
(+-+) :: String -> String -> String
"" +-+ s = s
s +-+ "" = s
s +-+ t = s ++ " " ++ t
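-- A quick sketch of how (+-+) behaves (illustrative; 'joinedExample' is not
-- part of the original module): empty operands do not introduce stray spaces.
joinedExample :: String
joinedExample = "sudo" +-+ "" +-+ "dnf" +-+ "install" -- == "sudo dnf install"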
packageManager :: IO String
packageManager = do
havednf <- optionalProgram "dnf"
if havednf
then return "dnf"
else requireProgram "yum" >> return "yum"
repoquery :: [String] -> String -> IO String
repoquery args key = do
havednf <- optionalProgram "dnf"
let (prog, subcmd) = if havednf then ("dnf", ["repoquery", "-q"]) else ("repoquery", [])
cmd prog (subcmd ++ args ++ [key])
pkgInstall :: [String] -> Bool -> IO ()
pkgInstall [] _ = return ()
pkgInstall pkgs hard = do
pkginstaller <- packageManager
putStrLn $ "Running repoquery" +-+ unwords pkgs
repopkgs <- filter (/= "") <$> mapM (repoquery ["--qf", "%{name}"]) pkgs
let missing = pkgs \\ repopkgs
if not (null missing) && hard
then error $ unwords missing +-+ "not available."
else do
unless (null missing) $ do
putStrLn "Unavailable dependencies:"
mapM_ putStrLn missing
unless (null repopkgs) $ do
putStrLn "Uninstalled dependencies:"
mapM_ putStrLn repopkgs
uid <- getEffectiveUserID
maybeSudo <-
if uid == 0
then return Nothing
else do
havesudo <- optionalProgram "sudo"
return $ if havesudo then Just "sudo" else Nothing
let args = map showPkg repopkgs
putStrLn $ "Running:" +-+ fromMaybe "" maybeSudo +-+ pkginstaller +-+ "install" +-+ unwords args
let exec = if hard then cmd_ else trySystem
fedora <- cmd "rpm" ["--eval", "%fedora"]
let nogpgcheck = ["--nogpgcheck" | fedora `elem` ["22", "23"]]
      exec (fromMaybe pkginstaller maybeSudo) $ maybe [] (const [pkginstaller]) maybeSudo ++ "install" : args ++ nogpgcheck
showPkg :: String -> String
showPkg p = if '(' `elem` p then show p else p
rpmInstall :: [String] -> IO ()
rpmInstall rpms = do
pkginstaller <- packageManager
let (inst, arg) = if pkginstaller == "dnf" then ("dnf", "install") else ("yum", "localinstall")
sudo inst $ ["-y", arg] ++ rpms
|
opensuse-haskell/cabal-rpm
|
src/SysCmd.hs
|
gpl-3.0
| 5,922
| 0
| 19
| 1,274
| 1,864
| 946
| 918
| 140
| 5
|
-- Copyright (C) 2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
module BDCS.Label.FileLabels(apply)
where
import Control.Monad.IO.Class(MonadIO)
import Database.Esqueleto(Key, SqlPersistT)
import Data.Maybe(mapMaybe)
import BDCS.DB(Files(..), FileKeyValues(..))
import qualified BDCS.Label.Docs as Docs
import qualified BDCS.Label.Fonts as Fonts
import qualified BDCS.Label.InfoPage as Info
import qualified BDCS.Label.Library as Library
import qualified BDCS.Label.License as License
import qualified BDCS.Label.ManPage as Man
import qualified BDCS.Label.Service as Service
import qualified BDCS.Label.Translation as Xlat
import BDCS.Label.Types(Label(..))
import BDCS.Label.Utils(addLabelKey)
import BDCS.Utils.Monad(concatForM)
checks :: [(Files -> Bool, Files -> Maybe Label)]
checks = [(Docs.matches, Docs.mkLabel),
(Fonts.matches, Fonts.mkLabel),
(Info.matches, Info.mkLabel),
(License.matches, License.mkLabel),
(Library.matches, Library.mkLabel),
(Man.matches, Man.mkLabel),
(Service.matches, Service.mkLabel),
(Xlat.matches, Xlat.mkLabel)]
apply :: MonadIO m => [(Files, Key Files)] -> SqlPersistT m [Key FileKeyValues]
apply lst =
-- Iterate over all the given files.
concatForM lst $ \(f, ndx) -> do
-- Gather up all the tuples from the checks list where the
-- file met the matching criteria.
let successfulChecks = filter (\(matches, _) -> matches f) checks
-- Try to run the maker function from each of those tuples.
-- It's possible for the maker function to return Nothing
-- (though I don't know how that could happen right now),
-- so we need to filter those out.
let labels = mapMaybe (\(_, maker) -> maker f) successfulChecks
-- Now add each of those labels to the database, collecting
-- the resulting IDs.
mapM (\lbl -> addLabelKey ndx lbl Nothing Nothing)
labels
|
atodorov/bdcs
|
src/BDCS/Label/FileLabels.hs
|
lgpl-2.1
| 2,671
| 0
| 15
| 584
| 486
| 306
| 180
| 32
| 1
|
-- |
-- <http://redpitaya.com/ Red Pitaya> library for accessing Fpga
module System.RedPitaya.Fpga (
Registry,
Channel(..),
FpgaSetGet(..),
-- * Housekeeping
-- | various housekeeping and Gpio functions
fpgaId,
dna,
setExpDirP,
getExpDirP,
setExpDirN,
getExpDirN,
setExpOutP,
setExpOutN,
getExpInP,
getExpInN,
-- single pin functions
GpioType(..),
GpioDirection(..),
PinNum,
setExpDir,
GpioValue(..),
setExpOut,
getExpOut,
setLed,
getLed,
-- * Oscilloscope
-- | functions for accessing oscilloscope features
resetWriteSM,
triggerNow,
TriggerSource(..),
setOscTrigger,
triggerDelayEnded,
setTreshold,
getTreshold,
setDelayAfterTrigger,
getDelayAfterTrigger,
setOscDecimationRaw,
getOscDecimationRaw,
OscDecimation(..),
setOscDecimation,
getOscWpCurrent,
getOscWpTrigger,
getOscHysteresis,
setOscHysteresis,
enableOscDecimationAvarage,
setEqualFilter,
getEqualFilter,
setAxiLowerAddress,
getAxiLowerAddress,
setAxiUpperAddress,
getAxiUpperAddress,
setAxiDelayAfterTrigger,
getAxiDelayAfterTrigger,
enableAxiMaster,
getAxiWritePtrTrigger,
getAxiWritePtrCurrent,
getOscBuffer,
-- * Arbitrary Signal Generator (ASG)
getAsgOption,
setAsgOption,
setAsgOptionBExtGatRep,
getAsgOptionBExtGatRep,
setAsgAmplitudeScale,
setAsgAmplitudeOffset,
setAsgCounterWrap,
setAsgCounterStartOffset,
setAsgCounterStep,
getAsgCounterReadPtr,
setAsgCounterReadPtr,
getAsgNumReadCycles,
setAsgNumReadCycles,
getAsgNumRepetitions,
setAsgNumRepetitions,
getAsgBurstDelay,
setAsgBurstDelay,
-- * Plumbing
-- | low level helper functions, used to extend interface
Page,
Offset,
fpgaRead,
fpgaWrite,
fpgaFmap,
writeFpgaArray,
readFpgaArray,
fpgaPageSize
)
where
import Data.Int
import Data.Word
import Data.Bits
import Control.Monad
import Control.Applicative
import Data.Traversable as DT
-- | type representing fpga memory offset from page
type Offset = Int
-- | type representing fpga memory page
type Page = Int
-- | type representing fpga registry
type Registry = Word32
-- | size of Fpga page
fpgaPageSize = 0x100000 :: Offset
-- | FpgaSetGet is a typeclass for accessing the Fpga
class (Monad m) => FpgaSetGet m where
fpgaGet :: Offset -> m Registry
fpgaSet :: Offset -> Registry -> m ()
fpgaGetArray :: Offset -> Int -> m [Registry]
fpgaSetArray :: Offset -> [Registry] -> m ()
    -- default implementations in terms of each other, so the
    -- user can provide only one set and one get if required
fpgaGet off = fmap head $ fpgaGetArray off 1
fpgaSet off v = fpgaSetArray off [v]
fpgaGetArray off len = sequence $ map fpgaGet $ take len [off, off+4 .. ]
fpgaSetArray off d = sequence_ $ zipWith fpgaSet [off, off+4 .. ] d
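-- A small usage sketch built only from the class methods (illustrative; the
-- name 'copyRegistry' is not part of the original module): it copies one
-- registry to another through any 'FpgaSetGet' implementation.
copyRegistry :: (FpgaSetGet m) => Offset -> Offset -> m ()
copyRegistry from to = fpgaGet from >>= fpgaSet to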
-- | Redpitaya Channel A or B.
data Channel = A | B
getTotalOffset :: Page -> Offset -> Offset
getTotalOffset page offset = page * fpgaPageSize + offset
-- | direct read from fpga registry
--fpgaRead :: Page -> Offset -> Fpga FpgaMmapM Registry
fpgaRead :: (FpgaSetGet m) => Page -> Offset -> m Registry
fpgaRead page offset = fpgaGet $ getTotalOffset page offset
-- | direct write in fpga registry
fpgaWrite :: (FpgaSetGet m) => Page -> Offset -> Registry -> m ()
fpgaWrite page offset reg = fpgaSet (getTotalOffset page offset) reg
-- | apply transformation on fpga registry value
fpgaFmap :: (FpgaSetGet m) => Page -> Offset -> (Registry -> Registry) -> m ()
fpgaFmap page offset f = do
reg <- fpgaRead page offset
fpgaWrite page offset (f reg)
-- | write array in fpga memory
writeFpgaArray :: (FpgaSetGet m) => Page -> Offset -> [Registry] -> m ()
writeFpgaArray page offset = fpgaSetArray $ getTotalOffset page offset
-- | read array from fpga memory, passing page, offset and length
readFpgaArray :: (FpgaSetGet a) => Page -> Offset -> Int -> a [Registry]
readFpgaArray page offset = fpgaGetArray ( getTotalOffset page offset )
---------------------------------------------------------
-- * Housekeeping memory map
-- | get ID: 0 prototype, 1 release
fpgaId :: (FpgaSetGet a) => a Registry
fpgaId = fpgaRead 0 0
-- | get DNA
dna :: (FpgaSetGet a) => a Integer
dna = do
dna1 <- fromIntegral <$> fpgaRead 0 4
dna2 <- fromIntegral <$> fpgaRead 0 8
return $ dna1 + (2^32)*dna2
-- | set expansion connector direction P registry
--
-- 1 out , 0 in
setExpDirP :: (FpgaSetGet a) => Registry -> a ()
setExpDirP = fpgaWrite 0 0x10
-- | get expansion connector direction P registry
--
-- 1 out , 0 in
getExpDirP :: (FpgaSetGet a) => a Registry
getExpDirP = fpgaRead 0 0x10
-- | set expansion connector direction N registry
--
-- 1 out , 0 in
setExpDirN :: (FpgaSetGet a) => Registry -> a ()
setExpDirN = fpgaWrite 0 0x14
-- | get expansion connector direction N registry
--
-- 1 out , 0 in
getExpDirN :: (FpgaSetGet a) => a Registry
getExpDirN = fpgaRead 0 0x14
-- | expansion connector P output registry value
setExpOutP :: (FpgaSetGet a) => Registry -> a ()
setExpOutP = fpgaWrite 0 0x18
-- | expansion connector P output registry value
getExpOutP :: (FpgaSetGet a) => a Registry
getExpOutP = fpgaRead 0 0x18
-- | expansion connector N output registry value
setExpOutN :: (FpgaSetGet a) => Registry -> a ()
setExpOutN = fpgaWrite 0 0x1C
-- | expansion connector N output registry value
getExpOutN :: (FpgaSetGet a) => a Registry
getExpOutN = fpgaRead 0 0x1C
-- | expansion connector P input registry value
getExpInP :: (FpgaSetGet a) => a Registry
getExpInP = fpgaRead 0 0x20
-- | expansion connector N input registry value
getExpInN :: (FpgaSetGet a) => a Registry
getExpInN = fpgaRead 0 0x24
-- | type of gpio, which can be either P or N
data GpioType =
-- | P gpio
P |
-- | N gpio
N
deriving (Show)
class ToBool b where
toBool :: b -> Bool
setBitValue :: (Bits a,ToBool b) => b -> Int -> a -> a
setBitValue b
| toBool b = flip setBit
| otherwise = flip clearBit
-- | represent gpio direction, that can be either Input or Output
data GpioDirection =
Input |
Output
deriving (Show)
instance ToBool GpioDirection where
toBool Input = True
toBool Output = False
-- | type representing pin number
type PinNum = Int
-- | Sets direction of pin
setExpDir N d p = setBitValue d p <$> getExpDirN >>= setExpDirN
setExpDir P d p = setBitValue d p <$> getExpDirP >>= setExpDirP
-- | represent gpio value that can be either Hi or Low
data GpioValue =
Low |
Hi
deriving (Show)
instance ToBool GpioValue where
toBool Low = False
toBool Hi = True
-- | Sets output value of a single pin:
-- read using getExpOutN/P, fmap setBitValue over it and bind into setExpOutN/P
setExpOut N v p = setBitValue v p <$> getExpOutN >>= setExpOutN
setExpOut P v p = setBitValue v p <$> getExpOutP >>= setExpOutP
toGpioValue :: Bool -> GpioValue
toGpioValue True = Hi
toGpioValue False = Low
-- | Gets output value of a single pin
getExpOut N p = (\x -> toGpioValue ( testBit x p )) <$> getExpOutN
getExpOut P p = (\x -> toGpioValue ( testBit x p )) <$> getExpOutP
-- | write the led registry
setLed :: (FpgaSetGet f) => Registry -> f ()
setLed = fpgaWrite 0 0x30
-- | read the led registry
getLed :: (FpgaSetGet f) => f Registry
getLed = fpgaRead 0 0x30
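-- Following the same read-modify-write pattern as 'setExpOut' above, a sketch
-- for driving a single LED bit (illustrative only; 'setLedBit' is not part of
-- the original module).
setLedBit :: (FpgaSetGet m) => GpioValue -> PinNum -> m ()
setLedBit v p = setBitValue v p <$> getLed >>= setLed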
---------------------------------------
-- * Oscilloscope
osciloscpeFpgaPage = 1
fpgaWriteOsc :: FpgaSetGet a => Offset -> Registry -> a ()
fpgaWriteOsc = fpgaWrite osciloscpeFpgaPage
fpgaReadOsc :: FpgaSetGet a => Offset -> a Registry
fpgaReadOsc = fpgaRead osciloscpeFpgaPage
-- | reset write state machine for oscilloscope
resetWriteSM :: FpgaSetGet a => a ()
resetWriteSM = fpgaWriteOsc 0 2
-- | start writing data into memory (ARM trigger).
triggerNow :: FpgaSetGet a => a ()
triggerNow = fpgaWriteOsc 0 1
-- | oscilloscope trigger selection
data TriggerSource =
-- | trig immediately
Immediately
-- | ch A threshold positive edge
| ChAPositiveEdge
-- | ch A threshold negative edge
| ChANegativeEdge
-- | ch B threshold positive edge
| ChBPositiveEdge
-- | ch B threshold negative edge
| ChBNegativeEdge
-- | external trigger positive edge - DIO0_P pin
| ExtPositiveEdge
-- | external trigger negative edge
| ExtNegaitveEdge
-- | arbitrary wave generator application positive edge
| AWGPositiveEdge
-- | arbitrary wave generator application negative edge
| AWGNegativeEdge
deriving (Show)
-- | set oscilloscope trigger
setOscTrigger Immediately = setOscTriggerHelper 1
setOscTrigger ChAPositiveEdge = setOscTriggerHelper 2
setOscTrigger ChANegativeEdge = setOscTriggerHelper 3
setOscTrigger ChBPositiveEdge = setOscTriggerHelper 4
setOscTrigger ChBNegativeEdge = setOscTriggerHelper 5
setOscTrigger ExtPositiveEdge = setOscTriggerHelper 6
setOscTrigger ExtNegaitveEdge = setOscTriggerHelper 7
setOscTrigger AWGPositiveEdge = setOscTriggerHelper 8
setOscTrigger AWGNegativeEdge = setOscTriggerHelper 9
setOscTriggerHelper :: FpgaSetGet a => Registry -> a ()
setOscTriggerHelper = fpgaWriteOsc 0x4
-- | when the trigger delay has ended the value becomes 'True'
triggerDelayEnded :: (FpgaSetGet a) => a Bool
triggerDelayEnded = (==0) <$> fpgaReadOsc 0x4
-- | Ch x threshold; triggers when the ADC value crosses this value
setTreshold A = fpgaWriteOsc 0x8
setTreshold B = fpgaWriteOsc 0xc
-- | gets ch x threshold
getTreshold A = fpgaReadOsc 0x8
getTreshold B = fpgaReadOsc 0xc
-- | Number of decimated data after trigger written into memory
setDelayAfterTrigger :: FpgaSetGet a => Registry -> a ()
setDelayAfterTrigger = fpgaWriteOsc 0x10
-- | gets delay after trigger value
getDelayAfterTrigger :: (FpgaSetGet a) => a Registry
getDelayAfterTrigger = fpgaReadOsc 0x10
-- | sets oscilloscope decimation registry; allows only
-- 1, 8, 64, 1024, 8192 or 65536. If another value is written the data will NOT be correct.
setOscDecimationRaw :: (FpgaSetGet a) => Registry -> a ()
setOscDecimationRaw = fpgaWriteOsc 0x14
-- | oscilloscope decimation registry value
getOscDecimationRaw :: (FpgaSetGet a) => a Registry
getOscDecimationRaw = fpgaReadOsc 0x14
-- | oscilloscope decimation
data OscDecimation =
OscDec1
| OscDec8
| OscDec64
| OscDec1024
| OscDec8192
| OscDec65536
deriving (Show)
-- | set oscilloscope decimation
setOscDecimation :: (FpgaSetGet a) => OscDecimation -> a ()
setOscDecimation OscDec1 = setOscDecimationRaw 1
setOscDecimation OscDec8 = setOscDecimationRaw 8
setOscDecimation OscDec64 = setOscDecimationRaw 64
setOscDecimation OscDec1024 = setOscDecimationRaw 1024
setOscDecimation OscDec8192 = setOscDecimationRaw 8192
setOscDecimation OscDec65536 = setOscDecimationRaw 65536
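-- A sketch, not part of the original module, combining the calls above: reset
-- the write state machine, select decimation 64, set the channel A threshold
-- and trigger on a positive-edge crossing. 'armScopeChA' is a hypothetical
-- name and the Monad constraint is an assumption made for standalone sequencing.
armScopeChA :: (FpgaSetGet a, Monad a) => Registry -> a ()
armScopeChA threshold = do
    resetWriteSM
    setOscDecimation OscDec64
    setTreshold A threshold
    setOscTrigger ChAPositiveEdge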
-- | write pointer - current
getOscWpCurrent :: (FpgaSetGet a) => a Registry
getOscWpCurrent = fpgaReadOsc 0x18
-- | write pointer - trigger
getOscWpTrigger :: (FpgaSetGet a) => a Registry
getOscWpTrigger = fpgaReadOsc 0x1C
-- | ch x hysteresis
getOscHysteresis :: (FpgaSetGet a) => Channel -> a Registry
getOscHysteresis A = fpgaReadOsc 0x20
getOscHysteresis B = fpgaReadOsc 0x24
-- | set ch x hysteresis
setOscHysteresis :: (FpgaSetGet a) => Channel -> Registry -> a ()
setOscHysteresis A = fpgaWriteOsc 0x20
setOscHysteresis B = fpgaWriteOsc 0x24
-- | Enable signal averaging at decimation; True enables, False disables
enableOscDecimationAvarage :: (FpgaSetGet a) => Bool -> a ()
enableOscDecimationAvarage True = fpgaWriteOsc 0x28 1
enableOscDecimationAvarage False = fpgaWriteOsc 0x28 0
-- | set ch x equalization filter, takes array with coefficients [AA,BB,KK,PP]
setEqualFilter :: (FpgaSetGet a) => Channel -> [Registry] -> a ()
setEqualFilter A = writeFpgaArray osciloscpeFpgaPage 0x30 . take 4
setEqualFilter B = writeFpgaArray osciloscpeFpgaPage 0x40 . take 4
-- | get ch x equalization filter, return array with coefficients [AA,BB,KK,PP]
getEqualFilter :: (FpgaSetGet a) => Channel -> a [Registry]
getEqualFilter A = readFpgaArray osciloscpeFpgaPage 0x30 4
getEqualFilter B = readFpgaArray osciloscpeFpgaPage 0x40 4
setAxiGeneric' :: (FpgaSetGet a) => Offset -> Channel -> Registry -> a ()
setAxiGeneric' offset A = fpgaWriteOsc offset
setAxiGeneric' offset B = fpgaWriteOsc (offset+0x20)
getAxiGeneric' :: (FpgaSetGet a) => Offset -> Channel -> a Registry
getAxiGeneric' offset A = fpgaReadOsc offset
getAxiGeneric' offset B = fpgaReadOsc (offset+0x20)
-- | starting writing address ch x - CH x AXI lower address
setAxiLowerAddress :: (FpgaSetGet a) => Channel -> Registry -> a ()
setAxiLowerAddress = setAxiGeneric' 0x50
-- | read - starting writing address ch x - CH x AXI lower address
getAxiLowerAddress :: (FpgaSetGet a) => Channel -> a Registry
getAxiLowerAddress = getAxiGeneric' 0x50
-- | ending writing address ch x - CH x AXI upper address
setAxiUpperAddress :: (FpgaSetGet a) => Channel -> Registry -> a ()
setAxiUpperAddress = setAxiGeneric' 0x54
-- | read - ending writing address ch x - CH x AXI upper address
getAxiUpperAddress :: (FpgaSetGet a) => Channel -> a Registry
getAxiUpperAddress = getAxiGeneric' 0x54
-- | read - Number of decimated data after trigger written into memory
getAxiDelayAfterTrigger :: (FpgaSetGet a) => Channel -> a Registry
getAxiDelayAfterTrigger = getAxiGeneric' 0x58
-- | set number of decimated data after trigger written into memory
setAxiDelayAfterTrigger :: (FpgaSetGet a) => Channel -> Registry -> a ()
setAxiDelayAfterTrigger = setAxiGeneric' 0x58
-- | Enable AXI master
enableAxiMaster :: (FpgaSetGet a) => Channel -> Bool -> a ()
enableAxiMaster ch True = setAxiGeneric' 0x5c ch 1
enableAxiMaster ch False = setAxiGeneric' 0x5c ch 0
-- | Write pointer for ch x at time when trigger arrived
getAxiWritePtrTrigger :: FpgaSetGet a => Channel -> a Registry
getAxiWritePtrTrigger = getAxiGeneric' 0x60
-- | current write pointer for ch x
getAxiWritePtrCurrent :: FpgaSetGet a => Channel -> a Registry
getAxiWritePtrCurrent = getAxiGeneric' 0x64
-- | reads the oscilloscope buffer for channel x from the FPGA, given an offset and a length.
-- The buffer should fit within the 16k sample range;
-- less data than requested is returned when reading over the bounds.
getOscBuffer :: FpgaSetGet a => Channel -> Offset -> Int -> a [Registry]
getOscBuffer chan off len = readFpgaArray osciloscpeFpgaPage (off' + (chOff chan)) len'
where
off' = max 0 off
len' = min (0x10000 - off) len
chOff A = 0x10000
chOff B = 0x20000
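-- Usage sketch (hypothetical name): requests past the end of the 16k sample
-- buffer are clamped by the bounds check above, so this returns at most
-- 0x8000 samples of channel A starting at offset 0x8000.
readTailChA :: FpgaSetGet a => a [Registry]
readTailChA = getOscBuffer A 0x8000 0x10000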
--------------------------------------------------------------------
-- ASG
-- | Set a bit range of a registry; the range is given as a tuple of bit offsets:
-- setBits (fromBit,toBit) value rin = ..
setBits (fromBit,toBit) value rin = valueShift .|. hole
where
ones = complement 0 :: Registry
maskShift = xor (shiftL ones fromBit) (shiftL ones (toBit+1))
hole = complement maskShift .&. rin
valueShift = ( shiftL value fromBit ) .&. maskShift
-- | read a bit range from a registry
getBits (fromBit,toBit) value = shiftR andV fromBit
where
ones = complement 0 :: Registry
maskShift = xor (shiftL ones fromBit) (shiftL ones (toBit+1))
andV = maskShift .&. value
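-- A small worked example, not part of the original module: 'setBits' replaces
-- only the selected bit range and 'getBits' reads it back, e.g.
--
--   setBits (16,29) 0x1000 0 == 0x10000000
--   getBits (16,29) (setBits (16,29) 0x1000 r) == 0x1000   (for any r)
--
-- 'asgScaleField' is a hypothetical helper name illustrating the pattern used
-- by 'setAsgAmplitudeScale' below.
asgScaleField :: Registry -> Registry -> Registry
asgScaleField scale old = setBits (16,29) scale old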
asgFpgaPage = 2
fpgaWriteAsg :: (FpgaSetGet a) => Offset -> Registry -> a ()
fpgaWriteAsg = fpgaWrite asgFpgaPage
fpgaReadAsg :: (FpgaSetGet a) => Offset -> a Registry
fpgaReadAsg = fpgaRead asgFpgaPage
fpgaFmapAsg :: (FpgaSetGet a) => Offset -> (Registry -> Registry) -> a ()
fpgaFmapAsg = fpgaFmap asgFpgaPage
fpgaWriteAsgChannel offset A = fpgaWriteAsg offset
fpgaWriteAsgChannel offset B = fpgaWriteAsg ( offset + 0x20)
fpgaReadAsgChannel offset A = fpgaReadAsg offset
fpgaReadAsgChannel offset B = fpgaReadAsg ( offset + 0x20)
fpgaFmapAsgChannel offset f A = fpgaFmapAsg offset f
fpgaFmapAsgChannel offset f B = fpgaFmapAsg ( offset + 0x20) f
-- | get ASG option registry
getAsgOption :: FpgaSetGet a => a Registry
getAsgOption = fpgaReadAsg 0x0
-- | set ASG option registry
setAsgOption :: FpgaSetGet a => Registry -> a ()
setAsgOption = fpgaWriteAsg 0x0
-- | ch B external gated repetitions,
-- registry can be either 0x0 or 0x1
setAsgOptionBExtGatRep :: FpgaSetGet a => Registry -> a ()
setAsgOptionBExtGatRep = fpgaFmapAsg 0 . setBits (24,24)
-- | get ch B external gated repetitions,
-- registry can be either 0x0 or 0x1
getAsgOptionBExtGatRep :: FpgaSetGet a => a Registry
getAsgOptionBExtGatRep = getBits (24,24) <$> getAsgOption
-- TODO: accessors for the remaining ASG option bits and other registries
-- | Ch x amplitude scale (14 bits) - out = (data*scale)/0x2000 + offset
setAsgAmplitudeScale ch reg = fpgaFmapAsgChannel 0x4 ( setBits (16,29) reg ) ch
-- | Ch x amplitude offset (14 bits) - out = (data*scale)/0x2000 + offset
setAsgAmplitudeOffset ch reg = fpgaFmapAsgChannel 0x4 ( setBits (0,13) reg ) ch
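-- Sketch (hypothetical name, assumed Monad constraint): program channel A for
-- half amplitude (scale 0x1000 out of 0x2000) with zero offset using the two
-- field setters above.
setChAHalfAmplitude :: (FpgaSetGet a, Monad a) => a ()
setChAHalfAmplitude = do
    setAsgAmplitudeScale A 0x1000
    setAsgAmplitudeOffset A 0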
-- | Ch x counter wrap - value where the counter wraps around. Depends on the SM wrap setting:
-- if it is 1 the new value is obtained by wrapping, if it is 0 the counter goes to the offset value.
-- 16 bits for decimals.
setAsgCounterWrap :: FpgaSetGet a => Channel -> Registry -> a ()
setAsgCounterWrap = fpgaWriteAsgChannel 0x8
-- | Ch x Counter start offset. Start offset when trigger arrives. 16 bits for decimals.
setAsgCounterStartOffset :: FpgaSetGet a => Channel -> Registry -> a ()
setAsgCounterStartOffset = fpgaWriteAsgChannel 0xc
-- | Ch x counter step. 16 bits for decimals.
setAsgCounterStep :: FpgaSetGet a => Channel -> Registry -> a ()
setAsgCounterStep = fpgaWriteAsgChannel 0x10
-- | get ch x buffer current read pointer
getAsgCounterReadPtr :: FpgaSetGet a => Channel -> a Registry
getAsgCounterReadPtr = fpgaReadAsgChannel 0x14
-- | set ch x buffer current read pointer
setAsgCounterReadPtr :: FpgaSetGet a => Channel -> Registry -> a ()
setAsgCounterReadPtr = fpgaWriteAsgChannel 0x14
-- | get ch x number of read cycles in one burst
getAsgNumReadCycles :: FpgaSetGet a => Channel -> a Registry
getAsgNumReadCycles = fpgaReadAsgChannel 0x18
-- | set ch x number of read cycles in one burst
setAsgNumReadCycles :: FpgaSetGet a => Channel -> Registry -> a ()
setAsgNumReadCycles = fpgaWriteAsgChannel 0x18
-- | get ch x number of burst repetitions
getAsgNumRepetitions :: FpgaSetGet a => Channel -> a Registry
getAsgNumRepetitions = fpgaReadAsgChannel 0x1a
-- | set ch x number of burst repetitions
setAsgNumRepetitions :: FpgaSetGet a => Channel -> Registry -> a ()
setAsgNumRepetitions = fpgaWriteAsgChannel 0x1a
-- | get ch x delay between burst repetitions, granularity=1us
getAsgBurstDelay :: FpgaSetGet a => Channel -> a Registry
getAsgBurstDelay = fpgaReadAsgChannel 0x20
-- | set ch x delay between burst repetitions, granularity=1us
setAsgBurstDelay :: FpgaSetGet a => Channel -> Registry -> a ()
setAsgBurstDelay = fpgaWriteAsgChannel 0x20
|
ra1u/HaskellPitaya
|
src/System/RedPitaya/Fpga.hs
|
lgpl-3.0
| 19,013
| 0
| 11
| 3,796
| 4,171
| 2,218
| 1,953
| 341
| 2
|
{-# Language DatatypeContexts #-}
{-# Language ExistentialQuantification #-}
{-# LAnguage GADTs #-}
{-# LAnguage KindSignatures #-}
data Foo = A
| B
| C
-- | data_or_newtype capi_ctype tycl_hdr constrs deriving
data {-# Ctype "Foo" "bar" #-} F1 = F1
data {-# Ctype "baz" #-} Eq a => F2 a = F2 a
data (Eq a,Ord a) => F3 a = F3 Int a
data F4 a = forall x y. (Eq x,Eq y) => F4 a x y
| forall x y. (Eq x,Eq y) => F4b a x y
data G1 a :: * where
G1A, G1B :: Int -> G1 a
G1C :: Double -> G1 a
data G2 a :: * where
G2A { g2a :: a, g2b :: Int } :: G2 a
G2C :: Double -> G2 a
data (Eq a,Ord a) => G3 a = G3
{ g3A :: Int
, g3B :: Bool
, g3a :: a
} deriving (Eq,Ord)
|
mpickering/ghc-exactprint
|
tests/examples/ghc710-only/DataDecl.hs
|
bsd-3-clause
| 725
| 8
| 10
| 228
| 258
| 148
| 110
| -1
| -1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
#ifdef USE_REFLEX_OPTIMIZER
{-# OPTIONS_GHC -fplugin=Reflex.Optimizer #-}
#endif
module Reflex.DynamicWriter.Base
( DynamicWriterT (..)
, runDynamicWriterT
, withDynamicWriterT
) where
import Control.Monad.Exception
import Control.Monad.Identity
import Control.Monad.IO.Class
import Control.Monad.Morph
import Control.Monad.Primitive
import Control.Monad.Reader
import Control.Monad.Ref
import Control.Monad.State.Strict
import Data.Align
import Data.Dependent.Map (DMap)
import qualified Data.Dependent.Map as DMap
import Data.FastMutableIntMap
import Data.Functor.Misc
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Semigroup (Semigroup(..))
import Data.Some (Some)
import Data.These
import Reflex.Adjustable.Class
import Reflex.Class
import Reflex.DynamicWriter.Class
import Reflex.EventWriter.Class (EventWriter, tellEvent)
import Reflex.Host.Class
import qualified Data.Patch.MapWithMove as MapWithMove
import Reflex.PerformEvent.Class
import Reflex.PostBuild.Class
import Reflex.Query.Class
import Reflex.Requester.Class
import Reflex.TriggerEvent.Class
mapIncrementalMapValues :: (Reflex t, Patch (p v), Patch (p v'), PatchTarget (p v) ~ f v, PatchTarget (p v') ~ f v', Functor p, Functor f) => (v -> v') -> Incremental t (p v) -> Incremental t (p v')
mapIncrementalMapValues f = unsafeMapIncremental (fmap f) (fmap f)
mergeDynIncremental :: (Reflex t, Ord k) => Incremental t (PatchMap k (Dynamic t v)) -> Incremental t (PatchMap k v)
mergeDynIncremental a = unsafeBuildIncremental (mapM (sample . current) =<< sample (currentIncremental a)) $ addedAndRemovedValues <> changedValues
where changedValues = fmap (PatchMap . fmap Just) $ mergeMapIncremental $ mapIncrementalMapValues updated a
addedAndRemovedValues = flip pushAlways (updatedIncremental a) $ \(PatchMap m) -> PatchMap <$> mapM (mapM (sample . current)) m
mergeIntMapDynIncremental :: Reflex t => Incremental t (PatchIntMap (Dynamic t v)) -> Incremental t (PatchIntMap v)
mergeIntMapDynIncremental a = unsafeBuildIncremental (mapM (sample . current) =<< sample (currentIncremental a)) $ addedAndRemovedValues <> changedValues
where changedValues = fmap (PatchIntMap . fmap Just) $ mergeIntMapIncremental $ mapIncrementalMapValues updated a
addedAndRemovedValues = flip pushAlways (updatedIncremental a) $ \(PatchIntMap m) -> PatchIntMap <$> mapM (mapM (sample . current)) m
mergeDynIncrementalWithMove :: forall t k v. (Reflex t, Ord k) => Incremental t (PatchMapWithMove k (Dynamic t v)) -> Incremental t (PatchMapWithMove k v)
mergeDynIncrementalWithMove a = unsafeBuildIncremental (mapM (sample . current) =<< sample (currentIncremental a)) $ alignWith f addedAndRemovedValues changedValues
where changedValues = mergeMapIncrementalWithMove $ mapIncrementalMapValues updated a
addedAndRemovedValues = flip pushAlways (updatedIncremental a) $ fmap unsafePatchMapWithMove . mapM (mapM (sample . current)) . unPatchMapWithMove
f :: These (PatchMapWithMove k v) (Map k v) -> PatchMapWithMove k v
f x = unsafePatchMapWithMove $
let (p, changed) = case x of
This p_ -> (unPatchMapWithMove p_, mempty)
That c -> (mempty, c)
These p_ c -> (unPatchMapWithMove p_, c)
(pWithNewVals, noLongerMoved) = flip runState [] $ forM p $ MapWithMove.nodeInfoMapMFrom $ \case
MapWithMove.From_Insert v -> return $ MapWithMove.From_Insert v
MapWithMove.From_Delete -> return MapWithMove.From_Delete
MapWithMove.From_Move k -> case Map.lookup k changed of
Nothing -> return $ MapWithMove.From_Move k
Just v -> do
modify (k:)
return $ MapWithMove.From_Insert v
noLongerMovedMap = Map.fromList $ fmap (, ()) noLongerMoved
in Map.differenceWith (\e _ -> Just $ MapWithMove.nodeInfoSetTo Nothing e) pWithNewVals noLongerMovedMap --TODO: Check if any in the second map are not covered?
-- | A basic implementation of 'DynamicWriter'.
newtype DynamicWriterT t w m a = DynamicWriterT { unDynamicWriterT :: StateT [Dynamic t w] m a }
-- The list is kept in reverse order
deriving
( Functor
, Applicative
, Monad
, MonadTrans
, MFunctor
, MonadIO
, MonadFix
, MonadAsyncException
, MonadException
)
deriving instance MonadHold t m => MonadHold t (DynamicWriterT t w m)
deriving instance MonadSample t m => MonadSample t (DynamicWriterT t w m)
instance MonadRef m => MonadRef (DynamicWriterT t w m) where
type Ref (DynamicWriterT t w m) = Ref m
newRef = lift . newRef
readRef = lift . readRef
writeRef r = lift . writeRef r
instance MonadAtomicRef m => MonadAtomicRef (DynamicWriterT t w m) where
atomicModifyRef r = lift . atomicModifyRef r
instance MonadReflexCreateTrigger t m => MonadReflexCreateTrigger t (DynamicWriterT t w m) where
newEventWithTrigger = lift . newEventWithTrigger
newFanEventWithTrigger f = lift $ newFanEventWithTrigger f
-- | Run a 'DynamicWriterT' action. The dynamic writer output will be provided
-- along with the result of the action.
runDynamicWriterT :: (MonadFix m, Reflex t, Monoid w) => DynamicWriterT t w m a -> m (a, Dynamic t w)
runDynamicWriterT (DynamicWriterT a) = do
(result, ws) <- runStateT a []
return (result, mconcat $ reverse ws)
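-- A minimal usage sketch, not part of the original module: tell two Dynamics
-- and observe that the returned Dynamic is their monoidal concatenation (here
-- always [1,2,3]). 'exampleWriter' is a hypothetical name; 'pure' uses the
-- Applicative instance of 'Dynamic' provided by the 'Reflex' superclasses.
exampleWriter :: (Reflex t, MonadFix m) => m ((), Dynamic t [Int])
exampleWriter = runDynamicWriterT $ do
  tellDyn (pure [1])
  tellDyn (pure [2, 3])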
instance (Monad m, Monoid w, Reflex t) => DynamicWriter t w (DynamicWriterT t w m) where
tellDyn w = DynamicWriterT $ modify (w :)
instance MonadReader r m => MonadReader r (DynamicWriterT t w m) where
ask = lift ask
local f (DynamicWriterT a) = DynamicWriterT $ mapStateT (local f) a
reader = lift . reader
instance PerformEvent t m => PerformEvent t (DynamicWriterT t w m) where
type Performable (DynamicWriterT t w m) = Performable m
performEvent_ = lift . performEvent_
performEvent = lift . performEvent
instance TriggerEvent t m => TriggerEvent t (DynamicWriterT t w m) where
newTriggerEvent = lift newTriggerEvent
newTriggerEventWithOnComplete = lift newTriggerEventWithOnComplete
newEventWithLazyTriggerWithOnComplete = lift . newEventWithLazyTriggerWithOnComplete
instance PostBuild t m => PostBuild t (DynamicWriterT t w m) where
getPostBuild = lift getPostBuild
instance MonadState s m => MonadState s (DynamicWriterT t w m) where
get = lift get
put = lift . put
instance PrimMonad m => PrimMonad (DynamicWriterT t w m) where
type PrimState (DynamicWriterT t w m) = PrimState m
primitive = lift . primitive
newtype DynamicWriterTLoweredResult t w v a = DynamicWriterTLoweredResult (v a, Dynamic t w)
-- | When the execution of a 'DynamicWriterT' action is adjusted using
-- 'Adjustable', the 'Dynamic' output of that action will also be updated to
-- match.
instance (Adjustable t m, MonadFix m, Monoid w, MonadHold t m, Reflex t) => Adjustable t (DynamicWriterT t w m) where
runWithReplace a0 a' = do
(result0, result') <- lift $ runWithReplace (runDynamicWriterT a0) $ runDynamicWriterT <$> a'
tellDyn . join =<< holdDyn (snd result0) (snd <$> result')
return (fst result0, fst <$> result')
traverseIntMapWithKeyWithAdjust = traverseIntMapWithKeyWithAdjustImpl traverseIntMapWithKeyWithAdjust mergeIntMapDynIncremental
traverseDMapWithKeyWithAdjust = traverseDMapWithKeyWithAdjustImpl traverseDMapWithKeyWithAdjust mapPatchDMap weakenPatchDMapWith mergeDynIncremental
traverseDMapWithKeyWithAdjustWithMove = traverseDMapWithKeyWithAdjustImpl traverseDMapWithKeyWithAdjustWithMove mapPatchDMapWithMove weakenPatchDMapWithMoveWith mergeDynIncrementalWithMove
traverseDMapWithKeyWithAdjustImpl :: forall t w k v' p p' v m. (PatchTarget (p' (Some k) (Dynamic t w)) ~ Map (Some k) (Dynamic t w), PatchTarget (p' (Some k) w) ~ Map (Some k) w, Patch (p' (Some k) w), Patch (p' (Some k) (Dynamic t w)), MonadFix m, Monoid w, Reflex t, MonadHold t m)
=> ( (forall a. k a -> v a -> m (DynamicWriterTLoweredResult t w v' a))
-> DMap k v
-> Event t (p k v)
-> m (DMap k (DynamicWriterTLoweredResult t w v'), Event t (p k (DynamicWriterTLoweredResult t w v')))
)
-> ((forall a. DynamicWriterTLoweredResult t w v' a -> v' a) -> p k (DynamicWriterTLoweredResult t w v') -> p k v')
-> ((forall a. DynamicWriterTLoweredResult t w v' a -> Dynamic t w) -> p k (DynamicWriterTLoweredResult t w v') -> p' (Some k) (Dynamic t w))
-> (Incremental t (p' (Some k) (Dynamic t w)) -> Incremental t (p' (Some k) w))
-> (forall a. k a -> v a -> DynamicWriterT t w m (v' a))
-> DMap k v
-> Event t (p k v)
-> DynamicWriterT t w m (DMap k v', Event t (p k v'))
traverseDMapWithKeyWithAdjustImpl base mapPatch weakenPatchWith mergeMyDynIncremental f (dm0 :: DMap k v) dm' = do
(result0, result') <- lift $ base (\k v -> fmap DynamicWriterTLoweredResult $ runDynamicWriterT $ f k v) dm0 dm'
let getValue (DynamicWriterTLoweredResult (v, _)) = v
getWritten (DynamicWriterTLoweredResult (_, w)) = w
liftedResult0 = DMap.map getValue result0
liftedResult' = ffor result' $ mapPatch getValue
liftedWritten0 :: Map (Some k) (Dynamic t w)
liftedWritten0 = weakenDMapWith getWritten result0
liftedWritten' = ffor result' $ weakenPatchWith getWritten
--TODO: We should be able to improve the performance here by incrementally updating the mconcat of the merged Dynamics
i <- holdIncremental liftedWritten0 liftedWritten'
tellDyn $ fmap (mconcat . Map.elems) $ incrementalToDynamic $ mergeMyDynIncremental i
return (liftedResult0, liftedResult')
traverseIntMapWithKeyWithAdjustImpl :: forall t w v' p p' v m. (PatchTarget (p' (Dynamic t w)) ~ IntMap (Dynamic t w), PatchTarget (p' w) ~ IntMap w, Patch (p' w), Patch (p' (Dynamic t w)), MonadFix m, Monoid w, Reflex t, MonadHold t m, Functor p, p ~ p')
=> ( (IntMap.Key -> v -> m (v', Dynamic t w))
-> IntMap v
-> Event t (p v)
-> m (IntMap (v', Dynamic t w), Event t (p (v', Dynamic t w)))
)
-> (Incremental t (p' (Dynamic t w)) -> Incremental t (p' w))
-> (IntMap.Key -> v -> DynamicWriterT t w m v')
-> IntMap v
-> Event t (p v)
-> DynamicWriterT t w m (IntMap v', Event t (p v'))
traverseIntMapWithKeyWithAdjustImpl base mergeMyDynIncremental f (dm0 :: IntMap v) dm' = do
(result0, result') <- lift $ base (\k v -> runDynamicWriterT $ f k v) dm0 dm'
let liftedResult0 = fmap fst result0
liftedResult' = fmap (fmap fst) result'
liftedWritten0 :: IntMap (Dynamic t w)
liftedWritten0 = fmap snd result0
liftedWritten' = fmap (fmap snd) result'
--TODO: We should be able to improve the performance here by incrementally updating the mconcat of the merged Dynamics
i <- holdIncremental liftedWritten0 liftedWritten'
tellDyn $ fmap (mconcat . IntMap.elems) $ incrementalToDynamic $ mergeMyDynIncremental i
return (liftedResult0, liftedResult')
-- | Map a function over the output of a 'DynamicWriterT'.
withDynamicWriterT :: (Monoid w, Monoid w', Reflex t, MonadHold t m, MonadFix m)
=> (w -> w')
-> DynamicWriterT t w m a
-> DynamicWriterT t w' m a
withDynamicWriterT f dw = do
(r, d) <- lift $ do
(r, d) <- runDynamicWriterT dw
let d' = fmap f d
return (r, d')
tellDyn d
return r
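-- A sketch, not part of the original module: render an Int log as strings
-- before exposing it to the caller. 'exampleMapped' is a hypothetical name.
exampleMapped :: (Reflex t, MonadHold t m, MonadFix m)
              => DynamicWriterT t [String] m ()
exampleMapped = withDynamicWriterT (map show) $ tellDyn (pure [1 :: Int])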
instance Requester t m => Requester t (DynamicWriterT t w m) where
type Request (DynamicWriterT t w m) = Request m
type Response (DynamicWriterT t w m) = Response m
requesting = lift . requesting
requesting_ = lift . requesting_
instance (MonadQuery t q m, Monad m) => MonadQuery t q (DynamicWriterT t w m) where
tellQueryIncremental = lift . tellQueryIncremental
askQueryResult = lift askQueryResult
queryIncremental = lift . queryIncremental
instance EventWriter t w m => EventWriter t w (DynamicWriterT t v m) where
tellEvent = lift . tellEvent
|
ryantrinkle/reflex
|
src/Reflex/DynamicWriter/Base.hs
|
bsd-3-clause
| 12,313
| 0
| 23
| 2,373
| 4,073
| 2,083
| 1,990
| 201
| 6
|
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
module Main where
import Control.Monad (when,unless,foldM)
import Data.List (isPrefixOf,partition,nub)
import Data.Monoid (Monoid(..),Endo(..))
import System.Console.GetOpt
(getOpt,usageInfo,ArgOrder(..),OptDescr(..),ArgDescr(..))
import System.Directory
(getDirectoryContents,doesDirectoryExist,createDirectoryIfMissing
,canonicalizePath)
import System.Environment (getArgs,withArgs,getProgName)
import System.Exit (exitFailure,exitSuccess)
import System.FilePath
((</>),(<.>),takeExtension,splitFileName,splitDirectories,pathSeparator
,isRelative)
import System.Process
(createProcess,CreateProcess(..),StdStream(..),proc,waitForProcess)
import System.IO
(hGetContents,IOMode(..),withFile,SeekMode(..),Handle,hSetBuffering
,BufferMode(..))
import Test.Framework (defaultMain,Test,testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (assertFailure)
import qualified Control.Exception as X
import qualified Data.Map as Map
main :: IO ()
main = do
opts <- parseOptions
createDirectoryIfMissing True (optResultDir opts)
resultsDir <- canonicalizePath (optResultDir opts)
let opts' = opts { optResultDir = resultsDir }
files <- findTests (optTests opts')
withArgs (optOther opts') (defaultMain (generateTests opts' files))
-- Command Line Options --------------------------------------------------------
data TestStrategy
= TestDiscover FilePath
| TestFile FilePath
deriving (Show)
data Options = Options
{ optCryptol :: String
, optOther :: [String]
, optHelp :: Bool
, optResultDir :: FilePath
, optTests :: [TestStrategy]
, optDiff :: String
} deriving (Show)
defaultOptions :: Options
defaultOptions = Options
{ optCryptol = "cryptol-2"
, optOther = []
, optHelp = False
, optResultDir = "output"
, optTests = []
, optDiff = "meld"
}
setHelp :: Endo Options
setHelp = Endo (\ opts -> opts { optHelp = True } )
setDiff :: String -> Endo Options
setDiff diff = Endo (\opts -> opts { optDiff = diff })
setCryptol :: String -> Endo Options
setCryptol path = Endo (\ opts -> opts { optCryptol = path } )
addOther :: String -> Endo Options
addOther arg = Endo (\ opts -> opts { optOther = optOther opts ++ [arg] } )
setResultDir :: String -> Endo Options
setResultDir path = Endo (\ opts -> opts { optResultDir = path })
addDiscover :: String -> Endo Options
addDiscover path =
Endo (\ opts -> opts { optTests = TestDiscover path : optTests opts })
addTestFile :: String -> Endo Options
addTestFile path =
Endo (\ opts -> opts { optTests = TestFile path : optTests opts })
options :: [OptDescr (Endo Options)]
options =
[ Option "c" ["cryptol"] (ReqArg setCryptol "PATH")
"the cryptol executable to use"
, Option "d" ["base"] (ReqArg addDiscover "PATH")
"the base directory for test discovery"
, Option "r" ["result-dir"] (ReqArg setResultDir "PATH")
"the result directory for test runs"
, Option "p" ["diff-prog"] (ReqArg setDiff "PROG")
"use this diffing program on failures"
, Option "T" [] (ReqArg addOther "STRING")
"add an argument to pass to the test-runner main"
, Option "h" ["help"] (NoArg setHelp)
"display this message"
]
parseOptions :: IO Options
parseOptions = do
args <- getArgs
case getOpt (ReturnInOrder addTestFile) options args of
(es,_,[]) -> do
let opts = appEndo (mconcat es) defaultOptions
when (optHelp opts) $ do
displayUsage []
exitSuccess
-- canonicalize the path to cryptol, if it's relative
cryptol' <- if pathSeparator `elem` optCryptol opts
&& isRelative (optCryptol opts)
then canonicalizePath (optCryptol opts)
else return (optCryptol opts)
return opts
{ optCryptol = cryptol'
, optTests = reverse (optTests opts)
}
(_,_,errs) -> do
displayUsage errs
exitFailure
displayUsage :: [String] -> IO ()
displayUsage errs = do
prog <- getProgName
let banner = unlines (errs ++ ["Usage: " ++ prog ++ " [OPTIONS] [FILES]"])
putStrLn (usageInfo banner options)
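-- A small illustration, not part of the original test runner: each flag above
-- contributes an 'Endo Options'; 'mconcat' composes the updates and 'appEndo'
-- applies the composite to 'defaultOptions'. 'exampleOpts' and the paths are
-- hypothetical.
exampleOpts :: Options
exampleOpts =
  appEndo (mconcat [ setCryptol "/usr/local/bin/cryptol"
                   , addDiscover "tests"
                   , setResultDir "output" ])
          defaultOptions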
-- Test Generation -------------------------------------------------------------
-- | Write the output of a run of cryptol-2 to this handle. Stdout and stderr
-- will both be given the handle provided; stdin is inherited.
cryptol2 :: Options -> Handle -> FilePath -> [String] -> IO ()
cryptol2 opts hout path args = do
(_, _, _, ph) <- createProcess (proc (optCryptol opts) args)
{ cwd = Just path
, std_out = UseHandle hout
, std_in = Inherit
, std_err = UseHandle hout
}
_ <- waitForProcess ph
return ()
generateTests :: Options -> TestFiles -> [Test]
generateTests opts = loop ""
where
loop dir (TestFiles m fs) = as ++ grouped
where
as = map (generateAssertion opts dir) fs
grouped = [ testGroup path (loop (dir </> path) t)
| (path,t) <- Map.toList m ]
generateAssertion :: Options -> FilePath -> FilePath -> Test
generateAssertion opts dir file = testCase file $ do
createDirectoryIfMissing True resultDir
withFile resultOut WriteMode $ \ hout ->
do hSetBuffering hout NoBuffering
cryptol2 opts hout dir ["-b",file]
out <- readFile resultOut
expected <- readFile goldFile
checkOutput expected out
where
goldFile = dir </> file <.> "stdout"
resultOut = resultDir </> file <.> "stdout"
resultDir = optResultDir opts </> dir
indent str = unlines (map (" " ++) (lines str))
checkOutput expected out
| expected == out = return ()
| otherwise = assertFailure $ unwords [ optDiff opts, goldFile, resultOut ]
-- Test Discovery --------------------------------------------------------------
findTests :: [TestStrategy] -> IO TestFiles
findTests = foldM step mempty
where
step tests strategy = do
tests' <- evalStrategy strategy
return (tests `mappend` tests')
evalStrategy strategy = case strategy of
TestDiscover path -> testDiscovery path
TestFile path ->
let (dir,file) = splitFileName path
dirs = splitDirectories dir
insert d t = TestFiles (Map.singleton d t) []
in return $! foldr insert (TestFiles Map.empty [file]) dirs
-- | Files that end in .icry are cryptol test cases.
isTestCase :: FilePath -> Bool
isTestCase path = takeExtension path == ".icry"
-- | Directory structure of the discovered tests. Each entry in the map
-- represents a single folder, with the top-level list representing tests
-- inside the base directory.
data TestFiles = TestFiles (Map.Map FilePath TestFiles) [FilePath]
deriving (Show)
instance Monoid TestFiles where
mempty = TestFiles Map.empty []
mappend (TestFiles lt lf) (TestFiles rt rf) = TestFiles mt mf
where
mt = Map.unionWith mappend lt rt
mf = nub (lf ++ rf)
nullTests :: TestFiles -> Bool
nullTests (TestFiles m as) = null as && Map.null m
-- | Find test cases to run.
testDiscovery :: FilePath -> IO TestFiles
testDiscovery from = do
subTests <- process from =<< getDirectoryContents from
let insert d t = TestFiles (Map.singleton d t) []
return (foldr insert subTests (splitDirectories from))
where
process base contents = do
let (tests,files) = partition isTestCase
$ filter (not . isDotFile) contents
subdirs <- mapM (resolve base) files
let subTests = Map.fromList [ (p,m) | (p,m) <- subdirs
, not (nullTests m) ]
return (TestFiles subTests tests)
loop base = do
isDir <- doesDirectoryExist base
if isDir
then do subTests <- process base =<< getDirectoryContents base
return (TestFiles (Map.singleton base subTests) [])
else return mempty
resolve base p = do
let path = base </> p
tests <- loop path
return (p,tests)
-- Utilities -------------------------------------------------------------------
-- | Screen out dotfiles.
isDotFile :: FilePath -> Bool
isDotFile path = "." `isPrefixOf` path
|
TomMD/cryptol
|
tests/Main.hs
|
bsd-3-clause
| 8,332
| 0
| 17
| 1,929
| 2,501
| 1,318
| 1,183
| 184
| 3
|
module Rede.SpdyProtocol.Framing.WindowUpdate(
WindowUpdateFrame(..)
,WindowSizeValidFlags(..)
) where
import Data.Binary (Binary, get, put)
import Data.Binary.Get (getWord32be)
import Data.Binary.Put (putWord32be)
import Rede.SpdyProtocol.Framing.Frame
data WindowSizeValidFlags = None_WSVF
deriving (Show, Enum)
data WindowUpdateFrame =
WindowUpdateFrame {
prologue:: ControlFrame WindowSizeValidFlags
, streamId:: Int
, deltaWindowSize:: Int
}
deriving Show
instance HasStreamId WindowUpdateFrame where
streamIdFromFrame = streamId
instance Binary WindowUpdateFrame where
put wsf = do
let
new_prologue = resetControlFrameSize (prologue wsf) 16
put $ new_prologue
putWord32be $ fromIntegral $ streamId wsf
putWord32be $ fromIntegral $ deltaWindowSize wsf
get = do
pr <- get
stream_id <- getWord32be
delta_window_size <- getWord32be
return WindowUpdateFrame {
prologue = pr
, streamId = fromIntegral stream_id
, deltaWindowSize = fromIntegral delta_window_size
}
|
alcidesv/ReH
|
hs-src/Rede/SpdyProtocol/Framing/WindowUpdate.hs
|
bsd-3-clause
| 1,101
| 18
| 13
| 246
| 280
| 153
| 127
| 32
| 0
|
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Yi.Keymap.Vim.Ex.Commands.BufferNew
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
module Yi.Keymap.Vim.Ex.Commands.BufferNew (parse) where
import Control.Applicative (Alternative(..))
import Control.Monad (void)
import qualified Data.Attoparsec.Text as P (anyChar, char, string)
import Data.List (null)
import qualified Data.Text as T (pack)
import Yi.Buffer (BufferId (MemBuffer))
import Yi.Editor (newEmptyBufferE, newTempBufferE, switchToBufferE)
import Yi.Keymap (Action (EditorA))
import Yi.Keymap.Vim.Common (EventString)
import qualified Yi.Keymap.Vim.Ex.Commands.Common as Common (parse, pureExCommand)
import Yi.Keymap.Vim.Ex.Types (ExCommand (cmdAction, cmdShow))
parse :: EventString -> Maybe ExCommand
parse = Common.parse $ do
void $ P.string "new"
n <- (some (P.char ' ') *> many (P.anyChar)) <|>
("" <$ many (P.char ' '))
return $ Common.pureExCommand {
cmdShow = "new"
, cmdAction = EditorA $ do
b <- if null n
then newTempBufferE
else newEmptyBufferE (MemBuffer $ T.pack n)
switchToBufferE b
}
|
noughtmare/yi
|
yi-keymap-vim/src/Yi/Keymap/Vim/Ex/Commands/BufferNew.hs
|
gpl-2.0
| 1,500
| 0
| 20
| 509
| 340
| 204
| 136
| 25
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.ResyncMFADevice
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Synchronizes the specified MFA device with AWS servers.
--
-- For more information about creating and working with virtual MFA devices, go
-- to <http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_VirtualMFA.html Using a Virtual MFA Device> in the /Using IAM/ guide.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_ResyncMFADevice.html>
module Network.AWS.IAM.ResyncMFADevice
(
-- * Request
ResyncMFADevice
-- ** Request constructor
, resyncMFADevice
-- ** Request lenses
, rmfadAuthenticationCode1
, rmfadAuthenticationCode2
, rmfadSerialNumber
, rmfadUserName
-- * Response
, ResyncMFADeviceResponse
-- ** Response constructor
, resyncMFADeviceResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
data ResyncMFADevice = ResyncMFADevice
{ _rmfadAuthenticationCode1 :: Text
, _rmfadAuthenticationCode2 :: Text
, _rmfadSerialNumber :: Text
, _rmfadUserName :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'ResyncMFADevice' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rmfadAuthenticationCode1' @::@ 'Text'
--
-- * 'rmfadAuthenticationCode2' @::@ 'Text'
--
-- * 'rmfadSerialNumber' @::@ 'Text'
--
-- * 'rmfadUserName' @::@ 'Text'
--
resyncMFADevice :: Text -- ^ 'rmfadUserName'
-> Text -- ^ 'rmfadSerialNumber'
-> Text -- ^ 'rmfadAuthenticationCode1'
-> Text -- ^ 'rmfadAuthenticationCode2'
-> ResyncMFADevice
resyncMFADevice p1 p2 p3 p4 = ResyncMFADevice
{ _rmfadUserName = p1
, _rmfadSerialNumber = p2
, _rmfadAuthenticationCode1 = p3
, _rmfadAuthenticationCode2 = p4
}
-- | An authentication code emitted by the device.
rmfadAuthenticationCode1 :: Lens' ResyncMFADevice Text
rmfadAuthenticationCode1 =
lens _rmfadAuthenticationCode1
(\s a -> s { _rmfadAuthenticationCode1 = a })
-- | A subsequent authentication code emitted by the device.
rmfadAuthenticationCode2 :: Lens' ResyncMFADevice Text
rmfadAuthenticationCode2 =
lens _rmfadAuthenticationCode2
(\s a -> s { _rmfadAuthenticationCode2 = a })
-- | Serial number that uniquely identifies the MFA device.
rmfadSerialNumber :: Lens' ResyncMFADevice Text
rmfadSerialNumber =
lens _rmfadSerialNumber (\s a -> s { _rmfadSerialNumber = a })
-- | The name of the user whose MFA device you want to resynchronize.
rmfadUserName :: Lens' ResyncMFADevice Text
rmfadUserName = lens _rmfadUserName (\s a -> s { _rmfadUserName = a })
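-- A usage sketch, not part of the generated module: build a request with the
-- smart constructor; individual fields can then be adjusted through the lenses
-- above. 'exampleResync' and all literal values are hypothetical.
exampleResync :: ResyncMFADevice
exampleResync = resyncMFADevice "bob" "GAHT12345678" "123456" "654321"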
data ResyncMFADeviceResponse = ResyncMFADeviceResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'ResyncMFADeviceResponse' constructor.
resyncMFADeviceResponse :: ResyncMFADeviceResponse
resyncMFADeviceResponse = ResyncMFADeviceResponse
instance ToPath ResyncMFADevice where
toPath = const "/"
instance ToQuery ResyncMFADevice where
toQuery ResyncMFADevice{..} = mconcat
[ "AuthenticationCode1" =? _rmfadAuthenticationCode1
, "AuthenticationCode2" =? _rmfadAuthenticationCode2
, "SerialNumber" =? _rmfadSerialNumber
, "UserName" =? _rmfadUserName
]
instance ToHeaders ResyncMFADevice
instance AWSRequest ResyncMFADevice where
type Sv ResyncMFADevice = IAM
type Rs ResyncMFADevice = ResyncMFADeviceResponse
request = post "ResyncMFADevice"
response = nullResponse ResyncMFADeviceResponse
|
romanb/amazonka
|
amazonka-iam/gen/Network/AWS/IAM/ResyncMFADevice.hs
|
mpl-2.0
| 4,568
| 0
| 9
| 1,009
| 536
| 327
| 209
| 71
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
<title>Technology detection | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>搜索</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/wappalyzer/resources/help_zh_CN/helpset_zh_CN.hs
|
apache-2.0
| 982
| 87
| 51
| 160
| 396
| 209
| 187
| -1
| -1
|
main = print "exe1"
|
dcreager/cabal
|
tests/systemTests/sdist/Exe1.hs
|
bsd-3-clause
| 20
| 0
| 5
| 4
| 9
| 4
| 5
| 1
| 1
|
{-| Cluster information printer.
-}
{-
Copyright (C) 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Program.Hinfo
( main
, options
, arguments
) where
import Control.Monad
import Data.List
import System.IO
import Text.Printf (printf)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Cluster.Utils as ClusterUtils
import qualified Ganeti.HTools.Cluster.Metrics as Metrics
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.Common
import Ganeti.HTools.CLI
import Ganeti.HTools.ExtLoader
import Ganeti.HTools.Loader
import Ganeti.Utils
-- | Options list and functions.
options :: IO [OptType]
options = do
luxi <- oLuxiSocket
return
[ oPrintNodes
, oPrintInsts
, oDataFile
, oRapiMaster
, luxi
, oIAllocSrc
, oVerbose
, oQuiet
, oOfflineNode
, oIgnoreDyn
, oMonD
, oMonDDataFile
]
-- | The list of arguments supported by the program.
arguments :: [ArgCompletion]
arguments = []
-- | Group information data-type.
data GroupInfo = GroupInfo { giName :: String
, giNodeCount :: Int
, giInstCount :: Int
, giBadNodes :: Int
, giBadInsts :: Int
, giN1Status :: Bool
, giScore :: Double
}
-- | Node group statistics.
calcGroupInfo :: Group.Group
-> Node.List
-> Instance.List
-> GroupInfo
calcGroupInfo g nl il =
let nl_size = Container.size nl
il_size = Container.size il
(bad_nodes, bad_instances) = Cluster.computeBadItems nl il
bn_size = length bad_nodes
bi_size = length bad_instances
n1h = bn_size == 0
score = Metrics.compCV nl
in GroupInfo (Group.name g) nl_size il_size bn_size bi_size n1h score
-- | Helper to format one group row result.
groupRowFormatHelper :: GroupInfo -> [String]
groupRowFormatHelper gi =
[ giName gi
, printf "%d" $ giNodeCount gi
, printf "%d" $ giInstCount gi
, printf "%d" $ giBadNodes gi
, printf "%d" $ giBadInsts gi
, show $ giN1Status gi
, printf "%.8f" $ giScore gi
]
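-- A small worked example, not part of the original program ('exampleRow' and
-- the values are hypothetical): a healthy 3-node, 10-instance group formats as
-- ["default","3","10","0","0","True","0.05000000"].
exampleRow :: [String]
exampleRow = groupRowFormatHelper (GroupInfo "default" 3 10 0 0 True 0.05)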
-- | Print node group information.
showGroupInfo :: Int -> Group.List -> Node.List -> Instance.List -> IO ()
showGroupInfo verbose gl nl il = do
let cgrs = map (\(gdx, (gnl, gil)) ->
calcGroupInfo (Container.find gdx gl) gnl gil) $
ClusterUtils.splitCluster nl il
cn1h = all giN1Status cgrs
grs = map groupRowFormatHelper cgrs
header = ["Group", "Nodes", "Instances", "Bad_Nodes", "Bad_Instances",
"N+1", "Score"]
when (verbose > 1) $
printf "Node group information:\n%s"
(printTable " " header grs [False, True, True, True, True,
False, True])
printf "Cluster is N+1 %s\n" $ if cn1h then "happy" else "unhappy"
-- | Gather and print split instances.
splitInstancesInfo :: Int -> Node.List -> Instance.List -> IO ()
splitInstancesInfo verbose nl il = do
let split_insts = Cluster.findSplitInstances nl il
if null split_insts
then
when (verbose > 1) $
putStrLn "No split instances found"::IO ()
else do
putStrLn "Found instances belonging to multiple node groups:"
mapM_ (\i -> hPutStrLn stderr $ " " ++ Instance.name i) split_insts
-- | Print common (interesting) information.
commonInfo :: Int -> Group.List -> Node.List -> Instance.List -> IO ()
commonInfo verbose gl nl il = do
when (Container.null il && verbose > 1) $
printf "Cluster is empty.\n"::IO ()
let nl_size = Container.size nl
il_size = Container.size il
gl_size = Container.size gl
printf "Loaded %d %s, %d %s, %d %s\n"
nl_size (plural nl_size "node" "nodes")
il_size (plural il_size "instance" "instances")
gl_size (plural gl_size "node group" "node groups")::IO ()
let csf = commonSuffix nl il
when (not (null csf) && verbose > 2) $
printf "Note: Stripping common suffix of '%s' from names\n" csf
-- | Main function.
main :: Options -> [String] -> IO ()
main opts args = do
unless (null args) $ exitErr "This program doesn't take any arguments."
let verbose = optVerbose opts
shownodes = optShowNodes opts
showinsts = optShowInsts opts
(ClusterData gl fixed_nl ilf ctags ipol) <- loadExternalData opts
putStrLn $ "Loaded cluster tags: " ++ intercalate "," ctags
when (verbose > 2) .
putStrLn $ "Loaded cluster ipolicy: " ++ show ipol
nlf <- setNodeStatus opts fixed_nl
commonInfo verbose gl nlf ilf
splitInstancesInfo verbose nlf ilf
showGroupInfo verbose gl nlf ilf
maybePrintInsts showinsts "Instances" (Cluster.printInsts nlf ilf)
maybePrintNodes shownodes "Cluster" (Cluster.printNodes nlf)
printf "Cluster coefficients:\n%s" (Metrics.printStats " " nlf)::IO ()
printf "Cluster score: %.8f\n" (Metrics.compCV nlf)
|
leshchevds/ganeti
|
src/Ganeti/HTools/Program/Hinfo.hs
|
bsd-2-clause
| 6,538
| 31
| 17
| 1,672
| 1,325
| 716
| 609
| 122
| 2
|
module Stack.Types.CompilerBuild
(CompilerBuild(..)
,compilerBuildName
,compilerBuildSuffix
,parseCompilerBuild
) where
import Control.Monad.Catch (MonadThrow)
import Data.Aeson.Extended (FromJSON, parseJSON, withText)
import Data.Text as T
data CompilerBuild
= CompilerBuildStandard
| CompilerBuildSpecialized String
deriving (Show)
instance FromJSON CompilerBuild where
-- Strange structuring is to give consistent error messages
parseJSON =
withText
"CompilerBuild"
(either (fail . show) return . parseCompilerBuild . T.unpack)
-- | Descriptive name for compiler build
compilerBuildName :: CompilerBuild -> String
compilerBuildName CompilerBuildStandard = "standard"
compilerBuildName (CompilerBuildSpecialized s) = s
-- | Suffix to use for filenames/directories constructed with compiler build
compilerBuildSuffix :: CompilerBuild -> String
compilerBuildSuffix CompilerBuildStandard = ""
compilerBuildSuffix (CompilerBuildSpecialized s) = '-' : s
-- | Parse compiler build from a String.
parseCompilerBuild :: (MonadThrow m) => String -> m CompilerBuild
parseCompilerBuild "" = return CompilerBuildStandard
parseCompilerBuild "standard" = return CompilerBuildStandard
parseCompilerBuild name = return (CompilerBuildSpecialized name)
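-- A usage sketch, not part of the original module: parsing a non-standard
-- build name and rendering its directory suffix ('exampleSuffix' and the
-- build name are hypothetical; 'Maybe' satisfies the 'MonadThrow' constraint).
exampleSuffix :: Maybe String
exampleSuffix = fmap compilerBuildSuffix (parseCompilerBuild "integersimple")
-- evaluates to Just "-integersimple"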
|
mrkkrp/stack
|
src/Stack/Types/CompilerBuild.hs
|
bsd-3-clause
| 1,335
| 0
| 12
| 234
| 252
| 140
| 112
| 27
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.InstallDirs
-- Copyright : Isaac Jones 2003-2004
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This manages everything to do with where files get installed (though does
-- not get involved with actually doing any installation). It provides an
-- 'InstallDirs' type which is a set of directories for where to install
-- things. It also handles the fact that we use templates in these install
-- dirs. For example most install dirs are relative to some @$prefix@ and by
-- changing the prefix all other dirs still end up changed appropriately. So it
-- provides a 'PathTemplate' type and functions for substituting for these
-- templates.
module Distribution.Simple.InstallDirs (
InstallDirs(..),
InstallDirTemplates,
defaultInstallDirs,
combineInstallDirs,
absoluteInstallDirs,
CopyDest(..),
prefixRelativeInstallDirs,
substituteInstallDirTemplates,
PathTemplate,
PathTemplateVariable(..),
PathTemplateEnv,
toPathTemplate,
fromPathTemplate,
substPathTemplate,
initialPathTemplateEnv,
platformTemplateEnv,
compilerTemplateEnv,
packageTemplateEnv,
abiTemplateEnv,
installDirsTemplateEnv,
) where
import Distribution.Compat.Binary (Binary)
import Distribution.Compat.Semigroup as Semi
import Distribution.Package
import Distribution.System
import Distribution.Compiler
import Distribution.Text
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import GHC.Generics (Generic)
import System.Directory (getAppUserDataDirectory)
import System.FilePath ((</>), isPathSeparator, pathSeparator)
import System.FilePath (dropDrive)
#if mingw32_HOST_OS
import Foreign
import Foreign.C
#endif
-- ---------------------------------------------------------------------------
-- Installation directories
-- | The directories where we will install files for packages.
--
-- We have several different directories for different types of files since
-- many systems have conventions whereby different types of files in a package
-- are installed in different directories. This is particularly the case on
-- Unix style systems.
--
data InstallDirs dir = InstallDirs {
prefix :: dir,
bindir :: dir,
libdir :: dir,
libsubdir :: dir,
dynlibdir :: dir,
libexecdir :: dir,
includedir :: dir,
datadir :: dir,
datasubdir :: dir,
docdir :: dir,
mandir :: dir,
htmldir :: dir,
haddockdir :: dir,
sysconfdir :: dir
} deriving (Eq, Read, Show, Functor, Generic)
instance Binary dir => Binary (InstallDirs dir)
instance (Semigroup dir, Monoid dir) => Monoid (InstallDirs dir) where
mempty = gmempty
mappend = (Semi.<>)
instance Semigroup dir => Semigroup (InstallDirs dir) where
(<>) = gmappend
combineInstallDirs :: (a -> b -> c)
-> InstallDirs a
-> InstallDirs b
-> InstallDirs c
combineInstallDirs combine a b = InstallDirs {
prefix = prefix a `combine` prefix b,
bindir = bindir a `combine` bindir b,
libdir = libdir a `combine` libdir b,
libsubdir = libsubdir a `combine` libsubdir b,
dynlibdir = dynlibdir a `combine` dynlibdir b,
libexecdir = libexecdir a `combine` libexecdir b,
includedir = includedir a `combine` includedir b,
datadir = datadir a `combine` datadir b,
datasubdir = datasubdir a `combine` datasubdir b,
docdir = docdir a `combine` docdir b,
mandir = mandir a `combine` mandir b,
htmldir = htmldir a `combine` htmldir b,
haddockdir = haddockdir a `combine` haddockdir b,
sysconfdir = sysconfdir a `combine` sysconfdir b
}
appendSubdirs :: (a -> a -> a) -> InstallDirs a -> InstallDirs a
appendSubdirs append dirs = dirs {
libdir = libdir dirs `append` libsubdir dirs,
datadir = datadir dirs `append` datasubdir dirs,
libsubdir = error "internal error InstallDirs.libsubdir",
datasubdir = error "internal error InstallDirs.datasubdir"
}
-- | The installation directories in terms of 'PathTemplate's that contain
-- variables.
--
-- The defaults for most of the directories are relative to each other, in
-- particular they are all relative to a single prefix. This makes it
-- convenient for the user to override the default installation directory
-- by only having to specify --prefix=... rather than overriding each
-- individually. This is done by allowing $-style variables in the dirs.
-- These are expanded by textual substitution (see 'substPathTemplate').
--
-- A few of these installation directories are split into two components, the
-- dir and subdir. The full installation path is formed by combining the two
-- together with @\/@. The reason for this is compatibility with other Unix
-- build systems which also support @--libdir@ and @--datadir@. We would like
-- users to be able to configure @--libdir=\/usr\/lib64@ for example but
-- because by default we want to support installing multiple versions of
-- packages and building the same package for multiple compilers we append the
-- libsubdir to get: @\/usr\/lib64\/$libname\/$compiler@.
--
-- An additional complication is the need to support relocatable packages on
-- systems which support such things, like Windows.
--
type InstallDirTemplates = InstallDirs PathTemplate
-- ---------------------------------------------------------------------------
-- Default installation directories
defaultInstallDirs :: CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates
defaultInstallDirs comp userInstall _hasLibs = do
installPrefix <-
if userInstall
then getAppUserDataDirectory "cabal"
else case buildOS of
Windows -> do windowsProgramFilesDir <- getWindowsProgramFilesDir
return (windowsProgramFilesDir </> "Haskell")
_ -> return "/usr/local"
installLibDir <-
case buildOS of
Windows -> return "$prefix"
_ -> case comp of
LHC | userInstall -> getAppUserDataDirectory "lhc"
_ -> return ("$prefix" </> "lib")
return $ fmap toPathTemplate $ InstallDirs {
prefix = installPrefix,
bindir = "$prefix" </> "bin",
libdir = installLibDir,
libsubdir = case comp of
JHC -> "$compiler"
LHC -> "$compiler"
UHC -> "$pkgid"
_other -> "$abi" </> "$libname",
dynlibdir = "$libdir",
libexecdir = case buildOS of
Windows -> "$prefix" </> "$libname"
_other -> "$prefix" </> "libexec",
includedir = "$libdir" </> "$libsubdir" </> "include",
datadir = case buildOS of
Windows -> "$prefix"
_other -> "$prefix" </> "share",
datasubdir = "$abi" </> "$pkgid",
docdir = "$datadir" </> "doc" </> "$abi" </> "$pkgid",
mandir = "$datadir" </> "man",
htmldir = "$docdir" </> "html",
haddockdir = "$htmldir",
sysconfdir = "$prefix" </> "etc"
}
-- ---------------------------------------------------------------------------
-- Converting directories, absolute or prefix-relative
-- | Substitute the install dir templates into each other.
--
-- To prevent cyclic substitutions, only some variables are allowed in
-- particular dir templates. If out of scope vars are present, they are not
-- substituted for. Checking for any remaining unsubstituted vars can be done
-- as a subsequent operation.
--
-- The reason it is done this way is so that in 'prefixRelativeInstallDirs' we
-- can replace 'prefix' with the 'PrefixVar' and get resulting
-- 'PathTemplate's that still have the 'PrefixVar' in them. Doing this makes it
-- easy to check which paths are relative to the $prefix.
--
substituteInstallDirTemplates :: PathTemplateEnv
-> InstallDirTemplates -> InstallDirTemplates
substituteInstallDirTemplates env dirs = dirs'
where
dirs' = InstallDirs {
-- So this specifies exactly which vars are allowed in each template
prefix = subst prefix [],
bindir = subst bindir [prefixVar],
libdir = subst libdir [prefixVar, bindirVar],
libsubdir = subst libsubdir [],
dynlibdir = subst dynlibdir [prefixVar, bindirVar, libdirVar],
libexecdir = subst libexecdir prefixBinLibVars,
includedir = subst includedir prefixBinLibVars,
datadir = subst datadir prefixBinLibVars,
datasubdir = subst datasubdir [],
docdir = subst docdir prefixBinLibDataVars,
mandir = subst mandir (prefixBinLibDataVars ++ [docdirVar]),
htmldir = subst htmldir (prefixBinLibDataVars ++ [docdirVar]),
haddockdir = subst haddockdir (prefixBinLibDataVars ++
[docdirVar, htmldirVar]),
sysconfdir = subst sysconfdir prefixBinLibVars
}
subst dir env' = substPathTemplate (env'++env) (dir dirs)
prefixVar = (PrefixVar, prefix dirs')
bindirVar = (BindirVar, bindir dirs')
libdirVar = (LibdirVar, libdir dirs')
libsubdirVar = (LibsubdirVar, libsubdir dirs')
datadirVar = (DatadirVar, datadir dirs')
datasubdirVar = (DatasubdirVar, datasubdir dirs')
docdirVar = (DocdirVar, docdir dirs')
htmldirVar = (HtmldirVar, htmldir dirs')
prefixBinLibVars = [prefixVar, bindirVar, libdirVar, libsubdirVar]
prefixBinLibDataVars = prefixBinLibVars ++ [datadirVar, datasubdirVar]
-- | Convert from abstract install directories to actual absolute ones by
-- substituting for all the variables in the abstract paths, to get real
-- absolute path.
absoluteInstallDirs :: PackageIdentifier
-> UnitId
-> CompilerInfo
-> CopyDest
-> Platform
-> InstallDirs PathTemplate
-> InstallDirs FilePath
absoluteInstallDirs pkgId libname compilerId copydest platform dirs =
(case copydest of
CopyTo destdir -> fmap ((destdir </>) . dropDrive)
_ -> id)
. appendSubdirs (</>)
. fmap fromPathTemplate
$ substituteInstallDirTemplates env dirs
where
env = initialPathTemplateEnv pkgId libname compilerId platform
-- |The location prefix for the /copy/ command.
data CopyDest
= NoCopyDest
| CopyTo FilePath
deriving (Eq, Show)
-- | Check which of the paths are relative to the installation $prefix.
--
-- If any of the paths are not relative, ie they are absolute paths, then it
-- prevents us from making a relocatable package (also known as a \"prefix
-- independent\" package).
--
prefixRelativeInstallDirs :: PackageIdentifier
-> UnitId
-> CompilerInfo
-> Platform
-> InstallDirTemplates
-> InstallDirs (Maybe FilePath)
prefixRelativeInstallDirs pkgId libname compilerId platform dirs =
fmap relative
. appendSubdirs combinePathTemplate
$ -- substitute the path template into each other, except that we map
-- \$prefix back to $prefix. We're trying to end up with templates that
-- mention no vars except $prefix.
substituteInstallDirTemplates env dirs {
prefix = PathTemplate [Variable PrefixVar]
}
where
env = initialPathTemplateEnv pkgId libname compilerId platform
-- If it starts with $prefix then it's relative and produce the relative
-- path by stripping off $prefix/ or $prefix
relative dir = case dir of
PathTemplate cs -> fmap (fromPathTemplate . PathTemplate) (relative' cs)
relative' (Variable PrefixVar : Ordinary (s:rest) : rest')
| isPathSeparator s = Just (Ordinary rest : rest')
relative' (Variable PrefixVar : rest) = Just rest
relative' _ = Nothing
-- ---------------------------------------------------------------------------
-- Path templates
-- | An abstract path, possibly containing variables that need to be
-- substituted for to get a real 'FilePath'.
--
newtype PathTemplate = PathTemplate [PathComponent]
deriving (Eq, Ord, Generic)
instance Binary PathTemplate
data PathComponent =
Ordinary FilePath
| Variable PathTemplateVariable
deriving (Eq, Ord, Generic)
instance Binary PathComponent
data PathTemplateVariable =
PrefixVar -- ^ The @$prefix@ path variable
| BindirVar -- ^ The @$bindir@ path variable
| LibdirVar -- ^ The @$libdir@ path variable
| LibsubdirVar -- ^ The @$libsubdir@ path variable
| DatadirVar -- ^ The @$datadir@ path variable
| DatasubdirVar -- ^ The @$datasubdir@ path variable
| DocdirVar -- ^ The @$docdir@ path variable
| HtmldirVar -- ^ The @$htmldir@ path variable
| PkgNameVar -- ^ The @$pkg@ package name path variable
| PkgVerVar -- ^ The @$version@ package version path variable
| PkgIdVar -- ^ The @$pkgid@ package Id path variable, eg @foo-1.0@
| LibNameVar -- ^ The @$libname@ path variable
| CompilerVar -- ^ The compiler name and version, eg @ghc-6.6.1@
| OSVar -- ^ The operating system name, eg @windows@ or @linux@
| ArchVar -- ^ The CPU architecture name, eg @i386@ or @x86_64@
| AbiVar -- ^ The Compiler's ABI identifier, $arch-$os-$compiler-$abitag
| AbiTagVar -- ^ The optional ABI tag for the compiler
| ExecutableNameVar -- ^ The executable name; used in shell wrappers
| TestSuiteNameVar -- ^ The name of the test suite being run
| TestSuiteResultVar -- ^ The result of the test suite being run, eg
-- @pass@, @fail@, or @error@.
| BenchmarkNameVar -- ^ The name of the benchmark being run
deriving (Eq, Ord, Generic)
instance Binary PathTemplateVariable
type PathTemplateEnv = [(PathTemplateVariable, PathTemplate)]
-- | Convert a 'FilePath' to a 'PathTemplate' including any template vars.
--
toPathTemplate :: FilePath -> PathTemplate
toPathTemplate = PathTemplate . read
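-- For example (illustrative):
--   toPathTemplate "$prefix/bin"  ~>  PathTemplate [Variable PrefixVar, Ordinary "/bin"]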
-- | Convert back to a path; any remaining vars are included
--
fromPathTemplate :: PathTemplate -> FilePath
fromPathTemplate (PathTemplate template) = show template
combinePathTemplate :: PathTemplate -> PathTemplate -> PathTemplate
combinePathTemplate (PathTemplate t1) (PathTemplate t2) =
PathTemplate (t1 ++ [Ordinary [pathSeparator]] ++ t2)
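-- For example (illustrative, assuming a '/' path separator):
--   fromPathTemplate (combinePathTemplate (toPathTemplate "$prefix")
--                                         (toPathTemplate "bin"))
--     == "$prefix/bin"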
substPathTemplate :: PathTemplateEnv -> PathTemplate -> PathTemplate
substPathTemplate environment (PathTemplate template) =
PathTemplate (concatMap subst template)
where subst component@(Ordinary _) = [component]
subst component@(Variable variable) =
case lookup variable environment of
Just (PathTemplate components) -> components
Nothing -> [component]
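-- An illustrative sketch (the name below is not part of the original module):
-- rendering a template against a tiny hand-written environment.
examplePrefixEnv :: PathTemplateEnv
examplePrefixEnv = [(PrefixVar, toPathTemplate "/usr/local")]
-- fromPathTemplate (substPathTemplate examplePrefixEnv (toPathTemplate "$prefix/bin"))
--   == "/usr/local/bin"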
-- | The initial environment has all the static stuff but no paths
initialPathTemplateEnv :: PackageIdentifier
-> UnitId
-> CompilerInfo
-> Platform
-> PathTemplateEnv
initialPathTemplateEnv pkgId libname compiler platform =
packageTemplateEnv pkgId libname
++ compilerTemplateEnv compiler
++ platformTemplateEnv platform
++ abiTemplateEnv compiler platform
packageTemplateEnv :: PackageIdentifier -> UnitId -> PathTemplateEnv
packageTemplateEnv pkgId libname =
[(PkgNameVar, PathTemplate [Ordinary $ display (packageName pkgId)])
,(PkgVerVar, PathTemplate [Ordinary $ display (packageVersion pkgId)])
,(LibNameVar, PathTemplate [Ordinary $ display libname])
,(PkgIdVar, PathTemplate [Ordinary $ display pkgId])
]
compilerTemplateEnv :: CompilerInfo -> PathTemplateEnv
compilerTemplateEnv compiler =
[(CompilerVar, PathTemplate [Ordinary $ display (compilerInfoId compiler)])
]
platformTemplateEnv :: Platform -> PathTemplateEnv
platformTemplateEnv (Platform arch os) =
[(OSVar, PathTemplate [Ordinary $ display os])
,(ArchVar, PathTemplate [Ordinary $ display arch])
]
abiTemplateEnv :: CompilerInfo -> Platform -> PathTemplateEnv
abiTemplateEnv compiler (Platform arch os) =
[(AbiVar, PathTemplate [Ordinary $ display arch ++ '-':display os ++
'-':display (compilerInfoId compiler) ++
case compilerInfoAbiTag compiler of
NoAbiTag -> ""
AbiTag tag -> '-':tag])
,(AbiTagVar, PathTemplate [Ordinary $ abiTagString (compilerInfoAbiTag compiler)])
]
installDirsTemplateEnv :: InstallDirs PathTemplate -> PathTemplateEnv
installDirsTemplateEnv dirs =
[(PrefixVar, prefix dirs)
,(BindirVar, bindir dirs)
,(LibdirVar, libdir dirs)
,(LibsubdirVar, libsubdir dirs)
,(DatadirVar, datadir dirs)
,(DatasubdirVar, datasubdir dirs)
,(DocdirVar, docdir dirs)
,(HtmldirVar, htmldir dirs)
]
-- ---------------------------------------------------------------------------
-- Parsing and showing path templates:
-- The textual format is that of an ordinary Haskell String, eg
-- "$prefix/bin"
-- and this gets parsed to the internal representation as a sequence of path
-- spans which are either strings or variables, eg:
-- PathTemplate [Variable PrefixVar, Ordinary "/bin" ]
instance Show PathTemplateVariable where
show PrefixVar = "prefix"
show LibNameVar = "libname"
show BindirVar = "bindir"
show LibdirVar = "libdir"
show LibsubdirVar = "libsubdir"
show DatadirVar = "datadir"
show DatasubdirVar = "datasubdir"
show DocdirVar = "docdir"
show HtmldirVar = "htmldir"
show PkgNameVar = "pkg"
show PkgVerVar = "version"
show PkgIdVar = "pkgid"
show CompilerVar = "compiler"
show OSVar = "os"
show ArchVar = "arch"
show AbiTagVar = "abitag"
show AbiVar = "abi"
show ExecutableNameVar = "executablename"
show TestSuiteNameVar = "test-suite"
show TestSuiteResultVar = "result"
show BenchmarkNameVar = "benchmark"
instance Read PathTemplateVariable where
readsPrec _ s =
take 1
[ (var, drop (length varStr) s)
| (varStr, var) <- vars
, varStr `isPrefixOf` s ]
-- NB: order matters! Longer strings first
where vars = [("prefix", PrefixVar)
,("bindir", BindirVar)
,("libdir", LibdirVar)
,("libsubdir", LibsubdirVar)
,("datadir", DatadirVar)
,("datasubdir", DatasubdirVar)
,("docdir", DocdirVar)
,("htmldir", HtmldirVar)
,("pkgid", PkgIdVar)
,("libname", LibNameVar)
,("pkgkey", LibNameVar) -- backwards compatibility
,("pkg", PkgNameVar)
,("version", PkgVerVar)
,("compiler", CompilerVar)
,("os", OSVar)
,("arch", ArchVar)
,("abitag", AbiTagVar)
,("abi", AbiVar)
,("executablename", ExecutableNameVar)
,("test-suite", TestSuiteNameVar)
,("result", TestSuiteResultVar)
,("benchmark", BenchmarkNameVar)]
instance Show PathComponent where
show (Ordinary path) = path
show (Variable var) = '$':show var
showList = foldr (\x -> (shows x .)) id
instance Read PathComponent where
-- for some reason we collapse multiple $ symbols here
readsPrec _ = lex0
where lex0 [] = []
lex0 ('$':'$':s') = lex0 ('$':s')
lex0 ('$':s') = case [ (Variable var, s'')
| (var, s'') <- reads s' ] of
[] -> lex1 "$" s'
ok -> ok
lex0 s' = lex1 [] s'
lex1 "" "" = []
lex1 acc "" = [(Ordinary (reverse acc), "")]
lex1 acc ('$':'$':s) = lex1 acc ('$':s)
lex1 acc ('$':s) = [(Ordinary (reverse acc), '$':s)]
lex1 acc (c:s) = lex1 (c:acc) s
readList [] = [([],"")]
readList s = [ (component:components, s'')
| (component, s') <- reads s
, (components, s'') <- readList s' ]
instance Show PathTemplate where
show (PathTemplate template) = show (show template)
instance Read PathTemplate where
readsPrec p s = [ (PathTemplate template, s')
| (path, s') <- readsPrec p s
, (template, "") <- reads path ]
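-- For instance (illustrative), the outer 'show' produces the quoted form:
--   show (toPathTemplate "$prefix/bin")       == "\"$prefix/bin\""
--   (read "\"$prefix/bin\"" :: PathTemplate)  == toPathTemplate "$prefix/bin"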
-- ---------------------------------------------------------------------------
-- Internal utilities
getWindowsProgramFilesDir :: IO FilePath
getWindowsProgramFilesDir = do
#if mingw32_HOST_OS
m <- shGetFolderPath csidl_PROGRAM_FILES
#else
let m = Nothing
#endif
return (fromMaybe "C:\\Program Files" m)
#if mingw32_HOST_OS
shGetFolderPath :: CInt -> IO (Maybe FilePath)
shGetFolderPath n =
allocaArray long_path_size $ \pPath -> do
r <- c_SHGetFolderPath nullPtr n nullPtr 0 pPath
if (r /= 0)
then return Nothing
else do s <- peekCWString pPath; return (Just s)
where
long_path_size = 1024 -- MAX_PATH is 260, this should be plenty
csidl_PROGRAM_FILES :: CInt
csidl_PROGRAM_FILES = 0x0026
-- csidl_PROGRAM_FILES_COMMON :: CInt
-- csidl_PROGRAM_FILES_COMMON = 0x002b
#ifdef x86_64_HOST_ARCH
#define CALLCONV ccall
#else
#define CALLCONV stdcall
#endif
foreign import CALLCONV unsafe "shlobj.h SHGetFolderPathW"
c_SHGetFolderPath :: Ptr ()
-> CInt
-> Ptr ()
-> CInt
-> CWString
-> IO CInt
#endif
|
kolmodin/cabal
|
Cabal/Distribution/Simple/InstallDirs.hs
|
bsd-3-clause
| 22,505
| 2
| 16
| 6,154
| 4,222
| 2,361
| 1,861
| 362
| 10
|
module Foo () where
{-@ measure getfst :: (a, b) -> a
getfst (x, y) = x
@-}
{-@ type Pair a b = {v0 : ({v:a | v = (getfst v0)}, b) | true } @-}
{-@ type OPList a b = [(Pair a b)]<\h -> {v: (Pair a b) | (getfst v) >= (getfst h)}> @-}
{-@ type OList a = [a]<\h -> {v: a | (v >= h)}> @-}
-- This is Unsafe, as refinements in Predicate parameters (i.e., Pair a b)
-- are lost, so application `getFsts` cannot be proven safe
{-@ getFsts :: OPList a b -> OList a @-}
getFsts :: [(a, b)] -> [a]
getFsts [] = []
getFsts ((x,_) : xs) = x : getFsts xs
|
abakst/liquidhaskell
|
tests/pos/PairMeasure0.hs
|
bsd-3-clause
| 580
| 0
| 8
| 162
| 75
| 46
| 29
| 4
| 1
|
module MapReduce () where
import Language.Haskell.Liquid.Prelude
import Data.Map hiding (filter, map, foldl, foldr)
baz (v:vs) _ = crash False
baz [] _ = crash False
mymap = Data.Map.fromList [('a', [1])]
-- Why is this safe
coll = Data.Map.fold baz 0
prop_safe = coll mymap
-- Oddly, this is unsafe
-- prop_unsafe = Data.Map.foldr baz 0 mymap
|
mightymoose/liquidhaskell
|
tests/neg/mr00.hs
|
bsd-3-clause
| 357
| 0
| 8
| 69
| 116
| 68
| 48
| 8
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.PackageUtils
-- Copyright : (c) Duncan Coutts 2010
-- License : BSD-like
--
-- Maintainer : cabal-devel@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Various package description utils that should be in the Cabal lib
-----------------------------------------------------------------------------
module Distribution.Client.PackageUtils (
externalBuildDepends,
) where
import Distribution.Package
( packageVersion, packageName, Dependency(..) )
import Distribution.PackageDescription
( PackageDescription(..) )
import Distribution.Version
( withinRange )
-- | The list of dependencies that refer to external packages
-- rather than internal package components.
--
externalBuildDepends :: PackageDescription -> [Dependency]
externalBuildDepends pkg = filter (not . internal) (buildDepends pkg)
where
-- True if this dependency is an internal one (depends on a library
-- defined in the same package).
internal (Dependency depName versionRange) =
depName == packageName pkg &&
packageVersion pkg `withinRange` versionRange
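-- For example (illustrative): if package foo-1.0 declares
--   build-depends: base >= 4, foo == 1.0
-- then only the dependency on base is returned; the self-dependency on foo
-- matches the package's own name and version and is therefore internal.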
|
tolysz/prepare-ghcjs
|
spec-lts8/cabal/cabal-install/Distribution/Client/PackageUtils.hs
|
bsd-3-clause
| 1,249
| 0
| 10
| 227
| 150
| 93
| 57
| 13
| 1
|
{-# LANGUAGE TemplateHaskell, PolyKinds, TypeFamilies #-}
module T9160 where
$( [d| class C (a :: k) where
type F (a :: k) :: *
|]
)
|
ghc-android/ghc
|
testsuite/tests/th/T9199.hs
|
bsd-3-clause
| 151
| 0
| 6
| 44
| 16
| 11
| 5
| 4
| 0
|
import Data.Map as Map
import Graphics.UI.SDL as SDL
keyFromString = ((!) name_key_map)
name_key_map = Map.fromList [
("SDLK_UNKNOWN", SDLK_UNKNOWN),
("SDLK_FIRST", SDLK_FIRST),
("SDLK_BACKSPACE", SDLK_BACKSPACE),
("SDLK_TAB", SDLK_TAB),
("SDLK_CLEAR", SDLK_CLEAR),
("SDLK_RETURN", SDLK_RETURN),
("SDLK_PAUSE", SDLK_PAUSE),
("SDLK_ESCAPE", SDLK_ESCAPE),
("SDLK_SPACE", SDLK_SPACE),
("SDLK_EXCLAIM", SDLK_EXCLAIM),
("SDLK_QUOTEDBL", SDLK_QUOTEDBL),
("SDLK_HASH", SDLK_HASH),
("SDLK_DOLLAR", SDLK_DOLLAR),
("SDLK_AMPERSAND", SDLK_AMPERSAND),
("SDLK_QUOTE", SDLK_QUOTE),
("SDLK_LEFTPAREN", SDLK_LEFTPAREN),
("SDLK_RIGHTPAREN", SDLK_RIGHTPAREN),
("SDLK_ASTERISK", SDLK_ASTERISK),
("SDLK_PLUS", SDLK_PLUS),
("SDLK_COMMA", SDLK_COMMA),
("SDLK_MINUS", SDLK_MINUS),
("SDLK_PERIOD", SDLK_PERIOD),
("SDLK_SLASH", SDLK_SLASH),
("SDLK_0", SDLK_0),
("SDLK_1", SDLK_1),
("SDLK_2", SDLK_2),
("SDLK_3", SDLK_3),
("SDLK_4", SDLK_4),
("SDLK_5", SDLK_5),
("SDLK_6", SDLK_6),
("SDLK_7", SDLK_7),
("SDLK_8", SDLK_8),
("SDLK_9", SDLK_9),
("SDLK_COLON", SDLK_COLON),
("SDLK_SEMICOLON", SDLK_SEMICOLON),
("SDLK_LESS", SDLK_LESS),
("SDLK_EQUALS", SDLK_EQUALS),
("SDLK_GREATER", SDLK_GREATER),
("SDLK_QUESTION", SDLK_QUESTION),
("SDLK_AT", SDLK_AT),
("SDLK_LEFTBRACKET", SDLK_LEFTBRACKET),
("SDLK_BACKSLASH", SDLK_BACKSLASH),
("SDLK_RIGHTBRACKET", SDLK_RIGHTBRACKET),
("SDLK_CARET", SDLK_CARET),
("SDLK_UNDERSCORE", SDLK_UNDERSCORE),
("SDLK_BACKQUOTE", SDLK_BACKQUOTE),
("SDLK_a", SDLK_a),
("SDLK_b", SDLK_b),
("SDLK_c", SDLK_c),
("SDLK_d", SDLK_d),
("SDLK_e", SDLK_e),
("SDLK_f", SDLK_f),
("SDLK_g", SDLK_g),
("SDLK_h", SDLK_h),
("SDLK_i", SDLK_i),
("SDLK_j", SDLK_j),
("SDLK_k", SDLK_k),
("SDLK_l", SDLK_l),
("SDLK_m", SDLK_m),
("SDLK_n", SDLK_n),
("SDLK_o", SDLK_o),
("SDLK_p", SDLK_p),
("SDLK_q", SDLK_q),
("SDLK_r", SDLK_r),
("SDLK_s", SDLK_s),
("SDLK_t", SDLK_t),
("SDLK_u", SDLK_u),
("SDLK_v", SDLK_v),
("SDLK_w", SDLK_w),
("SDLK_x", SDLK_x),
("SDLK_y", SDLK_y),
("SDLK_z", SDLK_z),
("SDLK_DELETE", SDLK_DELETE),
("SDLK_WORLD_0", SDLK_WORLD_0),
("SDLK_WORLD_1", SDLK_WORLD_1),
("SDLK_WORLD_2", SDLK_WORLD_2),
("SDLK_WORLD_3", SDLK_WORLD_3),
("SDLK_WORLD_4", SDLK_WORLD_4),
("SDLK_WORLD_5", SDLK_WORLD_5),
("SDLK_WORLD_6", SDLK_WORLD_6),
("SDLK_WORLD_7", SDLK_WORLD_7),
("SDLK_WORLD_8", SDLK_WORLD_8),
("SDLK_WORLD_9", SDLK_WORLD_9),
("SDLK_WORLD_10", SDLK_WORLD_10),
("SDLK_WORLD_11", SDLK_WORLD_11),
("SDLK_WORLD_12", SDLK_WORLD_12),
("SDLK_WORLD_13", SDLK_WORLD_13),
("SDLK_WORLD_14", SDLK_WORLD_14),
("SDLK_WORLD_15", SDLK_WORLD_15),
("SDLK_WORLD_16", SDLK_WORLD_16),
("SDLK_WORLD_17", SDLK_WORLD_17),
("SDLK_WORLD_18", SDLK_WORLD_18),
("SDLK_WORLD_19", SDLK_WORLD_19),
("SDLK_WORLD_20", SDLK_WORLD_20),
("SDLK_WORLD_21", SDLK_WORLD_21),
("SDLK_WORLD_22", SDLK_WORLD_22),
("SDLK_WORLD_23", SDLK_WORLD_23),
("SDLK_WORLD_24", SDLK_WORLD_24),
("SDLK_WORLD_25", SDLK_WORLD_25),
("SDLK_WORLD_26", SDLK_WORLD_26),
("SDLK_WORLD_27", SDLK_WORLD_27),
("SDLK_WORLD_28", SDLK_WORLD_28),
("SDLK_WORLD_29", SDLK_WORLD_29),
("SDLK_WORLD_30", SDLK_WORLD_30),
("SDLK_WORLD_31", SDLK_WORLD_31),
("SDLK_WORLD_32", SDLK_WORLD_32),
("SDLK_WORLD_33", SDLK_WORLD_33),
("SDLK_WORLD_34", SDLK_WORLD_34),
("SDLK_WORLD_35", SDLK_WORLD_35),
("SDLK_WORLD_36", SDLK_WORLD_36),
("SDLK_WORLD_37", SDLK_WORLD_37),
("SDLK_WORLD_38", SDLK_WORLD_38),
("SDLK_WORLD_39", SDLK_WORLD_39),
("SDLK_WORLD_40", SDLK_WORLD_40),
("SDLK_WORLD_41", SDLK_WORLD_41),
("SDLK_WORLD_42", SDLK_WORLD_42),
("SDLK_WORLD_43", SDLK_WORLD_43),
("SDLK_WORLD_44", SDLK_WORLD_44),
("SDLK_WORLD_45", SDLK_WORLD_45),
("SDLK_WORLD_46", SDLK_WORLD_46),
("SDLK_WORLD_47", SDLK_WORLD_47),
("SDLK_WORLD_48", SDLK_WORLD_48),
("SDLK_WORLD_49", SDLK_WORLD_49),
("SDLK_WORLD_50", SDLK_WORLD_50),
("SDLK_WORLD_51", SDLK_WORLD_51),
("SDLK_WORLD_52", SDLK_WORLD_52),
("SDLK_WORLD_53", SDLK_WORLD_53),
("SDLK_WORLD_54", SDLK_WORLD_54),
("SDLK_WORLD_55", SDLK_WORLD_55),
("SDLK_WORLD_56", SDLK_WORLD_56),
("SDLK_WORLD_57", SDLK_WORLD_57),
("SDLK_WORLD_58", SDLK_WORLD_58),
("SDLK_WORLD_59", SDLK_WORLD_59),
("SDLK_WORLD_60", SDLK_WORLD_60),
("SDLK_WORLD_61", SDLK_WORLD_61),
("SDLK_WORLD_62", SDLK_WORLD_62),
("SDLK_WORLD_63", SDLK_WORLD_63),
("SDLK_WORLD_64", SDLK_WORLD_64),
("SDLK_WORLD_65", SDLK_WORLD_65),
("SDLK_WORLD_66", SDLK_WORLD_66),
("SDLK_WORLD_67", SDLK_WORLD_67),
("SDLK_WORLD_68", SDLK_WORLD_68),
("SDLK_WORLD_69", SDLK_WORLD_69),
("SDLK_WORLD_70", SDLK_WORLD_70),
("SDLK_WORLD_71", SDLK_WORLD_71),
("SDLK_WORLD_72", SDLK_WORLD_72),
("SDLK_WORLD_73", SDLK_WORLD_73),
("SDLK_WORLD_74", SDLK_WORLD_74),
("SDLK_WORLD_75", SDLK_WORLD_75),
("SDLK_WORLD_76", SDLK_WORLD_76),
("SDLK_WORLD_77", SDLK_WORLD_77),
("SDLK_WORLD_78", SDLK_WORLD_78),
("SDLK_WORLD_79", SDLK_WORLD_79),
("SDLK_WORLD_80", SDLK_WORLD_80),
("SDLK_WORLD_81", SDLK_WORLD_81),
("SDLK_WORLD_82", SDLK_WORLD_82),
("SDLK_WORLD_83", SDLK_WORLD_83),
("SDLK_WORLD_84", SDLK_WORLD_84),
("SDLK_WORLD_85", SDLK_WORLD_85),
("SDLK_WORLD_86", SDLK_WORLD_86),
("SDLK_WORLD_87", SDLK_WORLD_87),
("SDLK_WORLD_88", SDLK_WORLD_88),
("SDLK_WORLD_89", SDLK_WORLD_89),
("SDLK_WORLD_90", SDLK_WORLD_90),
("SDLK_WORLD_91", SDLK_WORLD_91),
("SDLK_WORLD_92", SDLK_WORLD_92),
("SDLK_WORLD_93", SDLK_WORLD_93),
("SDLK_WORLD_94", SDLK_WORLD_94),
("SDLK_WORLD_95", SDLK_WORLD_95),
("SDLK_KP0", SDLK_KP0),
("SDLK_KP1", SDLK_KP1),
("SDLK_KP2", SDLK_KP2),
("SDLK_KP3", SDLK_KP3),
("SDLK_KP4", SDLK_KP4),
("SDLK_KP5", SDLK_KP5),
("SDLK_KP6", SDLK_KP6),
("SDLK_KP7", SDLK_KP7),
("SDLK_KP8", SDLK_KP8),
("SDLK_KP9", SDLK_KP9),
("SDLK_KP_PERIOD", SDLK_KP_PERIOD),
("SDLK_KP_DIVIDE", SDLK_KP_DIVIDE),
("SDLK_KP_MULTIPLY", SDLK_KP_MULTIPLY),
("SDLK_KP_MINUS", SDLK_KP_MINUS),
("SDLK_KP_PLUS", SDLK_KP_PLUS),
("SDLK_KP_ENTER", SDLK_KP_ENTER),
("SDLK_KP_EQUALS", SDLK_KP_EQUALS),
("SDLK_UP", SDLK_UP),
("SDLK_DOWN", SDLK_DOWN),
("SDLK_RIGHT", SDLK_RIGHT),
("SDLK_LEFT", SDLK_LEFT),
("SDLK_INSERT", SDLK_INSERT),
("SDLK_HOME", SDLK_HOME),
("SDLK_END", SDLK_END),
("SDLK_PAGEUP", SDLK_PAGEUP),
("SDLK_PAGEDOWN", SDLK_PAGEDOWN),
("SDLK_F1", SDLK_F1),
("SDLK_F2", SDLK_F2),
("SDLK_F3", SDLK_F3),
("SDLK_F4", SDLK_F4),
("SDLK_F5", SDLK_F5),
("SDLK_F6", SDLK_F6),
("SDLK_F7", SDLK_F7),
("SDLK_F8", SDLK_F8),
("SDLK_F9", SDLK_F9),
("SDLK_F10", SDLK_F10),
("SDLK_F11", SDLK_F11),
("SDLK_F12", SDLK_F12),
("SDLK_F13", SDLK_F13),
("SDLK_F14", SDLK_F14),
("SDLK_F15", SDLK_F15),
("SDLK_NUMLOCK", SDLK_NUMLOCK),
("SDLK_CAPSLOCK", SDLK_CAPSLOCK),
("SDLK_SCROLLOCK", SDLK_SCROLLOCK),
("SDLK_RSHIFT", SDLK_RSHIFT),
("SDLK_LSHIFT", SDLK_LSHIFT),
("SDLK_RCTRL", SDLK_RCTRL),
("SDLK_LCTRL", SDLK_LCTRL),
("SDLK_RALT", SDLK_RALT),
("SDLK_LALT", SDLK_LALT),
("SDLK_RMETA", SDLK_RMETA),
("SDLK_LMETA", SDLK_LMETA),
("SDLK_LSUPER", SDLK_LSUPER),
("SDLK_RSUPER", SDLK_RSUPER),
("SDLK_MODE", SDLK_MODE),
("SDLK_COMPOSE", SDLK_COMPOSE),
("SDLK_HELP", SDLK_HELP),
("SDLK_PRINT", SDLK_PRINT),
("SDLK_SYSREQ", SDLK_SYSREQ),
("SDLK_BREAK", SDLK_BREAK),
("SDLK_MENU", SDLK_MENU),
("SDLK_POWER", SDLK_POWER),
("SDLK_EURO", SDLK_EURO),
("SDLK_UNDO", SDLK_UNDO),
("SDLK_LAST", SDLK_LAST)]
|
stevedonnelly/haskell
|
code/Graphics/UI/SDL/Keysym/Extensions.hs
|
mit
| 7,941
| 0
| 7
| 1,423
| 2,145
| 1,428
| 717
| 238
| 1
|
module Y2020.M10.D14.Exercise where
{--
I really hate wikidata sometimes.
The query:
# Continents/Countries
SELECT ?continent ?continentLabel ?country ?countryLabel
# ?region ?regionLabel # ?particularRegion ?particularRegionLabel
WHERE
{
?continent wdt:P31 wd:Q5107.
?country wdt:P31 wd:Q6256.
# ?region wdt:P31 wd:Q82794.
# ?region wdt:p642 ?continent.
# ?particularRegion wdt:p361 ?region.
?country wdt:P361 ?continent.
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
}
says there are only two countries in Europe: Norway and Italy.
It also says there are only seventeen countries that are in continents in the
entire World. If you're not one of these seventeen countries, then, as Cee Lo
Green sings: "[Forget] you."
You see that some RDF triples are commented out. Removing the comment marker
reduces the result set to 0 countries and continents, even though manual
search shows otherwise.
[Forget] you, wikidata.
But, there is a wiki page that lists countries by continent. So I did a hand-
scrape of that page.
Today's Haskell problem: let's convert that scrape to Haskell data.
--}
import Y2020.M10.D12.Exercise -- Country-type
import Data.Map (Map)
workingDir :: FilePath
workingDir = "Y2020/M10/D14/"
cbc :: FilePath
cbc = "countries.txt"
type Continent = String
type ContinentMap = Map Continent [Country]
countriesByContinent :: FilePath -> IO ContinentMap
countriesByContinent countriesFile = undefined
{--
>>> countriesByContinent (workingDir ++ cbc)
...
>>> let contis = it
A note about formatting:
deal with it.
That is all.
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2020/M10/D14/Exercise.hs
|
mit
| 1,623
| 0
| 6
| 265
| 88
| 54
| 34
| 11
| 1
|
-- current darcs as of 2010-12-31
{-# LANGUAGE
DeriveDataTypeable,
FlexibleContexts,
FlexibleInstances,
MultiParamTypeClasses,
NoMonomorphismRestriction,
PatternGuards,
ScopedTypeVariables,
TypeSynonymInstances,
UndecidableInstances
#-}
{-# OPTIONS_GHC -W -fwarn-unused-imports -fno-warn-missing-signatures #-}
import XMonad.Util.NamedScratchpad
import Control.Applicative
import Control.Monad
import Control.Monad.Instances ()
import Control.Monad.Writer
import Data.List
import Data.Maybe
import Data.Traversable(traverse)
import Graphics.X11.Xinerama
import qualified Data.Map as M
import qualified XMonad.StackSet as W
import qualified XMonad.Util.ExtensibleState as XS
import System.IO
import XMonad
import XMonad.Actions.DwmPromote
import XMonad.Actions.FloatSnap
import XMonad.Actions.GridSelect
import XMonad.Actions.Search
import XMonad.Actions.SpawnOn
import XMonad.Actions.Submap
import XMonad.Actions.TopicSpace
import XMonad.Actions.UpdatePointer
import XMonad.Actions.Warp
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.EwmhDesktops
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.UrgencyHook
import XMonad.Layout.ResizableTile
import XMonad.Layout.Accordion
import XMonad.Layout.MultiToggle
import XMonad.Layout.Combo
import XMonad.Layout.BoringWindows
import XMonad.Layout.Drawer
import XMonad.Layout.Grid
import XMonad.Layout.TwoPane
import XMonad.Layout.IM
import XMonad.Layout.LayoutHints
import XMonad.Layout.LayoutModifier
import XMonad.Layout.Magnifier
import XMonad.Layout.Master
import XMonad.Layout.Mosaic
import XMonad.Layout.MosaicAlt
import XMonad.Layout.MouseResizableTile
import XMonad.Layout.Named
import XMonad.Layout.NoBorders
import XMonad.Layout.PerWorkspace
import XMonad.Layout.Simplest
import XMonad.Layout.SimplestFloat
import XMonad.Layout.SubLayouts
import XMonad.Layout.Tabbed
import XMonad.Layout.TrackFloating
import XMonad.Layout.WindowNavigation
import XMonad.Prompt
import XMonad.Prompt.RunOrRaise
import XMonad.Prompt.Ssh
import XMonad.Prompt.Window
import XMonad.Prompt.XMonad
import XMonad.Util.EZConfig
import XMonad.Util.Replace
import XMonad.Util.Run
scratchpads =
[ NS "htop" "urxvt -e htop" (title =? "htop") defaultFloating
, NS "workspace" "urxvt -name workspace" (title =? "workspace") defaultFloating
] where role = stringProperty "WM_WINDOW_ROLE"
main :: IO ()
main = do
replace
checkTopicConfig myTopics myTopicConfig
let urgency
| True = withUrgencyHook FocusHook
| True = withUrgencyHook NoUrgencyHook
xmonad . ewmh . urgency . myConfig
=<< mapM xmobarScreen =<< getScreens
sofficeToolbox = className =? "OpenOffice.org 3.1"
<&&> isInProperty "WM_PROTOCOLS" "WM_TAKE_FOCUS"
myConfig hs = let c = defaultConfig {
layoutHook = myLayout
, focusFollowsMouse = False
, focusedBorderColor = "red"
, startupHook = do
return () -- supposedly to avoid inf. loops with checkKeymap
checkKeymap (myConfig []) (myKeys c)
, terminal = "urxvt -e zsh"
, modMask = mod4Mask
, logHook = do
multiPP'
(mergePPOutputs [XMonad.Actions.TopicSpace.pprWindowSet myTopicConfig,
dynamicLogString . onlyTitle])
myPP
myPP{ ppTitle = const "" }
hs
-- updatePointer (TowardsCentre 0.2 0.2)
, handleEventHook = ewmhDesktopsEventHook <+> fullscreenEventHook <+> focusFollow <+>
(\e -> case e of
PropertyEvent{ ev_window = w } -> do
isURXVT <- runQuery (className =? "URxvt") w
if not isURXVT then hintsEventHook e else return (All True)
_ -> return (All True))
, workspaces = myTopics
, manageHook = mconcat
[manageSpawn
,isFullscreen --> doFullFloat
-- ,className =? "MPlayer" --> doFullFloat
,className =? "XTerm" --> queryMerge (className =? "XTerm")
,manageDocks
, namedScratchpadManageHook scratchpads
]
} in additionalKeysP c (myKeys c)
myXPConfig :: XPConfig
myXPConfig = greenXPConfig { font = "xft:Profont:pixelsize=15:autohint=true" }
gsConfig = defaultGSConfig { gs_navigate = fix $ \self ->
let navKeyMap = M.mapKeys ((,) 0) $ M.fromList $
[(xK_Escape, cancel)
,(xK_Return, select)
,(xK_slash , substringSearch self)]
++
map (\(k,a) -> (k,a >> self))
[(xK_Left , move (-1,0 ))
,(xK_h , move (-1,0 ))
,(xK_n , move (-1,0 ))
,(xK_Right , move (1,0 ))
,(xK_l , move (1,0 ))
,(xK_i , move (1,0 ))
,(xK_Down , move (0,1 ))
,(xK_j , move (0,1 ))
,(xK_e , move (0,1 ))
,(xK_Up , move (0,-1 ))
,(xK_u , move (0,-1 ))
,(xK_y , move (-1,-1))
,(xK_m , move (1,-1 ))
,(xK_space , setPos (0,0))
]
in makeXEventhandler $ shadowWithKeymap navKeyMap (const self) }
data ExpandEdges a = ExpandEdges Int deriving (Read,Show)
instance LayoutModifier ExpandEdges Window where
modifyLayout (ExpandEdges n) ws (Rectangle x y w h) = let
bigRect = Rectangle (x - fromIntegral n) (y - fromIntegral n)
(w + 2*fromIntegral n) (h + 2*fromIntegral n)
in
runLayout ws bigRect
-- | push edges off-screen
expandEdges n layout = ModifiedLayout (ExpandEdges n) layout
-------------------- Layout ----------------------------------
myLayout =
trackFloating . smartBorders
. avoidStruts
. onWorkspace "xm-conf" ((nav $ ModifiedLayout (ExpandEdges 1) (Tall 1 0.3 0.5)) ||| Full)
$ m ||| named "F" (noBorders Full) ||| (TwoPane (3/100) (1/2))
||| (named "Full|Acc" $ combineTwo myTiled Full Accordion)
where nav = configurableNavigation (navigateColor "#ffff00")
m = named "M"
. lessBorders Screen
. layoutHintsToCenter
. addTabs shrinkText defaultTheme
. nav
. boringAuto
. subLayout [] (Simplest ||| simplestFloat)
$ mosaic 1.5 [7,5,2]
myTiled = named "Tall" $ ResizableTall 1 0.03 0.5 []
--------------------------------------------------------------
-------------------- Keys ------------------------------------
myKeys c =
[("M-<Left>" , withFocused $ snapMove L Nothing )
,("M-<Right>" , withFocused $ snapMove R Nothing )
,("M-<Up>" , withFocused $ snapMove U Nothing )
,("M-<Down>" , withFocused $ snapMove D Nothing )
,("M-S-<Left>" , withFocused $ snapShrink R Nothing)
,("M-S-<Right>", withFocused $ snapGrow R Nothing)
,("M-S-<Up>" , withFocused $ snapShrink D Nothing)
,("M-S-<Down>" , withFocused $ snapGrow D Nothing)
, ("M-`", namedScratchpadAction scratchpads "workspace")
, ("M-S-h", namedScratchpadAction scratchpads "htop")
, ("M-l", withFocused (sendMessage . expandWindowAlt) >> sendMessage Expand)
, ("M-h", withFocused (sendMessage . shrinkWindowAlt) >> sendMessage Shrink)
,("M-;", withFocused (sendMessage . tallWindowAlt) >> sendMessage Taller)
,("M-o", withFocused (sendMessage . wideWindowAlt) >> sendMessage Wider )
,("M-v", toggleFF)
,("M-S-b", restart "/home/aavogt/bin/obtoxmd" True)
,("M-S-d", restart "urxvt -e xmonad" False)
,("M-S-o" , withFocused $ sendMessage . UnMerge )
,("M-S-C-o", withFocused $ sendMessage . UnMergeAll)
,("M-C-m" , withFocused $ sendMessage . MergeAll )
,("M-C-." , onGroup W.focusDown')
,("M-C-," , onGroup W.focusUp' )
,("M-p", shellPromptHere myXPConfig)
,("M-x", submap $ M.fromList subMaps)
,("M-g", submap $ defaultSublMap c )
,("M-S-.", focusDown)
,("M-S-,", focusUp )
,("M-S-a", currentTopicAction myTopicConfig)
,("M-a", warpToCentre >> goToSelected gsConfig)
-- workaround
,("M-<Tab>", switchNthLastFocused myTopicConfig . succ . length . W.visible . windowset =<< get )
,("M-s" , warpToCentre >> promptedGoto )
,("M-S-s", warpToCentre >> promptedShift)
,("M-b", sendMessage ToggleStruts)
,("M-<Return>", dwmpromote)
,("M-S-<Return>", spawnShell)
-- don't force a recompile, if nothing has changed (xmonad --recompile runs XMonad.recompile True)
,("M-q", spawn "ghc -e ':m +XMonad Control.Monad System.Exit' -e 'flip unless exitFailure =<< recompile False' && xmonad --restart")
,("M-S-q", spawn "~/wip/x11-wm/xmonad/rebuild.sh")
,("<Print>", spawn "scrot")
]
++
concatMap (\(m,f) -> lrud ("M-"++m) f)
[("S-" , sendMessage . Swap)
,("C-" , sendMessage . pullGroup)
,("S-C-", sendMessage . pushWindow)
,("" , sendMessage . Go)]
++ mediaKeys ++
[("M-"++m++[key], screenWorkspace sc >>= flip whenJust (windows . f))
| (f, m) <- [(W.view, ""), (W.shift, "S-")]
, (key, sc) <- zip "wf" [0 .. ]]
++
[ ("M-"++m++[k], a i)
| (a, m) <- [(switchNthLastFocused myTopicConfig,""),(shiftNthLastFocused, "S-")]
, (i, k) <- zip [1..] "123456789"]
-- helper for windowNavigation keys
-- note: with colemak neiu are placed where jkli are with qwerty layout
lrud :: String -> (Direction2D -> b) -> [(String, b)]
lrud m cmd = zip ks cmds
where
ks = map (\x -> m ++ [x]) "niue"
cmds = zipWith ($) (repeat cmd) [L,R,U,D]
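-- e.g. (illustrative) lrud "M-S-" (sendMessage . Swap) yields the four
-- bindings "M-S-n", "M-S-i", "M-S-u", "M-S-e" paired with directions
-- L, R, U and D respectively.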
subMaps = [((0, xK_o), runOrRaisePrompt myXPConfig),
((0, xK_p), shellPromptHere myXPConfig),
((0, xK_x), xmonadPrompt myXPConfig),
((0, xK_z), sshPrompt myXPConfig),
((shiftMask, xK_w), windowPromptGoto myXPConfig),
((0, xK_w), promptSearch myXPConfig wikipedia),
((0, xK_s), promptSearch myXPConfig multi),
((0, xK_m), promptSearch myXPConfig mathworld),
((0, xK_b), sendMessage ToggleStruts),
((0, xK_f), withFocused $ windows . W.sink),
((0, xK_v), refresh),
((0, xK_c), asks config >>= spawnHere . terminal),
((0, xK_k), kill)
]
amarok = False
mediaKeys = [("<XF86AudioPlay>", do mpcAct "toggle"; when amarok $ spawn "amarok -t"),
("<XF86AudioStop>", promptHost),
("<XF86AudioNext>", do mpcAct "next"; when amarok $ spawn "amarok -f"),
("<XF86AudioPrev>", do mpcAct "prev"; when amarok $ spawn "amarok -r"),
("<XF86AudioMute>", spawn "ossmix vmix0-outvol 0"),
("<XF86AudioLowerVolume>", spawn "amixer sset PCM 1-"),
("<XF86AudioRaiseVolume>", spawn "amixer sset PCM 1+"),
("<XF86Sleep>", spawn "sudo pm-suspend")
]
where mpcAct c = do
h <- XS.gets hostPrompt
spawn $ unwords ["export MPD_HOST="++h,";","mpc",c]
-- Prompt for mpd host
newtype HostPrompt = HostPrompt { hostPrompt :: String } deriving (Read,Show,Typeable)
instance ExtensionClass HostPrompt where
initialValue = HostPrompt "/home/aavogt/.mpd/socket"
extensionType = PersistentExtension
instance XPrompt HostPrompt where showXPrompt _ = "Pick MPD Host: "
promptHost = mkXPrompt (HostPrompt "") myXPConfig (return . compl) (XS.put . HostPrompt)
where compl s = nub $ filter (s `isPrefixOf`) ["127.0.0.1","dell"]
--------------------------------------------------------------
warpToCentre = gets (W.screen . W.current . windowset) >>= \x -> warpToScreen x 0.5 0.5
-------------------- Support for per-screen xmobars ---------
-- Some parts of this should be merged into contrib sometime
getScreens :: IO [Int]
getScreens = openDisplay "" >>= liftA2 (<*) f closeDisplay
where f = fmap (zipWith const [0..]) . getScreenInfo
multiPP :: PP -- ^ The PP to use if the screen is focused
-> PP -- ^ The PP to use otherwise
-> [Handle] -- ^ Handles for the status bars, in order of increasing X
-- screen number
-> X ()
multiPP = multiPP' dynamicLogString
multiPP' :: (PP -> X String) -> PP -> PP -> [Handle] -> X ()
multiPP' dynlStr focusPP unfocusPP handles = do
state <- get
let pickPP :: WorkspaceId -> WriterT (Last XState) X String
pickPP ws = do
let isFoc = (ws ==) . W.tag . W.workspace . W.current $ windowset state
put state{ windowset = W.view ws $ windowset state }
out <- lift $ dynlStr $ if isFoc then focusPP else unfocusPP
when isFoc $ get >>= tell . Last . Just
return out
traverse put . getLast
=<< execWriterT . (io . zipWithM_ hPutStrLn handles <=< mapM pickPP) . catMaybes
=<< mapM screenWorkspace (zipWith const [0..] handles)
return ()
mergePPOutputs :: [PP -> X String] -> PP -> X String
mergePPOutputs x pp = fmap (intercalate (ppSep pp)) . sequence . sequence x $ pp
onlyTitle :: PP -> PP
onlyTitle pp = defaultPP { ppCurrent = const ""
, ppHidden = const ""
, ppVisible = const ""
, ppLayout = ppLayout pp
, ppTitle = ppTitle pp }
-- | Requires a recent addition to xmobar (>0.9.2), otherwise you have to use
-- multiple configuration files, which gets messy
xmobarScreen :: Int -> IO Handle
xmobarScreen = spawnPipe . ("~/.cabal/bin/xmobar -x " ++) . show
myPP :: PP
myPP = sjanssenPP { ppLayout = xmobarColor "orange" "", ppUrgent = xmobarColor "red" "" . ('^':) }
--------------------------------------------------------------
-------------------- X.Actions.TopicSpace --------------------
myTopics :: [Topic]
myTopics =
[ "workspace"
, "web"
, "mail"
, "xm-conf"
, "test"
]
myTopicConfig = TopicConfig
{ topicDirs = M.fromList $
[ ("workspace", "~/workspace")
, ("xm-conf", ".xmonad")
]
, defaultTopicAction = const $ spawnShell >*> 2
, defaultTopic = "workspace"
, maxTopicHistory = 10
, topicActions = M.fromList $
[ ("xm-conf", spawn "~/bin/emacs ~/.xmonad/xmonad.hs")
, ("mail" , spawnOn "mail" "thunderbird")
, ("web", spawnOn "web" "google-chrome")
]
}
-- From the sample config in TopicSpace, these should probably be exported from that module
spawnShell = currentTopicDir myTopicConfig >>= spawnShellIn
spawnShellIn dir = do
-- color <- randomBg' (HSV 255 255)
t <- asks (terminal . config)
spawnHere $ "cd " ++ dir ++ " && " ++ t -- ++ " -bg " ++ color
wsgrid = gridselect gsConfig <=< asks $ map (\x -> (x,x)) . workspaces . config
promptedGoto = wsgrid >>= flip whenJust (switchTopic myTopicConfig)
promptedShift = wsgrid >>= \x -> whenJust x $ \y -> windows (W.greedyView y . W.shift y)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- A nice little example of extensiblestate
newtype FocusFollow = FocusFollow {getFocusFollow :: Bool } deriving (Typeable,Read,Show)
instance ExtensionClass FocusFollow where
initialValue = FocusFollow True
extensionType = PersistentExtension
-- this eventHook is the same as from xmonad for handling crossing events
focusFollow e@(CrossingEvent {ev_window=w, ev_event_type=t})
| t == enterNotify, ev_mode e == notifyNormal =
whenX (XS.gets getFocusFollow) (focus w) >> return (All True)
focusFollow _ = return (All True)
toggleFF = XS.modify $ FocusFollow . not . getFocusFollow
--------------------------------------------------------------------------------
{- | Sometimes this picks the wrong element to merge into (that is, not the
'focused' element of the group), and SubLayouts breaks up the whole group
-}
queryMerge pGrp = do
w <- ask
aws <- liftX $ filterM (runQuery pGrp) =<< gets
(W.integrate' . W.stack . W.workspace . W.current . windowset)
let addRem = False -- run the query with window removed??
when addRem
(liftX $ modify (\ws -> ws { windowset = W.insertUp w (windowset ws) }))
liftX $ windows (W.insertUp w)
mapM_ (liftX . sendMessage . XMonad.Layout.SubLayouts.Merge w) aws
when addRem
(liftX $ modify (\ws -> ws { windowset = W.delete' w (windowset ws) }))
idHook
|
mfpi/configs
|
xmonad/xmonad.hs
|
mit
| 16,555
| 0
| 22
| 4,119
| 4,569
| 2,536
| 2,033
| -1
| -1
|
{-# htermination eltsFM_GE :: FiniteMap Float b -> Float -> [b] #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_eltsFM_GE_6.hs
|
mit
| 85
| 0
| 3
| 14
| 5
| 3
| 2
| 1
| 0
|
import Data.Map.Strict as M
{-@ measure len @-}
len :: Map Int Int -> Int
len = size
double :: Int -> Int
double val = 2 * val
mkSet :: [Int] -> Map Int Int
mkSet = foldl (\m x -> M.insert x x m) M.empty
smap :: (Int -> Int) -> Map Int Int -> Map Int Int
smap = M.map
exs :: [(Map Int Int, Map Int Int)]
exs = [(mkSet [1,2,3,4], mkSet [2,4,6,8])]
|
santolucito/ives
|
tests/broken_benchmarks/map_double_vals.hs
|
mit
| 353
| 0
| 9
| 83
| 203
| 112
| 91
| 11
| 1
|
module Main where
import Data.Char(isSpace)
import Data.List(groupBy, dropWhileEnd)
import Data.Function (on)
trim :: String -> String
trim = trimTail . trimHead
trimHead,trimTail :: String -> String
trimHead = dropWhile isSpace      -- drop leading whitespace
trimTail = dropWhileEnd isSpace   -- drop trailing whitespace
f :: String -> String
f = unlines . map unwords . groupBy ((==) `on` null) . map trim . lines
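-- For example (illustrative): blank lines delimit paragraphs and everything
-- in between is joined onto one line:
--   f "a\nb\n\nc\n" == "a b\n\nc\n"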
main :: IO ()
main = interact f
|
spoj/mwtools
|
app/joinPara.hs
|
mit
| 381
| 0
| 10
| 67
| 152
| 85
| 67
| 13
| 1
|
module Fib.Internal where
import Prelude
fib :: Int -> Int
fib 0 = 1
fib 1 = 1
fib n = fib (n-1) + fib (n-2)
|
hdgarrood/scotty-fay
|
test/fay-resources1/Fib/Internal.hs
|
mit
| 111
| 0
| 8
| 28
| 66
| 35
| 31
| 6
| 1
|
-- https://www.hackerrank.com/challenges/area-under-curves-and-volume-of-revolving-a-curv
import Text.Printf (printf)
-- This function should return a list [area, volume].
solve :: Int -> Int -> [Int] -> [Int] -> [Double]
solve l r a b = [area, volume]
where results = map (polynomial a b) (inputRange l r 0.001)
area = sum $ map (areaRectangle 0.001) results
volume = sum $ map (volumeCilinder 0.001) results
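-- Illustrative sanity check (values are hypothetical, not from the problem
-- statement): for the constant polynomial 1 on [1,2], i.e. solve 1 2 [1] [0],
-- the result is roughly [1.0, pi]; the 0.001 step introduces a small
-- discretisation error of about 0.1%.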
inputRange :: Int -> Int -> Double -> [Double]
inputRange start end step = [first,second..last]
where first = fromIntegral start
second = first + step
last = fromIntegral end
polynomial :: [Int] -> [Int] -> Double -> Double
polynomial coefficients degrees x = sum $ map (\(c,d) -> c * x**d) (zip coef degr)
where coef = map fromIntegral coefficients
degr = map fromIntegral degrees
areaRectangle :: Double -> Double -> Double
areaRectangle width height = width * height
volumeCilinder :: Double -> Double -> Double
volumeCilinder height radius = (pi * radius**2) * height
--Input/Output.
main :: IO ()
main = getContents >>= mapM_ (printf "%.1f\n"). (\[a, b, [l, r]] -> solve l r a b). map (map read. words). lines
|
Seblink/HackerRank
|
functional-programming/introduction/area-under-curves-and-volume-of-revolving-a-curv.hs
|
mit
| 1,207
| 0
| 11
| 264
| 447
| 238
| 209
| 21
| 1
|
module Rebase.Control.Monad.RWS.Lazy
(
module Control.Monad.RWS.Lazy
)
where
import Control.Monad.RWS.Lazy
|
nikita-volkov/rebase
|
library/Rebase/Control/Monad/RWS/Lazy.hs
|
mit
| 110
| 0
| 5
| 12
| 26
| 19
| 7
| 4
| 0
|
module One
( circleArea
) where
circleArea r = 3.14 * (r * r)
|
mudphone/HaskellBook
|
src/One.hs
|
mit
| 77
| 0
| 7
| 29
| 28
| 16
| 12
| 3
| 1
|
import Text.KR4MB
import Control.Monad (forM_)
import System.Environment
dest_path = "/Users/keqh/Library/Application Support/KeyRemap4MacBook/private.xml"
settings = [("repeat.wait", 10)
,("repeat.initial_wait", 250)
,("pointing.relative2scroll_rate", 250)
]
main = do
(arg:rest) <- getArgs
if arg == "dump"
then do
dump rule
else do
reload dest_path rule
setParams settings
rule :: Rule
rule = root $ do
item "disable minimize" $ do
cmd 'm' `keyToKey` VK_NONE
item "personal settings" $ do
keyOverlaidModifier CONTROL_L CONTROL_L [JIS_EISUU, ESCAPE]
':' `swapKey` ':'
';' `swapKey` '_'
appendix "Google IME"
ctrl 'j' `keyToKey` JIS_KANA
group "standard settings" $ do
item "JIS to US" $ do
setJSLayout
item "basic settings" $ do
COMMAND_L `keyToKey` OPTION_L
JIS_KANA `keyToKey` RETURN
keyOverlaidModifier JIS_EISUU COMMAND_L [JIS_EISUU]
SPACE `keyOverlaidModifierWithRepeat` SHIFT_L
F7 `keyToConsumer` MUSIC_PREV
F8 `keyToConsumer` MUSIC_PLAY
F9 `keyToConsumer` MUSIC_NEXT
F10 `keyToConsumer` VOLUME_MUTE
F11 `keyToConsumer` VOLUME_DOWN
F12 `keyToConsumer` VOLUME_UP
group "use extra1" $ do
item "for symbol keycode" $ do
appendix "Modと併用時は普通にshiftとして動作する"
forM_ [cmd, opt, ctrl] $ \modkey -> do
modkey SHIFT_L `keyToKey` modkey SHIFT_L
SHIFT_L `keyToKey` VK_MODIFIER_EXTRA1
-- This depends on EXTRA1, so it should be tidied up
item "for tmux" $ do
let focusiTerm = opt $ ctrl $ shift 'z'
let tmuxPrefix = ctrl 't'
forM_ "jklpnc" $ \key -> do
extra1 key `keyToKey'` [toKey JIS_EISUU, focusiTerm, tmuxPrefix, toKey key]
app_only "TERMINAL" $ do
let tmuxPrefix = ctrl 't'
let copyModePrefix = ctrl '['
forM_ "du" $ \key -> do
extra1 key `keyToKey'` [tmuxPrefix, copyModePrefix, ctrl key]
item "for coding" $ do
extra1 '.' `keyToKey'` " -> "
extra1 ',' `keyToKey'` " # "
extra1 'w' `keyToKey'` "\n where\n"
|
keqh/KR4MB
|
Sample.hs
|
mit
| 2,116
| 9
| 14
| 523
| 604
| 306
| 298
| 59
| 2
|
-----------------------------------------------------------------------------
-- |
-- Module : Reader.Parser.Expression
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein (klein@react.uni-saarland.de)
--
-- Expression Parser.
--
-----------------------------------------------------------------------------
module Reader.Parser.Expression
( exprParser
) where
-----------------------------------------------------------------------------
import Data.Expression
( Expr(..)
, Expr'(..)
, SrcPos(..)
, ExprPos(..)
)
import Reader.Parser.Data
( globalDef
)
import Reader.Parser.Utils
( getPos
, identifier
, positionParser
)
import Control.Monad
( liftM
, void
)
import Text.Parsec
( (<|>)
, char
, try
, oneOf
, many1
, digit
, lookAhead
, notFollowedBy
)
import Text.Parsec.Expr
( Assoc(..)
, Operator(..)
, buildExpressionParser
)
import Text.Parsec.String
( Parser
)
import Text.Parsec.Token
( GenLanguageDef(..)
, commaSep
, reservedNames
, whiteSpace
, makeTokenParser
, reserved
, reservedOp
)
-----------------------------------------------------------------------------
-- | Parses an expression.
exprParser
:: Parser (Expr String)
exprParser = (~~) >> buildExpressionParser table term
where
table =
[ [ Prefix $ unaryOperators numUnary
]
, [ Infix (binOp "*" NumMul) AssocLeft
, Infix (binOp "MUL" NumMul) AssocLeft
]
, [ Infix (binOp "/" NumDiv) AssocRight
, Infix (binOp "DIV" NumDiv) AssocRight
, Infix (binOp "%" NumMod) AssocRight
, Infix (binOp "MOD" NumMod) AssocRight
]
, [ Infix (binOp "+" NumPlus) AssocLeft
, Infix (binOp "PLUS" NumPlus) AssocLeft
, Infix (binOp "-" NumMinus) AssocLeft
, Infix (binOp "MINUS" NumMinus) AssocLeft
]
, [ Prefix $ unaryOperators setUnary
]
, [ Infix (binOp "(-)" SetMinus) AssocRight
, Infix (binOp "(\\)" SetMinus) AssocRight
, Infix (binOp "SETMINUS" SetMinus) AssocRight
]
, [ Infix (binOp "(*)" SetCap) AssocLeft
, Infix (binOp "CAP" SetCap) AssocLeft
]
, [ Infix (binOp "(+)" SetCup) AssocLeft
, Infix (binOp "CUP" SetCup) AssocLeft
]
, [ Infix (binOp "==" BlnEQ) AssocLeft
, Infix (binOp "EQ" BlnEQ) AssocLeft
, Infix (binOp "/=" BlnNEQ) AssocLeft
, Infix (binOp "!=" BlnNEQ) AssocLeft
, Infix (binOp "NEQ" BlnNEQ) AssocLeft
, Infix (binOp ">" BlnGE) AssocLeft
, Infix (binOp "GE" BlnGE) AssocLeft
, Infix (binOp ">=" BlnGEQ) AssocLeft
, Infix (binOp "GEQ" BlnGEQ) AssocLeft
, Infix (binOp "<" BlnLE) AssocLeft
, Infix (binOp "LE" BlnLE) AssocLeft
, Infix (binOp "<=" BlnLEQ) AssocLeft
, Infix (binOp "LEQ" BlnLEQ) AssocLeft
]
, [ Infix (binOp "<-" BlnElem) AssocLeft
, Infix (binOp "IN" BlnElem) AssocLeft
, Infix (binOp "ELEM" BlnElem) AssocLeft
]
, [ Prefix $ unaryOperators ltlUnary
]
, [ Infix (binOp "&&" BlnAnd) AssocLeft
, Infix (binOp "AND" BlnAnd) AssocLeft
]
, [ Infix (binOp "||" BlnOr) AssocLeft
, Infix (binOp "OR" BlnOr) AssocLeft
]
, [ Infix (binOp "->" BlnImpl) AssocRight
, Infix (binOp "IMPIES" BlnImpl) AssocRight
, Infix (binOp "<->" BlnEquiv) AssocRight
, Infix (binOp "EQUIV" BlnEquiv) AssocRight
]
, [ Infix (binOp "W" LtlWeak) AssocRight
]
, [ Infix (binOp "U" LtlUntil) AssocRight
]
, [ Infix (binOp "R" LtlRelease) AssocLeft
]
, [ Infix (binOp "S" LtlSince) AssocRight
]
, [ Infix (binOp "T" LtlTriggered) AssocLeft
]
, [ Infix (binOp "~" Pattern) AssocLeft
]
, [ Infix (binOp ":" Colon) AssocLeft
]
]
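-- Note (illustrative): rows earlier in 'table' bind tighter, so for example
-- "a && b || c -> d" parses as ((a && b) || c) -> d, and "->" associates to
-- the right.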
tokenDef =
globalDef
{ opStart = oneOf "!&|-<=/+*%(:~,."
, opLetter = oneOf "!&|<->=/\\[+*%():~,."
, reservedOpNames =
["!","&&","||","->","<->","==","/=","<",">","<=",">=",
"<-","&&[","||[","NOT","AND","OR","IMPLIES","EQUIV","EQ",
"NEQ", "LE", "GE", "LEQ", "GEQ", "ELEM","AND[","OR[",
"+","-","*","/","%","PLUS","MINUS","MUL","DIV","MOD",
"SIZE","MIN","MAX","(-)","(\\)","(+)","(*)","SETMINUS",
"CAP","CUP",":","~","W","U","R","X","Y","G","F","H","O",
"S","T","X[","Y[","G[","F[","H[","O[","AND[","OR[",
"SUM","PROD","IN","SIZEOF"]
, reservedNames =
["NOT","AND","OR","IMPLIES","EQUIV","true","false","F",
"PLUS","MINUS","MUL","DIV","MOD","SIZE","MIN","MAX","_",
"SETMINUS","CAP","CUP","otherwise","W","U","R","X","G",
"SUM","PROD","IN","SIZEOF","Y","H","O","S","T"] }
tokenparser = makeTokenParser tokenDef
term =
parentheses
<|> setExplicit
<|> between' '|' '|' (liftM NumSSize exprParser)
<|> keyword "otherwise" BaseOtherwise
<|> keyword "false" BaseFalse
<|> keyword "true" BaseTrue
<|> keyword "_" BaseWild
<|> constant
<|> ident
numUnary =
unOp6 'S' 'I' 'Z' 'E' 'O' 'F' NumSizeOf
<|> unOp4 'S' 'I' 'Z' 'E' NumSSize
<|> unOp3 'M' 'I' 'N' NumSMin
<|> unOp3 'M' 'A' 'X' NumSMax
<|> parOp "+" manyExprParser NumRPlus
<|> parOp "SUM" manyExprParser NumRPlus
<|> parOp "*" manyExprParser NumRMul
<|> parOp "PROD" manyExprParser NumRMul
setUnary =
parOp "(+)" manyExprParser SetRCup
<|> parOp "CUP" manyExprParser SetRCap
<|> parOp "(-)" manyExprParser SetRCup
<|> parOp "CAP" manyExprParser SetRCap
ltlUnary =
unOp' '!' BlnNot
<|> unOp3 'N' 'O' 'T' BlnNot
<|> unOp1 'X' LtlNext
<|> unOp1 'Y' LtlPrevious
<|> unOp1 'G' LtlGlobally
<|> unOp1 'F' LtlFinally
<|> unOp1 'H' LtlHistorically
<|> unOp1 'O' LtlOnce
<|> parOp "X" exprParser LtlRNext
<|> parOp "Y" exprParser LtlRPrevious
<|> parOp "G" exprParser LtlRGlobally
<|> parOp "F" exprParser LtlRFinally
<|> parOp "H" exprParser LtlRHistorically
<|> parOp "O" exprParser LtlROnce
<|> parOp "&&" manyExprParser BlnRAnd
<|> parOp "AND" manyExprParser BlnRAnd
<|> parOp "FORALL" manyExprParser BlnRAnd
<|> parOp "||" manyExprParser BlnROr
<|> parOp "OR" manyExprParser BlnROr
<|> parOp "EXISTS" manyExprParser BlnROr
parentheses = do
notFollowedBy $ ch '(' >> oneOf "+-*/"
between' '(' ')' $ liftM expr exprParser
keyword x c = do
s <- getPos
void $ reserved tokenparser x
return $ Expr c $ ExprPos s $
SrcPos (srcLine s) (srcColumn s + length x)
setExplicit = do
s <- getPos; ch '{'; (~~)
emptySet s <|> nonEmptySet s
emptySet s = do
e <- closeSet
return $ Expr (SetExplicit []) (ExprPos s e)
nonEmptySet s = do
x <- exprParser
singeltonSet s x <|> nonSingeltonSet s x
singeltonSet s x = do
e <- closeSet
return $ Expr (SetExplicit [x]) (ExprPos s e)
nonSingeltonSet s x = do
ch ','; (~~)
y <- exprParser
twoElmSet s x y <|> rangeSet s x y <|> manyElmSet s x y
twoElmSet s x y = do
e <- closeSet
return $ Expr (SetExplicit [x,y]) (ExprPos s e)
rangeSet s x y = do
ch '.'; ch '.'; (~~)
z <- exprParser
e <- closeSet
return $ Expr (SetRange x y z) (ExprPos s e)
manyElmSet s x y = do
ch ','; (~~)
xs <- manyExprParser
e <- closeSet
return $ Expr (SetExplicit (x:y:xs)) (ExprPos s e)
closeSet = do { ch '}'; e <- getPos; (~~); return e }
binOp x c = do
reservedOp tokenparser x
return $ \a b -> Expr (c a b) $
ExprPos (srcBegin $ srcPos a) $
srcEnd $ srcPos b
unaryOperators p = do
(x:xr) <- many1 $ unaryOperator p
return $ conUnOp x xr
unaryOperator p = do
s <- getPos
c <- p
return (s,c)
conUnOp (s,c) xs = case xs of
[] -> \e -> Expr (c e) $
ExprPos s $ srcEnd $ srcPos e
(x:xr) -> \e -> Expr (c $ conUnOp x xr e) $
ExprPos s $ srcEnd $ srcPos e
unOp6 c1 c2 c3 c4 c5 c6 c = try $ do
ch4 c1 c2 c3 c4
ch2 c5 c6
lookahead
return c
unOp4 c1 c2 c3 c4 c = try $ do
ch4 c1 c2 c3 c4
lookahead
return c
unOp' x c = do
ch x
(~~)
return c
unOp1 x c = try $ do
ch x
lookahead
return c
unOp3 c1 c2 c3 c = try $ do
ch2 c1 c2
ch c3
lookahead
return c
parOp x p c = do
reservedOp tokenparser (x ++ "[")
e <- p; ch ']'; (~~)
return (c e)
between' c1 c2 p = do
s <- getPos; ch c1; (~~); x <- p
ch c2; e <- getPos; (~~)
return $ Expr x $ ExprPos s e
constant = do
(x,pos) <- positionParser (~~) $ many1 digit
return $ Expr (BaseCon $ read x) pos
ident = do
(i,pos) <- identifier (~~)
functionParser pos i
<|> busParser pos i
<|> return (Expr (BaseId i) pos)
functionParser pos i = do
notFollowedBy $ ch '(' >> oneOf "+-*/"
ch '('; (~~)
ys <- manyExprParser
ch ')'; e <- getPos; (~~)
return $ Expr (BaseFml ys i) $
ExprPos (srcBegin pos) e
busParser pos i = do
ch '['; (~~)
x <- exprParser
ch ']'; p <- getPos; (~~)
return $ Expr (BaseBus x i) $
ExprPos (srcBegin pos) p
manyExprParser = commaSep tokenparser exprParser
(~~) = whiteSpace tokenparser
lookahead = do
lookAhead (ch ' ' <|> ch '(' <|> ch '\t' <|> ch '\n')
(~~)
ch = void . char
ch2 c1 c2 = do { ch c1; ch c2 }
ch4 c1 c2 c3 c4 = do { ch2 c1 c2; ch2 c3 c4 }
-----------------------------------------------------------------------------
|
reactive-systems/syfco
|
src/lib/Reader/Parser/Expression.hs
|
mit
| 10,644
| 0
| 26
| 3,731
| 3,481
| 1,784
| 1,697
| 272
| 2
|
> module Parsing where
>
>
> import Data.Char
> import Control.Monad
>
> infixr 5 +++
The monad of parsers
--------------------
> newtype Parser a = P (String -> [(a,String)])
>
> instance Monad Parser where
> -- return :: a -> Parser b
> return v = P (\inp -> [(v,inp)])
>
> -- (>>=) :: Parser a -> (a -> Parser b) -> Parser b
> p >>= f = P (\inp -> case parse p inp of
> [(v,out)] -> parse (f v) out
> [] -> [])
>
> instance MonadPlus Parser where
> mzero = P (\inp -> [])
> p `mplus` q = P (\inp -> case parse p inp of
> [] -> parse q inp
> [(v,out)] -> [(v,out)])
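With GHC 7.10 and later, Applicative is a superclass of Monad, so the
instance above also needs Functor and Applicative instances (and MonadPlus
similarly needs an Alternative instance, which can reuse mzero and mplus).
A minimal sketch, written in terms of return and (>>=):

> instance Functor Parser where
>    -- fmap :: (a -> b) -> Parser a -> Parser b
>    fmap g p = p >>= \v -> return (g v)
>
> instance Applicative Parser where
>    -- pure :: a -> Parser a
>    pure v = P (\inp -> [(v,inp)])
>
>    -- (<*>) :: Parser (a -> b) -> Parser a -> Parser b
>    pg <*> px = pg >>= \g -> px >>= \v -> return (g v)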
Basic parsers
-------------
> failure :: Parser a
> failure = mzero
>
> item :: Parser Char
> item = P (\inp -> case inp of
> [] -> []
> (x:xs) -> [(x,xs)])
>
> parse :: Parser a -> String -> [(a,String)]
> parse (P p) inp = p inp
Choice
------
> (+++) :: Parser a -> Parser a -> Parser a
> p +++ q = p `mplus` q
Derived primitives
------------------
> sat :: (Char -> Bool) -> Parser Char
> sat p = do x <- item
> if p x then return x else failure
>
> digit :: Parser Char
> digit = sat isDigit
>
> lower :: Parser Char
> lower = sat isLower
>
> upper :: Parser Char
> upper = sat isUpper
>
> letter :: Parser Char
> letter = sat isAlpha
>
> alphanum :: Parser Char
> alphanum = sat isAlphaNum
>
> char :: Char -> Parser Char
> char x = sat (== x)
>
> pfchar :: Char -> Parser Char
> pfchar x = (sat . (==)) x
>
> string :: String -> Parser String
> string [] = return []
> string (x:xs) = do char x
> string xs
> return (x:xs)
>
> many :: Parser a -> Parser [a]
> many p = many1 p +++ return []
>
> many1 :: Parser a -> Parser [a]
> many1 p = do v <- p
> vs <- many p
> return (v:vs)
>
> ident :: Parser String
> ident = do x <- lower
> xs <- many alphanum
> return (x:xs)
>
> nat :: Parser Int
> nat = do xs <- many1 digit
> return (read xs)
>
> int :: Parser Int
> int = (do char '-'
> n <- nat
> return (-n))
> +++ nat
>
> space :: Parser ()
> space = do many (sat isSpace)
> return ()
>
> comment :: Parser ()
> comment = do string "--"
> many (sat (/= '\n'))
> return ()
>
> expr :: Parser Int
> expr = do n <- natural
> ns <- many (do symbol "-"
> natural)
> return (foldl (-) n ns)
Ignoring spacing
----------------
> token :: Parser a -> Parser a
> token p = do space
> v <- p
> space
> return v
>
> identifier :: Parser String
> identifier = token ident
>
> natural :: Parser Int
> natural = token nat
>
> integer :: Parser Int
> integer = token int
>
> symbol :: String -> Parser String
> symbol xs = token (string xs)
|
anwb/fp-one-on-one
|
lecture-07-hw.hs
|
mit
| 4,952
| 127
| 15
| 3,039
| 1,343
| 693
| 650
| -1
| -1
|
module ArithmeticSpec (main, spec) where
import Test.Hspec
import Arithmetic
import Control.Exception (evaluate)
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "primes" $ do
it "first 10 terms are [2,3,5,7,11,13,17,19,23,29]" $ do
take 10 primes `shouldBe` [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
describe "Problem 31" $ do
it "returns true when n = 7" $ do
isPrime 7 `shouldBe` True
it "returns true when n = 2" $ do
isPrime 2 `shouldBe` True
it "returns false when n = 6" $ do
isPrime 6 `shouldBe` False
it "returns false when n = 1" $ do
isPrime 1 `shouldBe` False
describe "Problem 32" $ do
it "returns 9 when (m, n) = (36, 63)" $ do
myGCD 36 63 `shouldBe` 9
it "returns 3 when (m, n) = (-3, -6)" $ do
myGCD (-3) (-6) `shouldBe` 3
it "returns 3 when (m, n) = (-3, 6)" $ do
myGCD (-3) 6 `shouldBe` 3
describe "Problem 33" $ do
it "returns True when (m, n) = (35, 64)" $ do
coprime 35 64 `shouldBe` True
it "returns True when (m, n) = (109, 2)" $ do
coprime 109 2 `shouldBe` True
it "returns False when (m, n) = (4, 14)" $ do
coprime 4 14 `shouldBe` False
describe "Problem 34" $ do
it "returns 4 when n = 10" $ do
totient 10 `shouldBe` 4
it "returns 6 when n = 7" $ do
totient 7 `shouldBe` 6
describe "Problem 35" $ do
it "returns [3,3,5,7] when n = 315" $ do
primeFactors 315 `shouldBe` [3, 3, 5, 7]
it "returns [109] when n = 109" $ do
primeFactors 109 `shouldBe` [109]
describe "Problem 36" $ do
it "returns [(3,2),(5,1),(7,1)] when n = 315" $ do
primeFactorsMult 315 `shouldBe` [(3, 2), (5, 1), (7, 1)]
it "returns [(109, 1)] when n = 109" $ do
primeFactorsMult 109 `shouldBe` [(109, 1)]
describe "Problem 37" $ do
it "returns 4 when n = 10" $ do
totient2 10 `shouldBe` 4
it "returns 6 when n = 7" $ do
totient2 7 `shouldBe` 6
describe "Problem 39" $ do
it "returns [11,13,17,19] when (s, e) = (10, 20)" $ do
primesR 10 20 `shouldBe` [11, 13, 17, 19]
describe "Problem 40" $ do
it "returns (5, 23) when n = 28" $ do
goldbach 28 `shouldBe` (5, 23)
it "throws exception (1)" $ do
evaluate (goldbach 27) `shouldThrow` errorCall "n must be even number"
it "throws exception (2)" $ do
evaluate (goldbach 1) `shouldThrow` errorCall "n must be grater than 2"
describe "Problem 41 (1)" $ do
it "returns [(3,7),(5,7),(3,11),(3,13),(5,13),(3,17)] when (s, e) = (9, 20)" $ do
goldbachList 9 20 `shouldBe` [(3 , 7), (5, 7), (3, 11), (3, 13), (5, 13), (3, 17)]
describe "Problem 41 (2)" $ do
it "returns [(73,919),(61,1321),(67,1789),(61,1867)] when (s, e, m) = (4, 2000, 50)" $ do
goldbachList' 4 2000 50 `shouldBe` [(73, 919), (61, 1321), (67, 1789), (61, 1867)]
|
yyotti/99Haskell
|
src/test/ArithmeticSpec.hs
|
mit
| 2,874
| 0
| 16
| 793
| 972
| 486
| 486
| 70
| 1
|
-- Copyright 2015 Mitchell Kember. Subject to the MIT License.
-- Project Euler: Problem 2
-- Even Fibonacci numbers
module Problem02 where
import Common (fibonacci)
solve :: Int
solve = sum . takeWhile (<= 4000000) . filter even $ fibonacci
|
mk12/euler
|
haskell/Problem02.hs
|
mit
| 245
| 0
| 9
| 43
| 47
| 28
| 19
| 4
| 1
|
module Party where
import Employee
import Data.Monoid
import Data.Tree
import Data.List
glCons :: Employee -> GuestList -> GuestList
glCons e (GL gl f) = GL (e:gl) (f + empFun e)
instance Monoid GuestList where
mempty = GL [] 0
mappend (GL l1 f1) (GL l2 f2) = GL (l1 ++ l2) (f1 + f2)
moreFun :: GuestList -> GuestList -> GuestList
moreFun a@(GL _ f1) b@(GL _ f2)
| f1 > f2 = a
| otherwise = b
treeFold :: (a -> [b] -> b) -> Tree a -> b
treeFold f (Node a xs) = f a (map (treeFold f) xs)
nextLevel :: Employee -> [(GuestList, GuestList)]
-> (GuestList, GuestList)
nextLevel e gs = (withE,withoutE)
where
inclSubBoss = mconcat $ map fst gs
exclSubBoss = mconcat $ map snd gs
withE = glCons e exclSubBoss
withoutE = inclSubBoss
maxFun :: Tree Employee -> GuestList
maxFun = uncurry moreFun . treeFold nextLevel
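-- Illustrative example (assuming the usual CIS 194 'Employee' definition,
-- Emp { empName, empFun }):
--   maxFun (Node (Emp "boss" 2) [Node (Emp "b" 5) [], Node (Emp "c" 1) []])
-- evaluates to GL [Emp "b" 5, Emp "c" 1] 6, i.e. it is more fun to invite the
-- two subordinates (fun 6) than the boss alone (fun 2).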
guestNames :: GuestList -> String
guestNames (GL es _) = unlines . sort $ map empName es
listHeader :: GuestList -> String
listHeader (GL _ f) = "Total Fun: " ++ (show f)
|
bachase/cis194
|
hw8/Party.hs
|
mit
| 1,044
| 0
| 9
| 250
| 460
| 239
| 221
| 29
| 1
|
module Text.Documentalist.SourceParser ( module Text.Documentalist.Types.Comment
, module Text.Documentalist.Types.Package
, SourceParser(..)
) where
import Control.Monad.IO.Class
import Text.Documentalist.Types.Comment
import Text.Documentalist.Types.Package
-- | Extracts comments from a specific source language.
class MonadIO p => SourceParser p where
-- | Parses a package into a language-independent form.
--
-- Any errors will be indicated with a thrown 'Exception'.
parse :: FilePath -> p (Package (Maybe Comment))
|
jspahrsummers/documentalist
|
Text/Documentalist/SourceParser.hs
|
mit
| 660
| 0
| 12
| 199
| 97
| 61
| 36
| 8
| 0
|
{-# LANGUAGE RecordWildCards, QuasiQuotes #-} -- , OverloadedStrings #-}
module Compiler where
import Text.RawString.QQ
import Prelude hiding (lookup)
import Numeric (showIntAtBase)
import Data.Map (Map, fromList, lookup)
import Debug.Trace
import Data.Maybe
import Data.Hashable
import Data.Char
import Types
import Testing
import Parse
import Graphviz
encodeArray :: [(Int, Char)]
encodeArray = [ (0,'A'), (1,'B'), (2,'C'), (3,'D'), (4,'E'), (5,'F')
, (6,'G'), (7,'H'), (8,'I'), (9,'J'), (10,'K'), (11,'L')
, (12,'M'), (13,'N'), (14,'O'), (15,'P'), (16,'Q'), (17,'R')
, (18,'S'), (19,'T'), (20,'U'), (21,'V'), (22,'W'), (23,'X')
, (24,'Y'), (25,'Z'), (26,'a'), (27,'b'), (28,'c'), (29,'d')
, (30,'e'), (31,'f'), (32,'g'), (33,'h'), (34,'i'), (35,'j')
, (36,'k'), (37,'l'), (38,'m'), (39,'n'), (40,'o'), (41,'p')
, (42,'q'), (43,'r'), (44,'s'), (45,'t'), (46,'u'), (47,'v')
, (48,'w'), (49,'x'), (50,'y'), (51,'z'), (52,'0'), (53,'1')
, (54,'2'), (55,'3'), (56,'4'), (57,'5'), (58,'6'), (59,'7')
, (60,'8'), (61,'9'), (62,'_'), (63,'/') ]
selectBase64Char :: Int -> Char
selectBase64Char ch = snd (encodeArray!!ch)
----------------
postVisitor :: (Expr -> Expr) -> Expr -> Expr
postVisitor f (Numeric op l r) = Numeric op (f (postVisitor f l)) (f (postVisitor f r))
postVisitor f (Boolean op l r) = Boolean op (f (postVisitor f l)) (f (postVisitor f r))
postVisitor f (Compose op l r) = Compose op (f (postVisitor f l)) (f (postVisitor f r))
postVisitor f (Pair l r) = Pair (f (postVisitor f l)) (f (postVisitor f r))
postVisitor f (Triple first s t) = Triple (f (postVisitor f first)) (f (postVisitor f s)) (f (postVisitor f t))
postVisitor f (Cons l r) = Cons (f (postVisitor f l)) (f (postVisitor f r))
postVisitor f (Sel l r) = Sel (f (postVisitor f l)) (f (postVisitor f r))
postVisitor f (IfE c t false) = IfE (f (postVisitor f c)) (f (postVisitor f t)) (f (postVisitor f false))
postVisitor f (App name arg) = App (f (postVisitor f name)) (f (postVisitor f arg))
postVisitor f (Def n b) = Def (f (postVisitor f n)) (f (postVisitor f b))
postVisitor f (Let exprs body) = Let (map f (map (postVisitor f) exprs)) (f (postVisitor f body))
postVisitor f (Lambda p b) = Lambda (f (postVisitor f p)) (f (postVisitor f b))
postVisitor f e = case e of
TpDef n v -> TpDef (f (postVisitor f n)) (f (postVisitor f v))
FuncCall funcName args -> FuncCall funcName (map f (map (postVisitor f) args))
FuncDef funcName args body -> FuncDef funcName args (f (postVisitor f body))
Type varName typeName -> Type (f (postVisitor f varName)) (f (postVisitor f typeName))
List typeName -> List $ f $ postVisitor f typeName
TIdf varName varType -> TIdf varName $ f $ postVisitor f varType
_ -> e
----------------
-- Convert App's to FuncCall's (and thereby flatten nested App's).
appToFuncCall' :: Expr -> Expr
appToFuncCall' (App (Idf name) arg) = FuncCall name [arg]
appToFuncCall' (App (FuncCall name args) arg) = FuncCall name (args ++ [arg])
appToFuncCall' t = t
appToFuncCall = postVisitor appToFuncCall'
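-- Illustrative example: postVisitor rewrites children, not the root, so
-- inside any surrounding node an application chain such as
-- App (App (Idf "f") (Num 1)) (Num 2) is flattened to
-- FuncCall "f" [Num 1, Num 2].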
-- Convert definitions of lambdas into FuncDefs. Used in combination with
-- lambdaToDef, which wraps the extracted lambdas in Defs.
defToFuncDef e = case e of
(Def (Idf name) l@(Lambda arg body)) -> FuncDef name args expr
where
FuncDef "" args expr = defToFuncDef l
(Lambda (Idf arg) l@(Lambda _ _)) -> FuncDef "" (arg:args) body
where
FuncDef "" args body = defToFuncDef l
(Lambda (Idf arg) body) -> FuncDef "" [arg] body
Let exprs body -> Let (map defToFuncDef exprs) body
_ -> e
-- Extract lambdas to Def
gatherLambdas :: Expr -> [Expr]
gatherLambdas t = case t of
Numeric _ l r -> (gatherLambdas l) ++ (gatherLambdas r)
Boolean _ l r -> (gatherLambdas l) ++ (gatherLambdas r)
Compose _ l r -> (gatherLambdas l) ++ (gatherLambdas r)
Pair l r -> (gatherLambdas l) ++ (gatherLambdas r)
Triple l m r -> (gatherLambdas l) ++ (gatherLambdas m) ++ (gatherLambdas r)
Cons l r -> (gatherLambdas l) ++ (gatherLambdas r)
Sel l r -> (gatherLambdas l) ++ (gatherLambdas r)
IfE l m r -> (gatherLambdas l) ++ (gatherLambdas m) ++ (gatherLambdas r)
App l r -> (gatherLambdas l) ++ (gatherLambdas r)
Def l r -> (gatherLambdas l) ++ (gatherLambdas r)
Let exprs body -> (concat $ map gatherLambdas exprs) ++ (gatherLambdas body)
t@(Lambda l r) -> [t]
FuncCall _ exprs -> concat $ map gatherLambdas exprs
FuncDef _ _ body -> gatherLambdas body
_ -> []
lambdaToHash :: Expr -> Expr
lambdaToHash t = case t of
Numeric op l r -> Numeric op (lambdaToHash l) (lambdaToHash r)
Boolean op l r -> Boolean op (lambdaToHash l) (lambdaToHash r)
Compose op l r -> Compose op (lambdaToHash l) (lambdaToHash r)
Pair l r -> Pair (lambdaToHash l) (lambdaToHash r)
Triple l m r -> Triple (lambdaToHash l) (lambdaToHash m) (lambdaToHash r)
Cons l r -> Cons (lambdaToHash l) (lambdaToHash r)
Sel l r -> Sel (lambdaToHash l) (lambdaToHash r)
IfE l m r -> IfE (lambdaToHash l) (lambdaToHash m) (lambdaToHash r)
App l r -> App (lambdaToHash l) (lambdaToHash r)
Def l r -> Def (lambdaToHash l) (lambdaToHash r)
Let exprs body -> Let (map lambdaToHash exprs) (lambdaToHash body)
t@(Lambda l r) -> Idf (getLambdaName t)
FuncCall funcName exprs -> FuncCall funcName (map lambdaToHash exprs)
FuncDef funcName args body -> FuncDef funcName args (lambdaToHash body)
t -> t
hasLambdas :: Expr -> Bool
hasLambdas t = case t of
Numeric op l r -> (hasLambdas l) || (hasLambdas r)
Boolean op l r -> (hasLambdas l) || (hasLambdas r)
Compose op l r -> (hasLambdas l) || (hasLambdas r)
Pair l r -> (hasLambdas l) || (hasLambdas r)
Triple l m r -> (hasLambdas l) || (hasLambdas m) || (hasLambdas r)
Cons l r -> (hasLambdas l) || (hasLambdas r)
Sel l r -> (hasLambdas l) || (hasLambdas r)
IfE l m r -> (hasLambdas l) || (hasLambdas m) || (hasLambdas r)
App l r -> (hasLambdas l) || (hasLambdas r)
Def l r -> (hasLambdas l) || (hasLambdas r)
Let exprs body -> (foldl1 (||) (map hasLambdas exprs)) || (hasLambdas body)
Lambda l r -> True
FuncCall funcName exprs -> foldl1 (||) (map hasLambdas exprs)
FuncDef funcName args body -> hasLambdas body
_ -> False
removeSlash [] = []
removeSlash ('/':xs) = "__" ++ removeSlash xs
removeSlash (x:xs) = x : removeSlash xs
getLambdaName t@(Lambda arg body)
| val < 0 = prefix ('_' : posVal)
| otherwise = prefix posVal
where
val = hash t
posVal = removeSlash $ showIntAtBase 64 selectBase64Char (abs val) ""
prefix = ("lf" ++)
lambdaToDef t@(Lambda arg body) = Def (Idf (getLambdaName t)) t
moveLambdas t@(Let _ _) = Let (exprs ++ allLambdas) nudeTree
where
allLambdas = map lambdaToDef $ gatherLambdas t
Let exprs nudeTree = lambdaToHash t
removeLambdas tree
| hasLambdas trans = removeLambdas trans
| otherwise = trans
where
trans = defToFuncDef $ moveLambdas $ tree
isTypename :: String -> Bool
isTypename = isUpper . head
doTyping' u@(Cons t@(Idf typename) Null)
| isTypename typename = List t
| otherwise = u
doTyping' (Cons t@(List typeExpr) Null) = List t
doTyping' (TpDef n@(Idf _) t@(List _)) = Type n t
doTyping' r@(TpDef n@(Idf _) t@(Idf typename)) = Type n t
doTyping' t = t
doTyping = postVisitor doTyping'
-- Get types of arguments of global let
getGlobalTypes :: Expr -> [(String, Expr)]
getGlobalTypes e = case e of
(Type (Idf varName) varType) -> [(varName, varType)]
(Def (Idf varName) (Num _)) -> [(varName, Idf "Int")]
(Let exprs body) -> concat $ map getGlobalTypes exprs
(FuncDef name args _) -> [(name, FuncSig $ length args)]
(Def (TIdf varName varType) _) -> [(varName, varType)]
_ -> []
applyTypes' :: Map String Expr -> Expr -> Expr
applyTypes' types e = case e of
Idf varName -> case lookup varName types of
Just varType -> TIdf varName varType
Nothing -> Idf varName
Def (Idf name) varType@(TIdf otherName otherType) -> Def (TIdf name otherType) varType
_ -> e
applyTypes types = postVisitor (applyTypes' types)
getNames :: Expr -> [String]
getNames (Idf varName) = [varName]
getNames _ = []
getLocalTypes :: Expr -> [(String, Expr)]
getLocalTypes e = case e of
Numeric _ l r -> (map (\x -> (x, Idf "Int")) ((getNames l) ++ (getNames r))) ++ getLocalTypes l ++ getLocalTypes r
Boolean _ l r -> (map (\x -> (x, Idf "Bool")) ((getNames l) ++ (getNames r))) ++ getLocalTypes l ++ getLocalTypes r
Compose _ l r -> (map (\x -> (x, Idf "Compose")) ((getNames l) ++ (getNames r))) ++ getLocalTypes l ++ getLocalTypes r
Pair l r -> getLocalTypes l ++ getLocalTypes r
Triple l m r -> getLocalTypes l ++ getLocalTypes m ++ getLocalTypes r
Null -> []
Cons l r -> getLocalTypes l ++ getLocalTypes r
Sel l r -> getLocalTypes l ++ getLocalTypes r
IfE c t f -> getLocalTypes c ++ getLocalTypes t ++ getLocalTypes f
App f arg -> getLocalTypes f ++ getLocalTypes arg
Def name body -> getLocalTypes name ++ getLocalTypes body
Let exprs body -> (concat $ map getLocalTypes exprs) ++ getLocalTypes body
FuncCall _ args -> concat $ map getLocalTypes args
FuncDef _ _ body -> getLocalTypes body
Type _ _ -> [] -- Type should not have Num in it
List _ -> [] -- Neither
TIdf _ _ -> [] -- Neither
_ -> []
hasUntypedLeft :: Expr -> Bool
hasUntypedLeft e = case e of
Idf _ -> True
Num _ -> False
Bln _ -> False
Numeric _ l r -> hasUntypedLeft l || hasUntypedLeft r
Boolean _ l r -> hasUntypedLeft l || hasUntypedLeft r
Compose _ l r -> hasUntypedLeft l || hasUntypedLeft r
Pair l r -> hasUntypedLeft l || hasUntypedLeft r
Triple l m r -> hasUntypedLeft l || hasUntypedLeft m || hasUntypedLeft r
Cons l r -> hasUntypedLeft l || hasUntypedLeft r
Sel l r -> hasUntypedLeft l || hasUntypedLeft r
IfE c t f -> hasUntypedLeft c || hasUntypedLeft t || hasUntypedLeft f
App l r -> hasUntypedLeft l || hasUntypedLeft r
Def l r -> hasUntypedLeft l || hasUntypedLeft r
Let ls r -> (foldl1 (||) (map hasUntypedLeft ls)) || hasUntypedLeft r
Lambda arg body -> hasUntypedLeft arg || hasUntypedLeft body
FuncCall _ args -> foldl1 (||) $ map hasUntypedLeft args
FuncDef _ _ body -> hasUntypedLeft body
FuncSig args -> False
Type name typeExpr -> hasUntypedLeft name
List _ -> False
TIdf _ _ -> False
getTypeOfExpr :: Expr -> Expr
getTypeOfExpr e = case e of
Num _ -> Idf "Int"
Bln _ -> Idf "Bool"
Numeric _ _ _ -> Idf "Int"
Boolean _ _ _ -> Idf "Bool"
TIdf _ varType -> varType
_ -> error ((show e) ++ " not supported")
typeLocalVars' (FuncDef name args body) = FuncDef name args typedBody
where
localTypes = fromList $ (map (\x -> (x, Idf "Int")) args)
typedBody = applyTypes localTypes body
typeLocalVars' t = t
getSig (FuncDef name args _) = (name, FuncSig $ length args)
getSig _ = ("", Empty)
typeLocalVars (Let exprs body) = Let partiallyTypedExprs body
where
partiallyTypedExprs = map typeLocalVars' exprs
showTaal id = showTree $ doTaal id
saveTaal id = saveTree $ doTaal id
doTypes t
| hasUntypedLeft t' = doTypes t'
| otherwise = t'
where
t' = typeLocalVars $ applyTypes globalVarTypes t
globalVarTypes = fromList $ getGlobalTypes t
doTaal id = doTypes first
where
first = doTyping $ removeLambdas $ appToFuncCall $ showExpr id
|
bobismijnnaam/hywar
|
Compiler.hs
|
mit
| 11,832
| 0
| 15
| 2,929
| 5,384
| 2,733
| 2,651
| 227
| 21
|
module Background where
import Graphics.Gloss
data Background = Back { backX :: Float, backY :: Float, backgroundSprite :: Picture }
backGround :: Background
backGround = Back { backX = 0, backY = 1200, backgroundSprite = ground }
moveBackground :: Background -> Background
moveBackground back = back { backY = newBackY }
where
newBackY = (backY back) - scrollSpeed
scrollSpeed = 1
-- Function that moves a background object downward.
-- This should be run every cycle so that the level auto-scrolls.
moveBackgroundPicture :: Background -> Picture
moveBackgroundPicture bg = translate xPos yPos pic
where
xPos = backX bg
yPos = backY bg
pic = backgroundSprite bg
-- Old Background using Picture instead of Background.
-- (0, 0) = center of the Window.
-- Increasing X --> to the right, increasing Y --> upward.
ground, drawing, drawing1, drawing2 :: Picture
ground = color green (polygon (rectanglePath 1000 2400))
drawing = circle 10
drawing1 = translate 200 200 (circle 30)
drawing2 = translate (-400) (-100) (circle 100)
backgroundPictures :: [Picture]
backgroundPictures = [ground, drawing, drawing1, drawing2]
|
ThomasBDev/Iron-Rat
|
src/Background.hs
|
gpl-2.0
| 1,422
| 0
| 9
| 470
| 282
| 163
| 119
| 21
| 1
|
{-# LANGUAGE CPP #-}
-- | Repository model
module Darcs.Test.Patch.V1Model
( module Storage.Hashed.AnchoredPath
, V1Model, repoTree
, RepoItem, File, Dir, Content
, makeRepo, emptyRepo
, makeFile, emptyFile
, emptyDir
, nullRepo
, isFile, isDir
, fileContent, dirContent
, isEmpty
, root
, filterFiles, filterDirs
, find
, list
, ap2fp
, aFilename, aDirname
, aLine, aContent
, aFile, aDir
, aRepo
) where
import Darcs.Test.Util.QuickCheck ( alpha, uniques, bSized )
import Darcs.Test.Patch.RepoModel
import Darcs.Patch.Apply( applyToTree )
import Darcs.Patch.Witnesses.Sealed ( Sealed, seal )
import Darcs.Patch.Witnesses.Show
import Storage.Hashed.AnchoredPath
import Storage.Hashed.Tree( Tree, TreeItem )
import Storage.Hashed.Darcs ( darcsUpdateHashes )
import qualified Storage.Hashed.Tree as T
import Control.Applicative ( (<$>) )
import Control.Arrow ( second )
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BLC
import Data.List ( intercalate )
import qualified Data.Map as M
import Test.QuickCheck
( Arbitrary(..)
, Gen, choose, vectorOf, frequency )
#include "impossible.h"
----------------------------------------------------------------------
-- * Model definition
-- | A repository is an abstraction built on top of a 'Tree'.
-- NB: Repository preferences are not supported yet.
newtype V1Model wX = V1Model {
repoTree :: Tree Fail
}
-- | Repository items may be text files or directories.
-- NB: Binary files are not supported yet.
newtype RepoItem = RepoItem {
treeItem :: TreeItem Fail
}
type File = RepoItem
type Dir = RepoItem
type Content = [B.ByteString]
----------------------------------------
-- Instances
instance Show (V1Model wX) where
show repo = "V1Model{ "
++ intercalate " " (map showEntry $ list repo)
++ " }"
where
showPath = show . flatten
showContent content = "[" ++ intercalate " " (map show content) ++ "]"
showEntry (path,item)
| isDir item = showPath path
| isFile item = showPath path ++ showContent (fileContent item)
showEntry _ = impossible
instance Show1 V1Model where
showDict1 = ShowDictClass
----------------------------------------
-- Utils
bs2lbs :: B.ByteString -> BL.ByteString
bs2lbs bs = BL.fromChunks [bs]
lbs2bs :: BL.ByteString -> B.ByteString
lbs2bs = B.concat . BL.toChunks
content2lbs :: Content -> BL.ByteString
content2lbs = BLC.unlines . map bs2lbs
lbs2content :: BL.ByteString -> Content
lbs2content = map lbs2bs . BLC.lines
----------------------------------------------------------------------
-- ** Path conversion
ap2fp :: AnchoredPath -> FilePath
ap2fp = anchorPath ""
----------------------------------------------------------------------
-- * Constructors
makeRepo :: [(Name, RepoItem)] -> V1Model wX
makeRepo = V1Model . T.makeTree . map (second treeItem)
emptyRepo :: V1Model wX
emptyRepo = V1Model T.emptyTree
makeFile :: Content -> File
makeFile = RepoItem . T.File . T.makeBlob . content2lbs
emptyFile :: File
emptyFile = RepoItem $ T.File T.emptyBlob
emptyDir :: Dir
emptyDir = RepoItem $ T.SubTree T.emptyTree
----------------------------------------------------------------------
-- * Queries
nullRepo :: V1Model wX -> Bool
nullRepo = M.null . T.items . repoTree
isFile :: RepoItem -> Bool
isFile (RepoItem (T.File _)) = True
isFile _other = False
isDir :: RepoItem -> Bool
isDir (RepoItem (T.SubTree _)) = True
isDir _other = False
fileContent :: File -> Content
fileContent (RepoItem (T.File blob))
= lbs2content $ unFail $ T.readBlob blob
fileContent _other
= error "fileContent: Not a file."
dirContent :: Dir -> [(Name, RepoItem)]
dirContent (RepoItem (T.SubTree subtree))
= map (second RepoItem) $ M.toList $ T.items subtree
dirContent _other
= error "dirContent: Not a directory."
-- | @isEmpty file@ <=> file content is empty
-- @isEmpty dir@ <=> dir has no child
isEmpty :: RepoItem -> Bool
isEmpty item
| isFile item = null $ fileContent item
| isDir item = null $ dirContent item
| otherwise = undefined
-- | The root directory of a repository.
root :: V1Model wX -> Dir
root = RepoItem . T.SubTree . repoTree
find :: V1Model wX -> AnchoredPath -> Maybe RepoItem
find (V1Model tree) path = RepoItem <$> T.find tree path
-- | List repository items.
-- NB: It does not include the root directory.
list :: V1Model wX -> [(AnchoredPath, RepoItem)]
list (V1Model tree) = map (second RepoItem) $ T.list tree
----------------------------------------------------------------------
-- ** Filtering
filterFiles :: [(n, RepoItem)] -> [(n, File)]
filterFiles = filter (isFile . snd)
filterDirs :: [(n, RepoItem)] -> [(n, Dir)]
filterDirs = filter (isDir . snd)
----------------------------------------------------------------------
-- * Comparing repositories
diffRepos :: V1Model wX -> V1Model wY -> (V1Model wU, V1Model wV)
diffRepos repo1 repo2 =
let (diff1,diff2) = unFail $ T.diffTrees hashedTree1 hashedTree2
in (V1Model diff1, V1Model diff2)
where
hashedTree1, hashedTree2 :: Tree Fail
hashedTree1 = unFail $ darcsUpdateHashes $ repoTree repo1
hashedTree2 = unFail $ darcsUpdateHashes $ repoTree repo2
----------------------------------------------------------------------
-- * Patch application
----------------------------------------------------------------------
-- * QuickCheck generators
-- Testing code assumes that aFilename and aDirname generators
-- will always be able to generate a unique name given a list of
-- existing names. This should be OK as long as the number of possible
-- file/dirnames is much bigger than the number of files/dirs per repository.
-- 'Arbitrary' 'V1Model' instance is based on the 'aSmallRepo' generator.
-- | Files are distinguished by ending their names with ".txt".
aFilename :: Gen Name
aFilename = do len <- choose (1,maxLength)
name <- vectorOf len alpha
return $ makeName (name ++ ".txt")
where
maxLength = 3
aDirname :: Gen Name
aDirname = do len <- choose (1,maxLength)
name <- vectorOf len alpha
return $ makeName name
where
maxLength = 3
aWord :: Gen B.ByteString
aWord = do c <- alpha
return $ BC.pack[c]
aLine :: Gen B.ByteString
aLine = do wordsNo <- choose (1,2)
ws <- vectorOf wordsNo aWord
return $ BC.unwords ws
aContent :: Gen Content
aContent = bSized 0 0.5 80 $ \k ->
do n <- choose (0,k)
vectorOf n aLine
aFile :: Gen File
aFile = makeFile <$> aContent
-- | See 'aRepo', the same applies for 'aDir'.
aDir :: Int -- ^ Maximum number of files
-> Int -- ^ Maximum number of directories
-> Gen Dir
aDir filesL dirL = root <$> aRepo filesL dirL
-- | @aRepo filesNo dirsNo@ produces repositories with *at most*
-- @filesNo@ files and @dirsNo@ directories.
-- The structure of the repository is random.
aRepo :: Int -- ^ Maximum number of files
-> Int -- ^ Maximum number of directories
-> Gen (V1Model wX)
aRepo maxFiles maxDirs
= do let minFiles = if maxDirs == 0 && maxFiles > 0 then 1 else 0
filesNo <- choose (minFiles,maxFiles)
let minDirs = if filesNo == 0 && maxDirs > 0 then 1 else 0
dirsNo <- choose (minDirs,maxDirs)
-- NB: Thanks to laziness we don't need to care about division by zero
-- since if dirsNo == 0 then neither filesPerDirL nor subdirsPerDirL will
-- be evaluated.
let filesPerDirL = (maxFiles-filesNo) `div` dirsNo
subdirsPerDirL = (maxDirs-dirsNo) `div` dirsNo
files <- vectorOf filesNo aFile
filenames <- uniques filesNo aFilename
dirs <- vectorOf dirsNo (aDir filesPerDirL subdirsPerDirL)
dirnames <- uniques dirsNo aDirname
return $ makeRepo (filenames `zip` files ++ dirnames `zip` dirs)
-- | Generate small repositories.
-- Small repositories help generate (potentially) conflicting patches.
instance RepoModel V1Model where
type RepoState V1Model = Tree
showModel m = show m
aSmallRepo = do filesNo <- frequency [(3, return 1), (1, return 2)]
dirsNo <- frequency [(3, return 1), (1, return 0)]
aRepo filesNo dirsNo
repoApply (V1Model tree) patch = V1Model <$> applyToTree patch tree
eqModel repo1 repo2 = let (diff1,diff2) = diffRepos repo1 repo2
in nullRepo diff1 && nullRepo diff2
instance Arbitrary (Sealed V1Model) where
arbitrary = seal <$> aSmallRepo
|
DavidAlphaFox/darcs
|
harness/Darcs/Test/Patch/V1Model.hs
|
gpl-2.0
| 8,832
| 0
| 13
| 1,973
| 2,235
| 1,213
| 1,022
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
{-| Implementation of the Ganeti Ssconf interface.
-}
{-
Copyright (C) 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Ssconf
( SSKey(..)
, sSKeyToRaw
, sSKeyFromRaw
, getPrimaryIPFamily
, getMasterCandidatesIps
, getMasterNode
, keyToFilename
, sSFilePrefix
) where
import Control.Exception
import Control.Monad (liftM)
import Data.Maybe (fromMaybe)
import qualified Network.Socket as Socket
import System.FilePath ((</>))
import System.IO.Error (isDoesNotExistError)
import qualified AutoConf
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import qualified Ganeti.Path as Path
import Ganeti.THH
import Ganeti.Utils
-- | Maximum ssconf file size we support.
maxFileSize :: Int
maxFileSize = 131072
-- | ssconf file prefix, re-exported from Constants.
sSFilePrefix :: FilePath
sSFilePrefix = C.ssconfFileprefix
$(declareSADT "SSKey"
[ ("SSClusterName", 'C.ssClusterName)
, ("SSClusterTags", 'C.ssClusterTags)
, ("SSFileStorageDir", 'C.ssFileStorageDir)
, ("SSSharedFileStorageDir", 'C.ssSharedFileStorageDir)
, ("SSMasterCandidates", 'C.ssMasterCandidates)
, ("SSMasterCandidatesIps", 'C.ssMasterCandidatesIps)
, ("SSMasterIp", 'C.ssMasterIp)
, ("SSMasterNetdev", 'C.ssMasterNetdev)
, ("SSMasterNetmask", 'C.ssMasterNetmask)
, ("SSMasterNode", 'C.ssMasterNode)
, ("SSNodeList", 'C.ssNodeList)
, ("SSNodePrimaryIps", 'C.ssNodePrimaryIps)
, ("SSNodeSecondaryIps", 'C.ssNodeSecondaryIps)
, ("SSOfflineNodes", 'C.ssOfflineNodes)
, ("SSOnlineNodes", 'C.ssOnlineNodes)
, ("SSPrimaryIpFamily", 'C.ssPrimaryIpFamily)
, ("SSInstanceList", 'C.ssInstanceList)
, ("SSReleaseVersion", 'C.ssReleaseVersion)
, ("SSHypervisorList", 'C.ssHypervisorList)
, ("SSMaintainNodeHealth", 'C.ssMaintainNodeHealth)
, ("SSUidPool", 'C.ssUidPool)
, ("SSNodegroups", 'C.ssNodegroups)
])
-- | Convert a ssconf key into a (full) file path.
keyToFilename :: FilePath -- ^ Config path root
-> SSKey -- ^ Ssconf key
-> FilePath -- ^ Full file name
keyToFilename cfgpath key =
cfgpath </> sSFilePrefix ++ sSKeyToRaw key
-- | Runs an IO action while transforming any error into 'Bad'
-- values. It also accepts an optional default value to use when the
-- error is simply that the file does not exist.
catchIOErrors :: Maybe a -- ^ Optional default
-> IO a -- ^ Action to run
-> IO (Result a)
catchIOErrors def action =
Control.Exception.catch
(do
result <- action
return (Ok result)
) (\err -> let bad_result = Bad (show err)
in return $ if isDoesNotExistError err
then maybe bad_result Ok def
else bad_result)
-- | Read an ssconf file.
readSSConfFile :: Maybe FilePath -- ^ Optional config path override
-> Maybe String -- ^ Optional default value
-> SSKey -- ^ Desired ssconf key
-> IO (Result String)
readSSConfFile optpath def key = do
dpath <- Path.dataDir
result <- catchIOErrors def . readFile .
keyToFilename (fromMaybe dpath optpath) $ key
return (liftM (take maxFileSize) result)
-- | Parses a string containing an IP family
parseIPFamily :: Int -> Result Socket.Family
parseIPFamily fam | fam == AutoConf.pyAfInet4 = Ok Socket.AF_INET
| fam == AutoConf.pyAfInet6 = Ok Socket.AF_INET6
| otherwise = Bad $ "Unknown af_family value: " ++ show fam
-- | Read the primary IP family.
getPrimaryIPFamily :: Maybe FilePath -> IO (Result Socket.Family)
getPrimaryIPFamily optpath = do
result <- readSSConfFile optpath
(Just (show AutoConf.pyAfInet4))
SSPrimaryIpFamily
return (liftM rStripSpace result >>=
tryRead "Parsing af_family" >>= parseIPFamily)
-- | Read the list of IP addresses of the master candidates of the cluster.
getMasterCandidatesIps :: Maybe FilePath -> IO (Result [String])
getMasterCandidatesIps optPath = do
result <- readSSConfFile optPath Nothing SSMasterCandidatesIps
return $ liftM lines result
-- | Read the name of the master node.
getMasterNode :: Maybe FilePath -> IO (Result String)
getMasterNode optPath = do
result <- readSSConfFile optPath Nothing SSMasterNode
return (liftM rStripSpace result)
|
vladimir-ipatov/ganeti
|
src/Ganeti/Ssconf.hs
|
gpl-2.0
| 5,263
| 0
| 14
| 1,301
| 983
| 551
| 432
| 94
| 2
|
import GBC
main :: IO ()
main = do
GBC.run
{-
initializeAll
window <- createWindow "My SDL Application" defaultWindow
renderer <- createRenderer window (-1) defaultRenderer
appLoop renderer
appLoop :: Renderer -> IO ()
appLoop renderer = do
events <- pollEvents
let eventIsQPress event =
case eventPayload event of
KeyboardEvent keyboardEvent ->
keyboardEventKeyMotion keyboardEvent == Pressed &&
keysymKeycode (keyboardEventKeysym keyboardEvent) == KeycodeQ
_ -> False
qPressed = not (null (filter eventIsQPress events))
rendererDrawColor renderer $= V4 0 0 255 255
clear renderer
present renderer
unless qPressed (appLoop renderer)
-}
|
nikhilunni/HaskellBoy
|
src/Main.hs
|
gpl-2.0
| 749
| 0
| 7
| 200
| 25
| 13
| 12
| 4
| 1
|
{-# LANGUAGE CPP #-}
{-|
hledger-web - a hledger add-on providing a web interface.
Copyright (c) 2007-2011 Simon Michael <simon@joyful.com>
Released under GPL version 3 or later.
-}
module Main
where
-- import Control.Concurrent (forkIO, threadDelay)
import Control.Monad
import Data.Maybe
import Data.Text(pack)
import Network.Wai.Handler.Warp (run)
import System.Exit
import System.IO.Storage (withStore, putValue)
import Text.Printf
#ifndef PRODUCTION
import Network.Wai.Middleware.Debug (debugHandle)
import Yesod.Logger (logString, logLazyText, flushLogger, makeLogger)
#else
import Yesod.Logger (makeLogger)
#endif
import Hledger
import Hledger.Cli hiding (progname,progversion)
import Prelude hiding (putStrLn)
import Hledger.Utils.UTF8 (putStrLn)
import Hledger.Web
main :: IO ()
main = do
opts <- getHledgerWebOpts
when (debug_ $ cliopts_ opts) $ printf "%s\n" progversion >> printf "opts: %s\n" (show opts)
runWith opts
runWith :: WebOpts -> IO ()
runWith opts = run opts
where
run opts
| "help" `in_` (rawopts_ $ cliopts_ opts) = putStr (showModeHelp webmode) >> exitSuccess
| "version" `in_` (rawopts_ $ cliopts_ opts) = putStrLn progversion >> exitSuccess
| "binary-filename" `in_` (rawopts_ $ cliopts_ opts) = putStrLn (binaryfilename progname)
| otherwise = journalFilePathFromOpts (cliopts_ opts) >>= ensureJournalFile >> withJournalDo' opts web
withJournalDo' :: WebOpts -> (WebOpts -> Journal -> IO ()) -> IO ()
withJournalDo' opts cmd = do
journalFilePathFromOpts (cliopts_ opts) >>= readJournalFile Nothing >>=
either error' (cmd opts . journalApplyAliases (aliasesFromOpts $ cliopts_ opts))
-- | The web command.
web :: WebOpts -> Journal -> IO ()
web opts j = do
-- unless (debug_ $ cliopts_ opts) $ forkIO (browser baseurl) >> return ()
server (base_url_ opts) (port_ opts) opts j
-- browser :: String -> IO ()
-- browser baseurl = do
-- threadDelay $ fromIntegral browserstartdelay
-- putStrLn "Attempting to start a web browser"
-- openBrowserOn baseurl >> return ()
server :: String -> Int -> WebOpts -> Journal -> IO ()
server baseurl port opts j = do
printf "Starting http server on port %d with base url %s\n" port baseurl
-- let a = App{getStatic=static staticdir
-- ,appRoot=pack baseurl
-- ,appOpts=opts
-- ,appArgs=patterns_ $ reportopts_ $ cliopts_ opts
-- ,appJournal=j
-- }
withStore "hledger" $ do
putValue "hledger" "journal" j
-- yesod main
logger <- makeLogger
-- args <- cmdArgs argConfig
-- env <- getAppEnv args
let env = Development
-- c <- loadConfig env
-- let c' = if port_ opts /= 0
-- then c{ appPort = port args }
-- else c
let c = AppConfig {
appEnv = env
, appPort = port_ opts
, appRoot = pack baseurl
}
#if PRODUCTION
withApp c logger opts $ run (appPort c)
#else
logString logger $ (show env) ++ " application launched, listening on port " ++ show (appPort c)
withApp c logger opts $ run (appPort c) . debugHandle (logHandle logger)
flushLogger logger
where
logHandle logger msg = logLazyText logger msg >> flushLogger logger
#endif
-- data ArgConfig = ArgConfig
-- { environment :: String
-- , port :: Int
-- } deriving (Show, Data, Typeable)
-- argConfig :: ArgConfig
-- argConfig = ArgConfig
-- { environment = def
-- &= help ("application environment, one of: " ++ (foldl1 (\a b -> a ++ ", " ++ b) environments))
-- &= typ "ENVIRONMENT"
-- , port = def
-- &= typ "PORT"
-- }
-- environments :: [String]
-- environments = map ((map toLower) . show) ([minBound..maxBound] :: [AppEnvironment])
-- | retrieve the -e environment option
-- getAppEnv :: ArgConfig -> IO AppEnvironment
-- getAppEnv cfg = do
-- let e = if environment cfg /= ""
-- then environment cfg
-- else
-- #if PRODUCTION
-- "production"
-- #else
-- "development"
-- #endif
-- return $ read $ capitalize e
-- where
-- capitalize [] = []
-- capitalize (x:xs) = toUpper x : map toLower xs
|
Lainepress/hledger
|
hledger-web/hledger-web.hs
|
gpl-3.0
| 4,313
| 0
| 15
| 1,113
| 717
| 393
| 324
| 50
| 1
|
-- Copyright 2013 Gushcha Anton
-- This file is part of PowerCom.
--
-- PowerCom is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- PowerCom is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with PowerCom. If not, see <http://www.gnu.org/licenses/>.
module Channel.CyclicCode (
codeCyclic
, decodeCyclic
, prop_codeDecodeEq
, prop_polyConverting
, prop_Word8BitCount
, prop_quotRemPoly
, prop_simpleCoding
, prop_fullCodingDecoding
, prop_falseWord4Coding
, prop_falseWord8Coding
) where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as CH
import Math.Polynomial
import Data.Bits
import Data.Sequence (foldrWithIndex, fromList)
import Data.Word
import Control.Monad
import Test.QuickCheck hiding ( (.&.) )
type Word4 = Word8 -- only for semantic clarity
type Word7 = Word8
data Bit = Bit Bool
deriving Eq
instance Show Bit where
show (Bit val) = if val then "1" else "0"
instance Num Bit where
(+) (Bit a) (Bit b) = case (a, b) of
(True, True) -> Bit False
(_, True) -> Bit True
(True, _) -> Bit True
_ -> Bit False
(-) = (+)
(*) (Bit a) (Bit b) = case (a, b) of
(False, _) -> Bit False
(_, False) -> Bit False
_ -> Bit True
abs ba = ba
signum ba = ba
fromInteger int = Bit $ int > 0
instance Fractional Bit where
(/) ba _ = ba
fromRational = undefined
word8ToPoly :: Word8 -> Poly Bit
word8ToPoly wd = poly LE $ map
(Bit . testBit wd) [0 .. bitSize wd - 1]
polyToWord8 :: Poly Bit -> Word8
polyToWord8 = foldrWithIndex coeff2Bit 0 . fromList . polyCoeffs LE
where
coeff2Bit :: Int -> Bit -> Word8 -> Word8
coeff2Bit i (Bit b) acc = if b then acc `setBit` i else acc
codeCyclic :: BS.ByteString -> BS.ByteString
codeCyclic = BS.pack . concatMap (\(a,b) -> [a, b]) . map codeWord8 . BS.unpack
codeWord8 :: Word8 -> (Word7, Word7)
codeWord8 wd = (codeWord4 highWord, codeWord4 lowWord)
where highWord = (wd .&. 0xF0) `shiftR` 4
lowWord = wd .&. 0x0F
codeWord4 :: Word4 -> Word7 -- n = 7 k = 4
codeWord4 wd = polyToWord8 finalPoly
where
polyGen = poly BE [1,0,1,1]
wordPoly = word8ToPoly wd
shiftedPoly = wordPoly `multPoly` poly BE [1, 0, 0, 0] -- (n - k) = 3
reminder = shiftedPoly `remPoly` polyGen
finalPoly = shiftedPoly `addPoly` reminder
decodeCyclic :: BS.ByteString -> Maybe BS.ByteString
decodeCyclic = mPack . mapM decodeWord8 . makePairs . BS.unpack
where
mPack = liftM BS.pack
makePairs :: [a] -> [(a, a)]
makePairs [] = []
makePairs (_:[]) = []
makePairs (x1:x2:xs) = (x1, x2) : makePairs xs
decodeWord8 :: (Word7, Word7) -> Maybe Word8
decodeWord8 (a, b) = mShiftL4 (decodeWord4 a) `mOr` decodeWord4 b
where
mShiftL4 = liftM $ flip shiftL 4
mOr = liftM2 (.|.)
decodeWord4 :: Word7 -> Maybe Word4
decodeWord4 wd = if syndrome == zero then Just finalWord else Nothing
where
polyGen = poly BE [1,0,1,1]
wordPoly = word8ToPoly wd
syndrome = wordPoly `remPoly` polyGen
finalWord = (wd `shiftR` 3) .&. 0x0F
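-- Worked example (values checked by hand against the definitions above):
-- for the nibble 0x1 the message polynomial is 1 and the shifted polynomial
-- is x^3; since x^3 `mod` (x^3 + x + 1) = x + 1, the codeword is
-- x^3 + x + 1 = 0b0001011, i.e. codeWord4 1 == 11.  Decoding that codeword
-- yields a zero syndrome and recovers the nibble: decodeWord4 11 == Just 1.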
-- Testing
prop_codeDecodeEq :: Word8 -> Bool
prop_codeDecodeEq wd = case decodeWord8 $ codeWord8 wd of
Nothing -> False
Just val -> wd == val
prop_polyConverting :: Word8 -> Bool
prop_polyConverting wd = wd == polyToWord8 (word8ToPoly wd)
prop_Word8BitCount :: Word8 -> Bool
prop_Word8BitCount wd = bitSize wd == 8
prop_quotRemPoly :: Word8 -> Word8 -> Bool
prop_quotRemPoly a b = (b == 0) || (newa == pa)
where newa = addPoly (multPoly q pb) r
(q, r) = quotRemPoly pa pb
pa = word8ToPoly a
pb = word8ToPoly b
prop_simpleCoding :: Word8 -> Bool
prop_simpleCoding wd = case decodeWord4 $ codeWord4 cutedWd of
Nothing -> False
Just val -> val == cutedWd
where cutedWd = wd .&. 0x0F
prop_fullCodingDecoding :: String -> Bool
prop_fullCodingDecoding s = case decodeCyclic $ codeCyclic bs of
Nothing -> False
Just val -> val == bs
where bs = CH.pack s
newtype BitError = BitError Int
deriving (Eq, Show)
instance Arbitrary BitError where
arbitrary = oneof $ map (return . BitError) [0 .. 7]
shrink _ = []
prop_falseWord4Coding :: Word8 -> BitError -> Bool
prop_falseWord4Coding wd (BitError i) = case decodeWord4 $ complementBit (codeWord4 cutedWd) i of
Nothing -> True
Just _ -> False
where cutedWd = wd .&. 0x0F
prop_falseWord8Coding :: Word8 -> BitError -> BitError -> Bool
prop_falseWord8Coding wd (BitError i1) (BitError i2) =
case decodeWord8 (cwd1 `complementBit` i1, cwd2 `complementBit` i2) of
Nothing -> True
Just _ -> False
where
(cwd1, cwd2) = codeWord8 wd
|
NCrashed/PowerCom
|
src/powercom/Channel/CyclicCode.hs
|
gpl-3.0
| 5,315
| 0
| 11
| 1,367
| 1,628
| 890
| 738
| 119
| 2
|
module Milkman.Test.Context ()
where
import Control.Applicative ((<$>))
import Data.List.Split (chunksOf)
import Data.Text ( Text
, pack
)
import Test.QuickCheck.Instances
import qualified Test.QuickCheck as QC
import qualified Test.Tasty.QuickCheck as QC
import qualified Test.Tasty.SmallCheck as SC
import Milkman.Context
instance QC.Arbitrary Context
where arbitrary = gen
gen :: QC.Gen Context
gen = do
no <- sizedChoose
na <- sizedChoose
let o = pack . show <$> [1 .. no]
a = pack . show <$> [1 .. na]
i <- QC.vector $ no * na :: QC.Gen [Bool]
mkContext o a $ chunksOf no i
where sizedChoose = QC.sized (\n -> QC.choose (1, n)) `QC.suchThat` (>0)
|
mmarx/milkman
|
test/Milkman/Test/Context.hs
|
gpl-3.0
| 717
| 0
| 12
| 168
| 257
| 148
| 109
| 21
| 1
|
{-
- Copyright (C) 2014 Alexander Berntsen <alexander@plaimi.net>
-
- This file is part of Virtual Tennis.
-
- Virtual Tennis is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- Virtual Tennis is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with Virtual Tennis. If not, see <http://www.gnu.org/licenses/>.
-}
module Mathema where
magApply :: (Float -> Float) -> Float -> Float
-- | 'magApply' applies a function to the magnitude of a number.
magApply f x =
let sig = signum x
in sig * f (sig * x)
clamp :: Float -> Float -> Float -> Float
-- | 'clamp' clamps a number between a low and a high.
clamp low high = max low . min high
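-- For example: clamp 0 10 42 == 10, clamp 0 10 (-3) == 0, and
-- magApply sqrt (-4) == -2.0 (the function is applied to the magnitude and
-- the original sign is restored afterwards).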
|
alexander-b/master-fun
|
src/virtualtennis/Mathema.hs
|
gpl-3.0
| 1,082
| 0
| 10
| 211
| 102
| 53
| 49
| 7
| 1
|
module CoreIO
(getInt
,getInts
,getFloat
,publishFullFigure_dep
,publishFullFigure
,namedPublish
,multiNamePublish
)where
import DataTypes
import CoreSVG (writeFullFigure_dep,writeFullFigure)
import Control.Monad (replicateM)
-- |A function which takes a FullFigure and writes it to a file
publishFullFigure_dep :: FullFigure -> IO ()
publishFullFigure_dep fig = writeFile "Output.svg" $ writeFullFigure_dep fig
-- |A function which takes a FullFigure and writes it to a file so it is viewable
-- usage: publishFullFigure figureToView => ()
publishFullFigure :: FullFigure -> IO ()
publishFullFigure fig = writeFile "Output.svg" $ writeFullFigure fig
namedPublish :: String -> FullFigure -> IO ()
namedPublish file fig = writeFile (file++".svg") $ writeFullFigure fig
multiNamePublish_help :: String -> Int -> [FullFigure] -> IO ()
multiNamePublish_help file num (f:[]) = namedPublish (file++(show num)) f
multiNamePublish_help file num (f:xs) = do
namedPublish (file++(show num)) f
multiNamePublish_help file (num + 1) xs
-- |A function which will convert a list of related full figures to a series of numbered svg files
multiNamePublish :: String -> [FullFigure] -> IO ()
multiNamePublish file figs = multiNamePublish_help file 0 figs
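-- For example, multiNamePublish "frame" [a, b, c] writes frame0.svg,
-- frame1.svg and frame2.svg. Note that the helper has no case for an empty
-- list, so the input is assumed to be non-empty.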
-- |A function which reads the given number of Ints from the command line, one per line
getInts :: Int -> IO [Int]
getInts n = fmap (fmap read) $ mapM (const getLine) [1.. n]
-- |gets a single int from the cmdLine
getInt :: IO Int
getInt = fmap read getLine
-- |gets a single Float from the cmdLine
getFloat :: IO Float
getFloat = fmap read getLine
|
Lexer747/Haskell-Fractals
|
Core/CoreIO.hs
|
gpl-3.0
| 1,623
| 0
| 11
| 270
| 414
| 216
| 198
| 30
| 1
|
{- |
Module : $Header$
Description : Simple Main file to test stuff
Copyright : (c) Michal Parusinski
License : GPLv3
Maintainer : mparusinski@gmail.com
Stability : experimental
Portability : portable
<module description starting at first column>
-}
module Main where
import Control.Monad
import Data.Maybe
import System.Random
import IO
import Data.Time
--import Criterion.Main
import Factoring.Lenstra
import Factoring.TrialDivision
import Primes.MillerRabin
import Generator.RandomGenerator
import Generator.Generator
ecmGenericFull method number
= do updateIORandomGenerator
stdGen <- getStdGen
let isPrime = millerRabinPrimalityTest stdGen number
if isPrime
then return [number]
else do maybeFactor <- return $! method number
if isNothing maybeFactor
then return [number]
else do leftPart <- ecmStandardFull $ fromJust maybeFactor
rightPart <- ecmStandardFull $ number `div` (fromJust maybeFactor)
return (leftPart ++ rightPart)
ecmStandardFull = ecmGenericFull lenstraECMSmartBound
ecmParallelFull = ecmGenericFull lenstraECMParallelSmartBound
trialDivisionFull num = return $! trialDivision num
generateSemiPrime bitSize
= do updateIORandomGenerator
stdGen1 <- getStdGen
let pg = primeGenerator stdGen1 bitSize
updateIORandomGenerator
stdGen2 <- getStdGen
let ([prime1, prime2], state) = runGeneratorNTimes 2 pg stdGen2
return (prime1 * prime2, prime1, prime2)
-- action should always give the same output
runActionNTimes action input times
= liftM head $ mapM action $ take times $ repeat input
times = 5
divisionRun method number methodString
= do putStr $ "Using " ++ methodString ++ " ..."
hFlush stdout
start <- getCurrentTime
factors <- runActionNTimes method number times -- it should be eager
end <- getCurrentTime
putStrLn $ " found factors " ++ show factors
let timeTaken = show ((diffUTCTime end start) / (fromIntegral times))
putStrLn $ "It took " ++ timeTaken ++ "\n"
experimentRun bitSize
= do putStrLn "===================================================="
putStrLn $ "Running experiment for bit size " ++ show bitSize
(product, first, second) <- generateSemiPrime bitSize
putStrLn $ show product ++ " = " ++ show first ++ " x " ++ show second ++ "\n"
divisionRun trialDivisionFull product "trial division"
divisionRun ecmStandardFull product "ECM Standard"
divisionRun ecmParallelFull product "ECM Parallel"
putStrLn " "
main = do let bitSizes = [1..30]
mapM_ experimentRun bitSizes
|
mparusinski/Haskell-number-theory-library
|
Experiment_1.hs
|
gpl-3.0
| 2,774
| 0
| 16
| 692
| 622
| 298
| 324
| 57
| 3
|
module Main where
import qualified Math.ExpPairs.Matrix3 as M3
import Test.Tasty.Bench
testm3 :: Int -> M3.Matrix3 Integer
testm3 k = M3.fromList $ map (100*10^k `div`) [100..108]
compareMults :: Int -> Benchmark
compareMults k = bgroup (show k)
[ bench "vanillaMult" $ nf (\x -> x * x) (testm3 k)
, bench "makarovMult" $ nf (\x -> x `M3.makarovMult` x) (testm3 k)
, bench "ladermanMult" $ nf (\x -> x `M3.ladermanMult` x) (testm3 k)
]
main :: IO ()
main = defaultMain $ map compareMults [400,450..800]
|
Bodigrim/exp-pairs
|
auxiliary/BenchMatrix.hs
|
gpl-3.0
| 534
| 0
| 11
| 115
| 238
| 130
| 108
| 12
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.AuthorizeSecurityGroupEgress
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds one or more egress rules to a security group for use with a VPC.
-- Specifically, this action permits instances to send traffic to one or more
-- destination CIDR IP address ranges, or to one or more destination security
-- groups for the same VPC.
--
-- You can have up to 50 rules per security group (covering both ingress and
-- egress rules).
--
-- A security group is for use with instances either in the EC2-Classic
-- platform or in a specific VPC. This action doesn't apply to security groups
-- for use in EC2-Classic. For more information, see <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_SecurityGroups.html Security Groups for Your VPC>
-- in the /Amazon Virtual Private Cloud User Guide/.
--
-- Each rule consists of the protocol (for example, TCP), plus either a CIDR
-- range or a source group. For the TCP and UDP protocols, you must also specify
-- the destination port or port range. For the ICMP protocol, you must also
-- specify the ICMP type and code. You can use -1 for the type or code to mean
-- all types or all codes.
--
-- Rule changes are propagated to affected instances as quickly as possible.
-- However, a small delay might occur.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AuthorizeSecurityGroupEgress.html>
module Network.AWS.EC2.AuthorizeSecurityGroupEgress
(
-- * Request
AuthorizeSecurityGroupEgress
-- ** Request constructor
, authorizeSecurityGroupEgress
-- ** Request lenses
, asgeCidrIp
, asgeDryRun
, asgeFromPort
, asgeGroupId
, asgeIpPermissions
, asgeIpProtocol
, asgeSourceSecurityGroupName
, asgeSourceSecurityGroupOwnerId
, asgeToPort
-- * Response
, AuthorizeSecurityGroupEgressResponse
-- ** Response constructor
, authorizeSecurityGroupEgressResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data AuthorizeSecurityGroupEgress = AuthorizeSecurityGroupEgress
{ _asgeCidrIp :: Maybe Text
, _asgeDryRun :: Maybe Bool
, _asgeFromPort :: Maybe Int
, _asgeGroupId :: Text
, _asgeIpPermissions :: List "item" IpPermission
, _asgeIpProtocol :: Maybe Text
, _asgeSourceSecurityGroupName :: Maybe Text
, _asgeSourceSecurityGroupOwnerId :: Maybe Text
, _asgeToPort :: Maybe Int
} deriving (Eq, Read, Show)
-- | 'AuthorizeSecurityGroupEgress' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'asgeCidrIp' @::@ 'Maybe' 'Text'
--
-- * 'asgeDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'asgeFromPort' @::@ 'Maybe' 'Int'
--
-- * 'asgeGroupId' @::@ 'Text'
--
-- * 'asgeIpPermissions' @::@ ['IpPermission']
--
-- * 'asgeIpProtocol' @::@ 'Maybe' 'Text'
--
-- * 'asgeSourceSecurityGroupName' @::@ 'Maybe' 'Text'
--
-- * 'asgeSourceSecurityGroupOwnerId' @::@ 'Maybe' 'Text'
--
-- * 'asgeToPort' @::@ 'Maybe' 'Int'
--
authorizeSecurityGroupEgress :: Text -- ^ 'asgeGroupId'
-> AuthorizeSecurityGroupEgress
authorizeSecurityGroupEgress p1 = AuthorizeSecurityGroupEgress
{ _asgeGroupId = p1
, _asgeDryRun = Nothing
, _asgeSourceSecurityGroupName = Nothing
, _asgeSourceSecurityGroupOwnerId = Nothing
, _asgeIpProtocol = Nothing
, _asgeFromPort = Nothing
, _asgeToPort = Nothing
, _asgeCidrIp = Nothing
, _asgeIpPermissions = mempty
}
-- | The CIDR IP address range. You can't specify this parameter when specifying a
-- source security group.
asgeCidrIp :: Lens' AuthorizeSecurityGroupEgress (Maybe Text)
asgeCidrIp = lens _asgeCidrIp (\s a -> s { _asgeCidrIp = a })
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
asgeDryRun :: Lens' AuthorizeSecurityGroupEgress (Maybe Bool)
asgeDryRun = lens _asgeDryRun (\s a -> s { _asgeDryRun = a })
-- | The start of port range for the TCP and UDP protocols, or an ICMP type
-- number. For the ICMP type number, use '-1' to specify all ICMP types.
asgeFromPort :: Lens' AuthorizeSecurityGroupEgress (Maybe Int)
asgeFromPort = lens _asgeFromPort (\s a -> s { _asgeFromPort = a })
-- | The ID of the security group.
asgeGroupId :: Lens' AuthorizeSecurityGroupEgress Text
asgeGroupId = lens _asgeGroupId (\s a -> s { _asgeGroupId = a })
-- | A set of IP permissions. You can't specify a destination security group and a
-- CIDR IP address range.
asgeIpPermissions :: Lens' AuthorizeSecurityGroupEgress [IpPermission]
asgeIpPermissions =
lens _asgeIpPermissions (\s a -> s { _asgeIpPermissions = a })
. _List
-- | The IP protocol name ('tcp', 'udp', 'icmp') or number (see <http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml Protocol Numbers>). Use '-1'
-- to specify all.
asgeIpProtocol :: Lens' AuthorizeSecurityGroupEgress (Maybe Text)
asgeIpProtocol = lens _asgeIpProtocol (\s a -> s { _asgeIpProtocol = a })
-- | [EC2-Classic, default VPC] The name of the destination security group. You
-- can't specify a destination security group and a CIDR IP address range.
asgeSourceSecurityGroupName :: Lens' AuthorizeSecurityGroupEgress (Maybe Text)
asgeSourceSecurityGroupName =
lens _asgeSourceSecurityGroupName
(\s a -> s { _asgeSourceSecurityGroupName = a })
-- | The ID of the destination security group. You can't specify a destination
-- security group and a CIDR IP address range.
asgeSourceSecurityGroupOwnerId :: Lens' AuthorizeSecurityGroupEgress (Maybe Text)
asgeSourceSecurityGroupOwnerId =
lens _asgeSourceSecurityGroupOwnerId
(\s a -> s { _asgeSourceSecurityGroupOwnerId = a })
-- | The end of port range for the TCP and UDP protocols, or an ICMP code number.
-- For the ICMP code number, use '-1' to specify all ICMP codes for the ICMP type.
asgeToPort :: Lens' AuthorizeSecurityGroupEgress (Maybe Int)
asgeToPort = lens _asgeToPort (\s a -> s { _asgeToPort = a })
data AuthorizeSecurityGroupEgressResponse = AuthorizeSecurityGroupEgressResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'AuthorizeSecurityGroupEgressResponse' constructor.
authorizeSecurityGroupEgressResponse :: AuthorizeSecurityGroupEgressResponse
authorizeSecurityGroupEgressResponse = AuthorizeSecurityGroupEgressResponse
instance ToPath AuthorizeSecurityGroupEgress where
toPath = const "/"
instance ToQuery AuthorizeSecurityGroupEgress where
toQuery AuthorizeSecurityGroupEgress{..} = mconcat
[ "CidrIp" =? _asgeCidrIp
, "DryRun" =? _asgeDryRun
, "FromPort" =? _asgeFromPort
, "GroupId" =? _asgeGroupId
, "IpPermissions" `toQueryList` _asgeIpPermissions
, "IpProtocol" =? _asgeIpProtocol
, "SourceSecurityGroupName" =? _asgeSourceSecurityGroupName
, "SourceSecurityGroupOwnerId" =? _asgeSourceSecurityGroupOwnerId
, "ToPort" =? _asgeToPort
]
instance ToHeaders AuthorizeSecurityGroupEgress
instance AWSRequest AuthorizeSecurityGroupEgress where
type Sv AuthorizeSecurityGroupEgress = EC2
type Rs AuthorizeSecurityGroupEgress = AuthorizeSecurityGroupEgressResponse
request = post "AuthorizeSecurityGroupEgress"
response = nullResponse AuthorizeSecurityGroupEgressResponse
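-- A usage sketch (hypothetical values, and the (&) / (?~) operators come from
-- a lens library rather than this module): the request is normally built from
-- the smart constructor and refined through the lenses above, e.g.
--
-- > authorizeSecurityGroupEgress "sg-1a2b3c4d"
-- >     & asgeIpProtocol ?~ "tcp"
-- >     & asgeFromPort   ?~ 443
-- >     & asgeToPort     ?~ 443
-- >     & asgeCidrIp     ?~ "10.0.0.0/16"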
|
romanb/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/AuthorizeSecurityGroupEgress.hs
|
mpl-2.0
| 8,818
| 0
| 10
| 1,968
| 929
| 565
| 364
| 99
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetPools.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a target pool in the specified project and region using the data
-- included in the request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetPools.insert@.
module Network.Google.Resource.Compute.TargetPools.Insert
(
-- * REST Resource
TargetPoolsInsertResource
-- * Creating a Request
, targetPoolsInsert
, TargetPoolsInsert
-- * Request Lenses
, tpiRequestId
, tpiProject
, tpiPayload
, tpiRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetPools.insert@ method which the
-- 'TargetPoolsInsert' request conforms to.
type TargetPoolsInsertResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetPools" :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TargetPool :> Post '[JSON] Operation
-- | Creates a target pool in the specified project and region using the data
-- included in the request.
--
-- /See:/ 'targetPoolsInsert' smart constructor.
data TargetPoolsInsert =
TargetPoolsInsert'
{ _tpiRequestId :: !(Maybe Text)
, _tpiProject :: !Text
, _tpiPayload :: !TargetPool
, _tpiRegion :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetPoolsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tpiRequestId'
--
-- * 'tpiProject'
--
-- * 'tpiPayload'
--
-- * 'tpiRegion'
targetPoolsInsert
:: Text -- ^ 'tpiProject'
-> TargetPool -- ^ 'tpiPayload'
-> Text -- ^ 'tpiRegion'
-> TargetPoolsInsert
targetPoolsInsert pTpiProject_ pTpiPayload_ pTpiRegion_ =
TargetPoolsInsert'
{ _tpiRequestId = Nothing
, _tpiProject = pTpiProject_
, _tpiPayload = pTpiPayload_
, _tpiRegion = pTpiRegion_
}
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
tpiRequestId :: Lens' TargetPoolsInsert (Maybe Text)
tpiRequestId
= lens _tpiRequestId (\ s a -> s{_tpiRequestId = a})
-- | Project ID for this request.
tpiProject :: Lens' TargetPoolsInsert Text
tpiProject
= lens _tpiProject (\ s a -> s{_tpiProject = a})
-- | Multipart request metadata.
tpiPayload :: Lens' TargetPoolsInsert TargetPool
tpiPayload
= lens _tpiPayload (\ s a -> s{_tpiPayload = a})
-- | Name of the region scoping this request.
tpiRegion :: Lens' TargetPoolsInsert Text
tpiRegion
= lens _tpiRegion (\ s a -> s{_tpiRegion = a})
instance GoogleRequest TargetPoolsInsert where
type Rs TargetPoolsInsert = Operation
type Scopes TargetPoolsInsert =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient TargetPoolsInsert'{..}
= go _tpiProject _tpiRegion _tpiRequestId
(Just AltJSON)
_tpiPayload
computeService
where go
= buildClient
(Proxy :: Proxy TargetPoolsInsertResource)
mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/TargetPools/Insert.hs
|
mpl-2.0
| 4,725
| 0
| 17
| 1,087
| 559
| 335
| 224
| 86
| 1
|
module XTag.Model.Util
( createThumbnail
, findBooks
) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Data.Char
import Data.Maybe
import System.Directory
import qualified Data.ByteString.UTF8 as C
import Data.UUID
import System.Process
import System.UUID.V4
findLeafDirectories dir = do
isDir <- liftIO $ doesDirectoryExist dir
if isDir
then do
contents <- liftIO $ transform <$> getDirectoryContents dir
leafs <- concat <$> mapM findLeafDirectories contents
if null leafs
then return [(dir, contents)]
else return leafs
else
return []
where
transform = map subpath . filter noDot
noDot x = head x /= '.'
subpath x = concat [dir, "/", x]
isImage p =
return $ extension p `elem` ["jpg", "png", "jpeg"]
where
extension =
map toLower . reverse . takeWhile (\x -> x /= '.') . reverse
toBook (path, pages) = do
pages <- filterM isImage pages
if null pages
then return Nothing
else return $ Just
(C.fromString path, map C.fromString pages)
findBooks dir = do
dir <- liftIO $ canonicalizePath dir
catMaybes <$> (findLeafDirectories dir >>= mapM toBook)
createThumbnail dir path width height = do
thumb <- (\x y -> x ++ "/" ++ y ++ ".jpg")
<$> canonicalizePath (C.toString dir)<*> liftA show uuid
let size = concat [show width, "x", show height]
(_, _, _, hProc) <- createProcess
(proc "./convert" [(C.toString path), "-thumbnail", size, thumb])
waitForProcess hProc
return $ C.fromString thumb
|
yeyan/xtag
|
src/XTag/Model/Util.hs
|
lgpl-3.0
| 1,782
| 0
| 14
| 569
| 564
| 290
| 274
| 47
| 3
|
{-# LANGUAGE RankNTypes #-}
module Math.Topology.KnotTh.Tabulation.Test
( test
) where
import Control.Arrow ((&&&))
import Control.Monad (when)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Math.Topology.KnotTh.Algebra.Dihedral.Dn
import Math.Topology.KnotTh.Tangle
import Math.Topology.KnotTh.Tabulation.TangleDiagramsCascade
import Math.Topology.KnotTh.Tabulation.TangleFlypeClasses
import TestUtil.Table
test :: Test
test = testGroup "Tangle generators"
[ testCase "Numbers of prime tangle projections" $
testTable (\ n -> generateTable $ forCCP_ (primeProjections n))
[ [1]
, [1, 1]
, [2, 2, 2]
, [6, 8, 8, 5]
, [19, 29, 41, 31, 16]
, [71, 138, 210, 231, 161, 60]
, [293, 638, 1125, 1458, 1406, 840, 261]
, [1348, 3237, 6138, 9183, 10572, 8818, 4702, 1243]
]
, testCase "Numbers of basic polyhedral tangle projections" $
testTable (\ n -> generateTable $ forCCP_ (reducedProjections n))
[ [1]
, [0, 1]
, [0, 1, 2]
, [0, 1, 2, 5]
, [1, 1, 4, 9, 16]
, [1, 4, 7, 22, 42, 60]
, [3, 7, 21, 49, 126, 228, 261]
]
, testCase "Numbers of tangle templates" $
testTable (\ n -> generateTable $ forCCP_ (templateProjections n))
[ [1]
, [0, 1]
, [0, 1, 2]
, [0, 1, 2, 5]
, [1, 1, 4, 9, 16]
, [0, 3, 7, 22, 42, 60]
, [1, 4, 17, 49, 126, 228, 261]
, [2, 12, 43, 139, 355, 799, 1288, 1243]
]
, testCase "Numbers of tangle diagrams" $
testTable (\ n -> generateTable $ forCCP_ (primeIrreducibleDiagrams n))
[ [1]
, [1, 2]
, [3, 4, 6]
, [14, 25, 33, 32]
, [76, 148, 258, 290, 206]
, [486, 1146, 2125, 3086, 3081, 1718]
]
, testCase "Numbers of alternating tangles" $
testTable (\ n -> generateTable $ generateFlypeEquivalent n)
[ [1]
, [1, 1]
, [2, 2, 2]
, [5, 7, 8, 5]
, [13, 20, 37, 31, 16]
, [36, 77, 157, 209, 161, 60]
, [111, 276, 687, 1128, 1294, 840, 261]
, [373, 1135, 3052, 5986, 8528, 8206, 4702, 1243]
, [1362, 4823, 13981, 30556, 51475, 62895, 52815, 26753, 6257]
]
, testCase "Numbers of 4-leg alternating tangles without symmetry" $
testTable
(\ n -> generateTable'
((numberOfVertices &&& numberOfLegs) . fst)
(\ (_, symmetry) -> rotationPeriod symmetry * (if hasReflectionPart symmetry then 1 else 2))
(\ yield -> generateFlypeEquivalentInTriangle n
(\ (t, s) -> when (numberOfLegs t == 4) $
yield (t, s)
)
)
)
[[1], [2], [4], [10], [29], [98], [372], [1538], [6755], [30996]]
]
|
mishun/tangles
|
test/Math/Topology/KnotTh/Tabulation/Test.hs
|
lgpl-3.0
| 3,102
| 0
| 21
| 1,170
| 1,092
| 683
| 409
| 71
| 2
|
module Main where
import Sort (sort)
import Point (Point)
import Support (shouldEql)
p00 = (0, 0)
p10 = (1, 0)
p10' = (-1, 0)
p01 = (0, 1)
p01' = (0, -1)
p11 = (1, 1)
p11' = (-1, -1)
p13 = (1, 3)
main :: IO ()
main = print $ foldl1 (++) [
sort [] p00 `shouldEql` [],
sort [p00] p00 `shouldEql` [p00],
sort [p00, p00] p00 `shouldEql` [p00, p00],
sort [p00, p10] p00 `shouldEql` [p00, p10],
sort [p10, p10] p00 `shouldEql` [p10, p10],
sort [p10, p00] p00 `shouldEql` [p00, p10],
sort [p10', p10, p00] p00 `shouldEql` [p00, p10, p10'],
sort [
p10', p10, p11, p01, p01', p01, p00, p10', p11'
] p00 `shouldEql` [
p11', p01', p00, p10, p11, p01, p01, p10', p10'
],
sort [] p13 `shouldEql` [],
sort [p00] p13 `shouldEql` [p00],
sort [p00, p00] p13 `shouldEql` [p00, p00],
sort [p00, p10] p13 `shouldEql` [p00, p10],
sort [
p10', p10, p11, p01, p01', p01, p00, p10', p11'
] p13 `shouldEql` [
p10', p10', p01, p01, p11', p00, p01', p11, p10
],
sort [p10, p00, p10'] (-2, 0) `shouldEql` [p10', p00, p10],
"done"
]
|
jhnns/haskell-experiments
|
test/SortSpec.hs
|
unlicense
| 1,124
| 0
| 11
| 308
| 593
| 367
| 226
| 35
| 1
|
rotate :: [a] -> Int -> [a]
rotate xs 0 = xs
rotate xs n | n > 0 = rotate (tail xs ++ [head xs]) (n-1)
| n < 0 = rotate ([last xs] ++ init xs) (n+1)
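-- For example:
--   rotate ['a'..'h'] 3    == "defghabc"
--   rotate ['a'..'h'] (-2) == "ghabcdef"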
|
alephnil/h99
|
19.hs
|
apache-2.0
| 160
| 0
| 10
| 50
| 119
| 59
| 60
| 4
| 1
|
-- | In which the user prelude is massaged into the form hint needs.
module System.Console.Hawk.UserPrelude where
import Control.Monad.Trans.Class
import Data.ByteString as B
import Text.Printf
import Control.Monad.Trans.Uncertain
import Data.HaskellModule
import System.Console.Hawk.PackageDbs
import System.Console.Hawk.UserPrelude.Extend
type UserPrelude = HaskellModule
testC :: FilePath -> IO ()
testC f = do
let orig = printf "tests/preludes/%s/prelude.hs" f
m <- runUncertainIO $ readModule orig
B.putStr $ showModule orig (canonicalizeUserPrelude m)
-- |
-- >>> testC "default"
-- {-# LANGUAGE ExtendedDefaultRules, OverloadedStrings #-}
-- module System.Console.Hawk.CachedPrelude where
-- {-# LINE 2 "tests/preludes/default/prelude.hs" #-}
-- import Prelude
-- import qualified Data.ByteString.Lazy.Char8 as B
-- import qualified Data.List as L
--
-- >>> testC "moduleName"
-- module MyPrelude where
-- import Prelude
-- {-# LINE 2 "tests/preludes/moduleName/prelude.hs" #-}
-- t = take
canonicalizeUserPrelude :: HaskellModule -> UserPrelude
canonicalizeUserPrelude = extendModuleName . extendImports
readUserPrelude :: FilePath -> UncertainT IO UserPrelude
readUserPrelude f = canonicalizeUserPrelude <$> readModule f
compileUserPrelude :: FilePath -- ^ the original's filename,
-- used for fixing up line numbers
-> FilePath -- ^ new filename, because ghc compiles from disk.
-- the compiled output will be in the same folder.
-> UserPrelude
-> UncertainT IO ()
compileUserPrelude = compileUserPreludeWithArgs []
compileUserPreludeWithArgs :: [String] -- ^ extra ghc args
-> FilePath -- ^ the original's filename,
-- used for fixing up line numbers
-> FilePath -- ^ new filename, because ghc compiles from disk.
-- the compiled output will be in the same folder.
-> UserPrelude
-> UncertainT IO ()
compileUserPreludeWithArgs args orig f m = do
extraArgs <- lift $ extraGhcArgs
let args' = (extraArgs ++ args)
compileModuleWithArgs args' orig f m
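-- Usage sketch (added; the file paths below are hypothetical, not taken from
-- this module): read a user prelude, canonicalize it, and compile the copy.
--
--   example :: IO ()
--   example = runUncertainIO $ do
--     m <- readUserPrelude "prelude.hs"
--     compileUserPrelude "prelude.hs" "/tmp/prelude.hs" m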
|
gelisam/hawk
|
src/System/Console/Hawk/UserPrelude.hs
|
apache-2.0
| 2,308
| 0
| 11
| 615
| 319
| 180
| 139
| 32
| 1
|
{-# LANGUAGE PolyKinds #-}
module Type.Either where
import Prelude
type family IsLeft (a :: Either l r) :: Bool where IsLeft ('Left l) = 'True
IsLeft ('Right r) = 'False
type family IsRight (a :: Either l r) :: Bool where IsRight ('Left l) = 'False
IsRight ('Right r) = 'True
type family FromRight (a :: Either l r) where FromRight ('Right r) = r
type family FromLeft (a :: Either l r) where FromLeft ('Left l) = l
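-- Example reductions (added sketch; these follow directly from the equations
-- above and can be confirmed with GHCi's :kind!):
--   IsLeft ('Left Int)     ~ 'True
--   IsRight ('Right Bool)  ~ 'True
--   FromRight ('Right Char) ~ Char
--   FromLeft ('Left Bool)   ~ Bool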
|
wdanilo/typelevel
|
src/Type/Either.hs
|
apache-2.0
| 529
| 0
| 9
| 193
| 192
| 107
| 85
| -1
| -1
|
module Poset.A999998 (a999998) where
import Poset.Wichita (parents, wichitaRanks)
import Data.Set (Set)
import qualified Data.Set as Set
a999998_row :: Integer -> [Integer]
a999998_row = Set.toAscList . parents
a999998_list :: [Integer]
a999998_list = concatMap a999998_row [1..]
a999998 :: Int -> Integer
a999998 n = a999998_list !! (n - 1)
|
peterokagey/haskellOEIS
|
src/Sandbox/WichitaPoset/src/A999998.hs
|
apache-2.0
| 345
| 0
| 7
| 50
| 114
| 67
| 47
| 10
| 1
|
module NatTr.Product where
import qualified Prelude as P
import Data.Constraint hiding ((&&&))
import Data.Tagged
import Data.Proxy
import Category
import Category.Product
import Functor
import Functor.Product
import Product
import NatTr
import Adjoint
instance (Category c1, ProductCategory c2) => Functor (ProductF (NatTr c1 (c2 :: o2 -> o2 -> *))) ('KProxy :: KProxy ((*, *) -> *)) where
type Domain (ProductF (NatTr c1 c2)) = NatTr c1 c2 :><: NatTr c1 c2
type Codomain (ProductF (NatTr c1 c2)) = NatTr c1 c2
type FMap (ProductF (NatTr c1 c2)) '((f :: *), (g :: *)) = Comp ('KProxy :: KProxy (o2, o2)) (ProductF c2) (f :&&&: g)
morphMap = Tagged (\t@(f :><: g) -> case observeObjects t of Dict -> (f . appNat proj1) &&& (g . appNat proj2))
instance (Category c1, ProductCategory c2) =>
Adjoint (NatTr c1 c2) (NatTr (c1 :: o1 -> o1 -> *) (c2 :: o2 -> o2 -> *) :><: NatTr c1 c2) (Diag (NatTr c1 c2)) (ProductF (NatTr c1 c2)) where
leftAdjunct = NatTr (Tagged (\(s :><: t) -> compFL (productNat s t . diag) . assocR . compFR unit . idLInv))
rightAdjunct = NatTr (Tagged (\t -> (proj1F . compFR proj1 . t) :><: (proj2F . compFR proj2 . t)))
instance (Category c1, ProductCategory c2) => ProductCategory (NatTr c1 c2)
type f :*: g = Product (NatTr (->) (->)) f g
|
ian-mi/extended-categories
|
NatTr/Product.hs
|
bsd-3-clause
| 1,299
| 2
| 17
| 262
| 632
| 340
| 292
| -1
| -1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
#ifdef USE_GIT_INFO
{-# LANGUAGE TemplateHaskell #-}
#endif
-- | Main stack tool entry point.
module Main (main) where
#ifndef HIDE_DEP_VERSIONS
import qualified Build_stack
#endif
import Stack.Prelude
import Control.Monad.Logger (runNoLoggingT)
import Control.Monad.Reader (local)
import Control.Monad.Trans.Except (ExceptT)
import Control.Monad.Writer.Lazy (Writer)
import Data.Attoparsec.Interpreter (getInterpreterArgs)
import qualified Data.ByteString.Lazy as L
import Data.IORef.RunOnce (runOnce)
import Data.List
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import Data.Version (showVersion)
import System.Process.Read
#ifdef USE_GIT_INFO
import Development.GitRev (gitCommitCount, gitHash)
#endif
import Distribution.System (buildArch, buildPlatform)
import Distribution.Text (display)
import Distribution.Version (mkVersion')
import GHC.IO.Encoding (mkTextEncoding, textEncodingName)
import Lens.Micro
import Options.Applicative
import Options.Applicative.Help (errorHelp, stringChunk, vcatChunks)
import Options.Applicative.Builder.Extra
import Options.Applicative.Complicated
#ifdef USE_GIT_INFO
import Options.Applicative.Simple (simpleVersion)
#endif
import Options.Applicative.Types (ParserHelp(..))
import Path
import Path.IO
import qualified Paths_stack as Meta
import Stack.Build
import Stack.Clean (CleanOpts(..), clean)
import Stack.Config
import Stack.ConfigCmd as ConfigCmd
import Stack.Constants
import Stack.Constants.Config
import Stack.Coverage
import qualified Stack.Docker as Docker
import Stack.Dot
import Stack.Exec
import Stack.GhcPkg (findGhcPkgField)
import qualified Stack.Nix as Nix
import Stack.Fetch
import Stack.FileWatch
import Stack.Ghci
import Stack.Hoogle
import qualified Stack.IDE as IDE
import qualified Stack.Image as Image
import Stack.Init
import Stack.New
import Stack.Options.BuildParser
import Stack.Options.CleanParser
import Stack.Options.DockerParser
import Stack.Options.DotParser
import Stack.Options.ExecParser
import Stack.Options.GhciParser
import Stack.Options.GlobalParser
import Stack.Options.HpcReportParser
import Stack.Options.NewParser
import Stack.Options.NixParser
import Stack.Options.ScriptParser
import Stack.Options.SDistParser
import Stack.Options.SolverParser
import Stack.Options.Utils
import qualified Stack.PackageIndex
import qualified Stack.Path
import Stack.PrettyPrint hiding (display)
import qualified Stack.PrettyPrint as P
import Stack.Runners
import Stack.Script
import Stack.SDist (getSDistTarball, checkSDistTarball, checkSDistTarball', SDistOpts(..))
import Stack.SetupCmd
import qualified Stack.Sig as Sig
import Stack.Snapshot (loadResolver)
import Stack.Solver (solveExtraDeps)
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Compiler
import Stack.Types.Nix
import Stack.Upgrade
import qualified Stack.Upload as Upload
import qualified System.Directory as D
import System.Environment (getProgName, getArgs, withArgs)
import System.Exit
import System.FilePath (pathSeparator)
import System.IO (hIsTerminalDevice, stderr, stdin, stdout, hSetBuffering, BufferMode(..), hPutStrLn, hGetEncoding, hSetEncoding)
-- | Change the character encoding of the given Handle to transliterate
-- on unsupported characters instead of throwing an exception
hSetTranslit :: Handle -> IO ()
hSetTranslit h = do
menc <- hGetEncoding h
case fmap textEncodingName menc of
Just name
| '/' `notElem` name -> do
enc' <- mkTextEncoding $ name ++ "//TRANSLIT"
hSetEncoding h enc'
_ -> return ()
versionString' :: String
#ifdef USE_GIT_INFO
versionString' = concat $ concat
[ [$(simpleVersion Meta.version)]
-- Leave out number of commits for --depth=1 clone
-- See https://github.com/commercialhaskell/stack/issues/792
, [" (" ++ commitCount ++ " commits)" | commitCount /= ("1"::String) &&
commitCount /= ("UNKNOWN" :: String)]
, [" ", display buildArch]
, [depsString]
]
where
commitCount = $gitCommitCount
#else
versionString' =
showVersion Meta.version
++ ' ' : display buildArch
++ depsString
where
#endif
#ifdef HIDE_DEP_VERSIONS
depsString = " hpack-" ++ VERSION_hpack
#else
depsString = "\nCompiled with:\n" ++ unlines (map ("- " ++) Build_stack.deps)
#endif
main :: IO ()
main = do
-- Line buffer the output by default, particularly for non-terminal runs.
-- See https://github.com/commercialhaskell/stack/pull/360
hSetBuffering stdout LineBuffering
hSetBuffering stdin LineBuffering
hSetBuffering stderr LineBuffering
hSetTranslit stdout
hSetTranslit stderr
args <- getArgs
progName <- getProgName
isTerminal <- hIsTerminalDevice stdout
execExtraHelp args
Docker.dockerHelpOptName
(dockerOptsParser False)
("Only showing --" ++ Docker.dockerCmdName ++ "* options.")
execExtraHelp args
Nix.nixHelpOptName
(nixOptsParser False)
("Only showing --" ++ Nix.nixCmdName ++ "* options.")
currentDir <- D.getCurrentDirectory
eGlobalRun <- try $ commandLineHandler currentDir progName False
case eGlobalRun of
Left (exitCode :: ExitCode) ->
throwIO exitCode
Right (globalMonoid,run) -> do
let global = globalOptsFromMonoid isTerminal globalMonoid
when (globalLogLevel global == LevelDebug) $ hPutStrLn stderr versionString'
case globalReExecVersion global of
Just expectVersion -> do
expectVersion' <- parseVersionFromString expectVersion
unless (checkVersion MatchMinor expectVersion' (fromCabalVersion (mkVersion' Meta.version)))
$ throwIO $ InvalidReExecVersion expectVersion (showVersion Meta.version)
_ -> return ()
run global `catch` \e ->
-- This special handler stops "stack: " from being printed before the
-- exception
case fromException e of
Just ec -> exitWith ec
Nothing -> do
printExceptionStderr e
exitFailure
-- Vertically combine only the error component of the first argument with the
-- error component of the second.
vcatErrorHelp :: ParserHelp -> ParserHelp -> ParserHelp
vcatErrorHelp h1 h2 = h2 { helpError = vcatChunks [helpError h2, helpError h1] }
commandLineHandler
:: FilePath
-> String
-> Bool
-> IO (GlobalOptsMonoid, GlobalOpts -> IO ())
commandLineHandler currentDir progName isInterpreter = complicatedOptions
Meta.version
(Just versionString')
VERSION_hpack
"stack - The Haskell Tool Stack"
""
"stack's documentation is available at https://docs.haskellstack.org/"
(globalOpts OuterGlobalOpts)
(Just failureCallback)
addCommands
where
failureCallback f args =
case stripPrefix "Invalid argument" (fst (renderFailure f "")) of
Just _ -> if isInterpreter
then parseResultHandler args f
else secondaryCommandHandler args f
>>= interpreterHandler currentDir args
Nothing -> parseResultHandler args f
parseResultHandler args f =
if isInterpreter
then do
let hlp = errorHelp $ stringChunk
(unwords ["Error executing interpreter command:"
, progName
, unwords args])
handleParseResult (overFailure (vcatErrorHelp hlp) (Failure f))
else handleParseResult (Failure f)
addCommands = do
unless isInterpreter (do
addBuildCommand' "build"
"Build the package(s) in this directory/configuration"
buildCmd
(buildOptsParser Build)
addBuildCommand' "install"
"Shortcut for 'build --copy-bins'"
buildCmd
(buildOptsParser Install)
addCommand' "uninstall"
"DEPRECATED: This command performs no actions, and is present for documentation only"
uninstallCmd
(many $ strArgument $ metavar "IGNORED")
addBuildCommand' "test"
"Shortcut for 'build --test'"
buildCmd
(buildOptsParser Test)
addBuildCommand' "bench"
"Shortcut for 'build --bench'"
buildCmd
(buildOptsParser Bench)
addBuildCommand' "haddock"
"Shortcut for 'build --haddock'"
buildCmd
(buildOptsParser Haddock)
addCommand' "new"
"Create a new project from a template. Run `stack templates' to see available templates."
newCmd
newOptsParser
addCommand' "templates"
"List the templates available for `stack new'."
templatesCmd
(pure ())
addCommand' "init"
"Create stack project config from cabal or hpack package specifications"
initCmd
initOptsParser
addCommand' "solver"
"Add missing extra-deps to stack project config"
solverCmd
solverOptsParser
addCommand' "setup"
"Get the appropriate GHC for your project"
setupCmd
setupParser
addCommand' "path"
"Print out handy path information"
pathCmd
Stack.Path.pathParser
addCommand' "unpack"
"Unpack one or more packages locally"
unpackCmd
(some $ strArgument $ metavar "PACKAGE")
addCommand' "update"
"Update the package index"
updateCmd
(pure ())
addCommand' "upgrade"
"Upgrade to the latest stack"
upgradeCmd
upgradeOpts
addCommand'
"upload"
"Upload a package to Hackage"
uploadCmd
(sdistOptsParser True)
addCommand'
"sdist"
"Create source distribution tarballs"
sdistCmd
(sdistOptsParser False)
addCommand' "dot"
"Visualize your project's dependency graph using Graphviz dot"
dotCmd
(dotOptsParser False) -- Default for --external is False.
addCommand' "ghc"
"Run ghc"
execCmd
(execOptsParser $ Just ExecGhc)
addCommand' "hoogle"
("Run hoogle, the Haskell API search engine. Use 'stack exec' syntax " ++
"to pass Hoogle arguments, e.g. stack hoogle -- --count=20")
hoogleCmd
((,,) <$> many (strArgument (metavar "ARG"))
<*> boolFlags
True
"setup"
"If needed: install hoogle, build haddocks and generate a hoogle database"
idm
<*> switch
(long "rebuild" <>
help "Rebuild the hoogle database"))
)
-- These are the only commands allowed in interpreter mode as well
addCommand' "exec"
"Execute a command"
execCmd
(execOptsParser Nothing)
addGhciCommand' "ghci"
"Run ghci in the context of package(s) (experimental)"
ghciCmd
ghciOptsParser
addGhciCommand' "repl"
"Run ghci in the context of package(s) (experimental) (alias for 'ghci')"
ghciCmd
ghciOptsParser
addCommand' "runghc"
"Run runghc"
execCmd
(execOptsParser $ Just ExecRunGhc)
addCommand' "runhaskell"
"Run runghc (alias for 'runghc')"
execCmd
(execOptsParser $ Just ExecRunGhc)
addCommand' "script"
"Run a Stack Script"
scriptCmd
scriptOptsParser
unless isInterpreter (do
addCommand' "eval"
"Evaluate some haskell code inline. Shortcut for 'stack exec ghc -- -e CODE'"
evalCmd
(evalOptsParser "CODE")
addCommand' "clean"
"Clean the local packages"
cleanCmd
cleanOptsParser
addCommand' "list-dependencies"
"List the dependencies"
listDependenciesCmd
listDepsOptsParser
addCommand' "query"
"Query general build information (experimental)"
queryCmd
(many $ strArgument $ metavar "SELECTOR...")
addSubCommands'
"ide"
"IDE-specific commands"
(do addCommand'
"packages"
"List all available local loadable packages"
idePackagesCmd
(pure ())
addCommand'
"targets"
"List all available stack targets"
ideTargetsCmd
(pure ()))
addSubCommands'
Docker.dockerCmdName
"Subcommands specific to Docker use"
(do addCommand' Docker.dockerPullCmdName
"Pull latest version of Docker image from registry"
dockerPullCmd
(pure ())
addCommand' "reset"
"Reset the Docker sandbox"
dockerResetCmd
(switch (long "keep-home" <>
help "Do not delete sandbox's home directory"))
addCommand' Docker.dockerCleanupCmdName
"Clean up Docker images and containers"
dockerCleanupCmd
dockerCleanupOptsParser)
addSubCommands'
ConfigCmd.cfgCmdName
"Subcommands specific to modifying stack.yaml files"
(addCommand' ConfigCmd.cfgCmdSetName
"Sets a field in the project's stack.yaml to value"
cfgSetCmd
configCmdSetParser)
addSubCommands'
Image.imgCmdName
"Subcommands specific to imaging"
(addCommand'
Image.imgDockerCmdName
"Build a Docker image for the project"
imgDockerCmd
((,) <$>
boolFlags
True
"build"
"building the project before creating the container"
idm <*>
many
(textOption
(long "image" <>
help "A specific container image name to build"))))
addSubCommands'
"hpc"
"Subcommands specific to Haskell Program Coverage"
(addCommand' "report"
"Generate unified HPC coverage report from tix files and project targets"
hpcReportCmd
hpcReportOptsParser)
)
where
-- addCommand hiding global options
addCommand' :: String -> String -> (a -> GlobalOpts -> IO ()) -> Parser a
-> AddCommand
addCommand' cmd title constr =
addCommand cmd title globalFooter constr (globalOpts OtherCmdGlobalOpts)
addSubCommands' :: String -> String -> AddCommand
-> AddCommand
addSubCommands' cmd title =
addSubCommands cmd title globalFooter (globalOpts OtherCmdGlobalOpts)
-- Additional helper that hides global options and shows build options
addBuildCommand' :: String -> String -> (a -> GlobalOpts -> IO ()) -> Parser a
-> AddCommand
addBuildCommand' cmd title constr =
addCommand cmd title globalFooter constr (globalOpts BuildCmdGlobalOpts)
-- Additional helper that hides global options and shows some ghci options
addGhciCommand' :: String -> String -> (a -> GlobalOpts -> IO ()) -> Parser a
-> AddCommand
addGhciCommand' cmd title constr =
addCommand cmd title globalFooter constr (globalOpts GhciCmdGlobalOpts)
globalOpts :: GlobalOptsContext -> Parser GlobalOptsMonoid
globalOpts kind =
extraHelpOption hide progName (Docker.dockerCmdName ++ "*") Docker.dockerHelpOptName <*>
extraHelpOption hide progName (Nix.nixCmdName ++ "*") Nix.nixHelpOptName <*>
globalOptsParser currentDir kind
(if isInterpreter
-- Silent except when errors occur - see #2879
then Just LevelError
else Nothing)
where hide = kind /= OuterGlobalOpts
globalFooter = "Run 'stack --help' for global options that apply to all subcommands."
type AddCommand =
ExceptT (GlobalOpts -> IO ()) (Writer (Mod CommandFields (GlobalOpts -> IO (), GlobalOptsMonoid))) ()
-- | fall-through to external executables in `git` style if they exist
-- (i.e. `stack something` looks for `stack-something` before
-- failing with "Invalid argument `something'")
secondaryCommandHandler
:: [String]
-> ParserFailure ParserHelp
-> IO (ParserFailure ParserHelp)
secondaryCommandHandler args f =
-- don't even try when the argument looks like a path or flag
if elem pathSeparator cmd || "-" `isPrefixOf` head args
then return f
else do
mExternalExec <- D.findExecutable cmd
case mExternalExec of
Just ex -> do
menv <- getEnvOverride buildPlatform
-- TODO show the command in verbose mode
-- hPutStrLn stderr $ unwords $
-- ["Running", "[" ++ ex, unwords (tail args) ++ "]"]
_ <- runNoLoggingT (exec menv ex (tail args))
return f
Nothing -> return $ fmap (vcatErrorHelp (noSuchCmd cmd)) f
where
-- FIXME this is broken when any options are specified before the command
-- e.g. stack --verbosity silent cmd
cmd = stackProgName ++ "-" ++ head args
noSuchCmd name = errorHelp $ stringChunk
("Auxiliary command not found in path `" ++ name ++ "'")
interpreterHandler
:: Monoid t
=> FilePath
-> [String]
-> ParserFailure ParserHelp
-> IO (GlobalOptsMonoid, (GlobalOpts -> IO (), t))
interpreterHandler currentDir args f = do
-- args can include top-level config such as --extra-lib-dirs=... (set by
-- nix-shell) - we need to find the first argument which is a file, everything
-- afterwards is an argument to the script, everything before is an argument
-- to Stack
(stackArgs, fileArgs) <- spanM (fmap not . D.doesFileExist) args
case fileArgs of
(file:fileArgs') -> runInterpreterCommand file stackArgs fileArgs'
[] -> parseResultHandler (errorCombine (noSuchFile firstArg))
where
firstArg = head args
spanM _ [] = return ([], [])
spanM p xs@(x:xs') = do
r <- p x
if r
then do
(ys, zs) <- spanM p xs'
return (x:ys, zs)
else
return ([], xs)
-- if the first argument contains a path separator then it might be a file,
-- or a Stack option referencing a file. In that case we only show the
    -- interpreter error message and exclude the command-related error messages.
errorCombine =
if pathSeparator `elem` firstArg
then overrideErrorHelp
else vcatErrorHelp
overrideErrorHelp h1 h2 = h2 { helpError = helpError h1 }
parseResultHandler fn = handleParseResult (overFailure fn (Failure f))
noSuchFile name = errorHelp $ stringChunk
("File does not exist or is not a regular file `" ++ name ++ "'")
runInterpreterCommand path stackArgs fileArgs = do
progName <- getProgName
iargs <- getInterpreterArgs path
let parseCmdLine = commandLineHandler currentDir progName True
separator = if "--" `elem` iargs then [] else ["--"]
cmdArgs = stackArgs ++ iargs ++ separator ++ path : fileArgs
-- TODO show the command in verbose mode
-- hPutStrLn stderr $ unwords $
-- ["Running", "[" ++ progName, unwords cmdArgs ++ "]"]
(a,b) <- withArgs cmdArgs parseCmdLine
return (a,(b,mempty))
pathCmd :: [Text] -> GlobalOpts -> IO ()
pathCmd keys go = withBuildConfig go (Stack.Path.path keys)
setupCmd :: SetupCmdOpts -> GlobalOpts -> IO ()
setupCmd sco@SetupCmdOpts{..} go@GlobalOpts{..} = loadConfigWithOpts go $ \lc -> do
when (isJust scoUpgradeCabal && nixEnable (configNix (lcConfig lc))) $ do
throwIO UpgradeCabalUnusable
withUserFileLock go (configStackRoot $ lcConfig lc) $ \lk -> do
let getCompilerVersion = loadCompilerVersion go lc
runRIO (lcConfig lc) $
Docker.reexecWithOptionalContainer
(lcProjectRoot lc)
Nothing
(runRIO (lcConfig lc) $
Nix.reexecWithOptionalShell (lcProjectRoot lc) getCompilerVersion $ do
(wantedCompiler, compilerCheck, mstack) <-
case scoCompilerVersion of
Just v -> return (v, MatchMinor, Nothing)
Nothing -> do
bc <- liftIO $ lcLoadBuildConfig lc globalCompiler
return ( view wantedCompilerVersionL bc
, configCompilerCheck (lcConfig lc)
, Just $ view stackYamlL bc
)
runRIO (loadMiniConfig (lcConfig lc)) $ setup sco wantedCompiler compilerCheck mstack
)
Nothing
(Just $ munlockFile lk)
cleanCmd :: CleanOpts -> GlobalOpts -> IO ()
cleanCmd opts go =
-- See issues #2010 and #3468 for why "stack clean --full" is not used
-- within docker.
case opts of
CleanFull{} -> withBuildConfigAndLock go (const (clean opts))
CleanShallow{} -> withBuildConfigAndLockNoDocker go (const (clean opts))
-- | Helper for build and install commands
buildCmd :: BuildOptsCLI -> GlobalOpts -> IO ()
buildCmd opts go = do
when ("-prof" `elem` boptsCLIGhcOptions opts) $ do
hPutStrLn stderr "Error: When building with stack, you should not use the -prof GHC option"
hPutStrLn stderr "Instead, please use --library-profiling and --executable-profiling"
hPutStrLn stderr "See: https://github.com/commercialhaskell/stack/issues/1015"
exitFailure
case boptsCLIFileWatch opts of
FileWatchPoll -> fileWatchPoll stderr inner
FileWatch -> fileWatch stderr inner
NoFileWatch -> inner $ const $ return ()
where
inner setLocalFiles = withBuildConfigAndLock go' $ \lk ->
Stack.Build.build setLocalFiles lk opts
-- Read the build command from the CLI and enable it to run
go' = case boptsCLICommand opts of
Test -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidTestsL) (Just True) go
Haddock -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidHaddockL) (Just True) go
Bench -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidBenchmarksL) (Just True) go
Install -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidInstallExesL) (Just True) go
Build -> go -- Default case is just Build
uninstallCmd :: [String] -> GlobalOpts -> IO ()
uninstallCmd _ go = withConfigAndLock go $
prettyErrorL
[ flow "stack does not manage installations in global locations."
, flow "The only global mutation stack performs is executable copying."
, flow "For the default executable destination, please run"
, styleShell "stack path --local-bin"
]
-- | Unpack packages to the filesystem
unpackCmd :: [String] -> GlobalOpts -> IO ()
unpackCmd names go = withConfigAndLock go $ do
mSnapshotDef <- mapM (makeConcreteResolver Nothing >=> loadResolver) (globalResolver go)
Stack.Fetch.unpackPackages mSnapshotDef "." names
-- | Update the package index
updateCmd :: () -> GlobalOpts -> IO ()
updateCmd () go = withConfigAndLock go Stack.PackageIndex.updateAllIndices
upgradeCmd :: UpgradeOpts -> GlobalOpts -> IO ()
upgradeCmd upgradeOpts' go = withGlobalConfigAndLock go $
upgrade (globalConfigMonoid go)
(globalResolver go)
#ifdef USE_GIT_INFO
(find (/= "UNKNOWN") [$gitHash])
#else
Nothing
#endif
upgradeOpts'
-- | Upload to Hackage
uploadCmd :: SDistOpts -> GlobalOpts -> IO ()
uploadCmd (SDistOpts [] _ _ _ _ _) go =
withConfigAndLock go . prettyErrorL $
[ flow "To upload the current package, please run"
, styleShell "stack upload ."
, flow "(with the period at the end)"
]
uploadCmd sdistOpts go = do
let partitionM _ [] = return ([], [])
partitionM f (x:xs) = do
r <- f x
(as, bs) <- partitionM f xs
return $ if r then (x:as, bs) else (as, x:bs)
(files, nonFiles) <- partitionM D.doesFileExist (sdoptsDirsToWorkWith sdistOpts)
(dirs, invalid) <- partitionM D.doesDirectoryExist nonFiles
withBuildConfigAndLock go $ \_ -> do
unless (null invalid) $ do
let invalidList = bulletedList $ map (styleFile . fromString) invalid
prettyErrorL
[ styleShell "stack upload"
, flow "expects a list of sdist tarballs or cabal directories."
, flow "Can't find:"
, line <> invalidList
]
liftIO exitFailure
config <- view configL
getCreds <- liftIO (runOnce (Upload.loadCreds config))
mapM_ (resolveFile' >=> checkSDistTarball sdistOpts) files
forM_
files
(\file ->
do tarFile <- resolveFile' file
liftIO $ do
creds <- getCreds
Upload.upload creds (toFilePath tarFile)
when
(sdoptsSign sdistOpts)
(void $
Sig.sign
(sdoptsSignServerUrl sdistOpts)
tarFile))
unless (null dirs) $
forM_ dirs $ \dir -> do
pkgDir <- resolveDir' dir
(tarName, tarBytes, mcabalRevision) <- getSDistTarball (sdoptsPvpBounds sdistOpts) pkgDir
checkSDistTarball' sdistOpts tarName tarBytes
liftIO $ do
creds <- getCreds
Upload.uploadBytes creds tarName tarBytes
forM_ mcabalRevision $ uncurry $ Upload.uploadRevision creds
tarPath <- parseRelFile tarName
when
(sdoptsSign sdistOpts)
(void $
Sig.signTarBytes
(sdoptsSignServerUrl sdistOpts)
tarPath
tarBytes)
sdistCmd :: SDistOpts -> GlobalOpts -> IO ()
sdistCmd sdistOpts go =
withBuildConfig go $ do -- No locking needed.
-- If no directories are specified, build all sdist tarballs.
dirs' <- if null (sdoptsDirsToWorkWith sdistOpts)
then liftM (map lpvRoot . Map.elems . lpProject) getLocalPackages
else mapM resolveDir' (sdoptsDirsToWorkWith sdistOpts)
forM_ dirs' $ \dir -> do
(tarName, tarBytes, _mcabalRevision) <- getSDistTarball (sdoptsPvpBounds sdistOpts) dir
distDir <- distDirFromDir dir
tarPath <- (distDir </>) <$> parseRelFile tarName
ensureDir (parent tarPath)
liftIO $ L.writeFile (toFilePath tarPath) tarBytes
checkSDistTarball sdistOpts tarPath
prettyInfoL [flow "Wrote sdist tarball to", P.display tarPath]
when (sdoptsSign sdistOpts) (void $ Sig.sign (sdoptsSignServerUrl sdistOpts) tarPath)
-- | Execute a command.
execCmd :: ExecOpts -> GlobalOpts -> IO ()
execCmd ExecOpts {..} go@GlobalOpts{..} =
case eoExtra of
ExecOptsPlain -> do
(cmd, args) <- case (eoCmd, eoArgs) of
(ExecCmd cmd, args) -> return (cmd, args)
(ExecGhc, args) -> return ("ghc", args)
(ExecRunGhc, args) -> return ("runghc", args)
loadConfigWithOpts go $ \lc ->
withUserFileLock go (configStackRoot $ lcConfig lc) $ \lk -> do
let getCompilerVersion = loadCompilerVersion go lc
runRIO (lcConfig lc) $
Docker.reexecWithOptionalContainer
(lcProjectRoot lc)
-- Unlock before transferring control away, whether using docker or not:
(Just $ munlockFile lk)
(runRIO (lcConfig lc) $ do
config <- view configL
menv <- liftIO $ configEnvOverride config plainEnvSettings
Nix.reexecWithOptionalShell
(lcProjectRoot lc)
getCompilerVersion
(runRIO (lcConfig lc) $
exec menv cmd args))
Nothing
Nothing -- Unlocked already above.
ExecOptsEmbellished {..} ->
withBuildConfigAndLock go $ \lk -> do
let targets = concatMap words eoPackages
unless (null targets) $
Stack.Build.build (const $ return ()) lk defaultBuildOptsCLI
{ boptsCLITargets = map T.pack targets
}
config <- view configL
menv <- liftIO $ configEnvOverride config eoEnvSettings
-- Add RTS options to arguments
let argsWithRts args = if null eoRtsOptions
then args :: [String]
else args ++ ["+RTS"] ++ eoRtsOptions ++ ["-RTS"]
(cmd, args) <- case (eoCmd, argsWithRts eoArgs) of
(ExecCmd cmd, args) -> return (cmd, args)
(ExecGhc, args) -> getGhcCmd "" menv eoPackages args
-- NOTE: this won't currently work for GHCJS, because it doesn't have
-- a runghcjs binary. It probably will someday, though.
(ExecRunGhc, args) ->
getGhcCmd "run" menv eoPackages args
munlockFile lk -- Unlock before transferring control away.
exec menv cmd args
where
-- return the package-id of the first package in GHC_PACKAGE_PATH
getPkgId menv wc name = do
mId <- findGhcPkgField menv wc [] name "id"
case mId of
Just i -> return (head $ words (T.unpack i))
-- should never happen as we have already installed the packages
_ -> liftIO $ do
hPutStrLn stderr ("Could not find package id of package " ++ name)
exitFailure
getPkgOpts menv wc pkgs = do
ids <- mapM (getPkgId menv wc) pkgs
return $ map ("-package-id=" ++) ids
getGhcCmd prefix menv pkgs args = do
wc <- view $ actualCompilerVersionL.whichCompilerL
pkgopts <- getPkgOpts menv wc pkgs
return (prefix ++ compilerExeName wc, pkgopts ++ args)
-- | Evaluate some haskell code inline.
evalCmd :: EvalOpts -> GlobalOpts -> IO ()
evalCmd EvalOpts {..} go@GlobalOpts {..} = execCmd execOpts go
where
execOpts =
ExecOpts { eoCmd = ExecGhc
, eoArgs = ["-e", evalArg]
, eoExtra = evalExtra
}
-- | Run GHCi in the context of a project.
ghciCmd :: GhciOpts -> GlobalOpts -> IO ()
ghciCmd ghciOpts go@GlobalOpts{..} =
withBuildConfigAndLock go $ \lk -> do
munlockFile lk -- Don't hold the lock while in the GHCI.
bopts <- view buildOptsL
-- override env so running of tests and benchmarks is disabled
let boptsLocal = bopts
{ boptsTestOpts = (boptsTestOpts bopts) { toDisableRun = True }
, boptsBenchmarkOpts = (boptsBenchmarkOpts bopts) { beoDisableRun = True }
}
local (set buildOptsL boptsLocal)
(ghci ghciOpts)
-- | List packages in the project.
idePackagesCmd :: () -> GlobalOpts -> IO ()
idePackagesCmd () go =
withBuildConfig go IDE.listPackages
-- | List targets in the project.
ideTargetsCmd :: () -> GlobalOpts -> IO ()
ideTargetsCmd () go =
withBuildConfig go IDE.listTargets
-- | Pull the current Docker image.
dockerPullCmd :: () -> GlobalOpts -> IO ()
dockerPullCmd _ go@GlobalOpts{..} =
loadConfigWithOpts go $ \lc ->
-- TODO: can we eliminate this lock if it doesn't touch ~/.stack/?
withUserFileLock go (configStackRoot $ lcConfig lc) $ \_ ->
runRIO (lcConfig lc) $
Docker.preventInContainer Docker.pull
-- | Reset the Docker sandbox.
dockerResetCmd :: Bool -> GlobalOpts -> IO ()
dockerResetCmd keepHome go@GlobalOpts{..} =
loadConfigWithOpts go $ \lc ->
-- TODO: can we eliminate this lock if it doesn't touch ~/.stack/?
withUserFileLock go (configStackRoot $ lcConfig lc) $ \_ ->
runRIO (lcConfig lc) $
Docker.preventInContainer $ Docker.reset (lcProjectRoot lc) keepHome
-- | Cleanup Docker images and containers.
dockerCleanupCmd :: Docker.CleanupOpts -> GlobalOpts -> IO ()
dockerCleanupCmd cleanupOpts go@GlobalOpts{..} =
loadConfigWithOpts go $ \lc ->
-- TODO: can we eliminate this lock if it doesn't touch ~/.stack/?
withUserFileLock go (configStackRoot $ lcConfig lc) $ \_ ->
runRIO (lcConfig lc) $
Docker.preventInContainer $
Docker.cleanup cleanupOpts
cfgSetCmd :: ConfigCmd.ConfigCmdSet -> GlobalOpts -> IO ()
cfgSetCmd co go@GlobalOpts{..} =
withMiniConfigAndLock
go
(cfgCmdSet go co)
imgDockerCmd :: (Bool, [Text]) -> GlobalOpts -> IO ()
imgDockerCmd (rebuild,images) go@GlobalOpts{..} = loadConfigWithOpts go $ \lc -> do
let mProjectRoot = lcProjectRoot lc
withBuildConfigExt
False
go
Nothing
(\lk ->
do when rebuild $
Stack.Build.build
(const (return ()))
lk
defaultBuildOptsCLI
Image.stageContainerImageArtifacts mProjectRoot images)
(Just $ Image.createContainerImageFromStage mProjectRoot images)
-- | Project initialization
initCmd :: InitOpts -> GlobalOpts -> IO ()
initCmd initOpts go = do
pwd <- getCurrentDir
withMiniConfigAndLock go (initProject IsInitCmd pwd initOpts (globalResolver go))
-- | Create a project directory structure and initialize the stack config.
newCmd :: (NewOpts,InitOpts) -> GlobalOpts -> IO ()
newCmd (newOpts,initOpts) go@GlobalOpts{..} =
withMiniConfigAndLock go $ do
dir <- new newOpts (forceOverwrite initOpts)
initProject IsNewCmd dir initOpts globalResolver
-- | List the available templates.
templatesCmd :: () -> GlobalOpts -> IO ()
templatesCmd _ go@GlobalOpts{..} = withConfigAndLock go listTemplates
-- | Fix up extra-deps for a project
solverCmd :: Bool -- ^ modify stack.yaml automatically?
-> GlobalOpts
-> IO ()
solverCmd fixStackYaml go =
withBuildConfigAndLock go (\_ -> solveExtraDeps fixStackYaml)
-- | Visualize dependencies
dotCmd :: DotOpts -> GlobalOpts -> IO ()
dotCmd dotOpts go = withBuildConfigDot dotOpts go $ dot dotOpts
-- | List the dependencies
listDependenciesCmd :: ListDepsOpts -> GlobalOpts -> IO ()
listDependenciesCmd opts go = withBuildConfigDot (listDepsDotOpts opts) go $ listDependencies opts
-- Plumbing for --test and --bench flags
withBuildConfigDot :: DotOpts -> GlobalOpts -> RIO EnvConfig () -> IO ()
withBuildConfigDot opts go f = withBuildConfig go' f
where
go' =
(if dotTestTargets opts then set (globalOptsBuildOptsMonoidL.buildOptsMonoidTestsL) (Just True) else id) $
(if dotBenchTargets opts then set (globalOptsBuildOptsMonoidL.buildOptsMonoidBenchmarksL) (Just True) else id)
go
-- | Query build information
queryCmd :: [String] -> GlobalOpts -> IO ()
queryCmd selectors go = withBuildConfig go $ queryBuildInfo $ map T.pack selectors
-- | Generate a combined HPC report
hpcReportCmd :: HpcReportOpts -> GlobalOpts -> IO ()
hpcReportCmd hropts go = withBuildConfig go $ generateHpcReportForTargets hropts
data MainException = InvalidReExecVersion String String
| UpgradeCabalUnusable
deriving (Typeable)
instance Exception MainException
instance Show MainException where
show (InvalidReExecVersion expected actual) = concat
[ "When re-executing '"
, stackProgName
, "' in a container, the incorrect version was found\nExpected: "
, expected
, "; found: "
, actual]
show UpgradeCabalUnusable = "--upgrade-cabal cannot be used when nix is activated"
|
MichielDerhaeg/stack
|
src/main/Main.hs
|
bsd-3-clause
| 38,534
| 0
| 29
| 12,766
| 7,790
| 3,953
| 3,837
| 754
| 8
|
module TagSoup.Sample where
import Text.HTML.TagSoup
import Control.Exception
import Control.Monad
import Data.Char
import Data.List
import System.Cmd
import System.Directory
import System.Exit
import System.IO
openItem :: String -> IO String
openItem url | not $ "http://" `isPrefixOf` url = readFile url
openItem url = bracket
(openTempFile "." "tagsoup.tmp")
(\(file,hndl) -> removeFile file)
$ \(file,hndl) -> do
hClose hndl
putStrLn $ "Downloading: " ++ url
res <- system $ "wget " ++ url ++ " -O " ++ file
when (res /= ExitSuccess) $ error $ "Failed to download using wget: " ++ url
src <- readFile file
length src `seq` return src
grab :: String -> IO ()
grab x = openItem x >>= putStr
parse :: String -> IO ()
parse x = openItem x >>= putStr . show2 . parseTags
where
show2 [] = "[]"
show2 xs = "[" ++ concat (intersperseNotBroken "\n," $ map show xs) ++ "\n]\n"
-- the standard intersperse has a strictness bug which sucks!
intersperseNotBroken :: a -> [a] -> [a]
intersperseNotBroken _ [] = []
intersperseNotBroken sep (x:xs) = x : is xs
where
is [] = []
is (y:ys) = sep : y : is ys
{-
<li id="lastmod"> This page was last modified on 9 September 2013, at 22:38.</li>
-}
haskellLastModifiedDateTime :: IO ()
haskellLastModifiedDateTime = do
src <- openItem "http://wiki.haskell.org/Haskell"
let lastModifiedDateTime = fromFooter $ parseTags src
putStrLn $ "wiki.haskell.org was last modified on " ++ lastModifiedDateTime
where fromFooter = unwords . drop 6 . words . innerText . take 2 . dropWhile (~/= "<li id=lastmod>")
googleTechNews :: IO ()
googleTechNews = do
tags <- fmap parseTags $ openItem "http://news.google.com/?ned=us&topic=t"
let links = [ ascii name ++ " <" ++ maybe "unknown" shortUrl (lookup "href" atts) ++ ">"
| TagOpen "h2" [("class","title")]:TagText spaces:TagOpen "a" atts:TagText name:_ <- tails tags]
putStr $ unlines links
where
shortUrl x | "http://" `isPrefixOf` x = shortUrl $ drop 7 x
| "www." `isPrefixOf` x = shortUrl $ drop 4 x
| otherwise = takeWhile (/= '/') x
ascii ('\226':'\128':'\147':xs) = '-' : ascii xs
ascii ('\194':'\163':xs) = "#GBP " ++ ascii xs
ascii (x:xs) = x : ascii xs
ascii [] = []
spjPapers :: IO ()
spjPapers = do
tags <- parseTags <$> openItem "http://research.microsoft.com/en-us/people/simonpj/"
let links = map f $ sections (~== "<A>") $
takeWhile (~/= "<a name=haskell>") $
drop 5 $ dropWhile (~/= "<a name=current>") tags
putStr $ unlines links
where
f :: [Tag String] -> String
f = dequote . unwords . words . fromTagText . head . filter isTagText
dequote ('\"':xs) | last xs == '\"' = init xs
dequote x = x
ndmPapers :: IO ()
ndmPapers = do
tags <- parseTags <$> openItem "http://community.haskell.org/~ndm/downloads/"
let papers = map f $ sections (~== "<li class=paper>") tags
putStr $ unlines papers
where
f :: [Tag String] -> String
f xs = fromTagText (xs !! 2)
currentTime :: IO ()
currentTime = do
tags <- parseTags <$> openItem "http://www.timeanddate.com/worldclock/uk/london"
let time = fromTagText (dropWhile (~/= "<span id=ct>") tags !! 1)
putStrLn time
type Section = String
data Package = Package {name :: String, desc :: String, href :: String}
deriving Show
hackage :: IO [(Section,[Package])]
hackage = do
tags <- fmap parseTags $ openItem "http://hackage.haskell.org/packages/archive/pkg-list.html"
return $ map parseSect $ partitions (~== "<h3>") tags
where
parseSect xs = (nam, packs)
where
nam = fromTagText $ xs !! 2
packs = map parsePackage $ partitions (~== "<li>") xs
parsePackage xs =
Package
(fromTagText $ xs !! 2)
(drop 2 $ dropWhile (/= ':') $ fromTagText $ xs !! 4)
(fromAttrib "href" $ xs !! 1)
-- rssCreators Example: prints names of story contributors on
-- sequence.complete.org. This content is RSS (not HTML), and the selected
-- tag uses a different XML namespace "dc:creator".
rssCreators :: IO ()
rssCreators = do
tags <- fmap parseTags $ openItem "http://sequence.complete.org/node/feed"
putStrLn $ unlines $ map names $ partitions (~== "<dc:creator>") tags
where names xs = fromTagText $ xs !! 1
validate :: String -> IO ()
validate x = putStr . unlines . g . f . parseTagsOptions opts =<< openItem x
where
opts = parseOptions{optTagPosition=True, optTagWarning=True}
f :: [Tag String] -> [String]
f (TagPosition row col:TagWarning warn:rest) =
("Warning (" ++ show row ++ "," ++ show col ++ "): " ++ warn) : f rest
f (TagWarning warn:rest) =
("Warning (?,?): " ++ warn) : f rest
f (_:rest) = f rest
f [] = []
g xs = xs ++ [if n == 0 then "Success, no warnings"
else "Failed, " ++ show n ++ " warning" ++ ['s'|n>1]]
where n = length xs
|
ChristopherKing42/tagsoup
|
TagSoup/Sample.hs
|
bsd-3-clause
| 5,250
| 2
| 17
| 1,461
| 1,734
| 869
| 865
| 108
| 5
|
{-# LANGUAGE OverloadedStrings #-}
module Web.Spock.Internal.SessionManagerSpec (spec) where
import Control.Concurrent.STM
import Data.IORef
import Data.Time
import qualified Data.Vault.Lazy as V
import Test.Hspec
import Web.Spock.Config
import Web.Spock.Internal.SessionManager
import Web.Spock.Internal.SessionVault
spec :: Spec
spec =
describe "Session Manager" $
do
it "should return the correct csrf token" $
do
mgr <- mkMgr
sm_getCsrfToken mgr `shouldReturn` "fake-token"
it "should not loose data on session id regeneration" $
do
mgr <- mkMgr
sm_writeSession mgr True
sm_regenerateSessionId mgr
sm_readSession mgr `shouldReturn` True
it "should modify session correctly" $
do
mgr <- mkMgr
x <- sm_modifySession mgr (const (True, True))
x `shouldBe` True
sm_readSession mgr `shouldReturn` True
it "should remember session content" $
do
mgr <- mkMgr
sm_writeSession mgr True
sm_readSession mgr `shouldReturn` True
it "writing to the session after clearing all should not crash" $
do
mgr <- mkMgr
sm_writeSession mgr True
sm_clearAllSessions mgr
sm_writeSession mgr True
it "should be possible to map over all sessions" $
do
mgr <- mkMgr
sm_writeSession mgr True
sm_readSession mgr `shouldReturn` True
sm_mapSessions mgr (const $ return False)
sm_readSession mgr `shouldReturn` False
mkMgr :: IO (SessionManager IO conn Bool st)
mkMgr =
do
sessionCfg <- defaultSessionCfg False
sv <- newStmSessionStore'
let testSession =
Session
{ sess_id = "fake-sid",
sess_csrfToken = "fake-token",
sess_validUntil = UTCTime (fromGregorian 2030 1 1) 0,
sess_data = False
}
atomically $
ss_storeSession sv testSession
let sessionCfg' = sessionCfg {sc_store = SessionStoreInstance sv}
k <- V.newKey
sessionVaultR <- newIORef $ V.insert k (sess_id testSession) V.empty
mgr <-
createSessionManager sessionCfg' $
SessionIf
{ si_queryVault =
\key ->
do
vault <- readIORef sessionVaultR
return $ V.lookup key vault,
si_modifyVault = modifyIORef sessionVaultR,
si_setRawMultiHeader = \_ _ -> return (),
si_vaultKey = return k
}
return mgr
|
agrafix/Spock
|
Spock/test/Web/Spock/Internal/SessionManagerSpec.hs
|
bsd-3-clause
| 2,591
| 0
| 16
| 847
| 612
| 301
| 311
| 76
| 1
|
module Geordi.FileInfo where
import qualified Data.ByteString as S
-- | Information on an uploaded file.
data FileInfo c = FileInfo
{ fileName :: S.ByteString
, fileContentType :: S.ByteString
, fileContent :: c
}
deriving (Eq, Show)
|
liamoc/geordi
|
Geordi/FileInfo.hs
|
bsd-3-clause
| 257
| 0
| 9
| 59
| 58
| 37
| 21
| 7
| 0
|
{-# LANGUAGE FlexibleInstances #-}
{- | Bounded buffers.
A bounded buffer is a queue of sized values in which the sum of
the sizes must be no more than a fixed capacity. These are intended
to help implement per-stream flow control and buffering in a SPDY
endpoint.
-}
module Network.SPDY.Internal.BoundedBuffer (Sized(..),
BoundedBuffer,
new,
add,
tryAdd,
remove,
totalCapacity,
remainingCapacity,
snapshot) where
import Control.Applicative ((<$>))
import Control.Concurrent.MSem (MSem)
import qualified Control.Concurrent.MSem as MSem
import Control.Concurrent.MSemN (MSemN)
import qualified Control.Concurrent.MSemN as MSemN
import Control.Exception (mask_)
import Control.Monad (liftM4, when, unless)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Foldable
import qualified Data.Foldable as F
import Data.IORef (IORef, newIORef, atomicModifyIORef, readIORef)
import Data.Sequence (Seq, (|>))
import qualified Data.Sequence as S
-- | Types with a size.
class Sized a where
size :: a -> Int
instance Sized ByteString where
size = B.length
instance (Sized a, Foldable f, Functor f) => Sized (f a) where
size = F.sum . fmap size
-- | A FIFO collection of sized values, bounded by a total capacity.
data BoundedBuffer a =
BoundedBuffer { bbChunks :: IORef (Seq a)
, bbChunkCount :: MSem Int
, bbFreeSpace :: MSemN Int
, bbOriginalCapacity :: Int }
-- | Creates a new empty buffer with a given total capacity.
new :: Sized a => Int -> IO (BoundedBuffer a)
new capacity =
liftM4 BoundedBuffer (newIORef S.empty) (MSem.new 0) (MSemN.new capacity) (return capacity)
-- | Adds a chunk to the buffer. Raises an error if the size
-- of the chunk is greater than the total capacity of the
-- buffer. Blocks if there is insufficient remaining capacity, until
-- another thread removes enough chunks to free the required space.
add :: Sized a => BoundedBuffer a -> a -> IO ()
add bb chunk =
addGeneral bb chunk True >>= \queued ->
unless queued
(error ("Can't insert chunk of size " ++ show (size chunk) ++
" in a buffer with total capacity " ++
show (totalCapacity bb)))
-- | Attempts to add a chunk to the buffer, and indicates whether it
-- was successful. May block momentarily, but not
-- indefinitely. Returns 'False' for chunks that exceed the total
-- capacity of the buffer.
tryAdd :: Sized a => BoundedBuffer a -> a -> IO Bool
tryAdd bb chunk = addGeneral bb chunk False
-- | Adds a chunk, optionally waiting until space is
-- available. Returns 'True' if the chunk was added, 'False'
-- otherwise.
addGeneral :: Sized a => BoundedBuffer a -> a -> Bool -> IO Bool
addGeneral bb chunk waitForSpace = do
-- Using mask_ here should be sufficient to prevent an asynchronous
-- exception from corrupting the state of the buffer, for three
-- reasons. First, according to the SafeSemaphore docs MSemN.waitF
-- can be interrupted, but it guarantees that it will not lose any
-- of the semaphore quantity when this happens. Second, according to
-- the Control.Exception docs, atomicModifyIORef is not
-- interruptible. Third, according to the SafeSemaphore docs,
-- MSem.signal is not interruptible. Therefore, the only part that
-- can be interrupted by an asynchronous exception is MSemN.waitF,
-- and in that case none of the semaphore quantity is taken, and the
  -- buffer will not add a chunk or signal the arrival of a chunk.
mask_ $ do
(_, sufficient) <- MSemN.waitF (bbFreeSpace bb) demand
when sufficient $ do
atomicModifyIORef (bbChunks bb) (\chunks -> (chunks |> chunk, ()))
MSem.signal (bbChunkCount bb)
return sufficient
where demand avail = let sufficientNow = avail >= n
n = size chunk
sufficient = sufficientNow ||
waitForSpace && not (n > totalCapacity bb)
in (if sufficient then n else 0, sufficient)
-- | Removes the next available chunk from the buffer, blocking if the
-- buffer is empty.
remove :: Sized a => BoundedBuffer a -> IO a
remove bb = mask_ $ do
-- mask_ should be sufficient to prevent asynchronous exceptions
-- from corrupting the state of the buffer, for reasons similar to
-- those described in 'addGeneral' above.
MSem.wait (bbChunkCount bb)
chunk <- atomicModifyIORef (bbChunks bb) takeFirst
MSemN.signal (bbFreeSpace bb) (size chunk)
return chunk
where takeFirst chunks =
let (first, rest) = S.splitAt 1 chunks
in if S.length first > 0
then let chunk = S.index first 0
in (rest, chunk)
else error "Empty bounded buffer didn't block removal. This shouldn't happen."
-- | The total capacity of the buffer, i.e. the remaining capacity
-- when the buffer is empty.
totalCapacity :: BoundedBuffer a -> Int
totalCapacity = bbOriginalCapacity
-- | The current remaining capacity of the buffer. This is a snapshot
-- and may be invalid immediately afterward.
remainingCapacity :: Sized a => BoundedBuffer a -> IO Int
remainingCapacity = (fst <$>) . snapshot
-- | A snapshot of the current state of the buffer, including the free
-- capacity and the current buffer contents. If concurrent threads are
-- interacting with the buffer, this may become invalid
-- immediately. This function is intended primarily for testing and
-- debugging.
snapshot :: Sized a => BoundedBuffer a -> IO (Int, [a])
snapshot bb = do
chunks <- readIORef (bbChunks bb)
return (bbOriginalCapacity bb - F.sum (fmap size chunks), toList chunks)
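-- Usage sketch (added; 'chunks', 'consume', and the use of forkIO/forever from
-- base are assumptions, not part of this module): one thread adds ByteString
-- chunks and blocks when the 16-byte capacity is exhausted, while another
-- removes them and blocks when the buffer is empty.
--
--   buf <- new 16 :: IO (BoundedBuffer ByteString)
--   _  <- forkIO $ mapM_ (add buf) chunks    -- producer
--   forever (remove buf >>= consume)         -- consumer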
|
kcharter/spdy-base
|
src/Network/SPDY/Internal/BoundedBuffer.hs
|
bsd-3-clause
| 5,990
| 0
| 17
| 1,571
| 1,106
| 598
| 508
| 80
| 2
|
module Language.Css.Selectors where
import Language.Css.Namespaces
data CssCount =
CssAdd Integer Integer -- ^ @2n+1@
| CssSub Integer Integer -- ^ @2n-1@
| CssNum Integer -- ^ @5@
| CssRate Integer -- ^ @5n@
| CssEven -- ^ @even@
| CssOdd -- ^ @odd@
deriving (Show, Eq)
data CssPseudoClass =
CssPseudoRoot -- ^ @:root@
| CssPseudoNthChild CssCount -- ^ @:nth-child(n)@
| CssPseudoNthLastChild CssCount -- ^ @:nth-last-child(n)@
| CssPseudoNthOfType CssCount -- ^ @:nth-of-type(n)@
| CssPseudoNthLastOfType CssCount -- ^ @:nth-last-of-type(n)@
| CssPseudoFirstChild -- ^ @:first-child@
| CssPseudoLastChild -- ^ @:last-child@
| CssPseudoFirstOfType -- ^ @:first-of-type@
| CssPseudoLastOfType -- ^ @:last-of-type@
| CssPseudoOnlyChild -- ^ @:only-child@
| CssPseudoOnlyOfType -- ^ @:only-of-type@
| CssPseudoEmpty -- ^ @:empty@
| CssPseudoLink -- ^ @:link@
| CssPseudoVisited -- ^ @:visited@
| CssPseudoActive -- ^ @:active@
| CssPseudoHover -- ^ @:hover@
| CssPseudoFocus -- ^ @:focus@
| CssPseudoTarget -- ^ @:target@
| CssPseudoLang String -- ^ @:lang(l)@
| CssPseudoEnabled -- ^ @:enabled@
| CssPseudoDisabled -- ^ @:disabled@
| CssPseudoChecked -- ^ @:checked@
| CssPseudoNot CssSimpleSelector -- ^ @:not(s)@
deriving (Show, Eq)
data CssPseudoElement =
CssPseudoFirstLine -- ^ @::first-line@
| CssPseudoFirstLetter -- ^ @::first-letter@
| CssPseudoBefore -- ^ @::before@
| CssPseudoAfter -- ^ @::after@
deriving (Show, Eq)
data CssAttribute =
CssAttribute String -- ^ @[foo]@
| CssIsAttribute String String -- ^ @[foo="bar"]@
| CssSpAttribute String String -- ^ @[foo~="bar"]@
| CssHyAttribute String String -- ^ @[foo|="bar"]@
| CssBeginsAttribute String String -- ^ @[foo^="bar"]@
| CssEndsAttribute String String -- ^ @[foo$="bar"]@
| CssSubAttribute String String -- ^ @[foo*="bar"]@
deriving (Show, Eq)
type CssClass = String
type CssId = String
type CssNode = String
-- | A chunk of a selector query
data CssSimpleSelector = CssSimpleSelector
{ namespaceSel :: Maybe CssQualifierName
, nodeSel :: Maybe CssNode
, idSel :: Maybe CssId
, classesSel :: [CssClass]
, atrrsSel :: [CssAttribute]
} deriving (Show, Eq)
data CssSelector = CssSelector
{ simpleSel :: CssSimpleSelector
, pseudoSel :: Either CssPseudoClass CssPseudoElement
} deriving (Show, Eq)
data CssCombinator =
CssCombSelector CssSelector
| CssDescendant CssCombinator CssSelector -- ^ @E F@
| CssChild CssCombinator CssSelector -- ^ @E > F@
| CssAdjacent CssCombinator CssSelector -- ^ @E + F@
| CssSibling CssCombinator CssSelector -- ^ @E ~ F@
deriving (Show, Eq)
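-- Example (added sketch, not part of the original module): the CSS selector
-- @div#main.article:hover@ corresponds to
--
--   CssSelector
--     (CssSimpleSelector Nothing (Just "div") (Just "main") ["article"] [])
--     (Left CssPseudoHover)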
|
athanclark/css-grammar
|
src/Language/Css/Selectors.hs
|
bsd-3-clause
| 3,080
| 0
| 9
| 902
| 456
| 289
| 167
| 71
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
module Snap.Snaplet.CustomAuth.Handlers where
import Control.Error.Util hiding (err)
import Control.Lens hiding (un)
import Control.Monad.Trans
import Control.Monad.Trans.Except
import Control.Monad.Trans.Maybe
import Control.Monad.State
import qualified Data.Configurator as C
import qualified Data.HashMap.Lazy as M
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Encoding
import Snap
import Data.Map
import Snap.Snaplet.CustomAuth.Types hiding (name)
import Snap.Snaplet.CustomAuth.AuthManager
import Snap.Snaplet.CustomAuth.OAuth2.Internal
import Snap.Snaplet.CustomAuth.User (setUser, recoverSession, currentUser, isSessionDefined)
import Snap.Snaplet.CustomAuth.Util (getParamText)
setFailure'
:: Handler b (AuthManager u e b) ()
-> AuthFailure e
-> Handler b (AuthManager u e b) ()
setFailure' action err =
(modify $ \s -> s { authFailData = Just err }) >> action
loginUser
:: IAuthBackend u i e b
=> Handler b (AuthManager u e b) ()
-> Handler b (AuthManager u e b) ()
-> Handler b (AuthManager u e b) ()
loginUser loginFail loginSucc = do
usrName <- gets userField
pwdName <- gets passwordField
res <- runExceptT $ do
userName <- noteT (Login UsernameMissing) $ MaybeT $
(fmap . fmap) decodeUtf8 $ getParam usrName
passwd <- noteT (Login PasswordMissing) $ MaybeT $
(fmap . fmap) decodeUtf8 $ getParam pwdName
usr <- withExceptT UserError $ ExceptT $ login userName passwd
lift $ maybe (return ()) setUser usr
hoistEither $ note (Login WrongPasswordOrUsername) usr
either (setFailure' loginFail) (const loginSucc) res
logoutUser
:: IAuthBackend u i e b
=> Handler b (AuthManager u e b) ()
logoutUser = do
sesName <- gets sessionCookieName
runMaybeT $ do
ses <- MaybeT $ getCookie sesName
lift $ expireCookie ses >> logout (decodeUtf8 $ cookieValue ses)
modify $ \mgr -> mgr { activeUser = Nothing }
-- Recover if session token is present. Login if login+password are
-- present.
combinedLoginRecover
:: IAuthBackend u i e b
=> Handler b (AuthManager u e b) ()
-> Handler b (AuthManager u e b) (Maybe u)
combinedLoginRecover loginFail = do
sesActive <- isSessionDefined
usr <- runMaybeT $ do
guard sesActive
lift recoverSession
MaybeT currentUser
err <- gets authFailData
maybe (maybe combinedLogin (return . Just) usr)
(const $ loginFail >> return Nothing) err
where
combinedLogin = runMaybeT $ do
usrName <- gets userField
pwdName <- gets passwordField
params <- lift $ fmap rqParams getRequest
when (all (flip member params) [usrName, pwdName]) $ do
lift $ loginUser loginFail $ return ()
MaybeT currentUser
-- Account with password login
createAccount
:: IAuthBackend u i e b
=> Handler b (AuthManager u e b) (Either (Either e CreateFailure) u)
createAccount = do
usrName <- ("_new" <>) <$> gets userField
pwdName <- ("_new" <>) <$> gets passwordField
let pwdAgainName = pwdName <> "_again"
usr <- runExceptT $ do
name <- noteT (Right MissingName) $ MaybeT $
getParamText usrName
passwd <- noteT (Right $ PasswordFailure Missing) $ MaybeT $
getParamText pwdName
when (T.null passwd) $ throwE (Right $ PasswordFailure Missing)
noteT (Right $ PasswordFailure Mismatch) $ guard =<<
(MaybeT $ (fmap . fmap) (== passwd) (getParamText pwdAgainName))
userId <- either (throwE . Left) return =<<
(lift $ preparePasswordCreate Nothing passwd)
return (name, userId)
res <- runExceptT $ do
(name, userId) <- hoistEither usr
u <- ExceptT $ create name userId
lift $ setUser u
return u
case (usr, res) of
(Right i, Left _) -> cancelPrepare $ snd i
_ -> return ()
either (setFailure' (return ()) . either UserError Create) (const $ return ()) res
return res
authInit
:: IAuthBackend u i e b
=> Maybe (OAuth2Settings u i e b)
-> AuthSettings
-> SnapletInit b (AuthManager u e b)
authInit oa s = makeSnaplet (view authName s) "Custom auth" Nothing $ do
cfg <- getSnapletUserConfig
un <- liftIO $ C.lookupDefault "_login" cfg "userField"
pn <- liftIO $ C.lookupDefault "_password" cfg "passwordField"
scn <- liftIO $ C.lookupDefault "_session" cfg "sessionCookieName"
ps <- maybe (return M.empty) oauth2Init oa
return $ AuthManager
{ activeUser = Nothing
, cookieLifetime = s ^. authCookieLifetime
, sessionCookieName = scn
, userField = un
, passwordField = pn
, stateStore' = maybe (error "oauth2 hooks not defined") stateStore oa
, oauth2Provider = Nothing
, authFailData = Nothing
, providers = ps
}
isLoggedIn :: UserData u => Handler b (AuthManager u e b) Bool
isLoggedIn = isJust <$> currentUser
getAuthFailData
:: Handler b (AuthManager u e b) (Maybe (AuthFailure e))
getAuthFailData = get >>= return . authFailData
resetAuthFailData
:: Handler b (AuthManager u e b) ()
resetAuthFailData = modify $ \mgr -> mgr { authFailData = Nothing }
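-- Usage sketch (added; the snaplet lens 'auth' and the handlers 'onLoginFail'
-- and 'onLoginSuccess' are hypothetical application code): the handlers above
-- are typically wired into the parent snaplet's routes, e.g.
--
--   ("/login",  with auth $ loginUser onLoginFail onLoginSuccess)
--   ("/logout", with auth logoutUser)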
|
kaol/snaplet-customauth
|
Snap/Snaplet/CustomAuth/Handlers.hs
|
bsd-3-clause
| 5,271
| 0
| 17
| 1,090
| 1,791
| 899
| 892
| 136
| 2
|
module Model (
module Model.Types
, module Model.Db
) where
import Model.Types
import Model.Db
|
nurpax/hstodo
|
src/Model.hs
|
bsd-3-clause
| 105
| 0
| 5
| 24
| 28
| 18
| 10
| 5
| 0
|
module Eval (eval) where
import AST
import Parser
eval :: [Module [TopDeclaration]] -> IO ()
eval a = mapM_ print $ concat [ d | Module _ _ _ _ _ d <- a ]
|
tomahawkins/atom
|
src/Eval.hs
|
bsd-3-clause
| 158
| 0
| 10
| 38
| 80
| 42
| 38
| 5
| 1
|
----------------------------------------------------------------------------
-- |
-- Module : DependencyMatchingConstructorsTypes
-- Copyright : (c) Sergey Vinokurov 2018
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
----------------------------------------------------------------------------
{-# LANGUAGE GADTs #-}
module DependencyMatchingConstructorsTypes where
data FooMatching = FooMatching Int Double
newtype BarMatching =
BarMatching { unBarMatching :: String }
data BazMatching =
Hello
| BazMatching
data QuuxMatching a ix where
QuuxMatching :: a -> QuuxMatching a a
QuuxInt :: Int -> QuuxMatching a Int
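-- Added illustration (not part of the original test fixture): each constructor
-- of the GADT refines the second type index, e.g.
--   QuuxMatching 'x' :: QuuxMatching Char Char
--   QuuxInt 3        :: QuuxMatching a Int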
|
sergv/tags-server
|
test-data/0011hide_constructor_named_as_type/deps/DependencyMatchingConstructorsTypes.hs
|
bsd-3-clause
| 681
| 0
| 7
| 113
| 80
| 51
| 29
| 11
| 0
|
{-# LANGUAGE TemplateHaskell, TupleSections, FlexibleContexts #-}
module Text.Peggy.CodeGen.TH (
genDecs,
genQQ,
) where
import Control.Applicative
import Control.Monad
import qualified Data.HashTable.ST.Basic as HT
import Data.List
import qualified Data.ListLike as LL
import Data.Maybe
import Data.Typeable ()
import Language.Haskell.Meta
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.Quote
import Text.Peggy.Prim
import Text.Peggy.Syntax
import Text.Peggy.SrcLoc
import Text.Peggy.Normalize
import Text.Peggy.LeftRec
genQQ :: Syntax -> (String, String) -> Q [Dec]
genQQ syn (qqName, parserName) = do
sig <- sigD (mkName qqName) (conT ''QuasiQuoter)
dat <- valD (varP $ mkName qqName) (normalB con) []
return [sig, dat]
where
con = do
e <- [| \str -> do
loc <- location
case parse $(varE $ mkName parserName) (SrcPos (loc_filename loc) 0 (fst $ loc_start loc) (snd $ loc_start loc)) str of
Left err -> error $ show err
Right a -> a
|]
u <- [| undefined |]
recConE 'QuasiQuoter [ return ('quoteExp, e)
, return ('quoteDec, u)
, return ('quotePat, u)
, return ('quoteType, u)
]
genDecs :: Syntax -> Q [Dec]
genDecs = generate . normalize . removeLeftRecursion
generate :: Syntax -> Q [Dec]
generate defs = do
tblTypName <- newName "MemoTable"
tblDatName <- newName "MemoTable"
ps <- parsers tblTypName
sequence $ [ defTbl tblTypName tblDatName
, instTbl tblTypName tblDatName
] ++ ps
where
n = length defs
defTbl :: Name -> Name -> DecQ
defTbl tblTypName tblDatName = do
s <- newName "s"
str <- newName "str"
dataD (cxt []) tblTypName [PlainTV str, PlainTV s] [con s str] []
where
con s str = recC tblDatName $ map toMem defs where
toMem (Definition nont typ _) = do
let tt | isExp nont = [t| ExpQ |]
| otherwise = parseType' typ
t <- [t| HT.HashTable $(varT s) Int
(Result $(varT str) $tt) |]
return (mkName $ "tbl_" ++nont, NotStrict, t)
instTbl :: Name -> Name -> DecQ
instTbl tblTypName tblDatName = do
str <- newName "str"
instanceD (cxt []) (conT ''MemoTable `appT` (conT tblTypName `appT` varT str))
[ valD (varP 'newTable) (normalB body) [] ]
where
body = do
names <- replicateM n (newName "t")
doE $ map (\name -> bindS (varP name) [| HT.new |]) names
++ [ noBindS $ appsE [varE 'return, appsE $ conE tblDatName : map varE names]]
parsers tblName = concat <$> mapM (gen tblName) defs
isExp name = isJust $ find f defs where
f (Definition nont typ _)
| nont == name && head (words typ) == "Exp" = True
| otherwise = False
gen tblName (Definition nont typ e)
| isExp nont = return $
[ genSig tblName nont [t| ExpQ |]
, funD (mkName nont)
[clause [] (normalB [| memo $(varE $ mkName $ "tbl_" ++ nont) $ $(genP True e) |]) []]]
| otherwise = return $
[ genSig tblName nont (parseType' typ)
, funD (mkName nont)
[clause [] (normalB [| memo $(varE $ mkName $ "tbl_" ++ nont) $ $(genP False e) |]) []]]
genSig tblName name typ = do
str <- newName "str"
s <- newName "s"
sigD (mkName name) $
forallT [PlainTV str, PlainTV s]
(cxt [classP ''LL.ListLike [varT str, conT ''Char]]) $
conT ''Parser `appT`
(conT tblName `appT` varT str) `appT`
varT str `appT`
varT s `appT`
typ
-- Generate Parser
genP :: Bool -> Expr -> ExpQ
genP isE e = case (isE, e) of
(False, Terminals False False str) ->
[| string str |]
(True, Terminals False False str) ->
[| lift <$> string str |]
(False, TerminalSet rs) ->
[| satisfy $(genRanges rs) |]
(True, TerminalSet rs) ->
[| lift <$> satisfy $(genRanges rs) |]
(False, TerminalCmp rs) ->
[| satisfy $ not . $(genRanges rs) |]
(True, TerminalCmp rs) ->
[| lift <$> (satisfy $ not . $(genRanges rs)) |]
(False, TerminalAny) ->
[| anyChar |]
(True, TerminalAny) ->
[| lift <$> anyChar |]
(False, NonTerminal nont) ->
if isExp nont then error $ "value cannot contain exp: " ++ nont
else [| $(varE $ mkName nont) |]
(True, NonTerminal nont) ->
if isExp nont
then [| $(varE $ mkName nont) |]
else [| lift <$> $(varE $ mkName nont) |]
(False, Primitive name) ->
[| $(varE $ mkName name) |]
(True, Primitive name) ->
[| lift <$> $(varE $ mkName name) |]
(False, Empty) ->
[| return () |]
(True, Empty) ->
[| lift <$> return () |]
(False, Many f) ->
[| many $(genP isE f) |]
(True, Many f) ->
[| do eQs <- many $(genP isE f); return $ listE eQs |]
(False, Some f) ->
[| some $(genP isE f) |]
(True, Some f) ->
[| do eQs <- some $(genP isE f); return $ listE eQs |]
(False, Optional f) ->
[| optional $(genP isE f) |]
(True, Optional f) ->
[| do eQm <- optional $(genP isE f); case eQm of Nothing -> lift Nothing; Just q -> do ee <- q; lift (Just ee) |]
(False, And f) ->
[| expect $(genP isE f) |]
(True, And f) ->
[| lift () <$ expect $(genP isE f) |]
(False, Not f) ->
[| unexpect $(genP isE f) |]
(True, Not f) ->
[| lift () <$ unexpect $(genP isE f) |]
(_, Token f) ->
[| token $(varE skip) $(varE delimiter) ( $(genP isE f) ) |]
  -- simply ignoring the result value
(False, Named "_" f) ->
[| () <$ $(genP isE f) |]
(True, Named "_" f) ->
[| () <$ $(genP isE f) |]
    (_, Named {}) -> error "named expr must have a semantic."
(False, Choice es) ->
foldl1 (\a b -> [| $a <|> $b |]) $ map (genP isE) es
(True, Choice es) ->
[| $(foldl1 (\a b -> [| $a <|> $b |]) $ map (genP isE) es) |]
  -- Semantic Code
  -- Generates Normal, value-constructing code.
  -- It cannot contain anti-quotes or values dependent on anti-quotes.
(False, Semantic (Sequence es) cf) -> do
-- TODO: make it syntax-sugar
let needSt = hasPos cf || hasSpan cf
needEd = hasSpan cf
st = if needSt then [bindS (varP $ mkName stName) [| getPos |]] else []
ed = if needEd then [bindS (varP $ mkName edName) [| getPos |]] else []
doE $ st ++ genBinds 1 es ++ ed ++ [ noBindS [| return $(genCF isE cf) |] ]
  -- Generates Exp-constructing code.
  -- It can contain anti-quotes.
  -- Anti-quoted values must be Normal values.
(True, Semantic (Sequence es) cf) -> do
bs <- sequence $ genBinds 1 es
let vn = length $ filter isBind bs
let gcf = genCF isE (ccf vn)
doE $ map return bs ++
[ noBindS [| return $ foldl appE (return $(lift =<< gcf)) $(eQnames vn) |]]
where
ccf 0 = cf
ccf nn = [Snippet $ "\\" ++ unwords (names nn ++ qames nn) ++ " -> ("] ++ cf ++ [Snippet ")"]
eQnames nn =
listE $ [ [| lift $(varE (mkName $ var i)) |] | i <- [1..nn]] ++
[ if hasAQ i cf
then [| varE $ mkName $(varE $ mkName $ var i) |]
else [| litE $ integerL 0 |]
| i <- [1..nn]]
names nn = map var [1..nn]
qames nn = map qar [1..nn]
_ ->
error $ "internal compile error: " ++ show e
where
genBinds _ [] = []
genBinds ix (f:fs) = case f of
Named "_" g ->
noBindS (genP isE g) :
genBinds ix fs
Named name g ->
bindS (asP (mkName name) $ varP $ mkName (var ix)) (genP isE g) :
genBinds (ix+1) fs
_ | shouldBind f ->
bindS (varP $ mkName $ var ix) (genP isE f) :
genBinds (ix+1) fs
_ ->
noBindS (genP isE f) :
genBinds ix fs
genRanges :: [CharRange] -> ExpQ
genRanges rs =
let c = mkName "c" in
lamE [varP c] $ foldl1 (\a b -> [| $a || $b |]) $ map (genRange c) rs
genRange :: Name -> CharRange -> ExpQ
genRange c (CharRange l h) =
[| l <= $(varE c) && $(varE c) <= h |]
genRange c (CharOne v) =
[| $(varE c) == v |]
genCF isE cf =
case parsed of
Left _ ->
error $ "code fragment parse error: " ++ scf
Right ret ->
return ret
where
parsed = parseExp scf
scf = concatMap toStr cf
toStr (Snippet str) = str
toStr (Argument a) = var a
toStr (AntiArgument nn)
| not isE = error "Anti-quoter is not allowed in non-AQ parser"
| otherwise = qar nn
toStr ArgPos = "(LocPos " ++ stName ++ ")"
toStr ArgSpan = "(LocSpan " ++ stName ++ " " ++ edName ++ ")"
hasAQ x cf = not . null $ filter (isAQ x) cf where
isAQ i (AntiArgument j) = i == j
isAQ _ _ = False
hasPos = any (==ArgPos)
hasSpan = any (==ArgSpan)
isBind (BindS _ _) = True
isBind _ = False
skip = mkName "skip"
delimiter = mkName "delimiter"
var nn = "v" ++ show (nn :: Int)
qar nn = "q" ++ show (nn :: Int)
stName = "st_Pos"
edName = "ed_Pos"
parseExp' str =
case parseExp str of
Left _ ->
error $ "code fragment parse error: " ++ str
Right ret ->
return ret
parseType' typ =
case parseType typ of
Left err -> error $ "type parse error :" ++ typ ++ ", " ++ err
Right t -> case t of
      -- GHC.Unit.()/GHC.Tuple.() is not a type name. Is it a bug in haskell-src-meta?
      -- Use (TupleT 0) instead.
ConT con | show con == "GHC.Unit.()" ->
return $ TupleT 0
ConT con | show con == "GHC.Tuple.()" ->
return $ TupleT 0
_ ->
return t
|
tanakh/Peggy
|
Text/Peggy/CodeGen/TH.hs
|
bsd-3-clause
| 9,844
| 104
| 24
| 3,193
| 3,163
| 1,695
| 1,468
| 248
| 52
|
module Text
( stringDropCmd
, matchUrl
, matchTitle
, matchDice
, wrapDie
, stringRegex
, helpstr
, helpstrs
, unescapeEntities
, googlestr
, wikistr
, youstr
, spaceToPlus
, killSpaces
, lower
) where
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.UTF8 as U
import Text.Regex.PCRE
import Text.HTML.TagSoup.Entity (lookupEntity)
import Data.Char
helpstr = "Commands (prefix ?): " ++
"h[elp] [command], " ++
"tell <nick> <message>, " ++
"ping [url], " ++
-- "dc <equation>, " ++
-- "eval <expression>, " ++
"t[itle] [url], " ++
-- "trans <string>, " ++
"g <query>, " ++
"wik <query>, " ++
"tube <query>, " ++
-- "weather <location>[,province[,country]], " ++
"d <[x|]<y>d<z>[+/-w]>..." ++
-- "Passive: Report titles for urls;"
""
helpstrs = [("h", "?h[elp] [command] - A help dialog for command, Or a list of commands.")
,("tell", "?tell <nick> <message> - Send \"<nick> tell message\" as a PM to nick next time they speak.")
            ,("ping", "?ping [url] - Ping a site and return its response time. Or just pong the user.")
-- ,("dc", "?dc <equation> - Arbitrary precision reverse polish calculator.")
-- ,("eval", "?eval <expression> - Haskell expression")
,("t", "?t[itle] [url] - Gets either url or the previous URL from the channel.")
-- ,("trans", "?trans <string> - Translate string into english.")
,("g", "?g <query> - Return the first google search result matching query.")
,("wik", "?wik <query> - Return the first wikipedia search result matching query.")
,("tube", "?tube <query> - Return the first youtube search result matching query.")
-- ,("weather", "?weather <location>[,province[,country]] - Get the weather from location.")
,("d", "?d <[x|]<y>d<z>[+/-w]>... - Sum of the values of y dice with z (% gives a percent) sides, plus or minus w, x times.")
            -- ,("tatl", "?tatl # - Link the sentence numbered # from tatoeba.org")
]
regexUrl = "(http(s)?://)?(www.)?([a-zA-Z0-9\\-_]{1,}\\.){1,}[a-zA-Z]{2,4}(/)?[^ ]*"
regexTitle = "<[^>]*[tT][iI][tT][lL][eE][^>]*>[^<]*<[^>]*/[^>]*[tT][iI][tT][lL][eE][^>]*>"
googlestr = (++) "http://ajax.googleapis.com/ajax/services/search/web?v=1.0&safe=off&q="
wikistr = (++) . googlestr $ "site%3Awikipedia.org+"
youstr = (++) . googlestr $ "site%3Awww.youtube.com+"
regexDice = "([0-9]?\\|)?([0-9]+)?d([0-9]+|%)((\\+|-)[0-9]+)?"
matchDice = map head . flip listRegex regexDice
matchUrl :: String -> String
matchUrl = flip stringRegex regexUrl
matchTitle :: String -> String
matchTitle = killSpaces . map (\x -> if x=='\r' then ' ' else x) . flip stringRegex "(?<=>)[^<]*" . flip stringRegex regexTitle . unwords . lines
stringRegex :: String -> String -> String
stringRegex orig regex = orig =~ regex
listRegex :: String -> String -> [[String]]
listRegex orig regex = orig =~ regex :: [[String]]
dropCommand :: B.ByteString -> B.ByteString
dropCommand b = B.drop 1 $ B.dropWhile (/= ' ') b
stringDropCmd :: B.ByteString -> String
stringDropCmd = U.toString . dropCommand
killSpaces :: String -> String
killSpaces [] = ""
killSpaces (a:[]) = if a=='\t' || a=='\n' then [] else [a]
killSpaces (a:b:ss) = (if (a == ' ' && b == ' ') || a=='\t' || a=='\n' then id else (a:)) $ killSpaces $ b:ss
unescapeEntities :: String -> String
unescapeEntities [] = []
unescapeEntities ('&':xs) =
let (b, a) = break (== ';') xs in
case (lookupEntity b, a) of
(Just c, ';':as) -> c ++ unescapeEntities as
_ -> '&' : unescapeEntities xs
unescapeEntities (x:xs) = x : unescapeEntities xs
spaceToPlus :: String -> String
spaceToPlus = map stp . killSpaces
where stp ' ' = '+'
stp x = x
lower = map toLower
wrapDie = map (\a -> (dieD a, dieMulti a, dieOffset a, dieLoop a))
dieMulti :: String -> Int
dieMulti a
| null b = 1
| otherwise = read b ::Int
where b = stringRegex a "([0-9]+)?(?=d)"
dieOffset :: String -> Int
dieOffset a
| null b = 0
| head b == '+' = read $ drop 1 b ::Int
| otherwise = read b ::Int
where b = stringRegex a "(\\+|-)[0-9]+"
dieLoop :: String -> Int
dieLoop a
| null b = 1
| otherwise = read b ::Int
where b = stringRegex a "[0-9]+(?=\\|)"
dieD :: String -> Int
dieD a
| b == "%" = 100
| otherwise = read b ::Int
where b = stringRegex a "(?<=d)([0-9]+|%)"
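-- A small worked example (the binding below is illustrative, not part of the
-- original module): "3|2d6+1" means roll 2d6+1 three times, so the components
-- are die size 6, multiplier 2, offset 1 and loop count 3.
exampleDieSpec :: [(Int, Int, Int, Int)]
exampleDieSpec = wrapDie ["3|2d6+1"]  -- expected: [(6, 2, 1, 3)]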
|
raposalorx/mssbot
|
Text.hs
|
bsd-3-clause
| 4,515
| 0
| 17
| 1,045
| 1,113
| 606
| 507
| 97
| 3
|
module Network.URL.Archiver (checkArchive) where
import Control.Monad (when, unless, void)
import Data.Char (isAlphaNum, isAscii)
import Data.List (isInfixOf, isPrefixOf)
import Data.Maybe (fromJust)
import Network.Browser (browse, formToRequest, request, Form(..))
import Network.HTTP (getRequest, simpleHTTP, RequestMethod(POST))
import Network.URI (isURI, parseURI)
import System.Random (getStdGen, randomR)
import Text.Printf (printf)
-- | Open a URL
pingURL :: String -> IO ()
pingURL = void . simpleHTTP . getRequest
-- | Error-check the URL and then archive it using 'webciteArchive', 'wikiwixArchive', 'internetArchiveLive', 'alexaToolbar', 'googleSearch', and 'archiveisArchive'; excludes Tor links.
checkArchive :: String -- ^ email for WebCite to send status to
-> String -- ^ URL to archive
-> IO ()
checkArchive email url = when (isURI url && not (".onion/" `isInfixOf` url)) (alexaToolbar url >> webciteArchive email url >> internetArchiveLive url >> wikiwixArchive url >> googleSearch url >> archiveisArchive url)
{- | Request <http://www.webcitation.org> to copy a supplied URL; WebCite does on-demand archiving, unlike Alexa/Internet Archive,
and so in practice this is the most useful function. This function throws away any return status from WebCite (which may be changed
in the future), so it is suggested that one test with a valid email address.
This ignores any attempt to archive the archive's existing pages, since that is useless.
/Warning!/ WebCite has throttling mechanisms; if you request more than 100 URLs per hour, your IP may be banned! It is
suggested that one sleep for \~30 seconds between each URL request. -}
webciteArchive :: String -> String -> IO ()
webciteArchive email url = unless ("http://www.webcitation.org" `isPrefixOf` url) $
pingURL ("http://www.webcitation.org/archive?url="++url++ "&email="++email)
-- | Request a URL through Internet Archive's on-demand archiving URL.
--
-- This also does a backup archive attempt through the live Internet mirror;
-- this is completely speculative and may result in no archiving.
-- This method is a guess based on my use of their mirror and a banner that is sometimes inserted;
-- see <http://www.archive.org/post/380853/virus-operating-in-internet-archive>
internetArchiveLive :: String -> IO ()
internetArchiveLive url = pingURL("http://web.archive.org/save/"++url) >> pingURL ("http://liveweb.archive.org/"++url)
-- | Ping Alexa's servers like the Toolbar does; this may or may not result in any archiving.
alexaToolbar :: String -> IO ()
alexaToolbar url = do gen <- getStdGen
let rint = fst $ randomR (1000::Int,20000) gen
let payload = "wid=" ++ show rint ++ "&ref=&url=" ++ escape url
pingURL ("http://data.alexa.com/data/SbADd155Tq0000?cli=10&ver=spkyf-1.5.0&dat=ns&cdt=rq=0&"++payload)
return ()
wikiwixArchive :: String -> IO ()
wikiwixArchive url = pingURL ("http://archive.wikiwix.com/cache/?url="++url)
-- | <http://blog.archive.is/post/45031162768/can-you-recommend-the-best-method-script-so-i-may-batch>
archiveisArchive :: String -> IO ()
archiveisArchive url = do let archiveform = Form POST (fromJust $ parseURI "http://archive.is/submit/") [("url", url), ("submit", "")]
void $ browse $ request $ formToRequest archiveform
-- can't hurt to let Google know it exists
googleSearch :: String -> IO ()
googleSearch url = pingURL ("http://www.google.com/search?q=" ++ escape url)
-- | Utility function to URL-encode a string for use in URL arguments; copied from somewhere
escape :: String -> String
escape = concatMap escapeURIChar
escapeURIChar :: Char -> String
escapeURIChar c | isAscii c && isAlphaNum c = [c]
| otherwise = concatMap (printf "%%%02X") [c]
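-- A minimal usage sketch (the binding, email address and URL are placeholders,
-- not part of the original module). Callers archiving many URLs should pause
-- roughly 30 seconds between calls because of WebCite throttling.
exampleArchive :: IO ()
exampleArchive = checkArchive "someone@example.org" "http://www.example.org/page"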
|
gwern/archiver-bot
|
Network/URL/Archiver.hs
|
bsd-3-clause
| 3,860
| 0
| 13
| 725
| 722
| 383
| 339
| 39
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Reporting
-- Copyright : (c) David Waern 2008
-- License : BSD-like
--
-- Maintainer : david.waern@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- Anonymous build report data structure, printing and parsing
--
-----------------------------------------------------------------------------
module Distribution.Client.BuildReports.Storage (
-- * Storing and retrieving build reports
storeAnonymous,
storeLocal,
-- retrieve,
-- * 'InstallPlan' support
fromInstallPlan,
fromPlanningFailure,
) where
import qualified Distribution.Client.BuildReports.Anonymous as BuildReport
import Distribution.Client.BuildReports.Anonymous (BuildReport)
import Distribution.Client.Types
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan
( InstallPlan )
import Distribution.Package
( PackageId, packageId )
import Distribution.PackageDescription
( FlagAssignment )
import Distribution.Simple.InstallDirs
( PathTemplate, fromPathTemplate
, initialPathTemplateEnv, substPathTemplate )
import Distribution.System
( Platform(Platform) )
import Distribution.Compiler
( CompilerId(..), CompilerInfo(..) )
import Distribution.Simple.Utils
( comparing, equating )
import Data.List
( groupBy, sortBy )
import Data.Maybe
( catMaybes )
import System.FilePath
( (</>), takeDirectory )
import System.Directory
( createDirectoryIfMissing )
storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO ()
storeAnonymous reports = sequence_
[ appendFile file (concatMap format reports')
| (repo, reports') <- separate reports
, let file = repoLocalDir repo </> "build-reports.log" ]
--TODO: make this concurrency safe, either lock the report file or make sure
-- the writes for each report are atomic (under 4k and flush at boundaries)
where
format r = '\n' : BuildReport.show r ++ "\n"
separate :: [(BuildReport, Maybe Repo)]
-> [(Repo, [BuildReport])]
separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ]))
. map concat
. groupBy (equating (repoName . head))
. sortBy (comparing (repoName . head))
. groupBy (equating repoName)
. onlyRemote
repoName (_,_,rrepo) = remoteRepoName rrepo
onlyRemote :: [(BuildReport, Maybe Repo)] -> [(BuildReport, Repo, RemoteRepo)]
onlyRemote rs =
[ (report, repo, remoteRepo)
| (report, Just repo@Repo { repoKind = Left remoteRepo }) <- rs ]
storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)]
-> Platform -> IO ()
storeLocal cinfo templates reports platform = sequence_
[ do createDirectoryIfMissing True (takeDirectory file)
appendFile file output
--TODO: make this concurrency safe, either lock the report file or make
-- sure the writes for each report are atomic
| (file, reports') <- groupByFileName
[ (reportFileName template report, report)
| template <- templates
, (report, _repo) <- reports ]
, let output = concatMap format reports'
]
where
format r = '\n' : BuildReport.show r ++ "\n"
reportFileName template report =
fromPathTemplate (substPathTemplate env template)
where env = initialPathTemplateEnv
(BuildReport.package report)
-- ToDo: In principle, we can support $pkgkey, but only
-- if the configure step succeeds. So add a Maybe field
-- to the build report, and either use that or make up
-- a fake identifier if it's not available.
(error "storeLocal: package key not available")
cinfo
platform
groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp))
. groupBy (equating fst)
. sortBy (comparing fst)
-- ------------------------------------------------------------
-- * InstallPlan support
-- ------------------------------------------------------------
fromInstallPlan :: InstallPlan -> [(BuildReport, Maybe Repo)]
fromInstallPlan plan = catMaybes
. map (fromPlanPackage platform comp)
. InstallPlan.toList
$ plan
where platform = InstallPlan.planPlatform plan
comp = compilerInfoId (InstallPlan.planCompiler plan)
fromPlanPackage :: Platform -> CompilerId
-> InstallPlan.PlanPackage
-> Maybe (BuildReport, Maybe Repo)
fromPlanPackage (Platform arch os) comp planPackage = case planPackage of
InstallPlan.Installed (ReadyPackage srcPkg flags _ deps) result
-> Just $ ( BuildReport.new os arch comp
(packageId srcPkg) flags (map packageId deps)
(Right result)
, extractRepo srcPkg)
InstallPlan.Failed (ConfiguredPackage srcPkg flags _ deps) result
-> Just $ ( BuildReport.new os arch comp
(packageId srcPkg) flags deps
(Left result)
, extractRepo srcPkg )
_ -> Nothing
where
extractRepo (SourcePackage { packageSource = RepoTarballPackage repo _ _ }) = Just repo
extractRepo (SourcePackage { packageSource = ScmPackage repo _ _ _}) = repo
extractRepo _ = Nothing
fromPlanningFailure :: Platform -> CompilerId
-> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)]
fromPlanningFailure (Platform arch os) comp pkgids flags =
[ (BuildReport.new os arch comp pkgid flags [] (Left PlanningFailed), Nothing)
| pkgid <- pkgids ]
|
typelead/epm
|
epm/Distribution/Client/BuildReports/Storage.hs
|
bsd-3-clause
| 5,947
| 0
| 19
| 1,599
| 1,359
| 750
| 609
| 102
| 5
|
module Data.LabelledLens where
import Prelude()
import Data.Text
import Data.Lens
data LabelledLensP x a b c =
LabelledLensP Text Text (LensP x a b c)
type LabelledLens x y =
LabelledLensP x y y x
|
tonymorris/lens-proposal
|
src/Data/LabelledLens.hs
|
bsd-3-clause
| 204
| 0
| 8
| 41
| 68
| 41
| 27
| 8
| 0
|
--------------------------------------------------------------------------------
-- |
-- Module : Language.Verilog.Parser
-- Copyright : (c) Signali Corp. 2010
-- License : All rights reserved
--
-- Maintainer : philip.weaver@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- A parser for the Verilog AST. The following sources were used to define the
-- AST and the parser:
--
-- * <http://www.verilog.com/VerilogBNF.html>
--
-- * <http://www.hdlworks.com/hdl_corner/verilog_ref/index.html>
--
-- * <http://en.wikipedia.org/wiki/Verilog>
--
-- The specifications at the first two links contradict each other in several
-- places. When in doubt, we try to make this parser match Icarus Verilog.
--------------------------------------------------------------------------------
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Language.Verilog.Parser where
import Control.Monad ( liftM, liftM2 )
import Control.Monad.Identity ( Identity )
import Data.Either ( partitionEithers)
import Data.Maybe ( fromMaybe )
import Text.Parsec
import Text.Parsec.Expr hiding (Operator)
import qualified Text.Parsec.Expr as E
import Text.Parsec.Token hiding (GenTokenParser(..))
import qualified Text.Parsec.Token as T
import Language.Verilog.Syntax
-- --------------------
type P s a = ParsecT s () Identity a
type Operator s a = E.Operator s () Identity a
type OpTable s a = [[Operator s a]]
-- --------------------
verilog :: Stream s Identity Char => T.GenTokenParser s () Identity
verilog = makeTokenParser verilogDef
verilogDef :: Stream s Identity Char => GenLanguageDef s () Identity
verilogDef
= LanguageDef
{ commentStart = "/*"
, commentEnd = "*/"
, commentLine = "//"
, nestedComments = True
, identStart = letter <|> char '_'
, identLetter = alphaNum <|> oneOf "_$"
, opStart = oneOf "+-!~&|^*/%><=?:"
, opLetter = oneOf "+-!~&|^*/%><=?:"
, reservedNames = verilogKeywords
, reservedOpNames = verilogOps
, caseSensitive = True
}
lexeme :: (Stream s Identity Char) => P s a -> P s a
lexeme = T.lexeme verilog
whiteSpace :: Stream s Identity Char => P s ()
whiteSpace = T.whiteSpace verilog
identifier :: Stream s Identity Char => P s String
identifier = T.identifier verilog
reserved :: Stream s Identity Char => String -> P s ()
reserved = T.reserved verilog
reservedOp :: Stream s Identity Char => String -> P s ()
reservedOp = T.reservedOp verilog
symbol :: Stream s Identity Char => String -> P s ()
symbol x = T.symbol verilog x >> return ()
stringLiteral :: Stream s Identity Char => P s String
stringLiteral = T.stringLiteral verilog
-- integer :: P s Integer
-- integer = T.integer verilog
parens, brackets, braces :: Stream s Identity Char => P s a -> P s a
parens = T.parens verilog
brackets = T.brackets verilog
braces = T.braces verilog
comma, semi, colon, dot :: Stream s Identity Char => P s ()
comma = T.comma verilog >> return ()
semi = T.semi verilog >> return ()
colon = T.colon verilog >> return ()
dot = T.dot verilog >> return ()
commaSep, commaSep1 :: Stream s Identity Char => P s a -> P s [a]
commaSep = T.commaSep verilog
commaSep1 = T.commaSep1 verilog
-- http://www.hdlworks.com/hdl_corner/verilog_ref/index.html
verilogKeywords :: [String]
verilogKeywords
= [ "always", "and", "assign", "begin", "buf", "bufif0", "bufif1"
, "case", "casex", "casez", "cmos"
, "deassign", "default", "defparam", "disable"
, "edge", "else", "end", "endcase", "endfunction", "endmodule"
, "endprimitive", "endspecify", "endtable", "endtask", "event"
, "for", "force", "forever", "fork", "function"
, "highz0", "highz1", "if", "ifnone", "initial", "inout", "input", "integer"
, "join", "large", "macromodule", "medium", "module"
, "nand", "negedge", "nmos", "nor", "not", "notif0", "notif1"
, "or", "output", "parameter", "pmos", "posedge", "primitive"
, "pull0", "pull1", "pulldown", "pullup"
, "rcmos", "real", "realtime", "reg", "release", "repeat"
, "rnmos", "rpmos", "rtran", "rtranif0", "rtranif1"
, "scalared", "small", "specify", "specparam"
, "strong0", "strong1", "supply0", "supply1"
, "table", "task", "time", "tran", "tranif0", "tranif1"
, "tri", "tri0", "tri1", "triand", "trior", "trireg"
, "vectored", "wait", "wand", "weak0", "weak1", "while", "wire", "wor"
, "xnor", "xor"
] ++
-- introduced in Verilog-2001
[ "automatic", "cell", "config", "design", "endconfig", "endgenerate"
, "generate", "genvar", "instance", "liblist", "localparam"
, "noshowcancelled", "pulsestyle_ondetect", "pulsestyle_onevent"
, "showcancelled", "signed", "use"
]
verilogOps :: [String]
verilogOps
= [ "+", "-", "!", "~", "&", "~&", "|", "~|", "^", "~^", "^~"
, "+", "-", "*", "/", "%", ">", ">=", "<", "<="
, "&&", "||", "==", "!=", "===", "!===", "&", "|", "^", "^~", "~^"
, "<<", ">>", "<<<", ">>>"
, "?", ":", "->"
] ++
-- introduced in Verilog-2001
[ "**" ]
-- --------------------
verilogFile :: Stream s Identity Char => P s Verilog
verilogFile
= do whiteSpace
ds <- many description
eof
return (Verilog ds)
description :: Stream s Identity Char => P s Description
description
= liftM ModuleDescription module_description <|>
liftM UDPDescription udp_description
module_description :: Stream s Identity Char => P s Module
module_description
= do reserved "module"
name <- ident
paral <- optionMaybe parameterlist_declaration
ports <- parens (commaSep port_declaration) <|> return []
semi
items <- many module_item
reserved "endmodule"
return (Module name paral ports items)
parameterlist_declaration :: Stream s Identity Char => P s [ParamDecl]
parameterlist_declaration
= do symbol "#"
paras <- parens (commaSep parameter_declaration01) <|> return []
return paras
port_declaration :: Stream s Identity Char => P s PortDecl
port_declaration
= do dir <- portDir
ty <- optionMaybe portType
r <- optionMaybe range
i <- ident
return (PortDecl dir ty r i (CommentItem ""))
module_item :: Stream s Identity Char => P s Item
module_item
= liftM ParamDeclItem parameter_declaration <|>
liftM InputDeclItem input_declaration <|>
liftM OutputDeclItem output_declaration <|>
liftM InOutDeclItem inout_declaration <|>
liftM NetDeclItem net_declaration <|>
liftM RegDeclItem reg_declaration <|>
liftM EventDeclItem event_declaration <|>
liftM PrimitiveInstItem primitive_instance <|>
liftM InstanceItem module_or_udp_instance <|>
liftM ParamOverrideItem (fail "TODO param override") <|>
liftM LocalParamItem localparam_declaration <|>
continuous_assign <|>
(reserved "initial" >> liftM InitialItem statement) <|>
(reserved "always" >> liftM AlwaysItem statement) <|>
generate_decl <|>
genvar_decl <|>
task_decl <|>
function_decl
<?> "module item"
generate_decl :: Stream s Identity Char => P s Item
generate_decl
= liftM GenerateDeclItem genfor_stmt <|>
liftM GenerateDeclItem genif_stmt
genvar_decl :: Stream s Identity Char => P s Item
genvar_decl
= do reserved "genvar"
vs <- commaSep1 ident
semi
return (GenVarItem vs)
task_decl :: Stream s Identity Char => P s Item
task_decl
= do reserved "task"
x <- ident
semi
ys <- many local_decl
s <- statement
reserved "endtask"
return (TaskItem x ys s)
function_decl :: Stream s Identity Char => P s Item
function_decl
= do reserved "function"
t <- optionMaybe function_type
x <- ident
semi
ys <- many local_decl
s <- statement
reserved "endfunction"
return (FunctionItem t x ys s)
function_type :: Stream s Identity Char => P s FunctionType
function_type
= liftM FunctionTypeRange range <|>
(reserved "integer" >> return FunctionTypeInteger) <|>
(reserved "real" >> return FunctionTypeReal)
udp_description :: Stream s Identity Char => P s UDP
udp_description
= do reserved "primitive"
x <- ident
symbol "("
x_out <- ident
comma
x_ins <- commaSep1 ident
symbol ")"
semi
ds <- many1 udp_port_decl
y <- optionMaybe udp_initial_statement
t <- udp_table
reserved "endprimitive"
return (UDP x x_out x_ins ds y t)
udp_port_decl :: Stream s Identity Char => P s UDPDecl
udp_port_decl
= liftM UDPOutputDecl output_declaration <|>
liftM UDPInputDecl input_declaration <|>
do reserved "reg"
x <- ident
semi
return (UDPRegDecl x)
<?> "UDP port declaration"
udp_initial_statement :: Stream s Identity Char => P s UDPInitialStatement
udp_initial_statement
= do reserved "initial"
x <- ident
symbol "="
e <- expression
semi
return (UDPInitialStatement x e)
<?> "UDP initial statement"
udp_table :: Stream s Identity Char => P s TableDefinition
udp_table
= do reserved "table"
es <- many1 table_entry
t <- case partitionEithers es of
(es', []) -> return (CombinationalTable es')
([], es') -> return (SequentialTable es')
_ -> fail "UDP table: mix of sequential and combinational entries"
reserved "endtable"
return t
table_entry :: Stream s Identity Char =>
P s (Either CombinationalEntry SequentialEntry)
table_entry
= try (liftM Left combinational_entry) <|>
liftM Right sequential_entry
combinational_entry :: Stream s Identity Char => P s CombinationalEntry
combinational_entry
= do xs <- many1 level_symbol
colon
y <- choice [ lexeme (char x) | x <- outputSymbols ]
semi
return (CombinationalEntry xs y)
sequential_entry :: Stream s Identity Char => P s SequentialEntry
sequential_entry
= do x <- many1 (liftM Left level_symbol <|> liftM Right edge)
colon
y <- level_symbol
colon
z <- next_state
semi
return (SequentialEntry x y z)
level_symbol :: Stream s Identity Char => P s LevelSymbol
level_symbol = choice [ lexeme (char x) | x <- levelSymbols ]
next_state :: Stream s Identity Char => P s NextState
next_state = choice [ lexeme (char x) | x <- nextStates ]
edge :: Stream s Identity Char => P s Edge
edge
= parens (liftM2 EdgeLevels (oneOf levelSymbols) (oneOf levelSymbols)) <|>
liftM EdgeSymbol (choice [ lexeme (char x) | x <- edgeSymbols ])
-- -----------------------------------------------------------------------------
-- declarations
local_decl :: Stream s Identity Char => P s LocalDecl
local_decl
= liftM LocalParamDecl parameter_declaration <|>
liftM LocalInputDecl input_declaration <|>
liftM LocalOutputDecl output_declaration <|>
liftM LocalInOutDecl inout_declaration <|>
liftM LocalRegDecl reg_declaration
<?> "local declaration"
parameter_declaration :: Stream s Identity Char => P s ParamDecl
parameter_declaration
= do reserved "parameter"
param_assigns <- (commaSep1 parameter_assignment)
<?> "parameter list"
semi
return (ParamDecl param_assigns)
<?> "parameter declaration"
localparam_declaration :: Stream s Identity Char => P s LocalParam
localparam_declaration
= do reserved "localparam"
param_assigns <- (commaSep1 parameter_assignment)
<?> "parameter list"
semi
return (LocalParam param_assigns)
<?> "localparam declaration"
parameter_declaration01 :: Stream s Identity Char => P s ParamDecl
parameter_declaration01
= do reserved "parameter"
param_assign <- parameter_assignment
<?> "parameter"
return (ParamDecl [param_assign])
<?> "parameter declaration"
input_declaration :: Stream s Identity Char => P s InputDecl
input_declaration
= do reserved "input"
r <- optionMaybe range
xs <- commaSep1 ident
semi
return (InputDecl r xs)
<?> "input declaration"
output_declaration :: Stream s Identity Char => P s OutputDecl
output_declaration
= do reserved "output"
r <- optionMaybe range
xs <- commaSep1 ident
semi
return (OutputDecl r xs)
<?> "output declaration"
inout_declaration :: Stream s Identity Char => P s InOutDecl
inout_declaration
= do reserved "inout"
r <- optionMaybe range
xs <- commaSep1 ident
semi
return (InOutDecl r xs)
<?> "inout declaration"
net_declaration :: Stream s Identity Char => P s NetDecl
net_declaration
= do t <- net_type
try (net_decl t) <|> net_decl_assign t
<?> "net declaration"
net_decl :: Stream s Identity Char => NetType -> P s NetDecl
net_decl t
= do r <- optionMaybe expand_range
d <- optionMaybe delay
xs <- commaSep1 ident
semi
return (NetDecl t r d xs)
net_decl_assign :: Stream s Identity Char => NetType -> P s NetDecl
net_decl_assign t
= do s <- optionMaybe drive_strength
r <- optionMaybe expand_range
d <- optionMaybe delay
xs <- commaSep1 (assign ident)
semi
return (NetDeclAssign t s r d xs)
reg_declaration :: Stream s Identity Char => P s RegDecl
reg_declaration
= do t <- reg_type
-- only the "reg" type can have a vector range before the identifier
r <- case t of
Reg_reg -> optionMaybe range
_ -> return Nothing
xs <- commaSep1 reg_var
semi
return (RegDecl t r xs)
<?> "reg declaration"
event_declaration :: Stream s Identity Char => P s EventDecl
event_declaration
= do reserved "event"
xs <- commaSep1 ident
return (EventDecl xs)
<?> "event declaration"
continuous_assign :: Stream s Identity Char => P s Item
continuous_assign
= do reserved "assign"
s <- optionMaybe drive_strength
d <- optionMaybe delay
xs <- commaSep1 assignment
semi
return (AssignItem s d xs)
-- -----------------------------------------------------------------------------
-- primitive instantiation
primitive_instance :: Stream s Identity Char => P s PrimitiveInst
primitive_instance
= do t <- prim_type
s <- optionMaybe (try drive_strength)
       -- have to use 'try' here because drive_strength starts with the
-- open paren token, just like in prim_inst
d <- optionMaybe delay
xs <- commaSep1 prim_inst
semi
return (PrimitiveInst t s d xs)
prim_inst :: Stream s Identity Char => P s PrimInst
prim_inst
= liftM2 PrimInst (optionMaybe prim_inst_name) (parens (commaSep expression))
prim_inst_name :: Stream s Identity Char => P s PrimInstName
prim_inst_name
= liftM2 PrimInstName ident (optionMaybe range)
-- -----------------------------------------------------------------------------
-- module instantiations
module_or_udp_instance :: Stream s Identity Char => P s Instance
module_or_udp_instance
= do x <- ident
-- TODO optional strength
ys <- optionMaybe $
do symbol "#"
symbol "("
ys <- liftM Left (commaSep1 expression) <|>
liftM Right (commaSep1 parameter)
symbol ")"
return ys
insts <- commaSep1 inst
semi
return (Instance x (fromMaybe (Left []) ys) insts)
parameter :: Stream s Identity Char => P s Parameter
parameter
= do dot
x <- ident
e <- parens expression
return (Parameter x e)
inst :: Stream s Identity Char => P s Inst
inst
= do x <- ident
r <- optionMaybe range
-- mc <- optionMaybe connections
-- return (Inst x r $ maybeList mc)
-- where maybeList Nothing = []
-- maybeList (Just l) = l
c <- connections
return (Inst x r c)
connections :: Stream s Identity Char => P s Connections
connections
= parens (liftM NamedConnections (commaSep named_connection) <|>
liftM Connections (commaSep expression))
named_connection :: Stream s Identity Char => P s NamedConnection
named_connection
= do dot
x <- ident
e <- parens expression
return (NamedConnection x e (CommentItem ""))
-- -----------------------------------------------------------------------------
-- statements
statement :: Stream s Identity Char => P s Statement
statement
= assignment_stmt <|>
if_stmt <|>
case_stmt <|>
for_stmt <|>
while_stmt <|>
delay_stmt <|>
event_control_stmt <|>
seq_block <|>
par_block <|>
task_stmt <|>
assign_stmt <|>
genfor_stmt <|>
genif_stmt <|>
item_stmt
<?> "statement"
item_stmt :: Stream s Identity Char => P s Statement
item_stmt = liftM ItemStmt module_item
genfor_stmt :: Stream s Identity Char => P s Statement
genfor_stmt
= do reserved "generate"
vs <- many genvar_decl
fs <- many for_stmt
reserved "endgenerate"
return (GenForStmt vs fs)
genif_stmt :: Stream s Identity Char => P s Statement
genif_stmt
= do reserved "generate"
vs <- many if_stmt
reserved "endgenerate"
return (GenIfStmt vs)
assignment_stmt :: Stream s Identity Char => P s Statement
assignment_stmt
= do x <- lvalue
f <- (symbol "=" >> return BlockingAssignment) <|>
(symbol "<=" >> return NonBlockingAssignment)
c <- optionMaybe assignment_control
e <- expression
semi
return (f x c e)
if_stmt :: Stream s Identity Char => P s Statement
if_stmt
= do reserved "if"
e <- parens expression
s1 <- maybe_statement
s2 <- (reserved "else" >> maybe_statement) <|>
return Nothing
return (IfStmt e s1 s2)
case_stmt :: Stream s Identity Char => P s Statement
case_stmt
= do t <- (reserved "case" >> return Case) <|>
(reserved "casex" >> return Casex) <|>
(reserved "casez" >> return Casez)
e <- parens expression
xs <- many case_item
reserved "endcase"
return (CaseStmt t e xs)
case_item :: Stream s Identity Char => P s CaseItem
case_item
= do f <- (reserved "default" >> return CaseDefault) <|>
(many1 expression >>= return . CaseItem)
colon
s <- maybe_statement
return (f s)
<?> "case item"
for_stmt :: Stream s Identity Char => P s Statement
for_stmt
= do reserved "for"
symbol "("
x <- assignment
semi
y <- expression
semi
z <- assignment
symbol ")"
s <- statement
return (ForStmt x y z s)
while_stmt :: Stream s Identity Char => P s Statement
while_stmt
= do reserved "while"
symbol "("
e <- expression
symbol ")"
s <- statement
return (WhileStmt e s)
delay_stmt :: Stream s Identity Char => P s Statement
delay_stmt
= liftM2 DelayStmt delay maybe_statement
event_control_stmt :: Stream s Identity Char => P s Statement
event_control_stmt
= liftM2 EventControlStmt event_control maybe_statement
maybe_statement :: Stream s Identity Char => P s (Maybe Statement)
maybe_statement
= (semi >> return Nothing) <|> liftM Just statement
seq_block :: Stream s Identity Char => P s Statement
seq_block
= do reserved "begin"
maybe_label <- optionMaybe (colon >> ident)
-- TODO local block declarations
stmts <- many1 statement
reserved "end"
return (SeqBlock maybe_label [] stmts)
par_block :: Stream s Identity Char => P s Statement
par_block
= do reserved "fork"
maybe_label <- optionMaybe (colon >> ident)
-- TODO local block declarations
stmts <- many1 statement
reserved "join"
return (ParBlock maybe_label [] stmts)
task_stmt :: Stream s Identity Char => P s Statement
task_stmt
= do _ <- char '$'
x <- ident
args <- optionMaybe (parens (commaSep expression))
semi
return (TaskStmt x args)
assign_stmt :: Stream s Identity Char => P s Statement
assign_stmt
= do reserved "assign"
x <- assignment
semi
return (AssignStmt x)
event_control :: Stream s Identity Char => P s EventControl
event_control
= do symbol "@"
choice [ symbol "*" >> return EventControlWildCard
, parens ((symbol "*" >> return EventControlWildCard) <|>
liftM EventControlExpr (commaSep event_expr) <|>
liftM EventControlIdent (commaSep ident))
]
assignment_control :: Stream s Identity Char => P s AssignmentControl
assignment_control
= fail "assignment control" -- TODO
event_expr :: Stream s Identity Char => P s EventExpr
event_expr
= do e1 <- choice [ liftM EventExpr expression
, reserved "posedge" >> liftM EventPosedge expression
, reserved "negedge" >> liftM EventNegedge expression
]
choice [ reserved "or" >> liftM (EventOr e1) event_expr
, return e1
]
-- parse an assignment statement.
-- parametrized over the parser of the left-hand side.
assign :: Stream s Identity Char => P s a -> P s (a, Expression)
assign lhs
= do x <- lhs
symbol "="
y <- expression
return (x, y)
assignment :: Stream s Identity Char => P s Assignment
assignment
= liftM (uncurry Assignment) (assign lvalue)
-- -----------------------------------------------------------------------------
-- expressions
const_expr :: Stream s Identity Char => P s Expression
const_expr = expression
<?> "constant expression"
expression :: Stream s Identity Char => P s Expression
expression
= do e1 <- expression'
choice [ do symbol "?"
e2 <- expression
symbol ":"
e3 <- expression
return (ExprCond e1 e2 e3)
, return e1
]
expression' :: Stream s Identity Char => P s Expression
expression'
= buildExpressionParser opTable factor <?> "expression"
where
factor
= choice [ parens expression
, ident >>= expr_ident
, expr_number
, expr_string
, expr_concat
]
<?> "factor"
-- parse an expression that starts with an identifier
expr_ident :: Stream s Identity Char => Ident -> P s Expression
expr_ident x
= liftM (ExprFunCall x) (parens (commaSep expression)) <|>
(brackets $
do e <- expression
-- for ExprSlice, 'e' is actually a constant expression,
-- but const_expr = expression, so it does not matter.
choice [ colon >> liftM (ExprSlice x e) const_expr
, symbol "+:" >> liftM (ExprSlicePlus x e) const_expr
, symbol "-:" >> liftM (ExprSliceMinus x e) const_expr
, return (ExprIndex x e)
]) <|>
return (ExprVar x)
opTable :: Stream s Identity Char => OpTable s Expression
opTable
= [ [ unaryOp "+" UPlus
, unaryOp "-" UMinus
, unaryOp "!" UBang
, unaryOp "~" UTilde
]
, [ binaryOp "*" Times
, binaryOp "/" Divide
, binaryOp "%" Modulo
, binaryOp "**" Pow
]
, [ binaryOp "+" Plus
, binaryOp "-" Minus
]
-- TODO <<< and >>> operators
, [ binaryOp "<<" ShiftLeft
, binaryOp ">>" ShiftRight
]
, [ binaryOp "<" LessThan
, binaryOp "<=" LessEqual
, binaryOp ">" GreaterThan
, binaryOp ">=" GreaterEqual
]
, [ binaryOp "==" Equals
, binaryOp "!=" NotEquals
, binaryOp "===" CEquals
, binaryOp "!==" CNotEquals
]
, [ unaryOp "&" UAnd
, unaryOp "~&" UNand
, binaryOp "&" And
]
, [ unaryOp "^" UXor
, unaryOp "^~" UXnor
, unaryOp "~^" UXnor
, binaryOp "^" Xor
, binaryOp "^~" Xnor
, binaryOp "~^" Xnor
]
, [ unaryOp "|" UOr
, unaryOp "~|" UNor
, binaryOp "|" Or
, binaryOp "~|" Nor
]
, [ binaryOp "&&" LAnd ]
, [ binaryOp "||" LOr ]
]
unaryOp :: Stream s Identity Char => String -> UnaryOp -> Operator s Expression
unaryOp name fun
= Prefix (reservedOp name >> return (ExprUnary fun))
binaryOp :: Stream s Identity Char => String -> BinaryOp -> Operator s Expression
binaryOp name fun
= Infix (reservedOp name >> return (ExprBinary fun)) AssocLeft
expr_number :: Stream s Identity Char => P s Expression
expr_number
= liftM ExprNum number
{- syntax for numbers:
[ sign ] [ size ] [ 'base ] value // integer
[ sign ] value[.value] [ sign ] baseExponent // real
where an integer value is allowed to have some subset of
"0123456789abcdefABCDEFxXzZ?_", depending on the base,
and a real value contains only decimal characters: "0123456789".
-}
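-- For illustration (an informal sketch, not exhaustive): the grammar above
-- accepts literals such as 8'hFF (sized hexadecimal), -12 (signed decimal)
-- and 3.14e-2 (a real with an exponent).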
expr_string :: Stream s Identity Char => P s Expression
expr_string
= liftM ExprString stringLiteral
expr_index :: Stream s Identity Char => P s Expression
expr_index
= liftM2 ExprIndex ident (brackets const_expr)
expr_slice :: Stream s Identity Char => P s Expression
expr_slice
= do x <- ident
symbol "["
e1 <- const_expr
colon
e2 <- const_expr
symbol "]"
return (ExprSlice x e1 e2)
expr_concat :: Stream s Identity Char => P s Expression
expr_concat
= do symbol "{"
e <- expression
choice [ do comma
es <- commaSep expression
symbol "}"
return (ExprConcat (e:es))
, do es <- braces (commaSep expression)
symbol "}"
return (ExprMultiConcat e es)
]
lvalue :: Stream s Identity Char => P s LValue
lvalue
= try expr_index <|>
try expr_slice <|>
liftM ExprVar ident <|>
expr_concat
number :: Stream s Identity Char => P s Number
number
= do { s <- optionMaybe sign
; whiteSpace
; base_integral s Nothing <|>
do n <- decimal_number
whiteSpace
-- n could be the size of an integral, the integral value itself,
-- or the integral part of a real.
base_integral s (Just n) <|> real_number s n
}
where
base_integral maybe_sign maybe_size
= do b <- base
whiteSpace
x <- digits b
whiteSpace
return (IntNum maybe_sign maybe_size (Just b) x)
-- given the optional sign and the integral part, parse the remainder of a
-- real number, or yield an integer.
real_number maybe_sign int_value
= choice [ do maybe_fractional <- optionMaybe (dot >> decimal_number)
whiteSpace
maybe_exponent <- optionMaybe $
do _ <- oneOf "eE"
s <- optionMaybe sign
e <- decimal_number
return (s, e)
case (maybe_fractional, maybe_exponent) of
(Nothing, Nothing)
-> return $ IntNum maybe_sign Nothing Nothing int_value
_ -> return $ RealNum maybe_sign int_value
maybe_fractional maybe_exponent
]
decimal_number :: Stream s Identity Char => P s String
decimal_number = digits DecBase
digits :: Stream s Identity Char => Base -> P s String
digits BinBase
= many1 (oneOf "01xXzZ?_") <?> "binary digit"
digits OctBase
= many1 (oneOf "01234567xXzZ?_") <?> "octal digit"
digits HexBase
= many1 (oneOf "0123456789abcdefABCDEFxXzZ?_") <?> "hexadecimal digit"
digits DecBase
= many1 (oneOf "0123456789_") <?> "decimal digit"
sign :: Stream s Identity Char => P s Sign
sign = (symbol "+" >> return Pos) <|>
(symbol "-" >> return Neg)
base :: Stream s Identity Char => P s Base
base = do { _ <- char '\''
; (oneOf "bB" >> return BinBase) <|>
(oneOf "oO" >> return OctBase) <|>
(oneOf "dD" >> return DecBase) <|>
(oneOf "hH" >> return HexBase)
} <?> "base"
-- -----------------------------------------------------------------------------
-- miscellaneous
ident :: Stream s Identity Char => P s Ident
ident = liftM Ident identifier
portDir :: Stream s Identity Char => P s PortDir
portDir = (reserved "input" >> return (PortDir Input)) <|>
(reserved "output" >> return (PortDir Output)) <|>
(reserved "inout" >> return (PortDir InOut)) <?> "port direction"
portType :: Stream s Identity Char => P s PortType
portType = (reserved "wire" >> return (PortType Wire)) <|>
(reserved "reg" >> return (PortType Reg))
reg_var :: Stream s Identity Char => P s RegVar
reg_var
= do { x <- ident
; liftM (MemVar x) range <|>
liftM (RegVar x) (optionMaybe (symbol "=" >> expression))
}
parameter_assignment :: Stream s Identity Char => P s ParamAssign
parameter_assignment
= do x <- ident
_ <- symbol "="
e <- const_expr
return (ParamAssign x e)
expand_range :: Stream s Identity Char => P s ExpandRange
expand_range
= liftM SimpleRange range <|>
(reserved "scalared" >> liftM ScalaredRange range) <|>
(reserved "vectored" >> liftM VectoredRange range)
<?> "expand range"
range :: Stream s Identity Char => P s Range
range
= brackets $ do e1 <- const_expr
colon
e2 <- const_expr
return (Range e1 e2)
delay :: Stream s Identity Char => P s Delay
delay = do symbol "#"
expression -- expr_number <|> expr_var <|> expression
drive_strength :: Stream s Identity Char => P s DriveStrength
drive_strength
= parens $
(do s0 <- strength0
comma
s1 <- strength1
return (Strength01 s0 s1)) <|>
(do s1 <- strength1
comma
s0 <- strength0
return (Strength10 s1 s0))
prim_type :: Stream s Identity Char => P s PrimType
prim_type = parse_table
strength0 :: Stream s Identity Char => P s Strength0
strength0 = parse_table
strength1 :: Stream s Identity Char => P s Strength1
strength1 = parse_table
net_type :: Stream s Identity Char => P s NetType
net_type = parse_table
reg_type :: Stream s Identity Char => P s RegType
reg_type = parse_table
-- this can be used for NetType, RegType, Strength0, Strength1, etc.
parse_table :: (Stream s Identity Char, Show a, Enum a, Bounded a) => P s a
parse_table
= choice [ reserved (show x) >> return x
| x <- [minBound..maxBound]
]
-- -----------------------------------------------------------------------------
|
githubkleon/ConvenientHDL
|
src/Language/Verilog/Parser.hs
|
bsd-3-clause
| 30,753
| 0
| 23
| 8,377
| 8,816
| 4,282
| 4,534
| 767
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{- Untyped lambda calculus, from chapter 7 -}
module Untyped (
module Untyped.Parse2
, module Untyped.Interpret
) where
import Untyped.Parse2
import Untyped.Interpret
|
joelburget/tapl
|
Untyped.hs
|
bsd-3-clause
| 217
| 0
| 5
| 41
| 30
| 20
| 10
| 6
| 0
|
module Data.SouSiT.Handle (
-- * Source
hSource,
hSource',
hSourceRes,
hSourceNoEOF,
hSourceNoEOF',
hSourceResNoEOF,
-- * Sink
hSink,
hSinkRes
) where
import Data.SouSiT.Source
import Data.SouSiT.Sink
import System.IO
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
-- | Source from a handle. The handle will not be closed and is read till hIsEOF.
hSource :: MonadIO m => (Handle -> m a) -> Handle -> FeedSource m a
hSource get = actionSource . toEof get
-- | Same as hSource, but opens the handle when transfer is called and closes it when
-- transfer/feedToSink completes.
-- Uses 'bracket' to ensure safe release of the allocated resources.
hSource' :: (Handle -> IO a) -> IO Handle -> FeedSource IO a
hSource' get open = bracketActionSource open (liftIO . hClose) (toEof get)
-- | Same as hSource, but opens the handle when transfer is called and closes it when
-- transfer/feedToSink completes.
hSourceRes :: (MonadIO m, MonadResource m) => (Handle -> m a) -> IO Handle -> FeedSource m a
hSourceRes get open = FeedSource fun
where fun sink = do (r,h) <- allocate open (liftIO . hClose)
sink' <- feedToSink (actionSource $ toEof get h) sink
release r
return sink'
toEof get h = (liftIO . hIsEOF) h >>= next
where next True = return Nothing
next False = liftM Just (get h)
-- | Same as hSource, but does not check for hIsEOF and therefore never terminates.
hSourceNoEOF :: MonadIO m => (Handle -> m a) -> Handle -> FeedSource m a
hSourceNoEOF get = actionSource . liftM Just . get
-- | Same as hSource', but does not check for hIsEOF and therefore never terminates.
hSourceNoEOF' :: (Handle -> IO a) -> IO Handle -> FeedSource IO a
hSourceNoEOF' get open = bracketActionSource open hClose (liftM Just . get)
-- | Same as hSourceRes, but does not check for hIsEOF and therefore never terminates.
hSourceResNoEOF :: (MonadIO m, MonadResource m) => (Handle -> m a) -> IO Handle -> FeedSource m a
hSourceResNoEOF get open = FeedSource fun
where fun sink = do (r,h) <- allocate open (liftIO . hClose)
sink' <- feedToSink (actionSource $ liftM Just $ get h) sink
release r
return sink'
-- | Sink backed by a handle. The data will be written by the provided function.
-- The sink will never change to the SinkDone state (if the device is full then
-- the operation will simply fail).
-- The handle is not closed and exceptions are not caught.
hSink :: MonadIO m => (Handle -> a -> m ()) -> Handle -> Sink a m ()
hSink put h = actionSink (put h)
-- | Same as hSink, but opens the handle when the first item is written.
-- The handle will be closed when the sink is closed.
hSinkRes :: (MonadIO m, MonadResource m) => (Handle -> a -> m ()) -> IO Handle -> Sink a m ()
hSinkRes put open = openCloseActionSink o (release . fst) (put . snd)
where o = allocate open (liftIO . hClose)
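-- A minimal illustration (the binding below is not part of the original
-- module): a line-based source over an already-open readable handle,
-- consumed until hIsEOF.
exampleLineSource :: Handle -> FeedSource IO String
exampleLineSource = hSource hGetLine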
|
msiegenthaler/SouSiT
|
Data/SouSiT/Handle.hs
|
bsd-3-clause
| 3,058
| 0
| 14
| 741
| 800
| 409
| 391
| 43
| 2
|
-- | Provides a greedy graph-coloring algorithm.
module Futhark.Optimise.MemoryBlockMerging.GreedyColoring (colorGraph, Coloring) where
import Data.Function ((&))
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import qualified Futhark.Analysis.Interference as Interference
-- | A map of values to their color, identified by an integer.
type Coloring a = M.Map a Int
-- | A map of values to their set of "neighbors" in the graph.
type Neighbors a = M.Map a (S.Set a)
-- | Computes the neighbor map of a graph.
neighbors :: Ord a => Interference.Graph a -> Neighbors a
neighbors =
S.foldr
( \(x, y) acc ->
acc
& M.insertWith S.union x (S.singleton y)
& M.insertWith S.union y (S.singleton x)
)
M.empty
firstAvailable :: Eq space => M.Map Int space -> S.Set Int -> Int -> space -> (M.Map Int space, Int)
firstAvailable spaces xs i sp =
case (i `S.member` xs, spaces M.!? i) of
(False, Just sp') | sp' == sp -> (spaces, i)
(False, Nothing) -> (M.insert i sp spaces, i)
_ -> firstAvailable spaces xs (i + 1) sp
colorNode ::
(Ord a, Eq space) =>
Neighbors a ->
(a, space) ->
(M.Map Int space, Coloring a) ->
(M.Map Int space, Coloring a)
colorNode nbs (x, sp) (spaces, coloring) =
let nb_colors =
foldMap (maybe S.empty S.singleton . (coloring M.!?)) $
fromMaybe mempty (nbs M.!? x)
(spaces', color) = firstAvailable spaces nb_colors 0 sp
in (spaces', M.insert x color coloring)
-- | Graph coloring that takes into account the @space@ of values. Two values
-- can only share the same color if they live in the same space. The result is
-- a map from each color to a space and a map from each value in the input
-- graph to its new color.
colorGraph ::
(Ord a, Ord space) =>
M.Map a space ->
Interference.Graph a ->
(M.Map Int space, Coloring a)
colorGraph spaces graph =
let nodes = S.fromList $ M.toList spaces
nbs = neighbors graph
in S.foldr (colorNode nbs) mempty nodes
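-- A small sketch (the binding below is illustrative, not part of the original
-- module; it assumes 'Interference.Graph' is the set of interfering pairs, as
-- the fold in 'neighbors' indicates): two interfering values in the same space
-- must receive distinct colors, and each color is mapped back to that space.
exampleColoring :: (M.Map Int String, Coloring Int)
exampleColoring =
  colorGraph (M.fromList [(1 :: Int, "mem"), (2, "mem")]) (S.singleton (1, 2))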
|
diku-dk/futhark
|
src/Futhark/Optimise/MemoryBlockMerging/GreedyColoring.hs
|
isc
| 2,027
| 0
| 14
| 453
| 676
| 363
| 313
| 43
| 3
|
module RegisteredUser where
newtype UserName =
UserName String
newtype AccountNumber =
AccountNumber Integer
data User =
Unregistereduser
| RegisteredUser UserName AccountNumber
|
brodyberg/Notes
|
ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/RegisteredUser1.hs
|
mit
| 193
| 0
| 6
| 36
| 34
| 21
| 13
| 8
| 0
|
{- |
Module : ./Common/ResultT.hs
Description : ResultT type and a monadic transformer instance
Copyright : (c) T. Mossakowski, C. Maeder, Uni Bremen 2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
'ResultT' type and a monadic transformer instance
-}
module Common.ResultT where
import Common.Result
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
newtype ResultT m a = ResultT { runResultT :: m (Result a) }
instance Monad m => Functor (ResultT m) where
fmap f m = ResultT $ do
r <- runResultT m
return $ fmap f r
instance Monad m => Applicative (ResultT m) where
pure = return
(<*>) = ap
instance Monad m => Monad (ResultT m) where
return = ResultT . return . return
m >>= k = ResultT $ do
r@(Result e v) <- runResultT m
case v of
Nothing -> return $ Result e Nothing
Just a -> do
s <- runResultT $ k a
return $ joinResult r s
fail = ResultT . return . fail
instance MonadTrans ResultT where
lift m = ResultT $ do
a <- m
return $ return a
-- | Inspired by the MonadIO class.
class Monad m => MonadResult m where
liftR :: Result a -> m a
instance Monad m => MonadResult (ResultT m) where
liftR = ResultT . return
instance MonadIO m => MonadIO (ResultT m) where
liftIO = ResultT . liftM return . liftIO
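-- A small usage sketch (the binding below is illustrative, not part of the
-- original module): lift a plain 'Result' into the transformer and keep
-- computing in the wrapped monad.
exampleResultT :: Monad m => ResultT m Int
exampleResultT = do
  x <- liftR (return 21)
  return (x * 2)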
|
spechub/Hets
|
Common/ResultT.hs
|
gpl-2.0
| 1,488
| 0
| 16
| 410
| 418
| 208
| 210
| 33
| 0
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Python Scripting</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/jython/src/main/javahelp/org/zaproxy/zap/extension/jython/resources/help_ko_KR/helpset_ko_KR.hs
|
apache-2.0
| 962
| 79
| 66
| 157
| 409
| 207
| 202
| -1
| -1
|
module Propellor.Property.Journald where
import Propellor.Base
import qualified Propellor.Property.Systemd as Systemd
import Utility.DataUnits
-- | Configures journald, restarting it so the changes take effect.
configured :: Systemd.Option -> String -> Property Linux
configured option value =
Systemd.configured "/etc/systemd/journald.conf" option value
`onChange` Systemd.restarted "systemd-journald"
-- The string is parsed to get a data size.
-- Examples: "100 megabytes" or "0.5tb"
type DataSize = String
configuredSize :: Systemd.Option -> DataSize -> Property Linux
configuredSize option s = case readSize dataUnits s of
Just sz -> configured option (systemdSizeUnits sz)
Nothing -> property ("unable to parse " ++ option ++ " data size " ++ s) $
return FailedChange
systemMaxUse :: DataSize -> Property Linux
systemMaxUse = configuredSize "SystemMaxUse"
runtimeMaxUse :: DataSize -> Property Linux
runtimeMaxUse = configuredSize "RuntimeMaxUse"
systemKeepFree :: DataSize -> Property Linux
systemKeepFree = configuredSize "SystemKeepFree"
runtimeKeepFree :: DataSize -> Property Linux
runtimeKeepFree = configuredSize "RuntimeKeepFree"
systemMaxFileSize :: DataSize -> Property Linux
systemMaxFileSize = configuredSize "SystemMaxFileSize"
runtimeMaxFileSize :: DataSize -> Property Linux
runtimeMaxFileSize = configuredSize "RuntimeMaxFileSize"
-- Generates size units as used in journald.conf.
systemdSizeUnits :: Integer -> String
systemdSizeUnits sz = filter (/= ' ') (roughSize cfgfileunits True sz)
where
cfgfileunits :: [Unit]
cfgfileunits =
[ Unit (p 6) "E" "exabyte"
, Unit (p 5) "P" "petabyte"
, Unit (p 4) "T" "terabyte"
, Unit (p 3) "G" "gigabyte"
, Unit (p 2) "M" "megabyte"
, Unit (p 1) "K" "kilobyte"
]
p :: Integer -> Integer
p n = 1024^n
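-- A small usage sketch (the host wiring is up to the caller; the values are
-- placeholders): cap the persistent journal at 100 megabytes and keep at
-- least 1 gigabyte free, e.g.
--
-- > & Journald.systemMaxUse "100 megabytes"
-- > & Journald.systemKeepFree "1 gigabyte"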
|
ArchiveTeam/glowing-computing-machine
|
src/Propellor/Property/Journald.hs
|
bsd-2-clause
| 1,821
| 14
| 13
| 300
| 470
| 244
| 226
| 38
| 2
|
module Book.Routes where
import Prelude (IO, Maybe)
import Yesod
import Data.Text (Text)
import Data.IORef (IORef)
import Book (Book)
import Filesystem.Path (FilePath)
data BookSub = BookSub
{ bsBook :: IORef Book
, bsRoot :: FilePath
, bsReload :: IO ()
, bsTitle :: Html
, bsWarning :: Maybe Html
, bsBranch :: Text
}
mkYesodSubData "BookSub" [parseRoutes|
/ BookHomeR GET
/#Text ChapterR GET
/image/#Text BookImageR GET
|]
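-- A sketch, not part of this module, of how a parent Yesod application might
-- mount this subsite in its own routes file; the parent foundation type
-- "App" and the accessor "getBookSub :: App -> BookSub" are assumptions for
-- illustration only:
--
-- > /book BookSubR BookSub getBookSub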
|
wolftune/yesodweb.com
|
Book/Routes.hs
|
bsd-2-clause
| 457
| 0
| 10
| 97
| 126
| 76
| 50
| -1
| -1
|
{-
- Example of interacting with a lambda bridge.
-
- Author: Andy Gill (andygill@ku.edu)
-}
module Main where
import Network.LambdaBridge
import System.IO
import Control.Concurrent
import Control.Monad
main = do
putStrLn "Connecting to 'cat' lambda bridge"
([send],[recv]) <- board_connect (1,1) ["lb_cat","--debug"]
let loop n = do
hPutStrLn send $ "Message " ++ show n ++ "!"
hFlush send
loop (n+1)
forkIO $ loop 0
forever $ do
str <- hGetLine recv
putStrLn str
putStrLn "Exiting lambda bridge"
|
andygill/lambda-bridge
|
examples/haskell/cat/Main.hs
|
bsd-3-clause
| 531
| 0
| 15
| 110
| 163
| 79
| 84
| 17
| 1
|
{-# LANGUAGE CPP, ForeignFunctionInterface, ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Utils
-- Copyright : Isaac Jones, Simon Marlow 2003-2004
-- License : BSD3
-- portions Copyright (c) 2007, Galois Inc.
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- A large and somewhat miscellaneous collection of utility functions used
-- throughout the rest of the Cabal lib and in other tools that use the Cabal
-- lib like @cabal-install@. It provides a very simple set of logging actions,
-- low-level functions for running programs, and a bunch of wrappers for
-- various directory and file functions that do extra logging.
module Distribution.Simple.Utils (
cabalVersion,
-- * logging and errors
die,
dieWithLocation,
topHandler, topHandlerWith,
warn, notice, setupMessage, info, debug,
debugNoWrap, chattyTry,
printRawCommandAndArgs, printRawCommandAndArgsAndEnv,
-- * running programs
rawSystemExit,
rawSystemExitCode,
rawSystemExitWithEnv,
rawSystemStdout,
rawSystemStdInOut,
rawSystemIOWithEnv,
maybeExit,
xargs,
findProgramLocation,
findProgramVersion,
-- * copying files
smartCopySources,
createDirectoryIfMissingVerbose,
copyFileVerbose,
copyDirectoryRecursiveVerbose,
copyFiles,
copyFileTo,
-- * installing files
installOrdinaryFile,
installExecutableFile,
installMaybeExecutableFile,
installOrdinaryFiles,
installExecutableFiles,
installMaybeExecutableFiles,
installDirectoryContents,
copyDirectoryRecursive,
-- * File permissions
doesExecutableExist,
setFileOrdinary,
setFileExecutable,
-- * file names
currentDir,
shortRelativePath,
-- * finding files
findFile,
findFirstFile,
findFileWithExtension,
findFileWithExtension',
findAllFilesWithExtension,
findModuleFile,
findModuleFiles,
getDirectoryContentsRecursive,
-- * environment variables
isInSearchPath,
addLibraryPath,
-- * simple file globbing
matchFileGlob,
matchDirFileGlob,
parseFileGlob,
FileGlob(..),
-- * modification time
moreRecentFile,
existsAndIsMoreRecentThan,
-- * temp files and dirs
TempFileOptions(..), defaultTempFileOptions,
withTempFile, withTempFileEx,
withTempDirectory, withTempDirectoryEx,
-- * .cabal and .buildinfo files
defaultPackageDesc,
findPackageDesc,
tryFindPackageDesc,
defaultHookedPackageDesc,
findHookedPackageDesc,
-- * reading and writing files safely
withFileContents,
writeFileAtomic,
rewriteFile,
-- * Unicode
fromUTF8,
toUTF8,
readUTF8File,
withUTF8FileContents,
writeUTF8File,
normaliseLineEndings,
-- * BOM
startsWithBOM,
fileHasBOM,
ignoreBOM,
-- * generic utils
dropWhileEndLE,
takeWhileEndLE,
equating,
comparing,
isInfixOf,
intercalate,
lowercase,
listUnion,
listUnionRight,
ordNub,
ordNubRight,
wrapText,
wrapLine,
) where
import Control.Monad
( join, when, unless, filterM )
import Control.Concurrent.MVar
( newEmptyMVar, putMVar, takeMVar )
import Data.Bits
( Bits((.|.), (.&.), shiftL, shiftR) )
import Data.Char as Char
( isDigit, toLower, chr, ord )
import Data.Foldable
( traverse_ )
import Data.List
( nub, unfoldr, isPrefixOf, tails, intercalate )
import Data.Typeable
( cast )
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
import qualified Data.Set as Set
import System.Directory
( Permissions(executable), getDirectoryContents, getPermissions
, doesDirectoryExist, doesFileExist, removeFile, findExecutable
, getModificationTime )
import System.Environment
( getProgName )
import System.Exit
( exitWith, ExitCode(..) )
import System.FilePath
( normalise, (</>), (<.>)
, getSearchPath, joinPath, takeDirectory, splitFileName
, splitExtension, splitExtensions, splitDirectories
, searchPathSeparator )
import System.Directory
( createDirectory, renameFile, removeDirectoryRecursive )
import System.IO
( Handle, openFile, openBinaryFile, openBinaryTempFileWithDefaultPermissions
, IOMode(ReadMode), hSetBinaryMode
, hGetContents, stderr, stdout, hPutStr, hFlush, hClose )
import System.IO.Error as IO.Error
( isDoesNotExistError, isAlreadyExistsError
, ioeSetFileName, ioeGetFileName, ioeGetErrorString )
import System.IO.Error
( ioeSetLocation, ioeGetLocation )
import System.IO.Unsafe
( unsafeInterleaveIO )
import qualified Control.Exception as Exception
import Distribution.Text
( display, simpleParse )
import Distribution.Package
( PackageIdentifier )
import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as ModuleName
import Distribution.System
( OS (..) )
import Distribution.Version
(Version(..))
import Control.Exception (IOException, evaluate, throwIO)
import Control.Concurrent (forkIO)
import qualified System.Process as Process
( CreateProcess(..), StdStream(..), proc)
import System.Process
( createProcess, rawSystem, runInteractiveProcess
, showCommandForUser, waitForProcess)
import Distribution.Compat.CopyFile
( copyFile, copyOrdinaryFile, copyExecutableFile
, setFileOrdinary, setFileExecutable, setDirOrdinary )
import Distribution.Compat.TempFile
( openTempFile, createTempDirectory )
import Distribution.Compat.Exception
( tryIO, catchIO, catchExit )
import Distribution.Verbosity
#ifdef VERSION_base
import qualified Paths_Cabal (version)
#endif
-- We only get our own version number when we're building with ourselves
cabalVersion :: Version
#if defined(VERSION_base)
cabalVersion = Paths_Cabal.version
#elif defined(CABAL_VERSION)
cabalVersion = Version [CABAL_VERSION] []
#else
cabalVersion = Version [1,9999] [] --used when bootstrapping
#endif
-- ----------------------------------------------------------------------------
-- Exception and logging utils
dieWithLocation :: FilePath -> Maybe Int -> String -> IO a
dieWithLocation filename lineno msg =
ioError . setLocation lineno
. flip ioeSetFileName (normalise filename)
$ userError msg
where
setLocation Nothing err = err
setLocation (Just n) err = ioeSetLocation err (show n)
die :: String -> IO a
die msg = ioError (userError msg)
topHandlerWith :: forall a. (Exception.SomeException -> IO a) -> IO a -> IO a
topHandlerWith cont prog =
Exception.catches prog [
Exception.Handler rethrowAsyncExceptions
, Exception.Handler rethrowExitStatus
, Exception.Handler handle
]
where
-- Let async exceptions rise to the top for the default top-handler
rethrowAsyncExceptions :: Exception.AsyncException -> IO a
rethrowAsyncExceptions = throwIO
-- ExitCode gets thrown asynchronously too, and we don't want to print it
rethrowExitStatus :: ExitCode -> IO a
rethrowExitStatus = throwIO
-- Print all other exceptions
handle :: Exception.SomeException -> IO a
handle se = do
hFlush stdout
pname <- getProgName
hPutStr stderr (message pname se)
cont se
message :: String -> Exception.SomeException -> String
message pname (Exception.SomeException se) =
case cast se :: Maybe Exception.IOException of
Just ioe ->
let file = case ioeGetFileName ioe of
Nothing -> ""
Just path -> path ++ location ++ ": "
location = case ioeGetLocation ioe of
l@(n:_) | Char.isDigit n -> ':' : l
_ -> ""
detail = ioeGetErrorString ioe
in wrapText (pname ++ ": " ++ file ++ detail)
Nothing ->
#if __GLASGOW_HASKELL__ < 710
show se
#else
Exception.displayException se
#endif
topHandler :: IO a -> IO a
topHandler prog = topHandlerWith (const $ exitWith (ExitFailure 1)) prog
-- | Non-fatal conditions that may be indicative of an error or problem.
--
-- We display these at the 'normal' verbosity level.
--
warn :: Verbosity -> String -> IO ()
warn verbosity msg =
when (verbosity >= normal) $ do
hFlush stdout
hPutStr stderr (wrapText ("Warning: " ++ msg))
-- | Useful status messages.
--
-- We display these at the 'normal' verbosity level.
--
-- This is for the ordinary helpful status messages that users see. Just
-- enough information to know that things are working but not floods of detail.
--
notice :: Verbosity -> String -> IO ()
notice verbosity msg =
when (verbosity >= normal) $
putStr (wrapText msg)
setupMessage :: Verbosity -> String -> PackageIdentifier -> IO ()
setupMessage verbosity msg pkgid =
notice verbosity (msg ++ ' ': display pkgid ++ "...")
-- | More detail on the operation of some action.
--
-- We display these messages when the verbosity level is 'verbose'
--
info :: Verbosity -> String -> IO ()
info verbosity msg =
when (verbosity >= verbose) $
putStr (wrapText msg)
-- | Detailed internal debugging information
--
-- We display these messages when the verbosity level is 'deafening'
--
debug :: Verbosity -> String -> IO ()
debug verbosity msg =
when (verbosity >= deafening) $ do
putStr (wrapText msg)
hFlush stdout
-- | A variant of 'debug' that doesn't perform the automatic line
-- wrapping. Produces better output in some cases.
debugNoWrap :: Verbosity -> String -> IO ()
debugNoWrap verbosity msg =
when (verbosity >= deafening) $ do
putStrLn msg
hFlush stdout
-- | Perform an IO action, catching any IO exceptions and printing an error
-- if one occurs.
chattyTry :: String -- ^ a description of the action we were attempting
-> IO () -- ^ the action itself
-> IO ()
chattyTry desc action =
catchIO action $ \exception ->
putStrLn $ "Error while " ++ desc ++ ": " ++ show exception
-- -----------------------------------------------------------------------------
-- Helper functions
-- | Wraps text to the default line width. Existing newlines are preserved.
wrapText :: String -> String
wrapText = unlines
. map (intercalate "\n"
. map unwords
. wrapLine 79
. words)
. lines
-- | Wraps a list of words to a list of lines of words of a particular width.
wrapLine :: Int -> [String] -> [[String]]
wrapLine width = wrap 0 []
where wrap :: Int -> [String] -> [String] -> [[String]]
wrap 0 [] (w:ws)
| length w + 1 > width
= wrap (length w) [w] ws
wrap col line (w:ws)
| col + length w + 1 > width
= reverse line : wrap 0 [] (w:ws)
wrap col line (w:ws)
= let col' = col + length w + 1
in wrap col' (w:line) ws
wrap _ [] [] = []
wrap _ line [] = [reverse line]
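-- For example (an illustrative note, not part of the original module):
--
-- > wrapLine 10 (words "the quick brown fox") == [["the","quick"],["brown","fox"]]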
-- -----------------------------------------------------------------------------
-- rawSystem variants
maybeExit :: IO ExitCode -> IO ()
maybeExit cmd = do
res <- cmd
unless (res == ExitSuccess) $ exitWith res
printRawCommandAndArgs :: Verbosity -> FilePath -> [String] -> IO ()
printRawCommandAndArgs verbosity path args =
printRawCommandAndArgsAndEnv verbosity path args Nothing
printRawCommandAndArgsAndEnv :: Verbosity
-> FilePath
-> [String]
-> Maybe [(String, String)]
-> IO ()
printRawCommandAndArgsAndEnv verbosity path args menv
| verbosity >= deafening = do
traverse_ (putStrLn . ("Environment: " ++) . show) menv
print (path, args)
| verbosity >= verbose = putStrLn $ showCommandForUser path args
| otherwise = return ()
-- Exit with the same exit code if the subcommand fails
rawSystemExit :: Verbosity -> FilePath -> [String] -> IO ()
rawSystemExit verbosity path args = do
printRawCommandAndArgs verbosity path args
hFlush stdout
exitcode <- rawSystem path args
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
exitWith exitcode
rawSystemExitCode :: Verbosity -> FilePath -> [String] -> IO ExitCode
rawSystemExitCode verbosity path args = do
printRawCommandAndArgs verbosity path args
hFlush stdout
exitcode <- rawSystem path args
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
return exitcode
rawSystemExitWithEnv :: Verbosity
-> FilePath
-> [String]
-> [(String, String)]
-> IO ()
rawSystemExitWithEnv verbosity path args env = do
printRawCommandAndArgsAndEnv verbosity path args (Just env)
hFlush stdout
(_,_,_,ph) <- createProcess $
(Process.proc path args) { Process.env = (Just env)
#ifdef MIN_VERSION_process
#if MIN_VERSION_process(1,2,0)
-- delegate_ctlc has been added in process 1.2, and we still want to be able to
-- bootstrap GHC on systems not having that version
, Process.delegate_ctlc = True
#endif
#endif
}
exitcode <- waitForProcess ph
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
exitWith exitcode
-- Closes the passed-in handles before returning.
rawSystemIOWithEnv :: Verbosity
-> FilePath
-> [String]
-> Maybe FilePath -- ^ New working dir or inherit
-> Maybe [(String, String)] -- ^ New environment or inherit
-> Maybe Handle -- ^ stdin
-> Maybe Handle -- ^ stdout
-> Maybe Handle -- ^ stderr
-> IO ExitCode
rawSystemIOWithEnv verbosity path args mcwd menv inp out err = do
printRawCommandAndArgsAndEnv verbosity path args menv
hFlush stdout
(_,_,_,ph) <- createProcess $
(Process.proc path args) { Process.cwd = mcwd
, Process.env = menv
, Process.std_in = mbToStd inp
, Process.std_out = mbToStd out
, Process.std_err = mbToStd err
#ifdef MIN_VERSION_process
#if MIN_VERSION_process(1,2,0)
-- delegate_ctlc has been added in process 1.2, and we still want to be able to
-- bootstrap GHC on systems not having that version
, Process.delegate_ctlc = True
#endif
#endif
}
exitcode <- waitForProcess ph
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
return exitcode
where
mbToStd :: Maybe Handle -> Process.StdStream
mbToStd = maybe Process.Inherit Process.UseHandle
-- | Run a command and return its output.
--
-- The output is assumed to be text in the locale encoding.
--
rawSystemStdout :: Verbosity -> FilePath -> [String] -> IO String
rawSystemStdout verbosity path args = do
(output, errors, exitCode) <- rawSystemStdInOut verbosity path args
Nothing Nothing
Nothing False
when (exitCode /= ExitSuccess) $
die errors
return output
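-- An illustrative helper, not part of the original API: capture the output
-- of @uname -s@ at 'normal' verbosity.  The choice of @uname@ is an
-- assumption (Unix only); any program on the PATH would do.
unameExample :: IO String
unameExample = rawSystemStdout normal "uname" ["-s"]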
-- | Run a command and return its output, errors and exit status. Optionally
-- also supply some input. Also provides control over the binary/text mode
-- of the input and output.
--
rawSystemStdInOut :: Verbosity
-> FilePath -- ^ Program location
-> [String] -- ^ Arguments
-> Maybe FilePath -- ^ New working dir or inherit
-> Maybe [(String, String)] -- ^ New environment or inherit
-> Maybe (String, Bool) -- ^ input text and binary mode
-> Bool -- ^ output in binary mode
-> IO (String, String, ExitCode) -- ^ output, errors, exit
rawSystemStdInOut verbosity path args mcwd menv input outputBinary = do
printRawCommandAndArgs verbosity path args
Exception.bracket
(runInteractiveProcess path args mcwd menv)
(\(inh,outh,errh,_) -> hClose inh >> hClose outh >> hClose errh)
$ \(inh,outh,errh,pid) -> do
-- output mode depends on what the caller wants
hSetBinaryMode outh outputBinary
-- but the errors are always assumed to be text (in the current locale)
hSetBinaryMode errh False
-- fork off a couple threads to pull on the stderr and stdout
-- so if the process writes to stderr we do not block.
err <- hGetContents errh
out <- hGetContents outh
mv <- newEmptyMVar
let force str = (evaluate (length str) >> return ())
`Exception.finally` putMVar mv ()
--TODO: handle exceptions like text decoding.
_ <- forkIO $ force out
_ <- forkIO $ force err
-- push all the input, if any
case input of
Nothing -> return ()
Just (inputStr, inputBinary) -> do
-- input mode depends on what the caller wants
hSetBinaryMode inh inputBinary
hPutStr inh inputStr
hClose inh
--TODO: this probably fails if the process refuses to consume
-- or if it closes stdin (eg if it exits)
-- wait for both to finish, in either order
takeMVar mv
takeMVar mv
-- wait for the program to terminate
exitcode <- waitForProcess pid
unless (exitcode == ExitSuccess) $
debug verbosity $ path ++ " returned " ++ show exitcode
++ if null err then "" else
" with error message:\n" ++ err
++ case input of
Nothing -> ""
Just ("", _) -> ""
Just (inp, _) -> "\nstdin input:\n" ++ inp
return (out, err, exitcode)
-- | Look for a program on the path.
findProgramLocation :: Verbosity -> FilePath -> IO (Maybe FilePath)
findProgramLocation verbosity prog = do
debug verbosity $ "searching for " ++ prog ++ " in path."
res <- findExecutable prog
case res of
Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path")
Just path -> debug verbosity ("found " ++ prog ++ " at "++ path)
return res
-- | Look for a program and try to find its version number. It can accept
-- either an absolute path or the name of a program binary, in which case we
-- will look for the program on the path.
--
findProgramVersion :: String -- ^ version args
-> (String -> String) -- ^ function to select version
-- number from program output
-> Verbosity
-> FilePath -- ^ location
-> IO (Maybe Version)
findProgramVersion versionArg selectVersion verbosity path = do
str <- rawSystemStdout verbosity path [versionArg]
`catchIO` (\_ -> return "")
`catchExit` (\_ -> return "")
let version :: Maybe Version
version = simpleParse (selectVersion str)
case version of
Nothing -> warn verbosity $ "cannot determine version of " ++ path
++ " :\n" ++ show str
Just v -> debug verbosity $ path ++ " is version " ++ display v
return version
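-- An illustrative wrapper, not part of the original API: query a compiler's
-- version by selecting the last whitespace-separated word of its @--version@
-- output (e.g. "... version 7.10.3").  The program path is whatever the
-- caller supplies; the selector is total so empty output simply yields
-- Nothing.
compilerVersionExample :: Verbosity -> FilePath -> IO (Maybe Version)
compilerVersionExample verbosity path =
    findProgramVersion "--version" selectLastWord verbosity path
  where
    selectLastWord str = case words str of
                           [] -> ""
                           ws -> last ws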
-- | Like the Unix xargs program. Useful for when we've got very long command
-- lines that might overflow an OS limit on command line length and so you
-- need to invoke a command multiple times to get all the args in.
--
-- Use it with either of the rawSystem variants above. For example:
--
-- > xargs (32*1024) (rawSystemExit verbosity) prog fixedArgs bigArgs
--
xargs :: Int -> ([String] -> IO ())
-> [String] -> [String] -> IO ()
xargs maxSize rawSystemFun fixedArgs bigArgs =
let fixedArgSize = sum (map length fixedArgs) + length fixedArgs
chunkSize = maxSize - fixedArgSize
in mapM_ (rawSystemFun . (fixedArgs ++)) (chunks chunkSize bigArgs)
where chunks len = unfoldr $ \s ->
if null s then Nothing
else Just (chunk [] len s)
chunk acc _ [] = (reverse acc,[])
chunk acc len (s:ss)
| len' < len = chunk (s:acc) (len-len'-1) ss
| otherwise = (reverse acc, s:ss)
where len' = length s
-- ------------------------------------------------------------
-- * File Utilities
-- ------------------------------------------------------------
----------------
-- Finding files
-- | Find a file by looking in a search path. The file path must match exactly.
--
findFile :: [FilePath] -- ^search locations
-> FilePath -- ^File Name
-> IO FilePath
findFile searchPath fileName =
findFirstFile id
[ path </> fileName
| path <- nub searchPath]
>>= maybe (die $ fileName ++ " doesn't exist") return
-- | Find a file by looking in a search path with one of a list of possible
-- file extensions. The file base name should be given and it will be tried
-- with each of the extensions in each element of the search path.
--
findFileWithExtension :: [String]
-> [FilePath]
-> FilePath
-> IO (Maybe FilePath)
findFileWithExtension extensions searchPath baseName =
findFirstFile id
[ path </> baseName <.> ext
| path <- nub searchPath
, ext <- nub extensions ]
findAllFilesWithExtension :: [String]
-> [FilePath]
-> FilePath
-> IO [FilePath]
findAllFilesWithExtension extensions searchPath basename =
findAllFiles id
[ path </> basename <.> ext
| path <- nub searchPath
, ext <- nub extensions ]
-- | Like 'findFileWithExtension' but returns which element of the search path
-- the file was found in, and the file path relative to that base directory.
--
findFileWithExtension' :: [String]
-> [FilePath]
-> FilePath
-> IO (Maybe (FilePath, FilePath))
findFileWithExtension' extensions searchPath baseName =
findFirstFile (uncurry (</>))
[ (path, baseName <.> ext)
| path <- nub searchPath
, ext <- nub extensions ]
findFirstFile :: (a -> FilePath) -> [a] -> IO (Maybe a)
findFirstFile file = findFirst
where findFirst [] = return Nothing
findFirst (x:xs) = do exists <- doesFileExist (file x)
if exists
then return (Just x)
else findFirst xs
findAllFiles :: (a -> FilePath) -> [a] -> IO [a]
findAllFiles file = filterM (doesFileExist . file)
-- | Finds the files corresponding to a list of Haskell module names.
--
-- As 'findModuleFile' but for a list of module names.
--
findModuleFiles :: [FilePath] -- ^ build prefix (location of objects)
-> [String] -- ^ search suffixes
-> [ModuleName] -- ^ modules
-> IO [(FilePath, FilePath)]
findModuleFiles searchPath extensions moduleNames =
mapM (findModuleFile searchPath extensions) moduleNames
-- | Find the file corresponding to a Haskell module name.
--
-- This is similar to 'findFileWithExtension'' but specialised to a module
-- name. The function fails if the file corresponding to the module is missing.
--
findModuleFile :: [FilePath] -- ^ build prefix (location of objects)
-> [String] -- ^ search suffixes
-> ModuleName -- ^ module
-> IO (FilePath, FilePath)
findModuleFile searchPath extensions moduleName =
maybe notFound return
=<< findFileWithExtension' extensions searchPath
(ModuleName.toFilePath moduleName)
where
notFound = die $ "Error: Could not find module: " ++ display moduleName
++ " with any suffix: " ++ show extensions
++ " in the search path: " ++ show searchPath
-- | List all the files in a directory and all subdirectories.
--
-- The order places files in sub-directories after all the files in their
-- parent directories. The list is generated lazily, so it is not well defined if
-- the source directory structure changes before the list is used.
--
getDirectoryContentsRecursive :: FilePath -> IO [FilePath]
getDirectoryContentsRecursive topdir = recurseDirectories [""]
where
recurseDirectories :: [FilePath] -> IO [FilePath]
recurseDirectories [] = return []
recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do
(files, dirs') <- collect [] [] =<< getDirectoryContents (topdir </> dir)
files' <- recurseDirectories (dirs' ++ dirs)
return (files ++ files')
where
collect files dirs' [] = return (reverse files
,reverse dirs')
collect files dirs' (entry:entries) | ignore entry
= collect files dirs' entries
collect files dirs' (entry:entries) = do
let dirEntry = dir </> entry
isDirectory <- doesDirectoryExist (topdir </> dirEntry)
if isDirectory
then collect files (dirEntry:dirs') entries
else collect (dirEntry:files) dirs' entries
ignore ['.'] = True
ignore ['.', '.'] = True
ignore _ = False
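-- An illustrative helper, not part of the original API: count the files in a
-- directory tree using the lazy recursive listing above.
countFilesUnder :: FilePath -> IO Int
countFilesUnder dir = fmap length (getDirectoryContentsRecursive dir)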
------------------------
-- Environment variables
-- | Is this directory in the system search path?
isInSearchPath :: FilePath -> IO Bool
isInSearchPath path = fmap (elem path) getSearchPath
addLibraryPath :: OS
-> [FilePath]
-> [(String,String)]
-> [(String,String)]
addLibraryPath os paths = addEnv
where
pathsString = intercalate [searchPathSeparator] paths
ldPath = case os of
OSX -> "DYLD_LIBRARY_PATH"
_ -> "LD_LIBRARY_PATH"
addEnv [] = [(ldPath,pathsString)]
addEnv ((key,value):xs)
| key == ldPath =
if null value
then (key,pathsString):xs
else (key,value ++ (searchPathSeparator:pathsString)):xs
| otherwise = (key,value):addEnv xs
----------------
-- File globbing
data FileGlob
-- | No glob at all, just an ordinary file
= NoGlob FilePath
-- | dir prefix and extension, like @\"foo\/bar\/\*.baz\"@ corresponds to
-- @FileGlob \"foo\/bar\" \".baz\"@
| FileGlob FilePath String
parseFileGlob :: FilePath -> Maybe FileGlob
parseFileGlob filepath = case splitExtensions filepath of
(filepath', ext) -> case splitFileName filepath' of
(dir, "*") | '*' `elem` dir
|| '*' `elem` ext
|| null ext -> Nothing
| null dir -> Just (FileGlob "." ext)
| otherwise -> Just (FileGlob dir ext)
_ | '*' `elem` filepath -> Nothing
| otherwise -> Just (NoGlob filepath)
matchFileGlob :: FilePath -> IO [FilePath]
matchFileGlob = matchDirFileGlob "."
matchDirFileGlob :: FilePath -> FilePath -> IO [FilePath]
matchDirFileGlob dir filepath = case parseFileGlob filepath of
Nothing -> die $ "invalid file glob '" ++ filepath
++ "'. Wildcards '*' are only allowed in place of the file"
++ " name, not in the directory name or file extension."
++ " If a wildcard is used it must be with an file extension."
Just (NoGlob filepath') -> return [filepath']
Just (FileGlob dir' ext) -> do
files <- getDirectoryContents (dir </> dir')
case [ dir' </> file
| file <- files
, let (name, ext') = splitExtensions file
, not (null name) && ext' == ext ] of
[] -> die $ "filepath wildcard '" ++ filepath
++ "' does not match any files."
matches -> return matches
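-- An illustrative helper, not part of the original API: list the @.cabal@
-- files directly inside a directory using the simple glob syntax above.
-- Note that, like 'matchDirFileGlob', it calls 'die' if nothing matches.
cabalFilesExample :: FilePath -> IO [FilePath]
cabalFilesExample dir = matchDirFileGlob dir "*.cabal"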
--------------------
-- Modification time
-- | Compare the modification times of two files to see if the first is newer
-- than the second. The first file must exist but the second need not.
-- The expected use case is when the second file is generated using the first.
-- In this use case, if the result is True then the second file is out of date.
--
moreRecentFile :: FilePath -> FilePath -> IO Bool
moreRecentFile a b = do
exists <- doesFileExist b
if not exists
then return True
else do tb <- getModificationTime b
ta <- getModificationTime a
return (ta > tb)
-- | Like 'moreRecentFile', but also checks that the first file exists.
existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool
existsAndIsMoreRecentThan a b = do
exists <- doesFileExist a
if not exists
then return False
else a `moreRecentFile` b
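-- An illustrative helper, not part of the original API: run a regeneration
-- action only when the source file is newer than the output file (or the
-- output file does not exist yet).
regenerateIfStale :: FilePath -> FilePath -> IO () -> IO ()
regenerateIfStale src out regen = do
  stale <- src `moreRecentFile` out
  when stale regen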
----------------------------------------
-- Copying and installing files and dirs
-- | Same as 'createDirectoryIfMissing' but logs at higher verbosity levels.
--
createDirectoryIfMissingVerbose :: Verbosity
-> Bool -- ^ Create its parents too?
-> FilePath
-> IO ()
createDirectoryIfMissingVerbose verbosity create_parents path0
| create_parents = createDirs (parents path0)
| otherwise = createDirs (take 1 (parents path0))
where
parents = reverse . scanl1 (</>) . splitDirectories . normalise
createDirs [] = return ()
createDirs (dir:[]) = createDir dir throwIO
createDirs (dir:dirs) =
createDir dir $ \_ -> do
createDirs dirs
createDir dir throwIO
createDir :: FilePath -> (IOException -> IO ()) -> IO ()
createDir dir notExistHandler = do
r <- tryIO $ createDirectoryVerbose verbosity dir
case (r :: Either IOException ()) of
Right () -> return ()
Left e
| isDoesNotExistError e -> notExistHandler e
-- createDirectory (and indeed POSIX mkdir) does not distinguish
-- between a dir already existing and a file already existing. So we
-- check for it here. Unfortunately there is a slight race condition
-- here, but we think it is benign. It could report an exception in
-- the case that the dir did exist but another process deletes the
-- directory and creates a file in its place before we can check
-- that the directory did indeed exist.
| isAlreadyExistsError e -> (do
isDir <- doesDirectoryExist dir
if isDir then return ()
else throwIO e
) `catchIO` ((\_ -> return ()) :: IOException -> IO ())
| otherwise -> throwIO e
createDirectoryVerbose :: Verbosity -> FilePath -> IO ()
createDirectoryVerbose verbosity dir = do
info verbosity $ "creating " ++ dir
createDirectory dir
setDirOrdinary dir
-- | Copies a file without copying file permissions. The target file is created
-- with default permissions. Any existing target file is replaced.
--
-- At higher verbosity levels it logs an info message.
--
copyFileVerbose :: Verbosity -> FilePath -> FilePath -> IO ()
copyFileVerbose verbosity src dest = do
info verbosity ("copy " ++ src ++ " to " ++ dest)
copyFile src dest
-- | Install an ordinary file. This is like a file copy but the permissions
-- are set appropriately for an installed file. On Unix it is \"-rw-r--r--\"
-- while on Windows it uses the default permissions for the target directory.
--
installOrdinaryFile :: Verbosity -> FilePath -> FilePath -> IO ()
installOrdinaryFile verbosity src dest = do
info verbosity ("Installing " ++ src ++ " to " ++ dest)
copyOrdinaryFile src dest
-- | Install an executable file. This is like a file copy but the permissions
-- are set appropriately for an installed file. On Unix it is \"-rwxr-xr-x\"
-- while on Windows it uses the default permissions for the target directory.
--
installExecutableFile :: Verbosity -> FilePath -> FilePath -> IO ()
installExecutableFile verbosity src dest = do
info verbosity ("Installing executable " ++ src ++ " to " ++ dest)
copyExecutableFile src dest
-- | Install a file that may or not be executable, preserving permissions.
installMaybeExecutableFile :: Verbosity -> FilePath -> FilePath -> IO ()
installMaybeExecutableFile verbosity src dest = do
perms <- getPermissions src
if (executable perms) --only checks user x bit
then installExecutableFile verbosity src dest
else installOrdinaryFile verbosity src dest
-- | Given a relative path to a file, copy it to the given directory, preserving
-- the relative path and creating the parent directories if needed.
copyFileTo :: Verbosity -> FilePath -> FilePath -> IO ()
copyFileTo verbosity dir file = do
let targetFile = dir </> file
createDirectoryIfMissingVerbose verbosity True (takeDirectory targetFile)
installOrdinaryFile verbosity file targetFile
-- | Common implementation of 'copyFiles', 'installOrdinaryFiles',
-- 'installExecutableFiles' and 'installMaybeExecutableFiles'.
copyFilesWith :: (Verbosity -> FilePath -> FilePath -> IO ())
-> Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
copyFilesWith doCopy verbosity targetDir srcFiles = do
-- Create parent directories for everything
let dirs = map (targetDir </>) . nub . map (takeDirectory . snd) $ srcFiles
mapM_ (createDirectoryIfMissingVerbose verbosity True) dirs
-- Copy all the files
sequence_ [ let src = srcBase </> srcFile
dest = targetDir </> srcFile
in doCopy verbosity src dest
| (srcBase, srcFile) <- srcFiles ]
-- | Copies a bunch of files to a target directory, preserving the directory
-- structure in the target location. The target directories are created if they
-- do not exist.
--
-- The files are identified by a pair of base directory and a path relative to
-- that base. It is only the relative part that is preserved in the
-- destination.
--
-- For example:
--
-- > copyFiles normal "dist/src"
-- > [("", "src/Foo.hs"), ("dist/build/", "src/Bar.hs")]
--
-- This would copy \"src\/Foo.hs\" to \"dist\/src\/src\/Foo.hs\" and
-- copy \"dist\/build\/src\/Bar.hs\" to \"dist\/src\/src\/Bar.hs\".
--
-- This operation is not atomic. Any IO failure during the copy (including any
-- missing source files) leaves the target in an unknown state so it is best to
-- use it with a freshly created directory so that it can be simply deleted if
-- anything goes wrong.
--
copyFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
copyFiles = copyFilesWith copyFileVerbose
-- | This is like 'copyFiles' but uses 'installOrdinaryFile'.
--
installOrdinaryFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
installOrdinaryFiles = copyFilesWith installOrdinaryFile
-- | This is like 'copyFiles' but uses 'installExecutableFile'.
--
installExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)]
-> IO ()
installExecutableFiles = copyFilesWith installExecutableFile
-- | This is like 'copyFiles' but uses 'installMaybeExecutableFile'.
--
installMaybeExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)]
-> IO ()
installMaybeExecutableFiles = copyFilesWith installMaybeExecutableFile
-- | This installs all the files in a directory to a target location,
-- preserving the directory layout. All the files are assumed to be ordinary
-- rather than executable files.
--
installDirectoryContents :: Verbosity -> FilePath -> FilePath -> IO ()
installDirectoryContents verbosity srcDir destDir = do
info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
srcFiles <- getDirectoryContentsRecursive srcDir
installOrdinaryFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ]
-- | Recursively copy the contents of one directory to another path.
copyDirectoryRecursive :: Verbosity -> FilePath -> FilePath -> IO ()
copyDirectoryRecursive verbosity srcDir destDir = do
info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
srcFiles <- getDirectoryContentsRecursive srcDir
copyFilesWith (const copyFile) verbosity destDir [ (srcDir, f) | f <- srcFiles ]
-------------------
-- File permissions
-- | Like 'doesFileExist', but also checks that the file is executable.
doesExecutableExist :: FilePath -> IO Bool
doesExecutableExist f = do
exists <- doesFileExist f
if exists
then do perms <- getPermissions f
return (executable perms)
else return False
---------------------------------
-- Deprecated file copy functions
{-# DEPRECATED smartCopySources
"Use findModuleFiles and copyFiles or installOrdinaryFiles" #-}
smartCopySources :: Verbosity -> [FilePath] -> FilePath
-> [ModuleName] -> [String] -> IO ()
smartCopySources verbosity searchPath targetDir moduleNames extensions =
findModuleFiles searchPath extensions moduleNames
>>= copyFiles verbosity targetDir
{-# DEPRECATED copyDirectoryRecursiveVerbose
"You probably want installDirectoryContents instead" #-}
copyDirectoryRecursiveVerbose :: Verbosity -> FilePath -> FilePath -> IO ()
copyDirectoryRecursiveVerbose verbosity srcDir destDir = do
info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
srcFiles <- getDirectoryContentsRecursive srcDir
copyFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ]
---------------------------
-- Temporary files and dirs
-- | Advanced options for 'withTempFile' and 'withTempDirectory'.
data TempFileOptions = TempFileOptions {
optKeepTempFiles :: Bool -- ^ Keep temporary files?
}
defaultTempFileOptions :: TempFileOptions
defaultTempFileOptions = TempFileOptions { optKeepTempFiles = False }
-- | Use a temporary filename that doesn't already exist.
--
withTempFile :: FilePath -- ^ Temp dir to create the file in
-> String -- ^ File name template. See 'openTempFile'.
-> (FilePath -> Handle -> IO a) -> IO a
withTempFile tmpDir template action =
withTempFileEx defaultTempFileOptions tmpDir template action
-- | A version of 'withTempFile' that additionally takes a 'TempFileOptions'
-- argument.
withTempFileEx :: TempFileOptions
-> FilePath -- ^ Temp dir to create the file in
-> String -- ^ File name template. See 'openTempFile'.
-> (FilePath -> Handle -> IO a) -> IO a
withTempFileEx opts tmpDir template action =
Exception.bracket
(openTempFile tmpDir template)
(\(name, handle) -> do hClose handle
unless (optKeepTempFiles opts) $ removeFile name)
(uncurry action)
-- | Create and use a temporary directory.
--
-- Creates a new temporary directory inside the given directory, making use
-- of the template. The temp directory is deleted after use. For example:
--
-- > withTempDirectory verbosity "src" "sdist." $ \tmpDir -> do ...
--
-- The @tmpDir@ will be a new subdirectory of the given directory, e.g.
-- @src/sdist.342@.
--
withTempDirectory :: Verbosity
-> FilePath -> String -> (FilePath -> IO a) -> IO a
withTempDirectory verbosity targetDir template =
withTempDirectoryEx verbosity defaultTempFileOptions targetDir template
-- | A version of 'withTempDirectory' that additionally takes a
-- 'TempFileOptions' argument.
withTempDirectoryEx :: Verbosity
-> TempFileOptions
-> FilePath -> String -> (FilePath -> IO a) -> IO a
withTempDirectoryEx _verbosity opts targetDir template =
Exception.bracket
(createTempDirectory targetDir template)
(unless (optKeepTempFiles opts) . removeDirectoryRecursive)
-----------------------------------
-- Safely reading and writing files
-- | Gets the contents of a file, but guarantees that it gets closed.
--
-- The file is read lazily but if it is not fully consumed by the action then
-- the remaining input is truncated and the file is closed.
--
withFileContents :: FilePath -> (String -> IO a) -> IO a
withFileContents name action =
Exception.bracket (openFile name ReadMode) hClose
(\hnd -> hGetContents hnd >>= action)
-- | Writes a file atomically.
--
-- The file is either written successfully or an IO exception is raised and
-- the original file is left unchanged.
--
-- On Windows it is not possible to delete a file that is open by a process.
-- This case will give an IO exception but the atomic property is not affected.
--
writeFileAtomic :: FilePath -> BS.ByteString -> IO ()
writeFileAtomic targetPath content = do
let (targetDir, targetFile) = splitFileName targetPath
Exception.bracketOnError
(openBinaryTempFileWithDefaultPermissions targetDir $ targetFile <.> "tmp")
(\(tmpPath, handle) -> hClose handle >> removeFile tmpPath)
(\(tmpPath, handle) -> do
BS.hPut handle content
hClose handle
renameFile tmpPath targetPath)
-- | Write a file but only if it would have new content. If we would be writing
-- the same as the existing content then leave the file as is so that we do not
-- update the file's modification time.
--
-- NB: the file is assumed to be ASCII-encoded.
rewriteFile :: FilePath -> String -> IO ()
rewriteFile path newContent =
flip catchIO mightNotExist $ do
existingContent <- readFile path
_ <- evaluate (length existingContent)
unless (existingContent == newContent) $
writeFileAtomic path (BS.Char8.pack newContent)
where
mightNotExist e | isDoesNotExistError e = writeFileAtomic path
(BS.Char8.pack newContent)
| otherwise = ioError e
-- | The path name that represents the current directory.
-- In Unix, it's @\".\"@, but this is system-specific.
-- (E.g. AmigaOS uses the empty string @\"\"@ for the current directory.)
currentDir :: FilePath
currentDir = "."
shortRelativePath :: FilePath -> FilePath -> FilePath
shortRelativePath from to =
case dropCommonPrefix (splitDirectories from) (splitDirectories to) of
(stuff, path) -> joinPath (map (const "..") stuff ++ path)
where
dropCommonPrefix :: Eq a => [a] -> [a] -> ([a],[a])
dropCommonPrefix (x:xs) (y:ys)
| x == y = dropCommonPrefix xs ys
dropCommonPrefix xs ys = (xs,ys)
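-- For example (an illustrative note, not part of the original module), with
-- POSIX path separators:
--
-- > shortRelativePath "a/b/c" "a/d" == "../../d"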
-- ------------------------------------------------------------
-- * Finding the description file
-- ------------------------------------------------------------
-- |Package description file (/pkgname/@.cabal@)
defaultPackageDesc :: Verbosity -> IO FilePath
defaultPackageDesc _verbosity = tryFindPackageDesc currentDir
-- |Find a package description file in the given directory. Looks for
-- @.cabal@ files.
findPackageDesc :: FilePath -- ^Where to look
-> IO (Either String FilePath) -- ^<pkgname>.cabal
findPackageDesc dir
= do files <- getDirectoryContents dir
-- to make sure we do not mistake a ~/.cabal/ dir for a <pkgname>.cabal
-- file we filter to exclude dirs and null base file names:
cabalFiles <- filterM doesFileExist
[ dir </> file
| file <- files
, let (name, ext) = splitExtension file
, not (null name) && ext == ".cabal" ]
case cabalFiles of
[] -> return (Left noDesc)
[cabalFile] -> return (Right cabalFile)
multiple -> return (Left $ multiDesc multiple)
where
noDesc :: String
noDesc = "No cabal file found.\n"
++ "Please create a package description file <pkgname>.cabal"
multiDesc :: [String] -> String
multiDesc l = "Multiple cabal files found.\n"
++ "Please use only one of: "
++ intercalate ", " l
-- |Like 'findPackageDesc', but calls 'die' in case of error.
tryFindPackageDesc :: FilePath -> IO FilePath
tryFindPackageDesc dir = join . fmap (either die return) $ findPackageDesc dir
-- |Optional auxiliary package information file (/pkgname/@.buildinfo@)
defaultHookedPackageDesc :: IO (Maybe FilePath)
defaultHookedPackageDesc = findHookedPackageDesc currentDir
-- |Find auxiliary package information in the given directory.
-- Looks for @.buildinfo@ files.
findHookedPackageDesc
:: FilePath -- ^Directory to search
-> IO (Maybe FilePath) -- ^/dir/@\/@/pkgname/@.buildinfo@, if present
findHookedPackageDesc dir = do
files <- getDirectoryContents dir
buildInfoFiles <- filterM doesFileExist
[ dir </> file
| file <- files
, let (name, ext) = splitExtension file
, not (null name) && ext == buildInfoExt ]
case buildInfoFiles of
[] -> return Nothing
[f] -> return (Just f)
_ -> die ("Multiple files with extension " ++ buildInfoExt)
buildInfoExt :: String
buildInfoExt = ".buildinfo"
-- ------------------------------------------------------------
-- * Unicode stuff
-- ------------------------------------------------------------
-- This is a modification of the UTF8 code from gtk2hs and the
-- utf8-string package.
fromUTF8 :: String -> String
fromUTF8 [] = []
fromUTF8 (c:cs)
| c <= '\x7F' = c : fromUTF8 cs
| c <= '\xBF' = replacementChar : fromUTF8 cs
| c <= '\xDF' = twoBytes c cs
| c <= '\xEF' = moreBytes 3 0x800 cs (ord c .&. 0xF)
| c <= '\xF7' = moreBytes 4 0x10000 cs (ord c .&. 0x7)
| c <= '\xFB' = moreBytes 5 0x200000 cs (ord c .&. 0x3)
| c <= '\xFD' = moreBytes 6 0x4000000 cs (ord c .&. 0x1)
| otherwise = replacementChar : fromUTF8 cs
where
twoBytes c0 (c1:cs')
| ord c1 .&. 0xC0 == 0x80
= let d = ((ord c0 .&. 0x1F) `shiftL` 6)
.|. (ord c1 .&. 0x3F)
in if d >= 0x80
then chr d : fromUTF8 cs'
else replacementChar : fromUTF8 cs'
twoBytes _ cs' = replacementChar : fromUTF8 cs'
moreBytes :: Int -> Int -> [Char] -> Int -> [Char]
moreBytes 1 overlong cs' acc
| overlong <= acc && acc <= 0x10FFFF
&& (acc < 0xD800 || 0xDFFF < acc)
&& (acc < 0xFFFE || 0xFFFF < acc)
= chr acc : fromUTF8 cs'
| otherwise
= replacementChar : fromUTF8 cs'
moreBytes byteCount overlong (cn:cs') acc
| ord cn .&. 0xC0 == 0x80
= moreBytes (byteCount-1) overlong cs'
((acc `shiftL` 6) .|. ord cn .&. 0x3F)
moreBytes _ _ cs' _
= replacementChar : fromUTF8 cs'
replacementChar = '\xfffd'
toUTF8 :: String -> String
toUTF8 [] = []
toUTF8 (c:cs)
| c <= '\x07F' = c
: toUTF8 cs
| c <= '\x7FF' = chr (0xC0 .|. (w `shiftR` 6))
: chr (0x80 .|. (w .&. 0x3F))
: toUTF8 cs
| c <= '\xFFFF'= chr (0xE0 .|. (w `shiftR` 12))
: chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F))
: chr (0x80 .|. (w .&. 0x3F))
: toUTF8 cs
| otherwise = chr (0xf0 .|. (w `shiftR` 18))
: chr (0x80 .|. ((w `shiftR` 12) .&. 0x3F))
: chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F))
: chr (0x80 .|. (w .&. 0x3F))
: toUTF8 cs
where w = ord c
-- | Whether BOM is at the beginning of the input
startsWithBOM :: String -> Bool
startsWithBOM ('\xFEFF':_) = True
startsWithBOM _ = False
-- | Check whether a file has Unicode byte order mark (BOM).
fileHasBOM :: FilePath -> IO Bool
fileHasBOM f = fmap (startsWithBOM . fromUTF8)
. hGetContents =<< openBinaryFile f ReadMode
-- | Ignore a Unicode byte order mark (BOM) at the beginning of the input
--
ignoreBOM :: String -> String
ignoreBOM ('\xFEFF':string) = string
ignoreBOM string = string
-- | Reads a UTF8 encoded text file as a Unicode String
--
-- Reads lazily using ordinary 'readFile'.
--
readUTF8File :: FilePath -> IO String
readUTF8File f = fmap (ignoreBOM . fromUTF8)
. hGetContents =<< openBinaryFile f ReadMode
-- | Reads a UTF8 encoded text file as a Unicode String
--
-- Same behaviour as 'withFileContents'.
--
withUTF8FileContents :: FilePath -> (String -> IO a) -> IO a
withUTF8FileContents name action =
Exception.bracket
(openBinaryFile name ReadMode)
hClose
(\hnd -> hGetContents hnd >>= action . ignoreBOM . fromUTF8)
-- | Writes a Unicode String as a UTF8 encoded text file.
--
-- Uses 'writeFileAtomic', so provides the same guarantees.
--
writeUTF8File :: FilePath -> String -> IO ()
writeUTF8File path = writeFileAtomic path . BS.Char8.pack . toUTF8
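-- A tiny round-trip sketch, not part of the original API: write a Unicode
-- 'String' out as UTF-8 and read it straight back.
utf8RoundTripExample :: FilePath -> String -> IO String
utf8RoundTripExample path str = do
  writeUTF8File path str
  readUTF8File path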
-- | Fix different systems' silly line ending conventions
normaliseLineEndings :: String -> String
normaliseLineEndings [] = []
normaliseLineEndings ('\r':'\n':s) = '\n' : normaliseLineEndings s -- windows
normaliseLineEndings ('\r':s) = '\n' : normaliseLineEndings s -- old OS X
normaliseLineEndings ( c :s) = c : normaliseLineEndings s
-- ------------------------------------------------------------
-- * Common utils
-- ------------------------------------------------------------
-- | @dropWhileEndLE p@ is equivalent to @reverse . dropWhile p . reverse@, but
-- quite a bit faster. The difference between "Data.List.dropWhileEnd" and this
-- version is that the one in "Data.List" is strict in elements, but spine-lazy,
-- while this one is spine-strict but lazy in elements. That's what @LE@ stands
-- for - "lazy in elements".
--
-- Example:
--
-- @
-- > tail $ Data.List.dropWhileEnd (<3) [undefined, 5, 4, 3, 2, 1]
-- *** Exception: Prelude.undefined
-- > tail $ dropWhileEndLE (<3) [undefined, 5, 4, 3, 2, 1]
-- [5,4,3]
-- > take 3 $ Data.List.dropWhileEnd (<3) [5, 4, 3, 2, 1, undefined]
-- [5,4,3]
-- > take 3 $ dropWhileEndLE (<3) [5, 4, 3, 2, 1, undefined]
-- *** Exception: Prelude.undefined
-- @
dropWhileEndLE :: (a -> Bool) -> [a] -> [a]
dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) []
-- | @takeWhileEndLE p@ is equivalent to @reverse . takeWhile p . reverse@, but
-- is usually faster (as well as being easier to read).
takeWhileEndLE :: (a -> Bool) -> [a] -> [a]
takeWhileEndLE p = fst . foldr go ([], False)
where
go x (rest, done)
| not done && p x = (x:rest, False)
| otherwise = (rest, True)
-- | Like "Data.List.nub", but has @O(n log n)@ complexity instead of
-- @O(n^2)@. Code for 'ordNub' and 'listUnion' taken from Niklas Hambüchen's
-- <http://github.com/nh2/haskell-ordnub ordnub> package.
ordNub :: (Ord a) => [a] -> [a]
ordNub l = go Set.empty l
where
go _ [] = []
go s (x:xs) = if x `Set.member` s then go s xs
else x : go (Set.insert x s) xs
-- | Like "Data.List.union", but has @O(n log n)@ complexity instead of
-- @O(n^2)@.
listUnion :: (Ord a) => [a] -> [a] -> [a]
listUnion a b = a ++ ordNub (filter (`Set.notMember` aSet) b)
where
aSet = Set.fromList a
-- | A right-biased version of 'ordNub'.
--
-- Example:
--
-- @
-- > ordNub [1,2,1]
-- [1,2]
-- > ordNubRight [1,2,1]
-- [2,1]
-- @
ordNubRight :: (Ord a) => [a] -> [a]
ordNubRight = fst . foldr go ([], Set.empty)
where
go x p@(l, s) = if x `Set.member` s then p
else (x:l, Set.insert x s)
-- | A right-biased version of 'listUnion'.
--
-- Example:
--
-- @
-- > listUnion [1,2,3,4,3] [2,1,1]
-- [1,2,3,4,3]
-- > listUnionRight [1,2,3,4,3] [2,1,1]
-- [4,3,2,1,1]
-- @
listUnionRight :: (Ord a) => [a] -> [a] -> [a]
listUnionRight a b = ordNubRight (filter (`Set.notMember` bSet) a) ++ b
where
bSet = Set.fromList b
equating :: Eq a => (b -> a) -> b -> b -> Bool
equating p x y = p x == p y
comparing :: Ord a => (b -> a) -> b -> b -> Ordering
comparing p x y = p x `compare` p y
isInfixOf :: String -> String -> Bool
isInfixOf needle haystack = any (isPrefixOf needle) (tails haystack)
lowercase :: String -> String
lowercase = map Char.toLower
|
x-y-z/cabal
|
Cabal/Distribution/Simple/Utils.hs
|
bsd-3-clause
| 52,900
| 0
| 21
| 14,278
| 11,332
| 5,977
| 5,355
| 872
| 7
|