code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( makeApplication
, getApplicationDev
, makeFoundation
) where
import Import
import Settings
import Yesod.Auth
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers
import Yesod.Logger (Logger, logBS, toProduction)
import Network.Wai.Middleware.RequestLogger (logCallback, logCallbackDev)
import qualified Database.Persist.Store
import Database.Persist.GenericSql (runMigration)
import Network.HTTP.Conduit (newManager, def)
import ControlState
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
-- This line actually creates our YesodSite instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see
-- the comments there for more details.
mkYesodDispatch "App" resourcesApp
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
-- | Build the full WAI 'Application': allocate the foundation's resources,
-- convert it to a plain WAI app, and wrap it in request-logging middleware.
-- In development the verbose 'logCallbackDev' middleware is used with the
-- raw logger; in production the logger is switched to its production
-- variant and the terser 'logCallback' middleware is used.
makeApplication :: AppConfig DefaultEnv Extra -> Logger -> IO Application
makeApplication conf logger = do
    foundation <- makeFoundation conf setLogger
    app <- toWaiAppPlain foundation
    return $ logWare app
  where
    -- Production loggers are converted with 'toProduction'; dev keeps the
    -- raw logger for immediate output.
    setLogger = if development then logger else toProduction logger
    logWare = if development then logCallbackDev (logBS setLogger)
                             else logCallback (logBS setLogger)
-- | Allocate the application's long-lived resources: an HTTP connection
-- manager, the static-file site, and a SQLite persistence configuration
-- loaded from @config/sqlite.yml@ for the current environment (with
-- environment-variable overrides applied).  Runs the persistent migrations
-- on the freshly created pool, opens the 'ControlState' connection, and
-- returns the assembled 'App' foundation value.
makeFoundation :: AppConfig DefaultEnv Extra -> Logger -> IO App
makeFoundation conf setLogger = do
    manager <- newManager def
    s <- staticSite
    dbconf <- withYamlEnvironment "config/sqlite.yml" (appEnv conf)
              Database.Persist.Store.loadConfig >>=
              Database.Persist.Store.applyEnv
    p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
    Database.Persist.Store.runPool dbconf (runMigration migrateAll) p
    -- NOTE(review): hard-coded address handed to 'beginIt'; presumably a
    -- device on the local network -- confirm and consider moving it into
    -- the extra settings.
    cs <- beginIt "172.31.99.130"
    return $ App conf setLogger s p manager dbconf cs
-- for yesod devel
-- | Entry point used by @yesod devel@: load the Development configuration
-- (including the app-specific extra settings via 'parseExtra') and hand
-- 'makeApplication' to the default development server, yielding the port
-- and the application.
getApplicationDev :: IO (Int, Application)
getApplicationDev =
    defaultDevelApp loader makeApplication
  where
    loader = loadConfig (configSettings Development)
                { csParseExtra = parseExtra
                }
| hce/lights | Application.hs | bsd-2-clause | 2,366 | 0 | 11 | 434 | 448 | 244 | 204 | -1 | -1 |
module GameSpec
( spec
, completeGame
, incompleteGame
) where
import Control.Exception (evaluate)
import Test.Hspec
import Test.QuickCheck
import Game
import PartialBoard (emptyBoard, flipTileAtWith)
import PartialBoardSpec (completePartialBoard, consistentPartialBoard, incompletePartialBoard, inconsistentPartialBoard)
-- | Generator for games whose underlying 'PartialBoard' is complete.
completeGame :: Gen Game
completeGame = fmap (uncurry game) completePartialBoard
-- | Generator for games whose underlying 'PartialBoard' is incomplete.
incompleteGame :: Gen Game
incompleteGame = fmap (uncurry game) incompletePartialBoard
-- | Arbitrary games pair an arbitrary Board with a consistent
-- PartialBoard, so the consistency check inside 'game' never fires.
instance Arbitrary Game where
  arbitrary = uncurry game <$> consistentPartialBoard
-- | Hspec/QuickCheck specification for the Game module: the
-- 'game'/'unGame' round trip, 'newGame' construction, 'flipTileAt'
-- behaviour, and the 'isComplete' predicate.
spec :: Spec
spec = do
  describe "game" $ do
    context "given a Board and a consistent PartialBoard" $ do
      it "is inverted by unGame" $ property $
        forAll (consistentPartialBoard) $ \(b, pb) ->
          unGame (game b pb) `shouldBe` (b, pb)
    context "given a Board and an inconsistent PartialBoard" $ do
      it "returns an error" $ property $
        forAll (inconsistentPartialBoard) $ \(b, pb) ->
          evaluate (game b pb) `shouldThrow` errorCall "PartialBoard is not consistent with Board"
  describe "newGame" $ do
    it "returns a valid Game" $ property $
      \b -> newGame b `shouldSatisfy` isValidGame
    it "is inverted by unGame" $ property $
      \b -> unGame (newGame b) `shouldBe` (b, emptyBoard)
  describe "flipTileAt" $ do
    it "returns a valid Game" $ property $
      -- BUG FIX: this property previously called 'flipTileAt g p', with
      -- the arguments swapped relative to every other call site in this
      -- spec (which all use 'flipTileAt p g', Position first).
      \g p -> flipTileAt p g `shouldSatisfy` isValidGame
    it "does not modify the Board" $ property $
      \g p -> let (b, _) = unGame g in (getGameBoard $ flipTileAt p g) `shouldBe` b
    it "flips the Tile at the given Position of the PartialBoard" $ property $
      \g p -> let (b, pb) = unGame g in (getGamePartialBoard $ flipTileAt p g) `shouldBe` flipTileAtWith b p pb
  describe "isComplete" $ do
    context "given a complete PartialBoard" $ do
      it "returns True" $ property $
        forAll (completeGame) $ \g ->
          isComplete g `shouldBe` True
    context "given an incomplete PartialBoard" $ do
      it "returns False" $ property $
        forAll (incompleteGame) $ \g ->
          isComplete g `shouldBe` False
| jameshales/voltorb-flip | test/GameSpec.hs | bsd-3-clause | 2,140 | 0 | 18 | 487 | 632 | 321 | 311 | 48 | 1 |
import System.Environment
import Text.ICalendar.Folding
{-
- The aim of this test is to ensure that lines get unfolded correctly.
- For now, taking two input file names it should tell us wether or not one is the unfolded version
- of the other and vice versa.
-}
-- | Number of command-line arguments the program expects: the folded file
-- and the unfolded file.  (Type signature added; every top-level binding
-- should carry one.)
arg_count :: Int
arg_count = 2
-- | Round-trip check for line (un)folding: given a folded file and its
-- unfolded counterpart, report whether 'unfold' maps the first onto the
-- second and 'fold' maps the second back onto the first.
--
-- The original bound the file names with the partial irrefutable pattern
-- @let [a, b] = args@ guarded by a separate length check; this version
-- uses a single total case match instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [folded_file, unfolded_file] -> do
      foldedText   <- readFile folded_file
      unfoldedText <- readFile unfolded_file
      -- Check both directions of the transformation.
      putStrLn $ if unfold foldedText == unfoldedText
                   then "Unfold Worked."
                   else "Unfold FAILED."
      putStrLn $ if fold unfoldedText == foldedText
                   then "Fold Worked."
                   else "Fold FAILED."
    _ -> error ("Expected two arguments but got: " ++ show args)
| robertmassaioli/hICalendar | test/FoldingCheck.hs | bsd-3-clause | 1,082 | 0 | 14 | 421 | 159 | 78 | 81 | 18 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Ivory.Language.Syntax.AST where
import Prelude ()
import Prelude.Compat
import Ivory.Language.Syntax.Concrete.Location
import Ivory.Language.Syntax.Names
import Ivory.Language.Syntax.Type
import Language.Haskell.TH.Lift (deriveLiftMany)
#if __GLASGOW_HASKELL__ < 709
import Language.Haskell.TH.Syntax (Lift (..))
#endif
import Data.Ratio (denominator,
numerator)
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup (Semigroup(..))
#endif
-- Modules ---------------------------------------------------------------------
-- | An external module that defines an imported resource. A header file in C
-- is an example of this.
type ModulePath = String
-- | A pair of definition lists partitioned by visibility, used for the
-- procs, structs and memory areas a module either exports publicly or
-- keeps private.
data Visible a = Visible
  { public :: [a]
  , private :: [a]
  } deriving (Show, Eq, Ord)

-- Concatenate both partitions pairwise.  Associative, and 'mempty' (two
-- empty lists) is a genuine identity, so the Monoid laws hold.
instance Semigroup (Visible a) where
  Visible l0 l1 <> Visible m0 m1 = Visible (l0 ++ m0) (l1 ++ m1)

instance Monoid (Visible a) where
  mempty = Visible [] []
  mappend = (<>)
-- | The name of a module defined in Ivory.
type ModuleName = String
data Module = Module
{ modName :: ModuleName
-- ^ The name of this module
, modHeaders :: [FilePath]
-- ^ Included headers: lists instead of Sets because some systems depend on
-- a particular header order.
, modDepends :: [ModuleName]
-- ^ Named module dependencies: lists instead of Sets because some systems
-- depend on a particular header order.
, modExterns :: [Extern]
, modImports :: [Import]
, modProcs :: Visible Proc
, modStructs :: Visible Struct
, modAreas :: Visible Area
, modAreaImports :: [AreaImport]
} deriving (Show, Eq, Ord)
instance Semigroup Module where
l <> r = Module
{ modName = modName (if null (modName l) then r else l)
, modHeaders = modHeaders l <> modHeaders r
, modDepends = modDepends l <> modDepends r
, modExterns = modExterns l <> modExterns r
, modImports = modImports l <> modImports r
, modProcs = modProcs l <> modProcs r
, modStructs = modStructs l <> modStructs r
, modAreas = modAreas l <> modAreas r
, modAreaImports = modAreaImports l <> modAreaImports r
}
instance Monoid Module where
mempty = Module
{ modName = ""
, modHeaders = []
, modDepends = []
, modExterns = []
, modImports = []
, modProcs = mempty
, modStructs = mempty
, modAreas = mempty
, modAreaImports = []
}
mappend = (<>)
-- Imported Functions ----------------------------------------------------------
-- | Functions that are defined in a c header.
data Import = Import
{ importSym :: Sym
, importFile :: ModulePath
, importRetTy :: Type
, importArgs :: [Typed Var]
, importRequires :: [Require]
, importEnsures :: [Ensure]
} deriving (Show, Eq, Ord)
-- Procedures ------------------------------------------------------------------
-- | Functions defined in the language.
data Proc = Proc
{ procSym :: Sym
, procRetTy :: Type
, procArgs :: [Typed Var]
, procBody :: Block
, procRequires :: [Require]
, procEnsures :: [Ensure]
} deriving (Show, Eq, Ord)
-- Structure Definitions -------------------------------------------------------
data Struct
= Struct String [Typed String]
| Abstract String ModulePath
deriving (Show, Eq, Ord)
-- | The name of a struct definition, whether concrete or abstract.
structName :: Struct -> String
structName (Struct n _)   = n
structName (Abstract n _) = n
-- Global Memory Areas ---------------------------------------------------------
data Area = Area
{ areaSym :: Sym
, areaConst :: Bool
, areaType :: Type
, areaInit :: Init
} deriving (Show, Eq, Ord)
-- Imported Memory Areas -------------------------------------------------------
data AreaImport = AreaImport
{ aiSym :: Sym
, aiConst :: Bool
, aiFile :: ModulePath
} deriving (Show, Eq, Ord)
-- Statements ------------------------------------------------------------------
type Block = [Stmt]
data Stmt
= IfTE Expr Block Block
-- ^ If-then-else statement. The @Expr@ argument will be typed as an
-- @IBool@.
| Assert Expr
-- ^ Boolean-valued assertions. The @Expr@ argument will be typed as an
-- @IBool@.
| CompilerAssert Expr
-- ^ Compiler-inserted assertion (as opposed to user-level assertions).
-- These are expected to be correct (e.g., no overflow, etc). Not exported.
| Assume Expr
-- ^ Boolean-valued assumptions. The @Expr@ argument will be typed as an
-- @IBool@.
| Return (Typed Expr)
-- ^ Returning a value.
| ReturnVoid
-- ^ Returning void.
| Deref Type Var Expr
-- ^ Reference dereferencing. The type parameter refers to the type of the
-- referenced value, not the reference itself; the expression to be
-- dereferenced is assumed to always be a reference.
| Store Type Expr Expr
-- ^ Storing to a reference. The type parameter refers to the type of the
-- referenced value, not the reference itself; the expression to be
-- dereferenced is assumed to always be a reference.
| Assign Type Var Expr
-- ^ Simple assignment.
| Call Type (Maybe Var) Name [Typed Expr]
-- ^ Function call. The optional variable is where to store the result. It
-- is expected that the @Expr@ passed for the function symbol will have the
-- same type as the combination of the types for the arguments, and the
-- return type.
| Local Type Var Init
-- ^ Stack allocation. The type parameter is not a reference at this point;
-- references are allocated separately to the stack-allocated data.
| RefCopy Type Expr Expr
-- ^ Ref copy. Copy the second variable reference to the first (like
-- memcopy). The type is the dereferenced value of the variables.
| RefZero Type Expr
-- ^ Ref zero. Zero out the memory associated with the reference. The type
-- parameter is not a reference, but the referenced type.
| AllocRef Type Var Name
-- ^ Reference allocation. The type parameter is not a reference, but the
-- referenced type.
| Loop Integer Var Expr LoopIncr Block
-- ^ Looping: arguments are the maximum number of iterations of the loop,
-- loop variable, start value, break condition (for increment or decrement),
-- and block.
| Forever Block
-- ^ Nonterminting loop
| Break
-- ^ Break out of a loop
| Comment Comment
-- ^ User comment, can be used to output a comment in the backend.
deriving (Show, Eq, Ord)
data LoopIncr
= IncrTo Expr
| DecrTo Expr
deriving (Show, Eq, Ord)
data Name
= NameSym Sym
| NameVar Var
deriving (Show, Eq, Ord)
data Comment = UserComment String
| SourcePos SrcLoc
deriving (Show, Eq, Ord)
-- Conditions ------------------------------------------------------------------
data Cond
= CondBool Expr
-- ^ Boolean Expressions
| CondDeref Type Expr Var Cond
-- ^ Dereference introduction. The type is the type of the dereferenced
-- thing, not the reference itself.
deriving (Show, Eq, Ord)
-- Pre-conditions --------------------------------------------------------------
newtype Require = Require
{ getRequire :: Cond
} deriving (Show, Eq, Ord)
-- Post-conditions -------------------------------------------------------------
-- | Ensure statements describe properties of the return value for the function
-- they annotate. The return value is referenced through the special internal
-- variable, "retval".
newtype Ensure = Ensure
{ getEnsure :: Cond
} deriving (Show, Eq, Ord)
-- Imported symbols ------------------------------------------------------------
-- | External Symbols.
data Extern = Extern
{ externSym :: Sym
, externFile :: ModulePath
, externType :: Type
} deriving (Show, Eq, Ord)
-- Expressions -----------------------------------------------------------------
data Expr
= ExpSym Sym
-- ^ Symbols
| ExpExtern Extern
-- ^ Imported symbols
| ExpVar Var
-- ^ Variables
| ExpLit Literal
-- ^ Literals
| ExpLabel Type Expr String
-- ^ Struct label indexing.
| ExpIndex Type Expr Type Expr -- XXX Do we need the 2nd (index) Type?
-- ^ Array indexing. The type is the type of the array being indexed, it's
-- implied that the expression with the array in it is a reference.
| ExpToIx Expr Integer
-- ^ Cast from an expression to an index (Ix) used in loops and array
-- indexing. The Integer is the maximum bound.
| ExpSafeCast Type Expr
-- ^ Type-safe casting. The type is the type casted from.
| ExpOp ExpOp [Expr]
-- ^ Primitive expression operators
| ExpAddrOfGlobal Sym
-- ^ Take the address of a global memory area, introduced through a MemArea
-- *only*.
| ExpMaxMin Bool
-- ^ True is max value, False is min value for the type.
| ExpSizeOf Type
-- ^ Return the allocation size of the given type.
deriving (Show, Eq, Ord)
-- Expression Operators --------------------------------------------------------
data ExpOp
= ExpEq Type
| ExpNeq Type
| ExpCond
| ExpGt Bool Type
-- ^ True is >=, False is >
| ExpLt Bool Type
-- ^ True is <=, False is <
| ExpNot
| ExpAnd
| ExpOr
| ExpMul
| ExpAdd
| ExpSub
| ExpNegate
| ExpAbs
| ExpSignum
| ExpDiv
| ExpMod
| ExpRecip
| ExpFExp
| ExpFSqrt
| ExpFLog
| ExpFPow
| ExpFLogBase
| ExpFSin
| ExpFTan
| ExpFCos
| ExpFAsin
| ExpFAtan
| ExpFAtan2
| ExpFAcos
| ExpFSinh
| ExpFTanh
| ExpFCosh
| ExpFAsinh
| ExpFAtanh
| ExpFAcosh
| ExpIsNan Type
| ExpIsInf Type
| ExpRoundF
| ExpCeilF
| ExpFloorF
| ExpBitAnd
| ExpBitOr
| ExpBitXor
| ExpBitComplement
| ExpBitShiftL
| ExpBitShiftR
deriving (Show, Eq, Ord)
-- | Numeric syntax for 'Expr': arithmetic builds AST nodes rather than
-- computing values.  'negate' constant-folds integer, float and double
-- literals so that negative literals remain literals; any other operand
-- gets an explicit 'ExpNegate' node.
instance Num Expr where
  l * r = ExpOp ExpMul [l,r]
  l + r = ExpOp ExpAdd [l,r]
  l - r = ExpOp ExpSub [l,r]
  abs e = ExpOp ExpAbs [e]
  signum e = ExpOp ExpSignum [e]
  negate (ExpLit (LitInteger i)) = ExpLit (LitInteger (negate i))
  negate (ExpLit (LitFloat f)) = ExpLit (LitFloat (negate f))
  negate (ExpLit (LitDouble d)) = ExpLit (LitDouble (negate d))
  negate e = ExpOp ExpNegate [e]
  fromInteger i = ExpLit (LitInteger i)
-- | Bounds are symbolic: 'ExpMaxMin' True/False nodes that are resolved
-- to the concrete type's extremes later in the pipeline.
instance Bounded Expr where
  minBound = ExpMaxMin False
  maxBound = ExpMaxMin True
-- | Division and reciprocal build AST nodes; a rational literal is lowered
-- to an explicit division of its numerator literal by its denominator
-- literal.
instance Fractional Expr where
  l / r = ExpOp ExpDiv [l,r]
  recip a = ExpOp ExpRecip [a]
  fromRational a = fromInteger (numerator a) / fromInteger (denominator a)
-- | Floating-point operations as AST constructors: each method builds the
-- corresponding 'ExpOp' node instead of computing a value.
-- NOTE(review): 'pi' is deliberately unimplemented and raises an error;
-- callers must spell the constant out as a literal themselves.
instance Floating Expr where
  pi = error "pi not implemented for Expr"
  exp e = ExpOp ExpFExp [e]
  sqrt e = ExpOp ExpFSqrt [e]
  log e = ExpOp ExpFLog [e]
  a ** b = ExpOp ExpFPow [a,b]
  logBase a b = ExpOp ExpFLogBase [a,b]
  sin e = ExpOp ExpFSin [e]
  tan e = ExpOp ExpFTan [e]
  cos e = ExpOp ExpFCos [e]
  asin e = ExpOp ExpFAsin [e]
  atan e = ExpOp ExpFAtan [e]
  acos e = ExpOp ExpFAcos [e]
  sinh e = ExpOp ExpFSinh [e]
  tanh e = ExpOp ExpFTanh [e]
  cosh e = ExpOp ExpFCosh [e]
  asinh e = ExpOp ExpFAsinh [e]
  atanh e = ExpOp ExpFAtanh [e]
  acosh e = ExpOp ExpFAcosh [e]
-- Literals --------------------------------------------------------------------
data Literal
= LitInteger Integer
| LitFloat Float
| LitDouble Double
| LitChar Char
| LitBool Bool
| LitNull
| LitString String
deriving (Show, Eq, Ord)
-- Initializers ----------------------------------------------------------------
-- | An initializer with no 'InitExpr' fields corresponds to @{0}@.
zeroInit :: Init
zeroInit = InitZero
data Init
= InitZero -- ^ @ {} @
| InitExpr Type Expr -- ^ @ expr @
| InitStruct [(String,Init)] -- ^ @ { .f1 = i1, ..., .fn = in } @
| InitArray [Init] Bool -- ^ @ { i1, ..., in } @
-- Bool true if no unused initialization values.
deriving (Show, Eq, Ord)
-- TH Lifting ------------------------------------------------------------------
deriveLiftMany
[ ''Module, ''Visible, ''AreaImport, ''Area, ''Struct
, ''Import
, ''Extern
, ''Proc, ''Ensure, ''Require, ''Cond
, ''Name
, ''Stmt, ''LoopIncr, ''Comment, ''SrcLoc, ''Range, ''Position
, ''Expr, ''ExpOp, ''Literal, ''Init
]
#if __GLASGOW_HASKELL__ < 709
instance Lift Double where
lift = lift . toRational
instance Lift Float where
lift = lift . toRational
#endif
| GaloisInc/ivory | ivory/src/Ivory/Language/Syntax/AST.hs | bsd-3-clause | 12,914 | 0 | 13 | 3,429 | 2,598 | 1,498 | 1,100 | 269 | 2 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns -Werror #-}
{-| Binary instances for the core datatypes -}
module Idris.Core.Binary where
import Data.Binary
import Data.Vector.Binary
import qualified Data.Text as T
import Idris.Core.TT
-- | Tag bytes: 0 = TextPart, 1 = NamePart, 2 = TermPart, 3 = SubReport.
-- The numbering is part of the serialised format and must not change, or
-- previously written data becomes unreadable.
instance Binary ErrorReportPart where
  put (TextPart msg) = do putWord8 0 ; put msg
  put (NamePart n) = do putWord8 1 ; put n
  put (TermPart t) = do putWord8 2 ; put t
  put (SubReport ps) = do putWord8 3 ; put ps
  get = do i <- getWord8
           case i of
             0 -> fmap TextPart get
             1 -> fmap NamePart get
             2 -> fmap TermPart get
             3 -> fmap SubReport get
             _ -> error "Corrupted binary data for ErrorReportPart"
instance Binary a => Binary (Err' a) where
put (Msg str) = do putWord8 0
put str
put (InternalMsg str) = do putWord8 1
put str
put (CantUnify x y z e ctxt i) = do putWord8 2
put x
put y
put z
put e
put ctxt
put i
put (InfiniteUnify n t ctxt) = do putWord8 3
put n
put t
put ctxt
put (CantConvert x y ctxt) = do putWord8 4
put x
put y
put ctxt
put (CantSolveGoal x ctxt) = do putWord8 5
put x
put ctxt
put (UnifyScope n1 n2 x ctxt) = do putWord8 6
put n1
put n2
put x
put ctxt
put (CantInferType str) = do putWord8 7
put str
put (NonFunctionType t1 t2) = do putWord8 8
put t1
put t2
put (NotEquality t1 t2) = do putWord8 9
put t1
put t2
put (TooManyArguments n) = do putWord8 10
put n
put (CantIntroduce t) = do putWord8 11
put t
put (NoSuchVariable n) = do putWord8 12
put n
put (NoTypeDecl n) = do putWord8 13
put n
put (NotInjective x y z) = do putWord8 14
put x
put y
put z
put (CantResolve t) = do putWord8 15
put t
put (CantResolveAlts ns) = do putWord8 16
put ns
put (IncompleteTerm t) = do putWord8 17
put t
put UniverseError = putWord8 18
put (UniqueError u n) = do putWord8 19
put u
put n
put (UniqueKindError u n) = do putWord8 20
put u
put n
put ProgramLineComment = putWord8 21
put (Inaccessible n) = do putWord8 22
put n
put (NonCollapsiblePostulate n) = do putWord8 23
put n
put (AlreadyDefined n) = do putWord8 24
put n
put (ProofSearchFail e) = do putWord8 25
put e
put (NoRewriting t) = do putWord8 26
put t
put (At fc e) = do putWord8 27
put fc
put e
put (Elaborating str n e) = do putWord8 28
put str
put n
put e
put (ElaboratingArg n1 n2 ns e) = do putWord8 29
put n1
put n2
put ns
put e
put (ProviderError str) = do putWord8 30
put str
put (LoadingFailed str e) = do putWord8 31
put str
put e
put (ReflectionError parts e) = do putWord8 32
put parts
put e
put (ReflectionFailed str e) = do putWord8 33
put str
put e
put (WithFnType t) = do putWord8 34
put t
get = do i <- getWord8
case i of
0 -> fmap Msg get
1 -> fmap InternalMsg get
2 -> do x <- get ; y <- get ; z <- get ; e <- get ; ctxt <- get ; i <- get
return $ CantUnify x y z e ctxt i
3 -> do x <- get ; y <- get ; z <- get
return $ InfiniteUnify x y z
4 -> do x <- get ; y <- get ; z <- get
return $ CantConvert x y z
5 -> do x <- get ; y <- get
return $ CantSolveGoal x y
6 -> do w <- get ; x <- get ; y <- get ; z <- get
return $ UnifyScope w x y z
7 -> fmap CantInferType get
8 -> do x <- get ; y <- get
return $ NonFunctionType x y
9 -> do x <- get ; y <- get
return $ NotEquality x y
10 -> fmap TooManyArguments get
11 -> fmap CantIntroduce get
12 -> fmap NoSuchVariable get
13 -> fmap NoTypeDecl get
14 -> do x <- get ; y <- get ; z <- get
return $ NotInjective x y z
15 -> fmap CantResolve get
16 -> fmap CantResolveAlts get
17 -> fmap IncompleteTerm get
18 -> return UniverseError
19 -> do x <- get ; y <- get
return $ UniqueError x y
20 -> do x <- get ; y <- get
return $ UniqueKindError x y
21 -> return ProgramLineComment
22 -> fmap Inaccessible get
23 -> fmap NonCollapsiblePostulate get
24 -> fmap AlreadyDefined get
25 -> fmap ProofSearchFail get
26 -> fmap NoRewriting get
27 -> do x <- get ; y <- get
return $ At x y
28 -> do x <- get ; y <- get ; z <- get
return $ Elaborating x y z
29 -> do w <- get ; x <- get ; y <- get ; z <- get
return $ ElaboratingArg w x y z
30 -> fmap ProviderError get
31 -> do x <- get ; y <- get
return $ LoadingFailed x y
32 -> do x <- get ; y <- get
return $ ReflectionError x y
33 -> do x <- get ; y <- get
return $ ReflectionFailed x y
34 -> fmap WithFnType get
_ -> error "Corrupted binary data for Err'"
----- Generated by 'derive'
-- | Source locations are packed to save space: each (line, column) pair is
-- stored as the single Integer @line * 65536 + column@ and unpacked with
-- div/mod on read.
-- NOTE(review): this round-trips only while the column stays below 65536;
-- a longer line would silently corrupt the stored position.
instance Binary FC where
  put (FC x1 (x2, x3) (x4, x5))
    = do put x1
         put (x2 * 65536 + x3)
         put (x4 * 65536 + x5)
  get
    = do x1 <- get
         x2x3 <- get
         x4x5 <- get
         return (FC x1 (x2x3 `div` 65536, x2x3 `mod` 65536) (x4x5 `div` 65536, x4x5 `mod` 65536))
instance Binary Name where
put x
= case x of
UN x1 -> do putWord8 0
put x1
NS x1 x2 -> do putWord8 1
put x1
put x2
MN x1 x2 -> do putWord8 2
put x1
put x2
NErased -> putWord8 3
SN x1 -> do putWord8 4
put x1
SymRef x1 -> do putWord8 5
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (UN x1)
1 -> do x1 <- get
x2 <- get
return (NS x1 x2)
2 -> do x1 <- get
x2 <- get
return (MN x1 x2)
3 -> return NErased
4 -> do x1 <- get
return (SN x1)
5 -> do x1 <- get
return (SymRef x1)
_ -> error "Corrupted binary data for Name"
-- | Texts are serialised by round-tripping through the String conversions
-- 'str' and 'txt' from Idris.Core.TT.
instance Binary T.Text where
  put t = put (str t)
  get = fmap txt get
instance Binary SpecialName where
put x
= case x of
WhereN x1 x2 x3 -> do putWord8 0
put x1
put x2
put x3
InstanceN x1 x2 -> do putWord8 1
put x1
put x2
ParentN x1 x2 -> do putWord8 2
put x1
put x2
MethodN x1 -> do putWord8 3
put x1
CaseN x1 -> do putWord8 4; put x1
ElimN x1 -> do putWord8 5; put x1
InstanceCtorN x1 -> do putWord8 6; put x1
WithN x1 x2 -> do putWord8 7
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (WhereN x1 x2 x3)
1 -> do x1 <- get
x2 <- get
return (InstanceN x1 x2)
2 -> do x1 <- get
x2 <- get
return (ParentN x1 x2)
3 -> do x1 <- get
return (MethodN x1)
4 -> do x1 <- get
return (CaseN x1)
5 -> do x1 <- get
return (ElimN x1)
6 -> do x1 <- get
return (InstanceCtorN x1)
7 -> do x1 <- get
x2 <- get
return (WithN x1 x2)
_ -> error "Corrupted binary data for SpecialName"
instance Binary Const where
put x
= case x of
I x1 -> do putWord8 0
put x1
BI x1 -> do putWord8 1
put x1
Fl x1 -> do putWord8 2
put x1
Ch x1 -> do putWord8 3
put x1
Str x1 -> do putWord8 4
put x1
B8 x1 -> putWord8 5 >> put x1
B16 x1 -> putWord8 6 >> put x1
B32 x1 -> putWord8 7 >> put x1
B64 x1 -> putWord8 8 >> put x1
(AType (ATInt ITNative)) -> putWord8 9
(AType (ATInt ITBig)) -> putWord8 10
(AType ATFloat) -> putWord8 11
(AType (ATInt ITChar)) -> putWord8 12
StrType -> putWord8 13
PtrType -> putWord8 14
Forgot -> putWord8 15
(AType (ATInt (ITFixed ity))) -> putWord8 (fromIntegral (16 + fromEnum ity)) -- 16-19 inclusive
(AType (ATInt (ITVec ity count))) -> do
putWord8 20
putWord8 (fromIntegral . fromEnum $ ity)
putWord8 (fromIntegral count)
B8V x1 -> putWord8 21 >> put x1
B16V x1 -> putWord8 22 >> put x1
B32V x1 -> putWord8 23 >> put x1
B64V x1 -> putWord8 24 >> put x1
BufferType -> putWord8 25
ManagedPtrType -> putWord8 26
VoidType -> putWord8 27
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (I x1)
1 -> do x1 <- get
return (BI x1)
2 -> do x1 <- get
return (Fl x1)
3 -> do x1 <- get
return (Ch x1)
4 -> do x1 <- get
return (Str x1)
5 -> fmap B8 get
6 -> fmap B16 get
7 -> fmap B32 get
8 -> fmap B64 get
9 -> return (AType (ATInt ITNative))
10 -> return (AType (ATInt ITBig))
11 -> return (AType ATFloat)
12 -> return (AType (ATInt ITChar))
13 -> return StrType
14 -> return PtrType
15 -> return Forgot
16 -> return (AType (ATInt (ITFixed IT8)))
17 -> return (AType (ATInt (ITFixed IT16)))
18 -> return (AType (ATInt (ITFixed IT32)))
19 -> return (AType (ATInt (ITFixed IT64)))
20 -> do
e <- getWord8
c <- getWord8
return (AType (ATInt (ITVec (toEnum . fromIntegral $ e) (fromIntegral c))))
21 -> fmap B8V get
22 -> fmap B16V get
23 -> fmap B32V get
24 -> fmap B64V get
25 -> return BufferType
26 -> return ManagedPtrType
27 -> return VoidType
_ -> error "Corrupted binary data for Const"
instance Binary Raw where
put x
= case x of
Var x1 -> do putWord8 0
put x1
RBind x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
RApp x1 x2 -> do putWord8 2
put x1
put x2
RType -> putWord8 3
RConstant x1 -> do putWord8 4
put x1
RForce x1 -> do putWord8 5
put x1
RUType x1 -> do putWord8 6
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Var x1)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (RBind x1 x2 x3)
2 -> do x1 <- get
x2 <- get
return (RApp x1 x2)
3 -> return RType
4 -> do x1 <- get
return (RConstant x1)
5 -> do x1 <- get
return (RForce x1)
6 -> do x1 <- get
return (RUType x1)
_ -> error "Corrupted binary data for Raw"
instance (Binary b) => Binary (Binder b) where
put x
= case x of
Lam x1 -> do putWord8 0
put x1
Pi x1 x2 -> do putWord8 1
put x1
put x2
Let x1 x2 -> do putWord8 2
put x1
put x2
NLet x1 x2 -> do putWord8 3
put x1
put x2
Hole x1 -> do putWord8 4
put x1
GHole x1 x2 -> do putWord8 5
put x1
put x2
Guess x1 x2 -> do putWord8 6
put x1
put x2
PVar x1 -> do putWord8 7
put x1
PVTy x1 -> do putWord8 8
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Lam x1)
1 -> do x1 <- get
x2 <- get
return (Pi x1 x2)
2 -> do x1 <- get
x2 <- get
return (Let x1 x2)
3 -> do x1 <- get
x2 <- get
return (NLet x1 x2)
4 -> do x1 <- get
return (Hole x1)
5 -> do x1 <- get
x2 <- get
return (GHole x1 x2)
6 -> do x1 <- get
x2 <- get
return (Guess x1 x2)
7 -> do x1 <- get
return (PVar x1)
8 -> do x1 <- get
return (PVTy x1)
_ -> error "Corrupted binary data for Binder"
-- | One tag byte per universe (0 = UniqueType, 1 = AllTypes,
-- 2 = NullType); the numbering is part of the serialised format and must
-- stay stable.
instance Binary Universe where
  put x = case x of
            UniqueType -> putWord8 0
            AllTypes -> putWord8 1
            NullType -> putWord8 2
  get = do i <- getWord8
           case i of
             0 -> return UniqueType
             1 -> return AllTypes
             2 -> return NullType
             _ -> error "Corrupted binary data for Universe"
-- | Tag bytes: 0 = Bound, 1 = Ref, 2 = DCon, 3 = TCon.  For the
-- constructor cases the two small Int fields are packed into one Integer
-- as @x1 * 65536 + x2@ and unpacked with div/mod on read.
-- NOTE(review): the packing is faithful only while the second field stays
-- below 65536 and both are non-negative -- confirm that invariant holds.
instance Binary NameType where
  put x
    = case x of
        Bound -> putWord8 0
        Ref -> putWord8 1
        DCon x1 x2 x3 -> do putWord8 2
                            put (x1 * 65536 + x2)
                            put x3
        TCon x1 x2 -> do putWord8 3
                         put (x1 * 65536 + x2)
  get
    = do i <- getWord8
         case i of
           0 -> return Bound
           1 -> return Ref
           2 -> do x1x2 <- get
                   x3 <- get
                   return (DCon (x1x2 `div` 65536) (x1x2 `mod` 65536) x3)
           3 -> do x1x2 <- get
                   return (TCon (x1x2 `div` 65536) (x1x2 `mod` 65536))
           _ -> error "Corrupted binary data for NameType"
-- | Serialisation for core terms.  Tag bytes are part of the on-disk
-- format and must stay stable: 0 = P (the third field is not serialised
-- and is restored as Erased on read), 1 = small V (de Bruijn index stored
-- in one byte, biased by +1), 2 = Bind, 3 = App, 4 = Constant, 5 = Proj,
-- 6 = Erased, 7 = TType, 8 = Impossible, 9 = large V, 10 = UType.
instance {- (Binary n) => -} Binary (TT Name) where
  put x
    = {-# SCC "putTT" #-}
      case x of
        P x1 x2 x3 -> do putWord8 0
                         put x1
                         put x2
                         -- put x3
        -- BUG FIX: the upper bound was 256, but the index is stored biased
        -- by +1 in a single Word8, so x1 == 255 made
        -- 'toEnum 256 :: Word8' fail at runtime.  Indices >= 255 now take
        -- the multi-byte encoding (tag 9), which decodes identically, so
        -- the wire format is unchanged for all previously-valid values.
        V x1 -> if (x1 >= 0 && x1 < 255)
                   then do putWord8 1
                           putWord8 (toEnum (x1 + 1))
                   else do putWord8 9
                           put x1
        Bind x1 x2 x3 -> do putWord8 2
                            put x1
                            put x2
                            put x3
        App x1 x2 -> do putWord8 3
                        put x1
                        put x2
        Constant x1 -> do putWord8 4
                          put x1
        -- NOTE(review): the projection index is also stored biased by +1
        -- in a single byte, with no range check; x2 >= 255 would fail in
        -- 'toEnum' exactly as V used to.  Fixing it needs a format change
        -- (an escape tag), so it is only flagged here.
        Proj x1 x2 -> do putWord8 5
                         put x1
                         putWord8 (toEnum (x2 + 1))
        Erased -> putWord8 6
        TType x1 -> do putWord8 7
                       put x1
        Impossible -> putWord8 8
        UType x1 -> do putWord8 10
                       put x1
  get
    = do i <- getWord8
         case i of
           0 -> do x1 <- get
                   x2 <- get
                   -- x3 <- get
                   return (P x1 x2 Erased)
           1 -> do x1 <- getWord8
                   return (V ((fromEnum x1) - 1))
           2 -> do x1 <- get
                   x2 <- get
                   x3 <- get
                   return (Bind x1 x2 x3)
           3 -> do x1 <- get
                   x2 <- get
                   return (App x1 x2)
           4 -> do x1 <- get
                   return (Constant x1)
           5 -> do x1 <- get
                   x2 <- getWord8
                   return (Proj x1 ((fromEnum x2)-1))
           6 -> return Erased
           7 -> do x1 <- get
                   return (TType x1)
           8 -> return Impossible
           9 -> do x1 <- get
                   return (V x1)
           10 -> do x1 <- get
                    return (UType x1)
           _ -> error "Corrupted binary data for TT"
| andyarvanitis/Idris-dev | src/Idris/Core/Binary.hs | bsd-3-clause | 21,546 | 0 | 22 | 12,915 | 6,307 | 2,783 | 3,524 | 521 | 0 |
import Codec.Crypto.RSA
import Control.Monad
import Data.ByteString.Lazy(ByteString)
import qualified Data.ByteString.Lazy as BS
import Data.Digest.Pure.SHA
import Data.Word
import System.Random
import Test.QuickCheck
import Test.Framework (defaultMain, testGroup, Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
-- --------------------------------------------------------------------------
data KeyPair = KP1K PublicKey PrivateKey
deriving (Show)
data KeyPair2048 = KP2K PublicKey PrivateKey
deriving (Show)
-- | Build a deterministic 'StdGen' from an arbitrary seed value.
getRNGSeed :: Gen StdGen
getRNGSeed = do
  seed <- arbitrary
  return (mkStdGen seed)
-- | Generate a fresh 1024-bit RSA key pair from an arbitrary RNG seed.
-- Key generation is expensive, so properties over this type run slowly.
instance Arbitrary KeyPair where
  arbitrary = do g <- getRNGSeed
                 let (pub, priv, _) = generateKeyPair g 1024
                 return $ KP1K pub priv

-- | As 'KeyPair', but 2048-bit, for tests needing a larger modulus.
instance Arbitrary KeyPair2048 where
  arbitrary = do g <- getRNGSeed
                 let (pub, priv, _) = generateKeyPair g 2048
                 return $ KP2K pub priv
-- --------------------------------------------------------------------------
-- | A random large prime produced by 'large_random_prime' with size
-- parameter 64 (presumably 64 bytes, i.e. a 512-bit prime -- confirm
-- against the Codec.Crypto.RSA documentation).
newtype LargePrime = LP Integer

instance Show LargePrime where
  show (LP x) = show x

instance Arbitrary LargePrime where
  arbitrary = do g <- getRNGSeed
                 let (res, _) = large_random_prime g 64
                 return (LP res)
-- --------------------------------------------------------------------------
newtype PositiveInteger = PI Integer
instance Show PositiveInteger where
show (PI x) = show x
instance Arbitrary PositiveInteger where
arbitrary = (PI . (+1) . abs) `fmap` arbitrary
-- --------------------------------------------------------------------------
newtype NonEmptyByteString = NEBS ByteString
instance Show NonEmptyByteString where
show (NEBS x) = show x
instance Arbitrary ByteString where
arbitrary = BS.pack `fmap` arbitrary
instance Arbitrary NonEmptyByteString where
arbitrary = (NEBS . BS.pack) `fmap` (return(:)`ap`arbitrary`ap`arbitrary)
-- --------------------------------------------------------------------------
-- | Pick one of the supported padding schemes: PKCS#1 v1.5, or OAEP with
-- one of the four SHA digests and a random label.
instance Arbitrary EncryptionOptions where
  arbitrary = arbitrary >>= \ lbl -> elements [
                UsePKCS1_v1_5
              , UseOAEP sha1' (generate_MGF1 sha1') lbl
              , UseOAEP sha256' (generate_MGF1 sha256') lbl
              , UseOAEP sha384' (generate_MGF1 sha384') lbl
              , UseOAEP sha512' (generate_MGF1 sha512') lbl
              ]
   where
    sha1' = bytestringDigest . sha1
    sha256' = bytestringDigest . sha256
    sha384' = bytestringDigest . sha384
    sha512' = bytestringDigest . sha512
-- | Show a hash by its output length (HashInfo has no derived Show).
instance Show HashInfo where
  show h = "<hash: len=" ++ (show $ BS.length $ hashFunction h BS.empty) ++ ">"
instance Arbitrary HashInfo where
  arbitrary = elements [ha_SHA1, ha_SHA256, ha_SHA384, ha_SHA512]
-- --------------------------------------------------------------------------
-- | Every chunk produced by chunkify is no longer than the requested size.
prop_chunkify_works :: NonEmptyByteString -> PositiveInteger -> Bool
prop_chunkify_works (NEBS x) (PI l) =
  all (\ bs -> BS.length bs <= (fromIntegral l)) (chunkify (fromIntegral l) x)
-- | modular_exponentiation agrees with the naive (^ then mod) definition.
prop_mod_exp_works :: PositiveInteger -> PositiveInteger -> PositiveInteger ->
                      Bool
prop_mod_exp_works (PI b) (PI e) (PI m) =
  ((b ^ e) `mod` m) == (modular_exponentiation b e m)
-- | The modular inverse of e=65537 really is an inverse modulo phi(p*q).
prop_mod_inv_works :: LargePrime -> LargePrime -> Bool
prop_mod_inv_works (LP p) (LP q) = (e * d) `mod` phi == 1
 where
  e = 65537
  phi = (p - 1) * (q - 1)
  d = modular_inverse e phi
-- --------------------------------------------------------------------------
-- | os2ip inverts i2osp (at a fixed 16-byte width).
prop_i2o2i_identity :: PositiveInteger -> Bool
prop_i2o2i_identity (PI x) = x == (os2ip $ i2osp x 16)
-- | i2osp (at the original length) inverts os2ip.
prop_o2i2o_identity :: NonEmptyByteString -> Bool
prop_o2i2o_identity (NEBS x) = x == (i2osp (os2ip x) (BS.length x))
-- | The RSA decryption primitive inverts the encryption primitive.
prop_ep_dp_identity :: KeyPair -> PositiveInteger -> Bool
prop_ep_dp_identity (KP1K pub priv) (PI x) = m == m'
 where
  n = public_n pub
  e = public_e pub
  d = private_d priv
  m = x `mod` n
  m' = rsa_dp n d $ rsa_ep n e m
-- | The RSA verification primitive inverts the signing primitive.
prop_sp_vp_identity :: KeyPair -> PositiveInteger -> Bool
prop_sp_vp_identity (KP1K pub priv) (PI x) = m == m'
 where
  n = public_n pub
  e = public_e pub
  d = private_d priv
  m = x `mod` n
  m' = rsa_vp1 n e $ rsa_sp1 n d m
-- --------------------------------------------------------------------------
-- | OAEP decryption inverts OAEP encryption for messages trimmed to the
-- maximum payload size (k - 2*hLen - 2 bytes).
prop_oaep_inverts :: HashInfo -> KeyPair2048 -> PositiveInteger ->
                     ByteString -> NonEmptyByteString ->
                     Bool
prop_oaep_inverts hi (KP2K pub priv) (PI seed) l (NEBS x) = m == m'
 where
  hash = hashFunction hi
  kLen = public_size pub
  hLen = BS.length $ hash BS.empty
  mgf  = generate_MGF1 hash
  m    = BS.take (kLen - (2 * hLen) - 2) x
  c    = rsaes_oaep_encrypt hash mgf pub seed l m
  m'   = rsaes_oaep_decrypt hash mgf priv l c
-- | PKCS#1 v1.5 decryption inverts encryption (payload cap: k - 11 bytes).
prop_pkcs_inverts :: RandomGen g => g -> KeyPair -> NonEmptyByteString -> Bool
prop_pkcs_inverts g (KP1K pub priv) (NEBS x) = m == m'
 where
  kLen  = public_size pub
  m     = BS.take (kLen - 11) x
  (c,_) = rsaes_pkcs1_v1_5_encrypt g pub m
  m'    = rsaes_pkcs1_v1_5_decrypt priv c
-- | A PKCS#1 v1.5 signature verifies under the matching public key.
prop_sign_works :: HashInfo -> KeyPair -> NonEmptyByteString -> Bool
prop_sign_works hi (KP1K pub priv) (NEBS m) =
  rsassa_pkcs1_v1_5_verify hi pub m $ rsassa_pkcs1_v1_5_sign hi priv m
-- --------------------------------------------------------------------------
-- | Top-level decrypt inverts encrypt with default options.
prop_encrypt_inverts :: RandomGen g =>
                        g -> KeyPair2048 -> NonEmptyByteString ->
                        Bool
prop_encrypt_inverts g (KP2K pub priv) (NEBS m) =
  m == decrypt priv (fst $ encrypt g pub m)
-- | decrypt' inverts encrypt' for every supported padding option.
prop_encrypt_plus_inverts :: RandomGen g =>
                             g -> EncryptionOptions -> KeyPair2048 ->
                             NonEmptyByteString ->
                             Bool
prop_encrypt_plus_inverts g opts (KP2K pub priv) (NEBS m) =
  m == decrypt' opts priv (fst $ encrypt' opts g pub m)
-- --------------------------------------------------------------------------
-- | Entry point: warn about the runtime, seed the randomised properties
-- from the global generator, then run the whole tree.
main :: IO ()
main = do
  mapM_ putStrLn
    [ "\nWARNING WARNING WARNING"
    , "This test suite takes a very long time to run. If you're in a "
    , "hurry, Control-C is your friend."
    , "WARNING WARNING WARNING\n"
    ]
  gen <- getStdGen
  defaultMain (tests gen)
-- | The full test tree; the 'StdGen' seeds the PKCS and top-level
-- round-trip properties (the rest draw randomness via Arbitrary).
tests :: StdGen -> [Test]
tests g = [
    testGroup "Testing basic helper functions" [
      testProperty "prop_chunkify_works" prop_chunkify_works,
      testProperty "prop_mod_exp_works" prop_mod_exp_works,
      testProperty "prop_mod_inv_works" prop_mod_inv_works
    ],
    testGroup "Testing RSA core functions" [
      testProperty "prop_i2o2i_identity" prop_i2o2i_identity,
      testProperty "prop_o2i2o_identity" prop_o2i2o_identity,
      testProperty "prop_ep_dp_identity" prop_ep_dp_identity,
      testProperty "prop_sp_vp_identity" prop_sp_vp_identity
    ],
    testGroup "Testing fixed-width RSA padding functions" [
      testProperty "prop_oaep_inverts" prop_oaep_inverts,
      testProperty "prop_pkcs_inverts" $ prop_pkcs_inverts g,
      testProperty "prop_sign_works" prop_sign_works
    ],
    testGroup "Testing top-level functions" [
      testProperty "prop_encrypt_inverts" $ prop_encrypt_inverts g,
      testProperty "prop_encrypt_plus_inverts" $ prop_encrypt_plus_inverts g
    ]
  ]
| soenkehahn/RSA | Test.hs | bsd-3-clause | 7,320 | 150 | 9 | 1,675 | 1,930 | 1,019 | 911 | 145 | 1 |
{-# LANGUAGE RecordWildCards, ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module All(runNinja) where
import System.Process (system)
import System.Exit
import Control.Exception (throwIO)
import Type
import Parse
import B.Shake hiding (Rule)
--import Development.Shake.Command
import B.Shake.FilePath
--import Development.Shake.Timing
import qualified Data.ByteString.Char8 as BS
import System.Directory
import qualified Data.HashMap.Strict as Map
import Control.Monad
import Data.List
import Data.Char
-- | No-op timing hook; the upstream Shake version records phase timings
-- here (see the commented-out Timing import above).
addTiming :: String -> IO ()
addTiming = const (return ())
-- | Parse a Ninja build file and translate it into Shake 'Rules'.
-- @args@ are the requested targets; when empty, the file's defaults are
-- wanted instead.
runNinja :: FilePath -> [String] -> IO (Rules ())
runNinja file args = do
    addTiming "Ninja parse"
    ninja@Ninja{..} <- parse file
    return $ do
        -- The "x <- return" bindings deliberately shadow the record-wildcard
        -- lists with HashMap versions of the same data.
        phonys <- return $ Map.fromList phonys
        singles <- return $ Map.fromList singles
        -- Multi-output builds are indexed by each of their outputs.
        multiples <- return $ Map.fromList [(x,(xs,b)) | (xs,b) <- multiples, x <- xs]
        rules <- return $ Map.fromList rules
        {- pools <- fmap Map.fromList $ forM pools $ \(name,depth) ->
                        fmap ((,) name) $ newResource (BS.unpack name) depth -}
        want $ map (normalise . BS.unpack) $ concatMap (resolvePhony phonys) $ if null args then defaults else map BS.pack args
        -- Rule for multi-output builds: any one output triggers them all.
        (\x -> fmap (map BS.unpack . fst) $ Map.lookup (BS.pack x) multiples) ?>> \out -> let out2 = map BS.pack out in
            build defines phonys rules {-pools-} out2 $ snd $ multiples Map.! head out2
        -- Rule for single-output builds.
        (flip Map.member singles . BS.pack) ?> \out -> let out2 = BS.pack out in
            build defines phonys rules {-pools-} [out2] $ singles Map.! out2
-- | Run one Ninja build statement: need its dependencies, expand the rule's
-- variable bindings, run the command via the shell, and process any depfile.
build :: Env -> Map.HashMap Str [Str] -> Map.HashMap Str Rule {--> Map.HashMap Str Resource-} -> [Str] -> Build -> Action ()
build env phonys rules {-pools-} out Build{..} = do
    need $ map (normalise . BS.unpack) $ concatMap (resolvePhony phonys) $ depsNormal ++ depsImplicit ++ depsOrderOnly
    case Map.lookup ruleName rules of
        Nothing -> error $ "Ninja rule named " ++ BS.unpack ruleName ++ " is missing, required to build " ++ BS.unpack (BS.unwords out)
        Just Rule{..} -> do
            -- Shadow env with rule/build bindings plus the implicit
            -- $in/$out/$in_newline variables.
            env <- return $
                addBinds ruleBind $ addBinds buildBind $
                addEnv (BS.pack "in_newline") (BS.unlines depsNormal) $
                addEnv (BS.pack "in") (BS.unwords depsNormal) $
                addEnv (BS.pack "out") (BS.unwords out) env
            applyRspfile env $ do
                let commandline = BS.unpack $ askVar env $ BS.pack "command"
                let depfile = BS.unpack $ askVar env $ BS.pack "depfile"
                let deps = BS.unpack $ askVar env $ BS.pack "deps"
                let description = BS.unpack $ askVar env $ BS.pack "description"
                let pool = askVar env $ BS.pack "pool"
                -- Pool limiting is disabled in this port; every build runs
                -- as if unpooled.
                let withPool = id
                {-let withPool act = case Map.lookup pool pools of
                        _ | BS.null pool -> act
                        Nothing -> error $ "Ninja pool named " ++ BS.unpack pool ++ " not found, required to build " ++ BS.unpack (BS.unwords out)
                        Just r -> withResource r 1 act-}
                when (description /= "") $ putNormal description
                {-if deps == "msvc" then do
                    Stdout stdout <- withPool $ command [Shell, EchoStdout True] commandline []
                    need $ map normalise $ parseShowIncludes stdout
                 else
                    withPool $ command_ [Shell] commandline []-}
                exitCode <- liftIO $ system commandline
                case exitCode of
                  ExitSuccess -> return ()
                  ExitFailure _ -> liftIO $ throwIO exitCode
                -- gcc-style depfiles are generated by the command itself, so
                -- they are not 'need'ed first and are deleted afterwards.
                when (depfile /= "") $ do
                    when (deps /= "gcc") $ need [depfile]
                    depsrc <- liftM BS.unpack $ liftIO $ BS.readFile depfile
                    need $ map normalise $ concatMap snd $ parseMakefile depsrc
                    when (deps == "gcc") $ liftIO $ removeFile depfile
-- | If the rule declares an @rspfile@, write its content before running the
-- action and remove it afterwards; otherwise run the action unchanged.
-- NOTE(review): the file is not removed if the action throws (no bracket).
applyRspfile :: Env -> Action a -> Action a
applyRspfile env act
    | rspfile == "" = act
    | otherwise = do
        liftIO $ BS.writeFile rspfile rspfile_content
        res <- act
        liftIO $ removeFile rspfile
        return res
    where
        rspfile = BS.unpack $ askVar env $ BS.pack "rspfile"
        rspfile_content = askVar env $ BS.pack "rspfile_content"
-- | Extract header paths from cl.exe @/showIncludes@ output, dropping any
-- headers that appear to live in a system directory.
parseShowIncludes :: String -> [FilePath]
parseShowIncludes out =
    filter (not . isSystemInclude) (concatMap grab (lines out))
    where
        grab ln = case stripPrefix "Note: including file:" ln of
            Nothing   -> []
            Just rest -> [dropWhile isSpace rest]
-- Dodgy, but ported over from the original Ninja
-- | Heuristic: a path mentioning "program files" or "microsoft visual
-- studio" (case-insensitively) is treated as a system header.
isSystemInclude :: String -> Bool
isSystemInclude path = "program files" `isInfixOf` lowered
                    || "microsoft visual studio" `isInfixOf` lowered
    where lowered = map toLower path
-- | Parse target/dependency pairs out of a (gcc -MD style) makefile
-- fragment.  Backslash-continued lines are merged first; everything after
-- a '#' is a comment.  A rule "t1 t2: d1 d2" yields one pair per target.
parseMakefile :: String -> [(FilePath, [FilePath])]
parseMakefile src = concatMap rule (glue (lines src))
    where
        -- Merge a line ending in a backslash with its successor.
        glue (l1:l2:rest) | "\\" `isSuffixOf` l1 = glue ((init l1 ++ l2) : rest)
        glue (l:rest) = l : glue rest
        glue [] = []
        -- Split one rule line into (target, dependencies) pairs.
        rule line = [ (target, fileDeps) | target <- words targets ]
            where (targets, rhs) = break (== ':') (takeWhile (/= '#') line)
                  fileDeps = words (drop 1 rhs)
| strager/b-shake | examples/Ninja/All.hs | bsd-3-clause | 5,543 | 0 | 22 | 1,680 | 1,770 | 875 | 895 | 90 | 4 |
module BookingRequestSpec (main, spec) where
import Test.Hspec
import qualified BookingRequest
import qualified Date
import qualified Seats
import Data.Validation
main :: IO ()
main = hspec spec
-- | Booking-request validation: errors accumulate (date and seat errors are
-- both reported); well-formed input succeeds.
spec :: Spec
spec = do
  describe "Failing" $ do
    it "junk" $ do
      let now = Date.Date 2
      -- Date "1" is before 'now' and -3 seats is invalid: both failures
      -- must be collected in order.
      BookingRequest.make now (Just "1") (Just (-3)) `shouldSatisfy`
        (== AccFailure [ BookingRequest.DateBefore (Date.Date 1) (Date.Date 2)
                       , BookingRequest.SeatsError (Seats.BadCount (-3))
                       ]
        )
  describe "Succeeding" $ do
    it "All good" $ do
      let now = Date.Date 2
      BookingRequest.make now (Just "3") (Just 5) `shouldSatisfy`
        isAccSuccess
-- | True exactly when the validation carries a success value.
isAccSuccess :: AccValidation e a -> Bool
isAccSuccess v = case v of
  AccSuccess _ -> True
  _            -> False
| pittsburgh-haskell/data-validation-demo-haskell | test/BookingRequestSpec.hs | bsd-3-clause | 820 | 0 | 21 | 216 | 285 | 145 | 140 | 24 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{--
Copyright (c) 2006, Peng Li
2006, Stephan A. Zdancewic
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright owners nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--}
module Network.TCP.Aux.Misc where
import Network.TCP.Type.Base
import Network.TCP.Type.Timer
import Network.TCP.Type.Datagram
import Network.TCP.Type.Socket
import Network.TCP.Aux.Param
import Foreign hiding (unsafePerformIO)
import Data.Map as Map
import Data.List as List
import Data.Maybe
import Data.List as List
import System.IO.Unsafe
import Control.Exception
-- | Debug trace: forcing the 'seq' prints the message via unsafePerformIO,
-- then the call evaluates to @return undefined@.
-- NOTE(review): depends on unsafePerformIO evaluation order and the result
-- is bottom if inspected — debug-only, keep out of production paths.
debug :: (Monad m) => String -> m a
debug s = seq (unsafePerformIO $ putStrLn s) return undefined
-- | All local ports currently bound by any socket in the map (keys carry
-- the local port; extracted with 'get_local_port').
bound_ports :: Map SocketID (TCPSocket threadt) -> [Port]
bound_ports sockmap = List.map get_local_port (keys sockmap)
-- not considering SO_REUSEADDR
-- bound_port_allowed :: Map SocketID (TCPSocket threadt) -> Port -> Bool
-- bound_port_allowed m p = not $ List.elem p (bound_ports m)
-- lookup_socketid_by_seg :: Map SocketID (TCPSocket threadt) -> TCPSegment -> Maybe SocketID
-- lookup_socketid_by_seg m s =
-- let fakeid = (tcp_dst s, tcp_src s) in
-- if (member fakeid m) then
-- Just fakeid
-- else
-- Nothing
--
-- | Absolute deadline = current time + offset.
create_timer (curr_time :: Time) (offset :: Time) = curr_time + offset
-- | Alias: "slow" (coarse-grained) timers use the same arithmetic.
slow_timer = create_timer
-- | A value tagged with an expiry deadline relative to now.
create_timewindow (curr_time :: Time) (offset :: Time) a = Just (Timed a (create_timer curr_time offset))
-- queues
-- enqueue_message msg q = addToQueue q msg
-- enqueue_messages msgs q = foldl addToQueue q msgs
--
-- | May a new connection enter the listen queue (q, completed)?
accept_incoming_q0 :: SocketListen -> Bool
accept_incoming_q0 lis =
  (length $ lis_q lis) < (backlog_fudge (lis_qlimit lis))
-- | Variant limit used elsewhere in accept processing (3 * fudge(limit/2)).
accept_incoming_q lis =
  (length $ lis_q lis) < 3 * (backlog_fudge (lis_qlimit lis `div` 2))
-- | Should an entry be dropped from the incomplete (SYN) queue q0?
drop_from_q0 lis =
  (length $ lis_q0 lis) >= tcp_q0maxlimit
-- | Build the timestamp option pair (ts_val, ts_ecr) when timestamps are
-- enabled; ts_ecr falls back to 0 if the recent-timestamp window expired.
do_tcp_options :: Time -> Bool -> (TimeWindow Timestamp) -> Timestamp -> Maybe (Timestamp,Timestamp)
do_tcp_options curr_time cb_tf_doing_tstmp cb_ts_recent cb_ts_val =
 if cb_tf_doing_tstmp then
   let ts_ecr' = case timewindow_val curr_time cb_ts_recent of
                   Just x -> x
                   Nothing -> Timestamp 0
   in Just(cb_ts_val, ts_ecr')
 else
   Nothing
-- | Bytes of TCP options that will be emitted: the timestamp option (with
-- padding) takes 12 bytes; otherwise no options are sent.
calculate_tcp_options_len doing_tstmp
  | doing_tstmp = 12
  | otherwise   = 0
-- | Round @v@ down to a multiple of @bs@ — except that values smaller than
-- @bs@ pass through unchanged (BSD buffer-sizing behaviour).
rounddown bs v
  | v < bs    = v
  | otherwise = (v `div` bs) * bs
-- | Round @v@ up to the next multiple of @bs@.
roundup bs v = bs * ((v + bs - 1) `div` bs)
-- | Compute (receive buffer, send buffer, effective MSS, initial cwnd),
-- BSD style: clamp the MSS against the peer's offer and the option
-- overhead, round it to mbuf-cluster multiples, then size both buffers
-- (bandwidth-delay product override first, sb_max cap, rounded up to the
-- MSS).  Initial cwnd is a slow-start segment multiple, larger for local
-- connections.
calculate_buf_sizes (cb_t_maxseg :: Int)
                    (seg_mss :: Maybe Int)
                    (bw_delay_product_for_rt :: Maybe Int)
                    (is_local_conn :: Bool)
                    (rcvbufsize :: Int)
                    (sndbufsize :: Int)
                    (cb_tf_doing_tstmp :: Bool)
 = let t_maxseg' =
        let maxseg = (min cb_t_maxseg (max 64 $ (case seg_mss of Nothing -> mssdflt; Just x-> x))) in
          -- BSD
          maxseg - (calculate_tcp_options_len cb_tf_doing_tstmp)
   in
   let t_maxseg'' = rounddown mclbytes (t_maxseg') in
   let rcvbufsize' = case bw_delay_product_for_rt of Nothing->rcvbufsize; Just x->x in
   -- If the buffer is smaller than one segment, the segment shrinks to fit.
   let (rcvbufsize'', t_maxseg''') = ( if rcvbufsize' < t_maxseg''
                                       then (rcvbufsize', rcvbufsize')
                                       else (min (sb_max) (roundup (t_maxseg'') rcvbufsize'),
                                             t_maxseg'')) in
   let sndbufsize' = case bw_delay_product_for_rt of Nothing->sndbufsize; Just x->x in
   let sndbufsize'' = (if sndbufsize' < t_maxseg'''
                       then sndbufsize'
                       else min (sb_max) (roundup (t_maxseg'') sndbufsize')) in
   let snd_cwnd = t_maxseg''' * ((if is_local_conn then ss_fltsz_local else ss_fltsz)) in
   (rcvbufsize'', sndbufsize'', t_maxseg''', snd_cwnd)
-- | BSD's advertised receive window: never shrink below what was already
-- advertised, otherwise offer the free space in the receive buffer.
calculate_bsd_rcv_wnd :: TCPSocket t -> Int
calculate_bsd_rcv_wnd (tcp_sock :: TCPSocket t)=
  let cb = cb_rcv tcp_sock in
  assert ((rcv_adv cb) >= (rcv_nxt cb)) $ -- assertion for debugging
  max (seq_diff (rcv_adv cb) (rcv_nxt cb))
      (freebsd_so_rcvbuf - (bufc_length $ rcvq cb))
-- | Bytes still free in the send queue.
send_queue_space maxBytes usedBytes = maxBytes - usedBytes
-- | Restart the idle-related timers after activity: the keepalive timer is
-- re-armed (unless we are in SYN_RECEIVED still owing a FIN), and the
-- FIN_WAIT_2 timeout is re-armed only in FIN_WAIT_2.
update_idle (curr_time :: Time) tcp_sock =
  let tt_keep' = if not (st tcp_sock == SYN_RECEIVED && tf_needfin (cb tcp_sock)) then
                    Just (slow_timer curr_time tcptv_keep_idle)
                 else
                    tt_keep $ cb_time tcp_sock
      tt_fin_wait_2' = if st tcp_sock == FIN_WAIT_2 then
                          Just (slow_timer curr_time tcptv_maxidle )
                       else
                          tt_fin_wait_2 $ cb_time tcp_sock
  in
  (tt_keep', tt_fin_wait_2')
-- tcp timing and rtt
-- | Retransmit backoff multiplier table for established connections.
tcp_backoffs = tcp_bsd_backoffs
-- | Backoff multiplier table used while retransmitting SYNs.
--
-- BUG FIX: this was previously defined as itself
-- (@tcp_syn_backoffs = tcp_syn_backoffs@), i.e. bottom — forcing it (which
-- 'computed_rxtcur' does whenever @t_wassyn@ is set) would loop forever
-- (<<loop>>).  Bind it to the SYN backoff table from Aux.Param instead.
tcp_syn_backoffs = tcp_syn_bsd_backoffs
-- | Retransmit mode of the rexmt timer, if armed.
mode_of :: Maybe (Timed (RexmtMode,Int)) -> Maybe RexmtMode
mode_of (Just (Timed (x,_) _)) = Just x
mode_of Nothing = Nothing
-- | Backoff shift of the rexmt timer.
-- NOTE(review): partial — no 'Nothing' case, so calling this with an
-- unarmed timer crashes; callers must check 'mode_of' first.
shift_of :: Maybe (Timed (RexmtMode,Int)) -> Int
shift_of (Just (Timed (_,shift) _ )) = shift
-- todo: check types!
-- compute the retransmit timeout to use
-- | RTO = backoff[shift] * max(rttmin, srtt + 4*rttvar).
-- NOTE(review): uses (!!); a shift beyond the backoff table length crashes.
computed_rto :: [Int] -> Int -> Rttinf -> Time
computed_rto (backoffs :: [Int]) (shift :: Int) (ri::Rttinf) =
  (to_Int64 $ backoffs !! shift ) * (max (t_rttmin ri) ((t_srtt ri) + 4*(t_rttvar ri)))
-- compute the last-used rxtcur
-- | Last-used retransmit timeout, clamped to [t_rttmin, tcptv_rexmtmax];
-- picks the SYN backoff table when the last sample was for a SYN.
computed_rxtcur (ri :: Rttinf) =
  max (t_rttmin ri)
      (min (tcptv_rexmtmax)
           ((computed_rto ( if t_wassyn ri then tcp_syn_backoffs else tcp_backoffs )
                          (t_lastshift ri) ri )))
-- | Arm the retransmit timer in the given mode at the given backoff shift.
-- @wantmin@ forces a floor of (last RTT + 20ms) to avoid premature firing.
start_tt_rexmt_gen (mode :: RexmtMode) (backoffs :: [Int]) (shift :: Int)
                   (wantmin :: Bool) (ri :: Rttinf) (curr_time :: Time) =
    let rxtcur = max (if wantmin
                      then max (t_rttmin ri) (t_lastrtt ri + (2*1000*1000 `div` 100)) -- 2s/100
                      else t_rttmin ri )
                     ( min (tcptv_rexmtmax )
                     ( computed_rto backoffs shift ri) )
    in
    Just ( Timed (mode,shift) (create_timer curr_time rxtcur ) )
-- | Data-retransmit timer (normal backoff table).
start_tt_rexmt = start_tt_rexmt_gen Rexmt tcp_backoffs
-- | SYN-retransmit timer (SYN backoff table).
start_tt_rexmtsyn = start_tt_rexmt_gen RexmtSyn tcp_syn_backoffs
-- | Arm the persist timer, clamped to [tcptv_persmin, tcptv_persmax].
start_tt_persist (shift :: Int) (ri::Rttinf) (curr_time :: Time) =
  let cur = max (tcptv_persmin)
                (min (tcptv_persmax)
                     (computed_rto tcp_backoffs shift ri) )
  in
  Just ( Timed (Persist, shift) (create_timer curr_time cur))
-- | Fold a new round-trip sample into the smoothed estimators, BSD style:
-- srtt <- srtt + delta/8, rttvar <- rttvar + (|delta| - rttvar)/4, with
-- lower bounds on both; the first valid sample seeds srtt/rttvar directly.
update_rtt :: Time -> Rttinf -> Rttinf
update_rtt rtt ri =
  let (t_srtt'', t_rttvar'')
        = if tf_srtt_valid ri then
            let delta = (rtt - 1000*10) - (t_srtt ri) -- 1000*10 = 1/HZ
                vardelta = (abs delta) - (t_rttvar ri)
                t_srtt' = max (1000*1000 `div` (32*100)) (t_srtt ri + (delta `div` 8))
                t_rttvar'=max (1000*1000 `div` (16*100)) (t_rttvar ri + (vardelta `div` 4))
            in (t_srtt', t_rttvar')
          else
            -- First sample: take it as the mean, half of it as the variance.
            let t_srtt' = rtt
                t_rttvar' = rtt `div` 2
            in (t_srtt',t_rttvar')
  in
  ri { t_rttupdated = t_rttupdated ri + 1
     , tf_srtt_valid = True
     , t_srtt = t_srtt''
     , t_rttvar = t_rttvar''
     , t_lastrtt = rtt
     , t_lastshift = 0
     , t_wassyn = False
     }
-- | Congestion-window growth, capped at @maxwin@: one MSS per call while
-- below ssthresh (slow start), roughly MSS^2/cwnd afterwards (congestion
-- avoidance).
expand_cwnd ssthresh maxseg maxwin cwnd = min maxwin (cwnd + increment)
    where increment
            | cwnd > ssthresh = (maxseg * maxseg) `div` cwnd
            | otherwise       = maxseg
-- Path MTU Discovery
-- | Descending table of common link MTUs (cf. the RFC 1191 plateau table);
-- 'next_smaller' walks this to back the segment size off.
mtu_tab = [65535, 32000, 17914, 8166, 4352, 2002, 1492, 1006, 508, 296, 88]
-- | First element of a descending table that is <= the given value.
-- Partial: a pattern-match failure occurs if no entry fits (list
-- exhausted) — callers rely on the 88-byte floor in 'mtu_tab'.
next_smaller :: [Int] -> Int -> Int
next_smaller (candidate:rest) value
  | value >= candidate = candidate
  | otherwise          = next_smaller rest value
-- | Fresh timing block: no timers armed, no recent timestamp.
initial_cb_time = TCBTiming
   { tt_keep       = Nothing
   , tt_conn_est   = Nothing
   , tt_fin_wait_2 = Nothing
   , tt_2msl       = Nothing
   , t_idletime    = 0
   , ts_recent     = Nothing
   , t_badrxtwin   = Nothing
   }
-- | Fresh send block: empty queue, zeroed sequence space, cwnd opened to
-- the maximum window, default RTT estimators.
initial_cb_snd = TCBSending
   { sndq         = bufferchain_empty
   , snd_una      = SeqLocal 0
   , snd_wnd      = 0
   , snd_wl1      = SeqForeign 0
   , snd_wl2      = SeqLocal 0
   , snd_cwnd     = tcp_maxwin `shiftL` tcp_maxwinscale
   , snd_nxt      = SeqLocal 0
   , snd_max      = SeqLocal 0
   , t_dupacks    = 0
   , t_rttinf     = Rttinf { t_rttupdated = 0
                           , tf_srtt_valid = False
                           , t_srtt = tcptv_rtobase
                           , t_rttvar = tcptv_rttvarbase
                           , t_rttmin = tcptv_min
                           , t_lastrtt = 0
                           , t_lastshift = 0
                           , t_wassyn = False
                           }
   , t_rttseg     = Nothing
   , tt_rexmt     = Nothing
   }
{-# INLINE hasfin #-}
{-# INLINE tcp_reass #-}
{-# INLINE tcp_reass_prune #-}
-- | 1 if the reassembly segment carries a FIN (FIN occupies one sequence
-- number), 0 otherwise.
hasfin seg = if trs_FIN seg then 1 else 0
-- returns (1) the contiguous data now deliverable starting at seq
--         (2) the SEQ for the next byte
--         (3) whether FIN has been reached
--         (4) the remaining (still out-of-order) segments
-- this is a very SLOW algorithm (linear scan per hop) and should be
-- replaced ....
tcp_reass :: SeqForeign -> [TCPReassSegment] -> (BufferChain, SeqForeign, Bool, [TCPReassSegment])
tcp_reass seq rsegq =
   -- Find a queued segment whose span [seq1, seq2) covers 'seq'.
   let searchpkt rseg =
         let seq1 = (trs_seq rseg)
             seq2 = seq1 `seq_plus` (bufc_length $ trs_data rseg) `seq_plus` (hasfin rseg)
         in (seq >= seq1 && seq < seq2)
   in
   case List.find searchpkt rsegq of
    Nothing ->
      -- Gap at 'seq': nothing deliverable yet.
      (bufferchain_empty, seq, False, rsegq)
    Just rseg ->
      -- Drop any bytes of the segment that precede 'seq', consume the rest,
      -- then recurse to stitch on any following contiguous segment.
      let data_to_trim = seq `seq_diff` (trs_seq rseg) in
      let result_buf = bufferchain_drop data_to_trim (trs_data rseg) in
      let next_seq = (trs_seq rseg) `seq_plus` (bufc_length $ trs_data rseg) `seq_plus` (hasfin rseg) in
      let new_rsegq = tcp_reass_prune next_seq rsegq in
      if trs_FIN rseg then
         (result_buf
         , next_seq
         , True
         , new_rsegq
         )
      else
         let (bufc2, next_seq2, hasfin2, rsegq2) = tcp_reass next_seq new_rsegq in
         ( bufferchain_concat result_buf bufc2
         , next_seq2
         , hasfin2
         , rsegq2
         )
-- | Drop every queued segment wholly consumed below 'seq' (keep only
-- segments that still extend past it).
tcp_reass_prune :: SeqForeign -> [TCPReassSegment] -> [TCPReassSegment]
tcp_reass_prune seq rsegq =
  List.filter (\seg ->
     let nxtseq = (trs_seq seg) `seq_plus` (bufc_length $ trs_data seg) `seq_plus` (hasfin seg)
     in nxtseq > seq
  ) rsegq
-- | Fresh receive block: empty buffers, zeroed sequence space, no delayed
-- ACK pending.
initial_cb_rcv = TCBReceiving
   { last_ack_sent    = SeqForeign 0
   , tf_rxwin0sent    = False
   , tf_shouldacknow  = False
   , tt_delack        = False
   , rcv_adv          = SeqForeign 0
   , rcv_wnd          = 0
   , rcv_nxt          = SeqForeign 0
   , rcvq             = bufferchain_empty
   , t_segq           = []
   }
-- | Fresh misc control block: default MSS, all options off, placeholder
-- addresses/ids (all-zero) until the socket is bound/connected.
initial_cb_misc = TCBMisc
   { -- retransmission
     snd_ssthresh = tcp_maxwin `shiftL` tcp_maxwinscale
   , snd_cwnd_prev = 0
   , snd_ssthresh_prev = 0
   , snd_recover = SeqLocal 0
   -- some tags
   , cantsndmore = False
   , cantrcvmore = False
   , bsd_cantconnect = False
   -- initialization parameters
   , self_id = SocketID (0,TCPAddr (IPAddr 0,0))
   , parent_id = SocketID (0,TCPAddr (IPAddr 0,0))
   , local_addr = TCPAddr (IPAddr 0,0)
   , remote_addr = TCPAddr (IPAddr 0,0)
   , t_maxseg = mssdflt
   , t_advmss = Nothing
   , tf_doing_ws = False
   , tf_doing_tstmp = False
   , tf_req_tstmp = False
   , request_r_scale = Nothing
   , snd_scale = 0
   , rcv_scale = 0
   , iss = SeqLocal 0
   , irs = SeqForeign 0
   -- other things i don't use for the moment
   , sndurp = Nothing
   , rcvurp = Nothing
   , iobc = NO_OOBDATA
   , rcv_up = SeqForeign 0
   , tf_needfin = False
   }
-- | A brand-new CLOSED socket assembled from the initial blocks above.
initial_tcp_socket = TCPSocket
   { st      = CLOSED
   , cb_time = initial_cb_time
   , cb_snd  = initial_cb_snd
   , cb_rcv  = initial_cb_rcv
   , cb      = initial_cb_misc
   , sock_listen = SocketListen [] [] 0
   , waiting_list = []
   }
-- | The all-zero socket id (no fd, wildcard address/port).
empty_sid :: SocketID
empty_sid = SocketID (0,TCPAddr (IPAddr 0,0))
| Tener/HaNS | src/Network/TCP/Aux/Misc.hs | bsd-3-clause | 13,719 | 0 | 23 | 4,255 | 3,324 | 1,848 | 1,476 | 240 | 7 |
{-# LANGUAGE OverloadedStrings #-}
module Arbitrary where
import Control.Monad
import qualified Data.Text as T
import Test.QuickCheck
import qualified Filesystem.Path.CurrentOS as P
-- | An arbitrary (possibly absolute) file path built from safe path
-- characters, sized by the QuickCheck size parameter.
newtype AFilePath = AFilePath { unAFP :: P.FilePath } deriving Show
instance Arbitrary AFilePath where
  arbitrary = sized $ \i -> do
    -- Randomly absolute ("/") or relative ("").
    root <- arbitrary >>= \b -> return $ if b then "/" else ""
    strs <- replicateM i . listOf $ elements pathChar
    return . AFilePath . P.concat $ root : map P.decodeString strs
    where pathChar = "." ++ ['0' .. '9'] ++ ['a' .. 'z'] ++ ['A' .. 'Z']
-- | An arbitrary dotted Haskell module name; every component starts with
-- an upper-case letter.
newtype AModule = AModule { unAM :: T.Text } deriving Show
instance Arbitrary AModule where
  arbitrary = sized $ \i -> do
    a <- modElem
    b <- replicateM i modElem
    return . AModule . T.intercalate "." . map T.pack $ a : b
    where modChar = ['0' .. '9'] ++ ['a' .. 'z'] ++ ['A' .. 'Z']
          modElem = do
            h <- elements ['A' .. 'Z']
            o <- listOf $ elements modChar
            return $ h : o
| philopon/hassistant.vim | test/Arbitrary.hs | bsd-3-clause | 1,054 | 0 | 15 | 304 | 357 | 195 | 162 | 24 | 0 |
{-# LANGUAGE StandaloneDeriving
#-}
{-| JSON datatype definition.
-}
module Text.JSONb.Simple where
import Data.ByteString
import Data.Trie
{-| A monomorphic JSON datatype, backed with 'Rational', strict 'ByteString'
and 'ByteString' 'Trie'.
-}
-- | JSON values: objects are ByteString tries, numbers are 'Rational'.
data JSON
  = Object (Trie JSON)
  | Array [JSON]
  | String ByteString
  | Number Rational
  | Boolean Bool
  | Null
deriving instance Eq JSON
-- | Debug-oriented rendering (constructor names, one per line for
-- composites); this is not JSON serialization.
instance Show JSON where
  show json = case json of
    Object trie -> unlines $ "Object" : trie_show trie
    Array list -> unlines $ "Array" : fmap show list
    String bytes -> unwords ["String", show bytes]
    Number rational -> unwords ["Number", show rational]
    Boolean bool -> unwords ["Boolean", show bool]
    Null -> "Null"
   where
    trie_show = fmap edge_show . toList
     where
      edge_show (k, v) = unwords [show k, "->", show v]
| solidsnack/JSONb | Text/JSONb/Simple.hs | bsd-3-clause | 987 | 0 | 12 | 325 | 248 | 129 | 119 | 22 | 0 |
module Main where
import System.IO
import Hs
import Control.Concurrent.STM.TChan
import Control.Concurrent.STM
import Control.Concurrent
import Control.Monad
import Network.Socket
import System.Random
-- | Fixed playing-field size (width x height) in character cells.
worldDimension :: Dimension
worldDimension = Dimension 40 30
-- | The 16 standard ANSI terminal foreground colours.
data Colour =
    Black
  | Red
  | Green
  | Yellow
  | Blue
  | Magenta
  | Cyan
  | White
  | BrightBlack
  | BrightRed
  | BrightGreen
  | BrightYellow
  | BrightBlue
  | BrightMagenta
  | BrightCyan
  | BrightWhite deriving (Eq)

-- | ANSI SGR escape sequence selecting the given foreground colour
-- (codes 30-37 for the normal set, 90-97 for the bright set).
setColour :: Colour -> String
setColour colour = "\ESC[" ++ sgrCode colour ++ "m"
  where
    sgrCode Black         = "30"
    sgrCode Red           = "31"
    sgrCode Green         = "32"
    sgrCode Yellow        = "33"
    sgrCode Blue          = "34"
    sgrCode Magenta       = "35"
    sgrCode Cyan          = "36"
    sgrCode White         = "37"
    sgrCode BrightBlack   = "90"
    sgrCode BrightRed     = "91"
    sgrCode BrightGreen   = "92"
    sgrCode BrightYellow  = "93"
    sgrCode BrightBlue    = "94"
    sgrCode BrightMagenta = "95"
    sgrCode BrightCyan    = "96"
    sgrCode BrightWhite   = "97"
-- | ANSI: hide the terminal cursor.
hideCursor :: String
hideCursor = "\ESC[?25l"
-- | ANSI: show the terminal cursor.
showCursor :: String
showCursor = "\ESC[?25h"
-- | ANSI: clear the whole screen.
clearScreen :: String
clearScreen = "\ESC[2J"
-- | ANSI: move the cursor to a 0-based game coordinate (the escape
-- sequence itself is 1-based row;column, hence the +1s).
moveCursor :: Coordinate -> String
moveCursor (Coordinate x' y') = "\ESC[" ++ (show (y' + 1)) ++ ";" ++ (show (x' + 1)) ++ "H"
-- | Palette cycled through for snake heads (Red and White are reserved
-- for apples and the border).
snakeColours :: [Colour]
snakeColours =
  [ Green
  , Blue
  , Yellow
  , Magenta
  , Cyan
  , BrightBlack
  , BrightRed
  , BrightGreen
  , BrightYellow
  , BrightBlue
  , BrightMagenta
  , BrightCyan
  , BrightWhite]
-- | Stable colour for a snake id; the modulo keeps the (!!) index in range.
snakeColour :: Id -> Colour
snakeColour (Id index) = let numberOfColours = length snakeColours
                         in snakeColours !! (index `mod` numberOfColours)
-- | Render one snake: a direction glyph for the head in the snake's own
-- colour, then the tail segments (always drawn in Green).
printSnake :: Snake -> String
printSnake (Snake snakeId heading' h t) = do
  let headChar = case heading' of
        West -> "<"
        East -> ">"
        North -> "^"
        South -> "v"
  (moveCursor h) ++ (setColour (snakeColour snakeId)) ++ headChar ++ (setColour Green) ++ (foldMap (\c -> ((moveCursor c) ++ "O")) t)
-- | Render every snake in the world.
printSnakes :: World -> String
printSnakes world = let ss = snakes world
                    in foldMap printSnake ss
-- | Render one apple as a red '@'.
printApple :: Apple -> String
printApple (Apple pos) = (moveCursor pos) ++ (setColour Red) ++ "@"
-- | Render every apple in the world.
printApples :: World -> String
printApples world = foldMap printApple (apples world)
-- | Render the white '+' frame around the field; corners are drawn twice
-- (harmless — same cell, same glyph).
printBorder :: World -> String
printBorder (World (Dimension w h) _ _) =
  let horizontal = [0..w]
      vertical = [0..h]
      upperBorder = map (\x' -> Coordinate x' 0) horizontal
      lowerBorder = map (\x' -> Coordinate x' h) horizontal
      leftBorder = map (\y' -> Coordinate 0 y') vertical
      rightBorder = map (\y' -> Coordinate w y') vertical
  in (setColour White) ++ (foldMap (\c -> (moveCursor c) ++ "+" ) (upperBorder ++ lowerBorder ++ leftBorder ++ rightBorder))
-- | Compose a full frame (clear + snakes + apples + border) and broadcast
-- it to every connected client via the world channel.
printWorld :: World -> WorldChan -> IO ()
printWorld world worldChan = do
  let newWorld = clearScreen ++ (printSnakes world) ++ (printApples world) ++ (printBorder world)
  atomically $ writeTChan worldChan newWorld
-- | Commands a player can send over the wire.
data UserInput =
    Quit
  | Turn Direction
  | Unknown deriving (Eq)

-- | Map a received key press to a command (vi-style movement keys);
-- anything unrecognised becomes 'Unknown'.
charToInput :: Char -> UserInput
charToInput 'q' = Quit
charToInput 'h' = Turn West
charToInput 'l' = Turn East
charToInput 'j' = Turn South
charToInput 'k' = Turn North
charToInput _   = Unknown
-- | Game events flowing from clients and timers into the world loop.
type EventChan = TChan Event
-- | Rendered frames (ANSI strings) broadcast to every connected client.
type WorldChan = TChan String
-- | Main world loop: apply each incoming event to the world, redraw
-- (broadcast to all clients) on every clock 'Step', and recurse with the
-- updated world.
worldUpdate :: EventChan -> WorldChan -> Hs.World -> IO ()
worldUpdate chan worldChan world = do
  event <- atomically $ readTChan chan
  let newWorld = updateWorld world event
  -- Idiom fix: `if … then … else pure ()` replaced with `when`
  -- (Control.Monad is already imported by this module).
  when (event == Step) $ printWorld newWorld worldChan
  worldUpdate chan worldChan newWorld
-- | Emit a clock 'Step' twice a second, forever.
stepSender :: EventChan -> IO ()
stepSender chan = forever $ do
  atomically $ writeTChan chan Step
  threadDelay 500000
-- | Read one byte at a time from the client and translate turn keys into
-- events; 'Quit'/'Unknown' inputs are ignored.
-- NOTE(review): `head msg` is partial — if recv yields an empty string
-- (peer closed the connection) this thread crashes; confirm whether that
-- is the intended disconnect path.
clientLoop :: Int -> EventChan -> Socket -> IO ()
clientLoop index chan sock = forever $ do
  msg <- recv sock 1
  case (charToInput (head msg)) of
    (Turn direction) -> do
      atomically $ writeTChan chan (TurnSnake (Id index) direction)
    _ -> pure ()
-- | Forward every broadcast frame to this client's socket.
clientUpdateLoop :: WorldChan -> Socket -> IO ()
clientUpdateLoop worldChan socket = forever $ do
  world <- atomically $ readTChan worldChan
  send socket world
-- | Per-client setup: add a snake for this client (start column 1, row
-- derived from the client index), fork the frame-forwarding thread, then
-- enter the input loop.
clientMain :: Int -> EventChan -> WorldChan -> (Socket, SockAddr) -> IO ()
clientMain clientIndex chan worldChan (sock, _) = do
  atomically $ writeTChan chan (AddSnake (Snake (Id clientIndex) East (startCoordinate clientIndex) []))
  _ <- forkIO $ clientUpdateLoop worldChan sock
  clientLoop clientIndex chan sock
  where
    -- Spread start rows over the field height, avoiding the border rows.
    startCoordinate :: Int -> Coordinate
    startCoordinate index = Coordinate 1 ((index `mod` ((height worldDimension) - 1)) + 1)
-- | Accept clients forever; each gets its own duplicate of the broadcast
-- channel and its own thread.
listeningLoop :: Int -> EventChan -> WorldChan -> Socket -> IO ()
listeningLoop clientCount chan worldChan sock = do
  conn <- accept sock
  newchan <- atomically $ dupTChan worldChan
  _ <- forkIO $ clientMain clientCount chan newchan conn
  listeningLoop (clientCount + 1) chan worldChan sock
-- | Bind TCP port 4242 on all interfaces and start accepting clients.
networkThread :: EventChan -> WorldChan -> IO ()
networkThread chan worldChan = do
  sock <- socket AF_INET Stream 0
  setSocketOption sock ReuseAddr 1
  bind sock (SockAddrInet 4242 iNADDR_ANY)
  listen sock 2
  listeningLoop 0 chan worldChan sock
-- | Drop a new apple at a random position once a second.
-- NOTE(review): randomR (0, w)/(0, h) is inclusive, so apples can spawn on
-- the border cells — confirm whether that is intended.
appleSpawner :: Dimension -> StdGen -> EventChan -> IO ()
appleSpawner dim@(Dimension w h) gen chan = do
  let (x, newGen) = randomR (0, w) gen
  let (y, newGen') = randomR (0, h) newGen
  atomically $ writeTChan chan (AddApple (Apple (Coordinate x y)))
  threadDelay 1000000
  appleSpawner dim newGen' chan
-- | Wire up the channels and worker threads, then run the apple spawner
-- on the main thread (never returns).
start :: Hs.World -> IO ()
start world = do
  chan <- newTChanIO
  worldChan <- atomically $ newBroadcastTChan
  _ <- forkIO $ worldUpdate chan worldChan world
  _ <- forkIO $ stepSender chan
  _ <- forkIO $ networkThread chan worldChan
  stdGen <- newStdGen
  appleSpawner (dimension world) stdGen chan
-- | Seed data is built but note only apples' is actually used: the world
-- starts with no snakes (snakes' is unused here).
main :: IO ()
main = do
  let snakes' = [(Snake (Id 0) East (Coordinate 10 5) [(Coordinate 9 5), (Coordinate 8 5), (Coordinate 7 5), (Coordinate 6 5)])]
  let apples' = [ (Apple (Coordinate 5 5)), (Apple (Coordinate 15 7)), (Apple (Coordinate 14 7)), (Apple (Coordinate 13 7)), (Apple (Coordinate 12 7)), (Apple (Coordinate 11 7)), (Apple (Coordinate 10 10)), (Apple (Coordinate 9 9)), (Apple (Coordinate 16 8)) ]
  start (World worldDimension [] apples')
| PeterHajdu/hs | app/Main.hs | bsd-3-clause | 6,449 | 0 | 18 | 1,645 | 2,312 | 1,172 | 1,140 | 178 | 16 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.API.Subledger.Client.HttpStreams
( subledger
, subledgerConn
, withConnection
, SubledgerRequest(..)
, SubledgerError(..)
, SubledgerConfig(..)
-- * low-level
, callAPI
) where
-- import Control.Monad.Trans.Resource (runResourceT)
-- import qualified Network.HTTP.Simple as H
import Control.Exception (finally, SomeException, try)
import Control.Monad (when)
import Data.Aeson (encode, FromJSON, fromJSON, json', Result(..), Value)
import Data.Binary.Builder (toLazyByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Default (def)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Network.API.Subledger.Client (handleStream, Method(..), nullBody, SubledgerConfig(..), SubledgerError(..), SubledgerErrorType(..), SubledgerRequest(..), SubledgerReturn)
import qualified Network.Http.Client as C
import qualified Network.HTTP.Types as H
import OpenSSL (withOpenSSL)
import qualified System.IO.Streams as Streams
import qualified System.IO.Streams.Attoparsec as Streams
import System.IO.Streams.Attoparsec (ParseException(..))
-- | Create a request to Subledger’s API
-- | Run a single request against Subledger's API, opening a fresh
-- HTTPS connection for the call and closing it afterwards.
subledger :: FromJSON (SubledgerReturn a)
          => SubledgerConfig
          -> SubledgerRequest a
          -> IO (Either SubledgerError (SubledgerReturn a))
subledger config request = withConnection run
  where
    run conn = subledgerConn conn config request
-- | Create a request to Subledger’s API using a connection opened
-- with `withConnection`
-- | Run a request against Subledger's API over a connection that was
-- previously opened with 'withConnection'; decodes via 'fromJSON'.
subledgerConn :: FromJSON (SubledgerReturn a)
              => C.Connection
              -> SubledgerConfig
              -> SubledgerRequest a
              -> IO (Either SubledgerError (SubledgerReturn a))
subledgerConn conn = callAPI conn fromJSON
-- | Open a connection to the Subledger API server
-- | Open an SSL connection to the Subledger API server, run the given
-- action with it, and close the connection afterwards.  A failure to
-- open the connection is returned as a 'ConnectionFailure' error
-- rather than thrown.
withConnection :: (C.Connection -> IO (Either SubledgerError a))
               -> IO (Either SubledgerError a)
withConnection f =
  withOpenSSL $ do
    ctx <- C.baselineContextSSL
    -- Catch *any* exception during connect and map it to SubledgerError.
    result <- try (C.openConnectionSSL ctx "api.subledger.com" 443) :: IO (Either SomeException C.Connection)
    case result of
      Left msg -> return $ Left $ def { errorType = ConnectionFailure
                                      , errorMsg = T.pack $ show msg
                                      }
      -- 'finally' guarantees the socket is closed even if f throws.
      Right conn -> f conn `finally` C.closeConnection conn
-- | Convert from subledger-core Method type to http-stream Method type
-- | Convert from subledger-core Method type to http-stream Method type.
m2m :: Method -> C.Method
m2m method = case method of
  GET   -> C.GET
  PATCH -> C.PATCH
  POST  -> C.POST
------------------------------------------------------------------------------
-- | Create a request to `Subledger`'s API over an existing connection
--
-- see also: 'withConnection'
-- FIXME: all connection errors should be
-- turned into a `SubledgerError`. But that is not yet implemented.
--
-- NOTES: this a pretty low-level function. You probably want `subledger`
-- or `subledgerConn`. If you call this function directly, you are
-- responsible for ensuring the JSON conversion function supplied is
-- correct for `SubledgerRequest`. In the rest of the library this
-- property is enforced automatically by the type-system. But adding
-- that constraint here made implementing the `Subledger` testing monad
-- difficult.
-- | Low-level request driver: builds the HTTP request, streams the
-- JSON-encoded body (if any), and parses the JSON response, mapping
-- the status code and payload through 'handleStream'.
callAPI :: C.Connection -- ^ an open connection to the server (`withConnection`)
        -> (Value -> Result b) -- ^ function to convert JSON result to Haskell Value
        -> SubledgerConfig -- ^ SubledgerConfig
        -> SubledgerRequest a -- ^ SubledgerRequest
        -> IO (Either SubledgerError b)
callAPI conn fromJSON' SubledgerConfig {..} sreq@SubledgerRequest{..} = do
  req <- C.buildRequest $ do
    C.http (m2m method) $ encodePath path query
    -- Subledger uses HTTP basic auth with the API key/secret pair.
    C.setAuthorizationBasic (encodeUtf8 apiKey) (encodeUtf8 apiSecret)
    C.setAccept "application/json"
    -- Content-Type only makes sense when a body is actually sent.
    when (not $ nullBody sreq) $ C.setContentType "application/json"
    C.setHeader "Connection" "Keep-Alive"
    C.setTransferEncoding
  when debug $ print req
  if nullBody sreq
    then C.sendRequest conn req C.emptyBody
    else do
      let lbs = encode body
      when debug $ L.putStrLn lbs
      i <- Streams.fromLazyByteString lbs
      C.sendRequest conn req $ C.inputStreamBody i
  C.receiveResponse conn $ \response inputStream ->
    do when debug $ print response
       let statusCode = C.getStatusCode response
       -- A malformed JSON payload raises ParseException; capture it
       -- and turn it into an aeson 'Error' instead of crashing.
       v <- try (Streams.parseFromStream json' inputStream)
       let r = case v of
                 (Left (ParseException msg)) -> Error msg
                 (Right a) -> Success a
       return $ handleStream fromJSON' statusCode r
-- | Global verbosity switch: when 'True', requests, request bodies and
-- responses are printed to stdout.
-- NOTE(review): hard-coded to True -- probably should be False (or
-- configurable) in production builds.
debug :: Bool
debug = True
-- | Percent-encode URL path segments plus a query string into a
-- strict ByteString suitable for the request line.
encodePath :: [T.Text] -> [(T.Text, Maybe T.Text)] -> S.ByteString
encodePath segments q =
  L.toStrict (toLazyByteString (H.encodePath segments (H.toQuery q)))
| whittle/subledger | subledger-http-streams/src/Network/API/Subledger/Client/HttpStreams.hs | bsd-3-clause | 5,069 | 0 | 20 | 1,181 | 1,131 | 611 | 520 | 86 | 3 |
module Text.XML.SpreadsheetML.Types where
{- See http://msdn.microsoft.com/en-us/library/aa140066%28office.10%29.aspx -}
import Data.Word ( Word64 )
-- | Only implement what we need
-- | Top-level SpreadsheetML workbook: optional document properties,
-- optional shared styles, and the worksheets themselves.
data Workbook = Workbook
  { workbookDocumentProperties :: Maybe DocumentProperties
  , workbookStyles :: Maybe Styles
  , workbookWorksheets :: [Worksheet]
  }
  deriving (Read, Show)

-- | Container for all 'Style' definitions referenced by 'StyleID'.
data Styles = Styles
  { elemStyles :: [Style]
  }
  deriving (Read, Show)

-- | A named cell style; referenced from tables/rows/cells via its ID.
data Style = Style
  -- attributes
  { attribID :: StyleID
  , attribName :: Maybe String
  , attribParent :: Maybe String
  -- elements
  , elemAlignment :: Maybe Alignment
  , elemBorders :: Maybe Borders
  , elemFont :: Maybe Font
  , elemInterior :: Maybe Interior
  , elemNumberFormat :: Maybe NumberFormat
  , elemProtection :: Maybe Protection
  }
  deriving (Read, Show)

-- | Horizontal/vertical alignment and text layout within a cell.
data Alignment = Alignment
  { alignHorizontal :: Maybe Horizontal
  , alignReadingOrder :: Maybe ReadingOrder
  , alignRotate :: Maybe Double
  , alignShrinkToFit :: Maybe Bool
  , alignVertical :: Maybe Vertical
  }
  deriving (Read, Show)

-- | A set of border definitions, one per 'Position'.
data Borders = Borders
  { elemBorder :: [Border] }
  deriving (Read, Show)

-- | One edge of a cell border: which side, color, line style and weight.
data Border = Border
  { attribPosition :: Maybe Position
  , attribBorderColor :: Maybe String
  , attribLineStyle :: Maybe LineStyle
  , attribWeight :: Maybe LineWeight
  }
  deriving (Read, Show)

-- | Font attributes for a style (all optional).
data Font = Font
  { attribBold :: Maybe Bool
  , attribFontColor :: Maybe String
  , attribFontName :: Maybe String
  , attribItalic :: Maybe Bool
  , attribSize :: Maybe Double
  , attribStrikeThrough :: Maybe Bool
  , attribUnderline :: Maybe Underline
  , attribCharSet :: Maybe Word64
  , attribFamily :: Maybe FontFamily
  }
  deriving (Read, Show)

-- | Cell interior (fill) color and pattern.
data Interior = Interior
  { attribInteriorColor :: Maybe String
  , attribPattern :: Maybe Pattern
  }
  deriving (Read, Show)

-- | Excel number-format string (e.g. @"0.00"@).
data NumberFormat = NumberFormat
  { attribFormat :: Maybe String }
  deriving (Read, Show)

-- | Cell protection flags (locked / hidden formula).
data Protection = Protection
  { attribProtected :: Maybe Bool
  , attribHideFormula :: Maybe Bool
  }
  deriving (Read, Show)
-- | Workbook-level metadata (title, author info, etc.).
data DocumentProperties = DocumentProperties
  { documentPropertiesTitle :: Maybe String
  , documentPropertiesSubject :: Maybe String
  , documentPropertiesKeywords :: Maybe String
  , documentPropertiesDescription :: Maybe String
  , documentPropertiesRevision :: Maybe Word64
  , documentPropertiesAppName :: Maybe String
  , documentPropertiesCreated :: Maybe String -- ^ Actually, this should be a date time
  }
  deriving (Read, Show)

-- | A single worksheet: its (required) name and optional table of data.
data Worksheet = Worksheet
  { worksheetTable :: Maybe Table
  , worksheetName :: Name
  }
  deriving (Read, Show)

-- | The grid of a worksheet: column definitions, rows, and layout defaults.
data Table = Table
  { tableColumns :: [Column]
  , tableRows :: [Row]
  , tableStyleID :: Maybe StyleID -- ^ Must be defined in Styles
  , tableDefaultColumnWidth :: Maybe Double -- ^ Default is 48
  , tableDefaultRowHeight :: Maybe Double -- ^ Default is 12.75
  , tableExpandedColumnCount :: Maybe Word64
  , tableExpandedRowCount :: Maybe Word64
  , tableLeftCell :: Maybe Word64 -- ^ Default is 1
  , tableTopCell :: Maybe Word64 -- ^ Default is 1
  , tableFullColumns :: Maybe Bool
  , tableFullRows :: Maybe Bool
  }
  deriving (Read, Show)

-- | Per-column formatting and sizing.
data Column = Column
  { columnCaption :: Maybe Caption
  , columnStyleID :: Maybe StyleID -- ^ Must be defined in Styles
  , columnAutoFitWidth :: Maybe AutoFitWidth
  , columnHidden :: Maybe Hidden
  , columnIndex :: Maybe Word64
  , columnSpan :: Maybe Word64
  , columnWidth :: Maybe Double
  }
  deriving (Read, Show)

-- | One row of cells plus per-row formatting and sizing.
data Row = Row
  { rowCells :: [Cell]
  , rowStyleID :: Maybe StyleID -- ^ Must be defined in Styles
  , rowCaption :: Maybe Caption
  , rowAutoFitHeight :: Maybe AutoFitHeight
  , rowHeight :: Maybe Double
  , rowHidden :: Maybe Hidden
  , rowIndex :: Maybe Word64
  , rowSpan :: Maybe Word64
  }
  deriving (Read, Show)

-- | A single cell: optional value, optional formula, and layout attributes.
data Cell = Cell
  -- elements
  { cellData :: Maybe ExcelValue
  -- Attributes
  , cellStyleID :: Maybe StyleID -- ^ Must be defined in Styles
  , cellFormula :: Maybe Formula
  , cellIndex :: Maybe Word64
  , cellMergeAcross :: Maybe Word64
  , cellMergeDown :: Maybe Word64
  }
  deriving (Read, Show)

-- | The subset of Excel cell value types modelled here.
data ExcelValue = Number Double | Boolean Bool | StringType String
  deriving (Read, Show)

-- | TODO: Currently just a string, but we could model excel formulas and
-- use that type here instead.
newtype Formula = Formula String
  deriving (Read, Show)

data AutoFitWidth = AutoFitWidth | DoNotAutoFitWidth
  deriving (Read, Show)

data AutoFitHeight = AutoFitHeight | DoNotAutoFitHeight
  deriving (Read, Show)

-- | Attribute for hidden things
data Hidden = Shown | Hidden
  deriving (Read, Show)

data Horizontal = HAlignAutomatic | HAlignLeft | HAlignCenter | HAlignRight
  deriving (Read, Show)

data ReadingOrder = RightToLeft | LeftToRight
  deriving (Read, Show)

data Vertical = VAlignAutomatic | VAlignTop | VAlignBottom | VAlignCenter
  deriving (Read, Show)

-- | Which edge of the cell a 'Border' applies to.
data Position = PositionLeft | PositionTop | PositionRight | PositionBottom
  deriving (Read, Show)

data LineStyle = LineStyleNone | LineStyleContinuous | LineStyleDash | LineStyleDot | LineStyleDashDot | LineStyleDashDotDot
  deriving (Read, Show)

data LineWeight = Hairline | Thin | Medium | Thick
  deriving (Read, Show)

data Underline = UnderlineNone | UnderlineSingle | UnderlineDouble | UnderlineSingleAccounting | UnderlineDoubleAccounting
  deriving (Read, Show)

data FontFamily = Automatic | Decorative | Modern | Roman | Script | Swiss
  deriving (Read, Show)

-- | Cell fill patterns as defined by SpreadsheetML.
data Pattern = PatternNone | PatternSolid | PatternGray75 | PatternGray50 | PatternGray25 | PatternGray125 | PatternGray0625 |
               PatternHorzStripe | PatternVertStripe | PatternReverseDiagStripe | PatternDiagStripe | PatternDiagCross |
               PatternThickDiagCross | PatternThinHorzStripe | PatternThinVertStripe | PatternThinReverseDiagStripe |
               PatternThinDiagStripe | PatternThinHorzCross | PatternThinDiagCross
  deriving (Read, Show)

-- | For now this is just a string, but we could model excel's names
newtype Name = Name String
  deriving (Read, Show)

newtype Caption = Caption String
  deriving (Read, Show)

newtype StyleID = StyleID String
  deriving (Read, Show)
| dagit/SpreadsheetML | src/Text/XML/SpreadsheetML/Types.hs | bsd-3-clause | 6,591 | 0 | 9 | 1,619 | 1,519 | 878 | 641 | 149 | 0 |
module PlatformDB
( release, deltaFrom,
incGHC, incGHCLib, incGHCTool, incLib, incTool,
notWindows, onlyWindows,
allPackages,
corePackages,
platformPackages,
packagesByIncludeFilter,
isGhc, isWindows, isNotWindows, isLib, isTool
) where
import Data.List (partition)
import Types
import Utils (version)
-- | Construct a release
-- | Construct a release from a version string and its include list.
release :: String -> [Include] -> Release
release vstr = Release (HpVersion (version vstr))
-- | Construct list of Includes as a delta to packages in another release
-- | Construct a list of Includes as a delta to packages in another
-- release: every include of the base is kept unless the delta list
-- names the same package, in which case the delta entry replaces it.
-- Delta entries for packages not in the base are appended at the end.
-- Two delta entries for the same base package are an error.
deltaFrom :: Release -> [Include] -> [Include]
deltaFrom base deltas = go (relIncludes base) deltas
  where
    -- Walk the base includes, consuming matching delta entries as we go;
    -- whatever deltas remain are new packages and are appended verbatim.
    go [] dIncs = dIncs
    go (bInc : bIncs) dIncs =
        let (updates, dIncs') = partition (match bInc) dIncs
        in merge bInc updates : go bIncs dIncs'
    -- Includes refer to the same package iff the package names agree.
    match (_, bPkg) (_, dPkg) = pkgName bPkg == pkgName dPkg
    merge bInc [] = bInc
    merge _ [updateInc] = updateInc
    merge bInc _ = error $ "multiple updates for package " ++ show (snd bInc)
-- | Build an include entry from its type, package name and version string.
buildInc :: IncludeType -> PackageName -> String -> Include
buildInc it nm vstr = (it, pkg)
  where
    pkg = Package nm (version vstr)
-- | An include entry for the version of GHC itself
-- (the package name is fixed to @\"ghc\"@, only the version varies).
incGHC :: String -> Include
incGHC = buildInc IncGHC "ghc"
-- | An include entry for a lib that is supplied with GHC
incGHCLib :: PackageName -> String -> Include
incGHCLib = buildInc IncGHCLib
-- | An include entry for a lib that is supplied by the platform
incLib :: PackageName -> String -> Include
incLib = buildInc IncLib
-- | An include entry for a tool that is supplied with GHC
incGHCTool :: PackageName -> String -> Include
incGHCTool = buildInc IncGHCTool
-- | An include entry for a tool that is supplied with the platform
incTool :: PackageName -> String -> Include
incTool = buildInc IncTool
-- | Modify an include for being part of the platform only on non-windows
-- distributions.
notWindows :: Include -> Include
notWindows (it, pkg) = (IncIfNotWindows it, pkg)
-- | Modify an include for being part of the platform only on windows
-- distributions.
onlyWindows :: Include -> Include
onlyWindows (it, pkg) = (IncIfWindows it, pkg)
-- | Select the packages of a release whose include type satisfies the
-- given predicate.
packagesByIncludeFilter :: (IncludeType -> Bool) -> Release -> [Package]
packagesByIncludeFilter keep rel =
    [ pkg | (it, pkg) <- relIncludes rel, keep it ]

-- | All packages in the release
allPackages :: Release -> [Package]
allPackages = packagesByIncludeFilter (const True)

-- | Includes that are part of the core (expected to come with GHC)
corePackages :: Release -> [Package]
corePackages = packagesByIncludeFilter isGhc

-- | Includes that come from the platform (added beyond the GHC default)
platformPackages :: Release -> [Package]
platformPackages = packagesByIncludeFilter (not . isGhc)
-- | Tests of Include
-- Each predicate also looks through the Windows/non-Windows wrappers
-- via 'isIncRecurse'.
isGhc, isWindows, isNotWindows, isLib, isTool :: IncludeType -> Bool
isGhc IncGHC = True
isGhc IncGHCLib = True
isGhc IncGHCTool = True
isGhc i = isIncRecurse isGhc i
isWindows (IncIfWindows _) = True
isWindows _ = False
isNotWindows (IncIfNotWindows _) = True
isNotWindows _ = False
isLib IncGHCLib = True
isLib IncLib = True
isLib i = isIncRecurse isLib i
isTool IncGHCTool = True
isTool IncTool = True
isTool i = isIncRecurse isTool i
-- | Apply a predicate underneath an OS-conditional wrapper; anything
-- that is not a wrapper (and was not matched above) yields False.
isIncRecurse :: (IncludeType -> Bool) -> IncludeType -> Bool
isIncRecurse p (IncIfWindows i) = p i
isIncRecurse p (IncIfNotWindows i) = p i
isIncRecurse _ _ = False
| ardumont/haskell-platform | hptool/src/PlatformDB.hs | bsd-3-clause | 3,404 | 0 | 13 | 677 | 894 | 483 | 411 | 67 | 4 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
--------------------------------------------------------------------------------
module Generics.Deriving.Fold where
--------------------------------------------------------------------------------
import GHC.Generics
--------------------------------------------------------------------------------
-- Note: We do not know (according to the types) whether a K1 holds a recursive
-- position or a constant. So, we use Either to allow for both options. This
-- makes the algebra ugly, since we need to use Left/Right at every K1. There is
-- also no error checking at the moment, but it can be added later.
--
-- One way to know if a K1 is recursive or not is to use the upcoming
-- overlapping type families.
-- | Algebra type for a generic representation @f@ with result type @r@.
-- Each K1 position takes an @Either a r@: 'Left' for a constant field,
-- 'Right' for a recursive occurrence (see the note above).
type family Alg (f :: * -> *) r
type instance Alg U1 r = r
type instance Alg (K1 i a) r = Either a r -> r
type instance Alg (M1 i c f) r = Alg f r
type instance Alg (f :+: g) r = (Alg f r, Alg g r)
type instance Alg (M1 i c (K1 j a) :*: g) r = Either a r -> Alg g r
type instance Alg ((f :*: g) :*: h) r = Alg (f :*: (g :*: h)) r
--------------------------------------------------------------------------------
-- | Worker class: folds a representation @f@ of the original type @a@.
-- The proxy pins down @a@ so recursive positions can be detected.
class Fold' f a where
  fold' :: proxy a -> Alg (Rep a) r -> Alg f r -> f x -> r
--------------------------------------------------------------------------------
-- Nullary constructor: the algebra IS the result.
instance Fold' U1 a where
  fold' _ _ alg U1 = alg
-- K1 holding the type being folded: recursive position, fold it and
-- feed the result in as 'Right'.
instance Fold a => Fold' (K1 i a) a where
  fold' p palg alg (K1 x) = alg (Right (fold palg x))
-- K1 holding any other type: constant field, passed as 'Left'.
instance Fold' (K1 i b) a where
  fold' p _ alg (K1 x) = alg (Left x)
-- Metadata wrapper: transparent.
instance Fold' f a => Fold' (M1 i c f) a where
  fold' p palg alg (M1 x) = fold' p palg alg x
-- Sum: the algebra is a pair, pick the side matching the constructor.
instance (Fold' f a, Fold' g a) => Fold' (f :+: g) a where
  fold' p palg (alg, _) (L1 x) = fold' p palg alg x
  fold' p palg (_, alg) (R1 x) = fold' p palg alg x
-- Product headed by a recursive field: fold it, then continue right.
instance (Fold a, Fold' g a) => Fold' (M1 i c (K1 j a) :*: g) a where
  fold' p palg alg (M1 (K1 x) :*: g) = fold' p palg (alg (Right (fold palg x))) g
-- Product headed by a constant field: feed it, then continue right.
instance Fold' g a => Fold' (M1 i c (K1 j b) :*: g) a where
  fold' p palg alg (M1 (K1 x) :*: g) = fold' p palg (alg (Left x)) g
-- Re-associate left-nested products so the head is always a single field.
instance Fold' (f :*: (g :*: h)) a => Fold' ((f :*: g) :*: h) a where
  fold' p palg alg ((f :*: g) :*: h) = fold' p palg alg (f :*: (g :*: h))
--------------------------------------------------------------------------------
-- | User-facing class: fold any 'Generic' type with an algebra over
-- its representation.
class (Generic a, Fold' (Rep a) a) => Fold a where
  fold :: Alg (Rep a) r -> a -> r
  fold alg x = fold' (Just x) alg alg (from x)
--------------------------------------------------------------------------------
-- Stock instances for common base types (all use the default method).
instance Fold Bool
instance Fold Char
instance Fold Double
instance Fold Float
instance Fold Int
instance Fold Ordering
instance Fold [a]
instance Fold (Maybe a)
instance Fold (Either a b)
instance Fold ()
instance Fold (a, b)
instance Fold (a, b, c)
instance Fold (a, b, c, d)
instance Fold (a, b, c, d, e)
instance Fold (a, b, c, d, e, f)
instance Fold (a, b, c, d, e, f, g)
| spl/generic-deriving-extras | src/Generics/Deriving/Fold.hs | bsd-3-clause | 3,214 | 0 | 12 | 686 | 1,195 | 624 | 571 | 55 | 0 |
{-# LANGUAGE DeriveGeneric, StandaloneDeriving, DeriveAnyClass, OverloadedStrings #-}
-- Utility to dump all of the release information as JSON.
module Main where
import GHC.Generics
import Data.Aeson
import Types
import ReleaseFiles
import Releases
import qualified Data.Vector as V
import qualified Data.ByteString.Lazy.Char8 as BS
-- Orphan Generic/ToJSON instances for the types defined in Types /
-- ReleaseFiles, so the whole release database can be serialized.
deriving instance Generic OS
deriving instance ToJSON OS
deriving instance Generic Arch
deriving instance ToJSON Arch
deriving instance Generic DistType
deriving instance ToJSON DistType
deriving instance Generic Package
deriving instance ToJSON Package
deriving instance Generic IncludeType
deriving instance ToJSON IncludeType
deriving instance Generic HpVersion
deriving instance ToJSON HpVersion
-- | Encode a list as a JSON array using the supplied per-element encoder.
jlist :: (a -> Value) -> [a] -> Value
jlist encodeOne = Array . V.fromList . map encodeOne
-- | Encode one distribution file record.
-- NOTE(review): no top-level type signature -- the tuple component
-- types come from ReleaseFiles; consider adding one.
jfileinfo (disttype, url, mhash, isfull) =
    object [ "disttype" .= toJSON disttype
           , "url" .= toJSON url
           , "hash" .= toJSON mhash
           , "isfull" .= toJSON isfull
           ]
-- | Encode a (month, year) date pair.
jdate :: Date -> Value
jdate (month, year) =
    object [ "month" .= toJSON month
           , "year" .= toJSON year
           ]
-- | Encode one release's downloadable files (version, date, file infos).
-- NOTE(review): no top-level type signature; presumably
-- @ReleaseFiles -> Value@ -- confirm against ReleaseFiles.
jreleaseFiles (version, date, fileInfos) =
    object [ "version" .= version
           , "date" .= jdate date
           , "fileInfo" .= jlist jfileinfo fileInfos
           ]
-- | Encode a release: its version plus minimal and full include lists.
jrelease :: Release -> Value
jrelease r =
    object [ "relVersion" .= toJSON (relVersion r)
           , "relMinimalIncludes" .= jlist jinclude (relMinimalIncludes r)
           , "relIncludes" .= jlist jinclude (relIncludes r)
           ]
-- | Encode a single include (its type and the package it names).
jinclude :: Include -> Value
jinclude (inctype, package) =
    object [ "includeType" .= toJSON inctype
           , "package" .= toJSON package
           ]
-- | Combine release-file records and releases into one JSON document.
jalldata :: [ReleaseFiles] -> [Release] -> Value
jalldata releaseFiles releases =
    object [ "releaseFiles" .= jlist jreleaseFiles releaseFiles
           , "releases" .= jlist jrelease releases
           ]
main = BS.putStrLn $ encode $ jalldata releaseFiles releases
| erantapaa/haskell-platform | hptool/src/DataMain.hs | bsd-3-clause | 2,008 | 0 | 10 | 461 | 543 | 288 | 255 | 50 | 1 |
module MyLittlePrelude
( module X
, undefined
, error
, trace
, traceShow
, traceShowM
, traceM
, traceIO
, notImplemented
, whenM
, unlessM
, ifM
, guardM
) where
import Prelude as X hiding ((.), id, undefined, error)
import Control.Applicative as X
((<|>), liftA, liftA2)
import Control.Category as X
(Category, id, (.), (<<<), (>>>))
import Control.Monad as X
(when, unless, (>=>), (<=<), forM, forM_, mzero, MonadPlus(..), guard, mfilter)
import Control.Monad.IO.Class as X
(MonadIO, liftIO)
import Control.Monad.Reader.Class as X
(MonadReader, ask)
import Control.Error.Util as X
(failWith, failWithM, (??))
import Control.Monad.Trans.Maybe as X
(MaybeT(..))
import Data.Hashable as X
(Hashable)
import Data.Maybe as X
(fromMaybe, isJust, isNothing, mapMaybe, catMaybes)
import Data.Data as X
(Data)
import Data.Monoid as X
((<>))
import Data.Map as X
(Map)
import Data.HashMap.Strict as X
(HashMap)
import Data.Set as X
(Set)
import Data.HashSet as X
(HashSet)
import Data.Proxy as X
(Proxy(..))
import Data.Text as X
(Text)
import Data.Time.Clock as X
(UTCTime)
import Data.ByteString as X
(ByteString)
import Data.Typeable as X
(Typeable)
import Data.Vector as X
(Vector)
import Data.Foldable as X
(find)
import GHC.Generics as X
(Generic)
import Safe as X
(headMay, initMay, tailMay)
import qualified Prelude as P
import qualified Debug.Trace as T
-- Deliberately shadowed Prelude/Debug.Trace names: each re-export is
-- tagged with a WARNING pragma so any lingering use shows up at
-- compile time, while behaving exactly like the original.
{-# WARNING undefined "'undefined' remains in code" #-}
undefined :: a
undefined = P.undefined
{-# WARNING error "'error' remains in code" #-}
error :: P.String -> a
error = P.error
{-# WARNING trace "'trace' remains in code" #-}
trace :: P.String -> a -> a
trace = T.trace
{-# WARNING traceShow "'traceShow' remains in code" #-}
traceShow :: P.Show a => a -> a
traceShow a = T.trace (P.show a) a
{-# WARNING traceShowM "'traceShowM' remains in code" #-}
traceShowM :: (P.Show a, P.Monad m) => a -> m ()
traceShowM a = T.traceM (P.show a)
{-# WARNING traceM "'traceM' remains in code" #-}
traceM :: P.Monad m => P.String -> m ()
traceM = T.traceM
{-# WARNING traceIO "'traceIO' remains in code" #-}
traceIO :: P.String -> P.IO ()
traceIO = T.traceIO
-- | Placeholder for unwritten code; always a runtime error.
{-# WARNING notImplemented "'notImplemented' remains in code" #-}
notImplemented :: a
notImplemented = P.error "Not implemented"
-- | Run the action only when the monadic condition yields 'True'.
whenM :: Monad m => m Bool -> m () -> m ()
whenM cond action = do
  c <- cond
  when c action

-- | Run the action only when the monadic condition yields 'False'.
unlessM :: Monad m => m Bool -> m () -> m ()
unlessM cond action = do
  c <- cond
  unless c action

-- | Monadic if-then-else: the condition is itself an action.
ifM :: Monad m => m Bool -> m a -> m a -> m a
ifM cond onTrue onFalse = do
  c <- cond
  if c then onTrue else onFalse

-- | 'guard' on the result of a monadic condition.
guardM :: MonadPlus m => m Bool -> m ()
guardM cond = cond >>= guard
| zudov/purescript-inspection | src/MyLittlePrelude.hs | bsd-3-clause | 2,644 | 0 | 9 | 539 | 912 | 551 | 361 | 99 | 2 |
-- -----------------------------------------------------------------------------
--
-- AbsSyn.hs, part of Alex
--
-- (c) Chris Dornan 1995-2000, Simon Marlow 2003
--
-- This module provides a concrete representation for regular expressions and
-- scanners. Scanners are used for tokenising files in preparation for parsing.
--
-- ----------------------------------------------------------------------------}
module AbsSyn (
Code, Directive(..), Scheme(..),
wrapperName,
Scanner(..),
RECtx(..),
RExp(..),
DFA(..), State(..), SNum, StartCode, Accept(..),
RightContext(..), showRCtx, strtype,
encodeStartCodes, extractActions,
Target(..),
UsesPreds(..), usesPreds,
StrType(..)
) where
import CharSet ( CharSet, Encoding )
import Map ( Map )
import qualified Map hiding ( Map )
import Data.IntMap (IntMap)
import Sort ( nub' )
import Util ( str, nl )
import Data.Maybe ( fromJust )
infixl 4 :|
infixl 5 :%%
-- -----------------------------------------------------------------------------
-- Abstract Syntax for Alex scripts
-- | Raw Haskell code fragments copied verbatim into the output.
type Code = String
-- | Top-of-file directives recognised in an Alex script.
data Directive
  = WrapperDirective String -- use this wrapper
  | EncodingDirective Encoding -- use this encoding
  | ActionType String -- Type signature of actions,
                      -- with optional typeclasses
  | TypeClass String
  | TokenType String
  deriving Show
-- | Which input-string representation the generated lexer consumes.
data StrType = Str | Lazy | Strict
instance Show StrType where
  show Str = "String"
  -- NOTE(review): Lazy and Strict intentionally render to the same
  -- qualified name; the lazy/strict distinction is carried by the
  -- wrapper chosen in 'wrapperName' (and its import alias) -- confirm.
  show Lazy = "ByteString.ByteString"
  show Strict = "ByteString.ByteString"
-- | The code-generation scheme (wrapper family) plus its optional
-- type information @(typeclass context, token/action type)@.
data Scheme
  = Default { defaultTypeInfo :: Maybe (Maybe String, String) }
  | GScan { gscanTypeInfo :: Maybe (Maybe String, String) }
  | Basic { basicStrType :: StrType,
            basicTypeInfo :: Maybe (Maybe String, String) }
  | Posn { posnByteString :: Bool,
           posnTypeInfo :: Maybe (Maybe String, String) }
  | Monad { monadByteString :: Bool, monadUserState :: Bool,
            monadTypeInfo :: Maybe (Maybe String, String) }
-- | Name of the input-string type used in generated signatures:
-- ByteString when the flag is set, otherwise String.
strtype :: Bool -> String
strtype useByteString
  | useByteString = "ByteString.ByteString"
  | otherwise     = "String"
-- | The wrapper template to splice into the generated lexer for a
-- given scheme; 'Nothing' means no wrapper at all.
wrapperName :: Scheme -> Maybe String
wrapperName Default {} = Nothing
wrapperName GScan {} = Just "gscan"
wrapperName Basic { basicStrType = Str } = Just "basic"
wrapperName Basic { basicStrType = Lazy } = Just "basic-bytestring"
wrapperName Basic { basicStrType = Strict } = Just "strict-bytestring"
wrapperName Posn { posnByteString = False } = Just "posn"
wrapperName Posn { posnByteString = True } = Just "posn-bytestring"
wrapperName Monad { monadByteString = False,
                    monadUserState = False } = Just "monad"
wrapperName Monad { monadByteString = True,
                    monadUserState = False } = Just "monad-bytestring"
wrapperName Monad { monadByteString = False,
                    monadUserState = True } = Just "monadUserState"
wrapperName Monad { monadByteString = True,
                    monadUserState = True } = Just "monadUserState-bytestring"
-- TODO: update this comment
--
-- A `Scanner' consists of an association list associating token names with
-- regular expressions with context. The context may include a list of start
-- codes, some leading context to test the character immediately preceding the
-- token and trailing context to test the residual input after the token.
--
-- The start codes consist of the names and numbers of the start codes;
-- initially the names only will be generated by the parser, the numbers being
-- allocated at a later stage. Start codes become meaningful when scanners are
-- converted to DFAs; see the DFA section of the Scan module for details.
-- | A scanner: its name and the tokens (regexes with context) it
-- recognises.  See the long comment above for the meaning of contexts.
data Scanner = Scanner { scannerName :: String,
                         scannerTokens :: [RECtx] }
  deriving Show
-- | A regular expression together with its surrounding context:
-- start codes, optional preceding-character set, optional trailing
-- context, and the action code to run on a match.
data RECtx = RECtx { reCtxStartCodes :: [(String,StartCode)],
                     reCtxPreCtx :: Maybe CharSet,
                     reCtxRE :: RExp,
                     reCtxPostCtx :: RightContext RExp,
                     reCtxCode :: Maybe Code
                   }
-- | Trailing context: none, a regex over the residual input, or a
-- user-supplied code predicate.
data RightContext r
  = NoRightContext
  | RightContextRExp r
  | RightContextCode Code
  deriving (Eq,Ord)
-- Debug rendering: start codes, regex, right context, then action code.
instance Show RECtx where
  showsPrec _ (RECtx scs _ r rctx code) =
    showStarts scs . shows r . showRCtx rctx . showMaybeCode code
-- | Append the rendering of an optional action code fragment
-- (nothing is appended for 'Nothing').
showMaybeCode :: Maybe String -> String -> String
showMaybeCode = maybe id showCode

-- | Render an action code fragment in braces, e.g. @ { code }@,
-- prepended to the remainder string.
showCode :: String -> String -> String
showCode code rest = " { " ++ code ++ " }" ++ rest
-- | Render a start-code list (omitted entirely when empty).
showStarts :: [(String, StartCode)] -> String -> String
showStarts [] = id
showStarts scs = shows scs
-- | Render a right context: nothing, @\\regex@, or @\\ { code }@.
showRCtx :: Show r => RightContext r -> String -> String
showRCtx NoRightContext = id
showRCtx (RightContextRExp r) = ('\\':) . shows r
showRCtx (RightContextCode code) = showString "\\ " . showCode code
-- -----------------------------------------------------------------------------
-- DFAs
-- | A deterministic automaton: the start states (one per start code)
-- and the full state table keyed by state id.
data DFA s a = DFA
  { dfa_start_states :: [s],
    dfa_states :: Map s (State s a)
  }
-- | One DFA state: its accepting entries plus the transition map.
data State s a = State { state_acc :: [Accept a],
                         state_out :: IntMap s -- 0..255 only
                       }
-- | State number.
type SNum = Int
-- | An accepting entry: priority, optional action, and the left/right
-- context conditions that must hold for the accept to fire.
data Accept a
  = Acc { accPrio :: Int,
          accAction :: Maybe a,
          accLeftCtx :: Maybe CharSet, -- cannot be converted to byteset at this point.
          accRightCtx :: RightContext SNum
        }
  deriving (Eq,Ord)
-- debug stuff
instance Show (Accept a) where
  showsPrec _ (Acc p _act _lctx _rctx) = shows p --TODO
-- | Start-code number; 0 is the default start code.
type StartCode = Int
-- -----------------------------------------------------------------------------
-- Predicates / contexts
-- we can generate somewhat faster code in the case that
-- the lexer doesn't use predicates
-- | Whether any accept in the DFA carries a left/right context
-- predicate; the generator emits faster code when none do.
data UsesPreds = UsesPreds | DoesntUsePreds
-- | Scan every accepting entry of every state for a context condition.
usesPreds :: DFA s a -> UsesPreds
usesPreds dfa
    | any acceptHasCtx [ acc | st <- Map.elems (dfa_states dfa)
                             , acc <- state_acc st ]
    = UsesPreds
    | otherwise
    = DoesntUsePreds
  where
    acceptHasCtx Acc { accLeftCtx = Nothing
                     , accRightCtx = NoRightContext } = False
    acceptHasCtx _ = True
-- -----------------------------------------------------------------------------
-- Regular expressions
-- `RExp' provides an abstract syntax for regular expressions. `Eps' will
-- match empty strings; `Ch p' matches strings containinng a single character
-- `c' if `p c' is true; `re1 :%% re2' matches a string if `re1' matches one of
-- its prefixes and `re2' matches the rest; `re1 :| re2' matches a string if
-- `re1' or `re2' matches it; `Star re', `Plus re' and `Ques re' can be
-- expressed in terms of the other operators. See the definitions of `ARexp'
-- for a formal definition of the semantics of these operators.
-- | Abstract syntax of regular expressions (see the comment above for
-- the matching semantics of each constructor).
data RExp
  = Eps
  | Ch CharSet
  | RExp :%% RExp
  | RExp :| RExp
  | Star RExp
  | Plus RExp
  | Ques RExp
-- Debug rendering; character sets are elided as "[..]".
instance Show RExp where
  showsPrec _ Eps = showString "()"
  showsPrec _ (Ch _) = showString "[..]"
  showsPrec _ (l :%% r) = shows l . shows r
  showsPrec _ (l :| r) = shows l . ('|':) . shows r
  showsPrec _ (Star r) = shows r . ('*':)
  showsPrec _ (Plus r) = shows r . ('+':)
  showsPrec _ (Ques r) = shows r . ('?':)
{------------------------------------------------------------------------------
Abstract Regular Expression
------------------------------------------------------------------------------}
-- This section contains demonstrations; it is not part of Alex.
{-
-- This function illustrates `ARexp'. It returns true if the string in its
-- argument is matched by the regular expression.
recognise:: RExp -> String -> Bool
recognise re inp = any (==len) (ap_ar (arexp re) inp)
where
len = length inp
-- `ARexp' provides an regular expressions in abstract format. Here regular
-- expressions are represented by a function that takes the string to be
-- matched and returns the sizes of all the prefixes matched by the regular
-- expression (the list may contain duplicates). Each of the `RExp' operators
-- are represented by similarly named functions over ARexp. The `ap' function
-- takes an `ARExp', a string and returns the sizes of all the prefixes
-- matching that regular expression. `arexp' converts an `RExp' to an `ARexp'.
arexp:: RExp -> ARexp
arexp Eps = eps_ar
arexp (Ch p) = ch_ar p
arexp (re :%% re') = arexp re `seq_ar` arexp re'
arexp (re :| re') = arexp re `bar_ar` arexp re'
arexp (Star re) = star_ar (arexp re)
arexp (Plus re) = plus_ar (arexp re)
arexp (Ques re) = ques_ar (arexp re)
star_ar:: ARexp -> ARexp
star_ar sc = eps_ar `bar_ar` plus_ar sc
plus_ar:: ARexp -> ARexp
plus_ar sc = sc `seq_ar` star_ar sc
ques_ar:: ARexp -> ARexp
ques_ar sc = eps_ar `bar_ar` sc
-- Hugs abstract type definition -- not for GHC.
type ARexp = String -> [Int]
-- in ap_ar, eps_ar, ch_ar, seq_ar, bar_ar
ap_ar:: ARexp -> String -> [Int]
ap_ar sc = sc
eps_ar:: ARexp
eps_ar inp = [0]
ch_ar:: (Char->Bool) -> ARexp
ch_ar p "" = []
ch_ar p (c:rst) = if p c then [1] else []
seq_ar:: ARexp -> ARexp -> ARexp
seq_ar sc sc' inp = [n+m| n<-sc inp, m<-sc' (drop n inp)]
bar_ar:: ARexp -> ARexp -> ARexp
bar_ar sc sc' inp = sc inp ++ sc' inp
-}
-- -----------------------------------------------------------------------------
-- Utils
-- Map the available start codes onto [1..]
-- | Map the available start codes onto [1..]; returns the rewritten
-- scanner, the full start-code list (0 is always the default), and a
-- ShowS producing the Haskell declarations of the start-code names.
encodeStartCodes:: Scanner -> (Scanner,[StartCode],ShowS)
encodeStartCodes scan = (scan', 0 : map snd name_code_pairs, sc_hdr)
  where
    scan' = scan{ scannerTokens = map mk_re_ctx (scannerTokens scan) }
    -- Rewrite every token's start codes from names to numbers.
    mk_re_ctx (RECtx scs lc re rc code)
      = RECtx (map mk_sc scs) lc re rc code
    -- "0" is the predefined default start code; all others are looked
    -- up in the freshly built numbering.
    mk_sc (nm,_) = (nm, if nm=="0" then 0
                        else fromJust (Map.lookup nm code_map))
    -- Emit e.g. "\nsc1,sc2 :: Int\nsc1 = 1\nsc2 = 2\n" onto the tail.
    sc_hdr tl =
      case name_code_pairs of
        [] -> tl
        (nm,_):rst -> "\n" ++ nm ++ foldr f t rst
          where
            f (nm', _) t' = "," ++ nm' ++ t'
            t = " :: Int\n" ++ foldr fmt_sc tl name_code_pairs
              where
                fmt_sc (nm,sc) t = nm ++ " = " ++ show sc ++ "\n" ++ t
    code_map = Map.fromList name_code_pairs
    -- Distinct non-default names, numbered from 1 in sorted order.
    name_code_pairs = zip (nub' (<=) nms) [1..]
    nms = [nm | RECtx{reCtxStartCodes = scs} <- scannerTokens scan,
                (nm,_) <- scs, nm /= "0"]
-- Grab the code fragments for the token actions, and replace them
-- with function names of the form alex_action_$n$. We do this
-- because the actual action fragments might be duplicated in the
-- generated file.
-- | Replace each token's inline action code with a generated name of
-- the form @alex_action_N@, returning the rewritten scanner plus a
-- ShowS producing the named action declarations (with a type
-- signature appropriate to the chosen 'Scheme').  See the comment
-- above: this avoids duplicating action code in the output.
extractActions :: Scheme -> Scanner -> (Scanner,ShowS)
extractActions scheme scanner = (scanner{scannerTokens = new_tokens}, decl_str)
  where
    (new_tokens, decls) = unzip (zipWith f (scannerTokens scanner) act_names)
    -- Swap the action body for its generated name; tokens without an
    -- action produce no declaration.
    f r@RECtx{ reCtxCode = Just code } name
      = (r{reCtxCode = Just name}, Just (mkDecl name code))
    f r@RECtx{ reCtxCode = Nothing } _
      = (r{reCtxCode = Nothing}, Nothing)
    -- Action type used by the "gscan" wrapper.
    gscanActionType res =
      str "AlexPosn -> Char -> String -> Int -> ((Int, state) -> "
      . str res . str ") -> (Int, state) -> " . str res
    -- One declaration per action: an optional type signature derived
    -- from the scheme's type info, then the binding itself.
    mkDecl fun code = case scheme of
      Default { defaultTypeInfo = Just (Nothing, actionty) } ->
        str fun . str " :: " . str actionty . str "\n"
        . str fun . str " = " . str code . nl
      Default { defaultTypeInfo = Just (Just tyclasses, actionty) } ->
        str fun . str " :: (" . str tyclasses . str ") => " .
        str actionty . str "\n" .
        str fun . str " = " . str code . nl
      GScan { gscanTypeInfo = Just (Nothing, tokenty) } ->
        str fun . str " :: " . gscanActionType tokenty . str "\n"
        . str fun . str " = " . str code . nl
      GScan { gscanTypeInfo = Just (Just tyclasses, tokenty) } ->
        str fun . str " :: (" . str tyclasses . str ") => " .
        gscanActionType tokenty . str "\n" .
        str fun . str " = " . str code . nl
      Basic { basicStrType = strty, basicTypeInfo = Just (Nothing, tokenty) } ->
        str fun . str " :: " . str (show strty) . str " -> "
        . str tokenty . str "\n"
        . str fun . str " = " . str code . nl
      Basic { basicStrType = strty,
              basicTypeInfo = Just (Just tyclasses, tokenty) } ->
        str fun . str " :: (" . str tyclasses . str ") => " .
        str (show strty) . str " -> " . str tokenty . str "\n" .
        str fun . str " = " . str code . nl
      Posn { posnByteString = isByteString,
             posnTypeInfo = Just (Nothing, tokenty) } ->
        str fun . str " :: AlexPosn -> " . str (strtype isByteString) . str " -> "
        . str tokenty . str "\n"
        . str fun . str " = " . str code . nl
      Posn { posnByteString = isByteString,
             posnTypeInfo = Just (Just tyclasses, tokenty) } ->
        str fun . str " :: (" . str tyclasses . str ") => AlexPosn -> " .
        str (strtype isByteString) . str " -> " . str tokenty . str "\n" .
        str fun . str " = " . str code . nl
      Monad { monadByteString = isByteString,
              monadTypeInfo = Just (Nothing, tokenty) } ->
        let
          actintty = if isByteString then "Int64" else "Int"
        in
          str fun . str " :: AlexInput -> " . str actintty . str " -> Alex ("
          . str tokenty . str ")\n"
          . str fun . str " = " . str code . nl
      Monad { monadByteString = isByteString,
              monadTypeInfo = Just (Just tyclasses, tokenty) } ->
        let
          actintty = if isByteString then "Int64" else "Int"
        in
          str fun . str " :: (" . str tyclasses . str ") => "
          . str " AlexInput -> " . str actintty
          . str " -> Alex (" . str tokenty . str ")\n"
          . str fun . str " = " . str code . nl
      -- No type info available: emit the bare binding.
      _ -> str fun . str " = " . str code . nl
    -- An infinite supply of fresh action names.
    act_names = map (\n -> "alex_action_" ++ show (n::Int)) [0..]
    -- Concatenate all generated declarations.
    decl_str = foldr (.) id [ decl | Just decl <- decls ]
-- -----------------------------------------------------------------------------
-- Code generation targets
data Target = GhcTarget | HaskellTarget
| alanz/Alex | src/AbsSyn.hs | bsd-3-clause | 14,064 | 0 | 24 | 3,651 | 3,320 | 1,758 | 1,562 | 213 | 14 |
module Main where
import App
-- | Program entry point; all the work happens in 'appMain' from "App".
main :: IO ()
main = appMain
| lslah/kis-proto | app/Main.hs | bsd-3-clause | 60 | 0 | 6 | 14 | 22 | 13 | 9 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module Aws.Swf.Commands.PollForDecisionTask
where
-- import qualified Data.HashMap.Strict as HM
import Control.Applicative ((<$>), (<*>))
import Control.Monad (mzero)
import Data.Aeson (FromJSON (..), ToJSON (..),
Value (Object), object, (.!=), (.:),
(.:?), (.=))
import qualified Data.ByteString as B
--import Data.Aeson.Types (Parser, parse)
import Aws.Core (AsMemoryResponse (..),
ResponseConsumer (..), Transaction)
import qualified Aws.Core.Sign as S (ServiceConfiguration)
import qualified Aws.Core.Sign3 as SIG3 (signRequest)
import Aws.Core.SignClass (SignQuery (..))
import Aws.Swf.Response (SwfMetadata, jsonConsumer,
swfResponseConsumer)
import Aws.Swf.Sign (swfRequest)
import qualified Data.Text as T
import Aws.Swf.Commands.JTypes (TaskList (..), WorkflowExecution (..),
WorkflowType (..))
-- ActivityTaskCancelRequestedEventAttributes (..),
-- ActivityTaskCanceledEventAttributes)
--import Aws.Swf.Commands.Event
import qualified Aws.Swf.Commands.Event as E
-- | Operation identifier for the SWF @PollForDecisionTask@ API call;
-- passed to 'swfRequest' when building the signed request.
target :: B.ByteString
target = "com.amazonaws.swf.service.model.SimpleWorkflowService.PollForDecisionTask"
-- | Request parameters for the SWF @PollForDecisionTask@ operation.
-- Serialised to JSON by the 'ToJSON' instance in this module.
data PollForDecisionTask =
  PollForDecisionTask { domain :: T.Text,          -- ^ SWF domain to poll
                        identity :: T.Text,        -- ^ identity of the poller
                        maximumPageSize :: Int,    -- ^ maximum events per page
                        rNextPageToken :: Maybe T.Text, -- ^ pagination token; sent on the wire as @nextPageToken@
                        reverseOrder :: Bool,      -- ^ ordering flag passed through as @reverseOrder@
                        taskList :: TaskList }     -- ^ task list to poll
  deriving Show
-- | Response payload of the SWF @PollForDecisionTask@ operation,
-- decoded by the 'FromJSON' instance in this module.
data PollForDecisionTaskResponse =
  PollForDecisionTaskResponse { events :: [E.HistoryEvent],    -- ^ history page; defaults to @[]@ when the key is absent
                                nextPageToken :: Maybe T.Text, -- ^ token for fetching the next page, if any
                                previousStartedEventId :: Int,
                                startedEventId :: Int,
                                taskToken :: Maybe T.Text,     -- ^ NOTE(review): presumably 'Nothing' on an empty poll — confirm against AWS docs
                                workflowExecution :: Maybe WorkflowExecution,
                                workflowType :: Maybe WorkflowType }
  deriving Show
-- | Encode the request using record accessors. Note the deliberate key
-- rename: the 'rNextPageToken' field is sent on the wire as @nextPageToken@.
instance ToJSON PollForDecisionTask where
  toJSON req =
    object [ "domain"          .= domain req,
             "identity"        .= identity req,
             "maximumPageSize" .= maximumPageSize req,
             "nextPageToken"   .= rNextPageToken req,
             "reverseOrder"    .= reverseOrder req,
             "taskList"        .= taskList req ]
-- | Decode a poll response. @events@ defaults to the empty list when the
-- key is missing ('.!='); other optional keys use '.:?'.
instance FromJSON PollForDecisionTaskResponse where
  parseJSON (Object o) = PollForDecisionTaskResponse <$>
                         o .:? "events" .!= [] <*>
                         o .:? "nextPageToken" <*>
                         o .: "previousStartedEventId" <*>
                         o .: "startedEventId" <*>
                         o .:? "taskToken" <*>
                         o .:? "workflowExecution" <*>
                         o .:? "workflowType"
  -- Anything that is not a JSON object is a parse failure.
  parseJSON _ = mzero
-- | Build and sign the request: encode the parameters to JSON, wrap them
-- in an SWF request addressed at 'target', then sign via 'SIG3.signRequest'.
instance SignQuery PollForDecisionTask where
  type ServiceConfiguration PollForDecisionTask = S.ServiceConfiguration
  signQuery req = SIG3.signRequest $ swfRequest target $ toJSON req
-- | Consume the HTTP response by decoding its JSON body.
instance ResponseConsumer PollForDecisionTask PollForDecisionTaskResponse where
  type ResponseMetadata PollForDecisionTaskResponse = SwfMetadata
  -- Eta-reduced: the original '\rsp -> jsonConsumer rsp' wrapper added nothing.
  responseConsumer _ mref = swfResponseConsumer mref jsonConsumer
-- | Ties the request type to its response type for the aws machinery.
instance Transaction PollForDecisionTask PollForDecisionTaskResponse
-- | The decoded response is already a plain value, so loading it into
-- memory is a no-op.
instance AsMemoryResponse PollForDecisionTaskResponse where
  type MemoryResponse PollForDecisionTaskResponse = PollForDecisionTaskResponse
  loadToMemory = return
| RayRacine/aws | Aws/Swf/Commands/PollForDecisionTask.hs | bsd-3-clause | 4,178 | 0 | 20 | 1,559 | 675 | 405 | 270 | -1 | -1 |
module Problem7 where
import Prime
main :: IO ()
-- 2 is the first prime, so the answer is the 10,000th odd prime —
-- index 9999 of the filtered list, because indexing is zero-based.
main = print (filter isPrimeNaive [3, 5 ..] !! 9999)
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem7.hs | bsd-3-clause | 188 | 0 | 8 | 40 | 49 | 29 | 20 | 4 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Messages used for communication in SSC.
module Pos.Chain.Ssc.Message
( MCCommitment (..)
, MCOpening (..)
, MCShares (..)
, MCVssCertificate (..)
, _MCCommitment
, _MCOpening
, _MCShares
, _MCVssCertificate
, HasSscTag (..)
, SscTag (..)
) where
import Universum
import Control.Lens (makePrisms)
import Formatting (bprint, build, (%))
import qualified Formatting.Buildable as Buildable
import Pos.Chain.Ssc.Commitment (SignedCommitment)
import Pos.Chain.Ssc.Opening (Opening)
import Pos.Chain.Ssc.SharesMap (InnerSharesMap)
import Pos.Chain.Ssc.Toss.Types (SscTag (..))
import Pos.Chain.Ssc.VssCertificate (VssCertificate, getCertId)
import Pos.Core (StakeholderId, addressHash)
-- | Types that can report which SSC message category ('SscTag') they belong to.
class HasSscTag a where
    toSscTag :: a -> SscTag
-- | SSC message carrying a signed commitment.
data MCCommitment = MCCommitment !SignedCommitment
    deriving (Show, Eq, Generic)
-- | SSC message carrying a stakeholder's opening.
data MCOpening = MCOpening !StakeholderId !Opening
    deriving (Show, Eq, Generic)
-- | SSC message carrying a stakeholder's shares map.
data MCShares = MCShares !StakeholderId !InnerSharesMap
    deriving (Show, Eq, Generic)
-- | SSC message carrying a VSS certificate.
data MCVssCertificate = MCVssCertificate !VssCertificate
    deriving (Show, Eq, Generic)
-- Template Haskell: generate the prisms (_MCCommitment, _MCOpening, ...)
-- that appear in this module's export list.
makePrisms ''MCCommitment
makePrisms ''MCOpening
makePrisms ''MCShares
makePrisms ''MCVssCertificate
-- Human-readable renderings used in logs; each names the message kind and
-- the stakeholder it originates from.
instance Buildable MCCommitment where
    build (MCCommitment (pk, _, _)) =
        bprint ("commitment contents from "%build) (addressHash pk)
instance Buildable MCOpening where
    build (MCOpening sid _) =
        bprint ("opening contents from "%build) sid
instance Buildable MCShares where
    build (MCShares sid _) =
        bprint ("shares contents from "%build) sid
instance Buildable MCVssCertificate where
    build (MCVssCertificate cert) =
        bprint ("VSS certificate contents from "%build) (getCertId cert)
-- Each message constructor maps onto exactly one 'SscTag' category.
instance HasSscTag MCCommitment where
    toSscTag _ = CommitmentMsg
instance HasSscTag MCOpening where
    toSscTag _ = OpeningMsg
instance HasSscTag MCShares where
    toSscTag _ = SharesMsg
instance HasSscTag MCVssCertificate where
    toSscTag _ = VssCertificateMsg
| input-output-hk/pos-haskell-prototype | chain/src/Pos/Chain/Ssc/Message.hs | mit | 2,253 | 0 | 9 | 518 | 565 | 315 | 250 | 70 | 0 |
{-|
Module : Translation.Constants
Description : Define CUDA constants
Maintainer : Josh Acay<cacay@cmu.edu>
Stability : experimental
-}
module Translation.Constants (defineIndicies) where
import Control.Monad.State (State, execState, modify)
import qualified Data.Set as Set
import AST.AST
import AST.Operations
-- | A set of identifiers collected as "free" (referenced but not bound).
type Variables = Set.Set Ident
-- | State monad used to accumulate free variables while walking expressions.
type Free = State Variables
-- | Prepend definitions of the CUDA thread-index variables (@idx@, @idy@,
-- @idz@) to a function body — but only for the indices the body actually
-- references, as determined by 'freeFun'.
defineIndicies :: Fun -> Fun
defineIndicies f@(Fun t id pars sts) =
    Fun t id pars (idx ++ idy ++ idz ++ sts)
  where
    -- @var = blockIdx.d * blockDim.d + threadIdx.d@ for dimension d.
    define :: Ident -> Dimension -> Stmt
    define var d =
      Simp $ Asgn (Ident var) $
        Binop Add
              (Binop Mul (CudaVar $ BlockIdx d) (CudaVar $ BlockDim d))
              (CudaVar $ ThreadIdx d)
    free = freeFun f
    -- A singleton definition when the index is free in the body; [] otherwise.
    idx = [define "idx" DimX | Set.member "idx" free]
    idy = [define "idy" DimY | Set.member "idy" free]
    idz = [define "idz" DimZ | Set.member "idz" free]
-- | Free variables of a function: free variables of the body, minus the
-- function's own parameter names.
freeFun :: Fun -> Variables
freeFun (Fun _ _ pars sts) =
    freeStmts Set.empty sts `Set.difference` paramNames
  where
    paramNames = Set.fromList [ id | Param _ id <- pars ]
-- | Fold 'freeStmt' over a statement list from the right, so that each
-- statement sees the free variables of the statements that follow it
-- (letting declarations remove names bound from that point on).
freeStmts :: Variables -> [Stmt] -> Variables
freeStmts acc sts = foldr (\st vars -> freeStmt vars st) acc sts
-- | Add the free variables of one statement to the accumulator. The
-- accumulator holds the free variables of *later* statements, so passing
-- it into 'freeSimp' lets declarations delete the names they bind.
freeStmt :: Variables -> Stmt -> Variables
freeStmt acc st = case st of
  Simp s -> freeSimp acc s
  If e st1 st2 ->
    freeExp e `Set.union` freeStmts acc st1 `Set.union` freeStmts acc st2
  While e body -> freeExp e `Set.union` freeStmts acc body
  For init cond step body ->
    -- The condition, step and body are all inside the scope introduced by
    -- 'init', so they are folded through 'freeSimp ... init' last.
    let header = freeExp cond `Set.union` freeSimp Set.empty step
    in freeSimp (header `Set.union` freeStmts acc body) init
  Break -> acc
  Continue -> acc
  Ret Nothing -> acc
  Ret (Just e) -> acc `Set.union` freeExp e
-- | Free variables of a simple statement. A declaration removes its name
-- from the accumulated set. NOTE(review): an assignment to a bare
-- identifier also deletes that name — this treats such assignments as
-- binding occurrences (as used for the idx/idy/idz definitions); confirm
-- this is intended for arbitrary assignments.
freeSimp :: Variables -> Simp -> Variables
freeSimp acc (Decl _ id) = Set.delete id acc
freeSimp acc (Asgn (Ident id) e) = freeExp e `Set.union` Set.delete id acc
freeSimp acc (Asgn e1 e2) = freeExp e1 `Set.union` freeExp e2 `Set.union` acc
freeSimp acc (Exp e) = acc `Set.union` freeExp e
freeSimp acc Nop = acc
-- | All identifiers occurring in an expression. CUDA built-in variables
-- are included under their rendered ('show') names, matching the "idx"
-- style strings used by 'defineIndicies'.
freeExp :: Exp -> Variables
freeExp = collect
  where
    collect :: Exp -> Variables
    collect Null            = Set.empty
    collect (Bool _)        = Set.empty
    collect (Int _)         = Set.empty
    collect (Float _)       = Set.empty
    collect (CudaVar v)     = Set.singleton (show v)
    collect (Ident id)      = Set.singleton id
    collect (Binop _ e1 e2) = collect e1 `Set.union` collect e2
    collect (Cmp _ e1 e2)   = collect e1 `Set.union` collect e2
    collect (Case e1 e2 e3) = Set.unions [collect e1, collect e2, collect e3]
    collect (Call _ args)   = Set.unions (map collect args)
    collect (Index e1 e2)   = collect e1 `Set.union` collect e2
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Test.AWS.ELB.Internal
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.ELB.Internal where
import Test.AWS.Prelude
| fmapfmapfmap/amazonka | amazonka-elb/test/Test/AWS/ELB/Internal.hs | mpl-2.0 | 613 | 0 | 4 | 140 | 25 | 21 | 4 | 4 | 0 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
<title>Hızlı Başlat | ZAP Uzantıları</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>İçindekiler</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>İçerik</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Arama</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriler</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/quickstart/src/main/javahelp/org/zaproxy/zap/extension/quickstart/resources/help_tr_TR/helpset_tr_TR.hs | apache-2.0 | 987 | 80 | 66 | 160 | 431 | 217 | 214 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Run.Config where
import qualified Data.Configurator as Conf
import qualified Data.Configurator.Types as Conf
import System.Directory
import System.FilePath
import System.Log.Logger
import qualified Data.Text as Text
-- | Load the configuration files. Both locations are optional, so a
-- missing file is silently skipped rather than an error.
loadConfig :: IO Conf.Config
loadConfig = do
    appDataDir <- getAppUserDataDirectory "pontarius-xmpp-tests"
    homeDir    <- getHomeDirectory
    let candidates = [ appDataDir </> "pontarius-xmpp-tests.conf"
                     , homeDir    </> ".pontarius-xmpp-tests.conf"
                     ]
    Conf.load (map Conf.Optional candidates)
-- | Read the @loglevel@ setting from the configuration, apply it to the
-- "Pontarius.Xmpp" logger, and return it. A missing setting defaults to
-- 'ERROR'; an unrecognised level name aborts with 'error'.
configuredLoglevel :: Conf.Config -> IO Priority
configuredLoglevel conf = do
    loglevel <- Conf.lookup conf "loglevel" >>= \case
        (Nothing :: Maybe Text.Text) -> return ERROR
        Just "debug" -> return DEBUG
        Just "info" -> return INFO
        Just "notice" -> return NOTICE
        Just "warning" -> return WARNING
        Just "error" -> return ERROR
        Just "critical" -> return CRITICAL
        Just "alert" -> return ALERT
        Just "emergency" -> return EMERGENCY
        Just e -> error $ "Log level " ++ Text.unpack e ++ " unknown"
    updateGlobalLogger "Pontarius.Xmpp" $ setLevel loglevel
    return loglevel
| Philonous/pontarius-xmpp | tests/Run/Config.hs | bsd-3-clause | 1,287 | 0 | 16 | 315 | 310 | 153 | 157 | 30 | 10 |
{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-}
-- | Planning how to build everything in a project.
--
module Distribution.Client.ProjectPlanning (
-- * elaborated install plan types
ElaboratedInstallPlan,
ElaboratedConfiguredPackage(..),
ElaboratedPlanPackage,
ElaboratedSharedConfig(..),
ElaboratedReadyPackage,
BuildStyle(..),
CabalFileText,
--TODO: [code cleanup] these types should live with execution, not with
-- plan definition. Need to better separate InstallPlan definition.
GenericBuildResult(..),
BuildResult,
BuildSuccess(..),
BuildFailure(..),
DocsResult(..),
TestsResult(..),
-- * Producing the elaborated install plan
rebuildInstallPlan,
-- * Build targets
PackageTarget(..),
ComponentTarget(..),
SubComponentTarget(..),
showComponentTarget,
-- * Selecting a plan subset
pruneInstallPlanToTargets,
-- * Utils required for building
pkgHasEphemeralBuildTargets,
pkgBuildTargetWholeComponents,
-- * Setup.hs CLI flags for building
setupHsScriptOptions,
setupHsConfigureFlags,
setupHsBuildFlags,
setupHsBuildArgs,
setupHsReplFlags,
setupHsReplArgs,
setupHsCopyFlags,
setupHsRegisterFlags,
setupHsHaddockFlags,
packageHashInputs,
-- TODO: [code cleanup] utils that should live in some shared place?
createPackageDBIfMissing
) where
import Distribution.Client.ProjectPlanning.Types
import Distribution.Client.PackageHash
import Distribution.Client.RebuildMonad
import Distribution.Client.ProjectConfig
import Distribution.Client.ProjectPlanOutput
import Distribution.Client.Types
hiding ( BuildResult, BuildSuccess(..), BuildFailure(..)
, DocsResult(..), TestsResult(..) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types
import qualified Distribution.Client.IndexUtils as IndexUtils
import Distribution.Client.Targets (userToPackageConstraint)
import Distribution.Client.DistDirLayout
import Distribution.Client.SetupWrapper
import Distribution.Client.JobControl
import Distribution.Client.FetchUtils
import qualified Hackage.Security.Client as Sec
import Distribution.Client.Setup hiding (packageName, cabalVersion)
import Distribution.Utils.NubList
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.ComponentDeps (ComponentDeps)
import Distribution.Solver.Types.ConstraintSource
import Distribution.Solver.Types.LabeledPackageConstraint
import Distribution.Solver.Types.OptionalStanza
import Distribution.Solver.Types.PackageFixedDeps
import qualified Distribution.Solver.Types.PackageIndex as SourcePackageIndex
import Distribution.Solver.Types.PkgConfigDb
import Distribution.Solver.Types.Settings
import Distribution.Solver.Types.SolverId
import Distribution.Solver.Types.SolverPackage
import Distribution.Solver.Types.SourcePackage
import Distribution.Package hiding
(InstalledPackageId, installedPackageId)
import Distribution.System
import qualified Distribution.PackageDescription as Cabal
import qualified Distribution.PackageDescription as PD
import qualified Distribution.PackageDescription.Configuration as PD
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.Compiler hiding (Flag)
import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate
import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate
import Distribution.Simple.Program
import Distribution.Simple.Program.Db
import Distribution.Simple.Program.Find
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Setup
(Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault)
import qualified Distribution.Simple.Configure as Cabal
import qualified Distribution.Simple.LocalBuildInfo as Cabal
import Distribution.Simple.LocalBuildInfo (ComponentName(..))
import qualified Distribution.Simple.Register as Cabal
import qualified Distribution.Simple.InstallDirs as InstallDirs
import qualified Distribution.Simple.BuildTarget as Cabal
import Distribution.Simple.Utils hiding (matchFileGlob)
import Distribution.Version
import Distribution.Verbosity
import Distribution.Text
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Graph as Graph
import qualified Data.Tree as Tree
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad
import Control.Monad.State as State
import Control.Exception
import Data.List
import Data.Maybe
import Data.Either
import Data.Monoid
import Data.Function
import System.FilePath
import System.Directory (doesDirectoryExist)
------------------------------------------------------------------------------
-- * Elaborated install plan
------------------------------------------------------------------------------
-- "Elaborated" -- worked out with great care and nicety of detail;
-- executed with great minuteness: elaborate preparations;
-- elaborate care.
--
-- So here's the idea:
--
-- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc
-- all passed in as separate args and which are then further selected,
-- transformed etc during the execution of the build. Instead we construct
-- an elaborated install plan that includes everything we will need, and then
-- during the execution of the plan we do as little transformation of this
-- info as possible.
--
-- So we're trying to split the work into two phases: construction of the
-- elaborated install plan (which as far as possible should be pure) and
-- then simple execution of that plan without any smarts, just doing what the
-- plan says to do.
--
-- So that means we need a representation of this fully elaborated install
-- plan. The representation consists of two parts:
--
-- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a
-- representation of source packages that includes a lot more detail about
-- that package's individual configuration
--
-- * A 'ElaboratedSharedConfig'. Some package configuration is the same for
-- every package in a plan. Rather than duplicate that info every entry in
-- the 'GenericInstallPlan' we keep that separately.
--
-- The division between the shared and per-package config is /not set in stone
-- for all time/. For example if we wanted to generalise the install plan to
-- describe a situation where we want to build some packages with GHC and some
-- with GHCJS then the platform and compiler would no longer be shared between
-- all packages but would have to be per-package (probably with some sanity
-- condition on the graph structure).
--
-- Refer to ProjectPlanning.Types for details of these important types:
-- type ElaboratedInstallPlan = ...
-- type ElaboratedPlanPackage = ...
-- data ElaboratedSharedConfig = ...
-- data ElaboratedConfiguredPackage = ...
-- data BuildStyle =
-- | Check that an 'ElaboratedConfiguredPackage' actually makes
-- sense under some 'ElaboratedSharedConfig'.
-- Note: these checks are built from 'assert', so they are compiled away
-- when GHC's assertion checking is disabled (e.g. with -O unless
-- -fno-ignore-asserts); 'ret' is always returned unchanged.
sanityCheckElaboratedConfiguredPackage :: ElaboratedSharedConfig
                                       -> ElaboratedConfiguredPackage
                                       -> a
                                       -> a
sanityCheckElaboratedConfiguredPackage sharedConfig
                                       pkg@ElaboratedConfiguredPackage{..}
                                       ret =
    -- we should only have enabled stanzas that actually can be built
    -- (according to the solver)
    assert (pkgStanzasEnabled `Set.isSubsetOf` pkgStanzasAvailable)
    -- the stanzas that the user explicitly requested should be
    -- enabled (by the previous test, they are also available)
  . assert (Map.keysSet (Map.filter id pkgStanzasRequested)
              `Set.isSubsetOf` pkgStanzasEnabled)
    -- the stanzas explicitly disabled should not be available
  . assert (Set.null (Map.keysSet (Map.filter not pkgStanzasRequested)
              `Set.intersection` pkgStanzasAvailable))
    -- either a package is being built inplace, or the
    -- 'installedPackageId' we assigned is consistent with
    -- the 'hashedInstalledPackageId' we would compute from
    -- the elaborated configured package
  . assert (pkgBuildStyle == BuildInplaceOnly ||
     installedPackageId pkg == hashedInstalledPackageId
                                 (packageHashInputs sharedConfig pkg))
    -- either a package is built inplace, or we are not attempting to
    -- build any test suites or benchmarks (we never build these
    -- for remote packages!)
  . assert (pkgBuildStyle == BuildInplaceOnly ||
     Set.null pkgStanzasAvailable)
  $ ret
------------------------------------------------------------------------------
-- * Deciding what to do: making an 'ElaboratedInstallPlan'
------------------------------------------------------------------------------
-- | Bring the install plan up to date: (re)read the project config,
-- configure the compiler, run the solver, elaborate the result and finally
-- improve the plan with pre-existing store packages. Each phase is cached
-- independently via 'rerunIfChanged' so only invalidated phases rerun.
rebuildInstallPlan :: Verbosity
                   -> FilePath -> DistDirLayout -> CabalDirLayout
                   -> ProjectConfig
                   -> IO ( ElaboratedInstallPlan
                         , ElaboratedSharedConfig
                         , ProjectConfig )
rebuildInstallPlan verbosity
                   projectRootDir
                   distDirLayout@DistDirLayout {
                     distDirectory,
                     distProjectCacheFile,
                     distProjectCacheDirectory
                   }
                   cabalDirLayout@CabalDirLayout {
                     cabalPackageCacheDirectory,
                     cabalStoreDirectory,
                     cabalStorePackageDB
                   }
                   cliConfig =
    runRebuild projectRootDir $ do
    progsearchpath <- liftIO $ getSystemSearchPath
    let cliConfigPersistent = cliConfig { projectConfigBuildOnly = mempty }
    -- The overall improved plan is cached
    rerunIfChanged verbosity fileMonitorImprovedPlan
                   -- react to changes in command line args and the path
                   (cliConfigPersistent, progsearchpath) $ do
      -- And so is the elaborated plan that the improved plan based on
      (elaboratedPlan, elaboratedShared,
       projectConfig) <-
        rerunIfChanged verbosity fileMonitorElaboratedPlan
                       (cliConfigPersistent, progsearchpath) $ do
          (projectConfig, projectConfigTransient) <- phaseReadProjectConfig
          localPackages <- phaseReadLocalPackages projectConfig
          compilerEtc   <- phaseConfigureCompiler projectConfig
          _             <- phaseConfigurePrograms projectConfig compilerEtc
          solverPlan    <- phaseRunSolver         projectConfigTransient
                                                  compilerEtc localPackages
          (elaboratedPlan,
           elaboratedShared) <- phaseElaboratePlan projectConfigTransient
                                                   compilerEtc
                                                   solverPlan localPackages
          phaseMaintainPlanOutputs elaboratedPlan elaboratedShared
          return (elaboratedPlan, elaboratedShared,
                  projectConfig)
      -- The improved plan changes each time we install something, whereas
      -- the underlying elaborated plan only changes when input config
      -- changes, so it's worth caching them separately.
      improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared
      return (improvedPlan, elaboratedShared, projectConfig)
  where
    -- One file monitor (cache file) per cached phase, all living in the
    -- project's dist cache directory.
    fileMonitorCompiler       = newFileMonitorInCacheDir "compiler"
    fileMonitorSolverPlan     = newFileMonitorInCacheDir "solver-plan"
    fileMonitorSourceHashes   = newFileMonitorInCacheDir "source-hashes"
    fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan"
    fileMonitorImprovedPlan   = newFileMonitorInCacheDir "improved-plan"
    newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b
    newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile
    -- Read the cabal.project (or implicit config) and combine it with
    -- arguments from the command line
    --
    phaseReadProjectConfig :: Rebuild (ProjectConfig, ProjectConfig)
    phaseReadProjectConfig = do
      liftIO $ do
        info verbosity "Project settings changed, reconfiguring..."
        createDirectoryIfMissingVerbose verbosity False distDirectory
        createDirectoryIfMissingVerbose verbosity False distProjectCacheDirectory
      projectConfig <- readProjectConfig verbosity projectRootDir
      -- The project config coming from the command line includes "build only"
      -- flags that we don't cache persistently (because like all "build only"
      -- flags they do not affect the value of the outcome) but that we do
      -- sometimes use during planning (in particular the http transport)
      let projectConfigTransient  = projectConfig <> cliConfig
          projectConfigPersistent = projectConfig
                                 <> cliConfig {
                                      projectConfigBuildOnly = mempty
                                    }
      liftIO $ writeProjectConfigFile (distProjectCacheFile "config")
                                      projectConfigPersistent
      return (projectConfigPersistent, projectConfigTransient)
    -- Look for all the cabal packages in the project
    -- some of which may be local src dirs, tarballs etc
    --
    phaseReadLocalPackages :: ProjectConfig
                           -> Rebuild [UnresolvedSourcePackage]
    phaseReadLocalPackages projectConfig = do
      localCabalFiles <- findProjectPackages projectRootDir projectConfig
      mapM (readSourcePackage verbosity) localCabalFiles
    -- Configure the compiler we're using.
    --
    -- This is moderately expensive and doesn't change that often so we cache
    -- it independently.
    --
    phaseConfigureCompiler :: ProjectConfig
                           -> Rebuild (Compiler, Platform, ProgramDb)
    phaseConfigureCompiler ProjectConfig {
                             projectConfigShared = ProjectConfigShared {
                               projectConfigHcFlavor,
                               projectConfigHcPath,
                               projectConfigHcPkg
                             },
                             projectConfigLocalPackages = PackageConfig {
                               packageConfigProgramPaths,
                               packageConfigProgramArgs,
                               packageConfigProgramPathExtra
                             }
                           } = do
        progsearchpath <- liftIO $ getSystemSearchPath
        rerunIfChanged verbosity fileMonitorCompiler
                       (hcFlavor, hcPath, hcPkg, progsearchpath,
                        packageConfigProgramPaths,
                        packageConfigProgramArgs,
                        packageConfigProgramPathExtra) $ do
          liftIO $ info verbosity "Compiler settings changed, reconfiguring..."
          result@(_, _, progdb') <- liftIO $
            Cabal.configCompilerEx
              hcFlavor hcPath hcPkg
              progdb verbosity
          -- Note that we added the user-supplied program locations and args
          -- for /all/ programs, not just those for the compiler prog and
          -- compiler-related utils. In principle we don't know which programs
          -- the compiler will configure (and it does vary between compilers).
          -- We do know however that the compiler will only configure the
          -- programs it cares about, and those are the ones we monitor here.
          monitorFiles (programsMonitorFiles progdb')
          return result
      where
        hcFlavor = flagToMaybe projectConfigHcFlavor
        hcPath   = flagToMaybe projectConfigHcPath
        hcPkg    = flagToMaybe projectConfigHcPkg
        progdb   =
            userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths))
          . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs))
          . modifyProgramSearchPath
              (++ [ ProgramSearchPathDir dir
                  | dir <- fromNubList packageConfigProgramPathExtra ])
          $ defaultProgramDb
    -- Configuring other programs.
    --
    -- Having configured the compiler, now we configure all the remaining
    -- programs. This is to check we can find them, and to monitor them for
    -- changes.
    --
    -- TODO: [required eventually] we don't actually do this yet.
    --
    -- We rely on the fact that the previous phase added the program config for
    -- all local packages, but that all the programs configured so far are the
    -- compiler program or related util programs.
    --
    phaseConfigurePrograms :: ProjectConfig
                           -> (Compiler, Platform, ProgramDb)
                           -> Rebuild ()
    phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do
        -- Users are allowed to specify program locations independently for
        -- each package (e.g. to use a particular version of a pre-processor
        -- for some packages). However they cannot do this for the compiler
        -- itself as that's just not going to work. So we check for this.
        liftIO $ checkBadPerPackageCompilerPaths
          (configuredPrograms compilerprogdb)
          (getMapMappend (projectConfigSpecificPackage projectConfig))
        --TODO: [required eventually] find/configure other programs that the
        -- user specifies.
        --TODO: [required eventually] find/configure all build-tools
        -- but note that some of them may be built as part of the plan.
    -- Run the solver to get the initial install plan.
    -- This is expensive so we cache it independently.
    --
    phaseRunSolver :: ProjectConfig
                   -> (Compiler, Platform, ProgramDb)
                   -> [UnresolvedSourcePackage]
                   -> Rebuild (SolverInstallPlan, PackagesImplicitSetupDeps)
    phaseRunSolver projectConfig@ProjectConfig {
                     projectConfigShared,
                     projectConfigBuildOnly
                   }
                   (compiler, platform, progdb)
                   localPackages =
        rerunIfChanged verbosity fileMonitorSolverPlan
                       (solverSettings, cabalPackageCacheDirectory,
                        localPackages, localPackagesEnabledStanzas,
                        compiler, platform, programsDbSignature progdb) $ do
          installedPkgIndex <- getInstalledPackages verbosity
                                                    compiler progdb platform
                                                    corePackageDbs
          sourcePkgDb       <- getSourcePackages verbosity withRepoCtx
          pkgConfigDB       <- getPkgConfigDb verbosity progdb
          --TODO: [code cleanup] it'd be better if the Compiler contained the
          -- ConfiguredPrograms that it needs, rather than relying on the progdb
          -- since we don't need to depend on all the programs here, just the
          -- ones relevant for the compiler.
          liftIO $ do
            solver <- chooseSolver verbosity
                                   (solverSettingSolver solverSettings)
                                   (compilerInfo compiler)
            notice verbosity "Resolving dependencies..."
            foldProgress logMsg die return $
              planPackages compiler platform solver solverSettings
                           installedPkgIndex sourcePkgDb pkgConfigDB
                           localPackages localPackagesEnabledStanzas
      where
        corePackageDbs = [GlobalPackageDB]
        withRepoCtx    = projectConfigWithSolverRepoContext verbosity
                           cabalPackageCacheDirectory
                           projectConfigShared
                           projectConfigBuildOnly
        solverSettings = resolveSolverSettings projectConfig
        logMsg message rest = debugNoWrap verbosity message >> rest
        localPackagesEnabledStanzas =
          Map.fromList
            [ (pkgname, stanzas)
            | pkg <- localPackages
            , let pkgname = packageName pkg
                  testsEnabled = lookupLocalPackageConfig
                                   packageConfigTests
                                   projectConfig pkgname
                  benchmarksEnabled = lookupLocalPackageConfig
                                        packageConfigBenchmarks
                                        projectConfig pkgname
                  stanzas =
                    Map.fromList $
                      [ (TestStanzas, enabled)
                      | enabled <- flagToList testsEnabled ]
                   ++ [ (BenchStanzas , enabled)
                      | enabled <- flagToList benchmarksEnabled ]
            ]
    -- Elaborate the solver's install plan to get a fully detailed plan. This
    -- version of the plan has the final nix-style hashed ids.
    --
    phaseElaboratePlan :: ProjectConfig
                       -> (Compiler, Platform, ProgramDb)
                       -> (SolverInstallPlan, PackagesImplicitSetupDeps)
                       -> [SourcePackage loc]
                       -> Rebuild ( ElaboratedInstallPlan
                                  , ElaboratedSharedConfig )
    phaseElaboratePlan ProjectConfig {
                         projectConfigShared,
                         projectConfigLocalPackages,
                         projectConfigSpecificPackage,
                         projectConfigBuildOnly
                       }
                       (compiler, platform, progdb)
                       (solverPlan, pkgsImplicitSetupDeps)
                       localPackages = do
        liftIO $ debug verbosity "Elaborating the install plan..."
        sourcePackageHashes <-
          rerunIfChanged verbosity fileMonitorSourceHashes
                         (packageLocationsSignature solverPlan) $
            getPackageSourceHashes verbosity withRepoCtx solverPlan
        defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler
        return $
          elaborateInstallPlan
            platform compiler progdb
            distDirLayout
            cabalDirLayout
            solverPlan
            pkgsImplicitSetupDeps
            localPackages
            sourcePackageHashes
            defaultInstallDirs
            projectConfigShared
            projectConfigLocalPackages
            (getMapMappend projectConfigSpecificPackage)
      where
        withRepoCtx = projectConfigWithSolverRepoContext verbosity
                        cabalPackageCacheDirectory
                        projectConfigShared
                        projectConfigBuildOnly
    -- Update the files we maintain that reflect our current build environment.
    -- In particular we maintain a JSON representation of the elaborated
    -- install plan.
    --
    -- TODO: [required eventually] maintain the ghc environment file reflecting
    -- the libs available. This will need to be after plan improvement phase.
    --
    phaseMaintainPlanOutputs :: ElaboratedInstallPlan
                             -> ElaboratedSharedConfig
                             -> Rebuild ()
    phaseMaintainPlanOutputs elaboratedPlan elaboratedShared = do
        liftIO $ debug verbosity "Updating plan.json"
        liftIO $ writePlanExternalRepresentation
                   distDirLayout
                   elaboratedPlan
                   elaboratedShared
    -- Improve the elaborated install plan. The elaborated plan consists
    -- mostly of source packages (with full nix-style hashed ids). Where
    -- corresponding installed packages already exist in the store, replace
    -- them in the plan.
    --
    -- Note that we do monitor the store's package db here, so we will redo
    -- this improvement phase when the db changes -- including as a result of
    -- executing a plan and installing things.
    --
    phaseImprovePlan :: ElaboratedInstallPlan
                     -> ElaboratedSharedConfig
                     -> Rebuild ElaboratedInstallPlan
    phaseImprovePlan elaboratedPlan elaboratedShared = do
        liftIO $ debug verbosity "Improving the install plan..."
        recreateDirectory verbosity True storeDirectory
        storePkgIndex <- getPackageDBContents verbosity
                                              compiler progdb platform
                                              storePackageDb
        let improvedPlan = improveInstallPlanWithPreExistingPackages
                             storePkgIndex
                             elaboratedPlan
        return improvedPlan
      where
        storeDirectory = cabalStoreDirectory (compilerId compiler)
        storePackageDb = cabalStorePackageDB (compilerId compiler)
        ElaboratedSharedConfig {
          pkgConfigPlatform      = platform,
          pkgConfigCompiler      = compiler,
          pkgConfigCompilerProgs = progdb
        } = elaboratedShared
-- | The file monitors for every configured program: the search-path entries
-- and the program path itself, for each program in the db.
programsMonitorFiles :: ProgramDb -> [MonitorFilePath]
programsMonitorFiles progdb =
    concatMap monitorsFor (configuredPrograms progdb)
  where
    monitorsFor prog =
      monitorFileSearchPath (programMonitorFiles prog)
                            (programPath prog)
-- | Select the bits of a 'ProgramDb' to monitor for value changes.
-- Use 'programsMonitorFiles' for the files to monitor.
--
programsDbSignature :: ProgramDb -> [ConfiguredProgram]
programsDbSignature progdb =
    map normalise (configuredPrograms progdb)
  where
    -- Drop the monitored files (tracked separately) and the PATH entry of
    -- the override environment, keeping everything else as the signature.
    normalise prog =
      prog { programMonitorFiles = []
           , programOverrideEnv  = filter ((/= "PATH") . fst)
                                          (programOverrideEnv prog)
           }
getInstalledPackages :: Verbosity
                     -> Compiler -> ProgramDb -> Platform
                     -> PackageDBStack
                     -> Rebuild InstalledPackageIndex
getInstalledPackages verbosity compiler progdb platform packagedbs = do
    -- Register the on-disk files backing these package dbs, so changes to
    -- them invalidate whatever is derived from the returned index.
    dbfiles <- liftIO $
      IndexUtils.getInstalledPackagesMonitorFiles
        verbosity compiler
        packagedbs progdb platform
    monitorFiles (map monitorFileOrDirectory dbfiles)
    liftIO $
      IndexUtils.getInstalledPackages
        verbosity compiler
        packagedbs progdb
getPackageDBContents :: Verbosity
                     -> Compiler -> ProgramDb -> Platform
                     -> PackageDB
                     -> Rebuild InstalledPackageIndex
getPackageDBContents verbosity compiler progdb platform packagedb = do
    -- Monitor the files backing this single package db.
    dbfiles <- liftIO $
      IndexUtils.getInstalledPackagesMonitorFiles
        verbosity compiler
        [packagedb] progdb platform
    monitorFiles (map monitorFileOrDirectory dbfiles)
    liftIO $ do
      -- Make sure the db exists before trying to read its contents.
      createPackageDBIfMissing verbosity compiler progdb [packagedb]
      Cabal.getPackageDBContents verbosity compiler packagedb progdb
getSourcePackages :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a)
                  -> Rebuild SourcePackageDb
getSourcePackages verbosity withRepoCtx = do
    -- Read the source package index inside a repo context, and also grab
    -- the list of repos so we know which index files to monitor.
    (sourcePkgDb, repos) <-
      liftIO $ withRepoCtx $ \repoctx -> do
        db <- IndexUtils.getSourcePackages verbosity repoctx
        return (db, repoContextRepos repoctx)
    monitorFiles
      (map monitorFile (IndexUtils.getSourcePackagesMonitorFiles repos))
    return sourcePkgDb
-- | Create a package db if it does not already exist.
--
-- Only the top (most specific) db of the stack is considered, and only when
-- it is a 'SpecificPackageDB', i.e. a concrete path we manage ourselves;
-- global\/user dbs are left untouched.
createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb
                         -> PackageDBStack -> IO ()
createPackageDBIfMissing verbosity compiler progdb packageDbs =
    case reverse packageDbs of
      SpecificPackageDB dbPath : _ -> do
        -- We are already in IO here; the original wrapped this call in a
        -- redundant 'liftIO' (identity for IO), which was misleading.
        exists <- Cabal.doesPackageDBExist dbPath
        unless exists $ do
          -- Ensure the parent directory exists before initialising the db.
          createDirectoryIfMissingVerbose verbosity False (takeDirectory dbPath)
          Cabal.createPackageDB verbosity compiler progdb False dbPath
      _ -> return ()
getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb
getPkgConfigDb verbosity progdb = do
    dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb
    -- Just monitor the dirs so we'll notice new .pc files.
    -- Alternatively we could monitor all the .pc files too.
    forM_ dirs $ \dir -> do
      exists <- liftIO (doesDirectoryExist dir)
      -- TODO: turn this into a utility function
      let dirMonitor
            | exists    = monitorDirectory dir
            | otherwise = monitorNonExistentDirectory dir
      monitorFiles [dirMonitor]
    liftIO $ readPkgConfigDb verbosity progdb
recreateDirectory :: Verbosity -> Bool -> FilePath -> Rebuild ()
recreateDirectory verbosity createParents dir = do
    -- Ensure the directory exists (optionally creating parents too), then
    -- watch its existence so its removal triggers a rerun.
    liftIO (createDirectoryIfMissingVerbose verbosity createParents dir)
    monitorFiles [monitorDirectoryExistence dir]
-- | The part of the solver plan that the package source hashes depend on:
-- the id and source location of every configured package.
packageLocationsSignature :: SolverInstallPlan
                          -> [(PackageId, PackageLocation (Maybe FilePath))]
packageLocationsSignature solverPlan =
    concatMap configuredLocation (InstallPlan.toList solverPlan)
  where
    -- Only 'Configured' plan entries carry a source location.
    configuredLocation
      (InstallPlan.Configured (SolverPackage { solverPkgSource = pkg })) =
        [(packageId pkg, packageSource pkg)]
    configuredLocation _ = []
-- | Get the 'HashValue' for all the source packages where we use hashes,
-- and download any packages required to do so.
--
-- Note that we don't get hashes for local unpacked packages.
--
getPackageSourceHashes :: Verbosity
-> (forall a. (RepoContext -> IO a) -> IO a)
-> SolverInstallPlan
-> Rebuild (Map PackageId PackageSourceHash)
getPackageSourceHashes verbosity withRepoCtx solverPlan = do
-- Determine if and where to get the package's source hash from.
--
let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))]
allPkgLocations =
[ (packageId pkg, packageSource pkg)
| InstallPlan.Configured (SolverPackage { solverPkgSource = pkg})
<- InstallPlan.toList solverPlan ]
-- Tarballs that were local in the first place.
-- We'll hash these tarball files directly.
localTarballPkgs :: [(PackageId, FilePath)]
localTarballPkgs =
[ (pkgid, tarball)
| (pkgid, LocalTarballPackage tarball) <- allPkgLocations ]
-- Tarballs from remote URLs. We must have downloaded these already
-- (since we extracted the .cabal file earlier)
--TODO: [required eventually] finish remote tarball functionality
-- allRemoteTarballPkgs =
-- [ (pkgid, )
-- | (pkgid, RemoteTarballPackage ) <- allPkgLocations ]
-- Tarballs from repositories, either where the repository provides
-- hashes as part of the repo metadata, or where we will have to
-- download and hash the tarball.
-- Secure repos carry hash metadata; all others need a direct hash.
repoTarballPkgsWithMetadata :: [(PackageId, Repo)]
repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)]
(repoTarballPkgsWithMetadata,
repoTarballPkgsWithoutMetadata) =
partitionEithers
[ case repo of
RepoSecure{} -> Left (pkgid, repo)
_ -> Right (pkgid, repo)
| (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ]
-- For tarballs from repos that do not have hashes available we now have
-- to check if the packages were downloaded already.
--
(repoTarballPkgsToDownload,
repoTarballPkgsDownloaded)
<- fmap partitionEithers $
liftIO $ sequence
[ do mtarball <- checkRepoTarballFetched repo pkgid
case mtarball of
Nothing -> return (Left (pkgid, repo))
Just tarball -> return (Right (pkgid, tarball))
| (pkgid, repo) <- repoTarballPkgsWithoutMetadata ]
(hashesFromRepoMetadata,
repoTarballPkgsNewlyDownloaded) <-
-- Avoid having to initialise the repository (ie 'withRepoCtx') if we
-- don't have to. (The main cost is configuring the http client.)
if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata
then return (Map.empty, [])
else liftIO $ withRepoCtx $ \repoctx -> do
-- For tarballs from repos that do have hashes available as part of the
-- repo metadata we now load up the index for each repo and retrieve
-- the hashes for the packages
--
hashesFromRepoMetadata <-
Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions
fmap (Map.fromList . concat) $
sequence
-- Reading the repo index is expensive so we group the packages by repo
[ repoContextWithSecureRepo repoctx repo $ \secureRepo ->
Sec.withIndex secureRepo $ \repoIndex ->
sequence
[ do hash <- Sec.trusted <$> -- strip off Trusted tag
Sec.indexLookupHash repoIndex pkgid
-- Note that hackage-security currently uses SHA256
-- but this API could in principle give us some other
-- choice in future.
return (pkgid, hashFromTUF hash)
| pkgid <- pkgids ]
| (repo, pkgids) <-
-- Group (pkgid, repo) pairs by repo name: sort then groupBy.
map (\grp@((_,repo):_) -> (repo, map fst grp))
. groupBy ((==) `on` (remoteRepoName . repoRemote . snd))
. sortBy (compare `on` (remoteRepoName . repoRemote . snd))
$ repoTarballPkgsWithMetadata
]
-- For tarballs from repos that do not have hashes available, download
-- the ones we previously determined we need.
--
repoTarballPkgsNewlyDownloaded <-
sequence
[ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid
return (pkgid, tarball)
| (pkgid, repo) <- repoTarballPkgsToDownload ]
return (hashesFromRepoMetadata,
repoTarballPkgsNewlyDownloaded)
-- Hash tarball files for packages where we have to do that. This includes
-- tarballs that were local in the first place, plus tarballs from repos,
-- either previously cached or freshly downloaded.
--
let allTarballFilePkgs :: [(PackageId, FilePath)]
allTarballFilePkgs = localTarballPkgs
++ repoTarballPkgsDownloaded
++ repoTarballPkgsNewlyDownloaded
hashesFromTarballFiles <- liftIO $
fmap Map.fromList $
sequence
[ do srchash <- readFileHashValue tarball
return (pkgid, srchash)
| (pkgid, tarball) <- allTarballFilePkgs
]
-- Monitor the tarball files we hashed directly: if one changes on disk
-- we must recompute its hash.
monitorFiles [ monitorFile tarball
| (_pkgid, tarball) <- allTarballFilePkgs ]
-- Return the combination
-- (forced to WHNF; note <> is left-biased for Map on duplicate keys,
-- though the metadata and file-hash key sets should be disjoint here)
return $! hashesFromRepoMetadata
<> hashesFromTarballFiles
-- ------------------------------------------------------------
-- * Installation planning
-- ------------------------------------------------------------
-- | Run the dependency solver to produce a 'SolverInstallPlan' for the local
-- packages, also remembering which packages were given implicit setup
-- dependencies (via 'rememberImplicitSetupDeps').
planPackages :: Compiler
-> Platform
-> Solver -> SolverSettings
-> InstalledPackageIndex
-> SourcePackageDb
-> PkgConfigDb
-> [UnresolvedSourcePackage]
-> Map PackageName (Map OptionalStanza Bool)
-> Progress String String
(SolverInstallPlan, PackagesImplicitSetupDeps)
planPackages comp platform solver SolverSettings{..}
installedPkgIndex sourcePkgDb pkgConfigDB
localPackages pkgStanzasEnable =
rememberImplicitSetupDeps (depResolverSourcePkgIndex stdResolverParams) <$>
resolveDependencies
platform (compilerInfo comp)
pkgConfigDB solver
resolverParams
where
--TODO: [nice to have] disable multiple instances restriction in the solver, but then
-- make sure we can cope with that in the output.
-- The resolver parameters are built by layering solver-setting tweaks,
-- preferences and constraints on top of the standard install policy.
resolverParams =
setMaxBackjumps solverSettingMaxBackjumps
--TODO: [required eventually] should only be configurable for custom installs
-- . setIndependentGoals solverSettingIndependentGoals
. setReorderGoals solverSettingReorderGoals
--TODO: [required eventually] should only be configurable for custom installs
-- . setAvoidReinstalls solverSettingAvoidReinstalls
--TODO: [required eventually] should only be configurable for custom installs
-- . setShadowPkgs solverSettingShadowPkgs
. setStrongFlags solverSettingStrongFlags
--TODO: [required eventually] decide if we need to prefer installed for
-- global packages, or prefer latest even for global packages. Perhaps
-- should be configurable but with a different name than "upgrade-dependencies".
. setPreferenceDefault PreferLatestForSelected
{-(if solverSettingUpgradeDeps
then PreferAllLatest
else PreferLatestForSelected)-}
. removeUpperBounds solverSettingAllowNewer
. addDefaultSetupDependencies (defaultSetupDeps comp platform
. PD.packageDescription
. packageDescription)
. addPreferences
-- preferences from the config file or command line
[ PackageVersionPreference name ver
| Dependency name ver <- solverSettingPreferences ]
. addConstraints
-- version constraints from the config file or command line
[ LabeledPackageConstraint (userToPackageConstraint pc) src
| (pc, src) <- solverSettingConstraints ]
. addPreferences
-- enable stanza preference where the user did not specify
[ PackageStanzasPreference pkgname stanzas
| pkg <- localPackages
, let pkgname = packageName pkg
stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
-- NOTE(review): 'Map.lookup stanza stanzaM == Nothing' could read
-- better as 'stanza `Map.notMember` stanzaM'.
stanzas = [ stanza | stanza <- [minBound..maxBound]
, Map.lookup stanza stanzaM == Nothing ]
, not (null stanzas)
]
. addConstraints
-- enable stanza constraints where the user asked to enable
[ LabeledPackageConstraint
(PackageConstraintStanzas pkgname stanzas)
ConstraintSourceConfigFlagOrTarget
| pkg <- localPackages
, let pkgname = packageName pkg
stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
stanzas = [ stanza | stanza <- [minBound..maxBound]
, Map.lookup stanza stanzaM == Just True ]
, not (null stanzas)
]
. addConstraints
--TODO: [nice to have] should have checked at some point that the
-- package in question actually has these flags.
[ LabeledPackageConstraint
(PackageConstraintFlags pkgname flags)
ConstraintSourceConfigFlagOrTarget
| (pkgname, flags) <- Map.toList solverSettingFlagAssignments ]
. addConstraints
--TODO: [nice to have] we have user-supplied flags for unspecified
-- local packages (as well as specific per-package flags). For the
-- former we just apply all these flags to all local targets which
-- is silly. We should check if the flags are appropriate.
[ LabeledPackageConstraint
(PackageConstraintFlags pkgname flags)
ConstraintSourceConfigFlagOrTarget
| let flags = solverSettingFlagAssignment
, not (null flags)
, pkg <- localPackages
, let pkgname = packageName pkg ]
$ stdResolverParams
-- The baseline: the standard install policy over the installed and
-- source package indexes, pinning each local package as a target.
stdResolverParams =
standardInstallPolicy
installedPkgIndex sourcePkgDb
(map SpecificSourcePackage localPackages)
------------------------------------------------------------------------------
-- * Install plan post-processing
------------------------------------------------------------------------------
-- This phase goes from the InstallPlan we get from the solver and has to
-- make an elaborated install plan.
--
-- We go in two steps:
--
-- 1. elaborate all the source packages that the solver has chosen.
-- 2. swap source packages for pre-existing installed packages wherever
-- possible.
--
-- We do it in this order, elaborating and then replacing, because the easiest
-- way to calculate the installed package ids used for the replacement step is
-- from the elaborated configuration for each package.
------------------------------------------------------------------------------
-- * Install plan elaboration
------------------------------------------------------------------------------
-- | Produce an elaborated install plan using the policy for local builds with
-- a nix-style shared store.
--
-- In theory should be able to make an elaborated install plan with a policy
-- matching that of the classic @cabal install --user@ or @--global@
--
elaborateInstallPlan
:: Platform -> Compiler -> ProgramDb
-> DistDirLayout
-> CabalDirLayout
-> SolverInstallPlan
-> PackagesImplicitSetupDeps
-> [SourcePackage loc]
-> Map PackageId PackageSourceHash
-> InstallDirs.InstallDirTemplates
-> ProjectConfigShared
-> PackageConfig
-> Map PackageName PackageConfig
-> (ElaboratedInstallPlan, ElaboratedSharedConfig)
elaborateInstallPlan platform compiler compilerprogdb
DistDirLayout{..}
cabalDirLayout@CabalDirLayout{cabalStorePackageDB}
solverPlan pkgsImplicitSetupDeps localPackages
sourcePackageHashes
defaultInstallDirs
_sharedPackageConfig
localPackagesConfig
perPackageConfig =
(elaboratedInstallPlan, elaboratedSharedConfig)
where
-- The config shared by every package in the plan: the platform, compiler
-- and the compiler's program db.
elaboratedSharedConfig =
ElaboratedSharedConfig {
pkgConfigPlatform = platform,
pkgConfigCompiler = compiler,
pkgConfigCompilerProgs = compilerprogdb
}
-- Map each solver plan entry to its elaborated form, keeping the
-- dependency graph structure intact; only 'Configured' packages need
-- actual elaboration.
elaboratedInstallPlan =
flip InstallPlan.mapPreservingGraph solverPlan $ \mapDep planpkg ->
case planpkg of
InstallPlan.PreExisting pkg ->
InstallPlan.PreExisting pkg
InstallPlan.Configured pkg ->
InstallPlan.Configured
(elaborateSolverPackage mapDep pkg)
_ -> error "elaborateInstallPlan: unexpected package state"
-- Elaborate a single solver-chosen package into a fully configured one,
-- filling in every field of 'ElaboratedConfiguredPackage' from the solver
-- result plus the per-package/project configuration.
elaborateSolverPackage :: (UnitId -> UnitId)
-> SolverPackage UnresolvedPkgLoc
-> ElaboratedConfiguredPackage
elaborateSolverPackage
mapDep
pkg@(SolverPackage (SourcePackage pkgid gdesc srcloc descOverride)
flags stanzas deps0) =
elaboratedPackage
where
-- Knot tying: the final elaboratedPackage includes the
-- pkgInstalledId, which is calculated by hashing many
-- of the other fields of the elaboratedPackage.
--
elaboratedPackage = ElaboratedConfiguredPackage {..}
deps = fmap (map elaborateSolverId) deps0
elaborateSolverId sid =
ConfiguredId {
confSrcId = packageId sid,
-- Update the 'UnitId' to the final nix-style hashed ID
confInstId = mapDep (installedPackageId sid)
}
-- Inplace packages get a synthetic "-inplace" id; store packages get a
-- nix-style id hashed from the elaborated configuration.
pkgInstalledId
| shouldBuildInplaceOnly pkg
= mkUnitId (display pkgid ++ "-inplace")
| otherwise
= assert (isJust pkgSourceHash) $
hashedInstalledPackageId
(packageHashInputs
elaboratedSharedConfig
elaboratedPackage) -- recursive use of elaboratedPackage
-- NOTE(review): this third guard is unreachable — the preceding
-- 'otherwise' always matches, so the missing-hash error below can
-- never fire; only the (dev-build) assert guards the hash.
| otherwise
= error $ "elaborateInstallPlan: non-inplace package "
++ " is missing a source hash: " ++ display pkgid
-- All the other fields of the ElaboratedConfiguredPackage
--
pkgSourceId = pkgid
-- NOTE(review): partial pattern — a 'Left' from
-- finalizePackageDescription would be a runtime crash; presumably the
-- solver's flag assignment always finalizes successfully. TODO confirm.
pkgDescription = let Right (desc, _) =
PD.finalizePackageDescription
flags (const True)
platform (compilerInfo compiler)
[] gdesc
in desc
pkgFlagAssignment = flags
pkgFlagDefaults = [ (Cabal.flagName flag, Cabal.flagDefault flag)
| flag <- PD.genPackageFlags gdesc ]
pkgDependencies = deps
pkgStanzasAvailable = Set.fromList stanzas
pkgStanzasRequested =
-- NB: even if a package stanza is requested, if the package
-- doesn't actually have any of that stanza we omit it from
-- the request, to ensure that we don't decide that this
-- package needs to be rebuilt. (It needs to be done here,
-- because the ElaboratedConfiguredPackage is where we test
-- whether or not there have been changes.)
Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests
, _ <- PD.testSuites pkgDescription ]
++ [ (BenchStanzas, v) | v <- maybeToList benchmarks
, _ <- PD.benchmarks pkgDescription ]
where
tests, benchmarks :: Maybe Bool
tests = perPkgOptionMaybe pkgid packageConfigTests
benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks
-- This is a placeholder which will get updated by 'pruneInstallPlanPass1'
-- and 'pruneInstallPlanPass2'. We can't populate it here
-- because whether or not tests/benchmarks should be enabled
-- is heuristically calculated based on whether or not the
-- dependencies of the test suite have already been installed,
-- but this function doesn't know what is installed (since
-- we haven't improved the plan yet), so we do it in another pass.
-- Check the comments of those functions for more details.
pkgStanzasEnabled = Set.empty
pkgBuildTargets = []
pkgReplTarget = Nothing
pkgBuildHaddocks = False
pkgSourceLocation = srcloc
-- Nothing for local unpacked packages (see getPackageSourceHashes).
pkgSourceHash = Map.lookup pkgid sourcePackageHashes
pkgBuildStyle = if shouldBuildInplaceOnly pkg
then BuildInplaceOnly else BuildAndInstall
pkgBuildPackageDBStack = buildAndRegisterDbs
pkgRegisterPackageDBStack = buildAndRegisterDbs
pkgRequiresRegistration = PD.hasPublicLib pkgDescription
pkgSetupScriptStyle = packageSetupScriptStylePostSolver
pkgsImplicitSetupDeps pkg pkgDescription
pkgSetupScriptCliVersion = packageSetupScriptSpecVersion
pkgSetupScriptStyle pkgDescription deps
pkgSetupPackageDBStack = buildAndRegisterDbs
-- Inplace builds use the dist-local dbs; store builds the store dbs.
buildAndRegisterDbs
| shouldBuildInplaceOnly pkg = inplacePackageDbs
| otherwise = storePackageDbs
pkgDescriptionOverride = descOverride
pkgVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively
pkgSharedLib = pkgid `Set.member` pkgsUseSharedLibrary
pkgDynExe = perPkgOptionFlag pkgid False packageConfigDynExe
pkgGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still
pkgProfExe = perPkgOptionFlag pkgid False packageConfigProf
pkgProfLib = pkgid `Set.member` pkgsUseProfilingLibrary
(pkgProfExeDetail,
pkgProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault
packageConfigProfDetail
packageConfigProfLibDetail
pkgCoverage = perPkgOptionFlag pkgid False packageConfigCoverage
pkgOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization
pkgSplitObjs = perPkgOptionFlag pkgid False packageConfigSplitObjs
pkgStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs
pkgStripExes = perPkgOptionFlag pkgid False packageConfigStripExes
pkgDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo
-- Combine the configured compiler prog settings with the user-supplied
-- config. For the compiler progs any user-supplied config was taken
-- into account earlier when configuring the compiler so its ok that
-- our configured settings for the compiler override the user-supplied
-- config here.
pkgProgramPaths = Map.fromList
[ (programId prog, programPath prog)
| prog <- configuredPrograms compilerprogdb ]
<> perPkgOptionMapLast pkgid packageConfigProgramPaths
pkgProgramArgs = Map.fromList
[ (programId prog, args)
| prog <- configuredPrograms compilerprogdb
, let args = programOverrideArgs prog
, not (null args)
]
<> perPkgOptionMapMappend pkgid packageConfigProgramArgs
pkgProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra
pkgConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs
pkgExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs
pkgExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs
pkgExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs
pkgProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix
pkgProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix
pkgInstallDirs
| shouldBuildInplaceOnly pkg
-- use the ordinary default install dirs
= (InstallDirs.absoluteInstallDirs
pkgid
(installedUnitId pkg)
(compilerInfo compiler)
InstallDirs.NoCopyDest
platform
defaultInstallDirs) {
InstallDirs.libsubdir = "", -- absoluteInstallDirs sets these as
InstallDirs.datasubdir = "" -- 'undefined' but we have to use
} -- them as "Setup.hs configure" args
| otherwise
-- use special simplified install dirs
= storePackageInstallDirs
cabalDirLayout
(compilerId compiler)
pkgInstalledId
pkgHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle
pkgHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml
pkgHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation
pkgHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables
pkgHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites
pkgHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks
pkgHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal
pkgHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss
pkgHaddockHscolour = perPkgOptionFlag pkgid False packageConfigHaddockHscolour
pkgHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss
pkgHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents
-- Convenience wrappers over 'lookupPerPkgOption' for the various field
-- shapes found in 'PackageConfig' (Flag, list, NubList, MapLast, MapMappend).
perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a
perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a
perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a]
perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f)
perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f)
perPkgOptionList pkgid f = lookupPerPkgOption pkgid f
perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f)
perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f)
perPkgOptionMapMappend pkgid f = getMapMappend (lookupPerPkgOption pkgid f)
-- A flag that can be set for both lib and exe at once ('fboth') or for the
-- lib alone ('flib'); the lib value is the combination of the two.
perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib)
where
exe = fromFlagOrDefault def bothflag
lib = fromFlagOrDefault def (bothflag <> libflag)
bothflag = lookupPerPkgOption pkgid fboth
libflag = lookupPerPkgOption pkgid flib
lookupPerPkgOption :: (Package pkg, Monoid m)
=> pkg -> (PackageConfig -> m) -> m
lookupPerPkgOption pkg f
-- the project config specifies values that apply to packages local to
-- the project, and can specify per-package values for any package,
-- but by default non-local packages get all default config values
| isLocalToProject pkg = local <> perpkg
| otherwise = perpkg
where
local = f localPackagesConfig
perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig)
-- Inplace builds see the store dbs plus the project's dist-local db.
inplacePackageDbs = storePackageDbs
++ [ distPackageDB (compilerId compiler) ]
-- Store builds see the global db plus the nix-style store db.
storePackageDbs = [ GlobalPackageDB
, cabalStorePackageDB (compilerId compiler) ]
-- For this local build policy, every package that lives in a local source
-- dir (as opposed to a tarball), or depends on such a package, will be
-- built inplace into a shared dist dir. Tarball packages that depend on
-- source dir packages will also get unpacked locally.
shouldBuildInplaceOnly :: HasUnitId pkg => pkg -> Bool
shouldBuildInplaceOnly pkg = Set.member (installedPackageId pkg)
pkgsToBuildInplaceOnly
-- The reverse dependency closure of the local packages: them plus
-- everything that (transitively) depends on them.
pkgsToBuildInplaceOnly :: Set InstalledPackageId
pkgsToBuildInplaceOnly =
Set.fromList
$ map installedPackageId
$ InstallPlan.reverseDependencyClosure
solverPlan
[ installedPackageId (PlannedId (packageId pkg))
| pkg <- localPackages ]
isLocalToProject :: Package pkg => pkg -> Bool
isLocalToProject pkg = Set.member (packageId pkg)
pkgsLocalToProject
pkgsLocalToProject :: Set PackageId
pkgsLocalToProject = Set.fromList [ packageId pkg | pkg <- localPackages ]
-- Packages that need a shared library build: those configured for it (or
-- for dynamic exes), plus everything they depend on (downward closure).
pkgsUseSharedLibrary :: Set PackageId
pkgsUseSharedLibrary =
packagesWithDownwardClosedProperty needsSharedLib
where
needsSharedLib pkg =
-- NOTE(review): 'liftM2 (||)' over Maybe yields Nothing unless BOTH
-- options are explicitly set, so setting only one of shared-lib /
-- dyn-exe still falls back to the compiler default — TODO confirm
-- this is the intended semantics.
fromMaybe compilerShouldUseSharedLibByDefault
(liftM2 (||) pkgSharedLib pkgDynExe)
where
pkgid = packageId pkg
pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib
pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe
--TODO: [code cleanup] move this into the Cabal lib. It's currently open
-- coded in Distribution.Simple.Configure, but should be made a proper
-- function of the Compiler or CompilerInfo.
compilerShouldUseSharedLibByDefault =
case compilerFlavor compiler of
GHC -> GHC.isDynamic compiler
GHCJS -> GHCJS.isDynamic compiler
_ -> False
-- Packages that need a profiling library build, plus everything they
-- depend on (downward closure).
pkgsUseProfilingLibrary :: Set PackageId
pkgsUseProfilingLibrary =
packagesWithDownwardClosedProperty needsProfilingLib
where
needsProfilingLib pkg =
fromFlagOrDefault False (profBothFlag <> profLibFlag)
where
pkgid = packageId pkg
profBothFlag = lookupPerPkgOption pkgid packageConfigProf
profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib
--TODO: [code cleanup] unused: the old deprecated packageConfigProfExe
-- The packages satisfying 'property' together with their full dependency
-- closure within the solver plan.
packagesWithDownwardClosedProperty property =
Set.fromList
$ map packageId
$ InstallPlan.dependencyClosure
solverPlan
[ installedPackageId pkg
| pkg <- InstallPlan.toList solverPlan
, property pkg ] -- just the packages that satisfy the property
--TODO: [nice to have] this does not check the config consistency,
-- e.g. a package explicitly turning off profiling, but something
-- depending on it that needs profiling. This really needs a separate
-- package config validation/resolution pass.
--TODO: [nice to have] config consistency checking:
-- * profiling libs & exes, exe needs lib, recursive
-- * shared libs & exes, exe needs lib, recursive
-- * vanilla libs & exes, exe needs lib, recursive
-- * ghci or shared lib needed by TH, recursive, ghc version dependent
---------------------------
-- Build targets
--
-- Refer to ProjectPlanning.Types for details of these important types:
-- data PackageTarget = ...
-- data ComponentTarget = ...
-- data SubComponentTarget = ...
--TODO: this needs to report some user target/config errors
-- | Given the user's 'PackageTarget's for one package, elaborate them into
-- concrete 'ComponentTarget's: the components to build, the (at most one)
-- component to load in a repl session, and whether to build haddock docs.
elaboratePackageTargets :: ElaboratedConfiguredPackage -> [PackageTarget]
                        -> ([ComponentTarget], Maybe ComponentTarget, Bool)
elaboratePackageTargets ElaboratedConfiguredPackage{..} targets =
    let buildTargets  = nubComponentTargets
                      . map compatSubComponentTargets
                      . concatMap elaborateBuildTarget
                      $ targets
        --TODO: instead of listToMaybe we should be reporting an error here
        replTargets   = listToMaybe
                      . nubComponentTargets
                      . map compatSubComponentTargets
                      . concatMap elaborateReplTarget
                      $ targets
        buildHaddocks = HaddockDefaultComponents `elem` targets

     in (buildTargets, replTargets, buildHaddocks)
  where
    -- Expand "build the default components" to the package's default
    -- components; pass explicit component targets through; drop the rest.
    --TODO: need to report an error here if defaultComponents is empty
    elaborateBuildTarget  BuildDefaultComponents    = pkgDefaultComponents
    elaborateBuildTarget (BuildSpecificComponent t) = [t]
    elaborateBuildTarget  _                         = []

    -- A repl session loads a single component, so take the first default one.
    --TODO: need to report an error here if defaultComponents is empty
    elaborateReplTarget  ReplDefaultComponent     = take 1 pkgDefaultComponents
    elaborateReplTarget (ReplSpecificComponent t) = [t]
    elaborateReplTarget  _                        = []

    -- All buildable components of the package, excluding components that
    -- belong to optional stanzas (tests, benchmarks) that were not requested.
    pkgDefaultComponents =
      [ ComponentTarget cname WholeComponent
      | c <- Cabal.pkgComponents pkgDescription
      , PD.buildable (Cabal.componentBuildInfo c)
      , let cname = Cabal.componentName c
      , enabledOptionalStanza cname
      ]
      where
        enabledOptionalStanza cname =
          case componentOptionalStanza cname of
            Nothing     -> True
            Just stanza -> Map.lookup stanza pkgStanzasRequested
                        == Just True

    -- Not all Cabal Setup.hs versions support sub-component targets, so switch
    -- them over to the whole component
    compatSubComponentTargets :: ComponentTarget -> ComponentTarget
    compatSubComponentTargets target@(ComponentTarget cname _subtarget)
      | not setupHsSupportsSubComponentTargets
                  = ComponentTarget cname WholeComponent
      | otherwise = target

    -- Actually the reality is that no current version of Cabal's Setup.hs
    -- build command actually support building specific files or modules.
    setupHsSupportsSubComponentTargets = False
    -- TODO: when that changes, adjust this test, e.g.
    -- | pkgSetupScriptCliVersion >= Version [x,y] []

    -- Collapse duplicate targets within the same component: group targets by
    -- component and let a whole-component target subsume sub-component ones.
    nubComponentTargets :: [ComponentTarget] -> [ComponentTarget]
    nubComponentTargets =
        concatMap (wholeComponentOverrides . map snd)
      . groupBy ((==)    `on` fst)
      . sortBy  (compare `on` fst)
      . map (\t@(ComponentTarget cname _) -> (cname, t))

    -- If we're building the whole component then that's the only target we
    -- need, otherwise we can have several targets within the component.
    wholeComponentOverrides :: [ComponentTarget] -> [ComponentTarget]
    wholeComponentOverrides ts =
      case [ t | t@(ComponentTarget _ WholeComponent) <- ts ] of
        (t:_) -> [t]
        []    -> ts
-- | Whether the package has any \"ephemeral\" build targets: a repl target,
-- or a build target covering less than a whole component (specific modules
-- or files). Such targets do not leave a fully-built component behind.
pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool
pkgHasEphemeralBuildTargets pkg =
    isJust (pkgReplTarget pkg)
 || any isPartialTarget (pkgBuildTargets pkg)
  where
    isPartialTarget (ComponentTarget _ subtarget) =
      subtarget /= WholeComponent
-- | The components that we'll build all of, meaning that after they're built
-- we can skip building them again (unlike with building just some modules or
-- other files within a component).
--
pkgBuildTargetWholeComponents :: ElaboratedConfiguredPackage
                              -> Set ComponentName
pkgBuildTargetWholeComponents pkg =
    Set.fromList
      [ cname
      | ComponentTarget cname subtarget <- pkgBuildTargets pkg
      , subtarget == WholeComponent ]
------------------------------------------------------------------------------
-- * Install plan pruning
------------------------------------------------------------------------------

-- | Given a set of package targets (and optionally component targets within
-- those packages), take the subset of the install plan needed to build those
-- targets. Also, update the package config to specify which optional stanzas
-- to enable, and which targets within each package to build.
--
pruneInstallPlanToTargets :: Map InstalledPackageId [PackageTarget]
                          -> ElaboratedInstallPlan -> ElaboratedInstallPlan
pruneInstallPlanToTargets perPkgTargetsMap =
    -- Rebuilding the plan from the pruned package list should never fail:
    -- we started from a valid plan and took a dependency-closed subset.
    either (\_ -> assert False undefined) id
  . InstallPlan.new (IndependentGoals False)
  . PackageIndex.fromList
    -- We have to do this in two passes
  . pruneInstallPlanPass2
  . pruneInstallPlanPass1 perPkgTargetsMap
  . InstallPlan.toList
-- | The first pass does three things:
--
-- * Set the build targets based on the user targets (but not rev deps yet).
-- * A first go at determining which optional stanzas (testsuites, benchmarks)
--   are needed. We have a second go in the next pass.
-- * Take the dependency closure using pruned dependencies. We prune deps that
--   are used only by unneeded optional stanzas. These pruned deps are only
--   used for the dependency closure and are not persisted in this pass.
--
pruneInstallPlanPass1 :: Map InstalledPackageId [PackageTarget]
                      -> [ElaboratedPlanPackage]
                      -> [ElaboratedPlanPackage]
pruneInstallPlanPass1 perPkgTargetsMap pkgs =
    map fst $
    dependencyClosure
      (installedPackageId . fst)  -- the pkg id
      snd                         -- the pruned deps
      [ (pkg', pruneOptionalDependencies pkg')
      | pkg <- pkgs
      , let pkg' = mapConfiguredPackage
                     (pruneOptionalStanzas . setBuildTargets) pkg
      ]
      (Map.keys perPkgTargetsMap)  -- the roots: the user-targeted packages
  where
    -- Elaborate and set the targets we'll build for this package. This is just
    -- based on the targets from the user, not targets implied by reverse
    -- dependencies. Those comes in the second pass once we know the rev deps.
    --
    setBuildTargets pkg =
        pkg {
          pkgBuildTargets  = buildTargets,
          pkgReplTarget    = replTarget,
          pkgBuildHaddocks = buildHaddocks
        }
      where
        (buildTargets, replTarget, buildHaddocks)
                = elaboratePackageTargets pkg targets
        targets = fromMaybe []
                $ Map.lookup (installedPackageId pkg) perPkgTargetsMap

    -- Decide whether or not to enable testsuites and benchmarks
    --
    -- The testsuite and benchmark targets are somewhat special in that we need
    -- to configure the packages with them enabled, and we need to do that even
    -- if we only want to build one of several testsuites.
    --
    -- There are two cases in which we will enable the testsuites (or
    -- benchmarks): if one of the targets is a testsuite, or if all of the
    -- testsuite dependencies are already cached in the store. The rationale
    -- for the latter is to minimise how often we have to reconfigure due to
    -- the particular targets we choose to build. Otherwise choosing to build
    -- a testsuite target, and then later choosing to build an exe target
    -- would involve unnecessarily reconfiguring the package with testsuites
    -- disabled. Technically this introduces a little bit of stateful
    -- behaviour to make this "sticky", but it should be benign.
    --
    pruneOptionalStanzas pkg = pkg { pkgStanzasEnabled = stanzas }
      where
        stanzas :: Set OptionalStanza
        stanzas = optionalStanzasRequiredByTargets pkg
               <> optionalStanzasRequestedByDefault pkg
               <> optionalStanzasWithDepsAvailable availablePkgs pkg

    -- Calculate package dependencies but cut out those needed only by
    -- optional stanzas that we've determined we will not enable.
    -- These pruned deps are not persisted in this pass since they're based on
    -- the optional stanzas and we'll make further tweaks to the optional
    -- stanzas in the next pass.
    --
    pruneOptionalDependencies :: ElaboratedPlanPackage -> [InstalledPackageId]
    pruneOptionalDependencies (InstallPlan.Configured pkg) =
        (CD.flatDeps . CD.filterDeps keepNeeded) (depends pkg)
      where
        keepNeeded (CD.ComponentTest  _) _ = TestStanzas  `Set.member` stanzas
        keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas
        keepNeeded _                     _ = True
        stanzas = pkgStanzasEnabled pkg
    -- Pre-existing (installed) packages keep all their deps.
    pruneOptionalDependencies pkg =
        CD.flatDeps (depends pkg)

    -- Stanzas needed because a build or repl target lives inside them.
    optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage
                                     -> Set OptionalStanza
    optionalStanzasRequiredByTargets pkg =
      Set.fromList
        [ stanza
        | ComponentTarget cname _ <- pkgBuildTargets pkg
                                  ++ maybeToList (pkgReplTarget pkg)
        , stanza <- maybeToList (componentOptionalStanza cname)
        ]

    -- Stanzas the per-package configuration explicitly requested (True).
    optionalStanzasRequestedByDefault :: ElaboratedConfiguredPackage
                                      -> Set OptionalStanza
    optionalStanzasRequestedByDefault =
        Map.keysSet
      . Map.filter (id :: Bool -> Bool)
      . pkgStanzasRequested

    -- In this first pass "available" means already installed (pre-existing).
    availablePkgs =
      Set.fromList
        [ installedPackageId pkg
        | InstallPlan.PreExisting pkg <- pkgs ]
-- | Given a set of already installed packages @availablePkgs@,
-- determine the set of available optional stanzas from @pkg@
-- which have all of their dependencies already installed. This is used
-- to implement "sticky" testsuites, where once we have installed
-- all of the deps needed for the test suite, we go ahead and
-- enable it always.
optionalStanzasWithDepsAvailable :: Set InstalledPackageId
                                 -> ElaboratedConfiguredPackage
                                 -> Set OptionalStanza
optionalStanzasWithDepsAvailable availablePkgs pkg =
    Set.fromList
      [ stanza
      | stanza <- Set.toList (pkgStanzasAvailable pkg)
      , stanzaDepsInstalled stanza
      ]
  where
    -- True iff every dependency belonging to the stanza's components is
    -- already present in the set of available (installed) packages.
    stanzaDepsInstalled stanza =
        all ((`Set.member` availablePkgs) . installedPackageId)
            (CD.select (isStanzaComponent stanza) (pkgDependencies pkg))

    isStanzaComponent TestStanzas  (CD.ComponentTest  _) = True
    isStanzaComponent BenchStanzas (CD.ComponentBench _) = True
    isStanzaComponent _            _                     = False
-- The second pass does three things:
--
-- * A second go at deciding which optional stanzas to enable.
-- * Prune the dependencies based on the final choice of optional stanzas.
-- * Extend the targets within each package to build, now we know the reverse
--   dependencies, ie we know which libs are needed as deps by other packages.
--
-- Achieving sticky behaviour with enabling\/disabling optional stanzas is
-- tricky. The first approximation was handled by the first pass above, but
-- it's not quite enough. That pass will enable stanzas if all of the deps
-- of the optional stanza are already installed /in the store/. That's important
-- but it does not account for dependencies that get built inplace as part of
-- the project. We cannot take those inplace build deps into account in the
-- pruning pass however because we don't yet know which ones we're going to
-- build. Once we do know, we can have another go and enable stanzas that have
-- all their deps available. Now we can consider all packages in the pruned
-- plan to be available, including ones we already decided to build from
-- source.
--
-- Deciding which targets to build depends on knowing which packages have
-- reverse dependencies (ie are needed). This requires the result of first
-- pass, which is another reason we have to split it into two passes.
--
-- Note that just because we might enable testsuites or benchmarks (in the
-- first or second pass) doesn't mean that we build all (or even any) of them.
-- That depends on which targets we picked in the first pass.
--
pruneInstallPlanPass2 :: [ElaboratedPlanPackage]
                      -> [ElaboratedPlanPackage]
pruneInstallPlanPass2 pkgs =
    map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs
  where
    setStanzasDepsAndTargets pkg =
        pkg {
          pkgStanzasEnabled = stanzas,
          pkgDependencies   = CD.filterDeps keepNeeded (pkgDependencies pkg),
          pkgBuildTargets   = pkgBuildTargets pkg ++ targetsRequiredForRevDeps
        }
      where
        -- Second go at sticky stanzas: now every package in the pruned plan
        -- counts as available, not just those already in the store.
        stanzas :: Set OptionalStanza
        stanzas = pkgStanzasEnabled pkg
               <> optionalStanzasWithDepsAvailable availablePkgs pkg

        -- Drop test/bench deps for stanzas that ended up disabled.
        keepNeeded (CD.ComponentTest  _) _ = TestStanzas  `Set.member` stanzas
        keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas
        keepNeeded _                     _ = True

        targetsRequiredForRevDeps =
          [ ComponentTarget (Cabal.defaultLibName (pkgSourceId pkg)) WholeComponent
            -- if anything needs this pkg, build the library component
          | installedPackageId pkg `Set.member` hasReverseLibDeps
          ]
          --TODO: also need to track build-tool rev-deps for exes

    availablePkgs :: Set InstalledPackageId
    availablePkgs = Set.fromList (map installedPackageId pkgs)

    -- The set of packages that something else in the plan depends on.
    hasReverseLibDeps :: Set InstalledPackageId
    hasReverseLibDeps =
      Set.fromList [ depid | pkg <- pkgs
                           , depid <- CD.flatDeps (depends pkg) ]
-- | Apply a transformation to the 'InstallPlan.Configured' case of a plan
-- package, leaving pre-existing (installed) packages untouched.
mapConfiguredPackage :: (ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage)
                     -> ElaboratedPlanPackage
                     -> ElaboratedPlanPackage
mapConfiguredPackage f planpkg =
  case planpkg of
    InstallPlan.Configured pkg -> InstallPlan.Configured (f pkg)
    _                          -> planpkg
-- | The optional stanza (if any) that a component belongs to: test suites
-- and benchmarks are optional, all other components are unconditional.
componentOptionalStanza :: Cabal.ComponentName -> Maybe OptionalStanza
componentOptionalStanza cname =
  case cname of
    Cabal.CTestName  _ -> Just TestStanzas
    Cabal.CBenchName _ -> Just BenchStanzas
    _                  -> Nothing
-- | The transitive closure of a set of root packages within the given
-- package universe, following the supplied dependency function.
dependencyClosure :: (pkg -> InstalledPackageId)    -- ^ key of a package
                  -> (pkg -> [InstalledPackageId])  -- ^ deps to follow
                  -> [pkg]                          -- ^ the package universe
                  -> [InstalledPackageId]           -- ^ the roots
                  -> [pkg]
dependencyClosure pkgid deps allpkgs =
    -- depth-first search from the root vertices, flattened back to packages
    map vertexToPkg
  . concatMap Tree.flatten
  . Graph.dfs graph
  . map pkgidToVertex
  where
    (graph, vertexToPkg, pkgidToVertex) = dependencyGraph pkgid deps allpkgs
-- | Build a 'Graph.Graph' of the packages keyed by 'InstalledPackageId',
-- along with lookup functions between vertices, packages and package ids.
dependencyGraph :: (pkg -> InstalledPackageId)    -- ^ key of a package
                -> (pkg -> [InstalledPackageId])  -- ^ deps of a package
                -> [pkg]
                -> (Graph.Graph,
                    Graph.Vertex -> pkg,
                    InstalledPackageId -> Graph.Vertex)
dependencyGraph pkgid deps pkgs =
    (graph, vertexToPkg', pkgidToVertex')
  where
    (graph, vertexToPkg, pkgidToVertex) =
      Graph.graphFromEdges [ ( pkg, pkgid pkg, deps pkg )
                           | pkg <- pkgs ]
    vertexToPkg'   = (\(pkg,_,_) -> pkg)
                   . vertexToPkg
    -- NOTE: partial; looking up a package id not present in the graph is a
    -- programming error and raises the error below.
    pkgidToVertex' = fromMaybe (error "dependencyGraph: lookup failure")
                   . pkgidToVertex
---------------------------
-- Setup.hs script policy
--
-- Handling for Setup.hs scripts is a bit tricky, part of it lives in the
-- solver phase, and part in the elaboration phase. We keep the helper
-- functions for both phases together here so at least you can see all of it
-- in one place.
--
-- There are four major cases for Setup.hs handling:
--
-- 1. @build-type@ Custom with a @custom-setup@ section
-- 2. @build-type@ Custom without a @custom-setup@ section
-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@
-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@
--
-- It's also worth noting that packages specifying @cabal-version: >= 1.23@
-- or later that have @build-type@ Custom will always have a @custom-setup@
-- section. Therefore in case 2, the specified @cabal-version@ will always be
-- less than 1.23.
--
-- In cases 1 and 2 we obviously have to build an external Setup.hs script,
-- while in case 4 we can use the internal library API. In case 3 we also have
-- to build an external Setup.hs script because the package needs a later
-- Cabal lib version than we can support internally.
--
-- data SetupScriptStyle = ... -- see ProjectPlanning.Types
-- | Work out the 'SetupScriptStyle' given the package description.
--
-- This only works on original packages before we give them to the solver,
-- since after the solver some implicit setup deps are made explicit.
--
-- See 'rememberImplicitSetupDeps' and 'packageSetupScriptStylePostSolver'.
--
packageSetupScriptStylePreSolver :: PD.PackageDescription -> SetupScriptStyle
packageSetupScriptStylePreSolver pkg
  | isCustom && isJust (PD.setupBuildInfo pkg) = SetupCustomExplicitDeps
  | isCustom                                   = SetupCustomImplicitDeps
  | needsExternalCabal                         = SetupNonCustomExternalLib
  | otherwise                                  = SetupNonCustomInternalLib
  where
    -- An unspecified build-type is treated as Custom, the conservative choice.
    isCustom = fromMaybe PD.Custom (PD.buildType pkg) == PD.Custom
    -- The package requires a newer Cabal spec than the one cabal-install is
    -- built against, so the internal library method cannot be used.
    needsExternalCabal = PD.specVersion pkg > cabalVersion
-- | Part of our Setup.hs handling policy is implemented by getting the solver
-- to work out setup dependencies for packages. The solver already handles
-- packages that explicitly specify setup dependencies, but we can also tell
-- the solver to treat other packages as if they had setup dependencies.
-- That's what this function does, it gets called by the solver for all
-- packages that don't already have setup dependencies.
--
-- The dependencies we want to add is different for each 'SetupScriptStyle'.
--
-- Note that adding default deps means these deps are actually /added/ to the
-- packages that we get out of the solver in the 'SolverInstallPlan'. Making
-- implicit setup deps explicit is a problem in the post-solver stages because
-- we still need to distinguish the case of explicit and implicit setup deps.
-- See 'rememberImplicitSetupDeps'.
--
defaultSetupDeps :: Compiler -> Platform
                 -> PD.PackageDescription
                 -> Maybe [Dependency]
defaultSetupDeps compiler platform pkg =
    case packageSetupScriptStylePreSolver pkg of

      -- For packages with build type custom that do not specify explicit
      -- setup dependencies, we add a dependency on Cabal and a number
      -- of other packages.
      SetupCustomImplicitDeps ->
        Just $
        [ Dependency depPkgname anyVersion
        | depPkgname <- legacyCustomSetupPkgs compiler platform ] ++
        [ Dependency cabalPkgname cabalConstraint
        | packageName pkg /= cabalPkgname ]
        where
          -- The Cabal dep is slightly special:
          -- * We omit the dep for the Cabal lib itself, since it bootstraps.
          -- * We constrain it to be >= 1.18 < 2
          --
          cabalConstraint   = orLaterVersion cabalCompatMinVer
                                `intersectVersionRanges`
                              orLaterVersion (PD.specVersion pkg)
                                `intersectVersionRanges`
                              earlierVersion cabalCompatMaxVer
          -- The idea here is that at some point we will make significant
          -- breaking changes to the Cabal API that Setup.hs scripts use.
          -- So for old custom Setup scripts that do not specify explicit
          -- constraints, we constrain them to use a compatible Cabal version.
          -- The exact version where we'll make this API break has not yet been
          -- decided, so for the meantime we guess at 2.x.
          cabalCompatMaxVer = Version [2] []
          -- In principle we can talk to any old Cabal version, and we need to
          -- be able to do that for custom Setup scripts that require older
          -- Cabal lib versions. However in practice we currently have
          -- problems with Cabal-1.16. (1.16 does not know about build targets)
          -- If this is fixed we can relax this constraint.
          cabalCompatMinVer = Version [1,18] []

      -- For other build types (like Simple) if we still need to compile an
      -- external Setup.hs, it'll be one of the simple ones that only depends
      -- on Cabal and base.
      SetupNonCustomExternalLib ->
        Just [ Dependency cabalPkgname cabalConstraint
             , Dependency basePkgname anyVersion ]
        where
          cabalConstraint = orLaterVersion (PD.specVersion pkg)

      -- The internal setup wrapper method has no deps at all.
      SetupNonCustomInternalLib -> Just []

      -- Explicit setup deps are already handled by the solver; calling this
      -- function for such a package is a programming error.
      SetupCustomExplicitDeps ->
        error $ "defaultSetupDeps: called for a package with explicit "
             ++ "setup deps: " ++ display (packageId pkg)
-- | See 'rememberImplicitSetupDeps' for details.
type PackagesImplicitSetupDeps = Set InstalledPackageId

-- | A consequence of using 'defaultSetupDeps' in 'planPackages' is that by
-- making implicit setup deps explicit we lose track of which packages
-- originally had implicit setup deps. That's important because we do still
-- have different behaviour based on the setup style (in particular whether to
-- compile a Setup.hs script with version macros).
--
-- So we remember the necessary information in an auxiliary set and use it
-- in 'packageSetupScriptStylePostSolver' to recover the full info.
--
rememberImplicitSetupDeps :: SourcePackageIndex.PackageIndex (SourcePackage loc)
                          -> SolverInstallPlan
                          -> (SolverInstallPlan, PackagesImplicitSetupDeps)
rememberImplicitSetupDeps sourcePkgIndex plan =
    (plan, pkgsImplicitSetupDeps)
  where
    pkgsImplicitSetupDeps =
      Set.fromList
        [ installedPackageId pkg
        | InstallPlan.Configured
            pkg@(SolverPackage newpkg _ _ _) <- InstallPlan.toList plan
          -- has explicit setup deps now
        , hasExplicitSetupDeps newpkg
          -- but originally had no setup deps
          -- NOTE(review): partial pattern; assumes every configured package
          -- in the plan is present in the source package index -- confirm
        , let Just origpkg = SourcePackageIndex.lookupPackageId
                               sourcePkgIndex (packageId pkg)
        , not (hasExplicitSetupDeps origpkg)
        ]

    hasExplicitSetupDeps =
        (SetupCustomExplicitDeps==)
      . packageSetupScriptStylePreSolver
      . PD.packageDescription . packageDescription
-- | Use the extra info saved by 'rememberImplicitSetupDeps' to let us work
-- out the correct 'SetupScriptStyle'. This should give the same result as
-- 'packageSetupScriptStylePreSolver' gave prior to munging the package info
-- through the solver.
--
packageSetupScriptStylePostSolver :: Set InstalledPackageId
                                  -> SolverPackage loc
                                  -> PD.PackageDescription
                                  -> SetupScriptStyle
packageSetupScriptStylePostSolver pkgsImplicitSetupDeps pkg pkgDescription
    -- the solver made implicit setup deps look explicit, so translate the
    -- style back for packages we recorded as originally implicit
  | styleNow == SetupCustomExplicitDeps && hadImplicitSetupDeps
  = SetupCustomImplicitDeps
  | otherwise
  = styleNow
  where
    styleNow = packageSetupScriptStylePreSolver pkgDescription
    hadImplicitSetupDeps =
      Set.member (installedPackageId pkg) pkgsImplicitSetupDeps
-- | Work out which version of the Cabal spec we will be using to talk to the
-- Setup.hs interface for this package.
--
-- This depends somewhat on the 'SetupScriptStyle' but most cases are a result
-- of what the solver picked for us, based on the explicit setup deps or the
-- ones added implicitly by 'defaultSetupDeps'.
--
packageSetupScriptSpecVersion :: Package pkg
                              => SetupScriptStyle
                              -> PD.PackageDescription
                              -> ComponentDeps [pkg]
                              -> Version

-- We're going to be using the internal Cabal library, so the spec version of
-- that is simply the version of the Cabal library that cabal-install has been
-- built with.
packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ =
    cabalVersion

-- If we happen to be building the Cabal lib itself then because that
-- bootstraps itself then we use the version of the lib we're building.
packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _
  | packageName pkg == cabalPkgname
  = packageVersion pkg

-- In all other cases we have a look at what version of the Cabal lib the
-- solver picked. Or if it didn't depend on Cabal at all (which is very rare)
-- then we look at the .cabal file to see what spec version it declares.
packageSetupScriptSpecVersion _ pkg deps =
    case find ((cabalPkgname ==) . packageName) (CD.setupDeps deps) of
      Just dep -> packageVersion dep
      Nothing  -> PD.specVersion pkg
-- | Package names that the Setup.hs handling policy treats specially.
cabalPkgname, basePkgname :: PackageName
cabalPkgname = PackageName "Cabal"
basePkgname  = PackageName "base"
-- | The set of packages that old-style custom Setup.hs scripts are implicitly
-- allowed to depend on: a fixed common set, plus a platform-specific package
-- and, for GHC-compatible compilers, the GHC-only packages.
legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName]
legacyCustomSetupPkgs compiler (Platform _ os) =
    map PackageName (commonPkgs ++ platformPkgs ++ ghcPkgs)
  where
    commonPkgs =
      [ "array", "base", "binary", "bytestring", "containers"
      , "deepseq", "directory", "filepath", "old-time", "pretty"
      , "process", "time", "transformers" ]
    platformPkgs
      | os == Windows = ["Win32"]
      | otherwise     = ["unix"]
    ghcPkgs
      | compilerCompatFlavor GHC compiler = ["ghc-prim", "template-haskell"]
      | otherwise                         = []
-- The other aspects of our Setup.hs policy lives here where we decide on
-- the 'SetupScriptOptions'.
--
-- Our current policy for the 'SetupCustomImplicitDeps' case is that we
-- try to make the implicit deps cover everything, and we don't allow the
-- compiler to pick up other deps. This may or may not be sustainable, and
-- we might have to allow the deps to be non-exclusive, but that itself would
-- be tricky since we would have to allow the Setup access to all the packages
-- in the store and local dbs.

-- | Make the 'SetupScriptOptions' used to run this package's Setup.hs.
setupHsScriptOptions :: ElaboratedReadyPackage
                     -> ElaboratedSharedConfig
                     -> FilePath  -- ^ package source directory
                     -> FilePath  -- ^ build (dist) directory
                     -> Bool      -- ^ is this a parallel build?
                     -> Lock      -- ^ lock protecting the setup cache
                     -> SetupScriptOptions
setupHsScriptOptions (ReadyPackage ElaboratedConfiguredPackage{..})
                     ElaboratedSharedConfig{..} srcdir builddir
                     isParallelBuild cacheLock =
    SetupScriptOptions {
      useCabalVersion          = thisVersion pkgSetupScriptCliVersion,
      useCabalSpecVersion      = Just pkgSetupScriptCliVersion,
      useCompiler              = Just pkgConfigCompiler,
      usePlatform              = Just pkgConfigPlatform,
      usePackageDB             = pkgSetupPackageDBStack,
      usePackageIndex          = Nothing,
      -- only the solver-chosen setup deps, nothing else (exclusive below)
      useDependencies          = [ (uid, srcid)
                                 | ConfiguredId srcid uid <- CD.setupDeps pkgDependencies ],
      useDependenciesExclusive = True,
      useVersionMacros         = pkgSetupScriptStyle == SetupCustomExplicitDeps,
      useProgramConfig         = pkgConfigCompilerProgs,
      useDistPref              = builddir,
      useLoggingHandle         = Nothing, -- this gets set later
      useWorkingDir            = Just srcdir,
      useWin32CleanHack        = False,   --TODO: [required eventually]
      forceExternalSetupMethod = isParallelBuild,
      setupCacheLock           = Just cacheLock
    }
-- | To be used for the input for elaborateInstallPlan.
--
-- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure.
--
userInstallDirTemplates :: Compiler
                        -> IO InstallDirs.InstallDirTemplates
userInstallDirTemplates compiler =
    InstallDirs.defaultInstallDirs
      (compilerFlavor compiler)
      True  -- user install
      False -- unused
-- | The concrete installation directory layout within the store for one
-- package, rooted at the store directory for the given compiler and
-- installed package id.
storePackageInstallDirs :: CabalDirLayout
                        -> CompilerId
                        -> InstalledPackageId
                        -> InstallDirs.InstallDirs FilePath
storePackageInstallDirs CabalDirLayout{cabalStorePackageDirectory}
                        compid ipkgid =
    InstallDirs.InstallDirs {..}
  where
    -- everything lives under the store's per-package prefix
    prefix       = cabalStorePackageDirectory compid ipkgid
    bindir       = prefix </> "bin"
    libdir       = prefix </> "lib"
    libsubdir    = ""
    dynlibdir    = libdir
    libexecdir   = prefix </> "libexec"
    includedir   = libdir </> "include"
    datadir      = prefix </> "share"
    datasubdir   = ""
    docdir       = datadir </> "doc"
    mandir       = datadir </> "man"
    htmldir      = docdir </> "html"
    haddockdir   = htmldir
    sysconfdir   = prefix </> "etc"
--TODO: [code cleanup] perhaps reorder this code
-- based on the ElaboratedInstallPlan + ElaboratedSharedConfig,
-- make the various Setup.hs {configure,build,copy} flags

-- | Make the 'Cabal.ConfigFlags' for the Setup.hs configure step, derived
-- from the elaborated per-package and shared configuration. The result is
-- sanity-checked against the package before being returned.
setupHsConfigureFlags :: ElaboratedReadyPackage
                      -> ElaboratedSharedConfig
                      -> Verbosity
                      -> FilePath
                      -> Cabal.ConfigFlags
setupHsConfigureFlags (ReadyPackage
                         pkg@ElaboratedConfiguredPackage{..})
                      sharedConfig@ElaboratedSharedConfig{..}
                      verbosity builddir =
    sanityCheckElaboratedConfiguredPackage sharedConfig pkg
      (Cabal.ConfigFlags {..})
  where
    configDistPref            = toFlag builddir
    configVerbosity           = toFlag verbosity

    configIPID                = toFlag (display (installedUnitId pkg))

    configProgramPaths        = Map.toList pkgProgramPaths
    configProgramArgs         = Map.toList pkgProgramArgs
    configProgramPathExtra    = toNubList pkgProgramPathExtra
    configHcFlavor            = toFlag (compilerFlavor pkgConfigCompiler)
    configHcPath              = mempty -- we use configProgramPaths instead
    configHcPkg               = mempty -- we use configProgramPaths instead

    configVanillaLib          = toFlag pkgVanillaLib
    configSharedLib           = toFlag pkgSharedLib
    configDynExe              = toFlag pkgDynExe
    configGHCiLib             = toFlag pkgGHCiLib
    configProfExe             = mempty
    configProfLib             = toFlag pkgProfLib
    configProf                = toFlag pkgProfExe

    -- configProfDetail is for exe+lib, but overridden by configProfLibDetail
    -- so we specify both so we can specify independently
    configProfDetail          = toFlag pkgProfExeDetail
    configProfLibDetail       = toFlag pkgProfLibDetail

    configCoverage            = toFlag pkgCoverage
    configLibCoverage         = mempty

    configOptimization        = toFlag pkgOptimization
    configSplitObjs           = toFlag pkgSplitObjs
    configStripExes           = toFlag pkgStripExes
    configStripLibs           = toFlag pkgStripLibs
    configDebugInfo           = toFlag pkgDebugInfo
    configAllowNewer          = mempty -- we use configExactConfiguration True

    configConfigurationsFlags = pkgFlagAssignment
    configConfigureArgs       = pkgConfigureScriptArgs
    configExtraLibDirs        = pkgExtraLibDirs
    configExtraFrameworkDirs  = pkgExtraFrameworkDirs
    configExtraIncludeDirs    = pkgExtraIncludeDirs
    configProgPrefix          = maybe mempty toFlag pkgProgPrefix
    configProgSuffix          = maybe mempty toFlag pkgProgSuffix

    configInstallDirs         = fmap (toFlag . InstallDirs.toPathTemplate)
                                     pkgInstallDirs

    -- we only use configDependencies, unless we're talking to an old Cabal
    -- in which case we use configConstraints
    configDependencies        = [ (packageName srcid, uid)
                                | ConfiguredId srcid uid <- CD.nonSetupDeps pkgDependencies ]
    configConstraints         = [ thisPackageVersion srcid
                                | ConfiguredId srcid _uid <- CD.nonSetupDeps pkgDependencies ]

    -- explicitly clear, then our package db stack
    -- TODO: [required eventually] have to do this differently for older Cabal versions
    configPackageDBs          = Nothing : map Just pkgBuildPackageDBStack

    configTests               = toFlag (TestStanzas  `Set.member` pkgStanzasEnabled)
    configBenchmarks          = toFlag (BenchStanzas `Set.member` pkgStanzasEnabled)

    configExactConfiguration  = toFlag True
    configFlagError           = mempty --TODO: [research required] appears not to be implemented
    configRelocatable         = mempty --TODO: [research required] ???
    configScratchDir          = mempty -- never use
    configUserInstall         = mempty -- don't rely on defaults
    configPrograms_           = mempty -- never use, shouldn't exist
-- | Make the 'Cabal.BuildFlags' for the Setup.hs build step. Most settings
-- were fixed at configure time, so only verbosity and dist dir are set here.
setupHsBuildFlags :: ElaboratedConfiguredPackage
                  -> ElaboratedSharedConfig
                  -> Verbosity
                  -> FilePath
                  -> Cabal.BuildFlags
setupHsBuildFlags ElaboratedConfiguredPackage{..} _ verbosity builddir =
    Cabal.BuildFlags {
      buildProgramPaths = mempty, --unused, set at configure time
      buildProgramArgs  = mempty, --unused, set at configure time
      buildVerbosity    = toFlag verbosity,
      buildDistPref     = toFlag builddir,
      buildAssumeDepsUpToDate = toFlag False,
      buildNumJobs      = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs),
      buildArgs         = mempty  -- unused, passed via args not flags
    }
-- | The command line arguments (rendered build targets) to pass to the
-- Setup.hs build step for this package.
setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String]
setupHsBuildArgs pkg =
    [ showComponentTarget pkg target | target <- pkgBuildTargets pkg ]
-- | Render a 'ComponentTarget' in the syntax expected by Setup.hs
-- command-line build-target arguments.
showComponentTarget :: ElaboratedConfiguredPackage -> ComponentTarget -> String
showComponentTarget _pkg =
    showBuildTarget . toBuildTarget
  where
    showBuildTarget t =
      Cabal.showBuildTarget (qlBuildTarget t) t

    -- choose the qualification level used when rendering the target
    qlBuildTarget Cabal.BuildTargetComponent{} = Cabal.QL2
    qlBuildTarget _                            = Cabal.QL3

    -- translate our target representation into the Cabal lib's
    toBuildTarget :: ComponentTarget -> Cabal.BuildTarget
    toBuildTarget (ComponentTarget cname subtarget) =
      case subtarget of
        WholeComponent     -> Cabal.BuildTargetComponent cname
        ModuleTarget mname -> Cabal.BuildTargetModule    cname mname
        FileTarget   fname -> Cabal.BuildTargetFile      cname fname
-- | Make the 'Cabal.ReplFlags' for the Setup.hs repl step.
setupHsReplFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> Cabal.ReplFlags
setupHsReplFlags ElaboratedConfiguredPackage{..} _ verbosity builddir =
    Cabal.ReplFlags {
      replProgramPaths = mempty, --unused, set at configure time
      replProgramArgs  = mempty, --unused, set at configure time
      replVerbosity    = toFlag verbosity,
      replDistPref     = toFlag builddir,
      replReload       = mempty  --only used as callback from repl
    }
-- | The command line argument (rendered repl target, if any) to pass to the
-- Setup.hs repl step for this package.
setupHsReplArgs :: ElaboratedConfiguredPackage -> [String]
setupHsReplArgs pkg =
    case pkgReplTarget pkg of
      Nothing     -> []
      Just target -> [showComponentTarget pkg target]
    --TODO: should be able to give multiple modules in one component
-- | Make the 'Cabal.CopyFlags' for the Setup.hs copy step.
setupHsCopyFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> Cabal.CopyFlags
setupHsCopyFlags _ _ verbosity builddir =
    Cabal.CopyFlags {
      --TODO: [nice to have] we currently just rely on Setup.hs copy to always do the right
      -- thing, but perhaps we ought really to copy into an image dir and do
      -- some sanity checks and move into the final location ourselves
      copyArgs      = [], -- TODO: could use this to only copy what we enabled
      copyDest      = toFlag InstallDirs.NoCopyDest,
      copyDistPref  = toFlag builddir,
      copyAssumeDepsUpToDate = toFlag False,
      copyVerbosity = toFlag verbosity
    }
-- | Make the 'Cabal.RegisterFlags' for the Setup.hs register step. We always
-- register via a generated package conf file rather than directly into a db.
setupHsRegisterFlags :: ElaboratedConfiguredPackage
                     -> ElaboratedSharedConfig
                     -> Verbosity
                     -> FilePath
                     -> FilePath
                     -> Cabal.RegisterFlags
setupHsRegisterFlags ElaboratedConfiguredPackage {pkgBuildStyle} _
                     verbosity builddir pkgConfFile =
    Cabal.RegisterFlags {
      regPackageDB   = mempty,  -- misfeature
      regGenScript   = mempty,  -- never use
      regGenPkgConf  = toFlag (Just pkgConfFile),
      -- in-place registration only for packages built inplace in the project
      regInPlace     = case pkgBuildStyle of
                         BuildInplaceOnly -> toFlag True
                         _                -> toFlag False,
      regPrintId     = mempty,  -- never use
      regDistPref    = toFlag builddir,
      regVerbosity   = toFlag verbosity,
      -- Currently not used, because this is per-package.
      regAssumeDepsUpToDate = toFlag False,
      regArgs        = []
    }
-- | Make the 'Cabal.HaddockFlags' for the Setup.hs haddock step, from the
-- package's elaborated haddock configuration.
setupHsHaddockFlags :: ElaboratedConfiguredPackage
                    -> ElaboratedSharedConfig
                    -> Verbosity
                    -> FilePath
                    -> Cabal.HaddockFlags
setupHsHaddockFlags ElaboratedConfiguredPackage{..} _ verbosity builddir =
    Cabal.HaddockFlags {
      haddockProgramPaths  = mempty, --unused, set at configure time
      haddockProgramArgs   = mempty, --unused, set at configure time
      haddockHoogle        = toFlag pkgHaddockHoogle,
      haddockHtml          = toFlag pkgHaddockHtml,
      haddockHtmlLocation  = maybe mempty toFlag pkgHaddockHtmlLocation,
      haddockForHackage    = mempty, --TODO: new flag
      haddockExecutables   = toFlag pkgHaddockExecutables,
      haddockTestSuites    = toFlag pkgHaddockTestSuites,
      haddockBenchmarks    = toFlag pkgHaddockBenchmarks,
      haddockInternal      = toFlag pkgHaddockInternal,
      haddockCss           = maybe mempty toFlag pkgHaddockCss,
      haddockHscolour      = toFlag pkgHaddockHscolour,
      haddockHscolourCss   = maybe mempty toFlag pkgHaddockHscolourCss,
      haddockContents      = maybe mempty toFlag pkgHaddockContents,
      haddockDistPref      = toFlag builddir,
      haddockKeepTempFiles = mempty, --TODO: from build settings
      haddockVerbosity     = toFlag verbosity
    }
{-
setupHsTestFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.TestFlags
setupHsTestFlags _ _ verbosity builddir =
Cabal.TestFlags {
}
-}
------------------------------------------------------------------------------
-- * Sharing installed packages
------------------------------------------------------------------------------
--
-- Nix style store management for tarball packages
--
-- So here's our strategy:
--
-- We use a per-user nix-style hashed store, but /only/ for tarball packages.
-- So that includes packages from hackage repos (and other http and local
-- tarballs). For packages in local directories we do not register them into
-- the shared store by default, we just build them locally inplace.
--
-- The reason we do it like this is that it's easy to make stable hashes for
-- tarball packages, and these packages benefit most from sharing. By contrast
-- unpacked dir packages are harder to hash and they tend to change more
-- frequently so there's less benefit to sharing them.
--
-- When using the nix store approach we have to run the solver *without*
-- looking at the packages installed in the store, just at the source packages
-- (plus core\/global installed packages). Then we do a post-processing pass
-- to replace configured packages in the plan with pre-existing ones, where
-- possible. Where possible of course means where the nix-style package hash
-- equals one that's already in the store.
--
-- One extra wrinkle is that unless we know package tarball hashes upfront, we
-- will have to download the tarballs to find their hashes. So we have two
-- options: delay replacing source with pre-existing installed packages until
-- the point during the execution of the install plan where we have the
-- tarball, or try to do as much up-front as possible and then check again
-- during plan execution. The former isn't great because we would end up
-- telling users we're going to re-install loads of packages when in fact we
-- would just share them. It'd be better to give as accurate a prediction as
-- we can. The latter is better for users, but we do still have to check
-- during plan execution because it's important that we don't replace existing
-- installed packages even if they have the same package hash, because we
-- don't guarantee ABI stability.
-- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but
-- not replace installed packages with ghc-pkg.
-- | Collect all the inputs that feed into a package's nix-style hash:
-- its identity, the exact source hash, the installed ids of the
-- dependencies that can affect the build result, and the remaining
-- configuration inputs.
--
-- Only valid for packages that have a source hash; calling it on any
-- other package is a programmer error (see the second equation).
packageHashInputs :: ElaboratedSharedConfig
                  -> ElaboratedConfiguredPackage
                  -> PackageHashInputs
packageHashInputs
    pkgshared
    pkg@ElaboratedConfiguredPackage{
      pkgSourceId,
      pkgSourceHash = Just srchash,
      pkgDependencies
    } =
    PackageHashInputs {
      pkgHashPkgId       = pkgSourceId,
      pkgHashSourceHash  = srchash,
      pkgHashDirectDeps  = Set.fromList
                             [ installedPackageId dep
                             | dep <- CD.select relevantDeps pkgDependencies ],
      pkgHashOtherConfig = packageHashConfigInputs pkgshared pkg
    }
  where
    -- Obviously the main deps are relevant
    relevantDeps (CD.ComponentLib _)   = True
    relevantDeps (CD.ComponentExe _)   = True
    -- Setup deps can affect the Setup.hs behaviour and thus what is built
    relevantDeps CD.ComponentSetup     = True
    -- However testsuites and benchmarks do not get installed and should not
    -- affect the result, so we do not include them.
    relevantDeps (CD.ComponentTest _)  = False
    relevantDeps (CD.ComponentBench _) = False

packageHashInputs _ pkg =
    error $ "packageHashInputs: only for packages with source hashes. "
         ++ display (packageId pkg)
-- | The configuration-derived part of the package hash inputs: the
-- compiler, platform, flag assignment and every per-package build
-- option that influences the build products.
packageHashConfigInputs :: ElaboratedSharedConfig
                        -> ElaboratedConfiguredPackage
                        -> PackageHashConfigInputs
packageHashConfigInputs
    ElaboratedSharedConfig{..}
    ElaboratedConfiguredPackage{..} =
    PackageHashConfigInputs {
      pkgHashCompilerId          = compilerId pkgConfigCompiler,
      pkgHashPlatform            = pkgConfigPlatform,
      pkgHashFlagAssignment      = pkgFlagAssignment,
      pkgHashConfigureScriptArgs = pkgConfigureScriptArgs,
      pkgHashVanillaLib          = pkgVanillaLib,
      pkgHashSharedLib           = pkgSharedLib,
      pkgHashDynExe              = pkgDynExe,
      pkgHashGHCiLib             = pkgGHCiLib,
      pkgHashProfLib             = pkgProfLib,
      pkgHashProfExe             = pkgProfExe,
      pkgHashProfLibDetail       = pkgProfLibDetail,
      pkgHashProfExeDetail       = pkgProfExeDetail,
      pkgHashCoverage            = pkgCoverage,
      pkgHashOptimization        = pkgOptimization,
      pkgHashSplitObjs           = pkgSplitObjs,
      pkgHashStripLibs           = pkgStripLibs,
      pkgHashStripExes           = pkgStripExes,
      pkgHashDebugInfo           = pkgDebugInfo,
      pkgHashExtraLibDirs        = pkgExtraLibDirs,
      pkgHashExtraFrameworkDirs  = pkgExtraFrameworkDirs,
      pkgHashExtraIncludeDirs    = pkgExtraIncludeDirs,
      pkgHashProgPrefix          = pkgProgPrefix,
      pkgHashProgSuffix          = pkgProgSuffix
    }
-- | Given the 'InstalledPackageIndex' for a nix-style package store, and an
-- 'ElaboratedInstallPlan', replace configured source packages by pre-existing
-- installed packages whenever they exist.
--
improveInstallPlanWithPreExistingPackages :: InstalledPackageIndex
                                          -> ElaboratedInstallPlan
                                          -> ElaboratedInstallPlan
improveInstallPlanWithPreExistingPackages installedPkgIndex installPlan =
    replaceWithPreExisting installPlan
      [ ipkg
      | InstallPlan.Configured pkg
          <- InstallPlan.reverseTopologicalOrder installPlan
      , ipkg <- maybeToList (canPackageBeImproved pkg) ]
  where
    --TODO: sanity checks:
    -- * the installed package must have the expected deps etc
    -- * the installed package must not be broken, valid dep closure
    --TODO: decide what to do if we encounter broken installed packages,
    -- since overwriting is never safe.

    -- A configured package can be replaced if the store index already has
    -- an installed package with the same unit id (the nix-style hash).
    canPackageBeImproved pkg =
      PackageIndex.lookupUnitId
        installedPkgIndex (installedPackageId pkg)

    -- Fold over the improvable packages, replacing each configured package
    -- in the plan with its pre-existing installed counterpart.
    replaceWithPreExisting =
      foldl' (\plan ipkg -> InstallPlan.preexisting
                              (installedPackageId ipkg) ipkg plan)
| bennofs/cabal | cabal-install/Distribution/Client/ProjectPlanning.hs | bsd-3-clause | 102,575 | 0 | 28 | 29,461 | 13,098 | 7,152 | 5,946 | 1,387 | 7 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- Module : Gen.Orphans
-- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Gen.Orphans where
import qualified Data.Aeson as A
import Data.Bifunctor
import Data.CaseInsensitive (CI)
import qualified Data.CaseInsensitive as CI
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as Map
import Data.Jason.Types
import Data.SemVer (Version, fromText, toText)
import Data.Text (Text)
-- | Parse any JSON string as case-insensitive text.
instance FromJSON (CI Text) where
    parseJSON = withText "case-insensitive" (return . CI.mk)

-- | Parse an object into a map with case-insensitive keys, by
-- round-tripping the parsed map through an association list.
instance FromJSON a => FromJSON (HashMap (CI Text) a) where
    parseJSON = fmap (Map.fromList . map (first CI.mk) . Map.toList) . parseJSON

-- | Parse a semantic version from its textual form, failing the parser
-- on malformed input.
instance FromJSON Version where
    parseJSON = withText "semantic_version" $
        either fail return . fromText

-- | Render a semantic version back to a JSON string (note: this is the
-- aeson class, the FromJSON instances above are for Data.Jason).
instance A.ToJSON Version where
    toJSON = A.String . toText
| kim/amazonka | gen/src/Gen/Orphans.hs | mpl-2.0 | 1,449 | 0 | 14 | 360 | 251 | 146 | 105 | 22 | 0 |
{-# LANGUAGE OverloadedStrings #-}
-- Module : Network.AWS.Data.Internal.URI
-- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Network.AWS.Data.Internal.URI
( collapsePath
) where
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.Foldable as Fold
import Data.Monoid
-- | Normalise a URI path: drop empty and @.@ segments, resolve @..@
-- segments against their parent where one exists, guarantee a leading
-- slash, and keep a trailing slash only when the input ended with one.
-- An empty or fully-collapsed path becomes @\/@.
collapsePath :: ByteString -> ByteString
collapsePath bs
    | BS.null bs   = slash
    | BS.null path = slash
    | otherwise    = withTrailing (withLeading path)
  where
    -- Rebuild the path from the surviving segments.
    path = BS.intercalate slash (reverse (Fold.foldl' step [] segments))

    -- Split on '/' and discard the empty segments produced by leading,
    -- trailing or doubled separators.
    segments = filter (not . BS.null) (BS.split sep bs)

    -- Guarantee a leading separator.
    withLeading x
      | BS.head x == sep = x
      | otherwise        = BS.cons sep x

    -- Restore the trailing separator if the original input had one.
    withTrailing x
      | BS.last x == sep  = x
      | BS.last bs == sep = BS.snoc x sep
      | otherwise         = x

    -- Accumulate segments in reverse order, handling "." and "..".
    step acc c
      | c == dot  = acc            -- "." is a no-op
      | c == dots = parent acc c   -- ".." may cancel the previous segment
      | otherwise = c : acc

    -- Cancel the most recent real segment; an unresolvable ".." is kept.
    parent [] _ = []
    parent (x : xs) c
      | x == dot  = c : xs
      | x == dots = c : x : xs
      | otherwise = xs

    dot   = BS.singleton '.'
    dots  = BS.pack ".."
    slash = BS.singleton sep
    sep   = '/'
| kim/amazonka | core/src/Network/AWS/Data/Internal/URI.hs | mpl-2.0 | 1,582 | 0 | 12 | 538 | 425 | 218 | 207 | 34 | 4 |
{-# LANGUAGE ScopedTypeVariables, EmptyDataDecls #-}
module Engine where
import FRP.Sodium
import Control.Applicative
import Control.Monad
import Data.List
import Graphics.Rendering.OpenGL as GL hiding (Triangle, Rect, translate)
import qualified Graphics.Rendering.OpenGL as GL
import qualified Graphics.UI.GLUT as GLUT hiding (Rect, translate)
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe
import System.Time
import Debug.Trace
import Image
-- | Animation rate in frames per second.
frameRate :: Num a => a
frameRate = 40

type Coord = Double

-- | A position in screen space.
type Point = (Coord, Coord)

-- | A displacement in screen space.
type Vector = (Coord, Coord)

-- | Central point and size from centre to edge.
type Rect = (Point, Vector)

-- | A rectangle to draw plus the image file used to texture it.
type Sprite = (Rect, String)

-- | Mouse input, already translated into screen coordinates.
data MouseEvent = MouseDown Point | MouseMove Point | MouseUp Point
    deriving Show

-- | Translate a point by a vector.
plus :: Point -> Vector -> Point
plus (x, y) (dx, dy) = (x + dx, y + dy)

-- | The vector from the second point to the first.
minus :: Point -> Point -> Vector
minus (xa, ya) (xb, yb) = (xa - xb, ya - yb)

-- | True if the point is inside the rectangle (edges count as inside).
inside :: Point -> Rect -> Bool
inside (px, py) ((cx, cy), (hw, hh)) =
    within px cx hw && within py cy hh
  where
    -- 1-D containment: within the half-extent of the centre, inclusive.
    within v centre half = centre - half <= v && v <= centre + half

-- | True if the two rectangles overlap (touching edges do not count).
overlaps :: Rect -> Rect -> Bool
overlaps ((x0, y0), (w0, h0)) ((x1, y1), (w1, h1)) =
    openOverlap (x0 - w0) (x0 + w0) (x1 - w1) (x1 + w1) &&
    openOverlap (y0 - h0) (y0 + h0) (y1 - h1) (y1 + h1)
  where
    -- 1-D strict (open-interval) overlap.
    openOverlap lo0 hi0 lo1 hi1 = hi0 > lo1 && lo0 < hi1
-- | Get system time in seconds since the start of the Unix epoch
-- (1 Jan 1970).
getTime :: IO Double
getTime = do
    (TOD sec pico) <- getClockTime
    -- Whole seconds plus the picosecond remainder (10^12 ps per second),
    -- forced strictly so callers never retain the clock thunk.
    return $!
        (fromIntegral sec) +
        (fromIntegral pico) / 1000000000000
-- | Game, which takes mouse event and time as input, and a list of sprites to draw
-- as output. Time is updated once per animation frame.
-- The returned behaviour is sampled by the render loop on every repaint.
type Game = Event MouseEvent -> Behaviour Double -> Reactive (Behaviour [Sprite])
-- | Open a GLUT window and run the game in it: wire mouse and per-frame
-- time events into the FRP network and repaint at 'frameRate' frames per
-- second using the sprite list the network publishes.
--
-- Blocks in 'GLUT.mainLoop'; the trailing 'unlisten' only runs if the
-- main loop ever returns.
--
-- Fixes versus the previous revision: removed the dead local binding
-- @period = 1 / frameRate@ (never used), and silenced the unused-binding
-- warnings for the ignored 'GLUT.createWindow' result and the unused
-- modifiers argument of the keyboard/mouse callback.
runGame :: String -> Game -> IO ()
runGame title game = do
    -- Input events pushed from the GLUT callbacks into the FRP network.
    (eMouse, pushMouse) <- sync newEvent
    (eTime, pushTime) <- sync newEvent
    -- Latest sprite list published by the game logic; read by 'display'.
    spritesRef <- newIORef []
    unlisten <- sync $ do
        time <- hold 0 eTime
        sprites <- game eMouse time
        listen (value sprites) (writeIORef spritesRef)
    _ <- GLUT.getArgsAndInitialize
    GLUT.initialDisplayMode $= [GLUT.DoubleBuffered]
    _ <- GLUT.createWindow title
    GLUT.windowSize $= GLUT.Size 700 500
    -- Standard alpha blending plus smoothing hints for nicer rendering.
    blend $= Enabled
    blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
    multisample $= Enabled
    shadeModel $= Smooth
    polygonSmooth $= Enabled
    hint PolygonSmooth $= Nicest
    hint LineSmooth $= Nicest
    normalize $= Enabled
    -- Texture cache, filled lazily by 'display' on first use of each image.
    texturesRef <- newIORef M.empty
    t0 <- getTime
    GLUT.displayCallback $= display texturesRef t0 pushTime spritesRef
    let motion (GLUT.Position x y) = do
            pt <- toScreen x y
            sync $ pushMouse (MouseMove pt)
    GLUT.motionCallback $= Just motion
    GLUT.passiveMotionCallback $= Just motion
    GLUT.keyboardMouseCallback $= Just (\key keyState _mods pos -> do
        case (key, keyState, pos) of
            (GLUT.MouseButton GLUT.LeftButton, GLUT.Down, GLUT.Position x y) ->
                sync . pushMouse . MouseDown =<< toScreen x y
            (GLUT.MouseButton GLUT.LeftButton, GLUT.Up, GLUT.Position x y) ->
                sync . pushMouse . MouseUp =<< toScreen x y
            _ -> return ()
        )
    GLUT.addTimerCallback (1000 `div` frameRate) repaint
    GLUT.mainLoop
    unlisten
  where
    -- Convert a window pixel position to game coordinates (the inverse of
    -- the scaling applied in 'display').
    toScreen :: GLint -> GLint -> IO (Coord, Coord)
    toScreen x y = do
        (_, Size w h) <- get viewport
        let aspect = fromIntegral w / fromIntegral h
            sx = 0.001/aspect
            sy = 0.001
            xx = 2 * ((fromIntegral x / fromIntegral w) - 0.5) / sx
            yy = 2 * (0.5 - (fromIntegral y / fromIntegral h)) / sy
        return (xx, yy)

    -- GLUT timers are one-shot, so the repaint callback re-arms itself to
    -- request one redisplay per frame.
    repaint = do
        GLUT.postRedisplay Nothing
        GLUT.addTimerCallback (1000 `div` frameRate) repaint
-- | GLUT display callback: push the elapsed time (relative to @t0@) into
-- the FRP network, read the sprite list most recently published by the
-- game, and render each sprite as a textured quad.  Textures are loaded
-- on demand and cached in @texturesRef@.
display :: IORef (Map String (TextureImage, TextureObject))
        -> Double
        -> (Double -> Reactive ())
        -> IORef [Sprite]
        -> IO ()
display texturesRef t0 pushTime spritesRef = do
    t <- subtract t0 <$> getTime
    sync $ pushTime t
    sprites <- readIORef spritesRef
    clearColor $= Color4 0 0 0 (1 :: GLclampf)
    --clearColor $= Color4 0.1 0.1 0.15 (1 :: GLclampf)
    clear [ColorBuffer{-, DepthBuffer-}]
    loadIdentity
    (_, Size w h) <- get viewport
    let aspect = fromIntegral w / fromIntegral h
    -- Scale so the y axis spans a fixed range regardless of aspect ratio
    -- (inverse of the mapping in runGame's toScreen).
    scale (0.001/aspect) 0.001 (0.001 :: GLfloat)
    forM_ sprites $ \(((posX, posY),(sizeX, sizeY)),imgFile) -> do
        textures <- readIORef texturesRef
        -- Look the texture up in the cache, loading and caching it on
        -- first use of this image file.
        (TextureImage iWidth iHeight pWidth pHeight _ _, to) <- case imgFile `M.lookup` textures of
            Just (ti, to) -> return (ti, to)
            Nothing -> do
                ti <- loadTexture imgFile False
                to <- createTexture ti
                modifyIORef texturesRef (M.insert imgFile (ti, to))
                return $ (ti, to)
        preservingMatrix $ do
            texture Texture2D $= Enabled
            textureBinding Texture2D $= Just to
            GL.translate $ Vector3 (realToFrac posX) (realToFrac posY) (0 :: GLdouble)
            let w2 = realToFrac sizeX :: GLdouble
                h2 = realToFrac sizeY :: GLdouble
                -- Texture coordinate crop factors; presumably pWidth/pHeight
                -- are the useful pixel area inside a padded iWidth/iHeight
                -- texture -- TODO confirm against the Image module.
                cx = realToFrac pWidth / realToFrac iWidth :: GLfloat
                cy = realToFrac pHeight / realToFrac iHeight :: GLfloat
            -- Draw the quad corner by corner, flipping the texture's y axis.
            renderPrimitive Polygon $ do
                texCoord $ TexCoord2 0 cy
                vertex $ Vertex2 (-w2) (-h2)
                texCoord $ TexCoord2 cx cy
                vertex $ Vertex2 w2 (-h2)
                texCoord $ TexCoord2 cx 0
                vertex $ Vertex2 w2 h2
                texCoord $ TexCoord2 0 (0 :: GLfloat)
                vertex $ Vertex2 (-w2) h2
            texture Texture2D $= Disabled
    --translate $ Vector3 0 0 (0.001 :: GLdouble)
    GLUT.swapBuffers
| kevintvh/sodium | haskell/examples/games/Engine.hs | bsd-3-clause | 6,300 | 0 | 22 | 2,026 | 2,052 | 1,053 | 999 | 149 | 4 |
{-
(c) The University of Glasgow 2006
Functions for working with the typechecker environment (setters, getters...).
-}
{-# LANGUAGE CPP, ExplicitForAll, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TcRnMonad(
module TcRnMonad,
module TcRnTypes,
module IOEnv
) where
#include "HsVersions.h"
import TcRnTypes -- Re-export all
import IOEnv -- Re-export all
import TcEvidence
import HsSyn hiding (LIE)
import HscTypes
import Module
import RdrName
import Name
import Type
import TcType
import InstEnv
import FamInstEnv
import PrelNames
import Id
import VarSet
import VarEnv
import ErrUtils
import SrcLoc
import NameEnv
import NameSet
import Bag
import Outputable
import UniqSupply
import UniqFM
import DynFlags
import StaticFlags
import FastString
import Panic
import Util
import Annotations
import BasicTypes( TopLevelFlag )
import Control.Exception
import Data.IORef
import qualified Data.Set as Set
import Control.Monad
#ifdef GHCI
import qualified Data.Map as Map
#endif
{-
************************************************************************
* *
initTc
* *
************************************************************************
-}
-- | Setup the initial typechecking environment
initTc :: HscEnv
       -> HscSource
       -> Bool          -- True <=> retain renamed syntax trees
       -> Module
       -> RealSrcSpan
       -> TcM r
       -> IO (Messages, Maybe r)
                -- Nothing => error thrown by the thing inside
                -- (error messages should have been printed already)

initTc hsc_env hsc_src keep_rn_syntax mod loc do_this
 = do { -- Allocate the mutable state threaded through the whole run
        errs_var     <- newIORef (emptyBag, emptyBag) ;
        tvs_var      <- newIORef emptyVarSet ;
        keep_var     <- newIORef emptyNameSet ;
        used_rdr_var <- newIORef Set.empty ;
        th_var       <- newIORef False ;
        th_splice_var<- newIORef False ;
        infer_var    <- newIORef (True, emptyBag) ;
        lie_var      <- newIORef emptyWC ;
        dfun_n_var   <- newIORef emptyOccSet ;
        -- Share the driver's type-env var when one is provided
        type_env_var <- case hsc_type_env_var hsc_env of {
                          Just (_mod, te_var) -> return te_var ;
                          Nothing             -> newIORef emptyNameEnv } ;

        dependent_files_var <- newIORef [] ;
        static_wc_var       <- newIORef emptyWC ;
#ifdef GHCI
        th_topdecls_var      <- newIORef [] ;
        th_topnames_var      <- newIORef emptyNameSet ;
        th_modfinalizers_var <- newIORef [] ;
        th_state_var         <- newIORef Map.empty ;
#endif /* GHCI */
        let {
             dflags = hsc_dflags hsc_env ;

             -- Only keep (empty skeletons of) renamed syntax if asked to
             maybe_rn_syntax :: forall a. a -> Maybe a ;
             maybe_rn_syntax empty_val
                | keep_rn_syntax = Just empty_val
                | otherwise      = Nothing ;

             gbl_env = TcGblEnv {
#ifdef GHCI
                tcg_th_topdecls      = th_topdecls_var,
                tcg_th_topnames      = th_topnames_var,
                tcg_th_modfinalizers = th_modfinalizers_var,
                tcg_th_state         = th_state_var,
#endif /* GHCI */

                tcg_mod            = mod,
                tcg_src            = hsc_src,
                tcg_sig_of         = getSigOf dflags (moduleName mod),
                tcg_mod_name       = Nothing,
                tcg_impl_rdr_env   = Nothing,
                tcg_rdr_env        = emptyGlobalRdrEnv,
                tcg_fix_env        = emptyNameEnv,
                tcg_field_env      = RecFields emptyNameEnv emptyNameSet,
                tcg_default        = if modulePackageKey mod == primPackageKey
                                     then Just []  -- See Note [Default types]
                                     else Nothing,
                tcg_type_env       = emptyNameEnv,
                tcg_type_env_var   = type_env_var,
                tcg_inst_env       = emptyInstEnv,
                tcg_fam_inst_env   = emptyFamInstEnv,
                tcg_ann_env        = emptyAnnEnv,
                tcg_visible_orphan_mods = mkModuleSet [mod],
                tcg_th_used        = th_var,
                tcg_th_splice_used = th_splice_var,
                tcg_exports        = [],
                tcg_imports        = emptyImportAvails,
                tcg_used_rdrnames  = used_rdr_var,
                tcg_dus            = emptyDUs,

                tcg_rn_imports     = [],
                tcg_rn_exports     = maybe_rn_syntax [],
                tcg_rn_decls       = maybe_rn_syntax emptyRnGroup,

                tcg_binds          = emptyLHsBinds,
                tcg_imp_specs      = [],
                tcg_sigs           = emptyNameSet,
                tcg_ev_binds       = emptyBag,
                tcg_warns          = NoWarnings,
                tcg_anns           = [],
                tcg_tcs            = [],
                tcg_insts          = [],
                tcg_fam_insts      = [],
                tcg_rules          = [],
                tcg_fords          = [],
                tcg_vects          = [],
                tcg_patsyns        = [],
                tcg_dfun_n         = dfun_n_var,
                tcg_keep           = keep_var,
                tcg_doc_hdr        = Nothing,
                tcg_hpc            = False,
                tcg_main           = Nothing,
                tcg_safeInfer      = infer_var,
                tcg_dependent_files = dependent_files_var,
                tcg_tc_plugins     = [],
                tcg_static_wc      = static_wc_var
             } ;
             lcl_env = TcLclEnv {
                tcl_errs       = errs_var,
                tcl_loc        = loc,     -- Should be over-ridden very soon!
                tcl_ctxt       = [],
                tcl_rdr        = emptyLocalRdrEnv,
                tcl_th_ctxt    = topStage,
                tcl_th_bndrs   = emptyNameEnv,
                tcl_arrow_ctxt = NoArrowCtxt,
                tcl_env        = emptyNameEnv,
                tcl_bndrs      = [],
                tcl_tidy       = emptyTidyEnv,
                tcl_tyvars     = tvs_var,
                tcl_lie        = lie_var,
                tcl_tclvl      = topTcLevel
             } ;
        } ;

        -- OK, here's the business end!
        maybe_res <- initTcRnIf 'a' hsc_env gbl_env lcl_env $
                     do { r <- tryM do_this
                        ; case r of
                            Right res -> return (Just res)
                            Left _    -> return Nothing } ;

        -- Check for unsolved constraints
        lie <- readIORef lie_var ;
        if isEmptyWC lie
           then return ()
           else pprPanic "initTc: unsolved constraints" (ppr lie) ;

        -- Collect any error messages
        msgs <- readIORef errs_var ;

        let { final_res | errorsFound dflags msgs = Nothing
                        | otherwise               = maybe_res } ;

        return (msgs, final_res)
   }
initTcInteractive :: HscEnv -> TcM a -> IO (Messages, Maybe a)
-- Initialise the type checker monad for use in GHCi
initTcInteractive hsc_env thing_inside
  = initTc hsc_env HsSrcFile False
           (icInteractiveModule (hsc_IC hsc_env))
           (realSrcLocSpan interactive_src_loc)
           thing_inside
  where
    interactive_src_loc = mkRealSrcLoc (fsLit "<interactive>") 1 1

initTcForLookup :: HscEnv -> TcM a -> IO a
-- The thing_inside is just going to look up something
-- in the environment, so we don't need much setup
initTcForLookup hsc_env thing_inside
  = do { (msgs, m) <- initTcInteractive hsc_env thing_inside
       ; case m of
           -- Re-raise any collected errors as a source-error exception
           Nothing -> throwIO $ mkSrcErr $ snd msgs
           Just x  -> return x }
{- Note [Default types]
~~~~~~~~~~~~~~~~~~~~~~~
The Integer type is simply not available in package ghc-prim (it is
declared in integer-gmp). So we set the defaulting types to (Just
[]), meaning there are no default types, rather then Nothing, which
means "use the default default types of Integer, Double".
If you don't do this, attempted defaulting in package ghc-prim causes
an actual crash (attempting to look up the Integer type).
************************************************************************
* *
Initialisation
* *
************************************************************************
-}
-- | Run a 'TcRnIf' computation given explicit global and local
-- environments, allocating a fresh unique supply tagged with the given
-- character.
initTcRnIf :: Char  -- Tag for unique supply
           -> HscEnv
           -> gbl -> lcl
           -> TcRnIf gbl lcl a
           -> IO a
initTcRnIf uniq_tag hsc_env gbl_env lcl_env thing_inside
   = do { us <- mkSplitUniqSupply uniq_tag ;
        ; us_var <- newIORef us ;
        ; let { env = Env { env_top = hsc_env,
                            env_us  = us_var,
                            env_gbl = gbl_env,
                            env_lcl = lcl_env} }

        ; runIOEnv env thing_inside
        }
{-
************************************************************************
* *
Simple accessors
* *
************************************************************************
-}
-- | Run an action purely for its effects, throwing away its result.
discardResult :: TcM a -> TcM ()
discardResult = void
-- | The top-level compiler session state.
getTopEnv :: TcRnIf gbl lcl HscEnv
getTopEnv = do { env <- getEnv; return (env_top env) }

-- | The global part of the environment.
getGblEnv :: TcRnIf gbl lcl gbl
getGblEnv = do { env <- getEnv; return (env_gbl env) }

-- | Apply a function to the global environment for the inner computation.
updGblEnv :: (gbl -> gbl) -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
updGblEnv upd = updEnv (\ env@(Env { env_gbl = gbl }) ->
                          env { env_gbl = upd gbl })

setGblEnv :: gbl -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
setGblEnv gbl_env = updEnv (\ env -> env { env_gbl = gbl_env })

-- | The local part of the environment.
getLclEnv :: TcRnIf gbl lcl lcl
getLclEnv = do { env <- getEnv; return (env_lcl env) }

updLclEnv :: (lcl -> lcl) -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
updLclEnv upd = updEnv (\ env@(Env { env_lcl = lcl }) ->
                          env { env_lcl = upd lcl })

-- | Note: may change the *type* of the local environment.
setLclEnv :: lcl' -> TcRnIf gbl lcl' a -> TcRnIf gbl lcl a
setLclEnv lcl_env = updEnv (\ env -> env { env_lcl = lcl_env })

getEnvs :: TcRnIf gbl lcl (gbl, lcl)
getEnvs = do { env <- getEnv; return (env_gbl env, env_lcl env) }

setEnvs :: (gbl', lcl') -> TcRnIf gbl' lcl' a -> TcRnIf gbl lcl a
setEnvs (gbl_env, lcl_env) = updEnv (\ env -> env { env_gbl = gbl_env, env_lcl = lcl_env })
-- Command-line flags

-- | Is the given language extension enabled?
xoptM :: ExtensionFlag -> TcRnIf gbl lcl Bool
xoptM flag = do { dflags <- getDynFlags; return (xopt flag dflags) }

-- | Is the given dump flag set?
doptM :: DumpFlag -> TcRnIf gbl lcl Bool
doptM flag = do { dflags <- getDynFlags; return (dopt flag dflags) }

-- | Is the given general flag set?
goptM :: GeneralFlag -> TcRnIf gbl lcl Bool
goptM flag = do { dflags <- getDynFlags; return (gopt flag dflags) }

-- | Is the given warning flag set?
woptM :: WarningFlag -> TcRnIf gbl lcl Bool
woptM flag = do { dflags <- getDynFlags; return (wopt flag dflags) }

-- | Run the inner computation with the extension switched on.
setXOptM :: ExtensionFlag -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
setXOptM flag = updEnv (\ env@(Env { env_top = top }) ->
                          env { env_top = top { hsc_dflags = xopt_set (hsc_dflags top) flag}} )

-- | Run the inner computation with the general flag switched off.
unsetGOptM :: GeneralFlag -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
unsetGOptM flag = updEnv (\ env@(Env { env_top = top }) ->
                            env { env_top = top { hsc_dflags = gopt_unset (hsc_dflags top) flag}} )

-- | Run the inner computation with the warning flag switched off.
unsetWOptM :: WarningFlag -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
unsetWOptM flag = updEnv (\ env@(Env { env_top = top }) ->
                            env { env_top = top { hsc_dflags = wopt_unset (hsc_dflags top) flag}} )

-- | Run the action only if the dump flag is set
whenDOptM :: DumpFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenDOptM flag thing_inside = do b <- doptM flag
                                 when b thing_inside

-- | Run the action only if the general flag is set
whenGOptM :: GeneralFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenGOptM flag thing_inside = do b <- goptM flag
                                 when b thing_inside

-- | Run the action only if the warning flag is set
whenWOptM :: WarningFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenWOptM flag thing_inside = do b <- woptM flag
                                 when b thing_inside

-- | Run the action only if the extension is enabled
whenXOptM :: ExtensionFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenXOptM flag thing_inside = do b <- xoptM flag
                                 when b thing_inside
getGhcMode :: TcRnIf gbl lcl GhcMode
getGhcMode = do { env <- getTopEnv; return (ghcMode (hsc_dflags env)) }

-- | Run the inner computation with the DynFlags swapped for their
-- dynamic-too variant.
withDoDynamicToo :: TcRnIf gbl lcl a -> TcRnIf gbl lcl a
withDoDynamicToo m = do env <- getEnv
                        let dflags = extractDynFlags env
                            dflags' = dynamicTooMkDynamicDynFlags dflags
                            env' = replaceDynFlags env dflags'
                        setEnv env' m
-- | The mutable variable holding the external package state.
getEpsVar :: TcRnIf gbl lcl (TcRef ExternalPackageState)
getEpsVar = do { env <- getTopEnv; return (hsc_EPS env) }

-- | A snapshot of the current external package state.
getEps :: TcRnIf gbl lcl ExternalPackageState
getEps = do { env <- getTopEnv; readMutVar (hsc_EPS env) }

-- | Update the external package state. Returns the second result of the
-- modifier function.
--
-- This is an atomic operation and forces evaluation of the modified EPS in
-- order to avoid space leaks.
updateEps :: (ExternalPackageState -> (ExternalPackageState, a))
          -> TcRnIf gbl lcl a
updateEps upd_fn = do
  traceIf (text "updating EPS")
  eps_var <- getEpsVar
  atomicUpdMutVar' eps_var upd_fn

-- | Update the external package state.
--
-- This is an atomic operation and forces evaluation of the modified EPS in
-- order to avoid space leaks.
updateEps_ :: (ExternalPackageState -> ExternalPackageState)
           -> TcRnIf gbl lcl ()
updateEps_ upd_fn = do
  traceIf (text "updating EPS_")
  eps_var <- getEpsVar
  -- Adapt the pure update to the (state, result) shape of the primitive.
  atomicUpdMutVar' eps_var (\eps -> (upd_fn eps, ()))

-- | The home package table.
getHpt :: TcRnIf gbl lcl HomePackageTable
getHpt = do { env <- getTopEnv; return (hsc_HPT env) }

getEpsAndHpt :: TcRnIf gbl lcl (ExternalPackageState, HomePackageTable)
getEpsAndHpt = do { env <- getTopEnv; eps <- readMutVar (hsc_EPS env)
                  ; return (eps, hsc_HPT env) }
{-
************************************************************************
* *
Arrow scopes
* *
************************************************************************
-}
-- | Record the current lexical environment and constraint variable so
-- that code inside a proc can later escape back to them.
newArrowScope :: TcM a -> TcM a
newArrowScope
  = updLclEnv $ \env -> env { tcl_arrow_ctxt = ArrowCtxt (tcl_rdr env) (tcl_lie env) }

-- Return to the stored environment (from the enclosing proc)
escapeArrowScope :: TcM a -> TcM a
escapeArrowScope
  = updLclEnv $ \ env ->
    case tcl_arrow_ctxt env of
      NoArrowCtxt           -> env
      ArrowCtxt rdr_env lie -> env { tcl_arrow_ctxt = NoArrowCtxt
                                   , tcl_lie = lie
                                   , tcl_rdr = rdr_env }
{-
************************************************************************
* *
Unique supply
* *
************************************************************************
-}
-- | Draw a fresh unique from the monad's supply.
newUnique :: TcRnIf gbl lcl Unique
newUnique
 = do { env <- getEnv ;
        let { u_var = env_us env } ;
        us <- readMutVar u_var ;
        case takeUniqFromSupply us of { (uniq, us') -> do {
          writeMutVar u_var us' ;
          return $! uniq }}}
   -- NOTE 1: we strictly split the supply, to avoid the possibility of leaving
   -- a chain of unevaluated supplies behind.
   -- NOTE 2: we use the uniq in the supply from the MutVar directly, and
   -- throw away one half of the new split supply. This is safe because this
   -- is the only place we use that unique. Using the other half of the split
   -- supply is safer, but slower.

-- | Split off a whole fresh supply of uniques.
newUniqueSupply :: TcRnIf gbl lcl UniqSupply
newUniqueSupply
 = do { env <- getEnv ;
        let { u_var = env_us env } ;
        us <- readMutVar u_var ;
        case splitUniqSupply us of { (us1,us2) -> do {
          writeMutVar u_var us1 ;
          return us2 }}}

-- | A fresh internal name with the same occurrence name as the original.
newLocalName :: Name -> TcM Name
newLocalName name = newName (nameOccName name)

-- | A fresh internal name at the current source location.
newName :: OccName -> TcM Name
newName occ
  = do { uniq <- newUnique
       ; loc  <- getSrcSpanM
       ; return (mkInternalName uniq occ loc) }

-- | A fresh system name (no useful source location).
newSysName :: OccName -> TcM Name
newSysName occ
  = do { uniq <- newUnique
       ; return (mkSystemName uniq occ) }

newSysLocalId :: FastString -> TcType -> TcRnIf gbl lcl TcId
newSysLocalId fs ty
  = do { u <- newUnique
       ; return (mkSysLocal fs u ty) }

newSysLocalIds :: FastString -> [TcType] -> TcRnIf gbl lcl [TcId]
newSysLocalIds fs tys
  = do { us <- newUniqueSupply
       ; return (zipWith (mkSysLocal fs) (uniqsFromSupply us) tys) }

instance MonadUnique (IOEnv (Env gbl lcl)) where
    getUniqueM       = newUnique
    getUniqueSupplyM = newUniqueSupply
{-
************************************************************************
* *
Accessing input/output
* *
************************************************************************
-}
-- | Allocate a fresh mutable reference.
newTcRef :: a -> TcRnIf gbl lcl (TcRef a)
newTcRef = newMutVar

readTcRef :: TcRef a -> TcRnIf gbl lcl a
readTcRef = readMutVar

writeTcRef :: TcRef a -> a -> TcRnIf gbl lcl ()
writeTcRef = writeMutVar

-- | Apply a pure function to the contents of a reference.
updTcRef :: TcRef a -> (a -> a) -> TcRnIf gbl lcl ()
-- Returns ()
updTcRef ref fn = liftIO $ do { old <- readIORef ref
                              ; writeIORef ref (fn old) }

updTcRefX :: TcRef a -> (a -> a) -> TcRnIf gbl lcl a
-- Returns previous value
updTcRefX ref fn = liftIO $ do { old <- readIORef ref
                               ; writeIORef ref (fn old)
                               ; return old }
{-
************************************************************************
* *
Debugging
* *
************************************************************************
-}
-- | Typechecker trace at the default trace level, with a herald.
traceTc :: String -> SDoc -> TcRn ()
traceTc herald doc = traceTcN 1 (hang (text herald) 2 doc)

-- | Typechecker trace
traceTcN :: Int -> SDoc -> TcRn ()
traceTcN level doc
    = do dflags <- getDynFlags
         when (level <= traceLevel dflags && not opt_NoDebugOutput) $
             traceOptTcRn Opt_D_dump_tc_trace doc

traceRn :: SDoc -> TcRn ()
traceRn = traceOptTcRn Opt_D_dump_rn_trace -- Renamer Trace

-- | Output a doc if the given 'DumpFlag' is set.
--
-- By default this logs to stdout
-- However, if the `-ddump-to-file` flag is set,
-- then this will dump output to a file
--
-- Just a wrapper for 'dumpSDoc'
traceOptTcRn :: DumpFlag -> SDoc -> TcRn ()
traceOptTcRn flag doc
    = do { dflags <- getDynFlags
         ; when (dopt flag dflags) (traceTcRn flag doc)
         }

traceTcRn :: DumpFlag -> SDoc -> TcRn ()
-- ^ Unconditionally dump some trace output
--
-- The DumpFlag is used only to set the output filename
-- for --dump-to-file, not to decide whether or not to output
-- That part is done by the caller
traceTcRn flag doc
    = do { real_doc <- prettyDoc doc
         ; dflags   <- getDynFlags
         ; printer  <- getPrintUnqualified dflags
         ; liftIO $ dumpSDoc dflags printer flag "" real_doc }
  where
    -- Add current location if opt_PprStyle_Debug
    prettyDoc :: SDoc -> TcRn SDoc
    prettyDoc doc = if opt_PprStyle_Debug
       then do { loc <- getSrcSpanM; return $ mkLocMessage SevOutput loc doc }
       else return doc -- The full location is usually way too much

-- | Build a print-unqualification policy from the current global rdr-env.
getPrintUnqualified :: DynFlags -> TcRn PrintUnqualified
getPrintUnqualified dflags
  = do { rdr_env <- getGlobalRdrEnv
       ; return $ mkPrintUnqualified dflags rdr_env }

-- | Like logInfoTcRn, but for user consumption
printForUserTcRn :: SDoc -> TcRn ()
printForUserTcRn doc
  = do { dflags <- getDynFlags
       ; printer <- getPrintUnqualified dflags
       ; liftIO (printOutputForUser dflags printer doc) }

-- | Typechecker debug
debugDumpTcRn :: SDoc -> TcRn ()
debugDumpTcRn doc = unless opt_NoDebugOutput $
                    traceOptTcRn Opt_D_dump_tc doc
{-
traceIf and traceHiDiffs work in the TcRnIf monad, where no RdrEnv is
available. Alas, they behave inconsistently with the other stuff;
e.g. are unaffected by -dump-to-file.
-}
-- | Interface-loading and hi-file-diff traces. These run in TcRnIf where
-- no RdrEnv is available, so everything is printed fully qualified and
-- output is not redirected by -ddump-to-file.
traceIf, traceHiDiffs :: SDoc -> TcRnIf m n ()
traceIf      = traceOptIf Opt_D_dump_if_trace
traceHiDiffs = traceOptIf Opt_D_dump_hi_diffs

traceOptIf :: DumpFlag -> SDoc -> TcRnIf m n ()
traceOptIf flag doc
  = whenDOptM flag $    -- No RdrEnv available, so qualify everything
    do { dflags <- getDynFlags
       ; liftIO (putMsg dflags doc) }
{-
************************************************************************
* *
Typechecker global environment
* *
************************************************************************
-}
-- | Run the inner computation as if compiling the given module.
setModule :: Module -> TcRn a -> TcRn a
setModule mod thing_inside = updGblEnv (\env -> env { tcg_mod = mod }) thing_inside

-- | Are we typechecking the GHCi interactive module?
getIsGHCi :: TcRn Bool
getIsGHCi = do { mod <- getModule
               ; return (isInteractiveModule mod) }

getGHCiMonad :: TcRn Name
getGHCiMonad = do { hsc <- getTopEnv; return (ic_monad $ hsc_IC hsc) }

getInteractivePrintName :: TcRn Name
getInteractivePrintName = do { hsc <- getTopEnv; return (ic_int_print $ hsc_IC hsc) }

-- | Is the current source an hs-boot file or an hsig signature?
tcIsHsBootOrSig :: TcRn Bool
tcIsHsBootOrSig = do { env <- getGblEnv; return (isHsBootOrSig (tcg_src env)) }

getGlobalRdrEnv :: TcRn GlobalRdrEnv
getGlobalRdrEnv = do { env <- getGblEnv; return (tcg_rdr_env env) }

getRdrEnvs :: TcRn (GlobalRdrEnv, LocalRdrEnv)
getRdrEnvs = do { (gbl,lcl) <- getEnvs; return (tcg_rdr_env gbl, tcl_rdr lcl) }

getImports :: TcRn ImportAvails
getImports = do { env <- getGblEnv; return (tcg_imports env) }

getFixityEnv :: TcRn FixityEnv
getFixityEnv = do { env <- getGblEnv; return (tcg_fix_env env) }

-- | Extend the fixity environment for the scope of the inner computation.
extendFixityEnv :: [(Name,FixItem)] -> RnM a -> RnM a
extendFixityEnv new_bit
  = updGblEnv (\env@(TcGblEnv { tcg_fix_env = old_fix_env }) ->
                env {tcg_fix_env = extendNameEnvList old_fix_env new_bit})

getRecFieldEnv :: TcRn RecFieldEnv
getRecFieldEnv = do { env <- getGblEnv; return (tcg_field_env env) }

getDeclaredDefaultTys :: TcRn (Maybe [Type])
getDeclaredDefaultTys = do { env <- getGblEnv; return (tcg_default env) }

-- | Record extra files (e.g. discovered via Template Haskell) that
-- recompilation checking must depend on.
addDependentFiles :: [FilePath] -> TcRn ()
addDependentFiles fs = do
  ref <- fmap tcg_dependent_files getGblEnv
  dep_files <- readTcRef ref
  writeTcRef ref (fs ++ dep_files)
{-
************************************************************************
* *
Error management
* *
************************************************************************
-}
-- | The source span of the declaration currently being checked.
-- (Named to avoid a clash with Name.getSrcLoc.)
getSrcSpanM :: TcRn SrcSpan
getSrcSpanM = fmap (RealSrcSpan . tcl_loc) getLclEnv

-- | Run the continuation with the given span as the current location.
-- An 'UnhelpfulSpan' never overwrites the useful span already in place.
setSrcSpan :: SrcSpan -> TcRn a -> TcRn a
setSrcSpan sp thing_inside =
  case sp of
    RealSrcSpan real_loc -> updLclEnv (\env -> env { tcl_loc = real_loc }) thing_inside
    UnhelpfulSpan _      -> thing_inside

-- | Apply a monadic function to the payload of a 'Located' value, with
-- the current source span set to the value's span.
addLocM :: (a -> TcM b) -> Located a -> TcM b
addLocM fn (L loc a) = setSrcSpan loc (fn a)

-- | Like 'addLocM', but re-wrap the result at the same location.
wrapLocM :: (a -> TcM b) -> Located a -> TcM (Located b)
wrapLocM fn (L loc a) = setSrcSpan loc (fmap (L loc) (fn a))

-- | Like 'wrapLocM' for a pair-returning function; only the first
-- component is re-wrapped.
wrapLocFstM :: (a -> TcM (b,c)) -> Located a -> TcM (Located b, c)
wrapLocFstM fn (L loc a) =
  setSrcSpan loc (fmap (\(b, c) -> (L loc b, c)) (fn a))

-- | Like 'wrapLocFstM', but re-wrap the second component instead.
wrapLocSndM :: (a -> TcM (b,c)) -> Located a -> TcM (b, Located c)
wrapLocSndM fn (L loc a) =
  setSrcSpan loc (fmap (\(b, c) -> (b, L loc c)) (fn a))
-- Reporting errors

-- | The mutable variable holding the accumulated warnings and errors.
getErrsVar :: TcRn (TcRef Messages)
getErrsVar = fmap tcl_errs getLclEnv

-- | Run the continuation with a different messages variable.
setErrsVar :: TcRef Messages -> TcRn a -> TcRn a
setErrsVar v = updLclEnv (\env -> env { tcl_errs = v })

-- | Add an error at the current source span; ignores the context stack.
addErr :: MsgDoc -> TcRn ()
addErr msg = do
  loc <- getSrcSpanM
  addErrAt loc msg

-- | Report an error and fail.
failWith :: MsgDoc -> TcRn a
failWith msg = addErr msg >> failM
-- | Add an error at the given location, attaching the (tidied) error
-- context.  Mainly (exclusively?) used by the renamer, where tidying is
-- not an issue, but it's all lazy so the extra work doesn't matter.
addErrAt :: SrcSpan -> MsgDoc -> TcRn ()
addErrAt loc msg = do
  ctxt     <- getErrCtxt
  tidy_env <- tcInitTidyEnv
  err_info <- mkErrInfo tidy_env ctxt
  addLongErrAt loc msg err_info

-- | Add one error per (location, message) pair.
addErrs :: [(SrcSpan,MsgDoc)] -> TcRn ()
addErrs = mapM_ (uncurry addErrAt)

-- | Add the error message when the boolean is False.
checkErr :: Bool -> MsgDoc -> TcRn ()
checkErr ok msg = if ok then return () else addErr msg

-- | Emit the warning only when the condition holds.
warnIf :: Bool -> MsgDoc -> TcRn ()
warnIf cond msg
  | cond      = addWarn msg
  | otherwise = return ()

-- | Merge the given messages into the accumulated warnings and errors.
addMessages :: Messages -> TcRn ()
addMessages (m_warns, m_errs) = do
  errs_var      <- getErrsVar
  (warns, errs) <- readTcRef errs_var
  writeTcRef errs_var (warns `unionBags` m_warns, errs `unionBags` m_errs)
-- | Run the computation, discarding any warnings it emits (errors are
-- kept).  Used e.g. to suppress unused-variable warnings inside derived
-- code.
discardWarnings :: TcRn a -> TcRn a
discardWarnings thing_inside = do
  errs_var       <- getErrsVar
  (old_warns, _) <- readTcRef errs_var
  result         <- thing_inside
  -- Revert warnings to old_warns, keeping any newly added errors
  (_new_warns, new_errs) <- readTcRef errs_var
  writeTcRef errs_var (old_warns, new_errs)
  return result
{-
************************************************************************
* *
Shared error message stuff: renamer and typechecker
* *
************************************************************************
-}
-- | Build (but do not report) a long-form error message at the given span.
mkLongErrAt :: SrcSpan -> MsgDoc -> MsgDoc -> TcRn ErrMsg
mkLongErrAt loc msg extra = do
  dflags  <- getDynFlags
  printer <- getPrintUnqualified dflags
  return (mkLongErrMsg dflags loc printer msg extra)

-- | Build and immediately report a long-form error message.
addLongErrAt :: SrcSpan -> MsgDoc -> MsgDoc -> TcRn ()
addLongErrAt loc msg extra = mkLongErrAt loc msg extra >>= reportError

-- | Report each error in turn.
reportErrors :: [ErrMsg] -> TcM ()
reportErrors = mapM_ reportError

-- | Append one error to the accumulated error bag (with a trace).
reportError :: ErrMsg -> TcRn ()
reportError err = do
  traceTc "Adding error:" (pprLocErrMsg err)
  errs_var      <- getErrsVar
  (warns, errs) <- readTcRef errs_var
  writeTcRef errs_var (warns, errs `snocBag` err)
-- | Downgrade the given message to warning severity and append it to the
-- accumulated warning bag.
reportWarning :: ErrMsg -> TcRn ()
reportWarning err = do
  -- 'err' was built by mkLongErrMsg or something like that, so it is of
  -- error severity; makeIntoWarning downgrades it to SevWarning.
  let warn = makeIntoWarning err
  traceTc "Adding warning:" (pprLocErrMsg warn)
  errs_var      <- getErrsVar
  (warns, errs) <- readTcRef errs_var
  writeTcRef errs_var (warns `snocBag` warn, errs)

-- | 'tryM', with a debug trace on failure.
try_m :: TcRn r -> TcRn (Either IOEnvFailure r)
try_m thing = do
  mb_r <- tryM thing
  case mb_r of
    Left exn -> do
      traceTc "tryTc/recoverM recovering from" (text (showException exn))
      return mb_r
    Right _ -> return mb_r
-----------------------
-- | Try the main action; if it fails, run the recovery action instead.
-- Errors emitted by the main action are retained either way.
recoverM :: TcRn r -- Recovery action; do this if the main one fails
         -> TcRn r -- Main action: do this first
         -> TcRn r
recoverM recover thing = do
  mb_res <- try_m thing
  either (const recover) return mb_res

-----------------------
-- | Map over the list, dropping elements on which the function fails, so
-- the result list can be shorter than the argument list.
mapAndRecoverM :: (a -> TcRn b) -> [a] -> TcRn [b]
mapAndRecoverM _ []     = return []
mapAndRecoverM f (x:xs) = do
  mb_r <- try_m (f x)
  rs   <- mapAndRecoverM f xs
  return (either (const rs) (: rs) mb_r)

-- | Succeeds if applying the argument to all members of the lists succeeds,
-- but nevertheless runs it on all arguments, to collect all errors.
mapAndReportM :: (a -> TcRn b) -> [a] -> TcRn [b]
mapAndReportM f xs = checkNoErrs (mapAndRecoverM f xs)
-----------------------
-- | Execute the action with a fresh message context.
--
-- Returns @Just r@ when the action succeeds with result @r@, and
-- @Nothing@ when it fails; in both cases all warnings and errors the
-- action accumulated are returned alongside.  It always succeeds itself
-- (the only exception caught is the IOEnv built-in one; see IOEnv.failM).
tryTc :: TcRn a -> TcRn (Messages, Maybe a)
tryTc m = do
  errs_var <- newTcRef emptyMessages
  res      <- try_m (setErrsVar errs_var m)
  msgs     <- readTcRef errs_var
  return (msgs, either (const Nothing) Just res)
-----------------------
-- | Like 'tryTc', but treat "succeeded with error messages" as failure:
-- the result is @Just r@ only when the action succeeds with no errors.
-- Messages are returned either way; even in the @Just@ case there may be
-- warnings.
tryTcErrs :: TcRn a -> TcRn (Messages, Maybe a)
tryTcErrs thing = do
  (msgs, res) <- tryTc thing
  dflags      <- getDynFlags
  let errs_found = errorsFound dflags msgs
  return ( msgs
         , case res of
             Nothing  -> Nothing
             Just val
               | errs_found -> Nothing
               | otherwise  -> Just val )

-----------------------
-- | Just like 'tryTcErrs', except that the LIE (captured constraints) of
-- the action is propagated only if there are no errors.  Hence it's
-- restricted to the type-check monad.
tryTcLIE :: TcM a -> TcM (Messages, Maybe a)
tryTcLIE thing_inside = do
  ((msgs, mb_res), lie) <- captureConstraints (tryTcErrs thing_inside)
  case mb_res of
    Nothing  -> return (msgs, Nothing)
    Just val -> do
      emitConstraints lie
      return (msgs, Just val)
-----------------------
-- | @tryTcLIE_ recover main@ tries @main@; if it succeeds with no error
-- messages, that is the answer (its warnings are kept).  Otherwise every
-- message from @main@ is dropped and @recover@ runs instead.
tryTcLIE_ :: TcM r -> TcM r -> TcM r
tryTcLIE_ recover main = do
  (msgs, mb_res) <- tryTcLIE main
  case mb_res of
    Just val -> do
      addMessages msgs -- There might be warnings
      return val
    Nothing -> recover -- Discard all msgs

-----------------------
-- | @checkNoErrs m@ succeeds iff @m@ succeeds and generates no errors.
-- If @m@ fails, or succeeds while having emitted error messages (it
-- might have recovered internally), @checkNoErrs m@ fails too.
-- Regardless, any errors generated by @m@ are propagated to the
-- enclosing context.
checkNoErrs :: TcM r -> TcM r
checkNoErrs main = do
  (msgs, mb_res) <- tryTcLIE main
  addMessages msgs
  maybe failM return mb_res
-- | Run the action only when no errors have been reported so far.
whenNoErrs :: TcM () -> TcM ()
whenNoErrs thing = ifErrsM (return ()) thing

-- | @ifErrsM bale_out normal@ runs @bale_out@ when the errors collection
-- already contains errors, otherwise it runs @normal@.
ifErrsM :: TcRn r -> TcRn r -> TcRn r
ifErrsM bale_out normal = do
  errs_var <- getErrsVar
  msgs     <- readTcRef errs_var
  dflags   <- getDynFlags
  if errorsFound dflags msgs
    then bale_out
    else normal

-- | Fail when errors have already been reported.  Useful to avoid error
-- cascades.
failIfErrsM :: TcRn ()
failIfErrsM = ifErrsM failM (return ())
#ifdef GHCI
-- | With GHCi support compiled in, Template Haskell constructs are
-- available, so this check always succeeds.
checkTH :: a -> String -> TcRn ()
checkTH _ _ = return () -- OK
#else
-- | Without GHCi support, any use of a TH construct is an error; see
-- 'failTH'.
checkTH :: Outputable a => a -> String -> TcRn ()
checkTH e what = failTH e what -- Raise an error in a stage-1 compiler
#endif

-- | Report that the construct @e@ (described by @what@) needs a GHC
-- built with interpreter support, and fail.
failTH :: Outputable a => a -> String -> TcRn x
failTH e what  -- Raise an error in a stage-1 compiler
  = failWithTc (vcat [ hang (char 'A' <+> text what
                             <+> ptext (sLit "requires GHC with interpreter support:"))
                          2 (ppr e)
                     , ptext (sLit "Perhaps you are using a stage-1 compiler?") ])
{-
************************************************************************
* *
Context management for the type checker
* *
************************************************************************
-}
-- | The current error-context stack.
getErrCtxt :: TcM [ErrCtxt]
getErrCtxt = fmap tcl_ctxt getLclEnv

-- | Replace the error-context stack for the given computation.
setErrCtxt :: [ErrCtxt] -> TcM a -> TcM a
setErrCtxt ctxt = updLclEnv (\env -> env { tcl_ctxt = ctxt })

-- | Push a fixed message onto the error-context stack.
addErrCtxt :: MsgDoc -> TcM a -> TcM a
addErrCtxt msg = addErrCtxtM (\env -> return (env, msg))

-- | Push a tidying-aware message onto the error-context stack.
addErrCtxtM :: (TidyEnv -> TcM (TidyEnv, MsgDoc)) -> TcM a -> TcM a
addErrCtxtM ctxt = updCtxt ((False, ctxt) :)

-- | Push a landmark context: one never trimmed away by 'mkErrInfo'.
addLandmarkErrCtxt :: MsgDoc -> TcM a -> TcM a
addLandmarkErrCtxt msg = updCtxt ((True, \env -> return (env, msg)) :)

-- | Helper for the above: apply a function to the context stack.
updCtxt :: ([ErrCtxt] -> [ErrCtxt]) -> TcM a -> TcM a
updCtxt upd = updLclEnv (\env -> env { tcl_ctxt = upd (tcl_ctxt env) })

-- | Drop the innermost error context, if any.
popErrCtxt :: TcM a -> TcM a
popErrCtxt = updCtxt (drop 1)
-- | Build a 'CtLoc' from the given origin and the current local
-- environment, at the initial sub-goal depth.
getCtLoc :: CtOrigin -> TcM CtLoc
getCtLoc origin = do
  env <- getLclEnv
  return CtLoc { ctl_origin = origin
               , ctl_env    = env
               , ctl_depth  = initialSubGoalDepth }

-- | Set the SrcSpan, binder stack and error context from the 'CtLoc'.
setCtLoc :: CtLoc -> TcM a -> TcM a
setCtLoc (CtLoc { ctl_env = lcl }) thing_inside =
  updLclEnv (\env -> env { tcl_loc   = tcl_loc lcl
                         , tcl_bndrs = tcl_bndrs lcl
                         , tcl_ctxt  = tcl_ctxt lcl })
            thing_inside
{-
************************************************************************
* *
Error message generation (type checker)
* *
************************************************************************
The addErrTc functions add an error message, but do not cause failure.
The 'M' variants pass a TidyEnv that has already been used to
tidy up the message; we then use it to tidy the context messages
-}
-- | Add an error message (with context) at the current source span; does
-- not cause failure.
addErrTc :: MsgDoc -> TcM ()
addErrTc err_msg = do
  env0 <- tcInitTidyEnv
  addErrTcM (env0, err_msg)

-- | 'addErrTc' applied to each message in turn.
addErrsTc :: [MsgDoc] -> TcM ()
addErrsTc = mapM_ addErrTc

-- | Variant of 'addErrTc' taking a tidy-environment that has already
-- been used to tidy the message; the same env then tidies the context.
addErrTcM :: (TidyEnv, MsgDoc) -> TcM ()
addErrTcM (tidy_env, err_msg) = do
  ctxt <- getErrCtxt
  loc  <- getSrcSpanM
  add_err_tcm tidy_env err_msg loc ctxt

-- | Return the error message, instead of reporting it straight away.
mkErrTcM :: (TidyEnv, MsgDoc) -> TcM ErrMsg
mkErrTcM (tidy_env, err_msg) = do
  ctxt     <- getErrCtxt
  loc      <- getSrcSpanM
  err_info <- mkErrInfo tidy_env ctxt
  mkLongErrAt loc err_msg err_info

-- | Add an error message and fail.
failWithTc :: MsgDoc -> TcM a
failWithTc err_msg = addErrTc err_msg >> failM

-- | Add an error message (with its tidy env) and fail.
failWithTcM :: (TidyEnv, MsgDoc) -> TcM a
failWithTcM local_and_msg = addErrTcM local_and_msg >> failM

-- | Fail with the message when the boolean is False.
checkTc :: Bool -> MsgDoc -> TcM ()
checkTc True  _   = return ()
checkTc False err = failWithTc err

-- | Fail with the message when the boolean is True.
failIfTc :: Bool -> MsgDoc -> TcM ()
failIfTc False _   = return ()
failIfTc True  err = failWithTc err
-- Warnings have no 'M' variant, nor failure

-- | Emit the warning (with context) when the condition holds.
warnTc :: Bool -> MsgDoc -> TcM ()
warnTc warn_if_true warn_msg
  | warn_if_true = addWarnTc warn_msg
  | otherwise    = return ()

-- | Emit a warning, attaching the tidied error context.
addWarnTc :: MsgDoc -> TcM ()
addWarnTc msg = do
  env0 <- tcInitTidyEnv
  addWarnTcM (env0, msg)

-- | 'addWarnTc' with an explicit tidy environment.
addWarnTcM :: (TidyEnv, MsgDoc) -> TcM ()
addWarnTcM (env0, msg) = do
  ctxt     <- getErrCtxt
  err_info <- mkErrInfo env0 ctxt
  add_warn msg err_info

-- | Emit a warning at the current source span, with no extra info.
addWarn :: MsgDoc -> TcRn ()
addWarn msg = add_warn msg Outputable.empty

-- | Emit a warning at the given span, with no extra info.
addWarnAt :: SrcSpan -> MsgDoc -> TcRn ()
addWarnAt loc msg = add_warn_at loc msg Outputable.empty

-- | Emit a warning (plus extra info) at the current source span.
add_warn :: MsgDoc -> MsgDoc -> TcRn ()
add_warn msg extra_info = do
  loc <- getSrcSpanM
  add_warn_at loc msg extra_info

-- | Emit a warning (plus extra info) at the given span.
add_warn_at :: SrcSpan -> MsgDoc -> MsgDoc -> TcRn ()
add_warn_at loc msg extra_info = do
  dflags  <- getDynFlags
  printer <- getPrintUnqualified dflags
  let warn = mkLongWarnMsg dflags loc printer msg extra_info
  reportWarning warn
-- | The tidy environment stored in the local environment.
tcInitTidyEnv :: TcM TidyEnv
tcInitTidyEnv = fmap tcl_tidy getLclEnv

{-
-----------------------------------
        Other helper functions
-}

-- | Report a long-form error at the given span, with the context stack
-- tidied by the supplied environment.
add_err_tcm :: TidyEnv -> MsgDoc -> SrcSpan
            -> [ErrCtxt]
            -> TcM ()
add_err_tcm tidy_env err_msg loc ctxt = do
  err_info <- mkErrInfo tidy_env ctxt
  addLongErrAt loc err_msg err_info
-- | Tidy the error-context stack into a single doc, trimming excessive
-- contexts: at most 'mAX_CONTEXTS' non-landmark entries are rendered,
-- while landmark entries (flag @True@) are always kept.
mkErrInfo :: TidyEnv -> [ErrCtxt] -> TcM SDoc
-- Tidy the error info, trimming excessive contexts
mkErrInfo env ctxts
-- | opt_PprStyle_Debug     -- In -dppr-debug style the output
-- = return empty           -- just becomes too voluminous
 | otherwise
 = go 0 env ctxts
 where
   go :: Int -> TidyEnv -> [ErrCtxt] -> TcM SDoc
   go _ _   [] = return Outputable.empty
   go n env ((is_landmark, ctxt) : ctxts)
     | is_landmark || n < mAX_CONTEXTS -- Too verbose || opt_PprStyle_Debug
     = do { (env', msg) <- ctxt env
          ; let n' = if is_landmark then n else n+1  -- landmarks don't count
          ; rest <- go n' env' ctxts
          ; return (msg $$ rest) }
     | otherwise
     = go n env ctxts

mAX_CONTEXTS :: Int     -- No more than this number of non-landmark contexts
mAX_CONTEXTS = 3

-- debugTc is useful for monadic debugging code

-- | Run the action only when 'debugIsOn' holds (presumably a compiler
-- built with assertions/DEBUG -- TODO confirm).
debugTc :: TcM () -> TcM ()
debugTc thing
  | debugIsOn = thing
  | otherwise = return ()
{-
************************************************************************
* *
Type constraints
* *
************************************************************************
-}
-- | Allocate a fresh, empty evidence-bindings variable.
newTcEvBinds :: TcM EvBindsVar
newTcEvBinds = do
  ref  <- newTcRef emptyEvBindMap
  uniq <- newUnique
  return (EvBindsVar ref uniq)

-- | Add a binding to the TcEvBinds by side effect.
addTcEvBind :: EvBindsVar -> EvBind -> TcM ()
addTcEvBind (EvBindsVar ev_ref _) ev_bind = do
  traceTc "addTcEvBind" (ppr ev_bind)
  binds <- readTcRef ev_ref
  writeTcRef ev_ref (extendEvBinds binds ev_bind)

-- | Read back the accumulated evidence bindings as a bag.
getTcEvBinds :: EvBindsVar -> TcM (Bag EvBind)
getTcEvBinds (EvBindsVar ev_ref _) =
  fmap evBindMapBinds (readTcRef ev_ref)

-- | Pick an 'OccName' via the supplied function from the used-occurrence
-- set held in 'tcg_dfun_n', and record the choice as used.
chooseUniqueOccTc :: (OccSet -> OccName) -> TcM OccName
chooseUniqueOccTc fn = do
  env <- getGblEnv
  let dfun_n_var = tcg_dfun_n env
  set <- readTcRef dfun_n_var
  let occ = fn set
  writeTcRef dfun_n_var (extendOccSet set occ)
  return occ
-- | The mutable variable collecting wanted constraints (the LIE).
getConstraintVar :: TcM (TcRef WantedConstraints)
getConstraintVar = fmap tcl_lie getLclEnv

-- | Run the continuation with a different constraint variable.
setConstraintVar :: TcRef WantedConstraints -> TcM a -> TcM a
setConstraintVar lie_var = updLclEnv (\env -> env { tcl_lie = lie_var })

-- | Merge the given constraints into the current LIE.
emitConstraints :: WantedConstraints -> TcM ()
emitConstraints ct = do
  lie_var <- getConstraintVar
  updTcRef lie_var (`andWC` ct)

-- | Emit a single simple constraint.
emitSimple :: Ct -> TcM ()
emitSimple ct = do
  lie_var <- getConstraintVar
  updTcRef lie_var (`addSimples` unitBag ct)

-- | Emit a bag of simple constraints.
emitSimples :: Cts -> TcM ()
emitSimples cts = do
  lie_var <- getConstraintVar
  updTcRef lie_var (`addSimples` cts)

-- | Emit a single implication constraint.
emitImplication :: Implication -> TcM ()
emitImplication ct = do
  lie_var <- getConstraintVar
  updTcRef lie_var (`addImplics` unitBag ct)

-- | Emit a bag of implication constraints.
emitImplications :: Bag Implication -> TcM ()
emitImplications ct = do
  lie_var <- getConstraintVar
  updTcRef lie_var (`addImplics` ct)

-- | Emit a constraint known to be insoluble, tracing the resulting LIE.
emitInsoluble :: Ct -> TcM ()
emitInsoluble ct = do
  lie_var <- getConstraintVar
  updTcRef lie_var (`addInsols` unitBag ct)
  v <- readTcRef lie_var
  traceTc "emitInsoluble" (ppr v)
-- | Run the computation with a fresh constraint variable, returning the
-- constraints it generates alongside its result (without emitting them).
captureConstraints :: TcM a -> TcM (a, WantedConstraints)
captureConstraints thing_inside = do
  lie_var <- newTcRef emptyWC
  res     <- updLclEnv (\env -> env { tcl_lie = lie_var }) thing_inside
  lie     <- readTcRef lie_var
  return (res, lie)

-- | Like 'captureConstraints', but also run the computation one TcLevel
-- deeper; the bumped level is returned too.
pushLevelAndCaptureConstraints :: TcM a -> TcM (a, TcLevel, WantedConstraints)
pushLevelAndCaptureConstraints thing_inside = do
  env     <- getLclEnv
  lie_var <- newTcRef emptyWC
  let tclvl' = pushTcLevel (tcl_tclvl env)
  res <- setLclEnv (env { tcl_tclvl = tclvl'
                        , tcl_lie   = lie_var })
                   thing_inside
  lie <- readTcRef lie_var
  return (res, tclvl', lie)
-- | Run the computation one TcLevel deeper.
pushTcLevelM_ :: TcM a -> TcM a
pushTcLevelM_ = updLclEnv (\env -> env { tcl_tclvl = pushTcLevel (tcl_tclvl env) })

-- | Run the computation one TcLevel deeper, returning the bumped level.
pushTcLevelM :: TcM a -> TcM (a, TcLevel)
pushTcLevelM thing_inside = do
  env <- getLclEnv
  let tclvl' = pushTcLevel (tcl_tclvl env)
  res <- setLclEnv (env { tcl_tclvl = tclvl' }) thing_inside
  return (res, tclvl')

-- | The current TcLevel.
getTcLevel :: TcM TcLevel
getTcLevel = fmap tcl_tclvl getLclEnv

-- | Run the computation at the given TcLevel.
setTcLevel :: TcLevel -> TcM a -> TcM a
setTcLevel tclvl = updLclEnv (\env -> env { tcl_tclvl = tclvl })

-- | Is the meta type variable touchable at the current TcLevel?
isTouchableTcM :: TcTyVar -> TcM Bool
isTouchableTcM tv = do
  env <- getLclEnv
  return (isTouchableMetaTyVar (tcl_tclvl env) tv)
-- | The local type environment.
getLclTypeEnv :: TcM TcTypeEnv
getLclTypeEnv = fmap tcl_env getLclEnv

-- | Install the type environment (and tyvars) from the given local env,
-- but do *not* disturb other fields, notably the lie_var.
setLclTypeEnv :: TcLclEnv -> TcM a -> TcM a
setLclTypeEnv lcl_env = updLclEnv upd
  where
    upd env = env { tcl_env    = tcl_env lcl_env,
                    tcl_tyvars = tcl_tyvars lcl_env }

-- | Trace the current LIE under the given tag.
traceTcConstraints :: String -> TcM ()
traceTcConstraints msg = do
  lie_var <- getConstraintVar
  lie     <- readTcRef lie_var
  traceTc (msg ++ ": LIE:") (ppr lie)
-- | For each named wildcard (paired with its fresh type variable), emit
-- an insoluble 'CHoleCan' constraint located at the wildcard's
-- occurrence, so the hole is reported to the user.
emitWildcardHoleConstraints :: [(Name, TcTyVar)] -> TcM ()
emitWildcardHoleConstraints wcs
  = do { ctLoc <- getCtLoc HoleOrigin
       ; forM_ wcs $ \(name, tv) -> do {
       ; let real_span = case nameSrcSpan name of
                           RealSrcSpan span  -> span
                           UnhelpfulSpan str -> pprPanic "emitWildcardHoleConstraints"
                                                      (ppr name <+> quotes (ftext str))
               -- Wildcards are defined locally, and so have RealSrcSpans
             ctLoc' = setCtLocSpan ctLoc real_span
             ty     = mkTyVarTy tv
             ev     = mkLocalId name ty
             can    = CHoleCan { cc_ev = CtWanted ty ev ctLoc'
                               , cc_occ = occName name
                               , cc_hole = TypeHole }
       ; emitInsoluble can } }
{-
************************************************************************
* *
Template Haskell context
* *
************************************************************************
-}
-- | Record that Template Haskell was used in this module.
recordThUse :: TcM ()
recordThUse = do
  env <- getGblEnv
  writeTcRef (tcg_th_used env) True

-- | Record that a TH splice was used in this module.
recordThSpliceUse :: TcM ()
recordThSpliceUse = do
  env <- getGblEnv
  writeTcRef (tcg_th_splice_used env) True

-- | Record the name in the keep-alive set.
keepAlive :: Name -> TcRn ()
keepAlive name = do
  env <- getGblEnv
  traceRn (ptext (sLit "keep alive") <+> ppr name)
  updTcRef (tcg_keep env) (`extendNameSet` name)

-- | The current Template Haskell stage.
getStage :: TcM ThStage
getStage = fmap tcl_th_ctxt getLclEnv

-- | Look up a name's top-level flag and TH bind level, paired with the
-- current TH stage; @Nothing@ when the name has no recorded bind level.
getStageAndBindLevel :: Name -> TcRn (Maybe (TopLevelFlag, ThLevel, ThStage))
getStageAndBindLevel name = do
  env <- getLclEnv
  return $ case lookupNameEnv (tcl_th_bndrs env) name of
    Nothing                  -> Nothing
    Just (top_lvl, bind_lvl) -> Just (top_lvl, bind_lvl, tcl_th_ctxt env)

-- | Run the continuation in the given TH stage.
setStage :: ThStage -> TcM a -> TcRn a
setStage s = updLclEnv (\env -> env { tcl_th_ctxt = s })
{-
************************************************************************
* *
Safe Haskell context
* *
************************************************************************
-}
-- | Mark that safe inference has failed, recording the warnings that
-- explain why.
-- See Note [Safe Haskell Overlapping Instances Implementation]
-- although this is used for more than just that failure case.
recordUnsafeInfer :: WarningMessages -> TcM ()
recordUnsafeInfer warns = do
  env <- getGblEnv
  writeTcRef (tcg_safeInfer env) (False, warns)

-- | Figure out the final correct safe haskell mode
finalSafeMode :: DynFlags -> TcGblEnv -> IO SafeHaskellMode
finalSafeMode dflags tcg_env = do
  safeInf <- fmap fst (readIORef (tcg_safeInfer tcg_env))
  return $ case safeHaskell dflags of
    Sf_None
      | safeInferOn dflags && safeInf -> Sf_Safe
      | otherwise                     -> Sf_None
    s -> s

-- | Switch instances to safe instances if we're in Safe mode.
fixSafeInstances :: SafeHaskellMode -> [ClsInst] -> [ClsInst]
fixSafeInstances sfMode
  | sfMode /= Sf_Safe = id
  | otherwise         = map fixSafe
  where
    fixSafe inst = inst { is_flag = (is_flag inst) { isSafeOverlap = True } }
{-
************************************************************************
* *
Stuff for the renamer's local env
* *
************************************************************************
-}
-- | The renamer's local reader environment.
getLocalRdrEnv :: RnM LocalRdrEnv
getLocalRdrEnv = fmap tcl_rdr getLclEnv

-- | Run the continuation with the given local reader environment.
setLocalRdrEnv :: LocalRdrEnv -> RnM a -> RnM a
setLocalRdrEnv rdr_env = updLclEnv (\env -> env { tcl_rdr = rdr_env })
{-
************************************************************************
* *
Stuff for interface decls
* *
************************************************************************
-}
-- | Build a local interface environment for the given module, carrying
-- the given location doc and empty tyvar/id environments.
mkIfLclEnv :: Module -> SDoc -> IfLclEnv
mkIfLclEnv mod loc =
  IfLclEnv { if_mod    = mod
           , if_loc    = loc
           , if_tv_env = emptyUFM
           , if_id_env = emptyUFM }
-- | Run an 'IfG' (top-level interface monad) computation inside an existing
-- 'TcRn' (typecheck-renaming monad) computation by initializing an 'IfGblEnv'
-- based on 'TcGblEnv'.
initIfaceTcRn :: IfG a -> TcRn a
initIfaceTcRn thing_inside
  = do { tcg_env <- getGblEnv
       ; let { if_env = IfGblEnv {
                  -- if_rec_types reads the mutable type env of the
                  -- module currently being compiled
                  if_rec_types = Just (tcg_mod tcg_env, get_type_env)
               }
             ; get_type_env = readTcRef (tcg_type_env_var tcg_env) }
       ; setEnvs (if_env, ()) thing_inside }
-- | Used when checking the up-to-date-ness of the old Iface.
-- Initialise the environment with no useful info at all.
initIfaceCheck :: HscEnv -> IfG a -> IO a
initIfaceCheck hsc_env do_this = do
  let rec_types =
        case hsc_type_env_var hsc_env of
          Just (mod, var) -> Just (mod, readTcRef var)
          Nothing         -> Nothing
      gbl_env = IfGblEnv { if_rec_types = rec_types }
  initTcRnIf 'i' hsc_env gbl_env () do_this
-- | Used when type-checking an up-to-date interface file.
-- No type envt from the current module, but we do know the module
-- dependencies; the continuation receives a fresh, empty type-env ref.
initIfaceTc :: ModIface
            -> (TcRef TypeEnv -> IfL a) -> TcRnIf gbl lcl a
initIfaceTc iface do_this
  = do { tc_env_var <- newTcRef emptyTypeEnv
       ; let { gbl_env = IfGblEnv {
                  if_rec_types = Just (mod, readTcRef tc_env_var)
               } ;
             ; if_lenv = mkIfLclEnv mod doc
             }
       ; setEnvs (gbl_env, if_lenv) (do_this tc_env_var)
       }
  where
    mod = mi_module iface
    doc = ptext (sLit "The interface for") <+> quotes (ppr mod)
-- | Run an 'IfL' computation under a fresh local interface environment
-- for the given module and location doc.
initIfaceLcl :: Module -> SDoc -> IfL a -> IfM lcl a
initIfaceLcl mod loc_doc = setLclEnv (mkIfLclEnv mod loc_doc)

-- | The module whose interface is currently being processed.
getIfModule :: IfL Module
getIfModule = fmap if_mod getLclEnv
--------------------
failIfM :: MsgDoc -> IfL a
-- The Iface monad doesn't have a place to accumulate errors, so we
-- just fall over fast if one happens; it "shouldnt happen".
-- We use IfL here so that we can get context info out of the local env
failIfM msg
  = do { env <- getLclEnv
       ; let full_msg = (if_loc env <> colon) $$ nest 2 msg
       ; dflags <- getDynFlags
         -- Log at SevFatal directly, then fail the monad
       ; liftIO (log_action dflags dflags SevFatal noSrcSpan (defaultErrStyle dflags) full_msg)
       ; failM }
--------------------
forkM_maybe :: SDoc -> IfL a -> IfL (Maybe a)
-- Run thing_inside in an interleaved thread.
-- It shares everything with the parent thread, so this is DANGEROUS.
--
-- It returns Nothing if the computation fails
--
-- It's used for lazily type-checking interface
-- signatures, which is pretty benign
forkM_maybe doc thing_inside
 -- NB: Don't share the mutable env_us with the interleaved thread since env_us
 --     does not get updated atomically (e.g. in newUnique and newUniqueSupply).
 = do { child_us <- newUniqueSupply
      ; child_env_us <- newMutVar child_us
        -- The interleaved computation runs with async exceptions masked;
        -- see Note [Masking exceptions in forkM_maybe]
      ; unsafeInterleaveM $ uninterruptibleMaskM_ $ updEnv (\env -> env { env_us = child_env_us }) $
        do { traceIf (text "Starting fork {" <+> doc)
           ; mb_res <- tryM $
                       updLclEnv (\env -> env { if_loc = if_loc env $$ doc }) $
                       thing_inside
           ; case mb_res of
                Right r  -> do { traceIf (text "} ending fork" <+> doc)
                               ; return (Just r) }
                Left exn -> do {
                    -- Bleat about errors in the forked thread, if -ddump-if-trace is on
                    -- Otherwise we silently discard errors. Errors can legitimately
                    -- happen when compiling interface signatures (see tcInterfaceSigs)
                      whenDOptM Opt_D_dump_if_trace $ do
                          dflags <- getDynFlags
                          let msg = hang (text "forkM failed:" <+> doc)
                                       2 (text (show exn))
                          liftIO $ log_action dflags dflags SevFatal noSrcSpan (defaultErrStyle dflags) msg
                    ; traceIf (text "} ending fork (badly)" <+> doc)
                    ; return Nothing }
        }}
-- | Like 'forkM_maybe', but aborts compilation (via 'pgmError') when the
-- interleaved computation fails.  NB: the case analysis stays *inside*
-- 'return' so that the lazily forked result is not forced here.
forkM :: SDoc -> IfL a -> IfL a
forkM doc thing_inside
  = do { mb_res <- forkM_maybe doc thing_inside
       ; return (case mb_res of
                   Nothing -> pgmError "Cannot continue after interface file error"
                              -- pprPanic "forkM" doc
                   Just r  -> r) }
{-
Note [Masking exceptions in forkM_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When using GHC-as-API it must be possible to interrupt snippets of code
executed using runStmt (#1381). Since commit 02c4ab04 this is almost possible
by throwing an asynchronous interrupt to the GHC thread. However, there is a
subtle problem: runStmt first typechecks the code before running it, and the
exception might interrupt the type checker rather than the code. Moreover, the
typechecker might be inside an unsafeInterleaveIO (through forkM_maybe), and
more importantly might be inside an exception handler inside that
unsafeInterleaveIO. If that is the case, the exception handler will rethrow the
asynchronous exception as a synchronous exception, and the exception will end
up as the value of the unsafeInterleaveIO thunk (see #8006 for a detailed
discussion). We don't currently know a general solution to this problem, but
we can use uninterruptibleMask_ to avoid the situation.
-}
| fmthoma/ghc | compiler/typecheck/TcRnMonad.hs | bsd-3-clause | 54,147 | 63 | 25 | 17,111 | 12,574 | 6,548 | 6,026 | 879 | 5 |
module Hadrian.BuildPath where
import Base
import Data.Functor
import qualified Text.Parsec as Parsec
-- | A path of the form
--
-- > <build root>/stage<N>/<path/to/pkg/from/ghc/root>/build/<something>
--
-- where @something@ describes a library or object file or ... to be built
-- for the given package.
--
-- @a@, which represents that @something@, is instantiated with library-related
-- data types in @Rules.Library@ and with object/interface files related types
-- in @Rules.Compile@.
--
-- Values of this type are produced from on-disk paths by 'parseBuildPath'.
data BuildPath a = BuildPath
    { _buildPathRoot    :: FilePath -- ^ @<build root>/@
    , _buildPathStage   :: Stage    -- ^ @stage<N>/@
    , _buildPathPkgPath :: FilePath -- ^ @<path/to/pkg/from/ghc/root>/build/@
    , _buildPathTarget  :: a        -- ^ whatever comes after @build/@
    } deriving (Eq, Show)
-- | Parse a build path under the given build root.
parseBuildPath
    :: FilePath -- ^ build root
    -> Parsec.Parsec String () a -- ^ what to parse after @build/@
    -> Parsec.Parsec String () (BuildPath a)
parseBuildPath root afterBuild = do
    _ <- Parsec.string root
    _ <- Parsec.optional (Parsec.char '/')
    stage <- parseStage
    _ <- Parsec.char '/'
    -- Everything up to (and consuming) the "/build/" separator is the
    -- package path relative to the GHC root.
    pkgPath <- Parsec.manyTill Parsec.anyChar
                               (Parsec.try (Parsec.string "/build/"))
    BuildPath root stage pkgPath <$> afterBuild
-- | A path of the form
--
-- > <build root>/stage<N>/lib/<arch>-<os>-ghc-<ghc version>/<something>
--
-- where @something@ describes a library or object file or ... to be registered
-- for the given package. These are files registered into a ghc-pkg database.
--
-- @a@, which represents that @something@, is instantiated with library-related
-- data types in @Rules.Library@ and with object/interface files related types
-- in @Rules.Compile@.
--
-- Values of this type are produced from on-disk paths by 'parseGhcPkgPath'.
data GhcPkgPath a = GhcPkgPath
    { _ghcpkgPathRoot  :: FilePath -- ^ @<build root>/@
    , _ghcpkgPathStage :: Stage    -- ^ @stage<N>/@
    , _ghcpkgRegPath   :: FilePath -- ^ @lib/<arch>-<os>-ghc-<ghc version>/@
    , _ghcPkgObject    :: a        -- ^ whatever comes after
    } deriving (Eq, Show)
-- | Parse a registered ghc-pkg path under the given build root.
parseGhcPkgPath
    :: FilePath -- ^ build root
    -> Parsec.Parsec String () a -- ^ what to parse after @build/@
    -> Parsec.Parsec String () (GhcPkgPath a)
parseGhcPkgPath root after = do
    _ <- Parsec.string root
    _ <- Parsec.optional (Parsec.char '/')
    stage <- parseStage
    _ <- Parsec.char '/'
    -- "lib/" plus one further "/"-terminated component, e.g. the
    -- "<arch>-<os>-ghc-<version>" directory.
    regPath <- Parsec.string "lib/"
               <> Parsec.manyTill Parsec.anyChar (Parsec.try (Parsec.string "/"))
    GhcPkgPath root stage regPath <$> after
-- To be kept in sync with Stage.hs's stageString function
-- | Parse @"stageX"@ into a 'Stage'.
-- One alternative per inhabitant of 'Stage': each matches the rendered
-- enum index and maps it back with 'toEnum'.
parseStage :: Parsec.Parsec String () Stage
parseStage = (Parsec.string "stage" *> Parsec.choice
    [ Parsec.string (show n) $> toEnum n
    | n <- map fromEnum [minBound .. maxBound :: Stage]
    ]) Parsec.<?> "stage string"
-- To be kept in sync with the show instances in 'Way.Type', until we perhaps
-- use some bidirectional parsing/pretty printing approach or library.

-- | Parse a way suffix, returning the argument when no suffix is found (the
-- argument will be vanilla in most cases, but dynamic when we parse the way
-- suffix out of a shared library file name).
parseWaySuffix :: Way -> Parsec.Parsec String () Way
parseWaySuffix w = Parsec.choice
    [ Parsec.char '_' *>
      (wayFromUnits <$> Parsec.sepBy1 parseWayUnit (Parsec.char '_'))
    , pure w   -- no leading '_': fall back to the supplied way
    ] Parsec.<?> "way suffix (e.g _thr_p, or none for vanilla)"
-- | Same as 'parseWaySuffix', but for parsing e.g @thr_p_@
-- instead of @_thr_p@, like 'parseWaySuffix' does.
--
-- This is used to parse paths to object files,
-- in Rules.Compile.
parseWayPrefix :: Way -> Parsec.Parsec String () Way
parseWayPrefix defaultWay =
    Parsec.choice
        [ wayFromUnits <$> Parsec.endBy1 parseWayUnit (Parsec.char '_')
        , pure defaultWay  -- no prefix: fall back to the supplied way
        ] Parsec.<?> "way prefix (e.g thr_p_, or none for vanilla)"
-- | Parse a single way unit.  NB: the order of alternatives is kept
-- as-is; 'd' fans out to either @debug@ or @dyn@.
parseWayUnit :: Parsec.Parsec String () WayUnit
parseWayUnit = Parsec.choice
    [ Parsec.string "thr" $> Threaded
    , Parsec.char 'd' *> Parsec.choice
          [ Parsec.string "ebug" $> Debug
          , Parsec.string "yn"   $> Dynamic ]
    , Parsec.char 'p' $> Profiling
    , Parsec.char 'l' $> Logging
    ] Parsec.<?> "way unit (thr, debug, dyn, p, l)"
-- | Parse a @"pkgname-pkgversion"@ string into the package name and the
-- integers that make up the package version.
parsePkgId :: Parsec.Parsec String () (String, [Integer])
parsePkgId = go "" Parsec.<?> "package identifier (<name>-<version>)"
  where
    -- Accumulate "-"-separated name chunks until what follows parses as
    -- a version, at which point the pair is complete.
    go acc = do
        chunk <- Parsec.many1 Parsec.alphaNum
        _     <- Parsec.char '-'
        let acc' = if null acc then chunk else acc ++ "-" ++ chunk
        Parsec.choice [ (acc',) <$> parsePkgVersion
                      , go acc' ]
-- | Parse "."-separated integers that describe a package's version.
parsePkgVersion :: Parsec.Parsec String () [Integer]
parsePkgVersion = fmap reverse (parsePkgVersion' [])
    Parsec.<?> "package version"
  where
    -- Accumulates components in reverse order.  The first alternative
    -- stops (without consuming, via try/lookAhead) when the next
    -- "."-separated chunk starts with a letter or underscore, i.e. is
    -- not a version component.
    parsePkgVersion' xs = do
        n <- parseNatural
        Parsec.choice
            [ Parsec.try
                  (Parsec.lookAhead (Parsec.char '.' *>
                      (Parsec.letter <|> Parsec.char '_')
                   )
                  )
              $> (n:xs)
            , Parsec.char '.' *> parsePkgVersion' (n:xs)
            , pure $ (n:xs) ]
-- | Parse a natural number (one or more digits).
parseNatural :: Parsec.Parsec String () Integer
parseNatural = fmap read (Parsec.many1 Parsec.digit) Parsec.<?> "natural number"
-- | Runs the given parser against the given path, erroring out when the parser
-- fails (because it shouldn't if the code from this module is correct).
parsePath
    :: Parsec.Parsec String () a -- ^ parser to run
    -> String                    -- ^ string describing the input source
    -> FilePath                  -- ^ path to parse
    -> Action a
parsePath p inp path =
    either bail pure (Parsec.parse p inp path)
  where
    bail err = fail $ "Hadrian.BuildPath.parsePath: path="
                   ++ path ++ ", error:\n" ++ show err
| sdiehl/ghc | hadrian/src/Hadrian/BuildPath.hs | bsd-3-clause | 6,249 | 0 | 20 | 1,480 | 1,266 | 658 | 608 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Construct a @Plan@ for how to build
module Stack.Build.ConstructPlan
( constructPlan
) where
import Control.Arrow ((&&&), second)
import Control.Exception.Lifted
import Control.Monad
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger, logWarn)
import Control.Monad.RWS.Strict
import Control.Monad.Trans.Resource
import Data.Either
import Data.Function
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Distribution.Package (Dependency (..))
import Distribution.Version (anyVersion)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Prelude hiding (pi, writeFile)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Types.Build
import Stack.BuildPlan
import Stack.Package
import Stack.PackageDump
import Stack.Types
-- | What we know about a single package: only an installed copy, only its
-- source, or both.
data PackageInfo
    = PIOnlyInstalled InstallLocation Installed -- ^ installed, no source available
    | PIOnlySource PackageSource                -- ^ source available, not installed
    | PIBoth PackageSource Installed            -- ^ both source and an installed copy
-- | Merge source and installed information for the same package.
--
-- The two assertions check internal invariants: the installed version and
-- location must agree with what the source map claims.
combineSourceInstalled :: PackageSource
                       -> (InstallLocation, Installed)
                       -> PackageInfo
combineSourceInstalled ps (location, installed) =
    assert (piiVersion ps == installedVersion installed) $
    assert (piiLocation ps == location) $
    case location of
        -- Always trust something in the snapshot
        Snap -> PIOnlyInstalled location installed
        Local -> PIBoth ps installed
-- | Per-package merge of source and installed information.
type CombinedMap = Map PackageName PackageInfo

-- | Join the source map and installed map into a 'CombinedMap': packages in
-- both are merged via 'combineSourceInstalled'; packages only in one side
-- become 'PIOnlySource' / 'PIOnlyInstalled' respectively.
combineMap :: SourceMap -> InstalledMap -> CombinedMap
combineMap = Map.mergeWithKey
    (\_ s i -> Just $ combineSourceInstalled s i)
    (fmap PIOnlySource)
    (fmap (uncurry PIOnlyInstalled))
-- | Result of resolving a single dependency: either a task that will build
-- it, or an already-installed copy we can reuse.
data AddDepRes
    = ADRToInstall Task
    | ADRFound InstallLocation Installed
    deriving Show
-- | Writer output accumulated while constructing the plan.
data W = W
    { wFinals :: !(Map PackageName (Either ConstructPlanException Task))
    -- ^ final build actions (tests/benchmarks) per package, or the reason
    -- planning them failed
    , wInstall :: !(Map Text InstallLocation)
    -- ^ executable to be installed, and location where the binary is placed
    , wDirty :: !(Map PackageName Text)
    -- ^ why a local package is considered dirty
    , wDeps :: !(Set PackageName)
    -- ^ Packages which count as dependencies
    , wWarnings :: !([Text] -> [Text])
    -- ^ Warnings
    }
-- All five fields are combined pointwise with their own 'mappend'.
instance Monoid W where
    mempty = W mempty mempty mempty mempty mempty
    mappend (W a b c d e) (W w x y z z') = W (mappend a w) (mappend b x) (mappend c y) (mappend d z) (mappend e z')
-- | The planning monad: reads the static 'Ctx', writes a 'W', and keeps as
-- state the memoized per-package resolution results, all over 'IO'.
type M = RWST
    Ctx
    W
    (Map PackageName (Either ConstructPlanException AddDepRes))
    IO

-- | Read-only environment threaded through plan construction.
data Ctx = Ctx
    { mbp :: !MiniBuildPlan
    , baseConfigOpts :: !BaseConfigOpts
    , loadPackage :: !(PackageName -> Version -> Map FlagName Bool -> IO Package)
    , combinedMap :: !CombinedMap
    , toolToPackages :: !(Dependency -> Map PackageName VersionRange)
    , ctxEnvConfig :: !EnvConfig
    , callStack :: ![PackageName]
    -- ^ packages currently being resolved; used for cycle detection
    , extraToBuild :: !(Set PackageName)
    , ctxVersions :: !(Map PackageName (Set Version))
    , wanted :: !(Set PackageName)
    , localNames :: !(Set PackageName)
    }
-- Standard Has* boilerplate so that generic config-reading functions can run
-- in 'M'; everything is ultimately delegated to the stored 'EnvConfig'.
instance HasStackRoot Ctx
instance HasPlatform Ctx
instance HasGHCVariant Ctx
instance HasConfig Ctx
instance HasBuildConfig Ctx where
    getBuildConfig = getBuildConfig . getEnvConfig
instance HasEnvConfig Ctx where
    getEnvConfig = ctxEnvConfig
-- | Build a 'Plan' describing which packages must be built/unregistered,
-- given the snapshot, the local packages, and what is already installed.
-- Fails with 'ConstructPlanExceptions' when any package cannot be planned.
constructPlan :: forall env m.
                 (MonadCatch m, MonadReader env m, HasEnvConfig env, MonadIO m, MonadLogger m, MonadBaseControl IO m, HasHttpManager env)
              => MiniBuildPlan
              -> BaseConfigOpts
              -> [LocalPackage]
              -> Set PackageName -- ^ additional packages that must be built
              -> [DumpPackage () ()] -- ^ locally registered
              -> (PackageName -> Version -> Map FlagName Bool -> IO Package) -- ^ load upstream package
              -> SourceMap
              -> InstalledMap
              -> m Plan
constructPlan mbp0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackage0 sourceMap installedMap = do
    -- GhcPkgId -> identifier for everything registered in the local DB.
    let locallyRegistered = Map.fromList $ map (dpGhcPkgId &&& dpPackageIdent) localDumpPkgs
    bconfig <- asks getBuildConfig
    -- All versions known per package name, used for "latest applicable" hints.
    let versions =
            Map.fromListWith Set.union $
            map (second Set.singleton . toTuple) $
            Map.keys (bcPackageCaches bconfig)
    econfig <- asks getEnvConfig
    let onWanted = void . addDep False . packageName . lpPackage
    -- Seed the resolution with all wanted locals plus the extras.
    let inner = do
            mapM_ onWanted $ filter lpWanted locals
            mapM_ (addDep False) $ Set.toList extraToBuild0
    ((), m, W efinals installExes dirtyReason deps warnings) <-
        liftIO $ runRWST inner (ctx econfig versions) M.empty
    mapM_ $logWarn (warnings [])
    -- Split the memoized results into failures and successful resolutions.
    let toEither (_, Left e) = Left e
        toEither (k, Right v) = Right (k, v)
        (errlibs, adrs) = partitionEithers $ map toEither $ M.toList m
        (errfinals, finals) = partitionEithers $ map toEither $ M.toList efinals
        errs = errlibs ++ errfinals
    if null errs
        then do
            -- Only ADRToInstall entries become tasks; found installs need no work.
            let toTask (_, ADRFound _ _) = Nothing
                toTask (name, ADRToInstall task) = Just (name, task)
                tasks = M.fromList $ mapMaybe toTask adrs
                takeSubset =
                    case boptsBuildSubset $ bcoBuildOpts baseConfigOpts0 of
                        BSAll -> id
                        BSOnlySnapshot -> stripLocals
                        BSOnlyDependencies -> stripNonDeps deps
            return $ takeSubset Plan
                { planTasks = tasks
                , planFinals = M.fromList finals
                , planUnregisterLocal = mkUnregisterLocal tasks dirtyReason locallyRegistered sourceMap
                , planInstallExes =
                    if boptsInstallExes $ bcoBuildOpts baseConfigOpts0
                        then installExes
                        else Map.empty
                }
        else throwM $ ConstructPlanExceptions errs (bcStackYaml $ getBuildConfig econfig)
  where
    ctx econfig versions = Ctx
      { mbp = mbp0
      , baseConfigOpts = baseConfigOpts0
      , loadPackage = loadPackage0
      , combinedMap = combineMap sourceMap installedMap
      , toolToPackages = \ (Dependency name _) ->
          maybe Map.empty (Map.fromSet (const anyVersion)) $
          Map.lookup (T.pack . packageNameString . fromCabalPackageName $ name) toolMap
      , ctxEnvConfig = econfig
      , callStack = []
      , extraToBuild = extraToBuild0
      , ctxVersions = versions
      , wanted = wantedLocalPackages locals
      , localNames = Set.fromList $ map (packageName . lpPackage) locals
      }
    -- TODO Currently, this will only consider and install tools from the
    -- snapshot. It will not automatically install build tools from extra-deps
    -- or local packages.
    toolMap = getToolMap mbp0
-- | Determine which packages to unregister based on the given tasks and
-- already registered local packages
mkUnregisterLocal :: Map PackageName Task
                  -> Map PackageName Text          -- ^ why each package is dirty
                  -> Map GhcPkgId PackageIdentifier -- ^ locally registered packages
                  -> SourceMap
                  -> Map GhcPkgId (PackageIdentifier, Maybe Text)
mkUnregisterLocal tasks dirtyReason locallyRegistered sourceMap =
    Map.unions $ map toUnregisterMap $ Map.toList locallyRegistered
  where
    -- Decide, for one registered package, whether it must be unregistered
    -- and why.  (Consistently uses the Map qualifier for Data.Map.Strict;
    -- the original mixed the equivalent M and Map aliases.)
    toUnregisterMap (gid, ident) =
        case Map.lookup name tasks of
            Nothing ->
                case Map.lookup name sourceMap of
                    -- A snapshot copy supersedes the locally registered one.
                    Just (PSUpstream _ Snap _) -> Map.singleton gid
                        ( ident
                        , Just "Switching to snapshot installed package"
                        )
                    _ -> Map.empty
            -- The package is being rebuilt; record why it was dirty, if known.
            Just _ -> Map.singleton gid
                ( ident
                , Map.lookup name dirtyReason
                )
      where
        name = packageIdentifierName ident
-- | Record the final (test/benchmark) build task for a local package in the
-- writer's 'wFinals'.  The task's dependencies are resolved here as well.
addFinal :: LocalPackage -> Package -> Bool -> M ()
addFinal lp package isAllInOne = do
    depsRes <- addPackageDeps False package
    res <- case depsRes of
        Left e -> return $ Left e
        Right (missing, present, _minLoc) -> do
            ctx <- ask
            return $ Right Task
                { taskProvides = PackageIdentifier
                    (packageName package)
                    (packageVersion package)
                , taskConfigOpts = TaskConfigOpts missing $ \missing' ->
                    -- Configure options are computed lazily, once the
                    -- missing dependencies have been built.
                    let allDeps = Map.union present missing'
                     in configureOpts
                            (getEnvConfig ctx)
                            (baseConfigOpts ctx)
                            allDeps
                            True -- wanted
                            True -- local
                            Local
                            package
                , taskPresent = present
                , taskType = TTLocal lp
                , taskAllInOne = isAllInOne
                }
    tell mempty { wFinals = Map.singleton (packageName package) res }
-- | Resolve a single package by name: reuse an installed copy or schedule a
-- build.  Results are memoized in the state map, and the reader's
-- 'callStack' is used to detect dependency cycles.
addDep :: Bool -- ^ is this being used by a dependency?
       -> PackageName
       -> M (Either ConstructPlanException AddDepRes)
addDep treatAsDep' name = do
    ctx <- ask
    -- Anything not explicitly wanted by the user counts as a dependency.
    let treatAsDep = treatAsDep' || name `Set.notMember` wanted ctx
    when treatAsDep $ markAsDep name
    m <- get
    case Map.lookup name m of
        -- Already resolved: return the memoized result.
        Just res -> return res
        Nothing -> do
            res <- if name `elem` callStack ctx
                then return $ Left $ DependencyCycleDetected $ name : callStack ctx
                -- Push this package onto the call stack while resolving it.
                else local (\ctx' -> ctx' { callStack = name : callStack ctx' }) $
                    case Map.lookup name $ combinedMap ctx of
                        -- TODO look up in the package index and see if there's a
                        -- recommendation available
                        Nothing -> return $ Left $ UnknownPackage name
                        Just (PIOnlyInstalled loc installed) -> do
                            -- slightly hacky, no flags since they likely won't affect executable names
                            tellExecutablesUpstream name (installedVersion installed) loc Map.empty
                            return $ Right $ ADRFound loc installed
                        Just (PIOnlySource ps) -> do
                            tellExecutables name ps
                            installPackage treatAsDep name ps Nothing
                        Just (PIBoth ps installed) -> do
                            tellExecutables name ps
                            installPackage treatAsDep name ps (Just installed)
            modify $ Map.insert name res
            return res
-- | Record the executables a package provides.  Local packages only count
-- when they are wanted; upstream packages are delegated to
-- 'tellExecutablesUpstream'.
tellExecutables :: PackageName -> PackageSource -> M ()
tellExecutables _ (PSLocal lp)
    | lpWanted lp = tellExecutablesPackage Local $ lpPackage lp
    | otherwise = return ()
tellExecutables name (PSUpstream version loc flags) =
    tellExecutablesUpstream name version loc flags
-- | Record executables provided by an upstream package, but only when the
-- user explicitly asked for that package to be built.
tellExecutablesUpstream :: PackageName -> Version -> InstallLocation -> Map FlagName Bool -> M ()
tellExecutablesUpstream name version loc flags = do
    ctx <- ask
    if name `Set.member` extraToBuild ctx
        then do
            pkg <- liftIO $ loadPackage ctx name version flags
            tellExecutablesPackage loc pkg
        else return ()
-- | Add this package's executables to the writer's 'wInstall' map, filtered
-- down to the components that are actually enabled.
tellExecutablesPackage :: InstallLocation -> Package -> M ()
tellExecutablesPackage loc p = do
    cm <- asks combinedMap
    -- Determine which components are enabled so we know which ones to copy
    let myComps =
            case Map.lookup (packageName p) cm of
                Nothing -> assert False Set.empty
                Just (PIOnlyInstalled _ _) -> Set.empty
                Just (PIOnlySource ps) -> goSource ps
                Just (PIBoth ps _) -> goSource ps
        goSource (PSLocal lp)
            | lpWanted lp = exeComponents (lpComponents lp)
            | otherwise = Set.empty
        goSource (PSUpstream{}) = Set.empty
    tell mempty { wInstall = Map.fromList $ map (, loc) $ Set.toList $ filterComps myComps $ packageExes p }
  where
    -- An empty component set means "no restriction": keep all executables.
    filterComps myComps x
        | Set.null myComps = x
        | otherwise = Set.intersection x myComps
-- | Plan the build of a single package from source.  For local packages with
-- tests/benchmarks we first attempt an all-in-one build plan; if that fails,
-- we roll the state back and fall back to building them separately.
installPackage :: Bool -- ^ is this being used by a dependency?
               -> PackageName
               -> PackageSource
               -> Maybe Installed
               -> M (Either ConstructPlanException AddDepRes)
installPackage treatAsDep name ps minstalled = do
    ctx <- ask
    case ps of
        PSUpstream version _ flags -> do
            package <- liftIO $ loadPackage ctx name version flags
            resolveDepsAndInstall False treatAsDep ps package minstalled
        PSLocal lp ->
            case lpTestBench lp of
                Nothing -> resolveDepsAndInstall False treatAsDep ps (lpPackage lp) minstalled
                Just tb -> do
                    -- Attempt to find a plan which performs an all-in-one
                    -- build. Ignore the writer action + reset the state if
                    -- it fails.
                    s <- get
                    res <- pass $ do
                        res <- addPackageDeps treatAsDep tb
                        -- Discard writer output when dependency resolution failed.
                        let writerFunc w = case res of
                                Left _ -> mempty
                                _ -> w
                        return (res, writerFunc)
                    case res of
                        Right deps -> do
                            adr <- installPackageGivenDeps True ps tb minstalled deps
                            -- FIXME: this redundantly adds the deps (but
                            -- they'll all just get looked up in the map)
                            addFinal lp tb True
                            return $ Right adr
                        Left _ -> do
                            -- Reset the state to how it was before
                            -- attempting to find an all-in-one build
                            -- plan.
                            put s
                            -- Otherwise, fall back on building the
                            -- tests / benchmarks in a separate step.
                            res' <- resolveDepsAndInstall False treatAsDep ps (lpPackage lp) minstalled
                            when (isRight res') $ do
                                -- Insert it into the map so that it's
                                -- available for addFinal.
                                modify $ Map.insert name res'
                                addFinal lp tb False
                            return res'
-- | Resolve the package's dependencies and, when that succeeds, decide how
-- it is installed via 'installPackageGivenDeps'.
resolveDepsAndInstall :: Bool
                      -> Bool
                      -> PackageSource
                      -> Package
                      -> Maybe Installed
                      -> M (Either ConstructPlanException AddDepRes)
resolveDepsAndInstall isAllInOne treatAsDep ps package minstalled = do
    edeps <- addPackageDeps treatAsDep package
    case edeps of
        Left err -> return (Left err)
        Right deps ->
            fmap Right (installPackageGivenDeps isAllInOne ps package minstalled deps)
-- | Given resolved dependencies, decide whether an already-installed copy
-- can be reused ('ADRFound') or a build task must be created
-- ('ADRToInstall').
installPackageGivenDeps :: Bool
                        -> PackageSource
                        -> Package
                        -> Maybe Installed
                        -> ( Set PackageIdentifier
                           , Map PackageIdentifier GhcPkgId
                           , InstallLocation )
                        -> M AddDepRes
installPackageGivenDeps isAllInOne ps package minstalled (missing, present, minLoc) = do
    let name = packageName package
    ctx <- ask
    mRightVersionInstalled <- case (minstalled, Set.null missing) of
        -- Installed and no missing deps: reuse unless the config is dirty.
        (Just installed, True) -> do
            shouldInstall <- checkDirtiness ps installed package present (wanted ctx)
            return $ if shouldInstall then Nothing else Just installed
        -- Installed but some deps still missing: must rebuild; record why.
        (Just _, False) -> do
            let t = T.intercalate ", " $ map (T.pack . packageNameString . packageIdentifierName) (Set.toList missing)
            tell mempty { wDirty = Map.singleton name $ "missing dependencies: " <> addEllipsis t }
            return Nothing
        (Nothing, _) -> return Nothing
    return $ case mRightVersionInstalled of
        Just installed -> ADRFound (piiLocation ps) installed
        Nothing -> ADRToInstall Task
            { taskProvides = PackageIdentifier
                (packageName package)
                (packageVersion package)
            , taskConfigOpts = TaskConfigOpts missing $ \missing' ->
                let allDeps = Map.union present missing'
                    destLoc = piiLocation ps <> minLoc
                 in configureOpts
                        (getEnvConfig ctx)
                        (baseConfigOpts ctx)
                        allDeps
                        (psWanted ps)
                        (psLocal ps)
                        -- An assertion to check for a recurrence of
                        -- https://github.com/commercialhaskell/stack/issues/345
                        (assert (destLoc == piiLocation ps) destLoc)
                        package
            , taskPresent = present
            , taskType =
                case ps of
                    PSLocal lp -> TTLocal lp
                    PSUpstream _ loc _ -> TTUpstream package $ loc <> minLoc
            , taskAllInOne = isAllInOne
            }
-- | Limit a piece of text to at most 100 characters, replacing the tail with
-- an ellipsis when it is truncated.
addEllipsis :: Text -> Text
addEllipsis t =
    if T.length t < 100
        then t
        else T.take 97 t <> T.pack "..."
-- | Resolve all dependencies (including tool deps) of a package.  On success
-- returns the still-missing identifiers, the already-present ones mapped to
-- their GhcPkgIds, and the combined install location; on failure, which
-- dependencies failed and why.
addPackageDeps :: Bool -- ^ is this being used by a dependency?
               -> Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, InstallLocation))
addPackageDeps treatAsDep package = do
    ctx <- ask
    deps' <- packageDepsWithTools package
    deps <- forM (Map.toList deps') $ \(depname, range) -> do
        eres <- addDep treatAsDep depname
        -- Best known version that would satisfy the range, for error messages.
        let mlatestApplicable =
                (latestApplicableVersion range <=< Map.lookup depname) (ctxVersions ctx)
        case eres of
            Left e ->
                let bd =
                        case e of
                            UnknownPackage name -> assert (name == depname) NotInBuildPlan
                            _ -> Couldn'tResolveItsDependencies
                 in return $ Left (depname, (range, mlatestApplicable, bd))
            Right adr -> do
                inRange <- if adrVersion adr `withinRange` range
                    then return True
                    else do
                        let warn reason =
                                tell mempty { wWarnings = (msg:) }
                              where
                                msg = T.concat
                                    [ "WARNING: Ignoring out of range dependency"
                                    , reason
                                    , ": "
                                    , T.pack $ packageIdentifierString $ PackageIdentifier depname (adrVersion adr)
                                    , ". "
                                    , T.pack $ packageNameString $ packageName package
                                    , " requires: "
                                    , versionRangeText range
                                    ]
                        allowNewer <- asks $ configAllowNewer . getConfig
                        if allowNewer
                            then do
                                warn " (allow-newer enabled)"
                                return True
                            else do
                                -- Out-of-range versions are tolerated when both
                                -- ends of the edge come from the snapshot.
                                x <- inSnapshot (packageName package) (packageVersion package)
                                y <- inSnapshot depname (adrVersion adr)
                                if x && y
                                    then do
                                        warn " (trusting snapshot over Hackage revisions)"
                                        return True
                                    else return False
                if inRange
                    then case adr of
                        ADRToInstall task -> return $ Right
                            (Set.singleton $ taskProvides task, Map.empty, taskLocation task)
                        ADRFound loc (Executable _) -> return $ Right
                            (Set.empty, Map.empty, loc)
                        ADRFound loc (Library ident gid) -> return $ Right
                            (Set.empty, Map.singleton ident gid, loc)
                    else return $ Left (depname, (range, mlatestApplicable, DependencyMismatch $ adrVersion adr))
    case partitionEithers deps of
        -- Every dependency resolved: merge the per-dependency triples.
        ([], pairs) -> return $ Right $ mconcat pairs
        (errs, _) -> return $ Left $ DependencyPlanFailures
            (PackageIdentifier
                (packageName package)
                (packageVersion package))
            (Map.fromList errs)
  where
    adrVersion (ADRToInstall task) = packageIdentifierVersion $ taskProvides task
    adrVersion (ADRFound _ installed) = installedVersion installed
-- | Decide whether an installed package must be rebuilt, by comparing the
-- configuration we would use now against the cached one.  Returns 'True'
-- when a rebuild is needed, and records the reason in 'wDirty'.
checkDirtiness :: PackageSource
               -> Installed
               -> Package
               -> Map PackageIdentifier GhcPkgId
               -> Set PackageName
               -> M Bool
checkDirtiness ps installed package present wanted = do
    ctx <- ask
    moldOpts <- tryGetFlagCache installed
    let configOpts = configureOpts
            (getEnvConfig ctx)
            (baseConfigOpts ctx)
            present
            (psWanted ps)
            (psLocal ps)
            (piiLocation ps) -- should be Local always
            package
        buildOpts = bcoBuildOpts (baseConfigOpts ctx)
        -- The configuration we would want for this build.
        wantConfigCache = ConfigCache
            { configCacheOpts = configOpts
            , configCacheDeps = Set.fromList $ Map.elems present
            , configCacheComponents =
                case ps of
                    PSLocal lp -> Set.map renderComponent $ lpComponents lp
                    PSUpstream{} -> Set.empty
            , configCacheHaddock =
                shouldHaddockPackage buildOpts wanted (packageName package) ||
                -- Disabling haddocks when old config had haddocks doesn't make dirty.
                maybe False configCacheHaddock moldOpts
            }
    let mreason =
            case moldOpts of
                Nothing -> Just "old configure information not found"
                Just oldOpts
                    | Just reason <- describeConfigDiff config oldOpts wantConfigCache -> Just reason
                    | Just files <- psDirty ps -> Just $ "local file changes: " <>
                        addEllipsis (T.pack $ unwords $ Set.toList files)
                    | otherwise -> Nothing
        config = getConfig ctx
    case mreason of
        Nothing -> return False
        Just reason -> do
            tell mempty { wDirty = Map.singleton (packageName package) reason }
            return True
-- | Compare an old and new 'ConfigCache'; return a human-readable reason
-- when they differ in a way that requires a rebuild, or 'Nothing' when the
-- cached build is still valid.
describeConfigDiff :: Config -> ConfigCache -> ConfigCache -> Maybe Text
describeConfigDiff config old new
    | not (configCacheDeps new `Set.isSubsetOf` configCacheDeps old) = Just "dependencies changed"
    | not $ Set.null newComponents =
        Just $ "components added: " `T.append` T.intercalate ", "
            (map (decodeUtf8With lenientDecode) (Set.toList newComponents))
    | not (configCacheHaddock old) && configCacheHaddock new = Just "rebuilding with haddocks"
    | oldOpts /= newOpts = Just $ T.pack $ concat
        [ "flags changed from "
        , show oldOpts
        , " to "
        , show newOpts
        ]
    | otherwise = Nothing
  where
    -- options set by stack
    -- NOTE(review): "--sysconfdir" has no trailing '=' unlike its neighbours;
    -- looks deliberate upstream, but worth confirming.
    isStackOpt t = any (`T.isPrefixOf` t)
        [ "--dependency="
        , "--constraint="
        , "--package-db="
        , "--libdir="
        , "--bindir="
        , "--datadir="
        , "--libexecdir="
        , "--sysconfdir"
        , "--docdir="
        , "--htmldir="
        , "--haddockdir="
        , "--enable-tests"
        , "--enable-benchmarks"
        ] || elem t
        [ "--user"
        ]
    -- Remove --ghc-option(s) flags, but keep the options listed in isKeeper.
    stripGhcOptions =
        go
      where
        go [] = []
        go ("--ghc-option":x:xs) = go' x xs
        go ("--ghc-options":x:xs) = go' x xs
        go ((T.stripPrefix "--ghc-option=" -> Just x):xs) = go' x xs
        go ((T.stripPrefix "--ghc-options=" -> Just x):xs) = go' x xs
        go (x:xs) = x : go xs
        go' x xs = checkKeepers x $ go xs
        checkKeepers x xs =
            case filter isKeeper $ T.words x of
                [] -> xs
                keepers -> "--ghc-options" : T.unwords keepers : xs
        -- GHC options which affect build results and therefore should always
        -- force a rebuild
        --
        -- For the most part, we only care about options generated by Stack
        -- itself
        isKeeper = (== "-fhpc") -- more to be added later
    -- User-provided configure flags, with stack-generated noise removed.
    userOpts = filter (not . isStackOpt)
             . (if configRebuildGhcOptions config
                   then id
                   else stripGhcOptions)
             . map T.pack
             . (\(ConfigureOpts x y) -> x ++ y)
             . configCacheOpts
    -- Drop the common prefix of both option lists before comparing.
    (oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new)
    removeMatching (x:xs) (y:ys)
        | x == y = removeMatching xs ys
    removeMatching xs ys = (xs, ys)
    newComponents = configCacheComponents new `Set.difference` configCacheComponents old
-- | Dirty local files for this package, when known.  Upstream packages have
-- no local files, so they are never dirty.
psDirty :: PackageSource -> Maybe (Set FilePath)
psDirty ps =
    case ps of
        PSLocal lp -> lpDirtyFiles lp
        PSUpstream{} -> Nothing -- files never change in an upstream package

-- | Is the package explicitly wanted by the user?  Only possible for locals.
psWanted :: PackageSource -> Bool
psWanted ps =
    case ps of
        PSLocal lp -> lpWanted lp
        PSUpstream{} -> False

-- | Is this a local package (as opposed to an upstream one)?
psLocal :: PackageSource -> Bool
psLocal ps =
    case ps of
        PSLocal _ -> True
        PSUpstream{} -> False
-- | Get all of the dependencies for a given package, including guessed build
-- tool dependencies.
packageDepsWithTools :: Package -> M (Map PackageName VersionRange)
packageDepsWithTools pkg = do
    -- Translate each declared build tool into candidate package ranges.
    toolPkgs <- asks (\ctx -> map (toolToPackages ctx) (packageTools pkg))
    return $ Map.unionsWith intersectVersionRanges (packageDeps pkg : toolPkgs)
-- | Strip out anything from the @Plan@ intended for the local database
stripLocals :: Plan -> Plan
stripLocals plan = plan
    { planTasks = Map.filter checkTask $ planTasks plan
    , planFinals = Map.empty
    , planUnregisterLocal = Map.empty
    , planInstallExes = Map.filter (/= Local) $ planInstallExes plan
    }
  where
    -- Keep only tasks that target the snapshot database.
    checkTask task =
        case taskType task of
            TTLocal _ -> False
            TTUpstream _ Local -> False
            TTUpstream _ Snap -> True
-- | Restrict the plan to the given dependency packages only, dropping
-- finals and executable installs.
stripNonDeps :: Set PackageName -> Plan -> Plan
stripNonDeps deps plan = plan
    { planTasks = Map.filter checkTask $ planTasks plan
    , planFinals = Map.empty
    , planInstallExes = Map.empty -- TODO maybe don't disable this?
    }
  where
    checkTask task = packageIdentifierName (taskProvides task) `Set.member` deps
-- | Record in the writer that this package participates as a dependency.
markAsDep :: PackageName -> M ()
markAsDep pkgName = tell (mempty { wDeps = Set.singleton pkgName })
-- | Is the given package/version combo defined in the snapshot?
inSnapshot :: PackageName -> Version -> M Bool
inSnapshot name version = do
    p <- asks mbp
    ls <- asks localNames
    -- The Maybe monad short-circuits to Nothing (-> False) on any miss:
    -- local packages shadow the snapshot, and the name must be present
    -- with exactly this version.
    return $ fromMaybe False $ do
        guard $ not $ name `Set.member` ls
        mpi <- Map.lookup name (mbpPackages p)
        return $ mpiVersion mpi == version
| harendra-kumar/stack | src/Stack/Build/ConstructPlan.hs | bsd-3-clause | 28,155 | 0 | 31 | 10,403 | 6,699 | 3,365 | 3,334 | 591 | 11 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
<title>Revisit | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/revisit/src/main/javahelp/org/zaproxy/zap/extension/revisit/resources/help_az_AZ/helpset_az_AZ.hs | apache-2.0 | 968 | 78 | 66 | 158 | 411 | 208 | 203 | -1 | -1 |
import Test.HUnit (Test(TestList), runTestTT, (~=?), Counts(errors, failures))
import Utils (stringsFromStatus, Hash(MkHash))
import Control.Applicative ((<$>))
import System.Exit (exitFailure)
import Control.Monad (when)
-- | One test case: raw @git status --porcelain -b@ output, the expected
-- branch string, and the expected six counters
-- (ahead, behind, staged, conflicts, changed, untracked).
type TestData = (String, String, [Int])
tests :: [TestData]
tests = [
  ("## master...ori/master [ahead 3]\n M", "master", [3,0,0,0,1,0])
  ,
  ("## stat\nM ", "stat", [0,0,1,0,0,0])
  ,
  ("## exp...o/exp [ahead 3, behind 2]\n", "exp", [3,2,0,0,0,0])
  ,
  ("## master\nU \nU \nM \nM \nM ", "master", [0,0,3,2,0,0])
  ,
  ("## HEAD (no branch)\n", ":hash", [0,0,0,0,0,0])
  ,
  ("## master\n M\n M\n M\n??\n", "master", [0,0,0,0,3,1])
  ,
  ("## dev...o/dev [ahead 4, behind 5]\nM \n M\n??\n", "dev", [4,5,1,0,1,1])
  ]
-- | Turn one 'TestData' triple into an HUnit assertion: parsing the status
-- output (with a fixed hash) must yield the branch followed by the counters.
makeTest :: TestData -> Test
makeTest (input, branch, numbers) = Just (branch : (show <$> numbers)) ~=? stringsFromStatus (Just $ MkHash "hash") input
main :: IO ()
main = do
  testResult <- runTestTT $ TestList $ makeTest <$> tests
  -- Exit non-zero when any test errored or failed, for CI integration.
  let some accessor = accessor testResult /= 0
  when (some errors || some failures) exitFailure
| TiddoLangerak/zsh-git-prompt | src/TestFunctional.hs | mit | 1,075 | 16 | 11 | 181 | 472 | 287 | 185 | 22 | 1 |
{-# OPTIONS_GHC -fdefer-type-errors #-}
-- | Trivial IO action that always yields True.
test :: IO Bool
test = pure True
| sdiehl/ghc | testsuite/tests/ghci/should_run/T14963c.hs | bsd-3-clause | 76 | 0 | 5 | 13 | 18 | 9 | 9 | 3 | 1 |
import System.IO (hFlush, stdout)
import Control.Monad (mapM)
import Control.Monad.Error (runErrorT)
import qualified Data.Map as Map
import qualified Data.Traversable as DT
import Readline (readline, load_history)
import Types
import Reader (read_str)
import Printer (_pr_str)
-- read
-- | Parse one line of input into a mal AST.
mal_read :: String -> IOThrows MalVal
mal_read = read_str
-- eval
-- | Evaluate the non-application parts of an AST: symbols are looked up in
-- the environment; lists, vectors and hash-maps have their elements
-- evaluated recursively; everything else is self-evaluating.
eval_ast :: MalVal -> (Map.Map String MalVal) -> IOThrows MalVal
eval_ast (MalSymbol sym) env = do
    case Map.lookup sym env of
         Nothing -> throwStr $ "'" ++ sym ++ "' not found"
         Just v -> return v
eval_ast ast@(MalList lst m) env = do
    new_lst <- mapM (\x -> (eval x env)) lst
    return $ MalList new_lst m
eval_ast ast@(MalVector lst m) env = do
    new_lst <- mapM (\x -> (eval x env)) lst
    return $ MalVector new_lst m
eval_ast ast@(MalHashMap lst m) env = do
    -- DT.mapM evaluates each value of the hash-map in place.
    new_hm <- DT.mapM (\x -> (eval x env)) lst
    return $ MalHashMap new_hm m
eval_ast ast env = return ast
-- | Apply a list form: evaluate all elements, then call the resulting
-- function on the remaining (already evaluated) arguments.
apply_ast :: MalVal -> (Map.Map String MalVal) -> IOThrows MalVal
apply_ast ast@(MalList _ _) env = do
    el <- eval_ast ast env
    case el of
         (MalList ((Func (Fn f) _) : rest) _) ->
             f $ rest
         -- Head did not evaluate to a callable function.
         el ->
             throwStr $ "invalid apply: " ++ (show el)
-- | Top-level evaluator: list forms are applications, everything else is
-- handled by 'eval_ast'.
eval :: MalVal -> (Map.Map String MalVal) -> IOThrows MalVal
eval ast@(MalList _ _) env = apply_ast ast env
eval ast env = eval_ast ast env
-- print
-- | Render an evaluated mal value for display.
mal_print :: MalVal -> String
mal_print = show
-- repl
-- Builtin arithmetic: each expects exactly two MalNumber arguments and
-- throws on anything else.  Note divd uses integer 'div' (truncating);
-- a zero divisor raises the usual divide-by-zero error.
add [MalNumber a, MalNumber b] = return $ MalNumber $ a + b
add _ = throwStr $ "illegal arguments to +"
sub [MalNumber a, MalNumber b] = return $ MalNumber $ a - b
sub _ = throwStr $ "illegal arguments to -"
mult [MalNumber a, MalNumber b] = return $ MalNumber $ a * b
mult _ = throwStr $ "illegal arguments to *"
divd [MalNumber a, MalNumber b] = return $ MalNumber $ a `div` b
divd _ = throwStr $ "illegal arguments to /"
-- | The fixed top-level environment: just the four arithmetic builtins.
repl_env :: Map.Map String MalVal
repl_env = Map.fromList [("+", _func add),
                         ("-", _func sub),
                         ("*", _func mult),
                         ("/", _func divd)]
-- | read-eval-print for one input line (printing returns the string).
rep :: String -> IOThrows String
rep line = do
    ast <- mal_read line
    exp <- eval ast repl_env
    return $ mal_print exp
-- | Interactive loop: EOF ends the loop, empty lines are skipped, errors
-- are printed and the loop continues.
repl_loop :: IO ()
repl_loop = do
    line <- readline "user> "
    case line of
        Nothing -> return ()
        Just "" -> repl_loop
        Just str -> do
            res <- runErrorT $ rep str
            out <- case res of
                Left (StringError str) -> return $ "Error: " ++ str
                Left (MalValError mv) -> return $ "Error: " ++ (show mv)
                Right val -> return val
            putStrLn out
            hFlush stdout
            repl_loop
-- Load readline history, then run the interactive loop.
main = do
    load_history
    repl_loop
| nlfiedler/mal | haskell/step2_eval.hs | mpl-2.0 | 2,798 | 0 | 18 | 827 | 1,086 | 543 | 543 | 77 | 5 |
{-# LANGUAGE TypeFamilies #-}
module ColInference where
-- | Element type of a collection; lists contain their element type directly.
type family Elem c
type instance Elem [e] = e

-- | Minimal collection interface used by the examples below.
class Col c where
  isEmpty :: c -> Bool
  add :: c -> Elem c -> c
  headTail :: c -> (Elem c,c)
-- addAll :: (Col c1, Col c2, Elem c1 ~ Elem c2) => c1 -> c2 -> c2
-- addAll c1 c2
-- | isEmpty c1
-- = c2
-- | otherwise
-- = let (x,c1') = headTail c1
-- in addAll c1' (add c2 x)
-- | Sum a collection of Ints by repeatedly splitting off the head.
sumCol :: (Col c, Elem c ~ Int) => c -> Int
sumCol c | isEmpty c
         = 0
         | otherwise
         = let (x,xs) = headTail c
           in x + (sumCol xs)
-- data CP :: * -> * where
-- CP :: (Col c1, Col c2, Elem c1 ~ Elem c2, Elem c2 ~ Int) => (c1,c2) -> CP Char
| urbanslug/ghc | testsuite/tests/indexed-types/should_compile/ColGivenCheck2.hs | bsd-3-clause | 651 | 4 | 10 | 189 | 175 | 95 | 80 | 14 | 1 |
module Root.Test.Test3 where
import Root.Src.P1.C
import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)
main :: IO ()
main = hspec spec

spec::Spec
spec = do
  describe "Prelude.head" $ do
    it "returns the first element of a list" $ do
      bar
      -- NOTE(review): head [23..] is 23, so this expectation of 25 fails;
      -- presumably intentional for this example suite — confirm.
      head [23 ..] `shouldBe` (25 :: Int)
| codeboardio/kali | test/src_examples/haskell/several_files_folders2/Root/Test/Test3.hs | mit | 306 | 0 | 15 | 57 | 107 | 59 | 48 | 13 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.ApplicationCache
(update, swapCache, abort, pattern UNCACHED, pattern IDLE,
pattern CHECKING, pattern DOWNLOADING, pattern UPDATEREADY,
pattern OBSOLETE, getStatus, checking, error, noUpdate,
downloading, progress, updateReady, cached, obsolete,
ApplicationCache(..), gTypeApplicationCache)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.update Mozilla ApplicationCache.update documentation>
-- Invokes the underlying JS @update()@ method, discarding its result.
update :: (MonadDOM m) => ApplicationCache -> m ()
update self = liftDOM (void (self ^. jsf "update" ()))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.swapCache Mozilla ApplicationCache.swapCache documentation>
-- Invokes the underlying JS @swapCache()@ method, discarding its result.
swapCache :: (MonadDOM m) => ApplicationCache -> m ()
swapCache self = liftDOM (void (self ^. jsf "swapCache" ()))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.abort Mozilla ApplicationCache.abort documentation>
-- Invokes the underlying JS @abort()@ method, discarding its result.
abort :: (MonadDOM m) => ApplicationCache -> m ()
abort self = liftDOM (void (self ^. jsf "abort" ()))
-- Numeric cache-status codes mirroring the DOM ApplicationCache constants.
pattern UNCACHED = 0
pattern IDLE = 1
pattern CHECKING = 2
pattern DOWNLOADING = 3
pattern UPDATEREADY = 4
pattern OBSOLETE = 5
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.status Mozilla ApplicationCache.status documentation>
-- Reads the JS @status@ property and rounds the JS number to a 'Word'
-- (one of the pattern constants above).
getStatus :: (MonadDOM m) => ApplicationCache -> m Word
getStatus self
  = liftDOM (round <$> ((self ^. js "status") >>= valToNumber))
-- Event names for the ApplicationCache lifecycle.  The 'error' and
-- 'progress' events use the async variant of the event-name constructor.

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.onchecking Mozilla ApplicationCache.onchecking documentation>
checking :: EventName ApplicationCache Event
checking = unsafeEventName (toJSString "checking")

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.onerror Mozilla ApplicationCache.onerror documentation>
error :: EventName ApplicationCache UIEvent
error = unsafeEventNameAsync (toJSString "error")

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.onnoupdate Mozilla ApplicationCache.onnoupdate documentation>
noUpdate :: EventName ApplicationCache Event
noUpdate = unsafeEventName (toJSString "noupdate")

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.ondownloading Mozilla ApplicationCache.ondownloading documentation>
downloading :: EventName ApplicationCache Event
downloading = unsafeEventName (toJSString "downloading")

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.onprogress Mozilla ApplicationCache.onprogress documentation>
progress :: EventName ApplicationCache ProgressEvent
progress = unsafeEventNameAsync (toJSString "progress")

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.onupdateready Mozilla ApplicationCache.onupdateready documentation>
updateReady :: EventName ApplicationCache Event
updateReady = unsafeEventName (toJSString "updateready")

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.oncached Mozilla ApplicationCache.oncached documentation>
cached :: EventName ApplicationCache Event
cached = unsafeEventName (toJSString "cached")

-- | <https://developer.mozilla.org/en-US/docs/Web/API/ApplicationCache.onobsolete Mozilla ApplicationCache.onobsolete documentation>
obsolete :: EventName ApplicationCache Event
obsolete = unsafeEventName (toJSString "obsolete")
| ghcjs/jsaddle-dom | src/JSDOM/Generated/ApplicationCache.hs | mit | 4,264 | 0 | 12 | 451 | 828 | 480 | 348 | 53 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : Postgis.Simple.Field.Default
-- Copyright : (c) igor720
-- License : MIT
--
-- Maintainer : igor720@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- Default types for Postgis data
--
-----------------------------------------------------------------------------
module Postgis.Simple.Field.Default where
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.FromField
import Postgis.Simple.Internal
-- | Default 'toField' implementation: serialise the geometry as EWKB and
-- hand it to postgresql-simple as an escaped bytestring.
toFieldDefault :: Geometry a => a -> Action
toFieldDefault geom = Escape (writeEWKB geom)
-- | Default fromField function
--
-- Accepts only columns whose Postgres type name is @geometry@; a NULL value
-- is reported as 'UnexpectedNull', otherwise the bytes are decoded as EWKB.
fromFieldDefault :: Geometry a => FieldParser a
fromFieldDefault f m = do
    typ <- typename f
    if typ /= "geometry"
      then returnError Incompatible f (show typ)
      else case m of
        Nothing -> returnError UnexpectedNull f ""
        Just bs -> return $ readEWKB bs
| igor720/postgis-simple | src/Postgis/Simple/Field/Default.hs | mit | 1,059 | 0 | 12 | 217 | 165 | 94 | 71 | 15 | 3 |
module Hash
( runScript
, runInteractive
) where
import System.IO (hFlush, stdout)
import Hash.Language.Exec
import Hash.Language.Commands
import Hash.Parsing.HashParser (parseToTLExpr)
import System.Directory (getCurrentDirectory, getHomeDirectory, doesFileExist)
import qualified Data.Map as M
import Control.Exception
import Data.Char (isSpace)
-- Get $HOME/.hashrc if exists
-- Load the user's @$HOME/.hashrc@ startup script, yielding the empty
-- string when no such file exists.
getHashRc :: IO String
getHashRc = do
  home <- getHomeDirectory
  let rcPath = home ++ "/.hashrc"
  present <- doesFileExist rcPath
  if present
    then readFile rcPath
    else return ""
-- The top-level module. Connects parsing to execution and adds interaction
-- with the user / reading from file.
-- Runs a .hash script
-- Run a .hash script file, prefixed by the user's ~/.hashrc contents.
runScript :: FilePath -> IO ()
runScript fp = do
  source <- readFile fp
  rc <- getHashRc
  let exprs = parseToTLExpr (rc ++ source)
  cwd <- getCurrentDirectory
  _ <- runHashProgram commands (Left cwd) exprs
  return ()
-- Communicates with the user and performs hash commands line by line
-- Start the interactive shell: execute ~/.hashrc first, then hand the
-- resulting script state to the REPL.
runInteractive :: IO ()
runInteractive = do
  rc <- getHashRc
  let startupExprs = parseToTLExpr rc
  cwd <- getCurrentDirectory
  initialState <- runHashProgram commands (Left cwd) startupExprs
  repl initialState "hash-0.1> "
-- flushStr prints out a string and immediately flushes the stream
-- Print a string and flush stdout immediately (so prompts appear
-- before blocking on input).
flushStr :: String -> IO ()
flushStr text = do
  putStr text
  hFlush stdout
-- Prints out a prompt and reads in a line of input
-- Display a prompt, then read one line of user input.
readPrompt :: String -> IO String
readPrompt promptText = do
  flushStr promptText
  getLine
-- Trim spaces
-- Strip leading and trailing whitespace from a string.
trim :: String -> String
trim = dropTrailing . dropLeading
  where
    dropLeading = dropWhile isSpace
    dropTrailing = reverse . dropWhile isSpace . reverse
-- Read Eval Print Loop: read a line, normalise it for the parser,
-- execute it against the current script state, and loop with the
-- updated state. ":q" quits. Exceptions from command execution are
-- reported and the previous state is kept.
repl :: ScriptState -> String -> IO ()
repl ss prompt = do
  input <- readPrompt prompt
  let stripped = trim input
  -- Non-empty commands must end in ';' for the parser; append it when
  -- missing. (`null` instead of `length … > 0`: O(1) and idiomatic;
  -- `last` is safe here because the guard proves non-emptiness.)
  let line
        | null stripped = input
        | last stripped /= ';' = stripped ++ ";"
        | otherwise = stripped
  case line of
    ":q" -> return ()
    _ -> do
      ss' <- catch (runHashProgram commands (Right ss) (parseToTLExpr line)) $
        \e -> do
          putStrLn $ " *** Exception: " ++ show (e :: SomeException)
          return ss
      repl ss' prompt
| yossarin/hash | src/Hash.hs | mit | 2,164 | 0 | 19 | 459 | 603 | 303 | 300 | 54 | 4 |
{-
Digit fifth powers
Problem 30
Surprisingly there are only three numbers that can be written as the sum of fourth powers of their digits:
1634 = 1^4 + 6^4 + 3^4 + 4^4
8208 = 8^4 + 2^4 + 0^4 + 8^4
9474 = 9^4 + 4^4 + 7^4 + 4^4
As 1 = 1^4 is not a sum it is not included.
The sum of these numbers is 1634 + 8208 + 9474 = 19316.
Find the sum of all the numbers that can be written as the sum of fifth powers of their digits.
-}
import Data.Bits
import Data.List
import Data.Char
-- Convert an ASCII digit character to its numeric value by masking the
-- low nibble of its code point ('0'..'9' are 0x30..0x39).
char2int :: Char -> Int
char2int = (.&. 0xf) . ord
-- Sum of the p-th powers of the decimal digits of i.
sump :: Int -> Int -> Int
sump p n = sum [char2int digit ^ p | digit <- show n]
-- Find all numbers (>= 2) equal to the sum of the p-th powers of their
-- own digits, scanning upward from 2 and accumulating matches.
solve :: Int -> [Int]
solve p = solve' 2 []
  where
    solve' i out
      | done = out
      | match = solve' (i+1) (i:out)
      | otherwise = solve' (i+1) out
      where
        s = sump p i
        match = s==i
        -- Largest power of ten <= i, computed via a floating-point log.
        -- NOTE(review): floating log/truncate can be off by one near exact
        -- powers of ten -- confirm precision is adequate here.
        iscale :: Int
        iscale = (10^) $ truncate $ (/) (log $ fromIntegral i) (log 10)
        -- Termination heuristic: stop once the digit-power sum has fallen
        -- below i at a number whose trailing digits are all 9s (i mod
        -- iscale == iscale-1). NOTE(review): assumes the digit-power sum
        -- can no longer catch up past this point -- verify the bound.
        done = (s<i) && ((mod i iscale) == (iscale-1))
-- Print the 4th-power matches, then the answer: the sum of all numbers
-- writable as the sum of fifth powers of their digits.
main :: IO ()
main = do
  print (solve 4)
  --print $ solve 5
  print (sum (solve 5))
| bertdouglas/euler-haskell | 001-050/30a.hs | mit | 1,084 | 0 | 13 | 337 | 320 | 166 | 154 | 21 | 1 |
{-# htermination pi :: Float #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_pi_1.hs | mit | 33 | 0 | 2 | 6 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Data.Git.Types where
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (isInfixOf)
import Data.Lens.Common (getL)
import Data.Lens.Template (makeLenses)
import Data.Monoid (Sum(..), mappend, mempty)
import Data.Set (Set)
import qualified Data.Set as Set
type CommitHash = ByteString
type CommitAuthor = ByteString
type CommitDate = ByteString
-- One file touched by a commit, with its added/removed line counts
-- as reported by git.
data CommitFile
    = CommitFile {
      _cAddedLines :: Integer      -- lines added to this file
    , _cRemovedLines :: Integer    -- lines removed from this file
    , _cFileName :: ByteString     -- path of the file within the repo
    }
    deriving (Show, Eq, Ord)
-- A single commit: repository name, hash/author/date (kept as raw
-- ByteStrings), and the set of files it touched.
data GitCommit
    = GitCommit {
      _commitRepo :: String          -- repository the commit belongs to
    , _commitHash :: CommitHash
    , _commitAuthor :: CommitAuthor
    , _commitDate :: CommitDate
    , _commitFiles :: Set CommitFile -- files changed by this commit
    }
    deriving (Show)
makeLenses [''GitCommit, ''CommitFile]
-- | Total added lines across a commit's files, skipping any file whose
-- name occurs inside one of the given path strings.
-- NOTE(review): the filter tests file-name-inside-path
-- (@name `isInfixOf` path@); confirm that direction is intended rather
-- than path-inside-name.
getLimitedAddedLines :: [ByteString] -> GitCommit -> Integer
getLimitedAddedLines paths commit =
    getSum
      . Set.fold mappend mempty
      . Set.map (Sum . getL cAddedLines)
      . Set.filter keep
      . getL commitFiles
      $ commit
  where
    keep cfile = not $ any (getL cFileName cfile `isInfixOf`) paths
-- | Total added lines across every file in the commit.
getAddedLines :: GitCommit -> Integer
getAddedLines commit = getSum (Set.fold mappend mempty perFileSums)
  where
    perFileSums = Set.map (Sum . getL cAddedLines) (getL commitFiles commit)
| roman/git-log-stats | src/Data/Git/Types.hs | mit | 1,404 | 0 | 11 | 360 | 373 | 212 | 161 | 42 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module CheckSkeletonParser
( checkSkeletonParser,
main,
)
where
import Core.System
import Core.Text
import Technique.Language
import Technique.Parser
import Technique.Quantity
import Test.Hspec
import Text.Megaparsec hiding (Label)
-- Run the skeleton-parser spec; always print a final "." afterwards,
-- even if the spec suite aborts.
main :: IO ()
main =
  hspec checkSkeletonParser `finally` putStrLn "."
-- | Unit tests for the Technique skeleton parser: header lines,
-- declarations, literals, quantities, attributes, expressions,
-- statements, and blocks. Each case feeds a parser a literal input via
-- 'parseMaybe' and pins the exact parse tree; the integer fields in the
-- expected constructors are byte offsets into the input.
checkSkeletonParser :: Spec
checkSkeletonParser = do
  describe "Parse procfile header" $ do
    it "correctly parses a complete magic line" $ do
      parseMaybe pMagicLine "% technique v2\n" `shouldBe` Just 2
    it "errors if magic line has incorrect syntax" $ do
      parseMaybe pMagicLine "%\n" `shouldBe` Nothing
      parseMaybe pMagicLine "%technique\n" `shouldBe` Nothing
      parseMaybe pMagicLine "% technique\n" `shouldBe` Nothing
      parseMaybe pMagicLine "% technique \n" `shouldBe` Nothing
      parseMaybe pMagicLine "% technique v\n" `shouldBe` Nothing
      -- NOTE(review): the next case expects Nothing for the exact input
      -- the first test expects to parse as Just 2 -- one of the two must
      -- be wrong; confirm the intended malformed input here.
      parseMaybe pMagicLine "% technique v2\n" `shouldBe` Nothing
      parseMaybe pMagicLine "% technique v2 asdf\n" `shouldBe` Nothing
    it "correctly parses an SPDX header line" $ do
      parseMaybe pSpdxLine "! BSD-3-Clause\n" `shouldBe` Just ("BSD-3-Clause", Nothing)
    it "correctly parses an SPDX header line with Copyright" $ do
      parseMaybe pSpdxLine "! BSD-3-Clause; (c) 2019 Kermit le Frog\n" `shouldBe` Just ("BSD-3-Clause", Just "2019 Kermit le Frog")
    it "errors if SPDX line has incorrect syntax" $ do
      parseMaybe pSpdxLine "!\n" `shouldBe` Nothing
      parseMaybe pSpdxLine "! Public-Domain;\n" `shouldBe` Nothing
      parseMaybe pSpdxLine "! Public-Domain; (\n" `shouldBe` Nothing
      parseMaybe pSpdxLine "! Public-Domain; (c)\n" `shouldBe` Nothing
      parseMaybe pSpdxLine "! Public-Domain; (c) \n" `shouldBe` Nothing
    it "correctly parses a complete technique program header" $ do
      parseMaybe
        pTechnique
        [quote|
% technique v0
! BSD-3-Clause
|]
        `shouldBe` Just
          ( Technique
              { techniqueVersion = 0,
                techniqueLicense = "BSD-3-Clause",
                techniqueCopyright = Nothing,
                techniqueBody = []
              }
          )
  -- (sic: "proecdure" below is a runtime test label, left as-is)
  describe "Parses a proecdure declaration" $ do
    it "name parser handles valid identifiers" $ do
      parseMaybe pIdentifier "" `shouldBe` Nothing
      parseMaybe pIdentifier "i" `shouldBe` Just (Identifier "i")
      parseMaybe pIdentifier "ingredients" `shouldBe` Just (Identifier "ingredients")
      parseMaybe pIdentifier "roast_turkey" `shouldBe` Just (Identifier "roast_turkey")
      parseMaybe pIdentifier "1x" `shouldBe` Nothing
      parseMaybe pIdentifier "x1" `shouldBe` Just (Identifier "x1")
    it "type parser handles valid type names" $ do
      parseMaybe pType "" `shouldBe` Nothing
      parseMaybe pType "I" `shouldBe` Just (Type "I")
      parseMaybe pType "Ingredients" `shouldBe` Just (Type "Ingredients")
      parseMaybe pType "RoastTurkey" `shouldBe` Just (Type "RoastTurkey")
      parseMaybe pType "Roast_Turkey" `shouldBe` Nothing
      parseMaybe pType "2Dinner" `shouldBe` Nothing
      parseMaybe pType "Dinner3" `shouldBe` Just (Type "Dinner3")
    it "handles a name, parameters, and a type signature" $ do
      parseMaybe pProcedureDeclaration "roast_turkey i : Ingredients -> Turkey"
        `shouldBe` Just (Identifier "roast_turkey", [Identifier "i"], [Type "Ingredients"], [Type "Turkey"])
      parseMaybe pProcedureDeclaration "roast_turkey i : Ingredients -> Turkey"
        `shouldBe` Just (Identifier "roast_turkey", [Identifier "i"], [Type "Ingredients"], [Type "Turkey"])
      parseMaybe pProcedureDeclaration "roast_turkey:Ingredients->Turkey"
        `shouldBe` Just (Identifier "roast_turkey", [], [Type "Ingredients"], [Type "Turkey"])
  describe "Literals" $ do
    it "quoted string is interpreted as text" $ do
      parseMaybe stringLiteral "\"Hello world\"" `shouldBe` Just "Hello world"
      parseMaybe stringLiteral "\"Hello \\\"world\"" `shouldBe` Just "Hello \"world"
      parseMaybe stringLiteral "\"\"" `shouldBe` Just ""
    it "positive integers" $ do
      parseMaybe numberLiteral "42" `shouldBe` Just 42
      parseMaybe numberLiteral "0" `shouldBe` Just 0
      parseMaybe numberLiteral "1a" `shouldBe` Nothing
  describe "Parses quantities" $ do
    it "a number is a Number" $ do
      parseMaybe pQuantity "42" `shouldBe` Just (Number 42)
      parseMaybe pQuantity "-42" `shouldBe` Just (Number (-42))
    it "a quantity with units is a Quantity" $ do
      parseMaybe pQuantity "149 kg" `shouldBe` Just (Quantity (Decimal 149 0) (Decimal 0 0) 0 "kg")
    it "a quantity with mantissa, magnitude, and units is a Quantity" $ do
      parseMaybe pQuantity "5.9722 × 10^24 kg" `shouldBe` Just (Quantity (Decimal 59722 4) (Decimal 0 0) 24 "kg")
    it "a quantity with mantissa, uncertainty, and units is a Quantity" $ do
      parseMaybe pQuantity "5.9722 ± 0.0006 kg" `shouldBe` Just (Quantity (Decimal 59722 4) (Decimal 6 4) 0 "kg")
    it "a quantity with mantissa, uncertainty, magnitude, and units is a Quantity" $ do
      parseMaybe pQuantity "5.9722 ± 0.0006 × 10^24 kg" `shouldBe` Just (Quantity (Decimal 59722 4) (Decimal 6 4) 24 "kg")
    it "same quantity, with superscripts, is a Quantity" $ do
      parseMaybe pQuantity "5.9722 ± 0.0006 × 10²⁴ kg" `shouldBe` Just (Quantity (Decimal 59722 4) (Decimal 6 4) 24 "kg")
    it "negative Quantities also parse" $ do
      parseMaybe pQuantity "1234567890" `shouldBe` Just (Number 1234567890)
      parseMaybe pQuantity "-1234567890" `shouldBe` Just (Number (-1234567890))
      parseMaybe pQuantity "3.14 ± 0.01 m" `shouldBe` Just (Quantity (Decimal 314 2) (Decimal 001 2) 0 "m")
      parseMaybe pQuantity "-3.14 ± 0.01 m" `shouldBe` Just (Quantity (Decimal (-314) 2) (Decimal 001 2) 0 "m")
  describe "Parses attributes" $ do
    it "recognizes a role marker" $ do
      parseMaybe pAttribute "@butler" `shouldBe` Just (Role (Identifier "butler"))
    it "recognizes a place marker" $ do
      parseMaybe pAttribute "#library" `shouldBe` Just (Place (Identifier "library"))
    it "recognizes any" $ do
      parseMaybe pAttribute "@*" `shouldBe` Just (Role (Identifier "*"))
      parseMaybe pAttribute "#*" `shouldBe` Just (Place (Identifier "*"))
  describe "Parses expressions" $ do
    it "an empty input an error" $ do
      parseMaybe pExpression "" `shouldBe` Nothing
    it "an pair of parentheses is None" $ do
      parseMaybe pExpression "()" `shouldBe` Just (None 0)
    it "a quoted string is a Text" $ do
      parseMaybe pExpression "\"Hello world\"" `shouldBe` Just (Text 0 "Hello world")
      parseMaybe pExpression "\"\"" `shouldBe` Just (Text 0 "")
    it "a bare identifier is a Variable" $ do
      parseMaybe pExpression "x" `shouldBe` Just (Variable 0 [Identifier "x"])
    it "an identifier, space, and then expression is an Application" $ do
      parseMaybe pExpression "a x"
        `shouldBe` Just (Application 0 (Identifier "a") (Variable 2 [Identifier "x"]))
    it "a quoted string is a Literal Text" $ do
      parseMaybe pExpression "\"Hello world\"" `shouldBe` Just (Text 0 "Hello world")
    it "a bare number is a Literal Number" $ do
      parseMaybe pExpression "42" `shouldBe` Just (Amount 0 (Number 42))
    it "a nested expression is parsed as Grouped" $ do
      parseMaybe pExpression "(42)" `shouldBe` Just (Grouping 0 (Amount 1 (Number 42)))
    it "an operator between two expressions is an Operation" $ do
      parseMaybe pExpression "x & y"
        `shouldBe` Just
          ( Operation
              0
              WaitBoth
              (Variable 0 [Identifier "x"])
              (Variable 4 [Identifier "y"])
          )
    it "handles tablet with one binding" $ do
      parseMaybe pExpression "[ \"King\" ~ george ]"
        `shouldBe` Just
          ( Object
              0
              ( Tablet
                  [ Binding (Label "King") (Variable 11 [Identifier "george"])
                  ]
              )
          )
    it "handles tablet with multiple bindings" $ do
      parseMaybe pExpression "[ \"first\" ~ \"George\" \n \"last\" ~ \"Windsor\" ]"
        `shouldBe` Just
          ( Object
              0
              ( Tablet
                  [ Binding (Label "first") (Text 12 "George"),
                    Binding (Label "last") (Text 32 "Windsor")
                  ]
              )
          )
  {-
      it "handles tablet with alternate single-line syntax" $
        let expected =
              Just
                ( Object
                    ( Tablet
                        [ Binding "name" (Variable [Identifier "n"]),
                          Binding "king" (Amount (Number 42))
                        ]
                    )
                )
         in do
              parseMaybe pExpression "[\"name\" ~ n,\"king\" ~ 42]" `shouldBe` expected
              parseMaybe pExpression "[\"name\" ~ n , \"king\" ~ 42]" `shouldBe` expected
  -}
  describe "Parses statements containing expressions" $ do
    it "a blank line is a Blank" $ do
      parseMaybe pStatement "\n" `shouldBe` Just (Blank 0)
    it "considers a single identifier an Execute" $ do
      parseMaybe pStatement "x"
        `shouldBe` Just (Execute 0 (Variable 0 [Identifier "x"]))
    it "considers a line with an '=' to be an Assignment" $ do
      parseMaybe pStatement "answer = 42"
        `shouldBe` Just (Assignment 0 [Identifier "answer"] (Amount 9 (Number 42)))
  describe "Parses blocks of statements" $ do
    it "an empty block is a [] (special case)" $ do
      parseMaybe pBlock "{}" `shouldBe` Just (Block [])
    it "a block with a newline (only) is []" $ do
      parseMaybe pBlock "{\n}" `shouldBe` Just (Block [])
    it "a block with single statement surrounded by a newlines" $ do
      parseMaybe pBlock "{\nx\n}"
        `shouldBe` Just
          ( Block
              [ Execute 2 (Variable 2 [Identifier "x"])
              ]
          )
      parseMaybe pBlock "{\nanswer = 42\n}"
        `shouldBe` Just
          ( Block
              [ (Assignment 2 [Identifier "answer"] (Amount 11 (Number 42)))
              ]
          )
    it "a block with a blank line contains a Blank" $ do
      parseMaybe pBlock "{\nx1\n\nx2\n}"
        `shouldBe` Just
          ( Block
              [ Execute 2 (Variable 2 [Identifier "x1"]),
                Blank 5,
                Execute 6 (Variable 6 [Identifier "x2"])
              ]
          )
    it "a block with multiple statements separated by newlines" $ do
      parseMaybe pBlock "{\nx\nanswer = 42\n}"
        `shouldBe` Just
          ( Block
              [ Execute 2 (Variable 2 [Identifier "x"]),
                Assignment 4 [Identifier "answer"] (Amount 13 (Number 42))
              ]
          )
    it "a block with multiple statements separated by semicolons" $ do
      parseMaybe pBlock "{x ; answer = 42}"
        `shouldBe` Just
          ( Block
              [ Execute 1 (Variable 1 [Identifier "x"]),
                Series 3,
                Assignment 5 [Identifier "answer"] (Amount 14 (Number 42))
              ]
          )
    it "consumes whitespace in inconvenient places" $ do
      parseMaybe pBlock "{ \n }"
        `shouldBe` Just (Block [])
      parseMaybe pBlock "{ \n x \n }"
        `shouldBe` Just
          ( Block
              [ Execute 4 (Variable 4 [Identifier "x"])
              ]
          )
      parseMaybe pBlock "{ \n (42) \n}"
        `shouldBe` Just
          ( Block
              [ Execute 4 (Grouping 4 (Amount 5 (Number 42)))
              ]
          )
      parseMaybe pBlock "{ \n (42 ) \n}"
        `shouldBe` Just
          ( Block
              [ Execute 4 (Grouping 4 (Amount 5 (Number 42)))
              ]
          )
      parseMaybe pBlock "{ answer = 42 ; }"
        `shouldBe` Just
          ( Block
              [ Assignment 2 [Identifier "answer"] (Amount 11 (Number 42)),
                Series 14
              ]
          )
  describe "Parses a procedure declaration" $ do
    it "simple declaration " $ do
      parseMaybe pProcedureDeclaration "f x : X -> Y"
        `shouldBe` Just (Identifier "f", [Identifier "x"], [Type "X"], [Type "Y"])
    it "declaration with multiple variables and input types" $ do
      parseMaybe pProcedureDeclaration "after_dinner i,s,w : IceCream,Strawberries,Waffles -> Dessert"
        `shouldBe` Just
          ( Identifier "after_dinner",
            [Identifier "i", Identifier "s", Identifier "w"],
            [Type "IceCream", Type "Strawberries", Type "Waffles"],
            [Type "Dessert"]
          )
    it "handles spurious whitespace" $ do
      parseMaybe pProcedureDeclaration "after_dinner i ,s ,w : IceCream ,Strawberries, Waffles -> Dessert"
        `shouldBe` Just
          ( Identifier "after_dinner",
            [Identifier "i", Identifier "s", Identifier "w"],
            [Type "IceCream", Type "Strawberries", Type "Waffles"],
            [Type "Dessert"]
          )
  describe "Parses a the code for a complete procedure" $ do
    it "parses a declaration and block" $ do
      parseMaybe pProcedureCode "f : X -> Y\n{ x }\n"
        `shouldBe` Just
          ( emptyProcedure
              { procedureOffset = 0,
                procedureName = Identifier "f",
                procedureInput = [Type "X"],
                procedureOutput = [Type "Y"],
                procedureBlock = Block [Execute 13 (Variable 13 [Identifier "x"])]
              }
          )
| oprdyn/technique | tests/CheckSkeletonParser.hs | mit | 13,587 | 0 | 24 | 3,963 | 3,451 | 1,670 | 1,781 | 241 | 1 |
-- https://www.reddit.com/r/dailyprogrammer/comments/2xoxum/20150302_challenge_204_easy_remembering_your_lines/
module Main where
import Data.Text as T
import Data.Text.IO as TIO
import Data.List as List
import Control.Monad.State
-- Parser cursor over the play's text: the most recently seen act, scene
-- and speaker headings, plus the lines not yet consumed.
data Position = Position
  { act :: Text      -- current act heading
  , scene :: Text    -- current scene heading
  , speaker :: Text  -- speaker of the current passage
  , rest :: [Text] } -- remaining unconsumed lines
-- Number of leading space characters on a line (used to classify it).
initSpaces :: Text -> Int
initSpaces line = T.length (T.takeWhile (== ' ') line)
-- Consume and return the run of 4-space-indented lines (one spoken
-- passage) at the front of the remaining input.
grabPassage :: Monad m => StateT Position m [Text]
grabPassage = do
  pos <- get
  let (passage, remaining) = List.span ((== 4) . initSpaces) (rest pos)
  put pos { rest = remaining }
  return passage
-- Consume one header line, recording it as the current act or scene if
-- it starts with "ACT " or "SCENE "; any other line is simply skipped.
grabActOrScene :: Monad m => StateT Position m ()
grabActOrScene = do
  pos <- get
  let x : xs = rest pos  -- partial: callers only invoke this on non-empty input
      updated
        | T.pack "ACT " `T.isPrefixOf` x = pos { act = grabInfo x, rest = xs }
        | T.pack "SCENE " `T.isPrefixOf` x = pos { scene = grabInfo x, rest = xs }
        | otherwise = pos { rest = xs }
  put updated
-- Keep everything before the first '.' (strips trailing heading text).
grabInfo :: Text -> Text
grabInfo heading = T.takeWhile (/= '.') heading
-- Advance to the next spoken passage, updating act/scene/speaker state
-- along the way. Fails (lifts Nothing) at end of input.
nextPassage :: StateT Position Maybe [Text]
nextPassage = do
  pos <- get
  case rest pos of
    [] -> lift Nothing
    (x : xs) -> do
      let (ys, zs) = T.span (' ' ==) x
      -- Dispatch on leading-space count: 0 = act/scene/other header,
      -- 2 = speaker name, 4 = passage body.
      -- NOTE(review): any other indent depth hits an unmatched case and
      -- crashes -- assumes the input file uses exactly 0/2/4 spaces.
      case T.length ys of
        0 -> grabActOrScene >> nextPassage
        2 -> put (pos { speaker = grabInfo zs, rest = xs }) >> nextPassage
        4 -> grabPassage
-- Scan forward for the first passage that contains the phrase; fails
-- (Nothing) if the input is exhausted first.
findPassage :: Text -> StateT Position Maybe [Text]
findPassage phrase = do
  passage <- nextPassage
  if List.any (phrase `T.isInfixOf`) passage
    then return passage
    else findPassage phrase
-- Read a search phrase from stdin and print the act, scene, speaker and
-- text of the first passage in macbeth.txt containing it.
main :: IO ()
main = do
  txt <- TIO.readFile "macbeth.txt"
  pass <- TIO.getLine
  let dummyText = T.pack ""
  let initState = Position dummyText dummyText dummyText $ T.lines txt
  case runStateT (findPassage pass) initState of
    -- Bug fix: report the search phrase ('pass'), not 'txt' -- the
    -- original appended the entire play to the error message.
    Nothing -> TIO.putStrLn $ (T.pack "Could not find ") `T.append` pass
    Just (xs , pos) -> TIO.putStr $ T.unlines
                         $ [ act pos, scene pos, speaker pos ]
                           ++ fmap (T.dropWhile (' ' ==)) xs
| gallais/dailyprogrammer | easy/204/Main.hs | mit | 2,020 | 0 | 20 | 551 | 775 | 407 | 368 | 56 | 4 |
import Math.NumberTheory.Primes (primes, isPrime)
import Data.List (permutations, subsequences)
import Data.List.Unique (sortUniq)
--------------------------------------------------------------------------------
-- Return the sorted list of numbers obtained by permuting the digits of the
-- function argument.
--------------------------------------------------------------------------------
-- Sorted, de-duplicated numbers obtained by permuting n's decimal
-- digits (permutations with leading zeros collapse via 'read').
permuteDigits :: Integer -> [Integer]
permuteDigits n = sortUniq [read digits | digits <- permutations (show n)]
--------------------------------------------------------------------------------
-- Return the list of all the triplets whose first element is the function
-- argument and that fulfil the conditions expressed in the exercise statement.
--------------------------------------------------------------------------------
-- All arithmetic triplets [u,v,w] of distinct prime digit-permutations
-- of n that start with u == n (v-u == w-v, increasing subsequence order).
triplets :: Integer -> [[Integer]]
triplets n = filter isValid (subsequences primePerms)
  where
    primePerms = filter isPrime (permuteDigits n)
    isValid [u, v, w] = u == n && u /= v && v - u == w - v
    isValid _ = False
-- | Concatenate the decimal renderings of the numbers into one number,
-- e.g. @combine [1487, 4817] == 14874817@. Precondition: the list is
-- non-empty and all numbers are non-negative (otherwise 'read' fails).
combine :: [Integer] -> Integer
combine = read . concatMap show
--------------------------------------------------------------------------------
-- Main
--------------------------------------------------------------------------------
-- Print the concatenations of every qualifying prime-permutation
-- triplet whose first member is a 4-digit prime. Uses the top-level
-- 'combine' (the original re-defined an identical local copy, shadowing
-- it) and 'concatMap' instead of 'concat . map'.
main = print $ concatMap (map combine . triplets) primesInRange
  where
    primesInRange = takeWhile (<=9999) $ dropWhile (<1000) primes
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NumericUnderscores #-}
module ConstructDatabase where
import qualified Data.Array.Unboxed as A
import Data.Bits
import Data.Char
import Data.Word
import PrepareDatabase
type GCDatabase = (A.UArray Int Word32, A.UArray Int Word32, A.UArray Int Word8)
{-
TODO: which way is faster?
-}
type PackedGCDatabase = A.UArray Int Word64
-- Build the three parallel arrays (range start, range end, category
-- value) from an assumed-sorted list of ranged category assignments.
-- A 'Ranged' is an Either: Left (lo, hi) is an inclusive code-point
-- range, Right v a single code point (used as both lo and hi below).
mkDatabase :: [(Ranged, GeneralCategory)] -> GCDatabase
mkDatabase gs = gcDb
  where
    l = length gs
    -- Build one array of length l by projecting each list element.
    mkArr prj =
      A.listArray
        (0, l -1)
        (fmap prj gs)
    loArr, hiArr :: A.UArray Int Word32
    loArr =
      mkArr
        (\(e, _) -> case e of
           Left (i, _) -> fromIntegral i
           Right i -> fromIntegral i)
    hiArr =
      mkArr
        (\(e, _) -> case e of
           Left (_, i) -> fromIntegral i
           Right i -> fromIntegral i)
    -- Categories stored as their Enum index, one byte each.
    valArr :: A.UArray Int Word8
    valArr =
      mkArr
        (\(_, gc) -> fromIntegral (fromEnum gc))
    gcDb :: GCDatabase
    gcDb = (loArr, hiArr, valArr)
-- Build the single-array variant: each entry packs (lo, hi, category)
-- into one Word64 via 'packTuple' (lo in bits 0-23, hi in 24-47,
-- category from bit 48 up).
mkDatabasePacked :: [(Ranged, GeneralCategory)] -> PackedGCDatabase
mkDatabasePacked gs = A.listArray (0, l -1) (fmap mkItem gs)
  where
    l = length gs
    mkItem (range, gc) =
      packTuple
        ( fromIntegral lo
        , fromIntegral hi
        , fromIntegral $ fromEnum gc
        )
      where
        -- Left = inclusive range; Right = single code point.
        (lo, hi) = case range of
          Left (a, b) -> (a, b)
          Right v -> (v, v)
{-
low: 0~23
high: 24~47
gc: 48~
-}
-- Pack (lo, hi, category) into one Word64: lo in bits 0-23, hi in bits
-- 24-47, category from bit 48 up. Assumes lo and hi fit in 24 bits
-- (true for Unicode code points, max 0x10FFFF).
packTuple :: (Word32, Word32, Word8) -> Word64
packTuple (lo, hi, cat) = fromIntegral lo .|. hiBits .|. catBits
  where
    hiBits = fromIntegral hi `unsafeShiftL` 24
    catBits = fromIntegral cat `unsafeShiftL` 48
-- Inverse of 'packTuple': extract (lo, hi, category) from the packed
-- Word64 layout (bits 0-23, 24-47, 48+).
unpackTuple :: Word64 -> (Word32, Word32, Word8)
unpackTuple payload = (lowPart, highPart, catPart)
  where
    lowPart, highPart :: Word32
    lowPart = fromIntegral (payload .&. 0xFFFFFF)
    highPart = fromIntegral ((payload `unsafeShiftR` 24) .&. 0xFFFFFF)
    catPart = fromIntegral ((payload `unsafeShiftR` 48) .&. 0xFF)
-- Binary-search the three parallel arrays for the range containing the
-- character's code point; 'NotAssigned' when no range matches.
query :: GCDatabase -> Char -> GeneralCategory
query (loArr, hiArr, valArr) ch = toEnum (fromIntegral (go lo hi))
  where
    target :: Word32
    target = fromIntegral $ ord ch
    (lo, hi) = A.bounds loArr
    -- Classic inclusive binary search over range starts/ends.
    go l r
      | l > r = fromIntegral $ fromEnum NotAssigned
      | target < loArr A.! m = go l (m - 1)
      | target > hiArr A.! m = go (m + 1) r
      | otherwise = valArr A.! m
      where
        m = (l + r) `quot` 2
-- Binary search over the packed single-array representation; each probe
-- unpacks one Word64 entry. 'NotAssigned' when no range matches.
queryPacked :: PackedGCDatabase -> Char -> GeneralCategory
queryPacked arr ch = toEnum (fromIntegral (go lo hi))
  where
    target :: Word32
    target = fromIntegral $ ord ch
    (lo, hi) = A.bounds arr
    go l r
      | l > r = fromIntegral $ fromEnum NotAssigned
      | target < rangeL = go l (m - 1)
      | target > rangeR = go (m + 1) r
      | otherwise = val
      where
        m = (l + r) `quot` 2
        (rangeL, rangeR, val) = unpackTuple (arr A.! m)
-- this also serves as verifying that query is implemented correctly.
-- Compare a database lookup function against base's 'generalCategory'
-- over the full Char range, printing unassigned counts, newly assigned
-- characters, and any outright disagreements. (Doubles as a check that
-- 'query'/'queryPacked' are implemented correctly.)
validateDatabase :: (Char -> GeneralCategory) -> IO ()
validateDatabase queryDb = do
  let allChars :: [Char]
      allChars = [minBound .. maxBound]
      -- Characters the database leaves unassigned.
      notDefined :: [Char]
      notDefined = filter ((== NotAssigned) . queryDb) allChars
      -- Characters where base and the database both assign a category
      -- but disagree.
      inconsistents
        :: [ ( Char
             , GeneralCategory -- general category from base
             , GeneralCategory -- general category from UnicodeData.txt
             )
           ]
      inconsistents = concatMap getInconsistent allChars
        where
          getInconsistent ch =
            [(ch, libGc, u13) | libGc /= NotAssigned, u13 /= libGc]
            where
              libGc = generalCategory ch
              u13 = queryDb ch
      -- Characters unassigned in base but assigned by the database.
      newItems :: [(Char, GeneralCategory)]
      newItems = concatMap go allChars
        where
          go ch =
            [(ch, u13) | libGc == NotAssigned && u13 /= NotAssigned]
            where
              libGc = generalCategory ch
              u13 = queryDb ch
  putStrLn $ "Number of NotAssigned in database: " <> show (length notDefined)
  putStrLn $ "Newly assigned since base: " <> show (length newItems)
  putStrLn "Inconsistent chars:"
  mapM_ print inconsistents
{-
Notes are based on the results of following setup:
- The Glorious Glasgow Haskell Compilation System, version 8.8.4
- Unicode 13.0.0
5 known inconsistent characters:
+ ('\5741',OtherPunctuation,Just OtherSymbol)
https://unicode.org/reports/tr44/
The Terminal_Punctuation property of U+166D CANADIAN SYLLABICS CHI SIGN was changed to No
+ ('\43453',SpacingCombiningMark,Just NonSpacingMark)
https://unicode.org/reports/tr44/
The classification of the dependent form of the Javanese vocalic r,
U+A9BD JAVANESE CONSONANT SIGN KERET, was corrected to a below-base mark
+ ('\72146',NonSpacingMark,Just SpacingCombiningMark)
https://www.unicode.org/L2/L2019/19047-script-adhoc-recs.pdf
+ ('\72162',OtherLetter,Just OtherPunctuation)
not sure about this one, it's already Po in Unicode 12.0.0 and Unicode 12.1.0.
+ ('\123215',OtherLetter,Just OtherSymbol)
https://www.unicode.org/L2/L2019/19008.htm
"Update the general category of U+1E14F NYIAKENG PUACHUE HMONG CIRCLED CA
from gc="Lo" to "So", for Unicode version 12.0."
GHC's table:
https://github.com/ghc/ghc/commits/ghc-8.10.4-release/libraries/base/cbits/WCsubst.c
-}
| Javran/misc | unicode-data/src/ConstructDatabase.hs | mit | 5,826 | 0 | 16 | 1,653 | 1,499 | 803 | 696 | 116 | 5 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module RAExpr.RASet (RASet, fromList, fromSet, empty, attributes, RAExpr.RASet.filter,
project, union, intersection, difference, crossProduct,
renameColumn, renameColumns, natualJoin) where
import Control.Exception.Base (assert)
import qualified Data.Set as Set
import Data.Set (Set)
import Data.Function (on)
import RAExpr.Row(Row)
import qualified RAExpr.Row as Row
import Function(around)
import Class.Pretty
import Data.List (intercalate)
-- | The attribute (column) names of a relation.
type Names names = Set names
-- | The tuples (rows) of a relation.
type Rows names values = Set (Row names values)
-- | A relation in the relational-algebra sense: a set of attribute
-- names together with a set of rows over those attributes.
data RASet names values = RASet (Names names) (Rows names values)
    deriving (Eq, Ord)
-- | Instance of Show.
-- >>> RASet (Set.fromList ["a", "b"]) (Set.fromList [Row.singleton "a" "x", Row.singleton "b" "y"])
-- fromList ["a","b"] [fromList [("a","x")],fromList [("b","y")]]
instance (Show names, Show values) => Show (RASet names values) where
    -- Rendered to mirror the 'fromList' factory call.
    show (RASet names rows) =
        unwords ["fromList", show (Set.toList names), show (Set.toList rows)]
-- | Instance of Pretty
-- >>> pretty (empty::(RASet String String))
-- "{|}"
-- >>> pretty (fromList ["A"] ([]::[Row String String]))
-- "{A|}"
-- >>> pretty (fromList ["A"] ([Row.empty]::[Row String String]))
-- "{A|}"
-- >>> pretty (fromList ["A"] ([Row.singleton "A" "x"]))
-- "{A|(\"x\")}"
-- >>> pretty (fromList ["A","B"] ([Row.fromList [("A","x"),("B","y")]]))
-- "{A,B|(\"x\",\"y\")}"
-- >>> pretty (fromList ["A"] [Row.fromList [("A","x")], Row.fromList [("A","y")]])
-- "{A|(\"x\"),(\"y\")}"
instance Pretty (RASet String String) where
    -- "{names|rows}", both sides comma-separated.
    pretty set = concat ["{", nameList, "|", rowList, "}"]
      where
        nameList = intercalate "," (Set.toList (attributes set))
        rowList = intercalate "," (Set.toList (Set.map pretty (rows set)))
-- instance (Ord names) => Foldable (RASet names) where
-- foldr f x (RASet names rows) = Set.foldr f x rows
-- | Set factory function
-- >>> empty
-- fromList [] []
empty:: (Ord names, Ord values) => RASet names values
empty = RASet Set.empty Set.empty
-- | RASet factory function. (Not in the module's export list.)
-- >>> singleton [] (Row.empty)
-- fromList [] []
-- >>> singleton [] (Row.singleton "a" 1)
-- fromList [] []
-- >>> singleton ["a"] (Row.singleton "a" 1)
-- fromList ["a"] [fromList [("a",1)]]
singleton:: (Ord names, Ord values) => [names] -> Row names values -> RASet names values
singleton attributes row = fromList attributes [row]
-- | RASet factory function. Delegates to 'fromSet', which masks every
-- row down to the given attributes and drops rows left empty.
-- >>> fromList [] [Row.fromList [("a","x"),("b","y")]]
-- fromList [] []
-- >>> fromList ["a"] [Row.empty]
-- fromList ["a"] []
-- >>> fromList ["a"] [Row.singleton "a" "z", Row.fromList [("a","x"),("b","y")]]
-- fromList ["a"] [fromList [("a","x")],fromList [("a","z")]]
fromList:: (Ord names, Ord values) => [names] -> [Row names values] -> RASet names values
fromList attributes rows
    = fromSet (Set.fromList attributes) (Set.fromList rows)
-- | Set factory function
-- >>> fromSet Set.empty Set.empty
-- fromList [] []
fromSet:: (Ord names, Ord values) => Names names -> Rows names values -> RASet names values
fromSet attributes rows = RASet attributes keptRows
  where
    -- Restrict each row to the declared attributes, then discard any
    -- row that ends up with no columns at all.
    keptRows = Set.filter (not . null) maskedRows
    maskedRows = Set.map (Row.mask columns) rows
    columns = Set.toList attributes
-- | Get attributes of Set.
-- >>> attributes (fromList [] [Row.empty])
-- fromList []
-- >>> attributes (fromList ["a"] [Row.fromList []])
-- fromList ["a"]
attributes:: RASet names value -> Names names
attributes (RASet names _) = names
-- | Get rows of RASet. (Not in the module's export list.)
-- >>> rows (fromList ["a"] [Row.fromList []])
-- fromList []
-- >>> rows (fromList ["a"] [Row.singleton "a" "x"])
-- fromList [fromList [("a","x")]]
rows:: RASet names values -> Rows names values
rows (RASet _ rows) = rows
-- | Keep only the rows satisfying the predicate; attributes are
-- unchanged. Exported as @RAExpr.RASet.filter@ (shadows
-- 'Prelude.filter' inside this module). Added the missing top-level
-- type signature, matching every other definition in the module.
filter:: (Row names values -> Bool) -> RASet names values -> RASet names values
filter f (RASet attributes rows) = RASet attributes (Set.filter f rows)
-- | Change the name of a column.
-- >>> renameColumn "a" "c" empty
-- fromList [] []
-- >>> renameColumn "a" "c" (fromList ["a"] [Row.singleton "a" "x"])
-- fromList ["c"] [fromList [("c","x")]]
renameColumn:: (Ord names, Ord values) => names -> names -> RASet names values -> RASet names values
renameColumn oldName newName set = RASet renamedAttrs renamedRows
  where
    attrs = attributes set
    -- Only swap the attribute when it actually exists.
    renamedAttrs
      | oldName `Set.member` attrs = Set.insert newName (Set.delete oldName attrs)
      | otherwise = attrs
    renamedRows = Set.map (Row.renameColumn oldName newName) (rows set)
-- | Change the name of many columns.
-- >>> renameColumns [("a","c")] (fromList ["a"] [Row.singleton "a" "x"])
-- fromList ["c"] [fromList [("c","x")]]
renameColumns:: (Ord names, Ord values) => [(names, names)] -> RASet names values -> RASet names values
renameColumns changes set = foldl step set changes
  where
    -- Apply one (old, new) renaming to the accumulated relation.
    step acc (oldName, newName) = renameColumn oldName newName acc
-- | Project the column names on to a set. Rows are re-masked to the new
-- attribute set by 'fromSet'.
-- >>> project (Set.singleton "a") (fromList ["a", "b"] [Row.singleton "a" "x", Row.singleton "b" "y"])
-- fromList ["a"] [fromList [("a","x")]]
project:: (Ord names, Ord values) => Names names -> RASet names values -> RASet names values
project attributes (RASet _ rows) = fromSet attributes rows
-- | Union of two union compatible sets (asserts compatibility:
-- identical attribute sets).
-- union (fromList ["a"] [Row.singleton "a" "x"]]) (fromList ["a"] [Row.singleton "a" "y"]])
-- fromList ["a"] [fromList [("a","x")],fromList [("a","y")]]
union:: (Ord names, Ord values) => RASet names values -> RASet names values -> RASet names values
union = unionCompatibleOperation (Set.union `on` rows)
-- | Difference of two union compatible sets.
-- difference (fromList ["a"] [Row.singleton "a" "x"]]) (fromList ["a"] [Row.singleton "a" "x"]])
-- fromList ["a"] []
-- difference (fromList ["a"] [Row.singleton "a" "x"]]) (fromList ["a"] [])
-- fromList ["a"] [fromList [("a", "x")]]
difference:: (Ord names, Ord values) => RASet names values -> RASet names values -> RASet names values
difference = unionCompatibleOperation (Set.difference `on` rows)
-- | Intersection of two union compatible sets.
-- intersection (fromList ["a"] [Row.singleton "a" "x"]) (fromList ["a"] [])
-- fromList ["a"] [fromList [("a", "x")]]
-- intersection (fromList ["a"] [Row.singleton "a" "x"]]) (fromList ["a"] [Row.singleton "a" "y"]])
-- fromList ["a"] [fromList [("a","x")],fromList [("a","y")]]
intersection:: (Ord names, Ord values) => RASet names values -> RASet names values -> RASet names values
intersection = unionCompatibleOperation (Set.intersection `on` rows)
-- | CrossProduct of two anti union compatible sets.
-- >>> crossProduct (fromList ["a"] [Row.singleton "a" "x"]) (fromList ["b"] [Row.singleton "b" "y"])
-- fromList ["a","b"] [fromList [("a","x"),("b","y")]]
-- >>> crossProduct (fromList ["a"] []) (fromList ["b"] [Row.singleton "b" "y"])
-- fromList ["a","b"] []
-- >>> crossProduct (fromList ["a"] [Row.singleton "a" "x"]) (fromList ["b"] [])
-- fromList ["a","b"] []
crossProduct:: (Ord names, Ord values) => RASet names values -> RASet names values -> RASet names values
crossProduct = antiUnionCompatibleOperation (pairRows `on` rows)
  where
    -- Every left row merged (Row.union) with every right row.
    pairRows leftRows rightRows =
        Set.foldl (\acc row -> Set.union (combineWith row rightRows) acc)
                  Set.empty
                  leftRows
    combineWith row others = Set.map (Row.union row) others
-- | Natural join of two relations.
-- NOTE(review): unimplemented stub -- evaluating the result crashes via
-- 'undefined'. The exported name "natualJoin" looks like a typo for
-- "naturalJoin", but renaming would break the export list and callers.
natualJoin:: (Ord names, Ord values) => RASet names values -> RASet names values -> RASet names values
natualJoin set1 set2 = undefined
-- | Test if two RASets are union compatible.
-- >>> unionCompatible (fromList ["a"] [Row.empty]) (fromList ["a"] [Row.empty])
-- True
-- >>> unionCompatible (fromList ["a"] [Row.empty]) (fromList ["b"] [Row.empty])
-- False
-- >>> unionCompatible (fromList ["a", "b"] [Row.empty]) (fromList ["b", "c"] [Row.empty])
-- False
-- Union compatible = identical attribute sets.
unionCompatible:: (Ord names, Ord values) => RASet names values -> RASet names values -> Bool
unionCompatible = (==) `on` attributes
-- | Test if two RASets are not union compatible.
-- NOTE(review): `around` comes from the local Function module (not
-- visible here); from usage it appears to post-compose a unary function
-- onto a binary one's result -- confirm.
-- >>> notUnionCompatible (fromList ["a"] [Row.empty]) (fromList ["a"] [Row.empty])
-- False
-- >>> notUnionCompatible (fromList ["a"] [Row.empty]) (fromList ["b"] [Row.empty])
-- True
-- >>> notUnionCompatible (fromList ["a", "b"] [Row.empty]) (fromList ["b", "c"] [Row.empty])
-- True
notUnionCompatible:: (Ord names, Ord values) => RASet names values -> RASet names values -> Bool
notUnionCompatible = not `around` unionCompatible
-- | Test if two RASets are anti union compatible.
-- Anti union compatible = disjoint attribute sets.
-- >>> antiUnionCompatible (fromList ["a"] [Row.empty]) (fromList ["a"] [Row.empty])
-- False
-- >>> antiUnionCompatible (fromList ["a"] [Row.empty]) (fromList ["b"] [Row.empty])
-- True
-- >>> antiUnionCompatible (fromList ["a", "b"] [Row.empty]) (fromList ["b", "c"] [Row.empty])
-- False
antiUnionCompatible:: (Ord names, Ord values) => RASet names values -> RASet names values -> Bool
antiUnionCompatible = (Set.empty==) `around` (Set.intersection `on` attributes)
-- | Assert two sets are union compatible.
-- >>> assertUnionCompatible empty empty True
-- True
assertUnionCompatible:: (Ord names, Ord values) => RASet names values -> RASet names values -> a -> a
assertUnionCompatible = assert `around` unionCompatible
-- | Assert two sets are anti Union compatible.
-- >>> assertAntiUnionCompatible (fromList ["a"] []) (fromList ["b"] []) True
-- True
assertAntiUnionCompatible:: (Ord names, Ord values) => RASet names values -> RASet names values -> a -> a
assertAntiUnionCompatible = assert `around` antiUnionCompatible
-- | Perform an operation if the two sets are union compatible.
-- >>> unionCompatibleOperation (\x y -> Set.empty) (fromList ["a"] []) (fromList ["a"] [])
-- fromList ["a"] []
unionCompatibleOperation:: (Ord n, Ord v) => (RASet n v -> RASet n v -> Rows n v) -> RASet n v-> RASet n v -> RASet n v
unionCompatibleOperation operator set1 set2 = RASet resultAttrs resultRows
  where
    -- Asserts the two relations share the same attributes, then keeps
    -- set1's attributes over the operator-produced rows.
    resultRows = assertUnionCompatible set1 set2 (set1 `operator` set2)
    resultAttrs = attributes set1
-- | Perform an operation on two anti-union-compatible (attribute
-- disjoint) sets.  The result carries the union of both attribute sets.
-- >>> antiUnionCompatibleOperation (\x y -> Set.empty) (fromList ["a"] []) (fromList ["b"] [])
-- fromList ["a","b"] []
antiUnionCompatibleOperation :: (Ord n, Ord v) => (RASet n v -> RASet n v -> Rows n v) -> RASet n v -> RASet n v -> RASet n v
antiUnionCompatibleOperation operator set1 set2 = RASet mergedAttributes checkedRows
  where
    checkedRows = assertAntiUnionCompatible set1 set2 (operator set1 set2)
    mergedAttributes = Set.union (attributes set1) (attributes set2)
| cameronbwhite/RelationalAlgebra | src/RAExpr/RASet.hs | mit | 10,563 | 0 | 13 | 1,680 | 2,090 | 1,135 | 955 | 91 | 2 |
{-# LANGUAGE UnicodeSyntax #-}
module Pixs.Operations.Geometric where
| ayberkt/pixs | src/Pixs/Operations/Geometric.hs | mit | 71 | 0 | 3 | 8 | 8 | 6 | 2 | 2 | 0 |
module Feature.AuthSpec where
-- {{{ Imports
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.HTTP.Types
import qualified Hasql.Connection as H
import SpecHelper
import PostgREST.Types (DbStructure(..))
-- }}}
-- | Wai integration tests for PostgREST's JWT-based authorization.
-- Every example runs against a fresh app built from the default config,
-- the introspected database structure and the given connection.
spec :: DbStructure -> H.Connection -> Spec
spec struct c = around (withApp cfgDefault struct c)
  $ describe "authorization" $ do
  it "hides tables that anonymous does not own" $
    get "/authors_only" `shouldRespondWith` 404
it "returns jwt functions as jwt tokens" $
post "/rpc/login" [json| { "id": "jdoe", "pass": "1234" } |]
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| {"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0"} |]
, matchStatus = 200
, matchHeaders = ["Content-Type" <:> "application/json"]
}
it "allows users with permissions to see their tables" $ do
let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0"
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 200
it "works with tokens which have extra fields" $ do
let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIiwia2V5MSI6InZhbHVlMSIsImtleTIiOiJ2YWx1ZTIiLCJrZXkzIjoidmFsdWUzIiwiYSI6MSwiYiI6MiwiYyI6M30.GfydCh-F4wnM379xs0n1zUgalwJIsb6YoBapCo8HlFk"
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 200
-- this test will stop working 9999999999s after the UNIX EPOCH
it "succeeds with an unexpired token" $ do
let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjk5OTk5OTk5OTksInJvbGUiOiJwb3N0Z3Jlc3RfdGVzdF9hdXRob3IiLCJpZCI6Impkb2UifQ.QaPPLWTuyydMu_q7H4noMT7Lk6P4muet1OpJXF6ofhc"
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 200
it "fails with an expired token" $ do
let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE0NDY2NzgxNDksInJvbGUiOiJwb3N0Z3Jlc3RfdGVzdF9hdXRob3IiLCJpZCI6Impkb2UifQ.enk_qZ_u6gZsXY4R8bREKB_HNExRpM0lIWSLktk9JJQ"
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 400
it "hides tables from users with invalid JWT" $ do
let auth = authHeaderJWT "ey9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0"
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 400
it "should fail when jwt contains no claims" $ do
let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.MKYc_lOECtB0LJOiykilAdlHodB-I0_id2qHKq35dmc"
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 400
it "hides tables from users with JWT that contain no claims about role" $ do
let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6Impkb2UifQ.zyohGMnrDy4_8eJTl6I2AUXO3MeCCiwR24aGWRkTE9o"
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 400
it "recovers after 400 error with logged in user" $ do
_ <- post "/authors_only" [json| { "owner": "jdoe", "secret": "test content" } |]
let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0"
_ <- request methodPost "/rpc/problem" [auth] ""
request methodGet "/authors_only" [auth] ""
`shouldRespondWith` 200
| motiz88/postgrest | test/Feature/AuthSpec.hs | mit | 3,568 | 0 | 14 | 514 | 564 | 287 | 277 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
module SoOSiM.Components.Scheduler.Interface where
import Control.Concurrent.STM.TQueue (TQueue)
import Control.Concurrent.STM.TVar (TVar)
import Data.HashMap.Strict (HashMap)
import SoOSiM
import SoOSiM.Components.Common
import SoOSiM.Components.ResourceDescriptor
import SoOSiM.Components.Thread
import {-# SOURCE #-} SoOSiM.Components.Scheduler.Behaviour (sched)
import SoOSiM.Components.Scheduler.Types
-- | Phantom handle naming the scheduler component in the simulator.
data Scheduler = Scheduler
-- | Wires the scheduler into the simulation framework: its state,
-- command and message types, plus initial state and behaviour.
instance ComponentInterface Scheduler where
  type State Scheduler = SC_State
  type Receive Scheduler = SC_Cmd
  type Send Scheduler = SC_Msg
  initState = const schedIState
  componentName = const "Scheduler"
  componentBehaviour = const sched
-- | Return the 'ComponentId' of the scheduler, creating one bound to
-- process manager @p@ when none has been registered yet.
scheduler ::
     ComponentId
  -> Sim ComponentId
scheduler p = do
  found <- componentLookup Scheduler
  case found of
    Just cId -> return cId
    Nothing -> createComponentNPS Nothing Nothing (Just (schedIState { _pm = p })) Scheduler
-- | Unconditionally start a fresh scheduler bound to process manager @p@
-- (unlike 'scheduler', which reuses an existing one).
newScheduler ::
     ComponentId
  -> Sim ComponentId
newScheduler p =
  createComponentNPS Nothing Nothing (Just (schedIState { _pm = p })) Scheduler
-- | Send the scheduler its initial configuration: thread map, available
-- resources, per-thread resource requirements, optional mapping/analysis
-- names, the trace queue and the thread-to-component map.  Fire-and-forget
-- via 'notify'.
initScheduler ::
     ComponentId
  -> HashMap ThreadId (TVar Thread)
  -> [(ResourceId,ResourceDescriptor)]
  -> HashMap ThreadId [ResourceId]
  -> Maybe String
  -> String
  -> TVar [(TQueue (Int,Int),Int,Int,Int)]
  -> HashMap ThreadId ComponentId
  -> Sim ()
initScheduler cId th res th_all smM an pE compMap =
  notify Scheduler cId (Init th res th_all smM an pE compMap)
-- | Ask scheduler @cId@ to shut down.
stopScheduler ::
     ComponentId
  -> Sim ()
stopScheduler cId = notify Scheduler cId StopSched
-- | Inform scheduler @cId@ that thread @tId@ has finished.
threadCompleted ::
     ComponentId
  -> ThreadId
  -> Sim ()
threadCompleted cId tId = notify Scheduler cId (ThreadCompleted tId)
-- | Hand scheduler @cId@ a fresh I/O token.
newIOToken ::
     ComponentId
  -> Sim ()
newIOToken cId = notify Scheduler cId NewIOToken
| christiaanb/SoOSiM-components | src/SoOSiM/Components/Scheduler/Interface.hs | mit | 1,864 | 0 | 16 | 341 | 536 | 287 | 249 | 56 | 2 |
import Test.Hspec
import RaytracerEtapa2_1113331018
-- | Hspec entry point exercising the raytracer's .ppm-generation helpers.
main :: IO ()
main = hspec $ do
  describe "Setup of .ppm image" $ do
it "returns a string with x, y and z" $ do
pixelToString (1, 2, 3) `shouldBe` "1 2 3\n"
it "returns the value of p3" $ do
p3 `shouldBe` "P3\n"
it "return a line of comment" $ do
(head comment) `shouldBe` '#'
it "returns \\n as the last char of a String" $ do
(last p3) `shouldBe` '\n'
(last comment) `shouldBe` '\n'
describe "a invalid string to the .ppm file" $ do
it "returns a error message" $ do
(create_text_to_ppm_file 2 3 []) `shouldBe` "Nao e possivel criar uma imagem sem pixels"
describe "the creation of a string to the .ppm image" $ do
it "returns a valid string for the file" $ do
(create_text_to_ppm_file 2 3 [(1, 2, 3)]) `shouldBe` "P3\n# It's a .ppm imagem for a raytracer\n2 3\n255\n1 2 3\n"
describe "the aritmethic operations" $ do
it "returns vetorial sum of two vectors" $ do
((1, 2, 3) $+ (1, 2, 3)) `shouldBe` (2, 4, 6)
it "returns a vetorial minus of two vectors" $ do
((1, 2, 3) $- (1, 2, 3)) `shouldBe` (0, 0, 0)
it "returns each elements times escalar value" $ do
((1, 2, 3) $* 3) `shouldBe` (3, 6, 9)
it "returns each elements divided by escalar value" $ do
((3, 3, 3) $/ 3) `shouldBe` (1, 1, 1)
it "returns a sum of each element of the vector times its correpondent in the second one" $ do
((1, 2, 3) $. (1, 2, 3)) `shouldBe` 14 | lipemorais/haskellando | RaytracerEtapa2_1113331018Spec.hs | mit | 1,514 | 0 | 18 | 400 | 493 | 262 | 231 | 31 | 1 |
module Sparklines where
import Data.Char (chr)
--import Data.List.Split (splitOneOf)
-- | Render a list of values as a Unicode sparkline: one block glyph
-- (U+2581 .. U+2588) per value, scaled between the input's min and max.
--
-- Made total: the original crashed on @[]@ ('maximum'/'minimum' on an
-- empty list) and, for constant input, divided 0/0 to NaN before 'chr'.
-- Now an empty input yields @""@ and a constant input maps every value
-- to the lowest glyph.
toSparkLine :: [Double] -> [Char]
toSparkLine [] = []
toSparkLine xs = map glyph xs
  where
    top = maximum xs
    bot = minimum xs
    range = top - bot
    glyph x
      | range == 0 = chr 0x2581  -- constant input: avoid 0/0 (NaN)
      | otherwise  = chr $ 0x2581 + round ((x - bot) / range * 7)
-- | Pair the rendered sparkline with its summary statistics.
makeSparkLine :: [Double] -> (String, Stats)
makeSparkLine xs = (toSparkLine xs, stats xs)
-- where parsed = map read $ filter (not . null) $ splitOneOf " ," xs
-- | Summary statistics of a value list: minimum, maximum, range and count.
data Stats = Stats { minValue, maxValue, rangeOfValues :: Double,
                     numberOfValues :: Int }
-- | Human-readable one-line rendering, e.g. "min: 1.0; max: 3.0; ...".
instance Show Stats where
    show (Stats mn mx r n) = "min: " ++ show mn ++ "; max: " ++ show mx ++
        "; range: " ++ show r ++ "; no. of values: " ++ show n
-- | Compute 'Stats' (min, max, range, count) for a value list.
-- Partial on the empty list, since 'minimum'/'maximum' are.
stats :: [Double] -> Stats
stats xs = Stats lo hi (hi - lo) (length xs)
  where
    lo = minimum xs
    hi = maximum xs
-- | Print the sparkline for @xs@ on one line, then its statistics.
drawSparkLineWithStats :: [Double] -> IO ()
drawSparkLineWithStats xs = putStrLn sp >> print st
  where (sp, st) = makeSparkLine xs
| kejace/alto | src/Sparklines.hs | mit | 1,055 | 0 | 13 | 284 | 362 | 195 | 167 | 23 | 1 |
{-# LANGUAGE LambdaCase, DeriveGeneric #-}
module Intervals where
--import qualified Data.ByteString.Lazy as BS
--import Text.Printf
import Data.List hiding (union, intersect)
import Data.Monoid ((<>))
--import Data.Yaml (ToJSON, FromJSON)
--import GHC.Generics (Generic)
import Data.Function
import Data.Int
import Prelude hiding (subtract)
type Offset = Int64
data Interval = I { from :: Offset, to :: Offset }
-- deriving (Show,Generic)
newtype Intervals = Intervals [Interval]
-- deriving (Show,Generic)
-- | The interval set holding the single half-open interval @[f, t)@;
-- empty unless the bounds are strictly increasing.
mkInterval :: Offset -> Offset -> Intervals
mkInterval f t = Intervals [ I f t | f < t ]
-- | The single interval covering offsets 0 (inclusive) to @len@ (exclusive).
fullIntervals :: Offset -> Intervals
fullIntervals = mkInterval 0
-- | The empty interval set.
nullInterval :: Intervals
nullInterval = Intervals []
-- | Total number of offsets covered (sum of the interval lengths).
size :: Intervals -> Offset
size (Intervals is) = sum [ t - f | I f t <- is ]
-- | True when no interval is present.
isEmpty :: Intervals -> Bool
isEmpty (Intervals is) = null is
-- | @a `subSetOf` b@: every offset of @a@ is also covered by @b@.
subSetOf :: Intervals -> Intervals -> Bool
subSetOf a b = isEmpty (a `subtract` b)
-- | True when the two sets cover at least one common offset.
intersects :: Intervals -> Intervals -> Bool
intersects a b = not $ isEmpty (a `intersect` b)
-- | Pointwise intersection of two interval sets.
-- NOTE(review): the merge assumes both lists are sorted in increasing
-- offset order (inferred from the disjoint cases) -- confirm that the
-- constructors maintain this invariant.
intersect :: Intervals -> Intervals -> Intervals
intersect (Intervals is1) (Intervals is2) = Intervals $ go is1 is2
  where
    go _ [] = []
    go [] _ = []
    go (i1:is1) (i2:is2)
        -- reorder for symmetry
        | to i1 < to i2 = go (i2:is2) (i1:is1)
        -- disjoint
        | from i1 >= to i2 = go (i1:is1) is2
        -- subset
        | to i1 == to i2 = I f' (to i2) : go is1 is2
        -- overlapping
        | otherwise = I f' (to i2) : go (i1 { from = to i2} : is1) is2
        where f' = max (from i1) (from i2)
-- | Union of two interval sets; adjacent/overlapping intervals are merged
-- by repeatedly widening the interval with the larger 'to'.
-- Relies on the same increasing-order invariant as 'intersect'.
union :: Intervals -> Intervals -> Intervals
union (Intervals is1) (Intervals is2) = Intervals $ go is1 is2
  where
    go is [] = is
    go [] is = is
    go (i1:is1) (i2:is2)
        -- reorder for symmetry
        | to i1 < to i2 = go (i2:is2) (i1:is1)
        -- disjoint
        | from i1 > to i2 = i2 : go (i1:is1) is2
        -- overlapping
        | otherwise = go (i1 { from = f'} : is1) is2
        where f' = min (from i1) (from i2)
-- | Set difference: the offsets of the first set not covered by the
-- second.  The guards enumerate the five relative positions of the two
-- head intervals; the "middle" case splits an interval in two.
subtract :: Intervals -> Intervals -> Intervals
subtract (Intervals is1) (Intervals is2) = Intervals $ go is1 is2
  where
    go is [] = is
    go [] _ = []
    go (i1:is1) (i2:is2)
        -- i2 past i1
        | to i1 <= from i2 = i1 : go is1 (i2:is2)
        -- i1 past i2
        | to i2 <= from i1 = go (i1:is1) is2
        -- i1 contained in i2
        | from i2 <= from i1 , to i1 <= to i2 = go is1 (i2:is2)
        -- i2 covers beginning of i1
        | from i1 >= from i2 = go (i1 { from = to i2} : is1) is2
        -- i2 covers end of i1
        | to i1 <= to i2 = i1 { to = from i2} : go is1 (i2:is2)
        -- i2 in the middle of i1
        | otherwise = I (from i1) (from i2) :
                      go (I (to i2) (to i1) : is1) is2
-- setZeros :: BS.ByteString -> Intervals -> BS.ByteString
-- setZeros s (Intervals is) = foldl' go s is
-- where
-- go s (I f t) = prefix <> zeroes <> postfix
-- where
-- (tmp, postfix) = BS.splitAt t s
-- (prefix, _discard) = BS.splitAt f tmp
-- zeroes = BS.replicate (t-f) 0
-- ppInterval :: Interval -> String
-- ppInterval (I f t) = printf "0x%04X-0x%04X" f t
--
-- ppIntervals :: Intervals -> String
-- ppIntervals (Intervals xs) = intercalate " " (map ppInterval xs)
--instance FromJSON Interval
--instance ToJSON Interval
--instance FromJSON Intervals
--instance ToJSON Intervals
| antalsz/hs-to-coq | examples/intervals/Intervals.hs | mit | 3,471 | 0 | 14 | 988 | 1,207 | 621 | 586 | 56 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module IndexControllerSpec where
import Test.Hspec (Spec, describe, it)
import Test.Hspec.Wai (get, shouldRespondWith, with)
import qualified Web.Scotty as S
import IndexController (app)
-- | Wai integration test for the index route of the Scotty app.
spec :: Spec
spec = with (S.scottyApp app) $ do
  describe "GET /" $ do
    it "responds with 200" $ do
      get "/" `shouldRespondWith` 200
| robertjlooby/scotty-story-board | test/controllers/IndexControllerSpec.hs | mit | 400 | 0 | 15 | 98 | 111 | 63 | 48 | 11 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Queue
(Queue, empty, null, singleton, size, enqueue, enqueueList, dequeue, peek, take, drop, splitAt, fromList, toList)
where
import Prelude hiding (take, drop, splitAt, null)
import Data.List (genericLength)
import Data.Foldable as F hiding (toList)
import Data.Traversable as T
import Data.Function (on)
import Data.Typeable (Typeable)
import Control.DeepSeq (rnf, NFData)
import Control.Applicative ((<$>), (<*>))
import Control.Monad (liftM)
import Test.QuickCheck
import Test.QuickCheck.Gen
-- | Banker's queue: @Queue size back front@.  Elements are dequeued from
-- @front@; @back@ holds newly enqueued elements in reverse order and is
-- reversed into @front@ lazily when @front@ empties.
data Queue a = Queue Int [a] [a] deriving Typeable
-- | The empty queue.
empty :: Queue a
empty = Queue 0 [] []
-- | True when the queue holds no elements.
null :: Queue a -> Bool
null (Queue _ [] []) = True
null _ = False
-- | A queue holding exactly one element.
singleton :: a -> Queue a
singleton x = Queue 1 [] [x]
-- | Number of elements; O(1) thanks to the cached count.
size :: Queue a -> Int
size (Queue s _ _) = s
-- | Add an element at the back of the queue. O(1).
enqueue :: a -> Queue a -> Queue a
enqueue x (Queue s xs ys) = Queue (s + 1) (x:xs) ys
-- | Add a whole list at the back.
-- NOTE(review): because the back list is stored reversed, these elements
-- come back out in *reverse* list order -- @enqueueList [1,2] empty@
-- dequeues 2 before 1, unlike repeated 'enqueue'.  Confirm this is the
-- intended semantics.
enqueueList :: [a] -> Queue a -> Queue a
enqueueList zs (Queue s xs ys) = Queue (s + length zs) (zs ++ xs) ys
-- | Remove the front element; 'Nothing' on an empty queue.  When the
-- front list is exhausted, the back list is reversed in (amortised O(1)).
dequeue :: Queue a -> Maybe (a, Queue a)
dequeue (Queue s xs (y:ys)) = Just (y, Queue (s - 1) xs ys)
dequeue (Queue s xs []) = case reverse xs of
  [] -> Nothing
  (x:xs') -> Just (x, Queue (s - 1) [] xs')
-- | The front element without removing it; 'Nothing' on the empty queue.
-- (Unused bindings replaced with wildcards to keep -Wall clean.)
peek :: Queue a -> Maybe a
peek (Queue _ [] []) = Nothing
peek (Queue _ _ (y:_)) = Just y
peek (Queue _ xs []) = Just (last xs)  -- xs non-empty here, so 'last' is total
-- | The first @n@ elements in FIFO order, fewer if the queue runs out.
-- Non-positive @n@ now yields @[]@, matching 'Prelude.take' (previously
-- a negative @n@ recursed past zero and drained the whole queue).
take :: Int -> Queue a -> [a]
take n q
  | n <= 0 = []
  | otherwise = case dequeue q of
      Nothing -> []
      Just (x, q') -> x : take (n - 1) q'
-- | Discard the first @n@ elements.  Non-positive @n@ is now a no-op,
-- matching 'Prelude.drop' (previously a negative @n@ emptied the queue);
-- also replaces the unused binding in the 'Just' case with a wildcard.
drop :: Int -> Queue a -> Queue a
drop n q
  | n <= 0 = q
  | otherwise = case dequeue q of
      Nothing -> q
      Just (_, q') -> drop (n - 1) q'
-- | @splitAt n q@: the first @n@ elements (FIFO order) and the remaining
-- queue; equivalent to @(take n q, drop n q)@ in one pass.
splitAt :: Int -> Queue a -> ([a], Queue a)
splitAt 0 q = ([], q)
splitAt n q = case dequeue q of
  Nothing -> ([], q)
  Just (x,q') -> case splitAt (n - 1) q' of (xs, q'') -> (x:xs, q'')
-- | Build a queue whose dequeue order is the list order. O(n).
fromList :: [a] -> Queue a
fromList xs = Queue (length xs) [] xs
-- | The elements in dequeue order: front list, then reversed back list.
toList :: Queue a -> [a]
toList (Queue _ xs ys) = ys ++ reverse xs
-- Equality and ordering compare the logical element sequence, so two
-- queues with different internal front/back splits can still be equal.
instance Eq a => Eq (Queue a) where
  (==) = (==) `on` toList
instance Ord a => Ord (Queue a) where
  compare = compare `on` toList
instance NFData a => NFData (Queue a) where
  rnf (Queue s xs ys) = rnf s `seq` rnf xs `seq` rnf ys
-- Shows/reads in the "fromList [..]" form, round-tripping via 'toList'.
instance Show a => Show (Queue a) where
  showsPrec d m = showParen (d > 10) $
    showString "fromList " . shows (toList m)
instance Read a => Read (Queue a) where
  readsPrec p = readParen (p > 10) $ \ r -> do
    ("fromList",s) <- lex r
    (xs,t) <- reads s
    return (fromList xs,t)
instance Functor Queue where
  fmap f (Queue s xs ys) = Queue s (map f xs) (map f ys)
-- Folds the front list first, then the (reversed) back list, i.e. in
-- dequeue order.
instance Foldable Queue where
  foldr f z (Queue _ xs ys) = F.foldr f (F.foldl (flip f) z xs) ys
-- NOTE(review): 'sequenceA'/'traverse' run the back-list effects in
-- stored (reversed) order, while 'mapM'/'sequence' go via 'toList' and
-- run them in logical order -- the two paths disagree on effect order.
-- Confirm whether that difference is intended.
instance Traversable Queue where
  sequenceA (Queue s xs ys) = Queue s <$> sequenceA xs <*> sequenceA ys
  traverse f (Queue s xs ys) = Queue s <$> traverse f xs <*> traverse f ys
  mapM f = liftM fromList . T.mapM f . toList
  sequence = liftM fromList . T.sequence . toList
instance Arbitrary a => Arbitrary (Queue a) where
  arbitrary = fromList <$> arbitrary
| 3of8/haskell_playground | queue/Queue.hs | gpl-2.0 | 3,244 | 0 | 12 | 899 | 1,622 | 842 | 780 | 82 | 2 |
module LexML.URN.Atalhos where
import LexML.URN.Types
import Data.Maybe (fromJust,maybe)
import qualified Data.Map as M
import Data.List
import System.IO.Unsafe (unsafePerformIO)
leiFederal :: [Integer] -> Dia -> Mes -> Ano -> URNLexML
leiFederal numLei dia mes ano = URNLexML (Local Brasil Nothing) (Documento (A_Convencionada AC_Federal) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["lei"]))) Nothing) (Descritor (TD_Datas (Datas (Left $ Data ano mes dia)) (Just $ ID_Ids [IdDocumento $ makeNormalID numLei])) [] Nothing) ) Nothing Nothing Nothing
normaFederal :: [String] -> [Integer] -> Dia -> Mes -> Ano -> URNLexML
normaFederal tipo numLei dia mes ano = URNLexML (Local Brasil Nothing) (Documento (A_Convencionada AC_Federal) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome tipo))) Nothing) (Descritor (TD_Datas (Datas (Left $ Data ano mes dia)) (Just $ ID_Ids [IdDocumento $ makeNormalID numLei])) [] Nothing) ) Nothing Nothing Nothing
leiFederal' :: [Integer] -> Ano -> URNLexML
leiFederal' numLei ano = URNLexML (Local Brasil Nothing) (Documento (A_Convencionada AC_Federal) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["lei"]))) Nothing) (Descritor (TD_Ano ano (ID_Ids [IdDocumento $ makeNormalID numLei])) [] Nothing) ) Nothing Nothing Nothing
normaFederal' :: [String] -> [Integer] -> Ano -> URNLexML
normaFederal' tipo numLei ano = URNLexML (Local Brasil Nothing) (Documento (A_Convencionada AC_Federal) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome tipo))) Nothing) (Descritor (TD_Ano ano (ID_Ids [IdDocumento $ makeNormalID numLei])) [] Nothing) ) Nothing Nothing Nothing
leiFederalApelido :: [String] -> URNLexML
leiFederalApelido nl = URNLexML (Local Brasil Nothing) (Documento (A_Convencionada AC_Federal) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["lei"]))) Nothing) (Descritor (TD_Apelido Nothing (ApelidoDocumento (Nome nl))) [] Nothing) ) Nothing Nothing Nothing
-- | Rank of each fragment-component type, used to keep fragment
-- components sorted ([0,_] = hierarchical groupings, [1,_] = dispositive
-- elements).  'fromJust' is safe only while the table stays total over
-- the TCF constructors.
-- NOTE(review): TCF_Caput and TCF_Paragrafo share rank [1,1], so
-- 'splitFragmento' treats them as the same slot -- confirm intended.
ordemTCF tcf = fromJust $ M.lookup tcf $ M.fromList [
      (TCF_Parte,[0,0]),
      (TCF_Livro,[0,1]),
      (TCF_Titulo,[0,2]),
      (TCF_Capitulo,[0,3]),
      (TCF_Secao,[0,4]),
      (TCF_SubSecao,[0,5]),
      (TCF_AgrupamentoHierarquico,[0,6]),
      (TCF_Artigo,[1,0]),
      (TCF_Caput,[1,1]),
      (TCF_Paragrafo,[1,1]),
      (TCF_Inciso,[1,2]),
      (TCF_Alinea,[1,3]),
      (TCF_Item,[1,4]),
      (TCF_DispositivoGenerico,[1,5]),
      (TCF_Aspas,[1,6])
    ]
-- | Split a fragment component list around rank 'ordemTCF tcf':
-- (components before, the component at the same rank if present,
-- components after).  Assumes @comps@ is sorted by rank.
splitFragmento tcf comps = (bef,middle,comps'')
  where
    (bef,comps') = span (\ (CompFragmento tcf' _) -> ordemTCF tcf' < ordemTCF tcf) comps
    (middle,comps'') = case comps' of
       (c@(CompFragmento tcf' _):cl) | ordemTCF tcf' == ordemTCF tcf -> (Just c,cl)
       _ -> (Nothing,comps')
-- | Apply @f@ to the (optional) fragment of a URN, leaving the rest intact.
alteraFragmento f (URNLexML local doc versao forma fragmento) = URNLexML local doc versao forma (f fragmento)
-- | Insert/replace a fragment component of the given type (replacing an
-- existing component at the same rank).
selecionaFragmento :: TipoComponenteFragmento -> UnicoOuIndices -> URNLexML -> URNLexML
selecionaFragmento = selecionaFragmento' True
-- | Like 'selecionaFragmento', but @canReplace@ controls whether an
-- existing component at the same rank is overwritten or kept.
selecionaFragmento' :: Bool -> TipoComponenteFragmento -> UnicoOuIndices -> URNLexML -> URNLexML
selecionaFragmento' canReplace tcf nl = alteraFragmento f
  where
    -- splice the new component into the rank-sorted component list
    f (Just (Fragmento comps)) = Just $ Fragmento $ before ++ [c] ++ after
       where (before,m,after) = splitFragmento tcf comps
             nc = CompFragmento tcf nl
             c = maybe nc (if canReplace then const nc else id) m
    -- no fragment yet: start one with just the new component
    f _ = Just $ Fragmento $ [CompFragmento tcf nl]
-- Convenience selectors for the common dispositive fragment types.
-- 'selecionaInciso' first fixes the caput (without replacing an existing
-- paragraph at the same rank -- hence 'selecionaFragmento'' False).
selecionaInciso n = selecionaFragmento TCF_Inciso n . selecionaFragmento' False TCF_Caput (UI_Indices [])
selecionaIncisoUnico = selecionaInciso UI_Unico
selecionaParagrafo = selecionaFragmento TCF_Paragrafo
selecionaParagrafoUnico = selecionaParagrafo UI_Unico
selecionaCaput = selecionaFragmento TCF_Caput (UI_Indices [])
selecionaAlinea = selecionaFragmento TCF_Alinea
selecionaItem = selecionaFragmento TCF_Item
selecionaArtigo = selecionaFragmento TCF_Artigo
selecionaArtigoUnico = selecionaArtigo UI_Unico
selecionaNorma = selecionaNormaAutoridade Nothing
selecionaNormaAutoridade mautoridade tipo numLei dia mes ano (URNLexML local (Documento autoridade _ _) _ _ _) =
URNLexML local (Documento (maybe autoridade id mautoridade) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome tipo))) Nothing) (Descritor (TD_Datas (Datas (Left $ Data ano mes dia)) (Just $ ID_Ids [IdDocumento $ makeNormalID numLei])) [] Nothing) ) Nothing Nothing Nothing
selecionaNorma' = selecionaNormaAutoridade' Nothing
selecionaNormaAutoridade' mautoridade tipo numLei ano (URNLexML local (Documento autoridade _ _) _ _ _) =
URNLexML local (Documento (maybe autoridade id mautoridade) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome tipo))) Nothing) (Descritor (TD_Ano ano (ID_Ids [IdDocumento $ makeNormalID numLei])) [] Nothing) ) Nothing Nothing Nothing
selecionaLocal :: Local -> URNLexML -> URNLexML
selecionaLocal local (URNLexML _ doc vers form frag) = URNLexML local doc vers form frag
selecionaNorma'' :: [String] -> [Integer] -> Int -> Maybe (Int,Int) -> Maybe ([String],[String]) -> Maybe [String] -> Maybe Autoridade ->
URNLexML -> URNLexML
selecionaNorma'' tipo num ano mmd mmunicipio mestado mautoridade (URNLexML (Local Brasil local) (Documento autoridade _ _) _ _ _) =
URNLexML (Local Brasil local') (Documento autoridade' (TipoDocumento1 (STD1_Norma (TipoNorma (Nome tipo))) Nothing) (Descritor tipoDesc [] Nothing)) Nothing Nothing Nothing
where
(local',autoridade') = case mmunicipio of
Just (m,e) -> (Just $ DLNormal (UnidadeFederacao $ Nome $ e) (Just $ Municipio $ Nome m ),
A_Convencionada $ seNaoDistrital e AC_Municipal)
Nothing -> case mestado of
Just e -> (Just $ DLNormal (UnidadeFederacao $ Nome $ e) Nothing, A_Convencionada $ seNaoDistrital e AC_Estadual)
Nothing -> case tipo of
["resolucao"] -> (local, maybe autoridade id mautoridade)
_ -> case autoridade of
A_Normal [SJ_Instituicao (Instituicao (Nome [x,"federal"])) [] Nothing] ->
(local, maybe (A_Convencionada AC_Federal) id mautoridade)
_ -> (local, maybe autoridade id mautoridade)
tipoDesc = case mmd of
Nothing -> TD_Ano ano idents
Just (m,d) -> TD_Datas (Datas $ Left $ Data ano m d) (Just idents)
idents = ID_Ids [IdDocumento $ makeNormalID num]
seNaoDistrital ["distrito","federal"] _ = AC_Distrital
seNaoDistrital _ a = a
selecionaLeiApelido nl (URNLexML local (Documento autoridade _ _) _ _ _) =
URNLexML local (Documento autoridade (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["lei"]))) Nothing) (Descritor (TD_Apelido Nothing (ApelidoDocumento (Nome nl))) [] Nothing) ) Nothing Nothing Nothing
apelidoCLT = normaFederal ["decreto","lei"] [5452] 1 5 1943
apelidoRegimentoInternoSenado =
URNLexML (Local Brasil Nothing) (Documento autoridadeSenado (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["regimento","interno"]))) Nothing) (Descritor (TD_Datas (Datas (Left $ Data 1970 11 27)) (Just $ ID_Ids [IdDocumento $ makeNormalID [1970]])) [] Nothing) ) Nothing Nothing Nothing
apelidoRegimentoInternoCamara =
URNLexML (Local Brasil Nothing) (Documento autoridadeCamara (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["regimento","interno"]))) Nothing) (Descritor (TD_Datas (Datas (Left $ Data 1989 9 21)) (Just $ ID_Ids [IdDocumento $ makeNormalID [1989]])) [] Nothing) ) Nothing Nothing Nothing
apelidoRegimentoComumCongresso =
URNLexML (Local Brasil Nothing) (Documento autoridadeCongresso (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["regimento","interno"]))) Nothing) (Descritor (TD_Datas (Datas (Left $ Data 1970 8 11)) (Just $ ID_Ids [IdDocumento $ makeNormalID [1970]])) [] Nothing) ) Nothing Nothing Nothing
apelidoRegimentoInterno urn@(URNLexML (Local Brasil Nothing) (Documento aut _ _) _ _ _ ) =
if aut == autoridadeCamara then apelidoRegimentoInternoCamara
else apelidoRegimentoInternoSenado
apelidoRegulamentoAdministrativoSenado =
URNLexML (Local Brasil Nothing) (Documento autoridadeSenado (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["resolucao"]))) Nothing) (Descritor (TD_Datas (Datas (Left $ Data 1972 11 10)) (Just $ ID_Ids [IdDocumento $ makeNormalID [58]])) [] Nothing) ) Nothing Nothing Nothing
atoDisposicoesConstitucionaisTrans =
URNLexML (Local Brasil Nothing)
(Documento (A_Convencionada AC_Federal)
(TipoDocumento1 (STD1_Norma (TipoNorma (Nome nome))) Nothing)
(Descritor (TD_Datas (Datas (Left $ Data 1988 10 05))
(Just $ ID_Ids [IdDocumento $ makeNormalID [1988]]))
[] Nothing) )
Nothing Nothing Nothing
where
nome = ["ato","disposicoes","constitucionais","transitorias"]
-- | Build a 'NormalID' by joining the numbers with dashes,
-- e.g. @makeNormalID [10,406]@ yields @NormalID "10-406"@.
-- ('intercalate' replaces the equivalent @concat . intersperse@.)
makeNormalID :: [Integer] -> NormalID
makeNormalID nl = NormalID (intercalate "-" (map show nl))
-- | Derive the (locality, authority) pair used for a constitution URN.
-- Clauses are tried top to bottom: an explicitly conventional authority
-- wins; otherwise the authority is inferred from the locality.
-- NOTE(review): the @DLNormal _ Nothing@ clause precedes the
-- distrito-federal clause, so a DF URN *without* a municipality is
-- classified AC_Estadual and never reaches AC_Distrital; the DF clause
-- only fires with a municipality present.  Confirm the ordering is
-- intended.
autoridadeConstituicao (URNLexML local (Documento a@(A_Convencionada _) _ _) _ _ _) = (local, a)
autoridadeConstituicao (URNLexML local@(Local Brasil Nothing) _ _ _ _) = (local, A_Convencionada AC_Federal)
autoridadeConstituicao (URNLexML local@(Local Brasil (Just (DLNormal _ Nothing))) _ _ _ _) = (local, A_Convencionada AC_Estadual)
autoridadeConstituicao (URNLexML local@(Local Brasil (Just (DLNormal (UnidadeFederacao (Nome ["distrito", "federal"])) _))) _ _ _ _) =
    (local, A_Convencionada AC_Distrital)
autoridadeConstituicao (URNLexML local@(Local Brasil (Just (DLNormal uf (Just _)))) _ _ _ _) =
    (Local Brasil (Just (DLNormal uf Nothing)), A_Convencionada AC_Estadual)
autoridadeConstituicao (URNLexML local (Documento aut _ _) _ _ _) = (local,aut)
selecionaConstituicao _ = URNLexML (Local Brasil Nothing) (Documento (A_Convencionada AC_Federal) (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["constituicao"]))) Nothing) (Descritor (TD_Datas (Datas (Left (Data 1988 10 05))) (Just $ ID_Ids [IdDocumento $ NormalID "1988" ] )) [] Nothing)) Nothing Nothing Nothing
selecionaResolucao = selecionaNormaAutoridade' (Just autoridadeSenado) ["resolucao"]
autoridadeSenado = A_Normal [SJ_Instituicao (Instituicao $ Nome ["senado","federal"]) [] Nothing]
autoridadeCamara = A_Normal [SJ_Instituicao (Instituicao $ Nome ["camara","deputados"]) [] Nothing]
autoridadeCongresso = A_Normal [SJ_Instituicao (Instituicao $ Nome ["congresso","nacional"]) [] Nothing]
-- | Re-home a URN to the given municipality/state pair.  The incoming
-- locality is discarded and rebuilt as Brasil/state/municipality; the
-- document, version, form and fragment are preserved.
selecionaMunicipio :: [String] -> [String] -> URNLexML -> URNLexML
selecionaMunicipio nomeMunicipio nomeEstado (URNLexML local doc mversao mforma mfragmento) =
    URNLexML (Local Brasil (Just (DLNormal (UnidadeFederacao $ Nome nomeEstado) (Just $ Municipio $ Nome nomeMunicipio)))) doc mversao mforma mfragmento
selecionaConstituicao' :: Maybe Int -> Maybe (Int,Int) -> Maybe [String] -> Maybe Data -> URNLexML -> URNLexML
selecionaConstituicao' mano mmd' mestado mvig (URNLexML (Local Brasil local) (Documento autoridade _ _) _ _ _) =
URNLexML (Local Brasil local') (Documento autoridade' (TipoDocumento1 (STD1_Norma (TipoNorma (Nome ["constituicao"]))) Nothing) (Descritor tipoDesc [] Nothing)) mversao Nothing Nothing
where
mversao = case mvig of
Just d -> Just $ Versao (TV_VersaoVigenteEm d) Nothing
_ -> Nothing
(local',autoridade') = case mestado of
Just e -> (Just $ DLNormal (UnidadeFederacao $ Nome $ e) Nothing, A_Convencionada AC_Estadual)
Nothing -> (local, A_Convencionada AC_Federal)
tipoDesc = case mano of
Nothing -> TD_Apelido Nothing (ApelidoDocumento $ Nome $ ["constituicao"])
Just ano -> let idents = ID_Ids [IdDocumento $ NormalID $ show ano]
in case mmd' of
Nothing -> TD_Ano ano idents
Just (m,d) -> TD_Datas (Datas $ Left $ Data ano m d) (Just idents)
| lexml/lexml-linker | src/main/haskell/LexML/URN/Atalhos.hs | gpl-2.0 | 11,936 | 0 | 26 | 2,308 | 4,404 | 2,287 | 2,117 | 141 | 7 |
{-# LANGUAGE DeriveDataTypeable, TypeSynonymInstances, MultiParamTypeClasses #-}
-- -*- mode: haskell -*-
module JVM.Check where
import JVM.Type
import JVM.Builtin
import Machine.Numerical.Config
import Autolib.Reporter
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Set
import Data.Typeable
-- | Configurable restrictions on submitted JVM programs: either a
-- whitelist of built-in statements, or a bound on Push literals.
data Checker = Builtins (Set Statement)
             | Smallnums Integer
    deriving Typeable
-- Reporter-based checks; messages are in German ("are all permitted?").
instance Check Checker Program where
    -- every builtin used by the program must be in the allowed set
    check (Builtins allowed) p = do
        inform $ text "erlaubt sind diese Rechenbefehle:"
        inform $ nest 4 $ toDoc allowed
        let you = mkSet $ do
              b <- flatten p
              guard $ is_builtin b
              return b
        inform $ text "Sie benutzen:" <+> toDoc you
        let wrong = minusSet you allowed
        assert ( isEmptySet wrong ) $ text "sind alle zugelassen?"
    -- every Push literal must satisfy abs(i) <= allowed
    check (Smallnums allowed) p = do
        inform $ text "Push (i) ist nur erlaubt für abs(i) <= "
            <+> toDoc allowed
        let you = mkSet $ do
              Push i <- flatten p
              return $ abs i
        inform $ text "Sie benutzen:" <+> toDoc you
        let wrong = sfilter ( > allowed ) you
        assert ( isEmptySet wrong ) $ text "sind alle zugelassen?"
-- Parser for the concrete syntax "Builtins <set>" / "Smallnums <int>",
-- mirroring the ToDoc instance below (guard d < 9 handles precedence).
instance Reader Checker where
    atomic_readerPrec d = readerParenPrec d $ \ d -> do
        ((do guard (d < 9)
             my_reserved "Builtins"
             aa <- readerPrec 9
             return (Builtins aa))
         <|>
         (do guard (d < 9)
             my_reserved "Smallnums"
             aa <- readerPrec 9
             return (Smallnums aa)))
-- Pretty-printer dual to the Reader instance above.
instance ToDoc Checker where
    toDocPrec d (Builtins aa) = docParen (d >= 10)
        (text "Builtins" </> fsep [toDocPrec 10 aa])
    toDocPrec d (Smallnums aa) = docParen (d >= 10)
        (text "Smallnums" </> fsep [toDocPrec 10 aa])
| Erdwolf/autotool-bonn | src/JVM/Check.hs | gpl-2.0 | 1,961 | 8 | 14 | 701 | 542 | 265 | 277 | 49 | 0 |
module Network.Counting.Picture where
import Network.Counting.Data
import Autolib.ToDoc
import qualified Data.Map as M
import qualified Data.Array as A
-- | Horizontal scaling: one logical column spans four character cells.
xscale :: Int -> Int
xscale = (* 4)
-- | Vertical scaling: one wire occupies three character rows.
yscale :: Wire -> Int
yscale (Wire row) = row * 3
-- | Render a counting network as ASCII art: horizontal wire lines ('-')
-- overlaid with vertical balancer arrows.  An accumArray over the scaled
-- coordinate space merges the two layers (later entries win), and the
-- array is then flattened row by row into a document.
picture net =
    let ws = 1 : wires net
        ylo = minimum ws ; yhi = maximum ws
        ls = levelled net
        -- one column per balancer level, plus a margin column
        xlo = 0 ; xhi = succ $ maximum $ 1 : map snd ls
        h = do y <- [ylo .. yhi ]
               horizontal xlo xhi y
        v = do ((up,down),l) <- ls
               vertical_arrow l down up
        bnd = ( (xscale xlo, yscale ylo)
              , (xscale xhi, yscale yhi) )
        -- last write wins, so arrows drawn after the wires overwrite them
        a = A.accumArray
            ( \ o n -> n ) ' ' bnd $ h ++ v
    in
       vcat $ do
         y <- [ yscale ylo .. yscale yhi ]
         return $ text $ do
             x <- [ xscale xlo .. xscale xhi ]
             return $ a A.! (x,y)
-- | Dash characters for wire @y@ across the scaled x range.
horizontal x1 x2 y = do
    x <- range' (xscale x1, xscale x2)
    return ((x, yscale y), '-')
-- | A vertical balancer at column @x@ between wires @y1@ and @y2@:
-- a bar of '|', an 'o' endpoint on each wire, and an arrow head
-- ('v' downwards, '^' upwards) just before the target wire.
vertical_arrow x y1 y2 =
    do y <- range' (yscale y1, yscale y2)
       return ((xscale x, y), '|')
    ++ [ ((xscale x, yscale y1), 'o')
       , ((xscale x, yscale y2), 'o')
       , if y2 > y1
         then ((xscale x, pred $ yscale y2), 'v')
         else ((xscale x, succ $ yscale y2), '^')
       ]
-- | annotate each balancer with its level.
-- levels start at 1.
levelled :: Network -> [(Balancer, Int)]
levelled (Network bs) =
    helped M.empty bs
-- Walk the balancers in order, tracking the deepest level seen so far on
-- each wire; a balancer's level is one past the maximum over the wires
-- it spans, and all spanned wires are bumped to that level.
helped m [] = []
helped m (b @ (up,down) : bs) =
    let covers = range' (up, down)
        this = succ $ maximum $ do
            c <- covers
            return $ M.findWithDefault 0 c m
        -- M.union is left-biased, so the fresh levels override the old
        m' = M.fromList $ zip covers $ repeat this
    in (b, this) : helped (M.union m' m) bs
range' (x,y) = A.range (min x y, max x y) | marcellussiegburg/autotool | collection/src/Network/Counting/Picture.hs | gpl-2.0 | 1,813 | 1 | 15 | 664 | 799 | 418 | 381 | 50 | 2 |
{-#LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
module Carnap.Languages.PurePropositional.Logic.Tomassi
(parseTomassiPL, parseTomassiPLProof, tomassiPLCalc, TomassiPL(..), tomassiPLNotation) where
import Data.Map as M (lookup, Map)
import Text.Parsec
import Control.Lens (view)
import Carnap.Core.Data.Types (Form)
import Carnap.Core.Data.Classes (lhs)
import Carnap.Languages.PurePropositional.Syntax
import Carnap.Languages.PurePropositional.Parser
import Carnap.Calculi.NaturalDeduction.Syntax
import Carnap.Calculi.NaturalDeduction.Parser
import Carnap.Calculi.NaturalDeduction.Checker
import Carnap.Languages.ClassicalSequent.Syntax
import Carnap.Languages.ClassicalSequent.Parser
import Carnap.Languages.PurePropositional.Logic.Rules
{-| A system for propositional logic resembling the proof system PL
from Tomassi's Logic book
-}
-- | The inference rules of Tomassi's system PL.  'Pr' carries the
-- premises of the current problem, when known.
data TomassiPL = AndI | AndE1 | AndE2          -- conjunction intro / two elim variants
               | MP | MT                      -- modus ponens / modus tollens
               | DNI | DNE                    -- double negation intro / elim
               | BCI | BCE                    -- biconditional intro / elim
               | ORI1 | ORI2 | ORE            -- disjunction intro variants / elim
               | As | CP | RAA1 | RAA2        -- assumption, conditional proof, reductio variants
               | Pr (Maybe [(ClassicalSequentOver PurePropLexicon (Sequent (Form Bool)))])
               deriving (Eq)
-- | Rule names as displayed in proofs.  Variant pairs ("&E", "∨I", "RAA")
-- intentionally render identically.
instance Show TomassiPL where
        show AndI = "&I"
        show AndE1 = "&E"
        show AndE2 = "&E"
        show MP = "MP"
        show MT = "MT"
        show DNI = "DNI"
        show DNE = "DNE"
        show BCI = "↔I"
        show BCE = "↔E"
        show ORI1 = "∨I"
        show ORI2 = "∨I"
        show ORE = "∨E"
        show As = "As"
        show CP = "CP"
        show RAA1 = "RAA"
        show RAA2 = "RAA"
        show (Pr _) = "Pr"
-- | The sequent schema, side conditions, and proof-shape requirements
-- for each PL rule.
instance Inference TomassiPL PurePropLexicon (Form Bool) where
        ruleOf AndI = adjunction
        ruleOf AndE1 = simplificationVariations !! 0
        ruleOf AndE2 = simplificationVariations !! 1
        ruleOf MP = modusPonens
        ruleOf MT = modusTollens
        ruleOf DNI = doubleNegationIntroduction
        ruleOf DNE = doubleNegationElimination
        ruleOf BCI = conditionalToBiconditional
        ruleOf BCE = biconditionalToTwoConditional
        ruleOf ORI1 = additionVariations !! 0
        ruleOf ORI2 = additionVariations !! 1
        ruleOf ORE = explicitSeparationOfCases 2
        ruleOf As = axiom
        ruleOf (Pr _) = axiom
        ruleOf CP = explicitConditionalProofVariations !! 0
        -- NOTE(review): RAA1/RAA2 pick variations 0 and 2 (not 0 and 1);
        -- presumably deliberate -- confirm against the Rules module.
        ruleOf RAA1 = explictConstructiveConjunctionReductioVariations !! 0
        ruleOf RAA2 = explictConstructiveConjunctionReductioVariations !! 2
        -- premises may only be cited if they match the problem statement
        restriction (Pr prems) = Just (premConstraint prems)
        restriction _ = Nothing
        -- assumption-discharging rules must discharge correctly
        globalRestriction (Left ded) n CP = Just (dischargeConstraint n ded (view lhs $ conclusionOf CP))
        globalRestriction (Left ded) n RAA1 = Just (dischargeConstraint n ded (view lhs $ conclusionOf RAA1))
        globalRestriction (Left ded) n RAA2 = Just (dischargeConstraint n ded (view lhs $ conclusionOf RAA2))
        globalRestriction (Left ded) n ORE = Just (dischargeConstraint n ded (view lhs $ conclusionOf ORE))
        globalRestriction _ _ _ = Nothing
        -- proof shapes: CP/RAA take one subproof, ORE takes two
        indirectInference CP = Just $ TypedProof (ProofType 1 1)
        indirectInference RAA1 = Just $ TypedProof (ProofType 1 1)
        indirectInference RAA2 = Just $ TypedProof (ProofType 1 1)
        indirectInference ORE = Just $ PolyTypedProof 2 (ProofType 1 1)
        indirectInference _ = Nothing
        isAssumption As = True
        isAssumption (Pr _) = True
        isAssumption _ = False
-- | Parse a rule name into the list of PL rules it may denote
-- (ambiguous names like "&E" map to both elimination variants).
-- NOTE(review): the case has no catch-all, but 'choice' above restricts
-- @r@ to exactly the listed strings, so every branch is reachable.
parseTomassiPL rtc n _ = do r <- choice (map (try . string) [ "&I", "&E", "MP", "MT", "~I", "DNI", "~E", "DNE", "↔I", "<->I", "↔E", "<->E"
                                                            , "∨I", "vI", "\\/I", "∨E", "vE", "\\/E", "As", "CP", "RAA", "Pr"])
                            return $ case r of
                                  r | r == "As" -> [As]
                                    | r == "Pr" -> [Pr (problemPremises rtc)]
                                    | r == "&I" -> [AndI]
                                    | r == "&E" -> [AndE1, AndE2]
                                    | r == "MP" -> [MP]
                                    | r == "MT" -> [MT]
                                    | r `elem` ["~I","DNI"] -> [DNI]
                                    | r `elem` ["~E","DNE"] -> [DNE]
                                    | r `elem` ["↔I","<->I"] -> [BCI]
                                    | r `elem` ["↔E","<->E"] -> [BCE]
                                    | r `elem` ["∨I", "vI", "\\/I"] -> [ORI1, ORI2]
                                    | r `elem` ["∨E", "vE", "\\/E"] -> [ORE]
                                    | r == "RAA" -> [RAA1, RAA2]
                                    | r == "CP" -> [CP]
-- | Parse a whole Lemmon-style proof in Tomassi's system PL, using the
-- standard sentence-letter formula parser.
parseTomassiPLProof :: RuntimeNaturalDeductionConfig PurePropLexicon (Form Bool)
                     -> String -> [DeductionLine TomassiPL PurePropLexicon (Form Bool)]
parseTomassiPLProof rtc proofText =
    toDeductionLemmonTomassi (parseTomassiPL rtc) (purePropFormulaParser standardLetters) proofText
-- | Render connectives in Tomassi's ASCII notation: conjunction becomes
-- '&' and negation becomes '~'; every other character passes through.
tomassiPLNotation :: String -> String
tomassiPLNotation s = [ subst c | c <- s ]
    where subst '∧' = '&'
          subst '¬' = '~'
          subst c   = c
-- | The natural deduction calculus for Tomassi's PL: Lemmon-style proofs,
-- rendered with Tomassi's notational conventions.
tomassiPLCalc = mkNDCalc
    { ndRenderer = LemmonStyle TomassiStyle
    , ndParseProof = parseTomassiPLProof
    , ndProcessLine = hoProcessLineLemmon
    , ndProcessLineMemo = Just hoProcessLineLemmonMemo
    , ndNotation = tomassiPLNotation
    }
| gleachkr/Carnap | Carnap/src/Carnap/Languages/PurePropositional/Logic/Tomassi.hs | gpl-3.0 | 5,477 | 0 | 15 | 1,816 | 1,492 | 804 | 688 | 106 | 3 |
{-# LANGUAGE RecursiveDo, JavaScriptFFI, OverloadedStrings #-}
module Estuary.Widgets.ResourceWidget where
import Reflex
import Reflex.Dom
import qualified Reflex.Dom.Widget.Basic as B
import qualified Data.Sequence as S
import qualified Data.Text as T
import qualified Data.Foldable as F
import Data.Map
import Data.Fixed (mod')
import Estuary.Types.Resources
import Estuary.Types.Scope
-- examples
-- aListOfVideoFiles :: ResourceMap VideoMeta --resourcesWidget
-- aListOfVideoFiles = ResourceMap {unResourceMap = (Data.Map.fromList [
-- ("butterflies", (S.fromList [sampleVideoMedia, sampleVideoMedia'])),
-- ("cats", (S.fromList [sampleVideoMedia, sampleVideoMedia'])),
-- ("dogs", (S.fromList [sampleVideoMedia, sampleVideoMedia'])),
-- ("landscapes", (S.fromList [sampleVideoMedia, sampleVideoMedia']))
-- ])}
--
-- aListOfAudioFiles :: ResourceMap AudioMeta
-- aListOfAudioFiles = ResourceMap {unResourceMap = (Data.Map.fromList [
-- ("arpy", (S.fromList [sampleAudioMedia, sampleAudioMedia'])),
-- ("bd", (S.fromList [sampleAudioMedia, sampleAudioMedia'])),
-- ("hh", (S.fromList [sampleAudioMedia, sampleAudioMedia'])),
-- ("moog", (S.fromList [sampleAudioMedia, sampleAudioMedia']))
-- ])}
--
-- aListOfImageFiles :: ResourceMap ImageMeta
-- aListOfImageFiles = ResourceMap {unResourceMap = (Data.Map.fromList [
-- ("arpy", (S.fromList [sampleImageMedia, sampleImageMedia'])),
-- ("bd", (S.fromList [sampleImageMedia, sampleImageMedia'])),
-- ("hh", (S.fromList [sampleImageMedia, sampleImageMedia'])),
-- ("moog", (S.fromList [sampleImageMedia, sampleImageMedia']))
-- ])}
--
-- sampleImageMedia' :: Resource ImageMeta
-- sampleImageMedia' = Resource {resourceGroup = "landscapes", resourceFileName = "landscape0", resourceFileSize = 300, resourceMeta = ImageMeta {imageResolution = (800,1900), imageAspectRatio = (NineOverSixteen)}, resourceTags = S.fromList ["image", "landscape"], resourceScope = Ensemble}
--
-- sampleImageMedia :: Resource ImageMeta
-- sampleImageMedia = Resource {resourceGroup = "sky", resourceFileName = "sky1", resourceFileSize = 300, resourceMeta = ImageMeta {imageResolution = (800,1900), imageAspectRatio = (NineOverSixteen)}, resourceTags = S.fromList ["image", "landscape"], resourceScope = Ensemble}
--
--
-- sampleVideoMedia :: Resource VideoMeta
-- sampleVideoMedia = Resource {resourceGroup = "butterflies", resourceFileName = "butterflies0", resourceFileSize = 100, resourceMeta = VideoMeta {videoDuration = 195.0, videoResolution = (400,600), videoAspectRatio = FourOverThree}, resourceTags = S.fromList ["video", "butterflies"], resourceScope = Public}
--
-- sampleVideoMedia' :: Resource VideoMeta
-- sampleVideoMedia' = Resource {resourceGroup = "cats", resourceFileName = "cats1", resourceFileSize = 300, resourceMeta = VideoMeta {videoDuration = 3.25, videoResolution = (800,1900), videoAspectRatio = (Custom 2.0)}, resourceTags = S.fromList ["video", "cats"], resourceScope = Ensemble}
--
--
-- sampleAudioMedia :: Resource AudioMeta
-- sampleAudioMedia = Resource {resourceGroup = "arpy", resourceFileName = "arpy0", resourceFileSize = 100, resourceMeta = AudioMeta {audioDuration = 195.0}, resourceTags = S.fromList ["audio", "arpy"], resourceScope = Private}
--
-- sampleAudioMedia' :: Resource AudioMeta
-- sampleAudioMedia' = Resource {resourceGroup = "bd", resourceFileName = "bd1", resourceFileSize = 100, resourceMeta = AudioMeta {audioDuration = 300.0}, resourceTags = S.fromList ["audio", "bd"], resourceScope = Ensemble}
--
-- -- a function to show audio Duration
-- showAudioDuration :: Resource AudioMeta -> T.Text
-- showAudioDuration a = T.pack $ humanReadableDuration $ (audioDuration (resourceMeta a))
--
-- -- a function to show video duration
-- showVideoDuration :: Resource VideoMeta -> T.Text
-- showVideoDuration x = do
-- let a = humanReadableDuration $ (videoDuration (resourceMeta x))
-- T.pack $ a
--
-- -- a function to show video Shape
-- showVideoShape :: Resource VideoMeta -> T.Text
-- showVideoShape x = T.pack $ show $ videoAspectRatio (resourceMeta x)
--
-- -- a function to show video image
-- showImageShape :: Resource ImageMeta -> T.Text
-- showImageShape a = T.pack $ show $ imageAspectRatio (resourceMeta a)
--
-- -- a function to show the resource tags
-- showTags :: Resource a -> T.Text
-- showTags a = concatTags $ F.toList (resourceTags a) --List
--
-- -- a helper function for showTags
-- concatTags :: [T.Text] -> T.Text
-- concatTags [] = ""
-- concatTags [x] = x
-- concatTags (x:xs) = T.concat ["<", x, ", ", concatTags xs, ">"]
--
-- -------- helper functions for showing Duration, adapted from Data.Duration -----------------------
-- ms :: Double
-- ms = 1.0
--
-- oneSec :: Double
-- oneSec= 1.0 -- * ms
--
-- oneMinute :: Double
-- oneMinute = 60.0 * oneSec
--
-- oneHour :: Double
-- oneHour = 60.0 * oneMinute
--
-- getMs :: Double -> Double
-- getMs n = (/) n ms
--
-- getSeconds :: Double -> Int
-- getSeconds n = floor $ (/) n oneSec
--
-- getMinutes :: Double -> Int
-- getMinutes n = floor $ (/) n oneMinute
--
-- getHours :: Double -> Int
-- getHours n = floor $ (/) n oneHour
--
-- humanReadableDuration :: Double -> String
-- humanReadableDuration n
-- | n < oneSec = let mi = getMs n in if mi > 0 then show mi ++ "ms" else ""
-- | n < oneMinute = let s = getSeconds n in if s > 0 then show s ++ "s" else ""
-- | n < oneHour = let m = getMinutes n in if m > 0 then show m ++ " min " ++ (show $ floor (((n `mod'` oneMinute) /oneMinute) * 100)) ++ "s" else ""
-- | otherwise = let h = getHours n in if h > 0 then show h ++ " hours " ++ (show $ floor (((n `mod'` oneHour) /oneHour) * 100)) ++ "min " ++ (show $ floor (((n `mod'` oneMinute) /oneMinute) * 100)) ++ "s" else ""
-- -- | n < year = let d = getDays n in if d > 0 then show d ++ " days " ++ humanReadableDuration (n `mod'` day) else ""
--
-- -- a widget to display one dynamic audio resource
-- audioResourceWidgetDyn :: MonadWidget t m => Dynamic t (Resource AudioMeta) -> m ()
-- audioResourceWidgetDyn a = divClass "resourceWidget" $ do
-- file' <- fmap resourceFileName a
-- dur <- fmap showAudioDuration a
-- tags' <- fmap showTags a
-- scope' <- fmap (T.pack . show . resourceScope) a
-- divClass "resourceName" $ dynText file'
-- divClass "resourceDur" $ dynText dur
-- divClass "resourceTags" $ dynText tags'
-- divClass "resourceScope" $ dynText scope'
--
-- -- a widget to display one dynamic video resource
-- videoResourceWidgetDyn :: MonadWidget t m => Dynamic t (Resource VideoMeta) -> m ()
-- videoResourceWidgetDyn a = divClass "resourceWidget" $ do
-- file' <- fmap resourceFileName a
-- dur <- fmap showVideoDuration a
-- shape <- fmap showVideoShape a
-- tags' <- fmap showTags a
-- scope' <- fmap (T.pack . show . resourceScope) a
-- divClass "resourceName" $ dynText file'
-- divClass "resourceDur" $ dynText dur
-- divClass "resourceShape" $ dynText shape
-- divClass "resourceTags" $ dynText tags'
-- divClass "resourceScope" $ dynText scope'
--
--
-- -- a widget to display one dynamic image resource
-- imageResourceWidgetDyn :: MonadWidget t m => Dynamic t (Resource ImageMeta) -> m ()
-- imageResourceWidgetDyn a = divClass "resourceWidget" $ do
-- file' <- fmap resourceFileName a
-- shape <- fmap showImageShape a
-- tags' <- fmap showTags a
-- scope' <- fmap (T.pack . show . resourceScope) a
-- divClass "resourceName" $ dynText file'
-- divClass "resourceShape" $ dynText shape
-- divClass "resourceTags" $ dynText tags'
-- divClass "resourceScope" $ dynText scope'
-- a widget to display a list of dynamic audio resources
-- audioResources :: MonadWidget t m => Dynamic t (ResourceMap AudioMeta) -> m (Dynamic t [()])
-- audioResources vs = divClass "resourceWidgetContainer code-font" $ do
-- divClass "resourceGroup" $ text $ T.pack "Audio Resources \n"
-- divClass "resourceLabels" $ do
-- divClass "resourceName" $ text $ T.pack $ "Name \n"
-- divClass "resourceDur" $ text $ T.pack $ "Duration \n"
-- divClass "resourceTags" $ text $ T.pack $ "Tags \n"
-- divClass "resourceScope" $ text $ T.pack $ "Scope \n"
-- vs' <- fmap resourceList vs
-- B.simpleList vs' $ \v -> audioResourceWidgetDyn v --m ()
-- a widget to display a list of dynamic video resources
-- videoResources :: MonadWidget t m => Dynamic t (ResourceMap VideoMeta) -> m (Dynamic t [()])
-- videoResources vs = divClass "resourceWidgetContainer code-font" $ do
-- divClass "resourceGroup" $ text $ T.pack "Video Resources \n"
-- divClass "resourceLabels" $ do
-- divClass "resourceName" $ text $ T.pack $ "Name \n"
-- divClass "resourceDur" $ text $ T.pack $ "Duration \n"
-- divClass "resourceShape" $ text $ T.pack $ "Shape \n"
-- divClass "resourceTags" $ text $ T.pack $ "Tags \n"
-- divClass "resourceScope" $ text $ T.pack $ "Scope \n"
-- vs' <- fmap resourceList vs
-- B.simpleList vs' $ \v -> videoResourceWidgetDyn v --m ()
-- a widget to display a list of dynamic image resources
-- imageResources :: MonadWidget t m => Dynamic t (ResourceMap ImageMeta) -> m (Dynamic t [()])
-- imageResources vs = divClass "resourceWidgetContainer code-font" $ do
-- divClass "resourceGroup" $ text $ T.pack "Image Resources \n"
-- divClass "resourceLabels" $ do
-- divClass "resourceName" $ text $ T.pack $ "Name \n"
-- divClass "resourceShape" $ text $ T.pack $ "Shape \n"
-- divClass "resourceTags" $ text $ T.pack $ "Tags \n"
-- divClass "resourceScope" $ text $ T.pack $ "Scope \n"
-- vs' <- fmap resourceList vs
-- B.simpleList vs' $ \v -> imageResourceWidgetDyn v --m ()
| d0kt0r0/estuary | client/src/Estuary/Widgets/ResourceWidget.hs | gpl-3.0 | 10,045 | 0 | 5 | 2,030 | 255 | 232 | 23 | 12 | 0 |
{-# LANGUAGE DeriveGeneric, ExistentialQuantification, FlexibleContexts #-}
-- | This module ties "Ltc.Diff" to "Ltc.Store".
module Ltc.Store.VersionControl (
-- * Getting history
DiffPack(..),
KeyHistory(..), getDiffPack, getKeyHistory,
-- * Selecting history
versionsFromToIncluding,
-- * Applying history
insertChangesInto, Reason
) where
import Control.Applicative ( (<$>) )
import Control.Monad ( forM )
import Data.ByteString.Lazy.Char8 ( ByteString )
import Data.Foldable ( foldlM )
import Data.Map ( Map )
import Data.Serialize ( Serialize )
import Data.Set ( Set )
import Data.VectorClock ( causes )
import GHC.Generics ( Generic )
import Language.Sexp ( Sexpable(..) )
import Ltc.Store.Class ( Store(..), SetCmd(..), Storable
, typeOf
, Key, getExn, getLatestExn
, Version, keyVersionsExn )
import Ltc.Diff ( Diff, Diffable(..) )
import qualified Data.Map as M
import System.Log.Logger ( debugM )
import Text.Printf ( printf )
----------------------
-- Debugging
----------------------
-- | Debugging tag for this module
tag :: String
tag = "VersionControl"
----------------------
-- Wrappers around values and diffs
----------------------
-- FIXME We should include the typerep with the key history and get rid of the separate
-- cases.
-- | 'KeyHistory' achieves two goals. First, it solidifies the type parameter of 'Diff'
-- by encoding the possibilities as a sum type. Second, it encapsulates the current value
-- associated with a key, and the history of changes leading up to that point.
--
-- The diffs are reversed such that they can be applied to the tip. So, the most recent
-- value is @tip@, the second most recent value is @applyDiff tip (head diffs)@, and so
-- on.
data KeyHistory = IntKeyHistory Integer [Diff Integer]                   -- ^ tip and reversed diffs for an 'Integer' key
                | IntSetKeyHistory (Set Integer) [Diff (Set Integer)]    -- ^ ... for a @Set Integer@ key
                | StringKeyHistory ByteString [Diff ByteString]          -- ^ ... for a 'ByteString' key
                | StringSetKeyHistory (Set ByteString) [Diff (Set ByteString)]  -- ^ ... for a @Set ByteString@ key
                  deriving ( Eq, Generic, Show )
instance Sexpable KeyHistory
instance Serialize KeyHistory
-- | 'DiffPack' is just a map of 'Key's to 'KeyHistory's.
data DiffPack = DiffPack (Map Key KeyHistory)
    deriving ( Eq, Generic, Show )
instance Sexpable DiffPack
instance Serialize DiffPack
-- | Human-readable explanation of why changes to a key were rejected.
type Reason = String
----------------------
-- Applying history
----------------------
-- | Insert the given changes into the store. Returns a list of conflicting keys.
insertChangesInto :: (Store s) => s -> DiffPack -> IO [(Key, Reason)]
insertChangesInto store (DiffPack m) = foldlM insertKeyHistory [] (M.toList m)
  where
    -- Try to merge one key's incoming history; accumulate a (key, reason)
    -- pair for each key whose merge is rejected.
    insertKeyHistory :: [(Key, Reason)] -> (Key, KeyHistory) -> IO [(Key, Reason)]
    insertKeyHistory conflicts (key, theirHistory) = do
        -- Nothing here means we hold no local history for this key.
        mmyHistory <- getKeyHistory store key
        case tryMerge key mmyHistory theirHistory of
            Right acts -> do
                mapM_ (applyAction store) acts
                return conflicts
            Left reason -> do
                return ((key, reason) : conflicts)
----------------------
-- Store actions
----------------------
-- | Apply the changes specified by the action to the store.
-- | Execute a single 'SetCmd' against the store, discarding the version
-- information returned by 'set'.
applyAction :: (Store s) => s -> SetCmd -> IO ()
applyAction store (SetCmd key val) =
    set store key val >> return ()
----------------------
-- Merges
----------------------
-- | Attempt to merge to change histories together. If the merge is successful, return a
-- list of 'SetCmd's. For instance, this can fail if the histories have different
-- types.
-- Merging onto a missing key replays their entire history; merging two
-- histories of different value types always fails.
tryMerge :: Key -> Maybe KeyHistory -> KeyHistory -> Either Reason [SetCmd]
tryMerge key Nothing theirHistory =
    Right (insertNewActions key theirHistory)
tryMerge _ (Just (IntKeyHistory myTip myDiffs)) (IntKeyHistory theirTip theirDiffs) =
    merge myTip myDiffs theirTip theirDiffs
tryMerge _ (Just (IntSetKeyHistory myTip myDiffs)) (IntSetKeyHistory theirTip theirDiffs) =
    merge myTip myDiffs theirTip theirDiffs
tryMerge _ (Just (StringKeyHistory myTip myDiffs)) (StringKeyHistory theirTip theirDiffs) =
    merge myTip myDiffs theirTip theirDiffs
tryMerge _ (Just (StringSetKeyHistory myTip myDiffs)) (StringSetKeyHistory theirTip theirDiffs) =
    merge myTip myDiffs theirTip theirDiffs
tryMerge _ _ _ =
    Left "different types"
-- | Attempt to merge two histories of the same type together. If the merge is
-- successful, return a list of 'SetCmd's.
-- NOTE(review): this is currently a stub -- any key present on both
-- sides is reported as a conflict, regardless of its content.
merge :: a -> [Diff a] -> a -> [Diff a] -> Either Reason [SetCmd]
merge _ _ _ _ = Left "key already exists"
-- | Prepare the actions that insert the entire key history into the store.
-- The four equations are textually identical but cannot be unified: each
-- 'KeyHistory' constructor fixes a different value type for 'diffsToValues'.
insertNewActions :: Key -> KeyHistory -> [SetCmd]
insertNewActions key (IntKeyHistory tip diffs) =
    map (SetCmd key) (reverse (diffsToValues tip diffs))
insertNewActions key (IntSetKeyHistory tip diffs) =
    map (SetCmd key) (reverse (diffsToValues tip diffs))
insertNewActions key (StringKeyHistory tip diffs) =
    map (SetCmd key) (reverse (diffsToValues tip diffs))
insertNewActions key (StringSetKeyHistory tip diffs) =
    map (SetCmd key) (reverse (diffsToValues tip diffs))
-- | Convert a tip and some diffs from it to values.
-- Result is newest-first: @[tip, applyDiff tip d1, applyDiff (applyDiff tip d1) d2, ...]@.
diffsToValues :: (Diffable a) => a -> [Diff a] -> [a]
diffsToValues tip diffs = tip : reverse (snd (foldl diffToValue (tip, []) diffs))
  where
    -- thread the latest value while stacking each intermediate one
    diffToValue (v, vs) diff = let v' = applyDiff v diff in (v', v' : vs)
----------------------
-- Selecting history
----------------------
-- | We often need to select changes /after/ one version clock, but before and including
-- another version clock. The list of versions is in oldest-to-newest order.
versionsFromToIncluding :: (Store s) => s -> Key -> Version -> Version -> IO [Version]
versionsFromToIncluding store key from toInc = do
    debugM tag (printf "selecting VCs from %s to (including) %s"
                (show from) (show toInc))
    -- keyVersionsExn lists the tip first (see getKeyHistory); reverse to oldest-first
    vsns <- reverse <$> keyVersionsExn store key
    return $
        takeWhile (\vsn -> vsn `causes` toInc) $
        dropWhile (\vsn -> not (from `causes` vsn) || from == vsn) vsns
----------------------
-- Getting history
----------------------
-- | Get all the data from a store.
getDiffPack :: (Store s) => s -> IO DiffPack
getDiffPack store = do
    -- ".*" matches every key in the store
    ks <- keys store ".*"
    DiffPack <$> foldlM addKeyHistory M.empty ks
  where
    addKeyHistory m key = do
        -- We just got the keys from the database, so the following cannot fail.
        Just kh <- getKeyHistory store key
        return (M.insert key kh m)
-- | Get the entire history of a key. If the key is missing, return 'Nothing'.
-- Dispatches on the key's runtime type to build the matching
-- 'KeyHistory' constructor; the four typed branches are otherwise identical.
getKeyHistory :: (Store s) => s -> Key -> IO (Maybe KeyHistory)
getKeyHistory store key = do
    mty <- keyType store key
    case mty of
        Nothing ->
            return Nothing
        Just ty | ty == typeOf (undefined :: Integer) -> do
            (tip :: Integer, _) <- getLatestExn store key
            diffs <- getDiffs tip
            return (Just (IntKeyHistory tip diffs))
        Just ty | ty == typeOf (undefined :: Set Integer) -> do
            (tip :: Set Integer, _) <- getLatestExn store key
            diffs <- getDiffs tip
            return (Just (IntSetKeyHistory tip diffs))
        Just ty | ty == typeOf (undefined :: ByteString) -> do
            (tip :: ByteString, _) <- getLatestExn store key
            diffs <- getDiffs tip
            return (Just (StringKeyHistory tip diffs))
        Just ty | ty == typeOf (undefined :: Set ByteString) -> do
            (tip :: Set ByteString, _) <- getLatestExn store key
            diffs <- getDiffs tip
            return (Just (StringSetKeyHistory tip diffs))
        Just _ ->
            -- unreachable while the four storable types above stay exhaustive
            error "getKeyHistory: wtf"
  where
    -- Reconstruct the reversed diff chain from the tip back through
    -- every older version of the key.
    getDiffs :: (Storable a) => a -> IO [Diff a]
    getDiffs tip = do
        vsns <- keyVersionsExn store key
        -- @vsns@ contains at least the tip.
        vs <- forM (tail vsns) (\vsn -> getExn store key vsn)
        let (_, diffs) = foldl (\(v, ds) v' -> (v', diffFromTo v v' : ds))
                               (tip, [])
                               vs
        return (reverse diffs)
| scvalex/ltc | src/Ltc/Store/VersionControl.hs | gpl-3.0 | 8,182 | 0 | 17 | 1,969 | 2,161 | 1,126 | 1,035 | -1 | -1 |
{-|
A reader for CSV data, using an extra rules file to help interpret the data.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Hledger.Read.CsvReader (
-- * Reader
reader,
-- * Misc.
CsvRecord,
-- rules,
rulesFileFor,
parseRulesFile,
parseAndValidateCsvRules,
expandIncludes,
transactionFromCsvRecord,
-- * Tests
tests_Hledger_Read_CsvReader
)
where
import Prelude ()
import Prelude.Compat hiding (getContents)
import Control.Exception hiding (try)
import Control.Monad
import Control.Monad.Except
import Control.Monad.State.Strict (StateT, get, modify', evalStateT)
-- import Test.HUnit
import Data.Char (toLower, isDigit, isSpace)
import Data.List.Compat
import Data.Maybe
import Data.Ord
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Time.Calendar (Day)
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (parseTimeM, defaultTimeLocale)
#else
import Data.Time.Format (parseTime)
import System.Locale (defaultTimeLocale)
#endif
import Safe
import System.Directory (doesFileExist)
import System.FilePath
import Test.HUnit hiding (State)
import Text.CSV (parseCSV, CSV)
import Text.Megaparsec.Compat hiding (parse)
import qualified Text.Parsec as Parsec
import Text.Printf (printf)
import Hledger.Data
import Hledger.Utils.UTF8IOCompat (getContents)
import Hledger.Utils
import Hledger.Read.Common (amountp, statusp, genericSourcePos)
-- | The CSV journal reader: handles ".csv" files via 'parse'.
reader :: Reader
reader = Reader
  {rFormat = "csv"
  ,rExtensions = ["csv"]
  ,rParser = parse
  ,rExperimental = False
  }
-- | Parse and post-process a "Journal" from CSV data, or give an error.
-- XXX currently ignores the string and reads from the file path
parse :: Maybe FilePath -> Bool -> FilePath -> Text -> ExceptT String IO Journal
parse rulesfile _ f t = do
  r <- liftIO $ readJournalFromCsv rulesfile f t
  -- number and tie transactions here since we bypass parseAndFinaliseJournal
  case r of Left e -> throwError e
            Right j -> return $ journalNumberAndTieTransactions j
-- XXX does not use parseAndFinaliseJournal like the other readers
-- | Read a Journal from the given CSV data (and filename, used for error
-- messages), or return an error. Proceed as follows:
-- @
-- 1. parse CSV conversion rules from the specified rules file, or from
-- the default rules file for the specified CSV file, if it exists,
-- or throw a parse error; if it doesn't exist, use built-in default rules
-- 2. parse the CSV data, or throw a parse error
-- 3. convert the CSV records to transactions using the rules
-- 4. if the rules file didn't exist, create it with the default rules and filename
-- 5. return the transactions as a Journal
-- @
readJournalFromCsv :: Maybe FilePath -> FilePath -> Text -> IO (Either String Journal)
readJournalFromCsv Nothing "-" _ = return $ Left "please use --rules-file when reading CSV from stdin"
readJournalFromCsv mrulesfile csvfile csvdata =
 handle (\e -> return $ Left $ show (e :: IOException)) $ do
  let throwerr = throw.userError
  -- parse rules
  let rulesfile = fromMaybe (rulesFileFor csvfile) mrulesfile
  rulesfileexists <- doesFileExist rulesfile
  rulestext <-
    if rulesfileexists
     then do
       dbg1IO "using conversion rules file" rulesfile
       liftIO $ (readFile' rulesfile >>= expandIncludes (takeDirectory rulesfile))
     else return $ defaultRulesText rulesfile
  rules <- liftIO (runExceptT $ parseAndValidateCsvRules rulesfile rulestext) >>= either throwerr return
  dbg2IO "rules" rules
  -- apply skip directive
  let skip = maybe 0 oneorerror $ getDirective "skip" rules
        where
          oneorerror "" = 1
          oneorerror s = readDef (throwerr $ "could not parse skip value: " ++ show s) s
  -- parse csv
  -- parsec seems to fail if you pass it "-" here XXX try again with megaparsec
  let parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
  records <- (either throwerr id .
              dbg2 "validateCsv" . validateCsv skip .
              dbg2 "parseCsv")
             `fmap` parseCsv parsecfilename (T.unpack csvdata)
  dbg1IO "first 3 csv records" $ take 3 records
  -- identify header lines
  -- let (headerlines, datalines) = identifyHeaderLines records
  --     mfieldnames = lastMay headerlines
  let
    -- convert CSV records to transactions
    -- NOTE(review): the accumulator returned below is @pos@, not @pos'@,
    -- so the source position never advances past the first increment;
    -- looks like it should be @pos'@ -- confirm before changing.
    txns = snd $ mapAccumL
           (\pos r ->
              let
                SourcePos name line col = pos
                line' = (mpMkPos . (+1) . mpUnPos) line
                pos' = SourcePos name line' col
              in
                (pos, transactionFromCsvRecord pos' rules r)
           )
           (initialPos parsecfilename) records
    -- Ensure transactions are ordered chronologically.
    -- First, reverse them to get same-date transactions ordered chronologically,
    -- if the CSV records seem to be most-recent-first, ie if there's an explicit
    -- "newest-first" directive, or if there's more than one date and the first date
    -- is more recent than the last.
    txns' =
      (if newestfirst || mseemsnewestfirst == Just True then reverse else id) txns
      where
        newestfirst = dbg3 "newestfirst" $ isJust $ getDirective "newest-first" rules
        mseemsnewestfirst = dbg3 "mseemsnewestfirst" $
          case nub $ map tdate txns of
            ds | length ds > 1 -> Just $ head ds > last ds
            _ -> Nothing
    -- Second, sort by date.
    txns'' = sortBy (comparing tdate) txns'
  when (not rulesfileexists) $ do
    dbg1IO "creating conversion rules file" rulesfile
    writeFile rulesfile $ T.unpack rulestext
  return $ Right nulljournal{jtxns=txns''}
-- | Run the CSV parser on the given path's data; a path of "-" means
-- read from standard input instead of the supplied string.
parseCsv :: FilePath -> String -> IO (Either Parsec.ParseError CSV)
parseCsv path csvdata =
  case path of
    "-" -> fmap (parseCSV "(stdin)") getContents
    _   -> return (parseCSV path csvdata)
-- | Return the cleaned up and validated CSV data, or an error.
validateCsv :: Int -> Either Parsec.ParseError CSV -> Either String [CsvRecord]
validateCsv _ (Left e) = Left $ show e
validateCsv numhdrlines (Right rs) = validate $ drop numhdrlines $ filternulls rs
  where
    -- drop empty records produced by blank CSV lines
    filternulls = filter (/=[""])
    validate [] = Left "no CSV records found"
    -- every record needs >= 2 fields, and all records the same field count
    validate rs@(first:_)
      | isJust lessthan2 = let r = fromJust lessthan2 in Left $ printf "CSV record %s has less than two fields" (show r)
      | isJust different = let r = fromJust different in Left $ printf "the first CSV record %s has %d fields but %s has %d" (show first) length1 (show r) (length r)
      | otherwise = Right rs
      where
        length1 = length first
        lessthan2 = headMay $ filter ((<2).length) rs
        different = headMay $ filter ((/=length1).length) rs
-- -- | The highest (0-based) field index referenced in the field
-- -- definitions, or -1 if no fields are defined.
-- maxFieldIndex :: CsvRules -> Int
-- maxFieldIndex r = maximumDef (-1) $ catMaybes [
-- dateField r
-- ,statusField r
-- ,codeField r
-- ,amountField r
-- ,amountInField r
-- ,amountOutField r
-- ,currencyField r
-- ,accountField r
-- ,account2Field r
-- ,date2Field r
-- ]
-- rulesFileFor :: CliOpts -> FilePath -> FilePath
-- rulesFileFor CliOpts{rules_file_=Just f} _ = f
-- rulesFileFor CliOpts{rules_file_=Nothing} csvfile = replaceExtension csvfile ".rules"
-- | The conversion rules file path corresponding to a CSV file path:
-- the CSV path with ".rules" appended.
rulesFileFor :: FilePath -> FilePath
rulesFileFor csvfile = csvfile ++ ".rules"
-- | Recover a CSV file path from a rules file path by dropping the final
-- six characters (the length of ".rules"); shorter inputs yield "".
csvFileFor :: FilePath -> FilePath
csvFileFor rulesfile = take (length rulesfile - 6) rulesfile
-- | The built-in default rules file content, written next to a CSV file
-- when no rules file exists yet.
defaultRulesText :: FilePath -> Text
defaultRulesText csvfile = T.pack $ unlines
  ["# hledger csv conversion rules for " ++ csvFileFor (takeFileName csvfile)
  ,"# cf http://hledger.org/manual#csv-files"
  ,""
  ,"account1 assets:bank:checking"
  ,""
  ,"fields date, description, amount"
  ,""
  ,"#skip 1"
  ,"#newest-first"
  ,""
  ,"#date-format %-d/%-m/%Y"
  ,"#date-format %-m/%-d/%Y"
  ,"#date-format %Y-%h-%d"
  ,""
  ,"#currency $"
  ,""
  ,"if ITUNES"
  ," account2 expenses:entertainment"
  ,""
  ,"if (TO|FROM) SAVINGS"
  ," account2 assets:bank:savings\n"
  ]
--------------------------------------------------------------------------------
-- Conversion rules parsing
{-
Grammar for the CSV conversion rules, more or less:
RULES: RULE*
RULE: ( FIELD-LIST | FIELD-ASSIGNMENT | CONDITIONAL-BLOCK | SKIP | NEWEST-FIRST | DATE-FORMAT | COMMENT | BLANK ) NEWLINE
FIELD-LIST: fields SPACE FIELD-NAME ( SPACE? , SPACE? FIELD-NAME )*
FIELD-NAME: QUOTED-FIELD-NAME | BARE-FIELD-NAME
QUOTED-FIELD-NAME: " (any CHAR except double-quote)+ "
BARE-FIELD-NAME: any CHAR except space, tab, #, ;
FIELD-ASSIGNMENT: JOURNAL-FIELD ASSIGNMENT-SEPARATOR FIELD-VALUE
JOURNAL-FIELD: date | date2 | status | code | description | comment | account1 | account2 | amount | JOURNAL-PSEUDO-FIELD
JOURNAL-PSEUDO-FIELD: amount-in | amount-out | currency
ASSIGNMENT-SEPARATOR: SPACE | ( : SPACE? )
FIELD-VALUE: VALUE (possibly containing CSV-FIELD-REFERENCEs)
CSV-FIELD-REFERENCE: % CSV-FIELD
CSV-FIELD: ( FIELD-NAME | FIELD-NUMBER ) (corresponding to a CSV field)
FIELD-NUMBER: DIGIT+
CONDITIONAL-BLOCK: if ( FIELD-MATCHER NEWLINE )+ INDENTED-BLOCK
FIELD-MATCHER: ( CSV-FIELD-NAME SPACE? )? ( MATCHOP SPACE? )? PATTERNS
MATCHOP: ~
PATTERNS: ( NEWLINE REGEXP )* REGEXP
INDENTED-BLOCK: ( SPACE ( FIELD-ASSIGNMENT | COMMENT ) NEWLINE )+
REGEXP: ( NONSPACE CHAR* ) SPACE?
VALUE: SPACE? ( CHAR* ) SPACE?
COMMENT: SPACE? COMMENT-CHAR VALUE
COMMENT-CHAR: # | ;
NONSPACE: any CHAR not a SPACE-CHAR
BLANK: SPACE?
SPACE: SPACE-CHAR+
SPACE-CHAR: space | tab
CHAR: any character except newline
DIGIT: 0-9
-}
{- |
A set of data definitions and account-matching patterns sufficient to
convert a particular CSV data file into meaningful journal transactions.
-}
data CsvRules = CsvRules {
  rdirectives :: [(DirectiveName,String)],              -- ^ top-level directives (skip, date-format, ...)
  rcsvfieldindexes :: [(CsvFieldName, CsvFieldIndex)],  -- ^ CSV field names and their 1-based positions
  rassignments :: [(JournalFieldName, FieldTemplate)],  -- ^ top-level journal field assignments
  rconditionalblocks :: [ConditionalBlock]              -- ^ conditional rule blocks, in source order
} deriving (Show, Eq)
-- | Rules-file parser carrying the rules built so far as state.
type CsvRulesParser a = StateT CsvRules SimpleTextParser a
type DirectiveName = String
type CsvFieldName = String
type CsvFieldIndex = Int
type JournalFieldName = String
type FieldTemplate = String
type ConditionalBlock = ([RecordMatcher], [(JournalFieldName, FieldTemplate)]) -- block matches if all RecordMatchers match
type RecordMatcher = [RegexpPattern] -- match if any regexps match any of the csv fields
-- type FieldMatcher = (CsvFieldName, [RegexpPattern]) -- match if any regexps match this csv field
type DateFormat = String
type RegexpPattern = String
-- | The empty rule set, used as the initial state when parsing a rules file.
rules :: CsvRules
rules = CsvRules {
  rdirectives=[],
  rcsvfieldindexes=[],
  rassignments=[],
  rconditionalblocks=[]
}
-- | Prepend a directive to the rules' directive list.
addDirective :: (DirectiveName, String) -> CsvRules -> CsvRules
addDirective d r = r{rdirectives=d:rdirectives r}
-- | Prepend a journal field assignment to the rules.
addAssignment :: (JournalFieldName, FieldTemplate) -> CsvRules -> CsvRules
addAssignment a r = r{rassignments=a:rassignments r}
-- | Record a "fields" list: set the CSV field indexes and add an
-- assignment for each named field that is also a journal field.
setIndexesAndAssignmentsFromList :: [CsvFieldName] -> CsvRules -> CsvRules
setIndexesAndAssignmentsFromList fs r = addAssignmentsFromList fs . setCsvFieldIndexesFromList fs $ r
-- | Replace the CSV field indexes, numbering the given names from 1.
setCsvFieldIndexesFromList :: [CsvFieldName] -> CsvRules -> CsvRules
setCsvFieldIndexesFromList fs r = r{rcsvfieldindexes=zip fs [1..]}
-- | For each known journal field name that appears in the given CSV field
-- list, add an assignment to the corresponding 1-based "%N" reference.
addAssignmentsFromList :: [CsvFieldName] -> CsvRules -> CsvRules
addAssignmentsFromList fs r = foldl' step r journalfieldnames
  where
    -- assign "%N" when the journal field name occurs at (0-based) index N-1
    step acc f =
      case elemIndex f fs of
        Nothing -> acc
        Just i  -> addAssignment (f, "%" ++ show (i+1)) acc
-- | Prepend a conditional block to the rules.
addConditionalBlock :: ConditionalBlock -> CsvRules -> CsvRules
addConditionalBlock b r = r{rconditionalblocks=b:rconditionalblocks r}
-- | Look up the value of a top-level directive by name, if declared.
getDirective :: DirectiveName -> CsvRules -> Maybe FieldTemplate
getDirective directivename r = lookup directivename (rdirectives r)
-- NOTE(review): orphan instance (neither the class nor String is declared
-- here); kept so megaparsec can render String-valued custom errors.
instance ShowErrorComponent String where
  showErrorComponent = id
-- | An error-throwing action that parses this file's content
-- as CSV conversion rules, interpolating any included files first,
-- and runs some extra validation checks.
parseRulesFile :: FilePath -> ExceptT String IO CsvRules
parseRulesFile f =
  -- read, expand includes relative to the file's directory, then parse+validate
  liftIO (readFile' f >>= expandIncludes (takeDirectory f)) >>= parseAndValidateCsvRules f
-- | Inline all files referenced by include directives in this hledger CSV
-- rules text, recursively. Included file paths may be relative to the
-- directory of the provided file path.
-- This is a cheap hack to avoid rewriting the CSV rules parser.
expandIncludes :: FilePath -> T.Text -> IO T.Text
expandIncludes dir content = T.unlines <$> mapM (expandLine dir) (T.lines content)
  where
    -- Replace a single "include PATH" line with that file's (recursively
    -- expanded) contents; leave every other line untouched.
    expandLine d line =
      case line of
        (T.stripPrefix "include " -> Just f) -> expandIncludes d' =<< T.readFile f'
          where
            f' = d </> dropWhile isSpace (T.unpack f)
            d' = takeDirectory f'
        _ -> return line
-- | An error-throwing action that parses this text as CSV conversion rules
-- and runs some extra validation checks. The file path is for error messages.
parseAndValidateCsvRules :: FilePath -> T.Text -> ExceptT String IO CsvRules
parseAndValidateCsvRules rulesfile s = do
  let rules = parseCsvRules rulesfile s
  case rules of
    Left e -> ExceptT $ return $ Left $ parseErrorPretty e
    Right r -> do
      r_ <- liftIO $ runExceptT $ validateRules r
      -- re-wrap a validation failure as a pretty parse error at this file
      ExceptT $ case r_ of
        Left s -> return $ Left $ parseErrorPretty $ mpMkParseError rulesfile s
        Right r -> return $ Right r
-- | Parse this text as CSV conversion rules. The file path is for error messages.
parseCsvRules :: FilePath -> T.Text -> Either (ParseError Char MPErr) CsvRules
-- parseCsvRules rulesfile s = runParser csvrulesfile nullrules{baseAccount=takeBaseName rulesfile} rulesfile s
parseCsvRules rulesfile s =
  -- `rules` is the initial rules value threaded as parser state
  -- (presumably the defaults defined earlier in this module — TODO confirm)
  runParser (evalStateT rulesp rules) rulesfile s
-- | Return the validated rules, or an error.
-- Checks that a date field is assigned, and that exactly one amount style is
-- used: either the single amount field, or both amount-in and amount-out.
validateRules :: CsvRules -> ExceptT String IO CsvRules
validateRules rules = do
  unless (isAssigned "date") $ ExceptT $ return $ Left "Please specify (at top level) the date field. Eg: date %1\n"
  unless ((amount && not (amountin || amountout)) ||
          (not amount && (amountin && amountout)))
    $ ExceptT $ return $ Left "Please specify (at top level) either the amount field, or both the amount-in and amount-out fields. Eg: amount %2\n"
  ExceptT $ return $ Right rules
  where
    amount = isAssigned "amount"
    amountin = isAssigned "amount-in"
    amountout = isAssigned "amount-out"
    -- assigned either at top level or in any conditional block
    -- (an empty record is passed, so only the existence of a rule matters)
    isAssigned f = isJust $ getEffectiveAssignment rules [] f
-- parsers

-- | The top-level rules file parser: any number of directives, field name
-- lists, field assignments, conditional blocks and blank/comment lines, in
-- any order, accumulated into the parser's CsvRules state.
rulesp :: CsvRulesParser CsvRules
rulesp = do
  many $ choiceInState
    [blankorcommentlinep <?> "blank or comment line"
    ,(directivep >>= modify' . addDirective) <?> "directive"
    ,(fieldnamelistp >>= modify' . setIndexesAndAssignmentsFromList) <?> "field name list"
    ,(fieldassignmentp >>= modify' . addAssignment) <?> "field assignment"
    ,(conditionalblockp >>= modify' . addConditionalBlock) <?> "conditional block"
    ]
  eof
  r <- get
  -- items were prepended while parsing; restore file order before returning
  return r{rdirectives=reverse $ rdirectives r
          ,rassignments=reverse $ rassignments r
          ,rconditionalblocks=reverse $ rconditionalblocks r
          }
-- | A line that is blank or contains only a comment.
blankorcommentlinep :: CsvRulesParser ()
blankorcommentlinep = lift (pdbg 3 "trying blankorcommentlinep") >> choiceInState [blanklinep, commentlinep]

-- | A line containing only whitespace.
blanklinep :: CsvRulesParser ()
blanklinep = lift (many spacenonewline) >> newline >> return () <?> "blank line"

-- | A line whose first non-space character is a comment character.
commentlinep :: CsvRulesParser ()
commentlinep = lift (many spacenonewline) >> commentcharp >> lift restofline >> return () <?> "comment line"

-- | One of the characters which can introduce a comment line.
commentcharp :: CsvRulesParser Char
commentcharp = oneOf (";#*" :: [Char])
-- | A rules directive: a known directive name, optionally followed by a
-- colon and/or whitespace and a value extending to end of line.
directivep :: CsvRulesParser (DirectiveName, String)
directivep = (do
  lift $ pdbg 3 "trying directive"
  d <- fmap T.unpack $ choiceInState $ map (lift . mptext . T.pack) directives
  v <- (((char ':' >> lift (many spacenonewline)) <|> lift (some spacenonewline)) >> directivevalp)
       <|> (optional (char ':') >> lift (many spacenonewline) >> lift eolof >> return "")
  return (d, v)
  ) <?> "directive"

-- The directive names currently recognised in CSV rules files.
directives =
  ["date-format"
  -- ,"default-account1"
  -- ,"default-currency"
  -- ,"skip-lines" -- old
  ,"skip"
  ,"newest-first"
  -- ,"base-account"
  -- ,"base-currency"
  ]

-- | A directive's value: the rest of the line.
directivevalp :: CsvRulesParser String
directivevalp = anyChar `manyTill` lift eolof
-- | A "fields" declaration naming the CSV columns in order,
-- eg: fields date, description, amount
fieldnamelistp :: CsvRulesParser [CsvFieldName]
fieldnamelistp = (do
  lift $ pdbg 3 "trying fieldnamelist"
  string "fields"
  optional $ char ':'
  lift (some spacenonewline)
  let separator = lift (many spacenonewline) >> char ',' >> lift (many spacenonewline)
  f <- fromMaybe "" <$> optional fieldnamep
  fs <- some $ (separator >> fromMaybe "" <$> optional fieldnamep)
  lift restofline
  -- field names are matched case-insensitively; normalise to lower case
  return $ map (map toLower) $ f:fs
  ) <?> "field name list"

-- | A CSV field name, optionally double-quoted.
fieldnamep :: CsvRulesParser String
fieldnamep = quotedfieldnamep <|> barefieldnamep

-- | A double-quoted field name, which may contain spaces and commas.
quotedfieldnamep :: CsvRulesParser String
quotedfieldnamep = do
  char '"'
  f <- some $ noneOf ("\"\n:;#~" :: [Char])
  char '"'
  return f

-- | An unquoted field name: no whitespace, commas or comment characters.
barefieldnamep :: CsvRulesParser String
barefieldnamep = some $ noneOf (" \t\n,;#~" :: [Char])
-- | A journal field assignment: a known journal field name, a separator,
-- and a value template extending to end of line.
fieldassignmentp :: CsvRulesParser (JournalFieldName, FieldTemplate)
fieldassignmentp = do
  lift $ pdbg 3 "trying fieldassignmentp"
  f <- journalfieldnamep
  assignmentseparatorp
  v <- fieldvalp
  return (f,v)
  <?> "field assignment"

-- | One of the recognised journal field (or pseudo field) names.
journalfieldnamep :: CsvRulesParser String
journalfieldnamep = do
  lift (pdbg 2 "trying journalfieldnamep")
  T.unpack <$> choiceInState (map (lift . mptext . T.pack) journalfieldnames)

-- Transaction fields and pseudo fields for CSV conversion.
-- Names must precede any other name they contain, for the parser
-- (amount-in before amount; date2 before date). TODO: fix
journalfieldnames = [
   "account1"
  ,"account2"
  ,"amount-in"
  ,"amount-out"
  ,"amount"
  ,"balance"
  ,"code"
  ,"comment"
  ,"currency"
  ,"date2"
  ,"date"
  ,"description"
  ,"status"
  ]

-- | The separator between a field name and its value: a colon (possibly
-- preceded by whitespace) or just a space character.
assignmentseparatorp :: CsvRulesParser ()
assignmentseparatorp = do
  lift $ pdbg 3 "trying assignmentseparatorp"
  choice [
    -- try (lift (many spacenonewline) >> oneOf ":="),
    try (lift (many spacenonewline) >> char ':'),
    spaceChar
    ]
  _ <- lift (many spacenonewline)
  return ()

-- | A field value template: the rest of the line.
fieldvalp :: CsvRulesParser String
fieldvalp = do
  lift $ pdbg 2 "trying fieldvalp"
  anyChar `manyTill` lift eolof
-- | A conditional block: "if" followed by one or more record matchers,
-- then one or more indented field assignments applied when they all match.
conditionalblockp :: CsvRulesParser ConditionalBlock
conditionalblockp = do
  lift $ pdbg 3 "trying conditionalblockp"
  string "if" >> lift (many spacenonewline) >> optional newline
  ms <- some recordmatcherp
  as <- many (lift (some spacenonewline) >> fieldassignmentp)
  when (null as) $
    fail "start of conditional block found, but no assignment rules afterward\n(assignment rules in a conditional block should be indented)\n"
  return (ms, as)
  <?> "conditional block"

-- | One record matcher: an optional match operator, then one or more
-- regular expression patterns, one per line.
recordmatcherp :: CsvRulesParser [String]
recordmatcherp = do
  lift $ pdbg 2 "trying recordmatcherp"
  -- pos <- currentPos
  _ <- optional (matchoperatorp >> lift (many spacenonewline) >> optional newline)
  ps <- patternsp
  when (null ps) $
    fail "start of record matcher found, but no patterns afterward\n(patterns should not be indented)\n"
  return ps
  <?> "record matcher"

-- | The supported match operators (currently just "~").
matchoperatorp :: CsvRulesParser String
matchoperatorp = fmap T.unpack $ choiceInState $ map mptext
  ["~"
  -- ,"!~"
  -- ,"="
  -- ,"!="
  ]

-- | Zero or more regular expression patterns.
patternsp :: CsvRulesParser [String]
patternsp = do
  lift $ pdbg 3 "trying patternsp"
  ps <- many regexp
  return ps

-- | A regular expression pattern: a line starting with a non-space
-- character that is not a match operator, stripped of surrounding space.
regexp :: CsvRulesParser String
regexp = do
  lift $ pdbg 3 "trying regexp"
  notFollowedBy matchoperatorp
  c <- lift nonspace
  cs <- anyChar `manyTill` lift eolof
  return $ strip $ c:cs
-- fieldmatcher = do
-- pdbg 2 "trying fieldmatcher"
-- f <- fromMaybe "all" `fmap` (optional $ do
-- f' <- fieldname
-- lift (many spacenonewline)
-- return f')
-- char '~'
-- lift (many spacenonewline)
-- ps <- patterns
-- let r = "(" ++ intercalate "|" ps ++ ")"
-- return (f,r)
-- <?> "field matcher"
--------------------------------------------------------------------------------
-- Converting CSV records to journal transactions

-- | One parsed CSV record: a list of field values.
type CsvRecord = [String]

-- Convert a CSV record to a transaction using the rules, or raise an
-- error if the data can not be parsed.
transactionFromCsvRecord :: SourcePos -> CsvRules -> CsvRecord -> Transaction
transactionFromCsvRecord sourcepos rules record = t
  where
    -- rule lookup helpers specialised to these rules / this record
    mdirective = (`getDirective` rules)
    mfieldtemplate = getEffectiveAssignment rules record
    render = renderTemplate rules record
    mskip = mdirective "skip"
    mdefaultcurrency = mdirective "default-currency"
    mparsedate = parseDateWithFormatOrDefaultFormats (mdirective "date-format")
    -- render each field using its template and the csv record, and
    -- in some cases parse the rendered string (eg dates and amounts)
    mdateformat = mdirective "date-format"
    date = render $ fromMaybe "" $ mfieldtemplate "date"
    date' = fromMaybe (error' $ dateerror "date" date mdateformat) $ mparsedate date
    mdate2 = maybe Nothing (Just . render) $ mfieldtemplate "date2"
    mdate2' = maybe Nothing (maybe (error' $ dateerror "date2" (fromMaybe "" mdate2) mdateformat) Just . mparsedate) mdate2
    -- NOTE: the mdateformat parameter below shadows the binding above
    dateerror datefield value mdateformat = unlines
      ["error: could not parse \""++value++"\" as a date using date format "++maybe "\"YYYY/M/D\", \"YYYY-M-D\" or \"YYYY.M.D\"" show mdateformat
      ,"the CSV record is: "++intercalate ", " (map show record)
      ,"the "++datefield++" rule is: "++(fromMaybe "required, but missing" $ mfieldtemplate datefield)
      ,"the date-format is: "++fromMaybe "unspecified" mdateformat
      ,"you may need to "
       ++"change your "++datefield++" rule, "
       ++maybe "add a" (const "change your") mdateformat++" date-format rule, "
       ++"or "++maybe "add a" (const "change your") mskip++" skip rule"
      ,"for m/d/y or d/m/y dates, use date-format %-m/%-d/%Y or date-format %-d/%-m/%Y"
      ]
    -- no status rule means Unmarked; otherwise the rendered value must
    -- parse as a status mark (*, ! or empty)
    status =
      case mfieldtemplate "status" of
        Nothing -> Unmarked
        Just str -> either statuserror id .
                    runParser (statusp <* eof) "" .
                    T.pack $ render str
          where
            statuserror err = error' $ unlines
              ["error: could not parse \""++str++"\" as a cleared status (should be *, ! or empty)"
              ,"the parse error is: "++show err
              ]
    code = maybe "" render $ mfieldtemplate "code"
    description = maybe "" render $ mfieldtemplate "description"
    comment = maybe "" render $ mfieldtemplate "comment"
    precomment = maybe "" render $ mfieldtemplate "precomment"
    currency = maybe (fromMaybe "" mdefaultcurrency) render $ mfieldtemplate "currency"
    -- prepend the currency, then canonicalise the sign, then parse
    amountstr = (currency++) $ simplifySign $ getAmountStr rules record
    amount = either amounterror (Mixed . (:[])) $ runParser (evalStateT (amountp <* eof) mempty) "" $ T.pack amountstr
    amounterror err = error' $ unlines
      ["error: could not parse \""++amountstr++"\" as an amount"
      ,showRecord record
      ,"the amount rule is: "++(fromMaybe "" $ mfieldtemplate "amount")
      ,"the currency rule is: "++(fromMaybe "unspecified" $ mfieldtemplate "currency")
      ,"the default-currency is: "++fromMaybe "unspecified" mdefaultcurrency
      ,"the parse error is: "++show err
      ,"you may need to "
       ++"change your amount or currency rules, "
       ++"or "++maybe "add a" (const "change your") mskip++" skip rule"
      ]
    amount1 = amount
    -- convert balancing amount to cost like hledger print, so eg if
    -- amount1 is "10 GBP @@ 15 USD", amount2 will be "-15 USD".
    amount2 = costOfMixedAmount (-amount)
    -- NOTE: this local `or` shadows Prelude.or within this where clause
    s `or` def = if null s then def else s
    defaccount1 = fromMaybe "unknown" $ mdirective "default-account1"
    defaccount2 = case isNegativeMixedAmount amount2 of
                    Just True -> "income:unknown"
                    _ -> "expenses:unknown"
    account1 = T.pack $ maybe "" render (mfieldtemplate "account1") `or` defaccount1
    account2 = T.pack $ maybe "" render (mfieldtemplate "account2") `or` defaccount2
    -- an all-whitespace balance value means no balance assertion
    balance = maybe Nothing (parsebalance.render) $ mfieldtemplate "balance"
    parsebalance str
      | all isSpace str = Nothing
      | otherwise = Just $ (either (balanceerror str) id $ runParser (evalStateT (amountp <* eof) mempty) "" $ T.pack $ (currency++) $ simplifySign str, nullsourcepos)
    balanceerror str err = error' $ unlines
      ["error: could not parse \""++str++"\" as balance amount"
      ,showRecord record
      ,"the balance rule is: "++(fromMaybe "" $ mfieldtemplate "balance")
      ,"the currency rule is: "++(fromMaybe "unspecified" $ mfieldtemplate "currency")
      ,"the default-currency is: "++fromMaybe "unspecified" mdefaultcurrency
      ,"the parse error is: "++show err
      ]
    -- build the transaction (t is knot-tied into its own postings)
    t = nulltransaction{
      tsourcepos = genericSourcePos sourcepos,
      tdate = date',
      tdate2 = mdate2',
      tstatus = status,
      tcode = T.pack code,
      tdescription = T.pack description,
      tcomment = T.pack comment,
      tpreceding_comment_lines = T.pack precomment,
      tpostings =
        [posting {paccount=account1, pamount=amount1, ptransaction=Just t, pbalanceassertion=balance}
        ,posting {paccount=account2, pamount=amount2, ptransaction=Just t}
        ]
      }
-- | Get this record's amount string, from the "amount" rule if assigned,
-- otherwise from the "amount-in"/"amount-out" pair (negating amount-out).
-- Raises an error' if neither style, both styles, or conflicting/empty
-- values are found.
getAmountStr :: CsvRules -> CsvRecord -> String
getAmountStr rules record =
  let
    mamount = getEffectiveAssignment rules record "amount"
    mamountin = getEffectiveAssignment rules record "amount-in"
    mamountout = getEffectiveAssignment rules record "amount-out"
    render = fmap (strip . renderTemplate rules record)
  in
    case (render mamount, render mamountin, render mamountout) of
      (Just "", Nothing, Nothing) -> error' $ "amount has no value\n"++showRecord record
      (Just a, Nothing, Nothing) -> a
      (Nothing, Just "", Just "") -> error' $ "neither amount-in or amount-out has a value\n"++showRecord record
      (Nothing, Just i, Just "") -> i
      (Nothing, Just "", Just o) -> negateStr o
      -- both non-empty is ambiguous
      (Nothing, Just _, Just _) -> error' $ "both amount-in and amount-out have a value\n"++showRecord record
      _ -> error' $ "found values for amount and for amount-in/amount-out - please use either amount or amount-in/amount-out\n"++showRecord record
type CsvAmountString = String

-- | Canonicalise the sign in a CSV amount string.
-- Such strings can be parenthesized, which is equivalent to having a minus sign.
-- Also they can end up with a double minus sign, which cancels out.
simplifySign :: CsvAmountString -> CsvAmountString
simplifySign amt = case amt of
  '(':rest | not (null rest), last rest == ')' -> simplifySign (negateStr (init rest))
  '-':'-':rest                                 -> rest
  _                                            -> amt

-- | Toggle the leading minus sign: strip it if present, add one if absent.
negateStr :: String -> String
negateStr str = case str of
  '-':rest -> rest
  _        -> '-' : str
-- | Show a (approximate) recreation of the original CSV record.
showRecord :: CsvRecord -> String
showRecord fields =
  "the CSV record is: " ++ concat (intersperse ", " (show <$> fields))
-- | Given the conversion rules, a CSV record and a journal entry field name, find
-- the template value ultimately assigned to this field, either at top
-- level or in a matching conditional block. Conditional blocks'
-- patterns are matched against an approximation of the original CSV
-- record: all the field values with commas intercalated.
getEffectiveAssignment :: CsvRules -> CsvRecord -> JournalFieldName -> Maybe FieldTemplate
getEffectiveAssignment rules record f = lastMay $ assignmentsFor f
  where
    -- later assignments win (top-level first, then conditional blocks in
    -- file order), hence the lastMay above
    assignmentsFor f = map snd $ filter ((==f).fst) $ toplevelassignments ++ conditionalassignments
      where
        toplevelassignments = rassignments rules
        -- only blocks that assign this field AND whose matchers all match
        conditionalassignments = concatMap snd $ filter blockMatches $ blocksAssigning f
          where
            blocksAssigning f = filter (any ((==f).fst) . snd) $ rconditionalblocks rules
            blockMatches :: ConditionalBlock -> Bool
            blockMatches (matchers,_) = all matcherMatches matchers
              where
                matcherMatches :: RecordMatcher -> Bool
                -- matcherMatches pats = any patternMatches pats
                -- the patterns within one matcher are OR'd into one regex
                matcherMatches pats = patternMatches $ "(" ++ intercalate "|" pats ++ ")"
                  where
                    patternMatches :: RegexpPattern -> Bool
                    patternMatches pat = regexMatchesCI pat csvline
                      where
                        csvline = intercalate "," record
-- | Render a field template against this record: each %N (1-based field
-- index) or %NAME (declared CSV field name) reference is replaced by the
-- corresponding field value; unresolvable references are left as the bare
-- reference text without the % sign.
-- NOTE(review): the regex range [A-z] also matches the punctuation between
-- Z and a ([, \, ], ^, _, `) — presumably [A-Za-z] was intended; confirm.
renderTemplate :: CsvRules -> CsvRecord -> FieldTemplate -> String
renderTemplate rules record t = regexReplaceBy "%[A-z0-9]+" replace t
  where
    replace ('%':pat) = maybe pat (\i -> atDef "" record (i-1)) mi
      where
        -- a numeric reference is used directly; a name is looked up in the
        -- declared field indexes
        mi | all isDigit pat = readMay pat
           | otherwise = lookup pat $ rcsvfieldindexes rules
    replace pat = pat
-- Parse the date string using the specified date-format, or if unspecified try these default formats:
-- YYYY/MM/DD, YYYY-MM-DD, YYYY.MM.DD, MM/DD/YYYY (month and day can be 1 or 2 digits, year must be 4).
parseDateWithFormatOrDefaultFormats :: Maybe DateFormat -> String -> Maybe Day
parseDateWithFormatOrDefaultFormats mformat s = firstJust $ map parsewith formats
  where
    -- time >= 1.5 renamed parseTime to parseTimeM
    parsetime =
#if MIN_VERSION_time(1,5,0)
      parseTimeM True
#else
      parseTime
#endif
    parsewith = flip (parsetime defaultTimeLocale) s
    -- a user-specified format replaces the whole default list
    formats = maybe
               ["%Y/%-m/%-d"
               ,"%Y-%-m-%-d"
               ,"%Y.%-m.%-d"
               -- ,"%-m/%-d/%Y"
               -- ,parseTime defaultTimeLocale "%Y/%m/%e" (take 5 s ++ "0" ++ drop 5 s)
               -- ,parseTime defaultTimeLocale "%Y-%m-%e" (take 5 s ++ "0" ++ drop 5 s)
               -- ,parseTime defaultTimeLocale "%m/%e/%Y" ('0':s)
               -- ,parseTime defaultTimeLocale "%m-%e-%Y" ('0':s)
               ]
               (:[])
               mformat
--------------------------------------------------------------------------------
-- tests

tests_Hledger_Read_CsvReader = TestList (test_parser)
                               -- ++ test_description_parsing)

-- test_description_parsing = [
--       "description-field 1" ~: assertParseDescription "description-field 1\n" [FormatField False Nothing Nothing (FieldNo 1)]
--     , "description-field 1 " ~: assertParseDescription "description-field 1 \n" [FormatField False Nothing Nothing (FieldNo 1)]
--     , "description-field %(1)" ~: assertParseDescription "description-field %(1)\n" [FormatField False Nothing Nothing (FieldNo 1)]
--     , "description-field %(1)/$(2)" ~: assertParseDescription "description-field %(1)/%(2)\n" [
--       FormatField False Nothing Nothing (FieldNo 1)
--       , FormatLiteral "/"
--       , FormatField False Nothing Nothing (FieldNo 2)
--       ]
--     ]
--   where
--     assertParseDescription string expected = do assertParseEqual (parseDescription string) (rules {descriptionField = expected})
--     parseDescription :: String -> Either ParseError CsvRules
--     parseDescription x = runParser descriptionfieldWrapper rules "(unknown)" x
--     descriptionfieldWrapper :: GenParser Char CsvRules CsvRules
--     descriptionfieldWrapper = do
--       descriptionfield
--       r <- getState
--       return r

-- | HUnit cases exercising the rules parser on small inputs.
test_parser = [

   "convert rules parsing: empty file" ~: do
     -- let assertMixedAmountParse parseresult mixedamount =
     --       (either (const "parse error") showMixedAmountDebug parseresult) ~?= (showMixedAmountDebug mixedamount)
     assertParseEqual (parseCsvRules "unknown" "") rules

  -- ,"convert rules parsing: accountrule" ~: do
  --     assertParseEqual (parseWithState rules accountrule "A\na\n") -- leading blank line required
  --                 ([("A",Nothing)], "a")

  ,"convert rules parsing: trailing comments" ~: do
     assertParse (parseWithState' rules rulesp "skip\n# \n#\n")

  ,"convert rules parsing: trailing blank lines" ~: do
     assertParse (parseWithState' rules rulesp "skip\n\n \n")

  ,"convert rules parsing: empty field value" ~: do
     assertParse (parseWithState' rules rulesp "account1 \nif foo\n account2 foo\n")

  -- not supported
  -- ,"convert rules parsing: no final newline" ~: do
  --     assertParse (parseWithState rules csvrulesfile "A\na")
  --     assertParse (parseWithState rules csvrulesfile "A\na\n# \n#")
  --     assertParse (parseWithState rules csvrulesfile "A\na\n\n ")

  -- (rules{
  --   -- dateField=Maybe FieldPosition,
  --   -- statusField=Maybe FieldPosition,
  --   -- codeField=Maybe FieldPosition,
  --   -- descriptionField=Maybe FieldPosition,
  --   -- amountField=Maybe FieldPosition,
  --   -- currencyField=Maybe FieldPosition,
  --   -- baseCurrency=Maybe String,
  --   -- baseAccount=AccountName,
  --   accountRules=[
  --     ([("A",Nothing)], "a")
  --     ]
  --   })

  ]
| ony/hledger | hledger-lib/Hledger/Read/CsvReader.hs | gpl-3.0 | 33,895 | 0 | 23 | 7,874 | 6,665 | 3,479 | 3,186 | 494 | 7 |
{-# LANGUAGE Arrows #-}
module Main where
import System.IO
import Control.Monad.Random
import FRP.Yampa
import SIR
-- | A SIR agent: a signal function observing all agents' states and
-- yielding its own current state.
type SIRAgent = SF [SIRState] SIRState

-- total number of agents in the population
agentCount :: Int
agentCount = 1000

-- number of agents that start out infected
infectedCount :: Int
infectedCount = 1

-- fixed RNG seed, for reproducible runs
rngSeed :: Int
rngSeed = 42

-- simulation sampling interval
dt :: DTime
dt = 0.01 -- 0.0025

-- total simulated time
t :: Time
t = 150
-- | A deterministic test agent: emits Susceptible -> Infected -> Recovered,
-- switching one step at a time (dSwitch/switch delay the transitions so each
-- state is observable for one sample).
sirTest :: SIRAgent
sirTest = dSwitch
            susceptible
            (const infected)
  where
    susceptible :: SF [SIRState] (SIRState, Event ())
    susceptible = proc _ -> returnA -< (Infected, Event ())

    infected :: SIRAgent
    infected = switch
                 infectedAux
                 (const recovered)
      where
        infectedAux :: SF [SIRState] (SIRState, Event ())
        infectedAux = proc _ -> returnA -< (Recovered, Event ())

    recovered :: SIRAgent
    recovered = arr (const Recovered)
-- | Run the SIR simulation and write the aggregated population dynamics
-- to a Matlab/Octave (.m) file.
main :: IO ()
main = do
  hSetBuffering stdout NoBuffering
  let g = mkStdGen rngSeed
  let as = initAgents agentCount infectedCount
  let ass = runSimulation g t dt as
  let dyns = aggregateAllStates ass
  let fileName = "STEP_2_YAMPA_DYNAMICS_" ++ show agentCount ++ "agents.m"
  writeAggregatesToFile fileName dyns
-- | Run the simulation for the given duration and step size, returning the
-- population state at each step.
-- NOTE: the t and dt parameters shadow the top-level constants of the same names.
runSimulation :: RandomGen g
              => g
              -> Time
              -> DTime
              -> [SIRState]
              -> [[SIRState]]
runSimulation g t dt as = embed (stepSimulation sfs as) ((), dts)
  where
    steps = floor $ t / dt
    -- constant-dt sample schedule with no new input values
    dts = replicate steps (dt, Nothing)
    n = length as
    -- one independent generator per agent
    (rngs, _) = rngSplits g n []
    sfs = zipWith sirAgent rngs as
-- | Split off n independent generators from g (accumulated onto acc),
-- also returning the final remaining generator.
rngSplits :: RandomGen g => g -> Int -> [g] -> ([g], g)
rngSplits g n acc = case n of
  0 -> (acc, g)
  _ -> let (gNext, gRest) = split g
       in rngSplits gRest (n - 1) (gNext : acc)
-- | Run all agent signal functions in parallel with pSwitch. Every agent
-- receives the population state captured in `as`; whenever new outputs are
-- produced, the whole simulation is restarted with those outputs as the new
-- population state.
stepSimulation :: [SIRAgent] -> [SIRState] -> SF () [SIRState]
stepSimulation sfs as =
    pSwitch
      -- route: feed the captured population state to every agent
      (\_ sfs' -> (map (\sf -> (as, sf)) sfs'))
      sfs
      -- if we switch immediately we end up in endless switching, so always wait for 'next'
      (switchingEvt >>> notYet)
      stepSimulation
  where
    -- fire on every sample, carrying the agents' freshly produced states
    switchingEvt :: SF ((), [SIRState]) (Event [SIRState])
    switchingEvt = arr (\(_, newAs) -> Event newAs)
-- | Create the signal function for an agent starting in the given state.
sirAgent :: RandomGen g => g -> SIRState -> SIRAgent
sirAgent g Susceptible = susceptibleAgent g
sirAgent g Infected = infectedAgent g
sirAgent _ Recovered = recoveredAgent
-- | A susceptible agent: occasionally makes contact with a random agent;
-- if that agent is Infected, it becomes Infected itself with probability
-- infectivity, then behaves as an infected agent.
susceptibleAgent :: RandomGen g => g -> SIRAgent
susceptibleAgent g =
    -- NOTE: we need to use a delayed (d)Switch here because
    -- according to the SIR model only a single state-transition
    -- should occur during one step. If we are not using a delay
    -- we could go from susceptible directly to recovered
    -- if infected immediately generates a receovery event -
    -- which probability is not very high but still possible
    -- NOTE: we tested it with delay and without, it has no influence
    -- it seems that the probability is way too low for it to happen?
    dSwitch
      (susceptible g)
      (const $ infectedAgent g)
  where
    -- NOTE: the g parameter shadows the outer g (the same value is passed in)
    susceptible :: RandomGen g => g -> SF [SIRState] (SIRState, Event ())
    susceptible g = proc as -> do
      makeContact <- occasionally g (1 / contactRate) () -< ()
      -- NOTE: strangely if we are not splitting all if-then-else into
      -- separate but only a single one, then it seems not to work,
      -- dunno why
      if isEvent makeContact
        then (do
          a <- drawRandomElemSF g -< as
          case a of
            Infected -> do
              i <- randomBoolSF g infectivity -< ()
              if i
                then returnA -< (Infected, Event ())
                else returnA -< (Susceptible, NoEvent)
            _ -> returnA -< (Susceptible, NoEvent))
        else returnA -< (Susceptible, NoEvent)
-- | An infected agent: recovers after an occasionally-generated event with
-- mean illnessDuration, then behaves as a recovered agent.
infectedAgent :: RandomGen g => g -> SIRAgent
infectedAgent g =
    switch
      infected
      (const recoveredAgent)
  where
    infected :: SF [SIRState] (SIRState, Event ())
    infected = proc _ -> do
      recEvt <- occasionally g illnessDuration () -< ()
      -- report Recovered on the very sample the recovery event fires
      let a = event Infected (const Recovered) recEvt
      returnA -< (a, recEvt)

-- | A recovered agent: constantly Recovered, forever.
recoveredAgent :: SIRAgent
recoveredAgent = arr (const Recovered)
-- | A signal function producing True with probability p at each sample.
randomBoolSF :: RandomGen g => g -> Double -> SF () Bool
randomBoolSF g p = proc _ -> do
  r <- noiseR ((0, 1) :: (Double, Double)) g -< ()
  returnA -< (r <= p)

-- | A signal function that picks a random element of its input list.
-- NOTE(review): uses the partial (!!) — an empty input list crashes, and if
-- noiseR can yield exactly 1.0 then `floor idx == len` is out of bounds;
-- confirm noiseR's range semantics.
drawRandomElemSF :: RandomGen g => g -> SF [a] a
drawRandomElemSF g = proc as -> do
  r <- noiseR ((0, 1) :: (Double, Double)) g -< ()
  let len = length as
  let idx = fromIntegral len * r
  let a = as !! floor idx
  returnA -< a
-- | Build the initial population: n agents in total, of which i are
-- Infected and the remaining (n - i) are Susceptible (susceptibles first).
initAgents :: Int -> Int -> [SIRState]
initAgents n i = replicate (n - i) Susceptible ++ replicate i Infected
{-# LANGUAGE FlexibleInstances #-}
module HW08.Main where
import Test.HUnit
import Test.Framework as TF (defaultMain, testGroup, Test)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import HW08.Employee
import HW08.Party
-- | Run the HW08 test suite via test-framework.
main :: IO ()
main = TF.defaultMain tests

-- | All test groups. Both groups are currently placeholders: their cases
-- are commented out.
tests :: [TF.Test]
tests = [ testGroup "QuickCheck HW08" [
            -- testProperty "prop_nextLevel" prop_nextLevel
          ],
          testGroup "HUnit HW08" [
            {- let i = 3
                   jl = (Append (Size 0) (Single (Size 1) 1) (Append (Size 1) (Append (Size 1) Empty (Single (Size 2) 2)) (Single (Size 2) 5)))::JoinList Size Int in
               testCase "indexJ" (indexJ i jl @=? jlToList jl !!? i)-}
          ]
        ]
--prop_nextLevel :: Employee -> [(GuestList, GuestList)] -> Bool
--prop_nextLevel e gls = let (wb, nb) = nextLevel e gls in
{-
instance Arbitrary (T.Tree Int) where
arbitrary = frequency [ (1, t0)
, (5, T.Node <$> arbitrary <*> sequence [l, l]) ]
where t3 = T.Node <$> arbitrary <*> sequence [arbitrary, arbitrary, arbitrary]
t0 = T.Node <$> arbitrary <*> pure []
l = frequency [ (1, t3), (5, t0) ]
-}
| martinvlk/cis194-homeworks | tests/HW08/MainT.hs | gpl-3.0 | 1,241 | 0 | 7 | 317 | 124 | 76 | 48 | 14 | 1 |
module Chap03.Exercise09 where
import Chap03.Data.RedBlackTree
-- | Build a red-black tree from an ascending list in linear time.
-- Nodes are made black; where the two halves of a split differ in depth,
-- the deeper left subtree's root is repainted red so the black height
-- stays uniform across the tree.
fromOrdList :: Ord a => [a] -> RedBlackTree a
fromOrdList xs =
  let -- go len ys builds a tree from the first len elements of ys and
      -- returns (tree, black count along a path minus one, unconsumed rest).
      -- NOTE: the `a` in this local signature is a fresh type variable (go
      -- is independently polymorphic), so no ScopedTypeVariables is needed.
      go :: Int -> [a] -> (RedBlackTree a, Int, [a])
      go 0 xs' = (E, -1, xs')
      go 1 (x:xs') = (T B E x E, 0, xs')
      go len xs' =
        let mid = len `div` 2
            (p, q) = (mid, len - mid - 1)
            -- the element after the left subtree becomes this node's root
            (l, bdl, (x:xs'')) = go p xs'
            (r, bdr, xs''') = go q xs''
            -- equal black depths: keep left black; otherwise repaint its
            -- root red to bring its black depth down to the right's
        in (T B (if bdl == bdr then l else paint R l) x r, bdr + 1, xs''')
      (t, _, _) = go (length xs) xs
  in t
| stappit/okasaki-pfds | src/Chap03/Exercise09.hs | gpl-3.0 | 564 | 0 | 15 | 211 | 303 | 166 | 137 | 15 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.UnSampledReports.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Create a new unsampled report.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.unsampledReports.insert@.
module Network.Google.Resource.Analytics.Management.UnSampledReports.Insert
(
-- * REST Resource
ManagementUnSampledReportsInsertResource
-- * Creating a Request
, managementUnSampledReportsInsert
, ManagementUnSampledReportsInsert
-- * Request Lenses
, musriWebPropertyId
, musriProFileId
, musriPayload
, musriAccountId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- NOTE(review): this module looks machine-generated (gogol code generator) —
-- presumably manual edits will be overwritten on regeneration; confirm.

-- | A resource alias for @analytics.management.unsampledReports.insert@ method which the
-- 'ManagementUnSampledReportsInsert' request conforms to.
type ManagementUnSampledReportsInsertResource =
     "analytics" :>
       "v3" :>
         "management" :>
           "accounts" :>
             Capture "accountId" Text :>
               "webproperties" :>
                 Capture "webPropertyId" Text :>
                   "profiles" :>
                     Capture "profileId" Text :>
                       "unsampledReports" :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] UnSampledReport :>
                             Post '[JSON] UnSampledReport

-- | Create a new unsampled report.
--
-- /See:/ 'managementUnSampledReportsInsert' smart constructor.
data ManagementUnSampledReportsInsert = ManagementUnSampledReportsInsert'
    { _musriWebPropertyId :: !Text            -- ^ web property ID (URL path segment)
    , _musriProFileId :: !Text                -- ^ view (profile) ID (URL path segment)
    , _musriPayload :: !UnSampledReport       -- ^ JSON request body
    , _musriAccountId :: !Text                -- ^ account ID (URL path segment)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ManagementUnSampledReportsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'musriWebPropertyId'
--
-- * 'musriProFileId'
--
-- * 'musriPayload'
--
-- * 'musriAccountId'
managementUnSampledReportsInsert
    :: Text -- ^ 'musriWebPropertyId'
    -> Text -- ^ 'musriProFileId'
    -> UnSampledReport -- ^ 'musriPayload'
    -> Text -- ^ 'musriAccountId'
    -> ManagementUnSampledReportsInsert
managementUnSampledReportsInsert pMusriWebPropertyId_ pMusriProFileId_ pMusriPayload_ pMusriAccountId_ =
    ManagementUnSampledReportsInsert'
    { _musriWebPropertyId = pMusriWebPropertyId_
    , _musriProFileId = pMusriProFileId_
    , _musriPayload = pMusriPayload_
    , _musriAccountId = pMusriAccountId_
    }
-- | Web property ID to create the unsampled report for.
musriWebPropertyId :: Lens' ManagementUnSampledReportsInsert Text
musriWebPropertyId
  = lens _musriWebPropertyId
      (\ s a -> s{_musriWebPropertyId = a})

-- | View (Profile) ID to create the unsampled report for.
musriProFileId :: Lens' ManagementUnSampledReportsInsert Text
musriProFileId
  = lens _musriProFileId
      (\ s a -> s{_musriProFileId = a})

-- | Multipart request metadata.
musriPayload :: Lens' ManagementUnSampledReportsInsert UnSampledReport
musriPayload
  = lens _musriPayload (\ s a -> s{_musriPayload = a})

-- | Account ID to create the unsampled report for.
musriAccountId :: Lens' ManagementUnSampledReportsInsert Text
musriAccountId
  = lens _musriAccountId
      (\ s a -> s{_musriAccountId = a})

instance GoogleRequest
         ManagementUnSampledReportsInsert where
        type Rs ManagementUnSampledReportsInsert =
             UnSampledReport
        type Scopes ManagementUnSampledReportsInsert =
             '["https://www.googleapis.com/auth/analytics",
               "https://www.googleapis.com/auth/analytics.edit"]
        -- the positional arguments must follow the capture order of the
        -- resource type: accountId, webPropertyId, profileId, then alt/body
        requestClient ManagementUnSampledReportsInsert'{..}
          = go _musriAccountId _musriWebPropertyId
              _musriProFileId
              (Just AltJSON)
              _musriPayload
              analyticsService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ManagementUnSampledReportsInsertResource)
                      mempty
| rueshyna/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/UnSampledReports/Insert.hs | mpl-2.0 | 4,875 | 0 | 19 | 1,155 | 552 | 327 | 225 | 96 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AccessContextManager.AccessPolicies.ServicePerimeters.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Update a Service Perimeter. The longrunning operation from this RPC will
-- have a successful status once the changes to the Service Perimeter have
-- propagated to long-lasting storage. Service Perimeter containing errors
-- will result in an error response for the first error encountered.
--
-- /See:/ <https://cloud.google.com/access-context-manager/docs/reference/rest/ Access Context Manager API Reference> for @accesscontextmanager.accessPolicies.servicePerimeters.patch@.
module Network.Google.Resource.AccessContextManager.AccessPolicies.ServicePerimeters.Patch
(
-- * REST Resource
AccessPoliciesServicePerimetersPatchResource
-- * Creating a Request
, accessPoliciesServicePerimetersPatch
, AccessPoliciesServicePerimetersPatch
-- * Request Lenses
, apsppXgafv
, apsppUploadProtocol
, apsppUpdateMask
, apsppAccessToken
, apsppUploadType
, apsppPayload
, apsppName
, apsppCallback
) where
import Network.Google.AccessContextManager.Types
import Network.Google.Prelude
-- NOTE(review): this module looks machine-generated (gogol code generator) —
-- presumably manual edits will be overwritten on regeneration; confirm.

-- | A resource alias for @accesscontextmanager.accessPolicies.servicePerimeters.patch@ method which the
-- 'AccessPoliciesServicePerimetersPatch' request conforms to.
type AccessPoliciesServicePerimetersPatchResource =
     "v1" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "updateMask" GFieldMask :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "callback" Text :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON] ServicePerimeter :>
                         Patch '[JSON] Operation

-- | Update a Service Perimeter. The longrunning operation from this RPC will
-- have a successful status once the changes to the Service Perimeter have
-- propagated to long-lasting storage. Service Perimeter containing errors
-- will result in an error response for the first error encountered.
--
-- /See:/ 'accessPoliciesServicePerimetersPatch' smart constructor.
data AccessPoliciesServicePerimetersPatch =
  AccessPoliciesServicePerimetersPatch'
    { _apsppXgafv :: !(Maybe Xgafv)            -- ^ error format query param
    , _apsppUploadProtocol :: !(Maybe Text)    -- ^ upload_protocol query param
    , _apsppUpdateMask :: !(Maybe GFieldMask)  -- ^ updateMask query param
    , _apsppAccessToken :: !(Maybe Text)       -- ^ access_token query param
    , _apsppUploadType :: !(Maybe Text)        -- ^ uploadType query param
    , _apsppPayload :: !ServicePerimeter       -- ^ JSON request body
    , _apsppName :: !Text                      -- ^ resource name (URL path segment)
    , _apsppCallback :: !(Maybe Text)          -- ^ JSONP callback query param
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AccessPoliciesServicePerimetersPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'apsppXgafv'
--
-- * 'apsppUploadProtocol'
--
-- * 'apsppUpdateMask'
--
-- * 'apsppAccessToken'
--
-- * 'apsppUploadType'
--
-- * 'apsppPayload'
--
-- * 'apsppName'
--
-- * 'apsppCallback'
accessPoliciesServicePerimetersPatch
    :: ServicePerimeter -- ^ 'apsppPayload'
    -> Text -- ^ 'apsppName'
    -> AccessPoliciesServicePerimetersPatch
accessPoliciesServicePerimetersPatch pApsppPayload_ pApsppName_ =
  AccessPoliciesServicePerimetersPatch'
    { _apsppXgafv = Nothing
    , _apsppUploadProtocol = Nothing
    , _apsppUpdateMask = Nothing
    , _apsppAccessToken = Nothing
    , _apsppUploadType = Nothing
    , _apsppPayload = pApsppPayload_
    , _apsppName = pApsppName_
    , _apsppCallback = Nothing
    }
-- | V1 error format.
apsppXgafv :: Lens' AccessPoliciesServicePerimetersPatch (Maybe Xgafv)
apsppXgafv
  = lens _apsppXgafv (\ s a -> s{_apsppXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
apsppUploadProtocol :: Lens' AccessPoliciesServicePerimetersPatch (Maybe Text)
apsppUploadProtocol
  = lens _apsppUploadProtocol
      (\ s a -> s{_apsppUploadProtocol = a})

-- | Required. Mask to control which fields get updated. Must be non-empty.
apsppUpdateMask :: Lens' AccessPoliciesServicePerimetersPatch (Maybe GFieldMask)
apsppUpdateMask
  = lens _apsppUpdateMask
      (\ s a -> s{_apsppUpdateMask = a})

-- | OAuth access token.
apsppAccessToken :: Lens' AccessPoliciesServicePerimetersPatch (Maybe Text)
apsppAccessToken
  = lens _apsppAccessToken
      (\ s a -> s{_apsppAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
apsppUploadType :: Lens' AccessPoliciesServicePerimetersPatch (Maybe Text)
apsppUploadType
  = lens _apsppUploadType
      (\ s a -> s{_apsppUploadType = a})

-- | Multipart request metadata.
apsppPayload :: Lens' AccessPoliciesServicePerimetersPatch ServicePerimeter
apsppPayload
  = lens _apsppPayload (\ s a -> s{_apsppPayload = a})

-- | Required. Resource name for the ServicePerimeter. The \`short_name\`
-- component must begin with a letter and only include alphanumeric and
-- \'_\'. Format:
-- \`accessPolicies\/{policy_id}\/servicePerimeters\/{short_name}\`
apsppName :: Lens' AccessPoliciesServicePerimetersPatch Text
apsppName
  = lens _apsppName (\ s a -> s{_apsppName = a})

-- | JSONP
apsppCallback :: Lens' AccessPoliciesServicePerimetersPatch (Maybe Text)
apsppCallback
  = lens _apsppCallback
      (\ s a -> s{_apsppCallback = a})

instance GoogleRequest
         AccessPoliciesServicePerimetersPatch
         where
        type Rs AccessPoliciesServicePerimetersPatch =
             Operation
        type Scopes AccessPoliciesServicePerimetersPatch =
             '["https://www.googleapis.com/auth/cloud-platform"]
        -- positional arguments follow the resource type's capture order:
        -- the name path segment, then the query parameters, then the body
        requestClient
          AccessPoliciesServicePerimetersPatch'{..}
          = go _apsppName _apsppXgafv _apsppUploadProtocol
              _apsppUpdateMask
              _apsppAccessToken
              _apsppUploadType
              _apsppCallback
              (Just AltJSON)
              _apsppPayload
              accessContextManagerService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy AccessPoliciesServicePerimetersPatchResource)
                      mempty
| brendanhay/gogol | gogol-accesscontextmanager/gen/Network/Google/Resource/AccessContextManager/AccessPolicies/ServicePerimeters/Patch.hs | mpl-2.0 | 6,876 | 0 | 17 | 1,433 | 864 | 506 | 358 | 130 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.StorageTransfer.TransferOperations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ <https://cloud.google.com/storage-transfer/docs Storage Transfer API Reference> for @storagetransfer.transferOperations.get@.
module Network.Google.Resource.StorageTransfer.TransferOperations.Get
(
-- * REST Resource
TransferOperationsGetResource
-- * Creating a Request
, transferOperationsGet
, TransferOperationsGet
-- * Request Lenses
, togXgafv
, togUploadProtocol
, togAccessToken
, togUploadType
, togName
, togCallback
) where
import Network.Google.Prelude
import Network.Google.StorageTransfer.Types
-- | A resource alias for @storagetransfer.transferOperations.get@ method
-- which the 'TransferOperationsGet' request conforms to.
type TransferOperationsGetResource
  = "v1"
      :> Capture "name" Text
      :> QueryParam "$.xgafv" Xgafv
      :> QueryParam "upload_protocol" Text
      :> QueryParam "access_token" Text
      :> QueryParam "uploadType" Text
      :> QueryParam "callback" Text
      :> QueryParam "alt" AltJSON
      :> Get '[JSON] Operation

-- | Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ 'transferOperationsGet' smart constructor.
data TransferOperationsGet = TransferOperationsGet'
  { _togXgafv :: !(Maybe Xgafv)
  , _togUploadProtocol :: !(Maybe Text)
  , _togAccessToken :: !(Maybe Text)
  , _togUploadType :: !(Maybe Text)
  , _togName :: !Text
  , _togCallback :: !(Maybe Text)
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'TransferOperationsGet' with the minimum fields
-- required to make a request.  Every optional query parameter starts out as
-- 'Nothing'; use the lenses 'togXgafv', 'togUploadProtocol',
-- 'togAccessToken', 'togUploadType' and 'togCallback' to modify them.
transferOperationsGet
  :: Text -- ^ 'togName'
  -> TransferOperationsGet
transferOperationsGet pTogName_ =
  TransferOperationsGet'
    { _togXgafv = Nothing
    , _togUploadProtocol = Nothing
    , _togAccessToken = Nothing
    , _togUploadType = Nothing
    , _togName = pTogName_
    , _togCallback = Nothing
    }

-- | V1 error format.
togXgafv :: Lens' TransferOperationsGet (Maybe Xgafv)
togXgafv = lens _togXgafv (\s a -> s {_togXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
togUploadProtocol :: Lens' TransferOperationsGet (Maybe Text)
togUploadProtocol =
  lens _togUploadProtocol (\s a -> s {_togUploadProtocol = a})

-- | OAuth access token.
togAccessToken :: Lens' TransferOperationsGet (Maybe Text)
togAccessToken = lens _togAccessToken (\s a -> s {_togAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
togUploadType :: Lens' TransferOperationsGet (Maybe Text)
togUploadType = lens _togUploadType (\s a -> s {_togUploadType = a})

-- | The name of the operation resource.
togName :: Lens' TransferOperationsGet Text
togName = lens _togName (\s a -> s {_togName = a})

-- | JSONP
togCallback :: Lens' TransferOperationsGet (Maybe Text)
togCallback = lens _togCallback (\s a -> s {_togCallback = a})

instance GoogleRequest TransferOperationsGet where
  type Rs TransferOperationsGet = Operation
  type Scopes TransferOperationsGet =
    '["https://www.googleapis.com/auth/cloud-platform"]

  -- Arguments are applied in the order the corresponding pieces appear in
  -- 'TransferOperationsGetResource'.
  requestClient TransferOperationsGet' {..} =
    go
      _togName
      _togXgafv
      _togUploadProtocol
      _togAccessToken
      _togUploadType
      _togCallback
      (Just AltJSON)
      storageTransferService
    where
      go = buildClient (Proxy :: Proxy TransferOperationsGetResource) mempty
| brendanhay/gogol | gogol-storage-transfer/gen/Network/Google/Resource/StorageTransfer/TransferOperations/Get.hs | mpl-2.0 | 4,822 | 0 | 15 | 1,068 | 698 | 409 | 289 | 100 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Enterprises.AcknowledgeNotificationSet
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Acknowledges notifications that were received from
-- Enterprises.PullNotificationSet to prevent subsequent calls from
-- returning the same notifications.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.enterprises.acknowledgeNotificationSet@.
module Network.Google.Resource.AndroidEnterprise.Enterprises.AcknowledgeNotificationSet
(
-- * REST Resource
EnterprisesAcknowledgeNotificationSetResource
-- * Creating a Request
, enterprisesAcknowledgeNotificationSet
, EnterprisesAcknowledgeNotificationSet
-- * Request Lenses
, eansNotificationSetId
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for
-- @androidenterprise.enterprises.acknowledgeNotificationSet@ method which
-- the 'EnterprisesAcknowledgeNotificationSet' request conforms to.
type EnterprisesAcknowledgeNotificationSetResource
  = "androidenterprise"
      :> "v1"
      :> "enterprises"
      :> "acknowledgeNotificationSet"
      :> QueryParam "notificationSetId" Text
      :> QueryParam "alt" AltJSON
      :> Post '[JSON] ()

-- | Acknowledges notifications that were received from
-- Enterprises.PullNotificationSet to prevent subsequent calls from
-- returning the same notifications.
--
-- /See:/ 'enterprisesAcknowledgeNotificationSet' smart constructor.
newtype EnterprisesAcknowledgeNotificationSet = EnterprisesAcknowledgeNotificationSet'
  { _eansNotificationSetId :: Maybe Text
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'EnterprisesAcknowledgeNotificationSet' with the
-- minimum fields required to make a request.  Set the notification set id
-- through the 'eansNotificationSetId' lens.
enterprisesAcknowledgeNotificationSet
  :: EnterprisesAcknowledgeNotificationSet
enterprisesAcknowledgeNotificationSet =
  EnterprisesAcknowledgeNotificationSet' {_eansNotificationSetId = Nothing}

-- | The notification set ID as returned by Enterprises.PullNotificationSet.
-- This must be provided.
eansNotificationSetId :: Lens' EnterprisesAcknowledgeNotificationSet (Maybe Text)
eansNotificationSetId =
  lens _eansNotificationSetId (\s a -> s {_eansNotificationSetId = a})

instance GoogleRequest
         EnterprisesAcknowledgeNotificationSet where
  type Rs EnterprisesAcknowledgeNotificationSet = ()
  type Scopes EnterprisesAcknowledgeNotificationSet =
    '["https://www.googleapis.com/auth/androidenterprise"]
  requestClient EnterprisesAcknowledgeNotificationSet' {..} =
    go _eansNotificationSetId (Just AltJSON) androidEnterpriseService
    where
      go =
        buildClient
          (Proxy :: Proxy EnterprisesAcknowledgeNotificationSetResource)
          mempty
| rueshyna/gogol | gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Enterprises/AcknowledgeNotificationSet.hs | mpl-2.0 | 3,769 | 0 | 13 | 718 | 313 | 193 | 120 | 53 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.InventoryItems.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets one inventory item by ID.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.inventoryItems.get@.
module Network.Google.Resource.DFAReporting.InventoryItems.Get
(
-- * REST Resource
InventoryItemsGetResource
-- * Creating a Request
, inventoryItemsGet
, InventoryItemsGet
-- * Request Lenses
, iigProFileId
, iigId
, iigProjectId
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.inventoryItems.get@ method which the
-- 'InventoryItemsGet' request conforms to.
type InventoryItemsGetResource
  = "dfareporting"
      :> "v2.7"
      :> "userprofiles"
      :> Capture "profileId" (Textual Int64)
      :> "projects"
      :> Capture "projectId" (Textual Int64)
      :> "inventoryItems"
      :> Capture "id" (Textual Int64)
      :> QueryParam "alt" AltJSON
      :> Get '[JSON] InventoryItem

-- | Gets one inventory item by ID.
--
-- /See:/ 'inventoryItemsGet' smart constructor.
data InventoryItemsGet = InventoryItemsGet'
  { _iigProFileId :: !(Textual Int64)
  , _iigId :: !(Textual Int64)
  , _iigProjectId :: !(Textual Int64)
  }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'InventoryItemsGet' with the minimum fields required
-- to make a request.
inventoryItemsGet
  :: Int64 -- ^ 'iigProFileId'
  -> Int64 -- ^ 'iigId'
  -> Int64 -- ^ 'iigProjectId'
  -> InventoryItemsGet
inventoryItemsGet pIigProFileId_ pIigId_ pIigProjectId_ =
  InventoryItemsGet'
    { _iigProFileId = _Coerce # pIigProFileId_
    , _iigId = _Coerce # pIigId_
    , _iigProjectId = _Coerce # pIigProjectId_
    }

-- | User profile ID associated with this request.
iigProFileId :: Lens' InventoryItemsGet Int64
iigProFileId = lens _iigProFileId (\s a -> s {_iigProFileId = a}) . _Coerce

-- | Inventory item ID.
iigId :: Lens' InventoryItemsGet Int64
iigId = lens _iigId (\s a -> s {_iigId = a}) . _Coerce

-- | Project ID for order documents.
iigProjectId :: Lens' InventoryItemsGet Int64
iigProjectId = lens _iigProjectId (\s a -> s {_iigProjectId = a}) . _Coerce

instance GoogleRequest InventoryItemsGet where
  type Rs InventoryItemsGet = InventoryItem
  type Scopes InventoryItemsGet =
    '["https://www.googleapis.com/auth/dfatrafficking"]

  -- NOTE: the resource path captures profileId, then projectId, then id,
  -- so _iigProjectId must be applied before _iigId here.
  requestClient InventoryItemsGet' {..} =
    go _iigProFileId _iigProjectId _iigId (Just AltJSON) dFAReportingService
    where
      go = buildClient (Proxy :: Proxy InventoryItemsGetResource) mempty
| rueshyna/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/InventoryItems/Get.hs | mpl-2.0 | 3,722 | 0 | 16 | 889 | 520 | 304 | 216 | 76 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.DicomStores.Studies.Series.RetrieveSeries
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- RetrieveSeries returns all instances within the given study and series.
-- See [RetrieveTransaction]
-- (http:\/\/dicom.nema.org\/medical\/dicom\/current\/output\/html\/part18.html#sect_10.4).
-- For details on the implementation of RetrieveSeries, see [DICOM
-- study\/series\/instances](https:\/\/cloud.google.com\/healthcare\/docs\/dicom#dicom_studyseriesinstances)
-- in the Cloud Healthcare API conformance statement. For samples that show
-- how to call RetrieveSeries, see [Retrieving DICOM
-- data](https:\/\/cloud.google.com\/healthcare\/docs\/how-tos\/dicomweb#retrieving_dicom_data).
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.dicomStores.studies.series.retrieveSeries@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.DicomStores.Studies.Series.RetrieveSeries
(
-- * REST Resource
ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeriesResource
-- * Creating a Request
, projectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries
, ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries
-- * Request Lenses
, pldsdsssrsParent
, pldsdsssrsXgafv
, pldsdsssrsUploadProtocol
, pldsdsssrsAccessToken
, pldsdsssrsUploadType
, pldsdsssrsCallback
, pldsdsssrsDicomWebPath
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for
-- @healthcare.projects.locations.datasets.dicomStores.studies.series.retrieveSeries@
-- method which the
-- 'ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries' request
-- conforms to.
type ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeriesResource
  = "v1"
      :> Capture "parent" Text
      :> "dicomWeb"
      :> Capture "dicomWebPath" Text
      :> QueryParam "$.xgafv" Xgafv
      :> QueryParam "upload_protocol" Text
      :> QueryParam "access_token" Text
      :> QueryParam "uploadType" Text
      :> QueryParam "callback" Text
      :> QueryParam "alt" AltJSON
      :> Get '[JSON] HTTPBody

-- | RetrieveSeries returns all instances within the given study and series.
-- See [RetrieveTransaction]
-- (http:\/\/dicom.nema.org\/medical\/dicom\/current\/output\/html\/part18.html#sect_10.4).
-- For details on the implementation of RetrieveSeries, see [DICOM
-- study\/series\/instances](https:\/\/cloud.google.com\/healthcare\/docs\/dicom#dicom_studyseriesinstances)
-- in the Cloud Healthcare API conformance statement. For samples that show
-- how to call RetrieveSeries, see [Retrieving DICOM
-- data](https:\/\/cloud.google.com\/healthcare\/docs\/how-tos\/dicomweb#retrieving_dicom_data).
--
-- /See:/ 'projectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries'
-- smart constructor.
data ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries =
  ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries'
    { _pldsdsssrsParent :: !Text
    , _pldsdsssrsXgafv :: !(Maybe Xgafv)
    , _pldsdsssrsUploadProtocol :: !(Maybe Text)
    , _pldsdsssrsAccessToken :: !(Maybe Text)
    , _pldsdsssrsUploadType :: !(Maybe Text)
    , _pldsdsssrsCallback :: !(Maybe Text)
    , _pldsdsssrsDicomWebPath :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of
-- 'ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries' with
-- the minimum fields required to make a request; the optional query
-- parameters all start out as 'Nothing'.
projectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries
  :: Text -- ^ 'pldsdsssrsParent'
  -> Text -- ^ 'pldsdsssrsDicomWebPath'
  -> ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries
projectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries pPldsdsssrsParent_ pPldsdsssrsDicomWebPath_ =
  ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries'
    { _pldsdsssrsParent = pPldsdsssrsParent_
    , _pldsdsssrsXgafv = Nothing
    , _pldsdsssrsUploadProtocol = Nothing
    , _pldsdsssrsAccessToken = Nothing
    , _pldsdsssrsUploadType = Nothing
    , _pldsdsssrsCallback = Nothing
    , _pldsdsssrsDicomWebPath = pPldsdsssrsDicomWebPath_
    }

-- | The name of the DICOM store that is being accessed. For example,
-- \`projects\/{project_id}\/locations\/{location_id}\/datasets\/{dataset_id}\/dicomStores\/{dicom_store_id}\`.
pldsdsssrsParent :: Lens' ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries Text
pldsdsssrsParent =
  lens _pldsdsssrsParent (\s a -> s {_pldsdsssrsParent = a})

-- | V1 error format.
pldsdsssrsXgafv :: Lens' ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries (Maybe Xgafv)
pldsdsssrsXgafv =
  lens _pldsdsssrsXgafv (\s a -> s {_pldsdsssrsXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldsdsssrsUploadProtocol :: Lens' ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries (Maybe Text)
pldsdsssrsUploadProtocol =
  lens _pldsdsssrsUploadProtocol (\s a -> s {_pldsdsssrsUploadProtocol = a})

-- | OAuth access token.
pldsdsssrsAccessToken :: Lens' ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries (Maybe Text)
pldsdsssrsAccessToken =
  lens _pldsdsssrsAccessToken (\s a -> s {_pldsdsssrsAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldsdsssrsUploadType :: Lens' ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries (Maybe Text)
pldsdsssrsUploadType =
  lens _pldsdsssrsUploadType (\s a -> s {_pldsdsssrsUploadType = a})

-- | JSONP
pldsdsssrsCallback :: Lens' ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries (Maybe Text)
pldsdsssrsCallback =
  lens _pldsdsssrsCallback (\s a -> s {_pldsdsssrsCallback = a})

-- | The path of the RetrieveSeries DICOMweb request. For example,
-- \`studies\/{study_uid}\/series\/{series_uid}\`.
pldsdsssrsDicomWebPath :: Lens' ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries Text
pldsdsssrsDicomWebPath =
  lens _pldsdsssrsDicomWebPath (\s a -> s {_pldsdsssrsDicomWebPath = a})

instance GoogleRequest
         ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries
         where
  type Rs
       ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries
    = HTTPBody
  type Scopes
       ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries
    = '["https://www.googleapis.com/auth/cloud-platform"]

  -- Arguments follow the resource type: both captures first, then each
  -- query parameter, then the fixed @alt=json@ parameter.
  requestClient
    ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeries' {..} =
    go
      _pldsdsssrsParent
      _pldsdsssrsDicomWebPath
      _pldsdsssrsXgafv
      _pldsdsssrsUploadProtocol
      _pldsdsssrsAccessToken
      _pldsdsssrsUploadType
      _pldsdsssrsCallback
      (Just AltJSON)
      healthcareService
    where
      go =
        buildClient
          (Proxy ::
             Proxy
               ProjectsLocationsDataSetsDicomStoresStudiesSeriesRetrieveSeriesResource)
          mempty
| brendanhay/gogol | gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/DicomStores/Studies/Series/RetrieveSeries.hs | mpl-2.0 | 8,249 | 0 | 17 | 1,423 | 794 | 471 | 323 | 126 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Organizations.Locations.StoredInfoTypes.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a pre-built stored infoType to be used for inspection. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.organizations.locations.storedInfoTypes.create@.
module Network.Google.Resource.DLP.Organizations.Locations.StoredInfoTypes.Create
(
-- * REST Resource
OrganizationsLocationsStoredInfoTypesCreateResource
-- * Creating a Request
, organizationsLocationsStoredInfoTypesCreate
, OrganizationsLocationsStoredInfoTypesCreate
-- * Request Lenses
, olsitcParent
, olsitcXgafv
, olsitcUploadProtocol
, olsitcAccessToken
, olsitcUploadType
, olsitcPayload
, olsitcCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- | A resource alias for @dlp.organizations.locations.storedInfoTypes.create@
-- method which the 'OrganizationsLocationsStoredInfoTypesCreate' request
-- conforms to.
type OrganizationsLocationsStoredInfoTypesCreateResource
  = "v2"
      :> Capture "parent" Text
      :> "storedInfoTypes"
      :> QueryParam "$.xgafv" Xgafv
      :> QueryParam "upload_protocol" Text
      :> QueryParam "access_token" Text
      :> QueryParam "uploadType" Text
      :> QueryParam "callback" Text
      :> QueryParam "alt" AltJSON
      :> ReqBody '[JSON] GooglePrivacyDlpV2CreateStoredInfoTypeRequest
      :> Post '[JSON] GooglePrivacyDlpV2StoredInfoType

-- | Creates a pre-built stored infoType to be used for inspection. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ 'organizationsLocationsStoredInfoTypesCreate' smart constructor.
data OrganizationsLocationsStoredInfoTypesCreate =
  OrganizationsLocationsStoredInfoTypesCreate'
    { _olsitcParent :: !Text
    , _olsitcXgafv :: !(Maybe Xgafv)
    , _olsitcUploadProtocol :: !(Maybe Text)
    , _olsitcAccessToken :: !(Maybe Text)
    , _olsitcUploadType :: !(Maybe Text)
    , _olsitcPayload :: !GooglePrivacyDlpV2CreateStoredInfoTypeRequest
    , _olsitcCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'OrganizationsLocationsStoredInfoTypesCreate' with
-- the minimum fields required to make a request; the optional query
-- parameters all start out as 'Nothing'.
organizationsLocationsStoredInfoTypesCreate
  :: Text -- ^ 'olsitcParent'
  -> GooglePrivacyDlpV2CreateStoredInfoTypeRequest -- ^ 'olsitcPayload'
  -> OrganizationsLocationsStoredInfoTypesCreate
organizationsLocationsStoredInfoTypesCreate pOlsitcParent_ pOlsitcPayload_ =
  OrganizationsLocationsStoredInfoTypesCreate'
    { _olsitcParent = pOlsitcParent_
    , _olsitcXgafv = Nothing
    , _olsitcUploadProtocol = Nothing
    , _olsitcAccessToken = Nothing
    , _olsitcUploadType = Nothing
    , _olsitcPayload = pOlsitcPayload_
    , _olsitcCallback = Nothing
    }

-- | Required. Parent resource name. The format of this value varies
-- depending on the scope of the request (project or organization) and
-- whether you have [specified a processing
-- location](https:\/\/cloud.google.com\/dlp\/docs\/specifying-location): +
-- Projects scope, location specified:
-- \`projects\/\`PROJECT_ID\`\/locations\/\`LOCATION_ID + Projects scope,
-- no location specified (defaults to global): \`projects\/\`PROJECT_ID +
-- Organizations scope, location specified:
-- \`organizations\/\`ORG_ID\`\/locations\/\`LOCATION_ID + Organizations
-- scope, no location specified (defaults to global):
-- \`organizations\/\`ORG_ID The following example \`parent\` string
-- specifies a parent project with the identifier \`example-project\`, and
-- specifies the \`europe-west3\` location for processing data:
-- parent=projects\/example-project\/locations\/europe-west3
olsitcParent :: Lens' OrganizationsLocationsStoredInfoTypesCreate Text
olsitcParent = lens _olsitcParent (\s a -> s {_olsitcParent = a})

-- | V1 error format.
olsitcXgafv :: Lens' OrganizationsLocationsStoredInfoTypesCreate (Maybe Xgafv)
olsitcXgafv = lens _olsitcXgafv (\s a -> s {_olsitcXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
olsitcUploadProtocol :: Lens' OrganizationsLocationsStoredInfoTypesCreate (Maybe Text)
olsitcUploadProtocol =
  lens _olsitcUploadProtocol (\s a -> s {_olsitcUploadProtocol = a})

-- | OAuth access token.
olsitcAccessToken :: Lens' OrganizationsLocationsStoredInfoTypesCreate (Maybe Text)
olsitcAccessToken =
  lens _olsitcAccessToken (\s a -> s {_olsitcAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
olsitcUploadType :: Lens' OrganizationsLocationsStoredInfoTypesCreate (Maybe Text)
olsitcUploadType =
  lens _olsitcUploadType (\s a -> s {_olsitcUploadType = a})

-- | Multipart request metadata.
olsitcPayload :: Lens' OrganizationsLocationsStoredInfoTypesCreate GooglePrivacyDlpV2CreateStoredInfoTypeRequest
olsitcPayload = lens _olsitcPayload (\s a -> s {_olsitcPayload = a})

-- | JSONP
olsitcCallback :: Lens' OrganizationsLocationsStoredInfoTypesCreate (Maybe Text)
olsitcCallback = lens _olsitcCallback (\s a -> s {_olsitcCallback = a})

instance GoogleRequest
         OrganizationsLocationsStoredInfoTypesCreate
         where
  type Rs OrganizationsLocationsStoredInfoTypesCreate =
    GooglePrivacyDlpV2StoredInfoType
  type Scopes OrganizationsLocationsStoredInfoTypesCreate =
    '["https://www.googleapis.com/auth/cloud-platform"]

  -- Arguments follow the resource type: the capture, the query parameters,
  -- the fixed @alt=json@ parameter, and finally the request body.
  requestClient OrganizationsLocationsStoredInfoTypesCreate' {..} =
    go
      _olsitcParent
      _olsitcXgafv
      _olsitcUploadProtocol
      _olsitcAccessToken
      _olsitcUploadType
      _olsitcCallback
      (Just AltJSON)
      _olsitcPayload
      dLPService
    where
      go =
        buildClient
          (Proxy ::
             Proxy OrganizationsLocationsStoredInfoTypesCreateResource)
          mempty
| brendanhay/gogol | gogol-dlp/gen/Network/Google/Resource/DLP/Organizations/Locations/StoredInfoTypes/Create.hs | mpl-2.0 | 7,252 | 0 | 17 | 1,444 | 797 | 472 | 325 | 124 | 1 |
{- LANGUAGE CPP -}
module Files where
import System.FilePath
-- | Path of the kvm-in-a-box configuration file.
configFile :: FilePath
configFile = etcdir </> "kvm-in-a-box.cfg"

-- | Path of the persistent state file.
stateFile :: FilePath
stateFile = varlibdir </> "state"

-- | Re-root an absolute @path@ underneath @root@, e.g.
-- @rootRel "/chroot" "/etc/passwd" == "/chroot/etc/passwd"@.
rootRel :: FilePath -> FilePath -> FilePath
rootRel root path = root </> makeRelative "/" path

-- | Base directory for user home directories.
homedir :: FilePath
homedir = "/home"

-- | Per-user runtime directory (systemd-style @/run/user/<uid>@).
varrundir :: Show a => a -> FilePath
varrundir uid = "/run/user" </> show uid

-- | Variable state directory for kvm-in-a-box.
varlibdir :: FilePath
varlibdir = "/var/lib/kib/"

-- | System configuration directory.
etcdir :: FilePath
etcdir = "/etc"

-- | Read-only shared data directory.
usrsharedir :: FilePath
usrsharedir = "/usr/share/kvm-in-a-box"
| DanielG/kvm-in-a-box | src/Files.hs | agpl-3.0 | 337 | 0 | 6 | 52 | 82 | 45 | 37 | 10 | 1 |
{-
Copyright 2015 Martin Buck
This file is part of H2D.
H2D is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
H2D is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with H2D. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE FlexibleInstances #-}
module Mirrorable where
import Types2D
import Base2D
-- | Geometry that can be reflected across an axis-aligned line or
-- through a point.
class Mirrorable a where
    -- | Reflect across the vertical line @x = a@.
    mirrorV :: a -> Double -> a
    -- | Reflect across the horizontal line @y = a@.
    mirrorH :: a -> Double -> a
    -- | Reflect through the given point.
    mirrorP :: a -> Vec2D -> a
-- Reflecting a coordinate @v@ across @c@ maps it to @2*c - v@.
instance Mirrorable Vec2D where
    mirrorV (Vec2D px py) axis = Vec2D (2 * axis - px) py
    mirrorH (Vec2D px py) axis = Vec2D px (2 * axis - py)
    mirrorP (Vec2D px py) (Vec2D cx cy) = Vec2D (2 * cx - px) (2 * cy - py)
-- Paths mirror point-wise; the map is chunked via 'chunkParMap' with
-- 'pointChunkSize' (parallel evaluation helper from Base2D).
instance Mirrorable Path2D where
    mirrorV path axis = chunkParMap pointChunkSize (\p -> mirrorV p axis) path
    mirrorH path axis = chunkParMap pointChunkSize (\p -> mirrorH p axis) path
    mirrorP path centre = chunkParMap pointChunkSize (\p -> mirrorP p centre) path
| I3ck/H2D | src/Mirrorable.hs | lgpl-3.0 | 1,335 | 0 | 9 | 293 | 272 | 139 | 133 | 16 | 0 |
module HW02 where
import Words
import Data.List
-- | Though a Scrabble hand is the same Haskell type as a Scrabble word, they
-- have different properties. Specifically, a hand is unordered whereas a word
-- is ordered. We denote this distinction by using a type synonym to talk
-- about hands, even though we could just say `String`.
type Hand = [Char]
-- | A `Template` is like a word, but it has '?' characters in some places as
-- placeholders for letters from a player's hand. Because real words do not
-- have '?' characters, we use another type synonym to track this distinction.
type Template = String
-- | A 'STemplate' is like a template, but it has markers to indicate four kinds
-- of special board locations: double-letter noted with 'D', triple-letter
-- noted with 'T', double-word noted with '2', and triple-word noted with '3'.
-- For matching, these behave just like '?' does -- they can be filled in with
-- any letter. But, when scoring, any letter played on a 'D' gets double its
-- value, and any letter played on a 'T' gets triple its value. If any square
-- in the template is a '2', the whole word's value is doubled; if any square
-- in the template is a '3', the whole word's score is tripled. If multiple of
-- these special squares are in the same word, the effects multiply.
type STemplate = Template
-- | Exercise 1: can the word be spelled from the hand's tiles, using each
-- tile at most once?
--
-- >>> formableBy "fun" ['x','n','i','f','u','e','l']
-- True
-- >>> formableBy "haskell" ['k','l','e','h','a','l','s']
-- True
-- >>> formableBy "haskell" ['k','l','e','h','a','y','s']
-- False
-- >>> formableBy "" ['a','f']
-- True
formableBy :: String -> Hand -> Bool
formableBy [] _ = True
formableBy (c:cs) hand
  | c `elem` hand = formableBy cs (delete c hand)
  | otherwise     = False
-- | Exercise 2: every dictionary word that can be built from the hand.
--
-- >>> wordsFrom ['a','b','c','d']
-- ["ab","ad","ba","bad","cab","cad","dab"]
wordsFrom :: Hand -> [String]
wordsFrom hand = [w | w <- allWords, formableBy w hand]
-- | Exercise 3: does the word fit the template, drawing the letters for the
-- \'?\' placeholders from the hand?
--
-- A word fits when it is the same length as the template, matches every
-- fixed (non-\'?\') letter position exactly, and the letters landing on
-- \'?\' squares can all be taken from the hand.
--
-- The previous implementation only removed the template's characters from
-- the word (@s \\\\ t@) and checked the remainder against the hand; it never
-- verified the lengths or that fixed letters matched their positions, so
-- e.g. @wordFitsTemplate "let" h "lex"@ could wrongly succeed when the hand
-- contained an @\'x\'@.
--
-- >>> wordFitsTemplate "let" ['x','x'] "let"
-- True
-- >>> wordFitsTemplate "??r?" ['c','x','e','a','b','c','l'] "care"
-- True
-- >>> wordFitsTemplate "??r?" ['c','x','e','w','b','c','l'] "care"
-- False
wordFitsTemplate :: Template -> Hand -> String -> Bool
wordFitsTemplate template hand word = go template word []
  where
    -- Walk template and word in lock step, collecting the letters that
    -- land on '?' squares; at the end they must be formable from the hand.
    go [] [] needed = null (needed \\ hand)
    go ('?':ts) (c:cs) needed = go ts cs (c : needed)
    go (t:ts) (c:cs) needed = t == c && go ts cs needed
    go _ _ _ = False -- length mismatch
-- | Exercise 4: the Scrabble score of a word, ignoring board bonuses.
--
-- >>> scrabbleValueWord "care"
-- 6
-- >>> scrabbleValueWord "quiz"
-- 22
scrabbleValueWord :: String -> Int
scrabbleValueWord = foldr (\c acc -> scrabbleValue c + acc) 0
-- | Returns every string whose paired number equals the given number.
--
-- Fixes two defects in the previous version: it had no equation for the
-- empty list (a runtime pattern-match failure), and its @otherwise@ branch
-- returned @[]@, truncating the scan at the first non-matching pair instead
-- of skipping it — which made 'bestWords' drop words after the first
-- non-maximal one.
--
-- >>> filterByValue [(3,"1"), (5,"2")] 3
-- ["1"]
filterByValue :: [(Int, String)] -> Int -> [String]
filterByValue pairs y = [s | (x, s) <- pairs, x == y]
-- | Exercise 5: the words with the maximum Scrabble value, in input order.
--
-- The previous version called 'maximum' on an empty list for @bestWords []@
-- (a runtime error, despite the example below) and relied on a filter that
-- stopped at the first non-maximal word; this version is total and
-- self-contained.
--
-- bestWords ["cat", "rat", "bat"]
-- ["cat","bat"]
-- bestWords []
-- []
bestWords :: [String] -> [String]
bestWords [] = []
bestWords ws = [w | (v, w) <- scored, v == best]
  where
    -- Score each word once, then keep those hitting the maximum.
    scored = [(scrabbleValueWord w, w) | w <- ws]
    best = maximum (map fst scored)
-- | Extra letter points contributed by special squares: a \'D\' square
-- doubles its letter (one extra copy of the letter's value), a \'T\' square
-- triples it (two extra copies).  Other squares add nothing.  The result is
-- added to 'scrabbleValueWord' by 'scrabbleValueTemplate'.
--
-- The previous version added a flat @2 +@ \/ @3 +@ on top of a single
-- letter value, which does not implement the double\/triple-letter rule
-- described for 'STemplate' (the 'scrabbleValueTemplate' examples came out
-- wrong).  It also lacked a case for a template longer than the word.
extraValueLetter :: String -> String -> Int
extraValueLetter [] _ = 0
extraValueLetter _ [] = 0
extraValueLetter ('D':ts) (c:cs) = scrabbleValue c + extraValueLetter ts cs
extraValueLetter ('T':ts) (c:cs) = 2 * scrabbleValue c + extraValueLetter ts cs
extraValueLetter (_:ts) (_:cs) = extraValueLetter ts cs
-- | Whole-word multiplier from special squares: each \'2\' doubles the
-- word, each \'3\' triples it, and the effects multiply.  A template with
-- neither leaves the score unchanged (multiplier 1).
--
-- The previous version summed the factors starting from 0, so a template
-- without \'2\'\/\'3\' zeroed the whole score (e.g. the @"??Tce"@ example
-- of 'scrabbleValueTemplate' returned 0 instead of 11) and stacked
-- multipliers were added rather than multiplied.
extraValueWord :: String -> Int
extraValueWord [] = 1
extraValueWord ('2':s) = 2 * extraValueWord s
extraValueWord ('3':s) = 3 * extraValueWord s
extraValueWord (_:s) = extraValueWord s
-- | Exercise 6: value of a word played on a template whose special squares
-- are marked as described for 'STemplate'.
--
-- >>> scrabbleValueTemplate "?e??3" "peace"
-- 27
-- >>> scrabbleValueTemplate "De?2?" "peace"
-- 24
-- >>> scrabbleValueTemplate "??Tce" "peace"
-- 11
scrabbleValueTemplate :: STemplate -> String -> Int
scrabbleValueTemplate tmpl word = (baseValue + letterBonus) * wordMultiplier
  where
    baseValue = scrabbleValueWord word
    letterBonus = extraValueLetter tmpl word
    wordMultiplier = extraValueWord tmpl
| romanofski/codesnippets | haskellCIS194/homework2/HW02.hs | unlicense | 3,909 | 0 | 8 | 697 | 633 | 360 | 273 | 36 | 1 |
{- Copyright 2014 David Farrell <shokku.ra@gmail.com>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Part where
import Data.List
import qualified Data.Map as M
import IRC.Message
import IRC.Numeric
import IRC.Action
import qualified IRC.Server.Client as Client
import qualified IRC.Server.Channel as Chan
import IRC.Server.Channel.Helper
import IRC.Server.Environment (whenRegistered)
import qualified IRC.Server.Environment as Env
import Plugin
-- Register this module's single handler for the PART command.
plugin = defaultPlugin {handlers = [CommandHandler "PART" part]}

-- | Handle PART.  Three clauses:
--   1. channel argument starting with '#': part, "not on channel" or
--      "no such channel", depending on server/client state;
--   2. any other channel argument: illegal channel name;
--   3. no arguments: not enough parameters.
-- Each clause only queues an action onto the environment.
part :: CommandHSpec
part env (Message _ _ (chan@('#':_):xs)) = whenRegistered env $ do
    -- Choose the action: channel must exist locally AND the client must
    -- actually be a member of it for a real part to happen.
    let a = if M.member chan locChans
                then if elem chan channels
                    then NamedAction "Part" aPart
                    else GenericAction aNotOn
                else GenericAction aNoSuch
    env {Env.actions=a:Env.actions env}
    where
        locChans = Env.channels (Env.local env)
        channels = Client.channels (Env.client env)
        -- Broadcast "PART <chan>[ :<reason>]" to the channel, then drop the
        -- channel from the client and drop the client's uid from the channel.
        aPart e = do
            sendChannelFromClient cli e (lcs M.! chan) $ "PART " ++ chan ++ case xs of
                reason:_ -> " :" ++ reason
                [] -> ""
            return e
                { Env.client = cli {Client.channels=delete chan cs}
                , Env.local = l {Env.channels=newChans}
                }
            where l = Env.local e
                  lcs = Env.channels l
                  cli = Env.client e
                  cs = Client.channels (Env.client e)
                  -- NOTE(review): irrefutable 'Just' pattern -- assumes a
                  -- registered client always has a uid; confirm.
                  Just uid = Client.uid cli
                  newChans = M.adjust (\c@(Chan.Channel {Chan.uids=us}) -> c {Chan.uids=delete uid us}) chan lcs
        aNotOn e = sendNumeric e numERR_NOTONCHANNEL [chan, "You're not on that channel"] >> return e
        aNoSuch e = sendNumeric e numERR_NOSUCHCHANNEL [chan, "No such channel"] >> return e
-- Argument present but not a '#'-prefixed channel name.
part env (Message _ _ (chan:_)) = whenRegistered env $ env {Env.actions=a:Env.actions env}
    where a = GenericAction $ \e -> sendNumeric e numERR_BADCHANNAME [chan, "Illegal channel name"] >> return e
-- No arguments at all.
part env _ = whenRegistered env $ env {Env.actions=a:Env.actions env}
    where a = GenericAction $ \e -> sendNumeric e numERR_NEEDMOREPARAMS ["PART", "Not enough parameters"] >> return e
| shockkolate/lambdircd | plugins.old/Part.hs | apache-2.0 | 2,630 | 0 | 16 | 607 | 710 | 380 | 330 | 42 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Quasar.Api.Http.Response where
import Control.Lens
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.CaseInsensitive
import Network.HTTP.Types.Status
import Network.HTTP.Types.Header
import qualified Network.Wai as W
import Quasar.Utils
-- | A typed HTTP response: status, headers and a body of type @a@.
data Response a = Response
    { _responseStatus :: Status            -- ^ HTTP status line
    , _responseHeaders :: ResponseHeaders  -- ^ response headers
    , _responseBody :: a                   -- ^ response payload
    }
    deriving (Eq, Show)

-- Generates the 'responseStatus', 'responseHeaders' and 'responseBody' lenses.
$(makeLenses ''Response)
-- | An empty 200 OK response: no headers, no body.
-- (Added the missing top-level type signature; every other binding in
-- this module carries one.)
ok :: Response (Maybe a)
ok = Response { _responseStatus = status200, _responseHeaders = [], _responseBody = Nothing }
-- | A 400 Bad Request response.
--
-- Fixed: this previously used 'status404' (Not Found) even though both
-- the binding's name and the body text say "Bad request"; 400 is the
-- matching status code.
badRequestResponse :: Response (Maybe LBS.ByteString)
badRequestResponse = Response
    { _responseStatus = status400
    , _responseHeaders = []
    , _responseBody = Just "Bad request"
    }
-- | Keep only the headers satisfying the predicate.
filterHeaders :: Response a -> (Header -> Bool) -> Response a
filterHeaders response keep =
    response { _responseHeaders = filter keep (_responseHeaders response) }

-- | Prepend the given headers to the response's existing headers.
withHeaders :: Response a -> ResponseHeaders -> Response a
withHeaders response extra =
    response { _responseHeaders = extra ++ _responseHeaders response }

-- | Transform the body of a response.
mapResponseBody :: Response a -> (a -> b) -> Response b
mapResponseBody response transform =
    response { _responseBody = transform (_responseBody response) }
-- | Convert our 'Response' into a WAI response.  A 'Nothing' body is
-- rendered as the empty lazy 'LBS.ByteString'.
buildResponse :: Response (Maybe LBS.ByteString) -> W.Response
buildResponse response = W.responseLBS status headers body
    where
        status  = response ^. responseStatus
        headers = response ^. responseHeaders
        body    = maybe LBS.empty id (response ^. responseBody)
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS -Wall #-}
----------------------------------------------------------------------
-- |
-- Module : Data.ZoomCache.Write
-- Copyright : Conrad Parker
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Conrad Parker <conrad@metadecks.org>
-- Stability : unstable
-- Portability : unknown
--
-- Writing of ZoomCache files.
----------------------------------------------------------------------
module Data.ZoomCache.Write (
-- * The ZoomWrite class
ZoomWrite(..)
-- * Instance helpers
, writeData
, writeDataVBR
, writeDataTS
-- * The ZoomW monad
, ZoomW
, withFileWrite
, flush
-- * ZoomWHandle IO functions
, ZoomWHandle
, openWrite
, closeWrite
-- * Watermarks
, watermark
, setWatermark
) where
import Blaze.ByteString.Builder hiding (flush)
import Codec.Compression.Zlib
import Control.Applicative ((<$>))
import Control.Monad.State
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lazy as L
import Data.Dynamic
import qualified Data.Foldable as Fold
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import Data.List (foldl')
import Data.Monoid
import Data.Time (UTCTime)
import System.IO
import Blaze.ByteString.Builder.ZoomCache
import Blaze.ByteString.Builder.ZoomCache.Internal
import Data.ZoomCache.Common
import Data.ZoomCache.Format
import Data.ZoomCache.Numeric.Delta
import Data.ZoomCache.Types
------------------------------------------------------------
-- | The ZoomWrite class provides 'write', a method to write a
-- Haskell value to an open ZoomCache file.
--
class ZoomWrite t where
    -- | Write a value to an open ZoomCache file.
    write :: TrackNo -> t -> ZoomW ()

------------------------------------------------------------

-- | Mutable state for a ZoomCache file being written.
data ZoomWHandle = ZoomWHandle
    { whHandle    :: Handle               -- ^ destination file handle
    , whTrackWork :: !(IntMap TrackWork)  -- ^ per-track buffering state
    , whDeferred  :: IntMap Builder       -- ^ pending summary builders, keyed by summary level
    , whWriteData :: Bool                 -- ^ whether raw data packets are written at all
    }

-- | Per-track write-side working state.
data TrackWork = TrackWork
    { twSpec      :: TrackSpec       -- ^ static track specification
    , twBuilder   :: Builder         -- ^ raw data accumulated for the pending packet
    , twReverseSO :: [SampleOffset]  -- ^ offsets of buffered VBR points, most recent first
    , twWriter    :: Maybe ZoomWork  -- ^ summary accumulation state, if any
    , twCount     :: {-# UNPACK #-}!Int  -- ^ data points buffered since the last flush
    , twWatermark :: {-# UNPACK #-}!Int  -- ^ flush once twCount reaches this
    , twEntryTime :: {-# UNPACK #-}!SampleOffset  -- ^ first sample offset of the pending packet
    , twExitTime  :: {-# UNPACK #-}!SampleOffset  -- ^ last sample offset of the pending packet
    }
----------------------------------------------------------------------
-- Public API
-- | A StateT IO monad for writing a ZoomCache file
type ZoomW = StateT ZoomWHandle IO

-- | Run a @ZoomW ()@ action on a given file handle, using the specified
-- 'TrackMap' specification
withFileWrite :: TrackMap
              -> Maybe UTCTime
              -> Bool          -- ^ Whether or not to write raw data packets.
                               --   If False, only summary blocks are written.
              -> ZoomW ()
              -> FilePath
              -> IO ()
withFileWrite ztypes utc doRaw f path = do
    z <- openWrite ztypes utc doRaw path
    -- Pending data and whole-file summaries are written out before the
    -- handle is closed.
    z' <- execStateT (f >> flush >> finish) z
    hClose (whHandle z')
-- | Force a flush of ZoomCache summary blocks to disk. It is not usually
-- necessary to call this function as summary blocks are transparently written
-- at regular intervals.
flush :: ZoomW ()
flush = diskTracks flushSummarySO

-- | Write final, whole-file summary blocks.
--
-- This function flushes saved summaries at all levels, to ensure that all
-- summary levels contain data for the entire time range of the track.
--
-- In particular, the highest level of summary will contain one block for
-- the entire range of the file, and this will be the last summary block
-- in the track.
finish :: ZoomW ()
finish = diskTracks finishSummarySO

-- | Shared machinery for 'flush' and 'finish': write pending raw data
-- packets (when enabled), run the given per-track summary step, write
-- all deferred summary builders, then reset per-track state.
diskTracks :: (TrackNo -> TrackWork -> ZoomW ()) -> ZoomW ()
diskTracks fSummarySO = do
    h <- gets whHandle
    tracks <- gets whTrackWork
    doRaw <- gets whWriteData
    when doRaw $
        liftIO $ Fold.mapM_ (L.hPut h) $ IM.mapWithKey bsFromTrack tracks
    mapM_ (uncurry fSummarySO) (IM.assocs tracks)
    pending <- mconcat . IM.elems <$> gets whDeferred
    liftIO . B.hPut h . toByteString $ pending
    modify $ \z -> z
        { whTrackWork = IM.map flushTrack (whTrackWork z)
        , whDeferred = IM.empty
        }
    where
        flushTrack :: TrackWork -> TrackWork
        -- Fresh TrackWork continuing from the old exit time; the summary
        -- writer is kept but its current-work slot is cleared.
        flushTrack tw = d{twWriter = clearWork <$> (twWriter tw)}
            where
                d = mkTrackWork (twSpec tw) (twExitTime tw) (twWatermark tw)
-- | Open a new ZoomCache file for writing, using a specified 'TrackMap'.
openWrite :: TrackMap
          -> Maybe UTCTime
          -> Bool  -- ^ Whether or not to write raw data packets.
                   --   If False, only summary blocks are written.
          -> FilePath
          -> IO ZoomWHandle
openWrite trackMap utc doRaw path = do
    h <- openFile path WriteMode
    let global = mkGlobal (IM.size trackMap) utc
    writeGlobalHeader h global
    -- NOTE(review): IM.foldWithKey is deprecated in newer containers
    -- (foldrWithKey) -- confirm the supported containers version.
    let tracks = IM.foldWithKey addTrack IM.empty trackMap
    mapM_ (uncurry (writeTrackHeader h)) (IM.assocs trackMap)
    return $ ZoomWHandle h tracks IM.empty doRaw
    where
        addTrack :: TrackNo -> TrackSpec
                 -> IntMap TrackWork
                 -> IntMap TrackWork
        addTrack trackNo spec = IM.insert trackNo trackState
            where
                -- Tracks start at sample offset 0 with a default
                -- watermark of 1024 buffered points.
                trackState = mkTrackWork spec (SO 0) 1024

-- | Close the handle of an open write-side ZoomCache file.
closeWrite :: ZoomWHandle -> IO ()
closeWrite z = hClose (whHandle z)
-- | Query the maximum number of data points to buffer for a given track before
-- forcing a flush of all buffered data and summaries.
watermark :: TrackNo -> ZoomW (Maybe Int)
watermark trackNo = do
    tracks <- gets whTrackWork
    return (twWatermark <$> IM.lookup trackNo tracks)

-- | Set the maximum number of data points to buffer for a given track before
-- forcing a flush of all buffered data and summaries.
setWatermark :: TrackNo -> Int -> ZoomW ()
setWatermark trackNo w = modifyTrack trackNo (\tw -> tw { twWatermark = w })
----------------------------------------------------------------------
-- Global header

-- | Serialise the 'Global' header and write it to the handle.
writeGlobalHeader :: Handle -> Global -> IO ()
writeGlobalHeader h = B.hPut h . toByteString . fromGlobal

----------------------------------------------------------------------
-- Track header

-- | Write one track's header: magic, track number, flags, rate, then a
-- length-prefixed codec identifier and a length-prefixed track name.
writeTrackHeader :: Handle -> Int -> TrackSpec -> IO ()
writeTrackHeader h trackNo TrackSpec{..} = do
    B.hPut h . mconcat $
        [ trackHeader
        , toByteString $ mconcat
            [ fromTrackNo trackNo
            , fromFlags specDeltaEncode specZlibCompress specSRType
            , fromRational64 specRate
            , fromIntegral32be . C.length $ ident
            ]
        , ident
        , toByteString . fromIntegral32be . C.length $ specName
        , specName
        ]
    where
        ident = toByteString $ fromCodec specType
----------------------------------------------------------------------
-- Data
-- | Advance a sample offset by one, forcing the new value.
incSampleOffset :: SampleOffset -> SampleOffset
incSampleOffset (SO t) = SO $! t + 1

-- | Advance a track's exit time by one sample; on the first data point
-- of a packet the entry time is advanced too.
incTime :: TrackNo -> ZoomW ()
incTime trackNo = modifyTrack trackNo $ \tw ->
    let entry | twCount tw == 0 = incSampleOffset (twEntryTime tw)
              | otherwise       = twEntryTime tw
    in tw { twEntryTime = entry
          , twExitTime  = incSampleOffset (twExitTime tw)
          }

-- | Record an explicit sample offset as the exit time; it also becomes
-- the entry time if this is the first data point of a packet.
setTime :: TrackNo -> SampleOffset -> ZoomW ()
setTime trackNo t = modifyTrack trackNo $ \tw ->
    let entry | twCount tw == 0 = t
              | otherwise       = twEntryTime tw
    in tw { twEntryTime = entry, twExitTime = t }
-- | Flush the track if it has reached its watermark.
flushIfNeeded :: TrackNo -> ZoomW ()
flushIfNeeded trackNo = do
    zt <- IM.lookup trackNo <$> gets whTrackWork
    case zt of
        Just track -> when (flushNeeded track) flush
        Nothing -> error "no such track" -- addTrack trackNo, if no data has been written
    where
        flushNeeded :: TrackWork -> Bool
        flushNeeded TrackWork{..} = twCount >= twWatermark

-- | Write one constant-rate data point: advance the implicit sample
-- clock, buffer the (possibly delta-encoded) raw value, update the
-- summary accumulator, and flush if the watermark has been reached.
writeData :: (Typeable a, ZoomWrite a, ZoomWritable a)
          => TrackNo -> a -> ZoomW ()
writeData trackNo d = do
    incTime trackNo
    doRaw <- gets whWriteData
    when doRaw $
        modifyTrack trackNo $ \z -> z
            { twBuilder = twBuilder z <>
                (deltaEncodeWork (specDeltaEncode . twSpec $ z) (twWriter z) d)
            }
    -- Force the count so repeated writes do not build a chain of thunks.
    modifyTrack trackNo $ \z -> let c = (twCount z) in c `seq` z
        { twCount = c + 1
        , twWriter = updateWork (twExitTime z) d (twWriter z)
        }
    flushIfNeeded trackNo

-- | Write one variable-rate data point at an explicit sample offset;
-- the offset is additionally recorded for the packet's timestamp table.
writeDataVBR :: (Typeable a, ZoomWrite a, ZoomWritable a)
             => TrackNo -> (SampleOffset, a) -> ZoomW ()
writeDataVBR trackNo (t, d) = do
    setTime trackNo t
    doRaw <- gets whWriteData
    when doRaw $
        modifyTrack trackNo $ \z -> z
            { twBuilder = twBuilder z <>
                (deltaEncodeWork (specDeltaEncode . twSpec $ z) (twWriter z) d)
            , twReverseSO = t : twReverseSO z
            }
    modifyTrack trackNo $ \z -> let c = (twCount z) in c `seq` z
        { twCount = c + 1
        , twWriter = updateWork t d (twWriter z)
        }
    flushIfNeeded trackNo

-- | Write a data point given a timestamp, converted to a sample offset
-- via the track's rate.  Unknown tracks are silently ignored.
writeDataTS :: (Typeable a, ZoomWrite a, ZoomWritable a)
            => TrackNo -> (TimeStamp, a) -> ZoomW ()
writeDataTS trackNo (TS ts, d) = do
    tw <- IM.lookup trackNo <$> gets whTrackWork
    case tw of
        Just TrackWork{..} -> do
            let so = floor (ts * fromRational (specRate twSpec))
            writeDataVBR trackNo (SO so, d)
        _ -> return ()

-- | Serialise one raw value, delta-encoding against the current summary
-- work when the track requests it and the Dynamic cast succeeds.
deltaEncodeWork :: (Typeable a, ZoomWritable a)
                => Bool -> Maybe ZoomWork -> a -> Builder
deltaEncodeWork False _ d = fromRaw d
deltaEncodeWork _ (Just (ZoomWork _ (Just cw))) d =
    case (fromDynamic . toDyn $ d) of
        Just d' -> fromRaw (deltaEncodeRaw cw d')
        Nothing -> fromRaw d
deltaEncodeWork _ _ d = fromRaw d
----------------------------------------------------------------------
-- Global

-- | Build the file-global header value.
mkGlobal :: Int -> Maybe UTCTime -> Global
mkGlobal n utc = Global
    { version = Version versionMajor versionMinor
    , noTracks = n
    , baseUTC = utc
    }

----------------------------------------------------------------------
-- TrackState

-- | Apply a function to the whole per-track state map.
modifyTracks :: (IntMap TrackWork -> IntMap TrackWork) -> ZoomW ()
modifyTracks f = modify (\z -> z { whTrackWork = f (whTrackWork z) })

-- | Apply a function to a single track's state (no-op if the track is absent).
modifyTrack :: TrackNo -> (TrackWork -> TrackWork) -> ZoomW ()
modifyTrack trackNo f = modifyTracks (IM.adjust f trackNo)
-- | Serialise one raw data packet for a track: packet magic, track
-- number, entry/exit offsets, point count and length-prefixed payload.
-- The payload (raw data followed by the delta-encoded VBR timestamp
-- table) is zlib-compressed when the track spec requests it.
bsFromTrack :: TrackNo -> TrackWork -> L.ByteString
bsFromTrack trackNo TrackWork{..} = mconcat
    [ L.pack . B.unpack $ packetHeader
    , toLazyByteString $ mconcat
        [ fromIntegral32be trackNo
        , fromSampleOffset twEntryTime
        , fromSampleOffset twExitTime
        , fromIntegral32be twCount
        , fromIntegral32be (L.length rawBS)
        ]
    , rawBS
    ]
    where
        -- twReverseSO holds offsets most-recent-first, hence the reverse.
        tsBuilder = mconcat . map fromInt64be .
            deltaEncode . map unSO . reverse $ twReverseSO
        rawBS = c $ toLazyByteString (twBuilder <> tsBuilder)
        c | specZlibCompress twSpec = compress
          | otherwise = id

-- | A fresh, empty TrackWork whose entry and exit times both start at
-- the given offset.
mkTrackWork :: TrackSpec -> SampleOffset -> Int -> TrackWork
mkTrackWork !spec !entry !w = TrackWork
    { twSpec = spec
    , twBuilder = mempty
    , twReverseSO = []
    , twCount = 0
    , twWatermark = w
    , twEntryTime = entry
    , twExitTime = entry
    , twWriter = Nothing
    }
----------------------------------------------------------------------
-- Working state

-- | Drop the current summary work, keeping the saved per-level summaries.
clearWork :: ZoomWork -> ZoomWork
clearWork (ZoomWork l _) = ZoomWork l Nothing

-- | Fold one data point into the current summary work, creating it on
-- first use.  The Dynamic round-trip guards against the point's type
-- differing from the existing work's type; on mismatch the whole work
-- is dropped (Nothing).
updateWork :: (Typeable b, ZoomWritable b)
           => SampleOffset -> b
           -> Maybe ZoomWork
           -> Maybe ZoomWork
updateWork !t !d Nothing = Just (ZoomWork IM.empty (Just cw))
    where
        cw = updateSummaryData t d (initSummaryWork t)
updateWork !t !d (Just (ZoomWork l Nothing)) =
    case cw'm of
        Just _ -> Just (ZoomWork l cw'm)
        Nothing -> Nothing
    where
        cw'm = case (fromDynamic . toDyn $ d) of
            Just d' -> Just (updateSummaryData t d' (initSummaryWork t))
            Nothing -> Nothing
updateWork !t !d (Just (ZoomWork l (Just cw))) =
    case cw'm of
        Just _ -> Just (ZoomWork l cw'm)
        Nothing -> Nothing
    where
        cw'm = case (fromDynamic . toDyn $ d) of
            Just d' -> Just (updateSummaryData t d' cw)
            Nothing -> Nothing
----------------------------------------------------------------------
-- SummarySO

-- | Emit summaries covering the current (partial) packet of a track.
flushSummarySO :: TrackNo -> TrackWork -> ZoomW ()
flushSummarySO trackNo tw@TrackWork{..} =
    diskSummarySO (flushWork twEntryTime twExitTime) trackNo tw

-- | Emit the final, whole-file summaries for a track.
finishSummarySO :: TrackNo -> TrackWork -> ZoomW ()
finishSummarySO = diskSummarySO finishWork

-- | Run one summary-producing step for a track, merging the produced
-- builders (keyed by summary level) into the deferred map and storing
-- the updated writer.  Tracks without a writer are skipped.
diskSummarySO :: (TrackNo -> ZoomWork -> (ZoomWork, IntMap Builder))
              -> TrackNo -> TrackWork -> ZoomW ()
diskSummarySO fWork trackNo TrackWork{..} = case twWriter of
    Just writer -> do
        let (writer', bs) = fWork trackNo writer
        modify $ \z -> z { whDeferred = IM.unionWith mappend (whDeferred z) bs }
        modifyTrack trackNo (\ztt -> ztt { twWriter = Just writer' } )
    _ -> return ()

-- | Final summary step: flush every saved level (see comment below) and
-- clear the saved map, keeping only the current work.
finishWork :: TrackNo -> ZoomWork -> (ZoomWork, IntMap Builder)
finishWork _trackNo (ZoomWork l cw) = (ZoomWork IM.empty cw, finishLevels l)

{-
   When finishing the writing of a file, we want the final, highest-level
   summary block to contain data for the entire range of the file:

   1: [ ] [ ] [ ] [ ]
       \ /     \ /
   2:  [ ]     [ ]
         \_   _/
           \ /
   3:      [ ]

   However this is not usually the case -- unless, by chance, exactly 2^n level 1
   summary blocks have been written.

   So, we traverse all saved summary levels, and flush a summary at each level. In
   order to do so we force all saved summary data to be flushed, and push that
   saved data up to higher levels. In this way the contents of the final level 1
   summary block are bubbled through the tree and appended to all saved summary
   blocks.

   1: [ ] [ ] [x]
       \ /    ||| Block x is propagated to the next summary level,
   2:  [s]    [x] where it is appended to saved block s.
         \_   |
           \ /
   3:      [ ]

-}

-- Flush saved summaries at all levels, to ensure that all summary levels
-- contain data for the entire time range. In particular, the highest level
-- of summary should contain one block for the entire range of the file,
-- and this should be the last summary block in the track (as summary blocks
-- are written in order of level)
-- NOTE(review): 'IM.findMax' is partial -- confirm the saved map is
-- never empty when this is reached.
finishLevels :: (Typeable a, ZoomWritable a)
             => IntMap (SummarySO a) -> IntMap Builder
finishLevels l = snd $ foldl' propagate (Nothing, IM.empty) [1 .. fst $ IM.findMax l]
    where
        propagate (Nothing, bs) k = case IM.lookup k l of
            Nothing -> -- Nothing propagated, nothing saved
                (Nothing, bs)
            Just saved -> -- Nothing propagated, saved to flush: propagate saved
                (Just (incLevel saved), IM.insert k (fromSummarySO saved) bs)
        propagate (Just bub, bs) k = case IM.lookup k l of
            Nothing -> -- Something propagated to flush, nothing saved
                (Just (incLevel bub), IM.insert k (fromSummarySO bub) bs)
            Just saved -> -- Something propagated, something saved;
                          -- append these, flush and propagate
                let new = saved `appendSummarySO` bub in
                (Just (incLevel new), IM.insert k (fromSummarySO new) bs)

-- | Build a level-1 summary for the given packet range and push it up
-- through the saved levels.  Without current work there is nothing to do.
flushWork :: SampleOffset -> SampleOffset
          -> TrackNo -> ZoomWork -> (ZoomWork, IntMap Builder)
flushWork _ _ _ op@(ZoomWork _ Nothing) = (op, IM.empty)
flushWork entrySO exitSO trackNo (ZoomWork l (Just cw)) =
    (ZoomWork l' (Just cw), bs)
    where
        (bs, l') = pushSummarySO s IM.empty l
        s = SummarySO
            { summarySOTrack = trackNo
            , summarySOLevel = 1
            , summarySOEntry = entrySO
            , summarySOExit = exitSO
            , summarySOData = toSummaryData dur cw
            }
        dur = sampleOffsetDiff exitSO entrySO
-- | Record a summary's builder at its level and carry it upwards: when
-- a summary is already saved at that level, the two are appended and
-- the combined summary is pushed to the next level; otherwise the
-- incremented summary is saved there.
pushSummarySO :: (ZoomWritable a)
              => SummarySO a
              -> IntMap Builder -> IntMap (SummarySO a)
              -> (IntMap Builder, IntMap (SummarySO a))
pushSummarySO s bs l =
    case IM.lookup lvl l of
        Just saved -> pushSummarySO (saved `appendSummarySO` s) bs' cleared
        Nothing -> (bs', inserted)
    where
        lvl = summarySOLevel s
        bs' = IM.insert lvl (fromSummarySO s) bs
        inserted = IM.insert lvl (incLevel s) l
        cleared = IM.delete lvl l
-- | Bump a summary one level up the tree.
incLevel :: SummarySO a -> SummarySO a
incLevel s = s { summarySOLevel = summarySOLevel s + 1 }

-- | Append two Summaries, merging statistical summary data.
-- XXX: summaries are only compatible if tracks and levels are equal
-- The combined range runs from s1's entry to s2's exit.
appendSummarySO :: (ZoomWritable a) => SummarySO a -> SummarySO a -> SummarySO a
appendSummarySO s1 s2 = SummarySO
    { summarySOTrack = summarySOTrack s1
    , summarySOLevel = summarySOLevel s1
    , summarySOEntry = summarySOEntry s1
    , summarySOExit = summarySOExit s2
    , summarySOData = appendSummaryData (dur s1) (summarySOData s1)
                                        (dur s2) (summarySOData s2)
    }
    where
        dur = summarySODuration
------------------------------------------------------------

#if !MIN_VERSION_base(4,5,0)
-- Compatibility shim: '<>' is only exported by base from 4.5 onwards.
(<>) :: Monoid a => a -> a -> a
(<>) = mappend
#endif
| kfish/zoom-cache | Data/ZoomCache/Write.hs | bsd-2-clause | 17,819 | 0 | 20 | 4,812 | 4,397 | 2,295 | 2,102 | 319 | 5 |
{-# LANGUAGE PackageImports #-}
import "BottleStory" Application (withDevelAppPort)
import Data.Dynamic (fromDynamic)
import Network.Wai.Handler.Warp (run)
import Data.Maybe (fromJust)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
    putStrLn "Starting devel application"
    -- Fixed: 'fromJust . fromDynamic' crashed with an uninformative
    -- "fromJust: Nothing" on a type mismatch; fail with a clear message
    -- instead, and acknowledge the discarded ThreadId explicitly.
    case fromDynamic withDevelAppPort of
        Nothing -> error "devel.hs: withDevelAppPort does not have the expected type"
        Just wdap -> do
            -- The server runs on a background thread while the main
            -- thread polls for the terminate file in 'loop'.
            _ <- forkIO . wdap $ \(port, app) -> run port app
            loop
-- | Poll every 0.1s for the terminate marker file; exit when it appears.
loop :: IO ()
loop = threadDelay 100000 >> checkTerminate
    where
        checkTerminate = do
            stopRequested <- doesFileExist "dist/devel-terminate"
            if stopRequested then terminateDevel else loop

-- | Shut the devel process down cleanly.
terminateDevel :: IO ()
terminateDevel = exitSuccess
| konn/BottleStory | dist/devel.hs | bsd-2-clause | 707 | 0 | 11 | 104 | 215 | 116 | 99 | 22 | 2 |
{-# OPTIONS_HADDOCK hide, prune, ignore-exports #-}
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Silver.Compiler.Lexer
Description : The lexer using parser combinators for silver.
Copyright : (c) Nicholas Dujay, 2016
License : MIT
Maintainer : nickdujay@gmail.com
Stability : experimental
Portability : POSIX
-}
module Silver.Compiler.Lexer where
import Debug.Trace
import Data.Char
import Text.Megaparsec
import Text.Megaparsec.Text
import qualified Data.Text as T
import qualified Text.Megaparsec.Lexer as L
import qualified Text.Megaparsec.Char as C
-- space characters

-- Discard the result of a parser.  (Local shadow of 'Control.Monad.void'.)
void p = p >> return ()

-- | Skip a @{- ... -}@ block comment.
blockComment :: Parser ()
blockComment = L.skipBlockComment "{-" "-}"

-- | Skip a @-- ...@ line comment.
lineComment :: Parser ()
lineComment = L.skipLineComment "--"

-- | Skip a single space or tab (not a newline).
spaces :: Parser ()
spaces = void (char ' ' <|> tab)

-- | Space consumer that stays on the current line.
sc :: Parser ()
sc = L.space spaces lineComment blockComment

-- | Space consumer that also skips newlines.
scn :: Parser ()
scn = L.space (void spaceChar) lineComment blockComment
-- lexeme

-- Wrap a parser (or literal string) so trailing same-line whitespace
-- and comments are consumed.
lexeme = L.lexeme sc
symbol = L.symbol sc

-- reserved words
rws :: [String]
rws = ["if", "then", "else", "data", "type"]
-- identifiers

-- | Parse an identifier, rejecting reserved words.
identifier = (lexeme . try) (p >>= check)
    where
        p = (:) <$> identStart <*> identLetters
        -- Fixed: a space was missing before "cannot", yielding messages
        -- like: reserved word "if"cannot be an identifier
        check x = if x `elem` rws
                  then fail $ "reserved word " ++ show x ++ " cannot be an identifier"
                  else return x
-- | First character of an identifier: a letter or a Unicode mark.
identStart :: Parser Char
identStart = letterChar <|> markChar

-- | Remaining identifier characters (also allows symbols and primes).
identLetters :: Parser String
identLetters = many (alphaNumChar <|> markChar <|> symbolChar <|> char '\'')

-- | Module identifier: a run of letters.
-- NOTE(review): 'many' also accepts the empty name -- confirm whether
-- 'some' was intended.
moduleId :: Parser String
moduleId = lexeme $ many letterChar
-- utils
parens = between (symbol "(") (symbol ")")
-- Parse @( p )@, keeping the parentheses in the returned string.
withParens p = concat <$> sequence [symbol "(", p, symbol ")"]
braces = between (symbol "{") (symbol "}")
brackets = between (symbol "[") (symbol "]")
comma = symbol ","
-- primitives

-- | Character literal in single quotes (escape sequences supported).
primChar :: Parser Char
primChar = lexeme $ char '\'' *> L.charLiteral <* char '\''

-- | String literal in double quotes (escape sequences supported).
primString :: Parser String
primString = lexeme $ char '"' >> manyTill L.charLiteral (char '"')

-- | Optionally-signed integer literal.
primInteger :: Parser Integer
primInteger = L.signed sc L.integer

-- | Optionally-signed floating point literal.
primFloat :: Parser Double
primFloat = L.signed sc L.float
| silver-lang/silver | src/Silver/Compiler/Lexer.hs | bsd-3-clause | 2,133 | 0 | 13 | 402 | 642 | 344 | 298 | 49 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module SyntaxHighlighting.Renderer where
import TypeSystem
import Utils.Utils
import SyntaxHighlighting.Coloring
import Text.PrettyPrint.ANSI.Leijen
-- | Interface implemented by every syntax-highlighting backend.
class Renderer renderer where
        -- | Construct a renderer from a colouring scheme and syntax style.
        create :: FullColoring -> SyntaxStyle -> renderer
        -- | Human-readable name of this backend.
        name :: renderer -> String
        -- | Identifiers this backend supports.
        -- NOTE(review): confirm what these identifiers denote.
        supported :: renderer -> [String]
        -- | Render a raw string; the 'Name' selects how it is styled.
        renderString :: Name -> String -> renderer -> Doc
        -- | Render a parse tree annotated with location information.
        renderParseTree' :: ParseTreeA LocationInfo -> renderer -> Doc
        -- | Render a parse tree.
        renderParseTree :: ParseTree -> renderer -> Doc
        -- | Render a parse tree with additional debugging output.
        renderParseTreeDebug :: ParseTree -> renderer -> Doc
{-# language CPP #-}
{-# language QuasiQuotes #-}
{-# language TemplateHaskell #-}
#ifndef ENABLE_INTERNAL_DOCUMENTATION
{-# OPTIONS_HADDOCK hide #-}
#endif
-- | Interface between OpenCV (extra modules) and inline-c(pp) (Haskell)
module OpenCV.Extra.Internal.C.Inline ( openCvExtraCtx ) where
import "base" Data.Monoid ( (<>) )
import qualified "containers" Data.Map as M
import qualified "inline-c" Language.C.Inline as C
import qualified "inline-c" Language.C.Types as C
import qualified "inline-c" Language.C.Inline.Context as C
import "opencv" OpenCV.Internal.C.Inline ( openCvCtx )
import "this" OpenCV.Extra.Internal.C.Types
-- | Context useful to work with the OpenCV library's extra modules.
--
-- Based on 'C.cppCtx', 'C.bsCtx', 'C.vecCtx' and most importantly 'openCvCtx'.
--
-- 'C.ctxTypesTable': converts OpenCV basic types to their counterparts in
-- "OpenCV.Internal.C.Inline".
--
-- No 'C.ctxAntiQuoters'.
openCvExtraCtx :: C.Context
openCvExtraCtx = openCvCtx <> ctx
    where
        -- Only the types table is extended; everything else comes from
        -- the base 'openCvCtx'.
        ctx = mempty { C.ctxTypesTable = openCvExtraTypesTable }

-- | Maps C type names of the extra modules to their Haskell
-- counterparts from "OpenCV.Extra.Internal.C.Types".
openCvExtraTypesTable :: C.TypesTable
openCvExtraTypesTable = M.fromList
    [ ( C.TypeName "Ptr_BackgroundSubtractorGMG", [t| C'Ptr_BackgroundSubtractorGMG |] )
    , ( C.TypeName "Ptr_BackgroundSubtractorMOG", [t| C'Ptr_BackgroundSubtractorMOG |] )
    ]
| lukexi/haskell-opencv | opencv-extra/src/OpenCV/Extra/Internal/C/Inline.hs | bsd-3-clause | 1,309 | 0 | 9 | 172 | 190 | 134 | 56 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
module LOGL.Mesh
(
Mesh, createMesh, drawMesh, Vertex(..), deleteMesh, Texture(..), TextureType(..)
)
where
import Linear.V2
import Linear.V3
import Graphics.Rendering.OpenGL.GL as GL hiding (Vertex, normalize, position)
import Foreign.Storable
import Foreign.Ptr
import Graphics.GLUtil.BufferObjects
import Graphics.GLUtil.ShaderProgram
import Graphics.GLUtil.VertexArrayObjects
import Control.Monad.Reader
import Control.Monad.IO.Class
import LOGL.Application.Context
-- | One mesh vertex: position, normal and texture coordinates
-- (8 GLfloats = 32 bytes in total).
data Vertex = Vertex { position :: V3 GLfloat, normal :: V3 GLfloat, texCoords :: V2 GLfloat }
    deriving (Eq, Show)

-- Flat layout: floats [0..2] position, [3..5] normal, [6..7] texCoords.
-- The Elem-offset indices are in units of the poked field's own size
-- (12 bytes for V3, 8 for V2), giving byte offsets 0, 12 and 24.
instance Storable Vertex where
    sizeOf _ = 2 * sizeOf (undefined :: V3 GLfloat) + sizeOf (undefined :: V2 GLfloat)
    alignment _ = alignment (undefined :: GLfloat)
    poke ptr (Vertex pos norm text) = do
        poke (castPtr ptr) pos
        pokeElemOff (castPtr ptr) 1 norm
        pokeElemOff (castPtr ptr) 3 text
    peek ptr = Vertex <$> peek (castPtr ptr) <*> peekElemOff (castPtr ptr) 1 <*> peekElemOff (castPtr ptr) 3

-- | Reference to a texture: cache key ('tref', looked up via
-- getTexture), its role, and the shader sampler name ('tname').
data Texture = Texture { tref :: String, ttype :: TextureType, tname :: String }
    deriving (Eq, Show)

-- | Role a texture plays in the material.
data TextureType = DiffuseMap | SpecularMap | NormalMap
    deriving (Eq, Show)

-- | Geometry plus the GL objects (VAO/VBO/EBO) it was uploaded into.
data Mesh = Mesh { vertices :: [Vertex], indices :: [GLuint], textures :: [Texture], vao :: VertexArrayObject, vbo :: BufferObject, ebo :: BufferObject}
    deriving (Eq, Show)
-- | Release the GL objects owned by a mesh (VAO first, then both buffers).
deleteMesh :: Mesh -> IO ()
deleteMesh m = do
    deleteObjectName (vao m)
    deleteObjectNames [ebo m, vbo m]
-- | Draw a mesh with the shader registered under the given key,
-- binding its textures to consecutive texture units first.
drawMesh :: (MonadReader m, EnvType m ~ AppContext, MonadIO m) => Mesh -> String -> m ()
drawMesh mesh sref = do
    setTextures sref (textures mesh)
    liftIO $ withVAO (vao mesh) $ drawElements Triangles idxCount UnsignedInt nullPtr
    where
        idxCount = fromIntegral (length (indices mesh))

-- | Bind each texture in the list to its own texture unit (0..n-1).
setTextures ::(MonadReader m, EnvType m ~ AppContext, MonadIO m) => String -> [Texture] -> m ()
setTextures sref [] = return ()
setTextures sref texts = mapM_ (setTexture sref texts) [0..texCount - 1]
    where
        texCount = fromIntegral (length texts)

-- | Bind texture number @tu@ of the list to texture unit @tu@ and point
-- the shader sampler uniform (@"mat." ++ tname@) at that unit.
-- The (!!) is in-bounds because 'setTextures' only passes indices below
-- the list length.
setTexture :: (MonadReader m, EnvType m ~ AppContext, MonadIO m) => String -> [Texture] -> GLuint -> m ()
setTexture sref texts tu = do
    shader <- getShader sref
    let text = texts !! fromIntegral tu
        name = "mat." ++ tname text
    tobj <- getTexture (tref text)
    activeTexture $= TextureUnit tu
    textureBinding Texture2D $= Just tobj
    liftIO $ setUniform shader name (TextureUnit tu)
-- | Upload vertex and index data and configure a VAO for it.
--
-- The attribute setup mirrors the 'Storable' layout of 'Vertex':
-- stride 8 floats (8*4 bytes); location 0 = 3 floats at offset 0
-- (position), location 1 = 3 floats at offset 3*4 (normal),
-- location 2 = 2 floats at offset 6*4 (texCoords).
createMesh :: [Vertex] -> [GLuint] -> [Texture] -> IO Mesh
createMesh verts inds texts = do
    newVao <- genObjectName
    newVbo <- makeBuffer ArrayBuffer verts
    newEbo <- makeBuffer ElementArrayBuffer inds
    bindVertexArrayObject $= Just newVao
    bindBuffer ArrayBuffer $= Just newVbo
    bindBuffer ElementArrayBuffer $= Just newEbo
    vertexAttribPointer (AttribLocation 0) $= (ToFloat, VertexArrayDescriptor 3 Float (8*4) offset0)
    vertexAttribArray (AttribLocation 0) $= Enabled
    vertexAttribPointer (AttribLocation 1) $= (ToFloat, VertexArrayDescriptor 3 Float (8*4) (offsetPtr (3*4)))
    vertexAttribArray (AttribLocation 1) $= Enabled
    vertexAttribPointer (AttribLocation 2) $= (ToFloat, VertexArrayDescriptor 2 Float (8*4) (offsetPtr (6*4)))
    vertexAttribArray (AttribLocation 2) $= Enabled
    -- Unbind the VAO so later GL state changes cannot leak into it.
    bindVertexArrayObject $= Nothing
    return Mesh {
        vertices = verts, indices = inds, textures = texts, vao = newVao, vbo = newVbo, ebo = newEbo }
| atwupack/LearnOpenGL | src/LOGL/Mesh.hs | bsd-3-clause | 3,535 | 0 | 13 | 695 | 1,266 | 653 | 613 | 71 | 1 |
--
-- (c) The University of Glasgow 2002-2006
--
-- The IO Monad with an environment
--
-- The environment is passed around as a Reader monad but
-- as its in the IO monad, mutable references can be used
-- for updating state.
--
{-# LANGUAGE UndecidableInstances #-}
module IOEnv (
IOEnv, -- Instance of Monad
-- Monad utilities
module MonadUtils,
-- Errors
failM, failWithM,
IOEnvFailure(..),
-- Getting at the environment
getEnv, setEnv, updEnv,
runIOEnv, unsafeInterleaveM,
tryM, tryAllM, tryMostM, fixM,
-- I/O operations
IORef, newMutVar, readMutVar, writeMutVar, updMutVar,
atomicUpdMutVar, atomicUpdMutVar'
) where
import DynFlags
import Exception
import Module
import Panic
import Data.IORef ( IORef, newIORef, readIORef, writeIORef, modifyIORef,
atomicModifyIORef )
import Data.Typeable
import System.IO.Unsafe ( unsafeInterleaveIO )
import System.IO ( fixIO )
import Control.Monad
import MonadUtils
import Control.Applicative (Alternative(..))
----------------------------------------------------------------------
-- Defining the monad type
----------------------------------------------------------------------
-- | A Reader-style environment threaded over IO.
newtype IOEnv env a = IOEnv (env -> IO a)

unIOEnv :: IOEnv env a -> (env -> IO a)
unIOEnv (IOEnv m) = m

instance Monad (IOEnv m) where
    (>>=) = thenM
    (>>) = thenM_
    return = returnM
    fail _ = failM -- Ignore the string

instance Applicative (IOEnv m) where
    pure = returnM
    IOEnv f <*> IOEnv x = IOEnv (\ env -> f env <*> x env )

instance Functor (IOEnv m) where
    fmap f (IOEnv m) = IOEnv (\ env -> fmap f (m env))
returnM :: a -> IOEnv env a
returnM a = IOEnv (\ _ -> return a)

-- Bind: the same environment is passed to both computations.
thenM :: IOEnv env a -> (a -> IOEnv env b) -> IOEnv env b
thenM (IOEnv m) f = IOEnv (\ env -> do { r <- m env ;
                                         unIOEnv (f r) env })

thenM_ :: IOEnv env a -> IOEnv env b -> IOEnv env b
thenM_ (IOEnv m) f = IOEnv (\ env -> do { _ <- m env ; unIOEnv f env })

-- Failure is represented by throwing 'IOEnvFailure' in IO.
failM :: IOEnv env a
failM = IOEnv (\ _ -> throwIO IOEnvFailure)

failWithM :: String -> IOEnv env a
failWithM s = IOEnv (\ _ -> ioError (userError s))

-- | The exception thrown by 'failM' and caught by 'tryM'.
data IOEnvFailure = IOEnvFailure
    deriving Typeable

instance Show IOEnvFailure where
    show IOEnvFailure = "IOEnv failure"

instance Exception IOEnvFailure

instance ContainsDynFlags env => HasDynFlags (IOEnv env) where
    getDynFlags = do env <- getEnv
                     return $ extractDynFlags env

instance ContainsModule env => HasModule (IOEnv env) where
    getModule = do env <- getEnv
                   return $ extractModule env
----------------------------------------------------------------------
-- Fundamental combinators specific to the monad
----------------------------------------------------------------------

---------------------------
-- | Run an IOEnv computation with the given environment.
runIOEnv :: env -> IOEnv env a -> IO a
runIOEnv env (IOEnv m) = m env

---------------------------
{-# NOINLINE fixM #-}
-- Aargh!  Not inlining fixTc alleviates a space leak problem.
-- Normally fixTc is used with a lazy tuple match: if the optimiser is
-- shown the definition of fixTc, it occasionally transforms the code
-- in such a way that the code generator doesn't spot the selector
-- thunks.  Sigh.
fixM :: (a -> IOEnv env a) -> IOEnv env a
fixM f = IOEnv (\ env -> fixIO (\ r -> unIOEnv (f r) env))
---------------------------
tryM :: IOEnv env r -> IOEnv env (Either IOEnvFailure r)
-- Reflect UserError exceptions (only) into IOEnv monad
-- Other exceptions are not caught; they are simply propagated as exns
--
-- The idea is that errors in the program being compiled will give rise
-- to UserErrors. But, say, pattern-match failures in GHC itself should
-- not be caught here, else they'll be reported as errors in the program
-- being compiled!
tryM (IOEnv thing) = IOEnv (\ env -> tryIOEnvFailure (thing env))

tryIOEnvFailure :: IO a -> IO (Either IOEnvFailure a)
tryIOEnvFailure = try

-- XXX We shouldn't be catching everything, e.g. timeouts
tryAllM :: IOEnv env r -> IOEnv env (Either SomeException r)
-- Catch *all* exceptions
-- This is used when running a Template-Haskell splice, when
-- even a pattern-match failure is a programmer error
tryAllM (IOEnv thing) = IOEnv (\ env -> try (thing env))

tryMostM :: IOEnv env r -> IOEnv env (Either SomeException r)
tryMostM (IOEnv thing) = IOEnv (\ env -> tryMost (thing env))

---------------------------
-- | Defer a computation so it only runs when its result is demanded.
unsafeInterleaveM :: IOEnv env a -> IOEnv env a
unsafeInterleaveM (IOEnv m) = IOEnv (\ env -> unsafeInterleaveIO (m env))
----------------------------------------------------------------------
-- Alternative/MonadPlus
----------------------------------------------------------------------

-- Both instances delegate to the corresponding instance for IO
-- (hence the 'MonadPlus IO' constraint, which needs UndecidableInstances).
instance MonadPlus IO => Alternative (IOEnv env) where
    empty = mzero
    (<|>) = mplus

-- For use if the user has imported Control.Monad.Error from MTL
-- Requires UndecidableInstances
instance MonadPlus IO => MonadPlus (IOEnv env) where
    mzero = IOEnv (const mzero)
    m `mplus` n = IOEnv (\env -> unIOEnv m env `mplus` unIOEnv n env)
----------------------------------------------------------------------
-- Accessing input/output
----------------------------------------------------------------------

instance MonadIO (IOEnv env) where
    liftIO io = IOEnv (\ _ -> io)

-- | Allocate a new mutable reference.
newMutVar :: a -> IOEnv env (IORef a)
newMutVar val = liftIO (newIORef val)

writeMutVar :: IORef a -> a -> IOEnv env ()
writeMutVar var val = liftIO (writeIORef var val)

readMutVar :: IORef a -> IOEnv env a
readMutVar var = liftIO (readIORef var)

-- NOTE: 'modifyIORef' is lazy; repeated updates can accumulate thunks.
updMutVar :: IORef a -> (a -> a) -> IOEnv env ()
updMutVar var upd = liftIO (modifyIORef var upd)

-- | Atomically update the reference. Does not force the evaluation of the
-- new variable contents. For strict update, use 'atomicUpdMutVar''.
atomicUpdMutVar :: IORef a -> (a -> (a, b)) -> IOEnv env b
atomicUpdMutVar var upd = liftIO (atomicModifyIORef var upd)

-- | Strict variant of 'atomicUpdMutVar': forces the new contents to WHNF.
atomicUpdMutVar' :: IORef a -> (a -> (a, b)) -> IOEnv env b
atomicUpdMutVar' var upd = do
    r <- atomicUpdMutVar var upd
    _ <- liftIO . evaluate =<< readMutVar var
    return r
----------------------------------------------------------------------
-- Accessing the environment
----------------------------------------------------------------------

-- | Read the environment.
getEnv :: IOEnv env env
{-# INLINE getEnv #-}
getEnv = IOEnv (\ env -> return env)

-- | Perform a computation with a different environment
setEnv :: env' -> IOEnv env' a -> IOEnv env a
{-# INLINE setEnv #-}
setEnv new_env (IOEnv m) = IOEnv (\ _ -> m new_env)

-- | Perform a computation with an altered environment
updEnv :: (env -> env') -> IOEnv env' a -> IOEnv env a
{-# INLINE updEnv #-}
updEnv upd (IOEnv m) = IOEnv (\ env -> m (upd env))
| ekmett/ghc | compiler/utils/IOEnv.hs | bsd-3-clause | 6,839 | 0 | 13 | 1,392 | 1,781 | 942 | 839 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
#include "lens-common.h"
-----------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Each
-- Copyright : (C) 2012-16 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Control.Lens.Each
(
-- * Each
Each(..)
) where
import Prelude ()
import Control.Lens.Traversal
import Control.Lens.Internal.ByteString
import Control.Lens.Internal.Prelude
import Data.Array.Unboxed as Unboxed
import Data.Array.IArray as IArray
import Data.ByteString as StrictB
import Data.ByteString.Lazy as LazyB
import Data.Complex
import Data.HashMap.Lazy as HashMap
import Data.IntMap as IntMap
import Data.Map as Map
import Data.Sequence as Seq
import Data.Text.Lens (text)
import Data.Text as StrictT
import Data.Text.Lazy as LazyT
import Data.Tree as Tree
import Data.Vector.Generic.Lens (vectorTraverse)
import qualified Data.Vector as Vector
import qualified Data.Vector.Primitive as Prim
import Data.Vector.Primitive (Prim)
import qualified Data.Vector.Storable as Storable
import Data.Vector.Storable (Storable)
import qualified Data.Vector.Unboxed as Unboxed
import Data.Vector.Unboxed (Unbox)
import Data.Word
-- $setup
-- >>> :set -XNoOverloadedStrings
-- >>> import Control.Lens
-- >>> import Data.Text.Strict.Lens as Text
-- >>> import Data.Char as Char
-- | Extract 'each' element of a (potentially monomorphic) container.
--
-- Notably, when applied to a tuple, this generalizes 'Control.Lens.Traversal.both' to arbitrary homogeneous tuples.
--
-- >>> (1,2,3) & each *~ 10
-- (10,20,30)
--
-- It can also be used on monomorphic containers like 'StrictT.Text' or 'StrictB.ByteString'.
--
-- >>> over each Char.toUpper ("hello"^.Text.packed)
-- "HELLO"
--
-- >>> ("hello","world") & each.each %~ Char.toUpper
-- ("HELLO","WORLD")
class Each s t a b | s -> a, t -> b, s b -> t, t a -> s where
  -- | Traverse every element of the container.
  each :: Traversal s t a b
  -- The default reuses 'Traversable' whenever the container is an ordinary
  -- polymorphic functor applied to its element type.
  default each :: (Traversable g, s ~ g a, t ~ g b) => Traversal s t a b
  each = traverse
  {-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a) (b,b) a b@
instance (a~a', b~b') => Each (a,a') (b,b') a b where
each f ~(a,b) = (,) <$> f a <*> f b
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a,a) (b,b,b) a b@
instance (a~a2, a~a3, b~b2, b~b3) => Each (a,a2,a3) (b,b2,b3) a b where
each f ~(a,b,c) = (,,) <$> f a <*> f b <*> f c
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a,a,a) (b,b,b,b) a b@
instance (a~a2, a~a3, a~a4, b~b2, b~b3, b~b4) => Each (a,a2,a3,a4) (b,b2,b3,b4) a b where
each f ~(a,b,c,d) = (,,,) <$> f a <*> f b <*> f c <*> f d
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a,a,a,a) (b,b,b,b,b) a b@
instance (a~a2, a~a3, a~a4, a~a5, b~b2, b~b3, b~b4, b~b5) => Each (a,a2,a3,a4,a5) (b,b2,b3,b4,b5) a b where
each f ~(a,b,c,d,e) = (,,,,) <$> f a <*> f b <*> f c <*> f d <*> f e
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a,a,a,a,a) (b,b,b,b,b,b) a b@
instance (a~a2, a~a3, a~a4, a~a5, a~a6, b~b2, b~b3, b~b4, b~b5, b~b6) => Each (a,a2,a3,a4,a5,a6) (b,b2,b3,b4,b5,b6) a b where
each f ~(a,b,c,d,e,g) = (,,,,,) <$> f a <*> f b <*> f c <*> f d <*> f e <*> f g
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a,a,a,a,a,a) (b,b,b,b,b,b,b) a b@
instance (a~a2, a~a3, a~a4, a~a5, a~a6, a~a7, b~b2, b~b3, b~b4, b~b5, b~b6, b~b7) => Each (a,a2,a3,a4,a5,a6,a7) (b,b2,b3,b4,b5,b6,b7) a b where
each f ~(a,b,c,d,e,g,h) = (,,,,,,) <$> f a <*> f b <*> f c <*> f d <*> f e <*> f g <*> f h
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a,a,a,a,a,a,a) (b,b,b,b,b,b,b,b) a b@
instance (a~a2, a~a3, a~a4, a~a5, a~a6, a~a7, a~a8, b~b2, b~b3, b~b4, b~b5, b~b6, b~b7, b~b8) => Each (a,a2,a3,a4,a5,a6,a7,a8) (b,b2,b3,b4,b5,b6,b7,b8) a b where
each f ~(a,b,c,d,e,g,h,i) = (,,,,,,,) <$> f a <*> f b <*> f c <*> f d <*> f e <*> f g <*> f h <*> f i
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (a,a,a,a,a,a,a,a,a) (b,b,b,b,b,b,b,b,b) a b@
instance (a~a2, a~a3, a~a4, a~a5, a~a6, a~a7, a~a8, a~a9, b~b2, b~b3, b~b4, b~b5, b~b6, b~b7, b~b8, b~b9) => Each (a,a2,a3,a4,a5,a6,a7,a8,a9) (b,b2,b3,b4,b5,b6,b7,b8,b9) a b where
each f ~(a,b,c,d,e,g,h,i,j) = (,,,,,,,,) <$> f a <*> f b <*> f c <*> f d <*> f e <*> f g <*> f h <*> f i <*> f j
{-# INLINE each #-}
-- | @'each' :: ('RealFloat' a, 'RealFloat' b) => 'Traversal' ('Complex' a) ('Complex' b) a b@
instance Each (Complex a) (Complex b) a b where
each f (a :+ b) = (:+) <$> f a <*> f b
{-# INLINE each #-}
-- | @'each' :: 'Traversal' ('Map' c a) ('Map' c b) a b@
instance (c ~ d) => Each (Map c a) (Map d b) a b where
each = traversed
{-# INLINE each #-}
-- | @'each' :: 'Traversal' ('IntMap' a) ('IntMap' b) a b@
instance Each (IntMap a) (IntMap b) a b where
each = traversed
{-# INLINE each #-}
-- | @'each' :: 'Traversal' ('HashMap' c a) ('HashMap' c b) a b@
instance (c ~ d) => Each (HashMap c a) (HashMap d b) a b where
each = traversed
{-# INLINE each #-}
-- | @'each' :: 'Traversal' [a] [b] a b@
instance Each [a] [b] a b where
each = traversed
{-# INLINE each #-}
-- | @'each' :: 'Traversal' (NonEmpty a) (NonEmpty b) a b@
instance Each (NonEmpty a) (NonEmpty b) a b
-- | @'each' :: 'Traversal' ('Identity' a) ('Identity' b) a b@
instance Each (Identity a) (Identity b) a b
-- | @'each' :: 'Traversal' ('Maybe' a) ('Maybe' b) a b@
instance Each (Maybe a) (Maybe b) a b
-- | @'each' :: 'Traversal' ('Either' a a) ('Either' b b) a b@
--
-- @since 4.18
instance (a~a', b~b') => Each (Either a a') (Either b b') a b where
  -- Traverse the payload of whichever constructor is present, keeping
  -- the constructor itself.
  each f (Left  x) = Left  <$> f x
  each f (Right y) = Right <$> f y
  {-# INLINE each #-}
-- | @'each' :: 'Traversal' ('Seq' a) ('Seq' b) a b@
instance Each (Seq a) (Seq b) a b where
each = traversed
{-# INLINE each #-}
-- | @'each' :: 'Traversal' ('Tree' a) ('Tree' b) a b@
instance Each (Tree a) (Tree b) a b
-- | @'each' :: 'Traversal' ('Vector.Vector' a) ('Vector.Vector' b) a b@
instance Each (Vector.Vector a) (Vector.Vector b) a b where
each = vectorTraverse
{-# INLINE each #-}
-- | @'each' :: ('Prim' a, 'Prim' b) => 'Traversal' ('Prim.Vector' a) ('Prim.Vector' b) a b@
instance (Prim a, Prim b) => Each (Prim.Vector a) (Prim.Vector b) a b where
each = vectorTraverse
{-# INLINE each #-}
-- | @'each' :: ('Storable' a, 'Storable' b) => 'Traversal' ('Storable.Vector' a) ('Storable.Vector' b) a b@
instance (Storable a, Storable b) => Each (Storable.Vector a) (Storable.Vector b) a b where
each = vectorTraverse
{-# INLINE each #-}
-- | @'each' :: ('Unbox' a, 'Unbox' b) => 'Traversal' ('Unboxed.Vector' a) ('Unboxed.Vector' b) a b@
instance (Unbox a, Unbox b) => Each (Unboxed.Vector a) (Unboxed.Vector b) a b where
each = vectorTraverse
{-# INLINE each #-}
-- | @'each' :: 'Traversal' 'StrictT.Text' 'StrictT.Text' 'Char' 'Char'@
instance (a ~ Char, b ~ Char) => Each StrictT.Text StrictT.Text a b where
each = text
{-# INLINE each #-}
-- | @'each' :: 'Traversal' 'LazyT.Text' 'LazyT.Text' 'Char' 'Char'@
instance (a ~ Char, b ~ Char) => Each LazyT.Text LazyT.Text a b where
each = text
{-# INLINE each #-}
-- | @'each' :: 'Traversal' 'StrictB.ByteString' 'StrictB.ByteString' 'Word8' 'Word8'@
instance (a ~ Word8, b ~ Word8) => Each StrictB.ByteString StrictB.ByteString a b where
each = traversedStrictTree
{-# INLINE each #-}
-- | @'each' :: 'Traversal' 'LazyB.ByteString' 'LazyB.ByteString' 'Word8' 'Word8'@
instance (a ~ Word8, b ~ Word8) => Each LazyB.ByteString LazyB.ByteString a b where
each = traversedLazy
{-# INLINE each #-}
-- | @'each' :: 'Ix' i => 'Traversal' ('Array' i a) ('Array' i b) a b@
instance (Ix i, i ~ j) => Each (Array i a) (Array j b) a b where
  -- Traverse the element of every (index, element) pair -- the pair
  -- 'Traversable' instance visits only the second component -- then
  -- rebuild the array over the original bounds.
  each f arr = array (bounds arr) <$> traverse (traverse f) (IArray.assocs arr)
  {-# INLINE each #-}
-- | @'each' :: ('Ix' i, 'IArray' 'UArray' a, 'IArray' 'UArray' b) => 'Traversal' ('Array' i a) ('Array' i b) a b@
instance (Ix i, IArray UArray a, IArray UArray b, i ~ j) => Each (UArray i a) (UArray j b) a b where
  -- Same strategy as the boxed 'Array' instance: traverse the element of
  -- each (index, element) pair and rebuild over the original bounds.
  each f arr = array (bounds arr) <$> traverse (traverse f) (IArray.assocs arr)
  {-# INLINE each #-}
| ddssff/lens | src/Control/Lens/Each.hs | bsd-3-clause | 8,548 | 0 | 15 | 1,615 | 3,006 | 1,678 | 1,328 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
-- | Ring buffers.
module Game.LambdaHack.Common.RingBuffer
( RingBuffer
, empty, cons, toList, length
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude hiding (length, uncons)
import Data.Binary
import qualified Data.Foldable as Foldable
import qualified Data.Sequence as Seq
import GHC.Generics (Generic)
-- | Ring buffers of a size determined at initialization.
data RingBuffer a = RingBuffer
  { rbCarrier :: Seq.Seq a  -- ^ Backing store; always 'rbMaxSize' slots.
  , rbMaxSize :: Int        -- ^ Capacity of the buffer (at least 1).
  , rbNext :: Int           -- ^ Index of the slot the next 'cons' writes.
  , rbLength :: Int         -- ^ Count of slots holding real data, <= 'rbMaxSize'.
  }
  deriving (Show, Generic)
instance Binary a => Binary (RingBuffer a)
-- Only takes O(log n).
-- | Create a buffer of the given capacity, pre-filled with a dummy value.
-- A non-positive requested size is bumped up to 1.
empty :: Int -> a -> RingBuffer a
empty size dummy = RingBuffer (Seq.replicate cap dummy) cap 0 0
  where
    cap = max 1 size
-- | Push an element, overwriting the oldest entry once the buffer is full.
cons :: a -> RingBuffer a -> RingBuffer a
cons x RingBuffer{..} =
  RingBuffer (Seq.update rbNext x rbCarrier) rbMaxSize next' len'
  where
    next' = (rbNext + 1) `mod` rbMaxSize
    len'  = min rbMaxSize (rbLength + 1)
-- | List the stored elements, oldest first.
toList :: RingBuffer a -> [a]
toList RingBuffer{..} =
  -- The oldest element lives rbLength slots behind the next write
  -- position (mod capacity); doubling the carrier list lets the window
  -- wrap past the physical end without explicit splitting.
  let l = Foldable.toList rbCarrier
      start = (rbNext + rbMaxSize - rbLength) `mod` rbMaxSize
  in take rbLength $ drop start $ l ++ l
-- | Number of elements currently stored (not the capacity).
length :: RingBuffer a -> Int
length = rbLength
| LambdaHack/LambdaHack | engine-src/Game/LambdaHack/Common/RingBuffer.hs | bsd-3-clause | 1,303 | 0 | 12 | 272 | 420 | 229 | 191 | -1 | -1 |
-- Copyright (c) 2009, Bjoern B. Brandenburg <bbb [at] cs.unc.edu>
--
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
-- * Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the copyright holder nor the names of any
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
{- |
This module implements a number of common bin-packing heuristics: 'FirstFit',
'LastFit', 'BestFit', 'WorstFit', and 'AlmostWorstFit'. In addition, the
not-so-common, but analytically superior (in terms of worst-case behavior),
'ModifiedFirstFit' heuristic is also supported. Further, the (slow)
'SumOfSquaresFit' heuristic, which has been considered in the context of online
bin-packing (Bender et al., 2008), is also supported.
Items can be packed in order of both 'Decreasing' and 'Increasing' size (and,
of course, in unmodified order; see 'AsGiven').
The module supports both the standard (textbook) minimization problem
(/"How many bins do I need to pack all items?"/; see 'minimizeBins' and
'countBins') and the more practical fitting problem
(/"I've got n bins; which items can I take?"/; see 'binpack').
The well-known heuristics are described online in many places and are not
further discussed here. For example, see
<http://www.cs.arizona.edu/icon/oddsends/bpack/bpack.htm> for an overview. A
description of the 'ModifiedFirstFit' algorithm is harder to come by online,
hence a brief description and references are provided below.
Note that most published analysis assumes items to be sorted in some specific
(mostly 'Decreasing') order. This module does not enforce such assumptions,
rather, any ordering can be combined with any placement heuristic.
If unsure what to pick, then try 'FirstFit' 'Decreasing' or 'BestFit'
'Decreasing' as a default. Use 'WorstFit' 'Decreasing' (in combination with
'binpack') if you want a pre-determined number of bins filled evenly.
A short overview of the 'ModifiedFirstFit' heuristic follows. This overview is
based on the description given in (Yue and Zhang, 1995).
Let @lst@ denote the list of items to be bin-packed, let @x@ denote the size of
the smallest element in @lst@, and let @cap@ denote the capacity of one
bin. @lst@ is split into the four sub-lists, @lA@, @lB@, @lC@, @lD@.
[@lA@] All items strictly larger than @cap\/2@.
[@lB@] All items of size at most @cap\/2@ and strictly larger than @cap\/3@.
[@lC@] All items of size at most @cap\/3@ and strictly larger than @(cap - x)\/5@.
[@lD@] The rest, /i.e./, all items of size at most @(cap - x)\/5@.
Items are placed as follows:
(1) Create a list of @length lA@ bins. Place each item in @lA@ into its own
bin (while maintaining relative item order with respect to @lst@). Note:
relevant published analysis assumes that @lst@ is sorted in order of
'decreasing' size.
(2) Take the list of bins created in Step 1 and reverse it.
(3) Sequentially consider each bin @b@. If the two smallest items in @lC@ do
NOT fit together into @b@ of if there a less than two items remaining in
@lC@, then pack nothing into @b@ and move on to the next bin (if any).
If they do fit together, then find the largest item @x1@ in @lC@ that
would fit together with the smallest item in @lC@ into @b@. Remove @x1@
from @lC@. Then find the largest item @x2@, @x2 \\= x1@, in @lC@ that will
now fit into @b@ /together/ with @x1@. Remove @x1@ from @lC@. Place both
@x1@ and @x2@ into @b@ and move on to the next item.
(4) Reverse the list of bins again.
(5) Use the 'FirstFit' heuristic to place all remaining items, /i.e./, @lB@,
@lD@, and any remaining items of @lC@.
References:
* D.S. Johnson and M.R. Garey (1985). A 71/60 Theorem for Bin-Packing.
/Journal of Complexity/, 1:65-106.
* M. Yue and L. Zhang (1995). A Simple Proof of the Inequality MFFD(L) <= 71/60
OPT(L) + 1, L for the MFFD Bin-Packing Algorithm.
/Acta Mathematicae Applicatae Sinica/, 11(3):318-330.
* M.A. Bender, B. Bradley, G. Jagannathan, and K. Pillaipakkamnatt (2008).
Sum-of-Squares Heuristics for Bin Packing and Memory Allocation.
/ACM Journal of Experimental Algorithmics/, 12:1-19.
-}
module Data.BinPack (
-- * Types
PlacementPolicy(..)
, OrderPolicy (AsGiven, Increasing, Decreasing)
, Measure
-- * Feature Enumeration
-- $features
, allOrders
, allPlacements
, allHeuristics
-- * Bin Abstraction
-- $bin
, Bin
, emptyBin
, emptyBins
, asBin
, tryAddItem
, addItem
, addItems
, items
, gap
-- * Bin-Packing Functions
, minimizeBins
, countBins
, binpack
) where
import Data.BinPack.Internals
import Data.BinPack.Internals.MFF (binpackMFF, minimizeMFF)
import Data.BinPack.Internals.SumOfSquares (sosfit, sosfitAnyFit)
-- | What placement heuristic should be used?
data PlacementPolicy = FirstFit -- ^ Traverse bin list from 'head' to
-- 'last' and place item in the first
-- bin that has sufficient capacity.
| ModifiedFirstFit -- ^ See above.
| LastFit -- ^ Traverse bin list from 'last' to
-- 'head' and place item in the first
-- bin that has sufficient capacity.
| BestFit -- ^ Place item in the bin with the
-- most capacity.
| WorstFit -- ^ Place item in the bin with the
-- least (but sufficient) capacity.
| AlmostWorstFit -- ^ Choose the 2nd to worst-fitting
-- bin.
| SumOfSquaresFit -- ^ Choose bin such that sum-of-squares
-- heuristic is minimized.
deriving (Show, Eq, Ord)
-- $features
-- Lists of all supported heuristics. Useful for benchmarking and testing.
-- | The list of all possible 'PlacementPolicy' choices.
allPlacements :: [PlacementPolicy]
allPlacements = [FirstFit, ModifiedFirstFit, LastFit, BestFit
, WorstFit, AlmostWorstFit, SumOfSquaresFit]
-- | The list of all possible 'OrderPolicy' choices.
allOrders :: [OrderPolicy]
allOrders = [Decreasing, Increasing, AsGiven]
-- | All supported ordering and placment choices.
allHeuristics :: [(PlacementPolicy, OrderPolicy)]
allHeuristics = [(p, o) | p <- allPlacements, o <- allOrders]
-- Map each simple (any-fit-style) policy to its placement function.
-- 'ModifiedFirstFit' is deliberately absent: 'minimizeBins' and 'binpack'
-- branch on it before consulting this table, so the 'error' is unreachable
-- from those entry points.  Note that 'minimizeBins' also special-cases
-- 'SumOfSquaresFit' (using 'sosfit' rather than the 'sosfitAnyFit' below).
placement :: (Ord a, Num a) => PlacementPolicy -> Placement a b
placement WorstFit = worstfit
placement BestFit = bestfit
placement FirstFit = firstfit
placement LastFit = lastfit
placement AlmostWorstFit = almostWorstfit
placement SumOfSquaresFit = sosfitAnyFit
placement ModifiedFirstFit = error "Not a simple placment policy."
-- $bin
-- Conceptually, a bin is defined by its remaining capacity and the contained
-- items. Currently, it is just a tuple, but this may change in future
-- releases. Clients of this module should rely on the following accessor
-- functions.
{- | Bin-packing without a limit on the number of bins (minimization problem).
Assumption: The maximum item size is at most the size of one bin (this is not
checked).
Examples:
* Pack the words of the sentence /"Bin packing heuristics are a lot of fun!"/
into bins of size 11, assuming the size of a word is its length. The
'Increasing' ordering yields a sub-optimal result that leaves a lot of empty
space in the bins.
> minimizeBins FirstFit Increasing length 11 (words "Bin packing heuristics are a lot of fun!")
> ~~> [(2,["are","Bin","of","a"]),(4,["fun!","lot"]),(4,["packing"]),(1,["heuristics"])]
* Similarly, for 'Int'. Note that we use 'id' as a 'Measure' of the size of an 'Int'.
> minimizeBins FirstFit Decreasing id 11 [3,7,10,3,1,3,2,4]
> ~~> [(0,[1,10]),(0,[4,7]),(0,[2,3,3,3])]
-}
minimizeBins :: (Num a, Ord a) =>
    PlacementPolicy -- ^ The bin-packing heuristic to use.
    -> OrderPolicy -- ^ How to order the items before placement.
    -> Measure a b -- ^ How to size the items.
    -> a -- ^ The size of one bin.
    -> [b] -- ^ The items.
    -> [Bin a b] -- ^ The result: a list of 'Bin's.
minimizeBins fitPol ordPol size capacity objects =
    case fitPol of
      -- special MFF: more complicated looping; no re-ordered items.
      ModifiedFirstFit -> minimizeMFF ordPol size capacity objects
      -- special SOS: not an any-fit heuristic.
      SumOfSquaresFit -> minimize capacity size (sosfit capacity) [] items'
      -- everything else can be handled by minimize+placement.
      _ -> minimize capacity size (placement fitPol) [] items'
  where items' = order ordPol size objects
{- |
Wrapper around 'minimizeBins'; useful if only the number of required
bins is of interest. See 'minimizeBins' for a description of the arguments.
Examples:
* How many bins of size 11 characters each do we need to pack the words of the sentence
/"Bin packing heuristics are a lot of fun!"/?
> countBins FirstFit Increasing length 11 (words "Bin packing heuristics are a lot of fun!")
> ~~> 4
* Similarly, for 'Int'. As before, we use 'id' as a 'Measure' for the size of an 'Int'.
> countBins FirstFit Decreasing id 11 [3,7,10,3,1,3,2,4]
> ~~> 3
-}
-- | Count how many bins 'minimizeBins' needs for the given items; see
-- 'minimizeBins' for the meaning of the arguments.
countBins :: (Num a, Ord a) =>
             PlacementPolicy -> OrderPolicy -> Measure a b -> a -> [b] -> Int
countBins fitPol ordPol size cap xs =
  length (minimizeBins fitPol ordPol size cap xs)
{- | Bin-pack a list of items into a list of (possibly non-uniform) bins. If
an item cannot be placed, instead of creating a new bin, this version will
return a list of items that could not be packed (if any).
Example: We have two empty bins, one of size 10 and one of size 12.
Which words can we fit in there?
> binpack WorstFit Decreasing length [emptyBin 10, emptyBin 12] (words "Bin packing heuristics are a lot of fun!")
> ~~> ([(0,["Bin","packing"]),(0,["of","heuristics"])],["a","lot","are","fun!"])
Both bins were filled completely, and the words /"are a lot fun!"/ coult not be
packed. -}
binpack :: (Num a, Ord a) =>
    PlacementPolicy -- ^ The bin packing heuristic to use.
    -> OrderPolicy -- ^ How to order the items before placement.
    -> Measure a b -- ^ How to size the items.
    -> [Bin a b] -- ^ The bins; may be non-uniform and pre-filled.
    -> [b] -- ^ The items.
    -> ([Bin a b], [b]) -- ^ The result; a list of bins
                        -- and a list of items that could not
                        -- be placed.
binpack fitPol ordPol size bins objects =
  let
    -- Laziness matters here: for 'ModifiedFirstFit' the 'placement'
    -- lookup would 'error', but 'fit' is never forced on that branch.
    fit = placement fitPol
    items' = order ordPol size objects
  in
  case fitPol of
    ModifiedFirstFit -> binpackMFF ordPol size bins items'
    _ -> binpack' (fit size) bins items' []
| ivanperez-keera/Binpack | Data/BinPack.hs | bsd-3-clause | 12,919 | 0 | 13 | 3,575 | 804 | 478 | 326 | 78 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wno-unused-imports #-}
{-# OPTIONS_GHC -Wno-unused-matches #-}
{-# OPTIONS_GHC -Wno-type-defaults #-}
module Test where
import Test.HUnit
import qualified Arch
import Data.Text (Text)
import CompareForm
import CompareFormTemplate
-- | Load the package statistics fixture and render the comparison page
-- for vim vs. emacs, erroring out if any stage fails.
testGetComparePackage :: IO Text
testGetComparePackage = do
  mStats <- Arch.getPackagesStats "packageStatistics.json"
  case mStats of
    Nothing -> error "hmm"
    Just stats ->
      case comparePackageGetPackages ["vim", "emacs"] stats of
        Left e    -> error e
        Right aps -> getComparePackageTmpl ["vim", "emacs"] aps stats
-- | Smoke test: render the comparison page and require the IO action to
-- complete.  NOTE(review): the assertion compares the output with itself,
-- so it can only fail by exception, never by inequality -- confirm this
-- is the intent.
bobby :: IO ()
bobby = do
  texty <- testGetComparePackage
  assertEqual "grr" texty texty
-- | Wrap the 'bobby' smoke test as an HUnit test case.
test1 :: Test
test1 = TestCase (assert bobby)
tests :: Test
tests = TestList [TestLabel "test1" test1]
main :: IO Counts
main = runTestTT tests
| chrissound/ArchPackageCompareStats | src/Test.hs | bsd-3-clause | 970 | 0 | 19 | 186 | 243 | 124 | 119 | 32 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module TimeParseSpec where
import Control.Monad.Except
import Data.Time
import Test.Hspec
import LuminescentDreams.Text.TimeParsers
-- 2016-11-05 13:18:00 CDT
-- 2016-11-05 13:18:00 UTC
-- 2016-11-05 13:18:00
-- 2016-11-05 13:18 CDT
-- 2016-11-05 13:18 UTC
-- 2016-11-05 13:18
-- 2016-11-05
zonedTimeSpec :: Spec
zonedTimeSpec = describe "Parses partial zoned time entries" $ do
  -- Expected values are UTC; the second field of 'UTCTime' is seconds
  -- after midnight.  CDT is UTC-5, so 15:00 CDT == 20:00 UTC == 72000 s.
  it "parses a full string complete with timezone" $
    runExcept (parseZonedTime "2016-11-04 15:00:00 CDT") `shouldBe`
      Right (UTCTime (fromGregorian 2016 11 04) 72000)
  it "parses a date and time with an explicit UTC timezone" $
    runExcept (parseZonedTime "2016-11-04 15:00:00 UTC") `shouldBe`
      Right (UTCTime (fromGregorian 2016 11 04) 54000)
  -- 13:18 CDT == 18:18 UTC == 65880 s.
  it "parses a date and time with a time zone but without seconds" $
    runExcept (parseZonedTime "2016-11-05 13:18 CDT") `shouldBe`
      Right (UTCTime (fromGregorian 2016 11 05) 65880)
  -- The remaining behaviours are specified but not yet implemented.
  it "parses a date and time without a timezone specified" pending
  it "parses just a date" pending
  it "fails if the day of the month is out of range" pending
  it "fails on a malformed date string" $ pendingWith "TODO: test for this bug"
  -- (runExcept $ parseZonedTime "2016-a-b-c") `shouldBe` Left UnparsableDate
spec :: Spec
spec = zonedTimeSpec
| savannidgerinel/health | tests/TimeParseSpec.hs | bsd-3-clause | 1,377 | 0 | 13 | 289 | 237 | 121 | 116 | 23 | 1 |
module Distribution.ArchLinux.Libalpm.Wrapper.TH (
generateUpdaters,
generateEmptyRecord
) where
import Debug.Trace
import Control.Applicative
import Data.Char
import Language.Haskell.TH
-- | Generate update functions for a certain record. The transformation of field names
-- goes as follows:
--
-- * @_fieldName@ to @fieldName@ ;
--
-- * @fieldName@ to @setFieldName@ .
--
-- So for the record
--
-- > data Record = Record { _x :: Int, y :: Double }
--
-- the following updaters will be generated:
--
-- > x :: Int -> Record -> Record
-- > setY :: Double -> Record -> Record
--
generateUpdaters :: Name -> Q [Dec]
generateUpdaters name = do
  -- NOTE(review): this monadic pattern match 'fail's in 'Q' when 'name'
  -- does not reify to a plain data type -- confirm callers only pass
  -- record type names.  The 'DataD' arity here is tied to a specific
  -- template-haskell version.
  TyConI (DataD _ _ _ ctors _) <- reify name
  concat <$> mapM generateUpdatersForCtor ctors
-- Generate one updater declaration per record field of a constructor.
-- NOTE(review): only 'RecC' is matched; a non-record constructor makes
-- this function partial -- confirm inputs are always record constructors.
generateUpdatersForCtor :: Con -> Q [Dec]
generateUpdatersForCtor (RecC _ vars) = mapM genUpdater vars
  where
    genUpdater (fname, _, _) = do
      vname <- newName "value"
      rname <- newName "record"
      -- Builds: transformedName value record = record { fname = value }
      let nname = transformName fname
          updater_body = RecUpdE (VarE rname) [(fname, VarE vname)]
          updater_clause = Clause [VarP vname, VarP rname] (NormalB updater_body) []
          updater_decl = FunD nname [updater_clause]
      return updater_decl
-- | Derive the updater name from the field name: strip a leading
-- underscore if present, otherwise prepend @set@ and capitalize.
transformName :: Name -> Name
transformName name =
  mkName $ case nameBase name of
    '_' : stripped -> stripped
    plain          -> "set" ++ capitalize plain
-- | Upper-case the first character of a string; total on the empty string.
capitalize :: String -> String
capitalize []          = []
capitalize (ch : rest) = toUpper ch : rest
-- | Generate a record value containing 'Nothing' values in all its fields given record type name.
generateEmptyRecord :: String -- ^ A name for the record value
                    -> Name -- ^ A name of record type
                    -> Name -- ^ A name of record value constructor
                    -> Q [Dec]
-- NOTE(review): relies on the ViewPatterns extension for the
-- @(mkName -> declname)@ pattern (presumably enabled in the cabal file).
generateEmptyRecord (mkName -> declname) recname conname = do
  TyConI (DataD _ _ _ ctors _) <- reify recname
  -- NOTE(review): partial -- if 'conname' names no constructor of the
  -- type, the head match on the filtered list fails at splice time, and
  -- 'ctorName' itself only handles 'RecC'.  Confirm inputs are valid.
  let (ctor:_) = filter ((== conname) . ctorName) ctors
  return $ [generateRecordEmptyDecl declname ctor]
  where
    ctorName (RecC name _) = name
-- Build @declname = Con { f1 = Nothing, f2 = Nothing, ... }@ for a record
-- constructor.  NOTE(review): every field is filled with 'Nothing', so
-- fields are assumed to be 'Maybe'-typed -- confirm for all uses.  Only
-- 'RecC' constructors are handled.
generateRecordEmptyDecl :: Name -> Con -> Dec
generateRecordEmptyDecl declname (RecC conname vars) =
  let varBindings = map (\(name, _, _) -> (name, ConE (mkName "Nothing"))) vars
      val_body = RecConE conname varBindings
      val_clause = Clause [] (NormalB val_body) []
      val_decl = FunD declname [val_clause]
  in val_decl
| netvl/alpmhs | lib/Distribution/ArchLinux/Libalpm/Wrapper/TH.hs | bsd-3-clause | 2,374 | 0 | 15 | 540 | 642 | 337 | 305 | -1 | -1 |
module T176 where
import Data.Kind (Type)
import Data.Singletons.TH
import Prelude.Singletons
$(singletons [d|
class Foo1 a where
bar1 :: a -> (a -> b) -> b
baz1 :: a
quux1 :: Foo1 a => a -> a
quux1 x = x `bar1` \_ -> baz1
class Foo2 a where
bar2 :: a -> b -> b
baz2 :: a
quux2 :: Foo2 a => a -> a
quux2 x = x `bar2` baz2
|])
| goldfirere/singletons | singletons-base/tests/compile-and-dump/Singletons/T176.hs | bsd-3-clause | 361 | 0 | 7 | 108 | 39 | 24 | 15 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Language.Eiffel.TypeCheck.Generic
(resolveIFace, unlike, unlikeType, updateGeneric, updateGenerics) where
import Control.Applicative
import qualified Data.Text as Text
import Data.Text (Text)
import Language.Eiffel.TypeCheck.Context
import qualified Language.Eiffel.TypeCheck.TypedExpr as T
import Language.Eiffel.Syntax
import Language.Eiffel.Util
import Language.Eiffel.Position
import Util.Monad
resolveIFace :: Typ -> TypingBody body (AbsClas body Expr)
-- Class types: look the class up and substitute the actual generic
-- arguments for its formal parameters.
resolveIFace t@(ClassType _ ts) = updateGenerics ts `fmap` lookupClass t
-- Anchored ('like') types resolve against the type of Current.
resolveIFace (Like _) = do
  -- NOTE(review): this pattern match fails unless the current entity is
  -- a 'CurrentVar' -- confirm that invariant holds at every call site.
  T.CurrentVar t <- contents <$> currentM
  resolveIFace t
-- Separate types resolve to their underlying class with no generics.
resolveIFace (Sep _ _ t) = resolveIFace (ClassType t [])
resolveIFace t = error $ "resolveIFace: called on " ++ show t
type GenUpd a = Typ -> Typ -> a -> a
-- | Substitute the actual type arguments for the class's formal generic
-- parameters throughout its attributes and routines.  (The formal
-- generics list itself is left in place.)
updateGenerics :: [Typ] -> AbsClas body expr -> AbsClas body expr
updateGenerics actuals cls = substAll cls
  where
    -- Each formal generic, viewed as a bare class type.
    formals  = map (\g -> ClassType (genericName g) []) (generics cls)
    -- One substitution per (formal, actual) pair, composed together.
    substAll = foldr (.) id (zipWith updateGeneric formals actuals)
updateGeneric :: GenUpd (AbsClas body expr)
updateGeneric g t =
classMapAttributes (updateAttribute g t) .
classMapRoutines (updateFeatDecl g t)
-- Substitute generic @g@ by @t@ in a routine's argument and result types.
updateFeatDecl :: GenUpd (AbsRoutine body expr)
updateFeatDecl g t fd =
  fd
  { routineArgs = map (updateDecl g t) (routineArgs fd)
  , routineResult = updateTyp g t (routineResult fd)
  }
updateAttribute g t a = a {attrDecl = updateDecl g t (attrDecl a)}
updateDecl :: GenUpd Decl
updateDecl g t (Decl n t') = Decl n (updateTyp g t t')
updateTyp :: GenUpd Typ
-- Substitute @t@ for every occurrence of the formal generic @g@ in a
-- type, recursing into class type arguments and tuple components.
updateTyp g t t'@(ClassType name types)
  | g == t' = t
  | otherwise = ClassType name (map (updateTyp g t) types)
updateTyp g t t'@(TupleType typesOrDecls)
  | g == t' = t
  | otherwise = case typesOrDecls of
      Left types -> TupleType (Left $ map (updateTyp g t) types)
      Right decls -> TupleType (Right $ map (updateDecl g t) decls)
-- Any other type either is the generic itself or contains no generics.
updateTyp g t t'
  | g == t' = t
  | otherwise = t'
unlike curr clas (Decl n (Like ident)) =
Decl n <$> unlikeType curr clas (Like ident)
unlike _ _ d = return d
-- Resolve anchored ('like') types to concrete types, recursing through
-- class type arguments.
unlikeType :: Typ -> AbsClas body expr -> Typ -> TypingBodyExpr body expr Typ
unlikeType curr _ (Like "Current") = return curr
unlikeType curr clas (Like ident) = do
  -- Prefer the result type of a feature named 'ident' in 'clas';
  -- fall back to a variable of that name.
  typeMb <- typeOfVar ident
  -- NOTE(review): 'p' is only used by the commented-out debug output
  -- below; confirm whether it should be removed or restored.
  p <- currentPos
  -- resMb <- lookupFlatFeatEx clas ident
  case (featureResult <$> findSomeFeature clas ident) <|> typeMb of
    -- case typeMb <|> (featureResult <$> resMb) of
    Nothing -> error $ "unlikeType: can't find " ++ Text.unpack ident ++
               " in " ++ show curr -- ++ "," ++ show p
    -- The anchor may itself be anchored, so resolve recursively.
    Just resT -> unlikeType curr clas resT
unlikeType curr clas (ClassType name gs) =
  ClassType name <$> mapM (unlikeType curr clas) gs
unlikeType _ _ t = return t
module Idris.REPLParser(parseCmd) where
import System.FilePath ((</>))
import System.Console.ANSI (Color(..))
import Idris.Colours
import Idris.AbsSyntax
import Idris.Core.TT
import qualified Idris.Parser as P
import Control.Applicative
import Control.Monad.State.Strict
import Text.Parser.Combinators
import Text.Parser.Char(anyChar)
import Text.Trifecta(Result, parseString)
import Text.Trifecta.Delta
import Debug.Trace
import Data.List
import Data.List.Split(splitOn)
import Data.Char(toLower)
import qualified Data.ByteString.UTF8 as UTF8
-- | Parse one line of REPL input (under the given elaborator state and
-- input-source name) into a 'Command', via the top-level 'pCmd' parser.
parseCmd :: IState -> String -> String -> Result Command
parseCmd ist = P.runparser pCmd ist
-- | Accept a leading @:@ followed by any one of the given command names.
-- Names are tried longest-first so that a longer command is never shadowed
-- by one of its own prefixes.
cmd :: [String] -> P.IdrisParser ()
cmd names = do
    P.lchar ':'
    attempt (sortBy longerFirst names)
  where
    longerFirst x y = compare (length y) (length x)
    attempt []       = fail "No such command"
    attempt (n:rest) = try (discard (P.symbol n)) <|> attempt rest
-- | The top-level REPL command parser.  Each alternative recognises one
-- @:command@ form (via 'cmd') and builds the corresponding 'Command';
-- the order of alternatives is significant, so shorter/overlapping forms
-- are each wrapped in 'try'.  If no command matches, the input is parsed
-- as a bare expression and evaluated ('Eval').
pCmd :: P.IdrisParser Command
pCmd = do P.whiteSpace; try (do cmd ["q", "quit"]; eof; return Quit)
              <|> try (do cmd ["h", "?", "help"]; eof; return Help)
              <|> try (do cmd ["r", "reload"]; eof; return Reload)
              <|> try (do cmd ["module"]; f <- P.identifier; eof;
                          return (ModImport (toPath f)))
              <|> try (do cmd ["e", "edit"]; eof; return Edit)
              <|> try (do cmd ["exec", "execute"]; eof; return Execute)
              <|> try (do cmd ["c", "compile"]
                          i <- get
                          c <- option (opt_codegen $ idris_options i) codegenOption
                          f <- P.identifier
                          eof
                          return (Compile c f))
              <|> try (do cmd ["proofs"]; eof; return Proofs)
              <|> try (do cmd ["rmproof"]; n <- P.name; eof; return (RmProof n))
              <|> try (do cmd ["showproof"]; n <- P.name; eof; return (ShowProof n))
              <|> try (do cmd ["log"]; i <- P.natural; eof; return (LogLvl (fromIntegral i)))
              <|> try (do cmd ["l", "load"]; f <- many anyChar; return (Load f))
              <|> try (do cmd ["cd"]; f <- many anyChar; return (ChangeDirectory f))
              <|> try (do cmd ["spec"]; P.whiteSpace; t <- P.fullExpr defaultSyntax; return (Spec t))
              <|> try (do cmd ["hnf"]; P.whiteSpace; t <- P.fullExpr defaultSyntax; return (HNF t))
              <|> try (do cmd ["inline"]; P.whiteSpace; t <- P.fullExpr defaultSyntax; return (TestInline t))
              <|> try (do cmd ["doc"]; c <- P.constant; eof; return (DocStr (Right c)))
              <|> try (do cmd ["doc"]; n <- (P.fnName <|> (P.string "_|_" >> return falseTy)); eof; return (DocStr (Left n)))
              <|> try (do cmd ["d", "def"]; some (P.char ' ') ; n <- P.fnName; eof; return (Defn n))
              <|> try (do cmd ["total"]; do n <- P.fnName; eof; return (TotCheck n))
              <|> try (do cmd ["t", "type"]; do P.whiteSpace; t <- P.fullExpr defaultSyntax; return (Check t))
              <|> try (do cmd ["u", "universes"]; eof; return Universes)
              <|> try (do cmd ["di", "dbginfo"]; n <- P.fnName; eof; return (DebugInfo n))
              <|> try (do cmd ["miss", "missing"]; n <- P.fnName; eof; return (Missing n))
              <|> try (do cmd ["dynamic"]; eof; return ListDynamic)
              <|> try (do cmd ["dynamic"]; l <- many anyChar; return (DynamicLink l))
              <|> try (do cmd ["color", "colour"]; pSetColourCmd)
              <|> try (do cmd ["set"]; o <- pOption; return (SetOpt o))
              <|> try (do cmd ["unset"]; o <- pOption; return (UnsetOpt o))
              <|> try (do cmd ["s", "search"]; P.whiteSpace; t <- P.fullExpr defaultSyntax; return (Search t))
              <|> try (do cmd ["cs", "casesplit"]; P.whiteSpace;
                          upd <- option False (do P.lchar '!'; return True)
                          l <- P.natural; n <- P.name;
                          return (CaseSplitAt upd (fromInteger l) n))
              <|> try (do cmd ["apc", "addproofclause"]; P.whiteSpace;
                          upd <- option False (do P.lchar '!'; return True)
                          l <- P.natural; n <- P.name;
                          return (AddProofClauseFrom upd (fromInteger l) n))
              <|> try (do cmd ["ac", "addclause"]; P.whiteSpace;
                          upd <- option False (do P.lchar '!'; return True)
                          l <- P.natural; n <- P.name;
                          return (AddClauseFrom upd (fromInteger l) n))
              <|> try (do cmd ["am", "addmissing"]; P.whiteSpace;
                          upd <- option False (do P.lchar '!'; return True)
                          l <- P.natural; n <- P.name;
                          return (AddMissing upd (fromInteger l) n))
              <|> try (do cmd ["mw", "makewith"]; P.whiteSpace;
                          upd <- option False (do P.lchar '!'; return True)
                          l <- P.natural; n <- P.name;
                          return (MakeWith upd (fromInteger l) n))
              <|> try (do cmd ["ps", "proofsearch"]; P.whiteSpace;
                          upd <- option False (do P.lchar '!'; return True)
                          l <- P.natural; n <- P.name;
                          hints <- many P.fnName
                          return (DoProofSearch upd True (fromInteger l) n hints))
              <|> try (do cmd ["ref", "refine"]; P.whiteSpace;
                          upd <- option False (do P.lchar '!'; return True)
                          l <- P.natural; n <- P.name;
                          hint <- P.fnName
                          return (DoProofSearch upd False (fromInteger l) n [hint]))
              <|> try (do cmd ["p", "prove"]; n <- P.name; eof; return (Prove n))
              <|> try (do cmd ["m", "metavars"]; eof; return Metavars)
              <|> try (do cmd ["a", "addproof"]; do n <- option Nothing (do x <- P.name;
                                                                            return (Just x))
                                                    eof; return (AddProof n))
              <|> try (do cmd ["x"]; P.whiteSpace; t <- P.fullExpr defaultSyntax; return (ExecVal t))
              <|> try (do cmd ["patt"]; P.whiteSpace; t <- P.fullExpr defaultSyntax; return (Pattelab t))
              <|> try (do cmd ["errorhandlers"]; eof ; return ListErrorHandlers)
              <|> try (do cmd ["consolewidth"]; w <- pConsoleWidth ; return (SetConsoleWidth w))
              <|> try (do cmd ["apropos"]; str <- many anyChar ; return (Apropos str))
              <|> try (do cmd ["wc", "whocalls"]; P.whiteSpace; n <- P.fnName ; return (WhoCalls n))
              <|> try (do cmd ["cw", "callswho"]; P.whiteSpace; n <- P.fnName ; return (CallsWho n))
              <|> try (do cmd ["mkdoc"]; str <- many anyChar; return (MakeDoc str))
              <|> do P.whiteSpace; do eof; return NOP
              <|> do t <- P.fullExpr defaultSyntax; return (Eval t)
    where toPath n = foldl1' (</>) $ splitOn "." n
-- | Parse the name of a settable REPL option (for @:set@ / @:unset@).
-- @v <$ p@ runs parser @p@ and replaces its result with @v@, which is
-- exactly what the original @discard p >> return v@ chain did.
pOption :: P.IdrisParser Opt
pOption =     (ErrContext  <$ P.symbol "errorcontext")
          <|> (ShowImpl    <$ P.symbol "showimplicits")
          <|> (ShowOrigErr <$ P.symbol "originalerrors")
          <|> (AutoSolve   <$ P.symbol "autosolve")
          <|> (NoBanner    <$ P.symbol "nobanner")
          <|> (WarnReach   <$ P.symbol "warnreach")
-- | Parse a code-generation backend name (for @:compile@).
codegenOption :: P.IdrisParser Codegen
codegenOption =     (ViaJavaScript <$ P.symbol "javascript")
                <|> (ViaNode       <$ P.symbol "node")
                <|> (ViaJava       <$ P.symbol "Java")
                <|> (ViaLLVM       <$ P.symbol "llvm")
                <|> (Bytecode      <$ P.symbol "bytecode")
                <|> (ViaC          <$ P.symbol "C")
-- | Parse a console-width setting: the keywords @auto@ / @infinite@,
-- or a literal column count.
pConsoleWidth :: P.IdrisParser ConsoleWidth
pConsoleWidth =     (AutomaticWidth <$ P.symbol "auto")
                <|> (InfinitelyWide <$ P.symbol "infinite")
                <|> (ColsWide . fromInteger <$> P.natural)
-- | Colour names accepted by @:colour@.  'Nothing' stands for the
-- terminal's default colour; the lookup in 'pColour' wraps each name in
-- 'try', so the overlapping prefixes (\"black\"/\"blue\") are safe.
colours :: [(String, Maybe Color)]
colours = [ ("black", Just Black)
          , ("red", Just Red)
          , ("green", Just Green)
          , ("yellow", Just Yellow)
          , ("blue", Just Blue)
          , ("magenta", Just Magenta)
          , ("cyan", Just Cyan)
          , ("white", Just White)
          , ("default", Nothing)
          ]
-- | Parse one of the names in 'colours'; yields 'Nothing' for \"default\".
-- The fold unrolls to exactly the original try/<|> chain, ending in the
-- same failure message when nothing matches.
pColour :: P.IdrisParser (Maybe Color)
pColour = foldr alt (fail "Unknown colour") colours
    where alt (name, col) rest = (try (P.symbol name) >> return col) <|> rest
-- | Parse a single colour modifier — vividness, underlining, boldness,
-- italics, or a plain colour name — returning the corresponding update
-- function on an 'IdrisColour'.
pColourMod :: P.IdrisParser (IdrisColour -> IdrisColour)
pColourMod = try (P.symbol "vivid" >> return doVivid)
         <|> try (P.symbol "dull" >> return doDull)
         <|> try (P.symbol "underline" >> return doUnderline)
         <|> try (P.symbol "nounderline" >> return doNoUnderline)
         <|> try (P.symbol "bold" >> return doBold)
         <|> try (P.symbol "nobold" >> return doNoBold)
         <|> try (P.symbol "italic" >> return doItalic)
         <|> try (P.symbol "noitalic" >> return doNoItalic)
         -- idiom fix: was @pColour >>= return . doSetColour@, which is
         -- @fmap doSetColour pColour@ by the monad laws (hlint: "use fmap")
         <|> (doSetColour <$> pColour)
  where doVivid i       = i { vivid = True }
        doDull i        = i { vivid = False }
        doUnderline i   = i { underline = True }
        doNoUnderline i = i { underline = False }
        doBold i        = i { bold = True }
        doNoBold i      = i { bold = False }
        doItalic i      = i { italic = True }
        doNoItalic i    = i { italic = False }
        -- a bare colour name replaces the current colour outright
        doSetColour c i = i { colour = c }
-- | Table of every 'ColourType', keyed by its option name: the 'show'n
-- constructor name with its last six characters removed (presumably a
-- \"Colour\" suffix — confirm against the ColourType definition) and
-- lower-cased.
colourTypes :: [(String, ColourType)]
colourTypes = [ (optionName ct, ct) | ct <- [minBound .. maxBound] ]
    where optionName = map toLower . reverse . drop 6 . reverse . show
-- | Parse a colour category name from 'colourTypes'; on failure the error
-- message lists every available option.  ('intercalate' is exactly
-- @concat . intersperse@, so the message text is unchanged.)
pColourType :: P.IdrisParser ColourType
pColourType = foldr alt (fail unknownMsg) colourTypes
    where alt (name, ct) rest = (try (P.symbol name) >> return ct) <|> rest
          unknownMsg = "Unknown colour category. Options: " ++
                       intercalate ", " (map fst colourTypes)
-- | Parse the argument of @:colour@: either a category followed by
-- whitespace-separated modifiers, or the global \"on\"/\"off\" switches.
pSetColourCmd :: P.IdrisParser Command
pSetColourCmd = (do c <- pColourType
                    let defaultColour = IdrisColour Nothing True False False False
                    opts <- sepBy pColourMod (P.whiteSpace)
                    -- foldr over the reversed list applies the modifiers
                    -- left-to-right, i.e. in the order they were typed
                    let colour = foldr ($) defaultColour $ reverse opts
                    return $ SetColour c colour)
            <|> try (P.symbol "on" >> return ColourOn)
            <|> try (P.symbol "off" >> return ColourOff)
| DanielWaterworth/Idris-dev | src/Idris/REPLParser.hs | bsd-3-clause | 10,651 | 0 | 61 | 3,519 | 4,359 | 2,126 | 2,233 | 174 | 2 |
{-# LANGUAGE FunctionalDependencies
, RankNTypes
, FlexibleContexts
, FlexibleInstances
, CPP
#-}
module Sigym4.Geometry.Algorithms (
HasExtent(..)
, HasDistance(..)
, HasCentroid(..)
, HasPredicates(..)
) where
import Sigym4.Geometry.Types
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import qualified Data.Semigroup as SG
import qualified Linear.Metric as M
-- | Spatial containment predicate: @a `contains` b@ is 'True' when the
-- second value lies entirely within the first.
class HasPredicates a b where
    contains :: a -> b -> Bool
-- A point is inside an extent when it is componentwise between the
-- (inclusive) min and max corners.
instance VectorSpace v => HasPredicates (Extent v srid) (Point v srid) where
    Extent{eMin=l, eMax=h} `contains` (Point v)
        = (fmap (>= 0) (v - l) == pure True) && (fmap (>= 0) (h - v) == pure True)
-- Compound geometries are contained iff every constituent part is.
instance VectorSpace v =>
  HasPredicates (Extent v srid) (MultiPoint v srid) where
    ext `contains` (MultiPoint ps) = V.all (contains ext) ps
-- A point "contains" an extent only when the extent is degenerate,
-- collapsed exactly onto that point.
instance VectorSpace v => HasPredicates (Point v srid) (Extent v srid) where
    (Point v) `contains` (Extent lo hi) = v==lo && v==hi
instance VectorSpace v => HasPredicates (Extent v srid) (LinearRing v srid)
  where
    ext `contains` (LinearRing ps) = U.all (contains ext) ps
instance VectorSpace v => HasPredicates (Extent v srid) (LineString v srid)
  where
    ext `contains` (LineString ps) = U.all (contains ext) ps
instance VectorSpace v =>
  HasPredicates (Extent v srid) (MultiLineString v srid) where
    ext `contains` (MultiLineString ps) = V.all (contains ext) ps
-- Only the outer ring is checked; inner rings lie within it by
-- construction, so they need not be tested separately.
instance VectorSpace v => HasPredicates (Extent v srid) (Polygon v srid) where
    ext `contains` (Polygon oRing _) = ext `contains` oRing
instance VectorSpace v => HasPredicates (Extent v srid) (Triangle v srid) where
    ext `contains` (Triangle a b c) = ext `contains` a &&
                                      ext `contains` b &&
                                      ext `contains` c
instance VectorSpace v =>
  HasPredicates (Extent v srid) (MultiPolygon v srid) where
    ext `contains` (MultiPolygon ps) = V.all (contains ext) ps
instance VectorSpace v =>
  HasPredicates (Extent v srid) (PolyhedralSurface v srid) where
    ext `contains` (PolyhedralSurface ps) = V.all (contains ext) ps
instance VectorSpace v => HasPredicates (Extent v srid) (TIN v srid) where
    ext `contains` (TIN ts) = U.all (contains ext) ts
-- Dispatch on the geometry constructor and defer to the instance above.
instance VectorSpace v => HasPredicates (Extent v srid) (Geometry v srid) where
    ext `contains` (GeoPoint g) = ext `contains` g
    ext `contains` (GeoMultiPoint g) = ext `contains` g
    ext `contains` (GeoLineString g) = ext `contains` g
    ext `contains` (GeoMultiLineString g) = ext `contains` g
    ext `contains` (GeoPolygon g) = ext `contains` g
    ext `contains` (GeoMultiPolygon g) = ext `contains` g
    ext `contains` (GeoTriangle g) = ext `contains` g
    ext `contains` (GeoPolyhedralSurface g) = ext `contains` g
    ext `contains` (GeoTIN g) = ext `contains` g
    ext `contains` (GeoCollection g) = ext `contains` g
instance VectorSpace v =>
  HasPredicates (Extent v srid) (GeometryCollection v srid) where
    ext `contains` (GeometryCollection ps) = V.all (contains ext) ps
-- Features delegate to their geometry; attribute data is irrelevant here.
instance VectorSpace v => HasPredicates (Extent v srid) (Feature v srid d) where
    ext `contains` f = ext `contains` _fGeom f
instance VectorSpace v =>
  HasPredicates (Extent v srid) (FeatureCollection v srid d) where
    ext `contains` fc = all (contains ext) $ _fcFeatures fc
-- | Things with a computable centroid point.
class VectorSpace v => HasCentroid a v srid where
    centroid :: a -> Point v srid
-- A point is its own centroid.
instance VectorSpace v => HasCentroid (Point v srid) v srid where
    centroid = id
-- The centroid of a box is its min corner plus half its size.
instance VectorSpace v => HasCentroid (Extent v srid) v srid where
    centroid e = Point $ eMin e + (eSize e / 2)
-- | Pairwise distance between two values.
class HasDistance a b where
    distance :: a -> b -> Double
-- Point-to-point distance, delegated to the underlying metric ('M.distance').
instance VectorSpace v => HasDistance (Point v srid) (Point v srid) where
    distance (Point a) (Point b) = M.distance a b
-- | Things with a computable bounding 'Extent'.  The functional
-- dependencies pin the vector space and SRID to the geometry type.
class VectorSpace v => HasExtent a v srid | a->v, a->srid where
    extent :: a -> Extent v srid
-- A point's extent is the degenerate box at that point.
instance VectorSpace v => HasExtent (Point v srid) v srid where
    extent (Point v) = Extent v v
-- Compound geometries fold the extents of their parts together via
-- 'extentFromVector' (unboxed vectors are converted to boxed first).
instance VectorSpace v => HasExtent (MultiPoint v srid) v srid where
    extent = extentFromVector . V.convert . _mpPoints
instance VectorSpace v => HasExtent (LinearRing v srid) v srid where
    extent = extentFromVector . V.convert . _lrPoints
instance VectorSpace v => HasExtent (LineString v srid) v srid where
    extent = extentFromVector . V.convert . _lsPoints
instance VectorSpace v => HasExtent (MultiLineString v srid) v srid where
    extent = extentFromVector . _mlLineStrings
-- The outer ring bounds the whole polygon, so inner rings are ignored.
instance VectorSpace v => HasExtent (Polygon v srid) v srid where
    extent = extent . _pOuterRing
-- Union of the three vertex extents via the Semigroup append.
instance VectorSpace v => HasExtent (Triangle v srid) v srid where
    extent (Triangle a b c) = a' SG.<> b' SG.<> c'
        where a' = extent a
              b' = extent b
              c' = extent c
instance VectorSpace v => HasExtent (MultiPolygon v srid) v srid where
    extent = extentFromVector . _mpPolygons
instance VectorSpace v => HasExtent (PolyhedralSurface v srid) v srid where
    extent = extentFromVector . _psPolygons
instance VectorSpace v => HasExtent (TIN v srid) v srid where
    extent = extentFromVector . V.convert . _tinTriangles
-- Dispatch on the geometry constructor and defer to the instance above.
instance VectorSpace v => HasExtent (Geometry v srid) v srid where
    extent (GeoPoint g) = extent g
    extent (GeoMultiPoint g) = extent g
    extent (GeoLineString g) = extent g
    extent (GeoMultiLineString g) = extent g
    extent (GeoPolygon g) = extent g
    extent (GeoMultiPolygon g) = extent g
    extent (GeoTriangle g) = extent g
    extent (GeoPolyhedralSurface g) = extent g
    extent (GeoTIN g) = extent g
    extent (GeoCollection g) = extent g
instance VectorSpace v => HasExtent (GeometryCollection v srid) v srid where
    extent = extentFromVector . _gcGeometries
-- | Union of the extents of every element, folded strictly with the
-- Semigroup append.  'V.foldl1'' seeds the fold with the first element,
-- exactly as the original @V.foldl' (SG.<>) (V.head es) (V.tail es)@ did.
-- NOTE(review): like the original, this is partial on an empty vector —
-- callers must supply at least one element.
extentFromVector :: (HasExtent a v srid, VectorSpace v) => V.Vector a -> Extent v srid
extentFromVector = V.foldl1' (SG.<>) . V.map extent
-- A feature's extent is that of its geometry; attribute data is ignored.
instance VectorSpace v => HasExtent (Feature v srid d) v srid where
    extent = extent . _fGeom
-- Collection extent: union over every member's geometry (partial on an
-- empty collection, via 'extentFromVector').
instance VectorSpace v => HasExtent (FeatureCollection v srid d) v srid where
    extent = extentFromVector . V.map _fGeom . V.fromList . _fcFeatures
| krisajenkins/sigym4-geometry | Sigym4/Geometry/Algorithms.hs | bsd-3-clause | 6,293 | 0 | 11 | 1,458 | 2,469 | 1,261 | 1,208 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wall #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett 2014
-- License : BSD3
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Hask.Prism where
import qualified Control.Arrow as Arrow
import Hask.Core
-- | Profunctors that can be lifted over either summand of a coproduct.
-- The two defaults are defined in terms of each other via 'swap', so an
-- instance must supply at least one of them (enforced by the MINIMAL pragma).
class (Precocartesian ((~>) :: i -> i -> *), Profunctor p) => Choice (p :: i -> i -> *) where
  {-# MINIMAL _Left | _Right #-}
  -- | Act on the left summand, passing the right one through untouched.
  _Left :: p a b -> p (a + c) (b + c)
  _Left = dimap swap swap . _Right
  -- | Act on the right summand, passing the left one through untouched.
  _Right :: p a b -> p (c + a) (c + b)
  _Right = dimap swap swap . _Left
-- Plain functions: delegate to the Arrow combinators.
instance Choice (->) where
  _Left = Arrow.left
  _Right = Arrow.right
-- Natural transformations (at two different kinds): lift through the
-- 'Lift'ed pointwise coproduct.
instance Choice (Nat :: (i -> *) -> (i -> *) -> *) where
  _Left (Nat f) = Nat $ _Lift (_Left f)
  _Right (Nat g) = Nat $ _Lift (_Right g)
instance Choice (Nat :: (i -> j -> *) -> (i -> j -> *) -> *) where
  _Left (Nat f) = Nat $ _Lift (_Left f)
  _Right (Nat g) = Nat $ _Lift (_Right g)
-- Tagged and Beget ignore their contravariant argument, so injecting
-- with 'inl'/'inr' on both sides suffices.
instance Choice Tagged where
  _Left = bimap inl inl
  _Right = bimap inr inr
instance Precocartesian ((~>) :: i -> i -> *) => Choice (Beget (r :: i)) where
  _Left = bimap inl inl
  _Right = bimap inr inr
instance Precocartesian ((~>)::i->i-> *) => Choice (Self :: i -> i -> *) where
  _Left = _Self first
  _Right = _Self fmap1
-- In lens terms this is a 'Review'
type Begetter t b = forall p. (Choice p, Functor p) => p b b -> p t t
-- | Build a review/begetter from a plain arrow by mapping it over both
-- sides of the bifunctor.
unto :: Bifunctor p => (b ~> t) -> p b b -> p t t
unto f = bimap f f
-- | A prism in profunctor encoding: any 'Choice' profunctor transformer.
type Prism s t a b = forall p. Choice p => p a b -> p s t
| ekmett/hask | old/src/Hask/Prism.hs | bsd-3-clause | 1,877 | 6 | 11 | 403 | 692 | 369 | 323 | 40 | 1 |
-- #!/usr/bin/env runghc
-- (,st') in parse
-- {-# LANGUAGE TupleSections #-}
-- {-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
-- {-# OPTIONS_GHC -cpp -DPiForallInstalled #-}
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <rx@a-rx.info>
-- Stability : experimental
-- Portability: non-portable
--
-- This module tests Pire's parser, forget, and untie functionality
{-
toggle w/ opts in cabal file
cpp-options: -DPiForallInstalled
cf
http://stackoverflow.com/questions/6361846/where-can-i-learn-about-ifdef
+ setting ghc/ghci cmd line options (from man ghci)
-cpp -Dsymbol=value -U -Usymbol ...
thus eg.
ghci -cpp -DPiForallInstalled
these seem not to work (?)
cabal repl --ghc-options="-cpp -DPiForallInstalled"
anyway NOT cabal repl --ghc-options="--cpp-options=-DPiForallInstalled"
can the -cpp be set on the cabal repl cmd line at all?
but better just toggle w/
-- {-# OPTIONS_GHC -cpp -DPiForallInstalled #-}
-}
{-
usage
./tests.hs
./tests.hs -p sorting
etc
./tests.hs -h
resp.
Pire.Tests.main'
eg.
:l Pire.Tests
main' "*"
or
main' "basic"
main' "nat"
meanwhile just
runhaskell -package-db=.cabal-sandbox/x86_64-linux-ghc-7.6.3-packages.conf.d tests.hs -p parsing
-}
-- has to be module Main (or nothing)
-- otherwise "cabal test" will not run
-- module Tests where
import ParserTests.BConst
import ParserTests.Lambda
import ParserTests.If
import ParserTests.PatternMatchCase
import ParserTests.Nat
import ParserTests.DataTypes
import ParserTests.Subst
import ParserTests.SigmaTypes
import ParserTests.Pcase
import ParserTests.Contra
import ParserTests.ErasedApp
import ParserTests.ImpProd
import ParserTests.Decls
import ParserTests.Telescope
import Pire.Syntax.Ws
import Pire.Syntax.Eps
import Pire.Syntax.Nm
import Pire.Syntax.Token
import Pire.Syntax.Binder
import Pire.Syntax
import Pire.NoPos
import Pire.Parser.Basic
import Pire.Parser.Token
import Pire.Parser.VarWildcard
import Pire.Parser.Expr
import Pire.Forget
import Pire.Untie
import Pire.Parser.ParseUtils
import Pire.Parser.PiParseUtils
#ifdef PiForallInstalled
import qualified PiForall.Parser as P
#endif
import Test.Tasty
-- import Test.Tasty.SmallCheck as SC
-- import qualified Test.Tasty.QuickCheck as QC
import Test.Tasty.HUnit
-- import qualified Test.Tasty.Runners as R
-- import Test.Tasty.Options as O
import System.Environment as Env
import Data.Either.Combinators (fromRight')
import Bound
-- import Bound.Name
-- | Run the whole test suite with tasty's default runner.
-- (Added the explicit top-level type signature; behaviour unchanged.)
main :: IO ()
main = defaultMain tests
-- | Run only the test groups whose names match the given tasty pattern,
-- e.g. @main' \"basics\"@ from GHCi — equivalent to @tests.hs -p PAT@.
-- (Added the missing type signature; dropped the redundant single-statement
-- @do@ and the superfluous @$ tests@ application; behaviour unchanged.)
main' :: String -> IO ()
main' pat = withArgs ["-p", pat] (defaultMain tests)
-- | The complete test tree.  Individual groups can be selected with
-- tasty's @-p@ pattern option (see 'main'').
tests :: TestTree
tests = testGroup "Tests" [basicsU
                          , impOrExpVarU
                          , wildcardU
                          , varOrConU
                          , trustmeU
                          , reflU
                          , typenU
                          , bconstU
                          , ifU
                          , lambdaU
                          , letU
                          , expProdOrAnnotOrParensU
                          , tyEqU
                          , piU
                          , varU
                          , appU
                          , argU
                          , dconappU
                          -- TODO
                          , patMatchCaseU
                          , natU
                          , telescopesU
                          , dataTypesU
                          , substU
                          -- new as of May 2015
                          , parsingSigmaTypesU
                          , parsingPcaseU
                          , parsingContraU
                          , parsingErasedAppU
                          , parsingImpProdU
                          , parsingDeclsU
                          ]
-- "the ? is towards the actual"
-- actual @?= expected = assertEqual "" expected actual
-- expected @=? actual = assertEqual "" expected actual
-- main' "basics"
-- | Unit tests for the basic lexers/parsers (identifiers, vars, reserved
-- words and operators).  Each group is written as @tail [undefined, ...]@
-- so that every real entry can start with a leading comma, making it easy
-- to comment out individual cases.
basicsU = testGroup "parsing basics - unit tests" $ tail
  [
    undefined
  , let s = "wow this rocks "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase ("parse ide '"++s++"'")
        $ parse ide s @?= "wow"
      , testCase ("parse ide_ '"++s++"'")
        $ parse id_ s @?= ("wow",Ws " ")
      , testCase ("parse var '"++s++"'")
        $ parse var s @?= V "wow"
      , testCase ("parse var_ '"++s++"'")
        -- $ (parse var_ s) @?= (V_ "wow" (Ws " "))
        $ (parse var_ s) @?= (Ws_ (V "wow") (Ws " "))
      , testCase ("parse & forget var_ '"++s++"'")
        $ (forget $ parse var_ s) @?= parse var s
      , testCase ("parse & forget var2_ '"++s++"'")
        $ parse var2_ s @?= ("wow", Ws " ")
      ]
  , let s = "Succ "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- but vars deserve some attention - when using constructors (in the prelude)
      , testCase ("fail parsing var '"++s++"'")
        $ failParsingP var s @?= "fail"
      , testCase ("fail parsing var2_ '"++s++"'")
        $ failParsingP var2_ s @?= "fail"
      ]
  , let s = "Nat "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase ("fail parsing var_ '"++s++"'")
        $ failParsingP var_ s @?= "fail"
      ]
  , let s = "in{-foo-}this rocks "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase ("parse reserved 'in' in '"++s++"'")
        $ parse (reserved "in") s @?= ()
      , testCase ("parse reserved_ 'in' in '"++s++"'")
        $ parse (reserved_ "in") s @?= Ws "{-foo-}"
      ]
  , let s = "foo{-bar-}"
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- but no matter really if "in" or "foo"
      , testCase ("parse reserved 'foo' in '"++s++"'")
        $ parse (reserved "foo") s @?= ()
      , testCase ("parse reserved_ 'foo' in '"++s++"'")
        $ parse (reserved_ "foo") s @?= Ws "{-bar-}"
      ]
  , let s = "bar{-baz-}this rocks "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- want parsing failure here (but assert equal)!
      , testCase ("fail parsing of reserved 'foo' in '"++s++"'")
        $ failParsing (reserved "foo") s @?= "fail"
      -- want parsing failure here (but assert equal)!
      , testCase ("fail parsing of reserved_ 'foo' in '"++s++"'")
        $ failParsing (reserved "foo") s @?= "fail"
      ]
  , let s = "+{-foo-}this rocks "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase "parsing reservedOp '+' "
        $ parse (reservedOp "+") s @?= ()
      , testCase "parsing reservedOp_ '+' " $
        parse (reservedOp_ "+") s @?= Ws "{-foo-}"
      ]
  , let s = "+4 "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase ("parsing reservedOp '+' in '"++s++"'")
        $ parse (reservedOp "+") s @?= ()
      , testCase ("parsing reservedOp_ '+' in '"++s++"'")
        $ parse (reservedOp_ "+") s @?= Ws ""
      ]
  , let s = "++ "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase ("fail parsing reservedOp '+' in '"++s++"'")
        $ failParsing (reservedOp "+") s @?= "fail"
      , testCase ("fail parsing reservedOp_ '+' in '"++s++"'")
        $ failParsing (reservedOp_ "+") " ++ " @?= "fail"
      ]
  , let s = "+ + "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase ("parsing reservedOp '+' in '"++s++"'")
        $ parse (reservedOp "+") s @?= ()
      , testCase ("parsing reservedOp_ '+' in '"++s++"'")
        $ parse (reservedOp_ "+") s @?= Ws " "
      ]
  , let s = "foo this rocks "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- strangely enough
      , testCase ("parsing reservedOp 'foo' in '"++s++"'")
        $ parse (reservedOp "foo") s @?= ()
      , testCase ("parsing reservedOp_ 'foo' in '"++s++"'")
        $ parse (reservedOp_ "foo") s @?= Ws " "
      ]
  ]
-- exprs and helpers
-- ! todo: split this into more manageable chunks
-- main' "impOrExpVar"
-- | Unit tests for implicit/explicit variable parsing (plain vars vs
-- bracketed, erased ones).  The @tail [undefined, ...]@ trick allows
-- leading commas; @untie@ round-trips against Pi-forall's own parser and
-- is therefore guarded by the PiForallInstalled CPP flag.
impOrExpVarU = testGroup "parsing impOrExpVar - unit tests" $ tail
  [
    undefined
  , let s = "foo{-bar-}this rocks "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- helper
      , testCase "parse as impOrExpVar"
        $ parse impOrExpVar s @?= (V "foo",RuntimeP)
      , testCase "parse as impOrExpVar_"
        -- $ (parse impOrExpVar_ s) @?= (V_ "foo" (Ws "{-bar-}"),RuntimeP)
        $ (parse impOrExpVar_ s) @?= (Ws_ (V "foo") $ Ws "{-bar-}",RuntimeP)
      -- , testCase "parse & forget impOrExpVar_"
      , testCase ("parse & forget impOrExpVar_ '"++s++"'")
        $ (forget $ parse impOrExpVar_ s) @?= (parse impOrExpVar s)
#ifdef PiForallInstalled
      -- untie needs PiForall (other stuff can be tested w/ pure Pire wo/ Pi-forall ie.)
      , testCase ("parse & untie impOrExpVar_ '"++s++"'")
        $ (untie $ parse impOrExpVar_ s) @?= (piParse P.impOrExpVar s)
#endif
      ]
  , let s = "[ foo ]{-bar-}this rocks "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase "parse as impOrExpVar"
        $ parse impOrExpVar s @?= (V "foo",ErasedP)
      , testCase ("parse & forget impOrExpVar_ '"++s++"'")
        $ (forget $ parse impOrExpVar_ s) @?= (parse impOrExpVar s)
#ifdef PiForallInstalled
      , testCase ("parse & untie impOrExpVar_ '"++s++"'")
        $ (untie $ parse impOrExpVar_ s) @?= (piParse P.impOrExpVar s)
#endif
      ]
  -- slightly more complicated
  , let s = "[{-foo-}y{-bar-}]{-baz-}x "
    in
      testGroup ("slightly more complicated: '"++s++"'") $ tail [
      undefined
      , testCase "parse as impOrExpVar"
        $ parse impOrExpVar s @?= (V "y",ErasedP)
      -- now this is interesting for impOrExpVar_
      , testCase "parsing as impOrExpVar_"
        $ parse impOrExpVar_ s
          @?= (Brackets_
               (BracketOpen "[" (Ws "{-foo-}"))
               -- (V_ "y" $ Ws "{-bar-}")
               (Ws_ (V "y") $ Ws "{-bar-}")
               (BracketClose "]" (Ws "{-baz-}")),
               ErasedP)
      -- ; testCase ("parse & forget impOrExpVar_ '"++s++"'")
      , testCase ("parse & forget impOrExpVar_")
        $ (forget $ parse impOrExpVar_ s) @?= parse impOrExpVar s
#ifdef PiForallInstalled
      , testCase ("parse & untie impOrExpVar_ '"++s++"'")
        $ (untie $ parse impOrExpVar_ s) @?= (piParse P.impOrExpVar s)
#endif
      ]
  -- should test the helpers as well maybe
  -- maybe in an extra group helpersU
  , let s = "[{-foo-}y] x "
    in
      testGroup ("test some helpers: bracketOpen_ etc.: '"++s++"'") $ tail [
      undefined
      -- should test the helpers as well maybe
      , testCase "parse as bracketOpen_"
        $ parse bracketOpen_ s @?= BracketOpen "[" (Ws "{-foo-}")
      -- etc
      -- ! many more...
      ]
  , let s = "foo{-bar-}this rocks "
    in
      testGroup
      -- ("...: '"++s++"'")
      "impOrExpVar' -- variation of the above that returns just some string"
      $ tail [
      undefined
      , testCase "parse as impOrExpVar'"
        $ parse impOrExpVar' s @?= ("foo",RuntimeP)
      ]
  , let s = "[ foo ]{-bar-}this rocks "
    in
      testGroup ("same erased: '"++s++"'") $ tail [
      undefined
      , testCase "parse as impOrExpVar'"
        $ parse impOrExpVar' s @?= ("foo",ErasedP)
      ]
  , let s = "[{-foo-}y{-bar-}]{-baz-}x "
    in
      testGroup ("... and the slightly more complicated: '"++s++"'") $ tail [
      undefined
      , testCase "parse as impOrExpVar'"
        $ parse impOrExpVar' s @?= ("y",ErasedP)
      ]
  ]
-- test them w/
-- main' "wildcard -"
-- | Unit tests for wildcard (@_@) and var-or-wildcard parsing; also checks
-- that constructor names (Nat, Zero) are rejected.  Uses the
-- @tail [undefined, ...]@ leading-comma trick like the other groups.
wildcardU = testGroup ("parsing wildcard - unit tests") $ tail
  [
    undefined
  , let s = "_{-foo-}x "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- wildcards
      , testCase ("parsing wildcard '"++s++"'")
        $ parse wildcard s @?= "_"
      , testCase ("parsing wildcard_ '"++s++"'")
        -- $ parse wildcard_ s @?= (V_ "_" $ Ws "{-foo-}")
        $ parse wildcard_ s @?= (Ws_ (V "_") $ Ws "{-foo-}")
      , testCase ("parse & forget & extract wildcard_ '"++s++"'")
        $ (extract $ forget $ parse wildcard_ s)
          @?= (parse wildcard s)
      , testCase ("parsing wildcard2_ '"++s++"'")
        $ parse wildcard2_ s @?= ("_", Ws "{-foo-}")
      -- dito for varOrWildcard
      , testCase ("parsing varOrWildcard '"++s++"'")
        $ parse varOrWildcard s @?= "_"
      , testCase ("parsing varOrWildcard_ '"++s++"'")
        -- $ parse varOrWildcard_ s @?= (V_ "_" $ Ws "{-foo-}")
        $ parse varOrWildcard_ s @?= (Ws_ (V "_") $ Ws "{-foo-}")
      , testCase ("parse, forget & extract varOrWildcard_ '"++s++"'")
        $ (extract $ forget $ parse varOrWildcard_ s) @?= (parse varOrWildcard s)
      , testCase ("parsing varOrWildcard2_ '"++s++"'")
        $ parse varOrWildcard2_ s @?= ("_", Ws "{-foo-}")
      ]
  , let s = "a{-foo-}x "
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- but varOrWildcard allows for vars as well of course
      , testCase ("parsing varOrWildcard '"++s++"'")
        $ parse varOrWildcard s @?= "a"
      , testCase ("parsing varOrWildcard_ '"++s++"'")
        -- $ parse varOrWildcard_ s @?= V_ "a" (Ws "{-foo-}")
        $ parse varOrWildcard_ s @?= Ws_ (V "a") (Ws "{-foo-}")
      , testCase ("parse & forget varOrWildcard_ '"++s++"'")
        $ (extract $ forget $ parse varOrWildcard_ s) @?= (parse varOrWildcard s)
      , testCase ("parse varOrWildcard2_ '"++s++"'")
        $ parse varOrWildcard2_ s @?= ("a", Ws "{-foo-}")
      ]
  , let s = "Nat{-foo-}x "
    in
      testGroup ("mind constructor names though, parsing '"++s++"'") $ tail [
      undefined
      , testCase ("fail parsing varOrWildcard '"++s++"'")
        $ failParsingP varOrWildcard s @?= "fail"
      ]
  , let s = "Zero{-foo-}x "
    in
      testGroup ("mind constructor names, parsing '"++s++"'") $ tail [
      undefined
      , testCase ("fail parsing varOrWildcard '"++s++"'")
        $ failParsingP varOrWildcard s @?= "fail"
      ]
  ]
-- test them w/
-- main' "varOrCon"
-- | Unit tests for varOrCon: plain variables, type constructors (Nat) and
-- data constructors (Zero, Succ), parsed against the prelude (the @P@
-- variants, e.g. 'parseP').  Pi-forall round-trips are CPP-guarded.
varOrConU = testGroup ("parsing varOrCon - unit tests"
                       ++"\n "
                       ++"parse w/ prelude (parsing*P*S)"
                      ) $ tail
  [
    undefined
  , let s = "bar{-baz-}x"
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- varOrCon - parse w/ prelude (parsing*P*S)
      -- example originally also preceded by comment
      -- does not work Pi-forall's parser (and thus for parse & untie) however:
      -- " {-foo-}bar{-baz-}x "
      , testCase "parse as varOrCon"
        $ parseP varOrCon s @?= V "bar"
      , testCase "parse as varOrCon_"
        -- $ parseP varOrCon_ s @?= V_ "bar" (Ws "{-baz-}")
        $ parseP varOrCon_ s @?= Ws_ (V "bar") (Ws "{-baz-}")
      , testCase "parse & forget varOrCon_"
        $ (forget $ parseP varOrCon_ s) @?= parseP varOrCon s
#ifdef PiForallInstalled
      , testCase "parse & untie varOrCon"
        $ (untie $ parseP varOrCon s) @?= piParseP P.varOrCon s
      -- and thus:
      , testCase "parse & untie varOrCon_"
        $ (untie $ parseP varOrCon_ s) @?= piParseP P.varOrCon s
      -- and, well varOrCon yields an expr...
      , testCase "parse & untie expr_"
        $ (untie $ parseP expr_ s) @?= piParseP P.expr s
#endif
      ]
  , let s = "Nat{-bar-}"
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase "parse as varOrCon"
        $ parseP varOrCon s @?= TCon "Nat" []
      , testCase "parse as varOrCon_"
        $ parseP varOrCon_ s @?= TCon_ (Nm_ "Nat" $ Ws "{-bar-}") []
      -- ; testCase "parse as varOrCon_ & forget"
      , testCase ("parse & forget varOrCon_ '"++s++"'")
        $ (forget $ parseP varOrCon_ s) @?= parseP varOrCon s
#ifdef PiForallInstalled
      -- ; testCase "parse as varOrCon & untie"
      , testCase ("parse & untie varOrCon '"++s++"'")
        $ (untie $ parseP varOrCon s) @?= piParseP P.varOrCon s
      -- and thus:
      , testCase "parse & untie varOrCon_"
        $ (untie $ parseP varOrCon_ s) @?= piParseP P.varOrCon s
      , testCase "parse & untie expr_"
        $ (untie $ parseP expr_ s) @?= piParseP P.expr s
#endif
      -- ! further, more complicated TCon examples
      ]
  , let s = "Zero{-bar-}"
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase "parse as varOrCon"
        $ parseP varOrCon s @?= DCon "Zero" [] (Annot Nothing)
      , testCase "parse as varOrCon_"
        $ parseP varOrCon_ s
          @?= DCon_ (Nm1_ "Zero" $ Ws "{-bar-}") [] (Annot_ Nothing NoWs)
      , testCase "parse & forget varOrCon_"
        $ (forget $ parseP varOrCon_ s ) @?= parseP varOrCon s
#ifdef PiForallInstalled
      , testCase "parse & untie varOrCon"
        $ (untie $ parseP varOrCon s) @?= piParseP P.varOrCon s
      , testCase "parse & untie varOrCon_"
        $ (untie $ parseP varOrCon_ s) @?= piParseP P.varOrCon s
      , testCase "parse & untie expr_"
        $ (untie $ parseP expr_ s) @?= piParseP P.expr s
#endif
      ]
  , let s = "Succ{-bar-}x"
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase "parse as varOrCon"
        $ parseP varOrCon s @?= DCon "Succ" [] (Annot Nothing)
      -- ; testCase ("parse varOrCon_ '"++s++"'")
      , testCase "parse as varOrCon_"
        $ parseP varOrCon_ s
          @?= DCon_ (Nm1_ "Succ" $ Ws "{-bar-}") [] (Annot_ Nothing $ NoWs)
      -- ; testCase "parse as varOrCon_ & forget"
      , testCase ("parse & forget varOrCon_ '"++s++"'")
        $ (forget $ parseP varOrCon_ s ) @?= parseP varOrCon s
#ifdef PiForallInstalled
      , testCase "parse & untie varOrCon"
        $ (untie $ parseP varOrCon s) @?= piParseP P.varOrCon s
      , testCase "parse & untie varOrCon_"
        $ (untie $ parseP varOrCon_ s) @?= piParseP P.varOrCon s
      , testCase "parse & untie expr_"
        $ (untie $ parseP expr_ s) @?= piParseP P.expr s
#endif
      ]
  , let s = "\\ x . Nat"
    in
      testGroup (">>= for TCon: \""++s++"\"") $ tail [
      undefined
      -- make sure, the bind op for TCon/TCon_ is defined
      -- trivial equality
      , testCase ("parsing expr '"++s++"'")
        $ (parseP expr s) @?= (parseP expr s)
      , testCase ("parsing expr_ '"++s++"'")
        $ (parseP expr_ s) @?= (parseP expr_ s)
#ifdef PiForallInstalled
#endif
      ]
  ]
-- test them w/
-- main' "trustme"
-- | Unit tests for the TRUSTME keyword: parse, forget, and (CPP-guarded)
-- untie against Pi-forall; plus a smoke test that the bind operator for
-- trustme terms is defined (evaluating the parse forces it).
trustmeU = testGroup "trustme - unit tests" $ tail
  [
    undefined
  , let s = "TRUSTME{-baz-}"
    in
      testGroup ("parsing '"++s++"'") $ tail [
      undefined
      , testCase ("parsing trustme '"++s++"'")
        $ parse trustme s @?= TrustMe (Annot Nothing)
      , testCase ("parsing trustme_ '"++s++"'")
        $ parse trustme_ s @?= TrustMe_ "TRUSTME" (Ws "{-baz-}") (Annot_ Nothing $ Ws "")
      , testCase ("parse & forget trustme_ '"++s++"'")
        $ (forget $ parse trustme_ s) @?= parse trustme s
#ifdef PiForallInstalled
      , testCase ("parse & untie trustme '"++s++"'")
        $ (untie $ parse trustme s) @?= piParse P.trustme s
      , testCase ("parse & untie trustme_ '"++s++"'")
        $ (untie $ parse trustme_ s) @?= (piParse P.trustme s)
      , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (piParse P.expr s)
#endif
      ]
  , let s = "\\ x . TRUSTME"
    in
      testGroup (">>= for trustme: \""++s++"\"") $ tail [
      undefined
      -- make sure, the bind op for trustme is defined
      -- trivial equality
      , testCase ("parsing expr '"++s++"'")
        $ (parse expr s) @?= (parse expr s)
      , testCase ("parsing expr_ '"++s++"'")
        $ (parse expr_ s) @?= (parse expr_ s)
      -- , testCase ("parse & forget expr_ '"++s++"'")
      -- $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
#endif
      ]
  ]
-- main' "refl"
-- Unit tests for the refl parser; run selectively with main' "refl".
reflU = testGroup "parsing refl - unit tests" $ tail
  [
    undefined
    -- "refl" followed by a block comment: refl_ must keep the comment,
    -- 'forget' must drop it again
  , let s = "refl{-foo-}"
    in
    testGroup ("refl... w/ '"++s++"'") $ tail [
      undefined
#ifdef PiForallInstalled
#endif
    , testCase ("parse refl '"++s++"'")
        $ parse refl s @?= Refl (Annot Nothing)
    , testCase ("parse refl_ '"++s++"'")
        $ parse refl_ s @?= Refl_ "refl" (Ws "{-foo-}") (Annot_ Nothing $ Ws "")
    , testCase ("parse & forget refl_ '"++s++"'")
        $ (forget $ parse refl_ s) @?= parse refl s
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie refl_ '"++s++"'")
        $ (untie $ parse refl_ s) @?= (piParse P.refl s)
    -- and as expr
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (piParse P.expr s)
#endif
    ]
    -- refl under a lambda: exercises the bind (>>=) operator
  , let s = "\\ x . refl"
    in
    testGroup (">>= for refl: \""++s++"\"") $ tail [
      undefined
      -- make sure, the bind op for refl is defined
      -- trivial equality
    , testCase ("parsing expr '"++s++"'")
        $ (parse expr s) @?= (parse expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parse expr_ s) @?= (parse expr_ s)
    -- , testCase ("parse & forget expr_ '"++s++"'")
    --     $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
#endif
    ]
  ]
-- main' "typen"
-- Unit tests for the Type (universe) parser; run with main' "typen".
typenU = testGroup "parsing typen - unit tests" $ tail
  [
    undefined
    -- "Type" followed by a block comment; typen_ keeps the comment
  , let s = "Type{-foo-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- typen
    , testCase ("parse typen '"++s++"'")
        $ parse typen s @?= Type
    , testCase ("parse typen_ '"++s++"'")
        $ parse typen_ s @?= (Type_ "Type" (Ws "{-foo-}"))
    , testCase ("parse & forget typen_ '"++s++"'")
        $ (forget $ parse typen_ s) @?= parse typen s
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie typen_ '"++s++"'")
        $ (untie $ parse typen_ s) @?= piParse P.typen s
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= piParse P.expr s
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
  ]
-- bconstU
-- lambdaU...
-- main' "let"
-- Unit tests for the "let ... = ... in ..." parser; run with main' "let".
-- Fix: the two cross-checks of expr_ below previously carried the exact
-- same test-case label ("parse & untie expr_ ..."), so they were
-- indistinguishable in the test output; the second is now labelled
-- "(via parseExpr)".
letU = testGroup "parsing let - unit tests" $ tail
  [
    undefined
    -- a let-expression with a block comment after every token, so every
    -- whitespace slot of Let_ is exercised
  , let s = "let{-l-}x{-l'-}={-b-}y{-b'-}in{-i-}i{-i'-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- let
    , testCase ("parsing letExpr '"++s++"'")
        $ (nopos $ parse letExpr s)
        @?= Let "x" (V "y") (Scope (V (F (V "i"))))
    , testCase ("parsing letExpr_ '"++s++"'")
        $ (nopos $ parse letExpr_ s)
        @?= Let_ (LetTok "let" (Ws "{-l-}")) (Binder_ "x" (Ws "{-l'-}")) (Equal "=" (Ws "{-b-}")) (Ws_ (V "y") (Ws "{-b'-}")) (In "in" (Ws "{-i-}")) (Scope (Ws_ (V (F (V "i"))) (Ws "{-i'-}")))
    , testCase ("parse & forget letExpr_ '"++s++"'")
        $ (forget $ parse letExpr_ s) @?= (parse letExpr s)
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie letExpr_ '"++s++"'")
        $ (untie $ parse letExpr_ s) @?= (piParse P.letExpr s)
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (piParse P.expr s)
    , testCase ("parse & untie expr_ (via parseExpr) '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- let under a lambda: exercises the bind (>>=) operator for Let/Let_
  , let s = "\\ x . let x = y in i "
    in
    testGroup (">>= for let: \""++s++"\"") $ tail [
      undefined
      -- make sure, the bind op for let is defined
      -- trivial equality
    , testCase ("parsing expr '"++s++"'")
        $ (parse expr s) @?= (parse expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parse expr_ s) @?= (parse expr_ s)
    -- , testCase ("parse & forget expr_ '"++s++"'")
    --     $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
#endif
    ]
  ]
-- main' "expProd"
-- Unit tests for expProdOrAnnotOrParens: after '(' the parser must decide
-- between a plain parenthesised expression (Paren), an annotation (Ann),
-- a product (Prod) and a Pi binder "(x:A) -> B" / "[x:A] -> B".
expProdOrAnnotOrParensU = testGroup "parsing expProdOrAnnotOrParens - unit tests"
  $ tail
  [
    undefined
    -- plain parens w/ comments in every whitespace slot
  , let s = "({-c-}A{-c'-}){-c''-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- expProdOrAnnotOrParens
      -- (A) w/ some ws
    , testCase ("parsing expProdOrAnnotOrParens (Paren) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens s)
        @?= Paren (V "A")
    , testCase ("parsing expProdOrAnnotOrParens_ (Paren_) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens_ s)
        -- @?= Paren_ (ParenOpen "(" (Ws "{-c-}")) (V_ "A" (Ws "{-c'-}")) (ParenClose ")" (Ws "{-c''-}"))
        @?= Paren_ (ParenOpen "(" (Ws "{-c-}")) (Ws_ (V "A") (Ws "{-c'-}")) (ParenClose ")" (Ws "{-c''-}"))
    , testCase ("parse & forget expProdOrAnnotOrParens_ '"++s++"'")
        $ (forget $ parse expProdOrAnnotOrParens_ s)
        @?= (parse expProdOrAnnotOrParens s)
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie expProdOrAnnotOrParens_ '"++s++"'")
        $ (untie $ parse expProdOrAnnotOrParens_ s)
        @?= (piParse P.expProdOrAnnotOrParens s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- parens under a lambda: exercises the bind (>>=) op for Paren/Paren_
  , let s = "\\ x . ( y ) "
    in
    testGroup (">>= for Paren: \""++s++"\"") $ tail [
      undefined
      -- make sure, the bind op for Paren/Paren_ is defined
      -- trivial equality
    , testCase ("parsing expr '"++s++"'")
        $ (parseP expr s) @?= (parseP expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parseP expr_ s) @?= (parseP expr_ s)
#ifdef PiForallInstalled
#endif
    ]
    -- (x: A): an annotated expression
  , let s = "(x: A)"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- (x: A) w/ some ws
    , testCase ("parsing expProdOrAnnotOrParens, Ann '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens s)
        @?= Ann (V "x") (V "A")
      -- ! maybe same w/ expProdOrAnnotOrParens_
    , testCase ("parse & forget expProdOrAnnotOrParens_ '"++s++"'")
        $ (forget $ parse expProdOrAnnotOrParens_ s)
        @?= (parse expProdOrAnnotOrParens s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expProdOrAnnotOrParens_ '"++s++"'")
        $ (untie $ parse expProdOrAnnotOrParens_ s)
        @?= (piParse P.expProdOrAnnotOrParens s)
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- annotation w/ comments in every whitespace slot
  , let s = "({-o-}x{-v-}:{-c-}A{-t-}){-c'-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing expProdOrAnnotOrParens (Ann) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens s)
        @?= Ann (V "x") (V "A")
    , testCase ("parsing expProdOrAnnotOrParens_ (WitnessedAnn_) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens_ s)
        -- @?= WitnessedAnnInParens_ (ParenOpen "(" (Ws "{-o-}")) (V_ "x" (Ws "{-v-}")) (Colon ":" (Ws "{-c-}")) (V_ "A" (Ws "{-t-}")) (ParenClose ")" (Ws "{-c'-}"))
        @?= (Ann_ $ Paren_ (ParenOpen "(" (Ws "{-o-}")) (WitnessedAnnEx_ (Ws_ (V "x") (Ws "{-v-}")) (Colon ":" (Ws "{-c-}")) (Ws_ (V "A") (Ws "{-t-}"))) (ParenClose ")" (Ws "{-c'-}")))
    , testCase ("parse & forget expProdOrAnnotOrParens_ (WitnessedAnn_) '"++s++"'")
        $ (forget $ parse expProdOrAnnotOrParens_ s)
        @?= (parse expProdOrAnnotOrParens s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expProdOrAnnotOrParens_ '"++s++"'")
        $ (untie $ parse expProdOrAnnotOrParens_ s)
        @?= (piParse P.expProdOrAnnotOrParens s)
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- annotation under a lambda: exercises the bind (>>=) op
  , let s = "\\ y . ( x : A ) "
    in
    testGroup (">>= for Ann/WitnessedAnn_: \""++s++"\"") $ tail [
      undefined
      -- make sure, the bind op for Paren/Paren_ is defined
      -- trivial equality
    , testCase ("parsing expr '"++s++"'")
        $ (parseP expr s) @?= (parseP expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parseP expr_ s) @?= (parseP expr_ s)
#ifdef PiForallInstalled
#endif
    ]
    -- runtime Pi binder under a lambda
  , let s = "\\ y . ( x : A ) -> B "
    in
    testGroup (">>= for PiP RuntimeP / PiP_ w/ WitnessedAnnP_: \""++s++"\"") $ tail [
      undefined
    , testCase ("parsing expr '"++s++"'")
        $ (parseP expr s) @?= (parseP expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parseP expr_ s) @?= (parseP expr_ s)
#ifdef PiForallInstalled
#endif
    ]
    -- erased Pi binder (brackets) under a lambda
  , let s = "\\ y . [ x : A ] -> B "
    in
    testGroup (">>= for PiP ErasedP / PiP_ w/ WitnessedAnnInBracketsP_: \""++s++"\"") $ tail [
      undefined
    , testCase ("parsing expr '"++s++"'")
        $ (parseP expr s) @?= (parseP expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parseP expr_ s) @?= (parseP expr_ s)
#ifdef PiForallInstalled
#endif
    ]
    -- (A, B): a product
  , let s = "(A, B) "
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- (A, B) w/ some ws
    , testCase ("parsing expProdOrAnnotOrParens (Prod) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens s)
        @?= Prod (V "A") (V "B") (Annot Nothing)
    , testCase ("parse & forget expProdOrAnnotOrParens_ (Prod) '"++s++"'")
        $ (forget $ parse expProdOrAnnotOrParens_ s)
        @?= (parse expProdOrAnnotOrParens s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expProdOrAnnotOrParens_ '"++s++"'")
        $ (untie $ parse expProdOrAnnotOrParens_ s)
        @?= (piParse P.expProdOrAnnotOrParens s)
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- product under a lambda
  , let s = "\\ x . (A, B) "
    in
    testGroup (">>= for Prod, parsing '"++s++"'") $ tail [
      undefined
      -- just make sure no exception occurrs, trivial equality
    , testCase ("parsing expr (lambda of Prod) '"++s++"'")
        $ (parse expr s) @?= (parse expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parse expr_ s) @?= (parse expr_ s)
#ifdef PiForallInstalled
#endif
    ]
    -- product w/ comments in every whitespace slot
  , let s = "({-o-}A{-v-},{-c-}B{-t-}){-c'-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing expProdOrAnnotOrParens (Prod) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens s)
        @?= Prod (V "A") (V "B") (Annot Nothing)
    , testCase ("parsing expProdOrAnnotOrParens_ (Prod_) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens_ s)
        -- @?= Prod_ (ParenOpen "(" (Ws "{-o-}")) (V_ "A" (Ws "{-v-}")) (Comma "," (Ws "{-c-}")) (V_ "B" (Ws "{-t-}")) (ParenClose ")" (Ws "{-c'-}")) (Annot_ Nothing (Ws ""))
        @?= Prod_ (ParenOpen "(" (Ws "{-o-}")) (Ws_ (V "A") (Ws "{-v-}")) (Comma "," (Ws "{-c-}")) (Ws_ (V "B") (Ws "{-t-}")) (ParenClose ")" (Ws "{-c'-}")) (Annot_ Nothing (Ws ""))
    , testCase ("parse & forget expProdOrAnnotOrParens_ (Prod_) '"++s++"'")
        $ (forget $ parse expProdOrAnnotOrParens_ s)
        @?= (parse expProdOrAnnotOrParens s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expProdOrAnnotOrParens_ (Prod) '"++s++"'")
        $ (untie $ parse expProdOrAnnotOrParens_ s)
        @?= (piParse P.expProdOrAnnotOrParens s)
    , testCase ("parse & untie expr_ (Prod) '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- (x: A) -> B: a (runtime) Pi type
  , let s = "(x: A) -> B "
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing expProdOrAnnotOrParens (PiP) \""++s++"\"")
        $ (nopos $ parse expProdOrAnnotOrParens s)
        -- @?= Pi "x" (V "A") (Scope (V (F (V "B"))))
        @?= PiP RuntimeP "x" (V "A") (Scope (V (F (V "B"))))
    , testCase ("parse & forget expProdOrAnnotOrParens_ (PiP_) \""++s++"\"")
        $ (forget $ parse expProdOrAnnotOrParens_ s)
        @?= (parse expProdOrAnnotOrParens s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expProdOrAnnotOrParens_ (PiP_) \""++s++"\"")
        $ (untie $ parse expProdOrAnnotOrParens_ s)
        @?= (piParse P.expProdOrAnnotOrParens s)
    , testCase ("parse & untie expr_ (PiP_) '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- Pi type w/ comments in every whitespace slot
  , let s = "({-open-}x{-v-}:{-colon-}A{-ty-}){-close-}->{-arr-}B{-ty'-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing expProdOrAnnotOrParens (PiP) '"++s++"'")
        $ (nopos $ parse expProdOrAnnotOrParens s)
        -- @?= Pi "x" (V "A") (Scope (V (F (V "B"))))
        @?= PiP RuntimeP "x" (V "A") (Scope (V (F (V "B"))))
      -- TODO
      -- , testCase ("parsing expProdOrAnnotOrParens_ (PiP_) \""++s++"\"")
      --   $ (nopos $ parse expProdOrAnnotOrParens_ s)
      --   -- @?= PiP_ RuntimeP (WitnessedAnnInParensBnd_ (ParenOpen "(" (Ws "{-open-}")) (Binder "x" $ Ws "{-v-}") (Colon ":" (Ws "{-colon-}")) (V_ "A" (Ws "{-ty-}")) (ParenClose ")" (Ws "{-close-}"))) (Arrow (Ws "{-arr-}")) (Scope (V_ (F (V_ "B" (Ws ""))) (Ws "{-ty'-}")))
      --   @?= PiP_ RuntimeP (WitnessedAnnInParensBnd_ (ParenOpen "(" (Ws "{-open-}")) (Binder "x" $ Ws "{-v-}") (Colon ":" (Ws "{-colon-}")) (Ws_ (V "A") (Ws "{-ty-}")) (ParenClose ")" (Ws "{-close-}"))) (Arrow (Ws "{-arr-}")) (Scope (V_ (F (V_ "B" (Ws ""))) (Ws "{-ty'-}")))
    , testCase ("parse & forget expProdOrAnnotOrParens_ (PiP_) '"++s++"'")
        $ (forget $ parse expProdOrAnnotOrParens_ s) @?= (parse expProdOrAnnotOrParens s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expProdOrAnnotOrParens_ (PiP_) '"++s++"'")
        $ (untie $ parse expProdOrAnnotOrParens_ s)
        @?= (piParse P.expProdOrAnnotOrParens s)
    , testCase ("parse & untie expr_ (PiP_) '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- ! ErasedPi (here?)
  ]
-- test w/
-- main' "TyEq"
-- Unit tests for equality types (TyEq); '=' associates to the left.
-- Fix: two cross-checks of expr_ in the "x=y=z" group previously carried
-- the exact same test-case label, so they were indistinguishable in the
-- test output; the second is now labelled "via parseExpr".
tyEqU = testGroup "parsing TyEq - unit tests" $ tail
  [
    undefined
  , let s = "x=y=z"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- = (TyEq), left assoc
    , testCase ("parsing expr (left assoc '=') '"++s++"'")
        $ (nopos $ parse expr s)
        @?= TyEq (TyEq (V "x") (V "y")) (V "z")
    , testCase ("parse & forget expr (left assoc '=') '"++s++"'")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie expr_ (left assoc '=') '"++s++"'")
        $ (untie $ parse expr_ s) @?= (piParse P.expr s)
    , testCase ("parse & untie expr_ (left assoc '=', via parseExpr) '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- same, w/ comments around the '=' tokens
  , let s = "x ={-eq-}y ={-eq'-}z"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- w/ some ws
    , testCase ("parsing expr (left assoc '=') '"++s++"'")
        $ (nopos $ parse expr s)
        @?= TyEq (TyEq (V "x") (V "y")) (V "z")
    , testCase ("parsing expr_ (left assoc '=') '"++s++"'")
        $ (nopos $ parse expr_ s)
        -- @?= TyEq_ (TyEq_ (V_ "x" (Ws " ")) (Equal "=" (Ws "{-eq-}")) (V_ "y" (Ws " "))) (Equal "=" (Ws "{-eq'-}")) (V_ "z" (Ws ""))
        @?= TyEq_ (TyEq_ (Ws_ (V "x") (Ws " ")) (Equal "=" (Ws "{-eq-}")) (Ws_ (V "y") (Ws " "))) (Equal "=" (Ws "{-eq'-}")) (Ws_ (V "z") (Ws ""))
    , testCase ("parse & forget expr_ (left assoc '=') '"++s++"'")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expr_ (left assoc '=') '"++s++"'")
        $ (untie $ parse expr_ s) @?= (piParse P.expr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- ! ErasedPi (here?)
    -- TyEq under a lambda: exercises the bind (>>=) op for TyEq
  , let s = "\\ x . y = z"
    in
    testGroup (">>= for TyEq: \""++s++"\"") $ tail [
      undefined
      -- make sure, the bind op for TyEq is defined
      -- trivial equality
    , testCase ("parsing expr '"++s++"'")
        $ (parse expr s) @?= (parse expr s)
    , testCase ("parsing expr_ '"++s++"'")
        $ (parse expr_ s) @?= (parse expr_ s)
    -- , testCase ("parse & forget expr_ '"++s++"'")
    --     $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
#endif
    ]
  ]
-- test them w/
-- main' "Pi -"
-- Unit tests for Pi types written with '->'; the arrow is right
-- associative, elaborating to nested PiP binders with generated
-- wildcard names ("_", "_1", ...).
piU = testGroup "Pi - unit tests" $ tail
  [
    undefined
  , let s = "A->B->C"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- similar: -> (Pi_), right assoc
    , testCase ("parsing expr (right assoc '->') '"++s++"'")
        $ (nopos $ parse expr s)
        @?= PiP RuntimeP "_" (V "A") (Scope (PiP RuntimeP "_1" (V (F (V "B"))) (Scope (V (F (V (F (V "C"))))))))
    , testCase ("parse & forget expr_ (right assoc '->') \""++s++"\"")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie expr_ (right assoc '->') '"++s++"'")
        $ (untie $ parse expr_ s) @?= (piParse P.expr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
  , let s = "A->{-a-}B->{-a'-}C"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- w/ some ws
    , testCase ("parsing expr (right assoc '->') \""++s++"\"")
        $ (nopos $ parse expr s)
        @?= PiP RuntimeP "_" (V "A") (Scope (PiP RuntimeP "_1" (V (F (V "B"))) (Scope (V (F (V (F (V "C"))))))))
      -- w/ some ws
      -- ignore the ParenOpen "(" etc ws, just the Arrow ws should be kept
      -- (besides ws after A, B, C - but not exercised here)
      --- TODO
      -- , testCase ("parsing expr_ (right assoc '->') '"++s++"'")
      --   $ (nopos $ parse expr_ s)
      --   @?= PiP_ RuntimeP (InferredAnnBnd_ (Binder "_" $ Ws "") (V_ "A" (Ws ""))) (Arrow (Ws "{-a-}")) (Scope (PiP_ RuntimeP (InferredAnnBnd_ (Binder "_1" $ Ws "") (V_ (F (V_ "B" (Ws ""))) (Ws ""))) (Arrow (Ws "{-a'-}")) (Scope (V_ (F (V_ (F (V_ "C" (Ws ""))) (Ws ""))) (Ws "")))))
    , testCase ("parse & forget expr_ '"++s++"'")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
  ]
-- main' "var -"
-- Unit tests for variable parsing; trailing whitespace / comments
-- after the identifier must be consumed.
varU = testGroup "var - unit tests" $ tail
  [
    undefined
  , let s = "foo "
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- w/ some ws
    , testCase ("parsing expr (var) '"++s++"'")
        $ (nopos $ parse expr s) @?= V "foo"
      -- ! do the same w/ expr_ maybe
    , testCase ("parse & forget expr_ (var) '"++s++"'")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- same, w/ a trailing block comment instead of a space
  , let s = "foo{-c-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing expr (var) '"++s++"'")
        $ (nopos $ parse expr s) @?= V "foo"
      -- TODO
      -- , testCase ("parsing expr_ (var) '"++s++"'")
      --   $ (nopos $ parse expr_ " foo{-c-}") @?= (V_ "foo" (Ws "{-c-}"))
    , testCase ("parse & forget expr_ (var) '"++s++"'")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
  ]
-- main' "@"
-- Unit tests for application (:@); application is left associative.
-- Fix: the basic "f a" test was wrapped in #ifdef PiForallInstalled even
-- though it only uses the local parser (no pi-forall reference involved),
-- so it was silently skipped whenever pi-forall was absent; it now runs
-- unconditionally.
appU = testGroup ":@ (app) - unit tests" $ tail
  [
    undefined
  , let s = "f a "
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing expr (app) '"++s++"'")
        $ (nopos $ parse expr s) @?= V "f" :@ V "a"
#ifdef PiForallInstalled
#endif
    ]
    -- same, w/ block comments instead of spaces
  , let s = "f{-c-}a{-c'-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing expr (app) '"++s++"'")
        $ (nopos $ parse expr s) @?= V "f" :@ V "a"
      -- TODO
      -- , testCase ("parsing expr_ (app) '"++s++"'")
      --   $ (nopos $ parse expr_ " f{-c-}a{-c'-}")
      --   @?= (V_ "f" (Ws "{-c-}")) :@ (V_ "a" (Ws "{-c'-}"))
    , testCase ("parse & forget expr_ '"++s++"'")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
    -- three atoms: application nests to the left, f g a == (f g) a
  , let s = "f g a "
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- experimenting with formatting here
      -- really waiting for ghc 7.8 to arrive in debian testing
      -- so I can use (Chris Done's) formatting
    , testCase ("parsing expr (app) '"++s++"'")
        $ (nopos $ parse expr s) @?= (V "f" :@ V "g") :@ V "a"
    , testCase ("parse & forget expr_ '"++s++"'")
        $ (forget $ parse expr_ s) @?= (parse expr s)
#ifdef PiForallInstalled
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parse expr_ s) @?= (fromRight' $ P.parseExpr s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parse expr s) @?= (fromRight' $ P.parseExpr s)
#endif
    ]
  ]
-- arg(s), dconapp, funapp etc - parsing w/ prelude here (for dconapp)
-- main' "arg -"
-- Unit tests for argument parsing: an arg is runtime by default and
-- erased when written in brackets [...]; inside brackets the arg may be
-- a whole application, not just a variable.
argU = testGroup "arg - unit tests" $ tail
  [
    undefined
    -- first thing to notice:
    -- not only can args be in brackets or not,
    -- but (possibly within those brackets) they can be more complex than just vars
  , let s = "a{-c-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
    , testCase ("parsing arg '"++s++"'")
        $ parseP arg s @?= Arg RuntimeP (V "a")
      -- TODO
      -- , testCase ("parsing arg_ '"++s++"'")
      --   $ parseP arg_ s @?= Arg RuntimeP (V_ "a" (Ws "{-c-}"))
    , testCase ("parse & forget arg_ '"++s++"'")
        $ (forget $ parseP arg_ s) @?= (parseP arg s)
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie arg '"++s++"'")
        $ (untie $ parseP arg s) @?= (piParseP P.arg s)
    , testCase ("parse & untie arg_ '"++s++"'")
        $ (untie $ parseP arg_ s) @?= (piParseP P.arg s)
#endif
    ]
    -- here just f
    -- but can be eg. a lambda as well (!)
    -- [later:] rethink - maybe not that interesting
    -- (don't care about the a{-c'-} part ie.)
    -- but see how it is used if within brackets
  , let s = "f{-c-}a{-c'-}"
    in
    testGroup ("parsing '"++s++"'") $ tail [
      undefined
      -- unbracketed, arg stops after the first atom: only f is consumed
    , testCase ("parsing arg '"++s++"'")
        $ parseP arg s @?= Arg RuntimeP (V "f")
      -- TODO
      -- , testCase ("parsing arg_ '"++s++"'")
      --   $ parseP arg_ s @?= Arg RuntimeP (V_ "f" $ Ws "{-c-}")
    , testCase ("parse & forget arg_ '"++s++"'")
        $ (forget $ parseP arg_ s) @?= (parseP arg s)
#ifdef PiForallInstalled
    , testCase ("parse & untie arg_ '"++s++"'")
        $ (untie $ parseP arg_ s) @?= (piParseP P.arg s)
#endif
    ]
    -- within brackets "f a" would be an app
  , let s = "[{-bo-}f{-c-}a{-c'-}]{-bc-}"
    in
    testGroup ("within brackets 'f a' would be an app") $ tail [
      undefined
      -- brackets mark the arg erased, and the bracketed body is a full app
    , testCase ("parsing arg '"++s++"'")
        $ (nopos $ parseP arg s) @?= Arg ErasedP (V "f" :@ V "a")
      -- TODO
      -- , testCase ("parsing arg_ '"++s++"'")
      --   $ (nopos $ parseP arg_ s)
      --   @?= Arg ErasedP (Brackets_ (BracketOpen "[" (Ws "{-bo-}")) (V_ "f" (Ws "{-c-}") :@ V_ "a" (Ws "{-c'-}")) (BracketClose "]" (Ws "{-bc-}")))
    , testCase ("parse & forget arg_ '"++s++"'")
        $ (forget $ parseP arg_ s) @?= (parseP arg s)
#ifdef PiForallInstalled
    , testCase ("parse & untie arg '"++s++"'")
        $ (untie $ parseP arg s) @?= (piParseP P.arg s)
    , testCase ("parse & untie arg_ '"++s++"'")
        $ (untie $ parseP arg_ s) @?= (piParseP P.arg s)
#endif
    ]
  ]
-- main' "dconapp"
-- Unit tests for data-constructor application: "Succ n" must collect n
-- as a DCon argument, not as a function application (which is how
-- funapp would parse it).
dconappU = testGroup "dconapp - unit tests" $ tail
  [
    undefined
    -- now a dconapp should collect the arg n as an DCon arg
    -- rather than as an app, as funapp would parse it
    -- if parsed w/ prelude ie.
  , let s = "Succ{-c-}n{-c'-}"
    in
    testGroup ("parsing (dconapp) '"++s++"'") $ tail [
      undefined
    , testCase ("parsing dconapp '"++s++"'")
        $ parseP dconapp s @?= DCon "Succ" [Arg RuntimeP (V "n")] (Annot Nothing)
      -- TODO
      -- , testCase ("parsing dconapp_ '"++s++"'")
      --   $ parseP dconapp_ s
      --   @?= DCon_ (Nm_ "Succ" $ Ws "{-c-}") [Arg RuntimeP (V_ "n" (Ws "{-c'-}"))] (Annot_ Nothing (Ws ""))
      -- TODO
      -- , testCase ("parsing expr_ '"++s++"'")
      --   $ (nopos $ parseP expr_ s)
      --   @?= DCon_ (Nm_ "Succ" $ Ws "{-c-}") [Arg RuntimeP (V_ "n" (Ws "{-c'-}"))] (Annot_ Nothing (Ws ""))
      -- and finally an exp should itself parse Succ n as an dconapp
      -- rather than as a funapp
    , testCase ("parsing expr (DCon) '"++s++"'")
        $ (nopos $ parseP expr s)
        @?= DCon "Succ" [Arg RuntimeP (V "n")] (Annot Nothing)
    , testCase ("parse & forget dconapp_ '"++s++"'")
        $ (forget $ parseP dconapp_ s) @?= (parseP dconapp s)
#ifdef PiForallInstalled
      -- cross-checks against the reference pi-forall parser
    , testCase ("parse & untie dconapp_ '"++s++"'")
        $ (untie $ parseP dconapp_ s) @?= (piParseP P.dconapp s)
    , testCase ("parse & untie expr_ '"++s++"'")
        $ (untie $ parseP expr_ s) @?= (piParseP P.expr s)
    , testCase ("parse & untie dconapp '"++s++"'")
        $ (untie $ parseP dconapp s) @?= (piParseP P.dconapp s)
    , testCase ("parse & untie expr '"++s++"'")
        $ (untie $ parseP expr s) @?= (piParseP P.expr s)
#endif
    ]
  ]
-- missing I guess: funapp unit tests - but covered by app (?)
| reuleaux/pire | tests/UTests.hs | bsd-3-clause | 49,992 | 0 | 30 | 16,529 | 12,855 | 6,635 | 6,220 | 608 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.