code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -F -pgmF ./dist/build/htfpp/htfpp #-}
{-# LANGUAGE CPP #-}
module Foo.A where
import Test.Framework
#include "test.h"
-- | HTF test case; the @_FAIL@ suffix tells the htfpp preprocessor that
-- this test is expected to fail.
-- NOTE(review): @x@ and @y@ are not defined in this module — presumably
-- they come from the CPP-included "test.h"; confirm against that header.
test_a_FAIL =
    assertEqual x y
|
ekarayel/HTF
|
tests/Foo/A.hs
|
lgpl-2.1
| 172
| 0
| 5
| 30
| 24
| 15
| 9
| 6
| 1
|
-- Copyright 2013 Matthew Spellings
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
-- http://www.apache.org/licenses/LICENSE-2.0
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--module HasdyContrib (tests) where
import Prelude as P
import Data.Array.Accelerate as A
import Data.Array.Accelerate.CUDA
import Distribution.TestSuite.QuickCheck
import Test.QuickCheck
import Data.Array.Accelerate.HasdyContrib as HC
-- | Size law for 'HC.repeat': the output array holds exactly @sum xs@
-- elements (element i emitted @xs !! i@ times).  Executes on the CUDA
-- backend via 'run1'.
-- NOTE(review): negative counts would falsify this property; the primed
-- generator below only draws from [0, 129] — the bare property should not
-- be run with the default Arbitrary instance.
prop_repeat_size::[Int]->Bool
prop_repeat_size xs = repeatedSize == P.sum xs
  where
    xs' = A.fromList (Z:.length xs) xs
    repeatedSize = arraySize . arrayShape $ run1 (\x -> HC.repeat x x) xs'

-- | Same property restricted to non-negative repeat counts.
prop_repeat_size' = forAll (listOf $ choose (0, 129)) prop_repeat_size
-- | Value law for 'HC.repeat': repeating each element @x@ of @xs@
-- @x@ times matches a plain-list reference implementation.
prop_repeat_values::[Int]->Bool
prop_repeat_values xs = repeatedValues == repeatedValues'
  where
    xs' = A.fromList (Z:.length xs) xs
    repeatedValues = A.toList $ run1 (\x -> HC.repeat x x) xs'
    -- Reference: each x contributes x copies of itself.
    repeatedValues' = P.concatMap (\x -> P.take x . P.repeat $ x) xs

-- | Same property restricted to non-negative repeat counts.
prop_repeat_values' = forAll (listOf $ choose (0, 129)) prop_repeat_values
-- | Size law for 'HC.unfoldSeg': the unfolded output has @sum xs@
-- elements regardless of the step function.
prop_unfold_size::[Int]->Bool
prop_unfold_size xs = unfoldedSize == P.sum xs
  where
    xs' = A.fromList (Z:.length xs) xs
    unfoldedSize = arraySize . arrayShape $ run1 (\x -> HC.unfoldSeg (\x y -> x + y + 3) 0 x x) xs'

-- | Same property restricted to non-negative segment lengths.
prop_unfold_size' = forAll (listOf $ choose (0, 129)) prop_unfold_size
-- | Value law for 'HC.unfoldSeg' against a plain-list reference.
-- NOTE(review): the reference takes each segment to be the first x values
-- of @iterate (+3) x@; this assumes the step function
-- @\x y -> x + y + 3@ reduces to successive +3 steps from the seed —
-- confirm against HC.unfoldSeg's documented contract.
prop_unfold_values::[Int]->Bool
prop_unfold_values xs = unfoldedValues == unfoldedValues'
  where
    xs' = A.fromList (Z:.length xs) xs
    unfoldedValues = A.toList $ run1 (\x -> HC.unfoldSeg (\x y -> x + y + 3) 0 x x) xs'
    unfoldedValues' = P.concatMap (\x -> P.take x . P.iterate (+3) $ x) xs

-- | Same property restricted to non-negative segment lengths.
prop_unfold_values' = forAll (listOf $ choose (0, 129)) prop_unfold_values
-- | Run every (generator-restricted) property with default QuickCheck
-- settings.  Only the primed variants run: they constrain inputs to
-- non-negative counts.
main = do
    quickCheck prop_repeat_size'
    quickCheck prop_repeat_values'
    quickCheck prop_unfold_size'
    quickCheck prop_unfold_values'
|
klarh/hasdy
|
test/HasdyContrib.hs
|
apache-2.0
| 2,256
| 0
| 15
| 383
| 636
| 341
| 295
| 33
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
module Web.Socdiff.FB.DataSource where
import Control.Concurrent.Async
import Control.Concurrent.QSem
import Control.Exception
import Control.Lens
import Control.Monad.Trans.Resource
import Data.Aeson
import Data.Aeson.Lens
import Data.Conduit
import Data.Conduit.List hiding (map, mapM, mapM_)
import Data.Hashable
import Data.Monoid
import qualified Data.Text as T
import Data.Typeable
import Facebook
import Haxl.Core
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.HTTP.Conduit
-- | Requests this Haxl data source can issue against the Facebook Graph
-- API.  Currently only one: fetch the names of a user's taggable friends.
data FacebookReq a where
    GetFriends :: UserId -> FacebookReq [T.Text]
    deriving Typeable

deriving instance Eq (FacebookReq a)
deriving instance Show (FacebookReq a)
-- | Haxl uses Show1 for diagnostics; delegate to the GADT's Show.
instance Show1 FacebookReq where show1 = show

instance Hashable (FacebookReq a) where
    -- The leading 1 tags the constructor, so future request constructors
    -- can hash disjointly by using different tags.
    hashWithSalt s (GetFriends (Id uid)) = hashWithSalt s (1::Int, uid)
instance StateKey FacebookReq where
    -- Per-source global state: app credentials, the user's access token,
    -- a shared HTTP manager, and the fetch concurrency limit.
    data State FacebookReq =
        FacebookState
            { credentials :: Credentials
            , userAccessToken :: UserAccessToken
            , manager :: Manager
            , numThreads :: Int
            }
instance DataSourceName FacebookReq where
    dataSourceName _ = "Facebook"

instance DataSource u FacebookReq where
    -- NOTE(review): the worker is named 'githubFetch' although this is
    -- the Facebook source — looks like a copy/paste from a GitHub data
    -- source.  Kept as-is because the module exports everything and a
    -- rename could break external callers.
    fetch = githubFetch
-- | Build the global state for this data source: takes the maximum number
-- of concurrent fetches, the app credentials and the user access token,
-- and allocates one shared TLS-capable HTTP manager.
initGlobalState :: Int -> Credentials -> UserAccessToken -> IO (State FacebookReq)
initGlobalState threads creds token = do
    manager <- newManager tlsManagerSettings
    return $ FacebookState creds token manager threads
-- | Fetch one round of blocked requests asynchronously, at most
-- 'numThreads' at a time (bounded by a quantity semaphore).
-- NOTE(review): misnamed — this is the Facebook fetcher (see the
-- DataSource instance above).
githubFetch :: State FacebookReq -> Flags -> u -> [BlockedFetch FacebookReq] -> PerformFetch
githubFetch FacebookState{..} _flags _user bfs =
    AsyncFetch $ \inner -> do
        sem <- newQSem numThreads
        asyncs <- mapM (fetchAsync credentials manager userAccessToken sem) bfs
        -- Run the rest of the Haxl round while fetches are in flight,
        -- then wait for all of them to finish.
        inner
        mapM_ wait asyncs
-- | Run a single blocked fetch on its own thread.  The semaphore bounds
-- the number of simultaneous requests; any exception is captured into the
-- result var ('putFailure') instead of killing the scheduler.
fetchAsync ::
    Credentials
    -> Manager
    -> UserAccessToken
    -> QSem
    -> BlockedFetch FacebookReq
    -> IO (Async ())
fetchAsync creds manager tok sem (BlockedFetch req rvar) =
    async $ bracket_ (waitQSem sem) (signalQSem sem) $ do
        e <- Control.Exception.try $
            runResourceT $ runFacebookT creds manager $ fetchReq tok req
        case e of
            Left ex -> putFailure rvar (ex :: SomeException)
            Right a -> putSuccess rvar a
-- | Perform one request against the Graph API.
fetchReq
    :: UserAccessToken
    -> FacebookReq a
    -> FacebookT Auth (ResourceT IO) a
fetchReq tok (GetFriends (Id uid)) = do
    -- /{uid}/taggable_friends is paged; fetchAllNextPages walks every page.
    f <- getObject ("/" <> uid <> "/taggable_friends") [] (Just tok)
    source <- fetchAllNextPages f
    resp <- source $$ consume :: FacebookT Auth (ResourceT IO) [Value]
    -- Project just the "name" field out of each friend object.
    return $ map (\x -> x ^. key "name" . _String) resp
|
relrod/socdiff
|
src/Web/Socdiff/FB/DataSource.hs
|
bsd-2-clause
| 2,788
| 1
| 13
| 499
| 805
| 421
| 384
| -1
| -1
|
module Language.JavaScript.Parser
( parseJavaScript ) where
import Control.Applicative ((<*), (*>))
import Text.Parsec
import Language.JavaScript.AST
import Language.JavaScript.Lexer (lexJavaScript)
import Language.JavaScript.Prim
import Language.JavaScript.Util
-- | Like Parsec's 'chainl1', but the parser for the left-most operand
-- ('pa') may differ from the parser for subsequent operands ('pb').  Used
-- throughout to fold flat operator chains into left-nested AST nodes.
-- NOTE(review): as with 'chainl1', if 'op' or 'pb' fails after consuming
-- input the whole chain fails — callers rely on token-level backtracking.
chainl1' :: (Stream s m t) =>
    ParsecT s u m a -> ParsecT s u m b -> ParsecT s u m (a -> b -> a) -> ParsecT s u m a
chainl1' pa pb op = do
    a <- pa
    rest a
  where
    rest a = do { f <- op; b <- pb; rest (f a b) } <|> return a
-- * Primary expressions and array/object literals (ECMA-262 §11.1).

-- | PrimaryExpression: this / identifier / literal / array literal /
-- object literal / parenthesised expression.
primExpr :: JSParser PrimExpr
primExpr =
    do { identName "this" ; return PrimExprThis } <|>
    do { name <- getIdent; return $ PrimExprIdent name} <|>
    do { l <- getLiteral; return $ PrimExprLiteral l } <|>
    do { a <- arrayLit; return $ PrimExprArray a } <|>
    do { o <- objectLit; return $ PrimExprObject o } <|>
    do { punctuator "("; e <- expr; punctuator ")"; return $ PrimExprParens e }

-- | ArrayLiteral.  The three bracket forms share the "[" prefix, so the
-- first two alternatives are wrapped in 'try' to allow backtracking.
arrayLit :: JSParser ArrayLit
arrayLit =
    try (do { punctuator "["; me <- optionMaybe elision; punctuator "]"; return $ ArrayLitH me }) <|>
    try (do { punctuator "["; el <- elementList; punctuator "]"; return $ ArrayLit el }) <|>
    do { punctuator "[";
         el <- elementList;
         punctuator ",";
         me <- optionMaybe elision;
         punctuator "]";
         return $ ArrayLitT el me }

-- | ElementList: elements with an optional elision before each one.
elementList :: JSParser ElementList
elementList =
    do { me <- optionMaybe elision; ae <- assignExpr; applyRest elementListRest (ElementList me ae) }

elementListRest :: JSParser (ElementList -> JSParser ElementList)
elementListRest =
    do { punctuator ","; me <- optionMaybe elision; ae <- assignExpr; return $ \el -> applyRest elementListRest (ElementListCons el me ae) }

-- | Elision: one or more consecutive commas inside an array literal.
elision :: JSParser Elision
elision =
    do { punctuator ","; applyRest elisionRest ElisionComma }

elisionRest :: JSParser (Elision -> JSParser Elision)
elisionRest =
    do { punctuator ","; return $ \e -> applyRest elisionRest (ElisionCons e) }

-- | ObjectLiteral: empty, or a property list with an optional trailing
-- comma.
objectLit :: JSParser ObjectLit
objectLit =
    try (do { punctuator "{"; punctuator "}"; return ObjectLitEmpty }) <|>
    do { punctuator "{"; pl <- propList; optional $ punctuator ","; punctuator "}"; return $ ObjectLit pl }

propList :: JSParser PropList
propList =
    do { pa <- propAssign; applyRest propListRest (PropListAssign pa) }

propListRest :: JSParser (PropList -> JSParser PropList)
propListRest =
    do { punctuator ","; pa <- propAssign; return $ \pl -> applyRest propListRest (PropListCons pl pa) }

-- | PropertyAssignment: plain "name: value", or get/set accessors.
propAssign :: JSParser PropAssign
propAssign =
    do { pn <- propName; punctuator ":"; ae <- assignExpr; return $ PropAssignField pn ae } <|>
    do { identName "get"; pn <- propName; punctuator "("; punctuator ")"; punctuator "{"; fb <- funcBody; punctuator "}"; return $ PropAssignGet pn fb } <|>
    do { identName "set"; pn <- propName; punctuator "("; pl <- propSetParamList; punctuator ")"; punctuator "{"; fb <- funcBody; punctuator "}"; return $ PropAssignSet pn pl fb }

-- | PropertyName: identifier, string literal, or numeric literal.
propName :: JSParser PropName
propName =
    do { name <- getIdentName; return $ PropNameId name } <|>
    do { sl <- getStringLit; return $ PropNameStr sl } <|>
    do { nl <- getNumLit; return $ PropNameNum nl }

propSetParamList :: JSParser PropSetParamList
propSetParamList =
    do { i <- getIdent; return $ PropSetParamList i }
-- * Member, call, new, postfix and unary expressions (§11.2–§11.4).

memberExpr :: JSParser MemberExpr
memberExpr =
    do { identName "new"; me <- memberExpr; args <- arguments; applyRest memberExprRest (MemberExprNew me args) } <|>
    do { pe <- primExpr; applyRest memberExprRest (MemberExprPrim pe) } <|>
    do { fe <- funcExpr; applyRest memberExprRest (MemberExprFunc fe) }

-- | Trailing [index] / .name accessors after a member expression.
memberExprRest :: JSParser (MemberExpr -> JSParser MemberExpr)
memberExprRest =
    do { punctuator "["; e <- expr; punctuator "]"; return $ \me -> applyRest memberExprRest (MemberExprArray me e) } <|>
    do { punctuator "."; name <- getIdentName; return $ \me -> applyRest memberExprRest (MemberExprDot me name) }

-- | NewExpression: a MemberExpression, or "new" without arguments.
-- 'try' because both branches can begin with "new".
newExpr :: JSParser NewExpr
newExpr =
    try (do { me <- memberExpr; return $ NewExprMember me }) <|>
    do { identName "new"; ne <- newExpr; return $ NewExprNew ne }

callExpr :: JSParser CallExpr
callExpr =
    do { me <- memberExpr; args <- arguments; applyRest callExprRest (CallExprMember me args) }

-- | Trailing (args) / [index] / .name after a call expression.
callExprRest :: JSParser (CallExpr -> JSParser CallExpr)
callExprRest =
    do { args <- arguments; return $ \ce -> applyRest callExprRest (CallExprCall ce args) } <|>
    do { punctuator "["; e <- expr; punctuator "]"; return $ \ce -> applyRest callExprRest (CallExprArray ce e) } <|>
    do { punctuator "."; name <- getIdentName; return $ \ce -> applyRest callExprRest (CallExprDot ce name) }

arguments :: JSParser Arguments
arguments =
    try (do { punctuator "("; punctuator ")"; return ArgumentsEmpty }) <|>
    do { punctuator "("; al <- argumentList; punctuator ")"; return $ Arguments al }

argumentList :: JSParser ArgumentList
argumentList =
    chainl1'
        (do { ae <- assignExpr; return $ ArgumentList ae })
        assignExpr
        (do { punctuator ","; return ArgumentListCons })

-- | LeftHandSideExpression; 'try' because CallExpression and
-- NewExpression share the MemberExpression prefix.
leftExpr :: JSParser LeftExpr
leftExpr =
    try (do { ce <- callExpr; return $ LeftExprCallExpr ce }) <|>
    do { ne <- newExpr; return $ LeftExprNewExpr ne }

-- | PostfixExpression: optional ++/-- after a left-hand-side expression.
-- NOTE(review): ES3 forbids a line terminator before postfix ++/--;
-- nothing here enforces that — confirm it is handled elsewhere.
postFixExpr :: JSParser PostFixExpr
postFixExpr = do
    le <- leftExpr
    do { punctuator "++"; return $ PostFixExprPostInc le } <|>
        do { punctuator "--"; return $ PostFixExprPostDec le } <|>
        do { return $ PostFixExprLeftExpr le }

-- | UnaryExpression: prefix operators, falling through to postfix.
uExpr :: JSParser UExpr
uExpr =
    do { identName "delete"; u <- uExpr; return $ UExprDelete u } <|>
    do { identName "void"; u <- uExpr; return $ UExprVoid u } <|>
    do { identName "typeof"; u <- uExpr; return $ UExprTypeOf u } <|>
    do { punctuator "++"; u <- uExpr; return $ UExprDoublePlus u } <|>
    do { punctuator "--"; u <- uExpr; return $ UExprDoubleMinus u } <|>
    do { punctuator "+"; u <- uExpr; return $ UExprUnaryPlus u } <|>
    do { punctuator "-"; u <- uExpr; return $ UExprUnaryMinus u } <|>
    do { punctuator "~"; u <- uExpr; return $ UExprBitNot u } <|>
    do { punctuator "!"; u <- uExpr; return $ UExprNot u } <|>
    do { pe <- postFixExpr; return $ UExprPostFix pe }
-- * Binary operator precedence levels (§11.5–§11.13).
-- Each level folds a left-associative chain with 'chainl1''.  The *NoIn
-- variants omit the "in" relational operator, as required inside the head
-- of a for-statement.  NOTE(review): alternative ordering such as "=="
-- before "===" (and "<" before "<=") is safe only if the lexer emits
-- maximal-munch punctuator tokens, i.e. "===" arrives as one token —
-- confirm in Language.JavaScript.Lexer.

multExpr :: JSParser MultExpr
multExpr =
    chainl1'
        (do { u <- uExpr; return $ MultExpr u })
        uExpr
        (do { punctuator "*"; return MultExprMult } <|>
         do { punctuator "/"; return MultExprDiv } <|>
         do { punctuator "%"; return MultExprMod })

addExpr :: JSParser AddExpr
addExpr =
    chainl1'
        (do { m <- multExpr; return $ AddExpr m })
        multExpr
        (do { punctuator "+"; return AddExprAdd } <|>
         do { punctuator "-"; return AddExprSub })

shiftExpr :: JSParser ShiftExpr
shiftExpr =
    chainl1'
        (do { a <- addExpr; return $ ShiftExpr a })
        addExpr
        (do { punctuator "<<"; return ShiftExprLeft } <|>
         do { punctuator ">>"; return ShiftExprRight } <|>
         do { punctuator ">>>"; return ShiftExprRightZero })

relExpr :: JSParser RelExpr
relExpr =
    chainl1'
        (do { s <- shiftExpr; return $ RelExpr s })
        shiftExpr
        (do { punctuator "<"; return RelExprLess } <|>
         do { punctuator ">"; return RelExprGreater } <|>
         do { punctuator "<="; return RelExprLessEq } <|>
         do { punctuator ">="; return RelExprGreaterEq } <|>
         do { identName "instanceof"; return RelExprInstanceOf } <|>
         do { identName "in"; return RelExprIn })

relExprNoIn :: JSParser RelExprNoIn
relExprNoIn =
    chainl1'
        (do { s <- shiftExpr; return $ RelExprNoIn s })
        shiftExpr
        (do { punctuator "<"; return RelExprNoInLess } <|>
         do { punctuator ">"; return RelExprNoInGreater } <|>
         do { punctuator "<="; return RelExprNoInLessEq } <|>
         do { punctuator ">="; return RelExprNoInGreaterEq } <|>
         do { identName "instanceof"; return RelExprNoInInstanceOf })

eqExpr :: JSParser EqExpr
eqExpr =
    chainl1'
        (do { r <- relExpr; return $ EqExpr r })
        relExpr
        (do { punctuator "=="; return EqExprEq } <|>
         do { punctuator "!="; return EqExprNotEq } <|>
         do { punctuator "==="; return EqExprStrictEq } <|>
         do { punctuator "!=="; return EqExprStrictNotEq })

eqExprNoIn :: JSParser EqExprNoIn
eqExprNoIn =
    chainl1'
        (do { r <- relExprNoIn; return $ EqExprNoIn r })
        relExprNoIn
        (do { punctuator "=="; return EqExprNoInEq } <|>
         do { punctuator "!="; return EqExprNoInNotEq } <|>
         do { punctuator "==="; return EqExprNoInStrictEq } <|>
         do { punctuator "!=="; return EqExprNoInStrictNotEq })

bitAndExpr :: JSParser BitAndExpr
bitAndExpr =
    chainl1'
        (do { e <- eqExpr; return $ BitAndExpr e })
        eqExpr
        (do { punctuator "&"; return $ BitAndExprAnd })

bitAndExprNoIn :: JSParser BitAndExprNoIn
bitAndExprNoIn =
    chainl1'
        (do { e <- eqExprNoIn; return $ BitAndExprNoIn e })
        eqExprNoIn
        (do { punctuator "&"; return $ BitAndExprNoInAnd })

bitXorExpr :: JSParser BitXorExpr
bitXorExpr =
    chainl1'
        (do { bae <- bitAndExpr; return $ BitXorExpr bae })
        bitAndExpr
        (do { punctuator "^"; return $ BitXorExprXor })

bitXorExprNoIn :: JSParser BitXorExprNoIn
bitXorExprNoIn =
    chainl1'
        (do { bae <- bitAndExprNoIn; return $ BitXorExprNoIn bae })
        bitAndExprNoIn
        (do { punctuator "^"; return $ BitXorExprNoInXor })

bitOrExpr :: JSParser BitOrExpr
bitOrExpr =
    chainl1'
        (do { bxe <- bitXorExpr; return $ BitOrExpr bxe })
        bitXorExpr
        (do { punctuator "|"; return $ BitOrExprOr })

bitOrExprNoIn :: JSParser BitOrExprNoIn
bitOrExprNoIn =
    chainl1'
        (do { bxe <- bitXorExprNoIn; return $ BitOrExprNoIn bxe })
        bitXorExprNoIn
        (do { punctuator "|"; return $ BitOrExprNoInOr })

logicalAndExpr :: JSParser LogicalAndExpr
logicalAndExpr =
    chainl1'
        (do { boe <- bitOrExpr; return $ LogicalAndExpr boe })
        bitOrExpr
        (do { punctuator "&&"; return $ LogicalAndExprAnd })

logicalAndExprNoIn :: JSParser LogicalAndExprNoIn
logicalAndExprNoIn =
    chainl1'
        (do { boe <- bitOrExprNoIn; return $ LogicalAndExprNoIn boe })
        bitOrExprNoIn
        (do { punctuator "&&"; return $ LogicalAndExprNoInAnd })

logicalOrExpr :: JSParser LogicalOrExpr
logicalOrExpr =
    chainl1'
        (do { lae <- logicalAndExpr; return $ LogicalOrExpr lae })
        logicalAndExpr
        (do { punctuator "||"; return $ LogicalOrExprOr })

logicalOrExprNoIn :: JSParser LogicalOrExprNoIn
logicalOrExprNoIn =
    chainl1'
        (do { lae <- logicalAndExprNoIn; return $ LogicalOrExprNoIn lae })
        logicalAndExprNoIn
        (do { punctuator "||"; return $ LogicalOrExprNoInOr })

-- | ConditionalExpression: logical-or, optionally "? e : e".
condExpr :: JSParser CondExpr
condExpr = do
    loe <- logicalOrExpr
    do { try $ do { punctuator "?"; }; ae1 <- assignExpr; punctuator ":"; ae2 <- assignExpr; return $ CondExprIf loe ae1 ae2 } <|>
        do { return $ CondExpr loe }

condExprNoIn :: JSParser CondExprNoIn
condExprNoIn = do
    loe <- logicalOrExprNoIn;
    do { try $ do { punctuator "?"; }; ae1 <- assignExpr; punctuator ":"; ae2 <- assignExpr; return $ CondExprNoInIf loe ae1 ae2 } <|>
        do { return $ CondExprNoIn loe }

-- | AssignmentExpression; 'try' backtracks when a left-hand side parses
-- but is not followed by an assignment operator.
assignExpr :: JSParser AssignExpr
assignExpr =
    try (do { le <- leftExpr; ao <- assignOp; ae <- assignExpr; return $ AssignExprAssign le ao ae }) <|>
    do { ce <- condExpr; return $ AssignExprCondExpr ce }

assignExprNoIn :: JSParser AssignExprNoIn
assignExprNoIn =
    try (do { le <- leftExpr; ao <- assignOp; ae <- assignExprNoIn; return $ AssignExprNoInAssign le ao ae }) <|>
    do { ce <- condExprNoIn; return $ AssignExprNoInCondExpr ce }

-- | The twelve assignment operators.
assignOp :: JSParser AssignOp
assignOp =
    do { punctuator "="; return AssignOpNormal } <|>
    do { punctuator "*="; return AssignOpMult } <|>
    do { punctuator "/="; return AssignOpDiv } <|>
    do { punctuator "%="; return AssignOpMod } <|>
    do { punctuator "+="; return AssignOpPlus } <|>
    do { punctuator "-="; return AssignOpMinus } <|>
    do { punctuator "<<="; return AssignOpShiftLeft } <|>
    do { punctuator ">>="; return AssignOpShiftRight } <|>
    do { punctuator ">>>="; return AssignOpShiftRightZero } <|>
    do { punctuator "&="; return AssignOpBitAnd } <|>
    do { punctuator "^="; return AssignOpBitXor } <|>
    do { punctuator "|="; return AssignOpBitOr }

-- | Comma-sequence expression.
expr :: JSParser Expr
expr =
    chainl1'
        (do { ae <- assignExpr; return $ Expr ae })
        assignExpr
        (do { punctuator ","; return ExprCons })

exprNoIn :: JSParser ExprNoIn
exprNoIn =
    chainl1'
        (do { ae <- assignExprNoIn; return $ ExprNoIn ae })
        assignExprNoIn
        (do { punctuator ","; return ExprNoInCons })
-- * Statements (§12).

-- | Statement dispatch.  Only exprStmt is wrapped in 'try': an expression
-- can share a leading token with the keyword statements below it, so a
-- failed expression parse must not consume input.
stmt :: JSParser Stmt
stmt =
    do { b <- block; return $ StmtBlock b } <|>
    do { vs <- varStmt; return $ StmtVar vs } <|>
    do { es <- emptyStmt; return $ StmtEmpty es } <|>
    try (do { es <- exprStmt; return $ StmtExpr es }) <|>
    do { ic <- contStmt; return $ StmtCont ic } <|>
    do { is <- ifStmt; return $ StmtIf is } <|>
    do { is <- itStmt; return $ StmtIt is } <|>
    do { bs <- breakStmt; return $ StmtBreak bs } <|>
    do { rs <- returnStmt; return $ StmtReturn rs } <|>
    do { ws <- withStmt; return $ StmtWith ws } <|>
    do { ss <- switchStmt; return $ StmtSwitch ss } <|>
    do { ts <- throwStmt; return $ StmtThrow ts } <|>
    do { ts <- tryStmt; return $ StmtTry ts } <|>
    do { ds <- dbgStmt; return $ StmtDbg ds } <|>
    do { ls <- labelledStmt; return $ StmtLabelled ls }

block :: JSParser Block
block =
    do { punctuator "{"; msl <- optionMaybe stmtList; punctuator "}"; return $ Block msl }

-- | One or more statements; the operator parser always succeeds without
-- consuming, so the chain ends when 'stmt' itself fails.
stmtList :: JSParser StmtList
stmtList =
    chainl1'
        (do { s <- stmt; return $ StmtList s })
        stmt
        (do { return StmtListCons })

varStmt :: JSParser VarStmt
varStmt =
    do { identName "var"; vdl <- varDeclList; return $ VarStmt vdl }

varDeclList :: JSParser VarDeclList
varDeclList =
    chainl1'
        (do { vd <- varDecl; return $ VarDeclList vd})
        varDecl
        (do { punctuator ","; return $ VarDeclListCons })

varDeclListNoIn :: JSParser VarDeclListNoIn
varDeclListNoIn =
    chainl1'
        (do { vd <- varDeclNoIn; return $ VarDeclListNoIn vd})
        varDeclNoIn
        (do { punctuator ","; return $ VarDeclListNoInCons})

varDecl :: JSParser VarDecl
varDecl =
    do { i <- getIdent; mInit <- optionMaybe initialiser; return $ VarDecl i mInit }

varDeclNoIn :: JSParser VarDeclNoIn
varDeclNoIn =
    do { i <- getIdent; mInit <- optionMaybe initialiserNoIn; return $ VarDeclNoIn i mInit }

initialiser :: JSParser Initialiser
initialiser =
    do { punctuator "="; ae <- assignExpr; return $ Initialiser ae }

initialiserNoIn :: JSParser InitialiserNoIn
initialiserNoIn =
    do { punctuator "="; ae <- assignExprNoIn; return $ InitialiserNoIn ae }

emptyStmt :: JSParser EmptyStmt
emptyStmt =
    do { punctuator ";"; return EmptyStmt }

-- | ExpressionStatement: must not begin with "{" or "function" (§12.4),
-- which would be ambiguous with blocks / function declarations.
exprStmt :: JSParser ExprStmt
exprStmt =
    do { notFollowedBy (try $ punctuator "{" <|> identName "function"); e <- expr; autoSemi; return $ ExprStmt e }

-- | If/else; the else-bearing form is tried first so the dangling else
-- binds to the innermost if.
ifStmt :: JSParser IfStmt
ifStmt =
    try (do { identName "if"; punctuator "("; e <- expr; punctuator ")"; s1 <- stmt; identName "else"; s2 <- stmt; return $ IfStmtIfElse e s1 s2 }) <|>
    do { identName "if"; punctuator "("; e <- expr; punctuator ")"; s <- stmt; return $ IfStmtIf e s }

-- | Iteration statements: do/while, while, the three-clause for, for-var,
-- for-in and for-var-in.  The 'try's allow the shared "for (" prefix to
-- backtrack between the four for-forms.
itStmt :: JSParser ItStmt
itStmt =
    try (do { identName "do"; s <- stmt; identName "while"; punctuator "("; e <- expr; punctuator ")"; return $ ItStmtDoWhile s e }) <|>
    try (do { identName "while"; punctuator "("; e <- expr; punctuator ")"; s <- stmt; return $ ItStmtWhile e s }) <|>
    try (do { identName "for"; punctuator "("; me1 <- optionMaybe exprNoIn; punctuator ";"; me2 <- optionMaybe expr; punctuator ";"; me3 <- optionMaybe expr; punctuator ")"; s <- stmt; return $ ItStmtFor me1 me2 me3 s }) <|>
    try (do { identName "for"; punctuator "("; identName "var"; vdl <- varDeclListNoIn; punctuator ";"; me1 <- optionMaybe expr; punctuator ";"; me2 <- optionMaybe expr; punctuator ")"; s <- stmt; return $ ItStmtForVar vdl me1 me2 s }) <|>
    do { identName "for"; punctuator "("; le <- leftExpr; identName "in"; e <- expr; punctuator ")"; s <- stmt; return $ ItStmtForIn le e s } <|>
    do { identName "for"; punctuator "("; identName "var"; vd <- varDeclNoIn; identName "in"; e <- expr; punctuator ")"; s <- stmt; return $ ItStmtForVarIn vd e s }

-- | continue [label]; the label, when present, must start on the same
-- line ('noLineTerminatorHere', per §12.7 restricted productions).
contStmt :: JSParser ContStmt
contStmt =
    try (do { identName "continue"; autoSemi; return ContStmt }) <|>
    do { identName "continue"; noLineTerminatorHere; i <- getIdent; autoSemi; return $ ContStmtLabelled i }

breakStmt :: JSParser BreakStmt
breakStmt =
    try (do { identName "break"; autoSemi; return BreakStmt }) <|>
    do { identName "break"; noLineTerminatorHere; i <- getIdent; autoSemi; return $ BreakStmtLabelled i }

returnStmt :: JSParser ReturnStmt
returnStmt =
    try (do { identName "return"; autoSemi; return ReturnStmt }) <|>
    do { identName "return"; noLineTerminatorHere; e <- expr; autoSemi; return $ ReturnStmtExpr e }

withStmt :: JSParser WithStmt
withStmt =
    do { identName "with"; punctuator "("; e <- expr; punctuator ")"; s <- stmt; return $ WithStmt e s }

switchStmt :: JSParser SwitchStmt
switchStmt =
    do { identName "switch"; punctuator "("; e <- expr; punctuator ")"; cb <- caseBlock; return $ SwitchStmt e cb }

-- | CaseBlock: clauses with an optional default clause in the middle.
caseBlock :: JSParser CaseBlock
caseBlock =
    try (do { punctuator "{"; mcc <- optionMaybe caseClauses; punctuator "}"; return $ CaseBlock mcc }) <|>
    do { punctuator "{"; mcc1 <- optionMaybe caseClauses; dc <- defaultClause; mcc2 <- optionMaybe caseClauses; punctuator "}"; return $ CaseBlockDefault mcc1 dc mcc2 }

caseClauses :: JSParser CaseClauses
caseClauses =
    chainl1'
        (do { cc <- caseClause; return $ CaseClauses cc })
        caseClause
        (do { return $ CaseClausesCons })

caseClause :: JSParser CaseClause
caseClause =
    do { identName "case"; e <- expr; punctuator ":"; msl <- optionMaybe stmtList; return $ CaseClause e msl }

defaultClause :: JSParser DefaultClause
defaultClause =
    do { identName "default"; punctuator ":"; msl <- optionMaybe stmtList; return $ DefaultClause msl }

labelledStmt :: JSParser LabelledStmt
labelledStmt =
    do { i <- getIdent; punctuator ":"; s <- stmt; return $ LabelledStmt i s }

-- | throw expr; the expression must start on the same line (§12.13).
throwStmt :: JSParser ThrowStmt
throwStmt =
    do { identName "throw"; noLineTerminatorHere; e <- expr; autoSemi; return $ ThrowStmt e }
-- | try/catch/finally: after the try block, either a finally alone, or a
-- catch optionally followed by a finally.
tryStmt :: JSParser TryStmt
tryStmt = do
    identName "try"
    b <- block
    do { f <- finally; return $ TryStmtBF b f } <|>
        do { c <- catch;
             do { f <- finally; return $ TryStmtBCF b c f } <|>
                 do { return $ TryStmtBC b c }
           }

catch :: JSParser Catch
catch =
    do { identName "catch"; punctuator "("; i <- getIdent; punctuator ")"; b <- block; return $ Catch i b }

finally :: JSParser Finally
finally =
    do { identName "finally"; b <- block; return $ Finally b }

dbgStmt :: JSParser DbgStmt
dbgStmt =
    do { identName "debugger"; autoSemi; return DbgStmt }

-- | FunctionDeclaration (named) and FunctionExpression (optional name).
funcDecl :: JSParser FuncDecl
funcDecl =
    do { identName "function"; i <- getIdent; punctuator "("; mfpl <- optionMaybe formalParamList; punctuator ")"; punctuator "{"; fb <- funcBody; punctuator "}"; return $ FuncDecl i mfpl fb }

funcExpr :: JSParser FuncExpr
funcExpr =
    do { identName "function"; mi <- optionMaybe getIdent; punctuator "("; mfpl <- optionMaybe formalParamList; punctuator ")"; punctuator "{"; fb <- funcBody; punctuator "}"; return $ FuncExpr mi mfpl fb }

formalParamList :: JSParser FormalParamList
formalParamList =
    chainl1'
        (do { i <- getIdent; return $ FormalParamList i })
        getIdent
        (do { punctuator ","; return $ FormalParamListCons })

funcBody :: JSParser FuncBody
funcBody =
    do { mse <- optionMaybe sourceElements; return $ FuncBody mse }

program :: JSParser Program
program =
    do { mse <- optionMaybe sourceElements; return $ Program mse }

sourceElements :: JSParser SourceElements
sourceElements =
    chainl1'
        (do { se <- sourceElement; return $ SourceElements se })
        sourceElement
        (do { return SourceElementsCons })

sourceElement :: JSParser SourceElement
sourceElement =
    do { s <- stmt; return $ SourceElementStmt s } <|>
    do { fd <- funcDecl; return $ SourceElementFuncDecl fd }

-- | Entry point: lex the input, skip leading/trailing line terminators,
-- and parse a whole Program up to end-of-input.
parseJavaScript :: String -> Either ParseError Program
parseJavaScript input =
    let p = many lineTerminator *> program <* many lineTerminator <* eof
    in runParser p newJSPState "" $ lexJavaScript input
|
fabianbergmark/ECMA-262
|
src/Language/JavaScript/Parser.hs
|
bsd-2-clause
| 19,754
| 0
| 22
| 3,851
| 7,522
| 3,723
| 3,799
| 446
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE UndecidableInstances #-}
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2013 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : GADTs, TFs, MPTCs
--
----------------------------------------------------------------------------
module Data.Functor.Contravariant.Yoneda
( Yoneda(..)
, liftYoneda, lowerYoneda
) where
import Data.Functor.Contravariant
import Data.Functor.Contravariant.Adjunction
import Data.Functor.Contravariant.Rep
-- | The Yoneda embedding of a presheaf @f@: a value @Yoneda f a@ packages
-- @f@ together with a pending 'contramap', so repeated contramaps fuse
-- into a single function composition before touching @f@.
newtype Yoneda f a = Yoneda { runYoneda :: forall r. (r -> a) -> f r }
-- | Lift a contravariant functor into its Yoneda presheaf, deferring
-- future 'contramap's.
--
-- @
-- 'liftYoneda' . 'lowerYoneda' ≡ 'id'
-- 'lowerYoneda' . 'liftYoneda' ≡ 'id'
-- @
liftYoneda :: Contravariant f => f a -> Yoneda f a
liftYoneda fa = Yoneda $ \ra -> contramap ra fa
{-# INLINE liftYoneda #-}
-- | Run the deferred 'contramap's by applying the stored continuation to
-- 'id'.  Needs no constraint on @f@.
lowerYoneda :: Yoneda f a -> f a
lowerYoneda f = runYoneda f id
{-# INLINE lowerYoneda #-}
instance Contravariant (Yoneda f) where
    -- No constraint on @f@ needed: contramap merely composes with the
    -- stored continuation ('fmap' on @(->) r@ is composition).
    contramap ab (Yoneda m) = Yoneda (m . fmap ab)
    {-# INLINE contramap #-}
-- | The representation of @f@ carries over unchanged, transporting each
-- method through 'liftYoneda' / 'lowerYoneda'.
instance Representable f => Representable (Yoneda f) where
    type Rep (Yoneda f) = Rep f
    tabulate = liftYoneda . tabulate
    {-# INLINE tabulate #-}
    index m a = index (lowerYoneda m) a
    {-# INLINE index #-}
    contramapWithRep beav = liftYoneda . contramapWithRep beav . lowerYoneda
    {-# INLINE contramapWithRep #-}
-- | An adjunction between presheaves induces one between their Yoneda
-- embeddings, again by transporting through lift/lower.
instance Adjunction f g => Adjunction (Yoneda f) (Yoneda g) where
    leftAdjunct f = liftYoneda . leftAdjunct (lowerYoneda . f)
    {-# INLINE leftAdjunct #-}
    rightAdjunct f = liftYoneda . rightAdjunct (lowerYoneda . f)
    {-# INLINE rightAdjunct #-}
|
xuwei-k/kan-extensions
|
src/Data/Functor/Contravariant/Yoneda.hs
|
bsd-3-clause
| 1,979
| 0
| 10
| 333
| 393
| 222
| 171
| 35
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Lichen.Parser where
import Data.Aeson
import qualified Data.Text as T
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS.C8
import Lichen.Error
-- | A parser takes the source path (for error reporting) and the raw
-- bytes, producing either a value or a Lichen error.
type Parser a = FilePath -> BS.ByteString -> Erring a

-- | Generic syntax-tree node: interior 'Node's carry tags plus children;
-- the remaining constructors are leaf metadata/data payloads.
data Node = Node [Tag] [Node]
          | MetaCount Int
          | MetaIdent T.Text
          | DataInt Integer
          | DataFloat Double
          | DataBool Bool
          | DataStr T.Text
          | DataBytes BS.C8.ByteString
          deriving (Show, Eq)
-- | Serialise a node as a tagged JSON object: a "type" discriminator plus
-- the payload.
instance ToJSON Node where
    toJSON (Node ts ns) = object [ "type" .= ("node" :: T.Text)
                                 , "tags" .= (toJSON <$> ts)
                                 , "children" .= (toJSON <$> ns)
                                 ]
    toJSON (MetaCount d) = object [ "type" .= ("meta_count" :: T.Text)
                                  , "data" .= d
                                  ]
    -- NOTE(review): "metadata_identifier" breaks the short naming scheme
    -- of the other discriminators ("meta_count", "data_*"); confirm
    -- consumers expect this exact string before normalising it.
    toJSON (MetaIdent d) = object [ "type" .= ("metadata_identifier" :: T.Text)
                                  , "data" .= d
                                  ]
    toJSON (DataInt d) = object [ "type" .= ("data_int" :: T.Text)
                                , "data" .= d
                                ]
    toJSON (DataFloat d) = object [ "type" .= ("data_float" :: T.Text)
                                  , "data" .= d
                                  ]
    toJSON (DataBool d) = object [ "type" .= ("data_bool" :: T.Text)
                                 , "data" .= d
                                 ]
    toJSON (DataStr d) = object [ "type" .= ("data_str" :: T.Text)
                                , "data" .= d
                                ]
    -- Raw bytes are not valid JSON text; 'show' yields an escaped ASCII
    -- rendering of the ByteString.
    toJSON (DataBytes d) = object [ "type" .= ("data_bytes" :: T.Text)
                                  , "data" .= show d
                                  ]
-- | A node tag (e.g. "call"): just labelled text.
newtype Tag = Tag T.Text deriving (Show, Eq)

instance ToJSON Tag where
    toJSON (Tag x) = toJSON x
-- | Whether the given node is an interior node carrying tag @t@.
-- Leaf constructors never carry tags, so they always yield False.
hasTag :: T.Text -> Node -> Bool
hasTag t node =
    case node of
        Node tags _ -> Tag t `elem` tags
        _           -> False
-- | Count occurrences of tag @t@ anywhere in the subtree rooted at the
-- given node, including the node itself.  Leaves contribute nothing.
countTag :: T.Text -> Node -> Integer
countTag t node@(Node _ children) =
    let self = if hasTag t node then 1 else 0
    in self + sum (map (countTag t) children)
countTag _ _ = 0
-- | True when identifier @t@ occurs as a 'MetaIdent' in this node or,
-- transitively, in any descendant.
identChild :: T.Text -> Node -> Bool
identChild t (Node _ children)  = any (identChild t) children
identChild t (MetaIdent name)   = t == name
identChild _ _                  = False
-- | Count calls to the function named @t@: a node tagged "call" counts
-- when its first child resolves (via 'identChild') to identifier @t@;
-- counts are summed over the entire subtree.
countCall :: T.Text -> Node -> Integer
countCall t n@(Node _ ns) = (if hasTag "call" n then case ns of (f:_) -> if identChild t f then 1 else 0; _ -> 0 else 0) + sum (countCall t <$> ns)
countCall _ _ = 0
-- | Maximum nesting depth of nodes tagged @t@ along any root-to-leaf path
-- (0 when the tag never occurs; the 0 seed keeps 'maximum' total on
-- childless nodes).
countDepth :: T.Text -> Node -> Integer
countDepth t n@(Node _ ns) = (if hasTag t n then 1 else 0) + maximum (0:(countDepth t <$> ns))
countDepth _ _ = 0
|
Submitty/AnalysisTools
|
src/Lichen/Parser.hs
|
bsd-3-clause
| 2,720
| 0
| 12
| 1,084
| 935
| 505
| 430
| 54
| 4
|
{-# LANGUAGE LambdaCase #-}
module Data.List.Util where
import Data.List
-- | Strict sum of a list: the empty list gives 0; otherwise the elements
-- are folded strictly with (+) starting from the first element, exactly
-- as @foldl1' (+)@ does (no extra @0 +@ step on non-empty input).
sum' :: Num a => [a] -> a
sum' []       = 0
sum' (x : xs) = foldl' (+) x xs
{-# INLINE sum' #-}
|
mstksg/tensor-ops
|
src/Data/List/Util.hs
|
bsd-3-clause
| 194
| 0
| 10
| 59
| 71
| 41
| 30
| 8
| 2
|
module View.Browse
( BrowseByLink(..)
, browseBarWidget
, resourceListWidget
, sortBarWidget
, sortResBarWidget
) where
import Import
import Handler.Utils
import Model.Browse
import Model.Resource
import Model.User
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.Text as T
import Text.Cassius (cassiusFile)
import Text.Julius (juliusFile)
import Text.Hamlet (hamletFile)
-- | CSS font-weight selector: \"bold\" when the two values coincide,
-- \"normal\" otherwise.  Used to highlight the active link in the bars.
boldIfEq :: Eq a => a -> a -> Text
boldIfEq a b =
    if a == b
        then "bold"
        else "normal"
-- | Horizontal "browse by:" bar.  The link matching 'browse_by_link' is
-- rendered bold via the cassius rules below ('boldIfEq').
-- NOTE(review): hamlet nesting below is reconstructed — the original
-- indentation was lost; confirm rendering against the live page.
browseBarWidget :: BrowseByLink -> Widget
browseBarWidget browse_by_link = do
    [whamlet|
        <div .bar>browse by: #
            <a .bar-link #br-auth href=@{BrowseAuthorsR}>author
            |
            <a .bar-link #br-tag href=@{BrowseTagsR}>tag
            |
            <a .bar-link #br-coll href=@{BrowseCollectionsR}>collection
            |
            <a .bar-link #br-type href=@{BrowseTypesR}>type
            |
            <a .bar-link #br-res href=@{BrowseResourcesR}>list all
    |]
    topBarCSS
    toWidget [cassius|
        #br-auth
            font-weight: #{boldIfEq browse_by_link BrowseByAuthorLink}
        #br-coll
            font-weight: #{boldIfEq browse_by_link BrowseByCollectionLink}
        #br-res
            font-weight: #{boldIfEq browse_by_link BrowseByResourceLink}
        #br-tag
            font-weight: #{boldIfEq browse_by_link BrowseByTagLink}
        #br-type
            font-weight: #{boldIfEq browse_by_link BrowseByTypeLink}
    |]
-- | "sort <text> by:" bar, one clause per current 'SortBy' state.  In
-- each clause the active column's link is bolded and its arrow shows the
-- current direction; the link's href toggles to the opposite direction.
-- NOTE(review): hamlet nesting is reconstructed (original indentation was
-- lost); confirm the <span .arr> arrows really nest inside the links.
sortBarWidget :: Text -> SortBy -> Widget
sortBarWidget text SortByAZ = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
        <div .bar>sort #{text} by: #
            <a .bar-link #so-az href=@?{(route, addGetParam ("sort", T.pack (show SortByAZ)) params)}>a-z
            |
            <a .bar-link #so-count-down href=@?{(route, addGetParam ("sort", T.pack (show SortByCountDown)) params)}>count#
                <span .arr>▼
            |
            <a .bar-link #so-year-down href=@?{(route, addGetParam ("sort", T.pack (show SortByYearDown)) params)}>year#
                <span .arr>▼
    |]
    topBarCSS
    toWidget [cassius|
        #so-az
            font-weight: bold
    |]
-- Count ascending is active: the count link (pointing at descending)
-- is bolded and shows ▲.
sortBarWidget text SortByCountUp = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
        <div .bar>sort #{text} by: #
            <a .bar-link #so-az href=@?{(route, addGetParam ("sort", T.pack (show SortByAZ)) params)}>a-z
            |
            <a .bar-link #so-count-down href=@?{(route, addGetParam ("sort", T.pack (show SortByCountDown)) params)}>count#
                <span .arr>▲
            |
            <a .bar-link #so-year-down href=@?{(route, addGetParam ("sort", T.pack (show SortByYearDown)) params)}>year#
                <span .arr>▼
    |]
    topBarCSS
    toWidget [cassius|
        #so-count-down
            font-weight: bold
    |]
-- Count descending is active: the count link toggles back to ascending.
sortBarWidget text SortByCountDown = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
        <div .bar>sort #{text} by: #
            <a .bar-link #so-az href=@?{(route, addGetParam ("sort", T.pack (show SortByAZ)) params)}>a-z
            |
            <a .bar-link #so-count-up href=@?{(route, addGetParam ("sort", T.pack (show SortByCountUp)) params)}>count#
                <span .arr>▼
            |
            <a .bar-link #so-year-down href=@?{(route, addGetParam ("sort", T.pack (show SortByYearDown)) params)}>year#
                <span .arr>▼
    |]
    topBarCSS
    toWidget [cassius|
        #so-count-up
            font-weight: bold
    |]
-- Year ascending is active: the year link (pointing at descending)
-- is bolded and shows ▲.
sortBarWidget text SortByYearUp = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
        <div .bar>sort #{text} by: #
            <a .bar-link #so-az href=@?{(route, addGetParam ("sort", T.pack (show SortByAZ)) params)}>a-z
            |
            <a .bar-link #so-count-down href=@?{(route, addGetParam ("sort", T.pack (show SortByCountDown)) params)}>count#
                <span .arr>▼
            |
            <a .bar-link #so-year-down href=@?{(route, addGetParam ("sort", T.pack (show SortByYearDown)) params)}>year#
                <span .arr>▲
    |]
    topBarCSS
    toWidget [cassius|
        #so-year-down
            font-weight: bold
    |]
-- Year descending is active: the year link toggles back to ascending.
sortBarWidget text SortByYearDown = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
        <div .bar>sort #{text} by: #
            <a .bar-link #so-az href=@?{(route, addGetParam ("sort", T.pack (show SortByAZ)) params)}>a-z
            |
            <a .bar-link #so-count-down href=@?{(route, addGetParam ("sort", T.pack (show SortByCountDown)) params)}>count#
                <span .arr>▼
            |
            <a .bar-link #so-year-up href=@?{(route, addGetParam ("sort", T.pack (show SortByYearUp)) params)}>year#
                <span .arr>▼
    |]
    topBarCSS
    toWidget [cassius|
        #so-year-up
            font-weight: bold
    |]
-- | Resource-list sort bar (a-z / year only).  Same toggle-and-bold scheme
-- as 'sortBarWidget', but driven by the @sort-res@ GET parameter; the
-- wildcard clause covers the a-z (default) ordering.
-- NOTE(review): quasiquote indentation reconstructed — verify rendering.
sortResBarWidget :: SortBy -> Widget
sortResBarWidget SortByYearUp = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
<div .bar .sort-res-bar>sort resources by: #
  <a .bar-link #so-res-az href=@?{(route, addGetParam ("sort-res", T.pack (show SortByAZ)) params)}>a-z
  |
  <a .bar-link #so-res-year-down href=@?{(route, addGetParam ("sort-res", T.pack (show SortByYearDown)) params)}>year#
    <span .arr>▲
|]
    sortResBarCSS
    toWidget [cassius|
#so-res-year-down
  font-weight: bold
|]
sortResBarWidget SortByYearDown = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
<div .bar .sort-res-bar>sort resources by: #
  <a .bar-link #so-res-az href=@?{(route, addGetParam ("sort-res", T.pack (show SortByAZ)) params)}>a-z
  |
  <a .bar-link #so-res-year-up href=@?{(route, addGetParam ("sort-res", T.pack (show SortByYearUp)) params)}>year#
    <span .arr>▼
|]
    sortResBarCSS
    toWidget [cassius|
#so-res-year-up
  font-weight: bold
|]
sortResBarWidget _ = do
    (route, params) <- handlerToWidget getCurrentRouteWithGetParams
    [whamlet|
<div .bar .sort-res-bar>sort resources by: #
  <a .bar-link #so-res-az href=@?{(route, addGetParam ("sort-res", T.pack (show SortByAZ)) params)}>a-z
  |
  <a .bar-link #so-res-year-down href=@?{(route, addGetParam ("sort-res", T.pack (show SortByYearDown)) params)}>year#
    <span .arr>▼
|]
    sortResBarCSS
    toWidget [cassius|
#so-res-az
  font-weight: bold
|]
-- | CSS that applies to browse/sort/sort resources bars.
topBarCSS :: Widget
-- Shared look for the bar container, its links, and the small arrow glyphs.
-- NOTE(review): cassius property indentation reconstructed from a flattened
-- source — verify against the rendered stylesheet.
topBarCSS = toWidget
    [cassius|
.bar
  font-size: 1.1em
  font-variant: small-caps
  height: 1.1em
  line-height: 1.1em
.bar-link
  color: #069
a.bar-link:hover
  text-decoration: none
.arr
  font-size: 0.7em
|]
-- | CSS that applies to all sort resource bars (topBarCSS + bottom margin + bottom border)
sortResBarCSS :: Widget
-- Extends 'topBarCSS' with the bottom border/margin specific to the
-- resource sort bar.
sortResBarCSS = do
    topBarCSS
    toWidget [cassius|
.sort-res-bar
  border-bottom: 1px solid black
  margin-bottom: 4px
|]
-- | Render a list of resources: fetches the authors for all listed
-- resources in one query, and (when a user is logged in) the set of
-- resource ids that user has grokked.  @authorsMap@ and @grokked@ are
-- consumed by the spliced hamlet template, not by this function body.
resourceListWidget :: [Entity Resource] -> Widget
resourceListWidget resources = do
    let resource_ids = map entityKey resources
    authorsMap <- handlerToWidget $ runDB (fetchResourceAuthorsInDB resource_ids)
    -- (is_logged_in, grokked): anonymous visitors get (False, mempty) and
    -- never hit the DB for grok state.
    (is_logged_in, grokked) <- handlerToWidget $
        maybeAuthId >>= \case
            Nothing -> return (False, mempty)
            Just user_id -> runDB $ (,)
                <$> pure True
                <*> (S.fromList <$> fetchGrokkedResourceIdsInDB user_id resource_ids)
    toWidget $(hamletFile "templates/resource-list.hamlet")
    toWidget $(cassiusFile "templates/resource-list.cassius")
    -- logged-in users get the interactive (grok-toggle) JS variant
    if is_logged_in
        then toWidget $(juliusFile "templates/resource-list-logged-in.julius")
        else toWidget $(juliusFile "templates/resource-list-not-logged-in.julius")
|
duplode/dohaskell
|
src/View/Browse.hs
|
bsd-3-clause
| 7,966
| 0
| 15
| 1,977
| 810
| 439
| 371
| -1
| -1
|
{-# LANGUAGE CPP #-}
module Geo.Garmin(
time
, downloadDirectory
, buildDirectory
, distDirectory
, mkgmap
, splitter
, australiaOceaniaPbf
, getAustraliaOceania
, getAustraliaOceania'
, splitAustraliaOceania
, splitAustraliaOceania'
, gmapsuppAustraliaOceania
, gmapsuppAustraliaOceania'
, linkAustraliaOceania
, linkAustraliaOceania'
, getMountBarney
, getMountBarney'
, gmapsuppMountBarneyAustraliaOceania
, gmapsuppMountBarneyAustraliaOceania'
, linkAustraliaOceaniaMountBarney
, linkAustraliaOceaniaMountBarney'
, commands
, linkLatest
, Parameters(..)
, ReadParameters(..)
) where
#if !(MIN_VERSION_base(4,8,0))
import Control.Applicative(Applicative((<*>), pure))
#endif
import Data.Time(UTCTime(utctDay, utctDayTime), TimeOfDay(TimeOfDay), toGregorian, timeToTimeOfDay)
import Sys.Exit(CreateProcess, procIn)
import System.FilePath((</>))
-- | Format a 'UTCTime' as a compact @YYYYMMDD-HHMMSS@ stamp,
-- e.g. @20200102-030405@.  Month, day, hour, minute and second are
-- zero-padded to two characters; seconds are truncated ('floor').
time ::
  UTCTime
  -> String
time t =
  concat [show year, pad month, pad day, "-", pad hh, pad mm, pad (floor ss)]
  where
    -- Render a value and left-pad a single-character result with '0'.
    pad :: Show a => a -> String
    pad v = case show v of
      [c] -> ['0', c]
      cs  -> cs
    (year, month, day) = toGregorian (utctDay t)
    TimeOfDay hh mm ss = timeToTimeOfDay (utctDayTime t)
-- | Directory (relative to the working directory) where downloads land.
downloadDirectory ::
  FilePath
downloadDirectory =
  "download"

-- | Directory (relative to the working directory) for intermediate build output.
buildDirectory ::
  FilePath
buildDirectory =
  "build"

-- | Directory (relative to the working directory) for final artefacts/symlinks.
distDirectory ::
  FilePath
distDirectory =
  "dist"

-- | Location of the mkgmap jar, relative to the working directory.
mkgmap ::
  FilePath
mkgmap =
  "opt" </> "mkgmap" </> "mkgmap.jar"

-- | Location of the splitter jar, relative to the working directory.
splitter ::
  FilePath
splitter =
  "opt" </> "splitter" </> "splitter.jar"

-- | File name used for the downloaded Australia/Oceania OSM extract.
australiaOceaniaPbf ::
  FilePath
australiaOceaniaPbf =
  "australia-oceania.osm.pbf"
-- | Download step for the Australia/Oceania extract, reading the working
-- directory out of the 'Parameters' environment.
getAustraliaOceania ::
  ReadParameters CreateProcess
getAustraliaOceania =
  ReadParameters (\(Parameters workDir _ _) -> getAustraliaOceania' workDir)

-- | @wget@ the latest Australia/Oceania PBF from geofabrik into the
-- download directory under the given working directory, resuming any
-- partial download (@-c@).
getAustraliaOceania' ::
  FilePath
  -> CreateProcess
getAustraliaOceania' workDir =
  procIn targetDir "wget" wgetArgs
  where
    targetDir = workDir </> downloadDirectory
    wgetArgs =
      [
        "-c"
      , "http://download.geofabrik.de/australia-oceania-latest.osm.pbf"
      , "-O"
      , australiaOceaniaPbf
      ]
-- | Split step, reading the working directory and splitter jar from 'Parameters'.
splitAustraliaOceania ::
  ReadParameters CreateProcess
splitAustraliaOceania =
  ReadParameters (\(Parameters w _ s) -> splitAustraliaOceania' w s)

-- | Run splitter over the downloaded PBF, writing tiles into
-- @build/australia-oceania@ starting from map id 82345912.
splitAustraliaOceania' ::
  FilePath  -- ^ working directory
  -> FilePath  -- ^ splitter jar
  -> CreateProcess
splitAustraliaOceania' wd s =
  procIn (wd </> buildDirectory </> "australia-oceania") "java"
    [
      "-Xmx1536M"
    , "-jar"
    , s
    , wd </> downloadDirectory </> australiaOceaniaPbf
    , "--mapid=82345912"
    ]

-- | Map-build step, reading the working directory and mkgmap jar from 'Parameters'.
gmapsuppAustraliaOceania ::
  ReadParameters CreateProcess
gmapsuppAustraliaOceania =
  ReadParameters (\(Parameters w m _) -> gmapsuppAustraliaOceania' w m)

-- | Run mkgmap over the split tiles (via @-c template.args@) to produce a
-- routable, transparent @gmapsupp.img@ for Australia/Oceania.
gmapsuppAustraliaOceania' ::
  FilePath  -- ^ working directory
  -> FilePath  -- ^ mkgmap jar
  -> CreateProcess
gmapsuppAustraliaOceania' wd m =
  procIn (wd </> buildDirectory </> "australia-oceania") "java"
    [
      "-Xmx1536M"
    , "-jar"
    , m
    , "--add-pois-to-areas"
    , "--reduce-point-density-polygon=8"
    , "--remove-short-arcs"
    , "--route"
    , "--transparent"
    , "--gmapsupp"
    , "-c"
    , "template.args"
    , "--description=\"Australia and Oceania\""
    , "--country-name=\"Australia and Oceania\""
    , "--region-name=\"Australia and Oceania\""
    , "--region-abbr=AU"
    , "--country-abbr=AU"
    , "--drive-on=left"
    , "--check-roundabouts"
    ]

-- | Publish step, reading the working directory from 'Parameters'.
linkAustraliaOceania ::
  ReadParameters CreateProcess
linkAustraliaOceania =
  ReadParameters (\(Parameters w _ _) -> linkAustraliaOceania' w)

-- | Symlink the built @gmapsupp.img@ into @dist/australia-oceania@.
linkAustraliaOceania' ::
  FilePath
  -> CreateProcess
linkAustraliaOceania' wd =
  procIn (wd </> distDirectory </> "australia-oceania") "ln"
    [
      "-s"
    , ".." </> ".." </> buildDirectory </> "australia-oceania" </> "gmapsupp.img"
    ]
-- | Download step for the Mt Barney contour overlay image.
getMountBarney ::
  ReadParameters CreateProcess
getMountBarney =
  ReadParameters (\(Parameters w _ _) -> getMountBarney' w)

-- | @wget@ the Mt Barney National Park img into the download directory,
-- resuming any partial download (@-c@).
getMountBarney' ::
  FilePath
  -> CreateProcess
getMountBarney' wd =
  procIn (wd </> downloadDirectory) "wget"
    [
      "-c"
    , "http://geodetics.tmorris.net/misc/img/Mt_Barney_National_Park.img"
    , "-O"
    , "mt-barney-national-park.img"
    ]

-- | Combined-map build step, reading working directory and mkgmap jar
-- from 'Parameters'.
gmapsuppMountBarneyAustraliaOceania ::
  ReadParameters CreateProcess
gmapsuppMountBarneyAustraliaOceania =
  ReadParameters (\(Parameters w m _) -> gmapsuppMountBarneyAustraliaOceania' w m)

-- | Run mkgmap over the already-built Australia/Oceania @gmapsupp.img@
-- plus the Mt Barney contour img, producing a combined map in
-- @build/australia-oceania_mt-barney@.
gmapsuppMountBarneyAustraliaOceania' ::
  FilePath  -- ^ working directory
  -> FilePath  -- ^ mkgmap jar
  -> CreateProcess
gmapsuppMountBarneyAustraliaOceania' wd m =
  procIn (wd </> buildDirectory </> "australia-oceania_mt-barney") "java"
    [
      "-Xmx1536M"
    , "-jar"
    , m
    , "--add-pois-to-areas"
    , "--reduce-point-density-polygon=8"
    , "--remove-short-arcs"
    , "--route"
    , "--transparent"
    , "--gmapsupp"
    , "--description=\"Australia and Oceania and Mt Barney Contour\""
    , "--country-name=\"Australia and Oceania\""
    , "--country-abbr=AU"
    , "--region-name=\"Australia and Oceania\""
    , "--region-abbr=AU"
    , "--drive-on=left"
    , "--check-roundabouts"
    , wd </> buildDirectory </> "australia-oceania" </> "gmapsupp.img"
    , wd </> downloadDirectory </> "mt-barney-national-park.img"
    ]

-- | Publish step for the combined map.
linkAustraliaOceaniaMountBarney ::
  ReadParameters CreateProcess
linkAustraliaOceaniaMountBarney =
  ReadParameters (\(Parameters w _ _) -> linkAustraliaOceaniaMountBarney' w)

-- | Symlink the combined @gmapsupp.img@ into @dist/australia-oceania_mt-barney@.
linkAustraliaOceaniaMountBarney' ::
  FilePath
  -> CreateProcess
linkAustraliaOceaniaMountBarney' wd =
  procIn (wd </> distDirectory </> "australia-oceania_mt-barney") "ln"
    [
      "-s"
    , ".." </> ".." </> buildDirectory </> "australia-oceania_mt-barney" </> "gmapsupp.img"
    ]
-- | The full pipeline, in execution order: download, split, build, link
-- the base map, then download the overlay, build and link the combined
-- map.  'sequence' threads the shared 'Parameters' through every step.
commands ::
  ReadParameters [CreateProcess]
commands =
  sequence
    [
      getAustraliaOceania
    , splitAustraliaOceania
    , gmapsuppAustraliaOceania
    , linkAustraliaOceania
    , getMountBarney
    , gmapsuppMountBarneyAustraliaOceania
    , linkAustraliaOceaniaMountBarney
    ]

-- | Force-update a @latest@ symlink in directory @d@ to point at target
-- @t@ (@-f -n@ replace an existing link without dereferencing it).
linkLatest ::
  FilePath  -- ^ directory in which to create the link
  -> String  -- ^ link target
  -> CreateProcess
linkLatest d t =
  procIn d "ln"
    [
      "-f"
    , "-s"
    , "-n"
    , t
    , "latest"
    ]
----
-- | Positional configuration for a build run.
data Parameters =
  Parameters
    FilePath -- working directory
    FilePath -- mkgmap jar location
    FilePath -- splitter jar location
  deriving (Eq, Ord, Show)

-- | A function-from-'Parameters' wrapper (a Reader over 'Parameters');
-- @(~>.)@ runs a computation against a concrete 'Parameters' value.
newtype ReadParameters a =
  ReadParameters {
    (~>.) ::
      Parameters -> a
  }

instance Functor ReadParameters where
  fmap f (ReadParameters g) =
    ReadParameters (f . g)

instance Applicative ReadParameters where
  pure =
    ReadParameters . const
  -- function Applicative: apply both to the same Parameters
  ReadParameters f <*> ReadParameters a =
    ReadParameters (f <*> a)

instance Monad ReadParameters where
  return =
    pure
  -- function Monad: feed the same Parameters to both stages
  ReadParameters r >>= f =
    ReadParameters (\x -> f (r x) ~>. x)
|
tonymorris/osmgarmin
|
src/Geo/Garmin.hs
|
bsd-3-clause
| 6,563
| 2
| 14
| 1,309
| 1,382
| 775
| 607
| 238
| 2
|
{-# LANGUAGE DeriveDataTypeable, RecordWildCards, ScopedTypeVariables,
ViewPatterns #-}
{-# OPTIONS -Wall -fno-warn-missing-signatures -fno-warn-name-shadowing #-}
-- | Main entry point, just connect to the given IRC server and join
-- the given channels and log all messages received, to the given
-- file(s).
module Main where
import Control.Concurrent.Delay
import Control.Monad
import Control.Monad.Fix
import Data.Char
import Data.List
import Data.Time
import Network
import Network.IRC
import System.Console.CmdArgs
import System.FilePath
import System.IO
import System.Locale
import System.Posix
-- | Options for the executable.
-- | Command-line options for the logger (parsed by cmdargs).
data Options = Options
  { host     :: String       -- ^ The IRC server host/IP.
  , port     :: Int          -- ^ The remote port to connect to.
  , channels :: String       -- ^ The channels to join (comma or
                             --   space sep'd).
  , logpath  :: FilePath     -- ^ Which directory to log to.
  , pass     :: Maybe String -- ^ Maybe a *server* password.
  , nick     :: String       -- ^ The nick.
  , user     :: String       -- ^ User (not real name) to use.
  , delay    :: Integer      -- ^ Reconnect delay (secs).
  } deriving (Show,Data,Typeable)

-- | Default option values and cmdargs annotations.
-- NOTE(review): 'pass' has no cmdargs annotation, so it is presumably not
-- settable from the command line — confirm whether that is intended.
options :: Options
options = Options
  { host = def &= opt "irc.freenode.net" &= help "The IRC server."
  , port = def &= opt (6667::Int) &= help "The remote port."
  , channels = def &= opt "#hog" &= help "The channels to join."
  , logpath = def &= opt "." &= help "The directory to save log files."
  , pass = Nothing
  , nick = def &= opt "hog" &= help "The nickname to use."
  , user = def &= opt "hog" &= help "The user name to use."
  , delay = def &= opt (30::Integer) &= help "Reconnect delay (secs)."
  }
  &= summary "Hog IRC logger (C) Chris Done 2011"
  &= help "Simple IRC logger bot."
-- | Main entry point.
-- | Main entry point: ignore SIGPIPE (a dropped socket must not kill the
-- process), parse the command line, and connect.
main :: IO ()
main = withSocketsDo $ do
  _ <- installHandler sigPIPE Ignore Nothing
  cmdArgs options >>= start
-- | Connect to the IRC server and start logging.
-- | Connect to the IRC server, register, then loop reading lines.
-- If reading fails, wait 'delay' seconds, reconnect by calling 'start'
-- recursively, and let the old loop end ('Nothing' skips the handler body
-- and 'repeat' is only reached on a successful read).
-- NOTE(review): this relies on the old 'Prelude.catch' (IOError handler);
-- on modern base this needs Control.Exception with an explicit type.
start :: Options -> IO ()
start options@Options{..} = do
  hSetBuffering stdout NoBuffering
  h <- connectTo host (PortNumber (fromIntegral port))
  hSetBuffering h NoBuffering
  register h options
  fix $ \repeat -> do
    line <- catch (Just `fmap` hGetLine h) $ \_e -> do
      delaySeconds delay
      start options
      return Nothing
    flip (maybe (return ())) line $ \line -> do
      putStrLn $ "<- " ++ line
      handleLine options h line
      repeat
-- | Register to the server by sending user/nick/pass/etc.
-- | Register with the IRC server: send PASS (only when a server password
-- was configured), then USER and NICK.
register :: Handle -> Options -> IO ()
register h Options{..} = do
    case pass of
      Just p  -> send ("PASS " ++ p)
      Nothing -> return ()
    send ("USER " ++ user ++ " * * *")
    send ("NICK " ++ nick)
  where
    send = sendLine h
-- | Handle incoming lines; ping/pong, privmsg, etc.
-- | Decode one raw IRC line and dispatch it; lines that fail to parse are
-- reported on stdout instead of being silently dropped.
handleLine :: Options -> Handle -> String -> IO ()
handleLine options handle line =
    maybe unparsable (handleMsg options handle) (decode line)
  where
    unparsable = putStrLn ("Unable to decode line " ++ show line)
-- | Handle an IRC message.
-- | Handle a decoded IRC message: answer PING with PONG, join channels on
-- end-of-MOTD (numeric 376), log PRIVMSGs, ignore everything else.
handleMsg :: Options -> Handle -> Message -> IO ()
handleMsg options h msg =
  case msg_command msg of
    "PING" -> reply $ msg {msg_command="PONG"}
    "376" -> joinChannels options h
    "PRIVMSG" -> logMsg options msg
    _ -> return ()
  where reply = sendLine h . encode
-- | Log a privmsg of a given channel to the right file.
-- | Log a PRIVMSG addressed to a channel ('#'-prefixed first parameter);
-- anything else is reported as bogus.
-- NOTE(review): the "<nick> " prefix carries a trailing space but the
-- "unknown" fallback does not — confirm the intended log format.
logMsg :: Options -> Message -> IO ()
logMsg options@Options{..} msg@Message{..} = do
  case msg_params of
    [('#':chan),msg] -> logLine options chan from msg
    _ -> putStrLn $ "Bogus message: " ++ show msg
  where from = case msg_prefix of
          Just (NickName str _ _) -> "<" ++ str ++ "> "
          _ -> "unknown"

-- | Append one timestamped line to the per-channel log file; the channel
-- name is sanitized first and an all-invalid name is dropped entirely.
logLine :: Options -> String -> String -> String -> IO ()
logLine Options{..} (sanitize -> chan) from line =
  when (not (null chan)) $ do
    now <- fmap format getCurrentTime
    appendFile (logpath </> chan) $
      now ++ " " ++ from ++ line ++ "\n"
  where format = formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S %Z"
-- | Sanitize a channel name.
-- | Sanitize a channel name: keep only ASCII digits and letters (case is
-- preserved; the case-folded character merely decides whether to keep it).
sanitize :: String -> String
sanitize = filter keep
  where
    keep ch = isDigit ch || isAsciiLower (toLower ch)
-- | Join the requested channels.
-- | Join the configured channels: the option string is comma- or
-- space-separated; send a single JOIN with a comma-separated list.
joinChannels :: Options -> Handle -> IO ()
joinChannels Options{..} h = do
  let chans = words $ replace ',' ' ' channels
  putStrLn $ "Joining channels: " ++ show chans
  sendLine h $ "JOIN :" ++ intercalate "," chans
-- | Replace x with y in xs.
-- | Replace every occurrence of @c@ with @y@ in a list.
replace :: Eq a => a -> a -> [a] -> [a]
replace c y = map swap
  where
    swap x
      | x == c    = y
      | otherwise = x
-- | Send a line on a handle, ignoring errors (like, if the socket's
-- closed.)
-- | Send a line on a handle and echo it to stdout; any exception (e.g. a
-- closed socket) is deliberately swallowed — best-effort delivery.
sendLine :: Handle -> String -> IO ()
sendLine h line = do
  catch (do hPutStrLn h line; putStrLn $ "-> " ++ line) $
    \_e -> return ()
|
chrisdone/hog
|
src/Main.hs
|
bsd-3-clause
| 4,937
| 0
| 17
| 1,238
| 1,457
| 742
| 715
| 109
| 4
|
{-# LANGUAGE DeriveDataTypeable #-}
-- |Type aliases used throughout the crypto-api modules.
module Crypto.Types where
import qualified Control.Exception as X
import Data.Data
import Data.Typeable
import Data.ByteString as B
import Data.ByteString.Lazy as L
-- |Initialization Vectors for BlockCipher implementations (IV k) are
-- used for various modes and guaranteed to be blockSize bits long.
-- The common ways to obtain an IV are to generate one ('getIV' or
-- 'getIVIO') or to use one provided with the ciphertext (using the
-- 'Serialize' instance of IV).
--
-- 'zeroIV' also exists and is of particular use for starting 'ctr'
-- mode with a fresh key.
-- The phantom parameter @k@ ties an IV to its cipher's key type.
data IV k = IV { initializationVector :: {-# UNPACK #-} !B.ByteString
               } deriving (Eq, Ord, Show)

-- |The length of a field (usually a ByteString) in bits
type BitLength = Int

-- |The length of a field in bytes.
type ByteLength = Int

-- | Errors block-cipher operations can report.
data BlockCipherError = InputTooLong String         -- ^ input length not accepted by the operation
                      | AuthenticationFailed String -- ^ authentication check failed
                      | Other String                -- ^ any other error, described in the string
        deriving (Eq, Ord, Show, Read, Data, Typeable)

instance X.Exception BlockCipherError
|
TomMD/crypto-api
|
Crypto/Types.hs
|
bsd-3-clause
| 1,190
| 0
| 10
| 291
| 153
| 96
| 57
| 16
| 0
|
{-# LANGUAGE FlexibleInstances,
FlexibleContexts,
MultiParamTypeClasses,
UndecidableInstances,
TypeFamilies,
GADTs #-}
module Obsidian.GCDObsidian.Sync where
import Obsidian.GCDObsidian.Kernel
import Obsidian.GCDObsidian.Exp
import Obsidian.GCDObsidian.Array
import Obsidian.GCDObsidian.Types
import Obsidian.GCDObsidian.Program
import Obsidian.GCDObsidian.Library
import Control.Monad.Writer
import Data.Word
----------------------------------------------------------------------------
-- Library functions that use Sync
-- | Compose a list of kernel stages left-to-right, inserting a 'sync'
-- between consecutive stages; the empty list is the identity kernel.
composeS [] = pure id
composeS (f:fs) = f ->- sync ->- composeS fs
----------------------------------------------------------------------------
-- Sync
-- | Sync a pull array: allocate a local buffer sized for the array's
-- element type, push the array into it, emit a synchronize, and return a
-- pull array reading from the new buffer.
pSyncArray :: Scalar a => Array Pull (Exp a) -> Kernel (Array Pull (Exp a))
pSyncArray arr =
  do
    name <- newArray
    let p = parr !* (targetArray name)
    tell$
      (Allocate name (es * (len arr)) t ())
      `ProgramSeq`
      p
      `ProgramSeq`
      (Synchronize True)
    return$ mkPullArray (index name) (len arr)
  where
    -- element size and pointer type are derived from element 0
    es = fromIntegral$ sizeOf (arr ! 0)
    t = Pointer$ Local$ typeOf (arr ! 0)
    parr = push arr
-- | Sync a push array: allocate a buffer, apply the push computation to
-- write into it, synchronize, and return a pull array over the buffer.
pSyncArrayP :: Scalar a => Array Push (Exp a) -> Kernel (Array Pull (Exp a))
pSyncArrayP arr =
  do
    name <- newArray
    let n = len arr
        result = mkPullArray (index name) n
        es = fromIntegral$ sizeOf (result ! 0)
        t = Pointer$ Local$ typeOf (result ! 0)
        p = arr !* (targetArray name)
    tell$ (Allocate name (es * n) t () )
      `ProgramSeq`
      p
      `ProgramSeq`
      (Synchronize True)
    return result

-- | Sync a push array of pairs: two buffers (one per component) are
-- allocated and written via 'targetPair'; the results are zipped back
-- into a single pull array of pairs.
pSyncArrayP2 :: Scalar a => Scalar b => Array Push (Exp a,Exp b) -> Kernel (Array Pull (Exp a,Exp b))
pSyncArrayP2 arr =
  do
    name1 <- newArray
    name2 <- newArray
    let n = len arr
        result1 = mkPullArray (index name1) n
        result2 = mkPullArray (index name2) n
        t1 = Pointer$ Local$ typeOf (result1 ! 0)
        t2 = Pointer$ Local$ typeOf (result2 ! 0)
        es1 = fromIntegral$ sizeOf (result1 ! 0)
        es2 = fromIntegral$ sizeOf (result2 ! 0)
        p = arr !* (targetPair name1 name2)
    tell$ (Allocate name1 (es1 * n) t1 ()) *>*
          (Allocate name2 (es2 * n) t2 ()) *>*
          p
          *>*
          (Synchronize True)
    return (zipp (result1,result2))
-- TODO: is this an approach to more general syncs ? (see limitations on Syncable class)
-- | Type-family variant of sync: 'Synced' gives the result type (push
-- arrays sync to pull arrays; pairs sync componentwise).
class Syncable' a where
  type Synced a
  sync' :: a -> Kernel (Synced a)

instance Syncable (Array Pull) (Exp a) => Syncable' (Array Pull (Exp a)) where
  type Synced (Array Pull (Exp a)) = Array Pull (Exp a)
  sync' = sync

instance Syncable (Array Push) (Exp a) => Syncable' (Array Push (Exp a)) where
  type Synced (Array Push (Exp a)) = Array Pull (Exp a)
  sync' = sync

instance (Syncable' a, Syncable' b) => Syncable' (a,b) where
  type Synced (a,b) = (Synced a, Synced b)
  sync' (a1,a2) =
    do
      a1' <- sync' a1
      a2' <- sync' a2
      return (a1',a2')

-- TODO: Here only possible to sync on arrays ?
-- | Multi-parameter-class variant: sync an array (pull or push) of
-- element type @b@ down to a pull array.
class Syncable a b where
  sync :: a b -> Kernel (Array Pull b)

instance (Scalar a) => Syncable (Array Pull) (Exp a) where
  sync = pSyncArray

-- Pairs are synced componentwise: unzip, sync each half, zip back.
instance (Syncable (Array Pull) a, Syncable (Array Pull) b) => Syncable (Array Pull) (a,b) where
  sync arr = do
      a1' <- sync a1
      a2' <- sync a2
      return$ zipp (a1',a2')
    where
      (a1,a2) = unzipp arr

instance (Syncable (Array Pull) a, Syncable (Array Pull) b, Syncable (Array Pull) c)
         => Syncable (Array Pull) (a,b,c) where
  sync arr = do
      a1' <- sync a1
      a2' <- sync a2
      a3' <- sync a3
      return$ zipp3 (a1',a2',a3')
    where
      (a1,a2,a3) = unzipp3 arr

instance Scalar a => Syncable (Array Push) (Exp a) where
  sync arr = pSyncArrayP arr

-- GAH! not good !! (pairs of push arrays need a dedicated primitive)
instance (Scalar a, Scalar b) => Syncable (Array Push) (Exp a, Exp b) where
  sync = pSyncArrayP2
{-
pSyncA :: (Scalar a, Pushy arr)
=> arr (Exp a) -> Kernel (Array (Exp a))
pSyncA arrIn =
do
name <- newArray
let result = Array (index name) n
es = fromIntegral$ sizeOf (result ! 0)
t = Pointer$ Local$ typeOf (result ! 0)
p = pushApp arr (targetArray name)
tell$ Seq (syncUnit (programThreads p)
(Allocate name (es * n) t
p)) Skip
return result
where
arr@(ArrayP func n) = push arrIn
pSyncAP :: Scalar a
=> Word32 -> Array (Exp a) -> Kernel (Array (Exp a))
pSyncAP elemsPT arrIn = sync arr'
where
arr' = push' elemsPT arrIn
-- Work on the Scalar a thing!!!
pSyncArray :: Scalar a => Array (Exp a) -> Kernel (Array (Exp a))
pSyncArray arr =
do
name <- newArray
let p = pushApp parr (targetArray name)
tell$ Seq (syncUnit (programThreads p) ---(len arr)
(Allocate name (es * (len arr)) t
p)) Skip
return (Array (index name) (len arr))
where
es = fromIntegral$ sizeOf (arr ! 0)
t = Pointer$ Local$ typeOf (arr ! 0)
parr = push arr
-- THE GCD THING
pSyncArrays :: (Scalar a, Scalar b) => (Array (Exp a),Array (Exp b)) -> Kernel (Array (Exp a), Array (Exp b))
pSyncArrays (a1,a2) =
do
name1 <- newArray
name2 <- newArray
tell$ Seq (syncUnit n (Allocate name1 (es1 * (len a1)) t1
(pushApp pa1 (targetArray name1))
*>*
Allocate name2 (es2 * (len a2)) t2
(pushApp pa2 (targetArray name2)))) Skip
return (Array (index name1) (len a1)
,Array (index name2) (len a2))
where
es1 = fromIntegral$ sizeOf (a1 ! 0)
es2 = fromIntegral$ sizeOf (a2 ! 0)
t1 = Pointer$ Local$ typeOf (a1 ! 0)
t2 = Pointer$ Local$ typeOf (a2 ! 0)
n = gcd (len a1) (len a2)
pa1 = push' w1 a1
pa2 = push' w2 a2
(w1,w2) = nWrites n (pa1,pa2)
nWrites m (p1@(ArrayP _ n1),p2@(ArrayP _ n2)) = (p1Writes, p2Writes)
where
p1Writes = n1 `div` m
p2Writes = n2 `div` m
pSyncArrayP :: Scalar a => ArrayP (Exp a) -> Kernel (Array (Exp a))
pSyncArrayP arr@(ArrayP func n) =
do
name <- newArray
let result = Array (index name) n
es = fromIntegral$ sizeOf (result ! 0)
t = Pointer$ Local$ typeOf (result ! 0)
p = pushApp arr (targetArray name)
tell$ Seq (syncUnit (programThreads p)
(Allocate name (es * n) t
p)) Skip
return result
pSyncArrayP2 :: (Scalar a, Scalar b ) => ArrayP (Exp a,Exp b) -> Kernel (Array (Exp a,Exp b))
pSyncArrayP2 arr@(ArrayP f n) =
do
name1 <- newArray
name2 <- newArray
let result1 = Array (index name1) n
result2 = Array (index name2) n
t1 = Pointer$ Local$ typeOf (result1 ! 0)
t2 = Pointer$ Local$ typeOf (result2 ! 0)
es1 = fromIntegral$ sizeOf (result1 ! 0)
es2 = fromIntegral$ sizeOf (result2 ! 0)
p = pushApp arr (targetPair name1 name2)
tell$ Seq (syncUnit (programThreads p)
(Allocate name1 (es1 * n) t1
(Allocate name2 (es2 * n) t2
p))) Skip
return (zipp (result1,result2))
-}
|
svenssonjoel/GCDObsidian
|
Obsidian/GCDObsidian/Sync.hs
|
bsd-3-clause
| 7,581
| 0
| 15
| 2,519
| 1,536
| 785
| 751
| 104
| 1
|
module TestEditItem (tests) where
import Asserts
import Bucket.EditItem
import Bucket.Import
import Bucket.Types
import DirectoryInfo
import Fixtures
import Prelude hiding (catch)
import System.FilePath
import Test.Hspec.HUnit()
import Test.Hspec.Monadic
import Test.HUnit
-- | Specs for 'editItem': an edit must both persist the new meta file on
-- disk and be reflected in the bucket value that 'editItem' returns.
tests = describe "editing item" $ do

    it "item changes the meta on disk" $ withBucket $ \(tmpDir, bucket) -> do
        aSourceFile <- createEmptyFile $ tmpDir </> "a-file.png"
        bucket <- importFile bucket aSourceFile
        let item = head (bucketItems bucket)
        let newItem = setTags ["foo"] item
        -- The returned bucket is not needed here (it was previously bound
        -- but unused); this spec asserts on the meta file editItem rewrites.
        _ <- editItem bucket item newItem
        assertFileContains (bucketPath bucket </> "a-file-1" </> metaFileName) "tag::foo"

    it "returns an updated bucket" $ withBucket $ \(tmpDir, bucket) -> do
        aSourceFile <- createEmptyFile $ tmpDir </> "a-file.png"
        bucket <- importFile bucket aSourceFile
        let item = head (bucketItems bucket)
        let newItem = setTags ["foo"] item
        newBucket <- editItem bucket item newItem
        assertEqual "" [newItem] (bucketItems newBucket)
|
rickardlindberg/orgapp
|
tests/TestEditItem.hs
|
bsd-3-clause
| 1,119
| 0
| 17
| 242
| 328
| 166
| 162
| 27
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
-- | Process management
module Haskus.System.Linux.Process
( ProcessID(..)
, ThreadID(..)
, UserID(..)
, GroupID(..)
, SessionID(..)
, sysExit
, sysGetCPU
, sysGetProcessID
, sysGetParentProcessID
, sysGetRealUserID
, sysGetEffectiveUserID
, sysSetEffectiveUserID
, sysGetRealGroupID
, sysGetEffectiveGroupID
, sysSetEffectiveGroupID
, sysGetThreadID
, sysFork
, sysVFork
, sysSchedulerYield
)
where
import Haskus.Format.Binary.Ptr (Ptr, nullPtr)
import Haskus.Format.Binary.Word
import Haskus.Format.Binary.Storable
import Haskus.System.Linux.Syscalls
import Haskus.System.Linux.ErrorCode
import Haskus.Utils.Flow
-- Distinct newtype wrappers over the kernel's raw 32-bit ids prevent
-- accidentally passing one kind of id where another is expected.

-- | Process ID
newtype ProcessID = ProcessID Word32 deriving (Show,Eq,Ord,Storable)

-- | Thread ID
newtype ThreadID = ThreadID Word32 deriving (Show,Eq,Ord,Storable)

-- | User ID
newtype UserID = UserID Word32 deriving (Show,Eq,Ord,Storable)

-- | Group ID
newtype GroupID = GroupID Word32 deriving (Show,Eq,Ord,Storable)

-- | Session ID
newtype SessionID = SessionID Word32 deriving (Show,Eq,Ord,Storable)
-- | Exit the current process with the given return value
-- This syscall does not return.
sysExit :: Int64 -> IO ()
sysExit n = void (syscall_exit n)

-- | Get CPU and NUMA node executing the current process
-- (third getcpu argument, the unused cache, is passed as NULL).
sysGetCPU :: MonadInIO m => FlowT '[ErrorCode] m (Word,Word)
sysGetCPU =
   alloca $ \cpu ->
      alloca $ \node -> do
         r <- liftIO (syscall_getcpu (cpu :: Ptr Word) (node :: Ptr Word) nullPtr)
         checkErrorCode_ r
         (,) <$> peek cpu <*> peek node
-- | Return process ID
sysGetProcessID :: IO ProcessID
sysGetProcessID = ProcessID . fromIntegral <$> syscall_getpid

-- | Return thread ID
sysGetThreadID :: IO ThreadID
sysGetThreadID = ThreadID . fromIntegral <$> syscall_gettid

-- | Return parent process ID
sysGetParentProcessID :: IO ProcessID
sysGetParentProcessID = ProcessID . fromIntegral <$> syscall_getppid

-- | Get real user ID of the calling process
sysGetRealUserID :: IO UserID
sysGetRealUserID = UserID . fromIntegral <$> syscall_getuid

-- | Get effective user ID of the calling process
sysGetEffectiveUserID :: IO UserID
sysGetEffectiveUserID = UserID . fromIntegral <$> syscall_geteuid

-- | Set effective user ID of the calling process
sysSetEffectiveUserID :: MonadIO m => UserID -> FlowT '[ErrorCode] m ()
sysSetEffectiveUserID (UserID uid) = checkErrorCode_ =<< liftIO (syscall_setuid uid)

-- | Get real group ID of the calling process
sysGetRealGroupID :: IO GroupID
sysGetRealGroupID = GroupID . fromIntegral <$> syscall_getgid

-- | Get effective group ID of the calling process
sysGetEffectiveGroupID :: IO GroupID
sysGetEffectiveGroupID = GroupID . fromIntegral <$> syscall_getegid

-- | Set effective group ID of the calling process
sysSetEffectiveGroupID :: MonadIO m => GroupID -> FlowT '[ErrorCode] m ()
sysSetEffectiveGroupID (GroupID gid) = checkErrorCode_ =<< liftIO (syscall_setgid gid)
-- | Create a child process
-- On success the returned value is the child's PID (in the parent).
sysFork :: MonadIO m => FlowT '[ErrorCode] m ProcessID
sysFork = do
   v <- checkErrorCode =<< liftIO (syscall_fork)
   return (ProcessID (fromIntegral v))

-- | Create a child process and block parent
sysVFork :: MonadIO m => FlowT '[ErrorCode] m ProcessID
sysVFork = do
   v <- checkErrorCode =<< liftIO (syscall_vfork)
   return (ProcessID (fromIntegral v))

-- | Yield the processor
sysSchedulerYield :: MonadIO m => FlowT '[ErrorCode] m ()
sysSchedulerYield = checkErrorCode_ =<< liftIO (syscall_sched_yield)
|
hsyl20/ViperVM
|
haskus-system/src/lib/Haskus/System/Linux/Process.hs
|
bsd-3-clause
| 3,594
| 0
| 16
| 611
| 873
| 483
| 390
| 71
| 1
|
module CSV.Types ( CSV (..), SQLVal(..), ParseVal(..)) where
import Data.Text (Text)
-- | A CSV document: a list of rows, each row a list of cells.
data CSV a = CSV [[a]] deriving Show

-- | A parsed cell value: text, floating-point number, or integer.
data ParseVal = T Text | N Double | I Int

-- | A SQL-side value: integer, text (NVar), float, or NULL.
data SQLVal = SQLInt Int | NVar Text | SQLFloat Float | Null
|
smobs/HulkImport
|
src/CSV/Types.hs
|
bsd-3-clause
| 240
| 0
| 8
| 58
| 99
| 62
| 37
| 5
| 0
|
{-# LANGUAGE MultiParamTypeClasses,TemplateHaskell,QuasiQuotes #-}
module Language.XHaskell.LocalTypeInference where
import Data.List (nub)
import qualified Data.Map as M
import qualified Data.Traversable as Trvsbl (mapM)
import Control.Monad
import System.IO.Unsafe
import qualified Language.Haskell.TH as TH
import qualified Language.XHaskell.Source as S
import qualified Language.XHaskell.Target as T
-- an implementation of B.C Pierce and D. N. Turner's local type inference
-- | A bounded subtype constraint: @lb <: var <: up@.
data SubtypeConstr = STC { lb :: TH.Type, var :: TH.Name, up :: TH.Type } deriving Show

-- The bottom type (Phi) and the top type (Star .) of the regex-type lattice.
bot = TH.ConT (TH.mkName "Phi")
top = TH.AppT (TH.ConT (TH.mkName "Star")) (TH.ConT (TH.mkName "."))

-- Recognizers for the bottom/top types (match on base names only).
isBot (TH.ConT n) = TH.nameBase n == "Phi"
isBot t = False

isTop (TH.AppT (TH.ConT op) (TH.ConT n)) = TH.nameBase op == "Star" && TH.nameBase n == "."
isTop t = False
-- | Order-preserving intersection: the names from the first list that also
-- occur in the second (duplicates in the first list are kept).
isect :: [TH.Name] -> [TH.Name] -> [TH.Name]
isect vs vs' = [ v | v <- vs, v `elem` vs' ]
-- variable elimination via promotion and demotion
-- | Promote: replace any variable from @vs@ by the top type (used to
-- eliminate bound variables from an upper bound).
promote :: [TH.Name] -> TH.Type -> TH.Type
promote vs (TH.VarT n) | n `elem` vs = top
promote vs t = t

-- | Demote: replace any variable from @vs@ by the bottom type (used to
-- eliminate bound variables from a lower bound).
demote :: [TH.Name] -> TH.Type -> TH.Type
demote vs (TH.VarT n) | n `elem` vs = bot
demote vs t = t
-- | Generate subtype constraints for the target variables @vars@ from the
-- goal @t1 <: t2@, following Pierce/Turner local type inference.  Clause
-- order matters: the top/bottom and variable cases must be tried before
-- the structural decompositions, and the final catch-all returns no
-- constraints for unmatched shapes.
genSubtypeConstrs :: [TH.Name] -> -- target variables, the variable that we want to find the type substitution
                     [TH.Name] -> -- bound variables which are introduce along the way
                     TH.Type -> TH.Type -> [SubtypeConstr]
-- \bar{a}, \bar{b} |- t <: \sigma* ==> { } -- not in used
genSubtypeConstrs vars bvars t (TH.AppT (TH.ConT op) (TH.ConT n))
  | TH.nameBase n == "." && TH.nameBase op == "Star" = []
-- \bar{a} |- \phi <: t ==> { } -- not in used
genSubtypeConstrs vars bvars (TH.ConT n) t | TH.nameBase n == "Phi" = []
{-
 a \not \in \bar{a}
------------------------------------
 \bar{a}, \bar{b} |- a <: a ===> { }
-}
genSubtypeConstrs vars bvars (TH.VarT n) (TH.VarT n') | not (n `elem` vars) && (n == n') = []
{-
 a \in \bar{a}
 fv(t) \cap \bar{a} = { }
 t \demote^{\bar{b}} t'
---------------------------------
 \bar{a}. \bar{b} |- a <: t ==> { \phi <: a <: t }
-}
genSubtypeConstrs vars bvars (TH.VarT n) t2 | n `elem` vars && null ((T.typeVars t2) `isect` vars) =
  let t2' = demote bvars t2
  in [ STC bot n t2' ]
{-
 a \in \bar{a}
 fv(t) \cap \bar{a} = { }
 t \promote^{\bar{b}} t'
---------------------------------
 \bar{a}, \bar{b} |- t <: a ==> { t <: a <: \sigma* }
-}
genSubtypeConstrs vars bvars t1 (TH.VarT n) | n `elem` vars && null ((T.typeVars t1) `isect` vars) =
  let t1' = promote bvars t1
  in [ STC t1' n top ]
{-
 \bar{a}, \bar{b} |- t3 <: t1 ==> D1
 \bar{a}, \bar{b} |- t2 <: t4 ==> D2
------------------------------------
 \bar{a}, \bar{b} |- t1 -> t2 <: t3 -> t4 ==> D1 ++ D2
-}
-- note the contravariant flip on the argument position
genSubtypeConstrs vars bvars (TH.AppT (TH.AppT TH.ArrowT t1) t2) (TH.AppT (TH.AppT TH.ArrowT t3) t4) =
  genSubtypeConstrs vars bvars t3 t1 ++ genSubtypeConstrs vars bvars t2 t4
{-
 \bar{a}, \bar{b} |- t1 <: t3 ==> D1
 \bar{a}, \bar{b} |- t2 <: t4 ==> D2
------------------------------------
 \bar{a}, \bar{b} |- t1 | t2 <: t3 | t4 ==> D1 ++ D2
-}
genSubtypeConstrs vars bvars (TH.AppT (TH.AppT (TH.ConT n) t1) t2) (TH.AppT (TH.AppT (TH.ConT n') t3) t4)
  | n == n' && TH.nameBase n == "Choice" =
      genSubtypeConstrs vars bvars t1 t3 ++ genSubtypeConstrs vars bvars t2 t4
{-
 \bar{a}, \bar{b} |- t1 <: t3 ==> D1
 \bar{a}, \bar{b} |- t2 <: t4 ==> D2
------------------------------------
 \bar{a} |- t1t2 <: t3t4 ==> D1 ++ D2
-}
  | n == n' && TH.nameBase n == "," =
      genSubtypeConstrs vars bvars t1 t3 ++ genSubtypeConstrs vars bvars t2 t4
{-
 \bar{a}, \bar{b} |- t1 <: t2 ==> D
------------------------------------
 \bar{a}, \bar{b} |- t1* <: t2* ==> D
-}
genSubtypeConstrs vars bvars (TH.AppT (TH.ConT n) t1) (TH.AppT (TH.ConT n') t2)
  | n == n' && TH.nameBase n == "Star" = genSubtypeConstrs vars bvars t1 t2
{-
 \bar{a}, \bar{b} |- () <: () ==> {}
-}
genSubtypeConstrs vars bvars (TH.ConT n) (TH.ConT n')
  | n == n' && TH.nameBase n == "()" = []
{-
------------------------------------
 \bar{a} |- l <: l ==> {}
-}
  | TH.nameBase n == TH.nameBase n' = []
-- foralls: push their binders onto the bound-variable list and recurse
genSubtypeConstrs vars bvars t1 (TH.ForallT tvbs cxt t2) = -- todo : what to do with cxt?
  let bvars' = bvars ++ nub (map T.getTvFromTvb tvbs)
  in genSubtypeConstrs vars bvars' t1 t2
genSubtypeConstrs vars bvars (TH.ForallT tvbs cxt t1) t2 = -- todo : what to do with cxt?
  let bvars' = bvars ++ nub (map T.getTvFromTvb tvbs)
  in genSubtypeConstrs vars bvars' t1 t2
genSubtypeConstrs vars bvars t1 t2 = [] -- error $ "failed to generate subtype constraints " ++ show vars ++ " " ++ show t1 ++ " " ++ show t2
-- | Solve a constraint set against result type @r@: group constraints by
-- variable, collapse each group to a single bound pair, then pick the
-- minimal substitution per variable based on the variable's variance in @r@.
-- NOTE(review): the lazy 'foldl' here could be 'Data.List.foldl'' — the
-- constraint lists are presumably small, but verify.
solveSubtypeConstrs :: [SubtypeConstr] -> TH.Type -> TH.Q T.Subst
solveSubtypeConstrs constrs r =
  -- create a map mapping var -> [ constraint ]
  let cdict = foldl (\m c@(STC l v u) -> case M.lookup v m of
                        { Just cs -> M.update (\_ -> Just (c:cs)) v m
                        ; Nothing -> M.insert v [c] m }) M.empty constrs
  -- collapse constraints for the same variable based on
  in do
    { cdict' <- Trvsbl.mapM collapseConstrs cdict
    ; subst <- Trvsbl.mapM (findMinSubst r) cdict'
    ; return subst
    }
-- | Fuse all constraints mentioning the same variable into a single
-- @l <: v <: u@ constraint.  The merged lower bound is the join of the
-- individual lowers ('upperBound'); the merged upper bound is the meet
-- of the individual uppers ('lowerBound').  Only syntactic equality and
-- the Bot/Top extremes (via isBot/isTop, defined elsewhere in this
-- module) are decided; any other combination aborts in 'TH.Q'.
collapseConstrs :: [SubtypeConstr] -> TH.Q SubtypeConstr
collapseConstrs [] = fail "the collapseConstrs is given an empty list"
collapseConstrs (c:cs) =
  foldM (\(STC l v u) (STC l' v' u') -> do -- precondition, v == v'
           { l'' <- upperBound l l'
           ; u'' <- lowerBound u u'
           ; return (STC l'' v u'')}) c cs
  where upperBound t1 t2 | t1 == t2 = return t1
                         | isBot t1 = return t2
                         | isBot t2 = return t1
                         | isTop t1 = return t1
                         | isTop t2 = return t2
                         | otherwise = fail $ "can't decide the upper bound of " ++ (TH.pprint t1) ++ " and " ++ (TH.pprint t2) -- todo subtyp relation?
        lowerBound t1 t2 | t1 == t2 = return t1
                         | isBot t1 = return t1
                         | isBot t2 = return t2
                         | isTop t1 = return t2
                         | isTop t2 = return t1
                         | otherwise = fail $ "can't decide the lower bound of " ++ (TH.pprint t1) ++ " and " ++ (TH.pprint t2)
-- | Pick the minimal substitution for a collapsed bound @l <: v <: u@,
-- directed by the variance of @v@ in the result type @r@: the lower
-- bound when @v@ is unused or covariant, the upper bound when
-- contravariant, and (requiring @l == u@) either when invariant.
findMinSubst :: TH.Type -> SubtypeConstr -> TH.Q TH.Type
findMinSubst r (STC l v u)
  | v `constant` r || v `covariant` r = return l
  | v `contravariant` r = return u
  | v `invariant` r && l == u = return l
  | otherwise = fail ("unable to find minimal substitution given type " ++ TH.pprint r ++ " and bounds " ++ TH.pprint l ++ "<:" ++ TH.pprint v ++ "<:" ++ TH.pprint u)
-- | True iff @n@ does not occur anywhere in @t@.
constant :: TH.Name -> TH.Type -> Bool
constant n t = n `notElem` T.typeVars t

-- | True iff @n@ occurs in a covariant (positive) position of @t@.
covariant :: TH.Name -> TH.Type -> Bool
covariant n t = elem n (T.coTypeVars t)

-- | True iff @n@ occurs in a contravariant (negative) position of @t@.
contravariant :: TH.Name -> TH.Type -> Bool
contravariant n t = elem n (T.contraTypeVars t)

-- | True iff @n@ occurs both covariantly and contravariantly in @t@.
invariant :: TH.Name -> TH.Type -> Bool
invariant n t = contravariant n t && covariant n t
|
luzhuomi/xhaskell
|
Language/XHaskell/LocalTypeInference.hs
|
bsd-3-clause
| 7,308
| 0
| 20
| 1,985
| 2,326
| 1,174
| 1,152
| 100
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
module Pronunciations
( homophonesUsing
, Pronunciations
, pronounceUsing
, spellUsing
, pronunciations
, entries
, Entry(..)
, Word(..)
, getWord
, Pronunciation
, unMark
, Phoneme(..)
) where
import Data.Attoparsec.Text hiding ( Number(..) )
import Data.Attoparsec.Combinator
import Data.Char
import Data.String
import Data.Text ( Text )
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import Data.Set ( Set )
import Data.Map.Strict ( Map )
import qualified Data.Set as Set
import qualified Data.Map.Strict as Map
import Data.Sequence ( Seq , (|>), (<|) , (><) , ViewL(..) , ViewR(..) )
import qualified Data.Sequence as Seq
import Data.Traversable
import Control.Monad hiding ( mapM )
import Control.Arrow
import Control.Applicative
import Data.Functor
import Data.Maybe
import Data.Either
import Data.List ( groupBy )
import GHC.Generics ( Generic )
import System.IO
import Prelude hiding ( takeWhile , mapM , Word )
-- | All words sharing at least one pronunciation with the given word:
-- pronounce it, spell every pronunciation back, and union the results.
-- 'Nothing' when the word (or any of its pronunciations) is unknown.
homophonesUsing :: Pronunciations -> Word -> Maybe (Set Word)
homophonesUsing dict =
  fmap Set.unions . mapM (spellUsing dict) . Set.toList <=< pronounceUsing dict
-- | A bidirectional pronunciation dictionary: spellings mapped to their
-- pronunciations, and pronunciations mapped back to spellings.
data Pronunciations =
  Pronunciations !(Map Word (Set Pronunciation))
                 !(Map Pronunciation (Set Word))
-- | Every recorded pronunciation for a spelling; 'Nothing' if unknown.
pronounceUsing :: Pronunciations -> Word -> Maybe (Set Pronunciation)
pronounceUsing (Pronunciations byWord _) = flip Map.lookup byWord
-- | Every spelling that produces a pronunciation; 'Nothing' if unknown.
spellUsing :: Pronunciations -> Pronunciation -> Maybe (Set Word)
spellUsing (Pronunciations _ bySound) = flip Map.lookup bySound
-- | Build the two-way index from a list of dictionary entries: words to
-- every pronunciation seen for them, and pronunciations back to every
-- spelling that produces them.  ('Set.union' is commutative, so the
-- insertion order used by 'Map.fromListWith' does not matter.)
pronunciations :: [Entry] -> Pronunciations
pronunciations es = Pronunciations byWord bySound
  where
    byWord  = Map.fromListWith Set.union
                [ (_word e, Set.singleton (_pronunciation e)) | e <- es ]
    bySound = Map.fromListWith Set.union
                [ (_pronunciation e, Set.singleton (_word e)) | e <- es ]
-- | Parse the full text of a CMU-style dictionary, one entry per line;
-- comment and blank lines are dropped.  'Nothing' when any line fails
-- to parse.
entries :: Text -> Maybe [Entry]
entries = either (const Nothing) Just
        . fmap catMaybes
        . mapM (parseOnly line)
        . Text.lines
-- | One dictionary line: a @;;;@ comment or an empty line yields
-- 'Nothing'; anything else must parse as a proper 'Entry'.
line :: Parser (Maybe Entry)
line = skipSpace *> ((";;;" *> many' (notChar '\n') $> Nothing)
                 <|> ((endOfLine <|> endOfInput) $> Nothing)
                 <|> (Just <$> entry))
-- | One dictionary entry: the headword, its variant index (0 for the
-- primary pronunciation) and the pronunciation itself.
data Entry = Entry { _word :: Word
                   , _index :: Int
                   , _pronunciation :: Pronunciation }
           deriving (Eq, Ord, Show, Generic)

-- | Parse an entry: word, optional @(n)@ variant index (defaulting to
-- 0), then whitespace and the phoneme sequence.
entry :: Parser Entry
entry = Entry <$> wordOrPunc
              <*> (option 0 $ "(" *> decimal <* ")")
              <*> (skipSpace *> pronunciation)
-- | A dictionary headword: a plain spelling, or a punctuation mark
-- paired with its spelled-out name (see 'wordOrPunc').
data Word = Word Text
          | Punctuation Text Text
          deriving (Eq, Ord, Show, Generic)

-- | String literals denote plain (non-punctuation) words.
instance IsString Word where
  fromString = Word . Text.pack
-- | The printable text of a 'Word': the spelling itself, or — for
-- punctuation — the mark rather than its spelled-out name.
getWord :: Word -> Text
getWord w = case w of
  Word spelling      -> spelling
  Punctuation mark _ -> mark
-- | Characters permitted inside a dictionary word: letters, digits,
-- apostrophe, period, hyphen and underscore.
isWordChar :: Char -> Bool
isWordChar c = isAlpha c || isDigit c || c `elem` ("'.-_" :: String)
-- | A word is a word if it begins with an alphabetic character or is wrapped in square brackets;
-- otherwise, it is a named piece of punctuation given by a string like "%PERCENT"
-- (the mark is the non-alphabetic prefix, the name the word characters after it).
wordOrPunc :: Parser Word
wordOrPunc = (Word <$> (Text.cons <$> satisfy isAlpha
                                  <*> takeWhile isWordChar))
         <|> "[" *> (Word <$> takeWhile1 isWordChar) <* "]"
         <|> (Punctuation <$> takeWhile1 (not . isAlpha)
                          <*> takeWhile1 isWordChar)
-- | A pronunciation is simply a sequence of phonemes.
type Pronunciation = Seq Phoneme

-- | Parse a non-empty, space-separated phoneme sequence.
pronunciation :: Parser Pronunciation
pronunciation = Seq.fromList <$> phoneme `sepBy1` " "
-- | Strip the stress marker from a phoneme, exposing the raw vowel or
-- consonant underneath.
unMark :: Phoneme -> Either Vowel Consonant
unMark ph = case ph of
  Vowel v _   -> Left v
  Consonant c -> Right c
-- | Parse one ARPABET phoneme: vowels carry a trailing stress digit,
-- consonants stand alone.
phoneme :: Parser Phoneme
phoneme = (Vowel <$> vowel <*> decimal)
      <|> (Consonant <$> consonant)

-- | A phoneme: a vowel with its stress level, or a consonant.
data Phoneme = Vowel Vowel Int
             | Consonant Consonant
             deriving (Eq, Ord, Show, Generic)
-- | Parse an ARPABET vowel symbol.  Every symbol is exactly two
-- characters and no symbol is a prefix of another, so the order of the
-- alternatives is immaterial.
vowel :: Parser Vowel
vowel = "AA" $> AA <|> "AE" $> AE <|> "AH" $> AH
    <|> "AO" $> AO <|> "AW" $> AW <|> "AY" $> AY
    <|> "EH" $> EH <|> "ER" $> ER <|> "EY" $> EY
    <|> "IH" $> IH <|> "IY" $> IY <|> "OW" $> OW
    <|> "OY" $> OY <|> "UH" $> UH <|> "UW" $> UW

-- | The ARPABET vowel inventory.
data Vowel = AA | AE | AH | AO | AW
           | AY | EH | ER | EY | IH
           | IY | OW | OY | UH | UW
           deriving (Eq, Ord, Show, Generic)
-- | Parse an ARPABET consonant symbol.  Ordering matters: every
-- two-character symbol is tried before the one-character symbol that is
-- its prefix ("DH" before "D", "NG" before "N", "SH" before "S",
-- "TH" before "T", "ZH" before "Z") so the longer match wins.
consonant :: Parser Consonant
consonant = "B" $> B <|> "CH" $> CH <|> "DH" $> DH <|> "D" $> D <|> "F" $> F
        <|> "G" $> G <|> "HH" $> HH <|> "JH" $> JH <|> "K" $> K <|> "L" $> L
        <|> "M" $> M <|> "NG" $> NG <|> "N" $> N <|> "P" $> P <|> "R" $> R
        <|> "SH" $> SH <|> "S" $> S <|> "TH" $> TH <|> "T" $> T <|> "V" $> V
        <|> "W" $> W <|> "Y" $> Y <|> "ZH" $> ZH <|> "Z" $> Z

-- | The ARPABET consonant inventory.
data Consonant = B | CH | D | DH | F
               | G | HH | JH | K | L
               | M | N | NG | P | R
               | S | SH | T | TH | V
               | W | Y | Z | ZH
               deriving (Eq, Ord, Show, Generic)
|
bitemyapp/pronunciations
|
src/Pronunciations.hs
|
bsd-3-clause
| 5,293
| 0
| 51
| 1,648
| 1,682
| 925
| 757
| -1
| -1
|
{-# LANGUAGE
DeriveFoldable
, DeriveFunctor
, DeriveGeneric
, DeriveTraversable
, FlexibleContexts
, FlexibleInstances
, LambdaCase
, MultiParamTypeClasses
, TypeFamilies #-}
module Type.Graphic
( module Type.BindingFlag
, module Type.Node
, Type
, BoundNode
, Bound (..)
, binding
, term
, Binding (..)
, root
, binder
, Term (..)
, bot
, arr
, fromRestricted
, toSyntactic
, syntactic
) where
import Control.Applicative
import Control.Category ((<<<))
import Control.Lens
import Control.Monad.Reader
import Data.Foldable (Foldable (foldMap), foldlM, foldrM)
import Data.Maybe (fromMaybe)
import Data.Monoid (mempty)
import GHC.Generics (Generic)
import Prelude hiding (read)
import Int
import IntMap (IntMap, (!))
import Monad
import Name
import Product (Product (..))
import ST
import Supply
import Type.BindingFlag
import Type.Node (Node, postordered, preordered, projected)
import qualified Type.Node as Node
import qualified Type.Restricted as R
import qualified Type.Syntactic as S
import UnionFind (Var, (===), contents, union)
import qualified UnionFind as Var
-- | A graphic type: a union-find variable holding a bound node.
type Type s a = Var s (BoundNode s a)

-- | A graph node whose payload is a 'Bound'.
type BoundNode s a = Node s (Bound s a)

-- | Node payload: an annotation @a@, a mutable binding edge, and the
-- structural term.
data Bound s a b =
  Bound a {-# UNPACK #-} !(Var s (Binding b)) !(Term b) deriving Generic
instance Field1 (Bound s a b) (Bound s a' b) a a'
instance Field2 (Bound s a b) (Bound s a b) (Var s (Binding b)) (Var s (Binding b))
instance Field3 (Bound s a b) (Bound s a b) (Term b) (Term b)

-- | Lens onto the binding edge of a 'Bound'.
binding :: Lens' (Bound s a b) (Var s (Binding b))
binding = _2

-- | Lens onto the structural term of a 'Bound'.
term :: Lens' (Bound s a b) (Term b)
term = _3

-- | Folding a 'Bound' folds only its term, never the binding edge.
instance Foldable (Bound s a) where
  foldMap f = foldMap f . view term

-- | A binding edge: either the root of the graph, or a binder with a
-- binding flag pointing at the node it is bound under.
data Binding a
  = Root
  | Binder !BindingFlag a deriving ( Show
                                   , Functor
                                   , Foldable
                                   , Traversable
                                   , Generic
                                   )
instance VariantA (Binding a) (Binding a) () ()
instance VariantB (Binding a) (Binding b) (BindingFlag, a) (BindingFlag, b)

-- | Prism matching 'Root'.
root :: Prism' (Binding a) ()
root = _A

-- | Prism matching 'Binder'.
binder :: Prism (Binding a) (Binding b) (BindingFlag, a) (BindingFlag, b)
binder = _B

-- | The structural part of a type: bottom, or an arrow.
data Term a
  = Bot
  | Arr a a deriving ( Show
                     , Functor
                     , Foldable
                     , Traversable
                     , Generic
                     )
instance VariantA (Term a) (Term a) () ()
instance VariantB (Term a) (Term b) (a, a) (b, b)

-- | Prism matching 'Bot'.
bot :: Prism (Term a) (Term a) () ()
bot = _A

-- | Prism matching 'Arr'.
arr :: Prism (Term a) (Term b) (a, a) (b, b)
arr = _B
-- | Convert a restricted type into its graphic representation.  A
-- forall allocates a placeholder Bot node for the bound variable,
-- elaborates the body under an extended environment, and then either
-- elaborates just the bound annotation (when the placeholder was never
-- refined — the variable was effectively unused) or unifies the
-- placeholder with the elaborated bound.
fromRestricted :: (MonadST m, MonadSupply Int m)
               => R.Type (Name a) -> m (Type (World m) (Maybe a))
fromRestricted =
  flip runReaderT mempty <<<
  fix (\ rec ann b -> \ case
    R.Bot -> newType ann b Bot
    R.Var x -> asks (!x)
    R.Arr t_a t_b -> do
      env <- ask
      newType ann b $ Arr (env!t_a) (env!t_b)
    R.Forall x'@(Name _ ann') bf o o' -> do
      t_o <- newType Nothing Root Bot
      t_o' <- local (at x' ?~ t_o) $ rec ann b o'
      ifM (same t_o t_o') (rec ann' b o) $ do
        join $ union <$> rec ann' (Binder bf t_o') o <*> pure t_o
        return t_o') Nothing Root
  where
    -- Allocate a fresh union-find variable over a fresh bound node.
    newType ann b c = Var.new =<< newBoundNode ann b c
    newBoundNode ann b c = Bound ann <$> Var.new b <*> pure c >>= Node.new
    -- Two vars are "same" when already unioned, or when their current
    -- node contents compare equal.
    same var_a var_b = ifM (var_a === var_b) (return True) $
      (==) <$> var_a^!contents <*> var_b^!contents
-- | Convert a graphic type back into the syntactic representation,
-- reintroducing foralls from the binding structure (collected by
-- 'boundNodes') in a recursive walk from the root.  Every syntactic
-- fragment is labelled with its originating node's integer id.
toSyntactic :: MonadST m
            => Type (World m) (Maybe a)
            -> m (Product Int (S.PolyType (Product Int) (Name a)))
toSyntactic t0 = do
  bns <- t0^!boundNodes
  fix (\ rec n0 -> do
    t_s0 <- case n0^.projected.term of
      Bot -> return $ toInt n0 :* S.Bot
      Arr t_a t_b -> do
        n_a <- t_a^!contents
        n_b <- t_b^!contents
        return $ toInt n0 :* S.Mono (S.Arr (nodeVar n_a) (nodeVar n_b))
    -- Wrap the base term in one Forall per node bound under n0.
    foldrM (\ (bf, n) t_s -> nodeForall n bf <$> rec n <*> pure t_s)
           t_s0 (fromMaybe mempty $ bns^.at n0)) =<< t0^!contents
  where
    nodeVar n = toInt n :* S.Var (nodeName n)
    nodeForall n bf o o' = toInt n :* S.Forall (nodeName n) bf o o'
    nodeName n = Name (n^.int) (n^.projected._1)
-- | 'toSyntactic' packaged as an index-preserving lens action.
syntactic :: MonadST m
          => IndexPreservingAction m
             (Type (World m) (Maybe a))
             (Product Int (S.PolyType (Product Int) (Name a)))
syntactic = act toSyntactic
-- | Walk every node reachable from the root in preorder and collect,
-- per node, the (flag, node) pairs of the nodes bound directly under
-- it.  Nodes whose binding edge is 'Root' contribute nothing.
boundNodes :: (MonadST m, s ~ World m)
           => IndexPreservingAction m
              (Type s a)
              (IntMap (BoundNode s a) [(BindingFlag, BoundNode s a)])
boundNodes = act $ perform (contents.preordered) >=> foldlM (\ ns ->
  perform contents >=> \ n -> n^!projected.binding.contents >>= \ case
    Root -> return ns
    Binder bf var' -> var'^!contents <&> \ n' -> ns&at n' %~ \ case
      Nothing -> Just [(bf, n)]
      Just bs -> Just ((bf, n) <| bs)) mempty
|
sonyandy/mlf
|
src/Type/Graphic.hs
|
bsd-3-clause
| 4,976
| 0
| 25
| 1,510
| 2,062
| 1,078
| 984
| -1
| -1
|
{-# LANGUAGE OverloadedStrings, TransformListComp, RankNTypes, GADTs, DeriveAnyClass #-}
{-
Base data definition for further data manipulation
-}
module Quark.Base.Data
(
QValue (..),
SupportedTypes(..)
) where
import qualified Data.Vector as V
import Data.Text
import Data.Time
import Data.Time.Calendar
-- import Text.Read
import Data.Text.Read
import GHC.Exts
import Data.Hashable
-- | Enumeration of the primitive value types supported by the database.
data SupportedTypes = PInt | PDouble | PWord | PBool | PText deriving (Eq, Ord, Show, Enum)
-- | A dynamically-typed primitive database value.  Numeric payloads are
-- unpacked for compact storage and fast arithmetic.
data QValue = QString Text
            | QDouble {-# UNPACK #-}!Double
            | QInt {-# UNPACK #-} !Int
            -- | QDate {-# UNPACK #-} !Day -- # of days from 1858-11-17
            | QDateTime {-# UNPACK #-}!Int -- # of milliseconds as in Unix time
            | QDateS {-# UNPACK #-} !(Int, Int, Int) -- stupid placeholder for dd.mm.yyyy format
            | QBool {-# UNPACK #-} !Bool
            | QMoney {-# UNPACK #-}!Int -- # representing currency in cents, i.e. 130.23 USD will be 13023 Money - for FAST processing
            | QNull
            | QIllegalValue -- ^ marker for failed / unsupported operations
            deriving (Eq, Ord, Show)

-- | A row modelled as a vector of (column name, value) pairs.
type KVP = V.Vector (Text, QValue)
-- ok, using RankNTypes here - need to be able to inject binary operation that acts on ANY Num types.
-- That is what the (forall a. Num a => a -> a -> a) signature defines - polymorfic function good for ALL Num types.
-- Otherwise it wouldn't work on both Ints and Doubles
--
-- Mixed Int/Double operands are promoted to Double; any combination not
-- listed below collapses to 'QIllegalValue' rather than failing.
{-# INLINE injectBinOp #-}
injectBinOp :: (forall a. Num a => a -> a -> a) -> QValue -> QValue -> QValue
injectBinOp binOp (QInt x) (QInt y) = QInt (binOp x y)
injectBinOp binOp (QDouble x) (QDouble y) = QDouble (binOp x y)
injectBinOp binOp (QDouble x) (QInt y) = QDouble (binOp x (fromIntegral y) )
injectBinOp binOp (QInt x) (QDouble y) = QDouble (binOp (fromIntegral x) y )
injectBinOp binOp (QMoney x) (QMoney y) = QMoney (binOp x y)
-- injectBinOp binOp (QDate x) (QDate y) = QDate (binOp x y) -- apparently, Day is not part of Num
injectBinOp binOp (QDateTime x) (QDateTime y) = QDateTime (binOp x y)
injectBinOp binOp _ _ = QIllegalValue
{-# INLINE injectOp #-}
-- | Lift a unary numeric operation into 'QValue'.
--
-- Mirrors 'injectBinOp': the operation is applied inside every numeric
-- wrapper that the binary version supports ('QInt', 'QDouble',
-- 'QMoney', 'QDateTime'), and any other constructor yields
-- 'QIllegalValue' instead of a non-exhaustive pattern-match crash,
-- making the function total and consistent with 'injectBinOp'.
injectOp :: (forall a. Num a => a -> a ) -> QValue -> QValue
injectOp op (QInt x)      = QInt (op x)
injectOp op (QDouble x)   = QDouble (op x)
injectOp op (QMoney x)    = QMoney (op x)
injectOp op (QDateTime x) = QDateTime (op x)
injectOp _  _             = QIllegalValue
-- | Arithmetic on 'QValue' via the inject helpers.  Binary operators on
-- unsupported operand combinations produce 'QIllegalValue'; the unary
-- operators cover only the constructors handled by 'injectOp'.
instance Num QValue where
    (+) = injectBinOp (+)
    (-) = injectBinOp (-)
    (*) = injectBinOp (*)
    negate = injectOp negate
    fromInteger x = QInt (fromInteger x)
    abs = injectOp abs
    signum = injectOp signum
-- | Enum via the 'QInt' wrapper.
-- NOTE(review): 'fromEnum' is partial — any constructor other than
-- 'QInt' is a pattern-match failure.
instance Enum QValue where
    toEnum = QInt
    fromEnum (QInt i) = i

-- | Hashing for string values.
-- NOTE(review): partial — hashing any non-'QString' value is a
-- pattern-match failure.
instance Hashable QValue where
    hash (QString s) = hash s
    hashWithSalt k (QString s) = hashWithSalt k s
{-
rankN :: (forall n. Num n => n -> n) -> (Int, Double)
rankN f = (f 1, f 1.0)
-}
-- ******************************************************************************************************************************************
-- Smart in memory data structures
-- ******************************************************************************************************************************************
{-
We are going to start with converting csv-like table to the collection of Maps and optimized (int-based) table.
Maps will be used for filtering (as they only have unique values stored), table - for map/reduce
In the table - we will change all strings to ints
-}
|
jhoxray/muon
|
src/Quark/Base/Data.hs
|
bsd-3-clause
| 3,465
| 0
| 10
| 784
| 733
| 401
| 332
| 51
| 1
|
{-# LANGUAGE ScopedTypeVariables, NoMonomorphismRestriction, JavaScriptFFI, CPP #-}
module Reflex.Dom.Brownies.LowLevel (
clampedArrayFromBS
) where
import GHCJS.DOM.Types (Uint8ClampedArray(..))
import qualified Data.ByteString as BS (ByteString)
#ifdef __GHCJS__
import GHCJS.DOM.Types (JSM, pFromJSVal, JSVal)
import Foreign.Ptr (Ptr)
import qualified Data.ByteString.Unsafe as BS (unsafeUseAsCString)
#else
import GHCJS.DOM.Types (JSM, liftDOM)
import GHCJS.Buffer (fromByteString, getArrayBuffer, thaw)
import Language.Javascript.JSaddle (new, jsg, pToJSVal, ghcjsPure)
#endif
-- | Create a clampedArray from a ByteString
clampedArrayFromBS :: BS.ByteString -> JSM Uint8ClampedArray
#ifdef __GHCJS__
-- GHCJS build: hand the ByteString's storage straight to a JS
-- Uint8ClampedArray through the FFI.
clampedArrayFromBS bs =
    BS.unsafeUseAsCString bs $ \ ptr ->
        newUint8ClampedArray ptr

-- | Wrap the raw FFI constructor, converting the returned JSVal.
newUint8ClampedArray :: Ptr a -> JSM Uint8ClampedArray
newUint8ClampedArray ptr = pFromJSVal <$> jsUint8ClampedArray ptr

foreign import javascript unsafe
    -- Arguments
    -- pixels : Ptr a -- Pointer to a ByteString
    "(function(){ return new Uint8ClampedArray($1.u8); })()"
    jsUint8ClampedArray :: Ptr a -> JSM JSVal
#else
-- JSaddle build: go through a GHCJS buffer and call the JS
-- Uint8ClampedArray constructor explicitly.
clampedArrayFromBS bs = do
    (buffer,_,_) <- ghcjsPure $ fromByteString bs  -- fromString converts to 64 encoding
    buffer' <- thaw buffer
    arrbuff <- ghcjsPure (getArrayBuffer buffer')
    liftDOM (Uint8ClampedArray <$> new (jsg "Uint8ClampedArray") [pToJSVal arrbuff])
#endif
|
hansroland/reflex-dom-brownies
|
src/Reflex/Dom/Brownies/LowLevel.hs
|
bsd-3-clause
| 1,494
| 4
| 8
| 278
| 183
| 110
| 73
| 14
| 1
|
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
module Ivory.Language.Plugin (plugin) where
import DynamicLoading
import GhcPlugins
import GHC.Plugins.SrcSpan
#if __GLASGOW_HASKELL__ < 708
# error Ivory.Language.Plugin requires at least ghc-7.8
#endif
-- | Plugin entry point: adds the location-annotation Core pass.
plugin :: Plugin
plugin = defaultPlugin { installCoreToDos = install }
-- | Prepend the location-annotation pass to the Core-to-Core pipeline.
-- Resolves the @withLocation@/@mkLocation@ helpers and the @Ivory@ type
-- constructor from Ivory.Language.Monad at plugin start-up.
-- NOTE(review): the irrefutable 'Just' patterns make a failed lookup a
-- compiler crash — presumably intended as a hard error; confirm.
install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
install opts todos = do
  reinitializeGlobals
  hsc_env <- getHscEnv
  Just withLocName <- liftIO $ lookupRdrNameInModuleForPlugins hsc_env iVORY_MONAD wITH_LOC
  withLocVar <- lookupId withLocName
  Just mkLocName <- liftIO $ lookupRdrNameInModuleForPlugins hsc_env iVORY_MONAD mK_LOC
  mkLocVar <- lookupId mkLocName
  Just ivoryName <- liftIO $ lookupRdrNameInModuleForPlugins hsc_env iVORY_MONAD iVORY
  ivoryCon <- lookupTyCon ivoryName
  let annotate loc expr = mkWithLocExpr ivoryCon mkLocVar withLocVar loc expr
  let locpass = mkPass annotate killForeignStubs
  return $ (CoreDoPluginPass "Add Locations" locpass) : todos
  where
  -- Opt-in flag passed on the plugin command line.
  killForeignStubs = "kill-foreign-stubs" `elem` opts
-- | Does this Core expression have a type whose head type constructor
-- is the @Ivory@ monad?  Only applications and bare variables are
-- considered; every other expression shape is rejected outright.
isIvoryStmt :: TyCon -> CoreExpr -> Bool
isIvoryStmt ivory expr = case expr of
  App _ _ -> headedByIvory
  Var _   -> headedByIvory
  _       -> False
  where
    headedByIvory =
      case splitTyConApp_maybe (exprType expr) of
        Just (tc, _) -> tc == ivory
        Nothing      -> False
-- | Wrap an Ivory statement in a @withLocation@ call carrying its
-- source span.  Expressions without a real span, or whose type is not
-- headed by @Ivory@ (first equation's guard fails), are left untouched.
mkWithLocExpr :: TyCon -> Var -> Var -> SrcSpan -> CoreExpr -> CoreM CoreExpr
mkWithLocExpr ivoryTyCon mkLocVar withLocVar (RealSrcSpan ss) expr
  | isIvoryStmt ivoryTyCon expr = do
      loc <- mkLocExpr mkLocVar ss
      -- Re-apply the expression's type arguments before loc and expr.
      return $ mkCoreApps (Var withLocVar) (tys' ++ [loc, expr])
  where
  tys' = map Type tys
  (_, tys) = splitAppTys $ exprType expr
mkWithLocExpr _ _ _ _ expr = return expr
-- | Build the Core expression
-- @mkLocation file startLine startCol endLine endCol@
-- from a real source span.
mkLocExpr :: Var -> RealSrcSpan -> CoreM CoreExpr
mkLocExpr mkLocVar ss = do
  df <- getDynFlags
  file <- mkStringExprFS $ srcSpanFile ss
  return $ mkCoreApps (Var mkLocVar) [ file
                                     , mkIntExprInt df (srcSpanStartLine ss)
                                     , mkIntExprInt df (srcSpanStartCol ss)
                                     , mkIntExprInt df (srcSpanEndLine ss)
                                     , mkIntExprInt df (srcSpanEndCol ss)
                                     ]
-- | The module from which the location helpers are resolved.
iVORY_MONAD :: ModuleName
iVORY_MONAD = mkModuleName "Ivory.Language.Monad"

-- | Names looked up in 'iVORY_MONAD' during 'install'.
wITH_LOC, mK_LOC, iVORY :: RdrName
wITH_LOC = mkVarUnqual $ fsLit "withLocation"
mK_LOC = mkVarUnqual $ fsLit "mkLocation"
iVORY = mkRdrQual iVORY_MONAD $ mkTcOcc "Ivory"
|
Hodapp87/ivory
|
ivory/src/Ivory/Language/Plugin.hs
|
bsd-3-clause
| 2,666
| 0
| 12
| 645
| 720
| 359
| 361
| -1
| -1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Network.Tangaroa.Types
( Raft
, RaftSpec(..)
, LiftedRaftSpec(..)
, readLogEntry, writeLogEntry, readTermNumber, writeTermNumber
, readVotedFor, writeVotedFor, applyLogEntry, serializeRPC
, deserializeRPC, sendMessage, getMessage, debugPrint
, liftRaftSpec
, Term, startTerm
, LogIndex, startIndex
, RequestId, startRequestId
, Config(..), otherNodes, nodeId, electionTimeoutRange, heartbeatTimeout, enableDebug
, Role(..)
, RaftEnv(..), cfg, quorumSize, eventIn, eventOut, rs
, RaftState(..), role, votedFor, currentLeader, logEntries, commitIndex, lastApplied, timerThread
, pendingRequests, nextRequestId
, initialRaftState
, cYesVotes, cPotentialVotes, lNextIndex, lMatchIndex
, AppendEntries(..)
, AppendEntriesResponse(..)
, RequestVote(..)
, RequestVoteResponse(..)
, Command(..)
, CommandResponse(..)
, RPC(..)
, term
, Event(..)
) where
import Control.Concurrent (ThreadId)
import Control.Concurrent.Chan.Unagi
import Control.Lens hiding (Index)
import Control.Monad.RWS
import Data.Binary
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import GHC.Generics
-- | A Raft term number; starts at -1, so the first real term is 0.
newtype Term = Term Int
  deriving (Show, Read, Eq, Ord, Generic, Num)

startTerm :: Term
startTerm = Term (-1)

-- | Index into the replicated log; -1 denotes "no entries yet".
type LogIndex = Int

startIndex :: LogIndex
startIndex = (-1)

-- | Client-side request identifier.
newtype RequestId = RequestId Int
  deriving (Show, Read, Eq, Ord, Generic, Num)

startRequestId :: RequestId
startRequestId = RequestId 0
-- | Static per-node configuration; @nt@ is the node-id type.
data Config nt = Config
  { _otherNodes           :: Set nt
  , _nodeId               :: nt
  , _electionTimeoutRange :: (Int,Int) -- in microseconds
  , _heartbeatTimeout     :: Int -- in microseconds
  , _enableDebug          :: Bool
  }
  deriving (Show, Generic)
makeLenses ''Config
-- | A client command to be replicated; @et@ is the log-entry type.
data Command nt et = Command
  { _cmdEntry     :: et
  , _cmdClientId  :: nt
  , _cmdRequestId :: RequestId
  }
  deriving (Show, Read, Generic)

-- | Reply to a client command; @rt@ is the result type.
data CommandResponse nt rt = CommandResponse
  { _cmdrResult    :: rt
  , _cmdrLeaderId  :: nt
  , _cmdrRequestId :: RequestId
  }
  deriving (Show, Read, Generic)

-- | Leader-to-follower log replication / heartbeat message.
data AppendEntries nt et = AppendEntries
  { _aeTerm       :: Term
  , _leaderId     :: nt
  , _prevLogIndex :: LogIndex
  , _prevLogTerm  :: Term
  , _aeEntries    :: Seq (Term, Command nt et)
  , _leaderCommit :: LogIndex
  }
  deriving (Show, Read, Generic)

-- | Follower's reply to 'AppendEntries'.
data AppendEntriesResponse nt = AppendEntriesResponse
  { _aerTerm    :: Term
  , _aerNodeId  :: nt
  , _aerSuccess :: Bool
  , _aerIndex   :: LogIndex
  }
  deriving (Show, Read, Generic)

-- | Candidate's vote solicitation.
data RequestVote nt = RequestVote
  { _rvTerm       :: Term
  , _candidateId  :: nt
  , _lastLogIndex :: LogIndex
  , _lastLogTerm  :: Term
  }
  deriving (Show, Read, Generic)

-- | Reply to 'RequestVote'.
data RequestVoteResponse nt = RequestVoteResponse
  { _rvrTerm     :: Term
  , _rvrNodeId   :: nt
  , _voteGranted :: Bool
  }
  deriving (Show, Read, Generic)

-- | Union of every message exchanged over the wire.
data RPC nt et rt = AE (AppendEntries nt et)
                  | AER (AppendEntriesResponse nt)
                  | RV (RequestVote nt)
                  | RVR (RequestVoteResponse nt)
                  | CMD (Command nt et)
                  | CMDR (CommandResponse nt rt)
                  | DBG String
  deriving (Show, Read, Generic)
-- | A structure containing all the implementation details for running
-- the raft protocol.
data RaftSpec nt et rt mt = RaftSpec
  {
    -- | Function to get a log entry from persistent storage.
    __readLogEntry    :: LogIndex -> IO (Maybe et)
    -- | Function to write a log entry to persistent storage.
  , __writeLogEntry   :: LogIndex -> (Term,et) -> IO ()
    -- | Function to get the term number from persistent storage.
  , __readTermNumber  :: IO Term
    -- | Function to write the term number to persistent storage.
  , __writeTermNumber :: Term -> IO ()
    -- | Function to read the node voted for from persistent storage.
  , __readVotedFor    :: IO (Maybe nt)
    -- | Function to write the node voted for to persistent storage.
  , __writeVotedFor   :: Maybe nt -> IO ()
    -- | Function to apply a log entry to the state machine.
  , __applyLogEntry   :: et -> IO rt
    -- | Function to serialize an RPC.
  , __serializeRPC    :: RPC nt et rt -> mt
    -- | Function to deserialize an RPC.
  , __deserializeRPC  :: mt -> Maybe (RPC nt et rt)
    -- | Function to send a message to a node.
  , __sendMessage     :: nt -> mt -> IO ()
    -- | Function to get the next message.
  , __getMessage      :: IO mt
    -- | Function to log a debug message (no newline).
  , __debugPrint      :: nt -> String -> IO ()
  }
-- | The three Raft server roles.
data Role = Follower
          | Candidate
          | Leader
  deriving (Show, Generic, Eq)

-- | Events consumed by the main loop: an incoming RPC, or one of the
-- two timers firing.  The String payload is presumably a debug
-- description — confirm with the event producers.
data Event nt et rt = ERPC (RPC nt et rt)
                    | ElectionTimeout String
                    | HeartbeatTimeout String
  deriving (Show)
-- | A version of RaftSpec where all IO functions are lifted
-- into the Raft monad (see 'liftRaftSpec').  The pure serialization
-- fields are carried through unchanged.
data LiftedRaftSpec nt et rt mt t = LiftedRaftSpec
  {
    -- | Function to get a log entry from persistent storage.
    _readLogEntry    :: MonadTrans t => LogIndex -> t IO (Maybe et)
    -- | Function to write a log entry to persistent storage.
  , _writeLogEntry   :: MonadTrans t => LogIndex -> (Term,et) -> t IO ()
    -- | Function to get the term number from persistent storage.
  , _readTermNumber  :: MonadTrans t => t IO Term
    -- | Function to write the term number to persistent storage.
  , _writeTermNumber :: MonadTrans t => Term -> t IO ()
    -- | Function to read the node voted for from persistent storage.
  , _readVotedFor    :: MonadTrans t => t IO (Maybe nt)
    -- | Function to write the node voted for to persistent storage.
  , _writeVotedFor   :: MonadTrans t => Maybe nt -> t IO ()
    -- | Function to apply a log entry to the state machine.
  , _applyLogEntry   :: MonadTrans t => et -> t IO rt
    -- | Function to serialize an RPC.
  , _serializeRPC    :: RPC nt et rt -> mt
    -- | Function to deserialize an RPC.
  , _deserializeRPC  :: mt -> Maybe (RPC nt et rt)
    -- | Function to send a message to a node.
  , _sendMessage     :: MonadTrans t => nt -> mt -> t IO ()
    -- | Function to get the next message.
  , _getMessage      :: MonadTrans t => t IO mt
    -- | Function to log a debug message (no newline).
  , _debugPrint      :: nt -> String -> t IO ()
  }
makeLenses ''LiftedRaftSpec
-- | Lift every IO callback of a 'RaftSpec' into a monad transformer
-- over IO so the callbacks can run inside the Raft monad; the pure
-- (de)serialization fields pass through unchanged.
liftRaftSpec :: MonadTrans t => RaftSpec nt et rt mt -> LiftedRaftSpec nt et rt mt t
liftRaftSpec RaftSpec{..} =
  LiftedRaftSpec
    { _readLogEntry    = lift . __readLogEntry
    , _writeLogEntry   = \i et -> lift (__writeLogEntry i et)
    , _readTermNumber  = lift __readTermNumber
    , _writeTermNumber = lift . __writeTermNumber
    , _readVotedFor    = lift __readVotedFor
    , _writeVotedFor   = lift . __writeVotedFor
    , _applyLogEntry   = lift . __applyLogEntry
    , _serializeRPC    = __serializeRPC
    , _deserializeRPC  = __deserializeRPC
    , _sendMessage     = \n m -> lift (__sendMessage n m)
    , _getMessage      = lift __getMessage
    , _debugPrint      = \n s -> lift (__debugPrint n s)
    }
-- | Mutable per-node protocol state.  By naming convention the @c*@
-- fields look candidate-specific and the @l*@ fields leader-specific —
-- confirm against the protocol code.
data RaftState nt et = RaftState
  { _role            :: Role
  , _term            :: Term
  , _votedFor        :: Maybe nt
  , _currentLeader   :: Maybe nt
  , _logEntries      :: Seq (Term, Command nt et)
  , _commitIndex     :: LogIndex
  , _lastApplied     :: LogIndex
  , _timerThread     :: Maybe ThreadId
  , _cYesVotes       :: Set nt
  , _cPotentialVotes :: Set nt
  , _lNextIndex      :: Map nt LogIndex
  , _lMatchIndex     :: Map nt LogIndex
  , _pendingRequests :: Map RequestId (Command nt et) -- used by clients
  , _nextRequestId   :: RequestId -- used by clients
  }
makeLenses ''RaftState
-- | The state every node starts in: a follower with an empty log and
-- no leader, vote, or timer.
initialRaftState :: RaftState nt et
initialRaftState = RaftState
  Follower   -- role
  startTerm  -- term
  Nothing    -- votedFor
  Nothing    -- currentLeader
  Seq.empty  -- log
  startIndex -- commitIndex
  startIndex -- lastApplied
  Nothing    -- timerThread
  Set.empty  -- cYesVotes
  Set.empty  -- cPotentialVotes
  Map.empty  -- lNextIndex
  Map.empty  -- lMatchIndex
  Map.empty  -- pendingRequests
  0          -- nextRequestId
-- | Read-only environment for a running node: configuration, quorum
-- size, the event channel ends, and the lifted implementation spec.
data RaftEnv nt et rt mt = RaftEnv
  { _cfg        :: Config nt
  , _quorumSize :: Int
  , _eventIn    :: InChan (Event nt et rt)
  , _eventOut   :: OutChan (Event nt et rt)
  , _rs         :: LiftedRaftSpec nt et rt mt (RWST (RaftEnv nt et rt mt) () (RaftState nt et))
  }
makeLenses ''RaftEnv

-- | The Raft monad: reader env + state over IO (the writer is unused).
type Raft nt et rt mt a = RWST (RaftEnv nt et rt mt) () (RaftState nt et) IO a
-- Wire (de)serialization, derived generically for every message type.
instance Binary Term
instance Binary RequestId
instance (Binary nt, Binary et) => Binary (AppendEntries nt et)
instance Binary nt => Binary (AppendEntriesResponse nt)
instance Binary nt => Binary (RequestVote nt)
instance Binary nt => Binary (RequestVoteResponse nt)
instance (Binary nt, Binary et) => Binary (Command nt et)
instance (Binary nt, Binary rt) => Binary (CommandResponse nt rt)
instance (Binary nt, Binary et, Binary rt) => Binary (RPC nt et rt)
|
chrisnc/tangaroa
|
src/Network/Tangaroa/Types.hs
|
bsd-3-clause
| 9,297
| 0
| 13
| 2,385
| 2,355
| 1,357
| 998
| 205
| 1
|
{- DATX02-17-26, automated assessment of imperative programs.
- Copyright, 2017, see AUTHORS.md.
-
- This program is free software; you can redistribute it and/or
- modify it under the terms of the GNU General Public License
- as published by the Free Software Foundation; either version 2
- of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric, LambdaCase, TemplateHaskell
, TupleSections, TypeFamilies, FlexibleContexts, ConstraintKinds #-}
-- | Conversion back to Langauge.Java.Syntax (hence: S).
module CoreS.ConvBack (
-- * Types
HoleSum (..)
, LJSynConv
, Repr
-- * Classes
, ToLJSyn
, ToLJSynP
-- * Operations
, toLJSyn
, prettyCore
, prettyCore'
, dumpCore
) where
import Prelude hiding (EQ, LT, GT)
import Data.Data (Data, Typeable)
import GHC.Generics (Generic)
import Data.Bifunctor (first)
import Data.Function.Pointless ((.:))
import Control.Monad ((>=>))
import Control.Lens ((^?))
import Util.TH (deriveLens)
import Util.Monad ((<$$>))
import Util.Debug (exitLeft)
import qualified Language.Java.Pretty as P
import qualified Language.Java.Syntax as S
import CoreS.AST
-- TODO: import Util.Function (in feature/norm/vardecl)
-- | 'applyN' i f x: apply @f@ to @x@, @i@ times; @applyN 0 f == id@.
applyN :: (Eq i, Num i) => i -> (a -> a) -> a -> a
applyN n f = go n
  where
    go i x
      | i == 0    = x
      | otherwise = go (i - 1) (f x)
--------------------------------------------------------------------------------
-- Errors:
--------------------------------------------------------------------------------
-- | Sum of types in CoreS.AST containing holes or might otherwise fail here.
-- Each constructor wraps the offending term so the caller can see
-- exactly where the conversion back to Language.Java.Syntax stopped.
data HoleSum
  = HSType { _hsType :: Type }
  | HSLiteral { _hsLiteral :: Literal }
  | HSLValue { _hLValueX :: LValue }
  | HSVarInit { _hsVarInit :: VarInit }
  | HSExpr { _hsExpr :: Expr }
  | HSVarDeclId { _hsVarDeclId :: VarDeclId }
  | HSVarDecl { _hsVarDecl :: VarDecl }
  | HSVMType { _hsVMType :: VMType }
  | HSTypedVVDecl { _hsTypedVVDecl :: TypedVVDecl }
  | HSForInit { _hsForInit :: ForInit }
  | HSSwitchLabel { _hsSwitchLabel :: SwitchLabel }
  | HSSwitchBlock { _hsSwitchBlock :: SwitchBlock }
  | HSBlock { _hsBlock :: Block }
  | HSStmt { _hsStmt :: Stmt }
  | HSMemberDecl { _hsMemberDecl :: MemberDecl }
  | HSDecl { _hsDecl :: Decl }
  | HSClassBody { _hsClassBody :: ClassBody }
  | HSClassDecl { _hsClassDecl :: ClassDecl }
  | HSTypeDecl { _hsTypeDecl :: TypeDecl }
  | HSImportDecl { _hsImportDecl :: ImportDecl }
  | HSCompilationUnit { _hsCompilationUnit :: CompilationUnit }
  deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

-- Generate lenses for every field above.
$(deriveLens [''HoleSum])
--------------------------------------------------------------------------------
-- Conversion computation type:
--------------------------------------------------------------------------------

-- | Conversion computation: 'Left' carries the hole (or otherwise
-- unconvertible term) that stopped the translation.
type LJSynConv a = Either HoleSum a

--------------------------------------------------------------------------------
-- Converting back class:
--------------------------------------------------------------------------------

-- | Models the conversion back to S.
class ToLJSyn t where
  -- | The corresponding data type in S.
  type Repr t :: *

  -- | Convert back to S. The conversion is partial due to
  -- possible holes.
  toLJSyn :: t -> LJSynConv (Repr t)

-- | Combined constraint for things convertible back to S,
-- and which in turn in S is convertible to String via prettyfication.
type ToLJSynP ast = (ToLJSyn ast, P.Pretty (Repr ast))
-- | Pretty-print a CoreS.AST term via its Language.Java.Syntax
-- representation; conversion failure yields the offending 'HoleSum'.
prettyCore :: ToLJSynP ast => ast -> LJSynConv String
prettyCore = fmap P.prettyPrint . toLJSyn
-- | Like 'prettyCore', but a conversion failure is rendered to String
-- with 'show'.
prettyCore' :: ToLJSynP ast => ast -> Either String String
prettyCore' = first show . prettyCore
-- | Dumps a CoreS.AST term prettified as the syntax tree in Java.
-- A conversion failure goes through 'exitLeft' (defined in Util.Debug).
dumpCore :: ToLJSynP ast => ast -> IO ()
dumpCore = exitLeft . prettyCore >=> putStrLn
--------------------------------------------------------------------------------
-- Conversion DSL:
--------------------------------------------------------------------------------

-- | Convert every element of a traversable container.
toLJSynM :: (Traversable f, ToLJSyn t) => f t -> LJSynConv (f (Repr t))
toLJSynM = mapM toLJSyn

-- | fmap-like: apply a pure function to one converted argument.
(<-$) :: ToLJSyn t => (Repr t -> b) -> t -> LJSynConv b
(<-$) f = fmap f . toLJSyn

-- | ap-like: apply an already-converted function to one converted argument.
(<-*) :: ToLJSyn t => LJSynConv (Repr t -> b) -> t -> LJSynConv b
(<-*) f x = f <*> toLJSyn x

-- | As '<-$', but the argument is a traversable of convertibles.
(<=$) :: (Traversable f, ToLJSyn t) => (f (Repr t) -> b) -> f t -> LJSynConv b
(<=$) f = fmap f . toLJSynM

-- | As '<-*', but the argument is a traversable of convertibles.
(<=*) :: (Traversable f, ToLJSyn t)
      => LJSynConv (f (Repr t) -> b) -> f t -> LJSynConv b
(<=*) f x = f <*> toLJSynM x

-- | As '<=$', but two traversables deep.
(<~$) :: (Traversable g, Traversable f, ToLJSyn t)
      => (g (f (Repr t)) -> b) -> g (f t) -> LJSynConv b
(<~$) f = fmap f . mapM toLJSynM

-- | As '<=*', but two traversables deep.
(<~*) :: (Traversable g, Traversable f, ToLJSyn t)
      => LJSynConv (g (f (Repr t)) -> b) -> g (f t) -> LJSynConv b
(<~*) f x = f <*> mapM toLJSynM x

infixl 4 <-$, <-*, <=$, <=*, <~$, <~*
--------------------------------------------------------------------------------
-- Concrete conversions, Names and identifiers:
--------------------------------------------------------------------------------

-- | Identifiers convert directly; they contain no holes.
instance ToLJSyn Ident where
  type Repr Ident = S.Ident
  toLJSyn = \case
    Ident i -> pure $ S.Ident i

-- | Qualified names convert identifier-wise.
instance ToLJSyn Name where
  type Repr Name = S.Name
  toLJSyn = \case
    Name is -> S.Name <=$ is
--------------------------------------------------------------------------------
-- Concrete conversions, Types:
--------------------------------------------------------------------------------
-- | Primitive types have an exact one-to-one Java counterpart.
instance ToLJSyn PrimType where
  type Repr PrimType = S.PrimType
  toLJSyn pt = pure $ case pt of
    BoolT   -> S.BooleanT
    ByteT   -> S.ByteT
    ShortT  -> S.ShortT
    IntT    -> S.IntT
    LongT   -> S.LongT
    CharT   -> S.CharT
    FloatT  -> S.FloatT
    DoubleT -> S.DoubleT

instance ToLJSyn Type where
  type Repr Type = S.Type
  toLJSyn ty = case ty of
    PrimT pt  -> S.PrimType <-$ pt
    ArrayT et -> S.RefType . S.ArrayType <-$ et
    -- Strings become the reference type @String@ (no type arguments).
    StringT   -> pure $ S.RefType $ S.ClassRefType $
                 S.ClassType [(S.Ident "String", [])]
    -- The null type has no surface syntax in Java; report it as a hole.
    NullT     -> Left $ HSType NullT
--------------------------------------------------------------------------------
-- Concrete conversions, Literals:
--------------------------------------------------------------------------------
-- | Literals translate constructor-for-constructor; none can fail.
instance ToLJSyn Literal where
  type Repr Literal = S.Literal
  toLJSyn lit = pure $ case lit of
    Int i     -> S.Int i
    Word w    -> S.Word w
    Float f   -> S.Float f
    Double d  -> S.Double d
    Boolean b -> S.Boolean b
    Char c    -> S.Char c
    String s  -> S.String s
    Null      -> S.Null
--------------------------------------------------------------------------------
-- Concrete conversions, lvalues:
--------------------------------------------------------------------------------
-- | Lvalues become Java assignment left-hand sides.
instance ToLJSyn LValue where
  type Repr LValue = S.Lhs
  toLJSyn lv = case lv of
    LVName n     -> S.NameLhs <-$ n
    LVArray e es -> S.ArrayLhs .: S.ArrayIndex <-$ e <=* es
    HoleLValue _ -> Left $ HSLValue lv
--------------------------------------------------------------------------------
-- Concrete conversions, Variable & Array initialization:
--------------------------------------------------------------------------------
-- | An array initializer converts each of its element initializers.
instance ToLJSyn ArrayInit where
  type Repr ArrayInit = S.ArrayInit
  toLJSyn (ArrayInit xs) = S.ArrayInit <=$ xs

-- | A variable initializer is either an expression or an array literal.
instance ToLJSyn VarInit where
  type Repr VarInit = S.VarInit
  toLJSyn vi = case vi of
    InitExpr e    -> S.InitExp <-$ e
    InitArr ai    -> S.InitArray <-$ ai
    HoleVarInit _ -> Left $ HSVarInit vi
--------------------------------------------------------------------------------
-- Concrete conversions, Expressions:
--------------------------------------------------------------------------------
-- | Map a step operator to the corresponding unary Java expression
-- constructor.
stepOp :: StepOp -> S.Exp -> S.Exp
stepOp op = case op of
  PostInc -> S.PostIncrement
  PostDec -> S.PostDecrement
  PreInc  -> S.PreIncrement
  PreDec  -> S.PreDecrement
-- | Build a binary Java expression given an operator translation.
appOp :: (t -> S.Op) -> t -> S.Exp -> S.Exp -> S.Exp
appOp conv op lhs = S.BinOp lhs (conv op)
-- | Short-circuiting logical operators.
logOp :: LogOp -> S.Op
logOp op = case op of
  LAnd -> S.CAnd
  LOr  -> S.COr
-- | Arithmetic, shift and bitwise operators.
numOp :: NumOp -> S.Op
numOp op = case op of
  Add     -> S.Add
  Sub     -> S.Sub
  Mul     -> S.Mult
  Div     -> S.Div
  Rem     -> S.Rem
  LShift  -> S.LShift
  RShift  -> S.RShift
  RRShift -> S.RRShift
  And     -> S.And
  Xor     -> S.Xor
  Or      -> S.Or
-- | Compound-assignment (@op=@) variants of 'numOp'.
numOpA :: NumOp -> S.AssignOp
numOpA op = case op of
  Add     -> S.AddA
  Sub     -> S.SubA
  Mul     -> S.MultA
  Div     -> S.DivA
  Rem     -> S.RemA
  LShift  -> S.LShiftA
  RShift  -> S.RShiftA
  RRShift -> S.RRShiftA
  And     -> S.AndA
  Xor     -> S.XorA
  Or      -> S.OrA
-- | Comparison operators.
cmpOp :: CmpOp -> S.Op
cmpOp op = case op of
  EQ -> S.Equal
  NE -> S.NotEq
  LT -> S.LThan
  GT -> S.GThan
  LE -> S.LThanE
  GE -> S.GThanE
-- | The qualified name @System.out.println@, used to desugar 'ESysOut'.
println :: Name
println = Name $ Ident <$> ["System", "out", "println"]
-- | Narrow an 'Integer' to 'Int' inside any applicative.
mkInt :: Applicative f => Integer -> f Int
mkInt = pure . fromInteger
-- | Turn a (class) name into a Java type-declaration specifier with no
-- type arguments on any segment.
toTDS :: Name -> LJSynConv S.TypeDeclSpecifier
toTDS = fmap (S.TypeDeclSpecifier . S.ClassType) . mapM ((, []) <-$) . _nmIds
instance ToLJSyn Expr where
  type Repr Expr = S.Exp
  toLJSyn expr = case expr of
    ELit l           -> S.Lit <-$ l
    -- A variable read reuses the lvalue conversion, then turns the lhs
    -- form back into an expression.
    EVar lv          -> toLJSyn lv >>= \case
      S.NameLhs n   -> pure $ S.ExpName n
      S.ArrayLhs ai -> pure $ S.ArrayAccess ai
      _             -> Left $ HSLValue lv
    ECast t e        -> S.Cast <-$ t <-* e
    ECond c ei ee    -> S.Cond <-$ c <-* ei <-* ee
    EAssign lv e     -> S.Assign <-$ lv <*> pure S.EqualA <-* e
    EOAssign lv op e -> S.Assign <-$ lv <*> pure (numOpA op) <-* e
    ENum op l r      -> appOp numOp op <-$ l <-* r
    ECmp op l r      -> appOp cmpOp op <-$ l <-* r
    ELog op l r      -> appOp logOp op <-$ l <-* r
    EStep op e       -> stepOp op <-$ e
    ENot e           -> S.PreNot <-$ e
    EBCompl e        -> S.PreBitCompl <-$ e
    EPlus e          -> S.PrePlus <-$ e
    EMinus e         -> S.PreMinus <-$ e
    EInstNew n es    -> S.InstanceCreation [] <$> toTDS n <=* es <*> pure Nothing
    EMApp n es       -> S.MethodInv <$> (S.MethodCall <-$ n <=* es)
    EArrNew t es i   -> S.ArrayCreate <-$ t <=* es <*> mkInt i
    EArrNewI t i ai  -> S.ArrayCreateInit <-$ t <*> mkInt i <-* ai
    -- @System.out.println e@ is sugar for a call to 'println'.
    ESysOut e        -> toLJSyn $ EMApp println [e]
    HoleExpr _       -> Left $ HSExpr expr
u = undefined
--------------------------------------------------------------------------------
-- Concrete conversions, Statements:
--------------------------------------------------------------------------------
instance ToLJSyn VarDeclId where
  type Repr VarDeclId = S.VarDeclId
  toLJSyn vdi = case vdi of
    VarDId i        -> S.VarId <-$ i
    -- An array declarator wraps the plain id in 'dim' levels of @[]@.
    VarDArr i dim   -> applyN dim S.VarDeclArray . S.VarId <-$ i
    HoleVarDeclId _ -> Left $ HSVarDeclId vdi

instance ToLJSyn VarDecl where
  type Repr VarDecl = S.VarDecl
  toLJSyn vd = case vd of
    VarDecl vdi mvi -> S.VarDecl <-$ vdi <=* mvi
    HoleVarDecl _   -> Left $ HSVarDecl vd

-- | Variable modifiers: only @final@ (or nothing) exists in CoreS.
instance ToLJSyn VarMod where
  type Repr VarMod = [S.Modifier]
  toLJSyn vm = pure $ case vm of
    VMFinal  -> [S.Final]
    VMNormal -> []

instance ToLJSyn VMType where
  type Repr VMType = ([S.Modifier], S.Type)
  toLJSyn vmt = case vmt of
    VMType m t   -> (,) <-$ m <-* t
    HoleVMType _ -> Left $ HSVMType vmt

instance ToLJSyn TypedVVDecl where
  type Repr TypedVVDecl = (([S.Modifier], S.Type), [S.VarDecl])
  toLJSyn tvd = case tvd of
    TypedVVDecl vmt vds -> (,) <-$ vmt <=* vds
    HoleTypedVVDecl _   -> Left $ HSTypedVVDecl tvd
instance ToLJSyn ForInit where
  type Repr ForInit = S.ForInit
  toLJSyn fi = case fi of
    FIVars tvd    -> uncurry (uncurry S.ForLocalVars) <-$ tvd
    FIExprs es    -> S.ForInitExps <=$ es
    HoleForInit _ -> Left $ HSForInit fi

instance ToLJSyn SwitchLabel where
  type Repr SwitchLabel = S.SwitchLabel
  toLJSyn sl = case sl of
    SwitchCase e      -> S.SwitchCase <-$ e
    Default           -> pure S.Default
    HoleSwitchLabel _ -> Left $ HSSwitchLabel sl

instance ToLJSyn SwitchBlock where
  type Repr SwitchBlock = S.SwitchBlock
  toLJSyn sb = case sb of
    SwitchBlock l blk -> do
      -- A switch alternative needs a plain list of statements; any other
      -- block shape is reported as a hole.
      ss <- maybe (Left $ HSBlock blk) pure (blk ^? bStmts)
      S.SwitchBlock <-$ l <=* ss
    HoleSwitchBlock _ -> Left $ HSSwitchBlock sb

instance ToLJSyn Block where
  type Repr Block = S.Block
  toLJSyn b = case b of
    Block ss    -> S.Block <=$ ss
    HoleBlock _ -> Left $ HSBlock b
-- | Wrap a plain statement as a block statement, inside an applicative.
mkStmt :: Applicative f => S.Stmt -> f S.BlockStmt
mkStmt s = pure $ S.BlockStmt s
-- | Like 'mkStmt' for constructors taking an optional label/value that is
-- always absent here (unlabelled @break@/@continue@, bare @return@).
mkSSimp :: Applicative f => (Maybe a -> S.Stmt) -> f S.BlockStmt
mkSSimp con = mkStmt $ con Nothing
-- | Unwrap a block statement; only the plain-statement case is produced
-- by this module, so the other constructors are rejected.
unBS :: S.BlockStmt -> S.Stmt
unBS bs = case bs of
  S.BlockStmt s -> s
  _             -> error "unBS only supports extracting S.Stmt"
instance ToLJSyn Stmt where
  type Repr Stmt = S.BlockStmt
  toLJSyn stmt = case stmt of
    SEmpty           -> mkStmt S.Empty
    SVReturn         -> mkSSimp S.Return
    SReturn e        -> S.Return . pure <-$ e >>= mkStmt
    SBlock b         -> S.StmtBlock <-$ b >>= mkStmt
    SExpr e          -> S.ExpStmt <-$ e >>= mkStmt
    -- Local variable declarations are already block statements in Java.
    SVars tvd        -> uncurry (uncurry S.LocalVars) <-$ tvd
    SIf e si         -> S.IfThen <-$ e <*> (unBS <-$ si) >>= mkStmt
    SIfElse e si se  -> S.IfThenElse <-$ e <*> (unBS <-$ si) <*> (unBS <-$ se)
                          >>= mkStmt
    SWhile e si      -> S.While <-$ e <*> (unBS <-$ si) >>= mkStmt
    SDo e si         -> S.Do <$> (unBS <-$ si) <-* e >>= mkStmt
    SForB fi e es si -> S.BasicFor <=$ fi <=* e <~* es <*> (unBS <-$ si)
                          >>= mkStmt
    SForE t i e si   -> uncurry S.EnhancedFor <-$ t <-* i <-* e <*> (unBS <-$ si)
                          >>= mkStmt
    SContinue        -> mkSSimp S.Continue
    SBreak           -> mkSSimp S.Break
    SSwitch e sbs    -> S.Switch <-$ e <=* sbs >>= mkStmt
    HoleStmt _       -> Left $ HSStmt stmt
--------------------------------------------------------------------------------
-- Concrete conversions, Method:
--------------------------------------------------------------------------------
instance ToLJSyn FormalParam where
  type Repr FormalParam = S.FormalParam
  -- The literal False is Java's varargs flag; CoreS has no varargs.
  toLJSyn (FormalParam vmt vdi) =
    uncurry S.FormalParam <-$ vmt <*> pure False <-* vdi

instance ToLJSyn MemberDecl where
  type Repr MemberDecl = S.MemberDecl
  toLJSyn md = case md of
    MethodDecl rt i fps blk -> do
      retTy  <- toLJSynM rt
      ident  <- toLJSyn i
      params <- toLJSynM fps
      body   <- (S.MethodBody . pure) <-$ blk
      -- Every generated method is public static, with no type parameters,
      -- no throws clause and no default value.
      pure $ S.MethodDecl [S.Public, S.Static] [] retTy ident params [] Nothing body
    HoleMemberDecl _ -> Left $ HSMemberDecl md
--------------------------------------------------------------------------------
-- Concrete conversions, Compilation Unit:
--------------------------------------------------------------------------------
instance ToLJSyn Decl where
  type Repr Decl = S.Decl
  toLJSyn d = case d of
    MemberDecl m -> S.MemberDecl <-$ m
    HoleDecl _   -> Left $ HSDecl d

instance ToLJSyn ClassBody where
  type Repr ClassBody = S.ClassBody
  toLJSyn cb = case cb of
    ClassBody decls -> S.ClassBody <=$ decls
    HoleClassBody _ -> Left $ HSClassBody cb

instance ToLJSyn ClassDecl where
  type Repr ClassDecl = S.ClassDecl
  toLJSyn cd = case cd of
    ClassDecl i b -> do
      ident <- toLJSyn i
      body  <- toLJSyn b
      -- Public class: no type parameters, superclass or interfaces.
      pure $ S.ClassDecl [S.Public] ident [] Nothing [] body
    HoleClassDecl _ -> Left $ HSClassDecl cd

instance ToLJSyn TypeDecl where
  type Repr TypeDecl = S.TypeDecl
  toLJSyn td = case td of
    ClassTypeDecl cd -> S.ClassTypeDecl <-$ cd
    HoleTypeDecl _   -> Left $ HSTypeDecl td

instance ToLJSyn ImportDecl where
  type Repr ImportDecl = S.ImportDecl
  toLJSyn idecl = case idecl of
    ImportDecl n s w -> S.ImportDecl s <-$ n <*> pure w
    HoleImportDecl _ -> Left $ HSImportDecl idecl

instance ToLJSyn CompilationUnit where
  type Repr CompilationUnit = S.CompilationUnit
  toLJSyn cu = case cu of
    -- No package declaration is ever emitted (hence the Nothing).
    CompilationUnit is tds -> S.CompilationUnit Nothing <=$ is <=* tds
    HoleCompilationUnit _  -> Left $ HSCompilationUnit cu
|
Centril/DATX02-17-26
|
libsrc/CoreS/ConvBack.hs
|
gpl-2.0
| 16,817
| 0
| 15
| 4,046
| 4,931
| 2,528
| 2,403
| -1
| -1
|
module LargestSum where
-- Part of Cosmos by OpenGenus
-- | All non-empty contiguous sublists of the input. Kept with its original
-- definition for callers of this module's helper API; result order is
-- sublists of the tail first, then every sublist starting at the head.
sublists :: [a] -> [[a]]
sublists [] = []
sublists (a:as) = sublists as ++ [a]:(map (a:) (prefixes as))
-- | All non-empty suffixes, longest first.
suffixes :: [a] -> [[a]]
suffixes [] = []
suffixes (x:xs) = (x:xs) : suffixes xs
-- | All non-empty prefixes, shortest first.
prefixes :: [a] -> [[a]]
prefixes x = map reverse $ (suffixes . reverse) x
-- | Maximum sum over all non-empty contiguous sublists.
-- Uses Kadane's algorithm: scanl1 computes, for every position, the best
-- sum of a sublist ending there; the answer is the maximum of those.
-- This is O(n) where the original enumerate-and-sum was O(n^3), and the
-- explicit signature generalizes the previously defaulted Integer-only
-- type. Still errors on an empty input ('maximum' of []), as before.
largestSum :: (Num a, Ord a) => [a] -> a
largestSum = maximum . scanl1 (\acc x -> max x (acc + x))
|
beingadityak/cosmos
|
code/dynamic_programming/largest_sum_contiguous_subarray/LargestSum.hs
|
gpl-3.0
| 289
| 0
| 9
| 57
| 149
| 78
| 71
| 7
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module KAT_AES.KATGCM where
import qualified Data.ByteString as B
import Data.ByteString.Char8 ()
-- (key, iv, aad, input, out, taglen, tag)
type KATGCM = (B.ByteString, B.ByteString, B.ByteString, B.ByteString, B.ByteString, Int, B.ByteString)
vectors_aes128_enc :: [KATGCM]
vectors_aes128_enc =
[ -- vectors 0
( {-key = -}"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-iv = -}"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-aad = -}""
, {-input = -}""
, {-out = -}""
, {-taglen = -}16
, {-tag = -}"\x58\xe2\xfc\xce\xfa\x7e\x30\x61\x36\x7f\x1d\x57\xa4\xe7\x45\x5a")
-- vectors 1
, ( {-key = -}"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-iv = -}"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-aad = -}"\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"
, {-input = -}"\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a"
, {-out = -}"\x09\x82\xd0\xc4\x6a\xbc\xa9\x98\xf9\x22\xc8\xb3\x7b\xb8\xf4\x72\xfd\x9f\xa0\xa1\x43\x41\x53\x29\xfd\xf7\x83\xf5\x9e\x81\xcb\xea"
, {-taglen = -}16
, {-tag = -}"\x28\x50\x64\x2f\xa8\x8b\xab\x21\x2a\x67\x1a\x97\x48\x69\xa5\x6c")
-- vectors 2
, ( {-key = -}"\x01\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-iv = -}"\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-aad = -}"\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"
, {-input = -}"\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a"
, {-out = -}"\x1c\xa3\xb5\x41\x39\x6f\x19\x7a\x91\x2d\x27\x15\x70\xd1\xf5\x76\xde\xf1\xbe\x84\x42\x2a\xbb\xbe\x0b\x2d\x91\x21\x82\xbf\x7f\x17"
, {-taglen = -}16
, {-tag = -}"\x15\x2a\x05\xbb\x7e\x13\x5d\xbe\x93\x7f\xa0\x54\x7a\x8e\x74\xb6")
-- vectors 3
, ( {-key = -}"\x01\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-iv = -}"\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-aad = -}"\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"
, {-input = -}"\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a\x0a"
, {-out = -}"\xda\x35\xf6\x0a\x65\xc2\xa4\x6c\xb6\x6e\xb6\xf8\x1f\x0b\x9c\x74\x53\x4c\x97\x70\x36\xf7\xdf\x05\x6d\x00\xfe\xbf\xb4\xcb\xf5\x27"
, {-taglen = -}16
, {-tag = -}"\xb7\x76\x7c\x3b\x9e\xf1\xe2\xcb\xc9\x11\xf1\x9a\xdc\xfa\x35\x0d")
, ( {-key = -}"\x01\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-iv = -}"\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-aad = -}"\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76"
, {-input = -}"\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b"
, {-out = -}"\xe4\x42\xf8\xc4\xc6\x67\x84\x86\x4a\x5a\x6e\xc7\xe0\xca\x68\xac\x16\xbc\x5b\xbf\xf7\xd5\xf3\xfa\xf3\xb2\xcb\xb0\xa2\x14\xa1\x81"
, {-taglen = -}16
, {-tag = -}"\x5f\x63\xb8\xeb\x1d\x6f\xa8\x7a\xeb\x39\xa5\xf6\xd7\xed\xc3\x13")
, ( {-key = -}"\x01\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-iv = -}"\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-aad = -}"\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76\x76"
, {-input = -}"\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b"
, {-out = -}"\xe4\x42\xf8\xc4\xc6\x67\x84\x86\x4a\x5a\x6e\xc7\xe0\xca\x68\xac\x16\xbc\x5b\xbf\xf7\xd5\xf3\xfa\xf3\xb2\xcb\xb0\xa2\x14\xa1"
, {-taglen = -}16
, {-tag = -}"\x94\xd1\x47\xc3\xa2\xca\x93\xe9\x66\x93\x1e\x3b\xb3\xbb\x67\x01")
-- vector 6 tests 32-bit counter wrapping
, ( {-key = -}"\x01\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
, {-iv = -}"\xe8\x38\x84\x1d\x75\xae\x33\xb5\x4b\x51\x57\x89\xc9\x5f\xbe\x65"
, {-aad = -}"\x54\x68\x65\x20\x66\x69\x76\x65\x20\x62\x6f\x78\x69\x6e\x67\x20\x77\x69\x7a\x61\x72\x64\x73\x20\x6a\x75\x6d\x70\x20\x71\x75\x69\x63\x6b\x6c\x79\x2e"
, {-input = -}"\x54\x68\x65\x20\x71\x75\x69\x63\x6b\x20\x62\x72\x6f\x77\x6e\x20\x66\x6f\x78\x20\x6a\x75\x6d\x70\x73\x20\x6f\x76\x65\x72\x20\x74\x68\x65\x20\x6c\x61\x7a\x79\x20\x64\x6f\x67"
, {-out = -}"\x82\x31\x9e\x5a\x6a\x7f\x43\xd0\x42\x8c\xf1\x01\xcf\x0c\x75\xf1\x5d\xda\x4f\xa1\x28\x95\xcd\xd7\x7b\xd5\x42\x68\x2f\xcd\x10\x1b\x0c\x75\x05\x54\xf4\x2f\x2b\xf6\x69\x96\x29"
, {-taglen = -}16
, {-tag = -}"\x9a\xfa\xf4\xea\xae\x2e\x6f\x40\x00\xf4\x89\x77\xd0\x1e\xd5\x14")
]
vectors_aes256_enc :: [KATGCM]
vectors_aes256_enc =
[
( "\xb5\x2c\x50\x5a\x37\xd7\x8e\xda\x5d\xd3\x4f\x20\xc2\x25\x40\xea\x1b\x58\x96\x3c\xf8\xe5\xbf\x8f\xfa\x85\xf9\xf2\x49\x25\x05\xb4"
, "\x51\x6c\x33\x92\x9d\xf5\xa3\x28\x4f\xf4\x63\xd7"
, ""
, ""
, ""
, 16
, "\xbd\xc1\xac\x88\x4d\x33\x24\x57\xa1\xd2\x66\x4f\x16\x8c\x76\xf0")
, ( "\x78\xdc\x4e\x0a\xaf\x52\xd9\x35\xc3\xc0\x1e\xea\x57\x42\x8f\x00\xca\x1f\xd4\x75\xf5\xda\x86\xa4\x9c\x8d\xd7\x3d\x68\xc8\xe2\x23"
, "\xd7\x9c\xf2\x2d\x50\x4c\xc7\x93\xc3\xfb\x6c\x8a"
, "\xb9\x6b\xaa\x8c\x1c\x75\xa6\x71\xbf\xb2\xd0\x8d\x06\xbe\x5f\x36"
, ""
, ""
, 16
, "\x3e\x5d\x48\x6a\xa2\xe3\x0b\x22\xe0\x40\xb8\x57\x23\xa0\x6e\x76")
, ( "\xc3\xf1\x05\x86\xf2\x46\xaa\xca\xdc\xce\x37\x01\x44\x17\x70\xc0\x3c\xfe\xc9\x40\xaf\xe1\x90\x8c\x4c\x53\x7d\xf4\xe0\x1c\x50\xa0"
, "\x4f\x52\xfa\xa1\xfa\x67\xa0\xe5\xf4\x19\x64\x52"
, "\x46\xf9\xa2\x2b\x4e\x52\xe1\x52\x65\x13\xa9\x52\xdb\xee\x3b\x91\xf6\x95\x95\x50\x1e\x01\x77\xd5\x0f\xf3\x64\x63\x85\x88\xc0\x8d\x92\xfa\xb8\xc5\x8a\x96\x9b\xdc\xc8\x4c\x46\x8d\x84\x98\xc4\xf0\x63\x92\xb9\x9e\xd5\xe0\xc4\x84\x50\x7f\xc4\x8d\xc1\x8d\x87\xc4\x0e\x2e\xd8\x48\xb4\x31\x50\xbe\x9d\x36\xf1\x4c\xf2\xce\xf1\x31\x0b\xa4\xa7\x45\xad\xcc\x7b\xdc\x41\xf6"
, "\x79\xd9\x7e\xa3\xa2\xed\xd6\x50\x45\x82\x1e\xa7\x45\xa4\x47\x42"
, "\x56\x0c\xf7\x16\xe5\x61\x90\xe9\x39\x7c\x2f\x10\x36\x29\xeb\x1f"
, 16
, "\xff\x7c\x91\x24\x87\x96\x44\xe8\x05\x55\x68\x7d\x27\x3c\x55\xd8"
)
]
|
vincenthz/cryptonite
|
tests/KAT_AES/KATGCM.hs
|
bsd-3-clause
| 6,642
| 0
| 6
| 902
| 384
| 272
| 112
| 81
| 1
|
module Print3Flipped where
main :: IO ()
main = putStrLn greeting
  where
    -- Same string the original built with prefix-style (++).
    greeting = "hello" ++ " " ++ "world"
-- | The character at index 2 (0-based). Partial: errors on strings
-- shorter than three characters, matching the original behaviour.
thirdLetter :: [Char] -> Char
thirdLetter = (!! 2)
rvrs s = (++) (drop 9 s) $ (++) (drop 5 (take 9 s)) (take 5 s)
data Mood = Blah | Woot deriving Show
-- | Flip between the two moods.
changeMood :: Mood -> Mood
changeMood mood = case mood of
  Blah -> Woot
  Woot -> Blah
|
punitrathore/haskell-first-principles
|
src/print3Flipped.hs
|
bsd-3-clause
| 349
| 0
| 10
| 82
| 170
| 92
| 78
| 12
| 1
|
{-# LANGUAGE RankNTypes #-}
module Transformation where
import Data.Dynamic
import Expr
-- | Partially evaluate as much of the staged computation as possible:
-- any subterm whose components are all 'Static' is collapsed into a
-- single 'Static' node holding the computed value.
stage :: Typeable a => Expr a -> Expr a
stage x = case x of
  -- A pair: stage both components; when both reduce, 'cast' reconciles
  -- the dynamic type of (a0,b0) with the expected result type. On cast
  -- failure, keep the (partially) staged application instead.
  (Reserved Pair :$ a :$ b) ->
    let a' = stage a
        b' = stage b
    in case (a', b') of
      (Static sa a0, Static sb b0) -> case cast (a0,b0) of
        Just ret -> reStatic ret
        Nothing -> Reserved Pair :$ a' :$ b'
      _ -> Reserved Pair :$ a' :$ b'
  -- Function application: reduce when both function and argument are
  -- static.
  (f :$ a) ->
    let f' = stage f
        a' = stage a
    in case (f', a') of
      (Static sf f0, Static sa a0) -> reStatic (f0 a0)
      _ -> f' :$ a'
  Op1 o f' a ->
    let a' = stage a
    in case a' of
      (Static sa a0) -> reStatic (f' a0)
      _ -> Op1 o f' a'
  Op2 o f' a b ->
    let a' = stage a
        b' = stage b
    in case (a', b') of
      (Static sa a0, Static sb b0) -> reStatic (f' a0 b0)
      _ -> Op2 o f' a' b'
  -- Leaves (variables, already-static nodes, ...) are left untouched.
  _ -> x
  where
    -- Wrap a computed value back into a 'Static' node together with a
    -- printable representation.
    reStatic val = Static (reppr val) val
    -- Show the value directly when it is a Double; otherwise fall back to
    -- pretty-printing.
    -- NOTE(review): the fallback prints the whole outer expression 'x',
    -- not the value being wrapped — confirm this is intended.
    reppr :: forall a. Typeable a => a -> String
    reppr a = (show :: Double -> String) |||? const (ppr x) $ a
-- | Extract the result of an already-staged computation, if the whole
-- expression reduced to a 'Static' node.
runStatic :: Expr a -> Maybe a
runStatic e = case e of
  Static _ v -> Just v
  _          -> Nothing
-- | Like 'runStatic', but reports the irreducible expression on failure.
runStaticEither :: Expr a -> Either String a
runStaticEither e = case e of
  Static _ v -> Right v
  other      -> Left ("not static: " ++ ppr other)
-- | Run staging and extract the result, if the expression fully reduced.
evalStatic :: Typeable a => Expr a -> Maybe a
evalStatic expr = runStatic (stage expr)
-- | Like 'evalStatic', reporting the irreducible expression on failure.
evalStaticEither :: Typeable a => Expr a -> Either String a
evalStaticEither expr = runStaticEither (stage expr)
-- | Apply the function everywhere in an expression, inside-out; '%?'
-- applies it only where the node's type matches the function's domain.
everywhere :: (Typeable a, Typeable b) => (Expr b->Expr b)->Expr a -> Expr a
everywhere f x = case x of
  (Op1 o g a) -> f %? (Op1 o g (everywhere f a))
  (Op2 o g a b) -> f %? (Op2 o g (everywhere f a) (everywhere f b))
  (g :$ a) -> f %? ((everywhere f g) :$ (everywhere f a))
  _ -> f %? x
-- | Apply the polymorphic function everywhere in an expression,
-- inside-out: children are rewritten first, then the node itself.
everywhereP :: (Typeable a) => (forall b. Typeable b => Expr b->Expr b)->Expr a -> Expr a
everywhereP f x = case x of
  (Op1 o g a) -> f (Op1 o g (everywhereP f a))
  (Op2 o g a b) -> f (Op2 o g (everywhereP f a) (everywhereP f b))
  (g :$ a) -> f ((everywhereP f g) :$ (everywhereP f a))
  _ -> f x
-- | Apply the polymorphic function everywhere in an expression,
-- outside-in: the node is rewritten first, then the rewritten node's
-- children are recursed into.
everywhereP' :: (Typeable a) => (forall b. Typeable b => Expr b->Expr b)->Expr a -> Expr a
everywhereP' f x = let y = f x in case y of
  (Op1 o g a) -> (Op1 o g (everywhereP' f a))
  (Op2 o g a b) -> (Op2 o g (everywhereP' f a) (everywhereP' f b))
  (g :$ a) -> ((everywhereP' f g) :$ (everywhereP' f a))
  _ -> y
-- | Apply a transformation to both hand sides of a statement.
bhs :: (Expr a -> Expr b) -> Stmt a -> Stmt b
bhs g (l := r) = g l := g r
-- | Distribute function application over operators, outside-in:
-- an application of an Op1/Op2 term is pushed into its operand(s).
distributeApply :: Typeable a => Expr a -> Expr a
distributeApply = everywhereP' distributeApply1
-- | One step of 'distributeApply'. The semantic function is rewired so
-- that it ignores the operand slot (which is now already applied) and
-- uses the pre-applied value instead; the 'undefined' argument is never
-- inspected by the rewired function.
distributeApply1 :: Expr a -> Expr a
distributeApply1 x = case x of
  (Op1 o g a :$ y) -> (Op1 o (\a0 -> g (const a0) undefined) (a:$y) )
  (Op2 o g a b :$ y) -> (Op2 o (\a0 b0 -> g (const a0) (const b0) undefined) (a:$y) (b:$y) )
  _ -> x
|
nushio3/Paraiso
|
attic/newexp/Transformation.hs
|
bsd-3-clause
| 3,441
| 0
| 18
| 1,027
| 1,541
| 766
| 775
| 72
| 10
|
-- | Validate a document against a dtd.
module Text.XML.HaXml.Validate
( validate
, partialValidate
) where
import Prelude hiding (elem,rem,mod,sequence)
import qualified Prelude (elem)
import Text.XML.HaXml.Types
import Text.XML.HaXml.Namespaces
import Text.XML.HaXml.Combinators (multi,tag,iffind,literal,none,o)
import Text.XML.HaXml.XmlContent (attr2str)
import Data.Maybe (fromMaybe,isNothing,fromJust)
import Data.List (intersperse,nub,(\\))
import Data.Char (isSpace)
#if __GLASGOW_HASKELL__ >= 604 || __NHC__ >= 118 || defined(__HUGS__)
-- emulate older finite map interface using Data.Map, if it is available
import qualified Data.Map as Map
type FiniteMap a b = Map.Map a b
listToFM :: Ord a => [(a,b)] -> FiniteMap a b
listToFM = Map.fromList
lookupFM :: Ord a => FiniteMap a b -> a -> Maybe b
lookupFM = flip Map.lookup
#elif __GLASGOW_HASKELL__ >= 504 || __NHC__ > 114
-- real finite map, if it is available
import Data.FiniteMap
#else
-- otherwise, a very simple and inefficient implementation of a finite map
type FiniteMap a b = [(a,b)]
listToFM :: Eq a => [(a,b)] -> FiniteMap a b
listToFM = id
lookupFM :: Eq a => FiniteMap a b -> a -> Maybe b
lookupFM fm k = lookup k fm
#endif
-- | The subset of DTD information needed for validation, pre-digested
-- into lookup tables (gathered by 'simplifyDTD').
data SimpleDTD = SimpleDTD
  { elements :: FiniteMap QName ContentSpec -- ^ content model of elem
  , attributes :: FiniteMap (QName,QName) AttType -- ^ type of (elem,attr)
  , required :: FiniteMap QName [QName] -- ^ required attributes of elem
  , ids :: [(QName,QName)] -- ^ all (element,attr) with ID type
  , idrefs :: [(QName,QName)] -- ^ all (element,attr) with IDREF type
  }
-- | Project a parsed DTD down to the 'SimpleDTD' tables used by the
-- validator, keying element and attribute declarations for fast lookup.
simplifyDTD :: DocTypeDecl -> SimpleDTD
simplifyDTD (DTD _ _ decls) =
    SimpleDTD
      { elements = listToFM [ (name,content)
                            | Element (ElementDecl name content) <- decls ]
      , attributes = listToFM [ ((elem,attr),typ)
                              | AttList (AttListDecl elem attdefs) <- decls
                              , AttDef attr typ _ <- attdefs ]
        -- Be sure to look at all attribute declarations for each
        -- element, since we must merge them.  This implements the
        -- specification in that regard only; the specification's rules
        -- about how to merge multiple declarations for the same
        -- attribute are not considered by this implementation.
        -- See: http://www.w3.org/TR/REC-xml/#NT-AttlistDecl
      , required = listToFM [ (elem, concat [ [ attr | AttDef attr _ REQUIRED <- attdefs ]
                                            | AttList (AttListDecl elem' attdefs) <- decls
                                            , elem' == elem ]
                             )
                            | Element (ElementDecl elem _) <- decls ]
        -- (element, attribute) pairs whose attribute is declared with
        -- tokenized type ID; used later for uniqueness checking.
      , ids = [ (elem,attr)
              | Element (ElementDecl elem _) <- decls
              , AttList (AttListDecl name attdefs) <- decls
              , elem == name
              , AttDef attr (TokenizedType ID) _ <- attdefs ]
      , idrefs = [] -- not implemented
      }
-- | @cond `gives` x@ yields @[x]@ when @cond@ holds and @[]@ otherwise;
-- avoids if-then-else chains with empty else clauses when accumulating
-- error messages.
gives :: Bool -> a -> [a]
gives cond x
  | cond      = [x]
  | otherwise = []
-- | 'validate' takes a DTD and a tagged element, and returns a list of
--   errors in the document with respect to its DTD.
--
--   If you have several documents to validate against a single DTD,
--   then you will gain efficiency by freezing-in the DTD through partial
--   application, e.g. @checkMyDTD = validate myDTD@.
validate :: DocTypeDecl -> Element i -> [String]
validate dtd' elem = root dtd' elem ++ partialValidate dtd' elem
  where
    -- The only check beyond 'partialValidate': the document element's
    -- name must match the name declared in the DOCTYPE.
    root (DTD name _ _) (Elem name' _ _) =
        (name/=name') `gives` ("Document type should be <"++qname name
                               ++"> but appears to be <"++qname name'++">.")
-- | 'partialValidate' is like validate, except that it does not check that
-- the element type matches that of the DTD's root element.
partialValidate :: DocTypeDecl -> Element i -> [String]
partialValidate dtd' elem = valid elem ++ checkIDs elem
  where
    -- Digest the DTD once; shared by all checks below.
    dtd = simplifyDTD dtd'
    -- Check a single element and, recursively, its element children.
    valid (Elem name attrs contents) =
        -- is the element defined in the DTD?
        let spec = lookupFM (elements dtd) name in
        (isNothing spec) `gives` ("Element <"++qname name++"> not known.")
        -- is each attribute mentioned only once?
        ++ (let dups = duplicates (map (qname . fst) attrs) in
            not (null dups) `gives`
                ("Element <"++qname name++"> has duplicate attributes: "
                 ++concat (intersperse "," dups)++"."))
        -- does each attribute belong to this element? value is in range?
        ++ concatMap (checkAttr name) attrs
        -- are all required attributes present?
        ++ concatMap (checkRequired name attrs)
                     (fromMaybe [] (lookupFM (required dtd) name))
        -- are its children in a permissible sequence?
        ++ checkContentSpec name (fromMaybe ANY spec) contents
        -- now recursively check the element children
        ++ concatMap valid [ elm | CElem elm _ <- contents ]
    -- One attribute: known for this element, and (for enumerated types)
    -- within the declared range.  The 'fromJust' is guarded by the
    -- isNothing test just above it.
    checkAttr elm (attr, val) =
        let typ = lookupFM (attributes dtd) (elm,attr)
            attval = attr2str val in
        if isNothing typ then ["Attribute \""++qname attr
                              ++"\" not known for element <"++qname elm++">."]
        else
          case fromJust typ of
            EnumeratedType e ->
                case e of
                  Enumeration es ->
                      (not (attval `Prelude.elem` es)) `gives`
                          ("Value \""++attval++"\" of attribute \""
                           ++qname attr++"\" in element <"++qname elm
                           ++"> is not in the required enumeration range: "
                           ++unwords es)
                  _ -> []
            _ -> []
    -- A REQUIRED attribute must actually appear on the element.
    checkRequired elm attrs req =
        (not (req `Prelude.elem` map fst attrs)) `gives`
            ("Element <"++qname elm++"> requires the attribute \""++qname req
             ++"\" but it is missing.")
    -- Dispatch on the element's declared content model.
    checkContentSpec _elm ANY _ = []
    checkContentSpec _elm EMPTY [] = []
    checkContentSpec elm EMPTY (_:_) =
        ["Element <"++qname elm++"> is not empty but should be."]
    checkContentSpec elm (Mixed PCDATA) cs = concatMap (checkMixed elm []) cs
    checkContentSpec elm (Mixed (PCDATAplus names)) cs =
        concatMap (checkMixed elm names) cs
    checkContentSpec elm (ContentSpec cp) cs = excludeText elm cs ++
        (let (errs,rest) = checkCP elm cp (flatten cs) in
         case rest of [] -> errs
                      _ -> errs++["Element <"++qname elm++"> contains extra "
                                  ++"elements beyond its content spec."])
    -- Mixed content: only the listed child elements are permitted.
    checkMixed elm permitted (CElem (Elem name _ _) _)
      | not (name `Prelude.elem` permitted) =
          ["Element <"++qname elm++"> contains an element <"++qname name
           ++"> but should not."]
    checkMixed _elm _permitted _ = []
    -- The sequence of child-element names, ignoring non-element content.
    flatten (CElem (Elem name _ _) _: cs) = name: flatten cs
    flatten (_: cs) = flatten cs
    flatten [] = []
    -- Element-only content may not contain text or references
    -- (whitespace-only strings are tolerated).
    excludeText elm (CElem _ _: cs) = excludeText elm cs
    excludeText elm (CMisc _ _: cs) = excludeText elm cs
    excludeText elm (CString _ s _: cs) | all isSpace s = excludeText elm cs
    excludeText elm (_:_) =
        ["Element <"++qname elm++"> contains text/references but should not."]
    excludeText _elm [] = []
    -- This is a little parser really. Returns any errors, plus the remainder
    -- of the input string.
    checkCP :: QName -> CP -> [QName] -> ([String],[QName])
    checkCP elm cp@(TagName _ None) [] = (cpError elm cp, [])
    checkCP elm cp@(TagName n None) (n':ns)
      | n==n' = ([], ns)
      | otherwise = (cpError elm cp, n':ns)
    checkCP _ (TagName _ Query) [] = ([],[])
    checkCP _ (TagName n Query) (n':ns)
      | n==n' = ([], ns)
      | otherwise = ([], n':ns)
    checkCP _ (TagName _ Star) [] = ([],[])
    checkCP elm (TagName n Star) (n':ns)
      | n==n' = checkCP elm (TagName n Star) ns
      | otherwise = ([], n':ns)
    checkCP elm cp@(TagName _ Plus) [] = (cpError elm cp, [])
    checkCP elm cp@(TagName n Plus) (n':ns)
      | n==n' = checkCP elm (TagName n Star) ns
      | otherwise = (cpError elm cp, n':ns)
    -- omit this clause, to permit (a?|b?) as a valid but empty choice
    -- checkCP elem cp@(Choice cps None) [] = (cpError elem cp, [])
    checkCP elm cp@(Choice cps None) ns =
        let next = choice elm ns cps in
        if null next then (cpError elm cp, ns)
        else ([], head next) -- choose the first alternative with no errors
    checkCP _ (Choice _ Query) [] = ([],[])
    checkCP elm (Choice cps Query) ns =
        let next = choice elm ns cps in
        if null next then ([],ns)
        else ([], head next)
    checkCP _ (Choice _ Star) [] = ([],[])
    checkCP elm (Choice cps Star) ns =
        let next = choice elm ns cps in
        if null next then ([],ns)
        else checkCP elm (Choice cps Star) (head next)
    checkCP elm cp@(Choice _ Plus) [] = (cpError elm cp, [])
    checkCP elm cp@(Choice cps Plus) ns =
        let next = choice elm ns cps in
        if null next then (cpError elm cp, ns)
        else checkCP elm (Choice cps Star) (head next)
    -- omit this clause, to permit (a?,b?) as a valid but empty sequence
    -- checkCP elem cp@(Seq cps None) [] = (cpError elem cp, [])
    checkCP elm cp@(Seq cps None) ns =
        let (errs,next) = sequence elm ns cps in
        if null errs then ([],next)
        else (cpError elm cp++errs, ns)
    checkCP _ (Seq _ Query) [] = ([],[])
    checkCP elm (Seq cps Query) ns =
        let (errs,next) = sequence elm ns cps in
        if null errs then ([],next)
        else ([], ns)
    checkCP _ (Seq _ Star) [] = ([],[])
    checkCP elm (Seq cps Star) ns =
        let (errs,next) = sequence elm ns cps in
        if null errs then checkCP elm (Seq cps Star) next
        else ([], ns)
    checkCP elm cp@(Seq _ Plus) [] = (cpError elm cp, [])
    checkCP elm cp@(Seq cps Plus) ns =
        let (errs,next) = sequence elm ns cps in
        if null errs then checkCP elm (Seq cps Star) next
        else (cpError elm cp++errs, ns)
    choice elm ns cps = -- return only those parses that don't give any errors
        [ rem | ([],rem) <- map (\cp-> checkCP elm (definite cp) ns) cps ]
        ++ [ ns | all possEmpty cps ]
        -- 'definite' strips the "may be absent" part of each alternative
        -- so that a Choice branch must consume at least one name to count.
        where definite (TagName n Query) = TagName n None
              definite (Choice cps Query) = Choice cps None
              definite (Seq cps Query) = Seq cps None
              definite (TagName n Star) = TagName n Plus
              definite (Choice cps Star) = Choice cps Plus
              definite (Seq cps Star) = Seq cps Plus
              definite x = x
              possEmpty (TagName _ mod) = mod `Prelude.elem` [Query,Star]
              possEmpty (Choice cps None) = all possEmpty cps
              possEmpty (Choice _ mod) = mod `Prelude.elem` [Query,Star]
              possEmpty (Seq cps None) = all possEmpty cps
              possEmpty (Seq _ mod) = mod `Prelude.elem` [Query,Star]
    sequence elm ns cps = -- accumulate errors down the sequence
        foldl (\(es,ns) cp-> let (es',ns') = checkCP elm cp ns
                             in (es++es', ns'))
              ([],ns) cps
    -- All attribute values of type ID in the whole document must be
    -- unique.  The 'undefined' annotation on the synthetic content item
    -- is never forced by the combinator filters used here.
    checkIDs elm =
        let celem = CElem elm undefined
            showAttr a = iffind (printableName a) literal none
            idElems = concatMap (\(name, at)->
                                     multi (showAttr at `o`
                                            tag (printableName name))
                                           celem)
                                (ids dtd)
            badIds = duplicates (map (\(CString _ s _)->s) idElems)
        in not (null badIds) `gives`
               ("These attribute values of type ID are not unique: "
                ++concat (intersperse "," badIds)++".")
-- | Standard error message for a content-particle mismatch.
cpError :: QName -> CP -> [String]
cpError el c =
  ["Element <" ++ qname el ++ "> should contain " ++ display c ++ " but does not."]
-- | Render a content particle back into DTD-like syntax for messages.
display :: CP -> String
display cp = case cp of
  TagName name m -> qname name ++ modifier m
  Choice cps m   -> "(" ++ concat (intersperse "|" (map display cps))
                    ++ ")" ++ modifier m
  Seq cps m      -> "(" ++ concat (intersperse "," (map display cps))
                    ++ ")" ++ modifier m
-- | The occurrence-indicator suffix of a content particle.
modifier :: Modifier -> String
modifier m = case m of
  None  -> ""
  Query -> "?"
  Star  -> "*"
  Plus  -> "+"
-- | Every occurrence of an element beyond its first, in their original
-- relative order (e.g. @duplicates "abcabca" == "abca"@).
duplicates :: Eq a => [a] -> [a]
duplicates ys = removeFirsts (nub ys) ys
  where
    -- Delete one occurrence of each distinct element; what survives is
    -- exactly the duplicated occurrences.
    removeFirsts []     rest = rest
    removeFirsts (d:ds) rest = removeFirsts ds (dropFirst d rest)
    dropFirst _ [] = []
    dropFirst d (z:zs)
      | d == z    = zs
      | otherwise = z : dropFirst d zs
-- | Printable form of a qualified name, used in all error messages.
qname :: QName -> String
qname = printableName
|
Ian-Stewart-Binks/courseography
|
dependencies/HaXml-1.25.3/src/Text/XML/HaXml/Validate.hs
|
gpl-3.0
| 13,152
| 0
| 24
| 4,394
| 4,082
| 2,132
| 1,950
| 214
| 56
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>Online Menu | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Localizar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
brunoqc/zap-extensions
|
src/org/zaproxy/zap/extension/onlineMenu/resources/help_pt_BR/helpset_pt_BR.hs
|
apache-2.0
| 976
| 80
| 66
| 160
| 415
| 210
| 205
| -1
| -1
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.IO where
import Data.ByteString.Builder (Builder)
import Data.ByteString.Builder.Extra (runBuilder, Next(Done, More, Chunk))
import Network.Wai.Handler.Warp.Buffer
import Network.Wai.Handler.Warp.Imports
import Network.Wai.Handler.Warp.Types
toBufIOWith :: Buffer -> BufSize -> (ByteString -> IO ()) -> Builder -> IO ()
toBufIOWith buf !size io builder = loop firstWriter
where
firstWriter = runBuilder builder
runIO len = bufferIO buf len io
loop writer = do
(len, signal) <- writer buf size
case signal of
Done -> runIO len
More minSize next
| size < minSize -> error "toBufIOWith: BufferFull: minSize"
| otherwise -> do
runIO len
loop next
Chunk bs next -> do
runIO len
io bs
loop next
|
creichert/wai
|
warp/Network/Wai/Handler/Warp/IO.hs
|
mit
| 969
| 0
| 15
| 306
| 266
| 139
| 127
| 25
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : TestFramework
-- Copyright : (c) 2008 Benedikt Huber
-- License : BSD-style
-- Maintainer : benedikt.huber@gmail.com
-- Portability : portable
--
-- This module provides a small framework for testing IO - based stuff.
-------------------------------------------------------------------------------------------------------
module Language.C.Test.Framework (
-- * Test descriptions
Test(..),testTemplate,
-- * Test results
TestResult(..),initializeTestResult,setTestStatus,
-- * Status of a test
TestStatus(..),testError,isTestError,
testFailure,testFailNoReport,testFailWithReport,
testOk,testOkNoReport,testOkWithReport,testOkUntimed,isTestOk,
-- * Test runs, i.e. a sequence of consecutive (usually dependent) tests of a single test object
TestRun(..),hasTestResults,initFailure,emptyTestResults,insertTest,
-- ReExport pretty from the Language.C library
Pretty(..),
-- ReExport the formatting stuff
module Language.C.Test.Measures,
)
where
import Control.Monad.Error
import Data.Maybe
import Data.Map (Map)
import qualified Data.Map as Map
import Text.PrettyPrint
import Language.C.Pretty
import Language.C.Test.Measures
-- =====================
-- = Test descriptions =
-- =====================
data Test = Test
{
testName :: String,
testDescr :: String,
preferredScale :: MetricScale,
inputUnit :: UnitDescr
}
deriving (Show,Read)
testTemplate :: String -> String -> MetricScale -> UnitDescr -> Test
testTemplate testname testdescr preferredscale inputdim =
Test testname testdescr preferredscale inputdim
-- ================
-- = Test results =
-- ================
-- | Result of a test
data TestResult =
TestResult {
testInfo :: Test,
testArgs :: [String],
testStatus :: TestStatus
}
deriving (Show,Read)
initializeTestResult :: Test -> [String] -> TestResult
initializeTestResult t args = TestResult t args (testError "not exectued")
setTestStatus :: TestResult -> TestStatus -> TestResult
setTestStatus testresult status = testresult { testStatus = status }
-- | Status of a test
data TestStatus =
TestError String
| TestFailure String (Maybe FilePath)
| TestOk (Maybe PerfMeasure) (Maybe FilePath)
deriving (Show,Read)
testError :: String -> TestStatus
testError = TestError
isTestError :: TestStatus -> Bool
isTestError (TestError _) = True
isTestError _ = False
testFailure :: String -> (Maybe FilePath) -> TestStatus
testFailure errMsg mReport = TestFailure errMsg mReport
testFailNoReport :: String -> TestStatus
testFailNoReport errMsg = testFailure errMsg Nothing
testFailWithReport :: String -> FilePath -> TestStatus
testFailWithReport errMsg report = testFailure errMsg (Just report)
testOk :: PerfMeasure -> Maybe FilePath -> TestStatus
testOk measure report = TestOk (Just measure) report
testOkUntimed :: Maybe FilePath -> TestStatus
testOkUntimed report = TestOk Nothing report
testOkNoReport :: PerfMeasure -> TestStatus
testOkNoReport m = testOk m Nothing
testOkWithReport :: PerfMeasure -> FilePath -> TestStatus
testOkWithReport m r = testOk m (Just r)
isTestOk :: TestStatus -> Bool
isTestOk (TestOk _ _) = True
isTestOk _ = False
formatInputSize :: (Real a) => Test -> a -> String
formatInputSize testinfo q = formatUnits q (preferredScale testinfo) (inputUnit testinfo)
instance Pretty TestResult where
pretty (TestResult testinfo testargs teststatus) =
pretty' ( text (testName testinfo) <+> hsep (map text testargs) ) teststatus
where
pretty' ctx (TestError errMsg) =
ctx <+> text ("ERROR: "++errMsg)
pretty' ctx (TestFailure errMsg report) =
ctx <+> text ("FAILED: ")
$+$ (nest 4 . vcat . catMaybes)
[ Just (ppErrorMessage errMsg),
fmap (ppFileRef "report") report ]
pretty' ctx (TestOk measure report) =
ctx <+> text "succeeded" <+> stats
$+$ (nest 4 . vcat . catMaybes)
[ fmap (ppFileRef "result") report ]
where
stats =
case measure of
Nothing -> empty
Just (PerfMeasure (inpsize,ttime)) | ttime == 0 -> empty
| otherwise ->
parens$
text (formatInputSize testinfo inpsize ++ " in " ++ formatSeconds ttime ++ ", ")
<+> text (formatUnitsPerTime (inpsize `per` ttime) (preferredScale testinfo) (inputUnit testinfo) (scaleSecs Unit))
ppErrorMessage :: String -> Doc
ppErrorMessage = vcat . map text . filter (not . null) . lines
ppFileRef :: String -> String -> Doc
ppFileRef info file = text $ "See "++info++" file: `"++file++"'"
-- =============
-- = Test Runs =
-- =============
-- | Result of a parser test run
data TestRun =
FatalError {
fatalErrMsg :: String,
runArgs :: [String]
}
| InitFailure {
initFailMsg :: String,
runArgs :: [String]
}
| TestResults {
testObject :: String,
testInputFiles :: [FilePath],
testResults :: Map String TestResult
}
deriving (Show,Read)
hasTestResults :: TestRun -> Bool
hasTestResults (TestResults _ _ _) = True
hasTestResults _ = False
instance Pretty TestRun where
pretty (FatalError { fatalErrMsg = msg, runArgs = args}) =
text ("Test aborted with fatal error: "++msg) <+> brackets (text "$CC"<+>hsep (map text args))
pretty (InitFailure { initFailMsg = msg, runArgs = args }) =
text ("Test initialization failed: "++msg) <+> brackets (text "$CC"<+>hsep (map text args))
pretty tr = vcat $ map pretty (Map.elems $ testResults tr)
initFailure :: String -> [String] -> TestRun
initFailure msg args =
InitFailure { runArgs = args, initFailMsg = msg }
emptyTestResults :: String -> [FilePath] -> TestRun
emptyTestResults obj inpFs = TestResults { testObject = obj, testInputFiles = inpFs, testResults = Map.empty }
-- | Insert a test
insertTest :: TestResult -> TestRun -> TestRun
insertTest _ (InitFailure _ _) = error "insertTest: initialization failed"
insertTest result trun = trun { testResults = Map.insert (testName $ testInfo result) result (testResults trun) }
|
micknelso/language-c
|
test-framework/Language/C/Test/Framework.hs
|
bsd-3-clause
| 6,190
| 0
| 21
| 1,247
| 1,664
| 907
| 757
| 119
| 1
|
module Test17 where
f x = (case x of
10 -> y + x + r
where
y = 10
f = 25
_ -> x) + x
where
r = 56
|
mpickering/HaRe
|
old/testing/refacSlicing/Test17.hs
|
bsd-3-clause
| 194
| 0
| 11
| 128
| 62
| 34
| 28
| 7
| 2
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-2012
Note [Unarisation]
~~~~~~~~~~~~~~~~~~
The idea of this pass is to translate away *all* unboxed-tuple binders. So for example:
f (x :: (# Int, Bool #)) = f x + f (# 1, True #)
==>
f (x1 :: Int) (x2 :: Bool) = f x1 x2 + f 1 True
It is important that we do this at the STG level and NOT at the core level
because it would be very hard to make this pass Core-type-preserving.
STG fed to the code generators *must* be unarised because the code generators do
not support unboxed tuple binders natively.
Note [Unarisation and arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because of unarisation, the arity that will be recorded in the generated info table
for an Id may be larger than the idArity. Instead we record what we call the RepArity,
which is the Arity taking into account any expanded arguments, and corresponds to
the number of (possibly-void) *registers* arguments will arrive in.
-}
{-# LANGUAGE CPP #-}
module UnariseStg (unarise) where
#include "HsVersions.h"
import CoreSyn
import StgSyn
import VarEnv
import UniqSupply
import Id
import MkId (realWorldPrimId)
import Type
import TysWiredIn
import DataCon
import VarSet
import OccName
import Name
import Util
import Outputable
import BasicTypes
-- | A mapping from unboxed-tuple binders to the Ids they were expanded to.
--
-- INVARIANT: Ids in the range don't have unboxed tuple types.
--
-- Those in-scope variables without unboxed-tuple types are not present in
-- the domain of the mapping at all.
type UnariseEnv = VarEnv [Id]
ubxTupleId0 :: Id
ubxTupleId0 = dataConWorkId (tupleDataCon Unboxed 0)
unarise :: UniqSupply -> [StgBinding] -> [StgBinding]
unarise us binds = zipWith (\us -> unariseBinding us init_env) (listSplitUniqSupply us) binds
where -- See Note [Nullary unboxed tuple] in Type.hs
init_env = unitVarEnv ubxTupleId0 [realWorldPrimId]
unariseBinding :: UniqSupply -> UnariseEnv -> StgBinding -> StgBinding
unariseBinding us rho bind = case bind of
StgNonRec x rhs -> StgNonRec x (unariseRhs us rho rhs)
StgRec xrhss -> StgRec $ zipWith (\us (x, rhs) -> (x, unariseRhs us rho rhs))
(listSplitUniqSupply us) xrhss
unariseRhs :: UniqSupply -> UnariseEnv -> StgRhs -> StgRhs
unariseRhs us rho rhs = case rhs of
StgRhsClosure ccs b_info fvs update_flag srt args expr
-> StgRhsClosure ccs b_info (unariseIds rho fvs) update_flag
(unariseSRT rho srt) args' (unariseExpr us' rho' expr)
where (us', rho', args') = unariseIdBinders us rho args
StgRhsCon ccs con args
-> StgRhsCon ccs con (unariseArgs rho args)
------------------------
unariseExpr :: UniqSupply -> UnariseEnv -> StgExpr -> StgExpr
unariseExpr _ rho (StgApp f args)
| null args
, UbxTupleRep tys <- repType (idType f)
= -- Particularly important where (##) is concerned
-- See Note [Nullary unboxed tuple]
StgConApp (tupleDataCon Unboxed (length tys))
(map StgVarArg (unariseId rho f))
| otherwise
= StgApp f (unariseArgs rho args)
unariseExpr _ _ (StgLit l)
= StgLit l
unariseExpr _ rho (StgConApp dc args)
| isUnboxedTupleCon dc = StgConApp (tupleDataCon Unboxed (length args')) args'
| otherwise = StgConApp dc args'
where
args' = unariseArgs rho args
unariseExpr _ rho (StgOpApp op args ty)
= StgOpApp op (unariseArgs rho args) ty
unariseExpr us rho (StgLam xs e)
= StgLam xs' (unariseExpr us' rho' e)
where
(us', rho', xs') = unariseIdBinders us rho xs
unariseExpr us rho (StgCase e case_lives alts_lives bndr srt alt_ty alts)
= StgCase (unariseExpr us1 rho e) (unariseLives rho case_lives)
(unariseLives rho alts_lives) bndr (unariseSRT rho srt)
alt_ty' alts'
where
(us1, us2) = splitUniqSupply us
(alt_ty', alts') = unariseAlts us2 rho alt_ty bndr (repType (idType bndr)) alts
unariseExpr us rho (StgLet bind e)
= StgLet (unariseBinding us1 rho bind) (unariseExpr us2 rho e)
where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgLetNoEscape live_in_let live_in_bind bind e)
= StgLetNoEscape (unariseLives rho live_in_let) (unariseLives rho live_in_bind)
(unariseBinding us1 rho bind) (unariseExpr us2 rho e)
where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgTick tick e)
= StgTick tick (unariseExpr us rho e)
------------------------
unariseAlts :: UniqSupply -> UnariseEnv -> AltType -> Id -> RepType -> [StgAlt] -> (AltType, [StgAlt])
unariseAlts us rho alt_ty _ (UnaryRep _) alts
= (alt_ty, zipWith (\us alt -> unariseAlt us rho alt) (listSplitUniqSupply us) alts)
unariseAlts us rho _ bndr (UbxTupleRep tys) ((DEFAULT, [], [], e) : _)
= (UbxTupAlt n, [(DataAlt (tupleDataCon Unboxed n), ys, uses, unariseExpr us2' rho' e)])
where
(us2', rho', ys) = unariseIdBinder us rho bndr
uses = replicate (length ys) (not (isDeadBinder bndr))
n = length tys
unariseAlts us rho _ bndr (UbxTupleRep _) [(DataAlt _, ys, uses, e)]
= (UbxTupAlt n, [(DataAlt (tupleDataCon Unboxed n), ys', uses', unariseExpr us2' rho'' e)])
where
(us2', rho', ys', uses') = unariseUsedIdBinders us rho ys uses
rho'' = extendVarEnv rho' bndr ys'
n = length ys'
unariseAlts _ _ _ _ (UbxTupleRep _) alts
= pprPanic "unariseExpr: strange unboxed tuple alts" (ppr alts)
--------------------------
unariseAlt :: UniqSupply -> UnariseEnv -> StgAlt -> StgAlt
unariseAlt us rho (con, xs, uses, e)
= (con, xs', uses', unariseExpr us' rho' e)
where
(us', rho', xs', uses') = unariseUsedIdBinders us rho xs uses
------------------------
unariseSRT :: UnariseEnv -> SRT -> SRT
unariseSRT _ NoSRT = NoSRT
unariseSRT rho (SRTEntries ids) = SRTEntries (concatMapVarSet (unariseId rho) ids)
unariseSRT _ (SRT {}) = panic "unariseSRT"
unariseLives :: UnariseEnv -> StgLiveVars -> StgLiveVars
unariseLives rho ids = concatMapVarSet (unariseId rho) ids
unariseArgs :: UnariseEnv -> [StgArg] -> [StgArg]
unariseArgs rho = concatMap (unariseArg rho)
unariseArg :: UnariseEnv -> StgArg -> [StgArg]
unariseArg rho (StgVarArg x) = map StgVarArg (unariseId rho x)
unariseArg _ (StgLitArg l) = [StgLitArg l]
unariseIds :: UnariseEnv -> [Id] -> [Id]
unariseIds rho = concatMap (unariseId rho)
unariseId :: UnariseEnv -> Id -> [Id]
unariseId rho x
| Just ys <- lookupVarEnv rho x
= ASSERT2( case repType (idType x) of UbxTupleRep _ -> True; _ -> x == ubxTupleId0
, text "unariseId: not unboxed tuple" <+> ppr x )
ys
| otherwise
= ASSERT2( case repType (idType x) of UbxTupleRep _ -> False; _ -> True
, text "unariseId: was unboxed tuple" <+> ppr x )
[x]
unariseUsedIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> [Bool]
-> (UniqSupply, UnariseEnv, [Id], [Bool])
unariseUsedIdBinders us rho xs uses
= case mapAccumL2 do_one us rho (zipEqual "unariseUsedIdBinders" xs uses) of
(us', rho', xs_usess) -> uncurry ((,,,) us' rho') (unzip (concat xs_usess))
where
do_one us rho (x, use) = third3 (map (flip (,) use)) (unariseIdBinder us rho x)
unariseIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> (UniqSupply, UnariseEnv, [Id])
unariseIdBinders us rho xs = third3 concat $ mapAccumL2 unariseIdBinder us rho xs
unariseIdBinder :: UniqSupply -> UnariseEnv -> Id -> (UniqSupply, UnariseEnv, [Id])
unariseIdBinder us rho x = case repType (idType x) of
UnaryRep _ -> (us, rho, [x])
UbxTupleRep tys -> let (us0, us1) = splitUniqSupply us
ys = unboxedTupleBindersFrom us0 x tys
rho' = extendVarEnv rho x ys
in (us1, rho', ys)
unboxedTupleBindersFrom :: UniqSupply -> Id -> [UnaryType] -> [Id]
unboxedTupleBindersFrom us x tys = zipWith (mkSysLocal fs) (uniqsFromSupply us) tys
where fs = occNameFS (getOccName x)
concatMapVarSet :: (Var -> [Var]) -> VarSet -> VarSet
concatMapVarSet f xs = mkVarSet [x' | x <- varSetElems xs, x' <- f x]
|
urbanslug/ghc
|
compiler/simplStg/UnariseStg.hs
|
bsd-3-clause
| 7,994
| 0
| 13
| 1,695
| 2,493
| 1,295
| 1,198
| 132
| 3
|
module T7963a where
import Prelude ()
import GHC.List
unlines = concat
|
urbanslug/ghc
|
testsuite/tests/rename/should_compile/T7963a.hs
|
bsd-3-clause
| 74
| 0
| 4
| 14
| 20
| 13
| 7
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Network.UTP where
import Network.Socket as NS
import Network.Socket.ByteString as NSB
import Data.Word
import Data.Serialize as DS
import Control.Applicative
import Control.Monad
import Prelude as P
import Data.ByteString.Char8 as DBC
import Data.Bits
import Data.Maybe
import Data.Tuple
import Control.Concurrent.STM
import Data.ByteString as BS
import System.Random
import Data.Dequeue as DQ
import Control.Monad.IO.Class
import Data.Time.Clock.POSIX
import Control.Concurrent
import System.Log.Logger
import System.Log.Handler.Syslog
import Control.Concurrent.STM.TChan
import Control.Concurrent
import Control.Monad.Trans.Either
import Data.Either.Combinators
import Control.Concurrent.Async
import Control.Exception
import Data.Typeable
import Data.HashTable.IO
import Data.Map.Strict as Map
{-
TODO: missing features
** refactor interface code to be like
makeConn :: Settings -> ConnHandler -> IO ()
instead of returning a connection. this way you execute the handler on your
own terms and are able to do resource cleanup on exceptions or when it's done
therefore, stop using forkIO and start using async for worker threads
** fix the resend policy; now you resend every 500ms regardless of when is the last
time you received smth from the other side
** packet loss
** window resizing
** rtt based timeouts
** keep alive
** circular buffer implementation for efficient send recv
** testing with unreliable networks
-}
{-
CIRCULAR BUFFER IDEA
a circular buffer can achieve the adding and consuming of the buffer
no problem about providing a stream based interface here.
need to work with some sort of mutable bytestrings.
buffer expansion/reduction means: copy everything in the new buffer
if bigger all good
if smaller ? do you just dump packets
Acking ahead
maybe it's problematic if an ack for a packet inbetween other packets comes in. how do you deal with the whole without copying a large amount of data?
without selective ack - the above is a non issue
with selective ack - you still need to wait for all packets to provide ordered data receive.so it's not reall an issue -
there's a haskell package for UTP by sam truszan
but that code is just too smart for me
EXCEPTION HANDLING AND CLOSING
exceptions:
udp connection is dropped
happens in recv thread or send thread. set conn state to ERR
next time a user calls send or recv he gets the error
< quite complicated >
timeout?
leave it to the connection user to handle; block indefinetly if needed
malformed packets coming our way -> just ignore them
CLOSURE
need to kill associated threads
recv thread
ack thread
resend thread
keep track of their threadId in the ConnData
close socket as well
-}
-- logging
utplogger = "utplog"
-- packet parsing
type SeqNum = Word16
type AckNum = Word16
type Time = Word32
type ConnectionId = Word16
type SockSend = ByteString -> IO Int
data PacketType = ST_DATA | ST_FIN | ST_STATE | ST_RESET | ST_SYN
deriving (Eq, Show)
packetTypeMap = P.zip [0, 1..] [ST_DATA, ST_FIN, ST_STATE, ST_RESET, ST_SYN]
data Packet = Packet {
packetType :: PacketType,
version :: Word8,
extensions :: Word8, -- ignore it for now
connId :: Word16,
time :: Time,
timeDiff :: Time,
windowSize :: Word32,
seqNum :: SeqNum,
ackNum :: AckNum,
payload :: ByteString
} deriving (Show, Eq)
defPacket = Packet ST_FIN 0 0 0 0 0 0 0 0 ""
-- CONSTANTS
headerSize = BS.length $ DS.encode $ Packet ST_DATA 0 0 0 0 0 0 0 0 ""
defWindowSize = 500
-- total size of the receive buffer
-- TODO: figure out what to do with this; currently not used
recvBufferSize = 10000
-- how many bytes to read in a socket receive operation
-- TODO: really need to have some logic behind this value
-- currently it's the value found in libtorrent
-- if a packet is bigger than this protocol logic fails
-- since packet
recvSize = 4096
versionNum = 1
defResendTimeout = 5 * 10 ^ 5
packetSize p = headerSize + (BS.length $ payload p)
-- how many duplicate acks to receive before marking a packet as lost
lossDupAcks = 3
-- the connection returned
data Connection = Conn {
send :: ByteString -> IO ()
, recv :: Int -> IO ByteString
, close :: IO ()
}
data ConnStage = CS_SYN_SENT | CS_CONNECTED | ERROR deriving (Show, Eq)
data ConnData = ConnData {
connState :: TVar ConnState
, inBuf :: TVar (BankersDequeue ByteString)
, outBuf :: TVar (BankersDequeue Packet)
, connSocket :: Socket
, sockSend :: SockSend
, connIdRecv ::ConnectionId
, connIdSend ::ConnectionId
}
data ConnState = ConnState {
connSeqNum :: SeqNum
, connAckNum :: AckNum
, maxWindow :: Word32
, peerMaxWindow :: Word32 -- window size advertised by peer
, replyMicro :: Time
, connStage :: ConnStage
, dupAcks :: Int
, lastAckRecv :: AckNum
}
data UTPException = FailedHandshake deriving (Show, Typeable)
instance Exception UTPException
{- server call
not optimized to handle a large number of calls
good enough to handle p2p apps with something
like at most 100 connections
TODO: figure out how to graciously kill this.
It has a thread listening on the socket and dispatching
incoming udp packets to the responsible threads
Potential optimization: to fetch the right socket distribute everything
over a fixed size hashtable with TVars containing sockets (lists, trees)
no single contention place in that situation
-}
utpListen :: Socket -> (SockAddr -> Connection -> IO()) -> IO ()
utpListen sock handle = do
connMapVar <- newTVarIO Map.empty
forever $ do
(packet, sockAddr) <- recvPacket sock recvSize
connMap <- atomically $ readTVar connMapVar
case (Map.lookup sockAddr connMap) of
Just inChan -> atomically $ writeTChan inChan packet
Nothing -> do
-- new connection - fork thread to handle this
forkIO $ do
inChan <- atomically $ do
inChan <- newTChan
modifyTVar connMapVar (Map.insert sockAddr inChan)
writeTChan inChan packet -- push the first message in the chan
return inChan
(serverHandshake inChan sock sockAddr >>= handle sockAddr)
`finally`
(atomically $ modifyTVar connMapVar (Map.delete sockAddr) )
return ()
serverHandshake :: TChan Packet -> Socket -> SockAddr -> IO Connection
serverHandshake packChan sock sockAddr = do
packet <- atomically $ readTChan packChan
when (packetType packet /= ST_SYN) $ throwIO FailedHandshake
debugM utplogger $ "received syn packet " ++ (show packet)
g <- newStdGen
let randNum = P.head $ (randoms g :: [Word16])
conn <- initConn (connId packet) (connId packet + 1) randNum (seqNum packet)
sock (\bs -> NSB.sendTo sock bs sockAddr)
-- send ack for syn
sendPacket (makePacket ST_STATE "" conn) conn
-- only the first ack increments the sequence number
atomically $ modifyTVar (connState conn) (\s -> s {connSeqNum = connSeqNum s + 1})
let recvF = atomically $ readTChan packChan
forkIO $ setInterval defResendTimeout $ resendOutgoing conn
-- run recv thread
forkIO $ forever (recvF >>= recvIncoming conn)
makeConnection conn
-- what a client calls
utpConnect :: Socket -> IO Connection
utpConnect sock = do
g <- newStdGen
let randId = P.head $ (randoms g :: [Word16])
conn <- initConn randId (randId + 1) 1 0 sock (NSB.send sock)
debugM utplogger "sending syn"
sendPacket (makePacket ST_SYN "" conn) conn
-- run resend thread
forkIO $ setInterval defResendTimeout $ resendOutgoing conn
let recvF = fmap P.fst $ recvPacket (connSocket conn) recvSize
-- block here until syn-ack stage is done
fstAck <- ackRecv recvF conn
-- handshake is succseful
atomically $ modifyTVar (connState conn)
(\s -> s {connStage = CS_CONNECTED, connAckNum = seqNum fstAck})
-- run recv thread
forkIO $ forever (recvF >>= recvIncoming conn)
makeConnection conn
-- loops until it reads a valid packet
recvPacket sock recvSize = fmap unwrapLeft $ runEitherT $ forever $ do
(msg, src) <- liftIO $ NSB.recvFrom sock recvSize
case (DS.decode msg :: Either String Packet) of
-- ignore and keep looping listening for packets
Left err -> liftIO $ infoM utplogger "non-utp package received"
Right packet -> left (packet, src) -- exit loop
makeConnection :: ConnData -> IO Connection
makeConnection conn = do
let send = \payload -> sendPacket (makePacket ST_DATA payload conn) conn
return $ Conn send (recvPub conn) (return ())
recvPub conn len = do
atomically $ do
incoming <- readTVar (inBuf conn)
when (DQ.length incoming == 0) retry -- empty buffer
let (bytes, rest) = takeBytes len incoming
writeTVar (inBuf conn) rest
return bytes
makePacket pType load conn
= defPacket {packetType = pType, payload = load, connId = connIdRecv conn
, version = versionNum, extensions = 0}
{-
this function sets the following fields of packet
time :: Time
timeDiff :: Time
windowSize :: Word32
seqNum :: SeqNum
ackNum :: AckNum
-}
sendPacket packet conn = do
debugM utplogger $ "building packet..."
sequenced <- atomically $ do
state <- readTVar $ connState conn
out <- readTVar $ outBuf conn
let currSeq = connSeqNum state
inB <- readTVar $ inBuf conn
let sequenced = packet {seqNum = currSeq, ackNum = connAckNum state
, timeDiff = replyMicro state
, windowSize = fromIntegral $ recvBufferSize - dqSize inB (BS.length)}
-- acks are not buffered and don't inc sequence number
when (packetType sequenced /= ST_STATE) $ do
-- if there is no space in the buffer block and retry later
when (maxWindow state < fromIntegral ((packetSize sequenced) + dqSize out packetSize))
retry
writeTVar (outBuf conn) (pushBack out sequenced)
writeTVar (connState conn) (state {connSeqNum = currSeq + 1})
return sequenced
micros <- getTimeMicros
let timestamped = sequenced {time = micros}
debugM utplogger $ "sending packet..." ++ (show timestamped)
(sockSend conn) (DS.encode $ timestamped)
return ()
ackRecv recv conn = do
packet <- recv
case (packetType packet) of
ST_STATE -> handleAck conn packet >> return packet
_ -> do
errorM utplogger "got something other than ack"
throwIO FailedHandshake
handleAck conn packet = do
lostPacket <- atomically $ do
modifyTVar (outBuf conn) (P.snd . (dqTakeWhile ((<= ackNum packet) . seqNum)))
-- handle duplicate acks
state <- readTVar (connState conn)
let dup = lastAckRecv state == ackNum packet
let reachedLimit = dupAcks state + 1 >= lossDupAcks
if' dup
(if' reachedLimit
(modifyTVar (outBuf conn) (P.snd . popFront)) -- drop that packet
(modifyTVar (connState conn) (\s -> s {dupAcks = dupAcks s + 1}) ))
-- new ack coming in; reset state
(modifyTVar (connState conn) (\s -> s {dupAcks = 0, lastAckRecv = ackNum packet}))
return $ dup && reachedLimit -- return if packet loss happened
when lostPacket $ errorM utplogger "lost packet!"
resendOutgoing conn = do
outgoing <- atomically $ readTVar (outBuf conn)
forM (dqToList outgoing) $ \p -> sockSend conn $ DS.encode p
return ()
recvIncoming :: ConnData -> Packet -> IO ()
recvIncoming conn packet = case packetType packet of
ST_SYN -> do
debugM utplogger "received syn packet"
sendPacket (makePacket ST_STATE "" conn) conn
ST_STATE -> handleAck conn packet
ST_DATA -> do
debugM utplogger $ "received data packet " ++ (show packet)
inB <- atomically $ do
stateNow <- readTVar $ connState conn
when (connAckNum stateNow + 1 == seqNum packet) $ do
modifyTVar (inBuf conn) (P.flip DQ.pushBack $ payload packet)
modifyTVar (connState conn) (\s -> s {connAckNum = seqNum packet})
readTVar (inBuf conn)
debugM utplogger $ show inB
sendPacket (makePacket ST_STATE "" conn) conn -- ack
initConn :: ConnectionId -> ConnectionId -> SeqNum
-> AckNum -> Socket -> SockSend -> IO ConnData
initConn recvId sendId initSeqNum initAckNum sock send = do
let initState = ConnState {connSeqNum = initSeqNum,
connAckNum = initAckNum,
connStage = CS_SYN_SENT,
maxWindow = defWindowSize,
peerMaxWindow = defWindowSize,
replyMicro = 0,
dupAcks = 0,
lastAckRecv = 0}
stateVar <- newTVarIO initState
inBufVar <- newTVarIO DQ.empty
outBufVar <- newTVarIO DQ.empty
return $ ConnData stateVar inBufVar outBufVar sock send recvId sendId
-- PACKET SERIALIZATION
instance Serialize Packet where
get = (\(t, v) -> Packet t v) <$> getTypeVersion <*> getWord8 <*> getWord16be
<*> getWord32be <*> getWord32be <*> getWord32be
<*> getWord16be <*> getWord16be <*> getRest
-- assumes valid packet
put Packet {..} = do
putWord8 ((shiftL (fromJust $ P.lookup packetType $
P.map swap packetTypeMap) 4) + version)
putWord8 extensions
putWord16be connId
putWord32be time
putWord32be timeDiff
putWord32be windowSize
putWord16be seqNum
putWord16be ackNum
putByteString payload
getTypeVersion = do
byte <- getWord8
let packType = P.lookup (shiftR byte 4) packetTypeMap
let version = shiftR (shiftL byte 4) 4
case packType of
Just typeVal -> return (typeVal, version)
Nothing -> fail "unknown packet type"
getRest = remaining >>= getBytes
-- HELPERS
getTimeMicros = fmap (\n -> P.round $ n * 10 ^ 6) $ getPOSIXTime
setInterval t f = forever $ threadDelay t >> f
-- take a number of bytes from a dq containing bytestrings
takeBytes c b = (\(cs, dq) -> (BS.concat cs, dq) ) $ go c b
where
go count buf
| DQ.length buf == 0 || count == 0 = ([], buf)
| otherwise = if' (count >= topLen)
(top : chunks, rest)
([lastChunk], pushFront tail bsRest)
where
(Just top, tail) = popFront buf
topLen = BS.length top
(chunks, rest) = go (count - topLen) tail -- lazy
(lastChunk, bsRest) = BS.splitAt count top
-- takes first elems returning remaining DQ
dqTakeWhile :: Dequeue q => (a -> Bool) -> q a -> ([a], q a)
dqTakeWhile cond dq = case DQ.length dq of
0 -> ([], dq)
_ -> let (taken, rest) = dqTakeWhile cond (P.snd $ popFront dq) in
if' (cond $ fromJust $ DQ.first dq)
((fromJust $ DQ.first dq) : taken, rest)
([], dq)
-- dumb summing up of sizes
-- optimize by keeping track of size on removal and insertion
dqSize :: Dequeue q => q a -> (a -> Int) -> Int
dqSize dq len = P.sum $ P.map len $ dqToList dq
dqToList dq = DQ.takeFront (DQ.length dq) dq
if' c a b = if c then a else b
unwrapLeft (Left x) = x
toWord32 :: [Word8] -> Word32
toWord32 = P.foldr (\o a -> (a `shiftL` 8) .|. fromIntegral o) 0
-- manual debugging Setup
-- create 2 UDP sockets tunnel through them
echoPort = 9901
maxline = 1500
clientUTP = do
updateGlobalLogger utplogger (setLevel DEBUG)
withSocketsDo $ do
debugM utplogger "running"
sock <- socket AF_INET Datagram 0
NS.connect sock (SockAddrInet echoPort (toWord32 [127, 0, 0, 1]))
conn <- utpConnect sock
Network.UTP.send conn "hello"
debugM utplogger "sent message "
resp <- Network.UTP.recv conn 2
debugM utplogger $ "got echo response " ++ (show resp)
utpechoserver :: IO ()
utpechoserver = do
updateGlobalLogger utplogger (setLevel DEBUG)
withSocketsDo $ do
sock <- socket AF_INET Datagram 0
bindSocket sock (SockAddrInet echoPort iNADDR_ANY)
utpListen sock utpsocketEcho
utpsocketEcho :: SockAddr -> Connection -> IO ()
utpsocketEcho addr conn = forever $ do
mesg <- Network.UTP.recv conn maxline
debugM utplogger $ "got message from utp socket " ++ (show mesg)
send_count <- Network.UTP.send conn mesg
return ()
-- send_count <- NS.sendTo sock mesg client
|
danoctavian/utp
|
src/Network/UTP.hs
|
mit
| 16,328
| 0
| 26
| 3,898
| 4,246
| 2,158
| 2,088
| 305
| 3
|
{-# LANGUAGE FlexibleContexts #-}
module Salesman.Instance
( downloadInstance
) where
import Control.Monad.Reader.Class (MonadReader(..))
import Control.Monad.IO.Class (MonadIO(..))
import System.Process (callCommand)
import System.IO.Temp (createTempDirectory)
import System.Directory (copyFile, createDirectoryIfMissing, getTemporaryDirectory)
import Salesman.OptionTypes (Common(..))
import Paths_salesman (getDataFileName)
downloadInstance :: (MonadReader Common m, MonadIO m) => m FilePath
downloadInstance = do
properties <- reader optProperties
tmpDir <- liftIO getTemporaryDirectory
targetDir <- liftIO $ createTempDirectory tmpDir "salesman."
let srcDir = targetDir ++ "/src"
liftIO $ createDirectoryIfMissing True srcDir
packageXml <- liftIO $ getDataFileName "package.xml"
liftIO $ copyFile packageXml (srcDir ++ "/package.xml")
liftIO $ callCommand $ "java -jar ~/.salesman/tooling-force.com.jar --action=refresh --projectPath=" ++ targetDir ++ " --responseFilePath=/dev/null --skipModifiedFilesCheck=true --config=" ++ properties ++ " --tempFolderPath=" ++ targetDir
let refreshDir = targetDir ++ "/refresh"
unpackagedDir = refreshDir ++ "/unpackaged"
liftIO $ callCommand $ "cp -r " ++ unpackagedDir ++ "/* " ++ srcDir
liftIO $ callCommand $ "rm -r " ++ refreshDir
return targetDir
|
thomasdziedzic/salesman
|
src/Salesman/Instance.hs
|
mit
| 1,372
| 0
| 13
| 220
| 324
| 170
| 154
| 25
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Dupes.Repository (
Repository(..),
initialize,
findFrom,
isRepository,
testGroup,
) where
import Control.Monad
import Data.Maybe
import Dupes.Index as Index
import Dupes.WorkingDirectory as WorkingDirectory
import FileAccess (FileAccess)
import qualified FileAccess
import System.FilePath
import Test.Tasty.HUnit
import Test.Tasty.TH
data Repository = Repository { workingDirectory :: WorkingDirectory, indexPath :: IndexPath }
deriving Show
initialize :: FilePath -> IO Repository
initialize = FileAccess.runIO . initializeF
findFrom :: FilePath -> IO (Maybe Repository)
findFrom path = (fmap . fmap) getAt (findPathFrom path)
isRepository :: FilePath -> IO Bool
isRepository = FileAccess.runIO . isRepositoryF
-- | 'initialize' expressed in the pure-testable 'FileAccess' monad:
-- ensure the marker directory exists, then describe the repository.
initializeF :: FilePath -> FileAccess Repository
initializeF path = do
  FileAccess.createDirectoryIfMissing (repositorySubdirectory path)
  return (getAt path)
-- | IO wrapper over 'findPathFromF'.
findPathFrom :: FilePath -> IO (Maybe FilePath)
findPathFrom path = FileAccess.runIO (findPathFromF path)
-- | Search the path and each of its ancestors (as produced by
-- 'FileAccess.parentDirectories') for a repository root.
findPathFromF :: FilePath -> FileAccess (Maybe FilePath)
findPathFromF path = FileAccess.parentDirectories path >>= findM isRepositoryF
-- | First list element satisfying the monadic predicate, if any.
-- Runs the predicate across the whole list via 'filterM' (no
-- short-circuiting), matching the original behaviour in every monad.
findM :: (Monad m, Functor m) => (a -> m Bool) -> [a] -> m (Maybe a)
findM predicate candidates = listToMaybe <$> filterM predicate candidates
-- | Pure-effect check: a path is a repository iff its @.dupes@
-- subdirectory exists.
isRepositoryF :: FilePath -> FileAccess Bool
isRepositoryF path = FileAccess.doesDirectoryExist (repositorySubdirectory path)
-- | The marker/metadata directory that denotes a repository root.
repositorySubdirectory :: FilePath -> FilePath
repositorySubdirectory base = base </> ".dupes"
-- | Describe the 'Repository' rooted at the given directory (no I/O).
getAt :: FilePath -> Repository
getAt path = Repository wd idx
  where
    wd  = WorkingDirectory.construct path
    idx = Index.construct (repositorySubdirectory path)
-- HUnit cases, run against a pure fake filesystem via
-- 'FileAccess.runPure'; collected by the Template Haskell splice in
-- 'testGroup' at the bottom of the module.

case_isRepository_for_repository_is_True = True @=? result
  where
    result = FileAccess.runPure filesystem $ isRepositoryF "/path"
    filesystem = fakeRepositoryAt "/path"

case_isRepository_for_non_repo_path_is_False = False @=? result
  where
    result = FileAccess.runPure filesystem $ isRepositoryF "/path"
    filesystem = []

case_findFrom_when_directory_is_repository = expected @=? actual
  where
    expected = Just "/path"
    actual = FileAccess.runPure filesystem $ findPathFromF "/path"
    filesystem = fakeRepositoryAt "/path"

case_findFrom_when_parent_directory_is_repository = expected @=? actual
  where
    expected = Just "/path"
    actual = FileAccess.runPure filesystem $ findPathFromF "/path/inner"
    filesystem = fakeRepositoryAt "/path"

case_findFrom_when_root_is_repository = expected @=? actual
  where
    expected = Just "/"
    actual = FileAccess.runPure filesystem $ findPathFromF "/path"
    filesystem = fakeRepositoryAt "/"

case_findFrom_when_no_repository_is_Nothing = True @=? isNothing result
  where
    result = FileAccess.runPure filesystem $ findPathFromF "/path"
    filesystem = ["/path", "/"]

case_initialize_creates_a_repository = True @=? result
  where
    filesystem = ["/path", "/"]
    result = FileAccess.runPure filesystem $ do
      _ <- initializeF "/path"
      isRepositoryF "/path"

case_initialize_is_idempotent = True @=? result
  where
    filesystem = ["/path", "/"] ++ fakeRepositoryAt "/path"
    result = FileAccess.runPure filesystem $ do
      _ <- initializeF "/path"
      isRepositoryF "/path"

-- A fake filesystem containing just the repository marker directory.
fakeRepositoryAt :: FilePath -> [FilePath]
fakeRepositoryAt path = [repositorySubdirectory path]

-- | All @case_*@ bindings above, gathered by tasty-th.
testGroup = $(testGroupGenerator)
|
danstiner/dupes
|
src/Dupes/Repository.hs
|
mit
| 3,488
| 0
| 11
| 659
| 851
| 446
| 405
| 76
| 1
|
module Unison.Test.BlockStore.FileBlockStore where
import System.IO.Unsafe
import System.Random
import Test.Tasty
import Test.Tasty.HUnit
import Unison.BlockStore (BlockStore)
import Unison.Runtime.Address
import Unison.Test.BlockStore
import qualified Control.Concurrent.MVar as MVar
import qualified Data.IORef as IORef
import qualified System.Directory as Directory
import qualified Unison.BlockStore.FileBlockStore as FBS
import qualified Unison.BlockStore.MemBlockStore as MBS
-- | Test fixture: the on-disk directory backing the store (kept so the
-- teardown in 'tests' can delete it) plus the store itself.
data FileResource = FileResource
  { path :: FilePath
  , store :: BlockStore Address
  }
-- | Create a file-backed block store under a fresh @tempFBS@ directory.
setup :: IO FileResource
setup = do
  storeDir <- Directory.makeAbsolute "tempFBS"
  blockStore <- FBS.make' makeRandomAddress makeAddress storeDir
  return (FileResource storeDir blockStore)
-- | Shared-resource test tree; the temp directory is removed after all
-- cases run.
-- NOTE(review): 'unsafePerformIO' threads the IO-acquired resource into
-- the pure case list; this assumes tasty only forces it after 'setup'
-- has run -- confirm before refactoring.
tests :: TestTree
tests = withResource setup (Directory.removeDirectoryRecursive . path)
  (testGroup "FileBlockStore" . makeCases . store . unsafePerformIO)
|
nightscape/platform
|
node/tests/Unison/Test/BlockStore/FileBlockStore.hs
|
mit
| 919
| 0
| 10
| 115
| 216
| 128
| 88
| 24
| 1
|
-- | A binary tree: either empty, or a node carrying a value and two
-- subtrees.
data Tree a = Empty | Node a (Tree a) (Tree a)
    deriving (Show)
|
MartinThoma/LaTeX-examples
|
documents/Programmierparadigmen/scripts/haskell/binary-tree.hs
|
mit
| 82
| 0
| 8
| 33
| 37
| 20
| 17
| 2
| 0
|
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.CodeGen.JS.Optimizer.MagicDo
-- Copyright : (c) Phil Freeman 2013-14
-- License : MIT
--
-- Maintainer : Phil Freeman <paf31@cantab.net>
-- Stability : experimental
-- Portability :
--
-- |
-- This module implements the "Magic Do" optimization, which inlines calls to return
-- and bind for the Eff monad, as well as some of its actions.
--
-----------------------------------------------------------------------------
module Language.PureScript.CodeGen.JS.Optimizer.MagicDo (
magicDo
) where
import Data.List (nub)
import Data.Maybe (fromJust, isJust)
import Language.PureScript.CodeGen.JS.AST
import Language.PureScript.CodeGen.JS.Common
import Language.PureScript.Names
import Language.PureScript.Options
import qualified Language.PureScript.Constants as C
-- | Apply the magic-do and ST-inlining optimizations, unless the
-- compiler options disable them entirely.
magicDo :: Options -> JS -> JS
magicDo opts =
  if optionsNoMagicDo opts
    then id
    else inlineST . magicDo'
-- |
-- Inline type class dictionaries for >>= and return for the Eff monad
--
-- E.g.
--
--  Prelude[">>="](dict)(m1)(function(x) {
--    return ...;
--  })
--
-- becomes
--
--  function __do {
--    var x = m1();
--    ...
--  }
--
-- NOTE: 'convert' runs top-down (so outer do-blocks desugar before
-- inner ones); 'undo' then removes the leftover "__do" wrappers.
magicDo' :: JS -> JS
magicDo' = everywhereOnJS undo . everywhereOnJSTopDown convert
  where
  -- The name of the function block which is added to denote a do block
  fnName = "__do"
  -- Desugar monomorphic calls to >>= and return for the Eff monad
  convert :: JS -> JS
  -- Desugar return
  convert (JSApp (JSApp ret [val]) []) | isReturn ret = val
  -- Desugar pure
  convert (JSApp (JSApp pure' [val]) []) | isPure pure' = val
  -- Desugar >> (discarding bind: no variable introduced)
  convert (JSApp (JSApp bind [m]) [JSFunction Nothing [] (JSBlock js)]) | isBind bind =
    JSFunction (Just fnName) [] $ JSBlock (JSApp m [] : map applyReturns js )
  -- Desugar >>= (binds the continuation argument as a local variable)
  convert (JSApp (JSApp bind [m]) [JSFunction Nothing [arg] (JSBlock js)]) | isBind bind =
    JSFunction (Just fnName) [] $ JSBlock (JSVariableIntroduction arg (Just (JSApp m [])) : map applyReturns js)
  -- Desugar untilE
  convert (JSApp (JSApp f [arg]) []) | isEffFunc C.untilE f =
    JSApp (JSFunction Nothing [] (JSBlock [ JSWhile (JSUnary Not (JSApp arg [])) (JSBlock []), JSReturn $ JSObjectLiteral []])) []
  -- Desugar whileE
  convert (JSApp (JSApp (JSApp f [arg1]) [arg2]) []) | isEffFunc C.whileE f =
    JSApp (JSFunction Nothing [] (JSBlock [ JSWhile (JSApp arg1 []) (JSBlock [ JSApp arg2 [] ]), JSReturn $ JSObjectLiteral []])) []
  convert other = other
  -- Check if an expression represents a monomorphic call to >>= for the Eff monad
  isBind (JSApp bindPoly [effDict]) | isBindPoly bindPoly && isEffDict C.bindEffDictionary effDict = True
  isBind _ = False
  -- Check if an expression represents a monomorphic call to return for the Eff monad
  isReturn (JSApp retPoly [effDict]) | isRetPoly retPoly && isEffDict C.monadEffDictionary effDict = True
  isReturn _ = False
  -- Check if an expression represents a monomorphic call to pure for the Eff applicative
  isPure (JSApp purePoly [effDict]) | isPurePoly purePoly && isEffDict C.applicativeEffDictionary effDict = True
  isPure _ = False
  -- Check if an expression represents the polymorphic >>= function
  -- (both Prelude.bind and Prelude[">>="] spellings are recognised)
  isBindPoly (JSAccessor prop (JSVar prelude)) = prelude == C.prelude && (prop `elem` map identToJs [Ident C.bind, Op (C.>>=)])
  isBindPoly (JSIndexer (JSStringLiteral bind) (JSVar prelude)) = prelude == C.prelude && (bind `elem` [C.bind, (C.>>=)])
  isBindPoly _ = False
  -- Check if an expression represents the polymorphic return function
  isRetPoly (JSAccessor returnEscaped (JSVar prelude)) = prelude == C.prelude && returnEscaped == C.returnEscaped
  isRetPoly (JSIndexer (JSStringLiteral return') (JSVar prelude)) = prelude == C.prelude && return' == C.return
  isRetPoly _ = False
  -- Check if an expression represents the polymorphic pure function
  isPurePoly (JSAccessor pure' (JSVar prelude)) = prelude == C.prelude && pure' == C.pure'
  isPurePoly (JSIndexer (JSStringLiteral pure') (JSVar prelude)) = prelude == C.prelude && pure' == C.pure'
  isPurePoly _ = False
  -- Check if an expression represents a function in the Ef module
  isEffFunc name (JSAccessor name' (JSVar eff)) = eff == C.eff && name == name'
  isEffFunc _ _ = False
  -- Check if an expression represents the Monad Eff dictionary
  isEffDict name (JSVar ident) | ident == name = True
  isEffDict name (JSAccessor prop (JSVar eff)) = eff == C.eff && prop == name
  isEffDict _ _ = False
  -- Remove __do function applications which remain after desugaring
  undo :: JS -> JS
  undo (JSReturn (JSApp (JSFunction (Just ident) [] body) [])) | ident == fnName = body
  undo other = other
  -- Rewrite "return x" into an effect invocation "return x()" through
  -- all statement forms that can contain a return.
  applyReturns :: JS -> JS
  applyReturns (JSReturn ret) = JSReturn (JSApp ret [])
  applyReturns (JSBlock jss) = JSBlock (map applyReturns jss)
  applyReturns (JSWhile cond js) = JSWhile cond (applyReturns js)
  applyReturns (JSFor v lo hi js) = JSFor v lo hi (applyReturns js)
  applyReturns (JSForIn v xs js) = JSForIn v xs (applyReturns js)
  applyReturns (JSIfElse cond t f) = JSIfElse cond (applyReturns t) (applyReturns `fmap` f)
  applyReturns other = other
-- |
-- Inline functions in the ST module
--
-- NOTE(review): aggressiveness is decided per runST block; aggressive
-- mode turns STRef wrappers into plain local variables, safe mode keeps
-- the { value: ... } object wrappers.
inlineST :: JS -> JS
inlineST = everywhereOnJS convertBlock
  where
  -- Look for runST blocks and inline the STRefs there.
  -- If all STRefs are used in the scope of the same runST, only using { read, write, modify }STRef then
  -- we can be more aggressive about inlining, and actually turn STRefs into local variables.
  convertBlock (JSApp f [arg]) | isSTFunc C.runST f =
    let refs = nub . findSTRefsIn $ arg
        usages = findAllSTUsagesIn arg
        allUsagesAreLocalVars = all (\u -> let v = toVar u in isJust v && fromJust v `elem` refs) usages
        localVarsDoNotEscape = all (\r -> length (r `appearingIn` arg) == length (filter (\u -> let v = toVar u in v == Just r) usages)) refs
    in everywhereOnJS (convert (allUsagesAreLocalVars && localVarsDoNotEscape)) arg
  convertBlock other = other
  -- Convert a block in a safe way, preserving object wrappers of references,
  -- or in a more aggressive way, turning wrappers into local variables depending on the
  -- agg(ressive) parameter.
  convert agg (JSApp f [arg]) | isSTFunc C.newSTRef f =
    JSFunction Nothing [] (JSBlock [JSReturn $ if agg then arg else JSObjectLiteral [(C.stRefValue, arg)]])
  convert agg (JSApp (JSApp f [ref]) []) | isSTFunc C.readSTRef f =
    if agg then ref else JSAccessor C.stRefValue ref
  convert agg (JSApp (JSApp (JSApp f [ref]) [arg]) []) | isSTFunc C.writeSTRef f =
    if agg then JSAssignment ref arg else JSAssignment (JSAccessor C.stRefValue ref) arg
  convert agg (JSApp (JSApp (JSApp f [ref]) [func]) []) | isSTFunc C.modifySTRef f =
    if agg then JSAssignment ref (JSApp func [ref]) else JSAssignment (JSAccessor C.stRefValue ref) (JSApp func [JSAccessor C.stRefValue ref])
  convert _ other = other
  -- Check if an expression represents a function in the ST module
  isSTFunc name (JSAccessor name' (JSVar st)) = st == C.st && name == name'
  isSTFunc _ _ = False
  -- Find all ST Refs initialized in this block
  findSTRefsIn = everythingOnJS (++) isSTRef
    where
    isSTRef (JSVariableIntroduction ident (Just (JSApp (JSApp f [_]) []))) | isSTFunc C.newSTRef f = [ident]
    isSTRef _ = []
  -- Find all STRefs used as arguments to readSTRef, writeSTRef, modifySTRef
  findAllSTUsagesIn = everythingOnJS (++) isSTUsage
    where
    isSTUsage (JSApp (JSApp f [ref]) []) | isSTFunc C.readSTRef f = [ref]
    isSTUsage (JSApp (JSApp (JSApp f [ref]) [_]) []) | isSTFunc C.writeSTRef f || isSTFunc C.modifySTRef f = [ref]
    isSTUsage _ = []
  -- Find all uses of a variable
  appearingIn ref = everythingOnJS (++) isVar
    where
    isVar e@(JSVar v) | v == ref = [e]
    isVar _ = []
  -- Convert a JS value to a String if it is a JSVar
  toVar (JSVar v) = Just v
  toVar _ = Nothing
|
michaelficarra/purescript
|
src/Language/PureScript/CodeGen/JS/Optimizer/MagicDo.hs
|
mit
| 7,964
| 0
| 25
| 1,540
| 2,504
| 1,287
| 1,217
| 90
| 26
|
{-# LANGUAGE NoMonomorphismRestriction, FlexibleContexts, TypeFamilies #-}
module Main where
import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine
-- | Sierpinski triangle of the given depth: a unit triangle at the
-- base, otherwise the smaller diagram stacked over two side-by-side
-- copies.
--
-- Fix: the base case now catches every depth <= 1 (the original only
-- matched exactly 1, so depth 0 or negative recursed forever).
sierpinski n
  | n <= 1    = triangle 1
  | otherwise = s === (s ||| s) # centerX
  where
    s = sierpinski (n - 1)
-- | The rendered scene: a depth-7 Sierpinski triangle.
diagram :: Diagram B
diagram = sierpinski 7

-- Render via the SVG command-line backend, with a small margin.
main = mainWith $ diagram # frame 0.1
|
jeffreyrosenbluth/NYC-meetup
|
meetup/Sierpinski.hs
|
mit
| 370
| 0
| 9
| 90
| 100
| 54
| 46
| 12
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module holding consumer types.
-----------------------------------------------------------------------------
module Kafka.Consumer.Types
( KafkaConsumer(..)
, ConsumerGroupId(..)
, Offset(..)
, OffsetReset(..)
, RebalanceEvent(..)
, PartitionOffset(..)
, SubscribedPartitions(..)
, Timestamp(..)
, OffsetCommit(..)
, OffsetStoreSync(..)
, OffsetStoreMethod(..)
, TopicPartition(..)
, ConsumerRecord(..)
, crMapKey
, crMapValue
, crMapKV
-- why are these here?
-- * Deprecated
, sequenceFirst
, traverseFirst
, traverseFirstM
, traverseM
, bitraverseM
)
where
import Data.Bifoldable (Bifoldable (..))
import Data.Bifunctor (Bifunctor (..))
import Data.Bitraversable (Bitraversable (..), bimapM, bisequence)
import Data.Int (Int64)
import Data.String (IsString)
import Data.Text (Text)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Kafka.Internal.Setup (HasKafka (..), HasKafkaConf (..), Kafka (..), KafkaConf (..))
import Kafka.Types (Millis (..), PartitionId (..), TopicName (..))
-- | The main type for Kafka consumption, used e.g. to poll and commit messages.
--
-- Its constructor is intentionally not exposed, instead, one should use 'Kafka.Consumer.newConsumer' to acquire such a value.
data KafkaConsumer = KafkaConsumer
  { kcKafkaPtr  :: !Kafka     -- ^ the raw 'Kafka' handle
  , kcKafkaConf :: !KafkaConf -- ^ the configuration the consumer was created with
  }

-- Expose the handle and configuration through the shared accessor classes.
instance HasKafka KafkaConsumer where
  getKafka = kcKafkaPtr
  {-# INLINE getKafka #-}

instance HasKafkaConf KafkaConsumer where
  getKafkaConf = kcKafkaConf
  {-# INLINE getKafkaConf #-}
-- Core data model for the consumer API.

-- | Consumer group ID. Different consumers with the same consumer group ID will get assigned different partitions of each subscribed topic.
--
-- See <https://kafka.apache.org/documentation/#group.id Kafka documentation on consumer group>
newtype ConsumerGroupId = ConsumerGroupId
  { unConsumerGroupId :: Text
  } deriving (Show, Ord, Eq, IsString, Generic)

-- | A message offset in a partition
newtype Offset = Offset { unOffset :: Int64 } deriving (Show, Eq, Ord, Read, Generic)

-- | Where to reset the offset when there is no initial offset in Kafka
--
-- See <https://kafka.apache.org/documentation/#auto.offset.reset Kafka documentation on offset reset>
data OffsetReset = Earliest | Latest deriving (Show, Eq, Generic)

-- | A set of events which happen during the rebalancing process
data RebalanceEvent =
    -- | Happens before Kafka Client confirms new assignment
    RebalanceBeforeAssign [(TopicName, PartitionId)]
    -- | Happens after the new assignment is confirmed
  | RebalanceAssign [(TopicName, PartitionId)]
    -- | Happens before Kafka Client confirms partitions rejection
  | RebalanceBeforeRevoke [(TopicName, PartitionId)]
    -- | Happens after the rejection is confirmed
  | RebalanceRevoke [(TopicName, PartitionId)]
  deriving (Eq, Show, Generic)

-- | The partition offset
data PartitionOffset =
    PartitionOffsetBeginning
  | PartitionOffsetEnd
  | PartitionOffset Int64
  | PartitionOffsetStored
  | PartitionOffsetInvalid
  deriving (Eq, Show, Generic)

-- | Partitions subscribed by a consumer
data SubscribedPartitions
  = SubscribedPartitions [PartitionId] -- ^ Subscribe only to those partitions
  | SubscribedPartitionsAll            -- ^ Subscribe to all partitions
  deriving (Show, Eq, Generic)

-- | Consumer record timestamp
data Timestamp =
    CreateTime !Millis
  | LogAppendTime !Millis
  | NoTimestamp
  deriving (Show, Eq, Read, Generic)

-- | Offsets commit mode
data OffsetCommit =
    OffsetCommit      -- ^ Forces consumer to block until the broker offsets commit is done
  | OffsetCommitAsync -- ^ Offsets will be committed in a non-blocking way
  deriving (Show, Eq, Generic)

-- | Indicates how offsets are to be synced to disk
data OffsetStoreSync =
    OffsetSyncDisable      -- ^ Do not sync offsets (in Kafka: -1)
  | OffsetSyncImmediate    -- ^ Sync immediately after each offset commit (in Kafka: 0)
  | OffsetSyncInterval Int -- ^ Sync after specified interval in millis
  deriving (Show, Eq, Generic)

-- | Indicates the method of storing the offsets
data OffsetStoreMethod =
    OffsetStoreBroker                        -- ^ Offsets are stored in Kafka broker (preferred)
  | OffsetStoreFile FilePath OffsetStoreSync -- ^ Offsets are stored in a file (and synced to disk according to the sync policy)
  deriving (Show, Eq, Generic)

-- | Kafka topic partition structure
data TopicPartition = TopicPartition
  { tpTopicName :: TopicName
  , tpPartition :: PartitionId
  , tpOffset    :: PartitionOffset
  } deriving (Show, Eq, Generic)

-- | Represents a /received/ message from Kafka (i.e. used in a consumer)
data ConsumerRecord k v = ConsumerRecord
  { crTopic     :: !TopicName   -- ^ Kafka topic this message was received from
  , crPartition :: !PartitionId -- ^ Kafka partition this message was received from
  , crOffset    :: !Offset      -- ^ Offset within the 'crPartition' Kafka partition
  , crTimestamp :: !Timestamp   -- ^ Message timestamp
  , crKey       :: !k           -- ^ Message key
  , crValue     :: !v           -- ^ Message value
  }
  deriving (Eq, Show, Read, Typeable, Generic)
-- Mapping/folding/traversing act only on the key (k) and value (v)
-- payload fields; the metadata fields pass through unchanged.

instance Bifunctor ConsumerRecord where
  bimap f g (ConsumerRecord t p o ts k v) = ConsumerRecord t p o ts (f k) (g v)
  {-# INLINE bimap #-}

instance Functor (ConsumerRecord k) where
  fmap = second
  {-# INLINE fmap #-}

instance Foldable (ConsumerRecord k) where
  foldMap f r = f (crValue r)
  {-# INLINE foldMap #-}

instance Traversable (ConsumerRecord k) where
  traverse f r = (\v -> crMapValue (const v) r) <$> f (crValue r)
  {-# INLINE traverse #-}

instance Bifoldable ConsumerRecord where
  bifoldMap f g r = f (crKey r) `mappend` g (crValue r)
  {-# INLINE bifoldMap #-}

instance Bitraversable ConsumerRecord where
  bitraverse f g r = (\k v -> bimap (const k) (const v) r) <$> f (crKey r) <*> g (crValue r)
  {-# INLINE bitraverse #-}
-- Deprecated record-mapping and traversal helpers, kept only for
-- backwards compatibility; the pragmas name the direct replacements.

{-# DEPRECATED crMapKey "Isn't concern of this library. Use 'first'" #-}
crMapKey :: (k -> k') -> ConsumerRecord k v -> ConsumerRecord k' v
crMapKey = first
{-# INLINE crMapKey #-}

{-# DEPRECATED crMapValue "Isn't concern of this library. Use 'second'" #-}
crMapValue :: (v -> v') -> ConsumerRecord k v -> ConsumerRecord k v'
crMapValue = second
{-# INLINE crMapValue #-}

{-# DEPRECATED crMapKV "Isn't concern of this library. Use 'bimap'" #-}
crMapKV :: (k -> k') -> (v -> v') -> ConsumerRecord k v -> ConsumerRecord k' v'
crMapKV = bimap
{-# INLINE crMapKV #-}

{-# DEPRECATED sequenceFirst "Isn't concern of this library. Use @'bitraverse' 'id' 'pure'@" #-}
sequenceFirst :: (Bitraversable t, Applicative f) => t (f k) v -> f (t k v)
sequenceFirst = bitraverse id pure
{-# INLINE sequenceFirst #-}

{-# DEPRECATED traverseFirst "Isn't concern of this library. Use @'bitraverse' f 'pure'@" #-}
traverseFirst :: (Bitraversable t, Applicative f)
              => (k -> f k')
              -> t k v
              -> f (t k' v)
traverseFirst f = bitraverse f pure
{-# INLINE traverseFirst #-}

{-# DEPRECATED traverseFirstM "Isn't concern of this library. Use @'bitraverse' 'id' 'pure' '<$>' 'bitraverse' f 'pure' r@" #-}
traverseFirstM :: (Bitraversable t, Applicative f, Monad m)
               => (k -> m (f k'))
               -> t k v
               -> m (f (t k' v))
traverseFirstM f r = bitraverse id pure <$> bitraverse f pure r
{-# INLINE traverseFirstM #-}

{-# DEPRECATED traverseM "Isn't concern of this library. Use @'sequenceA' '<$>' 'traverse' f r@" #-}
traverseM :: (Traversable t, Applicative f, Monad m)
          => (v -> m (f v'))
          -> t v
          -> m (f (t v'))
traverseM f r = sequenceA <$> traverse f r
{-# INLINE traverseM #-}

{-# DEPRECATED bitraverseM "Isn't concern of this library. Use @'Data.Bitraversable.bisequenceA' '<$>' 'bimapM' f g r@" #-}
bitraverseM :: (Bitraversable t, Applicative f, Monad m)
            => (k -> m (f k'))
            -> (v -> m (f v'))
            -> t k v
            -> m (f (t k' v'))
bitraverseM f g r = bisequence <$> bimapM f g r
{-# INLINE bitraverseM #-}
|
haskell-works/kafka-client
|
src/Kafka/Consumer/Types.hs
|
mit
| 8,320
| 0
| 13
| 1,758
| 1,711
| 977
| 734
| 180
| 1
|
module Main (main) where
import XMonad
import qualified Data.Map as M
import Graphics.X11.Xlib
import Graphics.X11.ExtraTypes.XF86
-- needed by CopyWindows bindings
import qualified XMonad.StackSet as W
import XMonad.Actions.CopyWindow
-- XMobar
import XMonad.Util.Run(spawnPipe)
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.DynamicLog
import System.IO
import XMonad.Util.EZConfig(additionalKeys)
-- | Window placement rules: pin mail, password manager and editor
-- windows to dedicated workspaces, and manage dock windows (xmobar).
myManageHook = composeAll
    [ className =? "thunderbird" --> doShift "6m"
    , className =? "keepassx" --> doShift "7p"
    , className =? "emacs" --> doShift "4d"
    , manageDocks
    ]
main :: IO ()
main = do
    -- Pipe used to feed the dynamic log into xmobar.
    xmproc <- spawnPipe "/usr/bin/xmobar ~/.xmobarrc"
    xmonad $ defaultConfig{
        keys = myKeys <+> keys defaultConfig
        , terminal = "terminator"
        , workspaces = ["1w", "2w", "3w" , "4d", "5d", "6m", "7p", "8", "9" ]
        -- mod4 is conventionally the Super/Windows key
        , modMask = mod4Mask
        , manageHook = myManageHook <+> manageHook defaultConfig
        , layoutHook = avoidStruts $ layoutHook defaultConfig
        , logHook = dynamicLogWithPP xmobarPP
                { ppOutput = hPutStrLn xmproc
                , ppTitle = xmobarColor "green" "" . shorten 50
                }
        } `additionalKeys`
        -- Extra bindings: screen lock, strut toggle, pomodoro scripts,
        -- and hardware keys for backlight/cpufreq control.
        [ ((mod4Mask .|. shiftMask, xK_z), spawn "xscreensaver-command -lock")
        , ((mod4Mask, xK_b), sendMessage ToggleStruts)
        , ((mod4Mask, xK_n), spawn "~/bin/pomodoro-stop")
        , ((mod4Mask, xK_v), spawn "~/bin/pomodoro-interrupt")
        , ((0, xF86XK_MonBrightnessUp), spawn "sudo /usr/local/sbin/backlight up")
        , ((0, xF86XK_MonBrightnessDown), spawn "sudo /usr/local/sbin/backlight down")
        , ((0, xF86XK_Launch1), spawn "sudo /usr/local/sbin/backlight toggle")
        , ((0, xF86XK_Launch3), spawn "/usr/local/sbin/cpufreq_toggle_osd")
        ]
-- | Per-workspace bindings generated for each workspace name:
-- mod+N views workspace N, mod+shift+N shifts the focused window there,
-- and mod+ctrl+shift+N copies the focused window there.
myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList
    [
      ((m .|. modm, k), windows $ f i)
      | (i, k) <- zip (XMonad.workspaces conf) [xK_1 ..]
      , (f, m) <- [(W.view, 0), (W.shift, shiftMask), (copy, shiftMask .|. controlMask)]
    ]
|
ajsalminen/ansible-role-dotfiles-xmonad
|
files/dotfiles/.xmonad/xmonad.hs
|
mit
| 2,073
| 0
| 15
| 483
| 587
| 346
| 241
| 44
| 1
|
-- Writing applicative parsers from scratch
-- http://www.codewars.com/kata/54f1fdb7f29358dd1f00015d/
module ApplicativeParser where
import Prelude hiding (fmap)
import Data.Char (Char, isDigit)
import Control.Arrow (second)
-- | An ambiguous parser: a function from input to every possible
-- (remaining input, result) pair.
newtype Parser a = P { unP :: String -> [(String, a)] }
-- | Change the result of a parser.
pmap :: (a -> b) -> Parser a -> Parser b
pmap f (P run) = P (\input -> [ (rest, f value) | (rest, value) <- run input ])
-- | Operator version of 'pmap'.
(<#>) :: (a -> b) -> Parser a -> Parser b
f <#> p = pmap f p

-- | Parse a value and replace it with a constant.
(<#) :: a -> Parser b -> Parser a
x <# p = pmap (\_ -> x) p

infixl 4 <#>
infixl 4 <#
-- | Parse a single character only when the predicate holds for it;
-- fails on empty input.
predP :: (Char -> Bool) -> Parser Char
predP ok = P $ \input -> case input of
  (c:rest) | ok c -> [(rest, c)]
  _               -> []
-- | Succeed only when parsing the given character.
charP :: Char -> Parser Char
charP wanted = predP (== wanted)
-- | Inject a value into an identity parser (consumes no input).
inject :: a -> Parser a
inject x = P $ \input -> [(input, x)]
-- | Sequential application: parse a function, then a value, and apply
-- the function to the value. All ambiguous combinations are kept, in
-- function-major order.
(<@>) :: Parser (a -> b) -> Parser a -> Parser b
P pf <@> P px = P $ \s ->
  concat [ [ (s'', fn x) | (s'', x) <- px s' ] | (s', fn) <- pf s ]

-- | Sequence two parsers, keeping only the first result.
(<@) :: Parser a -> Parser b -> Parser a
pa <@ pb = pmap const pa <@> pb

-- | Sequence two parsers, keeping only the second result.
(@>) :: Parser a -> Parser b -> Parser b
pa @> pb = pmap (\_ y -> y) pa <@> pb

infixl 4 <@
infixl 4 @>
infixl 4 <@>
-- | Parse a whole string, character by character.
stringP :: String -> Parser String
stringP []     = inject []
stringP (c:cs) = (:) <#> charP c <@> stringP cs
-- | A parser that always fails (produces no results).
emptyP :: Parser a
emptyP = P (\_ -> [])
-- | Alternation: run both parsers on the same input and keep every
-- result of each (left parser's results first).
(<<>>) :: Parser a -> Parser a -> Parser a
P f <<>> P g = P (\input -> f input ++ g input)

infixl 3 <<>>
-- | Apply the parser zero or more times.
-- Defined mutually with 'some'; the zero-repetition match is listed
-- first by '<<>>'.
many :: Parser a -> Parser [a]
many p = inject [] <<>> some p

-- | Apply the parser one or more times.
some :: Parser a -> Parser [a]
some p = ((:) <#> p) <@> many p
-- | Run a parser, returning every ambiguous result whose parse
-- consumed the input completely.
runParser :: Parser a -> String -> [a]
runParser (P run) input =
  [ result | (leftover, result) <- run input, null leftover ]
-- | Run a parser; succeed only when there is exactly one unambiguous,
-- fully consuming result.
runParserUnique :: Parser a -> String -> Maybe a
runParserUnique p input
  | [x] <- runParser p input = Just x
  | otherwise                = Nothing
-- | Kinds of binary operators: addition and multiplication.
data BinOp = AddBO | MulBO deriving (Eq, Show)

-- | Some kind of arithmetic expression: integer constant, binary
-- operation, negation, or the literal zero.
data Expr = ConstE Int | BinOpE BinOp Expr Expr | NegE Expr | ZeroE deriving (Eq, Show)
-- | Evaluate an arithmetic expression to an 'Int'.
evalExpr :: Expr -> Int
evalExpr ZeroE              = 0
evalExpr (ConstE n)         = n
evalExpr (NegE inner)       = negate (evalExpr inner)
evalExpr (BinOpE AddBO l r) = evalExpr l + evalExpr r
evalExpr (BinOpE MulBO l r) = evalExpr l * evalExpr r
-- | Parse arithmetic expressions, with the following grammar:
--
-- expr ::= const | binOpExpr | neg | zero
-- const ::= int
-- binOpExpr ::= '(' expr ' ' binOp ' ' expr ')'
-- binOp ::= '+' | '*'
-- neg ::= '-' expr
-- zero ::= 'z'
--
-- Returns Nothing unless the input has exactly one full parse.
parseExpr :: String -> Maybe Expr
parseExpr = runParserUnique pExpr
  where pExpr = pConst <<>> pBinOpExpr <<>> pNeg <<>> pZero
        pConst = (ConstE . read) <#> some (predP isDigit)
        pNeg = NegE <#> (charP '-' @> pExpr)
        pZero = ZeroE <# charP 'z'
        pBinOpExpr = (\e1 op e2 -> BinOpE op e1 e2) <#> (charP '(' @> pExpr <@ charP ' ') <@> pBinOp <@> (charP ' ' @> pExpr <@ charP ')')
        pBinOp = (AddBO <# charP '+') <<>> (MulBO <# charP '*')
|
gafiatulin/codewars
|
src/2 kyu/ApplicativeParser.hs
|
mit
| 3,784
| 0
| 13
| 963
| 1,313
| 704
| 609
| 65
| 5
|
-- | A person whose age may or may not be known; the name is always present.
data Person = PersonWithAge String Int | PersonWithoutAge String
-- | The person's name, regardless of which constructor was used.
getName :: Person -> String
getName person = case person of
  PersonWithAge n _  -> n
  PersonWithoutAge n -> n
-- | The person's age, when known.
--
-- Fix: the unused @name@ bindings are replaced with wildcards,
-- silencing -Wunused-matches and making clear only the age matters.
getAge :: Person -> Maybe Int
getAge (PersonWithAge _ age) = Just age
getAge (PersonWithoutAge _)  = Nothing
-- | The first element of the list satisfying the predicate, if any.
find :: (a -> Bool) -> [a] -> Maybe a
find _ [] = Nothing
find ok (x:rest)
  | ok x      = Just x
  | otherwise = find ok rest
|
tamasgal/haskell_exercises
|
maybe_haskell/Datatypes.hs
|
mit
| 456
| 0
| 7
| 96
| 175
| 89
| 86
| 13
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-metricdimension.html
module Stratosphere.ResourceProperties.ApplicationAutoScalingScalingPolicyMetricDimension where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- ApplicationAutoScalingScalingPolicyMetricDimension. See
-- 'applicationAutoScalingScalingPolicyMetricDimension' for a more
-- convenient constructor.
data ApplicationAutoScalingScalingPolicyMetricDimension =
  ApplicationAutoScalingScalingPolicyMetricDimension
  { _applicationAutoScalingScalingPolicyMetricDimensionName :: Val Text  -- ^ the dimension's Name
  , _applicationAutoScalingScalingPolicyMetricDimensionValue :: Val Text -- ^ the dimension's Value
  } deriving (Show, Eq)
-- | Serialise to the CloudFormation JSON shape. Both fields are
-- required, hence the unconditional 'Just's inside 'catMaybes'.
instance ToJSON ApplicationAutoScalingScalingPolicyMetricDimension where
  toJSON ApplicationAutoScalingScalingPolicyMetricDimension{..} =
    object $
    catMaybes
    [ (Just . ("Name",) . toJSON) _applicationAutoScalingScalingPolicyMetricDimensionName
    , (Just . ("Value",) . toJSON) _applicationAutoScalingScalingPolicyMetricDimensionValue
    ]
-- | Constructor for 'ApplicationAutoScalingScalingPolicyMetricDimension'
-- containing required fields as arguments.
applicationAutoScalingScalingPolicyMetricDimension
  :: Val Text -- ^ 'aasspmdName'
  -> Val Text -- ^ 'aasspmdValue'
  -> ApplicationAutoScalingScalingPolicyMetricDimension
applicationAutoScalingScalingPolicyMetricDimension name value =
  ApplicationAutoScalingScalingPolicyMetricDimension
    { _applicationAutoScalingScalingPolicyMetricDimensionName = name
    , _applicationAutoScalingScalingPolicyMetricDimensionValue = value
    }
-- Lenses over the two required fields.

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-metricdimension.html#cfn-applicationautoscaling-scalingpolicy-metricdimension-name
aasspmdName :: Lens' ApplicationAutoScalingScalingPolicyMetricDimension (Val Text)
aasspmdName = lens _applicationAutoScalingScalingPolicyMetricDimensionName (\s a -> s { _applicationAutoScalingScalingPolicyMetricDimensionName = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-metricdimension.html#cfn-applicationautoscaling-scalingpolicy-metricdimension-value
aasspmdValue :: Lens' ApplicationAutoScalingScalingPolicyMetricDimension (Val Text)
aasspmdValue = lens _applicationAutoScalingScalingPolicyMetricDimensionValue (\s a -> s { _applicationAutoScalingScalingPolicyMetricDimensionValue = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/ApplicationAutoScalingScalingPolicyMetricDimension.hs
|
mit
| 2,668
| 0
| 13
| 222
| 267
| 153
| 114
| 29
| 1
|
-- (0.5 points)
module Counter
( Counter
, tick
, runCounter
) where
-- | The Counter monad counts ticks, i.e. calls to the 'tick' function.
-- The Int is the tick count accumulated so far; @a@ is the result.
data Counter a = Counter Int a
-- | Return the result of the computation together with the tick count.
runCounter :: Counter a -> (a, Int)
runCounter (Counter i v) = (v, i)
-- Functor and Applicative are required superclasses of Monad since
-- GHC 7.10 (the Applicative-Monad Proposal); without them this module
-- no longer compiles. Tick counts add up under sequencing, and 'pure'
-- costs zero ticks, so Applicative agrees with the Monad instance.
instance Functor Counter where
  fmap f (Counter n x) = Counter n (f x)

instance Applicative Counter where
  pure = Counter 0
  Counter m f <*> Counter n x = Counter (m + n) (f x)

instance Monad Counter where
  return = pure
  Counter n x >>= k =
    let Counter n' y = k x
    in Counter (n + n') y
-- | A single tick: no result, a cost of one.
tick :: Counter ()
tick = Counter 1 ()
|
nkartashov/haskell
|
hw10/Counter/Counter.hs
|
gpl-2.0
| 577
| 0
| 12
| 117
| 168
| 88
| 80
| 14
| 1
|
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module TestImport
( module TestImport
, module X
) where
import Application (makeFoundation, makeLogWare)
import ClassyPrelude as X hiding (delete, deleteBy, Handler)
import Database.Persist as X hiding (get)
import Database.Persist.Sql (SqlPersistM, runSqlPersistMPool, rawExecute, rawSql, unSingle, connEscapeName)
import Foundation as X
import Model as X
import Test.Hspec as X
import Yesod.Default.Config2 (useEnv, loadYamlSettings)
import Yesod.Auth as X
import Yesod.Test as X
import Yesod.Core.Unsafe (fakeHandlerGetLogger)
-- Wiping the database
import Database.Persist.Sqlite (sqlDatabase, mkSqliteConnectionInfo, fkEnabled, createSqlitePoolFromInfo)
import Control.Monad.Logger (runLoggingT)
import Lens.Micro (set)
import Settings (appDatabaseConf)
import Yesod.Core (messageLoggerSource)
-- | Run a database action against the test application's pool.
runDB :: SqlPersistM a -> YesodExample App a
runDB query = do
  site <- getTestYesod
  liftIO (runSqlPersistMPool query (appConnPool site))
-- | Run a Yesod handler inside a test, routing its log output through
-- a fake logger.
runHandler :: Handler a -> YesodExample App a
runHandler handler =
  getTestYesod >>= \site -> fakeHandlerGetLogger appLogger site handler
-- | Spec hook: build a fresh foundation before each spec item (test
-- settings overriding the defaults), wipe the database, and hand the
-- (site, logger middleware) pair to the item.
withApp :: SpecWith (TestApp App) -> Spec
withApp = before $ do
    settings <- loadYamlSettings
        ["config/test-settings.yml", "config/settings.yml"]
        []
        useEnv
    foundation <- makeFoundation settings
    wipeDB foundation
    logWare <- liftIO $ makeLogWare foundation
    return (foundation, logWare)
-- This function will truncate all of the tables in your database.
-- 'withApp' calls it before each test, creating a clean environment for each
-- spec to run in.
wipeDB :: App -> IO ()
wipeDB app = do
    -- In order to wipe the database, we need to use a connection which has
    -- foreign key checks disabled. Foreign key checks are enabled or disabled
    -- per connection, so this won't affect queries outside this function.
    --
    -- Aside: foreign key checks are enabled by persistent-sqlite, as of
    -- version 2.6.2, unless they are explicitly disabled in the
    -- SqliteConnectionInfo.
    let logFunc = messageLoggerSource app (appLogger app)
    let dbName = sqlDatabase $ appDatabaseConf $ appSettings app
        connInfo = set fkEnabled False $ mkSqliteConnectionInfo dbName
    pool <- runLoggingT (createSqlitePoolFromInfo connInfo 1) logFunc
    -- Each table is cleared with DELETE (SQLite has no TRUNCATE), using
    -- the backend's identifier escaping for table names.
    flip runSqlPersistMPool pool $ do
        tables <- getTables
        sqlBackend <- ask
        let queries = map (\t -> "DELETE FROM " ++ (connEscapeName sqlBackend $ DBName t)) tables
        forM_ queries (\q -> rawExecute q [])
-- | Names of every table in the sqlite database, read from sqlite_master.
getTables :: DB [Text]
getTables = do
    rows <- rawSql "SELECT name FROM sqlite_master WHERE type = 'table';" []
    return (map unSingle rows)
-- | Authenticate as a user. This relies on the `auth-dummy-login: true` flag
-- being set in test-settings.yaml, which enables dummy authentication in
-- Foundation.hs
authenticateAs :: Entity User -> YesodExample App ()
authenticateAs (Entity _ u) = do
    request $ do
        setMethod "POST"
        -- the dummy plugin accepts whatever identifier is posted
        addPostParam "ident" $ userIdent u
        setUrl $ AuthR $ PluginR "dummy" []
-- | Create a user. The dummy email entry helps to confirm that foreign-key
-- checking is switched off in wipeDB for those database backends which need it.
createUser :: Text -> YesodExample App (Entity User)
createUser ident = runDB $ do
    user <- insertEntity User
        { userIdent = ident
        , userPassword = Nothing
        }
    -- The Email row references the user, exercising FK handling in 'wipeDB'.
    _ <- insert Email
        { emailEmail = ident
        , emailUserId = Just $ entityKey user
        , emailVerkey = Nothing
        }
    return user
|
Happy0/liscrabble
|
test/TestImport.hs
|
gpl-2.0
| 3,822
| 0
| 19
| 913
| 806
| 428
| 378
| 71
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Build
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | Simple build system for packages
--
-------------------------------------------------------------------------------
module IDE.Build (
MakeSettings(..),
MakeOp(..),
moNoOp,
makePackages,
defaultMakeSettings
) where
import Data.Map (Map)
import IDE.Core.State
(postAsyncIDE, postSyncIDE, triggerEventIDE, readIDE, IDEAction,
Workspace(..), ipdPackageId, ipdDepends, IDEPackage)
import qualified Data.Map as Map
(insert, empty, lookup, toList, fromList)
import Data.Graph
(edges, topSort, graphFromEdges, Vertex, Graph,
transposeG)
import Distribution.Package (pkgVersion, pkgName, Dependency(..))
import Data.List (delete, nub, (\\), find)
import Distribution.Version (withinRange)
import Data.Maybe (fromMaybe, mapMaybe)
import IDE.Package
(packageClean', packageCopy', packageRegister', buildPackage, packageConfig',
packageTest', packageDoc',packageBench')
import IDE.Core.Types
(IDEEvent(..), Prefs(..), IDE(..), WorkspaceAction)
import Control.Event (EventSource(..))
import Control.Monad.Trans.Reader (ask)
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad (void)
import Control.Arrow ((***))
import Data.Text (Text)
import Distribution.Text (disp)
import Data.Monoid ((<>))
import qualified Data.Text as T (pack, unpack)
-- import Debug.Trace (trace)
trace a b = b
-- * Exported
-- | Settings controlling a build run, normally obtained from the user's
-- preferences via 'defaultMakeSettings'.
data MakeSettings = MakeSettings {
    msMakeMode :: Bool,                  -- ^ also process dependent packages (see 'constrMakeChain')
    msSingleBuildWithoutLinking :: Bool, -- ^ skip linking when building a single package
    msSaveAllBeforeBuild :: Bool,        -- ^ save modified buffers before building (consumed elsewhere)
    msBackgroundBuild :: Bool,           -- ^ this run is a background build
    msRunUnitTests :: Bool,              -- ^ whether test suites should run (consumed elsewhere)
    msRunBenchmarks :: Bool,             -- ^ whether benchmarks should run (consumed elsewhere)
    msJumpToWarnings :: Bool,            -- ^ passed through to the package build actions
    msDontInstallLast :: Bool}           -- ^ omit copy/register steps for the final target
-- | Take make settings from preferences.
-- Each field is copied 1:1 from the corresponding preference.
defaultMakeSettings :: Prefs -> MakeSettings
defaultMakeSettings prefs = MakeSettings {
    msMakeMode = makeMode prefs,
    msSingleBuildWithoutLinking = singleBuildWithoutLinking prefs,
    msSaveAllBeforeBuild = saveAllBeforeBuild prefs,
    msBackgroundBuild = backgroundBuild prefs,
    msRunUnitTests = runUnitTests prefs,
    msRunBenchmarks = runBenchmarks prefs,
    msJumpToWarnings = jumpToWarnings prefs,
    msDontInstallLast = dontInstallLast prefs}
-- | A make operation.  Most constructors map 1:1 to a package action in
-- 'doBuildChain'.
data MakeOp =
    MoConfigure          -- ^ configure the package
  | MoBuild              -- ^ build the package
  | MoTest               -- ^ run the package's tests
  | MoBench              -- ^ run the package's benchmarks
  | MoCopy               -- ^ copy/install build artefacts
  | MoRegister           -- ^ register the package
  | MoClean              -- ^ clean build products
  | MoDocu               -- ^ build documentation
  | MoOther Text         -- ^ extension point (ignored by 'doBuildChain')
  | MoMetaInfo -- rebuild meta info for workspace
  | MoComposed [MakeOp]  -- ^ perform the listed operations in sequence
  deriving (Eq,Ord,Show)
moNoOp = MoComposed[]
-- | The interface to the build system
-- Consumes settings, a list of targets and the operation to perform.
-- The firstOp will be applied to the first target
-- The restOp will be applied to all other targets
-- The finishOp will be applied to the last target after any op succeeded,
-- but it is applied after restOp has been tried on the last target
makePackages :: MakeSettings -> [IDEPackage] -> MakeOp -> MakeOp -> MakeOp -> WorkspaceAction
makePackages ms targets firstOp restOp finishOp = trace ("makePackages : " ++ show firstOp ++ " " ++ show restOp) $ do
    ws <- ask
    lift $ do
        -- NOTE(review): prefs' appears unused below.
        prefs' <- readIDE prefs
        -- Construct the whole plan first, then execute it step by step.
        let plan = constrMakeChain ms ws targets firstOp restOp finishOp
        trace ("makeChain : " ++ show plan) $ doBuildChain ms plan
-- ** Types
-- | An adjacency-list graph keyed by node.
type MyGraph a = Map a [a]
-- | Graph over workspace packages.
type MakeGraph = MyGraph IDEPackage
-- | A build plan: one operation on one element, plus a continuation for
-- success ('mcPos') and an optional one for failure ('mcNeg', taken by
-- 'constrCont' when the action reports False).
data Chain alpha beta =
    Chain {
        mcAction :: alpha,                  -- ^ operation to perform
        mcEle :: beta,                      -- ^ element to perform it on
        mcPos :: Chain alpha beta,          -- ^ continue here on success
        mcNeg :: Maybe (Chain alpha beta)}  -- ^ continue here on failure
  | EmptyChain
  deriving Show
-- ** Functions
-- | Construct a make chain for a package,
-- which is a plan of the build to perform.
-- Consumes settings, the workspace and a list of targets.
-- The first op is applied to the first target.
constrMakeChain :: MakeSettings -> Workspace -> [IDEPackage] -> MakeOp ->
    MakeOp -> MakeOp -> Chain MakeOp IDEPackage
-- No more targets
constrMakeChain _ _ [] _ _ _ = EmptyChain
constrMakeChain ms@MakeSettings{msMakeMode = makeMode}
        Workspace{wsPackages = packages, wsNobuildPack = noBuilds}
        targets firstOp restOp finishOp =
    trace (T.unpack $ "topsorted: " <> showTopSorted topsorted)
    constrElem targets topsorted depGraph ms noBuilds
        firstOp restOp finishOp False
  where
    -- In make mode dependents take part too; otherwise no dependency
    -- edges are considered at all.
    depGraph | makeMode = constrDepGraph packages
             | otherwise = Map.empty
    -- Dependencies first: the reversed topological order of the parent graph.
    topsorted = reverse $ topSortGraph $ constrParentGraph packages
-- Constructs a make chain
-- | Prepend the operation(s) @op@ for @target@ onto the continuation
-- @cont@.  'MoComposed' lists are unrolled into consecutive chain links,
-- all sharing the same failure continuation @mbNegCont@.
chainFor :: IDEPackage -> MakeSettings -> MakeOp -> Chain MakeOp IDEPackage
    -> Maybe (Chain MakeOp IDEPackage)
    -> Chain MakeOp IDEPackage
chainFor target settings (MoComposed [hdOp]) cont mbNegCont =
    chainFor target settings hdOp cont mbNegCont
chainFor target settings (MoComposed (hdOp:rest)) cont mbNegCont =
    chainFor target settings hdOp (chainFor target settings (MoComposed rest) cont mbNegCont)
        mbNegCont
-- NOTE(review): a bare @MoComposed []@ falls through to this equation and
-- becomes a link whose action matches no case in 'doBuildChain' (it is
-- skipped there by the catch-all clause).
chainFor target settings op cont mbNegCont = Chain {
    mcAction = op,
    mcEle = target,
    mcPos = cont,
    mcNeg = mbNegCont}
-- Recursive building of a make chain
-- The first list of packages are the targets
-- The second list of packages is the topsorted graph of all deps of all targets
-- | Walk the topologically sorted list; whenever its head is a buildable
-- target, emit that package's operations and add its dependents to the
-- target set.  The partial 'head'/'tail' uses are guarded by the null
-- checks in the first clause.
constrElem :: [IDEPackage] -> [IDEPackage] -> MakeGraph -> MakeSettings -> [IDEPackage]
    -> MakeOp -> MakeOp -> MakeOp -> Bool -> Chain MakeOp IDEPackage
constrElem currentTargets tops depGraph ms noBuilds
    firstOp restOp finishOp doneAnything
    -- finished traversing the topsorted deps or no targets
    | null currentTargets || null tops = EmptyChain
    -- operations have to be applied to current
    | elem (head tops) currentTargets && notElem (head tops) noBuilds =
        let current = head tops
            dependents = fromMaybe
                (trace ("Build>>constrMakeChain: unknown package" ++ show current)
                    [])
                (Map.lookup current depGraph)
            -- Last package in the chain with no further dependents:
            -- optionally skip the install steps.
            withoutInstall = msDontInstallLast ms && null (delete current dependents)
            -- Strip copy/register operations when skipping the install.
            filteredOps = case firstOp of
                MoComposed l -> MoComposed (filter (\e -> e /= MoCopy && e /= MoRegister) l)
                MoCopy -> MoComposed []
                MoRegister -> MoComposed []
                other -> other
        in trace ("constrElem1 deps: " ++ show dependents ++ " withoutInstall: " ++ show withoutInstall)
            $
            chainFor current ms (if withoutInstall then filteredOps else firstOp)
                (constrElem (nub $ currentTargets ++ dependents)
                    (tail tops) depGraph ms noBuilds restOp restOp finishOp True)
                (Just $ if doneAnything
                    then chainFor current ms finishOp EmptyChain Nothing
                    else EmptyChain)
    -- no operations have to be applied to current, just try the next
    | otherwise = trace ("constrElem2 " ++ show restOp) $
        constrElem currentTargets (tail tops) depGraph ms noBuilds
            firstOp restOp finishOp doneAnything
-- | Performs the operations of a build chain.
-- Each known operation dispatches to its package action, handing
-- 'constrCont' over as the success/failure continuation; unknown
-- operations (including @MoComposed []@) skip to the positive branch.
doBuildChain :: MakeSettings -> Chain MakeOp IDEPackage -> IDEAction
doBuildChain _ EmptyChain = return ()
doBuildChain ms chain@Chain{mcAction = MoConfigure} =
    postAsyncIDE $ packageConfig' (mcEle chain) (constrCont ms (mcPos chain) (mcNeg chain))
doBuildChain ms chain@Chain{mcAction = MoBuild} =
    postAsyncIDE $ buildPackage (msBackgroundBuild ms) (msJumpToWarnings ms) (not (msMakeMode ms) && msSingleBuildWithoutLinking ms)
        (mcEle chain) (constrCont ms (mcPos chain) (mcNeg chain))
doBuildChain ms chain@Chain{mcAction = MoDocu} =
    postAsyncIDE $ packageDoc' (msBackgroundBuild ms) (msJumpToWarnings ms) (mcEle chain) (constrCont ms (mcPos chain) (mcNeg chain))
doBuildChain ms chain@Chain{mcAction = MoTest} =
    postAsyncIDE $ packageTest' (msBackgroundBuild ms) (msJumpToWarnings ms) (mcEle chain) False (constrCont ms (mcPos chain) (mcNeg chain))
doBuildChain ms chain@Chain{mcAction = MoBench} =
    postAsyncIDE $ packageBench' (msBackgroundBuild ms) (msJumpToWarnings ms) (mcEle chain) False (constrCont ms (mcPos chain) (mcNeg chain))
doBuildChain ms chain@Chain{mcAction = MoCopy} =
    postAsyncIDE $ packageCopy' (mcEle chain) (constrCont ms (mcPos chain) (mcNeg chain))
doBuildChain ms chain@Chain{mcAction = MoRegister} =
    postAsyncIDE $ packageRegister' (mcEle chain) (constrCont ms (mcPos chain) (mcNeg chain))
doBuildChain ms chain@Chain{mcAction = MoClean} =
    postAsyncIDE $ packageClean' (mcEle chain) (constrCont ms (mcPos chain) (mcNeg chain))
-- NOTE(review): MoMetaInfo triggers the workspace-info rebuild but does
-- not continue with the rest of the chain -- confirm this is intended.
doBuildChain ms chain@Chain{mcAction = MoMetaInfo} =
    postAsyncIDE . void $ triggerEventIDE UpdateWorkspaceInfo
doBuildChain ms chain = doBuildChain ms (mcPos chain)
-- | Continuation handed to the package actions; the final 'Bool' is the
-- action's success flag.  On failure the negative branch is taken when
-- present, otherwise (and on success) the positive chain continues.
constrCont :: MakeSettings -> Chain MakeOp IDEPackage -> Maybe (Chain MakeOp IDEPackage) -> Bool -> IDEAction
constrCont ms _ (Just neg) False = doBuildChain ms neg
constrCont ms pos _ _ = doBuildChain ms pos
-- | Construct a dependency graph for a package
-- pointing to the packages the subject package depends on
constrParentGraph :: [IDEPackage] -> MakeGraph
constrParentGraph targets = trace (T.unpack $ "parentGraph : " <> showGraph parGraph) parGraph
  where
    -- Only dependencies that resolve to a workspace package are kept.
    parGraph = Map.fromList
        $ map (\ p -> (p,nub $ mapMaybe (depToTarget targets)(ipdDepends p))) targets
-- | Construct a dependency graph for a package
-- pointing to the packages which depend on the subject package
constrDepGraph :: [IDEPackage] -> MakeGraph
constrDepGraph packages = trace (T.unpack $ "depGraph : " <> showGraph depGraph) depGraph
  where
    -- Simply the parent graph with all edges reversed.
    depGraph = reverseGraph (constrParentGraph packages)
-- | Render a 'MakeGraph' for trace output, using display-friendly
-- package identifiers.
showGraph :: MakeGraph -> Text
showGraph mg =
    T.pack $ show
        $ map (\(k,v) -> (disp (ipdPackageId k), map (disp . ipdPackageId) v))
        $ Map.toList mg
-- | Render a topologically sorted package list for trace output.
showTopSorted :: [IDEPackage] -> Text
showTopSorted = T.pack . show . map (disp .ipdPackageId)
-- | Calculates for every dependency a target (or not):
-- the first workspace package whose name matches and whose version lies
-- within the dependency's version range.
depToTarget :: [IDEPackage] -> Dependency -> Maybe IDEPackage
depToTarget list dep = find (doesMatch dep) list
  where
    doesMatch (Dependency name versionRange) thePack =
        name == pkgName (ipdPackageId thePack)
            && withinRange (pkgVersion (ipdPackageId thePack)) versionRange
-- | Flip every edge of the graph.
reverseGraph :: Ord alpha => MyGraph alpha -> MyGraph alpha
reverseGraph = withIndexGraph transposeG
-- | Topologically sort the graph's nodes.
topSortGraph :: Ord alpha => MyGraph alpha -> [alpha]
topSortGraph myGraph = map ((\ (_,x,_)-> x) . lookup) $ topSort graph
  where
    -- NOTE: this 'lookup' (the vertex decoder) shadows 'Prelude.lookup'.
    (graph,lookup,_) = fromMyGraph myGraph
-- | Apply a transformation on the indexed 'Data.Graph' representation
-- and convert back to a 'MyGraph'.
withIndexGraph :: Ord alpha => (Graph -> Graph) -> MyGraph alpha -> MyGraph alpha
withIndexGraph idxOp myGraph = toMyGraph (idxOp graph) lookup
  where
    (graph,lookup,_) = fromMyGraph myGraph
-- | Convert to the indexed 'Data.Graph' form.  Nodes that only appear as
-- edge targets are added with an empty edge list so vertex lookups stay
-- total.
fromMyGraph :: Ord alpha => MyGraph alpha -> (Graph, Vertex -> ((), alpha , [alpha]), alpha -> Maybe Vertex)
fromMyGraph myGraph =
    graphFromEdges
        $ map (\(e,l)-> ((),e,l))
        $ graphList ++ map (\e-> (e,[])) missingEdges
  where
    mentionedEdges = nub $ concatMap snd graphList
    graphList = Map.toList myGraph
    missingEdges = mentionedEdges \\ map fst graphList
-- | Convert an indexed 'Data.Graph.Graph' back into an adjacency-list
-- map, decoding each vertex with the supplied lookup function.
toMyGraph :: Ord alpha => Graph -> (Vertex -> ((), alpha, [alpha])) -> MyGraph alpha
toMyGraph graph lookup = foldr addEdge Map.empty decodedEdges
  where
    -- insertWith (++) prepends the new target, matching (to : old).
    addEdge (from, to) = Map.insertWith (++) from [to]
    decodedEdges = map (decode *** decode) (edges graph)
    decode = (\(_, e, _) -> e) . lookup
|
JPMoresmau/leksah
|
src/IDE/Build.hs
|
gpl-2.0
| 12,262
| 1
| 20
| 3,022
| 3,272
| 1,759
| 1,513
| 205
| 6
|
-- Check that the agent's observation is intuitively correct.
module T where
import Tests.KBPBasis
-- | The knowledge formula under test: agent \"kbp\" knows the \"bit\" probe.
kbpt = kTest ("kbp" `knows_hat` "bit")
-- | The knowledge-based agent built from that test.
kbp = agent "kbp" kbpt
-- | The circuit: a nondeterministic bit xor'd with a (delayed) boot
-- signal is all the agent gets to observe.
c = proc () ->
    do boot <- delayAC nondetBitA trueA -< ()
       bit <- nondetBitA -< ()
       -- obs <- andA -< (boot, bit) -- in this case the agent knows that if obs == true then bit == true.
       obs <- xorA -< (boot, bit)
       probeA "bit" -< bit
       probeA "out" <<< kbp -< obs
-- Synthesis using the clock semantics
Just (_kautos_clk, mclk, _) = clockSynth MinNone c
ctlM_clk = mkCTLModel mclk
-- Synthesis using the SPR semantics
Just (_kautos_spr, mspr, _) = singleSPRSynth MinNone c
ctlM_spr = mkCTLModel mspr
-- In branching time, the bit could always go either way.
test_clock_bit_alternates = isOK (mc ctlM_clk (ag (ex (probe "bit") /\ ex (neg (probe "bit")))))
test_spr_bit_alternates = isOK (mc ctlM_spr (ag (ex (probe "bit") /\ ex (neg (probe "bit")))))
-- Initially the agent does not know the bit.
test_clock_knows_init = isOK (mc ctlM_clk (neg (probe "out")))
test_spr_knows_init = isOK (mc ctlM_spr (neg (probe "out")))
-- Later it does.
-- NOTE(review): the test names say "oblivious" while the comment says the
-- agent knows from the next step on -- confirm which reading is intended.
test_clock_oblivious_later = isOK (mc ctlM_clk (ax (ag (probe "out"))))
test_spr_oblivious_later = isOK (mc ctlM_spr (ax (ag (probe "out"))))
|
peteg/ADHOC
|
Tests/10_Knowledge/026_agent_obs_nondet_boot.hs
|
gpl-2.0
| 1,288
| 1
| 16
| 263
| 421
| 214
| 207
| -1
| -1
|
module Types where
-- | One generator of a braid word, indexed by strand position.  (The
-- geometric difference between 'Sigma' and 'Tau' is determined by the
-- consumers of this type -- not visible here.)
data Crossing = Sigma Int | Tau Int deriving (Eq, Show)
-- | A braid, described by its strand counts and crossing sequence.
data Braid = Braid { input :: Int            -- ^ number of input strands
                   , work :: Int             -- ^ number of work strands
                   , output :: Int           -- ^ number of output strands
                   , sigmas :: [Crossing] }  -- ^ crossings, in order
                   deriving (Eq, Show)
-- | A collection of permutations.
data Data = Data [Perm] deriving (Eq)
-- | A permutation given as a list of cycles.
newtype Perm = Perm { cycles :: [[Int]] } deriving (Eq)
total braid = input braid + work braid + output braid
-- | Validate the data against a braid: every 'Perm' must have exactly
-- @total braid@ entries in its cycle list.  (Note this counts cycles,
-- not the elements inside them -- presumably one entry per strand.)
checkData :: Braid -> Data -> Bool
checkData braid (Data d) = all ((==total braid) . len) d
  where len (Perm p) = length p
|
gltronred/braidf-ck
|
Types.hs
|
gpl-3.0
| 521
| 0
| 9
| 159
| 213
| 119
| 94
| 12
| 1
|
{--
(C)opyright 2013–2015 by Miguel Negrão
This file is part of pfVisualizer.
pfVisualizer is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
pfVisualizer is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pfVisualizer. If not, see <http://www.gnu.org/licenses/>.
--}
module Display (display) where
import Graphics.Rendering.OpenGL as GL
import Graphics.UI.GLUT
import Control.Concurrent.STM.TVar
import Control.Concurrent.STM
import PState
import Control.Monad (zipWithM_)
-- | Unit vector along x, used as a rotation axis.
rotx :: Vector3 GLfloat
rotx = Vector3 (1.0::GLfloat) 0.0 0.0
-- | Unit vector along y, used as a rotation axis.
roty :: Vector3 GLfloat
roty = Vector3 (0.0::GLfloat) 1.0 0.0
-- | Unit vector along z, used as a rotation axis.
rotz :: Vector3 GLfloat
rotz = Vector3 (0.0::GLfloat) 0.0 1.0
-- | Display callback: read the shared program state, draw the geometry
-- and the bounding box, then finish the frame.
display :: TVar PST -> IO ()
display state = do
    pst <- atomically $ readTVar state
    a <- display1 pst
    display2 a
    displayBox
    displayEnd
-- | Finish the frame by swapping the back and front buffers.
displayEnd :: IO ()
displayEnd = swapBuffers
-- | First drawing phase: clear the buffers and set up the modelview
-- transform (scale, then rotations about x, y and z), passing the
-- geometry and colours through for the second phase.
display1 :: PST -> IO (Geom, [Cl])
display1 (PST geo cs (xdeg, ydeg, zdeg) scaleFactor _) = do
    --putStrLn "doing display"
    --hFlush stdout
    clear [ColorBuffer,DepthBuffer]
    loadIdentity
    --let scaleFactor = 0.4
    -- NOTE(review): the y scale is negated, flipping the vertical axis.
    scale (scaleFactor::GLfloat) (-scaleFactor) scaleFactor
    --print (xdeg, ydeg, zdeg)
    --hFlush stdout
    rotate xdeg rotx
    rotate ydeg roty
    rotate zdeg rotz
    return (geo,cs)
-- | Second drawing phase: render the geometry with its colour list.
display2 :: (Geom, [Color3 GLfloat]) -> IO ()
display2 (geo,cs) = renderGeom geo cs
--let cubeW = (0.5::GLfloat)
--color $ Color3 (1.0::GLfloat) (0.0::GLfloat) (0.0::GLfloat)
--cube cubeW
-- | Draw the bounding box: most edges in white, the \"front\" face in red
-- and the \"up\" face in green (see the cubeWireFrame* helpers).
displayBox :: IO ()
displayBox = do
    let x = 1.3
    preservingMatrix $ do
        color $ Color3 (1.0::GLfloat) (1.0::GLfloat) (1.0::GLfloat)
        {-# SCC "cubeWireFrameRestMe" #-}cubeWireFrameRest x
    preservingMatrix $ do
        color $ Color3 (1.0::GLfloat) (0.0::GLfloat) (0.0::GLfloat)
        cubeWireFrameFront x
    preservingMatrix $ do
        color $ Color3 (0.0::GLfloat) (1.0::GLfloat) (0.0::GLfloat)
        cubeWireFrameUp x
--testCube
-- | Emit every (x, y, z) triple as an OpenGL vertex.
vertify3 :: [(GLfloat,GLfloat,GLfloat)] -> IO ()
vertify3 points = mapM_ emit points
  where emit (x, y, z) = vertex (Vertex3 x y z)
-- | The 24 corner vertices of an axis-aligned cube of half-width @w@,
-- as six quads (one per face) in the original winding order.
cubeVertices :: Num t => t -> [(t, t, t)]
cubeVertices w = concat [xPos, yPos, zPos, xNeg, yNeg, zNeg]
  where
    xPos = [( w, w, w), ( w, w,-w), ( w,-w,-w), ( w,-w, w)]
    yPos = [( w, w, w), ( w, w,-w), (-w, w,-w), (-w, w, w)]
    zPos = [( w, w, w), ( w,-w, w), (-w,-w, w), (-w, w, w)]
    xNeg = [(-w, w, w), (-w, w,-w), (-w,-w,-w), (-w,-w, w)]
    yNeg = [( w,-w, w), ( w,-w,-w), (-w,-w,-w), (-w,-w, w)]
    zNeg = [( w, w,-w), ( w,-w,-w), (-w,-w,-w), (-w, w,-w)]
-- | Draw a solid cube of half-width @w@ as six quads.
cube :: GLfloat -> IO ()
cube w = renderPrimitive Quads $ vertify3 (cubeVertices w)
-- | Draw the line segments of the cube face at x = +r (the face that
-- 'displayBox' paints red).
cubeWireFrameFront :: GLfloat -> IO ()
cubeWireFrameFront r = renderPrimitive Lines $ vertify3
    [ ( w,-w, w), ( w,-w,-w), ( w, w, w), ( w, w,-w), ( w,-w, w), ( w,w, w), ( w,-w, -w), ( w, w, -w) ]
  where w = r  -- was 'r * 1.0', a no-op scaling
-- | Draw the line segments of the cube face at z = +r (the face that
-- 'displayBox' paints green).
cubeWireFrameUp :: GLfloat -> IO ()
cubeWireFrameUp r = renderPrimitive Lines $ vertify3
    [ ( w,w, w), ( -w,w,w), ( w, w, w), ( w, -w, w), ( -w, -w, w), ( w, -w, w), ( -w,-w, w), ( -w, w, w) ]
  where w = r  -- was 'r * 1.0', a no-op scaling
-- | Draw the remaining box edges not covered by the front and up faces.
cubeWireFrameRest :: GLfloat -> IO ()
cubeWireFrameRest w = renderPrimitive Lines $ vertify3
    [
      (-w, w, w), (-w, w,-w), (-w,-w, w), (-w,-w,-w),
      ( w, w,-w), (-w, w,-w),
      (-w, w,-w), (-w,-w,-w), (-w,-w,-w), ( w,-w,-w) ]
-- | Draw all twelve edges of a cube of half-width @w@ as line segments.
cubeWireFrame :: GLfloat -> IO ()
cubeWireFrame w = renderPrimitive Lines $ vertify3
    [ ( w,-w, w), ( w, w, w), ( w, w, w), (-w, w, w),
      (-w, w, w), (-w,-w, w), (-w,-w, w), ( w,-w, w),
      ( w,-w, w), ( w,-w,-w), ( w, w, w), ( w, w,-w),
      (-w, w, w), (-w, w,-w), (-w,-w, w), (-w,-w,-w),
      ( w,-w,-w), ( w, w,-w), ( w, w,-w), (-w, w,-w),
      (-w, w,-w), (-w,-w,-w), (-w,-w,-w), ( w,-w,-w) ]
-- | Render one triangle as a filled polygon in the given colour.
-- The SCC pragmas are kept for cost-centre profiling.
renderTrigs0 :: Color a => [Vt] -> a -> IO ()
renderTrigs0 tri c = do
    {-# SCC "rgColor" #-}color c
    let v = {-# SCC "rgVertify" #-}vertifyTri tri
    {-# SCC "rgRenderPrimitive" #-}renderPrimitive Polygon v
-- | Like 'renderTrigs0', but inside its own matrix scope.
renderTrigs :: Color a => [Vt] -> a -> IO ()
renderTrigs tri c = preservingMatrix $ renderTrigs0 tri c
-- | Render a 'Geom' with one colour per element: triangles as filled
-- polygons, points as coloured GL points, and cube markers as small
-- solid cubes with a grey wireframe outline.
renderGeom :: Geom -> [Color3 GLfloat] -> IO ()
renderGeom (PState.Triangles tris) cs = zipWithM_ renderTrigs tris cs
renderGeom (PState.Points ps) cs = renderPrimitive GL.Points $ zipWithM_ (\p (Color3 a b c) -> preservingMatrix $ do
        currentColor $= Color4 a b c 1.0
        vertex p
    ) ps cs
renderGeom (PState.Cubes ps) cs = zipWithM_ f ps cs
  where
    -- Draw one cube marker: translate to the point, fill, then outline.
    f::Vt -> Color3 GLfloat -> IO ()
    f (Vertex3 x y z) clr = preservingMatrix $ do
        color clr
        translate $ Vector3 x y z-- (0.0::GLfloat) 0.0 0.0
        let cubeW = (0.06::GLfloat)
        cube cubeW
        let wireFrameIntensity = (0.5::GLfloat)
        color $ Color3 wireFrameIntensity wireFrameIntensity wireFrameIntensity
        cubeWireFrame cubeW
-- | Emit the three vertices of a triangle; lists of any other length are
-- silently ignored.
vertifyTri :: Tri -> IO ()
vertifyTri [v1,v2,v3] = do
    vertex v1
    vertex v2
    vertex v3
vertifyTri _ = return ()
{--
testCube :: IO ()
testCube = do
--test cube
let xy z = [ (1.0, 1.0, z), (1.0, -1.0, z), (-1.0, -1.0, z), (-1.0, 1.0, z) ]
let xz y = [ (1.0, y, 1.0), (1.0, y, -1.0), (-1.0, y, -1.0), (-1.0, y, 1.0) ]
let yz y = [ (y, 1.0, 1.0), (y, 1.0, -1.0), (y, -1.0, -1.0), (y, -1.0, 1.0) ]
--color $ Color3 (0.0::GLfloat) (1.0::GLfloat) (0.0::GLfloat)
--renderPrimitive Quads $ vertify3 $ f (0.5)
-- xy
color $ Color3 (1.0::GLfloat) (0.0::GLfloat) (0.0::GLfloat)
renderPrimitive Quads $ vertify3 $ xy (1.0) -- up red
color $ Color3 (0.0::GLfloat) (1.0::GLfloat) (0.0::GLfloat)
renderPrimitive Quads $ vertify3 $ xy (-1.0) -- down green
-- xz
color $ Color3 (0.0::GLfloat) (0.0::GLfloat) (1.0::GLfloat)
renderPrimitive Quads $ vertify3 $ xz (1.0) --left blue --left and right seem to be switched...
color $ Color3 (1.0::GLfloat) (0.0::GLfloat) (1.0::GLfloat)
renderPrimitive Quads $ vertify3 $ xz (-1.0) --right purple
-- yz
color $ Color3 (0.0::GLfloat) (1.0::GLfloat) (1.0::GLfloat)
renderPrimitive Quads $ vertify3 $ yz (1.0) --front cyan
color $ Color3 (1.0::GLfloat) (1.0::GLfloat) (0.0::GLfloat)
renderPrimitive Quads $ vertify3 $ yz (-1.0) --back yellow
--}
-- testing other stuff
{-
randomCPolygon = do
list <- randomVertexs
return $ ComplexPolygon [ ComplexContour list ]
randomVertexs :: IO [ AnnotatedVertex (Color3 GLfloat) ]
randomVertexs = mapM (const $ AnnotatedVertex <$> randomPoint <*> randomColor ) [1..100]
randomPoint :: IO (Vertex3 GLdouble)
randomPoint = random3floats f
where f x y z = Vertex3 (realToFrac x) (realToFrac y) (realToFrac z)
randomColor :: IO (Color3 GLfloat)
randomColor = random3floats f
where f x y z = Color3 (realToFrac x) (realToFrac y) (realToFrac z)
random3floats f = do
x <- randomIO :: IO Float
y <- randomIO :: IO Float
z <- randomIO :: IO Float
return $ f x y z
-}
|
miguel-negrao/pfVisualizer
|
src/Display.hs
|
gpl-3.0
| 7,618
| 8
| 19
| 2,018
| 2,386
| 1,353
| 1,033
| -1
| -1
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE BangPatterns
, CPP
, ExistentialQuantification
, NoImplicitPrelude
, TypeSynonymInstances
, FlexibleInstances
#-}
module GHC.Event.TimerManager
( -- * Types
TimerManager
-- * Creation
, new
, newWith
, newDefaultBackend
-- * Running
, finished
, loop
, step
, shutdown
, cleanup
, wakeManager
-- * Registering interest in timeout events
, TimeoutCallback
, TimeoutKey
, registerTimeout
, updateTimeout
, unregisterTimeout
) where
#include "EventConfig.h"
------------------------------------------------------------------------
-- Imports
import Control.Exception (finally)
import Control.Monad ((=<<), liftM, sequence_, when)
import Data.IORef (IORef, atomicModifyIORef', mkWeakIORef, newIORef, readIORef,
writeIORef)
import Data.Maybe (Maybe(..))
import Data.Monoid (mempty)
import GHC.Base
import GHC.Conc.Signal (runHandlers)
import GHC.Num (Num(..))
import GHC.Real ((/), fromIntegral )
import GHC.Show (Show(..))
import GHC.Event.Clock (getMonotonicTime)
import GHC.Event.Control
import GHC.Event.Internal (Backend, Event, evtRead, Timeout(..))
import GHC.Event.Unique (Unique, UniqueSource, newSource, newUnique)
import System.Posix.Types (Fd)
import qualified GHC.Event.Internal as I
import qualified GHC.Event.PSQ as Q
#if defined(HAVE_POLL)
import qualified GHC.Event.Poll as Poll
#else
# error not implemented for this operating system
#endif
------------------------------------------------------------------------
-- Types
-- | A timeout registration cookie.
newtype TimeoutKey   = TK Unique
    deriving (Eq)

-- | Callback invoked on timeout events.
type TimeoutCallback = IO ()

-- | Lifecycle state of a 'TimerManager'.
data State = Created   -- ^ allocated, loop not yet started (see 'newWith')
           | Running   -- ^ event loop in progress (set in 'loop')
           | Dying     -- ^ shutdown requested (set in 'shutdown')
           | Finished  -- ^ loop exited / resources released ('cleanup')
             deriving (Eq, Show)

-- | A priority search queue, with timeouts as priorities.
type TimeoutQueue = Q.PSQ TimeoutCallback
{-
Instead of directly modifying the 'TimeoutQueue' in
e.g. 'registerTimeout' we keep a list of edits to perform, in the form
of a chain of function closures, and have the I/O manager thread
perform the edits later. This exist to address the following GC
problem:
Since e.g. 'registerTimeout' doesn't force the evaluation of the
thunks inside the 'emTimeouts' IORef a number of thunks build up
inside the IORef. If the I/O manager thread doesn't evaluate these
thunks soon enough they'll get promoted to the old generation and
become roots for all subsequent minor GCs.
When the thunks eventually get evaluated they will each create a new
intermediate 'TimeoutQueue' that immediately becomes garbage. Since
the thunks serve as roots until the next major GC these intermediate
'TimeoutQueue's will get copied unnecesarily in the next minor GC,
increasing GC time. This problem is known as "floating garbage".
Keeping a list of edits doesn't stop this from happening but makes the
amount of data that gets copied smaller.
TODO: Evaluate the content of the IORef to WHNF on each insert once
this bug is resolved: http://hackage.haskell.org/trac/ghc/ticket/3838
-}
-- | An edit to apply to a 'TimeoutQueue'.
type TimeoutEdit = TimeoutQueue -> TimeoutQueue

-- | The event manager state.
data TimerManager = TimerManager
    { emBackend      :: !Backend                              -- ^ polling back end
    , emTimeouts     :: {-# UNPACK #-} !(IORef TimeoutQueue)  -- ^ pending timeouts
    , emState        :: {-# UNPACK #-} !(IORef State)         -- ^ lifecycle state
    , emUniqueSource :: {-# UNPACK #-} !UniqueSource          -- ^ source of 'TimeoutKey's
    , emControl      :: {-# UNPACK #-} !Control               -- ^ control channel (wakeup/die/signal)
    }
------------------------------------------------------------------------
-- Creation
-- | React to activity on a control descriptor: plain wakeups are
-- ignored, a die message moves the manager to 'Finished', and signal
-- messages are dispatched to their handlers.
handleControlEvent :: TimerManager -> Fd -> Event -> IO ()
handleControlEvent mgr fd _evt = do
  msg <- readControlMessage (emControl mgr) fd
  case msg of
    CMsgWakeup      -> return ()
    CMsgDie         -> writeIORef (emState mgr) Finished
    CMsgSignal fp s -> runHandlers fp s
-- | Create the default polling back end for this platform
-- (selected at compile time via CPP).
newDefaultBackend :: IO Backend
#if defined(HAVE_POLL)
newDefaultBackend = Poll.new
#else
newDefaultBackend = error "no back end for this platform"
#endif
-- | Create a new event manager.
new :: IO TimerManager
new = newWith =<< newDefaultBackend

-- | Create a new event manager on the given back end.  A weak-reference
-- finalizer on the state tears down the back end and control channel if
-- the manager is garbage collected without having finished.
newWith :: Backend -> IO TimerManager
newWith be = do
  timeouts <- newIORef Q.empty
  ctrl <- newControl True
  state <- newIORef Created
  us <- newSource
  _ <- mkWeakIORef state $ do
         -- Claim the transition to Finished; only clean up once.
         st <- atomicModifyIORef' state $ \s -> (Finished, s)
         when (st /= Finished) $ do
           I.delete be
           closeControl ctrl
  let mgr = TimerManager { emBackend = be
                         , emTimeouts = timeouts
                         , emState = state
                         , emUniqueSource = us
                         , emControl = ctrl
                         }
  -- Watch both control descriptors for readability.
  _ <- I.modifyFd be (controlReadFd ctrl) mempty evtRead
  _ <- I.modifyFd be (wakeupReadFd ctrl) mempty evtRead
  return mgr
-- | Asynchronously shuts down the event manager, if running.
shutdown :: TimerManager -> IO ()
shutdown mgr = do
  -- Mark the manager as dying; only a loop that was actually running
  -- needs the die message to notice.
  state <- atomicModifyIORef' (emState mgr) $ \s -> (Dying, s)
  when (state == Running) $ sendDie (emControl mgr)
-- | Has the manager reached its terminal 'Finished' state?
finished :: TimerManager -> IO Bool
finished mgr = fmap (== Finished) (readIORef (emState mgr))
-- | Mark the manager 'Finished' and release its back end and control
-- resources.
cleanup :: TimerManager -> IO ()
cleanup mgr = do
  writeIORef (emState mgr) Finished
  I.delete (emBackend mgr)
  closeControl (emControl mgr)
------------------------------------------------------------------------
-- Event loop

-- | Start handling events. This function loops until told to stop,
-- using 'shutdown'.
--
-- /Note/: This loop can only be run once per 'TimerManager', as it
-- closes all of its control resources when it finishes.
loop :: TimerManager -> IO ()
loop mgr = do
  -- Atomically claim the Created -> Running transition, so only one
  -- caller can ever drive the loop.
  state <- atomicModifyIORef' (emState mgr) $ \s -> case s of
    Created -> (Running, s)
    _       -> (s, s)
  case state of
    Created -> go `finally` cleanup mgr
    Dying   -> cleanup mgr
    _       -> do cleanup mgr
                  error $ "GHC.Event.Manager.loop: state is already " ++
                      show state
  where
    go = do running <- step mgr
            when running go
-- | Run one iteration of the event loop: poll the backend (waking no
-- later than the earliest pending timeout), fire expired callbacks, and
-- report whether the manager should keep running.
step :: TimerManager -> IO Bool
step mgr = do
  timeout <- mkTimeout
  _ <- I.poll (emBackend mgr) (Just timeout) (handleControlEvent mgr)
  state <- readIORef (emState mgr)
  state `seq` return (state == Running)
  where
    -- | Call all expired timer callbacks and return the time to the
    -- next timeout.
    mkTimeout :: IO Timeout
    mkTimeout = do
      now <- getMonotonicTime
      (expired, timeout) <- atomicModifyIORef' (emTimeouts mgr) $ \tq ->
           let (expired, tq') = Q.atMost now tq
               timeout = case Q.minView tq' of
                 Nothing             -> Forever
                 Just (Q.E _ t _, _) ->
                     -- This value will always be positive since the call
                     -- to 'atMost' above removed any timeouts <= 'now'
                     let t' = t - now in t' `seq` Timeout t'
           in (tq', (expired, timeout))
      -- fire the expired callbacks in queue order
      -- (was: sequence_ $ map Q.value expired)
      mapM_ Q.value expired
      return timeout
-- | Wake up the event manager.
-- Sends a wakeup message on the control channel (presumably to make a
-- blocked poll re-read the timeout queue -- see 'editTimeouts' callers).
wakeManager :: TimerManager -> IO ()
wakeManager mgr = sendWakeup (emControl mgr)
------------------------------------------------------------------------
-- Registering interest in timeout events

-- | Register a timeout in the given number of microseconds. The
-- returned 'TimeoutKey' can be used to later unregister or update the
-- timeout. The timeout is automatically unregistered after the given
-- time has passed.
registerTimeout :: TimerManager -> Int -> TimeoutCallback -> IO TimeoutKey
registerTimeout mgr us cb = do
  !key <- newUnique (emUniqueSource mgr)
  -- A non-positive delay runs the callback immediately instead of
  -- queueing it.
  if us <= 0 then cb
  else do
    now <- getMonotonicTime
    let expTime = fromIntegral us / 1000000.0 + now
    -- Queue the edit, then wake the manager so it recomputes its poll
    -- timeout.
    editTimeouts mgr (Q.insert key expTime cb)
    wakeManager mgr
  return $ TK key
-- | Unregister an active timeout.
unregisterTimeout :: TimerManager -> TimeoutKey -> IO ()
unregisterTimeout mgr (TK key) = do
  editTimeouts mgr (Q.delete key)
  wakeManager mgr

-- | Update an active timeout to fire in the given number of
-- microseconds.
updateTimeout :: TimerManager -> TimeoutKey -> Int -> IO ()
updateTimeout mgr (TK key) us = do
  now <- getMonotonicTime
  let expTime = fromIntegral us / 1000000.0 + now
  editTimeouts mgr (Q.adjust (const expTime) key)
  wakeManager mgr

-- | Apply an edit to the shared timeout queue.  See the comment above
-- the 'TimeoutEdit' type for why edits are applied as closures.
editTimeouts :: TimerManager -> TimeoutEdit -> IO ()
editTimeouts mgr g = atomicModifyIORef' (emTimeouts mgr) $ \tq -> (g tq, ())
|
jwiegley/ghc-release
|
libraries/base/GHC/Event/TimerManager.hs
|
gpl-3.0
| 8,583
| 0
| 23
| 2,048
| 1,812
| 963
| 849
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.TagManager.Accounts.Containers.Workspaces.Folders.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a GTM Folder.
--
-- /See:/ <https://developers.google.com/tag-manager Tag Manager API Reference> for @tagmanager.accounts.containers.workspaces.folders.create@.
module Network.Google.Resource.TagManager.Accounts.Containers.Workspaces.Folders.Create
(
-- * REST Resource
AccountsContainersWorkspacesFoldersCreateResource
-- * Creating a Request
, accountsContainersWorkspacesFoldersCreate
, AccountsContainersWorkspacesFoldersCreate
-- * Request Lenses
, acwfcParent
, acwfcXgafv
, acwfcUploadProtocol
, acwfcAccessToken
, acwfcUploadType
, acwfcPayload
, acwfcCallback
) where
import Network.Google.Prelude
import Network.Google.TagManager.Types
-- | A resource alias for @tagmanager.accounts.containers.workspaces.folders.create@ method which the
-- 'AccountsContainersWorkspacesFoldersCreate' request conforms to.
type AccountsContainersWorkspacesFoldersCreateResource
     =
     -- POST tagmanager/v2/{parent}/folders with a JSON 'Folder' body,
     -- returning the created 'Folder'.
     "tagmanager" :>
       "v2" :>
         Capture "parent" Text :>
           "folders" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON] Folder :> Post '[JSON] Folder
-- | Creates a GTM Folder.
--
-- /See:/ 'accountsContainersWorkspacesFoldersCreate' smart constructor.
data AccountsContainersWorkspacesFoldersCreate =
  AccountsContainersWorkspacesFoldersCreate'
    { _acwfcParent :: !Text                  -- ^ parent workspace path (required)
    , _acwfcXgafv :: !(Maybe Xgafv)          -- ^ V1 error format
    , _acwfcUploadProtocol :: !(Maybe Text)  -- ^ upload protocol, e.g. \"raw\"
    , _acwfcAccessToken :: !(Maybe Text)     -- ^ OAuth access token
    , _acwfcUploadType :: !(Maybe Text)      -- ^ legacy upload protocol
    , _acwfcPayload :: !Folder               -- ^ request body (required)
    , _acwfcCallback :: !(Maybe Text)        -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsContainersWorkspacesFoldersCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acwfcParent'
--
-- * 'acwfcXgafv'
--
-- * 'acwfcUploadProtocol'
--
-- * 'acwfcAccessToken'
--
-- * 'acwfcUploadType'
--
-- * 'acwfcPayload'
--
-- * 'acwfcCallback'
accountsContainersWorkspacesFoldersCreate
    :: Text -- ^ 'acwfcParent'
    -> Folder -- ^ 'acwfcPayload'
    -> AccountsContainersWorkspacesFoldersCreate
accountsContainersWorkspacesFoldersCreate pAcwfcParent_ pAcwfcPayload_ =
  -- All optional query parameters start out unset.
  AccountsContainersWorkspacesFoldersCreate'
    { _acwfcParent = pAcwfcParent_
    , _acwfcXgafv = Nothing
    , _acwfcUploadProtocol = Nothing
    , _acwfcAccessToken = Nothing
    , _acwfcUploadType = Nothing
    , _acwfcPayload = pAcwfcPayload_
    , _acwfcCallback = Nothing
    }
-- Request lenses, one per field of the request record.

-- | GTM Workspace\'s API relative path. Example:
-- accounts\/{account_id}\/containers\/{container_id}\/workspaces\/{workspace_id}
acwfcParent :: Lens' AccountsContainersWorkspacesFoldersCreate Text
acwfcParent
  = lens _acwfcParent (\ s a -> s{_acwfcParent = a})

-- | V1 error format.
acwfcXgafv :: Lens' AccountsContainersWorkspacesFoldersCreate (Maybe Xgafv)
acwfcXgafv
  = lens _acwfcXgafv (\ s a -> s{_acwfcXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
acwfcUploadProtocol :: Lens' AccountsContainersWorkspacesFoldersCreate (Maybe Text)
acwfcUploadProtocol
  = lens _acwfcUploadProtocol
      (\ s a -> s{_acwfcUploadProtocol = a})

-- | OAuth access token.
acwfcAccessToken :: Lens' AccountsContainersWorkspacesFoldersCreate (Maybe Text)
acwfcAccessToken
  = lens _acwfcAccessToken
      (\ s a -> s{_acwfcAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
acwfcUploadType :: Lens' AccountsContainersWorkspacesFoldersCreate (Maybe Text)
acwfcUploadType
  = lens _acwfcUploadType
      (\ s a -> s{_acwfcUploadType = a})

-- | Multipart request metadata.
acwfcPayload :: Lens' AccountsContainersWorkspacesFoldersCreate Folder
acwfcPayload
  = lens _acwfcPayload (\ s a -> s{_acwfcPayload = a})

-- | JSONP
acwfcCallback :: Lens' AccountsContainersWorkspacesFoldersCreate (Maybe Text)
acwfcCallback
  = lens _acwfcCallback
      (\ s a -> s{_acwfcCallback = a})
-- | Sends the request through the shared 'tagManagerService'; requires
-- the @tagmanager.edit.containers@ OAuth scope.
instance GoogleRequest
           AccountsContainersWorkspacesFoldersCreate
         where
        type Rs AccountsContainersWorkspacesFoldersCreate =
             Folder
        type Scopes AccountsContainersWorkspacesFoldersCreate
             =
             '["https://www.googleapis.com/auth/tagmanager.edit.containers"]
        requestClient
          AccountsContainersWorkspacesFoldersCreate'{..}
          = go _acwfcParent _acwfcXgafv _acwfcUploadProtocol
              _acwfcAccessToken
              _acwfcUploadType
              _acwfcCallback
              (Just AltJSON)
              _acwfcPayload
              tagManagerService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           AccountsContainersWorkspacesFoldersCreateResource)
                      mempty
|
brendanhay/gogol
|
gogol-tagmanager/gen/Network/Google/Resource/TagManager/Accounts/Containers/Workspaces/Folders/Create.hs
|
mpl-2.0
| 5,919
| 0
| 18
| 1,297
| 786
| 459
| 327
| 121
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Logging.BillingAccounts.Sinks.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates a sink. If the named sink doesn\'t exist, then this method is
-- identical to sinks.create. If the named sink does exist, then this
-- method replaces the following fields in the existing sink with values
-- from the new sink: destination, filter, output_version_format,
-- start_time, and end_time. The updated filter might also have a new
-- writer_identity; see the unique_writer_identity field.
--
-- /See:/ <https://cloud.google.com/logging/docs/ Stackdriver Logging API Reference> for @logging.billingAccounts.sinks.update@.
module Network.Google.Resource.Logging.BillingAccounts.Sinks.Update
(
-- * REST Resource
BillingAccountsSinksUpdateResource
-- * Creating a Request
, billingAccountsSinksUpdate
, BillingAccountsSinksUpdate
-- * Request Lenses
, basuXgafv
, basuUniqueWriterIdentity
, basuUploadProtocol
, basuPp
, basuAccessToken
, basuUploadType
, basuPayload
, basuBearerToken
, basuSinkName
, basuCallback
) where
import Network.Google.Logging.Types
import Network.Google.Prelude
-- | A resource alias for @logging.billingAccounts.sinks.update@ method which the
-- 'BillingAccountsSinksUpdate' request conforms to.
--
-- Each @:>@ component below is one path capture or query parameter; the
-- positional arguments in 'requestClient' must follow this exact order.
type BillingAccountsSinksUpdateResource =
     "v2" :>
       Capture "sinkName" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "uniqueWriterIdentity" Bool :>
             QueryParam "upload_protocol" Text :>
               QueryParam "pp" Bool :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "bearer_token" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] LogSink :> Put '[JSON] LogSink
-- | Updates a sink. If the named sink doesn\'t exist, then this method is
-- identical to sinks.create. If the named sink does exist, then this
-- method replaces the following fields in the existing sink with values
-- from the new sink: destination, filter, output_version_format,
-- start_time, and end_time. The updated filter might also have a new
-- writer_identity; see the unique_writer_identity field.
--
-- /See:/ 'billingAccountsSinksUpdate' smart constructor.
--
-- All fields are strict; optional query parameters are 'Maybe'-typed.
-- Field order here determines the derived 'Eq'\/'Show' behaviour.
data BillingAccountsSinksUpdate = BillingAccountsSinksUpdate'
    { _basuXgafv :: !(Maybe Xgafv)
    , _basuUniqueWriterIdentity :: !(Maybe Bool)
    , _basuUploadProtocol :: !(Maybe Text)
    , _basuPp :: !Bool
    , _basuAccessToken :: !(Maybe Text)
    , _basuUploadType :: !(Maybe Text)
    , _basuPayload :: !LogSink
    , _basuBearerToken :: !(Maybe Text)
    , _basuSinkName :: !Text
    , _basuCallback :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BillingAccountsSinksUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'basuXgafv'
--
-- * 'basuUniqueWriterIdentity'
--
-- * 'basuUploadProtocol'
--
-- * 'basuPp'
--
-- * 'basuAccessToken'
--
-- * 'basuUploadType'
--
-- * 'basuPayload'
--
-- * 'basuBearerToken'
--
-- * 'basuSinkName'
--
-- * 'basuCallback'
billingAccountsSinksUpdate
    :: LogSink -- ^ 'basuPayload'
    -> Text -- ^ 'basuSinkName'
    -> BillingAccountsSinksUpdate
billingAccountsSinksUpdate pBasuPayload_ pBasuSinkName_ =
  -- Required fields first; pretty-printing defaults to on, all other
  -- optional query parameters start unset.
  BillingAccountsSinksUpdate'
    { _basuPayload = pBasuPayload_
    , _basuSinkName = pBasuSinkName_
    , _basuPp = True
    , _basuXgafv = Nothing
    , _basuUniqueWriterIdentity = Nothing
    , _basuUploadProtocol = Nothing
    , _basuAccessToken = Nothing
    , _basuUploadType = Nothing
    , _basuBearerToken = Nothing
    , _basuCallback = Nothing
    }
-- | V1 error format.
basuXgafv :: Lens' BillingAccountsSinksUpdate (Maybe Xgafv)
basuXgafv = lens _basuXgafv (\record v -> record {_basuXgafv = v})

-- | Optional. See sinks.create for a description of this field. When
-- updating a sink, the effect of this field on the value of
-- writer_identity in the updated sink depends on both the old and new
-- values of this field: If the old and new values of this field are both
-- false or both true, then there is no change to the sink\'s
-- writer_identity. If the old value was false and the new value is true,
-- then writer_identity is changed to a unique service account. It is an
-- error if the old value was true and the new value is false.
basuUniqueWriterIdentity :: Lens' BillingAccountsSinksUpdate (Maybe Bool)
basuUniqueWriterIdentity =
  lens _basuUniqueWriterIdentity
       (\record v -> record {_basuUniqueWriterIdentity = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
basuUploadProtocol :: Lens' BillingAccountsSinksUpdate (Maybe Text)
basuUploadProtocol =
  lens _basuUploadProtocol (\record v -> record {_basuUploadProtocol = v})

-- | Pretty-print response.
basuPp :: Lens' BillingAccountsSinksUpdate Bool
basuPp = lens _basuPp (\record v -> record {_basuPp = v})

-- | OAuth access token.
basuAccessToken :: Lens' BillingAccountsSinksUpdate (Maybe Text)
basuAccessToken =
  lens _basuAccessToken (\record v -> record {_basuAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
basuUploadType :: Lens' BillingAccountsSinksUpdate (Maybe Text)
basuUploadType =
  lens _basuUploadType (\record v -> record {_basuUploadType = v})

-- | Multipart request metadata.
basuPayload :: Lens' BillingAccountsSinksUpdate LogSink
basuPayload = lens _basuPayload (\record v -> record {_basuPayload = v})

-- | OAuth bearer token.
basuBearerToken :: Lens' BillingAccountsSinksUpdate (Maybe Text)
basuBearerToken =
  lens _basuBearerToken (\record v -> record {_basuBearerToken = v})

-- | Required. The full resource name of the sink to update, including the
-- parent resource and the sink identifier:
-- \"projects\/[PROJECT_ID]\/sinks\/[SINK_ID]\"
-- \"organizations\/[ORGANIZATION_ID]\/sinks\/[SINK_ID]\" Example:
-- \"projects\/my-project-id\/sinks\/my-sink-id\".
basuSinkName :: Lens' BillingAccountsSinksUpdate Text
basuSinkName = lens _basuSinkName (\record v -> record {_basuSinkName = v})

-- | JSONP
basuCallback :: Lens' BillingAccountsSinksUpdate (Maybe Text)
basuCallback = lens _basuCallback (\record v -> record {_basuCallback = v})
-- NOTE(review): the positional arguments to 'go' must mirror the order of
-- the capture and query parameters in 'BillingAccountsSinksUpdateResource';
-- '_basuPp' is always sent because it has a non-'Maybe' default.
instance GoogleRequest BillingAccountsSinksUpdate
         where
        type Rs BillingAccountsSinksUpdate = LogSink
        type Scopes BillingAccountsSinksUpdate =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/logging.admin"]
        requestClient BillingAccountsSinksUpdate'{..}
          = go _basuSinkName _basuXgafv
              _basuUniqueWriterIdentity
              _basuUploadProtocol
              (Just _basuPp)
              _basuAccessToken
              _basuUploadType
              _basuBearerToken
              _basuCallback
              (Just AltJSON)
              _basuPayload
              loggingService
          where go
                  = buildClient
                      (Proxy :: Proxy BillingAccountsSinksUpdateResource)
                      mempty
|
rueshyna/gogol
|
gogol-logging/gen/Network/Google/Resource/Logging/BillingAccounts/Sinks/Update.hs
|
mpl-2.0
| 8,002
| 0
| 19
| 1,816
| 1,033
| 607
| 426
| 145
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DeploymentManager.Deployments.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a deployment and all of the resources in the deployment.
--
-- /See:/ <https://cloud.google.com/deployment-manager Cloud Deployment Manager V2 API Reference> for @deploymentmanager.deployments.delete@.
module Network.Google.Resource.DeploymentManager.Deployments.Delete
(
-- * REST Resource
DeploymentsDeleteResource
-- * Creating a Request
, deploymentsDelete
, DeploymentsDelete
-- * Request Lenses
, ddXgafv
, ddUploadProtocol
, ddProject
, ddAccessToken
, ddUploadType
, ddDeletePolicy
, ddCallback
, ddDeployment
) where
import Network.Google.DeploymentManager.Types
import Network.Google.Prelude
-- | A resource alias for @deploymentmanager.deployments.delete@ method which the
-- 'DeploymentsDelete' request conforms to.
--
-- Each @:>@ component below is one path segment, capture, or query
-- parameter; 'requestClient' supplies arguments in this exact order.
type DeploymentsDeleteResource =
     "deploymentmanager" :>
       "v2" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "deployments" :>
                 Capture "deployment" Text :>
                   QueryParam "$.xgafv" Xgafv :>
                     QueryParam "upload_protocol" Text :>
                       QueryParam "access_token" Text :>
                         QueryParam "uploadType" Text :>
                           QueryParam "deletePolicy"
                             DeploymentsDeleteDeletePolicy
                             :>
                             QueryParam "callback" Text :>
                               QueryParam "alt" AltJSON :>
                                 Delete '[JSON] Operation
-- | Deletes a deployment and all of the resources in the deployment.
--
-- /See:/ 'deploymentsDelete' smart constructor.
--
-- All fields are strict; optional query parameters are 'Maybe'-typed.
data DeploymentsDelete =
  DeploymentsDelete'
    { _ddXgafv :: !(Maybe Xgafv)
    , _ddUploadProtocol :: !(Maybe Text)
    , _ddProject :: !Text
    , _ddAccessToken :: !(Maybe Text)
    , _ddUploadType :: !(Maybe Text)
    , _ddDeletePolicy :: !DeploymentsDeleteDeletePolicy
    , _ddCallback :: !(Maybe Text)
    , _ddDeployment :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DeploymentsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddXgafv'
--
-- * 'ddUploadProtocol'
--
-- * 'ddProject'
--
-- * 'ddAccessToken'
--
-- * 'ddUploadType'
--
-- * 'ddDeletePolicy'
--
-- * 'ddCallback'
--
-- * 'ddDeployment'
deploymentsDelete
    :: Text -- ^ 'ddProject'
    -> Text -- ^ 'ddDeployment'
    -> DeploymentsDelete
deploymentsDelete pDdProject_ pDdDeployment_ =
  -- Required identifiers first; the delete policy defaults to Delete',
  -- every other optional query parameter starts unset.
  DeploymentsDelete'
    { _ddProject = pDdProject_
    , _ddDeployment = pDdDeployment_
    , _ddDeletePolicy = Delete'
    , _ddXgafv = Nothing
    , _ddUploadProtocol = Nothing
    , _ddAccessToken = Nothing
    , _ddUploadType = Nothing
    , _ddCallback = Nothing
    }
-- | V1 error format.
ddXgafv :: Lens' DeploymentsDelete (Maybe Xgafv)
ddXgafv = lens _ddXgafv (\record v -> record {_ddXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ddUploadProtocol :: Lens' DeploymentsDelete (Maybe Text)
ddUploadProtocol =
  lens _ddUploadProtocol (\record v -> record {_ddUploadProtocol = v})

-- | The project ID for this request.
ddProject :: Lens' DeploymentsDelete Text
ddProject = lens _ddProject (\record v -> record {_ddProject = v})

-- | OAuth access token.
ddAccessToken :: Lens' DeploymentsDelete (Maybe Text)
ddAccessToken =
  lens _ddAccessToken (\record v -> record {_ddAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ddUploadType :: Lens' DeploymentsDelete (Maybe Text)
ddUploadType = lens _ddUploadType (\record v -> record {_ddUploadType = v})

-- | Sets the policy to use for deleting resources.
ddDeletePolicy :: Lens' DeploymentsDelete DeploymentsDeleteDeletePolicy
ddDeletePolicy =
  lens _ddDeletePolicy (\record v -> record {_ddDeletePolicy = v})

-- | JSONP
ddCallback :: Lens' DeploymentsDelete (Maybe Text)
ddCallback = lens _ddCallback (\record v -> record {_ddCallback = v})

-- | The name of the deployment for this request.
ddDeployment :: Lens' DeploymentsDelete Text
ddDeployment = lens _ddDeployment (\record v -> record {_ddDeployment = v})
-- NOTE(review): argument order to 'go' must match the captures and query
-- parameters of 'DeploymentsDeleteResource'; '_ddDeletePolicy' is wrapped
-- in 'Just' because it always has a concrete default.
instance GoogleRequest DeploymentsDelete where
        type Rs DeploymentsDelete = Operation
        type Scopes DeploymentsDelete =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/ndev.cloudman"]
        requestClient DeploymentsDelete'{..}
          = go _ddProject _ddDeployment _ddXgafv
              _ddUploadProtocol
              _ddAccessToken
              _ddUploadType
              (Just _ddDeletePolicy)
              _ddCallback
              (Just AltJSON)
              deploymentManagerService
          where go
                  = buildClient
                      (Proxy :: Proxy DeploymentsDeleteResource)
                      mempty
|
brendanhay/gogol
|
gogol-deploymentmanager/gen/Network/Google/Resource/DeploymentManager/Deployments/Delete.hs
|
mpl-2.0
| 5,702
| 0
| 21
| 1,468
| 862
| 500
| 362
| 130
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FusionTables.Column.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Adds a new column to the table.
--
-- /See:/ <https://developers.google.com/fusiontables Fusion Tables API Reference> for @fusiontables.column.insert@.
module Network.Google.Resource.FusionTables.Column.Insert
(
-- * REST Resource
ColumnInsertResource
-- * Creating a Request
, columnInsert
, ColumnInsert
-- * Request Lenses
, ciPayload
, ciTableId
) where
import Network.Google.FusionTables.Types
import Network.Google.Prelude
-- | A resource alias for @fusiontables.column.insert@ method which the
-- 'ColumnInsert' request conforms to.
--
-- Path: @fusiontables\/v2\/tables\/{tableId}\/columns@ with a JSON body.
type ColumnInsertResource =
     "fusiontables" :>
       "v2" :>
         "tables" :>
           Capture "tableId" Text :>
             "columns" :>
               QueryParam "alt" AltJSON :>
                 ReqBody '[JSON] Column :> Post '[JSON] Column
-- | Adds a new column to the table.
--
-- /See:/ 'columnInsert' smart constructor.
--
-- Both fields are strict and required; there are no optional parameters.
data ColumnInsert =
  ColumnInsert'
    { _ciPayload :: !Column
    , _ciTableId :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ColumnInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ciPayload'
--
-- * 'ciTableId'
columnInsert
    :: Column -- ^ 'ciPayload'
    -> Text -- ^ 'ciTableId'
    -> ColumnInsert
columnInsert pCiPayload_ pCiTableId_ =
  ColumnInsert'
    { _ciTableId = pCiTableId_
    , _ciPayload = pCiPayload_
    }
-- | Multipart request metadata.
ciPayload :: Lens' ColumnInsert Column
ciPayload = lens _ciPayload (\record v -> record {_ciPayload = v})

-- | Table for which a new column is being added.
ciTableId :: Lens' ColumnInsert Text
ciTableId = lens _ciTableId (\record v -> record {_ciTableId = v})
-- NOTE(review): 'go' takes the table id, then the alt parameter, then the
-- request body, matching 'ColumnInsertResource' component order.
instance GoogleRequest ColumnInsert where
        type Rs ColumnInsert = Column
        type Scopes ColumnInsert =
             '["https://www.googleapis.com/auth/fusiontables"]
        requestClient ColumnInsert'{..}
          = go _ciTableId (Just AltJSON) _ciPayload
              fusionTablesService
          where go
                  = buildClient (Proxy :: Proxy ColumnInsertResource)
                      mempty
|
brendanhay/gogol
|
gogol-fusiontables/gen/Network/Google/Resource/FusionTables/Column/Insert.hs
|
mpl-2.0
| 2,955
| 0
| 14
| 682
| 386
| 232
| 154
| 60
| 1
|
module Handler.HistoryBG where
import Import
------------------------------------------------------------------------
-- | Render the ten most recent blood-glucose entries for the signed-in
-- user.  The names 'sugars', 'bloodGlucoseHistoryDate', and
-- 'bloodGlucoseHistoryTime' are referenced by the @bg.history@ template
-- and must not be renamed.
getHistoryBGR :: Handler Html
getHistoryBGR = do
  Entity uid _ <- requireAuth
  -- TODO: pagination
  rows <- runDB (selectList [BloodGlucoseHistoryUid ==. uid] [LimitTo 10])
  let sugars = map entityVal rows
      bloodGlucoseHistoryDate _ = "TODO" :: Text -- TODO
      bloodGlucoseHistoryTime _ = "TODO" :: Text -- TODO
  defaultLayout $ do
    setTitle "Betty : Blood Sugar Logs"
    $(widgetFile "bg.history")
------------------------------------------------------------------------
|
sajith/betty-web
|
Handler/HistoryBG.hs
|
agpl-3.0
| 639
| 0
| 12
| 129
| 130
| 65
| 65
| 12
| 1
|
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module: SwiftNav.SBP.Navigation
-- Copyright: Copyright (C) 2015 Swift Navigation, Inc.
-- License: LGPL-3
-- Maintainer: Mark Fine <dev@swiftnav.com>
-- Stability: experimental
-- Portability: portable
--
-- Geodetic navigation messages reporting GPS time, position, velocity, and
-- baseline position solutions. For position solutions, these messages define
-- several different position solutions: single-point (SPP), RTK, and pseudo-
-- absolute position solutions. The SPP is the standalone, absolute GPS
-- position solution using only a single receiver. The RTK solution is the
-- differential GPS solution, which can use either a fixed/integer or floating
-- carrier phase ambiguity. The pseudo-absolute position solution uses a user-
-- provided, well-surveyed base station position (if available) and the RTK
-- solution in tandem.
module SwiftNav.SBP.Navigation where
import BasicPrelude
import Control.Lens
import Control.Monad.Loops
import Data.Aeson.TH (defaultOptions, deriveJSON, fieldLabelModifier)
import Data.Binary
import Data.Binary.Get
import Data.Binary.IEEE754
import Data.Binary.Put
import Data.ByteString
import Data.ByteString.Lazy hiding (ByteString)
import Data.Int
import Data.Word
import SwiftNav.SBP.Encoding
import SwiftNav.SBP.TH
import SwiftNav.SBP.Types
-- | SBP message type identifier for 'MsgGpsTime'.
msgGpsTime :: Word16
msgGpsTime = 0x0100
-- | SBP class for message MSG_GPS_TIME (0x0100).
--
-- This message reports the GPS time, representing the time since the GPS epoch
-- began on midnight January 6, 1980 UTC. GPS time counts the weeks and seconds
-- of the week. The weeks begin at the Saturday/Sunday transition. GPS week 0
-- began at the beginning of the GPS time scale. Within each week number, the
-- GPS time of the week is between between 0 and 604800 seconds (=60*60*24*7).
-- Note that GPS time does not accumulate leap seconds, and as of now, has a
-- small offset from UTC. In a message stream, this message precedes a set of
-- other navigation messages referenced to the same time (but lacking the ns
-- field) and indicates a more precise time of these messages.
data MsgGpsTime = MsgGpsTime
  { _msgGpsTime_wn  :: Word16
    -- ^ GPS week number
  , _msgGpsTime_tow :: Word32
    -- ^ GPS time of week rounded to the nearest millisecond
  , _msgGpsTime_ns  :: Int32
    -- ^ Nanosecond residual of millisecond-rounded TOW (ranges from -500000 to
    -- 500000)
  , _msgGpsTime_flags :: Word8
    -- ^ Status flags (reserved)
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
-- The signed ns field is transported as an unsigned word and converted
-- with fromIntegral.
instance Binary MsgGpsTime where
  get = do
    _msgGpsTime_wn <- getWord16le
    _msgGpsTime_tow <- getWord32le
    _msgGpsTime_ns <- liftM fromIntegral getWord32le
    _msgGpsTime_flags <- getWord8
    return MsgGpsTime {..}
  put MsgGpsTime {..} = do
    putWord16le _msgGpsTime_wn
    putWord32le _msgGpsTime_tow
    putWord32le $ fromIntegral _msgGpsTime_ns
    putWord8 _msgGpsTime_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgGpsTime ''MsgGpsTime)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgGpsTime_" . stripPrefix "_msgGpsTime_"}
             ''MsgGpsTime)
$(makeLenses ''MsgGpsTime)
-- | SBP message type identifier for 'MsgDops'.
msgDops :: Word16
msgDops = 0x0206
-- | SBP class for message MSG_DOPS (0x0206).
--
-- This dilution of precision (DOP) message describes the effect of navigation
-- satellite geometry on positional measurement precision.
data MsgDops = MsgDops
  { _msgDops_tow  :: Word32
    -- ^ GPS Time of Week
  , _msgDops_gdop :: Word16
    -- ^ Geometric Dilution of Precision
  , _msgDops_pdop :: Word16
    -- ^ Position Dilution of Precision
  , _msgDops_tdop :: Word16
    -- ^ Time Dilution of Precision
  , _msgDops_hdop :: Word16
    -- ^ Horizontal Dilution of Precision
  , _msgDops_vdop :: Word16
    -- ^ Vertical Dilution of Precision
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
instance Binary MsgDops where
  get = do
    _msgDops_tow <- getWord32le
    _msgDops_gdop <- getWord16le
    _msgDops_pdop <- getWord16le
    _msgDops_tdop <- getWord16le
    _msgDops_hdop <- getWord16le
    _msgDops_vdop <- getWord16le
    return MsgDops {..}
  put MsgDops {..} = do
    putWord32le _msgDops_tow
    putWord16le _msgDops_gdop
    putWord16le _msgDops_pdop
    putWord16le _msgDops_tdop
    putWord16le _msgDops_hdop
    putWord16le _msgDops_vdop
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgDops ''MsgDops)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgDops_" . stripPrefix "_msgDops_"}
             ''MsgDops)
$(makeLenses ''MsgDops)
-- | SBP message type identifier for 'MsgPosEcef'.
msgPosEcef :: Word16
msgPosEcef = 0x0200
-- | SBP class for message MSG_POS_ECEF (0x0200).
--
-- The position solution message reports absolute Earth Centered Earth Fixed
-- (ECEF) coordinates and the status (single point vs pseudo-absolute RTK) of
-- the position solution. If the rover receiver knows the surveyed position of
-- the base station and has an RTK solution, this reports a pseudo-absolute
-- position solution using the base station position and the rover's RTK
-- baseline vector. The full GPS time is given by the preceding MSG_GPS_TIME
-- with the matching time-of-week (tow).
data MsgPosEcef = MsgPosEcef
  { _msgPosEcef_tow      :: Word32
    -- ^ GPS Time of Week
  , _msgPosEcef_x        :: Double
    -- ^ ECEF X coordinate
  , _msgPosEcef_y        :: Double
    -- ^ ECEF Y coordinate
  , _msgPosEcef_z        :: Double
    -- ^ ECEF Z coordinate
  , _msgPosEcef_accuracy :: Word16
    -- ^ Position accuracy estimate (not implemented). Defaults to 0.
  , _msgPosEcef_n_sats   :: Word8
    -- ^ Number of satellites used in solution
  , _msgPosEcef_flags    :: Word8
    -- ^ Status flags
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
instance Binary MsgPosEcef where
  get = do
    _msgPosEcef_tow <- getWord32le
    _msgPosEcef_x <- getFloat64le
    _msgPosEcef_y <- getFloat64le
    _msgPosEcef_z <- getFloat64le
    _msgPosEcef_accuracy <- getWord16le
    _msgPosEcef_n_sats <- getWord8
    _msgPosEcef_flags <- getWord8
    return MsgPosEcef {..}
  put MsgPosEcef {..} = do
    putWord32le _msgPosEcef_tow
    putFloat64le _msgPosEcef_x
    putFloat64le _msgPosEcef_y
    putFloat64le _msgPosEcef_z
    putWord16le _msgPosEcef_accuracy
    putWord8 _msgPosEcef_n_sats
    putWord8 _msgPosEcef_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgPosEcef ''MsgPosEcef)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgPosEcef_" . stripPrefix "_msgPosEcef_"}
             ''MsgPosEcef)
$(makeLenses ''MsgPosEcef)
-- | SBP message type identifier for 'MsgPosLlh'.
msgPosLlh :: Word16
msgPosLlh = 0x0201
-- | SBP class for message MSG_POS_LLH (0x0201).
--
-- This position solution message reports the absolute geodetic coordinates and
-- the status (single point vs pseudo-absolute RTK) of the position solution.
-- If the rover receiver knows the surveyed position of the base station and
-- has an RTK solution, this reports a pseudo-absolute position solution using
-- the base station position and the rover's RTK baseline vector. The full GPS
-- time is given by the preceding MSG_GPS_TIME with the matching time-of-week
-- (tow).
data MsgPosLlh = MsgPosLlh
  { _msgPosLlh_tow        :: Word32
    -- ^ GPS Time of Week
  , _msgPosLlh_lat        :: Double
    -- ^ Latitude
  , _msgPosLlh_lon        :: Double
    -- ^ Longitude
  , _msgPosLlh_height     :: Double
    -- ^ Height
  , _msgPosLlh_h_accuracy :: Word16
    -- ^ Horizontal position accuracy estimate (not implemented). Defaults to 0.
  , _msgPosLlh_v_accuracy :: Word16
    -- ^ Vertical position accuracy estimate (not implemented). Defaults to 0.
  , _msgPosLlh_n_sats     :: Word8
    -- ^ Number of satellites used in solution.
  , _msgPosLlh_flags      :: Word8
    -- ^ Status flags
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
instance Binary MsgPosLlh where
  get = do
    _msgPosLlh_tow <- getWord32le
    _msgPosLlh_lat <- getFloat64le
    _msgPosLlh_lon <- getFloat64le
    _msgPosLlh_height <- getFloat64le
    _msgPosLlh_h_accuracy <- getWord16le
    _msgPosLlh_v_accuracy <- getWord16le
    _msgPosLlh_n_sats <- getWord8
    _msgPosLlh_flags <- getWord8
    return MsgPosLlh {..}
  put MsgPosLlh {..} = do
    putWord32le _msgPosLlh_tow
    putFloat64le _msgPosLlh_lat
    putFloat64le _msgPosLlh_lon
    putFloat64le _msgPosLlh_height
    putWord16le _msgPosLlh_h_accuracy
    putWord16le _msgPosLlh_v_accuracy
    putWord8 _msgPosLlh_n_sats
    putWord8 _msgPosLlh_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgPosLlh ''MsgPosLlh)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgPosLlh_" . stripPrefix "_msgPosLlh_"}
             ''MsgPosLlh)
$(makeLenses ''MsgPosLlh)
-- | SBP message type identifier for 'MsgBaselineEcef'.
msgBaselineEcef :: Word16
msgBaselineEcef = 0x0202
-- | SBP class for message MSG_BASELINE_ECEF (0x0202).
--
-- This message reports the baseline solution in Earth Centered Earth Fixed
-- (ECEF) coordinates. This baseline is the relative vector distance from the
-- base station to the rover receiver. The full GPS time is given by the
-- preceding MSG_GPS_TIME with the matching time-of-week (tow).
data MsgBaselineEcef = MsgBaselineEcef
  { _msgBaselineEcef_tow      :: Word32
    -- ^ GPS Time of Week
  , _msgBaselineEcef_x        :: Int32
    -- ^ Baseline ECEF X coordinate
  , _msgBaselineEcef_y        :: Int32
    -- ^ Baseline ECEF Y coordinate
  , _msgBaselineEcef_z        :: Int32
    -- ^ Baseline ECEF Z coordinate
  , _msgBaselineEcef_accuracy :: Word16
    -- ^ Position accuracy estimate (not implemented). Defaults to 0.
  , _msgBaselineEcef_n_sats   :: Word8
    -- ^ Number of satellites used in solution
  , _msgBaselineEcef_flags    :: Word8
    -- ^ Status flags
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
-- Signed coordinates are transported as unsigned words and converted
-- with fromIntegral.
instance Binary MsgBaselineEcef where
  get = do
    _msgBaselineEcef_tow <- getWord32le
    _msgBaselineEcef_x <- liftM fromIntegral getWord32le
    _msgBaselineEcef_y <- liftM fromIntegral getWord32le
    _msgBaselineEcef_z <- liftM fromIntegral getWord32le
    _msgBaselineEcef_accuracy <- getWord16le
    _msgBaselineEcef_n_sats <- getWord8
    _msgBaselineEcef_flags <- getWord8
    return MsgBaselineEcef {..}
  put MsgBaselineEcef {..} = do
    putWord32le _msgBaselineEcef_tow
    putWord32le $ fromIntegral _msgBaselineEcef_x
    putWord32le $ fromIntegral _msgBaselineEcef_y
    putWord32le $ fromIntegral _msgBaselineEcef_z
    putWord16le _msgBaselineEcef_accuracy
    putWord8 _msgBaselineEcef_n_sats
    putWord8 _msgBaselineEcef_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgBaselineEcef ''MsgBaselineEcef)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgBaselineEcef_" . stripPrefix "_msgBaselineEcef_"}
             ''MsgBaselineEcef)
$(makeLenses ''MsgBaselineEcef)
-- | SBP message type identifier for 'MsgBaselineNed'.
msgBaselineNed :: Word16
msgBaselineNed = 0x0203
-- | SBP class for message MSG_BASELINE_NED (0x0203).
--
-- This message reports the baseline solution in North East Down (NED)
-- coordinates. This baseline is the relative vector distance from the base
-- station to the rover receiver, and NED coordinate system is defined at the
-- local WGS84 tangent plane centered at the base station position. The full
-- GPS time is given by the preceding MSG_GPS_TIME with the matching time-of-
-- week (tow).
data MsgBaselineNed = MsgBaselineNed
  { _msgBaselineNed_tow        :: Word32
    -- ^ GPS Time of Week
  , _msgBaselineNed_n          :: Int32
    -- ^ Baseline North coordinate
  , _msgBaselineNed_e          :: Int32
    -- ^ Baseline East coordinate
  , _msgBaselineNed_d          :: Int32
    -- ^ Baseline Down coordinate
  , _msgBaselineNed_h_accuracy :: Word16
    -- ^ Horizontal position accuracy estimate (not implemented). Defaults to 0.
  , _msgBaselineNed_v_accuracy :: Word16
    -- ^ Vertical position accuracy estimate (not implemented). Defaults to 0.
  , _msgBaselineNed_n_sats     :: Word8
    -- ^ Number of satellites used in solution
  , _msgBaselineNed_flags      :: Word8
    -- ^ Status flags
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
-- Signed coordinates are transported as unsigned words and converted
-- with fromIntegral.
instance Binary MsgBaselineNed where
  get = do
    _msgBaselineNed_tow <- getWord32le
    _msgBaselineNed_n <- liftM fromIntegral getWord32le
    _msgBaselineNed_e <- liftM fromIntegral getWord32le
    _msgBaselineNed_d <- liftM fromIntegral getWord32le
    _msgBaselineNed_h_accuracy <- getWord16le
    _msgBaselineNed_v_accuracy <- getWord16le
    _msgBaselineNed_n_sats <- getWord8
    _msgBaselineNed_flags <- getWord8
    return MsgBaselineNed {..}
  put MsgBaselineNed {..} = do
    putWord32le _msgBaselineNed_tow
    putWord32le $ fromIntegral _msgBaselineNed_n
    putWord32le $ fromIntegral _msgBaselineNed_e
    putWord32le $ fromIntegral _msgBaselineNed_d
    putWord16le _msgBaselineNed_h_accuracy
    putWord16le _msgBaselineNed_v_accuracy
    putWord8 _msgBaselineNed_n_sats
    putWord8 _msgBaselineNed_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgBaselineNed ''MsgBaselineNed)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgBaselineNed_" . stripPrefix "_msgBaselineNed_"}
             ''MsgBaselineNed)
$(makeLenses ''MsgBaselineNed)
-- | SBP message type identifier for 'MsgVelEcef'.
msgVelEcef :: Word16
msgVelEcef = 0x0204
-- | SBP class for message MSG_VEL_ECEF (0x0204).
--
-- This message reports the velocity in Earth Centered Earth Fixed (ECEF)
-- coordinates. The full GPS time is given by the preceding MSG_GPS_TIME with
-- the matching time-of-week (tow).
data MsgVelEcef = MsgVelEcef
  { _msgVelEcef_tow      :: Word32
    -- ^ GPS Time of Week
  , _msgVelEcef_x        :: Int32
    -- ^ Velocity ECEF X coordinate
  , _msgVelEcef_y        :: Int32
    -- ^ Velocity ECEF Y coordinate
  , _msgVelEcef_z        :: Int32
    -- ^ Velocity ECEF Z coordinate
  , _msgVelEcef_accuracy :: Word16
    -- ^ Velocity accuracy estimate (not implemented). Defaults to 0.
  , _msgVelEcef_n_sats   :: Word8
    -- ^ Number of satellites used in solution
  , _msgVelEcef_flags    :: Word8
    -- ^ Status flags (reserved)
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
-- Signed velocities are transported as unsigned words and converted
-- with fromIntegral.
instance Binary MsgVelEcef where
  get = do
    _msgVelEcef_tow <- getWord32le
    _msgVelEcef_x <- liftM fromIntegral getWord32le
    _msgVelEcef_y <- liftM fromIntegral getWord32le
    _msgVelEcef_z <- liftM fromIntegral getWord32le
    _msgVelEcef_accuracy <- getWord16le
    _msgVelEcef_n_sats <- getWord8
    _msgVelEcef_flags <- getWord8
    return MsgVelEcef {..}
  put MsgVelEcef {..} = do
    putWord32le _msgVelEcef_tow
    putWord32le $ fromIntegral _msgVelEcef_x
    putWord32le $ fromIntegral _msgVelEcef_y
    putWord32le $ fromIntegral _msgVelEcef_z
    putWord16le _msgVelEcef_accuracy
    putWord8 _msgVelEcef_n_sats
    putWord8 _msgVelEcef_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgVelEcef ''MsgVelEcef)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgVelEcef_" . stripPrefix "_msgVelEcef_"}
             ''MsgVelEcef)
$(makeLenses ''MsgVelEcef)
-- | SBP message type identifier for 'MsgVelNed'.
msgVelNed :: Word16
msgVelNed = 0x0205
-- | SBP class for message MSG_VEL_NED (0x0205).
--
-- This message reports the velocity in local North East Down (NED)
-- coordinates. The NED coordinate system is defined as the local WGS84 tangent
-- plane centered at the current position. The full GPS time is given by the
-- preceding MSG_GPS_TIME with the matching time-of-week (tow).
data MsgVelNed = MsgVelNed
  { _msgVelNed_tow        :: Word32
    -- ^ GPS Time of Week
  , _msgVelNed_n          :: Int32
    -- ^ Velocity North coordinate
  , _msgVelNed_e          :: Int32
    -- ^ Velocity East coordinate
  , _msgVelNed_d          :: Int32
    -- ^ Velocity Down coordinate
  , _msgVelNed_h_accuracy :: Word16
    -- ^ Horizontal velocity accuracy estimate (not implemented). Defaults to 0.
  , _msgVelNed_v_accuracy :: Word16
    -- ^ Vertical velocity accuracy estimate (not implemented). Defaults to 0.
  , _msgVelNed_n_sats     :: Word8
    -- ^ Number of satellites used in solution
  , _msgVelNed_flags      :: Word8
    -- ^ Status flags (reserved)
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
-- Signed velocities are transported as unsigned words and converted
-- with fromIntegral.
instance Binary MsgVelNed where
  get = do
    _msgVelNed_tow <- getWord32le
    _msgVelNed_n <- liftM fromIntegral getWord32le
    _msgVelNed_e <- liftM fromIntegral getWord32le
    _msgVelNed_d <- liftM fromIntegral getWord32le
    _msgVelNed_h_accuracy <- getWord16le
    _msgVelNed_v_accuracy <- getWord16le
    _msgVelNed_n_sats <- getWord8
    _msgVelNed_flags <- getWord8
    return MsgVelNed {..}
  put MsgVelNed {..} = do
    putWord32le _msgVelNed_tow
    putWord32le $ fromIntegral _msgVelNed_n
    putWord32le $ fromIntegral _msgVelNed_e
    putWord32le $ fromIntegral _msgVelNed_d
    putWord16le _msgVelNed_h_accuracy
    putWord16le _msgVelNed_v_accuracy
    putWord8 _msgVelNed_n_sats
    putWord8 _msgVelNed_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgVelNed ''MsgVelNed)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgVelNed_" . stripPrefix "_msgVelNed_"}
             ''MsgVelNed)
$(makeLenses ''MsgVelNed)
-- | SBP message type identifier for 'MsgBaselineHeading'.
msgBaselineHeading :: Word16
msgBaselineHeading = 0x0207
-- | SBP class for message MSG_BASELINE_HEADING (0x0207).
--
-- This message reports the baseline heading pointing from the base station to
-- the rover relative to True North. The full GPS time is given by the
-- preceding MSG_GPS_TIME with the matching time-of-week (tow).
data MsgBaselineHeading = MsgBaselineHeading
  { _msgBaselineHeading_tow     :: Word32
    -- ^ GPS Time of Week
  , _msgBaselineHeading_heading :: Word32
    -- ^ Heading
  , _msgBaselineHeading_n_sats  :: Word8
    -- ^ Number of satellites used in solution
  , _msgBaselineHeading_flags   :: Word8
    -- ^ Status flags
  } deriving ( Show, Read, Eq )
-- Field read/write order below must match the SBP wire layout exactly.
instance Binary MsgBaselineHeading where
  get = do
    _msgBaselineHeading_tow <- getWord32le
    _msgBaselineHeading_heading <- getWord32le
    _msgBaselineHeading_n_sats <- getWord8
    _msgBaselineHeading_flags <- getWord8
    return MsgBaselineHeading {..}
  put MsgBaselineHeading {..} = do
    putWord32le _msgBaselineHeading_tow
    putWord32le _msgBaselineHeading_heading
    putWord8 _msgBaselineHeading_n_sats
    putWord8 _msgBaselineHeading_flags
-- Derive SBP framing, JSON (field prefix stripped), and lenses.
$(deriveSBP 'msgBaselineHeading ''MsgBaselineHeading)
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "_msgBaselineHeading_" . stripPrefix "_msgBaselineHeading_"}
             ''MsgBaselineHeading)
$(makeLenses ''MsgBaselineHeading)
|
swift-nav/libsbp
|
haskell/src/SwiftNav/SBP/Navigation.hs
|
lgpl-3.0
| 17,844
| 0
| 11
| 3,435
| 2,664
| 1,378
| 1,286
| -1
| -1
|
import Data.List
import Text.Printf
-- | Pick the two entries with the smallest second component from each of
-- three fixed groups (items 1-8, 9-16, 17..), then two more from whatever
-- remains, preserving that order in the output.
--
-- The original repeated the comparator lambda four times with unused
-- bindings; it is factored into a single local helper here.
ans :: (Eq a, Ord b) => [(a, b)] -> [(a, b)]
ans i = g1 ++ g2 ++ g3 ++ rr
  where
    -- Two entries with the smallest snd; sortBy is stable, so on ties
    -- the earlier input entry wins, exactly as before.
    top2 = take 2 . sortBy (\(_, b) (_, d) -> compare b d)
    g1 = top2 (take 8 i)
    g2 = top2 (drop 8 (take 16 i))
    g3 = top2 (drop 16 i)
    -- Everything not already selected; (\\) removes one occurrence each.
    rest = ((i \\ g1) \\ g2) \\ g3
    rr = top2 rest
-- Read "(id, score)" pairs, one per line, from stdin and print the
-- selection computed by 'ans' as "<id> <score>" with two decimals.
main :: IO ()
main = do
  input <- getContents
  -- Parsing is intentionally strict about line shape: a line without
  -- exactly two words fails the lambda's pattern, as in the original.
  let pairs = map ((\[a, b] -> (read a, read b)) . words) (lines input) :: [(Int, Float)]
  mapM_ (\(n, v) -> printf "%d %.02f\n" n v) (ans pairs)
|
a143753/AOJ
|
0138.hs
|
apache-2.0
| 596
| 8
| 16
| 202
| 392
| 201
| 191
| 16
| 1
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QGraphicsPixmapItem.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:36
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QGraphicsPixmapItem (
ShapeMode, eMaskShape, eBoundingRectShape, eHeuristicMaskShape
, QGraphicsPixmapItem__
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- Phantom carrier for the ShapeMode enum; the Int payload is the raw
-- Qt enum value.
data CShapeMode a = CShapeMode a

type ShapeMode = QEnum(CShapeMode Int)

-- Wrap a raw Qt enum value as a ShapeMode.
ieShapeMode :: Int -> ShapeMode
ieShapeMode x = QEnum (CShapeMode x)
-- Conversions between ShapeMode and its raw Int representation, plus
-- helpers that lift the decoding over an action producing an integral.
instance QEnumC (CShapeMode Int) where
  qEnum_toInt (QEnum (CShapeMode x)) = x
  qEnum_fromInt x = QEnum (CShapeMode x)
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til
-- Slot plumbing: connect an int-carrying Qt signal to a Haskell handler
-- that receives the ShapeMode decoded from the int.
instance Qcs (QObject c -> ShapeMode -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
        -- Wrap the handler as a C callback and pin it behind a stable
        -- pointer so the GC cannot collect it while Qt holds it.
        funptr <- wrapSlotHandler_int slotHandlerWrapper_int
        stptr <- newStablePtr (Wrap _handler)
        withObjectPtr _qsig_obj $ \cobj_sig ->
          withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
              withCWString _qslt_nam $ \cstr_slt ->
                qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
        return ()
    where
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             -- A null object signals teardown: free the stable pointer
             -- and the foreign function wrapper instead of dispatching.
             if (objectIsNull qobj)
               then do when (stptr/=ptrNull)
                         (freeStablePtr (castPtrToStablePtr stptr))
                       when (funptr/=ptrNull)
                         (freeHaskellFunPtr (castPtrToFunPtr funptr))
               else _handler qobj (qEnum_fromInt hint)
             return ()
-- | Raw Qt values for the ShapeMode enum.
eMaskShape :: ShapeMode
eMaskShape = ieShapeMode 0

eBoundingRectShape :: ShapeMode
eBoundingRectShape = ieShapeMode 1

eHeuristicMaskShape :: ShapeMode
eHeuristicMaskShape = ieShapeMode 2
-- Phantom carrier for QGraphicsPixmapItem's own enum values.
data CQGraphicsPixmapItem__ a = CQGraphicsPixmapItem__ a

type QGraphicsPixmapItem__ = QEnum(CQGraphicsPixmapItem__ Int)

-- Wrap a raw Qt enum value.
ieQGraphicsPixmapItem__ :: Int -> QGraphicsPixmapItem__
ieQGraphicsPixmapItem__ x = QEnum (CQGraphicsPixmapItem__ x)
-- Conversions between QGraphicsPixmapItem__ and its raw Int
-- representation; mirrors the ShapeMode instance above.
instance QEnumC (CQGraphicsPixmapItem__ Int) where
  qEnum_toInt (QEnum (CQGraphicsPixmapItem__ x)) = x
  qEnum_fromInt x = QEnum (CQGraphicsPixmapItem__ x)
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til
-- Slot plumbing for QGraphicsPixmapItem__ handlers; identical structure
-- to the ShapeMode Qcs instance above.
instance Qcs (QObject c -> QGraphicsPixmapItem__ -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
        -- Wrap the handler as a C callback and pin it behind a stable
        -- pointer so the GC cannot collect it while Qt holds it.
        funptr <- wrapSlotHandler_int slotHandlerWrapper_int
        stptr <- newStablePtr (Wrap _handler)
        withObjectPtr _qsig_obj $ \cobj_sig ->
          withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
              withCWString _qslt_nam $ \cstr_slt ->
                qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
        return ()
    where
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             -- A null object signals teardown: free the stable pointer
             -- and the foreign function wrapper instead of dispatching.
             if (objectIsNull qobj)
               then do when (stptr/=ptrNull)
                         (freeStablePtr (castPtrToStablePtr stptr))
                       when (funptr/=ptrNull)
                         (freeHaskellFunPtr (castPtrToFunPtr funptr))
               else _handler qobj (qEnum_fromInt hint)
             return ()
-- | Item type tag for QGraphicsPixmapItem (raw value 7).
instance QeType QGraphicsPixmapItem__ where
  eType = ieQGraphicsPixmapItem__ 7
|
uduki/hsQt
|
Qtc/Enums/Gui/QGraphicsPixmapItem.hs
|
bsd-2-clause
| 4,217
| 0
| 18
| 940
| 1,108
| 549
| 559
| 95
| 1
|
{-# LANGUAGE RecordWildCards #-}
-- | The module provides first-order, linear-chain conditional random fields
-- (CRFs).
--
-- Important feature of the implemented flavour of CRFs is that transition
-- features which are not included in the CRF model are considered to have
-- probability of 0.
-- It is particularly useful when the training material determines the set
-- of possible label transitions (e.g. when using the IOB encoding method).
-- Furthermore, this design decision makes the implementation much faster
-- for sparse datasets.
module Data.CRF.Chain1
(
-- * Data types
Word
, Sent
, Dist (unDist)
, mkDist
, WordL
, annotate
, SentL
-- * CRF
, CRF (..)
-- ** Training
, train
-- ** Tagging
, tag
-- * Feature selection
, hiddenFeats
, presentFeats
) where
import Data.CRF.Chain1.Dataset.External
import Data.CRF.Chain1.Dataset.Codec
import Data.CRF.Chain1.Feature.Present
import Data.CRF.Chain1.Feature.Hidden
import Data.CRF.Chain1.Train
import qualified Data.CRF.Chain1.Inference as I
-- | Determine the most probable label sequence within the context of the
-- given sentence using the model provided by the 'CRF': encode the
-- sentence through the CRF's codec, run the inference in "I", and decode
-- the resulting labels back to the external alphabet.
tag :: (Ord a, Ord b) => CRF a b -> Sent a -> [b]
tag crf = decodeLabels (codec crf) . I.tag (model crf) . encodeSent (codec crf)
|
kawu/crf-chain1
|
src/Data/CRF/Chain1.hs
|
bsd-2-clause
| 1,259
| 0
| 8
| 205
| 192
| 126
| 66
| 26
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- | This module creates simple default switch without user program
-- that can be used for testing purposes:
--
-- This program should be as simple as possibe
module Main
where
import Control.Concurrent (threadDelay)
import Control.Monad (replicateM_, forM_)
import Control.Monad.Trans (lift)
import Control.Concurrent.MVar
import Control.Concurrent.Async
import Data.Bits -- for IP creation [ TODO: remove ]
import HCProbe.EDSL
-- low level message generation
import Network.Openflow.Ethernet.Generator
import Network.Openflow.Ethernet.IPv4
import Network.Openflow.Ethernet.TCP
import HCProbe.Ethernet
import HCProbe.TCP
-- Entry point: brings up two simulated switches against a controller on
-- localhost:6633 and exchanges OFPT_PACKET_IN messages, including one
-- deliberately malformed message (length field of 4) in between.
main :: IO ()
main = do
  -- Build two switch configurations; s2 is derived from s1 via switchOn.
  (s1, s2) <- config $ do
    s1 <- switch $ do
      features $ do -- create 2 ports
        addPort [] [] [OFPPF_1GB_FD, OFPPF_COPPER] def
        addPort [] [] [OFPPF_1GB_FD, OFPPF_COPPER] def
    s2 <- switchOn s1 (return ())
    return (s1,s2)
  -- s1's session below blocks on this MVar until s2 has sent the
  -- broken message.
  lock <- newEmptyMVar
  async $ withSwitch s2 "localhost" 6633 $ do
    -- wait for type examples:
    lift $ putStr "waiting for barrier request.. "
    waitForType OFPT_BARRIER_REQUEST
    let port = 0
        m1 = 37 -- TODO gen mac here
        m2 = 29 -- TODO gen mac here
    -- Payload: a fixed TCP-in-IPv4-in-Ethernet test packet.
    let pl = putEthernetFrame . (EthFrame m1 m2) . putIPv4Pkt $
               TestPacketTCP { dstMAC = m2
                             , srcMAC = m1
                             , srcIP = 99
                             , dstIP = 66
                             , dstPort = 22
                             , srcPort = 12342
                             , testWSS = Just 3
                             , testFlags = tcpFlagsOf [ACK]
                             , testPayloadLen = 32
                             , testAckNo = Nothing
                             , testSeqNo = Nothing
                             , testIpID = Nothing
                             }
    -- correct message
    replicateM_ 10 $ do
      lift $ putStr "sending.. "
      bid <- sendOFPPacketIn port 43 pl
      waitForBID bid
      lift $ putStrLn "done"
    -- broken length: header claims 4 bytes, then a full packet-in body.
    let msg = putOFMessage $ do
                putOFHeader $ do
                  putHdrType OFPT_PACKET_IN
                  putPacketLength 4
                putPacketIn $ do
                  putPacketInData pl
    send msg
    -- Let the s1 session proceed now that the broken message went out.
    lift $ putMVar lock ()
    -- correct message
    replicateM_ 10 $ do
      bid <- sendOFPPacketIn port 43 pl
      waitForBID bid
  withSwitch s1 "localhost" 6633 $ do
    _ <- lift $ takeMVar lock
    let port = 0
        m1 = 37 -- TODO gen mac here
        m2 = 29 -- TODO gen mac here
    -- Same fixed test payload as the s2 session above.
    let pl = putEthernetFrame . (EthFrame m1 m2) . putIPv4Pkt $
               TestPacketTCP { dstMAC = m2
                             , srcMAC = m1
                             , srcIP = 99
                             , dstIP = 66
                             , dstPort = 22
                             , srcPort = 12342
                             , testWSS = Just 3
                             , testFlags = tcpFlagsOf [ACK]
                             , testPayloadLen = 32
                             , testAckNo = Nothing
                             , testSeqNo = Nothing
                             , testIpID = Nothing
                             }
    bid <- sendOFPPacketIn port 43 pl
    waitForBID bid
    lift $ putStrLn "ok"
|
ARCCN/hcprobe
|
src/examples/test1.hs
|
bsd-3-clause
| 4,092
| 0
| 19
| 2,156
| 739
| 390
| 349
| 80
| 1
|
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts #-}
{-|
This module defines non-Prelude 'Forkable'/'ForkableT' instances. It is separated from "Control.Concurrent.Forkable" because imported modules might not be -XSafe
-}
module Control.Concurrent.ForkableT.Instances
( module Control.Concurrent.ForkableT
)
where
import Control.Concurrent.ForkableT
import Control.Monad.Trans.Control
import Control.Monad.Trans.Resource
import Control.Monad.State
-- ResourceT -should- be an instance of ForkableT, however the exposed functionality does not allow this for now.
-- Forking delegates to 'resourceForkIO'; presumably that shares the
-- resource registry safely between parent and child -- see the
-- resourcet package documentation to confirm the exact semantics.
instance (MonadBaseControl IO m, MonadIO m) => Forkable (ResourceT m) (ResourceT m) where
  fork = resourceForkIO
|
exFalso/ForkableT
|
src/Control/Concurrent/ForkableT/Instances.hs
|
bsd-3-clause
| 695
| 0
| 7
| 91
| 90
| 56
| 34
| 9
| 0
|
{-# LANGUAGE RankNTypes, ScopedTypeVariables, GADTs, EmptyDataDecls, PatternGuards, TypeFamilies, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-} -- bug in GHC
{- Notes about the genesis of Hoopl7
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Hoopl7 has the following major changes
a) GMany has symmetric entry and exit
b) GMany closed-entry does not record a BlockId
c) GMany open-exit does not record a BlockId
d) The body of a GMany is called Body
e) A Body is just a list of blocks, not a map. I've argued
elsewhere that this is consistent with (c)
A consequence is that Graph is no longer an instance of NonLocal,
but nevertheless I managed to keep the ARF and ARB signatures
nice and uniform.
This was made possible by
* FwdTransfer looks like this:
type FwdTransfer n f
= forall e x. n e x -> Fact e f -> Fact x f
type family Fact x f :: *
type instance Fact C f = FactBase f
type instance Fact O f = f
Note that the incoming fact is a Fact (not just 'f' as in Hoopl5,6).
It's up to the *transfer function* to look up the appropriate fact
in the FactBase for a closed-entry node. Example:
constProp (Label l) fb = lookupFact fb l
That is how Hoopl can avoid having to know the block-id for the
first node: it defers to the client.
[Side note: that means the client must know about
bottom, in case the looupFact returns Nothing]
* Note also that FwdTransfer *returns* a Fact too;
that is, the types in both directions are symmetrical.
Previously we returned a [(BlockId,f)] but I could not see
how to make everything line up if we do this.
Indeed, the main shortcoming of Hoopl7 is that we are more
or less forced into this uniform representation of the facts
flowing into or out of a closed node/block/graph, whereas
previously we had more flexibility.
In exchange the code is neater, with fewer distinct types.
And morally a FactBase is equivalent to [(BlockId,f)] and
nearly equivalent to (BlockId -> f).
* I've realised that forwardBlockList and backwardBlockList
both need (NonLocal n), and that goes everywhere.
* I renamed BlockId to Label
-}
module Compiler.Hoopl.OldDataflow
( DataflowLattice(..), JoinFun, OldFact(..), NewFact(..), Fact
, ChangeFlag(..), changeIf
, FwdPass(..), FwdTransfer, mkFTransfer, mkFTransfer', getFTransfers
, FwdRes(..), FwdRewrite, mkFRewrite, mkFRewrite', getFRewrites
, BwdPass(..), BwdTransfer, mkBTransfer, mkBTransfer', getBTransfers
, BwdRes(..), BwdRewrite, mkBRewrite, mkBRewrite', getBRewrites
, analyzeAndRewriteFwd, analyzeAndRewriteBwd
, analyzeAndRewriteFwd', analyzeAndRewriteBwd'
)
where
import Data.Maybe
import Compiler.Hoopl.Fuel
import Compiler.Hoopl.Graph
import Compiler.Hoopl.MkGraph
import qualified Compiler.Hoopl.GraphUtil as U
import Compiler.Hoopl.Label
import Compiler.Hoopl.Util
-----------------------------------------------------------------------------
-- DataflowLattice
-----------------------------------------------------------------------------
data DataflowLattice a = DataflowLattice
  { fact_name   :: String      -- Documentation
  , fact_bot    :: a           -- Lattice bottom element
  , fact_extend :: JoinFun a   -- Lattice join plus change flag
                               -- (changes iff result > old fact)
  }
-- ^ A transfer function might want to use the logging flag
-- to control debugging, as in for example, it updates just one element
-- in a big finite map. We don't want Hoopl to show the whole fact,
-- and only the transfer function knows exactly what changed.

-- Join an old and a new fact at a label; the flag reports whether the
-- join actually grew the old fact.
type JoinFun a = Label -> OldFact a -> NewFact a -> (ChangeFlag, a)
-- the label argument is for debugging purposes only

-- Newtype tags keep the two join arguments from being swapped.
newtype OldFact a = OldFact a
newtype NewFact a = NewFact a

-- NoChange < SomeChange under the derived Ord.
data ChangeFlag = NoChange | SomeChange deriving (Eq, Ord)
-- | Map a Bool onto the change lattice: True becomes SomeChange.
changeIf :: Bool -> ChangeFlag
changeIf True  = SomeChange
changeIf False = NoChange
-----------------------------------------------------------------------------
-- Analyze and rewrite forward: the interface
-----------------------------------------------------------------------------
-- A forward pass bundles a lattice with transfer and rewrite functions.
data FwdPass n f
  = FwdPass { fp_lattice  :: DataflowLattice f
            , fp_transfer :: FwdTransfer n f
            , fp_rewrite  :: FwdRewrite n f }

-- One transfer function per node shape (first / middle / last).
newtype FwdTransfer n f
  = FwdTransfers { getFTransfers ::
                     ( n C O -> f -> f
                     , n O O -> f -> f
                     , n O C -> f -> FactBase f
                     ) }

-- One rewrite function per node shape; Nothing means "no rewrite here".
newtype FwdRewrite n f
  = FwdRewrites { getFRewrites ::
                    ( n C O -> f -> Maybe (FwdRes n f C O)
                    , n O O -> f -> Maybe (FwdRes n f O O)
                    , n O C -> f -> Maybe (FwdRes n f O C)
                    ) }

data FwdRes n f e x = FwdRes (AGraph n e x) (FwdRewrite n f)
-- result of a rewrite is a new graph and a (possibly) new rewrite function

mkFTransfer :: (n C O -> f -> f)
            -> (n O O -> f -> f)
            -> (n O C -> f -> FactBase f)
            -> FwdTransfer n f
mkFTransfer f m l = FwdTransfers (f, m, l)

-- Build a transfer from one shape-polymorphic function.
mkFTransfer' :: (forall e x . n e x -> f -> Fact x f) -> FwdTransfer n f
mkFTransfer' f = FwdTransfers (f, f, f)

mkFRewrite :: (n C O -> f -> Maybe (FwdRes n f C O))
           -> (n O O -> f -> Maybe (FwdRes n f O O))
           -> (n O C -> f -> Maybe (FwdRes n f O C))
           -> FwdRewrite n f
mkFRewrite f m l = FwdRewrites (f, m, l)

-- Build a rewrite from one shape-polymorphic function.
mkFRewrite' :: (forall e x . n e x -> f -> Maybe (FwdRes n f e x)) -> FwdRewrite n f
mkFRewrite' f = FwdRewrites (f, f, f)

-- Fact shape depends on the openness of the edge: a closed edge carries
-- a whole FactBase, an open edge carries a single fact.
type family   Fact x f :: *
type instance Fact C f = FactBase f
type instance Fact O f = f
-- | Analyze and rewrite a closed/closed body forward, returning the
-- transformed body together with the facts at its labels.
analyzeAndRewriteFwd
  :: forall n f. NonLocal n
  => FwdPass n f
  -> Body n -> FactBase f
  -> FuelMonad (Body n, FactBase f)
analyzeAndRewriteFwd pass body facts
  = do { (rg, _) <- arfBody pass body facts
       ; return (normaliseBody rg) }

-- | if the graph being analyzed is open at the entry, there must
-- be no other entry point, or all goes horribly wrong...
analyzeAndRewriteFwd'
  :: forall n f e x. NonLocal n
  => FwdPass n f
  -> Graph n e x -> Fact e f
  -> FuelMonad (Graph n e x, FactBase f, MaybeO x f)
analyzeAndRewriteFwd' pass g f =
  do (rg, fout) <- arfGraph pass g f
     let (g', fb) = normalizeGraph rg
     -- Additionally report the fact flowing out of a distinguished
     -- open exit, when the graph has one.
     return (g', fb, distinguishedExitFact g' fout)
-- | The fact at the graph's open exit, when it has one. (The local
-- helper is named to avoid shadowing Prelude's 'maybe'.)
distinguishedExitFact :: forall n e x f . Graph n e x -> Fact x f -> MaybeO x f
distinguishedExitFact g f = exitFactOf g
  where exitFactOf :: Graph n e x -> MaybeO x f
        exitFactOf GNil       = JustO f
        exitFactOf (GUnit {}) = JustO f
        exitFactOf (GMany _ _ x) =
          case x of NothingO -> NothingO
                    JustO _  -> JustO f
----------------------------------------------------------------
-- Forward Implementation
----------------------------------------------------------------
-- Arrow type of a forward analyze-and-rewrite over some "thing"
-- (node, block or graph): pass + thing + incoming fact, producing a
-- fact-annotated graph and the outgoing fact.
type ARF' n f thing e x
  = FwdPass n f -> thing e x -> f -> FuelMonad (RG f n e x, Fact x f)
-- ^ Analyze and rewrite forward

type ARFX' n f thing e x
  = FwdPass n f -> thing e x -> Fact e f -> FuelMonad (RG f n e x, Fact x f)
-- ^ Analyze and rewrite forward extended -- can take @FactBase f@
-- | Lift a closed-entry forward analysis from "takes one fact" to
-- "takes a FactBase": look up the fact for the thing's entry label.
--
-- A missing entry fact violates a Hoopl invariant; we fail with a
-- descriptive error instead of fromJust's anonymous pattern-match error.
arfx :: NonLocal thing => ARF' n f thing C x -> ARFX' n f thing C x
arfx arf pass thing fb =
  arf pass thing $
    fromMaybe (error "arfx: no fact in FactBase for entry label")
              (lookupFact (joinInFacts lattice fb) (entryLabel thing))
  where lattice = fp_lattice pass
        -- joinInFacts adds debugging information
-- Shape- and fact-polymorphic aliases for the two arrow types above.
type ARF  thing n = forall f e x . ARF'  n f thing e x
type ARFX thing n = forall f e x . ARFX' n f thing e x
arfNode :: (NonLocal n, ShapeLifter e x) => ARF' n f n e x
-- Try to rewrite the node (consuming fuel); if fuel is exhausted or the
-- rewriter declines, keep the node and just apply the transfer function.
arfNode pass node f
  = do { mb_g <- withFuel (frewrite pass node f)
       ; case mb_g of
           Nothing -> return (rgunit f (unit node),
                              ftransfer pass node f)
           -- Rewrite fired: recursively analyze the replacement graph
           -- with the (possibly new) rewrite function.
           Just (FwdRes ag rw) -> do { g <- graphOfAGraph ag
                                     ; let pass' = pass { fp_rewrite = rw }
                                     ; arfGraph pass' g (elift node f) } }

-- type demonstration
_arfBlock :: NonLocal n => ARF' n f (Block n) e x
_arfBlock = arfBlock

arfBlock :: NonLocal n => ARF (Block n) n
-- Lift from nodes to blocks
arfBlock pass (BFirst node)  = arfNode pass node
arfBlock pass (BMiddle node) = arfNode pass node
arfBlock pass (BLast node)   = arfNode pass node
arfBlock pass (BCat b1 b2)   = arfCat arfBlock arfBlock pass b1 b2
arfBlock pass (BHead h n)    = arfCat arfBlock arfNode  pass h n
arfBlock pass (BTail n t)    = arfCat arfNode  arfBlock pass n t
arfBlock pass (BClosed h t)  = arfCat arfBlock arfBlock pass h t

-- Sequence two forward analyses, threading the fact out of the first
-- into the second and splicing the two rewritten graphs.
arfCat :: (pass -> thing1 -> info1 -> FuelMonad (RG f n e a, info2))
       -> (pass -> thing2 -> info2 -> FuelMonad (RG f n a x, info2'))
       -> (pass -> thing1 -> thing2 -> info1 -> FuelMonad (RG f n e x, info2'))
{-# INLINE arfCat #-}
arfCat arf1 arf2 pass thing1 thing2 f = do { (g1,f1) <- arf1 pass thing1 f
                                           ; (g2,f2) <- arf2 pass thing2 f1
                                           ; return (g1 `rgCat` g2, f2) }
arfBody :: NonLocal n
        => FwdPass n f -> Body n -> FactBase f
        -> FuelMonad (RG f n C C, FactBase f)
-- Outgoing factbase is restricted to Labels *not*
-- in the Body; the facts for Labels *in*
-- the Body are in the BodyWithFacts
arfBody pass blocks init_fbase
  = fixpoint True (fp_lattice pass) do_block init_fbase $
    forwardBlockList (factBaseLabels init_fbase) blocks
  where
    do_block b f = do (g, fb) <- arfBlock pass b $ lookupF pass (entryLabel b) f
                      return (g, factBaseList fb)

arfGraph :: NonLocal n => ARFX (Graph n) n
-- Lift from blocks to graphs, dispatching on the graph's shape.
arfGraph _    GNil        = \f -> return (rgnil, f)
arfGraph pass (GUnit blk) = arfBlock pass blk
arfGraph pass (GMany NothingO body NothingO) = arfBody pass body
arfGraph pass (GMany NothingO body (JustO exit))
  = arfCat arfBody (arfx arfBlock) pass body exit
arfGraph pass (GMany (JustO entry) body NothingO)
  = arfCat arfBlock arfBody pass entry body
arfGraph pass (GMany (JustO entry) body (JustO exit))
  = arfCat arfeb (arfx arfBlock) pass (entry, body) exit
  where arfeb pass = uncurry $ arfCat arfBlock arfBody pass
-- Join all the incoming facts with bottom.
-- We know the results _shouldn't change_, but the transfer
-- functions might, for example, generate some debugging traces.
joinInFacts :: DataflowLattice f -> FactBase f -> FactBase f
joinInFacts (DataflowLattice {fact_bot = bot, fact_extend = fe}) fb =
  mkFactBase $ map botJoin $ factBaseList fb
  where botJoin (l, f) = (l, snd $ fe l (OldFact bot) (NewFact f))

forwardBlockList :: (NonLocal n, LabelsPtr entry)
                 => entry -> Body n -> [Block n C C]
-- This produces a list of blocks in order suitable for forward analysis,
-- along with the list of Labels it may depend on for facts.
forwardBlockList entries blks = postorder_dfs_from (bodyMap blks) entries
-----------------------------------------------------------------------------
-- Backward analysis and rewriting: the interface
-----------------------------------------------------------------------------
-- A backward pass bundles a lattice with transfer and rewrite functions.
data BwdPass n f
  = BwdPass { bp_lattice  :: DataflowLattice f
            , bp_transfer :: BwdTransfer n f
            , bp_rewrite  :: BwdRewrite n f }

-- One transfer per node shape; a last node consumes the FactBase of
-- its successors (facts flow against control flow).
newtype BwdTransfer n f
  = BwdTransfers { getBTransfers ::
                     ( n C O -> f -> f
                     , n O O -> f -> f
                     , n O C -> FactBase f -> f
                     ) }

-- One rewrite per node shape; Nothing means "no rewrite here".
newtype BwdRewrite n f
  = BwdRewrites { getBRewrites ::
                    ( n C O -> f -> Maybe (BwdRes n f C O)
                    , n O O -> f -> Maybe (BwdRes n f O O)
                    , n O C -> FactBase f -> Maybe (BwdRes n f O C)
                    ) }

-- Result of a backward rewrite: replacement graph plus (possibly new)
-- rewrite function, mirroring FwdRes.
data BwdRes n f e x = BwdRes (AGraph n e x) (BwdRewrite n f)

mkBTransfer :: (n C O -> f -> f) -> (n O O -> f -> f) ->
               (n O C -> FactBase f -> f) -> BwdTransfer n f
mkBTransfer f m l = BwdTransfers (f, m, l)

-- Build a transfer from one shape-polymorphic function.
mkBTransfer' :: (forall e x . n e x -> Fact x f -> f) -> BwdTransfer n f
mkBTransfer' f = BwdTransfers (f, f, f)

mkBRewrite :: (n C O -> f -> Maybe (BwdRes n f C O))
           -> (n O O -> f -> Maybe (BwdRes n f O O))
           -> (n O C -> FactBase f -> Maybe (BwdRes n f O C))
           -> BwdRewrite n f
mkBRewrite f m l = BwdRewrites (f, m, l)

-- Build a rewrite from one shape-polymorphic function.
mkBRewrite' :: (forall e x . n e x -> Fact x f -> Maybe (BwdRes n f e x)) -> BwdRewrite n f
mkBRewrite' f = BwdRewrites (f, f, f)
-----------------------------------------------------------------------------
-- Backward implementation
-----------------------------------------------------------------------------
-- Arrow types of a backward analyze-and-rewrite: the incoming fact is
-- the one at the exit (Fact x f) and the result fact flows out of the
-- entry.
type ARB' n f thing e x
  = BwdPass n f -> thing e x -> Fact x f -> FuelMonad (RG f n e x, f)

type ARBX' n f thing e x
  = BwdPass n f -> thing e x -> Fact x f -> FuelMonad (RG f n e x, Fact e f)

type ARB  thing n = forall f e x. ARB'  n f thing e x
type ARBX thing n = forall f e x. ARBX' n f thing e x

-- Lift a closed-entry backward analysis to return a singleton FactBase
-- keyed by the entry label (joined with bottom for debugging traces).
arbx :: NonLocal thing => ARB' n f thing C x -> ARBX' n f thing C x
arbx arb pass thing f = do { (rg, f) <- arb pass thing f
                           ; let fb = joinInFacts (bp_lattice pass) $
                                      mkFactBase [(entryLabel thing, f)]
                           ; return (rg, fb) }
arbNode :: (NonLocal n, ShapeLifter e x) => ARB' n f n e x
-- Lifts (BwdTransfer,BwdRewrite) to ARB_Node;
-- this time we do rewriting as well.
-- The ARB_Graph parameters specifies what to do with the rewritten graph
arbNode pass node f
  = do { mb_g <- withFuel (brewrite pass node f)
       ; case mb_g of
           -- No rewrite: keep the node, transfer the fact backward.
           Nothing -> return (rgunit entry_f (unit node), entry_f)
             where entry_f = btransfer pass node f
           -- Rewrite fired: analyze the replacement graph recursively.
           Just (BwdRes ag rw) -> do { g <- graphOfAGraph ag
                                     ; let pass' = pass { bp_rewrite = rw }
                                     ; (g, f) <- arbGraph pass' g f
                                     ; return (g, elower (bp_lattice pass) node f)} }

arbBlock :: NonLocal n => ARB (Block n) n
-- Lift from nodes to blocks
arbBlock pass (BFirst node)  = arbNode pass node
arbBlock pass (BMiddle node) = arbNode pass node
arbBlock pass (BLast node)   = arbNode pass node
arbBlock pass (BCat b1 b2)   = arbCat arbBlock arbBlock pass b1 b2
arbBlock pass (BHead h n)    = arbCat arbBlock arbNode  pass h n
arbBlock pass (BTail n t)    = arbCat arbNode  arbBlock pass n t
arbBlock pass (BClosed h t)  = arbCat arbBlock arbBlock pass h t

-- Like arfCat but facts flow right-to-left: analyze the second thing
-- first and feed its entry fact to the first.
arbCat :: (pass -> thing1 -> info1 -> FuelMonad (RG f n e a, info1'))
       -> (pass -> thing2 -> info2 -> FuelMonad (RG f n a x, info1))
       -> (pass -> thing1 -> thing2 -> info2 -> FuelMonad (RG f n e x, info1'))
{-# INLINE arbCat #-}
arbCat arb1 arb2 pass thing1 thing2 f = do { (g2,f2) <- arb2 pass thing2 f
                                           ; (g1,f1) <- arb1 pass thing1 f2
                                           ; return (g1 `rgCat` g2, f1) }
arbBody :: NonLocal n
        => BwdPass n f -> Body n -> FactBase f
        -> FuelMonad (RG f n C C, FactBase f)
-- Backward fixpoint over a closed/closed body; fixpoint is called with
-- is_fwd = False, so every block is treated as reachable.
arbBody pass blocks init_fbase
  = fixpoint False (bp_lattice pass) do_block init_fbase $
    backwardBlockList blocks
  where
    do_block b f = do (g, f) <- arbBlock pass b f
                      return (g, [(entryLabel b, f)])

arbGraph :: NonLocal n => ARBX (Graph n) n
-- Lift from blocks to graphs, dispatching on the graph's shape.
arbGraph _    GNil        = \f -> return (rgnil, f)
arbGraph pass (GUnit blk) = arbBlock pass blk
arbGraph pass (GMany NothingO body NothingO) = arbBody pass body
arbGraph pass (GMany NothingO body (JustO exit)) =
  arbCat arbBody (arbx arbBlock) pass body exit
arbGraph pass (GMany (JustO entry) body NothingO) =
  arbCat arbBlock arbBody pass entry body
arbGraph pass (GMany (JustO entry) body (JustO exit)) =
  arbCat arbeb (arbx arbBlock) pass (entry, body) exit
  where arbeb pass = uncurry $ arbCat arbBlock arbBody pass
backwardBlockList :: NonLocal n => Body n -> [Block n C C]
-- This produces a list of blocks in order suitable for backward analysis:
-- the reverse of the forward order from the external entry labels,
-- followed by any blocks that order missed (see the note below).
-- (The local binding is named allBlocks to avoid shadowing Prelude.all.)
backwardBlockList body = reachable ++ missing
  where reachable = reverse $ forwardBlockList entries body
        entries   = externalEntryLabels body
        allBlocks = bodyList body
        missingLabels =
          mkLabelSet (map fst allBlocks) `minusLabelSet`
          mkLabelSet (map entryLabel reachable)
        missing = map snd $ filter (flip elemLabelSet missingLabels . fst) allBlocks
{-
The forward and backward dataflow analyses now use postorder depth-first
order for faster convergence.
The forward and backward cases are not dual. In the forward case, the
entry points are known, and one simply traverses the body blocks from
those points. In the backward case, something is known about the exit
points, but this information is essentially useless, because we don't
actually have a dual graph (that is, one with edges reversed) to
compute with. (Even if we did have a dual graph, it would not avail
us---a backward analysis must include reachable blocks that don't
reach the exit, as in a procedure that loops forever and has side
effects.)
Since in the general case, no information is available about entry
points, I have put in a horrible hack. First, I assume that every
label defined but not used is an entry point. Then, because an entry
point might also be a loop header, I add, in arbitrary order, all the
remaining "missing" blocks. Needless to say, I am not pleased.
I am not satisfied. I am not Senator Morgan.
Wait! I believe that the Right Thing here is to require that anyone
wishing to analyze a graph closed at the entry provide a way of
determining the entry points, if any, of that graph. This requirement
can apply equally to forward and backward analyses; I believe that
using the input FactBase to determine the entry points of a closed
graph is *also* a hack.
NR
-}
-- | Analyze and rewrite a closed/closed body backward, returning the
-- transformed body together with the facts at its labels.
analyzeAndRewriteBwd
  :: forall n f. NonLocal n
  => BwdPass n f
  -> Body n -> FactBase f
  -> FuelMonad (Body n, FactBase f)
analyzeAndRewriteBwd pass body facts
  = do { (rg, _) <- arbBody pass body facts
       ; return (normaliseBody rg) }

-- | if the graph being analyzed is open at the exit, I don't
-- quite understand the implications of possible other exits
analyzeAndRewriteBwd'
  :: forall n f e x. NonLocal n
  => BwdPass n f
  -> Graph n e x -> Fact x f
  -> FuelMonad (Graph n e x, FactBase f, MaybeO e f)
analyzeAndRewriteBwd' pass g f =
  do (rg, fout) <- arbGraph pass g f
     let (g', fb) = normalizeGraph rg
     -- Additionally report the fact flowing into a distinguished open
     -- entry, when the graph has one.
     return (g', fb, distinguishedEntryFact g' fout)
-- | The fact at the graph's open entry, when it has one. (The local
-- helper is named to avoid shadowing Prelude's 'maybe'.)
distinguishedEntryFact :: forall n e x f . Graph n e x -> Fact e f -> MaybeO e f
distinguishedEntryFact g f = entryFactOf g
  where entryFactOf :: Graph n e x -> MaybeO e f
        entryFactOf GNil       = JustO f
        entryFactOf (GUnit {}) = JustO f
        entryFactOf (GMany e _ _) =
          case e of NothingO -> NothingO
                    JustO _  -> JustO f
-----------------------------------------------------------------------------
-- fixpoint: finding fixed points
-----------------------------------------------------------------------------
-- Accumulator threaded through one round of the fixpoint iteration.
data TxFactBase n f
  = TxFB { tfb_fbase :: FactBase f
         , tfb_rg    :: RG f n C C -- Transformed blocks
         , tfb_cha   :: ChangeFlag
         , tfb_lbls  :: LabelSet }
-- Note [TxFactBase change flag]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Set the tfb_cha flag iff
-- (a) the fact in tfb_fbase for a block L changes
-- (b) L is in tfb_lbls.
-- The tfb_lbls are all Labels of the *original*
-- (not transformed) blocks
-- Fold one (label, fact) pair produced by a block into the factbase,
-- joining with any existing fact and raising the change flag only for
-- labels the current round's blocks actually depend on.
updateFact :: DataflowLattice f -> LabelSet -> (Label, f)
           -> (ChangeFlag, FactBase f)
           -> (ChangeFlag, FactBase f)
-- See Note [TxFactBase change flag]
updateFact lat lbls (lbl, new_fact) (cha, fbase)
  | NoChange <- cha2        = (cha, fbase)
  | lbl `elemLabelSet` lbls = (SomeChange, new_fbase)
  | otherwise               = (cha, new_fbase)
  where
    (cha2, res_fact) -- Note [Unreachable blocks]
      = case lookupFact fbase lbl of
          -- First fact ever seen for this label: always a change.
          Nothing -> (SomeChange, snd $ join $ fact_bot lat) -- Note [Unreachable blocks]
          Just old_fact -> join old_fact
      where join old_fact = fact_extend lat lbl (OldFact old_fact) (NewFact new_fact)
    new_fbase = extendFactBase fbase lbl res_fact
fixpoint :: forall block n f. NonLocal (block n)
         => Bool -- Going forwards?
         -> DataflowLattice f
         -> (block n C C -> FactBase f
             -> FuelMonad (RG f n C C, [(Label, f)]))
         -> FactBase f
         -> [block n C C]
         -> FuelMonad (RG f n C C, FactBase f)
-- Iterate do_block over the blocks until no fact that any block depends
-- on changes; the fuel supply is restored before every new round so each
-- iteration sees the same rewrite budget.
fixpoint is_fwd lat do_block init_fbase untagged_blocks
  = do { fuel <- getFuel
       ; tx_fb <- loop fuel init_fbase
       ; return (tfb_rg tx_fb,
                 tfb_fbase tx_fb `delFromFactBase` map fst blocks) }
    -- The successors of the Graph are the Labels for which
    -- we have facts, that are *not* in the blocks of the graph
  where
    -- Pair each block with the labels whose facts it depends on:
    -- its own entry label going forward, its successors going backward.
    blocks = map tag untagged_blocks
      where tag b = ((entryLabel b, b), if is_fwd then [entryLabel b] else successors b)

    tx_blocks :: [((Label, block n C C), [Label])] -- I do not understand this type
              -> TxFactBase n f -> FuelMonad (TxFactBase n f)
    tx_blocks []                     tx_fb = return tx_fb
    tx_blocks (((lbl,blk), deps):bs) tx_fb = tx_block lbl blk deps tx_fb >>= tx_blocks bs
      -- "deps" == Labels the block may _depend_ upon for facts

    tx_block :: Label -> block n C C -> [Label]
             -> TxFactBase n f -> FuelMonad (TxFactBase n f)
    tx_block lbl blk deps tx_fb@(TxFB { tfb_fbase = fbase, tfb_lbls = lbls
                                      , tfb_rg = blks, tfb_cha = cha })
      -- Forward only: skip blocks with no incoming fact yet.
      | is_fwd && not (lbl `elemFactBase` fbase)
      = return tx_fb {tfb_lbls = lbls `unionLabelSet` mkLabelSet deps} -- Note [Unreachable blocks]
      | otherwise
      = do { (rg, out_facts) <- do_block blk fbase
           ; let (cha',fbase')
                   = foldr (updateFact lat lbls) (cha,fbase) out_facts
                 lbls' = lbls `unionLabelSet` mkLabelSet deps
           ; return (TxFB { tfb_lbls = lbls'
                          , tfb_rg = rg `rgCat` blks
                          , tfb_fbase = fbase', tfb_cha = cha' }) }

    loop :: Fuel -> FactBase f -> FuelMonad (TxFactBase n f)
    loop fuel fbase
      = do { let init_tx_fb = TxFB { tfb_fbase = fbase
                                   , tfb_cha   = NoChange
                                   , tfb_rg    = rgnilC
                                   , tfb_lbls  = emptyLabelSet }
           ; tx_fb <- tx_blocks blocks init_tx_fb
           ; case tfb_cha tx_fb of
               NoChange   -> return tx_fb
               -- Something grew: restore the fuel and go round again.
               SomeChange -> do { setFuel fuel
                                ; loop fuel (tfb_fbase tx_fb) } }
{- Note [Unreachable blocks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A block that is not in the domain of tfb_fbase is "currently unreachable".
A currently-unreachable block is not even analyzed. Reason: consider
constant prop and this graph, with entry point L1:
L1: x:=3; goto L4
L2: x:=4; goto L4
L4: if x>3 goto L2 else goto L5
Here L2 is actually unreachable, but if we process it with bottom input fact,
we'll propagate (x=4) to L4, and nuke the otherwise-good rewriting of L4.
* If a currently-unreachable block is not analyzed, then its rewritten
graph will not be accumulated in tfb_rg. And that is good:
unreachable blocks simply do not appear in the output.
* Note that clients must be careful to provide a fact (even if bottom)
for each entry point. Otherwise useful blocks may be garbage collected.
* Note that updateFact must set the change-flag if a label goes from
not-in-fbase to in-fbase, even if its fact is bottom. In effect the
real fact lattice is
UNR
bottom
the points above bottom
* Even if the fact is going from UNR to bottom, we still call the
client's fact_extend function because it might give the client
some useful debugging information.
* All of this only applies for *forward* fixpoints. For the backward
case we must treat every block as reachable; it might finish with a
'return', and therefore have no successors, for example.
-}
-----------------------------------------------------------------------------
-- RG: an internal data type for graphs under construction
-- TOTALLY internal to Hoopl; each block carries its fact
-----------------------------------------------------------------------------
-- A graph under construction in which every block carries its fact.
type RG f n e x = Graph' (FBlock f) n e x
data FBlock f n e x = FBlock f (Block n e x)

--- constructors

rgnil  :: RG f n O O
rgnilC :: RG f n C C
rgunit :: f -> Block n e x -> RG f n e x
rgCat  :: RG f n e a -> RG f n a x -> RG f n e x

---- observers

type BodyWithFacts  n f     = (Body n, FactBase f)
type GraphWithFacts n f e x = (Graph n e x, FactBase f)
-- A Graph together with the facts for that graph
-- The domains of the two maps should be identical
-- | Strip the facts off an annotated graph, returning the plain graph
-- together with a 'FactBase' holding the entry fact of every labelled
-- block (all body blocks, plus the exit block if the graph is open there).
normalizeGraph :: forall n f e x .
                  NonLocal n => RG f n e x -> GraphWithFacts n f e x

-- | As 'normalizeGraph', but for a closed/closed graph, whose underlying
-- graph is known to be a 'GMany' and so has a 'Body' to extract.
normaliseBody :: NonLocal n => RG f n C C -> BodyWithFacts n f

normalizeGraph g = (graphMapBlocks dropFact g, facts g)
    where dropFact (FBlock _ b) = b
          -- Collect facts only from labelled (closed-on-entry) blocks;
          -- GNil/GUnit graphs have no labelled blocks and contribute none.
          facts :: RG f n e x -> FactBase f
          facts GNil = noFacts
          facts (GUnit _) = noFacts
          facts (GMany _ body exit) = bodyFacts body `unionFactBase` exitFacts exit
          exitFacts :: MaybeO x (FBlock f n C O) -> FactBase f
          exitFacts NothingO = noFacts
          exitFacts (JustO (FBlock f b)) = mkFactBase [(entryLabel b, f)]
          bodyFacts :: Body' (FBlock f) n -> FactBase f
          bodyFacts (BodyUnit (FBlock f b)) = mkFactBase [(entryLabel b, f)]
          bodyFacts (b1 `BodyCat` b2) = bodyFacts b1 `unionFactBase` bodyFacts b2

normaliseBody rg = (body, fact_base)
    where (GMany _ body _, fact_base) = normalizeGraph rg
--- implementation of the constructors (boring)
-- | The empty open/open graph.
rgnil  = GNil
-- | The empty closed/closed graph.
rgnilC = GMany NothingO BodyEmpty NothingO

-- | Wrap a single annotated block in a graph; the wrapper's shape
-- (CO/OO/OC/CC) is dictated by the block constructor matched on.
rgunit f b@(BFirst  {}) = gUnitCO (FBlock f b)
rgunit f b@(BMiddle {}) = gUnitOO (FBlock f b)
rgunit f b@(BLast   {}) = gUnitOC (FBlock f b)
rgunit f b@(BCat    {}) = gUnitOO (FBlock f b)
rgunit f b@(BHead   {}) = gUnitCO (FBlock f b)
rgunit f b@(BTail   {}) = gUnitOC (FBlock f b)
rgunit f b@(BClosed {}) = gUnitCC (FBlock f b)

-- | Splice two annotated graphs; where two blocks meet at the seam, the
-- first block's fact is kept and the second's is dropped.
rgCat = U.splice fzCat
    where fzCat (FBlock f b1) (FBlock _ b2) = FBlock f (b1 `U.cat` b2)
----------------------------------------------------------------
-- Utilities
----------------------------------------------------------------
-- Lifting based on shape:
-- - from nodes to blocks
-- - from facts to fact-like things
-- Lowering back:
-- - from fact-like things to facts
-- Note that the latter two functions depend only on the entry shape.
-- | Shape-indexed operations: lift a node to a singleton block, lift a
-- fact to the fact-like thing flowing into a node of this entry shape,
-- lower such a fact-like thing back to a plain fact, and select the
-- matching component of the transfer/rewrite triples.
class ShapeLifter e x where
  unit      :: n e x -> Block n e x
  -- | Lift a fact to the 'Fact' flavour matching the entry shape.
  elift     :: NonLocal n =>                      n e x -> f -> Fact e f
  -- | Project this node's fact out of a 'Fact'; the lattice supplies the
  -- bottom fact when the node's label is absent.
  elower    :: NonLocal n => DataflowLattice f -> n e x -> Fact e f -> f
  ftransfer :: FwdPass n f -> n e x -> f        -> Fact x f
  btransfer :: BwdPass n f -> n e x -> Fact x f -> f
  frewrite  :: FwdPass n f -> n e x -> f        -> Maybe (FwdRes n f e x)
  brewrite  :: BwdPass n f -> n e x -> Fact x f -> Maybe (BwdRes n f e x)
-- Entry-closed nodes ("first" nodes): the incoming fact-like thing is a
-- FactBase keyed by label; select the first component of each triple.
instance ShapeLifter C O where
  unit            = BFirst
  elift n f       = mkFactBase [(entryLabel n, f)]
  elower lat n fb = getFact lat (entryLabel n) fb
  ftransfer (FwdPass {fp_transfer = FwdTransfers (ft, _, _)}) n f = ft n f
  btransfer (BwdPass {bp_transfer = BwdTransfers (bt, _, _)}) n f = bt n f
  frewrite  (FwdPass {fp_rewrite  = FwdRewrites  (fr, _, _)}) n f = fr n f
  brewrite  (BwdPass {bp_rewrite  = BwdRewrites  (br, _, _)}) n f = br n f

-- Entry-open, exit-open nodes ("middle" nodes): facts pass through
-- unchanged; select the middle component of each triple.
instance ShapeLifter O O where
  unit         = BMiddle
  elift _ f    = f
  elower _ _ f = f
  ftransfer (FwdPass {fp_transfer = FwdTransfers (_, ft, _)}) n f = ft n f
  btransfer (BwdPass {bp_transfer = BwdTransfers (_, bt, _)}) n f = bt n f
  frewrite  (FwdPass {fp_rewrite  = FwdRewrites  (_, fr, _)}) n f = fr n f
  brewrite  (BwdPass {bp_rewrite  = BwdRewrites  (_, br, _)}) n f = br n f

-- Exit-closed nodes ("last" nodes): select the final component of each
-- triple.
instance ShapeLifter O C where
  unit         = BLast
  elift _ f    = f
  elower _ _ f = f
  ftransfer (FwdPass {fp_transfer = FwdTransfers (_, _, ft)}) n f = ft n f
  btransfer (BwdPass {bp_transfer = BwdTransfers (_, _, bt)}) n f = bt n f
  frewrite  (FwdPass {fp_rewrite  = FwdRewrites  (_, _, fr)}) n f = fr n f
  brewrite  (BwdPass {bp_rewrite  = BwdRewrites  (_, _, br)}) n f = br n f
-- Fact lookup: the fact `orelse` bottom
-- | Look up the fact for a label in a forward pass's lattice; falls back
-- to the lattice bottom when the label is absent.
lookupF :: FwdPass n f -> Label -> FactBase f -> f
lookupF pass = getFact (fp_lattice pass)

-- | The fact for a label, or the lattice's bottom element when the label
-- has no entry in the fact base.
getFact :: DataflowLattice f -> Label -> FactBase f -> f
getFact lat l fb = maybe (fact_bot lat) id (lookupFact fb l)
|
ezyang/hoopl
|
src/Compiler/Hoopl/OldDataflow.hs
|
bsd-3-clause
| 28,994
| 3
| 17
| 7,984
| 8,214
| 4,292
| 3,922
| 396
| 5
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.NHC
-- Copyright : Isaac Jones 2003-2006
-- Duncan Coutts 2009
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module contains most of the NHC-specific code for configuring, building
-- and installing packages.
{- Copyright (c) 2003-2005, Isaac Jones
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.NHC (
configure,
getInstalledPackages,
buildLib,
buildExe,
installLib,
installExe,
) where
import Distribution.Package
( PackageName, PackageIdentifier(..), InstalledPackageId(..)
, packageId, packageName )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo
, InstalledPackageInfo_( InstalledPackageInfo, installedPackageId
, sourcePackageId )
, emptyInstalledPackageInfo, parseInstalledPackageInfo )
import Distribution.PackageDescription
( PackageDescription(..), BuildInfo(..), Library(..), Executable(..)
, hcOptions, usedExtensions )
import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), ComponentLocalBuildInfo(..) )
import Distribution.Simple.BuildPaths
( mkLibName, objExtension, exeExtension )
import Distribution.Simple.Compiler
( CompilerFlavor(..), CompilerId(..), Compiler(..)
, Flag, languageToFlags, extensionsToFlags
, PackageDB(..), PackageDBStack )
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (PackageIndex)
import Language.Haskell.Extension
( Language(Haskell98), Extension(..), KnownExtension(..) )
import Distribution.Simple.Program
( ProgramConfiguration, userMaybeSpecifyPath, programPath
, requireProgram, requireProgramVersion, lookupProgram
, nhcProgram, hmakeProgram, ldProgram, arProgram
, rawSystemProgramConf )
import Distribution.Simple.Utils
( die, info, findFileWithExtension, findModuleFiles
, installOrdinaryFile, installExecutableFile, installOrdinaryFiles
, createDirectoryIfMissingVerbose, withUTF8FileContents )
import Distribution.Version
( Version(..), orLaterVersion )
import Distribution.Verbosity
import Distribution.Text
( display, simpleParse )
import Distribution.ParseUtils
( ParseResult(..) )
import System.FilePath
( (</>), (<.>), normalise, takeDirectory, dropExtension )
import System.Directory
( doesFileExist, doesDirectoryExist, getDirectoryContents
, removeFile, getHomeDirectory )
import Data.Char ( toLower )
import Data.List ( nub )
import Data.Maybe ( catMaybes )
import Data.Monoid ( Monoid(..) )
import Control.Monad ( when, unless )
import Distribution.Compat.Exception
-- -----------------------------------------------------------------------------
-- Configuring
-- | Locate nhc98 (>= 1.20) and hmake (>= 3.13) plus the ld and ar tools,
-- and describe the resulting compiler (its id, languages, extensions).
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
          -> ProgramConfiguration -> IO (Compiler, ProgramConfiguration)
configure verbosity hcPath _hcPkgPath conf = do

  (_nhcProg, nhcVersion, conf') <-
    requireProgramVersion verbosity nhcProgram
      (orLaterVersion (Version [1,20] []))
      (userMaybeSpecifyPath "nhc98" hcPath conf)

  (_hmakeProg, _hmakeVersion, conf'') <-
    requireProgramVersion verbosity hmakeProgram
      (orLaterVersion (Version [3,13] [])) conf'
  (_ldProg, conf''')  <- requireProgram verbosity ldProgram conf''
  (_arProg, conf'''') <- requireProgram verbosity arProgram conf'''

  --TODO: put this stuff in a monad so we can say just:
  --      requireProgram hmakeProgram (orLaterVersion (Version [3,13] []))
  --      requireProgram ldProgram anyVersion
  --      requireProgram arProgram anyVersion
  --      unless (null (cSources bi)) $ requireProgram ccProgram anyVersion

  let comp = Compiler {
        compilerId         = CompilerId NHC nhcVersion,
        compilerLanguages  = nhcLanguages,
        compilerExtensions = nhcLanguageExtensions
      }
  return (comp, conf'''')
-- | The languages nhc98 supports, with the flag selecting each.
nhcLanguages :: [(Language, Flag)]
nhcLanguages = [(Haskell98, "-98")]

-- | The flags for the supported extensions.  An empty flag string means
-- the extension is (pretended to be) always on/off with no flag needed.
nhcLanguageExtensions :: [(Extension, Flag)]
nhcLanguageExtensions =
    -- TODO: pattern guards in 1.20
    -- NHC doesn't enforce the monomorphism restriction at all.
    -- Technically it therefore doesn't support MonomorphismRestriction,
    -- but that would mean it doesn't support Haskell98, so we pretend
    -- that it does.
    [(EnableExtension  MonomorphismRestriction, "")
    ,(DisableExtension MonomorphismRestriction, "")
    -- Similarly, I assume the FFI is always on
    ,(EnableExtension  ForeignFunctionInterface, "")
    ,(DisableExtension ForeignFunctionInterface, "")
    -- Similarly, I assume existential quantification is always on
    ,(EnableExtension  ExistentialQuantification, "")
    ,(DisableExtension ExistentialQuantification, "")
    -- Similarly, I assume empty data decls is always on
    ,(EnableExtension  EmptyDataDecls, "")
    ,(DisableExtension EmptyDataDecls, "")
    ,(EnableExtension  NamedFieldPuns, "-puns")
    ,(DisableExtension NamedFieldPuns, "-nopuns")
    -- CPP can't actually be turned off, but we pretend that it can
    ,(EnableExtension  CPP, "-cpp")
    ,(DisableExtension CPP, "")
    ]
-- | Enumerate the packages installed in the given package dbs.  nhc98
-- keeps one directory per package under each db directory; the global db
-- lives next to the nhc binary under @../include/nhc98/packages@.
getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration
                     -> IO PackageIndex
getInstalledPackages verbosity packagedbs conf = do
  homedir <- getHomeDirectory
  (nhcProg, _) <- requireProgram verbosity nhcProgram conf
  let bindir = takeDirectory (programPath nhcProg)
      incdir = takeDirectory bindir </> "include" </> "nhc98"
      dbdirs = nub (concatMap (packageDbPaths homedir incdir) packagedbs)
  indexes <- mapM getIndividualDBPackages dbdirs
  return $! mconcat indexes

  where
    getIndividualDBPackages :: FilePath -> IO PackageIndex
    getIndividualDBPackages dbdir = do
      pkgdirs <- getPackageDbDirs dbdir
      pkgs    <- sequence [ getInstalledPackage pkgname pkgdir
                          | (pkgname, pkgdir) <- pkgdirs ]
      -- Unreadable packages are dropped; old-style entries get their
      -- installedPackageId filled in from the source package id.
      let pkgs' = map setInstalledPackageId (catMaybes pkgs)
      return (PackageIndex.fromList pkgs')
-- | The directories to search for installed packages, for one package db.
packageDbPaths :: FilePath -> FilePath -> PackageDB -> [FilePath]
packageDbPaths _home incdir GlobalPackageDB          = [ incdir </> "packages" ]
packageDbPaths _home _      UserPackageDB            = [] --TODO any standard per-user db?
packageDbPaths _home _      (SpecificPackageDB path) = [ path ]
-- | List the per-package subdirectories of a package db directory, paired
-- with the package name parsed from each directory name.  Entries whose
-- names do not parse as package names, or that are not directories, are
-- skipped; a missing db directory yields no entries.
getPackageDbDirs :: FilePath -> IO [(PackageName, FilePath)]
getPackageDbDirs dbdir = do
  dbexists <- doesDirectoryExist dbdir
  if not dbexists
    then return []
    else do
      entries <- getDirectoryContents dbdir
      -- The pattern (entry, Just pkgname) filters out unparsable names.
      pkgdirs <- sequence
        [ do pkgdirExists <- doesDirectoryExist pkgdir
             return (pkgname, pkgdir, pkgdirExists)
        | (entry, Just pkgname) <- [ (entry, simpleParse entry)
                                   | entry <- entries ]
        , let pkgdir = dbdir </> entry ]
      return [ (pkgname, pkgdir) | (pkgname, pkgdir, True) <- pkgdirs ]
-- | Read the installed package info for one package directory.  A real
-- @package.conf@ file is preferred; otherwise we fall back to a phony
-- entry built from the @.cabal@ file; Nothing when neither exists.
getInstalledPackage :: PackageName -> FilePath -> IO (Maybe InstalledPackageInfo)
getInstalledPackage pkgname pkgdir = do
  let pkgconfFile = pkgdir </> "package.conf"
      cabalFile   = pkgdir <.> "cabal"
  pkgconfExists <- doesFileExist pkgconfFile
  cabalExists   <- doesFileExist cabalFile
  if pkgconfExists
    then getFullInstalledPackageInfo pkgname pkgconfFile
    else if cabalExists
           then getPhonyInstalledPackageInfo pkgname cabalFile
           else return Nothing
-- | Parse a real @package.conf@ file, accepting it only when the package
-- name it declares matches the expected package name.
getFullInstalledPackageInfo :: PackageName -> FilePath
                            -> IO (Maybe InstalledPackageInfo)
getFullInstalledPackageInfo pkgname pkgconfFile =
  withUTF8FileContents pkgconfFile $ \contents ->
    case parseInstalledPackageInfo contents of
      ParseOk _ pkginfo | packageName pkginfo == pkgname
                       -> return (Just pkginfo)
      -- Parse failures and name mismatches are both treated as "no info".
      _                -> return Nothing
-- | This is a backup option for existing versions of nhc98 which do not supply
-- proper installed package info files for the bundled libs. Instead we look
-- for the .cabal file and extract the package version from that.
-- We don't know any other details for such packages, in particular we pretend
-- that they have no dependencies.
--
getPhonyInstalledPackageInfo :: PackageName -> FilePath
                             -> IO (Maybe InstalledPackageInfo)
getPhonyInstalledPackageInfo pkgname pathsModule = do
  content <- readFile pathsModule
  case extractVersion content of
    Nothing      -> return Nothing
    Just version -> return (Just pkginfo)
      where
        pkgid   = PackageIdentifier pkgname version
        -- Only the source package id is known; everything else keeps its
        -- empty default (in particular, no dependencies).
        pkginfo = emptyInstalledPackageInfo { sourcePackageId = pkgid }
  where
    -- search through the .cabal file, looking for a line like:
    --
    -- > version: 2.0
    --
    -- Succeeds only when exactly one version line is found.
    extractVersion :: String -> Maybe Version
    extractVersion content =
      case catMaybes (map extractVersionLine (lines content)) of
        [version] -> Just version
        _         -> Nothing
    -- Accepts both "version: 2.0" (tag and colon fused) and
    -- "version : 2.0" (colon as a separate word); case-insensitive tag.
    extractVersionLine :: String -> Maybe Version
    extractVersionLine line =
      case words line of
        [versionTag, ":", versionStr]
          | map toLower versionTag == "version"  -> simpleParse versionStr
        [versionTag, versionStr]
          | map toLower versionTag == "version:" -> simpleParse versionStr
        _ -> Nothing
-- Older installed package info files did not have the installedPackageId
-- field, so if it is missing then we fill it as the source package ID.
-- | When the installedPackageId is missing (empty string), default it to
-- the rendered source package id; otherwise leave the info unchanged.
setInstalledPackageId :: InstalledPackageInfo -> InstalledPackageInfo
setInstalledPackageId pkginfo@InstalledPackageInfo {
                        installedPackageId = InstalledPackageId "",
                        sourcePackageId    = pkgid
                      }
  = pkginfo {
      --TODO use a proper named function for the conversion
      --     from source package id to installed package id
      installedPackageId = InstalledPackageId (display pkgid)
    }
setInstalledPackageId pkginfo = pkginfo
-- -----------------------------------------------------------------------------
-- Building
-- |FIX: For now, the target must contain a main module. Not used
-- ATM. Re-add later.
-- | Build the library: compile all modules via hmake/nhc98, then archive
-- the resulting object files into a static library with ar.
buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo
         -> Library -> ComponentLocalBuildInfo -> IO ()
buildLib verbosity pkg_descr lbi lib clbi = do
  let conf = withPrograms lbi
      -- Partial pattern: assumes configure already located nhc98.
      Just nhcProg = lookupProgram nhcProgram conf
  let bi = libBuildInfo lib
      modules = exposedModules lib ++ otherModules bi
      -- Unsupported extensions have already been checked by configure
      languageFlags = languageToFlags   (compiler lbi) (defaultLanguage bi)
                   ++ extensionsToFlags (compiler lbi) (usedExtensions bi)
  inFiles <- getModulePaths lbi bi modules
  let targetDir = buildDir lbi
      srcDirs  = nub (map takeDirectory inFiles)
      destDirs = map (targetDir </>) srcDirs
  -- hmake needs the output directory structure to exist up front.
  mapM_ (createDirectoryIfMissingVerbose verbosity True) destDirs
  rawSystemProgramConf verbosity hmakeProgram conf $
       ["-hc=" ++ programPath nhcProg]
    ++ nhcVerbosityOptions verbosity
    ++ ["-d", targetDir, "-hidir", targetDir]
    ++ maybe [] (hcOptions NHC . libBuildInfo)
       (library pkg_descr)
    ++ languageFlags
    ++ concat [ ["-package", display (packageName pkgid) ]
              | (_, pkgid) <- componentPackageDeps clbi ]
    ++ inFiles

{-
  -- build any C sources
  unless (null (cSources bi)) $ do
     info verbosity "Building C Sources..."
     let commonCcArgs = (if verbosity >= deafening then ["-v"] else [])
                     ++ ["-I" ++ dir | dir <- includeDirs bi]
                     ++ [opt | opt <- ccOptions bi]
                     ++ (if withOptimization lbi then ["-O2"] else [])
     flip mapM_ (cSources bi) $ \cfile -> do
       let ofile = targetDir </> cfile `replaceExtension` objExtension
       createDirectoryIfMissingVerbose verbosity True (takeDirectory ofile)
       rawSystemProgramConf verbosity hmakeProgram conf
         (commonCcArgs ++ ["-c", cfile, "-o", ofile])
-}

  -- link:
  info verbosity "Linking..."
  let --cObjs = [ targetDir </> cFile `replaceExtension` objExtension
      --        | cFile <- cSources bi ]
      libFilePath = targetDir </> mkLibName (packageId pkg_descr)
      hObjs = [ targetDir </> ModuleName.toFilePath m <.> objExtension
              | m <- modules ]
  unless (null hObjs {-&& null cObjs-}) $ do
    -- first remove library if it exists, since ar only appends otherwise
    removeFile libFilePath `catchIO` \_ -> return ()
    let arVerbosity | verbosity >= deafening = "v"
                    | verbosity >= normal    = ""
                    | otherwise              = "c"
    rawSystemProgramConf verbosity arProgram (withPrograms lbi) $
         ["q"++ arVerbosity, libFilePath]
      ++ hObjs
--    ++ cObjs
-- | Building an executable for NHC.
buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo
         -> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe verbosity pkg_descr lbi exe clbi = do
  let conf = withPrograms lbi
      -- Partial pattern: assumes configure already located nhc98.
      Just nhcProg = lookupProgram nhcProgram conf

  -- hmake derives the output name from the main-is file, so the
  -- executable's name must match it; otherwise we bail out early.
  when (dropExtension (modulePath exe) /= exeName exe) $
    die $ "hmake does not support exe names that do not match the name of "
       ++ "the 'main-is' file. You will have to rename your executable to "
       ++ show (dropExtension (modulePath exe))

  let bi = buildInfo exe
      modules = otherModules bi
      -- Unsupported extensions have already been checked by configure
      languageFlags = languageToFlags   (compiler lbi) (defaultLanguage bi)
                   ++ extensionsToFlags (compiler lbi) (usedExtensions bi)
  inFiles <- getModulePaths lbi bi modules
  let targetDir = buildDir lbi </> exeName exe
      exeDir   = targetDir </> (exeName exe ++ "-tmp")
      srcDirs  = nub (map takeDirectory (modulePath exe : inFiles))
      destDirs = map (exeDir </>) srcDirs
  mapM_ (createDirectoryIfMissingVerbose verbosity True) destDirs
  rawSystemProgramConf verbosity hmakeProgram conf $
       ["-hc=" ++ programPath nhcProg]
    ++ nhcVerbosityOptions verbosity
    ++ ["-d", targetDir, "-hidir", targetDir]
    -- NOTE(review): these are the *library's* hc-options rather than the
    -- executable's own (hcOptions NHC bi); looks copied from buildLib --
    -- confirm whether that is intentional.
    ++ maybe [] (hcOptions NHC . libBuildInfo)
       (library pkg_descr)
    ++ languageFlags
    ++ concat [ ["-package", display (packageName pkgid) ]
              | (_, pkgid) <- componentPackageDeps clbi ]
    ++ inFiles
    ++ [exeName exe]
-- | Map Cabal verbosity onto hmake/nhc flags: -v when deafening, no flag
-- at normal verbosity, and -q (quiet) otherwise.
nhcVerbosityOptions :: Verbosity -> [String]
nhcVerbosityOptions verbosity =
  if verbosity >= deafening
    then ["-v"]
    else if verbosity >= normal
           then []
           else ["-q"]
--TODO: where to put this? it's duplicated in .Simple too
-- | Resolve each module name to a source file, searching the build dir
-- (for generated sources) and then the hs-source-dirs; dies when any
-- module's source cannot be found.
getModulePaths :: LocalBuildInfo -> BuildInfo -> [ModuleName] -> IO [FilePath]
getModulePaths lbi bi modules = sequence
  [ findFileWithExtension ["hs", "lhs"] (buildDir lbi : hsSourceDirs bi)
      (ModuleName.toFilePath module_) >>= maybe (notFound module_) (return . normalise)
  | module_ <- modules ]
  where notFound module_ = die $ "can't find source for module " ++ display module_
-- -----------------------------------------------------------------------------
-- Installing
-- |Install executables for NHC.
installExe :: Verbosity -- ^verbosity
           -> FilePath  -- ^install location
           -> FilePath  -- ^Build location
           -> (FilePath, FilePath) -- ^Executable (prefix,suffix)
           -> Executable
           -> IO ()
installExe verbosity pref buildPref (progprefix,progsuffix) exe
    = do createDirectoryIfMissingVerbose verbosity True pref
         let exeBaseName = exeName exe
             exeFileName = exeBaseName <.> exeExtension
             -- Installed name has the configured prefix/suffix applied.
             fixedExeFileName = (progprefix ++ exeBaseName ++ progsuffix) <.> exeExtension
         installExecutableFile verbosity
           (buildPref </> exeBaseName </> exeFileName)
           (pref </> fixedExeFileName)
-- |Install for nhc98: .hi and .a files
installLib :: Verbosity -- ^verbosity
           -> FilePath  -- ^install location
           -> FilePath  -- ^Build location
           -> PackageIdentifier
           -> Library
           -> IO ()
installLib verbosity pref buildPref pkgid lib
    = do let bi = libBuildInfo lib
             modules = exposedModules lib ++ otherModules bi
         -- Install the interface (.hi) files, preserving directory layout.
         findModuleFiles [buildPref] ["hi"] modules
           >>= installOrdinaryFiles verbosity pref
         -- Install the static library archive.
         let libName = mkLibName pkgid
         installOrdinaryFile verbosity (buildPref </> libName) (pref </> libName)
|
alphaHeavy/cabal
|
Cabal/Distribution/Simple/NHC.hs
|
bsd-3-clause
| 18,460
| 0
| 17
| 4,454
| 3,466
| 1,843
| 1,623
| 280
| 5
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
module Pipeline.UKFTractography
( UKFTractographyExe (..)
, UKFTractographyType (..)
, rules
) where
import Pipeline.DWI hiding (rules)
import Pipeline.DWIMask hiding (rules)
import Control.Monad (unless, when)
import qualified Paths
import Shake.BuildNode
import qualified System.Directory as IO
import Util (buildGitHubCMake)
import Pipeline.Util (showKey)
-- | A build of the UKFTractography executable, identified by the git
-- revision it is built from.
newtype UKFTractographyExe = UKFTractographyExe GitHash
  deriving (Show,Generic,Typeable,Eq,Hashable,Binary,NFData,Read)

instance BuildNode UKFTractographyExe where
  -- The output path encodes the hash so builds of different revisions
  -- can coexist.
  path (UKFTractographyExe hash) = Paths.ukfTractographyExePrefix ++ "-" ++ hash
  build out@(UKFTractographyExe hash) = Just $ do
    -- Clone and build via CMake in a temporary sibling of the target.
    clonedir <- liftIO . IO.makeAbsolute $ takeDirectory (path out)
                </> "UKFTractography-" ++ hash ++ "-tmp"
    buildGitHubCMake [] "pnlbwh/ukftractography" hash clonedir
    -- Move the built binary into place, then remove the clone.
    liftIO $ IO.renameFile (clonedir
                            </> "_build"
                            </> "UKFTractography-build/ukf/bin/UKFTractography") (path out)
    liftIO $ IO.removeDirectoryRecursive clonedir
-- | A subject/case identifier.
type CaseId = String
-- | UKFTractography command-line parameters as (flag, value) pairs.
type Params = [(String, String)]

-- | Either the stock parameter set ('defaultParams') or an explicit one.
data UKFTractographyType = UKFTractographyDefault
                         | UKFTractography Params
  deriving (Show,Generic,Typeable,Eq,Hashable,Binary,NFData,Read)
-- | The stock parameter set used for 'UKFTractographyDefault' runs
-- (a two-tensor model; see each flag's value below).
defaultParams :: Params
defaultParams = [("Ql","70")
                ,("Qm","0.001")
                ,("Rs","0.015")
                ,("numTensor","2")
                ,("recordLength","1.7")
                ,("seedFALimit","0.18")
                ,("seedsPerVoxel","10")
                ,("stepLength","0.3")]
-- | Render parameter pairs as command-line arguments, e.g.
-- ("Ql","70") becomes the two arguments "--Ql" and "70".
formatParams :: Params -> [String]
formatParams ps = [piece | (arg, val) <- ps, piece <- ["--" ++ arg, val]]
instance BuildNode (UKFTractographyType, DwiType, DwiMaskType, CaseId) where
  -- Default runs land directly in the case's tractography directory;
  -- parameterized runs nest one directory per parameter value.
  path key@(UKFTractographyDefault, _, _, caseid)
    = Paths.ukfTractographyDir caseid </> showKey key <.> "vtk"
  path key@(UKFTractography params, _, _, caseid)
    = Paths.ukfTractographyDir caseid
      </> params2dirs params
      </> "UKFTractography-" ++ caseid
      <.> "vtk"
    where params2dirs = foldr (</>) "" . map snd

  build key@(ukftype, dwitype, dwimasktype, caseid) = Just $ do
    -- The executable's revision comes from build configuration.
    Just exeNode <- fmap UKFTractographyExe <$> getConfig "UKFTractography-hash"
    need exeNode
    need $ Dwi (dwitype, caseid)
    need $ DwiMask (dwimasktype, dwitype, caseid)
    let params = case ukftype of
                   UKFTractographyDefault -> defaultParams
                   (UKFTractography params) -> params
    -- The DWI mask doubles as the seeding region (--seedsFile).
    cmd (path exeNode) (["--dwiFile", path $ Dwi (dwitype, caseid)
                        ,"--maskFile", path $ DwiMask (dwimasktype, dwitype, caseid)
                        ,"--seedsFile", path $ DwiMask (dwimasktype, dwitype, caseid)
                        ,"--recordTensors"
                        ,"--tracts", path key] ++ formatParams params)
-- | Register the build rules: one for the executable itself and one for
-- tractography outputs keyed by (type, dwi, mask, case).
rules :: Rules ()
rules = do
  rule (buildNode :: UKFTractographyExe -> Maybe (Action [Double]))
  rule (buildNode :: (UKFTractographyType, DwiType, DwiMaskType, CaseId) -> Maybe (Action [Double]))
|
pnlbwh/test-tensormasking
|
pipeline-lib/Pipeline/UKFTractography.hs
|
bsd-3-clause
| 3,225
| 0
| 16
| 815
| 919
| 508
| 411
| 69
| 1
|
module Parser.ObjParser
( ObjMaterial
, ObjVertexGroup(..)
, ObjMesh(..)
, ObjScene(..)
, parseObjFile
) where
import Graphics.Rendering.OpenGL.GL.VertexSpec
import Graphics.Rendering.OpenGL.GL.Texturing
import Graphics.Rendering.OpenGL.GL.Tensor
import Text.Parsec
import Text.Parsec.Perm
import qualified Text.Parsec.Token as P
--import Text.Parsec.Token
import Text.Parsec.Language
import Text.Parsec.String
import Data.Maybe
import qualified Data.IntMap as IM
import qualified Data.Map as M
import System.Directory
import Data.List
import Data.Array
import Math.Vector
import Engine.Texture
import Engine.Geometry
import Utility.Map
import Utility.List
import Utility.Tuple
-- | An obj material is just the engine's 'Material' type.
type ObjMaterial a = Material a
-- a vertex group is what we can fit into a single drawcall, we use glDrawElements with with indexed
-- vbos to draw, so the offset is an offset into an element array buffer and the size is the number of
-- indices we want to render
--
-- in the obj file, each usemtl that specifies a material will result in a new vertex group like this,
-- so that we can set the appropriate parameters for the material before issuing the drawcall, the
-- implementation does not recognize when the same material is repeatedly used, it assumes each usemtl
-- directive uses a new material
data ObjVertexGroup = ObjVertexGroup
    { group_material :: Maybe String  -- material name from "usemtl", if any
    , group_offset   :: Int           -- offset into the element array buffer
    , group_size     :: Int           -- number of indices to render
    }
    deriving Show
-- a mesh has a name, a list of vertices, optional normals and texcoords and a list of indices that
-- describe the faces of the mesh, all faces will be triangles in this implementation
--
-- the list of vertex groups associates ranges of the data with materials, using the offset and size
-- of the vertex groups we can render the data using a material with a glDrawElements call
data ObjMesh a b = ObjMesh
    { objmesh_name   :: String
    -- vertices, normals, texcoords and the face indices into them
    , objmesh_data   :: ([Vertex3 a], [Normal3 a], [TexCoord2 a], Indices b)
    -- material batches: ranges of the index data, one per usemtl
    , objmesh_groups :: [ObjVertexGroup]
    }
    deriving Show
-- the main data structure holding the whole scene, it contains a material library associating
-- material string names with actual material data structures, and a map of objects with their names
-- used as keys
data ObjScene a b = ObjScene
    { objscene_mtllib  :: M.Map String (ObjMaterial a)  -- material name -> material
    , objscene_objects :: M.Map b (ObjMesh a b)         -- mesh key -> mesh
    }
-- this definition is used by Parsec to create a custom lexer for our language
-- | Lexer definition for the obj/mtl language: '#' starts a line comment,
-- identifiers may contain path-like characters (for file names), and all
-- obj/mtl keywords are reserved so they cannot parse as identifiers.
objStyle :: P.LanguageDef st
objStyle = P.LanguageDef
  { P.commentStart    = ""
  , P.commentEnd      = ""
  , P.commentLine     = "#"
  , P.nestedComments  = True
  , P.identStart      = alphaNum <|> oneOf "_/"
  , P.identLetter     = alphaNum <|> oneOf "_./-"
  , P.opStart         = P.opLetter objStyle
  , P.opLetter        = parserZero
  , P.reservedOpNames = []
    -- "Ni" was listed twice; the duplicate has been dropped (the list is
    -- only used for membership tests, so duplicates were redundant).
  , P.reservedNames   = ["mtllib","o","v","vp","vt","vn","g","s","usemtl","f"
                        ,"newmtl","Ka","Kd","Ks","Ke","Ns","Ni","Tf","d","-halo","illum","sharpness"
                        ,"map_Ka","map_Kd","map_Ks"]
  , P.caseSensitive   = True
  }
-- create the lexer
-- The token parser built from our obj lexer definition.
lexer = P.makeTokenParser objStyle

-- these are for simplicity, we use our custom lexer to create parsers
-- for different types of tokens
naturalOrFloat = P.naturalOrFloat lexer
natural        = P.natural lexer
identifier     = P.identifier lexer
reserved       = P.reserved lexer
symbol         = P.symbol lexer
whiteSpace     = P.whiteSpace lexer
-- a component a vertex, normal or texcoord, also used when parsing materials
-- this exists so that I can parse numbers that look like an int or a float and
-- may even have a plus or minus in front indicating its sign
parseComponent :: (Fractional c) => GenParser Char st c
parseComponent = do
    -- Optional leading sign; defaults to positive when absent.
    sign <- option 1 $ do
        s <- oneOf "+-"
        -- oneOf "+-" guarantees the case below is exhaustive.
        return $ case s of
            '+' -> (1.0)
            '-' -> (-1.0)
    x <- naturalOrFloat
    -- Convert either an integer or a floating literal to the target
    -- Fractional type, applying the sign.
    return $ case x of
        Left y  -> fromRational((fromInteger y) * sign)
        Right y -> fromRational((fromRational . toRational $ y) * sign)
-- sadly this code relies on the mutable parser state, in an obj file their can be "g", "s" and "usemtl"
-- statements more or less everywhere that are stateful, that means setting the group with "g", the smoothgroup
-- with "s" or the material with "usemtl" applies to all statements that come afterwards, so I use this state
-- to set the current groupname, smoothgroup or material whenever I parse those, then query the state when I
-- actually need them
data ParserState = ParserState
    { parserstate_groupname :: Maybe String,    -- current "g" group (Nothing = off)
      parserstate_smoothgroup :: Maybe String,  -- current "s" smoothgroup (Nothing = off)
      parserstate_material :: Maybe String,     -- current "usemtl" material
      -- For each of the counters below: (count parsed for the current
      -- object, running total from all preceding objects).
      num_vertices :: (Integer,Integer),
      num_texcoords :: (Integer,Integer),
      num_normals :: (Integer,Integer) }

-- | Initial state: no group/smoothgroup/material, all counters zero.
initParserState :: ParserState
initParserState = ParserState
    { parserstate_groupname = Nothing,
      parserstate_smoothgroup = Nothing,
      parserstate_material = Nothing,
      num_vertices = (0,0),
      num_normals = (0,0),
      num_texcoords = (0,0) }
-- both parseSmoothGroup and parseMaterialGroup look and act very similar for a reason which I explain in the
-- comment for parseSmoothGroup
--
-- parseMaterialGroup exists because I noticed a tendency of exporters to sprinkle "g", "s" and "usemtl" blocks
-- everywhere, even at places where I don't expected them
-- so parseMaterialGroup parses these blocks which may consist of any number of randomly ordered occurences of
-- these "g", "s" and "usemtl" keywords using a permuting parser
--
-- notice how parseMaterialGroup and parseSmoothGroup have no return type, these functions actually only have a
-- side effect, they modify the parser state if "g", "s" or "usemtl" were parsed, and only if they were parsed
parseMaterialGroup :: GenParser Char ParserState ()
parseMaterialGroup = do
    -- All three directives are optional and may appear in any order,
    -- hence the permuting parser with Nothing as each default.
    (maybe_material, maybe_groupname, maybe_smoothgroup) <- permute $
        (,,)
        <$?> (Nothing,
              do reserved "usemtl"
                 mat <- identifier
                 return $ Just mat)
        <|?> (Nothing,
              do reserved "g"
                 groupname <- option "off" identifier
                 return $ Just groupname)
        <|?> (Nothing,
              do reserved "s"
                 smoothgroup <- option "off" identifier
                 return $ Just smoothgroup)
    -- Update state only for directives actually seen; the sentinel "off"
    -- clears the group/smoothgroup back to Nothing.
    if isJust maybe_material
     then modifyState (\p -> p{ parserstate_material = maybe_material })
     else return ()
    if isJust maybe_groupname
     then modifyState (\p -> p{ parserstate_groupname = if maybe_groupname == (Just "off") then Nothing else maybe_groupname })
     else return ()
    if isJust maybe_smoothgroup
     then modifyState (\p -> p{ parserstate_smoothgroup = if maybe_smoothgroup == (Just "off") then Nothing else maybe_smoothgroup })
     else return ()
-- the reason why parseSmoothGroup exists is because I needed a way to distinguish when parseMaterialGroup parses only
-- a smoothgroup consisting of "g" or "s", or when parseMaterialGroup parses a full material with "g","s" or "usemtl",
-- while still having each of the "g","s" or "usemtl" options remain optional in the permuting parser, so that I can
-- use the parser even when there is none of the keywords present to be parsed at all
--
-- parseSmoothGroup is used instead of parseMaterialGroup when I parse the indices of the mesh that make up the faces,
-- these are grouped by an initial material definition but then also may have multiple groupnames and smoothgroups,
-- making it neccessary that I parse only "g" or "s" keywords, but not "usemtl"
--
-- just as parseMaterialGroup this has no return type, it only modifies the parser state
parseSmoothGroup :: GenParser Char ParserState ()
parseSmoothGroup = do
    -- Like parseMaterialGroup but deliberately without "usemtl": both
    -- directives are optional and may appear in either order.
    (maybe_groupname, maybe_smoothgroup) <- permute $
        (,)
        <$?> (Nothing,
              do reserved "g"
                 groupname <- option "off" identifier
                 return $ Just groupname)
        <|?> (Nothing,
              do reserved "s"
                 smoothgroup <- option "off" identifier
                 return $ Just smoothgroup)
    -- Update state only for directives actually seen; "off" clears them.
    if isJust maybe_groupname
     then modifyState (\p -> p{ parserstate_groupname = if maybe_groupname == (Just "off") then Nothing else maybe_groupname })
     else return ()
    if isJust maybe_smoothgroup
     then modifyState (\p -> p{ parserstate_smoothgroup = if maybe_smoothgroup == (Just "off") then Nothing else maybe_smoothgroup })
     else return ()
-- parseVertices should be relativly simple to understand, it just parses the raw vertices, normals and texcoords of
-- the mesh with are represented by lines starting with either "v", "vn" or "vt" in the obj files
--
-- the v, vn or vt lines each come as a block, but the whole blocks may be ordered arbitrarily, so again I am using
-- a permuting parser
parseVertices :: (VertexComponent c, Fractional c) => GenParser Char ParserState ([Vertex3 c], [Normal3 c], [TexCoord2 c])
parseVertices = do
    parseMaterialGroup
    -- The v/vn/vt blocks may appear in any order; only "v" is mandatory
    -- (normals and texcoords default to []).
    (vertex_list,normal_list,texcoord_list) <- permute $
        (,,)
        <$$> (many1 $ do
                reserved "v"
                x <- parseComponent
                y <- parseComponent
                z <- parseComponent
                return $ Vertex3 x y z)
        <|?> ([],many1 $ do
                reserved "vn"
                nx <- parseComponent
                ny <- parseComponent
                nz <- parseComponent
                return $ Normal3 nx ny nz)
        <|?> ([],many1 $ do
                reserved "vt"
                u <- parseComponent
                v <- parseComponent
                return $ TexCoord2 u v)
    -- Record (count for this object, running total of previous objects);
    -- presumably used later to rebase global face indices per object --
    -- TODO confirm against the face parser.
    updateState (\p -> p{ num_vertices = (\(a,b) -> (fromIntegral $ length vertex_list, a+b)) $ num_vertices p} )
    updateState (\p -> p{ num_normals = (\(a,b) -> (fromIntegral $ length normal_list, a+b)) $ num_normals p} )
    updateState (\p -> p{ num_texcoords = (\(a,b) -> (fromIntegral $ length texcoord_list, a+b)) $ num_texcoords p} )
    return $ (vertex_list, normal_list, texcoord_list)
-- after parsing the vertices with parseVertices, whats left is parsing the indices that make up the faces, together with all smoothgroups
-- and materials that are associated with faces, thats what parseVertexGroups is for
--
-- the face indices come as lines starting with a "f" and then any number (>=3) of triplets like v/n/t, where v,n,t are indices into the
-- vertices, normal and texcoord arrays parsed above, and only v is mandatory, n and t are optional
--
-- this function is crucial to understanding how this programs data is setup and then rendered, the parseVertexGroup is used as argument
-- to a many1 parser to parse all batches of indices seperated by different materials that we can render with glDrawElements
--
-- notice how we first use parseMaterialGroup, then use parseSmoothGroup in the argument to many1Till, notice also how the second argument
-- to many1Till, the end parser, is a parser very similar to parseMaterialGroup but uses try $ lookAhead in front and "usemtl" is non optional
--
-- the end parser ensures that we terminate as soon as we match another material group, but can not use parseMaterialGroup as end parser
-- because that would also match smooth groups because its "usemtl" is optional
--
-- the parser that does all the work of parsing all the indices, that also contains parseSmoothGroup at its beginning is repeatedly parsing
-- the f v/n/t ... lines and the parseSmoothGroup ensures that all smooth groups are also parsed but we still put everything into the same
-- material batch because only when we parse a material group with a "usemtl" we terminate
--
-- maybe things become clearer when looking at the result type: (Maybe String, [(Maybe String, [(i, Maybe i, Maybe i)])])
-- - the first Maybe String is a material, this function only returns one material batch
-- - the second list are all smooth groups belonging to the material batch, each smooth group is identified by a Maybe String, there may
-- be several smooth groups with the same identifying Maybe String
-- - the last list [(i, Maybe i, Maybe i)] represent polygons assembled by the index triplets v/n/t we are mainly interested in
--
-- finally notice in that case statement that if a parsed polygon is larger then a triangle, it will get triangulated, we need the actual
-- vertices for that so thats why we pass them to this functions as first argument
-- | Parse one material batch of face definitions ("f v/t/n ..." lines).
--
-- Returns the batch's material together with its smooth groups, each with
-- the polygon index triplets (vertex, normal, texcoord; only vertex is
-- mandatory).  Polygons with more than three corners are triangulated,
-- which needs the actual vertex positions, hence the first argument.
-- Parsing of one batch ends when the lookahead parser matches the next
-- "usemtl"/"o" line (or eof).
parseVertexGroup :: (Enum c, RealFloat c, VertexComponent c, Integral i) => [Vertex3 c] -> GenParser Char ParserState (Maybe String, [(Maybe String, [(i, Maybe i, Maybe i)])])
parseVertexGroup vertices = do
    parseMaterialGroup
    maybe_material <- getState >>= return . parserstate_material
    -- offsets make per-object indices relative (obj indices are global)
    (ParserState _ _ _ (_,v_offset) (_,t_offset) (_,n_offset)) <- getState
    vertexgroup <- many1Till (do
        parseSmoothGroup
        maybe_smoothgroup <- getState >>= return . parserstate_smoothgroup
        reserved "f"
        -- a face has at least three corners; each corner is either a
        -- "v/t/n" triplet (t and n optional) or a bare vertex index
        face <- many3 $ try (do
                v <- do
                    v' <- natural
                    return $ fromIntegral (v'-v_offset)
                symbol "/"
                t <- option Nothing $ do
                    t' <- natural
                    return $ Just $ fromIntegral (t'-t_offset)
                symbol "/"
                n <- option Nothing $ do
                    n' <- natural
                    return $ Just $ fromIntegral (n'-n_offset)
                return (v, n, t))
            <|> (do
                v' <- natural
                return (fromIntegral v',Nothing,Nothing))
        case face of
            -- triangles pass through unchanged
            (a:b:c:[]) -> return (maybe_smoothgroup, [a,b,c])
            -- larger polygons are triangulated using the vertex positions
            _ -> let array_vertices = array (0,length vertices) $ zip [0..] vertices
                     polygon_indices = fst3 $ unzip3 face
                     polygon_vertices = M.fromList $ [(i,v) | i <- polygon_indices, v <- map (\i -> array_vertices ! (fromIntegral i)) polygon_indices]
                     polygon = M.fromList $ zip polygon_indices face
                     (_, triangle_indices) = triangulatePolygon polygon_vertices polygon_indices
                     triangles = map (\i -> fromJust $ M.lookup i polygon) triangle_indices
                 in return (maybe_smoothgroup, triangles))
        -- end parser: a real "usemtl" (or eof / "o" line) terminates the
        -- batch; it must be wrapped in try+lookAhead so nothing is consumed
        (try $ lookAhead $ (permute $ (,,)
            <$$> ((reserved "usemtl" >> identifier) <|> (eof >> return ""))
            <|?> (Nothing, reserved "g" >> option "off" identifier >>= return . Just)
            <|?> (Nothing, reserved "s" >> option "off" identifier >>= return . Just))
            <|> (reserved "o" >> option "" identifier >> return ("", Nothing, Nothing)))
    return (maybe_material, vertexgroup)
-- the data parsed by parseVertices and parseVertexGroups is not suitable to be rendered with opengl yet, we need to first transform it
-- so that it fits opengls idea of how the data should look like
--
-- most importantly we have the following problem: in the obj file format a vertex can have multiple normals associated with it, but to
-- render in opengl using vbos every vertex can only have one normal associated with it
-- solving this problem is easy though, we just have to iterate over all indices, look up the corresponding vertex/normal/texcoord triplet
-- from the input arrays, and append those to the output arrays
-- with this method we get all vertices, normals and texcoord in the correct order, and they are guaranteed to be unique pairings
--
-- the above gives us the vertices in correct order, and to get fitting indices we can just enumerate integers from 0 to (length vertices),
-- and since this function is called repeatedly for different vertex groups which together make up the same mesh, that get uploaded altogether
-- into opengl buffers, we have an offset for the indices, so its [offset .. offset+(length vertices)] below
--
-- when you look at the code below, you see both the vertex, normal and texcoord array generation and the indices generation besides the
-- let bindings for (more_vertices, more_normals, more_texcoords) = unzip3... and more_indices = [offset...]
--
-- the approach described above has a downside: we are throwing away all the information in the indices, and inflate the number of vertices
-- by inserting a new unique vertex for every index encountered, this is slow for larger meshes so I tried to counter this downside by
-- using the original vertices, normals, texcoords and indices as output if either there are no normals or texcoords at all, or the indices
-- for vertices, normals and textures are all equal
-- | Convert the sparse obj data (separately indexed vertices/normals/
-- texcoords) into the single-index form OpenGL needs.
--
-- The general path "inflates" one unique vertex/normal/texcoord triple per
-- index and enumerates fresh indices starting at 'offset'.  As an
-- optimization, if there are no normals and texcoords at all, or every
-- triplet uses the same index for all three attributes, the original
-- (compact) data and indices are reused unchanged.
assembleObjMeshData :: (VertexComponent c, Fractional c, RealFloat c, Enum c, Integral i) => ([Vertex3 c], [Normal3 c], [TexCoord2 c]) -> [(Maybe String, [(i, Maybe i, Maybe i)])] -> i -> ([Vertex3 c], [Normal3 c], [TexCoord2 c], Indices i)
assembleObjMeshData (orig_vertices, orig_normals, orig_texcoords) polygons offset =
    let array_vertices = array (0,length orig_vertices) $ zip [0..] orig_vertices
        array_normals = array (0,length orig_normals) $ zip [0..] orig_normals
        array_texcoords = array (0,length orig_texcoords) $ zip [0..] orig_texcoords
        -- one pass over all index triplets: record whether v/n/t agree, and
        -- look up the attributes (obj indices are 1-based, hence the -1)
        (indices_equal_list, inflated_vertices, maybe_meshnormals, maybe_meshtexcoords) = unzip4 $ do
            meshindex <- concatMap snd polygons
            let (vertex_index, maybe_normal_index, maybe_texcoord_index) = meshindex
            return ((isNothing maybe_normal_index) && (isNothing maybe_texcoord_index) ||
                    (isNothing maybe_texcoord_index) && (vertex_index == (fromJust maybe_normal_index)) ||
                    (isNothing maybe_normal_index) && (vertex_index == (fromJust maybe_texcoord_index)) ||
                    (vertex_index == (fromJust maybe_normal_index)) && (vertex_index == (fromJust maybe_texcoord_index)),
                    array_vertices ! fromIntegral (vertex_index - 1),
                    maybe Nothing (\i -> Just $ array_normals ! fromIntegral (i - 1)) maybe_normal_index,
                    maybe Nothing (\i -> Just $ array_texcoords ! fromIntegral (i - 1)) maybe_texcoord_index)
        unzipped_polygon_indices = unzip3 $ concatMap snd polygons
        -- compact path keeps original (0-based) vertex indices
        orig_vertex_indices = map (\i -> i - 1) $ fst3 $ unzipped_polygon_indices
        -- inflated path enumerates fresh indices from the running offset
        inflated_indices = [offset .. offset+inflated_vertices_size-1]
        inflated_vertices_size = fromIntegral $ length inflated_vertices
        all_indices_equal = all id indices_equal_list
        use_orig = (null orig_normals && null orig_texcoords) || all_indices_equal
    in if use_orig
       then (orig_vertices, orig_normals, orig_texcoords, orig_vertex_indices)
       else (inflated_vertices, catMaybes maybe_meshnormals, catMaybes maybe_meshtexcoords, inflated_indices)
-- this just applies assembleObjMeshData to all the vertex groups and returns the resulting data as (meshdata, meshgroups) tuple which we
-- can then use to create a ObjMesh, its just a fold which accumulates lists and keeps track of an offset
-- | Apply 'assembleObjMeshData' to every material batch and concatenate the
-- results into one (meshdata, meshgroups) pair, threading the index offset
-- through the fold.
--
-- Fix: the previous version appended to the accumulators with '++' inside
-- the fold, which is quadratic in the number of batches; we now prepend
-- per-batch chunks (kept in reverse) and concatenate once at the end, which
-- is linear and produces exactly the same lists in the same order.
assembleObjMeshGroups :: (VertexComponent c, Fractional c, RealFloat c, Enum c, Num i, Integral i) => ([Vertex3 c], [Normal3 c], [TexCoord2 c]) -> [(Maybe String, [(Maybe String, [(i, Maybe i, Maybe i)])])] -> (([Vertex3 c], [Normal3 c], [TexCoord2 c], Indices i), [ObjVertexGroup])
assembleObjMeshGroups sparsedata groupstuples =
    let -- one fold step: assemble one material batch and prepend its chunks
        step (offset, vs, ns, ts, is, gs) (maybe_material, polygons) =
            let (vertices, normals, texcoords, indices) = assembleObjMeshData sparsedata polygons offset
                indices_size = fromIntegral $ length indices
                group = ObjVertexGroup maybe_material (fromIntegral offset) (fromIntegral indices_size)
            in (offset + indices_size, vertices : vs, normals : ns, texcoords : ts, indices : is, group : gs)
        (_, rvs, rns, rts, ris, rgs) = foldl' step (0, [], [], [], [], []) groupstuples
        -- chunks were accumulated in reverse; restore order and flatten
        cat chunks = concat (reverse chunks)
    in ((cat rvs, cat rns, cat rts, cat ris), reverse rgs)
-- compared to the stuff above, parseObject, parseObjScene and parseObjFile are uninteresting, these just call the parsers we defined above
-- and stick the results together
-- parseObject parses just one mesh, consisting of first a name, then the section with all the vertices and after that a section with all
-- the face indices, makes an ObjMesh out of the parsed meshdata and meshgroups
-- | Parse one complete mesh: an optional "o <name>" line, the vertex data
-- section, then one or more material batches of faces, assembled into an
-- 'ObjMesh'.  Without an "o" line the name falls back to 'fallbackname'
-- (derived from the file name by the caller).
parseObject :: (VertexComponent c, Fractional c, RealFloat c, Enum c, Integral i) => String -> GenParser Char ParserState (ObjMesh c i)
parseObject fallbackname = do
    -- 'identifier >>= return' was a redundant bind; use the parser directly
    name <- option fallbackname (reserved "o" >> identifier)
    sparsedata <- parseVertices
    groupstuples <- many1 $ parseVertexGroup $ fst3 sparsedata
    let (meshdata, meshgroups) = assembleObjMeshGroups sparsedata groupstuples
    return $ ObjMesh name meshdata meshgroups
-- parseObjScene parse a scene that consists of an optional mtlfile keyword and then a number of parseObjects, but at least one
-- | Parse a whole obj scene: an optional "mtllib <file>" line naming the
-- material library, followed by at least one object.  The material map in
-- the returned 'ObjScene' is left empty here; the caller fills it in after
-- parsing the mtl file.
parseObjScene :: (VertexComponent c, Fractional c, RealFloat c, Enum c, Integral i) => String -> GenParser Char ParserState (Maybe FilePath,ObjScene c i)
parseObjScene fallbackname = do
    whiteSpace
    -- 'identifier >>= return . Just' simplified to 'fmap Just identifier'
    mtlfile <- option Nothing (reserved "mtllib" >> fmap Just identifier)
    objects <- many1 $ parseObject fallbackname
    eof
    return $ (mtlfile, ObjScene M.empty (listIntMap objects))
-- parseObjFile parses a whole obj file, here the error reporting is done when the parse fails, and we also call the parser for the material lib here,
-- but only if the mtl file actually exists
--
-- this function produces the final ObjScene that we use in the renderer
-- | Parse a whole obj file from disk, producing the final 'ObjScene'.
--
-- If the scene names a material library and that file exists, it is parsed
-- too and its materials merged into the result; otherwise the scene is
-- returned with an empty material map.  Parse errors from either file are
-- returned as 'Left'.
--
-- Fix: the last case alternative was written 'otherwise -> ...'; in a case
-- expression that is a fresh variable binding (shadowing Prelude.otherwise),
-- not a guard.  It happened to behave as a wildcard, but it is misleading —
-- replaced with an explicit 'Nothing' match.
parseObjFile :: (ColorComponent c,VertexComponent c, Fractional c, RealFloat c, Enum c, Integral i) => FilePath -> IO (Either ParseError (ObjScene c i))
parseObjFile objpath = do
    objinput <- readFile objpath
    -- fall back to the file's base name (without path and extension)
    let fallbackname = takeWhileEscaped (/='.') (=='\\') $ reverse $ takeWhile (/='/') $ reverse objpath
        objparse = runParser (parseObjScene fallbackname) initParserState objpath objinput
    case objparse of
        Left err -> return $ Left err
        Right (maybe_mtlpath, os@(ObjScene _ o)) ->
            case maybe_mtlpath of
                Nothing -> return $ Right os
                Just mtlpath -> do
                    b <- doesFileExist mtlpath
                    if not b
                        then return $ Right os
                        else do
                            mtlinput <- readFile mtlpath
                            let mtlparse = runParser parseObjMaterialLib initParserState mtlpath mtlinput
                            case mtlparse of
                                Left err -> return $ Left err
                                Right m -> return $ Right $ ObjScene m o
--
--
-- parseObjMaterial is just one big permute parser so that a material file can have all their keywords ordered completely arbitrary
-- | Parse one "newmtl" material definition from a mtl file.
--
-- After the mandatory "newmtl <name>" line, all remaining keywords may
-- appear in any order (hence one big 'permute'), and each is optional with
-- a default taken from 'defaultMaterial' (or Nothing for the textures).
-- The letters a..m in the comments match the argument order of
-- 'createMaterial' below.
parseObjMaterial :: (ColorComponent c, Fractional c) => GenParser Char ParserState (ObjMaterial c)
parseObjMaterial = do
    reserved "newmtl"
    name <- identifier
    permute $
        (createMaterial name)
        <$?> (material_ambient defaultMaterial, do -- a
            reserved "Ka"
            cr <- parseComponent
            cg <- parseComponent
            cb <- parseComponent
            return $ Color4 cr cg cb 1.0)
        <|?> (material_diffuse defaultMaterial, do -- b
            reserved "Kd"
            cr <- parseComponent
            cg <- parseComponent
            cb <- parseComponent
            return $ Color4 cr cg cb 1.0)
        <|?> (material_specular defaultMaterial, do -- c
            reserved "Ks"
            cr <- parseComponent
            cg <- parseComponent
            cb <- parseComponent
            return $ Color4 cr cg cb 1.0)
        <|?> (material_filter defaultMaterial, do -- d
            reserved "Tf"
            cr <- parseComponent
            cg <- parseComponent
            cb <- parseComponent
            return $ Color4 cr cg cb 1.0)
        <|?> (material_emission defaultMaterial, do -- e
            reserved "Ke"
            cr <- parseComponent
            cg <- parseComponent
            cb <- parseComponent
            return $ Color4 cr cg cb 1.0)
        <|?> (material_exponent defaultMaterial, do -- f
            reserved "Ns"
            x <- parseComponent
            return x)
        -- dissolve: optional "-halo" flag, then the dissolve factor
        <|?> (material_dissolve defaultMaterial, do -- g
            reserved "d"
            b <- option False $ do
                reserved "-halo"
                return True
            x <- parseComponent
            return (b,x))
        <|?> (material_illum defaultMaterial, do -- h
            reserved "illum"
            x <- natural
            return $ fromInteger x)
        <|?> (material_sharpness defaultMaterial, do -- i
            reserved "sharpness"
            x <- natural
            return $ fromInteger x)
        <|?> (material_refraction defaultMaterial, do -- j
            reserved "Ni"
            x <- parseComponent
            return x)
        -- texture maps: filename only, no texture object loaded yet
        <|?> (Nothing, do -- k
            reserved "map_Ka"
            x <- identifier
            return $ Just (x,Nothing))
        <|?> (Nothing, do -- l
            reserved "map_Kd"
            x <- identifier
            return $ Just (x,Nothing))
        <|?> (Nothing, do -- m
            reserved "map_Ks"
            x <- identifier
            return $ Just (x,Nothing))
    where
    -- assemble the Material record from the permuted results (a..m)
    createMaterial name a b c d e f g h i j k l m = Material
        { material_name = name
        , material_ambient = a
        , material_diffuse = b
        , material_specular = c
        , material_filter = d
        , material_emission = e
        , material_exponent = f
        , material_dissolve = g
        , material_illum = h
        , material_sharpness = i
        , material_refraction = j
        , material_ambientTexture = k
        , material_diffuseTexture = l
        , material_specularTexture = m
        }
-- parseObjMaterialLib and parseMtlFile just as their equivalents above parseObjScene and parseObjFile are both just
-- boring wrappers around existing functionality, parseObjMaterialLib parses many1 parseObjMaterial and is used by
-- parseMtlFile to parse a material file
-- | Parse a whole material library: at least one material definition,
-- collected into a map keyed by material name.
parseObjMaterialLib :: (ColorComponent c, Fractional c) => GenParser Char ParserState (M.Map String (ObjMaterial c))
parseObjMaterialLib = do
    whiteSpace
    materials <- many1 parseObjMaterial
    eof
    return (listStringMap material_name materials)
-- | Read and parse a mtl file from disk.
parseMtlFile :: (ColorComponent c, Fractional c) => FilePath -> IO (Either ParseError (M.Map String (ObjMaterial c)))
parseMtlFile path = do
    contents <- readFile path
    return (runParser parseObjMaterialLib initParserState path contents)
--
--
-- | Like 'many', but requires at least three occurrences of the parser.
many3 :: GenParser tok st a -> GenParser tok st [a]
many3 p = do
    a <- p
    b <- p
    c <- p
    rest <- many p
    return (a : b : c : rest)
-- | Like 'manyTill', but requires at least one occurrence of @p@ before
-- the @end@ parser may terminate the sequence.
many1Till :: (Stream s m t, Show end) =>
             ParsecT s u m a ->
             ParsecT s u m end ->
             ParsecT s u m [a]
many1Till p end = do
    first <- p
    rest <- manyTill p end
    return (first : rest)
|
rakete/ObjViewer
|
src/Parser/ObjParser.hs
|
bsd-3-clause
| 27,228
| 0
| 28
| 6,881
| 5,577
| 2,957
| 2,620
| 374
| 6
|
module Network.Punch.Broker.UDP (
PacketType(..),
startClient,
startServer
) where
import Data.IORef
import Control.Exception
import Control.Applicative
import Control.Monad
import qualified Data.Map as M
import Network.Socket
import Network.BSD
import Text.Read
import Text.Printf
-- The exchange protocol is used to let two clients know each other's
-- public ip and port. It should be run on a public server.
--
-- XXX: this is just an unreliable protoype.
-- | One message of the (unreliable, prototype) exchange protocol.
data Packet
  = Packet {
    pktType :: PacketType,    -- ^ Listen or Connect
    pktPeerId :: String,      -- ^ identifier used to pair the two clients
    pktPubAddr :: String,     -- ^ encoded public address (filled in by the server)
    pktPrivAddr :: String     -- ^ encoded private (LAN) address
  }
  deriving (Show)
-- | Serialize a packet by 'show'ing a 4-tuple of its fields (the packet
-- type is itself rendered with 'show' first).
encodePacket :: Packet -> String
encodePacket (Packet ty peer pub priv) = show (show ty, peer, pub, priv)
-- | Inverse of 'encodePacket'.  Calls 'error' (via 'readOrError') on
-- malformed input — callers are expected to catch this.
decodePacket :: String -> Packet
decodePacket raw = Packet kind peer pub priv
  where
    (rawKind, peer, pub, priv) = readOrError "decodePacket" raw
    kind = readOrError "decodePacket" rawKind
-- | 'read' with a custom error message: parse the string or 'error' with
-- the given message.
readOrError :: Read a => String -> String -> a
readOrError msg str =
  case readMaybe str of
    Just v  -> v
    Nothing -> error msg
-- | Render an IPv4 socket address as "host:port" (numeric 'show' forms).
-- NOTE(review): only 'SockAddrInet' is handled; other address families
-- cause a pattern-match failure — presumably IPv4-only by design, confirm.
encodeAddr :: SockAddr -> String
encodeAddr (SockAddrInet port host) = show host ++ ":" ++ show port
-- | Inverse of 'encodeAddr': parse "host:port" into a 'SockAddrInet'.
-- Fails (pattern match / 'error') if the ':' or either number is missing.
decodeAddr :: String -> SockAddr
decodeAddr str = SockAddrInet (fromIntegral portNum) hostAddr
  where
    (hostPart, _colon:portPart) = span (/= ':') str
    portNum = readOrError "decodeAddr" portPart :: Int
    hostAddr = readOrError "decodeAddr" hostPart
-- | Role of a client in the exchange: 'Listen' waits for a peer, 'Connect'
-- initiates; the server pairs one of each.
data PacketType
  = Listen
  | Connect
  deriving (Show, Eq, Ord, Read)
-- | Create a UDP socket bound to the given port on all interfaces
-- (INADDR_ANY).  Port 0 lets the OS pick a free port.
bindUDPSock :: Int -> IO Socket
bindUDPSock port = do
  sock <- socket AF_INET Datagram defaultProtocol
  bindSocket sock (SockAddrInet (fromIntegral port) iNADDR_ANY)
  return sock
-- | Register with the exchange server, learn the peer's addresses, and
-- hole-punch a UDP connection to it.
--
-- Protocol (as implemented here): send our packet to the server, wait for
-- the server's reply carrying the peer's public/private addresses, send our
-- client id to both peer addresses (the punch), then wait until the peer's
-- id arrives from one of them — that address wins.
startClient
  :: (String, Int)
  -- ^ Exchange server's hostname and port
  -> PacketType
  -- ^ Connect or Listen
  -> String
  -- ^ Client Id
  -> IO (SockAddr, Socket)
  -- ^ Established and hole punched connection.
startClient (serverHostName, serverPort) ty cid = do
  s <- bindUDPSock 0
  localAddr <- getSocketName s
  -- NOTE(review): this pattern fails unless the host resolves to exactly
  -- one address — TODO confirm that is acceptable here.
  [serverHost] <- hostAddresses <$> getHostByName serverHostName
  let
    serverAddr = SockAddrInet (fromIntegral serverPort) serverHost
    pkt = Packet ty cid "" $ encodeAddr localAddr
    -- ^ XXX: hack
  sendTo s (encodePacket pkt) serverAddr
  let
    -- loop until a datagram arrives from the server (ignore strangers)
    readServerReply = do
      (got, _, who) <- recvFrom s 512
      putStrLn $ "readServerReply -> " ++ show got ++ " from " ++ show who
      if who /= serverAddr
        then readServerReply
        else return got
  got <- readServerReply
  -- ^ The other client's addresses
  let peerPkt = decodePacket got
      pubAddr = decodeAddr (pktPubAddr peerPkt)
      privAddr = decodeAddr (pktPrivAddr peerPkt)
  -- punch both candidate addresses; the private one may be unreachable,
  -- hence the 'try' to ignore the send failure
  sendTo s cid pubAddr
  try $ sendTo s cid privAddr :: IO (Either SomeException Int)
  -- ^ Punch
  let
    -- loop until the peer's id arrives from one of its two addresses
    readPeerReply = do
      (got, _, who) <- recvFrom s 512
      putStrLn $ "readPeerReply -> " ++ show got ++ " from " ++ show who
      if got == cid && who == pubAddr
        then return pubAddr
        else if got == cid && who == privAddr
          then return privAddr
          else readPeerReply
  peerAddr <- readPeerReply
  return (peerAddr, s)
-- | This function is also provided by exch-svr/run.py
-- | This function is also provided by exch-svr/run.py
--
-- Run the exchange server: collect (Listen, Connect) registrations keyed by
-- peer id and, when a matching pair is found, send each client the other's
-- packet (carrying the observed public address) so they can hole-punch.
startServer
  :: Int
  -- ^ Port
  -> IO ()
startServer port = do
  s <- bindUDPSock port
  clients <- newIORef M.empty
  forever $ do
    (got, _, who) <- recvFrom s 512
    -- BUG FIX: 'try $ return $ decodePacket got' never caught anything,
    -- because 'return' does not evaluate its argument; the parse error
    -- escaped later when the fields were first used.  Force the packet's
    -- fields under 'evaluate' so malformed datagrams are rejected here.
    eiPkt <- try $ evaluate $
      let pkt@(Packet ty cid pub priv) = decodePacket got
      in ty `seq` cid `seq` pub `seq` priv `seq` pkt
    case eiPkt of
      Left (_ :: SomeException) -> return ()
      Right pktWithoutPub -> do
        -- record the sender's public address as observed by us
        let pkt = pktWithoutPub { pktPubAddr = encodeAddr who }
        printf "[Server] %s\n" (show pkt)
        mbPending <- M.lookup (pktPeerId pkt) <$> readIORef clients
        let
          -- matching pair found: drop the pending entry and cross-send
          -- each client the other's packet
          onPairing = do
            let Just pending = mbPending
            modifyIORef' clients $ M.delete (pktPeerId pkt)
            sendTo s (encodePacket pending) (decodeAddr (pktPubAddr pkt))
            sendTo s (encodePacket pkt) (decodeAddr (pktPubAddr pending))
            return ()
        case (pktType pkt, pktType <$> mbPending) of
          (Listen, Just Connect) -> onPairing
          (Connect, Just Listen) -> onPairing
          _ -> do
            -- XXX: dup udp msg will overwrite!
            -- strict insert avoids piling up thunks in the IORef
            modifyIORef' clients $ M.insert (pktPeerId pkt) pkt
|
overminder/punch-forward
|
src/Network/Punch/Broker/UDP.hs
|
bsd-3-clause
| 4,306
| 0
| 24
| 1,055
| 1,283
| 645
| 638
| -1
| -1
|
{-# LANGUAGE LambdaCase #-}
module River.Source.Annotation (
annotOfProgram
, annotOfBlock
, annotOfStatement
, annotOfExpression
) where
import River.Source.Syntax
-- | Extract the annotation of a program.
annotOfProgram :: Program a -> a
annotOfProgram (Program a _) = a
-- | Extract the annotation of a block.
annotOfBlock :: Block a -> a
annotOfBlock (Block a _) = a
-- | Extract the annotation of a statement (first field of every constructor).
annotOfStatement :: Statement a -> a
annotOfStatement (Declare a _ _ _) = a
annotOfStatement (Assign a _ _)    = a
annotOfStatement (If a _ _ _)      = a
annotOfStatement (While a _ _)     = a
annotOfStatement (Return a _)      = a
-- | Extract the annotation of an expression (first field of every constructor).
annotOfExpression :: Expression a -> a
annotOfExpression (Literal a _)         = a
annotOfExpression (Variable a _)        = a
annotOfExpression (Unary a _ _)         = a
annotOfExpression (Binary a _ _ _)      = a
annotOfExpression (Conditional a _ _ _) = a
|
jystic/river
|
src/River/Source/Annotation.hs
|
bsd-3-clause
| 715
| 0
| 8
| 225
| 258
| 130
| 128
| 39
| 5
|
{-# LANGUAGE GADTs #-}
module Streaming.MyIteratee (
module Data.MyIteratee
, enumPcapFile
, reorderQuotes
) where
import Data.MyIteratee
import Base
import qualified Network.Pcap as Pcap
import qualified Data.ByteString.Char8 as BS
import qualified Data.Time as T
-- enumerator for streaming contents of pcap files
-- enumerator for streaming contents of pcap files
--
-- Opens the pcap file and drives the iteratee 'it' via 'enumFromHandlers':
-- 'onGet' pulls the next (header, bytes) pair, signalling end of stream
-- with Nothing when an empty ByteString is returned.
enumPcapFile fname it = do
  handle <- Pcap.openOffline fname
  let
    onFinish = return
    -- empty payload marks end of the capture file
    onGet = do
      (hdr, bs) <- Pcap.nextBS handle
      return $ if bs == BS.pack "" then Nothing else Just (hdr, bs)
    onPrint = putStrLn
    -- [todo] handle better
    onThrow = putStrLn
  enumFromHandlers onFinish onGet onPrint onThrow it
  -- [note] no need/way to close handle
-- reorders quotes based on quote accept time, assuming that
-- `pt - qt <= 3` for each quote,
-- where `pt` : packet accept time
-- `qt` : quote accept time
-- we implement this handler using a buffer that assumes quotes
-- `q` satisfying `pt_r - qt_q > 3` can be safely reordered and emitted.
-- Here `r` is the most recently inserted quote.
-- proof that `qt_q` < qt_f` for any quote `q` in the buffer, and
-- all `f`s that we might receive in the future:
-- suppose `f` is a future quote, then since `pt_f > pt_r`, we have
-- `qt_q < pt_r - 3 < pt_f - 3 <= qt_f
-- proof that we cannot do better: let `q` be a quote in the buffer
-- such that `pt_r - qt_q = 3 - e` for `e > 0`. we construct a future `f`
-- such that `qt_q > qt_f`
-- let `f` be a packet with
-- `qt_f = pt_r - 3 + e/2` and `pt_f = pt_r + e/3`
-- then `f` is indeed a valid future packet since:
-- `pt_f > pt_r`
-- `pt_f - qt_f = 3 - e/6 < 3`
-- morever, `qt_q = pt_r - 3 + e > qt_f`
-- reorderQuotes :: Iter (Sum3 (Get (Data Quote)) x y) a
-- -> Iter (Sum3 (Get (Data Quote)) x y) a
-- Buffer-based reordering of quotes by accept time (see proof above):
-- a buffered quote may be emitted once the newest packet time exceeds its
-- accept time by more than 'maxOffset'.
reorderQuotes = reorder overdue
  where
    overdue newest buffered =
      T.diffUTCTime (packetTime newest) (acceptTime buffered) > maxOffset
|
iteloo/tsuru-sample
|
src/Streaming/MyIteratee.hs
|
bsd-3-clause
| 1,916
| 0
| 16
| 462
| 229
| 138
| 91
| 22
| 2
|
module ExTenSpec where
import Test.Hspec
import Nine (pack)
-- | Run the spec directly.
main :: IO ()
main = hspec spec
-- | Tests for run-length encoding ('encode').
spec :: Spec
spec =
  describe "Run length encode a list" $ do
    it "should return an empty list for empty list" $
      encode [] `shouldBe` ([] :: [(Int, Char)])
    it "should return (1, a) for a single element" $
      encode [1] `shouldBe` [(1, 1)]
    it "should run length code" $
      encode "aaaabccaadeeee"
        `shouldBe` [(4,'a'),(1,'b'),(2,'c'),(2,'a'),(1,'d'),(4,'e')]
-- | Run-length encode: group equal adjacent elements with 'pack', then map
-- each group to (count, element).  Groups produced by 'pack' are non-empty,
-- so 'head' is safe here.
encode :: Eq a => [a] -> [(Int, a)]
encode = map runLength . pack
  where
    runLength grp = (length grp, head grp)
|
abinr/nine
|
test/ExTenSpec.hs
|
bsd-3-clause
| 587
| 0
| 16
| 143
| 263
| 147
| 116
| 19
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module NormTypes ( NRiver
, toNRiver'
, toNRiver
, deNRiver
, riverSet
, NClaim
, toNClaim
) where
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Aeson
import Protocol (River (River), PunterId, SiteId)
-- | A river in normalized form: the endpoint pair is kept ordered
-- (smaller site id first) so the same physical river always compares equal.
newtype NRiver =
  NRiver (SiteId, SiteId)
  deriving (Eq, Ord, Show)
-- | Build a normalized river from two endpoints, ordering them.
toNRiver' :: SiteId -> SiteId -> NRiver
toNRiver' a b
  | a < b     = NRiver (a, b)
  | otherwise = NRiver (b, a)
-- | Normalize a protocol 'River'.
toNRiver :: River -> NRiver
toNRiver (River a b) = toNRiver' a b
-- | Unwrap the ordered endpoint pair.
deNRiver :: NRiver -> (SiteId, SiteId)
deNRiver (NRiver pair) = pair
-- JSON round-trips go through the protocol 'River' representation.
instance ToJSON NRiver where
  toJSON = toJSON . uncurry River . deNRiver
instance FromJSON NRiver where
  parseJSON = (toNRiver <$>) . parseJSON
-- | Normalize a list of rivers into a set (duplicates collapse).
riverSet :: [River] -> Set NRiver
riverSet rs = Set.fromList [toNRiver r | r <- rs]
type NClaim = (PunterId, NRiver)
-- | Build a normalized claim from a punter and the two river endpoints.
toNClaim :: (PunterId, SiteId, SiteId) -> NClaim
toNClaim (owner, src, tar) = (owner, toNRiver' src tar)
|
nobsun/icfpc2017
|
hs/src/NormTypes.hs
|
bsd-3-clause
| 1,045
| 0
| 8
| 292
| 342
| 197
| 145
| 30
| 2
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE QuasiQuotes #-}
-- | Error handling helpers
module Network.Libtorrent.Exceptions.Internal where
import Data.Monoid ((<>))
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Unsafe as CU
import Language.Haskell.TH.Quote ( QuasiQuoter, quoteExp )
import Network.Libtorrent.Inline
C.context libtorrentCtx
C.include "<libtorrent/error_code.hpp>"
C.using "namespace libtorrent"
-- | Like 'C.block', but wraps the quoted C++ in libtorrent error handling
-- (see 'wrapLibtorrentExcept').
except :: QuasiQuoter
except = C.block { quoteExp = quoteExp C.block . wrapLibtorrentExcept }
-- | Unsafe-call variant of 'except', based on 'CU.block'.
exceptU :: QuasiQuoter
exceptU = CU.block { quoteExp = quoteExp CU.block . wrapLibtorrentExcept }
-- | Wrap a C++ statement block in a try/catch that converts a thrown
-- @libtorrent_exception@ into a heap-allocated @error_code*@ (NULL on
-- success).  The pieces below concatenate to exactly the same string the
-- previous string-gap version produced.
wrapLibtorrentExcept :: String -> String
wrapLibtorrentExcept body = concat
  [ "error_code * {\n"
  , " try\n"
  , " {\n "
  , body
  , "\n return NULL;\n"
  , " }\n"
  , " catch (const libtorrent_exception & le)\n"
  , " {\n"
  , " return new error_code(le.error());\n"
  , " }\n"
  , "}"
  ]
|
eryx67/haskell-libtorrent
|
src/Network/Libtorrent/Exceptions/Internal.hs
|
bsd-3-clause
| 1,059
| 0
| 10
| 218
| 197
| 117
| 80
| 21
| 1
|
{-# OPTIONS_GHC -Wall -Werror -fno-warn-orphans #-}
import SCD.M4.Dependencies(buildM4IdMap, originCallGraph,
callGraph2Dot, callGraph2Ncol, callGraphPolicyModules, Origin)
import SCD.M4.ModuleFiles(readAllPolicyModules)
import System.Environment(getArgs, getProgName)
import Data.Map(Map)
import Data.Set(Set)
-- | Dispatch on the command line: -dot or -ncol plus a policy directory.
main :: IO ()
main = do
  args <- getArgs
  case args of
    ["-dot", f]  -> dotCallGraph f
    ["-ncol", f] -> ncolCallGraph f
    _            -> usage
-- | Read all policy modules under the directory and build the per-origin
-- call graph.  Fails in IO if the id map cannot be built.
getCallGraph :: String -> IO (Map Origin (Set Origin))
getCallGraph dir = do
  modules <- readAllPolicyModules dir
  idMap <- either fail return (buildM4IdMap modules)
  return (originCallGraph idMap (callGraphPolicyModules modules))
-- | Print the call graph in Graphviz dot format.
dotCallGraph :: String -> IO ()
dotCallGraph d = do
  g <- getCallGraph d
  putStrLn (callGraph2Dot g)
-- | Print the call graph in ncol format.
ncolCallGraph :: String -> IO ()
ncolCallGraph d = do
  g <- getCallGraph d
  putStrLn (callGraph2Ncol g)
-- | Print a usage message (via 'fail') mentioning the program name.
usage :: IO ()
usage = do
  prog <- getProgName
  fail (prog ++ ": [-dot|-ncol] <reference-policy-dir>")
|
GaloisInc/sk-dev-platform
|
libs/SCD/src/SCD/M4/CallGraph/CallGraph.hs
|
bsd-3-clause
| 960
| 0
| 11
| 162
| 354
| 174
| 180
| 27
| 3
|
module Tc.TcMonad where
import Control.Monad.Trans
import Data.List((\\),union, nub)
import Language.Haskell.Exts
import Tc.Assumption
import Tc.Class
import Tc.TcEnv
import Tc.TcInst
import Tc.TcSubst
import Tc.TcLabel
import Utils.Env
import Utils.EnvMonad
import Utils.ErrMsg hiding (empty)
import Utils.Id
import Utils.Stack
-- this module defines the tc monad type
-- and some operations over it.
type TcM a = EnvM TcEnv a
-- some operations over the Tc monad
-- | All instances of a class, each freshly instantiated; [] when the class
-- is not in the environment.
getInstances :: Id -> TcM [Inst]
getInstances n = do
  mcls <- gets (lookupEnv n . classenv)
  case mcls of
    Nothing  -> return []
    Just cls -> mapM freshInst (instances cls)
-- | The current stack of variable environments (one frame per scope).
getVarEnv :: TcM (Stack VarEnv)
getVarEnv = gets varenv
-- | Look up a class by name and return a fresh instantiation of it;
-- reports an error if the class is not defined.
getClass :: Id -> TcM Class
getClass n = do
  mcls <- gets (lookupEnv n . classenv)
  case mcls of
    Nothing  -> classNotDefinedError (unid n)
    Just cls -> freshInst cls
-- | The current global substitution.
getSubst :: TcM Subst
getSubst = gets subst
-- | Compose a new substitution onto the current one (new on the left).
extSubst :: Subst -> TcM ()
extSubst s = modify extend
  where extend env = env { subst = s @@ subst env }
-- | Return the current fresh counter and bump it.
freshM :: TcM Int
freshM = do
  env <- get
  let current = fresh env
  put (env { fresh = current + 1 })
  return current
-- getting labels from a record constructor
-- | Labels of a record constructor; errors if the constructor is unknown.
lookupLabels :: Id -> TcM [Label]
lookupLabels i = do
  found <- gets (labelsFrom i)
  case found of
    Nothing -> notDefinedError (unid i)
    Just ls -> return ls
-- typing context related functions
-- | Look up an identifier's assumption in the typing context; errors if
-- the identifier is not bound.
lookupGamma :: Id -> TcM Assumption
lookupGamma i = do
  found <- gets (lookupVar i)
  case found of
    Nothing -> notDefinedError (unid i)
    Just a  -> return a
-- including new assumptions
-- | Run an action with extra assumptions in scope: push a new environment
-- frame containing them, run the action, then pop the frame again
-- ('block' applies f before and f' after the action).
extendGamma :: [Assumption] -> TcM a -> TcM a
extendGamma as t
    = block f f' t
      where
        is = map (\(i :>: _) -> i) as
        -- insert one assumption, keyed by its identifier
        g (i :>: t) ac = insert i (i :>: t) ac
        -- push a frame holding all new assumptions
        f s = s{varenv = push p (varenv s)}
        p = foldr g empty as
        -- pop the frame; panics if the stack is unexpectedly empty
        f' s = s {varenv = maybe (emptyStackPanic "Tc.TcMonad.extendGamma")
                                 snd (pop $ varenv s)}
-- generating a new fresh type variable
-- | Generate a fresh type variable named "$<n>" from the fresh counter
-- (the '$' prefix cannot clash with source-level names).
newFreshVar :: TcM TypeVar
newFreshVar = do
  i <- freshM
  return (TyVar (Ident ('$' : show i)))
-- a instantiation function inside tc monad
-- | Instantiate: replace every bound variable of @t@ with a fresh one.
freshInst :: (Substitutable t, Instantiate t) => t -> TcM t
freshInst t = do
  let bound = bv t
  replacements <- mapM (const newFreshVar) bound
  return (instantiate bound replacements t)
-- free type variables in context
-- | Free type variables of the context, with the current substitution
-- applied.
ftvM :: TcM [TypeVar]
ftvM = do
  s <- getSubst
  vars <- gets fv
  return (apply s vars)
-- quantification of a type
-- | Universally quantify a qualified type: every variable free in the
-- context or type but not free in the environment is renamed to a bound
-- variable name and the result wrapped in 'TyForall'.
quantify :: Context -> Type -> TcM Type
quantify ctx t = do
  envVars <- ftvM
  let quantVars = (fv ctx `union` fv t) \\ envVars
      s = zip quantVars allBoundVars
  return (TyForall Nothing (apply s ctx) (apply s t))
-- | Infinite supply of bound-variable names: a..z, then a1..z1, a2..z2, ...
allBoundVars :: [TypeVar]
allBoundVars = map mk (singles ++ numbered)
  where
    mk nm    = TyVar (Ident nm)
    singles  = [[c] | c <- ['a'..'z']]
    numbered = [c : show i | i <- [1..], c <- ['a'..'z']]
-- quantify classes and instances for constraint generations for
-- overloaded bindings
-- | Quantify a class declaration: rename its environment-free variables to
-- bound-variable names and quantify each of its instances independently.
quantifyClass :: Class -> TcM Class
quantifyClass c@(Class n ts ss as is)
    = do
        -- variables free in the class but not in the environment
        vs <- liftM (\v -> fv c \\ v) ftvM
        let s = zip vs allBoundVars
        is' <- mapM quantifyInst is
        let c' = apply s c
        return (c' {instances = is'})
-- | Quantify one instance: rename its environment-free variables to
-- bound-variable names.
quantifyInst :: Inst -> TcM Inst
quantifyInst i@(Inst n ts ss) = do
  envVars <- ftvM
  let quantVars = fv i \\ envVars
  return (apply (zip quantVars allBoundVars) i)
-- unification functions
-- | Unify two terms and extend the global substitution with the result.
unify :: (MGU t) => t -> t -> TcM Subst
unify = unify1 True
-- | Unify two terms under the current substitution; when the flag is set,
-- the resulting substitution also extends the global one.
unify1 :: (MGU t) => Bool -> t -> t -> TcM Subst
unify1 extend t1 t2 = do
  current <- getSubst
  new <- mgu (apply current t1) (apply current t2)
  when extend (extSubst new)
  return new -- XXX may break
-- | True iff the two terms match; 'handle' appears to map success of the
-- matching computation to True and failure to False (semantics from
-- Utils.ErrMsg -- TODO confirm).
matchOk :: (Matchable t) => t -> t -> TcM Bool
matchOk t1 t2 = handle (match t1 t2) True False
-- | One-way matching without extending the global substitution.
matchfy :: (Matchable t) => t -> t -> TcM Subst
matchfy = matchfy1 False
-- | One-way matching under the current substitution; when the flag is set,
-- the resulting substitution also extends the global one.
matchfy1 :: (Matchable t) => Bool -> t -> t -> TcM Subst
matchfy1 extend t1 t2 = do
  current <- getSubst
  new <- match (apply current t1) (apply current t2)
  when extend (extSubst new)
  return new
-- a type class for the unification
-- | Most-general unification: 'mgu' returns a substitution unifying its
-- arguments, or reports a unification error.
class Substitutable t => MGU t where
   mgu :: t -> t -> TcM Subst
-- | Pointwise unification of equal-length lists, threading the
-- substitution left to right; lists of different length (including one
-- empty, one not — caught by the last clause) are an error.
instance (Pretty t, MGU t) => MGU [t] where
    mgu [] [] = return nullSubst
    mgu (t:ts) (t':ts')
        = do
            s <- mgu t t'
            s' <- mgu (apply s ts) (apply s ts')
            return (s' @@ s)
    mgu t t' = differentSizeListUnifyError t t'
-- | Unification of class assertions: same class head, unify the arguments.
--
-- Fix: the instance previously only matched (ClassA, ClassA) and
-- (InfixA, InfixA); any other combination (mixed forms, or other Asst
-- constructors) crashed with a runtime pattern-match failure.  A final
-- catch-all now reports those as ordinary unification errors.
instance MGU Asst where
    mgu c1@(ClassA n ts) c2@(ClassA n' ts')
        | n == n' = mgu ts ts'
        | otherwise = unificationError c1 c2
    mgu c1@(InfixA l n r) c2@(InfixA l' n' r')
        | n == n' = mgu [l,r] [l',r']
        | otherwise = unificationError c1 c2
    mgu c1 c2 = unificationError c1 c2
-- | Structural unification of types.  Sugar forms (tuples, lists, parens)
-- are normalized against their 'TyApp' spines via toTupleApp/toListApp;
-- free type variables are bound with 'varBind' (which presumably performs
-- the occurs check — TODO confirm); everything else must match head
-- constructors, or a unification error is reported by the final clause.
instance MGU Type where
    mgu (TyForall _ _ ty) (TyForall _ _ ty')
        = mgu ty ty'
    mgu (TyFun l r) (TyFun l' r')
        = do
            s <- mgu l l'
            s' <- mgu (apply s r) (apply s r')
            return (s' @@ s)
    mgu (TyTuple _ ts) (TyTuple _ ts')
        = mgu ts ts'
    -- tuple sugar vs. application spine
    mgu (TyTuple x ts) (TyApp l r)
        | isTuple l = mgu (toTupleApp ts) (TyApp l r)
        | otherwise = unificationError (TyTuple x ts) (TyApp l r)
    mgu (TyList t) (TyList t')
        = mgu t t'
    -- list sugar vs. application spine (both orientations)
    mgu (TyList t) (TyApp l r)
        | isList l = mgu (toListApp t) (TyApp l r)
        | otherwise = unificationError (TyList t) (TyApp l r)
    mgu (TyApp l r) (TyList t)
        | isList l = mgu (toListApp t) (TyApp l r)
        | otherwise = unificationError (TyApp l r) (TyList t)
    mgu (TyApp l r) (TyTuple x ts)
        | isTuple l = mgu (toTupleApp ts) (TyApp l r)
        | otherwise = unificationError (TyTuple x ts) (TyApp l r)
    mgu (TyApp l r) (TyApp l' r')
        = do
            s <- mgu l l'
            s' <- mgu (apply s r) (apply s r')
            return (s' @@ s)
    mgu (TyCon n) (TyCon n')
        | n == n' = return nullSubst
        | otherwise = unificationError n n'
    -- parentheses are transparent
    mgu (TyParen t) (TyParen t')
        = mgu t t'
    mgu (TyParen t) t' = mgu t t'
    mgu t (TyParen t') = mgu t t'
    mgu (TyInfix l n r) (TyInfix l' n' r')
        | n == n' = do
            s <- mgu l l'
            s' <- mgu (apply s r) (apply s r')
            return (s' @@ s)
        | otherwise = unificationError n n'
    -- only free variables may be bound; bound ones are rigid
    mgu v@(TyVar _) t
        | isFree v = varBind v t
        | otherwise = boundVariableInUnifyError v t
    mgu t v@(TyVar _)
        | isFree v = varBind v t
        | otherwise = boundVariableInUnifyError v t
    mgu t t' = unificationError t t'
-- | A type class for one-way unification (matching): 'match' finds a
-- substitution over the variables of the first (pattern) argument only.
class Substitutable t => Matchable t where
  match :: t -> t -> TcM Subst
-- | Match two lists pointwise; the partial substitutions must agree
-- ('merge' returns Nothing when they disagree on a variable).
instance (Substitutable t, Pretty t, Matchable t) => Matchable [t] where
  match [] [] = return nullSubst
  match (t:ts) (t':ts')
    = do
        s <- match t t'
        s' <- match (apply s ts) (apply s ts')
        -- merging fails when the substitutions conflict
        maybe (matchingError t t') return (merge s' s)
  -- different lengths: report a proper error
  match t t' = differentSizeListUnifyError t t'
-- | Matching of class assertions: class names must coincide and the
-- arguments are matched pointwise.
instance Matchable Asst where
  match t@(ClassA n ts) t'@(ClassA n' ts')
    | n == n' = match ts ts'
    | otherwise = matchingError t t'
  match t@(InfixA l n r) t'@(InfixA l' n' r')
    | n == n' = match [l,r] [l',r']
    | otherwise = matchingError t t'
  -- Catch-all for structurally different assertions.  Previously these
  -- fell through to a pattern-match failure (a crash); report a proper
  -- matching error instead, mirroring the Matchable Type instance.
  match t t' = matchingError t t'
-- | Matching of types (one-way unification).  Clause order matters, as
-- in the 'MGU' instance.
instance Matchable Type where
  match t@(TyForall _ ctx ty) t'@(TyForall _ ctx' ty')
    = do
        s <- match ctx ctx'
        s' <- match (apply s ty) (apply s ty')
        maybe (matchingError t t') return (merge s' s)
  match t@(TyFun l r) t'@(TyFun l' r')
    = do
        s <- match l l'
        s' <- match (apply s r) (apply s r')
        maybe (matchingError t t') return (merge s' s)
  match t@(TyTuple _ ts) t'@(TyTuple _ ts')
    = match ts ts'
  -- tuple sugar against an application of the tuple constructor
  match (TyTuple b ts) (TyApp l r)
    | isTuple l = match (toTupleApp ts) (TyApp l r)
    | otherwise = matchingError (TyTuple b ts) (TyApp l r)
  -- NOTE(review): the error below reports its arguments in the opposite
  -- order to the inputs of this clause — confirm this is intended.
  match (TyApp l r) (TyTuple b ts)
    | isTuple l = match (TyApp l r) (toTupleApp ts)
    | otherwise = matchingError (TyTuple b ts) (TyApp l r)
  match (TyList t) (TyList t')
    = match t t'
  match (TyList t) (TyApp l r)
    | isList l = match (toListApp t) (TyApp l r)
    | otherwise = matchingError (TyList t) (TyApp l r)
  -- NOTE(review): unlike the symmetric clause above (and the MGU
  -- instance) there is no 'isList l' guard here, so a non-list head is
  -- matched against the desugared list — confirm this is intended.
  match (TyApp l r) (TyList t)
    = match (TyApp l r) (toListApp t)
  match t@(TyApp l r) t'@(TyApp l' r')
    = do
        s <- match l l'
        s' <- match (apply s r) (apply s r')
        maybe (matchingError t t') return (merge s' s)
  -- parentheses are irrelevant for matching
  match (TyParen t) (TyParen t')
    = match t t'
  match (TyParen t) t' = match t t'
  match t (TyParen t') = match t t'
  match t@(TyInfix l n r) t'@(TyInfix l' n' r')
    | n == n'
      = do
          s <- match l l'
          s' <- match (apply s r) (apply s r')
          maybe (matchingError t t') return (merge s' s)
    | otherwise = matchingError t t'
  -- only free variables of the pattern may be bound
  match v@(TyVar _) t
    | isFree v = varBind v t
    | otherwise = boundVariableInUnifyError v t
  match t@(TyCon c) t'@(TyCon c')
    | c == c' = return nullSubst
    | otherwise = matchingError t t'
  -- anything else is structurally incompatible
  match t t' = matchingError t t'
-- | Bind a type variable to a type, producing a singleton substitution.
-- Binding a variable to itself yields the empty substitution; a
-- variable occurring free in its own binding fails the occurs check.
varBind :: Type -> Type -> TcM Subst
varBind v t =
  if t == v
    then return nullSubst
    else if v `elem` fv t
           then occursCheckError v t
           else return (v +-> t)
-- Some auxiliary functions for recognising and building the special
-- list and tuple type constructors.

-- | Is the head of this (possibly applied) type the list constructor?
isList :: Type -> Bool
isList (TyApp l _) = isList l
isList (TyCon (Special ListCon)) = True
isList _ = False

-- | Desugar @[t]@ into the explicit application @[] t@.
toListApp :: Type -> Type
toListApp t = TyApp (TyCon (Special ListCon)) t

-- | Is the head of this (possibly applied) type a tuple constructor?
isTuple :: Type -> Bool
isTuple (TyApp l _) = isTuple l
-- wildcards instead of the previously unused binders (silences warnings)
isTuple (TyCon (Special (TupleCon _ _))) = True
isTuple _ = False

-- | Desugar @(t1, ..., tn)@ into the explicit application
-- @(,...,) t1 ... tn@ of the boxed n-tuple constructor.
toTupleApp :: [Type] -> Type
toTupleApp ts = foldl TyApp (TyCon (Special (TupleCon Boxed n))) ts
  where n = length ts
|
rodrigogribeiro/mptc
|
src/Tc/TcMonad.hs
|
bsd-3-clause
| 10,486
| 2
| 15
| 3,941
| 4,413
| 2,143
| 2,270
| 260
| 1
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ScopedTypeVariables #-}
module NanoFeldsparTests where
import Control.Monad
import Data.List
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.Golden
import Test.Tasty.QuickCheck
import Data.ByteString.Lazy.UTF8 (fromString)
import Data.Syntactic
import Data.Syntactic.Functional
import qualified NanoFeldspar as Nano
-- | Reference scalar (dot) product; truncates to the shorter vector.
scProd :: [Float] -> [Float] -> Float
scProd xs ys = sum [x * y | (x, y) <- zip xs ys]
prop_scProd as bs = scProd as bs == Nano.eval Nano.scProd as bs
-- | Generate a random non-empty matrix as a list of rows, with between
-- 1 and 10 columns and rows, bounded by the QuickCheck size parameter.
genMat :: Gen [[Float]]
genMat = sized $ \s -> do
    -- 'succ' keeps both dimensions strictly positive;
    -- '<$>' replaces the old monad-only 'liftM' (idiomatic since AMP)
    x <- succ <$> choose (0, s `mod` 10)
    y <- succ <$> choose (0, s `mod` 10)
    replicateM y (vector x)
forEach = flip map
-- | Reference matrix multiplication: each row of @a@ is dotted with
-- each column of @b@ (the dot product is written inline here).
matMul :: [[Float]] -> [[Float]] -> [[Float]]
matMul a b =
    [ [ sum (zipWith (*) row col) | col <- transpose b ] | row <- a ]
-- | The embedded 'Nano.matMul' agrees with the reference 'matMul' on
-- randomly generated matrices.
prop_matMul =
    forAll genMat $ \a ->
    forAll genMat $ \b ->
      matMul a b == Nano.eval Nano.matMul a b
-- | Regenerate the golden files used by the AST-rendering tests below.
-- Run these manually when the intended rendering changes.
mkGold_scProd = writeFile "tests/gold/scProd.txt" $ Nano.showAST Nano.scProd
mkGold_matMul = writeFile "tests/gold/matMul.txt" $ Nano.showAST Nano.matMul
-- | Consistently rename every variable in the term by shifting its
-- identifier up by one, for both binders ('LamT') and uses ('VarT').
-- The result should be alpha-equivalent to the input.
alphaRename :: ASTF Nano.FeldDomain a -> ASTF Nano.FeldDomain a
alphaRename = mapAST rename
  where
    rename :: Nano.FeldDomain a -> Nano.FeldDomain a
    rename s
      | Just (VarT v) <- prj s = inj (VarT (v+1))
      | Just (LamT v) <- prj s = inj (LamT (v+1))
      | otherwise = s
-- | Deliberately inconsistent renaming (uses shifted up, binders
-- shifted down); used below to check that 'alphaEq' rejects it.
badRename :: ASTF Nano.FeldDomain a -> ASTF Nano.FeldDomain a
badRename = mapAST rename
  where
    rename :: Nano.FeldDomain a -> Nano.FeldDomain a
    rename s
      | Just (VarT v) <- prj s = inj (VarT (v+1))
      | Just (LamT v) <- prj s = inj (LamT (v-1))
      | otherwise = s
-- | A term is alpha-equivalent to its consistent renaming.
prop_alphaEq a = alphaEq a (alphaRename a)
-- | Alpha-equivalence against an inconsistent renaming; this property
-- is expected to be False (it is used negated in the test tree).
prop_alphaEqBad a = alphaEq a (badRename a)
-- | The full tasty test tree: golden tests for the AST rendering plus
-- evaluation and alpha-equivalence properties.
tests = testGroup "NanoFeldsparTests"
    [ goldenVsString "scProd tree" "tests/gold/scProd.txt" $ return $ fromString $ Nano.showAST Nano.scProd
    , goldenVsString "matMul tree" "tests/gold/matMul.txt" $ return $ fromString $ Nano.showAST Nano.matMul
    , testProperty "scProd eval" prop_scProd
    , testProperty "matMul eval" prop_matMul
    , testProperty "alphaEq scProd" (prop_alphaEq (desugar Nano.scProd))
    , testProperty "alphaEq matMul" (prop_alphaEq (desugar Nano.matMul))
    , testProperty "alphaEq scProd matMul" (not (alphaEq (desugar Nano.scProd) (desugar Nano.matMul)))
    , testProperty "alphaEqBad scProd" (not (prop_alphaEqBad (desugar Nano.scProd)))
    , testProperty "alphaEqBad matMul" (not (prop_alphaEqBad (desugar Nano.matMul)))
    ]
main = defaultMain tests
|
emwap/syntactic
|
tests/NanoFeldsparTests.hs
|
bsd-3-clause
| 2,627
| 0
| 14
| 576
| 969
| 488
| 481
| 59
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards, NamedFieldPuns #-}
{-# LANGUAGE RankNTypes, ScopedTypeVariables #-}
-- | This module deals with building and incrementally rebuilding a collection
-- of packages. It is what backs the @cabal build@ and @configure@ commands,
-- as well as being a core part of @run@, @test@, @bench@ and others.
--
-- The primary thing is in fact rebuilding (and trying to make that quick by
-- not redoing unnecessary work), so building from scratch is just a special
-- case.
--
-- The build process and the code can be understood by breaking it down into
-- three major parts:
--
-- * The 'ElaboratedInstallPlan' type
--
-- * The \"what to do\" phase, where we look at the all input configuration
-- (project files, .cabal files, command line etc) and produce a detailed
-- plan of what to do -- the 'ElaboratedInstallPlan'.
--
-- * The \"do it\" phase, where we take the 'ElaboratedInstallPlan' and we
-- re-execute it.
--
-- As far as possible, the \"what to do\" phase embodies all the policy, leaving
-- the \"do it\" phase policy free. The first phase contains more of the
-- complicated logic, but it is contained in code that is either pure or just
-- has read effects (except cache updates). Then the second phase does all the
-- actions to build packages, but as far as possible it just follows the
-- instructions and avoids any logic for deciding what to do (apart from
-- recompilation avoidance in executing the plan).
--
-- This division helps us keep the code under control, making it easier to
-- understand, test and debug. So when you are extending these modules, please
-- think about which parts of your change belong in which part. It is
-- perfectly ok to extend the description of what to do (i.e. the
-- 'ElaboratedInstallPlan') if that helps keep the policy decisions in the
-- first phase. Also, the second phase does not have direct access to any of
-- the input configuration anyway; all the information has to flow via the
-- 'ElaboratedInstallPlan'.
--
module Distribution.Client.ProjectOrchestration (
-- * Discovery phase: what is in the project?
establishProjectBaseContext,
ProjectBaseContext(..),
BuildTimeSettings(..),
commandLineFlagsToProjectConfig,
-- * Pre-build phase: decide what to do.
runProjectPreBuildPhase,
ProjectBuildContext(..),
-- ** Selecting what targets we mean
readTargetSelectors,
reportTargetSelectorProblems,
resolveTargets,
TargetsMap,
TargetSelector(..),
PackageId,
AvailableTarget(..),
AvailableTargetStatus(..),
TargetRequested(..),
ComponentName(..),
ComponentKind(..),
ComponentTarget(..),
SubComponentTarget(..),
TargetProblemCommon(..),
selectComponentTargetBasic,
distinctTargetComponents,
-- ** Utils for selecting targets
filterTargetsKind,
filterTargetsKindWith,
selectBuildableTargets,
selectBuildableTargetsWith,
selectBuildableTargets',
selectBuildableTargetsWith',
forgetTargetsDetail,
-- ** Adjusting the plan
pruneInstallPlanToTargets,
TargetAction(..),
pruneInstallPlanToDependencies,
CannotPruneDependencies(..),
printPlan,
-- * Build phase: now do it.
runProjectBuildPhase,
-- * Post build actions
runProjectPostBuildPhase,
dieOnBuildFailures,
-- * Shared CLI utils
cmdCommonHelpTextNewBuildBeta,
) where
import Distribution.Client.ProjectConfig
import Distribution.Client.ProjectPlanning
hiding ( pruneInstallPlanToTargets )
import qualified Distribution.Client.ProjectPlanning as ProjectPlanning
( pruneInstallPlanToTargets )
import Distribution.Client.ProjectPlanning.Types
import Distribution.Client.ProjectBuilding
import Distribution.Client.ProjectPlanOutput
import Distribution.Client.Types
( GenericReadyPackage(..), UnresolvedSourcePackage )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.TargetSelector
( TargetSelector(..)
, ComponentKind(..), componentKind
, readTargetSelectors, reportTargetSelectorProblems )
import Distribution.Client.DistDirLayout
import Distribution.Client.Config (defaultCabalDir)
import Distribution.Client.Setup hiding (packageName)
import Distribution.Solver.Types.OptionalStanza
import Distribution.Package
hiding (InstalledPackageId, installedPackageId)
import Distribution.PackageDescription (FlagAssignment, showFlagValue)
import Distribution.Simple.LocalBuildInfo
( ComponentName(..), pkgComponents )
import qualified Distribution.Simple.Setup as Setup
import Distribution.Simple.Command (commandShowOptions)
import Distribution.Simple.Utils
( die'
, notice, noticeNoWrap, debugNoWrap )
import Distribution.Verbosity
import Distribution.Text
import qualified Data.Monoid as Mon
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Map (Map)
import Data.List
import Data.Maybe
import Data.Either
import Control.Exception (Exception(..), throwIO, assert)
import System.Exit (ExitCode(..), exitFailure)
#ifdef MIN_VERSION_unix
import System.Posix.Signals (sigKILL, sigSEGV)
#endif
-- | This holds the context of a project prior to solving: the content of the
-- @cabal.project@ and all the local package @.cabal@ files.
--
data ProjectBaseContext = ProjectBaseContext {
       distDirLayout  :: DistDirLayout,        -- ^ layout of the per-project build directory
       cabalDirLayout :: CabalDirLayout,       -- ^ layout of the user-wide cabal directory
       projectConfig  :: ProjectConfig,        -- ^ the merged project configuration
       localPackages  :: [UnresolvedSourcePackage], -- ^ the packages local to this project
       buildSettings  :: BuildTimeSettings     -- ^ settings that affect how (not what) we build
     }
-- | Discover the project: locate its root (honouring an explicit
-- project file from the CLI config), read the project configuration and
-- the local package descriptions, and resolve the build-time settings.
establishProjectBaseContext :: Verbosity
                            -> ProjectConfig
                            -> IO ProjectBaseContext
establishProjectBaseContext verbosity cliConfig = do
    cabalDir <- defaultCabalDir
    -- failure to find a project root is rethrown as an IO exception
    projectRoot <- either throwIO return =<<
                   findProjectRoot Nothing mprojectFile

    let cabalDirLayout = defaultCabalDirLayout cabalDir
        distDirLayout  = defaultDistDirLayout projectRoot
                                              mdistDirectory

    -- read (and cache) the project config together with the local packages
    (projectConfig, localPackages) <-
      rebuildProjectConfig verbosity
                           distDirLayout
                           cliConfig

    let buildSettings = resolveBuildTimeSettings
                          verbosity cabalDirLayout
                          projectConfig

    return ProjectBaseContext {
      distDirLayout,
      cabalDirLayout,
      projectConfig,
      localPackages,
      buildSettings
    }
  where
    -- optional overrides supplied on the command line
    mdistDirectory = Setup.flagToMaybe projectConfigDistDir
    mprojectFile   = Setup.flagToMaybe projectConfigProjectFile
    ProjectConfigShared {
      projectConfigDistDir,
      projectConfigProjectFile
    } = projectConfigShared cliConfig
-- | This holds the context between the pre-build, build and post-build phases.
--
data ProjectBuildContext = ProjectBuildContext {
      -- | This is the improved plan, before we select a plan subset based on
      -- the build targets, and before we do the dry-run. So this contains
      -- all packages in the project.
      elaboratedPlanOriginal :: ElaboratedInstallPlan,

      -- | This is the 'elaboratedPlanOriginal' after we select a plan subset
      -- and do the dry-run phase to find out what is up-to or out-of date.
      -- This is the plan that will be executed during the build phase. So
      -- this contains only a subset of packages in the project.
      elaboratedPlanToExecute :: ElaboratedInstallPlan,

      -- | The part of the install plan that's shared between all packages in
      -- the plan. This does not change between the two plan variants above,
      -- so there is just the one copy.
      elaboratedShared :: ElaboratedSharedConfig,

      -- | The result of the dry-run phase. This tells us about each member of
      -- the 'elaboratedPlanToExecute'.
      pkgsBuildStatus :: BuildStatusMap
    }
-- | Pre-build phase: decide what to do.
--
-- Elaborate the full install plan for the project, let the caller
-- select the subset of it to execute, and dry-run that subset to find
-- out what is up to date.
runProjectPreBuildPhase
    :: Verbosity
    -> ProjectBaseContext
    -> (ElaboratedInstallPlan -> IO ElaboratedInstallPlan)
    -> IO ProjectBuildContext
runProjectPreBuildPhase
    verbosity
    ProjectBaseContext {
      distDirLayout,
      cabalDirLayout,
      projectConfig,
      localPackages
    }
    selectPlanSubset = do
    -- Take the project configuration and make a plan for how to build
    -- everything in the project. This is independent of any specific targets
    -- the user has asked for.
    --
    (elaboratedPlan, _, elaboratedShared) <-
      rebuildInstallPlan verbosity
                         distDirLayout cabalDirLayout
                         projectConfig
                         localPackages

    -- The plan for what to do is represented by an 'ElaboratedInstallPlan'

    -- Now given the specific targets the user has asked for, decide
    -- which bits of the plan we will want to execute.
    --
    elaboratedPlan' <- selectPlanSubset elaboratedPlan

    -- Check which packages need rebuilding.
    -- This also gives us more accurate reasons for the --dry-run output.
    --
    pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared
                                            elaboratedPlan'

    -- Improve the plan by marking up-to-date packages as installed.
    --
    let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages
                             pkgsBuildStatus elaboratedPlan'
    debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan'')

    return ProjectBuildContext {
      elaboratedPlanOriginal = elaboratedPlan,
      elaboratedPlanToExecute = elaboratedPlan'',
      elaboratedShared,
      pkgsBuildStatus
    }
-- | Build phase: now do it.
--
-- Execute all or parts of the description of what to do to build or
-- rebuild the various packages needed.
--
runProjectBuildPhase :: Verbosity
                     -> ProjectBaseContext
                     -> ProjectBuildContext
                     -> IO BuildOutcomes
-- in dry-run mode nothing is built, so there are no outcomes
runProjectBuildPhase _ ProjectBaseContext{buildSettings} _
  | buildSettingDryRun buildSettings
  = return Map.empty

runProjectBuildPhase verbosity
                     ProjectBaseContext{..} ProjectBuildContext {..} =
    -- combine this run's outcomes with those of already up-to-date packages
    fmap (Map.union (previousBuildOutcomes pkgsBuildStatus)) $
    rebuildTargets verbosity
                   distDirLayout
                   (cabalStoreDirLayout cabalDirLayout)
                   elaboratedPlanToExecute
                   elaboratedShared
                   pkgsBuildStatus
                   buildSettings
  where
    -- treat packages that were already up to date as prior successes
    previousBuildOutcomes :: BuildStatusMap -> BuildOutcomes
    previousBuildOutcomes =
      Map.mapMaybe $ \status -> case status of
        BuildStatusUpToDate buildSuccess -> Just (Right buildSuccess)
        --TODO: [nice to have] record build failures persistently
        _                                -> Nothing
-- | Post-build phase: various administrative tasks
--
-- Update bits of state based on the build outcomes and report any failures.
--
runProjectPostBuildPhase :: Verbosity
                         -> ProjectBaseContext
                         -> ProjectBuildContext
                         -> BuildOutcomes
                         -> IO ()
-- nothing was built in dry-run mode, so there is nothing to record
runProjectPostBuildPhase _ ProjectBaseContext{buildSettings} _ _
  | buildSettingDryRun buildSettings
  = return ()

runProjectPostBuildPhase verbosity
                         ProjectBaseContext {..} ProjectBuildContext {..}
                         buildOutcomes = do
    -- Update other build artefacts
    -- TODO: currently none, but could include:
    --  - bin symlinks/wrappers
    --  - haddock/hoogle/ctags indexes
    --  - delete stale lib registrations
    --  - delete stale package dirs

    postBuildStatus <- updatePostBuildProjectStatus
                         verbosity
                         distDirLayout
                         elaboratedPlanOriginal
                         pkgsBuildStatus
                         buildOutcomes

    -- write the environment derived from the original (unpruned) plan
    writePlanGhcEnvironment distDirLayout
                            elaboratedPlanOriginal
                            elaboratedShared
                            postBuildStatus

    -- Finally if there were any build failures then report them and throw
    -- an exception to terminate the program
    dieOnBuildFailures verbosity elaboratedPlanToExecute buildOutcomes
-- Note that it is a deliberate design choice that the 'buildTargets' is
-- not passed to phase 1, and the various bits of input config is not
-- passed to phase 2.
--
-- We make the install plan without looking at the particular targets the
-- user asks us to build. The set of available things we can build is
-- discovered from the env and config and is used to make the install plan.
-- The targets just tell us which parts of the install plan to execute.
--
-- Conversely, executing the plan does not directly depend on any of the
-- input config. The bits that are needed (or better, the decisions based
-- on it) all go into the install plan.
-- Notionally, the 'BuildFlags' should be things that do not affect what
-- we build, just how we do it. In practice, some of these flags do
-- affect what gets built.
------------------------------------------------------------------------------
-- Taking targets into account, selecting what to build
--
-- | The set of components to build, represented as a mapping from 'UnitId's
-- to the 'ComponentTarget's within the unit that will be selected
-- (e.g. selected to build, test or repl).
--
-- Associated with each 'ComponentTarget' is the set of 'TargetSelector's that
-- matched this target. Typically this is exactly one, but in general it is
-- possible for different selectors to match the same target. This extra
-- information is primarily to help make helpful error messages.
--
type TargetsMap = Map UnitId [(ComponentTarget, [TargetSelector PackageId])]
-- | Given a set of 'TargetSelector's, resolve which 'UnitId's and
-- 'ComponentTarget's they ought to refer to.
--
-- The idea is that every user target identifies one or more roots in the
-- 'ElaboratedInstallPlan', which we will use to determine the closure
-- of what packages need to be built, dropping everything from the plan
-- that is unnecessary. This closure and pruning is done by
-- 'pruneInstallPlanToTargets' and this needs to be told the roots in terms
-- of 'UnitId's and the 'ComponentTarget's within those.
--
-- This means we first need to translate the 'TargetSelector's into the
-- 'UnitId's and 'ComponentTarget's. This translation has to be different for
-- the different command line commands, like @build@, @repl@ etc. For example
-- the command @build pkgfoo@ could select a different set of components in
-- pkgfoo than @repl pkgfoo@. The @build@ command would select any library and
-- all executables, whereas @repl@ would select the library or a single
-- executable. Furthermore, both of these examples could fail, and fail in
-- different ways and each needs to be able to produce helpful error messages.
--
-- So 'resolveTargets' takes two helpers: one to select the targets to be used
-- by user targets that refer to a whole package ('TargetPackage'), and
-- another to check user targets that refer to a component (or a module or
-- file within a component). These helpers can fail, and use their own error
-- type. Both helpers get given the 'AvailableTarget' info about the
-- component(s).
--
-- While commands vary quite a bit in their behaviour about which components to
-- select for a whole-package target, most commands have the same behaviour for
-- checking a user target that refers to a specific component. To help with
-- this commands can use 'selectComponentTargetBasic', either directly or as
-- a basis for their own @selectComponentTarget@ implementation.
--
resolveTargets :: forall err.
                  (forall k. TargetSelector PackageId
                          -> [AvailableTarget k]
                          -> Either err [k])
               -> (forall k. PackageId -> ComponentName -> SubComponentTarget
                          -> AvailableTarget k
                          -> Either err  k )
               -> (TargetProblemCommon -> err)
               -> ElaboratedInstallPlan
               -> [TargetSelector PackageId]
               -> Either [err] TargetsMap
resolveTargets selectPackageTargets selectComponentTarget liftProblem
               installPlan targetSelectors =
    --TODO: [required eventually]
    -- we cannot resolve names of packages other than those that are
    -- directly in the current plan. We ought to keep a set of the known
    -- hackage packages so we can resolve names to those. Though we don't
    -- really need that until we can do something sensible with packages
    -- outside of the project.
    case partitionEithers
           [ fmap ((,) targetSelector) (checkTarget targetSelector)
           | targetSelector <- targetSelectors ] of
      -- all selectors resolved: group the component targets by unit,
      -- keeping the selectors that produced each one
      ([], targets) -> Right
                     . Map.map nubComponentTargets
                     $ Map.fromListWith (++)
                         [ (uid, [(ct, ts)])
                         | (ts, cts) <- targets
                         , (uid, ct) <- cts ]
      -- any failures: report them all
      (problems, _) -> Left problems
  where
    -- TODO [required eventually] currently all build targets refer to packages
    -- inside the project. Ultimately this has to be generalised to allow
    -- referring to other packages and targets.

    -- Resolve one selector to the units and component targets it denotes.
    checkTarget :: TargetSelector PackageId
                -> Either err [(UnitId, ComponentTarget)]

    -- We can ask to build any whole package, project-local or a dependency
    checkTarget bt@(TargetPackage _ pkgid mkfilter)
      | Just ats <- fmap (maybe id filterTargetsKind mkfilter)
                  $ Map.lookup pkgid availableTargetsByPackage
      = case selectPackageTargets bt ats of
          Left  e  -> Left e
          Right ts -> Right [ (unitid, ComponentTarget cname WholeComponent)
                            | (unitid, cname) <- ts ]
      | otherwise
      = Left (liftProblem (TargetProblemNoSuchPackage pkgid))

    checkTarget bt@(TargetAllPackages mkfilter) =
      let ats = maybe id filterTargetsKind mkfilter
              $ filter availableTargetLocalToProject
              $ concat (Map.elems availableTargetsByPackage)
      in case selectPackageTargets bt ats of
           Left  e  -> Left e
           Right ts -> Right [ (unitid, ComponentTarget cname WholeComponent)
                             | (unitid, cname) <- ts ]

    checkTarget (TargetComponent pkgid cname subtarget)
      | Just ats <- Map.lookup (pkgid, cname) availableTargetsByComponent
      = case partitionEithers
               (map (selectComponentTarget pkgid cname subtarget) ats) of
          -- only the first component-selection failure is reported
          (e:_,_) -> Left e
          ([],ts) -> Right [ (unitid, ctarget)
                           | let ctarget = ComponentTarget cname subtarget
                           , (unitid, _) <- ts ]
      -- the package exists but has no such component
      | Map.member pkgid availableTargetsByPackage
      = Left (liftProblem (TargetProblemNoSuchComponent pkgid cname))
      | otherwise
      = Left (liftProblem (TargetProblemNoSuchPackage pkgid))
      --TODO: check if the package is in the plan, even if it's not local
      --TODO: check if the package is in hackage and return different
      -- error cases here so the commands can handle things appropriately

    -- Indexes over the plan's available targets, by package and by component.
    availableTargetsByPackage   :: Map PackageId
                                       [AvailableTarget (UnitId, ComponentName)]
    availableTargetsByComponent :: Map (PackageId, ComponentName)
                                       [AvailableTarget (UnitId, ComponentName)]
    availableTargetsByComponent = availableTargets installPlan
    availableTargetsByPackage   = Map.mapKeysWith
                                    (++) (\(pkgid, _cname) -> pkgid)
                                    availableTargetsByComponent
                      `Map.union` availableTargetsEmptyPackages

    -- Add in all the empty packages. These do not appear in the
    -- availableTargetsByComponent map, since that only contains components
    -- so packages with no components are invisible from that perspective.
    -- The empty packages need to be there for proper error reporting, so users
    -- can select the empty package and then we can report that it is empty,
    -- otherwise we falsely report there is no such package at all.
    availableTargetsEmptyPackages =
      Map.fromList
        [ (packageId pkg, [])
        | InstallPlan.Configured pkg <- InstallPlan.toList installPlan
        , case elabPkgOrComp pkg of
            ElabComponent _ -> False
            ElabPackage  _  -> null (pkgComponents (elabPkgDescription pkg))
        ]
--TODO: [research required] what if the solution has multiple versions of this package?
-- e.g. due to setup deps or due to multiple independent sets of
-- packages being built (e.g. ghc + ghcjs in a project)
-- | Keep only the available targets of the given component kind.
filterTargetsKind :: ComponentKind -> [AvailableTarget k] -> [AvailableTarget k]
filterTargetsKind ckind = filterTargetsKindWith (ckind ==)
-- | Keep only the available targets whose component kind satisfies the
-- given predicate.
filterTargetsKindWith :: (ComponentKind -> Bool)
                      -> [AvailableTarget k] -> [AvailableTarget k]
filterTargetsKindWith p =
    filter (\(AvailableTarget _ cname _ _) -> p (componentKind cname))
-- | The keys of all targets that are in the buildable state.
selectBuildableTargets :: [AvailableTarget k] -> [k]
selectBuildableTargets = mapMaybe buildableKey
  where
    buildableKey (AvailableTarget _ _ (TargetBuildable k _) _) = Just k
    buildableKey _                                             = Nothing
-- | The keys of all buildable targets whose request status satisfies
-- the given predicate.
selectBuildableTargetsWith :: (TargetRequested -> Bool)
                           -> [AvailableTarget k] -> [k]
selectBuildableTargetsWith p = mapMaybe pick
  where
    pick (AvailableTarget _ _ (TargetBuildable k req) _) | p req = Just k
    pick _ = Nothing
-- | Like 'selectBuildableTargets' but additionally returns the matching
-- targets themselves (with their keys forgotten), in the same order.
selectBuildableTargets' :: [AvailableTarget k] -> ([k], [AvailableTarget ()])
selectBuildableTargets' ts =
    unzip [ (k, forgetTargetDetail t)
          | t@(AvailableTarget _ _ (TargetBuildable k _) _) <- ts ]
-- | Like 'selectBuildableTargetsWith' but additionally returns the
-- matching targets themselves (with their keys forgotten).
selectBuildableTargetsWith' :: (TargetRequested -> Bool)
                            -> [AvailableTarget k] -> ([k], [AvailableTarget ()])
selectBuildableTargetsWith' p ts =
    unzip [ (k, forgetTargetDetail t)
          | t@(AvailableTarget _ _ (TargetBuildable k req) _) <- ts, p req ]
-- | Forget the target's key, keeping only the descriptive parts.
forgetTargetDetail :: AvailableTarget k -> AvailableTarget ()
forgetTargetDetail = (() <$)
-- | 'forgetTargetDetail' applied to a whole list of targets.
forgetTargetsDetail :: [AvailableTarget k] -> [AvailableTarget ()]
forgetTargetsDetail = map forgetTargetDetail
-- | A basic @selectComponentTarget@ implementation to use or pass to
-- 'resolveTargets', that does the basic checks that the component is
-- buildable and isn't a test suite or benchmark that is disabled. This
-- can also be used to do these basic checks as part of a custom
-- implementation.
--
selectComponentTargetBasic :: PackageId
                           -> ComponentName
                           -> SubComponentTarget
                           -> AvailableTarget k
                           -> Either TargetProblemCommon k
selectComponentTargetBasic pkgid cname subtarget AvailableTarget {..} =
    -- map each non-buildable status onto the corresponding problem
    case availableTargetStatus of
      TargetDisabledByUser ->
        Left (TargetOptionalStanzaDisabledByUser pkgid cname subtarget)
      TargetDisabledBySolver ->
        Left (TargetOptionalStanzaDisabledBySolver pkgid cname subtarget)
      TargetNotLocal ->
        Left (TargetComponentNotProjectLocal pkgid cname subtarget)
      TargetNotBuildable ->
        Left (TargetComponentNotBuildable pkgid cname subtarget)
      TargetBuildable targetKey _ ->
        Right targetKey
-- | The ways in which resolving a target can fail that are common to
-- all commands.
data TargetProblemCommon
   = TargetNotInProject                   PackageName
   | TargetComponentNotProjectLocal       PackageId ComponentName SubComponentTarget
   | TargetComponentNotBuildable          PackageId ComponentName SubComponentTarget
   | TargetOptionalStanzaDisabledByUser   PackageId ComponentName SubComponentTarget
   | TargetOptionalStanzaDisabledBySolver PackageId ComponentName SubComponentTarget

     -- The target matching stuff only returns packages local to the project,
     -- so these lookups should never fail, but if 'resolveTargets' is called
     -- directly then of course it can.
   | TargetProblemNoSuchPackage           PackageId
   | TargetProblemNoSuchComponent         PackageId ComponentName
  deriving (Eq, Show)
-- | Wrapper around 'ProjectPlanning.pruneInstallPlanToTargets' that adjusts
-- for the extra unneeded info in the 'TargetsMap'.
--
pruneInstallPlanToTargets :: TargetAction -> TargetsMap
                          -> ElaboratedInstallPlan -> ElaboratedInstallPlan
pruneInstallPlanToTargets targetActionType targetsMap elaboratedPlan =
    -- pruning to an empty set of targets would be a programmer error
    assert (Map.size targetsMap > 0) $
    ProjectPlanning.pruneInstallPlanToTargets
      targetActionType
      -- drop the 'TargetSelector' provenance; only the component
      -- targets themselves are needed for pruning
      (Map.map (map fst) targetsMap)
      elaboratedPlan
-- | Utility used by repl and run to check whether the targets span
-- multiple components, since those commands do not support multiple
-- components.
--
distinctTargetComponents :: TargetsMap -> Set.Set (UnitId, ComponentName)
distinctTargetComponents targetsMap =
    Set.fromList [ (uid, cname)
                 | (uid, cts) <- Map.toList targetsMap
                 , (ComponentTarget cname _, _) <- cts ]
------------------------------------------------------------------------------
-- Displaying what we plan to do
--
-- | Print a user-oriented presentation of the install plan, indicating what
-- will be built.
--
printPlan :: Verbosity
-> ProjectBaseContext
-> ProjectBuildContext
-> IO ()
printPlan verbosity
ProjectBaseContext {
buildSettings = BuildTimeSettings{buildSettingDryRun}
}
ProjectBuildContext {
elaboratedPlanToExecute = elaboratedPlan,
elaboratedShared,
pkgsBuildStatus
}
| null pkgs
= notice verbosity "Up to date"
| otherwise
= noticeNoWrap verbosity $ unlines $
("In order, the following " ++ wouldWill ++ " be built" ++
ifNormal " (use -v for more details)" ++ ":")
: map showPkgAndReason pkgs
where
pkgs = InstallPlan.executionOrder elaboratedPlan
ifVerbose s | verbosity >= verbose = s
| otherwise = ""
ifNormal s | verbosity >= verbose = ""
| otherwise = s
wouldWill | buildSettingDryRun = "would"
| otherwise = "will"
showPkgAndReason :: ElaboratedReadyPackage -> String
showPkgAndReason (ReadyPackage elab) =
" - " ++
(if verbosity >= deafening
then display (installedUnitId elab)
else display (packageId elab)
) ++
(case elabPkgOrComp elab of
ElabPackage pkg -> showTargets elab ++ ifVerbose (showStanzas pkg)
ElabComponent comp ->
" (" ++ showComp elab comp ++ ")"
) ++
showFlagAssignment (nonDefaultFlags elab) ++
showConfigureFlags elab ++
let buildStatus = pkgsBuildStatus Map.! installedUnitId elab in
" (" ++ showBuildStatus buildStatus ++ ")"
showComp elab comp =
maybe "custom" display (compComponentName comp) ++
if Map.null (elabInstantiatedWith elab)
then ""
else " with " ++
intercalate ", "
-- TODO: Abbreviate the UnitIds
[ display k ++ "=" ++ display v
| (k,v) <- Map.toList (elabInstantiatedWith elab) ]
nonDefaultFlags :: ElaboratedConfiguredPackage -> FlagAssignment
nonDefaultFlags elab = elabFlagAssignment elab \\ elabFlagDefaults elab
showStanzas pkg = concat
$ [ " *test"
| TestStanzas `Set.member` pkgStanzasEnabled pkg ]
++ [ " *bench"
| BenchStanzas `Set.member` pkgStanzasEnabled pkg ]
showTargets elab
| null (elabBuildTargets elab) = ""
| otherwise
= " (" ++ intercalate ", " [ showComponentTarget (packageId elab) t | t <- elabBuildTargets elab ]
++ ")"
showFlagAssignment :: FlagAssignment -> String
showFlagAssignment = concatMap ((' ' :) . showFlagValue)
showConfigureFlags elab =
let fullConfigureFlags
= setupHsConfigureFlags
(ReadyPackage elab)
elaboratedShared
verbosity
"$builddir"
-- | Given a default value @x@ for a flag, nub @Flag x@
-- into @NoFlag@. This gives us a tidier command line
-- rendering.
nubFlag :: Eq a => a -> Setup.Flag a -> Setup.Flag a
nubFlag x (Setup.Flag x') | x == x' = Setup.NoFlag
nubFlag _ f = f
-- TODO: Closely logic from 'configureProfiling'.
tryExeProfiling = Setup.fromFlagOrDefault False
(configProf fullConfigureFlags)
tryLibProfiling = Setup.fromFlagOrDefault False
(Mon.mappend (configProf fullConfigureFlags)
(configProfExe fullConfigureFlags))
partialConfigureFlags
= Mon.mempty {
configProf =
nubFlag False (configProf fullConfigureFlags),
configProfExe =
nubFlag tryExeProfiling (configProfExe fullConfigureFlags),
configProfLib =
nubFlag tryLibProfiling (configProfLib fullConfigureFlags)
-- Maybe there are more we can add
}
-- Not necessary to "escape" it, it's just for user output
in unwords . ("":) $
commandShowOptions
(Setup.configureCommand (pkgConfigCompilerProgs elaboratedShared))
partialConfigureFlags
showBuildStatus status = case status of
BuildStatusPreExisting -> "existing package"
BuildStatusInstalled -> "already installed"
BuildStatusDownload {} -> "requires download & build"
BuildStatusUnpack {} -> "requires build"
BuildStatusRebuild _ rebuild -> case rebuild of
BuildStatusConfigure
(MonitoredValueChanged _) -> "configuration changed"
BuildStatusConfigure mreason -> showMonitorChangedReason mreason
BuildStatusBuild _ buildreason -> case buildreason of
BuildReasonDepsRebuilt -> "dependency rebuilt"
BuildReasonFilesChanged
mreason -> showMonitorChangedReason mreason
BuildReasonExtraTargets _ -> "additional components to build"
BuildReasonEphemeralTargets -> "ephemeral targets"
BuildStatusUpToDate {} -> "up to date" -- doesn't happen
showMonitorChangedReason (MonitoredFileChanged file) = "file " ++ file ++ " changed"
showMonitorChangedReason (MonitoredValueChanged _) = "value changed"
showMonitorChangedReason MonitorFirstRun = "first run"
showMonitorChangedReason MonitorCorruptCache = "cannot read state cache"
-- | If there are build failures then report them and throw an exception.
--
-- Reporting strategy: when a failure has a build log and the process exited
-- with the "pedestrian" code 1, the log is printed first, followed by a short
-- summary; otherwise full detail is printed inline. A single self-explanatory
-- failure in a root package produces no extra output at all ('isSimpleCase').
dieOnBuildFailures :: Verbosity
                   -> ElaboratedInstallPlan -> BuildOutcomes -> IO ()
dieOnBuildFailures verbosity plan buildOutcomes
  | null failures = return ()
  | isSimpleCase = exitFailure
  | otherwise = do
      -- For failures where we have a build log, print the log plus a header
      sequence_
        [ do notice verbosity $
               '\n' : renderFailureDetail False pkg reason
               ++ "\nBuild log ( " ++ logfile ++ " ):"
             readFile logfile >>= noticeNoWrap verbosity
        | (pkg, ShowBuildSummaryAndLog reason logfile)
            <- failuresClassification
        ]
      -- For all failures, print either a short summary (if we showed the
      -- build log) or all details
      die' verbosity $ unlines
        [ case failureClassification of
            ShowBuildSummaryAndLog reason _
              | verbosity > normal
             -> renderFailureDetail mentionDepOf pkg reason
              | otherwise
             -> renderFailureSummary mentionDepOf pkg reason
             ++ ". See the build log above for details."
            ShowBuildSummaryOnly reason ->
              renderFailureDetail mentionDepOf pkg reason
        | let mentionDepOf = verbosity <= normal
        , (pkg, failureClassification) <- failuresClassification ]
  where
    -- Every (unit id, failure) pair from the build outcomes.
    failures = [ (pkgid, failure)
               | (pkgid, Left failure) <- Map.toList buildOutcomes ]

    -- Pair each failed, configured package with how its failure should be
    -- presented. At normal verbosity, packages that failed only because a
    -- dependency failed are filtered out to reduce noise.
    failuresClassification =
      [ (pkg, classifyBuildFailure failure)
      | (pkgid, failure) <- failures
      , case buildFailureReason failure of
          DependentFailed {} -> verbosity > normal
          _ -> True
      , InstallPlan.Configured pkg <-
          maybeToList (InstallPlan.lookup plan pkgid)
      ]

    -- Choose summary-plus-log presentation only when a log file exists AND
    -- the underlying process exited with code 1 (ordinary tool failure).
    classifyBuildFailure :: BuildFailure -> BuildFailurePresentation
    classifyBuildFailure BuildFailure {
      buildFailureReason = reason,
      buildFailureLogFile = mlogfile
    } =
      maybe (ShowBuildSummaryOnly reason)
            (ShowBuildSummaryAndLog reason) $ do
        logfile <- mlogfile
        e <- buildFailureException reason
        ExitFailure 1 <- fromException e
        return logfile

    -- Special case: we don't want to report anything complicated in the case
    -- of just doing build on the current package, since it's clear from
    -- context which package failed.
    --
    -- We generalise this rule as follows:
    --  - if only one failure occurs, and it is in a single root package (ie a
    --    package with nothing else depending on it)
    --  - and that failure is of a kind that always reports enough detail
    --    itself (e.g. ghc reporting errors on stdout)
    --  - then we do not report additional error detail or context.
    --
    isSimpleCase
      | [(pkgid, failure)] <- failures
      , [pkg] <- rootpkgs
      , installedUnitId pkg == pkgid
      , isFailureSelfExplanatory (buildFailureReason failure)
      = True
      | otherwise
      = False

    -- NB: if the Setup script segfaulted or was interrupted,
    -- we should give more detailed information. So only
    -- assume that exit code 1 is "pedestrian failure."
    isFailureSelfExplanatory (BuildFailed e)
      | Just (ExitFailure 1) <- fromException e = True
    isFailureSelfExplanatory (ConfigureFailed e)
      | Just (ExitFailure 1) <- fromException e = True
    isFailureSelfExplanatory _ = False

    -- Configured packages that nothing else in the plan depends on.
    rootpkgs =
      [ pkg
      | InstallPlan.Configured pkg <- InstallPlan.toList plan
      , hasNoDependents pkg ]

    -- Root packages that (transitively) depend on the given unit, excluding
    -- the unit itself; used to explain why a dependency mattered.
    ultimateDeps pkgid =
      filter (\pkg -> hasNoDependents pkg && installedUnitId pkg /= pkgid)
             (InstallPlan.reverseDependencyClosure plan [pkgid])

    hasNoDependents :: HasUnitId pkg => pkg -> Bool
    hasNoDependents = null . InstallPlan.revDirectDeps plan . installedUnitId

    -- Summary sentence plus step-specific detail plus the exception text.
    renderFailureDetail mentionDepOf pkg reason =
      renderFailureSummary mentionDepOf pkg reason ++ "."
      ++ renderFailureExtraDetail reason
      ++ maybe "" showException (buildFailureException reason)

    renderFailureSummary mentionDepOf pkg reason =
      case reason of
        DownloadFailed _ -> "Failed to download " ++ pkgstr
        UnpackFailed _ -> "Failed to unpack " ++ pkgstr
        ConfigureFailed _ -> "Failed to build " ++ pkgstr
        BuildFailed _ -> "Failed to build " ++ pkgstr
        ReplFailed _ -> "repl failed for " ++ pkgstr
        HaddocksFailed _ -> "Failed to build documentation for " ++ pkgstr
        TestsFailed _ -> "Tests failed for " ++ pkgstr
        InstallFailed _ -> "Failed to build " ++ pkgstr
        DependentFailed depid
          -> "Failed to build " ++ display (packageId pkg)
          ++ " because it depends on " ++ display depid
          ++ " which itself failed to build"
      where
        pkgstr = elabConfiguredName verbosity pkg
              ++ if mentionDepOf
                   then renderDependencyOf (installedUnitId pkg)
                   else ""

    renderFailureExtraDetail reason =
      case reason of
        ConfigureFailed _ -> " The failure occurred during the configure step."
        InstallFailed _ -> " The failure occurred during the final install step."
        _ -> ""

    -- Name up to two root packages that required the failed unit.
    renderDependencyOf pkgid =
      case ultimateDeps pkgid of
        [] -> ""
        (p1:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1 ++ ")"
        (p1:p2:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1
                   ++ " and " ++ elabPlanPackageName verbosity p2 ++ ")"
        (p1:p2:_) -> " (which is required by " ++ elabPlanPackageName verbosity p1
                  ++ ", " ++ elabPlanPackageName verbosity p2
                  ++ " and others)"

    showException e = case fromException e of
      Just (ExitFailure 1) -> ""
#ifdef MIN_VERSION_unix
      -- Note [Positive "signal" exit code]
      -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
      -- What's the business with the test for negative and positive
      -- signal values? The API for process specifies that if the
      -- process died due to a signal, it returns a *negative* exit
      -- code. So that's the negative test.
      --
      -- What about the positive test? Well, when we find out that
      -- a process died due to a signal, we ourselves exit with that
      -- exit code. However, we don't "kill ourselves" with the
      -- signal; we just exit with the same code as the signal: thus
      -- the caller sees a *positive* exit code. So that's what
      -- happens when we get a positive exit code.
      Just (ExitFailure n)
        | -n == fromIntegral sigSEGV ->
            " The build process segfaulted (i.e. SIGSEGV)."
        | n == fromIntegral sigSEGV ->
            " The build process terminated with exit code " ++ show n
            ++ " which may be because some part of it segfaulted. (i.e. SIGSEGV)."
        | -n == fromIntegral sigKILL ->
            " The build process was killed (i.e. SIGKILL). " ++ explanation
        | n == fromIntegral sigKILL ->
            " The build process terminated with exit code " ++ show n
            ++ " which may be because some part of it was killed "
            ++ "(i.e. SIGKILL). " ++ explanation
        where
          explanation = "The typical reason for this is that there is not "
                        ++ "enough memory available (e.g. the OS killed a process "
                        ++ "using lots of memory)."
#endif
      Just (ExitFailure n) ->
        " The build process terminated with exit code " ++ show n
      _ -> " The exception was:\n "
#if MIN_VERSION_base(4,8,0)
        ++ displayException e
#else
        ++ show e
#endif

    -- The exception carried by a failure reason, if it has one.
    buildFailureException reason =
      case reason of
        DownloadFailed e -> Just e
        UnpackFailed e -> Just e
        ConfigureFailed e -> Just e
        BuildFailed e -> Just e
        ReplFailed e -> Just e
        HaddocksFailed e -> Just e
        TestsFailed e -> Just e
        InstallFailed e -> Just e
        DependentFailed _ -> Nothing
-- | How a single package's build failure should be presented to the user:
-- either just a summary of the failure, or the summary plus the path to a
-- build log file whose contents should be shown first.
data BuildFailurePresentation =
       ShowBuildSummaryOnly BuildFailureReason
     | ShowBuildSummaryAndLog BuildFailureReason FilePath
-- | Shared help-text blurb appended to the @new-*@ commands: explains that
-- they belong to the (beta) nix-style local-build system and points at the
-- documentation and issue tracker.
cmdCommonHelpTextNewBuildBeta :: String
cmdCommonHelpTextNewBuildBeta = concat
    [ "Note: this command is part of the new project-based system (aka "
    , "nix-style\nlocal builds). These features are currently in beta. "
    , "Please see\n"
    , "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html "
    , "for\ndetails and advice on what you can expect to work. If you "
    , "encounter problems\nplease file issues at "
    , "https://github.com/haskell/cabal/issues and if you\nhave any time "
    , "to get involved and help with testing, fixing bugs etc then\nthat "
    , "is very much appreciated.\n"
    ]
|
mydaum/cabal
|
cabal-install/Distribution/Client/ProjectOrchestration.hs
|
bsd-3-clause
| 40,661
| 0
| 18
| 11,557
| 5,991
| 3,181
| 2,810
| 577
| 29
|
{-# LANGUAGE InstanceSigs #-}
module Lib where
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Except
import Control.Monad.Trans.Class
import Control.Monad.IO.Class
import Data.Functor.Identity
import Control.Monad
import Control.Applicative
-- 26.3 EitherT: Either layered inside an arbitrary base functor/monad @m@.
newtype EitherT e m a =
  EitherT { runEitherT :: m (Either e a)}

-- | Map over the success slot, through both the base layer and 'Either'.
instance Functor m => Functor (EitherT e m) where
  fmap :: (a -> b) -> EitherT e m a -> EitherT e m b
  fmap f (EitherT inner) = EitherT (fmap (fmap f) inner)

-- | Combine independent effects; 'Left' in either side wins as usual.
instance Applicative m => Applicative (EitherT e m) where
  pure :: a -> EitherT e m a
  pure = EitherT . pure . Right

  (<*>) :: EitherT e m (a -> b) -> EitherT e m a -> EitherT e m b
  EitherT mf <*> EitherT ma = EitherT (liftA2 (<*>) mf ma)

-- | Sequence: a 'Left' short-circuits without running the continuation.
instance Monad m => Monad (EitherT e m) where
  return :: a -> EitherT e m a
  return = pure

  (>>=) :: EitherT e m a -> (a -> EitherT e m b) -> EitherT e m b
  EitherT mea >>= k =
    EitherT $ mea >>= either (pure . Left) (runEitherT . k)
-- | Exchange the 'Left' and 'Right' sides of an 'Either'.
swapEither :: Either a b -> Either b a
swapEither = either Right Left
-- | Swap the 'Left' and 'Right' sides underneath the base functor.
swampEitherT :: (Functor m) => EitherT e m a -> EitherT a m e
swampEitherT (EitherT inner) = EitherT (either Right Left <$> inner)
-- | Fold an 'EitherT': run the wrapped action, then dispatch on its result.
eitherT :: Monad m => (a -> m c) -> (b -> m c) -> EitherT a m b -> m c
eitherT onLeft onRight (EitherT inner) = either onLeft onRight =<< inner
-- 26.5 StateT: a state-threading computation over a base monad @m@.
newtype StateT s m a =
  StateT { runStateT :: s -> m (a, s) }

-- | Map over the result while leaving the threaded state untouched.
instance (Functor m) => Functor (StateT s m) where
  fmap :: (a -> b) -> StateT s m a -> StateT s m b
  fmap f (StateT step) =
    StateT $ \s0 -> fmap (\(a, s1) -> (f a, s1)) (step s0)

-- | Thread the state left-to-right: first the function, then the argument.
-- Requires Monad m because the second step depends on the first's state.
instance (Monad m) => Applicative (StateT s m) where
  pure :: a -> StateT s m a
  pure a = StateT $ \s0 -> return (a, s0)

  (<*>) :: StateT s m (a -> b) -> StateT s m a -> StateT s m b
  StateT stepF <*> StateT stepA = StateT $ \s0 ->
    stepF s0 >>= \(f, s1) ->
    stepA s1 >>= \(a, s2) ->
    return (f a, s2)

-- | Feed each step's output state into the next step.
instance (Monad m) => Monad (StateT s m) where
  return :: a -> StateT s m a
  return = pure

  (>>=) :: StateT s m a -> (a -> StateT s m b) -> StateT s m b
  StateT step >>= k = StateT $ \s0 ->
    step s0 >>= \(a, s1) -> runStateT (k a) s1
-- 26.8 Lexically inner is structurally outer, Wrap It Up
-- The base value lives in IO, so it has to be 'return'ed into IO before the
-- Reader/Except/Maybe wrappers are applied, from the inside out.
embedded :: MaybeT (ExceptT String (ReaderT () IO)) Int
embedded = MaybeT . ExceptT . ReaderT $ \_ -> return (Right (Just 1))
-- 26.9 MonadTrans
-- | Lift a base action by tagging its result as a 'Right'.
instance MonadTrans (EitherT e) where
  lift :: (Monad m) => m a -> EitherT e m a
  lift ma = EitherT (Right <$> ma)
-- | Lift a base action; the state passes through unchanged.
instance MonadTrans (StateT r) where
  lift :: (Monad m) => m a -> StateT r m a
  lift ma = StateT $ \s -> fmap (\a -> (a, s)) ma
-- 26.10
-- A local clone of MaybeT: the library instances cannot be hidden, so we
-- re-derive them on a fresh newtype.
newtype MaybeT' m a = MaybeT' {runMaybeT' :: m (Maybe a)}

-- | Map over the wrapped value through both the base layer and 'Maybe'.
instance Functor m => Functor (MaybeT' m) where
  fmap :: (a -> b) -> MaybeT' m a -> MaybeT' m b
  fmap f = MaybeT' . fmap (fmap f) . runMaybeT'

-- | Combine effects; 'Nothing' on either side yields 'Nothing'.
instance Applicative m => Applicative (MaybeT' m) where
  pure :: a -> MaybeT' m a
  pure = MaybeT' . pure . Just

  (<*>) :: MaybeT' m (a -> b) -> MaybeT' m a -> MaybeT' m b
  MaybeT' mf <*> MaybeT' ma = MaybeT' (liftA2 (<*>) mf ma)

-- | Sequence: 'Nothing' short-circuits without running the continuation.
instance Monad m => Monad (MaybeT' m) where
  return :: a -> MaybeT' m a
  return = pure

  (>>=) :: MaybeT' m a -> (a -> MaybeT' m b) -> MaybeT' m b
  MaybeT' mma >>= k =
    MaybeT' $ mma >>= maybe (pure Nothing) (runMaybeT' . k)
-- Lift an IO action into MaybeT' by lifting into the base monad first and
-- then tagging the result with 'Just' (equivalent by liftIO naturality).
instance (MonadIO m) => MonadIO (MaybeT' m) where
  liftIO :: IO a -> MaybeT' m a
  liftIO = MaybeT' . fmap Just . liftIO
-- Chapter Exercises
-- 1: decrement the environment value.
rDec :: Num a => Reader a a
rDec = asks (subtract 1)
-- 2: the same decrement, written with an explicit lambda.
rDec' :: Num a => Reader a a
rDec' = reader (\n -> n - 1)
-- 3 & 4: render the environment with 'show', spelled out via the constructor.
rShow :: Show a => ReaderT a Identity String
rShow = ReaderT (Identity . show)
-- 5: print a greeting with the environment, then return it incremented.
rPrintAndInc :: (Num a, Show a) => ReaderT a IO a
rPrintAndInc = ReaderT $ \a -> do
  putStrLn ("Hi: " ++ show a)
  return (a + 1)
-- 6: print a greeting, return the shown state, and increment the state.
sPrintIncAccum :: (Num a, Show a) => StateT a IO String
sPrintIncAccum = StateT $ \a -> do
  putStrLn ("Hi: " ++ show a)
  return (show a, a + 1)
-- Fix the code (the original, broken versions are kept in comments below)
-- | Input is "exciting" when it contains at least one exclamation mark.
isValid :: String -> Bool
isValid = elem '!'
-- | Read a line and succeed only when it is exciting (contains a '!').
--
-- The 'guard' runs in the 'MaybeT' layer (note the 'lift' on 'getLine'), so
-- invalid input cleanly yields 'Nothing'. The previous version ran 'guard'
-- inside the IO do-block, where IO's Alternative instance turns a failed
-- guard into an IOError exception instead of a 'Nothing'.
maybeExcite :: MaybeT IO String
maybeExcite = do
  v <- lift getLine
  guard $ isValid v
  return v
-- | Prompt the user and report whether the input was exciting. The '<|>'
-- keeps the program alive even if the inner action fails with an exception.
doExcite :: IO ()
doExcite = do
  putStrLn "say something excite!"
  result <- runMaybeT maybeExcite <|> return Nothing
  case result of
    Just e -> putStrLn ("Good, was very excite: " ++ e)
    Nothing -> putStrLn "MOAR EXCITE"
|
backofhan/HaskellExercises
|
CH26/src/Lib.hs
|
bsd-3-clause
| 5,012
| 0
| 13
| 1,304
| 2,148
| 1,102
| 1,046
| 110
| 2
|
{-| General purpose utilities
The names in this module clash heavily with the Haskell Prelude, so I
recommend the following import scheme:
> import Pipes
> import qualified Pipes.Prelude as P -- or use any other qualifier you prefer
Note that 'String'-based 'IO' is inefficient. The 'String'-based utilities
in this module exist only for simple demonstrations without incurring a
dependency on the @text@ package.
Also, 'stdinLn' and 'stdoutLn' remove and add newlines, respectively. This
behavior is intended to simplify examples. The corresponding @stdin@ and
@stdout@ utilities from @pipes-bytestring@ and @pipes-text@ preserve
newlines.
-}
{-# LANGUAGE RankNTypes, Trustworthy #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Pipes.Prelude (
-- * Producers
-- $producers
stdinLn
, readLn
, fromHandle
, repeatM
, replicateM
, unfoldr
-- * Consumers
-- $consumers
, stdoutLn
, stdoutLn'
, mapM_
, print
, toHandle
, drain
-- * Pipes
-- $pipes
, map
, mapM
, sequence
, mapFoldable
, filter
, mapMaybe
, filterM
, wither
, take
, takeWhile
, takeWhile'
, drop
, dropWhile
, concat
, elemIndices
, findIndices
, scan
, scanM
, chain
, read
, show
, seq
-- *ListT
, loop
-- * Folds
-- $folds
, fold
, fold'
, foldM
, foldM'
, all
, any
, and
, or
, elem
, notElem
, find
, findIndex
, head
, index
, last
, length
, maximum
, minimum
, null
, sum
, product
, toList
, toListM
, toListM'
-- * Zips
, zip
, zipWith
-- * Utilities
, tee
, generalize
) where
import Control.Exception (throwIO, try)
import Control.Monad (liftM, when, unless, (>=>))
import Control.Monad.Trans.State.Strict (get, put)
import Data.Functor.Identity (Identity, runIdentity)
import Foreign.C.Error (Errno(Errno), ePIPE)
import GHC.Exts (build)
import Pipes
import Pipes.Core
import Pipes.Internal
import Pipes.Lift (evalStateP)
import qualified GHC.IO.Exception as G
import qualified System.IO as IO
import qualified Prelude
import Prelude hiding (
all
, and
, any
, concat
, drop
, dropWhile
, elem
, filter
, head
, last
, length
, map
, mapM
, mapM_
, maximum
, minimum
, notElem
, null
, or
, print
, product
, read
, readLn
, sequence
, show
, seq
, sum
, take
, takeWhile
, zip
, zipWith
)
{- $producers
Use 'for' loops to iterate over 'Producer's whenever you want to perform the
same action for every element:
> -- Echo all lines from standard input to standard output
> runEffect $ for P.stdinLn $ \str -> do
> lift $ putStrLn str
... or more concisely:
>>> runEffect $ for P.stdinLn (lift . putStrLn)
Test<Enter>
Test
ABC<Enter>
ABC
...
-}
{-| Read 'String's from 'IO.stdin' using 'getLine'
    Terminates on end of input
-}
stdinLn :: MonadIO m => Producer' String m ()
-- Just 'fromHandle' specialised to stdin; note 'hGetLine' strips newlines.
stdinLn = fromHandle IO.stdin
{-# INLINABLE stdinLn #-}
-- | 'read' values from 'IO.stdin', ignoring failed parses
readLn :: (MonadIO m, Read a) => Producer' a m ()
-- Compose the line producer with the parsing pipe; unparsable lines are
-- silently dropped by 'read'.
readLn = stdinLn >-> read
{-# INLINABLE readLn #-}
{-| Read 'String's from a 'IO.Handle' using 'IO.hGetLine'
    Terminates on end of input
    @
    'fromHandle' :: 'MonadIO' m => 'IO.Handle' -> 'Producer' 'String' m ()
    @
-}
fromHandle :: MonadIO m => IO.Handle -> Proxy x' x () String m ()
fromHandle h = go
  where
    -- Check EOF before each read so we terminate cleanly instead of letting
    -- 'hGetLine' throw at end of input.
    go = do
        eof <- liftIO $ IO.hIsEOF h
        unless eof $ do
            str <- liftIO $ IO.hGetLine h
            yield str
            go
{-# INLINABLE fromHandle #-}
{-| Repeat a monadic action indefinitely, 'yield'ing each result
    'repeatM' :: 'Monad' m => m a -> 'Producer' a m r
-}
repeatM :: Monad m => m a -> Proxy x' x () a m r
-- '(>~)' replaces every 'await' in 'cat' with the lifted action @m@.
repeatM m = lift m >~ cat
{-# INLINABLE [1] repeatM #-}
{-# RULES
"repeatM m >-> p" forall m p . repeatM m >-> p = lift m >~ p
  #-}
{-| Repeat a monadic action a fixed number of times, 'yield'ing each result
> replicateM 0 x = return ()
>
> replicateM (m + n) x = replicateM m x >> replicateM n x -- 0 <= {m,n}
    @
    'replicateM' :: 'Monad' m => Int -> m a -> 'Producer' a m ()
    @
-}
replicateM :: Monad m => Int -> m a -> Proxy x' x () a m ()
-- Like 'repeatM', but 'take n' caps how many results flow downstream.
replicateM n m = lift m >~ take n
{-# INLINABLE replicateM #-}
{- $consumers
Feed a 'Consumer' the same value repeatedly using ('>~'):
>>> runEffect $ lift getLine >~ P.stdoutLn
Test<Enter>
Test
ABC<Enter>
ABC
...
-}
{-| Write 'String's to 'IO.stdout' using 'putStrLn'
    Unlike 'toHandle', 'stdoutLn' gracefully terminates on a broken output pipe
-}
stdoutLn :: MonadIO m => Consumer' String m ()
stdoutLn = go
  where
    go = do
        str <- await
        x <- liftIO $ try (putStrLn str)
        case x of
            -- EPIPE (e.g. piping into @head@) ends the consumer quietly;
            -- every other IOError is re-thrown.
            Left (G.IOError { G.ioe_type = G.ResourceVanished
                            , G.ioe_errno = Just ioe })
                | Errno ioe == ePIPE
                    -> return ()
            Left e -> liftIO (throwIO e)
            Right () -> go
{-# INLINABLE stdoutLn #-}
{-| Write 'String's to 'IO.stdout' using 'putStrLn'
    This does not handle a broken output pipe, but has a polymorphic return
    value
-}
stdoutLn' :: MonadIO m => Consumer' String m r
-- 'for cat' runs the body on every value forever, hence the free return type.
stdoutLn' = for cat (\str -> liftIO (putStrLn str))
{-# INLINABLE [1] stdoutLn' #-}
{-# RULES
"p >-> stdoutLn'" forall p .
    p >-> stdoutLn' = for p (\str -> liftIO (putStrLn str))
  #-}
-- | Consume all values using a monadic function
mapM_ :: Monad m => (a -> m ()) -> Consumer' a m r
mapM_ f = for cat (\a -> lift (f a))
{-# INLINABLE [1] mapM_ #-}
{-# RULES
"p >-> mapM_ f" forall p f .
    p >-> mapM_ f = for p (\a -> lift (f a))
  #-}
-- | 'print' values to 'IO.stdout'
print :: (MonadIO m, Show a) => Consumer' a m r
print = for cat (\a -> liftIO (Prelude.print a))
{-# INLINABLE [1] print #-}
{-# RULES
"p >-> print" forall p .
    p >-> print = for p (\a -> liftIO (Prelude.print a))
  #-}
-- | Write 'String's to a 'IO.Handle' using 'IO.hPutStrLn'
toHandle :: MonadIO m => IO.Handle -> Consumer' String m r
toHandle handle = for cat (\str -> liftIO (IO.hPutStrLn handle str))
{-# INLINABLE [1] toHandle #-}
{-# RULES
"p >-> toHandle handle" forall p handle .
    p >-> toHandle handle = for p (\str -> liftIO (IO.hPutStrLn handle str))
  #-}
-- | 'discard' all incoming values
drain :: Functor m => Consumer' a m r
drain = for cat discard
{-# INLINABLE [1] drain #-}
{-# RULES
"p >-> drain" forall p .
    p >-> drain = for p discard
  #-}
{- $pipes
Use ('>->') to connect 'Producer's, 'Pipe's, and 'Consumer's:
>>> runEffect $ P.stdinLn >-> P.takeWhile (/= "quit") >-> P.stdoutLn
Test<Enter>
Test
ABC<Enter>
ABC
quit<Enter>
>>>
-}
{-| Apply a function to all values flowing downstream
> map id = cat
>
> map (g . f) = map f >-> map g
-}
map :: Functor m => (a -> b) -> Pipe a b m r
map f = for cat (\a -> yield (f a))
{-# INLINABLE [1] map #-}
{-# RULES
"p >-> map f" forall p f . p >-> map f = for p (\a -> yield (f a))
; "map f >-> p" forall p f . map f >-> p = (do
    a <- await
    return (f a) ) >~ p
  #-}
{-| Apply a monadic function to all values flowing downstream
> mapM return = cat
>
> mapM (f >=> g) = mapM f >-> mapM g
-}
mapM :: Monad m => (a -> m b) -> Pipe a b m r
mapM f = for cat $ \a -> do
    b <- lift (f a)
    yield b
{-# INLINABLE [1] mapM #-}
{-# RULES
"p >-> mapM f" forall p f . p >-> mapM f = for p (\a -> do
    b <- lift (f a)
    yield b )
; "mapM f >-> p" forall p f . mapM f >-> p = (do
    a <- await
    b <- lift (f a)
    return b ) >~ p
  #-}
-- | Convert a stream of actions to a stream of values
sequence :: Monad m => Pipe (m a) a m r
-- Each incoming action is run in the base monad and its result yielded.
sequence = mapM id
{-# INLINABLE sequence #-}
{- | Apply a function to all values flowing downstream, and
    forward each element of the result.
-}
mapFoldable :: (Functor m, Foldable t) => (a -> t b) -> Pipe a b m r
-- 'each' yields every element of the Foldable, so one input can produce
-- zero, one, or many outputs.
mapFoldable f = for cat (\a -> each (f a))
{-# INLINABLE [1] mapFoldable #-}
{-# RULES
"p >-> mapFoldable f" forall p f .
    p >-> mapFoldable f = for p (\a -> each (f a))
  #-}
{-| @(filter predicate)@ only forwards values that satisfy the predicate.
> filter (pure True) = cat
>
> filter (liftA2 (&&) p1 p2) = filter p1 >-> filter p2
>
> filter f = mapMaybe (\a -> a <$ guard (f a))
-}
filter :: Functor m => (a -> Bool) -> Pipe a a m r
filter predicate = for cat $ \a -> when (predicate a) (yield a)
{-# INLINABLE [1] filter #-}
{-# RULES
"p >-> filter predicate" forall p predicate.
    p >-> filter predicate = for p (\a -> when (predicate a) (yield a))
  #-}
{-| @(mapMaybe f)@ yields 'Just' results of 'f'.
    Basic laws:
> mapMaybe (f >=> g) = mapMaybe f >-> mapMaybe g
>
> mapMaybe (pure @Maybe . f) = mapMaybe (Just . f) = map f
>
> mapMaybe (const Nothing) = drain
    As a result of the second law,
> mapMaybe return = mapMaybe Just = cat
-}
mapMaybe :: Functor m => (a -> Maybe b) -> Pipe a b m r
-- 'Nothing' results are dropped; 'Just' payloads are forwarded.
mapMaybe f = for cat $ maybe (pure ()) yield . f
{-# INLINABLE [1] mapMaybe #-}
{-# RULES
"p >-> mapMaybe f" forall p f.
    p >-> mapMaybe f = for p $ maybe (pure ()) yield . f
  #-}
#-}
{-| @(filterM predicate)@ only forwards values that satisfy the monadic
    predicate
> filterM (pure (pure True)) = cat
>
> filterM (liftA2 (liftA2 (&&)) p1 p2) = filterM p1 >-> filterM p2
>
> filterM f = wither (\a -> (\b -> a <$ guard b) <$> f a)
-}
filterM :: Monad m => (a -> m Bool) -> Pipe a a m r
filterM predicate = for cat $ \a -> do
    b <- lift (predicate a)
    when b (yield a)
{-# INLINABLE [1] filterM #-}
{-# RULES
"p >-> filterM predicate" forall p predicate .
    p >-> filterM predicate = for p (\a -> do
        b <- lift (predicate a)
        when b (yield a) )
  #-}
{-| @(wither f)@ forwards 'Just' values produced by the
    monadic action.
    Basic laws:
> wither (runMaybeT . (MaybeT . f >=> MaybeT . g)) = wither f >-> wither g
>
> wither (runMaybeT . lift . f) = wither (fmap Just . f) = mapM f
>
> wither (pure . f) = mapMaybe f
    As a result of the second law,
> wither (runMaybeT . return) = cat
    As a result of the third law,
> wither (pure . const Nothing) = wither (const (pure Nothing)) = drain
-}
wither :: Monad m => (a -> m (Maybe b)) -> Pipe a b m r
-- Run the action in the base monad, then yield only 'Just' payloads.
wither f = for cat $ lift . f >=> maybe (pure ()) yield
{-# INLINABLE [1] wither #-}
{-# RULES
"p >-> wither f" forall p f .
    p >-> wither f = for p $ lift . f >=> maybe (pure ()) yield
  #-}
{-| @(take n)@ only allows @n@ values to pass through
> take 0 = return ()
>
> take (m + n) = take m >> take n
> take <infinity> = cat
>
> take (min m n) = take m >-> take n
-}
take :: Functor m => Int -> Pipe a a m ()
take = go
  where
    -- Count down; at 0 the pipe returns, ending the stream segment.
    go 0 = return ()
    go n = do
        a <- await
        yield a
        go (n-1)
{-# INLINABLE take #-}
{-| @(takeWhile p)@ allows values to pass downstream so long as they satisfy
    the predicate @p@.
> takeWhile (pure True) = cat
>
> takeWhile (liftA2 (&&) p1 p2) = takeWhile p1 >-> takeWhile p2
-}
takeWhile :: Functor m => (a -> Bool) -> Pipe a a m ()
takeWhile predicate = go
  where
    -- The first failing element is consumed but NOT forwarded (contrast
    -- 'takeWhile'', which returns it).
    go = do
        a <- await
        if (predicate a)
            then do
                yield a
                go
            else return ()
{-# INLINABLE takeWhile #-}
{-| @(takeWhile' p)@ is a version of takeWhile that returns the value failing
    the predicate.
> takeWhile' (pure True) = cat
>
> takeWhile' (liftA2 (&&) p1 p2) = takeWhile' p1 >-> takeWhile' p2
-}
takeWhile' :: Functor m => (a -> Bool) -> Pipe a a m a
takeWhile' predicate = go
  where
    go = do
        a <- await
        if (predicate a)
            then do
                yield a
                go
            else return a
{-# INLINABLE takeWhile' #-}
{-| @(drop n)@ discards @n@ values going downstream
> drop 0 = cat
>
> drop (m + n) = drop m >-> drop n
-}
drop :: Functor m => Int -> Pipe a a m r
drop = go
  where
    -- After discarding n values, become the identity pipe.
    go 0 = cat
    go n = do
        await
        go (n-1)
{-# INLINABLE drop #-}
{-| @(dropWhile p)@ discards values going downstream until one violates the
    predicate @p@.
> dropWhile (pure False) = cat
>
> dropWhile (liftA2 (||) p1 p2) = dropWhile p1 >-> dropWhile p2
-}
dropWhile :: Functor m => (a -> Bool) -> Pipe a a m r
dropWhile predicate = go
  where
    -- The first element that fails the predicate is forwarded, then the
    -- pipe degenerates to 'cat'.
    go = do
        a <- await
        if (predicate a)
            then go
            else do
                yield a
                cat
{-# INLINABLE dropWhile #-}
-- | Flatten all 'Foldable' elements flowing downstream
concat :: (Functor m, Foldable f) => Pipe (f a) a m r
concat = for cat each
{-# INLINABLE [1] concat #-}
{-# RULES
"p >-> concat" forall p . p >-> concat = for p each
  #-}
-- | Outputs the indices of all elements that match the given element
elemIndices :: (Functor m, Eq a) => a -> Pipe a Int m r
elemIndices a = findIndices (a ==)
{-# INLINABLE elemIndices #-}
-- | Outputs the indices of all elements that satisfied the predicate
findIndices :: Functor m => (a -> Bool) -> Pipe a Int m r
findIndices predicate = go 0
  where
    -- @$!@ keeps the index counter strict so long streams don't build thunks.
    go n = do
        a <- await
        when (predicate a) (yield n)
        go $! n + 1
{-# INLINABLE findIndices #-}
{-| Strict left scan
> Control.Foldl.purely scan :: Monad m => Fold a b -> Pipe a b m r
-}
scan :: Functor m => (x -> a -> x) -> x -> (x -> b) -> Pipe a b m r
scan step begin done = go begin
  where
    -- Yield the rendered accumulator BEFORE consuming the next element, so
    -- the seed itself is the first output.
    go x = do
        yield (done x)
        a <- await
        let x' = step x a
        go $! x'
{-# INLINABLE scan #-}
{-| Strict, monadic left scan
> Control.Foldl.impurely scanM :: Monad m => FoldM m a b -> Pipe a b m r
-}
scanM :: Monad m => (x -> a -> m x) -> m x -> (x -> m b) -> Pipe a b m r
scanM step begin done = do
    x <- lift begin
    go x
  where
    go x = do
        b <- lift (done x)
        yield b
        a <- await
        x' <- lift (step x a)
        go $! x'
{-# INLINABLE scanM #-}
{-| Apply an action to all values flowing downstream

> chain (pure (return ())) = cat
>
> chain (liftA2 (>>) m1 m2) = chain m1 >-> chain m2
-}
chain :: Monad m => (a -> m ()) -> Pipe a a m r
chain f = for cat $ \a -> do
    lift (f a)
    yield a
{-# INLINABLE [1] chain #-}
-- Fusion rules: fuse 'chain' into the adjacent pipe on either side.
{-# RULES
    "p >-> chain f" forall p f .
        p >-> chain f = for p (\a -> do
            lift (f a)
            yield a )
  ; "chain f >-> p" forall p f .
        chain f >-> p = (do
            a <- await
            lift (f a)
            return a ) >~ p
  #-}
-- | Parse 'Read'able values, only forwarding the value if the parse succeeds
-- (inputs that do not parse completely are dropped silently)
read :: (Functor m, Read a) => Pipe String a m r
read = for cat $ \str -> case (reads str) of
    [(a, "")] -> yield a
    _ -> return ()
{-# INLINABLE [1] read #-}
{-# RULES
    "p >-> read" forall p .
        p >-> read = for p (\str -> case (reads str) of
            [(a, "")] -> yield a
            _ -> return () )
  #-}
-- | Convert 'Show'able values to 'String's
show :: (Functor m, Show a) => Pipe a String m r
show = map Prelude.show
{-# INLINABLE show #-}
-- | Evaluate all values flowing downstream to WHNF
seq :: Functor m => Pipe a a m r
seq = for cat $ \a -> yield $! a
{-# INLINABLE seq #-}
{-| Create a `Pipe` from a `ListT` transformation

> loop (k1 >=> k2) = loop k1 >-> loop k2
>
> loop return = cat
-}
loop :: Monad m => (a -> ListT m b) -> Pipe a b m r
-- 'every' re-yields each alternative produced by the ListT computation.
loop k = for cat (every . k)
{-# INLINABLE loop #-}
{- $folds
Use these to fold the output of a 'Producer'. Many of these folds will stop
drawing elements if they can compute their result early, like 'any':
>>> P.any Prelude.null P.stdinLn
Test<Enter>
ABC<Enter>
<Enter>
True
>>>
-}
{-| Strict fold of the elements of a 'Producer'

> Control.Foldl.purely fold :: Monad m => Fold a b -> Producer a m () -> m b
-}
fold :: Monad m => (x -> a -> x) -> x -> (x -> b) -> Producer a m () -> m b
fold step begin done p0 = go p0 begin
  where
    -- Walk the Proxy constructors directly instead of using 'next',
    -- accumulating strictly with ($!).  The Request branch is unreachable
    -- for a Producer (its upstream interface is uninhabited), so 'closed'
    -- eliminates it.
    go p x = case p of
        Request v _ -> closed v
        Respond a fu -> go (fu ()) $! step x a
        M m -> m >>= \p' -> go p' x
        Pure _ -> return (done x)
{-# INLINABLE fold #-}
{-| Strict fold of the elements of a 'Producer' that preserves the return value

> Control.Foldl.purely fold' :: Monad m => Fold a b -> Producer a m r -> m (b, r)
-}
fold' :: Monad m => (x -> a -> x) -> x -> (x -> b) -> Producer a m r -> m (b, r)
fold' step begin done p0 = go p0 begin
  where
    go p x = case p of
        Request v _ -> closed v
        Respond a fu -> go (fu ()) $! step x a
        M m -> m >>= \p' -> go p' x
        -- Unlike 'fold', the producer's return value is kept
        Pure r -> return (done x, r)
{-# INLINABLE fold' #-}
{-| Strict, monadic fold of the elements of a 'Producer'

> Control.Foldl.impurely foldM :: Monad m => FoldM a b -> Producer a m () -> m b
-}
foldM
    :: Monad m
    => (x -> a -> m x) -> m x -> (x -> m b) -> Producer a m () -> m b
foldM step begin done p0 = do
    x0 <- begin
    go p0 x0
  where
    go p x = case p of
        Request v _ -> closed v
        Respond a fu -> do
            x' <- step x a
            go (fu ()) $! x'
        M m -> m >>= \p' -> go p' x
        Pure _ -> done x
{-# INLINABLE foldM #-}
{-| Strict, monadic fold of the elements of a 'Producer'

> Control.Foldl.impurely foldM' :: Monad m => FoldM a b -> Producer a m r -> m (b, r)
-}
foldM'
    :: Monad m
    => (x -> a -> m x) -> m x -> (x -> m b) -> Producer a m r -> m (b, r)
foldM' step begin done p0 = do
    x0 <- begin
    go p0 x0
  where
    go p x = case p of
        Request v _ -> closed v
        Respond a fu -> do
            x' <- step x a
            go (fu ()) $! x'
        M m -> m >>= \p' -> go p' x
        Pure r -> do
            b <- done x
            return (b, r)
{-# INLINABLE foldM' #-}
{-| @(all predicate p)@ determines whether all the elements of @p@ satisfy the
    predicate.
-}
all :: Monad m => (a -> Bool) -> Producer a m () -> m Bool
-- True iff the stream of counterexamples is empty; 'null' draws at most
-- one element, so this stops at the first failure.
all predicate p = null $ p >-> filter (\a -> not (predicate a))
{-# INLINABLE all #-}
{-| @(any predicate p)@ determines whether any element of @p@ satisfies the
    predicate.
-}
any :: Monad m => (a -> Bool) -> Producer a m () -> m Bool
-- Dual of 'all': stops at the first element that satisfies the predicate.
any predicate p = liftM not $ null (p >-> filter predicate)
{-# INLINABLE any #-}
-- | Determines whether all elements are 'True'
and :: Monad m => Producer Bool m () -> m Bool
and = all id
{-# INLINABLE and #-}
-- | Determines whether any element is 'True'
or :: Monad m => Producer Bool m () -> m Bool
or = any id
{-# INLINABLE or #-}
{-| @(elem a p)@ returns 'True' if @p@ has an element equal to @a@, 'False'
    otherwise
-}
elem :: (Monad m, Eq a) => a -> Producer a m () -> m Bool
elem a = any (a ==)
{-# INLINABLE elem #-}
{-| @(notElem a)@ returns 'False' if @p@ has an element equal to @a@, 'True'
    otherwise
-}
notElem :: (Monad m, Eq a) => a -> Producer a m () -> m Bool
notElem a = all (a /=)
{-# INLINABLE notElem #-}
-- | Find the first element of a 'Producer' that satisfies the predicate
find :: Monad m => (a -> Bool) -> Producer a m () -> m (Maybe a)
find predicate p = head (p >-> filter predicate)
{-# INLINABLE find #-}
{-| Find the index of the first element of a 'Producer' that satisfies the
    predicate
-}
findIndex :: Monad m => (a -> Bool) -> Producer a m () -> m (Maybe Int)
findIndex predicate p = head (p >-> findIndices predicate)
{-# INLINABLE findIndex #-}
-- | Retrieve the first element from a 'Producer'
head :: Monad m => Producer a m () -> m (Maybe a)
head p =
    -- One step of the producer suffices; the remainder is discarded.
    next p >>= \step -> return (either (const Nothing) (Just . fst) step)
{-# INLINABLE head #-}
-- | Index into a 'Producer' (zero-based; 'Nothing' if too short)
index :: Monad m => Int -> Producer a m () -> m (Maybe a)
index n p = head (p >-> drop n)
{-# INLINABLE index #-}
-- | Retrieve the last element from a 'Producer'
last :: Monad m => Producer a m () -> m (Maybe a)
last p0 = do
    x <- next p0
    case x of
        Left _ -> return Nothing
        Right (a, p') -> go a p'
  where
    -- Carry the most recently seen element until the producer ends
    go a p = do
        x <- next p
        case x of
            Left _ -> return (Just a)
            Right (a', p') -> go a' p'
{-# INLINABLE last #-}
-- | Count the number of elements in a 'Producer'
length :: Monad m => Producer a m () -> m Int
length = fold (\n _ -> n + 1) 0 id
{-# INLINABLE length #-}
-- | Find the maximum element of a 'Producer'
maximum :: (Monad m, Ord a) => Producer a m () -> m (Maybe a)
maximum = fold step Nothing id
  where
    -- 'Nothing' until the first element; afterwards the running maximum
    step x a = Just $ case x of
        Nothing -> a
        Just a' -> max a a'
{-# INLINABLE maximum #-}
-- | Find the minimum element of a 'Producer'
minimum :: (Monad m, Ord a) => Producer a m () -> m (Maybe a)
minimum = fold step Nothing id
  where
    step x a = Just $ case x of
        Nothing -> a
        Just a' -> min a a'
{-# INLINABLE minimum #-}
-- | Determine if a 'Producer' is empty
null :: Monad m => Producer a m () -> m Bool
null p = do
    -- Only a single step of the producer is run
    x <- next p
    return $ case x of
        Left _ -> True
        Right _ -> False
{-# INLINABLE null #-}
-- | Compute the sum of the elements of a 'Producer'
sum :: (Monad m, Num a) => Producer a m () -> m a
sum = fold (+) 0 id
{-# INLINABLE sum #-}
-- | Compute the product of the elements of a 'Producer'
product :: (Monad m, Num a) => Producer a m () -> m a
product = fold (*) 1 id
{-# INLINABLE product #-}
-- | Convert a pure 'Producer' into a list
toList :: Producer a Identity () -> [a]
-- 'build' exposes cons/nil to GHC's list-fusion machinery, so consumers of
-- the result can often fuse away the intermediate list entirely.
toList prod0 = build (go prod0)
  where
    go prod cons nil =
      case prod of
        Request v _ -> closed v
        Respond a fu -> cons a (go (fu ()) cons nil)
        M m -> go (runIdentity m) cons nil
        Pure _ -> nil
{-# INLINE toList #-}
{-| Convert an effectful 'Producer' into a list

    Note: 'toListM' is not an idiomatic use of @pipes@, but I provide it for
    simple testing purposes.  Idiomatic @pipes@ style consumes the elements
    immediately as they are generated instead of loading all elements into
    memory.
-}
toListM :: Monad m => Producer a m () -> m [a]
toListM = fold step begin done
  where
    -- Accumulate a difference list (function composition) so that each
    -- append is O(1); 'done' materialises it by applying to [].
    step x a = x . (a:)
    begin = id
    done x = x []
{-# INLINABLE toListM #-}
{-| Convert an effectful 'Producer' into a list alongside the return value

    Note: 'toListM'' is not an idiomatic use of @pipes@, but I provide it for
    simple testing purposes.  Idiomatic @pipes@ style consumes the elements
    immediately as they are generated instead of loading all elements into
    memory.
-}
toListM' :: Monad m => Producer a m r -> m ([a], r)
toListM' = fold' step begin done
  where
    step x a = x . (a:)
    begin = id
    done x = x []
{-# INLINABLE toListM' #-}
-- | Zip two 'Producer's
zip :: Monad m
    => (Producer a m r)
    -> (Producer b m r)
    -> (Proxy x' x () (a, b) m r)
zip = zipWith (,)
{-# INLINABLE zip #-}
-- | Zip two 'Producer's using the provided combining function.
-- The result ends as soon as either input ends, propagating that input's
-- return value; the left producer is always stepped first.
zipWith :: Monad m
    => (a -> b -> c)
    -> (Producer a m r)
    -> (Producer b m r)
    -> (Proxy x' x () c m r)
zipWith f = go
  where
    go p1 p2 = do
        e1 <- lift $ next p1
        case e1 of
            Left r -> return r
            Right (a, p1') -> do
                e2 <- lift $ next p2
                case e2 of
                    Left r -> return r
                    Right (b, p2') -> do
                        yield (f a b)
                        go p1' p2'
{-# INLINABLE zipWith #-}
{-| Transform a 'Consumer' to a 'Pipe' that reforwards all values further
    downstream
-}
tee :: Monad m => Consumer a m r -> Pipe a a m r
-- NOTE(review): the State layer buffers the most recently awaited element
-- so it can be re-yielded downstream one step later; the exact
-- yield/await/put interleaving is deliberate, so the body is left as-is.
tee p = evalStateP Nothing $ do
    r <- up >\\ (hoist lift p //> dn)
    ma <- lift get
    case ma of
        Nothing -> return ()
        Just a -> yield a
    return r
  where
    up () = do
        ma <- lift get
        case ma of
            Nothing -> return ()
            Just a -> yield a
        a <- await
        lift $ put (Just a)
        return a
    -- A Consumer cannot respond downstream, so 'dn' is unreachable
    dn v = closed v
{-# INLINABLE tee #-}
{-| Transform a unidirectional 'Pipe' to a bidirectional 'Proxy'

> generalize (f >-> g) = generalize f >+> generalize g
>
> generalize cat = pull
-}
generalize :: Monad m => Pipe a b m r -> x -> Proxy x a x b m r
-- The State layer threads the bidirectional argument @x@ between the
-- upstream 'request' and the downstream 'respond'.
generalize p x0 = evalStateP x0 $ up >\\ hoist lift p //> dn
  where
    up () = do
        x <- lift get
        request x
    dn a = do
        x <- respond a
        lift $ put x
{-# INLINABLE generalize #-}
{-| The natural unfold into a 'Producer' with a step function and a seed

> unfoldr next = id
-}
unfoldr :: Monad m
        => (s -> m (Either r (a, s))) -> s -> Producer a m r
unfoldr step = go where
    -- Left terminates the producer with its return value; Right yields an
    -- element and continues from the new seed.
    go s0 = do
        e <- lift (step s0)
        case e of
            Left r -> return r
            Right (a,s) -> do
                yield a
                go s
{-# INLINABLE unfoldr #-}
|
Gabriel439/Haskell-Pipes-Library
|
src/Pipes/Prelude.hs
|
bsd-3-clause
| 24,938
| 0
| 21
| 7,618
| 6,004
| 3,044
| 2,960
| -1
| -1
|
{-# LANGUAGE CPP #-}
{- |
Module : $Header$
Description : The definition of CMDL interface for
standard input and file input
Copyright : uni-bremen and DFKI
License : GPLv2 or higher, see LICENSE.txt
Maintainer : r.pascanu@jacobs-university.de
Stability : provisional
Portability : portable
CMDL.Interface describes the interface specific function
for standard input and file input
-}
module CMDL.Interface where
#ifdef HASKELINE
import System.Console.Haskeline
import Interfaces.DataTypes
import Comorphisms.LogicGraph (logicGraph)
import Proofs.AbstractState (getConsCheckers, sublogicOfTheory, getCcName )
import Logic.Grothendieck
#endif
import System.IO
import CMDL.Commands (getCommands)
import CMDL.DataTypes
import CMDL.DataTypesUtils
import CMDL.Shell
import CMDL.ProcessScript
import CMDL.Utils (stripComments)
import Interfaces.Command
import Common.Utils (trim)
import Data.List
import Data.IORef
import Control.Monad
import Control.Monad.Trans (MonadIO (..))
#ifdef HASKELINE
-- | Haskeline settings: tab completion backed by the shared shell state,
-- with persistent command history.
shellSettings :: IORef CmdlState -> Settings IO
shellSettings st =
  Settings {
      complete = cmdlComplete st
    , historyFile = Just "consoleHistory.tmp"
    , autoAddHistory = True
    }
{- We need a mutable reference (an 'IORef') here because CmdlState is not a
   monad and Haskeline runs completions in IO. -}
-- | Assemble the final completion list: built-in commands, per-logic
-- cons-checker commands and the completions computed by 'cmdlCompletionFn'.
-- @left@ is the reversed input line (Haskeline convention — note the
-- 'reverse left' below).
showCmdComplete :: CmdlState -> [String] -> [String] -> String ->
                   IO (String, [Completion])
showCmdComplete state shortConsCList comps left = do
  let (_, nodes) = case i_state $ intState state of
                     Nothing -> ("", [])
                     Just dgState -> getSelectedDGNodes dgState
      cmdss = "prove-all" : map (cmdNameStr . cmdDescription) getCommands
      cmds = cmdss ++ (map ("cons-checker " ++) shortConsCList)
      cmdcomps = filter (isPrefixOf (reverse left)) cmds
      -- "-current" commands only make sense when nodes are selected
      cmdcomps' = if null nodes
                    then filter (not . isSuffixOf "-current") cmdcomps
                    else cmdcomps
  return ("", map simpleCompletion $ comps ++ cmdcomps')
-- | Haskeline 'CompletionFunc'.  When the user is typing a
-- @cons-checker@ command, the available checkers for the selected
-- theory's sublogic are offered as well.
cmdlComplete :: IORef CmdlState -> CompletionFunc IO
cmdlComplete st (left, _) = do
  state <- liftIO $ readIORef st
  comps <- liftIO $ cmdlCompletionFn getCommands state $ reverse left
  if isPrefixOf (reverse left) "cons-checker "
    then
      case i_state $ intState state of
        Just pS ->
          case elements pS of
            Element z _ : _ ->
              do
                consCheckList <- getConsCheckers $ findComorphismPaths
                  logicGraph $ sublogicOfTheory z
                let shortConsCList = nub $ map (\ (y, _) -> getCcName y)
                      consCheckList
                showCmdComplete state shortConsCList comps left
            [] -> showCmdComplete state [] comps left
        Nothing -> showCmdComplete state [] comps left
    else showCmdComplete state [] comps left
#endif
#ifdef HASKELINE
-- | Read one logical line via Haskeline, joining physical lines whose
-- trailing character (after spaces/newlines) is a backslash.  The input is
-- reversed so 'hasSlash' / 'takeOutSlash' can inspect the *end* of the
-- line at the head of the string.
getMultiLineT :: String -> String -> InputT IO (Maybe String)
getMultiLineT prompt past = do
  minput <- getInputLine prompt
  case minput of
    Nothing -> return Nothing
    Just input -> let
      str = reverse input
      has = hasSlash str
      in if has then
      getMultiLineT prompt ( past ++ (reverse (takeOutSlash str)))
      else
      return $ Just $ past ++ input
-- | True when the first character, after skipping spaces and newlines,
-- is a backslash — i.e. the (reversed) line requests a continuation.
hasSlash :: String -> Bool
hasSlash s = case dropWhile (`elem` " \n") s of
  '\\' : _ -> True
  _ -> False
-- | Drop leading spaces and newlines, then strip a single leading
-- backslash if present; everything after that is returned unchanged.
takeOutSlash :: String -> String
takeOutSlash s =
  case dropWhile (`elem` " \n") s of
    '\\' : rest -> rest
    rest -> rest
-- | Read one logical line from stdin without Haskeline: a trailing
-- backslash is stripped ('init') and the next physical line is appended.
getLongLine :: IO String
getLongLine = do
  l <- getLine
  if isSuffixOf "\\" l then fmap (init l ++) getLongLine else return l
-- | The interactive read-eval loop: read one (possibly continued) line,
-- echo it when input is piped in, run it through 'cmdlProcessString' and
-- recurse.  EOF or an exit command terminates the loop.  The result monad
-- depends on whether Haskeline support was compiled in.
shellLoop :: IORef CmdlState
          -> Bool
#ifdef HASKELINE
          -> InputT IO CmdlState
#else
          -> IO CmdlState
#endif
shellLoop st isTerminal =
  do
    state <- liftIO $ readIORef st
    -- No prompt when input is not a terminal (e.g. piped scripts)
    let prompt = if isTerminal then generatePrompter state else ""
#ifdef HASKELINE
    minput <- getMultiLineT prompt ""
#else
    putStr prompt
    hFlush stdout
    eof <- isEOF
    minput <- if eof then return Nothing else liftM Just getLongLine
#endif
    case minput of
      Nothing -> return state
      Just input ->
        do
          let echo = trim $ stripComments input
          -- Echo non-empty commands when running non-interactively
          when (not isTerminal && not (null echo))
            (liftIO $ putStrLn $ generatePrompter state ++ echo)
          (state', mc) <- liftIO $ cmdlProcessString "" 0 input state
          case mc of
            Nothing -> if elem input ["exit", ":q"] -- additional exit cmds
                         then return state'
                         else do
                           liftIO $ putStrLn $ "Unknown command: " ++ input
                           shellLoop st isTerminal
            Just ExitCmd -> return state'
            Just c -> do
              newState <- liftIO $ printCmdResult state'
              -- Run the command's post-check when it is a known command
              newState' <- liftIO $ case find
                             (eqCmd c . cmdDescription) getCommands of
                               Nothing -> return newState
                               Just cm -> checkCom
                                 cm { cmdDescription = c } newState
              liftIO $ writeIORef st newState'
              shellLoop st isTerminal
-- | The function runs hets in a shell
-- (falls back to plain line-buffered stdin when Haskeline is unavailable).
cmdlRunShell :: CmdlState -> IO CmdlState
cmdlRunShell state = do
  isTerminal <- hIsTerminalDevice stdin
  st <- newIORef state
#ifdef HASKELINE
  runInputT (shellSettings st) $ shellLoop st isTerminal
#else
  hSetBuffering stdin LineBuffering
  shellLoop st isTerminal
#endif
|
mariefarrell/Hets
|
CMDL/Interface.hs
|
gpl-2.0
| 5,817
| 0
| 24
| 1,872
| 1,414
| 710
| 704
| 72
| 8
|
-- Test input for the static-warning checker: the guard on the first
-- equation is trivially True, making the second 'main True' equation
-- unreachable — the checker is expected to warn about the overlap, so the
-- redundancy here is intentional.
main :: Bool -> ()
main True | True = ()
main True = ()
main False = ()
|
roberth/uu-helium
|
test/staticwarnings/Guards3.hs
|
gpl-3.0
| 86
| 2
| 8
| 33
| 59
| 24
| 35
| 4
| 1
|
-- |
-- Module: BDCS.Sources
-- Copyright: (c) 2016-2018 Red Hat, Inc.
-- License: LGPL
--
-- Maintainer: https://github.com/weldr
-- Stability: alpha
-- Portability: portable
--
-- Manage 'Sources' records in the database. This record keeps track of a single
-- software release of a single project. A single project can make many releases,
-- each of which will require a separate 'Sources' record.
module BDCS.Sources(findSource,
findSources,
getSource,
insertSource,
insertSourceKeyValue)
where
import Control.Monad.IO.Class(MonadIO)
import qualified Data.Text as T
import Database.Esqueleto
import BDCS.DB
import BDCS.KeyType
import BDCS.KeyValue(findKeyValue)
{-# ANN findSource ("HLint: ignore Use ." :: String) #-}
-- | Given a version number and a key to a 'Projects' record, find a matching software
-- source in the database.  If it exists, the database key is returned.
findSource :: MonadIO m => T.Text -> Key Projects -> SqlPersistT m (Maybe (Key Sources))
findSource version projectId = firstKeyResult $
    -- FIXME: Is (project_id, version) unique in Sources?
    select $ from $ \src -> do
    where_ $ src ^. SourcesProject_id ==. val projectId &&.
             src ^. SourcesVersion ==. val version
    limit 1
    return $ src ^. SourcesId
-- | Given a key to a 'Projects' record, find all software sources for that project in
-- the database.  The key for each result is returned.
findSources :: MonadIO m => Key Projects -> SqlPersistT m [Key Sources]
findSources projectId = do
    vals <- select $ from $ \src -> do
            where_ $ src ^. SourcesProject_id ==. val projectId
            return $ src ^. SourcesId
    -- 'select' wraps each key in 'Value'; unwrap for the callers
    return $ map unValue vals
-- | Given a key to a 'Sources' record in the database, return that record.  This function
-- is suitable for using on the result of 'findSource'.
getSource :: MonadIO m => Key Sources -> SqlPersistT m (Maybe Sources)
getSource key = firstEntityResult $
    select $ from $ \source -> do
    where_ $ source ^. SourcesId ==. val key
    limit 1
    return source
-- | Conditionally add a new 'Sources' record to the database.  If the record already exists,
-- return its key.  Otherwise, insert the record and return the new key.
insertSource :: MonadIO m => Sources -> SqlPersistT m (Key Sources)
insertSource source@Sources{..} =
    findSource sourcesVersion sourcesProject_id `orInsert` source
-- | Conditionally add a new 'KeyVal' record to the database and associate a 'Sources'
-- record with it.  If the 'KeyVal' record already exists, it is reused in creating the
-- association.  The database key of the association is returned.
--
-- A single source can potentially have zero or more 'KeyVal' pairs associated with it.
-- On the other hand, a single 'KeyVal' pair can apply to many sources.
insertSourceKeyValue :: MonadIO m =>
                        KeyType                          -- ^ Type of the 'KeyVal'
                     -> T.Text                           -- ^ Value of the 'KeyVal'
                     -> Maybe T.Text                     -- ^ Extended value of the 'KeyVal'
                     -> Key Sources                      -- ^ Source to be associated with the 'KeyVal'
                     -> SqlPersistT m (Key SourceKeyValues)
insertSourceKeyValue k v e sourceId = do
    kvId <- findKeyValue k (Just v) e `orInsert` KeyVal k (Just v) e
    insert $ SourceKeyValues sourceId kvId
|
atodorov/bdcs
|
src/BDCS/Sources.hs
|
lgpl-2.1
| 3,518
| 0
| 14
| 936
| 572
| 299
| 273
| -1
| -1
|
{-# LANGUAGE CPP, DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Server.Packages.PackageIndex
-- Copyright : (c) David Himmelstrup 2005,
-- Bjorn Bringert 2007,
-- Duncan Coutts 2008
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- An index of packages.
--
module Distribution.Server.Packages.PackageIndex (
-- * Package index data type
PackageIndex,
-- * Creating an index
fromList,
-- * Updates
merge,
insert,
insertWith,
deletePackageName,
deletePackageId,
-- * Queries
indexSize,
packageNames,
-- ** Precise lookups
lookupPackageName,
lookupPackageId,
lookupPackageForId,
lookupDependency,
-- ** Case-insensitive searches
searchByName,
SearchResult(..),
searchByNameSubstring,
-- ** Bulk queries
allPackages,
allPackagesByName
) where
import Distribution.Server.Util.Merge
import Prelude hiding (lookup)
import Control.Exception (assert)
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Foldable as Foldable
import Data.List (groupBy, sortBy, find, isInfixOf)
import Data.Monoid (Monoid(..))
import Data.Maybe (fromMaybe)
import Data.Typeable
import Distribution.Package
( PackageName(..), PackageIdentifier(..)
, Package(..), packageName, packageVersion
, Dependency(Dependency) )
import Distribution.Version ( withinRange )
import Distribution.Simple.Utils (lowercase, comparing)
-- | The collection of information about packages from one or more 'PackageDB's.
--
-- It can be searched effeciently by package name and version.
--
newtype PackageIndex pkg = PackageIndex
-- This index package names to all the package records matching that package
-- name case-sensitively. It includes all versions.
--
-- This allows us to find all versions satisfying a dependency.
-- Most queries are a map lookup followed by a linear scan of the bucket.
--
(Map PackageName [pkg])
deriving (Show, Read, Typeable)
-- Equality up to reordering within a bucket: the two name maps must have
-- the same keys, and each pair of buckets is compared as a bag (multiset).
instance Eq pkg => Eq (PackageIndex pkg) where
  PackageIndex m1 == PackageIndex m2 = flip Foldable.all (mergeMaps m1 m2) $ \mr -> case mr of
      InBoth pkgs1 pkgs2 -> bagsEq pkgs1 pkgs2
      OnlyInLeft _ -> False
      OnlyInRight _ -> False
    where
      -- O(n^2) bag equality: remove one matching element per step
      bagsEq [] [] = True
      bagsEq [] _ = False
      bagsEq (x:xs) ys = case suitable_ys of
        [] -> False
        (_y:suitable_ys') -> bagsEq xs (unsuitable_ys ++ suitable_ys')
        where (unsuitable_ys, suitable_ys) = break (==x) ys
instance Package pkg => Monoid (PackageIndex pkg) where
  mempty = PackageIndex (Map.empty)
  mappend = merge
  --save one mappend with empty in the common case:
  mconcat [] = mempty
  mconcat xs = foldr1 mappend xs
-- | Internal well-formedness check: every bucket is non-empty, keyed by
-- its packages' name, and strictly ascending by package id (hence also
-- duplicate-free).
invariant :: Package pkg => PackageIndex pkg -> Bool
invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m)
  where
    goodBucket _ [] = False
    goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0
      where
        check pkgid [] = packageName pkgid == name
        check pkgid (pkg':pkgs) = packageName pkgid == name
                               && pkgid < pkgid'
                               && check pkgid' pkgs
          where pkgid' = packageId pkg'
--
-- * Internal helpers
--
-- | Smart constructor that asserts the bucket invariant (in assertion-enabled
-- builds only).
mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg
mkPackageIndex index = assert (invariant (PackageIndex index)) (PackageIndex index)
-- | Abort with a module-qualified message for "impossible" situations.
internalError :: String -> a
internalError name = error ("PackageIndex." ++ name ++ ": internal error")
-- | Lookup a name in the index to get all packages that match that name
-- case-sensitively.
--
lookup :: Package pkg => PackageIndex pkg -> PackageName -> [pkg]
lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m
--
-- * Construction
--
-- | Build an index out of a bunch of packages.
--
-- If there are duplicates, later ones mask earlier ones.
--
fromList :: Package pkg => [pkg] -> PackageIndex pkg
fromList pkgs = mkPackageIndex
              . Map.map fixBucket
              . Map.fromListWith (++)
              $ [ (packageName pkg, [pkg])
                | pkg <- pkgs ]
  where
    fixBucket = -- out of groups of duplicates, later ones mask earlier ones
                -- but Map.fromListWith (++) constructs groups in reverse order
                -- ('head' is safe here: groupBy yields non-empty groups)
                map head
                -- Eq instance for PackageIdentifier is wrong, so use Ord:
              . groupBy (\a b -> EQ == comparing packageId a b)
                -- relies on sortBy being a stable sort so we
                -- can pick consistently among duplicates
              . sortBy (comparing packageId)
--
-- * Updates
--
-- | Merge two indexes.
--
-- Packages from the second mask packages of the same exact name
-- (case-sensitively) from the first.
--
merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg
merge i1@(PackageIndex m1) i2@(PackageIndex m2) =
  assert (invariant i1 && invariant i2) $
    mkPackageIndex (Map.unionWith mergeBuckets m1 m2)
-- | Elements in the second list mask those in the first.
-- Both inputs are sorted by package id; the output stays sorted (a
-- merge-sort-style merge that prefers the right element on ties).
mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg]
mergeBuckets [] ys = ys
mergeBuckets xs [] = xs
mergeBuckets xs@(x:xs') ys@(y:ys') =
      case packageId x `compare` packageId y of
        GT -> y : mergeBuckets xs ys'
        EQ -> y : mergeBuckets xs' ys'
        LT -> x : mergeBuckets xs' ys
-- | Inserts a single package into the index.
--
-- This is equivalent to (but slightly quicker than) using 'mappend' or
-- 'merge' with a singleton index.
--
insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg
insert pkg (PackageIndex index) = mkPackageIndex $ -- or insertWith const
  Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index
  where
    pkgid = packageId pkg
    -- Insert into the sorted bucket, replacing an equal package id
    insertNoDup [] = [pkg]
    insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of
      LT -> pkg : pkgs
      EQ -> pkg : pkgs' -- this replaces the package
      GT -> pkg' : insertNoDup pkgs'
-- | Inserts a single package into the index, combining an old and new value with a function.
-- This isn't in cabal's version of PackageIndex.
--
-- The merge function is called as (f newPkg oldPkg).  Ensure that the result has the same
-- package id as the two arguments; otherwise newPkg is used.
--
insertWith :: Package pkg => (pkg -> pkg -> pkg) -> pkg -> PackageIndex pkg -> PackageIndex pkg
insertWith mergeFunc pkg (PackageIndex index) = mkPackageIndex $
  Map.insertWith (\_ -> insertMerge) (packageName pkg) [pkg] index
  where
    pkgid = packageId pkg
    insertMerge [] = [pkg]
    insertMerge pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of
      LT -> pkg : pkgs
      -- Guard against a merge function that changed the package id,
      -- which would break the sorted-bucket invariant
      EQ -> let merged = mergeFunc pkg pkg' in
            if packageId merged == pkgid then merged : pkgs'
                                         else pkg : pkgs'
      GT -> pkg' : insertMerge pkgs'
-- | Internal delete helper: drop packages matching the predicate from the
-- named bucket, removing the bucket entirely if it becomes empty.
--
delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg -> PackageIndex pkg
delete name p (PackageIndex index) = mkPackageIndex $
  Map.update filterBucket name index
  where
    filterBucket = deleteEmptyBucket
                 . filter (not . p)
    -- 'Nothing' makes Map.update remove the key
    deleteEmptyBucket [] = Nothing
    deleteEmptyBucket remaining = Just remaining
-- | Removes a single package from the index.
--
deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg -> PackageIndex pkg
deletePackageId pkgid =
  delete (packageName pkgid) (\pkg -> packageId pkg == pkgid)
-- | Removes all packages with this (case-sensitive) name from the index.
--
deletePackageName :: Package pkg => PackageName -> PackageIndex pkg -> PackageIndex pkg
deletePackageName name =
  delete name (\pkg -> packageName pkg == name)
--
-- * Bulk queries
--
-- | Get all the packages from the index.
--
allPackages :: Package pkg => PackageIndex pkg -> [pkg]
allPackages (PackageIndex m) = concat (Map.elems m)
-- | Get all the packages from the index.
--
-- They are grouped by package name, case-sensitively.
--
allPackagesByName :: Package pkg => PackageIndex pkg -> [[pkg]]
allPackagesByName (PackageIndex m) = Map.elems m
--
-- * Lookups
--
-- | Does a lookup by package id (name & version).
--
-- Since multiple package DBs mask each other case-sensitively by package name,
-- then we get back at most one package.
--
lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Maybe pkg
lookupPackageId index pkgid =
  case [ pkg | pkg <- lookup index (packageName pkgid)
             , packageId pkg == pkgid ] of
    [] -> Nothing
    [pkg] -> Just pkg
    -- More than one match would violate the bucket invariant
    _ -> internalError "lookupPackageIdentifier"
-- | Does a case-sensitive search by package name.
-- The returned list should be ordered (strictly ascending) by version number.
--
lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg]
lookupPackageName index name =
  [ pkg | pkg <- lookup index name
        , packageName pkg == name ]
-- | Search by name of a package identifier, and further select a version if possible.
--
lookupPackageForId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> ([pkg], Maybe pkg)
lookupPackageForId index pkgid =
  let pkgs = lookupPackageName index (packageName pkgid)
  in (,) pkgs $ find ((==pkgid) . packageId) pkgs
-- | Does a case-sensitive search by package name and a range of versions.
--
-- We get back any number of versions of the specified package name, all
-- satisfying the version range constraint.
--
lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg]
lookupDependency index (Dependency name versionRange) =
  [ pkg | pkg <- lookup index name
        , packageName pkg == name
        , packageVersion pkg `withinRange` versionRange ]
--
-- * Case insensitive name lookups
--
-- | Does a case-insensitive search by package name.
--
-- If there is only one package that compares case-insensitively to this name
-- then the search is unambiguous and we get back all versions of that package.
-- If several match case-insensitively but one matches exactly then it is also
-- unambiguous.
--
-- If however several match case-insensitively and none match exactly then we
-- have an ambiguous result, and we get back all the versions of all the
-- packages. The list of ambiguous results is split by exact package name. So
-- it is a non-empty list of non-empty lists.
--
searchByName :: Package pkg => PackageIndex pkg -> String -> SearchResult [pkg]
searchByName (PackageIndex m) name =
  -- Note: 'pkgs' here binds a (name, bucket) pair from the map
  case [ pkgs | pkgs@(PackageName name',_) <- Map.toList m
              , lowercase name' == lname ] of
    [] -> None
    [(_,pkgs)] -> Unambiguous pkgs
    pkgss -> case find ((PackageName name==) . fst) pkgss of
      Just (_,pkgs) -> Unambiguous pkgs
      Nothing -> Ambiguous (map snd pkgss)
  where lname = lowercase name
data SearchResult a = None | Unambiguous a | Ambiguous [a] deriving (Show)
-- | Does a case-insensitive substring search by package name.
--
-- That is, all packages that contain the given string in their name.
--
searchByNameSubstring :: Package pkg => PackageIndex pkg -> String -> [pkg]
searchByNameSubstring (PackageIndex m) searchterm =
  [ pkg
  | (PackageName name, pkgs) <- Map.toList m
  , lsearchterm `isInfixOf` lowercase name
  , pkg <- pkgs ]
  where lsearchterm = lowercase searchterm
-- | Gets the number of packages in the index (number of names).
indexSize :: Package pkg => PackageIndex pkg -> Int
indexSize (PackageIndex m) = Map.size m
-- | Get an ascending list of package names in the index.
packageNames :: Package pkg => PackageIndex pkg -> [PackageName]
packageNames (PackageIndex m) = Map.keys m
|
isomorphism/hackage2
|
Distribution/Server/Packages/PackageIndex.hs
|
bsd-3-clause
| 11,892
| 0
| 14
| 2,774
| 2,765
| 1,480
| 1,285
| 171
| 5
|
{-# LANGUAGE ExistentialQuantification #-}
module Utils where
import Control.Applicative
import Data.Char
import Data.Word
import Data.List
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Crypto.Random
import Crypto.Number.Serialize (os2ip)
import Prelude
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit ((@=?))
-- | Seed for a deterministic test DRG (five 64-bit words).
newtype TestDRG = TestDRG (Word64, Word64, Word64, Word64, Word64)
    deriving (Show,Eq)
instance Arbitrary TestDRG where
    arbitrary = TestDRG `fmap` arbitrary -- distribution not uniform
-- Run a computation against a deterministic DRG built from the seed,
-- discarding the final generator state.
-- NOTE(review): no type signature — the full type involves the
-- crypto-random DRG machinery; consider adding one for documentation.
withTestDRG (TestDRG l) f = fst $ withDRG (drgNewTest l) f
-- | A cycle of chunk sizes (0..14) used to re-chunk byte streams in tests.
newtype ChunkingLen = ChunkingLen [Int]
    deriving (Show,Eq)
instance Arbitrary ChunkingLen where
    arbitrary = ChunkingLen `fmap` vectorOf 16 (choose (0,14))
-- | Like 'ChunkingLen' but with chunk sizes in the range 0..127.
newtype ChunkingLen0_127 = ChunkingLen0_127 [Int]
    deriving (Show,Eq)
instance Arbitrary ChunkingLen0_127 where
    arbitrary = ChunkingLen0_127 `fmap` vectorOf 16 (choose (0,127))
-- | An arbitrary bytestring of length 0..2901.
newtype ArbitraryBS0_2901 = ArbitraryBS0_2901 ByteString
    deriving (Show,Eq,Ord)
instance Arbitrary ArbitraryBS0_2901 where
    arbitrary = ArbitraryBS0_2901 `fmap` arbitraryBSof 0 2901
-- | An arbitrary 'Int' in the range 0..2901.
newtype Int0_2901 = Int0_2901 Int
    deriving (Show,Eq,Ord)
-- | An arbitrary 'Int' in the range 1..2901.
newtype Int1_2901 = Int1_2901 Int
    deriving (Show,Eq,Ord)
instance Arbitrary Int0_2901 where
    arbitrary = Int0_2901 `fmap` choose (0,2901)
instance Arbitrary Int1_2901 where
    arbitrary = Int1_2901 `fmap` choose (1,2901)
-- | An 'Integer' wrapper with a better range property: small, medium and
-- large values are all generated with reasonable probability.
newtype QAInteger = QAInteger { getQAInteger :: Integer }
    deriving (Show,Eq)
instance Arbitrary QAInteger where
    arbitrary = oneof
        [ QAInteger . fromIntegral <$> (choose (0, 65536) :: Gen Int) -- small integer
        , larger <$> choose (0,4096) <*> choose (0, 65536) -- medium integer
        , QAInteger . os2ip <$> arbitraryBSof 0 32 -- [ 0 .. 2^32 ] sized integer
        ]
      where
        larger :: Int -> Int -> QAInteger
        larger p b = QAInteger (fromIntegral p * somePrime + fromIntegral b)
        -- Fixed prime just below 2^64, used to spread medium values
        somePrime :: Integer
        somePrime = 18446744073709551557
-- | Generate a bytestring of exactly the given length.
arbitraryBS :: Int -> Gen ByteString
arbitraryBS = fmap B.pack . vector
-- | Generate a bytestring with a length chosen in the given bounds.
arbitraryBSof :: Int -> Int -> Gen ByteString
arbitraryBSof minSize maxSize = choose (minSize, maxSize) >>= arbitraryBS
-- | Re-chunk a strict bytestring following the chunk-size cycle.
-- NOTE(review): zero-sized chunks produce empty pieces; an all-zero cycle
-- with non-empty input would not terminate — the generators make that
-- astronomically unlikely, but it is worth confirming.
chunkS :: ChunkingLen -> ByteString -> [ByteString]
chunkS (ChunkingLen originalChunks) = loop originalChunks
  where loop l bs
            | B.null bs = []
            | otherwise =
                case l of
                    (x:xs) -> let (b1, b2) = B.splitAt x bs in b1 : loop xs b2
                    [] -> loop originalChunks bs
-- | Re-chunk a lazy bytestring following the chunk-size cycle.
chunksL :: ChunkingLen -> L.ByteString -> L.ByteString
chunksL (ChunkingLen originalChunks) = L.fromChunks . loop originalChunks . L.toChunks
  where loop _ [] = []
        loop l (b:bs)
            | B.null b = loop l bs
            | otherwise =
                case l of
                    (x:xs) -> let (b1, b2) = B.splitAt x b in b1 : loop xs (b2:bs)
                    [] -> loop originalChunks (b:bs)
-- | Index of the first Known Answer Test.
katZero :: Int
katZero = 0
-- | Expand each byte-sized value into the character codes of its two
-- lowercase hex digits, in the same numeric type
-- (e.g. @[0xAB] -> [0x61, 0x62]@, the codes for \'a\' and \'b\').
--
-- The previously commented-out signature (@String -> [Word8]@) did not
-- match the implementation, which is polymorphic over any 'Integral'
-- type; this is the signature GHC actually infers.  Values outside
-- 0..255 produce 0 digits via the catch-all guard.
hexalise :: Integral a => [a] -> [a]
hexalise s = concatMap (\c -> [ hex $ c `div` 16, hex $ c `mod` 16 ]) s
  where hex i
          | i >= 0 && i <= 9 = fromIntegral (ord '0') + i
          | i >= 10 && i <= 15 = fromIntegral (ord 'a') + i - 10
          | otherwise = 0
-- | Break a bytestring into pieces of at most @l@ bytes; the final
-- (possibly short, possibly empty) remainder is returned as-is.
splitB :: Int -> ByteString -> [ByteString]
splitB l b
  | B.length b > l = piece : splitB l rest
  | otherwise = [b]
  where (piece, rest) = B.splitAt l b
-- | ByteString-specialised equality assertion.  Delegates to
-- 'assertEq' so both helpers share one diagnostic format (returns
-- True on equality, calls 'error' with expected/got otherwise).
assertBytesEq :: ByteString -> ByteString -> Bool
assertBytesEq = assertEq
-- | Compare two values; return True when equal, abort with a
-- diagnostic showing both values otherwise.
assertEq :: (Show a, Eq a) => a -> a -> Bool
assertEq expected got
    | expected == got = True
    | otherwise       = error ("expected: " ++ show expected ++ " got: " ++ show got)
-- | Alias of 'assertEq'; reads better inside property tests.
propertyEq :: (Show a, Eq a) => a -> a -> Bool
propertyEq = assertEq
-- | A single named equality check.  The existential lets checks over
-- different result types live in one list.
data PropertyTest =
    forall a . (Show a, Eq a) => EqTest String a a
type PropertyName = String
-- | Build a named equality test from the expected and obtained values.
eqTest :: (Show a, Eq a)
       => PropertyName
       -> a -- ^ expected value
       -> a -- ^ got
       -> PropertyTest
eqTest name a b = EqTest name a b
-- | Run every equality test; return True when all pass, otherwise
-- abort with one line per failing test.
propertyHold :: [PropertyTest] -> Bool
propertyHold tests =
    case foldl collect [] tests of
        []       -> True
        failures -> error (intercalate "\n" failures)
  where
    collect acc (EqTest label expected actual)
        | expected == actual = acc
        | otherwise          =
            (label ++ ": expected " ++ show expected ++ " but got: " ++ show actual) : acc
-- | HUnit flavour of 'propertyHold': assert that every test passes.
propertyHoldCase :: [PropertyTest] -> IO ()
propertyHoldCase l = True @=? propertyHold l
|
vincenthz/cryptonite
|
tests/Utils.hs
|
bsd-3-clause
| 4,707
| 0
| 16
| 1,288
| 1,649
| 870
| 779
| 112
| 3
|
module XMonad.Log
( -- * XMonad Logging feature
-- $LOG
-- ** Setting up the logger
setupLogger
-- ** Working with the logger
, debugX
, infoX
, noticeX
, warningX
, errorX
, criticalX
, alertX
, emergencyX
-- ** Abort with error logging.
, abortX
, abortX'
)
where
import System.IO (stderr)
import System.FilePath ((</>))
import System.IO.Unsafe (unsafePerformIO) -- used when aborting
import System.Log.Logger (Priority(..), logM, setHandlers, updateGlobalLogger, rootLoggerName, setLevel)
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple (fileHandler, streamHandler)
import System.Log.Formatter (simpleLogFormatter)
import Control.Monad.State
-- $LOG
-- Logging support for XMonad, that will log to 'stderr' and a file. Everything
-- that is written to 'stderr' is placed in @~/.xsession-errors@, however this
-- might not be optimal, as on some systems quite a few applications tend to be
-- chatty and thus the XMonad specific messages may be hard to find. To solve
-- this, everything is also written into an XMonad specific log such that it is
-- easy to locate.
--
-- The level of "chattyness" can be controlled by setting the 'Priority' in the
-- XMonad configuration. Setting the priority to 'WARNING' will show any
-- messages with that priority and above.
-- TODO: Fix this documentation.
-- * Add example of setting up default config for different logging levels.
-- | Setup a logger in @dir@/xmonad.log and on stderr. 'WARNING's and above will
-- be written to 'stderr', but only @lowestPriority@ will be written to the
-- log file.
setupLogger :: MonadIO m => Priority -> FilePath -> m ()
setupLogger lowestPriority dir = liftIO $ do
    toFile   <- fileHandler (dir </> logFileName) lowestPriority
    toStderr <- streamHandler stderr WARNING
    updateGlobalLogger rootLoggerName
        (setLevel DEBUG .  -- per-handler priorities do the filtering
         setHandlers [ setFormatter h fmt | h <- [toStderr, toFile] ])
  where
    fmt         = simpleLogFormatter "$time, $loggername [$prio]: $msg"
    logFileName = "xmonad.log"
-- | Main log function used by the specialised loggers below.
-- | Shared worker for the specialised loggers below: record @msg@
-- under logger @name@ at the given priority.
logX :: MonadIO m => Priority -> String -> String -> m ()
logX prio name msg = liftIO (logM name prio msg)
-- | Logging with various importance. Importance goes from DEBUG through
-- EMERGENCY, with EMERGENCY being the most important.  Each logger
-- takes a logger name and the message to record.
debugX, infoX, noticeX, warningX, errorX, criticalX, alertX, emergencyX :: MonadIO m => String -> String -> m ()
debugX     = logX DEBUG     -- Debug messages
infoX      = logX INFO      -- Information
noticeX    = logX NOTICE    -- Normal runtime conditions
warningX   = logX WARNING   -- General Warnings
errorX     = logX ERROR     -- General Errors
criticalX  = logX CRITICAL  -- Severe situations
alertX     = logX ALERT     -- Take immediate action
emergencyX = logX EMERGENCY -- System is unusable
-- | Abort execution, yielding a critical log entry and an error.
abortX :: MonadIO m => String -> String -> m a
abortX name msg =
    criticalX name msg >> error fullMsg
  where
    fullMsg = "xmonad: " ++ name ++ ": " ++ msg
-- | Abort execution outside MonadIO.
abortX' :: String -> String -> a
-- NOTE: relies on 'unsafePerformIO' to run the logging side effect;
-- '$!' forces the result so the log entry is emitted before 'error'
-- aborts execution.
abortX' name msg =
  -- force execution of abortX
  id $! unsafePerformIO $ abortX name msg
|
lally/xmonad-reenberg
|
XMonad/Log.hs
|
bsd-3-clause
| 3,384
| 0
| 14
| 763
| 564
| 324
| 240
| 49
| 1
|
--------------------------------------------------------------------
-- |
-- Module : Flickr.Types.Import
-- Description : Parsing Flickr API responses.
-- Copyright : (c) Sigbjorn Finne, 2008
-- License : BSD3
--
-- Maintainer : Sigbjorn Finne <sof@forkIO.com>
-- Stability : provisional
-- Portability : portable
--
-- Translating XML responses into Haskell type representations
-- of the Flickr API resources/entities/types.
--------------------------------------------------------------------
module Flickr.Types.Import where
import Flickr.Types
import Flickr.Utils
import Flickr.Monad ( parseDoc, ErrM )
import Control.Monad ( guard, mplus )
import Data.Char ( toLower )
import Data.Maybe ( mapMaybe )
import Text.XML.Light.Types
import Text.XML.Light.Proc ( strContent, findChild )
-- | Decode an XML response into an 'AuthFrob'.
toAuthFrob :: String -> ErrM AuthFrob
toAuthFrob = parseDoc eltAuthFrob
-- | Accept only a <frob> element, wrapping its text content.
eltAuthFrob :: Element -> Maybe AuthFrob
eltAuthFrob e
  | elName e == nsName "frob" = Just AuthFrob{aFrob = strContent e}
  | otherwise                 = Nothing
toAuthToken :: String -> ErrM AuthToken
toAuthToken s = parseDoc eltAuthToken s
eltAuthToken :: Element -> Maybe AuthToken
eltAuthToken e = ifNamed "auth" e $ do
let es = children e
t <- pLeaf "token" es
p <- pLeaf "perms" es
user <- pNode "user" es >>= eltUser
return AuthToken{ authToken = t
, authPerms = words p
, authUser = user
}
toUser :: String -> ErrM User
toUser s = parseDoc eltUser s
eltUser :: Element -> Maybe User
eltUser u = do
nid <- pAttr "nsid" u `mplus` pAttr "id" u
uname <- pAttr "username" u `mplus` (fmap strContent $ findChild (nsName "username") u)
let fname = pAttr "fullname" u `mplus` (fmap strContent $ findChild (nsName "realname") u )
let pro = eltBool "ispro" u
let adm = eltBool "isadmin" u
return
nullUser{ userName = uname
, userId = nid
, userFullName = fname
, userIsPro = pro
, userIsAdmin = adm
}
toGroupList :: String -> ErrM [Group]
toGroupList s = parseDoc eltGroupList s
eltGroupList :: Element -> Maybe [Group]
eltGroupList e = ifNamed "groups" e $ do
let ls = pNodes "group" (children e)
mapM eltGroup ls
toGroup :: String -> ErrM Group
toGroup s = parseDoc eltGroup s
-- | Decode a <group> element.  The id may appear as either "nsid" or
-- "id", and the name either as an attribute or a child element.
eltGroup :: Element -> Maybe Group
eltGroup u = ifNamed "group" u $ do
  nid <- pAttr "nsid" u `mplus` pAttr "id" u
  gname <- pAttr "groupname" u `mplus` (fmap strContent $ findChild (nsName "groupname") u)
  return
    Group{ groupId = nid
         , groupName = gname
         , groupMembers = fmap fromIntegral $ eltIntAttr "members" u
           -- BUG FIX: this previously re-read the "members" attribute
           -- (copy/paste of the line above); the online-member count
           -- is carried in the "online" attribute.
         , groupIsOnline = fmap fromIntegral $ eltIntAttr "online" u
         , groupChatId = pAttr "chatid" u `mplus` pAttr "chatnsid" u
         , groupInChat = fmap fromIntegral $ eltIntAttr "inchat" u
         }
toPlaces :: String -> ErrM (PlaceQuery,[Place])
toPlaces s = parseDoc eltPlaces s
toPlacesList :: String -> ErrM [Place]
toPlacesList s = parseDoc eltPlacesList s
eltPlaceQuery :: Element -> Maybe PlaceQuery
eltPlaceQuery e = ifNamed "places" e $ do
let qu = pAttr "query" e
let la = pAttr "latitude" e
let lo = pAttr "longitude" e
let ac = pAttr "accuracy" e >>= readMb
t <- pAttr "total" e >>= readMb
return
PlaceQuery
{ placeQuery = qu
, placeQueryLatitude = la
, placeQueryLongitude = lo
, placeQueryAccuracy = ac
, placeTotal = t
}
eltPlaces :: Element -> Maybe (PlaceQuery, [Place])
eltPlaces e = ifNamed "places" e $ do
q <- eltPlaceQuery e
let ls = pNodes "place" (children e)
ps <- mapM eltPlace ls
return (q, ps)
-- | Decode a bare <places> element into its list of places.
eltPlacesList :: Element -> Maybe [Place]
eltPlacesList e = ifNamed "places" e $
  mapM eltPlace (pNodes "place" (children e))
eltPlace :: Element -> Maybe Place
eltPlace e = ifNamed "place" e $ do
pid <- pAttr "place_id" e
woeid <- pAttr "woeid" e
lat <- pAttr "latitude" e
long <- pAttr "longitude" e
url <- pAttr "place_url" e
ty <- pAttr "place_type" e
let d = strContent e
return Place
{ placeId = pid
, placeWOEId = woeid
, placeLat = lat
, placeLong = long
, placeURL = url
, placeType = ty
, placeDesc = d
}
toBlogs :: String -> ErrM [Blog]
toBlogs s = parseDoc eltBlogsList s
eltBlogsList :: Element -> Maybe [Blog]
eltBlogsList e = ifNamed "blogs" e $ do
let ls = pNodes "blog" (children e)
mapM eltBlog ls
eltBlog :: Element -> Maybe Blog
eltBlog e = ifNamed "blog" e $ do
bid <- pAttr "id" e
nm <- pAttr "name" e
npwd <- eltBool "needspassword" e
url <- pAttr "url" e
return Blog
{ blogId = bid
, blogName = nm
, blogNeedsPW = npwd
, blogURL = url
}
toPlaceTypes :: String -> ErrM [PlaceType]
toPlaceTypes s = parseDoc eltPlaceTypeList s
eltPlaceTypeList :: Element -> Maybe [PlaceType]
eltPlaceTypeList e = ifNamed "place_types" e $ do
let ls = pNodes "place_type" (children e)
mapM eltPlaceType ls
eltPlaceType :: Element -> Maybe PlaceType
eltPlaceType e = ifNamed "place_type" e $ do
return PlaceType
{ placeTypeId = fromMaybe "" (pAttr "place_type_id" e)
, placeTypeName = strContent e
}
toLocationPlace :: String -> ErrM LocationPlace
toLocationPlace s = parseDoc eltLocationPlace s
eltLocationPlace :: Element -> Maybe LocationPlace
eltLocationPlace e = do
pid <- pAttr "place_id" e
woeid <- pAttr "woeid" e
lat <- pAttr "latitude" e
long <- pAttr "longitude" e
url <- pAttr "place_url" e
let ty = fromMaybe (qName $ elName e) $ pAttr "place_type" e
let d = strContent e
cs <- mapM eltLocationPlace (children e)
return LocationPlace
{ locationPlaceId = pid
, locationPlaceWOEId = woeid
, locationPlaceLat = lat
, locationPlaceLong = long
, locationPlaceURL = url
, locationPlaceType = ty
, locationPlaceDesc = d
, locationPlaceDetails = cs
}
-- | Decode a content_type attribute: 1 = photo, 2 = screenshot,
-- anything else (including unparsable) = other.
toContentType :: String -> ErrM ContentType
toContentType = parseDoc eltContentType
eltContentType :: Element -> Maybe ContentType
eltContentType e = do
  raw <- pAttr "content_type" e
  return $ case (reads raw :: [(Int, String)]) of
    ((1, _):_) -> ContentPhoto
    ((2, _):_) -> ContentScreenshot
    _          -> ContentOther
-- | Decode the named privacy attribute into a 'Privacy' value.
toPrivacy :: String -> String -> ErrM Privacy
toPrivacy x s = parseDoc (eltPrivacy x) s
-- | Map the numeric privacy code onto 'Privacy'.
-- NOTE(review): codes 5 and 6 both decode to @Private False True@ —
-- looks like a possible copy/paste; confirm against the Flickr
-- privacy_filter documentation.
eltPrivacy :: String -> Element -> Maybe Privacy
eltPrivacy tg e = do
  x <- pAttr tg e
  let getV ((v,_):_) = Just (v::Int)
      getV _ = Nothing
  case getV $ reads x of
    Just 0 -> return Public
    Just 1 -> return Public
    Just 2 -> return (Private False False)
    Just 3 -> return (Private True{-friends-} True{-family-})
    Just 4 -> return (Private True{-friends-} False{-family-})
    Just 5 -> return (Private False{-friends-} True{-family-})
    Just 6 -> return (Private False{-friends-} True{-family-})
    -- 'fail' in Maybe is Nothing; the message is documentation only
    _ -> fail ("unexpected privacy setting: " ++ x)
toBool :: String -> String -> ErrM Bool
toBool x s = parseDoc (eltBool x) s
eltBool :: String -> Element -> Maybe Bool
eltBool tg e = do
x <- pAttr tg e
let getV ((v,_):_) = Just (v::Int)
getV _ = Nothing
case getV $ reads x of
Just 0 -> return False
Just 1 -> return True
_ -> case map toLower x of
"true" -> return True
"false" -> return False
_ -> fail ("unexpected bool value: " ++ x)
-- | Decode a response carrying a single integer attribute.
toSafetyLevel :: String -> String -> ErrM Int
toSafetyLevel x s = parseDoc (eltIntAttr x) s
-- | Read the named attribute and parse it as an Int; Nothing when the
-- attribute is absent or not numeric.
eltIntAttr :: String -> Element -> Maybe Int
eltIntAttr tg e = do
  raw <- pAttr tg e
  case (reads raw :: [(Int, String)]) of
    ((v, _):_) -> return v
    _          -> fail ("unexpected non-Int value: " ++ raw)
-- | Decode a response carrying a single string attribute.
toString :: String -> String -> ErrM String
toString x s = parseDoc (eltStringAttr x) s
-- | Read the named attribute, if present.
eltStringAttr :: String -> Element -> Maybe String
eltStringAttr = pAttr
toItems :: String -> ErrM [Item]
toItems s = parseDoc eltItems s
eltItems :: Element -> Maybe [Item]
eltItems e = ifNamed "items" e $ do
let ls = pNodes "item" (children e)
mapM eltItem ls
eltItem :: Element -> Maybe Item
eltItem e = ifNamed "item" e $ do
ty <- pAttr "type" e
iid <- pAttr "id" e
own <- pAttr "owner" e
prim <- eltIntAttr "primary" e
serv <- pAttr "server" e
sec <- pAttr "secret" e
let comold = fromMaybe 0 $ eltIntAttr "commentsold" e
comnew = fromMaybe 0 $ eltIntAttr "commentsnew" e
com = fromMaybe 0 $ eltIntAttr "comments" e
vie <- eltIntAttr "views" e
npho <- eltIntAttr "photos" e
more <- eltBool "more" e
let tit = fmap strContent $ findChild (nsName "title") e
let act = findChild (nsName "activity") e >>= eltActivity
return Item
{ itType = ty
, itId = iid
, itTitle = tit
, itActivity = act
, itOwner = own
, itSecret = sec
, itServer = serv
, itPhotos = fromIntegral npho
, itPrimary = fromIntegral prim
, itComments = fromIntegral (com + comold + comnew)
, itViews = fromIntegral vie
, itMore = more
}
eltActivity :: Element -> Maybe [Activity]
eltActivity e = do
let es = pNodes "event" (children e)
mapM eltEvent es
eltEvent :: Element -> Maybe Activity
eltEvent e = do
ty <- pAttr "type" e
uid <- pAttr "user" e
usr <- pAttr "username" e
dat <- pAttr "dateadded" e
let s = strContent e
return Activity
{ actType = ty
, actUser = nullUser{userName=usr,userId=uid}
, actDate = dat
, actContent = s
}
toContactList :: String -> ErrM [Contact]
toContactList s = parseDoc eltContactList s
eltContactList :: Element -> Maybe [Contact]
eltContactList e = ifNamed "contacts" e $ do
let ls = pNodes "contact" (children e)
mapM eltContact ls
eltContact :: Element -> Maybe Contact
eltContact e = do
cid <- pAttr "nsid" e
usr <- eltUser e
let ico = eltBool "iconserver" e
let fri = eltBool "friend" e
let fam = eltBool "family" e
let ign = eltBool "ignored" e
return Contact
{ conId = cid
, conUser = usr
, conIcon = ico
, conIsFriend = fri
, conIsFamily = fam
, conIgnored = ign
}
toPhotoList :: String -> ErrM (PhotoContext, [Photo])
toPhotoList s = parseDoc eltPhotoList s
toPhotoPair :: String -> ErrM (Photo,Photo)
toPhotoPair s = parseDoc eltPhotoPair s
eltPhotoList :: Element -> Maybe (PhotoContext, [Photo])
eltPhotoList e = ifNamed "photos" e $ do
ls <- mapM eltPhoto $ pNodes "photo" (children e)
c <- eltPhotoContext e
return (c, ls)
eltPhotoPair :: Element -> Maybe (Photo, Photo)
eltPhotoPair e = do
f <- findChild (nsName "prevphoto") e >>= eltPhoto
s <- findChild (nsName "nextphoto") e >>= eltPhoto
return (f,s)
eltPhoto :: Element -> Maybe Photo
eltPhoto e = do
pid <- pAttr "id" e
let own = pAttr "owner" e
sec <- pAttr "secret" e
tit <- pAttr "title" e `mplus` fmap strContent (findChild (nsName "title") e)
let url = pAttr "url" e
return Photo
{ photoId = pid
, photoOwner = fmap (\ x -> nullUser{userId=x}) own
, photoURL = url
, photoSecret = sec
, photoServer = fmap fromIntegral (eltIntAttr "server" e)
, photoFarm = pAttr "farm" e
, photoLicense = pAttr "license" e
, photoTitle = tit
, photoPublic = eltBool "ispublic" e
, photoFriend = eltBool "isfriend" e
, photoFamily = eltBool "isfamily" e
}
eltPhotoContext :: Element -> Maybe PhotoContext
eltPhotoContext e =
return PhotoContext
{ photoCtxtPage = eltIntAttr "page" e
, photoCtxtPages = eltIntAttr "pages" e
, photoCtxtPerPage = eltIntAttr "perpage" e
, photoCtxtTotal = eltIntAttr "total" e
}
toCategory :: String -> ErrM Category
toCategory s = parseDoc eltCategory s
eltCategory :: Element -> Maybe Category
eltCategory e = do
nm <- pAttr "name" e
pth <- pAttr "path" e
let mid = pAttr "id" e
pts <- pAttr "pathids" e
let ls = children e
let cs = mapMaybe eltGroupCat ls
return Category
{ catName = nm
, catId = mid
, catPath = pth
, catPaths = pts
, catSubs = cs
}
-- | A group-category tree node is either a sub-category or a group.
eltGroupCat :: Element -> Maybe GroupCat
eltGroupCat e
  | tag == nsName "subcat" = fmap SubCat (eltSubCategory e)
  | tag == nsName "group"  = fmap AGroup (eltGroup e)
  | otherwise              = Nothing
  where tag = elName e
-- | Decode a <subcat> element: id, name and photo count.
eltSubCategory :: Element -> Maybe SubCategory
eltSubCategory e = do
  ident <- pAttr "id" e
  label <- pAttr "name" e
  total <- eltIntAttr "count" e
  return SubCategory
    { subCatId = ident
    , subName  = label
    , subCount = fromIntegral total
    }
eltBandwidth :: Element -> Maybe Bandwidth
eltBandwidth e = do
mx <- eltIntAttr "maxbytes" e
let xkb = eltIntAttr "maxkb" e
us <- eltIntAttr "usedbytes" e
let uskb = eltIntAttr "usedkb" e
re <- eltIntAttr "remainingbytes" e
let rekb = eltIntAttr "remainingkb" e
return Bandwidth
{ bandWidthBytes = fromIntegral mx
, bandWidthKB = fmap fromIntegral xkb
, bandWidthUsedBytes = fromIntegral us
, bandWidthUsedKB = fmap fromIntegral uskb
, bandWidthRemainingBytes = fromIntegral re
, bandWidthRemainingKB = fmap fromIntegral rekb
}
eltFileSize :: Element -> Maybe FileSize
eltFileSize e = do
fs <- eltIntAttr "maxbytes" e
let fskb = eltIntAttr "maxkb" e
return FileSize
{ fileSizeBytes = fromIntegral fs
, fileSizeKB = fmap fromIntegral fskb
}
-- | Decode photoset quota info: sets created so far plus how many
-- more may be created (Nothing when no numeric limit is reported).
-- NOTE(review): the "remaining" attribute value is compared against
-- the literal string "remaining"; presumably the API signals
-- "unlimited" with a non-numeric token — confirm which sentinel
-- Flickr actually sends.
eltPhotosetQuota :: Element -> Maybe PhotosetQuota
eltPhotosetQuota e = do
  c <- eltIntAttr "created" e
  z <- pAttr "remaining" e
  let
    f = case z of
          "remaining" -> Nothing
          x -> case reads x of
                 ((v,_):_) -> Just v
                 _ -> Nothing
  return PhotosetQuota
    { photosetCreated = fromIntegral c
    , photosetRemaining = f
    }
toPhotoset :: String -> ErrM Photoset
toPhotoset s = parseDoc eltPhotoset s
eltPhotoset :: Element -> Maybe Photoset
eltPhotoset e = do
pid <- pAttr "id" e
uid <- pAttr "owner" e
prim <- pAttr "primary" e
c <- eltIntAttr "photos" e
tit <- pAttr "title" e
desc <- pAttr "description" e
return Photoset
{ photosetId = pid
, photosetOwner = uid
, photosetPrimaryPhoto = prim
, photosetPhotos = c
, photosetTitle = tit
, photosetDescription = desc
}
toPhotoPool :: String -> ErrM PhotoPool
toPhotoPool s = parseDoc eltPhotoPool s
eltPhotoPool :: Element -> Maybe PhotoPool
eltPhotoPool e = do
pid <- pAttr "id" e
tit <- pAttr "title" e
return PhotoPool
{ photoPoolId = pid
, photoPoolTitle = tit
}
toPhotoDetails :: String -> ErrM PhotoDetails
toPhotoDetails s = parseDoc eltPhotoDetails s
eltPhotoDetails :: Element -> Maybe PhotoDetails
eltPhotoDetails e = do
ph <- eltPhoto e
let
rot = eltIntAttr "rotation" e
fav = eltBool "isfavorite" e
lic = pAttr "license" e
ofm = pAttr "originalformat" e
ose = pAttr "originalsecret" e
tit = fmap strContent (findChild (nsName "title") e)
des = fmap strContent (findChild (nsName "description") e)
es = children e
isp = pNode "visibility" es >>= eltBool "ispublic"
fam = pNode "visibility" es >>= eltBool "isfamily"
fri = pNode "visibility" es >>= eltBool "isfriend"
per = do
ch <- pNode "permissions" es
a <- eltIntAttr "permcomment" ch
b <- eltIntAttr "permaddmeta" ch
return (a,b)
edi = do
ch <- pNode "editability" es
a <- eltBool "cancomment" ch
b <- eltBool "canaddmeta" ch
return (a,b)
ns = mapMaybe eltNote (fromMaybe [] $ fmap children $ pNode "notes" es)
ts = mapMaybe eltTagDetails (fromMaybe [] $ fmap children $ pNode "tags" es)
us = mapMaybe eltURLDetails (fromMaybe [] $ fmap children $ pNode "urls" es)
d <- pNode "dates" es >>= eltPhotoDate
return PhotoDetails
{ photoDetailsPhoto = ph
, photoDetailsRotation = rot
, photoDetailsLicense = lic
, photoDetailsIsFavorite = fav
, photoDetailsIsPublic = isp
, photoDetailsIsFamily = fam
, photoDetailsIsFriend = fri
, photoDetailsOrigFormat = ofm
, photoDetailsOrigSecret = ose
, photoDetailsTitle = tit
, photoDetailsDesc = des
, photoDetailsDates = d
, photoDetailsPerms = per
, photoDetailsEdits = edi
, photoDetailsComments = pNode "comments" es >>= intContent
, photoDetailsNotes = ns
, photoDetailsTags = ts
, photoDetailsURLs = us
}
-- | Decode a <dates> element; posted/taken/lastupdate are required,
-- granularity is optional.
eltPhotoDate :: Element -> Maybe PhotoDate
eltPhotoDate e = do
  posted  <- pAttr "posted" e
  taken   <- pAttr "taken" e
  updated <- pAttr "lastupdate" e
  return PhotoDate
    { photoDatePosted      = posted
    , photoDateTaken       = taken
    , photoDateLastUpdate  = updated
    , photoDateGranularity = eltIntAttr "takengranularity" e
    }
eltNote :: Element -> Maybe Note
eltNote e = do
i <- pAttr "id" e
uid <- pAttr "author" e
nm <- pAttr "authorname" e
let x = eltIntAttr "x" e
y = eltIntAttr "y" e
w = eltIntAttr "w" e
h = eltIntAttr "h" e
s = strContent e
return Note
{ noteId = i
, noteAuthor = uid
, noteAuthorName = nm
, notePoint = x >>= \ xv -> y >>= \ yv -> return (Point xv yv)
, noteSize = w >>= \ wv -> h >>= \ hv -> return (Size wv hv)
, noteText = s
}
eltTagDetails :: Element -> Maybe TagDetails
eltTagDetails e = do
i <- pAttr "id" e
uid <- pAttr "author" e
let c = eltIntAttr "count" e
let s = eltIntAttr "score" e
rs <- (pAttr "raw" e >>= \ x -> return [x]) `mplus`
(return (map strContent (pNodes "raw" (children e))))
return TagDetails
{ tagDetailsId = i
, tagDetailsAuthor = uid
, tagDetailsRaw = rs
, tagDetailsName = strContent e
, tagDetailsCount = c
, tagDetailsScore = s
}
eltURLDetails :: Element -> Maybe URLDetails
eltURLDetails e = do
ty <- pAttr "type" e
return URLDetails
{ urlDetailsType = ty
, urlDetailsURL = strContent e
}
toPhotoCountList :: String -> ErrM [PhotoCount]
toPhotoCountList s = parseDoc eltPhotoCountList s
eltPhotoCountList :: Element -> Maybe [PhotoCount]
eltPhotoCountList e = ifNamed "photocounts" e $ do
let ls = mapMaybe eltPhotoCount $ pNodes "photocount" (children e)
return ls
eltPhotoCount :: Element -> Maybe PhotoCount
eltPhotoCount e = ifNamed "photocount" e $ do
c <- eltIntAttr "count" e
fd <- pAttr "fromdate" e
td <- pAttr "todate" e
return PhotoCount
{ photoCount = c
, photoCountFrom = fd
, photoCountTo = td
}
toEXIFList :: String -> ErrM [EXIF]
toEXIFList s = parseDoc eltEXIFList s
eltEXIFList :: Element -> Maybe [EXIF]
eltEXIFList e = do
let ls = mapMaybe eltEXIF $ pNodes "exif" (children e)
return ls
eltEXIF :: Element -> Maybe EXIF
eltEXIF e = ifNamed "exif" e $ do
ts <- pAttr "tagspace" e
tsid <- pAttr "tagspaceid" e
tid <- pAttr "tag" e
lbl <- pAttr "label" e
let rw = fmap strContent $ findChild (nsName "raw") e
let cl = fmap strContent $ findChild (nsName "clean") e
return EXIF
{ exifTag = EXIFTag{exifTagId=tid,exifTagspace=ts,exifTagspaceId=tsid}
, exifLabel = lbl
, exifRaw = rw
, exifClean = cl
}
toPermissions :: String -> ErrM Permissions
toPermissions s = parseDoc eltPermissions s
eltPermissions :: Element -> Maybe Permissions
eltPermissions e = do
i <- pAttr "id" e
pu <- eltBool "ispublic" e
fa <- eltBool "isfamily" e
fr <- eltBool "isfriend" e
pc <- eltIntAttr "permcomment" e
pa <- eltIntAttr "permaddmeta" e
return Permissions
{ permId = i
, permIsPublic = pu
, permIsFriend = fr
, permIsFamily = fa
, permCommentLevel = pc
, permAddMetaLevel = pa
}
toSizeList :: String -> ErrM [SizeDetails]
toSizeList s = parseDoc eltSizeList s
eltSizeList :: Element -> Maybe [SizeDetails]
eltSizeList e = do
let ls = mapMaybe eltSize $ pNodes "size" (children e)
return ls
eltSize :: Element -> Maybe SizeDetails
eltSize e = ifNamed "size" e $ do
la <- pAttr "label" e
w <- eltIntAttr "width" e
h <- eltIntAttr "height" e
src <- pAttr "source" e
url <- pAttr "url" e
return SizeDetails
{ sizeDetailsLabel = la
, sizeDetailsWidth = w
, sizeDetailsHeight = h
, sizeDetailsSource = src
, sizeDetailsURL = url
}
-- | Decode a <photoid> response (the id is the element text).
toPhotoID :: String -> ErrM PhotoID
toPhotoID = parseDoc eltPhotoID
eltPhotoID :: Element -> Maybe PhotoID
eltPhotoID e = ifNamed "photoid" e (Just (strContent e))
-- | Decode a comment id carried in the "id" attribute.
toCommentID :: String -> ErrM CommentID
toCommentID = parseDoc eltCommentID
eltCommentID :: Element -> Maybe CommentID
eltCommentID = pAttr "id"
-- | Decode a note id carried in the "id" attribute.
toNoteID :: String -> ErrM NoteID
toNoteID = parseDoc eltNoteID
eltNoteID :: Element -> Maybe NoteID
eltNoteID = pAttr "id"
toCommentList :: String -> ErrM [Comment]
toCommentList s = parseDoc eltCommentList s
eltCommentList :: Element -> Maybe [Comment]
eltCommentList e =
return $ mapMaybe eltComment $ pNodes "comment" (children e)
eltComment :: Element -> Maybe Comment
eltComment e = ifNamed "comment" e $ do
i <- pAttr "id" e
au <- eltUser e
da <- pAttr "datecreate" e
return Comment
{ commentId = i
, commentAuthor = au
, commentDate = da
, commentURL = pAttr "permalink" e `mplus` pAttr "url" e
, commentText = strContent e
}
-- | Decode a <location> element into (latitude, longitude, accuracy);
-- accuracy is optional.
toGeoLocation :: String -> ErrM GeoLocation
toGeoLocation = parseDoc eltGeoLocation
eltGeoLocation :: Element -> Maybe GeoLocation
eltGeoLocation e = ifNamed "location" e $ do
  lat <- pAttr "latitude" e
  lon <- pAttr "longitude" e
  return (lat, lon, eltIntAttr "accuracy" e)
toLicenseList :: String -> ErrM [License]
toLicenseList s = parseDoc eltLicenseList s
eltLicenseList :: Element -> Maybe [License]
eltLicenseList e =
return $ mapMaybe eltLicense $ pNodes "license" (children e)
eltLicense :: Element -> Maybe License
eltLicense e = ifNamed "license" e $ do
i <- pAttr "id" e
nm <- pAttr "name" e
url <- pAttr "url" e
return License
{ licenseId = i
, licenseName = nm
, licenseLink = url
}
toTicketList :: String -> ErrM [Ticket]
toTicketList s = parseDoc eltTicketList s
eltTicketList :: Element -> Maybe [Ticket]
eltTicketList e =
return $ mapMaybe eltTicket $ pNodes "ticket" (children e)
eltTicket :: Element -> Maybe Ticket
eltTicket e = ifNamed "ticket" e $ do
i <- pAttr "id" e
c <- eltIntAttr "complete" e
p <- pAttr "photoid" e
let isInv = fromMaybe False (eltBool "invalid" e)
return Ticket
{ ticketId = i
, ticketComplete = c
, ticketInvalid = isInv
, ticketPhoto = p
}
toClusterList :: String -> ErrM [Cluster]
toClusterList s = parseDoc eltClusterList s
eltClusterList :: Element -> Maybe [Cluster]
eltClusterList e =
return $ mapMaybe eltCluster $ pNodes "cluster" (children e)
eltCluster :: Element -> Maybe Cluster
eltCluster e = ifNamed "cluster" e $ do
t <- eltIntAttr "total" e
let ts = pNodes "tag" (children e)
return Cluster
{ clusterCount = t
, clusterTags = map strContent ts
}
toTagDetailsList :: String -> ErrM [TagDetails]
toTagDetailsList s = parseDoc eltTagDetailsList s
eltTagDetailsList :: Element -> Maybe [TagDetails]
eltTagDetailsList e = do
t <- pNode "tags" (children e)
return $ mapMaybe eltTagDetails $ pNodes "tag" (children t)
toNamespaceList :: String -> ErrM (NameContext, [Namespace])
toNamespaceList s = parseDoc eltNamespaceList s
eltNamespaceList :: Element -> Maybe (NameContext, [Namespace])
eltNamespaceList e = ifNamed "namespaces" e $ do
ls <- mapM eltNamespace $ pNodes "namespace" (children e)
c <- eltResContext e
return (c, ls)
eltResContext :: Element -> Maybe (ResContext a)
eltResContext e =
return ResContext
{ resCtxtPage = eltIntAttr "page" e
, resCtxtPages = eltIntAttr "pages" e
, resCtxtPerPage = eltIntAttr "perpage" e
, resCtxtTotal = eltIntAttr "total" e
}
eltNamespace :: Element -> Maybe Namespace
eltNamespace e = ifNamed "namespace" e $ do
return Namespace
{ namespaceUsage = fromIntegral $ fromMaybe 0 (eltIntAttr "usage" e)
, namespacePreds = fromIntegral $ fromMaybe 0 (eltIntAttr "predicates" e)
, namespaceName = strContent e
}
eltMTPair :: Element -> Maybe MachineTagPair
eltMTPair e = ifNamed "pair" e $ do
return MachineTagPair
{ mtPairNamespace = fromMaybe "" (pAttr "namespace" e)
, mtPairPredicate = fromMaybe "" (pAttr "predicate" e)
, mtPairUsage = fromIntegral $ fromMaybe 0 (eltIntAttr "usage" e)
, mtPairName = strContent e
}
eltMTPred :: Element -> Maybe MachineTagPred
eltMTPred e = ifNamed "predicate" e $ do
return MachineTagPred
{ mtPredNamespaces = fromIntegral $ fromMaybe 0 (eltIntAttr "namespaces" e)
, mtPredUsage = fromIntegral $ fromMaybe 0 (eltIntAttr "usage" e)
, mtPredName = strContent e
}
eltMTag :: Element -> Maybe MachineTag
eltMTag e = ifNamed "value" e $ do
return MachineTag
{ mTagNamespace = ""
, mTagPredicate = ""
, mTagUsage = fromIntegral $ fromMaybe 0 (eltIntAttr "usage" e)
, mTagValue = strContent e
}
toMachineTagList :: String -> ErrM (ResContext MachineTag, [MachineTag])
toMachineTagList s = parseDoc eltMachineTagList s
eltMachineTagList :: Element -> Maybe (ResContext MachineTag, [MachineTag])
eltMachineTagList e = ifNamed "values" e $ do
ls <- mapM eltMTag $ pNodes "value" (children e)
c <- eltResContext e
return (c, ls)
toPredList :: String -> ErrM (ResContext MachineTagPred, [MachineTagPred])
toPredList s = parseDoc eltMachinePredList s
eltMachinePredList :: Element -> Maybe (ResContext MachineTagPred, [MachineTagPred])
eltMachinePredList e = ifNamed "predicates" e $ do
ls <- mapM eltMTPred $ pNodes "predicate" (children e)
c <- eltResContext e
return (c, ls)
toPairList :: String -> ErrM (ResContext MachineTagPair, [MachineTagPair])
toPairList s = parseDoc eltMachinePairList s
eltMachinePairList :: Element -> Maybe (ResContext MachineTagPair, [MachineTagPair])
eltMachinePairList e = ifNamed "pairs" e $ do
ls <- mapM eltMTPair $ pNodes "pair" (children e)
c <- eltResContext e
return (c, ls)
-- | Decode a <tags> response into its list of tags.
toTagInfoList :: String -> ErrM [TagInfo]
toTagInfoList s = parseDoc eltTagInfoList s
eltTagInfoList :: Element -> Maybe [TagInfo]
eltTagInfoList e = ifNamed "tags" e $ do
  mapM eltTagInfo $ pNodes "tag" (children e)
-- | Decode one <tag>: the tag text plus an optional usage count.
eltTagInfo :: Element -> Maybe TagInfo
eltTagInfo e = do
  let c = pAttr "count" e
  return TagInfo
    { tagName  = strContent e
      -- 'maybe Nothing id (fmap readMb c)' hand-rolled the monadic
      -- join; (>>=) expresses it directly with identical behaviour.
    , tagCount = c >>= readMb
    }
|
BeautifulDestinations/hs-flickr
|
Flickr/Types/Import.hs
|
bsd-3-clause
| 26,748
| 10
| 16
| 6,659
| 9,227
| 4,543
| 4,684
| 736
| 9
|
module DataAnalysis.Application.Handler.Home where
import Yesod
import DataAnalysis.Application.Foundation
-- | The application root has no page of its own: every visitor is
-- redirected straight to the import screen.
getHomeR :: Handler Html
getHomeR = redirect ImportR
|
teuffy/min-var-ci
|
src/DataAnalysis/Application/Handler/Home.hs
|
mit
| 163
| 0
| 5
| 18
| 33
| 20
| 13
| 5
| 1
|
{-# LANGUAGE ForeignFunctionInterface #-}
-- |Asymmetric cipher decryption using encrypted symmetric key. This
-- is an opposite of "OpenSSL.EVP.Open".
module OpenSSL.EVP.Seal
( seal
, sealBS
, sealLBS
)
where
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy.Char8 as L8
import Foreign
import Foreign.C
import OpenSSL.EVP.Cipher hiding (cipher)
import OpenSSL.EVP.PKey
import OpenSSL.EVP.Internal
import OpenSSL.Utils
foreign import ccall unsafe "EVP_SealInit"
_SealInit :: Ptr EVP_CIPHER_CTX
-> Cipher
-> Ptr (Ptr CChar)
-> Ptr CInt
-> CString
-> Ptr (Ptr EVP_PKEY)
-> CInt
-> IO CInt
-- | Initialise a sealing operation via @EVP_SealInit@: allocates a
-- cipher context plus the C-side buffers, and returns the context
-- together with the per-recipient encrypted symmetric keys and the
-- IV.  All temporary buffers are freed on both the success and the
-- failure path via 'cleanup'.
sealInit :: Cipher
         -> [SomePublicKey]
         -> IO (CipherCtx, [B8.ByteString], B8.ByteString)
sealInit _ []
    = fail "sealInit: at least one public key is required"
sealInit cipher pubKeys
    = do ctx <- newCipherCtx
         -- Allocate a list of buffers to write encrypted symmetric
         -- keys. Each keys will be at most pkeySize bytes long.
         encKeyBufs <- mapM mallocEncKeyBuf pubKeys
         -- encKeyBufs is [Ptr a] but we want Ptr (Ptr CChar).
         encKeyBufsPtr <- newArray encKeyBufs
         -- Allocate a buffer to write lengths of each encrypted
         -- symmetric keys.
         encKeyBufsLenPtr <- mallocArray nKeys
         -- Allocate a buffer to write IV.
         ivPtr <- mallocArray (cipherIvLength cipher)
         -- Create Ptr (Ptr EVP_PKEY) from [PKey]. Don't forget to
         -- apply touchForeignPtr to each PKey's later.
         pkeys <- mapM toPKey pubKeys
         pubKeysPtr <- newArray $ map unsafePKeyToPtr pkeys
         -- Prepare an IO action to free buffers we allocated above.
         let cleanup = do mapM_ free encKeyBufs
                          free encKeyBufsPtr
                          free encKeyBufsLenPtr
                          free ivPtr
                          free pubKeysPtr
                          mapM_ touchPKey pkeys
         -- Call EVP_SealInit finally.
         ret <- withCipherCtxPtr ctx $ \ ctxPtr ->
                _SealInit ctxPtr cipher encKeyBufsPtr encKeyBufsLenPtr ivPtr pubKeysPtr (fromIntegral nKeys)
         -- A zero return from EVP_SealInit means failure: free the
         -- buffers and raise the OpenSSL error.  Otherwise marshal
         -- the encrypted keys and IV out before freeing.
         if ret == 0 then
             cleanup >> raiseOpenSSLError
           else
             do encKeysLen <- peekArray nKeys encKeyBufsLenPtr
                encKeys <- mapM B8.packCStringLen $ zip encKeyBufs (fromIntegral `fmap` encKeysLen)
                iv <- B8.packCStringLen (ivPtr, cipherIvLength cipher)
                cleanup
                return (ctx, encKeys, iv)
    where
      nKeys :: Int
      nKeys = length pubKeys
      -- Each encrypted key needs at most 'pkeySize' bytes.
      mallocEncKeyBuf :: (PKey k, Storable a) => k -> IO (Ptr a)
      mallocEncKeyBuf = mallocArray . pkeySize
-- | @'seal'@ lazily encrypts a stream of data. The input string
-- doesn't necessarily have to be finite.
seal :: Cipher          -- ^ symmetric cipher algorithm to use
     -> [SomePublicKey] -- ^ a list of public keys to encrypt a
                        --   symmetric key. At least one public key
                        --   must be supplied. If two or more keys are
                        --   given, the symmetric key is encrypted
                        --   under each of them, so that any of the
                        --   corresponding private keys can decrypt
                        --   the message.
     -> String          -- ^ input string to encrypt
     -> IO ( String
           , [String]
           , String
           )            -- ^ (encrypted string, list of encrypted
                        --   asymmetric keys, IV)
{-# DEPRECATED seal "Use sealBS or sealLBS instead." #-}
seal cipher pubKeys input = do
    (encrypted, encKeys, iv) <- sealLBS cipher pubKeys (L8.pack input)
    return (L8.unpack encrypted, map B8.unpack encKeys, B8.unpack iv)
-- | @'sealBS'@ strictly encrypts a chunk of data.
sealBS :: Cipher           -- ^ symmetric cipher algorithm to use
       -> [SomePublicKey]  -- ^ list of public keys to encrypt a
                           --   symmetric key
       -> B8.ByteString    -- ^ input string to encrypt
       -> IO ( B8.ByteString
             , [B8.ByteString]
             , B8.ByteString
             )             -- ^ (encrypted string, list of encrypted
                           --   asymmetric keys, IV)
sealBS cipher pubKeys plaintext = do
    (ctx, encryptedKeys, iv) <- sealInit cipher pubKeys
    ciphertext <- cipherStrictly ctx plaintext
    return (ciphertext, encryptedKeys, iv)
-- | @'sealLBS'@ lazily encrypts a stream of data. The input string
-- doesn't necessarily have to be finite.
sealLBS :: Cipher           -- ^ symmetric cipher algorithm to use
        -> [SomePublicKey]  -- ^ list of public keys to encrypt a
                            --   symmetric key
        -> L8.ByteString    -- ^ input string to encrypt
        -> IO ( L8.ByteString
              , [B8.ByteString]
              , B8.ByteString
              )             -- ^ (encrypted string, list of encrypted
                            --   asymmetric keys, IV)
sealLBS cipher pubKeys plaintext = do
    (ctx, encryptedKeys, iv) <- sealInit cipher pubKeys
    ciphertext <- cipherLazily ctx plaintext
    return (ciphertext, encryptedKeys, iv)
|
phonohawk/HsOpenSSL
|
OpenSSL/EVP/Seal.hs
|
cc0-1.0
| 5,401
| 0
| 14
| 1,966
| 890
| 475
| 415
| 86
| 2
|
module Quote00001 where
f = map (`Declaration` Nothing)
|
charleso/intellij-haskforce
|
tests/gold/parser/Quote00001.hs
|
apache-2.0
| 57
| 0
| 6
| 9
| 18
| 12
| 6
| 2
| 1
|
module Language.Hareview.Registry where
-- hint
import Language.Haskell.Interpreter hiding ((:=),set)
-- glob
-- import System.FilePath.Glob (compile,globDir)
-- astview-utils
import Language.Hareview.Language (Language)
-- local
-- import Paths_hareview (getDataFileName,getDataDir) -- by cabal
import Language.Hareview.Languages.Languages
-- | Load the set of available languages.  Dynamic discovery via the
-- hint interpreter is currently disabled (see the commented-out
-- variant below in this file); this simply returns the statically
-- linked 'languages' list.
loadLanguages :: IO [Language]
-- The original wrapped a lone 'return' in a redundant 'do' block;
-- 'return languages' alone is equivalent and idiomatic.
loadLanguages = return languages
{-
-- | loads the language registration and all modules in data dir
loadLanguages :: IO [Language]
loadLanguages = do
-- find additional modules in data
(glob,_) <- globDir [compile "data/**/*.hs"] =<< getDataDir
let modules = head glob -- glob is [[FilePath]]
-- run Interpreter
langs' <- runInterpreter $ interpretLangs modules
case langs' of
Right l -> return l
Left err -> error (show err)
-- | interprets the modules and returns all languages found.
interpretLangs :: [FilePath] -> Interpreter [Language]
interpretLangs modules = do
loadModules modules
setTopLevelModules ["Languages"]
return =<< interpret "languages" (as :: [Language])
-}
|
RefactoringTools/HaRe
|
hareview/src/Language/Hareview/Registry.hs
|
bsd-3-clause
| 1,092
| 0
| 7
| 175
| 71
| 47
| 24
| 7
| 1
|
{-# LANGUAGE CPP #-}
----------------------------------------------------------------------------
--
-- Stg to C--: primitive operations
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmPrim (
cgOpApp,
cgPrimOp, -- internal(ish), used by cgCase to get code for a
-- comparison without also turning it into a Bool.
shouldInlinePrimOp
) where
#include "HsVersions.h"
import StgCmmLayout
import StgCmmForeign
import StgCmmEnv
import StgCmmMonad
import StgCmmUtils
import StgCmmTicky
import StgCmmHeap
import StgCmmProf ( costCentreFrom, curCCS )
import DynFlags
import Platform
import BasicTypes
import MkGraph
import StgSyn
import Cmm
import CmmInfo
import Type ( Type, tyConAppTyCon )
import TyCon
import CLabel
import CmmUtils
import PrimOp
import SMRep
import FastString
import Outputable
import Util
#if __GLASGOW_HASKELL__ >= 709
import Prelude hiding ((<*>))
#endif
import Data.Bits ((.&.), bit)
import Control.Monad (liftM, when)
------------------------------------------------------------------------
-- Primitive operations and foreign calls
------------------------------------------------------------------------
{- Note [Foreign call results]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
A foreign call always returns an unboxed tuple of results, one
of which is the state token. This seems to happen even for pure
calls.
Even if we returned a single result for pure calls, it'd still be
right to wrap it in a singleton unboxed tuple, because the result
might be a Haskell closure pointer, we don't want to evaluate it. -}
----------------------------------
-- | Generate code for applying an 'StgOp' to its arguments.
-- Dispatches on the kind of op: foreign calls, the special
-- @tagToEnum#@ primop, ordinary primops (inline or out-of-line), and
-- primcalls.
cgOpApp :: StgOp        -- The op
        -> [StgArg]     -- Arguments
        -> Type         -- Result type (always an unboxed tuple)
        -> FCode ReturnKind

-- Foreign calls
cgOpApp (StgFCallOp fcall _) stg_args res_ty
  = cgForeignCall fcall stg_args res_ty
    -- Note [Foreign call results]

-- tagToEnum# is special: we need to pull the constructor
-- out of the table, and perform an appropriate return.
cgOpApp (StgPrimOp TagToEnumOp) [arg] res_ty
  = ASSERT(isEnumerationTyCon tycon)
    do  { dflags <- getDynFlags
        ; args' <- getNonVoidArgAmodes [arg]
        ; let amode = case args' of [amode] -> amode
                                    _ -> panic "TagToEnumOp had void arg"
        ; emitReturn [tagToClosure dflags tycon amode] }
   where
          -- If you're reading this code in the attempt to figure
          -- out why the compiler panic'ed here, it is probably because
          -- you used tagToEnum# in a non-monomorphic setting, e.g.,
          --     intToTg :: Enum a => Int -> a ; intToTg (I# x#) = tagToEnum# x#
          -- That won't work.
        tycon = tyConAppTyCon res_ty

cgOpApp (StgPrimOp primop) args res_ty = do
    dflags <- getDynFlags
    cmm_args <- getNonVoidArgAmodes args
    case shouldInlinePrimOp dflags primop cmm_args of
        Nothing -> do  -- out-of-line
          let fun = CmmLit (CmmLabel (mkRtsPrimOpLabel primop))
          emitCall (NativeNodeCall, NativeReturn) fun cmm_args

        Just f  -- inline
          -- The result-shape cases below mirror 'getPrimOpResultInfo'.
          | ReturnsPrim VoidRep <- result_info
          -> do f []
                emitReturn []

          | ReturnsPrim rep <- result_info
          -> do dflags <- getDynFlags
                res <- newTemp (primRepCmmType dflags rep)
                f [res]
                emitReturn [CmmReg (CmmLocal res)]

          | ReturnsAlg tycon <- result_info, isUnboxedTupleTyCon tycon
          -> do (regs, _hints) <- newUnboxedTupleRegs res_ty
                f regs
                emitReturn (map (CmmReg . CmmLocal) regs)

          | otherwise -> panic "cgPrimop"
  where
     result_info = getPrimOpResultInfo primop

cgOpApp (StgPrimCallOp primcall) args _res_ty
  = do  { cmm_args <- getNonVoidArgAmodes args
        ; let fun = CmmLit (CmmLabel (mkPrimCallLabel primcall))
        ; emitCall (NativeNodeCall, NativeReturn) fun cmm_args }
-- | Interpret the argument as an unsigned value, assuming the value
-- is given in two's-complement form in the given width.
--
-- Example: @asUnsigned W64 (-1)@ is 18446744073709551615.
--
-- This exists because many array primops take Int# arguments that
-- the code generator treats as unsigned quantities; a CmmInt literal
-- for an array size may therefore carry a negative Integer if the
-- user passed a value larger than 2^(wORD_SIZE_IN_BITS-1) as the
-- Int# literal.
asUnsigned :: Width -> Integer -> Integer
asUnsigned w n = n .&. mask
  where
    -- All-ones mask covering exactly the bits of width w.
    mask = bit (widthInBits w) - 1
-- TODO: Several primop implementations (e.g. 'doNewByteArrayOp') use
-- ByteOff (or some other fixed width signed type) to represent
-- array sizes or indices. This means that these will overflow for
-- large enough sizes.
-- | Decide whether an out-of-line primop should be replaced by an
-- inline implementation. This might happen e.g. if there's enough
-- static information, such as statically known arguments, to emit a
-- more efficient implementation inline.
--
-- Returns 'Nothing' if this primop should use its out-of-line
-- implementation (defined elsewhere) and 'Just' together with a code
-- generating function that takes the output regs as arguments
-- otherwise.
shouldInlinePrimOp :: DynFlags
                   -> PrimOp     -- ^ The primop
                   -> [CmmExpr]  -- ^ The primop arguments
                   -> Maybe ([LocalReg] -> FCode ())

-- New-array allocations are inlined only when the requested size is a
-- statically-known literal small enough for an inline allocation
-- (bounded by maxInlineAllocSize).
shouldInlinePrimOp dflags NewByteArrayOp_Char [(CmmLit (CmmInt n w))]
  | asUnsigned w n <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> doNewByteArrayOp res (fromInteger n)

shouldInlinePrimOp dflags NewArrayOp [(CmmLit (CmmInt n w)), init]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] ->
      doNewArrayOp res (arrPtrsRep dflags (fromInteger n)) mkMAP_DIRTY_infoLabel
        [ (mkIntExpr dflags (fromInteger n),
           fixedHdrSize dflags + oFFSET_StgMutArrPtrs_ptrs dflags)
        , (mkIntExpr dflags (nonHdrSizeW (arrPtrsRep dflags (fromInteger n))),
           fixedHdrSize dflags + oFFSET_StgMutArrPtrs_size dflags)
        ]
        (fromInteger n) init

-- Array copies with a statically-known literal length are always
-- inlined (no allocation involved, so no size bound applies).
shouldInlinePrimOp _ CopyArrayOp
    [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] =
  Just $ \ [] -> doCopyArrayOp src src_off dst dst_off (fromInteger n)

shouldInlinePrimOp _ CopyMutableArrayOp
    [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] =
  Just $ \ [] -> doCopyMutableArrayOp src src_off dst dst_off (fromInteger n)

shouldInlinePrimOp _ CopyArrayArrayOp
    [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] =
  Just $ \ [] -> doCopyArrayOp src src_off dst dst_off (fromInteger n)

shouldInlinePrimOp _ CopyMutableArrayArrayOp
    [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] =
  Just $ \ [] -> doCopyMutableArrayOp src src_off dst dst_off (fromInteger n)

-- Clone/freeze/thaw allocate a fresh array, so they are subject to
-- the same inline-allocation size bound as the New*Op cases above.
shouldInlinePrimOp dflags CloneArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneArray mkMAP_FROZEN_infoLabel res src src_off (fromInteger n)

shouldInlinePrimOp dflags CloneMutableArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneArray mkMAP_DIRTY_infoLabel res src src_off (fromInteger n)

shouldInlinePrimOp dflags FreezeArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneArray mkMAP_FROZEN_infoLabel res src src_off (fromInteger n)

shouldInlinePrimOp dflags ThawArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneArray mkMAP_DIRTY_infoLabel res src src_off (fromInteger n)

-- Small-array variants of the cases above.
shouldInlinePrimOp dflags NewSmallArrayOp [(CmmLit (CmmInt n w)), init]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] ->
      doNewArrayOp res (smallArrPtrsRep (fromInteger n)) mkSMAP_DIRTY_infoLabel
        [ (mkIntExpr dflags (fromInteger n),
           fixedHdrSize dflags + oFFSET_StgSmallMutArrPtrs_ptrs dflags)
        ]
        (fromInteger n) init

shouldInlinePrimOp _ CopySmallArrayOp
    [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] =
  Just $ \ [] -> doCopySmallArrayOp src src_off dst dst_off (fromInteger n)

shouldInlinePrimOp _ CopySmallMutableArrayOp
    [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] =
  Just $ \ [] -> doCopySmallMutableArrayOp src src_off dst dst_off (fromInteger n)

shouldInlinePrimOp dflags CloneSmallArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneSmallArray mkSMAP_FROZEN_infoLabel res src src_off (fromInteger n)

shouldInlinePrimOp dflags CloneSmallMutableArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneSmallArray mkSMAP_DIRTY_infoLabel res src src_off (fromInteger n)

shouldInlinePrimOp dflags FreezeSmallArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneSmallArray mkSMAP_FROZEN_infoLabel res src src_off (fromInteger n)

shouldInlinePrimOp dflags ThawSmallArrayOp [src, src_off, (CmmLit (CmmInt n w))]
  | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) =
      Just $ \ [res] -> emitCloneSmallArray mkSMAP_DIRTY_infoLabel res src src_off (fromInteger n)

-- Fallback: any primop not flagged out-of-line is emitted inline via
-- emitPrimOp; out-of-line primops use their RTS implementation.
shouldInlinePrimOp dflags primop args
  | primOpOutOfLine primop = Nothing
  | otherwise = Just $ \ regs -> emitPrimOp dflags regs primop args
-- TODO: Several primops, such as 'copyArray#', only have an inline
-- implementation (below) but could possibly have both an inline
-- implementation and an out-of-line implementation, just like
-- 'newArray#'. This would lower the amount of code generated,
-- hopefully without a performance impact (needs to be measured).
---------------------------------------------------
-- | Generate code for a primop application: convert the STG
-- arguments to Cmm amodes, then hand everything to 'emitPrimOp'.
cgPrimOp :: [LocalReg]  -- where to put the results
         -> PrimOp      -- the op
         -> [StgArg]    -- arguments
         -> FCode ()
cgPrimOp results op args = do
    dflags <- getDynFlags
    amodes <- getNonVoidArgAmodes args
    emitPrimOp dflags results op amodes
------------------------------------------------------------------------
-- Emitting code for a primop
------------------------------------------------------------------------
emitPrimOp :: DynFlags
-> [LocalReg] -- where to put the results
-> PrimOp -- the op
-> [CmmExpr] -- arguments
-> FCode ()
-- First we handle various awkward cases specially. The remaining
-- easy cases are then handled by translateOp, defined below.
emitPrimOp _ [res] ParOp [arg]
=
-- for now, just implement this in a C function
-- later, we might want to inline it.
emitCCall
[(res,NoHint)]
(CmmLit (CmmLabel (mkForeignLabel (fsLit "newSpark") Nothing ForeignLabelInExternalPackage IsFunction)))
[(CmmReg (CmmGlobal BaseReg), AddrHint), (arg,AddrHint)]
emitPrimOp dflags [res] SparkOp [arg]
= do
-- returns the value of arg in res. We're going to therefore
-- refer to arg twice (once to pass to newSpark(), and once to
-- assign to res), so put it in a temporary.
tmp <- assignTemp arg
tmp2 <- newTemp (bWord dflags)
emitCCall
[(tmp2,NoHint)]
(CmmLit (CmmLabel (mkForeignLabel (fsLit "newSpark") Nothing ForeignLabelInExternalPackage IsFunction)))
[(CmmReg (CmmGlobal BaseReg), AddrHint), ((CmmReg (CmmLocal tmp)), AddrHint)]
emitAssign (CmmLocal res) (CmmReg (CmmLocal tmp))
emitPrimOp dflags [res] GetCCSOfOp [arg]
= emitAssign (CmmLocal res) val
where
val
| gopt Opt_SccProfilingOn dflags = costCentreFrom dflags (cmmUntag dflags arg)
| otherwise = CmmLit (zeroCLit dflags)
emitPrimOp _ [res] GetCurrentCCSOp [_dummy_arg]
= emitAssign (CmmLocal res) curCCS
emitPrimOp dflags [res] ReadMutVarOp [mutv]
= emitAssign (CmmLocal res) (cmmLoadIndexW dflags mutv (fixedHdrSizeW dflags) (gcWord dflags))
emitPrimOp dflags [] WriteMutVarOp [mutv,var]
= do emitStore (cmmOffsetW dflags mutv (fixedHdrSizeW dflags)) var
emitCCall
[{-no results-}]
(CmmLit (CmmLabel mkDirty_MUT_VAR_Label))
[(CmmReg (CmmGlobal BaseReg), AddrHint), (mutv,AddrHint)]
-- #define sizzeofByteArrayzh(r,a) \
-- r = ((StgArrWords *)(a))->bytes
emitPrimOp dflags [res] SizeofByteArrayOp [arg]
= emit $ mkAssign (CmmLocal res) (cmmLoadIndexW dflags arg (fixedHdrSizeW dflags) (bWord dflags))
-- #define sizzeofMutableByteArrayzh(r,a) \
-- r = ((StgArrWords *)(a))->bytes
emitPrimOp dflags [res] SizeofMutableByteArrayOp [arg]
= emitPrimOp dflags [res] SizeofByteArrayOp [arg]
-- #define touchzh(o) /* nothing */
emitPrimOp _ res@[] TouchOp args@[_arg]
= do emitPrimCall res MO_Touch args
-- #define byteArrayContentszh(r,a) r = BYTE_ARR_CTS(a)
emitPrimOp dflags [res] ByteArrayContents_Char [arg]
= emitAssign (CmmLocal res) (cmmOffsetB dflags arg (arrWordsHdrSize dflags))
-- #define stableNameToIntzh(r,s) (r = ((StgStableName *)s)->sn)
emitPrimOp dflags [res] StableNameToIntOp [arg]
= emitAssign (CmmLocal res) (cmmLoadIndexW dflags arg (fixedHdrSizeW dflags) (bWord dflags))
-- #define eqStableNamezh(r,sn1,sn2) \
-- (r = (((StgStableName *)sn1)->sn == ((StgStableName *)sn2)->sn))
emitPrimOp dflags [res] EqStableNameOp [arg1,arg2]
= emitAssign (CmmLocal res) (CmmMachOp (mo_wordEq dflags) [
cmmLoadIndexW dflags arg1 (fixedHdrSizeW dflags) (bWord dflags),
cmmLoadIndexW dflags arg2 (fixedHdrSizeW dflags) (bWord dflags)
])
emitPrimOp dflags [res] ReallyUnsafePtrEqualityOp [arg1,arg2]
= emitAssign (CmmLocal res) (CmmMachOp (mo_wordEq dflags) [arg1,arg2])
-- #define addrToHValuezh(r,a) r=(P_)a
emitPrimOp _ [res] AddrToAnyOp [arg]
= emitAssign (CmmLocal res) arg
-- #define dataToTagzh(r,a) r=(GET_TAG(((StgClosure *)a)->header.info))
-- Note: argument may be tagged!
emitPrimOp dflags [res] DataToTagOp [arg]
= emitAssign (CmmLocal res) (getConstrTag dflags (cmmUntag dflags arg))
{- Freezing arrays-of-ptrs requires changing an info table, for the
benefit of the generational collector. It needs to scavenge mutable
objects, even if they are in old space. When they become immutable,
they can be removed from this scavenge list. -}
-- #define unsafeFreezzeArrayzh(r,a)
-- {
-- SET_INFO((StgClosure *)a,&stg_MUT_ARR_PTRS_FROZEN0_info);
-- r = a;
-- }
emitPrimOp _ [res] UnsafeFreezeArrayOp [arg]
= emit $ catAGraphs
[ setInfo arg (CmmLit (CmmLabel mkMAP_FROZEN0_infoLabel)),
mkAssign (CmmLocal res) arg ]
emitPrimOp _ [res] UnsafeFreezeArrayArrayOp [arg]
= emit $ catAGraphs
[ setInfo arg (CmmLit (CmmLabel mkMAP_FROZEN0_infoLabel)),
mkAssign (CmmLocal res) arg ]
emitPrimOp _ [res] UnsafeFreezeSmallArrayOp [arg]
= emit $ catAGraphs
[ setInfo arg (CmmLit (CmmLabel mkSMAP_FROZEN0_infoLabel)),
mkAssign (CmmLocal res) arg ]
-- #define unsafeFreezzeByteArrayzh(r,a) r=(a)
emitPrimOp _ [res] UnsafeFreezeByteArrayOp [arg]
= emitAssign (CmmLocal res) arg
-- Reading/writing pointer arrays
emitPrimOp _ [res] ReadArrayOp [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [res] IndexArrayOp [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [] WriteArrayOp [obj,ix,v] = doWritePtrArrayOp obj ix v
emitPrimOp _ [res] IndexArrayArrayOp_ByteArray [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [res] IndexArrayArrayOp_ArrayArray [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [res] ReadArrayArrayOp_ByteArray [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [res] ReadArrayArrayOp_MutableByteArray [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [res] ReadArrayArrayOp_ArrayArray [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [res] ReadArrayArrayOp_MutableArrayArray [obj,ix] = doReadPtrArrayOp res obj ix
emitPrimOp _ [] WriteArrayArrayOp_ByteArray [obj,ix,v] = doWritePtrArrayOp obj ix v
emitPrimOp _ [] WriteArrayArrayOp_MutableByteArray [obj,ix,v] = doWritePtrArrayOp obj ix v
emitPrimOp _ [] WriteArrayArrayOp_ArrayArray [obj,ix,v] = doWritePtrArrayOp obj ix v
emitPrimOp _ [] WriteArrayArrayOp_MutableArrayArray [obj,ix,v] = doWritePtrArrayOp obj ix v
emitPrimOp _ [res] ReadSmallArrayOp [obj,ix] = doReadSmallPtrArrayOp res obj ix
emitPrimOp _ [res] IndexSmallArrayOp [obj,ix] = doReadSmallPtrArrayOp res obj ix
emitPrimOp _ [] WriteSmallArrayOp [obj,ix,v] = doWriteSmallPtrArrayOp obj ix v
-- Getting the size of pointer arrays
emitPrimOp dflags [res] SizeofArrayOp [arg]
= emit $ mkAssign (CmmLocal res) (cmmLoadIndexW dflags arg (fixedHdrSizeW dflags + oFFSET_StgMutArrPtrs_ptrs dflags) (bWord dflags))
emitPrimOp dflags [res] SizeofMutableArrayOp [arg]
= emitPrimOp dflags [res] SizeofArrayOp [arg]
emitPrimOp dflags [res] SizeofArrayArrayOp [arg]
= emitPrimOp dflags [res] SizeofArrayOp [arg]
emitPrimOp dflags [res] SizeofMutableArrayArrayOp [arg]
= emitPrimOp dflags [res] SizeofArrayOp [arg]
emitPrimOp dflags [res] SizeofSmallArrayOp [arg] =
emit $ mkAssign (CmmLocal res)
(cmmLoadIndexW dflags arg
(fixedHdrSizeW dflags + oFFSET_StgSmallMutArrPtrs_ptrs dflags) (bWord dflags))
emitPrimOp dflags [res] SizeofSmallMutableArrayOp [arg] =
emitPrimOp dflags [res] SizeofSmallArrayOp [arg]
-- IndexXXXoffAddr
emitPrimOp dflags res IndexOffAddrOp_Char args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res IndexOffAddrOp_WideChar args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp dflags res IndexOffAddrOp_Int args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res IndexOffAddrOp_Word args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res IndexOffAddrOp_Addr args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp _ res IndexOffAddrOp_Float args = doIndexOffAddrOp Nothing f32 res args
emitPrimOp _ res IndexOffAddrOp_Double args = doIndexOffAddrOp Nothing f64 res args
emitPrimOp dflags res IndexOffAddrOp_StablePtr args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res IndexOffAddrOp_Int8 args = doIndexOffAddrOp (Just (mo_s_8ToWord dflags)) b8 res args
emitPrimOp dflags res IndexOffAddrOp_Int16 args = doIndexOffAddrOp (Just (mo_s_16ToWord dflags)) b16 res args
emitPrimOp dflags res IndexOffAddrOp_Int32 args = doIndexOffAddrOp (Just (mo_s_32ToWord dflags)) b32 res args
emitPrimOp _ res IndexOffAddrOp_Int64 args = doIndexOffAddrOp Nothing b64 res args
emitPrimOp dflags res IndexOffAddrOp_Word8 args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res IndexOffAddrOp_Word16 args = doIndexOffAddrOp (Just (mo_u_16ToWord dflags)) b16 res args
emitPrimOp dflags res IndexOffAddrOp_Word32 args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp _ res IndexOffAddrOp_Word64 args = doIndexOffAddrOp Nothing b64 res args
-- ReadXXXoffAddr, which are identical, for our purposes, to IndexXXXoffAddr.
emitPrimOp dflags res ReadOffAddrOp_Char args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res ReadOffAddrOp_WideChar args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp dflags res ReadOffAddrOp_Int args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res ReadOffAddrOp_Word args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res ReadOffAddrOp_Addr args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp _ res ReadOffAddrOp_Float args = doIndexOffAddrOp Nothing f32 res args
emitPrimOp _ res ReadOffAddrOp_Double args = doIndexOffAddrOp Nothing f64 res args
emitPrimOp dflags res ReadOffAddrOp_StablePtr args = doIndexOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res ReadOffAddrOp_Int8 args = doIndexOffAddrOp (Just (mo_s_8ToWord dflags)) b8 res args
emitPrimOp dflags res ReadOffAddrOp_Int16 args = doIndexOffAddrOp (Just (mo_s_16ToWord dflags)) b16 res args
emitPrimOp dflags res ReadOffAddrOp_Int32 args = doIndexOffAddrOp (Just (mo_s_32ToWord dflags)) b32 res args
emitPrimOp _ res ReadOffAddrOp_Int64 args = doIndexOffAddrOp Nothing b64 res args
emitPrimOp dflags res ReadOffAddrOp_Word8 args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res ReadOffAddrOp_Word16 args = doIndexOffAddrOp (Just (mo_u_16ToWord dflags)) b16 res args
emitPrimOp dflags res ReadOffAddrOp_Word32 args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp _ res ReadOffAddrOp_Word64 args = doIndexOffAddrOp Nothing b64 res args
-- IndexXXXArray
emitPrimOp dflags res IndexByteArrayOp_Char args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res IndexByteArrayOp_WideChar args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp dflags res IndexByteArrayOp_Int args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res IndexByteArrayOp_Word args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res IndexByteArrayOp_Addr args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp _ res IndexByteArrayOp_Float args = doIndexByteArrayOp Nothing f32 res args
emitPrimOp _ res IndexByteArrayOp_Double args = doIndexByteArrayOp Nothing f64 res args
emitPrimOp dflags res IndexByteArrayOp_StablePtr args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res IndexByteArrayOp_Int8 args = doIndexByteArrayOp (Just (mo_s_8ToWord dflags)) b8 res args
emitPrimOp dflags res IndexByteArrayOp_Int16 args = doIndexByteArrayOp (Just (mo_s_16ToWord dflags)) b16 res args
emitPrimOp dflags res IndexByteArrayOp_Int32 args = doIndexByteArrayOp (Just (mo_s_32ToWord dflags)) b32 res args
emitPrimOp _ res IndexByteArrayOp_Int64 args = doIndexByteArrayOp Nothing b64 res args
emitPrimOp dflags res IndexByteArrayOp_Word8 args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res IndexByteArrayOp_Word16 args = doIndexByteArrayOp (Just (mo_u_16ToWord dflags)) b16 res args
emitPrimOp dflags res IndexByteArrayOp_Word32 args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp _ res IndexByteArrayOp_Word64 args = doIndexByteArrayOp Nothing b64 res args
-- ReadXXXArray, identical to IndexXXXArray.
emitPrimOp dflags res ReadByteArrayOp_Char args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res ReadByteArrayOp_WideChar args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp dflags res ReadByteArrayOp_Int args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res ReadByteArrayOp_Word args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res ReadByteArrayOp_Addr args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp _ res ReadByteArrayOp_Float args = doIndexByteArrayOp Nothing f32 res args
emitPrimOp _ res ReadByteArrayOp_Double args = doIndexByteArrayOp Nothing f64 res args
emitPrimOp dflags res ReadByteArrayOp_StablePtr args = doIndexByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res ReadByteArrayOp_Int8 args = doIndexByteArrayOp (Just (mo_s_8ToWord dflags)) b8 res args
emitPrimOp dflags res ReadByteArrayOp_Int16 args = doIndexByteArrayOp (Just (mo_s_16ToWord dflags)) b16 res args
emitPrimOp dflags res ReadByteArrayOp_Int32 args = doIndexByteArrayOp (Just (mo_s_32ToWord dflags)) b32 res args
emitPrimOp _ res ReadByteArrayOp_Int64 args = doIndexByteArrayOp Nothing b64 res args
emitPrimOp dflags res ReadByteArrayOp_Word8 args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args
emitPrimOp dflags res ReadByteArrayOp_Word16 args = doIndexByteArrayOp (Just (mo_u_16ToWord dflags)) b16 res args
emitPrimOp dflags res ReadByteArrayOp_Word32 args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args
emitPrimOp _ res ReadByteArrayOp_Word64 args = doIndexByteArrayOp Nothing b64 res args
-- WriteXXXoffAddr
emitPrimOp dflags res WriteOffAddrOp_Char args = doWriteOffAddrOp (Just (mo_WordTo8 dflags)) b8 res args
emitPrimOp dflags res WriteOffAddrOp_WideChar args = doWriteOffAddrOp (Just (mo_WordTo32 dflags)) b32 res args
emitPrimOp dflags res WriteOffAddrOp_Int args = doWriteOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res WriteOffAddrOp_Word args = doWriteOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res WriteOffAddrOp_Addr args = doWriteOffAddrOp Nothing (bWord dflags) res args
emitPrimOp _ res WriteOffAddrOp_Float args = doWriteOffAddrOp Nothing f32 res args
emitPrimOp _ res WriteOffAddrOp_Double args = doWriteOffAddrOp Nothing f64 res args
emitPrimOp dflags res WriteOffAddrOp_StablePtr args = doWriteOffAddrOp Nothing (bWord dflags) res args
emitPrimOp dflags res WriteOffAddrOp_Int8 args = doWriteOffAddrOp (Just (mo_WordTo8 dflags)) b8 res args
emitPrimOp dflags res WriteOffAddrOp_Int16 args = doWriteOffAddrOp (Just (mo_WordTo16 dflags)) b16 res args
emitPrimOp dflags res WriteOffAddrOp_Int32 args = doWriteOffAddrOp (Just (mo_WordTo32 dflags)) b32 res args
emitPrimOp _ res WriteOffAddrOp_Int64 args = doWriteOffAddrOp Nothing b64 res args
emitPrimOp dflags res WriteOffAddrOp_Word8 args = doWriteOffAddrOp (Just (mo_WordTo8 dflags)) b8 res args
emitPrimOp dflags res WriteOffAddrOp_Word16 args = doWriteOffAddrOp (Just (mo_WordTo16 dflags)) b16 res args
emitPrimOp dflags res WriteOffAddrOp_Word32 args = doWriteOffAddrOp (Just (mo_WordTo32 dflags)) b32 res args
emitPrimOp _ res WriteOffAddrOp_Word64 args = doWriteOffAddrOp Nothing b64 res args
-- WriteXXXArray
emitPrimOp dflags res WriteByteArrayOp_Char args = doWriteByteArrayOp (Just (mo_WordTo8 dflags)) b8 res args
emitPrimOp dflags res WriteByteArrayOp_WideChar args = doWriteByteArrayOp (Just (mo_WordTo32 dflags)) b32 res args
emitPrimOp dflags res WriteByteArrayOp_Int args = doWriteByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res WriteByteArrayOp_Word args = doWriteByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res WriteByteArrayOp_Addr args = doWriteByteArrayOp Nothing (bWord dflags) res args
emitPrimOp _ res WriteByteArrayOp_Float args = doWriteByteArrayOp Nothing f32 res args
emitPrimOp _ res WriteByteArrayOp_Double args = doWriteByteArrayOp Nothing f64 res args
emitPrimOp dflags res WriteByteArrayOp_StablePtr args = doWriteByteArrayOp Nothing (bWord dflags) res args
emitPrimOp dflags res WriteByteArrayOp_Int8 args = doWriteByteArrayOp (Just (mo_WordTo8 dflags)) b8 res args
emitPrimOp dflags res WriteByteArrayOp_Int16 args = doWriteByteArrayOp (Just (mo_WordTo16 dflags)) b16 res args
emitPrimOp dflags res WriteByteArrayOp_Int32 args = doWriteByteArrayOp (Just (mo_WordTo32 dflags)) b32 res args
emitPrimOp _ res WriteByteArrayOp_Int64 args = doWriteByteArrayOp Nothing b64 res args
emitPrimOp dflags res WriteByteArrayOp_Word8 args = doWriteByteArrayOp (Just (mo_WordTo8 dflags)) b8 res args
emitPrimOp dflags res WriteByteArrayOp_Word16 args = doWriteByteArrayOp (Just (mo_WordTo16 dflags)) b16 res args
emitPrimOp dflags res WriteByteArrayOp_Word32 args = doWriteByteArrayOp (Just (mo_WordTo32 dflags)) b32 res args
emitPrimOp _ res WriteByteArrayOp_Word64 args = doWriteByteArrayOp Nothing b64 res args
-- Copying and setting byte arrays
emitPrimOp _ [] CopyByteArrayOp [src,src_off,dst,dst_off,n] =
doCopyByteArrayOp src src_off dst dst_off n
emitPrimOp _ [] CopyMutableByteArrayOp [src,src_off,dst,dst_off,n] =
doCopyMutableByteArrayOp src src_off dst dst_off n
emitPrimOp _ [] CopyByteArrayToAddrOp [src,src_off,dst,n] =
doCopyByteArrayToAddrOp src src_off dst n
emitPrimOp _ [] CopyMutableByteArrayToAddrOp [src,src_off,dst,n] =
doCopyMutableByteArrayToAddrOp src src_off dst n
emitPrimOp _ [] CopyAddrToByteArrayOp [src,dst,dst_off,n] =
doCopyAddrToByteArrayOp src dst dst_off n
emitPrimOp _ [] SetByteArrayOp [ba,off,len,c] =
doSetByteArrayOp ba off len c
emitPrimOp _ [res] BSwap16Op [w] = emitBSwapCall res w W16
emitPrimOp _ [res] BSwap32Op [w] = emitBSwapCall res w W32
emitPrimOp _ [res] BSwap64Op [w] = emitBSwapCall res w W64
emitPrimOp dflags [res] BSwapOp [w] = emitBSwapCall res w (wordWidth dflags)
-- Population count
emitPrimOp _ [res] PopCnt8Op [w] = emitPopCntCall res w W8
emitPrimOp _ [res] PopCnt16Op [w] = emitPopCntCall res w W16
emitPrimOp _ [res] PopCnt32Op [w] = emitPopCntCall res w W32
emitPrimOp _ [res] PopCnt64Op [w] = emitPopCntCall res w W64
emitPrimOp dflags [res] PopCntOp [w] = emitPopCntCall res w (wordWidth dflags)
-- count leading zeros
emitPrimOp _ [res] Clz8Op [w] = emitClzCall res w W8
emitPrimOp _ [res] Clz16Op [w] = emitClzCall res w W16
emitPrimOp _ [res] Clz32Op [w] = emitClzCall res w W32
emitPrimOp _ [res] Clz64Op [w] = emitClzCall res w W64
emitPrimOp dflags [res] ClzOp [w] = emitClzCall res w (wordWidth dflags)
-- count trailing zeros
emitPrimOp _ [res] Ctz8Op [w] = emitCtzCall res w W8
emitPrimOp _ [res] Ctz16Op [w] = emitCtzCall res w W16
emitPrimOp _ [res] Ctz32Op [w] = emitCtzCall res w W32
emitPrimOp _ [res] Ctz64Op [w] = emitCtzCall res w W64
emitPrimOp dflags [res] CtzOp [w] = emitCtzCall res w (wordWidth dflags)
-- Unsigned int to floating point conversions
emitPrimOp _ [res] Word2FloatOp [w] = emitPrimCall [res]
(MO_UF_Conv W32) [w]
emitPrimOp _ [res] Word2DoubleOp [w] = emitPrimCall [res]
(MO_UF_Conv W64) [w]
-- SIMD primops
emitPrimOp dflags [res] (VecBroadcastOp vcat n w) [e] = do
checkVecCompatibility dflags vcat n w
doVecPackOp (vecElemInjectCast dflags vcat w) ty zeros (replicate n e) res
where
zeros :: CmmExpr
zeros = CmmLit $ CmmVec (replicate n zero)
zero :: CmmLit
zero = case vcat of
IntVec -> CmmInt 0 w
WordVec -> CmmInt 0 w
FloatVec -> CmmFloat 0 w
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags [res] (VecPackOp vcat n w) es = do
checkVecCompatibility dflags vcat n w
when (length es /= n) $
panic "emitPrimOp: VecPackOp has wrong number of arguments"
doVecPackOp (vecElemInjectCast dflags vcat w) ty zeros es res
where
zeros :: CmmExpr
zeros = CmmLit $ CmmVec (replicate n zero)
zero :: CmmLit
zero = case vcat of
IntVec -> CmmInt 0 w
WordVec -> CmmInt 0 w
FloatVec -> CmmFloat 0 w
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecUnpackOp vcat n w) [arg] = do
checkVecCompatibility dflags vcat n w
when (length res /= n) $
panic "emitPrimOp: VecUnpackOp has wrong number of results"
doVecUnpackOp (vecElemProjectCast dflags vcat w) ty arg res
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags [res] (VecInsertOp vcat n w) [v,e,i] = do
checkVecCompatibility dflags vcat n w
doVecInsertOp (vecElemInjectCast dflags vcat w) ty v e i res
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecIndexByteArrayOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexByteArrayOp Nothing ty res args
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecReadByteArrayOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexByteArrayOp Nothing ty res args
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecWriteByteArrayOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doWriteByteArrayOp Nothing ty res args
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecIndexOffAddrOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexOffAddrOp Nothing ty res args
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecReadOffAddrOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexOffAddrOp Nothing ty res args
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecWriteOffAddrOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doWriteOffAddrOp Nothing ty res args
where
ty :: CmmType
ty = vecVmmType vcat n w
emitPrimOp dflags res (VecIndexScalarByteArrayOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexByteArrayOpAs Nothing vecty ty res args
where
vecty :: CmmType
vecty = vecVmmType vcat n w
ty :: CmmType
ty = vecCmmCat vcat w
emitPrimOp dflags res (VecReadScalarByteArrayOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexByteArrayOpAs Nothing vecty ty res args
where
vecty :: CmmType
vecty = vecVmmType vcat n w
ty :: CmmType
ty = vecCmmCat vcat w
emitPrimOp dflags res (VecWriteScalarByteArrayOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doWriteByteArrayOp Nothing ty res args
where
ty :: CmmType
ty = vecCmmCat vcat w
emitPrimOp dflags res (VecIndexScalarOffAddrOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexOffAddrOpAs Nothing vecty ty res args
where
vecty :: CmmType
vecty = vecVmmType vcat n w
ty :: CmmType
ty = vecCmmCat vcat w
emitPrimOp dflags res (VecReadScalarOffAddrOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doIndexOffAddrOpAs Nothing vecty ty res args
where
vecty :: CmmType
vecty = vecVmmType vcat n w
ty :: CmmType
ty = vecCmmCat vcat w
emitPrimOp dflags res (VecWriteScalarOffAddrOp vcat n w) args = do
checkVecCompatibility dflags vcat n w
doWriteOffAddrOp Nothing ty res args
where
ty :: CmmType
ty = vecCmmCat vcat w
-- Prefetch
emitPrimOp _ [] PrefetchByteArrayOp3 args = doPrefetchByteArrayOp 3 args
emitPrimOp _ [] PrefetchMutableByteArrayOp3 args = doPrefetchMutableByteArrayOp 3 args
emitPrimOp _ [] PrefetchAddrOp3 args = doPrefetchAddrOp 3 args
emitPrimOp _ [] PrefetchValueOp3 args = doPrefetchValueOp 3 args
emitPrimOp _ [] PrefetchByteArrayOp2 args = doPrefetchByteArrayOp 2 args
emitPrimOp _ [] PrefetchMutableByteArrayOp2 args = doPrefetchMutableByteArrayOp 2 args
emitPrimOp _ [] PrefetchAddrOp2 args = doPrefetchAddrOp 2 args
emitPrimOp _ [] PrefetchValueOp2 args = doPrefetchValueOp 2 args
emitPrimOp _ [] PrefetchByteArrayOp1 args = doPrefetchByteArrayOp 1 args
emitPrimOp _ [] PrefetchMutableByteArrayOp1 args = doPrefetchMutableByteArrayOp 1 args
emitPrimOp _ [] PrefetchAddrOp1 args = doPrefetchAddrOp 1 args
emitPrimOp _ [] PrefetchValueOp1 args = doPrefetchValueOp 1 args
emitPrimOp _ [] PrefetchByteArrayOp0 args = doPrefetchByteArrayOp 0 args
emitPrimOp _ [] PrefetchMutableByteArrayOp0 args = doPrefetchMutableByteArrayOp 0 args
emitPrimOp _ [] PrefetchAddrOp0 args = doPrefetchAddrOp 0 args
emitPrimOp _ [] PrefetchValueOp0 args = doPrefetchValueOp 0 args
-- Atomic read-modify-write
emitPrimOp dflags [res] FetchAddByteArrayOp_Int [mba, ix, n] =
doAtomicRMW res AMO_Add mba ix (bWord dflags) n
emitPrimOp dflags [res] FetchSubByteArrayOp_Int [mba, ix, n] =
doAtomicRMW res AMO_Sub mba ix (bWord dflags) n
emitPrimOp dflags [res] FetchAndByteArrayOp_Int [mba, ix, n] =
doAtomicRMW res AMO_And mba ix (bWord dflags) n
emitPrimOp dflags [res] FetchNandByteArrayOp_Int [mba, ix, n] =
doAtomicRMW res AMO_Nand mba ix (bWord dflags) n
emitPrimOp dflags [res] FetchOrByteArrayOp_Int [mba, ix, n] =
doAtomicRMW res AMO_Or mba ix (bWord dflags) n
emitPrimOp dflags [res] FetchXorByteArrayOp_Int [mba, ix, n] =
doAtomicRMW res AMO_Xor mba ix (bWord dflags) n
emitPrimOp dflags [res] AtomicReadByteArrayOp_Int [mba, ix] =
doAtomicReadByteArray res mba ix (bWord dflags)
emitPrimOp dflags [] AtomicWriteByteArrayOp_Int [mba, ix, val] =
doAtomicWriteByteArray mba ix (bWord dflags) val
emitPrimOp dflags [res] CasByteArrayOp_Int [mba, ix, old, new] =
doCasByteArray res mba ix (bWord dflags) old new
-- The rest just translate straightforwardly
emitPrimOp dflags [res] op [arg]
| nopOp op
= emitAssign (CmmLocal res) arg
| Just (mop,rep) <- narrowOp op
= emitAssign (CmmLocal res) $
CmmMachOp (mop rep (wordWidth dflags)) [CmmMachOp (mop (wordWidth dflags) rep) [arg]]
emitPrimOp dflags r@[res] op args
| Just prim <- callishOp op
= do emitPrimCall r prim args
| Just mop <- translateOp dflags op
= let stmt = mkAssign (CmmLocal res) (CmmMachOp mop args) in
emit stmt
emitPrimOp dflags results op args
= case callishPrimOpSupported dflags op of
Left op -> emit $ mkUnsafeCall (PrimTarget op) results args
Right gen -> gen results args
-- | Shape of a generic (fallback) implementation of a callish primop:
-- given the result registers and argument expressions, emit the Cmm
-- code that computes the operation.
type GenericOp = [CmmFormal] -> [CmmActual] -> FCode ()
-- | Decide how a callish primop should be implemented: on an x86-family
-- target using the native code generator we can emit the corresponding
-- 'CallishMachOp' directly ('Left'); on every other backend we fall
-- back to a generic Cmm implementation ('Right').
callishPrimOpSupported :: DynFlags -> PrimOp -> Either CallishMachOp GenericOp
callishPrimOpSupported dflags op = case op of
    IntQuotRemOp   -> pick (MO_S_QuotRem  ww) (genericIntQuotRemOp dflags)
    WordQuotRemOp  -> pick (MO_U_QuotRem  ww) (genericWordQuotRemOp dflags)
    WordQuotRem2Op -> pick (MO_U_QuotRem2 ww) (genericWordQuotRem2Op dflags)
    WordAdd2Op     -> pick (MO_Add2       ww) genericWordAdd2Op
    IntAddCOp      -> pick (MO_AddIntC    ww) genericIntAddCOp
    IntSubCOp      -> pick (MO_SubIntC    ww) genericIntSubCOp
    WordMul2Op     -> pick (MO_U_Mul2     ww) genericWordMul2Op
    _              -> pprPanic "emitPrimOp: can't translate PrimOp " (ppr op)
  where
    ww = wordWidth dflags

    -- Use the machine op only when the NCG on an x86-ish platform can
    -- lower it; otherwise take the generic fallback.
    pick machOp fallback
      | usingNcg && onX86 = Left machOp
      | otherwise         = Right fallback

    usingNcg = case hscTarget dflags of
                 HscAsm -> True
                 _      -> False

    onX86 = case platformArch (targetPlatform dflags) of
              ArchX86    -> True
              ArchX86_64 -> True
              _          -> False
-- | Generic implementation of 'IntQuotRemOp': produce the signed
-- quotient and remainder with two separate word-width machine ops.
genericIntQuotRemOp :: DynFlags -> GenericOp
genericIntQuotRemOp dflags [res_q, res_r] [arg_x, arg_y] =
    emit (assign_q <*> assign_r)
  where
    w        = wordWidth dflags
    assign_q = mkAssign (CmmLocal res_q)
                        (CmmMachOp (MO_S_Quot w) [arg_x, arg_y])
    assign_r = mkAssign (CmmLocal res_r)
                        (CmmMachOp (MO_S_Rem w) [arg_x, arg_y])
genericIntQuotRemOp _ _ _ = panic "genericIntQuotRemOp"
-- | Generic implementation of 'WordQuotRemOp': produce the unsigned
-- quotient and remainder with two separate word-width machine ops.
genericWordQuotRemOp :: DynFlags -> GenericOp
genericWordQuotRemOp dflags [res_q, res_r] [arg_x, arg_y] =
    emit (assign_q <*> assign_r)
  where
    w        = wordWidth dflags
    assign_q = mkAssign (CmmLocal res_q)
                        (CmmMachOp (MO_U_Quot w) [arg_x, arg_y])
    assign_r = mkAssign (CmmLocal res_r)
                        (CmmMachOp (MO_U_Rem w) [arg_x, arg_y])
genericWordQuotRemOp _ _ _ = panic "genericWordQuotRemOp"
-- | Generic implementation of 'WordQuotRem2Op': divide the double-word
-- numerator @arg_x_high:arg_x_low@ by the single-word @arg_y@, one bit
-- per iteration (classic shift-and-subtract long division, fully
-- unrolled at code-generation time to word-width steps).  Quotient goes
-- to @res_q@, remainder to @res_r@.
genericWordQuotRem2Op :: DynFlags -> GenericOp
genericWordQuotRem2Op dflags [res_q, res_r] [arg_x_high, arg_x_low, arg_y]
    = emit =<< f (widthInBits (wordWidth dflags)) zero arg_x_high arg_x_low
    where -- Shorthands for word-width Cmm operations used below.
          ty = cmmExprType dflags arg_x_high
          shl   x i = CmmMachOp (MO_Shl   (wordWidth dflags)) [x, i]
          shr   x i = CmmMachOp (MO_U_Shr (wordWidth dflags)) [x, i]
          or    x y = CmmMachOp (MO_Or    (wordWidth dflags)) [x, y]
          ge    x y = CmmMachOp (MO_U_Ge  (wordWidth dflags)) [x, y]
          ne    x y = CmmMachOp (MO_Ne    (wordWidth dflags)) [x, y]
          minus x y = CmmMachOp (MO_Sub   (wordWidth dflags)) [x, y]
          times x y = CmmMachOp (MO_Mul   (wordWidth dflags)) [x, y]
          zero   = lit 0
          one    = lit 1
          -- (word size in bits) - 1: shift amount that isolates the top bit.
          negone = lit (fromIntegral (widthInBits (wordWidth dflags)) - 1)
          lit i = CmmLit (CmmInt i (wordWidth dflags))

          -- One division step per remaining bit; when no bits remain,
          -- the accumulator is the quotient and @high@ the remainder.
          f :: Int -> CmmExpr -> CmmExpr -> CmmExpr -> FCode CmmAGraph
          f 0 acc high _ = return (mkAssign (CmmLocal res_q) acc <*>
                                   mkAssign (CmmLocal res_r) high)
          f i acc high low =
              do roverflowedBit <- newTemp ty
                 rhigh'         <- newTemp ty
                 rhigh''        <- newTemp ty
                 rlow'          <- newTemp ty
                 risge          <- newTemp ty
                 racc'          <- newTemp ty
                 let high'         = CmmReg (CmmLocal rhigh')
                     isge          = CmmReg (CmmLocal risge)
                     overflowedBit = CmmReg (CmmLocal roverflowedBit)
                 -- Shift (high:low) left by one, capturing the bit that
                 -- falls off the top; subtract the divisor when the
                 -- shifted high word (or the overflowed bit) reaches it,
                 -- and shift the corresponding quotient bit into acc.
                 -- NB: the assignments below are order-dependent.
                 let this = catAGraphs
                            [mkAssign (CmmLocal roverflowedBit)
                                      (shr high negone),
                             mkAssign (CmmLocal rhigh')
                                      (or (shl high one) (shr low negone)),
                             mkAssign (CmmLocal rlow')
                                      (shl low one),
                             mkAssign (CmmLocal risge)
                                      (or (overflowedBit `ne` zero)
                                          (high' `ge` arg_y)),
                             mkAssign (CmmLocal rhigh'')
                                      (high' `minus` (arg_y `times` isge)),
                             mkAssign (CmmLocal racc')
                                      (or (shl acc one) isge)]
                 rest <- f (i - 1) (CmmReg (CmmLocal racc'))
                                   (CmmReg (CmmLocal rhigh''))
                                   (CmmReg (CmmLocal rlow'))
                 return (this <*> rest)
genericWordQuotRem2Op _ _ _ = panic "genericWordQuotRem2Op"
-- | Generic implementation of 'WordAdd2Op': add two words producing a
-- (high, low) double-word result.  Works in half-word pieces so the
-- carry out of the low halves can be observed without needing a wider
-- machine type.
genericWordAdd2Op :: GenericOp
genericWordAdd2Op [res_h, res_l] [arg_x, arg_y]
 = do dflags <- getDynFlags
      r1 <- newTemp (cmmExprType dflags arg_x)
      r2 <- newTemp (cmmExprType dflags arg_x)
      let topHalf x = CmmMachOp (MO_U_Shr (wordWidth dflags)) [x, hww]
          toTopHalf x = CmmMachOp (MO_Shl (wordWidth dflags)) [x, hww]
          bottomHalf x = CmmMachOp (MO_And (wordWidth dflags)) [x, hwm]
          add x y = CmmMachOp (MO_Add (wordWidth dflags)) [x, y]
          or x y = CmmMachOp (MO_Or (wordWidth dflags)) [x, y]
          -- hww = half-word width in bits (as a shift amount),
          -- hwm = mask selecting the bottom half of a word.
          hww = CmmLit (CmmInt (fromIntegral (widthInBits (halfWordWidth dflags)))
                               (wordWidth dflags))
          hwm = CmmLit (CmmInt (halfWordMask dflags) (wordWidth dflags))
      emit $ catAGraphs
         -- r1 = sum of bottom halves (its top half is the carry into r2);
         -- r2 = sum of top halves plus that carry.
         [mkAssign (CmmLocal r1)
              (add (bottomHalf arg_x) (bottomHalf arg_y)),
          mkAssign (CmmLocal r2)
              (add (topHalf (CmmReg (CmmLocal r1)))
                   (add (topHalf arg_x) (topHalf arg_y))),
          mkAssign (CmmLocal res_h)
              (topHalf (CmmReg (CmmLocal r2))),
          mkAssign (CmmLocal res_l)
              (or (toTopHalf (CmmReg (CmmLocal r2)))
                  (bottomHalf (CmmReg (CmmLocal r1))))]
genericWordAdd2Op _ _ = panic "genericWordAdd2Op"
-- | Generic implementation of 'IntAddCOp': signed add with overflow
-- detection via bit-twiddling (no comparisons), see the derivation in
-- the comment below.
genericIntAddCOp :: GenericOp
genericIntAddCOp [res_r, res_c] [aa, bb]
{-
   With some bit-twiddling, we can define int{Add,Sub}Czh portably in
   C, and without needing any comparisons.  This may not be the
   fastest way to do it - if you have better code, please send it! --SDM

   Return : r = a + b,  c = 0 if no overflow, 1 on overflow.

   We currently don't make use of the r value if c is != 0 (i.e.
   overflow), we just convert to big integers and try again.  This
   could be improved by making r and c the correct values for
   plugging into a new J#.

   { r = ((I_)(a)) + ((I_)(b));                                 \
     c = ((StgWord)(~(((I_)(a))^((I_)(b))) & (((I_)(a))^r)))    \
         >> (BITS_IN (I_) - 1);                                 \
   }
   Wading through the mass of bracketry, it seems to reduce to:
   c = ( (~(a^b)) & (a^r) ) >>unsigned (BITS_IN(I_)-1)
-}
 = do dflags <- getDynFlags
      emit $ catAGraphs [
        mkAssign (CmmLocal res_r) (CmmMachOp (mo_wordAdd dflags) [aa,bb]),
        -- Overflow iff the operands agree in sign but the result does
        -- not: the sign bit of (~(a^b) & (a^r)), shifted down to bit 0.
        mkAssign (CmmLocal res_c) $
          CmmMachOp (mo_wordUShr dflags) [
                CmmMachOp (mo_wordAnd dflags) [
                    CmmMachOp (mo_wordNot dflags) [CmmMachOp (mo_wordXor dflags) [aa,bb]],
                    CmmMachOp (mo_wordXor dflags) [aa, CmmReg (CmmLocal res_r)]
                ],
                mkIntExpr dflags (wORD_SIZE_IN_BITS dflags - 1)
          ]
        ]
genericIntAddCOp _ _ = panic "genericIntAddCOp"
-- | Generic implementation of 'IntSubCOp': signed subtract with
-- overflow detection, mirroring 'genericIntAddCOp' (see the derivation
-- in the comment below).
genericIntSubCOp :: GenericOp
genericIntSubCOp [res_r, res_c] [aa, bb]
{- Similarly:
   #define subIntCzh(r,c,a,b)                                   \
   { r = ((I_)(a)) - ((I_)(b));                                 \
     c = ((StgWord)((((I_)(a))^((I_)(b))) & (((I_)(a))^r)))     \
         >> (BITS_IN (I_) - 1);                                 \
   }

   c =  ((a^b) & (a^r)) >>unsigned (BITS_IN(I_)-1)
-}
 = do dflags <- getDynFlags
      emit $ catAGraphs [
        mkAssign (CmmLocal res_r) (CmmMachOp (mo_wordSub dflags) [aa,bb]),
        -- Overflow iff the operands differ in sign and the result's
        -- sign differs from the minuend's.
        mkAssign (CmmLocal res_c) $
          CmmMachOp (mo_wordUShr dflags) [
                CmmMachOp (mo_wordAnd dflags) [
                    CmmMachOp (mo_wordXor dflags) [aa,bb],
                    CmmMachOp (mo_wordXor dflags) [aa, CmmReg (CmmLocal res_r)]
                ],
                mkIntExpr dflags (wORD_SIZE_IN_BITS dflags - 1)
          ]
        ]
genericIntSubCOp _ _ = panic "genericIntSubCOp"
-- | Generic implementation of 'WordMul2Op': multiply two words to a
-- (high, low) double-word product by schoolbook multiplication on
-- half-word digits.
genericWordMul2Op :: GenericOp
genericWordMul2Op [res_h, res_l] [arg_x, arg_y]
 = do dflags <- getDynFlags
      let t = cmmExprType dflags arg_x
      xlyl <- liftM CmmLocal $ newTemp t
      xlyh <- liftM CmmLocal $ newTemp t
      xhyl <- liftM CmmLocal $ newTemp t
      r    <- liftM CmmLocal $ newTemp t
      -- This generic implementation is very simple and slow. We might
      -- well be able to do better, but for now this at least works.
      let topHalf x = CmmMachOp (MO_U_Shr (wordWidth dflags)) [x, hww]
          toTopHalf x = CmmMachOp (MO_Shl (wordWidth dflags)) [x, hww]
          bottomHalf x = CmmMachOp (MO_And (wordWidth dflags)) [x, hwm]
          add x y = CmmMachOp (MO_Add (wordWidth dflags)) [x, y]
          sum = foldl1 add
          mul x y = CmmMachOp (MO_Mul (wordWidth dflags)) [x, y]
          or x y = CmmMachOp (MO_Or (wordWidth dflags)) [x, y]
          -- hww = half-word width in bits (as a shift amount),
          -- hwm = mask selecting the bottom half of a word.
          hww = CmmLit (CmmInt (fromIntegral (widthInBits (halfWordWidth dflags)))
                               (wordWidth dflags))
          hwm = CmmLit (CmmInt (halfWordMask dflags) (wordWidth dflags))
      emit $ catAGraphs
             -- Partial products of the half-word digits; r gathers the
             -- middle column (whose top half carries into the high word).
             [mkAssign xlyl
                  (mul (bottomHalf arg_x) (bottomHalf arg_y)),
              mkAssign xlyh
                  (mul (bottomHalf arg_x) (topHalf arg_y)),
              mkAssign xhyl
                  (mul (topHalf arg_x) (bottomHalf arg_y)),
              mkAssign r
                  (sum [topHalf (CmmReg xlyl),
                        bottomHalf (CmmReg xhyl),
                        bottomHalf (CmmReg xlyh)]),
              mkAssign (CmmLocal res_l)
                  (or (bottomHalf (CmmReg xlyl))
                      (toTopHalf (CmmReg r))),
              mkAssign (CmmLocal res_h)
                  (sum [mul (topHalf arg_x) (topHalf arg_y),
                        topHalf (CmmReg xhyl),
                        topHalf (CmmReg xlyh),
                        topHalf (CmmReg r)])]
genericWordMul2Op _ _ = panic "genericWordMul2Op"
-- These PrimOps are NOPs in Cmm

-- | Primops whose argument and result share a representation in Cmm,
-- so no code needs to be generated: the input is simply re-used.
nopOp :: PrimOp -> Bool
nopOp op = case op of
  Int2WordOp -> True
  Word2IntOp -> True
  Int2AddrOp -> True
  Addr2IntOp -> True
  ChrOp      -> True   -- Int# and Char# are rep'd the same
  OrdOp      -> True
  _          -> False
-- These PrimOps turn into double casts

-- | Narrowing primops become a pair of conversions: the returned
-- constructor (signed or unsigned) is applied twice in 'emitPrimOp' —
-- narrow to the given width, then widen back to the native word width.
narrowOp :: PrimOp -> Maybe (Width -> Width -> MachOp, Width)
narrowOp op = case op of
  Narrow8IntOp   -> Just (MO_SS_Conv, W8)
  Narrow16IntOp  -> Just (MO_SS_Conv, W16)
  Narrow32IntOp  -> Just (MO_SS_Conv, W32)
  Narrow8WordOp  -> Just (MO_UU_Conv, W8)
  Narrow16WordOp -> Just (MO_UU_Conv, W16)
  Narrow32WordOp -> Just (MO_UU_Conv, W32)
  _              -> Nothing
-- | Map a primop directly onto a single 'MachOp' where possible;
-- 'Nothing' means the primop needs special treatment elsewhere
-- (see 'callishOp' and the explicit 'emitPrimOp' clauses).

-- Native word signless ops
translateOp :: DynFlags -> PrimOp -> Maybe MachOp
translateOp dflags IntAddOp       = Just (mo_wordAdd dflags)
translateOp dflags IntSubOp       = Just (mo_wordSub dflags)
translateOp dflags WordAddOp      = Just (mo_wordAdd dflags)
translateOp dflags WordSubOp      = Just (mo_wordSub dflags)
translateOp dflags AddrAddOp      = Just (mo_wordAdd dflags)
translateOp dflags AddrSubOp      = Just (mo_wordSub dflags)

translateOp dflags IntEqOp        = Just (mo_wordEq dflags)
translateOp dflags IntNeOp        = Just (mo_wordNe dflags)
translateOp dflags WordEqOp       = Just (mo_wordEq dflags)
translateOp dflags WordNeOp       = Just (mo_wordNe dflags)
translateOp dflags AddrEqOp       = Just (mo_wordEq dflags)
translateOp dflags AddrNeOp       = Just (mo_wordNe dflags)

translateOp dflags AndOp          = Just (mo_wordAnd dflags)
translateOp dflags OrOp           = Just (mo_wordOr dflags)
translateOp dflags XorOp          = Just (mo_wordXor dflags)
translateOp dflags NotOp          = Just (mo_wordNot dflags)
translateOp dflags SllOp          = Just (mo_wordShl dflags)
translateOp dflags SrlOp          = Just (mo_wordUShr dflags)

translateOp dflags AddrRemOp      = Just (mo_wordURem dflags)

-- Native word signed ops
translateOp dflags IntMulOp        = Just (mo_wordMul dflags)
translateOp dflags IntMulMayOfloOp = Just (MO_S_MulMayOflo (wordWidth dflags))
translateOp dflags IntQuotOp       = Just (mo_wordSQuot dflags)
translateOp dflags IntRemOp        = Just (mo_wordSRem dflags)
translateOp dflags IntNegOp        = Just (mo_wordSNeg dflags)

translateOp dflags IntGeOp        = Just (mo_wordSGe dflags)
translateOp dflags IntLeOp        = Just (mo_wordSLe dflags)
translateOp dflags IntGtOp        = Just (mo_wordSGt dflags)
translateOp dflags IntLtOp        = Just (mo_wordSLt dflags)

translateOp dflags AndIOp         = Just (mo_wordAnd dflags)
translateOp dflags OrIOp          = Just (mo_wordOr dflags)
translateOp dflags XorIOp         = Just (mo_wordXor dflags)
translateOp dflags NotIOp         = Just (mo_wordNot dflags)
translateOp dflags ISllOp         = Just (mo_wordShl dflags)
translateOp dflags ISraOp         = Just (mo_wordSShr dflags)
translateOp dflags ISrlOp         = Just (mo_wordUShr dflags)

-- Native word unsigned ops
translateOp dflags WordGeOp       = Just (mo_wordUGe dflags)
translateOp dflags WordLeOp       = Just (mo_wordULe dflags)
translateOp dflags WordGtOp       = Just (mo_wordUGt dflags)
translateOp dflags WordLtOp       = Just (mo_wordULt dflags)

translateOp dflags WordMulOp      = Just (mo_wordMul dflags)
translateOp dflags WordQuotOp     = Just (mo_wordUQuot dflags)
translateOp dflags WordRemOp      = Just (mo_wordURem dflags)

translateOp dflags AddrGeOp       = Just (mo_wordUGe dflags)
translateOp dflags AddrLeOp       = Just (mo_wordULe dflags)
translateOp dflags AddrGtOp       = Just (mo_wordUGt dflags)
translateOp dflags AddrLtOp       = Just (mo_wordULt dflags)

-- Char# ops (Char# is represented as a full word, compared unsigned)
translateOp dflags CharEqOp       = Just (MO_Eq (wordWidth dflags))
translateOp dflags CharNeOp       = Just (MO_Ne (wordWidth dflags))
translateOp dflags CharGeOp       = Just (MO_U_Ge (wordWidth dflags))
translateOp dflags CharLeOp       = Just (MO_U_Le (wordWidth dflags))
translateOp dflags CharGtOp       = Just (MO_U_Gt (wordWidth dflags))
translateOp dflags CharLtOp       = Just (MO_U_Lt (wordWidth dflags))

-- Double ops
translateOp _      DoubleEqOp     = Just (MO_F_Eq W64)
translateOp _      DoubleNeOp     = Just (MO_F_Ne W64)
translateOp _      DoubleGeOp     = Just (MO_F_Ge W64)
translateOp _      DoubleLeOp     = Just (MO_F_Le W64)
translateOp _      DoubleGtOp     = Just (MO_F_Gt W64)
translateOp _      DoubleLtOp     = Just (MO_F_Lt W64)

translateOp _      DoubleAddOp    = Just (MO_F_Add W64)
translateOp _      DoubleSubOp    = Just (MO_F_Sub W64)
translateOp _      DoubleMulOp    = Just (MO_F_Mul W64)
translateOp _      DoubleDivOp    = Just (MO_F_Quot W64)
translateOp _      DoubleNegOp    = Just (MO_F_Neg W64)

-- Float ops
translateOp _      FloatEqOp     = Just (MO_F_Eq W32)
translateOp _      FloatNeOp     = Just (MO_F_Ne W32)
translateOp _      FloatGeOp     = Just (MO_F_Ge W32)
translateOp _      FloatLeOp     = Just (MO_F_Le W32)
translateOp _      FloatGtOp     = Just (MO_F_Gt W32)
translateOp _      FloatLtOp     = Just (MO_F_Lt W32)

translateOp _      FloatAddOp    = Just (MO_F_Add W32)
translateOp _      FloatSubOp    = Just (MO_F_Sub W32)
translateOp _      FloatMulOp    = Just (MO_F_Mul W32)
translateOp _      FloatDivOp    = Just (MO_F_Quot W32)
translateOp _      FloatNegOp    = Just (MO_F_Neg W32)

-- Vector ops
translateOp _ (VecAddOp FloatVec n w) = Just (MO_VF_Add  n w)
translateOp _ (VecSubOp FloatVec n w) = Just (MO_VF_Sub  n w)
translateOp _ (VecMulOp FloatVec n w) = Just (MO_VF_Mul  n w)
translateOp _ (VecDivOp FloatVec n w) = Just (MO_VF_Quot n w)
translateOp _ (VecNegOp FloatVec n w) = Just (MO_VF_Neg  n w)

translateOp _ (VecAddOp  IntVec n w) = Just (MO_V_Add   n w)
translateOp _ (VecSubOp  IntVec n w) = Just (MO_V_Sub   n w)
translateOp _ (VecMulOp  IntVec n w) = Just (MO_V_Mul   n w)
translateOp _ (VecQuotOp IntVec n w) = Just (MO_VS_Quot n w)
translateOp _ (VecRemOp  IntVec n w) = Just (MO_VS_Rem  n w)
translateOp _ (VecNegOp  IntVec n w) = Just (MO_VS_Neg  n w)

translateOp _ (VecAddOp  WordVec n w) = Just (MO_V_Add   n w)
translateOp _ (VecSubOp  WordVec n w) = Just (MO_V_Sub   n w)
translateOp _ (VecMulOp  WordVec n w) = Just (MO_V_Mul   n w)
translateOp _ (VecQuotOp WordVec n w) = Just (MO_VU_Quot n w)
translateOp _ (VecRemOp  WordVec n w) = Just (MO_VU_Rem  n w)

-- Conversions
translateOp dflags Int2DoubleOp   = Just (MO_SF_Conv (wordWidth dflags) W64)
translateOp dflags Double2IntOp   = Just (MO_FS_Conv W64 (wordWidth dflags))

translateOp dflags Int2FloatOp    = Just (MO_SF_Conv (wordWidth dflags) W32)
translateOp dflags Float2IntOp    = Just (MO_FS_Conv W32 (wordWidth dflags))

translateOp _      Float2DoubleOp = Just (MO_FF_Conv W32 W64)
translateOp _      Double2FloatOp = Just (MO_FF_Conv W64 W32)

-- Word comparisons masquerading as more exotic things.
-- (These work because the underlying objects are compared by pointer
-- equality, which is just a word comparison in Cmm.)
translateOp dflags SameMutVarOp            = Just (mo_wordEq dflags)
translateOp dflags SameMVarOp              = Just (mo_wordEq dflags)
translateOp dflags SameMutableArrayOp      = Just (mo_wordEq dflags)
translateOp dflags SameMutableByteArrayOp  = Just (mo_wordEq dflags)
translateOp dflags SameMutableArrayArrayOp = Just (mo_wordEq dflags)
translateOp dflags SameSmallMutableArrayOp = Just (mo_wordEq dflags)
translateOp dflags SameTVarOp              = Just (mo_wordEq dflags)
translateOp dflags EqStablePtrOp           = Just (mo_wordEq dflags)

translateOp _ _ = Nothing
-- These primops are implemented by CallishMachOps, because they sometimes
-- turn into foreign calls depending on the backend.

-- | Floating-point transcendental primops that map to 'CallishMachOp's;
-- the backend decides whether to inline them or call out to libm.
callishOp :: PrimOp -> Maybe CallishMachOp
callishOp op = case op of
  DoublePowerOp -> Just MO_F64_Pwr
  DoubleSinOp   -> Just MO_F64_Sin
  DoubleCosOp   -> Just MO_F64_Cos
  DoubleTanOp   -> Just MO_F64_Tan
  DoubleSinhOp  -> Just MO_F64_Sinh
  DoubleCoshOp  -> Just MO_F64_Cosh
  DoubleTanhOp  -> Just MO_F64_Tanh
  DoubleAsinOp  -> Just MO_F64_Asin
  DoubleAcosOp  -> Just MO_F64_Acos
  DoubleAtanOp  -> Just MO_F64_Atan
  DoubleLogOp   -> Just MO_F64_Log
  DoubleExpOp   -> Just MO_F64_Exp
  DoubleSqrtOp  -> Just MO_F64_Sqrt
  FloatPowerOp  -> Just MO_F32_Pwr
  FloatSinOp    -> Just MO_F32_Sin
  FloatCosOp    -> Just MO_F32_Cos
  FloatTanOp    -> Just MO_F32_Tan
  FloatSinhOp   -> Just MO_F32_Sinh
  FloatCoshOp   -> Just MO_F32_Cosh
  FloatTanhOp   -> Just MO_F32_Tanh
  FloatAsinOp   -> Just MO_F32_Asin
  FloatAcosOp   -> Just MO_F32_Acos
  FloatAtanOp   -> Just MO_F32_Atan
  FloatLogOp    -> Just MO_F32_Log
  FloatExpOp    -> Just MO_F32_Exp
  FloatSqrtOp   -> Just MO_F32_Sqrt
  _             -> Nothing
------------------------------------------------------------------------------
-- Helpers for translating various minor variants of array indexing.

-- | Read an element from a raw address, indexing and loading at the
-- element's own type, with an optional cast applied after the read.
doIndexOffAddrOp :: Maybe MachOp
                 -> CmmType
                 -> [LocalReg]
                 -> [CmmExpr]
                 -> FCode ()
doIndexOffAddrOp post_cast elem_ty regs args
  | [res] <- regs
  , [addr, idx] <- args
  = mkBasicIndexedRead 0 post_cast elem_ty res addr elem_ty idx
  | otherwise
  = panic "StgCmmPrim: doIndexOffAddrOp"
-- | Like 'doIndexOffAddrOp', but the type used for index scaling may
-- differ from the type of the element being read.
doIndexOffAddrOpAs :: Maybe MachOp
                   -> CmmType
                   -> CmmType
                   -> [LocalReg]
                   -> [CmmExpr]
                   -> FCode ()
doIndexOffAddrOpAs post_cast elem_ty idx_ty regs args
  | [res] <- regs
  , [addr, idx] <- args
  = mkBasicIndexedRead 0 post_cast elem_ty res addr idx_ty idx
  | otherwise
  = panic "StgCmmPrim: doIndexOffAddrOpAs"
-- | Read an element from a @ByteArray#@, skipping the array header,
-- with an optional cast applied after the read.
doIndexByteArrayOp :: Maybe MachOp
                   -> CmmType
                   -> [LocalReg]
                   -> [CmmExpr]
                   -> FCode ()
doIndexByteArrayOp post_cast elem_ty regs args
  | [res] <- regs
  , [addr, idx] <- args
  = do dflags <- getDynFlags
       mkBasicIndexedRead (arrWordsHdrSize dflags) post_cast elem_ty res addr elem_ty idx
  | otherwise
  = panic "StgCmmPrim: doIndexByteArrayOp"
-- | Like 'doIndexByteArrayOp', but the type used for index scaling may
-- differ from the type of the element being read.
doIndexByteArrayOpAs :: Maybe MachOp
                     -> CmmType
                     -> CmmType
                     -> [LocalReg]
                     -> [CmmExpr]
                     -> FCode ()
doIndexByteArrayOpAs post_cast elem_ty idx_ty regs args
  | [res] <- regs
  , [addr, idx] <- args
  = do dflags <- getDynFlags
       mkBasicIndexedRead (arrWordsHdrSize dflags) post_cast elem_ty res addr idx_ty idx
  | otherwise
  = panic "StgCmmPrim: doIndexByteArrayOpAs"
-- | Read a (GC-managed) pointer out of an @Array#@, skipping the
-- pointer-array header.
doReadPtrArrayOp :: LocalReg
                 -> CmmExpr
                 -> CmmExpr
                 -> FCode ()
doReadPtrArrayOp res addr idx = do
    dflags <- getDynFlags
    let gc_ty = gcWord dflags   -- elements are GC pointers
    mkBasicIndexedRead (arrPtrsHdrSize dflags) Nothing gc_ty res addr gc_ty idx
-- | Write an element to a raw address, with an optional cast applied to
-- the value before the store.
doWriteOffAddrOp :: Maybe MachOp
                 -> CmmType
                 -> [LocalReg]
                 -> [CmmExpr]
                 -> FCode ()
doWriteOffAddrOp pre_cast idx_ty regs args
  | [] <- regs
  , [addr, idx, val] <- args
  = mkBasicIndexedWrite 0 pre_cast addr idx_ty idx val
  | otherwise
  = panic "StgCmmPrim: doWriteOffAddrOp"
-- | Write an element to a @MutableByteArray#@, skipping the array
-- header, with an optional cast applied to the value before the store.
doWriteByteArrayOp :: Maybe MachOp
                   -> CmmType
                   -> [LocalReg]
                   -> [CmmExpr]
                   -> FCode ()
doWriteByteArrayOp pre_cast idx_ty regs args
  | [] <- regs
  , [addr, idx, val] <- args
  = do dflags <- getDynFlags
       mkBasicIndexedWrite (arrWordsHdrSize dflags) pre_cast addr idx_ty idx val
  | otherwise
  = panic "StgCmmPrim: doWriteByteArrayOp"
-- | Write a pointer into a @MutableArray#@, then perform the GC write
-- barrier: mark the array's info table dirty and set the corresponding
-- byte in the array's card table.
doWritePtrArrayOp :: CmmExpr
                  -> CmmExpr
                  -> CmmExpr
                  -> FCode ()
doWritePtrArrayOp addr idx val
  = do dflags <- getDynFlags
       let ty = cmmExprType dflags val
       mkBasicIndexedWrite (arrPtrsHdrSize dflags) Nothing addr ty idx val
       -- Mark the closure dirty so the GC knows it may contain
       -- young-generation pointers.
       emit (setInfo addr (CmmLit (CmmLabel mkMAP_DIRTY_infoLabel)))
   -- the write barrier.  We must write a byte into the mark table:
   -- bits8[a + header_size + StgMutArrPtrs_size(a) + x >> N]
       emit $ mkStore (
         cmmOffsetExpr dflags
          (cmmOffsetExprW dflags (cmmOffsetB dflags addr (arrPtrsHdrSize dflags))
                         (loadArrPtrsSize dflags addr))
          (CmmMachOp (mo_wordUShr dflags) [idx,
                                           mkIntExpr dflags (mUT_ARR_PTRS_CARD_BITS dflags)])
         ) (CmmLit (CmmInt 1 W8))
-- | Load the @ptrs@ field (element count) of a @StgMutArrPtrs@ closure.
loadArrPtrsSize :: DynFlags -> CmmExpr -> CmmExpr
loadArrPtrsSize dflags addr =
    CmmLoad (cmmOffsetB dflags addr ptrs_off) (bWord dflags)
  where
    -- Offset of the ptrs field: just past the fixed closure header.
    ptrs_off = fixedHdrSize dflags + oFFSET_StgMutArrPtrs_ptrs dflags
-- | Emit an indexed load into a local register, optionally applying a
-- cast to the loaded value.
mkBasicIndexedRead :: ByteOff      -- Initial offset in bytes
                   -> Maybe MachOp -- Optional result cast
                   -> CmmType      -- Type of element we are accessing
                   -> LocalReg     -- Destination
                   -> CmmExpr      -- Base address
                   -> CmmType      -- Type of element by which we are indexing
                   -> CmmExpr      -- Index
                   -> FCode ()
mkBasicIndexedRead off mb_cast ty res base idx_ty idx = do
    dflags <- getDynFlags
    let loaded = cmmLoadIndexOffExpr dflags off ty base idx_ty idx
        result = case mb_cast of
                   Nothing   -> loaded
                   Just cast -> CmmMachOp cast [loaded]
    emitAssign (CmmLocal res) result
-- | Emit an indexed store, optionally applying a cast to the value
-- before it is written.
mkBasicIndexedWrite :: ByteOff      -- Initial offset in bytes
                    -> Maybe MachOp -- Optional value cast
                    -> CmmExpr      -- Base address
                    -> CmmType      -- Type of element by which we are indexing
                    -> CmmExpr      -- Index
                    -> CmmExpr      -- Value to write
                    -> FCode ()
mkBasicIndexedWrite off mb_cast base idx_ty idx val = do
    dflags <- getDynFlags
    let val' = maybe val (\cast -> CmmMachOp cast [val]) mb_cast
    emitStore (cmmIndexOffExpr dflags off (typeWidth idx_ty) base idx) val'
-- ----------------------------------------------------------------------------
-- Misc utils
cmmIndexOffExpr :: DynFlags
-> ByteOff -- Initial offset in bytes
-> Width -- Width of element by which we are indexing
-> CmmExpr -- Base address
-> CmmExpr -- Index
-> CmmExpr
cmmIndexOffExpr dflags off width base idx
= cmmIndexExpr dflags width (cmmOffsetB dflags base off) idx
-- | Load of type @ty@ from the address computed by 'cmmIndexOffExpr'
-- (indexing is scaled by @idx_ty@'s width, which may differ from @ty@).
cmmLoadIndexOffExpr :: DynFlags
                    -> ByteOff  -- Initial offset in bytes
                    -> CmmType  -- Type of element we are accessing
                    -> CmmExpr  -- Base address
                    -> CmmType  -- Type of element by which we are indexing
                    -> CmmExpr  -- Index
                    -> CmmExpr
cmmLoadIndexOffExpr dflags off ty base idx_ty idx =
    CmmLoad elem_addr ty
  where
    elem_addr = cmmIndexOffExpr dflags off (typeWidth idx_ty) base idx
-- | Overwrite the info pointer of a closure (the first word it points
-- to) with a new info-table pointer.
setInfo :: CmmExpr -> CmmExpr -> CmmAGraph
setInfo = mkStore
------------------------------------------------------------------------------
-- Helpers for translating vector primops.

-- | The Cmm vector type for @n@ elements of the given category/width.
vecVmmType :: PrimOpVecCat -> Length -> Width -> CmmType
vecVmmType pocat n w = vec n elem_ty
  where
    elem_ty = vecCmmCat pocat w
-- | The Cmm element type constructor for a vector category: integer and
-- word vectors use plain bit types, float vectors use float types.
vecCmmCat :: PrimOpVecCat -> Width -> CmmType
vecCmmCat cat = case cat of
  IntVec   -> cmmBits
  WordVec  -> cmmBits
  FloatVec -> cmmFloat
-- | Cast to apply when injecting a scalar into a vector component:
-- sub-word integer/word components must first be narrowed from the
-- native word; floats and full-width components need no cast.
vecElemInjectCast :: DynFlags -> PrimOpVecCat -> Width -> Maybe MachOp
vecElemInjectCast dflags cat w = case (cat, w) of
  (IntVec,  W8)  -> Just (mo_WordTo8  dflags)
  (IntVec,  W16) -> Just (mo_WordTo16 dflags)
  (IntVec,  W32) -> Just (mo_WordTo32 dflags)
  (WordVec, W8)  -> Just (mo_WordTo8  dflags)
  (WordVec, W16) -> Just (mo_WordTo16 dflags)
  (WordVec, W32) -> Just (mo_WordTo32 dflags)
  _              -> Nothing
-- | Cast to apply when projecting a vector component out to a scalar:
-- sub-word components must be widened to the native word (sign-extended
-- for 'IntVec', zero-extended for 'WordVec'); floats and full-width
-- components need no cast.
vecElemProjectCast :: DynFlags -> PrimOpVecCat -> Width -> Maybe MachOp
vecElemProjectCast dflags cat w = case (cat, w) of
  (IntVec,  W8)  -> Just (mo_s_8ToWord  dflags)
  (IntVec,  W16) -> Just (mo_s_16ToWord dflags)
  (IntVec,  W32) -> Just (mo_s_32ToWord dflags)
  (WordVec, W8)  -> Just (mo_u_8ToWord  dflags)
  (WordVec, W16) -> Just (mo_u_16ToWord dflags)
  (WordVec, W32) -> Just (mo_u_32ToWord dflags)
  _              -> Nothing
-- Check to make sure that we can generate code for the specified vector type
-- given the current set of dynamic flags.
-- Rejects with 'sorry' (a user-visible "not supported" error) when the
-- backend is not LLVM or the required SIMD ISA extension is not enabled.
checkVecCompatibility :: DynFlags -> PrimOpVecCat -> Length -> Width -> FCode ()
checkVecCompatibility dflags vcat l w = do
    -- Only the LLVM backend can lower vector MachOps.
    when (hscTarget dflags /= HscLlvm) $ do
        sorry $ unlines ["SIMD vector instructions require the LLVM back-end."
                        ,"Please use -fllvm."]
    check vecWidth vcat l w
  where
    -- Dispatch on the total vector width; the guards test whether the
    -- ISA extension needed for that width/category is enabled.
    check :: Width -> PrimOpVecCat -> Length -> Width -> FCode ()
    check W128 FloatVec 4 W32 | not (isSseEnabled dflags) =
        sorry $ "128-bit wide single-precision floating point " ++
                "SIMD vector instructions require at least -msse."
    check W128 _ _ _ | not (isSse2Enabled dflags) =
        sorry $ "128-bit wide integer and double precision " ++
                "SIMD vector instructions require at least -msse2."
    check W256 FloatVec _ _ | not (isAvxEnabled dflags) =
        sorry $ "256-bit wide floating point " ++
                "SIMD vector instructions require at least -mavx."
    check W256 _ _ _ | not (isAvx2Enabled dflags) =
        sorry $ "256-bit wide integer " ++
                "SIMD vector instructions require at least -mavx2."
    check W512 _ _ _ | not (isAvx512fEnabled dflags) =
        sorry $ "512-bit wide " ++
                "SIMD vector instructions require -mavx512f."
    check _ _ _ _ = return ()

    vecWidth = typeWidth (vecVmmType vcat l w)
------------------------------------------------------------------------------
-- Helpers for translating vector packing and unpacking.

-- | Build a vector by inserting the given scalar elements, one at a
-- time, into successive lanes of an initial vector @z@ (typically all
-- zeros).  A fresh temporary is allocated for each intermediate vector.
doVecPackOp :: Maybe MachOp  -- Cast from element to vector component
            -> CmmType       -- Type of vector
            -> CmmExpr       -- Initial vector
            -> [CmmExpr]     -- Elements
            -> CmmFormal     -- Destination for result
            -> FCode ()
doVecPackOp maybe_pre_write_cast ty z es res = do
    dst <- newTemp ty
    emitAssign (CmmLocal dst) z
    vecPack dst es 0
  where
    -- Insert the remaining elements starting at lane i; when none
    -- remain, copy the accumulated vector into the result register.
    vecPack :: CmmFormal -> [CmmExpr] -> Int -> FCode ()
    vecPack src [] _ =
        emitAssign (CmmLocal res) (CmmReg (CmmLocal src))

    vecPack src (e : es) i = do
        dst <- newTemp ty
        -- Float vectors and integer/word vectors use distinct insert ops.
        if isFloatType (vecElemType ty)
          then emitAssign (CmmLocal dst) (CmmMachOp (MO_VF_Insert len wid)
                                                    [CmmReg (CmmLocal src), cast e, iLit])
          else emitAssign (CmmLocal dst) (CmmMachOp (MO_V_Insert len wid)
                                                    [CmmReg (CmmLocal src), cast e, iLit])
        vecPack dst es (i + 1)
      where
        -- vector indices are always 32-bits
        iLit = CmmLit (CmmInt (toInteger i) W32)

    cast :: CmmExpr -> CmmExpr
    cast val = case maybe_pre_write_cast of
                 Nothing   -> val
                 Just cast -> CmmMachOp cast [val]

    len :: Length
    len = vecLength ty

    wid :: Width
    wid = typeWidth (vecElemType ty)
-- | Extract every lane of a vector into the given result registers,
-- lane @i@ going to the @i@-th register, optionally casting each
-- extracted component.
doVecUnpackOp :: Maybe MachOp  -- Cast from vector component to element result
              -> CmmType       -- Type of vector
              -> CmmExpr       -- Vector
              -> [CmmFormal]   -- Element results
              -> FCode ()
doVecUnpackOp maybe_post_read_cast ty e res =
    vecUnpack res 0
  where
    -- Extract lane i into the next result register.
    vecUnpack :: [CmmFormal] -> Int -> FCode ()
    vecUnpack [] _ =
        return ()

    vecUnpack (r : rs) i = do
        -- Float vectors and integer/word vectors use distinct extract ops.
        if isFloatType (vecElemType ty)
          then emitAssign (CmmLocal r) (cast (CmmMachOp (MO_VF_Extract len wid)
                                             [e, iLit]))
          else emitAssign (CmmLocal r) (cast (CmmMachOp (MO_V_Extract len wid)
                                             [e, iLit]))
        vecUnpack rs (i + 1)
      where
        -- vector indices are always 32-bits
        iLit = CmmLit (CmmInt (toInteger i) W32)

    cast :: CmmExpr -> CmmExpr
    cast val = case maybe_post_read_cast of
                 Nothing   -> val
                 Just cast -> CmmMachOp cast [val]

    len :: Length
    len = vecLength ty

    wid :: Width
    wid = typeWidth (vecElemType ty)
-- | Insert a scalar into one lane of a vector, producing a new vector.
-- The lane index arrives as a native word and is narrowed to the
-- 32-bit index type that vector insert ops expect.
doVecInsertOp :: Maybe MachOp  -- Cast from element to vector component
              -> CmmType       -- Vector type
              -> CmmExpr       -- Source vector
              -> CmmExpr       -- Element
              -> CmmExpr       -- Index at which to insert element
              -> CmmFormal     -- Destination for result
              -> FCode ()
doVecInsertOp maybe_pre_write_cast ty src e idx res = do
    dflags <- getDynFlags
    -- vector indices are always 32-bits
    let idx' :: CmmExpr
        idx' = CmmMachOp (MO_SS_Conv (wordWidth dflags) W32) [idx]
    -- Float vectors and integer/word vectors use distinct insert ops.
    if isFloatType (vecElemType ty)
      then emitAssign (CmmLocal res) (CmmMachOp (MO_VF_Insert len wid) [src, cast e, idx'])
      else emitAssign (CmmLocal res) (CmmMachOp (MO_V_Insert len wid) [src, cast e, idx'])
  where
    cast :: CmmExpr -> CmmExpr
    cast val = case maybe_pre_write_cast of
                 Nothing   -> val
                 Just cast -> CmmMachOp cast [val]

    len :: Length
    len = vecLength ty

    wid :: Width
    wid = typeWidth (vecElemType ty)
------------------------------------------------------------------------------
-- Helpers for translating prefetching.
-- | Translate byte array prefetch operations into proper primcalls.
doPrefetchByteArrayOp :: Int
                      -> [CmmExpr]
                      -> FCode ()
doPrefetchByteArrayOp locality [addr,idx]
   = do dflags <- getDynFlags
        -- Offset past the array header so the prefetch hits the payload.
        mkBasicPrefetch locality (arrWordsHdrSize dflags) addr idx
doPrefetchByteArrayOp _ _
   = panic "StgCmmPrim: doPrefetchByteArrayOp"
-- | Translate mutable byte array prefetch operations into proper primcalls.
doPrefetchMutableByteArrayOp :: Int
                             -> [CmmExpr]
                             -> FCode ()
doPrefetchMutableByteArrayOp locality [addr,idx]
   = do dflags <- getDynFlags
        -- Offset past the array header so the prefetch hits the payload.
        mkBasicPrefetch locality (arrWordsHdrSize dflags) addr idx
doPrefetchMutableByteArrayOp _ _
   -- Fixed copy-paste bug: the panic previously blamed
   -- doPrefetchByteArrayOp, misleading anyone debugging a bad arity here.
   = panic "StgCmmPrim: doPrefetchMutableByteArrayOp"
-- | Translate address prefetch operations into proper primcalls.
doPrefetchAddrOp ::Int
                 -> [CmmExpr]
                 -> FCode ()
doPrefetchAddrOp locality [addr,idx]
   -- Raw addresses carry no header, hence offset 0.
   = mkBasicPrefetch locality 0 addr idx
doPrefetchAddrOp _ _
   = panic "StgCmmPrim: doPrefetchAddrOp"
-- | Translate value prefetch operations into proper primcalls.
doPrefetchValueOp :: Int
                  -> [CmmExpr]
                  -> FCode ()
doPrefetchValueOp locality [addr]
  = do dflags <- getDynFlags
       -- No index for a value prefetch: use a zero-word index literal.
       mkBasicPrefetch locality 0 addr (CmmLit (CmmInt 0 (wordWidth dflags)))
doPrefetchValueOp _ _
  = panic "StgCmmPrim: doPrefetchValueOp"
-- | helper to generate prefetch primcalls
mkBasicPrefetch :: Int         -- Locality level 0-3
                -> ByteOff     -- Initial offset in bytes
                -> CmmExpr     -- Base address
                -> CmmExpr     -- Index
                -> FCode ()
mkBasicPrefetch locality off base idx
   = do dflags <- getDynFlags
        -- Prefetch the byte at (base + off) + idx; no results expected.
        emitPrimCall [] (MO_Prefetch_Data locality) [cmmIndexExpr dflags W8 (cmmOffsetB dflags base off) idx]
        return ()
-- ----------------------------------------------------------------------------
-- Allocating byte arrays
-- | Takes a register to return the newly allocated array in and the
-- size of the new array in bytes. Allocates a new
-- 'MutableByteArray#'.
doNewByteArrayOp :: CmmFormal -> ByteOff -> FCode ()
doNewByteArrayOp res_r n = do
    dflags <- getDynFlags

    let info_ptr = mkLblExpr mkArrWords_infoLabel
        rep = arrWordsRep dflags n

    tickyAllocPrim (mkIntExpr dflags (arrWordsHdrSize dflags))
        (mkIntExpr dflags (nonHdrSize dflags rep))
        (zeroExpr dflags)

    let hdr_size = fixedHdrSize dflags

    -- Write the byte count into the StgArrWords header field.
    base <- allocHeapClosure rep info_ptr curCCS
                     [ (mkIntExpr dflags n,
                        hdr_size + oFFSET_StgArrWords_bytes dflags)
                     ]

    emit $ mkAssign (CmmLocal res_r) base
-- ----------------------------------------------------------------------------
-- Copying byte arrays
-- | Takes a source 'ByteArray#', an offset in the source array, a
-- destination 'MutableByteArray#', an offset into the destination
-- array, and the number of bytes to copy.  Copies the given number of
-- bytes from the source array to the destination array.
doCopyByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
                  -> FCode ()
doCopyByteArrayOp = emitCopyByteArray copy
  where
    -- Copy data (we assume the arrays aren't overlapping since
    -- they're of different types)
    copy _src _dst dst_p src_p bytes =
        do dflags <- getDynFlags
           -- Alignment hint 1: plain byte-granularity memcpy.
           emitMemcpyCall dst_p src_p bytes (mkIntExpr dflags 1)
-- | Takes a source 'MutableByteArray#', an offset in the source
-- array, a destination 'MutableByteArray#', an offset into the
-- destination array, and the number of bytes to copy.  Copies the
-- given number of bytes from the source array to the destination
-- array.
doCopyMutableByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
                         -> FCode ()
doCopyMutableByteArrayOp = emitCopyByteArray copy
  where
    -- The only time the memory might overlap is when the two arrays
    -- we were provided are the same array!
    -- TODO: Optimize branch for common case of no aliasing.
    copy src dst dst_p src_p bytes = do
        dflags <- getDynFlags
        -- Emit both lowerings and branch at runtime: memmove when the
        -- arrays alias (overlap-safe), memcpy otherwise.
        [moveCall, cpyCall] <- forkAlts [
            getCode $ emitMemmoveCall dst_p src_p bytes (mkIntExpr dflags 1),
            getCode $ emitMemcpyCall dst_p src_p bytes (mkIntExpr dflags 1)
            ]
        emit =<< mkCmmIfThenElse (cmmEqWord dflags src dst) moveCall cpyCall
-- Shared plumbing for the byte-array copy primops: compute payload
-- pointers (past the header) for source and destination, then hand
-- them to the supplied copy strategy.
emitCopyByteArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
                      -> FCode ())
                  -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
                  -> FCode ()
emitCopyByteArray copy src src_off dst dst_off n = do
    dflags <- getDynFlags
    dst_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags dst (arrWordsHdrSize dflags)) dst_off
    src_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags src (arrWordsHdrSize dflags)) src_off
    copy src dst dst_p src_p n
-- | Takes a source 'ByteArray#', an offset in the source array, a
-- destination 'Addr#', and the number of bytes to copy.  Copies the given
-- number of bytes from the source array to the destination memory region.
doCopyByteArrayToAddrOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
doCopyByteArrayToAddrOp src src_off dst_p bytes = do
    -- Use memcpy (we are allowed to assume the arrays aren't overlapping)
    dflags <- getDynFlags
    src_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags src (arrWordsHdrSize dflags)) src_off
    -- Alignment hint 1: plain byte-granularity memcpy.
    emitMemcpyCall dst_p src_p bytes (mkIntExpr dflags 1)
-- | Takes a source 'MutableByteArray#', an offset in the source array, a
-- destination 'Addr#', and the number of bytes to copy.  Copies the given
-- number of bytes from the source array to the destination memory region.
doCopyMutableByteArrayToAddrOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
                               -> FCode ()
-- Same lowering as the immutable case: an Addr# destination can never
-- alias the array payload in a way that needs memmove here.
doCopyMutableByteArrayToAddrOp = doCopyByteArrayToAddrOp
-- | Takes a source 'Addr#', a destination 'MutableByteArray#', an offset into
-- the destination array, and the number of bytes to copy.  Copies the given
-- number of bytes from the source memory region to the destination array.
doCopyAddrToByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
doCopyAddrToByteArrayOp src_p dst dst_off bytes = do
    -- Use memcpy (we are allowed to assume the arrays aren't overlapping)
    dflags <- getDynFlags
    dst_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags dst (arrWordsHdrSize dflags)) dst_off
    -- Alignment hint 1: plain byte-granularity memcpy.
    emitMemcpyCall dst_p src_p bytes (mkIntExpr dflags 1)
-- ----------------------------------------------------------------------------
-- Setting byte arrays
-- | Takes a 'MutableByteArray#', an offset into the array, a length,
-- and a byte, and sets each of the selected bytes in the array to the
-- character.
doSetByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
                 -> FCode ()
doSetByteArrayOp ba off len c
    = do dflags <- getDynFlags
         -- Start of the region to fill: payload base plus offset.
         p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags ba (arrWordsHdrSize dflags)) off
         emitMemsetCall p c len (mkIntExpr dflags 1)
-- ----------------------------------------------------------------------------
-- Allocating arrays
-- | Allocate a new array.
doNewArrayOp :: CmmFormal             -- ^ return register
             -> SMRep                 -- ^ representation of the array
             -> CLabel                -- ^ info pointer
             -> [(CmmExpr, ByteOff)]  -- ^ header payload
             -> WordOff               -- ^ array size
             -> CmmExpr               -- ^ initial element
             -> FCode ()
doNewArrayOp res_r rep info payload n init = do
    dflags <- getDynFlags

    let info_ptr = mkLblExpr info

    tickyAllocPrim (mkIntExpr dflags (hdrSize dflags rep))
        (mkIntExpr dflags (nonHdrSize dflags rep))
        (zeroExpr dflags)

    base <- allocHeapClosure rep info_ptr curCCS payload

    arr <- CmmLocal `fmap` newTemp (bWord dflags)
    emit $ mkAssign arr base

    -- Initialise all elements of the array
    p <- assignTemp $ cmmOffsetB dflags (CmmReg arr) (hdrSize dflags rep)
    for <- newLabelC
    emitLabel for
    let loopBody =
            [ mkStore (CmmReg (CmmLocal p)) init
            , mkAssign (CmmLocal p) (cmmOffsetW dflags (CmmReg (CmmLocal p)) 1)
            , mkBranch for ]
    -- Loop while p is still below one-past-the-last element slot.
    emit =<< mkCmmIfThen
        (cmmULtWord dflags (CmmReg (CmmLocal p))
         (cmmOffsetW dflags (CmmReg arr)
          (hdrSizeW dflags rep + n)))
        (catAGraphs loopBody)

    emit $ mkAssign (CmmLocal res_r) (CmmReg arr)
-- ----------------------------------------------------------------------------
-- Copying pointer arrays
-- EZY: This code has an unusually high amount of assignTemp calls, seen
-- nowhere else in the code generator. This is mostly because these
-- "primitive" ops result in a surprisingly large amount of code. It
-- will likely be worthwhile to optimize what is emitted here, so that
-- our optimization passes don't waste time repeatedly optimizing the
-- same bits of code.
-- More closely imitates 'assignTemp' from the old code generator:
-- bind the expression to a fresh temporary and hand back a 'CmmExpr'
-- referring to that temporary (rather than the raw 'LocalReg').
assignTempE :: CmmExpr -> FCode CmmExpr
assignTempE e = fmap (CmmReg . CmmLocal) (assignTemp e)
-- | Takes a source 'Array#', an offset in the source array, a
-- destination 'MutableArray#', an offset into the destination array,
-- and the number of elements to copy.  Copies the given number of
-- elements from the source array to the destination array.
doCopyArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff
              -> FCode ()
doCopyArrayOp = emitCopyArray copy
  where
    -- Copy data (we assume the arrays aren't overlapping since
    -- they're of different types)
    copy _src _dst dst_p src_p bytes =
        do dflags <- getDynFlags
           -- Word-aligned copy: these are arrays of pointers.
           emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes)
               (mkIntExpr dflags (wORD_SIZE dflags))
-- | Takes a source 'MutableArray#', an offset in the source array, a
-- destination 'MutableArray#', an offset into the destination array,
-- and the number of elements to copy.  Copies the given number of
-- elements from the source array to the destination array.
doCopyMutableArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff
                     -> FCode ()
doCopyMutableArrayOp = emitCopyArray copy
  where
    -- The only time the memory might overlap is when the two arrays
    -- we were provided are the same array!
    -- TODO: Optimize branch for common case of no aliasing.
    copy src dst dst_p src_p bytes = do
        dflags <- getDynFlags
        -- Emit both lowerings and branch at runtime on src == dst:
        -- memmove for the aliased case, memcpy otherwise.
        [moveCall, cpyCall] <- forkAlts [
            getCode $ emitMemmoveCall dst_p src_p (mkIntExpr dflags bytes)
                (mkIntExpr dflags (wORD_SIZE dflags)),
            getCode $ emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes)
                (mkIntExpr dflags (wORD_SIZE dflags))
            ]
        emit =<< mkCmmIfThenElse (cmmEqWord dflags src dst) moveCall cpyCall
-- Shared plumbing for the (large) pointer-array copy primops: dirty
-- the destination, compute payload pointers, run the copy strategy,
-- then dirty the affected cards in the destination's card table.
emitCopyArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> ByteOff
                  -> FCode ())  -- ^ copy function
              -> CmmExpr        -- ^ source array
              -> CmmExpr        -- ^ offset in source array
              -> CmmExpr        -- ^ destination array
              -> CmmExpr        -- ^ offset in destination array
              -> WordOff        -- ^ number of elements to copy
              -> FCode ()
emitCopyArray copy src0 src_off dst0 dst_off0 n = do
    dflags <- getDynFlags
    -- Zero elements is a no-op (and 'emitSetCards' requires n > 0).
    when (n /= 0) $ do
        -- Passed as arguments (be careful)
        src     <- assignTempE src0
        dst     <- assignTempE dst0
        dst_off <- assignTempE dst_off0

        -- Set the dirty bit in the header.
        emit (setInfo dst (CmmLit (CmmLabel mkMAP_DIRTY_infoLabel)))

        dst_elems_p <- assignTempE $ cmmOffsetB dflags dst
                       (arrPtrsHdrSize dflags)
        dst_p <- assignTempE $ cmmOffsetExprW dflags dst_elems_p dst_off
        src_p <- assignTempE $ cmmOffsetExprW dflags
                 (cmmOffsetB dflags src (arrPtrsHdrSize dflags)) src_off
        let bytes = wordsToBytes dflags n

        copy src dst dst_p src_p bytes

        -- The base address of the destination card table
        dst_cards_p <- assignTempE $ cmmOffsetExprW dflags dst_elems_p
                       (loadArrPtrsSize dflags dst)
        emitSetCards dst_off dst_cards_p n
-- SmallArray# to SmallMutableArray# copy (no card table involved).
doCopySmallArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff
                   -> FCode ()
doCopySmallArrayOp = emitCopySmallArray copy
  where
    -- Copy data (we assume the arrays aren't overlapping since
    -- they're of different types)
    copy _src _dst dst_p src_p bytes =
        do dflags <- getDynFlags
           emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes)
               (mkIntExpr dflags (wORD_SIZE dflags))
-- SmallMutableArray# to SmallMutableArray# copy; must handle aliasing.
doCopySmallMutableArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff
                          -> FCode ()
doCopySmallMutableArrayOp = emitCopySmallArray copy
  where
    -- The only time the memory might overlap is when the two arrays
    -- we were provided are the same array!
    -- TODO: Optimize branch for common case of no aliasing.
    copy src dst dst_p src_p bytes = do
        dflags <- getDynFlags
        -- Runtime branch on src == dst: memmove for overlap, else memcpy.
        [moveCall, cpyCall] <- forkAlts
            [ getCode $ emitMemmoveCall dst_p src_p (mkIntExpr dflags bytes)
                (mkIntExpr dflags (wORD_SIZE dflags))
            , getCode $ emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes)
                (mkIntExpr dflags (wORD_SIZE dflags))
            ]
        emit =<< mkCmmIfThenElse (cmmEqWord dflags src dst) moveCall cpyCall
-- Shared plumbing for the small-array copy primops: dirty the
-- destination header, compute payload pointers and invoke the copy
-- strategy.  Unlike 'emitCopyArray' there is no card table to update.
emitCopySmallArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> ByteOff
                       -> FCode ())  -- ^ copy function
                   -> CmmExpr        -- ^ source array
                   -> CmmExpr        -- ^ offset in source array
                   -> CmmExpr        -- ^ destination array
                   -> CmmExpr        -- ^ offset in destination array
                   -> WordOff        -- ^ number of elements to copy
                   -> FCode ()
emitCopySmallArray copy src0 src_off dst0 dst_off n = do
    dflags <- getDynFlags

    -- Passed as arguments (be careful)
    src <- assignTempE src0
    dst <- assignTempE dst0

    -- Set the dirty bit in the header.
    emit (setInfo dst (CmmLit (CmmLabel mkSMAP_DIRTY_infoLabel)))

    dst_p <- assignTempE $ cmmOffsetExprW dflags
             (cmmOffsetB dflags dst (smallArrPtrsHdrSize dflags)) dst_off
    src_p <- assignTempE $ cmmOffsetExprW dflags
             (cmmOffsetB dflags src (smallArrPtrsHdrSize dflags)) src_off
    let bytes = wordsToBytes dflags n

    copy src dst dst_p src_p bytes
-- | Takes an info table label, a register to return the newly
-- allocated array in, a source array, an offset in the source array,
-- and the number of elements to copy.  Allocates a new array and
-- initializes it from the source array.
emitCloneArray :: CLabel -> CmmFormal -> CmmExpr -> CmmExpr -> WordOff
               -> FCode ()
emitCloneArray info_p res_r src src_off n = do
    dflags <- getDynFlags

    let info_ptr = mkLblExpr info_p
        rep = arrPtrsRep dflags n

    tickyAllocPrim (mkIntExpr dflags (arrPtrsHdrSize dflags))
        (mkIntExpr dflags (nonHdrSize dflags rep))
        (zeroExpr dflags)

    let hdr_size = fixedHdrSize dflags

    -- Fill in the element-count and total-size fields of the
    -- StgMutArrPtrs header at allocation time.
    base <- allocHeapClosure rep info_ptr curCCS
        [ (mkIntExpr dflags n,
           hdr_size + oFFSET_StgMutArrPtrs_ptrs dflags)
        , (mkIntExpr dflags (nonHdrSizeW rep),
           hdr_size + oFFSET_StgMutArrPtrs_size dflags)
        ]

    arr <- CmmLocal `fmap` newTemp (bWord dflags)
    emit $ mkAssign arr base

    dst_p <- assignTempE $ cmmOffsetB dflags (CmmReg arr)
             (arrPtrsHdrSize dflags)
    src_p <- assignTempE $ cmmOffsetExprW dflags src
             (cmmAddWord dflags
              (mkIntExpr dflags (arrPtrsHdrSizeW dflags)) src_off)

    -- Word-aligned payload copy from the source slice.
    emitMemcpyCall dst_p src_p (mkIntExpr dflags (wordsToBytes dflags n))
        (mkIntExpr dflags (wORD_SIZE dflags))

    emit $ mkAssign (CmmLocal res_r) (CmmReg arr)
-- | Takes an info table label, a register to return the newly
-- allocated array in, a source array, an offset in the source array,
-- and the number of elements to copy.  Allocates a new small array and
-- initializes it from the source array.
emitCloneSmallArray :: CLabel -> CmmFormal -> CmmExpr -> CmmExpr -> WordOff
                    -> FCode ()
emitCloneSmallArray info_p res_r src src_off n = do
    dflags <- getDynFlags

    let info_ptr = mkLblExpr info_p
        rep = smallArrPtrsRep n

    tickyAllocPrim (mkIntExpr dflags (smallArrPtrsHdrSize dflags))
        (mkIntExpr dflags (nonHdrSize dflags rep))
        (zeroExpr dflags)

    let hdr_size = fixedHdrSize dflags

    -- Small arrays only carry the element count in their header
    -- (no separate total-size word, unlike StgMutArrPtrs).
    base <- allocHeapClosure rep info_ptr curCCS
        [ (mkIntExpr dflags n,
           hdr_size + oFFSET_StgSmallMutArrPtrs_ptrs dflags)
        ]

    arr <- CmmLocal `fmap` newTemp (bWord dflags)
    emit $ mkAssign arr base

    dst_p <- assignTempE $ cmmOffsetB dflags (CmmReg arr)
             (smallArrPtrsHdrSize dflags)
    src_p <- assignTempE $ cmmOffsetExprW dflags src
             (cmmAddWord dflags
              (mkIntExpr dflags (smallArrPtrsHdrSizeW dflags)) src_off)

    -- Word-aligned payload copy from the source slice.
    emitMemcpyCall dst_p src_p (mkIntExpr dflags (wordsToBytes dflags n))
        (mkIntExpr dflags (wORD_SIZE dflags))

    emit $ mkAssign (CmmLocal res_r) (CmmReg arr)
-- | Takes an offset in the destination array, the base address of
-- the card table, and the number of elements affected (*not* the
-- number of cards). The number of elements may not be zero.
-- Marks the relevant cards as dirty.
emitSetCards :: CmmExpr -> CmmExpr -> WordOff -> FCode ()
emitSetCards dst_start dst_cards_start n = do
    dflags <- getDynFlags
    start_card <- assignTempE $ cardCmm dflags dst_start
    -- Card covering the last affected element (inclusive).
    let end_card = cardCmm dflags
                   (cmmSubWord dflags
                    (cmmAddWord dflags dst_start (mkIntExpr dflags n))
                    (mkIntExpr dflags 1))
    -- memset 1 into every card from start_card to end_card inclusive.
    emitMemsetCall (cmmAddWord dflags dst_cards_start start_card)
        (mkIntExpr dflags 1)
        (cmmAddWord dflags (cmmSubWord dflags end_card start_card) (mkIntExpr dflags 1))
        (mkIntExpr dflags 1) -- no alignment (1 byte)
-- Convert an element index to a card index
-- (a right shift by MUT_ARR_PTRS_CARD_BITS).
cardCmm :: DynFlags -> CmmExpr -> CmmExpr
cardCmm dflags i =
    cmmUShrWord dflags i (mkIntExpr dflags (mUT_ARR_PTRS_CARD_BITS dflags))
------------------------------------------------------------------------------
-- SmallArray PrimOp implementations
-- Read the pointer element at 'idx' of a small array into 'res',
-- indexing past the SmallArray header.
doReadSmallPtrArrayOp :: LocalReg
                      -> CmmExpr
                      -> CmmExpr
                      -> FCode ()
doReadSmallPtrArrayOp res addr idx = do
    dflags <- getDynFlags
    mkBasicIndexedRead (smallArrPtrsHdrSize dflags) Nothing (gcWord dflags) res addr
        (gcWord dflags) idx
-- Write 'val' at element 'idx' of a small mutable array, then mark
-- the array dirty so the GC knows to rescan it.
doWriteSmallPtrArrayOp :: CmmExpr
                       -> CmmExpr
                       -> CmmExpr
                       -> FCode ()
doWriteSmallPtrArrayOp addr idx val = do
    dflags <- getDynFlags
    let ty = cmmExprType dflags val
    mkBasicIndexedWrite (smallArrPtrsHdrSize dflags) Nothing addr ty idx val
    emit (setInfo addr (CmmLit (CmmLabel mkSMAP_DIRTY_infoLabel)))
------------------------------------------------------------------------------
-- Atomic read-modify-write
-- | Emit an atomic modification to a byte array element. The result
-- reg contains the previous value of the element. Implies a full
-- memory barrier.
doAtomicRMW :: LocalReg      -- ^ Result reg
            -> AtomicMachOp  -- ^ Atomic op (e.g. add)
            -> CmmExpr       -- ^ MutableByteArray#
            -> CmmExpr       -- ^ Index
            -> CmmType       -- ^ Type of element by which we are indexing
            -> CmmExpr       -- ^ Op argument (e.g. amount to add)
            -> FCode ()
doAtomicRMW res amop mba idx idx_ty n = do
    dflags <- getDynFlags
    let width = typeWidth idx_ty
        -- Address of element 'idx', past the byte-array header.
        addr  = cmmIndexOffExpr dflags (arrWordsHdrSize dflags)
                width mba idx
    emitPrimCall
        [ res ]
        (MO_AtomicRMW width amop)
        [ addr, n ]
-- | Emit an atomic read to a byte array that acts as a memory barrier.
doAtomicReadByteArray
    :: LocalReg  -- ^ Result reg
    -> CmmExpr   -- ^ MutableByteArray#
    -> CmmExpr   -- ^ Index
    -> CmmType   -- ^ Type of element by which we are indexing
    -> FCode ()
doAtomicReadByteArray res mba idx idx_ty = do
    dflags <- getDynFlags
    let width = typeWidth idx_ty
        -- Address of element 'idx', past the byte-array header.
        addr  = cmmIndexOffExpr dflags (arrWordsHdrSize dflags)
                width mba idx
    emitPrimCall
        [ res ]
        (MO_AtomicRead width)
        [ addr ]
-- | Emit an atomic write to a byte array that acts as a memory barrier.
doAtomicWriteByteArray
    :: CmmExpr  -- ^ MutableByteArray#
    -> CmmExpr  -- ^ Index
    -> CmmType  -- ^ Type of element by which we are indexing
    -> CmmExpr  -- ^ Value to write
    -> FCode ()
doAtomicWriteByteArray mba idx idx_ty val = do
    dflags <- getDynFlags
    let width = typeWidth idx_ty
        -- Address of element 'idx', past the byte-array header.
        addr  = cmmIndexOffExpr dflags (arrWordsHdrSize dflags)
                width mba idx
    emitPrimCall
        [ {- no results -} ]
        (MO_AtomicWrite width)
        [ addr, val ]
-- Compare-and-swap one element of a byte array; the result register
-- receives the value observed at the address.
doCasByteArray
    :: LocalReg  -- ^ Result reg
    -> CmmExpr   -- ^ MutableByteArray#
    -> CmmExpr   -- ^ Index
    -> CmmType   -- ^ Type of element by which we are indexing
    -> CmmExpr   -- ^ Old value
    -> CmmExpr   -- ^ New value
    -> FCode ()
doCasByteArray res mba idx idx_ty old new = do
    dflags <- getDynFlags
    let width = (typeWidth idx_ty)
        -- Address of element 'idx', past the byte-array header.
        addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags)
               width mba idx
    emitPrimCall
        [ res ]
        (MO_Cmpxchg width)
        [ addr, old, new ]
------------------------------------------------------------------------------
-- Helpers for emitting function calls
-- | Emit a call to @memcpy@.
-- Arguments: destination, source, byte count, alignment hint.
emitMemcpyCall :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
emitMemcpyCall dst src n align = do
    emitPrimCall
        [ {-no results-} ]
        MO_Memcpy
        [ dst, src, n, align ]
-- | Emit a call to @memmove@ (overlap-safe copy).
-- Arguments: destination, source, byte count, alignment hint.
emitMemmoveCall :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
emitMemmoveCall dst src n align = do
    emitPrimCall
        [ {- no results -} ]
        MO_Memmove
        [ dst, src, n, align ]
-- | Emit a call to @memset@. The second argument must fit inside an
-- unsigned char.
-- Arguments: destination, fill byte, byte count, alignment hint.
emitMemsetCall :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
emitMemsetCall dst c n align = do
    emitPrimCall
        [ {- no results -} ]
        MO_Memset
        [ dst, c, n, align ]
-- Byte-swap 'x' at the given width, result in 'res'.
emitBSwapCall :: LocalReg -> CmmExpr -> Width -> FCode ()
emitBSwapCall res x width = do
    emitPrimCall
        [ res ]
        (MO_BSwap width)
        [ x ]
-- Population count (number of set bits) of 'x' at the given width.
emitPopCntCall :: LocalReg -> CmmExpr -> Width -> FCode ()
emitPopCntCall res x width = do
    emitPrimCall
        [ res ]
        (MO_PopCnt width)
        [ x ]
-- Count leading zeros of 'x' at the given width.
emitClzCall :: LocalReg -> CmmExpr -> Width -> FCode ()
emitClzCall res x width = do
    emitPrimCall
        [ res ]
        (MO_Clz width)
        [ x ]
-- Count trailing zeros of 'x' at the given width.
emitCtzCall :: LocalReg -> CmmExpr -> Width -> FCode ()
emitCtzCall res x width = do
    emitPrimCall
        [ res ]
        (MO_Ctz width)
        [ x ]
|
forked-upstream-packages-for-ghcjs/ghc
|
compiler/codeGen/StgCmmPrim.hs
|
bsd-3-clause
| 96,368
| 0
| 19
| 25,570
| 24,910
| 12,454
| 12,456
| -1
| -1
|
{-# LANGUAGE GADTs #-}
module STy where
-- | Type singletons: the constructor used pins down the index 'ty',
-- so pattern matching on an 'STy' refines what 'ty' is.
data STy ty where
  SIntTy :: STy Int
  SBoolTy :: STy Bool
  SMaybeTy :: STy a -> STy (Maybe a)
  SArr :: STy a -> STy b -> STy (a -> b)
-- | A default value at each type, driven by the singleton:
-- 0, False, Nothing, or a constant function returning the result
-- type's own zero.
zero :: STy ty -> ty
zero SIntTy = 0
zero SBoolTy = False
zero (SMaybeTy _) = Nothing
zero (SArr _ res) = const (zero res)
-- | Structural equality of two singletons for the *same* index 'ty'.
-- Because both arguments share the index, only matching constructors
-- can meet, so these four equations are exhaustive.  (Previously only
-- the 'SIntTy' case was live — the remaining cases were commented out,
-- making the function crash on every other input.)
eqSTy :: STy ty -> STy ty -> Bool
eqSTy SIntTy SIntTy = True
eqSTy SBoolTy SBoolTy = True
eqSTy (SMaybeTy t1) (SMaybeTy t2) = t1 `eqSTy` t2
eqSTy (t1 `SArr` t2) (t3 `SArr` t4) = t1 `eqSTy` t3 && t2 `eqSTy` t4
|
bwmcadams/lambdaconf-2015
|
speakers/goldfirere/pres/STy.hs
|
artistic-2.0
| 520
| 0
| 10
| 123
| 173
| 88
| 85
| 14
| 1
|
{-# OPTIONS -XRecursiveDo #-}
-- test scoping
module Main (main) where
import Control.Monad.Fix
import Data.Maybe ( fromJust )
-- Nested 'mdo' blocks exercising recursive-binding scope: the inner
-- block ties the knot @x = 1:x@ in 'Maybe', so @take 4 x@ is
-- [1,1,1,1]; the outer 'mdo' then re-binds the (shadowed) name x.
t = mdo x <- fromJust (mdo x <- Just (1:x)
                           return (take 4 x))
        return x
main :: IO ()
-- Print the knot-tied result computed by 't'.
main = print t
|
holzensp/ghc
|
testsuite/tests/mdo/should_compile/mdo005.hs
|
bsd-3-clause
| 241
| 0
| 17
| 57
| 98
| 52
| 46
| -1
| -1
|
{-# Language MagicHash, UnboxedTuples #-}
-- | Note: extensive testing of atomic operations is performed in the
-- "atomic-primops" library. Only extremely rudimentary tests appear
-- here.
import GHC.IO
import GHC.IORef
import GHC.ST
import GHC.STRef
import GHC.Prim
import GHC.Base
import Data.Primitive.Array
import Data.IORef
import Control.Monad
------------------------------------------------------------------------
-- | CAS on one slot of a 'MutableArray'; returns whether the swap
-- happened plus the value observed.  Like 'casMutVar#' below, the
-- primop's Int# result appears to be 0 on success (flipped to a Bool
-- here) — confirm against the casArray# primop docs.
casArrayST :: MutableArray s a -> Int -> a -> a -> ST s (Bool, a)
casArrayST (MutableArray arr#) (I# i#) old new = ST$ \s1# ->
 case casArray# arr# i# old new s1# of
   (# s2#, x#, res #) -> (# s2#, (isTrue# (x# ==# 0#), res) #)
-- | CAS on an 'STRef': pointer-compare against @old@ and install
-- @new@ on success; returns whether the swap happened and the value
-- observed.
casSTRef :: STRef s a -- ^ The 'STRef' containing a value 'current'
         -> a -- ^ The 'old' value to compare
         -> a -- ^ The 'new' value to replace 'current' if @old == current@
         -> ST s (Bool, a)
casSTRef (STRef var#) old new = ST $ \s1# ->
   -- The primop treats the boolean as a sort of error code.
   -- Zero means the CAS worked, one that it didn't.
   -- We flip that here:
   case casMutVar# var# old new s1# of
     (# s2#, x#, res #) -> (# s2#, (isTrue# (x# ==# 0#), res) #)
-- | Performs a machine-level compare and swap operation on an
-- 'IORef'. Returns a tuple containing a 'Bool' which is 'True' when a
-- swap is performed, along with the 'current' value from the 'IORef'.
--
-- Note \"compare\" here means pointer equality in the sense of
-- 'GHC.Prim.reallyUnsafePtrEquality#'.
casIORef :: IORef a -- ^ The 'IORef' containing a value 'current'
         -> a -- ^ The 'old' value to compare
         -> a -- ^ The 'new' value to replace 'current' if @old == current@
         -> IO (Bool, a)
-- Delegates to 'casSTRef' on the IORef's underlying 'STRef'.
casIORef (IORef var) old new = stToIO (casSTRef var old new)
------------------------------------------------------------------------
-- Make sure this Int corresponds to a single object in memory (NOINLINE):
-- the pointer-equality CAS below relies on 'mynum' being one shared
-- heap object rather than re-created at each use site.
{-# NOINLINE mynum #-}
mynum :: Int
mynum = 33
main = do
  putStrLn "Perform a CAS within an IORef"
  ref <- newIORef mynum
  -- First CAS observes 'mynum' and succeeds; the second compares
  -- against a now-stale expected value and must fail.
  res <- casIORef ref mynum 44
  res2 <- casIORef ref mynum 44
  putStrLn$ " 1st try should succeed: "++show res
  putStrLn$ " 2nd should fail: "++show res2
  ------------------------------------------------------------
  putStrLn "Perform a CAS within a MutableArray#"
  arr <- newArray 5 mynum
  -- Same succeed-then-fail pattern on array slot 3 (note: 'res' and
  -- 'res2' shadow the earlier bindings).
  res <- stToIO$ casArrayST arr 3 mynum 44
  res2 <- stToIO$ casArrayST arr 3 mynum 44
  putStrLn$ " 1st try should succeed: "++show res
  putStrLn$ " 2nd should fail: "++show res2
  putStrLn "Printing array:"
  forM_ [0..4] $ \ i -> do
    x <- readArray arr i
    putStr (" "++show x)
  putStrLn ""
  ------------------------------------------------------------
  putStrLn "Done."
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/concurrent/should_run/compareAndSwap.hs
|
bsd-3-clause
| 2,727
| 0
| 14
| 578
| 601
| 308
| 293
| 48
| 1
|
module Bowling where
import Debug.Trace
-- | Bonus lookahead used for strike scoring: the sum of the next two
-- throws.  Errors out when fewer than two throws remain.
nextTwoThrows :: [Int] -> Int
nextTwoThrows throws =
  case throws of
    (a:b:_) -> a + b
    _       -> error "NextTwoThrows: Not enough elements"
-- | Bonus lookahead used for spare scoring: the very next throw.
-- Errors out on an empty throw list.
nextThrow :: [Int] -> Int
nextThrow throws =
  case throws of
    (first:_) -> first
    _         -> error "NextThrow: Not enough elements"
-- | Per-round scores for the given number of remaining rounds.
-- A strike (10) scores 10 plus the next two throws and consumes one
-- throw; a spare scores 10 plus the next throw; otherwise a round
-- scores its two throws.  Equation order matters: the round counter
-- is checked first, then the strike case.
score :: Int -> [Int] -> [Int]
score 0 _ = []
score _ [] = error "Something went wrong"
score rounds (10:xs) = 10 + nextTwoThrows xs : score (rounds - 1) xs
score rounds (x:y:xs)
  | x + y == 10 = 10 + nextThrow xs : score (rounds - 1) xs
  | otherwise = x + y : score (rounds - 1) xs
score rounds xs = error $ "Score: Unknown: " ++ show xs ++ " rounds = " ++ show rounds
-- return (ThrowScore, RoundScore, Result)
-- score :: [Int] -> (Int, Int, [Int])
-- score [] = (0, 0, [])
-- score [10] = (10, 10, [10])
-- score [x] = (x, x, [])
-- score (x:y:xs)
-- | x == 10 = let (_, roundScore, rest) = score (y:xs)
-- in (x, x + roundScore, (x + roundScore):rest)
-- | x + y == 10 = let (throwScore, _, rest) = score xs
-- in (x, x + y + throwScore, (x + y + throwScore):rest)
-- | otherwise = let (_, _, rest) = score xs
-- in (x, x + y, (x + y):rest)
-- | Running totals of a list of per-round scores, starting from an
-- initial accumulator.
cumulativeScore :: Int -> [Int] -> [Int]
cumulativeScore _ [] = []
cumulativeScore acc (s:rest) =
  let total = acc + s
  in total : cumulativeScore total rest
-- | Cumulative scoreboard for a game of the given number of rounds.
-- Emits a Debug.Trace line with the round count as a side effect.
roundsScore :: Int -> [Int] -> [Int]
roundsScore rounds scores = trace ("rounds = " ++ show rounds) $
                            let roundScores = score rounds scores
                            in cumulativeScore 0 roundScores
|
fredmorcos/attic
|
snippets/haskell/Bowling.hs
|
isc
| 1,522
| 0
| 10
| 379
| 467
| 244
| 223
| 24
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Lish core
module Lish.Core
(
runLish
) where
import Data.Fix
import qualified Data.Map.Strict as Map
import GHC.IO.Handle (hGetContents)
import Pipes
import Prelude (String, lines)
import Protolude hiding (for, many, show, (<|>))
import System.Console.Haskeline
import System.Environment (getEnvironment)
import Text.Parsec (ParseError)
import Lish.Balanced (Balanced (..), checkBalanced)
import Lish.Eval (reduceLambda)
import Lish.Parser (parseCmd)
import Lish.Types
-- | Start an interactive lish shell
runLish :: IO ()
runLish = do
  env <- toEnv <$> getEnvironment
  -- load core
  runInputT (defaultSettings { historyFile = Just ".lish-history" })
            (do
               -- load lish core
               fileContent <- liftIO $ readFile "lish/core.lsh"
               -- Evaluate the whole core file as one "(do ...)" form to
               -- seed the environment before the REPL starts.
               newEnv <- eval env (fmap unFix (parseCmd ("(do " <> fileContent <> ")")))
               mainLoop Nothing newEnv "")
-- | System Environment -> LISH Env
toEnv :: [(String,String)] -> Env
toEnv env =
  env &
  -- Wrap each value as a lish string ('Str'), keyed by variable name.
  map (\(k,v) -> (toS k, Str (toS v))) &
  Map.fromList
-- | Main REPL loop / Interpreter
-- The first argument is a @Maybe Char@: it contains the delimiter on
-- the stack that verifies whether the expression is balanced.
-- So if the first argument is not Nothing, it means we are in the middle
-- of a multiline expression.
mainLoop :: Maybe Char -- ^ Whether we are in the middle of writing a multiline expression
         -> Env        -- ^ The Lish environment
         -> Text       -- ^ The previous partial input (if in the middle of a multiline expression)
         -> InputT IO ()
mainLoop mc env previousPartialnput = do
  maybeLine <- getInputLine (toS (prompt mc env))
  case maybeLine of
    -- EOF (control-d) and the explicit quit commands all end the loop.
    x | x `elem` [ Nothing -- EOF / control-d
                 , Just "bye"
                 , Just "exit"
                 , Just "logout"] -> outputStrLn "bye bye!"
    Just line -> do
      -- Glue the new line onto any pending partial multiline input.
      let exprs = previousPartialnput
                  <> (if isJust mc then " " else "")
                  <> toS line
      case checkBalanced exprs empty of
        -- Still unbalanced: remember the open delimiter and keep reading.
        Unbalanced c -> mainLoop (Just c) env exprs
        Balanced -> do
          newenv <- eval env (fmap unFix (parseCmd ("(" <> exprs <> ")")))
          mainLoop Nothing newenv ""
    -- Unreachable: 'Nothing' is consumed by the guard above.
    _ -> panic "That should NEVER Happens, please file bug"
-- | REPL prompt: a continuation prompt while a multiline expression is
-- open, otherwise the user's PROMPT variable (falling back to a default).
prompt :: Maybe Char -> Env -> Text
prompt mc env = case mc of
  Just _ -> ">>> "
  Nothing -> case Map.lookup "PROMPT" env of
    Just (Str p) -> p
    _ -> ":€ > "
-- | Eval the reduced form.
-- 'Void' and empty streams produce no output; non-empty streams are
-- dumped line-by-line to stdout; anything else is pretty-printed.
evalReduced :: SExp -> IO ()
evalReduced Void = return ()
evalReduced (Stream Nothing) = return ()
evalReduced (Stream (Just h)) = streamToStdout h
evalReduced (WaitingStream Nothing) = return ()
evalReduced (WaitingStream (Just h)) = streamToStdout h
evalReduced x = putStrLn (pprint (Fix x))

-- | Print every line read from the handle.  Shared by the 'Stream'
-- and 'WaitingStream' cases of 'evalReduced', whose bodies were
-- previously duplicated verbatim.
streamToStdout h = do
  cmdoutput <- hGetContents h
  let splittedLines = lines cmdoutput
      producer = mapM_ yield splittedLines
  runEffect (for producer (lift . putStrLn))
-- | Evaluate the parsed expr
eval :: Env -> Either ParseError SExp -> InputT IO Env
eval env parsed = case parsed of
  Right sexpr -> liftIO $ do
    -- Reduce in a state monad carrying the environment, run the
    -- resulting effects, then hand back the (possibly updated) env.
    (reduced,newenv) <- runStateT (reduceLambda sexpr) env
    evalReduced reduced
    return newenv
  -- Parse errors are reported but leave the environment untouched.
  Left err -> outputStrLn (show err) >> return env
|
yogsototh/lish
|
src/Lish/Core.hs
|
isc
| 3,721
| 0
| 25
| 1,131
| 1,012
| 517
| 495
| 80
| 5
|
module Oden.Output.Instantiate where
import Text.PrettyPrint.Leijen
import Oden.Compiler.Instantiate
import Oden.Output
import Oden.Pretty ()
-- | Rendering of instantiation errors for Oden's output machinery.
instance OdenOutput InstantiateError where
  outputType _ = Error

  -- Stable, namespaced identifiers for each error case.
  name TypeMismatch{}       = "Instantiate.TypeMismatch"
  name SubstitutionFailed{} = "Instantiate.SubstitutionFailed"

  -- One-line summaries.
  header TypeMismatch{} _ =
    text "Type mismatch in instantiation"
  header (SubstitutionFailed _ tvar _) s =
    text "Substitution failed for type variable " <+> code s (pretty tvar)

  -- Longer explanations with the offending types rendered as code.
  details (TypeMismatch _ pt mt) s =
    text "Polymorphic type" <+> code s (pretty pt)
    <+> text "cannot be instantiated to" <+> code s (pretty mt)
  details (SubstitutionFailed _ _ vars) s =
    text "Type variables in context:" <+> hcat (map (code s . pretty) vars)

  -- Both error cases carry source info in their first field.
  sourceInfo (TypeMismatch si _ _)       = Just si
  sourceInfo (SubstitutionFailed si _ _) = Just si
|
oden-lang/oden
|
src/Oden/Output/Instantiate.hs
|
mit
| 951
| 0
| 12
| 229
| 277
| 138
| 139
| 20
| 0
|
module HaSC.Prim.ObjInfo where
import Data.List
-- | Symbol-table entry: a declared object's name, kind
-- (variable/function/...), C type, and static nesting level.
data ObjInfo = ObjInfo { objName  :: String,
                         objKind  :: Kind,
                         objCtype :: CType,
                         objLevel :: Level
                       }deriving(Eq, Ord)
-- Compact "name:level" rendering; kind and type are omitted.
instance Show ObjInfo where
    show (ObjInfo name _ _ lev) = name ++ ":" ++ show lev
-- | Static nesting depth of a declaration.
type Level = Int

-- | What kind of object a declaration introduces.
data Kind = Var | Func | FuncProto | Parm deriving(Show, Eq, Ord)

-- | The subset of C types this compiler handles; 'CTemp' is an
-- internal placeholder.  'Ord' is derived, so 'CVoid' (listed first)
-- is the minimum — 'synType' below relies on this ordering.
data CType = CVoid
           | CInt
           | CPointer CType
           | CArray CType Integer
           | CFun CType [CType]
           | CTemp
           deriving(Ord)
-- | Human-readable C-like rendering of types.  Only @int *@ pointers
-- are printed specially; other pointer types fall through to the
-- catch-all.
instance Show CType where
    show (CInt)           = "int"
    show (CTemp)          = "temp"
    show (CVoid)          = "void"
    show (CPointer CInt)  = "int *"
    show (CArray ty size) = show ty ++ "[" ++ show size ++ "]"
    -- 'intercalate' replaces the hand-rolled 'concat $ intersperse'
    -- (Data.List is already imported unqualified at the top of the file).
    show (CFun ty args)   = concat ["(", intercalate ", " (map show args),
                                    ") -> ", show ty]
    show _                = "*** complex type... cannot show ***"
-- | Type *compatibility* rather than strict structural equality:
-- arrays compare equal ignoring their size, and an array of T is
-- considered equal to a pointer to T in either order (mirroring C's
-- array-to-pointer decay).
instance Eq CType where
    (==) CInt CInt = True
    (==) CTemp CTemp = True
    (==) CVoid CVoid = True
    (==) (CPointer ty1) (CPointer ty2) = ty1 == ty2
    (==) (CArray ty1 _) (CArray ty2 _) = ty1 == ty2
    (==) (CFun ty1 args1) (CFun ty2 args2) = (ty1 == ty2) && (args1 == args2)
    (==) (CArray ty1 _) (CPointer ty2) = ty1 == ty2
    (==) (CPointer ty1) (CArray ty2 _) = ty1 == ty2
    (==) _ _ = False
-- | Does the type mention 'CVoid' anywhere, directly or beneath
-- pointers/arrays?  (Function types are not descended into.)
containVoid :: CType -> Bool
containVoid ty =
  case ty of
    CVoid        -> True
    CArray t _   -> containVoid t
    CPointer t   -> containVoid t
    _            -> False
-- Only "void vs non-void" matters here, so the types are combined
-- with 'max': 'CVoid' is the smallest constructor under the derived
-- 'Ord', so any non-void operand wins.
synType :: CType -> CType -> CType
synType = max
|
yu-i9/HaSC
|
src/HaSC/Prim/ObjInfo.hs
|
mit
| 1,876
| 0
| 11
| 701
| 627
| 340
| 287
| 44
| 1
|
-- Vasya and System of Equations
-- http://www.codewars.com/kata/556eed2836b302917b0000a3/
module Codewars.Kata.SystemQuadratic where
-- | Count non-negative integer pairs (a, b) satisfying the system
-- a^2 + b == n and b^2 + a == m.  Candidates need only range up to
-- ceil(sqrt n) and ceil(sqrt m) respectively.
solution :: Integer -> Integer -> Integer
solution n m =
  sum [ 1 | a <- candidates n
          , b <- candidates m
          , a ^ 2 + b == n && b ^ 2 + a == m ]
  where
    candidates k = [0 .. ceiling (sqrt (fromIntegral k))]
|
gafiatulin/codewars
|
src/6 kyu/SystemQuadratic.hs
|
mit
| 317
| 0
| 14
| 51
| 120
| 64
| 56
| 3
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.