| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
{-# LANGUAGE TupleSections, OverloadedStrings, NoImplicitPrelude #-}
module ParseEmail
( parseEmail
, flatten
, getAttachments
, getPart
, subject
, Email(..)
) where
import ClassyPrelude hiding (try, (<|>))
import Prelude (tail)
import Text.ParserCombinators.Parsec (parse, manyTill, anyChar, try, string, eof, (<?>), (<|>))
import Text.Parsec.Prim (ParsecT)
import Text.Parsec.Error (ParseError)
import Data.Functor.Identity (Identity)
data Email = Email String Content deriving (Eq, Show)
data Content = Multipart String [Content]
| Singlepart String String deriving (Eq, Show)
data Attachment = Attachment {
extension :: String,
headers :: [String],
fileData :: String
} deriving (Eq, Ord, Show)
subject :: Email -> Either ParseError String
subject (Email header content) = parse subjectFormat "(unknown)" header
subjectFormat :: ParsecT [Char] u Identity String
subjectFormat = do
manyTill line $ try (string "Subject: ")
subject <- manyTill anyChar eol
return subject
getContentPart :: String -> Content -> String
getContentPart part (Multipart x contents) = concatMap (getContentPart part) contents
getContentPart part (Singlepart contentType lines) = if (part == contentType)
then show lines
else ""
getPart :: String -> Email -> String
getPart partName (Email x contents) = getContentPart partName contents
flatten :: Email -> [Content]
flatten (Email header content) = flattenContent content
flattenContent :: Content -> [Content]
flattenContent content = case content of
Multipart _ contents -> concatMap flattenContent contents
Singlepart _ _ -> [content]
getAttachments :: [Content] -> [Attachment]
getAttachments = mapMaybe convertToAttachment
convertToAttachment :: Content -> Maybe Attachment
convertToAttachment content = case content of
Multipart contentType contents -> Nothing
Singlepart contentType headersAndData -> case headMay (lines headersAndData) of
Nothing -> Nothing
Just firstLine -> if not ("name" `isInfixOf` firstLine)
then Nothing
else let fileData = tail $ dropWhile (/= "") (lines headersAndData)
headers = takeWhile (/= "") (lines headersAndData)
in Just $ Attachment contentType headers (concat fileData)
parseEmail :: String -> Either ParseError Email
parseEmail = parse emailFormat "(unknown)"
emailFormat :: ParsecT [Char] u Identity Email
emailFormat = do
(header, contentType) <- getHeaders
body <- emailContent contentType Nothing
return $ Email header body
contentFormat :: Maybe [String] -> ParsecT [Char] u Identity Content
contentFormat boundary = do
(header, contentType) <- getHeaders
body <- emailContent contentType boundary
return body
getHeaders :: ParsecT [Char] u Identity ([Char], [Char])
getHeaders = do
header <- manyTill anyChar $ try (string "Content-Type: ")
contentType <- manyTill anyChar $ string "; "
return (header, contentType)
emailContent :: String -> Maybe [String] -> ParsecT [Char] u Identity Content
emailContent contentType boundary =
if "multipart" `isInfixOf` contentType
then do
manyTill anyChar $ try (string "boundary=")
thisBoundary <- manyTill anyChar eol
newBoundary <- return $ maybe [thisBoundary] (thisBoundary :) boundary
eol
body <- multipart $ Just newBoundary
return $ Multipart contentType body
else do
content <- notBoundaryLines boundary
return $ Singlepart contentType content
multipart :: Maybe [String] -> ParsecT [Char] u Identity [Content]
multipart boundary = do
contents <- manyTill (contentFormat boundary) eof
return contents
line :: ParsecT [Char] u Identity [Char]
line = manyTill anyChar eol
-- Eats newlines
notBoundaryLines :: Maybe [String] -> ParsecT [Char] u Identity [Char]
notBoundaryLines boundary = do
curLine <- line
if maybeInfix curLine boundary
then return ""
else notBoundaryLines boundary >>= (\lines -> return $ curLine ++ lines)
maybeInfix :: String -> Maybe [String] -> Bool
maybeInfix string = maybe False ((any . flip isInfixOf) string)
boundaries :: [String] -> ParsecT [Char] u Identity [Char]
boundaries [] = try (string "hopefully this never matches #HACK aewjfkccnas")
boundaries [x] = try (string x) <?> "boundary"
boundaries (x:xs) = try (string x) <|> boundaries xs
eol :: ParsecT [Char] u Identity [Char]
eol = try (string "\n\r")
<|> try (string "\r\n")
<|> string "\n"
<|> string "\r"
<?> "end of line"
|
MattWis/smallEmail
|
smallEmail/ParseEmail.hs
|
Haskell
|
mit
| 4,544
|
{-# LANGUAGE OverloadedStrings #-}
module Console.GitHubStats.StatsSpec where
import Test.Hspec
import Console.GitHubStats.Stats
import Console.GitHubStats.Types
spec :: Spec
spec =
describe "mkHistogram" $ do
it "sorts languages in ascending order" $ do
let repos =
[ Repository { repoLanguage = Just "PureScript" }
, Repository { repoLanguage = Just "Ruby" }
, Repository { repoLanguage = Just "Haskell" }
, Repository { repoLanguage = Just "Haskell" }
, Repository { repoLanguage = Just "Ruby" }
, Repository { repoLanguage = Just "Haskell" }
]
shouldBe
(mkHistogram repos)
[ "### Haskell 3"
, "## Ruby 2"
, "# PureScript 1"
]
it "discards repositories without a language" $ do
let repos =
[ Repository { repoLanguage = Nothing }
, Repository { repoLanguage = Just "Haskell" }
, Repository { repoLanguage = Just "Haskell" }
]
mkHistogram repos `shouldBe` [ "## Haskell 2" ]
|
acamino/ghs
|
test/Console/GitHubStats/StatsSpec.hs
|
Haskell
|
mit
| 1,118
|
module Core.LambdaLift.MFE
( identifyMFE
) where
import Common
import Core.AST
import Core.AnnotAST
import Core.Prelude
identifyMFE :: AnnotProgram Int (Annot Int Name) -> Program (Annot Int Name)
identifyMFE = Program . map identifySC . getProgramF
where identifySC (SupercombF name [] body) = Supercomb name [] body'
where body' = identifyExpr 0 body
transformMFE :: Int -> Expr (Annot Int Name) -> Expr (Annot Int Name)
transformMFE k e = ELet False [(Annot (k, anonym), e)] (EVar anonym)
-- Check whether an expression is definitely not a redex (and hence not a candidate for MFE extraction)
notCandidate :: Expr (Annot Int Name) -> Bool
notCandidate e = case e of
EVar _ -> True
ENum _ -> True
EConstr _ _ -> True
EAp (EVar v) _ -> elem v operators
_ -> False
identifyExpr :: Int -> AnnotExpr Int (Annot Int Name) -> Expr (Annot Int Name)
identifyExpr cxt a@(Annot (k, e))
| cxt == k || notCandidate e' = e'
| otherwise = transformMFE k e'
where e' = identifyExpr1 a
identifyExpr1 :: AnnotExpr Int (Annot Int Name) -> Expr (Annot Int Name)
identifyExpr1 (Annot (k, e)) = case e of
EVarF v -> EVar v
ENumF n -> ENum n
EConstrF tag arity -> EConstr tag arity
EApF e1 e2 -> EAp (identifyExpr k e1) (identifyExpr k e2)
ELetF rec defs body -> ELet rec defs' body'
where defs' = [(Annot (k, x), identifyExpr k e) | (Annot (k, x), e) <- defs]
body' = identifyExpr k body
ECaseF e alts -> ECase (identifyExpr k e) (map (identifyAlter k) alts)
EAbsF args body -> EAbs args (identifyExpr k' body)
where k' = getAnnot (head args)
identifyAlter :: Int -> AnnotAlter Int (Annot Int Name) -> Alter (Annot Int Name)
identifyAlter k (AlterF tag xs body) = Alter tag xs (identifyExpr k body)
|
meimisaki/Rin
|
src/Core/LambdaLift/MFE.hs
|
Haskell
|
mit
| 1,669
|
module Ternary.Performance (
performanceTest, evalPerformance) where
import System.TimeIt
import Ternary.Core.Digit (T2(..))
import Ternary.Util.Misc (forceElements, forceElementsIO)
import Ternary.List.Exact
import Ternary.List.ExactNum ()
import Ternary.Compiler.ArrayLookup (warmup)
import Ternary.Sampling.Expression
import Ternary.Sampling.Evaluation
import Ternary.QuickCheckUtil (randomsR)
randomT2s :: Int -> [T2]
randomT2s seed = map toEnum (randomsR seed (0,4))
randomExact :: Int -> Exact
randomExact seed = Exact (randomT2s seed) 0
assertWarm :: IO ()
assertWarm = putStr " Warmup: " >> timeIt warmup
-- The time needed to construct random test samples must be excluded
-- from measurements. On the flip side, the time to construct the
-- final result of a computation must be included. The following
-- ensures the first n digits of an exact number are fully evaluated:
force :: Int -> Exact -> IO ()
force n = (return $!) . forceElements . take n . streamDigits
timeMultiplication :: Int -> Exact -> Exact -> IO ()
timeMultiplication n x y = do
force (n+2) x
force (n+2) y
putStr " Array Lookup "
time multiplyAltAL
putStr " Array State "
time multiplyAltAS
where
time (**) = timeIt $ force n (x ** y)
performanceTest :: IO ()
performanceTest = do
putStrLn "\nPerformance:"
assertWarm
timeMultiplication 6000 (randomExact 0) (randomExact 1)
timeExpressionEval :: Expr -> [T2] -> IO ()
timeExpressionEval expr as = do
forceElementsIO as
len <- time (evalFinite1 expr as)
time (take len (streamDigits $ smartEval expr binding))
putStrLn ("Number of output digits = " ++ show len)
where
binding = bind (Exact as 0)
time list = timeIt (forceElementsIO list >> return (length list))
evalPerformance :: IO ()
evalPerformance = do
timeExpressionEval (extreme Mins 20000) (take 5 $ randomT2s 0)
timeExpressionEval (extreme Plus 60) (take 8000 $ randomT2s 0)
|
jeroennoels/exact-real
|
test/Ternary/Performance.hs
|
Haskell
|
mit
| 1,887
|
-- | <https://tools.ietf.org/html/rfc4511#section-4.11 Abandon> operation.
--
-- This operation comes in two flavours:
--
-- * asynchronous, 'IO' based ('abandonAsync')
--
-- * asynchronous, 'STM' based ('abandonAsyncSTM')
--
-- Of those, the first one ('abandonAsync') is probably the most useful for the typical use case.
--
-- Synchronous variants are unavailable because the Directory does not
-- respond to @AbandonRequest@s.
module Ldap.Client.Abandon
( abandonAsync
, abandonAsyncSTM
) where
import Control.Monad (void)
import Control.Monad.STM (STM, atomically)
import qualified Ldap.Asn1.Type as Type
import Ldap.Client.Internal
-- | Perform the Abandon operation asynchronously.
abandonAsync :: Ldap -> Async a -> IO ()
abandonAsync l =
atomically . abandonAsyncSTM l
-- | Perform the Abandon operation asynchronously.
abandonAsyncSTM :: Ldap -> Async a -> STM ()
abandonAsyncSTM l =
void . sendRequest l die . abandonRequest
where
die = error "Ldap.Client.Abandon: do not wait for the response to AbandonRequest"
abandonRequest :: Async a -> Request
abandonRequest (Async i _) =
Type.AbandonRequest i
|
VictorDenisov/ldap-client
|
src/Ldap/Client/Abandon.hs
|
Haskell
|
bsd-2-clause
| 1,166
|
{-# LANGUAGE TemplateHaskell, KindSignatures, TypeFamilies, FlexibleContexts, GADTs #-}
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Database.Persist.MongoDB
import Language.Haskell.TH.Syntax
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist MkPersistSettings { mpsBackend = ConT ''Action }, mkMigrate "migrateAll"]
$(persistFileWith lowerCaseSettings "config/models")
|
cutsea110/blog
|
Model.hs
|
Haskell
|
bsd-2-clause
| 601
|
module Stats (nintyFifth, Estimate(..)) where
import qualified Data.Vector.Unboxed as U
import Statistics.Sample (mean)
import Statistics.Resampling (resample, fromResample)
import Statistics.Resampling.Bootstrap (bootstrapBCA, Estimate(..) )
import System.Random.MWC (create)
nintyFifth :: [Double] -> IO Estimate
nintyFifth sample = do
g <- create
resamples <- resample g [mean] 10000 sampleU -- (length sample^2) sampleU
-- print $ U.length $ fromResample $ head $ resamples
-- print resamples
return $ head $ bootstrapBCA 0.95 sampleU [mean] resamples
where
sampleU = U.fromList sample
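-- A hypothetical usage sketch (the sample values are invented for
-- illustration; assumes the 'Estimate' type can be printed via Show):
--
-- > est <- nintyFifth [12.1, 11.8, 12.4, 12.0, 11.9]
-- > print est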
|
ku-fpg/ldpc
|
src/Stats.hs
|
Haskell
|
bsd-2-clause
| 647
|
module ImplicitRefs.Evaluator
( valueOf
, run
, eval
, evalProgram
) where
import Control.Applicative ((<|>))
import Control.Arrow (second)
import Control.Monad.Except
import ImplicitRefs.Data
import ImplicitRefs.Parser
type EvaluateResult = IOTry ExpressedValue
liftMaybe :: LangError -> Maybe a -> IOTry a
liftMaybe _ (Just x) = return x
liftMaybe y Nothing = throwError y
run :: String -> IO (Try ExpressedValue)
run input = runExceptT $ do
prog <- liftTry (parseProgram input)
store <- liftIO initStore
evalProgram store prog
evalProgram :: Store -> Program -> EvaluateResult
evalProgram store (Prog expr) = eval store expr
eval :: Store -> Expression -> EvaluateResult
eval store expr = valueOf expr empty store
valueOf :: Expression -> Environment -> Store -> EvaluateResult
valueOf (ConstExpr x) _ _ = evalConstExpr x
valueOf (VarExpr var) env s = evalVarExpr var env s
valueOf (LetRecExpr procs recBody) env s = evalLetRecExpr procs recBody env s
valueOf (BinOpExpr op expr1 expr2) env s = evalBinOpExpr op expr1 expr2 env s
valueOf (UnaryOpExpr op expr) env s = evalUnaryOpExpr op expr env s
valueOf (CondExpr pairs) env s = evalCondExpr pairs env s
valueOf (LetExpr bindings body) env s = evalLetExpr bindings body env s
valueOf (ProcExpr params body) env _ = evalProcExpr params body env
valueOf (CallExpr rator rands) env s = evalCallExpr rator rands env s
valueOf (BeginExpr exprs) env s = evalBeginExpr exprs env s
valueOf (AssignExpr name expr) env s = evalAssignExpr name expr env s
valueOf (SetDynamicExpr n e b) env s = evalSetDynamicExpr n e b env s
valueOf (RefExpr name) env s = evalRefExpr name env
valueOf (DeRefExpr name) env s = evalDeRefExpr name env s
valueOf (SetRefExpr name expr) env s = evalSetRefExpr name expr env s
evalRefExpr :: String -> Environment -> EvaluateResult
evalRefExpr name env = do
ref <- getRef env name
return $ ExprRef ref
unpackExprRef :: ExpressedValue -> IOTry Ref
unpackExprRef (ExprRef ref) = return ref
unpackExprRef notRef = throwError $ TypeMismatch "reference" notRef
unpackProc :: ExpressedValue -> IOTry Procedure
unpackProc (ExprProc proc) = return proc
unpackProc notProc = throwError $ TypeMismatch "procedure" notProc
getExprRef :: String -> Environment -> Store -> IOTry Ref
getExprRef name env store = do
refRef <- getRef env name
refVal <- deRef store refRef
unpackExprRef refVal
evalDeRefExpr :: String -> Environment -> Store -> EvaluateResult
evalDeRefExpr name env store = do
ref <- getExprRef name env store
deRef store ref
evalSetRefExpr :: String -> Expression -> Environment -> Store -> EvaluateResult
evalSetRefExpr name expr env store = do
ref <- getExprRef name env store
val <- valueOf expr env store
setRef store ref val
return $ ExprBool False
evalSetDynamicExpr :: String -> Expression -> Expression -> Environment -> Store
-> EvaluateResult
evalSetDynamicExpr name expr body env store = do
ref <- getRef env name
oldVal <- deRef store ref
newVal <- valueOf expr env store
setRef store ref newVal
result <- valueOf body env store
setRef store ref oldVal
return result
getRef :: Environment -> String -> IOTry Ref
getRef env name = case apply env name of
Just (DenoRef ref) -> return ref
Nothing -> throwError $ UnboundVar name
evalAssignExpr :: String -> Expression -> Environment -> Store -> EvaluateResult
evalAssignExpr name expr env store = do
val <- valueOf expr env store
ref <- getRef env name
setRef store ref val
return $ ExprBool False
evalBeginExpr :: [Expression] -> Environment -> Store -> EvaluateResult
evalBeginExpr exprs env store = foldl func (return $ ExprBool False) exprs
where
func acc ele = do
acc
valueOf ele env store
evalExpressionList :: [Expression] -> Environment -> Store
-> IOTry [ExpressedValue]
evalExpressionList lst env store = reverse <$> evaledList
where
func acc expr = do
lst <- acc
ele <- valueOf expr env store
return $ ele:lst
evaledList = foldl func (return []) lst
evalConstExpr :: ExpressedValue -> EvaluateResult
evalConstExpr = return
evalVarExpr :: String -> Environment -> Store -> EvaluateResult
evalVarExpr name env store = do
denoRef <- liftMaybe (UnboundVar name) (apply env name)
let (DenoRef ref) = denoRef
deRef store ref
evalLetRecExpr :: [(String, [String], Expression)] -> Expression
-> Environment -> Store
-> EvaluateResult
evalLetRecExpr procsSubUnits recBody env store = do
newEnv <- extendRecMany store procsSubUnits env
valueOf recBody newEnv store
binBoolOpMap :: [(BinOp, Bool -> Bool -> Bool)]
binBoolOpMap = []
binNumToNumOpMap :: [(BinOp, Integer -> Integer -> Integer)]
binNumToNumOpMap = [(Add, (+)), (Sub, (-)), (Mul, (*)), (Div, div)]
binNumToBoolOpMap :: [(BinOp, Integer -> Integer -> Bool)]
binNumToBoolOpMap = [(Gt, (>)), (Le, (<)), (Eq, (==))]
unaryBoolOpMap :: [(UnaryOp, Bool -> Bool)]
unaryBoolOpMap = []
unaryNumToNumOpMap :: [(UnaryOp, Integer -> Integer)]
unaryNumToNumOpMap = [(Minus, negate)]
unaryNumToBoolOpMap :: [(UnaryOp, Integer -> Bool)]
unaryNumToBoolOpMap = [(IsZero, (0 ==))]
unpackNum :: ExpressedValue -> IOTry Integer
unpackNum (ExprNum n) = return n
unpackNum notNum = throwError $ TypeMismatch "number" notNum
unpackBool :: ExpressedValue -> IOTry Bool
unpackBool (ExprBool b) = return b
unpackBool notBool = throwError $ TypeMismatch "boolean" notBool
tryFind :: Eq a => LangError -> a -> [(a, b)] -> IOTry b
tryFind err x pairs = liftMaybe err (lookup x pairs)
tryFindOp :: (Eq a, Show a) => a -> [(a, b)] -> IOTry b
tryFindOp op = tryFind (UnknownOperator $ show op) op
binOpConverter :: (ExpressedValue -> IOTry a)
-> (ExpressedValue -> IOTry b)
-> (c -> ExpressedValue)
-> (a -> b -> c)
-> (ExpressedValue -> ExpressedValue -> EvaluateResult)
binOpConverter unpack1 unpack2 trans func val1 val2 = do
va <- unpack1 val1
vb <- unpack2 val2
return . trans $ func va vb
binOps :: [(BinOp, ExpressedValue -> ExpressedValue -> EvaluateResult)]
binOps = concat [binNum2Num, binNum2Bool, binBool2Bool]
where
n2nTrans = binOpConverter unpackNum unpackNum ExprNum
binNum2Num = fmap (second n2nTrans) binNumToNumOpMap
n2bTrans = binOpConverter unpackNum unpackNum ExprBool
binNum2Bool = fmap (second n2bTrans) binNumToBoolOpMap
b2bTrans = binOpConverter unpackBool unpackBool ExprBool
binBool2Bool = fmap (second b2bTrans) binBoolOpMap
unaryOpConverter :: (ExpressedValue -> IOTry a)
-> (b -> ExpressedValue)
-> (a -> b)
-> (ExpressedValue -> EvaluateResult)
unaryOpConverter unpack trans func val = do
va <- unpack val
return . trans $ func va
unaryOps :: [(UnaryOp, ExpressedValue -> EvaluateResult)]
unaryOps = concat [unaryNum2Num, unaryNum2Bool, unaryBool2Bool]
where
n2nTrans = unaryOpConverter unpackNum ExprNum
unaryNum2Num = fmap (second n2nTrans) unaryNumToNumOpMap
n2bTrans = unaryOpConverter unpackNum ExprBool
unaryNum2Bool = fmap (second n2bTrans) unaryNumToBoolOpMap
b2bTrans = unaryOpConverter unpackBool ExprBool
unaryBool2Bool = fmap (second b2bTrans) unaryBoolOpMap
evalBinOpExpr :: BinOp -> Expression -> Expression -> Environment -> Store
-> EvaluateResult
evalBinOpExpr op expr1 expr2 env store = do
func <- tryFindOp op binOps
v1 <- valueOf expr1 env store
v2 <- valueOf expr2 env store
func v1 v2
evalUnaryOpExpr :: UnaryOp -> Expression -> Environment -> Store
-> EvaluateResult
evalUnaryOpExpr op expr env store = do
func <- tryFindOp op unaryOps
v <- valueOf expr env store
func v
evalCondExpr :: [(Expression, Expression)] -> Environment -> Store
-> EvaluateResult
evalCondExpr [] _ _ = throwError $ RuntimeError "No predicate is true."
evalCondExpr ((e1, e2):pairs) env store = do
val <- valueOf e1 env store
bool <- unpackBool val
if bool then valueOf e2 env store else evalCondExpr pairs env store
evalLetExpr :: [(String, Expression)] -> Expression -> Environment -> Store
-> EvaluateResult
evalLetExpr bindings body env store = evalLetExpr' bindings body env
where
evalLetExpr' [] body newEnv = valueOf body newEnv store
evalLetExpr' ((name, expr):xs) body newEnv = do
val <- valueOf expr env store
ref <- newRef store val
evalLetExpr' xs body (extend name (DenoRef ref) newEnv)
evalProcExpr :: [String] -> Expression -> Environment -> EvaluateResult
evalProcExpr params body env = return . ExprProc $ Procedure params body env
evalCallExpr :: Expression -> [Expression] -> Environment -> Store
-> EvaluateResult
evalCallExpr ratorExpr randExprs env store = do
rator <- valueOf ratorExpr env store
content <- unpackProc rator
rands <- evalExpressionList randExprs env store
applyProcedure content rands
where
safeZip :: [String] -> [ExpressedValue] -> IOTry [(String, ExpressedValue)]
safeZip as bs =
let na = length as
nb = length bs
in if na /= nb
then throwError $ ArgNumMismatch (toInteger na) bs
else return $ zip as bs
allocateAll :: [(String, ExpressedValue)] -> Environment
-> IOTry Environment
allocateAll [] env = return env
allocateAll ((name, val):pairs) env = do
ref <- newRef store val
allocateAll pairs (extend name (DenoRef ref) env)
applyProcedure :: Procedure -> [ExpressedValue] -> EvaluateResult
applyProcedure (Procedure params body savedEnv) rands = do
pairs <- safeZip params rands
newEnv <- allocateAll pairs savedEnv
valueOf body newEnv store
|
li-zhirui/EoplLangs
|
src/ImplicitRefs/Evaluator.hs
|
Haskell
|
bsd-3-clause
| 9,909
|
module Main where
import Text.Regex.Posix ((=~))
import System.Environment (getArgs)
myGrep :: String -> String -> IO ()
myGrep regex filename = do
fileSlurp <- readFile filename
mapM_ putStrLn $
filter (=~ regex) (lines fileSlurp)
main :: IO ()
main = do
(myRegex:filenames) <- getArgs
mapM_ (myGrep myRegex) filenames
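-- Example invocation (the executable name 'hgrep' follows this file's
-- name and the file paths are hypothetical): the first argument is the
-- regex, the rest are files to search.
--
--   $ hgrep "^import" Main.hs Utils.hs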
|
mrordinaire/data-analysis
|
app/hgrep.hs
|
Haskell
|
bsd-3-clause
| 335
|
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.THEff
import Control.THEff.Fresh
mkEff "UnicalChar" ''Fresh ''Char ''NoEff
main:: IO ()
main = putStrLn $ runUnicalChar 'A' $ do
a <- fresh
b <- fresh
c <- fresh
return $ a:b:[c]
|
KolodeznyDiver/THEff
|
samples/SampleFresh.hs
|
Haskell
|
bsd-3-clause
| 412
|
{-|
Module : Types.BooleanLogic
Description : Some type families on the kind Bool.
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : aovieth@gmail.com
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Types.BooleanLogic (
And
, Or
, Not
, Any
, All
) where
type family And (a :: Bool) (b :: Bool) :: Bool where
And 'True 'True = 'True
And a b = 'False
type family Or (a :: Bool) (b :: Bool) :: Bool where
Or 'False 'False = 'False
Or a b = 'True
type family Not (a :: Bool) :: Bool where
Not 'True = 'False
Not 'False = 'True
type family Any (bs :: [Bool]) :: Bool where
Any '[] = 'False
Any (b ': bs) = Or b (Any bs)
type family All (bs :: [Bool]) :: Bool where
All '[] = 'True
All (b ': bs) = And b (All bs)
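-- A minimal GHCi sketch of how these families reduce (assuming the
-- module is loaded and DataKinds is available at the prompt):
--
-- >>> :kind! And 'True (Not 'False)
-- And 'True (Not 'False) :: Bool
-- = 'True
--
-- >>> :kind! All '[ 'True, 'False, 'True ]
-- All '[ 'True, 'False, 'True ] :: Bool
-- = 'False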
|
avieth/Relational
|
Types/BooleanLogic.hs
|
Haskell
|
bsd-3-clause
| 998
|
{-# Language DataKinds, OverloadedStrings #-}
{-# Language RankNTypes, TypeOperators #-}
{-# Language PatternSynonyms #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE GADTs #-}
module SAWScript.X86
( Options(..)
, proof
, proofWithOptions
, linuxInfo
, bsdInfo
, Fun(..)
, Goal(..)
, gGoal
, getGoals
, X86Error(..)
, X86Unsupported(..)
, SharedContext
, CallHandler
, Sym
, RelevantElf(..)
, getElf
, getRelevant
, findSymbols
, posFn
, loadGlobal
) where
import Control.Lens ((^.))
import Control.Exception(Exception(..),throwIO)
import Control.Monad.IO.Class(liftIO)
import qualified Data.BitVector.Sized as BV
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import qualified Data.Map as Map
import qualified Data.Text as Text
import Data.Text.Encoding(decodeUtf8)
import System.IO(hFlush,stdout)
import Data.Maybe(mapMaybe)
-- import Text.PrettyPrint.ANSI.Leijen(pretty)
import qualified Data.ElfEdit as Elf
import Data.Parameterized.Some(Some(..))
import Data.Parameterized.Context(EmptyCtx,(::>),singleton)
-- What4
import What4.Interface(asNat,asBV)
import qualified What4.Interface as W4
import qualified What4.Config as W4
import What4.FunctionName(functionNameFromText)
import What4.ProgramLoc(ProgramLoc,Position(OtherPos))
-- Crucible
import Lang.Crucible.Analysis.Postdom (postdomInfo)
import Lang.Crucible.CFG.Core(SomeCFG(..), TypeRepr(..), cfgHandle)
import Lang.Crucible.CFG.Common(GlobalVar)
import Lang.Crucible.Simulator.RegMap(regValue, RegMap(..), RegEntry(..))
import Lang.Crucible.Simulator.RegValue(RegValue'(..))
import Lang.Crucible.Simulator.GlobalState(insertGlobal,emptyGlobals)
import Lang.Crucible.Simulator.Operations(defaultAbortHandler)
import Lang.Crucible.Simulator.OverrideSim(runOverrideSim, callCFG, readGlobal)
import Lang.Crucible.Simulator.EvalStmt(executeCrucible)
import Lang.Crucible.Simulator.ExecutionTree
(ExecResult(..), SimContext(..), FnState(..)
, ExecState(InitialState)
, FunctionBindings(..)
)
import Lang.Crucible.Simulator.SimError(SimError(..), SimErrorReason)
import Lang.Crucible.Backend
(getProofObligations,ProofGoal(..),labeledPredMsg,labeledPred,goalsToList
,assumptionsPred,IsSymBackend(..),SomeBackend(..),HasSymInterface(..))
import Lang.Crucible.FunctionHandle(HandleAllocator,newHandleAllocator,insertHandleMap,emptyHandleMap)
-- Crucible LLVM
import SAWScript.Crucible.LLVM.CrucibleLLVM
(Mem, ppPtr, pattern LLVMPointer, bytesToInteger)
import Lang.Crucible.LLVM.Intrinsics(llvmIntrinsicTypes)
import Lang.Crucible.LLVM.MemModel (mkMemVar)
import qualified Lang.Crucible.LLVM.MemModel as Crucible
-- Macaw
import Data.Macaw.Architecture.Info(ArchitectureInfo)
import Data.Macaw.Discovery(analyzeFunction)
import Data.Macaw.Discovery.State(FunctionExploreReason(UserRequest)
, emptyDiscoveryState, AddrSymMap)
import Data.Macaw.Memory( Memory, MemSegment(..), MemSegmentOff(..)
, segmentBase, segmentOffset
, addrOffset, memWordToUnsigned
, segoffAddr, incAddr
, readWord8, readWord16le, readWord32le, readWord64le)
import Data.Macaw.Memory.ElfLoader( LoadOptions(..)
, memoryForElfAllSymbols
, memoryForElf
, MemSymbol(..)
)
import Data.Macaw.Symbolic( ArchRegStruct
, mkFunCFG
, GlobalMap
, MacawSimulatorState(..)
, macawExtensions
, unsupportedSyscalls
, defaultMacawArchStmtExtensionOverride
)
import qualified Data.Macaw.Symbolic as Macaw ( LookupFunctionHandle(..) )
import Data.Macaw.Symbolic( MacawExt
, MacawFunctionArgs
)
import Data.Macaw.Symbolic.Backend(MacawSymbolicArchFunctions(..), crucArchRegTypes)
import Data.Macaw.X86(X86Reg(..), x86_64_linux_info,x86_64_freeBSD_info)
import Data.Macaw.X86.ArchTypes(X86_64)
import Data.Macaw.X86.Symbolic
( x86_64MacawSymbolicFns, x86_64MacawEvalFn, newSymFuns
, lookupX86Reg
)
import Data.Macaw.X86.Crucible(SymFuns(..))
-- Saw Core
import Verifier.SAW.SharedTerm(Term, mkSharedContext, SharedContext, scImplies)
import Verifier.SAW.Term.Pretty(showTerm)
import Verifier.SAW.Recognizer(asBool)
import Verifier.SAW.Simulator.What4.ReturnTrip (sawRegisterSymFunInterp, toSC, saw_ctx)
-- Cryptol Verifier
import Verifier.SAW.CryptolEnv(CryptolEnv,initCryptolEnv,loadCryptolModule,defaultPrimitiveOptions)
import Verifier.SAW.Cryptol.Prelude(scLoadPreludeModule,scLoadCryptolModule)
-- SAWScript
import SAWScript.X86Spec hiding (Prop)
import SAWScript.Proof(boolToProp, Prop)
import SAWScript.Crucible.Common
( newSAWCoreBackend, newSAWCoreExprBuilder
, sawCoreState, SomeOnlineBackend(..)
)
--------------------------------------------------------------------------------
-- Input Options
-- | What we'd like done, plus additional information from the "outside world".
data Options = Options
{ fileName :: FilePath
-- ^ Name of the elf file to process.
, function :: Fun
-- ^ Function that we'd like to extract.
, archInfo :: ArchitectureInfo X86_64
-- ^ Architectural flavor. See "linuxInfo" and "bsdInfo".
, backend :: SomeBackend Sym
-- ^ The Crucible backend to use.
, allocator :: HandleAllocator
-- ^ The handle allocator used to allocate @memvar@
, memvar :: GlobalVar Mem
-- ^ The global variable storing the heap
, cryEnv :: CryptolEnv
, extraGlobals :: [(ByteString,Integer,Unit)]
-- ^ Additional globals to auto-load from the ELF file
}
linuxInfo :: ArchitectureInfo X86_64
linuxInfo = x86_64_linux_info
bsdInfo :: ArchitectureInfo X86_64
bsdInfo = x86_64_freeBSD_info
--------------------------------------------------------------------------------
-- Spec
data Fun = Fun { funName :: ByteString, funSpec :: FunSpec }
--------------------------------------------------------------------------------
type CallHandler = Sym -> Macaw.LookupFunctionHandle (MacawSimulatorState Sym) Sym X86_64
-- | Run a top-level proof.
-- Should be used when making a standalone proof script.
proof ::
(FilePath -> IO ByteString) ->
ArchitectureInfo X86_64 ->
FilePath {- ^ ELF binary -} ->
Maybe FilePath {- ^ Cryptol spec, if any -} ->
[(ByteString,Integer,Unit)] ->
Fun ->
IO (SharedContext,Integer,[Goal])
proof fileReader archi file mbCry globs fun =
do sc <- mkSharedContext
halloc <- newHandleAllocator
scLoadPreludeModule sc
scLoadCryptolModule sc
sym <- newSAWCoreExprBuilder sc
SomeOnlineBackend bak <- newSAWCoreBackend sym
let ?fileReader = fileReader
cenv <- loadCry sym mbCry
mvar <- mkMemVar "saw_x86:llvm_memory" halloc
proofWithOptions Options
{ fileName = file
, function = fun
, archInfo = archi
, backend = SomeBackend bak
, allocator = halloc
, memvar = mvar
, cryEnv = cenv
, extraGlobals = globs
}
-- | Run a proof using the given backend.
-- Useful for integrating with other tools.
proofWithOptions :: Options -> IO (SharedContext,Integer,[Goal])
proofWithOptions opts =
do elf <- getRelevant =<< getElf (fileName opts)
translate opts elf (function opts)
-- | Add interpretations for the symbolic functions, by looking
-- them up in the Cryptol environment. There should be definitions
-- for "aesenc", "aesenclast", and "clmul".
registerSymFuns :: Opts -> IO (SymFuns Sym)
registerSymFuns opts =
do let sym = optsSym opts
st <- sawCoreState sym
sfs <- newSymFuns sym
sawRegisterSymFunInterp st (fnAesEnc sfs) (mk2 "aesenc")
sawRegisterSymFunInterp st (fnAesEncLast sfs) (mk2 "aesenclast")
sawRegisterSymFunInterp st (fnClMul sfs) (mk2 "clmul")
return sfs
where
err nm xs =
unlines [ "Type error in call to " ++ show (nm::String) ++ ":"
, "*** Expected: 2 arguments"
, "*** Given: " ++ show (length xs) ++ " arguments"
]
mk2 nm _sc xs = case xs of
[_,_] -> cryTerm opts nm xs
_ -> fail (err nm xs)
--------------------------------------------------------------------------------
-- ELF
-- | These are the parts of the ELF file that we care about.
data RelevantElf = RelevantElf
{ memory :: Memory 64
, funSymMap :: AddrSymMap 64
, symMap :: AddrSymMap 64
}
-- | Parse an elf file.
getElf :: FilePath -> IO (Elf.ElfHeaderInfo 64)
getElf path =
do bs <- BS.readFile path
case Elf.decodeElfHeaderInfo bs of
Right (Elf.SomeElf hdr)
| Elf.ELFCLASS64 <- Elf.headerClass (Elf.header hdr) -> pure hdr
| otherwise -> unsupported "32-bit ELF format"
Left (off, msg) -> malformed $ mconcat [ "Invalid ELF header at offset "
, show off
, ": "
, msg
]
-- | Extract a Macaw "memory" from an ELF file and resolve symbols.
getRelevant :: Elf.ElfHeaderInfo 64 -> IO RelevantElf
getRelevant elf =
case (memoryForElf opts elf, memoryForElfAllSymbols opts elf) of
(Left err, _) -> malformed err
(_, Left err) -> malformed err
(Right (mem, faddrs, _warnings, _errs), Right (_, addrs, _, _)) ->
do let toEntry msym = (memSymbolStart msym, memSymbolName msym)
return RelevantElf { memory = mem
, funSymMap = Map.fromList (map toEntry faddrs)
, symMap = Map.fromList (map toEntry addrs)
}
where
-- XXX: What options do we want?
opts = LoadOptions { loadOffset = Just 0
}
-- | Find the address(es) of a symbol by name.
findSymbols :: AddrSymMap 64 -> ByteString -> [ MemSegmentOff 64 ]
findSymbols addrs nm = Map.findWithDefault [] nm invertedMap
where
invertedMap = Map.fromListWith (++) [ (y,[x]) | (x,y) <- Map.toList addrs ]
-- | Find the single address of a symbol, or fail.
findSymbol :: AddrSymMap 64 -> ByteString -> IO (MemSegmentOff 64)
findSymbol addrs nm =
case findSymbols addrs nm of
[addr] -> return $! addr
[] -> malformed ("Could not find function " ++ show nm)
_ -> malformed ("Multiple definitions for " ++ show nm)
loadGlobal ::
RelevantElf ->
(ByteString, Integer, Unit) ->
IO [(String, Integer, Unit, [Integer])]
loadGlobal elf (nm,n,u) =
case findSymbols (symMap elf) nm of
[] -> do print $ symMap elf
err "Global not found"
_ -> mapM loadLoc (findSymbols (symMap elf) nm)
where
mem = memory elf
sname = BSC.unpack nm
readOne a = case u of
Bytes -> check (readWord8 mem a)
Words -> check (readWord16le mem a)
DWords -> check (readWord32le mem a)
QWords -> check (readWord64le mem a)
_ -> err ("unsuported global size: " ++ show u)
nextAddr = incAddr (bytesToInteger (1 *. u))
addrsFor o = take (fromIntegral n) (iterate nextAddr o)
check :: (Show b, Integral a) => Either b a -> IO Integer
check res = case res of
Left e -> err (show e)
Right a -> return (fromIntegral a)
loadLoc off = do let start = segoffAddr off
a = memWordToUnsigned (addrOffset start)
is <- mapM readOne (addrsFor start)
return (sname, a, u, is)
err :: [Char] -> IO a
err xs = fail $ unlines
[ "Failed to load global."
, "*** Global: " ++ show nm
, "*** Error: " ++ xs
]
-- | The position associated with a specific location.
posFn :: MemSegmentOff 64 -> Position
posFn = OtherPos . Text.pack . show
-- | Load a file with Cryptol decls.
loadCry ::
(?fileReader :: FilePath -> IO ByteString) =>
Sym -> Maybe FilePath ->
IO CryptolEnv
loadCry sym mb =
do sc <- saw_ctx <$> sawCoreState sym
env <- initCryptolEnv sc
case mb of
Nothing -> return env
Just file -> snd <$> loadCryptolModule sc defaultPrimitiveOptions env file
--------------------------------------------------------------------------------
-- Translation
callHandler :: Overrides -> CallHandler
callHandler callMap sym = Macaw.LookupFunctionHandle $ \st mem regs -> do
case lookupX86Reg X86_IP regs of
Just (RV ptr) | LLVMPointer base off <- ptr ->
case (asNat base, BV.asUnsigned <$> asBV off) of
(Just b, Just o) ->
case Map.lookup (b,o) callMap of
Just h -> case h sym of
Macaw.LookupFunctionHandle f -> f st mem regs
Nothing ->
fail ("No over-ride for function: " ++ show (ppPtr ptr))
_ -> fail ("Non-static call: " ++ show (ppPtr ptr))
_ -> fail "[Bug?] Failed to obtain the value of the IP register."
-- | Verify the given function. The function matches its specification,
-- as long as the returned goals can be discharged.
-- Returns the shared context, the (absolute) address of the function,
-- and the goals (from the Sym).
translate ::
Options -> RelevantElf -> Fun -> IO (SharedContext, Integer, [Goal])
translate opts elf fun =
do let name = funName fun
sayLn ("Translating function: " ++ BSC.unpack name)
let ?memOpts = Crucible.defaultMemOptions
let ?recordLLVMAnnotation = \_ _ _ -> return ()
let bak = backend opts
sym = case bak of SomeBackend b -> backendGetSym b
sopts = Opts { optsBackend = bak, optsCry = cryEnv opts, optsMvar = memvar opts }
sfs <- registerSymFuns sopts
(globs,st,checkPost) <-
case funSpec fun of
NewStyle mkSpec debug ->
do gss <- mapM (loadGlobal elf) (extraGlobals opts)
spec0 <- mkSpec (cryEnv opts)
let spec = spec0 {specGlobsRO = concat (specGlobsRO spec0:gss)}
(gs,st,po) <- verifyMode spec sopts
debug st
return (gs,st,\st1 -> debug st1 >> po st1)
addr <- doSim opts elf sfs name globs st checkPost
gs <- getGoals bak
sc <- saw_ctx <$> sawCoreState sym
return (sc, addr, gs)
setSimulatorVerbosity :: (W4.IsSymExprBuilder sym) => Int -> sym -> IO ()
setSimulatorVerbosity verbosity sym = do
verbSetting <- W4.getOptionSetting W4.verbosity (W4.getConfiguration sym)
_ <- W4.setOpt verbSetting (toInteger verbosity)
return ()
doSim ::
(?memOpts::Crucible.MemOptions, Crucible.HasLLVMAnn Sym) =>
Options ->
RelevantElf ->
SymFuns Sym ->
ByteString ->
(GlobalMap Sym Crucible.Mem 64, Overrides) ->
State ->
(State -> IO ()) ->
IO Integer
doSim opts elf sfs name (globs,overs) st checkPost =
do say " Looking for address... "
addr <- findSymbol (symMap elf) name
-- addr :: MemSegmentOff 64
let addrInt =
let seg :: MemSegment 64
seg = segoffSegment addr
in if segmentBase seg == 0
then toInteger (segmentOffset seg + segoffOffset addr)
else error " Not an absolute address"
sayLn (show addr)
SomeCFG cfg <- statusBlock " Constructing CFG... "
$ makeCFG opts elf name addr
-- writeFile "XXX.hs" (show cfg)
let sym = case backend opts of SomeBackend bak -> backendGetSym bak
mvar = memvar opts
setSimulatorVerbosity 0 sym
execResult <- statusBlock " Simulating... " $ do
let crucRegTypes = crucArchRegTypes x86
let macawStructRepr = StructRepr crucRegTypes
-- The global pointer validity predicate is required if your memory
-- representation has gaps that are not supposed to be mapped and you
-- want to verify that no memory accesses touch unmapped regions.
--
-- The memory setup for this verifier does not have that problem, and
-- thus does not need any additional validity predicates.
let noExtraValidityPred _ _ _ _ = return Nothing
let archEvalFns = x86_64MacawEvalFn sfs defaultMacawArchStmtExtensionOverride
let lookupSyscall = unsupportedSyscalls "saw-script"
let ctx :: SimContext (MacawSimulatorState Sym) Sym (MacawExt X86_64)
ctx = SimContext { _ctxBackend = backend opts
, ctxSolverProof = \a -> a
, ctxIntrinsicTypes = llvmIntrinsicTypes
, simHandleAllocator = allocator opts
, printHandle = stdout
, extensionImpl = macawExtensions archEvalFns mvar globs (callHandler overs sym) lookupSyscall noExtraValidityPred
, _functionBindings = FnBindings $
insertHandleMap (cfgHandle cfg) (UseCFG cfg (postdomInfo cfg)) emptyHandleMap
, _cruciblePersonality = MacawSimulatorState
, _profilingMetrics = Map.empty
}
let initGlobals = insertGlobal mvar (stateMem st) emptyGlobals
executeCrucible []
$ InitialState ctx initGlobals defaultAbortHandler macawStructRepr
$ runOverrideSim macawStructRepr
$ do let args :: RegMap Sym (MacawFunctionArgs X86_64)
args = RegMap (singleton (RegEntry macawStructRepr
(stateRegs st)))
crucGenArchConstraints x86 $
do r <- callCFG cfg args
mem <- readGlobal mvar
let regs = regValue r
let sta = State { stateMem = mem, stateRegs = regs }
liftIO (checkPost sta)
pure regs
case execResult of
FinishedResult {} -> pure ()
AbortedResult {} -> sayLn "[Warning] Function never returns"
TimeoutResult {} -> malformed $ unlines [ "Execution timed out" ]
return addrInt
type TheCFG = SomeCFG (MacawExt X86_64)
(EmptyCtx ::> ArchRegStruct X86_64)
(ArchRegStruct X86_64)
-- | Generate a CFG for the function at the given address.
makeCFG ::
Options ->
RelevantElf ->
ByteString ->
MemSegmentOff 64 ->
IO TheCFG
makeCFG opts elf name addr =
do (_,Some funInfo) <- return $ analyzeFunction addr UserRequest empty
-- writeFile "MACAW.cfg" (show (pretty funInfo))
mkFunCFG x86 (allocator opts) cruxName posFn funInfo
where
txtName = decodeUtf8 name
cruxName = functionNameFromText txtName
empty = emptyDiscoveryState (memory elf) (funSymMap elf) (archInfo opts)
--------------------------------------------------------------------------------
-- Goals
data Goal = Goal
{ gAssumes :: [ Term ] -- ^ Assuming these
, gShows :: Term -- ^ We need to show this
, gLoc :: ProgramLoc -- ^ The goal came from here
, gMessage :: SimErrorReason -- ^ We should say this if the proof fails
}
-- | The proposition that needs proving (i.e., assumptions imply conclusion)
gGoal :: SharedContext -> Goal -> IO Prop
gGoal sc g0 = boolToProp sc [] =<< go (gAssumes g)
where
g = g0 { gAssumes = mapMaybe skip (gAssumes g0) }
_shG = do putStrLn "Assuming:"
mapM_ _shT (gAssumes g)
putStrLn "Shows:"
_shT (gShows g)
_shT t = putStrLn (" " ++ showTerm t)
skip a = case asBool a of
Just True -> Nothing
_ -> Just a
go xs = case xs of
[] -> return (gShows g)
a : as -> scImplies sc a =<< go as
getGoals :: SomeBackend Sym -> IO [Goal]
getGoals (SomeBackend bak) =
do obls <- maybe [] goalsToList <$> getProofObligations bak
st <- sawCoreState sym
mapM (toGoal st) obls
where
sym = backendGetSym bak
toGoal st (ProofGoal asmps g) =
do a1 <- toSC sym st =<< assumptionsPred sym asmps
p <- toSC sym st (g ^. labeledPred)
let SimError loc msg = g^.labeledPredMsg
return Goal { gAssumes = [a1]
, gShows = p
, gLoc = loc
, gMessage = msg
}
instance Show Goal where
showsPrec _ g = showString "Goal { gAssumes = "
. showList (map (show . showTerm) (gAssumes g))
. showString ", gShows = " . shows (showTerm (gShows g))
. showString ", gLoc = " . shows (gLoc g)
. showString ", gMessage = " . shows (show (gMessage g))
. showString " }"
--------------------------------------------------------------------------------
-- Specialize the generic functions to the X86.
-- | All functions related to X86.
x86 :: MacawSymbolicArchFunctions X86_64
x86 = x86_64MacawSymbolicFns
--------------------------------------------------------------------------------
-- Calling Convention
-- see: http://refspecs.linuxfoundation.org/elf/x86_64-abi-0.99.pdf
-- Need to preserve: %rbp, %rbx, %r12--%r15
-- Preserve control bits in MXCSR
-- Preserve x87 control word.
-- On entry:
-- CPU is in x87 mode
-- DF in %rFLAGS is clear on entry and return.
-- "Red zone" 128 bytes past the end of the stack %rsp.
-- * not modified by interrupts
--------------------------------------------------------------------------------
-- Errors
data X86Unsupported = X86Unsupported String deriving Show
data X86Error = X86Error String deriving Show
instance Exception X86Unsupported
instance Exception X86Error
unsupported :: String -> IO a
unsupported x = throwIO (X86Unsupported x)
malformed :: String -> IO a
malformed x = throwIO (X86Error x)
--------------------------------------------------------------------------------
-- Status output
say :: String -> IO ()
say x = putStr x >> hFlush stdout
sayLn :: String -> IO ()
sayLn = putStrLn
sayOK :: IO ()
sayOK = sayLn "[OK]"
statusBlock :: String -> IO a -> IO a
statusBlock msg m =
do say msg
a <- m
sayOK
return a
|
GaloisInc/saw-script
|
src/SAWScript/X86.hs
|
Haskell
|
bsd-3-clause
| 22,551
|
{-# LANGUAGE OverloadedStrings #-}
module Advent.Day11 where
import qualified Data.Char as C
import qualified Data.List as L
increment :: String -> String
increment str = reverse (increment' (reverse str))
where increment' ('z':rest) = 'a' : increment' rest
increment' (c:rest) = nextChar c : rest
increment' "" = ""
nextChar c = C.chr ((C.ord c) + 1)
hasIncreasing :: String -> Bool
hasIncreasing (a:b:c:rest) =
(b' == a' + 1 && c' == b' + 1) || hasIncreasing (b:c:rest)
where a' = C.ord a
b' = C.ord b
c' = C.ord c
hasIncreasing _ = False
hasBadChar :: String -> Bool
hasBadChar str =
"i" `L.isInfixOf` str || "o" `L.isInfixOf` str || "l" `L.isInfixOf` str
hasPairs :: String -> Bool
hasPairs str =
(length (filter (\g -> (length g) >= 2) (L.group str))) >= 2
goodPassword :: String -> Bool
goodPassword pass =
hasIncreasing pass && not (hasBadChar pass) && hasPairs pass
nextPassword :: String -> String
nextPassword pass =
head (filter goodPassword (iterate increment pass))
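-- Hand-checked examples of the predicate above (inputs chosen for
-- illustration against the rules implemented here):
--
-- >>> goodPassword "abcdffaa"
-- True
--
-- >>> goodPassword "hijklmmn"
-- False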
|
micxjo/hs-advent
|
src/Advent/Day11.hs
|
Haskell
|
bsd-3-clause
| 1,061
|
{-# language CPP #-}
-- No documentation found for Chapter "CommandBufferResetFlagBits"
module Vulkan.Core10.Enums.CommandBufferResetFlagBits ( CommandBufferResetFlags
, CommandBufferResetFlagBits( COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
, ..
)
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import GHC.Show (showString)
import Numeric (showHex)
import Vulkan.Zero (Zero)
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Foreign.Storable (Storable)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Vulkan.Core10.FundamentalTypes (Flags)
type CommandBufferResetFlags = CommandBufferResetFlagBits
-- | VkCommandBufferResetFlagBits - Bitmask controlling behavior of a command
-- buffer reset
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'CommandBufferResetFlags'
newtype CommandBufferResetFlagBits = CommandBufferResetFlagBits Flags
deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
-- | 'COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT' specifies that most or all
-- memory resources currently owned by the command buffer /should/ be
-- returned to the parent command pool. If this flag is not set, then the
-- command buffer /may/ hold onto memory resources and reuse them when
-- recording commands. @commandBuffer@ is moved to the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#commandbuffers-lifecycle initial state>.
pattern COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = CommandBufferResetFlagBits 0x00000001
conNameCommandBufferResetFlagBits :: String
conNameCommandBufferResetFlagBits = "CommandBufferResetFlagBits"
enumPrefixCommandBufferResetFlagBits :: String
enumPrefixCommandBufferResetFlagBits = "COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT"
showTableCommandBufferResetFlagBits :: [(CommandBufferResetFlagBits, String)]
showTableCommandBufferResetFlagBits = [(COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT, "")]
instance Show CommandBufferResetFlagBits where
showsPrec = enumShowsPrec enumPrefixCommandBufferResetFlagBits
showTableCommandBufferResetFlagBits
conNameCommandBufferResetFlagBits
(\(CommandBufferResetFlagBits x) -> x)
(\x -> showString "0x" . showHex x)
instance Read CommandBufferResetFlagBits where
readPrec = enumReadPrec enumPrefixCommandBufferResetFlagBits
showTableCommandBufferResetFlagBits
conNameCommandBufferResetFlagBits
CommandBufferResetFlagBits
|
expipiplus1/vulkan
|
src/Vulkan/Core10/Enums/CommandBufferResetFlagBits.hs
|
Haskell
|
bsd-3-clause
| 2,983
|
{-# LANGUAGE CPP #-}
module TcFlatten(
FlattenEnv(..), FlattenMode(..), mkFlattenEnv,
flatten, flattenMany, flatten_many,
flattenFamApp, flattenTyVarOuter,
unflatten,
eqCanRewrite, eqCanRewriteFR, canRewriteOrSame,
CtFlavourRole, ctEvFlavourRole, ctFlavourRole
) where
#include "HsVersions.h"
import TcRnTypes
import TcType
import Type
import TcEvidence
import TyCon
import TypeRep
import Kind( isSubKind )
import Coercion ( tyConRolesX )
import Var
import VarEnv
import NameEnv
import Outputable
import VarSet
import TcSMonad as TcS
import DynFlags( DynFlags )
import Util
import Bag
import FastString
import Control.Monad( when, liftM )
import MonadUtils ( zipWithAndUnzipM )
import GHC.Exts ( inline )
{-
Note [The flattening story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* A CFunEqCan is either of form
[G] <F xis> : F xis ~ fsk -- fsk is a FlatSkol
[W] x : F xis ~ fmv -- fmv is a unification variable,
-- but untouchable,
-- with MetaInfo = FlatMetaTv
where
x is the witness variable
fsk/fmv is a flatten skolem
xis are function-free
CFunEqCans are always [Wanted], or [Given], never [Derived]
fmv untouchable just means that in a CTyVarEq, say,
fmv ~ Int
we do NOT unify fmv.
* KEY INSIGHTS:
- A given flatten-skolem, fsk, is known a-priori to be equal to
F xis (the LHS), with <F xis> evidence
- A unification flatten-skolem, fmv, stands for the as-yet-unknown
type to which (F xis) will eventually reduce
* Inert set invariant: if F xis1 ~ fsk1, F xis2 ~ fsk2
then xis1 /= xis2
i.e. at most one CFunEqCan with a particular LHS
* Each canonical CFunEqCan x : F xis ~ fsk/fmv has its own
distinct evidence variable x and flatten-skolem fsk/fmv.
Why? We make a fresh fsk/fmv when the constraint is born;
and we never rewrite the RHS of a CFunEqCan.
* Function applications can occur in the RHS of a CTyEqCan. No reason
not to allow this, and it reduces the amount of flattening that must occur.
* Flattening a type (F xis):
- If we are flattening in a Wanted/Derived constraint
then create new [W] x : F xis ~ fmv
else create new [G] x : F xis ~ fsk
with fresh evidence variable x and flatten-skolem fsk/fmv
- Add it to the work list
- Replace (F xis) with fsk/fmv in the type you are flattening
- You can also add the CFunEqCan to the "flat cache", which
simply keeps track of all the function applications you
have flattened.
- If (F xis) is in the cache already, just
use its fsk/fmv and evidence x, and emit nothing.
- No need to substitute in the flat-cache. It's not the end
of the world if we start with, say (F alpha ~ fmv1) and
(F Int ~ fmv2) and then find alpha := Int. That will
simply give rise to fmv1 := fmv2 via [Interacting rule] below
* Canonicalising a CFunEqCan [G/W] x : F xis ~ fsk/fmv
- Flatten xis (to substitute any tyvars; there are already no functions)
cos :: xis ~ flat_xis
- New wanted x2 :: F flat_xis ~ fsk/fmv
- Add new wanted to flat cache
- Discharge x = F cos ; x2
* Unification flatten-skolems, fmv, ONLY get unified when either
a) The CFunEqCan takes a step, using an axiom
b) During un-flattening
They are never unified in any other form of equality.
For example [W] ffmv ~ Int is stuck; it does not unify with fmv.
* We *never* substitute in the RHS (i.e. the fsk/fmv) of a CFunEqCan.
That would destroy the invariant about the shape of a CFunEqCan,
and it would risk wanted/wanted interactions. The only way we
learn information about fsk is when the CFunEqCan takes a step.
However we *do* substitute in the LHS of a CFunEqCan (else it
would never get to fire!)
* [Interacting rule]
(inert) [W] x1 : F tys ~ fmv1
(work item) [W] x2 : F tys ~ fmv2
Just solve one from the other:
x2 := x1
fmv2 := fmv1
This just unites the two fsks into one.
Always solve given from wanted if poss.
* [Firing rule: wanteds]
(work item) [W] x : F tys ~ fmv
instantiate axiom: ax_co : F tys ~ rhs
Discharge fmv:
fmv := alpha
x := ax_co ; sym x2
[W] x2 : alpha ~ rhs (Non-canonical)
discharging the work item. This is the way that fmv's get
unified; even though they are "untouchable".
NB: this deals with the case where fmv appears in xi, which can
happen; it just happens through the non-canonical stuff
Possible short cut (shortCutReduction) if rhs = G rhs_tys,
where G is a type function. Then
- Flatten rhs_tys (cos : rhs_tys ~ rhs_xis)
- Add G rhs_xis ~ fmv to flat cache
- New wanted [W] x2 : G rhs_xis ~ fmv
- Discharge x := co ; G cos ; x2
* [Firing rule: givens]
(work item) [G] g : F tys ~ fsk
instantiate axiom: co : F tys ~ rhs
Now add non-canonical (since rhs is not flat)
[G] (sym g ; co) : fsk ~ rhs
Short cut (shortCutReduction) for when rhs = G rhs_tys and G is a type function
[G] (co ; g) : G tys ~ fsk
But need to flatten tys: flat_cos : tys ~ flat_tys
[G] (sym (G flat_cos) ; co ; g) : G flat_tys ~ fsk
Why given-fsks, alone, doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Could we get away with only flatten meta-tyvars, with no flatten-skolems? No.
[W] w : alpha ~ [F alpha Int]
---> flatten
w = ...w'...
[W] w' : alpha ~ [fsk]
[G] <F alpha Int> : F alpha Int ~ fsk
--> unify (no occurs check)
alpha := [fsk]
But since fsk = F alpha Int, this is really an occurs check error. If
that is all we know about alpha, we will succeed in constraint
solving, producing a program with an infinite type.
Even if we did finally get (g : fsk ~ Bool) by solving (F alpha Int ~ fsk)
using axiom, zonking would not see it, so (x::alpha) sitting in the
tree will get zonked to an infinite type. (Zonking always only does
refl stuff.)
Why flatten-meta-vars, alone doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Look at Simple13, with unification-fmvs only
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fmv]
[W] x : F a ~ fmv
--> subst a in x
x = F g' ; x2
[W] x2 : F [fmv] ~ fmv
And now we have an evidence cycle between g' and x!
If we used a given instead (ie current story)
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fsk]
[G] <F a> : F a ~ fsk
---> Substitute for a
[G] g' : a ~ [fsk]
[G] F (sym g'); <F a> : F [fsk] ~ fsk
Why is it right to treat fmv's differently to ordinary unification vars?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
f :: forall a. a -> a -> Bool
g :: F Int -> F Int -> Bool
Consider
f (x:Int) (y:Bool)
This gives alpha~Int, alpha~Bool. There is an inconsistency,
but really only one error. SherLoc may tell you which location
is most likely, based on other occurrences of alpha.
Consider
g (x:Int) (y:Bool)
Here we get (F Int ~ Int, F Int ~ Bool), which flattens to
(fmv ~ Int, fmv ~ Bool)
But there are really TWO separate errors. We must not complain
about Int~Bool. Moreover these two errors could arise in entirely
unrelated parts of the code. (In the alpha case, there must be
*some* connection (eg v:alpha in common envt).)
Note [Orient equalities with flatten-meta-vars on the left]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This example comes from IndTypesPerfMerge
From the ambiguity check for
f :: (F a ~ a) => a
we get:
[G] F a ~ a
[W] F alpha ~ alpha, alpha ~ a
From Givens we get
[G] F a ~ fsk, fsk ~ a
Now if we flatten we get
[W] alpha ~ fmv, F alpha ~ fmv, alpha ~ a
Now, processing the first one first, choosing alpha := fmv
[W] F fmv ~ fmv, fmv ~ a
And now we are stuck. We must either *unify* fmv := a, or
use the fmv ~ a to rewrite F fmv ~ fmv, so we can make it
meet up with the given F a ~ blah.
Solution: always put fmvs on the left, so we get
[W] fmv ~ alpha, F alpha ~ fmv, alpha ~ a
The point is that fmvs are very uninformative, so doing alpha := fmv
is a bad idea. We want to use other constraints on alpha first.
Note [Derived constraints from wanted CTyEqCans]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is this type ambiguous: (Foo e ~ Maybe e) => Foo e
(indexed-types/should_fail/T4093a)
[G] Foo e ~ Maybe e
[W] Foo e ~ Foo ee -- ee is a unification variable
[W] Foo ee ~ Maybe ee
---
[G] Foo e ~ fsk
[G] fsk ~ Maybe e
[W] Foo e ~ fmv1
[W] Foo ee ~ fmv2
[W] fmv1 ~ fmv2
[W] fmv2 ~ Maybe ee
---> fmv1 := fsk by matching LHSs
[W] Foo ee ~ fmv2
[W] fsk ~ fmv2
[W] fmv2 ~ Maybe ee
--->
[W] Foo ee ~ fmv2
[W] fmv2 ~ Maybe e
[W] fmv2 ~ Maybe ee
Now maybe we should get [D] e ~ ee, and then we'd solve it entirely.
But if in a similar situation we got [D] Int ~ Bool we'd be back
to complaining about wanted/wanted interactions. Maybe this arises
also for fundeps?
Here's another example:
f :: [a] -> [b] -> blah
f (e1 :: F Int) (e2 :: F Int)
we get
F Int ~ fmv
fmv ~ [alpha]
fmv ~ [beta]
We want: alpha := beta (which might unlock something else). If we
generated [D] [alpha] ~ [beta] we'd be good here.
Current story: we don't generate these derived constraints. We could, but
we'd want to make them very weak, so we didn't get the Int~Bool complaint.
************************************************************************
* *
* Other notes (Oct 14)
I have not revisited these, but I didn't want to discard them
* *
************************************************************************
Try: rewrite wanted with wanted only for fmvs (not all meta-tyvars)
But: fmv ~ alpha[0]
alpha[0] ~ fmv’
Now we don’t see that fmv ~ fmv’, which is a problem for injectivity detection.
Conclusion: rewrite wanteds with wanted for all untouchables.
skol ~ untch, must re-orient to untch ~ skol, so that we can use it to rewrite.
************************************************************************
* *
* Examples
Here is a long series of examples I had to work through
* *
************************************************************************
Simple20
~~~~~~~~
axiom F [a] = [F a]
[G] F [a] ~ a
-->
[G] fsk ~ a
[G] [F a] ~ fsk (nc)
-->
[G] F a ~ fsk2
[G] fsk ~ [fsk2]
[G] fsk ~ a
-->
[G] F a ~ fsk2
[G] a ~ [fsk2]
[G] fsk ~ a
-----------------------------------
----------------------------------------
indexed-types/should_compile/T44984
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
flatten
~~~~~~~
fmv0 := F Bool
fmv1 := H (F Bool)
fmv2 := H alpha
alpha := F Bool
plus
fmv1 ~ fmv2
But these two are equal under the above assumptions.
Solve by Refl.
--- under plan B, namely solve fmv1:=fmv2 eagerly ---
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2 fmv2 := fmv1
fmv0 ~ alpha
flatten
fmv0 := F Bool
fmv1 := H fmv0 = H (F Bool)
retain H alpha ~ fmv2
because fmv2 has been filled
alpha := F Bool
----------------------------
indexed-types/should_fail/T4179
after solving
[W] fmv_1 ~ fmv_2
[W] A3 (FCon x) ~ fmv_1 (CFunEqCan)
[W] A3 (x (aoa -> fmv_2)) ~ fmv_2 (CFunEqCan)
----------------------------------------
indexed-types/should_fail/T7729a
a) [W] BasePrimMonad (Rand m) ~ m1
b) [W] tt m1 ~ BasePrimMonad (Rand m)
---> process (b) first
BasePrimMonad (Rand m) ~ fmv_atH
fmv_atH ~ tt m1
---> now process (a)
m1 ~ s_atH ~ tt m1 -- An obscure occurs check
----------------------------------------
typecheck/TcTypeNatSimple
Original constraint
[W] x + y ~ x + alpha (non-canonical)
==>
[W] x + y ~ fmv1 (CFunEqCan)
[W] x + alpha ~ fmv2 (CFunEqCan)
[W] fmv1 ~ fmv2 (CTyEqCan)
(sigh)
----------------------------------------
indexed-types/should_fail/GADTwrong1
[G] Const a ~ ()
==> flatten
[G] fsk ~ ()
work item: Const a ~ fsk
==> fire top rule
[G] fsk ~ ()
work item fsk ~ ()
Surely the work item should rewrite to () ~ ()? Well, maybe not;
it's a very special case. More generally, our givens look like
F a ~ Int, where (F a) is not reducible.
----------------------------------------
indexed_types/should_fail/T8227:
Why using a different can-rewrite rule in CFunEqCan heads
does not work.
Assuming NOT rewriting wanteds with wanteds
Inert: [W] fsk_aBh ~ fmv_aBk -> fmv_aBk
[W] fmv_aBk ~ fsk_aBh
[G] Scalar fsk_aBg ~ fsk_aBh
[G] V a ~ f_aBg
Worklist includes [W] Scalar fmv_aBi ~ fmv_aBk
fmv_aBi, fmv_aBk are flatten unification variables
Work item: [W] V fsk_aBh ~ fmv_aBi
Note that the inert wanteds are cyclic, because we do not rewrite
wanteds with wanteds.
Then we go into a loop when we normalise the work-item, because we
use rewriteOrSame on the argument of V.
Conclusion: Don't make canRewrite context specific; instead use
[W] a ~ ty to rewrite a wanted iff 'a' is a unification variable.
----------------------------------------
Here is a somewhat similar case:
type family G a :: *
blah :: (G a ~ Bool, Eq (G a)) => a -> a
blah = error "urk"
foo x = blah x
For foo we get
[W] Eq (G a), G a ~ Bool
Flattening
[W] G a ~ fmv, Eq fmv, fmv ~ Bool
We can't simplify away the Eq Bool unless we substitute for fmv.
Maybe that doesn't matter: we would still be left with unsolved
G a ~ Bool.
--------------------------
Trac #9318 has a very simple program leading to
[W] F Int ~ Int
[W] F Int ~ Bool
We don't want to get "Error Int~Bool". But if fmv's can rewrite
wanteds, we will get
[W] fmv ~ Int
[W] fmv ~ Bool
--->
[W] Int ~ Bool
************************************************************************
* *
* The main flattening functions
* *
************************************************************************
Note [Flattening]
~~~~~~~~~~~~~~~~~~~~
flatten ty ==> (xi, cc)
where
xi has no type functions, unless they appear under ForAlls
cc = Auxiliary given (equality) constraints constraining
the fresh type variables in xi. Evidence for these
is always the identity coercion, because internally the
fresh flattening skolem variables are actually identified
with the types they have been generated to stand in for.
Note that it is flatten's job to flatten *every type function it sees*.
flatten is only called on *arguments* to type functions, by canEqGiven.
Recall that in comments we use alpha[flat = ty] to represent a
flattening skolem variable alpha which has been generated to stand in
for ty.
----- Example of flattening a constraint: ------
flatten (List (F (G Int))) ==> (xi, cc)
where
xi = List alpha
cc = { G Int ~ beta[flat = G Int],
F beta ~ alpha[flat = F beta] }
Here
* alpha and beta are 'flattening skolem variables'.
* All the constraints in cc are 'given', and all their coercion terms
are the identity.
NB: Flattening Skolems only occur in canonical constraints, which
are never zonked, so we don't need to worry about zonking doing
accidental unflattening.
Note that we prefer to leave type synonyms unexpanded when possible,
so when the flattener encounters one, it first asks whether its
transitive expansion contains any type function applications. If so,
it expands the synonym and proceeds; if not, it simply returns the
unexpanded synonym.
Note [Flattener EqRels]
~~~~~~~~~~~~~~~~~~~~~~~
When flattening, we need to know which equality relation -- nominal
or representation -- we should be respecting. The only difference is
that we rewrite variables by representational equalities when fe_eq_rel
is ReprEq.
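For example (just a sketch): suppose the inert set has the Given
  [G] a ~R Int
i.e. a CTyEqCan whose equality relation is ReprEq.  When fe_eq_rel is
ReprEq that item may be used to rewrite 'a', so flattening (a, Bool)
can give (Int, Bool); when fe_eq_rel is NomEq the item cannot be used
(see eqCanRewriteFR below) and 'a' is left alone.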
-}
data FlattenEnv
= FE { fe_mode :: FlattenMode
, fe_loc :: CtLoc
, fe_flavour :: CtFlavour
, fe_eq_rel :: EqRel } -- See Note [Flattener EqRels]
data FlattenMode -- Postcondition for all three: inert wrt the type substitution
= FM_FlattenAll -- Postcondition: function-free
| FM_Avoid TcTyVar Bool -- See Note [Lazy flattening]
-- Postcondition:
-- * tyvar is only mentioned in result under a rigid path
-- e.g. [a] is ok, but F a won't happen
-- * If flat_top is True, top level is not a function application
-- (but under type constructors is ok e.g. [F a])
| FM_SubstOnly -- See Note [Flattening under a forall]
mkFlattenEnv :: FlattenMode -> CtEvidence -> FlattenEnv
mkFlattenEnv fm ctev = FE { fe_mode = fm
, fe_loc = ctEvLoc ctev
, fe_flavour = ctEvFlavour ctev
, fe_eq_rel = ctEvEqRel ctev }
feRole :: FlattenEnv -> Role
feRole = eqRelRole . fe_eq_rel
{-
Note [Lazy flattening]
~~~~~~~~~~~~~~~~~~~~~~
The idea of FM_Avoid mode is to flatten less aggressively. If we have
a ~ [F Int]
there seems to be no great merit in lifting out (F Int). But if it was
a ~ [G a Int]
then we *do* want to lift it out, in case (G a Int) reduces to Bool, say,
which gets rid of the occurs-check problem. (For the flat_top Bool, see
comments above and at call sites.)
HOWEVER, the lazy flattening actually seems to make type inference go
*slower*, not faster. perf/compiler/T3064 is a case in point; it gets
*dramatically* worse with FM_Avoid. I think it may be because
floating the types out means we normalise them, and that often makes
them smaller and perhaps allows more re-use of previously solved
goals. But to be honest I'm not absolutely certain, so I am leaving
FM_Avoid in the code base. What I'm removing is the unique place
where it is *used*, namely in TcCanonical.canEqTyVar.
See also Note [Conservative unification check] in TcUnify, which gives
other examples where lazy flattening caused problems.
Bottom line: FM_Avoid is unused for now (Nov 14).
Note: T5321Fun got faster when I disabled FM_Avoid
T5837 did too, but it's pathological anyway
Note [Phantoms in the flattener]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
data Proxy p = Proxy
and we're flattening (Proxy ty) w.r.t. ReprEq. Then, we know that `ty`
is really irrelevant -- it will be ignored when solving for representational
equality later on. So, we omit flattening `ty` entirely. This may
violate the expectation of "xi"s for a bit, but the canonicaliser will
soon throw out the phantoms when decomposing a TyConApp. (Or, the
canonicaliser will emit an insoluble, in which case the unflattened version
yields a better error message anyway.)
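A small illustration (assuming the Proxy declaration above, whose
parameter has a Phantom role): when flatten_many reaches the argument
of (Proxy (F Int)) at role Phantom it simply returns
  (F Int, mkTcPhantomCo (F Int) (F Int))
without flattening (F Int), so no CFunEqCan is emitted for it.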
Note [flatten_many performance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In programs with lots of type-level evaluation, flatten_many becomes
part of a tight loop. For example, see test perf/compiler/T9872a, which
calls flatten_many a whopping 7,106,808 times. It is thus important
that flatten_many be efficient.
Performance testing showed that the current implementation is indeed
efficient. It's critically important that zipWithAndUnzipM be
specialized to TcS, and it's also quite helpful to actually `inline`
it. On test T9872a, here are the allocation stats (Dec 16, 2014):
* Unspecialized, uninlined: 8,472,613,440 bytes allocated in the heap
* Specialized, uninlined: 6,639,253,488 bytes allocated in the heap
* Specialized, inlined: 6,281,539,792 bytes allocated in the heap
To improve performance even further, flatten_many_nom is split off
from flatten_many, as nominal equality is the common case. This would
be natural to write using mapAndUnzipM, but even inlined, that function
is not as performant as a hand-written loop.
* mapAndUnzipM, inlined: 7,463,047,432 bytes allocated in the heap
* hand-written recursion: 5,848,602,848 bytes allocated in the heap
If you make any change here, pay close attention to the T9872{a,b,c} tests
and T5321Fun.
If we need to make this yet more performant, a possible way forward is to
duplicate the flattener code for the nominal case, and make that case
faster. This doesn't seem quite worth it, yet.
-}
------------------
flatten :: FlattenMode -> CtEvidence -> TcType -> TcS (Xi, TcCoercion)
flatten mode ev ty
= runFlatten (flatten_one fmode ty)
where
fmode = mkFlattenEnv mode ev
flattenMany :: FlattenMode -> CtEvidence -> [Role]
-> [TcType] -> TcS ([Xi], [TcCoercion])
-- Flatten a bunch of types all at once. Roles on the coercions returned
-- always match the corresponding roles passed in.
flattenMany mode ev roles tys
= runFlatten (flatten_many fmode roles tys)
where
fmode = mkFlattenEnv mode ev
flattenFamApp :: FlattenMode -> CtEvidence
-> TyCon -> [TcType] -> TcS (Xi, TcCoercion)
flattenFamApp mode ev tc tys
= runFlatten (flatten_fam_app fmode tc tys)
where
fmode = mkFlattenEnv mode ev
------------------
flatten_many :: FlattenEnv -> [Role] -> [Type] -> TcS ([Xi], [TcCoercion])
-- Coercions :: Xi ~ Type, at roles given
-- Returns True iff (no flattening happened)
-- NB: The EvVar inside the 'fe_ev :: CtEvidence' is unused,
-- we merely want (a) Given/Solved/Derived/Wanted info
-- (b) the GivenLoc/WantedLoc for when we create new evidence
flatten_many fmode roles tys
-- See Note [flatten_many performance]
= inline zipWithAndUnzipM go roles tys
where
go Nominal ty = flatten_one (setFEEqRel fmode NomEq) ty
go Representational ty = flatten_one (setFEEqRel fmode ReprEq) ty
go Phantom ty = -- See Note [Phantoms in the flattener]
return (ty, mkTcPhantomCo ty ty)
-- | Like 'flatten_many', but assumes that every role is nominal.
flatten_many_nom :: FlattenEnv -> [Type] -> TcS ([Xi], [TcCoercion])
flatten_many_nom _ [] = return ([], [])
-- See Note [flatten_many performance]
flatten_many_nom fmode (ty:tys)
= ASSERT( fe_eq_rel fmode == NomEq )
do { (xi, co) <- flatten_one fmode ty
; (xis, cos) <- flatten_many_nom fmode tys
; return (xi:xis, co:cos) }
------------------
flatten_one :: FlattenEnv -> TcType -> TcS (Xi, TcCoercion)
-- Flatten a type to get rid of type function applications, returning
-- the new type-function-free type, and a collection of new equality
-- constraints. See Note [Flattening] for more detail.
--
-- Postcondition: Coercion :: Xi ~ TcType
-- The role on the result coercion matches the EqRel in the FlattenEnv
flatten_one fmode xi@(LitTy {}) = return (xi, mkTcReflCo (feRole fmode) xi)
flatten_one fmode (TyVarTy tv)
= flattenTyVar fmode tv
flatten_one fmode (AppTy ty1 ty2)
= do { (xi1,co1) <- flatten_one fmode ty1
; case (fe_eq_rel fmode, nextRole xi1) of
(NomEq, _) -> flatten_rhs xi1 co1 NomEq
(ReprEq, Nominal) -> flatten_rhs xi1 co1 NomEq
(ReprEq, Representational) -> flatten_rhs xi1 co1 ReprEq
(ReprEq, Phantom) ->
return (mkAppTy xi1 ty2, co1 `mkTcAppCo` mkTcNomReflCo ty2) }
where
flatten_rhs xi1 co1 eq_rel2
= do { (xi2,co2) <- flatten_one (setFEEqRel fmode eq_rel2) ty2
; traceTcS "flatten/appty"
(ppr ty1 $$ ppr ty2 $$ ppr xi1 $$
ppr co1 $$ ppr xi2 $$ ppr co2)
; let role1 = feRole fmode
role2 = eqRelRole eq_rel2
; return ( mkAppTy xi1 xi2
, mkTcTransAppCo role1 co1 xi1 ty1
role2 co2 xi2 ty2
role1 ) } -- output should match fmode
flatten_one fmode (FunTy ty1 ty2)
= do { (xi1,co1) <- flatten_one fmode ty1
; (xi2,co2) <- flatten_one fmode ty2
; return (mkFunTy xi1 xi2, mkTcFunCo (feRole fmode) co1 co2) }
flatten_one fmode (TyConApp tc tys)
-- Expand type synonyms that mention type families
-- on the RHS; see Note [Flattening synonyms]
| Just (tenv, rhs, tys') <- tcExpandTyCon_maybe tc tys
, let expanded_ty = mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys'
= case fe_mode fmode of
FM_FlattenAll | anyNameEnv isTypeFamilyTyCon (tyConsOfType rhs)
-> flatten_one fmode expanded_ty
| otherwise
-> flattenTyConApp fmode tc tys
_ -> flattenTyConApp fmode tc tys
-- Otherwise, it's a type function application, and we have to
-- flatten it away as well, and generate a new given equality constraint
-- between the application and a newly generated flattening skolem variable.
| isTypeFamilyTyCon tc
= flatten_fam_app fmode tc tys
-- For * a normal data type application
-- * data family application
-- we just recursively flatten the arguments.
| otherwise
-- FM_Avoid stuff commented out; see Note [Lazy flattening]
-- , let fmode' = case fmode of -- Switch off the flat_top bit in FM_Avoid
-- FE { fe_mode = FM_Avoid tv _ }
-- -> fmode { fe_mode = FM_Avoid tv False }
-- _ -> fmode
= flattenTyConApp fmode tc tys
flatten_one fmode ty@(ForAllTy {})
-- We allow for-alls when, but only when, no type function
-- applications inside the forall involve the bound type variables.
= do { let (tvs, rho) = splitForAllTys ty
; (rho', co) <- flatten_one (setFEMode fmode FM_SubstOnly) rho
-- Substitute only under a forall
-- See Note [Flattening under a forall]
; return (mkForAllTys tvs rho', foldr mkTcForAllCo co tvs) }
flattenTyConApp :: FlattenEnv -> TyCon -> [TcType] -> TcS (Xi, TcCoercion)
flattenTyConApp fmode tc tys
= do { (xis, cos) <- case fe_eq_rel fmode of
NomEq -> flatten_many_nom fmode tys
ReprEq -> flatten_many fmode (tyConRolesX role tc) tys
; return (mkTyConApp tc xis, mkTcTyConAppCo role tc cos) }
where
role = feRole fmode
{-
Note [Flattening synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Not expanding synonyms aggressively improves error messages, and
keeps types smaller. But we need to take care.
Suppose
type T a = a -> a
and we want to flatten the type (T (F a)). Then we can safely flatten
the (F a) to a skolem, and return (T fsk). We don't need to expand the
synonym. This works because TcTyConAppCo can deal with synonyms
(unlike TyConAppCo), see Note [TcCoercions] in TcEvidence.
But (Trac #8979) for
type T a = (F a, a) where F is a type function
we must expand the synonym in (say) T Int, to expose the type function
to the flattener.
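A sketch of the two situations (F is any type family; the names T1, T2
are illustrative):
  type T1 a = a -> a        -- no family on the RHS
  type T2 a = (F a, a)      -- family on the RHS (the Trac #8979 shape)
Flattening (T1 (F a)) can leave the synonym alone and return (T1 fsk),
together with a constraint (F a ~ fsk); but flattening (T2 Int) must
first expand the synonym to (F Int, Int), so that the flattener sees
the (F Int) and can produce (fsk, Int) plus (F Int ~ fsk).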
Note [Flattening under a forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Under a forall, we
(a) MUST apply the inert substitution
(b) MUST NOT flatten type family applications
Hence FM_SubstOnly.
For (a) consider c ~ a, a ~ T (forall b. (b, [c]))
If we don't apply the c~a substitution to the second constraint
we won't see the occurs-check error.
For (b) consider (a ~ forall b. F a b), we don't want to flatten
to (a ~ forall b.fsk, F a b ~ fsk)
because now the 'b' has escaped its scope. We'd have to flatten to
(a ~ forall b. fsk b, forall b. F a b ~ fsk b)
and we have not begun to think about how to make that work!
************************************************************************
* *
Flattening a type-family application
* *
************************************************************************
-}
flatten_fam_app, flatten_exact_fam_app, flatten_exact_fam_app_fully
:: FlattenEnv -> TyCon -> [TcType] -> TcS (Xi, TcCoercion)
-- flatten_fam_app can be over-saturated
-- flatten_exact_fam_app is exactly saturated
-- flatten_exact_fam_app_fully lifts out the application to top level
-- Postcondition: Coercion :: Xi ~ F tys
flatten_fam_app fmode tc tys -- Can be over-saturated
= ASSERT( tyConArity tc <= length tys ) -- Type functions are saturated
-- The type function might be *over* saturated
-- in which case the remaining arguments should
-- be dealt with by AppTys
do { let (tys1, tys_rest) = splitAt (tyConArity tc) tys
; (xi1, co1) <- flatten_exact_fam_app fmode tc tys1
-- co1 :: xi1 ~ F tys1
-- all Nominal roles b/c the tycon is oversaturated
; (xis_rest, cos_rest) <- flatten_many fmode (repeat Nominal) tys_rest
-- cos_res :: xis_rest ~ tys_rest
; return ( mkAppTys xi1 xis_rest -- NB mkAppTys: rhs_xi might not be a type variable
-- cf Trac #5655
, mkTcAppCos co1 cos_rest -- (rhs_xi :: F xis) ; (F cos :: F xis ~ F tys)
) }
flatten_exact_fam_app fmode tc tys
= case fe_mode fmode of
FM_FlattenAll -> flatten_exact_fam_app_fully fmode tc tys
FM_SubstOnly -> do { (xis, cos) <- flatten_many fmode roles tys
; return ( mkTyConApp tc xis
, mkTcTyConAppCo (feRole fmode) tc cos ) }
FM_Avoid tv flat_top ->
do { (xis, cos) <- flatten_many fmode roles tys
; if flat_top || tv `elemVarSet` tyVarsOfTypes xis
then flatten_exact_fam_app_fully fmode tc tys
else return ( mkTyConApp tc xis
, mkTcTyConAppCo (feRole fmode) tc cos ) }
where
-- These are always going to be Nominal for now,
-- but not if #8177 is implemented
roles = tyConRolesX (feRole fmode) tc
flatten_exact_fam_app_fully fmode tc tys
= do { (xis, cos) <- flatten_many_nom (setFEEqRel (setFEMode fmode FM_FlattenAll) NomEq) tys
; let ret_co = mkTcTyConAppCo (feRole fmode) tc cos
-- ret_co :: F xis ~ F tys
; mb_ct <- lookupFlatCache tc xis
; case mb_ct of
Just (co, rhs_ty, flav) -- co :: F xis ~ fsk
| (flav, NomEq) `canRewriteOrSameFR` (feFlavourRole fmode)
-> -- Usable hit in the flat-cache
-- We certainly *can* use a Wanted for a Wanted
do { traceTcS "flatten/flat-cache hit" $ (ppr tc <+> ppr xis $$ ppr rhs_ty $$ ppr co)
; (fsk_xi, fsk_co) <- flatten_one fmode rhs_ty
-- The fsk may already have been unified, so flatten it
-- fsk_co :: fsk_xi ~ fsk
; return (fsk_xi, fsk_co `mkTcTransCo`
maybeTcSubCo (fe_eq_rel fmode)
(mkTcSymCo co) `mkTcTransCo`
ret_co) }
-- :: fsk_xi ~ F xis
-- Try to reduce the family application right now
-- See Note [Reduce type family applications eagerly]
_ -> do { mb_match <- matchFam tc xis
; case mb_match of {
Just (norm_co, norm_ty)
-> do { (xi, final_co) <- flatten_one fmode norm_ty
; let co = norm_co `mkTcTransCo` mkTcSymCo final_co
; extendFlatCache tc xis ( co, xi
, fe_flavour fmode )
; return (xi, mkTcSymCo co `mkTcTransCo` ret_co) } ;
Nothing ->
do { let fam_ty = mkTyConApp tc xis
; (ev, fsk) <- newFlattenSkolem (fe_flavour fmode)
(fe_loc fmode)
fam_ty
; let fsk_ty = mkTyVarTy fsk
co = ctEvCoercion ev
; extendFlatCache tc xis (co, fsk_ty, ctEvFlavour ev)
-- The new constraint (F xis ~ fsk) is not necessarily inert
-- (e.g. the LHS may be a redex) so we must put it in the work list
; let ct = CFunEqCan { cc_ev = ev
, cc_fun = tc
, cc_tyargs = xis
, cc_fsk = fsk }
; emitFlatWork ct
; traceTcS "flatten/flat-cache miss" $ (ppr fam_ty $$ ppr fsk $$ ppr ev)
; return (fsk_ty, maybeTcSubCo (fe_eq_rel fmode)
(mkTcSymCo co)
`mkTcTransCo` ret_co) }
} } }
{- Note [Reduce type family applications eagerly]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we come across a type-family application like (Append (Cons x Nil) t),
then, rather than flattening to a skolem etc, we may as well just reduce
it on the spot to (Cons x t). This saves a lot of intermediate steps.
Examples that are helped are tests T9872, and T5321Fun.
So just before we create the new skolem, we attempt to reduce it by one
step (using matchFam). If that works, then recursively flatten the rhs,
which may in turn do lots more reductions.
Once we've got a flat rhs, we extend the flatten-cache to record the
result. Doing so can save lots of work when the same redex shows up
more than once. Note that we record the link from the redex all the
way to its *final* value, not just the single step reduction.
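A sketch of the sort of family involved (the names are illustrative):
  type family Append xs ys where
    Append Nil         ys = ys
    Append (Cons x xs) ys = Cons x (Append xs ys)
Flattening (Append (Cons x Nil) t): matchFam fires the second equation,
giving (Cons x (Append Nil t)); recursively flattening that right-hand
side reduces it again, to (Cons x t).  The flat-cache then maps the
original redex straight to (Cons x t), so a second occurrence of the
same application costs only a cache lookup.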
************************************************************************
* *
Flattening a type variable
* *
************************************************************************
Note [The inert equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Definition [Can-rewrite relation]
A "can-rewrite" relation between flavours, written f1 >= f2, is a
binary relation with the following properties
R1. >= is transitive
R2. If f1 >= f, and f2 >= f,
then either f1 >= f2 or f2 >= f1
Lemma. If f1 >= f then f1 >= f1
Proof. By property (R2), with f1=f2
Definition [Generalised substitution]
A "generalised substitution" S is a set of triples (a -f-> t), where
a is a type variable
t is a type
f is a flavour
such that
(WF1) if (a -f1-> t1) in S
(a -f2-> t2) in S
then neither (f1 >= f2) nor (f2 >= f1) hold
(WF2) if (a -f-> t) is in S, then t /= a
Definition [Applying a generalised substitution]
If S is a generalised substitution
S(f,a) = t, if (a -fs-> t) in S, and fs >= f
= a, otherwise
Application extends naturally to types S(f,t), modulo roles.
See Note [Flavours with roles].
Theorem: S(f,a) is well defined as a function.
Proof: Suppose (a -f1-> t1) and (a -f2-> t2) are both in S,
and f1 >= f and f2 >= f
Then by (R2) f1 >= f2 or f2 >= f1, which contradicts (WF1)
Notation: repeated application.
S^0(f,t) = t
S^(n+1)(f,t) = S(f, S^n(f,t))
Definition: inert generalised substitution
A generalised substitution S is "inert" iff
(IG1) there is an n such that
for every f,t, S^n(f,t) = S^(n+1)(f,t)
(IG2) if (b -f-> t) in S, and f >= f, then S(f,t) = t
that is, each individual binding is "self-stable"
----------------------------------------------------------------
Our main invariant:
the inert CTyEqCans should be an inert generalised substitution
----------------------------------------------------------------
Note that inertness is not the same as idempotence. To apply S to a
type, you may have to apply it recursively. But inertness does
guarantee that this recursive use will terminate.
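For example, writing only Given/nominal triples (so f >= f holds):
  { a -G-> Maybe c,  b -G-> [Maybe c] }
is an inert generalised substitution: every right-hand side is already a
fixed point of S, so (IG1) holds with n=1 and each binding is self-stable
(IG2).  By contrast
  { a -G-> [b],  b -G-> Maybe c }
satisfies (IG1) (everything stabilises after two applications) but not
(IG2), because S(G,[b]) = [Maybe c], not [b]; this weaker set-up is the
one described in Note [An alternative story for the inert substitution]
below.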
---------- The main theorem --------------
Suppose we have a "work item"
a -fw-> t
and an inert generalised substitution S,
such that
(T1) S(fw,a) = a -- LHS of work-item is a fixpoint of S(fw,_)
(T2) S(fw,t) = t -- RHS of work-item is a fixpoint of S(fw,_)
(T3) a not in t -- No occurs check in the work item
(K1) if (a -fs-> s) is in S then not (fw >= fs)
(K2) if (b -fs-> s) is in S, where b /= a, then
(K2a) not (fs >= fs)
or (K2b) not (fw >= fs)
or (K2c) a not in s
(K3) If (b -fs-> s) is in S with (fw >= fs), then
(K3a) If the role of fs is nominal: s /= a
(K3b) If the role of fs is representational: EITHER
a not in s, OR
the path from the top of s to a includes at least one non-newtype
then the extended substitution T = S+(a -fw-> t)
is an inert generalised substitution.
The idea is that
* (T1-2) are guaranteed by exhaustively rewriting the work-item
with S(fw,_).
* T3 is guaranteed by a simple occurs-check on the work item.
* (K1-3) are the "kick-out" criteria. (As stated, they are really the
"keep" criteria.) If the current inert S contains a triple that does
not satisfy (K1-3), then we remove it from S by "kicking it out",
and re-processing it.
* Note that kicking out is a Bad Thing, because it means we have to
re-process a constraint. The less we kick out, the better.
TODO: Make sure that kicking out really *is* a Bad Thing. We've assumed
this but haven't done the empirical study to check.
* Assume we have G>=G, G>=W, D>=D, and that's all. Then, when performing
a unification we add a new given a -G-> ty. But doing so does NOT require
us to kick out an inert wanted that mentions a, because of (K2a). This
is a common case, hence good not to kick out.
* Lemma (L1): The conditions of the Main Theorem imply that there is no
(a -fs-> s) in S, s.t. (fs >= fw).
Proof. Suppose the contrary (fs >= fw). Then because of (T1),
S(fw,a)=a. But since fs>=fw, S(fw,a) = s, hence s=a. But now we
have (a -fs-> a) in S, which contradicts (WF2).
* The extended substitution satisfies (WF1) and (WF2)
- (K1) plus (L1) guarantee that the extended substitution satisfies (WF1).
- (T3) guarantees (WF2).
* (K2) is about inertness. Intuitively, any infinite chain T^0(f,t),
T^1(f,t), T^2(f,t).... must pass through the new work item infinitely
often, since the substitution without the work item is inert; and must
pass through at least one of the triples in S infinitely often.
- (K2a): if not(fs>=fs) then there is no f that fs can rewrite (fs>=f),
and hence this triple never plays a role in application S(f,a).
It is always safe to extend S with such a triple.
(NB: we could strengthen (K1) in this way too, but see (K3).)
- (K2b): If this holds, we can't pass through this triple infinitely
often, because if we did then fs>=f, fw>=f, hence fs>=fw,
contradicting (L1), or fw>=fs contradicting K2b.
- (K2c): if a not in s, we have no further opportunity to apply the
work item.
NB: this reasoning isn't watertight.
Key lemma to make it watertight.
Under the conditions of the Main Theorem,
forall f st fw >= f, a is not in S^k(f,t), for any k
Also, consider roles more carefully. See Note [Flavours with roles].
Completeness
~~~~~~~~~~~~~
K3: completeness. (K3) is not necessary for the extended substitution
to be inert. In fact K1 could be made stronger by saying
... then (not (fw >= fs) or not (fs >= fs))
But it's not enough for S to be inert; we also want completeness.
That is, we want to be able to solve all soluble wanted equalities.
Suppose we have
work-item b -G-> a
inert-item a -W-> b
Assuming (G >= W) but not (W >= W), this fulfills all the conditions,
so we could extend the inerts, thus:
inert-items b -G-> a
a -W-> b
But if we kicked-out the inert item, we'd get
work-item a -W-> b
inert-item b -G-> a
Then rewriting the work-item gives us (a -W-> a), which is soluble via Refl.
So we add one more clause to the kick-out criteria
Another way to understand (K3) is that we treat an inert item
a -f-> b
in the same way as
b -f-> a
So if we kick out one, we should kick out the other. The orientation
is somewhat accidental.
When considering roles, we also need the second clause (K3b). Consider
inert-item a -W/R-> b c
work-item c -G/N-> a
The work-item doesn't get rewritten by the inert, because (>=) doesn't hold.
We've satisfied conditions (T1)-(T3) and (K1) and (K2). If all we had were
condition (K3a), then we would keep the inert around and add the work item.
But then, consider if we hit the following:
work-item2 b -G/N-> Id
where
newtype Id x = Id x
For similar reasons, if we only had (K3a), we wouldn't kick the
representational inert out. And then, we'd miss solving the inert, which
has now reduced to reflexivity. The solution here is to kick out representational
inerts whenever the tyvar of a work item is "exposed", where exposed means
not under some proper data-type constructor, like [] or Maybe. See
isTyVarExposed in TcType. This is encoded in (K3b).
Note [Flavours with roles]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The system described in Note [The inert equalities] discusses an abstract
set of flavours. In GHC, flavours have two components: the flavour proper,
taken from {Wanted, Derived, Given}; and the equality relation (often called
role), taken from {NomEq, ReprEq}. When substituting w.r.t. the inert set,
as described in Note [The inert equalities], we must be careful to respect
roles. For example, if we have
inert set: a -G/R-> Int
b -G/R-> Bool
type role T nominal representational
and we wish to compute S(W/R, T a b), the correct answer is T a Bool, NOT
T Int Bool. The reason is that T's first parameter has a nominal role, and
thus rewriting a to Int in T a b is wrong. Indeed, this non-congruence of
substitution means that the proof in Note [The inert equalities] may need
to be revisited, but we don't think that the end conclusion is wrong.
-}
flattenTyVar :: FlattenEnv -> TcTyVar -> TcS (Xi, TcCoercion)
-- "Flattening" a type variable means to apply the substitution to it
-- The substitution is actually the union of
-- * the unifications that have taken place (either before the
-- solver started, or in TcInteract.solveByUnification)
-- * the CTyEqCans held in the inert set
--
-- Postcondition: co : xi ~ tv
flattenTyVar fmode tv
= do { mb_yes <- flattenTyVarOuter fmode tv
; case mb_yes of
Left tv' -> -- Done
do { traceTcS "flattenTyVar1" (ppr tv $$ ppr (tyVarKind tv'))
; return (ty', mkTcReflCo (feRole fmode) ty') }
where
ty' = mkTyVarTy tv'
Right (ty1, co1) -- Recurse
-> do { (ty2, co2) <- flatten_one fmode ty1
; traceTcS "flattenTyVar3" (ppr tv $$ ppr ty2)
; return (ty2, co2 `mkTcTransCo` co1) }
}
flattenTyVarOuter :: FlattenEnv -> TcTyVar
-> TcS (Either TyVar (TcType, TcCoercion))
-- Look up the tyvar in
-- a) the internal MetaTyVar box
-- b) the tyvar binds
-- c) the inerts
-- Return (Left tv') if it is not found, tv' has a properly zonked kind
-- Return (Right (ty, co)) if found, with co :: ty ~ tv
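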
flattenTyVarOuter fmode tv
| not (isTcTyVar tv) -- Happens when flatten under a (forall a. ty)
= Left `liftM` flattenTyVarFinal fmode tv
-- So ty contains references to the non-TcTyVar a
| otherwise
= do { mb_ty <- isFilledMetaTyVar_maybe tv
; case mb_ty of {
Just ty -> do { traceTcS "Following filled tyvar" (ppr tv <+> equals <+> ppr ty)
; return (Right (ty, mkTcReflCo (feRole fmode) ty)) } ;
Nothing ->
-- Try in the inert equalities
-- See Definition [Applying a generalised substitution]
do { ieqs <- getInertEqs
; case lookupVarEnv ieqs tv of
Just (ct:_) -- If the first doesn't work,
-- the subsequent ones won't either
| CTyEqCan { cc_ev = ctev, cc_tyvar = tv, cc_rhs = rhs_ty } <- ct
, ctEvFlavourRole ctev `eqCanRewriteFR` feFlavourRole fmode
-> do { traceTcS "Following inert tyvar" (ppr tv <+> equals <+> ppr rhs_ty $$ ppr ctev)
; let rewrite_co1 = mkTcSymCo (ctEvCoercion ctev)
rewrite_co = case (ctEvEqRel ctev, fe_eq_rel fmode) of
(ReprEq, _rel) -> ASSERT( _rel == ReprEq )
-- if this ASSERT fails, then
-- eqCanRewriteFR answered incorrectly
rewrite_co1
(NomEq, NomEq) -> rewrite_co1
(NomEq, ReprEq) -> mkTcSubCo rewrite_co1
; return (Right (rhs_ty, rewrite_co)) }
-- NB: if ct is Derived then fmode must be too, hence
-- we are not going to touch the returned coercion
-- so ctEvCoercion is fine.
_other -> Left `liftM` flattenTyVarFinal fmode tv
} } }
flattenTyVarFinal :: FlattenEnv -> TcTyVar -> TcS TyVar
flattenTyVarFinal fmode tv
= -- Done, but make sure the kind is zonked
do { let kind = tyVarKind tv
kind_fmode = setFEMode fmode FM_SubstOnly
; (new_knd, _kind_co) <- flatten_one kind_fmode kind
; return (setVarType tv new_knd) }
{-
Note [An alternative story for the inert substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(This entire note is just background, left here in case we ever want
to return to the previous state of affairs)
We used (GHC 7.8) to have this story for the inert substitution inert_eqs
* 'a' is not in fvs(ty)
* They are *inert* in the weaker sense that there is no infinite chain of
(i1 `eqCanRewrite` i2), (i2 `eqCanRewrite` i3), etc
This means that flattening must be recursive, but it does allow
[G] a ~ [b]
[G] b ~ Maybe c
This avoids "saturating" the Givens, which can save a modest amount of work.
It is easy to implement, in TcInteract.kick_out, by kicking out an inert
only if (a) the work item can rewrite the inert AND
(b) the inert cannot rewrite the work item
This is significantly harder to think about. It can save a LOT of work
in occurs-check cases, but we don't care about them much. Trac #5837
is an example; all the constraints here are Givens
[G] a ~ TF (a,Int)
-->
work TF (a,Int) ~ fsk
inert fsk ~ a
--->
work fsk ~ (TF a, TF Int)
inert fsk ~ a
--->
work a ~ (TF a, TF Int)
inert fsk ~ a
---> (attempting to flatten (TF a) so that it does not mention a)
work TF a ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (substitute for a)
work TF (fsk2, TF Int) ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (top-level reduction, re-orient)
work fsk2 ~ (TF fsk2, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (attempting to flatten (TF fsk2) to get rid of fsk2)
work TF fsk2 ~ fsk3
work fsk2 ~ (fsk3, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
--->
work TF fsk2 ~ fsk3
inert fsk2 ~ (fsk3, TF Int)
inert a ~ ((fsk3, TF Int), TF Int)
inert fsk ~ ((fsk3, TF Int), TF Int)
Because the incoming given rewrites all the inert givens, we get more and
more duplication in the inert set. But this really only happens in pathological
cases, so we don't care.
-}
eqCanRewrite :: CtEvidence -> CtEvidence -> Bool
eqCanRewrite ev1 ev2 = ctEvFlavourRole ev1 `eqCanRewriteFR` ctEvFlavourRole ev2
-- | Whether or not one 'Ct' can rewrite another is determined by its
-- flavour and its equality relation
type CtFlavourRole = (CtFlavour, EqRel)
-- | Extract the flavour and role from a 'CtEvidence'
ctEvFlavourRole :: CtEvidence -> CtFlavourRole
ctEvFlavourRole ev = (ctEvFlavour ev, ctEvEqRel ev)
-- | Extract the flavour and role from a 'Ct'
ctFlavourRole :: Ct -> CtFlavourRole
ctFlavourRole = ctEvFlavourRole . cc_ev
-- | Extract the flavour and role from a 'FlattenEnv'
feFlavourRole :: FlattenEnv -> CtFlavourRole
feFlavourRole (FE { fe_flavour = flav, fe_eq_rel = eq_rel })
= (flav, eq_rel)
eqCanRewriteFR :: CtFlavourRole -> CtFlavourRole -> Bool
-- Very important function!
-- See Note [eqCanRewrite]
eqCanRewriteFR (Given, NomEq) (_, _) = True
eqCanRewriteFR (Given, ReprEq) (_, ReprEq) = True
eqCanRewriteFR _ _ = False
canRewriteOrSame :: CtEvidence -> CtEvidence -> Bool
-- See Note [canRewriteOrSame]
canRewriteOrSame ev1 ev2 = ev1 `eqCanRewrite` ev2 ||
ctEvFlavourRole ev1 == ctEvFlavourRole ev2
canRewriteOrSameFR :: CtFlavourRole -> CtFlavourRole -> Bool
canRewriteOrSameFR fr1 fr2 = fr1 `eqCanRewriteFR` fr2 || fr1 == fr2
{-
Note [eqCanRewrite]
~~~~~~~~~~~~~~~~~~~
(eqCanRewrite ct1 ct2) holds if the constraint ct1 (a CTyEqCan of form
tv ~ ty) can be used to rewrite ct2. It must satisfy the properties of
a can-rewrite relation, see Definition [Can-rewrite relation]
At the moment we don't allow Wanteds to rewrite Wanteds, because that can give
rise to very confusing type error messages. A good example is Trac #8450.
Here's another
f :: a -> Bool
f x = ( [x,'c'], [x,True] ) `seq` True
Here we get
[W] a ~ Char
[W] a ~ Bool
but we do not want to complain about Bool ~ Char!
Accordingly, we also don't let Deriveds rewrite Deriveds.
With the solver handling Coercible constraints like equality constraints,
the rewrite conditions must take role into account, never allowing
a representational equality to rewrite a nominal one.
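Concretely, with the definition of eqCanRewriteFR above:
  eqCanRewriteFR (Given, NomEq)  (Wanted, ReprEq)  ==  True
  eqCanRewriteFR (Given, ReprEq) (Wanted, ReprEq)  ==  True
  eqCanRewriteFR (Given, ReprEq) (Wanted, NomEq)   ==  False
  eqCanRewriteFR (Wanted, NomEq) (Wanted, NomEq)   ==  False
In particular the two Wanteds [W] a ~ Char and [W] a ~ Bool above never
rewrite one another, so we report them separately instead of the
confusing (Bool ~ Char).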
Note [canRewriteOrSame]
~~~~~~~~~~~~~~~~~~~~~~~
canRewriteOrSame is similar but
* returns True for Wanted/Wanted.
* works for all kinds of constraints, not just CTyEqCans
See the call sites for explanations.
************************************************************************
* *
Unflattening
* *
************************************************************************
An unflattening example:
[W] F a ~ alpha
flattens to
[W] F a ~ fmv (CFunEqCan)
[W] fmv ~ alpha (CTyEqCan)
We must solve both!
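On this example the code below behaves, roughly, as follows (assuming
alpha is touchable and the occurs checks succeed): Step 1
(unflatten_funeq) fills fmv := F a, and Step 2 (unflatten_eq) then fills
alpha with the zonked right-hand side, i.e. alpha := F a, so both
constraints are discharged with Refl evidence.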
-}
unflatten :: Cts -> Cts -> TcS Cts
unflatten tv_eqs funeqs
= do { dflags <- getDynFlags
; tclvl <- getTcLevel
; traceTcS "Unflattening" $ braces $
vcat [ ptext (sLit "Funeqs =") <+> pprCts funeqs
, ptext (sLit "Tv eqs =") <+> pprCts tv_eqs ]
-- Step 1: unflatten the CFunEqCans, except if that causes an occurs check
-- See Note [Unflatten using funeqs first]
; funeqs <- foldrBagM (unflatten_funeq dflags) emptyCts funeqs
; traceTcS "Unflattening 1" $ braces (pprCts funeqs)
-- Step 2: unify the irreds, if possible
; tv_eqs <- foldrBagM (unflatten_eq dflags tclvl) emptyCts tv_eqs
; traceTcS "Unflattening 2" $ braces (pprCts tv_eqs)
-- Step 3: fill any remaining fmvs with fresh unification variables
; funeqs <- mapBagM finalise_funeq funeqs
; traceTcS "Unflattening 3" $ braces (pprCts funeqs)
-- Step 4: remove any irreds that look like ty ~ ty
; tv_eqs <- foldrBagM finalise_eq emptyCts tv_eqs
; let all_flat = tv_eqs `andCts` funeqs
; traceTcS "Unflattening done" $ braces (pprCts all_flat)
; return all_flat }
where
----------------
unflatten_funeq :: DynFlags -> Ct -> Cts -> TcS Cts
unflatten_funeq dflags ct@(CFunEqCan { cc_fun = tc, cc_tyargs = xis
, cc_fsk = fmv, cc_ev = ev }) rest
= do { -- fmv should be a flatten meta-tv; we now fix its final
-- value, and then zonking will eliminate it
filled <- tryFill dflags fmv (mkTyConApp tc xis) ev
; return (if filled then rest else ct `consCts` rest) }
unflatten_funeq _ other_ct _
= pprPanic "unflatten_funeq" (ppr other_ct)
----------------
finalise_funeq :: Ct -> TcS Ct
finalise_funeq (CFunEqCan { cc_fsk = fmv, cc_ev = ev })
= do { demoteUnfilledFmv fmv
; return (mkNonCanonical ev) }
finalise_funeq ct = pprPanic "finalise_funeq" (ppr ct)
----------------
unflatten_eq :: DynFlags -> TcLevel -> Ct -> Cts -> TcS Cts
unflatten_eq dflags tclvl ct@(CTyEqCan { cc_ev = ev, cc_tyvar = tv, cc_rhs = rhs }) rest
| isFmvTyVar tv
= do { lhs_elim <- tryFill dflags tv rhs ev
; if lhs_elim then return rest else
do { rhs_elim <- try_fill dflags tclvl ev rhs (mkTyVarTy tv)
; if rhs_elim then return rest else
return (ct `consCts` rest) } }
| otherwise
= return (ct `consCts` rest)
unflatten_eq _ _ ct _ = pprPanic "unflatten_irred" (ppr ct)
----------------
finalise_eq :: Ct -> Cts -> TcS Cts
finalise_eq (CTyEqCan { cc_ev = ev, cc_tyvar = tv
, cc_rhs = rhs, cc_eq_rel = eq_rel }) rest
| isFmvTyVar tv
= do { ty1 <- zonkTcTyVar tv
; ty2 <- zonkTcType rhs
; let is_refl = ty1 `tcEqType` ty2
; if is_refl then do { when (isWanted ev) $
setEvBind (ctEvId ev)
(EvCoercion $
mkTcReflCo (eqRelRole eq_rel) rhs)
; return rest }
else return (mkNonCanonical ev `consCts` rest) }
| otherwise
= return (mkNonCanonical ev `consCts` rest)
finalise_eq ct _ = pprPanic "finalise_irred" (ppr ct)
----------------
try_fill dflags tclvl ev ty1 ty2
| Just tv1 <- tcGetTyVar_maybe ty1
, isTouchableOrFmv tclvl tv1
, typeKind ty1 `isSubKind` tyVarKind tv1
= tryFill dflags tv1 ty2 ev
| otherwise
= return False
tryFill :: DynFlags -> TcTyVar -> TcType -> CtEvidence -> TcS Bool
-- (tryFill tv rhs ev) sees if 'tv' is an un-filled MetaTv
-- If so, and if tv does not appear in 'rhs', set tv := rhs
-- bind the evidence (which should be a CtWanted) to Refl<rhs>
-- and return True. Otherwise return False
tryFill dflags tv rhs ev
= ASSERT2( not (isGiven ev), ppr ev )
do { is_filled <- isFilledMetaTyVar tv
; if is_filled then return False else
do { rhs' <- zonkTcType rhs
; case occurCheckExpand dflags tv rhs' of
OC_OK rhs'' -- Normal case: fill the tyvar
-> do { when (isWanted ev) $
setEvBind (ctEvId ev)
(EvCoercion (mkTcReflCo (ctEvRole ev) rhs''))
; setWantedTyBind tv rhs''
; return True }
_ -> -- Occurs check
return False } }
{-
Note [Unflatten using funeqs first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
[W] G a ~ Int
[W] F (G a) ~ G a
do not want to end up with
[W] F Int ~ Int
because that might actually hold! Better to end up with the two above
unsolved constraints. The flat form will be
G a ~ fmv1 (CFunEqCan)
F fmv1 ~ fmv2 (CFunEqCan)
fmv1 ~ Int (CTyEqCan)
fmv1 ~ fmv2 (CTyEqCan)
Unflatten using the fun-eqs first.
-}
-- | Change the 'EqRel' in a 'FlattenEnv'. Avoids allocating a
-- new 'FlattenEnv' where possible.
setFEEqRel :: FlattenEnv -> EqRel -> FlattenEnv
setFEEqRel fmode@(FE { fe_eq_rel = old_eq_rel }) new_eq_rel
| old_eq_rel == new_eq_rel = fmode
| otherwise = fmode { fe_eq_rel = new_eq_rel }
-- | Change the 'FlattenMode' in a 'FlattenEnv'. Avoids allocating
-- a new 'FlattenEnv' where possible.
setFEMode :: FlattenEnv -> FlattenMode -> FlattenEnv
setFEMode fmode@(FE { fe_mode = old_mode }) new_mode
| old_mode `eq` new_mode = fmode
| otherwise = fmode { fe_mode = new_mode }
where
FM_FlattenAll `eq` FM_FlattenAll = True
FM_SubstOnly `eq` FM_SubstOnly = True
FM_Avoid tv1 b1 `eq` FM_Avoid tv2 b2 = tv1 == tv2 && b1 == b2
_ `eq` _ = False
|
bitemyapp/ghc
|
compiler/typecheck/TcFlatten.hs
|
Haskell
|
bsd-3-clause
| 57,104
|
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TypeFamilies #-}
module Wavecore.ECDIS.SeaMap where
import Control.Applicative
import Data.Geo.TransverseMercator
import Data.Geo.UTM
import Data.Maybe
import FRP.Sodium
import Numeric.Units.Dimensional.TF.Prelude
import qualified Prelude as P ()
import Wavecore.ECDIS.Controller
newtype Coordinate =
MkCoordinate (PlaneAngle Double, PlaneAngle Double)
deriving (Eq)
newtype UTMCoordinate =
MkUTMCoordinate (Length Double, Length Double)
deriving (Eq)
newtype UTMZone = UTMZone Int
deriving (Eq)
newtype UTMZonedCoordinate =
MkZonedCoordinate ( ((UTMZone, Bool),
(PlaneAngle Double, Dimensionless Double)),
UTMCoordinate)
deriving (Eq)
_utmForward :: Coordinate -> UTMZonedCoordinate
_utmForward (MkCoordinate (lat, lon)) =
let ((z',n), (TM x y conv scale)) =
maybe (error "utmForward: no result") id $
utmForward' lat lon
utm = MkUTMCoordinate (x, y)
cs = (conv, scale)
zn = (UTMZone z', n)
in MkZonedCoordinate ((zn, cs), utm)
_utmZonedForward :: UTMZone -> Coordinate -> UTMCoordinate
_utmZonedForward (UTMZone z) (MkCoordinate (lat,lon)) =
let (_, (TM x y _ _)) =
maybe (error "utmZonedForward: no result") id $
utmZonedForward z lat lon
in MkUTMCoordinate (x, y)
_utmReverse :: Bool -> UTMZone -> UTMCoordinate -> Coordinate
_utmReverse n (UTMZone z) (MkUTMCoordinate (x,y)) =
MkCoordinate $ maybe (error "utmReverse: no result") id $
utmReverse z n x y
--
-- SEA MAP
--
data SeaMap
data SeaMapNorthing =
TrueNorth | MapNorth | HeadingNorth
deriving (Eq)
newtype SeaMapZoom =
SeaMapZoom (Dimensionless Int)
deriving (Eq)
class SeaMapProjection proj where
instance Controller SeaMap where
data ControllerCommand SeaMap =
SeaMapToggleInput |
SeaMapSetNorthing SeaMapNorthing |
SeaMapSetIntPos Coordinate
deriving (Eq)
data ControllerInput SeaMap =
SeaMapInput {
_smInPosition :: Behavior (Maybe Coordinate),
_smInZoomFactor :: Behavior (SeaMapZoom),
_smInMapSize :: Behavior (Dimensionless Double, Dimensionless Double),
_smInPixelFactor :: Behavior (Length Double),
_smInExtHeading :: Behavior (Maybe (PlaneAngle Double))
}
data ControllerOutput SeaMap =
SeaMapOutput {
_smUTMForward :: Behavior (Coordinate -> UTMCoordinate),
_smUTMReverse :: Behavior (UTMCoordinate -> Coordinate),
_smExtPosAvail :: Behavior Bool,
_smExtHeadingAvail :: Behavior Bool,
_smCurrentPosition :: Behavior (Coordinate),
_smCurrentPositionIsExternal :: Behavior Bool,
_smZone :: Behavior (UTMZone),
_smIsNorth :: Behavior Bool,
_smMeridianConvergence :: Behavior (PlaneAngle Double),
_smProjectionScale :: Behavior (Dimensionless Double),
_smRotation :: Behavior (PlaneAngle Double),
_smNorthing :: Behavior SeaMapNorthing,
_smMapOrigin :: Behavior (Length Double, Length Double),
_smMapDim :: Behavior (Length Double, Length Double)
}
newController i e =
let (toggleInputE, setNorthingE, setIntPosE) = seaMapSplitInputEvents e
extPosAvailE = fmap isJust $ updates (_smInPosition i)
extHeadingAvailE = fmap isJust $ updates (_smInExtHeading i)
in do
-- external inputs
extPosAvail <- hold False extPosAvailE
extHeadingAvail <- hold False extHeadingAvailE
-- the switching of position input
intPos <- hold smDefaultCoord $ fmap (\(SeaMapSetIntPos p) -> p) setIntPosE
(isExternal, pushIsExternal) <- newBehavior False
let checkInputs :: () -> Reactive (Behavior Coordinate)
checkInputs _ = do
extAvail <- sample extPosAvail
if (not extAvail)
then (pushIsExternal False >> return intPos)
else do
isExt <- sample isExternal
if (isExt) then (pushIsExternal False >> return intPos)
else do
pushIsExternal True
let frJust =
maybe (error "SeaMap Controller: no external pos") id
return $ fmap frJust $ _smInPosition i
switchPosSrcE :: Event (Behavior Coordinate)
switchPosSrcE = execute $ fmap checkInputs $
let toggle = fmap (\_ -> ()) toggleInputE
extLost = fmap (\_ -> ()) $ filterE (not) (updates extPosAvail)
in toggle `merge` extLost
posSrcSwitch <- hold intPos $ switchPosSrcE
curPos <- switch posSrcSwitch
-- position and map transformation
let curPosUTMZoned' = fmap _utmForward curPos
curPosUTMZoned = fmap (\(MkZonedCoordinate a) -> a) curPosUTMZoned'
northp = fmap (snd.fst.fst) curPosUTMZoned
zone = fmap (fst.fst.fst) curPosUTMZoned
meridianConvergence = fmap (fst.snd.fst) curPosUTMZoned
projectionScale = fmap (snd.snd.fst) curPosUTMZoned
curPosUTM = fmap snd curPosUTMZoned
utmRv = _utmReverse <$> northp <*> zone
utmFw = _utmZonedForward <$> zone
-- map viewport
let pxZoom =
liftA2 (\px (SeaMapZoom z) -> px * fmap fromIntegral z)
(_smInPixelFactor i) (_smInZoomFactor i)
mapWidth = liftA2 (*) pxZoom . fmap fst $
(_smInMapSize i)
mapWidth2 = liftA2 (/) mapWidth (pure _2)
mapHeight = liftA2 (*) pxZoom . fmap snd $
(_smInMapSize i)
mapHeight2 = liftA2 (/) mapHeight (pure _2)
mapDim = liftA2 (\a b -> (a,b)) mapWidth mapHeight
mapOrigin = liftA3 (\(MkUTMCoordinate (x,y)) w2 h2 -> (x - w2, y - h2))
curPosUTM mapWidth2 mapHeight2
-- rotation / heading
let mapNorth = pure $ 0 *~ degree
trueNorth = liftA2 (-) mapNorth meridianConvergence
headingLostE = fmap (\_ -> SeaMapSetNorthing MapNorth) $
filterE not . updates $ extHeadingAvail
northingChange = headingLostE `merge` setNorthingE
(northingSwitch, pushNorthingSwitch) <- newBehavior MapNorth
let frJust = maybe (error "SeaMap Controller: no extern heading") id
onNorthingChange (SeaMapSetNorthing n) =
case n of
TrueNorth -> do pushNorthingSwitch TrueNorth >> return trueNorth
MapNorth -> do pushNorthingSwitch MapNorth >> return mapNorth
HeadingNorth -> do
extAvail <- sample extHeadingAvail
if (extAvail)
then do pushNorthingSwitch HeadingNorth
return $ fmap frJust (_smInExtHeading i)
else do pushNorthingSwitch MapNorth >> return mapNorth
onNorthingChange _ =
error "SeaMap Controller: unexpxted Northing event"
northingSwitchToE = execute $ fmap onNorthingChange northingChange
rotationSwitch <- hold mapNorth northingSwitchToE
rotation <- switch rotationSwitch
return $ SeaMapOutput {
_smExtPosAvail = extPosAvail,
_smExtHeadingAvail = extHeadingAvail,
_smCurrentPosition = curPos,
_smCurrentPositionIsExternal = isExternal,
_smZone = zone,
_smIsNorth = northp,
_smMeridianConvergence = meridianConvergence,
_smProjectionScale = projectionScale,
_smRotation = rotation,
_smNorthing = northingSwitch,
_smUTMReverse = utmRv,
_smUTMForward = utmFw,
_smMapOrigin = mapOrigin,
_smMapDim = mapDim
}
smDefaultCoord :: Coordinate
smDefaultCoord = MkCoordinate (52.3 *~ degree, 7.1 *~ degree)
seaMapSplitInputEvents :: Event (ControllerCommand SeaMap) ->
( Event (ControllerCommand SeaMap)
, Event (ControllerCommand SeaMap)
, Event (ControllerCommand SeaMap)
)
seaMapSplitInputEvents e =
let toggleInputE = filterE ((==) SeaMapToggleInput) e
setNorthingE = filterE (\e' -> case e' of
SeaMapSetNorthing _ -> True
_ -> False) e
setIntPosE = filterE (\e' -> case e' of
SeaMapSetIntPos _ -> True
_ -> False) e
in (toggleInputE, setNorthingE, setIntPosE)
instance Controller SeaMapZoom where
data ControllerCommand SeaMapZoom = ZoomIn | ZoomOut
data ControllerInput SeaMapZoom =
SeaMapZoomInput {
_initZoom :: SeaMapZoom,
_zoomFactor :: (Dimensionless Int)
}
data ControllerOutput SeaMapZoom =
SeaMapZoomOutput {
_seaMapZoom :: Behavior SeaMapZoom
}
newController i e =
let zoomF ZoomIn (SeaMapZoom z)
= SeaMapZoom $ z - (_zoomFactor i)
zoomF ZoomOut (SeaMapZoom z)
= SeaMapZoom $ z + (_zoomFactor i)
in do
a <- accum (_initZoom i) (fmap zoomF e)
return SeaMapZoomOutput {
_seaMapZoom = a
}
|
wavecorenautic/ecdis-client
|
src/Wavecore/ECDIS/SeaMap.hs
|
Haskell
|
bsd-3-clause
| 9,216
|
{-
(c) Galois, 2006
(c) University of Glasgow, 2007
-}
{-# LANGUAGE CPP, NondecreasingIndentation, RecordWildCards #-}
module Coverage (addTicksToBinds, hpcInitCode) where
#ifdef GHCI
import qualified GHCi
import GHCi.RemoteTypes
import Data.Array
import ByteCodeTypes
import GHC.Stack.CCS
#endif
import Type
import HsSyn
import Module
import Outputable
import DynFlags
import Control.Monad
import SrcLoc
import ErrUtils
import NameSet hiding (FreeVars)
import Name
import Bag
import CostCentre
import CoreSyn
import Id
import VarSet
import Data.List
import FastString
import HscTypes
import TyCon
import UniqSupply
import BasicTypes
import MonadUtils
import Maybes
import CLabel
import Util
import Data.Time
import System.Directory
import Trace.Hpc.Mix
import Trace.Hpc.Util
import Data.Map (Map)
import qualified Data.Map as Map
{-
************************************************************************
* *
* The main function: addTicksToBinds
* *
************************************************************************
-}
addTicksToBinds
:: HscEnv
-> Module
-> ModLocation -- ... off the current module
-> NameSet -- Exported Ids. When we call addTicksToBinds,
-- isExportedId doesn't work yet (the desugarer
-- hasn't set it), so we have to work from this set.
-> [TyCon] -- Type constructors in this module
-> LHsBinds Id
-> IO (LHsBinds Id, HpcInfo, Maybe ModBreaks)
addTicksToBinds hsc_env mod mod_loc exports tyCons binds
| let dflags = hsc_dflags hsc_env
passes = coveragePasses dflags, not (null passes),
Just orig_file <- ml_hs_file mod_loc = do
if "boot" `isSuffixOf` orig_file
then return (binds, emptyHpcInfo False, Nothing)
else do
us <- mkSplitUniqSupply 'C' -- for cost centres
let orig_file2 = guessSourceFile binds orig_file
tickPass tickish (binds,st) =
let env = TTE
{ fileName = mkFastString orig_file2
, declPath = []
, tte_dflags = dflags
, exports = exports
, inlines = emptyVarSet
, inScope = emptyVarSet
, blackList = Map.fromList
[ (getSrcSpan (tyConName tyCon),())
| tyCon <- tyCons ]
, density = mkDensity tickish dflags
, this_mod = mod
, tickishType = tickish
}
(binds',_,st') = unTM (addTickLHsBinds binds) env st
in (binds', st')
initState = TT { tickBoxCount = 0
, mixEntries = []
, uniqSupply = us
}
(binds1,st) = foldr tickPass (binds, initState) passes
let tickCount = tickBoxCount st
entries = reverse $ mixEntries st
hashNo <- writeMixEntries dflags mod tickCount entries orig_file2
modBreaks <- mkModBreaks hsc_env mod tickCount entries
when (dopt Opt_D_dump_ticked dflags) $
log_action dflags dflags SevDump noSrcSpan defaultDumpStyle
(pprLHsBinds binds1)
return (binds1, HpcInfo tickCount hashNo, Just modBreaks)
| otherwise = return (binds, emptyHpcInfo False, Nothing)
guessSourceFile :: LHsBinds Id -> FilePath -> FilePath
guessSourceFile binds orig_file =
-- Try to look for a file generated from a .hsc file to a
-- .hs file, by peeking ahead.
let top_pos = catMaybes $ foldrBag (\ (L pos _) rest ->
srcSpanFileName_maybe pos : rest) [] binds
in
case top_pos of
(file_name:_) | ".hsc" `isSuffixOf` unpackFS file_name
-> unpackFS file_name
_ -> orig_file
mkModBreaks :: HscEnv -> Module -> Int -> [MixEntry_] -> IO ModBreaks
#ifndef GHCI
mkModBreaks _hsc_env _mod _count _entries = return emptyModBreaks
#else
mkModBreaks hsc_env mod count entries
| HscInterpreted <- hscTarget (hsc_dflags hsc_env) = do
breakArray <- GHCi.newBreakArray hsc_env (length entries)
ccs <- mkCCSArray hsc_env mod count entries
let
locsTicks = listArray (0,count-1) [ span | (span,_,_,_) <- entries ]
varsTicks = listArray (0,count-1) [ vars | (_,_,vars,_) <- entries ]
declsTicks = listArray (0,count-1) [ decls | (_,decls,_,_) <- entries ]
return emptyModBreaks
{ modBreaks_flags = breakArray
, modBreaks_locs = locsTicks
, modBreaks_vars = varsTicks
, modBreaks_decls = declsTicks
, modBreaks_ccs = ccs
}
| otherwise = return emptyModBreaks
mkCCSArray
:: HscEnv -> Module -> Int -> [MixEntry_]
-> IO (Array BreakIndex (RemotePtr GHC.Stack.CCS.CostCentre))
mkCCSArray hsc_env modul count entries = do
if interpreterProfiled dflags
then do
let module_str = moduleNameString (moduleName modul)
costcentres <- GHCi.mkCostCentres hsc_env module_str (map mk_one entries)
return (listArray (0,count-1) costcentres)
else do
return (listArray (0,-1) [])
where
dflags = hsc_dflags hsc_env
mk_one (srcspan, decl_path, _, _) = (name, src)
where name = concat (intersperse "." decl_path)
src = showSDoc dflags (ppr srcspan)
#endif
writeMixEntries
:: DynFlags -> Module -> Int -> [MixEntry_] -> FilePath -> IO Int
writeMixEntries dflags mod count entries filename
| not (gopt Opt_Hpc dflags) = return 0
| otherwise = do
let
hpc_dir = hpcDir dflags
mod_name = moduleNameString (moduleName mod)
hpc_mod_dir
| moduleUnitId mod == mainUnitId = hpc_dir
| otherwise = hpc_dir ++ "/" ++ unitIdString (moduleUnitId mod)
tabStop = 8 -- <tab> counts as a normal char in GHC's
-- location ranges.
createDirectoryIfMissing True hpc_mod_dir
modTime <- getModificationUTCTime filename
let entries' = [ (hpcPos, box)
| (span,_,_,box) <- entries, hpcPos <- [mkHpcPos span] ]
when (length entries' /= count) $ do
panic "the number of .mix entries are inconsistent"
let hashNo = mixHash filename modTime tabStop entries'
mixCreate hpc_mod_dir mod_name
$ Mix filename modTime (toHash hashNo) tabStop entries'
return hashNo
-- -----------------------------------------------------------------------------
-- TickDensity: where to insert ticks
data TickDensity
= TickForCoverage -- for Hpc
| TickForBreakPoints -- for GHCi
| TickAllFunctions -- for -prof-auto-all
| TickTopFunctions -- for -prof-auto-top
| TickExportedFunctions -- for -prof-auto-exported
| TickCallSites -- for stack tracing
deriving Eq
mkDensity :: TickishType -> DynFlags -> TickDensity
mkDensity tickish dflags = case tickish of
HpcTicks -> TickForCoverage
SourceNotes -> TickForCoverage
Breakpoints -> TickForBreakPoints
ProfNotes ->
case profAuto dflags of
ProfAutoAll -> TickAllFunctions
ProfAutoTop -> TickTopFunctions
ProfAutoExports -> TickExportedFunctions
ProfAutoCalls -> TickCallSites
_other -> panic "mkDensity"
-- | Decide whether to add a tick to a binding or not.
shouldTickBind :: TickDensity
-> Bool -- top level?
-> Bool -- exported?
-> Bool -- simple pat bind?
-> Bool -- INLINE pragma?
-> Bool
shouldTickBind density top_lev exported _simple_pat inline
= case density of
TickForBreakPoints -> False
-- we never add breakpoints to simple pattern bindings
-- (there's always a tick on the rhs anyway).
TickAllFunctions -> not inline
TickTopFunctions -> top_lev && not inline
TickExportedFunctions -> exported && not inline
TickForCoverage -> True
TickCallSites -> False
shouldTickPatBind :: TickDensity -> Bool -> Bool
shouldTickPatBind density top_lev
= case density of
TickForBreakPoints -> False
TickAllFunctions -> True
TickTopFunctions -> top_lev
TickExportedFunctions -> False
TickForCoverage -> False
TickCallSites -> False
-- -----------------------------------------------------------------------------
-- Adding ticks to bindings
addTickLHsBinds :: LHsBinds Id -> TM (LHsBinds Id)
addTickLHsBinds = mapBagM addTickLHsBind
addTickLHsBind :: LHsBind Id -> TM (LHsBind Id)
addTickLHsBind (L pos bind@(AbsBinds { abs_binds = binds,
abs_exports = abs_exports })) = do
withEnv add_exports $ do
withEnv add_inlines $ do
binds' <- addTickLHsBinds binds
return $ L pos $ bind { abs_binds = binds' }
where
-- in AbsBinds, the Id on each binding is not the actual top-level
-- Id that we are defining, they are related by the abs_exports
-- field of AbsBinds. So if we're doing TickExportedFunctions we need
-- to add the local Ids to the set of exported Names so that we know to
-- tick the right bindings.
add_exports env =
env{ exports = exports env `extendNameSetList`
[ idName mid
| ABE{ abe_poly = pid, abe_mono = mid } <- abs_exports
, idName pid `elemNameSet` (exports env) ] }
add_inlines env =
env{ inlines = inlines env `extendVarSetList`
[ mid
| ABE{ abe_poly = pid, abe_mono = mid } <- abs_exports
, isAnyInlinePragma (idInlinePragma pid) ] }
addTickLHsBind (L pos bind@(AbsBindsSig { abs_sig_bind = val_bind
, abs_sig_export = poly_id }))
| L _ FunBind { fun_id = L _ mono_id } <- val_bind
= do withEnv (add_export mono_id) $ do
withEnv (add_inlines mono_id) $ do
val_bind' <- addTickLHsBind val_bind
return $ L pos $ bind { abs_sig_bind = val_bind' }
| otherwise
= pprPanic "addTickLHsBind" (ppr bind)
where
-- see AbsBinds comments
add_export mono_id env
| idName poly_id `elemNameSet` exports env
= env { exports = exports env `extendNameSet` idName mono_id }
| otherwise
= env
add_inlines mono_id env
| isAnyInlinePragma (idInlinePragma poly_id)
= env { inlines = inlines env `extendVarSet` mono_id }
| otherwise
= env
addTickLHsBind (L pos (funBind@(FunBind { fun_id = (L _ id) }))) = do
let name = getOccString id
decl_path <- getPathEntry
density <- getDensity
inline_ids <- liftM inlines getEnv
let inline = isAnyInlinePragma (idInlinePragma id)
|| id `elemVarSet` inline_ids
-- See Note [inline sccs]
tickish <- tickishType `liftM` getEnv
if inline && tickish == ProfNotes then return (L pos funBind) else do
(fvs, mg@(MG { mg_alts = matches' })) <-
getFreeVars $
addPathEntry name $
addTickMatchGroup False (fun_matches funBind)
blackListed <- isBlackListed pos
exported_names <- liftM exports getEnv
-- We don't want to generate code for blacklisted positions
-- We don't want redundant ticks on simple pattern bindings
-- We don't want to tick non-exported bindings in TickExportedFunctions
let simple = isSimplePatBind funBind
toplev = null decl_path
exported = idName id `elemNameSet` exported_names
tick <- if not blackListed &&
shouldTickBind density toplev exported simple inline
then
bindTick density name pos fvs
else
return Nothing
let mbCons = maybe Prelude.id (:)
return $ L pos $ funBind { fun_matches = mg { mg_alts = matches' }
, fun_tick = tick `mbCons` fun_tick funBind }
where
-- a binding is a simple pattern binding if it is a funbind with
-- zero patterns
isSimplePatBind :: HsBind a -> Bool
isSimplePatBind funBind = matchGroupArity (fun_matches funBind) == 0
-- TODO: Revisit this
addTickLHsBind (L pos (pat@(PatBind { pat_lhs = lhs, pat_rhs = rhs }))) = do
let name = "(...)"
(fvs, rhs') <- getFreeVars $ addPathEntry name $ addTickGRHSs False False rhs
let pat' = pat { pat_rhs = rhs'}
-- Should create ticks here?
density <- getDensity
decl_path <- getPathEntry
let top_lev = null decl_path
if not (shouldTickPatBind density top_lev) then return (L pos pat') else do
-- Allocate the ticks
rhs_tick <- bindTick density name pos fvs
let patvars = map getOccString (collectPatBinders lhs)
patvar_ticks <- mapM (\v -> bindTick density v pos fvs) patvars
-- Add to pattern
let mbCons = maybe id (:)
rhs_ticks = rhs_tick `mbCons` fst (pat_ticks pat')
patvar_tickss = zipWith mbCons patvar_ticks
(snd (pat_ticks pat') ++ repeat [])
return $ L pos $ pat' { pat_ticks = (rhs_ticks, patvar_tickss) }
-- Only internal stuff, not from source, uses VarBind, so we ignore it.
addTickLHsBind var_bind@(L _ (VarBind {})) = return var_bind
addTickLHsBind patsyn_bind@(L _ (PatSynBind {})) = return patsyn_bind
bindTick
:: TickDensity -> String -> SrcSpan -> FreeVars -> TM (Maybe (Tickish Id))
bindTick density name pos fvs = do
decl_path <- getPathEntry
let
toplev = null decl_path
count_entries = toplev || density == TickAllFunctions
top_only = density /= TickAllFunctions
box_label = if toplev then TopLevelBox [name]
else LocalBox (decl_path ++ [name])
--
allocATickBox box_label count_entries top_only pos fvs
-- Note [inline sccs]
--
-- It should be reasonable to add ticks to INLINE functions; however
-- currently this tickles a bug later on because the SCCfinal pass
-- does not look inside unfoldings to find CostCentres. It would be
-- difficult to fix that, because SCCfinal currently works on STG and
-- not Core (and since it also generates CostCentres for CAFs,
-- changing this would be difficult too).
--
-- Another reason not to add ticks to INLINE functions is that this is
-- sometimes handy for avoiding adding a tick to a particular function
-- (see #6131)
--
-- So for now we do not add any ticks to INLINE functions at all.
-- -----------------------------------------------------------------------------
-- Decorate an LHsExpr with ticks
-- selectively add ticks to interesting expressions
addTickLHsExpr :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExpr e@(L pos e0) = do
d <- getDensity
case d of
TickForBreakPoints | isGoodBreakExpr e0 -> tick_it
TickForCoverage -> tick_it
TickCallSites | isCallSite e0 -> tick_it
_other -> dont_tick_it
where
tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0
dont_tick_it = addTickLHsExprNever e
-- Add a tick to an expression which is the RHS of an equation or a binding.
-- We always consider these to be breakpoints, unless the expression is a 'let'
-- (because the body will definitely have a tick somewhere). ToDo: perhaps
-- we should treat 'case' and 'if' the same way?
addTickLHsExprRHS :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprRHS e@(L pos e0) = do
d <- getDensity
case d of
TickForBreakPoints | HsLet{} <- e0 -> dont_tick_it
| otherwise -> tick_it
TickForCoverage -> tick_it
TickCallSites | isCallSite e0 -> tick_it
_other -> dont_tick_it
where
tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0
dont_tick_it = addTickLHsExprNever e
-- The inner expression of an evaluation context:
-- let binds in [], ( [] )
-- we never tick these if we're doing HPC, but otherwise
-- we treat it like an ordinary expression.
addTickLHsExprEvalInner :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprEvalInner e = do
d <- getDensity
case d of
TickForCoverage -> addTickLHsExprNever e
_otherwise -> addTickLHsExpr e
-- | A let body is treated differently from addTickLHsExprEvalInner
-- above with TickForBreakPoints, because for breakpoints we always
-- want to tick the body, even if it is not a redex. See test
-- break012. This gives the user the opportunity to inspect the
-- values of the let-bound variables.
addTickLHsExprLetBody :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprLetBody e@(L pos e0) = do
d <- getDensity
case d of
TickForBreakPoints | HsLet{} <- e0 -> dont_tick_it
| otherwise -> tick_it
_other -> addTickLHsExprEvalInner e
where
tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0
dont_tick_it = addTickLHsExprNever e
-- version of addTick that does not actually add a tick,
-- because the scope of this tick is completely subsumed by
-- another.
addTickLHsExprNever :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprNever (L pos e0) = do
e1 <- addTickHsExpr e0
return $ L pos e1
-- general heuristic: expressions which do not denote values are good
-- break points
isGoodBreakExpr :: HsExpr Id -> Bool
isGoodBreakExpr (HsApp {}) = True
isGoodBreakExpr (OpApp {}) = True
isGoodBreakExpr _other = False
isCallSite :: HsExpr Id -> Bool
isCallSite HsApp{} = True
isCallSite OpApp{} = True
isCallSite _ = False
addTickLHsExprOptAlt :: Bool -> LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprOptAlt oneOfMany (L pos e0)
= ifDensity TickForCoverage
(allocTickBox (ExpBox oneOfMany) False False pos $ addTickHsExpr e0)
(addTickLHsExpr (L pos e0))
addBinTickLHsExpr :: (Bool -> BoxLabel) -> LHsExpr Id -> TM (LHsExpr Id)
addBinTickLHsExpr boxLabel (L pos e0)
= ifDensity TickForCoverage
(allocBinTickBox boxLabel pos $ addTickHsExpr e0)
(addTickLHsExpr (L pos e0))
-- -----------------------------------------------------------------------------
-- Decorate the body of an HsExpr with ticks.
-- (Whether to put a tick around the whole expression was already decided,
-- in the addTickLHsExpr family of functions.)
addTickHsExpr :: HsExpr Id -> TM (HsExpr Id)
addTickHsExpr e@(HsVar (L _ id)) = do freeVar id; return e
addTickHsExpr (HsUnboundVar {}) = panic "addTickHsExpr.HsUnboundVar"
addTickHsExpr e@(HsIPVar _) = return e
addTickHsExpr e@(HsOverLit _) = return e
addTickHsExpr e@(HsOverLabel _) = return e
addTickHsExpr e@(HsLit _) = return e
addTickHsExpr (HsLam matchgroup) = liftM HsLam (addTickMatchGroup True matchgroup)
addTickHsExpr (HsLamCase ty mgs) = liftM (HsLamCase ty) (addTickMatchGroup True mgs)
addTickHsExpr (HsApp e1 e2) = liftM2 HsApp (addTickLHsExprNever e1) e2'
-- This might be a type application. Then don't put a tick around e2,
-- or dsExpr won't recognize it as a type application any more (#11329).
-- It doesn't make sense to put a tick on a type anyway.
where e2'
| isLHsTypeExpr e2 = return e2
| otherwise = addTickLHsExpr e2
addTickHsExpr (OpApp e1 e2 fix e3) =
liftM4 OpApp
(addTickLHsExpr e1)
(addTickLHsExprNever e2)
(return fix)
(addTickLHsExpr e3)
addTickHsExpr (NegApp e neg) =
liftM2 NegApp
(addTickLHsExpr e)
(addTickSyntaxExpr hpcSrcSpan neg)
addTickHsExpr (HsPar e) =
liftM HsPar (addTickLHsExprEvalInner e)
addTickHsExpr (SectionL e1 e2) =
liftM2 SectionL
(addTickLHsExpr e1)
(addTickLHsExprNever e2)
addTickHsExpr (SectionR e1 e2) =
liftM2 SectionR
(addTickLHsExprNever e1)
(addTickLHsExpr e2)
addTickHsExpr (ExplicitTuple es boxity) =
liftM2 ExplicitTuple
(mapM addTickTupArg es)
(return boxity)
addTickHsExpr (HsCase e mgs) =
liftM2 HsCase
(addTickLHsExpr e) -- not an EvalInner; e might not necessarily
-- be evaluated.
(addTickMatchGroup False mgs)
addTickHsExpr (HsIf cnd e1 e2 e3) =
liftM3 (HsIf cnd)
(addBinTickLHsExpr (BinBox CondBinBox) e1)
(addTickLHsExprOptAlt True e2)
(addTickLHsExprOptAlt True e3)
addTickHsExpr (HsMultiIf ty alts)
= do { let isOneOfMany = case alts of [_] -> False; _ -> True
; alts' <- mapM (liftL $ addTickGRHS isOneOfMany False) alts
; return $ HsMultiIf ty alts' }
addTickHsExpr (HsLet (L l binds) e) =
bindLocals (collectLocalBinders binds) $
liftM2 (HsLet . L l)
(addTickHsLocalBinds binds) -- to think about: !patterns.
(addTickLHsExprLetBody e)
addTickHsExpr (HsDo cxt (L l stmts) srcloc)
= do { (stmts', _) <- addTickLStmts' forQual stmts (return ())
; return (HsDo cxt (L l stmts') srcloc) }
where
forQual = case cxt of
ListComp -> Just $ BinBox QualBinBox
_ -> Nothing
addTickHsExpr (ExplicitList ty wit es) =
liftM3 ExplicitList
(return ty)
(addTickWit wit)
(mapM (addTickLHsExpr) es)
where addTickWit Nothing = return Nothing
addTickWit (Just fln)
= do fln' <- addTickSyntaxExpr hpcSrcSpan fln
return (Just fln')
addTickHsExpr (ExplicitPArr ty es) =
liftM2 ExplicitPArr
(return ty)
(mapM (addTickLHsExpr) es)
addTickHsExpr (HsStatic e) = HsStatic <$> addTickLHsExpr e
addTickHsExpr expr@(RecordCon { rcon_flds = rec_binds })
= do { rec_binds' <- addTickHsRecordBinds rec_binds
; return (expr { rcon_flds = rec_binds' }) }
addTickHsExpr expr@(RecordUpd { rupd_expr = e, rupd_flds = flds })
= do { e' <- addTickLHsExpr e
; flds' <- mapM addTickHsRecField flds
; return (expr { rupd_expr = e', rupd_flds = flds' }) }
addTickHsExpr (ExprWithTySig e ty) =
liftM2 ExprWithTySig
(addTickLHsExprNever e) -- No need to tick the inner expression
-- for expressions with signatures
(return ty)
addTickHsExpr (ArithSeq ty wit arith_seq) =
liftM3 ArithSeq
(return ty)
(addTickWit wit)
(addTickArithSeqInfo arith_seq)
where addTickWit Nothing = return Nothing
addTickWit (Just fl) = do fl' <- addTickSyntaxExpr hpcSrcSpan fl
return (Just fl')
-- We might encounter existing ticks (multiple Coverage passes)
addTickHsExpr (HsTick t e) =
liftM (HsTick t) (addTickLHsExprNever e)
addTickHsExpr (HsBinTick t0 t1 e) =
liftM (HsBinTick t0 t1) (addTickLHsExprNever e)
addTickHsExpr (HsTickPragma _ _ _ (L pos e0)) = do
e2 <- allocTickBox (ExpBox False) False False pos $
addTickHsExpr e0
return $ unLoc e2
addTickHsExpr (PArrSeq ty arith_seq) =
liftM2 PArrSeq
(return ty)
(addTickArithSeqInfo arith_seq)
addTickHsExpr (HsSCC src nm e) =
liftM3 HsSCC
(return src)
(return nm)
(addTickLHsExpr e)
addTickHsExpr (HsCoreAnn src nm e) =
liftM3 HsCoreAnn
(return src)
(return nm)
(addTickLHsExpr e)
addTickHsExpr e@(HsBracket {}) = return e
addTickHsExpr e@(HsTcBracketOut {}) = return e
addTickHsExpr e@(HsRnBracketOut {}) = return e
addTickHsExpr e@(HsSpliceE {}) = return e
addTickHsExpr (HsProc pat cmdtop) =
liftM2 HsProc
(addTickLPat pat)
(liftL (addTickHsCmdTop) cmdtop)
addTickHsExpr (HsWrap w e) =
liftM2 HsWrap
(return w)
(addTickHsExpr e) -- Explicitly no tick on inside
addTickHsExpr (ExprWithTySigOut e ty) =
liftM2 ExprWithTySigOut
(addTickLHsExprNever e) -- No need to tick the inner expression
(return ty) -- for expressions with signatures
-- Others should never happen in expression content.
addTickHsExpr e = pprPanic "addTickHsExpr" (ppr e)
addTickTupArg :: LHsTupArg Id -> TM (LHsTupArg Id)
addTickTupArg (L l (Present e)) = do { e' <- addTickLHsExpr e
; return (L l (Present e')) }
addTickTupArg (L l (Missing ty)) = return (L l (Missing ty))
addTickMatchGroup :: Bool{-is lambda-} -> MatchGroup Id (LHsExpr Id) -> TM (MatchGroup Id (LHsExpr Id))
addTickMatchGroup is_lam mg@(MG { mg_alts = L l matches }) = do
let isOneOfMany = matchesOneOfMany matches
matches' <- mapM (liftL (addTickMatch isOneOfMany is_lam)) matches
return $ mg { mg_alts = L l matches' }
addTickMatch :: Bool -> Bool -> Match Id (LHsExpr Id) -> TM (Match Id (LHsExpr Id))
addTickMatch isOneOfMany isLambda (Match mf pats opSig gRHSs) =
bindLocals (collectPatsBinders pats) $ do
gRHSs' <- addTickGRHSs isOneOfMany isLambda gRHSs
return $ Match mf pats opSig gRHSs'
addTickGRHSs :: Bool -> Bool -> GRHSs Id (LHsExpr Id) -> TM (GRHSs Id (LHsExpr Id))
addTickGRHSs isOneOfMany isLambda (GRHSs guarded (L l local_binds)) = do
bindLocals binders $ do
local_binds' <- addTickHsLocalBinds local_binds
guarded' <- mapM (liftL (addTickGRHS isOneOfMany isLambda)) guarded
return $ GRHSs guarded' (L l local_binds')
where
binders = collectLocalBinders local_binds
addTickGRHS :: Bool -> Bool -> GRHS Id (LHsExpr Id) -> TM (GRHS Id (LHsExpr Id))
addTickGRHS isOneOfMany isLambda (GRHS stmts expr) = do
(stmts',expr') <- addTickLStmts' (Just $ BinBox $ GuardBinBox) stmts
(addTickGRHSBody isOneOfMany isLambda expr)
return $ GRHS stmts' expr'
addTickGRHSBody :: Bool -> Bool -> LHsExpr Id -> TM (LHsExpr Id)
addTickGRHSBody isOneOfMany isLambda expr@(L pos e0) = do
d <- getDensity
case d of
TickForCoverage -> addTickLHsExprOptAlt isOneOfMany expr
TickAllFunctions | isLambda ->
addPathEntry "\\" $
allocTickBox (ExpBox False) True{-count-} False{-not top-} pos $
addTickHsExpr e0
_otherwise ->
addTickLHsExprRHS expr
addTickLStmts :: (Maybe (Bool -> BoxLabel)) -> [ExprLStmt Id] -> TM [ExprLStmt Id]
addTickLStmts isGuard stmts = do
(stmts, _) <- addTickLStmts' isGuard stmts (return ())
return stmts
addTickLStmts' :: (Maybe (Bool -> BoxLabel)) -> [ExprLStmt Id] -> TM a
-> TM ([ExprLStmt Id], a)
addTickLStmts' isGuard lstmts res
= bindLocals (collectLStmtsBinders lstmts) $
do { lstmts' <- mapM (liftL (addTickStmt isGuard)) lstmts
; a <- res
; return (lstmts', a) }
addTickStmt :: (Maybe (Bool -> BoxLabel)) -> Stmt Id (LHsExpr Id) -> TM (Stmt Id (LHsExpr Id))
addTickStmt _isGuard (LastStmt e noret ret) = do
liftM3 LastStmt
(addTickLHsExpr e)
(pure noret)
(addTickSyntaxExpr hpcSrcSpan ret)
addTickStmt _isGuard (BindStmt pat e bind fail ty) = do
liftM5 BindStmt
(addTickLPat pat)
(addTickLHsExprRHS e)
(addTickSyntaxExpr hpcSrcSpan bind)
(addTickSyntaxExpr hpcSrcSpan fail)
(return ty)
addTickStmt isGuard (BodyStmt e bind' guard' ty) = do
liftM4 BodyStmt
(addTick isGuard e)
(addTickSyntaxExpr hpcSrcSpan bind')
(addTickSyntaxExpr hpcSrcSpan guard')
(return ty)
addTickStmt _isGuard (LetStmt (L l binds)) = do
liftM (LetStmt . L l)
(addTickHsLocalBinds binds)
addTickStmt isGuard (ParStmt pairs mzipExpr bindExpr ty) = do
liftM4 ParStmt
(mapM (addTickStmtAndBinders isGuard) pairs)
(unLoc <$> addTickLHsExpr (L hpcSrcSpan mzipExpr))
(addTickSyntaxExpr hpcSrcSpan bindExpr)
(return ty)
addTickStmt isGuard (ApplicativeStmt args mb_join body_ty) = do
args' <- mapM (addTickApplicativeArg isGuard) args
return (ApplicativeStmt args' mb_join body_ty)
addTickStmt isGuard stmt@(TransStmt { trS_stmts = stmts
, trS_by = by, trS_using = using
, trS_ret = returnExpr, trS_bind = bindExpr
, trS_fmap = liftMExpr }) = do
t_s <- addTickLStmts isGuard stmts
t_y <- fmapMaybeM addTickLHsExprRHS by
t_u <- addTickLHsExprRHS using
t_f <- addTickSyntaxExpr hpcSrcSpan returnExpr
t_b <- addTickSyntaxExpr hpcSrcSpan bindExpr
L _ t_m <- addTickLHsExpr (L hpcSrcSpan liftMExpr)
return $ stmt { trS_stmts = t_s, trS_by = t_y, trS_using = t_u
, trS_ret = t_f, trS_bind = t_b, trS_fmap = t_m }
addTickStmt isGuard stmt@(RecStmt {})
= do { stmts' <- addTickLStmts isGuard (recS_stmts stmt)
; ret' <- addTickSyntaxExpr hpcSrcSpan (recS_ret_fn stmt)
; mfix' <- addTickSyntaxExpr hpcSrcSpan (recS_mfix_fn stmt)
; bind' <- addTickSyntaxExpr hpcSrcSpan (recS_bind_fn stmt)
; return (stmt { recS_stmts = stmts', recS_ret_fn = ret'
, recS_mfix_fn = mfix', recS_bind_fn = bind' }) }
addTick :: Maybe (Bool -> BoxLabel) -> LHsExpr Id -> TM (LHsExpr Id)
addTick isGuard e | Just fn <- isGuard = addBinTickLHsExpr fn e
| otherwise = addTickLHsExprRHS e
addTickApplicativeArg
:: Maybe (Bool -> BoxLabel) -> (SyntaxExpr Id, ApplicativeArg Id Id)
-> TM (SyntaxExpr Id, ApplicativeArg Id Id)
addTickApplicativeArg isGuard (op, arg) =
liftM2 (,) (addTickSyntaxExpr hpcSrcSpan op) (addTickArg arg)
where
addTickArg (ApplicativeArgOne pat expr) =
ApplicativeArgOne <$> addTickLPat pat <*> addTickLHsExpr expr
addTickArg (ApplicativeArgMany stmts ret pat) =
ApplicativeArgMany
<$> addTickLStmts isGuard stmts
<*> (unLoc <$> addTickLHsExpr (L hpcSrcSpan ret))
<*> addTickLPat pat
addTickStmtAndBinders :: Maybe (Bool -> BoxLabel) -> ParStmtBlock Id Id
-> TM (ParStmtBlock Id Id)
addTickStmtAndBinders isGuard (ParStmtBlock stmts ids returnExpr) =
liftM3 ParStmtBlock
(addTickLStmts isGuard stmts)
(return ids)
(addTickSyntaxExpr hpcSrcSpan returnExpr)
addTickHsLocalBinds :: HsLocalBinds Id -> TM (HsLocalBinds Id)
addTickHsLocalBinds (HsValBinds binds) =
liftM HsValBinds
(addTickHsValBinds binds)
addTickHsLocalBinds (HsIPBinds binds) =
liftM HsIPBinds
(addTickHsIPBinds binds)
addTickHsLocalBinds (EmptyLocalBinds) = return EmptyLocalBinds
addTickHsValBinds :: HsValBindsLR Id a -> TM (HsValBindsLR Id b)
addTickHsValBinds (ValBindsOut binds sigs) =
liftM2 ValBindsOut
(mapM (\ (rec,binds') ->
liftM2 (,)
(return rec)
(addTickLHsBinds binds'))
binds)
(return sigs)
addTickHsValBinds _ = panic "addTickHsValBinds"
addTickHsIPBinds :: HsIPBinds Id -> TM (HsIPBinds Id)
addTickHsIPBinds (IPBinds ipbinds dictbinds) =
liftM2 IPBinds
(mapM (liftL (addTickIPBind)) ipbinds)
(return dictbinds)
addTickIPBind :: IPBind Id -> TM (IPBind Id)
addTickIPBind (IPBind nm e) =
liftM2 IPBind
(return nm)
(addTickLHsExpr e)
-- There is no location here, so we might need to use a context location??
addTickSyntaxExpr :: SrcSpan -> SyntaxExpr Id -> TM (SyntaxExpr Id)
addTickSyntaxExpr pos syn@(SyntaxExpr { syn_expr = x }) = do
L _ x' <- addTickLHsExpr (L pos x)
return $ syn { syn_expr = x' }
-- we do not walk into patterns.
addTickLPat :: LPat Id -> TM (LPat Id)
addTickLPat pat = return pat
addTickHsCmdTop :: HsCmdTop Id -> TM (HsCmdTop Id)
addTickHsCmdTop (HsCmdTop cmd tys ty syntaxtable) =
liftM4 HsCmdTop
(addTickLHsCmd cmd)
(return tys)
(return ty)
(return syntaxtable)
addTickLHsCmd :: LHsCmd Id -> TM (LHsCmd Id)
addTickLHsCmd (L pos c0) = do
c1 <- addTickHsCmd c0
return $ L pos c1
addTickHsCmd :: HsCmd Id -> TM (HsCmd Id)
addTickHsCmd (HsCmdLam matchgroup) =
liftM HsCmdLam (addTickCmdMatchGroup matchgroup)
addTickHsCmd (HsCmdApp c e) =
liftM2 HsCmdApp (addTickLHsCmd c) (addTickLHsExpr e)
{-
addTickHsCmd (OpApp e1 c2 fix c3) =
liftM4 OpApp
(addTickLHsExpr e1)
(addTickLHsCmd c2)
(return fix)
(addTickLHsCmd c3)
-}
addTickHsCmd (HsCmdPar e) = liftM HsCmdPar (addTickLHsCmd e)
addTickHsCmd (HsCmdCase e mgs) =
liftM2 HsCmdCase
(addTickLHsExpr e)
(addTickCmdMatchGroup mgs)
addTickHsCmd (HsCmdIf cnd e1 c2 c3) =
liftM3 (HsCmdIf cnd)
(addBinTickLHsExpr (BinBox CondBinBox) e1)
(addTickLHsCmd c2)
(addTickLHsCmd c3)
addTickHsCmd (HsCmdLet (L l binds) c) =
bindLocals (collectLocalBinders binds) $
liftM2 (HsCmdLet . L l)
(addTickHsLocalBinds binds) -- to think about: !patterns.
(addTickLHsCmd c)
addTickHsCmd (HsCmdDo (L l stmts) srcloc)
= do { (stmts', _) <- addTickLCmdStmts' stmts (return ())
; return (HsCmdDo (L l stmts') srcloc) }
addTickHsCmd (HsCmdArrApp e1 e2 ty1 arr_ty lr) =
liftM5 HsCmdArrApp
(addTickLHsExpr e1)
(addTickLHsExpr e2)
(return ty1)
(return arr_ty)
(return lr)
addTickHsCmd (HsCmdArrForm e fix cmdtop) =
liftM3 HsCmdArrForm
(addTickLHsExpr e)
(return fix)
(mapM (liftL (addTickHsCmdTop)) cmdtop)
addTickHsCmd (HsCmdWrap w cmd)
= liftM2 HsCmdWrap (return w) (addTickHsCmd cmd)
-- Others should never happen in a command context.
--addTickHsCmd e = pprPanic "addTickHsCmd" (ppr e)
addTickCmdMatchGroup :: MatchGroup Id (LHsCmd Id) -> TM (MatchGroup Id (LHsCmd Id))
addTickCmdMatchGroup mg@(MG { mg_alts = L l matches }) = do
matches' <- mapM (liftL addTickCmdMatch) matches
return $ mg { mg_alts = L l matches' }
addTickCmdMatch :: Match Id (LHsCmd Id) -> TM (Match Id (LHsCmd Id))
addTickCmdMatch (Match mf pats opSig gRHSs) =
bindLocals (collectPatsBinders pats) $ do
gRHSs' <- addTickCmdGRHSs gRHSs
return $ Match mf pats opSig gRHSs'
addTickCmdGRHSs :: GRHSs Id (LHsCmd Id) -> TM (GRHSs Id (LHsCmd Id))
addTickCmdGRHSs (GRHSs guarded (L l local_binds)) = do
bindLocals binders $ do
local_binds' <- addTickHsLocalBinds local_binds
guarded' <- mapM (liftL addTickCmdGRHS) guarded
return $ GRHSs guarded' (L l local_binds')
where
binders = collectLocalBinders local_binds
addTickCmdGRHS :: GRHS Id (LHsCmd Id) -> TM (GRHS Id (LHsCmd Id))
-- The *guards* are *not* Cmds, although the body is
-- C.f. addTickGRHS for the BinBox stuff
addTickCmdGRHS (GRHS stmts cmd)
= do { (stmts',expr') <- addTickLStmts' (Just $ BinBox $ GuardBinBox)
stmts (addTickLHsCmd cmd)
; return $ GRHS stmts' expr' }
addTickLCmdStmts :: [LStmt Id (LHsCmd Id)] -> TM [LStmt Id (LHsCmd Id)]
addTickLCmdStmts stmts = do
(stmts, _) <- addTickLCmdStmts' stmts (return ())
return stmts
addTickLCmdStmts' :: [LStmt Id (LHsCmd Id)] -> TM a -> TM ([LStmt Id (LHsCmd Id)], a)
addTickLCmdStmts' lstmts res
= bindLocals binders $ do
lstmts' <- mapM (liftL addTickCmdStmt) lstmts
a <- res
return (lstmts', a)
where
binders = collectLStmtsBinders lstmts
addTickCmdStmt :: Stmt Id (LHsCmd Id) -> TM (Stmt Id (LHsCmd Id))
addTickCmdStmt (BindStmt pat c bind fail ty) = do
liftM5 BindStmt
(addTickLPat pat)
(addTickLHsCmd c)
(return bind)
(return fail)
(return ty)
addTickCmdStmt (LastStmt c noret ret) = do
liftM3 LastStmt
(addTickLHsCmd c)
(pure noret)
(addTickSyntaxExpr hpcSrcSpan ret)
addTickCmdStmt (BodyStmt c bind' guard' ty) = do
liftM4 BodyStmt
(addTickLHsCmd c)
(addTickSyntaxExpr hpcSrcSpan bind')
(addTickSyntaxExpr hpcSrcSpan guard')
(return ty)
addTickCmdStmt (LetStmt (L l binds)) = do
liftM (LetStmt . L l)
(addTickHsLocalBinds binds)
addTickCmdStmt stmt@(RecStmt {})
= do { stmts' <- addTickLCmdStmts (recS_stmts stmt)
; ret' <- addTickSyntaxExpr hpcSrcSpan (recS_ret_fn stmt)
; mfix' <- addTickSyntaxExpr hpcSrcSpan (recS_mfix_fn stmt)
; bind' <- addTickSyntaxExpr hpcSrcSpan (recS_bind_fn stmt)
; return (stmt { recS_stmts = stmts', recS_ret_fn = ret'
, recS_mfix_fn = mfix', recS_bind_fn = bind' }) }
addTickCmdStmt ApplicativeStmt{} =
panic "ToDo: addTickCmdStmt ApplicativeLastStmt"
-- Others should never happen in a command context.
addTickCmdStmt stmt = pprPanic "addTickHsCmd" (ppr stmt)
addTickHsRecordBinds :: HsRecordBinds Id -> TM (HsRecordBinds Id)
addTickHsRecordBinds (HsRecFields fields dd)
= do { fields' <- mapM addTickHsRecField fields
; return (HsRecFields fields' dd) }
addTickHsRecField :: LHsRecField' id (LHsExpr Id) -> TM (LHsRecField' id (LHsExpr Id))
addTickHsRecField (L l (HsRecField id expr pun))
= do { expr' <- addTickLHsExpr expr
; return (L l (HsRecField id expr' pun)) }
addTickArithSeqInfo :: ArithSeqInfo Id -> TM (ArithSeqInfo Id)
addTickArithSeqInfo (From e1) =
liftM From
(addTickLHsExpr e1)
addTickArithSeqInfo (FromThen e1 e2) =
liftM2 FromThen
(addTickLHsExpr e1)
(addTickLHsExpr e2)
addTickArithSeqInfo (FromTo e1 e2) =
liftM2 FromTo
(addTickLHsExpr e1)
(addTickLHsExpr e2)
addTickArithSeqInfo (FromThenTo e1 e2 e3) =
liftM3 FromThenTo
(addTickLHsExpr e1)
(addTickLHsExpr e2)
(addTickLHsExpr e3)
liftL :: (Monad m) => (a -> m a) -> Located a -> m (Located a)
liftL f (L loc a) = do
a' <- f a
return $ L loc a'
data TickTransState = TT { tickBoxCount:: Int
, mixEntries :: [MixEntry_]
, uniqSupply :: UniqSupply
}
data TickTransEnv = TTE { fileName :: FastString
, density :: TickDensity
, tte_dflags :: DynFlags
, exports :: NameSet
, inlines :: VarSet
, declPath :: [String]
, inScope :: VarSet
, blackList :: Map SrcSpan ()
, this_mod :: Module
, tickishType :: TickishType
}
-- deriving Show
data TickishType = ProfNotes | HpcTicks | Breakpoints | SourceNotes
deriving (Eq)
coveragePasses :: DynFlags -> [TickishType]
coveragePasses dflags =
ifa (hscTarget dflags == HscInterpreted) Breakpoints $
ifa (gopt Opt_Hpc dflags) HpcTicks $
ifa (gopt Opt_SccProfilingOn dflags &&
profAuto dflags /= NoProfAuto) ProfNotes $
ifa (debugLevel dflags > 0) SourceNotes []
where ifa f x xs | f = x:xs
| otherwise = xs
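-- For example (assuming -g sets debugLevel above zero), a plain -fhpc -g
-- compile with no profiling and a non-interpreted target would, under the
-- conditions above, produce [HpcTicks, SourceNotes].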
-- | Tickishs that only make sense when their source code location
-- refers to the current file. This might not always be true due to
-- LINE pragmas in the code - which would confuse at least HPC.
tickSameFileOnly :: TickishType -> Bool
tickSameFileOnly HpcTicks = True
tickSameFileOnly _other = False
type FreeVars = OccEnv Id
noFVs :: FreeVars
noFVs = emptyOccEnv
-- Note [freevars]
-- For breakpoints we want to collect the free variables of an
-- expression for pinning on the HsTick. We don't want to collect
-- *all* free variables though: in particular there's no point pinning
-- on free variables that will otherwise be in scope at the GHCi
-- prompt, which means all top-level bindings. Unfortunately detecting
-- top-level bindings isn't easy (collectHsBindsBinders on the top-level
-- bindings doesn't do it), so we keep track of a set of "in-scope"
-- variables in addition to the free variables, and the former is used
-- to filter additions to the latter. This gives us complete control
-- over what free variables we track.
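-- A rough sketch of how this plays out below:
--   * bindLocals adds the new binders to 'inScope' and removes their
--     OccNames from the FreeVars reported by its body;
--   * freeVar records an Id as free only if it is in 'inScope'.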
data TM a = TM { unTM :: TickTransEnv -> TickTransState -> (a,FreeVars,TickTransState) }
-- a combination of a state monad (TickTransState) and a writer
-- monad (FreeVars).
instance Functor TM where
fmap = liftM
instance Applicative TM where
pure a = TM $ \ _env st -> (a,noFVs,st)
(<*>) = ap
instance Monad TM where
(TM m) >>= k = TM $ \ env st ->
case m env st of
(r1,fv1,st1) ->
case unTM (k r1) env st1 of
(r2,fv2,st2) ->
(r2, fv1 `plusOccEnv` fv2, st2)
instance HasDynFlags TM where
getDynFlags = TM $ \ env st -> (tte_dflags env, noFVs, st)
instance MonadUnique TM where
getUniqueSupplyM = TM $ \_ st -> (uniqSupply st, noFVs, st)
getUniqueM = TM $ \_ st -> let (u, us') = takeUniqFromSupply (uniqSupply st)
in (u, noFVs, st { uniqSupply = us' })
getState :: TM TickTransState
getState = TM $ \ _ st -> (st, noFVs, st)
setState :: (TickTransState -> TickTransState) -> TM ()
setState f = TM $ \ _ st -> ((), noFVs, f st)
getEnv :: TM TickTransEnv
getEnv = TM $ \ env st -> (env, noFVs, st)
withEnv :: (TickTransEnv -> TickTransEnv) -> TM a -> TM a
withEnv f (TM m) = TM $ \ env st ->
case m (f env) st of
(a, fvs, st') -> (a, fvs, st')
getDensity :: TM TickDensity
getDensity = TM $ \env st -> (density env, noFVs, st)
ifDensity :: TickDensity -> TM a -> TM a -> TM a
ifDensity d th el = do d0 <- getDensity; if d == d0 then th else el
getFreeVars :: TM a -> TM (FreeVars, a)
getFreeVars (TM m)
= TM $ \ env st -> case m env st of (a, fv, st') -> ((fv,a), fv, st')
freeVar :: Id -> TM ()
freeVar id = TM $ \ env st ->
if id `elemVarSet` inScope env
then ((), unitOccEnv (nameOccName (idName id)) id, st)
else ((), noFVs, st)
addPathEntry :: String -> TM a -> TM a
addPathEntry nm = withEnv (\ env -> env { declPath = declPath env ++ [nm] })
getPathEntry :: TM [String]
getPathEntry = declPath `liftM` getEnv
getFileName :: TM FastString
getFileName = fileName `liftM` getEnv
isGoodSrcSpan' :: SrcSpan -> Bool
isGoodSrcSpan' pos@(RealSrcSpan _) = srcSpanStart pos /= srcSpanEnd pos
isGoodSrcSpan' (UnhelpfulSpan _) = False
isGoodTickSrcSpan :: SrcSpan -> TM Bool
isGoodTickSrcSpan pos = do
file_name <- getFileName
tickish <- tickishType `liftM` getEnv
let need_same_file = tickSameFileOnly tickish
same_file = Just file_name == srcSpanFileName_maybe pos
return (isGoodSrcSpan' pos && (not need_same_file || same_file))
ifGoodTickSrcSpan :: SrcSpan -> TM a -> TM a -> TM a
ifGoodTickSrcSpan pos then_code else_code = do
good <- isGoodTickSrcSpan pos
if good then then_code else else_code
bindLocals :: [Id] -> TM a -> TM a
bindLocals new_ids (TM m)
= TM $ \ env st ->
case m env{ inScope = inScope env `extendVarSetList` new_ids } st of
(r, fv, st') -> (r, fv `delListFromOccEnv` occs, st')
where occs = [ nameOccName (idName id) | id <- new_ids ]
isBlackListed :: SrcSpan -> TM Bool
isBlackListed pos = TM $ \ env st ->
case Map.lookup pos (blackList env) of
Nothing -> (False,noFVs,st)
Just () -> (True,noFVs,st)
-- the tick application inherits the source position of its
-- expression argument to support nested box allocations
allocTickBox :: BoxLabel -> Bool -> Bool -> SrcSpan -> TM (HsExpr Id)
-> TM (LHsExpr Id)
allocTickBox boxLabel countEntries topOnly pos m =
ifGoodTickSrcSpan pos (do
(fvs, e) <- getFreeVars m
env <- getEnv
tickish <- mkTickish boxLabel countEntries topOnly pos fvs (declPath env)
return (L pos (HsTick tickish (L pos e)))
) (do
e <- m
return (L pos e)
)
-- the tick application inherits the source position of its
-- expression argument to support nested box allocations
allocATickBox :: BoxLabel -> Bool -> Bool -> SrcSpan -> FreeVars
-> TM (Maybe (Tickish Id))
allocATickBox boxLabel countEntries topOnly pos fvs =
ifGoodTickSrcSpan pos (do
let
mydecl_path = case boxLabel of
TopLevelBox x -> x
LocalBox xs -> xs
_ -> panic "allocATickBox"
tickish <- mkTickish boxLabel countEntries topOnly pos fvs mydecl_path
return (Just tickish)
) (return Nothing)
mkTickish :: BoxLabel -> Bool -> Bool -> SrcSpan -> OccEnv Id -> [String]
-> TM (Tickish Id)
mkTickish boxLabel countEntries topOnly pos fvs decl_path = do
let ids = filter (not . isUnliftedType . idType) $ occEnvElts fvs
-- unlifted types cause two problems here:
-- * we can't bind them at the GHCi prompt
-- (bindLocalsAtBreakpoint already filters them out),
-- * the simplifier might try to substitute a literal for
-- the Id, and we can't handle that.
me = (pos, decl_path, map (nameOccName.idName) ids, boxLabel)
cc_name | topOnly = head decl_path
| otherwise = concat (intersperse "." decl_path)
dflags <- getDynFlags
env <- getEnv
case tickishType env of
HpcTicks -> do
c <- liftM tickBoxCount getState
setState $ \st -> st { tickBoxCount = c + 1
, mixEntries = me : mixEntries st }
return $ HpcTick (this_mod env) c
ProfNotes -> do
ccUnique <- getUniqueM
let cc = mkUserCC (mkFastString cc_name) (this_mod env) pos ccUnique
count = countEntries && gopt Opt_ProfCountEntries dflags
return $ ProfNote cc count True{-scopes-}
Breakpoints -> do
c <- liftM tickBoxCount getState
setState $ \st -> st { tickBoxCount = c + 1
, mixEntries = me:mixEntries st }
return $ Breakpoint c ids
SourceNotes | RealSrcSpan pos' <- pos ->
return $ SourceNote pos' cc_name
_otherwise -> panic "mkTickish: bad source span!"
allocBinTickBox :: (Bool -> BoxLabel) -> SrcSpan -> TM (HsExpr Id)
-> TM (LHsExpr Id)
allocBinTickBox boxLabel pos m = do
env <- getEnv
case tickishType env of
HpcTicks -> do e <- liftM (L pos) m
ifGoodTickSrcSpan pos
(mkBinTickBoxHpc boxLabel pos e)
(return e)
_other -> allocTickBox (ExpBox False) False False pos m
mkBinTickBoxHpc :: (Bool -> BoxLabel) -> SrcSpan -> LHsExpr Id
-> TM (LHsExpr Id)
mkBinTickBoxHpc boxLabel pos e =
TM $ \ env st ->
let meT = (pos,declPath env, [],boxLabel True)
meF = (pos,declPath env, [],boxLabel False)
meE = (pos,declPath env, [],ExpBox False)
c = tickBoxCount st
mes = mixEntries st
in
( L pos $ HsTick (HpcTick (this_mod env) c) $ L pos $ HsBinTick (c+1) (c+2) e
-- notice that F and T are reversed,
-- because we are building the list in
-- reverse...
, noFVs
, st {tickBoxCount=c+3 , mixEntries=meF:meT:meE:mes}
)
mkHpcPos :: SrcSpan -> HpcPos
mkHpcPos pos@(RealSrcSpan s)
| isGoodSrcSpan' pos = toHpcPos (srcSpanStartLine s,
srcSpanStartCol s,
srcSpanEndLine s,
srcSpanEndCol s - 1)
-- the end column of a SrcSpan is one
-- greater than the last column of the
-- span (see SrcLoc), whereas HPC
-- expects the column range to be
-- inclusive, hence we subtract one above.
mkHpcPos _ = panic "bad source span; expected such spans to be filtered out"
hpcSrcSpan :: SrcSpan
hpcSrcSpan = mkGeneralSrcSpan (fsLit "Haskell Program Coverage internals")
matchesOneOfMany :: [LMatch Id body] -> Bool
matchesOneOfMany lmatches = sum (map matchCount lmatches) > 1
where
matchCount (L _ (Match _ _pats _ty (GRHSs grhss _binds))) = length grhss
type MixEntry_ = (SrcSpan, [String], [OccName], BoxLabel)
-- For the hash value, we hash everything: the file name,
-- the timestamp of the original source file, the tab stop,
-- and the mix entries. We cheat, and hash the show'd string.
-- This hash only has to be hashed at Mix creation time,
-- and is for sanity checking only.
mixHash :: FilePath -> UTCTime -> Int -> [MixEntry] -> Int
mixHash file tm tabstop entries = fromIntegral $ hashString
(show $ Mix file tm 0 tabstop entries)
{-
************************************************************************
* *
* initialisation
* *
************************************************************************
Each module compiled with -fhpc declares an initialisation function of
the form `hpc_init_<module>()`, which is emitted into the _stub.c file
and annotated with __attribute__((constructor)) so that it gets
executed at startup time.
The function's purpose is to call hs_hpc_module to register this
module with the RTS, and it looks something like this:
static void hpc_init_Main(void) __attribute__((constructor));
static void hpc_init_Main(void)
{extern StgWord64 _hpc_tickboxes_Main_hpc[];
hs_hpc_module("Main",8,1150288664,_hpc_tickboxes_Main_hpc);}
-}
hpcInitCode :: Module -> HpcInfo -> SDoc
hpcInitCode _ (NoHpcInfo {}) = Outputable.empty
hpcInitCode this_mod (HpcInfo tickCount hashNo)
= vcat
[ text "static void hpc_init_" <> ppr this_mod
<> text "(void) __attribute__((constructor));"
, text "static void hpc_init_" <> ppr this_mod <> text "(void)"
, braces (vcat [
text "extern StgWord64 " <> tickboxes <>
text "[]" <> semi,
text "hs_hpc_module" <>
parens (hcat (punctuate comma [
doubleQuotes full_name_str,
int tickCount, -- really StgWord32
int hashNo, -- really StgWord32
tickboxes
])) <> semi
])
]
where
tickboxes = ppr (mkHpcTicksLabel $ this_mod)
module_name = hcat (map (text.charToC) $
bytesFS (moduleNameFS (Module.moduleName this_mod)))
package_name = hcat (map (text.charToC) $
bytesFS (unitIdFS (moduleUnitId this_mod)))
full_name_str
| moduleUnitId this_mod == mainUnitId
= module_name
| otherwise
= package_name <> char '/' <> module_name
| nushio3/ghc | compiler/deSugar/Coverage.hs | Haskell | bsd-3-clause | 51,902 |
module Main where
import Types
import RegexPattern
import System.Environment
import Data.String
import Text.Regex.Posix
import qualified Data.Text as T (splitOn, unpack)
import Data.Maybe
import System.IO
import qualified System.IO as S
import System.Process
{-
*# ADD #*
Add to Todo.txt
*# REMOVE #*
Remove from Todo.txt, but place entry in backup file todo.backup.txt
*# VIEW #*
Default. Print in readable format.
TODO (pun not intended): set a priority for listed items; allow editing an item completely.
Written by Frank Hucek
-}
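-- Hypothetical command-line usage (the binary name "todo" is an assumption;
-- added entries must match regexPattern from RegexPattern):
--   todo add <new entry>   -- append to Todo.txt
--   todo remove 2          -- delete item number 2
--   todo                   -- view the current list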
todoFile :: String
todoFile = "/home/frank/bin_storage/Todo.txt"
main :: IO ()
main = do
x <- getArgs
mainThrow x
mainThrow :: [String] -> IO ()
mainThrow [] = viewTodoList []
mainThrow (option:xs) = do
case option of
"add" -> addItem xs
"remove" -> removeItem xs
_ -> viewTodoList xs
-- sort
removeItem :: [String] -> IO ()
removeItem [] = putStrLn "Please specify the number of the item you wish to remove"
removeItem (x:_) = do
case readMaybe x :: Maybe Int of
Nothing -> removeItem []
Just itemNum -> removeFromFile itemNum
addItem :: [String] -> IO ()
addItem inputItem = do
let input = argOp inputItem =~ regexPattern :: String
case input of
"" -> putStrLn "Failed to match input pattern"
_ -> appendFile todoFile (input ++ "\n")
argOp :: [String] -> String
argOp xs = init $ foldl (++) "" $ map (++ " ") xs
-- append a space to the end of each string in the list
-- concatenate the list of strings into one string
-- drop the last character of the new string, since it is the trailing space
-- the input can now be checked against the regular expression
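-- A small worked example (hypothetical input):
--   argOp ["buy", "milk"]
--     = init (foldl (++) "" ["buy ", "milk "])
--     = init "buy milk "
--     = "buy milk"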
readMaybe :: Read a => String -> Maybe a
readMaybe s = case reads s of
[(val, "")] -> Just val
_ -> Nothing
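-- For example:
--   readMaybe "3"  :: Maybe Int   ~> Just 3
--   readMaybe "3x" :: Maybe Int   ~> Nothing   (leftover input is rejected)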
-- READ, WRITE, APPEND, REMOVE operations on file
-- File and user IO uses regex pattern. convert to Item type in program
removeFromFile :: Int -> IO ()
removeFromFile x = do
file <- readFile todoFile
let xs = lines file
(a, b) = splitAt x xs
itemList = (init a) ++ b
items = unlines itemList
newTodoFile = todoFile ++ ".new"
writeFile newTodoFile items
_ <- createProcess (proc "mv" [newTodoFile, todoFile]) -- SUPER jank, temporary fix to lazy eval here
return ()
-- removes indices even when typing in the wrong number
viewTodoList :: [String] -> IO ()
viewTodoList _ = do
(_, Just hout, _, _) <- createProcess (proc "cal" []) {std_out = CreatePipe}
cal <- hGetContents hout
putStrLn cal
hClose hout
putStrLn $ "\tPRIOR.\tDESCRIPTION"
file <- readFile todoFile
let items = fmap (displayItem . patternToItem) $ lines file -- [String]
printTodoList items
--putStrLn cal
printTodoList :: [String] -> IO ()
printTodoList = printTodo 1
printTodo :: Int -> [String] -> IO ()
printTodo _ [] = return ()
printTodo i (x:xs) = do
putStrLn $ show i ++ ")\t" ++ x
printTodo (i + 1) xs
| frankhucek/Todo | app/Main.hs | Haskell | bsd-3-clause | 2,999 |
{-# LANGUAGE NoImplicitPrelude
, ScopedTypeVariables
, UnicodeSyntax
#-}
module System.FTDI.Utils.Properties where
-- base
import Control.Monad ( (>>) )
import Data.Bool ( otherwise )
import Data.Function ( ($) )
import Data.Ord ( Ord )
import Prelude ( Integral, RealFrac, Fractional, Double
, Bounded, minBound, maxBound
, fromInteger, toInteger, fromIntegral
, (+), abs, mod, ceiling, div
)
-- base-unicode
import Data.Bool.Unicode ( (∧) )
import Data.Eq.Unicode ( (≡), (≢) )
import Data.Ord.Unicode ( (≤), (≥) )
import Prelude.Unicode ( (⋅), (÷) )
-- ftdi
import System.FTDI.Utils ( clamp, divRndUp )
-- QuickCheck
import Test.QuickCheck ( Property, (==>) )
-------------------------------------------------------------------------------
prop_divRndUp_min ∷ Integral α ⇒ α → α → Property
prop_divRndUp_min x y = y ≢ 0 ==>
let d = divRndUp x (abs y)
d' = toInteger d
y' = toInteger y
x' = toInteger x
in d' ⋅ abs y' ≥ x'
prop_divRndUp_max ∷ Integral α ⇒ α → α → Property
prop_divRndUp_max x y = y ≢ 0 ==>
let d = divRndUp x y
in x `div` y ≤ d
prop_divRndUp_ceilFrac ∷ Integral α ⇒ α → α → Property
prop_divRndUp_ceilFrac x y = y ≢ 0 ==>
let x' = fromIntegral x ∷ Double
y' = fromIntegral y ∷ Double
in divRndUp x y ≡ ceilFrac x' y'
prop_divRndUp2 ∷ Integral α ⇒ α → α → Property
prop_divRndUp2 x y = y ≢ 0 ==> divRndUp x y ≡ divRndUp2 x y
prop_clamp ∷ ∀ α. (Bounded α, Ord α) ⇒ α → Property
prop_clamp x = (minBound ∷ α) ≤ (maxBound ∷ α)
==> minBound ≤ cx ∧ cx ≤ maxBound
where cx = clamp x
-------------------------------------------------------------------------------
ceilFrac ∷ (Fractional α, RealFrac α, Integral β) ⇒ α → α → β
ceilFrac x y = ceiling $ x ÷ y
divRndUp2 ∷ Integral α ⇒ α → α → α
divRndUp2 x y = let r | mod x y ≡ 0 = 0
| otherwise = 1
in div x y + r
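-- A small worked example of the rounding-up division:
--   divRndUp2 7 2 = div 7 2 + 1 = 4   (since 7 `mod` 2 /= 0)
--   divRndUp2 6 2 = div 6 2 + 0 = 3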
| roelvandijk/ftdi | System/FTDI/Utils/Properties.hs | Haskell | bsd-3-clause | 2,166 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module PrettyPrint where
import Data.Function ((&))
import Data.Monoid ((<>))
import Data.String.Here
import Data.Text (Text)
import qualified Data.Text as T
import Hexon
import Hexon.Types
format_id :: Either String RID -> Text
format_id (Left _) = "meh."
format_id (Right result) = [i|
${top}
${banner_top}
${banner_name}
${banner_bottom}
• By ${username}
• ${date}
${display_comments}
|]
where
msg = result |> rid_message
name = result |> rid_info |> info_name
username = result |> rid_info |> info_username
date = result |> rid_info |> info_date
comments = result |> rid_info |> info_comments
pre :: Text
pre = case msg of
"error" ->
"Item not available :("
"success" ->
"*Item available!*"
_ -> ""
nlength = T.length name + 2
top = "\ESC[1m" <> pre <> "\STX"
banner_top = "┌" <> (T.replicate nlength "─") <> "┐"
banner_name = "│ " <> name <> " │"
banner_bottom = "└" <> (T.replicate nlength "─") <> "┘"
display_comments = fmap ("• " <>) (T.splitOn "\n" comments)
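-- A rough sketch of the banner built above, for a hypothetical name "Foo"
-- (so nlength = 5):
--   ┌─────┐
--   │ Foo │
--   └─────┘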
-- format_add :: Either String RItem -> Text
-- format_add (Left _) = "meh."
-- format_add (Right result) = undefined
-- format_del :: Either String RID -> Text
-- format_del (Left _) = "meh."
-- format_del (Right result) = undefined
-- format_auth :: Either String RAuth -> Text
-- format_auth (Left _) = "meh."
-- format_auth (Right result) = undefined
-- format_comment :: Either String RComment -> Text
-- format_comment (Left _) = "meh"
-- format_comment (Right result) = undefined
| tchoutri/Hexon | src/PrettyPrint.hs | Haskell | bsd-3-clause | 1,934 |
module Wallet.Inductive (
-- * Wallet events
WalletEvent(..)
, walletEventIsRollback
-- * Inductive wallets
, Inductive(..)
, uptoFirstRollback
, inductiveInit
) where
import Universum
import qualified Data.List as List
import qualified Data.Set as Set
import Formatting (bprint, build, (%))
import qualified Formatting.Buildable
import Pos.Core.Chrono
import Serokell.Util (listJson)
import UTxO.DSL
import UTxO.Util
{-------------------------------------------------------------------------------
Wallet events
-------------------------------------------------------------------------------}
-- | Wallet event
data WalletEvent h a =
-- | Inform the wallet of a new block added to the blockchain
ApplyBlock (Block h a)
-- | Submit a new transaction to the wallet to be included in the blockchain
| NewPending (Transaction h a)
-- | Roll back the last block added to the blockchain
| Rollback
walletEventIsRollback :: WalletEvent h a -> Bool
walletEventIsRollback Rollback = True
walletEventIsRollback _ = False
{-------------------------------------------------------------------------------
Inductive wallets
-------------------------------------------------------------------------------}
-- | Inductive definition of a wallet
data Inductive h a = Inductive {
-- | Bootstrap transaction
inductiveBoot :: Transaction h a
-- | Addresses that belong to the wallet
, inductiveOurs :: Set a
-- | Wallet events
, inductiveEvents :: OldestFirst [] (WalletEvent h a)
}
-- | The prefix of the 'Inductive' that doesn't include any rollbacks
uptoFirstRollback :: Inductive h a -> Inductive h a
uptoFirstRollback i@Inductive{..} = i {
inductiveEvents = liftOldestFirst (takeWhile notRollback) inductiveEvents
}
where
notRollback = not . walletEventIsRollback
inductiveInit :: forall h a. Inductive h a -> Inductive h a
inductiveInit i@Inductive{..} = i {
inductiveEvents = liftOldestFirst List.init inductiveEvents
}
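-- A schematic example: if inductiveEvents is
--   [ApplyBlock b1, NewPending t, Rollback, ApplyBlock b2]
-- then 'uptoFirstRollback' keeps [ApplyBlock b1, NewPending t],
-- while 'inductiveInit' keeps [ApplyBlock b1, NewPending t, Rollback].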
{-------------------------------------------------------------------------------
Pretty-printing
-------------------------------------------------------------------------------}
instance (Hash h a, Buildable a) => Buildable (OldestFirst [] (WalletEvent h a)) where
build = bprint listJson . getOldestFirst
instance (Hash h a, Buildable a) => Buildable (WalletEvent h a) where
build (ApplyBlock b) = bprint ("ApplyBlock " % build) b
build (NewPending t) = bprint ("NewPending " % build) t
build Rollback = bprint "Rollback"
instance (Hash h a, Buildable a) => Buildable (Inductive h a) where
build Inductive{..} = bprint
( "Inductive"
% "{ boot: " % build
% ", ours: " % listJson
% ", events: " % build
% "}"
)
inductiveBoot
(Set.toList inductiveOurs)
inductiveEvents
| input-output-hk/pos-haskell-prototype | wallet/test/unit/Wallet/Inductive.hs | Haskell | mit | 2,930 |
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Chronological sequences.
module Test.Pos.Core.Chrono
(
) where
import Pos.Core.Chrono
import Test.QuickCheck (Arbitrary)
deriving instance Arbitrary (f a) => Arbitrary (NewestFirst f a)
deriving instance Arbitrary (f a) => Arbitrary (OldestFirst f a)
| input-output-hk/pos-haskell-prototype | core/test/Test/Pos/Core/Chrono.hs | Haskell | mit | 405 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hu-HU">
<title>AdvFuzzer Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Keresés</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| veggiespam/zap-extensions | addOns/fuzz/src/main/javahelp/org/zaproxy/zap/extension/fuzz/resources/help_hu_HU/helpset_hu_HU.hs | Haskell | apache-2.0 | 964 |
module Test.Invariant where
import Test.QuickCheck
infix 1 &>
infix 2 <~~, @~>, <?>, <=>
-- | Defines extensional equality. This allows concise, point-free
-- definitions of laws.
--
-- > f(x) == g(x)
-- > f <=> g
(<=>) :: Eq b => (a -> b) -> (a -> b) -> a -> Bool
(f <=> g) x = f x == g x
-- | Pointfree version of QuickCheck's ==>. This notation reduces a
-- lot of lambdas, for example:
--
-- >>> quickCheck $ (/=0) &> not . idempotent (*(2::Int))
-- +++ OK, passed 100 tests.
(&>) :: Testable b => (a -> Bool) -> (a -> b) -> a -> Property
(a &> b) x = a x ==> b x
-- | Checks whether a function is idempotent.
--
-- > f(f(x)) == f(x)
--
-- >>> quickCheck $ idempotent (abs :: Int -> Int)
-- +++ OK, passed 100 tests.
idempotent :: Eq a => (a -> a) -> a -> Bool
idempotent f = f <=> f . f
-- | Checks whether a function is point-symmetric (odd).
--
-- > f(-x) == -f(x)
--
-- >>> quickCheck $ pointSymmetric (^3)
-- +++ OK, passed 100 tests.
pointSymmetric :: (Num a, Num b, Eq b) => (a -> b) -> a -> Bool
pointSymmetric f = f . negate <=> negate . f
-- | Checks whether a function is reflection-symmetric (even).
--
-- > f(x) == f(-x)
--
-- >>> quickCheck $ reflectionSymmetric (^2)
-- +++ OK, passed 100 tests.
reflectionSymmetric :: (Num a, Eq b) => (a -> b) -> a -> Bool
reflectionSymmetric f = f . negate <=> f
-- | Checks whether a function is monotonically increasing.
--
-- > x >= y, f(x) >= f(y)
--
-- >>> quickCheck $ monotonicIncreasing ceiling
-- +++ OK, passed 100 tests.
monotonicIncreasing :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool
monotonicIncreasing f x y = compare (f x) (f y) `elem` [EQ, compare x y]
-- | Checks whether a function is strictly monotonically increasing.
--
-- > x > y, f(x) > f(y)
--
-- >>> quickCheck $ monotonicIncreasing' (+1)
-- +++ OK, passed 100 tests.
monotonicIncreasing' :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool
monotonicIncreasing' f x y = compare (f x) (f y) == compare x y
-- | Checks whether a function is monotonically decreasing.
--
-- > x >= y, f(x) <= f(y)
--
-- >>> quickCheck $ monotonicDecreasing (\x -> floor $ negate x)
-- +++ OK, passed 100 tests.
monotonicDecreasing :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool
monotonicDecreasing f x y = compare (f x) (f y) `elem` [EQ, compare y x]
-- | Checks whether a function is strictly monotonically decreasing.
--
-- > x > y, f(x) < f(y)
--
-- >>> quickCheck $ monotonicDecreasing' negate
-- +++ OK, passed 100 tests.
monotonicDecreasing' :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool
monotonicDecreasing' f x y = compare (f x) (f y) == compare y x
-- TODO create sorted list and fold with predicate over it
-- | Checks whether a function is involutory.
--
-- > f(f(x)) = x
--
-- >>> quickCheck $ involutory negate
-- +++ OK, passed 100 tests.
involutory :: Eq a => (a -> a) -> a -> Bool
involutory f = f . f <=> id
-- | Checks whether a function is the inverse of another function.
--
-- > f(g(x)) = x
--
-- >>> quickCheck $ (`div` 2) `inverts` (*2)
-- +++ OK, passed 100 tests.
inverts :: Eq a => (b -> a) -> (a -> b) -> a -> Bool
f `inverts` g = f . g <=> id
-- | Checks whether a binary operator is commutative.
--
-- > a * b = b * a
--
-- >>> quickCheck $ commutative (+)
-- +++ OK, passed 100 tests.
commutative :: Eq b => (a -> a -> b) -> a -> a -> Bool
commutative f x y = x `f` y == y `f` x
-- | Checks whether a binary operator is associative.
--
-- > a + (b + c) = (a + b) + c
--
-- >>> quickCheck $ associative (+)
-- +++ OK, passed 100 tests.
associative :: Eq a => (a -> a -> a) -> a -> a -> a -> Bool
associative f x y z = x `f` (y `f` z) == (x `f` y) `f` z
-- | Checks whether an operator is left-distributive over another operator.
--
-- > a * (b + c) = (a * b) + (a * c)
--
-- >>> quickCheck $ (*) `distributesLeftOver` (+)
-- +++ OK, passed 100 tests.
distributesLeftOver :: Eq a => (a -> a -> a) -> (a -> a -> a) -> a -> a -> a -> Bool
(f `distributesLeftOver` g) x y z = x `f` (y `g` z) == (x `f` y) `g` (x `f` z)
-- | Checks whether an operator is right-distributive over another operator.
--
-- > (b + c) / a = (b / a) + (c / a)
--
-- >>> quickCheck $ (/) `distributesRightOver` (+)
-- +++ OK, passed 100 tests.
distributesRightOver :: Eq a => (a -> a -> a) -> (a -> a -> a) -> a -> a -> a -> Bool
(f `distributesRightOver` g) x y z = (y `g` z) `f` x == (y `f` x) `g` (z `f` x)
-- | Checks whether an operator is distributive over another operator.
--
-- > a * (b + c) = (a * b) + (a * c) = (b + c) * a
--
-- >>> quickCheck $ (*) `distributesOver` (+)
-- +++ OK, passed 100 tests.
distributesOver :: Eq a => (a -> a -> a) -> (a -> a -> a) -> a -> a -> a -> Bool
(f `distributesOver` g) x y z = (f `distributesLeftOver` g) x y z
&& (f `distributesRightOver` g) x y z
-- | Checks whether a function increases the size of a list.
--
-- >>> quickCheck $ inflating (1:)
-- +++ OK, passed 100 tests.
inflating :: ([a] -> [b]) -> [a] -> Bool
inflating f xs = length (f xs) >= length xs
-- | Checks whether a function strictly increases the size of a list.
--
-- >>> quickCheck $ inflating' (1:)
-- +++ OK, passed 100 tests.
inflating' :: ([a] -> [b]) -> [a] -> Bool
inflating' f xs = length (f xs) > length xs
-- For GHC 7.10
-- inflating :: (Foldable f, Foldable f') => (f a -> f' b) -> f a -> Bool
-- inflating f xs = length (f xs) > length xs
-- | Checks whether a function decreases the size of a list.
--
--
-- >>> quickCheck $ deflating tail
-- +++ OK, passed 100 tests.
deflating :: ([a] -> [b]) -> [a] -> Bool
deflating f xs = length (f xs) <= length xs
-- | Checks whether a function strictly decreases the size of a list.
--
--
-- >>> quickCheck $ deflating' tail
-- +++ OK, passed 100 tests.
deflating' :: ([a] -> [b]) -> [a] -> Bool
deflating' f xs = null xs || length (f xs) < length xs
-- For GHC 7.10
-- deflating :: (Foldable f, Foldable f') => (f a -> f' b) -> f a -> Bool
-- deflating f xs = null xs || length (f xs) < length xs
-- | Checks whether a function is cyclic by applying its result to
-- itself within n applications.
--
-- >>> quickCheck $ (`div` 10) `cyclesWithin` 100
-- +++ OK, passed 100 tests.
cyclesWithin :: Eq a => (a -> a) -> Int -> a -> Bool
f `cyclesWithin` n = go [] . take (n + 1) . iterate f
where go xs (y:ys) | y `elem` xs = True
| otherwise = go (y:xs) ys
go _ _ = False
-- | Checks whether a function is invariant over another function.
--
-- >>> quickCheck $ length `invariatesOver` reverse
-- +++ OK, passed 100 tests.
invariatesOver :: Eq b => (a -> b) -> (a -> a) -> a -> Bool
f `invariatesOver` g = f . g <=> f
-- | Checks whether a binary function is fixed by an argument.
--
-- > f x y == f x z   (i.e. f x is a constant function)
--
-- >>> quickCheck $ (*) `fixedBy` 0
-- +++ OK, passed 100 tests.
fixedBy :: Eq c => (a -> b -> c) -> a -> b -> b -> Bool
(f `fixedBy` x) y z = f x y == f x z
-- | Checks whether a function is invariant over another function.
--
-- >>> quickCheck $ length <~~ reverse
-- +++ OK, passed 100 tests.
(<~~) :: Eq b => (a -> b) -> (a -> a) -> a -> Bool
f <~~ g = f . g <=> f
-- | Checks whether a function is the inverse of another function.
--
-- > f(g(x)) = x
--
-- >>> quickCheck $ (`div` 2) @~> (*2)
-- +++ OK, passed 100 tests.
(@~>) :: Eq a => (b -> a) -> (a -> b) -> a -> Bool
f @~> g = f . g <=> id
-- | Checks whether a function is an endomorphism in relation to a unary operator.
--
-- > f(g(x)) = g(f(x))
--
-- >>> quickCheck $ (*7) <?> abs
-- +++ OK, passed 100 tests.
(<?>) :: Eq a => (a -> a) -> (a -> a) -> a -> Bool
f <?> g = f . g <=> g . f
-- | Checks whether a function is an endomorphism in relation to a binary operator.
--
-- > f(g(x,y)) = g(f(x),f(y))
--
-- >>> quickCheck $ (^2) <??> (*)
-- +++ OK, passed 100 tests.
(<??>) :: Eq a => (a -> a) -> (a -> a -> a) -> a -> a -> Bool
(f <??> g) x y = f (x `g` y) == f x `g` f y
-- | Checks whether a function is an endomorphism in relation to a ternary operator.
--
-- > f(g(x,y,z)) = g(f(x),f(y),f(z))
--
(<???>) :: Eq a => (a -> a) -> (a -> a -> a -> a) -> a -> a -> a -> Bool
(f <???> g) x y z = f (g x y z) == g (f x) (f y) (f z)
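-- For example (illustrative, not a checked doctest), one would expect
--   quickCheck $ negate <???> (\x y z -> x + y + z)
-- to pass, since -(x + y + z) == (-x) + (-y) + (-z).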
| knupfer/test-invariant | src/Test/Invariant.hs | Haskell | bsd-3-clause | 8,090 |
{-# LANGUAGE IncoherentInstances #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
-- | This module provides a category transformer for automatic differentiation.
--
-- There are many alternative notions of a generalized derivative.
-- Perhaps the most common is the differential Ring.
-- In Haskell, this might be defined as:
--
-- > class Field r => Differential r where
-- > derivative :: r -> r
-- >
-- > type Diff cat = forall a b. (Category cat, Differential cat a b)
--
-- But this runs into problems with the lack of polymorphic constraints in GHC.
-- See, for example <https://ghc.haskell.org/trac/ghc/ticket/2893 GHC ticket #2893>.
--
-- References:
--
-- * <http://en.wikipedia.org/wiki/Differential_algebra wikipedia article on differential algebras>
module SubHask.Category.Trans.Derivative
where
import SubHask.Algebra
import SubHask.Category
import SubHask.SubType
import SubHask.Internal.Prelude
--------------------------------------------------------------------------------
-- | This is essentially just a translation of the "Numeric.AD.Forward.Forward" type
-- for use with the SubHask numeric hierarchy.
--
-- FIXME:
--
-- Add reverse mode auto-differentiation for vectors.
-- Apply the "ProofOf" framework from Monotonic
data Forward a = Forward
{ val :: !a
, val' :: a
}
deriving (Typeable,Show)
mkMutable [t| forall a. Forward a |]
instance Semigroup a => Semigroup (Forward a) where
(Forward a1 a1')+(Forward a2 a2') = Forward (a1+a2) (a1'+a2')
instance Cancellative a => Cancellative (Forward a) where
(Forward a1 a1')-(Forward a2 a2') = Forward (a1-a2) (a1'-a2')
instance Monoid a => Monoid (Forward a) where
zero = Forward zero zero
instance Group a => Group (Forward a) where
negate (Forward a b) = Forward (negate a) (negate b)
instance Abelian a => Abelian (Forward a)
instance Rg a => Rg (Forward a) where
(Forward a1 a1')*(Forward a2 a2') = Forward (a1*a2) (a1*a2'+a2*a1')
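-- (the second component is the product rule: (f*g)' = f*g' + g*f')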
instance Rig a => Rig (Forward a) where
one = Forward one zero
instance Ring a => Ring (Forward a) where
fromInteger x = Forward (fromInteger x) zero
instance Field a => Field (Forward a) where
reciprocal (Forward a a') = Forward (reciprocal a) (-a'/(a*a))
(Forward a1 a1')/(Forward a2 a2') = Forward (a1/a2) ((a1'*a2-a1*a2')/(a2*a2))
fromRational r = Forward (fromRational r) 0
---------
proveC1 :: (a ~ (a><a), Rig a) => (Forward a -> Forward a) -> C1 (a -> a)
proveC1 f = Diffn (\a -> val $ f $ Forward a one) $ Diff0 $ \a -> val' $ f $ Forward a one
proveC2 :: (a ~ (a><a), Rig a) => (Forward (Forward a) -> Forward (Forward a)) -> C2 (a -> a)
proveC2 f
= Diffn (\a -> val $ val $ f $ Forward (Forward a one) one)
$ Diffn (\a -> val' $ val $ f $ Forward (Forward a one) one)
$ Diff0 (\a -> val' $ val' $ f $ Forward (Forward a one) one)
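-- A hypothetical usage sketch (the name "square" and the instances involved
-- are assumptions, not checked against this build):
--   square :: C1 (Double -> Double)
--   square = proveC1 (\x -> x*x)
-- Evaluating the stored derivative at a point should then give 2*x, which is
-- exactly what the Forward instances above compute.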
--------------------------------------------------------------------------------
class C (cat :: * -> * -> *) where
type D cat :: * -> * -> *
derivative :: cat a b -> D cat a (a >< b)
data Diff (n::Nat) a b where
Diff0 :: (a -> b) -> Diff 0 a b
Diffn :: (a -> b) -> Diff (n-1) a (a >< b) -> Diff n a b
---------
instance Sup (->) (Diff n) (->)
instance Sup (Diff n) (->) (->)
instance Diff 0 <: (->) where
embedType_ = Embed2 unDiff0
where
unDiff0 :: Diff 0 a b -> a -> b
unDiff0 (Diff0 f) = f
unDiff0 (Diffn _ _) = undefined
instance Diff n <: (->) where
embedType_ = Embed2 unDiffn
where
unDiffn :: Diff n a b -> a -> b
unDiffn (Diffn f _) = f
unDiffn (Diff0 _) = undefined
--
-- FIXME: these subtyping instances should be made more generic
-- the problem is that type families aren't currently powerful enough
--
instance Sup (Diff 0) (Diff 1) (Diff 0)
instance Sup (Diff 1) (Diff 0) (Diff 0)
instance Diff 1 <: Diff 0
where embedType_ = Embed2 m2n
where m2n (Diffn f _) = Diff0 f
m2n (Diff0 _) = undefined
instance Sup (Diff 0) (Diff 2) (Diff 0)
instance Sup (Diff 2) (Diff 0) (Diff 0)
instance Diff 2 <: Diff 0
where embedType_ = Embed2 m2n
where m2n (Diffn f _) = Diff0 f
m2n (Diff0 _) = undefined
instance Sup (Diff 1) (Diff 2) (Diff 1)
instance Sup (Diff 2) (Diff 1) (Diff 1)
instance Diff 2 <: Diff 1
where embedType_ = Embed2 m2n
where m2n (Diffn f f') = Diffn f (embedType2 f')
m2n (Diff0 _) = undefined
---------
instance (1 <= n) => C (Diff n) where
type D (Diff n) = Diff (n-1)
derivative (Diffn _ f') = f'
-- doesn't work, hence the -fno-warn-incomplete-patterns GHC option above
-- derivative (Diff0 _) = undefined
unsafeProveC0 :: (a -> b) -> Diff 0 a b
unsafeProveC0 f = Diff0 f
unsafeProveC1
:: (a -> b) -- ^ f(x)
-> (a -> a><b) -- ^ f'(x)
-> C1 (a -> b)
unsafeProveC1 f f' = Diffn f $ unsafeProveC0 f'
unsafeProveC2
:: (a -> b) -- ^ f(x)
-> (a -> a><b) -- ^ f'(x)
-> (a -> a><a><b) -- ^ f''(x)
-> C2 (a -> b)
unsafeProveC2 f f' f'' = Diffn f $ unsafeProveC1 f' f''
type C0 a = C0_ a
type family C0_ (f :: *) :: * where
C0_ (a -> b) = Diff 0 a b
type C1 a = C1_ a
type family C1_ (f :: *) :: * where
C1_ (a -> b) = Diff 1 a b
type C2 a = C2_ a
type family C2_ (f :: *) :: * where
C2_ (a -> b) = Diff 2 a b
---------------------------------------
-- algebra
mkMutable [t| forall n a b. Diff n a b |]
instance Semigroup b => Semigroup (Diff 0 a b) where
(Diff0 f1 )+(Diff0 f2 ) = Diff0 (f1+f2)
_ + _ = undefined
instance (Semigroup b, Semigroup (a><b)) => Semigroup (Diff 1 a b) where
(Diffn f1 f1')+(Diffn f2 f2') = Diffn (f1+f2) (f1'+f2')
instance (Semigroup b, Semigroup (a><b), Semigroup (a><a><b)) => Semigroup (Diff 2 a b) where
(Diffn f1 f1')+(Diffn f2 f2') = Diffn (f1+f2) (f1'+f2')
instance Monoid b => Monoid (Diff 0 a b) where
zero = Diff0 zero
instance (Monoid b, Monoid (a><b)) => Monoid (Diff 1 a b) where
zero = Diffn zero zero
instance (Monoid b, Monoid (a><b), Monoid (a><a><b)) => Monoid (Diff 2 a b) where
zero = Diffn zero zero
--------------------------------------------------------------------------------
-- test
-- v = unsafeToModule [1,2,3,4,5] :: SVector 5 Double
--
-- sphere :: Hilbert v => C0 (v -> Scalar v)
-- sphere = unsafeProveC0 f
-- where
-- f v = v<>v
| Drezil/subhask | src/SubHask/Category/Trans/Derivative.hs | Haskell | bsd-3-clause | 6,414 |
-- | When there aren't enough registers to hold all the vregs we have to spill some of those
-- vregs to slots on the stack. This module is used to modify the code to use those slots.
--
module RegAlloc.Graph.Spill (
regSpill,
SpillStats(..),
accSpillSL
)
where
import RegAlloc.Liveness
import Instruction
import Reg
import OldCmm hiding (RegSet)
import BlockId
import State
import Unique
import UniqFM
import UniqSet
import UniqSupply
import Outputable
import Data.List
import Data.Maybe
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | Spill all these virtual regs to stack slots.
--
-- TODO: See if we can split some of the live ranges instead of just globally
-- spilling the virtual reg. This might make the spill cleaner's job easier.
--
-- TODO: On CISCy x86 and x86_64 we don't necessarily have to add a mov instruction
-- when making spills. If an instr is using a spilled virtual we may be able to
-- address the spill slot directly.
--
regSpill
:: Instruction instr
=> [LiveCmmDecl statics instr] -- ^ the code
-> UniqSet Int -- ^ available stack slots
-> UniqSet VirtualReg -- ^ the regs to spill
-> UniqSM
([LiveCmmDecl statics instr] -- code with SPILL and RELOAD meta instructions added.
, UniqSet Int -- left over slots
, SpillStats ) -- stats about what happened during spilling
regSpill code slotsFree regs
-- not enough slots to spill these regs
| sizeUniqSet slotsFree < sizeUniqSet regs
= pprPanic "regSpill: out of spill slots!"
( text " regs to spill = " <> ppr (sizeUniqSet regs)
$$ text " slots left = " <> ppr (sizeUniqSet slotsFree))
| otherwise
= do
-- allocate a slot for each of the spilled regs
let slots = take (sizeUniqSet regs) $ uniqSetToList slotsFree
let regSlotMap = listToUFM
$ zip (uniqSetToList regs) slots
-- grab the unique supply from the monad
us <- getUs
-- run the spiller on all the blocks
let (code', state') =
runState (mapM (regSpill_top regSlotMap) code)
(initSpillS us)
return ( code'
, minusUniqSet slotsFree (mkUniqSet slots)
, makeSpillStats state')
-- | Spill some registers to stack slots in a top-level thing.
regSpill_top
:: Instruction instr
=> RegMap Int -- ^ map of vregs to slots they're being spilled to.
-> LiveCmmDecl statics instr -- ^ the top level thing.
-> SpillM (LiveCmmDecl statics instr)
regSpill_top regSlotMap cmm
= case cmm of
CmmData{}
-> return cmm
CmmProc info label sccs
| LiveInfo static firstId mLiveVRegsOnEntry liveSlotsOnEntry <- info
-> do
-- We should only be passed Cmms with the liveness maps filled in, but we'll
-- create empty ones if they're not there just in case.
let liveVRegsOnEntry = fromMaybe mapEmpty mLiveVRegsOnEntry
-- The liveVRegsOnEntry contains the set of vregs that are live on entry to
-- each basic block. If we spill one of those vregs we remove it from that
-- set and add the corresponding slot number to the liveSlotsOnEntry set.
-- The spill cleaner needs this information to erase unneeded spill and
-- reload instructions after we've done a successful allocation.
let liveSlotsOnEntry' :: Map BlockId (Set Int)
liveSlotsOnEntry'
= mapFoldWithKey patchLiveSlot liveSlotsOnEntry liveVRegsOnEntry
let info'
= LiveInfo static firstId
(Just liveVRegsOnEntry)
liveSlotsOnEntry'
-- Apply the spiller to all the basic blocks in the CmmProc.
sccs' <- mapM (mapSCCM (regSpill_block regSlotMap)) sccs
return $ CmmProc info' label sccs'
where -- | Given a BlockId and the set of registers live in it,
-- if registers in this block are being spilled to stack slots,
-- then record the fact that these slots are now live in those blocks
-- in the given slotmap.
patchLiveSlot :: BlockId -> RegSet -> Map BlockId (Set Int) -> Map BlockId (Set Int)
patchLiveSlot blockId regsLive slotMap
= let curSlotsLive = fromMaybe Set.empty
$ Map.lookup blockId slotMap
moreSlotsLive = Set.fromList
$ catMaybes
$ map (lookupUFM regSlotMap)
$ uniqSetToList regsLive
slotMap' = Map.insert blockId (Set.union curSlotsLive moreSlotsLive) slotMap
in slotMap'
-- | Spill some registers to stack slots in a basic block.
regSpill_block
:: Instruction instr
=> UniqFM Int -- ^ map of vregs to slots they're being spilled to.
-> LiveBasicBlock instr
-> SpillM (LiveBasicBlock instr)
regSpill_block regSlotMap (BasicBlock i instrs)
= do instrss' <- mapM (regSpill_instr regSlotMap) instrs
return $ BasicBlock i (concat instrss')
-- | Spill some registers to stack slots in a single instruction. If the instruction
-- uses registers that need to be spilled, then it is prefixed (or postfixed) with
-- the appropriate RELOAD or SPILL meta instructions.
regSpill_instr
:: Instruction instr
=> UniqFM Int -- ^ map of vregs to slots they're being spilled to.
-> LiveInstr instr
-> SpillM [LiveInstr instr]
regSpill_instr _ li@(LiveInstr _ Nothing)
= do return [li]
regSpill_instr regSlotMap
(LiveInstr instr (Just _))
= do
-- work out which regs are read and written in this instr
let RU rlRead rlWritten = regUsageOfInstr instr
-- sometimes a register is listed as being read more than once,
-- nub this so we don't end up inserting two lots of spill code.
let rsRead_ = nub rlRead
let rsWritten_ = nub rlWritten
-- if a reg is modified it appears in both lists; separate out the read-only,
-- write-only and modified sets below.
let rsRead = rsRead_ \\ rsWritten_
let rsWritten = rsWritten_ \\ rsRead_
let rsModify = intersect rsRead_ rsWritten_
-- work out if any of the regs being used are currently being spilled.
let rsSpillRead = filter (\r -> elemUFM r regSlotMap) rsRead
let rsSpillWritten = filter (\r -> elemUFM r regSlotMap) rsWritten
let rsSpillModify = filter (\r -> elemUFM r regSlotMap) rsModify
-- rewrite the instr and work out spill code.
(instr1, prepost1) <- mapAccumLM (spillRead regSlotMap) instr rsSpillRead
(instr2, prepost2) <- mapAccumLM (spillWrite regSlotMap) instr1 rsSpillWritten
(instr3, prepost3) <- mapAccumLM (spillModify regSlotMap) instr2 rsSpillModify
let (mPrefixes, mPostfixes) = unzip (prepost1 ++ prepost2 ++ prepost3)
let prefixes = concat mPrefixes
let postfixes = concat mPostfixes
-- final code
let instrs' = prefixes
++ [LiveInstr instr3 Nothing]
++ postfixes
return
{- $ pprTrace "* regSpill_instr spill"
( text "instr = " <> ppr instr
$$ text "read = " <> ppr rsSpillRead
$$ text "write = " <> ppr rsSpillWritten
$$ text "mod = " <> ppr rsSpillModify
$$ text "-- out"
$$ (vcat $ map ppr instrs')
$$ text " ")
-}
$ instrs'
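-- For illustration only (hypothetical vreg, slot and instruction): if %v1 is
-- spilled to slot 3, an instruction that both reads and writes %v1, say
--      add %v1, #1
-- is rewritten by the code above to use a fresh vreg %v1' (via 'patchInstr')
-- and bracketed with meta instructions:
--      RELOAD slot 3, %v1'
--      add    %v1', #1
--      SPILL  %v1', slot 3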
spillRead
:: Instruction instr
=> UniqFM Int
-> instr
-> Reg
-> SpillM (instr, ([LiveInstr instr'], [LiveInstr instr']))
spillRead regSlotMap instr reg
| Just slot <- lookupUFM regSlotMap reg
= do (instr', nReg) <- patchInstr reg instr
modify $ \s -> s
{ stateSpillSL = addToUFM_C accSpillSL (stateSpillSL s) reg (reg, 0, 1) }
return ( instr'
, ( [LiveInstr (RELOAD slot nReg) Nothing]
, []) )
| otherwise = panic "RegSpill.spillRead: no slot defined for spilled reg"
spillWrite
:: Instruction instr
=> UniqFM Int
-> instr
-> Reg
-> SpillM (instr, ([LiveInstr instr'], [LiveInstr instr']))
spillWrite regSlotMap instr reg
| Just slot <- lookupUFM regSlotMap reg
= do (instr', nReg) <- patchInstr reg instr
modify $ \s -> s
{ stateSpillSL = addToUFM_C accSpillSL (stateSpillSL s) reg (reg, 1, 0) }
return ( instr'
, ( []
, [LiveInstr (SPILL nReg slot) Nothing]))
| otherwise = panic "RegSpill.spillWrite: no slot defined for spilled reg"
spillModify
:: Instruction instr
=> UniqFM Int
-> instr
-> Reg
-> SpillM (instr, ([LiveInstr instr'], [LiveInstr instr']))
spillModify regSlotMap instr reg
| Just slot <- lookupUFM regSlotMap reg
= do (instr', nReg) <- patchInstr reg instr
modify $ \s -> s
{ stateSpillSL = addToUFM_C accSpillSL (stateSpillSL s) reg (reg, 1, 1) }
return ( instr'
, ( [LiveInstr (RELOAD slot nReg) Nothing]
, [LiveInstr (SPILL nReg slot) Nothing]))
| otherwise = panic "RegSpill.spillModify: no slot defined for spilled reg"
-- | Rewrite uses of this virtual reg in an instr to use a different virtual reg
patchInstr
:: Instruction instr
=> Reg -> instr -> SpillM (instr, Reg)
patchInstr reg instr
= do nUnique <- newUnique
let nReg = case reg of
RegVirtual vr -> RegVirtual (renameVirtualReg nUnique vr)
RegReal{} -> panic "RegAlloc.Graph.Spill.patchInstr: not patching real reg"
let instr' = patchReg1 reg nReg instr
return (instr', nReg)
patchReg1
:: Instruction instr
=> Reg -> Reg -> instr -> instr
patchReg1 old new instr
= let patchF r
| r == old = new
| otherwise = r
in patchRegsOfInstr instr patchF
-- Spiller monad --------------------------------------------------------------
data SpillS
= SpillS
{ -- | unique supply for generating fresh vregs.
stateUS :: UniqSupply
-- | spilled vreg vs the number of times it was loaded, stored
, stateSpillSL :: UniqFM (Reg, Int, Int) }
initSpillS :: UniqSupply -> SpillS
initSpillS uniqueSupply
= SpillS
{ stateUS = uniqueSupply
, stateSpillSL = emptyUFM }
type SpillM a = State SpillS a
newUnique :: SpillM Unique
newUnique
= do us <- gets stateUS
case takeUniqFromSupply us of
(uniq, us')
-> do modify $ \s -> s { stateUS = us' }
return uniq
accSpillSL :: (Reg, Int, Int) -> (Reg, Int, Int) -> (Reg, Int, Int)
accSpillSL (r1, s1, l1) (_, s2, l2)
= (r1, s1 + s2, l1 + l2)
-- Spiller stats --------------------------------------------------------------
data SpillStats
= SpillStats
{ spillStoreLoad :: UniqFM (Reg, Int, Int) }
makeSpillStats :: SpillS -> SpillStats
makeSpillStats s
= SpillStats
{ spillStoreLoad = stateSpillSL s }
instance Outputable SpillStats where
ppr stats
= (vcat $ map (\(r, s, l) -> ppr r <+> int s <+> int l)
$ eltsUFM (spillStoreLoad stats))
|
mcmaniac/ghc
|
compiler/nativeGen/RegAlloc/Graph/Spill.hs
|
Haskell
|
bsd-3-clause
| 12,564
|
{-# LANGUAGE TypeFamilies, FlexibleInstances, ConstraintKinds, DeriveGeneric, DefaultSignatures #-}
module BayesStack.DirMulti ( -- * Dirichlet/multinomial pair
Multinom, dirMulti, symDirMulti, multinom
-- | Do not do record updates with these
, dmTotal, dmAlpha, dmDomain
, setMultinom, SetUnset (..)
, addMultinom, subMultinom
, decMultinom, incMultinom
, prettyMultinom
, updatePrior
, obsProb
-- * Parameter estimation
, estimatePrior, reestimatePriors, reestimateSymPriors
-- * Convenience functions
, probabilities, decProbabilities
) where
import Data.EnumMap (EnumMap)
import qualified Data.EnumMap as EM
import Data.Sequence (Seq)
import qualified Data.Sequence as SQ
import qualified Data.Foldable as Foldable
import Data.Foldable (toList, Foldable, foldMap)
import Data.Function (on)
import Text.PrettyPrint
import Text.Printf
import GHC.Generics (Generic)
import Data.Binary
import Data.Binary.EnumMap ()
import BayesStack.Types
import BayesStack.Dirichlet
import Numeric.Log hiding (sum)
import Numeric.Digamma
import Math.Gamma hiding (p)
-- | Make error handling a bit easier
checkNaN :: RealFloat a => String -> a -> a
checkNaN loc x | isNaN x = error $ "BayesStack.DirMulti."++loc++": Not a number"
checkNaN loc x | isInfinite x = error $ "BayesStack.DirMulti."++loc++": Infinity"
checkNaN _ x = x
maybeInc, maybeDec :: (Num a, Eq a) => Maybe a -> Maybe a
maybeInc Nothing = Just 1
maybeInc (Just n) = Just (n+1)
maybeDec Nothing = error "Can't decrement zero count"
maybeDec (Just 1) = Nothing
maybeDec (Just n) = Just (n-1)
{-# INLINEABLE decMultinom #-}
{-# INLINEABLE incMultinom #-}
decMultinom, incMultinom :: (Num w, Eq w, Ord a, Enum a)
=> a -> Multinom w a -> Multinom w a
decMultinom k = subMultinom 1 k
incMultinom k = addMultinom 1 k
subMultinom, addMultinom :: (Num w, Eq w, Ord a, Enum a)
=> w -> a -> Multinom w a -> Multinom w a
subMultinom w k dm = dm { dmCounts = EM.alter maybeDec k $ dmCounts dm
, dmTotal = dmTotal dm - w }
addMultinom w k dm = dm { dmCounts = EM.alter maybeInc k $ dmCounts dm
, dmTotal = dmTotal dm + w }
data SetUnset = Set | Unset
setMultinom :: (Num w, Eq w, Enum a, Ord a) => SetUnset -> a -> Multinom w a -> Multinom w a
setMultinom Set s = incMultinom s
setMultinom Unset s = decMultinom s
-- | 'Multinom w a' represents a multinomial distribution over domain 'a'.
-- Optionally, this can include a collapsed Dirichlet prior.
-- 'Multinom alpha count total' is a multinomial with Dirichlet prior
-- with symmetric parameter 'alpha', ...
data Multinom w a = DirMulti { dmAlpha :: !(Alpha a)
, dmCounts :: !(EnumMap a w)
, dmTotal :: !w
, dmDomain :: !(Seq a)
}
| Multinom { dmProbs :: !(EnumMap a Double)
, dmCounts :: !(EnumMap a w)
, dmTotal :: !w
, dmDomain :: !(Seq a)
}
deriving (Show, Eq, Generic)
instance (Enum a, Binary a, Binary w) => Binary (Multinom w a)
-- | 'symDirMultiFromPrecision d p' is a symmetric Dirichlet/multinomial over a
-- domain 'd' with precision 'p'
symDirMultiFromPrecision :: (Num w, Enum a) => [a] -> DirPrecision -> Multinom w a
symDirMultiFromPrecision domain prec = symDirMulti (0.5*prec) domain
-- | 'dirMultiFromPrecision m p' is an asymmetric Dirichlet/multinomial
-- over a domain 'd' with mean 'm' and precision 'p'
dirMultiFromPrecision :: (Num w, Enum a) => DirMean a -> DirPrecision -> Multinom w a
dirMultiFromPrecision m p = dirMultiFromAlpha $ meanPrecisionToAlpha m p
-- | Create a symmetric Dirichlet/multinomial
symDirMulti :: (Num w, Enum a) => Double -> [a] -> Multinom w a
symDirMulti alpha domain = dirMultiFromAlpha $ symAlpha domain alpha
-- | A multinomial without a prior
multinom :: (Num w, Enum a) => [(a,Double)] -> Multinom w a
multinom probs = Multinom { dmProbs = EM.fromList probs
, dmCounts = EM.empty
, dmTotal = 0
, dmDomain = SQ.fromList $ map fst probs
}
-- | Create an asymmetric Dirichlet/multinomial from items and alphas
dirMulti :: (Num w, Enum a) => [(a,Double)] -> Multinom w a
dirMulti domain = dirMultiFromAlpha $ asymAlpha $ EM.fromList domain
-- | Create a Dirichlet/multinomial with a given prior
dirMultiFromAlpha :: (Enum a, Num w) => Alpha a -> Multinom w a
dirMultiFromAlpha alpha = DirMulti { dmAlpha = alpha
, dmCounts = EM.empty
, dmTotal = 0
, dmDomain = alphaDomain alpha
}
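-- A small usage sketch (illustrative values only, not part of the original
-- module): build a symmetric prior over a domain and record two observations.
--
-- >>> let dm = symDirMulti 0.1 [1..10] :: Multinom Int Int
-- >>> dmTotal (incMultinom 3 (incMultinom 3 dm))
-- 2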
data Acc w = Acc !w !Probability
obsProb :: (Enum a, Real w, Functor f, Foldable f)
=> Multinom w a -> f (a, w) -> Probability
obsProb (Multinom {dmProbs=prob}) obs =
Foldable.product $ fmap (\(k,w)->(realToFrac $ prob EM.! k)^^w) obs
where (^^) :: Real w => Log Double -> w -> Log Double
x ^^ y = Exp $ realToFrac y * ln x
obsProb (DirMulti {dmAlpha=alpha}) obs =
let go (Acc w p) (k',w') = Acc (w+w') (p*p')
where p' = Exp $ checkNaN "obsProb"
$ lnGamma (realToFrac w' + alpha `alphaOf` k')
in case Foldable.foldl' go (Acc 0 1) obs of
Acc w p -> p / alphaNormalizer alpha
/ Exp (lnGamma $ realToFrac w + sumAlpha alpha)
{-# INLINE obsProb #-}
dmGetCounts :: (Enum a, Num w) => Multinom w a -> a -> w
dmGetCounts dm k =
EM.findWithDefault 0 k (dmCounts dm)
instance HasLikelihood (Multinom w) where
type LContext (Multinom w) a = (Real w, Ord a, Enum a)
likelihood dm = obsProb dm $ EM.assocs $ dmCounts dm
{-# INLINEABLE likelihood #-}
instance FullConditionable (Multinom w) where
type FCContext (Multinom w) a = (Real w, Ord a, Enum a)
sampleProb (Multinom {dmProbs=prob}) k = prob EM.! k
sampleProb dm@(DirMulti {dmAlpha=a}) k =
let alpha = a `alphaOf` k
n = realToFrac $ dmGetCounts dm k
total = realToFrac $ dmTotal dm
in (n + alpha) / (total + sumAlpha a)
{-# INLINEABLE sampleProb #-}
{-# INLINEABLE probabilities #-}
probabilities :: (Real w, Ord a, Enum a) => Multinom w a -> Seq (Double, a)
probabilities dm = fmap (\a->(sampleProb dm a, a)) $ dmDomain dm -- FIXME
-- | Probabilities sorted decreasingly
decProbabilities :: (Real w, Ord a, Enum a, Num w) => Multinom w a -> Seq (Double, a)
decProbabilities = SQ.sortBy (flip (compare `on` fst)) . probabilities
prettyMultinom :: (Real w, Ord a, Enum a) => Int -> (a -> String) -> Multinom w a -> Doc
prettyMultinom _ _ (Multinom {}) = error "TODO: prettyMultinom"
prettyMultinom n showA dm@(DirMulti {}) =
text "DirMulti" <+> parens (text "alpha=" <> prettyAlpha showA (dmAlpha dm))
$$ nest 5 (fsep $ punctuate comma
$ map (\(p,a)->text (showA a) <> parens (text $ printf "%1.2e" p))
$ take n $ Data.Foldable.toList $ decProbabilities dm)
-- | Update the prior of a Dirichlet/multinomial
updatePrior :: (Alpha a -> Alpha a) -> Multinom w a -> Multinom w a
updatePrior _ (Multinom {}) = error "TODO: updatePrior"
updatePrior f dm = dm {dmAlpha=f $ dmAlpha dm}
-- | Relative tolerance in precision for prior estimation
estimationTol :: Double
estimationTol = 1e-8
reestimatePriors :: (Foldable f, Functor f, Real w, Enum a)
=> f (Multinom w a) -> f (Multinom w a)
reestimatePriors dms =
let usableDms = filter (\dm->dmTotal dm > 5) $ toList dms
alpha = case () of
_ | length usableDms <= 3 -> id
_ -> const $ estimatePrior estimationTol usableDms
in fmap (updatePrior alpha) dms
reestimateSymPriors :: (Foldable f, Functor f, Real w, Enum a)
=> f (Multinom w a) -> f (Multinom w a)
reestimateSymPriors dms =
let usableDms = filter (\dm->dmTotal dm > 5) $ toList dms
alpha = case () of
_ | length usableDms <= 3 -> id
_ -> const $ symmetrizeAlpha $ estimatePrior estimationTol usableDms
in fmap (updatePrior alpha) dms
-- | Estimate the prior alpha from a set of Dirichlet/multinomials
estimatePrior' :: (Real w, Enum a) => [Multinom w a] -> Alpha a -> Alpha a
estimatePrior' dms alpha =
let domain = toList $ dmDomain $ head dms
f k = let num = sum $ map (\i->digamma (realToFrac (dmGetCounts i k) + alphaOf alpha k)
- digamma (alphaOf alpha k)
)
$ filter (\i->dmGetCounts i k > 0) dms
total i = realToFrac $ sum $ map (\k->dmGetCounts i k) domain
sumAlpha = sum $ map (alphaOf alpha) domain
denom = sum $ map (\i->digamma (total i + sumAlpha) - digamma sumAlpha) dms
in case () of
_ | isNaN num -> error $ "BayesStack.DirMulti.estimatePrior': num = NaN: "++show (map (\i->(digamma (realToFrac (dmGetCounts i k) + alphaOf alpha k), digamma (alphaOf alpha k))) dms)
_ | denom == 0 -> error "BayesStack.DirMulti.estimatePrior': denom=0"
_ | isInfinite num -> error "BayesStack.DirMulti.estimatePrior': num is infinity "
_ | isNaN (alphaOf alpha k * num / denom) -> error $ "NaN"++show (num, denom)
_ -> alphaOf alpha k * num / denom
in asymAlpha $ foldMap (\k->EM.singleton k (f k)) domain
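-- The update above follows the usual Dirichlet-multinomial fixed-point
-- iteration (cf. Minka, "Estimating a Dirichlet distribution"); roughly:
--
--   alpha_k <- alpha_k * sum_i [ psi(n_ik + alpha_k) - psi(alpha_k) ]
--                      / sum_i [ psi(N_i + sum_k alpha_k) - psi(sum_k alpha_k) ]
--
-- where psi is the digamma function, n_ik the count of item k in multinomial i,
-- and N_i its total count.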
estimatePrior :: (Real w, Enum a) => Double -> [Multinom w a] -> Alpha a
estimatePrior tol dms = iter $ dmAlpha $ head dms
where iter alpha = let alpha' = estimatePrior' dms alpha
(_, prec) = alphaToMeanPrecision alpha
(_, prec') = alphaToMeanPrecision alpha'
in if abs ((prec' - prec) / prec) > tol
then iter alpha'
else alpha'
|
beni55/bayes-stack
|
BayesStack/DirMulti.hs
|
Haskell
|
bsd-3-clause
| 10,404
|
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.DebugWindow
-- Copyright : (c) Brandon S Allbery KF8NH, 2014
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : allbery.b@gmail.com
-- Stability : unstable
-- Portability : not portable
--
-- Module to dump window information for diagnostic/debugging purposes. See
-- "XMonad.Hooks.DebugEvents" and "XMonad.Hooks.DebugStack" for practical uses.
--
-----------------------------------------------------------------------------
module XMonad.Util.DebugWindow (debugWindow) where
import Prelude
import XMonad
import Codec.Binary.UTF8.String (decodeString)
import Control.Exception.Extensible as E
import Control.Monad (when)
import Data.List (unfoldr
,intercalate
)
import Foreign
import Foreign.C.String
import Numeric (showHex)
import System.Exit
-- | Output a window by ID in hex, decimal, its ICCCM resource name and class,
-- and its title if available. Also indicate override_redirect with an
-- exclamation mark, and wrap in brackets if it is unmapped or withdrawn.
debugWindow :: Window -> X String
debugWindow 0 = return "-no window-"
debugWindow w = do
let wx = pad 8 '0' $ showHex w ""
w' <- withDisplay $ \d -> io (safeGetWindowAttributes d w)
case w' of
Nothing ->
return $ "(deleted window " ++ wx ++ ")"
Just (WindowAttributes
{ wa_x = x
, wa_y = y
, wa_width = wid
, wa_height = ht
, wa_border_width = bw
, wa_map_state = m
, wa_override_redirect = o
}) -> do
c' <- withDisplay $ \d ->
io (getWindowProperty8 d wM_CLASS w)
let c = case c' of
Nothing -> ""
Just c'' -> intercalate "/" $
flip unfoldr (map (toEnum . fromEnum) c'') $
\s -> if null s
then Nothing
else let (w'',s'') = break (== '\NUL') s
s' = if null s''
then s''
else tail s''
in Just (w'',s')
t <- catchX' (wrap `fmap` getEWMHTitle "VISIBLE" w) $
catchX' (wrap `fmap` getEWMHTitle "" w) $
catchX' (wrap `fmap` getICCCMTitle w) $
return ""
h' <- getMachine w
let h = if null h' then "" else '@':h'
-- if it has WM_COMMAND use it, else use the appName
-- NB. modern stuff often does not set WM_COMMAND since it's only ICCCM required and not some
-- horrible gnome/freedesktop session manager thing like Wayland intended. How helpful of them.
p' <- withDisplay $ \d -> safeGetCommand d w
let p = if null p' then "" else wrap $ intercalate " " p'
nWP <- getAtom "_NET_WM_PID"
pid' <- withDisplay $ \d -> io $ getWindowProperty32 d nWP w
let pid = case pid' of
Just [pid''] -> '(':show pid'' ++ ")"
_ -> ""
let cmd = p ++ pid ++ h
let (lb,rb) = case () of
() | m == waIsViewable -> ("","")
| otherwise -> ("[","]")
o' = if o then "!" else ""
return $ concat [lb
,o'
,wx
,t
," "
,show wid
,'x':show ht
,if bw == 0 then "" else '+':show bw
,"@"
,show x
,',':show y
,if null c then "" else ' ':c
,if null cmd then "" else ' ':cmd
,rb
]
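-- For illustration only (made-up values), a mapped xterm might be rendered by
-- the code above roughly as
--
--   0160000a "bash" 484x316@12,34 xterm/XTerm "xterm"(1234)@myhost
--
-- while an unmapped or override-redirect window would additionally be wrapped
-- in brackets and/or prefixed with '!'.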
getEWMHTitle :: String -> Window -> X String
getEWMHTitle sub w = do
a <- getAtom $ "_NET_WM_" ++ (if null sub then "" else '_':sub) ++ "_NAME"
(Just t) <- withDisplay $ \d -> io $ getWindowProperty32 d a w
return $ map (toEnum . fromEnum) t
getICCCMTitle :: Window -> X String
getICCCMTitle w = getDecodedStringProp w wM_NAME
getDecodedStringProp :: Window -> Atom -> X String
getDecodedStringProp w a = do
t@(TextProperty t' _ 8 _) <- withDisplay $ \d -> io $ getTextProperty d w a
[s] <- catchX' (tryUTF8 t) $
catchX' (tryCompound t) $
io ((:[]) `fmap` peekCString t')
return s
tryUTF8 :: TextProperty -> X [String]
tryUTF8 (TextProperty s enc _ _) = do
uTF8_STRING <- getAtom "UTF8_STRING"
when (enc /= uTF8_STRING) $ error "String is not UTF8_STRING"
(map decodeString . splitNul) `fmap` io (peekCString s)
tryCompound :: TextProperty -> X [String]
tryCompound t@(TextProperty _ enc _ _) = do
cOMPOUND_TEXT <- getAtom "COMPOUND_TEXT"
when (enc /= cOMPOUND_TEXT) $ error "String is not COMPOUND_TEXT"
withDisplay $ \d -> io $ wcTextPropertyToTextList d t
splitNul :: String -> [String]
splitNul "" = []
splitNul s = let (s',ss') = break (== '\NUL') s in s' : splitNul ss'
pad :: Int -> Char -> String -> String
pad w c s = replicate (w - length s) c ++ s
-- modified 'catchX' without the print to 'stderr'
catchX' :: X a -> X a -> X a
catchX' job errcase = do
st <- get
c <- ask
(a, s') <- io $ runX c st job `E.catch` \e -> case fromException e of
Just x -> throw e `const` (x `asTypeOf` ExitSuccess)
_ -> runX c st errcase
put s'
return a
wrap :: String -> String
wrap s = ' ' : '"' : wrap' s ++ "\""
where
wrap' (s':ss) | s' == '"' = '\\' : s' : wrap' ss
| s' == '\\' = '\\' : s' : wrap' ss
| otherwise = s' : wrap' ss
wrap' "" = ""
-- Graphics.X11.Extras.getWindowAttributes is bugggggggy
safeGetWindowAttributes :: Display -> Window -> IO (Maybe WindowAttributes)
safeGetWindowAttributes d w = alloca $ \p -> do
s <- xGetWindowAttributes d w p
case s of
0 -> return Nothing
_ -> Just `fmap` peek p
-- and so is getCommand
safeGetCommand :: Display -> Window -> X [String]
safeGetCommand d w = do
wC <- getAtom "WM_COMMAND"
p <- io $ getWindowProperty8 d wC w
case p of
Nothing -> return []
Just cs' -> do
let cs = map (toEnum . fromEnum) cs'
go (a,(s,"\NUL")) = (s:a,("",""))
go (a,(s,'\NUL':ss)) = go (s:a,go' ss)
go r = r -- ???
go' = break (== '\NUL')
in return $ reverse $ fst $ go ([],go' cs)
getMachine :: Window -> X String
getMachine w = catchX' (getAtom "WM_CLIENT_MACHINE" >>= getDecodedStringProp w) (return "")
|
f1u77y/xmonad-contrib
|
XMonad/Util/DebugWindow.hs
|
Haskell
|
bsd-3-clause
| 7,208
|
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
module Tests.Control.Monad(tests) where
import Test.HUnitPlus.Base
import qualified Tests.Control.Monad.Symbols as Symbols
tests :: Test
tests = "Monad" ~: [Symbols.tests]
|
emc2/compiler-misc
|
test/Tests/Control/Monad.hs
|
Haskell
|
bsd-3-clause
| 1,732
|
-- |
-- Module : Foundation.VFS.FilePath
-- License : BSD-style
-- Maintainer : foundation
-- Stability : experimental
-- Portability : portable
--
-- # Opaque implementation for FilePath
--
-- The underlying type of a FilePath is a `Foundation.ByteArray`. It is indeed like
-- this because for some systems (Unix systems) a `FilePath` is a null
-- terminated array of bytes.
--
-- # FilePath and FileName for type checking validation
--
-- In order to add some constraint at compile time, it is not possible to
-- append (`</>`) a `FilePath` to another `FilePath`.
-- You can only append (`</>`) a `FileName` to a given `FilePath`.
--
{-# LANGUAGE CPP #-}
module Foundation.VFS.FilePath
( FilePath
, Relativity(..)
, FileName
-- * conversion
, filePathToString
, filePathToLString
-- ** unsafe
, unsafeFilePath
, unsafeFileName
, extension
) where
import Basement.Compat.Base
import Basement.Compat.Semigroup
import Foundation.Collection
import Foundation.Array
import Foundation.String (Encoding(..), ValidationFailure, toBytes, fromBytes, String)
import Foundation.VFS.Path(Path(..))
import qualified Data.List
-- ------------------------------------------------------------------------- --
-- System related helpers --
-- ------------------------------------------------------------------------- --
#ifdef mingw32_HOST_OS
pathSeparatorWINC :: Char
pathSeparatorWINC = '\\'
-- | define the Path separator for Windows systems : '\\'
pathSeparatorWIN :: String
pathSeparatorWIN = fromString [pathSeparatorWINC]
#else
pathSeparatorPOSIXC :: Char
pathSeparatorPOSIXC = '/'
-- | define the Path separator for POSIX systems : '/'
pathSeparatorPOSIX :: String
pathSeparatorPOSIX = fromString [pathSeparatorPOSIXC]
#endif
pathSeparatorC :: Char
pathSeparator :: String
#ifdef mingw32_HOST_OS
pathSeparatorC = pathSeparatorWINC
pathSeparator = pathSeparatorWIN
#else
pathSeparatorC = pathSeparatorPOSIXC
pathSeparator = pathSeparatorPOSIX
#endif
-- ------------------------------------------------------------------------- --
-- FilePath --
-- ------------------------------------------------------------------------- --
-- | information about type of FilePath
--
-- A file path is either `Relative` or `Absolute`.
data Relativity = Absolute | Relative
deriving (Eq, Show)
-- | FilePath is a collection of FileName
--
-- TODO: Eq and Ord are implemented using Show
-- This is not very efficient and would need to be improved
-- Also, it is possible the ordering is not necessarily what we want
-- in this case.
--
-- A FilePath is one of the following:
--
-- * An Absolute:
-- * starts with one of the following: "/"
-- * A relative:
-- * does not start with a "/"
--
-- * authorised:
-- * "/"
-- * "/file/path"
-- * "."
-- * ".."
-- * "work/haskell/hs-foundation"
--
-- * unauthorised
-- * "path//"
data FilePath = FilePath Relativity [FileName]
instance Show FilePath where
show = filePathToLString
instance Eq FilePath where
(==) a b = (==) (show a) (show b)
instance Ord FilePath where
compare a b = compare (show a) (show b)
-- | error associated to filepath manipulation
data FilePath_Invalid
= ContiguousPathSeparator
-- ^ this means there were 2 contiguous path separators.
--
-- This is not valid in Foundation's FilePath specifications
deriving (Typeable, Show)
instance Exception FilePath_Invalid
instance IsString FilePath where
fromString [] = FilePath Absolute mempty
fromString s@(x:xs)
| hasContigueSeparators s = throw ContiguousPathSeparator
| otherwise = FilePath relativity $ case relativity of
Absolute -> fromString <$> splitOn isSeparator xs
Relative -> fromString <$> splitOn isSeparator s
where
relativity :: Relativity
relativity = if isSeparator x then Absolute else Relative
-- | A filename (or path entity) in the FilePath
--
-- * Authorised
-- * ""
-- * "."
-- * ".."
-- * "foundation"
-- * Unauthorised
-- * "/"
-- * "file/"
-- * "/file"
-- * "file/path"
--
data FileName = FileName (UArray Word8)
deriving (Eq)
-- | errors related to FileName manipulation
data FileName_Invalid
= ContainsNullByte
-- ^ this means a null byte was found in the FileName
| ContainsSeparator
-- ^ this means a path separator was found in the FileName
| EncodingError ValidationFailure
-- ^ encoding error
| UnknownTrailingBytes (UArray Word8)
-- ^ some unknown trailing bytes found
deriving (Typeable, Show)
instance Exception FileName_Invalid
instance Show FileName where
show = fileNameToLString
instance IsString FileName where
fromString [] = FileName mempty
fromString xs | hasNullByte xs = throw ContainsNullByte
| hasSeparator xs = throw ContainsSeparator
| otherwise = FileName $ toBytes UTF8 $ fromString xs
hasNullByte :: [Char] -> Bool
hasNullByte = Data.List.elem '\0'
hasSeparator :: [Char] -> Bool
hasSeparator = Data.List.elem pathSeparatorC
isSeparator :: Char -> Bool
isSeparator = (==) pathSeparatorC
hasContigueSeparators :: [Char] -> Bool
hasContigueSeparators [] = False
hasContigueSeparators [_] = False
hasContigueSeparators (x1:x2:xs) =
(isSeparator x1 && x1 == x2) || hasContigueSeparators xs
instance Semigroup FileName where
(<>) (FileName a) (FileName b) = FileName $ a `mappend` b
instance Monoid FileName where
mempty = FileName mempty
mappend (FileName a) (FileName b) = FileName $ a `mappend` b
instance Path FilePath where
type PathEnt FilePath = FileName
type PathPrefix FilePath = Relativity
type PathSuffix FilePath = ()
(</>) = join
splitPath (FilePath r xs) = (r, xs, ())
buildPath (r, xs , _) = FilePath r xs
-- compared to the original </>, this type disallows appending an absolute FilePath to another FilePath
join :: FilePath -> FileName -> FilePath
join p (FileName x) | null x = p
join (FilePath r xs) x = FilePath r $ snoc xs x
filePathToString :: FilePath -> String
filePathToString (FilePath Absolute []) = fromString [pathSeparatorC]
filePathToString (FilePath Relative []) = fromString "."
filePathToString (FilePath Absolute fns) = cons pathSeparatorC $ filenameIntercalate fns
filePathToString (FilePath Relative fns) = filenameIntercalate fns
filenameIntercalate :: [FileName] -> String
filenameIntercalate = mconcat . Data.List.intersperse pathSeparator . fmap fileNameToString
-- | convert a FileName into a String
--
-- This function may throw an exception associated to the encoding
fileNameToString :: FileName -> String
fileNameToString (FileName fp) =
-- FIXME probably incorrect considering windows.
-- this is just to get going, so that we can reuse the System.IO functions
-- which work on [Char]
case fromBytes UTF8 fp of
(s, Nothing, bs)
| null bs -> s
| otherwise -> throw $ UnknownTrailingBytes bs
(_, Just err, _) -> throw $ EncodingError err
-- | conversion of FileName into a list of Char
--
-- this function may throw exceptions
fileNameToLString :: FileName -> [Char]
fileNameToLString = toList . fileNameToString
-- | conversion of a FilePath into a list of Char
--
-- this function may throw exceptions
filePathToLString :: FilePath -> [Char]
filePathToLString = toList . filePathToString
-- | build a file path from a given list of filename
--
-- this is unsafe and is mainly needed for testing purposes
unsafeFilePath :: Relativity -> [FileName] -> FilePath
unsafeFilePath = FilePath
-- | build a file name from a given ByteArray
--
-- this is unsafe and is mainly needed for testing purposes
unsafeFileName :: UArray Word8 -> FileName
unsafeFileName = FileName
extension :: FileName -> Maybe FileName
extension (FileName fn) = case splitOn (\c -> c == 0x2E) fn of
[] -> Nothing
[_] -> Nothing
xs -> Just $ FileName $ last $ nonEmpty_ xs
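-- A small usage sketch (illustrative only, assuming OverloadedStrings so the
-- IsString instances above are used for the literals):
--
-- >>> filePathToLString ("/usr/local" </> "bin")
-- "/usr/local/bin"
-- >>> fileNameToLString <$> extension "Setup.hs"
-- Just "hs"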
|
vincenthz/hs-foundation
|
foundation/Foundation/VFS/FilePath.hs
|
Haskell
|
bsd-3-clause
| 8,201
|
{-# LANGUAGE RecordWildCards, ViewPatterns #-}
module Development.Bake.Pretty(ovenPretty, ovenPrettyMerge, Pretty(..)) where
import Development.Bake.Core.Type
import Data.List.Extra
data Pretty a = Pretty String a deriving (Read,Show,Eq)
instance Stringy a => Stringy (Pretty a) where
stringyTo (Pretty a b) = a ++ "=" ++ stringyTo b
stringyFrom s = case breakOn "=" s of
(a,_:b) -> Pretty a $ stringyFrom b
_ -> Pretty "" $ stringyFrom s
stringyPretty (Pretty a b) = a ++ "=" ++ stringyPretty b
-- | Define an oven that allows @foo=...@ annotations to be added to the strings.
-- These can be used to annotate important information, e.g. instead of talking about
-- Git SHA1's, you can talk about @person=SHA1@ or @branch=SHA1@.
ovenPretty :: Oven state patch test -> Oven state (Pretty patch) test
ovenPretty oven@Oven{..} = oven
{ovenUpdate = \s ps -> ovenUpdate s (map unpretty ps)
,ovenPrepare = \s ps -> ovenPrepare s (map unpretty ps)
,ovenPatchExtra = \s p -> ovenPatchExtra s (fmap unpretty p)
,ovenSupersede = \p1 p2 -> ovenSupersede (unpretty p1) (unpretty p2)
}
where
unpretty :: Pretty a -> a
unpretty (Pretty _ x) = x
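-- For illustration only (hypothetical patch type whose Stringy instance
-- renders a SHA1): the annotation round-trips through the string form, e.g.
--
--   stringyTo (Pretty "alice" sha)  ==  "alice=" ++ stringyTo sha
--   stringyFrom "alice=deadbeef"    ==  Pretty "alice" (stringyFrom "deadbeef")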
-- | An oven suitable for use with 'ovenPretty' that supersedes patches which have the same
-- pretty name.
ovenPrettyMerge :: Oven state (Pretty patch) test -> Oven state (Pretty patch) test
ovenPrettyMerge oven = oven
{ovenSupersede = \(Pretty p1 _) (Pretty p2 _) -> p1 == p2
}
|
Pitometsu/bake
|
src/Development/Bake/Pretty.hs
|
Haskell
|
bsd-3-clause
| 1,502
|
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Home where
import Import as I
import Data.Time
import Data.List as I (isPrefixOf)
import Text.Blaze.Html (preEscapedToHtml)
import Text.Blaze.Html.Renderer.String (renderHtml)
import Yesod.Auth (requireAuthId)
import Yesod.Auth.HashDB (setSaltAndPasswordHash)
import Data.Digest.Pure.SHA (sha1, showDigest)
import Data.Text as T (append, pack, unpack)
import Data.ByteString.Lazy.Char8 as BS (pack)
import Data.Maybe
-- This is a handler function for the GET request method on the HomeR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getHomeR :: Handler Html
getHomeR = do
articles <- runDB $ selectList [ArticlePromoteHeadline ==. True, ArticleApproved ==. True] [Desc ArticleId]
users <- mapM articleAuthorName articles
let zippedArticles = I.zip articles users
defaultLayout $ do
aDomId <- newIdent
setTitle "乃村研究室ホームページ"
$(widgetFile "homepage")
getChangePassR :: Handler Html
getChangePassR = do
userId <- requireAuthId
user <- runDB $ get404 userId
defaultLayout $ do
aDomId <- newIdent
setTitle "パスワード変更"
$(widgetFile "changePass")
postChangePassR :: Handler Html
postChangePassR = do
userId <- requireAuthId
user <- runDB $ get404 userId
let salt = userSalt user
inputPassword <- runInputPost $ ireq textField "password"
runDB $ do
update userId
[ UserPassword =. (Just $ saltedHash salt inputPassword) ]
setMessage $ toHtml $ (userIdent user) <> " is updated."
redirect $ HomeR
-- local functions --
articleAuthorName :: Entity Article -> Handler (Maybe User)
articleAuthorName (Entity _ article) = do
runDB $ get (articleUser article)
displayAuthorName :: Maybe User -> Text
displayAuthorName (Just user) = userIdent user
displayAuthorName Nothing = "Unknown user"
takeHeadLine :: Html -> Html
takeHeadLine content = preEscapedToHtml $ prettyHeadLine $ renderHtml content
prettyHeadLine :: String -> String
prettyHeadLine article = gsub "_br_" "<br>" $ stripTags $ gsub "<br>" "_br_" $ foldArticle article
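-- For example (illustrative input), tags are stripped while <br> survives:
--
-- >>> prettyHeadLine "<p>Hello<br>World</p>"
-- "Hello<br>World\n"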
stripTags :: [Char] -> [Char]
stripTags str = stripTags' False str
stripTags' :: Bool -> [Char] -> [Char]
stripTags' bool (x:xs)
| xs == [] = if x == '>'
then []
else [x]
| bool == True = if x == '>'
then stripTags' False xs
else stripTags' True xs
| bool == False = if x == '<'
then stripTags' True xs
else x : (stripTags' False xs)
| otherwise = [] -- this case should not occur
gsub :: Eq a => [a] -> [a] -> [a] -> [a]
gsub _ _ [] = []
gsub x y str@(s:ss)
| I.isPrefixOf x str = y ++ gsub x y (drop (length x) str)
| otherwise = s:gsub x y ss
foldArticle :: String -> String
foldArticle content = case foldAtFolding content of
Just value -> value
Nothing -> I.unlines $ I.take defaultNumOfLines $ I.lines content
foldAtFolding :: String -> Maybe String
foldAtFolding content = if (I.length splitContent) > 1
then Just $ I.head splitContent
else Nothing
where splitContent = split "<!-- folding -->" content
defaultNumOfLines :: Int
defaultNumOfLines = 3
numOfNewArticles :: Int
numOfNewArticles = 3
-- We want to import Data.List.Utils (split), but...
split :: Eq a => [a] -> [a] -> [[a]]
split _ [] = []
split delim str =
let (firstline, remainder) = breakList (startswith delim) str
in
firstline : case remainder of
[] -> []
x -> if x == delim
then [] : []
else split delim
(drop (length delim) x)
startswith :: Eq a => [a] -> [a] -> Bool
startswith = isPrefixOf
breakList :: ([a] -> Bool) -> [a] -> ([a], [a])
breakList func = spanList (not . func)
spanList :: ([a] -> Bool) -> [a] -> ([a], [a])
spanList _ [] = ([],[])
spanList func list@(x:xs) =
if func list
then (x:ys,zs)
else ([],list)
where (ys,zs) = spanList func xs
saltedHash :: Text -> Text -> Text
saltedHash salt = T.pack . showDigest . sha1 . BS.pack . T.unpack . T.append salt
|
kobayashi1027/nomnichi-haskell
|
Handler/Home.hs
|
Haskell
|
bsd-2-clause
| 4,641
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Encode/Decode/Hash Add-on</title>
<maps>
<homeID>encoder</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/encoder/src/main/javahelp/org/zaproxy/addon/encoder/resources/help_ko_KR/helpset_ko_KR.hs
|
Haskell
|
apache-2.0
| 974
|
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1994-1998
\section[UniqSet]{Specialised sets, for things with @Uniques@}
Based on @UniqFMs@ (as you would expect).
Basically, the things need to be in class @Uniquable@.
-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
module UniqSet (
-- * Unique set type
UniqSet, -- newtype wrapper around UniqFM a
getUniqSet,
pprUniqSet,
-- ** Manipulating these sets
emptyUniqSet,
unitUniqSet,
mkUniqSet,
addOneToUniqSet, addListToUniqSet,
delOneFromUniqSet, delOneFromUniqSet_Directly, delListFromUniqSet,
delListFromUniqSet_Directly,
unionUniqSets, unionManyUniqSets,
minusUniqSet, uniqSetMinusUFM,
intersectUniqSets,
restrictUniqSetToUFM,
uniqSetAny, uniqSetAll,
elementOfUniqSet,
elemUniqSet_Directly,
filterUniqSet,
filterUniqSet_Directly,
sizeUniqSet,
isEmptyUniqSet,
lookupUniqSet,
lookupUniqSet_Directly,
partitionUniqSet,
mapUniqSet,
unsafeUFMToUniqSet,
nonDetEltsUniqSet,
nonDetKeysUniqSet,
nonDetFoldUniqSet,
nonDetFoldUniqSet_Directly
) where
import GhcPrelude
import UniqFM
import Unique
import Data.Coerce
import Outputable
import Data.Foldable (foldl')
import Data.Data
import qualified Data.Semigroup as Semi
-- Note [UniqSet invariant]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- UniqSet has the following invariant:
-- The keys in the map are the uniques of the values
-- It means that to implement mapUniqSet you have to update
-- both the keys and the values.
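-- For example (illustrative): if @f@ changes an element's 'Unique', naively
-- fmap-ing @f@ over the underlying 'UniqFM' would leave the map keyed by the
-- old uniques; hence 'mapUniqSet' below rebuilds the set from the mapped
-- elements rather than mapping over the values in place.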
newtype UniqSet a = UniqSet {getUniqSet' :: UniqFM a}
deriving (Data, Semi.Semigroup, Monoid)
emptyUniqSet :: UniqSet a
emptyUniqSet = UniqSet emptyUFM
unitUniqSet :: Uniquable a => a -> UniqSet a
unitUniqSet x = UniqSet $ unitUFM x x
mkUniqSet :: Uniquable a => [a] -> UniqSet a
mkUniqSet = foldl' addOneToUniqSet emptyUniqSet
addOneToUniqSet :: Uniquable a => UniqSet a -> a -> UniqSet a
addOneToUniqSet (UniqSet set) x = UniqSet (addToUFM set x x)
addListToUniqSet :: Uniquable a => UniqSet a -> [a] -> UniqSet a
addListToUniqSet = foldl' addOneToUniqSet
delOneFromUniqSet :: Uniquable a => UniqSet a -> a -> UniqSet a
delOneFromUniqSet (UniqSet s) a = UniqSet (delFromUFM s a)
delOneFromUniqSet_Directly :: UniqSet a -> Unique -> UniqSet a
delOneFromUniqSet_Directly (UniqSet s) u = UniqSet (delFromUFM_Directly s u)
delListFromUniqSet :: Uniquable a => UniqSet a -> [a] -> UniqSet a
delListFromUniqSet (UniqSet s) l = UniqSet (delListFromUFM s l)
delListFromUniqSet_Directly :: UniqSet a -> [Unique] -> UniqSet a
delListFromUniqSet_Directly (UniqSet s) l =
UniqSet (delListFromUFM_Directly s l)
unionUniqSets :: UniqSet a -> UniqSet a -> UniqSet a
unionUniqSets (UniqSet s) (UniqSet t) = UniqSet (plusUFM s t)
unionManyUniqSets :: [UniqSet a] -> UniqSet a
unionManyUniqSets = foldl' (flip unionUniqSets) emptyUniqSet
minusUniqSet :: UniqSet a -> UniqSet a -> UniqSet a
minusUniqSet (UniqSet s) (UniqSet t) = UniqSet (minusUFM s t)
intersectUniqSets :: UniqSet a -> UniqSet a -> UniqSet a
intersectUniqSets (UniqSet s) (UniqSet t) = UniqSet (intersectUFM s t)
restrictUniqSetToUFM :: UniqSet a -> UniqFM b -> UniqSet a
restrictUniqSetToUFM (UniqSet s) m = UniqSet (intersectUFM s m)
uniqSetMinusUFM :: UniqSet a -> UniqFM b -> UniqSet a
uniqSetMinusUFM (UniqSet s) t = UniqSet (minusUFM s t)
elementOfUniqSet :: Uniquable a => a -> UniqSet a -> Bool
elementOfUniqSet a (UniqSet s) = elemUFM a s
elemUniqSet_Directly :: Unique -> UniqSet a -> Bool
elemUniqSet_Directly a (UniqSet s) = elemUFM_Directly a s
filterUniqSet :: (a -> Bool) -> UniqSet a -> UniqSet a
filterUniqSet p (UniqSet s) = UniqSet (filterUFM p s)
filterUniqSet_Directly :: (Unique -> elt -> Bool) -> UniqSet elt -> UniqSet elt
filterUniqSet_Directly f (UniqSet s) = UniqSet (filterUFM_Directly f s)
partitionUniqSet :: (a -> Bool) -> UniqSet a -> (UniqSet a, UniqSet a)
partitionUniqSet p (UniqSet s) = coerce (partitionUFM p s)
uniqSetAny :: (a -> Bool) -> UniqSet a -> Bool
uniqSetAny p (UniqSet s) = anyUFM p s
uniqSetAll :: (a -> Bool) -> UniqSet a -> Bool
uniqSetAll p (UniqSet s) = allUFM p s
sizeUniqSet :: UniqSet a -> Int
sizeUniqSet (UniqSet s) = sizeUFM s
isEmptyUniqSet :: UniqSet a -> Bool
isEmptyUniqSet (UniqSet s) = isNullUFM s
lookupUniqSet :: Uniquable a => UniqSet b -> a -> Maybe b
lookupUniqSet (UniqSet s) k = lookupUFM s k
lookupUniqSet_Directly :: UniqSet a -> Unique -> Maybe a
lookupUniqSet_Directly (UniqSet s) k = lookupUFM_Directly s k
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetEltsUniqSet :: UniqSet elt -> [elt]
nonDetEltsUniqSet = nonDetEltsUFM . getUniqSet'
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetKeysUniqSet :: UniqSet elt -> [Unique]
nonDetKeysUniqSet = nonDetKeysUFM . getUniqSet'
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetFoldUniqSet :: (elt -> a -> a) -> a -> UniqSet elt -> a
nonDetFoldUniqSet c n (UniqSet s) = nonDetFoldUFM c n s
-- See Note [Deterministic UniqFM] to learn about nondeterminism.
-- If you use this please provide a justification why it doesn't introduce
-- nondeterminism.
nonDetFoldUniqSet_Directly:: (Unique -> elt -> a -> a) -> a -> UniqSet elt -> a
nonDetFoldUniqSet_Directly f n (UniqSet s) = nonDetFoldUFM_Directly f n s
-- See Note [UniqSet invariant]
mapUniqSet :: Uniquable b => (a -> b) -> UniqSet a -> UniqSet b
mapUniqSet f = mkUniqSet . map f . nonDetEltsUniqSet
-- Two 'UniqSet's are considered equal if they contain the same
-- uniques.
instance Eq (UniqSet a) where
UniqSet a == UniqSet b = equalKeysUFM a b
getUniqSet :: UniqSet a -> UniqFM a
getUniqSet = getUniqSet'
-- | 'unsafeUFMToUniqSet' converts a @'UniqFM' a@ into a @'UniqSet' a@
-- assuming, without checking, that it maps each 'Unique' to a value
-- that has that 'Unique'. See Note [UniqSet invariant].
unsafeUFMToUniqSet :: UniqFM a -> UniqSet a
unsafeUFMToUniqSet = UniqSet
instance Outputable a => Outputable (UniqSet a) where
ppr = pprUniqSet ppr
pprUniqSet :: (a -> SDoc) -> UniqSet a -> SDoc
pprUniqSet f (UniqSet s) = pprUniqFM f s
|
shlevy/ghc
|
compiler/utils/UniqSet.hs
|
Haskell
|
bsd-3-clause
| 6,618
|
{-# OPTIONS_GHC -Wall #-}
{-# Language TypeFamilies #-}
{-# Language DeriveGeneric #-}
module T8479 where
import GHC.Generics
import Data.Kind (Type)
class Blah (a :: Type -> Type) where
type F a :: Type -> Type
data Foo (f :: Type -> Type) a = MkFoo ((F f) a) deriving Generic1
|
sdiehl/ghc
|
testsuite/tests/generics/T8479.hs
|
Haskell
|
bsd-3-clause
| 285
|
{-# LANGUAGE DeriveDataTypeable #-}
----------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.GroupNavigation
-- Copyright : (c) nzeh@cs.dal.ca
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : nzeh@cs.dal.ca
-- Stability : unstable
-- Portability : unportable
--
-- Provides methods for cycling through groups of windows across
-- workspaces, ignoring windows that do not belong to this group. A
-- group consists of all windows matching a user-provided boolean
-- query.
--
-- Also provides a method for jumping back to the most recently used
-- window in any given group.
--
----------------------------------------------------------------------
module XMonad.Actions.GroupNavigation ( -- * Usage
-- $usage
Direction (..)
, nextMatch
, nextMatchOrDo
, nextMatchWithThis
, historyHook
) where
import Control.Monad.Reader
import Data.Foldable as Fold
import Data.Map as Map
import Data.Sequence as Seq
import Data.Set as Set
import Graphics.X11.Types
import Prelude hiding (concatMap, drop, elem, filter, null, reverse)
import XMonad.Core
import XMonad.ManageHook
import XMonad.Operations (windows, withFocused)
import qualified XMonad.StackSet as SS
import qualified XMonad.Util.ExtensibleState as XS
{- $usage
Import the module into your @~\/.xmonad\/xmonad.hs@:
> import XMonad.Actions.GroupNavigation
To support cycling forward and backward through all xterm windows, add
something like this to your keybindings:
> , ((modm , xK_t), nextMatch Forward (className =? "XTerm"))
> , ((modm .|. shiftMask, xK_t), nextMatch Backward (className =? "XTerm"))
These key combinations do nothing if there is no xterm window open.
If you rather want to open a new xterm window if there is no open
xterm window, use 'nextMatchOrDo' instead:
> , ((modm , xK_t), nextMatchOrDo Forward (className =? "XTerm") (spawn "xterm"))
> , ((modm .|. shiftMask, xK_t), nextMatchOrDo Backward (className =? "XTerm") (spawn "xterm"))
You can use 'nextMatchWithThis' with an arbitrary query to cycle
through all windows for which this query returns the same value as the
current window. For example, to cycle through all windows in the same
window class as the current window use:
> , ((modm , xK_f), nextMatchWithThis Forward className)
> , ((modm , xK_b), nextMatchWithThis Backward className)
Finally, you can define keybindings to jump to the most recent window
matching a certain Boolean query. To do this, you need to add
'historyHook' to your logHook:
> main = xmonad $ def { logHook = historyHook }
Then the following keybindings, for example, allow you to return to
the most recent xterm or emacs window or to simply to the most recent
window:
> , ((modm .|. controlMask, xK_e), nextMatch History (className =? "Emacs"))
> , ((modm .|. controlMask, xK_t), nextMatch History (className =? "XTerm"))
> , ((modm , xK_BackSpace), nextMatch History (return True))
Again, you can use 'nextMatchOrDo' instead of 'nextMatch' if you want
to execute an action if no window matching the query exists. -}
--- Basic cyclic navigation based on queries -------------------------
-- | The direction in which to look for the next match
data Direction = Forward -- ^ Forward from current window or workspace
| Backward -- ^ Backward from current window or workspace
| History -- ^ Backward in history
-- | Focuses the next window for which the given query produces the
-- same result as the currently focused window. Does nothing if there
-- is no focused window (i.e., the current workspace is empty).
nextMatchWithThis :: Eq a => Direction -> Query a -> X ()
nextMatchWithThis dir qry = withFocused $ \win -> do
prop <- runQuery qry win
nextMatch dir (qry =? prop)
-- | Focuses the next window that matches the given boolean query.
-- Does nothing if there is no such window. This is the same as
-- 'nextMatchOrDo' with alternate action @return ()@.
nextMatch :: Direction -> Query Bool -> X ()
nextMatch dir qry = nextMatchOrDo dir qry (return ())
-- | Focuses the next window that matches the given boolean query. If
-- there is no such window, perform the given action instead.
nextMatchOrDo :: Direction -> Query Bool -> X () -> X ()
nextMatchOrDo dir qry act = orderedWindowList dir
>>= focusNextMatchOrDo qry act
-- Produces the action to perform depending on whether there's a
-- matching window
focusNextMatchOrDo :: Query Bool -> X () -> Seq Window -> X ()
focusNextMatchOrDo qry act = findM (runQuery qry)
>=> maybe act (windows . SS.focusWindow)
-- Returns the list of windows ordered by workspace as specified in
-- ~/.xmonad/xmonad.hs
orderedWindowList :: Direction -> X (Seq Window)
orderedWindowList History = liftM (\(HistoryDB w ws) -> maybe ws (ws |>) w) XS.get
orderedWindowList dir = withWindowSet $ \ss -> do
wsids <- asks (Seq.fromList . workspaces . config)
let wspcs = orderedWorkspaceList ss wsids
wins = dirfun dir
$ Fold.foldl' (><) Seq.empty
$ fmap (Seq.fromList . SS.integrate' . SS.stack) wspcs
cur = SS.peek ss
return $ maybe wins (rotfun wins) cur
where
dirfun Backward = Seq.reverse
dirfun _ = id
rotfun wins x = rotate $ rotateTo (== x) wins
-- Returns the ordered workspace list as specified in ~/.xmonad/xmonad.hs
orderedWorkspaceList :: WindowSet -> Seq String -> Seq WindowSpace
orderedWorkspaceList ss wsids = rotateTo isCurWS wspcs'
where
wspcs = SS.workspaces ss
wspcsMap = Fold.foldl' (\m ws -> Map.insert (SS.tag ws) ws m) Map.empty wspcs
wspcs' = fmap (\wsid -> wspcsMap ! wsid) wsids
isCurWS ws = SS.tag ws == SS.tag (SS.workspace $ SS.current ss)
--- History navigation, requires a layout modifier -------------------
-- The state extension that holds the history information
data HistoryDB = HistoryDB (Maybe Window) -- currently focused window
(Seq Window) -- previously focused windows
deriving (Read, Show, Typeable)
instance ExtensionClass HistoryDB where
initialValue = HistoryDB Nothing Seq.empty
extensionType = PersistentExtension
-- | Action that needs to be executed as a logHook to maintain the
-- focus history of all windows as the WindowSet changes.
historyHook :: X ()
historyHook = XS.get >>= updateHistory >>= XS.put
-- Updates the history in response to a WindowSet change
updateHistory :: HistoryDB -> X HistoryDB
updateHistory (HistoryDB oldcur oldhist) = withWindowSet $ \ss -> do
let newcur = SS.peek ss
wins = Set.fromList $ SS.allWindows ss
newhist = flt (flip Set.member wins) (ins oldcur oldhist)
return $ HistoryDB newcur (del newcur newhist)
where
ins x xs = maybe xs (<| xs) x
del x xs = maybe xs (\x' -> flt (/= x') xs) x
--- Two replacements for Seq.filter and Seq.breakl available only in
--- containers-0.3.0.0, which only ships with ghc 6.12. Once we
--- decide to no longer support ghc < 6.12, these should be replaced
--- with Seq.filter and Seq.breakl.
flt :: (a -> Bool) -> Seq a -> Seq a
flt p = Fold.foldl (\xs x -> if p x then xs |> x else xs) Seq.empty
brkl :: (a -> Bool) -> Seq a -> (Seq a, Seq a)
brkl p xs = flip Seq.splitAt xs
$ snd
$ Fold.foldr (\x (i, j) -> if p x then (i-1, i-1) else (i-1, j)) (l, l) xs
where
l = Seq.length xs
--- Some sequence helpers --------------------------------------------
-- Rotates the sequence by one position
rotate :: Seq a -> Seq a
rotate xs = rotate' (viewl xs)
where
rotate' EmptyL = Seq.empty
rotate' (x' :< xs') = xs' |> x'
-- Rotates the sequence until an element matching the given condition
-- is at the beginning of the sequence.
rotateTo :: (a -> Bool) -> Seq a -> Seq a
rotateTo cond xs = let (lxs, rxs) = brkl cond xs in rxs >< lxs
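-- For example (illustrative): rotateTo (== 3) (Seq.fromList [1,2,3,4]) yields
-- fromList [3,4,1,2], and 'rotate' moves the head element to the end.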
--- A monadic find ---------------------------------------------------
-- Applies the given action to every sequence element in turn until
-- the first element is found for which the action returns true. The
-- remaining elements in the sequence are ignored.
findM :: Monad m => (a -> m Bool) -> Seq a -> m (Maybe a)
findM cond xs = findM' cond (viewl xs)
where
findM' _ EmptyL = return Nothing
findM' qry (x' :< xs') = do
isMatch <- qry x'
if isMatch
then return (Just x')
else findM qry xs'
|
pjones/xmonad-test
|
vendor/xmonad-contrib/XMonad/Actions/GroupNavigation.hs
|
Haskell
|
bsd-2-clause
| 8,762
|
module Distribution.Client.Dependency.Modular.Index where
import Data.List as L
import Data.Map as M
import Prelude hiding (pi)
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.Tree
-- | An index contains information about package instances. This is a nested
-- dictionary. Package names are mapped to instances, which in turn is mapped
-- to info.
type Index = Map PN (Map I PInfo)
-- | Info associated with a package instance.
-- Currently, dependencies, flags, encapsulations and failure reasons.
-- Packages that have a failure reason recorded for them are disabled
-- globally, for reasons external to the solver. We currently use this
-- for shadowing which essentially is a GHC limitation, and for
-- installed packages that are broken.
data PInfo = PInfo (FlaggedDeps PN) FlagInfo Encaps (Maybe FailReason)
deriving (Show)
-- | Encapsulations. A list of package names.
type Encaps = [PN]
mkIndex :: [(PN, I, PInfo)] -> Index
mkIndex xs = M.map M.fromList (groupMap (L.map (\ (pn, i, pi) -> (pn, (i, pi))) xs))
groupMap :: Ord a => [(a, b)] -> Map a [b]
groupMap xs = M.fromListWith (flip (++)) (L.map (\ (x, y) -> (x, [y])) xs)
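-- For example (illustrative):
--
-- >>> groupMap [(1,'a'), (2,'b'), (1,'c')]
-- fromList [(1,"ac"),(2,"b")]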
|
DavidAlphaFox/ghc
|
libraries/Cabal/cabal-install/Distribution/Client/Dependency/Modular/Index.hs
|
Haskell
|
bsd-3-clause
| 1,304
|
module Test13 where
f ((x : xs)) = x : xs
g = f (1 : [1, 2])
|
kmate/HaRe
|
old/testing/refacFunDef/Test13_AstOut.hs
|
Haskell
|
bsd-3-clause
| 64
|
-- |Simple vectorised constructors and projections.
--
module Vectorise.Vect
( Vect, VVar, VExpr, VBind
, vectorised
, lifted
, mapVect
, vVarType
, vNonRec
, vRec
, vVar
, vType
, vTick
, vLet
, vLams
, vVarApps
, vCaseDEFAULT
)
where
import CoreSyn
import Type ( Type )
import Var
-- |Contains the vectorised and lifted versions of some thing.
--
type Vect a = (a,a)
type VVar = Vect Var
type VExpr = Vect CoreExpr
type VBind = Vect CoreBind
-- |Get the vectorised version of a thing.
--
vectorised :: Vect a -> a
vectorised = fst
-- |Get the lifted version of a thing.
--
lifted :: Vect a -> a
lifted = snd
-- |Apply some function to both the vectorised and lifted versions of a thing.
--
mapVect :: (a -> b) -> Vect a -> Vect b
mapVect f (x, y) = (f x, f y)
-- |Combine vectorised and lifted versions of two things componentwise.
--
zipWithVect :: (a -> b -> c) -> Vect a -> Vect b -> Vect c
zipWithVect f (x1, y1) (x2, y2) = (f x1 x2, f y1 y2)
-- |Get the type of a vectorised variable.
--
vVarType :: VVar -> Type
vVarType = varType . vectorised
-- |Wrap a vectorised variable as a vectorised expression.
--
vVar :: VVar -> VExpr
vVar = mapVect Var
-- |Wrap a vectorised type as a vectorised expression.
--
vType :: Type -> VExpr
vType ty = (Type ty, Type ty)
-- |Make a vectorised note.
--
vTick :: Tickish Id -> VExpr -> VExpr
vTick = mapVect . Tick
-- |Make a vectorised non-recursive binding.
--
vNonRec :: VVar -> VExpr -> VBind
vNonRec = zipWithVect NonRec
-- |Make a vectorised recursive binding.
--
vRec :: [VVar] -> [VExpr] -> VBind
vRec vs es = (Rec (zip vvs ves), Rec (zip lvs les))
where
(vvs, lvs) = unzip vs
(ves, les) = unzip es
-- |Make a vectorised let expression.
--
vLet :: VBind -> VExpr -> VExpr
vLet = zipWithVect Let
-- |Make a vectorised lambda abstraction.
--
-- The lifted version also binds the lifting context 'lc'.
--
vLams :: Var -- ^ Var bound to the lifting context.
-> [VVar] -- ^ Parameter vars for the abstraction.
-> VExpr -- ^ Body of the abstraction.
-> VExpr
vLams lc vs (ve, le)
= (mkLams vvs ve, mkLams (lc:lvs) le)
where
(vvs, lvs) = unzip vs
-- |Apply an expression to a set of argument variables.
--
-- The lifted version is also applied to the variable of the lifting context.
--
vVarApps :: Var -> VExpr -> [VVar] -> VExpr
vVarApps lc (ve, le) vvs
= (ve `mkVarApps` vs, le `mkVarApps` (lc : ls))
where
(vs, ls) = unzip vvs
vCaseDEFAULT :: VExpr  -- ^ Scrutinee.
             -> VVar   -- ^ Case binder.
             -> Type   -- ^ Type of the vectorised version.
             -> Type   -- ^ Type of the lifted version.
             -> VExpr  -- ^ Body of the alternative.
             -> VExpr
vCaseDEFAULT (vscrut, lscrut) (vbndr, lbndr) vty lty (vbody, lbody)
= (Case vscrut vbndr vty (mkDEFAULT vbody),
Case lscrut lbndr lty (mkDEFAULT lbody))
where
mkDEFAULT e = [(DEFAULT, [], e)]
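-- A small illustrative sketch (not part of the original module): since
-- @Vect a@ is just a pair, 'mapVect', 'vectorised' and 'lifted' behave like
-- ordinary pair operations, e.g.
--
-- >>> mapVect negate (1, 2)
-- (-1,-2)
-- >>> vectorised (mapVect negate (1, 2))
-- -1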
|
oldmanmike/ghc
|
compiler/vectorise/Vectorise/Vect.hs
|
Haskell
|
bsd-3-clause
| 2,935
|
{-# LANGUAGE CPP #-}
module X86.RegInfo (
mkVirtualReg,
regDotColor
)
where
#include "nativeGen/NCG.h"
#include "HsVersions.h"
import Size
import Reg
import Outputable
import Platform
import Unique
import UniqFM
import X86.Regs
mkVirtualReg :: Unique -> Size -> VirtualReg
mkVirtualReg u size
= case size of
FF32 -> VirtualRegSSE u
FF64 -> VirtualRegSSE u
FF80 -> VirtualRegD u
_other -> VirtualRegI u
regDotColor :: Platform -> RealReg -> SDoc
regDotColor platform reg
= let Just str = lookupUFM (regColors platform) reg
in text str
regColors :: Platform -> UniqFM [Char]
regColors platform = listToUFM (normalRegColors platform ++ fpRegColors)
normalRegColors :: Platform -> [(Reg,String)]
normalRegColors platform
| target32Bit platform = [ (eax, "#00ff00")
, (ebx, "#0000ff")
, (ecx, "#00ffff")
, (edx, "#0080ff") ]
| otherwise = [ (rax, "#00ff00"), (eax, "#00ff00")
, (rbx, "#0000ff"), (ebx, "#0000ff")
, (rcx, "#00ffff"), (ecx, "#00ffff")
, (rdx, "#0080ff"), (edx, "#00ffff")
, (r8, "#00ff80")
, (r9, "#008080")
, (r10, "#0040ff")
, (r11, "#00ff40")
, (r12, "#008040")
, (r13, "#004080")
, (r14, "#004040")
, (r15, "#002080") ]
fpRegColors :: [(Reg,String)]
fpRegColors =
[ (fake0, "#ff00ff")
, (fake1, "#ff00aa")
, (fake2, "#aa00ff")
, (fake3, "#aa00aa")
, (fake4, "#ff0055")
, (fake5, "#5500ff") ]
++ zip (map regSingle [24..39]) (repeat "red")
|
forked-upstream-packages-for-ghcjs/ghc
|
compiler/nativeGen/X86/RegInfo.hs
|
Haskell
|
bsd-3-clause
| 1,869
|
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE FlexibleInstances #-}
{- With "hugs -98 +o test.hs" gives me:
ERROR "test.hs":8 - Cannot justify constraints in instance member binding
*** Expression : fromStr
*** Type : FromStr [a] => String -> [a]
*** Given context : FromStr [a]
*** Constraints : FromStr [a]
Adding the constraint "FromStr a" to the declaration of fromStr fixes
the problem, but that seems like it should be redundant. Removing the
second instance (lines 10-11) also fixes the problem, interestingly enough.
/Bjorn Bringert -}
-- August 08: on reflection I think a complaint about overlapping
-- instances for line 8 is absolutely right, so I've changed this to
-- expected-failure
-- Sept 08: on further reflection (!) I'm changing it back
-- See Note [Subtle interaction of recursion and overlap]
-- in TcInstDcls
module ShouldCompile where
class FromStr a where
fromStr :: String -> a
typeError :: FromStr a => a -> a
typeError t = error "type error"
instance {-# OVERLAPPABLE #-} FromStr [a] where
fromStr _ = typeError undefined -- line 8
instance {-# OVERLAPPING #-} FromStr [(String,a)] where -- line 10
fromStr _ = typeError undefined -- line 11
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/tc176.hs
|
Haskell
|
bsd-3-clause
| 1,272
|
{-# LANGUAGE ExistentialQuantification, TemplateHaskell #-}
module Light.Primitive
( Primitive, primitive, primitiveShape, primitiveMaterial
, Material(..)
)
where
import Light.Shape
import Light.Geometry.Transform
data Material = Material deriving (Eq, Show, Read)
data Primitive = Primitive { primitiveShape :: ShapeBox
, primitiveMaterial :: Material
}
deriving (Show)
primitive :: (Shape s, Transformable s, Show s) => s -> Material -> Primitive
primitive s = Primitive (shapeBox s)
instance Shape Primitive where
shapeTransform = shapeTransform . primitiveShape
bound = bound . primitiveShape
worldBound = worldBound . primitiveShape
surfaceArea = surfaceArea . primitiveShape
intersects r s = intersects r (primitiveShape s)
intersect r s = intersect r (primitiveShape s)
|
jtdubs/Light
|
src/Light/Primitive.hs
|
Haskell
|
mit
| 908
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.DelayNode
(js_getDelayTime, getDelayTime, DelayNode, castToDelayNode,
gTypeDelayNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"delayTime\"]"
js_getDelayTime :: JSRef DelayNode -> IO (JSRef AudioParam)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DelayNode.delayTime Mozilla DelayNode.delayTime documentation>
getDelayTime :: (MonadIO m) => DelayNode -> m (Maybe AudioParam)
getDelayTime self
= liftIO ((js_getDelayTime (unDelayNode self)) >>= fromJSRef)
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/DelayNode.hs
|
Haskell
|
mit
| 1,351
|
--------------------------------------------------------------------------------
-- |
-- | Module      : NGL.Shape
-- | Copyright : (c) Vladimir Lopatin 2014
-- | License : BSD3
-- |
-- | Maintainer : Vladimir Lopatin <madjestic13@gmail.com>
-- | Stability : experimental
-- | Portability : untested
-- |
-- | The NGL library works by dumping a vertex array into an OpenGL buffer.
-- |
-- | Basic shape types should be of 2 kinds:
-- |   Shapes positioned by center
-- |   Shapes positioned by bottom-left corner
--------------------------------------------------------------------------------
module NGL.Shape where
import Graphics.Rendering.OpenGL (Vertex2(..))
import NGL.Utils
data Shape = Circle Point Radius Divisions
| Square Point Side
| Rect Point Point
| Line Point Point Float -- | Ordered pair to store directionality
| Triangle Point Point Point
| Quad [Point] -- | BL vertex TR vertex
| Polygon [Point] -- | [Triangle] ?
| Polyline [Point] Float
| Curve [Point]
deriving Show
data Transform = Rotate2D Float Point
| Translate2D Point Point
deriving Show
type Picture =[Vertex2 Float]
type Point =(Float, Float)
type Radius = Float
type Side = Float
type Divisions = Int
toVertex :: [[Point]] -> Picture
toVertex xs = map vertex $ concat xs
vertex :: Point -> Vertex2 Float
vertex p = (\(k,l) -> Vertex2 k l) p
rotate :: Float -> [(Float, Float)] -> [(Float, Float)]
rotate theta = rotate2D' (toRadians theta)
shape :: Shape -> [Point]
shape (Square pos side) = square pos side
shape (Circle pos rad divs) = circle pos rad divs
shape (Rect bl tr) = rect bl tr -- | bl := bottom left, tr := top right
shape (Line p1 p2 w) = line p1 p2 w
shape (Polyline ps w) = polyline ps w
shape (Triangle p1 p2 p3) = triangle p1 p2 p3
polyline :: [Point] -> Float -> [Point]
polyline ps w = concatMap (\(x,y) -> line x y w) $ pairs $ abbcca ps
triangle :: Point -> Point -> Point -> [Point]
triangle p1 p2 p3 = [p1, p2, p3]
square :: Point -> Float -> [Point]
square pos side = [p1, p2, p3,
p1, p3, p4]
where
x = fst pos
y = snd pos
r = side/2
p1 = (x + r, y + r)
p2 = (x - r, y + r)
p3 = (x - r, y - r)
p4 = (x + r, y - r)
abbcca :: [a] -> [a]
abbcca (x:xs) = [x] ++ (concat $ map (\(x,y) -> [x,y]) $ map (\x -> (x, x)) (init xs)) ++ [last xs]
circle :: Point -> Float -> Int -> [Point]
circle pos r divs =
let
x = fst pos
y = snd pos
divs' = fromIntegral divs
sines = map ((y +).(r *).sin) [0.0, 2*pi/divs' .. 2*pi]
cosines = map ((x +).(r *).cos) [0.0, 2*pi/divs' .. 2*pi]
in
concat $ insertpos $ abbcca $ zip sines cosines
where
insertpos (x:y:[]) = [[pos,x,y]]
insertpos (x:y:xs) = [pos,x,y] : insertpos xs
rect :: Point -> Point -> [Point]
rect (x1,y1) (x2,y2) = [(x2,y2),(x1,y2),(x1,y1),
(x2,y2),(x1,y1),(x2,y1)]
line :: Point -> Point -> Float -> [Point]
line (x1,y1) (x2,y2) w = map (addVectors (x1,y1)) $ rotate2D' theta $ rect (0.0,-w/2) (len,w/2) -- rotation is wrong
where
(x,y) = normalize $ ((x2-x1),(y2-y1))
theta = signum y * acos x -- | angle in radians
len = sqrt((x2-x1)^2+ (y2-y1)^2)
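-- A hedged usage sketch (not part of the original module): 'shape' turns a
-- centre-positioned 'Square' into the two triangles that OpenGL will render,
-- e.g.
--
-- >>> shape (Square (0,0) 1)
-- [(0.5,0.5),(-0.5,0.5),(-0.5,-0.5),(0.5,0.5),(-0.5,-0.5),(0.5,-0.5)]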
|
ublubu/zombieapaperclypse
|
NGL/Shape.hs
|
Haskell
|
mit
| 3,609
|
{-# LANGUAGE OverloadedStrings #-}
-- | This module provides functionality to manipulate raw transaction. It
-- automatically interprets transactions using the `bitcoin-tx` package, so
-- you can work with actual 'Btc.Transaction' objects rather than their
-- serialized format.
module Network.Bitcoin.Api.Transaction where
import Data.Aeson
import Data.Aeson.Lens
import Data.Maybe (fromMaybe, catMaybes)
import Control.Lens ((^.), (^?))
import qualified Data.Base58String as B58S
import qualified Data.Bitcoin.Block as Btc hiding (encode, decode)
import qualified Data.Bitcoin.Transaction as Btc
import qualified Data.Bitcoin.Types as BT
import qualified Network.Bitcoin.Api.Blockchain as Blockchain
import qualified Network.Bitcoin.Api.Internal as I
import qualified Network.Bitcoin.Api.Types as T
import Network.Bitcoin.Api.Types.UnspentTransaction hiding (confirmations)
-- | Creates a new transaction, but does not sign or submit it yet. You provide
-- a set of unspent transactions that you have the authority to spend, and you
-- provide a destination for all your bitcoins.
--
-- __WARNING: Check your math!__ If the sum of the Btc in unspent transactions
-- of your request is more than the sum of the Btc in the destinations, the
-- difference becomes the miner's fee. It is reasonable to leave a small
-- amount for the miners, but if there is a large discrepancy between input
-- and output, there are no guarantees you will be warned.
--
-- All this function does is create a default script on how to spend coins from
-- one or more inputs to one or more outputs. Checking and verifying the
-- transaction will only happen when you actually submit the transaction to
-- the network.
create :: T.Client -- ^ The client session we are using
-> [UnspentTransaction] -- ^ The inputs we are using for this transaction
-> [(BT.Address, BT.Btc)] -- ^ A key/value pair which associates a
-- destination address with a specific amount
-- of bitcoins to send.
-> IO Btc.Transaction
create client utxs outputs =
let configuration = [toJSON (map txToOutpoint utxs), object (map outToAddress outputs)]
txToOutpoint tx = object [
("txid", toJSON (tx ^. transactionId)),
("vout", toJSON (tx ^. vout))]
outToAddress (addr, btc) = (B58S.toText addr, toJSON btc)
in (return . Btc.decode) =<< I.call client "createrawtransaction" configuration
-- | Signs a raw transaction with configurable parameters.
sign :: T.Client -- ^ Our client context
-> Btc.Transaction -- ^ The transaction to sign
-> Maybe [UnspentTransaction] -- ^ Previous outputs being spent by this transaction
-> Maybe [BT.PrivateKey] -- ^ Private keys to use for signing.
-> IO (Btc.Transaction, Bool) -- ^ The signed transaction, and a boolean that is true
                                        -- when the signing is complete, and false when
-- more signatures are required.
sign client tx utxs pks =
let configuration = [configurationTx tx, configurationUtxs utxs, configurationPks pks]
configurationTx tx' =
toJSON (Btc.encode tx')
configurationUtxs Nothing = Null
configurationUtxs (Just utxs') =
toJSON (map utxToDependency utxs')
where
utxToDependency utx = object [
("txid", toJSON (utx ^. transactionId)),
("vout", toJSON (utx ^. vout)),
("scriptPubKey", toJSON (utx ^. scriptPubKey)),
("redeemScript", toJSON (utx ^. redeemScript))]
configurationPks Nothing = Null
configurationPks (Just privateKeys) =
toJSON privateKeys
extractTransaction res =
maybe
(error "Incorrect JSON response")
Btc.decode
(res ^? key "hex" . _JSON)
extractCompleted res =
fromMaybe
(error "Incorrect JSON response")
(res ^? key "complete" . _JSON)
in do
res <- I.call client "signrawtransaction" configuration :: IO Value
return (extractTransaction res, extractCompleted res)
-- | Sends a transaction through the Bitcoin network
send :: T.Client
-> Btc.Transaction
-> IO BT.TransactionId
send client tx =
let configuration = [toJSON (Btc.encode tx)]
in I.call client "sendrawtransaction" configuration
-- | Returns a list of transactions that occurred since a certain block height.
-- If no block height is provided, the genesis block with height 0 is assumed.
-- The transactions returned are listed chronologically.
list :: T.Client -- ^ Our client session context
-> Maybe Integer -- ^ The offset / height we should start listing transactions
-> Maybe Integer -- ^ Minimum amount of confirmations for a transaction to have. Should be 1 or higher.
-- A default value of 6 is used.
-> IO [Btc.Transaction]
list client Nothing confirmations = list client (Just 0) confirmations
list client offset Nothing = list client offset (Just 6)
list client (Just offset) (Just confirmations) = do
limit <- Blockchain.getBlockCount client
blocks <- mapM (Blockchain.getBlock client) =<< mapM (Blockchain.getBlockHash client) [offset..limit - confirmations]
return $ foldl (\lhs rhs -> lhs ++ rhs ^. Btc.blockTxns) [] (catMaybes blocks)
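-- A minimal usage sketch, not part of this module. It assumes a connected
-- 'client', a list 'utxos' of unspent transactions we control, and a
-- destination address 'addr'; all three names (and the 0.1 Btc amount) are
-- hypothetical:
--
-- > roundTrip = do
-- >   tx          <- create client utxos [(addr, 0.1)]
-- >   (signed, _) <- sign client tx (Just utxos) Nothing
-- >   send client signed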
|
solatis/haskell-bitcoin-api
|
src/Network/Bitcoin/Api/Transaction.hs
|
Haskell
|
mit
| 5,718
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
import Control.Lens
import Control.Lens.TH
data Record1 a = Record1
{ _a :: Int
, _b :: Maybe a
} deriving Show
data Record2 = Record2
{ _c :: String
, _d :: [Int]
} deriving Show
$(makeLenses ''Record1)
$(makeLenses ''Record2)
records = [
Record1 {
_a = 1,
_b = Nothing
},
Record1 {
_a = 2,
_b = Just $ Record2 {
_c = "Picard",
_d = [1,2,3]
}
},
Record1 {
_a = 3,
_b = Just $ Record2 {
_c = "Riker",
_d = [4,5,6]
}
},
Record1 {
_a = 4,
_b = Just $ Record2 {
_c = "Data",
_d = [7,8,9]
}
}
]
-- Some abstract traversals.
ids = traverse.a
names = traverse.b._Just.c
nums = traverse.b._Just.d
list2 = traverse.b._Just.d.ix 2
-- Modify/read/extract in terms of generic traversals.
-- Modify to set all 'id' fields to 0
ex1 = set ids 0 records
-- Return a view of the concatenated 'd' fields for all nested records.
ex2 = view nums records
-- [1,2,3,4,5,6,7,8,9]
-- Increment all 'id' fields by 1
ex3 = over ids (+1) records
-- Return a list of all 'c' fields.
ex4 = toListOf names records
-- ["Picard","Riker","Data"]
-- Return the third element (index 2) of all 'd' fields.
ex5 = toListOf list2 records
-- [3,6,9]
|
riwsky/wiwinwlh
|
src/lens.hs
|
Haskell
|
mit
| 1,357
|
module Euler.Problems.Euler012
(
euler012
) where
import Data.List (group)
import Euler.Primes (primeFactors)
euler012 :: () -> Int
euler012 _ = fromIntegral $ head $ dropWhile ((<501) . divisors) $ drop 1 triangles
where
triangles = 1 : zipWith (+) triangles [2..]
divisors = product . map ((+1) . length) . group . primeFactors
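-- Illustrative note (not part of the original solution): 'triangles' is the
-- lazily unfolded list of triangle numbers 1, 3, 6, 10, 15, ..., and
-- 'divisors' counts divisors from the prime factorisation, e.g.
-- divisors 28 == product [2+1, 1+1] == 6, since 28 = 2^2 * 7.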
|
b52/projecteuler
|
src/Euler/Problems/Euler012.hs
|
Haskell
|
mit
| 357
|
main = do
line <- getLine
if null line
then return ()
else do
putStrLn $ reverseWords line
main
reverseWords :: String -> String
reverseWords = unwords . map reverse . words
|
fabriceleal/learn-you-a-haskell
|
09/infinite_input.hs
|
Haskell
|
mit
| 228
|
module Util.Util where
import Graphics.Rendering.OpenGL
-- |'fib' returns given Fibonacci number
fib :: Int -> Int
fib = (!!) fibs
-- |'fibs' is a list of Fibonacci numbers
fibs :: [Int]
fibs = 0 : 1 : zipWith (+) fibs (tail fibs)
cube w =
renderPrimitive Quads $ do
vertex $ Vertex3 w w w
vertex $ Vertex3 w w (-w)
vertex $ Vertex3 w (-w) (-w)
vertex $ Vertex3 w (-w) w
vertex $ Vertex3 w w w
vertex $ Vertex3 w w (-w)
vertex $ Vertex3 (-w) w (-w)
vertex $ Vertex3 (-w) w w
vertex $ Vertex3 w w w
vertex $ Vertex3 w (-w) w
vertex $ Vertex3 (-w) (-w) w
vertex $ Vertex3 (-w) w w
vertex $ Vertex3 (-w) w w
vertex $ Vertex3 (-w) w (-w)
vertex $ Vertex3 (-w) (-w) (-w)
vertex $ Vertex3 (-w) (-w) w
vertex $ Vertex3 w (-w) w
vertex $ Vertex3 w (-w) (-w)
vertex $ Vertex3 (-w) (-w) (-w)
vertex $ Vertex3 (-w) (-w) w
vertex $ Vertex3 w w (-w)
vertex $ Vertex3 w (-w) (-w)
vertex $ Vertex3 (-w) (-w) (-w)
vertex $ Vertex3 (-w) w (-w)
plane width = renderPrimitive Quads $ do
let texCoord2f = texCoord :: TexCoord2 GLfloat -> IO ()
vertex3f = vertex :: Vertex3 GLfloat -> IO ()
w = width / 2
texCoord2f $ TexCoord2 0 1
vertex3f $ Vertex3 (-w) (-w) 0
texCoord2f $ TexCoord2 1 1
vertex3f $ Vertex3 w (-w) 0
texCoord2f $ TexCoord2 1 0
vertex3f $ Vertex3 w w 0
texCoord2f $ TexCoord2 0 0
vertex3f $ Vertex3 (-w) w 0
points :: Int -> [(GLfloat,GLfloat,GLfloat)]
points n' = let n = fromIntegral n' in map (\k -> let t = 2*pi*k/n in (sin t, cos t, 0.0)) [1..n]
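-- A small hedged example (not part of the original module) of the memoised
-- Fibonacci definitions above:
--
-- >>> take 8 fibs
-- [0,1,1,2,3,5,8,13]
-- >>> fib 10
-- 55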
|
andrey013/mynd
|
src/Util/Util.hs
|
Haskell
|
mit
| 1,595
|
module Rebase.Data.Profunctor.Adjunction
(
module Data.Profunctor.Adjunction
)
where
import Data.Profunctor.Adjunction
|
nikita-volkov/rebase
|
library/Rebase/Data/Profunctor/Adjunction.hs
|
Haskell
|
mit
| 122
|
{-# LANGUAGE RecordWildCards, DeriveGeneric #-}
module Exp where
import GHC.Generics
import Language.Haskell.Exts.Annotated
-- import Control.Monad
import Control.Applicative
import Control.Arrow
import Text.PrettyPrint.GenericPretty
import Data.List
import Data.Char
import Data.Maybe
import Control.Conditional
import Safe
import Debug.Trace
try :: IO ()
try = do
putStrLn "begin"
s <-readFile "./test/Data/Blah.hs"
let (hl, e) = case parseFileContentsWithComments defaultParseMode{parseFilename = "Blah.hs"} s of
ParseOk (m, cms) -> (sort $ map hlComment cms ++ hlModule m, [])
err -> ([], prty . show $ err)
putStrLn e
putStrLn . pr hl $ s
-- putStrLn (prty . show $ hl)
putStrLn "done"
pr :: [Highlight] -> String -> String
pr hl = drop 1 . reverse . fst . foldl _scan ("1", (hl, 1,0))
where
_scan (s@(chp:_), st) ch = case ch of
'\n' -> _ignore (s, if chp == '\r' then st else _incL st) ch
'\r' -> _ignore (s, if chp == '\n' then st else _incL st) ch
'\t' -> _ignore (s, head . drop 8 . iterate _incC $ st) ch
_ -> if isSpace ch then _ignore (s, _incC st) ch else _proc (s, _discard . _incC $ st) $ ch
_scan x _ = error $ "_scan mis match " ++ show x
_incL (hs, l, _) = (hs, l + 1, 0)
_incC (hs, l, c) = (hs, l, c + 1)
_discard x@([],_,_) = x
_discard (h:hs, l, c) | hlEnd h <= (l,c) = _discard (hs, l, c)
| otherwise = (h:hs, l, c)
_discard' (s, st) = (s, _discard st)
_ignore (s, st) ch = (ch:s, st)
_proc (s, st@([],_,_)) ch = (ch : s, st)
_proc (s, st@(h:_, l, c)) ch = (_end (ch : _start s h l c) h l c, st)
_start s h l c | hlStart h == (l,c) = foldl (flip (:)) s (_hlO . hlType $ h)
| otherwise = s
_end s h l c | hlEnd h == (l,c+1) = foldl (flip (:)) s (_hlC . hlType $ h)
| otherwise = s
_hlO = (++"|") . ("<"++) . _hlId
_hlC = (++">") . ("|"++) . _hlId
_hlId hlt = case hlt of
HlComment -> "co"
HlModuleName -> "mn"
HlKeyword -> "kw"
HlImport -> "im"
HlPragma -> "pr"
HlBrace -> "br"
HlComma -> "cm"
HlElipse -> "el"
HlIdentType -> "it"
HlIdentFunc -> "if"
HlSymbolType -> "st"
HlSymbolFunc -> "sf"
HlSpecialCon -> "sc"
HlOpType -> "ot"
HlOpFunc -> "of"
HlOther -> "__"
type LnCol = (Int, Int)
data Highlight = Highlight { hlStart :: LnCol
, hlEnd :: LnCol
, hlType :: HighlightType
}
deriving (Show, Eq, Ord, Generic)
instance Out Highlight
defaultHighlight :: Highlight
defaultHighlight = Highlight (0,0) (0,0) HlOther
data HighlightType = HlComment
| HlModuleName
| HlKeyword
| HlImport
| HlPragma
| HlBrace
| HlComma
| HlElipse
| HlIdentType
| HlIdentFunc
| HlSymbolType
| HlSymbolFunc
| HlSpecialCon
| HlOpType
| HlOpFunc
| HlOther
deriving (Show, Eq, Ord, Generic)
instance Out HighlightType
prty :: String -> String
prty = fst . foldl f ("", "")
where
f (s, pfx) c
| c `elem` "{([" = let pfx' = pfx ++ " " in (s ++ "\n" ++ pfx ++ [c], pfx')
| c `elem` "})]" = let pfx' = drop 2 pfx in (s ++ "\n" ++ pfx' ++ [c], pfx')
| c `elem` "," = (s ++ "\n" ++ pfx ++ [c], pfx)
| otherwise = (s ++ [c], pfx)
tracePrtyMsg :: Show a => String -> a -> b -> b
tracePrtyMsg s a = trace ((s++) . prty . show $ a)
hlSrcSpan :: HighlightType -> SrcSpan -> Highlight
hlSrcSpan t SrcSpan {..} = defaultHighlight { hlStart = (srcSpanStartLine, srcSpanStartColumn)
, hlEnd = (srcSpanEndLine, srcSpanEndColumn)
, hlType = t
}
hlSrcSpanInfo :: HighlightType -> SrcSpanInfo -> Highlight
hlSrcSpanInfo t = hlSrcSpan t . srcInfoSpan
hlComment :: Comment -> Highlight
hlComment (Comment _ sp _) = hlSrcSpan HlComment sp
type SPI = SrcSpanInfo
hlModule :: Module SPI -> [Highlight]
hlModule (XmlPage _ _ _ _ _ _ _) = error "not supporting XmlPage"
hlModule (XmlHybrid _ _ _ _ _ _ _ _ _) = error "not supporting XmlHybrid"
hlModule (Module _ mHead mPragmas mImport decls) = hlModuleHead mHead
++ map hlModulePragma mPragmas
++ concatMap hlImportDecl mImport
++ concatMap hlDecl decls
hlModuleHead :: Maybe (ModuleHead SPI) -> [Highlight]
hlModuleHead Nothing = []
hlModuleHead (Just (ModuleHead l mName mWarning mExpList)) = [mImport, hlModuleName mName, mWhere]
++ hlWarningText mWarning
++ hlExportSpecList mExpList
where
[mImport, mWhere] = map (hlSrcSpan HlKeyword) . srcInfoPoints $ l
hlModuleName :: ModuleName SPI -> Highlight
hlModuleName (ModuleName i _) = hlSrcSpanInfo HlModuleName i
hlWarningText :: Maybe (WarningText SPI) -> [Highlight]
hlWarningText x = case x of
Nothing -> []
Just (DeprText i s) -> [hlSrcSpanInfo HlPragma i]
Just (WarnText i s) -> [hlSrcSpanInfo HlPragma i]
hlExportSpecList :: Maybe (ExportSpecList SPI) -> [Highlight]
hlExportSpecList x = case x of
Nothing -> []
Just (ExportSpecList i es) -> hlBracedListPunc i ++ concatMap hlExportSpec es
hlBracedExpr_ :: ([SrcSpan] -> ([Highlight], [SrcSpan])) -> [SrcSpan] -> [Highlight]
hlBracedExpr_ inner (ph:ps) = ob : cb : cs
where
ob = hlSrcSpan HlBrace ph
(cs, pl:_) = inner ps
cb = hlSrcSpan HlBrace pl
hlBracedListPunc :: SPI -> [Highlight]
hlBracedListPunc = hlBracedListPunc' . srcInfoPoints
hlBracedListPunc' :: [SrcSpan] -> [Highlight]
hlBracedListPunc' = hlBracedExpr_ cms
where cms ps = foldl f ([],ps) ps
where f (cs', ps') p = case drop 1 ps' of
[] -> (cs', ps')
ps'' -> (hlSrcSpan HlComma p : cs', ps'')
hlBracedElipse :: SPI -> [Highlight]
hlBracedElipse = hlBracedExpr_ cms . srcInfoPoints
where cms (p:ps) = ([hlSrcSpan HlElipse p], ps)
hlExportSpec :: ExportSpec SPI -> [Highlight]
hlExportSpec x = case x of
EVar _ n -> hlQName False n
EAbs _ n -> hlQName True n
EThingAll i n -> hlBracedElipse i ++ hlQName True n
EThingWith i n cs -> hlBracedListPunc i ++ hlQName True n ++ map hlCName cs
EModuleContents i n -> tracePrtyMsg "EModuleContents" i $ [hlModuleName n]
hlQName :: Bool -> QName SPI -> [Highlight]
hlQName typeLevel x = case x of
Qual _ mn n -> _correct mn (hlModuleName mn) (hlName typeLevel n)
UnQual _ n -> [hlName typeLevel n]
Special _ n -> [hlSpecialCon n]
where
_correct (ModuleName _ s) m n = m {hlEnd = (fst . hlEnd $ m, (snd . hlStart $ m) + length s + 1)}
: n {hlStart = (fst . hlStart $ n, (snd . hlStart $ n) + length s + 1)}
: []
hlName :: Bool -> Name SPI -> Highlight
hlName True (Ident i _) = hlSrcSpanInfo HlIdentType i
hlName False (Ident i _) = hlSrcSpanInfo HlIdentFunc i
hlName True (Symbol i _) = hlSrcSpanInfo HlSymbolType i
hlName False (Symbol i _) = hlSrcSpanInfo HlSymbolFunc i
hlSpecialCon :: SpecialCon SPI -> Highlight
hlSpecialCon x = case x of
UnitCon i -> tracePrtyMsg "UnitCon" i hlSrcSpanInfo HlSpecialCon i
ListCon i -> tracePrtyMsg "ListCon" i hlSrcSpanInfo HlSpecialCon i
FunCon i -> tracePrtyMsg "FunCon" i hlSrcSpanInfo HlSpecialCon i
TupleCon i _ _ -> tracePrtyMsg "TupleCon" i hlSrcSpanInfo HlSpecialCon i
Cons i -> tracePrtyMsg "Cons" i hlSrcSpanInfo HlSpecialCon i
UnboxedSingleCon i -> tracePrtyMsg "UnboxedSingleCon" i hlSrcSpanInfo HlSpecialCon i
hlCName :: CName SPI -> Highlight
hlCName x = case x of
VarName _ n -> hlName False n
ConName _ n -> hlName True n
hlModulePragma :: ModulePragma SPI -> Highlight
hlModulePragma x = case x of
LanguagePragma i _ -> hlSrcSpanInfo HlPragma i
OptionsPragma i _ _ -> hlSrcSpanInfo HlPragma i
AnnModulePragma i _ -> hlSrcSpanInfo HlPragma i
hlImportDecl :: ImportDecl SPI -> [Highlight]
hlImportDecl ImportDecl {..} = [hlModuleName importModule] ++ _hlImprt ++ _hlSrc ++ _hlQual ++ _hlPkg ++ _hlAs ++ _hlSpec
where
mk t = (:[]) . hlSrcSpan t . head &&& drop 1
(_hlImprt, ps) = mk HlImport . srcInfoPoints $ importAnn
(_hlSrc, ps') = case importSrc of
True -> let ([b], _ps) = mk HlPragma ps
([e], _ps') = mk HlOther _ps
in ([b{hlEnd = hlEnd e}], _ps')
False -> ([], ps)
(_hlQual, ps'') = case importQualified of
True -> mk HlImport ps'
False -> ([], ps')
(_hlPkg, ps''') = case importPkg of
Just s -> mk HlImport ps''
Nothing -> ([], ps'')
_hlAs = case importAs of
Just mn -> let (cs, _ps) = mk HlImport ps'''
in hlModuleName mn : cs
Nothing -> []
_hlSpec = case importSpecs of
Nothing -> []
Just (ImportSpecList i hid imps) -> _hlSpecPunc i hid ++ concatMap _hlImpSpec imps
_hlImpSpec x = case x of
IVar _ n -> [hlName False n]
IAbs _ n -> [hlName True n]
IThingAll i n -> hlName True n : hlBracedElipse i
IThingWith i n cns -> hlName True n : hlBracedListPunc i ++ map hlCName cns
_hlSpecPunc i hid = case hid of
False -> hlBracedListPunc i
True -> uncurry (:) .
( hlSrcSpan HlImport . head
&&& hlBracedListPunc . (\p->i{srcInfoPoints = p}) . drop 1
) . srcInfoPoints $ i
hlDecl :: Decl SPI -> [Highlight]
hlDecl x = case x of
TypeDecl i hd tp -> let hl = hlSrcSpan HlKeyword
sps = srcInfoPoints i
in (hl . head) sps : (hl . last) sps : hlDeclHead hd ++ hlTypE tp
TypeFamDecl i hd knd -> (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ hlDeclHead hd
++ maybe [] hlKind knd
DataDecl i dn ctx hd qs dr -> hlDataOrNew dn
: (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ hlContext ctx
++ hlDeclHead hd
++ concatMap hlQualConDecl qs
++ hlDeriving dr
GDataDecl i dn ctx hd knd gds dr -> hlDataOrNew dn
: (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ hlContext ctx
++ hlDeclHead hd
++ maybe [] hlKind knd
++ concatMap hlGadtDecl gds
++ hlDeriving dr
DataFamDecl i ctx hd knd -> tracePrtyMsg "DataFamDecl" i hlContext ctx ++ hlDeclHead hd ++ maybe [] hlKind knd
TypeInsDecl i tp1 tp2 -> tracePrtyMsg "TypeInstDecl" i hlTypE tp1 ++ hlTypE tp2
DataInsDecl i dn tp qs dr -> []
GDataInsDecl i dn tp knd gds dr -> []
ClassDecl i ctx hd fds cds -> []
InstDecl i ctx ihd ids -> []
DerivDecl i ctx ihd -> []
InfixDecl i ass l ops -> []
DefaultDecl i tp -> []
SpliceDecl i exp -> []
TypeSig i ns tp -> []
FunBind i ms -> []
PatBind i p mtp rhs bnds -> []
ForImp i cv sfty s nm tp -> []
ForExp i cv s nm tp -> []
RulePragmaDecl i r -> []
DeprPragmaDecl i ds -> []
WarnPragmaDecl i ds -> []
InlineSig i b act qnm -> []
InlineConlikeSig i act qnm -> []
SpecSig i act qnm tp -> []
SpecInlineSig i b act qnm tp -> []
InstSig i ctx ihd -> []
AnnPragma i ann -> []
hlDeclHead :: DeclHead SPI -> [Highlight]
hlDeclHead x = case x of
DHead i n tvs -> hlName True n : concatMap hlTyVarBind tvs
DHInfix i tvl n tvr -> hlTyVarBind tvl ++ [hlName True n] ++ hlTyVarBind tvr
DHParen i dh -> hlDeclHead dh
hlTyVarBind :: TyVarBind SPI -> [Highlight]
hlTyVarBind x = case x of
KindedVar i nm kd -> zipWith ($) (zipWith ($) (repeat hlSrcSpan) [HlBrace, HlKeyword, HlBrace]) (srcInfoPoints i) ++ [hlName True nm] ++ hlKind kd
UnkindedVar _ nm -> [hlName True nm]
hlKind :: Kind SPI -> [Highlight]
hlKind x = case x of
KindStar i -> [hlSrcSpanInfo HlOpType i]
KindBang i -> [hlSrcSpanInfo HlOpType i]
KindFn i k1 k2 -> (hlSrcSpan HlKeyword . head . srcInfoPoints $ i) : (hlKind k1 ++ hlKind k2)
KindParen i k -> hlBracedListPunc i ++ hlKind k
KindVar i n -> [hlName True n]
hlTypE :: Type SPI -> [Highlight]
hlTypE x = case x of
TyForall i tvb ctx tp -> (map (hlSrcSpan HlKeyword) . srcInfoPoints $ i)
++ maybe [] (concatMap hlTyVarBind) tvb
++ hlContext ctx
++ hlTypE tp
TyFun i tp1 tp2 -> (hlSrcSpan HlKeyword . head . srcInfoPoints $ i)
: hlTypE tp1
++ hlTypE tp2
TyTuple i _ tps -> hlBracedListPunc i ++ concatMap hlTypE tps
TyList i tp -> hlBracedListPunc i ++ (hlTypE tp)
TyApp _ tp1 tp2 -> hlTypE tp1 ++ hlTypE tp2
TyVar _ nm -> [hlName True nm]
TyCon _ qn -> hlQName True qn
TyParen i tp -> hlBracedListPunc i ++ (hlTypE tp)
TyInfix i tp1 qn tp2 -> trace (("TyInfix - "++) . prty . show $ i) (hlTypE tp1 ++ hlQName True qn ++ hlTypE tp2)
TyKind i tp kd -> trace (("TyKind - "++) . prty . show $ i) (hlTypE tp ++ hlKind kd)
hlContext :: Maybe (Context SPI) -> [Highlight]
hlContext x = case x of
Just (CxSingle i ass) -> _punc i ++ hlAsst ass
Just (CxTuple i ass) -> _punc i ++ concatMap hlAsst ass
Just (CxParen i ctx) -> _punc i ++ hlContext (Just ctx)
Just (CxEmpty i) -> trace (("CxEmpty - " ++ ) . prty . show $ i) []
_ -> []
where _punc = uncurry (:) . (hlSrcSpan HlKeyword . last &&& select null (const []) hlBracedListPunc' . init) . srcInfoPoints
hlAsst :: Asst SPI -> [Highlight]
hlAsst x = case x of
ClassA i qn tps -> hlQName True qn ++ concatMap hlTypE tps
InfixA i tp1 qn tp2 -> hlTypE tp1 ++ hlQName True qn ++ hlTypE tp2
IParam i ipn tp -> hlIPName ipn : hlTypE tp
EqualP i tp1 tp2 -> hlTypE tp1 ++ hlTypE tp2
hlIPName :: IPName SPI -> Highlight
hlIPName x = case x of
IPDup i s -> trace (("IPDup - " ++ ) . prty . show $ i) $ hlSrcSpanInfo HlIdentType i
IPLin i s -> trace (("IPLin - " ++ ) . prty . show $ i) $ hlSrcSpanInfo HlIdentType i
hlDataOrNew :: DataOrNew SPI -> Highlight
hlDataOrNew x = case x of
DataType i -> hlSrcSpanInfo HlKeyword i
NewType i -> hlSrcSpanInfo HlKeyword i
hlQualConDecl :: QualConDecl SPI -> [Highlight]
hlQualConDecl (QualConDecl i tvb ctx cdecl) = -- tracePrtyMsg "hlQualConDecl" i
maybe [] (concatMap hlTyVarBind) tvb
++ hlContext ctx
++ hlConDecl cdecl
++ if isJust tvb then map (hlSrcSpan HlKeyword) . srcInfoPoints $ i else []
-- ++ (select null (const []) hlBracedListPunc' . srcInfoPoints $ i)
hlDeriving :: Maybe (Deriving SPI) -> [Highlight]
hlDeriving x = case x of
Just (Deriving i ihs) -> (uncurry (:) . (hlSrcSpan HlKeyword . head &&& select null (const []) hlBracedListPunc' . drop 1) . srcInfoPoints $ i)
++ concatMap hlInstanceHead ihs
_ -> []
hlInstanceHead :: InstHead SPI -> [Highlight]
hlInstanceHead x = case x of
IHead i qn tps -> {-tracePrtyMsg "IHead" i -}hlQName True qn ++ concatMap hlTypE tps
IHInfix i tp1 qn tp2 -> {-tracePrtyMsg "IHInfix" i -}hlTypE tp1 ++ hlQName True qn ++ hlTypE tp2
IHParen i ih -> {-tracePrtyMsg "IHParen" i -}hlBracedListPunc i ++ hlInstanceHead ih
hlConDecl :: ConDecl SPI -> [Highlight]
hlConDecl x = case x of
ConDecl i nm bgts -> -- tracePrtyMsg "ConDecl" i
hlName True nm
: concatMap hlBangType bgts
InfixConDecl i bgt1 nm bgt2 -> -- tracePrtyMsg "InfixConDecl" i
hlName True nm
: hlBangType bgt1
++ hlBangType bgt2
RecDecl i nm flds -> -- tracePrtyMsg "RecDecl" i
hlName True nm
: hlBracedListPunc i
++ concatMap hlFieldDecl flds
hlFieldDecl :: FieldDecl SPI -> [Highlight]
hlFieldDecl (FieldDecl i nms bgt) = -- tracePrtyMsg "FieldDecl" i
(hlSrcSpan HlKeyword . last . srcInfoPoints $ i)
: (map (hlSrcSpan HlComma) . init . srcInfoPoints $ i)
++ map (hlName True) nms ++ hlBangType bgt
hlBangType :: BangType SPI -> [Highlight]
hlBangType x = case x of
BangedTy i tp -> (hlSrcSpan HlKeyword . head . srcInfoPoints $ i) : hlTypE tp
UnBangedTy _ tp -> hlTypE tp
UnpackedTy i tp -> tracePrtyMsg "UnpackedTy" i hlTypE tp
hlGadtDecl :: GadtDecl SPI -> [Highlight]
hlGadtDecl (GadtDecl i nm tp) = (hlSrcSpan HlKeyword . head . srcInfoPoints $ i) : hlName True nm : hlTypE tp
|
HanStolpo/ghc-edit
|
test/Exp.hs
|
Haskell
|
mit
| 19,543
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Export the data source to various data formats.
module DataAnalysis.Application.Handler.Export where
import Blaze.ByteString.Builder
--
import Data.Conduit
import qualified Data.Conduit.List as CL
-- -- import Data.Conduit.Zlib
-- -- import Data.Default
import Data.Double.Conversion.Text
import Data.IORef (newIORef)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.XML.Types
-- -- import Text.XML.Stream.Render
import Yesod
import DataAnalysis.Application.Foundation
import DataAnalysis.Application.Analyze
import DataAnalysis.Application.Types
-- | Export the data source to various data formats.
getExportR :: Text -> ExportType -> Handler TypedContent
getExportR ident typ = do
countRef <- liftIO $ newIORef 0
logRef <- liftIO $ newIORef id
source <- analysisSource countRef logRef
error "TODO: Export"
{-case typ of
CsvData ->
attachmentFromSource
(fname "csv")
"text/csv"
(source
$= CL.mapMaybe dataPointCSV
$= (writeHeaders settings >> fromCSV settings)
$= CL.map fromByteString)
where settings = def
CsvDataGzip ->
attachmentFromSource
(fname "csv.gz")
"application/x-gzip"
(source
$= CL.mapMaybe dataPointCSV
$= (writeHeaders settings >> fromCSV settings)
$= gzip
$= CL.map fromByteString)
where settings = def
XmlData ->
attachmentFromSource
(fname "xml")
"application/xml"
(source
$= toXmlRows dataPointXML
$= renderBuilder settings)
where settings = def
XmlDataGzip ->
attachmentFromSource
(fname "xml.gz")
"application/x-gzip"
(source
$= toXmlRows dataPointXML
$= renderBytes settings
$= gzip
$= CL.map fromByteString)
where settings = def-}
where fname ext =
ident <> "-export." <> ext
--------------------------------------------------------------------------------
-- CSV export
-- | Convert a data point to maybe a row. Not all data points are
-- data… points.
dataPointCSV :: DataPoint -> Maybe (Map Text Text)
dataPointCSV (DP2 (D2D label value g)) =
Just
(Map.fromList
[("label",label)
,("value",toShortest value)
,("group",fromMaybe "" g)])
dataPointCSV (DP3 (D3D x y z)) =
Just
(Map.fromList
[("x",toShortest (fromIntegral x))
,("y",toShortest (fromIntegral y))
,("z",toShortest z)])
dataPointCSV DPM{} =
Nothing
--------------------------------------------------------------------------------
-- XML export
-- | Render a data point to XML events.
dataPointXML :: Monad m => DataPoint -> Producer m Event
dataPointXML (DP2 dp) =
do with "label" (text (_d2dLabel dp))
with "value" (text (tshow (_d2dValue dp)))
maybe (return ()) (with "label" . text) (_d2dGroup dp)
where text = yield . EventContent . ContentText
dataPointXML (DP3 (D3D x y z)) =
do with "x" (text (tshow (fromIntegral x)))
with "y" (text (tshow (fromIntegral y)))
with "z" (text (tshow z))
where text = yield . EventContent . ContentText
dataPointXML DPM{} =
return ()
-- | Show a double to text.
tshow :: Double -> Text
tshow = T.pack . show
--------------------------------------------------------------------------------
-- Utilities
-- | Output an attachment from a source.
attachmentFromSource :: Text
-> ContentType
-> Source (HandlerT site IO) Builder
-> HandlerT site IO TypedContent
attachmentFromSource filename contentType source = do
addHeader "content-disposition"
("attachment; filename=" <> T.pack (show (T.unpack filename)))
respondSource contentType
(source $= CL.map Chunk)
-- | Render to an XML document of rows.
toXmlRows :: Monad m => (row -> Conduit row m Event) -> Conduit row m Event
toXmlRows renderRow =
do yield EventBeginDocument
with "rows"
(awaitForever (with "row" . renderRow))
yield EventEndDocument
-- | With opening/closing tags for the given name, render the inner
-- conduit inside it.
with :: Monad m => Name -> Conduit void m Event -> Conduit void m Event
with name inner =
do yield (EventBeginElement name [])
inner
yield (EventEndElement name)
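-- A minimal sketch (not part of the original module) of the event stream
-- produced by 'with': wrapping an inner conduit in a named element simply
-- brackets its events. With @text = yield . EventContent . ContentText@ as in
-- 'dataPointXML', @with "x" (text "1")@ yields
--
-- > EventBeginElement "x" []
-- > EventContent (ContentText "1")
-- > EventEndElement "x"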
|
teuffy/min-var-ci
|
src/DataAnalysis/Application/Handler/Export.hs
|
Haskell
|
mit
| 4,762
|
{-# LANGUAGE RecordWildCards, ParallelListComp #-}
module Exec where
import Data.Char
import Data.List
import Debug.Trace
import TypesEtc
import Sprockell
-- ============================================================================================
-- execution functions for simulation purposes: exec, sim
--
-- addrs: register+memory addresses that you want to follow/inspect when running a program (instrs).
-- instrs: a list of assembly instructions (the program) that the Sprockell executes.
-- count: counts the number of instructions that are executed. The total is shown at the end.
-- state: contains 4 things: program counter, stack pointer, registers, data memory.
-- i:is: list of inputs. In this case just a repeating clock tick.
--
-- The output of exec is generated every clock cycle by the function demoOutput,
-- after which exec continues in the next state (state') calculated by one cycle of the Sprockell processor.
exec addrs instrs (count, state@State{..}) (i:is)
| instrs!!pc==EndProg = traceShow ("Instructions: " ++ show count)
[]
| otherwise = demoOutput addrs instrs state' : exec addrs instrs (count+1, state') is
where
state' = sprockell instrs state i
-- ============================================================================================
-- generating demoOutput
--
-- demoOutput calculates a value of type DemoOutput. The function show for this type is in TypesEtc.hs
demoOutput addrs instrs State{..} = DemoOutput pc
(instrs!!pc)
(map (regbank!!) regaddrs)
(map (dmem!!) memaddrs)
sp
(map (dmem!!) [sp0+1..sp])
where
(regaddrs,memaddrs) = addrs
-- sim: the simulation function which runs exec and outputs the result in a readable way.
-- --------------------------------------------------------------------------------------
sim addrs instrs = putStr . unlines . map show $ results
where
results = demoOutput addrs instrs initstate : exec addrs instrs (0,initstate) clock
-- showInstructions: shows a list of instructions in a readable way.
-- -----------------------------------------------------------------
showInstrs instrs = putStr . unlines $ strs
where
m = length $ show $ length instrs + 1
strs = [ ' ' : replicate (m-w) ' ' ++ show n ++ ": " ++ show instr | (n,instr) <- zip [0..] instrs
, w <- [length $ show n]
]
-- ============================================================================================
-- Examples
-- ============================================================================================
{---------------------------------------------
| Example 1: computes the value of 3^5 (= 243)
----------------------------------------------
Program in imperative pseudo-code:
a = 3;
n = 5;
power = 1;
while (n != 0) {
power = a * power;
n = n-1;
};
----------------------------------------------}
-- A list of assembly instruction that calculates example 1
-- --------------------------------------------------------
instrs1 = [ Load (Imm 3) 3 -- 0 value of a (=3) is put in register 3;
-- Register 3 will be used for a.
, Load (Imm 5) 4 -- 1 value of n (=5) is put in register 4;
-- Register 4 will contain the value of n throughout the execution.
, Load (Imm 1) 5 -- 2 initial value of power (=1) is put in register 5;
                                 -- Register 5 will be used for the value of the power.
, Compute Equal 4 0 1 -- 3 Compute n==0 (reg 4 contains the value of n, reg 0 contains 0), and put the result in register 1;
-- Register 1 is checked for conditional jumps.
, Jump CA 8 -- 4 If True (ie register 1 contains 1), then go to EndProg
, Compute Mul 3 5 5 -- 5 multiply a (reg 3) with power (reg 5), give the result to power
, Compute Decr 4 0 4 -- 6 Decrement n (reg 4) with 1
, Jump UA 3 -- 7 Go back to instruction 3
, EndProg -- 8
]
-- relevant addresses to show during simulation
-- --------------------------------------------
addrs1 = ( [3,4,5] -- registers
, [] -- heap
) :: ([Int],[Int])
-- show the list of instructions
-- -----------------------------
is1 = showInstrs instrs1
-- run the program instrs1, and show the content of the addresses addrs1
-- ---------------------------------------------------------------------
run1 = sim addrs1 instrs1
{---------------------------------------
| Example 2: compute the "3n+1" function
----------------------------------------
Program in imperative pseudo-code:
program threeNplus1;
var a;
function even (n);
{return (n%2) == 0};
function three (n);
{ while n>1
{ if even(n)
{ n=n/2; }
{ n=(3*n)+1; };
};
return n
};
{ a = three(7);
}
-}
-- Haskell definition (runnable):
-- -----------------------------------------------------
three :: Int -> [Int]
three n | n == 1 = [1]
| n `mod` 2 == 0 = n : three (n `div` 2)
| otherwise = n : three (3*n +1)
-- A list of assembly instruction that calculates example 2
-- --------------------------------------------------------
instrs2 = [ Load (Imm 1) 2 -- 0 Load the constant 1 in register 2
, Load (Imm 2) 3 -- 1 Load the constant 2 in register 3
, Load (Imm 3) 4 -- 2 Load the constant 3 in register 4
, Load (Imm 7) 5 -- 3 Load initial value of n (7) in register 5
, Compute Equal 5 2 1 -- 4 Compute n==1, and load result in register 1;
, Jump CA 13 -- 5 If reg1=1, then we're done, and thus go to EndProg
, Compute Mod 5 3 1 -- 6 Otherwise: calculate n`mod`2, and load the result in reg1.
, Jump CA 10 -- 7 If reg1=1 (i.e: if n is odd), then go to instruction 10
, Compute Div 5 3 5 -- 8 else divide n by 2 (the content of reg3) and put the result in register 5.
, Jump UA 4 -- 9 Jump back to instruction 4.
, Compute Mul 5 4 5 -- 10 At this point n is odd, thus multiply by 3 (the content of reg4)...
, Compute Add 5 2 5 -- 11 ... and add 1 (the content of reg2).
, Jump UA 4 -- 12 Jump back to 4.
, EndProg -- 13 End of Program.
]
-- relevant addresses to show during simulation
-- --------------------------------------------
addrs2 = ( [1,5] -- registers
, [] -- heap
) :: ([Int],[Int])
-- show the list of instructions
-- -----------------------------
is2 = showInstrs instrs2
-- run the program instrs2, and show the content of the addresses addrs2
-- ---------------------------------------------------------------------
run2 = sim addrs2 instrs2
|
Oboema/FP-GO1
|
Exec.hs
|
Haskell
|
mit
| 6,509
|
module Network.Server
(
module Network.Server.Common
) where
import Network.Server.Common
|
harrisi/on-being-better
|
list-expansion/Haskell/course/projects/NetworkServer/haskell/src/Network/Server.hs
|
Haskell
|
cc0-1.0
| 94
|
{-# LANGUAGE OverloadedStrings, CPP #-}
module Model.ActionKey where
import qualified Data.ByteString.Builder as BS
import qualified Data.ByteString.Char8 as BS
import qualified Database.PostgreSQL.Simple.FromRow as PG
import qualified Database.PostgreSQL.Simple.FromField as PG
import qualified Database.PostgreSQL.Simple.ToField as PG
import qualified Data.Time.Clock as DTC
import Debug.Trace (traceShow)
#ifdef __HASTE__
type Text = String
#else
import Data.Text (Text)
#endif
{-# ANN module ("HLint: ignore Use camelCase" :: String) #-}
type ActionKeyKey = Text
data Action = ConfirmRegistration | ResetPassword deriving (Show, Read)
instance PG.FromField Action where
fromField f bs =
case bs of
Nothing -> PG.returnError PG.UnexpectedNull f ""
Just val -> pure $ read (traceShow val (BS.unpack val))
instance PG.ToField Action where
toField val = PG.Plain $ PG.inQuotes $ BS.stringUtf8 $ show val
data ActionKey = ActionKey
{ ac_id :: Int
, ac_user_id :: Int
, ac_action :: Action
, ac_key :: ActionKeyKey
, ac_created :: DTC.UTCTime
} deriving (Show)
instance PG.FromRow ActionKey where
fromRow = ActionKey <$> PG.field <*> PG.field <*> PG.field <*> PG.field <*> PG.field
|
DataStewardshipPortal/ds-wizard
|
Model/ActionKey.hs
|
Haskell
|
apache-2.0
| 1,229
|
module Main where
import Control.Lens
import Control.Monad
import Data.Bits
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Bytes.Get
import Data.List (intercalate)
import System.Ext2
import System.Environment
main :: IO ()
main = do
args <- getArgs
when (length args /= 1) $
error "Usage: ext2checker <path to ext2 filesystem>"
fs <- BL.readFile (head args)
let s = flip runGetL fs $ skip 1024 >> readSuperblock
putStrLn $ "FS Size: " ++ show (fsSize s) ++ " Bytes"
putStrLn $ "Unallocated: " ++ show (unallocated s) ++ " Bytes"
putStrLn $ "FS State: " ++ s ^. state . to show
putStrLn $ "Required feature flags: " ++
(intercalate ", " . map show $ s ^. featureCompat)
putStrLn $ "Optional feature flags: " ++
(intercalate ", " . map show $ s ^. featureIncompat)
putStrLn $ "Read-only feature flags: " ++
(intercalate ", " . map show $ s ^. featureRoCompat)
where
fsSize :: Superblock -> Double
fsSize s =
fromIntegral
((s ^. blocksCount) *
(1024 `shiftL` fromIntegral (s ^. logBlockSize)))
unallocated :: Superblock -> Double
unallocated s =
fromIntegral
((s ^. freeBlocksCount) *
(1024 `shiftL` fromIntegral (s ^. logBlockSize)))
|
relrod/ext2
|
src/ext2checker.hs
|
Haskell
|
bsd-2-clause
| 1,244
|
module Graphics.GL.Low.Classes where
import Graphics.GL
-- | OpenGL internal image formats.
class InternalFormat a where
internalFormat :: (Eq b, Num b) => proxy a -> b
-- | The allowed attachment point for images with an internal format.
class InternalFormat a => Attachable a where
attachPoint :: (Eq b, Num b) => proxy a -> b
-- | Textures are GL objects.
class GLObject a => Texture a where
-- | Framebuffers can be bound to the framebuffer binding target. There is
-- a default framebuffer and the client may create an arbitrary number of
-- new framebuffer objects.
class Framebuffer a where
framebufferName :: Num b => a -> b
class GLObject a => BufferObject a where
-- | Mappable to GL enums.
class ToGL a where
toGL :: (Num b, Eq b) => a -> b
instance ToGL Bool where
toGL True = GL_TRUE
toGL False = GL_FALSE
-- | All GL objects have some numeric name.
class GLObject a where
glObjectName :: Num b => a -> b
|
sgraf812/lowgl
|
Graphics/GL/Low/Classes.hs
|
Haskell
|
bsd-2-clause
| 944
|
-- | Collection of utilities to make @wybor@ customization palatable
--
-- Those are mostly thin wrappers over things in "System.Console.ANSI" from @ansi-terminal@
module Ansi
( reset
, bold
, regular
, underlining
, swap
, unswap
, fgcolor
, bgcolor
, Ansi.Underlining(..)
, Ansi.ColorIntensity(..)
, Ansi.Color(..)
) where
import Data.Text (Text)
import qualified Data.Text as Text
import qualified System.Console.ANSI as Ansi
-- | Sets all attributes off
reset :: Text
reset = sgr Ansi.Reset
-- | Set bold font style
bold :: Text
bold = sgr (Ansi.SetConsoleIntensity Ansi.BoldIntensity)
-- | Set regular font style
regular :: Text
regular = sgr (Ansi.SetConsoleIntensity Ansi.NormalIntensity)
-- | Set underlining style
underlining :: Ansi.Underlining -> Text
underlining = sgr . Ansi.SetUnderlining
-- | Swap foreground and background colors
swap :: Text
swap = sgr (Ansi.SetSwapForegroundBackground True)
-- | Unswap foreground and background colors
unswap :: Text
unswap = sgr (Ansi.SetSwapForegroundBackground False)
-- | Set foreground color
fgcolor :: Ansi.ColorIntensity -> Ansi.Color -> Text
fgcolor i c = sgr (Ansi.SetColor Ansi.Foreground i c)
-- | Set background color
bgcolor :: Ansi.ColorIntensity -> Ansi.Color -> Text
bgcolor i c = sgr (Ansi.SetColor Ansi.Background i c)
sgr :: Ansi.SGR -> Text
sgr = Text.pack . Ansi.setSGRCode . return
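-- A minimal usage sketch (assumes OverloadedStrings and '<>' from
-- "Data.Monoid"; 'errMsg' is a hypothetical name):
--
-- > errMsg :: Text
-- > errMsg = bold <> fgcolor Ansi.Vivid Ansi.Red <> "error" <> reset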
|
supki/wybor
|
src/Ansi.hs
|
Haskell
|
bsd-2-clause
| 1,402
|
{-# LANGUAGE FlexibleContexts #-}
module Horbits.UI.Camera.Control (setupMouseControl) where
import Control.Lens
import Control.Monad.Trans.State
import Data.IORef
import Graphics.UI.Gtk
import Linear
import Horbits.Data.Binding
import Horbits.UI.Camera.Internal
-- Ongoing mouse state
data MState = MState [MouseButton] (Double, Double)
-- Camera updates
mousePan :: (Monad m, RealFloat a, Epsilon a) => (Double, Double) -> StateT (OrthoCamera a) m ()
mousePan (dx, dy) = do
w <- use orthoCameraViewportWidth
h <- use orthoCameraViewportHeight
let v = V2 (2 * realToFrac dx / fromIntegral w) (2 * realToFrac dy / fromIntegral h)
modify (addTranslation v)
mouseRotate :: (Monad m, RealFloat a, Epsilon a) => (Double, Double) -> StateT (OrthoCamera a) m ()
mouseRotate (dx, dy) = do
w <- use orthoCameraViewportWidth
h <- use orthoCameraViewportHeight
modify . addColatitude $ pi * realToFrac dy / fromIntegral w
modify . addLongitude $ pi * realToFrac dx / fromIntegral h
mouseScroll :: (Monad m, Num a, Ord a) => ScrollDirection -> StateT (OrthoCamera a) m ()
mouseScroll dir = do
let z = if dir == ScrollUp then zoomIn else zoomOut
modify z
-- Mouse event processing
-- TODO map MState with lens?
onButtonEvent :: (HasUpdate v MState MState)
=> (MouseButton -> [MouseButton] -> [MouseButton]) -> v -> EventM EButton ()
onButtonEvent f st = do
button <- eventButton
coords <- eventCoordinates
st $~ newState coords button -- TODO ??? zoom or sth
where
newState c b (MState bs _) = MState (f b bs) c
onMouseMove :: (HasGetter vs MState, HasSetter vs MState,
HasGetter vc (OrthoCamera a), HasSetter vc (OrthoCamera a),
RealFloat a, Epsilon a) =>
vc -> vs -> EventM t (Double, Double) -> EventM t ()
onMouseMove cam st evCoords = do
(coords @ (cx, cy)) <- evCoords
MState buttons (sx, sy) <- readVar st
st $= MState buttons coords -- TODO MState manipulation is weak, see above, also <<%= (!)
evalStateVar cam $ case buttons of
LeftButton : _ -> mousePan (cx - sx, sy - cy)
RightButton : _ -> mouseRotate (cx - sx, sy - cy)
_ -> return ()
setupMouseControl :: (HasGetter v (OrthoCamera a), HasSetter v (OrthoCamera a),
WidgetClass w, RealFloat a, Epsilon a)
=> w -> v -> IO [ConnectId w]
setupMouseControl w cam = do
st <- newVar (MState [] (0.0, 0.0)) :: IO (IORef MState)
widgetAddEvents w [PointerMotionHintMask, Button1MotionMask, Button3MotionMask]
sequence [
on w motionNotifyEvent $ tryEvent $ do
onMouseMove cam st eventCoordinates
eventRequestMotions,
on w buttonPressEvent $ tryEvent $
onButtonEvent (\b bs -> b : filter (/= b) bs) st,
on w buttonReleaseEvent $ tryEvent $
onButtonEvent (\b bs -> filter (/= b) bs) st,
on w scrollEvent $ tryEvent $ do
d <- eventScrollDirection
evalStateVar cam $ mouseScroll d
]
|
chwthewke/horbits
|
src/horbits/Horbits/UI/Camera/Control.hs
|
Haskell
|
bsd-3-clause
| 3,179
|
module ETA.CodeGen.Utils where
import ETA.Main.DynFlags
import ETA.BasicTypes.Name
import ETA.Types.TyCon
import ETA.BasicTypes.Literal
import Codec.JVM
import Data.Char (ord)
import Control.Arrow(first)
import ETA.CodeGen.Name
import ETA.CodeGen.Rts
import ETA.Debug
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8)
import Data.Monoid
import Data.Foldable
cgLit :: Literal -> (FieldType, Code)
cgLit (MachChar c) = (jint, iconst jint . fromIntegral $ ord c)
cgLit (MachInt i) = (jint, iconst jint $ fromIntegral i)
cgLit (MachWord i) = (jint, iconst jint $ fromIntegral i)
cgLit (MachInt64 i) = (jlong, lconst $ fromIntegral i)
-- TODO: Verify that fromIntegral converts well
cgLit (MachWord64 i) = (jlong, lconst $ fromIntegral i)
cgLit (MachFloat r) = (jfloat, fconst $ fromRational r)
cgLit (MachDouble r) = (jdouble, dconst $ fromRational r)
-- TODO: Remove this literal variant?
cgLit MachNullAddr = (jobject, lconst 0)
cgLit MachNull = (jobject, aconst_null jobject)
cgLit (MachStr s) = (jstring, sconst $ decodeUtf8 s)
-- TODO: Implement MachLabel
cgLit MachLabel {} = error "cgLit: MachLabel"
cgLit other = pprPanic "mkSimpleLit" (ppr other)
litToInt :: Literal -> Int
litToInt (MachInt i) = fromInteger i
litToInt (MachWord i) = fromInteger i
litToInt (MachChar c) = ord c
litToInt _ = error "litToInt: not integer"
intSwitch :: Code -> [(Int, Code)] -> Maybe Code -> Code
intSwitch = gswitch
litSwitch :: FieldType -> Code -> [(Literal, Code)] -> Code -> Code
litSwitch ft expr branches deflt
-- | isObjectFt ft = deflt -- ASSERT (length branches == 0)
-- TODO: When switching on an object, perform a checkcast
-- TODO: When switching on long/float/double, use an if-else tree
| null branches = deflt
| ft `notElem` [jint, jbool, jbyte, jshort, jchar] = error $ "litSwitch[" ++ show ft ++ "]: " ++
"primitive cases not supported for non-integer values"
| otherwise = intSwitch expr intBranches (Just deflt)
where intBranches = map (first litToInt) branches
tagToClosure :: DynFlags -> TyCon -> Code -> (FieldType, Code)
tagToClosure dflags tyCon loadArg = (closureType, enumCode)
where enumCode = invokestatic (mkMethodRef modClass fieldName [] (Just arrayFt))
<> loadArg
<> gaload closureType
tyName = tyConName tyCon
modClass = moduleJavaClass $ nameModule tyName
fieldName = nameTypeTable dflags $ tyConName tyCon
arrayFt = jarray closureType
initCodeTemplate' :: FieldType -> Bool -> Text -> Text -> FieldRef -> Code -> MethodDef
initCodeTemplate' retFt synchronized modClass qClName field code =
mkMethodDef modClass accessFlags qClName [] (Just retFt) $ fold
[ getstatic field
, ifnonnull mempty code
, getstatic field
, greturn retFt ]
where accessFlags = [Public, Static] ++ (if synchronized then [Synchronized] else [])
initCodeTemplate :: Bool -> Text -> Text -> FieldRef -> Code -> MethodDef
initCodeTemplate synchronized modClass qClName field code =
initCodeTemplate' closureType synchronized modClass qClName field code
|
pparkkin/eta
|
compiler/ETA/CodeGen/Utils.hs
|
Haskell
|
bsd-3-clause
| 3,217
|
{-# LANGUAGE DataKinds, GADTs, TypeFamilies, TypeOperators #-}
module Text.Printf.Safe.Core (type (~>), Formatter, Printf(..),
HList(..), printf, printf') where
import Data.String (IsString (..))
-- | Variadic function types.
type family (~>) as b where
(~>) '[] a = a
(~>) (x ': xs) a = x -> xs ~> a
-- | Formatter type.
type Formatter a = a -> String
-- | Printf Format.
data Printf xs where
EOS :: Printf '[]
(:<>) :: String -> Printf xs -> Printf xs
(:%) :: Formatter x -> Printf xs -> Printf (x ': xs)
instance (xs ~ '[]) => IsString (Printf xs) where
fromString str = str :<> EOS
-- | Hetero list.
data HList ts where
HNil :: HList '[]
(:-) :: a -> HList xs -> HList (a ': xs)
infixr 9 :-, :<>, :%
-- | HList version.
printf' :: Printf ts -> HList ts -> String
printf' ps0 ts0 = go ps0 ts0 ""
where
go :: Printf us -> HList us -> ShowS
go EOS HNil = id
go (str :<> fs) xs = showString str . go fs xs
go (fm :% fs) (x :- ds) = showString (fm x) . go fs ds
go _ _ = error "bug in GHC!"
-- | Variadic version.
printf :: Printf xs -> xs ~> String
printf p = go p ""
where
go :: Printf xs -> String -> xs ~> String
go EOS a = a
go (str :<> xs) a = go xs (a ++ str)
go (fmt :% xs) a = \x -> go xs (a ++ fmt x)
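-- A hedged usage sketch (not part of the original module); the format and
-- the name 'showAnswer' are hypothetical:
--
-- > showAnswer :: Int -> String
-- > showAnswer = printf ("answer = " :<> (show :: Int -> String) :% EOS)
--
-- >>> showAnswer 42
-- "answer = 42"
-- >>> printf' ("answer = " :<> (show :: Int -> String) :% EOS) (42 :- HNil)
-- "answer = 42"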
|
konn/safe-printf
|
src/Text/Printf/Safe/Core.hs
|
Haskell
|
bsd-3-clause
| 1,341
|
module Cakefile where
import Development.Cake3
import Development.Cake3.Ext.UrWeb
import Cakefile_P
main = writeMake (file "Makefile") $ do
prebuild [cmd|urweb -version|]
u <- uwlib (file "Script.urp") $ do
ffi (file "Script.urs")
include (file "Script.h")
src (file "Script.c")
pkgconfig "jansson"
t1 <- uwapp "-dbms sqlite" (file "Test1.urp") $ do
allow url "http://code.jquery.com/ui/1.10.3/jquery-ui.js"
allow mime "text/javascript"
library u
debug
ur (file "Test1.ur")
t2 <- uwapp "-dbms sqlite" (file "Test2.urp") $ do
library u
ur (file "Test2.ur")
rule $ do
phony "all"
depend u
depend t1
depend t2
return ()
|
grwlf/cake3
|
Example/UrWeb/Cakefile.hs
|
Haskell
|
bsd-3-clause
| 698
|
-- | The issues API as described on <http://developer.github.com/v3/issues/>.
module Github.Issues (
issue
,issue'
,issuesForRepo
,issuesForRepo'
,IssueLimitation(..)
,module Github.Data
) where
import Github.Data
import Github.Private
import Data.List (intercalate)
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale)
import Data.Time.Clock (UTCTime(..))
-- | A data structure for describing how to filter issues. This is used by
-- @issuesForRepo@.
data IssueLimitation =
AnyMilestone -- ^ Issues appearing in any milestone. [default]
| NoMilestone -- ^ Issues without a milestone.
| MilestoneId Int -- ^ Only issues that are in the milestone with the given id.
| Open -- ^ Only open issues. [default]
| OnlyClosed -- ^ Only closed issues.
| Unassigned -- ^ Issues to which no one has been assigned ownership.
| AnyAssignment -- ^ All issues regardless of assignment. [default]
| AssignedTo String -- ^ Only issues assigned to the user with the given login.
| Mentions String -- ^ Issues which mention the given string, taken to be a user's login.
| Labels [String] -- ^ A list of labels to filter by.
| Ascending -- ^ Sort ascending.
| Descending -- ^ Sort descending. [default]
| Since UTCTime -- ^ Only issues created since the specified date and time.
-- | Details on a specific issue, given the repo owner and name, and the issue
-- number.
--
-- > issue' (Just ("github-username", "github-password")) "thoughtbot" "paperclip" "462"
issue' :: Maybe GithubAuth -> String -> String -> Int -> IO (Either Error Issue)
issue' auth user repoName issueNumber =
githubGet' auth ["repos", user, repoName, "issues", show issueNumber]
-- | Details on a specific issue, given the repo owner and name, and the issue
-- number.
--
-- > issue "thoughtbot" "paperclip" "462"
issue :: String -> String -> Int -> IO (Either Error Issue)
issue = issue' Nothing
-- | All issues for a repo (given the repo owner and name), with optional
-- restrictions as described in the @IssueLimitation@ data type.
--
-- > issuesForRepo' (Just ("github-username", "github-password")) "thoughtbot" "paperclip" [NoMilestone, OnlyClosed, Mentions "jyurek", Ascending]
issuesForRepo' :: Maybe GithubAuth -> String -> String -> [IssueLimitation] -> IO (Either Error [Issue])
issuesForRepo' auth user repoName issueLimitations =
githubGetWithQueryString'
auth
["repos", user, repoName, "issues"]
(queryStringFromLimitations issueLimitations)
where
queryStringFromLimitations = intercalate "&" . map convert
convert AnyMilestone = "milestone=*"
convert NoMilestone = "milestone=none"
convert (MilestoneId n) = "milestone=" ++ show n
convert Open = "state=open"
convert OnlyClosed = "state=closed"
convert Unassigned = "assignee=none"
convert AnyAssignment = "assignee=*"
convert (AssignedTo u) = "assignee=" ++ u
convert (Mentions u) = "mentioned=" ++ u
convert (Labels l) = "labels=" ++ intercalate "," l
convert Ascending = "direction=asc"
convert Descending = "direction=desc"
convert (Since t) =
"since=" ++ formatTime defaultTimeLocale "%FT%TZ" t
-- | All issues for a repo (given the repo owner and name), with optional
-- restrictions as described in the @IssueLimitation@ data type.
--
-- > issuesForRepo "thoughtbot" "paperclip" [NoMilestone, OnlyClosed, Mentions "jyurek", Ascending]
issuesForRepo :: String -> String -> [IssueLimitation] -> IO (Either Error [Issue])
issuesForRepo = issuesForRepo' Nothing
|
erochest/github
|
Github/Issues.hs
|
Haskell
|
bsd-3-clause
| 3,623
|
module Wigner.Complex (
Complex((:+)),
ComplexValued(conjugate),
ComplexNum(fromComplexRational)) where
import Data.Ratio
import Wigner.Texable
data Complex a = a :+ a deriving (Show, Eq)
class ComplexValued a where
conjugate :: a -> a
instance (Num a) => ComplexValued (Complex a) where
conjugate (x :+ y) = x :+ (-y)
instance (Num a) => Num (Complex a) where
negate (x :+ y) = negate x :+ negate y
(x1 :+ y1) + (x2 :+ y2) = (x1 + x2) :+ (y1 + y2)
(x1 :+ y1) * (x2 :+ y2) = (x1 * x2 - y1 * y2) :+ (x1 * y2 + y1 * x2)
abs x = undefined
signum x = undefined
fromInteger x = fromInteger x :+ 0
instance (Fractional a) => Fractional (Complex a) where
(x1 :+ y1) / (x2 :+ y2) = ((x1 * x2 + y1 * y2) / m) :+ ((x1 * y2 - y1 * x2) / m) where
m = x2 * x2 + y2 * y2
fromRational x = fromRational x :+ fromRational 0
class ComplexNum a where
fromComplexRational :: Complex Rational -> a
instance (Texable a, Ord a, Num a) => Texable (Complex a) where
showTex (x :+ y)
| y == 0 = sx
| x == 0 && y == 1 = "i"
| x == 0 && y == -1 = "-i"
| x == 0 = sy ++ "i"
| otherwise = "(" ++ showTex (x :+ 0) ++ sign ++ showTex (0 :+ y) ++ ")"
where
sx = showTex x
sy = showTex y
sign = if y < 0 then "" else "+"
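-- Illustrative usage (not part of the original module):
--   (1 :+ 2) * (3 :+ 4)  evaluates to  (-5) :+ 10
--   conjugate (1 :+ 2)   evaluates to  1 :+ (-2)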
|
fjarri/wigner
|
src/Wigner/Complex.hs
|
Haskell
|
bsd-3-clause
| 1,348
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Desugaring arrow commands
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-incomplete-record-updates #-}
module DsArrows ( dsProcExpr ) where
#include "HsVersions.h"
import GhcPrelude
import Match
import DsUtils
import DsMonad
import GHC.Hs hiding (collectPatBinders, collectPatsBinders,
collectLStmtsBinders, collectLStmtBinders,
collectStmtBinders )
import TcHsSyn
import qualified GHC.Hs.Utils as HsUtils
-- NB: The desugarer, which straddles the source and Core worlds, sometimes
-- needs to see source types (newtypes etc), and sometimes not
-- So WATCH OUT; check each use of split*Ty functions.
-- Sigh. This is a pain.
import {-# SOURCE #-} DsExpr ( dsExpr, dsLExpr, dsLExprNoLP, dsLocalBinds,
dsSyntaxExpr )
import TcType
import Type ( splitPiTy )
import TcEvidence
import CoreSyn
import CoreFVs
import CoreUtils
import MkCore
import DsBinds (dsHsWrapper)
import Name
import Id
import ConLike
import TysWiredIn
import BasicTypes
import PrelNames
import Outputable
import VarSet
import SrcLoc
import ListSetOps( assocMaybe )
import Data.List
import Util
import UniqDSet
data DsCmdEnv = DsCmdEnv {
arr_id, compose_id, first_id, app_id, choice_id, loop_id :: CoreExpr
}
mkCmdEnv :: CmdSyntaxTable GhcTc -> DsM ([CoreBind], DsCmdEnv)
-- See Note [CmdSyntaxTable] in GHC.Hs.Expr
mkCmdEnv tc_meths
= do { (meth_binds, prs) <- mapAndUnzipM mk_bind tc_meths
-- NB: Some of these lookups might fail, but that's OK if the
-- symbol is never used. That's why we use Maybe first and then
-- panic. An eager panic caused trouble in typecheck/should_compile/tc192
; let the_arr_id = assocMaybe prs arrAName
the_compose_id = assocMaybe prs composeAName
the_first_id = assocMaybe prs firstAName
the_app_id = assocMaybe prs appAName
the_choice_id = assocMaybe prs choiceAName
the_loop_id = assocMaybe prs loopAName
-- used as an argument in, e.g., do_premap
; check_lev_poly 3 the_arr_id
-- used as an argument in, e.g., dsCmdStmt/BodyStmt
; check_lev_poly 5 the_compose_id
-- used as an argument in, e.g., dsCmdStmt/BodyStmt
; check_lev_poly 4 the_first_id
-- the result of the_app_id is used as an argument in, e.g.,
-- dsCmd/HsCmdArrApp/HsHigherOrderApp
; check_lev_poly 2 the_app_id
-- used as an argument in, e.g., HsCmdIf
; check_lev_poly 5 the_choice_id
-- used as an argument in, e.g., RecStmt
; check_lev_poly 4 the_loop_id
; return (meth_binds, DsCmdEnv {
arr_id = Var (unmaybe the_arr_id arrAName),
compose_id = Var (unmaybe the_compose_id composeAName),
first_id = Var (unmaybe the_first_id firstAName),
app_id = Var (unmaybe the_app_id appAName),
choice_id = Var (unmaybe the_choice_id choiceAName),
loop_id = Var (unmaybe the_loop_id loopAName)
}) }
where
mk_bind (std_name, expr)
= do { rhs <- dsExpr expr
; id <- newSysLocalDs (exprType rhs)
-- no check needed; these are functions
; return (NonRec id rhs, (std_name, id)) }
unmaybe Nothing name = pprPanic "mkCmdEnv" (text "Not found:" <+> ppr name)
unmaybe (Just id) _ = id
-- returns the result type of a pi-type (that is, a forall or a function)
-- Note that this result type may be ill-scoped.
res_type :: Type -> Type
res_type ty = res_ty
where
(_, res_ty) = splitPiTy ty
check_lev_poly :: Int -- arity
-> Maybe Id -> DsM ()
check_lev_poly _ Nothing = return ()
check_lev_poly arity (Just id)
= dsNoLevPoly (nTimes arity res_type (idType id))
(text "In the result of the function" <+> quotes (ppr id))
-- arr :: forall b c. (b -> c) -> a b c
do_arr :: DsCmdEnv -> Type -> Type -> CoreExpr -> CoreExpr
do_arr ids b_ty c_ty f = mkApps (arr_id ids) [Type b_ty, Type c_ty, f]
-- (>>>) :: forall b c d. a b c -> a c d -> a b d
do_compose :: DsCmdEnv -> Type -> Type -> Type ->
CoreExpr -> CoreExpr -> CoreExpr
do_compose ids b_ty c_ty d_ty f g
= mkApps (compose_id ids) [Type b_ty, Type c_ty, Type d_ty, f, g]
-- first :: forall b c d. a b c -> a (b,d) (c,d)
do_first :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_first ids b_ty c_ty d_ty f
= mkApps (first_id ids) [Type b_ty, Type c_ty, Type d_ty, f]
-- app :: forall b c. a (a b c, b) c
do_app :: DsCmdEnv -> Type -> Type -> CoreExpr
do_app ids b_ty c_ty = mkApps (app_id ids) [Type b_ty, Type c_ty]
-- (|||) :: forall b d c. a b d -> a c d -> a (Either b c) d
-- note the swapping of d and c
do_choice :: DsCmdEnv -> Type -> Type -> Type ->
CoreExpr -> CoreExpr -> CoreExpr
do_choice ids b_ty c_ty d_ty f g
= mkApps (choice_id ids) [Type b_ty, Type d_ty, Type c_ty, f, g]
-- loop :: forall b d c. a (b,d) (c,d) -> a b c
-- note the swapping of d and c
do_loop :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_loop ids b_ty c_ty d_ty f
= mkApps (loop_id ids) [Type b_ty, Type d_ty, Type c_ty, f]
-- premap :: forall b c d. (b -> c) -> a c d -> a b d
-- premap f g = arr f >>> g
do_premap :: DsCmdEnv -> Type -> Type -> Type ->
CoreExpr -> CoreExpr -> CoreExpr
do_premap ids b_ty c_ty d_ty f g
= do_compose ids b_ty c_ty d_ty (do_arr ids b_ty c_ty f) g
mkFailExpr :: HsMatchContext Id -> Type -> DsM CoreExpr
mkFailExpr ctxt ty
= mkErrorAppDs pAT_ERROR_ID ty (matchContextErrString ctxt)
-- construct CoreExpr for \ (a :: a_ty, b :: b_ty) -> a
mkFstExpr :: Type -> Type -> DsM CoreExpr
mkFstExpr a_ty b_ty = do
a_var <- newSysLocalDs a_ty
b_var <- newSysLocalDs b_ty
pair_var <- newSysLocalDs (mkCorePairTy a_ty b_ty)
return (Lam pair_var
(coreCasePair pair_var a_var b_var (Var a_var)))
-- construct CoreExpr for \ (a :: a_ty, b :: b_ty) -> b
mkSndExpr :: Type -> Type -> DsM CoreExpr
mkSndExpr a_ty b_ty = do
a_var <- newSysLocalDs a_ty
b_var <- newSysLocalDs b_ty
pair_var <- newSysLocalDs (mkCorePairTy a_ty b_ty)
return (Lam pair_var
(coreCasePair pair_var a_var b_var (Var b_var)))
{-
Build case analysis of a tuple. This cannot be done in the DsM monad,
because the list of variables is typically not yet defined.
-}
-- coreCaseTuple [u1..] v [x1..xn] body
-- = case v of v { (x1, .., xn) -> body }
-- But the matching may be nested if the tuple is very big
coreCaseTuple :: UniqSupply -> Id -> [Id] -> CoreExpr -> CoreExpr
coreCaseTuple uniqs scrut_var vars body
= mkTupleCase uniqs vars body scrut_var (Var scrut_var)
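-- coreCasePair v x1 x2 body
--   = case v of v { (x1, x2) -> body }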
coreCasePair :: Id -> Id -> Id -> CoreExpr -> CoreExpr
coreCasePair scrut_var var1 var2 body
= Case (Var scrut_var) scrut_var (exprType body)
[(DataAlt (tupleDataCon Boxed 2), [var1, var2], body)]
mkCorePairTy :: Type -> Type -> Type
mkCorePairTy t1 t2 = mkBoxedTupleTy [t1, t2]
mkCorePairExpr :: CoreExpr -> CoreExpr -> CoreExpr
mkCorePairExpr e1 e2 = mkCoreTup [e1, e2]
mkCoreUnitExpr :: CoreExpr
mkCoreUnitExpr = mkCoreTup []
{-
The input is divided into a local environment, which is a flat tuple
(unless it's too big), and a stack, which is a right-nested pair.
In general, the input has the form
((x1,...,xn), (s1,...(sk,())...))
where xi are the environment values, and si the ones on the stack,
with s1 being the "top", the first one to be matched with a lambda.
-}
envStackType :: [Id] -> Type -> Type
envStackType ids stack_ty = mkCorePairTy (mkBigCoreVarTupTy ids) stack_ty
-- splitTypeAt n (t1,... (tn,t)...) = ([t1, ..., tn], t)
splitTypeAt :: Int -> Type -> ([Type], Type)
splitTypeAt n ty
| n == 0 = ([], ty)
| otherwise = case tcTyConAppArgs ty of
[t, ty'] -> let (ts, ty_r) = splitTypeAt (n-1) ty' in (t:ts, ty_r)
_ -> pprPanic "splitTypeAt" (ppr ty)
----------------------------------------------
-- buildEnvStack
--
-- ((x1,...,xn),stk)
buildEnvStack :: [Id] -> Id -> CoreExpr
buildEnvStack env_ids stack_id
= mkCorePairExpr (mkBigCoreVarTup env_ids) (Var stack_id)
----------------------------------------------
-- matchEnvStack
--
-- \ ((x1,...,xn),stk) -> body
-- =>
-- \ pair ->
-- case pair of (tup,stk) ->
-- case tup of (x1,...,xn) ->
-- body
matchEnvStack :: [Id] -- x1..xn
-> Id -- stk
-> CoreExpr -- e
-> DsM CoreExpr
matchEnvStack env_ids stack_id body = do
uniqs <- newUniqueSupply
tup_var <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
let match_env = coreCaseTuple uniqs tup_var env_ids body
pair_id <- newSysLocalDs (mkCorePairTy (idType tup_var) (idType stack_id))
return (Lam pair_id (coreCasePair pair_id tup_var stack_id match_env))
----------------------------------------------
-- matchEnv
--
-- \ (x1,...,xn) -> body
-- =>
-- \ tup ->
-- case tup of (x1,...,xn) ->
-- body
matchEnv :: [Id] -- x1..xn
-> CoreExpr -- e
-> DsM CoreExpr
matchEnv env_ids body = do
uniqs <- newUniqueSupply
tup_id <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
return (Lam tup_id (coreCaseTuple uniqs tup_id env_ids body))
----------------------------------------------
-- matchVarStack
--
-- case (x1, ...(xn, s)...) -> e
-- =>
-- case z0 of (x1,z1) ->
-- case zn-1 of (xn,s) ->
-- e
matchVarStack :: [Id] -> Id -> CoreExpr -> DsM (Id, CoreExpr)
matchVarStack [] stack_id body = return (stack_id, body)
matchVarStack (param_id:param_ids) stack_id body = do
(tail_id, tail_code) <- matchVarStack param_ids stack_id body
pair_id <- newSysLocalDs (mkCorePairTy (idType param_id) (idType tail_id))
return (pair_id, coreCasePair pair_id param_id tail_id tail_code)
mkHsEnvStackExpr :: [Id] -> Id -> LHsExpr GhcTc
mkHsEnvStackExpr env_ids stack_id
= mkLHsTupleExpr [mkLHsVarTuple env_ids, nlHsVar stack_id]
-- Translation of arrow abstraction
-- D; xs |-a c : () --> t' ---> c'
-- --------------------------
-- D |- proc p -> c :: a t t' ---> premap (\ p -> ((xs),())) c'
--
-- where (xs) is the tuple of variables bound by p
dsProcExpr
:: LPat GhcTc
-> LHsCmdTop GhcTc
-> DsM CoreExpr
dsProcExpr pat (L _ (HsCmdTop (CmdTopTc _unitTy cmd_ty ids) cmd)) = do
(meth_binds, meth_ids) <- mkCmdEnv ids
let locals = mkVarSet (collectPatBinders pat)
(core_cmd, _free_vars, env_ids)
<- dsfixCmd meth_ids locals unitTy cmd_ty cmd
let env_ty = mkBigCoreVarTupTy env_ids
let env_stk_ty = mkCorePairTy env_ty unitTy
let env_stk_expr = mkCorePairExpr (mkBigCoreVarTup env_ids) mkCoreUnitExpr
fail_expr <- mkFailExpr ProcExpr env_stk_ty
var <- selectSimpleMatchVarL pat
match_code <- matchSimply (Var var) ProcExpr pat env_stk_expr fail_expr
let pat_ty = hsLPatType pat
let proc_code = do_premap meth_ids pat_ty env_stk_ty cmd_ty
(Lam var match_code)
core_cmd
return (mkLets meth_binds proc_code)
dsProcExpr _ _ = panic "dsProcExpr"
{-
Translation of a command judgement of the form
D; xs |-a c : stk --> t
to an expression e such that
D |- e :: a (xs, stk) t
-}
dsLCmd :: DsCmdEnv -> IdSet -> Type -> Type -> LHsCmd GhcTc -> [Id]
-> DsM (CoreExpr, DIdSet)
dsLCmd ids local_vars stk_ty res_ty cmd env_ids
= dsCmd ids local_vars stk_ty res_ty (unLoc cmd) env_ids
dsCmd :: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this command
-> Type -- type of the stack (right-nested tuple)
-> Type -- return type of the command
-> HsCmd GhcTc -- command to desugar
-> [Id] -- list of vars in the input to this command
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
DIdSet) -- subset of local vars that occur free
-- D |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- -----------------------------
-- D; xs |-a fun -< arg : stk --> t2
--
-- ---> premap (\ ((xs), _stk) -> arg) fun
dsCmd ids local_vars stack_ty res_ty
(HsCmdArrApp arrow_ty arrow arg HsFirstOrderApp _)
env_ids = do
let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
core_arrow <- dsLExprNoLP arrow
core_arg <- dsLExpr arg
stack_id <- newSysLocalDs stack_ty
core_make_arg <- matchEnvStack env_ids stack_id core_arg
return (do_premap ids
(envStackType env_ids stack_ty)
arg_ty
res_ty
core_make_arg
core_arrow,
exprFreeIdsDSet core_arg `uniqDSetIntersectUniqSet` local_vars)
-- D, xs |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- ------------------------------
-- D; xs |-a fun -<< arg : stk --> t2
--
-- ---> premap (\ ((xs), _stk) -> (fun, arg)) app
dsCmd ids local_vars stack_ty res_ty
(HsCmdArrApp arrow_ty arrow arg HsHigherOrderApp _)
env_ids = do
let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
core_arrow <- dsLExpr arrow
core_arg <- dsLExpr arg
stack_id <- newSysLocalDs stack_ty
core_make_pair <- matchEnvStack env_ids stack_id
(mkCorePairExpr core_arrow core_arg)
return (do_premap ids
(envStackType env_ids stack_ty)
(mkCorePairTy arrow_ty arg_ty)
res_ty
core_make_pair
(do_app ids arg_ty res_ty),
(exprsFreeIdsDSet [core_arrow, core_arg])
`uniqDSetIntersectUniqSet` local_vars)
-- D; ys |-a cmd : (t,stk) --> t'
-- D, xs |- exp :: t
-- ------------------------
-- D; xs |-a cmd exp : stk --> t'
--
-- ---> premap (\ ((xs),stk) -> ((ys),(e,stk))) cmd
dsCmd ids local_vars stack_ty res_ty (HsCmdApp _ cmd arg) env_ids = do
core_arg <- dsLExpr arg
let
arg_ty = exprType core_arg
stack_ty' = mkCorePairTy arg_ty stack_ty
(core_cmd, free_vars, env_ids')
<- dsfixCmd ids local_vars stack_ty' res_ty cmd
stack_id <- newSysLocalDs stack_ty
arg_id <- newSysLocalDsNoLP arg_ty
-- push the argument expression onto the stack
let
stack' = mkCorePairExpr (Var arg_id) (Var stack_id)
core_body = bindNonRec arg_id core_arg
(mkCorePairExpr (mkBigCoreVarTup env_ids') stack')
-- match the environment and stack against the input
core_map <- matchEnvStack env_ids stack_id core_body
return (do_premap ids
(envStackType env_ids stack_ty)
(envStackType env_ids' stack_ty')
res_ty
core_map
core_cmd,
free_vars `unionDVarSet`
(exprFreeIdsDSet core_arg `uniqDSetIntersectUniqSet` local_vars))
-- D; ys |-a cmd : stk t'
-- -----------------------------------------------
-- D; xs |-a \ p1 ... pk -> cmd : (t1,...(tk,stk)...) t'
--
-- ---> premap (\ ((xs), (p1, ... (pk,stk)...)) -> ((ys),stk)) cmd
dsCmd ids local_vars stack_ty res_ty
(HsCmdLam _ (MG { mg_alts
= (L _ [L _ (Match { m_pats = pats
, m_grhss = GRHSs _ [L _ (GRHS _ [] body)] _ })]) }))
env_ids = do
let pat_vars = mkVarSet (collectPatsBinders pats)
let
local_vars' = pat_vars `unionVarSet` local_vars
(pat_tys, stack_ty') = splitTypeAt (length pats) stack_ty
(core_body, free_vars, env_ids')
<- dsfixCmd ids local_vars' stack_ty' res_ty body
param_ids <- mapM newSysLocalDsNoLP pat_tys
stack_id' <- newSysLocalDs stack_ty'
-- the expression is built from the inside out, so the actions
-- are presented in reverse order
let
-- build a new environment, plus what's left of the stack
core_expr = buildEnvStack env_ids' stack_id'
in_ty = envStackType env_ids stack_ty
in_ty' = envStackType env_ids' stack_ty'
fail_expr <- mkFailExpr LambdaExpr in_ty'
-- match the patterns against the parameters
match_code <- matchSimplys (map Var param_ids) LambdaExpr pats core_expr
fail_expr
-- match the parameters against the top of the old stack
(stack_id, param_code) <- matchVarStack param_ids stack_id' match_code
-- match the old environment and stack against the input
select_code <- matchEnvStack env_ids stack_id param_code
return (do_premap ids in_ty in_ty' res_ty select_code core_body,
free_vars `uniqDSetMinusUniqSet` pat_vars)
dsCmd ids local_vars stack_ty res_ty (HsCmdPar _ cmd) env_ids
= dsLCmd ids local_vars stack_ty res_ty cmd env_ids
-- D, xs |- e :: Bool
-- D; xs1 |-a c1 : stk --> t
-- D; xs2 |-a c2 : stk --> t
-- ----------------------------------------
-- D; xs |-a if e then c1 else c2 : stk --> t
--
-- ---> premap (\ ((xs),stk) ->
-- if e then Left ((xs1),stk) else Right ((xs2),stk))
-- (c1 ||| c2)
dsCmd ids local_vars stack_ty res_ty (HsCmdIf _ mb_fun cond then_cmd else_cmd)
env_ids = do
core_cond <- dsLExpr cond
(core_then, fvs_then, then_ids)
<- dsfixCmd ids local_vars stack_ty res_ty then_cmd
(core_else, fvs_else, else_ids)
<- dsfixCmd ids local_vars stack_ty res_ty else_cmd
stack_id <- newSysLocalDs stack_ty
either_con <- dsLookupTyCon eitherTyConName
left_con <- dsLookupDataCon leftDataConName
right_con <- dsLookupDataCon rightDataConName
let mk_left_expr ty1 ty2 e = mkCoreConApps left_con [Type ty1,Type ty2, e]
mk_right_expr ty1 ty2 e = mkCoreConApps right_con [Type ty1,Type ty2, e]
in_ty = envStackType env_ids stack_ty
then_ty = envStackType then_ids stack_ty
else_ty = envStackType else_ids stack_ty
sum_ty = mkTyConApp either_con [then_ty, else_ty]
fvs_cond = exprFreeIdsDSet core_cond
`uniqDSetIntersectUniqSet` local_vars
core_left = mk_left_expr then_ty else_ty
(buildEnvStack then_ids stack_id)
core_right = mk_right_expr then_ty else_ty
(buildEnvStack else_ids stack_id)
core_if <- case mb_fun of
Just fun -> do { fun_apps <- dsSyntaxExpr fun
[core_cond, core_left, core_right]
; matchEnvStack env_ids stack_id fun_apps }
Nothing -> matchEnvStack env_ids stack_id $
mkIfThenElse core_cond core_left core_right
return (do_premap ids in_ty sum_ty res_ty
core_if
(do_choice ids then_ty else_ty res_ty core_then core_else),
fvs_cond `unionDVarSet` fvs_then `unionDVarSet` fvs_else)
{-
Case commands are treated in much the same way as if commands
(see above) except that there are more alternatives. For example
case e of { p1 -> c1; p2 -> c2; p3 -> c3 }
is translated to
premap (\ ((xs)*ts) -> case e of
p1 -> (Left (Left (xs1)*ts))
p2 -> Left ((Right (xs2)*ts))
p3 -> Right ((xs3)*ts))
((c1 ||| c2) ||| c3)
The idea is to extract the commands from the case, build a balanced tree
of choices, and replace the commands with expressions that build tagged
tuples, obtaining a case expression that can be desugared normally.
To build all this, we use triples describing segments of the list of
case bodies, containing the following fields:
* a list of expressions of the form (Left|Right)* ((xs)*ts), to be put
into the case replacing the commands
* a sum type that is the common type of these expressions, and also the
input type of the arrow
* a CoreExpr for an arrow built by combining the translated command
bodies with |||.
-}
dsCmd ids local_vars stack_ty res_ty
(HsCmdCase _ exp (MG { mg_alts = L l matches
, mg_ext = MatchGroupTc arg_tys _
, mg_origin = origin }))
env_ids = do
stack_id <- newSysLocalDs stack_ty
-- Extract and desugar the leaf commands in the case, building tuple
-- expressions that will (after tagging) replace these leaves
let
leaves = concatMap leavesMatch matches
make_branch (leaf, bound_vars) = do
(core_leaf, _fvs, leaf_ids)
<- dsfixCmd ids (bound_vars `unionVarSet` local_vars) stack_ty
res_ty leaf
return ([mkHsEnvStackExpr leaf_ids stack_id],
envStackType leaf_ids stack_ty,
core_leaf)
branches <- mapM make_branch leaves
either_con <- dsLookupTyCon eitherTyConName
left_con <- dsLookupDataCon leftDataConName
right_con <- dsLookupDataCon rightDataConName
let
left_id = HsConLikeOut noExtField (RealDataCon left_con)
right_id = HsConLikeOut noExtField (RealDataCon right_con)
left_expr ty1 ty2 e = noLoc $ HsApp noExtField
(noLoc $ mkHsWrap (mkWpTyApps [ty1, ty2]) left_id ) e
right_expr ty1 ty2 e = noLoc $ HsApp noExtField
(noLoc $ mkHsWrap (mkWpTyApps [ty1, ty2]) right_id) e
-- Prefix each tuple with a distinct series of Left's and Right's,
-- in a balanced way, keeping track of the types.
merge_branches (builds1, in_ty1, core_exp1)
(builds2, in_ty2, core_exp2)
= (map (left_expr in_ty1 in_ty2) builds1 ++
map (right_expr in_ty1 in_ty2) builds2,
mkTyConApp either_con [in_ty1, in_ty2],
do_choice ids in_ty1 in_ty2 res_ty core_exp1 core_exp2)
(leaves', sum_ty, core_choices) = foldb merge_branches branches
-- Replace the commands in the case with these tagged tuples,
-- yielding a HsExpr Id we can feed to dsExpr.
(_, matches') = mapAccumL (replaceLeavesMatch res_ty) leaves' matches
in_ty = envStackType env_ids stack_ty
core_body <- dsExpr (HsCase noExtField exp
(MG { mg_alts = L l matches'
, mg_ext = MatchGroupTc arg_tys sum_ty
, mg_origin = origin }))
-- Note that we replace the HsCase result type by sum_ty,
-- which is the type of matches'
core_matches <- matchEnvStack env_ids stack_id core_body
return (do_premap ids in_ty sum_ty res_ty core_matches core_choices,
exprFreeIdsDSet core_body `uniqDSetIntersectUniqSet` local_vars)
-- D; ys |-a cmd : stk --> t
-- ----------------------------------
-- D; xs |-a let binds in cmd : stk --> t
--
-- ---> premap (\ ((xs),stk) -> let binds in ((ys),stk)) c
dsCmd ids local_vars stack_ty res_ty (HsCmdLet _ lbinds@(L _ binds) body)
env_ids = do
let
defined_vars = mkVarSet (collectLocalBinders binds)
local_vars' = defined_vars `unionVarSet` local_vars
(core_body, _free_vars, env_ids')
<- dsfixCmd ids local_vars' stack_ty res_ty body
stack_id <- newSysLocalDs stack_ty
-- build a new environment, plus the stack, using the let bindings
core_binds <- dsLocalBinds lbinds (buildEnvStack env_ids' stack_id)
-- match the old environment and stack against the input
core_map <- matchEnvStack env_ids stack_id core_binds
return (do_premap ids
(envStackType env_ids stack_ty)
(envStackType env_ids' stack_ty)
res_ty
core_map
core_body,
exprFreeIdsDSet core_binds `uniqDSetIntersectUniqSet` local_vars)
-- D; xs |-a ss : t
-- ----------------------------------
-- D; xs |-a do { ss } : () --> t
--
-- ---> premap (\ (env,stk) -> env) c
dsCmd ids local_vars stack_ty res_ty do_block@(HsCmdDo stmts_ty
(L loc stmts))
env_ids = do
putSrcSpanDs loc $
dsNoLevPoly stmts_ty
(text "In the do-command:" <+> ppr do_block)
(core_stmts, env_ids') <- dsCmdDo ids local_vars res_ty stmts env_ids
let env_ty = mkBigCoreVarTupTy env_ids
core_fst <- mkFstExpr env_ty stack_ty
return (do_premap ids
(mkCorePairTy env_ty stack_ty)
env_ty
res_ty
core_fst
core_stmts,
env_ids')
-- D |- e :: forall e. a1 (e,stk1) t1 -> ... an (e,stkn) tn -> a (e,stk) t
-- D; xs |-a ci :: stki --> ti
-- -----------------------------------
-- D; xs |-a (|e c1 ... cn|) :: stk --> t ---> e [t_xs] c1 ... cn
dsCmd _ local_vars _stack_ty _res_ty (HsCmdArrForm _ op _ _ args) env_ids = do
let env_ty = mkBigCoreVarTupTy env_ids
core_op <- dsLExpr op
(core_args, fv_sets) <- mapAndUnzipM (dsTrimCmdArg local_vars env_ids) args
return (mkApps (App core_op (Type env_ty)) core_args,
unionDVarSets fv_sets)
dsCmd ids local_vars stack_ty res_ty (HsCmdWrap _ wrap cmd) env_ids = do
(core_cmd, env_ids') <- dsCmd ids local_vars stack_ty res_ty cmd env_ids
core_wrap <- dsHsWrapper wrap
return (core_wrap core_cmd, env_ids')
dsCmd _ _ _ _ _ c = pprPanic "dsCmd" (ppr c)
-- D; ys |-a c : stk --> t (ys <= xs)
-- ---------------------
-- D; xs |-a c : stk --> t ---> premap (\ ((xs),stk) -> ((ys),stk)) c
dsTrimCmdArg
:: IdSet -- set of local vars available to this command
-> [Id] -- list of vars in the input to this command
-> LHsCmdTop GhcTc -- command argument to desugar
-> DsM (CoreExpr, -- desugared expression
DIdSet) -- subset of local vars that occur free
dsTrimCmdArg local_vars env_ids
(L _ (HsCmdTop
(CmdTopTc stack_ty cmd_ty ids) cmd )) = do
(meth_binds, meth_ids) <- mkCmdEnv ids
(core_cmd, free_vars, env_ids')
<- dsfixCmd meth_ids local_vars stack_ty cmd_ty cmd
stack_id <- newSysLocalDs stack_ty
trim_code
<- matchEnvStack env_ids stack_id (buildEnvStack env_ids' stack_id)
let
in_ty = envStackType env_ids stack_ty
in_ty' = envStackType env_ids' stack_ty
arg_code = if env_ids' == env_ids then core_cmd else
do_premap meth_ids in_ty in_ty' cmd_ty trim_code core_cmd
return (mkLets meth_binds arg_code, free_vars)
dsTrimCmdArg _ _ _ = panic "dsTrimCmdArg"
-- Given D; xs |-a c : stk --> t, builds c with xs fed back.
-- Typically needs to be prefixed with arr (\(p, stk) -> ((xs),stk))
dsfixCmd
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this command
-> Type -- type of the stack (right-nested tuple)
-> Type -- return type of the command
-> LHsCmd GhcTc -- command to desugar
-> DsM (CoreExpr, -- desugared expression
DIdSet, -- subset of local vars that occur free
[Id]) -- the same local vars as a list, fed back
dsfixCmd ids local_vars stk_ty cmd_ty cmd
= do { putSrcSpanDs (getLoc cmd) $ dsNoLevPoly cmd_ty
(text "When desugaring the command:" <+> ppr cmd)
; trimInput (dsLCmd ids local_vars stk_ty cmd_ty cmd) }
-- Feed back the list of local variables actually used in a command,
-- for use as the input tuple of the generated arrow.
trimInput
:: ([Id] -> DsM (CoreExpr, DIdSet))
-> DsM (CoreExpr, -- desugared expression
DIdSet, -- subset of local vars that occur free
[Id]) -- same local vars as a list, fed back to
-- the inner function to form the tuple of
-- inputs to the arrow.
trimInput build_arrow
= fixDs (\ ~(_,_,env_ids) -> do
(core_cmd, free_vars) <- build_arrow env_ids
return (core_cmd, free_vars, dVarSetElems free_vars))
{-
Translation of command judgements of the form
D |-a do { ss } : t
-}
dsCmdDo :: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> Type -- return type of the statement
-> [CmdLStmt GhcTc] -- statements to desugar
-> [Id] -- list of vars in the input to this statement
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
DIdSet) -- subset of local vars that occur free
dsCmdDo _ _ _ [] _ = panic "dsCmdDo"
-- D; xs |-a c : () --> t
-- --------------------------
-- D; xs |-a do { c } : t
--
-- ---> premap (\ (xs) -> ((xs), ())) c
dsCmdDo ids local_vars res_ty [L loc (LastStmt _ body _ _)] env_ids = do
putSrcSpanDs loc $ dsNoLevPoly res_ty
(text "In the command:" <+> ppr body)
(core_body, env_ids') <- dsLCmd ids local_vars unitTy res_ty body env_ids
let env_ty = mkBigCoreVarTupTy env_ids
env_var <- newSysLocalDs env_ty
let core_map = Lam env_var (mkCorePairExpr (Var env_var) mkCoreUnitExpr)
return (do_premap ids
env_ty
(mkCorePairTy env_ty unitTy)
res_ty
core_map
core_body,
env_ids')
dsCmdDo ids local_vars res_ty (stmt:stmts) env_ids = do
let bound_vars = mkVarSet (collectLStmtBinders stmt)
let local_vars' = bound_vars `unionVarSet` local_vars
(core_stmts, _, env_ids') <- trimInput (dsCmdDo ids local_vars' res_ty stmts)
(core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
return (do_compose ids
(mkBigCoreVarTupTy env_ids)
(mkBigCoreVarTupTy env_ids')
res_ty
core_stmt
core_stmts,
fv_stmt)
{-
A statement maps one local environment to another, and is represented
as an arrow from one tuple type to another. A statement sequence is
translated to a composition of such arrows.
-}
dsCmdLStmt :: DsCmdEnv -> IdSet -> [Id] -> CmdLStmt GhcTc -> [Id]
-> DsM (CoreExpr, DIdSet)
dsCmdLStmt ids local_vars out_ids cmd env_ids
= dsCmdStmt ids local_vars out_ids (unLoc cmd) env_ids
dsCmdStmt
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [Id] -- list of vars in the output of this statement
-> CmdStmt GhcTc -- statement to desugar
-> [Id] -- list of vars in the input to this statement
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
DIdSet) -- subset of local vars that occur free
-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t'
-- ------------------------------
-- D; xs |-a do { c; ss } : t'
--
-- ---> premap (\ ((xs)) -> (((xs1),()),(xs')))
-- (first c >>> arr snd) >>> ss
dsCmdStmt ids local_vars out_ids (BodyStmt c_ty cmd _ _) env_ids = do
(core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy c_ty cmd
core_mux <- matchEnv env_ids
(mkCorePairExpr
(mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
(mkBigCoreVarTup out_ids))
let
in_ty = mkBigCoreVarTupTy env_ids
in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
out_ty = mkBigCoreVarTupTy out_ids
before_c_ty = mkCorePairTy in_ty1 out_ty
after_c_ty = mkCorePairTy c_ty out_ty
dsNoLevPoly c_ty empty -- I (Richard E, Dec '16) have no idea what to say here
snd_fn <- mkSndExpr c_ty out_ty
return (do_premap ids in_ty before_c_ty out_ty core_mux $
do_compose ids before_c_ty after_c_ty out_ty
(do_first ids in_ty1 c_ty out_ty core_cmd) $
do_arr ids after_c_ty out_ty snd_fn,
extendDVarSetList fv_cmd out_ids)
-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t' xs2 = xs' - defs(p)
-- -----------------------------------
-- D; xs |-a do { p <- c; ss } : t'
--
-- ---> premap (\ (xs) -> (((xs1),()),(xs2)))
-- (first c >>> arr (\ (p, (xs2)) -> (xs'))) >>> ss
--
-- It would be simpler and more consistent to do this using second,
-- but that's likely to be defined in terms of first.
dsCmdStmt ids local_vars out_ids (BindStmt _ pat cmd _ _) env_ids = do
let pat_ty = hsLPatType pat
(core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy pat_ty cmd
let pat_vars = mkVarSet (collectPatBinders pat)
let
env_ids2 = filterOut (`elemVarSet` pat_vars) out_ids
env_ty2 = mkBigCoreVarTupTy env_ids2
-- multiplexing function
-- \ (xs) -> (((xs1),()),(xs2))
core_mux <- matchEnv env_ids
(mkCorePairExpr
(mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
(mkBigCoreVarTup env_ids2))
-- projection function
-- \ (p, (xs2)) -> (zs)
env_id <- newSysLocalDs env_ty2
uniqs <- newUniqueSupply
let
after_c_ty = mkCorePairTy pat_ty env_ty2
out_ty = mkBigCoreVarTupTy out_ids
body_expr = coreCaseTuple uniqs env_id env_ids2 (mkBigCoreVarTup out_ids)
fail_expr <- mkFailExpr (StmtCtxt DoExpr) out_ty
pat_id <- selectSimpleMatchVarL pat
match_code
<- matchSimply (Var pat_id) (StmtCtxt DoExpr) pat body_expr fail_expr
pair_id <- newSysLocalDs after_c_ty
let
proj_expr = Lam pair_id (coreCasePair pair_id pat_id env_id match_code)
-- put it all together
let
in_ty = mkBigCoreVarTupTy env_ids
in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
in_ty2 = mkBigCoreVarTupTy env_ids2
before_c_ty = mkCorePairTy in_ty1 in_ty2
return (do_premap ids in_ty before_c_ty out_ty core_mux $
do_compose ids before_c_ty after_c_ty out_ty
(do_first ids in_ty1 pat_ty in_ty2 core_cmd) $
do_arr ids after_c_ty out_ty proj_expr,
fv_cmd `unionDVarSet` (mkDVarSet out_ids
`uniqDSetMinusUniqSet` pat_vars))
-- D; xs' |-a do { ss } : t
-- --------------------------------------
-- D; xs |-a do { let binds; ss } : t
--
-- ---> arr (\ (xs) -> let binds in (xs')) >>> ss
dsCmdStmt ids local_vars out_ids (LetStmt _ binds) env_ids = do
-- build a new environment using the let bindings
core_binds <- dsLocalBinds binds (mkBigCoreVarTup out_ids)
-- match the old environment against the input
core_map <- matchEnv env_ids core_binds
return (do_arr ids
(mkBigCoreVarTupTy env_ids)
(mkBigCoreVarTupTy out_ids)
core_map,
exprFreeIdsDSet core_binds `uniqDSetIntersectUniqSet` local_vars)
-- D; ys |-a do { ss; returnA -< ((xs1), (ys2)) } : ...
-- D; xs' |-a do { ss' } : t
-- ------------------------------------
-- D; xs |-a do { rec ss; ss' } : t
--
-- xs1 = xs' /\ defs(ss)
-- xs2 = xs' - defs(ss)
-- ys1 = ys - defs(ss)
-- ys2 = ys /\ defs(ss)
--
-- ---> arr (\(xs) -> ((ys1),(xs2))) >>>
-- first (loop (arr (\((ys1),~(ys2)) -> (ys)) >>> ss)) >>>
-- arr (\((xs1),(xs2)) -> (xs')) >>> ss'
dsCmdStmt ids local_vars out_ids
(RecStmt { recS_stmts = stmts
, recS_later_ids = later_ids, recS_rec_ids = rec_ids
, recS_ext = RecStmtTc { recS_later_rets = later_rets
, recS_rec_rets = rec_rets } })
env_ids = do
let
later_ids_set = mkVarSet later_ids
env2_ids = filterOut (`elemVarSet` later_ids_set) out_ids
env2_id_set = mkDVarSet env2_ids
env2_ty = mkBigCoreVarTupTy env2_ids
-- post_loop_fn = \((later_ids),(env2_ids)) -> (out_ids)
uniqs <- newUniqueSupply
env2_id <- newSysLocalDs env2_ty
let
later_ty = mkBigCoreVarTupTy later_ids
post_pair_ty = mkCorePairTy later_ty env2_ty
post_loop_body = coreCaseTuple uniqs env2_id env2_ids (mkBigCoreVarTup out_ids)
post_loop_fn <- matchEnvStack later_ids env2_id post_loop_body
--- loop (...)
(core_loop, env1_id_set, env1_ids)
<- dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets
-- pre_loop_fn = \(env_ids) -> ((env1_ids),(env2_ids))
let
env1_ty = mkBigCoreVarTupTy env1_ids
pre_pair_ty = mkCorePairTy env1_ty env2_ty
pre_loop_body = mkCorePairExpr (mkBigCoreVarTup env1_ids)
(mkBigCoreVarTup env2_ids)
pre_loop_fn <- matchEnv env_ids pre_loop_body
-- arr pre_loop_fn >>> first (loop (...)) >>> arr post_loop_fn
let
env_ty = mkBigCoreVarTupTy env_ids
out_ty = mkBigCoreVarTupTy out_ids
core_body = do_premap ids env_ty pre_pair_ty out_ty
pre_loop_fn
(do_compose ids pre_pair_ty post_pair_ty out_ty
(do_first ids env1_ty later_ty env2_ty
core_loop)
(do_arr ids post_pair_ty out_ty
post_loop_fn))
return (core_body, env1_id_set `unionDVarSet` env2_id_set)
dsCmdStmt _ _ _ _ s = pprPanic "dsCmdStmt" (ppr s)
-- loop (premap (\ ((env1_ids), ~(rec_ids)) -> (env_ids))
-- (ss >>> arr (\ (out_ids) -> ((later_rets),(rec_rets))))) >>>
dsRecCmd
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [CmdLStmt GhcTc] -- list of statements inside the RecCmd
-> [Id] -- list of vars defined here and used later
-> [HsExpr GhcTc] -- expressions corresponding to later_ids
-> [Id] -- list of vars fed back through the loop
-> [HsExpr GhcTc] -- expressions corresponding to rec_ids
-> DsM (CoreExpr, -- desugared statement
DIdSet, -- subset of local vars that occur free
[Id]) -- same local vars as a list
dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets = do
let
later_id_set = mkVarSet later_ids
rec_id_set = mkVarSet rec_ids
local_vars' = rec_id_set `unionVarSet` later_id_set `unionVarSet` local_vars
-- mk_pair_fn = \ (out_ids) -> ((later_rets),(rec_rets))
core_later_rets <- mapM dsExpr later_rets
core_rec_rets <- mapM dsExpr rec_rets
let
-- possibly polymorphic version of vars of later_ids and rec_ids
out_ids = exprsFreeIdsList (core_later_rets ++ core_rec_rets)
out_ty = mkBigCoreVarTupTy out_ids
later_tuple = mkBigCoreTup core_later_rets
later_ty = mkBigCoreVarTupTy later_ids
rec_tuple = mkBigCoreTup core_rec_rets
rec_ty = mkBigCoreVarTupTy rec_ids
out_pair = mkCorePairExpr later_tuple rec_tuple
out_pair_ty = mkCorePairTy later_ty rec_ty
mk_pair_fn <- matchEnv out_ids out_pair
-- ss
(core_stmts, fv_stmts, env_ids) <- dsfixCmdStmts ids local_vars' out_ids stmts
-- squash_pair_fn = \ ((env1_ids), ~(rec_ids)) -> (env_ids)
rec_id <- newSysLocalDs rec_ty
let
env1_id_set = fv_stmts `uniqDSetMinusUniqSet` rec_id_set
env1_ids = dVarSetElems env1_id_set
env1_ty = mkBigCoreVarTupTy env1_ids
in_pair_ty = mkCorePairTy env1_ty rec_ty
core_body = mkBigCoreTup (map selectVar env_ids)
where
selectVar v
| v `elemVarSet` rec_id_set
= mkTupleSelector rec_ids v rec_id (Var rec_id)
| otherwise = Var v
squash_pair_fn <- matchEnvStack env1_ids rec_id core_body
-- loop (premap squash_pair_fn (ss >>> arr mk_pair_fn))
let
env_ty = mkBigCoreVarTupTy env_ids
core_loop = do_loop ids env1_ty later_ty rec_ty
(do_premap ids in_pair_ty env_ty out_pair_ty
squash_pair_fn
(do_compose ids env_ty out_ty out_pair_ty
core_stmts
(do_arr ids out_ty out_pair_ty mk_pair_fn)))
return (core_loop, env1_id_set, env1_ids)
{-
A sequence of statements (as in a rec) is desugared to an arrow between
two environments (no stack)
-}
dsfixCmdStmts
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [Id] -- output vars of these statements
-> [CmdLStmt GhcTc] -- statements to desugar
-> DsM (CoreExpr, -- desugared expression
DIdSet, -- subset of local vars that occur free
[Id]) -- same local vars as a list
dsfixCmdStmts ids local_vars out_ids stmts
= trimInput (dsCmdStmts ids local_vars out_ids stmts)
-- TODO: Add levity polymorphism check for the resulting expression.
-- But I (Richard E.) don't know enough about arrows to do so.
dsCmdStmts
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [Id] -- output vars of these statements
-> [CmdLStmt GhcTc] -- statements to desugar
-> [Id] -- list of vars in the input to these statements
-> DsM (CoreExpr, -- desugared expression
DIdSet) -- subset of local vars that occur free
dsCmdStmts ids local_vars out_ids [stmt] env_ids
= dsCmdLStmt ids local_vars out_ids stmt env_ids
dsCmdStmts ids local_vars out_ids (stmt:stmts) env_ids = do
let bound_vars = mkVarSet (collectLStmtBinders stmt)
let local_vars' = bound_vars `unionVarSet` local_vars
(core_stmts, _fv_stmts, env_ids') <- dsfixCmdStmts ids local_vars' out_ids stmts
(core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
return (do_compose ids
(mkBigCoreVarTupTy env_ids)
(mkBigCoreVarTupTy env_ids')
(mkBigCoreVarTupTy out_ids)
core_stmt
core_stmts,
fv_stmt)
dsCmdStmts _ _ _ [] _ = panic "dsCmdStmts []"
-- Match a list of expressions against a list of patterns, left-to-right.
matchSimplys :: [CoreExpr] -- Scrutinees
-> HsMatchContext Name -- Match kind
-> [LPat GhcTc] -- Patterns they should match
-> CoreExpr -- Return this if they all match
-> CoreExpr -- Return this if they don't
-> DsM CoreExpr
matchSimplys [] _ctxt [] result_expr _fail_expr = return result_expr
matchSimplys (exp:exps) ctxt (pat:pats) result_expr fail_expr = do
match_code <- matchSimplys exps ctxt pats result_expr fail_expr
matchSimply exp ctxt pat match_code fail_expr
matchSimplys _ _ _ _ _ = panic "matchSimplys"
-- List of leaf expressions, with set of variables bound in each
leavesMatch :: LMatch GhcTc (Located (body GhcTc))
-> [(Located (body GhcTc), IdSet)]
leavesMatch (L _ (Match { m_pats = pats
, m_grhss = GRHSs _ grhss (L _ binds) }))
= let
defined_vars = mkVarSet (collectPatsBinders pats)
`unionVarSet`
mkVarSet (collectLocalBinders binds)
in
[(body,
mkVarSet (collectLStmtsBinders stmts)
`unionVarSet` defined_vars)
| L _ (GRHS _ stmts body) <- grhss]
leavesMatch _ = panic "leavesMatch"
-- Replace the leaf commands in a match
replaceLeavesMatch
:: Type -- new result type
-> [Located (body' GhcTc)] -- replacement leaf expressions of that type
-> LMatch GhcTc (Located (body GhcTc)) -- the matches of a case command
-> ([Located (body' GhcTc)], -- remaining leaf expressions
LMatch GhcTc (Located (body' GhcTc))) -- updated match
replaceLeavesMatch _res_ty leaves
(L loc
match@(Match { m_grhss = GRHSs x grhss binds }))
= let
(leaves', grhss') = mapAccumL replaceLeavesGRHS leaves grhss
in
(leaves', L loc (match { m_ext = noExtField, m_grhss = GRHSs x grhss' binds }))
replaceLeavesMatch _ _ _ = panic "replaceLeavesMatch"
replaceLeavesGRHS
:: [Located (body' GhcTc)] -- replacement leaf expressions of that type
-> LGRHS GhcTc (Located (body GhcTc)) -- rhss of a case command
-> ([Located (body' GhcTc)], -- remaining leaf expressions
LGRHS GhcTc (Located (body' GhcTc))) -- updated GRHS
replaceLeavesGRHS (leaf:leaves) (L loc (GRHS x stmts _))
= (leaves, L loc (GRHS x stmts leaf))
replaceLeavesGRHS [] _ = panic "replaceLeavesGRHS []"
replaceLeavesGRHS _ _ = panic "replaceLeavesGRHS"
-- Balanced fold of a non-empty list.
foldb :: (a -> a -> a) -> [a] -> a
foldb _ [] = error "foldb of empty list"
foldb _ [x] = x
foldb f xs = foldb f (fold_pairs xs)
where
fold_pairs [] = []
fold_pairs [x] = [x]
fold_pairs (x1:x2:xs) = f x1 x2:fold_pairs xs
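-- For example, foldb f [x1,x2,x3,x4] = f (f x1 x2) (f x3 x4).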
{-
Note [Dictionary binders in ConPatOut] See also same Note in GHC.Hs.Utils
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following functions to collect value variables from patterns are
copied from GHC.Hs.Utils, with one change: we also collect the dictionary
bindings (pat_binds) from ConPatOut. We need them for cases like
h :: Arrow a => Int -> a (Int,Int) Int
h x = proc (y,z) -> case compare x y of
GT -> returnA -< z+x
The type checker turns the case into
case compare x y of
GT { p77 = plusInt } -> returnA -< p77 z x
Here p77 is a local binding for the (+) operation.
See comments in GHC.Hs.Utils for why the other version does not include
these bindings.
-}
collectPatBinders :: LPat GhcTc -> [Id]
collectPatBinders pat = collectl pat []
collectPatsBinders :: [LPat GhcTc] -> [Id]
collectPatsBinders pats = foldr collectl [] pats
---------------------
collectl :: LPat GhcTc -> [Id] -> [Id]
-- See Note [Dictionary binders in ConPatOut]
collectl (L _ pat) bndrs
= go pat
where
go (VarPat _ (L _ var)) = var : bndrs
go (WildPat _) = bndrs
go (LazyPat _ pat) = collectl pat bndrs
go (BangPat _ pat) = collectl pat bndrs
go (AsPat _ (L _ a) pat) = a : collectl pat bndrs
go (ParPat _ pat) = collectl pat bndrs
go (ListPat _ pats) = foldr collectl bndrs pats
go (TuplePat _ pats _) = foldr collectl bndrs pats
go (SumPat _ pat _ _) = collectl pat bndrs
go (ConPatIn _ ps) = foldr collectl bndrs (hsConPatArgs ps)
go (ConPatOut {pat_args=ps, pat_binds=ds}) =
collectEvBinders ds
++ foldr collectl bndrs (hsConPatArgs ps)
go (LitPat _ _) = bndrs
go (NPat {}) = bndrs
go (NPlusKPat _ (L _ n) _ _ _ _) = n : bndrs
go (SigPat _ pat _) = collectl pat bndrs
go (CoPat _ _ pat _) = collectl (noLoc pat) bndrs
go (ViewPat _ _ pat) = collectl pat bndrs
go p@(SplicePat {}) = pprPanic "collectl/go" (ppr p)
go p@(XPat {}) = pprPanic "collectl/go" (ppr p)
collectEvBinders :: TcEvBinds -> [Id]
collectEvBinders (EvBinds bs) = foldr add_ev_bndr [] bs
collectEvBinders (TcEvBinds {}) = panic "ToDo: collectEvBinders"
add_ev_bndr :: EvBind -> [Id] -> [Id]
add_ev_bndr (EvBind { eb_lhs = b }) bs | isId b = b:bs
| otherwise = bs
-- A worry: what about coercion variable binders??
collectLStmtsBinders :: [LStmt GhcTc body] -> [Id]
collectLStmtsBinders = concatMap collectLStmtBinders
collectLStmtBinders :: LStmt GhcTc body -> [Id]
collectLStmtBinders = collectStmtBinders . unLoc
collectStmtBinders :: Stmt GhcTc body -> [Id]
collectStmtBinders (RecStmt { recS_later_ids = later_ids }) = later_ids
collectStmtBinders stmt = HsUtils.collectStmtBinders stmt
|
sdiehl/ghc
|
compiler/deSugar/DsArrows.hs
|
Haskell
|
bsd-3-clause
| 49,514
|
module Yawn.Test.BlackBox.ParserTest where
import Test.HUnit
import Yawn.Test.Common
tests :: Test
tests = TestList [
TestLabel "TestSimpleGet" testSimpleGet,
TestLabel "TestSimplePost" testSimplePost,
TestLabel "TestInvalidRequest" testInvalidRequest]
testSimpleGet :: Test
testSimpleGet = TestCase $ do
response <- transmit "GET / HTTP/1.1"
assertEqual "GET /" "HTTP/1.1 200 Ok" response
response2 <- transmit "GET / HTTP/1.0"
assertEqual "GET /" "HTTP/1.0 200 Ok" response2
testSimplePost :: Test
testSimplePost = TestCase $ do
response <- transmit "POST / HTTP/1.1"
assertEqual "POST /" "HTTP/1.1 200 Ok" response
response2 <- transmit "POST / HTTP/1.0"
assertEqual "POST /" "HTTP/1.0 200 Ok" response2
testInvalidRequest :: Test
testInvalidRequest = TestCase $ do
response <- transmit "INVALID / HTTP/1.0"
assertEqual "INVALID /" "HTTP/1.0 400 Bad Request" response
|
ameingast/yawn
|
test/src/Yawn/Test/BlackBox/ParserTest.hs
|
Haskell
|
bsd-3-clause
| 903
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.SSH.Sender (sshSenderTests) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Test.Tasty (TestTree, testGroup)
-----------
-- Tests --
-----------
-- | TODO: tests.
sshSenderTests :: TestTree
sshSenderTests = testGroup "SSH/Sender.hs tests"
[
]
|
cdepillabout/ssh
|
test/Test/SSH/Sender.hs
|
Haskell
|
bsd-3-clause
| 354
|
{-# LANGUAGE QuasiQuotes, TypeFamilies #-}
import Text.Papillon
import Data.Char
import System.Environment
main :: IO ()
main = do
arg : _ <- getArgs
case runError $ expr $ parse arg of
Right (r, _) -> print r
Left _ -> putStrLn "parse error"
[papillon|
op1 :: Int -> Int -> Int
= '*' { (*) }
/ '/' { div }
/ '%' { mod }
;
op2 :: Int -> Int -> Int
= '+' { (+) }
/ '-' { (-) }
;
factor :: Int
= ds:<isDigit>+ { read ds }
/ '(' e:expr ')' { e }
;
term :: Int
= f0:factor fs:(op:op1 f:factor { (`op` f) })*
{ foldl (flip ($)) f0 fs }
;
expr :: Int
= t0:term ts:(op:op2 t:term { (`op` t) })*
{ foldl (flip ($)) t0 ts }
;
|]
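-- Illustrative (not part of the original example): op1 (*, /, %) binds more
-- tightly than op2 (+, -) and both chains fold left-to-right, so running the
-- compiled program with the argument "1+2*3" should print 7.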
|
YoshikuniJujo/papillon
|
examples/arith.hs
|
Haskell
|
bsd-3-clause
| 659
|
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[Simplify]{The main module of the simplifier}
-}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr, simplRules ) where
#include "HsVersions.h"
import DynFlags
import SimplMonad
import Type hiding ( substTy, substTyVar, extendTCvSubst )
import SimplEnv
import SimplUtils
import FamInstEnv ( FamInstEnv )
import Literal ( litIsLifted ) --, mkMachInt ) -- temporarily commented out. See #8326
import Id
import MkId ( seqId, voidPrimId )
import MkCore ( mkImpossibleExpr, castBottomExpr )
import IdInfo
import Name ( Name, mkSystemVarName, isExternalName )
import Coercion hiding ( substCo, substCoVar )
import OptCoercion ( optCoercion )
import FamInstEnv ( topNormaliseType_maybe )
import DataCon ( DataCon, dataConWorkId, dataConRepStrictness
, isMarkedStrict, dataConRepArgTys ) --, dataConTyCon, dataConTag, fIRST_TAG )
--import TyCon ( isEnumerationTyCon ) -- temporarily commented out. See #8326
import CoreMonad ( Tick(..), SimplifierMode(..) )
import CoreSyn
import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd )
import PprCore ( pprCoreExpr )
import CoreUnfold
import CoreUtils
import CoreArity
--import PrimOp ( tagToEnumKey ) -- temporarily commented out. See #8326
import Rules ( mkRuleInfo, lookupRule, getRules )
import TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporarily commented out. See #8326
import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) )
import MonadUtils ( foldlM, mapAccumLM, liftIO )
import Maybes ( orElse )
--import Unique ( hasKey ) -- temporarily commented out. See #8326
import Control.Monad
import Outputable
import FastString
import Pair
import Util
import ErrUtils
{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.hs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
The simplifier used to guarantee that the output had no shadowing, but
it does not do so any more. (Actually, it never did!) The reason is
documented with simplifyArgs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
Many parts of the simplifier return a bunch of "floats" as well as an
expression. This is wrapped as a datatype SimplUtils.FloatsWith.
All "floats" are let-binds, not case-binds, but some non-rec lets may
be unlifted (with RHS ok-for-speculation).
-----------------------------------------
ORGANISATION OF FUNCTIONS
-----------------------------------------
simplTopBinds
- simplify all top-level binders
- for NonRec, call simplRecOrTopPair
- for Rec, call simplRecBind
------------------------------
simplExpr (applied lambda) ==> simplNonRecBind
simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind
simplExpr (Let (Rec ...) ..) ==> simplify binders; simplRecBind
------------------------------
simplRecBind [binders already simplified]
- use simplRecOrTopPair on each pair in turn
simplRecOrTopPair [binder already simplified]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
Returns:
- check for PreInlineUnconditionally
- simplLazyBind
simplNonRecBind
Used for: non-top-level non-recursive bindings
beta reductions (which amount to the same thing)
Because it can deal with strict args, it takes a
"thing-inside" and returns an expression
- check for PreInlineUnconditionally
- simplify binder, including its IdInfo
- if strict binding
simplStrictArg
mkAtomicArgs
completeNonRecX
else
simplLazyBind
addFloats
simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder]
Used for: binding case-binder and constr args in a known-constructor case
- check for PreInlineUnconditionally
- simplify binder
- completeNonRecX
------------------------------
simplLazyBind: [binder already simplified, RHS not]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
non-top-level, but *lazy* non-recursive bindings
[must not be strict or unboxed]
Returns floats + an augmented environment, not an expression
- substituteIdInfo and add result to in-scope
[so that rules are available in rec rhs]
- simplify rhs
- mkAtomicArgs
- float if exposes constructor or PAP
- completeBind
completeNonRecX: [binder and rhs both simplified]
- if the thing needs case binding (unlifted and not ok-for-spec)
build a Case
else
completeBind
addFloats
completeBind: [given a simplified RHS]
[used for both rec and non-rec bindings, top level and not]
- try PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
(a) the right hand side of a let(rec), and
(b) a function argument
in the same way. But not always! In particular, we would
like to leave these arguments exactly as they are, so they
will match a RULE more easily.
f (g x, h x)
g (+ x)
It's harder to make the rule match if we ANF-ise the constructor,
or eta-expand the PAP:
f (let { a = g x; b = h x } in (a,b))
g (\y. + x y)
On the other hand if we see the let-defns
p = (g x, h x)
q = + x
then we *do* want to ANF-ise and eta-expand, so that p and q
can be safely inlined.
Even floating lets out is a bit dubious. For let RHS's we float lets
out if that exposes a value, so that the value can be inlined more vigorously.
For example
r = let x = e in (x,x)
Here, if we float the let out we'll expose a nice constructor. We did experiments
that showed this to be a generally good thing. But it was a bad thing to float
lets out unconditionally, because that meant they got allocated more often.
For function arguments, there's less reason to expose a constructor (it won't
get inlined). Just possibly it might make a rule match, but I'm pretty skeptical.
So for the moment we don't float lets out of function arguments either.
Eta expansion
~~~~~~~~~~~~~~
For eta expansion, we want to catch things like
case e of (a,b) -> \x -> case a of (p,q) -> \y -> r
If the \x was on the RHS of a let, we'd eta expand to bring the two
lambdas together. And in general that's a good thing to do. Perhaps
we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simplTopBinds env0 binds0
= do { -- Put all the top-level binders into scope at the start
-- so that if a transformation rule has unexpectedly brought
-- anything into scope, then we don't get a complaint about that.
-- It's rather as if the top-level binders were imported.
-- See note [Glomming] in OccurAnal.
; env1 <- simplRecBndrs env0 (bindersOfBinds binds0)
; env2 <- simpl_binds env1 binds0
; freeTick SimplifierDone
; return env2 }
where
-- We need to track the zapped top-level binders, because
-- they should have their fragile IdInfo zapped (notably occurrence info)
-- That's why we run down binds and bndrs' simultaneously.
--
simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simpl_binds env [] = return env
simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind
; simpl_binds env' binds }
simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs
simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b)
; simplRecOrTopPair env' TopLevel NonRecursive b b' r }
{-
************************************************************************
* *
\subsection{Lazy bindings}
* *
************************************************************************
simplRecBind is used for
* recursive bindings only
-}
simplRecBind :: SimplEnv -> TopLevelFlag
-> [(InId, InExpr)]
-> SimplM SimplEnv
simplRecBind env0 top_lvl pairs0
= do { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0
; env1 <- go (zapFloats env_with_info) triples
; return (env0 `addRecFloats` env1) }
-- addFloats adds the floats from env1,
-- _and_ updates env0 with the in-scope set from env1
where
add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr))
-- Add the (substituted) rules to the binder
add_rules env (bndr, rhs)
= do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr)
; return (env', (bndr, bndr', rhs)) }
go env [] = return env
go env ((old_bndr, new_bndr, rhs) : pairs)
= do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
; go env' pairs }
{-
simplRecOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
-}
simplRecOrTopPair :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutBndr -> InExpr -- Binder and rhs
-> SimplM SimplEnv -- Returns an env that includes the binding
simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
= do { dflags <- getDynFlags
; trace_bind dflags $
if preInlineUnconditionally dflags env top_lvl old_bndr rhs
-- Check for unconditional inline
then do tick (PreInlineUnconditionally old_bndr)
return (extendIdSubst env old_bndr (mkContEx env rhs))
else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env }
where
trace_bind dflags thing_inside
| not (dopt Opt_D_verbose_core2core dflags)
= thing_inside
| otherwise
= pprTrace "SimplBind" (ppr old_bndr) thing_inside
-- trace_bind emits a trace for each top-level binding, which
-- helps to locate the tracing for inlining and rule firing
{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
* [simplNonRecE] non-top-level *lazy* non-recursive bindings
Nota bene:
1. It assumes that the binder is *already* simplified,
and is in scope, and its IdInfo too, except unfolding
2. It assumes that the binder type is lifted.
3. It does not check for pre-inline-unconditionally;
that should have been done already.
-}
simplLazyBind :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutId -- Binder, both pre-and post simpl
-- The OutId has IdInfo, except arity, unfolding
-> InExpr -> SimplEnv -- The RHS and its environment
-> SimplM SimplEnv
-- Precondition: rhs obeys the let/app invariant
simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
= -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $
do { let rhs_env = rhs_se `setInScope` env
(tvs, body) = case collectTyAndValBinders rhs of
(tvs, [], body)
| surely_not_lam body -> (tvs, body)
_ -> ([], rhs)
surely_not_lam (Lam {}) = False
surely_not_lam (Tick t e)
| not (tickishFloatable t) = surely_not_lam e
-- eta-reduction could float
surely_not_lam _ = True
-- Do not do the "abstract tyvar" thing if there's
-- a lambda inside, because it defeats eta-reduction
-- f = /\a. \x. g a x
-- should eta-reduce.
; (body_env, tvs') <- simplBinders rhs_env tvs
-- See Note [Floating and type abstraction] in SimplUtils
-- Simplify the RHS
; let rhs_cont = mkRhsStop (substTy body_env (exprType body))
; (body_env1, body1) <- simplExprF body_env body rhs_cont
-- ANF-ise a constructor or PAP rhs
; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1
; (env', rhs')
<- if not (doFloatFromRhs top_lvl is_rec False body2 body_env2)
then -- No floating, revert to body1
do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont
; return (env, rhs') }
else if null tvs then -- Simple floating
do { tick LetFloatFromLet
; return (addFloats env body_env2, body2) }
else -- Do type-abstraction first
do { tick LetFloatFromLet
; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2
; rhs' <- mkLam tvs' body3 rhs_cont
; env' <- foldlM (addPolyBind top_lvl) env poly_binds
; return (env', rhs') }
; completeBind env' top_lvl bndr bndr1 rhs' }
{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
-}
simplNonRecX :: SimplEnv
-> InId -- Old binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
simplNonRecX env bndr new_rhs
| isDeadBinder bndr -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p }
= return env -- Here c is dead, and we avoid creating
-- the binding c = (a,b)
| Coercion co <- new_rhs
= return (extendTCvSubst env bndr (mkCoercionTy co))
| otherwise
= do { (env', bndr') <- simplBinder env bndr
; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs }
-- simplNonRecX is only used for NotTopLevel things
completeNonRecX :: TopLevelFlag -> SimplEnv
-> Bool
-> InId -- Old binder
-> OutId -- New binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
= do { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs
; (env2, rhs2) <-
if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1
then do { tick LetFloatFromLet
; return (addFloats env env1, rhs1) } -- Add the floats to the main env
else return (env, wrapFloats env1 rhs1) -- Wrap the floats around the RHS
; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already.
In the cases described by the following comment, postInlineUnconditionally will
catch many of the relevant cases.
-- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
--
-- Similarly, single occurrences can be inlined vigorously
-- e.g. case (f x, g y) of (a,b) -> ....
-- If a,b occur once we can avoid constructing the let binding for them.
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks
-- Consider case I# (quotInt# x y) of
-- I# v -> let w = J# v in ...
-- If we gaily inline (quotInt# x y) for v, we end up building an
-- extra thunk:
-- let w = J# (quotInt# x y) in ...
-- because quotInt# can fail.
| preInlineUnconditionally env NotTopLevel bndr new_rhs
= thing_inside (extendIdSubst env bndr (DoneEx new_rhs))
-}
----------------------------------
prepareRhs takes a putative RHS, checks whether it's a PAP or
constructor application and, if so, converts it to ANF, so that the
resulting thing can be inlined more easily. Thus
x = (f a, g b)
becomes
t1 = f a
t2 = g b
x = (t1,t2)
We also want to deal well with cases like this
v = (f e1 `cast` co) e2
Here we want to make e1,e2 trivial and get
x1 = e1; x2 = e2; v = (f x1 `cast` co) x2
That's what the 'go' loop in prepareRhs does
-}
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS
prepareRhs top_lvl env id (Cast rhs co) -- Note [Float coercions]
| Pair ty1 _ty2 <- coercionKind co -- Do *not* do this if rhs has an unlifted type
, not (isUnLiftedType ty1) -- see Note [Float coercions (unlifted)]
= do { (env', rhs') <- makeTrivialWithInfo top_lvl env sanitised_info rhs
; return (env', Cast rhs' co) }
where
sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info
`setDemandInfo` demandInfo info
info = idInfo id
prepareRhs top_lvl env0 _ rhs0
= do { (_is_exp, env1, rhs1) <- go 0 env0 rhs0
; return (env1, rhs1) }
where
go n_val_args env (Cast rhs co)
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Cast rhs' co) }
go n_val_args env (App fun (Type ty))
= do { (is_exp, env', rhs') <- go n_val_args env fun
; return (is_exp, env', App rhs' (Type ty)) }
go n_val_args env (App fun arg)
= do { (is_exp, env', fun') <- go (n_val_args+1) env fun
; case is_exp of
True -> do { (env'', arg') <- makeTrivial top_lvl env' arg
; return (True, env'', App fun' arg') }
False -> return (False, env, App fun arg) }
go n_val_args env (Var fun)
= return (is_exp, env, Var fun)
where
is_exp = isExpandableApp fun n_val_args -- The fun is a constructor or PAP
-- See Note [CONLIKE pragma] in BasicTypes
-- The definition of is_exp should match that in
-- OccurAnal.occAnalApp
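-- For example (a rough sketch, not a real trace): for a RHS
--     Just (f x)
-- the 'go' loop reaches (Var Just) with n_val_args = 1; is_exp is True
-- because Just is a constructor, so makeTrivial let-binds the argument,
-- giving roughly  t = f x  and the RHS  Just t.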
go n_val_args env (Tick t rhs)
-- We want to be able to float bindings past this
-- tick. Non-scoping ticks don't care.
| tickishScoped t == NoScope
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Tick t rhs') }
-- On the other hand, for scoping ticks we need to be able to
-- copy them on the floats, which in turn is only allowed if
-- we can obtain non-counting ticks.
| not (tickishCounts t) || tickishCanSplit t
= do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs
; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr)
floats' = seFloats $ env `addFloats` mapFloats env' tickIt
; return (is_exp, env' { seFloats = floats' }, Tick t rhs') }
go _ env other
= return (False, env, other)
{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
x = e `cast` co
we'd like to transform it to
x' = e
x = x' `cast` co -- A trivial binding
There's a chance that e will be a constructor application or function, or something
like that, so moving the coercion to the usage site may well cancel the coercions
and lead to further optimisation. Example:
data family T a :: *
data instance T Int = T Int
foo :: Int -> Int -> Int
foo m n = ...
where
x = T m
go 0 = 0
go n = case x of { T m -> go (n-m) }
-- This case should optimise
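For instance (an illustrative sketch, not an actual Core dump): the binding
for x above is really a constructor application under a cast, so the
transformation gives roughly
     x' = <constructor application to m>
     x  = x' `cast` co
and moving the coercion to the use site lets the case in 'go' see a
constructor application through the cast, which is what makes it optimise.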
Note [Preserve strictness when floating coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the Note [Float coercions] transformation, keep the strictness info.
Eg
f = e `cast` co -- f has strictness SSL
When we transform to
f' = e -- f' also has strictness SSL
f = f' `cast` co -- f still has strictness SSL
It's not wrong to drop it on the floor, but better to keep it.
Note [Float coercions (unlifted)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT don't do [Float coercions] if 'e' has an unlifted type.
This *can* happen:
foo :: Int = (error (# Int,Int #) "urk")
`cast` CoUnsafe (# Int,Int #) Int
If we do the makeTrivial thing to the error call, we'll get
foo = case error (# Int,Int #) "urk" of v -> v `cast` ...
But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
-}
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
makeTrivialArg env (ValArg e) = do { (env', e') <- makeTrivial NotTopLevel env e
; return (env', ValArg e') }
makeTrivialArg env arg = return (env, arg) -- CastBy, TyArg
makeTrivial :: TopLevelFlag -> SimplEnv -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Binds the expression to a variable, if it's not trivial, returning the variable
makeTrivial top_lvl env expr = makeTrivialWithInfo top_lvl env vanillaIdInfo expr
makeTrivialWithInfo :: TopLevelFlag -> SimplEnv -> IdInfo
-> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Propagate strictness and demand info to the new binder
-- Note [Preserve strictness when floating coercions]
-- Returned SimplEnv has same substitution as incoming one
makeTrivialWithInfo top_lvl env info expr
| exprIsTrivial expr -- Already trivial
|| not (bindingOk top_lvl expr expr_ty) -- Cannot trivialise
-- See Note [Cannot trivialise]
= return (env, expr)
| otherwise -- See Note [Take care] below
= do { uniq <- getUniqueM
; let name = mkSystemVarName uniq (fsLit "a")
var = mkLocalIdOrCoVarWithInfo name expr_ty info
; env' <- completeNonRecX top_lvl env False var var expr
; expr' <- simplVar env' var
; return (env', expr') }
-- The simplVar is needed because we're constructing a new binding
-- a = rhs
-- And if rhs is of form (rhs1 |> co), then we might get
-- a1 = rhs1
-- a = a1 |> co
-- and now a's RHS is trivial and can be substituted out, and that
-- is what completeNonRecX will do
-- To put it another way, it's as if we'd simplified
-- let var = e in var
where
expr_ty = exprType expr
bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
-- True iff we can have a binding of this expression at this level
-- Precondition: the type is the type of the expression
bindingOk top_lvl _ expr_ty
| isTopLevel top_lvl = not (isUnLiftedType expr_ty)
| otherwise = True
{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: Int -> Addr#
foo :: Bar
foo = Bar (f 3)
Then we can't ANF-ise foo, even though we'd like to, because
we can't make a top-level binding for the Addr# (f 3). And if
so we don't want to turn it into
foo = let x = f 3 in Bar x
because we'll just end up inlining x back, and that makes the
simplifier loop. Better not to ANF-ise it at all.
A case in point is literal strings (a MachStr is not regarded as
trivial):
foo = Ptr "blob"#
We don't want to ANF-ise this.
************************************************************************
* *
\subsection{Completing a lazy binding}
* *
************************************************************************
completeBind
* deals only with Ids, not TyVars
* takes an already-simplified binder and RHS
* is used for both recursive and non-recursive bindings
* is used for both top-level and non-top-level bindings
It does the following:
- tries discarding a dead binding
- tries PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
It does *not* attempt to do let-to-case. Why? Because it is used for
- top-level bindings (when let-to-case is impossible)
- many situations where the "rhs" is known to be a WHNF
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
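For example (a hedged illustration, not from a real trace): given
     let x = (a,b) in fst x
if postInlineUnconditionally decides that x is used once and its RHS is
cheap enough, completeBind discards the binding and extends the substitution
with x :-> (a,b), so the body simplifies as fst (a,b).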
-}
completeBind :: SimplEnv
-> TopLevelFlag -- Flag stuck into unfolding
-> InId -- Old binder
-> OutId -> OutExpr -- New binder and RHS
-> SimplM SimplEnv
-- completeBind may choose to do its work
-- * by extending the substitution (e.g. let x = y in ...)
-- * or by adding to the floats in the envt
--
-- Precondition: rhs obeys the let/app invariant
completeBind env top_lvl old_bndr new_bndr new_rhs
| isCoVar old_bndr
= case new_rhs of
Coercion co -> return (extendTCvSubst env old_bndr (mkCoercionTy co))
_ -> return (addNonRec env new_bndr new_rhs)
| otherwise
= ASSERT( isId new_bndr )
do { let old_info = idInfo old_bndr
old_unf = unfoldingInfo old_info
occ_info = occInfo old_info
-- Do eta-expansion on the RHS of the binding
-- See Note [Eta-expanding at let bindings] in SimplUtils
; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs
-- Simplify the unfolding
; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf
; dflags <- getDynFlags
; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info
final_rhs new_unfolding
-- Inline and discard the binding
then do { tick (PostInlineUnconditionally old_bndr)
; return (extendIdSubst env old_bndr (DoneEx final_rhs)) }
-- Use the substitution to make quite, quite sure that the
-- substitution will happen, since we are going to discard the binding
else
do { let info1 = idInfo new_bndr `setArityInfo` new_arity
-- Unfolding info: Note [Setting the new unfolding]
info2 = info1 `setUnfoldingInfo` new_unfolding
-- Demand info: Note [Setting the demand info]
--
-- We also have to nuke demand info if for some reason
-- eta-expansion *reduces* the arity of the binding to less
-- than that of the strictness sig. This can happen: see Note [Arity decrease].
info3 | isEvaldUnfolding new_unfolding
|| (case strictnessInfo info2 of
StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty)
= zapDemandInfo info2 `orElse` info2
| otherwise
= info2
final_id = new_bndr `setIdInfo` info3
; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $
return (addNonRec env final_id final_rhs) } }
-- The addNonRec adds it to the in-scope set too
------------------------------
addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv
-- Add a new binding to the environment, complete with its unfolding
-- but *do not* do postInlineUnconditionally, because we have already
-- processed some of the scope of the binding
-- We still want the unfolding though. Consider
-- let
-- x = /\a. let y = ... in Just y
-- in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
--
-- INVARIANT: the arity is correct on the incoming binders
addPolyBind top_lvl env (NonRec poly_id rhs)
= do { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding
-- Assumes that poly_id did not have an INLINE prag
-- which is perhaps wrong. ToDo: think about this
; let final_id = setIdInfo poly_id $
idInfo poly_id `setUnfoldingInfo` unfolding
; return (addNonRec env final_id rhs) }
addPolyBind _ env bind@(Rec _)
= return (extendFloats env bind)
-- Hack: letrecs are more awkward, so we extend "by steam"
-- without adding unfoldings etc. At worst this leads to
-- more simplifier iterations
{- Note [Arity decrease]
~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking the arity of a binding should not decrease. But it *can*
legitimately happen because of RULES. Eg
f = g Int
where g has arity 2, will itself have arity 2. But if there's a rewrite rule
g Int --> h
where h has arity 1, then f's arity will decrease. Here's a real-life example,
which is in the output of Specialise:
Rec {
$dm {Arity 2} = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm #-}
dInt = MkD .... opInt ...
opInt {Arity 1} = $dm dInt
$s$dm {Arity 0} = \x. op dInt }
Here opInt has arity 1; but when we apply the rule its arity drops to 0.
That's why Specialise goes to a little trouble to pin the right arity
on specialised functions too.
Note [Setting the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the unfolding is a value, the demand info may
go pear-shaped, so we nuke it. Example:
let x = (a,b) in
case x of (p,q) -> h p q x
Here x is certainly demanded. But after we've nuked
the case, we'll get just
let x = (a,b) in h a b x
and now x is not demanded (I'm assuming h is lazy)
This really happens. Similarly
let f = \x -> e in ...f..f...
After inlining f at some of its call sites the original binding may
(for example) be no longer strictly demanded.
The solution here is a bit ad hoc...
************************************************************************
* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
* *
************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
behaviour as things float out.
To see why it's important to do it after, consider this (real) example:
let t = f x
in fst t
==>
let t = let a = e1
b = e2
in (a,b)
in fst t
==>
let a = e1
b = e2
t = (a,b)
in
a -- Can't inline a this round, cos it appears twice
==>
e1
Each of the ==> steps is a round of simplification. We'd save a
whole round if we float first. This can cascade. Consider
let f = g d
in \x -> ...f...
==>
let f = let d1 = ..d.. in \y -> e
in \x -> ...f...
==>
let d1 = ..d..
in \x -> ...(\y ->e)...
Only in this second round can the \y be applied, and it
might do the same again.
-}
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr = simplExprC env expr (mkBoringStop expr_out_ty)
where
expr_out_ty :: OutType
expr_out_ty = substTy env (exprType expr)
simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
-- Simplify an expression, given a continuation
simplExprC env expr cont
= -- pprTrace "simplExprC" (ppr expr $$ ppr cont {- $$ ppr (seIdSubst env) -} $$ ppr (seFloats env) ) $
do { (env', expr') <- simplExprF (zapFloats env) expr cont
; -- pprTrace "simplExprC ret" (ppr expr $$ ppr expr') $
-- pprTrace "simplExprC ret3" (ppr (seInScope env')) $
-- pprTrace "simplExprC ret4" (ppr (seFloats env')) $
return (wrapFloats env' expr') }
--------------------------------------------------
simplExprF :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF env e cont
= {- pprTrace "simplExprF" (vcat
[ ppr e
, text "cont =" <+> ppr cont
, text "inscope =" <+> ppr (seInScope env)
, text "tvsubst =" <+> ppr (seTvSubst env)
, text "idsubst =" <+> ppr (seIdSubst env)
, text "cvsubst =" <+> ppr (seCvSubst env)
{- , ppr (seFloats env) -}
]) $ -}
simplExprF1 env e cont
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF1 env (Var v) cont = simplIdF env v cont
simplExprF1 env (Lit lit) cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr) cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co) cont = simplCoercionF env co cont
simplExprF1 env (Type ty) cont = ASSERT( contIsRhsOrArg cont )
rebuild env (Type (substTy env ty)) cont
simplExprF1 env (App fun arg) cont
= simplExprF env fun $
case arg of
Type ty -> ApplyToTy { sc_arg_ty = substTy env ty
, sc_hole_ty = substTy env (exprType fun)
, sc_cont = cont }
_ -> ApplyToVal { sc_arg = arg, sc_env = env
, sc_dup = NoDup, sc_cont = cont }
simplExprF1 env expr@(Lam {}) cont
= simplLam env zapped_bndrs body cont
-- The main issue here is under-saturated lambdas
-- (\x1. \x2. e) arg1
-- Here x1 might have "occurs-once" occ-info, because occ-info
-- is computed assuming that a group of lambdas is applied
-- all at once. If there are too few args, we must zap the
-- occ-info, UNLESS the remaining binders are one-shot
where
(bndrs, body) = collectBinders expr
zapped_bndrs | need_to_zap = map zap bndrs
| otherwise = bndrs
need_to_zap = any zappable_bndr (drop n_args bndrs)
n_args = countArgs cont
-- NB: countArgs counts all the args (incl type args)
-- and likewise drop counts all binders (incl type lambdas)
zappable_bndr b = isId b && not (isOneShotBndr b)
zap b | isTyVar b = b
| otherwise = zapLamIdInfo b
simplExprF1 env (Case scrut bndr _ alts) cont
= simplExprF env scrut (Select { sc_dup = NoDup, sc_bndr = bndr
, sc_alts = alts
, sc_env = env, sc_cont = cont })
simplExprF1 env (Let (Rec pairs) body) cont
= do { env' <- simplRecBndrs env (map fst pairs)
-- NB: bndrs' don't have unfoldings or rules
-- We add them as we go down
; env'' <- simplRecBind env' NotTopLevel pairs
; simplExprF env'' body cont }
simplExprF1 env (Let (NonRec bndr rhs) body) cont
= simplNonRecE env bndr (rhs, env) ([], body) cont
---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
-- Kept monadic just so we can do the seqType
simplType env ty
= -- pprTrace "simplType" (ppr ty $$ ppr (seTvSubst env)) $
seqType new_ty `seq` return new_ty
where
new_ty = substTy env ty
---------------------------------
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCoercionF env co cont
= do { co' <- simplCoercion env co
; rebuild env (Coercion co') cont }
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
simplCoercion env co
= let opt_co = optCoercion (getTCvSubst env) co
in seqCo opt_co `seq` return opt_co
-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
-- A scoped tick turns into a continuation, so that we can spot
-- (scc t (\x . e)) in simplLam and eliminate the scc. If we didn't do
-- it this way, then it would take two passes of the simplifier to
-- reduce ((scc t (\x . e)) e').
-- NB, don't do this with counting ticks, because if the expr is
-- bottom, then rebuildCall will discard the continuation.
-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
-- scc<f> case expensive of p -> e
-- becomes
-- case expensive of p -> scc<f> e
--
-- So I'm disabling this for now. It just means we will do more
-- simplifier iterations than necessary in some cases.
-- | tickishScoped tickish && not (tickishCounts tickish)
-- = simplExprF env expr (TickIt tickish cont)
-- For unscoped or soft-scoped ticks, we are allowed to float in new
-- cost, so we simply push the continuation inside the tick. This
-- has the effect of moving the tick to the outside of a case or
-- application context, allowing the normal case and application
-- optimisations to fire.
| tickish `tickishScopesLike` SoftScope
= do { (env', expr') <- simplExprF env expr cont
; return (env', mkTick tickish expr')
}
-- Push tick inside if the context looks like this will allow us to
-- do a case-of-case - see Note [case-of-scc-of-case]
| Select {} <- cont, Just expr' <- push_tick_inside
= simplExprF env expr' cont
-- We don't want to move the tick, but we might still want to allow
-- floats to pass through with appropriate wrapping (or not, see
-- wrap_floats below)
--- | not (tickishCounts tickish) || tickishCanSplit tickish
-- = wrap_floats
| otherwise
= no_floating_past_tick
where
-- Try to push tick inside a case, see Note [case-of-scc-of-case].
push_tick_inside =
case expr0 of
Case scrut bndr ty alts
-> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
_other -> Nothing
where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
movable t = not (tickishCounts t) ||
t `tickishScopesLike` NoScope ||
tickishCanSplit t
tickScrut e = foldr mkTick e ticks
-- Alternatives get annotated with all ticks that scope in some way,
-- but we don't want to count entries.
tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
ts_scope = map mkNoCount $
filter (not . (`tickishScopesLike` NoScope)) ticks
no_floating_past_tick =
do { let (inc,outc) = splitCont cont
; (env', expr') <- simplExprF (zapFloats env) expr inc
; let tickish' = simplTickish env tickish
; (env'', expr'') <- rebuild (zapFloats env')
(wrapFloats env' expr')
(TickIt tickish' outc)
; return (addFloats env env'', expr'')
}
-- Alternative version that wraps outgoing floats with the tick. This
-- results in ticks being duplicated, as we don't make any attempt to
-- eliminate the tick if we re-inline the binding (because the tick
-- semantics allows unrestricted inlining of HNFs), so I'm not doing
-- this any more. FloatOut will catch any real opportunities for
-- floating.
--
-- wrap_floats =
-- do { let (inc,outc) = splitCont cont
-- ; (env', expr') <- simplExprF (zapFloats env) expr inc
-- ; let tickish' = simplTickish env tickish
-- ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
-- mkTick (mkNoCount tickish') rhs)
-- -- when wrapping a float with mkTick, we better zap the Id's
-- -- strictness info and arity, because it might be wrong now.
-- ; let env'' = addFloats env (mapFloats env' wrap_float)
-- ; rebuild env'' expr' (TickIt tickish' outc)
-- }
simplTickish env tickish
| Breakpoint n ids <- tickish
= Breakpoint n (map (getDoneId . substId env) ids)
| otherwise = tickish
-- Push type application and coercion inside a tick
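-- For example (a rough sketch): for a continuation of shape
--     ApplyToTy ty (CastIt co (Select ...))
-- splitCont returns (ApplyToTy ty (CastIt co (Stop ...)), Select ...),
-- so the type application and the cast stay inside the tick while the
-- case moves outside it.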
splitCont :: SimplCont -> (SimplCont, SimplCont)
splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
where (inc,outc) = splitCont tail
splitCont (CastIt co c) = (CastIt co inc, outc)
where (inc,outc) = splitCont c
splitCont other = (mkBoringStop (contHoleType other), other)
getDoneId (DoneId id) = id
getDoneId (DoneEx e) = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
getDoneId other = pprPanic "getDoneId" (ppr other)
-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way. For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
-- case scctick<code_string.r1>
-- case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
-- of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
-- (ww1_s13f, ww2_s13g, ww3_s13h)
-- }
-- of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
-- tick<code_string.f1>
-- (ww_s12Y,
-- ww1_s12Z,
-- PTTrees.PT
-- @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
-- }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening). But the scctick is in the way - we need to push it
-- inside to expose the case-of-case. So we perform this
-- transformation on the inner case:
--
-- scctick c (case e of { p1 -> e1; ...; pn -> en })
-- ==>
-- case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting: for
-- now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
* *
\subsection{The main rebuilder}
* *
************************************************************************
-}
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
rebuild env expr cont
= case cont of
Stop {} -> return (env, expr)
TickIt t cont -> rebuild env (mkTick t expr) cont
CastIt co cont -> rebuild env (mkCast expr co) cont
-- NB: mkCast implements the (Coercion co |> g) optimisation
Select { sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont }
-> rebuildCase (se `setFloats` env) expr bndr alts cont
StrictArg info _ cont -> rebuildCall env (info `addValArgTo` expr) cont
StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
-- expr satisfies let/app since it started life
-- in a call to simplNonRecE
; simplLam env' bs body cont }
ApplyToTy { sc_arg_ty = ty, sc_cont = cont}
-> rebuild env (App expr (Type ty)) cont
ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
-- See Note [Avoid redundant simplification]
| isSimplified dup_flag -> rebuild env (App expr arg) cont
| otherwise -> do { arg' <- simplExpr (se `setInScope` env) arg
; rebuild env (App expr arg') cont }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
-}
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
= do { co1 <- simplCoercion env co0
; cont1 <- addCoerce co1 cont0
; simplExprF env body cont1 }
where
addCoerce co cont = add_coerce co (coercionKind co) cont
add_coerce _co (Pair s1 k1) cont -- co :: ty~ty
| s1 `eqType` k1 = return cont -- is a no-op
add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
| (Pair _l1 t1) <- coercionKind co2
-- e |> (g1 :: S1~L) |> (g2 :: L~T1)
-- ==>
-- e, if S1=T1
-- e |> (g1 . g2 :: S1~T1) otherwise
--
-- For example, in the initial form of a worker
-- we may find (coerce T (coerce S (\x.e))) y
-- and we'd like it to simplify to e[y/x] in one round
-- of simplification
, s1 `eqType` t1 = return cont -- The coerces cancel out
| otherwise = return (CastIt (mkTransCo co1 co2) cont)
add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
-- (f |> g) ty ---> (f ty) |> (g @ ty)
-- This implements the PushT rule from the paper
| isForAllTy s1s2
= do { cont' <- addCoerce new_cast tail
; return (cont { sc_cont = cont' }) }
where
new_cast = mkInstCo co (mkNomReflCo arg_ty)
add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup, sc_cont = cont })
| isFunTy s1s2 -- This implements the Push rule from the paper
, isFunTy t1t2 -- Check t1t2 to ensure 'arg' is a value arg
-- (e |> (g :: s1s2 ~ t1->t2)) f
-- ===>
-- (e (f |> (arg g :: t1~s1))
-- |> (res g :: s2->t2)
--
-- t1t2 must be a function type, t1->t2, because it's applied
-- to something but s1s2 might conceivably not be
--
-- When we build the ApplyTo we can't mix the out-types
-- with the InExpr in the argument, so we simply substitute
-- to make it all consistent. It's a bit messy.
-- But it isn't a common case.
--
-- Example of use: Trac #995
= do { (dup', arg_se', arg') <- simplArg env dup arg_se arg
; cont' <- addCoerce co2 cont
; return (ApplyToVal { sc_arg = mkCast arg' (mkSymCo co1)
, sc_env = arg_se'
, sc_dup = dup'
, sc_cont = cont' }) }
where
-- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
-- t2 ~ s2 with left and right on the curried form:
-- (->) t1 t2 ~ (->) s1 s2
[co1, co2] = decomposeCo 2 co
add_coerce co _ cont = return (CastIt co cont)
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
-> SimplM (DupFlag, StaticEnv, OutExpr)
simplArg env dup_flag arg_env arg
| isSimplified dup_flag
= return (dup_flag, arg_env, arg)
| otherwise
= do { arg' <- simplExpr (arg_env `setInScope` env) arg
; return (Simplified, zapSubstEnv arg_env, arg') }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
$j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding for b lets
us optimise e better. However when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
let x = blah in
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
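That is (a sketch of the preferred outcome), we would rather end up with
     let x = blah in
     let b = y
     in ...b...
with b's stale unfolding discarded, so that b can simply be substituted by y.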
-}
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplLam env [] body cont = simplExprF env body cont
-- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplLam (extendTCvSubst env bndr arg_ty) bndrs body cont }
simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplNonRecE env' (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
where
env' | Coercion co <- arg
= extendTCvSubst env bndr (mkCoercionTy co)
| otherwise
= env
zap_unfolding bndr -- See Note [Zap unfolding when beta-reducing]
| isId bndr, isStableUnfolding (realIdUnfolding bndr)
= setIdUnfolding bndr NoUnfolding
| otherwise = bndr
-- discard a non-counting tick on a lambda. This may change the
-- cost attribution slightly (moving the allocation of the
-- lambda elsewhere), but we don't care: optimisation changes
-- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
| not (tickishCounts tickish)
= simplLam env bndrs body cont
-- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
= do { (env', bndrs') <- simplLamBndrs env bndrs
; body' <- simplExpr env' body
; new_lam <- mkLam bndrs' body' cont
; rebuild env' new_lam cont }
simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplLamBndrs env bndrs = mapAccumLM simplLamBndr env bndrs
-------------
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
-- Used for lambda binders. These sometimes have unfoldings added by
-- the worker/wrapper pass that must be preserved, because they can't
-- be reconstructed from context. For example:
-- f x = case x of (a,b) -> fw a b x
-- fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr env bndr
| isId bndr && hasSomeUnfolding old_unf -- Special case
= do { (env1, bndr1) <- simplBinder env bndr
; unf' <- simplUnfolding env1 NotTopLevel bndr old_unf
; let bndr2 = bndr1 `setIdUnfolding` unf'
; return (modifyInScope env1 bndr2, bndr2) }
| otherwise
= simplBinder env bndr -- Normal case
where
old_unf = idUnfolding bndr
------------------
simplNonRecE :: SimplEnv
-> InBndr -- The binder
-> (InExpr, SimplEnv) -- Rhs of binding (or arg of lambda)
-> ([InBndr], InExpr) -- Body of the let/lambda
-- \xs.e
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
-- simplNonRecE is used for
-- * non-top-level non-recursive lets in expressions
-- * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
-- Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why? Because of the binder-occ-info-zapping done before
-- the call to simplLam in simplExprF (Lam ...)
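-- For example (a rough sketch): beta-reducing (\x y. e) arg1 via simplLam
-- reaches simplNonRecE with bndr = x, (rhs, rhs_se) = (arg1, <arg env>) and
-- (bndrs, body) = ([y], e); once x is dealt with we loop back to simplLam
-- for the remaining binder y.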
-- First deal with type applications and type lets
-- (/\a. e) (Type ty) and (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
= ASSERT( isTyVar bndr )
do { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
; simplLam (extendTCvSubst env bndr ty_arg') bndrs body cont }
simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
= do dflags <- getDynFlags
case () of
_ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
-> do { tick (PreInlineUnconditionally bndr)
; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }
| isStrictId bndr -- Includes coercions
-> simplExprF (rhs_se `setFloats` env) rhs
(StrictBind bndr bndrs body env cont)
| otherwise
-> ASSERT( not (isTyVar bndr) )
do { (env1, bndr1) <- simplNonRecBndr env bndr
; (env2, bndr2) <- addBndrRules env1 bndr bndr1
; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
; simplLam env3 bndrs body cont }
{-
************************************************************************
* *
Variables
* *
************************************************************************
-}
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment
simplVar env var
| isTyVar var = return (Type (substTyVar env var))
| isCoVar var = return (Coercion (substCoVar env var))
| otherwise
= case substId env var of
DoneId var1 -> return (Var var1)
DoneEx e -> return e
ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplIdF env var cont
= case substId env var of
DoneEx e -> simplExprF (zapSubstEnv env) e cont
ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
DoneId var1 -> completeCall env var1 cont
-- Note [zapSubstEnv]
-- The template is already simplified, so don't re-substitute.
-- This is VITAL. Consider
-- let x = e in
-- let y = \z -> ...x... in
-- \ x -> ...y...
-- We'll clone the inner \x, adding x->x' in the id_subst
-- Then when we inline y, we must *not* replace x by x' in
-- the inlined copy!!
---------------------------------------------------------
-- Dealing with a call site
completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
completeCall env var cont
= do { ------------- Try inlining ----------------
dflags <- getDynFlags
; let (lone_variable, arg_infos, call_cont) = contArgs cont
n_val_args = length arg_infos
interesting_cont = interestingCallContext call_cont
unfolding = activeUnfolding env var
maybe_inline = callSiteInline dflags var unfolding
lone_variable arg_infos interesting_cont
; case maybe_inline of {
Just expr -- There is an inlining!
-> do { checkedTick (UnfoldingDone var)
; dump_inline dflags expr cont
; simplExprF (zapSubstEnv env) expr cont }
; Nothing -> do -- No inlining!
{ rule_base <- getSimplRules
; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
; rebuildCall env info cont
}}}
where
dump_inline dflags unfolding cont
| not (dopt Opt_D_dump_inlinings dflags) = return ()
| not (dopt Opt_D_verbose_core2core dflags)
= when (isExternalName (idName var)) $
liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done:", nest 4 (ppr var)]
| otherwise
= liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done: " <> ppr var,
nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
text "Cont: " <+> ppr cont])]
rebuildCall :: SimplEnv
-> ArgInfo
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
-- When we run out of strictness args, it means
-- that the call is definitely bottom; see SimplUtils.mkArgInfo
-- Then we want to discard the entire strict continuation. E.g.
-- * case (error "hello") of { ... }
-- * (error "Hello") arg
-- * f (error "Hello") where f is strict
-- etc
-- Then, especially in the first of these cases, we'd like to discard
-- the continuation, leaving just the bottoming expression. But the
-- type might not be right, so we may have to add a coerce.
| not (contIsTrivial cont) -- Only do this if there is a non-trivial
= return (env, castBottomExpr res cont_ty) -- continuation to discard, else we do it
where -- again and again!
res = argInfoExpr fun rev_args
cont_ty = contResultType cont
rebuildCall env info (CastIt co cont)
= rebuildCall env (addCastTo info co) cont
rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= rebuildCall env (info `addTyArgTo` arg_ty) cont
rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
, ai_strs = str:strs, ai_discs = disc:discs })
(ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup_flag, sc_cont = cont })
| isSimplified dup_flag -- See Note [Avoid redundant simplification]
= rebuildCall env (addValArgTo info' arg) cont
| str -- Strict argument
= -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
simplExprF (arg_se `setFloats` env) arg
(StrictArg info' cci cont)
-- Note [Shadowing]
| otherwise -- Lazy argument
-- DO NOT float anything outside, hence simplExprC
-- There is no benefit (unlike in a let-binding), and we'd
-- have to be very careful about bogus strictness through
-- floating a demanded let.
= do { arg' <- simplExprC (arg_se `setInScope` env) arg
(mkLazyArgStop (funArgTy fun_ty) cci)
; rebuildCall env (addValArgTo info' arg') cont }
where
info' = info { ai_strs = strs, ai_discs = discs }
cci | encl_rules = RuleArgCtxt
| disc > 0 = DiscArgCtxt -- Be keener here
| otherwise = BoringCtxt -- Nothing interesting
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
| null rules
= rebuild env (argInfoExpr fun rev_args) cont -- No rules, common case
| otherwise
= do { -- We've accumulated a simplified call in <fun,rev_args>
-- so try rewrite rules; see Note [RULEs apply to simplified arguments]
-- See also Note [Rules for recursive functions]
; let env' = zapSubstEnv env -- See Note [zapSubstEnv];
-- and NB that 'rev_args' are all fully simplified
; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
; case mb_rule of {
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
-- Rules don't match
; Nothing -> rebuild env (argInfoExpr fun rev_args) cont -- No rules
} }
{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass. Consider
{-# RULES g (h x) = k x
f (k x) = x #-}
...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match. This
makes a particularly big difference when superclass selectors are involved:
op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.
Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments. An important example is that of
(>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing. So we mark them as Simplified to avoid
re-simplifying them.
Note [Shadowing]
~~~~~~~~~~~~~~~~
This part of the simplifier may break the no-shadowing invariant
Consider
f (...(\a -> e)...) (case y of (a,b) -> e')
where f is strict in its second arg
If we simplify the innermost one first we get (...(\a -> e)...)
Simplifying the second arg makes us float the case out, so we end up with
case y of (a,b) -> f (...(\a -> e)...) e'
So the output does not have the no-shadowing invariant. However, there is
no danger of getting name-capture, because when the first arg was simplified
we used an in-scope set that at least mentioned all the variables free in its
static environment, and that is enough.
We can't just do innermost first, or we'd end up with a dual problem:
case x of (a,b) -> f e (...(\a -> e')...)
I spent hours trying to recover the no-shadowing invariant, but I just could
not think of an elegant way to do it. The simplifier is already knee-deep in
continuations. We have to keep the right in-scope set around; AND we have
to get the effect that finding (error "foo") in a strict arg position will
discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
************************************************************************
* *
Rewrite rules
* *
************************************************************************
-}
tryRules :: SimplEnv -> [CoreRule]
-> Id -> [ArgSpec] -> SimplCont
-> SimplM (Maybe (CoreExpr, SimplCont))
-- The SimplEnv already has zapSubstEnv applied to it
tryRules env rules fn args call_cont
| null rules
= return Nothing
{- Disabled until we fix #8326
| fn `hasKey` tagToEnumKey -- See Note [Optimising tagToEnum#]
, [_type_arg, val_arg] <- args
, Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
, isDeadBinder bndr
= do { dflags <- getDynFlags
; let enum_to_tag :: CoreAlt -> CoreAlt
-- Takes K -> e into tagK# -> e
-- where tagK# is the tag of constructor K
enum_to_tag (DataAlt con, [], rhs)
= ASSERT( isEnumerationTyCon (dataConTyCon con) )
(LitAlt tag, [], rhs)
where
tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)
new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
new_bndr = setIdType bndr intPrimTy
-- The binder is dead, but should have the right type
; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
| otherwise
= do { dflags <- getDynFlags
; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
fn (argInfoAppArgs args) rules of {
Nothing -> return Nothing ; -- No rule matches
Just (rule, rule_rhs) ->
do { checkedTick (RuleFired (ru_name rule))
; let cont' = pushSimplifiedArgs env
(drop (ruleArity rule) args)
call_cont
-- (ruleArity rule) says how many args the rule consumed
; dump dflags rule rule_rhs
; return (Just (rule_rhs, cont')) }}}
where
dump dflags rule rule_rhs
| dopt Opt_D_dump_rule_rewrites dflags
= log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
[ text "Rule:" <+> ftext (ru_name rule)
, text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
, text "After: " <+> pprCoreExpr rule_rhs
, text "Cont: " <+> ppr call_cont ]
| dopt Opt_D_dump_rule_firings dflags
= log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
ftext (ru_name rule)
| otherwise
= return ()
log_rule dflags flag hdr details
= liftIO . dumpSDoc dflags alwaysQualify flag "" $
sep [text hdr, nest 4 details]
{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
data Foo = A | B | C
Then we want to transform
case tagToEnum# x of ==> case x of
A -> e1 DEFAULT -> e1
B -> e2 1# -> e2
C -> e3 2# -> e3
thereby getting rid of the tagToEnum# altogether. If there was a DEFAULT
alternative we retain it (remember it comes first). If not the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT. Otherwise Lint complains that the new case is not exhaustive.
See #8317.
Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
RULE: f (g x) y = x+y
Eqn: f a y = a-y
But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence
************************************************************************
* *
Rebuilding a case expression
* *
************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:
case x# of ===> let y# = x# in e
y# -> e
(when x#, y# are of primitive type, of course). We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match. For example:
case x of
0# -> ...
DEFAULT -> ...(case x of
0# -> ...
DEFAULT -> ...) ...
Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case. This
really only shows up in eliminating error-checking code.
Note that SimplUtils.mkCase combines identical RHSs. So
case e of ===> case e of DEFAULT -> r
True -> r
False -> r
Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
case ==# a# b# of { True -> x; False -> x }
to just
x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)
Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:
case e of r ===> let r = e in ...r...
_ -> ...r...
(a) 'e' is already evaluated (it may be so if e is a variable)
Specifically we check (exprIsHNF e). In this case
we can just allocate the WHNF directly with a let.
or
(b) 'x' is not used at all and e is ok-for-speculation
The ok-for-spec bit checks that we don't lose any
exceptions or divergence.
NB: it'd be *sound* to switch from case to let if the
scrutinee was not yet WHNF but was guaranteed to
converge; but sticking with case means we won't build a
thunk
or
(c) 'x' is used strictly in the body, and 'e' is a variable
Then we can just substitute 'e' for 'x' in the body.
See Note [Eliminating redundant seqs]
For (b), the "not used at all" test is important. Consider
case (case a ># b of { True -> (p,q); False -> (q,p) }) of
r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
let r = case a ># b of { True -> (p,q); False -> (q,p) }
in blah
because that builds an unnecessary thunk.
Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
(...x...)
Note that this can change the error behaviour. For example, we might
transform
case x of { _ -> error "bad" }
--> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
Just for reference, the original code (added Jan 13) looked like this:
|| case_bndr_evald_next rhs
case_bndr_evald_next :: CoreExpr -> Bool
-- See Note [Case binder next]
case_bndr_evald_next (Var v) = v == case_bndr
case_bndr_evald_next (Cast e _) = case_bndr_evald_next e
case_bndr_evald_next (App e _) = case_bndr_evald_next e
case_bndr_evald_next (Case e _ _ _) = case_bndr_evald_next e
case_bndr_evald_next _ = False
(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)
Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
.....(a +# b)....
If we have
case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.
This really isn't a big deal, I think.
Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider: test :: Integer -> IO ()
test = print
Turns out that this compiles to:
Print.test
= \ eta :: Integer
eta1 :: Void# ->
case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
case hPutStr stdout
(PrelNum.jtos eta ($w[] @ Char))
eta1
of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}
Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
f x y = if x < 0 then jtos x
else if y==0 then "" else jtos x
At a particular call site we have (f v 1). So we inline to get
if v < 0 then jtos v
else if 1==0 then "" else jtos v
Now simplify the 1==0 conditional:
if v<0 then jtos v else jtos v
Now common-up the two branches of the case:
case (v<0) of DEFAULT -> jtos v
Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}
---------------------------------------------------------
-- Eliminate the case if possible
rebuildCase, reallyRebuildCase
:: SimplEnv
-> OutExpr -- Scrutinee
-> InId -- Case binder
-> [InAlt] -- Alternatives (increasing order)
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
--------------------------------------------------
-- 1. Eliminate the case if there's a known constructor
--------------------------------------------------
rebuildCase env scrut case_bndr alts cont
| Lit lit <- scrut -- No need for same treatment as constructors
-- because literals are inlined more vigorously
, not (litIsLifted lit)
= do { tick (KnownBranch case_bndr)
; case findAlt (LitAlt lit) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (_, bs, rhs) -> simple_rhs bs rhs }
| Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
-- Works when the scrutinee is a variable with a known unfolding
-- as well as when it's an explicit constructor application
= do { tick (KnownBranch case_bndr)
; case findAlt (DataAlt con) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
Just (_, bs, rhs) -> knownCon env scrut con ty_args other_args
case_bndr bs rhs cont
}
where
simple_rhs bs rhs = ASSERT( null bs )
do { env' <- simplNonRecX env case_bndr scrut
-- scrut is a constructor application,
-- hence satisfies let/app invariant
; simplExprF env' rhs cont }
--------------------------------------------------
-- 2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------
rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
-- See if we can get rid of the case altogether
-- See Note [Case elimination]
-- mkCase made sure that if all the alternatives are equal,
-- then there is now only one (DEFAULT) rhs
-- 2a. Dropping the case altogether, if
-- a) it binds nothing (so it's really just a 'seq')
-- b) evaluating the scrutinee has no side effects
| is_plain_seq
, exprOkForSideEffects scrut
-- The entire case is dead, so we can drop it
-- if the scrutinee converges without having imperative
-- side effects or raising a Haskell exception
-- See Note [PrimOp can_fail and has_side_effects] in PrimOp
= simplExprF env rhs cont
-- 2b. Turn the case into a let, if
-- a) it binds only the case-binder
-- b) unlifted case: the scrutinee is ok-for-speculation
-- lifted case: the scrutinee is in HNF (or will later be demanded)
| all_dead_bndrs
, if is_unlifted
then exprOkForSpeculation scrut -- See Note [Case elimination: unlifted case]
else exprIsHNF scrut -- See Note [Case elimination: lifted case]
|| scrut_is_demanded_var scrut
= do { tick (CaseElim case_bndr)
; env' <- simplNonRecX env case_bndr scrut
; simplExprF env' rhs cont }
-- 2c. Try the seq rules if
-- a) it binds only the case binder
-- b) a rule for seq applies
-- See Note [User-defined RULES for seq] in MkId
| is_plain_seq
= do { let scrut_ty = exprType scrut
rhs_ty = substTy env (exprType rhs)
out_args = [ TyArg { as_arg_ty = scrut_ty
, as_hole_ty = seq_id_ty }
, TyArg { as_arg_ty = rhs_ty
, as_hole_ty = piResultTy seq_id_ty scrut_ty }
, ValArg scrut]
rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
, sc_env = env, sc_cont = cont }
env' = zapSubstEnv env
-- Lazily evaluated, so we don't do most of this
; rule_base <- getSimplRules
; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
; case mb_rule of
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
Nothing -> reallyRebuildCase env scrut case_bndr alts cont }
where
is_unlifted = isUnLiftedType (idType case_bndr)
all_dead_bndrs = all isDeadBinder bndrs -- bndrs are [InId]
is_plain_seq = all_dead_bndrs && isDeadBinder case_bndr -- Evaluation *only* for effect
seq_id_ty = idType seqId
scrut_is_demanded_var :: CoreExpr -> Bool
-- See Note [Eliminating redundant seqs]
scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
scrut_is_demanded_var (Var _) = isStrictDmd (idDemandInfo case_bndr)
scrut_is_demanded_var _ = False
rebuildCase env scrut case_bndr alts cont
= reallyRebuildCase env scrut case_bndr alts cont
--------------------------------------------------
-- 3. Catch-all case
--------------------------------------------------
reallyRebuildCase env scrut case_bndr alts cont
= do { -- Prepare the continuation;
-- The new subst_env is in place
(env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- Simplify the alternatives
; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont
; dflags <- getDynFlags
; let alts_ty' = contResultType dup_cont
; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'
-- Notice that rebuild gets the in-scope set from env', not alt_env
-- (which in any case is only built in simplAlts)
-- The case binder does *not* scope over the whole returned case-expression
; rebuild env' case_expr nodup_cont }
{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.
Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].
Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
case <any> of x { (a,b) ->
case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
let x = (a,b)
after the outer case, and that makes (a,b) alive. At least we do unless
the case binder is guaranteed dead.
Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives. Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee]
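For example (an illustrative sketch):
case x of y { (a,b) -> rhs }
Even when a and b look dead in rhs, a variable scrutinee x gets an
unfolding mentioning a and b (see Note [Add unfolding for scrutinee]),
so stale occurrence info on those binders would be wrong.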
Note [Improving seq]
~~~~~~~~~~~~~~~~~~~
Consider
type family F :: * -> *
type instance F Int = Int
... case e of x { DEFAULT -> rhs } ...
where x::F Int. Then we'd like to rewrite (F Int) to Int, getting
case e `cast` co of x'::Int
I# x# -> let x = x' `cast` sym co
in rhs
so that 'rhs' can take advantage of the form of x'.
Notice that Note [Case of cast] (in OccurAnal) may then apply to the result.
Nota Bene: We only do the [Improving seq] transformation if the
case binder 'x' is actually used in the rhs; that is, if the case
is *not* a *pure* seq.
a) There is no point in adding the cast to a pure seq.
b) There is a good reason not to: doing so would interfere
with seq rules (Note [Built-in RULES for seq] in MkId).
In particular, this [Improving seq] thing *adds* a cast
while [Built-in RULES for seq] *removes* one, so they
just flip-flop.
You might worry about
case v of x { __DEFAULT ->
... case (v `cast` co) of y { I# -> ... }}
This is a pure seq (since x is unused), so [Improving seq] won't happen.
But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get
case v of x { __DEFAULT ->
... case (x `cast` co) of y { I# -> ... }}
Now the outer case is not a pure seq, so [Improving seq] will happen,
and then the inner case will disappear.
The need for [Improving seq] showed up in Roman's experiments. Example:
foo :: F Int -> Int -> Int
foo t n = t `seq` bar n
where
bar 0 = 0
bar n = bar (n - case t of TI i -> i)
Here we'd like to avoid repeatedly evaluating t inside the loop, by
taking advantage of the `seq`.
At one point I did this transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
-}
simplAlts :: SimplEnv
-> OutExpr
-> InId -- Case binder
-> [InAlt] -- Non-empty
-> SimplCont
-> SimplM (OutExpr, OutId, [OutAlt]) -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, if none are possible
simplAlts env scrut case_bndr alts cont'
= do { let env0 = zapFloats env
; (env1, case_bndr1) <- simplBinder env0 case_bndr
; fam_envs <- getFamEnvs
; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
case_bndr case_bndr1 alts
; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
-- NB: it's possible that the returned in_alts is empty: this is handled
-- by the caller (rebuildCase) in the missingAlt function
; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
return (scrut', case_bndr', alts') }
------------------------------------
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
-> OutExpr -> InId -> OutId -> [InAlt]
-> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
| not (isDeadBinder case_bndr) -- Not a pure seq! See Note [Improving seq]
, Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
= do { case_bndr2 <- newId (fsLit "nt") ty2
; let rhs = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
env2 = extendIdSubst env case_bndr rhs
; return (env2, scrut `Cast` co, case_bndr2) }
improveSeq _ env scrut _ case_bndr1 _
= return (env, scrut, case_bndr1)
------------------------------------
simplAlt :: SimplEnv
-> Maybe OutExpr -- The scrutinee
-> [AltCon] -- These constructors can't be present when
-- matching the DEFAULT alternative
-> OutId -- The case binder
-> SimplCont
-> InAlt
-> SimplM OutAlt
simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
= ASSERT( null bndrs )
do { let env' = addBinderUnfolding env case_bndr'
(mkOtherCon imposs_deflt_cons)
-- Record the constructors that the case-binder *can't* be.
; rhs' <- simplExprC env' rhs cont'
; return (DEFAULT, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
= ASSERT( null bndrs )
do { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
; rhs' <- simplExprC env' rhs cont'
; return (LitAlt lit, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
= do { -- Deal with the pattern-bound variables
-- Mark the ones that are in ! positions in the
-- data constructor as certainly-evaluated.
-- NB: simplLamBinders preserves this eval info
; let vs_with_evals = add_evals (dataConRepStrictness con)
; (env', vs') <- simplLamBndrs env vs_with_evals
-- Bind the case-binder to (con args)
; let inst_tys' = tyConAppArgs (idType case_bndr')
con_app :: OutExpr
con_app = mkConApp2 con inst_tys' vs'
; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
; rhs' <- simplExprC env'' rhs cont'
; return (DataAlt con, vs', rhs') }
where
-- add_evals records the evaluated-ness of the bound variables of
-- a case pattern. This is *important*. Consider
-- data T = T !Int !Int
--
-- case x of { T a b -> T (a+1) b }
--
-- We really must record that b is already evaluated so that we don't
-- go and re-evaluate it when constructing the result.
-- See Note [Data-con worker strictness] in MkId.hs
add_evals the_strs
= go vs the_strs
where
go [] [] = []
go (v:vs') strs | isTyVar v = v : go vs' strs
go (v:vs') (str:strs)
| isMarkedStrict str = eval v : go vs' strs
| otherwise = zap v : go vs' strs
go _ _ = pprPanic "cat_evals"
(ppr con $$
ppr vs $$
ppr_with_length the_strs $$
ppr_with_length (dataConRepArgTys con) $$
ppr_with_length (dataConRepStrictness con))
where
ppr_with_length list
= ppr list <+> parens (text "length =" <+> ppr (length list))
-- NB: If this panic triggers, note that
-- NoStrictnessMark doesn't print!
zap v = zapIdOccInfo v -- See Note [Case alternative occ info]
eval v = zap v `setIdUnfolding` evaldUnfolding
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
addAltUnfoldings env scrut case_bndr con_app
= do { dflags <- getDynFlags
; let con_app_unf = mkSimpleUnfolding dflags con_app
env1 = addBinderUnfolding env case_bndr con_app_unf
-- See Note [Add unfolding for scrutinee]
env2 = case scrut of
Just (Var v) -> addBinderUnfolding env1 v con_app_unf
Just (Cast (Var v) co) -> addBinderUnfolding env1 v $
mkSimpleUnfolding dflags (Cast con_app (mkSymCo co))
_ -> env1
; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
; return env2 }
addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
addBinderUnfolding env bndr unf
| debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
= WARN( not (eqType (idType bndr) (exprType tmpl)),
ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
modifyInScope env (bndr `setIdUnfolding` unf)
| otherwise
= modifyInScope env (bndr `setIdUnfolding` unf)
zapBndrOccInfo :: Bool -> Id -> Id
-- Consider case e of b { (a,b) -> ... }
-- Then if we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b
zapBndrOccInfo keep_occ_info pat_id
| keep_occ_info = pat_id
| otherwise = zapIdOccInfo pat_id
{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences;
See Note [Binder swap] in OccAnal.
BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt. Here's why
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...). But y gets
the unfolding (a,b), and *that* mentions b. If f has a RULE
RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'. It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.
Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr
HOWEVER, given
case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2. Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.
So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
************************************************************************
* *
\subsection{Known constructor}
* *
************************************************************************
We are a bit careful with occurrence info. Here's an example
(\x* -> case x of (a*, b) -> f a) (h v, e)
where the * means "occurs once". This effectively becomes
case (h v, e) of (a*, b) -> f a
and then
let a* = h v; b = e in f a
and then
f (h v)
All this should happen in one sweep.
-}
knownCon :: SimplEnv
-> OutExpr -- The scrutinee
-> DataCon -> [OutType] -> [OutExpr] -- The scrutinee (in pieces)
-> InId -> [InBndr] -> InExpr -- The alternative
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
= do { env' <- bind_args env bs dc_args
; env'' <- bind_case_bndr env'
; simplExprF env'' rhs cont }
where
zap_occ = zapBndrOccInfo (isDeadBinder bndr) -- bndr is an InId
-- Ugh!
bind_args env' [] _ = return env'
bind_args env' (b:bs') (Type ty : args)
= ASSERT( isTyVar b )
bind_args (extendTCvSubst env' b ty) bs' args
bind_args env' (b:bs') (Coercion co : args)
= ASSERT( isCoVar b )
bind_args (extendTCvSubst env' b (mkCoercionTy co)) bs' args
bind_args env' (b:bs') (arg : args)
= ASSERT( isId b )
do { let b' = zap_occ b
-- Note that the binder might be "dead", because it doesn't
-- occur in the RHS; and simplNonRecX may therefore discard
-- it via postInlineUnconditionally.
-- Nevertheless we must keep it if the case-binder is alive,
-- because it may be used in the con_app. See Note [knownCon occ info]
; env'' <- simplNonRecX env' b' arg -- arg satisfies let/app invariant
; bind_args env'' bs' args }
bind_args _ _ _ =
pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
text "scrut:" <+> ppr scrut
-- It's useful to bind bndr to scrut, rather than to a fresh
-- binding x = Con arg1 .. argn
-- because very often the scrut is a variable, so we avoid
-- creating, and then subsequently eliminating, a let-binding
-- BUT, if scrut is a not a variable, we must be careful
-- about duplicating the arg redexes; in that case, make
-- a new con-app from the args
bind_case_bndr env
| isDeadBinder bndr = return env
| exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
| otherwise = do { dc_args <- mapM (simplVar env) bs
-- dc_ty_args are already OutTypes,
-- but bs are InBndrs
; let con_app = Var (dataConWorkId dc)
`mkTyApps` dc_ty_args
`mkApps` dc_args
; simplNonRecX env bndr con_app }
-------------------
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- This isn't strictly an error, although it is unusual.
-- It's possible that the simplifier might "see" that
-- an inner case has no accessible alternatives before
-- it "sees" that the entire branch of an outer case is
-- inaccessible. So we simply put an error case here instead.
missingAlt env case_bndr _ cont
= WARN( True, text "missingAlt" <+> ppr case_bndr )
return (env, mkImpossibleExpr (contResultType cont))
{-
************************************************************************
* *
\subsection{Duplicating continuations}
* *
************************************************************************
-}
prepareCaseCont :: SimplEnv
-> [InAlt] -> SimplCont
-> SimplM (SimplEnv,
SimplCont, -- Dupable part
SimplCont) -- Non-dupable part
-- We are considering
-- K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
-- a) Kdup can be duplicated
-- b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
-- Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }) ]
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable
prepareCaseCont env alts cont
| not (sm_case_case (getMode env)) = return (env, mkBoringStop (contHoleType cont), cont)
| not (many_alts alts) = return (env, cont, mkBoringStop (contResultType cont))
| otherwise = mkDupableCont env cont
where
many_alts :: [InAlt] -> Bool -- True iff strictly > 1 non-bottom alternative
many_alts [] = False -- See Note [Bottom alternatives]
many_alts [_] = False
many_alts (alt:alts)
| is_bot_alt alt = many_alts alts
| otherwise = not (all is_bot_alt alts)
is_bot_alt (_,_,rhs) = exprIsBottom rhs
{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
case (case x of { A -> error .. ; B -> e; C -> error .. })
of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
-}
mkDupableCont :: SimplEnv -> SimplCont
-> SimplM (SimplEnv, SimplCont, SimplCont)
mkDupableCont env cont
| contIsDupable cont
= return (env, cont, mkBoringStop (contResultType cont))
mkDupableCont _ (Stop {}) = panic "mkDupableCont" -- Handled by previous eqn
mkDupableCont env (CastIt ty cont)
= do { (env', dup, nodup) <- mkDupableCont env cont
; return (env', CastIt ty dup, nodup) }
-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt{})
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env cont@(StrictBind {})
= return (env, mkBoringStop (contHoleType cont), cont)
-- See Note [Duplicating StrictBind]
mkDupableCont env (StrictArg info cci cont)
-- See Note [Duplicating StrictArg]
= do { (env', dup, nodup) <- mkDupableCont env cont
; (env'', args') <- mapAccumLM makeTrivialArg env' (ai_args info)
; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }
mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
= do { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }
mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
= -- e.g. [...hole...] (...arg...)
-- ==>
-- let a = ...arg...
-- in [...hole...] a
do { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
; (_, se', arg') <- simplArg env' dup se arg
; (env'', arg'') <- makeTrivial NotTopLevel env' arg'
; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
, sc_dup = OkToDup, sc_cont = dup_cont }
; return (env'', app_cont, nodup_cont) }
mkDupableCont env cont@(Select { sc_bndr = case_bndr, sc_alts = [(_, bs, _rhs)] })
-- See Note [Single-alternative case]
-- | not (exprIsDupable rhs && contIsDupable case_cont)
-- | not (isDeadBinder case_bndr)
| all isDeadBinder bs -- InIds
&& not (isUnLiftedType (idType case_bndr))
-- Note [Single-alternative-unlifted]
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env (Select { sc_bndr = case_bndr, sc_alts = alts
, sc_env = se, sc_cont = cont })
= -- e.g. (case [...hole...] of { pi -> ei })
-- ===>
-- let ji = \xij -> ei
-- in case [...hole...] of { pi -> ji xij }
do { tick (CaseOfCase case_bndr)
; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- NB: We call prepareCaseCont here. If there is only one
-- alternative, then dup_cont may be big, but that's ok
-- because we push it into the single alternative, and then
-- use mkDupableAlt to turn that simplified alternative into
-- a join point if it's too big to duplicate.
-- And this is important: see Note [Fusing case continuations]
; let alt_env = se `setInScope` env'
; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
-- Safe to say that there are no handled-cons for the DEFAULT case
-- NB: simplBinder does not zap deadness occ-info, so
-- a dead case_bndr' will still advertise its deadness
-- This is really important because in
-- case e of b { (# p,q #) -> ... }
-- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
-- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
-- In the new alts we build, we have the new case binder, so it must retain
-- its deadness.
-- NB: we don't use alt_env further; it has the substEnv for
-- the alternatives, and we don't want that
; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
; return (env'', -- Note [Duplicated env]
Select { sc_dup = OkToDup
, sc_bndr = case_bndr', sc_alts = alts''
, sc_env = zapSubstEnv env''
, sc_cont = mkBoringStop (contHoleType nodup_cont) },
nodup_cont) }
mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
-> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives
mkDupableAlts env case_bndr' the_alts
= go env the_alts
where
go env0 [] = return (env0, [])
go env0 (alt:alts)
= do { (env1, alt') <- mkDupableAlt env0 case_bndr' alt
; (env2, alts') <- go env1 alts
; return (env2, alt' : alts' ) }
mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr)
-> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
mkDupableAlt env case_bndr (con, bndrs', rhs') = do
dflags <- getDynFlags
if exprIsDupable dflags rhs' -- Note [Small alternative rhs]
then return (env, (con, bndrs', rhs'))
else
do { let rhs_ty' = exprType rhs'
scrut_ty = idType case_bndr
case_bndr_w_unf
= case con of
DEFAULT -> case_bndr
DataAlt dc -> setIdUnfolding case_bndr unf
where
-- See Note [Case binders and join points]
unf = mkInlineUnfolding Nothing rhs
rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'
LitAlt {} -> WARN( True, text "mkDupableAlt"
<+> ppr case_bndr <+> ppr con )
case_bndr
-- The case binder is alive but trivial, so why has
-- it not been substituted away?
used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
| otherwise = bndrs' ++ [case_bndr_w_unf]
abstract_over bndr
| isTyVar bndr = True -- Abstract over all type variables just in case
| otherwise = not (isDeadBinder bndr)
-- The deadness info on the new Ids is preserved by simplBinders
; (final_bndrs', final_args) -- Note [Join point abstraction]
<- if (any isId used_bndrs')
then return (used_bndrs', varsToCoreExprs used_bndrs')
else do { rw_id <- newId (fsLit "w") voidPrimTy
; return ([setOneShotLambda rw_id], [Var voidPrimId]) }
; join_bndr <- newId (fsLit "$j") (mkPiTypes final_bndrs' rhs_ty')
-- Note [Funky mkPiTypes]
; let -- We make the lambdas into one-shot-lambdas. The
-- join point is sure to be applied at most once, and doing so
-- prevents the body of the join point being floated out by
-- the full laziness pass
really_final_bndrs = map one_shot final_bndrs'
one_shot v | isId v = setOneShotLambda v
| otherwise = v
join_rhs = mkLams really_final_bndrs rhs'
join_arity = exprArity join_rhs
join_call = mkApps (Var join_bndr) final_args
; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
; return (env', (con, bndrs', join_call)) }
-- See Note [Duplicated env]
{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative. That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk
splitting] in WorkWrap):
let
x* = case (case v of {pn -> rn}) of
I# a -> I# a
in body
The simplifier will find
(Var v) with continuation
Select (pn -> rn) (
Select [I# a -> I# a] (
StrictBind body Stop
So we'll call mkDupableCont on
Select [I# a -> I# a] (StrictBind body Stop)
There is just one alternative in the first Select, so we want to
simplify the rhs (I# a) with continuation (StrictBind body Stop)
Supposing that body is big, we end up with
let $j a = <let x = I# a in body>
in case v of { pn -> case rn of
I# a -> $j a }
This is just what we want because the rn produces a box that
the case rn cancels with.
See Trac #4957 for a fuller example.
Note [Case binders and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
case (case .. ) of c {
I# c# -> ....c....
If we make a join point with c but not c# we get
$j = \c -> ....c....
But if later inlining scrutinises the c, thus
$j = \c -> ... case c of { I# y -> ... } ...
we won't see that 'c' has already been scrutinised. This actually
happens in the 'tabulate' function in wave4main, and makes a significant
difference to allocation.
An alternative plan is this:
$j = \c# -> let c = I# c# in ...c....
but that is bad if 'c' is *not* later scrutinised.
So instead we do both: we pass 'c' and 'c#' , and record in c's inlining
(a stable unfolding) that it's really I# c#, thus
$j = \c# -> \c[=I# c#] -> ...c....
Absence analysis may later discard 'c'.
NB: take great care when doing strictness analysis;
see Note [Lamba-bound unfoldings] in DmdAnal.
Also note that we can still end up passing stuff that isn't used. Before
strictness analysis we have
let $j x y c{=(x,y)} = (h c, ...)
in ...
After strictness analysis we see that h is strict, so we end up with
let $j x y c{=(x,y)} = ($wh x y, ...)
and c is unused.
Note [Duplicated env]
~~~~~~~~~~~~~~~~~~~~~
Some of the alternatives are simplified, but have not been turned into a join point.
So they *must* have a zapped subst-env. So we can't use completeNonRecX to
bind the join point, because it might do PostInlineUnconditionally, and
we'd lose that when zapping the subst-env. We could have a per-alt subst-env,
but zapping it (as we do in mkDupableCont, the Select case) is safe, and
at worst delays the join-point inlining.
Note [Small alternative rhs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is worth checking for a small RHS because otherwise we
get extra let bindings that may cause an extra iteration of the simplifier to
inline them back in place. Quite often the rhs is just a variable or constructor.
The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra
iterations because the version with the let bindings looked big, and so wasn't
inlined, but after the join points had been inlined it looked smaller, and so
was inlined.
NB: we have to check the size of rhs', not rhs.
Duplicating a small InAlt might invalidate occurrence information
However, if it *is* dupable, we return the *un* simplified alternative,
because otherwise we'd need to pair it up with an empty subst-env....
but we only have one env shared between all the alts.
(Remember we must zap the subst-env before re-simplifying something).
Rather than do this we simply agree to re-simplify the original (small) thing later.
Note [Funky mkPiTypes]
~~~~~~~~~~~~~~~~~~~~~~
Notice the funky mkPiTypes. If the constructor has existentials
it's possible that the join point will be abstracted over
type variables as well as term variables.
Example: Suppose we have
data T = forall t. C [t]
Then faced with
case (case e of ...) of
C t xs::[t] -> rhs
We get the join point
let j :: forall t. [t] -> ...
j = /\t \xs::[t] -> rhs
in
case (case e of ...) of
C t xs::[t] -> j t xs
Note [Join point abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points always have at least one value argument,
for several reasons
* If we try to lift a primitive-typed something out
for let-binding-purposes, we will *caseify* it (!),
with potentially-disastrous strictness results. So
instead we turn it into a function: \v -> e
where v::Void#. The value passed to this function is void,
which generates (almost) no code.
* CPR. We used to say "&& isUnLiftedType rhs_ty'" here, but now
we make the join point into a function whenever used_bndrs'
is empty. This makes the join-point more CPR friendly.
Consider: let j = if .. then I# 3 else I# 4
in case .. of { A -> j; B -> j; C -> ... }
Now CPR doesn't w/w j because it's a thunk, so
that means that the enclosing function can't w/w either,
which is a lose. Here's the example that happened in practice:
kgmod :: Int -> Int -> Int
kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0
then 78
else 5
* Let-no-escape. We want a join point to turn into a let-no-escape
so that it is implemented as a jump, and one of the conditions
for LNE is that it's not updatable. In CoreToStg, see
Note [What is a non-escaping let]
* Floating. Since a join point will be entered once, no sharing is
gained by floating out, but something might be lost by doing
so because it might be allocated.
I have seen a case alternative like this:
True -> \v -> ...
It's a bit silly to add the realWorld dummy arg in this case, making
$j = \s v -> ...
True -> $j s
(the \v alone is enough to make CPR happy) but I think it's rare
There's a slight infelicity here: we pass the overall
case_bndr to all the join points if it's used in *any* RHS,
because we don't know its usage in each RHS separately
Note [Duplicating StrictArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original plan had (where E is a big argument)
e.g. f E [..hole..]
==> let $j = \a -> f E a
in $j [..hole..]
But this is terrible! Here's an example:
&& E (case x of { T -> F; F -> T })
Now, && is strict so we end up simplifying the case with
an ArgOf continuation. If we let-bind it, we get
let $j = \v -> && E v
in simplExpr (case x of { T -> F; F -> T })
(ArgOf (\r -> $j r))
And after simplifying more we get
let $j = \v -> && E v
in case x of { T -> $j F; F -> $j T }
Which is a Very Bad Thing
What we do now is this
f E [..hole..]
==> let a = E
in f a [..hole..]
Now if the thing in the hole is a case expression (which is when
we'll call mkDupableCont), we'll push the function call into the
branches, which is what we want. Now RULES for f may fire, and
call-pattern specialisation. Here's an example from Trac #3116
go (n+1) (case l of
1 -> bs'
_ -> Chunk p fpc (o+1) (l-1) bs')
If we can push the call for 'go' inside the case, we get
call-pattern specialisation for 'go', which is *crucial* for
this program.
Here is the (&&) example:
&& E (case x of { T -> F; F -> T })
==> let a = E in
case x of { T -> && a F; F -> && a T }
Much better!
Notice that
* Arguments to f *after* the strict one are handled by
the ApplyToVal case of mkDupableCont. Eg
f [..hole..] E
* We can only do the let-binding of E because the function
part of a StrictArg continuation is an explicit syntax
tree. In earlier versions we represented it as a function
(CoreExpr -> CoreExpr) which we couldn't take apart.
Do *not* duplicate StrictBind and StrictArg continuations. We gain
nothing by propagating them into the expressions, and we do lose a
lot.
The desire not to duplicate is the entire reason that
mkDupableCont returns a pair of continuations.
Note [Duplicating StrictBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike StrictArg, there doesn't seem anything to gain from
duplicating a StrictBind continuation, so we don't.
Note [Single-alternative cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This case is just like the ArgOf case. Here's an example:
data T a = MkT !a
...(MkT (abs x))...
Then we get
case (case x of I# x' ->
case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
Because the (case x) has only one alternative, we'll transform to
case x of I# x' ->
case (case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
But now we do *NOT* want to make a join point etc, giving
case x of I# x' ->
let $j = \y -> MkT y
in case x' <# 0# of
True -> $j (I# (negate# x'))
False -> $j (I# x')
In this case the $j will inline again, but suppose there was a big
strict computation enclosing the original call to MkT. Then, it won't
"see" the MkT any more, because it's big and won't get duplicated.
And, what is worse, nothing was gained by the case-of-case transform.
So, in circumstances like these, we don't want to build join points
and push the outer case into the branches of the inner one. Instead,
don't duplicate the continuation.
When should we use this strategy? We should not use it on *every*
single-alternative case:
e.g. case (case ....) of (a,b) -> (# a,b #)
Here we must push the outer case into the inner one!
Other choices:
* Match [(DEFAULT,_,_)], but in the common case of Int,
the alternative-filling-in code turned the outer case into
case (...) of y { I# _ -> MkT y }
* Match on single alternative plus (not (isDeadBinder case_bndr))
Rationale: pushing the case inwards won't eliminate the construction.
But there's a risk of
case (...) of y { (a,b) -> let z=(a,b) in ... }
Now y looks dead, but it'll come alive again. Still, this
seems like the best option at the moment.
* Match on single alternative plus (all (isDeadBinder bndrs))
Rationale: this is essentially seq.
* Match when the rhs is *not* duplicable, and hence would lead to a
join point. This catches the disaster-case above. We can test
the *un-simplified* rhs, which is fine. It might get bigger or
smaller after simplification; if it gets smaller, this case might
fire next time round. NB also that we must test contIsDupable
case_cont *too*, because case_cont might be big!
HOWEVER: I found that this version doesn't work well, because
we can get let x = case (...) of { small } in ...case x...
When x is inlined into its full context, we find that it was a bad
idea to have pushed the outer case inside the (...) case.
There is a cost to not doing case-of-case; see Trac #10626.
Note [Single-alternative-unlifted]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's another single-alternative where we really want to do case-of-case:
data Mk1 = Mk1 Int# | Mk2 Int#
M1.f =
\r [x_s74 y_s6X]
case
case y_s6X of tpl_s7m {
M1.Mk1 ipv_s70 -> ipv_s70;
M1.Mk2 ipv_s72 -> ipv_s72;
}
of
wild_s7c
{ __DEFAULT ->
case
case x_s74 of tpl_s7n {
M1.Mk1 ipv_s77 -> ipv_s77;
M1.Mk2 ipv_s79 -> ipv_s79;
}
of
wild1_s7b
{ __DEFAULT -> ==# [wild1_s7b wild_s7c];
};
};
So the outer case is doing *nothing at all*, other than serving as a
join-point. In this case we really want to do case-of-case and decide
whether to use a real join point or just duplicate the continuation:
let $j s7c = case x of
Mk1 ipv77 -> (==) s7c ipv77
Mk2 ipv79 -> (==) s7c ipv79
in
case y of
Mk1 ipv70 -> $j ipv70
Mk2 ipv72 -> $j ipv72
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
************************************************************************
* *
Unfoldings
* *
************************************************************************
-}
simplLetUnfolding :: SimplEnv-> TopLevelFlag
-> InId
-> OutExpr
-> Unfolding -> SimplM Unfolding
simplLetUnfolding env top_lvl id new_rhs unf
| isStableUnfolding unf
= simplUnfolding env top_lvl id unf
| otherwise
= bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl) bottoming new_rhs) }
-- We make an unfolding *even for loop-breakers*.
-- Reason: (a) It might be useful to know that they are WHNF
-- (b) In TidyPgm we currently assume that, if we want to
-- expose the unfolding then indeed we *have* an unfolding
-- to expose. (We could instead use the RHS, but currently
-- we don't.) The simple thing is always to have one.
where
bottoming = isBottomingId id
simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding
-- Note [Setting the new unfolding]
simplUnfolding env top_lvl id unf
= case unf of
NoUnfolding -> return unf
OtherCon {} -> return unf
DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
-> do { (env', bndrs') <- simplBinders rule_env bndrs
; args' <- mapM (simplExpr env') args
; return (mkDFunUnfolding bndrs' con args') }
CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide }
| isStableSource src
-> do { expr' <- simplExpr rule_env expr
; case guide of
UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok } -- Happens for INLINE things
-> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok
, ug_boring_ok = inlineBoringOk expr' }
-- Refresh the boring-ok flag, in case expr'
-- has got small. This happens, notably in the inlinings
-- for dfuns for single-method classes; see
-- Note [Single-method classes] in TcInstDcls.
-- A test case is Trac #4138
in return (mkCoreUnfolding src is_top_lvl expr' guide')
-- See Note [Top-level flag on inline rules] in CoreUnfold
_other -- Happens for INLINABLE things
-> bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags src is_top_lvl bottoming expr') } }
-- If the guidance is UnfIfGoodArgs, this is an INLINABLE
-- unfolding, and we need to make sure the guidance is kept up
-- to date with respect to any changes in the unfolding.
| otherwise -> return noUnfolding -- Discard unstable unfoldings
where
bottoming = isBottomingId id
is_top_lvl = isTopLevel top_lvl
act = idInlineActivation id
rule_env = updMode (updModeForStableUnfoldings act) env
-- See Note [Simplifying inside stable unfoldings] in SimplUtils
{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
on to the old unfolding (which is part of the id).
Note [Setting the new unfolding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If there's an INLINE pragma, we simplify the RHS gently. Maybe we
should do nothing at all, but simplifying gently might get rid of
more crap.
* If not, we make an unfolding from the new RHS. But *only* for
non-loop-breakers. Making loop breakers not have an unfolding at all
means that we can avoid tests in exprIsConApp, for example. This is
important: if exprIsConApp says 'yes' for a recursive thing, then we
can get into an infinite loop
If there's a stable unfolding on a loop breaker (which happens for
INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the
user did say 'INLINE'. May need to revisit this choice.
************************************************************************
* *
Rules
* *
************************************************************************
Note [Rules in a letrec]
~~~~~~~~~~~~~~~~~~~~~~~~
After creating fresh binders for the binders of a letrec, we
substitute the RULES and add them back onto the binders; this is done
*before* processing any of the RHSs. This is important. Manuel found
cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
-}
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr)
-- Rules are added back onto the binder
addBndrRules env in_id out_id
| null old_rules
= return (env, out_id)
| otherwise
= do { new_rules <- simplRules env (Just (idName out_id)) old_rules
; let final_id = out_id `setIdSpecialisation` mkRuleInfo new_rules
; return (modifyInScope env final_id, final_id) }
where
old_rules = ruleInfoRules (idSpecialisation in_id)
simplRules :: SimplEnv -> Maybe Name -> [CoreRule] -> SimplM [CoreRule]
simplRules env mb_new_nm rules
= mapM simpl_rule rules
where
simpl_rule rule@(BuiltinRule {})
= return rule
simpl_rule rule@(Rule { ru_bndrs = bndrs, ru_args = args
, ru_fn = fn_name, ru_rhs = rhs })
= do { (env', bndrs') <- simplBinders env bndrs
; let rule_env = updMode updModeForRules env'
; args' <- mapM (simplExpr rule_env) args
; rhs' <- simplExpr rule_env rhs
; return (rule { ru_bndrs = bndrs'
, ru_fn = mb_new_nm `orElse` fn_name
, ru_args = args'
, ru_rhs = rhs' }) }
|
gridaphobe/ghc
|
compiler/simplCore/Simplify.hs
|
Haskell
|
bsd-3-clause
| 123,812
|
{-
Module : Main
Description : UI and top level game loop
Module which handles UI and top level game loop.
-}
module Main(
main
, module Exported
) where
import Pentago.Data.Matrix as Exported
import Pentago.Data.Pentago as Exported hiding (Player)
import Pentago.Data.Tree as Exported
import Pentago.AI.MinMax as Exported
import qualified Pentago.AI.Pentago as AP
import Control.Applicative
import Control.Monad
import Control.Monad.State
import Data.Char
import Text.ParserCombinators.Parsec
import System.Random
type GameStateType = SmartGameState
initialGameState :: GameStateType
initialGameState = initialSmartGameState
aiDifficulty :: Int
aiDifficulty = 3
main :: IO ()
main = fst <$> runStateT mainMenu
(MainMenuState
(Player humanPlayerWrapper "Human 0")
(Player (aiPlayerWrapper $ AP.trivialAIPlayer aiDifficulty) "AI 1"))
-- main = trialGame
-- |IO Monad which runs a game between two AI players.
{- trialGame = runStateT runGame
$ SessionState initialGameState (mkStdGen 0)
(Player (aiPlayerWrapper $ AP.trivialAIPlayer 3) "AI 0")
(Player (aiPlayerWrapper $ AP.trivialAIPlayer 3) "AI 1") -}
-- main menu
data MainMenuState = MainMenuState {
firstPlayer :: Player GameStateType,
secondPlayer :: Player GameStateType
}
mainMenuString :: String
mainMenuString =
"1) Start game" ++ "\n"
++ "2) Configure" ++ "\n"
++ "3) Exit" ++ "\n"
mainMenu :: StateT MainMenuState IO ()
mainMenu = do
liftIO $ putStr mainMenuString
menuOption <- head <$> liftIO getLine
liftIO $ putStrLn ""
if menuOption == '1'
then do
firstPlayer' <- firstPlayer <$> get
secondPlayer' <- secondPlayer <$> get
lift $ do
stdGen <- newStdGen
_ <- runStateT runGame
$ SessionState initialGameState stdGen firstPlayer' secondPlayer'
return ()
mainMenu
else Control.Monad.when (menuOption == '2') $
do
configurationMenu
mainMenu
-- configuration menu
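-- |Switches a player between human and AI control.  The partial pattern in
-- the where clause below assumes the player name has the form
-- "<kind> <index>", e.g. "Human 0" or "AI 1", as built in mainMenu.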
switchPlayer :: (GameState s) => Player s -> Player s
switchPlayer player =
if playerName == "Human"
then Player (aiPlayerWrapper $ AP.trivialAIPlayer aiDifficulty) ("AI " ++ idx)
else Player humanPlayerWrapper ("Human " ++ idx)
where (playerName:(idx:[])) = words $ name player
configurationMenuString :: String
configurationMenuString =
"1) Switch first player" ++ "\n"
++ "2) Switch second player" ++ "\n"
++ "3) Go to main menu" ++ "\n"
showCurrentState :: MainMenuState -> IO ()
showCurrentState mainMenuState = do
putStrLn $ "1. player: " ++ (name . firstPlayer $ mainMenuState)
putStrLn $ "2. player: " ++ (name . secondPlayer $ mainMenuState)
configurationMenuMainLoop :: IO Char
configurationMenuMainLoop = do
putStr configurationMenuString
head <$> getLine
-- |Configuration menu allowing user to choose player types.
configurationMenu :: StateT MainMenuState IO ()
configurationMenu = do
mainMenuState <- get
let curFirstPlayer = firstPlayer mainMenuState
curSecondPlayer = secondPlayer mainMenuState
which <- lift $ do
showCurrentState mainMenuState
putStrLn ""
menuOption <- configurationMenuMainLoop
putStrLn ""
return $
if menuOption == '1'
then 1
else if menuOption == '2'
then 2
else 3
if which == (1 :: Int)
then do
put $ MainMenuState (switchPlayer curFirstPlayer) curSecondPlayer
configurationMenu
else Control.Monad.when (which == 2) $
do
put $ MainMenuState curFirstPlayer (switchPlayer curSecondPlayer)
configurationMenu
-- runGame
data Player s = Player {
playerWrapper :: PlayerWrapper s -- ^Wrapper for player function
, name :: String -- ^Human readable player name
}
data SessionState = SessionState {
gameState :: GameStateType,
randomGen :: StdGen,
curPlayer :: Player GameStateType,
nextPlayer :: Player GameStateType
}
-- |Runs a game between two players, displaying the current board between moves.
runGame :: StateT SessionState IO ()
runGame = do
sessionState <- get
let curGameState = gameState sessionState
liftIO . putStr . prettyShowBoard . getBoardArray $ curGameState
if isFinished curGameState
then
let
result = getResult curGameState
winMessage = case result of
Just Draw -> "The game has ended in a draw."
Just WhiteWin -> "The white player has won."
Just BlackWin -> "The black player has won."
Nothing -> error "getResult has returned Nothing."
in
liftIO . putStrLn $ winMessage
else do
let curPlayerWrapper = playerWrapper . curPlayer $ sessionState
(newGameState, newPlayerState) <- liftIO
. runStateT (curPlayerWrapper curGameState)
$ randomGen sessionState
put $ SessionState
newGameState
newPlayerState
(nextPlayer sessionState)
(curPlayer sessionState)
runGame
type PlayerWrapperMonad = StateT StdGen IO
-- |Wrapper for Pentago.AI.Pentago.Player function which unifies monads used by
-- AI and human player.
type PlayerWrapper s = AP.Player PlayerWrapperMonad s
aiPlayerWrapper :: (GameState s) => AP.AIPlayer s StdGen -> PlayerWrapper s
aiPlayerWrapper aiPlayer board =
do
gen <- get
let (newState, newGen) = runState (aiPlayer board) gen
put newGen
return newState
humanPlayer :: (GameState s) => AP.HumanPlayer s
humanPlayer currentGameState = do
putStrLn moveHelp
moveOrder <- readMoveOrder
return $ makeMove moveOrder currentGameState
humanPlayerWrapper :: (GameState s) => PlayerWrapper s
humanPlayerWrapper = lift . humanPlayer
moveHelp :: String
moveHelp = "Provide move order of form posX posY quadrant rotation, "
++ "where pos in [0,5], quadrant in {RT, LT, LB, RB}, rotation in {L,R}]"
parsePosition :: Parser Int
parsePosition = do
posX <- digit
let diff = ord posX - ord '0'
if diff > 5
then
fail "Read position is too large."
else
return diff
parseQuadrant :: Parser Quadrant
parseQuadrant = do
lr <- oneOf "RL"
tb <- oneOf "TB"
let quadrant = [lr, tb]
return $
if quadrant == "RT"
then RightTop
else if quadrant == "LT"
then LeftTop
else if quadrant == "LB"
then LeftBottom
else RightBottom
parseRotation :: Parser RotationDirection
parseRotation = do
lr <- oneOf "RL"
return $ if lr == 'R' then RightRotation else LeftRotation
parseMoveOrder :: Parser MoveOrder
parseMoveOrder = do
spaces
posX <- parsePosition
spaces
posY <- parsePosition
spaces
quadrant <- parseQuadrant
spaces
rotation <- parseRotation
spaces
return ((posX, posY), (quadrant, rotation))
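-- |A small usage sketch of the parser above (the binding name and the input
-- string are illustrative only, not used by the game): "2 3 RT L" parses to
-- position (2,3), right-top quadrant, left rotation.
exampleMoveOrder :: Either ParseError MoveOrder
exampleMoveOrder = parse parseMoveOrder "example" "2 3 RT L"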
readMoveOrder :: IO MoveOrder
readMoveOrder = do
line <- getLine
case parse parseMoveOrder "MoveOrder Parser" line of
Left err -> print err >> readMoveOrder
Right moveOrder -> return moveOrder
|
gregorias/Pentago
|
src/Main.hs
|
Haskell
|
bsd-3-clause
| 6,757
|
-- | Module Parser transforms text files into a (strongly typed) Tiles array.
module Parser where
import Hyrule
import Data.Array
import Text.Trifecta
import Control.Applicative ((<|>))
data TerrainError = TerrainError
data ObjectError = ObjectError
parseTerrain :: AreaType -> Parser Terrain
parseTerrain Overworld = do
terrain' <- letter
case terrain' of
'g' -> return Grass
's' -> return Sand
'f' -> return Forest
'm' -> return Mountain
'w' -> return Water
_ -> fail "Terreno desconhecido"
parseTerrain (Dungeon _) = do
terrain' <- letter
case terrain' of
'd' -> return WDungeon
'n' -> return NWDungeon
_ -> fail "Terreno desconhecido"
parseObject :: AreaType -> Parser Object
parseObject Overworld = do
object' <- letter <|> digit <|> char '_'
case object' of
'S' -> return MasterSword
'_' -> return Empty
'H' -> return Home
'D' -> return DummyGate
'1' -> return . Gate $ Dungeon 1
'2' -> return . Gate $ Dungeon 2
'3' -> return . Gate $ Dungeon 3
_ -> fail "Objeto desconhecido"
parseObject (Dungeon _) = do
object' <- letter <|> char '_'
case object' of
'P' -> return Pendant
'O' -> return . Gate $ Overworld
'_' -> return Empty
_ -> fail "Objeto desconhecido"
parseTiles :: AreaType -> AreaSize -> Parser (Array Position Tile)
parseTiles areaType' areaSize = do
rows <- some parseLine
if length rows == areaSize
then return $ listArray ((0,0), (areaSize - 1, areaSize - 1)) $ concat rows
else fail $ "O mapa deve possuir " ++ show areaSize ++ " linhas"
where
parseLine = do
tiles <- some parseTile
newline
let row = tiles
if length row == areaSize
then return row <?> "Row of Tiles"
else fail $ "O mapa deve possuir " ++ show areaSize ++ " colunas"
parseTile = do
terrain' <- parseTerrain areaType'
object' <- parseObject areaType'
(return $ Tile terrain' object') <?> "Tile"
parseMap :: AreaType -> AreaSize -> String -> Area
parseMap areaType' areaSize str = extract $ parseString (parseTiles areaType' areaSize) mempty str
where
extract (Success p) = Area areaType' p
extract (Failure e) = error $ "Could not parse the map. Error: " ++ show e
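-- | A usage sketch (the binding name and the map literal are illustrative
-- only): a 2x2 overworld map, where each tile is one terrain letter
-- ('g','s','f','m','w') followed by one object character
-- ('S','_','H','D','1','2','3'), and each row ends with a newline.
exampleOverworld :: Area
exampleOverworld = parseMap Overworld 2 "g_gS\nm_w_\n"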
|
trxeste/wrk
|
haskell/TrabalhoIA/src/Parser.hs
|
Haskell
|
bsd-3-clause
| 2,283
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Feldspar.Core.Frontend
( module Data.Patch
, Syntactic
, Internal
, FeldDomain
, Data
, Syntax
, module Frontend
, FeldOpts
, defaultFeldOpts
, reifyFeld
, reifyFeldM
, reifyFeldUnOpt
, showExpr
, printExpr
, printExpr2
, printExprWith
, printExpr2With
, printExprUnOpt
, drawUntyped
, drawUntypedWith
, showAST
, drawAST
, drawASTUnOpt
, writeHtmlAST
, showDecor
, drawDecor
, writeHtmlDecor
, eval
, evalTarget
, desugar
, sugar
, resugar
-- * QuickCheck
, (===>)
, (====)
-- * Type constraints
, tData
, tArr1
, tArr2
-- * Functions
, ilog2
, nlz
) where
import Prelude as P
import Control.Monad.State
import Test.QuickCheck
import Data.Patch
import Data.Tree.View
import qualified Data.Map as Map
import Language.Syntactic hiding
(desugar, sugar, resugar, showAST, drawAST, writeHtmlAST, stringTree)
import qualified Language.Syntactic as Syntactic
import qualified Language.Syntactic.Constructs.Decoration as Syntactic
import Language.Syntactic.Constructs.Binding
import Language.Syntactic.Constructs.Binding.HigherOrder
import Language.Syntactic.Sharing.SimpleCodeMotion
import Language.Syntactic.Sharing.CodeMotion2
import Language.Syntactic.Sharing.SimpleCodeMotion3
import Feldspar.Range
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
import Feldspar.Core.Middleend.FromTyped
import Feldspar.Core.UntypedRepresentation (stringTree)
import Feldspar.Core.Constructs
import Feldspar.Core.Constructs.Binding (cLambda)
import Feldspar.Core.Frontend.Array as Frontend
import Feldspar.Core.Frontend.Binding as Frontend
import Feldspar.Core.Frontend.Bits as Frontend
import Feldspar.Core.Frontend.Complex as Frontend
import Feldspar.Core.Frontend.Condition as Frontend
import Feldspar.Core.Frontend.Conversion as Frontend
import Feldspar.Core.Frontend.Elements as Frontend
import Feldspar.Core.Frontend.Eq as Frontend
import Feldspar.Core.Frontend.Error as Frontend
import Feldspar.Core.Frontend.FFI as Frontend
import Feldspar.Core.Frontend.Floating as Frontend
import Feldspar.Core.Frontend.Fractional as Frontend
import Feldspar.Core.Frontend.Future as Frontend
import Feldspar.Core.Frontend.Integral as Frontend
import Feldspar.Core.Frontend.Literal as Frontend
import Feldspar.Core.Frontend.Logic as Frontend
import Feldspar.Core.Frontend.Loop as Frontend
import Feldspar.Core.Frontend.NoInline as Frontend
import Feldspar.Core.Frontend.Num as Frontend
import Feldspar.Core.Frontend.Ord as Frontend
import Feldspar.Core.Frontend.Par as Frontend
import Feldspar.Core.Frontend.Save as Frontend
import Feldspar.Core.Frontend.SizeProp as Frontend
import Feldspar.Core.Frontend.SourceInfo as Frontend
import Feldspar.Core.Frontend.Switch as Frontend
import Feldspar.Core.Frontend.RealFloat as Frontend
import Feldspar.Core.Frontend.Tuple as Frontend
prjDict :: PrjDict (Decor Info FeldDom)
prjDict = PrjDict
(prjVariable prjDictFO . decorExpr)
(prjLambda prjDictFO . decorExpr)
mkId :: FeldOpts -> MkInjDict (Decor Info FeldDom)
mkId opts a b
| simpleMatch (const . sharable) a
, Just Dict <- typeDict a
, Dict <- exprDictSub pTypeable b
, Info {infoType = bType} <- getInfo b
= case bType of
FunType{} | P.not (SICS `inTarget` opts) -> Nothing
_ -> Just InjDict
{ injVariable = Decor (getInfo a) . injC . c' . Variable
, injLambda = let info = ((mkInfoTy (FunType typeRep bType)) {infoSize = (infoSize (getInfo a), infoSize (getInfo b))})
in Decor info . injC . cLambda
, injLet = Decor (getInfo b) $ injC Let
}
mkId _ _ _ = Nothing
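-- Code-motion pipeline selection: when CSE is among the target options, run
-- codeMotion2, strip decorations, re-optimise and then run codeMotion;
-- otherwise run codeMotion3.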
hoister opts
| CSE `inTarget` opts
= cm1 . optimize opts . stripDecor <=< cm2
| otherwise = cm3
where cm1 = codeMotion (simpleMatch (const . hoistOver)) prjDict (mkId opts)
cm2 = codeMotion2 (simpleMatch (const . hoistOver)) prjDict (mkId opts)
cm3 = codeMotion3 10 (simpleMatch (const . hoistOver)) prjDict (mkId opts) mkSubEnvDefault
reifyFeldM :: (SyntacticFeld a, MonadState VarId m)
=> FeldOpts
-> BitWidth n
-> a
-> m (ASTF (Decor Info FeldDom) (Internal a))
reifyFeldM opts n =
( return
. optimize opts
. stripDecor
<=< hoister opts
. optimize opts
. targetSpecialization n
<=< reifyM
. fromFeld
. Syntactic.desugar
)
-- Note that it's important to do 'codeMotion' after 'optimize'. There may be
-- sub-expressions that appear more than once in the original program, but
-- where 'optimize' removes all but one occurrence. If 'codeMotion' was run
-- first, these sub-expressions would be let bound, preventing subsequent
-- optimizations.
-- | Reification and optimization of a Feldspar program
reifyFeld :: SyntacticFeld a
=> FeldOpts
-> BitWidth n
-> a
-> ASTF (Decor Info FeldDom) (Internal a)
reifyFeld opts n = flip evalState 0 . reifyFeldM opts n
-- | Reification of a Feldspar program
reifyFeldUnOpt :: SyntacticFeld a
=> FeldOpts -> BitWidth n
-> a
-> ASTF FeldDom (Internal a)
reifyFeldUnOpt _ n = flip evalState 0 .
( return
. targetSpecialization n
<=< reifyM
. fromFeld
. Syntactic.desugar
)
showExpr :: SyntacticFeld a => a -> String
showExpr = render . reifyFeld defaultFeldOpts N32
-- | Print an optimized untyped expression
printExpr2 :: SyntacticFeld a => a -> IO ()
printExpr2 = printExpr2With defaultFeldOpts
-- | Draw the untyped syntax tree using unicode art
drawUntyped :: SyntacticFeld a => a -> IO ()
drawUntyped = drawUntypedWith defaultFeldOpts
-- | Draw the untyped syntax tree using unicode art
drawUntypedWith :: SyntacticFeld a => FeldOpts -> a -> IO ()
drawUntypedWith opts = drawTree . stringTree . untype opts . reifyFeld opts N32
-- | Print an optimized expression
printExpr :: SyntacticFeld a => a -> IO ()
printExpr = print . reifyFeld defaultFeldOpts N32
-- | Print an optimized untyped expression with options
printExpr2With :: SyntacticFeld a => FeldOpts -> a -> IO ()
printExpr2With opts = print . untype opts . reifyFeld opts N32
-- | Print an optimized expression with options
printExprWith :: SyntacticFeld a => FeldOpts -> a -> IO ()
printExprWith opts = print . reifyFeld opts N32
-- | Print an unoptimized expression
printExprUnOpt :: SyntacticFeld a => a -> IO ()
printExprUnOpt = print . reifyFeldUnOpt defaultFeldOpts N32
-- | Show the syntax tree using Unicode art
showAST :: SyntacticFeld a => a -> String
showAST = Syntactic.showAST . reifyFeld defaultFeldOpts N32
-- | Draw the syntax tree on the terminal using Unicode art
drawAST :: SyntacticFeld a => a -> IO ()
drawAST = Syntactic.drawAST . reifyFeld defaultFeldOpts N32
drawASTUnOpt :: SyntacticFeld a => a -> IO ()
drawASTUnOpt = Syntactic.drawAST . reifyFeldUnOpt defaultFeldOpts N32
-- | Write the syntax tree to an HTML file with foldable nodes
writeHtmlAST :: SyntacticFeld a => FilePath -> a -> IO ()
writeHtmlAST file = Syntactic.writeHtmlAST file . reifyFeld defaultFeldOpts N32
-- | Draw a syntax tree decorated with type and size information
showDecor :: SyntacticFeld a => a -> String
showDecor = Syntactic.showDecorWith show . reifyFeld defaultFeldOpts N32
-- | Draw a syntax tree decorated with type and size information
drawDecor :: SyntacticFeld a => a -> IO ()
drawDecor = Syntactic.drawDecorWith show . reifyFeld defaultFeldOpts N32
-- | Write the syntax tree decorated with type and size information to an HTML file with foldable nodes
writeHtmlDecor :: SyntacticFeld a => FilePath -> a -> IO ()
writeHtmlDecor file = Syntactic.writeHtmlDecorWith showInfo file . reifyFeld defaultFeldOpts N32
where
showInfo :: Show (Info b) => Info b -> String
showInfo Info{..} = unlines [ "Type: " ++ show infoType
, "Size: " ++ show infoSize
, "Vars: " ++ show (Map.keys infoVars)
, "Src: " ++ show infoSource
]
eval :: SyntacticFeld a => a -> Internal a
eval = evalBind . reifyFeld defaultFeldOpts N32
evalTarget
:: ( SyntacticFeld a
, BoundedInt (GenericInt U n)
, BoundedInt (GenericInt S n)
)
=> BitWidth n -> a -> Internal a
evalTarget n = evalBind . reifyFeld defaultFeldOpts n
-- TODO This doesn't work yet, because 'targetSpecialization' is not implemented
desugar :: SyntacticFeld a => a -> Data (Internal a)
desugar = Syntactic.resugar
sugar :: SyntacticFeld a => Data (Internal a) -> a
sugar = Syntactic.resugar
resugar :: (SyntacticFeld a, SyntacticFeld b, Internal a ~ Internal b) => a -> b
resugar = Syntactic.resugar
--------------------------------------------------------------------------------
-- * QuickCheck
--------------------------------------------------------------------------------
instance (Type a, Arbitrary a) => Arbitrary (Data a)
where
arbitrary = fmap value arbitrary
instance Testable (Data Bool)
where
property = property . eval
(===>) :: Testable prop => Data Bool -> prop -> Property
a ===> b = eval a ==> b
-- | Test that two functions of the same arity have the same semantics
class Equal a
where
(====) :: a -> a -> Property
instance (P.Eq a, Show a) => Equal a
where
x ==== y = x === y
instance (Show a, Arbitrary a, Equal b) => Equal (a -> b)
where
f ==== g = property (\x -> f x ==== g x)
--------------------------------------------------------------------------------
-- * Type annotations
--------------------------------------------------------------------------------
tData :: Patch a a -> Patch (Data a) (Data a)
tData _ = id
tArr1 :: Patch a a -> Patch (Data [a]) (Data [a])
tArr1 _ = id
tArr2 :: Patch a a -> Patch (Data [[a]]) (Data [[a]])
tArr2 _ = id
--------------------------------------------------------------------------------
-- * Functions
--------------------------------------------------------------------------------
-- | Integer logarithm in base 2
-- Based on an algorithm in Hacker's Delight
ilog2 :: (Bits a) => Data a -> Data Index
ilog2 x = bitSize x - 1 - nlz x
-- | Count leading zeros
-- Based on an algorithm in Hacker's Delight
nlz :: (Bits a) => Data a -> Data Index
nlz x = bitCount $ complement $ foldl go x $ takeWhile (P.< bitSize' x) $ P.map (2 P.^) [(0::Integer)..]
where
go b s = share b $ \b' -> b' .|. (b' .>>. value s)
-- TODO share is probably not needed when observable sharing is implemented
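-- A worked example of the algorithm above (illustrative, using an 8-bit word
-- for brevity): for x = 0b00010110 the fold ORs x with its right shifts,
-- smearing the top set bit downwards to give 0b00011111; the complement
-- 0b11100000 has 3 set bits, so nlz x = 3 and ilog2 x = 8 - 1 - 3 = 4.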
|
emwap/feldspar-language
|
src/Feldspar/Core/Frontend.hs
|
Haskell
|
bsd-3-clause
| 12,977
|
module ETA.TypeCheck.TcForeign
( tcForeignImports
, tcForeignExports
-- Low-level exports for hooks
, isForeignImport, isForeignExport
, tcFImport --, tcFExport
-- , tcForeignImports'
, tcCheckFIType, checkJavaTarget, checkForeignArgs, checkForeignRes
, normaliseFfiType
, nonIOok, mustBeIO
, checkSafe, noCheckSafe
-- , tcForeignExports'
-- , tcCheckFEType
) where
import ETA.BasicTypes.DataCon
import ETA.BasicTypes.Unique
import ETA.BasicTypes.SrcLoc
import ETA.BasicTypes.Name
import ETA.BasicTypes.VarSet
import ETA.BasicTypes.Id
import ETA.BasicTypes.RdrName
import ETA.TypeCheck.FamInst
import ETA.TypeCheck.TcRnMonad
import ETA.TypeCheck.TcHsType
import ETA.TypeCheck.TcExpr
import ETA.TypeCheck.TcEnv
import ETA.TypeCheck.TcType
import ETA.Prelude.TysWiredIn (unitTyCon)
import ETA.Prelude.PrelNames
import ETA.Prelude.ForeignCall
import ETA.Main.Hooks
import ETA.Main.ErrUtils
import ETA.Main.DynFlags
import ETA.Types.FamInstEnv
import ETA.Types.Type
import ETA.Types.TypeRep
import ETA.Types.Coercion
import ETA.Types.TyCon
import ETA.Debug
import ETA.HsSyn.HsSyn
import ETA.Utils.Bag
import ETA.Utils.Outputable
import ETA.Utils.FastString
import ETA.Utils.Maybes
import Data.Maybe(fromMaybe)
-- Defines a binding
isForeignImport :: LForeignDecl name -> Bool
isForeignImport (L _ ForeignImport {}) = True
isForeignImport _ = False
-- Exports a binding
isForeignExport :: LForeignDecl name -> Bool
isForeignExport (L _ ForeignExport {}) = True
isForeignExport _ = False
tcForeignImports :: [LForeignDecl Name] -> TcM ([Id], [LForeignDecl Id], Bag GlobalRdrElt)
tcForeignImports decls
= getHooked tcForeignImportsHook tcForeignImports' >>= ($ decls)
tcForeignImports' :: [LForeignDecl Name] -> TcM ([Id], [LForeignDecl Id], Bag GlobalRdrElt)
tcForeignImports' decls = do
(ids, decls, gres) <- mapAndUnzip3M tcFImport $ filter isForeignImport decls
return (ids, decls, unionManyBags gres)
printDebug h s = do
dflags <- getDynFlags
liftIO . putStrLn . showSDoc dflags $ (ptext $ sLit h) <+> s
tcFImport :: LForeignDecl Name -> TcM (Id, LForeignDecl Id, Bag GlobalRdrElt)
tcFImport (L declLoc fi@(ForeignImport (L nameLoc name) hsType _ impDecl))
= setSrcSpan declLoc . addErrCtxt (foreignDeclCtxt fi) $ do
sigType <- tcHsSigType (ForSigCtxt name) hsType
--printDebug "tcFImport: sigType" $ ppr sigType
(normCo, normSigType, gres) <- normaliseFfiType sigType
--printDebug "tcFImport: normSigType" $ ppr normSigType
let (_, ty) = tcSplitForAllTys normSigType
(theta, ty') = tcSplitPhiTy ty
(argTypes, resType) = tcSplitFunTys ty'
id = mkLocalId name sigType
traceTc "tcFIImport" $ ppr theta <+> ppr argTypes <+> ppr resType
--printDebug "tcFImport: normSigType" $ ppr argTypes <+> ppr resType
impDecl' <- tcCheckFIType theta argTypes resType impDecl
let fiDecl = ForeignImport (L nameLoc id) undefined
(mkSymCo normCo) impDecl'
return (id, L declLoc fiDecl, gres)
tcFImport d = pprPanic "tcFImport" (ppr d)
normaliseFfiType :: Type -> TcM (Coercion, Type, Bag GlobalRdrElt)
normaliseFfiType ty = do
famEnvs <- tcGetFamInstEnvs
normaliseFfiType' famEnvs ty
normaliseFfiType' :: FamInstEnvs -> Type -> TcM (Coercion, Type, Bag GlobalRdrElt)
normaliseFfiType' env ty0 = go initRecTc ty0
where go :: RecTcChecker -> Type -> TcM (Coercion, Type, Bag GlobalRdrElt)
go recNts ty
| Just ty' <- coreView ty = go recNts ty'
go recNts ty@(TyConApp tc tys)
-- TODO: Address funPtrs
| tcKey == ioTyConKey
= childrenOnly False
| tcKey == javaTyConKey
= childrenOnly True
| isNewTyCon tc
, Just recNts' <- checkRecTc recNts tc
= do
rdrEnv <- getGlobalRdrEnv
case checkNewtypeFFI rdrEnv tc of
Nothing -> nothing
Just gre -> do
(co', ty', gres) <- go recNts' ntRhs
return (mkTransCo ntCo co', ty', gre `consBag` gres)
| isFamilyTyCon tc
, (co, nty) <- normaliseTcApp env Representational tc tys
, not (isReflCo co)
= do (co', ty', gres) <- go recNts nty
return (mkTransCo co co', ty', gres)
| otherwise
= nothing
where tcKey = getUnique tc
childrenOnly isJava = do
xs <- mapM (go recNts) tys
let (cos, tys', gres) = unzip3 xs
cos' = zipWith3 downgradeRole (tyConRoles tc)
((if isJava then [Nominal] else [])
++ repeat Representational) cos
co' = mkTyConAppCo Representational tc cos'
return ( co'
, mkTyConApp tc tys'
, unionManyBags gres )
ntCo = mkUnbranchedAxInstCo Representational (newTyConCo tc)
tys
ntRhs = newTyConInstRhs tc tys
nothing = return (Refl Representational ty, ty, emptyBag)
go recNts (FunTy ty1 ty2) = do
(coi1, nty1, gres1) <- go recNts ty1
(coi2, nty2, gres2) <- go recNts ty2
return (mkFunCo Representational coi1 coi2, mkFunTy nty1 nty2,
gres1 `unionBags` gres2)
go recNts (ForAllTy tyVar ty) = do
(coi, nty, gres) <- go recNts ty
return (mkForAllCo tyVar coi, ForAllTy tyVar nty, gres)
go _ ty@(TyVarTy {}) = return (Refl Representational ty, ty, emptyBag)
go _ ty@(LitTy {}) = return (Refl Representational ty, ty, emptyBag)
go _ ty@(AppTy {}) = return (Refl Representational ty, ty, emptyBag)
checkNewtypeFFI :: GlobalRdrEnv -> TyCon -> Maybe GlobalRdrElt
checkNewtypeFFI rdrEnv tc
| Just con <- tyConSingleDataCon_maybe tc
, [gre] <- lookupGRE_Name rdrEnv (dataConName con)
= Just gre
| otherwise
= Nothing
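-- Illustrative note (not from the original source): given a declaration such
-- as @newtype Handle = Handle Object@, a foreign signature mentioning
-- 'Handle' is looked through by 'normaliseFfiType'' only when the 'Handle'
-- data constructor is in scope; the rewrite composes the newtype coercion
-- into the result and records the constructor's 'GlobalRdrElt' so the
-- occurrence counts as a use. If the constructor is not in scope, the type
-- is left as-is and the later FFI checks see the newtype itself.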
foreignDeclCtxt :: ForeignDecl Name -> SDoc
foreignDeclCtxt fo
= hang (str "When checking declaration:")
2 (ppr fo)
tcCheckFIType :: ThetaType -> [Type] -> Type -> ForeignImport -> TcM ForeignImport
tcCheckFIType thetaType argTypes resType idecl@(CImport (L lc cconv) (L ls safety) mh
targetSpec src)
| CFunction target <- targetSpec
= case cconv of
PrimCallConv -> do
dflags <- getDynFlags
checkTc (xopt Opt_GHCForeignImportPrim dflags)
(text "Use GHCForeignImportPrim to allow `foreign import prim'.")
-- TODO: Validate the target string
checkJavaTarget target
checkTc (playSafe safety)
(text $ "The safe/unsafe annotation should not be used with "
++ "`foreign import prim'.")
checkForeignArgs (isFFIPrimArgumentTy dflags) argTypes
checkForeignRes nonIOok checkSafe (isFFIPrimResultTy dflags) resType
return idecl
JavaCallConv -> do
-- TODO: Validate the target string for @new, @field
-- TODO: Validate ThetaType
dflags <- getDynFlags
checkJavaTarget target
let javaClassVars = extendsVars thetaType
checkForeignArgs (isFFIArgumentTy dflags safety javaClassVars) argTypes
checkForeignRes nonIOok checkSafe (isFFIImportResultTy dflags) resType
return idecl
_ -> pprPanic "tcCheckFIType: Unsupported calling convention." (ppr idecl)
| CWrapper target isAbstract <- targetSpec
, JavaCallConv <- cconv
= do
-- TODO: Validate target
dflags <- getDynFlags
let javaClassVars = extendsVars thetaType
-- TODO: Typecheck foreign wrappers properly
-- checkForeignArgs (isFFIArgumentTy dflags safety javaClassVars) argTypes
-- checkForeignRes nonIOok checkSafe (isFFIImportResultTy dflags) resType
return idecl
| otherwise = pprPanic "tcCheckFIType: Unsupported calling convention." (ppr idecl)
tcCheckFIType _ _ _ idecl = pprPanic "tcCheckFIType: Unsupported calling convention." (ppr idecl)
check :: Validity -> (MsgDoc -> MsgDoc) -> TcM ()
check IsValid _ = return ()
check (NotValid doc) err_fn = addErrTc (err_fn doc)
checkForeignArgs :: (Type -> Validity) -> [Type] -> TcM ()
checkForeignArgs pred tys = mapM_ go tys
where
go ty = check (pred ty) (illegalForeignTyErr argument)
illegalForeignTyErr :: SDoc -> SDoc -> SDoc
illegalForeignTyErr argOrRes extra
= hang msg 2 extra
where
msg = hsep [ str "Unacceptable", argOrRes
, str "type in foreign declaration:"]
checkForeignRes :: Bool -> Bool -> (Type -> Validity) -> Type -> TcM ()
checkForeignRes nonIOResultOk checkSafe predResType ty
| Just (_, resType) <- tcSplitIOType_maybe ty
= do
traceTc "checkForeignRes[IO]" $ ppr resType
check (predResType resType) (illegalForeignTyErr result)
| Just (_, tagType, resType) <- tcSplitJavaType_maybe ty
= do
traceTc "checkForeignRes[Java]" $ ppr tagType <+> ppr resType
check (predResType resType) (illegalForeignTyErr result)
-- Case for non-IO result type with FFI Import
| not nonIOResultOk = addErrTc
. illegalForeignTyErr result
$ str "IO result type expected"
| otherwise = do
traceTc "checkForeignRes[Other]" $ ppr ty
dflags <- getDynFlags
case predResType ty of
-- Handle a normal typecheck failure; we want to handle this first and
-- only report Safe Haskell errors if the normal type check is OK.
NotValid msg -> addErrTc $ illegalForeignTyErr result msg
-- handle safe infer fail
_ | checkSafe && safeInferOn dflags -> recordUnsafeInfer
-- handle safe language typecheck fail
_ | checkSafe && safeLanguageOn dflags ->
addErrTc $ illegalForeignTyErr result safeHsErr
-- success! non-IO return is fine
_ -> return ()
where safeHsErr = str $ "Safe Haskell is on, all FFI imports must be in the"
++ " IO monad"
argument, result :: SDoc
argument = text "argument"
result = text "result"
checkSafe, noCheckSafe :: Bool
checkSafe = True
noCheckSafe = False
nonIOok, mustBeIO :: Bool
nonIOok = True
mustBeIO = False
checkJavaTarget :: CCallTarget -> TcM ()
checkJavaTarget (StaticTarget str _ _) = do
-- TODO: Validate the name
return ()
isAnyTy :: Type -> Bool
isAnyTy = isTc anyTyConKey
isTc :: Unique -> Type -> Bool
isTc uniq ty = case tcSplitTyConApp_maybe ty of
Just (tc, _) -> uniq == getUnique tc
Nothing -> False
tcForeignExports :: [LForeignDecl Name]
-> TcM (LHsBinds TcId, [LForeignDecl TcId], Bag GlobalRdrElt)
tcForeignExports decls =
getHooked tcForeignExportsHook tcForeignExports' >>= ($ decls)
tcForeignExports' :: [LForeignDecl Name]
-> TcM (LHsBinds TcId, [LForeignDecl TcId], Bag GlobalRdrElt)
tcForeignExports' decls = foldlM combine (emptyLHsBinds, [], emptyBag)
(filter isForeignExport decls)
where combine (binds, fs, gres1) (L loc fe) = do
(b, f, gres2) <- setSrcSpan loc (tcFExport fe)
return (b `consBag` binds, L loc f : fs, gres1 `unionBags` gres2)
tcFExport :: ForeignDecl Name -> TcM (LHsBind Id, ForeignDecl Id, Bag GlobalRdrElt)
tcFExport fo@(ForeignExport (L loc nm) hs_ty _ spec)
= addErrCtxt (foreignDeclCtxt fo) $ do
sig_ty <- tcHsSigType (ForSigCtxt nm) hs_ty
rhs <- tcPolyExpr (nlHsVar nm) sig_ty
(norm_co, norm_sig_ty, gres) <- normaliseFfiType sig_ty
spec' <- tcCheckFEType norm_sig_ty spec
id <- mkStableIdFromName nm sig_ty loc mkForeignExportOcc
return (mkVarBind id rhs, ForeignExport (L loc id) undefined norm_co spec', gres)
tcFExport d = pprPanic "tcFExport" (ppr d)
tcCheckFEType :: Type -> ForeignExport -> TcM ForeignExport
tcCheckFEType sigType exportspec = do
-- (CExport (L l (CExportStatic str cconv)) src)
checkForeignArgs isFFIExternalTy argTypes
checkForeignRes nonIOok noCheckSafe isFFIExportResultTy resType
return exportspec
where (_, ty) = tcSplitForAllTys sigType
(thetaType, ty') = tcSplitPhiTy ty
(argTypes, resType) = tcSplitFunTys ty'
javaClassVars = extendsVars thetaType
|
alexander-at-github/eta
|
compiler/ETA/TypeCheck/TcForeign.hs
|
Haskell
|
bsd-3-clause
| 12,447
|
-- | News page controller. This page simply downloads from
-- haskellnews.org, which already has a pre-prepared page of news to
-- display.
module HL.C.News where
import HL.C
import HL.M.News
import HL.V.News
-- | News controller.
getNewsR :: C Html
getNewsR =
do html <- io getHaskellNews
blaze (newsV html)
|
chrisdone/hl
|
src/HL/C/News.hs
|
Haskell
|
bsd-3-clause
| 318
|
-- A Point is a location in the xy plane, represented by x and y coordinates.
-- E.g. (Point 0.0 0.0) is the origin, (Point (-1) (1)) is in the top left
-- quadrant.
data Point = Point Double Double
deriving (Show, Eq)
-- A line segment is a straight line of finite length, defined by its
-- two end points. E.g. (LineSegment (Point 0 0) (Point 1 1)) is a
-- line segment from the origin to the coordinate (1, 1)
data LineSegment = LineSegment Point Point
deriving (Show, Eq)
-- A Path is a 2D path in the xy-plane. The idea is that Path can be
-- extended to support straight lines, curves, and arbitrary paths,
-- but currently there is only one data constructor for Path: Line.
data Path =
-- Line represents an infinite straight line defined by its slope a
-- and its y intercept b, ie. by the equation y = ax + b
Line Double Double
deriving (Show, Eq)
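-- Illustrative example values (a sketch added for clarity; not part of the
-- original lab file):
origin :: Point
origin = Point 0.0 0.0

unitSegment :: LineSegment
unitSegment = LineSegment (Point 0 0) (Point 1 1)

-- The line y = 2x + 1, i.e. slope 2 and y intercept 1.
slopeTwo :: Path
slopeTwo = Line 2 1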
|
markstoehr/cs161
|
_site/fls/Lab2_flymake.hs
|
Haskell
|
cc0-1.0
| 879
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Targets
-- Copyright : (c) Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : duncan@community.haskell.org
--
-- Handling for user-specified targets
-----------------------------------------------------------------------------
module Distribution.Client.Targets (
-- * User targets
UserTarget(..),
readUserTargets,
-- * Package specifiers
PackageSpecifier(..),
pkgSpecifierTarget,
pkgSpecifierConstraints,
-- * Resolving user targets to package specifiers
resolveUserTargets,
-- ** Detailed interface
UserTargetProblem(..),
readUserTarget,
reportUserTargetProblems,
expandUserTarget,
PackageTarget(..),
fetchPackageTarget,
readPackageTarget,
PackageTargetProblem(..),
reportPackageTargetProblems,
disambiguatePackageTargets,
disambiguatePackageName,
-- * User constraints
UserQualifier(..),
UserConstraintScope(..),
UserConstraint(..),
userConstraintPackageName,
readUserConstraint,
userToPackageConstraint,
) where
import Prelude ()
import Distribution.Client.Compat.Prelude
import Distribution.Package
( Package(..), PackageName, unPackageName, mkPackageName
, PackageIdentifier(..), packageName, packageVersion )
import Distribution.Types.Dependency
import Distribution.Client.Types
( PackageLocation(..)
, ResolvedPkgLoc, UnresolvedSourcePackage )
import Distribution.Solver.Types.ConstraintSource
import Distribution.Solver.Types.LabeledPackageConstraint
import Distribution.Solver.Types.OptionalStanza
import Distribution.Solver.Types.PackageConstraint
import Distribution.Solver.Types.PackagePath
import Distribution.Solver.Types.PackageIndex (PackageIndex)
import qualified Distribution.Solver.Types.PackageIndex as PackageIndex
import Distribution.Solver.Types.SourcePackage
import qualified Distribution.Client.World as World
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Distribution.Client.Tar as Tar
import Distribution.Client.FetchUtils
import Distribution.Client.Utils ( tryFindPackageDesc )
import Distribution.Client.GlobalFlags
( RepoContext(..) )
import Distribution.PackageDescription
( GenericPackageDescription, parseFlagAssignment )
import Distribution.Version
( nullVersion, thisVersion, anyVersion, isAnyVersion )
import Distribution.Text
( Text(..), display )
import Distribution.Verbosity (Verbosity)
import Distribution.Simple.Utils
( die', warn, lowercase )
#ifdef CABAL_PARSEC
import Distribution.PackageDescription.Parsec
( readGenericPackageDescription, parseGenericPackageDescriptionMaybe )
#else
import Distribution.PackageDescription.Parse
( readGenericPackageDescription, parseGenericPackageDescription, ParseResult(..) )
import Distribution.Simple.Utils
( fromUTF8, ignoreBOM )
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
#endif
-- import Data.List ( find, nub )
import Data.Either
( partitionEithers )
import qualified Data.Map as Map
import qualified Data.ByteString.Lazy as BS
import qualified Distribution.Client.GZipUtils as GZipUtils
import Control.Monad (mapM)
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP
( (+++), (<++) )
import Distribution.ParseUtils
( readPToMaybe )
import System.FilePath
( takeExtension, dropExtension, takeDirectory, splitPath )
import System.Directory
( doesFileExist, doesDirectoryExist )
import Network.URI
( URI(..), URIAuth(..), parseAbsoluteURI )
-- ------------------------------------------------------------
-- * User targets
-- ------------------------------------------------------------
-- | Various ways that a user may specify a package or package collection.
--
data UserTarget =
-- | A partially specified package, identified by name and possibly with
-- an exact version or a version constraint.
--
-- > cabal install foo
-- > cabal install foo-1.0
-- > cabal install 'foo < 2'
--
UserTargetNamed Dependency
-- | A special virtual package that refers to the collection of packages
-- recorded in the world file that the user specifically installed.
--
-- > cabal install world
--
| UserTargetWorld
-- | A specific package that is unpacked in a local directory, often the
-- current directory.
--
-- > cabal install .
-- > cabal install ../lib/other
--
-- * Note: in future, if multiple @.cabal@ files are allowed in a single
-- directory then this will refer to the collection of packages.
--
| UserTargetLocalDir FilePath
-- | A specific local unpacked package, identified by its @.cabal@ file.
--
-- > cabal install foo.cabal
-- > cabal install ../lib/other/bar.cabal
--
| UserTargetLocalCabalFile FilePath
-- | A specific package that is available as a local tarball file
--
-- > cabal install dist/foo-1.0.tar.gz
-- > cabal install ../build/baz-1.0.tar.gz
--
| UserTargetLocalTarball FilePath
-- | A specific package that is available as a remote tarball file
--
-- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz
--
| UserTargetRemoteTarball URI
deriving (Show,Eq)
-- ------------------------------------------------------------
-- * Package specifier
-- ------------------------------------------------------------
-- | A fully or partially resolved reference to a package.
--
data PackageSpecifier pkg =
-- | A partially specified reference to a package (either source or
-- installed). It is specified by package name and optionally some
-- required properties. Use a dependency resolver to pick a specific
-- package satisfying these properties.
--
NamedPackage PackageName [PackageProperty]
-- | A fully specified source package.
--
| SpecificSourcePackage pkg
deriving (Eq, Show, Generic)
instance Binary pkg => Binary (PackageSpecifier pkg)
pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName
pkgSpecifierTarget (NamedPackage name _) = name
pkgSpecifierTarget (SpecificSourcePackage pkg) = packageName pkg
pkgSpecifierConstraints :: Package pkg
=> PackageSpecifier pkg -> [LabeledPackageConstraint]
pkgSpecifierConstraints (NamedPackage name props) = map toLpc props
where
toLpc prop = LabeledPackageConstraint
(PackageConstraint (scopeToplevel name) prop)
ConstraintSourceUserTarget
pkgSpecifierConstraints (SpecificSourcePackage pkg) =
[LabeledPackageConstraint pc ConstraintSourceUserTarget]
where
pc = PackageConstraint
(ScopeTarget $ packageName pkg)
(PackagePropertyVersion $ thisVersion (packageVersion pkg))
-- ------------------------------------------------------------
-- * Parsing and checking user targets
-- ------------------------------------------------------------
readUserTargets :: Verbosity -> [String] -> IO [UserTarget]
readUserTargets verbosity targetStrs = do
(problems, targets) <- liftM partitionEithers
(mapM readUserTarget targetStrs)
reportUserTargetProblems verbosity problems
return targets
data UserTargetProblem
= UserTargetUnexpectedFile String
| UserTargetNonexistantFile String
| UserTargetUnexpectedUriScheme String
| UserTargetUnrecognisedUri String
| UserTargetUnrecognised String
| UserTargetBadWorldPkg
deriving Show
readUserTarget :: String -> IO (Either UserTargetProblem UserTarget)
readUserTarget targetstr =
case testNamedTargets targetstr of
Just (Dependency pkgn verrange)
| pkgn == mkPackageName "world"
-> return $ if verrange == anyVersion
then Right UserTargetWorld
else Left UserTargetBadWorldPkg
Just dep -> return (Right (UserTargetNamed dep))
Nothing -> do
fileTarget <- testFileTargets targetstr
case fileTarget of
Just target -> return target
Nothing ->
case testUriTargets targetstr of
Just target -> return target
Nothing -> return (Left (UserTargetUnrecognised targetstr))
where
testNamedTargets = readPToMaybe parseDependencyOrPackageId
testFileTargets filename = do
isDir <- doesDirectoryExist filename
isFile <- doesFileExist filename
parentDirExists <- case takeDirectory filename of
[] -> return False
dir -> doesDirectoryExist dir
let result
| isDir
= Just (Right (UserTargetLocalDir filename))
| isFile && extensionIsTarGz filename
= Just (Right (UserTargetLocalTarball filename))
| isFile && takeExtension filename == ".cabal"
= Just (Right (UserTargetLocalCabalFile filename))
| isFile
= Just (Left (UserTargetUnexpectedFile filename))
| parentDirExists
= Just (Left (UserTargetNonexistantFile filename))
| otherwise
= Nothing
return result
testUriTargets str =
case parseAbsoluteURI str of
Just uri@URI {
uriScheme = scheme,
uriAuthority = Just URIAuth { uriRegName = host }
}
| scheme /= "http:" && scheme /= "https:" ->
Just (Left (UserTargetUnexpectedUriScheme targetstr))
| null host ->
Just (Left (UserTargetUnrecognisedUri targetstr))
| otherwise ->
Just (Right (UserTargetRemoteTarball uri))
_ -> Nothing
extensionIsTarGz f = takeExtension f == ".gz"
&& takeExtension (dropExtension f) == ".tar"
parseDependencyOrPackageId :: Parse.ReadP r Dependency
parseDependencyOrPackageId = parse
+++ liftM pkgidToDependency parse
where
pkgidToDependency :: PackageIdentifier -> Dependency
pkgidToDependency p = case packageVersion p of
v | v == nullVersion -> Dependency (packageName p) anyVersion
| otherwise -> Dependency (packageName p) (thisVersion v)
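-- Illustrative behaviour (informal, not from the original source): the parser
-- accepts both plain dependencies and package identifiers, so
--
-- >  "foo"      parses to a dependency on foo with 'anyVersion'
-- >  "foo-1.0"  parses to a dependency on foo pinned with 'thisVersion' 1.0
-- >  "foo < 2"  parses to a dependency on foo with the range @< 2@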
reportUserTargetProblems :: Verbosity -> [UserTargetProblem] -> IO ()
reportUserTargetProblems verbosity problems = do
case [ target | UserTargetUnrecognised target <- problems ] of
[] -> return ()
target -> die' verbosity
$ unlines
[ "Unrecognised target '" ++ name ++ "'."
| name <- target ]
++ "Targets can be:\n"
++ " - package names, e.g. 'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n"
++ " - the special 'world' target\n"
++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n"
++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'"
case [ () | UserTargetBadWorldPkg <- problems ] of
[] -> return ()
_ -> die' verbosity "The special 'world' target does not take any version."
case [ target | UserTargetNonexistantFile target <- problems ] of
[] -> return ()
target -> die' verbosity
$ unlines
[ "The file does not exist '" ++ name ++ "'."
| name <- target ]
case [ target | UserTargetUnexpectedFile target <- problems ] of
[] -> return ()
target -> die' verbosity
$ unlines
[ "Unrecognised file target '" ++ name ++ "'."
| name <- target ]
++ "File targets can be either package tarballs 'pkgname.tar.gz' "
++ "or cabal files 'pkgname.cabal'."
case [ target | UserTargetUnexpectedUriScheme target <- problems ] of
[] -> return ()
target -> die' verbosity
$ unlines
[ "URL target not supported '" ++ name ++ "'."
| name <- target ]
++ "Only 'http://' and 'https://' URLs are supported."
case [ target | UserTargetUnrecognisedUri target <- problems ] of
[] -> return ()
target -> die' verbosity
$ unlines
[ "Unrecognise URL target '" ++ name ++ "'."
| name <- target ]
-- ------------------------------------------------------------
-- * Resolving user targets to package specifiers
-- ------------------------------------------------------------
-- | Given a bunch of user-specified targets, try to resolve what it is they
-- refer to. They can either be specific packages (local dirs, tarballs etc)
-- or they can be named packages (with or without version info).
--
resolveUserTargets :: Package pkg
=> Verbosity
-> RepoContext
-> FilePath
-> PackageIndex pkg
-> [UserTarget]
-> IO [PackageSpecifier UnresolvedSourcePackage]
resolveUserTargets verbosity repoCtxt worldFile available userTargets = do
-- given the user targets, get a list of fully or partially resolved
-- package references
packageTargets <- mapM (readPackageTarget verbosity)
=<< mapM (fetchPackageTarget verbosity repoCtxt) . concat
=<< mapM (expandUserTarget verbosity worldFile) userTargets
-- users are allowed to give package names case-insensitively, so we must
-- disambiguate named package references
let (problems, packageSpecifiers) =
disambiguatePackageTargets available availableExtra packageTargets
-- use any extra specific available packages to help us disambiguate
availableExtra = [ packageName pkg
| PackageTargetLocation pkg <- packageTargets ]
reportPackageTargetProblems verbosity problems
return packageSpecifiers
-- ------------------------------------------------------------
-- * Package targets
-- ------------------------------------------------------------
-- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'.
-- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package.
--
data PackageTarget pkg =
PackageTargetNamed PackageName [PackageProperty] UserTarget
-- | A package identified by name, but case insensitively, so it needs
-- to be resolved to the right case-sensitive name.
| PackageTargetNamedFuzzy PackageName [PackageProperty] UserTarget
| PackageTargetLocation pkg
deriving (Show, Functor, Foldable, Traversable)
-- ------------------------------------------------------------
-- * Converting user targets to package targets
-- ------------------------------------------------------------
-- | Given a user-specified target, expand it to a bunch of package targets
-- (each of which refers to only one package).
--
expandUserTarget :: Verbosity
-> FilePath
-> UserTarget
-> IO [PackageTarget (PackageLocation ())]
expandUserTarget verbosity worldFile userTarget = case userTarget of
UserTargetNamed (Dependency name vrange) ->
let props = [ PackagePropertyVersion vrange
| not (isAnyVersion vrange) ]
in return [PackageTargetNamedFuzzy name props userTarget]
UserTargetWorld -> do
worldPkgs <- World.getContents verbosity worldFile
--TODO: should we warn if there are no world targets?
return [ PackageTargetNamed name props userTarget
| World.WorldPkgInfo (Dependency name vrange) flags <- worldPkgs
, let props = [ PackagePropertyVersion vrange
| not (isAnyVersion vrange) ]
++ [ PackagePropertyFlags flags
| not (null flags) ] ]
UserTargetLocalDir dir ->
return [ PackageTargetLocation (LocalUnpackedPackage dir) ]
UserTargetLocalCabalFile file -> do
let dir = takeDirectory file
_ <- tryFindPackageDesc verbosity dir (localPackageError dir) -- just as a check
return [ PackageTargetLocation (LocalUnpackedPackage dir) ]
UserTargetLocalTarball tarballFile ->
return [ PackageTargetLocation (LocalTarballPackage tarballFile) ]
UserTargetRemoteTarball tarballURL ->
return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ]
localPackageError :: FilePath -> String
localPackageError dir =
"Error reading local package.\nCouldn't find .cabal file in: " ++ dir
-- ------------------------------------------------------------
-- * Fetching and reading package targets
-- ------------------------------------------------------------
-- | Fetch any remote targets so that they can be read.
--
fetchPackageTarget :: Verbosity
-> RepoContext
-> PackageTarget (PackageLocation ())
-> IO (PackageTarget ResolvedPkgLoc)
fetchPackageTarget verbosity repoCtxt = traverse $
fetchPackage verbosity repoCtxt . fmap (const Nothing)
-- | Given a package target that has been fetched, read the .cabal file.
--
-- This only affects targets given by location; named targets are unaffected.
--
readPackageTarget :: Verbosity
-> PackageTarget ResolvedPkgLoc
-> IO (PackageTarget UnresolvedSourcePackage)
readPackageTarget verbosity = traverse modifyLocation
where
modifyLocation location = case location of
LocalUnpackedPackage dir -> do
pkg <- tryFindPackageDesc verbosity dir (localPackageError dir) >>=
readGenericPackageDescription verbosity
return $ SourcePackage {
packageInfoId = packageId pkg,
packageDescription = pkg,
packageSource = fmap Just location,
packageDescrOverride = Nothing
}
LocalTarballPackage tarballFile ->
readTarballPackageTarget location tarballFile tarballFile
RemoteTarballPackage tarballURL tarballFile ->
readTarballPackageTarget location tarballFile (show tarballURL)
RepoTarballPackage _repo _pkgid _ ->
error "TODO: readPackageTarget RepoTarballPackage"
-- For repo tarballs this info should be obtained from the index.
readTarballPackageTarget location tarballFile tarballOriginalLoc = do
(filename, content) <- extractTarballPackageCabalFile
tarballFile tarballOriginalLoc
case parsePackageDescription' content of
Nothing -> die' verbosity $ "Could not parse the cabal file "
++ filename ++ " in " ++ tarballFile
Just pkg ->
return $ SourcePackage {
packageInfoId = packageId pkg,
packageDescription = pkg,
packageSource = fmap Just location,
packageDescrOverride = Nothing
}
extractTarballPackageCabalFile :: FilePath -> String
-> IO (FilePath, BS.ByteString)
extractTarballPackageCabalFile tarballFile tarballOriginalLoc =
either (die' verbosity . formatErr) return
. check
. accumEntryMap
. Tar.filterEntries isCabalFile
. Tar.read
. GZipUtils.maybeDecompress
=<< BS.readFile tarballFile
where
formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg
accumEntryMap = Tar.foldlEntries
(\m e -> Map.insert (Tar.entryTarPath e) e m)
Map.empty
check (Left e) = Left (show e)
check (Right m) = case Map.elems m of
[] -> Left noCabalFile
[file] -> case Tar.entryContent file of
Tar.NormalFile content _ -> Right (Tar.entryPath file, content)
_ -> Left noCabalFile
_files -> Left multipleCabalFiles
where
noCabalFile = "No cabal file found"
multipleCabalFiles = "Multiple cabal files found"
isCabalFile e = case splitPath (Tar.entryPath e) of
[ _dir, file] -> takeExtension file == ".cabal"
[".", _dir, file] -> takeExtension file == ".cabal"
_ -> False
parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription
#ifdef CABAL_PARSEC
parsePackageDescription' bs =
parseGenericPackageDescriptionMaybe (BS.toStrict bs)
#else
parsePackageDescription' content =
case parseGenericPackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of
ParseOk _ pkg -> Just pkg
_ -> Nothing
#endif
-- ------------------------------------------------------------
-- * Checking package targets
-- ------------------------------------------------------------
data PackageTargetProblem
= PackageNameUnknown PackageName UserTarget
| PackageNameAmbiguous PackageName [PackageName] UserTarget
deriving Show
-- | Users are allowed to give package names case-insensitively, so we must
-- disambiguate named package references.
--
disambiguatePackageTargets :: Package pkg'
=> PackageIndex pkg'
-> [PackageName]
-> [PackageTarget pkg]
-> ( [PackageTargetProblem]
, [PackageSpecifier pkg] )
disambiguatePackageTargets availablePkgIndex availableExtra targets =
partitionEithers (map disambiguatePackageTarget targets)
where
disambiguatePackageTarget packageTarget = case packageTarget of
PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg)
PackageTargetNamed pkgname props userTarget
| null (PackageIndex.lookupPackageName availablePkgIndex pkgname)
-> Left (PackageNameUnknown pkgname userTarget)
| otherwise -> Right (NamedPackage pkgname props)
PackageTargetNamedFuzzy pkgname props userTarget ->
case disambiguatePackageName packageNameEnv pkgname of
None -> Left (PackageNameUnknown
pkgname userTarget)
Ambiguous pkgnames -> Left (PackageNameAmbiguous
pkgname pkgnames userTarget)
Unambiguous pkgname' -> Right (NamedPackage pkgname' props)
-- use any extra specific available packages to help us disambiguate
packageNameEnv :: PackageNameEnv
packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex)
(extraPackageNameEnv availableExtra)
-- | Report problems to the user. That is, if there are any problems
-- then raise an exception.
reportPackageTargetProblems :: Verbosity
-> [PackageTargetProblem] -> IO ()
reportPackageTargetProblems verbosity problems = do
case [ pkg | PackageNameUnknown pkg originalTarget <- problems
, not (isUserTargetWorld originalTarget) ] of
[] -> return ()
pkgs -> die' verbosity $ unlines
[ "There is no package named '" ++ display name ++ "'. "
| name <- pkgs ]
++ "You may need to run 'cabal update' to get the latest "
++ "list of available packages."
case [ (pkg, matches) | PackageNameAmbiguous pkg matches _ <- problems ] of
[] -> return ()
ambiguities -> die' verbosity $ unlines
[ "The package name '" ++ display name
++ "' is ambiguous. It could be: "
++ intercalate ", " (map display matches)
| (name, matches) <- ambiguities ]
case [ pkg | PackageNameUnknown pkg UserTargetWorld <- problems ] of
[] -> return ()
pkgs -> warn verbosity $
"The following 'world' packages will be ignored because "
++ "they refer to packages that cannot be found: "
++ intercalate ", " (map display pkgs) ++ "\n"
++ "You can suppress this warning by correcting the world file."
where
isUserTargetWorld UserTargetWorld = True; isUserTargetWorld _ = False
-- ------------------------------------------------------------
-- * Disambiguating package names
-- ------------------------------------------------------------
data MaybeAmbiguous a = None | Unambiguous a | Ambiguous [a]
-- | Given a package name and a list of matching names, figure out which one it
-- might be referring to. If there is an exact case-sensitive match then that's
-- ok. If it matches just one package case-insensitively then that's also ok.
-- The only problem is if it matches multiple packages case-insensitively, in
-- that case it is ambiguous.
--
disambiguatePackageName :: PackageNameEnv
-> PackageName
-> MaybeAmbiguous PackageName
disambiguatePackageName (PackageNameEnv pkgNameLookup) name =
case nub (pkgNameLookup name) of
[] -> None
[name'] -> Unambiguous name'
names -> case find (name==) names of
Just name' -> Unambiguous name'
Nothing -> Ambiguous names
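-- Illustrative behaviour (informal, with hypothetical package names): if the
-- environment knows about "HUnit", a fuzzy lookup of "hunit" finds exactly
-- one candidate and resolves to @Unambiguous "HUnit"@. If it knew both "Foo"
-- and "foo", looking up "foo" would still be unambiguous thanks to the exact
-- case-sensitive match, whereas looking up "fOo" would be reported as
-- @Ambiguous ["Foo", "foo"]@.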
newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName])
instance Monoid PackageNameEnv where
mempty = PackageNameEnv (const [])
mappend = (<>)
instance Semigroup PackageNameEnv where
PackageNameEnv lookupA <> PackageNameEnv lookupB =
PackageNameEnv (\name -> lookupA name ++ lookupB name)
indexPackageNameEnv :: PackageIndex pkg -> PackageNameEnv
indexPackageNameEnv pkgIndex = PackageNameEnv pkgNameLookup
where
pkgNameLookup pname =
map fst (PackageIndex.searchByName pkgIndex $ unPackageName pname)
extraPackageNameEnv :: [PackageName] -> PackageNameEnv
extraPackageNameEnv names = PackageNameEnv pkgNameLookup
where
pkgNameLookup pname =
[ pname'
| let lname = lowercase (unPackageName pname)
, pname' <- names
, lowercase (unPackageName pname') == lname ]
-- ------------------------------------------------------------
-- * Package constraints
-- ------------------------------------------------------------
-- | Version of 'Qualifier' that a user may specify on the
-- command line.
data UserQualifier =
-- | Top-level dependency.
UserQualToplevel
-- | Setup dependency.
| UserQualSetup PackageName
-- | Executable dependency.
| UserQualExe PackageName PackageName
deriving (Eq, Show, Generic)
instance Binary UserQualifier
-- | Version of 'ConstraintScope' that a user may specify on the
-- command line.
data UserConstraintScope =
-- | Scope that applies to the package when it has the specified qualifier.
UserQualified UserQualifier PackageName
-- | Scope that applies to the package when it has a setup qualifier.
| UserAnySetupQualifier PackageName
-- | Scope that applies to the package when it has any qualifier.
| UserAnyQualifier PackageName
deriving (Eq, Show, Generic)
instance Binary UserConstraintScope
fromUserQualifier :: UserQualifier -> Qualifier
fromUserQualifier UserQualToplevel = QualToplevel
fromUserQualifier (UserQualSetup name) = QualSetup name
fromUserQualifier (UserQualExe name1 name2) = QualExe name1 name2
fromUserConstraintScope :: UserConstraintScope -> ConstraintScope
fromUserConstraintScope (UserQualified q pn) =
ScopeQualified (fromUserQualifier q) pn
fromUserConstraintScope (UserAnySetupQualifier pn) = ScopeAnySetupQualifier pn
fromUserConstraintScope (UserAnyQualifier pn) = ScopeAnyQualifier pn
-- | Version of 'PackageConstraint' that the user can specify on
-- the command line.
data UserConstraint =
UserConstraint UserConstraintScope PackageProperty
deriving (Eq, Show, Generic)
instance Binary UserConstraint
userConstraintPackageName :: UserConstraint -> PackageName
userConstraintPackageName (UserConstraint scope _) = scopePN scope
where
scopePN (UserQualified _ pn) = pn
scopePN (UserAnyQualifier pn) = pn
scopePN (UserAnySetupQualifier pn) = pn
userToPackageConstraint :: UserConstraint -> PackageConstraint
userToPackageConstraint (UserConstraint scope prop) =
PackageConstraint (fromUserConstraintScope scope) prop
readUserConstraint :: String -> Either String UserConstraint
readUserConstraint str =
case readPToMaybe parse str of
Nothing -> Left msgCannotParse
Just c -> Right c
where
msgCannotParse =
"expected a (possibly qualified) package name followed by a " ++
"constraint, which is either a version range, 'installed', " ++
"'source', 'test', 'bench', or flags"
instance Text UserConstraint where
disp (UserConstraint scope prop) =
dispPackageConstraint $ PackageConstraint (fromUserConstraintScope scope) prop
parse =
let parseConstraintScope :: Parse.ReadP a UserConstraintScope
parseConstraintScope =
do
_ <- Parse.string "any."
pn <- parse
return (UserAnyQualifier pn)
+++
do
_ <- Parse.string "setup."
pn <- parse
return (UserAnySetupQualifier pn)
+++
do
-- Qualified name
pn <- parse
(return (UserQualified UserQualToplevel pn)
+++
do _ <- Parse.string ":setup."
pn2 <- parse
return (UserQualified (UserQualSetup pn) pn2))
-- -- TODO: Re-enable parsing of UserQualExe once we decide on a syntax.
--
-- +++
-- do _ <- Parse.string ":"
-- pn2 <- parse
-- _ <- Parse.string ":exe."
-- pn3 <- parse
-- return (UserQualExe pn pn2, pn3)
in do
scope <- parseConstraintScope
-- Package property
let keyword str x = Parse.skipSpaces1 >> Parse.string str >> return x
prop <- ((parse >>= return . PackagePropertyVersion)
+++
keyword "installed" PackagePropertyInstalled
+++
keyword "source" PackagePropertySource
+++
keyword "test" (PackagePropertyStanzas [TestStanzas])
+++
keyword "bench" (PackagePropertyStanzas [BenchStanzas]))
-- Note: the parser is left-biased here so that we
-- don't get an ambiguous parse from 'installed',
-- 'source', etc. being regarded as flags.
<++
(Parse.skipSpaces1 >> parseFlagAssignment
>>= return . PackagePropertyFlags)
-- Result
return (UserConstraint scope prop)
|
mydaum/cabal
|
cabal-install/Distribution/Client/Targets.hs
|
Haskell
|
bsd-3-clause
| 31,363
|
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE MultiParamTypeClasses, Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Groups.Helpers
-- Copyright : Quentin Moser <moserq@gmail.com>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : orphaned
-- Stability : stable
-- Portability : unportable
--
-- Utility functions for "XMonad.Layout.Groups".
--
-----------------------------------------------------------------------------
module XMonad.Layout.Groups.Helpers ( -- * Usage
-- $usage
-- ** Layout-generic actions
swapUp
, swapDown
, swapMaster
, focusUp
, focusDown
, focusMaster
, toggleFocusFloat
-- ** 'G.Groups'-specific actions
, swapGroupUp
, swapGroupDown
, swapGroupMaster
, focusGroupUp
, focusGroupDown
, focusGroupMaster
, moveToGroupUp
, moveToGroupDown
, moveToNewGroupUp
, moveToNewGroupDown
, splitGroup ) where
import XMonad hiding ((|||))
import qualified XMonad.StackSet as W
import qualified XMonad.Layout.Groups as G
import XMonad.Actions.MessageFeedback
import Control.Monad (unless)
import qualified Data.Map as M
-- $usage
--
-- This module provides helper functions for use with "XMonad.Layout.Groups"-based
-- layouts. You can use its contents by adding
--
-- > import XMonad.Layout.Groups.Helpers
--
-- to the top of your @.\/.xmonad\/xmonad.hs@.
--
-- "XMonad.Layout.Groups"-based layouts do not have the same notion
-- of window ordering as the rest of XMonad. For this reason, the usual
-- ways of reordering windows and moving focus do not work with them.
-- "XMonad.Layout.Groups" provides 'Message's that can be used to obtain
-- the right effect.
--
-- But what if you want to use both 'G.Groups' and other layouts?
-- This module provides actions that try to send 'G.GroupsMessage's, and
-- fall back to the classic way if the current layout doesn't handle them.
-- They are in the section called \"Layout-generic actions\".
--
-- The sections \"Groups-specific actions\" contains actions that don't make
-- sense for non-'G.Groups'-based layouts. These are simply wrappers around
-- the equivalent 'G.GroupsMessage's, but are included so you don't have to
-- write @sendMessage $ Modify $ ...@ every time.
--
-- This module exports many operations with the same names as
-- 'G.ModifySpec's from "XMonad.Layout.Groups", so if you want
-- to import both, we suggest importing "XMonad.Layout.Groups"
-- qualified:
--
-- > import qualified XMonad.Layout.Groups as G
--
-- For more information on how to extend your layout hook and key bindings, see
-- "XMonad.Doc.Extending".
-- ** Layout-generic actions
-- #Layout-generic actions#
alt :: G.ModifySpec -> (WindowSet -> WindowSet) -> X ()
alt f g = alt2 (G.Modify f) $ windows g
alt2 :: G.GroupsMessage -> X () -> X ()
alt2 m x = do b <- send m
unless b x
-- | Swap the focused window with the previous one
swapUp :: X ()
swapUp = alt G.swapUp W.swapUp
-- | Swap the focused window with the next one
swapDown :: X ()
swapDown = alt G.swapDown W.swapDown
-- | Swap the focused window with the master window
swapMaster :: X ()
swapMaster = alt G.swapMaster W.swapMaster
-- | If the focused window is floating, focus the next floating
-- window. Otherwise, focus the next non-floating one.
focusUp :: X ()
focusUp = ifFloat focusFloatUp focusNonFloatUp
-- | If the focused window is floating, focus the next floating
-- window. Otherwise, focus the next non-floating one.
focusDown :: X ()
focusDown = ifFloat focusFloatDown focusNonFloatDown
-- | Move focus to the master window
focusMaster :: X ()
focusMaster = alt G.focusMaster W.shiftMaster
-- | Move focus between the floating and non-floating layers
toggleFocusFloat :: X ()
toggleFocusFloat = ifFloat focusNonFloat focusFloatUp
-- *** Floating layer helpers
getFloats :: X [Window]
getFloats = gets $ M.keys . W.floating . windowset
getWindows :: X [Window]
getWindows = gets $ W.integrate' . W.stack . W.workspace . W.current . windowset
ifFloat :: X () -> X () -> X ()
ifFloat x1 x2 = withFocused $ \w -> do floats <- getFloats
if elem w floats then x1 else x2
focusNonFloat :: X ()
focusNonFloat = alt2 G.Refocus helper
where helper = withFocused $ \w -> do
ws <- getWindows
floats <- getFloats
let (before, after) = span (/=w) ws
case filter (flip notElem floats) $ after ++ before of
[] -> return ()
w':_ -> focus w'
focusHelper :: (Bool -> Bool) -- ^ if you want to focus a floating window, 'id'.
-- if you want a non-floating one, 'not'.
-> ([Window] -> [Window]) -- ^ if you want the next window, 'id'.
-- if you want the previous one, 'reverse'.
-> X ()
focusHelper f g = withFocused $ \w -> do
ws <- getWindows
let (before, _:after) = span (/=w) ws
let toFocus = g $ after ++ before
floats <- getFloats
case filter (f . flip elem floats) toFocus of
[] -> return ()
w':_ -> focus w'
focusNonFloatUp :: X ()
focusNonFloatUp = alt2 (G.Modify G.focusUp) $ focusHelper not reverse
focusNonFloatDown :: X ()
focusNonFloatDown = alt2 (G.Modify G.focusDown) $ focusHelper not id
focusFloatUp :: X ()
focusFloatUp = focusHelper id reverse
focusFloatDown :: X ()
focusFloatDown = focusHelper id id
-- ** Groups-specific actions
wrap :: G.ModifySpec -> X ()
wrap x = sendMessage (G.Modify x)
-- | Swap the focused group with the previous one
swapGroupUp :: X ()
swapGroupUp = wrap G.swapGroupUp
-- | Swap the focused group with the next one
swapGroupDown :: X ()
swapGroupDown = wrap G.swapGroupDown
-- | Swap the focused group with the master group
swapGroupMaster :: X ()
swapGroupMaster = wrap G.swapGroupMaster
-- | Move the focus to the previous group
focusGroupUp :: X ()
focusGroupUp = wrap G.focusGroupUp
-- | Move the focus to the next group
focusGroupDown :: X ()
focusGroupDown = wrap G.focusGroupDown
-- | Move the focus to the master group
focusGroupMaster :: X ()
focusGroupMaster = wrap G.focusGroupMaster
-- | Move the focused window to the previous group. The 'Bool' argument
-- determines what will be done if the focused window is in the very first
-- group: Wrap back to the end ('True'), or create a new group before
-- it ('False').
moveToGroupUp :: Bool -> X ()
moveToGroupUp b = wrap (G.moveToGroupUp b)
-- | Move the focused window to the next group. The 'Bool' argument
-- determines what will be done if the focused window is in the very last
-- group: Wrap back to the beginning ('True'), or create a new group after
-- it ('False').
moveToGroupDown :: Bool -> X ()
moveToGroupDown b = wrap (G.moveToGroupDown b)
-- | Move the focused window to a new group before the current one
moveToNewGroupUp :: X ()
moveToNewGroupUp = wrap G.moveToNewGroupUp
-- | Move the focused window to a new group after the current one
moveToNewGroupDown :: X ()
moveToNewGroupDown = wrap G.moveToNewGroupDown
-- | Split the focused group in two at the position of the focused
-- window.
splitGroup :: X ()
splitGroup = wrap G.splitGroup
|
f1u77y/xmonad-contrib
|
XMonad/Layout/Groups/Helpers.hs
|
Haskell
|
bsd-3-clause
| 8,103
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK hide #-}
module Network.Xmpp.IM.Message where
import Data.Default
import Data.Function
import Data.List
import Data.Text (Text)
import Data.XML.Pickle
import Data.XML.Types
import Network.Xmpp.Marshal
import Network.Xmpp.Types
data MessageBody = MessageBody { bodyLang :: Maybe LangTag
, bodyContent :: Text
}
data MessageThread = MessageThread { threadID :: Text
, threadParent :: Maybe Text
}
data MessageSubject = MessageSubject { subjectLang :: Maybe LangTag
, subjectContent :: Text
}
-- | The instant message (IM) specific part of a message.
data InstantMessage = InstantMessage { imThread :: Maybe MessageThread
, imSubject :: [MessageSubject]
, imBody :: [MessageBody]
}
-- | Empty instant message.
instantMessage :: InstantMessage
instantMessage = InstantMessage { imThread = Nothing
, imSubject = []
, imBody = []
}
instance Default InstantMessage where
def = instantMessage
-- | Get the IM specific parts of a message. Returns 'Nothing' when the received
-- payload is not valid IM data.
getIM :: Message -> Maybe InstantMessage
getIM im = either (const Nothing) Just . unpickle xpIM $ messagePayload im
sanitizeIM :: InstantMessage -> InstantMessage
sanitizeIM im = im{imBody = nubBy ((==) `on` bodyLang) $ imBody im}
-- | Append IM data to a message. Additional IM bodies with the same LangTag are
-- discarded.
withIM :: Message -> InstantMessage -> Message
withIM m im = m{ messagePayload = messagePayload m
++ pickleTree xpIM (sanitizeIM im) }
imToElements :: InstantMessage -> [Element]
imToElements im = pickle xpIM (sanitizeIM im)
-- | Generate a simple message
simpleIM :: Jid -- ^ recipient
-> Text -- ^ body
-> Message
simpleIM to bd = withIM message{messageTo = Just to}
instantMessage{imBody = [MessageBody Nothing bd]}
-- | Generate an answer from a received message. The recipient is
-- taken from the original sender, the sender is set to 'Nothing',
-- message ID, language tag, message type as well as subject and
-- thread are inherited.
--
-- Additional IM bodies with the same LangTag are discarded.
answerIM :: [MessageBody] -> Message -> Maybe Message
answerIM bd msg = case getIM msg of
Nothing -> Nothing
Just im -> Just $ flip withIM (im{imBody = bd}) $
message { messageID = messageID msg
, messageFrom = Nothing
, messageTo = messageFrom msg
, messageLangTag = messageLangTag msg
, messageType = messageType msg
}
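-- Illustrative sketch (a hypothetical helper, not part of the original
-- module): an answer that simply echoes the received bodies back to the
-- sender.
echoIM :: Message -> Maybe Message
echoIM msg = do
    im <- getIM msg
    answerIM (imBody im) msg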
--------------------------
-- Picklers --------------
--------------------------
xpIM :: PU [Element] InstantMessage
xpIM = xpWrap (\(t, s, b) -> InstantMessage t s b)
(\(InstantMessage t s b) -> (t, s, b))
. xpClean
$ xp3Tuple
xpMessageThread
xpMessageSubject
xpMessageBody
xpMessageSubject :: PU [Element] [MessageSubject]
xpMessageSubject = xpUnliftElems .
xpWrap (map $ \(l, s) -> MessageSubject l s)
(map $ \(MessageSubject l s) -> (l,s))
$ xpElems "{jabber:client}subject" xpLangTag $ xpContent xpId
xpMessageBody :: PU [Element] [MessageBody]
xpMessageBody = xpUnliftElems .
xpWrap (map $ \(l, s) -> MessageBody l s)
(map $ \(MessageBody l s) -> (l,s))
$ xpElems "{jabber:client}body" xpLangTag $ xpContent xpId
xpMessageThread :: PU [Element] (Maybe MessageThread)
xpMessageThread = xpUnliftElems
. xpOption
. xpWrap (\(t, p) -> MessageThread p t)
(\(MessageThread p t) -> (t,p))
$ xpElem "{jabber:client}thread"
(xpAttrImplied "parent" xpId)
(xpContent xpId)
|
Philonous/pontarius-xmpp
|
source/Network/Xmpp/IM/Message.hs
|
Haskell
|
bsd-3-clause
| 4,325
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
-- | The web server.
module Ircbrowse.Server where
import Ircbrowse.Types
import qualified Ircbrowse.Controllers as C
import Snap.App
import Snap.Http.Server hiding (Config)
import Snap.Util.FileServe
-- | Run the server.
runServer :: Config -> Pool -> IO ()
runServer config pool = do
setUnicodeLocale "en_US"
httpServe server (serve config pool)
where server = setPort 10001 defaultConfig
-- | Serve the controllers.
serve :: Config -> Pool -> Snap ()
serve config pool = route routes where
routes = [("/js/",serveDirectory "static/js")
,("/css/",serveDirectory "static/css")
,("/js/",serveDirectory "static/js")
,("/browse/:channel",run C.browse)
,("/nick-cloud/:channel",run C.nickCloud)
,("/social",run C.socialGraph)
,("/day/:channel/:year/:month/:day",run (C.browseDay False))
,("/day/:channel/today/:mode",run (C.browseDay True))
,("/day/:channel/today",run (C.browseDay True))
,("/nick/:nick",run C.nickProfile)
,("/nicks/:channel/:mode",run C.allNicks)
,("/nicks/:channel",run C.allNicks)
,("/quotes.rss",run C.quotes)
,("/pdfs/:channel/:unique",run C.pdfs)
,("/pdfs/:channel",run C.pdfs)
,("/stats/:channel",run C.stats)
,("/calendar/:channel",run C.calendar)
,("/:channel",run C.stats)
,("/selection/:channel",run C.browseSpecified)
,("/export/:filename",run C.export)
,("/",run C.overview)
]
run = runHandler PState config pool
|
plow-technologies/ircbrowse
|
src/Ircbrowse/Server.hs
|
Haskell
|
bsd-3-clause
| 1,704
|
{----------------------------------------------------------------------------
Abstract syntax of JOOS, based on:
David A. Watt. JOOS action semantics. Version 1, available from
http://www.dcs.gla.ac.uk/~daw/publications/JOOS.ps, October 1997.
Modifications:
o StatFocus
o StringLiterals
----------------------------------------------------------------------------}
module Datatypes where
import TermRep
import Monad
data Assignment = Assignment Identifier Expression
deriving (Eq, Show)
data InstanceCreation = InstanceCreation Identifier Arguments
deriving (Eq, Show)
data MethodInvocation = ExpressionInvocation Expression Identifier Arguments
| SuperInvocation Identifier Arguments
deriving (Eq, Show)
data Arguments = Arguments [Expression]
deriving (Eq, Show)
data Expression = Literal Literal
| Identifier Identifier
| This
| PrefixExpr PrefixOperator Expression
| InfixExpr Expression InfixOperator Expression
| AndOrExpr Expression AndOr Expression
| InstanceOf Expression Identifier
| TypeCast Type Expression
| BracketExpr Expression
| AssignmentExpr Assignment
| InstanceCreationExpr InstanceCreation
| MethodInvocationExpr MethodInvocation
deriving (Eq, Show)
data AndOr = AND | OR
deriving (Eq, Show)
data PrefixOperator = Neg | Fac
deriving (Eq, Show)
data InfixOperator = Eq | NEQ | Lt | Gt | LEQ | GEQ
| PLUS | MINUS | MUL | DIV | MOD
deriving (Eq, Show)
data Literal = BooleanLit BooleanLiteral
| IntegerLit IntegerLiteral
| Null
| StringLit StringLiteral
deriving (Eq, Show)
data BooleanLiteral = TRUE | FALSE
deriving (Eq, Show)
type IntegerLiteral = Integer
type StringLiteral = String
type Identifier = String
data BlockStatements = BlockStatements [VariableDeclaration] [Statement]
deriving (Eq, Show)
data Statement = Skip
| Block BlockStatements
| AssignmentStat Assignment
| InstanceCreationStat InstanceCreation
| MethodInvocationStat MethodInvocation
| ReturnStat (Maybe Expression)
| IfStat Expression Statement Statement
| WhileStat Expression Statement
--- Additions
| StatFocus Statement
deriving (Eq, Show)
data ClassDeclaration = ClassDecl FinalOpt Identifier Identifier
[FieldDeclaration]
ConstructorDeclaration
[MethodDeclaration]
deriving (Eq, Show)
type FinalOpt = Bool
data FieldDeclaration = FieldDecl Type Identifier
deriving (Eq, Show)
data ConstructorDeclaration
= ConstructorDecl Identifier FormalParameters
Arguments BlockStatements
deriving (Eq, Show)
data MethodDeclaration = MethodDecl (Maybe Type) Identifier FormalParameters
BlockStatements
deriving (Eq, Show)
data FormalParameters = FormalParams [FormalParameter]
deriving (Eq, Show)
data FormalParameter = FormalParam Type Identifier
deriving (Eq, Show)
data VariableDeclaration
= VariableDecl Type Identifier
deriving (Eq, Show)
data Type = INT | BOOLEAN | Type Identifier
deriving (Eq, Show)
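-- Illustrative example (not in the original file): roughly the JOOS statement
-- @if (x < 10) x = x + 1; else ;@ as abstract syntax.
exampleIf :: Statement
exampleIf =
  IfStat (InfixExpr (Identifier "x") Lt (Literal (IntegerLit 10)))
         (AssignmentStat (Assignment "x"
             (InfixExpr (Identifier "x") PLUS (Literal (IntegerLit 1)))))
         Skip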
|
forste/haReFork
|
StrategyLib-4.0-beta/examples/joos-padl02/Datatypes.hs
|
Haskell
|
bsd-3-clause
| 3,590
|
{-# LANGUAGE OverloadedStrings #-}
module Stack.Options
(Command(..)
,benchOptsParser
,buildOptsParser
,configOptsParser
,dockerOptsParser
,dockerCleanupOptsParser
,dotOptsParser
,execOptsParser
,globalOptsParser
,initOptsParser
,newOptsParser
,logLevelOptsParser
,abstractResolverOptsParser
,solverOptsParser
,testOptsParser
) where
import Control.Monad.Logger (LogLevel(..))
import Data.Char (isSpace, toLower)
import Data.List.Split (splitOn)
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Text.Read (decimal)
import Options.Applicative.Args
import Options.Applicative.Builder.Extra
import Options.Applicative.Simple
import Options.Applicative.Types (readerAsk)
import Stack.Build.Types
import Stack.Docker
import qualified Stack.Docker as Docker
import Stack.Dot
import Stack.Init
import Stack.New (NewOpts(..))
import Stack.Types
-- | Command sum type for conditional arguments.
data Command
= Build
| Test
| Haddock
| Bench
deriving (Eq)
-- | Parser for bench arguments.
benchOptsParser :: Parser BenchmarkOpts
benchOptsParser = BenchmarkOpts
<$> optional (strOption (long "benchmark-arguments" <>
metavar "BENCH_ARGS" <>
help ("Forward BENCH_ARGS to the benchmark suite. " <>
"Supports templates from `cabal bench`")))
-- | Parser for build arguments.
buildOptsParser :: Command
-> Bool -- ^ default copy-bins value
-> Parser BuildOpts
buildOptsParser cmd defCopyBins =
BuildOpts <$> target <*> libProfiling <*> exeProfiling <*>
optimize <*> haddock <*> haddockDeps <*> finalAction <*> dryRun <*> ghcOpts <*>
flags <*> copyBins <*> preFetch <*> onlySnapshot <*>
fileWatch' <*> keepGoing <*> forceDirty
where optimize =
maybeBoolFlags "optimizations" "optimizations for TARGETs and all its dependencies" idm
target =
fmap (map T.pack)
(many (strArgument
(metavar "TARGET" <>
help "If none specified, use all packages")))
libProfiling =
boolFlags False
"library-profiling"
"library profiling for TARGETs and all its dependencies"
idm
exeProfiling =
boolFlags False
"executable-profiling"
"library profiling for TARGETs and all its dependencies"
idm
haddock =
boolFlags (cmd == Haddock)
"haddock"
"building Haddocks"
idm
haddockDeps =
if cmd == Haddock
then maybeBoolFlags
"haddock-deps"
"building Haddocks for dependencies"
idm
else pure Nothing
finalAction = pure DoNothing
copyBins = boolFlags defCopyBins
"copy-bins"
"copying binaries to the local-bin-path (see 'stack path')"
idm
dryRun = flag False True (long "dry-run" <>
help "Don't build anything, just prepare to")
ghcOpts = (++)
<$> flag [] ["-Wall", "-Werror"]
( long "pedantic"
<> help "Turn on -Wall and -Werror (note: option name may change in the future"
)
<*> many (fmap T.pack
(strOption (long "ghc-options" <>
metavar "OPTION" <>
help "Additional options passed to GHC")))
flags =
fmap (Map.unionsWith Map.union) $ many
(option readFlag
( long "flag"
<> metavar "PACKAGE:[-]FLAG"
<> help "Override flags set in stack.yaml (applies to local packages and extra-deps)"
))
preFetch = flag False True
(long "prefetch" <>
help "Fetch packages necessary for the build immediately, useful with --dry-run")
onlySnapshot = flag False True
(long "only-snapshot" <>
help "Only build packages for the snapshot database, not the local database")
fileWatch' = flag False True
(long "file-watch" <>
help "Watch for changes in local files and automatically rebuild")
keepGoing = maybeBoolFlags
"keep-going"
"continue running after a step fails (default: false for build, true for test/bench)"
idm
forceDirty = flag False True
(long "force-dirty" <>
help "Force treating all local packages as having dirty files (useful for cases where stack can't detect a file change)")
-- | Parser for package:[-]flag
readFlag :: ReadM (Map (Maybe PackageName) (Map FlagName Bool))
readFlag = do
s <- readerAsk
case break (== ':') s of
(pn, ':':mflag) -> do
pn' <-
case parsePackageNameFromString pn of
Nothing
| pn == "*" -> return Nothing
| otherwise -> readerError $ "Invalid package name: " ++ pn
Just x -> return $ Just x
let (b, flagS) =
case mflag of
'-':x -> (False, x)
_ -> (True, mflag)
flagN <-
case parseFlagNameFromString flagS of
Nothing -> readerError $ "Invalid flag name: " ++ flagS
Just x -> return x
return $ Map.singleton pn' $ Map.singleton flagN b
_ -> readerError "Must have a colon"
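-- Illustrative inputs accepted by 'readFlag' (the package and flag names here
-- are hypothetical):
--
-- > --flag text:developer       -- enable flag "developer" for package "text"
-- > --flag text:-developer      -- a leading '-' on the flag disables it
-- > --flag *:integer-simple     -- "*" applies the flag to all packages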
-- | Command-line arguments parser for configuration.
configOptsParser :: Bool -> Parser ConfigMonoid
configOptsParser docker =
(\opts systemGHC installGHC arch os jobs includes libs skipGHCCheck skipMsys localBin -> mempty
{ configMonoidDockerOpts = opts
, configMonoidSystemGHC = systemGHC
, configMonoidInstallGHC = installGHC
, configMonoidSkipGHCCheck = skipGHCCheck
, configMonoidArch = arch
, configMonoidOS = os
, configMonoidJobs = jobs
, configMonoidExtraIncludeDirs = includes
, configMonoidExtraLibDirs = libs
, configMonoidSkipMsys = skipMsys
, configMonoidLocalBinPath = localBin
})
<$> dockerOptsParser docker
<*> maybeBoolFlags
"system-ghc"
"using the system installed GHC (on the PATH) if available and a matching version"
idm
<*> maybeBoolFlags
"install-ghc"
"downloading and installing GHC if necessary (can be done manually with stack setup)"
idm
<*> optional (strOption
( long "arch"
<> metavar "ARCH"
<> help "System architecture, e.g. i386, x86_64"
))
<*> optional (strOption
( long "os"
<> metavar "OS"
<> help "Operating system, e.g. linux, windows"
))
<*> optional (option auto
( long "jobs"
<> short 'j'
<> metavar "JOBS"
<> help "Number of concurrent jobs to run"
))
<*> fmap (Set.fromList . map T.pack) (many $ strOption
( long "extra-include-dirs"
<> metavar "DIR"
<> help "Extra directories to check for C header files"
))
<*> fmap (Set.fromList . map T.pack) (many $ strOption
( long "extra-lib-dirs"
<> metavar "DIR"
<> help "Extra directories to check for libraries"
))
<*> maybeBoolFlags
"skip-ghc-check"
"skipping the GHC version and architecture check"
idm
<*> maybeBoolFlags
"skip-msys"
"skipping the local MSYS installation (Windows only)"
idm
<*> optional (strOption
( long "local-bin-path"
<> metavar "DIR"
<> help "Install binaries to DIR"
))
-- | Options parser configuration for Docker.
dockerOptsParser :: Bool -> Parser DockerOptsMonoid
dockerOptsParser showOptions =
DockerOptsMonoid
<$> pure Nothing
<*> maybeBoolFlags dockerCmdName
"using a Docker container"
hide
<*> ((Just . DockerMonoidRepo) <$> option str (long (dockerOptName dockerRepoArgName) <>
hide <>
metavar "NAME" <>
help "Docker repository name") <|>
(Just . DockerMonoidImage) <$> option str (long (dockerOptName dockerImageArgName) <>
hide <>
metavar "IMAGE" <>
help "Exact Docker image ID (overrides docker-repo)") <|>
pure Nothing)
<*> maybeBoolFlags (dockerOptName dockerRegistryLoginArgName)
"registry requires login"
hide
<*> maybeStrOption (long (dockerOptName dockerRegistryUsernameArgName) <>
hide <>
metavar "USERNAME" <>
help "Docker registry username")
<*> maybeStrOption (long (dockerOptName dockerRegistryPasswordArgName) <>
hide <>
metavar "PASSWORD" <>
help "Docker registry password")
    <*> maybeBoolFlags (dockerOptName dockerAutoPullArgName)
                       "automatically pulling the latest version of the image"
hide
<*> maybeBoolFlags (dockerOptName dockerDetachArgName)
"running a detached Docker container"
hide
<*> maybeBoolFlags (dockerOptName dockerPersistArgName)
"not deleting container after it exits"
hide
<*> maybeStrOption (long (dockerOptName dockerContainerNameArgName) <>
hide <>
metavar "NAME" <>
help "Docker container name")
<*> argsOption (long (dockerOptName dockerRunArgsArgName) <>
hide <>
value [] <>
metavar "'ARG1 [ARG2 ...]'" <>
help "Additional options to pass to 'docker run'")
<*> many (option auto (long (dockerOptName dockerMountArgName) <>
hide <>
metavar "(PATH | HOST-PATH:CONTAINER-PATH)" <>
                                help ("Mount volumes from host in container " ++
                                      "(may specify multiple times)")))
<*> maybeStrOption (long (dockerOptName dockerDatabasePathArgName) <>
hide <>
metavar "PATH" <>
help "Location of image usage tracking database")
where
dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName
maybeStrOption = optional . option str
hide = if showOptions
then idm
else internal <> hidden
-- | Parser for docker cleanup arguments.
dockerCleanupOptsParser :: Parser Docker.CleanupOpts
dockerCleanupOptsParser =
Docker.CleanupOpts <$>
(flag' Docker.CleanupInteractive
(short 'i' <>
long "interactive" <>
help "Show cleanup plan in editor and allow changes (default)") <|>
flag' Docker.CleanupImmediate
(short 'y' <>
long "immediate" <>
help "Immediately execute cleanup plan") <|>
flag' Docker.CleanupDryRun
(short 'n' <>
long "dry-run" <>
help "Display cleanup plan but do not execute") <|>
pure Docker.CleanupInteractive) <*>
opt (Just 14) "known-images" "LAST-USED" <*>
opt Nothing "unknown-images" "CREATED" <*>
opt (Just 0) "dangling-images" "CREATED" <*>
opt Nothing "stopped-containers" "CREATED" <*>
opt Nothing "running-containers" "CREATED"
where opt def' name mv =
fmap Just
(option auto
(long name <>
metavar (mv ++ "-DAYS-AGO") <>
help ("Remove " ++
toDescr name ++
" " ++
map toLower (toDescr mv) ++
" N days ago" ++
case def' of
Just n -> " (default " ++ show n ++ ")"
Nothing -> ""))) <|>
flag' Nothing
(long ("no-" ++ name) <>
help ("Do not remove " ++
toDescr name ++
case def' of
Just _ -> ""
Nothing -> " (default)")) <|>
pure def'
toDescr = map (\c -> if c == '-' then ' ' else c)
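-- For instance, 'toDescr' turns the option name "known-images" into the phrase
-- "known images" used in the help text, and the metavar "LAST-USED" becomes
-- "LAST USED" (then lower-cased to "last used" by the 'map toLower' above).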
-- | Parser for arguments to `stack dot`
dotOptsParser :: Parser DotOpts
dotOptsParser = DotOpts
<$> includeExternal
<*> includeBase
<*> depthLimit
<*> fmap (maybe Set.empty Set.fromList . fmap splitNames) prunedPkgs
where includeExternal = boolFlags False
"external"
"inclusion of external dependencies"
idm
includeBase = boolFlags True
"include-base"
"inclusion of dependencies on base"
idm
depthLimit =
optional (option auto
(long "depth" <>
metavar "DEPTH" <>
help ("Limit the depth of dependency resolution " <>
"(Default: No limit)")))
prunedPkgs = optional (strOption
(long "prune" <>
metavar "PACKAGES" <>
help ("Prune each package name " <>
"from the comma separated list " <>
"of package names PACKAGES")))
splitNames :: String -> [String]
splitNames = map (takeWhile (not . isSpace) . dropWhile isSpace) . splitOn ","
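-- For example:
--
-- > splitNames "foo, bar ,baz"  ==  ["foo","bar","baz"]
--
-- Leading whitespace is dropped and each name is cut at its first internal space.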
-- | Parser for exec command
execOptsParser :: Maybe String -- ^ command
-> Parser ExecOpts
execOptsParser mcmd =
ExecOpts
<$> maybe eoCmdParser pure mcmd
<*> eoArgsParser
<*> (eoPlainParser <|>
ExecOptsEmbellished
<$> eoEnvSettingsParser
<*> eoPackagesParser)
where
eoCmdParser :: Parser String
eoCmdParser = strArgument (metavar "CMD")
eoArgsParser :: Parser [String]
eoArgsParser = many (strArgument (metavar "-- ARGS (e.g. stack ghc -- X.hs -o x)"))
eoEnvSettingsParser :: Parser EnvSettings
eoEnvSettingsParser = EnvSettings
<$> pure True
<*> boolFlags True
"ghc-package-path"
"setting the GHC_PACKAGE_PATH variable for the subprocess"
idm
<*> boolFlags True
"stack-exe"
"setting the STACK_EXE environment variable to the path for the stack executable"
idm
eoPackagesParser :: Parser [String]
eoPackagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
eoPlainParser :: Parser ExecOptsExtra
eoPlainParser = flag' ExecOptsPlain
(long "plain" <>
help "Use an unmodified environment (only useful with Docker)")
-- | Parser for global command-line options.
globalOptsParser :: Bool -> Parser GlobalOpts
globalOptsParser defaultTerminal =
GlobalOpts <$> logLevelOptsParser <*>
configOptsParser False <*>
optional abstractResolverOptsParser <*>
flag
defaultTerminal
False
(long "no-terminal" <>
help
"Override terminal detection in the case of running in a false terminal") <*>
(optional (strOption
(long "stack-yaml" <>
metavar "STACK-YAML" <>
help "Override project stack.yaml file (overrides any STACK_YAML environment variable)")))
initOptsParser :: Parser InitOpts
initOptsParser =
InitOpts <$> method <*> overwrite <*> fmap not ignoreSubDirs
where
ignoreSubDirs = flag False
True
(long "ignore-subdirs" <>
help "Do not search for .cabal files in sub directories")
overwrite = flag False
True
(long "force" <>
help "Force overwriting of an existing stack.yaml if it exists")
method = solver
<|> (MethodResolver <$> resolver)
<|> (MethodSnapshot <$> snapPref)
solver =
flag' MethodSolver
(long "solver" <>
help "Use a dependency solver to determine dependencies")
snapPref =
flag' PrefLTS
(long "prefer-lts" <>
help "Prefer LTS snapshots over Nightly snapshots") <|>
flag' PrefNightly
(long "prefer-nightly" <>
help "Prefer Nightly snapshots over LTS snapshots") <|>
pure PrefNone
resolver = option readAbstractResolver
(long "resolver" <>
metavar "RESOLVER" <>
help "Use the given resolver, even if not all dependencies are met")
-- | Parse for a logging level.
logLevelOptsParser :: Parser LogLevel
logLevelOptsParser =
fmap parse
(strOption (long "verbosity" <>
metavar "VERBOSITY" <>
help "Verbosity: silent, error, warn, info, debug")) <|>
flag defaultLogLevel
verboseLevel
(short 'v' <> long "verbose" <>
help ("Enable verbose mode: verbosity level \"" <> showLevel verboseLevel <> "\""))
where verboseLevel = LevelDebug
showLevel l =
case l of
LevelDebug -> "debug"
LevelInfo -> "info"
LevelWarn -> "warn"
LevelError -> "error"
LevelOther x -> T.unpack x
parse s =
case s of
"debug" -> LevelDebug
"info" -> LevelInfo
"warn" -> LevelWarn
"error" -> LevelError
_ -> LevelOther (T.pack s)
-- | Parser for the resolver
abstractResolverOptsParser :: Parser AbstractResolver
abstractResolverOptsParser =
option readAbstractResolver
(long "resolver" <>
metavar "RESOLVER" <>
help "Override resolver in project file")
readAbstractResolver :: ReadM AbstractResolver
readAbstractResolver = do
s <- readerAsk
case s of
"global" -> return ARGlobal
"nightly" -> return ARLatestNightly
"lts" -> return ARLatestLTS
'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x ->
return $ ARLatestLTSMajor x'
_ ->
case parseResolverText $ T.pack s of
Left e -> readerError $ show e
Right x -> return $ ARResolver x
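-- Forms handled by 'readAbstractResolver', as matched above:
--
-- > --resolver global    -- ARGlobal
-- > --resolver nightly   -- ARLatestNightly
-- > --resolver lts       -- ARLatestLTS
-- > --resolver lts-2     -- ARLatestLTSMajor 2
--
-- Anything else (e.g. "lts-2.17") is handed to 'parseResolverText' and wrapped
-- in 'ARResolver'.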
-- | Parser for @solverCmd@
solverOptsParser :: Parser Bool
solverOptsParser = boolFlags False
"modify-stack-yaml"
"Automatically modify stack.yaml with the solver's recommendations"
idm
-- | Parser for test arguments.
testOptsParser :: Parser TestOpts
testOptsParser = TestOpts
<$> boolFlags True
"rerun-tests"
"running already successful tests"
idm
<*> fmap (fromMaybe [])
(optional (argsOption(long "test-arguments" <>
metavar "TEST_ARGS" <>
help "Arguments passed in to the test suite program")))
<*> flag False
True
(long "coverage" <>
help "Generate a code coverage report")
<*> flag False
True
(long "no-run-tests" <>
help "Disable running of tests. (Tests will still be built.)")
newOptsParser :: Parser NewOpts
newOptsParser =
NewOpts <$> templateRepositoryParser
<*> optional templateParser
<*> many templateArgParser
<*> initOptsParser
where
templateRepositoryParser = strOption
$ long "template-url-base"
<> metavar "URL"
<> value "raw.githubusercontent.com/commercialhaskell/stack-templates/master/"
-- TODO(DanBurton): reject argument if it has a colon.
templateParser = strArgument $ metavar "TEMPLATE"
-- TODO(DanBurton): reject argument if it doesn't have a colon.
templateArgParser = strArgument $ metavar "ARG:VAL"
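-- A hypothetical argument list handed to 'newOptsParser', to show how the
-- pieces fit together: given @["my-template", "author:Alice"]@ it would bind
-- TEMPLATE to "my-template", collect "author:Alice" as an ARG:VAL pair, and
-- let 'initOptsParser' pick up any --solver/--prefer-lts/--force flags.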
|
wskplho/stack
|
src/Stack/Options.hs
|
Haskell
|
bsd-3-clause
| 21,355
|
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE MultiParamTypeClasses, Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Groups.Examples
-- Copyright : Quentin Moser <moserq@gmail.com>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : orphaned
-- Stability : unstable
-- Portability : unportable
--
-- Example layouts for "XMonad.Layout.Groups".
--
-----------------------------------------------------------------------------
module XMonad.Layout.Groups.Examples ( -- * Usage
-- $usage
-- * Example: Row of columns
-- $example1
rowOfColumns
, zoomColumnIn
, zoomColumnOut
, zoomColumnReset
, toggleColumnFull
, zoomWindowIn
, zoomWindowOut
, zoomWindowReset
, toggleWindowFull
-- * Example: Tiled tab groups
-- $example2
, tallTabs
, mirrorTallTabs
, fullTabs
, TiledTabsConfig(..)
, defaultTiledTabsConfig
, increaseNMasterGroups
, decreaseNMasterGroups
, shrinkMasterGroups
, expandMasterGroups
, nextOuterLayout
-- * Useful re-exports and utils
, module XMonad.Layout.Groups.Helpers
, shrinkText
, defaultTheme
, GroupEQ(..)
, zoomRowG
) where
import XMonad hiding ((|||))
import qualified XMonad.Layout.Groups as G
import XMonad.Layout.Groups.Helpers
import XMonad.Layout.ZoomRow
import XMonad.Layout.Tabbed
import XMonad.Layout.Named
import XMonad.Layout.Renamed
import XMonad.Layout.LayoutCombinators
import XMonad.Layout.Decoration
import XMonad.Layout.Simplest
-- $usage
-- This module contains example 'G.Groups'-based layouts.
-- You can either import this module directly, or look at its source
-- for ideas of how "XMonad.Layout.Groups" may be used.
--
-- You can use the contents of this module by adding
--
-- > import XMonad.Layout.Groups.Examples
--
-- to the top of your @.\/.xmonad\/xmonad.hs@.
--
-- For more information on using any of the layouts, jump directly
-- to its \"Example\" section.
--
-- Whichever layout you choose to use, you will probably want to be
-- able to move focus and windows between groups in a consistent
-- manner. For this, you should take a look at the functions from
-- the "XMonad.Layout.Groups.Helpers" module, which are all
-- re-exported by this module.
--
-- For more information on how to extend your layout hook and key bindings, see
-- "XMonad.Doc.Extending".
-- * Helper: ZoomRow of Group elements
-- | Compare two 'Group's by comparing the ids of their layouts.
data GroupEQ a = GroupEQ
deriving (Show, Read)
instance Eq a => EQF GroupEQ (G.Group l a) where
eq _ (G.G l1 _) (G.G l2 _) = G.sameID l1 l2
zoomRowG :: (Eq a, Show a, Read a, Show (l a), Read (l a))
=> ZoomRow GroupEQ (G.Group l a)
zoomRowG = zoomRowWith GroupEQ
-- * Example 1: Row of columns
-- $example1
-- A layout that arranges windows in a row of columns. It uses 'ZoomRow's for
-- both the row and the columns, allowing you to:
--
-- * Freely change the proportion of the screen width allocated to each column
--
-- * Freely change the proportion of a column's height allocated to each of its windows
--
-- * Set a column to occupy the whole screen space whenever it has focus
--
-- * Set a window to occupy its whole column whenever it has focus
--
-- To use this layout, add 'rowOfColumns' to your layout hook, for example:
--
-- > myLayout = rowOfColumns
--
-- To be able to change the sizes of columns and windows, you can create key bindings
-- for the relevant actions:
--
-- > ((modMask, xK_minus), zoomWindowOut)
--
-- and so on.
rowOfColumns = G.group column zoomRowG
where column = renamed [CutWordsLeft 2, PrependWords "ZoomColumn"] $ Mirror zoomRow
-- | Increase the width of the focused column
zoomColumnIn :: X ()
zoomColumnIn = sendMessage $ G.ToEnclosing $ SomeMessage $ zoomIn
-- | Decrease the width of the focused column
zoomColumnOut :: X ()
zoomColumnOut = sendMessage $ G.ToEnclosing $ SomeMessage $ zoomOut
-- | Reset the width of the focused column
zoomColumnReset :: X ()
zoomColumnReset = sendMessage $ G.ToEnclosing $ SomeMessage $ zoomReset
-- | Toggle whether the currently focused column should
-- take up all available space whenever it has focus
toggleColumnFull :: X ()
toggleColumnFull = sendMessage $ G.ToEnclosing $ SomeMessage $ ZoomFullToggle
-- | Increase the height of the focused window
zoomWindowIn :: X ()
zoomWindowIn = sendMessage zoomIn
-- | Decrease the height of the focused window
zoomWindowOut :: X ()
zoomWindowOut = sendMessage zoomOut
-- | Reset the height of the focused window
zoomWindowReset :: X ()
zoomWindowReset = sendMessage zoomReset
-- | Toggle whether the currently focused window should
-- take up the whole column whenever it has focus
toggleWindowFull :: X ()
toggleWindowFull = sendMessage ZoomFullToggle
-- * Example 2: Tabbed groups in a Tall/Full layout.
-- $example2
-- A layout which arranges windows into tabbed groups, and the groups
-- themselves according to XMonad's default algorithm
-- (@'Tall' ||| 'Mirror' 'Tall' ||| 'Full'@). As their names
-- indicate, 'tallTabs' starts as 'Tall', 'mirrorTallTabs' starts
-- as 'Mirror' 'Tall' and 'fullTabs' starts as 'Full', but in any
-- case you can freely switch between the three afterwards.
--
-- You can use any of these three layouts by including it in your layout hook.
-- You will need to provide it with a 'TiledTabsConfig' containing the size
-- parameters for 'Tall' and 'Mirror' 'Tall', and the shrinker and decoration theme
-- for the tabs. If you're happy with defaults, you can use 'defaultTiledTabsConfig':
--
-- > myLayout = tallTabs defaultTiledTabsConfig
--
-- To be able to increase\/decrease the number of master groups and shrink\/expand
-- the master area, you can create key bindings for the relevant actions:
--
-- > ((modMask, xK_h), shrinkMasterGroups)
--
-- and so on.
-- | Configuration data for the "tiled tab groups" layout
data TiledTabsConfig s = TTC { vNMaster :: Int
, vRatio :: Rational
, vIncrement :: Rational
, hNMaster :: Int
, hRatio :: Rational
, hIncrement :: Rational
, tabsShrinker :: s
, tabsTheme :: Theme }
defaultTiledTabsConfig :: TiledTabsConfig DefaultShrinker
defaultTiledTabsConfig = TTC 1 0.5 (3/100) 1 0.5 (3/100) shrinkText defaultTheme
fullTabs c = _tab c $ G.group _tabs $ Full ||| _vert c ||| _horiz c
tallTabs c = _tab c $ G.group _tabs $ _vert c ||| _horiz c ||| Full
mirrorTallTabs c = _tab c $ G.group _tabs $ _horiz c ||| Full ||| _vert c
_tabs = named "Tabs" Simplest
_tab c l = renamed [CutWordsLeft 1] $ addTabs (tabsShrinker c) (tabsTheme c) l
_vert c = named "Vertical" $ Tall (vNMaster c) (vIncrement c) (vRatio c)
_horiz c = named "Horizontal" $ Mirror $ Tall (hNMaster c) (hIncrement c) (hRatio c)
-- | Increase the number of master groups by one
increaseNMasterGroups :: X ()
increaseNMasterGroups = sendMessage $ G.ToEnclosing $ SomeMessage $ IncMasterN 1
-- | Decrease the number of master groups by one
decreaseNMasterGroups :: X ()
decreaseNMasterGroups = sendMessage $ G.ToEnclosing $ SomeMessage $ IncMasterN (-1)
-- | Shrink the master area
shrinkMasterGroups :: X ()
shrinkMasterGroups = sendMessage $ G.ToEnclosing $ SomeMessage $ Shrink
-- | Expand the master area
expandMasterGroups :: X ()
expandMasterGroups = sendMessage $ G.ToEnclosing $ SomeMessage $ Expand
-- | Rotate the available outer layout algorithms
nextOuterLayout :: X ()
nextOuterLayout = sendMessage $ G.ToEnclosing $ SomeMessage $ NextLayout
|
MasseR/xmonadcontrib
|
XMonad/Layout/Groups/Examples.hs
|
Haskell
|
bsd-3-clause
| 8,792
|
-- Example.hs -- Examples from HUnit user's guide
--
-- For more examples, check out the tests directory. It contains unit tests
-- for HUnit.
module Main where
import Test.HUnit
foo :: Int -> (Int, Int)
foo x = (1, x)
partA :: Int -> IO (Int, Int)
partA v = return (v+2, v+3)
partB :: Int -> IO Bool
partB v = return (v > 5)
test1 :: Test
test1 = TestCase (assertEqual "for (foo 3)," (1,2) (foo 3))
test2 :: Test
test2 = TestCase (do (x,y) <- partA 3
assertEqual "for the first result of partA," 5 x
b <- partB y
assertBool ("(partB " ++ show y ++ ") failed") b)
tests :: Test
tests = TestList [TestLabel "test1" test1, TestLabel "test2" test2]
tests' :: Test
tests' = test [ "test1" ~: "(foo 3)" ~: (1,2) ~=? (foo 3),
"test2" ~: do (x, y) <- partA 3
assertEqual "for the first result of partA," 5 x
partB y @? "(partB " ++ show y ++ ") failed" ]
main :: IO Counts
main = do runTestTT tests
runTestTT tests'
|
Numberartificial/workflow
|
snipets/.stack-work/install/x86_64-osx/lts-8.12/8.0.2/share/x86_64-osx-ghc-8.0.2/HUnit-1.2.5.2/examples/Example.hs
|
Haskell
|
mit
| 1,073
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>OAST Support Add-on</title>
<maps>
<homeID>oast</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/oast/src/main/javahelp/org/zaproxy/addon/oast/resources/help_id_ID/helpset_id_ID.hs
|
Haskell
|
apache-2.0
| 965
|
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.WorkspaceCursors
-- Copyright : (c) 2009 Adam Vogt <vogt.adam@gmail.com>
-- License : BSD
--
-- Maintainer : Adam Vogt
-- Stability : unstable
-- Portability : unportable
--
-- Like "XMonad.Actions.Plane" for an arbitrary number of dimensions.
-----------------------------------------------------------------------------
module XMonad.Actions.WorkspaceCursors
(
-- * Usage
-- $usage
focusDepth
,makeCursors
,toList
,workspaceCursors
,WorkspaceCursors
,getFocus
-- * Modifying the focus
,modifyLayer
,modifyLayer'
,shiftModifyLayer,shiftLayer
-- * Functions to pass to 'modifyLayer'
,focusNth'
,noWrapUp,noWrapDown,
-- * Todo
-- $todo
-- * Types
Cursors,
) where
import qualified XMonad.StackSet as W
import XMonad.Actions.FocusNth(focusNth')
import XMonad.Layout.LayoutModifier(ModifiedLayout(..),
LayoutModifier(handleMess, redoLayout))
import XMonad(Typeable, Message, WorkspaceId, X, XState(windowset),
fromMessage, sendMessage, windows, gets)
import Control.Monad((<=<), guard, liftM, liftM2, when)
import Control.Applicative((<$>))
import Data.Foldable(Foldable(foldMap), toList)
import Data.Maybe(fromJust, listToMaybe)
import Data.Monoid(Monoid(mappend, mconcat))
import Data.Traversable(sequenceA)
-- $usage
--
-- Here is an example config:
--
-- > import XMonad
-- > import XMonad.Actions.WorkspaceCursors
-- > import XMonad.Hooks.DynamicLog
-- > import XMonad.Util.EZConfig
-- > import qualified XMonad.StackSet as W
-- >
-- > main = do
-- > x <- xmobar conf
-- > xmonad x
-- >
-- > conf = additionalKeysP defaultConfig
-- > { layoutHook = workspaceCursors myCursors $ layoutHook defaultConfig
-- > , workspaces = toList myCursors } $
-- > [("M-"++shift++control++[k], f direction depth)
-- > | (f,shift) <- zip [modifyLayer,shiftModifyLayer] ["","S-"]
-- > , (direction,control) <- zip [W.focusUp',W.focusDown'] ["C-",""]
-- > , (depth,k) <- zip (reverse [1..focusDepth myCursors]) "asdf"]
-- > ++ moreKeybindings
-- >
-- > moreKeybindings = []
-- >
-- > myCursors = makeCursors $ map (map (\x -> [x])) [ "1234", "abc", "xyz"]
-- > -- myCursors = makeCursors [["wsA","wsB","wsC"],["-alpha-","-beta-","-gamma-"],["x","y"]]
-- $todo
--
-- * Find and document how to raise the allowable length of arguments:
-- restoring xmonad's state results in: @xmonad: executeFile: resource
-- exhausted (Argument list too long)@ when you specify more than about 50
-- workspaces. Or change it such that workspaces are created when you try to
-- view it.
--
-- * Function for pretty printing for DynamicLog that groups workspaces by
-- common prefixes
--
-- * Examples of adding workspaces to the cursors, having them appear multiple
-- times for being able to show jumping to some n'th multiple workspace
-- | makeCursors requires a nonempty list, and each sublist must be nonempty
makeCursors :: [[String]] -> Cursors String
makeCursors [] = error "Workspace Cursors cannot be empty"
makeCursors a = concat . reverse <$> foldl addDim x xs
where x = end $ map return $ head a
xs = map (map return) $ tail a
-- this could probably be simplified, but this is true:
-- toList . makeCursors == map (concat . reverse) . sequence . reverse . map (map (:[]))
-- the strange order is used because it makes the regular M-1..9
-- bindings change the prefixes first
addDim :: (Monoid a) => Cursors a -> [a] -> Cursors a
addDim prev prefixes = Cons . fromJust . W.differentiate
$ map ((<$> prev) . mappend) prefixes
end :: [a] -> Cursors a
end = Cons . fromJust . W.differentiate . map End
data Cursors a
= Cons (W.Stack (Cursors a))
| End a deriving (Eq,Show,Read,Typeable)
instance Foldable Cursors where
foldMap f (End x) = f x
foldMap f (Cons (W.Stack x y z)) = foldMap f x `mappend` mconcat (map (foldMap f) $ reverse y ++ z)
instance Functor Cursors where
fmap f (End a) = End $ f a
fmap f (Cons (W.Stack x y z)) = Cons $ W.Stack (fmap f x) (fmap (fmap f) y) (fmap (fmap f) z)
changeFocus :: (Cursors t -> Bool) -> Cursors t -> [Cursors t]
changeFocus p (Cons x) = do
choose <- chFocus p x
foc <- changeFocus p $ W.focus choose
return . Cons $ choose { W.focus = foc }
changeFocus p x = guard (p x) >> return x
chFocus :: (a -> Bool) -> W.Stack a -> [W.Stack a]
chFocus p st = filter (p . W.focus) $ zipWith const (iterate W.focusDown' st) (W.integrate st)
getFocus :: Cursors b -> b
getFocus (Cons x) = getFocus $ W.focus x
getFocus (End x) = x
-- This could be made more efficient by using the fact that the suffixes are grouped
focusTo :: (Eq t) => t -> Cursors t -> Maybe (Cursors t)
focusTo x = listToMaybe . filter ((x==) . getFocus) . changeFocus (const True)
-- | non-wrapping version of 'W.focusUp''
noWrapUp :: W.Stack t -> W.Stack t
noWrapUp (W.Stack t (l:ls) rs) = W.Stack l ls (t:rs)
noWrapUp x@(W.Stack _ [] _ ) = x
-- | non-wrapping version of 'W.focusDown''
noWrapDown :: W.Stack t -> W.Stack t
noWrapDown = reverseStack . noWrapUp . reverseStack
where reverseStack (W.Stack t ls rs) = W.Stack t rs ls
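-- For example, with the usual 'W.Stack' convention that the focused element
-- comes first and the "up" list is stored nearest-first:
--
-- > noWrapUp   (W.Stack 2 [1] [3])   ==  W.Stack 1 [] [2,3]
-- > noWrapUp   (W.Stack 1 [] [2,3])  ==  W.Stack 1 [] [2,3]   -- already at the top
-- > noWrapDown (W.Stack 2 [1] [3])   ==  W.Stack 3 [2,1] []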
focusDepth :: Cursors t -> Int
focusDepth (Cons x) = 1 + focusDepth (W.focus x)
focusDepth (End _) = 0
descend :: Monad m =>(W.Stack (Cursors a) -> m (W.Stack (Cursors a)))-> Int-> Cursors a-> m (Cursors a)
descend f 1 (Cons x) = Cons `liftM` f x
descend f n (Cons x) | n > 1 = liftM Cons $ descend f (pred n) `onFocus` x
descend _ _ x = return x
onFocus :: (Monad m) => (a1 -> m a1) -> W.Stack a1 -> m (W.Stack a1)
onFocus f st = (\x -> st { W.focus = x}) `liftM` f (W.focus st)
-- | @modifyLayer@ is used to change the focus at a given depth
modifyLayer :: (W.Stack (Cursors String) -> W.Stack (Cursors String)) -> Int -> X ()
modifyLayer f depth = modifyCursors (descend (return . f) depth)
-- | @shiftModifyLayer@ is the same as 'modifyLayer', but also shifts the
-- currently focused window to the new workspace
shiftModifyLayer :: (W.Stack (Cursors String) -> W.Stack (Cursors WorkspaceId))-> Int-> X ()
shiftModifyLayer f = modifyLayer' $ \st -> do
let st' = f st
windows $ W.shift $ getFocus (Cons st')
return st'
-- | @shiftLayer@ is the same as 'shiftModifyLayer', but the focus remains on
-- the current workspace.
shiftLayer :: (W.Stack (Cursors String) -> W.Stack (Cursors WorkspaceId))-> Int-> X ()
shiftLayer f = modifyLayer' $ \st -> do
windows $ W.shift $ getFocus $ Cons $ f st
return st
-- | example usages are 'shiftLayer' and 'shiftModifyLayer'
modifyLayer' :: (W.Stack (Cursors String) -> X (W.Stack (Cursors String))) -> Int -> X ()
modifyLayer' f depth = modifyCursors (descend f depth)
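-- | @modifyCursors f@ wraps @f@ so that, after it produces the new cursors,
-- 'updateXMD' switches the view to the new focus (when it changed) and the
-- result is returned unchanged; the wrapped action is then delivered to the
-- layout modifier as a 'ChangeCursors' message.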
modifyCursors :: (Cursors String -> X (Cursors String)) -> X ()
modifyCursors = sendMessage . ChangeCursors . (liftM2 (>>) updateXMD return <=<)
data WorkspaceCursors a = WorkspaceCursors (Cursors String)
deriving (Typeable,Read,Show)
-- | The state is stored in the 'WorkspaceCursors' layout modifier. Put this as
-- your outermost modifier, unless you want different cursors at different
-- times (using "XMonad.Layout.MultiToggle")
workspaceCursors :: Cursors String -> l a -> ModifiedLayout WorkspaceCursors l a
workspaceCursors = ModifiedLayout . WorkspaceCursors
data ChangeCursors = ChangeCursors { unWrap :: Cursors String -> X (Cursors String) }
deriving (Typeable)
instance Message ChangeCursors
updateXMD :: Cursors WorkspaceId -> X ()
updateXMD cs = do
changed <- gets $ (getFocus cs /=) . W.currentTag . windowset
when changed $ windows $ W.greedyView $ getFocus cs
instance LayoutModifier WorkspaceCursors a where
redoLayout (WorkspaceCursors cs) _ _ arrs = do
cws <- gets $ W.currentTag . windowset
return (arrs,WorkspaceCursors <$> focusTo cws cs)
handleMess (WorkspaceCursors cs) m =
sequenceA $ fmap WorkspaceCursors . ($ cs) . unWrap <$> fromMessage m
|
adinapoli/xmonad-contrib
|
XMonad/Actions/WorkspaceCursors.hs
|
Haskell
|
bsd-3-clause
| 8,231
|
module Grin.Lint(
lintCheckGrin,
typecheckGrin,
transformGrin,
dumpGrin
) where
import Control.Exception
import Control.Monad.Reader
import Data.Monoid
import System.IO
import qualified Data.Set as Set
import Doc.DocLike
import Grin.Grin
import Grin.Show
import Options
import Support.CanType
import Support.Compat
import Support.FreeVars
import Support.Transform
import Text.Printf
import Util.Gen
import Util.SetLike
import qualified FlagDump as FD
import qualified Stats
lintCheckGrin grin = when flint $ typecheckGrin grin
lintCheckGrin' onerr grin | flint = do
let env = TcEnv { envTyEnv = grinTypeEnv grin, envInScope = fromList (fsts $ grinCafs grin) }
let errs = [ (err ++ "\n" ++ render (prettyFun a) ) | (a,Left err) <- [ (a,runTc env (tcLam Nothing c)) | a@(_,c) <- grinFuncs grin ]]
if null errs then return () else do
onerr
putErrLn ">>> Type Errors"
mapM_ putErrLn errs
unless (null errs || optKeepGoing options) $ fail "There were type errors!"
lintCheckGrin' _ _ = return ()
typecheckGrin grin = do
let env = TcEnv { envTyEnv = grinTypeEnv grin, envInScope = fromList (fsts $ grinCafs grin) }
let errs = [ (err ++ "\n" ++ render (prettyFun a) ) | (a,Left err) <- [ (a,runTc env (tcLam Nothing c)) | a@(_,c) <- grinFuncs grin ]]
mapM_ putErrLn errs
unless (null errs || optKeepGoing options) $ fail "There were type errors!"
{-# NOINLINE dumpGrin #-}
dumpGrin pname grin = do
(argstring,sversion) <- getArgString
let fn ext action = do
let oname = outputName ++ "_" ++ pname ++ "." ++ ext
putErrLn $ "Writing: " ++ oname
h <- openFile oname WriteMode
action h
hClose h
fn "grin" $ \h -> do
hPutStrLn h $ unlines [ "-- " ++ argstring,"-- " ++ sversion,""]
hPrintGrin h grin
wdump FD.GrinDatalog $ fn "datalog" $ \h -> do
hPutStrLn h $ unlines [ "% " ++ argstring,"% " ++ sversion,""]
hPrintGrinDL h grin
wdump FD.Grin $ do
putErrLn $ "v-- " ++ pname ++ " Grin"
printGrin grin
putErrLn $ "^-- " ++ pname ++ " Grin"
class DShow a where
dshow :: a -> String
instance DShow String where
dshow s = '\'':f s where
f ('\'':rs) = "''" ++ f rs
f (x:xs) = x:f xs
f [] = "'"
instance DShow Tag where
dshow s = '\'':f (show s) where
f ('\'':rs) = "''" ++ f rs
f (x:xs) = x:f xs
f [] = "'"
instance DShow Var where
dshow v = dshow (show v)
instance DShow Ty where
dshow v = dshow $ show v
instance (DShow a,DShow b) => DShow (Either a b) where
dshow (Left x) = dshow x
dshow (Right x) = dshow x
funArg n i = show n ++ "@arg@" ++ show i
funRet n i = show n ++ "@ret@" ++ show i
printFunc h n (l :-> e) = do
hPrintf h "func(%s,%i).\n" (dshow n) (length l)
forM_ (zip naturals l) $ \ (i,Var v t) -> do
hPrintf h "perform(assign,%s,%s).\n" (dshow v) (dshow $ funArg n i)
hPrintf h "what(%s,funarg).\n" (dshow $ funArg n i)
hPrintf h "typeof(%s,%s).\n" (dshow $ funArg n i) (dshow t)
hPrintf h "typeof(%s,%s).\n" (dshow v) (dshow t)
let rts = getType e
lts = [ (t,funRet n i) | t <- rts | i <- naturals ]
mapM_ (hPrintf h "what(%s,funret).\n" . dshow) (snds lts)
mapM_ (\ (t,n) -> hPrintf h "typeof(%s,%s).\n" (dshow n) (dshow t)) lts
printDL h n (map (Left . snd) lts) e
hPrintGrinDL :: Handle -> Grin -> IO ()
hPrintGrinDL h grin = do
let cafs = grinCafs grin
when (not $ null cafs) $ do
hPutStrLn h "% cafs"
mapM_ (\ (x,y) -> hPrintf h "what(%s,'caf').\ntypeof(%s,inode).\n" (dshow x) (dshow x)) cafs
hPutStrLn h "% functions"
forM_ (grinFuncs grin) $ \ (n,l :-> e) -> printFunc h n (l :-> e)
bindUnknown h l r = do
mapM_ (\ (x,t) -> when (tyInteresting t) $ setUnknown h x r) (Set.toList $ freeVars l :: [(Var,Ty)])
setUnknown :: DShow a => Handle -> a -> String -> IO ()
setUnknown h x r = do hPrintf h "unknown(%s,%s).\n" (dshow x) (dshow r)
printDL h n fs e = f fs e where
f fs (x :>>= l :-> y) = do
f (map Right l) x
f fs y
f bs (Return vs) = do zipWithM_ (assign "assign") bs vs
-- f [Left b] (Store (NodeC n vs)) = hPrintf h "store(%s,%s,%s).\n" (dshow b) (dshow n) (if tagIsWHNF n then "true" else "false")
-- f [Right (Var b _)] (Store (NodeC n vs)) = hPrintf h "store(%s,%s,%s).\n" (dshow b) (dshow n) (if tagIsWHNF n then "true" else "false") >> app n vs
-- f [b] (Store x@Var {}) = do assign "demote" b x
f [b] (BaseOp Eval [x]) = do assign "eval" b x
f b (App fn as ty) = do
forM_ (zip naturals as) $ \ (i,a) -> do
assign "assign" (Left $ funArg fn i) a
forM_ (zip naturals b) $ \ (i,a) -> do
genAssign "assign" a (Left $ funRet fn i)
f b (Case v ls) = mapM_ (\l -> f b (Return [v] :>>= l)) ls
f b Let { expDefs = defs, expBody = body } = do
forM_ defs $ \d -> printFunc h (funcDefName d) (funcDefBody d)
forM_ defs $ \d -> hPrintf h "subfunc(%s,%s).\n" (dshow $ funcDefName d) (dshow n)
f b body
f b Error {} = return ()
f b Call { expValue = Item fn _, expArgs = as, expType = ty} = do
forM_ (zip naturals as) $ \ (i,a) -> do
assign "assign" (Left $ funArg fn i) a
forM_ (zip naturals b) $ \ (i,a) -> do
genAssign "assign" a (Left $ funRet fn i)
f bs e = do zipWithM_ (assign "assign") bs (map ValUnknown (getType e))
--app n as | Just (0,fn) <- tagUnfunction n = do
-- hPrintf h "lazyfunc(%s).\n" (dshow fn)
-- forM_ (zip naturals as) $ \ (i,a) -> do
-- assign "assign" (Left $ funArg fn i) a
--app _ _ = return ()
assign op b v = genAssign op b (Right v)
genAssign :: String -> Either String Val -> Either String Val -> IO ()
genAssign op (Left b) (Left l) = hPrintf h "perform(%s,%s,%s).\n" op (dshow b) (dshow l)
genAssign op (Right (Var v1 _)) (Left l) = hPrintf h "perform(%s,%s,%s).\n" op (dshow v1) (dshow l)
genAssign op (Left b) (Right (Var v _)) = hPrintf h "perform(%s,%s,%s).\n" op (dshow b) (dshow v)
genAssign op (Left b) (Right (Const {})) = hPrintf h "perform(%s,%s,%s).\n" op (dshow b) "const"
genAssign op (Right (Var v1 _)) (Right (Var v2 _)) = hPrintf h "perform(%s,%s,%s).\n" op (dshow v1) (dshow v2)
genAssign op (Left b) (Right v) = when (tyInteresting $ getType v) $ setUnknown h b (show (op,v))
genAssign op (Right b) rv = bindUnknown h b (take 20 $ show (op,rv))
tyInteresting ty = ty == TyNode || ty == tyINode
transformGrin :: TransformParms Grin -> Grin -> IO Grin
transformGrin TransformParms { transformIterate = IterateMax n } prog | n <= 0 = return prog
transformGrin TransformParms { transformIterate = IterateExactly n } prog | n <= 0 = return prog
transformGrin tp prog = do
let dodump = transformDumpProgress tp
name = transformCategory tp ++ pname (transformPass tp) ++ pname (transformName tp)
_scname = transformCategory tp ++ pname (transformPass tp)
pname "" = ""
pname xs = '-':xs
iterate = transformIterate tp
when dodump $ putErrLn $ "-- " ++ name
let ferr e = do
putErrLn $ "\n>>> Exception thrown"
putErrLn $ "\n>>> Before " ++ name
dumpGrin ("lint-before-" ++ name) prog
putErrLn $ "\n>>>"
putErrLn (show (e::SomeException'))
maybeDie
return prog
let istat = grinStats prog
prog' <- Control.Exception.catch (transformOperation tp prog { grinStats = mempty } >>= Control.Exception.evaluate ) ferr
let estat = grinStats prog'
let onerr grin' = do
putErrLn $ "\n>>> Before " ++ name
dumpGrin ("lint-before-" ++ name) prog
Stats.printStat name estat
putErrLn $ "\n>>> After " ++ name
dumpGrin ("lint-after-" ++ name) grin'
if transformSkipNoStats tp && Stats.null estat then do
when dodump $ putErrLn "program not changed"
return prog
else do
when (dodump && not (Stats.null estat)) $ Stats.printStat name estat
lintCheckGrin' (onerr prog') prog'
let tstat = istat `mappend` estat
if doIterate iterate (not $ Stats.null estat) then transformGrin tp { transformIterate = iterateStep iterate } prog' { grinStats = tstat } else return prog' { grinStats = tstat }
-- if doIterate iterate (estat /= mempty) then transformGrin tp { transformIterate = iterateStep iterate } prog' { progStats = istat `mappend` estat } else
-- return prog' { progStats = istat `mappend` estat, progPasses = name:progPasses prog' }
maybeDie = case optKeepGoing options of
True -> return ()
False -> putErrDie "Internal Error"
data TcEnv = TcEnv {
envTyEnv :: TyEnv,
envInScope :: Set.Set Var
}
newtype Tc a = Tc (ReaderT TcEnv (Either String) a)
deriving(Monad,MonadReader TcEnv)
tcErr :: String -> Tc a
tcErr s = Tc $ lift (Left s)
runTc :: TcEnv -> Tc a -> Either String a
runTc env (Tc r) = runReaderT r env
same _ t1 t2 | t1 == t2 = return t1
same msg t1 t2 = tcErr $ "Types not the same:" <+> parens msg <+> parens (tshow t1) <+> parens (tshow t2)
tcLam :: Maybe [Ty] -> Lam -> Tc [Ty]
tcLam mty (v :-> e) = f mty where
f Nothing = ans (mapM tcVal v)
f (Just ty) = ans $ do
t <- mapM tcVal v
same (":->" <+> show mty <+> show (v :-> e)) ty t
ans r = local (\e -> e { envInScope = freeVars v `mappend` envInScope e }) $ r >> tcExp e
tcExp :: Exp -> Tc [Ty]
tcExp e = f e where
f (e :>>= lam) = do
t1 <- f e
tcLam (Just t1) lam
f n@(Prim p as t') = do
mapM_ tcVal as
return t'
f ap@(BaseOp (Apply t) vs) = do
(v':_) <- mapM tcVal vs
if v' == TyNode then return t
else tcErr $ "App apply arg doesn't match: " ++ show ap
f ap@(BaseOp Eval [v]) = do
v' <- tcVal v
if v' == tyINode then return [TyNode]
else tcErr $ "App eval arg doesn't match: " ++ show ap
f a@(App fn as t) = do
te <- asks envTyEnv
(as',t') <- findArgsType te fn
as'' <- mapM tcVal as
if t' == t then
if as'' == as' then return t' else
tcErr $ "App: arguments do not match: " ++ show (a,as',t')
else tcErr $ "App: results do not match: " ++ show (a,t,(as',t'))
f e@(BaseOp (StoreNode _) vs) = do
[NodeC {}] <- return vs
mapM_ tcVal vs
return (getType e)
f Alloc { expValue = v, expCount = c, expRegion = r } = do
t <- tcVal v
tcVal c
tcVal r
return [TyPtr t]
f (Return v) = mapM tcVal v
f (BaseOp Promote [v]) = do
TyINode <- tcVal v
return [TyNode]
f (BaseOp Demote [v]) = do
TyNode <- tcVal v
return [TyINode]
f (Error _ t) = return t
f e@(BaseOp Overwrite [w,v]) = do
NodeC {} <- return v
tcVal w
tcVal v
return []
f e@(BaseOp PokeVal [w,v]) = do
TyPtr t <- tcVal w
tv <- tcVal v
when (t /= tv) $
tcErr "PokeVal: types don't match"
return []
f e@(BaseOp PeekVal [w]) = do
TyPtr t <- tcVal w
return [t]
f (Case _ []) = tcErr "empty case"
f (Case v as) = do
tv <- tcVal v
es <- mapM (tcLam (Just [tv])) as
foldl1M (same $ "case exp: " ++ show (map head $ sortGroupUnder fst (zip es as)) ) es
f (Let { expDefs = defs, expBody = body }) = do
local (\e -> e { envTyEnv = extendTyEnv defs (envTyEnv e) }) $ do
mapM_ (tcLam Nothing) [ b | FuncDef { funcDefBody = b } <- defs ]
f body
f _ = error "Grin.Lint: unknown value passed to f"
tcVal :: Val -> Tc Ty
tcVal v = f v where
f e@(Var v t) = do
s <- asks envInScope
case v `member` s of
True -> return t
False -> tcErr $ "variable not in scope: " ++ show e
f (Lit _ t) = return t
f Unit = return TyUnit
f (Const t) = do
v <- f t
case v of
TyNode -> return TyINode
v -> return (TyPtr v)
f (Index v offset) = do
t <- f v
TyPrim _ <- f offset
return t
f (ValUnknown ty) = return ty
f (ValPrim _ vs ty) = do mapM_ f vs >> return ty
f n@(NodeC tg as) = do
te <- asks envTyEnv
(as',_) <- findArgsType te tg
as'' <- mapM f as
if as'' == as' then return TyNode else
tcErr $ "NodeC: arguments do not match " ++ show n ++ show (as'',as')
f (Item _ t) = return t
|
m-alvarez/jhc
|
src/Grin/Lint.hs
|
Haskell
|
mit
| 12,583
|
{-# LANGUAGE CPP #-}
--------------------------------------------------------------------------------
-- | Deal with Cmm registers
--
module LlvmCodeGen.Regs (
lmGlobalRegArg, lmGlobalRegVar, alwaysLive,
stgTBAA, baseN, stackN, heapN, rxN, otherN, tbaa, getTBAA
) where
#include "HsVersions.h"
import Llvm
import CmmExpr
import DynFlags
import FastString
import Outputable ( panic )
import Unique
-- | Get the LlvmVar function variable storing the real register
lmGlobalRegVar :: DynFlags -> GlobalReg -> LlvmVar
lmGlobalRegVar dflags = pVarLift . lmGlobalReg dflags "_Var"
-- | Get the LlvmVar function argument storing the real register
lmGlobalRegArg :: DynFlags -> GlobalReg -> LlvmVar
lmGlobalRegArg dflags = lmGlobalReg dflags "_Arg"
{- Need to make sure the names here can't conflict with the unique generated
   names. Unique-generated names contain only base62 chars. So using, say,
the '_' char guarantees this.
-}
lmGlobalReg :: DynFlags -> String -> GlobalReg -> LlvmVar
lmGlobalReg dflags suf reg
= case reg of
BaseReg -> ptrGlobal $ "Base" ++ suf
Sp -> ptrGlobal $ "Sp" ++ suf
Hp -> ptrGlobal $ "Hp" ++ suf
VanillaReg 1 _ -> wordGlobal $ "R1" ++ suf
VanillaReg 2 _ -> wordGlobal $ "R2" ++ suf
VanillaReg 3 _ -> wordGlobal $ "R3" ++ suf
VanillaReg 4 _ -> wordGlobal $ "R4" ++ suf
VanillaReg 5 _ -> wordGlobal $ "R5" ++ suf
VanillaReg 6 _ -> wordGlobal $ "R6" ++ suf
VanillaReg 7 _ -> wordGlobal $ "R7" ++ suf
VanillaReg 8 _ -> wordGlobal $ "R8" ++ suf
SpLim -> wordGlobal $ "SpLim" ++ suf
FloatReg 1 -> floatGlobal $"F1" ++ suf
FloatReg 2 -> floatGlobal $"F2" ++ suf
FloatReg 3 -> floatGlobal $"F3" ++ suf
FloatReg 4 -> floatGlobal $"F4" ++ suf
FloatReg 5 -> floatGlobal $"F5" ++ suf
FloatReg 6 -> floatGlobal $"F6" ++ suf
DoubleReg 1 -> doubleGlobal $ "D1" ++ suf
DoubleReg 2 -> doubleGlobal $ "D2" ++ suf
DoubleReg 3 -> doubleGlobal $ "D3" ++ suf
DoubleReg 4 -> doubleGlobal $ "D4" ++ suf
DoubleReg 5 -> doubleGlobal $ "D5" ++ suf
DoubleReg 6 -> doubleGlobal $ "D6" ++ suf
XmmReg 1 -> xmmGlobal $ "XMM1" ++ suf
XmmReg 2 -> xmmGlobal $ "XMM2" ++ suf
XmmReg 3 -> xmmGlobal $ "XMM3" ++ suf
XmmReg 4 -> xmmGlobal $ "XMM4" ++ suf
XmmReg 5 -> xmmGlobal $ "XMM5" ++ suf
XmmReg 6 -> xmmGlobal $ "XMM6" ++ suf
YmmReg 1 -> ymmGlobal $ "YMM1" ++ suf
YmmReg 2 -> ymmGlobal $ "YMM2" ++ suf
YmmReg 3 -> ymmGlobal $ "YMM3" ++ suf
YmmReg 4 -> ymmGlobal $ "YMM4" ++ suf
YmmReg 5 -> ymmGlobal $ "YMM5" ++ suf
YmmReg 6 -> ymmGlobal $ "YMM6" ++ suf
ZmmReg 1 -> zmmGlobal $ "ZMM1" ++ suf
ZmmReg 2 -> zmmGlobal $ "ZMM2" ++ suf
ZmmReg 3 -> zmmGlobal $ "ZMM3" ++ suf
ZmmReg 4 -> zmmGlobal $ "ZMM4" ++ suf
ZmmReg 5 -> zmmGlobal $ "ZMM5" ++ suf
ZmmReg 6 -> zmmGlobal $ "ZMM6" ++ suf
MachSp -> wordGlobal $ "MachSp" ++ suf
_other -> panic $ "LlvmCodeGen.Reg: GlobalReg (" ++ (show reg)
++ ") not supported!"
                    -- LongReg, HpLim, CCSS, CurrentTSO, CurrentNursery, HpAlloc
-- EagerBlackholeInfo, GCEnter1, GCFun, BaseReg, PicBaseReg
where
wordGlobal name = LMNLocalVar (fsLit name) (llvmWord dflags)
ptrGlobal name = LMNLocalVar (fsLit name) (llvmWordPtr dflags)
floatGlobal name = LMNLocalVar (fsLit name) LMFloat
doubleGlobal name = LMNLocalVar (fsLit name) LMDouble
xmmGlobal name = LMNLocalVar (fsLit name) (LMVector 4 (LMInt 32))
ymmGlobal name = LMNLocalVar (fsLit name) (LMVector 8 (LMInt 32))
zmmGlobal name = LMNLocalVar (fsLit name) (LMVector 16 (LMInt 32))
-- | A list of STG Registers that should always be considered alive
alwaysLive :: [GlobalReg]
alwaysLive = [BaseReg, Sp, Hp, SpLim, HpLim, node]
-- | STG Type Based Alias Analysis hierarchy
stgTBAA :: [(Unique, LMString, Maybe Unique)]
stgTBAA
= [ (topN, fsLit "top", Nothing)
, (stackN, fsLit "stack", Just topN)
, (heapN, fsLit "heap", Just topN)
, (rxN, fsLit "rx", Just heapN)
, (baseN, fsLit "base", Just topN)
-- FIX: Not 100% sure about 'others' place. Might need to be under 'heap'.
-- OR I think the big thing is Sp is never aliased, so might want
      -- to change the hierarchy to have Sp on its own branch that is never
      -- aliased (e.g. never use top as a TBAA node).
, (otherN, fsLit "other", Just topN)
]
-- | Id values
topN, stackN, heapN, rxN, baseN, otherN :: Unique
topN = getUnique (fsLit "LlvmCodeGen.Regs.topN")
stackN = getUnique (fsLit "LlvmCodeGen.Regs.stackN")
heapN = getUnique (fsLit "LlvmCodeGen.Regs.heapN")
rxN = getUnique (fsLit "LlvmCodeGen.Regs.rxN")
baseN = getUnique (fsLit "LlvmCodeGen.Regs.baseN")
otherN = getUnique (fsLit "LlvmCodeGen.Regs.otherN")
-- | The TBAA metadata identifier
tbaa :: LMString
tbaa = fsLit "tbaa"
-- | Get the correct TBAA metadata information for this register type
getTBAA :: GlobalReg -> Unique
getTBAA BaseReg = baseN
getTBAA Sp = stackN
getTBAA Hp = heapN
getTBAA (VanillaReg _ _) = rxN
getTBAA _ = topN
|
tjakway/ghcjvm
|
compiler/llvmGen/LlvmCodeGen/Regs.hs
|
Haskell
|
bsd-3-clause
| 5,550
|
{-# OPTIONS_GHC -w #-}
{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}
module ShouldCompile where
import Control.Monad.Reader
instance Eq (a -> b) where
_ == _ = error "whoops"
instance Show (a -> b) where
show = const "<fun>"
-- This is the example from Trac #179
foo x = show (\_ -> True)
-- This is the example from Trac #963
instance (Num a, Monad m, Eq (m a), Show (m a)) => Num (m a) where
test = 1 True
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/tc217.hs
|
Haskell
|
bsd-3-clause
| 436
|
{-# LANGUAGE Trustworthy, Unsafe #-}
-- | Basic test to see if Safe flags compile
module SafeFlags06 where
f :: Int
f = 1
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/flags/SafeFlags06.hs
|
Haskell
|
bsd-3-clause
| 126
|
{-# OPTIONS_GHC -fwarn-warnings-deprecations #-}
-- Test deprecation of constructors and class ops
module ShouldCompile where
import Rn050_A
instance Foo T where
op x = x
bop y = y
foo = op C
|
urbanslug/ghc
|
testsuite/tests/rename/should_compile/rn050.hs
|
Haskell
|
bsd-3-clause
| 202
|
{-# LANGUAGE BangPatterns #-}
{- |
This module works with recursive data structure with cycles.
-}
module Data.Generics.Rec (
universeRec
)
where
import Data.Generics
import Control.Monad.State
import Data.Set
import System.Mem.StableName
import System.IO.Unsafe
type RecState a = StateT (Set Int) IO a
goRec :: (Data v, Data r) => v -> RecState [r]
goRec !v = do
hash <- hashStableName `fmap` liftIO (makeStableName v)
p <- gets (member hash)
if p
then return []
else do
modify $ insert hash
case cast v of
Just v0 -> (v0:) `fmap` continue
Nothing -> continue
where
continue = concat `fmap` sequence (gmapQ goRec v)
-- | Get all distinct children of a node, including itself and all children.
--
-- > dataRose = Rose { roseId :: Int, roseBranches :: [Rose] }
-- > deriving (Data,Typeable)
--
-- > roses =
-- > let a = Rose 1 [b,c]
-- > b = Rose 2 [a,c]
-- > c = Rose 3 [a,b]
-- > in a
--
-- > [ i | Rose i _ <- universeRec roses ]
-- > [1,2,3]
universeRec :: (Data v, Data r) => v -> [r]
universeRec v = unsafePerformIO $ evalStateT (goRec v) empty
|
lomeo/unirec
|
src/Data/Generics/Rec.hs
|
Haskell
|
mit
| 1,167
|
{- |
module: Main
description: Querying the contents of OpenTheory packages
license: MIT
maintainer: Joe Leslie-Hurd <joe@gilith.com>
stability: provisional
portability: portable
-}
module Main
( main )
where
import System.FilePath (isValid,takeDirectory,takeExtension)
import qualified System.Environment as Environment
import HOL.OpenTheory (readArticle,readPackages)
import qualified HOL.OpenTheory.Interpret as Interpret
import HOL.OpenTheory.Package (Name,NameVersion)
import qualified HOL.OpenTheory.Package as Package
import HOL.Parse
import HOL.Print
import HOL.Theory (Theory)
import qualified HOL.Theory as Theory
-------------------------------------------------------------------------------
-- An article file
-------------------------------------------------------------------------------
articleArg :: [String] -> Maybe FilePath
articleArg [f] | isValid f && takeExtension f == ".art" = Just f
articleArg _ = Nothing
articleThy :: FilePath -> IO Theory
articleThy f = do
ths <- readArticle Theory.standard Interpret.empty f
return $ Theory.fromThmSet ths
-------------------------------------------------------------------------------
-- A collection of packages
-------------------------------------------------------------------------------
packagesArg :: [String] -> Maybe [Name]
packagesArg = mapM fromString
packagesThy :: [Name] -> IO Theory
packagesThy = fmap Theory.unionList . readPackages
-------------------------------------------------------------------------------
-- A package file
-------------------------------------------------------------------------------
packageFileArg :: [String] -> Maybe FilePath
packageFileArg [f] | isValid f && takeExtension f == ".thy" = Just f
packageFileArg _ = Nothing
packageFileThy :: FilePath -> IO Theory
packageFileThy f = do
pkg <- fromTextFile f
req <- packagesThy (Package.requires pkg)
let thy = Theory.union Theory.standard req
let int = Interpret.empty
let dir = takeDirectory f
Package.readPackage thy int dir pkg
-------------------------------------------------------------------------------
-- A specific version of a package
-------------------------------------------------------------------------------
packageVersionArg :: [String] -> Maybe NameVersion
packageVersionArg [s] = fromString s
packageVersionArg _ = Nothing
packageVersionThy :: NameVersion -> IO Theory
packageVersionThy nv = do
dir <- Package.directoryVersion nv
packageFileThy (Package.packageFile dir (Package.name nv))
-------------------------------------------------------------------------------
-- Top-level
-------------------------------------------------------------------------------
usage :: String -> a
usage err =
error $ err ++ "\n" ++ info
where
info =
"Usage: hol-pkg INPUT\n" ++
"where INPUT is one of the following forms:\n" ++
" FILE.art : a proof article file\n" ++
" FILE.thy : a theory package file\n" ++
" NAME-VERSION : a specific version of an installed theory package\n" ++
" NAME ... : the latest installed version of a list of packages\n" ++
"hol-pkg reads the INPUT to generate a set of theorems, which are\n" ++
"pretty-printed to standard output together with the symbols they contain."
main :: IO ()
main = do
args <- Environment.getArgs
if null args then usage "no arguments" else return ()
thy <- case articleArg args of
Just f -> articleThy f
Nothing ->
case packageFileArg args of
Just f -> packageFileThy f
Nothing ->
case packageVersionArg args of
Just nv -> packageVersionThy nv
Nothing ->
case packagesArg args of
Just ns -> packagesThy ns
Nothing -> usage $ "bad arguments: " ++ show args
putStrLn $ toString thy
return ()
|
gilith/hol
|
src/Main.hs
|
Haskell
|
mit
| 3,999
|
data Vector = Vector { x :: Double, y :: Double } deriving (Eq, Ord, Show)
instance Num Vector where
p1 + p2 = Vector (x p1 + x p2) (y p1 + y p2)
p1 - p2 = p1 + negate p2
p1 * p2 = Vector (x p1 * x p2) (y p1 * y p2)
abs (Vector x y) = Vector (sqrt (x * x + y * y)) 0
negate (Vector x y) = Vector (-x) (-y)
fromInteger x = Vector (fromInteger x) 0
signum (Vector x y) = let m = sqrt (x * x + y * y) in Vector (x / m) (y / m)
p .* s = Vector (x p * s) (y p * s)
(*.) = flip (.*)
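-- Illustrative uses of the scaling operators (with this module in scope):
--
-- > Vector 1 2 .* 3  ==  Vector 3 6
-- > 3 *. Vector 1 2  ==  Vector 3 6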
|
0xd34df00d/hencil
|
src/Vector.hs
|
Haskell
|
mit
| 507
|
{-# LANGUAGE RecordWildCards #-}
module Hogldev.Pipeline (
Pipeline(..)
, getTrans
, PersProj(..)
, Camera(..)
) where
import Graphics.Rendering.OpenGL
import Hogldev.Math3D
import Hogldev.Utils
import Hogldev.Camera
data Pipeline
= WPipeline
{ scaleInfo :: Vector3 GLfloat
, worldInfo :: Vector3 GLfloat
, rotateInfo :: Vector3 GLfloat
}
| WPPipeline
{ scaleInfo :: Vector3 GLfloat
, worldInfo :: Vector3 GLfloat
, rotateInfo :: Vector3 GLfloat
, persProj :: PersProj
}
| WVPPipeline
{ scaleInfo :: Vector3 GLfloat
, worldInfo :: Vector3 GLfloat
, rotateInfo :: Vector3 GLfloat
, persProj :: PersProj
, pipeCamera :: Camera
}
| VPPipeline
{ persProj :: PersProj
, pipeCamera :: Camera
}
deriving Show
getTrans :: Pipeline -> Matrix4
getTrans WPipeline{..} = worldTrans scaleInfo worldInfo rotateInfo
getTrans WPPipeline{..} = projTrans scaleInfo worldInfo rotateInfo persProj
getTrans VPPipeline{..} = vpTrans persProj pipeCamera
getTrans WVPPipeline{..} =
projViewTrans scaleInfo worldInfo rotateInfo persProj pipeCamera
vpTrans :: PersProj -> Camera -> Matrix4
vpTrans persProj camera = perspProjTrans persProj !*! cameraTrans camera
worldTrans :: Vector3 GLfloat
-> Vector3 GLfloat
-> Vector3 GLfloat
-> Matrix4
worldTrans scaleInfo worldInfo rotateInfo =
translationTrans !*! rotateTrans !*! scaleTrans
where
scaleTrans, rotateTrans, translationTrans :: Matrix4
scaleTrans = scaleMatrix scaleInfo
rotateTrans = initRotateTransform rotateInfo
translationTrans = translateMatrix worldInfo
projTrans :: Vector3 GLfloat
-> Vector3 GLfloat
-> Vector3 GLfloat
-> PersProj
-> Matrix4
projTrans scaleInfo worldInfo rotateInfo persProj =
perspProjTrans persProj !*! worldTrans scaleInfo worldInfo rotateInfo
projViewTrans :: Vector3 GLfloat
-> Vector3 GLfloat
-> Vector3 GLfloat
-> PersProj
-> Camera
-> Matrix4
projViewTrans scaleInfo worldInfo rotateInfo persProj camera =
perspProjTrans persProj
!*! cameraTrans camera
!*! worldTrans scaleInfo worldInfo rotateInfo
cameraTrans :: Camera -> Matrix4
cameraTrans c@Camera{..} =
cameraRotationTrans c !*! translateMatrix (fmap (*(-1) ) cameraPos)
initRotateTransform :: Vector3 GLfloat -> Matrix4
initRotateTransform (Vector3 x y z) = rz !*! ry !*! rx
where
rx, ry, rz :: Matrix4
rx = rotateXMatrix(toRadian x)
ry = rotateYMatrix(toRadian y)
rz = rotateZMatrix(toRadian z)
|
triplepointfive/hogldev
|
common/Hogldev/Pipeline.hs
|
Haskell
|
mit
| 2,783
|
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE JavaScriptFFI #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE EmptyDataDecls #-}
module JavaScript.Web.Blob.Internal where
import Data.Typeable
import GHCJS.Types
data BlobType = BlobTypeBlob
| BlobTypeFile
newtype SomeBlob (a :: BlobType) = SomeBlob JSRef deriving Typeable
type File = SomeBlob BlobTypeFile
type Blob = SomeBlob BlobTypeBlob
size :: SomeBlob a -> Int
size b = js_size b
{-# INLINE size #-}
contentType :: SomeBlob a -> JSString
contentType b = js_type b
{-# INLINE contentType #-}
-- is the type correct, does slicing a File give another File?
slice :: Int -> Int -> JSString -> SomeBlob a -> SomeBlob a
slice start end contentType b = js_slice start end contentType b
{-# INLINE slice #-}
isClosed :: SomeBlob a -> IO Bool
isClosed b = js_isClosed b
{-# INLINE isClosed #-}
close :: SomeBlob a -> IO ()
close b = js_close b
{-# INLINE close #-}
-- -----------------------------------------------------------------------------
foreign import javascript unsafe "$1.size" js_size :: SomeBlob a -> Int
foreign import javascript unsafe "$1.type" js_type :: SomeBlob a -> JSString
-- fixme figure out if we need to support older browsers with obsolete slice
foreign import javascript unsafe "$4.slice($1,$2,$3)"
js_slice :: Int -> Int -> JSString -> SomeBlob a -> SomeBlob a
foreign import javascript unsafe "$1.isClosed"
js_isClosed :: SomeBlob a -> IO Bool
foreign import javascript unsafe "$1.close();"
js_close :: SomeBlob a -> IO ()
|
tavisrudd/ghcjs-base
|
JavaScript/Web/Blob/Internal.hs
|
Haskell
|
mit
| 1,607
|
module Main where
import Data.Bifunctor (first)
import Data.Bits (testBit, unsafeShiftL)
import Data.Bool (bool)
import Data.Char (digitToInt)
import Data.Foldable (foldl', traverse_)
import Data.Function (on)
import Data.List.NonEmpty (NonEmpty (..), fromList)
readBits :: String -> [Bool]
readBits = concatMap (fourBits . digitToInt)
where fourBits n = [ n `testBit` i | i <- [3, 2, 1, 0] ]
bToI :: [Bool] -> Int
bToI = foldl' (\i b -> (i `unsafeShiftL` 1) + bool 0 1 b) 0
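-- Worked example: each hex digit contributes four bits, most significant first,
-- and 'bToI' folds a bit list back into an integer:
--
-- > readBits "D2"           ==  [True,True,False,True,False,False,True,False]
-- > bToI [True,False,True]  ==  5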
type Parser a = [Bool] -> (a, [Bool])
int :: Int -> Parser Int
int n = first bToI . splitAt n
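-- For instance, @int 3@ consumes three bits and decodes them:
--
-- > int 3 [True,False,True,True]  ==  (5, [True])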
data Operation = OSum | OMul | OMin | OMax deriving Enum
data Comparator = CGT | CLT | CEQ deriving Enum
data Packet = Literal !Int !Int
| Operation !Int !Operation !(NonEmpty Packet)
| Comparison !Int !Comparator !Packet !Packet
literal :: Int -> Parser Packet
literal v = first (Literal v . bToI) . go
where
go [] = error "Empty list passed to `literal`"
go bs =
let (x : ys, bs') = splitAt 5 bs
in if not x then (ys, bs') else first (ys ++) $ go bs'
subN :: Int -> Parser [Packet]
subN n = go []
where
go ps bs =
if length ps >= n then (ps, bs) else let (p, bs') = parse bs in go (p : ps) bs'
subT :: Int -> Parser [Packet]
subT n = go 0 []
where
go i ps bs = if i >= n
then (ps, bs)
else
let (p, bs') = parse bs
d = ((-) `on` length) bs bs'
in go (i + d) (p : ps) bs'
dispatch :: Parser [Packet]
dispatch [] = error "Empty list passed to `dispatch`"
dispatch (b : bs) = if b
then let (n, bs') = (first bToI $ splitAt 11 bs) in subN n bs'
else let (n, bs') = (first bToI $ splitAt 15 bs) in subT n bs'
operation :: Int -> Int -> Parser Packet
operation v o = first (Operation v (toEnum o) . fromList) . dispatch
comparison :: Int -> Int -> Parser Packet
comparison v c bits = (Comparison v (toEnum c) x y, bits')
where
(ps, bits') = dispatch bits
[x , y ] = reverse $ take 2 ps
parse :: Parser Packet
parse bs = p
where
(v, bs' ) = int 3 bs
(t, bs'') = int 3 bs'
p = case t of
n | n `elem` [0 .. 3] -> operation v n bs''
4 -> literal v bs''
n | n `elem` [5 .. 7] -> comparison v (n - 5) bs''
n -> error $ "Unexpected integer in `parse`: " ++ show n
sumVersions :: Packet -> Int
sumVersions (Literal v _ ) = v
sumVersions (Operation v _ ps ) = v + sum (sumVersions <$> ps)
sumVersions (Comparison v _ x y) = v + sumVersions x + sumVersions y
eval :: Packet -> Int
eval (Literal _ v ) = v
eval (Operation _ o ps) = op $ eval <$> ps
where op = [sum, product, minimum, maximum] !! fromEnum o
eval (Comparison _ c x y) = bool 0 1 $ cmp (eval x) (eval y)
where cmp = [(>), (<), (==)] !! fromEnum c
solve :: (Packet -> Int) -> String -> Int
solve = (. fst . parse . readBits)
part1 :: String -> Int
part1 = solve sumVersions
part2 :: String -> Int
part2 = solve eval
main :: IO ()
main = do
input <- readFile "input.txt"
traverse_ (print . ($ input)) [part1, part2]
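-- Illustrative check (GHCi-style), assuming the familiar sample packet "D2FE28", which
-- decodes to a literal packet with version 6 and value 2021:
--
-- >>> sumVersions (fst (parse (readBits "D2FE28")))
-- 6
-- >>> eval (fst (parse (readBits "D2FE28")))
-- 2021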
|
genos/online_problems
|
advent_of_code_2021/day16/Main.hs
|
Haskell
|
mit
| 3,088
|
{-# LANGUAGE FlexibleContexts, Rank2Types, NoMonomorphismRestriction #-}
module Game.World.Lens
( objectBoundary
, Get
, Set
, Component(..)
, writeProp
, compUnit
, getWires
, addWire
, addObject
, moveObject
, isCollidable
, getCollisionFilters
, setCollisionEvent
, getAnimations
, setAnimation
, setAnimations
, deleteObject
, objectPosition
, rotateObject
, getPositions
, collisionEvent
, setIgnoreCollision
, setBoundary
, getItems
, getObjects
, setOrientation
, setStaticCollidable
)
where
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Control.Lens
import Game.World.Objects
import Control.Monad.Writer
--import Game.World.Types
import Data.Maybe
import Game.World.Common
import Control.Arrow
--objectExists :: ObjectId -> World -> Bool
--objectExists oId w = Map.member oId (w^.wObjects)
--wObjectExists :: ObjectId -> Getter World Bool
--wObjectExists oId = to (objectExists oId)
type Get a = Getter World a
type Set a = Setter' WorldDelta a
data Component a da = Component
{ _compGet :: Getter World a
, _compSet :: Setter' WorldDelta da
}
--makeLenses ''Component (ObjectProp Position) (ObjectProp Position)
--compGet = to _compGet
--compSet = to _compSet
type Component' a = Component a a
compObject :: Component (ObjectProp Object) (ObjectProp (Maybe Object))
compObject = Component
{ _compGet = wObjects
, _compSet = wdObjects
}
compUnit :: Component (ObjectProp Unit) (ModifyContainer UnitId Unit)
compUnit = Component
{ _compGet = wUnitManager.umUnits
, _compSet = wdUnitManager.umdUnits
}
compItem :: Component (ObjectProp Item) (ModifyContainer ItemId Item)
compItem = Component
{ _compGet = wUnitManager.umItems
, _compSet = wdUnitManager.umdItems
}
compPosition :: Component' (ObjectProp Position)
compPosition = Component
{ _compGet = wCommon.wcPositions
, _compSet = wdCommon.wcDelta.wcPositions
}
compRotation :: Component' (ObjectProp Rotation)
compRotation = Component
{ _compGet = wCommon.wcRotations
, _compSet = wdCommon.wcDelta.wcRotations
}
compWires :: Component' (ObjectProp [ObjectWire ObjectId ()])
compWires = Component
{ _compGet = wCommon.wcWires
, _compSet = wdCommon.wcDelta.wcWires
}
compAnimations :: Component' (ObjectProp Animation)
compAnimations = Component
{ _compGet = wCommon.wcAnimations
, _compSet = wdCommon.wcDelta.wcAnimations
}
compBoundaries :: Component' (ObjectProp Boundary)
compBoundaries = Component
{ _compGet = wCommon.wcBoundaries
, _compSet = wdCommon.wcDelta.wcBoundaries
}
compOrientation :: Component' (ObjectProp Orientation)
compOrientation = Component
{ _compGet = wCommon.wcOrientation
, _compSet = wdCommon.wcDelta.wcOrientation
}
compCollisionEvent :: Component' (ObjectProp [ObjectId])
compCollisionEvent = Component
{ _compGet = wCommon.wcCollisionEvents
, _compSet = wdCommon.wcDelta.wcCollisionEvents
}
type IgnoredObjects = Set.Set ObjectId
type ObjectIdTo a = ObjectProp a
type ObjectChangeSet = Map.Map ObjectId (Maybe ObjectId)
type CollisionFilter = Component
compCollisionFilter :: CollisionFilter (ObjectIdTo IgnoredObjects) (ObjectIdTo ObjectChangeSet)
compCollisionFilter = Component
{ _compGet = wCollisionFilter
, _compSet = wdCollisionFilter
}
getWires :: Get (ObjectProp [ObjectWire ObjectId ()])
getWires = _compGet compWires
setWires :: Set (ObjectProp [ObjectWire ObjectId ()])
setWires = _compSet compWires
writeProp :: (MonadWriter WorldDelta m)
=> Set (ObjectProp a)
-> ObjectId
-> a
-> m ()
writeProp mapSetter oId a = scribe (mapSetter . at oId) (Just a)
addWire :: (MonadWriter WorldDelta m) => ObjectId -> ObjectWire ObjectId () -> m ()
addWire oId w = writeProp setWires oId [w]
setAnimations :: Set (ObjectProp Animation)
setAnimations = _compSet compAnimations
getAnimations :: Get (ObjectProp Animation)
getAnimations = _compGet compAnimations
setAnimation :: (MonadWriter WorldDelta m) => ObjectId -> Animation -> m ()
setAnimation = writeProp setAnimations
setPositions :: Setter' WorldDelta (ObjectProp Position)
setPositions = _compSet compPosition
getPositions :: Getter World (ObjectProp Position)
getPositions = _compGet compPosition
objectPosition :: ObjectId -> Getter World (Maybe Position)
objectPosition oId = getPositions . at oId
-- | Rotation component
setRotations :: Setter' WorldDelta (ObjectProp Rotation)
setRotations = _compSet compRotation
getRotations :: Getter World (ObjectProp Rotation)
getRotations = _compGet compRotation
rotateObject :: (MonadWriter WorldDelta m)
=> ObjectId -> Rotation -> m ()
rotateObject = writeProp setRotations
objectRotation :: ObjectId -> Get (Maybe Rotation)
objectRotation oId = getRotations . at oId
moveObject :: (MonadWriter WorldDelta m) => ObjectId -> (Float, Float) -> m ()
moveObject = writeProp setPositions
setObjects :: Setter' WorldDelta (ObjectProp (Maybe Object))
setObjects = _compSet compObject
getObjects :: Getter World (ObjectProp Object)
getObjects = _compGet compObject
addObject :: (MonadWriter WorldDelta m) => ObjectId -> Object -> m ()
addObject oId obj = writeProp setObjects oId (Just obj)
deleteObject :: (MonadWriter WorldDelta m) => ObjectId -> m ()
deleteObject oId = writeProp setObjects oId Nothing
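-- Illustrative sketch of composing these writers; 'spawnAt' is a hypothetical helper,
-- not part of this module:
--
-- spawnAt :: (MonadWriter WorldDelta m) => ObjectId -> Object -> (Float, Float) -> m ()
-- spawnAt oId obj pos = do
--     addObject oId obj
--     moveObject oId pos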
deletedObjects :: Getter WorldDelta [ObjectId]
deletedObjects = to getDeleted
where
getDeleted wd = getDeletedObjects (wd^.wdObjects)
-- new objects are inserted into the map with Just
getDeletedObjects objectMap = map fst $
filter (\(objId, mobj) -> case mobj of Nothing -> True; _ -> False) $
Map.toList objectMap
newObjects :: Getter WorldDelta [Object]
newObjects = to getNew
where
getNew wd = getNewObjects (wd^.wdObjects)
-- new objects are inserted into the map with Just
getNewObjects objectMap = map fromJust .
filter (\mobj -> case mobj of Just _ -> True; _ -> False) $
map snd $ Map.toList objectMap
findObject :: String -> Getter World (Maybe Object)
findObject name = to (\w ->
unOne $ ifoldMap (\_ obj -> One $
if (obj^.objName) == name then Just obj else Nothing
) (w^.wObjects)
)
-- | boundary + position
tileBoundary :: ObjectId -> Get ((Float, Float), (Float, Float))
tileBoundary oId = to boundary
where
boundary w = (pos w, w^.wTileBoundary)
pos w = fromJust $ w^.getPositions . at oId
-- | boundary + position
objectBoundary :: ObjectId -> Get Boundary
objectBoundary oId = to boundary
where
boundary w = let (px, py) = pos w in -- collision boundary = object boundary + position
map ((+) px *** (+) py) $ fromJust $ w^.getBoundaries . at oId
pos w = fromJust $ w^.getPositions . at oId
setBoundary :: (MonadWriter WorldDelta m) => ObjectId -> Boundary -> m ()
setBoundary = writeProp setBoundaries
setBoundaries :: Set (ObjectProp Boundary)
setBoundaries = _compSet compBoundaries
getBoundaries :: Get (ObjectProp Boundary)
getBoundaries = _compGet compBoundaries
setStaticCollidable :: (MonadWriter WorldDelta m) => ObjectId -> m ()
setStaticCollidable oId = scribe (wdCommon.wcDelta.wcStaticCollidable) (Set.insert oId Set.empty)
isCollidable :: ObjectId -> Get Bool
isCollidable oId = to collidable
where
collidable w = Set.member oId $ objPosAndBoundary w
objectsWithPos w = Set.fromList $ w^..wCommon.wcPositions.itraversed.asIndex
objectsWithBoundary w = Set.fromList $ w^..wCommon.wcBoundaries.itraversed.asIndex
objPosAndBoundary w = Set.intersection (objectsWithPos w) (objectsWithBoundary w)
setOrientations :: Set (ObjectProp Orientation)
setOrientations = _compSet compOrientation
getOrientations :: Get (ObjectProp Orientation)
getOrientations = _compGet compOrientation
setOrientation :: (MonadWriter WorldDelta m) => ObjectId -> Orientation -> m ()
setOrientation = writeProp setOrientations
getCollisionFilters :: Get (ObjectProp (Set.Set ObjectId))
getCollisionFilters = _compGet compCollisionFilter
setCollisionFilters :: Set (ObjectProp (Map.Map ObjectId (Maybe ObjectId)))
setCollisionFilters = _compSet compCollisionFilter
setIgnoreCollision :: (MonadWriter WorldDelta m) => ObjectId -> ObjectId -> m ()
setIgnoreCollision oId otherId = writeProp setCollisionFilters oId (Map.fromList [(otherId, Just otherId)])
unsetIgnoreCollision :: (MonadWriter WorldDelta m) => ObjectId -> ObjectId -> m ()
unsetIgnoreCollision oId otherId = writeProp setCollisionFilters oId (Map.fromList [(otherId, Nothing)])
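-- Note (illustrative): the Maybe in the filter delta presumably encodes the intent, e.g.
--   setIgnoreCollision a b   writes (b, Just b)  -- start ignoring b
--   unsetIgnoreCollision a b writes (b, Nothing) -- stop ignoring b
-- so applying the delta either inserts or removes the entry in the world's filter set.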
setCollisionEvents :: Set (ObjectProp [ObjectId])
setCollisionEvents = _compSet compCollisionEvent
getCollisionEvents :: Get (ObjectProp [ObjectId])
getCollisionEvents = _compGet compCollisionEvent
setCollisionEvent :: (MonadWriter WorldDelta m) => ObjectId -> ObjectId -> m ()
setCollisionEvent oId otherId = writeProp setCollisionEvents oId [otherId]
collisionEvent :: ObjectId -> Get [ObjectId]
collisionEvent oId = to (\w -> fromMaybe [] $ w^.getCollisionEvents . at oId)
--collided :: ObjectId -> ObjectId -> Get Bool
--collided oId otherId = to (\w -> otherId `elem` (w^.collisionEvent oId))
setItems :: Set (ModifyContainer ItemId Item)
setItems = _compSet compItem
getItems :: Get (ObjectProp Item)
getItems = _compGet compItem
--unitsInDistance :: ObjectId -> Float -> Get Set.Set ObjectId
--unitsInDistance oId distance = to get
-- where
-- get world = let
-- Just oPos = world^.wcPositions.at oId
|
mfpi/q-inqu
|
Game/World/Lens.hs
|
Haskell
|
mit
| 9,726
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | This module deals with Exception logging.
module System.Wlog.Exception
( logException
, catchLog
) where
import Universum
import System.Wlog.CanLog (WithLogger, WithLoggerIO, logError)
-- | Logs the exception's description with 'System.Wlog.Severity.Error' 'System.Wlog.Severity.Severity'.
logException :: forall e m . (WithLogger m, Exception e) => e -> m ()
logException = logError . show
{- | Runs the action; if an exception is raised, 'logException' is executed.
==== __Example__
Here is a very simple example of using 'catchLog' with IO functions:
@
main :: IO ()
main = do
buildAndSetupYamlLogging productionB "log-config.yaml"
usingLoggerName "new-logger" runWithExceptionLog
runWithExceptionLog :: (WithLoggerIO m, MonadCatch m) => m ()
runWithExceptionLog = catchLog @IOException (liftIO simpleIOfun)
simpleIOfun :: IO ()
simpleIOfun = getLine >>= readFile >>= putStrLn
@
and when run, you will get output like:
>>> run-main-from-this-example
> not-existing-filename.txt
[new-logger:ERROR] [2017-12-01 13:07:33.21 UTC] not-existing-filename.txt: openFile: does not exist (No such file or directory)
-}
catchLog :: forall e m . (WithLoggerIO m, MonadCatch m, Exception e) => m () -> m ()
catchLog a = a `catch` logE
where
logE :: e -> m ()
logE = logException
|
serokell/log-warper
|
src/System/Wlog/Exception.hs
|
Haskell
|
mit
| 1,398
|
{-# LANGUAGE DatatypeContexts #-}
module Ch13.Num
where
import qualified Data.List as L
--------------------------------------------------
-- Symbolic/units manipulation
--------------------------------------------------
data Op
= Plus
| Minus
| Mul
| Div
| Pow
deriving (Eq, Show, Ord)
data SymbolicManip a
= Number a
| Symbol String
| BinaryArith Op (SymbolicManip a) (SymbolicManip a)
| UnaryArith String (SymbolicManip a)
deriving Eq
instance Num a => Num (SymbolicManip a) where
a + b =
BinaryArith Plus a b
a - b =
BinaryArith Minus a b
a * b =
BinaryArith Mul a b
negate a =
BinaryArith Mul (Number (-1)) a
abs a =
UnaryArith "abs" a
signum =
error "signum is not implemented for SymbolicManip"
fromInteger i =
Number (fromInteger i)
instance (Fractional a) => Fractional (SymbolicManip a) where
a / b =
BinaryArith Div a b
recip a =
BinaryArith Div (Number 1) a
fromRational r =
Number (fromRational r)
instance (Floating a) => Floating (SymbolicManip a) where
pi =
Symbol "pi"
exp a =
UnaryArith "exp" a
log a =
UnaryArith "log" a
sqrt a =
UnaryArith "sqrt" a
a ** b =
BinaryArith Pow a b
sin a =
UnaryArith "sin" a
cos a =
UnaryArith "cos" a
tan a =
UnaryArith "tan" a
asin a =
UnaryArith "asin" a
acos a =
UnaryArith "acos" a
atan a =
UnaryArith "atan" a
sinh a =
UnaryArith "sinh" a
cosh a =
UnaryArith "cosh" a
tanh a =
UnaryArith "tanh" a
asinh a =
UnaryArith "asinh" a
acosh a =
UnaryArith "acosh" a
atanh a =
UnaryArith "atanh" a
prettyShow :: (Show a, Num a) => SymbolicManip a -> String
prettyShow (Number n) =
show n
prettyShow (Symbol x) =
x
prettyShow (BinaryArith op x y) =
let px = simpleParen x op
py = simpleParen y op
pop = op2str op
in px ++ pop ++ py
prettyShow (UnaryArith op x) =
op ++ "(" ++ prettyShow x ++ ")"
simpleParen :: (Num a, Show a) => SymbolicManip a -> Op -> [Char]
simpleParen x@(BinaryArith op _ _) op'
| prio op' > prio op =
"(" ++ prettyShow x ++ ")"
| prio op' == prio op && op /= op' =
"(" ++ prettyShow x ++ ")"
| otherwise =
prettyShow x
simpleParen x _ =
prettyShow x
prio :: Op -> Int
prio Plus =
0
prio Minus =
0
prio Mul =
1
prio Div =
1
prio Pow =
1
op2str :: Op -> String
op2str Plus =
"+"
op2str Minus =
"-"
op2str Mul =
"*"
op2str Div =
"/"
op2str Pow =
"**"
instance (Show a, Num a) => Show (SymbolicManip a) where
show =
prettyShow
rpnShow :: (Show a, Num a) => SymbolicManip a -> String
rpnShow =
L.intercalate " " . rpnShow'
where
rpnShow' :: (Show a, Num a) => SymbolicManip a -> [String]
rpnShow' (Number n) =
[show n]
rpnShow' (Symbol str) =
[str]
rpnShow' (BinaryArith op x y) =
rpnShow' x ++ rpnShow' y ++ [op2str op]
rpnShow' (UnaryArith op x) =
rpnShow' x ++ [op]
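-- Worked example (GHCi-style, illustrative): building an expression through the Num
-- instance and rendering it in infix and RPN form.
--
-- >>> prettyShow (5 + 3 * 2 :: SymbolicManip Int)
-- "5+3*2"
-- >>> rpnShow (5 + 3 * 2 :: SymbolicManip Int)
-- "5 3 2 * +"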
simplify :: (Eq a, Num a) => SymbolicManip a -> SymbolicManip a
simplify (BinaryArith op ia ib) =
let sa = simplify ia
sb = simplify ib
in
case (op, sa, sb) of
(Mul, Number 1, b) ->
b
(Mul, a, Number 1) ->
a
(Mul, Number 0, _) ->
Number 0
(Mul, _, Number 0) ->
Number 0
(Div, a, Number 1) ->
a
(Plus, a, Number 0) ->
a
(Plus, Number 0, b) ->
b
(Minus, a, Number 0) ->
a
_ ->
BinaryArith op sa sb
simplify (UnaryArith op a) =
UnaryArith op (simplify a)
simplify x =
x
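-- Worked example (GHCi-style, illustrative): identity and zero rules collapse the tree.
--
-- >>> prettyShow (simplify (1 * (5 + 0) :: SymbolicManip Int))
-- "5"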
data Num a => Units a =
Units a (SymbolicManip a)
deriving Eq
instance (Eq a, Num a) => Num (Units a) where
(Units xa ua) + (Units xb ub)
| ua == ub =
Units (xa + xb) ua
| otherwise =
error "Mis-matched units in add or subtract"
(Units xa ua) - (Units xb ub) =
(Units xa ua) + (Units (xb * (-1)) ub)
(Units xa ua) * (Units xb ub) =
Units (xa * xb) (ua * ub)
negate (Units xa ua) =
Units (negate xa) ua
abs (Units xa ua) =
Units (abs xa) ua
signum (Units xa _) =
Units (signum xa) (Number 1)
fromInteger i =
Units (fromInteger i) (Number 1)
instance (Eq a, Fractional a) => Fractional (Units a) where
(Units xa ua) / (Units xb ub) =
Units (xa / xb) (ua / ub)
recip a =
1 / a
fromRational r =
Units (fromRational r) (Number 1)
instance (Eq a, Floating a) => Floating (Units a) where
pi =
(Units pi (Number 1))
exp _ =
error "exp not yet implemented in Units"
log _ =
error "log not yet implemented in Units"
(Units xa ua) ** (Units xb ub)
| ub == Number 1 =
Units (xa ** xb) (ua ** Number xb)
| otherwise =
error "units for RHS of ** not supported"
sqrt (Units xa ua) =
Units (sqrt xa) (sqrt ua)
sin (Units xa ua)
| ua == Symbol "rad" =
Units (sin xa) (Number 1)
| ua == Symbol "deg" =
Units (sin (deg2rad xa)) (Number 1)
| otherwise =
error "Units for sin must be deg or rad"
cos (Units xa ua)
| ua == Symbol "rad" =
Units (cos xa) (Number 1)
| ua == Symbol "deg" =
Units (cos (deg2rad xa)) (Number 1)
| otherwise =
error "Units for cos must be deg or rad"
tan (Units xa ua)
| ua == Symbol "rad" =
Units (tan xa) (Number 1)
| ua == Symbol "deg" =
Units (tan (deg2rad xa)) (Number 1)
| otherwise =
error "Units for tan must be deg or rad"
asin (Units xa ua)
| ua == Number 1 =
Units (rad2deg $ asin xa) (Symbol "deg")
| otherwise =
error "Units for asin must be empty"
acos (Units xa ua)
| ua == Number 1 =
Units (rad2deg $ acos xa) (Symbol "deg")
| otherwise =
error "Units for acos must be empty"
atan (Units xa ua)
| ua == Number 1 =
Units (rad2deg $ atan xa) (Symbol "deg")
| otherwise =
error "Units for atan must be empty"
sinh =
error "sinh not yet implemented in Units"
cosh =
error "cosh not yet implemented in Units"
tanh =
error "tanh not yet implemented in Units"
asinh =
error "asinh not yet implemented in Units"
acosh =
error "acosh not yet implemented in Units"
atanh =
error "atanh not yet implemented in Units"
units :: (Num z) => z -> String -> Units z
units a b =
Units a (Symbol b)
dropUnits :: (Num z) => Units z -> z
dropUnits (Units x _) =
x
deg2rad :: Floating a => a -> a
deg2rad x =
2 * pi * x / 360
rad2deg :: Floating a => a -> a
rad2deg x =
360 * x / (2 * pi)
instance (Show a, Num a, Eq a) => Show (Units a) where
show (Units xa ua) =
show xa ++ "_" ++ prettyShow (simplify ua)
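-- Worked example (GHCi-style, illustrative), using Double as the numeric type:
--
-- >>> units 5 "m" / units 2 "s" :: Units Double
-- 2.5_m/s
-- >>> units 90 "deg" + units 10 "deg" :: Units Double
-- 100.0_deg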
|
futtetennista/IntroductionToFunctionalProgramming
|
RWH/src/ch13/Num.hs
|
Haskell
|
mit
| 6,719
|
-- Copyright (C) 2013 Jorge Aparicio
main :: IO()
main
= print
$ squaredSumOfIntegers - sumOfSquaredIntegers
where integers = [1..100] :: [Int]
sumOfIntegers = sum integers
squaredSumOfIntegers = sumOfIntegers * sumOfIntegers
squaredIntegers = zipWith (*) integers integers
sumOfSquaredIntegers = sum squaredIntegers
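-- Quick arithmetic check (illustrative): sum [1..100] = 5050, so the squared sum is
-- 25502500; the sum of squares is 100*101*201/6 = 338350; the program therefore
-- prints 25164150.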
|
japaric/eulermark
|
problems/0/0/6/006.hs
|
Haskell
|
mit
| 366
|