code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -F -pgmF htfpp #-}
-- | entry module for tests
module Main where
import Control.Monad (forM_)
import Data.Aeson
import Test.Framework
import Test.HUnit (Assertion)
import Data.CountryCodes
-- | Test entry point: runs every HTF test collected from this module
-- (the htfpp preprocessor generates 'htf_thisModulesTests').
main :: IO()
main = htfMain [htf_thisModulesTests]
-- | Round trip every 'CountryCode' through its 'Text' form:
-- @fromText . toText@ must be the identity.
test_Text :: Assertion
test_Text =
    mapM_ check [minBound .. maxBound]
  where
    check code = assertEqual code (fromText (toText code))
-- | Round trip every 'CountryCode' through its name:
-- @fromName . toName@ must be the identity.
test_Name :: Assertion
test_Name =
    mapM_ check [minBound .. maxBound]
  where
    check code = assertEqual code (fromName (toName code))
-- | CountryCode -> JSON -> CountryCode
test_JSON :: Assertion
test_JSON = forM_ ([minBound .. maxBound]::[CountryCode]) $ \c ->
assertEqual (Data.Aeson.Success c) (fromJSON $ toJSON c) | prowdsponsor/country-codes | test/country-codes-tests.hs | bsd-3-clause | 775 | 0 | 10 | 130 | 213 | 121 | 92 | 18 | 1 |
module AST.Module
( Interfaces
, Types, Aliases, ADTs
, AdtInfo, CanonicalAdt
, SourceModule, ValidModule, CanonicalModule, Optimized
, Module(..), Body(..)
, Header(..)
, Interface(..), toInterface
, UserImport, DefaultImport, ImportMethod(..)
) where
import Data.Binary
import qualified Data.Map as Map
import qualified AST.Declaration as Decl
import qualified AST.Expression.Canonical as Canonical
import qualified AST.Expression.Optimized as Optimized
import qualified AST.Module.Name as Name
import qualified AST.Type as Type
import qualified AST.Variable as Var
import qualified Docs.AST as Docs
import qualified Elm.Package as Package
import qualified Elm.Compiler.Version as Compiler
import qualified Reporting.Annotation as A
import qualified Type.Effect.Common as Effect
-- HELPFUL TYPE ALIASES

-- | Module interfaces keyed by canonical module name.
type Interfaces = Map.Map Name.Canonical Interface

-- | Canonical types keyed by value name.
type Types = Map.Map String Type.Canonical

-- | Type aliases: alias name -> (type variables, aliased type).
type Aliases = Map.Map String ([String], Type.Canonical)

-- | Algebraic data types keyed by type name.
type ADTs = Map.Map String (AdtInfo String)

-- | ADT description: (type variables, constructors with argument types).
type AdtInfo v = ( [String], [(v, [Type.Canonical])] )

-- | An ADT's info paired with its canonical name.
type CanonicalAdt = (Var.Canonical, AdtInfo Var.Canonical)
-- MODULES

-- | 'Module' instantiation for freshly parsed declarations
-- ('Decl.SourceDecl'); imports are exactly as written by the user.
type SourceModule =
  Module
    String
    [UserImport]
    (Var.Listing (A.Located Var.Value))
    [Decl.SourceDecl]

-- | 'Module' instantiation for validated declarations; imports carry
-- both default and user-written imports.
type ValidModule =
  Module
    String
    ([DefaultImport], [UserImport])
    (Var.Listing (A.Located Var.Value))
    [Decl.ValidDecl]

-- | 'Module' instantiation with a canonical expression body.
type CanonicalModule =
  Module Docs.Centralized [Name.Raw] [Var.Value] (Body Canonical.Expr)

-- | Like 'CanonicalModule' but with an optimized body.
type Optimized =
  Module Docs.Centralized [Name.Raw] [Var.Value] (Body [Optimized.Def])
-- | A module, parameterized over the representation of its docs,
-- imports, exports, and body so the same shape serves every compiler
-- stage (see 'SourceModule' through 'Optimized').
data Module docs imports exports body = Module
  { name    :: Name.Canonical         -- ^ canonical module name
  , path    :: FilePath               -- ^ source file path
  , docs    :: A.Located (Maybe docs) -- ^ module documentation, if any
  , exports :: exports
  , imports :: imports
  , body    :: body
  }
-- | Everything in a module besides its header: the program expression
-- plus tables describing the module's declarations.
data Body expr = Body
  { program   :: expr
  , types     :: Types
  , annots    :: Map.Map String (Effect.CanonicalAnnot, [Int], [Effect.CanonicalConstr])
  , fixities  :: [(Decl.Assoc, Int, String)] -- ^ operator fixity declarations
  , aliases   :: Aliases
  , datatypes :: ADTs
  , ports     :: [String]
  }
-- HEADERS
{-| Basic info needed to identify modules and determine dependencies. -}
data Header imports = Header
{ _name :: Name.Raw
, _docs :: A.Located (Maybe String)
, _exports :: Var.Listing (A.Located Var.Value)
, _imports :: imports
}
-- IMPORTS

-- | An import written by the user, with its source location.
type UserImport = A.Located (Name.Raw, ImportMethod)

-- | An import supplied by default, with no source location.
type DefaultImport = (Name.Raw, ImportMethod)

-- | How a module is imported: an optional alias plus the exposed names.
data ImportMethod = ImportMethod
  { alias       :: Maybe String
  , exposedVars :: !(Var.Listing Var.Value)
  }
-- INTERFACES

{-| Key facts about a module, used when reading info from .elmi files. -}
data Interface = Interface
  { iVersion  :: Package.Version -- ^ compiler version ('Compiler.version' in 'toInterface')
  , iPackage  :: Package.Name    -- ^ package the module belongs to
  , iExports  :: [Var.Value]
  , iTypes    :: Types
  , iAnnots   :: Map.Map String (Effect.CanonicalAnnot, [Int], [Effect.CanonicalConstr])
  , iImports  :: [Name.Raw]
  , iAdts     :: ADTs
  , iAliases  :: Aliases
  , iFixities :: [(Decl.Assoc, Int, String)]
  , iPorts    :: [String]
  }
-- | Extract the interface of an optimized module, stamping it with the
-- given package name and the current compiler version.
toInterface :: Package.Name -> Optimized -> Interface
toInterface pkgName modul =
    Interface
      { iVersion  = Compiler.version
      , iPackage  = pkgName
      , iExports  = exports modul
      , iTypes    = types modulBody
      , iAnnots   = annots modulBody
      , iImports  = imports modul
      , iAdts     = datatypes modulBody
      , iAliases  = aliases modulBody
      , iFixities = fixities modulBody
      , iPorts    = ports modulBody
      }
  where
    modulBody = body modul
-- NOTE: 'get' reads exactly ten fields; their order must match the
-- order of the 'put's below (the field order of 'Interface').
instance Binary Interface where
  get = Interface <$> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get

  put modul = do
    put (iVersion modul)
    put (iPackage modul)
    put (iExports modul)
    put (iTypes modul)
    put (iAnnots modul)
    put (iImports modul)
    put (iAdts modul)
    put (iAliases modul)
    put (iFixities modul)
    put (iPorts modul)
| JoeyEremondi/elm-pattern-effects | src/AST/Module.hs | bsd-3-clause | 4,015 | 0 | 15 | 972 | 1,202 | 706 | 496 | 109 | 1 |
module Text.CarbonSuit.Processing where
import Text.CarbonSuit.Types
-- | Collapse every run of consecutive 'Prompt' blocks into a single
-- 'Prompt', joining their contents with one empty line in between.
mergePromptBlocks :: Carbon -> Carbon
mergePromptBlocks (Carbon fn blocks) = Carbon fn (merge blocks)
  where
    merge (Prompt a : Prompt b : more) = merge (Prompt (a ++ "" : b) : more)
    merge (blk : more)                 = blk : merge more
    merge []                           = []
| noteed/carbon-suit | Text/CarbonSuit/Processing.hs | bsd-3-clause | 351 | 0 | 13 | 80 | 136 | 71 | 65 | 7 | 3 |
module Jerimum.PostgreSQL.Types.DateRange
-- * CBOR codec
( dateRangeEncoderV0
, dateRangeDecoderV0
, encodeDateRange
-- * Text codec
, parseDateRange
, formatDateRange
-- * Value constructors
, fromDateRange
) where
import qualified Codec.CBOR.Decoding as D
import qualified Codec.CBOR.Encoding as E
import qualified Data.Attoparsec.Text as A
import qualified Data.Text as T
import Data.Text.Lazy (toStrict)
import Data.Text.Lazy.Builder
import Jerimum.PostgreSQL.Types
import Jerimum.PostgreSQL.Types.DateTime
import Jerimum.PostgreSQL.Types.Encoding
-- | Version tag used for the CBOR encoding of date ranges.
v0 :: Version
v0 = 0

-- | Encode an optional date range as version-tagged CBOR (version 'v0').
encodeDateRange :: Maybe (Range Day) -> E.Encoding
encodeDateRange = encodeWithVersion v0 dateRangeEncoderV0

-- | Attoparsec parser for a date range with scalar 'Day' endpoints.
dateRangeParser :: A.Parser (Range Day)
dateRangeParser = rangeParser (Scalar <$> dateParser)

-- | Parse the textual form of a date range; 'Nothing' on parse failure.
parseDateRange :: T.Text -> Maybe (Range Day)
parseDateRange = runParser "parseDateRange: parse error" dateRangeParser

-- | Render a date range back to its textual form.
formatDateRange :: Range Day -> T.Text
formatDateRange = toStrict . toLazyText . buildRange buildDate

-- | Wrap an optional date range as a 'Value' of type @RangeType TDate@.
fromDateRange :: Maybe (Range Day) -> Value
fromDateRange = mkValue (RangeType TDate) encodeDateRange

-- | CBOR encoder for the version-0 representation.
dateRangeEncoderV0 :: Range Day -> E.Encoding
dateRangeEncoderV0 = rangeEncoderV0 dateEncoderV0

-- | CBOR decoder for the version-0 representation.
dateRangeDecoderV0 :: D.Decoder s (Range Day)
dateRangeDecoderV0 = rangeDecoderV0 dateDecoderV0
| dgvncsz0f/nws | src/Jerimum/PostgreSQL/Types/DateRange.hs | bsd-3-clause | 1,461 | 0 | 8 | 318 | 317 | 183 | 134 | 32 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.Handler.Warp.Run (
Port,
runWarp,
) where
import Data.Foldable
import Network.Wai
import Network.Wai.Handler.Warp
import System.Directory
import System.Process
import qualified System.Logging.Facade as Log
-- | Serve the given WAI application on 127.0.0.1 at the given port,
-- logging (and notifying systemd, if available) once the listener is up.
runWarp :: Port -> Application -> IO ()
runWarp port application = runSettings warpSettings application
  where
    warpSettings =
      setPort port
        . setHost "127.0.0.1"
        . setBeforeMainLoop announce
        $ defaultSettings
    announce = do
      let message = "listening on port " ++ show port
      Log.info message
      systemdNotify message
-- | Tell systemd we are ready, carrying the given status message.
-- A no-op when the systemd-notify executable cannot be found on PATH.
systemdNotify :: String -> IO ()
systemdNotify message = do
  found <- findExecutable "systemd-notify"
  forM_ found $ \notifyBin -> do
    ph <- spawnProcess notifyBin ["--ready", "--status=" ++ message]
    _ <- waitForProcess ph
    return ()
| zalora/kraken | src/Network/Wai/Handler/Warp/Run.hs | bsd-3-clause | 974 | 0 | 20 | 272 | 254 | 128 | 126 | 31 | 1 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.TypeLevel.Ordering
( EQ
, LT
, GT
, Compare , OperatorCompare
, Maximum , OperatorMaximum
, Minimum , OperatorMinimum
) where
import Data.TypeLevel.Operator
-- | The result of a comparison between /a/ and /b/ such that (/a/ = /b/).
data EQ
-- | The result of a comparison between /a/ and /b/ such that (/a/ \< /b/).
data LT
-- | The result of a comparison between /a/ and /b/ such that (/a/ > /b/).
data GT
-- | Compares /a/ with /b/ to find their relative ordering /c/ such that /a/ /b/ /c/.
class ComparisonResult c => Compare a b c | a b -> c
-- | The result of a comparison between two types.
class ComparisonResult c
instance ComparisonResult EQ
instance ComparisonResult LT
instance ComparisonResult GT
-- | Finds the greater of /a/ and /b/.
class Maximum a b c | a b -> c
instance (Compare a b z, Maximum' z a b c) => Maximum a b c

-- | Finds the greater of /a/ and /b/ when the relative ordering /z/ between /a/ and /b/ is known.
class Maximum' z a b c | z a b -> c
instance Maximum' EQ a a a -- a = b: either one is the maximum
instance Maximum' LT a b b -- a < b: b is the maximum
instance Maximum' GT a b a -- a > b: a is the maximum

-- | Finds the smaller of /a/ and /b/.
class Minimum a b c | a b -> c
instance (Compare a b z, Minimum' z a b c) => Minimum a b c

-- | Finds the smaller of /a/ and /b/ when the relative ordering /z/ between /a/ and /b/ is known.
class Minimum' z a b c | z a b -> c
instance Minimum' EQ a a a -- a = b: either one is the minimum
instance Minimum' LT a b a -- a < b: a is the minimum
instance Minimum' GT a b b -- a > b: b is the minimum

-- Type-level "operator" tags so comparison, maximum, and minimum can be
-- applied through the generic 'ApplyBinary' interface.
data OperatorCompare
data OperatorMaximum
data OperatorMinimum

instance Compare a b c => ApplyBinary OperatorCompare a b c
instance Minimum a b c => ApplyBinary OperatorMinimum a b c
instance Maximum a b c => ApplyBinary OperatorMaximum a b c
| jonathanknowles/hs-type-level-prime-product | Data/TypeLevel/Ordering.hs | bsd-3-clause | 1,884 | 4 | 6 | 401 | 431 | 230 | 201 | -1 | -1 |
{-# LANGUAGE NamedFieldPuns #-}
module Network.HTTP2.Arch.Stream where
import Control.Concurrent
import Control.Concurrent.STM
import Data.IORef
import qualified Data.IntMap.Strict as M
import Imports
import Network.HTTP2.Arch.Types
import Network.HTTP2.Frame
----------------------------------------------------------------
-- | True iff the state is 'Idle'.
isIdle :: StreamState -> Bool
isIdle st = case st of
  Idle -> True
  _    -> False

-- | True iff the state is 'Open'.
isOpen :: StreamState -> Bool
isOpen st = case st of
  Open{} -> True
  _      -> False

-- | True iff the state is 'HalfClosedRemote' or 'Closed'.
isHalfClosedRemote :: StreamState -> Bool
isHalfClosedRemote st = case st of
  HalfClosedRemote -> True
  Closed _         -> True
  _                -> False

-- | True iff the state is 'HalfClosedLocal' or 'Closed'.
isHalfClosedLocal :: StreamState -> Bool
isHalfClosedLocal st = case st of
  HalfClosedLocal _ -> True
  Closed _          -> True
  _                 -> False

-- | True iff the state is 'Closed'.
isClosed :: StreamState -> Bool
isClosed st = case st of
  Closed{} -> True
  _        -> False
----------------------------------------------------------------
-- | Allocate a 'Stream' starting in the 'Idle' state with the given
-- initial flow-control window.
newStream :: StreamId -> WindowSize -> IO Stream
newStream sid win = do
  st   <- newIORef Idle
  wnd  <- newTVarIO win
  cell <- newEmptyMVar
  return (Stream sid st wnd cell)

-- | Like 'newStream' but starting in the 'Reserved' state.
newPushStream :: StreamId -> WindowSize -> IO Stream
newPushStream sid win = do
  st   <- newIORef Reserved
  wnd  <- newTVarIO win
  cell <- newEmptyMVar
  return (Stream sid st wnd cell)
----------------------------------------------------------------
{-# INLINE readStreamState #-}
-- | Read the current state of the given stream.
readStreamState :: Stream -> IO StreamState
readStreamState Stream{streamState} = readIORef streamState
----------------------------------------------------------------
-- | An empty stream table.
newStreamTable :: IO StreamTable
newStreamTable = StreamTable <$> newIORef M.empty

-- | Insert (or overwrite) the stream stored under the given key.
insert :: StreamTable -> M.Key -> Stream -> IO ()
insert (StreamTable ref) k v =
  atomicModifyIORef' ref $ \m -> (M.insert k v m, ())

-- | Remove the stream stored under the given key, if any.
remove :: StreamTable -> M.Key -> IO ()
remove (StreamTable ref) k =
  atomicModifyIORef' ref $ \m -> (M.delete k m, ())

-- | Look up the stream stored under the given key.
search :: StreamTable -> M.Key -> IO (Maybe Stream)
search (StreamTable ref) k = fmap (M.lookup k) (readIORef ref)
-- | Apply the given adjustment to the flow-control window of every
-- stream currently in the table.
--
-- Uses 'modifyTVar'' so the adjusted window is forced inside the
-- transaction instead of accumulating a chain of thunks in the 'TVar'.
updateAllStreamWindow :: (WindowSize -> WindowSize) -> StreamTable -> IO ()
updateAllStreamWindow adst (StreamTable ref) = do
    strms <- M.elems <$> readIORef ref
    forM_ strms $ \strm -> atomically $ modifyTVar' (streamWindow strm) adst
| kazu-yamamoto/http2 | Network/HTTP2/Arch/Stream.hs | bsd-3-clause | 2,385 | 0 | 12 | 516 | 662 | 340 | 322 | 53 | 1 |
#!/usr/bin/env runhaskell
{-# LANGUAGE TemplateHaskell, QuasiQuotes #-}
{-# OPTIONS_GHC -fno-cse #-}
module Main where
import Control.Concurrent
import Control.Monad
import Data.Function
import Data.Global
import Data.IORef
import Text.Printf
un "waitThreads" =:: (utl [ud| fix $ writeIORef waitThreads |], ut [t| IO () |] :: UT IORef) -- This exemplifies self-referential recursion. The action, when executed, will stop the current thread until all the threads created by 'forkIO'' have finished.
un "waits" =:: ([| () |], ut [t| () |] :: UT (UDEmpty MVar))
-- 'test' is used as an IORef would be in this program, but we declare it as an 'MVar'.
un "test" =:: ([| 'a' |], ut [t| Char |] :: UT MVar)
-- | Fork a thread; waitForThreadsToFinish will not terminate until every thread created by this does.
forkIO' :: IO () -> IO ThreadId
forkIO' m = do
  -- Extend the global join action: it must now also consume one more
  -- completion signal from 'waits' before it can finish.
  atomicModifyIORef waitThreads $ \a -> (a >> takeMVar waits, ())
  forkIO $ m >> putMVar waits () -- The thread won't terminate until 'waitThreads' consumes our signal. Using "channels" solves this problem, since the thread won't wait until the 'MVar' is empty; as is seen in a similar example.
-- | Like 'forkIO'', discarding the 'ThreadId'.
forkIO'' :: IO () -> IO ()
forkIO'' action = void (forkIO' action)

-- | Calling thread sleeps (GHC only)
sleepSeconds :: Integer -> IO ()
sleepSeconds secs = threadDelay (fromIntegral (secs * 1000000))
-- | Announce itself and advance the shared 'test' character.
thread1 :: IO ()
thread1 = do
  putStrLn "thread1 begin"
  incTest
  putStrLn "thread1 end"

-- | Announce itself and finish immediately.
remoi :: IO ()
remoi = mapM_ putStrLn ["remoi begin", "remoi end"]

-- | Announce itself, sleep for three seconds, and finish.
foobarquux :: IO ()
foobarquux = do
  putStrLn "foobarquux begin"
  putStrLn "foobarquux begins its sleeping for 3 seconds"
  sleepSeconds 3
  putStrLn "foobarquux end"
-- | Alias to reading action stored in 'waitThreads' and executing it.
-- Blocks until every thread created by 'forkIO'' has signalled 'waits'.
waitForThreadsToFinish :: IO ()
waitForThreadsToFinish = join $ readIORef waitThreads

-- | Replace the contents of the global 'test' MVar with its successor
-- ('succ' on the stored 'Char').
incTest :: IO ()
incTest = do
  modifyMVar_ test $ return . succ
-- | Demonstrates the globally declared 'MVar' and 'IORef': mutates the
-- global 'test' from several threads and joins them all at the end.
main :: IO ()
main = (>> waitForThreadsToFinish) $ do
  putStrLn . printf "'%c' was taken from 'test'" =<< takeMVar test
  putMVar test 'c'
  putStrLn "Potentially quuxy output begin"
  forkIO'' thread1
  forkIO'' remoi
  forkIO'' foobarquux
  incTest
  waitForThreadsToFinish
  putStrLn "All other threads have finished; potentially quuxy output end"
  putStrLn . printf "'test' is now '%c'" =<< readMVar test
  incTest
  putStrLn . printf "'test' is now '%c'" =<< readMVar test
  forkIO'' foobarquux
-- main will not terminate yet until all the other threads created by 'forkIO'' terminate (so we'll see "foobarquux end")
| bairyn/global | examples/mvar/Main.hs | bsd-3-clause | 2,593 | 0 | 11 | 526 | 563 | 284 | 279 | 55 | 1 |
------------------------------------------------------------------------
-- |
-- Module : ALife.Creatur.Wain.Audio.Wain
-- Copyright : (c) Amy de Buitléir 2013-2016
-- License : BSD-style
-- Maintainer : amy@nualeargais.ie
-- Stability : experimental
-- Portability : portable
--
-- Utilities for working with wains.
--
------------------------------------------------------------------------
{-# LANGUAGE Rank2Types #-}
module ALife.Creatur.Wain.Audio.Wain
(
PatternWain,
describeClassifierModels,
describePredictorModels,
adjustEnergy,
metabCost,
packageVersion
) where
import ALife.Creatur (agentId)
import qualified ALife.Creatur.Wain as W
import ALife.Creatur.Wain.Brain (classifier, predictor)
import ALife.Creatur.Wain.GeneticSOM (modelMap, numModels)
import ALife.Creatur.Wain.Pretty (pretty)
import ALife.Creatur.Wain.Audio.Pattern (Pattern)
import ALife.Creatur.Wain.Audio.Tweaker (PatternTweaker(..))
import ALife.Creatur.Wain.UnitInterval (uiToDouble)
import Control.Lens hiding (universe)
import Control.Monad.State.Lazy (StateT)
import qualified Data.Map.Strict as M
import Data.Version (showVersion)
import Paths_creatur_audio_wains (version)
import Text.Printf (printf)
-- | Returns the current version number of this library.
packageVersion :: String
packageVersion = "creatur-audio-wains-" ++ showVersion version

-- | A 'W.Wain' specialised to audio 'Pattern's and 'PatternTweaker'.
type PatternWain a rt = W.Wain Pattern PatternTweaker rt a
-- | One line per classifier model, tagged with the wain's id and the
-- model's label.
describeClassifierModels :: PatternWain a rt -> [String]
describeClassifierModels w =
    map describe (M.toList classifierModels)
  where
    classifierModels = modelMap . view (W.brain . classifier) $ w
    describe (l, r) =
      agentId w ++ "'s classifier model " ++ show l ++ " " ++ show r
-- | One line per predictor model, tagged with the wain's id and the
-- model's label.
describePredictorModels :: Show a => PatternWain a rt -> [String]
describePredictorModels w =
    map describe (M.toList predictorModels)
  where
    predictorModels = modelMap . view (W.brain . predictor) $ w
    describe (l, r) =
      agentId w ++ "'s predictor model " ++ show l ++ ": " ++ pretty r
-- | Adjust the energy of the wain behind @wainLens@ by @deltaE@, add the
-- energy actually consumed to the @statLens@ statistic inside @summary@,
-- and log the adjustment (with the given reason) through @report@.
adjustEnergy
  :: Simple Lens e (PatternWain a rt) -> Double
  -> Simple Lens s Double -> String -> Simple Lens e s
  -> (String -> StateT e IO ()) -> StateT e IO ()
adjustEnergy
  wainLens deltaE statLens reason summary report = do
    w <- use wainLens
    -- 'W.adjustEnergy' returns the adjusted wain together with the
    -- amount actually used (the "leftover" below shows it can differ
    -- from deltaE).
    let (w', used) = W.adjustEnergy deltaE w
    report $ "Adjusting energy of " ++ agentId w
      ++ " because " ++ reason ++ ": "
      ++ printf "%.3f" (uiToDouble . W._energy $ w)
      ++ " + " ++ printf "%.3f" deltaE
      ++ " -> " ++ printf "%.3f" (uiToDouble . W._energy $ w')
      ++ " used=" ++ printf "%.3f" used ++ " leftover="
      ++ printf "%.3f" (deltaE - used)
    (summary . statLens) += used
    assign wainLens w'
-- | Metabolism cost: a base cost plus a per-classifier-model cost, both
-- multiplied by the scale factor.
metabCost :: Double -> Double -> Double -> PatternWain a rt -> Double
metabCost bmc cpcm scale w = scale * (bmc + cpcm * fromIntegral modelCount)
  where
    modelCount = numModels . view (W.brain . classifier) $ w
| mhwombat/creatur-audio-wains | src/ALife/Creatur/Wain/Audio/Wain.hs | bsd-3-clause | 2,857 | 0 | 20 | 559 | 814 | 443 | 371 | 56 | 1 |
module Parser where
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Syntax
-- | Parser for a full polynomial expression, built from 'factor's and
-- the operator table 'exprSpliter'.
expression :: Parser Expr
expression = buildExpressionParser exprSpliter factor
         <?> "polynomial"

-- | Negation, represented as multiplication by -1.
neg :: Expr -> Expr
neg p = Mult (Number $ -1) p

-- | Subtraction, desugared to addition of the negation.
sub :: Expr -> Expr -> Expr
sub l r = Plus l (neg r)
-- | Operator table, from highest to lowest precedence: unary sign,
-- right-associative '^' (power), '*', then '+' and '-'.
exprSpliter :: [[Operator Char () Expr]]
exprSpliter =
  [ [op_prefix "+" id, op_prefix "-" neg]
  , [op_infix "^" Power AssocRight]
  , [op_infix "*" Mult AssocLeft]
  , [op_infix "+" Plus AssocLeft, op_infix "-" sub AssocLeft]
  ]
  where
    op_infix s f assoc = Infix (string s >> return f) assoc
    op_prefix s f = Prefix (string s >> return f)
-- | One or more digits, read as an 'Int'.
number :: Parser Int
number = many1 digit >>= return . read

-- | A factor: an integer coefficient followed by a primitive factor
-- (e.g. "3x"), or a primitive factor alone. 'try' lets the parser
-- backtrack when the number is not followed by a factor.
factor :: Parser Expr
factor = try (do coeff <- number
                 fact <- primitiveFactor
                 return $ Mult (Number coeff) fact)
     <|> primitiveFactor
     <?> "error"

-- | A primitive factor: a number, a single lower-case variable, or a
-- parenthesised expression.
primitiveFactor :: Parser Expr
primitiveFactor = try (number >>= return . Number)
              <|> (lower >>= return . Variable)
              <|> between (char '(') (char ')') expression
              <?> "error"
| asi1024/haskell-differentiator | src/Parser.hs | bsd-3-clause | 1,198 | 0 | 14 | 345 | 410 | 210 | 200 | 31 | 1 |
module Main where
import Conduit (sinkList)
import Data.Conduit ((.|), runConduitRes)
import System.Environment (getArgs)
import Lib (doubleFromFileC)
-- | Read the file named by the first command-line argument, stream its
-- numbers through 'doubleFromFileC', and print their sum.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (inputFile:_) -> do
      numbers <- runConduitRes (doubleFromFileC inputFile .| sinkList)
      print (sum numbers)
    _ -> print "Please supply the file with the list of numbers to sum"
| yanhan/bin-src | sum-lines/app/Main.hs | bsd-3-clause | 390 | 0 | 15 | 85 | 121 | 64 | 57 | 13 | 2 |
import System.Environment
import System.Log.Logger
import System.Log.Formatter
import System.Log.Handler (setFormatter)
import BitTorrent.Core
-- | Set INFO-level logging on the root logger, then run the client on
-- the torrent file named by the single command-line argument.
main :: IO ()
main = do
  updateGlobalLogger rootLoggerName (setLevel INFO)
  arguments <- getArgs
  case arguments of
    [torrentFile] -> run torrentFile
    _             -> putStrLn "Usage: torrent <file.torrent>"
| kylc/torrent | src/Main.hs | bsd-3-clause | 336 | 0 | 10 | 70 | 97 | 50 | 47 | 12 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
-----------------------------------------------------------------------------
-- |
-- Module : Main (doctests)
-- Copyright : (C) 2012-13 Edward Kmett
-- License : BSD-style (see the file LICENSE.doctests)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : portable
--
-- This module provides doctests for a project based on the actual versions
-- of the packages it was built with. It requires a corresponding Setup.lhs
-- to be added to the project
-----------------------------------------------------------------------------
module Main where
import Build_doctests (deps)
import Control.Applicative
import Control.Monad
import Data.List
import System.Directory
import System.FilePath
import Test.DocTest
-- | Run in a modified codepage where we can print UTF-8 values on Windows.
-- NOTE(review): in this build the wrapper is the identity; the codepage
-- switching presumably lives behind CPP on Windows -- confirm if needed.
withUnicode :: IO a -> IO a
withUnicode m = m
-- | Assemble doctest arguments -- source dirs, cabal autogen macros, and
-- one exact -package flag per dependency recorded by the custom Setup
-- (Build_doctests) -- then run doctest over every source file.
main :: IO ()
main = withUnicode $ getSources >>= \sources -> doctest $
    "-isrc"
  : "-idist/build/autogen"
  : "-optP-include"
  : "-optPdist/build/autogen/cabal_macros.h"
  : "-hide-all-packages"
  : map ("-package="++) deps ++ sources
-- | All .hs files under src/, found by a recursive directory walk.
getSources :: IO [FilePath]
getSources = fmap (filter (".hs" `isSuffixOf`)) (walk "src")
  where
    walk dir = do
      (subdirs, files) <- getFilesAndDirectories dir
      nested <- mapM walk subdirs
      return (files ++ concat nested)
-- | Entries of a directory (minus "." and ".."), split into
-- (subdirectories, files), each prefixed with the directory path.
getFilesAndDirectories :: FilePath -> IO ([FilePath], [FilePath])
getFilesAndDirectories dir = do
  entries <- getDirectoryContents dir
  let paths = [dir </> e | e <- entries, e `notElem` ["..", "."]]
  dirs  <- filterM doesDirectoryExist paths
  files <- filterM doesFileExist paths
  return (dirs, files)
| nushio3/citation-resolve | test/doctests.hs | bsd-3-clause | 1,647 | 0 | 13 | 279 | 320 | 177 | 143 | 29 | 1 |
{- CIS 194 HW 11
due Monday, 8 April
-}
module HW11.SExpr where
import HW11.AParser
import Control.Applicative
import Data.Char (isSpace, isAlpha, isAlphaNum)
------------------------------------------------------------
-- 1. Parsing repetitions
------------------------------------------------------------
-- | Run @p@ as many times as it succeeds, collecting the results
-- (succeeds with @[]@ when @p@ fails immediately).
zeroOrMore :: Parser a -> Parser [a]
zeroOrMore p = go
  where
    go = ((:) <$> p <*> go) <|> pure []

-- | Run @p@ at least once, then as many more times as it succeeds.
oneOrMore :: Parser a -> Parser [a]
oneOrMore p = (:) <$> p <*> zeroOrMore p
------------------------------------------------------------
-- 2. Utilities
------------------------------------------------------------
-- | Zero or more whitespace characters.
spaces :: Parser String
spaces = zeroOrMore $ satisfy isSpace

-- | An identifier: a letter followed by zero or more alphanumerics.
ident :: Parser String
ident = (:) <$> satisfy isAlpha <*> zeroOrMore (satisfy isAlphaNum)
------------------------------------------------------------
-- 3. Parsing S-expressions
------------------------------------------------------------
-- An "identifier" is represented as just a String; however, only
-- those Strings consisting of a letter followed by any number of
-- letters and digits are valid identifiers.
type Ident = String
-- An "atom" is either an integer value or an identifier.
data Atom = N Integer | I Ident
deriving Show
-- An S-expression is either an atom, or a list of S-expressions.
data SExpr = A Atom
| Comb [SExpr]
deriving Show
-- | Parse one S-expression surrounded by optional whitespace: either a
-- single atom or a parenthesised list of zero or more S-expressions.
parseSExpr :: Parser SExpr
parseSExpr = spaces *> (sExprAtom <|> sExprList) <* spaces
  where
    sExprAtom = A <$> atom
    sExprList = open *> (Comb <$> zeroOrMore parseSExpr) <* close
    atom  = N <$> posInt <|> I <$> ident
    open  = satisfy (== '(')
    close = satisfy (== ')')
| kemskems/cis194-spring13 | src/HW11/SExpr.hs | bsd-3-clause | 1,650 | 0 | 11 | 279 | 335 | 189 | 146 | 25 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : dave.laing.80@gmail.com
Stability : experimental
Portability : non-portable
-}
module Fragment.SystemF.Ast (
module X
) where
import Fragment.SystemF.Ast.Term as X
| dalaing/type-systems | src/Fragment/SystemF/Ast.hs | bsd-3-clause | 245 | 0 | 4 | 47 | 23 | 17 | 6 | 3 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module ReduceExprTests (tests) where
import Data.Fix
import Test.Tasty
import Test.Tasty.HUnit
import Nix.Atoms
import Nix.Expr.Types
import Nix.Expr.Types.Annotated
import Nix.Parser
import Nix.Reduce ( reduceExpr )
-- | Reduction test tree: 'NSelect' on literal attribute sets must
-- reduce to the selected value, while selects through missing
-- attribute paths must be left untouched.
tests :: TestTree
tests = testGroup
  "Expr Reductions"
  [ testCase "Non nested NSelect on set should be reduced"
      $ cmpReduceResult selectBasic selectBasicExpect
  , testCase "Nested NSelect on set should be reduced"
      $ cmpReduceResult selectNested selectNestedExpect
  , testCase "Non nested NSelect with incorrect attrpath shouldn't be reduced"
      $ shouldntReduce selectIncorrectAttrPath
  , testCase "Nested NSelect with incorrect attrpath shouldn't be reduced"
      $ shouldntReduce selectNestedIncorrectAttrPath
  ]
-- | Unwrap a parse 'Result', failing the test on a parse error.
assertSucc :: Result a -> IO a
assertSucc res = case res of
  Success a -> pure a
  Failure d -> assertFailure (show d)
-- | Assert that reducing the parsed expression yields @expected@
-- (annotations stripped before comparison).
cmpReduceResult :: Result NExprLoc -> NExpr -> Assertion
cmpReduceResult parseResult expected = do
  parsed  <- assertSucc parseResult
  reduced <- stripAnnotation <$> reduceExpr Nothing parsed
  reduced @?= expected
-- | Assert that reduction leaves the parsed expression unchanged.
shouldntReduce :: Result NExprLoc -> Assertion
shouldntReduce parseResult = do
  parsed  <- assertSucc parseResult
  reduced <- reduceExpr Nothing parsed
  parsed @?= reduced
-- | Simple select on a literal attribute set.
selectBasic :: Result NExprLoc
selectBasic = parseNixTextLoc "{b=2;a=42;}.a"

-- | Expected reduction of 'selectBasic': the literal 42.
selectBasicExpect :: NExpr
selectBasicExpect = Fix . NConstant $ NInt 42

-- | Nested select through two literal attribute sets.
selectNested :: Result NExprLoc
selectNested = parseNixTextLoc "{a={b=2;a=42;};b={a=2;};}.a.a"

-- | Expected reduction of 'selectNested': the literal 42.
selectNestedExpect :: NExpr
selectNestedExpect = Fix . NConstant $ NInt 42

-- | Select with an attribute that is not present.
selectIncorrectAttrPath :: Result NExprLoc
selectIncorrectAttrPath = parseNixTextLoc "{a=42;}.b"

-- | Nested select whose inner attribute is not present.
selectNestedIncorrectAttrPath :: Result NExprLoc
selectNestedIncorrectAttrPath = parseNixTextLoc "{a={a=42;};}.a.b"
| jwiegley/hnix | tests/ReduceExprTests.hs | bsd-3-clause | 1,891 | 0 | 9 | 377 | 398 | 201 | 197 | 47 | 1 |
module OneLineParser (parseOneLine, parseOneLinePure) where
import System.Exit
import Text.Parsec hiding (Line, lookAhead, spaces)
import Text.Parsec.String
import Text.Parsec.Error
import qualified Text.Parsec as P
-- Parser utilities
-- | A nicer way to print an error message
--
-- Layout: source name and column, the offending input line, a caret
-- under the failing column, then Parsec's own message list.
lineParserErrorMessage :: String -> ParseError -> String
lineParserErrorMessage input err =
    "In " ++ sourceName pos ++ " column " ++ show (sourceColumn pos) ++ ":\n" ++
    input ++ "\n" ++
    replicate (sourceColumn pos - 1) ' ' ++ "↑" ++
    showErrorMessages "or" "unknown parse err" "expecting"
                      "unexpected" "end of input"
                      (errorMessages err)
  where pos = errorPos err
-- | Like 'parseOneLinePure', but print the error message and exit the
-- process on failure.
parseOneLine :: Parser a -> String -> String -> IO a
parseOneLine p name input =
  either bail return (parseOneLinePure p name input)
  where
    bail msg = putStrLn msg >> exitFailure
-- | Run the parser on one line of input, rendering any parse error with
-- 'lineParserErrorMessage'.
parseOneLinePure :: Parser a -> String -> String -> Either String a
parseOneLinePure p name input =
  case P.parse p name input of
    Left err  -> Left (lineParserErrorMessage input err)
    Right val -> Right val
| colinba/tip-toi-reveng | src/OneLineParser.hs | mit | 1,124 | 0 | 14 | 273 | 325 | 165 | 160 | 25 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2007-2010 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.ConTeXt
Copyright : Copyright (C) 2007-2010 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' format into ConTeXt.
-}
module Text.Pandoc.Writers.ConTeXt ( writeConTeXt ) where
import Text.Pandoc.Definition
import Text.Pandoc.Shared
import Text.Printf ( printf )
import Data.List ( intercalate )
import Control.Monad.State
import Text.Pandoc.Pretty
import Text.Pandoc.Templates ( renderTemplate )
import Network.URI ( isAbsoluteURI, unEscapeString )
data WriterState =
  WriterState { stNextRef          :: Int -- number of next URL reference
              , stOrderedListLevel :: Int -- level of ordered list
              , stOptions          :: WriterOptions -- writer options
              }

-- | ConTeXt numbering styles for nested ordered lists; 'cycle' makes
-- the list infinite so any nesting level can be indexed with (!!).
orderedListStyles :: [[Char]]
orderedListStyles = cycle ["[n]","[a]", "[r]", "[g]"]
-- | Convert Pandoc to ConTeXt.
writeConTeXt :: WriterOptions -> Pandoc -> String
writeConTeXt options document =
  evalState (pandocToConTeXt options document) initialState
  where
    initialState = WriterState { stNextRef = 1
                               , stOrderedListLevel = 0
                               , stOptions = options
                               }
-- | Render a 'Pandoc' document to ConTeXt inside the writer-state monad.
pandocToConTeXt :: WriterOptions -> Pandoc -> State WriterState String
pandocToConTeXt options (Pandoc (Meta title authors date) blocks) = do
  -- Wrap output at the configured column width, or not at all.
  let colwidth = if writerWrapText options
                 then Just $ writerColumns options
                 else Nothing
  titletext <- if null title
               then return ""
               else liftM (render colwidth) $ inlineListToConTeXt title
  authorstext <- mapM (liftM (render colwidth) . inlineListToConTeXt) authors
  datetext <- if null date
              then return ""
              else liftM (render colwidth) $ inlineListToConTeXt date
  body <- blockListToConTeXt blocks
  let main = render colwidth $ body
  -- Template variables; one "author" entry per author.
  let context = writerVariables options ++
        [ ("toc", if writerTableOfContents options then "yes" else "")
        , ("body", main)
        , ("title", titletext)
        , ("date", datetext) ] ++
        [ ("author", a) | a <- authorstext ]
  return $ if writerStandalone options
           then renderTemplate context $ writerTemplate options
           else main
-- escape things as needed for ConTeXt

-- | Escape a single character for ConTeXt output; characters with no
-- special meaning are passed through unchanged.
escapeCharForConTeXt :: Char -> String
escapeCharForConTeXt ch =
  case lookup ch contextEscapes of
    Just escaped -> escaped
    Nothing      -> [ch]
  where
    contextEscapes =
      [ ('{', "\\letteropenbrace{}")
      , ('}', "\\letterclosebrace{}")
      , ('\\', "\\letterbackslash{}")
      , ('$', "\\$")
      , ('|', "\\letterbar{}")
      , ('^', "\\letterhat{}")
      , ('%', "\\%")
      , ('~', "\\lettertilde{}")
      , ('&', "\\&")
      , ('#', "\\#")
      , ('<', "\\letterless{}")
      , ('>', "\\lettermore{}")
      , ('[', "{[}")
      , (']', "{]}")
      , ('_', "\\letterunderscore{}")
      , ('\160', "~")
      ]
-- | Escape string for ConTeXt
stringToConTeXt :: String -> String
stringToConTeXt = concatMap escapeCharForConTeXt
-- | Convert Pandoc block element to ConTeXt.
blockToConTeXt :: Block
               -> State WriterState Doc
blockToConTeXt Null = return empty
blockToConTeXt (Plain lst) = inlineListToConTeXt lst
-- A paragraph consisting of a single image becomes an unnumbered figure.
blockToConTeXt (Para [Image txt (src,_)]) = do
  capt <- inlineListToConTeXt txt
  return $ blankline $$ "\\placefigure[here,nonumber]" <> braces capt <>
           braces ("\\externalfigure" <> brackets (text src)) <> blankline
blockToConTeXt (Para lst) = do
  contents <- inlineListToConTeXt lst
  return $ contents <> blankline
blockToConTeXt (BlockQuote lst) = do
  contents <- blockListToConTeXt lst
  return $ "\\startblockquote" $$ nest 0 contents $$ "\\stopblockquote" <> blankline
blockToConTeXt (CodeBlock _ str) =
  return $ flush ("\\starttyping" <> cr <> text str <> cr <> "\\stoptyping") $$ blankline
-- blankline because \stoptyping can't have anything after it, inc. '}'
-- Raw ConTeXt passes through verbatim; raw blocks in other formats drop.
blockToConTeXt (RawBlock "context" str) = return $ text str <> blankline
blockToConTeXt (RawBlock _ _ ) = return empty
blockToConTeXt (BulletList lst) = do
  contents <- mapM listItemToConTeXt lst
  return $ "\\startitemize" $$ vcat contents $$ text "\\stopitemize" <> blankline
blockToConTeXt (OrderedList (start, style', delim) lst) = do
  st <- get
  -- Track nesting depth so 'orderedListStyles' can vary per level;
  -- restore the previous level once the items are rendered.
  let level = stOrderedListLevel st
  put $ st {stOrderedListLevel = level + 1}
  contents <- mapM listItemToConTeXt lst
  put $ st {stOrderedListLevel = level}
  let start' = if start == 1 then "" else "start=" ++ show start
  let delim' = case delim of
                 DefaultDelim -> ""
                 Period -> "stopper=."
                 OneParen -> "stopper=)"
                 TwoParens -> "left=(,stopper=)"
  -- Width (in ems) of the widest item marker, so numbers line up.
  let width = maximum $ map length $ take (length contents)
              (orderedListMarkers (start, style', delim))
  let width' = (toEnum width + 1) / 2
  let width'' = if width' > (1.5 :: Double)
                then "width=" ++ show width' ++ "em"
                else ""
  let specs2Items = filter (not . null) [start', delim', width'']
  let specs2 = if null specs2Items
               then ""
               else "[" ++ intercalate "," specs2Items ++ "]"
  let style'' = case style' of
                  DefaultStyle -> orderedListStyles !! level
                  Decimal -> "[n]"
                  Example -> "[n]"
                  LowerRoman -> "[r]"
                  UpperRoman -> "[R]"
                  LowerAlpha -> "[a]"
                  UpperAlpha -> "[A]"
  let specs = style'' ++ specs2
  return $ "\\startitemize" <> text specs $$ vcat contents $$
           "\\stopitemize" <> blankline
blockToConTeXt (DefinitionList lst) =
  liftM vcat $ mapM defListItemToConTeXt lst
blockToConTeXt HorizontalRule = return $ "\\thinrule" <> blankline
blockToConTeXt (Header level lst) = do
  contents <- inlineListToConTeXt lst
  st <- get
  let opts = stOptions st
  -- "section" yields numbered headings, "subject" unnumbered ones.
  let base = if writerNumberSections opts then "section" else "subject"
  -- In chapter mode, level 1 becomes \chapter and deeper levels shift up.
  let level' = if writerChapters opts then level - 1 else level
  return $ if level' >= 1 && level' <= 5
           then char '\\' <> text (concat (replicate (level' - 1) "sub")) <>
                text base <> char '{' <> contents <> char '}' <> blankline
           else if level' == 0
                then "\\chapter{" <> contents <> "}"
                else contents <> blankline
blockToConTeXt (Table caption aligns widths heads rows) = do
  -- One descriptor per column: an alignment letter plus either a plain
  -- separator or a proportional width.
  let colDescriptor colWidth alignment = (case alignment of
                                            AlignLeft -> 'l'
                                            AlignRight -> 'r'
                                            AlignCenter -> 'c'
                                            AlignDefault -> 'l'):
        if colWidth == 0
        then "|"
        else ("p(" ++ printf "%.2f" colWidth ++ "\\textwidth)|")
  let colDescriptors = "|" ++ (concat $
                               zipWith colDescriptor widths aligns)
  headers <- if all null heads
             then return empty
             else liftM ($$ "\\HL") $ tableRowToConTeXt heads
  captionText <- inlineListToConTeXt caption
  let captionText' = if null caption then text "none" else captionText
  rows' <- mapM tableRowToConTeXt rows
  return $ "\\placetable[here]" <> braces captionText' $$
           "\\starttable" <> brackets (text colDescriptors) $$
           "\\HL" $$ headers $$
           vcat rows' $$ "\\HL" $$ "\\stoptable" <> blankline
-- | Convert one table row (a list of cells, each a list of blocks) to
-- ConTeXt: each cell is prefixed with \NC and the row is closed by \NC\AR.
tableRowToConTeXt :: [[Block]] -> State WriterState Doc
tableRowToConTeXt cells = do
  renderedCells <- mapM blockListToConTeXt cells
  let prefixed = map ("\\NC " <>) renderedCells
  return $ vcat prefixed $$ "\\NC\\AR"
-- | Convert a single list item to ConTeXt: \item followed by the item's
-- contents, indented two spaces.
listItemToConTeXt :: [Block] -> State WriterState Doc
listItemToConTeXt blocks = do
  contents <- blockListToConTeXt blocks
  return $ "\\item" $$ nest 2 contents
-- | Convert a definition-list item (term plus one or more definitions) to a
-- ConTeXt \startdescr .. \stopdescr group.
defListItemToConTeXt :: ([Inline], [[Block]]) -> State WriterState Doc
defListItemToConTeXt (term, definitions) = do
  termDoc <- inlineListToConTeXt term
  defDocs <- mapM blockListToConTeXt definitions
  return $ "\\startdescr" <> braces termDoc $$
           nest 2 (vsep defDocs) $$
           "\\stopdescr" <> blankline
-- | Convert a list of block elements to ConTeXt, stacking them vertically.
blockListToConTeXt :: [Block] -> State WriterState Doc
blockListToConTeXt blocks = do
  docs <- mapM blockToConTeXt blocks
  return $ vcat docs
-- | Convert a list of inline elements to ConTeXt, concatenating the results
-- horizontally.
inlineListToConTeXt :: [Inline]    -- ^ Inlines to convert
                    -> State WriterState Doc
inlineListToConTeXt inlines = do
  docs <- mapM inlineToConTeXt inlines
  return $ hcat docs
-- | Convert inline element to ConTeXt
inlineToConTeXt :: Inline    -- ^ Inline to convert
                -> State WriterState Doc
-- Font-switch markup is wrapped in braces so the switch stays local.
inlineToConTeXt (Emph lst) = do
  contents <- inlineListToConTeXt lst
  return $ braces $ "\\em " <> contents
inlineToConTeXt (Strong lst) = do
  contents <- inlineListToConTeXt lst
  return $ braces $ "\\bf " <> contents
inlineToConTeXt (Strikeout lst) = do
  contents <- inlineListToConTeXt lst
  return $ "\\overstrikes" <> braces contents
inlineToConTeXt (Superscript lst) = do
  contents <- inlineListToConTeXt lst
  return $ "\\high" <> braces contents
inlineToConTeXt (Subscript lst) = do
  contents <- inlineListToConTeXt lst
  return $ "\\low" <> braces contents
inlineToConTeXt (SmallCaps lst) = do
  contents <- inlineListToConTeXt lst
  return $ braces $ "\\sc " <> contents
-- \type{..} is verbatim and cannot contain braces itself, so code containing
-- braces falls through to \mono with escaping instead.
inlineToConTeXt (Code _ str) | not ('{' `elem` str || '}' `elem` str) =
  return $ "\\type" <> braces (text str)
inlineToConTeXt (Code _ str) =
  return $ "\\mono" <> braces (text $ stringToConTeXt str)
inlineToConTeXt (Quoted SingleQuote lst) = do
  contents <- inlineListToConTeXt lst
  return $ "\\quote" <> braces contents
inlineToConTeXt (Quoted DoubleQuote lst) = do
  contents <- inlineListToConTeXt lst
  return $ "\\quotation" <> braces contents
-- Citations are rendered as their textual representation only.
inlineToConTeXt (Cite _ lst) = inlineListToConTeXt lst
inlineToConTeXt Apostrophe = return $ char '\''
inlineToConTeXt EmDash = return "---"
inlineToConTeXt EnDash = return "--"
inlineToConTeXt Ellipses = return "\\ldots{}"
inlineToConTeXt (Str str) = return $ text $ stringToConTeXt str
-- Math is passed through untouched between the TeX math delimiters.
inlineToConTeXt (Math InlineMath str) =
  return $ char '$' <> text str <> char '$'
inlineToConTeXt (Math DisplayMath str) =
  return $ text "\\startformula " <> text str <> text " \\stopformula"
-- Raw snippets are emitted only for the "context" and "tex" formats;
-- anything else is dropped.
inlineToConTeXt (RawInline "context" str) = return $ text str
inlineToConTeXt (RawInline "tex" str) = return $ text str
inlineToConTeXt (RawInline _ _) = return empty
inlineToConTeXt (LineBreak) = return $ text "\\crlf" <> cr
inlineToConTeXt Space = return space
inlineToConTeXt (Link [Code _ str] (src, tit)) = -- since ConTeXt has its own
  inlineToConTeXt (Link [Str str] (src, tit)) -- way of printing links...
-- Each link consumes a fresh numeric reference from the writer state; the
-- URL is registered with \useURL and referenced with \from.
inlineToConTeXt (Link txt (src, _)) = do
  st <- get
  let next = stNextRef st
  put $ st {stNextRef = next + 1}
  let ref = show next
  label <- inlineListToConTeXt txt
  return $ "\\useURL" <> brackets (text ref) <> brackets (text src) <>
           brackets empty <> brackets label <>
           "\\from" <> brackets (text ref)
-- Local image paths are unescaped; absolute URIs are used verbatim.
inlineToConTeXt (Image _ (src, _)) = do
  let src' = if isAbsoluteURI src
                then src
                else unEscapeString src
  return $ braces $ "\\externalfigure" <> brackets (text src')
inlineToConTeXt (Note contents) = do
  contents' <- blockListToConTeXt contents
  return $ text "\\footnote{" <>
           nest 2 contents' <> char '}'
| Lythimus/lptv | sites/all/modules/jgm-pandoc-8be6cc2/src/Text/Pandoc/Writers/ConTeXt.hs | gpl-2.0 | 12,489 | 0 | 21 | 3,439 | 3,269 | 1,615 | 1,654 | 240 | 23 |
{-
- Copyright (c) 2017 The Agile Monkeys S.L. <hackers@theam.io>
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Main where
import HaskellDo
import Data.Maybe (fromMaybe)
import System.Directory
import System.Environment (getArgs)
import Text.Read (readMaybe)
-- | Port the web server listens on when none is supplied on the command line.
defaultPort :: Integer
defaultPort = 3001
-- | Locate the @haskell-do@ executable, switch to its parent directory, and
-- start the web UI.
--
-- The listening port may be given as the single command-line argument; an
-- argument that does not parse as an integer falls back to 'defaultPort'
-- instead of crashing (previously a partial 'read' was used).
main :: IO ()
main = do
    args <- getArgs
    hdopath <- findExecutable "haskell-do"
    case hdopath of
        Just p -> do
            -- Strip the file component, keeping the trailing '/'.
            -- NOTE(review): assumes '/' separators — not Windows-safe.
            let parentDir = reverse . dropWhile (/= '/') . reverse
            setCurrentDirectory (parentDir p)
        Nothing ->
#ifdef ghcjs_HOST_OS
            return ()
#else
            error "haskell.do must be on PATH"
#endif
    -- 'readMaybe' instead of 'read': malformed input no longer throws.
    let port = case args of
            [x] -> fromMaybe defaultPort (readMaybe x) :: Integer
            _ -> defaultPort
    showWelcomeMessage
    let msg = " Open the following URL in your browser: http://localhost:" ++ show port ++ " "
    putStrLn $ replicate (length msg) '='
    putStrLn msg
    putStrLn $ replicate (length msg) '='
    run port
-- | Print the start-up ASCII-art banner to stdout.
showWelcomeMessage :: IO ()
showWelcomeMessage = putStrLn $
       "\n"
    ++ "        ________________\n"
    ++ "      ´                  ` ,--. ,--.               ,--.,--.       ,--.        ,--.\n"
    ++ "     | /\\|            |    \\ | | ,---.  ,--,--.   ,---.  | |,-. ,---. | || |       ,-|  | ,---.\n"
    ++ "     | /\\\\|           | /\\ | | .-. |' ,-. |(  .-' |     /| .-. :|  || |      ' .-. || .-. |\n"
    ++ "     | (  `|          |´//) | |  | | |\\ '-'  |.-'  `)|  \\  \\\\   --.|  ||  |.--.\\ `-' |' '-' '\n"
    ++ "     |  \\  |          |///  | `--' `--' `--`--'`----' `--'`--'`----'`--'`--''--'  `---'  `---'\n"
    ++ "     |   \\|           |/    |\n"
    ++ "      \\________________/\n"
| J2RGEZ/haskell-do | src/common/Main.hs | apache-2.0 | 2,193 | 0 | 18 | 677 | 307 | 149 | 158 | 38 | 3 |
module Tests.Matrix (tests) where
import Statistics.Matrix hiding (map)
import Statistics.Matrix.Algorithms
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck (testProperty)
import Test.QuickCheck
import Tests.ApproxEq (ApproxEq(..))
import Tests.Matrix.Types
import qualified Data.Vector.Unboxed as U
-- | Extracting a row from the converted matrix yields the corresponding
-- source list of the 'Mat' representation.
t_row :: Mat Double -> Gen Property
t_row mat@(Mat nRows _ rowsData) =
  choose (0, nRows - 1) >>= \idx ->
    return $ row (fromMat mat) idx === U.fromList (rowsData !! idx)
-- | Extracting a column from the converted matrix matches taking the same
-- index from every row of the 'Mat' representation.
t_column :: Mat Double -> Gen Property
t_column mat@(Mat _ nCols cells) = do
  idx <- choose (0, nCols - 1)
  let expected = map (!! idx) cells
  return $ column (fromMat mat) idx === U.fromList expected
-- | 'center' picks the element at the (floor-)middle row and column.
t_center :: Mat Double -> Property
t_center mat@(Mat r c cells) = expected === center (fromMat mat)
  where
    expected = (cells !! (r `quot` 2)) !! (c `quot` 2)
-- | Concatenating the columns of the transpose reconstructs the original
-- matrix's flat vector of elements.
t_transpose :: Matrix -> Property
t_transpose m = colsOfTransposed === toVector m
  where
    t = transpose m
    colsOfTransposed = U.concat [column t j | j <- [0 .. rows m - 1]]
-- | Multiplying the QR factors back together approximately recovers the
-- original matrix (or the product contains NaNs, which we tolerate).
t_qr :: Matrix -> Property
t_qr a = hasNaN recomposed .||. eql 1e-10 a recomposed
  where
    (q, r) = qr a
    recomposed = multiply q r
-- | All matrix properties bundled into a single test tree.
tests :: TestTree
tests = testGroup "Matrix"
  [ testProperty "t_row" t_row
  , testProperty "t_column" t_column
  , testProperty "t_center" t_center
  , testProperty "t_transpose" t_transpose
  , testProperty "t_qr" t_qr
  ]
| bos/statistics | tests/Tests/Matrix.hs | bsd-2-clause | 1,202 | 0 | 11 | 218 | 515 | 272 | 243 | 33 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeOperators #-}
-- These warnings can be inaccurate, because of conditional compilation.
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
{-# OPTIONS_GHC -Wno-unused-imports #-}
#else
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
#endif
-- | __WARNING:__ This module is an experiment to see how 'Alternative' may be encoded.
-- The authors are not aware of any generalized applicatives that make use of 'Alternative'.
-- Hence, we do not know if this encoding of it is sufficient.
-- Therefore, the encoding is not in its final form and may change in the future.
module Control.Super.Monad.Alternative
( AlternativeEmpty(..)
, AlternativeAlt(..)
) where
import qualified Prelude as P
import qualified Control.Applicative as A
import GHC.Exts ( Constraint )
import qualified GHC.Conc as STM
--import qualified Control.Arrow as Arrow
--import qualified Control.Applicative as Applic
import qualified Data.Monoid as Mon
import qualified Text.ParserCombinators.ReadP as ReadP
import qualified Text.ParserCombinators.ReadPrec as ReadPrec
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
import qualified GHC.Generics as Generics
import qualified Data.Semigroup as Semigroup
import qualified Data.Proxy as Proxy
import qualified Data.Functor.Product as Product ( Product(..) )
import qualified Data.Functor.Compose as Compose ( Compose(..) )
#endif
import Control.Super.Monad.Prelude
( ($), Applicative(..), Functor(..) )
-- Import of 'Functor' required for GHC 8+ instances.
-- | The encoding of the 'empty' operation.
--
-- 'Return' is not a superclass, because the indices or constraints involved
-- in an 'AlternativeEmpty' instance may differ from those involved with the 'Return'
-- instance.
--
-- __WARNING:__ This module is an experiment to see how 'Alternative' may be encoded.
-- The authors are not aware of any generalized applicatives that make use of 'Alternative'.
-- Hence, we do not know if this encoding of it is sufficient.
-- Therefore, the encoding is not in its final form and may change in the future.
-- The associated constraint family defaults to the empty constraint, so
-- plain (unindexed) instances only need to define 'empty'.
class (Functor f) => AlternativeEmpty f where
  -- | Constraints required by this functor's 'empty' operation.
  type AlternativeEmptyCts f :: Constraint
  type AlternativeEmptyCts f = ()
  empty :: AlternativeEmptyCts f => f a
-- Base instances: all delegate directly to the standard 'A.Alternative'.
instance AlternativeEmpty [] where
  empty = A.empty
instance AlternativeEmpty P.Maybe where
  empty = A.empty
-- The 'Alternative IO' instance only exists from GHC 8 / base 4.9 on.
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance AlternativeEmpty P.IO where
  empty = A.empty
#endif
instance AlternativeEmpty ReadP.ReadP where
  empty = A.empty
instance AlternativeEmpty ReadPrec.ReadPrec where
  empty = A.empty
instance AlternativeEmpty STM.STM where
  empty = A.empty
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance AlternativeEmpty Semigroup.Option where
  empty = A.empty
instance AlternativeEmpty Proxy.Proxy where
  empty = A.empty
#endif
-- Structural instances: constraints are collected from the components.
instance (AlternativeEmpty f) => AlternativeEmpty (Mon.Alt f) where
  type AlternativeEmptyCts (Mon.Alt f) = AlternativeEmptyCts f
  empty = Mon.Alt $ empty
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance (AlternativeEmpty f, AlternativeEmpty f') => AlternativeEmpty (Product.Product f f') where
  type AlternativeEmptyCts (Product.Product f f') = (AlternativeEmptyCts f, AlternativeEmptyCts f')
  empty = Product.Pair empty empty
instance (AlternativeEmpty f, AlternativeEmpty f') => AlternativeEmpty (Compose.Compose f f') where
  type AlternativeEmptyCts (Compose.Compose f f') = (AlternativeEmptyCts f, AlternativeEmptyCts f')
  empty = Compose.Compose $ empty
#endif
-- TODO: ArrowMonad and WrappedMonad instances. These lead to cyclic dependencies.
-- GHC.Generics representation types (available from GHC 8 on).
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance AlternativeEmpty Generics.U1 where
  empty = A.empty
instance AlternativeEmpty f => AlternativeEmpty (Generics.Rec1 f) where
  type AlternativeEmptyCts (Generics.Rec1 f) = AlternativeEmptyCts f
  empty = Generics.Rec1 empty
instance (AlternativeEmpty f, AlternativeEmpty g) => AlternativeEmpty (f Generics.:*: g) where
  type AlternativeEmptyCts (f Generics.:*: g) = (AlternativeEmptyCts f, AlternativeEmptyCts g)
  empty = empty Generics.:*: empty
instance (AlternativeEmpty f, AlternativeEmpty g) => AlternativeEmpty (f Generics.:.: g) where
  type AlternativeEmptyCts (f Generics.:.: g) = (AlternativeEmptyCts f, AlternativeEmptyCts g)
  empty = Generics.Comp1 $ empty
instance AlternativeEmpty f => AlternativeEmpty (Generics.M1 i c f) where
  type AlternativeEmptyCts (Generics.M1 i c f) = AlternativeEmptyCts f
  empty = Generics.M1 $ empty
#endif
-- | The encoding of the '<|>' operation.
--
-- 'Applicative' is not a superclass, because the indices or constraints involved
-- in an 'Alternative' instance may differ from those involved with the 'Applicative'
-- instance.
--
-- __WARNING:__ This module is an experiment to see how 'Alternative' may be encoded.
-- The authors are not aware of any generalized applicatives that make use of 'Alternative'.
-- Hence, we do not know if this encoding of it is sufficient.
-- Therefore, the encoding is not in its final form and may change in the future.
-- The three functor indices allow the two arguments and the result of '<|>'
-- to live in different (but related) functors.
class (Functor f, Functor g, Functor h) => AlternativeAlt f g h where
  -- | Constraints required by this combination's '<|>' operation.
  type AlternativeAltCts f g h :: Constraint
  type AlternativeAltCts f g h = ()
  (<|>) :: AlternativeAltCts f g h => f a -> g a -> h a
-- Base instances: uniform indices, delegating to the standard 'A.Alternative'.
instance AlternativeAlt [] [] [] where
  (<|>) = (A.<|>)
instance AlternativeAlt P.Maybe P.Maybe P.Maybe where
  (<|>) = (A.<|>)
-- The 'Alternative IO' instance only exists from GHC 8 / base 4.9 on.
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance AlternativeAlt P.IO P.IO P.IO where
  (<|>) = (A.<|>)
#endif
instance AlternativeAlt ReadP.ReadP ReadP.ReadP ReadP.ReadP where
  (<|>) = (A.<|>)
instance AlternativeAlt ReadPrec.ReadPrec ReadPrec.ReadPrec ReadPrec.ReadPrec where
  (<|>) = (A.<|>)
instance AlternativeAlt STM.STM STM.STM STM.STM where
  (<|>) = (A.<|>)
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance AlternativeAlt Semigroup.Option Semigroup.Option Semigroup.Option where
  (<|>) = (A.<|>)
instance AlternativeAlt Proxy.Proxy Proxy.Proxy Proxy.Proxy where
  (<|>) = (A.<|>)
#endif
-- Structural instances: constraints are collected from the components.
instance (AlternativeAlt f g h) => AlternativeAlt (Mon.Alt f) (Mon.Alt g) (Mon.Alt h) where
  type AlternativeAltCts (Mon.Alt f) (Mon.Alt g) (Mon.Alt h) = AlternativeAltCts f g h
  (Mon.Alt ma) <|> (Mon.Alt na) = Mon.Alt $ ma <|> na
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance (AlternativeAlt f g h, AlternativeAlt f' g' h') => AlternativeAlt (Product.Product f f') (Product.Product g g') (Product.Product h h') where
  type AlternativeAltCts (Product.Product f f') (Product.Product g g') (Product.Product h h') = (AlternativeAltCts f g h, AlternativeAltCts f' g' h')
  Product.Pair m1 m2 <|> Product.Pair n1 n2 = Product.Pair (m1 <|> n1) (m2 <|> n2)
-- TODO: This does the application of '<|>' on the inner type constructors, whereas the original
-- implementation for the standard classes applies '<|>' on the outer type constructors.
instance (Applicative f g h, AlternativeAlt f' g' h') => AlternativeAlt (Compose.Compose f f') (Compose.Compose g g') (Compose.Compose h h') where
  type AlternativeAltCts (Compose.Compose f f') (Compose.Compose g g') (Compose.Compose h h') = (ApplicativeCts f g h, AlternativeAltCts f' g' h')
  (Compose.Compose f) <|> (Compose.Compose g) = Compose.Compose $ fmap (<|>) f <*> g
#endif
-- TODO: ArrowMonad and WrappedMonad instances. These lead to cyclic dependencies.
-- GHC.Generics representation types (available from GHC 8 on).
#if MIN_VERSION_GLASGOW_HASKELL(8,0,0,0)
instance AlternativeAlt Generics.U1 Generics.U1 Generics.U1 where
  (<|>) = (A.<|>)
instance AlternativeAlt f g h => AlternativeAlt (Generics.Rec1 f) (Generics.Rec1 g) (Generics.Rec1 h) where
  type AlternativeAltCts (Generics.Rec1 f) (Generics.Rec1 g) (Generics.Rec1 h) = AlternativeAltCts f g h
  (Generics.Rec1 f) <|> (Generics.Rec1 g) = Generics.Rec1 $ f <|> g
instance (AlternativeAlt f g h, AlternativeAlt f' g' h') => AlternativeAlt (f Generics.:*: f') (g Generics.:*: g') (h Generics.:*: h') where
  type AlternativeAltCts (f Generics.:*: f') (g Generics.:*: g') (h Generics.:*: h') = (AlternativeAltCts f g h, AlternativeAltCts f' g' h')
  (f Generics.:*: g) <|> (f' Generics.:*: g') = (f <|> f') Generics.:*: (g <|> g')
-- TODO: This does the application of '<|>' on the inner type constructors, whereas the original
-- implementation for the standard classes applies '<|>' on the outer type constructors.
instance (Applicative f g h, AlternativeAlt f' g' h') => AlternativeAlt (f Generics.:.: f') (g Generics.:.: g') (h Generics.:.: h') where
  type AlternativeAltCts (f Generics.:.: f') (g Generics.:.: g') (h Generics.:.: h') = (ApplicativeCts f g h, AlternativeAltCts f' g' h')
  (Generics.Comp1 f) <|> (Generics.Comp1 g) = Generics.Comp1 $ fmap (<|>) f <*> g
instance AlternativeAlt f g h => AlternativeAlt (Generics.M1 i c f) (Generics.M1 i c g) (Generics.M1 i c h) where
  type AlternativeAltCts (Generics.M1 i c f) (Generics.M1 i c g) (Generics.M1 i c h) = AlternativeAltCts f g h
  (Generics.M1 f) <|> (Generics.M1 g) = Generics.M1 $ f <|> g
#endif
| jbracker/supermonad-plugin | src/Control/Super/Monad/Alternative.hs | bsd-3-clause | 9,063 | 0 | 10 | 1,434 | 2,350 | 1,292 | 1,058 | 52 | 0 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
-- |
-- Module : Data.Array.Accelerate.Trafo.Vectorise
-- Copyright   : [2012..2013] Manuel M T Chakravarty, Gabriele Keller, Trevor L. McDonell, Robert Clifton-Everest
-- License : BSD3
--
-- Maintainer : Robert Clifton-Everest <robertce@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Performs Blelloch's flattening transform on an embedded accelerate computation.
--
module Data.Array.Accelerate.Trafo.Vectorise (
vectoriseSeq,
vectoriseSeqAcc,
vectoriseSeqAfun,
liftOpenAfun1,
liftOpenAfun2,
Size,
Strength(..),
Context(..)
) where
import Prelude hiding ( exp, replicate, concat )
import qualified Prelude as P
import Data.Typeable
import Control.Applicative hiding ( Const )
import Data.Maybe
-- friends
import Data.Array.Accelerate.AST
import Data.Array.Accelerate.Analysis.Match ( matchIdx )
import Data.Array.Accelerate.Array.Lifted
import Data.Array.Accelerate.Array.Representation ( SliceIndex(..) )
import Data.Array.Accelerate.Array.Sugar
import Data.Array.Accelerate.Trafo.Base
import Data.Array.Accelerate.Pretty ()
import Data.Array.Accelerate.Trafo.Substitution
import Data.Array.Accelerate.Product
import Data.Array.Accelerate.Type
import qualified Data.Array.Accelerate.Smart as S
import qualified Data.Array.Accelerate.Prelude as S
import qualified Data.Array.Accelerate.Language as S
import qualified Data.Array.Accelerate.Trafo.Sharing as S
import qualified Data.Array.Accelerate.Debug as Debug
import Data.Array.Accelerate.Error
-- |Encodes the relationship between the old environments and the new environments during the
-- lifting transform
--
-- NOTE(review): the unprimed indices (env, aenv) track the environments
-- before lifting; the primed ones (env', aenv') track the environments of
-- the corresponding lifted terms.
data Context env aenv env' aenv' where
  -- All environments are empty
  EmptyC :: Context () () () ()
  -- An expression that has already been lifted
  PushLExpC :: Elt e
            => Context env aenv env' aenv'
            -> Context (env, e) aenv env' (aenv', Vector e)
  -- An unlifted expression
  PushExpC  :: Elt e
            => Context env aenv env' aenv'
            -> Context (env, e) aenv (env',e) aenv'
  -- A lifted array expression
  PushLAccC :: Arrays t
            => Context env aenv env' aenv'
            -> Context env (aenv, t) env' (aenv', Vector' t)
  -- An unlifted array expression
  PushAccC  :: Arrays t
            => Context env aenv env' aenv'
            -> Context env (aenv, t) env' (aenv', t)
-- | Vectorisation strength. 'Conservative' and 'HoistOnly' prefer avoiding
-- lifting; 'HoistOnly' errors if lifting would actually be required.
data Strength = Aggressive | Conservative | HoistOnly | Nested deriving Show
-- | Shape of the vectorisation transform over closed array terms,
-- abstracted over the underlying AST representation 'acc'.
type VectoriseAcc acc = forall aenv aenv' t.
                        Arrays t
                     => Strength
                     -> Context () aenv () aenv'
                     -> Size acc aenv'
                     -> acc aenv t
                     -> LiftedAcc acc aenv' t
-- | A slice marker wrapping a shape all of whose dimensions are fixed:
-- its slice shape is 'Z' and its co-slice is the full shape 'sh'.
data None sh = None sh
  deriving (Typeable, Show, Eq)
type instance EltRepr (None sh) = EltRepr sh
instance Shape sh => Elt (None sh) where
  eltType _ = eltType (undefined::sh)
  fromElt (None sh) = fromElt sh
  toElt sh = None (toElt sh)
instance Shape sh => Slice (None sh) where
  type SliceShape   (None sh) = Z
  type CoSliceShape (None sh) = sh
  type FullShape    (None sh) = sh
  sliceIndex _ = sliceNoneIndex (undefined :: sh)
instance Shape sh => IsProduct Elt (None sh) where
  type ProdRepr (None sh) = ((),sh)
  fromProd _ (None sh) = ((),sh)
  toProd _ ((),sh) = None sh
  prod _ _ = ProdRsnoc ProdRunit
-- Lifting terms
-- ---------------
--
-- |The size parameter in the lifting transform.
--
type Size acc aenv = PreExp acc aenv Int  -- a scalar Int expression: the lifted iteration count
-- | Result of lifting an array term: either the original term (lifting was
-- avoided) or its vectorised counterpart over 'Vector''.
data LiftedAcc acc aenv t = AvoidedAcc (acc aenv t)
                          | LiftedAcc (acc aenv (Vector' t))
instance RebuildableAcc acc => Rebuildable (LiftedAcc acc) where
  type AccClo (LiftedAcc acc) = acc
  rebuildPartial v (AvoidedAcc a) = AvoidedAcc <$> rebuildPartial v a
  rebuildPartial v (LiftedAcc a)  = LiftedAcc <$> rebuildPartial v a
instance Sink acc => Sink (LiftedAcc acc) where
  weaken k (AvoidedAcc a) = AvoidedAcc (weaken k a)
  weaken k (LiftedAcc a)  = LiftedAcc (weaken k a)
-- | Result of lifting a scalar expression. The avoided form carries the
-- extra array bindings ('Extend') the expression was floated out under.
data LiftedExp acc env aenv t where
  AvoidedExp :: Extend acc aenv aenv'
             -> PreOpenExp acc env aenv' t
             -> LiftedExp acc env aenv t
  LiftedExp  :: acc aenv (Vector t)
             -> LiftedExp acc env aenv t
-- | A scalar function for which lifting was avoided, together with the
-- array bindings it was floated out under.
data AvoidedFun acc env aenv t where
  AvoidedFun :: Extend acc aenv aenv'
             -> PreOpenFun acc env aenv' t
             -> AvoidedFun acc env aenv t
type LiftedOpenAcc aenv t = LiftedAcc OpenAcc aenv t
-- | Apply one of two transformations to a lifted term, depending on whether
-- lifting was avoided or not.
over :: (acc aenv t -> acc' aenv' t')
     -> (acc aenv (Vector' t) -> acc' aenv' (Vector' t'))
     -> LiftedAcc acc aenv t
     -> LiftedAcc acc' aenv' t'
over onAvoided onLifted la =
  case la of
    AvoidedAcc a -> AvoidedAcc (onAvoided a)
    LiftedAcc  l -> LiftedAcc  (onLifted l)
-- | Inject both alternatives of a lifted term back into the closed AST form.
injectL :: Kit acc => LiftedAcc (PreOpenAcc acc) aenv t -> LiftedAcc acc aenv t
injectL = over inject inject
-- | The vectorisation transform instantiated at 'OpenAcc'.
vectoriseOpenAcc :: Arrays t
                 => Strength
                 -> Context () aenv () aenv'
                 -> Size OpenAcc aenv'
                 -> OpenAcc aenv t
                 -> LiftedOpenAcc aenv' t
vectoriseOpenAcc strength ctx size (OpenAcc a) = liftPreOpenAcc vectoriseOpenAcc strength ctx size a
-- | Number of elements in a lifted value, computed by dispatching on the
-- representation flavour of 't' (unit, single array, or tuple of arrays).
liftedSize :: forall acc aenv t.
              (Kit acc, Arrays t, Arrays (Vector' t))
           => acc aenv (Vector' t)
           -> Size acc aenv
liftedSize a =
  case flavour (undefined :: t) of
    ArraysFunit  -> Index (inject $ Aprj ZeroTupIdx a) IndexNil
    ArraysFarray -> ShapeSize (Shape $ inject $ Aprj (SuccTupIdx ZeroTupIdx) a)
    ArraysFtuple -> fromTup $ prod (Proxy :: Proxy Arrays) (undefined :: t)
  where
    -- For a tuple, the size of any component is the size of the whole;
    -- an empty tuple has size 0.
    fromTup :: (ArrRepr t ~ (l,e), IsAtuple t) => ProdR Arrays (TupleRepr t) -> Size acc aenv
    fromTup ProdRunit     = Const 0
    fromTup (ProdRsnoc _) = convince a
      where
        -- 'convince' only serves to bring the component's 'Arrays'
        -- evidence into scope before recursing on the last component.
        convince :: forall f l a e. (ArrRepr t ~ (l,e), TupleRepr t ~ (f,a), Arrays a)
                 => acc aenv (Vector' t)
                 -> Size acc aenv
        convince a | IsC <- isArraysFlat (undefined :: a)
                   = liftedSize $^ Aprj ZeroTupIdx a
-- |Lift a unary open array function
--
liftOpenAfun1 :: forall aenv aenv' a b.
                 Strength
              -> Context () aenv () aenv'
              -> OpenAfun aenv (a -> b)
              -> OpenAfun aenv' (Vector' a -> Vector' b)
liftOpenAfun1 strength ctx (Alam (Abody f))
  -- The 'trace' guard always succeeds; it only logs the start of the pass.
  | trace "liftOpenAfun1" ("Starting " ++ show strength ++ " vectorisation") True
  , IsC <- isArraysFlat (undefined :: a)
  , IsC <- isArraysFlat (undefined :: b)
  = case vectoriseOpenAcc Conservative (PushLAccC ctx) (liftedSize avar0) f of
      -- In the case that the body of the function does not depend on its argument,
      -- conservative vectorisation will return the unmodified body. In this case,
      -- we just need to replicate the result.
      AvoidedAcc a' -> Alam . Abody $ replicateC (inject $ Unit (liftedSize avar0)) a'
      -- Otherwise, we have the lifted body.
      LiftedAcc  a' -> Alam . Abody $ a'
liftOpenAfun1 _ _ _
  = error "Unreachable"
-- |Lift a binary open array function
--
liftOpenAfun2 :: forall aenv aenv' a b c.
                 Strength
              -> Context () aenv () aenv'
              -> Size OpenAcc aenv'
              -> OpenAfun aenv (a -> b -> c)
              -> OpenAfun aenv' (Vector' a -> Vector' b -> Vector' c)
liftOpenAfun2 strength ctx sz (Alam (Alam (Abody f)))
  -- The 'trace' guard always succeeds; it only logs the start of the pass.
  | trace "liftOpenAfun2" ("Starting " ++ show strength ++ " vectorisation") True
  , IsC <- isArraysFlat (undefined :: a)
  , IsC <- isArraysFlat (undefined :: b)
  , IsC <- isArraysFlat (undefined :: c)
  = case vectoriseOpenAcc Conservative (PushLAccC . PushLAccC $ ctx) (weakenA2 sz) f of
      -- In the case that the body of the function does not depend on its argument,
      -- conservative vectorisation will return the unmodified body. In this case,
      -- we just need to replicate the result.
      AvoidedAcc a' -> Alam . Alam . Abody $ replicateC (inject $ Unit (weakenA2 sz)) a'
      -- Otherwise, we have the lifted body.
      LiftedAcc  a' -> Alam . Alam . Abody $ a'
liftOpenAfun2 _ _ _ _
  = error "Unreachable"
-- |The core of the lifting transformation for array expression.
--
liftPreOpenAcc :: forall acc aenv aenv' t. (Kit acc, Arrays t)
=> VectoriseAcc acc
-> Strength
-> Context () aenv () aenv'
-> Size acc aenv'
-> PreOpenAcc acc aenv t
-> LiftedAcc acc aenv' t
liftPreOpenAcc vectAcc strength ctx size acc
| IsC <- isArraysFlat (undefined :: t)
= case acc of
Alet a b -> aletL a b
Avar ix -> avarL ix
Atuple tup -> atupleL tup
Aprj tup a -> aprjL tup a
Apply f a -> applyL f a
Aforeign ff afun as -> foreignL ff afun as
Acond p t e -> acondL p t e
Awhile p it i -> awhileL p it i
Use a -> useL a
Unit e -> unitL e
Reshape e a -> reshapeL e a
Generate e f -> generateL e f
-- Transform only appears as part of subsequent optimsations.
Transform _ _ _ _ -> $internalError "liftPreOpenAcc" "Unable to vectorise Transform"
Replicate sl slix a -> replicateL sl slix a
Slice sl a slix -> sliceL sl a slix
Map f a -> mapL f a
ZipWith f a1 a2 -> zipWithL f a1 a2
Fold f z a -> foldL f z a
Fold1 f a -> fold1L f a
FoldSeg f z a s -> foldSegL f z a s
Fold1Seg f a s -> fold1SegL f a s
Scanl f z a -> scanlL f z a
Scanl' f z a -> scanl'L f z a
Scanl1 f a -> scanl1L f a
Scanr f z a -> scanrL f z a
Scanr' f z a -> scanr'L f z a
Scanr1 f a -> scanr1L f a
Permute f1 a1 f2 a2 -> permuteL f1 a1 f2 a2
Backpermute sh f a -> backpermuteL sh f a
Stencil f b a -> stencilL f b a
Stencil2 f b1 a1 b2 a2
-> stencil2L f b1 a1 b2 a2
Collect _ -> error "Nested sequence"
where
nestedError :: String -> String -> String
nestedError place op = "Unexpect nested parallelism in " ++ place ++ " argument to " ++ op
hoistingOnlyError :: a
hoistingOnlyError = error "The vectorisation strength is set to hoisting only, but nested parallelism has been encountered"
avoidLifting | Conservative <- strength = True
| HoistOnly <- strength = True
| otherwise = False
cvtA :: forall t. Arrays t => acc aenv t -> LiftedAcc acc aenv' t
cvtA a = vectAcc strength ctx size a
liftedAcc :: forall aenv t. acc aenv (Vector' t) -> LiftedAcc acc aenv t
liftedAcc | HoistOnly <- strength
= hoistingOnlyError
| otherwise
= trace "liftPreOpenAcc" ("Lifting Acc term " ++ showPreAccOp acc) . LiftedAcc
liftE :: forall env env' aenv aenv' e. (Elt e)
=> Context env aenv env' aenv'
-> Size acc aenv'
-> PreOpenExp acc env aenv e
-> PreOpenAcc acc aenv' (Vector e)
liftE | HoistOnly <- strength
= hoistingOnlyError
| otherwise
= liftExp vectAcc strength
cvtE :: forall e. Elt e
=> PreExp acc aenv e
-> LiftedExp acc () aenv' e
cvtE e | avoidLifting
, Avoided (b,e') <- avoidE e
= AvoidedExp b e'
| otherwise
= trace "liftPreOpenAcc" ("Expression had to be lifted: " ++ showPreExpOp e)
$ LiftedExp $ inject $ liftE ctx size e
cvtE' :: forall e. Elt e
=> PreExp acc aenv e
-> LiftedExp acc () aenv' e
cvtE' e | Avoided (b,e') <- avoidE e
= AvoidedExp b e'
| otherwise
= trace "liftPreOpenAcc" ("Expression had to be lifted: " ++ showPreExpOp e)
$ LiftedExp $ inject $ liftE ctx size e
cvtT :: forall t.
Atuple (acc aenv) t
-> Atuple (LiftedAcc acc aenv') t
cvtT NilAtup = NilAtup
cvtT (SnocAtup t (a :: acc aenv a)) | IsC <- isArraysFlat (undefined :: a)
= SnocAtup (cvtT t) (cvtA a)
liftTupleIdx :: forall t a. TupleIdx t a -> TupleIdx (LiftedTupleRepr t) (Vector' a)
liftTupleIdx ZeroTupIdx = ZeroTupIdx
liftTupleIdx (SuccTupIdx ix) = SuccTupIdx (liftTupleIdx ix)
-- We are slightly introducing nested parallelism here in that we are embedding `Aprj` in an
-- `Exp`. Fusion should always be able to handle this case properly however.
liftedSize :: forall aenv t. (Arrays t, Arrays (Vector' t))
=> acc aenv (Vector' t)
-> Size acc aenv
liftedSize a = case flavour (undefined :: t) of
ArraysFunit -> Index (inject $ Aprj ZeroTupIdx a) IndexNil
ArraysFarray -> ShapeSize (Shape $ segments a)
ArraysFtuple -> fromTup $ prod (Proxy :: Proxy Arrays) (undefined :: t)
where
fromTup :: (ArrRepr t ~ (l,e), IsAtuple t) => ProdR Arrays (TupleRepr t) -> Size acc aenv
fromTup ProdRunit = Const 0
fromTup (ProdRsnoc _) = convince a
where
convince :: forall f l a e. (ArrRepr t ~ (l,e), TupleRepr t ~ (f,a), Arrays a)
=> acc aenv (Vector' t)
-> Size acc aenv
convince a | IsC <- isArraysFlat (undefined :: a)
= liftedSize $^ Aprj ZeroTupIdx a
liftAfun1 :: forall a b. (Arrays a, Arrays b)
=> PreOpenAfun acc aenv (a -> b)
-> ( PreOpenAfun acc aenv' (Vector' a -> Vector' b)
, Maybe (PreOpenAfun acc aenv' (a -> b)))
liftAfun1 (Alam (Abody b))
| IsC <- isArraysFlat (undefined :: a)
, IsC <- isArraysFlat (undefined :: b)
= let
lft = case vectAcc strength (PushLAccC ctx) (liftedSize avar0) b of
-- Result does not actually depend on the argument.
AvoidedAcc b' -> Alam (Abody (replicateA b' (liftedSize avar0)))
-- Result does depend on argument
LiftedAcc b' -> Alam (Abody b')
pln = case vectAcc strength (PushAccC ctx) (weakenA1 size) b of
AvoidedAcc b' -> Just $ Alam (Abody b')
LiftedAcc _ -> Nothing
in (lft,pln)
liftAfun1 _ = error "Inconsistent"
cvtF1 :: forall a b. (Elt a, Elt b)
=> PreFun acc aenv (a -> b)
-> ( PreOpenAfun acc aenv' (Vector a -> Vector b)
, Maybe (AvoidedFun acc () aenv' (a -> b)))
cvtF1 f@(Lam (Body e))
= let l = Alam (Abody (inject $ liftE (PushLExpC ctx) (ShapeSize (Shape avar0)) e))
in case (avoidF f) of
Avoided (b, Lam (Body e')) | avoidLifting
-> (l, Just $ AvoidedFun b (Lam (Body e')))
_ -> trace "liftPreOpenAcc" ("Function had to be lifted: \n" ++ show f)
$ (l, Nothing)
cvtF1 f = $internalError "cvtF1" ("Inconsistent valuation" ++ show f)
cvtF2 :: forall a b c. (Elt a, Elt b, Elt c)
=> PreFun acc aenv (a -> b -> c)
-> ( PreOpenAfun acc aenv' (Vector a -> Vector b -> Vector c)
, Maybe (AvoidedFun acc () aenv' (a -> b -> c)))
cvtF2 f@(Lam (Lam (Body e)))
= let l = Alam (Alam (Abody (inject $ liftE (PushLExpC (PushLExpC ctx))
(ShapeSize (Shape avar0))
e)))
in case (avoidF f) of
Avoided (b, Lam (Lam (Body e'))) | avoidLifting
-> (l, Just $ AvoidedFun b (Lam (Lam (Body e'))))
_ -> trace "liftPreOpenAcc" ("Function had to be lifted: \n" ++ show f)
$ (l, Nothing)
cvtF2 _ = $internalError "cvtF2" "Inconsistent valuation"
cvtF2' :: forall a b c. (Elt a, Elt b, Elt c)
=> PreFun acc aenv (a -> b -> c)
-> ( PreOpenAfun acc aenv' (Vector a -> Vector b -> Vector c)
, Maybe (AvoidedFun acc () aenv' (a -> b -> c)))
cvtF2' f@(Lam (Lam (Body e)))
= let l = Alam (Alam (Abody (inject $ liftE (PushLExpC (PushLExpC ctx))
(ShapeSize (Shape avar0))
e)))
in case (avoidF f) of
Avoided (b, Lam (Lam (Body e'))) -> (l, Just $ AvoidedFun b (Lam (Lam (Body e'))))
_ -> trace "liftPreOpenAcc" ("Function had to be lifted")
$ (l, Nothing)
cvtF2' _
= $internalError "cvtF2" "Inconsistent valuation"
    -- Split an array of pairs into a pair of arrays by mapping 'fst' and
    -- 'snd' over a shared let-bound copy of the input.
    unzip :: forall aenv a b sh.
             (Elt a, Elt b, Shape sh)
          => acc aenv (Array sh (a,b))
          -> acc aenv (Array sh (a,b))
          -> acc aenv (Array sh a, Array sh b)
    unzip a = inject
            $ Alet a
            $ atup (inject $ Map (Lam . Body $ fstE var0) avar0)
                   (inject $ Map (Lam . Body $ sndE var0) avar0)
    -- Build a lifted (irregular) array from its segment descriptor and its
    -- flattened element vector, as a 2-tuple.
    construct :: forall aenv e sh.
                 (Elt e, Shape sh)
              => acc aenv (Segments sh)
              -> acc aenv (Vector e)
              -> acc aenv (LiftedArray sh e)
    construct segs vals
      = inject
      $ Atuple (SnocAtup (SnocAtup NilAtup segs) vals)
    -- Project the segment descriptor (first component) of a lifted array.
    segments :: forall aenv e sh.
                (Elt e, Shape sh)
             => acc aenv (LiftedArray sh e)
             -> acc aenv (Segments sh)
    segments arrs
      = inject $ Aprj (SuccTupIdx ZeroTupIdx) arrs
    -- Project the flattened values (second component) of a lifted array.
    values :: forall aenv e sh.
              (Elt e, Shape sh)
           => acc aenv (LiftedArray sh e)
           -> acc aenv (Vector e)
    values arrs
      = inject $ Aprj ZeroTupIdx arrs
    -- Force a possibly-avoided array computation into its lifted form,
    -- replicating an avoided term 'size' times.
    lifted :: forall t. Arrays t => LiftedAcc acc aenv' t -> acc aenv' (Vector' t)
    lifted (AvoidedAcc a) = replicateA a size
    lifted (LiftedAcc l) = l
    -- As 'lifted', but for scalar expressions: an avoided expression is
    -- replicated under its captured bindings.
    liftedE :: forall t. Elt t => LiftedExp acc () aenv' t -> acc aenv' (Vector t)
    liftedE (AvoidedExp b e) = inject $ bind b $ replicateE e (sink b size)
    liftedE (LiftedExp e) = e
    -- Unfortunately, IndexTail, IndexHead and IndexCons requires the Slice constraint, which we
    -- don't always have.
    -- Split an (sh:.Int) index into the pair (sh, innermost Int), using
    -- slice machinery instead of IndexTail/IndexHead (see note above).
    indexSplit :: forall env aenv sh. Shape sh
               => PreOpenExp acc env aenv (sh:.Int)
               -> PreOpenExp acc env aenv (sh, Int)
    indexSplit sh = Let sh $ Let tail $ tup var0 (IndexHead head)
      where
        -- the outer sh component of the index
        tail = IndexSlice ix slix var0
        head :: PreOpenExp acc ((env,(sh:.Int)),sh) aenv DIM1
        -- the innermost dimension, extracted as a DIM1 index
        head = IndexSlice ix' slix' var1
        ix = sliceIndex (undefined :: Any sh :. Int)
        slix :: forall env. PreOpenExp acc env aenv (Any sh :. Int)
        slix = IndexCons IndexAny (Const (0 :: Int))
        slix' :: forall env. PreOpenExp acc (env,sh) aenv (None sh :. All)
        slix' = IndexCons (Tuple (SnocTup NilTup var0)) (Const ())
        ix' :: SliceIndex (EltRepr sh, ()) ((),Int) (EltRepr sh) (EltRepr sh, Int)
        ix' = SliceAll (sliceNoneIndex (undefined :: sh))
    -- Snoc an Int onto an index (sh -> sh:.Int) via IndexFull, avoiding the
    -- Slice constraint that a direct IndexCons would require.
    indexCons :: forall env aenv sh. Shape sh
              => PreOpenExp acc env aenv sh
              -> PreOpenExp acc env aenv Int
              -> PreOpenExp acc env aenv (sh:.Int)
    indexCons t h = IndexFull (sliceIndex (undefined :: Any sh :. Int)) slix t
      where
        slix :: PreOpenExp acc env aenv (Any sh :. Int)
        slix = IndexCons IndexAny h
    -- Vector' versions of combinators.
    -- ===================================
    -- Lifted let-binding: if the bound term was avoided, the body is
    -- vectorised under a plain-array context, otherwise under a lifted one.
    aletL :: forall bnd. (Arrays bnd, Arrays t)
          => acc aenv bnd
          -> acc (aenv, bnd) t
          -> LiftedAcc acc aenv' t
    aletL bnd body | IsC <- isArraysFlat (undefined :: bnd)
                   , IsC <- isArraysFlat (undefined :: t)
      = injectL
      $ case (cvtA bnd) of
          AvoidedAcc a -> over (Alet a) (Alet a)
                        $ vectAcc strength (PushAccC ctx) (weakenA1 size) body
          a -> over (Alet (lifted a)) (Alet (lifted a))
                        $ vectAcc strength (PushLAccC ctx) (weakenA1 size) body
    -- Lifted array variable: walk the context to translate the source-level
    -- de Bruijn index into the transformed environment, tracking whether the
    -- binding site was lifted or avoided.
    avarL :: Arrays t
          => Idx aenv t
          -> LiftedAcc acc aenv' t
    avarL | IsC <- isArraysFlat (undefined :: t)
      = cvtIx ctx
      where
        cvtIx :: forall env aenv env' aenv'. Arrays (Vector' t)
              => Context env aenv env' aenv'
              -> Idx aenv t
              -> LiftedAcc acc aenv' t
        cvtIx (PushLExpC d) ix = weakenA1 (cvtIx d ix)
        --cvtIx (PushExpC d) ix = weakenE1 (cvtIx d ix)
        cvtIx (PushLAccC _) ZeroIdx = liftedAcc $ avar0
        cvtIx (PushLAccC d) (SuccIdx ix) = weakenA1 (cvtIx d ix)
        cvtIx (PushAccC _) ZeroIdx = AvoidedAcc avar0
        cvtIx (PushAccC d) (SuccIdx ix) = weakenA1 (cvtIx d ix)
        cvtIx _ _ = $internalError "liftExp" "Inconsistent valuation"
    -- Lifted tuple construction. The whole tuple is avoided only if every
    -- component was avoided; as soon as one component is lifted, all the
    -- already-avoided components are replicated to match.
    atupleL :: (Arrays t, IsAtuple t, Arrays (Vector' t))
            => Atuple (acc aenv) (TupleRepr t)
            -> LiftedAcc acc aenv' t
    atupleL t = case atl (cvtT t) of
                  Left (a,_) -> AvoidedAcc (inject $ Atuple a)
                  Right a -> case flavour (undefined :: t) of
                               ArraysFunit -> liftedAcc $^ Atuple (SnocAtup NilAtup (inject $ Unit size))
                               ArraysFtuple -> liftedAcc $ inject $ Atuple a
                               _ -> error "Absurd"
      where
        -- Left: all components avoided so far (kept in both avoided and
        -- replicated form). Right: at least one component forced lifting.
        atl :: forall t.
               Atuple (LiftedAcc acc aenv') t
            -> Either (Atuple (acc aenv') t, Atuple (acc aenv') (LiftedTupleRepr t))
                      (Atuple (acc aenv') (LiftedTupleRepr t))
        atl NilAtup = Left (NilAtup, NilAtup)
        atl (SnocAtup t a) = case atl t of
                               Left (av,li) | AvoidedAcc (a' :: acc aenv' a) <- a
                                            , IsC <- isArraysFlat (undefined :: a)
                                            -> Left (SnocAtup av a', SnocAtup li (replicateA a' size))
                                            | LiftedAcc (a' :: acc aenv' (Vector' a)) <- a
                                            , IsC <- isArraysFlat (undefined :: a)
                                            -> Right (SnocAtup li a')
                               Right li | LiftedAcc (a' :: acc aenv' (Vector' a)) <- a
                                        , IsC <- isArraysFlat (undefined :: a)
                                        -> Right (SnocAtup li a')
                                        | AvoidedAcc (a' :: acc aenv' a) <- a
                                        , IsC <- isArraysFlat (undefined :: a)
                                        -> Right (SnocAtup li (replicateA a' size))
                               _ -> error "Unreachable code"
    -- Lifted tuple projection: project with the original index when avoided,
    -- or with the index translated into the lifted tuple representation.
    aprjL :: forall a arrs. (Arrays a, Arrays arrs, IsAtuple arrs, Arrays (Vector' a))
          => TupleIdx (TupleRepr arrs) a
          -> acc aenv arrs
          -> LiftedAcc acc aenv' a
    aprjL tup a | IsC <- isArraysFlat (undefined :: arrs)
                , ArraysFtuple <- flavour (undefined :: arrs)
                = injectL $ over (Aprj tup) (Aprj (liftTupleIdx tup)) (cvtA a)
                | otherwise = error "Absurd"
    -- Lifted function application: use the plain function on an avoided
    -- argument when possible, otherwise apply the lifted function to the
    -- (possibly replicated) lifted argument.
    applyL :: forall a1 a2.
              (Arrays a1, Arrays a2, Arrays (Vector' a2))
           => PreOpenAfun acc aenv (a1 -> a2)
           -> acc aenv a1
           -> LiftedAcc acc aenv' a2
    applyL f a1 | IsC <- isArraysFlat (undefined :: a1)
                , let (lft, pln) = liftAfun1 f
                = case cvtA a1 of
                    AvoidedAcc a1' | avoidLifting
                                   , Just f' <- pln
                                   -> AvoidedAcc $ inject $ Apply f' a1'
                                   | otherwise
                                   -> liftedAcc $ inject $ Apply lft (replicateA a1' size)
                    a1'            -> liftedAcc $ inject $ Apply lft (lifted a1')
    -- Foreign functions cannot be lifted: only the avoided form is accepted.
    foreignL :: (Arrays arrs, Arrays t, Foreign f)
             => f arrs t
             -> PreAfun acc (arrs -> t)
             -> acc aenv arrs
             -> LiftedAcc acc aenv' t
    foreignL ff afun (cvtA -> AvoidedAcc as)
      = AvoidedAcc $ inject $ Aforeign ff afun as
    foreignL _ _ _
      = error $ nestedError "first" "foreign"
    -- Lifted conditional: fully avoided when predicate and both branches
    -- are; partially avoided when only the predicate is; otherwise a
    -- per-element lifted condition over both (lifted) branches.
    acondL :: (Arrays t, Arrays (Vector' t))
           => PreExp acc aenv Bool
           -> acc aenv t
           -> acc aenv t
           -> LiftedAcc acc aenv' t
    acondL (cvtE -> p) (cvtA -> t) (cvtA -> e)
      | avoidLifting
      , AvoidedExp b p' <- p
      , AvoidedAcc t' <- t
      , AvoidedAcc e' <- e
      = AvoidedAcc $ inject $ bind b $ Acond p' (sink b t') (sink b e')
      | AvoidedExp b p' <- p
      = liftedAcc $ inject $ bind b $ Acond p' (sink b (lifted t)) (sink b (lifted e))
      | otherwise
      = liftedAcc $ liftedCondC (liftedE p) (lifted t) (lifted e)
    -- TODO: Try to find a way to clean this up
    -- Lifted while-loop: iterates a triple of (current values, per-element
    -- continue flags, any-flag-still-set scalar). Each iteration only
    -- updates the elements whose flag is still set (via liftedCondC), and
    -- the loop stops when no flags remain set.
    awhileL :: forall t. (Arrays t, Arrays (Vector' t))
            => PreOpenAfun acc aenv (t -> Scalar Bool)
            -> PreOpenAfun acc aenv (t -> t)
            -> acc aenv t
            -> LiftedAcc acc aenv' t
    awhileL (liftAfun1 -> (pred_l, pred_p)) (liftAfun1 -> (iter_l, iter_p)) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      , Just pred_p' <- pred_p
      , Just iter_p' <- iter_p
      = AvoidedAcc $ inject $ Awhile pred_p' iter_p' a'
      | otherwise
      = liftedAcc
      $^ Alet (lifted a)
      $^ let
           init = avar0
           -- initial state: values, their initial flags, and whether any
           -- flag is set
           init' = inject $ Alet (values $ inject $ weakenA1 pred_l `subApply` init)
                          $ atup3 (weakenA1 init) avar0 (fromHOAS S.or avar0)
           -- loop condition: the cached "any flag set" scalar
           pred' = Alam $ Abody $ inject $ Aprj ZeroTupIdx avar0
           iter' :: acc (aenv', s) (Vector' t)
                 -> acc (aenv', s) (Vector Bool)
                 -> acc (aenv', s) (Scalar Bool)
                 -> acc (aenv', s) (Vector' t, Vector Bool, Scalar Bool)
           iter' a f _ = let a' = liftedCondC f (inject $ weakenA1 iter_l `subApply` a) a
                             f' = fromHOAS (S.zipWith (S.&&*)) f (values $ inject $ weakenA1 pred_l `subApply` a')
                             c' = fromHOAS S.or f'
                         in atup3 a' f' c'
           iter'' :: PreOpenAfun acc aenv' ((Vector' t, Vector Bool, Scalar Bool)
                  -> (Vector' t, Vector Bool, Scalar Bool))
           iter'' = Alam $ Abody $ iter' (inject $ Aprj (SuccTupIdx . SuccTupIdx $ ZeroTupIdx) avar0)
                                         (inject $ Aprj (SuccTupIdx ZeroTupIdx) avar0)
                                         (inject $ Aprj ZeroTupIdx avar0)
         in Aprj (SuccTupIdx . SuccTupIdx $ ZeroTupIdx)
         $^ Awhile pred'
                   (weakenA1 iter'')
                   init'
    -- Use of a concrete array is always avoided.
    useL :: Arrays a
         => ArrRepr a
         -> LiftedAcc acc aenv' a
    useL a = AvoidedAcc $ inject $ Use a
    -- Lifted unit: an avoided expression stays a Unit; a lifted expression
    -- becomes a lifted array of unit-shaped segments over its vector.
    unitL :: Elt e
          => PreExp acc aenv e
          -> LiftedAcc acc aenv' (Scalar e)
    unitL e = case cvtE e of
                AvoidedExp b e | avoidLifting
                  -> AvoidedAcc $ inject $ bind b $ Unit e
                a -> liftedAcc
                   $ construct (inject $ replicateE (Const ()) size) (liftedE a)
    -- Lifted reshape: plain Reshape when both shape and array were avoided,
    -- otherwise a per-segment lifted reshape.
    reshapeL :: forall sh sh' e.
                (Shape sh, Shape sh', Elt e)
             => PreExp acc aenv sh
             -> acc aenv (Array sh' e)
             -> LiftedAcc acc aenv' (Array sh e)
    reshapeL (cvtE -> sh) (cvtA -> a)
      | avoidLifting
      , AvoidedExp b sh' <- sh
      , AvoidedAcc a' <- a
      = AvoidedAcc $ inject $ (bind b $ Reshape sh' (sink b a'))
      | otherwise
      = liftedAcc $ liftedReshapeC (liftedE sh) (lifted a)
    -- Lifted generate. Three cases: fully avoided; avoided shape but lifted
    -- function (apply the lifted function to the index space, then reshape);
    -- fully lifted (enumerate every segment's index space).
    generateL :: forall sh e. (Elt e, Shape sh)
              => PreExp acc aenv sh
              -> PreFun acc aenv (sh -> e)
              -> LiftedAcc acc aenv' (Array sh e)
    generateL (cvtE -> e) (cvtF1 -> (f_l, f_a))
      | avoidLifting
      , AvoidedExp b1 e' <- e
      , Just (AvoidedFun b2 f) <- f_a
      = AvoidedAcc
      $^ bind b2
      $  Alet (sink b2 $ inject $ bind b1 $ Unit e')
      $^ Generate (the avar0) (weakenA1 f)
      | avoidLifting
      , AvoidedExp b1 e' <- e
      = AvoidedAcc
      $^ Alet (inject $ bind b1 $ Unit e')
      $^ Reshape (the avar0) (inject $ weakenA1 f_l `subApply` extentVector (the avar0))
      | otherwise
      = liftedAcc
      $^ Alet (liftedE e)
      $  construct avar0
                   (inject $ weakenA1 f_l `subApply` (enumSegC avar0))
    -- Lifted replicate: avoided form uses Replicate directly; otherwise the
    -- replicate is re-expressed as a Backpermute over the source indices and
    -- re-run through 'cvtA'.
    replicateL :: forall sh sl slix e co.
                  (Shape sh, Shape sl, Elt slix, Elt e)
               => SliceIndex (EltRepr slix)
                             (EltRepr sl)
                             co
                             (EltRepr sh)
               -> PreExp acc aenv slix
               -> acc aenv (Array sl e)
               -> LiftedAcc acc aenv' (Array sh e)
    replicateL sl (cvtE -> slix) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      , AvoidedExp b slix' <- slix
      = AvoidedAcc
      $^ bind b
      $  Replicate sl slix' (sink b a')
    replicateL sl slix a
      = cvtA
      $^ Alet a
      $^ Alet (inject $ Unit $ weakenA1 slix)
      $^ Backpermute (IndexFull sl (the avar0) (Shape avar1))
                     (Lam $ Body $ IndexSlice sl (weakenE1 (the avar0)) $ var0)
      $  avar1
    -- Lifted slice: dual of 'replicateL'; the fallback re-expresses the
    -- slice as a Backpermute and re-runs it through 'cvtA'.
    sliceL :: forall sh sl slix e co.
              (Shape sh, Shape sl, Elt slix, Elt e)
           => SliceIndex (EltRepr slix)
                         (EltRepr sl)
                         co
                         (EltRepr sh)
           -> acc aenv (Array sh e)
           -> PreExp acc aenv slix
           -> LiftedAcc acc aenv' (Array sl e)
    sliceL sl (cvtA -> a) (cvtE -> slix)
      | avoidLifting
      , AvoidedAcc a' <- a
      , AvoidedExp b slix' <- slix
      = AvoidedAcc
      $^ bind b
      $  Slice sl (sink b a') slix'
    sliceL sl a slix
      = cvtA
      $^ Alet a
      $^ Alet (inject $ Unit $ weakenA1 slix)
      $^ Backpermute (IndexSlice sl (the avar0) (Shape avar1))
                     (fun1 $ IndexFull sl (weakenE1 (the avar0)))
      $  avar1
    -- Lifted map: the segment descriptor is unchanged; the lifted function
    -- is applied to the flattened value vector.
    mapL :: forall sh e e'. (Elt e, Elt e', Shape sh)
         => PreFun acc aenv (e -> e')
         -> acc aenv (Array sh e)
         -> LiftedAcc acc aenv' (Array sh e')
    mapL (cvtF1 -> (f_l, f_a)) (cvtA -> a)
      | avoidLifting
      , Just (AvoidedFun b f) <- f_a
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b
      $  Map f (sink b a')
      | otherwise
      = liftedAcc
      $^ Alet (lifted a)
      $  construct (segments avar0) (inject $ weakenA1 f_l `Apply` values avar0)
    -- Lifted zipWith: the lifted inputs are first intersected segment-wise
    -- by 'liftedZip'; the lifted function is then applied to the two value
    -- vectors of the zipped result.
    zipWithL :: forall sh a b c. (Elt a, Elt b, Elt c, Shape sh)
             => PreFun acc aenv (a -> b -> c)
             -> acc aenv (Array sh a)
             -> acc aenv (Array sh b)
             -> LiftedAcc acc aenv' (Array sh c)
    zipWithL (cvtF2 -> (f_l, f_a)) (cvtA -> a) (cvtA -> b)
      | Just (AvoidedFun binds f) <- f_a
      , AvoidedAcc a' <- a
      , AvoidedAcc b' <- b
      = AvoidedAcc
      $^ bind binds
      $  ZipWith f (sink binds a') (sink binds b')
      | otherwise
      = liftedAcc
      $^ Alet (fromHOAS liftedZip (lifted a) (lifted b))
      $  construct (inject $ Aprj (SuccTupIdx . SuccTupIdx $ ZeroTupIdx) avar0)
                   (inject $ subApply2 (weakenA1 f_l) (inject $ Aprj (SuccTupIdx ZeroTupIdx) avar0)
                                                      (inject $ Aprj (ZeroTupIdx) avar0))
    -- Lifted fold. The combination function must be avoidable (it cannot be
    -- lifted); the lifted form reduces each segment's innermost dimension
    -- with FoldSeg, guarding empty segments so the seed is still produced.
    foldL :: forall sh e. (Elt e, Shape sh)
          => PreFun acc aenv (e -> e -> e)
          -> PreExp acc aenv e
          -> acc aenv (Array (sh:.Int) e)
          -> LiftedAcc acc aenv' (Array sh e)
    foldL (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtE' -> AvoidedExp b2 z') (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b1
      $  Alet (sink b1 $^ bind b2 (Unit z'))
      $^ Fold (weakenA1 f) (the avar0) (weakenA1 $ sink b1 a')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  Alet (sink b1 (lifted a))
      $^ Alet (unzip $^ Map (fun1 indexSplit) (segments avar0))
      $^ Alet (fromHOAS makeNonEmpty $ fstA avar0)
      $  construct avar0
      $^ Alet (weakenA3 $ sink b1 $^ bind b2 (Unit z'))
      $^ FoldSeg (weakenA4 f)
                 (the avar0)
                 (values avar3)
                 (replicateSegC avar1 (fromHOAS (S.zipWith (\sh h -> S.shapeSize sh S.==* 0 S.? (0,h))) (fstA avar2) (sndA avar2)))
    foldL _ _ _
      = error $ nestedError "first or second" "fold"
    -- Lifted fold1: as 'foldL' but without a seed, so no empty-segment
    -- guard is required.
    fold1L :: forall sh e. (Elt e, Shape sh)
           => PreFun acc aenv (e -> e -> e)
           -> acc aenv (Array (sh:.Int) e)
           -> LiftedAcc acc aenv' (Array sh e)
    fold1L (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b1
      $  Fold1 f (sink b1 a')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  Alet (sink b1 (lifted a))
      $^ Alet (unzip $ inject $ Map (fun1 indexSplit) (segments avar0))
      $  construct (fstA avar0)
      $^ Fold1Seg (weakenA2 f)
                  (values avar1)
                  (replicateSegC (fstA avar0) (sndA avar0))
    fold1L _ _
      = error $ nestedError "first or second" "fold1"
    -- Lifted segmented fold. The lifted form replicates the per-instance
    -- segment descriptors across the outer dimensions and performs a single
    -- flat FoldSeg over the concatenated values.
    foldSegL :: forall sh e i. (Elt e, Shape sh, IsIntegral i, Elt i)
             => PreFun acc aenv (e -> e -> e)
             -> PreExp acc aenv e
             -> acc aenv (Array (sh:.Int) e)
             -> acc aenv (Segments i)
             -> LiftedAcc acc aenv' (Array (sh:.Int) e)
    foldSegL (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtE' -> AvoidedExp b2 z) (cvtA -> a) (cvtA -> segs)
      | avoidLifting
      , AvoidedAcc a' <- a
      , AvoidedAcc segs' <- segs
      = AvoidedAcc
      $^ bind b1
      $  Alet (sink b1 . inject . bind b2 $ Unit z)
      $^ FoldSeg (weakenA1 f) (the avar0) (weakenA1 $ sink b1 a') (weakenA1 $ sink b1 segs')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  Alet (sink b1 $ lifted a)
      $^ Alet (fromHOAS (S.map S.fst) $^ Map (fun1 indexSplit) (segments avar0))
      $^ Alet (weakenA2 $ sink b1 $ lifted segs)
      $  let
           -- result segment shapes: outer shape with the new innermost
           -- extent given by the number of segments per instance
           segs' = inject $ ZipWith (fun2 indexCons)
                                    avar1
                                    (S.map S.unindex1 `fromHOAS` (segments avar0))
         in construct segs'
         $^ Alet (inject $ weakenA3 $ sink b1 $ bind b2 $ Unit z)
         $^ FoldSeg (weakenA4 f) (the avar0) (values avar3)
         $  fromHOAS replicateVectors avar2 avar1
    foldSegL _ _ _ _
      = error $ nestedError "first or second" "foldSeg"
fold1SegL :: forall sh e i. (Elt e, Shape sh, IsIntegral i, Elt i)
=> PreFun acc aenv (e -> e -> e)
-> acc aenv (Array (sh:.Int) e)
-> acc aenv (Segments i)
-> LiftedAcc acc aenv' (Array (sh:.Int) e)
fold1SegL (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtA -> a) (cvtA -> segs)
| avoidLifting
, AvoidedAcc a' <- a
, AvoidedAcc segs' <- segs
= AvoidedAcc
$^ bind b1
$ Fold1Seg f (sink b1 a') (sink b1 segs')
| otherwise
= liftedAcc
$^ bind b1
$ Alet (sink b1 $ lifted a)
$^ Alet (fromHOAS (S.map S.fst) $^ Map (fun1 indexSplit) (segments avar0))
$^ Alet (weakenA2 $ sink b1 $ lifted segs)
$ let
segs' = inject $ ZipWith (fun2 indexCons)
avar1
(S.map S.unindex1 `fromHOAS` (segments avar0))
in construct segs'
$^ Fold1Seg (weakenA3 f) (values avar2)
$ fromHOAS replicateVectors avar1 avar0
fold1SegL _ _ _
= error $ nestedError "first" "foldSeg"
    -- Lifted scanl1: delegates to 'scanl1Lift' on the lifted input.
    scanl1L :: forall e. Elt e
            => PreFun acc aenv (e -> e -> e)
            -> acc aenv (Vector e)
            -> LiftedAcc acc aenv' (Vector e)
    scanl1L (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b1
      $  Scanl1 f (sink b1 a')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  extract $ scanl1Lift f (sink b1 (lifted a))
    scanl1L _ _
      = error $ nestedError "first" "scanl1"
    -- Lifted scanl: seeds every segment via 'scanlLift'.
    scanlL :: forall e. Elt e
           => PreFun acc aenv (e -> e -> e)
           -> PreExp acc aenv e
           -> acc aenv (Vector e)
           -> LiftedAcc acc aenv' (Vector e)
    scanlL (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtE' -> AvoidedExp b2 z) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b1
      $  Alet (sink b1 $ inject $ bind b2 $ Unit z)
      $^ Scanl (weakenA1 f) (the avar0) (weakenA1 $ sink b1 a')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  Alet (sink b1 $ inject $ bind b2 $ Unit z)
      $  scanlLift (weakenA1 f) (the avar0) (weakenA1 $ sink b1 $ lifted a)
    scanlL _ _ _
      = error $ nestedError "first or second" "scanl"
scanl'L :: forall e. Elt e
=> PreFun acc aenv (e -> e -> e)
-> PreExp acc aenv e
-> acc aenv (Vector e)
-> LiftedAcc acc aenv' (Vector e, Scalar e)
scanl'L (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtE' -> AvoidedExp b2 z) (cvtA -> a)
| avoidLifting
, AvoidedAcc a' <- a
= AvoidedAcc
$^ bind b1
$ Alet (sink b1 $ inject $ bind b2 $ Unit z)
$^ Scanl' (weakenA1 f) (the avar0) (weakenA1 $ sink b1 a')
| otherwise
= liftedAcc
$^ bind b1
$ Alet (sink b1 $ lifted a)
$^ Alet (S.map S.unindex1 `fromHOAS` segments avar0)
$^ Alet (values avar1)
$^ Alet (weakenA3 $ sink b1 $ inject $ bind b2 $ Unit z)
$^ Alet (values $ scanlLift (weakenA4 f) (the avar0) avar3)
$ fromHOAS
(\seg vec vec' ->
let
seg' = S.map (+1) seg
tails = S.zipWith (+) seg . fst $ S.scanl' (+) 0 seg'
sums = S.backpermute (S.shape seg) (\ix -> S.index1 $ tails S.! ix) vec'
offset = S.scanl1 (+) seg
inc = S.scanl1 (+)
$ S.permute (+) (S.fill (S.index1 $ S.size vec + 1) 0)
(\ix -> S.index1 $ offset S.! ix)
(S.fill (S.shape seg) (1 :: S.Exp Int))
body = S.backpermute (S.shape vec)
(\ix -> S.index1 $ S.unindex1 ix + inc S.! ix)
vec'
in S.Acc . S.Atuple
$ SnocAtup (SnocAtup NilAtup (liftedArray (S.map S.index1 seg') body))
(liftedArray (S.fill (S.shape sums) S.index0) sums))
avar3
avar2
avar0
scanl'L _ _ _
= error $ nestedError "first or second" "scanl"
    -- Lifted scanr1: right-to-left analogue of 'scanl1L'.
    scanr1L :: forall e. Elt e
            => PreFun acc aenv (e -> e -> e)
            -> acc aenv (Vector e)
            -> LiftedAcc acc aenv' (Vector e)
    scanr1L (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b1
      $  Scanr1 f (sink b1 a')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  extract $ scanr1Lift f (sink b1 (lifted a))
    scanr1L _ _
      = error $ nestedError "first" "scanr1"
    -- Lifted scanr: right-to-left analogue of 'scanlL'.
    scanrL :: forall e. Elt e
           => PreFun acc aenv (e -> e -> e)
           -> PreExp acc aenv e
           -> acc aenv (Vector e)
           -> LiftedAcc acc aenv' (Vector e)
    scanrL (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtE' -> AvoidedExp b2 z) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b1
      $  Alet (sink b1 $ inject $ bind b2 $ Unit z)
      $^ Scanr (weakenA1 f) (the avar0) (weakenA1 $ sink b1 a')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  Alet (sink b1 $ inject $ bind b2 $ Unit z)
      $  scanrLift (weakenA1 f) (the avar0) (weakenA1 $ sink b1 (lifted a))
    scanrL _ _ _
      = error $ nestedError "first or second" "scanr"
    -- Lifted scanr': right-to-left analogue of scanl'L; separates per-segment
    -- totals (taken from segment heads) from the shifted scan bodies.
    scanr'L :: forall e. Elt e
            => PreFun acc aenv (e -> e -> e)
            -> PreExp acc aenv e
            -> acc aenv (Vector e)
            -> LiftedAcc acc aenv' (Vector e, Scalar e)
    scanr'L (cvtF2' -> (_, Just (AvoidedFun b1 f))) (cvtE' -> AvoidedExp b2 z) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = AvoidedAcc
      $^ bind b1
      $  Alet (sink b1 $ inject $ bind b2 $ Unit z)
      $^ Scanr' (weakenA1 f) (the avar0) (weakenA1 $ sink b1 a')
      | otherwise
      = liftedAcc
      $^ bind b1
      $  Alet (sink b1 (lifted a))
      $^ Alet (segments avar0)
      $^ Alet (values avar1)
      $^ Alet (weakenA3 $ sink b1 $ inject $ bind b2 $ Unit z)
      $^ Alet (values $ scanrLift (weakenA4 f) (the avar0) avar3)
      $  fromHOAS
           (\seg vec vec' ->
             let
               -- reduction values
               seg'  = S.map (+1) $ S.map S.unindex1 seg
               heads = P.fst $ S.scanl' (+) 0 seg'
               sums  = S.backpermute (S.shape seg) (\ix -> S.index1 $ heads S.! ix) vec'
               -- body segments
               inc   = S.scanl1 (+) $ mkHeadFlags seg
               body  = S.backpermute (S.shape vec)
                                     (\ix -> S.index1 $ S.unindex1 ix + inc S.! ix)
                                     vec'
             in S.Acc . S.Atuple
                $ SnocAtup (SnocAtup NilAtup (liftedArray (S.map S.index1 seg') body))
                           (liftedArray (S.fill (S.shape sums) S.index0) sums))
           avar3
           avar2
           avar0
    scanr'L _ _ _
      = error $ nestedError "first or second" "scanr'"
    -- Lifted backpermute: when anything is lifted, the index-transform is
    -- applied to every target index of every segment and the gather is done
    -- by 'liftedBackpermuteC'.
    backpermuteL :: (Shape sh, Shape sh', Elt e)
                 => PreExp acc aenv sh'
                 -> PreFun acc aenv (sh' -> sh)
                 -> acc aenv (Array sh e)
                 -> LiftedAcc acc aenv' (Array sh' e)
    backpermuteL (cvtE -> sh) (cvtF1 -> (f_l, f_a)) (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      , AvoidedExp b1 sh' <- sh
      , Just (AvoidedFun b2 f) <- f_a
      = AvoidedAcc
      $^ bind b2
      $  Alet (sink b2 $^ bind b1 (Unit sh'))
      $^ Backpermute (the avar0) (weakenA1 f) (weakenA1 $ sink b2 a')
      | otherwise
      = liftedAcc
      $^ Alet (liftedE sh)
      $  liftedBackpermuteC avar0
                            (inject $ weakenA1 f_l `subApply` enumSegC avar0)
                            (weakenA1 $ lifted a)
    -- Lifted permute. The avoided case must lift-extend both binder sets in
    -- sequence and rebuild the functions in the extended context. The lifted
    -- case scatters every segment's values into the defaults after mapping
    -- source indices to absolute offsets with 'asOffsetsOfC'.
    permuteL :: (Shape sh, Shape sh', Elt e)
             => PreFun acc aenv (e -> e -> e)
             -> acc aenv (Array sh' e)
             -> PreFun acc aenv (sh -> sh')
             -> acc aenv (Array sh e)
             -> LiftedAcc acc aenv' (Array sh' e)
    permuteL (avoidFun -> Avoided (b1, comb))
             (cvtA -> AvoidedAcc defs)
             (avoidFun . sink b1 -> Avoided (b2, p))
             (cvtA -> AvoidedAcc a)
      | avoidLifting
      , ExtendContext ctx1 b1' <- liftExtend vectAcc strength b1 ctx size
      , ExtendContext ctx2 b2' <- liftExtend vectAcc strength b2 ctx1 (sink b1' $ size)
      , Just comb' <- rebuildToLift ctx1 comb
      , Just p' <- rebuildToLift ctx2 p
      = AvoidedAcc
      $^ bind b1'
      $  bind b2'
      $  Permute (sink b2' comb') (sink b2' . sink b1' $ defs) p' (sink b2' . sink b1' $ a)
    -- Lifted version
    permuteL (cvtF2' -> (_, Just (AvoidedFun b comb))) (cvtA -> defs) (cvtF1 -> (p_l,p_a)) (cvtA -> a)
      = trace "permuteL" ("Lifting permute: " ++ show (isJust p_a))
      $ liftedAcc
      $^ bind b
      $  Alet (sink b $ lifted defs)
      $^ Alet (weakenA1 . sink b $ lifted a)
      $  let init     = avar0
             defaults = avar1
             shapes   = segments init
             shapes'  = segments defaults
             enums    = enumSegC shapes
             ixs      = weakenA2 (sink b p_l) `subApply` enums
             ixs'     = asOffsetsOfC (construct shapes $^ ixs) shapes'
             vals     = Permute (weakenA2 $ comb)
                                (values defaults)
                                (fun1 (ixs' `Index`))
                                (values init)
         in construct shapes' $^ vals
    permuteL _ _ _ _
      = error $ nestedError "first" "permute"
    -- Stencils are never lifted: both the stencil function and the source
    -- array must be in avoided form, otherwise the program is rejected.
    stencilL :: (Elt e, Elt e', Stencil sh e stencil)
             => PreFun acc aenv (stencil -> e')
             -> Boundary (EltRepr e)
             -> acc aenv (Array sh e)
             -> LiftedAcc acc aenv' (Array sh e')
    stencilL (cvtF1 -> (_, Just (AvoidedFun b1 f))) b (cvtA -> AvoidedAcc a)
      = AvoidedAcc
      $^ bind b1
      $  Stencil f b (sink b1 a)
    stencilL _ _ _
      = error $ "Disallowed nested parallelism: Stencil operations must reside at the top level of "
             ++ "the program nesting and the stencil function contain no nested parallelism."
    -- Two-array stencils: same restriction as 'stencilL'.
    stencil2L :: (Elt e', Stencil sh e2 stencil2, Stencil sh e1 stencil1)
              => PreFun acc aenv (stencil1 ->
                                  stencil2 -> e')
              -> Boundary (EltRepr e1)
              -> acc aenv (Array sh e1)
              -> Boundary (EltRepr e2)
              -> acc aenv (Array sh e2)
              -> LiftedAcc acc aenv' (Array sh e')
    stencil2L (cvtF2 -> (_, Just (AvoidedFun binds f)))
              b1
              (cvtA -> AvoidedAcc a1)
              b2
              (cvtA -> AvoidedAcc a2)
      = AvoidedAcc
      $^ bind binds
      $  Stencil2 f b1 (sink binds a1) b2 (sink binds a2)
    stencil2L _ _ _ _ _
      = error $ "Disallowed nested parallelism: Stencil operations must reside at the top level of "
             ++ "parallel nesting and the supplied stencil function contain no nested parallelism."
    -- Segmented scanl1 over a lifted array: restarts the scan at every
    -- segment boundary by pairing each element with a head flag and using a
    -- flag-aware ('segmented') combination function.
    scanl1Lift :: forall aenv e. Elt e
               => PreFun acc aenv (e -> e -> e)
               -> acc aenv (LiftedArray DIM1 e)
               -> acc aenv (LiftedArray DIM1 e)
    scanl1Lift f a
      = inject
      $ Alet a
      $ construct (segments avar0)
      $ sndA
      $ unzip
      $^ Scanl1 (weakenA1 $ segmented f)
      $ let
          flags :: forall aenv e. Elt e => acc (aenv, LiftedArray DIM1 e) (Vector Int)
          flags = fromHOAS mkHeadFlags (segments avar0)
        in fromHOAS S.zip flags (values avar0)
    -- Segmented scanl: injects the seed at the head of every segment (each
    -- segment grows by one element) and then runs 'scanl1Lift'.
    scanlLift :: forall aenv e. Elt e
              => PreFun acc aenv (e -> e -> e)
              -> PreExp acc aenv e
              -> acc aenv (LiftedArray DIM1 e)
              -> acc aenv (LiftedArray DIM1 e)
    scanlLift f z a
      = scanl1Lift f
      $^ Alet a
      $^ Alet (segments avar0)
      $^ Alet (values avar1)
      $^ Alet (weakenA3 $ inject $ Unit z)
      $  fromHOAS
           (\seg vec z ->
             let
               seg'  = S.map (S.ilift1 (+1)) seg
               vec'  = S.permute const
                                 (S.fill (S.index1 $ S.size vec + S.size seg) (S.the z))
                                 (\ix -> S.index1 $ S.unindex1 ix + inc S.! ix)
                                 vec
               flags = mkHeadFlags seg
               inc   = S.scanl1 (+) flags
             in liftedArray seg' vec')
           avar2
           avar1
           avar0
    -- Segmented scanr1: mirror of 'scanl1Lift' using tail flags.
    scanr1Lift :: forall aenv e. Elt e
               => PreFun acc aenv (e -> e -> e)
               -> acc aenv (LiftedArray DIM1 e)
               -> acc aenv (LiftedArray DIM1 e)
    scanr1Lift f a
      = inject
      $ Alet a
      $ construct (segments avar0)
      $ sndA
      $ unzip
      $^ Scanr1 (weakenA1 $ segmented f)
      $ let
          flags :: forall aenv e. Elt e => acc (aenv, LiftedArray DIM1 e) (Vector Int)
          flags = fromHOAS mkTailFlags (segments avar0)
        in fromHOAS S.zip flags (values avar0)
    -- Segmented scanr: injects the seed at the tail of every segment (note
    -- the "- 1" offset relative to 'scanlLift') and runs 'scanr1Lift'.
    scanrLift :: forall aenv e. Elt e
              => PreFun acc aenv (e -> e -> e)
              -> PreExp acc aenv e
              -> acc aenv (LiftedArray DIM1 e)
              -> acc aenv (LiftedArray DIM1 e)
    scanrLift f z a
      = scanr1Lift f
      $^ Alet a
      $^ Alet (segments avar0)
      $^ Alet (values avar1)
      $^ Alet (weakenA3 $ inject $ Unit z)
      $  fromHOAS
           (\seg vec z ->
             let
               seg'  = S.map (S.ilift1 (+1)) seg
               vec'  = S.permute const
                                 (S.fill (S.index1 $ S.size vec + S.size seg) (S.the z))
                                 (\ix -> S.index1 $ S.unindex1 ix + inc S.! ix - 1)
                                 vec
               flags = mkHeadFlags seg
               inc   = S.scanl1 (+) flags
             in liftedArray seg' vec')
           avar2
           avar1
           avar0
    -- Enumerate all indices of a given extent as a rank-1 vector.
    extentVector :: forall sh aenv. Shape sh
                 => PreExp acc aenv sh
                 -> acc aenv (Vector sh)
    extentVector sh = inject
                    $ Alet (inject $ Unit sh)
                    $^ Reshape (IndexCons (Const ()) (ShapeSize (the avar0)))
                    $^ Generate (the avar0)
                    $  fun1 id
    -- Try to produce the avoided form of a function: avoidance must succeed
    -- AND the function must be rebuildable in the lift-extended context
    -- (i.e. it must not depend on any lifted variable).
    avoidF :: PreFun acc aenv f
           -> AvoidFun acc () aenv' f
    avoidF (avoidFun -> Avoided (env, f))
      | ExtendContext ctx' env' <- liftExtend vectAcc Conservative env ctx size
      = case rebuildToLift ctx' f of
          Just f' -> Avoided (env', f')
          _ -> trace "liftPreOpenAcc" "Function contains no nested parallelism, but depends on lifted variables"
             $ Unavoided
    avoidF _ = Unavoided
    -- As 'avoidF', for scalar expressions.
    avoidE :: PreExp acc aenv f
           -> AvoidExp acc () aenv' f
    avoidE (avoidExp -> Avoided (env, e)) | ExtendContext ctx' env' <- liftExtend vectAcc Conservative env ctx size
                                          , Just e' <- rebuildToLift ctx' e
                                          = Avoided (env', e')
    avoidE _ = Unavoided
-- |Performs the lifting transform on a given scalar expression.
--
-- Because lifting is performed in the presence of higher dimensional arrays, the output of the
-- transform has an extra element in the environment, the shape of the output array.
--
liftExp :: forall acc env env' aenv aenv' e. Kit acc
        => VectoriseAcc acc
        -> Strength
        -> Context env aenv env' aenv'
        -> Size acc aenv'
        -> PreOpenExp acc env aenv e
        -> PreOpenAcc acc aenv' (Vector e)
liftExp vectAcc strength ctx size exp
  = case exp of
      Let bnd body -> letL bnd body
      Var ix -> varL ctx ix id id
      Const c -> replicateE size (Const c)
      Tuple tup -> liftTuple vectAcc strength ctx size tup
      Prj ix t -> Map (fun1 (Prj ix)) (cvtE t)
      IndexNil -> replicateE size IndexNil
      IndexAny -> replicateE size IndexAny
      IndexCons sh sz -> ZipWith (fun2 IndexCons) (cvtE sh) (cvtE sz)
      IndexHead sh -> Map (fun1 IndexHead) (cvtE sh)
      IndexTail sh -> Map (fun1 IndexTail) (cvtE sh)
      IndexSlice x ix sh -> ZipWith (fun2 (IndexSlice x)) (cvtE ix) (cvtE sh)
      IndexFull x ix sl -> ZipWith (fun2 (IndexFull x)) (cvtE ix) (cvtE sl)
      ToIndex sh ix -> ZipWith (fun2 ToIndex) (cvtE sh) (cvtE ix)
      FromIndex sh ix -> ZipWith (fun2 FromIndex) (cvtE sh) (cvtE ix)
      Cond p t e -> condL p t e
      While p it i -> whileL p it i
      PrimConst c -> replicateE size (PrimConst c)
      PrimApp f x -> Map (fun1 (PrimApp f)) (cvtE x)
      Index a sh -> indexL a sh
      LinearIndex a i -> linearIndexL a i
      Shape a -> shapeL a
      ShapeSize sh -> Map (fun1 ShapeSize) (cvtE sh)
      Intersect s t -> ZipWith (fun2 Intersect) (cvtE s) (cvtE t)
      Union s t -> ZipWith (fun2 Union) (cvtE s) (cvtE t)
      Foreign ff f e -> Map (fun1 (Foreign ff f)) (cvtE e)
  where
    -- Whether the chosen strength permits keeping avoided (unlifted) forms.
    avoidLifting :: Bool
    avoidLifting | Conservative <- strength = True
                 | HoistOnly <- strength = True
                 | otherwise = False
    -- Force a possibly-avoided array term into lifted form.
    lifted :: forall t. Arrays t => LiftedAcc acc aenv' t -> acc aenv' (Vector' t)
    lifted (AvoidedAcc a) = replicateA a size
    lifted (LiftedAcc l) = l
    -- Recursively lift a subexpression.
    cvtE :: forall e. PreOpenExp acc env aenv e
         -> acc aenv' (Vector e)
    cvtE exp' = inject $ liftExp vectAcc strength ctx size exp'
    -- Vectorise an embedded array computation under the embedded context.
    cvtA :: forall sh' e'.
            (Elt e', Shape sh')
         => acc aenv (Array sh' e')
         -> LiftedAcc acc aenv' (Array sh' e')
    cvtA a | EmbedContext ctx' wk <- embedContext ctx
           = vectAcc strength ctx' size (weaken wk a)
    -- Lift a unary scalar function into a vector-to-vector function.
    cvtF1 :: PreOpenFun acc env aenv (a -> b)
          -> PreOpenAfun acc aenv' (Vector a -> Vector b)
    cvtF1 (Lam (Body f)) = Alam . Abody
                         $ inject
                         $ liftExp vectAcc strength (PushLExpC ctx) (ShapeSize (Shape avar0)) f
    cvtF1 _ = $internalError "liftExp" "Inconsistent valuation"
    -- Replicate a scalar expression into a vector of the given size.
    replicateE :: forall e aenv. Elt e
               => Size acc aenv
               -> PreExp acc aenv e
               -> PreOpenAcc acc aenv (Vector e)
    replicateE s c = Generate (index1 s) (Lam (Body $ weakenE1 c))
    -- Lifted versions of operations
    -- ==============================
    -- Lifted scalar variable: walk the context translating the de Bruijn
    -- index; a lifted-expression binding maps directly to an array variable,
    -- everything else is skipped over with the appropriate weakening.
    varL :: forall env aenv env'' aenv''. Elt e
         => Context env aenv env'' aenv''
         -> Idx env e
         -> (forall e. Idx env'' e -> Idx env' e)
         -> (forall a. Idx aenv'' a -> Idx aenv' a)
         -> PreOpenAcc acc aenv' (Vector e)
    varL (PushLExpC _) ZeroIdx _ cvtA = Avar (cvtA ZeroIdx)
    --varL (PushExpC _) ZeroIdx cvtE _ = replicateE size (Var $ cvtE ZeroIdx)
    varL (PushExpC d) (SuccIdx ix) cvtE cvtA = varL d ix (cvtE . SuccIdx) cvtA
    varL (PushLExpC d) (SuccIdx ix) cvtE cvtA = varL d ix cvtE (cvtA . SuccIdx)
    varL (PushAccC d) ix cvtE cvtA = varL d ix cvtE (cvtA . SuccIdx)
    varL (PushLAccC d) ix cvtE cvtA = varL d ix cvtE (cvtA . SuccIdx)
    varL _ _ _ _ = $internalError "liftExp" "Inconsistent valuation"
    -- Lifted scalar let: becomes an array-level let over the lifted binding.
    letL :: forall bnd_t. (Elt e, Elt bnd_t)
         => PreOpenExp acc env aenv bnd_t
         -> PreOpenExp acc (env, bnd_t) aenv e
         -> PreOpenAcc acc aenv' (Vector e)
    letL bnd body = Alet bnd' (inject body')
      where
        bnd' = cvtE bnd
        body' :: PreOpenAcc acc (aenv', Vector bnd_t) (Vector e)
        body' = liftExp vectAcc strength (PushLExpC ctx) (weakenA1 size) body
    -- Lifted conditional: both branches are computed; each element selects
    -- its branch by its own predicate value.
    condL :: Elt e
          => PreOpenExp acc env aenv Bool
          -> PreOpenExp acc env aenv e
          -> PreOpenExp acc env aenv e
          -> PreOpenAcc acc aenv' (Vector e)
    condL p t e = ZipWith (fun2 decide) (cvtE p) (inject $ ZipWith (fun2 tup) (cvtE t) (cvtE e))
      where
        decide p' ab = Cond p' (Prj (SuccTupIdx ZeroTupIdx) ab) (Prj ZeroTupIdx ab)
    -- The lifted while is non-trivial. Here is an overview. We use '^' to denote lifting.
    --
    -- @
    -- (while p it i)^
    --   = fst $ awhile (\(_,flags) -> any flags)
    --                  (\(values, flags) ->
    --                     let
    --                       values' = zip (it^ values) flags
    --                       values'' = zipWith (\(v', f) v -> if f then v' else v) values' values
    --                       flags' = p^ values''
    --                     in (values'', flags')
    --                  )
    --                  (i^, replicate sh False)
    -- @
    --
    whileL :: Elt e
           => PreOpenFun acc env aenv (e -> Bool)
           -> PreOpenFun acc env aenv (e -> e)
           -> PreOpenExp acc env aenv e
           -> PreOpenAcc acc aenv' (Vector e)
    whileL p it i = Aprj (SuccTupIdx ZeroTupIdx) (inject $ Awhile p' it' i')
      where
        p' :: PreOpenAfun acc aenv' ((Vector e, Vector Bool) -> Scalar Bool)
        p' = Alam $ Abody $ let
                flags = sndA avar0
                any f = inject $ Fold or (Const False) f
                or = fun2 (PrimApp PrimLOr S.$$ tup)
              in any flags
        it' :: PreOpenAfun acc aenv' ((Vector e, Vector Bool) -> (Vector e, Vector Bool))
        it' = Alam $ Abody $ let
                values = fstA avar0
                flags = sndA avar0
                values' = inject $ ZipWith (fun2 tup)
                                           (inject $ weakenA1 (cvtF1 it) `subApply` values)
                                           flags
                values'' = inject $ ZipWith (Lam $ Lam $ Body $ Cond (sndE $ var1)
                                                                     (fstE $ var1)
                                                                     var0)
                                            values'
                                            values
                flags' = inject $ (weakenA2) (cvtF1 p) `subApply` avar0
              in inject $ Alet values'' (atup avar0 flags')
        i' :: acc aenv' (Vector e, Vector Bool)
        i' = cvtE i `atup` inject (replicateE size (Const True))
    -- Lifted array indexing: a gather when the array was avoided, otherwise
    -- a per-element lifted index.
    indexL :: forall sh'. (Elt e, Shape sh')
           => acc aenv (Array sh' e)
           -> PreOpenExp acc env aenv sh'
           -> PreOpenAcc acc aenv' (Vector e)
    indexL (cvtA -> a) (cvtE -> ix)
      | avoidLifting
      , AvoidedAcc a' <- a
      = Backpermute (index1 size) (fun1 (Index ix)) a'
      | otherwise
      = extract
      $ liftedIndexC (lifted a) ix
    -- Lifted linear indexing, analogous to 'indexL'.
    linearIndexL :: forall sh'. (Elt e, Shape sh')
                 => acc aenv (Array sh' e)
                 -> PreOpenExp acc env aenv Int
                 -> PreOpenAcc acc aenv' (Vector e)
    linearIndexL (cvtA -> a) (cvtE -> ix)
      | avoidLifting
      , AvoidedAcc a' <- a
      = Alet a'
      $^ Generate (index1 $ weakenA1 size)
                  (Lam $ Body $ LinearIndex avar0 $ Index (weakenA1 ix) $ var0)
      | otherwise
      = extract $
        fromHOAS liftedLinearIndex (lifted a) ix
    -- Lifted shape query: replicate the shape when avoided, otherwise the
    -- per-instance shapes are exactly the segment descriptor.
    shapeL :: forall e'. (Shape e, Elt e')
           => acc aenv (Array e e')
           -> PreOpenAcc acc aenv' (Vector e)
    shapeL (cvtA -> a)
      | avoidLifting
      , AvoidedAcc a' <- a
      = Alet a'
      $^ replicateE (weakenA1 size) (Shape avar0)
      | otherwise
      = extract
      $ segments `fromHOAS` (lifted a)
-- | Map a tuple representation to the corresponding tuple of vectors,
-- component-wise.
type family VectorsOfTupleRepr t
type instance VectorsOfTupleRepr () = ()
type instance VectorsOfTupleRepr (t,e) = (VectorsOfTupleRepr t, Vector e)
-- | Append the components of @env'@ onto @env@, one binding at a time.
type family ExpandEnv env env'
type instance ExpandEnv env () = env
type instance ExpandEnv env (env', t) = ExpandEnv (env, t) env'
-- | The environment obtained by pushing one vector per component of the
-- tuple type @t@ onto @aenv@. Used by 'liftTuple' below.
type TupleEnv aenv t = ExpandEnv aenv (VectorsOfTupleRepr (TupleRepr t))
-- |Perform the lifting transform over a scalar tuple. We lift it as so:
--
-- @
-- (a1, a2,..., aN)^ =
--   let a1' = a1^
--       a2' = a2^
--       ...
--       aN' = aN^
--   in generate (\ix -> (a1' ! ix, a2' ! ix,..., aN' ! ix))
-- @
--
-- RCE: Ideally we would like to do this by lifting the tuple into a tuple of arrays.
-- Unfortunately this can't be done because the type system is unable to recognise that the
-- lifted tuple is an instance of IsTuple.
liftTuple :: forall acc env aenv env' aenv' e.
             (Elt e, Kit acc, IsTuple e)
          => VectoriseAcc acc
          -> Strength
          -> Context env aenv env' aenv'
          -> Size acc aenv'
          -> Tuple (PreOpenExp acc env aenv) (TupleRepr e)
          -> PreOpenAcc acc aenv' (Vector e)
liftTuple vectAcc strength ctx size t = cvtT t (liftExp vectAcc strength ctx size) gen size
  where
    -- Bind the lifted version of each component (left to right), weakening
    -- the size term as the environment grows, and finally run the
    -- continuation 'arr' in the fully extended environment.
    cvtT :: forall t aenv'.
            Tuple (PreOpenExp acc env aenv) t
         -> (forall e. PreOpenExp acc env aenv e -> PreOpenAcc acc aenv' (Vector e))
         -> (Size acc (ExpandEnv aenv' (VectorsOfTupleRepr t)) -> PreOpenAcc acc (ExpandEnv aenv' (VectorsOfTupleRepr t)) (Vector e))
         -> Size acc aenv'
         -> PreOpenAcc acc aenv' (Vector e)
    cvtT NilTup         _    arr size = arr size
    cvtT (SnocTup t' e) lift arr size = Alet (inject $ lift e) (inject $ cvtT t' lift' arr (weakenA1 size))
      where
        lift' :: forall e e'. PreOpenExp acc env aenv e -> PreOpenAcc acc (aenv', Vector e') (Vector e)
        lift' = weakenA1 . lift

    -- Produce the final vector of tuples: each element is assembled by
    -- indexing every bound component vector at the same position.
    gen :: Size acc (TupleEnv aenv' e) -> PreOpenAcc acc (TupleEnv aenv' e) (Vector e)
    gen size = Generate (index1 size) (Lam (Body (Tuple t')))
      where
        t' :: Tuple (PreOpenExp acc ((),DIM1) (TupleEnv aenv' e)) (TupleRepr e)
        t' = weakenTup (ixt (undefined :: aenv') t) (mkTup t)
          where
            -- Build the tuple of per-component indexing expressions, each
            -- referring to its own bound vector.
            mkTup :: forall e c. Tuple c e
                  -> Tuple (PreOpenExp acc ((),DIM1) (VectorsOfTupleRepr e)) e
            mkTup NilTup          = NilTup
            mkTup (SnocTup t'' _) = SnocTup (weakenTup SuccIdx (mkTup t'')) e'
              where
                e' :: forall s e'. e ~ (s,e') => PreOpenExp acc ((),DIM1) (VectorsOfTupleRepr e) e'
                e' = Index avar0 var0

    -- Weaken every expression in a tuple.
    weakenTup :: forall env aenv aenv' e. aenv :> aenv'
              -> Tuple (PreOpenExp acc env aenv) e
              -> Tuple (PreOpenExp acc env aenv') e
    weakenTup _ NilTup        = NilTup
    weakenTup v (SnocTup t e) = SnocTup (weakenTup v t) (weaken v e)

    -- Skip past the bindings introduced for the components of tuple 't'.
    tix :: forall t c env e. Tuple c t -> Idx env e -> Idx (ExpandEnv env (VectorsOfTupleRepr t)) e
    tix NilTup ix = ix
    tix (SnocTup t (_:: c t')) ix = tix t ix'
      where
        ix' :: Idx (env, Vector t') e
        ix' = SuccIdx ix

    -- Translate an index into the component-vector environment into an index
    -- into the expanded environment.
    ixt :: forall t c env e.
           env {- dummy -}
        -> Tuple c t
        -> Idx (VectorsOfTupleRepr t) e
        -> Idx (ExpandEnv env (VectorsOfTupleRepr t)) e
    ixt _ (SnocTup NilTup _) ZeroIdx      = ZeroIdx
    ixt _ (SnocTup t      _) ZeroIdx      = tix t (ZeroIdx :: Idx (env, e) e)
    ixt _ (SnocTup t      _) (SuccIdx ix) = ixt env' t ix
      where
        env' :: forall s e'. t ~ (s,e') => (env, Vector e')
        env' = undefined -- dummy argument
    ixt _ _ _ = error "liftTuple: Inconsistent valuation"
-- | The result of attempting to avoid vectorisation of a scalar term:
-- either the (possibly rewritten) term together with the array bindings
-- hoisted out of it, or failure.
data Avoid f acc env aenv e where
  Avoided :: (Extend acc aenv aenv', f acc env aenv' e) -> Avoid f acc env aenv e
  Unavoided :: Avoid f acc env aenv e

instance Kit acc => Show (Avoid PreOpenFun acc env aenv e) where
  show (Avoided (_,e)) = "lets ...\n" ++ show e
  show Unavoided       = "Unavoided"

-- | 'Avoid' specialised to open scalar expressions.
type AvoidExp = Avoid PreOpenExp
-- | 'Avoid' specialised to open scalar functions.
type AvoidFun = Avoid PreOpenFun
-- |Avoid vectorisation in the cases where it's not necessary, or impossible.
--
-- Walks the expression bottom-up, hoisting array-valued subterms into an
-- 'Extend' environment. Fails ('Unavoided') as soon as any subterm cannot be
-- treated this way.
avoidExp :: forall acc aenv env e. Kit acc
         => PreOpenExp acc env aenv e
         -> AvoidExp acc env aenv e
avoidExp = cvtE
  where
    cvtE :: forall e env aenv. PreOpenExp acc env aenv e -> AvoidExp acc env aenv e
    cvtE exp =
      case exp of
        Let a b            -> letA a b
        Var ix             -> simple $ Var ix
        Const c            -> simple $ Const c
        Tuple tup          -> cvtT tup
        Prj tup e          -> Prj tup `cvtE1` e
        IndexNil           -> simple IndexNil
        IndexCons sh sz    -> cvtE2 IndexCons sh sz
        IndexHead sh       -> IndexHead `cvtE1` sh
        IndexTail sh       -> IndexTail `cvtE1` sh
        IndexAny           -> simple IndexAny
        IndexSlice x ix sh -> cvtE2 (IndexSlice x) ix sh
        IndexFull x ix sl  -> cvtE2 (IndexFull x) ix sl
        ToIndex sh ix      -> cvtE2 ToIndex sh ix
        FromIndex sh ix    -> cvtE2 FromIndex sh ix
        Cond p t e         -> cvtE3 Cond p t e
        While p f x        -> whileA p f x
        PrimConst c        -> simple $ PrimConst c
        PrimApp f x        -> PrimApp f `cvtE1` x
        Index a sh         -> cvtA1E1 Index a sh
        LinearIndex a i    -> cvtA1E1 LinearIndex a i
        Shape a            -> Shape `cvtA1` a
        ShapeSize sh       -> ShapeSize `cvtE1` sh
        Intersect s t      -> cvtE2 Intersect s t
        Union s t          -> cvtE2 Union s t
        Foreign ff f e     -> Foreign ff f `cvtE1` e

    -- Report (via debug tracing) why avoidance failed, then give up.
    unavoided :: forall env aenv e f. String -> Avoid f acc env aenv e
    unavoided op = trace "avoidExp" ("Unable to avoid expression: " ++ op) $ Unavoided

    -- Both binding and body must be avoidable; the hoisted environments are
    -- concatenated and the binding sunk under the second one.
    letA :: forall bnd_t e env aenv. (Elt e, Elt bnd_t)
         => PreOpenExp acc env aenv bnd_t
         -> PreOpenExp acc (env, bnd_t) aenv e
         -> AvoidExp acc env aenv e
    letA bnd body | Avoided (env , bnd' ) <- cvtE bnd
                  , Avoided (env', body') <- cvtE (sink env body)
                  = Avoided (append env env', Let (sink env' bnd') body')
                  | otherwise
                  = unavoided "let"

    -- A while loop is only avoidable when condition, iteration function and
    -- seed all are; each subsequent part is sunk under the environments the
    -- previous parts hoisted.
    whileA :: forall e env aenv. Elt e
           => PreOpenFun acc env aenv (e -> Bool)
           -> PreOpenFun acc env aenv (e -> e)
           -> PreOpenExp acc env aenv e
           -> AvoidExp acc env aenv e
    whileA (Lam (Body p)) (Lam (Body it)) i
      | Avoided (env0, p')  <- cvtE p
      , Avoided (env1, it') <- cvtE (sink env0 it)
      , Avoided (env2, i')  <- cvtE (sink env1 $ sink env0 i)
      = let
          p''  = (sink env2 . sink env1) p'
          it'' = sink env2 it'
        in Avoided (env0 `append` env1 `append` env2, While (Lam $ Body p'') (Lam $ Body it'') i')
    whileA _ _ _ = unavoided "while"

    -- A term with no array subterms is avoidable with no bindings.
    simple :: forall e env aenv.
              PreOpenExp acc env aenv e
           -> AvoidExp acc env aenv e
    simple e = Avoided (BaseEnv, e)

    -- Lift avoidance through a 1-, 2- and 3-argument constructor
    -- respectively, threading the hoisted environments left to right.
    cvtE1 :: forall e a env aenv. (forall env aenv. PreOpenExp acc env aenv a -> PreOpenExp acc env aenv e)
          -> PreOpenExp acc env aenv a
          -> AvoidExp acc env aenv e
    cvtE1 f (cvtE -> Avoided (env, a)) = Avoided (env, f a)
    cvtE1 _ e = unavoided (showPreExpOp e)

    cvtE2 :: forall e a b env aenv.
             (forall env aenv. PreOpenExp acc env aenv a -> PreOpenExp acc env aenv b -> PreOpenExp acc env aenv e)
          -> PreOpenExp acc env aenv a
          -> PreOpenExp acc env aenv b
          -> AvoidExp acc env aenv e
    cvtE2 f (cvtE -> Avoided (env, a)) (cvtE . sink env -> Avoided (env', b))
      = Avoided (env `append` env', f (sink env' a) b)
    cvtE2 f _ _
      = unavoided (showPreExpOp (f undefined undefined))

    cvtE3 :: forall e a b c env aenv.
             (forall env aenv. PreOpenExp acc env aenv a -> PreOpenExp acc env aenv b -> PreOpenExp acc env aenv c -> PreOpenExp acc env aenv e)
          -> PreOpenExp acc env aenv a
          -> PreOpenExp acc env aenv b
          -> PreOpenExp acc env aenv c
          -> AvoidExp acc env aenv e
    cvtE3 f (cvtE -> Avoided (env, a))
            (cvtE . sink env -> Avoided (env', b))
            (cvtE . sink env' . sink env -> Avoided (env'', c))
      = Avoided (env `append` env' `append` env'', f (sink env'' $ sink env' a) (sink env'' b) c)
    cvtE3 f _ _ _ = unavoided (showPreExpOp (f undefined undefined undefined))

    -- Avoid every component of a tuple.
    cvtT :: forall e env aenv. (IsTuple e, Elt e)
         => Tuple (PreOpenExp acc env aenv) (TupleRepr e)
         -> AvoidExp acc env aenv e
    cvtT t | Avoided (env, RebuildTup t) <- cvtT' t = Avoided (env, Tuple t)
      where
        cvtT' :: forall e.
                 Tuple (PreOpenExp acc env aenv) e
              -> Avoid RebuildTup acc env aenv e
        cvtT' NilTup = Avoided (BaseEnv, (RebuildTup NilTup))
        cvtT' (SnocTup t e) | Avoided (env, RebuildTup t') <- cvtT' t
                            , Avoided (env', e') <- cvtE . sink env $ e
                            = Avoided (env `append` env', RebuildTup (SnocTup (unRTup $ sink env' $ RebuildTup t') e'))
        cvtT' _ = unavoided "tuple"
    cvtT _ = unavoided "tuple"

    -- Hoist an array argument out as a new binding and refer to it by the
    -- innermost variable.
    cvtA1 :: forall a e env aenv. Arrays a
          => (forall env aenv. acc aenv a -> PreOpenExp acc env aenv e)
          -> acc aenv a
          -> AvoidExp acc env aenv e
    cvtA1 f a = Avoided (BaseEnv `PushEnv` a, f avar0)

    -- As 'cvtA1', but with an additional scalar argument that must itself be
    -- avoidable.
    cvtA1E1 :: forall a b e env aenv. Arrays a
            => (forall env aenv. acc aenv a -> PreOpenExp acc env aenv b -> PreOpenExp acc env aenv e)
            -> acc aenv a
            -> PreOpenExp acc env aenv b
            -> AvoidExp acc env aenv e
    cvtA1E1 f a (cvtE -> Avoided (env, b))
      = Avoided (env `PushEnv` sink env a, f avar0 (weakenA1 b))
    cvtA1E1 f _ _
      = unavoided (showPreExpOp (f undefined undefined))
-- | As 'avoidExp', but applied under the binders of a scalar function.
avoidFun :: Kit acc
         => PreOpenFun acc env aenv f
         -> AvoidFun acc env aenv f
avoidFun (Lam f)  | Avoided (env, f') <- avoidFun f
                  = Avoided (env, Lam f')
avoidFun (Body f) | Avoided (env, f') <- avoidExp f
                  = Avoided (env, Body f')
avoidFun _        = Unavoided
-- | Result of vectorising an 'Extend' environment: a context recording, per
-- binding, whether it was avoided or lifted, together with the transformed
-- chain of bindings.
data ExtendContext acc aenv0' aenv1 where
  ExtendContext :: Context () aenv1 () aenv1'
                -> Extend acc aenv0' aenv1'
                -> ExtendContext acc aenv0' aenv1

-- | Apply the vectorisation transform to each binding of an 'Extend'
-- environment in turn, extending the context accordingly.
liftExtend :: forall acc aenv0 aenv0' aenv1. Kit acc
           => VectoriseAcc acc
           -> Strength
           -> Extend acc aenv0 aenv1
           -> Context () aenv0 () aenv0'
           -> Size acc aenv0'
           -> ExtendContext acc aenv0' aenv1
liftExtend _ _ BaseEnv ctx _
  = ExtendContext ctx BaseEnv
liftExtend k strength (PushEnv env a) ctx size
  | ExtendContext ctx' env' <- liftExtend k strength env ctx size
  = case k strength ctx' (sink env' size) a of
      -- Binding could be left unlifted: record it as an avoided binding.
      AvoidedAcc a' -> ExtendContext (PushAccC ctx') (PushEnv env' a')
      -- Binding was lifted: record a lifted binding in the context.
      LiftedAcc a' | IsC <- isArraysFlat (undefined :: aenv1 ~ (aenv1', a) => a)
                   -> ExtendContext (PushLAccC ctx') (PushEnv env' a')
-- | A context with its scalar entries stripped out, together with a
-- weakening from the original array environment into the remaining one.
data EmbedContext aenv aenv' = forall aenv''. EmbedContext (Context () aenv'' () aenv') (aenv :> aenv'')

-- | Forget the scalar part of a context, keeping only the array bindings.
-- NOTE: in the pattern guards below the 'd' bound by 'EmbedContext' shadows
-- the constructor argument of the same name.
embedContext :: Context env aenv env' aenv'
             -> EmbedContext aenv aenv'
embedContext EmptyC        = EmbedContext EmptyC id
embedContext (PushExpC d)  = embedContext d
embedContext (PushLExpC d) | EmbedContext d wk <- embedContext d
                           = EmbedContext (PushAccC d) (SuccIdx . wk)
embedContext (PushAccC d)  | EmbedContext d wk <- embedContext d
                           = EmbedContext (PushAccC d) (newTop wk)
embedContext (PushLAccC d) | EmbedContext d wk <- embedContext d
                           = EmbedContext (PushLAccC d) (newTop wk)
-- Vector' operations.
-- ------------------
-- | Project the flattened element data out of a lifted array.
values :: forall sh e. (Shape sh, Elt e) => S.Acc (LiftedArray sh e) -> S.Acc (Vector e)
values arr = S.Acc (S.Aprj ZeroTupIdx arr)

-- | Project the per-element segment descriptor out of a lifted array.
segments :: forall sh e. (Shape sh, Elt e) => S.Acc (LiftedArray sh e) -> S.Acc (Segments sh)
segments arr = S.Acc (S.Aprj (SuccTupIdx ZeroTupIdx) arr)

-- | Pair a segment descriptor with a flat data vector, forming a lifted array.
liftedArray :: (Shape sh, Elt e) => S.Acc (Segments sh) -> S.Acc (Vector e) -> S.Acc (LiftedArray sh e)
liftedArray shs dat = S.Acc (S.Atuple (NilAtup `SnocAtup` shs `SnocAtup` dat))
-- | View an array tuple as an 'Atuple' of projections from it.
asAtuple :: forall a. (Arrays a, IsAtuple a) => S.Acc a -> Atuple S.Acc (TupleRepr a)
asAtuple a = tOA (prod (Proxy :: Proxy Arrays) (undefined :: a)) id
  where
    tOA :: forall t. ProdR Arrays t -> (forall e. TupleIdx t e -> TupleIdx (TupleRepr a) e) -> Atuple S.Acc t
    tOA ProdRunit     _   = NilAtup
    tOA (ProdRsnoc t) ixt = SnocAtup (tOA t (ixt . SuccTupIdx)) (S.Acc $ S.Aprj (ixt ZeroTupIdx) a)

-- | Replicate an array term @size@ times, producing its lifted ('Vector'')
-- counterpart. Tuples are replicated component-wise.
replicate :: forall a. Arrays a => S.Exp Int -> S.Acc a -> S.Acc (Vector' a)
replicate size a = case flavour (undefined :: a) of
  ArraysFunit  -> S.Acc $ S.Atuple $ SnocAtup NilAtup $ S.unit size
  ArraysFarray ->
    -- Flatten the array, replicate the flat data @size@ times, and pair it
    -- with a segment descriptor repeating the original shape.
    let values = S.flatten $ S.replicate (S.lift (Z:.All:.size)) (S.flatten a)
        segs   = S.fill (S.index1 $ S.lift size) (S.shape a)
    in liftedArray segs values
  ArraysFtuple | IsC <- isArraysFlat (undefined :: a)
    -> S.Acc $ S.Atuple $ replicateT (asAtuple a)
  where
    replicateT :: Atuple S.Acc t -> Atuple S.Acc (LiftedTupleRepr t)
    replicateT NilAtup = NilAtup
    replicateT (SnocAtup t (a' :: S.Acc a')) | IsC <- isArraysFlat (undefined :: a')
      = SnocAtup (replicateT t) (replicate size a')
-- A segmented replicate.
-- | Segmented replicate: element @i@ of @vals@ is repeated once for every
-- position of segment @i@.
replicateSeg :: (Elt e, Shape sh) => S.Acc (Segments sh) -> S.Acc (Vector e) -> S.Acc (Vector e)
replicateSeg segs vals
  = let
      (offs, length) = offsets segs
      vShape = S.index1 $ S.the length
      -- Scatter segment numbers at their start offsets (-1 elsewhere), then
      -- propagate them rightwards with a scan so every output position knows
      -- which segment it belongs to.
      negs   = S.fill (S.index1 $ S.the length + 1) (-1 :: S.Exp Int)
      flags  = S.permute max negs (\ix -> S.index1 (offs S.! ix)) (S.enumFromN (S.shape segs) 0)
      flags' = S.scanl1 (\a b -> b S./=* -1 S.? (b, a)) flags
      vals'  = S.backpermute vShape ((S.!) (S.map S.index1 flags')) vals
    in vals'

-- | Segmented replicate where each element is itself a (ragged) vector: the
-- vector for segment @i@ of @vecs@ is repeated across segment @i@.
replicateVectors :: (Shape sh, Elt e) => S.Acc (Segments sh) -> S.Acc (LiftedArray DIM1 e) -> S.Acc (Vector e)
replicateVectors segs vecs = S.map (values vecs S.!!) $ S.zipWith (+) offs' ixs
  where
    segs' = replicateSeg segs (segments vecs)
    ixs   = enumSegLinear segs'
    offs  = replicateSeg segs (fst $ offsets segs)
    offs' = replicateSeg segs' offs
-- | For every segment, enumerate its element positions linearly:
-- 0,1,..,size-1 within each segment.
enumSegLinear :: forall sh. Shape sh => S.Acc (Segments sh) -> S.Acc (Vector Int)
enumSegLinear segs = enum_s
  where
    sizes  = S.map S.shapeSize segs
    ones   = S.fill (S.shape segs) (1 :: S.Exp Int)
    ones_s = replicateSeg segs ones
    -- Exclusive segmented scan over ones yields 0..n-1 per segment.
    enum_s = S.afst $ S.scanl'Seg (+) 0 ones_s sizes

-- | For every segment, enumerate its multidimensional indices.
enumSeg :: forall sh. Shape sh => S.Acc (Segments sh) -> S.Acc (Vector sh)
enumSeg segs = S.zipWith S.fromIndex shapes (enumSegLinear segs)
  where
    shapes = replicateSeg segs segs
-- | Exclusive left scan of the segment sizes: yields the starting offset of
-- every segment in the flattened data vector, together with the total
-- number of elements.
offsets :: Shape sh => S.Acc (Segments sh) -> (S.Acc (Segments Int), S.Acc (Scalar Int))
offsets = S.scanl' (+) 0 . S.map S.shapeSize

-- | Force every segment shape to have at least unit extent in each dimension.
makeNonEmpty :: forall sh. Shape sh => S.Acc (Segments sh) -> S.Acc (Segments sh)
makeNonEmpty = S.map (S.union unitShape)
  where
    unitShape = S.constant (listToShape (P.replicate (dim (ignore :: sh)) 1))
-- RCE: I have a strong feeling this can be done better.
--
-- | Lifted conditional: select, per element of the condition vector, the
-- corresponding array from the then- or else-branch. Tuples recurse
-- component-wise.
liftedCond :: forall a. Arrays a
           => S.Acc (Vector Bool) -- condition
           -> S.Acc (Vector' a)   -- then
           -> S.Acc (Vector' a)   -- else
           -> S.Acc (Vector' a)
liftedCond pred th el
  | IsC <- isArraysFlat (undefined :: a)
  = case (flavour (undefined :: a)) of
      ArraysFunit  -> th
      ArraysFarray -> liftedCond1 th el
      ArraysFtuple -> S.Acc $ S.Atuple $ cvtT (prod (Proxy :: Proxy Arrays) (undefined :: a)) (asAtuple th) (asAtuple el)
  where
    cvtT :: ProdR Arrays t -> Atuple S.Acc (LiftedTupleRepr t) -> Atuple S.Acc (LiftedTupleRepr t) -> Atuple S.Acc (LiftedTupleRepr t)
    cvtT ProdRunit NilAtup NilAtup = NilAtup
    cvtT (ProdRsnoc t) (SnocAtup t1 a1) (SnocAtup t2 a2) = SnocAtup (cvtT t t1 t2) (liftedCond pred a1 a2)
    cvtT _ _ _ = error "Unreachable code"

    -- Single-array case: pick segment shapes and data per element according
    -- to the flag vector.
    liftedCond1 :: (Elt e, Shape sh) => S.Acc (LiftedArray sh e) -> S.Acc (LiftedArray sh e) -> S.Acc (LiftedArray sh e)
    liftedCond1 t e = liftedArray segs vals
      where
        segs_t = segments t
        segs_e = segments e
        -- Chosen segment shapes.
        segs = S.zipWith (\f p -> let (t,e) = S.unlift p in f S.? (t, e))
                         pred
                         (S.zip segs_t segs_e)
        (offs_t, _) = offsets segs_t
        (offs_e, _) = offsets segs_e
        sz_v = S.fold (+) 0 $ S.map S.shapeSize segs
        -- Chosen per-segment start offsets into the respective data vectors.
        offs = S.zipWith (\f p -> let (t,e) = S.unlift p in f S.? (t, e))
                         pred
                         (S.zip offs_t offs_e)
        flag_offs = replicateSeg segs $ S.zip pred offs
        vals_t = values t
        vals_e = values e
        ones  = S.fill (S.index1 $ S.the sz_v) (1 :: S.Exp Int)
        enums = S.scanl1Seg (+) ones $ S.map S.shapeSize segs
        -- Gather each output element from the selected source vector.
        vals = S.zipWith (\t ind -> let (f,s) = S.unlift t in f S.? (vals_t S.!! (s + ind), vals_e S.!! (s + ind)))
                         flag_offs
                         enums
--liftedAwhile :: forall t.
-- (Arrays t, Arrays (Vector' t))
-- => (S.Acc (Vector' t) -> S.Acc (Vector Bool))
-- -> (S.Acc (Vector' t) -> S.Acc (Vector' t))
-- -> S.Acc (Vector' t)
-- -> S.Acc (Vector' t)
--liftedAwhile pred iter init
-- = let
-- (a, _ :: S.Acc (Vector Bool), _ :: S.Acc (Scalar Bool)) = S.unlift $ S.awhile pred' iter' init'
-- in a
-- where
-- init' = let f = pred init
-- in S.lift (init, f, S.or f)
-- pred' :: S.Acc (Vector' t, Vector Bool, Scalar Bool) -> S.Acc (Scalar Bool)
-- pred' f = let (_ :: S.Acc (Vector' t), _ :: S.Acc (Vector Bool), c) = S.unlift f in c
-- iter' :: S.Acc (Vector' t, Vector Bool, Scalar Bool) -> S.Acc (Vector' t, Vector Bool, Scalar Bool)
-- iter' (S.unlift -> (a, f, _ :: S.Acc (Scalar Bool)))
-- = let a' = liftedCond f (iter a) a
-- f' = S.zipWith (S.&&*) f (pred a')
-- c' = S.or f'
-- in S.lift (a', f', c')
-- | Lifted reshape: swap in the new per-element extents while leaving the
-- underlying flat data vector untouched.
liftedReshape :: (Elt e, Shape sh, Shape sh') => S.Acc (Vector sh) -> S.Acc (LiftedArray sh' e) -> S.Acc (LiftedArray sh e)
liftedReshape newExtents arr = liftedArray newExtents (values arr)
--liftedGenerate :: (Elt e, Shape sh)
-- => S.Acc (Vector sh)
-- -> (S.Acc (Vector sh) -> S.Acc (Vector e))
-- -> S.Acc (LiftedArray sh e)
--liftedGenerate extents fun
-- = liftedArray extents (fun (enumSeg extents))
-- | Lifted zip: intersect the segment descriptors pairwise and gather the
-- matching elements out of both flat data vectors.
liftedZip :: (Elt a, Elt b, Shape sh)
          => S.Acc (LiftedArray sh a)
          -> S.Acc (LiftedArray sh b)
          -> S.Acc (Segments sh, Vector a, Vector b)
liftedZip as bs = S.lift (segs, valsA, valsB)
  where
    segsA = segments as
    segsB = segments bs
    segs  = S.zipWith S.intersect segsA segsB
    enums = enumSeg segs
    -- Linearise each intersected index relative to the original segment
    -- extents, then add the original segment start offsets.
    enumsA = S.zipWith S.toIndex (replicateSeg segs segsA) enums
    enumsB = S.zipWith S.toIndex (replicateSeg segs segsB) enums
    (offsA, _) = offsets segsA
    (offsB, _) = offsets segsB
    valsA = S.map (values as S.!!) (S.zipWith (+) enumsA (replicateSeg segs offsA))
    valsB = S.map (values bs S.!!) (S.zipWith (+) enumsB (replicateSeg segs offsB))
--liftedFold :: (Elt e, Shape sh, Slice sh)
-- => (S.Exp e -> S.Exp e -> S.Exp e)
-- -> S.Exp e
-- -> S.Acc (LiftedArray (sh:.Int) e)
-- -> S.Acc (LiftedArray sh e)
--liftedFold f z a = liftedArray segs' vals
-- where
-- vals = S.foldSeg f z (values a) (replicateSeg segs' heads')
-- (segs, heads) = S.unzip $ S.map (\sh -> S.lift (S.indexTail sh, S.indexHead sh)) (segments a)
-- segs' = makeNonEmpty segs
-- heads' = S.zipWith (\sh h -> S.shapeSize sh S.==* 0 S.? (0,h)) segs heads
--liftedFoldSeg :: (Elt e, Shape sh, Slice sh)
-- => (S.Exp e -> S.Exp e -> S.Exp e)
-- -> S.Exp e
-- -> S.Acc (LiftedArray (sh:.Int) e)
-- -> S.Acc (LiftedArray DIM1 Int)
-- -> S.Acc (LiftedArray (sh:.Int) e)
--liftedFoldSeg f z a is = liftedArray segs vals
-- where
-- tails = S.map S.indexTail (segments a)
-- vals = S.foldSeg f z (values a) isegs
-- segs = S.zipWith (\x y -> S.lift (x:.y)) tails
-- (S.map S.unindex1 (segments is))
-- isegs = replicateVectors tails is
--liftedBackpermute :: (Elt e, Shape sh, Shape sh')
-- => S.Acc (Vector sh')
-- -> (S.Acc (Vector sh') -> S.Acc (Vector sh))
-- -> S.Acc (LiftedArray sh e)
-- -> S.Acc (LiftedArray sh' e)
--liftedBackpermute shapes f a = liftedArray shapes vals'
-- where
-- segs = segments a
-- vals = values a
-- enums = enumSeg shapes
-- ixs = f enums
-- starts = replicateSeg shapes (fst $ offsets segs)
-- ixs' = S.map S.index1 $ S.zipWith (+) starts (S.map S.shapeSize ixs)
-- vals' = S.backpermute (S.shape ixs') (ixs' S.!) vals
-- | Lifted backpermute: @ixs@ gives, for every output position described by
-- @shapes@, the source index into the corresponding segment of @a@.
liftedBackpermute :: (Elt e, Shape sh, Shape sh')
                  => S.Acc (Vector sh')
                  -> S.Acc (Vector sh)
                  -> S.Acc (LiftedArray sh e)
                  -> S.Acc (LiftedArray sh' e)
liftedBackpermute shapes ixs a = liftedArray shapes vals'
  where
    segs   = segments a
    vals   = values a
    -- Segment start offsets, replicated across each output segment.
    starts = replicateSeg shapes (fst $ offsets segs)
    ixs'   = S.map S.index1 $ S.zipWith (+) starts (S.map S.shapeSize ixs)
    vals'  = S.backpermute (S.shape ixs') (ixs' S.!) vals
--liftedPermute :: (Elt e, Shape sh, Shape sh')
-- => (S.Exp e -> S.Exp e -> S.Exp e)
-- -> S.Acc (LiftedArray sh' e)
-- -> (S.Acc (Vector sh) -> S.Acc (Vector sh'))
-- -> S.Acc (LiftedArray sh e)
-- -> S.Acc (LiftedArray sh' e)
--liftedPermute combine defaults perm init = liftedArray shapes' vals
-- where
-- shapes = segments ini
-- shapes' = segments defaults
-- enums = enumSeg shapes
-- ixs = perm enums
-- ixs' = asOffsetsOf (liftedArray shapes ixs) shapes'
-- vals = S.permute combine (values defaults) (ixs' S.!) (values init)
-- | Turn a lifted array of indices into absolute linear offsets into the
-- flat data vector laid out by @shapes'@: each index contributes its size
-- plus the start offset of its segment.
asOffsetsOf :: (Shape sh, Shape sh')
            => S.Acc (LiftedArray sh sh')
            -> S.Acc (Segments sh')
            -> S.Acc (Vector DIM1)
asOffsetsOf ixs shapes' = S.map S.index1 $ S.zipWith (+) starts (S.map S.shapeSize (values ixs))
  where
    shapes = segments ixs
    starts = replicateSeg shapes (fst $ offsets shapes')

-- | Lifted indexing: fetch, per segment, the element at the corresponding
-- index of @ixs@.
liftedIndex :: (Shape sh, Elt e)
            => S.Acc (LiftedArray sh e)
            -> S.Acc (Vector sh)
            -> S.Acc (Vector e)
liftedIndex vals ixs = S.backpermute (S.shape ixs) ixt (values vals)
  where
    segs   = segments vals
    starts = fst $ offsets segs
    -- Segment start plus the index linearised within that segment's extent.
    ixt ix = let
               start = starts S.! ix
               off   = S.toIndex (segs S.! ix) (ixs S.! ix)
             in S.index1 (start + off)

-- RCE: Using a generate here, as opposed to the backpermute used above, so that the linear indexing
-- is preserved. In reality, it may be better to do this as a backpermute or, equally as likely, it
-- makes no difference whatsoever.
liftedLinearIndex :: (Shape sh, Elt e)
                  => S.Acc (LiftedArray sh e)
                  -> S.Acc (Vector Int)
                  -> S.Acc (Vector e)
liftedLinearIndex vals ixs = S.generate (S.shape ixs) (\ix -> values vals S.!! ((starts S.! ix) + ixs S.!ix))
  where
    starts = fst $ offsets (segments vals)
-- |Compute head flags vector from segment vector for left-scans.
--
-- The vector will be full of zeros in the body of a segment, and non-zero
-- otherwise. The "flag" value, if greater than one, indicates that several
-- empty segments are represented by this single flag entry. This additional
-- data is used by exclusive segmented scan.
--
mkHeadFlags :: S.Acc (Segments DIM1) -> S.Acc (Vector Int)
mkHeadFlags seg
  = S.init
  $ S.permute (+) zeros (\ix -> S.index1 (offset S.! ix)) ones
  where
    (offset, len) = S.scanl' (+) 0 $ S.map S.unindex1 seg
    zeros         = S.fill (S.index1 $ S.the len + 1) 0
    ones          = S.fill (S.index1 $ S.size offset) 1

-- |Compute tail flags vector from segment vector for right-scans. That is, the
-- flag is placed at the last place in each segment.
--
mkTailFlags :: S.Acc (Segments DIM1) -> S.Acc (Segments Int)
mkTailFlags seg
  = S.init
  $ S.permute (+) zeros (\ix -> S.index1 (S.the len - 1 - offset S.! ix)) ones
  where
    (offset, len) = S.scanr' (+) 0 $ S.map S.unindex1 seg
    zeros         = S.fill (S.index1 $ S.the len + 1) 0
    ones          = S.fill (S.index1 $ S.size offset) 1
-- Term-level (de Bruijn) versions of the lifted operations above, obtained
-- via HOAS conversion so they can be spliced into the transformed AST.

-- | Term-level 'replicate'; the replication count is taken from a scalar array.
replicateC :: (Arrays a, Arrays (Vector' a), Kit acc)
           => acc aenv (Scalar Int) -> acc aenv a -> acc aenv (Vector' a)
replicateC = fromHOAS (replicate . S.the)

-- | Term-level 'replicateSeg'.
replicateSegC :: (Kit acc, Shape sh, Elt e)
              => acc aenv (Segments sh)
              -> acc aenv (Vector e)
              -> acc aenv (Vector e)
replicateSegC = fromHOAS replicateSeg

-- | Term-level 'enumSeg'.
enumSegC :: (Shape sh, Kit acc) => acc aenv (Vector sh) -> acc aenv (Vector sh)
enumSegC = fromHOAS enumSeg

-- | Term-level 'liftedCond'.
liftedCondC :: (Arrays a, Arrays (Vector' a), Kit acc)
            => acc aenv (Vector Bool)
            -> acc aenv (Vector' a)
            -> acc aenv (Vector' a)
            -> acc aenv (Vector' a)
liftedCondC = fromHOAS liftedCond

-- | Term-level 'liftedReshape'.
liftedReshapeC :: (Elt e, Shape sh, Shape sh', Kit acc)
               => acc aenv (Vector sh)
               -> acc aenv (LiftedArray sh' e)
               -> acc aenv (LiftedArray sh e)
liftedReshapeC = fromHOAS liftedReshape

-- | Term-level 'liftedBackpermute'.
liftedBackpermuteC :: (Elt e, Shape sh, Shape sh', Kit acc)
                   => acc aenv (Vector sh')
                   -> acc aenv (Vector sh)
                   -> acc aenv (LiftedArray sh e)
                   -> acc aenv (LiftedArray sh' e)
liftedBackpermuteC = fromHOAS liftedBackpermute

-- | Term-level 'asOffsetsOf'.
asOffsetsOfC :: (Shape sh, Shape sh', Kit acc)
             => acc aenv (LiftedArray sh sh')
             -> acc aenv (Segments sh')
             -> acc aenv (Vector DIM1)
asOffsetsOfC = fromHOAS asOffsetsOf

-- | Term-level 'liftedIndex'.
liftedIndexC :: (Kit acc, Shape sh, Elt e)
             => acc aenv (LiftedArray sh e)
             -> acc aenv (Vector sh)
             -> acc aenv (Vector e)
liftedIndexC = fromHOAS liftedIndex
-- Duplicating and sinking simple scalar expressions down the AST so as to avoid unnecessary
-- vectorisation.
-- ------------------------------------------------------------------------
--type SinkExps acc = forall env env' aenv t. Supplement acc env env' aenv -> acc aenv t -> acc aenv
--data WeakenedSupplement acc env0 env1 env0' env1' aenv where
-- WeakenedSupplement :: Supplement acc env1 env2 aenv
-- -> (env1' :> env2)
-- -> (env2 :> env1')
-- -> WeakenedSupplement acc env0 env1 env0' env1' aenv
--sinkExpsIntoOpenAcc :: SinkExps OpenAcc
--sinkExpsIntoOpenAcc supp (OpenAcc a) = OpenAcc $ sinkExpsIntoAcc sinkExpsIntoOpenAcc supp a
--sinkExpsIntoAfun :: forall acc env env' aenv t. (Kit acc)
-- => SinkExps acc
-- -> Supplement acc env env' aenv
-- -> PreOpenAfun acc aenv
-- -> PreOpenAfun acc aenv
--sinkExpsIntoAfun sinker supp (Abody b) = Abody $ sinker supp b
--sinkExpsIntoAfun sinker supp (Alam f) = Alam $ sinkExpsIntoAfun sinker (weakenSupp1 supp) f
--sinkExpsIntoAcc :: forall acc env env' aenv t. (Kit acc)
-- => SinkExps acc
-- -> Supplement acc env env' aenv
-- -> PreOpenAcc acc aenv
-- -> PreOpenAcc acc aenv
--sinkExpsIntoAcc sinker supp pacc
-- = case pacc of
-- Alet bnd body -> Alet (cvtA bnd) (sinker (weakenSupp1 supp) body)
-- Avar ix -> Avar ix
-- Atuple tup -> Atuple (cvtT tup)
-- Aprj tup a -> Aprj tup (cvtA a)
-- Apply f a -> Apply (cvtAfun f) (cvtA a)
-- Aforeign ff afun acc -> Aforeign ff (sinkExpsIntoAfun sinker BaseSup afun) (cvtA acc)
-- Acond p t e -> Acond (cvtE p) (cvtA t) (cvtA e)
-- Awhile p f a -> Awhile (cvtAfun p) (cvtAfun f) (cvtA a)
-- Use a -> Use a
-- Unit e -> Unit (cvtE e)
-- Reshape e a -> Reshape (cvtE e) (cvtA a)
-- Generate e f -> Generate (cvtE e) (cvtF f)
-- Transform sh ix f a -> Transform (cvtE sh) (cvtF ix) (cvtF f) (cvtA a)
-- Replicate sl slix a -> Replicate sl (cvtE slix) (cvtA a)
-- Slice sl a slix -> Slice sl (cvtA a) (cvtE slix)
-- Map f a -> Map (cvtF f) (cvtA a)
-- ZipWith f a1 a2 -> ZipWith (cvtF f) (cvtA a1) (cvtA a2)
-- Fold f z a -> Fold (cvtF f) (cvtE z) (cvtA a)
-- Fold1 f a -> Fold1 (cvtF f) (cvtA a)
-- FoldSeg f z a s -> FoldSeg (cvtF f) (cvtE z) (cvtA a) (cvtA s)
-- Fold1Seg f a s -> Fold1Seg (cvtF f) (cvtA a) (cvtA s)
-- Scanl f z a -> Scanl (cvtF f) (cvtE z) (cvtA a)
-- Scanl' f z a -> Scanl' (cvtF f) (cvtE z) (cvtA a)
-- Scanl1 f a -> Scanl1 (cvtF f) (cvtA a)
-- Scanr f z a -> Scanr (cvtF f) (cvtE z) (cvtA a)
-- Scanr' f z a -> Scanr' (cvtF f) (cvtE z) (cvtA a)
-- Scanr1 f a -> Scanr1 (cvtF f) (cvtA a)
-- Permute f1 a1 f2 a2 -> Permute (cvtF f1) (cvtA a1) (cvtF f2) (cvtA a2)
-- Backpermute sh f a -> Backpermute (cvtE sh) (cvtF f) (cvtA a)
-- Stencil f b a -> Stencil (cvtF f) b (cvtA a)
-- Stencil2 f b1 a1 b2 a2 -> Stencil2 (cvtF f) b1 (cvtA a1) b2 (cvtA a2)
-- where
-- cvtA :: forall t. acc env' aenv t -> acc aenv
-- cvtA = sinker supp
-- cvtE :: forall t. Elt t => PreOpenExp acc env' aenv t -> PreOpenExp acc env aenv
-- cvtE = bindExps supp . sinkExpsIntoExp sinker supp
-- cvtF :: forall t. PreOpenFun acc env' aenv t -> PreOpenFun acc env aenv
-- cvtF = cvt supp
-- where
-- cvt :: forall t env env'. Supplement acc env env' aenv -> PreOpenFun acc env' aenv t -> PreOpenFun acc env aenv
-- cvt supp (Body b) = Body $ bindExps supp $ sinkExpsIntoExp sinker supp b
-- cvt supp (Lam f) | WeakenedSupplement supp' ixt _ <- weakenSuppE1 supp
-- = Lam $ cvt supp' (weakenE ixt f)
-- cvtAfun :: forall t. PreOpenAfun acc aenv t -> PreOpenAfun acc aenv
-- cvtAfun = cvt supp
-- where
-- cvt :: forall t aenv. Supplement acc env env' aenv -> PreOpenAfun acc aenv t -> PreOpenAfun acc aenv
-- cvt supp (Abody b) = Abody $ sinker supp b
-- cvt supp (Alam f) = Alam $ cvt (weakenSupp1 supp) f
-- cvtT :: forall t. Atuple (acc env' aenv) t -> Atuple (acc env aenv)
-- cvtT NilAtup = NilAtup
-- cvtT (SnocAtup t a) = SnocAtup (cvtT t) (cvtA a)
--sinkExpsIntoExp :: forall acc env env' aenv t. (Kit acc, Elt t)
-- => SinkExps acc
-- -> Supplement acc env env' aenv
-- -> PreOpenExp acc env' aenv
-- -> PreOpenExp acc env' aenv
--sinkExpsIntoExp sinker supp exp
-- = case exp of
-- Let bnd body -> sinkLet (cvtE bnd) body
-- Var idx -> Var idx
-- Const c -> Const c
-- Tuple t -> Tuple (cvtT t)
-- Prj tup e -> Prj tup (cvtE e)
-- IndexNil -> IndexNil
-- IndexCons sl sz -> IndexCons (cvtE sl) (cvtE sz)
-- IndexHead sh -> IndexHead (cvtE sh)
-- IndexTail sh -> IndexTail (cvtE sh)
-- IndexSlice x ix sh -> IndexSlice x (cvtE ix) (cvtE sh)
-- IndexFull x ix sl -> IndexFull x (cvtE ix) (cvtE sl)
-- IndexAny -> IndexAny
-- ToIndex sh ix -> ToIndex (cvtE sh) (cvtE ix)
-- FromIndex sh i -> FromIndex (cvtE sh) (cvtE i)
-- Cond p t e -> Cond (cvtE p) (cvtE t) (cvtE e)
-- While p f x -> While (cvtF p) (cvtF f) (cvtE x)
-- PrimConst c -> PrimConst c
-- PrimApp f x -> PrimApp f (cvtE x)
-- Index a sh -> Index (cvtA a) (cvtE sh)
-- LinearIndex a i -> LinearIndex (cvtA a) (cvtE i)
-- Shape a -> Shape (cvtA a)
-- ShapeSize sh -> ShapeSize (cvtE sh)
-- Intersect sh sz -> Intersect (cvtE sh) (cvtE sz)
-- Union sh sz -> Union (cvtE sh) (cvtE sz)
-- Foreign ff f e -> Foreign ff (cvtFun BaseSup f) (cvtE e)
-- where
-- cvtA :: forall t. acc env' aenv t -> acc aenv
-- cvtA = sinkExps supp . sinker supp
-- cvtE :: forall t. Elt t => PreOpenExp acc env' aenv t -> PreOpenExp acc env' aenv
-- cvtE = sinkExpsIntoExp sinker supp
-- cvtF :: forall t. PreOpenFun acc env' aenv t -> PreOpenFun acc env' aenv
-- cvtF = cvtFun supp
-- cvtFun :: forall t env env' aenv. Supplement acc env env' aenv -> PreOpenFun acc env' aenv t -> PreOpenFun acc env' aenv
-- cvtFun supp (Body b) = Body $ sinkExpsIntoExp sinker supp b
-- cvtFun supp (Lam f) | WeakenedSupplement supp' ixt txi <- weakenSuppE1 supp
-- = Lam $ weakenE txi $ cvtFun supp' (weakenE ixt f)
-- cvtT :: forall t. Tuple (PreOpenExp acc env' aenv) t -> Tuple (PreOpenExp acc env' aenv)
-- cvtT NilTup = NilTup
-- cvtT (SnocTup t a) = SnocTup (cvtT t) (cvtE a)
-- sinkLet :: Elt bnd => PreOpenExp acc env' aenv bnd -> PreOpenExp acc (env',bnd) aenv t -> PreOpenExp acc env' aenv
-- sinkLet bnd body | shouldSinkExp bnd
-- = Let bnd (sinkExpsIntoExp sinker (PushSup supp bnd) body)
-- | WeakenedSupplement supp' ixt txi <- weakenSuppE1 supp
-- = Let bnd (weakenE txi $ sinkExpsIntoExp sinker supp' (weakenE ixt body))
--shouldSinkExp :: PreOpenExp acc env aenv t -> Bool
--shouldSinkExp Const{} = True
--shouldSinkExp IndexNil{} = True
--shouldSinkExp IndexAny{} = True
--shouldSinkExp PrimConst{} = True
--shouldSinkExp _ = False
--weakenSuppE1 :: Kit acc => Supplement acc env env' aenv -> WeakenedSupplement acc env (env,s) env' (env',s) aenv
--weakenSuppE1 BaseSup = WeakenedSupplement BaseSup id id
--weakenSuppE1 (PushSup supp b)
-- | WeakenedSupplement supp' ixt txi <- weakenSuppE1 supp
-- = WeakenedSupplement (PushSup supp' (weakenE (ixt . SuccIdx) b)) (swiz ixt) (ziws txi)
-- where
-- swiz :: ((env'1,s) :> env2) -> (((env'1,e),s) :> (env2,e))
-- swiz ixt ZeroIdx = SuccIdx (ixt ZeroIdx)
-- swiz _ (SuccIdx ZeroIdx) = ZeroIdx
-- swiz ixt (SuccIdx (SuccIdx ix)) = SuccIdx (ixt (SuccIdx ix))
-- ziws :: (env2 :> (env'1,s)) -> ((env2,e) :> ((env'1,e),s))
-- ziws _ ZeroIdx = SuccIdx ZeroIdx
-- ziws ixt (SuccIdx ix) = case ixt ix of
-- ZeroIdx -> ZeroIdx
-- (SuccIdx ix) -> SuccIdx (SuccIdx ix)
-- HOAS-conversion
-- ---------------
-- Conversion from HOAS to Debruijn form in such a way that it is easier to use during the transform
-- The many arguments and fundeps are necessary because of overlap.
--
-- | Maps an HOAS array-function type onto its de Bruijn counterpart over
-- the term type @acc@.
type family AfunctionR (acc :: * -> * -> *) aenv f
type instance AfunctionR acc aenv (S.Acc a) = acc aenv a
type instance AfunctionR acc aenv (S.Acc a -> r) = acc aenv a -> AfunctionR acc aenv r

-- | HOAS terms that can be converted into a directly applyable de Bruijn
-- form.
class S.Afunction f => Convertible f where
  applyable :: Kit acc => {- dummy -} f -> PreOpenAfun acc aenv (S.AfunctionR f) -> AfunctionR acc aenv f

-- Convenient HOAS term conversion. Requires AllowAmbiguousTypes.
fromHOAS :: forall acc aenv f. (Convertible f, Kit acc) => f -> AfunctionR acc aenv f
fromHOAS f = applyable (undefined :: f) f'
  where
    f' :: PreOpenAfun acc aenv (S.AfunctionR f)
    -- NOTE(review): the converted term appears to be closed, so the
    -- 'undefined' weakening is presumably never consulted -- confirm.
    f' = weaken undefined . fromOpenAfun . cvtS $ f

instance Arrays a => Convertible (S.Acc a) where
  applyable _ (Abody a) = a
  applyable _ _ = $internalError "applyable" "Inconsistent valuation"

instance (Arrays a, Convertible b) => Convertible (S.Acc a -> b) where
  applyable _ = as
    where
      -- Apply one argument by let-binding it and inlining into the body.
      as :: Kit acc => PreOpenAfun acc aenv (a -> S.AfunctionR b) -> acc aenv a -> AfunctionR acc aenv b
      as (Alam f) = \a -> applyable (undefined :: b) (rebindIndex f ZeroIdx `inlineA` extract a)
      as _ = $internalError "applyable" "Inconsistent valuation"
-- | Rebind all occurrences of the given environment index in a function body
-- through a fresh let-binding, so the variable is referenced via 'ZeroIdx'.
-- Walks under every 'Alam', weakening the index as it goes.
rebindIndex :: forall acc aenv f a. (Kit acc, Arrays a)
            => PreOpenAfun acc aenv f
            -> Idx aenv a
            -> PreOpenAfun acc aenv f
rebindIndex (Abody b) ix = Abody
                         $^ Alet (inject $ Avar ix)
                         $ weaken (ixt ix) b
  where
    -- Map references to `ix` onto the new innermost binder; shift the rest.
    ixt :: forall aenv a s. Idx aenv a -> Idx aenv s -> Idx (aenv,a) s
    ixt ix ix' | Just REFL <- matchIdx ix ix'
               = ZeroIdx
               | otherwise
               = SuccIdx ix'
rebindIndex (Alam f) a = Alam $ rebindIndex f (SuccIdx a)
-- Utility functions
-- ------------------
-- | First component of an array pair.
fstA :: forall acc aenv a b. (Kit acc, Arrays a, Arrays b)
     => acc aenv (a,b)
     -> acc aenv a
fstA p = inject (Aprj (SuccTupIdx ZeroTupIdx) p)
-- | Second component of an array pair.
sndA :: forall acc aenv a b. (Kit acc, Arrays a, Arrays b)
     => acc aenv (a,b)
     -> acc aenv b
sndA p = inject (Aprj ZeroTupIdx p)
-- | First component of a scalar pair.
fstE :: forall acc env aenv a b. (Elt a, Elt b)
     => PreOpenExp acc env aenv (a,b)
     -> PreOpenExp acc env aenv a
fstE p = Prj (SuccTupIdx ZeroTupIdx) p
-- | Second component of a scalar pair.
sndE :: forall acc env aenv a b. (Elt a, Elt b)
     => PreOpenExp acc env aenv (a,b)
     -> PreOpenExp acc env aenv b
sndE p = Prj ZeroTupIdx p
-- | Build a scalar pair.
tup :: forall acc env aenv a b. (Elt a,Elt b)
    => PreOpenExp acc env aenv a
    -> PreOpenExp acc env aenv b
    -> PreOpenExp acc env aenv (a,b)
tup x y = Tuple (NilTup `SnocTup` x `SnocTup` y)
-- | Build an array pair.
atup :: forall acc aenv a b. (Kit acc, Arrays a, Arrays b)
     => acc aenv a
     -> acc aenv b
     -> acc aenv (a,b)
atup x y = inject (Atuple (SnocAtup (SnocAtup NilAtup x) y))
-- | Build an array triple.
atup3 :: forall acc aenv a b c. (Kit acc, Arrays a, Arrays b, Arrays c)
      => acc aenv a
      -> acc aenv b
      -> acc aenv c
      -> acc aenv (a,b,c)
atup3 x y z = inject (Atuple (SnocAtup (SnocAtup (SnocAtup NilAtup x) y) z))
-- | Replicate an array value to a 'Vector'' of the given size.
replicateA :: forall acc aenv a.
              (Kit acc, Arrays a)
           => acc aenv a
           -> Size acc aenv
           -> acc aenv (Vector' a)
replicateA a size
  | IsC <- isArraysFlat (undefined :: a)   -- proxy: only the type of @a@ is inspected
  = replicateC (inject $ Unit size) a
-- | Replicate a scalar expression into a 'Vector' of the given size,
-- implemented as a unit array replicated along one fixed dimension.
replicateE :: forall acc aenv e.
              (Kit acc, Elt e)
           => PreExp acc aenv e
           -> Size acc aenv
           -> PreOpenAcc acc aenv (Vector e)
replicateE e size = Replicate (SliceFixed SliceNil) (IndexCons IndexNil size) (inject $ Unit e)
-- | Scalar variable referring to the innermost binder.
var0 :: (Kit acc, Elt t)
     => PreOpenExp acc (env, t) aenv t
var0 = Var ZeroIdx
-- | Scalar variable one binder out.
var1 :: (Kit acc, Elt t)
     => PreOpenExp acc ((env, t), s) aenv t
var1 = Var (SuccIdx ZeroIdx)
-- | Array variable referring to the innermost binder.
avar0 :: (Kit acc, Arrays t)
      => acc (aenv, t) t
avar0 = inject (Avar ZeroIdx)
-- | Array variable one binder out.
avar1 :: (Kit acc, Arrays t)
      => acc ((aenv, t), s) t
avar1 = inject (Avar (SuccIdx ZeroIdx))
-- | Array variable two binders out.
avar2 :: (Kit acc, Arrays t)
      => acc (((aenv, t), s), r) t
avar2 = inject (Avar (SuccIdx (SuccIdx ZeroIdx)))
-- | Array variable three binders out.
avar3 :: (Kit acc, Arrays t)
      => acc ((((aenv, t), s), r), q) t
avar3 = inject (Avar (SuccIdx (SuccIdx (SuccIdx ZeroIdx))))
-- | Read the single element of a scalar array.
the :: Elt e
    => acc aenv (Scalar e)
    -> PreOpenExp acc env aenv e
the arr = Index arr (Const ())
-- | Turn an Int expression into a one-dimensional shape.
index1 :: PreOpenExp acc env aenv Int
       -> PreOpenExp acc env aenv DIM1
index1 i = IndexCons IndexNil i
-- | Turn a binary operator into an operator over (flag, value) pairs, as used
-- for segmented scans: the flag components are OR-ed together, and the value
-- is reset to the right operand's value whenever its flag is non-zero.
-- NOTE(review): argument order to the combinator is swapped (var0/var1) —
-- presumably deliberate for scan direction; confirm against callers.
segmented :: (Elt e, Kit acc)
          => PreOpenFun acc env aenv (e -> e -> e)
          -> PreOpenFun acc env aenv ((Int, e) -> (Int, e) -> (Int, e))
segmented f = Lam . Lam . Body
  $ tup (PrimBOr integralType `PrimApp` tup (fstE var1) (fstE var0))
        (Cond (PrimNEq scalarType `PrimApp` tup (fstE var0) (Const 0))
          (sndE var0)
          (subApplyE2 (weakenE2 f) (sndE var0) (sndE var1)))
-- | Lift a renaming under one additional binder, leaving the new
-- innermost variable untouched.
newTop :: env :> env'
       -> (env,t) :> (env', t)
newTop _  ZeroIdx      = ZeroIdx
newTop wk (SuccIdx ix) = SuccIdx (wk ix)
-- | Weaken by one array binder.
weakenA1 :: Sink f
         => f aenv t
         -> f (aenv,s) t
weakenA1 x = weaken SuccIdx x
-- | Weaken by two array binders.
weakenA2 :: Sink f
         => f aenv t
         -> f ((aenv,r),s) t
weakenA2 x = weaken (\ix -> SuccIdx (SuccIdx ix)) x
-- | Weaken by three array binders.
weakenA3 :: Sink f
         => f aenv t
         -> f (((aenv,q),r),s) t
weakenA3 x = weaken (\ix -> SuccIdx (SuccIdx (SuccIdx ix))) x
-- | Weaken by four array binders.
weakenA4 :: Sink f
         => f aenv t
         -> f ((((aenv,p),q),r),s) t
weakenA4 x = weaken (\ix -> SuccIdx (SuccIdx (SuccIdx (SuccIdx ix)))) x
-- | Weaken by one scalar binder.
weakenE1 :: SinkExp f
         => f env aenv t
         -> f (env,s) aenv t
weakenE1 x = weakenE SuccIdx x
-- | Weaken by two scalar binders.
weakenE2 :: SinkExp f
         => f env aenv t
         -> f ((env,r),s) aenv t
weakenE2 x = weakenE (\ix -> SuccIdx (SuccIdx ix)) x
-- | Build a unary scalar function from a Haskell-level function on terms.
fun1 :: (Kit acc, Elt a, Elt b)
     => (PreOpenExp acc (env,a) aenv a -> PreOpenExp acc (env,a) aenv b)
     -> PreOpenFun acc env aenv (a -> b)
fun1 f = Lam $ Body $ f var0
-- | Build a binary scalar function from a Haskell-level function on terms.
fun2 :: (Kit acc, Elt a, Elt b, Elt c)
     => (PreOpenExp acc ((env,a), b) aenv a -> PreOpenExp acc ((env,a), b) aenv b -> PreOpenExp acc ((env,a), b) aenv c)
     -> PreOpenFun acc env aenv (a -> b -> c)
fun2 f = Lam $ Lam $ Body $ f var1 var0
--weakenSupp1 :: Kit acc
-- => Supplement acc env env' aenv
-- -> Supplement acc env env' (aenv,s)
--weakenSupp1 BaseSup = BaseSup
--weakenSupp1 (PushSup s b) = PushSup (weakenSupp1 s) (weakenA1 b)
-- | Partial projection of array indices from the un-lifted environment into
-- the lifted environment described by the context. Returns 'Nothing' for a
-- variable that was lifted ('PushLAccC') and so has no direct counterpart;
-- lifted scalar binders ('PushLExpC') introduce an extra array binder, hence
-- the 'SuccIdx' shift in that case.
unliftA :: forall env aenv env' aenv'.
           Context env aenv env' aenv'
        -> (aenv :?> aenv')
unliftA (PushAccC _)  ZeroIdx      = Just ZeroIdx
unliftA (PushAccC d)  (SuccIdx ix) = SuccIdx <$> unliftA d ix
unliftA (PushLAccC _) ZeroIdx      = Nothing
unliftA (PushLAccC d) (SuccIdx ix) = SuccIdx <$> unliftA d ix
unliftA (PushExpC d)  ix           = unliftA d ix
unliftA (PushLExpC d) ix           = SuccIdx <$> unliftA d ix
-- Fixed typo in the panic message ("evalution" -> "evaluation").
unliftA _             _            = error "unliftA: Inconsistent evaluation"
--unliftE :: forall env aenv env' aenv'.
-- Context env aenv env' aenv'
-- -> (env :?> env')
--unliftE (PushAccC d) ix = unliftE d ix
--unliftE (PushLAccC d) ix = unliftE d ix
--unliftE (PushExpC _) ZeroIdx = Just ZeroIdx
--unliftE (PushExpC d) (SuccIdx ix) = SuccIdx <$> unliftE d ix
--unliftE (PushLExpC _) ZeroIdx = Nothing
--unliftE (PushLExpC d) (SuccIdx ix) = unliftE d ix
--unliftE _ _ = error "unliftE: Inconsistent evalution"
-- | Rebuild a term in the lifted environment, failing with 'Nothing' if it
-- mentions any variable that was lifted away (see 'unliftA').
rebuildToLift :: Rebuildable f
              => Context env aenv env' aenv'
              -> f aenv t
              -> Maybe (f aenv' t)
rebuildToLift d = rebuildPartial (liftA Avar . unliftA d)
-- | Convert a HOAS function to de Bruijn form with all frontend
-- optimisations enabled.
-- NOTE(review): the `weaken undefined` relies on the converted term being
-- closed, so the renaming is never consulted — TODO confirm.
cvtS :: S.Afunction f => f -> OpenAfun aenv (S.AfunctionR f)
cvtS = weaken undefined . S.convertAfun True True True True
-- | Apply a binary array function by let-binding both arguments.
-- Only defined for a manifest two-argument lambda.
subApply2 :: (Kit acc, Arrays a)
          => PreOpenAfun acc aenv (a -> b -> c)
          -> acc aenv a
          -> acc aenv b
          -> PreOpenAcc acc aenv c
subApply2 (Alam (Alam (Abody f))) a b
  = Alet a
  $ inject $ Alet (weakenA1 b)   -- b must be weakened past the binder for a
  $ f
subApply2 _ _ _ = error "subApply2: inconsistent evaluation"
-- | Apply a binary scalar function by let-binding both arguments.
-- Only defined for a manifest two-argument lambda.
subApplyE2 :: Kit acc
           => PreOpenFun acc env aenv (a -> b -> c)
           -> PreOpenExp acc env aenv a
           -> PreOpenExp acc env aenv b
           -> PreOpenExp acc env aenv c
subApplyE2 (Lam (Lam (Body f))) a b
  = Let a
  $ Let (weakenE1 b)             -- b must be weakened past the binder for a
  $ f
subApplyE2 _ _ _ = error "subApplyE2: inconsistent evaluation"
--partApply :: Kit acc
-- => PreOpenAfun acc aenv (a -> r)
-- -> acc aenv a
-- -> PreOpenAfun acc aenv r
--partApply (Alam f) a
-- = app id a f
-- where
-- app :: forall acc env aenv aenv' a f. (Kit acc, Arrays a)
-- => (aenv' :> (aenv, a))
-- -> acc aenv a
-- -> PreOpenAfun acc aenv' f
-- -> PreOpenAfun acc aenv f
-- app ixt a (Abody b) = Abody (inject $ Alet a $ weaken ixt b)
-- app ixt a (Alam f) = Alam (app ixt' (weaken SuccIdx a) f)
-- where
-- ixt' :: Idx (aenv', s) t
-- -> Idx ((aenv, s), a) t
-- ixt' ZeroIdx = SuccIdx ZeroIdx
-- ixt' (SuccIdx ix) = case ixt ix of
-- ZeroIdx -> ZeroIdx
-- (SuccIdx ix) -> SuccIdx (SuccIdx ix)
--partApply _ _
-- = error "partApply: inconsistent evaluation"
infixr 0 $^
-- | Like ('$'), but injects the right-hand side into the knot-tied
-- representation first.
($^) :: Kit acc
     => (acc aenv a -> t)
     -> PreOpenAcc acc aenv a
     -> t
f $^ a = f (inject a)
-- Debugging
-- ----------
-- | Emit a vectorisation trace message when the corresponding debug flag
-- is enabled.
trace :: String -> String -> a -> a
trace hdr msg = Debug.trace Debug.dump_vectorisation (hdr ++ ": " ++ msg)
-- Sequence vectorisation
-- ------------------------
-- | True iff an array function contains no 'Collect' (sequence) node.
sequenceFreeAfun :: OpenAfun aenv t -> Bool
sequenceFreeAfun (Alam f)  = sequenceFreeAfun f
sequenceFreeAfun (Abody b) = sequenceFreeAcc b
-- | True iff a scalar function contains no 'Collect' (sequence) node.
sequenceFreeFun :: OpenFun env aenv t -> Bool
sequenceFreeFun (Lam f)  = sequenceFreeFun f
sequenceFreeFun (Body b) = sequenceFreeExp b
-- | True iff a scalar expression contains no 'Collect' (sequence) node.
-- Purely structural: every constructor recurses into its sub-terms; only
-- embedded array computations can actually contain a sequence.
sequenceFreeExp :: OpenExp env aenv t -> Bool
sequenceFreeExp = travE
  where
    travF :: OpenFun env aenv t -> Bool
    travF = sequenceFreeFun
    travT :: Tuple (OpenExp env aenv) t -> Bool
    travT = sequenceFreeTup
    travA :: OpenAcc aenv t -> Bool
    travA = sequenceFreeAcc
    travE :: OpenExp env aenv t -> Bool
    travE exp =
      case exp of
        Let bnd body         -> travE bnd && travE body
        Var _                -> True
        Const _              -> True
        Tuple tup            -> travT tup
        Prj _ t              -> travE t
        IndexNil             -> True
        IndexCons sh sz      -> travE sh && travE sz
        IndexHead sh         -> travE sh
        IndexTail sh         -> travE sh
        IndexAny             -> True
        IndexSlice _ ix sh   -> travE ix && travE sh
        IndexFull _ ix sl    -> travE ix && travE sl
        ToIndex sh ix        -> travE sh && travE ix
        FromIndex sh ix      -> travE sh && travE ix
        Cond p t e           -> travE p && travE t && travE e
        While p f x          -> travF p && travF f && travE x
        PrimConst _          -> True
        PrimApp _ x          -> travE x
        Index a sh           -> travA a && travE sh
        LinearIndex a i      -> travA a && travE i
        Shape a              -> travA a
        ShapeSize sh         -> travE sh
        Intersect s t        -> travE s && travE t
        Union s t            -> travE s && travE t
        Foreign _ f e        -> travF f && travE e
-- | True iff every component of an array tuple is sequence-free.
sequenceFreeAtup :: Atuple (OpenAcc aenv) t -> Bool
sequenceFreeAtup NilAtup        = True
sequenceFreeAtup (SnocAtup t e) = sequenceFreeAtup t && sequenceFreeAcc e
-- | True iff every component of a scalar tuple is sequence-free.
sequenceFreeTup :: Tuple (OpenExp env aenv) t -> Bool
sequenceFreeTup NilTup        = True
sequenceFreeTup (SnocTup t e) = sequenceFreeTup t && sequenceFreeExp e
-- | True iff an array computation contains no 'Collect' (sequence) node.
-- Structural traversal; the only interesting constructor is 'Collect',
-- which is the embedding of a sequence computation.
sequenceFreeAcc :: OpenAcc aenv a -> Bool
sequenceFreeAcc = travA
  where
    travAfun :: OpenAfun aenv t -> Bool
    travAfun = sequenceFreeAfun
    travE :: Elt t => Exp aenv t -> Bool
    travE = sequenceFreeExp
    travF :: Fun aenv t -> Bool
    travF = sequenceFreeFun
    travAT :: Atuple (OpenAcc aenv) t -> Bool
    travAT = sequenceFreeAtup
    travA :: OpenAcc aenv t -> Bool
    travA (OpenAcc acc) =
      case acc of
        Alet bnd body        -> travA bnd && travA body
        Avar _               -> True
        Atuple tup           -> travAT tup
        Aprj _ a             -> travA a
        Apply f a            -> travAfun f && travA a
        Aforeign _ afun acc  -> travAfun afun && travA acc
        Acond p t e          -> travE p && travA t && travA e
        Awhile p f a         -> travAfun p && travAfun f && travA a
        Use _                -> True
        Unit e               -> travE e
        Reshape e a          -> travE e && travA a
        Generate e f         -> travE e && travF f
        Transform sh ix f a  -> travE sh && travF ix && travF f && travA a
        Replicate _ slix a   -> travE slix && travA a
        Slice _ a slix       -> travA a && travE slix
        Map f a              -> travF f && travA a
        ZipWith f a1 a2      -> travF f && travA a1 && travA a2
        Fold f z a           -> travF f && travE z && travA a
        Fold1 f a            -> travF f && travA a
        Scanl f z a          -> travF f && travE z && travA a
        Scanl' f z a         -> travF f && travE z && travA a
        Scanl1 f a           -> travF f && travA a
        Scanr f z a          -> travF f && travE z && travA a
        Scanr' f z a         -> travF f && travE z && travA a
        Scanr1 f a           -> travF f && travA a
        Permute f1 a1 f2 a2  -> travF f1 && travA a1 && travF f2 && travA a2
        Backpermute sh f a   -> travE sh && travF f && travA a
        Stencil f _ a        -> travF f && travA a
        Stencil2 f _ a1 _ a2 -> travF f && travA a1 && travA a2
        -- Interesting case:
        Collect _            -> False
        FoldSeg f z a s      -> travF f && travE z && travA a && travA s
        Fold1Seg f a s       -> travF f && travA a && travA s
-- | Vectorise a closed sequence computation with the aggressive strategy.
vectoriseSeq :: PreOpenSeq OpenAcc () () a -> PreOpenSeq OpenAcc () () a
vectoriseSeq s = vectoriseOpenSeq Aggressive EmptyC s
-- | Vectorise the producers and consumers of a sequence computation.
-- 'MapSeq' is the interesting case: when the mapped function is itself
-- sequence-free it is lifted to a 'ChunkedMapSeq'; otherwise it is left
-- element-wise (nested sequences cannot be lifted yet).
vectoriseOpenSeq :: forall aenv senv a.
                    Strength
                 -> Context () aenv () aenv
                 -> PreOpenSeq OpenAcc aenv senv a
                 -> PreOpenSeq OpenAcc aenv senv a
vectoriseOpenSeq strength ctx seq =   -- note: `seq` shadows Prelude.seq here
  case seq of
    Producer p s -> Producer (cvtP p) (vectoriseOpenSeq strength ctx s)
    Consumer c   -> Consumer (cvtC c)
    Reify ix     -> Reify ix
  where
    cvtP :: Producer OpenAcc aenv senv t -> Producer OpenAcc aenv senv t
    cvtP p =
      case p of
        StreamIn arrs        -> StreamIn arrs
        ToSeq sl slix a      -> ToSeq sl slix (cvtA a)
        -- Interesting case:
        MapSeq f x
          | sequenceFreeAfun f -> trace "vectoriseSeq" ("MapSeq succesfully vectorised: " ++ show (liftOpenAfun1 strength ctx (cvtAfun f))) $
            ChunkedMapSeq (liftOpenAfun1 strength ctx (cvtAfun f)) x
          -- The following case is needed because we don't know how to
          -- lift sequences yet.
          | otherwise -> trace "vectoriseSeq" ("MapSeq could not be vectorised: " ++ show (cvtAfun f)) $
            MapSeq (cvtAfun f) x
        ChunkedMapSeq f x    -> ChunkedMapSeq (cvtAfun f) x
        ZipWithSeq f x y     -> ZipWithSeq (cvtAfun f) x y
        ScanSeq f e x        -> ScanSeq (cvtF f) (cvtE e) x
    cvtC :: Consumer OpenAcc aenv senv t -> Consumer OpenAcc aenv senv t
    cvtC c =
      case c of
        FoldSeq f e x        -> FoldSeq (cvtF f) (cvtE e) x
        FoldSeqFlatten f a x -> FoldSeqFlatten (cvtAfun f) (cvtA a) x
        Stuple t             -> Stuple (cvtCT t)
    cvtCT :: Atuple (Consumer OpenAcc aenv senv) t -> Atuple (Consumer OpenAcc aenv senv) t
    cvtCT NilAtup        = NilAtup
    cvtCT (SnocAtup t c) = SnocAtup (cvtCT t) (cvtC c)
    cvtE :: Elt t => Exp aenv t -> Exp aenv t
    cvtE = vectoriseSeqOpenExp strength ctx
    cvtF :: Fun aenv t -> Fun aenv t
    cvtF = vectoriseSeqOpenFun strength ctx
    cvtA :: OpenAcc aenv t -> OpenAcc aenv t
    cvtA = vectoriseSeqOpenAcc strength ctx
    cvtAfun :: OpenAfun aenv t -> OpenAfun aenv t
    cvtAfun = vectoriseSeqOpenAfun strength ctx
-- | Discard the scalar portion of a context, keeping only the array binders.
-- Only unlifted contexts may appear here; the lifted constructors
-- ('PushLExpC', 'PushLAccC') are assumed unreachable at call sites.
stripExpCtx :: Context env aenv env aenv -> Context () aenv () aenv
stripExpCtx c =
  case c of
    EmptyC      -> EmptyC
    PushExpC c' -> stripExpCtx c'
    PushAccC c' -> PushAccC (stripExpCtx c')
    -- Improved panic message: identify the offending function.
    _           -> error "stripExpCtx: unreachable (lifted context)"
-- | Vectorise all sequence computations embedded in a scalar expression.
-- A one-to-one structural rewrite; only embedded array computations (via
-- 'cvtA') can actually contain sequences.
vectoriseSeqOpenExp :: forall env aenv a.
                       Strength
                    -> Context env aenv env aenv
                    -> OpenExp env aenv a
                    -> OpenExp env aenv a
vectoriseSeqOpenExp strength ctx = cvtE
  where
    cvtA :: OpenAcc aenv t -> OpenAcc aenv t
    cvtA a = vectoriseSeqOpenAcc strength (stripExpCtx ctx) a
    cvtT :: Tuple (OpenExp env aenv) t -> Tuple (OpenExp env aenv) t
    cvtT tup = case tup of
      NilTup      -> NilTup
      SnocTup t a -> cvtT t `SnocTup` cvtE a
    cvtF :: OpenFun env aenv t -> OpenFun env aenv t
    cvtF = vectoriseSeqOpenFun strength ctx
    cvtE :: OpenExp env aenv t -> OpenExp env aenv t
    cvtE exp =
      case exp of
        Let bnd body       -> Let (cvtE bnd) (vectoriseSeqOpenExp strength (PushExpC ctx) body)
        Var ix             -> Var ix
        Const c            -> Const c
        Tuple tup          -> Tuple (cvtT tup)
        Prj tup t          -> Prj tup (cvtE t)
        IndexNil           -> IndexNil
        IndexCons sh sz    -> IndexCons (cvtE sh) (cvtE sz)
        IndexHead sh       -> IndexHead (cvtE sh)
        IndexTail sh       -> IndexTail (cvtE sh)
        IndexAny           -> IndexAny
        IndexSlice x ix sh -> IndexSlice x (cvtE ix) (cvtE sh)
        IndexFull x ix sl  -> IndexFull x (cvtE ix) (cvtE sl)
        ToIndex sh ix      -> ToIndex (cvtE sh) (cvtE ix)
        FromIndex sh ix    -> FromIndex (cvtE sh) (cvtE ix)
        Cond p t e         -> Cond (cvtE p) (cvtE t) (cvtE e)
        While p f x        -> While (cvtF p) (cvtF f) (cvtE x)
        PrimConst c        -> PrimConst c
        PrimApp f x        -> PrimApp f (cvtE x)
        Index a sh         -> Index (cvtA a) (cvtE sh)
        LinearIndex a i    -> LinearIndex (cvtA a) (cvtE i)
        Shape a            -> Shape (cvtA a)
        ShapeSize sh       -> ShapeSize (cvtE sh)
        Intersect s t      -> Intersect (cvtE s) (cvtE t)
        Union s t          -> Union (cvtE s) (cvtE t)
        -- Foreign functions are closed, so converted in an empty context.
        Foreign ff f e     -> Foreign ff (vectoriseSeqOpenFun strength EmptyC f) (cvtE e)
-- | Vectorise the sequence computations in a closed array computation.
vectoriseSeqAcc :: OpenAcc () a -> OpenAcc () a
vectoriseSeqAcc acc = vectoriseSeqOpenAcc Aggressive EmptyC acc
-- | Vectorise all sequence computations embedded in an array computation.
-- A one-to-one structural rewrite; 'Collect' is the interesting case, where
-- the contained sequence is handed to 'vectoriseOpenSeq'.
vectoriseSeqOpenAcc :: forall aenv a.
                       Strength
                    -> Context () aenv () aenv
                    -> OpenAcc aenv a
                    -> OpenAcc aenv a
vectoriseSeqOpenAcc strength ctx = cvtA
  where
    cvtT :: Atuple (OpenAcc aenv) t -> Atuple (OpenAcc aenv) t
    cvtT atup = case atup of
      NilAtup      -> NilAtup
      SnocAtup t a -> cvtT t `SnocAtup` cvtA a
    cvtAfun :: OpenAfun aenv t -> OpenAfun aenv t
    cvtAfun = vectoriseSeqOpenAfun strength ctx
    cvtE :: Elt t => Exp aenv t -> Exp aenv t
    cvtE = vectoriseSeqOpenExp strength ctx
    cvtF :: Fun aenv t -> Fun aenv t
    cvtF = vectoriseSeqOpenFun strength ctx
    cvtA :: OpenAcc aenv t -> OpenAcc aenv t
    cvtA (OpenAcc pacc) = OpenAcc $ case pacc of
      Alet bnd body             -> Alet (cvtA bnd) (vectoriseSeqOpenAcc strength (PushAccC ctx) body)
      Avar ix                   -> Avar ix
      Atuple tup                -> Atuple (cvtT tup)
      Aprj tup a                -> Aprj tup (cvtA a)
      Apply f a                 -> Apply (cvtAfun f) (cvtA a)
      -- Foreign array functions are closed: converted standalone.
      Aforeign ff afun acc      -> Aforeign ff (vectoriseSeqAfun afun) (cvtA acc)
      Acond p t e               -> Acond (cvtE p) (cvtA t) (cvtA e)
      Awhile p f a              -> Awhile (cvtAfun p) (cvtAfun f) (cvtA a)
      Use a                     -> Use a
      Unit e                    -> Unit (cvtE e)
      Reshape e a               -> Reshape (cvtE e) (cvtA a)
      Generate e f              -> Generate (cvtE e) (cvtF f)
      Transform sh ix f a       -> Transform (cvtE sh) (cvtF ix) (cvtF f) (cvtA a)
      Replicate sl slix a       -> Replicate sl (cvtE slix) (cvtA a)
      Slice sl a slix           -> Slice sl (cvtA a) (cvtE slix)
      Map f a                   -> Map (cvtF f) (cvtA a)
      ZipWith f a1 a2           -> ZipWith (cvtF f) (cvtA a1) (cvtA a2)
      Fold f z a                -> Fold (cvtF f) (cvtE z) (cvtA a)
      Fold1 f a                 -> Fold1 (cvtF f) (cvtA a)
      Scanl f z a               -> Scanl (cvtF f) (cvtE z) (cvtA a)
      Scanl' f z a              -> Scanl' (cvtF f) (cvtE z) (cvtA a)
      Scanl1 f a                -> Scanl1 (cvtF f) (cvtA a)
      Scanr f z a               -> Scanr (cvtF f) (cvtE z) (cvtA a)
      Scanr' f z a              -> Scanr' (cvtF f) (cvtE z) (cvtA a)
      Scanr1 f a                -> Scanr1 (cvtF f) (cvtA a)
      Permute f1 a1 f2 a2       -> Permute (cvtF f1) (cvtA a1) (cvtF f2) (cvtA a2)
      Backpermute sh f a        -> Backpermute (cvtE sh) (cvtF f) (cvtA a)
      Stencil f b a             -> Stencil (cvtF f) b (cvtA a)
      Stencil2 f b1 a1 b2 a2    -> Stencil2 (cvtF f) b1 (cvtA a1) b2 (cvtA a2)
      Collect s                 -> Collect (vectoriseOpenSeq strength ctx s)
      FoldSeg f z a s           -> FoldSeg (cvtF f) (cvtE z) (cvtA a) (cvtA s)
      Fold1Seg f a s            -> Fold1Seg (cvtF f) (cvtA a) (cvtA s)
-- | Vectorise the sequences in a closed array function.
vectoriseSeqAfun :: OpenAfun () t -> OpenAfun () t
vectoriseSeqAfun f = vectoriseSeqOpenAfun Aggressive EmptyC f
-- | Vectorise the sequences in an open scalar function, extending the
-- context with one scalar binder per lambda.
vectoriseSeqOpenFun :: forall env aenv t.
                       Strength
                    -> Context env aenv env aenv
                    -> OpenFun env aenv t
                    -> OpenFun env aenv t
vectoriseSeqOpenFun strength ctx (Body b) = Body (vectoriseSeqOpenExp strength ctx b)
vectoriseSeqOpenFun strength ctx (Lam f)  = Lam (vectoriseSeqOpenFun strength (PushExpC ctx) f)
-- | Vectorise the sequences in an open array function, extending the
-- context with one array binder per lambda.
vectoriseSeqOpenAfun :: Strength
                     -> Context () aenv () aenv
                     -> OpenAfun aenv t
                     -> OpenAfun aenv t
vectoriseSeqOpenAfun strength ctx (Abody b) = Abody (vectoriseSeqOpenAcc strength ctx b)
vectoriseSeqOpenAfun strength ctx (Alam f)  = Alam (vectoriseSeqOpenAfun strength (PushAccC ctx) f)
| rrnewton/accelerate | Data/Array/Accelerate/Trafo/Vectorise.hs | bsd-3-clause | 119,637 | 0 | 24 | 43,818 | 38,735 | 19,372 | 19,363 | -1 | -1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[StgLint]{A ``lint'' pass to check for Stg correctness}
-}
{-# LANGUAGE CPP #-}
module StgLint ( lintStgBindings ) where
import StgSyn
import Bag ( Bag, emptyBag, isEmptyBag, snocBag, bagToList )
import Id ( Id, idType, isLocalId )
import VarSet
import DataCon
import CoreSyn ( AltCon(..) )
import PrimOp ( primOpType )
import Literal ( literalType )
import Maybes
import Name ( getSrcLoc )
import ErrUtils ( MsgDoc, Severity(..), mkLocMessage )
import Type
import TyCon
import Util
import SrcLoc
import Outputable
import Control.Monad
import Data.Function
#include "HsVersions.h"
{-
Checks for
(a) *some* type errors
(b) locally-defined variables used but not defined
Note: unless -dverbose-stg is on, display of lint errors will result
in "panic: bOGUS_LVs".
WARNING:
~~~~~~~~
This module has suffered bit-rot; it is likely to yield lint errors
for Stg code that is currently perfectly acceptable for code
generation. Solution: don't use it! (KSW 2000-05).
************************************************************************
* *
\subsection{``lint'' for various constructs}
* *
************************************************************************
@lintStgBindings@ is the top-level interface function.
-}
-- | Lint a list of top-level STG bindings, panicking with the accumulated
-- messages if any check failed; otherwise return the bindings unchanged.
-- The first argument names the pass that produced the code, for diagnostics.
lintStgBindings :: String -> [StgBinding] -> [StgBinding]
lintStgBindings whodunnit binds
  = {-# SCC "StgLint" #-}
    case (initL (lint_binds binds)) of
      Nothing  -> binds
      Just msg -> pprPanic "" (vcat [
                        text "*** Stg Lint ErrMsgs: in" <+>
                              text whodunnit <+> text "***",
                        msg,
                        text "*** Offending Program ***",
                        pprStgBindings binds,
                        text "*** End of Offense ***"])
  where
    -- Each binding group's binders scope over the remaining bindings.
    lint_binds :: [StgBinding] -> LintM ()
    lint_binds [] = return ()
    lint_binds (bind:binds) = do
        binders <- lintStgBinds bind
        addInScopeVars binders $
            lint_binds binds
-- | Lint an STG argument, returning its (exact) type.
lintStgArg :: StgArg -> LintM (Maybe Type)
lintStgArg arg = case arg of
  StgLitArg lit -> return (Just (literalType lit))
  StgVarArg v   -> lintStgVar v
-- | Check that a variable is in scope and return its type.
lintStgVar :: Id -> LintM (Maybe Kind)
lintStgVar v = checkInScope v >> return (Just (idType v))
-- | Lint one binding group and return its binders.  For recursive groups
-- the binders are in scope while checking the right-hand sides.
lintStgBinds :: StgBinding -> LintM [Id] -- Returns the binders
lintStgBinds (StgNonRec binder rhs) = do
    lint_binds_help (binder,rhs)
    return [binder]
lintStgBinds (StgRec pairs)
  = addInScopeVars binders $ do
        mapM_ lint_binds_help pairs
        return binders
  where
    binders = [b | (b,_) <- pairs]
-- | Lint a single (binder, rhs) pair: checks the RHS and that the binder
-- does not have an unlifted type.  The binder/RHS type match itself is
-- deliberately NOT checked (unsafeCoerce makes it unsound; see below).
lint_binds_help :: (Id, StgRhs) -> LintM ()
lint_binds_help (binder, rhs)
  = addLoc (RhsOf binder) $ do
        -- Check the rhs
        _maybe_rhs_ty <- lintStgRhs rhs
        -- Check binder doesn't have unlifted type
        checkL (not (isUnliftedType binder_ty))
               (mkUnliftedTyMsg binder rhs)
        -- Check match to RHS type
        -- Actually we *can't* check the RHS type, because
        -- unsafeCoerce means it really might not match at all
        -- notably; eg x::Int = (error @Bool "urk") |> unsafeCoerce...
        -- case maybe_rhs_ty of
        --  Nothing     -> return ()
        --  Just rhs_ty -> checkTys binder_ty
        --                          rhs_ty
        ---                         (mkRhsMsg binder rhs_ty)
        return ()
  where
    binder_ty = idType binder
-- | Lint a right-hand side.  A closure with binders yields a function type
-- over its binders; a constructor application is checked like a saturated
-- function application of the constructor's worker type.
lintStgRhs :: StgRhs -> LintM (Maybe Type)   -- Just ty => type is exact
lintStgRhs (StgRhsClosure _ _ _ _ [] expr)
  = lintStgExpr expr
lintStgRhs (StgRhsClosure _ _ _ _ binders expr)
  = addLoc (LambdaBodyOf binders) $
    addInScopeVars binders $ runMaybeT $ do
        body_ty <- MaybeT $ lintStgExpr expr
        return (mkFunTys (map idType binders) body_ty)
lintStgRhs (StgRhsCon _ con args) = runMaybeT $ do
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp con_ty arg_tys (mkRhsConMsg con_ty arg_tys)
  where
    con_ty = dataConRepType con
-- | Lint an STG expression, returning its type where it can be computed
-- exactly ('Nothing' propagates "don't know" rather than failing).
lintStgExpr :: StgExpr -> LintM (Maybe Type) -- Just ty => type is exact
lintStgExpr (StgLit l) = return (Just (literalType l))
lintStgExpr e@(StgApp fun args) = runMaybeT $ do
    fun_ty <- MaybeT $ lintStgVar fun
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp fun_ty arg_tys (mkFunAppMsg fun_ty arg_tys e)
lintStgExpr e@(StgConApp con args) = runMaybeT $ do
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp con_ty arg_tys (mkFunAppMsg con_ty arg_tys e)
  where
    con_ty = dataConRepType con
lintStgExpr e@(StgOpApp (StgPrimOp op) args _) = runMaybeT $ do
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp op_ty arg_tys (mkFunAppMsg op_ty arg_tys e)
  where
    op_ty = primOpType op
lintStgExpr (StgOpApp _ args res_ty) = runMaybeT $ do
    -- We don't have enough type information to check
    -- the application for StgFCallOp and StgPrimCallOp; ToDo
    _maybe_arg_tys <- mapM (MaybeT . lintStgArg) args
    return res_ty
-- StgLam should have been eliminated before this pass runs.
lintStgExpr (StgLam bndrs _) = do
    addErrL (text "Unexpected StgLam" <+> ppr bndrs)
    return Nothing
lintStgExpr (StgLet binds body) = do
    binders <- lintStgBinds binds
    addLoc (BodyOfLetRec binders) $
      addInScopeVars binders $
        lintStgExpr body
lintStgExpr (StgLetNoEscape binds body) = do
    binders <- lintStgBinds binds
    addLoc (BodyOfLetRec binders) $
      addInScopeVars binders $
        lintStgExpr body
lintStgExpr (StgTick _ expr) = lintStgExpr expr
lintStgExpr (StgCase scrut bndr alts_type alts) = runMaybeT $ do
    _ <- MaybeT $ lintStgExpr scrut
    -- The case binder is dead (not in scope) for unboxed-tuple alternatives.
    in_scope <- MaybeT $ liftM Just $
     case alts_type of
        AlgAlt tc    -> check_bndr tc >> return True
        PrimAlt tc   -> check_bndr tc >> return True
        UbxTupAlt _  -> return False -- Binder is always dead in this case
        PolyAlt      -> return True
    MaybeT $ addInScopeVars [bndr | in_scope] $
             lintStgAlts alts scrut_ty
  where
    scrut_ty          = idType bndr
    UnaryRep scrut_rep = repType scrut_ty -- Not used if scrutinee is unboxed tuple
    -- The binder's representation TyCon must match the alternatives' TyCon.
    check_bndr tc = case tyConAppTyCon_maybe scrut_rep of
                      Just bndr_tc -> checkL (tc == bndr_tc) bad_bndr
                      Nothing      -> addErrL bad_bndr
      where
        bad_bndr = mkDefltMsg bndr tc
-- | Lint the alternatives of a case expression, returning the type of the
-- first alternative whose type is known exactly.
lintStgAlts :: [StgAlt]
            -> Type               -- Type of scrutinee
            -> LintM (Maybe Type) -- Just ty => type is accurate
lintStgAlts alts scrut_ty = do
    maybe_result_tys <- mapM (lintAlt scrut_ty) alts
    -- Check the result types
    case catMaybes (maybe_result_tys) of
      []              -> return Nothing
      (first_ty:_tys) -> do -- mapM_ check tys
                            return (Just first_ty)
  where
      -- check ty = checkTys first_ty ty (mkCaseAltMsg alts)
      -- We can't check that the alternatives have the
      -- same type, because they don't, with unsafeCoerce#
-- | Lint one case alternative against the scrutinee type and return the
-- type of its right-hand side.
lintAlt :: Type -> (AltCon, [Id], StgExpr) -> LintM (Maybe Type)
lintAlt _ (DEFAULT, _, rhs)
 = lintStgExpr rhs
lintAlt scrut_ty (LitAlt lit, _, rhs) = do
    checkTys (literalType lit) scrut_ty (mkAltMsg1 scrut_ty)
    lintStgExpr rhs
lintAlt scrut_ty (DataAlt con, args, rhs) = do
    case splitTyConApp_maybe scrut_ty of
      Just (tycon, tys_applied) | isAlgTyCon tycon &&
                                  not (isNewTyCon tycon) -> do
         let
           cons    = tyConDataCons tycon
           arg_tys = dataConInstArgTys con tys_applied
                -- This does not work for existential constructors
         checkL (con `elem` cons) (mkAlgAltMsg2 scrut_ty con)
         checkL (length args == dataConRepArity con) (mkAlgAltMsg3 con args)
         -- Argument types can only be checked for vanilla constructors
         -- (no existentials, GADTs, etc).
         when (isVanillaDataCon con) $
           mapM_ check (zipEqual "lintAlgAlt:stg" arg_tys args)
         return ()
      _ ->
         addErrL (mkAltMsg1 scrut_ty)
    addInScopeVars args $
         lintStgExpr rhs
  where
    check (ty, arg) = checkTys ty (idType arg) (mkAlgAltMsg4 ty arg)
    -- elem: yes, the elem-list here can sometimes be long-ish,
    -- but as it's use-once, probably not worth doing anything different
    -- We give it its own copy, so it isn't overloaded.
    elem _ []       = False
    elem x (y:ys)   = x==y || elem x ys
{-
************************************************************************
* *
\subsection[lint-monad]{The Lint monad}
* *
************************************************************************
-}
-- | The lint monad: a reader over the current location stack and in-scope
-- set, threading an accumulating bag of error messages.
newtype LintM a = LintM
    { unLintM :: [LintLocInfo]      -- Locations
              -> IdSet              -- Local vars in scope
              -> Bag MsgDoc         -- Error messages so far
              -> (a, Bag MsgDoc)    -- Result and error messages (if any)
    }
-- | Where in the program the checker currently is, for error messages.
data LintLocInfo
  = RhsOf Id            -- The variable bound
  | LambdaBodyOf [Id]   -- The lambda-binder
  | BodyOfLetRec [Id]   -- One of the binders
-- | Render a location as a source span plus a description.
-- NOTE(review): `head bs` is partial; presumably 'LambdaBodyOf' and
-- 'BodyOfLetRec' are only ever built with non-empty binder lists — confirm.
dumpLoc :: LintLocInfo -> (SrcSpan, SDoc)
dumpLoc (RhsOf v) =
  (srcLocSpan (getSrcLoc v), text " [RHS of " <> pp_binders [v] <> char ']' )
dumpLoc (LambdaBodyOf bs) =
  (srcLocSpan (getSrcLoc (head bs)), text " [in body of lambda with binders " <> pp_binders bs <> char ']' )
dumpLoc (BodyOfLetRec bs) =
  (srcLocSpan (getSrcLoc (head bs)), text " [in body of letrec with binders " <> pp_binders bs <> char ']' )
-- | Comma-separated binders, each with its type.
pp_binders :: [Id] -> SDoc
pp_binders bs
  = sep (punctuate comma (map pp_binder bs))
  where
    pp_binder b
      = hsep [ppr b, dcolon, ppr (idType b)]
-- | Run a lint computation with no location, no in-scope variables and no
-- errors; return the collected messages, or 'Nothing' if linting succeeded.
initL :: LintM a -> Maybe MsgDoc
initL (LintM m)
  = case m [] emptyVarSet emptyBag of
      (_, errs)
        | isEmptyBag errs -> Nothing
        | otherwise       -> Just (vcat (punctuate blankLine (bagToList errs)))
instance Functor LintM where
      fmap = liftM
instance Applicative LintM where
      pure a = LintM $ \_loc _scope errs -> (a, errs)
      (<*>) = ap
      (*>) = thenL_
instance Monad LintM where
    (>>=) = thenL
    (>>) = (*>)
-- | Bind: thread location, scope and the growing error bag left-to-right.
thenL :: LintM a -> (a -> LintM b) -> LintM b
thenL m k = LintM $ \loc scope errs
  -> case unLintM m loc scope errs of
      (r, errs') -> unLintM (k r) loc scope errs'
-- | Sequence, discarding the first result (strict in the first action's
-- error bag via the case, like 'thenL').
thenL_ :: LintM a -> LintM b -> LintM b
thenL_ m k = LintM $ \loc scope errs
  -> case unLintM m loc scope errs of
      (_, errs') -> unLintM k loc scope errs'
-- | Record an error unless the condition holds.
checkL :: Bool -> MsgDoc -> LintM ()
checkL ok msg
  | ok        = return ()
  | otherwise = addErrL msg
-- | Record an error message at the current location.
addErrL :: MsgDoc -> LintM ()
addErrL msg = LintM (\loc _scope errs -> ((), addErr errs msg loc))
-- | Append a message to the error bag, decorated with the innermost
-- location (if any).
addErr :: Bag MsgDoc -> MsgDoc -> [LintLocInfo] -> Bag MsgDoc
addErr errs_so_far msg locs
  = errs_so_far `snocBag` decorate locs
  where
    decorate (loc:_) = let (l, hdr) = dumpLoc loc
                       in  mkLocMessage SevWarning l (hdr $$ msg)
    decorate []      = msg
-- | Push a location onto the stack for the duration of a sub-computation.
addLoc :: LintLocInfo -> LintM a -> LintM a
addLoc extra_loc m = LintM $ \loc scope errs
   -> unLintM m (extra_loc:loc) scope errs
-- | Extend the in-scope set for the duration of a sub-computation.
-- Shadowing is deliberately NOT reported (see historical note below).
addInScopeVars :: [Id] -> LintM a -> LintM a
addInScopeVars ids m = LintM $ \loc scope errs
 -> -- We check if these "new" ids are already
    -- in scope, i.e., we have *shadowing* going on.
    -- For now, it's just a "trace"; we may make
    -- a real error out of it...
    let
        new_set = mkVarSet ids
    in
    -- After adding -fliberate-case, Simon decided he likes shadowed
    -- names after all.  WDP 94/07
    -- (if isEmptyVarSet shadowed
    -- then id
    -- else pprTrace "Shadowed vars:" (ppr (varSetElems shadowed))) $
    unLintM m loc (scope `unionVarSet` new_set) errs
{-
Checking function applications: we only check that the type has the
right *number* of arrows, we don't actually compare the types. This
is because we can't expect the types to be equal - the type
applications and type lambdas that we use to calculate accurate types
have long since disappeared.
-}
-- | Check a (possibly unsaturated) function application.  Only the *number*
-- of arrows is really checked; argument types are compared with the crude
-- 'stgEqType' and only while the running type is still accurate.
checkFunApp :: Type                 -- The function type
            -> [Type]               -- The arg type(s)
            -> MsgDoc               -- Error message
            -> LintM (Maybe Type)   -- Just ty => result type is accurate
checkFunApp fun_ty arg_tys msg
 = do { case mb_msg of
          Just msg -> addErrL msg
          Nothing  -> return ()
      ; return mb_ty }
 where
  (mb_ty, mb_msg) = cfa True fun_ty arg_tys
  -- The Bool tracks whether the running function type is still exact;
  -- stripping foralls or unwrapping newtypes makes it inexact.
  cfa :: Bool -> Type -> [Type] -> (Maybe Type          -- Accurate result?
                                   , Maybe MsgDoc)      -- Errors?
  cfa accurate fun_ty []      -- Args have run out; that's fine
      = (if accurate then Just fun_ty else Nothing, Nothing)
  cfa accurate fun_ty arg_tys@(arg_ty':arg_tys')
      | Just (arg_ty, res_ty) <- splitFunTy_maybe fun_ty
      = if accurate && not (arg_ty `stgEqType` arg_ty')
        then (Nothing, Just msg)       -- Arg type mismatch
        else cfa accurate res_ty arg_tys'
      | Just (_, fun_ty') <- splitForAllTy_maybe fun_ty
      = cfa False fun_ty' arg_tys
      | Just (tc,tc_args) <- splitTyConApp_maybe fun_ty
      , isNewTyCon tc
      = if length tc_args < tyConArity tc
        then WARN( True, text "cfa: unsaturated newtype" <+> ppr fun_ty $$ msg )
             (Nothing, Nothing)   -- This is odd, but I've seen it
        else cfa False (newTyConInstRhs tc tc_args) arg_tys
      | Just tc <- tyConAppTyCon_maybe fun_ty
      , not (isTypeFamilyTyCon tc)      -- Definite error
      = (Nothing, Just msg)             -- Too many args
      | otherwise
      = (Nothing, Nothing)
stgEqType :: Type -> Type -> Bool
-- Compare types, but crudely because we have discarded
-- both casts and type applications, so types might look
-- different but be the same.  So reply "True" if in doubt.
-- "False" means that the types are definitely different.
--
-- Fundamentally this is a losing battle because of unsafeCoerce
stgEqType orig_ty1 orig_ty2
  = gos (repType orig_ty1) (repType orig_ty2)
  where
    -- Compare representation types; unboxed tuples componentwise.
    gos :: RepType -> RepType -> Bool
    gos (UbxTupleRep tys1) (UbxTupleRep tys2)
      = equalLength tys1 tys2 && and (zipWith go tys1 tys2)
    gos (UnaryRep ty1) (UnaryRep ty2) = go ty1 ty2
    gos _ _ = False
    -- Only head TyCons are compared; anything non-TyCon-headed (type
    -- variables in particular) conservatively compares equal.
    go :: UnaryType -> UnaryType -> Bool
    go ty1 ty2
      | Just (tc1, tc_args1) <- splitTyConApp_maybe ty1
      , Just (tc2, tc_args2) <- splitTyConApp_maybe ty2
      , let res = if tc1 == tc2
                  then equalLength tc_args1 tc_args2 && and (zipWith (gos `on` repType) tc_args1 tc_args2)
                  else  -- TyCons don't match; but don't bleat if either is a
                        -- family TyCon because a coercion might have made it
                        -- equal to something else
                    (isFamilyTyCon tc1 || isFamilyTyCon tc2)
      = if res then True
        else
        pprTrace "stgEqType: unequal" (vcat [ppr ty1, ppr ty2])
        False
      | otherwise = True  -- Conservatively say "fine".
                          -- Type variables in particular
-- | Record an error if a locally-defined variable is used out of scope.
-- Imported (non-local) ids are always accepted.
checkInScope :: Id -> LintM ()
checkInScope v = LintM go
  where
    go loc scope errs
      | isLocalId v && not (v `elemVarSet` scope)
      = ((), addErr errs (hsep [ppr v, text "is out of scope"]) loc)
      | otherwise
      = ((), errs)
-- | Record an error if two types are definitely different (per 'stgEqType').
checkTys :: Type -> Type -> MsgDoc -> LintM ()
checkTys ty1 ty2 msg = LintM go
  where
    go loc _scope errs
      | ty1 `stgEqType` ty2 = ((), errs)
      | otherwise           = ((), addErr errs msg loc)
-- | (Unused) message for mismatched case-alternative types.
_mkCaseAltMsg :: [StgAlt] -> MsgDoc
_mkCaseAltMsg _alts
  = text "In some case alternatives, type of alternatives not all same:"
    $$ Outputable.empty  -- LATER: ppr alts
-- | Message for a case binder whose type doesn't match the scrutinee.
mkDefltMsg :: Id -> TyCon -> MsgDoc
mkDefltMsg bndr tc
  = text "Binder of a case expression doesn't match type of scrutinee:"
    $$ (ppr bndr $$ ppr (idType bndr) $$ ppr tc)
-- | Message for an ill-typed function application.
mkFunAppMsg :: Type -> [Type] -> StgExpr -> MsgDoc
mkFunAppMsg fun_ty arg_tys expr
  = vcat [ text "In a function application, function type doesn't match arg types:"
         , hang (text "Function type:") 4 (ppr fun_ty)
         , hang (text "Arg types:") 4 (vcat (map ppr arg_tys))
         , hang (text "Expression:") 4 (ppr expr)
         ]
-- | Message for an ill-typed constructor application in an RHS.
mkRhsConMsg :: Type -> [Type] -> MsgDoc
mkRhsConMsg fun_ty arg_tys
  = vcat [ text "In a RHS constructor application, con type doesn't match arg types:"
         , hang (text "Constructor type:") 4 (ppr fun_ty)
         , hang (text "Arg types:") 4 (vcat (map ppr arg_tys))
         ]
-- | Message for a scrutinee type not matching the case patterns.
mkAltMsg1 :: Type -> MsgDoc
mkAltMsg1 ty
  = text "In a case expression, type of scrutinee does not match patterns"
    $$ ppr ty
-- | Message for a constructor foreign to the scrutinee's type.
mkAlgAltMsg2 :: Type -> DataCon -> MsgDoc
mkAlgAltMsg2 ty con
  = vcat [ text "In some algebraic case alternative, constructor is not a constructor of scrutinee type:"
         , ppr ty
         , ppr con
         ]
-- | Message for an alternative binding the wrong number of arguments.
mkAlgAltMsg3 :: DataCon -> [Id] -> MsgDoc
mkAlgAltMsg3 con alts
  = vcat [ text "In some algebraic case alternative, number of arguments doesn't match constructor:"
         , ppr con
         , ppr alts
         ]
-- | Message for an alternative argument of the wrong type.
mkAlgAltMsg4 :: Type -> Id -> MsgDoc
mkAlgAltMsg4 ty arg
  = vcat [ text "In some algebraic case alternative, type of argument doesn't match data constructor:"
         , ppr ty
         , ppr arg
         ]
-- | (Unused) message for a binder/RHS type mismatch.
_mkRhsMsg :: Id -> Type -> MsgDoc
_mkRhsMsg binder ty
  = vcat [ hsep [ text "The type of this binder doesn't match the type of its RHS:"
                , ppr binder
                ]
         , hsep [text "Binder's type:", ppr (idType binder)]
         , hsep [text "Rhs type:", ppr ty]
         ]
-- | Message for a let(rec) binder with an unlifted type.
mkUnliftedTyMsg :: Id -> StgRhs -> SDoc
mkUnliftedTyMsg binder rhs
  = vcat [ text "Let(rec) binder" <+> quotes (ppr binder) <+>
           text "has unlifted type" <+> quotes (ppr (idType binder))
         , text "RHS:" <+> ppr rhs
         ]
| mcschroeder/ghc | compiler/stgSyn/StgLint.hs | bsd-3-clause | 17,963 | 0 | 19 | 5,243 | 4,725 | 2,405 | 2,320 | -1 | -1 |
module Data.Sequence.Chunk (chunk) where
import Data.Sequence as SQ
-- | @'chunk' n xs@ splits @xs@ into pieces of size
-- @ceiling (length xs / n)@; all pieces but possibly the last have that
-- size, so at most @n@ non-empty pieces are produced.
chunk :: Int -> Seq a -> Seq (Seq a)
chunk n xs = go xs
  where
    -- Size of each piece, rounded up so the whole input is covered.
    pieceLen = ceiling (realToFrac (SQ.length xs) / realToFrac n :: Double)
    go rest
      | SQ.null rest = SQ.empty
      | otherwise    = SQ.take pieceLen rest <| go (SQ.drop pieceLen rest)
| beni55/bayes-stack | Data/Sequence/Chunk.hs | bsd-3-clause | 351 | 0 | 14 | 114 | 145 | 72 | 73 | 7 | 2 |
-- | drei-färbung (mit würfeln)
-- autor m.lindemeyer
-- stinfwww.informatik.uni-leipzig.de\/~psy99hvr
-- (8484955)
-- patches: <joe@informatik.uni-leipzig.de>
module Col.DreiCol
( DreiCol, Faerbung
, module FiniteMap
, module Graph.Graph
)
where
import Graph.Type
import Graph.Util
import Graph.Viz
import qualified Graph.Valid
import qualified Graph.Labeling
import Challenger
import ToDoc
import Data.Set
import Data.FiniteMap
import Sort
import Control.Monad ( guard ) -- old style
-- | Tag type identifying the three-colouring problem.
data DreiCol = DreiCol deriving Show
-- | A colouring, represented as a mapping from vertices to colour
-- indices (integers).
type Faerbung a = Graph.Labeling.Labeling a Integer
-- | Hook the three-colouring problem into the generic 'Problem'
-- interface: instance/proof rendering, validation, and verification.
instance ( ToDoc (Graph a) , Show (Graph a) , Read (Graph a)
         , ToDoc (Faerbung a),Show (Faerbung a), Read (Faerbung a)
         , Ord a, ToDoc a, Show a, ShowText a
         , ToDoc [a]
         ) => Problem DreiCol (Graph a) (Faerbung a) where
    -- Render the bare instance graph via graphviz.
    getInstanz DreiCol graph loesung dateiName =
        getGraphviz graph instanzTrans dateiName
    getBeweis DreiCol graph loesung dateiName =
        -- the complexity of this could still be improved
        getGraphviz graph (getBeweisTrans loesung) dateiName
    -- A submission is well-formed iff both the graph and the labeling are.
    validiere DreiCol g f =
        let ft1 @ (f1, t1) = Graph.Valid.valid g
            ft2 @ (f2, t2) = Graph.Labeling.valid g f
        in  ( f1 && f2 , t1 <+> t2 )
    -- Verify the colouring: only colours 1..3 may be used, and no edge
    -- may join two vertices of the same colour.
    verifiziere DreiCol g f =
       let
           range = mkSet $ eltsFM f
           erlaubt = mkSet [ 1 .. 3 ]
           -- colours used that are outside the allowed set
           fehlfarben = minusSet range erlaubt
           -- edges whose two endpoints received the same colour
           same = do k <- setToList $ kanten g
                     let x = von k; y = nach k
                     guard $ Graph.Labeling.the (lookupFM f x)
                          == Graph.Labeling.the (lookupFM f y)
                     return k
           ft @ ( flag, txt ) = Graph.Labeling.valid g f
       in  if not flag then ft
           else if not $ isEmptySet fehlfarben
                then ( False, fsep [ text "Erlaubt sind nur die Farben:"
                                     <+> toDoc erlaubt
                                   , text "aber nicht:" <+> toDoc fehlfarben
                                   ] )
           else if not $ null same
                then ( False, text "Diese Kanten haben gleichfarbige Endpunkte:"
                              <+> toDoc same )
           else ( True , text "Das ist eine korrekte Drei-Färbung." )
-------------------------------------------------------------------------------
-- graphviz transformations follow below
-------------------------------------------------------------------------------
-- | A very simple transformation: plain rendering, no labels or colours.
instanzTrans :: ShowText knoten => GVTrans knoten
instanzTrans = GVTrans
    { getGVProg = Neato
    , getGVFormat = "png"
    , isGVDirected = False
    , getGVNID = showText
    , getGVNName = showText
    , getGVNLabel = Nothing
    , getGVNColor = Nothing
    , getGVNXAtts = Nothing
    , getGVELabel = Nothing
    , getGVEXAtts = Nothing
    }
-- | Transformation extended with node colours: each vertex is painted
-- according to its colour class in the given solution.
getBeweisTrans :: (ShowText knoten, Ord knoten)
               => Faerbung knoten -> GVTrans knoten
getBeweisTrans loesung = GVTrans
    { getGVProg = Neato
    , getGVFormat = "png"
    , isGVDirected = False
    , getGVNID = showText
    , getGVNName = showText
    , getGVNLabel = Nothing
    , getGVNColor = Just (getNColor loesung)
    , getGVNXAtts = Nothing
    , getGVELabel = Nothing
    , getGVEXAtts = Nothing
    }
-- | Look up a vertex's colour class in the colouring and map it to a
-- concrete graphviz colour name.
getNColor :: Ord knoten => Faerbung knoten -> knoten -> GVColor
getNColor colouring vertex = getColor classIndex
  where
    classIndex = Graph.Labeling.the (lookupFM colouring vertex)
-- | Map a colour-class index to a graphviz colour name.  Only eight
-- colours are provided; any index outside 1..8 falls back to white.
getColor :: Integer -> GVColor
getColor index = case index of
    1 -> "red"
    2 -> "blue"
    3 -> "green"
    4 -> "yellow"
    5 -> "magenta"
    6 -> "navy"
    7 -> "seagreen"
    8 -> "purple"
    _ -> "white"
| Erdwolf/autotool-bonn | src/Graph/Col/DreiCol.hs | gpl-2.0 | 3,617 | 88 | 14 | 802 | 1,056 | 583 | 473 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.TagWindows
-- Copyright : (c) Karsten Schoelzel <kuser@gmx.de>
-- License : BSD
--
-- Maintainer : Karsten Schoelzel <kuser@gmx.de>
-- Stability : unstable
-- Portability : unportable
--
-- Functions for tagging windows and selecting them by tags.
-----------------------------------------------------------------------------
module XMonad.Actions.TagWindows (
-- * Usage
-- $usage
addTag, delTag, unTag,
setTags, getTags, hasTag,
withTaggedP, withTaggedGlobalP, withFocusedP,
withTagged , withTaggedGlobal ,
focusUpTagged, focusUpTaggedGlobal,
focusDownTagged, focusDownTaggedGlobal,
shiftHere, shiftToScreen,
tagPrompt,
tagDelPrompt,
TagPrompt,
) where
import Prelude hiding (catch)
import Data.List (nub,sortBy)
import Control.Monad
import Control.Exception
import XMonad.StackSet hiding (filter)
import XMonad.Prompt
import XMonad hiding (workspaces)
-- | Swallow the 'IOException' and produce the given value in the monad.
econst :: Monad m => a -> IOException -> m a
econst x _ = return x
-- $usage
--
-- To use window tags, import this module into your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Actions.TagWindows
-- > import XMonad.Prompt -- to use tagPrompt
--
-- and add keybindings such as the following:
--
-- > , ((modm, xK_f ), withFocused (addTag "abc"))
-- > , ((modm .|. controlMask, xK_f ), withFocused (delTag "abc"))
-- > , ((modm .|. shiftMask, xK_f ), withTaggedGlobalP "abc" W.sink)
-- > , ((modm, xK_d ), withTaggedP "abc" (W.shiftWin "2"))
-- > , ((modm .|. shiftMask, xK_d ), withTaggedGlobalP "abc" shiftHere)
-- > , ((modm .|. controlMask, xK_d ), focusUpTaggedGlobal "abc")
-- > , ((modm, xK_g ), tagPrompt defaultXPConfig (\s -> withFocused (addTag s)))
-- > , ((modm .|. controlMask, xK_g ), tagDelPrompt defaultXPConfig)
-- > , ((modm .|. shiftMask, xK_g ), tagPrompt defaultXPConfig (\s -> withTaggedGlobal s float))
-- > , ((modWinMask, xK_g ), tagPrompt defaultXPConfig (\s -> withTaggedP s (W.shiftWin "2")))
-- > , ((modWinMask .|. shiftMask, xK_g ), tagPrompt defaultXPConfig (\s -> withTaggedGlobalP s shiftHere))
-- > , ((modWinMask .|. controlMask, xK_g ), tagPrompt defaultXPConfig (\s -> focusUpTaggedGlobal s))
--
-- NOTE: Tags are saved as space separated strings and split with
-- 'unwords'. Thus if you add a tag \"a b\" the window will have
-- the tags \"a\" and \"b\" but not \"a b\".
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Set multiple tags for a window at once (overriding any previous
-- tags); the tags are stored space-separated, so individual tags must
-- not contain spaces.
setTags :: [String] -> Window -> X ()
setTags ts w = setTag (unwords ts) w
-- | set a tag for a window (overriding any previous tags)
-- writes it to the \"_XMONAD_TAGS\" window property
setTag :: String -> Window -> X ()
setTag s w = withDisplay $ \d ->
    io $ internAtom d "_XMONAD_TAGS" False >>= setTextProperty d w s
-- | read all tags of a window
-- reads from the \"_XMONAD_TAGS\" window property
--
-- If the property is missing or unreadable, the exception is swallowed
-- ('econst') and an empty tag list results.  The final
-- @words . unwords@ splits the stored space-separated string back into
-- individual tags.
getTags :: Window -> X [String]
getTags w = withDisplay $ \d ->
    io $ catch (internAtom d "_XMONAD_TAGS" False >>=
                getTextProperty d w >>=
                wcTextPropertyToTextList d)
               (econst [[]])
         >>= return . words . unwords
-- | Check whether the given tag is currently set on a window.
hasTag :: String -> Window -> X Bool
hasTag s w = do
    tags <- getTags w
    return (s `elem` tags)
-- | Add a tag to a window's existing tags; a no-op if the tag is
-- already present.
addTag :: String -> Window -> X ()
addTag s w = do
    tags <- getTags w
    -- 'unless' replaces the old explicit if/then/else return ()
    unless (s `elem` tags) $ setTags (s : tags) w
-- | Remove a tag from a window, if it exists; other tags are kept.
delTag :: String -> Window -> X ()
delTag s w = getTags w >>= \tags -> setTags (filter (/= s) tags) w
-- | Remove all tags from a window by overwriting the property with the
-- empty string.
unTag :: Window -> X ()
unTag w = setTag "" w
-- | Move the focus in a group of windows, which share the same given tag.
-- The Global variants move through all workspaces, whereas the other
-- ones operate only on the current workspace
--
-- The traversal direction is encoded purely in the window ordering
-- passed to 'focusTagged'': the Up variants reverse the list.
focusUpTagged, focusDownTagged, focusUpTaggedGlobal, focusDownTaggedGlobal :: String -> X ()
focusUpTagged         = focusTagged' (reverse . wsToList)
focusDownTagged       = focusTagged' wsToList
focusUpTaggedGlobal   = focusTagged' (reverse . wsToListGlobal)
focusDownTaggedGlobal = focusTagged' wsToListGlobal
-- | Windows of the current workspace in traversal order: the windows
-- after the focus first, then (wrapping around) the ones before it,
-- reversed.  The focused window itself is not included (it is in
-- neither 'up' nor 'down').
wsToList :: (Ord i) => StackSet i l a s sd -> [a]
wsToList ws = crs ++ cls
    where
        (crs, cls) = (cms down, cms (reverse . up))
        -- project a field out of the current workspace's stack; empty
        -- when the workspace holds no windows
        cms f = maybe [] f (stack . workspace . current $ ws)
-- | Like 'wsToList', but spanning all workspaces: current workspace's
-- "down" windows, then workspaces with greater tags, then those with
-- smaller tags, then the current workspace's "up" windows — giving a
-- wrap-around traversal order anchored at the focus.
wsToListGlobal :: (Ord i) => StackSet i l a s sd -> [a]
wsToListGlobal ws = concat ([crs] ++ rws ++ lws ++ [cls])
    where
        curtag = currentTag ws
        (crs, cls) = (cms down, cms (reverse . up))
        cms f = maybe [] f (stack . workspace . current $ ws)
        -- workspaces strictly before/after the current one by tag order
        (lws, rws) = (mws (<), mws (>))
        mws cmp = map (integrate' . stack) . sortByTag . filter (\w -> tag w `cmp` curtag) . workspaces $ ws
        sortByTag = sortBy (\x y -> compare (tag x) (tag y))
-- | Focus the first window carrying the tag, searching the window list
-- produced by the given projection; a no-op when no window matches.
focusTagged' :: (WindowSet -> [Window]) -> String -> X ()
focusTagged' wl t = gets windowset >>= findM (hasTag t) . wl >>=
                    maybe (return ()) (windows . focusWindow)
-- | Return the first list element for which the monadic predicate
-- yields 'True', or 'Nothing' when no element matches.  Elements after
-- the first match are not tested.
findM :: (Monad m) => (a -> m Bool) -> [a] -> m (Maybe a)
findM _ [] = return Nothing
findM p (x:xs) = do
    found <- p x
    if found then return (Just x) else findM p xs
-- | apply a pure function to windows with a tag
--
-- 'withTaggedP' touches only the current workspace; 'withTaggedGlobalP'
-- touches every workspace.
withTaggedP, withTaggedGlobalP :: String -> (Window -> WindowSet -> WindowSet) -> X ()
withTaggedP t f = withTagged' t (winMap f)
withTaggedGlobalP t f = withTaggedGlobal' t (winMap f)
-- | Apply a per-window windowset transformation to every listed window
-- in a single 'windows' call; a no-op for the empty list.
winMap :: (Window -> WindowSet -> WindowSet) -> [Window] -> X ()
winMap f ws = unless (null ws) (windows (foldl1 (.) (map f ws)))
-- | Run an X action on every window carrying the tag; 'withTagged'
-- looks only at the current workspace, 'withTaggedGlobal' at all of them.
withTagged, withTaggedGlobal :: String -> (Window -> X ()) -> X ()
withTagged t f = withTagged' t (mapM_ f)
withTaggedGlobal t f = withTaggedGlobal' t (mapM_ f)
-- | Collect the current workspace's windows that carry the tag and pass
-- them to the continuation.
withTagged' :: String -> ([Window] -> X ()) -> X ()
withTagged' t m = gets windowset >>= filterM (hasTag t) . index >>= m
-- | Collect the windows of /all/ workspaces that carry the tag and pass
-- them to the continuation.
withTaggedGlobal' :: String -> ([Window] -> X ()) -> X ()
withTaggedGlobal' t m = gets windowset >>=
    -- concatMap replaces the old `concat . map` chain
    filterM (hasTag t) . concatMap (integrate' . stack) . workspaces >>= m
-- | Apply a pure windowset transformation to the focused window, if any.
withFocusedP :: (Window -> WindowSet -> WindowSet) -> X ()
withFocusedP f = withFocused (\w -> windows (f w))
-- | Move the given window to the currently focused workspace.
shiftHere :: (Ord a, Eq s, Eq i) => a -> StackSet i l a s sd -> StackSet i l a s sd
shiftHere win ss = shiftWin (currentTag ss) win ss
-- | Move the given window to the workspace shown on the given screen.
-- A no-op if every visible screen has that id (i.e. no other screen
-- exists to move to).
shiftToScreen :: (Ord a, Eq s, Eq i) => s -> a -> StackSet i l a s sd -> StackSet i l a s sd
shiftToScreen sid win ss =
    case filter ((sid /=) . screen) (current ss : visible ss) of
        []        -> ss
        (scr : _) -> shiftWin (tag (workspace scr)) win ss
-- | Prompt type used for tag selection/deletion prompts.
data TagPrompt = TagPrompt
instance XPrompt TagPrompt where
    showXPrompt TagPrompt = "Select Tag:     "
-- | Prompt for a tag (completing over all tags currently in use) and
-- pass the chosen string to the given action.
tagPrompt :: XPConfig -> (String -> X ()) -> X ()
tagPrompt c f = do
  sc <- tagComplList
  mkXPrompt TagPrompt c (mkComplFunFromList' sc) f
-- | All tags currently attached to any window on any workspace, without
-- duplicates; used as the completion list for 'tagPrompt'.
tagComplList :: X [String]
tagComplList = do
    -- concatMap replaces the old `concat . map` chain
    wins <- gets (concatMap (integrate' . stack) . workspaces . windowset)
    tags <- mapM getTags wins
    return (nub (concat tags))
-- | Prompt for one of the focused window's tags and delete it from that
-- window.  No prompt is shown when the focused window has no tags.
tagDelPrompt :: XPConfig -> X ()
tagDelPrompt c = do
    sc <- tagDelComplList
    -- 'unless' replaces the old explicit if/then/else return ()
    unless (null sc) $
        mkXPrompt TagPrompt c (mkComplFunFromList' sc) (\s -> withFocused (delTag s))
-- | Tags of the currently focused window; empty when no window has
-- focus.
tagDelComplList :: X [String]
tagDelComplList = do
    ws <- gets windowset
    maybe (return []) getTags (peek ws)
| kmels/xmonad-launcher | XMonad/Actions/TagWindows.hs | bsd-3-clause | 7,764 | 0 | 16 | 1,945 | 2,139 | 1,135 | 1,004 | 108 | 2 |
{-# LANGUAGE CPP, FlexibleInstances, UnboxedTuples, MagicHash #-}
{-# OPTIONS_GHC -fno-cse -fno-warn-orphans #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- Monadery code used in InteractiveUI
--
-- (c) The GHC Team 2005-2006
--
-----------------------------------------------------------------------------
module GhciMonad (
GHCi(..), startGHCi,
GHCiState(..), setGHCiState, getGHCiState, modifyGHCiState,
GHCiOption(..), isOptionSet, setOption, unsetOption,
Command,
BreakLocation(..),
TickArray,
getDynFlags,
runStmt, runDecls, resume, timeIt, recordBreak, revertCAFs,
printForUser, printForUserPartWay, prettyLocations,
initInterpBuffering, turnOffBuffering, flushInterpBuffers,
) where
#include "HsVersions.h"
import qualified GHC
import GhcMonad hiding (liftIO)
import Outputable hiding (printForUser, printForUserPartWay)
import qualified Outputable
import Util
import DynFlags
import FastString
import HscTypes
import SrcLoc
import Module
import ObjLink
import Linker
import Exception
import Numeric
import Data.Array
import Data.IORef
import System.CPUTime
import System.Environment
import System.IO
import Control.Monad
import GHC.Exts
import System.Console.Haskeline (CompletionFunc, InputT)
import qualified System.Console.Haskeline as Haskeline
import Control.Monad.Trans.Class
import Control.Monad.IO.Class
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
-----------------------------------------------------------------------------
-- GHCi monad
-- the Bool means: True = we should exit GHCi (:quit)
type Command = (String, String -> InputT GHCi Bool, CompletionFunc GHCi)
-- | All of GHCi's mutable state, threaded through the 'GHCi' monad via
-- an 'IORef' (see 'startGHCi').
data GHCiState = GHCiState
     {
        progname       :: String,
        args           :: [String],
        prompt         :: String,
        prompt2        :: String,
        editor         :: String,
        stop           :: String,
        options        :: [GHCiOption],
        line_number    :: !Int,         -- input line
        break_ctr      :: !Int,         -- next breakpoint number (see 'recordBreak')
        breaks         :: ![(Int, BreakLocation)],
        tickarrays     :: ModuleEnv TickArray,
                -- tickarrays caches the TickArray for loaded modules,
                -- so that we don't rebuild it each time the user sets
                -- a breakpoint.
        -- available ghci commands
        ghci_commands  :: [Command],
        -- ":" at the GHCi prompt repeats the last command, so we
        -- remember it here:
        last_command   :: Maybe Command,
        cmdqueue       :: [String],
        remembered_ctx :: [InteractiveImport],
             -- the imports that the user has asked for, via import
             -- declarations and :module commands.  This list is
             -- persistent over :reloads (but any imports for modules
             -- that are not loaded are temporarily ignored).  After a
             -- :load, all the home-package imports are stripped from
             -- this list.
             -- See bugs #2049, #1873, #1360
        transient_ctx  :: [InteractiveImport],
             -- An import added automatically after a :load, usually of
             -- the most recently compiled module.  May be empty if
             -- there are no modules loaded.  This list is replaced by
             -- :load, :reload, and :add.  In between it may be modified
             -- by :module.
        ghc_e :: Bool, -- True if this is 'ghc -e' (or runghc)
        -- help text to display to a user
        short_help :: String,
        long_help  :: String,
        lastErrorLocations :: IORef [(FastString, Int)]
     }
-- | Per-module cache: breakpoint tick index -> source spans.
type TickArray = Array Int [(BreakIndex,SrcSpan)]
-- | Boolean flags toggled by the user (e.g. via @:set@ / @:unset@).
data GHCiOption
        = ShowTiming            -- show time/allocs after evaluation
        | ShowType              -- show the type of expressions
        | RevertCAFs            -- revert CAFs after every evaluation
        | Multiline             -- use multiline commands
    deriving Eq
-- | A breakpoint: its module, source span, tick index, and the command
-- to run when it fires.
data BreakLocation
   = BreakLocation
   { breakModule :: !GHC.Module
   , breakLoc    :: !SrcSpan
   , breakTick   :: {-# UNPACK #-} !Int
   , onBreakCmd  :: String
   }
-- | Breakpoints are identified by module plus tick index; the source
-- span and on-break command do not affect equality.
instance Eq BreakLocation where
  a == b = (breakModule a, breakTick a) == (breakModule b, breakTick b)
-- | Render the numbered breakpoint list, one "[n] location" line per
-- breakpoint, oldest first (the list is kept newest-first, so reverse).
prettyLocations :: [(Int, BreakLocation)] -> SDoc
prettyLocations [] = text "No active breakpoints."
prettyLocations locs =
    vcat [ brackets (int num) <+> ppr loc | (num, loc) <- reverse locs ]
-- | Show a breakpoint as "module span" plus, when set, its quoted
-- on-break command.
instance Outputable BreakLocation where
   ppr loc = (ppr $ breakModule loc) <+> ppr (breakLoc loc) <+>
                if null (onBreakCmd loc)
                   then Outputable.empty
                   else doubleQuotes (text (onBreakCmd loc))
-- | Record a breakpoint in the GHCi state, returning its number and
-- whether an equal breakpoint (same module and tick) already existed.
recordBreak :: BreakLocation -> GHCi (Bool{- was already present -}, Int)
recordBreak brkLoc = do
   st <- getGHCiState
   let oldActiveBreaks = breaks st
   -- don't store the same break point twice
   case [ nm | (nm, loc) <- oldActiveBreaks, loc == brkLoc ] of
     (nm:_) -> return (True, nm)
     [] -> do
      -- fresh breakpoint: assign the next counter value and prepend
      let oldCounter = break_ctr st
          newCounter = oldCounter + 1
      setGHCiState $ st { break_ctr = newCounter,
                          breaks = (oldCounter, brkLoc) : oldActiveBreaks
                        }
      return (False, oldCounter)
-- | The GHCi monad: 'Ghc' extended with a mutable 'GHCiState'.
newtype GHCi a = GHCi { unGHCi :: IORef GHCiState -> Ghc a }
-- | Run a 'GHCi' computation from plain 'IO', given a reified session
-- and state ref (inverse of 'reifyGHCi').
reflectGHCi :: (Session, IORef GHCiState) -> GHCi a -> IO a
reflectGHCi (s, gs) m = unGhc (unGHCi m gs) s
-- | Expose the underlying session and state ref to an 'IO' action
-- (inverse of 'reflectGHCi'); used to run code under 'withProgName' etc.
reifyGHCi :: ((Session, IORef GHCiState) -> IO a) -> GHCi a
reifyGHCi f = GHCi f'
  where
    -- f' :: IORef GHCiState -> Ghc a
    f' gs = reifyGhc (f'' gs)
    -- f'' :: IORef GHCiState -> Session -> IO a
    f'' gs s = f (s, gs)
-- | Run a 'GHCi' computation with the given initial state, allocating
-- the backing 'IORef'.
startGHCi :: GHCi a -> GHCiState -> Ghc a
startGHCi g state = do
    ref <- liftIO (newIORef state)
    unGHCi g ref
-- The state ref is threaded unchanged through bind; only 'Ghc' effects
-- are sequenced.
instance Functor GHCi where
    fmap = liftM
instance Applicative GHCi where
    pure a = GHCi $ \_ -> pure a
    (<*>) = ap
instance Monad GHCi where
  (GHCi m) >>= k  =  GHCi $ \s -> m s >>= \a -> unGHCi (k a) s
-- | Read the current GHCi state.
getGHCiState :: GHCi GHCiState
getGHCiState = GHCi (liftIO . readIORef)
-- | Replace the current GHCi state.
setGHCiState :: GHCiState -> GHCi ()
setGHCiState s = GHCi (\r -> liftIO (writeIORef r s))
-- | Apply a function to the current GHCi state.
modifyGHCiState :: (GHCiState -> GHCiState) -> GHCi ()
modifyGHCiState f = GHCi (\r -> liftIO (readIORef r >>= writeIORef r . f))
-- | Lift a 'Ghc' action into 'GHCi' (the state ref is ignored).
liftGhc :: Ghc a -> GHCi a
liftGhc m = GHCi (const m)
-- Standard monad-class plumbing: everything is delegated either to the
-- underlying 'Ghc' monad (via 'liftGhc') or, for @InputT GHCi@, to the
-- wrapped 'GHCi' via 'lift'.
instance MonadIO GHCi where
  liftIO = liftGhc . liftIO
instance HasDynFlags GHCi where
  getDynFlags = getSessionDynFlags
instance GhcMonad GHCi where
  setSession s' = liftGhc $ setSession s'
  getSession    = liftGhc $ getSession
instance HasDynFlags (InputT GHCi) where
  getDynFlags = lift getDynFlags
instance GhcMonad (InputT GHCi) where
  setSession = lift . setSession
  getSession = lift getSession
-- Exception handling: unwrap to the underlying monad, run the handler
-- with the same state ref, and rewrap.
instance ExceptionMonad GHCi where
  gcatch m h = GHCi $ \r -> unGHCi m r `gcatch` (\e -> unGHCi (h e) r)
  gmask f =
      GHCi $ \s -> gmask $ \io_restore ->
                             let
                                g_restore (GHCi m) = GHCi $ \s' -> io_restore (m s')
                             in
                                unGHCi (f g_restore) s
instance Haskeline.MonadException Ghc where
  controlIO f = Ghc $ \s -> Haskeline.controlIO $ \(Haskeline.RunIO run) -> let
                    run' = Haskeline.RunIO (fmap (Ghc . const) . run . flip unGhc s)
                    in fmap (flip unGhc s) $ f run'
instance Haskeline.MonadException GHCi where
  controlIO f = GHCi $ \s -> Haskeline.controlIO $ \(Haskeline.RunIO run) -> let
                    run' = Haskeline.RunIO (fmap (GHCi . const) . run . flip unGHCi s)
                    in fmap (flip unGHCi s) $ f run'
instance ExceptionMonad (InputT GHCi) where
  gcatch = Haskeline.catch
  gmask f = Haskeline.liftIOOp gmask (f . Haskeline.liftIOOp_)
-- | Is the given option currently enabled?
isOptionSet :: GHCiOption -> GHCi Bool
isOptionSet opt = fmap ((opt `elem`) . options) getGHCiState
-- | Enable an option (removing any previous occurrence first, so the
-- list never holds duplicates).
setOption :: GHCiOption -> GHCi ()
setOption opt =
    modifyGHCiState (\st -> st { options = opt : filter (/= opt) (options st) })
-- | Disable an option.
unsetOption :: GHCiOption -> GHCi ()
unsetOption opt =
    modifyGHCiState (\st -> st { options = filter (/= opt) (options st) })
-- | Print a doc to stdout with the user-visible name qualification.
printForUser :: GhcMonad m => SDoc -> m ()
printForUser doc = do
  unqual <- GHC.getPrintUnqual
  dflags <- getDynFlags
  liftIO $ Outputable.printForUser dflags stdout unqual doc
-- | Like 'printForUser', but via 'Outputable.printForUserPartWay' with
-- the user print depth from the DynFlags ('pprUserLength').
printForUserPartWay :: SDoc -> GHCi ()
printForUserPartWay doc = do
  unqual <- GHC.getPrintUnqual
  dflags <- getDynFlags
  liftIO $ Outputable.printForUserPartWay dflags stdout (pprUserLength dflags) unqual doc
-- | Run a single Haskell expression.  Returns 'Nothing' when a source
-- error was printed instead of executing.
runStmt :: String -> GHC.SingleStep -> GHCi (Maybe GHC.ExecResult)
runStmt expr step = do
  st <- getGHCiState
  -- temporarily install the saved program name and args while the
  -- statement runs (System.Environment's withProgName/withArgs)
  reifyGHCi $ \x ->
    withProgName (progname st) $
    withArgs (args st) $
      reflectGHCi x $ do
        GHC.handleSourceError (\e -> do GHC.printException e;
                                        return Nothing) $ do
          let opts = GHC.execOptions
                { GHC.execSourceFile = progname st
                , GHC.execLineNumber = line_number st
                , GHC.execSingleStep = step }
          r <- GHC.execStmt expr opts
          return (Just r)
-- | Run a group of declarations, returning the names they bind, or
-- 'Nothing' when a source error was printed instead.
runDecls :: String -> GHCi (Maybe [GHC.Name])
runDecls decls = do
  st <- getGHCiState
  -- same progname/args dance as 'runStmt'
  reifyGHCi $ \x ->
    withProgName (progname st) $
    withArgs (args st) $
      reflectGHCi x $ do
        GHC.handleSourceError (\e -> do GHC.printException e;
                                        return Nothing) $ do
          r <- GHC.runDeclsWithLocation (progname st) (line_number st) decls
          return (Just r)
-- | Resume execution after a breakpoint (optionally single-stepping),
-- with the saved program name and args in effect.
resume :: (SrcSpan -> Bool) -> GHC.SingleStep -> GHCi GHC.ExecResult
resume canLogSpan step = do
  st <- getGHCiState
  reifyGHCi $ \x ->
    withProgName (progname st) $
    withArgs (args st) $
      reflectGHCi x $ do
        GHC.resumeExec canLogSpan step
-- --------------------------------------------------------------------------
-- timing & statistics
-- | Run an action and, when the 'ShowTiming' option is set, print the
-- elapsed CPU time plus the allocation figure extracted from the result
-- by the first argument (if any).
timeIt :: (a -> Maybe Integer) -> InputT GHCi a -> InputT GHCi a
timeIt getAllocs action
  = do b <- lift $ isOptionSet ShowTiming
       if not b
          then action
          else do time1   <- liftIO $ getCPUTime
                  a <- action
                  let allocs = getAllocs a
                  time2   <- liftIO $ getCPUTime
                  dflags  <- getDynFlags
                  liftIO $ printTimes dflags allocs (time2 - time1)
                  return a
-- | Print a "(X.XX secs, N bytes)" summary; @psecs@ is CPU time in
-- picoseconds (hence the division by 10^12).
printTimes :: DynFlags -> Maybe Integer -> Integer -> IO ()
printTimes dflags mallocs psecs
   = do let secs = (fromIntegral psecs / (10^(12::Integer))) :: Float
            secs_str = showFFloat (Just 2) secs
        putStrLn (showSDoc dflags (
                 parens (text (secs_str "") <+> text "secs" <> comma <+>
                         case mallocs of
                           Nothing -> empty
                           Just allocs ->
                             text (separateThousands allocs) <+> text "bytes")))
  where
    -- group digits in threes, e.g. 1234567 -> "1,234,567"
    separateThousands n = reverse . sep . reverse . show $ n
      where sep n'
              | length n' <= 3 = n'
              | otherwise      = take 3 n' ++ "," ++ sep (drop 3 n')
-----------------------------------------------------------------------------
-- reverting CAFs
-- | Revert all CAFs in the RTS, then re-disable buffering on the
-- interpreted program's std handles (unless running as 'ghc -e').
revertCAFs :: GHCi ()
revertCAFs = do
  liftIO rts_revertCAFs
  s <- getGHCiState
  -- Buffering has to be turned off again, because we just reverted
  -- stdout, stderr & stdin to their defaults.
  -- ('unless' replaces the old `when (not ...)`.)
  unless (ghc_e s) $ liftIO turnOffBuffering
foreign import ccall "revertCAFs" rts_revertCAFs :: IO ()
-- Make it "safe", just in case
-----------------------------------------------------------------------------
-- To flush buffers for the *interpreted* computation we need
-- to refer to *its* stdout/stderr handles
GLOBAL_VAR(stdin_ptr, error "no stdin_ptr", Ptr ())
GLOBAL_VAR(stdout_ptr, error "no stdout_ptr", Ptr ())
GLOBAL_VAR(stderr_ptr, error "no stderr_ptr", Ptr ())
-- After various attempts, I believe this is the least bad way to do
-- what we want. We know look up the address of the static stdin,
-- stdout, and stderr closures in the loaded base package, and each
-- time we need to refer to them we cast the pointer to a Handle.
-- This avoids any problems with the CAF having been reverted, because
-- we'll always get the current value.
--
-- The previous attempt that didn't work was to compile an expression
-- like "hSetBuffering stdout NoBuffering" into an expression of type
-- IO () and run this expression each time we needed it, but the
-- problem is that evaluating the expression might cache the contents
-- of the Handle rather than referring to it from its static address
-- each time. There's no safe workaround for this.
-- | Look up the addresses of the interpreted program's static
-- stdin/stdout/stderr closures and cache them in the global IORefs
-- above, so later buffering operations can find the live handles.
initInterpBuffering :: Ghc ()
initInterpBuffering = do -- make sure these are linked
    dflags <- GHC.getSessionDynFlags
    liftIO $ do
      initDynLinker dflags
        -- ToDo: we should really look up these names properly, but
        -- it's a fiddle and not all the bits are exposed via the GHC
        -- interface.
      mb_stdin_ptr  <- ObjLink.lookupSymbol "base_GHCziIOziHandleziFD_stdin_closure"
      mb_stdout_ptr <- ObjLink.lookupSymbol "base_GHCziIOziHandleziFD_stdout_closure"
      mb_stderr_ptr <- ObjLink.lookupSymbol "base_GHCziIOziHandleziFD_stderr_closure"
      let f ref (Just ptr) = writeIORef ref ptr
          f _   Nothing    = panic "interactiveUI:setBuffering2"
      zipWithM_ f [stdin_ptr,stdout_ptr,stderr_ptr]
                  [mb_stdin_ptr,mb_stdout_ptr,mb_stderr_ptr]
-- | Flush the interpreted program's stdout and stderr handles.
flushInterpBuffers :: GHCi ()
flushInterpBuffers =
    liftIO $ mapM_ (\ref -> getHandle ref >>= hFlush) [stdout_ptr, stderr_ptr]
-- | Disable buffering on the interpreted program's stdin, stdout and
-- stderr handles.
turnOffBuffering :: IO ()
turnOffBuffering =
    mapM_ (\ref -> getHandle ref >>= \h -> hSetBuffering h NoBuffering)
          [stdin_ptr, stdout_ptr, stderr_ptr]
-- | Turn a cached closure address into a usable 'Handle'.  The
-- 'unsafeCoerce#' is justified because the address was looked up from
-- base's static stdin/stdout/stderr closures (see 'initInterpBuffering');
-- re-reading the IORef each time avoids problems with reverted CAFs.
getHandle :: IORef (Ptr ()) -> IO Handle
getHandle ref = do
  (Ptr addr) <- readIORef ref
  case addrToAny# addr of (# hval #) -> return (unsafeCoerce# hval)
| AlexanderPankiv/ghc | ghc/GhciMonad.hs | bsd-3-clause | 14,239 | 0 | 20 | 3,886 | 3,548 | 1,853 | 1,695 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Prelude
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : stable
-- Portability : portable
--
-- The Prelude: a standard module imported by default into all Haskell
-- modules. For more documentation, see the Haskell 98 Report
-- <http://www.haskell.org/onlinereport/>.
--
-----------------------------------------------------------------------------
module Prelude (
-- * Standard types, classes and related functions
-- ** Basic data types
Bool(False, True),
(&&), (||), not, otherwise,
Maybe(Nothing, Just),
maybe,
Either(Left, Right),
either,
Ordering(LT, EQ, GT),
Char, String,
-- *** Tuples
fst, snd, curry, uncurry,
#if defined(__NHC__)
[]((:), []), -- Not legal Haskell 98;
-- ... available through built-in syntax
module Data.Tuple, -- Includes tuple types
()(..), -- Not legal Haskell 98
(->), -- ... available through built-in syntax
#endif
#ifdef __HUGS__
(:), -- Not legal Haskell 98
#endif
-- ** Basic type classes
Eq((==), (/=)),
Ord(compare, (<), (<=), (>=), (>), max, min),
Enum(succ, pred, toEnum, fromEnum, enumFrom, enumFromThen,
enumFromTo, enumFromThenTo),
Bounded(minBound, maxBound),
-- ** Numbers
-- *** Numeric types
Int, Integer, Float, Double,
Rational,
-- *** Numeric type classes
Num((+), (-), (*), negate, abs, signum, fromInteger),
Real(toRational),
Integral(quot, rem, div, mod, quotRem, divMod, toInteger),
Fractional((/), recip, fromRational),
Floating(pi, exp, log, sqrt, (**), logBase, sin, cos, tan,
asin, acos, atan, sinh, cosh, tanh, asinh, acosh, atanh),
RealFrac(properFraction, truncate, round, ceiling, floor),
RealFloat(floatRadix, floatDigits, floatRange, decodeFloat,
encodeFloat, exponent, significand, scaleFloat, isNaN,
isInfinite, isDenormalized, isIEEE, isNegativeZero, atan2),
-- *** Numeric functions
subtract, even, odd, gcd, lcm, (^), (^^),
fromIntegral, realToFrac,
-- ** Monads and functors
Monad((>>=), (>>), return, fail),
Functor(fmap),
mapM, mapM_, sequence, sequence_, (=<<),
-- ** Miscellaneous functions
id, const, (.), flip, ($), until,
asTypeOf, error, undefined,
seq, ($!),
-- * List operations
map, (++), filter,
head, last, tail, init, null, length, (!!),
reverse,
-- ** Reducing lists (folds)
foldl, foldl1, foldr, foldr1,
-- *** Special folds
and, or, any, all,
sum, product,
concat, concatMap,
maximum, minimum,
-- ** Building lists
-- *** Scans
scanl, scanl1, scanr, scanr1,
-- *** Infinite lists
iterate, repeat, replicate, cycle,
-- ** Sublists
take, drop, splitAt, takeWhile, dropWhile, span, break,
-- ** Searching lists
elem, notElem, lookup,
-- ** Zipping and unzipping lists
zip, zip3, zipWith, zipWith3, unzip, unzip3,
-- ** Functions on strings
lines, words, unlines, unwords,
-- * Converting to and from @String@
-- ** Converting to @String@
ShowS,
Show(showsPrec, showList, show),
shows,
showChar, showString, showParen,
-- ** Converting from @String@
ReadS,
Read(readsPrec, readList),
reads, readParen, read, lex,
-- * Basic Input and output
IO,
-- ** Simple I\/O operations
-- All I/O functions defined here are character oriented. The
-- treatment of the newline character will vary on different systems.
-- For example, two characters of input, return and linefeed, may
-- read as a single newline character. These functions cannot be
-- used portably for binary I/O.
-- *** Output functions
putChar,
putStr, putStrLn, print,
-- *** Input functions
getChar,
getLine, getContents, interact,
-- *** Files
FilePath,
readFile, writeFile, appendFile, readIO, readLn,
-- ** Exception handling in the I\/O monad
IOError, ioError, userError,
) where
#ifndef __HUGS__
import Control.Monad
import System.IO
import System.IO.Error
import Data.List
import Data.Either
import Data.Maybe
import Data.Tuple
#endif
#ifdef __GLASGOW_HASKELL__
import GHC.Base
import Text.Read
import GHC.Enum
import GHC.Num
import GHC.Real
import GHC.Float
import GHC.Show
import GHC.Err ( undefined )
#endif
#ifdef __HUGS__
import Hugs.Prelude
#endif
#ifndef __HUGS__
infixr 0 $!
#endif
-- -----------------------------------------------------------------------------
-- Miscellaneous functions
-- | Strict (call-by-value) application, defined in terms of 'seq'.
($!)    :: (a -> b) -> a -> b
#ifdef __GLASGOW_HASKELL__
-- The bang pattern forces the argument before the call; see GHC
-- ticket #2273 for why this formulation is used under GHC.
f $! x  = let !vx = x in f vx  -- see #2273
#elif !defined(__HUGS__)
f $! x  = x `seq` f x
#endif
#ifdef __HADDOCK__
-- | The value of @'seq' a b@ is bottom if @a@ is bottom, and otherwise
-- equal to @b@. 'seq' is usually introduced to improve performance by
-- avoiding unneeded laziness.
-- (This definition exists only so Haddock has something to document;
-- the real 'seq' is primitive.)
seq :: a -> b -> b
seq _ y = y
#endif
| beni55/haste-compiler | libraries/ghc-7.8/base/Prelude.hs | bsd-3-clause | 5,371 | 264 | 9 | 1,248 | 1,215 | 748 | 467 | 115 | 1 |
{-# LANGUAGE TypeInType, TypeFamilies, UndecidableInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-- The "bad case" in #11391
module CustomTypeErrors04 where
import Data.Kind
import GHC.TypeLits (TypeError, ErrorMessage(..))
-- | Every application of this family collapses to a custom type error;
-- this is the "bad case" from #11391 exercised by the test below.
type family Resolve (t :: Type -> Type) :: Type -> Type where
  Resolve _ = TypeError (Text "ERROR")
-- NOTE(review): deliberately ill-typed -- this is a should_fail test
-- case, and compiling it must report the custom "ERROR" message above.
-- Do not "fix" it.
testNOTOK1 :: Resolve [] Int
testNOTOK1 = ()
| ezyang/ghc | testsuite/tests/typecheck/should_fail/CustomTypeErrors04.hs | bsd-3-clause | 379 | 0 | 8 | 58 | 89 | 53 | 36 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
module T5978 where
-- | Class with a functional dependency: the source type determines the
-- target type.
class C from to | from -> to where
instance C Float Char where
instance C Double Bool where
polyFoo :: (C from to) => from
polyFoo = undefined
polyBar ::
  (C fromA toA, C fromB toB) =>
  (toA -> toB) ->
  fromA -> fromB
polyBar = undefined
-- NOTE(review): this is a should_fail test (#5978): monoFoo fixes
-- fromA = Float, so the fundep gives toA = Char; passing 'id' forces
-- toB = toA = Char, but fromB = Double demands toB = Bool -- a fundep
-- conflict the compiler must report.  Do not "fix" it.
monoBar :: Double
monoBar = polyBar id monoFoo
monoFoo :: Float
monoFoo = polyFoo
| green-haskell/ghc | testsuite/tests/typecheck/should_fail/T5978.hs | bsd-3-clause | 459 | 0 | 8 | 117 | 133 | 74 | 59 | -1 | -1 |
{-|
Module: Capnp.Address
Description: Utilities for manipulating addresses within capnproto messages.
This module provides facilities for manipulating raw addresses within
Cap'N Proto messages.
This is a low level module that very few users will need to use directly.
-}
{-# LANGUAGE RecordWildCards #-}
module Capnp.Address
( WordAddr(..)
, CapAddr(..)
, Addr(..)
, OffsetError(..)
, computeOffset
, pointerFrom
, resolveOffset
)
where
import Data.Bits
import Data.Int
import Data.Word
import Capnp.Bits (WordCount)
import qualified Capnp.Pointer as P
-- | The address of a word within a message
data WordAddr = WordAt
    { segIndex :: !Int -- ^ Segment number
    , wordIndex :: !WordCount -- ^ offset in words from the start of the segment.
    } deriving(Show, Eq)
-- | The "address" of a capability (its index in the capability table).
newtype CapAddr = Cap Word32 deriving(Show, Eq)
-- | An address, i.e. a location that a pointer may point at.
data Addr
    -- | The address of some data in the message.
    = WordAddr !WordAddr
    -- | The "address" of a capability.
    | CapAddr !CapAddr
    deriving(Show, Eq)
-- | An error returned by 'computeOffset'; this describes the reason why a
-- value cannot be directly addressed from a given location.
data OffsetError
    -- | The pointer and the value are in different segments.
    = DifferentSegments
    -- | The pointer is in the correct segment, but too far away to encode the
    -- offset. (more than 30 bits would be required). This can only happen with
    -- segments that are > 8 GiB, which this library refuses to either decode
    -- or generate, so this should not come up in practice.
    | OutOfRange
-- | @'computeOffset' ptrAddr valueAddr@ computes the offset that should
-- be stored in a struct or list pointer located at @ptrAddr@ in order to
-- point at a value located at @valueAddr@.  Returns 'Left' with the
-- 'OffsetError' when the value cannot be directly addressed from there.
computeOffset :: WordAddr -> WordAddr -> Either OffsetError WordCount
computeOffset ptrAddr valueAddr
    | segIndex ptrAddr /= segIndex valueAddr = Left DifferentSegments
    | offset >= 1 `shiftL` 30                = Left OutOfRange
    | otherwise                              = Right offset
  where
    -- Offsets are measured from the word *after* the pointer itself.
    offset = wordIndex valueAddr - (wordIndex ptrAddr + 1)
-- | @'pointerFrom' ptrAddr targetAddr ptr@ updates @ptr@, such that it is
-- correct to target a value located at @targetAddr@ given that the pointer
-- itself is located at @ptrAddr@. Returns 'Left' if this is not possible.
--
-- It is illegal to call this on a capability pointer.
--
-- For far pointers, @targetAddr@ is taken to be the address of the landing pad,
-- rather than the final value.
pointerFrom :: WordAddr -> WordAddr -> P.Ptr -> Either OffsetError P.Ptr
pointerFrom _ _ (P.CapPtr _) = error "pointerFrom called on a capability pointer."
-- Far pointers store an absolute (segment, word) location, so no relative
-- offset computation is needed -- only the target's coordinates.
pointerFrom _ WordAt{..} (P.FarPtr twoWords _ _) =
    Right $ P.FarPtr twoWords (fromIntegral wordIndex) (fromIntegral segIndex)
-- Struct and list pointers carry a relative offset; delegate the range and
-- same-segment checks to 'computeOffset'.
pointerFrom ptrAddr targetAddr (P.StructPtr _ dataSz ptrSz) =
    flip fmap (computeOffset ptrAddr targetAddr) $
        \off -> P.StructPtr (fromIntegral off) dataSz ptrSz
pointerFrom ptrAddr targetAddr (P.ListPtr _ eltSpec) =
    flip fmap (computeOffset ptrAddr targetAddr) $
        \off -> P.ListPtr (fromIntegral off) eltSpec
-- | Add an offset to a WordAddr. Mirroring 'computeOffset', the offset is
-- interpreted relative to the word *after* @addr@ -- hence the extra @+ 1@.
resolveOffset :: WordAddr -> Int32 -> WordAddr
resolveOffset addr@WordAt{..} off =
    addr { wordIndex = wordIndex + fromIntegral off + 1 }
| zenhack/haskell-capnp | lib/Capnp/Address.hs | mit | 3,578 | 0 | 13 | 737 | 579 | 319 | 260 | 55 | 2 |
-- exercises from:
-- http://www.cse.chalmers.se/~rjmh/OPLSS/Exercises.pdf | NickAger/LearningHaskell | Monads and all that/Tree Monad.hsproj/Main.hs | mit | 94 | 0 | 2 | 8 | 4 | 3 | 1 | 1 | 0 |
-- NIM
-- http://www.codewars.com/kata/54120de842dff35232000195/
module NIM where
import Data.Bits (xor)
-- | Pick a winning move for the game of Nim: returns @(pileIndex, count)@,
-- the first pile from which removing @count@ stones makes the nim-sum (xor
-- of all pile sizes) zero. Partial when the nim-sum is already zero, i.e.
-- when no winning move exists.
chooseMove :: [Int] -> (Int,Int)
chooseMove piles = head winningMoves
  where
    nimSum = foldl1 xor piles
    -- Removing (size - (size `xor` nimSum)) stones from a pile zeroes the
    -- overall nim-sum; only positive removals are legal moves.
    winningMoves =
      [ (idx, removal)
      | (idx, size) <- zip [0 ..] piles
      , let removal = size - (size `xor` nimSum)
      , removal > 0
      ]
| gafiatulin/codewars | src/4 kyu/NIM.hs | mit | 265 | 0 | 12 | 54 | 121 | 69 | 52 | 5 | 1 |
-- Regular expression parser
-- http://www.codewars.com/kata/5470c635304c127cad000f0d/
module RegExpParser (RegExp(..), parseRegExp) where
import Control.Applicative (pure, empty, (<|>), (<*>), (<$>) , Applicative, Alternative)
import Control.Monad (ap, (>=>), guard)
import Control.Arrow ((&&&))
data RegExp = Normal Char | Any | ZeroOrMore RegExp | Or RegExp RegExp | Str [RegExp] deriving (Show, Eq)
-- | A minimal backtracking parser: consumes input and yields a value plus
-- the remaining input, or fails with 'Nothing'.
newtype Parser a = Parser {runParser :: String -> Maybe (a, String)}

instance Monad Parser where
  return v = Parser $ \input -> Just (v, input)
  Parser run >>= next = Parser $ run >=> \(v, rest) -> runParser (next v) rest

instance Functor Parser where
  fmap fn parser = ap (return fn) parser

instance Applicative Parser where
  pure = return
  pf <*> px = pf >>= \f -> px >>= \x -> return (f x)

instance Alternative Parser where
  empty = Parser (const Nothing)
  -- Try the left parser; fall back to the right one on failure.
  Parser first <|> Parser second = Parser $ \input -> first input <|> second input
-- | Lift a single-character recogniser into a 'Parser': consumes exactly one
-- character and succeeds iff the recogniser does; fails on empty input.
liftToken :: (Char -> Maybe a) -> Parser a
liftToken recognise = Parser step
  where
    step [] = Nothing
    step (c : rest) = recognise c >>= \v -> Just (v, rest)
-- | Consume one character, succeeding iff it equals the expected token @t@.
eat :: Char -> Parser ()
eat t = liftToken (guard . (==t))
-- | Run parser @p@ between the bracketing characters @l@ and @r@,
-- e.g. @wrap '(' regExpP ')'@ for a parenthesised group.
wrap :: Char -> Parser a -> Char -> Parser a
wrap l p r =
  eat l >> p >>= \inner -> eat r >> return inner
-- | Greedily parse zero or more occurrences of @itemP@.
parseList :: Parser a -> Parser [a]
parseList itemP = go
  where go = (itemP >>= \x -> fmap (x :) go) <|> return []
-- | @c ~> x@ consumes the character @c@ and yields the constant @x@.
(~>) :: Char -> a -> Parser a
c ~> x = eat c >> return x
-- | Parse an atomic pattern: a literal character (meta characters @.*|()@
-- excluded), @.@ as the wildcard 'Any', or a parenthesised sub-expression.
atomP :: Parser RegExp
atomP = liftToken literal <|> ('.' ~> Any) <|> wrap '(' regExpP ')'
  where
    literal c
      | c `elem` ".*|()" = Nothing
      | otherwise = Just (Normal c)
-- | Parse an atom optionally followed by @*@ (Kleene star). The starred
-- alternative is tried first so @"a*"@ is not parsed as just @"a"@.
zeroOrMoreP :: Parser RegExp
zeroOrMoreP = starred <|> atomP
  where starred = atomP >>= \inner -> eat '*' >> return (ZeroOrMore inner)
-- | Parse a sequence: two or more starred atoms become a 'Str'; a single
-- one is returned as-is (a singleton is never wrapped in 'Str').
seqP :: Parser RegExp
seqP = multi <|> zeroOrMoreP
  where
    multi =
      zeroOrMoreP >>= \first ->
        zeroOrMoreP >>= \second ->
          parseList zeroOrMoreP >>= \rest ->
            return (Str (first : second : rest))
-- | Top-level grammar: an alternation of two sequences ('Or'), or a plain
-- sequence.
regExpP :: Parser RegExp
regExpP = alternation <|> seqP
  where
    alternation =
      seqP >>= \lhs ->
        eat '|' >>
          seqP >>= \rhs ->
            return (Or lhs rhs)
-- | Parse a complete regular expression. Succeeds only when the whole
-- input is consumed; any leftover input means failure.
parseRegExp :: String -> Maybe RegExp
parseRegExp input =
  case runParser regExpP input of
    Just (result, "") -> Just result
    _ -> Nothing
| gafiatulin/codewars | src/2 kyu/RegExpParser.hs | mit | 2,275 | 0 | 18 | 678 | 960 | 509 | 451 | 54 | 2 |
import System.Directory
import System.Environment
import System.FilePath.Posix
import System.Process
import Data.Time
import Text.Regex
import Control.Monad
import Data.Maybe
import Data.List
import Data.Function (on)
import System.Console.GetOpt
import System.Exit (ExitCode(..), exitWith)
import System.IO (hFlush, hPutStrLn, stderr, stdout)
data Flag = Help
| Recursive
| CSV
| Bench
| Range [Int]
| TimeOut Int
deriving (Eq, Show)
-- | GetOpt descriptions of every command line option understood by lenny.
--
-- NOTE(review): the arguments of @-n@ and @-t@ are accepted but discarded --
-- the handlers ignore the string and always store @Range []@ / @TimeOut 0@.
options :: [OptDescr Flag]
options =
  [ Option ['h'] ["help"] (NoArg Help) "Show this help message"
  , Option ['r'] ["recursive"] (NoArg Recursive) "Traverse recursively"
  , Option ['c'] ["csv"] (NoArg CSV) "Print a CSV file"
  , Option ['n'] ["range"] (ReqArg (\_ -> Range []) "R") "Only test specific range"
  , Option ['t'] ["timeout"] (ReqArg (\_ -> TimeOut 0) "secs")
      "Only run each test at most this time, not recommended with -b"
  , Option ['b'] ["bench"] (NoArg Bench) "Perform a deep benchmark, recommended only with -n"
  ]
-- | Parse the command line and dispatch to 'lenny'. With no directory
-- arguments the current directory is used; @--help@, an invalid flag
-- combination, or parse errors print usage information instead.
parseArgs :: IO ()
parseArgs = do
  argv <- getArgs
  case parse argv of
    (opts, [], []) -> lenny opts ["."]
    (opts, dirs, [])
      | Help `elem` opts -> help
      -- CSV output only makes sense for a single directory.
      | CSV `elem` opts && length dirs > 1 -> help
      | otherwise -> lenny opts dirs
    (_, [], _) -> help
    (_, _, errs) -> die errs
  where
    parse = getOpt Permute options
    header =
      "Usage: lenny [-h] [-r] [-b] [-n range] [-t timeout] \n"
        ++ "[dir | -c dir | dir1 dir2]"
    info = usageInfo header options
    dump = hPutStrLn stderr
    die errs = dump (concat errs ++ info) >> exitWith (ExitFailure 1)
    help = dump info >> exitWith ExitSuccess

-- | Program entry point.
main :: IO ()
main = parseArgs
-- | Dispatch on the directories given on the command line: scan a single
-- directory for Euler binaries, time them, and report either as CSV or
-- pretty-printed to the terminal.
--
-- The empty-list case is never reached: 'parseArgs' substitutes ["."]
-- when no directory argument is given.
lenny :: [Flag] -> [FilePath] -> IO ()
lenny opts [dir] = do
  bins <- getBins dir
  timings <- lennyTime bins
  if CSV `elem` opts then
    lennyCSV timings
  else
    lennyTerm timings
-- Comparing two directories is not implemented yet. Bug fix: use putStrLn
-- rather than print -- print on a String would emit surrounding quotes.
lenny _ (_:_:_) = putStrLn "Unimplemented =("
-- | Scan @target@ for Project Euler binaries: executable files whose names
-- contain a problem number (see 'problemNr'). Returns (absolute path,
-- problem number) pairs sorted by problem number.
--
-- Side effect: changes the process working directory to @target@.
getBins :: FilePath -> IO [(FilePath, Int)]
getBins target = do
  setCurrentDirectory target
  entries <- getDirectoryContents target
  cwd <- getCurrentDirectory
  -- Absolute paths, with directories filtered out.
  candidates <- filterM doesFileExist (map ((cwd ++ "/") ++) entries)
  -- Keep only files whose name yields a problem number; the pattern bind
  -- in the comprehension silently drops 'Nothing' results.
  let numbered =
        [ (path, read (head groups))
        | path <- candidates
        , Just groups <- [problemNr path]
        ]
  -- Keep only executables, then order by problem number.
  perms <- mapM (getPermissions . fst) numbered
  let runnable = map fst (filter (executable . snd) (zip numbered perms))
  return (sortBy (compare `on` snd) runnable)
-- | Extract the Euler problem number from a file path: the capture groups
-- of the regex below (number first), or 'Nothing' when the name does not
-- look like a numbered problem file.
problemNr :: FilePath -> Maybe [String]
problemNr s = matchRegex reg s
  where
    reg = mkRegex "^.*/[A-Za-z]*0*([1-9]+)(\\..*)?$"
    -- ^.*/       directory structure
    -- [A-Za-z]*  beginning of filename
    -- 0*         optional zero padding
    -- ([1-9]+)   the problem number
    -- (\\..*)?$  optional file suffix
-- | Run every binary in order and record, for each, the first line of its
-- stdout together with the wall-clock duration of the run.
lennyTime :: [(FilePath,Int)] -> IO [((FilePath,Int), (String, NominalDiffTime))]
lennyTime = mapM timing

-- Run a single binary (no arguments, empty stdin) and time it.
-- NOTE(review): 'head . lines' is partial -- a binary that prints nothing
-- would make this crash.
timing (bin, num) = do
  begun <- getCurrentTime
  stdoutText <- readProcess bin [] ""
  ended <- getCurrentTime
  let elapsed = diffUTCTime ended begun
      firstLine = head (lines stdoutText)
  return ((bin, num), (firstLine, elapsed))
-- Formatting / Printing functions

-- | Print the timing results as CSV rows of @problem,seconds@.
-- Only the first occurrence of each problem number is kept.
lennyCSV :: [((FilePath, Int),(String,NominalDiffTime))] -> IO ()
lennyCSV results =
  putStrLn (intercalate "\n" (map formatCSV deduped))
  where
    -- Two entries are duplicates when they share a problem number.
    deduped = nubBy (\((_, a), _) ((_, b), _) -> a == b) results

-- Render one row; 'init' drops the trailing 's' that 'show' appends to
-- NominalDiffTime values.
formatCSV ((_, n), (_, t)) = show n ++ "," ++ init (show t)
-- | Pretty-print every timing result (duplicate problem numbers allowed),
-- then a blank line followed by the total and average running time.
lennyTerm :: [((FilePath, Int),(String,NominalDiffTime))] -> IO ()
lennyTerm bins = do
  mapM_ prettyPrint bins
  let timings = map (snd . snd) bins
      total = sum timings
      numOfTests = genericLength bins
  putStrLn ""
  putStrLn $ "Total running time: " ++ (show total)
  putStrLn $ "Average            : " ++ (show (total / numOfTests))
-- | Print one result line: the binary's file name with the problem number
-- highlighted in red, then the reported answer in green and the elapsed
-- time. Uses raw ANSI escape codes, so output assumes a colour terminal.
prettyPrint :: ((FilePath, Int),(String,NominalDiffTime)) -> IO ()
prettyPrint ((bin, num), (out,t)) = do
  -- Substitute the number inside the file name with a red-coloured copy.
  let filename = takeFileName bin
      r = mkRegex (show num)
      colorized = subRegex r filename ("\x1b[31m" ++ (show num) ++ "\x1b[0m")
  putStr $ colorized ++ " -> "
  putStrLn $ "\x1b[32m" ++ out ++ "\x1b[0m" ++ " : " ++ (show t)
| axelri/lenny | lenny.hs | mit | 5,203 | 1 | 15 | 1,510 | 1,652 | 881 | 771 | 111 | 4 |
module Chapter2Ex10 where
-- Exercise bindings demonstrating 'where'-clause scoping.

-- 3 * 3 + 1000 = 1009.
this = x * 3 + y
  where
    x = 3
    y = 1000

-- x depends on y from the same where block: x = 10 * 5 + 10 = 60, so 300.
that = x * 5
  where
    x = 10 * 5 + y
    y = 10

-- y is the negation of x, z is y * 10: (-70) / 7 + (-7) = -17.
ahh = z / x + y
  where
    x = 7
    y = negate x
    z = y * 10
| raventid/coursera_learning | haskell/ch2ex10.hs | mit | 244 | 0 | 8 | 140 | 95 | 53 | 42 | 11 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGZoomEvent
(js_getZoomRectScreen, getZoomRectScreen, js_getPreviousScale,
getPreviousScale, js_getPreviousTranslate, getPreviousTranslate,
js_getNewScale, getNewScale, js_getNewTranslate, getNewTranslate,
SVGZoomEvent, castToSVGZoomEvent, gTypeSVGZoomEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- Generated FFI bindings for SVGZoomEvent attribute accessors (the JS
-- expressions in the foreign imports must stay exactly as generated).
foreign import javascript unsafe "$1[\"zoomRectScreen\"]"
  js_getZoomRectScreen :: JSRef SVGZoomEvent -> IO (JSRef SVGRect)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.zoomRectScreen Mozilla SVGZoomEvent.zoomRectScreen documentation>
getZoomRectScreen :: (MonadIO m) => SVGZoomEvent -> m (Maybe SVGRect)
getZoomRectScreen self =
  liftIO (js_getZoomRectScreen (unSVGZoomEvent self) >>= fromJSRef)

foreign import javascript unsafe "$1[\"previousScale\"]"
  js_getPreviousScale :: JSRef SVGZoomEvent -> IO Float

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.previousScale Mozilla SVGZoomEvent.previousScale documentation>
getPreviousScale :: (MonadIO m) => SVGZoomEvent -> m Float
getPreviousScale self = liftIO (js_getPreviousScale (unSVGZoomEvent self))

foreign import javascript unsafe "$1[\"previousTranslate\"]"
  js_getPreviousTranslate :: JSRef SVGZoomEvent -> IO (JSRef SVGPoint)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.previousTranslate Mozilla SVGZoomEvent.previousTranslate documentation>
getPreviousTranslate :: (MonadIO m) => SVGZoomEvent -> m (Maybe SVGPoint)
getPreviousTranslate self =
  liftIO (js_getPreviousTranslate (unSVGZoomEvent self) >>= fromJSRef)

foreign import javascript unsafe "$1[\"newScale\"]"
  js_getNewScale :: JSRef SVGZoomEvent -> IO Float

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.newScale Mozilla SVGZoomEvent.newScale documentation>
getNewScale :: (MonadIO m) => SVGZoomEvent -> m Float
getNewScale self = liftIO (js_getNewScale (unSVGZoomEvent self))

foreign import javascript unsafe "$1[\"newTranslate\"]"
  js_getNewTranslate :: JSRef SVGZoomEvent -> IO (JSRef SVGPoint)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.newTranslate Mozilla SVGZoomEvent.newTranslate documentation>
getNewTranslate :: (MonadIO m) => SVGZoomEvent -> m (Maybe SVGPoint)
getNewTranslate self =
  liftIO (js_getNewTranslate (unSVGZoomEvent self) >>= fromJSRef)
-- |
-- Most of the code is borrowed from
-- <http://haskell.1045720.n5.nabble.com/darcs-patch-GenT-monad-transformer-variant-of-Gen-QuickCheck-2-td3172136.html a mailing list discussion>.
-- Therefor, credits go to Paul Johnson and Felix Martini.
module QuickCheck.GenT where
import QuickCheck.GenT.Prelude
import qualified Test.QuickCheck.Gen as QC
import Test.QuickCheck.Random (QCGen)
import qualified System.Random as Random
-- | A generator transformer: a computation that, given a random seed and a
-- size parameter, produces an action in the underlying monad @m@.
newtype GenT m a = GenT { unGenT :: QCGen -> Int -> m a }

instance (Functor m) => Functor (GenT m) where
  fmap f (GenT g) = GenT $ \r n -> fmap f (g r n)

instance (Monad m) => Monad (GenT m) where
  return x = GenT $ \_ _ -> return x
  -- Split the seed so the two halves of the bind see independent randomness.
  GenT g >>= k = GenT $ \r n ->
    let (r1, r2) = Random.split r
    in g r1 n >>= \x -> unGenT (k x) r2 n
  fail msg = GenT $ \_ _ -> fail msg

instance (Functor m, Monad m) => Applicative (GenT m) where
  pure = return
  gf <*> gx = gf >>= \f -> gx >>= \x -> return (f x)

instance MonadTrans GenT where
  lift action = GenT $ \_ _ -> action

instance (MonadIO m) => MonadIO (GenT m) where
  liftIO = lift . liftIO
-- | Unwrap a 'GenT' computation into a plain QuickCheck generator that
-- produces the underlying monadic action.
runGenT :: GenT m a -> QC.Gen (m a)
runGenT (GenT run) = QC.MkGen run
-- | Monads that support QuickCheck's generator primitives.
class (Applicative g, Monad g) => MonadGen g where
  -- | Embed a plain QuickCheck generator.
  liftGen :: QC.Gen a -> g a
  -- | Derive a differently-seeded generator from an integer (see 'var').
  variant :: Integral n => n -> g a -> g a
  -- | Build a generator that can inspect the current size parameter.
  sized :: (Int -> g a) -> g a
  -- | Run a generator with a fixed size parameter.
  resize :: Int -> g a -> g a
  -- | Uniformly choose a value in the given inclusive range.
  choose :: Random.Random a => (a, a) -> g a
instance (Applicative m, Monad m) => MonadGen (GenT m) where
  liftGen g = GenT $ \r n -> return (QC.unGen g r n)
  choose bounds = GenT $ \r _ -> return (fst (Random.randomR bounds r))
  variant k (GenT g) = GenT $ \r n -> g (var k r) n
  sized mk = GenT $ \r n -> unGenT (mk n) r n
  resize n (GenT g) = GenT $ \r _ -> g r n

instance MonadGen QC.Gen where
  liftGen = id
  variant k (QC.MkGen g) = QC.MkGen $ \r n -> g (var k r) n
  sized mk = QC.MkGen $ \r n -> QC.unGen (mk n) r n
  resize n (QC.MkGen g) = QC.MkGen $ \r _ -> g r n
  choose bounds = QC.MkGen $ \r _ -> fst (Random.randomR bounds r)
-- |
-- Private variant-generating function. Derives a new random generator from
-- an integer @k@ (negative values allowed) by walking a binary path through
-- 'Random.split': each step takes the first or second half depending on the
-- parity of @k@, so every integer yields a distinct generator in O(log k)
-- steps.
var :: Integral n => n -> QCGen -> QCGen
var k seed
  | k == half = chosen
  | otherwise = var half chosen
  where
    half = k `div` 2
    chosen = (if even k then fst else snd) (Random.split seed)
--------------------------------------------------------------------------
-- ** Common generator combinators
-- | Generate a value satisfying the predicate. Retries with ever larger
-- sizes until a satisfying value turns up; loops forever if none exists.
suchThat :: MonadGen m => m a -> (a -> Bool) -> m a
suchThat gen p =
  suchThatMaybe gen p >>= \found ->
    case found of
      Just x -> return x
      Nothing -> sized $ \n -> resize (n + 1) (suchThat gen p)
-- | Try to generate a value satisfying the predicate, giving up after a
-- number of attempts bounded by the size parameter (at least one attempt).
-- Each failed attempt retries at a slightly larger size.
suchThatMaybe :: MonadGen m => m a -> (a -> Bool) -> m (Maybe a)
suchThatMaybe gen p = sized $ \size -> attempt 0 (max 1 size)
  where
    attempt _ 0 = return Nothing
    attempt k n =
      resize (2 * k + n) gen >>= \x ->
        if p x then return (Just x) else attempt (k + 1) (n - 1)
-- | Generate a list of random length; the maximum length is the current
-- size parameter.
listOf :: MonadGen m => m a -> m [a]
listOf gen = sized $ \bound ->
  choose (0, bound) >>= \len -> vectorOf len gen
-- | Generate a non-empty list of random length; at most the size parameter
-- elements, but always at least one (even at size 0).
listOf1 :: MonadGen m => m a -> m [a]
listOf1 gen = sized $ \bound ->
  choose (1, max 1 bound) >>= \len -> vectorOf len gen
-- | Generate a list of exactly @k@ elements.
vectorOf :: MonadGen m => Int -> m a -> m [a]
vectorOf k gen = sequence (replicate k gen)
-- * Partial functions
-------------------------
-- | Randomly uses one of the given generators. The input list
-- must be non-empty ('error' otherwise).
oneof :: MonadGen m => [m a] -> m a
oneof gens =
  fmap (fromMaybe (error "QuickCheck.GenT.oneof used with empty list"))
       (oneofMay gens)
-- | Choose among the given generators with probability proportional to the
-- attached weights. The input list must be non-empty ('error' otherwise).
--
-- NOTE(review): non-positive weights are not rejected; a zero total weight
-- would make the internal lookup fall off the end of the list.
frequency :: MonadGen m => [(Int, m a)] -> m a
frequency [] = error "QuickCheck.GenT.frequency used with empty list"
frequency weighted = choose (1, total) >>= select weighted
  where
    total = sum (map fst weighted)
    select ((w, gen) : rest) n
      | n <= w = gen
      | otherwise = select rest (n - w)
    select [] _ = error "QuickCheck.GenT.pick used with empty list"
-- | Pick one of the given values uniformly at random. The input list must
-- be non-empty ('error' otherwise).
elements :: MonadGen m => [a] -> m a
elements xs =
  fmap (fromMaybe (error "QuickCheck.GenT.elements used with empty list"))
       (elementsMay xs)
-- | Takes a list of elements of increasing size and chooses among an
-- initial segment whose length grows with the size parameter. The input
-- list must be non-empty ('error' otherwise).
growingElements :: MonadGen m => [a] -> m a
growingElements xs =
  fmap (fromMaybe (error "QuickCheck.GenT.growingElements used with empty list"))
       (growingElementsMay xs)
-- * Non-partial functions resulting in Maybe
-------------------------
-- |
-- Randomly run one of the given generators; 'Nothing' for an empty list.
oneofMay :: MonadGen m => [m a] -> m (Maybe a)
oneofMay [] = return Nothing
oneofMay gens =
  choose (0, length gens - 1) >>= \i -> fmap Just (gens !! i)
-- | Pick one of the given values uniformly; 'Nothing' for an empty list.
elementsMay :: MonadGen m => [a] -> m (Maybe a)
elementsMay [] = return Nothing
elementsMay xs =
  choose (0, length xs - 1) >>= \i -> return (Just (xs !! i))
-- | Choose from an initial segment of the list whose length grows with the
-- size parameter (on a logarithmic curve, conceptually capped at size 100);
-- 'Nothing' for an empty list. The input list is assumed to be ordered by
-- increasing "size" of its elements.
growingElementsMay :: MonadGen m => [a] -> m (Maybe a)
growingElementsMay [] = return Nothing
growingElementsMay xs = fmap Just $ sized $ \n ->
    elements (take (max 1 (segment n)) xs)
  where
    total = length xs
    logOf = round . log . fromIntegral
    -- Scale the size parameter onto the list length.
    segment n = (logOf n + 1) * total `div` logOf 100
| srhb/QuickCheck-GenT | src/QuickCheck/GenT.hs | mit | 5,876 | 0 | 15 | 1,372 | 2,134 | 1,099 | 1,035 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
module Hash( inlineHashes
, hashBucketFiles
, createHash
, FileOrFolderDoesNotExist(..)
, HashNotFound(..)
) where
import Conduit ( sourceFile
, runConduitRes
, awaitForever
, MonadBaseControl
)
import Control.Exception.Safe (throwM, MonadThrow)
import Control.Lens ((&), (.~))
import Control.Monad.IO.Class (MonadIO(..))
import Crypto.Hash (Digest)
import Crypto.Hash.Algorithms (SHA1(..))
import Crypto.Hash.Conduit (sinkHash)
import Data.Conduit ((.|))
import qualified Data.HashMap.Lazy as HashMap
import Data.Text (pack, unpack, Text)
import System.FilePath.Posix (joinPath)
import StackParameters (paths, BucketFiles(..))
import Types ( Paths
, FileHashes
, FileOrFolderDoesNotExist(..)
, HashNotFound(..)
)
import FileSystem (sourceFileOrDirectory)
-- | For a bucket marked as hashed, rewrite every destination path so it is
-- prefixed with the file's content hash looked up in the given 'FileHashes'
-- map; throws 'HashNotFound' when a path has no recorded hash. Buckets that
-- are not hashed are returned unchanged.
inlineHashes :: MonadThrow m => FileHashes -> BucketFiles -> m BucketFiles
inlineHashes hashedPaths bucketFiles@BucketFiles{..} =
  if _isHashed
    then do
      inlinedFiles <- HashMap.traverseWithKey
        (prependWithHash hashedPaths) _paths
      return $ bucketFiles & paths .~ inlinedFiles
    else return bucketFiles
  where
    -- Join the hash (as a directory component) in front of the alternate
    -- path, e.g. "abc123/file.txt".
    prependWithHash :: MonadThrow m => Paths -> Text -> Text -> m Text
    prependWithHash hashes path altPath =
      maybe (throwHashNotFound path)
        (pure . pack . joinPath . (: [unpack altPath]) . unpack)
        (HashMap.lookup path hashes)
      where
        throwHashNotFound filePath = throwM $ HashNotFound filePath
-- | Compute a SHA1 content hash (via 'createHash') for every source path of
-- a hashed bucket, keyed by path; returns an empty map for unhashed buckets.
hashBucketFiles :: (MonadThrow m, MonadBaseControl IO m, MonadIO m) => BucketFiles -> m FileHashes
hashBucketFiles BucketFiles{..} =
  if _isHashed
    then HashMap.traverseWithKey hashKey _paths
    else return HashMap.empty
  where
    -- The map value is ignored; only the path (key) is hashed.
    hashKey :: (MonadThrow m, MonadBaseControl IO m, MonadIO m) => Text -> a -> m Text
    hashKey path _ = do
      hash <- createHash $ unpack path
      return $ (pack . show) hash
-- | SHA1-hash the contents at @path@, streaming via conduit so files are
-- not loaded into memory at once. The 'True' flag is forwarded to
-- 'sourceFileOrDirectory' -- presumably enabling directory traversal;
-- TODO(review) confirm its meaning against FileSystem.
createHash :: (MonadThrow m, MonadBaseControl IO m, MonadIO m)
           => FilePath
           -> m (Digest SHA1)
createHash path = runConduitRes $
  sourceFileOrDirectory True path .| awaitForever sourceFile .| sinkHash
| SEEK-Org/evaporate | src/Hash.hs | mit | 2,482 | 0 | 13 | 771 | 644 | 358 | 286 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables, Rank2Types #-}
import Data.Word
import Control.Monad.State.Strict
import Data.Unique
import qualified Data.Unique.Local as L
import qualified Data.Unique.Global as G
import qualified Data.Unique.LocalSTM as LSTM
import qualified Data.Unique.GlobalSTM as GSTM
-- | Read a 'Word' and an 'Int' from stdin, tag each with a unique id using
-- the supplied generator, and print the two ids.
--
-- NOTE(review): 'read' is partial -- non-numeric input makes this crash.
test :: MonadIO m => (forall a. a -> m (Unique a)) -> m ()
test genf = do
  w :: Word <- liftIO $ read `fmap` getLine
  i :: Int <- liftIO $ read `fmap` getLine
  a <- genf w
  b <- genf i
  liftIO $ print (_id a, _id b)
-- | The same test wired to each unique-supply implementation:
-- local / global, in both plain and STM flavours.
local, global, localSTM, globalSTM :: IO ()
local = L.evalUniqueT $ test L.getUnique
global = test G.getUnique
localSTM = LSTM.evalUniqueT $ test LSTM.getUnique
globalSTM = test GSTM.getUnique
| treep/data-unique | Test.hs | mit | 715 | 2 | 12 | 128 | 264 | 143 | 121 | 20 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.WaveShaperNode
(js_setCurve, setCurve, js_getCurve, getCurve, js_setOversample,
setOversample, js_getOversample, getOversample, WaveShaperNode,
castToWaveShaperNode, gTypeWaveShaperNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Generated FFI bindings for WaveShaperNode attribute accessors (the JS
-- expressions in the foreign imports must stay exactly as generated).
foreign import javascript unsafe "$1[\"curve\"] = $2;"
  js_setCurve :: WaveShaperNode -> Nullable Float32Array -> IO ()

-- | <https://developer.mozilla.org/en-US/docs/Web/API/WaveShaperNode.curve Mozilla WaveShaperNode.curve documentation>
setCurve :: (MonadIO m, IsFloat32Array val) => WaveShaperNode -> Maybe val -> m ()
setCurve self val =
  liftIO (js_setCurve self (maybeToNullable (fmap toFloat32Array val)))

foreign import javascript unsafe "$1[\"curve\"]"
  js_getCurve :: WaveShaperNode -> IO (Nullable Float32Array)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/WaveShaperNode.curve Mozilla WaveShaperNode.curve documentation>
getCurve :: (MonadIO m) => WaveShaperNode -> m (Maybe Float32Array)
getCurve self = liftIO (nullableToMaybe <$> js_getCurve self)

foreign import javascript unsafe "$1[\"oversample\"] = $2;"
  js_setOversample :: WaveShaperNode -> JSVal -> IO ()

-- | <https://developer.mozilla.org/en-US/docs/Web/API/WaveShaperNode.oversample Mozilla WaveShaperNode.oversample documentation>
setOversample :: (MonadIO m) => WaveShaperNode -> OverSampleType -> m ()
setOversample self val = liftIO (js_setOversample self (pToJSVal val))

foreign import javascript unsafe "$1[\"oversample\"]"
  js_getOversample :: WaveShaperNode -> IO JSVal

-- | <https://developer.mozilla.org/en-US/docs/Web/API/WaveShaperNode.oversample Mozilla WaveShaperNode.oversample documentation>
getOversample :: (MonadIO m) => WaveShaperNode -> m OverSampleType
getOversample self = liftIO (js_getOversample self >>= fromJSValUnchecked)
module Language.Dash.VM.Types where
import Data.Word
import Data.List (intercalate)
import Data.List.Split (chunksOf)
type VMWord = Word32
-- | A value as represented inside the Dash VM. Several reserved symbol
-- names ($_list, $_tuple, $_record, $_empty_list) encode composite data;
-- see the 'Show' instance below.
data VMValue =
    VMNumber Int -- ^ Plain integer value.
  | VMSymbol String [VMValue] -- ^ Symbol with a name and field values.
  | VMString String
  | VMClosure -- TODO add meaningful data
  | VMFunction -- TODO add meaningful data (name, arguments, etc)
  | VMOpaqueSymbol
  deriving (Eq)
-- Render VM values for display. The reserved symbol names are shown with
-- special syntax; all other symbols are printed as :name<fields>.
instance Show VMValue where
  show value = case value of
    VMNumber n -> show n
    VMString str -> "\"" ++ str ++ "\""
    VMClosure -> "<closure>"
    VMFunction -> "<function>"
    VMOpaqueSymbol -> "<opaque symbol>" -- TODO special handling for modules
    -- Match order matters: the nullary empty-list case must come before
    -- the generic nullary symbol case, and nullary symbols before the
    -- reserved composite names (so e.g. ":$_list" with no fields prints
    -- as a plain symbol).
    VMSymbol "$_empty_list" [] -> "[]"
    VMSymbol name [] -> ":" ++ name
    VMSymbol "$_list" fields -> showNestedList fields
    VMSymbol "$_tuple" fields -> "(" ++ intercalate ", " (map show fields) ++ ")"
    VMSymbol "$_record" fields -> "{" ++ intercalate ", " (recordFields fields) ++ "}"
    VMSymbol name fields -> ":" ++ name ++ "<" ++ intercalate ", " (map showField fields) ++ ">"
-- | Render a symbol field, parenthesising non-nullary symbols so nested
-- structure stays readable.
showField :: VMValue -> String
showField value = case value of
  nested@(VMSymbol _ (_ : _)) -> "(" ++ show nested ++ ")"
  other -> show other
-- | Render a cons-list encoded as nested @$_list@ symbols using Haskell
-- list notation. An improper tail (neither @$_list@ nor @$_empty_list@)
-- is wrapped in @!< ... >!@ markers.
-- TODO Add better output for invalid lists
showNestedList :: [VMValue] -> String
showNestedList body =
    "[" ++ intercalate ", " (flatten (VMSymbol "$_list" body)) ++ "]"
  where
    flatten (VMSymbol "$_list" [x, VMSymbol "$_empty_list" []]) = [show x]
    flatten (VMSymbol "$_list" [x, rest@(VMSymbol "$_list" _)]) = show x : flatten rest
    flatten (VMSymbol "$_list" [x, improper]) = show x : "!<" : flatten improper ++ [">!"]
    flatten other = [show other]
-- | Render @$_record@ fields, which are laid out as a flat alternating
-- @[key1, value1, key2, value2, ...]@ list of nullary-symbol keys and
-- values.
--
-- NOTE(review): partial, like the original -- an odd-length body or a
-- non-nullary-symbol key fails the inner pattern match at runtime.
recordFields :: [VMValue] -> [String]
recordFields symbolBody = map render (chunksOf 2 symbolBody)
  where
    render [VMSymbol key [], value] = key ++ " = " ++ show value
| arne-schroppe/dash | src/Language/Dash/VM/Types.hs | mit | 1,821 | 0 | 13 | 426 | 654 | 341 | 313 | 43 | 4 |
{-# LANGUAGE OverloadedStrings #-}
-- | A small utility module that provides a foundation for dynamically enabling and disabling features.
module Control.FeatureFlag where
import Control.Monad
import Data.Text (Text)
-- | A simple toggle for selectively enabling or disabling functionality.
-- The type parameter @a@ is unused by the constructors (a phantom tag), so
-- toggles for different features cannot be mixed up.
data FeatureToggle a = Enabled | Disabled
  deriving (Read, Show, Eq)
-- | A union of different feature providers which maintains a currently active provider and facilities for changing providers.
--
-- Use this when you don\'t need to disable a feature, just to replace the implementation.
data FeatureProvider a = FeatureProvider
  { enabledProvider :: a -- ^ The implementation currently in use.
  , enabledProviderName :: Text -- ^ Name under which the active provider was selected (\"default\" for the fallback).
  , availableProviders :: [(Text, a)] -- ^ Named providers that 'use' can switch between.
  , defaultProvider :: a -- ^ Fallback installed when a lookup fails or \"default\" is requested.
  }
-- | Enable a feature, regardless of its current state.
enable :: FeatureToggle a -> FeatureToggle a
enable _ = Enabled

-- | Disable a feature, regardless of its current state.
disable :: FeatureToggle a -> FeatureToggle a
disable _ = Disabled
-- | Flip a toggle: enabled becomes disabled and vice versa.
toggle :: FeatureToggle a -> FeatureToggle a
toggle Enabled = Disabled
toggle Disabled = Enabled
-- | Eliminator for 'FeatureToggle': return the first value when the toggle
-- is enabled, the second when it is disabled.
withToggle :: FeatureToggle a
           -> b -- return when the toggle is enabled
           -> b -- return when the toggle is disabled
           -> b
withToggle Enabled onEnabled _ = onEnabled
withToggle Disabled _ onDisabled = onDisabled
-- | Run the action (discarding its result) only when the feature is enabled.
whenEnabled :: (Functor m, Monad m) => FeatureToggle a -> m b -> m ()
whenEnabled Enabled action = void action
whenEnabled Disabled _ = return ()
-- | Run the action (discarding its result) only when the feature is disabled.
whenDisabled :: (Functor m, Monad m) => FeatureToggle a -> m b -> m ()
whenDisabled Disabled action = void action
whenDisabled Enabled _ = return ()
-- | Switch the active provider to the one registered under the given name.
-- Returns 'Right' on a successful lookup (or an explicit request for
-- \"default\"); returns 'Left' with the default provider installed when the
-- lookup fails.
use :: Text -> FeatureProvider a -> Either (FeatureProvider a) (FeatureProvider a)
use name provider
  | name == "default" = Right fallback
  | otherwise =
      case lookup name (availableProviders provider) of
        Just impl -> Right provider { enabledProvider = impl, enabledProviderName = name }
        Nothing -> Left fallback
  where
    fallback =
      provider { enabledProvider = defaultProvider provider, enabledProviderName = "default" }
-- | Apply a function to the currently enabled feature implementation.
withProvider :: FeatureProvider a -> (a -> b) -> b
withProvider provider f = f (enabledProvider provider)
| iand675/feature-flags | src/Control/FeatureFlag.hs | mit | 2,621 | 0 | 11 | 508 | 561 | 299 | 262 | 43 | 3 |
module Examples.DiscPool where
import Control.Applicative
import Data.Time
import System.IO
import Database.EventSafe.Types
import Database.EventSafe.DiscPool
import Examples.Shared
-- | Entry point: ask for a storage directory, create a disc-backed event
-- pool there (the numeric argument 5 is forwarded to 'makeDiscPool' --
-- TODO(review): confirm its meaning), then enter the interactive loop.
main :: IO ()
main = do
  hSetBuffering stdout NoBuffering
  putStr "Storage location: "
  dir <- getLine
  storage <- makeDiscPool dir 5
  handleQuery storage
-- | Interactive loop: prompt for an action (add an event or query a
-- resource), dispatch it, and repeat forever.
handleQuery :: DiscPool Event -> IO ()
handleQuery storage = do
  putStr "(1) New event\n(2) Get resource\nYour choice: "
  choice <- getLine
  case choice of
    "1" -> newEvent storage
    "2" -> getRes storage
    _ -> putStrLn "Unrecognized sequence."
  handleQuery storage
-- | Prompt for the details of a new event (user creation, password change,
-- or post creation), timestamp it with the current time, and append it to
-- the event pool.
--
-- NOTE(review): 'read' on the post id is partial -- non-numeric input
-- crashes.
newEvent :: DiscPool Event -> IO ()
newEvent storage = do
  putStr "(1) New user\n(2) Change password\n(3) New post\nYour choice: "
  choice <- getLine
  case choice of
    "1" -> do
      putStr "Email: "
      email <- getLine
      putStr "Password: "
      pwd <- getLine
      time <- show <$> getCurrentTime
      addEventM storage $ UserCreation time (Email email) pwd
      putStrLn "User created."
    "2" -> do
      putStr "Email: "
      email <- getLine
      putStr "Password: "
      pwd <- getLine
      time <- show <$> getCurrentTime
      addEventM storage $ UserChangePassword time (Email email) pwd
      putStrLn "Password changed."
    "3" -> do
      putStr "Id (number please): "
      postIdStr <- getLine
      putStr "Author e-mail: "
      email <- getLine
      putStr "Title: "
      title <- getLine
      time <- show <$> getCurrentTime
      addEventM storage $ PostCreation time (PostId . read $ postIdStr) (Email email) title
      putStrLn "Post created."
    _ -> putStrLn "Unrecognized sequence."
-- | Prompt for a resource key (user e-mail or post id), look the resource
-- up in the event pool, and print the result.
--
-- NOTE(review): 'read' on the post id is partial -- non-numeric input
-- crashes.
getRes :: DiscPool Event -> IO ()
getRes events = do
  putStr "(1) Get a user\n(2) Get a post\nYour choice: "
  choice <- getLine
  case choice of
    "1" -> do
      putStr "Email: "
      email <- getLine
      mres <- getResourceM events (Email email) :: IO (Maybe User)
      print mres
    "2" -> do
      putStr "Post Id (number please): "
      postIdStr <- getLine
      mres <- getResourceM events (PostId . read $ postIdStr) :: IO (Maybe Post)
      print mres
    _ -> putStrLn "Unrecognized sequence."
| thoferon/eventsafe | Examples/DiscPool.hs | mit | 2,184 | 0 | 16 | 568 | 635 | 282 | 353 | 71 | 4 |
{-# LANGUAGE GeneralizedNewtypeDeriving, UndecidableInstances, FlexibleInstances, FlexibleContexts, MultiParamTypeClasses, NamedFieldPuns, TupleSections #-}
{-
Copyright (C) 2012-2013 Jimmy Liang, Kacper Bak <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Language.Clafer.Intermediate.GLPKScopeAnalyzer (glpkScopeAnalysis) where
import Language.Clafer.Front.Absclafer hiding (Path)
import qualified Language.Clafer.Intermediate.Intclafer as I
import Language.Clafer.Intermediate.Analysis
import Language.Clafer.Intermediate.ResolverType
import Control.Applicative (Applicative(..), (<$>))
import Control.Monad
import Control.Monad.List
import Control.Monad.LPMonad
import Control.Monad.Maybe
import Control.Monad.Reader
import Control.Monad.State
import Data.Either
import Data.LinearProgram hiding (constraints)
import Data.List
import Data.Map ()
import qualified Data.Map as Map
import Data.Maybe
import System.IO.Unsafe
import Text.Parsec.Combinator
import Text.Parsec.Error
import Text.Parsec.Pos
import Text.Parsec.Prim
import Text.Parsec.String ()
{------------------------------------------------------------
---------- Linear programming ------------------------------
------------------------------------------------------------}
-- | Compute scopes for clafers by solving a system of linear equations.
--   Returns (uid, scope) pairs, plus an ("int", bitwidth) entry when the
--   required bitwidth exceeds Alloy's default of 4.
glpkScopeAnalysis :: I.IModule -> [(String, Integer)]
glpkScopeAnalysis imodule =
  intScope ++ scopes
  where
  -- Only report an "int" scope when it exceeds the default bitwidth of 4.
  intScope = if bitwidth > 4 then return ("int", bitwidth) else fail "Bitwidth less than default."
  -- The bitwidth must cover every literal constant and every computed scope.
  bitwidth = bitwidthAnalysis (constants ++ map snd scopes)
  scopes =
    removeZeroes $ removeRoot $ removeAux $
    -- unsafePerformIO should be safe (?)
    -- We aren't modifying any global state.
    -- If we don't use unsafePerformIO, then we have to be inside the IO monad and
    -- makes things really ugly. Might as well contain the ugliness in here.
    case unsafePerformIO solution of
      (Success, Just (_, s)) -> Map.toList $ Map.map round s
      _ -> [] -- No solution
  ((_, constants), analysis) = runScopeAnalysis run $ gatherInfo imodule
  run =
    do
    setConstraints
    abstracts' <- clafers `suchThat` isAbstract
    constants' <- constantsAnalysis
    return (abstracts', constants')
  solution = {-trace (show $ unsafePerformIO $ writeLP "TESTTT" analysis) $-} glpSolveVars mipDefaults{msgLev = MsgOff} $ analysis
  -- Any scope that is 0 will take the global scope of 1 instead.
  removeZeroes = filter ((/= 0) . snd)
  -- The root is implied and not part of the actual solution.
  removeRoot = filter ((/= rootUid) . fst)
  -- Auxiliary variables are only part of the computation, not the solution.
  removeAux = filter (not . (uniqNameSpace `isPrefixOf`) . fst)
  -- The scope for abstract clafers are removed. Alloy doesn't need it. Makes
  -- it easier to use since user can increase the scope of subclafers without
  -- needing to increase the scope of the abstract Clafer.
  --removeAbstracts = filter (not . (`elem` map uid abstracts) . fst)
-- | Smallest bitwidth whose two's-complement range covers every given
--   constant. Width (i+1) bits covers [-2^i .. 2^i - 1]; the search always
--   terminates because the candidate ranges grow without bound.
bitwidthAnalysis :: [Integer] -> Integer
bitwidthAnalysis constants = go 0
  where
  go :: Integer -> Integer
  go i
    | all fits constants = i + 1
    | otherwise          = go (i + 1)
    where
    fits c = c >= -(2 ^ i) && c <= 2 ^ i - 1
-- | Collect every integer literal occurring anywhere inside any constraint.
--   Used to size the bitwidth.
constantsAnalysis :: ScopeAnalysis [Integer]
constantsAnalysis =
  do
  cons <- constraintsUnder anything `select` snd
  return $ mapMaybe integerConstant [I._exp sub | con <- cons, sub <- subexpressions con]
  where
  integerConstant (I.IInt i) = Just i
  integerConstant _ = Nothing
-- | An inclusive cardinality interval; an upper bound of (-1) means infinity.
data Between =
  Between Integer Integer
  deriving Show
--atLeastOne :: Between -> Bool
--atLeastOne (Between i _) = i >= 1
{-
overlap :: Between -> Between -> Maybe Between
overlap (Between l1 h1) (Between l2 h2)
| l1 > h2 && h2 /= -1 = Nothing
| l2 > h1 && h1 /= -1 = Nothing
| otherwise = Just $ Between (maxx l1 l2) (minn h1 h2)
where
minn (-1) b = b
minn a (-1) = a
minn a b = min a b
maxx (-1) _ = -1
maxx _ (-1) = -1
maxx a b = max a b
overlapM :: Maybe Between -> Maybe Between -> Maybe Between
overlapM a b =
do
a' <- a
b' <- b
overlap a' b'
-}
-- | Multiplication on non-negative cardinalities where (-1) encodes
--   infinity: anything times infinity is infinity.
mult :: Integer -> Integer -> Integer
mult a b
  | a == -1 || b == -1 = -1
  | otherwise          = a * b
-- | Compute a rough (low, high) cardinality interval for every clafer by
--   multiplying fixed cardinalities down the containment hierarchy,
--   starting from the root with exactly one instance.
simpleAnalysis :: ScopeAnalysis [(String, Between)]
simpleAnalysis =
  do
  root <- claferWithUid rootUid
  analysis <- simpleAnalysis' root (Between 1 1)
  --moreAnalysis <- simpleConstraintAnalysis analysis
  return analysis
  where
  simpleAnalysis' cur cb@(Between l h) =
    runListT $ return (uid cur, cb) `mplus` do
      child <- foreach $ (anything |^ cur) `select` fst
      -- Exact bounds only propagate through ungrouped clafers (group 0..*);
      -- a group cardinality makes the child's interval unknown (0 .. inf).
      let b
            | groupLow cur == 0 && groupHigh cur == -1 = Between (low child * l) (high child `mult` h)
            | otherwise = Between 0 (-1)
      foreach (simpleAnalysis' child b)
{-
mergeAnalysis analysis =
[(n, fromJust x) | (n, b) <- combine analysis, let x = foldr1 overlapM $ map Just b, isJust x]
simpleConstraintAnalysis :: [(String, Between)] -> ScopeAnalysis [(String, Between)]
simpleConstraintAnalysis analysis = mergeAnalysis <$> simpleConstraintAnalysis' analysis
simpleConstraintAnalysis' analysis =
runListT $ do
(curThis, cons) <- foreach $ constraintsUnder anything
constraintBetween curThis (I._exp cons)
where
constraintBetween _ I.IDeclPExp {I._quant = I.ISome, I._oDecls = [], I._bpexp} =
do
let t = map tLexeme $ fromMaybe [] $ unfoldJoins bpexp
guard (not $ null t)
guard ("this" `notElem` t)
guard ("parent" `notElem` t)
guard ("ref" `notElem` t)
msum $ map someStep t
constraintBetween curThis I.IFunExp{I._op = "&&", I._exps = [exp1, exp2]} =
constraintBetween curThis (I._exp exp1) `mplus` constraintBetween curThis (I._exp exp2)
constraintBetween _ _ = mzero
someStep step =
do
parent <- parentOf step
let parentBetween = fromMaybe (error $ "Missing parent " ++ parent) $ lookup parent analysis
guard $ atLeastOne parentBetween
return (step, Between 1 $ -1)
-}
-- | Install every linear-programming constraint for the scope analysis,
--   including constraints over the reified (flattened) clafers.
setConstraints :: ScopeAnalysis ()
setConstraints =
  do
  simpleAnalysis
  p <- flatten
  withExtraClafers p $ do
    optFormula
    colonConstraints
    refConstraints
    parentConstraints
    constraintConstraints
    -- There is always exactly one root instance.
    (var rootUid) `equalTo` 1
-- | Objective function: minimise the total scope over all concrete,
--   derived, non-auxiliary clafers.
optFormula :: ScopeAnalysis ()
optFormula =
  do
  setDirection Min
  c <- clafers
  let concretes = [uid concrete | concrete <- c, isConcrete concrete, isDerived concrete, not $ uniqNameSpace `isPrefixOf` uid concrete]
  setObjective $ varSum concretes
-- | Couple every clafer's scope to its parent's scope via its cardinality.
parentConstraints :: ScopeAnalysis ()
parentConstraints =
  runListT_ $ do
    -- forall child under parent ...
    (child, parent) <- foreach $ anything |^ anything
    let uchild = uid child
    let uparent = uid parent
    if low child == high child
      -- Saves us one constraint
      then do
        var uchild `equal` (low child *^ var uparent)
      else do
        -- ... scope_this <= scope_parent * low-card(this) ...
        var uchild `geq` (low child *^ var uparent)
        -- ... scope_this >= scope_parent * high-card(this) ...
        -- high == -1 implies high card is unbounded
        if high child /= -1
          then var uchild `leq` (high child *^ var uparent)
          {-
          - A
          -   B *
          -   [#B = 4]
          -
          - Need this constraint so that #A=1
          -}
          else (smallM *^ var uchild) `leq` var uparent
    -- Use integers, not doubles
    setVarKind uchild IntVar
    setVarKind uparent IntVar
-- | Whenever a clafer that refs another is populated at all, the target
--   needs at least low-card(source) instances.
refConstraints :: ScopeAnalysis ()
refConstraints =
  runListT_ $ do
    -- for all uids of any clafer that refs another uid ...
    (sub, sup) <- foreach $ (anything |-> anything) `suchThat` (isDerived . superClafers)
    let usub = uid sub
    let usup = uid sup
    -- aux is 1 exactly when scope_sub > 0
    aux <- testPositive usub
    -- scope_sup >= low-card(sub)
    var usup `geq` ((max 1 $ low sub) *^ var aux)
-- | The scope of an extended clafer equals the sum of the scopes of
--   everything that extends it with a colon.
colonConstraints :: ScopeAnalysis ()
colonConstraints =
  runListT_ $ do
    -- forall c in the set of clafers' uid ...
    c <- foreach $ clafers `suchThat` isDerived
    -- ... find all uids of any clafer that extends c (only colons) ...
    subs <- findAll $ (anything |: c) `select` (uid . subClafers)
    -- `unless` replaces the non-idiomatic `when (not $ null subs)`.
    unless (null subs) $
      -- ... then set the constraint scope_C = sum scope_subs
      var (uid c) `equal` varSum subs
-- | Create reified copies of the children of abstract clafers, one copy per
--   concrete clafer that (transitively) extends them via a colon.
flatten :: ScopeAnalysis [SClafer]
flatten =
  runListT $ do
    abs' <- clafers `suchThat` isAbstract
    (c, s) <- foreach $ anything |: anything
    ListT $ runReaderT (addChildren (map uid abs') (Part [uid c, uid s]) (Part [])) []
-- | Recursively build reified 'SClafer's for every child reachable from the
--   path @steps@, following inherited children through colons as well.
--   @abs'@ lists abstract uids (dropped when forming reified names), and
--   @ss@ tracks the parallel super-path for each reified child.
addChildren :: MonadAnalysis m => [String] -> Part -> Part -> m [SClafer]
addChildren abs' (Part steps) ss@(Part supSteps) =
  do
  let parBase = last steps
  chis <- directChildrenOf parBase
  achis <- forM chis $
    \chi -> do
      let chiP = Part $ init steps ++ [chi]
      let par = Part steps
      let supP = Part $ supSteps ++ [chi]
      chiC <- claferWithUid chi
      -- The reified copy keeps the original cardinalities and constraints
      -- but lives under the reified parent and colons to the reified super.
      let s = SClafer (reifyPartName chiP) chi False (low chiC) (high chiC) (groupLow chiC) (groupHigh chiC) (Just $ reifyPartName par) (Just $ Colon $ reifyPartName supP) (constraints chiC)
      return s <:> addChildren abs' chiP ss
  col <- runMaybeT $ colonOf parBase
  case col of
    Just col' -> do
      -- Also reify the children inherited from the super clafer.
      acol <- addChildren abs' (Part $ steps ++ [col']) (Part $ supSteps ++ [parBase])
      return $ concat achis ++ acol
    Nothing -> return $ concat achis
  where
  notAbs = not . (`elem` abs')
  reifyPartName (Part (t : target)) = reifyPartName' $ t : filter notAbs target
  reifyPartName (Part []) = error "Function reifyPartName from GLPKScopeAnalyzer expects a non empty Part, but was given one!" -- This should never happen
  reifyPartName' [target] = target
  reifyPartName' target = uniqNameSpace ++ "reify_" ++ intercalate "_" target
-- | A resolved navigation path: a sequence of reified parts.
data Path =
  Path {parts::[Part]}
  deriving (Eq, Ord, Show)

-- | One segment of a 'Path': the chain of clafer uids it traverses.
data Part =
  Part {steps::[String]}
  deriving (Eq, Ord, Show)
{-data Expr =
This {path::Path, eType::I.IType} |
Global {path::Path, eType::I.IType} |
Const Integer |
Concat {paths::[Expr], eType::I.IType} |
Positive {allPaths :: [Path], num::Integer, eType::I.IType}
deriving Show-}
-- | Operands of a generated scope constraint. Every constructor except
--   'Const' carries the inferred clafer type of the expression.
data Expr =
  This Path I.IType |     -- path starting at "this"
  Global Path I.IType |   -- path starting at the root
  Const Integer |         -- integer literal
  Concat [Expr] I.IType | -- union of disjoint expressions
  Positive [Path] Integer I.IType -- constant guarded by paths being non-empty
  deriving Show
-- | The inferred type carried by an expression. 'Const' nodes carry no
--   type, so requesting one is a programming error.
eType :: Expr -> I.IType
eType expr =
  case expr of
    This _ t       -> t
    Global _ t     -> t
    Concat _ t     -> t
    Positive _ _ t -> t
    Const _        -> error "Function eType from GLPK did not expect a Const"
-- | Is this expression a path rooted at "this"?
isThis :: Expr -> Bool
isThis e = case e of
  This{} -> True
  _      -> False
-- | Is this expression a path rooted at the model root?
isGlobal :: Expr -> Bool
isGlobal e = case e of
  Global{} -> True
  _        -> False
{-isConst :: Expr -> Bool
isConst Const{} = True
isConst _ = False-}
-- | The 'Part' denoting the parent of the given part's last step.
--   An abstract parent is skipped, since abstract clafers are not reified.
parentOfPart :: MonadAnalysis m => Part -> m Part
parentOfPart (Part s) =
  do
  s' <- parentOf $ last s
  cs' <- claferWithUid s'
  return $ if isAbstract cs'
    then Part $ init s
    else Part $ init s ++ [s']
{-
- Turns constraints that look like:
-
- [ A in List
- B in List ]
-
- to
-
- [ A, B in List ]
-}
-- | Merge several "X in List" constraints with the same right-hand side
--   into a single union constraint: [A in L], [B in L] become [A ++ B in L].
--   Constraints that are not "in" constraints pass through unchanged.
optimizeInConstraints :: [I.PExp] -> [I.PExp]
optimizeInConstraints constraints =
  noOpt ++ opt
  where
  (noOpt, toOpt) = partitionEithers (constraints >>= partitionConstraint)
  -- Group the "in" constraints by the syntax of their right-hand side.
  opt = [ unionPExpAll (map fst inSame) `inPExp` snd (head inSame)
        | inSame <- groupBy (testing' $ syntaxOf . snd) $ sortBy (comparing' snd) toOpt ]
  inPExp a b = I.PExp (Just I.TBoolean) "" noSpan $ I.IFunExp "in" [a, b]
  unionPExpAll es = foldr1 unionPExp es
  unionPExp a b = I.PExp (liftM2 (+++) (I._iType a) (I._iType b)) "" noSpan $ I.IFunExp "++" [a, b]
  -- Right = optimizable "in" constraint, Left = leave untouched.
  partitionConstraint I.PExp{I._exp = I.IFunExp {I._op = "in", I._exps = [exp1, exp2]}} = return $ Right (exp1, exp2)
  partitionConstraint I.PExp{I._exp = I.IFunExp {I._op = "&&", I._exps = [exp1, exp2]}} = partitionConstraint exp1 `mplus` partitionConstraint exp2
  partitionConstraint e = return $ Left e
  testing' f a b = f a == f b
  comparing' f a b = f a `compare` f b
{-
- Phone *
-
- [all p : Phone | <constraint on p>]
-
- becomes
-
- Phone *
- [<constraint on p/this>]
-}
-- | Rewrite "[all p : C | phi(p)]" under @curThis@ into "[phi(this)]"
--   attached directly to clafer C; other constraints stay on @curThis@.
optimizeAllConstraints :: MonadAnalysis m => SClafer -> [I.PExp] -> m [(SClafer, I.PExp)]
optimizeAllConstraints curThis constraints =
  runListT $ partitionConstraint =<< foreachM constraints
  where
  partitionConstraint I.PExp{I._exp = I.IDeclPExp I.IAll [I.IDecl _ [decl] I.PExp{I._exp = I.IClaferId{I._sident}}] bpexp} =
    do
    under <- claferWithUid _sident
    return (under, rename decl bpexp)
  partitionConstraint I.PExp{I._exp = I.IFunExp {I._op = "&&", I._exps = [exp1, exp2]}} = partitionConstraint exp1 `mplus` partitionConstraint exp2
  partitionConstraint e = return (curThis, e)
-- | Substitute "this" for every free occurrence of the variable @f@ in the
--   expression; occurrences re-bound by an inner declaration are untouched.
rename :: String -> I.PExp -> I.PExp
rename f p@I.PExp{I._exp = exp'} =
  p{I._exp = renameIExp exp'}
  where
  renameIExp (I.IFunExp op exps) = I.IFunExp op $ map (rename f) exps
  renameIExp (I.IDeclPExp quant oDecls bpexp) = I.IDeclPExp quant (map renameDecl oDecls) $ rename f bpexp
  renameIExp (I.IClaferId modName sident isTop)
    | f == sident = I.IClaferId modName "this" isTop
    | otherwise = I.IClaferId modName sident isTop
  renameIExp i = i
  renameDecl (I.IDecl isDisj decls body)
    | f `elem` decls = I.IDecl isDisj decls body -- Not a free variable
    | otherwise = I.IDecl isDisj decls $ rename f body -- Is a free variable
-- | Collect the constraints under a clafer and apply both constraint
--   optimisations, grouping the results by the clafer they now belong to.
optConstraintsUnder :: MonadAnalysis m => SClafer -> m [(SClafer, [I.PExp])]
optConstraintsUnder clafer =
  do
  cons <- constraintsUnder clafer `select` snd
  allCons <- optimizeAllConstraints clafer cons
  let inCons = [(fst $ head c, optimizeInConstraints $ map snd c) | c <- groupBy (testing' $ uid . fst) $ sortBy (comparing' $ uid . fst) allCons]
  return inCons
  where
  testing' f a b = f a == f b
  comparing' f a b = f a `compare` f b
-- | Translate the user's constraints into linear scope constraints.
--   Each abstract constraint is first expressed as (lhs, op, rhs) triples by
--   'scopeConstraint'; each triple is then tried in both orientations, and
--   any orientation that maps onto a known linear pattern is emitted.
constraintConstraints :: MonadScope m => m ()
constraintConstraints =
  do
  runListT_ $ do
    clafer <- foreach clafers
    (supThis, cons) <- foreach $ optConstraintsUnder clafer
    con <- foreachM cons
    -- A constraint under an abstract clafer applies once per concrete
    -- clafer extending it.
    curThis <-
      if isAbstract supThis
        then
          foreach $ colonsTo supThis
        else
          return supThis
    constraint <- foreach $ scopeConstraint curThis con
    oneConstraint curThis constraint
  where
  --base (Part steps) = last steps
  -- Try the triple both ways round; mzero'd orientations are discarded.
  oneConstraint c (e1, con, e2) =
    void $ runMaybeT $ oneConstraintOneWay c e1 con e2 `mplus` oneConstraintOneWay c e2 (reverseCon con) e1
  oneConstraintOneWay c@SClafer{uid} e1 con e2 =
    oneConstraint' e1 e2
    where
    -- Empty paths carry no scope information.
    oneConstraint' _ (This (Path []) _) =
      mzero
    oneConstraint' _ (Global (Path []) _) =
      mzero
    oneConstraint' (This (Path []) _) (This (Path parts) _) =
      return (var uid) `comp` reifyVar (last parts)
    oneConstraint' (This (Path []) _) (Global (Path parts) _) =
      return (var uid) `comp` reifyVar (last parts)
    oneConstraint' (Positive [Path []] _ _) _ =
      mzero
    oneConstraint' _ (Positive [Path []] _ _) =
      mzero
    -- Guarded constant: scope relation only applies when all guard paths
    -- are populated (their indicator variable is 1).
    oneConstraint' (Global (Path gParts) _) (Positive allPaths claf _) =
      do
      aux <- testPositives (map (reifyVarName . last . parts) allPaths)
      reifyVar (last gParts) `comp` return (claf *^ var aux)
    oneConstraint' (This (Path parts) _) (Const constant)
      | con == EQU = oneConstraintOneWay c e1 LEQ e2 >> oneConstraintOneWay c e1 GEQ e2
      | con `elem` [GTH, GEQ] = foldM_ mkCon 1 (reverse parts)
      | con `elem` [LTH, LEQ] = reifyVar (last parts) `comp` (return $ (fromInteger constant :: Double) *^ var uid)
      where
      -- Walk the path outward, dividing the constant by the accumulated
      -- cardinality product at each step.
      mkCon :: MonadScope m => Integer -> Part -> m Integer
      mkCon multiplier part =
        do
        let frac = (1 / fromInteger multiplier) * fromInteger constant :: Double
        (reifyVar part) `comp` return (frac *^ var uid)
        mult multiplier <$> prod part
    oneConstraint' (Global (Path parts) _) (Const constant)
      | con == EQU = oneConstraintOneWay c e1 LEQ e2 >> oneConstraintOneWay c e1 GEQ e2
      | con `elem` [GTH, GEQ] =
          do
          k <- testPositive uid
          foldM_ (mkCon k) 1 (reverse parts)
      | con `elem` [LTH, LEQ] = reifyVar (last parts) `compTo` (return $ fromInteger constant)
      where
      mkCon :: MonadScope m => String -> Integer -> Part -> m Integer
      -- (-1) marks an unbounded cardinality product: stop scaling.
      mkCon pos (-1) part =
        do
        (reifyVar part) `comp` return (var pos)
        return (-1)
      mkCon pos multiplier part =
        do
        let frac = (1 / fromInteger multiplier) * fromInteger constant :: Double
        (reifyVar part) `comp` return (frac *^ var pos)
        mult multiplier <$> prod part
    oneConstraint' (This (Path parts1) _) (This (Path parts2) _) =
      reifyVar (last parts1) `comp` reifyVar (last parts2)
    oneConstraint' (Global (Path parts1) _) (Global (Path parts2) _) =
      reifyVar (last parts1) `comp` reifyVar (last parts2)
    oneConstraint' (Global (Path parts) _) (Concat exprs _) =
      if all isGlobal exprs
        then reifyVar (last parts) `comp` reifyVars [last p | Global (Path p) _ <- exprs]
        else mzero
    oneConstraint' (This (Path parts) _) (Concat exprs _) =
      if all isGlobal exprs
        then do
          -- Each union member must have a fixed cardinality so its
          -- contribution can be summed per parent instance.
          let vs = [last p | Global (Path p) _ <- exprs]
          claf <- mapM (claferWithUid . last . steps) $ vs
          s <- mapM constantCard claf
          p <- parentOfPart $ last parts
          reifyVar (last parts) `comp` ((sum s *^) <$> reifyVar p)
        else if all isThis exprs
          then reifyVar (last parts) `comp` reifyVars [last p | This (Path p) _ <- exprs]
          else mzero
    oneConstraint' _ _ = mzero
    constantCard SClafer{low, high}
      | low == high = return low
      | otherwise = mzero
    -- Product of upper cardinalities along a part; -1 propagates as infinity.
    prod (Part steps) = foldr1 mult <$> mapM (return . high <=< claferWithUid) steps
    -- Emit an LP constraint between two linear expressions, with strict
    -- inequalities approximated via the smallM margin.
    comp x y =
      do
      x' <- x
      y' <- y
      case con of
        LTH -> (x' ^-^ y') `leqTo` (-smallM)
        LEQ -> x' `leq` y'
        EQU -> x' `equal` y'
        GTH -> (x' ^-^ y') `geqTo` smallM
        GEQ -> x' `geq` y'
    -- Like 'comp' but against a numeric constant.
    compTo x y =
      do
      x' <- x
      y' <- y
      case con of
        LTH -> x' `leqTo` (y' - smallM)
        LEQ -> x' `leqTo` y'
        EQU -> x' `equalTo` y'
        GTH -> x' `geqTo` (y' + smallM)
        GEQ -> x' `geqTo` y'
    reifyVar p = return (var $ reifyVarName p)
    reifyVars p = return (varSum $ map reifyVarName p)
    reifyVarName (Part [target]) = target
    reifyVarName (Part target) = uniqNameSpace ++ "reify_" ++ intercalate "_" target
{-
isAbstractPart (Part [_]) = False
isAbstractPart _ = True
reifiedSuper (Part steps) =
do
let (b : s : rest) = reverse steps
ss <- colonOf s
sss <- runMaybeT $ colonUid ss
if isNothing sss
then return $ Part $ reverse $ b : rest
else return $ Part $ reverse $ b : ss : rest
-- TODO: correct?
siblingParts (Part (conc : abst)) =
do
conc' <- claferWithUid conc
sup <- runMaybeT $ colonOf conc'
case sup of
Nothing -> return [Part $ conc : abst]
Just sup' -> runListT $ do
(sub, _) <- foreach $ anything |: sup'
return $ Part $ uid sub : abst
siblingParts [] = error "Function siblingParts from GLpkScopeAnalyzer expects a non empty list, given an empty one!" -- This should never happen
reifyPart (Part steps) =
do
as <- claferWithUid (last steps) >>= nonTopAncestors
forM as $
\a -> return $ Part $ init steps ++ [uid a]
nonTopAncestors child =
do
parent <- parentOf child
if uid parent == rootUid
then return []
else (++ [child]) `fmap` nonTopAncestors parent
-}
-- | Comparison operators appearing in generated scope constraints.
data Con = EQU | LTH | LEQ | GTH | GEQ deriving (Eq, Ord, Show)

-- | Mirror a comparison, as when swapping a constraint's two sides:
--   @a < b@ becomes @b > a@; equality is its own mirror image.
reverseCon :: Con -> Con
reverseCon con =
  case con of
    EQU -> EQU
    LTH -> GTH
    GTH -> LTH
    LEQ -> GEQ
    GEQ -> LEQ
-- | A set's cardinality, either known exactly or only bounded from below.
data Limit = Exact {lExpr::Expr} | AtLeast {lExpr::Expr} deriving Show
-- | Express a user constraint as zero or more abstract (lhs, op, rhs)
--   triples over path/constant expressions; unsupported constraint shapes
--   yield no triples.
scopeConstraint :: MonadScope m => SClafer -> I.PExp -> m [(Expr, Con, Expr)]
scopeConstraint curThis pexp =
  runListT $ scopeConstraint' $ I._exp pexp
  where
  scopeConstraint' I.IFunExp {I._op = "&&", I._exps} = msum $ map (scopeConstraint' . I._exp) _exps
  -- [some path]  =>  #path >= 1
  scopeConstraint' I.IDeclPExp {I._quant = I.ISome, I._oDecls = [], I._bpexp} = parsePath curThis _bpexp `greaterThanEqual` constant (1::Integer)
  scopeConstraint' I.IDeclPExp {I._quant = I.ISome, I._oDecls} = msum $ map pathAndMultDecl _oDecls
    where
    -- disjoint declarations need one instance per declared name
    pathAndMultDecl I.IDecl {I._isDisj = True, I._decls, I._body} = parsePath curThis _body `greaterThanEqual` constant (length _decls)
    pathAndMultDecl I.IDecl {I._isDisj = False, I._body} = parsePath curThis _body `greaterThanEqual` constant (1::Integer)
  -- [one path]  =>  #path == 1
  scopeConstraint' I.IDeclPExp {I._quant = I.IOne, I._oDecls = [], I._bpexp} = parsePath curThis _bpexp `eqTo` constant (1::Integer)
  scopeConstraint' I.IDeclPExp {I._quant = I.IOne, I._oDecls} =
    do
    oDecl <- foreachM _oDecls
    parsePath curThis (I._body oDecl) `eqTo` constant (1::Integer)
  scopeConstraint' I.IFunExp {I._op, I._exps = [exp1, exp2]}
    | _op == "in" = inConstraint1 exp1 exp2 `mplus` inConstraint2 exp1 exp2
    | _op == "=" = equalConstraint1 exp1 exp2 `mplus` equalConstraint2 exp1 exp2
    | _op == "<" = scopeConstraintNum exp1 `lessThan` scopeConstraintNum exp2
    | _op == "<=" = scopeConstraintNum exp1 `lessThanEqual` scopeConstraintNum exp2
    | _op == ">" = scopeConstraintNum exp1 `greaterThan` scopeConstraintNum exp2
    | _op == ">=" = scopeConstraintNum exp1 `greaterThanEqual` scopeConstraintNum exp2
    | _op == "<=>" = (exp1 `implies` exp2) `mplus` (exp2 `implies` exp1)
    | _op == "=>" = exp1 `implies` exp2
  scopeConstraint' _ = mzero
  -- Implication turns the consequent's constant bound into a 'Positive'
  -- bound guarded by the antecedent's path being populated.
  implies exp1 exp2 =
    do
    e1 <- scopeConstraint' $ I._exp exp1
    e2 <- scopeConstraint' $ I._exp exp2
    case (e1, e2) of
      ((This thisPath t1, GEQ, Const 1), (Global globalPath t0, comp, Positive allPaths c t2)) ->
        return $ (Global globalPath t0, comp, Positive (thisPath : allPaths) c $ t1 +++ t2)
      ((This thisPath e1', GEQ, Const 1), (Global globalPath e2', comp, Const c)) ->
        return $ (Global globalPath e2', comp, Positive [thisPath] c e1')
      ((Global path1 t1, GEQ, Const 1), (Global path2 t0, comp, Positive allPaths c t2)) ->
        return $ (Global path2 t0, comp, Positive (path1 : allPaths) c $ t1 +++ t2)
      ((Global path1 e1', GEQ, Const 1), (Global path2 e2', comp, Const c)) ->
        return $ (Global path2 e2', comp, Positive [path1] c e1')
      ((t1@(This (Path [thisPart1]) _), GEQ, Const 1), (t2@(This (Path [_]) _), GEQ, Const 1)) ->
        do
        c <- claferWithUid $ last $ steps thisPart1
        guard (high c == 1)
        return (t2, GEQ, t1)
      _ -> mzero
  equalConstraint1 exp1 exp2 =
    do
    l1 <- scopeConstraintSet exp1
    l2 <- scopeConstraintSet exp2
    case (l1, l2) of
      (Exact e1, Exact e2) -> return e1 `eqTo` return e2
      (AtLeast e1, Exact e2) -> return e1 `greaterThanEqual` return e2
      (Exact e1, AtLeast e2) -> return e1 `lessThanEqual` return e2
      _ -> mzero
  equalConstraint2 exp1 exp2 = scopeConstraintNum exp1 `eqTo` scopeConstraintNum exp2
  -- exp1 in exp2
  inConstraint1 exp1 exp2 =
    do
    l1 <- scopeConstraintSet exp1
    l2 <- scopeConstraintSet exp2
    case l2 of
      Exact e2 -> return (lExpr l1) `lessThanEqual` return e2
      _ -> mzero
  inConstraint2 exp1 exp2 = scopeConstraintNum exp1 `lessThanEqual` scopeConstraintNum exp2
  -- A union's cardinality is exact only when the operands cannot overlap.
  scopeConstraintSet I.PExp {I._exp = I.IFunExp {I._op = "++", I._exps = [e1, e2]}} =
    do
    l1' <- scopeConstraintSet e1
    l2' <- scopeConstraintSet e2
    i <- intersects (eType $ lExpr l1') (eType $ lExpr l2')
    if i
      then return $ AtLeast $ lExpr l1'
      else return $ combineDisjoint l1' l2'
  scopeConstraintSet x = Exact <$> parsePath curThis x
  combineDisjoint (Exact e1) (Exact e2) =
    Exact (Concat ([e1, e2] >>= flattenConcat) $ eType e1 +++ eType e2)
  combineDisjoint l1 l2 =
    AtLeast (Concat ([e1, e2] >>= flattenConcat) $ eType e1 +++ eType e2)
    where
    e1 = lExpr l1
    e2 = lExpr l2
  flattenConcat (Concat es _) = es >>= flattenConcat
  flattenConcat e = [e]
  scopeConstraintNum I.PExp {I._exp = I.IInt const'} = constant const'
  scopeConstraintNum I.PExp {I._exp = I.IFunExp {I._op = "#", I._exps = [path]}} = parsePath curThis path
  scopeConstraintNum _ = mzero

-- | Lift an integral literal into a monadic 'Expr'.
constant :: (Monad m, Integral i) => i -> m Expr
constant = return . Const . toInteger

-- Combinators pairing two monadic operands with a comparison operator.
greaterThan = liftM2 (,GTH,)
greaterThanEqual = liftM2 (,GEQ,)
lessThan = liftM2 (,LTH,)
lessThanEqual = liftM2 (,LEQ,)
eqTo = liftM2 (,EQU,)
{-
- We use the stack to push every abstraction we traverse through.
- For example:
-
- abstract A
- B ?
- C : D ?
- abstract D
- E ?
- F : A
- G : A
- H : A
-
- [some F.B.C.E]
- [some G.B.C.E]
-
- The first constraint's final stack will look like ["C" ,"F"]
- Hence the linear programming equation will look like:
-
- scope_F_C_E >= scope_root
-
- Adding the second constraint:
-
- scope_G_C_E >= scope_root
- scope_E >= scope_F_C_E + scope_G_C_E (*)
-
- Solving the minimization should have scope_E = 2 in its solution.
- The (*) equation is set in constraintConstraints
-}
-- | Parse a navigation expression into an 'Expr', starting from the
--   original (pre-reification) version of the given clafer.
parsePath :: MonadScope m => SClafer -> I.PExp -> m Expr
parsePath start pexp =
  do
  start' <- claferWithUid (origUid start)
  parsePath2 start' pexp
-- | Flatten the join expression into tokens, then run the token parser to
--   classify the path as 'This'-rooted or 'Global'-rooted, tracking every
--   abstraction traversed on the way (see the comment block above).
parsePath2 :: MonadScope m => SClafer -> I.PExp -> m Expr
parsePath2 start pexp =
  do
  root <- claferWithUid rootUid
  case unfoldJoins pexp of
    Just unfold -> do
      match <- patternMatch parsePath' (ParseState root []) unfold
      either (fail . show) return match
    Nothing -> fail "Cannot unfold."
  where
  asPath :: [[String]] -> Path
  asPath parts = Path [Part part | part <- parts, not $ null part]
  parsePath' = (This <$> (asPath <$> parseThisPath) <*> getThisType) <|> (Global <$> (asPath <$> parseNonthisPath) <*> getThisType)
  getThisType =
    do
    t <- getThis
    return $ fromJust $ fromUnionType [uid t]
  -- "this.parent..." collapses to the start clafer itself; otherwise
  -- continue as a normal path from "this".
  parseThisPath =
    do
    t <- _this_
    do
        many1 _parent_
        return [[uid start]]
      <|> (follow t >> parseNonthisPath)
  parseNonthisPath =
    do
    paths <- many (step >>= follow)
    lifo <- popStack
    -- Keep only the last step of this segment, prefixed by the stack of
    -- abstractions traversed (outermost first).
    let end = if null paths then [] else [last paths]
    let result = reverse $ end ++ map uid lifo
    do
        _ref_ >>= follow
        -- recurse
        rec <- parseNonthisPath
        return $ result : rec
      <|> return [result]
  -- Step handles non-this token.
  step :: MonadScope m => ParseT m String
  step = _parent_ <|> _directChild_ <|> try (pushThis >> _indirectChild_)
  -- Update the state of where "this" is.
  -- Path is one step away from where "this" is.
  follow :: MonadScope m => String -> ParseT m String
  follow path =
    do
    curThis <- getThis
    case path of
      "this" -> putThis start
      "parent" -> lift (parentOf curThis) >>= putThis -- the parent is now "this"
      "ref" -> lift (refOf curThis) >>= putThis -- the ref'd Clafer is now "this"
      u -> lift (claferWithUid u) >>= putThis
    return path
{------------------------------------------------------------
---------- Internals ---------------------------------------
------------------------------------------------------------}
-- | The analysis monad: a fresh-variable supply over the static-analysis
--   reader, accumulating a linear program with String variables and Double
--   coefficients.
newtype ScopeAnalysis a = ScopeAnalysis (VSupplyT (AnalysisT (LPM String Double)) a)
  deriving (Monad, Functor, MonadState (LP String Double), MonadSupply Var, MonadReader Info, MonadAnalysis)

-- | Everything the scope analysis needs: static info, LP state, fresh vars.
class (MonadAnalysis m, MonadState (LP String Double) m, MonadSupply Var m) => MonadScope m
instance (MonadAnalysis m, MonadState (LP String Double) m, MonadSupply Var m) => MonadScope m

-- | Run the analysis, yielding its result and the assembled linear program.
runScopeAnalysis :: ScopeAnalysis a -> Info -> (a, LP String Double)
runScopeAnalysis (ScopeAnalysis s) info = runLPM $ runAnalysisT (runVSupplyT s) info
-- Unfold joins
-- If the expression is a tree of only joins, then this function will flatten
-- the joins into a list of positioned tokens.
-- Otherwise, it fails (NB: despite the old comment, it does not return []
-- at the top level; only nested non-joins degrade to []).
unfoldJoins :: Monad m => I.PExp -> m [Token]
unfoldJoins pexp =
  unfoldJoins' pexp
  where
  unfoldJoins' I.PExp{I._exp = (I.IFunExp "." args)} =
    return $ args >>= (fromMaybe [] . unfoldJoins)
  unfoldJoins' I.PExp{I._inPos, I._exp = I.IClaferId{I._sident}} =
    return $ [Token (spanToSourcePos _inPos) _sident]
  unfoldJoins' _ =
    fail "not a join"
-- Variables starting with "_aux_" are reserved for creating
-- new variables at runtime.
uniqNameSpace :: String
uniqNameSpace = "_aux_"

-- | A fresh LP variable name in the reserved auxiliary namespace.
uniqVar :: MonadScope m => m String
uniqVar =
  do
  c <- supplyNew
  return $ uniqNameSpace ++ show (varId c)
{-
- Create a new variable "aux". If
- v == 0 -> aux == 0
- v > 0 -> aux == 1
-
- pre: v >= 0 and v is integer
-}
testPositive :: MonadScope m => String -> m String
testPositive v =
  do
  aux <- uniqVar
  -- aux <= v        : v == 0 forces aux == 0
  var aux `leq` var v
  -- aux >= smallM*v : v > 0 pushes the integer aux up to 1
  var aux `geq` (smallM *^ var v)
  var aux `leqTo` 1
  setVarKind aux IntVar
  return aux
{-
- Create a new variable "aux". If
- all v == 0 -> aux == 0
- all v > 0 -> aux == 1
-
- pre: all v >= 0 and all v is integer
-}
testPositives :: MonadScope m => [String] -> m String
testPositives [v] = testPositive v
testPositives vs =
  do
  auxs <- mapM testPositive vs
  aux <- uniqVar
  -- aux is the average of the individual indicators: 1 only when all are 1.
  (length vs *^ var aux) `equal` varSum auxs
  a <- uniqVar
  -- Round aux down to the integer indicator a.
  (var a ^-^ var aux) `geqTo` (-0.9999) -- Buffer for floating point inaccuracies
  (var a ^-^ var aux) `leqTo` 0.0001 -- Buffer for floating point inaccuracies
  setVarKind a IntVar
  return a
{-
- smallM cannot be too small. For example, with glpk
- 0.000001 * 9 = 0
-}
-- | Small positive margin used to encode strict inequalities and 0/1
--   indicator couplings; must not be so small that the solver rounds
--   products with it down to zero (see the comment above).
smallM :: Double
smallM = 0.0005 -- 0.00001
{-
-
- Parsing
-
-}
-- | A lexeme with its source position, fed to the path parser.
data Token = Token {tPos::SourcePos, tLexeme::String} deriving Show

-- | Parser state threaded through path parsing.
data ParseState = ParseState
  {psThis::SClafer, -- "this"
  psStack::[SClafer] -- the list of all the abstract Clafers traversed
  }
  deriving Show

-- | Token parser transformer carrying the current-"this" state.
type ParseT = ParsecT [Token] ParseState
-- | The clafer that "this" currently refers to.
getThis :: MonadScope m => ParseT m SClafer
getThis = psThis <$> getState
-- Update where "this" refers to.
-- | Update the clafer that "this" refers to.
putThis :: MonadScope m => SClafer -> ParseT m ()
putThis c = do
  ps <- getState
  putState ps{psThis = c}
-- | Return the stack of traversed abstract clafers and clear it.
popStack :: MonadScope m => ParseT m [SClafer]
popStack = do
  ps <- getState
  putState ps{psStack = []}
  return (psStack ps)
-- | Push the current "this" onto the traversal stack.
pushThis :: MonadScope m => ParseT m ()
pushThis = do
  ps <- getState
  putState ps{psStack = psThis ps : psStack ps}
-- Parser combinator for the "this" keyword.
_this_ :: MonadScope m => ParseT m String
_this_ = satisfy (== "this")

-- Parser combinator for the "parent" keyword.
_parent_ :: MonadScope m => ParseT m String
_parent_ = satisfy (== "parent")

-- Parser combinator for the "ref" keyword.
_ref_ :: MonadScope m => ParseT m String
_ref_ = satisfy (== "ref")

-- Parser combinator for a uid that is not "this", "parent", or "ref".
_child_ :: MonadScope m => ParseT m String
_child_ = satisfy (not . (`elem` ["this", "parent", "ref"]))
-- Parser combinator for a uid of direct child.
-- | Parser combinator for the uid of a direct child of the current "this".
_directChild_ :: MonadScope m => ParseT m String
_directChild_ =
  try $ do
    curThis <- getThis
    clafer <- _child_ >>= lift . claferWithUid
    check <- lift $ isDirectChild clafer curThis
    -- `unless` replaces the non-idiomatic `when (not check)`.
    unless check $ unexpected $ (uid clafer) ++ " is not a direct child of " ++ (uid curThis)
    return $ uid clafer
-- Parser combinator for a uid of indirect child.
-- | Parser combinator for the uid of an indirect child of the current "this".
_indirectChild_ :: MonadScope m => ParseT m String
_indirectChild_ =
  try $ do
    curThis <- getThis
    clafer <- _child_ >>= lift . claferWithUid
    check <- lift $ isIndirectChild clafer curThis
    -- `unless` replaces the non-idiomatic `when (not check)`.
    unless check $ unexpected $ (uid clafer) ++ " is not an indirect child of " ++ (uid curThis)
    return $ uid clafer
-- | Accept a token whose lexeme satisfies the predicate; yields the lexeme.
satisfy :: MonadScope m => (String -> Bool) -> ParseT m String
satisfy f = tLexeme <$> tokenPrim (tLexeme)
  (\_ c _ -> tPos c)
  (\c -> if f $ tLexeme c then Just c else Nothing)
-- | Convert the start of a Clafer source span into a Parsec position.
spanToSourcePos :: Span -> SourcePos
spanToSourcePos (Span (Pos l c) _) = (newPos "" (fromInteger l) (fromInteger c))
-- | Run a token parser to completion (it must consume all input).
patternMatch :: MonadScope m => ParseT m a -> ParseState -> [Token] -> m (Either ParseError a)
patternMatch parse' state' =
  runParserT (parse' <* eof) state' ""
{-
-
- Utility functions
-
-}
-- | The expression together with all of its transitive subexpressions
--   (including the bodies of quantifier declarations).
subexpressions :: I.PExp -> [I.PExp]
subexpressions p@I.PExp{I._exp = exp'} =
  p : subexpressions' exp'
  where
  subexpressions' I.IDeclPExp{I._oDecls, I._bpexp} =
    concatMap (subexpressions . I._body) _oDecls ++ subexpressions _bpexp
  subexpressions' I.IFunExp{I._exps} = concatMap subexpressions _exps
  subexpressions' _ = []
-- Lift the fresh-variable supply through the transformers used here.
instance MonadSupply s m => MonadSupply s (ListT m) where
  supplyNew = lift supplyNew

instance MonadSupply s m => MonadSupply s (MaybeT m) where
  supplyNew = lift supplyNew

instance MonadSupply s m => MonadSupply s (ParsecT a b m) where
  supplyNew = lift supplyNew
| juodaspaulius/clafer-old-customBNFC | src/Language/Clafer/Intermediate/GLPKScopeAnalyzer.hs | mit | 35,315 | 1 | 20 | 9,227 | 10,175 | 5,193 | 4,982 | -1 | -1 |
module DepsBuilder (nonExistentMtime, build) where
import Control.Applicative
import Control.Monad
import Data.ConfigFile
import Data.List.Utils
import System.Directory
import System.FilePath.Find hiding (FileInfo, FileType)
import System.IO
import Text.JSON
import Text.Read
import AppDeps
import AppDepsUtility
import FolderJsDeps
import FolderJsDepsUtility
import CompilationUtility
import qualified ConfigFile
import Templates
import Utility
-- | Load the cached dependency graph (unless recompilation is forced),
--   detect modified/deleted files, and rebuild dependencies as needed.
--   Returns the up-to-date graph plus, when a usable cache existed, the
--   graph annotated with changed file infos.
build :: ConfigParser -> EitherT String IO (AppDeps, Maybe AppDeps)
build cp = do
  forceRecomp <- hoistEither $ ConfigFile.getBool cp "DEFAULT" "force_recomp"
  if forceRecomp then checkFolderJsDeps cp Nothing else do
    cacheName <- ConfigFile.getMaybeFile cp "DEFAULT" "cache.path"
    cacheExists <- catchIO $ doesFileExist cacheName
    if not cacheExists then checkFolderJsDeps cp Nothing else do
      -- An unreadable cache (readMaybe = Nothing) triggers a full rebuild.
      cacheMaybe <- fmap readMaybe $ catchIO $ readFile cacheName
      case cacheMaybe of
        Nothing -> checkFolderJsDeps cp Nothing
        Just cache -> do
          modified <- mapAppDeps calcModified cache
          if safeJsSoy modified
            then return (cache, Just modified)
            else checkFolderJsDeps cp (Just (cache, modified))
  where
  -- Nothing = unchanged; Just = new mtime, or 'nonExistentMtime' if deleted.
  calcModified :: FileInfo -> EitherT String IO (Maybe FileInfo)
  calcModified fileInfo = do
    exists <- catchIO $ doesFileExist $ fi_path fileInfo
    if exists then do
        mtime <- catchIO $ getModificationTime $ fi_path fileInfo
        if mtime == fi_mtime fileInfo
          -- unmodified
          then return Nothing
          -- modified
          else return $ Just $ fileInfo{fi_mtime = mtime}
      -- deleted
      else return $ Just $ fileInfo{fi_mtime = nonExistentMtime}
-- | Refresh the per-folder JS dependency files; rebuild with the closure
--   builder only if anything changed (or no usable cache was supplied).
checkFolderJsDeps :: ConfigParser -> Maybe (AppDeps, AppDeps) -> EitherT String IO (AppDeps, Maybe AppDeps)
checkFolderJsDeps cp cacheMaybe = do
  changed <- updateFolderJsDeps cp False
  case cacheMaybe of
    Just (cache, modified) -> if changed
      then closureBuilder cp cacheMaybe
      else return (cache, Just modified)
    Nothing -> closureBuilder cp cacheMaybe
-- | Recompute the dependency graph with the closure tooling, merging the
--   fresh result with a previous cache when one is available.
closureBuilder :: ConfigParser -> Maybe (AppDeps, AppDeps) -> EitherT String IO (AppDeps, Maybe AppDeps)
closureBuilder cp cacheMaybe = do
  prefillSoyNamespaces cp
  calc <- runCU =<< appDepsSettings cp
  case cacheMaybe of
    Nothing -> return (calc, Nothing)
    Just (cache, modified) -> return (calc, Just $ mergeAppDeps calc cache modified)
-- | Ensure every .soy template under the public directory has a matching
--   .soy.js stub (a lone goog.provide) so namespace resolution succeeds
--   before the templates are actually compiled.
prefillSoyNamespaces :: ConfigParser -> EitherT String IO ()
prefillSoyNamespaces cp = do
  public <- hoistEither $ ConfigFile.get cp "DEFAULT" "public"
  filePaths <- catchIO $ find (return True) (extension ==? ".soy") public
  forM_ filePaths $ \soyPath -> do
    soyJsPath <- hoistEither $ do
      srcPath <- ConfigFile.get cp "DEFAULT" "src.path"
      appPath <- ConfigFile.get cp "DEFAULT" "app.path"
      soyToSoyJs srcPath appPath soyPath
    exists <- catchIO $ doesFileExist soyJsPath
    -- `unless exists` replaces `if exists then return () else do`.
    unless exists $ do
      namespace <- hoistEither $ ConfigFile.get cp "DEFAULT" "utility.templates.namespace"
      catchIO $ withFile soyJsPath WriteMode $ \h ->
        hPutStrLn h $ "goog.provide('" ++ namespace ++ "." ++ (soyJsToNamespace soyJsPath) ++ "')"
| Prinhotels/goog-closure | src/DepsBuilder.hs | mit | 3,219 | 0 | 23 | 666 | 940 | 469 | 471 | 71 | 7 |
-- yaht_4_4_to_4_5.hs
module Main
where
{-
Exercise 4.4 Write a data type declaration for Triple, a type which
contains three elements, all of different types. Write functions tripleFst,
tripleSnd and tripleThr to extract respectively the first, second and third
elements.
-}
-- | A product of three values, each potentially of a different type.
data Triple a b c = Triple a b c

-- | Extract the first component of a 'Triple'.
tripleFst :: Triple a b c -> a
tripleFst (Triple a _ _) = a

-- | Extract the second component of a 'Triple'.
tripleSnd :: Triple a b c -> b
tripleSnd (Triple _ b _) = b

-- | Extract the third component of a 'Triple'.
tripleThr :: Triple a b c -> c
tripleThr (Triple _ _ c) = c
{-
Exercise 4.5 Write a datatype Quadruple which holds four
elements. However, the first two elements must be the same type and the
last two elements must be the same type. Write a function firstTwo which
returns a list containing the first two elements and a function lastTwo
which returns a list containing the last two elements. Write type
signatures for these functions
-}
-- | A product of four values: the first two share one type, the last two
-- another.
data Quadruple a b = Quadruple a a b b

-- | The first two components of a 'Quadruple' as a list.
firstTwo :: Quadruple a b -> [a]
firstTwo (Quadruple a b _ _) = [a, b]

-- | The last two components of a 'Quadruple' as a list.
lastTwo :: Quadruple a b -> [b]
lastTwo (Quadruple _ _ c d) = [c, d]
-- | Exercise the 'Triple' and 'Quadruple' accessors on sample values.
main :: IO ()
main = do
  let triple = Triple 1 2 3
      quadruple = Quadruple 1 2 3 4
  putStrLn ("tripleFst is:" ++ show (tripleFst triple))
  putStrLn ("tripleSnd is:" ++ show (tripleSnd triple))
  putStrLn ("tripleThr is:" ++ show (tripleThr triple))
  putStrLn ("Quadruple firstTwo is:" ++ show (firstTwo quadruple))
  putStrLn ("Quadruple lastTwo is:" ++ show (lastTwo quadruple))
| dormouse/blog | source/haskell/yaht-exercises/yaht_4_4_to_4_5.hs | gpl-2.0 | 1,322 | 0 | 12 | 266 | 329 | 167 | 162 | 18 | 1 |
{-# LANGUAGE FlexibleInstances, Rank2Types #-}
module Math.Semimeasure.Process where
import Control.Arrow (first, second)
import Control.Monad.Identity
import Control.Monad.Trans.Cont
import Control.Monad.Trans.State
import Control.Monad.Trans.Writer
import Control.Proxy hiding (execState, tell, StateT, WriterT)
import Data.Monoid
import System.IO.Unsafe
import Math.Semimeasure.Bit
-- | A stream transducer from input 'Bit's to output values of type @a@,
-- represented as a pipe polymorphic in the proxy implementation and the
-- underlying monad.
newtype Process a = Process {
    runProcess :: (Proxy p, Monad m, Eq a) => () -> Pipe p Bit a m ()
}
-- | Bit-to-bit processes compose like pipes: 'mempty' is the identity
-- pipe ('idT') and 'mappend' feeds the first process's output into the
-- second.  (Pre-Semigroup-era instance: 'mappend' is the class method.)
instance Monoid (Process Bit) where
    mempty = Process idT
    mappend a b = Process $ runProcess a >-> runProcess b
-- | Run a 'Process' as a pure function: feed it the given input bits and
-- collect everything it emits via a lazy 'WriterT' list log.
toFunction :: (Eq a) => Process a -> [Bit] -> [a]
toFunction pcs input =
    execWriter $ runProxy pipeline where
        pipeline = fromListS input >-> runProcess pcs >-> toListLazyD
-- | A transparent proxy stage that 'tell's @f a@ for every value flowing
-- downstream, accumulating into the 'WriterT' log while passing each
-- value through unchanged.
foldLazyD :: (Proxy p, Monad m, Monoid w) =>
    (a -> w) -> a' -> p a' a a' a (WriterT w m) r
foldLazyD f = runIdentityK go where
    go a' = do
        a <- request a'
        lift $ tell (f a)
        a'2 <- respond a
        go a'2
-- | Collect every downstream value into a lazy list log while passing
-- the stream through untouched.
toListLazyD :: (Proxy p, Monad m) =>
    a' -> p a' a a' a (WriterT [a] m) r
toListLazyD = foldLazyD (: [])
| uycire/haskell-sm | Math/Semimeasure/Process.hs | gpl-2.0 | 1,139 | 0 | 12 | 247 | 446 | 239 | 207 | 31 | 1 |
module Access.Control.Concurrent.Chan
( module Control.Concurrent.Chan
, ChanAccess(..)
) where
import Control.Concurrent.Chan
import Access.Core
-- | Lifts the standard 'Chan' operations into any 'Access'-capable monad
-- @io@, so channel-using code can be written against an abstraction
-- rather than 'IO' directly.
class Access io => ChanAccess io where
    -- |Builds and returns a new instance of 'Chan'.
    newChan' :: io (Chan a)
    -- |Write a value to a 'Chan'.
    writeChan' :: Chan a -> a -> io ()
    -- |Read the next value from the 'Chan'.
    readChan' :: Chan a -> io a
    -- |Duplicate a 'Chan': the duplicate channel begins empty, but data written to
    -- either channel from then on will be available from both. Hence this creates
    -- a kind of broadcast channel, where data written by anyone is seen by
    -- everyone else.
    --
    -- (Note that a duplicated channel is not equal to its original.
    -- So: @fmap (c /=) $ dupChan c@ returns @True@ for all @c@.)
    dupChan' :: Chan a -> io (Chan a)
    -- |Return a lazy list representing the contents of the supplied
    -- 'Chan', much like 'System.IO.hGetContents'.
    getChanContents' :: Chan a -> io [a]
    -- |Write an entire list of items to a 'Chan'.
    writeList2Chan' :: Chan a -> [a] -> io ()
-- | The trivial instance: in 'IO' each primed operation is exactly the
-- corresponding "Control.Concurrent.Chan" function.
instance ChanAccess IO where
    newChan' = newChan
    writeChan' = writeChan
    readChan' = readChan
    dupChan' = dupChan
    getChanContents' = getChanContents
    writeList2Chan' = writeList2Chan
| bheklilr/base-io-access | Access/Control/Concurrent/Chan.hs | gpl-2.0 | 1,441 | 0 | 10 | 426 | 215 | 121 | 94 | 19 | 0 |
--- |
--- | Main module, startup code
--- |
--- Copyright : (c) Florian Richter 2011
--- License : GPL
---
import Control.Concurrent
import System.Log.Logger
import System.Log.Handler.Simple
import Config
import Filelist
import FilelistTypes
import Filemgmt
import TTH
import DCCommon
import DCToHub
import DCToClient
import Filesystem
import FilesystemHandler
-- | Start all worker threads.  Must be called only after the FUSE layer
-- is up (it is passed to 'startupFileSystem' as the start hook).  Spawns:
-- the DC server accepting peer connections, the outgoing hub connection,
-- and the background file-list hasher.
start appState = do
    let config = appConfig appState
    searchSocket <- createSearchSocket
    -- listen for peer (client-to-client) connections
    forkIO $ startDCServer (configMyIp config) (configMyPort config) (ToClient Nothing DontKnow)
                     (startupClient appState) (handleClient appState) (stopClient appState)
    -- connect to the configured hub
    forkIO $ openDCConnection (configHubIp config) (configHubPort config) ToHub
                     (startupHub appState) (handleHub appState searchSocket) (\state -> return ())
    -- hash the shared files in the background
    forkIO $ hashFileList appState
    return ()
stop appState = return ()
-- | Load configuration, prime caches and the own share, then hand
-- control to the FUSE filesystem (which invokes 'start'/'stop').
main = do
    config <- loadConfig "Hadcc.cfg"
    appState <- newAppState config
    initTTHCache appState
    loadOwnShare appState
    --withMVar (appFileTree appState) (\tree -> putStrLn $ treeNodeToXml tree)
    --withMVar (appFileTree appState) (\tree -> putStrLn $ treeNodeToXml tree)
    startupFileSystem (configMountpoint config) (start appState) (stop appState) (dcFileInfo appState)
-- vim: sw=4 expandtab
| f1ori/hadcc | Hadcc.hs | gpl-3.0 | 1,399 | 1 | 12 | 284 | 310 | 156 | 154 | 29 | 1 |
-- | Different logic-related components.
module OrgStat.Helpers
( convertRange
, resolveInputOrg
, resolveScope
, resolveReport
, resolveOutput
) where
import Universum
import Control.Lens (at, views, (.=))
import qualified Data.List.NonEmpty as NE
import Data.Time
(LocalTime(..), TimeOfDay(..), addDays, getZonedTime, toGregorian, zonedTimeToLocalTime)
import Data.Time.Calendar (addGregorianMonthsRollOver)
import Data.Time.Calendar.WeekDate (toWeekDate)
import OrgStat.Ast (Org(..), cutFromTo, orgTitle)
import OrgStat.Config
(ConfDate(..), ConfOutput(..), ConfRange(..), ConfReport(..), ConfScope(..), ConfigException(..),
OrgStatConfig(..))
import OrgStat.IO (readOrgFile)
import OrgStat.Scope (applyModifiers)
import OrgStat.WorkMonad (WorkM, wcConfig, wdReadFiles, wdResolvedReports, wdResolvedScopes)
-- | Converts config range to a pair of 'UTCTime', right bound not inclusive.
convertRange :: (MonadIO m) => ConfRange -> m (LocalTime, LocalTime)
convertRange range = case range of
  (ConfFromTo f t) -> (,) <$> fromConfDate f <*> fromConfDate t
  -- Negative block indices are a configuration error.
  (ConfBlockDay i) | i < 0 -> error $ "ConfBlockDay i is <0: " <> show i
  -- Block 0 = from the start of the current period until "now".
  (ConfBlockDay 0) -> (,) <$> (localFromDay <$> startOfDay) <*> curTime
  -- Block i (i > 0) = the full period i periods back, as midnight bounds.
  (ConfBlockDay i) -> do
    d <- (negate (i - 1) `addDays`) <$> startOfDay
    pure $ localFromDayPair ((negate 1) `addDays` d, d)
  (ConfBlockWeek i) | i < 0 -> error $ "ConfBlockWeek i is <0: " <> show i
  (ConfBlockWeek 0) -> (,) <$> (localFromDay <$> startOfWeek) <*> curTime
  (ConfBlockWeek i) -> do
    d <- (negate (i - 1) `addWeeks`) <$> startOfWeek
    pure $ localFromDayPair ((negate 1) `addWeeks` d, d)
  (ConfBlockMonth i) | i < 0 -> error $ "ConfBlockMonth i is <0: " <> show i
  (ConfBlockMonth 0) -> (,) <$> (localFromDay <$> startOfMonth) <*> curTime
  (ConfBlockMonth i) -> do
    d <- addGregorianMonthsRollOver (negate $ i-1) <$> startOfMonth
    pure $ localFromDayPair ((negate 1) `addGregorianMonthsRollOver` d, d)
  where
    -- Midnight at the beginning of the given day.
    localFromDay d = LocalTime d $ TimeOfDay 0 0 0
    localFromDayPair = bimap localFromDay localFromDay
    -- "Now" in the local time zone.
    curTime = liftIO $ zonedTimeToLocalTime <$> getZonedTime
    curDay = localDay <$> curTime
    addWeeks i d = (i*7) `addDays` d
    startOfDay = curDay
    -- Monday of the current week (weekday 1 maps to offset 0).
    startOfWeek = do
      d <- curDay
      let weekDay = pred $ view _3 $ toWeekDate d
      pure $ fromIntegral (negate weekDay) `addDays` d
    -- First day of the current month.
    startOfMonth = do
      d <- curDay
      let monthDate = pred $ view _3 $ toGregorian d
      pure $ fromIntegral (negate monthDate) `addDays` d
    fromConfDate ConfNow = curTime
    fromConfDate (ConfLocal x) = pure x
-- | Resolves org file: reads from path and puts into state or just
-- gets out of state if was read before.
resolveInputOrg :: FilePath -> WorkM (Text, Org)
resolveInputOrg fp = use (wdReadFiles . at fp) >>= \case
    Just x -> pure x
    Nothing -> do
        todoKeywords <- views wcConfig confTodoKeywords
        o <- readOrgFile todoKeywords fp
        wdReadFiles . at fp .= Just o  -- memoise for subsequent lookups
        pure o
-- A lot of copy-paste here... 2 bad, though no time to fix
-- | Return the scope with the requested name or fail with a
-- 'ConfigLogicException'.  The constructed scope is memoised in
-- 'wdResolvedScopes', so repeated lookups build it only once.
resolveScope :: Text -> WorkM Org
resolveScope scopeName = use (wdResolvedScopes . at scopeName) >>= \case
    Just cached -> pure cached
    Nothing -> constructScope
  where
    constructScope = do
        let filterScopes = filter (\x -> csName x == scopeName)
        views wcConfig (filterScopes . confScopes) >>= \case
            [] ->
                throwM $ ConfigLogicException $
                "Scope " <> scopeName <> " is not declared"
            [sc] -> resolveFoundScope sc
            scopes ->
                throwM $ ConfigLogicException $
                "Multiple scopes with name " <> scopeName <>
                " are declared " <> show scopes
    -- Read every file of the scope, wrap all of them under a synthetic
    -- root node (each file's subtree titled with its name) and memoise.
    resolveFoundScope ConfScope{..} = do
        orgs <- NE.toList <$> forM csPaths resolveInputOrg
        let orgTop = Org "/" [] [] $ map (\(fn,o) -> o & orgTitle .~ fn) orgs
        wdResolvedScopes . at scopeName .= Just orgTop
        pure orgTop
-- | Same as 'resolveScope' but for reports: resolves the report's scope,
-- applies the configured modifiers, cuts the result to the report's time
-- range and memoises it in 'wdResolvedReports'.
resolveReport :: Text -> WorkM Org
resolveReport reportName = use (wdResolvedReports . at reportName) >>= \case
    Just cached -> pure cached
    Nothing -> constructReport
  where
    constructReport = do
        let filterReports = filter (\x -> crName x == reportName)
        views wcConfig (filterReports . confReports) >>= \case
            [] ->
                throwM $ ConfigLogicException $
                "Report " <> reportName <> " is not declared"
            [rep] -> resolveFoundReport rep
            reports ->
                throwM $ ConfigLogicException $
                "Multiple reports with name " <> reportName <>
                " are declared " <> show reports
    resolveFoundReport ConfReport{..} = do
        orgTop <- resolveScope crScope
        fromto <- convertRange crRange
        withModifiers <-
            either throwM pure $
            applyModifiers orgTop crModifiers
        --let finalOrg = cutFromTo fromto $ mergeClocks withModifiers
        let finalOrg = cutFromTo fromto withModifiers
        wdResolvedReports . at reportName .= Just finalOrg
        pure finalOrg
-- | Look up the output configuration with the given name, failing with a
-- 'ConfigLogicException' when it is missing or ambiguous.
resolveOutput :: Text -> WorkM ConfOutput
resolveOutput outputName =
    views wcConfig (filterOutputs . confOutputs) >>= \case
        [] ->
            throwM $ ConfigLogicException $
            "Output " <> outputName <> " is not declared"
        [out] -> pure out
        outputs ->
            throwM $ ConfigLogicException $
            "Multiple outputs with name " <> outputName <>
            " are declared " <> show outputs
  where
    filterOutputs = filter (\x -> coName x == outputName)
| volhovM/orgstat | src/OrgStat/Helpers.hs | gpl-3.0 | 5,893 | 0 | 17 | 1,574 | 1,708 | 880 | 828 | -1 | -1 |
module P46WordFreq where
import qualified Data.Map as M
import Data.Function(on)
import Data.List(sortBy,sort, group)
import Text.Printf(printf)
import Control.Arrow ((&&&))
-- TODO: Test on shakespeare, bar chart, write in another language, tests
-- | Input corpus to analyse; swap the definition to point elsewhere.
file :: String
--file = "../../../barncamp-signing-annotated.txt"
file = "shakespeare.txt"
-- | Read the corpus and print one "word: count" row per distinct word,
-- most frequent first.
main :: IO ()
main = do
  contents <- readFile file
  putStrLn (concatMap mkRow (freqWds (words contents)))
-- | Count occurrences of each distinct element, most frequent first.
freqWds :: Ord a => [a] -> [(a, Int)]
freqWds xs = sortBy (flip compare `on` snd) counts
  where
    counts = [ (v, length run) | run@(v:_) <- group (sort xs) ]
-- | Same result as 'freqWds', but counted via a Map accumulator:
-- occurrences of each word, most frequent first.
freqWds' :: [String] -> [(String,Int)]
freqWds' ws =
  sortBy (flip compare `on` snd) . M.toList $
    foldr bump M.empty ws
  where
    -- Single lookup-and-insert per word instead of the original
    -- member / (!) / insert triple.
    bump w m = M.insertWith (+) w 1 m
-- | Render one table row: the word left-justified to 25 columns, a
-- colon, then the count.
mkRow :: (String, Int) -> String
mkRow (w, c) = labelled ++ show c ++ "\n"
  where
    labelled = printf "%-25s: " (take 25 w)
| ciderpunx/57-exercises-for-programmers | src/P46WordFreq.hs | gpl-3.0 | 903 | 0 | 14 | 210 | 363 | 199 | 164 | 23 | 2 |
-- Problem 40
-- (0.04 secs, 147,193,696 bytes)
import Data.Char (digitToInt)
d n = (concat . map show $ [0..1000000]) !! n
e040 = print . product . map (digitToInt . d . (10^)) $ [1..6] | synndicate/euler | solutions/e040.hs | gpl-3.0 | 189 | 2 | 10 | 38 | 91 | 46 | 45 | 3 | 1 |
-- | Sum of the first @n@ elements of an infinite 'Stream'.
-- Returns 0 when @n <= 0@ (the head 'Cons' is still matched first).
sumS :: Num a => Int -> Stream a -> a
sumS n (Cons x rest)
    | n <= 0 = 0
    | otherwise = x + sumS (n - 1) rest
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.RuntimeConfig.Operations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists operations that match the specified filter in the request. If the
-- server doesn\'t support this method, it returns \`UNIMPLEMENTED\`. NOTE:
-- the \`name\` binding below allows API services to override the binding
-- to use different resource name schemes, such as
-- \`users\/*\/operations\`.
--
-- /See:/ <https://cloud.google.com/deployment-manager/runtime-configurator/ Google Cloud RuntimeConfig API Reference> for @runtimeconfig.operations.list@.
module Network.Google.Resource.RuntimeConfig.Operations.List
(
-- * REST Resource
OperationsListResource
-- * Creating a Request
, operationsList
, OperationsList
-- * Request Lenses
, olXgafv
, olUploadProtocol
, olPp
, olAccessToken
, olUploadType
, olBearerToken
, olName
, olFilter
, olPageToken
, olPageSize
, olCallback
) where
import Network.Google.Prelude
import Network.Google.RuntimeConfig.Types
-- | A resource alias for @runtimeconfig.operations.list@ method which the
-- 'OperationsList' request conforms to.
-- Effective request: GET /v1/{+name} with the standard query
-- parameters mirrored by the lenses below.
type OperationsListResource =
     "v1" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "pp" Bool :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "bearer_token" Text :>
                     QueryParam "filter" Text :>
                       QueryParam "pageToken" Text :>
                         QueryParam "pageSize" (Textual Int32) :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :>
                               Get '[JSON] ListOperationsResponse
-- | Lists operations that match the specified filter in the request. If the
-- server doesn\'t support this method, it returns \`UNIMPLEMENTED\`. NOTE:
-- the \`name\` binding below allows API services to override the binding
-- to use different resource name schemes, such as
-- \`users\/*\/operations\`.
--
-- /See:/ 'operationsList' smart constructor.
data OperationsList = OperationsList'
    { _olXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _olUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _olPp :: !Bool -- ^ Pretty-print response.
    , _olAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _olUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media.
    , _olBearerToken :: !(Maybe Text) -- ^ OAuth bearer token.
    , _olName :: !Text -- ^ Name of the operation collection.
    , _olFilter :: !(Maybe Text) -- ^ Standard list filter.
    , _olPageToken :: !(Maybe Text) -- ^ Standard list page token.
    , _olPageSize :: !(Maybe (Textual Int32)) -- ^ Standard list page size.
    , _olCallback :: !(Maybe Text) -- ^ JSONP callback.
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'olXgafv'
--
-- * 'olUploadProtocol'
--
-- * 'olPp'
--
-- * 'olAccessToken'
--
-- * 'olUploadType'
--
-- * 'olBearerToken'
--
-- * 'olName'
--
-- * 'olFilter'
--
-- * 'olPageToken'
--
-- * 'olPageSize'
--
-- * 'olCallback'
operationsList
    :: Text -- ^ 'olName'
    -> OperationsList
operationsList name_ =
    OperationsList'
    { _olName = name_
    , _olXgafv = Nothing
    , _olUploadProtocol = Nothing
    , _olPp = True
    , _olAccessToken = Nothing
    , _olUploadType = Nothing
    , _olBearerToken = Nothing
    , _olFilter = Nothing
    , _olPageToken = Nothing
    , _olPageSize = Nothing
    , _olCallback = Nothing
    }
-- | V1 error format.
olXgafv :: Lens' OperationsList (Maybe Xgafv)
olXgafv = lens _olXgafv (\record v -> record {_olXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
olUploadProtocol :: Lens' OperationsList (Maybe Text)
olUploadProtocol =
    lens _olUploadProtocol (\record v -> record {_olUploadProtocol = v})

-- | Pretty-print response.
olPp :: Lens' OperationsList Bool
olPp = lens _olPp (\record v -> record {_olPp = v})

-- | OAuth access token.
olAccessToken :: Lens' OperationsList (Maybe Text)
olAccessToken =
    lens _olAccessToken (\record v -> record {_olAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
olUploadType :: Lens' OperationsList (Maybe Text)
olUploadType =
    lens _olUploadType (\record v -> record {_olUploadType = v})

-- | OAuth bearer token.
olBearerToken :: Lens' OperationsList (Maybe Text)
olBearerToken =
    lens _olBearerToken (\record v -> record {_olBearerToken = v})

-- | The name of the operation collection.
olName :: Lens' OperationsList Text
olName = lens _olName (\record v -> record {_olName = v})

-- | The standard list filter.
olFilter :: Lens' OperationsList (Maybe Text)
olFilter = lens _olFilter (\record v -> record {_olFilter = v})

-- | The standard list page token.
olPageToken :: Lens' OperationsList (Maybe Text)
olPageToken =
    lens _olPageToken (\record v -> record {_olPageToken = v})

-- | The standard list page size.
olPageSize :: Lens' OperationsList (Maybe Int32)
olPageSize =
    lens _olPageSize (\record v -> record {_olPageSize = v})
    . mapping _Coerce

-- | JSONP
olCallback :: Lens' OperationsList (Maybe Text)
olCallback =
    lens _olCallback (\record v -> record {_olCallback = v})
-- Wires every request field into the generated client; 'Scopes' lists
-- the OAuth scopes accepted by this method.
instance GoogleRequest OperationsList where
        type Rs OperationsList = ListOperationsResponse
        type Scopes OperationsList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloudruntimeconfig"]
        requestClient OperationsList'{..}
          = go _olName _olXgafv _olUploadProtocol (Just _olPp)
              _olAccessToken
              _olUploadType
              _olBearerToken
              _olFilter
              _olPageToken
              _olPageSize
              _olCallback
              (Just AltJSON)
              runtimeConfigService
          where go
                  = buildClient (Proxy :: Proxy OperationsListResource)
                      mempty
| rueshyna/gogol | gogol-runtimeconfig/gen/Network/Google/Resource/RuntimeConfig/Operations/List.hs | mpl-2.0 | 6,646 | 0 | 20 | 1,682 | 1,118 | 646 | 472 | 148 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ContainerAnalysis.Projects.Notes.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the specified note.
--
-- /See:/ <https://cloud.google.com/container-analysis/api/reference/rest/ Container Analysis API Reference> for @containeranalysis.projects.notes.get@.
module Network.Google.Resource.ContainerAnalysis.Projects.Notes.Get
(
-- * REST Resource
ProjectsNotesGetResource
-- * Creating a Request
, projectsNotesGet
, ProjectsNotesGet
-- * Request Lenses
, pngXgafv
, pngUploadProtocol
, pngAccessToken
, pngUploadType
, pngName
, pngCallback
) where
import Network.Google.ContainerAnalysis.Types
import Network.Google.Prelude
-- | A resource alias for @containeranalysis.projects.notes.get@ method which the
-- 'ProjectsNotesGet' request conforms to.
-- Effective request: GET /v1beta1/{+name} with the standard query
-- parameters mirrored by the lenses below.
type ProjectsNotesGetResource =
     "v1beta1" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :> Get '[JSON] Note
-- | Gets the specified note.
--
-- /See:/ 'projectsNotesGet' smart constructor.
data ProjectsNotesGet =
  ProjectsNotesGet'
    { _pngXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _pngUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _pngAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _pngUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media.
    , _pngName :: !Text -- ^ Name of the note being fetched.
    , _pngCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsNotesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pngXgafv'
--
-- * 'pngUploadProtocol'
--
-- * 'pngAccessToken'
--
-- * 'pngUploadType'
--
-- * 'pngName'
--
-- * 'pngCallback'
projectsNotesGet
    :: Text -- ^ 'pngName'
    -> ProjectsNotesGet
projectsNotesGet name_ =
  ProjectsNotesGet'
    { _pngName = name_
    , _pngXgafv = Nothing
    , _pngUploadProtocol = Nothing
    , _pngAccessToken = Nothing
    , _pngUploadType = Nothing
    , _pngCallback = Nothing
    }
-- | V1 error format.
pngXgafv :: Lens' ProjectsNotesGet (Maybe Xgafv)
pngXgafv = lens _pngXgafv (\record v -> record {_pngXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pngUploadProtocol :: Lens' ProjectsNotesGet (Maybe Text)
pngUploadProtocol =
    lens _pngUploadProtocol (\record v -> record {_pngUploadProtocol = v})

-- | OAuth access token.
pngAccessToken :: Lens' ProjectsNotesGet (Maybe Text)
pngAccessToken =
    lens _pngAccessToken (\record v -> record {_pngAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pngUploadType :: Lens' ProjectsNotesGet (Maybe Text)
pngUploadType =
    lens _pngUploadType (\record v -> record {_pngUploadType = v})

-- | Required. The name of the note in the form of
-- \`projects\/[PROVIDER_ID]\/notes\/[NOTE_ID]\`.
pngName :: Lens' ProjectsNotesGet Text
pngName = lens _pngName (\record v -> record {_pngName = v})

-- | JSONP
pngCallback :: Lens' ProjectsNotesGet (Maybe Text)
pngCallback =
    lens _pngCallback (\record v -> record {_pngCallback = v})
-- Wires every request field into the generated client; 'Scopes' lists
-- the OAuth scopes accepted by this method.
instance GoogleRequest ProjectsNotesGet where
        type Rs ProjectsNotesGet = Note
        type Scopes ProjectsNotesGet =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient ProjectsNotesGet'{..}
          = go _pngName _pngXgafv _pngUploadProtocol
              _pngAccessToken
              _pngUploadType
              _pngCallback
              (Just AltJSON)
              containerAnalysisService
          where go
                  = buildClient
                      (Proxy :: Proxy ProjectsNotesGetResource)
                      mempty
| brendanhay/gogol | gogol-containeranalysis/gen/Network/Google/Resource/ContainerAnalysis/Projects/Notes/Get.hs | mpl-2.0 | 4,498 | 0 | 15 | 1,026 | 696 | 407 | 289 | 100 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.TurnBasedMatches.Decline
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Decline an invitation to play a turn-based match.
--
-- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.turnBasedMatches.decline@.
module Network.Google.Resource.Games.TurnBasedMatches.Decline
(
-- * REST Resource
TurnBasedMatchesDeclineResource
-- * Creating a Request
, turnBasedMatchesDecline
, TurnBasedMatchesDecline
-- * Request Lenses
, tbmdConsistencyToken
, tbmdLanguage
, tbmdMatchId
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.turnBasedMatches.decline@ method which the
-- 'TurnBasedMatchesDecline' request conforms to.
-- Effective request: PUT games/v1/turnbasedmatches/{matchId}/decline
-- with the query parameters mirrored by the lenses below.
type TurnBasedMatchesDeclineResource =
     "games" :>
       "v1" :>
         "turnbasedmatches" :>
           Capture "matchId" Text :>
             "decline" :>
               QueryParam "consistencyToken" (Textual Int64) :>
                 QueryParam "language" Text :>
                   QueryParam "alt" AltJSON :>
                     Put '[JSON] TurnBasedMatch
-- | Decline an invitation to play a turn-based match.
--
-- /See:/ 'turnBasedMatchesDecline' smart constructor.
data TurnBasedMatchesDecline = TurnBasedMatchesDecline'
    { _tbmdConsistencyToken :: !(Maybe (Textual Int64)) -- ^ Last-seen mutation timestamp.
    , _tbmdLanguage :: !(Maybe Text) -- ^ Preferred language for returned strings.
    , _tbmdMatchId :: !Text -- ^ ID of the match being declined.
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TurnBasedMatchesDecline' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tbmdConsistencyToken'
--
-- * 'tbmdLanguage'
--
-- * 'tbmdMatchId'
turnBasedMatchesDecline
    :: Text -- ^ 'tbmdMatchId'
    -> TurnBasedMatchesDecline
turnBasedMatchesDecline matchId_ =
    TurnBasedMatchesDecline'
    { _tbmdMatchId = matchId_
    , _tbmdConsistencyToken = Nothing
    , _tbmdLanguage = Nothing
    }
-- | The last-seen mutation timestamp.
tbmdConsistencyToken :: Lens' TurnBasedMatchesDecline (Maybe Int64)
tbmdConsistencyToken =
    lens _tbmdConsistencyToken
         (\record v -> record {_tbmdConsistencyToken = v})
    . mapping _Coerce

-- | The preferred language to use for strings returned by this method.
tbmdLanguage :: Lens' TurnBasedMatchesDecline (Maybe Text)
tbmdLanguage =
    lens _tbmdLanguage (\record v -> record {_tbmdLanguage = v})

-- | The ID of the match.
tbmdMatchId :: Lens' TurnBasedMatchesDecline Text
tbmdMatchId =
    lens _tbmdMatchId (\record v -> record {_tbmdMatchId = v})
-- Wires every request field into the generated client; 'Scopes' lists
-- the OAuth scopes accepted by this method.
instance GoogleRequest TurnBasedMatchesDecline where
        type Rs TurnBasedMatchesDecline = TurnBasedMatch
        type Scopes TurnBasedMatchesDecline =
             '["https://www.googleapis.com/auth/games",
               "https://www.googleapis.com/auth/plus.login"]
        requestClient TurnBasedMatchesDecline'{..}
          = go _tbmdMatchId _tbmdConsistencyToken _tbmdLanguage
              (Just AltJSON)
              gamesService
          where go
                  = buildClient
                      (Proxy :: Proxy TurnBasedMatchesDeclineResource)
                      mempty
| rueshyna/gogol | gogol-games/gen/Network/Google/Resource/Games/TurnBasedMatches/Decline.hs | mpl-2.0 | 3,908 | 0 | 15 | 893 | 488 | 287 | 201 | 75 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.YouTubeReporting
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Schedules reporting jobs containing your YouTube Analytics data and
-- downloads the resulting bulk data reports in the form of CSV files.
--
-- /See:/ <https://developers.google.com/youtube/reporting/v1/reports/ YouTube Reporting API Reference>
module Network.Google.YouTubeReporting
(
-- * Service Configuration
youTubeReportingService
-- * OAuth Scopes
, youTubeAnalyticsReadOnlyScope
, youTubeAnalyticsMonetaryReadOnlyScope
-- * API Declaration
, YouTubeReportingAPI
-- * Resources
-- ** youtubereporting.jobs.create
, module Network.Google.Resource.YouTubeReporting.Jobs.Create
-- ** youtubereporting.jobs.delete
, module Network.Google.Resource.YouTubeReporting.Jobs.Delete
-- ** youtubereporting.jobs.get
, module Network.Google.Resource.YouTubeReporting.Jobs.Get
-- ** youtubereporting.jobs.list
, module Network.Google.Resource.YouTubeReporting.Jobs.List
-- ** youtubereporting.jobs.reports.get
, module Network.Google.Resource.YouTubeReporting.Jobs.Reports.Get
-- ** youtubereporting.jobs.reports.list
, module Network.Google.Resource.YouTubeReporting.Jobs.Reports.List
-- ** youtubereporting.media.download
, module Network.Google.Resource.YouTubeReporting.Media.Download
-- ** youtubereporting.reportTypes.list
, module Network.Google.Resource.YouTubeReporting.ReportTypes.List
-- * Types
-- ** ListReportsResponse
, ListReportsResponse
, listReportsResponse
, lrrNextPageToken
, lrrReports
-- ** Empty
, Empty
, empty
-- ** Report
, Report
, report
, rJobId
, rStartTime
, rDownloadURL
, rEndTime
, rId
, rCreateTime
, rJobExpireTime
-- ** ListReportTypesResponse
, ListReportTypesResponse
, listReportTypesResponse
, lrtrNextPageToken
, lrtrReportTypes
-- ** Media
, Media
, media
, mResourceName
-- ** Job
, Job
, job
, jName
, jId
, jSystemManaged
, jReportTypeId
, jExpireTime
, jCreateTime
-- ** Xgafv
, Xgafv (..)
-- ** ListJobsResponse
, ListJobsResponse
, listJobsResponse
, ljrNextPageToken
, ljrJobs
-- ** ReportType
, ReportType
, reportType
, rtName
, rtId
, rtDeprecateTime
, rtSystemManaged
) where
import Network.Google.Prelude
import Network.Google.Resource.YouTubeReporting.Jobs.Create
import Network.Google.Resource.YouTubeReporting.Jobs.Delete
import Network.Google.Resource.YouTubeReporting.Jobs.Get
import Network.Google.Resource.YouTubeReporting.Jobs.List
import Network.Google.Resource.YouTubeReporting.Jobs.Reports.Get
import Network.Google.Resource.YouTubeReporting.Jobs.Reports.List
import Network.Google.Resource.YouTubeReporting.Media.Download
import Network.Google.Resource.YouTubeReporting.ReportTypes.List
import Network.Google.YouTubeReporting.Types
{- $resources
One resource module per API method; see the modules re-exported above.
-}
-- | Represents the entirety of the methods and resources available for the YouTube Reporting API service.
-- Job management, report retrieval, media download and report-type
-- listing endpoints, combined with servant's (:<|>).
type YouTubeReportingAPI =
     JobsReportsListResource :<|> JobsReportsGetResource
       :<|> JobsListResource
       :<|> JobsGetResource
       :<|> JobsCreateResource
       :<|> JobsDeleteResource
       :<|> MediaDownloadResource
       :<|> ReportTypesListResource
| rueshyna/gogol | gogol-youtube-reporting/gen/Network/Google/YouTubeReporting.hs | mpl-2.0 | 3,920 | 0 | 11 | 848 | 392 | 291 | 101 | 78 | 0 |
module Text.Diff.Parse.Types where
import Data.Text (Text)
-- | Whether a diff line was added, removed, or is unchanged context.
data Annotation = Added | Removed | Context deriving (Show, Eq)

-- | One line of a hunk: its annotation plus the line text.
data Line = Line {
      lineAnnotation :: Annotation
    , lineContent    :: Text
    } deriving (Show, Eq)

-- | A line range within one side of a hunk header (@start,count@).
data Range = Range {
      rangeStartingLineNumber :: Int
    , rangeNumberOfLines      :: Int
    } deriving (Show, Eq)

-- | A single hunk: the source/destination ranges and the lines within.
data Hunk = Hunk {
      hunkSourceRange :: Range
    , hunkDestRange   :: Range
    , hunkLines       :: [Line]
    } deriving (Show, Eq)

-- | File content in a delta: either a binary marker or textual hunks.
data Content = Binary | Hunks [Hunk] deriving (Show, Eq)

-- | What happened to the file in this delta.
data FileStatus = Created | Deleted | Modified | Renamed deriving (Show, Eq)

-- | A per-file diff: status, both file paths, and the content changes.
data FileDelta = FileDelta {
      fileDeltaStatus     :: FileStatus
    , fileDeltaSourceFile :: Text
    , fileDeltaDestFile   :: Text
    , fileDeltaContent    :: Content
    } deriving (Show, Eq)

type FileDeltas = [FileDelta]
| mulby/diff-parse | src/Text/Diff/Parse/Types.hs | agpl-3.0 | 827 | 0 | 9 | 184 | 253 | 153 | 100 | 25 | 0 |
module Gonimo.Server.AuthHandlers.Internal where
import Control.Concurrent.STM (STM, TVar, readTVar)
import Control.Lens ((^.))
import Control.Monad (guard)
import Control.Monad.Freer (Eff)
import Control.Monad.STM.Class (liftSTM)
import Control.Monad.Trans.Maybe (MaybeT (..), runMaybeT)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Gonimo.Server.Auth (AuthServerConstraint,
authorizeAuthData,
authorizeJust, clientKey,
isFamilyMember)
import Gonimo.Server.DbEntities (ClientId, FamilyId)
import Gonimo.Server.Effects (atomically, getState, timeout)
import Gonimo.Server.State (FamilyOnlineState,
onlineMembers)
import Utils.Control.Monad.Trans.Maybe (maybeT)
-- | Run an STM action against a family's online state, but only when the
-- caller is a member of the family, is the addressed client @toId@, and
-- both endpoints are currently online.  The transaction is bounded by
-- @timeout 2000@ (NOTE(review): unit depends on
-- 'Gonimo.Server.Effects.timeout' — confirm) and authorization fails if
-- it does not complete.
authorizedPut :: AuthServerConstraint r
              => (TVar FamilyOnlineState -> STM ())
              -> FamilyId -> ClientId -> ClientId -> Eff r ()
authorizedPut f familyId fromId toId = do
  authorizeAuthData (isFamilyMember familyId)
  authorizeAuthData ((toId ==) . clientKey)
  let fromto = S.fromList [fromId, toId]
  state <- getState
  x <- timeout 2000 $ atomically $ runMaybeT $ do
    -- look up this family's online state; abort if not present
    a <- (maybeT . (familyId `M.lookup`)) =<< liftSTM (readTVar state)
    b <- liftSTM $ readTVar a
    -- both endpoints must be online members
    guard $ fromto `S.isSubsetOf` (b^.onlineMembers)
    liftSTM $ f a
  authorizeJust id x
-- | Like 'authorizedPut' but for reading: run an STM query against a
-- family's online state with the same membership/identity/online checks,
-- failing authorization when the query yields Nothing or the timeout
-- elapses.  (Name spelling kept as-is: it is part of the public API.)
authorizedRecieve :: AuthServerConstraint r
                  => (TVar FamilyOnlineState -> STM (Maybe a))
                  -> FamilyId -> ClientId -> ClientId -> Eff r a
authorizedRecieve f familyId fromId toId = do
  authorizeAuthData (isFamilyMember familyId)
  authorizeAuthData ((toId ==) . clientKey)
  let fromto = S.fromList [fromId, toId]
  state <- getState
  x <- timeout 2000 $ atomically $ runMaybeT $ do
    -- look up this family's online state; abort if not present
    a <- (maybeT . (familyId `M.lookup`)) =<< liftSTM (readTVar state)
    b <- liftSTM $ readTVar a
    -- both endpoints must be online members
    guard $ fromto `S.isSubsetOf` (b^.onlineMembers)
    MaybeT $ f a
  authorizeJust id x
-- | Variant of 'authorizedRecieve' that does not require a sender:
-- only the receiving client 'toId' is checked (family membership plus
-- client identity), and no online-members guard is performed.
authorizedRecieve' :: AuthServerConstraint r
                   => (TVar FamilyOnlineState -> STM (Maybe a))
                   -> FamilyId -> ClientId -> Eff r a
authorizedRecieve' f familyId toId = do
  authorizeAuthData (isFamilyMember familyId)
  authorizeAuthData ((toId ==) . clientKey)
  state <- getState
  x <- timeout 2000 $ atomically $ runMaybeT $ do
    a <- (maybeT . (familyId `M.lookup`)) =<< liftSTM (readTVar state)
    MaybeT $ f a
  authorizeJust id x
| charringer/gonimo-back | src/Gonimo/Server/AuthHandlers/Internal.hs | agpl-3.0 | 2,839 | 0 | 15 | 936 | 822 | 432 | 390 | 57 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Telegram where
import Calendar
import Ai
import Web.Telegram.API.Bot
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Control.Monad.Reader
import Control.Monad.State
import Data.Maybe
import Data.Binary.Tagged
import Data.Binary (Binary)
import GHC.Generics (Generic)
import AI.HNN.FF.Network
-- | Static bot configuration: Telegram API token, the iCal calendar
-- URL, the bot's own username (used to detect mentions), and the
-- trained network used to pick responses.
data Conf = Conf { _token :: !Token, _icalUrl :: !T.Text, _username :: !T.Text, _net :: !(Network Double) }
-- | The bot monad: read-only 'Conf' over mutable 'MyState' over IO.
type Mystack a = ReaderT Conf (StateT MyState IO) a
-- | Telegram chat identifier, stored as text; serializable via
-- binary-tagged for the on-disk cache.
newtype ChatId = ChatId T.Text deriving (Eq, Show, Generic)
instance Binary ChatId
instance HasStructuralInfo ChatId
instance HasSemanticVersion ChatId
-- | Telegram user identifier; serializable via binary-tagged for the
-- on-disk cache.
newtype UserId = UserId Int deriving (Eq, Show, Generic)
instance Binary UserId
instance HasStructuralInfo UserId
instance HasSemanticVersion UserId
-- | Unwrap a 'ChatId' to its underlying text.
toText :: ChatId -> T.Text
toText cid =
  case cid of
    ChatId t -> t
-- | Persisted bot memory: the group chat the bot is bound to (set on
-- first contact) and the users already approved as individuals.
data Cache = Cache { _group :: Maybe ChatId, _individuals :: [UserId] } deriving (Eq, Show, Generic)
instance Binary Cache
instance HasStructuralInfo Cache
instance HasSemanticVersion Cache
data MyState = MyState { _cache :: Cache, _offset :: Int }
-- | Write a log line to stdout from inside the bot monad.
log_ :: String -> Mystack ()
log_ msg = liftIO (putStrLn msg)
-- | Main long-polling loop: fetch pending updates from Telegram
-- (long-poll timeout 5, limit 1), dispatch each through 'doOne', then
-- advance the stored offset one past the highest update id handled.
-- API errors are logged and polling continues.
loop :: Mystack ()
loop = do
  Conf { _token = token } <- ask
  MyState { _offset = offset } <- get
  response <- liftIO $ getUpdates token (Just offset) (Just 1) (Just 5)
  case response of
    Left a -> do
      log_ . show $ a
      loop
    Right UpdatesResponse { update_result = updates } -> do
      liftIO $ mapM_ (putStrLn . show) updates
      -- 'null' instead of 'length updates > 0': no full list traversal.
      if not (null updates)
        then do
          -- 'maximum' is safe here: guarded by the non-empty check above.
          newOffset <- (+ 1) . maximum <$> mapM doOne updates
          modify (\x -> x { _offset = newOffset })
          loop
        else
          loop
-- | Handle a single Telegram update and return its update id (used by
-- 'loop' to advance the offset).
--
-- Behavior: the first chat the bot ever sees becomes the bound group
-- (persisted to the "cache" file).  Messages are answered when they
-- come from the bound group, or privately from an already-approved
-- user; users seen speaking in the bound group are added to the
-- approved list.  Anyone else gets a canned Finnish rejection.
--
-- NOTE(review): the 'fromJust' calls assume every update carries a
-- message with a sender and text-bearing chat; a channel post or other
-- message-less update would crash here -- confirm against the update
-- types actually subscribed to.
doOne :: Update -> Mystack Int
doOne update = do
  Conf { _token = token, _icalUrl = icalUrl, _username = userName, _net = net} <- ask
  let userId = UserId . user_id . fromJust . from . fromJust . message $ update
  let chatId = ChatId . T.pack . show . chat_id . chat . fromJust . message $ update
  let chatType = chat_type . chat . fromJust . message $ update
  let textMaybe = text . fromJust . message $ update
  let maxSeenOffset = update_id update
  MyState { _cache = cache } <- get
  -- Bind the bot to the first chat seen, persisting the new cache.
  group <- if isNothing (_group cache)
    then do
      let newCache = cache { _group = Just chatId}
      modify (\x -> x { _cache = newCache})
      liftIO $ taggedEncodeFile "cache" newCache
      return chatId
    else return . fromJust . _group $ cache
  isGoodUser <- isKnownGoodUser userId
  -- Listen always in private chats; in groups only when mentioned.
  let shouldListen' = shouldListen (isGroup chatType) (T.isInfixOf userName (maybe "" id textMaybe))
  let isGoodGroup = group == chatId
  _ <- if isGoodGroup || (not (isGroup chatType) && isGoodUser)
    then do
      -- Speaking in the bound group earns individual approval.
      _ <- if isGoodGroup && not isGoodUser then addUser userId else return ()
      let choice = choose textMaybe net
      response <- case choice of
        NoResponse -> return ""
        NextPractice -> liftIO $ LT.toStrict <$> whenNextPractice icalUrl
        StaticResponse t -> return t
        WhereNextPractice -> liftIO $ LT.toStrict <$> whereNextPractice icalUrl
      if T.length response > 0 && shouldListen'
        then liftIO $ sendMessage token (SendMessageRequest (toText chatId) response (Just Markdown) Nothing Nothing Nothing) >> return ()
        else liftIO . return $ ()
    else liftIO $ if shouldListen'
      then sendMessage token (SendMessageRequest (toText chatId)
        "Tämä botti on vain YStävien käyttöön. Puhu minulle YS-kanavalla (ja kun tunnemme toisemme myös yksityischätissä)" (Just Markdown) Nothing Nothing Nothing) >> return ()
      else return ()
  return maxSeenOffset
-- | Decide whether the bot should react to a message: always outside
-- group chats, and only when mentioned inside a group chat.
shouldListen :: Bool -> Bool -> Bool
shouldListen inGroup mentioned
  | inGroup   = mentioned
  | otherwise = True
-- | True only for plain 'Group' chats; every other chat type counts
-- as non-group.
isGroup :: ChatType -> Bool
isGroup chatType =
  case chatType of
    Group -> True
    _     -> False
-- | Append a user to the approved-individuals list and persist the
-- updated cache to the "cache" file.
-- Note: list append is O(n) per call; fine for the expected small
-- membership.
addUser :: UserId -> Mystack ()
addUser userId = do
  modify (\state' ->
    let cache = _cache state' in state' { _cache = cache {_individuals = (_individuals cache) ++ [userId]}})
  newCache <- _cache <$> get
  liftIO $ taggedEncodeFile "cache" newCache
-- | Has this user already been recorded as an approved individual?
isKnownGoodUser :: UserId -> Mystack Bool
isKnownGoodUser uid = do
  st <- get
  let approved = _individuals (_cache st)
  return (uid `elem` approved)
| daniellandau/ystava | src/Telegram.hs | agpl-3.0 | 4,346 | 0 | 19 | 1,013 | 1,466 | 744 | 722 | 110 | 9 |
{-# LANGUAGE BangPatterns, FlexibleContexts, FlexibleInstances
, MultiParamTypeClasses, PatternGuards, TypeFamilies
, UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Vision.Image.Type (
-- * Classes
Pixel (..), MaskedImage (..), Image (..), ImageChannel, FromFunction (..)
, FunctorImage (..)
-- * Manifest images
, Manifest (..)
-- * Delayed images
, Delayed (..)
-- * Delayed masked images
, DelayedMask (..)
-- * Functions
, nChannels, pixel
-- * Conversion
, Convertible (..), convert, delay, compute
-- * Types helpers
, delayed, manifest
) where
import Control.Applicative ((<$>))
import Data.Convertible (Convertible (..), convert)
import Data.Int
import Data.Vector.Storable (
Vector, (!), create, enumFromN, forM_, generate, unfoldr
)
import Data.Vector.Storable.Mutable (new, write)
import Data.Word
import Foreign.Storable (Storable)
import Prelude hiding (map, read)
import Vision.Primitive (
Z (..), (:.) (..), Point, Size
, ix2, fromLinearIndex, toLinearIndex, shapeLength
)
-- Classes ---------------------------------------------------------------------
-- | Determines the number of channels and the type of each pixel of the image
-- and how images are represented.
class Storable p => Pixel p where
    type PixelChannel p
    -- | Returns the number of channels of the pixel.
    -- Must not consume 'p' (could be 'undefined').
    pixNChannels :: p -> Int
    pixIndex :: p -> Int -> PixelChannel p
-- Primitive numeric (and Bool) pixels are all single-channel: the
-- channel type is the pixel type itself and 'pixIndex' ignores the
-- channel index.
instance Pixel Int16 where
    type PixelChannel Int16 = Int16
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Int32 where
    type PixelChannel Int32 = Int32
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Int where
    type PixelChannel Int = Int
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Word8 where
    type PixelChannel Word8 = Word8
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Word16 where
    type PixelChannel Word16 = Word16
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Word32 where
    type PixelChannel Word32 = Word32
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Word where
    type PixelChannel Word = Word
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Float where
    type PixelChannel Float = Float
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Double where
    type PixelChannel Double = Double
    pixNChannels _ = 1
    pixIndex p _ = p
instance Pixel Bool where
    type PixelChannel Bool = Bool
    pixNChannels _ = 1
    pixIndex p _ = p
-- | Provides an abstraction for images which are not defined for each of their
-- pixels. The interface is similar to 'Image' except that indexing functions
-- don't always return.
-- Image origin is located in the lower left corner.
class Pixel (ImagePixel i) => MaskedImage i where
    type ImagePixel i
    shape :: i -> Size
    -- | Returns the pixel\'s value at 'Z :. y, :. x'.
    -- Default: delegates to 'maskedLinearIndex' via row-major indexing.
    maskedIndex :: i -> Point -> Maybe (ImagePixel i)
    maskedIndex img = (img `maskedLinearIndex`) . toLinearIndex (shape img)
    {-# INLINE maskedIndex #-}
    -- | Returns the pixel\'s value as if the image was a single dimension
    -- vector (row-major representation).
    -- Default: delegates to 'maskedIndex'; at least one of the two must
    -- be overridden (see the MINIMAL pragma) or they recurse forever.
    maskedLinearIndex :: i -> Int -> Maybe (ImagePixel i)
    maskedLinearIndex img = (img `maskedIndex`) . fromLinearIndex (shape img)
    {-# INLINE maskedLinearIndex #-}
    -- | Returns the non-masked values of the image.
    -- Default: scans all linear indices, skipping masked ('Nothing')
    -- positions.
    values :: i -> Vector (ImagePixel i)
    values !img =
        unfoldr step 0
      where
        !n = shapeLength (shape img)
        step !i | i >= n                                  = Nothing
                | Just p <- img `maskedLinearIndex` i     = Just (p, i + 1)
                | otherwise                               = step (i + 1)
    {-# INLINE values #-}
    {-# MINIMAL shape, (maskedIndex | maskedLinearIndex) #-}
-- | The channel type of an image's pixels.
type ImageChannel i = PixelChannel (ImagePixel i)
-- | Provides an abstraction over the internal representation of an image.
-- Image origin is located in the lower left corner.
class MaskedImage i => Image i where
    -- | Returns the pixel value at 'Z :. y :. x'.
    -- Default: delegates to 'linearIndex' via row-major indexing.
    index :: i -> Point -> ImagePixel i
    index img = (img `linearIndex`) . toLinearIndex (shape img)
    {-# INLINE index #-}
    -- | Returns the pixel value as if the image was a single dimension vector
    -- (row-major representation).
    -- Default: delegates to 'index'; at least one of the two must be
    -- overridden (see the MINIMAL pragma).
    linearIndex :: i -> Int -> ImagePixel i
    linearIndex img = (img `index`) . fromLinearIndex (shape img)
    {-# INLINE linearIndex #-}
    -- | Returns every pixel values as if the image was a single dimension
    -- vector (row-major representation).
    vector :: i -> Vector (ImagePixel i)
    vector img = generate (shapeLength $ shape img) (img `linearIndex`)
    {-# INLINE vector #-}
    {-# MINIMAL index | linearIndex #-}
-- | Provides ways to construct an image from a function.
class FromFunction i where
    type FromFunctionPixel i
    -- | Generates an image by calling the given function for each pixel of the
    -- constructed image.
    fromFunction :: Size -> (Point -> FromFunctionPixel i) -> i
    -- | Generates an image by calling the last function for each pixel of the
    -- constructed image.
    -- The first function is called for each line, generating a line invariant
    -- value.
    -- This function is faster for some image representations as some recurring
    -- computation can be cached.
    fromFunctionLine :: Size -> (Int -> a)
                     -> (a -> Point -> FromFunctionPixel i) -> i
    fromFunctionLine size line f =
        fromFunction size (\pt@(Z :. y :. _) -> f (line y) pt)
    {-# INLINE fromFunctionLine #-}
    -- | Generates an image by calling the last function for each pixel of the
    -- constructed image.
    -- The first function is called for each column, generating a column
    -- invariant value.
    -- This function *can* be faster for some image representations as some
    -- recurring computations can be cached. However, it may requires a vector
    -- allocation for these values. If the column invariant is cheap to
    -- compute, prefer 'fromFunction'.
    fromFunctionCol :: Storable b => Size -> (Int -> b)
                    -> (b -> Point -> FromFunctionPixel i) -> i
    fromFunctionCol size col f =
        fromFunction size (\pt@(Z :. _ :. x) -> f (col x) pt)
    {-# INLINE fromFunctionCol #-}
    -- | Generates an image by calling the last function for each pixel of the
    -- constructed image.
    -- The two first functions are called for each line and for each column,
    -- respectively, generating common line and column invariant values.
    -- This function is faster for some image representations as some recurring
    -- computation can be cached. However, it may requires a vector
    -- allocation for column values. If the column invariant is cheap to
    -- compute, prefer 'fromFunctionLine'.
    fromFunctionCached :: Storable b => Size
                       -> (Int -> a)               -- ^ Line function
                       -> (Int -> b)               -- ^ Column function
                       -> (a -> b -> Point
                           -> FromFunctionPixel i) -- ^ Pixel function
                       -> i
    fromFunctionCached size line col f =
        fromFunction size (\pt@(Z :. y :. x) -> f (line y) (col x) pt)
    {-# INLINE fromFunctionCached #-}
    {-# MINIMAL fromFunction #-}
-- | Defines a class for images on which a function can be applied. The class is
-- different from 'Functor' as there could be some constraints and
-- transformations the pixel and image types.
-- Source and result may use different representations (e.g. map a
-- 'Manifest' image into a 'Delayed' one).
class (MaskedImage src, MaskedImage res) => FunctorImage src res where
    map :: (ImagePixel src -> ImagePixel res) -> src -> res
-- Manifest images -------------------------------------------------------------
-- | Stores the image content in a 'Vector' (row-major order).
--
-- Fix: the original declaration carried a datatype context
-- (@data Storable p => Manifest p = ...@).  Datatype contexts add no
-- safety -- every consumer still needs its own 'Storable' constraint,
-- which all functions over 'Manifest' already carry -- and the feature
-- is deprecated (requires @DatatypeContexts@ on modern GHC).  Dropping
-- the context is strictly more permissive, so all existing callers
-- keep compiling.
data Manifest p = Manifest {
      manifestSize   :: !Size
    , manifestVector :: !(Vector p)
    } deriving (Eq, Ord, Show)
-- A 'Manifest' image is total: masked indexing always succeeds.
instance Pixel p => MaskedImage (Manifest p) where
    type ImagePixel (Manifest p) = p
    shape = manifestSize
    {-# INLINE shape #-}
    Manifest _ vec `maskedLinearIndex` ix = Just $! vec ! ix
    {-# INLINE maskedLinearIndex #-}
    values = manifestVector
    {-# INLINE values #-}
instance Pixel p => Image (Manifest p) where
    Manifest _ vec `linearIndex` ix = vec ! ix
    {-# INLINE linearIndex #-}
    vector = manifestVector
    {-# INLINE vector #-}
-- Builds the backing vector imperatively ('create' + 'write'), filling
-- row by row in row-major order.  The line/column variants hoist the
-- invariant values out of the inner loop (columns are pre-computed
-- into a cached vector).
instance Storable p => FromFunction (Manifest p) where
    type FromFunctionPixel (Manifest p) = p
    fromFunction !size@(Z :. h :. w) f =
        Manifest size $ create $ do
            arr <- new (h * w)
            forM_ (enumFromN 0 h) $ \y -> do
                let !lineOffset = y * w
                forM_ (enumFromN 0 w) $ \x -> do
                    let !offset = lineOffset + x
                        !val = f (ix2 y x)
                    write arr offset val
            return arr
    {-# INLINE fromFunction #-}
    fromFunctionLine !size@(Z :. h :. w) line f =
        Manifest size $ create $ do
            -- Note: create is faster than unfoldrN.
            arr <- new (h * w)
            forM_ (enumFromN 0 h) $ \y -> do
                let !lineVal = line y
                    !lineOffset = y * w
                forM_ (enumFromN 0 w) $ \x -> do
                    let !offset = lineOffset + x
                        !val = f lineVal (ix2 y x)
                    write arr offset val
            return arr
    {-# INLINE fromFunctionLine #-}
    fromFunctionCol !size@(Z :. h :. w) col f =
        Manifest size $ create $ do
            -- Note: create is faster than unfoldrN.
            arr <- new (h * w)
            forM_ (enumFromN 0 h) $ \y -> do
                let !lineOffset = y * w
                forM_ (enumFromN 0 w) $ \x -> do
                    let !offset = lineOffset + x
                        !val = f (cols ! x) (ix2 y x)
                    write arr offset val
            return arr
      where
        -- Per-column invariants, computed once for the whole image.
        !cols = generate w col
    {-# INLINE fromFunctionCol #-}
    fromFunctionCached !size@(Z :. h :. w) line col f =
        Manifest size $ create $ do
            -- Note: create is faster than unfoldrN.
            arr <- new (h * w)
            forM_ (enumFromN 0 h) $ \y -> do
                let !lineVal = line y
                    !lineOffset = y * w
                forM_ (enumFromN 0 w) $ \x -> do
                    let !offset = lineOffset + x
                        !val = f lineVal (cols ! x) (ix2 y x)
                    write arr offset val
            return arr
      where
        -- Per-column invariants, computed once for the whole image.
        !cols = generate w col
    {-# INLINE fromFunctionCached #-}
-- Mapping into a 'Manifest' forces the whole result image.
instance (Image src, Pixel p) => FunctorImage src (Manifest p) where
    map f img = fromFunction (shape img) (f . (img `index`))
    {-# INLINE map #-}
-- Delayed images --------------------------------------------------------------
-- | A delayed image is an image which is constructed using a function.
--
-- Usually, a delayed image maps each of its pixels over another image.
-- Delayed images are useful by avoiding intermediate images in a
-- transformation pipeline of images or by avoiding the computation of the whole
-- resulting image when only a portion of its pixels will be accessed.
-- | A delayed image: a size plus a pixel-producing function.  Pixels
-- are recomputed on every access; use 'compute' to materialise.
data Delayed p = Delayed {
      delayedSize :: !Size
    , delayedFun  :: !(Point -> p)
    }
instance Pixel p => MaskedImage (Delayed p) where
    type ImagePixel (Delayed p) = p
    shape = delayedSize
    {-# INLINE shape #-}
    maskedIndex img = Just . delayedFun img
    {-# INLINE maskedIndex #-}
instance Pixel p => Image (Delayed p) where
    index = delayedFun
    {-# INLINE index #-}
instance FromFunction (Delayed p) where
    type FromFunctionPixel (Delayed p) = p
    fromFunction = Delayed
    {-# INLINE fromFunction #-}
-- Mapping into a 'Delayed' image just composes functions; no pixel is
-- evaluated until indexed.
instance (Image src, Pixel p) => FunctorImage src (Delayed p) where
    map f img = fromFunction (shape img) (f . (img `index`))
    {-# INLINE map #-}
-- Masked delayed images -------------------------------------------------------
-- | A delayed image whose pixel function may refuse to produce a value
-- ('Nothing' = masked position).  Only a 'MaskedImage', not an 'Image'.
data DelayedMask p = DelayedMask {
      delayedMaskSize :: !Size
    , delayedMaskFun  :: !(Point -> Maybe p)
    }
instance Pixel p => MaskedImage (DelayedMask p) where
    type ImagePixel (DelayedMask p) = p
    shape = delayedMaskSize
    {-# INLINE shape #-}
    maskedIndex = delayedMaskFun
    {-# INLINE maskedIndex #-}
instance Pixel p => FromFunction (DelayedMask p) where
    type FromFunctionPixel (DelayedMask p) = Maybe p
    fromFunction = DelayedMask
    {-# INLINE fromFunction #-}
-- Masked positions stay masked; 'f' is applied only where a pixel exists.
instance (MaskedImage src, Pixel p) => FunctorImage src (DelayedMask p) where
    map f img = fromFunction (shape img) (\pt -> f <$> (img `maskedIndex` pt))
    {-# INLINE map #-}
-- Functions -------------------------------------------------------------------
-- | Returns the number of channels of an image.
nChannels :: MaskedImage i => i -> Int
nChannels img = pixNChannels (pixel img)
{-# INLINE nChannels #-}
-- | Returns an 'undefined' instance of a pixel of the image. This is sometime
-- useful to satisfy the type checker as in a call to 'pixNChannels' :
--
-- > nChannels img = pixNChannels (pixel img)
--
-- Never evaluate the result: it is a type-level witness only.
pixel :: MaskedImage i => i -> ImagePixel i
pixel _ = undefined
-- Conversion ------------------------------------------------------------------
-- | Delays an image in its delayed representation.
delay :: Image i => i -> Delayed (ImagePixel i)
delay = map id
{-# INLINE delay #-}
-- | Computes the value of an image into a manifest representation.
compute :: (Image i, Storable (ImagePixel i)) => i -> Manifest (ImagePixel i)
compute = map id
{-# INLINE compute #-}
-- Conversions between representations and pixel types: all four
-- combinations of Manifest/Delayed delegate to a pixel-wise 'map
-- convert' and can never fail ('Right').
instance (Pixel p1, Pixel p2, Storable p1, Storable p2, Convertible p1 p2)
    => Convertible (Manifest p1) (Manifest p2) where
    safeConvert = Right . map convert
    {-# INLINE safeConvert #-}
instance (Pixel p1, Pixel p2, Convertible p1 p2)
    => Convertible (Delayed p1) (Delayed p2) where
    safeConvert = Right . map convert
    {-# INLINE safeConvert #-}
instance (Pixel p1, Pixel p2, Storable p2, Convertible p1 p2)
    => Convertible (Delayed p1) (Manifest p2) where
    safeConvert = Right . map convert
    {-# INLINE safeConvert #-}
instance (Pixel p1, Pixel p2, Storable p1, Convertible p1 p2)
    => Convertible (Manifest p1) (Delayed p2) where
    safeConvert = Right . map convert
    {-# INLINE safeConvert #-}
-- Types helpers ---------------------------------------------------------------------
-- | Forces an image to be in its delayed representation. Does nothing.
delayed :: Delayed p -> Delayed p
delayed = id
-- | Forces an image to be in its manifest representation. Does nothing.
manifest :: Manifest p -> Manifest p
manifest = id
| TomMD/friday | src/Vision/Image/Type.hs | lgpl-3.0 | 14,924 | 0 | 22 | 4,163 | 3,418 | 1,815 | 1,603 | 272 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
import System.IO
-- | Heterogeneous addition: the result type 'c' is determined by the
-- two argument types via the functional dependency.
class Add a b c | a b -> c where
  plus :: a -> b -> c
-- | Heterogeneous multiplication, same fundep scheme as 'Add'.
class Mul a b c | a b -> c where
  mul :: a -> b -> c
instance Add Int Int Int where
  a `plus` b = a + b
instance Add Int Float Float where
  a `plus` b = (fromIntegral a) + b
instance Add Float Int Float where
  a `plus` b = a + (fromIntegral b)
instance Add Float Float Float where
  a `plus` b = a + b
instance Mul Int Int Int where
  a `mul` b = a * b
instance Mul Int Float Float where
  a `mul` b = (fromIntegral a) * b
data Vec a = Vec [a] deriving Show
data Mat a = Mat [[a]] deriving Show
-- Scalar-times-vector: lift an existing scalar Mul element-wise.
instance Mul a b b => Mul a (Vec b) (Vec b) where
  a `mul` (Vec vb) = Vec (map (mul a) vb)
-- | Mixed-type addition: an Int plus a Float, resolved to Float by the
-- 'Add Int Float Float' instance.
demo1 =
  let x = 3 :: Int
      y = 4 :: Float
  in x `plus` y
-- | Scalar-vector multiplication through the lifted 'Mul' instance.
demo2 =
  let scalar = 2 :: Int
      elems  = [4, 5, 6] :: [Int]
  in scalar `mul` Vec elems
-- | A container abstraction; the fundep @ce -> e@ lets the element
-- type be inferred from the collection type (but not vice versa).
class Collection e ce | ce -> e where
  empty :: ce
  insert :: e -> ce -> ce
  member :: e -> ce -> Bool
instance Collection Int [Int] where
  empty = []
  insert = (:)
  member = elem
-- demo = let x :: Int
-- x = 3
-- in insert x empty
-- | Insert an Int into an explicitly typed empty collection.
--
-- Fix: the original body was @insert x empty@, leaving the annotated
-- @ec@ unused.  Because the functional dependency runs only
-- @ce -> e@, the collection type of @insert x empty@ is ambiguous;
-- threading the annotated @ec :: [Int]@ through (as clearly intended)
-- pins the result to a list.
demo = let x :: Int
           x = 3
           ec :: [Int]
           ec = empty
       in insert x ec
main = print demo
| seckcoder/lang-learn | haskell/multi-param.hs | unlicense | 1,406 | 2 | 10 | 493 | 604 | 326 | 278 | 48 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module TriePathSpec
(spec)
where
import PPrelude
import TriePath
import Testing
import Data.Aeson
import qualified Data.ByteString as BS
import GHC.Generics
import Prelude hiding (seq)
import Test.Hspec
import Test.QuickCheck
-- | One vector from the @hexencodetest@ fixture: a nibble path, its
-- terminator flag, and the expected hex-prefix encoding.  Field names
-- 'seq'/'out' shadow Prelude names (the module hides 'seq').
data HexCase = HexCase
  { seq :: Path
  , term :: Bool
  , out :: ByteString
  } deriving (Show, Generic)
-- Nibbles appear as plain numbers in the JSON fixture; keep only the
-- low nibble via 'sndWord4'.
instance FromJSON Word4 where
  parseJSON x = sndWord4 <$> parseJSON x
instance FromJSON HexCase
-- | Properties: encode/decode round-trips and nibble packing, plus the
-- shared @hexencodetest@ example fixtures (loaded via 'testCommon').
spec :: Spec
spec = do
  it "should roundtrip paths" $ property $ \x y ->
    decodePath (encodePath x y) `shouldBe` Just (x, y)
  it "should recover the first nibble" $ property $ \x y ->
    fstWord4 (x `packWord8` y) `shouldBe` x
  it "should recover the second nibble" $ property $ \x y ->
    sndWord4 (x `packWord8` y) `shouldBe` y
  describe "performs example conversions" $ testCommon "hexencodetest" $ \test -> do
    it ("should encode to " ++ show (out test)) $
      let encoded = encodePath (term test) (seq test)
      in encoded `shouldBe` out test
    it ("should decode " ++ show (out test)) $
      let decoded = decodePath (out test)
      in decoded `shouldBe` Just (term test, seq test)
| haroldcarr/learn-haskell-coq-ml-etc | haskell/playpen/blockchain/ben-kirwin-merkle/test/TriePathSpec.hs | unlicense | 1,312 | 0 | 18 | 356 | 412 | 219 | 193 | 36 | 1 |
module Coins.A265579Spec (main, spec) where
import Test.Hspec
import Coins.A265579 (a265579)
-- | Run the A265579 spec under hspec.
main :: IO ()
main = hspec spec
-- | Compare the first 20 terms of A265579 against reference values.
spec :: Spec
spec = describe "A265579" $
  it "correctly computes the first 20 elements" $
    map a265579 [1..20] `shouldBe` expected
  where
    expected = [0,1,2,3,1,4,2,5,6,0,7,6,3,7,4,5,3,8,6,9]
| peterokagey/haskellOEIS | test/Coins/A265579Spec.hs | apache-2.0 | 345 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{- |
Module : Neovim.Quickfix
Description : API for interacting with the quickfix list
Copyright : (c) Sebastian Witte
License : Apache-2.0
Maintainer : woozletoff@gmail.com
Stability : experimental
Portability : GHC
-}
module Neovim.Quickfix
where
import Control.Applicative
import Control.Monad (void)
import Data.ByteString as BS (ByteString, all, elem)
import qualified Data.Map as Map
import Data.Maybe
import Data.MessagePack
import Data.Monoid
import Neovim.API.String
import Neovim.Classes
import Neovim.Context
import Prelude
-- | This is a wrapper around neovim's @setqflist()@. @strType@ can be any
-- string that you can append to (hence 'Monoid') that is also an instance
-- of 'NvimObject'. You can e.g. use the plain old 'String'.
-- The function's return value is discarded ('void').
setqflist :: (Monoid strType, NvimObject strType)
          => [QuickfixListItem strType]
          -> QuickfixAction
          -> Neovim r st ()
setqflist qs a =
    void $ vim_call_function "setqflist" [toObject qs, toObject a]
-- | Quickfix list item. The parameter names should mostly conform to those in
-- @:h setqflist()@. Some fields are merged to explicitly state mutually
-- exclusive elements or some other behavior of the fields.
--
-- see 'quickfixListItem' for creating a value of this type without typing too
-- much.
data QuickfixListItem strType = QFItem
    { bufOrFile :: Either Int strType
    -- ^ Since the filename is only used if no buffer can be specified, this
    -- field is a merge of @bufnr@ and @filename@.
    , lnumOrPattern :: Either Int strType
    -- ^ Line number or search pattern to locate the error.
    , col :: Maybe (Int, Bool)
    -- ^ A tuple of a column number and a boolean indicating which kind of
    -- indexing should be used. 'True' means that the visual column should be
    -- used. 'False' means to use the byte index.
    , nr :: Maybe Int
    -- ^ Error number.
    , text :: strType
    -- ^ Description of the error.
    , errorType :: QuickfixErrorType
    -- ^ Type of error.
    } deriving (Eq, Show)
-- | Severity of a quickfix entry, serialized as vim's \"W\"/\"E\" markers.
data QuickfixErrorType = Warning | Error
    deriving (Eq, Ord, Show, Read, Enum, Bounded)
instance NvimObject QuickfixErrorType where
    toObject = \case
        Warning -> ObjectBinary "W"
        Error -> ObjectBinary "E"
    -- NOTE(review): anything other than "W" -- including decode
    -- failures -- is mapped to 'Error' rather than propagated as a
    -- failure; confirm this lenient fallback is intended.
    fromObject o = case fromObject o :: Either String String of
        Right "W" -> return Warning
        Right "E" -> return Error
        _ -> return Error
-- | Create a 'QuickfixListItem' by providing the minimal amount of arguments
-- needed.
-- Remaining fields default to: no column, no error number, empty text,
-- and severity 'Error'.
quickfixListItem :: (Monoid strType)
                 => Either Int strType -- ^ buffer of file name
                 -> Either Int strType -- ^ line number or pattern
                 -> QuickfixListItem strType
quickfixListItem bufferOrFile lineOrPattern = QFItem
    { bufOrFile = bufferOrFile
    , lnumOrPattern = lineOrPattern
    , col = Nothing
    , nr = Nothing
    , text = mempty
    , errorType = Error
    }
-- Serializes to/from the dictionary shape expected by @setqflist()@.
-- The merged fields are split back into their vim keys: @bufnr@ vs
-- @filename@, @lnum@ vs @pattern@, and 'col' into @col@ + @vcol@.
instance (Monoid strType, NvimObject strType)
    => NvimObject (QuickfixListItem strType) where
    toObject QFItem{..} =
        (toObject :: Map.Map ByteString Object -> Object) . Map.fromList $
            [ either (\b -> ("bufnr", toObject b))
                     (\f -> ("filename", toObject f))
                     bufOrFile
            , either (\l -> ("lnum", toObject l))
                     (\p -> ("pattern", toObject p))
                     lnumOrPattern
            , ("type", toObject errorType)
            , ("text", toObject text)
            ] ++ catMaybes
            -- Optional keys are emitted only when present.
            [ (\n -> ("nr", toObject n)) <$> nr
            , (\(c,_) -> ("col", toObject c)) <$> col
            , (\(_,t) -> ("vcol", toObject t)) <$> col
            ]
    fromObject objectMap@(ObjectMap _) = do
        m <- fromObject objectMap
        -- Lookup helper: missing key becomes a Left.
        let l :: NvimObject o => ByteString -> Either String o
            l key = case Map.lookup key m of
                Just o -> fromObject o
                Nothing -> Left "Key not found."
        -- A buffer number takes precedence over a file name.
        bufOrFile <- case (l "bufnr", l "filename") of
            (Right b, _) -> return $ Left b
            (_, Right f) -> return $ Right f
            _ -> throwError "No buffer number or file name inside quickfix list item."
        lnumOrPattern <- case (l "lnum", l "pattern") of
            (Right lnum, _) -> return $ Left lnum
            (_, Right pat) -> return $ Right pat
            _ -> throwError "No line number or search pattern inside quickfix list item."
        -- Optional lookup helper: missing key is simply Nothing.
        let l' :: NvimObject o => ByteString -> Either String (Maybe o)
            l' key = case Map.lookup key m of
                Just o -> Just <$> fromObject o
                Nothing -> return Nothing
        -- vim uses 0 to mean "unset" for nr and col.
        nr <- l' "nr" >>= \case
            Just 0 -> return Nothing
            nr' -> return nr'
        c <- l' "col"
        v <- l' "vcol"
        -- Both col and vcol must be present, and col non-zero.
        let col = do
                c' <- c
                v' <- v
                case c' of
                    0 -> Nothing
                    _ -> Just (c',v')
        text <- fromMaybe mempty <$> l' "text"
        errorType <- fromMaybe Error <$> l' "type"
        return QFItem{..}
    fromObject o = throwError $ "Could not deserialize QuickfixListItem, expected a map but received: " ++ show o
-- | How @setqflist()@ should treat the existing quickfix list.
data QuickfixAction
    = Append -- ^ Add items to the current list (or create a new one if none exists).
    | Replace -- ^ Replace current list (or create a new one if none exists).
    | New -- ^ Create a new list.
    deriving (Eq, Ord, Enum, Bounded, Show)
-- Serialized as setqflist()'s action string: "a", "r", or "" for New.
instance NvimObject QuickfixAction where
    toObject = \case
        Append -> ObjectBinary "a"
        Replace -> ObjectBinary "r"
        New -> ObjectBinary ""
    fromObject o = case fromObject o of
        Right "a" -> return Append
        Right "r" -> return Replace
        -- A purely-whitespace string also counts as New.
        Right s | BS.all (`BS.elem` " \t\n\r") s -> return New
        _ -> Left "Could not convert to QuickfixAction"
| gibiansky/nvim-hs | library/Neovim/Quickfix.hs | apache-2.0 | 6,210 | 0 | 17 | 2,042 | 1,395 | 728 | 667 | 114 | 1 |
-- | Split a list into the elements before and from the given index.
-- Mirrors 'Prelude.splitAt' with the arguments flipped; an index
-- outside [0, length] simply yields an empty prefix or suffix.
splitAt' :: [a] -> Int -> ([a], [a])
splitAt' xs n = (prefix, suffix)
  where
    prefix = take n xs
    suffix = drop n xs
| plilja/h99 | p17.hs | apache-2.0 | 76 | 0 | 8 | 18 | 55 | 30 | 25 | 2 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Gfx (module GfxPP,Gfx.init,finish,loop,draw_text_default,add_log,viewport_default_2d) where
import System.Environment
import SDL
import SDL.Input.Keyboard
import Linear
import Linear.Affine ( Point(P) )
import Control.Monad
import Control.Monad.Loops
import Control.Monad.Primitive
import Control.Monad.IO.Class
import Control.Monad.State.Strict
import Data.Functor.Identity
import Foreign.C.Types
import Foreign.C.String
import Data.Word
import Data.Bits
import Data.Int
import qualified System.Directory
import Data.StateVar(($=))
import Prelude hiding ((.), log)
import Control.Category
import qualified Graphics.Rendering.OpenGL as GL
import qualified SDL.Raw as SDLR
import qualified Config
import Lens
import Util
import GLUtil
import GfxUtil
import GLLog
import GfxPP
-- | Initialize SDL video, enable file-drop events, create the window,
-- renderer and GL context from 'Config.config', and return the initial
-- combined (client, gfx) state.
init :: (Monad m, MonadIO m, Functor m) => cs -> m (AllData cs m)
init client_state = do
  -- let empty_handler = ( \ _ -> (return :: a -> m a) ())
  SDL.initialize [SDL.InitVideo]
  -- eventState with (-1) appears to query the current state, 1 enables
  -- drop-file events -- TODO confirm against SDL_EventState docs.
  old_state <- SDLR.eventState SDLR.SDL_DROPFILE (-1)
  _ <- SDLR.eventState SDLR.SDL_DROPFILE 1
  new_state <- SDLR.eventState SDLR.SDL_DROPFILE (-1)
  liftIO $ do
    putStrLn $ "changed event state from " ++ (show old_state) ++ " to " ++ (show new_state)
  let wsize = Config.window_size Config.config
  let winConfig =
        SDL.defaultWindow {SDL.windowPosition = SDL.Absolute (P (V2 100 100))
                          ,SDL.windowSize = wsize
                          ,SDL.windowOpenGL = Just (Config.opengl Config.config)
                          ,SDL.windowResizable = True}
  let rdrConfig =
        SDL.RendererConfig {SDL.rendererSoftware = False
                           ,SDL.rendererAccelerated = True
                           ,SDL.rendererPresentVSync = False
                           ,SDL.rendererTargetTexture = True}
  window <- liftIO $ SDL.createWindow (Config.window_title Config.config) winConfig
  renderer <- liftIO $ SDL.createRenderer window (-1) rdrConfig
  gl <- SDL.glCreateContext window
  SDL.glMakeCurrent window gl
  -- No vsync: frame pacing is done by the frame timer (60 fps target).
  SDL.glSetSwapInterval SDL.ImmediateUpdates
  framer <- frame_timer_new 60
  log <- create 1000
  return (client_state, GfxData {window = window
                                ,renderer = renderer
                                -- ,key_handler = KeyHandler ( \ _ -> return () )
                                -- ,draw_handler = DrawHandler ( \ _ -> return () )
                                -- ,drop_handler = DropHandler ( \ _ -> return () )
                                ,frame_timer = framer
                                ,glcontext = gl
                                ,log = log
                                ,window_size = wsize
                                ,bg_rot = 0.0
                                })
-- | Tear down the renderer and window and quit SDL.
-- Note: the GL context is not explicitly destroyed here.
finish :: MonadIO m => AllStateT cs m ()
finish = do
  (client_state, gfx_state) <- get
  liftIO $ do
    SDL.destroyRenderer (renderer gfx_state)
    SDL.destroyWindow (window gfx_state)
    SDL.quit
  return ()
-- | Main render/event loop.  Each iteration: pace the frame with the
-- frame timer, clear, draw the rotating background grid and the log
-- overlay, invoke the client draw handler with the frame delta in
-- seconds, swap buffers, and process pending events.  The loop exits
-- when 'process_events' returns True (quit requested); the overall fps
-- is printed on exit.
loop :: (Functor m, MonadIO m) => AllStateT cs m ()
loop = do
  gfx_renderer <- gets $ renderer . snd
  _ <- SDL.renderClear gfx_renderer
  _ <- SDL.renderPresent gfx_renderer
  GL.blend $= GL.Enabled
  GL.blendFunc $= (GL.SrcAlpha, GL.OneMinusSrcAlpha)
  GL.clearColor $= GL.Color4 0.5 0.5 0.5 (1.0 :: GL.GLfloat)
  -- Iterate until the body (process_events) yields True.
  iterateUntil Prelude.id $ do
    dts <- withLensT gfx_in_allstate $ do
      -- Frame pacing: wait out the remainder of the frame budget and
      -- measure the elapsed time in seconds.
      dts <- withLensT frame_timer_in_gfx $ do
        waittime <- frame_timer_wait
        SDL.delay $ fromIntegral waittime
        dt <- frame_timer_mark
        dts <- seconds_of_counter $ fromIntegral dt
        return dts
      liftIO $
        GL.clear [GL.ColorBuffer,GL.DepthBuffer]
      window_dims@(V2 width height) <- gets window_size
      draw_bg
      viewport_2d window_dims
      liftIO $ do
        GL.matrixMode $= GL.Modelview 0
        GL.loadIdentity
      -- GL.translate $ GL.Vector3 0 (fromIntegral height) ((-0.5) :: GL.GLfloat)
      withLensT log_in_gfx $ GLLog.render window_dims
      viewport_center window_dims
      return dts
    liftIO $ do
      GL.matrixMode $= GL.Modelview 0
      GL.loadIdentity
    gfx_draw_handler <- gets (draw_handler . snd)
    gfx_draw_handler dts
    gfx_window <- gets (window . snd)
    SDL.glSwapWindow gfx_window
    process_events
  gfx_state <- gets snd
  liftIO $ putStrLn $ "overall fps=" ++ (show (fps (frame_timer gfx_state)))
  return ()
-- | Draw the slowly rotating background grid (a tilted 21x21 line grid)
-- and advance the rotation angle by 0.037 per frame.
draw_bg :: (MonadIO m, Functor m) => GfxStateT cs m ()
draw_bg = do
  rot <- gets bg_rot
  size <- gets window_size
  -- Advance the stored rotation for the next frame.
  bg_rot_in_gfx != rot + 0.037
  viewport_center size
  -- GL.matrixMode $= GL.Projection
  -- GL.loadIdentity
  liftIO $ do
    -- Translucent white lines, tilted and pushed back from the camera.
    GL.color $ GL.Color4 1 1 1 (0.25 :: GL.GLfloat)
    GL.rotate 30 $ GL.Vector3 1 0 (0 :: GL.GLfloat)
    GL.translate $ GL.Vector3 0 (-200) (-200 :: GL.GLfloat)
    GL.rotate rot $ GL.Vector3 0 1 (0 :: GL.GLfloat)
    GL.matrixMode $= GL.Modelview 0
    GL.loadIdentity
    GL.renderPrimitive GL.Lines $ do
      forM_ [-10..10] $ \ x -> do
        vertex_float3 (x*10, 0, -100)
        vertex_float3 (x*10, 0, 100)
        vertex_float3 (-100, 0, x*10)
        vertex_float3 (100, 0, x*10)
-- | React to a window resize: update the GL viewport and record the
-- new dimensions in the gfx state.
-- NOTE(review): the 'new_size' as-pattern binding is unused.
window_resize :: (MonadIO m, Functor m, Integral e) => V2 e -> AllStateT cs m ()
window_resize (new_size@(V2 new_width new_height)) = do
  GL.viewport $= (GL.Position 0 0, GL.Size (fromIntegral new_width) (fromIntegral new_height))
  withLensT gfx_in_allstate $ do
    window_size_in_gfx != V2 (fromIntegral new_width) (fromIntegral new_height)
  return ()
-- | Draw a single line of text with the configured default font
-- (delegates to 'draw_text' with no extra options).
draw_text_default :: (MonadIO m, Functor m) => String -> AllStateT cs m ()
draw_text_default s =
  let font = Config.default_font in do
    draw_text font [s] Nothing
-- | Wait up to @t@ seconds for an SDL event; when one arrives, also
-- drain any further queued events.  On timeout, yield no events.
collect_events_timeout :: (Functor m, MonadIO m) => Float -> m [SDL.Event]
collect_events_timeout t = do
  first <- SDL.waitEventTimeout $ truncate (1000 * t)
  case first of
    Nothing -> return []
    Just ev -> (ev :) <$> collect_events
-- | Drain the SDL event queue without blocking, returning every event
-- that was pending.
collect_events :: (Functor m, MonadIO m) => m [SDL.Event]
collect_events =
  SDL.pollEvent >>= maybe (return []) (\ev -> (ev :) <$> collect_events)
-- | Poll and dispatch every pending event; the result is True when
-- any handler requested application shutdown.
process_events :: (Functor m, MonadIO m) => AllStateT cs m Bool
process_events = collect_events >>= anyM process_event
-- | Dispatch a single SDL event.  Returns True when the application
-- should quit (window close or Escape), False otherwise.
process_event :: (Functor m, MonadIO m) => SDL.Event -> AllStateT cs m Bool
process_event ev = do
  -- (client_state is currently unused here)
  (client_state, gfx_state) <- get
  case SDL.eventPayload ev of
    SDL.QuitEvent -> return True
    -- something dragged onto the window: log it and pass the dropped
    -- string to the client's drop handler
    SDL.DropEvent cs_string -> do
      s <- liftIO $ peekCAString cs_string
      add_log $ Item Message $ "drop event: " ++ (show s)
      drop_handler gfx_state s
      return False
    -- Escape quits
    SDL.KeyboardEvent _ SDL.KeyDown _ _ (SDL.Keysym _ KeycodeEscape _) ->
      return True
    -- any other key press goes to the client's key handler
    SDL.KeyboardEvent _ SDL.KeyDown _ _ thing ->
      do GfxPP.key_handler gfx_state thing
         return False
      -- add_log $ Item Message $ "key press event: " ++ (show kc)
      -- return False
    -- SDL.KeyboardEvent _ SDL.KeyDown _ _ (SDL.Keysym _ kc _) ->
    --   do GfxPP.key_handler gfx_state kc
    --      add_log $ Item Message $ "key press event: " ++ (show kc)
    --      return False
    --
    -- SDL.KeyboardEvent _ SDL.KeyDown _ _ (SDL.Keysym sc _ _) ->
    --   do GfxPP.key_handler gfx_state sc
    --      add_log $ Item Message $ "key press event: " ++ (show sc)
    --      return False
    -- resize: update viewport/state, then log to screen and stdout
    e@(SDL.WindowResized _ ns) -> do
      window_resize ns
      add_log $ Item Message $ "window resize: " ++ (show e)
      liftIO $ putStrLn $ "window resize: " ++ (show e)
      return False
    -- mouse/touch/gesture events are deliberately ignored
    SDL.MouseMotionEvent _ _ _ _ _ -> return False
    SDL.MouseButtonEvent _ SDL.MouseButtonDown _ _ _ _ _ -> return False
    SDL.TouchFingerEvent _ _ _ _ _ -> return False
    SDL.MultiGestureEvent _ _ _ _ _ -> return False
    -- everything else: log both on screen and on stdout, keep running
    e -> do
      add_log $ Item Message $ "unhandled event: " ++ (show e)
      liftIO $ putStrLn $ "unhandled event: " ++ (show e)
      return False
-- | Like 'process_events', but block up to @t@ seconds for the first
-- event before dispatching whatever arrived.
process_events_wait :: (MonadIO m, Functor m) => Float -> AllStateT cs m Bool
process_events_wait t = collect_events_timeout t >>= anyM process_event
--get_window_size :: MonadIO m => AllStateT cs m (V2 CInt)
--get_window_size =
-- withLens (window_in_gfx . gfx_in_allstate) $ do
-- w <- get
-- ws <- liftIO $ SDL.getWindowSize w
-- return ws
-- | Set up the viewport from the current window size.
-- NOTE(review): this currently reads the window size and then does
-- nothing with it -- the inner action is empty and width/height are
-- unused.  Confirm whether the viewport setup code was lost or this
-- is intentionally a no-op.
setup_viewport :: MonadIO m => AllStateT cs m ()
setup_viewport = do
  V2 width height <- gets $ window_size . snd
  withLensT gfx_in_allstate $ do
    return ()
-- | Append an item to the on-screen log held inside the graphics
-- state.
add_log :: (MonadIO m) => Item -> AllStateT cs m ()
add_log entry = withLensT (gfx_in_allstate . log_in_gfx) (add entry)
-- | Switch to a 2D viewport sized to the current window.
viewport_default_2d :: (MonadIO m) => AllStateT cs m ()
viewport_default_2d = do
  s <- gets $ window_size . snd
viewport_2d s | piotrm0/planar | Gfx.hs | artistic-2.0 | 8,786 | 0 | 20 | 2,327 | 2,652 | 1,317 | 1,335 | 199 | 10 |
{-# LANGUAGE TupleSections #-}
{-| Implementation of the Ganeti Query2 functionality.
-}
{-
Copyright (C) 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
{-
TODO: problems with the current model:
1. There's nothing preventing a result such as ResultEntry RSNormal
Nothing, or ResultEntry RSNoData (Just ...); ideally, we would
separate the the RSNormal and other types; we would need a new data
type for this, though, with JSON encoding/decoding
2. We don't have a way to 'bind' a FieldDefinition's field type
(e.q. QFTBool) with the actual value that is returned from a
FieldGetter. This means that the various getter functions can return
divergent types for the same field when evaluated against multiple
items. This is bad; it only works today because we 'hide' everything
behind JSValue, but is not nice at all. We should probably remove the
separation between FieldDefinition and the FieldGetter, and introduce
a new abstract data type, similar to QFT*, that contains the values
too.
-}
module Ganeti.Query.Query
( query
, queryFields
, queryCompat
, getRequestedNames
, nameField
, NoDataRuntime
, uuidField
) where
import Control.Arrow ((&&&))
import Control.DeepSeq
import Control.Monad (filterM, foldM, liftM, unless)
import Control.Monad.IO.Class
import Control.Monad.Trans (lift)
import qualified Data.Foldable as Foldable
import Data.List (intercalate, nub, find)
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Text.JSON as J
import Ganeti.BasicTypes
import Ganeti.Config
import Ganeti.Errors
import Ganeti.JQueue
import Ganeti.JSON
import Ganeti.Locking.Allocation (OwnerState, LockRequest(..), OwnerState(..))
import Ganeti.Locking.Locks (GanetiLocks, ClientId, lockName)
import Ganeti.Logging
import Ganeti.Objects
import Ganeti.Query.Common
import qualified Ganeti.Query.Export as Export
import qualified Ganeti.Query.FilterRules as FilterRules
import Ganeti.Query.Filter
import qualified Ganeti.Query.Instance as Instance
import qualified Ganeti.Query.Job as Query.Job
import qualified Ganeti.Query.Group as Group
import Ganeti.Query.Language
import qualified Ganeti.Query.Locks as Locks
import qualified Ganeti.Query.Network as Network
import qualified Ganeti.Query.Node as Node
import Ganeti.Query.Types
import Ganeti.Path
import Ganeti.THH.HsRPC (runRpcClient)
import Ganeti.Types
import Ganeti.Utils
import Ganeti.WConfd.Client (getWConfdClient, listLocksWaitingStatus)
-- | Collector type: how live data is gathered for the objects of a
-- query.  'CollectorSimple' receives the liveness flag, the config
-- and the objects; 'CollectorFieldAware' additionally receives the
-- list of requested field names.
data CollectorType a b
  = CollectorSimple (Bool -> ConfigData -> [a] -> IO [(a, b)])
  | CollectorFieldAware (Bool -> ConfigData -> [String] -> [a] -> IO [(a, b)])
-- * Helper functions
-- | Construct the placeholder definition used for a requested field
-- that does not exist: the getter is 'FieldUnknown' and the declared
-- kind is 'QFTUnknown'.
mkUnknownFDef :: String -> FieldData a b
mkUnknownFDef name =
  ( FieldDefinition name name QFTUnknown doc
  , FieldUnknown
  , QffNormal )
  where doc = "Unknown field '" ++ name ++ "'"
-- | Runs a field getter on the existing contexts.
--
-- Dispatches on the getter flavour: simple getters see only the item,
-- config getters additionally see the configuration, runtime getters
-- the runtime value, and unknown fields always yield the "unknown"
-- result entry.
execGetter :: ConfigData -> b -> a -> FieldGetter a b -> ResultEntry
execGetter _ _ item (FieldSimple getter) = getter item
execGetter cfg _ item (FieldConfig getter) = getter cfg item
execGetter _ rt item (FieldRuntime getter) = getter rt item
execGetter cfg rt item (FieldConfigRuntime getter) = getter cfg rt item
execGetter _ _ _ FieldUnknown = rsUnknown
-- * Main query execution
-- | Resolve the requested field names against the defined field map,
-- producing a field def/getter per request; names that are not
-- defined become "unknown field" placeholders.
getSelectedFields :: FieldMap a b  -- ^ Defined fields
                  -> [String]      -- ^ Requested fields
                  -> FieldList a b -- ^ Selected fields
getSelectedFields defined requested = map pick requested
  where pick fname = Map.findWithDefault (mkUnknownFDef fname) fname defined
-- | Check whether a list of queried fields contains live (runtime)
-- fields, i.e. fields whose values require data collection rather
-- than being derivable from the configuration alone.
needsLiveData :: [FieldGetter a b] -> Bool
needsLiveData = any isRuntimeField
-- | Checks whether we have requested exactly some names. This is a
-- simple wrapper over 'requestedNames' and 'nameField'; a 'Nothing'
-- result means the filter is more complex than a plain set of name
-- equalities.
needsNames :: Query -> Maybe [FilterValue]
needsNames (Query kind _ qfilter) = requestedNames (nameField kind) qfilter
-- | The field that plays the role of "name" for each query type:
-- jobs are identified by their id, exports by their node, and all
-- other items by a plain name field.
nameField :: ItemType -> FilterField
nameField itemType =
  case itemType of
    ItemTypeLuxi QRJob      -> "id"
    ItemTypeOpCode QRExport -> "node"
    _                       -> "name"
-- | The uuid field, or the best possible substitute, per query type:
-- jobs and exports have no uuid of their own, so their name field is
-- reused; everything else has a proper uuid field.
uuidField :: ItemType -> FilterField
uuidField itemType =
  case itemType of
    ItemTypeLuxi QRJob      -> nameField itemType
    ItemTypeOpCode QRExport -> nameField itemType
    _                       -> "uuid"
-- | Extracts all quoted strings from a list, silently dropping the
-- 'NumericValue' entries.
getAllQuotedStrings :: [FilterValue] -> [String]
getAllQuotedStrings vals = [ s | QuotedString s <- vals ]
-- | The names explicitly requested by a query, or the empty list when
-- the filter is too complex to reduce to a set of names.
getRequestedNames :: Query -> [String]
getRequestedNames = maybe [] getAllQuotedStrings . needsNames
-- | Compute the requested job IDs.  This is custom (rather than
-- reusing 'getRequestedNames') since job ids may legitimately arrive
-- either as quoted strings or as integers; duplicates are removed.
getRequestedJobIDs :: Filter FilterField -> Result [JobId]
getRequestedJobIDs qfilter =
  case requestedNames (nameField (ItemTypeLuxi QRJob)) qfilter of
    Nothing   -> Ok []
    Just []   -> Ok []
    Just vals -> liftM nub $ mapM convert vals
  where
    convert (QuotedString s) = makeJobIdS s
    convert (NumericValue i) = makeJobId $ fromIntegral i
-- | Generic query implementation for resources that are backed by
-- some configuration objects.
--
-- Different query types use the same 'genericQuery' function by providing
-- a collector function and a field map. The collector function retrieves
-- live data, and the field map provides both the requirements and the logic
-- necessary to retrieve the data needed for the field.
--
-- The 'b' type in the specification is the runtime. Every query can gather
-- additional live data related to the configuration object using the collector
-- to perform RPC calls.
--
-- The gathered data, or the failure to get it, is expressed through a runtime
-- object. The type of a runtime object is determined by every query type for
-- itself, and used exclusively by that query.
genericQuery :: FieldMap a b -- ^ Maps field names to field definitions
             -> CollectorType a b -- ^ Collector of live data
             -> (a -> String) -- ^ Object to name function
             -> (ConfigData -> Container a) -- ^ Get all objects from config
             -> (ConfigData -> String -> ErrorResult a) -- ^ Lookup object
             -> ConfigData -- ^ The config to run the query against
             -> Bool -- ^ Whether the query should be run live
             -> [String] -- ^ List of requested fields
             -> Filter FilterField -- ^ Filter field
             -> [String] -- ^ List of requested names
             -> IO (ErrorResult QueryResult)
genericQuery fieldsMap collector nameFn configFn getFn cfg
             live fields qfilter wanted =
  runResultT $ do
  cfilter <- toError $ compileFilter fieldsMap qfilter
  -- also resolve the fields that only appear in the filter, so their
  -- getters are available; only the first 'count' columns are returned
  let allfields = (++) fields . filter (not . (`elem` fields))
                  . ordNub $ filterArguments qfilter
      count = length fields
      selected = getSelectedFields fieldsMap allfields
      (fdefs, fgetters, _) = unzip3 selected
      live' = live && needsLiveData fgetters
  -- no explicit names: take every configured object, nicely sorted
  objects <- toError $ case wanted of
             [] -> Ok . niceSortKey nameFn .
                   Foldable.toList $ configFn cfg
             _ -> mapM (getFn cfg) wanted
  -- Run the first pass of the filter, without a runtime context; this will
  -- limit the objects that we'll contact for exports
  fobjects <- toError $
              filterM (\n -> evaluateQueryFilter cfg Nothing n cfilter) objects
  -- Gather the runtime data and filter the results again,
  -- based on the gathered data
  runtimes <- (case collector of
    CollectorSimple collFn -> lift $ collFn live' cfg fobjects
    CollectorFieldAware collFn -> lift $ collFn live' cfg allfields fobjects)
    >>= (toError . filterM (\(obj, runtime) ->
                             evaluateQueryFilter cfg (Just runtime) obj cfilter))
  -- evaluate all getters for every surviving object
  let fdata = map (\(obj, runtime) ->
                     map (execGetter cfg runtime obj) fgetters)
              runtimes
  return QueryResult { qresFields = take count fdefs
                     , qresData = map (take count) fdata }
-- | Dummy recollection of the data for a lock from the prefetched
-- data for all locks: instead of asking WConfd per lock, look each
-- lock name up in the already-fetched (owners, pending-requests)
-- snapshot.
recollectLocksData :: ( [(GanetiLocks, [(ClientId, OwnerState)])]
                      , [(Integer, ClientId, [LockRequest GanetiLocks])]
                      )
                   -> Bool -> ConfigData -> [String]
                   -> IO [(String, Locks.RuntimeData)]
recollectLocksData (allLocks, pending) _ _ =
  -- pending requests for a given lock name; an exclusive request wins
  -- over a shared one when a client requested both
  let getPending lock = pending >>= \(_, cid, req) ->
        let req' = filter ((==) lock . lockName . lockAffected) req
        in case () of
             _ | any ((==) (Just OwnExclusive) . lockRequestType) req'
                 -> [(cid, OwnExclusive)]
             _ | any ((==) (Just OwnShared) . lockRequestType) req'
                 -> [(cid, OwnShared)]
             _ -> []
      -- pair a lock name with its (owners, pending) data; unknown
      -- locks get an empty owner list
      lookuplock lock = (,) lock
                        . maybe ([], getPending lock)
                            (\(_, c) -> (c, getPending lock))
                        . find ((==) lock . lockName . fst)
                        $ allLocks
  in return . map lookuplock
-- | Main query execution function.  Jobs and locks have dedicated
-- paths (jobs live on disk, locks can only be queried live against
-- WConfd); everything else is delegated to 'queryInner'.
query :: ConfigData -- ^ The current configuration
      -> Bool -- ^ Whether to collect live data
      -> Query -- ^ The query (item, fields, filter)
      -> IO (ErrorResult QueryResult) -- ^ Result
query cfg live (Query (ItemTypeLuxi QRJob) fields qfilter) =
  queryJobs cfg live fields qfilter
query cfg live (Query (ItemTypeLuxi QRLock) fields qfilter) = runResultT $ do
  unless live (failError "Locks can only be queried live")
  cl <- liftIO $ do
    socketpath <- defaultWConfdSocket
    getWConfdClient socketpath
  -- one RPC fetches the state of every lock plus all waiting requests
  livedata <- runRpcClient listLocksWaitingStatus cl
  logDebug $ "Live state of all locks is " ++ show livedata
  -- the set of lock names: everything currently owned plus everything
  -- some request is waiting for
  let allLocks = Set.toList . Set.unions
        $ (Set.fromList . map fst $ fst livedata)
        : map (\(_, _, req) -> Set.fromList $ map lockAffected req)
          (snd livedata)
  answer <- liftIO $ genericQuery
    Locks.fieldsMap
    (CollectorSimple $ recollectLocksData livedata)
    id
    (const . GenericContainer . Map.fromList
     . map ((id &&& id) . lockName) $ allLocks)
    (const Ok)
    cfg live fields qfilter []
  toError answer
query cfg live qry = queryInner cfg live qry $ getRequestedNames qry
-- | Dummy data collection function: pairs every object with the
-- 'NoDataRuntime' marker without collecting anything.
dummyCollectLiveData :: Bool -> ConfigData -> [a] -> IO [(a, NoDataRuntime)]
dummyCollectLiveData _ _ items = return [ (i, NoDataRuntime) | i <- items ]
-- | Inner query execution function: dispatch on the query kind to
-- 'genericQuery' with the matching field map, live-data collector,
-- naming function and configuration accessors.
queryInner :: ConfigData -- ^ The current configuration
           -> Bool -- ^ Whether to collect live data
           -> Query -- ^ The query (item, fields, filter)
           -> [String] -- ^ Requested names
           -> IO (ErrorResult QueryResult) -- ^ Result
queryInner cfg live (Query (ItemTypeOpCode QRNode) fields qfilter) wanted =
  genericQuery Node.fieldsMap (CollectorFieldAware Node.collectLiveData)
    nodeName configNodes getNode cfg live fields qfilter wanted
queryInner cfg live (Query (ItemTypeOpCode QRInstance) fields qfilter) wanted =
  genericQuery Instance.fieldsMap (CollectorFieldAware Instance.collectLiveData)
    (fromMaybe "" . instName) configInstances getInstance cfg live
    fields qfilter
    wanted
queryInner cfg live (Query (ItemTypeOpCode QRGroup) fields qfilter) wanted =
  genericQuery Group.fieldsMap (CollectorSimple dummyCollectLiveData) groupName
    configNodegroups getGroup cfg live fields qfilter wanted
queryInner cfg live (Query (ItemTypeOpCode QRNetwork) fields qfilter) wanted =
  genericQuery Network.fieldsMap (CollectorSimple dummyCollectLiveData)
    (fromNonEmpty . networkName)
    configNetworks getNetwork cfg live fields qfilter wanted
queryInner cfg live (Query (ItemTypeOpCode QRExport) fields qfilter) wanted =
  genericQuery Export.fieldsMap (CollectorSimple Export.collectLiveData)
    nodeName configNodes getNode cfg live fields qfilter wanted
queryInner cfg live (Query (ItemTypeLuxi QRFilter) fields qfilter) wanted =
  genericQuery FilterRules.fieldsMap (CollectorSimple dummyCollectLiveData)
    frUuid configFilters getFilterRule cfg live fields qfilter wanted
-- any other query kind is not supported by this code path
queryInner _ _ (Query qkind _ _) _ =
  return . Bad . GenericError $ "Query '" ++ show qkind ++ "' not supported"
-- | Query jobs specific query function, needed as we need to accept
-- both 'QuotedString' and 'NumericValue' as wanted names, and as jobs
-- are loaded from the queue directory rather than the configuration.
queryJobs :: ConfigData -- ^ The current configuration
          -> Bool -- ^ Whether to collect live data
          -> [FilterField] -- ^ Item
          -> Filter FilterField -- ^ Filter
          -> IO (ErrorResult QueryResult) -- ^ Result
queryJobs cfg live fields qfilter = runResultT $ do
  rootdir <- lift queueDir
  wanted_names <- toErrorStr $ getRequestedJobIDs qfilter
  rjids <- case wanted_names of
    [] | live -> do -- we can check the filesystem for actual jobs
      let want_arch = Query.Job.wantArchived fields
      jobIDs <-
        withErrorT (BlockDeviceError .
                    (++) "Unable to fetch the job list: " . show) $
        liftIO (determineJobDirectories rootdir want_arch)
        >>= ResultT . getJobIDs
      return $ sortJobIDs jobIDs
    -- else we shouldn't look at the filesystem...
    v -> return v
  cfilter <- toError $ compileFilter Query.Job.fieldsMap qfilter
  let selected = getSelectedFields Query.Job.fieldsMap fields
      (fdefs, fgetters, _) = unzip3 selected
      (_, filtergetters, _) = unzip3 . getSelectedFields Query.Job.fieldsMap
                                $ Foldable.toList qfilter
      live' = live && needsLiveData (fgetters ++ filtergetters)
      disabled_data = Bad "live data disabled"
  -- runs first pass of the filter, without a runtime context; this
  -- will limit the jobs that we'll load from disk
  jids <- toError $
          filterM (\jid -> evaluateQueryFilter cfg Nothing jid cfilter) rjids
  -- here we run the runtime data gathering, filtering and evaluation,
  -- all in the same step, so that we don't keep jobs in memory longer
  -- than we need; we can't be fully lazy due to the multiple monad
  -- wrapping across different steps
  qdir <- lift queueDir
  fdata <- foldM
           -- big lambda, but we use many variables from outside it...
           (\lst jid -> do
              job <- lift $ if live'
                              then loadJobFromDisk qdir True jid
                              else return disabled_data
              pass <- toError $ evaluateQueryFilter cfg (Just job) jid cfilter
              let nlst = if pass
                           then let row = map (execGetter cfg job jid) fgetters
                                in rnf row `seq` row:lst
                           else lst
              -- evaluate nlst (to WHNF), otherwise we're too lazy
              return $! nlst
           ) [] jids
  -- rows were accumulated in reverse order above
  return QueryResult { qresFields = fdefs, qresData = reverse fdata }
-- | Helper for 'queryFields': with an empty request, describe every
-- known field (sorted by name); otherwise describe exactly the
-- requested names, unknown ones included as placeholders.
fieldsExtractor :: FieldMap a b -> [FilterField] -> QueryFieldsResult
fieldsExtractor fieldsMap fields =
  QueryFieldsResult [ defs | (defs, _, _) <- chosen ]
  where chosen
          | null fields = map snd . niceSortKey fst $ Map.toList fieldsMap
          | otherwise   = getSelectedFields fieldsMap fields
-- | Query fields call: return the field descriptions for a given
-- query kind, delegating to 'fieldsExtractor' with the kind's field
-- map; unsupported kinds yield a generic error.
queryFields :: QueryFields -> ErrorResult QueryFieldsResult
queryFields (QueryFields (ItemTypeOpCode QRNode) fields) =
  Ok $ fieldsExtractor Node.fieldsMap fields
queryFields (QueryFields (ItemTypeOpCode QRGroup) fields) =
  Ok $ fieldsExtractor Group.fieldsMap fields
queryFields (QueryFields (ItemTypeOpCode QRNetwork) fields) =
  Ok $ fieldsExtractor Network.fieldsMap fields
queryFields (QueryFields (ItemTypeLuxi QRJob) fields) =
  Ok $ fieldsExtractor Query.Job.fieldsMap fields
queryFields (QueryFields (ItemTypeOpCode QRExport) fields) =
  Ok $ fieldsExtractor Export.fieldsMap fields
queryFields (QueryFields (ItemTypeOpCode QRInstance) fields) =
  Ok $ fieldsExtractor Instance.fieldsMap fields
queryFields (QueryFields (ItemTypeLuxi QRLock) fields) =
  Ok $ fieldsExtractor Locks.fieldsMap fields
queryFields (QueryFields (ItemTypeLuxi QRFilter) fields) =
  Ok $ fieldsExtractor FilterRules.fieldsMap fields
queryFields (QueryFields qkind _) =
  Bad . GenericError $ "QueryFields '" ++ show qkind ++ "' not supported"
-- | Classic query converter. It gets a standard query result on input
-- and computes the classic style results (plain rows of JSON values,
-- without per-entry status codes).  Selecting any unknown field is an
-- error in this mode.
queryCompat :: QueryResult -> ErrorResult [[J.JSValue]]
queryCompat (QueryResult fields qrdata) =
  case map fdefName $ filter ((== QFTUnknown) . fdefKind) fields of
    [] -> Ok $ map (map (maybe J.JSNull J.showJSON . rentryValue)) qrdata
    unknown -> Bad $ OpPrereqError ("Unknown output fields selected: " ++
                                    intercalate ", " unknown) ECodeInval
| bitemyapp/ganeti | src/Ganeti/Query/Query.hs | bsd-2-clause | 19,151 | 0 | 24 | 4,647 | 4,005 | 2,112 | 1,893 | 283 | 4 |
{-# LANGUAGE ExistentialQuantification, TemplateHaskell, StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Implementation of the opcodes.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.OpCodes
( pyClasses
, OpCode(..)
, ReplaceDisksMode(..)
, DiskIndex
, mkDiskIndex
, unDiskIndex
, opID
, opReasonSrcID
, allOpIDs
, allOpFields
, opSummary
, CommonOpParams(..)
, defOpParams
, MetaOpCode(..)
, resolveDependencies
, wrapOpCode
, setOpComment
, setOpPriority
) where
import Control.Applicative
import Data.List (intercalate)
import Data.Map (Map)
import qualified Text.JSON
import Text.JSON (readJSON, JSObject, JSON, JSValue(..), fromJSObject)
import qualified Ganeti.Constants as C
import qualified Ganeti.Hs2Py.OpDoc as OpDoc
import Ganeti.JSON (DictObject(..), readJSONfromDict, showJSONtoDict)
import Ganeti.OpParams
import Ganeti.PyValue ()
import Ganeti.Query.Language (queryTypeOpToRaw)
import Ganeti.THH
import Ganeti.Types
-- | Disk indices render as their underlying integer.
instance PyValue DiskIndex where
  showValue = showValue . unDiskIndex
-- | 'IDiskParams' has no Python rendering; evaluating it is a
-- programming error.
instance PyValue IDiskParams where
  showValue _ = error "OpCodes.showValue(IDiskParams): unhandled case"
-- | Disk-recreation requests: "all disks" renders as an empty list,
-- the index and parameter variants as their payload.
instance PyValue RecreateDisksInfo where
  showValue RecreateDisksAll = "[]"
  showValue (RecreateDisksIndices is) = showValue is
  showValue (RecreateDisksParams is) = showValue is
-- | Only the empty modification list has a Python rendering; every
-- other constructor is deliberately an error (unhandled case).
instance PyValue a => PyValue (SetParamsMods a) where
  showValue SetParamsEmpty = "[]"
  showValue _ = error "OpCodes.showValue(SetParamsMods): unhandled case"
-- | Non-negative wrappers render as their underlying value.
instance PyValue a => PyValue (NonNegative a) where
  showValue = showValue . fromNonNegative
-- | Non-empty lists render as their underlying (plain) list.
instance PyValue a => PyValue (NonEmpty a) where
  showValue = showValue . fromNonEmpty
-- FIXME: should use the 'toRaw' function instead of being hardcoded or
-- perhaps use something similar to the NonNegative type instead of
-- using the declareSADT
-- | Export modes render as their Python constant values.
-- Bug fix: 'ExportModeRemote' previously rendered as the *local*
-- constant ('C.exportModeLocal'), making both modes indistinguishable
-- on the Python side.
instance PyValue ExportMode where
  showValue ExportModeLocal = show C.exportModeLocal
  showValue ExportModeRemote = show C.exportModeRemote
-- | Cluster-verify error codes render via their raw string form.
instance PyValue CVErrorCode where
  showValue = cVErrorCodeToRaw
-- | Optional verify checks render via their raw string form.
instance PyValue VerifyOptionalChecks where
  showValue = verifyOptionalChecksToRaw
-- | Not implemented; evaluating this is a programming error.
instance PyValue INicParams where
  showValue = error "instance PyValue INicParams: not implemented"
-- | JSON objects render as a Python-dict-style literal:
-- @{"k1":v1, "k2":v2}@, with each value rendered via its own
-- 'PyValue' instance.
instance PyValue a => PyValue (JSObject a) where
  showValue obj = "{" ++ intercalate ", " entries ++ "}"
    where entries = [ show key ++ ":" ++ showValue val
                    | (key, val) <- fromJSObject obj ]
-- | JSON values: objects render through the 'JSObject' instance,
-- everything else via 'show'.
instance PyValue JSValue where
  showValue (JSObject obj) = showValue obj
  showValue x = show x
-- | Result of opcodes that only submit jobs: per name, a list of
-- (success, error-message-or-job-id) pairs.
type JobIdListOnly = Map String [(Bool, Either String JobId)]
-- | Result of an instance multi-allocation: the per-instance
-- submission results plus a non-empty string (presumably an
-- informational message -- confirm against the Python side).
type InstanceMultiAllocResponse =
  ([(Bool, Either String JobId)], NonEmptyString)
-- | One field description in a query response; the three strings are
-- presumably name/title/doc around the 'TagKind' -- confirm ordering
-- against the Python definitions.
type QueryFieldDef =
  (NonEmptyString, NonEmptyString, TagKind, NonEmptyString)
-- | A full query response: field definitions plus rows of
-- (status code, value) cells.
type QueryResponse =
  ([QueryFieldDef], [[(QueryResultCode, JSValue)]])
-- | Response to a fields-only query: just the field definitions.
type QueryFieldsResponse = [QueryFieldDef]
-- | OpCode representation.
--
-- We only implement a subset of Ganeti opcodes: those which are actually used
-- in the htools codebase.
$(genOpCode "OpCode"
[ ("OpClusterPostInit",
[t| Bool |],
OpDoc.opClusterPostInit,
[],
[])
, ("OpClusterDestroy",
[t| NonEmptyString |],
OpDoc.opClusterDestroy,
[],
[])
, ("OpClusterQuery",
[t| JSObject JSValue |],
OpDoc.opClusterQuery,
[],
[])
, ("OpClusterVerify",
[t| JobIdListOnly |],
OpDoc.opClusterVerify,
[ pDebugSimulateErrors
, pErrorCodes
, pSkipChecks
, pIgnoreErrors
, pVerbose
, pOptGroupName
, pVerifyClutter
],
[])
, ("OpClusterVerifyConfig",
[t| Bool |],
OpDoc.opClusterVerifyConfig,
[ pDebugSimulateErrors
, pErrorCodes
, pIgnoreErrors
, pVerbose
],
[])
, ("OpClusterVerifyGroup",
[t| Bool |],
OpDoc.opClusterVerifyGroup,
[ pGroupName
, pDebugSimulateErrors
, pErrorCodes
, pSkipChecks
, pIgnoreErrors
, pVerbose
, pVerifyClutter
],
"group_name")
, ("OpClusterVerifyDisks",
[t| JobIdListOnly |],
OpDoc.opClusterVerifyDisks,
[ pOptGroupName
],
[])
, ("OpGroupVerifyDisks",
[t| (Map String String, [String], Map String [[String]]) |],
OpDoc.opGroupVerifyDisks,
[ pGroupName
],
"group_name")
, ("OpClusterRepairDiskSizes",
[t| [(NonEmptyString, NonNegative Int, NonEmptyString, NonNegative Int)]|],
OpDoc.opClusterRepairDiskSizes,
[ pInstances
],
[])
, ("OpClusterConfigQuery",
[t| [JSValue] |],
OpDoc.opClusterConfigQuery,
[ pOutputFields
],
[])
, ("OpClusterRename",
[t| NonEmptyString |],
OpDoc.opClusterRename,
[ pName
],
"name")
, ("OpClusterSetParams",
[t| Either () JobIdListOnly |],
OpDoc.opClusterSetParams,
[ pForce
, pHvState
, pDiskState
, pVgName
, pEnabledHypervisors
, pClusterHvParams
, pClusterBeParams
, pOsHvp
, pClusterOsParams
, pClusterOsParamsPrivate
, pGroupDiskParams
, pCandidatePoolSize
, pMaxRunningJobs
, pMaxTrackedJobs
, pUidPool
, pAddUids
, pRemoveUids
, pMaintainNodeHealth
, pPreallocWipeDisks
, pNicParams
, withDoc "Cluster-wide node parameter defaults" pNdParams
, withDoc "Cluster-wide ipolicy specs" pIpolicy
, pDrbdHelper
, pDefaultIAllocator
, pDefaultIAllocatorParams
, pNetworkMacPrefix
, pMasterNetdev
, pMasterNetmask
, pReservedLvs
, pHiddenOs
, pBlacklistedOs
, pUseExternalMipScript
, pEnabledDiskTemplates
, pModifyEtcHosts
, pClusterFileStorageDir
, pClusterSharedFileStorageDir
, pClusterGlusterStorageDir
, pInstallImage
, pInstanceCommunicationNetwork
, pZeroingImage
, pCompressionTools
, pEnabledUserShutdown
, pEnabledDataCollectors
, pDataCollectorInterval
, pDiagnoseDataCollectorFilename
, pMaintdRoundDelay
, pMaintdEnableBalancing
, pMaintdBalancingThreshold
],
[])
, ("OpClusterRedistConf",
[t| () |],
OpDoc.opClusterRedistConf,
[],
[])
, ("OpClusterActivateMasterIp",
[t| () |],
OpDoc.opClusterActivateMasterIp,
[],
[])
, ("OpClusterDeactivateMasterIp",
[t| () |],
OpDoc.opClusterDeactivateMasterIp,
[],
[])
, ("OpClusterRenewCrypto",
[t| () |],
OpDoc.opClusterRenewCrypto,
[ pNodeSslCerts
, pSshKeys
, pVerbose
, pDebug
],
[])
, ("OpQuery",
[t| QueryResponse |],
OpDoc.opQuery,
[ pQueryWhat
, pUseLocking
, pQueryFields
, pQueryFilter
],
"what")
, ("OpQueryFields",
[t| QueryFieldsResponse |],
OpDoc.opQueryFields,
[ pQueryWhat
, pQueryFieldsFields
],
"what")
, ("OpOobCommand",
[t| [[(QueryResultCode, JSValue)]] |],
OpDoc.opOobCommand,
[ pNodeNames
, withDoc "List of node UUIDs to run the OOB command against" pNodeUuids
, pOobCommand
, pOobTimeout
, pIgnoreStatus
, pPowerDelay
],
[])
, ("OpRestrictedCommand",
[t| [(Bool, String)] |],
OpDoc.opRestrictedCommand,
[ pUseLocking
, withDoc
"Nodes on which the command should be run (at least one)"
pRequiredNodes
, withDoc
"Node UUIDs on which the command should be run (at least one)"
pRequiredNodeUuids
, pRestrictedCommand
],
[])
, ("OpRepairCommand",
[t| String |],
OpDoc.opRepairCommand,
[ pNodeName
, pRepairCommand
, pInput
],
[])
, ("OpNodeRemove",
[t| () |],
OpDoc.opNodeRemove,
[ pNodeName
, pNodeUuid
],
"node_name")
, ("OpNodeAdd",
[t| () |],
OpDoc.opNodeAdd,
[ pNodeName
, pHvState
, pDiskState
, pPrimaryIp
, pSecondaryIp
, pReadd
, pNodeGroup
, pMasterCapable
, pVmCapable
, pNdParams
, pNodeSetup
],
"node_name")
, ("OpNodeQueryvols",
[t| [JSValue] |],
OpDoc.opNodeQueryvols,
[ pOutputFields
, withDoc "Empty list to query all nodes, node names otherwise" pNodes
],
[])
, ("OpNodeQueryStorage",
[t| [[JSValue]] |],
OpDoc.opNodeQueryStorage,
[ pOutputFields
, pOptStorageType
, withDoc
"Empty list to query all, list of names to query otherwise"
pNodes
, pStorageName
],
[])
, ("OpNodeModifyStorage",
[t| () |],
OpDoc.opNodeModifyStorage,
[ pNodeName
, pNodeUuid
, pStorageType
, pStorageName
, pStorageChanges
],
"node_name")
, ("OpRepairNodeStorage",
[t| () |],
OpDoc.opRepairNodeStorage,
[ pNodeName
, pNodeUuid
, pStorageType
, pStorageName
, pIgnoreConsistency
],
"node_name")
, ("OpNodeSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opNodeSetParams,
[ pNodeName
, pNodeUuid
, pForce
, pHvState
, pDiskState
, pMasterCandidate
, withDoc "Whether to mark the node offline" pOffline
, pDrained
, pAutoPromote
, pMasterCapable
, pVmCapable
, pSecondaryIp
, pNdParams
, pPowered
],
"node_name")
, ("OpNodePowercycle",
[t| Maybe NonEmptyString |],
OpDoc.opNodePowercycle,
[ pNodeName
, pNodeUuid
, pForce
],
"node_name")
, ("OpNodeMigrate",
[t| JobIdListOnly |],
OpDoc.opNodeMigrate,
[ pNodeName
, pNodeUuid
, pMigrationMode
, pMigrationLive
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pAllowRuntimeChgs
, pIgnoreIpolicy
, pIallocator
],
"node_name")
, ("OpNodeEvacuate",
[t| JobIdListOnly |],
OpDoc.opNodeEvacuate,
[ pEarlyRelease
, pNodeName
, pNodeUuid
, pRemoteNode
, pRemoteNodeUuid
, pIallocator
, pEvacMode
, pIgnoreSoftErrors
],
"node_name")
, ("OpInstanceCreate",
[t| [NonEmptyString] |],
OpDoc.opInstanceCreate,
[ pInstanceName
, pForceVariant
, pWaitForSync
, pNameCheck
, pIgnoreIpolicy
, pOpportunisticLocking
, pInstBeParams
, pInstDisks
, pOptDiskTemplate
, pOptGroupName
, pFileDriver
, pFileStorageDir
, pInstHvParams
, pHypervisor
, pIallocator
, pResetDefaults
, pIpCheck
, pIpConflictsCheck
, pInstCreateMode
, pInstNics
, pNoInstall
, pInstOsParams
, pInstOsParamsPrivate
, pInstOsParamsSecret
, pInstOs
, pPrimaryNode
, pPrimaryNodeUuid
, pSecondaryNode
, pSecondaryNodeUuid
, pSourceHandshake
, pSourceInstance
, pSourceShutdownTimeout
, pSourceX509Ca
, pSrcNode
, pSrcNodeUuid
, pSrcPath
, pBackupCompress
, pStartInstance
, pForthcoming
, pCommit
, pInstTags
, pInstanceCommunication
, pHelperStartupTimeout
, pHelperShutdownTimeout
],
"instance_name")
, ("OpInstanceMultiAlloc",
[t| InstanceMultiAllocResponse |],
OpDoc.opInstanceMultiAlloc,
[ pOpportunisticLocking
, pIallocator
, pMultiAllocInstances
],
[])
, ("OpInstanceReinstall",
[t| () |],
OpDoc.opInstanceReinstall,
[ pInstanceName
, pInstanceUuid
, pForceVariant
, pInstOs
, pTempOsParams
, pTempOsParamsPrivate
, pTempOsParamsSecret
],
"instance_name")
, ("OpInstanceRemove",
[t| () |],
OpDoc.opInstanceRemove,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreFailures
],
"instance_name")
, ("OpInstanceRename",
[t| NonEmptyString |],
OpDoc.opInstanceRename,
[ pInstanceName
, pInstanceUuid
, withDoc "New instance name" pNewName
, pNameCheck
, pIpCheck
],
[])
, ("OpInstanceStartup",
[t| () |],
OpDoc.opInstanceStartup,
[ pInstanceName
, pInstanceUuid
, pForce
, pIgnoreOfflineNodes
, pTempHvParams
, pTempBeParams
, pNoRemember
, pStartupPaused
-- timeout to cleanup a user down instance
, pShutdownTimeout
],
"instance_name")
, ("OpInstanceShutdown",
[t| () |],
OpDoc.opInstanceShutdown,
[ pInstanceName
, pInstanceUuid
, pForce
, pIgnoreOfflineNodes
, pShutdownTimeout'
, pNoRemember
, pAdminStateSource
],
"instance_name")
, ("OpInstanceReboot",
[t| () |],
OpDoc.opInstanceReboot,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreSecondaries
, pRebootType
],
"instance_name")
, ("OpInstanceReplaceDisks",
[t| () |],
OpDoc.opInstanceReplaceDisks,
[ pInstanceName
, pInstanceUuid
, pEarlyRelease
, pIgnoreIpolicy
, pReplaceDisksMode
, pReplaceDisksList
, pRemoteNode
, pRemoteNodeUuid
, pIallocator
],
"instance_name")
, ("OpInstanceFailover",
[t| () |],
OpDoc.opInstanceFailover,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreConsistency
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pIgnoreIpolicy
, pMigrationCleanup
, pIallocator
],
"instance_name")
, ("OpInstanceMigrate",
[t| () |],
OpDoc.opInstanceMigrate,
[ pInstanceName
, pInstanceUuid
, pMigrationMode
, pMigrationLive
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pAllowRuntimeChgs
, pIgnoreIpolicy
, pMigrationCleanup
, pIallocator
, pAllowFailover
, pIgnoreHVVersions
],
"instance_name")
, ("OpInstanceMove",
[t| () |],
OpDoc.opInstanceMove,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreIpolicy
, pMoveTargetNode
, pMoveTargetNodeUuid
, pMoveCompress
, pIgnoreConsistency
],
"instance_name")
, ("OpInstanceConsole",
[t| JSObject JSValue |],
OpDoc.opInstanceConsole,
[ pInstanceName
, pInstanceUuid
],
"instance_name")
, ("OpInstanceActivateDisks",
[t| [(NonEmptyString, NonEmptyString, NonEmptyString)] |],
OpDoc.opInstanceActivateDisks,
[ pInstanceName
, pInstanceUuid
, pIgnoreDiskSize
, pWaitForSyncFalse
],
"instance_name")
, ("OpInstanceDeactivateDisks",
[t| () |],
OpDoc.opInstanceDeactivateDisks,
[ pInstanceName
, pInstanceUuid
, pForce
],
"instance_name")
, ("OpInstanceRecreateDisks",
[t| () |],
OpDoc.opInstanceRecreateDisks,
[ pInstanceName
, pInstanceUuid
, pRecreateDisksInfo
, withDoc "New instance nodes, if relocation is desired" pNodes
, withDoc "New instance node UUIDs, if relocation is desired" pNodeUuids
, pIallocator
],
"instance_name")
, ("OpInstanceQueryData",
[t| JSObject (JSObject JSValue) |],
OpDoc.opInstanceQueryData,
[ pUseLocking
, pInstances
, pStatic
],
[])
, ("OpInstanceSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opInstanceSetParams,
[ pInstanceName
, pInstanceUuid
, pForce
, pForceVariant
, pIgnoreIpolicy
, pInstParamsNicChanges
, pInstParamsDiskChanges
, pInstBeParams
, pRuntimeMem
, pInstHvParams
, pOptDiskTemplate
, pExtParams
, pFileDriver
, pFileStorageDir
, pPrimaryNode
, pPrimaryNodeUuid
, withDoc "Secondary node (used when changing disk template)" pRemoteNode
, withDoc
"Secondary node UUID (used when changing disk template)"
pRemoteNodeUuid
, pIallocator
, pOsNameChange
, pInstOsParams
, pInstOsParamsPrivate
, pWaitForSync
, withDoc "Whether to mark the instance as offline" pOffline
, pIpConflictsCheck
, pHotplug
, pHotplugIfPossible
, pOptInstanceCommunication
],
"instance_name")
, ("OpInstanceGrowDisk",
[t| () |],
OpDoc.opInstanceGrowDisk,
[ pInstanceName
, pInstanceUuid
, pWaitForSync
, pDiskIndex
, pDiskChgAmount
, pDiskChgAbsolute
, pIgnoreIpolicy
],
"instance_name")
, ("OpInstanceChangeGroup",
[t| JobIdListOnly |],
OpDoc.opInstanceChangeGroup,
[ pInstanceName
, pInstanceUuid
, pEarlyRelease
, pIallocator
, pTargetGroups
],
"instance_name")
, ("OpGroupAdd",
[t| Either () JobIdListOnly |],
OpDoc.opGroupAdd,
[ pGroupName
, pNodeGroupAllocPolicy
, pGroupNodeParams
, pGroupDiskParams
, pHvState
, pDiskState
, withDoc "Group-wide ipolicy specs" pIpolicy
],
"group_name")
, ("OpGroupAssignNodes",
[t| () |],
OpDoc.opGroupAssignNodes,
[ pGroupName
, pForce
, withDoc "List of nodes to assign" pRequiredNodes
, withDoc "List of node UUIDs to assign" pRequiredNodeUuids
],
"group_name")
, ("OpGroupSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opGroupSetParams,
[ pGroupName
, pNodeGroupAllocPolicy
, pGroupNodeParams
, pGroupDiskParams
, pHvState
, pDiskState
, withDoc "Group-wide ipolicy specs" pIpolicy
],
"group_name")
, ("OpGroupRemove",
[t| () |],
OpDoc.opGroupRemove,
[ pGroupName
],
"group_name")
, ("OpGroupRename",
[t| NonEmptyString |],
OpDoc.opGroupRename,
[ pGroupName
, withDoc "New group name" pNewName
],
[])
, ("OpGroupEvacuate",
[t| JobIdListOnly |],
OpDoc.opGroupEvacuate,
[ pGroupName
, pEarlyRelease
, pIallocator
, pTargetGroups
, pSequential
, pForceFailover
],
"group_name")
, ("OpOsDiagnose",
[t| [[JSValue]] |],
OpDoc.opOsDiagnose,
[ pOutputFields
, withDoc "Which operating systems to diagnose" pNames
],
[])
, ("OpExtStorageDiagnose",
[t| [[JSValue]] |],
OpDoc.opExtStorageDiagnose,
[ pOutputFields
, withDoc "Which ExtStorage Provider to diagnose" pNames
],
[])
, ("OpBackupPrepare",
[t| Maybe (JSObject JSValue) |],
OpDoc.opBackupPrepare,
[ pInstanceName
, pInstanceUuid
, pExportMode
],
"instance_name")
, ("OpBackupExport",
[t| (Bool, [Bool]) |],
OpDoc.opBackupExport,
[ pInstanceName
, pInstanceUuid
, pBackupCompress
, pShutdownTimeout
, pExportTargetNode
, pExportTargetNodeUuid
, pShutdownInstance
, pRemoveInstance
, pIgnoreRemoveFailures
, defaultField [| ExportModeLocal |] pExportMode
, pX509KeyName
, pX509DestCA
, pZeroFreeSpace
, pZeroingTimeoutFixed
, pZeroingTimeoutPerMiB
, pLongSleep
],
"instance_name")
, ("OpBackupRemove",
[t| () |],
OpDoc.opBackupRemove,
[ pInstanceName
, pInstanceUuid
],
"instance_name")
, ("OpTagsGet",
[t| [NonEmptyString] |],
OpDoc.opTagsGet,
[ pTagsObject
, pUseLocking
, withDoc "Name of object to retrieve tags from" pTagsName
],
"name")
, ("OpTagsSearch",
[t| [(NonEmptyString, NonEmptyString)] |],
OpDoc.opTagsSearch,
[ pTagSearchPattern
],
"pattern")
, ("OpTagsSet",
[t| () |],
OpDoc.opTagsSet,
[ pTagsObject
, pTagsList
, withDoc "Name of object where tag(s) should be added" pTagsName
],
[])
, ("OpTagsDel",
[t| () |],
OpDoc.opTagsDel,
[ pTagsObject
, pTagsList
, withDoc "Name of object where tag(s) should be deleted" pTagsName
],
[])
, ("OpTestDelay",
[t| () |],
OpDoc.opTestDelay,
[ pDelayDuration
, pDelayOnMaster
, pDelayOnNodes
, pDelayOnNodeUuids
, pDelayRepeat
, pDelayInterruptible
, pDelayNoLocks
],
"duration")
, ("OpTestAllocator",
[t| String |],
OpDoc.opTestAllocator,
[ pIAllocatorDirection
, pIAllocatorMode
, pIAllocatorReqName
, pIAllocatorNics
, pIAllocatorDisks
, pHypervisor
, pIallocator
, pInstTags
, pIAllocatorMemory
, pIAllocatorVCpus
, pIAllocatorOs
, pOptDiskTemplate
, pIAllocatorInstances
, pIAllocatorEvacMode
, pTargetGroups
, pIAllocatorSpindleUse
, pIAllocatorCount
, pOptGroupName
],
"iallocator")
, ("OpTestJqueue",
[t| Bool |],
OpDoc.opTestJqueue,
[ pJQueueNotifyWaitLock
, pJQueueNotifyExec
, pJQueueLogMessages
, pJQueueFail
],
[])
, ("OpTestOsParams",
[t| () |],
OpDoc.opTestOsParams,
[ pInstOsParamsSecret
],
[])
, ("OpTestDummy",
[t| () |],
OpDoc.opTestDummy,
[ pTestDummyResult
, pTestDummyMessages
, pTestDummyFail
, pTestDummySubmitJobs
],
[])
, ("OpNetworkAdd",
[t| () |],
OpDoc.opNetworkAdd,
[ pNetworkName
, pNetworkAddress4
, pNetworkGateway4
, pNetworkAddress6
, pNetworkGateway6
, pNetworkMacPrefix
, pNetworkAddRsvdIps
, pIpConflictsCheck
, withDoc "Network tags" pInstTags
],
"network_name")
, ("OpNetworkRemove",
[t| () |],
OpDoc.opNetworkRemove,
[ pNetworkName
, pForce
],
"network_name")
, ("OpNetworkSetParams",
[t| () |],
OpDoc.opNetworkSetParams,
[ pNetworkName
, pNetworkGateway4
, pNetworkAddress6
, pNetworkGateway6
, pNetworkMacPrefix
, withDoc "Which external IP addresses to reserve" pNetworkAddRsvdIps
, pNetworkRemoveRsvdIps
],
"network_name")
, ("OpNetworkConnect",
[t| () |],
OpDoc.opNetworkConnect,
[ pGroupName
, pNetworkName
, pNetworkMode
, pNetworkLink
, pNetworkVlan
, pIpConflictsCheck
],
"network_name")
, ("OpNetworkDisconnect",
[t| () |],
OpDoc.opNetworkDisconnect,
[ pGroupName
, pNetworkName
],
"network_name")
])
-- | Ordering over opcodes, e.g. for use as keys in ordered containers.
deriving instance Ord OpCode
-- | Returns the OP_ID for a given opcode value.
$(genOpID ''OpCode "opID")
-- | A list of all defined/supported opcode IDs.
$(genAllOpIDs ''OpCode "allOpIDs")
-- | Convert the opcode name to lowercase with underscores and strip
-- the @Op@ prefix.
$(genOpLowerStrip (C.opcodeReasonSrcOpcode ++ ":") ''OpCode "opReasonSrcID")
-- | JSON (de)serialisation of opcodes via their dictionary
-- representation.
instance JSON OpCode where
  readJSON = readJSONfromDict
  showJSON = showJSONtoDict
-- | Generates the summary value for an opcode.
--
-- The summary is the opcode's most characteristic argument (instance,
-- node, group or network name, query kind, delay duration, ...); it is
-- appended to the opcode ID by 'opSummary'.  Opcodes without such an
-- argument fall through to the final catch-all clause and yield
-- 'Nothing'.
opSummaryVal :: OpCode -> Maybe String
opSummaryVal OpClusterVerifyGroup { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupVerifyDisks { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpClusterRename { opName = s } = Just (fromNonEmpty s)
opSummaryVal OpQuery { opWhat = s } = Just (queryTypeOpToRaw s)
opSummaryVal OpQueryFields { opWhat = s } = Just (queryTypeOpToRaw s)
opSummaryVal OpNodeRemove { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeAdd { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeModifyStorage { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpRepairNodeStorage { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeSetParams { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodePowercycle { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeMigrate { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeEvacuate { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpInstanceCreate { opInstanceName = s } = Just s
opSummaryVal OpInstanceReinstall { opInstanceName = s } = Just s
opSummaryVal OpInstanceRemove { opInstanceName = s } = Just s
-- FIXME: instance rename should show both names; currently it shows none
-- opSummaryVal OpInstanceRename { opInstanceName = s } = Just s
opSummaryVal OpInstanceStartup { opInstanceName = s } = Just s
opSummaryVal OpInstanceShutdown { opInstanceName = s } = Just s
opSummaryVal OpInstanceReboot { opInstanceName = s } = Just s
opSummaryVal OpInstanceReplaceDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceFailover { opInstanceName = s } = Just s
opSummaryVal OpInstanceMigrate { opInstanceName = s } = Just s
opSummaryVal OpInstanceMove { opInstanceName = s } = Just s
opSummaryVal OpInstanceConsole { opInstanceName = s } = Just s
opSummaryVal OpInstanceActivateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceDeactivateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceRecreateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceSetParams { opInstanceName = s } = Just s
opSummaryVal OpInstanceGrowDisk { opInstanceName = s } = Just s
opSummaryVal OpInstanceChangeGroup { opInstanceName = s } = Just s
opSummaryVal OpGroupAdd { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupAssignNodes { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupSetParams { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupRemove { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupEvacuate { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpBackupPrepare { opInstanceName = s } = Just s
opSummaryVal OpBackupExport { opInstanceName = s } = Just s
opSummaryVal OpBackupRemove { opInstanceName = s } = Just s
opSummaryVal OpTagsGet { opKind = s } = Just (show s)
opSummaryVal OpTagsSearch { opTagSearchPattern = s } = Just (fromNonEmpty s)
opSummaryVal OpTestDelay { opDelayDuration = d } = Just (show d)
opSummaryVal OpTestAllocator { opIallocator = s } =
  -- FIXME: Python doesn't handle None fields well, so we have behave the same
  Just $ maybe "None" fromNonEmpty s
opSummaryVal OpNetworkAdd { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkRemove { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkSetParams { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkConnect { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkDisconnect { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal _ = Nothing
-- | Computes the summary of the opcode: the opcode ID with its @OP_@
-- prefix stripped, optionally followed by the summary value from
-- 'opSummaryVal' in parentheses.
opSummary :: OpCode -> String
opSummary op = maybe suffix decorate (opSummaryVal op)
  where
    suffix = drop 3 (opID op)
    decorate val = suffix ++ "(" ++ val ++ ")"
-- | Generic\/common opcode parameters.
--
-- Dry-run flag, debug level, submit priority, job dependencies,
-- free-form comment and the reason list; these accompany every opcode
-- (see 'MetaOpCode').
$(buildObject "CommonOpParams" "op"
  [ pDryRun
  , pDebugLevel
  , pOpPriority
  , pDependencies
  , pComment
  , pReason
  ])
-- Ordering instance, e.g. for use in ordered containers.
deriving instance Ord CommonOpParams
-- | Default common parameter values.
--
-- No dry-run, no debug level override, normal priority, no
-- dependencies, no comment and an empty reason list.
defOpParams :: CommonOpParams
defOpParams =
  CommonOpParams { opDryRun = Nothing
                 , opDebugLevel = Nothing
                 , opPriority = OpPrioNormal
                 , opDepends = Nothing
                 , opComment = Nothing
                 , opReason = []
                 }
-- | Resolve relative dependencies to absolute ones, given the job ID.
-- Parameters without a dependency list are returned untouched.
resolveDependsCommon :: (Monad m) => CommonOpParams -> JobId -> m CommonOpParams
resolveDependsCommon params jid =
  case opDepends params of
    Nothing -> return params
    Just deps -> do
      resolved <- mapM (`absoluteJobDependency` jid) deps
      return params { opDepends = Just resolved }
-- | The top-level opcode type.
--
-- Pairs the common (generic) parameters with the opcode-specific
-- payload; serialised flat by the 'DictObject' instance below.
data MetaOpCode = MetaOpCode { metaParams :: CommonOpParams
                             , metaOpCode :: OpCode
                             } deriving (Show, Eq, Ord)
-- | Resolve relative dependencies to absolute ones, given the job Id.
-- Only the common parameters are affected; the wrapped opcode is kept
-- as-is.
resolveDependencies :: (Monad m) => MetaOpCode -> JobId -> m MetaOpCode
resolveDependencies (MetaOpCode common op) jid = do
  common' <- resolveDependsCommon common jid
  return (MetaOpCode common' op)
-- | Serialise as the concatenation of the common-parameter and
-- opcode-specific dictionaries; deserialise by letting each part pick
-- its own keys out of the same (flat) dictionary.
instance DictObject MetaOpCode where
  toDict (MetaOpCode meta op) = toDict meta ++ toDict op
  fromDictWKeys dict = MetaOpCode <$> fromDictWKeys dict
                                  <*> fromDictWKeys dict
-- | JSON round-tripping via the dictionary representation.
instance JSON MetaOpCode where
  readJSON = readJSONfromDict
  showJSON = showJSONtoDict
-- | Wraps an 'OpCode' with the default parameters to build a
-- 'MetaOpCode'.
wrapOpCode :: OpCode -> MetaOpCode
wrapOpCode op = MetaOpCode { metaParams = defOpParams, metaOpCode = op }
-- | Sets the comment on a meta opcode, leaving all other common
-- parameters and the wrapped opcode untouched.
setOpComment :: String -> MetaOpCode -> MetaOpCode
setOpComment comment mopc =
  mopc { metaParams = (metaParams mopc) { opComment = Just comment } }
-- | Sets the priority on a meta opcode, leaving all other common
-- parameters and the wrapped opcode untouched.
setOpPriority :: OpSubmitPriority -> MetaOpCode -> MetaOpCode
setOpPriority prio mopc =
  mopc { metaParams = (metaParams mopc) { opPriority = prio } }
| bitemyapp/ganeti | src/Ganeti/OpCodes.hs | bsd-2-clause | 29,960 | 0 | 12 | 7,714 | 5,902 | 3,618 | 2,284 | 957 | 2 |
import Test.Mafia.Main
import qualified Test.IO.Mafia.Chaos
import qualified Test.IO.Mafia.Flock
-- | Entry point: run the Flock and Chaos IO test suites (in that
-- order) under the disorder test runner.
main :: IO ()
main = disorderMain suites
  where
    suites =
      [ Test.IO.Mafia.Flock.tests
      , Test.IO.Mafia.Chaos.tests
      ]
| ambiata/mafia | test/test-io.hs | bsd-3-clause | 217 | 0 | 7 | 48 | 57 | 37 | 20 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module IRTS.Cil.FFI
( parseDescriptor
, parseForeignFunctionType
, assemblyNameAndTypeFrom
, foreignType
, isIO
, CILForeign(..)
, ForeignFunctionType(..)
) where
import qualified Data.HashMap.Strict as HM
import Data.Maybe
import Data.Monoid ((<>))
import Data.Text hiding (map, init, last)
import IRTS.Lang (FDesc(..))
import Idris.Core.TT (Name(..), sUN)
import Language.Cil (PrimitiveType(..), MethodRef(..), CallConv(..), Version)
import Language.Cil.Pretty (pr)
import IRTS.Cil.Parsers (parseVersion)
-- | Shorthand for CIL primitive types as used throughout this module.
type CILTy = PrimitiveType
-- | The different kinds of CIL foreign bindings recognised by the
-- backend; values are produced by 'parseDescriptor' from the FFI
-- descriptors emitted by the Idris frontend.
data CILForeign
  = CILInstance !String -- ^ Instance method call, by name.
  | CILInstanceCustom !String ![CILTy] !CILTy -- ^ Instance method with explicit parameter\/return types.
  | CILInstanceField !String -- ^ Instance field access, by name.
  | CILStatic !CILTy !String -- ^ Static method on the given declaring type.
  | CILStaticField !CILTy !String -- ^ Static field on the given declaring type.
  | CILCall !MethodRef -- ^ Call through an explicit method reference.
  | CILConstructor -- ^ Constructor invocation.
  | CILTypeOf !CILTy -- ^ Carries a type (presumably a CIL type literal -- confirm in the code generator).
  | CILDelegate !CILTy -- ^ Delegate wrapping for the given type.
  | CILExport !String -- ^ Exported under the given name (not built by 'parseDescriptor').
  | CILDefault -- ^ Default\/fallback binding (not built by 'parseDescriptor').
  deriving Show
-- | Decode an FFI descriptor into a 'CILForeign' binding.
--
-- Each clause matches one FFI constructor by name (via 'sUN'); several
-- clauses share the same pattern shape and differ only in the guard, so
-- the guard failure falls through to the next clause.  Unrecognised
-- descriptors abort with 'error'.
parseDescriptor :: FDesc -> CILForeign
parseDescriptor (FApp ffi [declType, FStr fn])
  | ffi == sUN "CILStatic" = CILStatic (foreignType declType) fn
parseDescriptor (FApp ffi [declType, FStr fn])
  | ffi == sUN "CILStaticField" = CILStaticField (foreignType declType) fn
parseDescriptor (FApp ffi [FStr fn])
  | ffi == sUN "CILInstance" = CILInstance fn
parseDescriptor (FApp ffi [FStr fn, paramTys, retTy])
  | ffi == sUN "CILInstanceCustom" = CILInstanceCustom fn (foreignTypeList paramTys) (foreignType retTy)
parseDescriptor (FApp ffi [FStr fn])
  | ffi == sUN "CILInstanceField" = CILInstanceField fn
parseDescriptor (FCon ffi)
  | ffi == sUN "CILConstructor" = CILConstructor
parseDescriptor (FApp ffi [ty])
  | ffi == sUN "CILDelegate" = CILDelegate (foreignType ty)
parseDescriptor (FApp ffi [ty])
  | ffi == sUN "CILTypeOf" = CILTypeOf (foreignType ty)
parseDescriptor (FApp ffi [method])
  | ffi == sUN "CILCall" = CILCall (foreignMethod method)
parseDescriptor e = error $ "invalid foreign descriptor: " <> show e
-- | Decode a method-reference descriptor.  Only the @CILGenMethod@ form
-- (generic method instance: calling convention, declaring type, name,
-- type arguments, parameter types, return type) is supported; anything
-- else aborts with 'error'.
foreignMethod :: FDesc -> MethodRef
foreignMethod (FApp ctor [cc, declTy, FStr methodName, typeArgs, paramTys, retTy]) | ctor == sUN "CILGenMethod" =
  GenericMethodInstance (foreignCallConvention cc)
                        (foreignType declTy) methodName
                        (foreignTypeList typeArgs) (foreignTypeList paramTys) (foreignType retTy)
foreignMethod e = error $ "invalid foreign method descriptor: " <> show e
-- | Decode a calling-convention tag: @CCCStatic@ contributes no flags,
-- @CCCInstance@ marks an instance call.  Anything else aborts with
-- 'error'.
foreignCallConvention :: FDesc -> [CallConv]
foreignCallConvention desc = case desc of
  FCon n | n == sUN "CCCStatic"   -> []
         | n == sUN "CCCInstance" -> [CcInstance]
  _ -> error $ "invalid foreign calling convention: " <> show desc
-- | Whether the foreign descriptor is wrapped in IO.
isIO :: FDesc -> Bool
isIO desc = case desc of
  FIO _ -> True
  _     -> False
-- | Translate a type descriptor into a CIL 'PrimitiveType'.
--
-- Wrapper descriptors (@CIL_CILT@, @CIL_MaybeT@, @CIL_FnT@, ...) simply
-- recurse into their payload; @CILTyRef@\/@CILTyVal@ special-case the
-- builtin names (object, string, void, float32, ...) before falling
-- back to general reference\/value types.  Unrecognised descriptors
-- abort with 'error'.
foreignType :: FDesc -> PrimitiveType
foreignType (FStr exportedDataType) = ValueType "" exportedDataType
foreignType (FCon t) = foreignTypeByName t
foreignType (FIO t) = foreignType t
foreignType (FApp cilTy [_, ty])
  | cilTy == sUN "CIL_Array" = foreignType ty
foreignType (FApp cilTy [ty, _])
  | cilTy == sUN "TypedArray" = foreignType ty
foreignType (FApp cilTy [ty])
  | cilTy == sUN "CIL_CILT" = foreignType ty
foreignType (FApp cilTy [ty])
  | cilTy == sUN "CIL" = foreignType ty
foreignType (FApp cilTy [_, ty, _])
  | cilTy == sUN "CIL_FnT" = foreignType ty
foreignType (FApp cilTy [_, ty])
  | cilTy == sUN "CIL_Ref" = ByRef $ foreignType ty
foreignType (FApp cilTy [_, ty])
  | cilTy == sUN "CIL_MaybeT" = foreignType ty
foreignType (FApp cilTy [_, ty])
  | cilTy == sUN "CIL_EnumT" = foreignType ty
foreignType (FApp cilTy [ty])
  | cilTy == sUN "CILTyArr" = Array $ foreignType ty
foreignType (FApp cilTy [FStr assembly, FStr typeName])
  | cilTy == sUN "CILTyRef" =
    case (assembly, typeName) of
      ("", "object") -> Object
      ("", "string") -> String
      ("", "void") -> Void
      _ -> ReferenceType assembly typeName
foreignType (FApp cilTy [FStr assembly, FStr typeName])
  | cilTy == sUN "CILTyVal" =
    case (assembly, typeName) of
      ("", "float32") -> Float32
      ("", "float64") -> Double64
      ("", "bool") -> Bool
      ("", "int") -> Int32
      ("", "char") -> Char
      _ -> ValueType assembly typeName
foreignType (FApp cilTy [_, FCon (UN cilIntTy)])
  | cilTy == sUN "CIL_IntT" =
    let intName = unpack cilIntTy
    in case intName of
      "CIL_IntChar" -> Char
      "CIL_IntNative" -> Int32
      _ -> error $ "Unsupported foreign int type `" <> intName <> "'"
-- NOTE(review): the where-binding below assumes the generic definition
-- elaborates to a plain 'ReferenceType'; any other result is a
-- pattern-match failure rather than a friendly error.
foreignType (FApp cilTy [def, typeArgs])
  | cilTy == sUN "CILTyGen" = GenericReferenceTypeInstance assembly typeName cilTyArgs
  where (ReferenceType assembly typeName) = foreignType def
        cilTyArgs = foreignTypeList typeArgs
foreignType (FApp cilTy [FStr paramIndex])
  | cilTy == sUN "CILTyGenParam" = GenericType (read paramIndex)
foreignType (FApp cilTy [FStr paramIndex])
  | cilTy == sUN "CILTyGenMethodParam" = GenericMethodTypeParameter (read paramIndex)
foreignType d = error $ "invalid type descriptor: " <> show d
-- | Convert a foreign list descriptor into the list of its CIL types.
foreignTypeList :: FDesc -> [CILTy]
foreignTypeList descriptor = map foreignType (foreignList descriptor)
-- | Flatten a cons-list descriptor (@::@ \/ @Nil@ constructors) into a
-- plain Haskell list; any other shape aborts with 'error'.
foreignList :: FDesc -> [FDesc]
foreignList descriptor = case descriptor of
  FApp tag [_, hd, tl] | tag == sUN "::"  -> hd : foreignList tl
  FApp tag [_]         | tag == sUN "Nil" -> []
  other -> error $ "invalid foreign list: " <> show other
-- | Split a CIL type into its (assembly name, type name) pair.
--
-- Builtin primitives live in the empty assembly under their CIL keyword
-- names; generic instances are rendered via the pretty-printer 'pr'.
-- Other types are unsupported and abort with 'error'.
assemblyNameAndTypeFrom :: PrimitiveType -> (String, String)
assemblyNameAndTypeFrom (ReferenceType assemblyName typeName) = (assemblyName, typeName)
assemblyNameAndTypeFrom (ValueType assemblyName typeName) = (assemblyName, typeName)
assemblyNameAndTypeFrom gti@GenericReferenceTypeInstance{} = ("", pr gti "")
assemblyNameAndTypeFrom String = ("", "string")
assemblyNameAndTypeFrom Object = ("", "object")
assemblyNameAndTypeFrom Int32 = ("", "int32")
assemblyNameAndTypeFrom Double64 = ("", "float64")
assemblyNameAndTypeFrom t = error $ "unsupported assembly name for: " <> show t
-- | Look up a builtin Idris-side type name (e.g. @CIL_Str@) in the
-- 'foreignTypes' table; unknown names abort via
-- 'unsupportedForeignType'.
foreignTypeByName :: Name -> PrimitiveType
foreignTypeByName (UN typeName) =
  case HM.lookup typeName foreignTypes of
    Just ty -> ty
    Nothing -> unsupportedForeignType (unpack typeName)
foreignTypeByName other = unsupportedForeignType (show other)
-- | Abort with a diagnostic naming the offending foreign type.
unsupportedForeignType :: String -> a
unsupportedForeignType name = error ("Unsupported foreign type: " <> name)
-- | Builtin mapping from Idris FFI type names to their CIL types;
-- consulted by 'foreignTypeByName'.
foreignTypes :: HM.HashMap Text PrimitiveType
foreignTypes = HM.fromList
  [ ("CIL_Str", String)
  , ("CIL_Ptr", Object)
  , ("CIL_Float", Double64)
  , ("CIL_Bool", Bool)
  , ("CIL_Unit", Void)
  ]
-- | The shape of a foreign function: its parameter types, return type
-- and whether the return is wrapped in IO.
data ForeignFunctionType = ForeignFunctionType
  { parameterTypes :: ![PrimitiveType] -- ^ CIL types of the parameters, in order.
  , returnType :: !PrimitiveType -- ^ CIL type of the (unwrapped) result.
  , returnTypeIO :: !Bool -- ^ True when the Idris return type is IO.
  } deriving (Eq, Ord, Show)
-- | One component of a flattened function type: a parameter or return
-- type, tagged with whether it came from an IO return.
data CILFn
  = CILFnIO !PrimitiveType -- ^ IO-wrapped return type.
  | CILFn !PrimitiveType -- ^ Pure parameter or return type.
-- | Drop the IO\/pure tag and keep the underlying type.
unCILFn :: CILFn -> PrimitiveType
unCILFn (CILFnIO t) = t
unCILFn (CILFn t) = t
-- | Parse a full @CIL_FnT@ function descriptor into its
-- 'ForeignFunctionType'.
--
-- 'functionType' always returns a non-empty list (its terminal clauses
-- return singletons), so the uses of 'init' and 'last' here are safe.
-- Any other descriptor shape aborts via 'functionTypeError'.
parseForeignFunctionType :: FDesc -> ForeignFunctionType
parseForeignFunctionType (FApp n [_, _, fnT]) | n == sUN "CIL_FnT" =
  let ft = functionType fnT
      retType = last ft
      io = case retType of { CILFnIO _ -> True; _ -> False }
  in ForeignFunctionType (unCILFn <$> init ft) (unCILFn retType) io
parseForeignFunctionType d = functionTypeError d
-- | Flatten a curried function descriptor into the list of its
-- components: one 'CILFn' per @CIL_Fn@ parameter followed by exactly
-- one terminal return entry ('CILFnIO' for @CIL_FnIO@, 'CILFn' for
-- @CIL_FnBase@) -- the result is therefore never empty.  Any other
-- shape aborts via 'functionTypeError'.
functionType :: FDesc -> [CILFn]
functionType (FApp n [_, _, pT, fnT]) | n == sUN "CIL_Fn" = CILFn (foreignType pT) : functionType fnT
functionType (FApp n [_, _, ret]) | n == sUN "CIL_FnIO" = [CILFnIO (foreignType ret)]
functionType (FApp n [_, ret]) | n == sUN "CIL_FnBase" = [CILFn (foreignType ret)]
functionType d = functionTypeError d
-- | Abort with a diagnostic for a malformed function descriptor.
functionTypeError :: FDesc -> a
functionTypeError descriptor =
  error ("foreign function signature: " <> show descriptor)
| bamboo/idris-cil | src/IRTS/Cil/FFI.hs | bsd-3-clause | 7,832 | 0 | 13 | 1,683 | 2,686 | 1,366 | 1,320 | 212 | 11 |
module Main ( main ) where
import Data.Text ( pack )
import Data.Text.Titlecase
import System.Environment
import Text.Blaze
import Text.Blaze.Renderer.Pretty
-- | Join the command-line arguments into one string, titlecase it and
-- print the pretty-rendered markup to stdout.
main :: IO ()
main = do
  args <- getArgs
  putStr . renderMarkup . toMarkup . titlecase . pack $ unwords args
| nkaretnikov/titlecase | Main.hs | bsd-3-clause | 255 | 0 | 10 | 40 | 81 | 47 | 34 | 8 | 1 |
module Sprites
( SpriteStore
, parseSprites
) where
import Control.Applicative
import Control.Monad.Fix
import Control.Monad
import Data.Array
import Data.Word
import Foreign.Marshal
import Foreign.Ptr
import Graphics.Rendering.OpenGL
import GraphUtils
-- | Sprite groups by name; each group is a list of texture handles.
type SpriteStore = [(String, [TextureObject])]
-- | Parse a sprite definition file (given as its lines).
--
-- The file starts with colour definitions, one per line: a character
-- followed by four 'reads'-able components (used below as the bytes of
-- a pixel, in r,g,b,a order); the palette defaults every printable
-- ASCII character to @[0,0,0,0]@.  Colour lines end at the first line
-- starting with @'['@, which begins the sprite groups.
--
-- NOTE(review): 'mkcol' pattern-matches both the line head and the
-- results of 'reads', so malformed colour lines fail with a
-- pattern-match error rather than a friendly message.
parseSprites :: [String] -> IO SpriteStore
parseSprites dat = do
  let (coldat,sprdat) = span (\l -> not (null l) && (head l /= '[')) dat
      colmap :: Array Char [Word8]
      colmap = listArray (' ','~') (repeat [0,0,0,0]) // map mkcol coldat
      mkcol (c:dat) = (c,[r,g,b,a])
        where [(r,dat')] = reads dat
              [(g,dat'')] = reads dat'
              [(b,dat''')] = reads dat''
              [(a,dat'''')] = reads dat'''
  -- Consume groups one after another until 'createSpriteGroup' signals
  -- the end of the input.
  flip fix sprdat $ \addGroup dat ->
    createSpriteGroup colmap dat >>= \res -> case res of
      Just (name,g,dat') -> do
        gs <- addGroup dat'
        return ((name,g):gs)
      Nothing -> return []
-- | Parse one @[name]@ sprite group header followed by its sprites.
--
-- Returns @Nothing@ at end of input, otherwise the group name, its
-- textures (accumulated in reverse and flipped at the end to restore
-- file order) and the remaining unparsed lines.
--
-- NOTE(review): @gname@ is bound before the emptiness check, but it is
-- only forced in the non-empty branch, so the 'head'\/'tail' uses are
-- safe there.
createSpriteGroup colmap dat = do
  let dat' = dropWhile null dat
      gname = takeWhile (/=']') (tail (head dat'))
  if null dat'
    then return Nothing
    else do
      (sps,dat'') <- flip fix (tail dat',[]) $ \addSprites (dat,tids) ->
        createSprite colmap dat >>= \res -> case res of
          Just (tid,dat') -> do
            addSprites (dat',tid:tids)
          Nothing -> return (tids,dat)
      return (Just (gname,reverse sps,dat''))
-- | Parse a single sprite: a run of consecutive lines starting with
-- @\"@, each holding one row of pixel characters between the quotes.
--
-- The rows are expanded via @explodeMatrix 3@ (a fixed expansion
-- factor; semantics live in "GraphUtils" -- confirm there), every pixel
-- character is looked up in the palette, and the flattened component
-- bytes are uploaded with 'createTexture'.  Returns @Nothing@ when the
-- next non-blank line does not begin a sprite.
createSprite colmap dat = do
  let dat' = dropWhile null dat
  if null dat' || head (head dat') /= '"'
    then return Nothing
    else do
      let (spdat,dat'') = span (\l -> not (null l) && (head l == '"')) dat'
          spdat' = explodeMatrix 3 (map (takeWhile (/='"') . tail) spdat)
          spw = length (head spdat')
          sph = length spdat'
      tid <- createTexture spw sph False $ flip pokeArray [comp | lin <- spdat', pix <- lin, comp <- colmap ! pix]
      return (Just (tid,dat''))
| cobbpg/dow | src/Sprites.hs | bsd-3-clause | 1,915 | 0 | 22 | 507 | 845 | 444 | 401 | 52 | 3 |
-- | Tools for running hackager on many cores.
module Parallel (
Child, forkChild, waitForChildren,
) where
import Control.Concurrent
import Control.Monad.State
import Control.Exception
import HackageMonad
-- | Children process signal
-- (an MVar the forked child fills exactly once, when it finishes).
type Child = MVar ()
-- | Fork a child and return an MVar that signals when the child is
-- done.  The child runs the given action in a snapshot of the current
-- state; the MVar is filled even if the action throws ('finally').
forkChild :: Hkg () -> Hkg Child
forkChild task = do
  st <- get
  liftIO $ do
    done <- newEmptyMVar
    _ <- forkIO (evalStateT task st `finally` putMVar done ())
    return done
-- | Wait on a list of children to finish processing, blocking on each
-- signal MVar in order.
waitForChildren :: [Child] -> Hkg ()
waitForChildren = mapM_ (liftIO . takeMVar)
| dterei/hackager | src/Parallel.hs | bsd-3-clause | 766 | 0 | 13 | 174 | 196 | 101 | 95 | 18 | 1 |
module Domains.Ring
( module Domains.Additive
, module Domains.Subtractive
, module Domains.Multiplicative
, module Domains.Ring
) where
import Domains.Additive
import Domains.Multiplicative
import Domains.Subtractive
-- https://en.wikipedia.org/wiki/Ring_(mathematics)
-- | A ring: a type with addition, subtraction and multiplication.  The
-- class declares no methods of its own; it only bundles the three
-- superclass constraints.
class (Additive a, Subtractive a, Multiplicative a) =>
      Ring a
-- The standard numeric types form rings; their superclass instances
-- are expected to come from the imported Domains.* modules.
instance Ring Int
instance Ring Integer
instance Ring Double
| pmilne/algebra | src/Domains/Ring.hs | bsd-3-clause | 444 | 0 | 6 | 94 | 97 | 54 | 43 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ParallelListComp #-}
module Main where
import BlackScholes
import Data.Array.Accelerate as A
import Data.Array.Accelerate.Array.Data as A
import Data.Array.Accelerate.Array.Sugar as S
import Data.Array.Accelerate.CUDA as CUDA
import Data.Array.Accelerate.LLVM.Multi as Multi
import Data.Array.Accelerate.LLVM.Native as CPU
import Data.Array.Accelerate.LLVM.PTX as PTX
import Data.Array.Accelerate.Debug ( accInit )
import Foreign.CUDA.Driver as F
import Criterion.Main
import Control.Monad
import System.CPUTime
import System.Environment
import System.IO
import System.IO.Unsafe
import Text.Printf
import Prelude as P
import GHC.Conc
import GHC.Base ( quotInt, remInt )
-- | Read a value from the named environment variable, falling back to
-- the supplied default when the variable is unset or its contents do
-- not parse completely.
maybeEnv :: Read a => String -> a -> IO a
maybeEnv var def = do
  found <- lookupEnv var
  return $
    case fmap reads found of
      Just [(val, "")] -> val
      _ -> def
-- | Benchmark driver for the Black-Scholes kernel.
--
-- Reads @N@ (number of options) and @PINNED@ (use pinned host memory)
-- from the environment, generates random option data, and benchmarks
-- the Accelerate LLVM PTX, native CPU and multi-device backends with
-- criterion.  The double-precision dataset and the explicit per-device
-- context setup are kept around commented out.
main :: IO ()
main = do
  accInit
  -- Create a couple CUDA contexts for the CUDA and PTX backends. Sadly these
  -- can not be shared.
  F.initialise []
  ngpu <- F.count
  -- devs <- mapM F.device [0 .. ngpu-1]
  -- prps <- mapM F.props devs
  -- cc <- mapM (\dev -> CUDA.create dev []) devs
  -- pc <- zipWithM (\dev prp -> PTX.createTargetForDevice dev prp []) devs prps
  n <- maybeEnv "N" 20000000
  pin <- maybeEnv "PINNED" False
  printf "BlackScholes:\n"
  printf "  number of options: %d\n" n
  printf "  number of threads: %d\n" =<< getNumCapabilities
  printf "  number of GPUs: %d\n" ngpu
  printf "  using pinned memory: %s\n" (show pin)
  printf "\n"
  -- Requires a CUDA _context_ to be initialised, which is a little odd...
  when pin $ do
    registerForeignPtrAllocator (F.mallocHostForeignPtr [])
  -- Generate random numbers. This can take a while...
  printf "generating data... "
  hFlush stdout
  t1 <- getCPUTime
  !opts_f32 <- mkData n :: IO (Vector (Float,Float,Float))
  -- !opts_f64 <- mkData n :: IO (Vector (Double,Double,Double))
  t2 <- getCPUTime
  printf "done! (%.2fs)\n" (P.fromIntegral (t2-t1) * 1.0e-12 :: Double)
  printf "\n"
  defaultMain
    [ bench "llvm-ptx" $ whnf (PTX.run1 blackscholes) opts_f32
    , bench "llvm-cpu" $ whnf (CPU.run1 blackscholes) opts_f32
    , bench "llvm-multi" $ whnf (Multi.run1 blackscholes) opts_f32
    ]
{--
-- Grab the default context for each GPU backend. The list will not be empty
-- (c.f. head) because if there are no devices, initialising CUDA would
-- already have failed. Assume that device 0 is the "best" device.
--
let c0 = head cc
p0 = head pc
-- gpugpu_opts_f32 = split ngpu opts_f32
-- cpugpu_opts_f32 = split (ngpu + 1) opts_f32
gpugpu_opts_f64 = split ngpu opts_f64
cpugpu_opts_f64 = split (ngpu + 1) opts_f64
defaultMain
-- [ bgroup "float"
-- [ bench "cuda" $ whnf (CUDA.run1With c0 blackscholes) opts_f32
-- , bench "llvm-ptx" $ whnf (PTX.run1With p0 blackscholes) opts_f32
-- , bench "llvm-cpu" $ whnf (CPU.run1 blackscholes) opts_f32
-- , bench "llvm-multi" $ whnf (Multi.run1 blackscholes) opts_f32
-- , bgroup "manual-split"
-- $ bench "llvm-cpu-ptx" (whnf (async ( CPU.run1Async blackscholes : [ PTX.run1AsyncWith ptx blackscholes | ptx <- pc ])) cpugpu_opts_f32)
-- : if ngpu > 1
-- then [ bench "cuda-cuda" $ whnf (async [ CUDA.run1AsyncWith ctx blackscholes | ctx <- cc ]) gpugpu_opts_f32
-- , bench "llvm-ptx-ptx" $ whnf (async [ PTX.run1AsyncWith ptx blackscholes | ptx <- pc ]) gpugpu_opts_f32
-- ]
-- else []
-- ]
-- , bgroup "double"
[ bench "cuda" $ whnf (CUDA.run1With c0 blackscholes) opts_f64
, bench "llvm-ptx" $ whnf (PTX.run1With p0 blackscholes) opts_f64
, bench "llvm-cpu" $ whnf (CPU.run1 blackscholes) opts_f64
, bench "llvm-multi" $ whnf (Multi.run1 blackscholes) opts_f64
, bgroup "manual-split"
$ bench "llvm-cpu-ptx" (whnf (async ( CPU.run1Async blackscholes : [ PTX.run1AsyncWith ptx blackscholes | ptx <- pc ])) cpugpu_opts_f64)
: if ngpu > 1
then [ bench "cuda-cuda" $ whnf (async [ CUDA.run1AsyncWith ctx blackscholes | ctx <- cc ]) gpugpu_opts_f64
, bench "llvm-ptx-ptx" $ whnf (async [ PTX.run1AsyncWith ptx blackscholes | ptx <- pc ]) gpugpu_opts_f64
]
else []
]
-- ]
--}
-- | Launch one asynchronous action per input chunk and wait for all of
-- them; used by the (currently commented-out) manual-split benchmarks.
--
-- NOTE(review): relies on 'unsafePerformIO' (forced via @$!@) so the
-- effects run when the @()@ result is demanded by criterion's 'whnf';
-- acceptable for benchmarking only, not general-purpose code.
async :: [a -> IO (Async b)] -> [a] -> ()
async fs xs = unsafePerformIO $! do
  as <- sequence $ P.zipWith ($) fs xs
  () <- mapM_ wait as
  return ()
-- | Split a vector into @pieces@ contiguous chunks whose lengths differ
-- by at most one: the first @n \`rem\` pieces@ chunks receive one extra
-- element.  The parallel list comprehension pairs each split point with
-- its successor; each chunk is materialised via 'A.fromFunction'
-- indexing into the original array.
split :: Elt e => Int -> Vector e -> [Vector e]
split pieces arr =
  [ range from to | from <- splitPts
                  | to <- P.tail splitPts
  ]
  where
    Z :. n = arrayShape arr
    chunk = n `quotInt` pieces
    leftover = n `remInt` pieces
    splitPts = P.map splitIx [0 .. pieces]
    splitIx i
      | i < leftover = i * (chunk + 1)
      | otherwise = i * chunk + leftover
    range from to =
      A.fromFunction
        (Z :. to - from)
        (\(Z :. i) -> arr S.! (Z :. i+from))
| vollmerm/shallow-fission | tests/black-scholes/src-acc/Main.hs | bsd-3-clause | 5,645 | 1 | 16 | 1,841 | 889 | 464 | 425 | 75 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module Spec.ExecuteM where
import Spec.Decode
import Spec.Machine
import Utility.Utility
import Control.Monad
import Prelude
-- | Execute a single M-extension (integer multiply\/divide)
-- instruction.
--
-- The special cases encoded in the guards: division by zero yields -1
-- ('Div') or the maximal unsigned value ('Divu'); remainder by zero
-- yields the dividend; signed overflow (@minSigned \`quot\` -1@) yields
-- the dividend for 'Div' and 0 for 'Rem'.
--
-- NOTE: the @begin ast@\/@end ast@ markers delimit a region consumed by
-- external tooling, so no comments are added inside it.
execute :: forall p t. (RiscvMachine p t) => InstructionM -> p ()
-- begin ast
execute (Mul rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  setRegister rd (x * y)
execute (Mulh rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  setRegister rd (highBits ((regToZ_signed x) * (regToZ_signed y)) :: t)
execute (Mulhsu rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  setRegister rd (highBits ((regToZ_signed x) * (regToZ_unsigned y)) :: t)
execute (Mulhu rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  setRegister rd (highBits ((regToZ_unsigned x) * (regToZ_unsigned y)) :: t)
execute (Div rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  let q | x == minSigned && y == -1 = x
        | y == 0 = -1
        | otherwise = quot x y
    in setRegister rd q
execute (Divu rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  let q | y == 0 = maxUnsigned
        | otherwise = divu x y
    in setRegister rd q
execute (Rem rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  let r | x == minSigned && y == -1 = 0
        | y == 0 = x
        | otherwise = rem x y
    in setRegister rd r
execute (Remu rd rs1 rs2) = do
  x <- getRegister rs1
  y <- getRegister rs2
  let r | y == 0 = x
        | otherwise = remu x y
    in setRegister rd r
-- end ast
-- Any instruction not handled above indicates a decoder/dispatcher bug.
execute inst = error $ "dispatch bug: " ++ show inst
| mit-plv/riscv-semantics | src/Spec/ExecuteM.hs | bsd-3-clause | 1,566 | 0 | 16 | 407 | 729 | 339 | 390 | 51 | 1 |
{-# OPTIONS -fglasgow-exts #-}
module GFixpoints where
import Prelude
import Control.Applicative
import Generics.Regular
-- This assumes we have sums of products (with NO nested sums within the products)
-- | A binary leaf tree; reused below both as an example 'Functor' and
-- as the result structure of 'gfixpoints'' (one count per alternative).
data Tree a = Leaf a | Node (Tree a) (Tree a)
  deriving Show
-- | Deliberately partial 'Applicative': '<*>' only handles two
-- 'Leaf's, which is all the @:*:@ case of 'GFixpoints' needs as long
-- as products contain no nested sums (see the note at the top).
instance Applicative Tree where
  pure = Leaf
  Leaf x <*> Leaf y = Leaf (x y)
  -- partial instance
-- | Catamorphism for 'Tree': replace every 'Leaf' via @leaf@ and every
-- 'Node' via @node@.
foldTree :: (a -> b) -> (b -> b -> b) -> Tree a -> b
foldTree leaf node = go
  where
    go (Leaf x) = leaf x
    go (Node l r) = node (go l) (go r)
-- | Sum of all leaf values.  NOTE: shadows 'Prelude.sum' (imported
-- unqualified above), so unqualified uses of @sum@ elsewhere in this
-- module would be ambiguous.
sum :: Tree Int -> Int
sum = foldTree id (+)
-- | Map over the leaves, rebuilding the tree structure via 'foldTree'.
instance Functor Tree where
  fmap f = foldTree (Leaf . f) Node
-- | Count the recursive positions (fixpoints) per alternative of a
-- regular representation, mirroring the sum structure as a 'Tree'.
class GFixpoints f where
  gfixpoints' :: f a -> Tree Int
-- Unit constructors contain no recursive positions.
instance GFixpoints U where
  gfixpoints' _ = Leaf 0
-- 'I' marks exactly one recursive position.
instance GFixpoints I where
  gfixpoints' _ = Leaf 1
-- Constant fields contain no recursive positions.
instance GFixpoints (K a) where
  gfixpoints' _ = Leaf 0
-- Sums keep both alternatives apart as a 'Node'.
instance (GFixpoints f, GFixpoints g) => GFixpoints (f :+: g) where
  gfixpoints' _ = gfixpoints' (undefined :: f a)
                    `Node` gfixpoints' (undefined :: g a)
-- Products add the counts pointwise; this relies on the partial
-- 'Applicative' for 'Tree', hence "no sums within products".
instance (GFixpoints f, GFixpoints g) => GFixpoints (f :*: g) where
  gfixpoints' _ = (+) <$> gfixpoints' (undefined :: f a)
                      <*> gfixpoints' (undefined :: g a)
| dreixel/regular | examples/framework/GFixpoints.hs | bsd-3-clause | 1,209 | 0 | 10 | 289 | 475 | 246 | 229 | -1 | -1 |
#!/usr/bin/env stack
-- stack --install-ghc runghc --package turtle
{-# LANGUAGE OverloadedStrings #-}
import Turtle
import Turtle.Prelude
-- | Rebuild the local docset from scratch: remove any previous
-- @inner-change.docset@, regenerate Haddock docs with stack, and feed
-- both the snapshot and local package databases to @haddocset@.
--
-- NOTE(review): the exit codes of the intermediate 'shell' calls are
-- discarded; only the final command's 'ExitCode' becomes the result.
main :: IO ExitCode
main = do
  exists <- testdir "./inner-change.docset"
  when exists $ do
    echo "Docset already exists - removing"
    rmtree "./inner-change.docset"
  shell "stack exec -- haddocset -t inner-change.docset create" empty
  shell "stack build --haddock" empty
  shell ( "stack exec -- haddocset -t inner-change.docset "
          <> "add $(stack path --snapshot-pkg-db)/*.conf" ) empty
  shell ( "stack exec -- haddocset -t inner-change.docset "
          <> "add $(stack path --local-pkg-db)/*.conf" ) empty
| lancelet/inner-change | mkdocset.hs | bsd-3-clause | 695 | 0 | 10 | 152 | 103 | 47 | 56 | 15 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module System.IO.MMap.Sync (msync, SyncFlag(..), InvalidateFlag(..)) where
import Control.Monad (when)
import Data.Int (Int64)
import Foreign.C.Error (throwErrno)
import Foreign.C.Types (CInt(..), CSize(..))
import Foreign.Ptr (Ptr)
-- TODO: This should be in the mmap package
-- | Whether 'msync' should block until the flush completes ('Sync') or
-- merely schedule it ('Async'); 'Nothing' at the call site requests
-- neither mode.
data SyncFlag = Async | Sync
               deriving (Eq, Ord, Read, Show)

-- | Whether other mappings of the same region should be invalidated so
-- they are refreshed from the synced data.
data InvalidateFlag = Invalidate | NoInvalidate
                    deriving (Eq, Ord, Read, Show)
-- Binding to the C shim in HsMsync.h, which receives the flag word
-- built by 'mkFlags' (presumably forwarding to msync(2) -- see the C
-- source); returns -1 on failure with errno set.
foreign import ccall unsafe "HsMsync.h system_io_msync"
  c_system_io_msync :: Ptr a -> CSize -> CInt -> IO CInt
-- | Numeric encoding of the optional sync mode expected by the C shim:
-- 0 = none requested, 1 = synchronous, 2 = asynchronous.
mkMSyncFlag :: Maybe SyncFlag -> CInt
mkMSyncFlag flag = case flag of
  Nothing    -> 0
  Just Sync  -> 1
  Just Async -> 2
-- | Numeric encoding of the invalidation request: 0 = leave other
-- mappings alone, 4 = invalidate them.
mkInvalidateFlag :: InvalidateFlag -> CInt
mkInvalidateFlag flag = case flag of
  NoInvalidate -> 0
  Invalidate   -> 4
-- Combine both encodings into one flag word.  The codes (1, 2 and 4)
-- occupy disjoint bits, so plain addition acts as a bitwise OR here.
mkFlags :: Maybe SyncFlag -> InvalidateFlag -> CInt
mkFlags mSyncFlag invalidateFlag =
    mkMSyncFlag mSyncFlag +
    mkInvalidateFlag invalidateFlag
-- | Flush @size@ bytes of a memory-mapped region starting at @ptr@,
-- throwing an 'IOError' built from errno if the C call reports failure
-- (returns -1).
msync :: Ptr a -> Int64 -> Maybe SyncFlag -> InvalidateFlag -> IO ()
msync ptr size mSyncFlag invalidateFlag = do
  res <-
    c_system_io_msync ptr (fromIntegral size) $
    mkFlags mSyncFlag invalidateFlag
  when (res == -1) $ throwErrno "msync failed"
| Peaker/keyvaluehash | src/System/IO/MMap/Sync.hs | bsd-3-clause | 1,215 | 0 | 11 | 195 | 386 | 207 | 179 | 30 | 1 |
-- Chapter2Exercise3.hs
module Chapter2Exercise3 where
-- Exercise 3.
-- Modify the previous exercise to support `\n`, `\r`, `\t`, `\\`, and any other
-- desired escape characters
import Control.Monad
import System.Environment
import Text.ParserCombinators.Parsec hiding (spaces)
-- | Abstract syntax for Scheme values, as introduced in the
-- "Write Yourself a Scheme" tutorial.
data LispVal = Atom String
             | List [LispVal]
             | DottedList [LispVal] LispVal -- (a b c ... . z)
             | Number Integer
             | String String
             | Bool Bool
             deriving Show -- debug purposes
-- | String literal parser with escape support (exercise 3).
--
-- Fixes two defects in the previous version:
--
--   * each escape was tried as @char '\\' >> char x@ without 'try', so
--     after consuming the backslash Parsec would not backtrack and
--     every escape except the first alternative (@\\a@) failed;
--   * the escape letter itself was returned (e.g. @'n'@) instead of the
--     character it denotes (e.g. @'\n'@).
--
-- Here the backslash is consumed exactly once, the following character
-- selects the escape, and the decoded character is produced.
parseString' :: Parser LispVal
parseString' = do
    char '"'
    x <- many (escapedChar <|> nonQuote)
    char '"'
    return $ String x
  where
    -- Any character except the closing quote or the start of an escape.
    nonQuote = noneOf "\"\\"
    -- C-style escapes (same set as before):
    -- https://en.wikipedia.org/wiki/Escape_sequences_in_C#Table_of_escape_sequences
    escapedChar = do
      char '\\'
      c <- oneOf "abfnrtv\\'\"?"
      return $ case c of
        'a' -> '\a'
        'b' -> '\b'
        'f' -> '\f'
        'n' -> '\n'
        'r' -> '\r'
        't' -> '\t'
        'v' -> '\v'
        _   -> c -- \\ \' \" \? denote themselves
-- | Plain string literal parser from the previous exercise: no escape
-- handling, just every character up to the closing double quote.
parseString :: Parser LispVal
parseString = do
    char '"'
    x <- many (noneOf "\"") -- 0 or more non-doublequote characters.
    char '"'
    return $ String x
-- | Parse an atom: a letter or symbol followed by any mix of letters,
-- digits and symbols.  The literals @#t@ and @#f@ become booleans.
parseAtom :: Parser LispVal
parseAtom = do
    initial <- letter <|> symbol
    remainder <- many (letter <|> digit <|> symbol)
    let name = initial : remainder
    return $ case name of
        "#t" -> Bool True
        "#f" -> Bool False
        _    -> Atom name
-- | Number parser, point-free 'liftM' style: read one-or-more digits.
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) $ many1 digit
-- | The same number parser written with do-notation (tutorial exercise).
parseNumber' :: Parser LispVal
parseNumber' = do
    digits <- many1 digit
    return $ (Number . read) digits
-- | The same number parser again, desugared to explicit '>>='.
parseNumber'' :: Parser LispVal
parseNumber'' = (many1 digit) >>= (\x -> return $ (Number . read) x)
-- | Top-level expression parser.  Trying atoms first is safe because an
-- atom never starts with a double quote or a digit.
parseExpr :: Parser LispVal
parseExpr = parseAtom
        <|> parseString'
        <|> parseNumber'
-- | Characters allowed in Scheme identifiers besides letters and digits.
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"

-- | One or more whitespace characters (shadows Parsec's own 'spaces').
spaces :: Parser ()
spaces = skipMany1 space
-- | Run the expression parser and report success or failure.  At this
-- stage of the tutorial the parsed value is deliberately not shown
-- ('val' is unused).
readExpr :: String -> String
readExpr input = case parse parseExpr "lisp" input of
    Left err -> "No match: " ++ show err
    Right val -> "Found value"
-- | Parse the first command-line argument and print the outcome.
main :: IO ()
main = do
    (expr:_) <- getArgs
    putStrLn (readExpr expr)
| EFulmer/haskell-scheme-wikibook | src/Exercises/Ch2/Pt1/Ex3.hs | bsd-3-clause | 2,348 | 0 | 11 | 662 | 588 | 299 | 289 | 58 | 3 |
{-# LANGUAGE
NoImplicitPrelude,
OverloadedStrings
#-}
module Main (main) where
-- General
import BasePrelude hiding (on)
-- Monads
import Control.Monad.IO.Class (liftIO)
-- Text
import Data.Text (Text)
import qualified Data.Text.ICU.Char as T
import Text.Printf
-- Catching exceptions
import Control.Spoon
-- Random
import System.Random
-- GUI
import Graphics.UI.Gtk
-- | A tiny typing game: every 4 seconds show a random Unicode arrow
-- character and append a "<char> = <keys>" line to game.log recording
-- what the player typed for the previous arrow.
main :: IO ()
main = do
  initGUI
  window <- windowNew
  window `on` objectDestroy $ mainQuit
  set window [
    windowTitle := ("Bob game" :: Text),
    windowGravity := GravityCenter,
    windowWindowPosition := WinPosCenter,
    windowDefaultWidth := 500,
    windowDefaultHeight := 500 ]
  -- On Escape, exit.
  window `on` keyPressEvent $ tryEvent $ do
    "Escape" <- eventKeyName
    liftIO mainQuit
  label <- labelNew (Nothing :: Maybe Text)
  window `containerAdd` label
  -- All characters of the Unicode "Arrows" block; 'spoon' converts any
  -- exception from 'blockCode' on exotic code points into Nothing.
  let arrows = filter ((== Just T.Arrows) . spoon . T.blockCode) ['\0'..]
  char <- newIORef "" -- the arrow currently on screen
  keys <- newIORef "" -- keys typed since it appeared
  -- Record every decodable key press for the current round.
  window `on` keyPressEvent $ do
    key <- eventKeyVal
    for_ (keyToChar key) $ \c ->
      liftIO $ modifyIORef keys (++ [c])
    return False
  -- Every 4 seconds: log the finished round, then show a fresh arrow.
  flip timeoutAdd 4000 $ do
    do c <- readIORef char
       k <- readIORef keys
       unless (null c || null k) $
         appendFile "game.log" (printf "%s = %s\n" c k)
    x <- randomChoose arrows
    writeIORef char [x]
    writeIORef keys ""
    set label [
      labelText := (printf "<span font='70'>%c</span>" x :: String),
      labelUseMarkup := True ]
    return True
  widgetShowAll window
  mainGUI
-- | Pick a uniformly random element of the list.  The list must be
-- non-empty (indexing fails otherwise).
randomChoose :: [a] -> IO a
randomChoose items = do
  idx <- randomRIO (0, length items - 1)
  return (items !! idx)
| aelve/bob | src/Game.hs | bsd-3-clause | 1,655 | 0 | 16 | 394 | 555 | 282 | 273 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
module Astro.Orbit.ManeuverSpec where
import Test.Hspec
import Test.QuickCheck (property, (==>))
import Data.AEq
import TestInstances
import Numeric.Units.Dimensional.Prelude
import Numeric.Units.Dimensional.LinearAlgebra
import qualified Prelude
import Astro.Time
import Astro.Time.At
import Astro.Orbit.COE
import Astro.Orbit.Conversion
import Astro.Orbit.Types
import Astro.Orbit.Maneuver
import Astro.Trajectory
import Astro.Trajectory.EphemTrajectory
-- | Test-suite entry point.  Explicit type signatures added: every
-- top-level binding should carry one.
main :: IO ()
main = hspec spec

-- | All maneuver specs.
spec :: Spec
spec = do
  spec_zeroManeuver
  spec_randomManeuver
-- ----------------------------------------------------------
-- | Applying a zero delta-v maneuver must leave the sampled ephemeris
-- unchanged (compared with '~==' since the maneuver machinery still
-- runs numerically).
spec_zeroManeuver = describe "Zero maneuver " $ do
  it "does not affect the trajectory"
    ( ephemeris (applyManeuver testTrajectory (zeroMan`At`mjd' 1)) (map mjd' [0..])
      ~== ephemeris testTrajectory (map mjd' [0..])
    )
  where
    zeroMan = ImpulsiveRTN (0*~mps) (0*~mps) (0*~mps)
-- | Properties of an arbitrary maneuver at epoch t: it may only change
-- ephemeris points strictly after t, and only when its delta-v is
-- non-zero.
spec_randomManeuver = describe "Random maneuver at time t" $ do
  it "does not affect trajectories that start after t"
    (property $ \m
      -> ephemeris (applyManeuver testTrajectory (m`At`mjd' (-1))) (map mjd' [0..])
      == ephemeris testTrajectory (map mjd' [0..])
    )
  it "does not affect trajectories that end before t"
    (property $ \m
      -> ephemeris (applyManeuver testTrajectory (m`At`mjd' 11)) (map mjd' [0..])
      == ephemeris testTrajectory (map mjd' [0..])
    )
  it "does not affect data prior to time t"
    (property $ \m
      -> absoluteDV m > _0
      ==> ephemeris (applyManeuver testTrajectory (m`At`mjd' 5)) (map mjd' [0..4])
      == ephemeris testTrajectory (map mjd' [0..4])
    )
  it "affects data after time t" $
    property $ \m
      -> absoluteDV m > _0
      ==> ephemeris (applyManeuver testTrajectory (m`At`mjd' 5)) (map mjd' [6])
      /= ephemeris testTrajectory (map mjd' [6])
-- | Magnitude of an impulsive RTN maneuver's delta-v: root sum of
-- squares of the radial, transversal and normal components.
absoluteDV :: Floating a => Maneuver a -> Velocity a
absoluteDV ImpulsiveRTN {..} = sqrt (dvr^pos2 + dvt^pos2 + dvn^pos2)
-- ----------------------------------------
-- | Shorthand unit: metres per second.
mps = meter / second
-- | Circular, equatorial reference orbit; first node of 'testTrajectory'.
testCOE0 :: COE Mean Double
testCOE0 = COE
  { mu  = mu_Earth
  , slr = 10000 *~ kilo meter
  , ecc = 0 *~ one
  , inc = 0 *~ degree
  , aop = 0 *~ degree
  , raan = 0 *~ degree
  , anomaly = Anom $ 0 *~ degree
  }
-- | Inclined, slightly eccentric orbit; second node of 'testTrajectory'.
testCOE1 :: COE Mean Double
testCOE1 = COE
  { mu  = mu_Earth
  , slr = 24000 *~ kilo meter
  , ecc = 0.01 *~ one
  , inc = 15 *~ degree
  , aop = (-105) *~ degree -- 255 *~ degree
  , raan = 35 *~ degree
  , anomaly = Anom $ 10 *~ degree
  }
-- | Two-point ephemeris trajectory spanning MJD 0 .. 10.
testTrajectory = ET [coe2meoe testCOE0`At`mjd' 0, coe2meoe testCOE1`At`mjd' 10]

-- | Earth's gravitational parameter, expressed dimensionally.
mu_Earth = 398600.4418 *~ (kilo meter ^ pos3 / second ^ pos2)
| bjornbm/astro | test/Astro/Orbit/ManeuverSpec.hs | bsd-3-clause | 2,688 | 0 | 20 | 552 | 904 | 491 | 413 | -1 | -1 |
module Network.DGS.Status.Game (Game(..), Int16) where
import Network.DGS.Status.Imports hiding (style)
-- | One entry of the DGS status page's list of running games.
data Game = Game
    { gid                :: ID GameTag
    , opponent           :: Nick
    , nextToMove         :: Color
    , lastMove           :: UTCTime
    , timeRemaining      :: Remaining
    , action             :: Action
    , status             :: Status
    , mid                :: ID MoveTag
    , tid                :: ID TournamentTag
    , sid                :: ID ShapeTag
    , style              :: Style
    , priority           :: Int16 -- ^ this will be zero unless you ask for 'Priority' ordering or the user has set priority as their status page's sort order and you ask for 'StatusPage' ordering
    , opponentLastAccess :: UTCTime
    , handicap           :: Integer
    } deriving (Eq, Ord, Show)
-- Game lines start with a "G" marker and carry the constructor's
-- fourteen fields in order: ten plain columns, the 'Style' (which
-- arrives quoted), then three final columns.
instance Atto Game where
    attoparse = "G" --> Game
        <*> column
        <*> column
        <*> column
        <*> column
        <*> column
        <*> column
        <*> column
        <*> column
        <*> column
        <*> column
        <*> (comma >> quoted attoparse) -- 'Style' is wrapped in quotes
        <*> column
        <*> column
        <*> column
| dmwit/dgs | Network/DGS/Status/Game.hs | bsd-3-clause | 1,041 | 4 | 20 | 345 | 241 | 141 | 100 | 34 | 0 |
-- | Build instance tycons for the PData and PDatas type families.
--
-- TODO: the PData and PDatas cases are very similar.
-- We should be able to factor out the common parts.
module Vectorise.Generic.PData
( buildPDataTyCon
, buildPDatasTyCon )
where
import Vectorise.Monad
import Vectorise.Builtins
import Vectorise.Generic.Description
import Vectorise.Utils
import Vectorise.Env( GlobalEnv( global_fam_inst_env ) )
import BuildTyCl
import DataCon
import TyCon
import Type
import FamInst
import FamInstEnv
import TcMType
import Name
import Util
import MonadUtils
import Control.Monad
-- buildPDataTyCon ------------------------------------------------------------
-- | Build the PData instance tycon for a given type constructor.
--
-- 'fixV' ties the knot: the representation tycon being built is pulled
-- out of the very family instance that the build returns.
buildPDataTyCon :: TyCon -> TyCon -> SumRepr -> VM FamInst
buildPDataTyCon orig_tc vect_tc repr
 = fixV $ \fam_inst ->
   do let repr_tc = dataFamInstRepTyCon fam_inst
      name' <- mkLocalisedName mkPDataTyConOcc orig_name
      rhs   <- buildPDataTyConRhs orig_name vect_tc repr_tc repr
      pdata <- builtin pdataTyCon
      buildDataFamInst name' pdata vect_tc rhs
 where
    orig_name = tyConName orig_tc
-- | Build a @data instance@ of family @fam_tc@ at the vectorised type
-- constructor's (freshened) type variables, together with the coercion
-- axiom connecting the instance to its representation tycon.
buildDataFamInst :: Name -> TyCon -> TyCon -> AlgTyConRhs -> VM FamInst
buildDataFamInst name' fam_tc vect_tc rhs
 = do { axiom_name <- mkDerivedName mkInstTyCoOcc name'

      ; (_, tyvars') <- liftDs $ freshenTyVarBndrs tyvars
      ; let ax       = mkSingleCoAxiom Representational axiom_name tyvars' [] fam_tc pat_tys rep_ty
            tys'     = mkTyVarTys tyvars'
            rep_ty   = mkTyConApp rep_tc tys'
            pat_tys  = [mkTyConApp vect_tc tys']
            rep_tc   = mkAlgTyCon name'
                           (mkTyConBindersPreferAnon tyvars' liftedTypeKind)
                           liftedTypeKind
                           (map (const Nominal) tyvars')
                           Nothing
                           []          -- no stupid theta
                           rhs
                           (DataFamInstTyCon ax fam_tc pat_tys)
                           False       -- not GADT syntax
      ; liftDs $ newFamInst (DataFamilyInst rep_tc) ax }
 where
    tyvars = tyConTyVars vect_tc
-- | Wrap the single PData data constructor into a (non-enum) data
-- tycon right-hand side.
buildPDataTyConRhs :: Name -> TyCon -> TyCon -> SumRepr -> VM AlgTyConRhs
buildPDataTyConRhs orig_name vect_tc repr_tc repr =
    mkRhs <$> buildPDataDataCon orig_name vect_tc repr_tc repr
  where
    mkRhs con = DataTyCon { data_cons = [con], is_enum = False }
-- | Build the single data constructor of a PData instance tycon: one
-- lazy, unbanged field per component of the flattened sum
-- representation (see 'mkSumTys').
buildPDataDataCon :: Name -> TyCon -> TyCon -> SumRepr -> VM DataCon
buildPDataDataCon orig_name vect_tc repr_tc repr
  = do let tvs = tyConTyVars vect_tc
       dc_name  <- mkLocalisedName mkPDataDataConOcc orig_name
       comp_tys <- mkSumTys repr_sel_ty mkPDataType repr
       fam_envs <- readGEnv global_fam_inst_env
       rep_nm   <- liftDs $ newTyConRepName dc_name
       liftDs $ buildDataCon fam_envs dc_name
                  False                  -- not infix
                  rep_nm
                  (map (const no_bang) comp_tys)
                  (Just $ map (const HsLazy) comp_tys)
                  []                     -- no field labels
                  (mkTyVarBinders Specified tvs)
                  []                     -- no existentials
                  []                     -- no eq spec
                  []                     -- no context
                  comp_tys
                  (mkFamilyTyConApp repr_tc (mkTyVarTys tvs))
                  repr_tc
  where
    no_bang = HsSrcBang Nothing NoSrcUnpack NoSrcStrict
-- buildPDatasTyCon -----------------------------------------------------------
-- | Build the PDatas instance tycon for a given type constructor.
--
-- Mirrors 'buildPDataTyCon' for the PDatas (arrays-of-arrays) family.
buildPDatasTyCon :: TyCon -> TyCon -> SumRepr -> VM FamInst
buildPDatasTyCon orig_tc vect_tc repr
 = fixV $ \fam_inst ->
   do let repr_tc = dataFamInstRepTyCon fam_inst
      name'  <- mkLocalisedName mkPDatasTyConOcc orig_name
      rhs    <- buildPDatasTyConRhs orig_name vect_tc repr_tc repr
      pdatas <- builtin pdatasTyCon
      buildDataFamInst name' pdatas vect_tc rhs
 where
    orig_name = tyConName orig_tc
-- | Wrap the single PDatas data constructor into a (non-enum) data
-- tycon right-hand side.
buildPDatasTyConRhs :: Name -> TyCon -> TyCon -> SumRepr -> VM AlgTyConRhs
buildPDatasTyConRhs orig_name vect_tc repr_tc repr =
    mkRhs <$> buildPDatasDataCon orig_name vect_tc repr_tc repr
  where
    mkRhs con = DataTyCon { data_cons = [con], is_enum = False }
-- | Build the single data constructor of a PDatas instance tycon;
-- identical to 'buildPDataDataCon' except for the selector type and the
-- PDatas wrapping of the component types.
buildPDatasDataCon :: Name -> TyCon -> TyCon -> SumRepr -> VM DataCon
buildPDatasDataCon orig_name vect_tc repr_tc repr
  = do let tvs = tyConTyVars vect_tc
       dc_name  <- mkLocalisedName mkPDatasDataConOcc orig_name
       comp_tys <- mkSumTys repr_sels_ty mkPDatasType repr
       fam_envs <- readGEnv global_fam_inst_env
       rep_nm   <- liftDs $ newTyConRepName dc_name
       liftDs $ buildDataCon fam_envs dc_name
                  False                  -- not infix
                  rep_nm
                  (map (const no_bang) comp_tys)
                  (Just $ map (const HsLazy) comp_tys)
                  []                     -- no field labels
                  (mkTyVarBinders Specified tvs)
                  []                     -- no existentials
                  []                     -- no eq spec
                  []                     -- no context
                  comp_tys
                  (mkFamilyTyConApp repr_tc (mkTyVarTys tvs))
                  repr_tc
  where
    no_bang = HsSrcBang Nothing NoSrcUnpack NoSrcStrict
-- Utils ----------------------------------------------------------------------
-- | Flatten a SumRepr into a list of data constructor field types.
-- @repr_selX_ty@ supplies the selector type for a multi-constructor
-- sum; @mkTc@ wraps each component's original type (e.g. in PData or
-- PDatas).
mkSumTys
        :: (SumRepr -> Type)
        -> (Type -> VM Type)
        -> SumRepr
        -> VM [Type]
mkSumTys repr_selX_ty mkTc repr
 = sum_tys repr
 where
    -- A real sum contributes its selector type before the fields of
    -- all its constructors.
    sum_tys EmptySum      = return []
    sum_tys (UnarySum r)  = con_tys r
    sum_tys d@(Sum { repr_cons = cons })
                          = liftM (repr_selX_ty d :) (concatMapM con_tys cons)

    con_tys  (ConRepr _ r) = prod_tys r

    -- One wrapped type per product component.
    prod_tys EmptyProd     = return []
    prod_tys (UnaryProd r) = liftM singleton (comp_ty r)
    prod_tys (Prod { repr_comps = comps }) = mapM comp_ty comps

    comp_ty r = mkTc (compOrigType r)
{-
mk_fam_inst :: TyCon -> TyCon -> (TyCon, [Type])
mk_fam_inst fam_tc arg_tc
= (fam_tc, [mkTyConApp arg_tc . mkTyVarTys $ tyConTyVars arg_tc])
-}
| mettekou/ghc | compiler/vectorise/Vectorise/Generic/PData.hs | bsd-3-clause | 6,550 | 0 | 14 | 2,205 | 1,361 | 685 | 676 | 122 | 5 |
{-# LANGUAGE ViewPatterns #-}
module Supercompile.Residualise where
import Evaluator.Syntax
import Core.Renaming
import Core.Syntax
import Utilities
import qualified Data.Map as M
-- | Turn an abstract machine state back into (tag-free) syntax: the
-- heap becomes a letrec wrapped around the stack unwound onto the
-- renamed, detagged focus term.
residualiseState :: State -> Out Term
residualiseState (heap, k, in_e) = residualiseHeap heap (\ids -> residualiseStack ids k (detagTerm (renameIn renameTaggedTerm ids in_e)))
-- | Residualise the heap around a continuation-supplied body, feeding
-- the heap's id supply to the continuation; any bindings floated while
-- unwinding the stack join the same letrec.
residualiseHeap :: Heap -> (IdSupply -> ([(Out Var, Out Term)], Out Term)) -> Out Term
residualiseHeap (Heap h ids) (($ ids) -> (floats, e)) = letRec (residualisePureHeap ids h ++ floats) e
-- | One letrec binding per heap entry, renamed and with tags stripped.
residualisePureHeap :: IdSupply -> PureHeap -> [(Out Var, Out Term)]
residualisePureHeap ids h = [(x', detagTerm $ renameIn renameTaggedTerm ids in_e) | (x', in_e) <- M.toList h]
-- | Unwind the stack innermost frame first, threading the term through
-- each frame and accumulating the bindings the frames float out.
residualiseStack :: IdSupply -> Stack -> Out Term -> ([(Out Var, Out Term)], Out Term)
residualiseStack _ [] e = ([], e)
residualiseStack ids (kf:k) (residualiseStackFrame ids (tagee kf) -> (floats, e)) = first (floats ++) $ residualiseStack ids k e
-- | Residualise a single stack frame around the term it surrounds.
-- Only 'Update' frames float a binding; the others rebuild syntax
-- directly (application, case scrutinee, primop argument position).
residualiseStackFrame :: IdSupply -> StackFrame -> Out Term -> ([(Out Var, Out Term)], Out Term)
residualiseStackFrame _   (Apply x2')               e1 = ([], e1 `app` x2')
residualiseStackFrame ids (Scrutinise in_alts)      e  = ([], case_ e (detagAlts $ renameIn renameTaggedAlts ids in_alts))
residualiseStackFrame ids (PrimApply pop in_vs es') e  = ([], primOp pop (map (value . detagValue . renameIn renameTaggedValue ids) in_vs ++ [e] ++ map (detagTerm . renameIn renameTaggedTerm ids) es'))
residualiseStackFrame _   (Update x')               e  = ([(x', e)], var x')
| batterseapower/supercompilation-by-evaluation | Supercompile/Residualise.hs | bsd-3-clause | 1,566 | 0 | 13 | 279 | 645 | 345 | 300 | 21 | 1 |
{-|
This module provides a collection of functions that enable you to
measure the algorithmic complexity of arbitrary functions.
Let's say you want to measure the time complexity of 'qsort':
@
qsort :: Ord a => [a] -> [a]
qsort [] = []
qsort (x:xs) = qsort (filter (\< x) xs) ++ [x] ++ qsort (filter (>= x) xs)
@
We want to now the time complexity of 'qsort' in terms of the size of
its 'InputSize' \'n\'. First we have to express what \'n\' is. We do this by
writing an 'InputGen':
@
-- Very simple pseudo random number generator.
pseudoRnd :: Int -> Int -> Int -> Int -> [Int]
pseudoRnd p1 p2 n d = iterate (\x -> (p1 * x + p2) `mod` n) d
@
@
genIntList :: 'InputGen' [Int]
genIntList n = take (fromInteger n) $ pseudoRnd 16807 0 (2 ^ 31 - 1) 79
@
The function 'genIntList' now generates a pseudo random list of Ints
of length \'n\'.
Next we have to specify what aspect of 'qsort' we want to
measure. Since we are interested in the time complexity we use a CPU
time sensor:
@
mySensor = 'cpuTimeSensor' 10
@
The 'cpuTimeSensor' is a 'Sensor' which measures CPU time. It takes
one argument which is a time in milliseconds. This is the minimum
execution time for an 'Action' which is measured. If the action doesn't
take more than 10 ms to execute it will be repeated until it
does. This allows us to measure actions which execute much faster than
the minimum measurable CPU time difference.
Now we can create an 'Experiment':
@
expQSort = 'pureExperiment' \"quicksort\" mySensor genIntList qsort
@
This is an experiment which measures the CPU time it takes to apply
the function 'qsort' on an input generate by 'genIntList'.
Before you can perform the experiment you need to decide which input
sizes you want to measure and when to stop. These ideas are contained
in a 'Strategy'. We'll use the 'simpleLinearHeuristic':
@
myStrategy = 'simpleLinearHeuristic' 1.1 10^5
@
This strategy looks at the last two points to decide which input size
to measure next. It picks a point where it thinks the measured value
will be 1.1 times the last measured value. It will stop if the input
size exceeds 10^5 to prevent running out of memory.
Now we can finally perform the experiment:
@
stats <- 'performExperiment' myStrategy 10 15 expQSort
@
The experiment will take 10 samples per input size and it will run for
15 seconds. The result is a bunch of 'MeasurementStats'. You can now
print these statistics to stdout or show them in a nice graph:
@
'printStats' [stats]
'showStatsChart' [stats]
@
Looking at the type signatures of these function you'll notice that
they accept a list of 'MeasurementStats'. This means you can compare
multiple experiments.
Let's compare 'qsort' to the build in 'Data.List.sort'. This time
we'll use some convenient utility functions to more easily setup and
perform an experiment.
@
expSorts = [ 'pureExperiment' \"qsort\" mySensor genIntList qsort
, 'pureExperiment' \"Data.List.sort\" mySensor genIntList 'sort'
]
'simpleSmartMeasure' 1.1 10^5 10 20 expSorts
@
The utility function 'simpleSmartMeasure' uses the
'simpleLinearHeuristic' strategy by default. The first to arguments
are passed to the heuristic. We again choose to take 10 samples per
input size. The total measurement time is increased to 20 seconds, but
it is now used to measure two functions instead of one. The time is
divided evenly and each function gets 10 seconds. The last argument is
a list of experiments. After 20 seconds you'll get a nice graph
comparing the complexity of the two sorting algorithms.
-}
module Test.Complexity
( module Test.Complexity.Experiment
, module Test.Complexity.Main
, module Test.Complexity.Sensors
, module Test.Complexity.Strategy
, module Test.Complexity.Types
) where
import Test.Complexity.Experiment
import Test.Complexity.Main
import Test.Complexity.Sensors
import Test.Complexity.Strategy
import Test.Complexity.Types
| roelvandijk/complexity | Test/Complexity.hs | bsd-3-clause | 3,979 | 0 | 5 | 761 | 74 | 51 | 23 | 11 | 0 |
-- FFI hooks supplied by the host harness; their semantics live on the
-- C side (NOTE(review): confirm against the harness sources).
foreign import ccall input :: Int
foreign import ccall output :: Int -> IO ()

-- | Pipe the imported value straight to the imported output routine.
main :: IO ()
main = output input
| bosu/josh | t/progs/InpOut.hs | bsd-3-clause | 113 | 0 | 8 | 24 | 48 | 25 | 23 | 4 | 1 |
-- | An 'OrdPSQ' uses the 'Ord' instance of the key type to build a priority
-- search queue.
--
-- It is based on Ralf Hinze's work.
--
-- * Hinze, R., A Simple Implementation Technique for Priority Search Queues,
-- ICFP 2001, pp. 110-121
--
-- <http://citeseer.ist.psu.edu/hinze01simple.html>
--
-- This means it is similar to the
-- <http://hackage.haskell.org/package/PSQueue-1.1 PSQueue> package but
-- our benchmarks showed it to perform quite a bit faster.
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE BangPatterns #-}
module Data.OrdPSQ
( -- * Type
OrdPSQ
-- * Query
, null
, size
, member
, lookup
, findMin
-- * Construction
, empty
, singleton
-- * Insertion
, insert
-- * Delete/Update
, delete
, deleteMin
, alter
, alterMin
-- * Conversion
, fromList
, toList
, toAscList
, keys
-- * Views
, insertView
, deleteView
, minView
-- * Traversals
, map
, fold'
-- * Validity check
, valid
) where
import Prelude hiding (map, lookup, null, foldr)
import Data.OrdPSQ.Internal
| phadej/psqueues | src/Data/OrdPSQ.hs | bsd-3-clause | 1,212 | 0 | 5 | 369 | 126 | 92 | 34 | 30 | 0 |
module Problem103 where
import Control.Monad
import Data.Function
import Data.List
-- | Print the Project Euler 103 answer: the elements of the optimum
-- special sum set of size 7, concatenated without separators.
main :: IO ()
main = putStrLn . concatMap show $ minimumOptimumSpecialSumSet 7
-- | Lexicographically smallest special sum set of size @n@ (Project
-- Euler 103).  Candidate sets are grown one element at a time, kept in
-- descending order internally, and reversed to ascending on output.
minimumOptimumSpecialSumSet :: Int -> [Int]
minimumOptimumSpecialSumSet n =
    reverse . head $ iterate (concatMap grow) [[]] !! max 0 n
  where
    -- All admissible one-element extensions of a partial set.  New
    -- elements must exceed the current maximum, so each partial set is
    -- strictly descending (head = largest element).
    grow [] = [ [c] | c <- [1 .. 50] ]
    grow partial@(top : _) =
        [ extended
        | c <- [top + 1 .. 50]
        , let extended = c : partial
        , admissible extended
        ]

    -- The two "special sum set" rules, re-checked after adding 'newest'.
    -- Disjointness of the compared subsets is irrelevant: shared
    -- elements cancel on both sides of each comparison.
    admissible candidate@(newest : older) = sizesOrdered && sumsDistinct
      where
        -- Any k-subset must outweigh any (k-1)-subset, i.e. the largest
        -- (k-1)-sum stays strictly below the smallest k-sum.
        sizesOrdered =
            and $ zipWith (<) (maxKSums candidate) (drop 1 (minKSums candidate))
        -- Equal-sized subsets never share a sum.  It suffices to compare
        -- subsets drawn from 'older' against equal-sized subsets that
        -- additionally contain 'newest'.
        sumsDistinct =
            and $ zipWith noOverlap (drop 1 sumsBySize) shiftedSumsBySize
        noOverlap olds news = null [ () | s <- olds, t <- news, s == t ]
        shiftedSumsBySize = map (map (newest +)) sumsBySize
        -- Subset sums of 'older', grouped by subset size in ascending
        -- order (the first group is the empty subset's).
        sumsBySize =
            map (map sum)
                . groupBy ((==) `on` length)
                . sortBy (compare `on` length)
                $ filterM (const [True, False]) older

    -- Partial sets are descending, so suffixes hold the smallest k
    -- elements and prefixes the largest k elements.
    minKSums ys = map sum . reverse . keepNonEmpty $ tails ys
    maxKSums ys = map sum . keepNonEmpty $ inits ys
    keepNonEmpty = filter (not . null)
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem103.hs | bsd-3-clause | 1,904 | 0 | 16 | 753 | 453 | 239 | 214 | 34 | 1 |
module Report (logLine, logIt) where
import System.IO
-- | Emit one line of logging output on stderr.
logLine :: String -> IO ()
logLine msg = hPutStrLn stderr msg
-- | Log any 'Show'-able value on stderr via 'logLine'.
logIt :: Show a => a -> IO ()
logIt x = logLine (show x)
| JustusAdam/haxl-local-test | src/Report.hs | bsd-3-clause | 166 | 0 | 8 | 36 | 69 | 37 | 32 | 6 | 1 |
module RandomList where
import RandomValue
-- | A non-empty list revealed incrementally: a value plus, optionally,
-- the next value paired with the remainder of the list.
data RList a = RValue a (Maybe (a, RList a))
module Text.Roundtrip.Xml (
module Text.Roundtrip.Xml.Printer
, module Text.Roundtrip.Xml.Parser
) where
import Text.Roundtrip.Xml.Printer
import Text.Roundtrip.Xml.Parser
| skogsbaer/roundtrip-xml | src/Text/Roundtrip/Xml.hs | bsd-3-clause | 181 | 0 | 5 | 23 | 39 | 28 | 11 | 5 | 0 |
module LGtk.Demos.MazeGen
( genMaze
) where
import Data.List
import Data.Array.IArray
import System.Random
import Data.Equivalence.Persistent
import System.Random.Shuffle
import Control.Monad.State
import LGtk.Demos.Maze.Types hiding (Cell)
------------ copied from http://cdsmith.wordpress.com/2011/06/06/mazes-in-haskell-my-version/ on 9 May, 2014
-- Vertical walls are to the right of their cell (so the x component
-- must be less than width - 1), and horizontal walls are to the top
-- of their cell (so the y component must be less than height - 1).
-- | Grid coordinates of a cell.
type Cell = (Int, Int)

-- | A wall is identified by the cell it borders: 'H' walls sit at the
-- top of their cell, 'V' walls to its right (see the comment above).
data Wall = H Cell | V Cell deriving (Eq, Show)
-- Kruskal-style maze carving: walk the (shuffled) candidate walls and
-- keep a wall only when the two cells it separates are already
-- connected; otherwise knock it down by merging their room classes.
process _ [] = []
process rooms (H (x,y) : ws)
    | equiv rooms (x,y) (x,y+1) = H (x,y) : process rooms ws
    | otherwise = process (equate (x,y) (x,y+1) rooms) ws
process rooms (V (x,y) : ws)
    | equiv rooms (x,y) (x+1,y) = V (x,y) : process rooms ws
    | otherwise = process (equate (x,y) (x+1,y) rooms) ws
-- | Walls remaining in a @w@ x @h@ maze: shuffle every interior wall
-- and keep just those whose removal would merge already-connected
-- rooms (so the result is a spanning-tree maze).
genMaze_ :: RandomGen gen => Int -> Int -> gen -> [Wall]
genMaze_ w h gen = finalWalls
  where allWalls = [ H (x,y) | x <- [0 .. w-1], y <- [0 .. h-2] ]
              ++ [ V (x,y) | x <- [0 .. w-2], y <- [0 .. h-1] ]
        startRooms = emptyEquivalence ((0,0), (w-1, h-1))
        startWalls = shuffle' allWalls (length allWalls) gen
        finalWalls = process startRooms startWalls
------------ end of copy
-- | Generate a maze in the application's representation: a 1-based cell
-- array where each cell carries the directions that are OPEN (the
-- 'complement' of its remaining walls).  Runs in 'State StdGen' so the
-- caller controls the seed.
genMaze :: Size -> State StdGen Maze
genMaze (w, h) = state f where
    f s = (tr $ genMaze_ w h s1, s2)
        where
        (s1, s2) = split s
    -- Translate the generator's 0-based wall list into blocked
    -- directions for the two cells each wall touches.
    -- NOTE(review): V walls map to S/N and H walls to E/W here, which
    -- suggests the axes are transposed relative to the generator's own
    -- comment -- confirm against LGtk.Demos.Maze.Types.
    tr ls = array ((1,1), (w,h)) [ ((i,j), C $ complement $ concatMap (g i j) ls) | i <- [1..w], j<-[1..h]]
        where
        g i j (V (x,y)) | i == x + 1 && j == y + 1 = [S]
        g i j (V (x,y)) | i == x + 2 && j == y + 1 = [N]
        g i j (H (x,y)) | i == x + 1 && j == y + 1 = [E]
        g i j (H (x,y)) | i == x + 1 && j == y + 2 = [W]
        g _ _ _ = []
-- | Directions of @[N,E,S,W]@ absent from the argument.  The input is
-- sorted first; the merge below assumes the 'Ord' order of 'Cardinal'
-- matches the literal order N, E, S, W -- TODO confirm in Maze.Types.
complement :: [Cardinal] -> [Cardinal]
complement present = dropMatching [N, E, S, W] (sort present)
  where
    -- Walk both (sorted) lists, keeping the directions that never line
    -- up with an element of the input.
    dropMatching (d : ds) (p : ps)
        | d == p = dropMatching ds ps
    dropMatching (d : ds) ps = d : dropMatching ds ps
    dropMatching [] _ = []
| divipp/lgtk | lgtkdemo/LGtk/Demos/MazeGen.hs | bsd-3-clause | 2,102 | 0 | 16 | 605 | 1,065 | 575 | 490 | 40 | 5 |
{-# OPTIONS -Wall #-}
{-# LANGUAGE OverloadedStrings, RecordWildCards, FlexibleInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS -fno-warn-name-shadowing -fno-warn-orphans #-}
-- | Mini formlets library.
module Text.Formlet
(Formlet(..)
,formlet
,req
,opt
,wrap
,integer
,textInput
,dropInput
,areaInput
,submitInput
,parse
,options
,findOption) where
import Control.Applicative
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.Trans.Error (ErrorList(..))
import Control.Monad.Writer
import Data.ByteString (ByteString)
import Data.List (find)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid.Operator
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding
import Prelude hiding ((++))
import Text.Blaze.Html5 as H hiding (map)
import qualified Text.Blaze.Html5.Attributes as A
-- | Request parameters: raw field name mapped to submitted raw values.
type Params = Map ByteString [ByteString]

-- | A simple formlet data type, collects errors.
data Formlet a = Formlet {
    formletValue :: Params -> Either [Text] a -- ^ extract/validate the value
  , formletName :: Maybe Text                 -- ^ field name, when the formlet is a single field
  , formletHtml :: Params -> Html             -- ^ render, pre-filled from the params
  }
-- Errors from both operands of '<*>' are accumulated: the left side is
-- probed with an empty error list and its errors are placed before the
-- right side's.  The combined formlet keeps a name only when exactly
-- one operand is named.
instance Applicative Formlet where
  pure a = Formlet { formletValue = const (return a)
                   , formletHtml = const mempty
                   , formletName = Nothing
                   }
  Formlet f n fhtml <*> Formlet v n' vhtml =
    Formlet { formletValue = \params ->
                case v params of
                  Right x -> f params <*> Right x
                  Left e -> case f params <*> Left [] of
                              Right x -> return x
                              Left e' -> Left $ e' ++ e
            , formletHtml = \params -> fhtml params ++ vhtml params
            , formletName = case (n,n') of
                              (Just{},Just{}) -> Nothing
                              _ -> n `mplus` n'
            }
-- | Normal instance: map over the validated value, leaving name and
-- rendering untouched.
instance Functor Formlet where
  fmap f formlet@Formlet{..} = formlet { formletValue = value }
    where value = \params ->
            case formletValue params of
              Left e -> Left e
              Right a -> Right (f a)
-- | The error message for the formlets is a text value; these instances
-- let 'throwError' and friends work with 'Text' directly.
instance Error Text where noMsg = ""; strMsg = T.pack
instance ErrorList Text where listMsg = return . T.pack
-- | Make a simple formlet: one named field whose first raw value is
-- looked up in the request parameters (a missing field is a validation
-- error; rendering simply gets 'Nothing').
formlet :: Text -> (Maybe Text -> Html) -> Formlet Text
formlet name html =
  Formlet { formletValue = \inputs ->
              case (M.lookup (encodeUtf8 name) inputs) of
                Just (value:_) -> return $ decodeUtf8 value
                _ -> throwError $ ["missing input: " ++ name]
          , formletHtml = \inputs ->
              case M.lookup (encodeUtf8 name) inputs of
                Just (value:_) -> html (Just $ decodeUtf8 value)
                _ -> html Nothing
          , formletName = Just name
          }
-- | Make an input required (non-empty text): empty submitted text
-- becomes a "required input" error (tagged with the field name when
-- known); every other outcome passes through.
req :: Formlet Text -> Formlet Text
req formlet@Formlet{..} =
  formlet { formletValue = \inputs ->
              case formletValue inputs of
                Right v | T.null v ->
                  throwError $ ["required input" ++ maybe "" (": "++) formletName]
                meh -> meh
          }
-- | Make an input optional: empty text maps to 'Nothing', any other
-- successful value is wrapped in 'Just' (errors pass through).
opt :: Formlet Text -> Formlet (Maybe Text)
opt formlet@Formlet{..} =
  formlet { formletValue = \inputs ->
              case formletValue inputs of
                Right v | T.null v -> Right Nothing
                meh -> Just <$> meh
          }
-- | Parse a form value with @parser@; parse failures are tagged with
-- the field name when one is known.
parse :: (a -> Either Text b) -> Formlet a -> Formlet b
parse parser formlet@Formlet{..} =
  formlet { formletValue = \inputs ->
              case formletValue inputs of
                Left e -> Left e
                Right x -> case parser x of
                             Right y -> Right y
                             Left e -> Left [e ++ maybe "" (": "++) formletName]
          }
-- | Integer parser for form input: the whole string must read as an
-- 'Integer'.
integer :: Text -> Either Text Integer
integer t =
  case readMay (T.unpack t) of
    Just v  -> Right v
    Nothing -> Left "expected integer"
-- | Total 'read': 'Just' the value when there is exactly one complete
-- parse of the whole string, 'Nothing' otherwise.
readMay :: Read a => String -> Maybe a
readMay s = case reads s of
  [(v, "")] -> Just v
  _         -> Nothing
-- | Wrap/transform a formlet's HTML without touching its validation or
-- name.
wrap :: (Html -> Html) -> Formlet Text -> Formlet Text
wrap f formlet@Formlet{..} = formlet { formletHtml = f . formletHtml }
-- | Make a text input formlet with a label; @def@ fills the field when
-- the request carries no value for it.
textInput :: Text -> Text -> Maybe Text -> Formlet Text
textInput name caption def =
  formlet name $ \value -> do
    p $ H.label $ do
      H.span $ toHtml $ caption ++ ": "
      input ! A.name (toValue name)
            ! A.value (toValue $ fromMaybe "" (value <|> def))
            ! A.class_ "text"
-- | Make a textarea input with a label; @def@ fills the area when the
-- request carries no value for it.
areaInput :: Text -> Text -> Maybe Text -> Formlet Text
areaInput name caption def =
  formlet name $ \value -> do
    p $ H.label $ do
      H.span $ toHtml $ caption ++ ": "
      textarea ! A.name (toValue name) $
        toHtml $ fromMaybe "" (value <|> def)
-- | Make a drop down input with a label.  The option matching the
-- submitted value is pre-selected; if no option matches, the @def@ key
-- is selected instead.
dropInput :: [(Text,Text)] -> Text -> Text -> Text -> Formlet Text
dropInput values name caption def =
  formlet name $ \value -> do
    p $ H.label $ do
      H.span $ toHtml $ caption ++ ": "
      select ! A.name (toValue name) $
        forM_ values $ \(key,title) -> do
          let nonSelected = all ((/=value) . Just . fst) values
              defaulting = nonSelected && def == key
              selected
                | Just key == value = (! A.selected "selected")
                | defaulting = (! A.selected "selected")
                | otherwise = id
          selected $ option ! A.value (toValue key) $ toHtml title
-- | Make a submit (captioned) button.  Plain 'Html' rather than a
-- 'Formlet': a submit button carries no value to validate.
submitInput :: Text -> Text -> Html
submitInput name caption = p $ do
  p $ H.input ! A.type_ "submit"
              ! A.name (toValue name)
              ! A.value (toValue caption)
-- | Make a list of options for use with the option formlet; a blank
-- entry is prepended so "no selection" stays representable.
options :: (o -> Text) -> (o -> Text) -> [o] -> [(Text,Text)]
options slug caption = (("", "") :) . map describe
  where describe o = (slug o, caption o)
-- | Lookup a real internal id from a slug: project the first option
-- satisfying the predicate, or 'Left' with an empty message when none
-- matches.
findOption :: (o -> Bool) -> [o] -> (o -> internalid) -> Either Text internalid
findOption match os field =
  maybe (Left "") (Right . field) (find match os)
| chrisdone/named-formlet | src/Text/Formlet.hs | bsd-3-clause | 6,792 | 0 | 23 | 2,381 | 2,073 | 1,084 | 989 | 145 | 3 |
module CallByValue.Evaluate( step, eval, value, lam, colam, CallByValue.Evaluate.app ) where
import Substitution
import Syntax
import Data.Maybe
-- | Perform one small-step call-by-value reduction on a statement.
-- Returns 'Nothing' when the statement is a normal form.  Clause order
-- is significant: evaluation-in-context and bind-substitution must be
-- tried before the value/covalue interaction rules.
step :: Stmt -> Maybe Stmt
-- If we are cutting against a non-value we can evaluate inside, do so
step (m `Cut` k) | Just (Just f, m) <- eval m = Just $ m `Cut` CoBind wildEcks (f (Var wildEcks) `Cut` k)
-- Two possibilities remain:
--  1) We are cutting against a value
--  2) We are cutting against a non-value we can't go inside: i.e. a bind
--
-- We tackle 2) first. NB: it doesn't matter if the bind is also a value, the result is confluent
step (Bind s a `Cut` k) = {- trace (prettyShow ("SHAREABLE", k)) $ -} Just $ substStmt (coTermSubst a k) s
-- The only remaining possibility is 1), so we can run the other clauses
step (Data v lr `Cut` CoData k l) = Just $ v `Cut` (case lr of Inl -> k; Inr -> l)
step (Tup v w `Cut` CoTup fs k) = Just $ (case fs of Fst -> v; Snd -> w) `Cut` k
step (Not k `Cut` CoNot m) = Just $ m `Cut` k
step (Lam x n `Cut` CoLam m k) = Just $ m `Cut` CoBind x (n `Cut` k)
step (v `Cut` CoBind x s) = Just $ substStmt (termSubst x v) s
-- We can't reduce if any one of these occurs:
--  1) The term is a variable
--  2) The coterm is a covariable
--  3) The term is Fix (Fix isn't reducible in CBV. Could add CoFix to do something here though)
step _ = Nothing
-- | Pull the first reducible (non-value) subterm out of a term,
-- together with an optional context-rebuilding function.
--
-- Invariant: eval m == Just (_, n) ==> not (value n)
-- This prevents infinite loops in the normaliser: there is no point pulling out bare variables, for example
eval :: Term -> Maybe (Maybe (Term -> Term), Term)
eval (Data m lr) = do (mb_f, m) <- eval m; return (Just $ flip Data lr . fromMaybe id mb_f, m)
-- Left-to-right: reduce the first tuple component before the second.
eval (Tup m n) | not (value m) = do (mb_f, m) <- eval m; return (Just $ \m -> Tup (fromMaybe id mb_f m) n, m)
               | not (value n) = do (mb_f, n) <- eval n; return (Just $ \n -> Tup m (fromMaybe id mb_f n), n)
eval m | value m   = Nothing
       | otherwise = Just (Nothing, m)
-- | Is this term a call-by-value value (i.e. irreducible by itself)?
-- Variables, negations and lambdas are values; data and tuples are
-- values iff their components are; 'Fix' and 'Bind' are not.
value :: Term -> Bool
value (Var _) = True
value (Data m _) = value m
value (Tup m n) = value m && value n
value (Not _) = True
value (Lam _ _) = True
value (Fix _ _) = False
--value (Bind (m `Cut` CoTup _ (CoVar b)) a) = value m && a == b
value (Bind _ _) = False
-- CBV Is Dual To CBN, Reloaded: Section 3, Proposition 3
-- | Lambda-abstraction encoded via negation and pairs, per the paper
-- above (no type signatures in the original; presumably
-- @lam :: Var -> Term -> Term@ etc. -- TODO confirm).
lam x m = Not (CoBind wildEcks (Var wildEcks `Cut` CoTup Fst (CoBind x (Var wildEcks `Cut` CoTup Snd (CoNot m)))))
-- | The dual coterm encoding of a call stack @m : k@.
colam m k = CoNot (Tup m (Not k))
-- | Application, encoded as a cut against the 'colam'-encoded call stack.
app m n = Bind (m `Cut` (n `colam` CoVar wildAlpha)) wildAlpha
| batterseapower/dual-calculus | CallByValue/Evaluate.hs | bsd-3-clause | 2,519 | 0 | 17 | 600 | 970 | 509 | 461 | 30 | 3 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Loading interface files
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module LoadIface (
-- Importing one thing
tcLookupImported_maybe, importDecl,
checkWiredInTyCon, ifCheckWiredInThing,
-- RnM/TcM functions
loadModuleInterface, loadModuleInterfaces,
loadSrcInterface, loadSrcInterface_maybe,
loadInterfaceForName, loadInterfaceForModule,
-- IfM functions
loadInterface,
loadSysInterface, loadUserInterface, loadPluginInterface,
findAndReadIface, readIface, -- Used when reading the module's old interface
loadDecls, -- Should move to TcIface and be renamed
initExternalPackageState,
moduleFreeHolesPrecise,
pprModIfaceSimple,
ifaceStats, pprModIface, showIface
) where
#include "HsVersions.h"
import {-# SOURCE #-} TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst,
tcIfaceFamInst, tcIfaceVectInfo, tcIfaceAnnotations )
import DynFlags
import IfaceSyn
import IfaceEnv
import HscTypes
import BasicTypes hiding (SuccessFlag(..))
import TcRnMonad
import Constants
import PrelNames
import PrelInfo
import PrimOp ( allThePrimOps, primOpFixity, primOpOcc )
import MkId ( seqId )
import TysPrim ( funTyConName )
import Rules
import TyCon
import Annotations
import InstEnv
import FamInstEnv
import Name
import NameEnv
import Avail
import Module
import Maybes
import ErrUtils
import Finder
import UniqFM
import SrcLoc
import Outputable
import BinIface
import Panic
import Util
import FastString
import Fingerprint
import Hooks
import FieldLabel
import RnModIface
import UniqDSet
import Control.Monad
import Data.IORef
import System.FilePath
{-
************************************************************************
* *
* tcImportDecl is the key function for "faulting in" *
* imported things
* *
************************************************************************
The main idea is this. We are chugging along type-checking source code, and
find a reference to GHC.Base.map. We call tcLookupGlobal, which doesn't find
it in the EPS type envt. So it
1 loads GHC.Base.hi
2 gets the decl for GHC.Base.map
3 typechecks it via tcIfaceDecl
4 and adds it to the type env in the EPS
Note that DURING STEP 4, we may find that map's type mentions a type
constructor that also
Notice that for imported things we read the current version from the EPS
mutable variable. This is important in situations like
...$(e1)...$(e2)...
where the code that e1 expands to might import some defns that
also turn out to be needed by the code that e2 expands to.
-}
-- | Look up an imported 'Name', consulting the combined HPT/EPS type
-- environment first and falling back to loading the defining module's
-- interface on demand.
--
-- Returns (Failed err) if we can't find the interface file for the thing
tcLookupImported_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
tcLookupImported_maybe name
  = do { hsc_env  <- getTopEnv
       ; mb_thing <- liftIO (lookupTypeHscEnv hsc_env name)
         -- Hit: wrap it up; miss: fault the declaration in.
       ; maybe (tcImportDecl_maybe name) (return . Succeeded) mb_thing }
-- | Fault in the declaration for an imported 'Name'.  Wired-in things
-- are returned directly (after making sure their home interface is
-- read for its instances/rules); everything else goes via 'importDecl'.
tcImportDecl_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
-- Entry point for *source-code* uses of importDecl
tcImportDecl_maybe name
  | Just thing <- wiredInNameTyThing_maybe name
  = do  { when (needWiredInHomeIface thing)
               (initIfaceTcRn (loadWiredInHomeIface name))
                    -- See Note [Loading instances for wired-in things]
        ; return (Succeeded thing) }
  | otherwise
  = initIfaceTcRn (importDecl name)
-- | Load the interface for @name@'s defining module and then look the
-- name up in the resulting External Package State type env.
importDecl :: Name -> IfM lcl (MaybeErr MsgDoc TyThing)
-- Get the TyThing for this Name from an interface file
-- It's not a wired-in thing -- the caller caught that
importDecl name
  = ASSERT( not (isWiredInName name) )
    do  { traceIf nd_doc
        -- Load the interface, which should populate the PTE
        ; mb_iface <- ASSERT2( isExternalName name, ppr name )
                      loadInterface nd_doc (nameModule name) ImportBySystem
        ; case mb_iface of {
                Failed err_msg  -> return (Failed err_msg) ;
                Succeeded _ -> do
        -- Now look it up again; this time we should find it
        { eps <- getEps
        ; case lookupTypeEnv (eps_PTE eps) name of
            Just thing -> return (Succeeded thing)
            -- Interface loaded but the name is still missing: almost
            -- certainly a stale/inconsistent .hi or .hi-boot file.
            Nothing    -> return $ Failed (ifPprDebug (found_things_msg eps) $$ not_found_msg)
    }}}
  where
    nd_doc = text "Need decl for" <+> ppr name
    not_found_msg = hang (text "Can't find interface-file declaration for" <+>
                                pprNameSpace (occNameSpace (nameOccName name)) <+> ppr name)
                       2 (vcat [text "Probable cause: bug in .hi-boot file, or inconsistent .hi file",
                                text "Use -ddump-if-trace to get an idea of which file caused the error"])
    found_things_msg eps =
        hang (text "Found the following declarations in" <+> ppr (nameModule name) <> colon)
           2 (vcat (map ppr $ filter is_interesting $ nameEnvElts $ eps_PTE eps))
      where
        is_interesting thing = nameModule name == nameModule (getName thing)
{-
************************************************************************
* *
Checks for wired-in things
* *
************************************************************************
Note [Loading instances for wired-in things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to make sure that we have at least *read* the interface files
for any module with an instance decl or RULE that we might want.
* If the instance decl is an orphan, we have a whole separate mechanism
(loadOrphanModules)
* If the instance decl is not an orphan, then the act of looking at the
TyCon or Class will force in the defining module for the
TyCon/Class, and hence the instance decl
* BUT, if the TyCon is a wired-in TyCon, we don't really need its interface;
but we must make sure we read its interface in case it has instances or
rules. That is what LoadIface.loadWiredInHomeIface does. It's called
from TcIface.{tcImportDecl, checkWiredInTyCon, ifCheckWiredInThing}
* HOWEVER, only do this for TyCons. There are no wired-in Classes. There
are some wired-in Ids, but we don't want to load their interfaces. For
example, Control.Exception.Base.recSelError is wired in, but that module
is compiled late in the base library, and we don't want to force it to
load before it's been compiled!
All of this is done by the type checker. The renamer plays no role.
(It used to, but no longer.)
-}
checkWiredInTyCon :: TyCon -> TcM ()
-- Ensure that the home module of the TyCon (and hence its instances)
-- are loaded. See Note [Loading instances for wired-in things]
-- It might not be a wired-in tycon (see the calls in TcUnify),
-- in which case this is a no-op.
checkWiredInTyCon tc
  | not (isWiredInName tc_name)
  = return ()
  | otherwise
  = do  { mod <- getModule
        ; traceIf (text "checkWiredInTyCon" <+> ppr tc_name $$ ppr mod)
          -- Only load when we are NOT compiling the tycon's own module:
        ; ASSERT( isExternalName tc_name )
          when (mod /= nameModule tc_name)
               (initIfaceTcRn (loadWiredInHomeIface tc_name))
                -- Don't look for (non-existent) Float.hi when
                -- compiling Float.hs, which mentions Float of course
                -- A bit yukky to call initIfaceTcRn here
        }
  where
    tc_name = tyConName tc
ifCheckWiredInThing :: TyThing -> IfL ()
-- Even though we are in an interface file, we want to make
-- sure the instances of a wired-in thing are loaded (imagine f :: Double -> Double)
-- Ditto want to ensure that RULES are loaded too
-- See Note [Loading instances for wired-in things]
ifCheckWiredInThing thing
  = do  { mod <- getIfModule
                -- Check whether we are typechecking the interface for this
                -- very module.  E.g when compiling the base library in --make mode
                -- we may typecheck GHC.Base.hi. At that point, GHC.Base is not in
                -- the HPT, so without the test we'll demand-load it into the PIT!
                -- C.f. the same test in checkWiredInTyCon above
        ; let name = getName thing
        ; ASSERT2( isExternalName name, ppr name )
          when (needWiredInHomeIface thing && mod /= nameModule name)
               (loadWiredInHomeIface name) }
-- | Do we need to read the home interface for this wired-in thing
-- before using it?  Only for 'TyCon's; see
-- Note [Loading instances for wired-in things].
needWiredInHomeIface :: TyThing -> Bool
needWiredInHomeIface thing = case thing of
  ATyCon {} -> True
  _         -> False
{-
************************************************************************
* *
loadSrcInterface, loadOrphanModules, loadInterfaceForName
These three are called from TcM-land
* *
************************************************************************
-}
-- | Load the interface corresponding to an @import@ directive in
-- source code.  On a failure, fail in the monad with an error message.
loadSrcInterface :: SDoc
                 -> ModuleName
                 -> IsBootInterface     -- {-# SOURCE #-} ?
                 -> Maybe FastString    -- "package", if any
                 -> RnM ModIface
loadSrcInterface doc mod want_boot maybe_pkg = do
  -- Delegate to the MaybeErr variant and turn a failure into a
  -- type-checker error.
  outcome <- loadSrcInterface_maybe doc mod want_boot maybe_pkg
  case outcome of
    Failed err      -> failWithTc err
    Succeeded iface -> return iface
-- | Like 'loadSrcInterface', but returns a 'MaybeErr'.
loadSrcInterface_maybe :: SDoc
                       -> ModuleName
                       -> IsBootInterface     -- {-# SOURCE #-} ?
                       -> Maybe FastString    -- "package", if any
                       -> RnM (MaybeErr MsgDoc ModIface)
loadSrcInterface_maybe doc mod want_boot maybe_pkg
  -- We must first find which Module this import refers to.  This involves
  -- calling the Finder, which as a side effect will search the filesystem
  -- and create a ModLocation.  If successful, loadIface will read the
  -- interface; it will call the Finder again, but the ModLocation will be
  -- cached from the first search.
  = do { hsc_env <- getTopEnv
       ; res <- liftIO $ findImportedModule hsc_env mod maybe_pkg
       ; case res of
           Found _ mod -> initIfaceTcRn $ loadInterface doc mod (ImportByUser want_boot)
           -- TODO: Make sure this error message is good
           err         -> return (Failed (cannotFindModule (hsc_dflags hsc_env) mod err)) }
-- | Load interface directly for a fully qualified 'Module'.  (This is a fairly
-- rare operation, but in particular it is used to load orphan modules
-- in order to pull their instances into the global package table and to
-- handle some operations in GHCi).
loadModuleInterface :: SDoc -> Module -> TcM ModIface
loadModuleInterface doc = initIfaceTcRn . loadSysInterface doc
-- | Load interfaces for a collection of modules.  A no-op for the
-- empty list; otherwise loads each one in a single 'initIfaceTcRn'
-- session, tagging the provenance doc with the module being loaded.
loadModuleInterfaces :: SDoc -> [Module] -> TcM ()
loadModuleInterfaces doc mods
  | null mods = return ()
  | otherwise = initIfaceTcRn $ forM_ mods $ \m ->
      loadSysInterface (doc <+> parens (ppr m)) m
-- | Loads the interface for a given Name.
-- Should only be called for an imported name;
-- otherwise loadSysInterface may not find the interface
loadInterfaceForName :: SDoc -> Name -> TcRn ModIface
loadInterfaceForName doc name
  = do { when debugIsOn $  -- Check pre-condition: the name must not be
                           -- local to the module being compiled
         do { this_mod <- getModule
            ; MASSERT2( not (nameIsLocalOrFrom this_mod name), ppr name <+> parens doc ) }
      ; ASSERT2( isExternalName name, ppr name )
        initIfaceTcRn $ loadSysInterface doc (nameModule name) }
-- | Loads the interface for a given Module.
loadInterfaceForModule :: SDoc -> Module -> TcRn ModIface
loadInterfaceForModule doc m
  = do
    -- Should not be called with this module: home-package modules are
    -- expected to be in the HPT already (see Note [Home module load error])
    when debugIsOn $ do
      this_mod <- getModule
      MASSERT2( this_mod /= m, ppr m <+> parens doc )
    initIfaceTcRn $ loadSysInterface doc m
{-
*********************************************************
* *
loadInterface
The main function to load an interface
for an imported module, and put it in
the External Package State
* *
*********************************************************
-}
-- | An 'IfM' function to load the home interface for a wired-in thing,
-- so that we're sure that we see its instance declarations and rules
-- See Note [Loading instances for wired-in things]
loadWiredInHomeIface :: Name -> IfM lcl ()
loadWiredInHomeIface name
  = ASSERT( isWiredInName name )
    -- 'void' (Control.Monad) replaces the manual @_ <- m; return ()@.
    void (loadSysInterface doc (nameModule name))
  where
    doc = text "Need home interface for wired-in thing" <+> ppr name
------------------
-- | Loads a system interface and throws an exception if it fails.
-- A thin wrapper that fixes the provenance to 'ImportBySystem'.
loadSysInterface :: SDoc -> Module -> IfM lcl ModIface
loadSysInterface doc the_mod = loadInterfaceWithException doc the_mod ImportBySystem
------------------
-- | Loads a user interface and throws an exception if it fails.  The first
-- parameter indicates whether we should import the boot variant of the module.
loadUserInterface :: Bool -> SDoc -> Module -> IfM lcl ModIface
loadUserInterface is_boot doc the_mod =
  loadInterfaceWithException doc the_mod (ImportByUser is_boot)
-- | Loads a plugin's interface, throwing an exception on failure.
loadPluginInterface :: SDoc -> Module -> IfM lcl ModIface
loadPluginInterface doc the_mod =
  loadInterfaceWithException doc the_mod ImportByPlugin
------------------
-- | A wrapper for 'loadInterface' that throws an exception if it fails.
loadInterfaceWithException :: SDoc -> Module -> WhereFrom -> IfM lcl ModIface
loadInterfaceWithException doc the_mod where_from =
  withException $ loadInterface doc the_mod where_from
------------------
loadInterface :: SDoc -> Module -> WhereFrom
              -> IfM lcl (MaybeErr MsgDoc ModIface)
-- loadInterface looks in both the HPT and PIT for the required interface
-- If not found, it loads it, and puts it in the PIT (always).
-- If it can't find a suitable interface file, we
--      a) modify the PackageIfaceTable to have an empty entry
--              (to avoid repeated complaints)
--      b) return (Left message)
--
-- It's not necessarily an error for there not to be an interface
-- file -- perhaps the module has changed, and that interface
-- is no longer used
loadInterface doc_str mod from
  | isHoleModule mod
  -- Hole modules get special treatment
  = do dflags <- getDynFlags
       -- Redo search for our local hole module
       loadInterface doc_str (mkModule (thisPackage dflags) (moduleName mod)) from
  | otherwise
  = do  {       -- Read the state
          (eps,hpt) <- getEpsAndHpt
        ; gbl_env <- getGblEnv
        ; traceIf (text "Considering whether to load" <+> ppr mod <+> ppr from)
                -- Check whether we have the interface already
        ; dflags <- getDynFlags
        ; case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of {
            Just iface
                -> return (Succeeded iface) ;   -- Already loaded
                        -- The (src_imp == mi_boot iface) test checks that the already-loaded
                        -- interface isn't a boot iface.  This can conceivably happen,
                        -- if an earlier import had a before we got to real imports.   I think.
            _ -> do {
          -- READ THE MODULE IN
        ; read_result <- case (wantHiBootFile dflags eps mod from) of
                           Failed err             -> return (Failed err)
                           Succeeded hi_boot_file ->
                            -- Stoutly warn against an EPS-updating import
                            -- of one's own boot file! (one-shot only)
                            --See Note [Do not update EPS with your own hi-boot]
                            -- in MkIface.
                            WARN( hi_boot_file &&
                                  fmap fst (if_rec_types gbl_env) == Just mod,
                                  ppr mod )
                            computeInterface doc_str hi_boot_file mod
        ; case read_result of {
            Failed err -> do
                -- Cache the failure as an empty iface so we don't
                -- search the filesystem again for the same module.
                { let fake_iface = emptyModIface mod
                ; updateEps_ $ \eps ->
                        eps { eps_PIT = extendModuleEnv (eps_PIT eps) (mi_module fake_iface) fake_iface }
                        -- Not found, so add an empty iface to
                        -- the EPS map so that we don't look again
                ; return (Failed err) } ;
        -- Found and parsed!
        -- We used to have a sanity check here that looked for:
        --  * System importing ..
        --  * a home package module ..
        --  * that we know nothing about (mb_dep == Nothing)!
        --
        -- But this is no longer valid because thNameToGhcName allows users to
        -- cause the system to load arbitrary interfaces (by supplying an appropriate
        -- Template Haskell original-name).
            Succeeded (iface, loc) ->
        let
            loc_doc = text loc
        in
        initIfaceLcl (mi_semantic_module iface) loc_doc (mi_boot iface) $ do
        --      Load the new ModIface into the External Package State
        -- Even home-package interfaces loaded by loadInterface
        --      (which only happens in OneShot mode; in Batch/Interactive
        --      mode, home-package modules are loaded one by one into the HPT)
        -- are put in the EPS.
        --
        -- The main thing is to add the ModIface to the PIT, but
        -- we also take the
        --      IfaceDecls, IfaceClsInst, IfaceFamInst, IfaceRules, IfaceVectInfo
        -- out of the ModIface and put them into the big EPS pools
        -- NB: *first* we do loadDecl, so that the provenance of all the locally-defined
        ---     names is done correctly (notably, whether this is an .hi file or .hi-boot file).
        --     If we do loadExport first the wrong info gets into the cache (unless we
        --      explicitly tag each export which seems a bit of a bore)
        ; ignore_prags      <- goptM Opt_IgnoreInterfacePragmas
        ; new_eps_decls     <- loadDecls ignore_prags (mi_decls iface)
        ; new_eps_insts     <- mapM tcIfaceInst (mi_insts iface)
        ; new_eps_fam_insts <- mapM tcIfaceFamInst (mi_fam_insts iface)
        ; new_eps_rules     <- tcIfaceRules ignore_prags (mi_rules iface)
        ; new_eps_anns      <- tcIfaceAnnotations (mi_anns iface)
        ; new_eps_vect_info <- tcIfaceVectInfo mod (mkNameEnv new_eps_decls) (mi_vect_info iface)
        -- Blank out the per-module payloads: once the decls/insts/rules
        -- live in the EPS pools they must never be read from the PIT copy.
        ; let { final_iface = iface {
                                mi_decls     = panic "No mi_decls in PIT",
                                mi_insts     = panic "No mi_insts in PIT",
                                mi_fam_insts = panic "No mi_fam_insts in PIT",
                                mi_rules     = panic "No mi_rules in PIT",
                                mi_anns      = panic "No mi_anns in PIT"
                              }
               }
        ; updateEps_  $ \ eps ->
           if elemModuleEnv mod (eps_PIT eps) || is_external_sig dflags iface
                then eps else
                eps {
                  eps_PIT          = extendModuleEnv (eps_PIT eps) mod final_iface,
                  eps_PTE          = addDeclsToPTE   (eps_PTE eps) new_eps_decls,
                  eps_rule_base    = extendRuleBaseList (eps_rule_base eps)
                                                        new_eps_rules,
                  eps_inst_env     = extendInstEnvList (eps_inst_env eps)
                                                       new_eps_insts,
                  eps_fam_inst_env = extendFamInstEnvList (eps_fam_inst_env eps)
                                                          new_eps_fam_insts,
                  eps_vect_info    = plusVectInfo (eps_vect_info eps)
                                                  new_eps_vect_info,
                  eps_ann_env      = extendAnnEnvList (eps_ann_env eps)
                                                      new_eps_anns,
                  eps_mod_fam_inst_env
                                   = let
                                       fam_inst_env =
                                         extendFamInstEnvList emptyFamInstEnv
                                                              new_eps_fam_insts
                                     in
                                     extendModuleEnv (eps_mod_fam_inst_env eps)
                                                     mod
                                                     fam_inst_env,
                  eps_stats        = addEpsInStats (eps_stats eps)
                                                   (length new_eps_decls)
                                                   (length new_eps_insts)
                                                   (length new_eps_rules) }
        ; return (Succeeded final_iface)
    }}}}
-- | Returns @True@ if a 'ModIface' comes from an external package.
-- In this case, we should NOT load it into the EPS; the entities
-- should instead come from the local merged signature interface.
is_external_sig :: DynFlags -> ModIface -> Bool
is_external_sig dflags iface = is_sig && not is_home
  where
    -- A signature interface: semantic module differs from the identity module.
    is_sig  = mi_semantic_module iface /= mi_module iface
    -- Does the interface belong to the package currently being compiled?
    is_home = moduleUnitId (mi_module iface) == thisPackage dflags
-- | This is an improved version of 'findAndReadIface' which can also
-- handle the case when a user requests @p[A=<B>]:M@ but we only
-- have an interface for @p[A=<A>]:M@ (the indefinite interface.
-- If we are not trying to build code, we load the interface we have,
-- *instantiating it* according to how the holes are specified.
-- (Of course, if we're actually building code, this is a hard error.)
--
-- In the presence of holes, 'computeInterface' has an important invariant:
-- to load module M, its set of transitively reachable requirements must
-- have an up-to-date local hi file for that requirement.  Note that if
-- we are loading the interface of a requirement, this does not
-- apply to the requirement itself; e.g., @p[A=<A>]:A@ does not require
-- A.hi to be up-to-date (and indeed, we MUST NOT attempt to read A.hi, unless
-- we are actually typechecking p.)
computeInterface ::
       SDoc -> IsBootInterface -> Module
    -> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
computeInterface doc_str hi_boot_file mod0 = do
  MASSERT( not (isHoleModule mod0) )
  dflags <- getDynFlags
  case splitModuleInsts mod0 of
      -- Indefinite unit being typechecked: read the uninstantiated
      -- interface and rename it on the fly according to the instantiation.
      (imod, Just indef) | not (unitIdIsDefinite (thisPackage dflags)) -> do
          r <- findAndReadIface doc_str imod hi_boot_file
          case r of
              Succeeded (iface0, path) -> do
                  hsc_env <- getTopEnv
                  -- NB: inner 'r' shadows the outer read result
                  r <- liftIO (rnModIface hsc_env (indefUnitIdInsts (indefModuleUnitId indef)) Nothing iface0)
                  return (Succeeded (r, path))
              Failed err -> return (Failed err)
      -- Ordinary (definite) module: just read its interface file.
      (mod, _) ->
          findAndReadIface doc_str mod hi_boot_file
-- | Compute the signatures which must be compiled in order to
-- load the interface for a 'Module'.  The output of this function
-- is always a subset of 'moduleFreeHoles'; it is more precise
-- because in signature @p[A=<A>,B=<B>]:B@, although the free holes
-- are A and B, B might not depend on A at all!
--
-- If this is invoked on a signature, this does NOT include the
-- signature itself; e.g. precise free module holes of
-- @p[A=<A>,B=<B>]:B@ never includes B.
moduleFreeHolesPrecise
    :: SDoc -> Module
    -> TcRnIf gbl lcl (MaybeErr MsgDoc (UniqDSet ModuleName))
moduleFreeHolesPrecise doc_str mod
 | moduleIsDefinite mod = return (Succeeded emptyUniqDSet)
 | otherwise =
   case splitModuleInsts mod of
    (imod, Just indef) -> do
        let insts = indefUnitIdInsts (indefModuleUnitId indef)
        traceIf (text "Considering whether to load" <+> ppr mod <+>
                 text "to compute precise free module holes")
        (eps, hpt) <- getEpsAndHpt
        dflags <- getDynFlags
        -- Try the already-loaded interfaces and the dedicated free-holes
        -- cache before reading the interface file from disk.
        case tryEpsAndHpt dflags eps hpt `firstJust` tryDepsCache eps imod insts of
            Just r -> return (Succeeded r)
            Nothing -> readAndCache imod insts
    (_, Nothing) -> return (Succeeded emptyUniqDSet)
  where
    tryEpsAndHpt dflags eps hpt =
        fmap mi_free_holes (lookupIfaceByModule dflags hpt (eps_PIT eps) mod)
    tryDepsCache eps imod insts =
        case lookupInstalledModuleEnv (eps_free_holes eps) imod of
            Just ifhs  -> Just (renameFreeHoles ifhs insts)
            _otherwise -> Nothing
    readAndCache imod insts = do
        mb_iface <- findAndReadIface (text "moduleFreeHolesPrecise" <+> doc_str) imod False
        case mb_iface of
            Succeeded (iface, _) -> do
                let ifhs = mi_free_holes iface
                -- Cache it
                updateEps_ (\eps ->
                    eps { eps_free_holes = extendInstalledModuleEnv (eps_free_holes eps) imod ifhs })
                return (Succeeded (renameFreeHoles ifhs insts))
            Failed err -> return (Failed err)
wantHiBootFile :: DynFlags -> ExternalPackageState -> Module -> WhereFrom
               -> MaybeErr MsgDoc IsBootInterface
-- Figure out whether we want Foo.hi or Foo.hi-boot
wantHiBootFile dflags eps mod from
  = case from of
       ImportByUser usr_boot
          -- {-# SOURCE #-} imports of other-package modules are illegal
          | usr_boot && not this_package
          -> Failed (badSourceImport mod)
          | otherwise -> Succeeded usr_boot
       ImportByPlugin
          -> Succeeded False
       ImportBySystem
          | not this_package   -- If the module to be imported is not from this package
          -> Succeeded False   -- don't look it up in eps_is_boot, because that is keyed
                               -- on the ModuleName of *home-package* modules only.
                               -- We never import boot modules from other packages!
          | otherwise
          -> case lookupUFM (eps_is_boot eps) (moduleName mod) of
                Just (_, is_boot) -> Succeeded is_boot
                Nothing           -> Succeeded False
                     -- The boot-ness of the requested interface,
                     -- based on the dependencies in directly-imported modules
  where
    this_package = thisPackage dflags == moduleUnitId mod
-- | Error message for a {-# SOURCE #-} import of a module that lives
-- in a different package (boot files only make sense within a package).
badSourceImport :: Module -> SDoc
badSourceImport mod
  = hang (text "You cannot {-# SOURCE #-} import a module from another package")
       2 (text "but" <+> quotes (ppr mod) <+> text "is from package"
          <+> quotes (ppr (moduleUnitId mod)))
    -- 'text' replaces the lone 'ptext (sLit ...)' for consistency with
    -- the rest of the function; the two forms render identically.
-----------------------------------------------------
-- Loading type/class/value decls
-- We pass the full Module name here, replete with
-- its package info, so that we can build a Name for
-- each binder with the right package info in it
-- All subsequent lookups, including crucially lookups during typechecking
-- the declaration itself, will find the fully-glorious Name
--
-- We handle ATs specially. They are not main declarations, but also not
-- implicit things (in particular, adding them to `implicitTyThings' would mess
-- things up in the renaming/type checking of source programs).
-----------------------------------------------------
-- | Extend a package type environment with a batch of freshly loaded
-- declarations (a straight alias for 'extendNameEnvList').
addDeclsToPTE :: PackageTypeEnv -> [(Name,TyThing)] -> PackageTypeEnv
addDeclsToPTE = extendNameEnvList
-- | Load (lazily typecheck) a batch of fingerprinted interface
-- declarations, returning all resulting (Name, TyThing) bindings.
loadDecls :: Bool
          -> [(Fingerprint, IfaceDecl)]
          -> IfL [(Name,TyThing)]
loadDecls ignore_prags ver_decls =
  fmap concat (mapM (loadDecl ignore_prags) ver_decls)
-- | Load one interface declaration: intern its binder names, build a
-- lazily-forced 'TyThing' for it, and pair every implicit binder with
-- its implicit TyThing.  The laziness discipline here is delicate; see
-- Note [Tricky iface loop] below before changing anything.
loadDecl :: Bool                    -- Don't load pragmas into the decl pool
         -> (Fingerprint, IfaceDecl)
         -> IfL [(Name,TyThing)]   -- The list can be poked eagerly, but the
                                   -- TyThings are forkM'd thunks
loadDecl ignore_prags (_version, decl)
  = do  {       -- Populate the name cache with final versions of all
                -- the names associated with the decl
          main_name      <- lookupIfaceTop (ifName decl)
        -- Typecheck the thing, lazily
        -- NB. Firstly, the laziness is there in case we never need the
        -- declaration (in one-shot mode), and secondly it is there so that
        -- we don't look up the occurrence of a name before calling mk_new_bndr
        -- on the binder.  This is important because we must get the right name
        -- which includes its nameParent.
        ; thing <- forkM doc $ do { bumpDeclStats main_name
                                  ; tcIfaceDecl ignore_prags decl }
        -- Populate the type environment with the implicitTyThings too.
        --
        -- Note [Tricky iface loop]
        -- ~~~~~~~~~~~~~~~~~~~~~~~~
        -- Summary: The delicate point here is that 'mini-env' must be
        -- buildable from 'thing' without demanding any of the things
        -- 'forkM'd by tcIfaceDecl.
        --
        -- In more detail: Consider the example
        --      data T a = MkT { x :: T a }
        -- The implicitTyThings of T are:  [ <datacon MkT>, <selector x>]
        -- (plus their workers, wrappers, coercions etc etc)
        --
        -- We want to return an environment
        --      [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ]
        -- (where the "MkT" is the *Name* associated with MkT, etc.)
        --
        -- We do this by mapping the implicit_names to the associated
        -- TyThings.  By the invariant on ifaceDeclImplicitBndrs and
        -- implicitTyThings, we can use getOccName on the implicit
        -- TyThings to make this association: each Name's OccName should
        -- be the OccName of exactly one implicitTyThing.  So the key is
        -- to define a "mini-env"
        --
        -- [ 'MkT' -> <datacon MkT>, 'x' -> <selector x>, ... ]
        -- where the 'MkT' here is the *OccName* associated with MkT.
        --
        -- However, there is a subtlety: due to how type checking needs
        -- to be staged, we can't poke on the forkM'd thunks inside the
        -- implicitTyThings while building this mini-env.
        -- If we poke these thunks too early, two problems could happen:
        --    (1) When processing mutually recursive modules across
        --        hs-boot boundaries, poking too early will do the
        --        type-checking before the recursive knot has been tied,
        --        so things will be type-checked in the wrong
        --        environment, and necessary variables won't be in
        --        scope.
        --
        --    (2) Looking up one OccName in the mini_env will cause
        --        others to be looked up, which might cause that
        --        original one to be looked up again, and hence loop.
        --
        -- The code below works because of the following invariant:
        -- getOccName on a TyThing does not force the suspended type
        -- checks in order to extract the name. For example, we don't
        -- poke on the "T a" type of <selector x> on the way to
        -- extracting <selector x>'s OccName. Of course, there is no
        -- reason in principle why getting the OccName should force the
        -- thunks, but this means we need to be careful in
        -- implicitTyThings and its helper functions.
        --
        -- All a bit too finely-balanced for my liking.
        -- This mini-env and lookup function mediates between the
        --'Name's n and the map from 'OccName's to the implicit TyThings
        ; let mini_env = mkOccEnv [(getOccName t, t) | t <- implicitTyThings thing]
              lookup n = case lookupOccEnv mini_env (getOccName n) of
                           Just thing -> thing
                           Nothing    ->
                             pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl))
        ; implicit_names <- mapM lookupIfaceTop (ifaceDeclImplicitBndrs decl)
--         ; traceIf (text "Loading decl for " <> ppr main_name $$ ppr implicit_names)
        ; return $ (main_name, thing) :
                      -- uses the invariant that implicit_names and
                      -- implicitTyThings are bijective
                      [(n, lookup n) | n <- implicit_names]
        }
  where
    doc = text "Declaration for" <+> ppr (ifName decl)
-- | Record that one more interface declaration has actually been used
-- (incremented lazily, only when a forkM'd decl thunk is forced).
bumpDeclStats :: Name -> IfL ()
bumpDeclStats name = do
  traceIf (text "Loading decl for" <+> ppr name)
  updateEps_ $ \eps ->
    let stats = eps_stats eps
    in eps { eps_stats = stats { n_decls_out = n_decls_out stats + 1 } }
{-
*********************************************************
* *
\subsection{Reading an interface file}
* *
*********************************************************
Note [Home module load error]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the sought-for interface is in the current package (as determined
by -package-name flag) then it jolly well should already be in the HPT
because we process home-package modules in dependency order. (Except
in one-shot mode; see notes with hsc_HPT decl in HscTypes).
It is possible (though hard) to get this error through user behaviour.
* Suppose package P (modules P1, P2) depends on package Q (modules Q1,
Q2, with Q2 importing Q1)
* We compile both packages.
* Now we edit package Q so that it somehow depends on P
* Now recompile Q with --make (without recompiling P).
* Then Q1 imports, say, P1, which in turn depends on Q2. So Q2
is a home-package module which is not yet in the HPT! Disaster.
This actually happened with P=base, Q=ghc-prim, via the AMP warnings.
See Trac #8320.
-}
-- | Locate and parse the interface file for an installed module,
-- returning the parsed 'ModIface' together with the path it was read
-- from.  Also validates the interface's module name and, when
-- compiling -dynamic-too, cross-checks the dynamic interface's hash.
findAndReadIface :: SDoc -> InstalledModule
                 -> IsBootInterface     -- True  <=> Look for a .hi-boot file
                                        -- False <=> Look for .hi file
                 -> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
        -- Nothing <=> file not found, or unreadable, or illegible
        -- Just x  <=> successfully found and parsed
        -- It *doesn't* add an error to the monad, because
        -- sometimes it's ok to fail... see notes with loadInterface
findAndReadIface doc_str mod hi_boot_file
  = do traceIf (sep [hsep [text "Reading",
                           if hi_boot_file
                             then text "[boot]"
                             else Outputable.empty,
                           text "interface for",
                           ppr mod <> semi],
                     nest 4 (text "reason:" <+> doc_str)])
       -- Check for GHC.Prim, and return its static interface
       -- TODO: make this check a function
       if mod `installedModuleEq` gHC_PRIM
           then do
               iface <- getHooked ghcPrimIfaceHook ghcPrimIface
               return (Succeeded (iface,
                                   "<built in interface for GHC.Prim>"))
           else do
               dflags <- getDynFlags
               -- Look for the file
               hsc_env <- getTopEnv
               mb_found <- liftIO (findExactModule hsc_env mod)
               case mb_found of
                   InstalledFound loc mod -> do
                       -- Found file, so read it
                       let file_path = addBootSuffix_maybe hi_boot_file
                                                           (ml_hi_file loc)
                       -- See Note [Home module load error]
                       if installedModuleUnitId mod `installedUnitIdEq` thisPackage dflags &&
                          not (isOneShot (ghcMode dflags))
                           then return (Failed (homeModError mod loc))
                           else do r <- read_file file_path
                                   checkBuildDynamicToo r
                                   return r
                   err -> do
                       traceIf (text "...not found")
                       dflags <- getDynFlags
                       return (Failed (cannotFindInterface dflags
                                           (installedModuleName mod) err))
    where read_file file_path = do
              traceIf (text "readIFace" <+> text file_path)
              read_result <- readIface mod file_path
              case read_result of
                Failed err -> return (Failed (badIfaceFile file_path err))
                Succeeded iface
                    -- Reject an interface whose recorded module doesn't
                    -- match the one we asked for (stale/misplaced file).
                    | not (mod `installedModuleEq` mi_module iface) ->
                      return (Failed (wrongIfaceModErr iface mod file_path))
                    | otherwise ->
                      return (Succeeded (iface, file_path))
                    -- Don't forget to fill in the package name...
          checkBuildDynamicToo (Succeeded (iface, filePath)) = do
              dflags <- getDynFlags
              -- Indefinite interfaces are ALWAYS non-dynamic, and
              -- that's OK.
              let is_definite_iface = moduleIsDefinite (mi_module iface)
              when is_definite_iface $
                whenGeneratingDynamicToo dflags $ withDoDynamicToo $ do
                  let ref = canGenerateDynamicToo dflags
                      dynFilePath = addBootSuffix_maybe hi_boot_file
                                  $ replaceExtension filePath (dynHiSuf dflags)
                  r <- read_file dynFilePath
                  case r of
                      Succeeded (dynIface, _)
                       | mi_mod_hash iface == mi_mod_hash dynIface ->
                          return ()
                       | otherwise ->
                          do traceIf (text "Dynamic hash doesn't match")
                             liftIO $ writeIORef ref False
                      Failed err ->
                          do traceIf (text "Failed to load dynamic interface file:" $$ err)
                             liftIO $ writeIORef ref False
          checkBuildDynamicToo _ = return ()
-- @readIface@ tries just the one file.
-- | @readIface@ tries just the one file.
--
-- @Failed err@      <=> file not found, or unreadable, or illegible.
-- @Succeeded iface@ <=> successfully found and parsed.
readIface :: InstalledModule -> FilePath
          -> TcRnIf gbl lcl (MaybeErr MsgDoc ModIface)
readIface wanted_mod file_path = do
    outcome <- tryMostM $
                   readBinIface CheckHiWay QuietBinIFaceReading file_path
    case outcome of
      Left exn ->
          -- Reading or deserialising blew up; turn the exception into a doc.
          return (Failed (text (showException exn)))
      Right iface
          -- Make sure the file we found really contains the module we
          -- asked for before handing it back.
          | wanted_mod `installedModuleEq` mi_module iface ->
              return (Succeeded iface)
          | otherwise ->
              return (Failed (hiModuleNameMismatchWarn wanted_mod
                                                       (mi_module iface)))
{-
*********************************************************
* *
Initialising the external package state
* *
*********************************************************
-}
-- | The initial 'ExternalPackageState': every table and environment is
-- empty, except that the rule base is seeded with GHC's built-in
-- rewrite rules (and the statistics account for them).
initExternalPackageState :: ExternalPackageState
initExternalPackageState
  = EPS {
      eps_is_boot      = emptyUFM,
      eps_PIT          = emptyPackageIfaceTable,
      eps_free_holes   = emptyInstalledModuleEnv,
      eps_PTE          = emptyTypeEnv,
      eps_inst_env     = emptyInstEnv,
      eps_fam_inst_env = emptyFamInstEnv,
      eps_rule_base    = mkRuleBase builtinRules,
        -- Initialise the EPS rule pool with the built-in rules
      eps_mod_fam_inst_env
                       = emptyModuleEnv,
      eps_vect_info    = noVectInfo,
      eps_ann_env      = emptyAnnEnv,
      -- Counters start at zero, except that the built-in rules loaded
      -- above are already counted as "rules read".
      eps_stats = EpsStats { n_ifaces_in = 0, n_decls_in = 0, n_decls_out = 0
                           , n_insts_in = 0, n_insts_out = 0
                           , n_rules_in = length builtinRules, n_rules_out = 0 }
    }
{-
*********************************************************
* *
Wired-in interface for GHC.Prim
* *
*********************************************************
-}
-- | The hand-wired interface for "GHC.Prim": no declarations of its
-- own, just the export list plus the fixities of @seq@, the function
-- arrow, and the primops.
ghcPrimIface :: ModIface
ghcPrimIface = (emptyModIface gHC_PRIM)
    { mi_exports  = ghcPrimExports
    , mi_decls    = []
    , mi_fixities = prim_fixities
    , mi_fix_fn   = mkIfaceFixCache prim_fixities
    }
  where
    prim_fixities = (getOccName seqId, Fixity "0" 0 InfixR)    -- seq is infixr 0
                  : (occName funTyConName, funTyFixity)        -- trac #10145
                  : mapMaybe op_fixity allThePrimOps

    -- Pair each primop with its declared fixity, if it has one.
    op_fixity op = (,) (primOpOcc op) <$> primOpFixity op
{-
*********************************************************
* *
\subsection{Statistics}
* *
*********************************************************
-}
-- | Render the interface-loading statistics accumulated in the EPS.
ifaceStats :: ExternalPackageState -> SDoc
ifaceStats eps = hcat [text "Renamer stats: ", details]
  where
    st = eps_stats eps

    -- One "<n-out> <things> imported, out of <n-in> read" row.
    in_out n_out what n_in = hsep [ int n_out, text what
                                  , int n_in, text "read" ]

    details = vcat
        [ int (n_ifaces_in st) <+> text "interfaces read"
        , in_out (n_decls_out st)
                 "type/class/variable imported, out of"
                 (n_decls_in st)
        , in_out (n_insts_out st)
                 "instance decls imported, out of"
                 (n_insts_in st)
        , in_out (n_rules_out st)
                 "rule decls imported, out of"
                 (n_rules_in st)
        ]
{-
************************************************************************
* *
Printing interfaces
* *
************************************************************************
-}
-- | Read binary interface, and print it out
-- | Read a binary interface file and dump its rendering via the log action.
showIface :: HscEnv -> FilePath -> IO ()
showIface hsc_env filename = do
    -- Skip the hi-way check; we don't want to worry about profiled vs.
    -- non-profiled interfaces, for example.
    parsed <- initTcRnIf 's' hsc_env () () $
                  readBinIface IgnoreHiWay TraceBinIFaceReading filename
    let dflags = hsc_dflags hsc_env
        doc    = pprModIface parsed
    log_action dflags dflags NoReason SevDump noSrcSpan defaultDumpStyle doc
-- Show a ModIface but don't display details; suitable for ModIfaces stored in
-- the EPT.
-- | Show a ModIface but don't display details; suitable for ModIfaces
-- stored in the EPT.
pprModIfaceSimple :: ModIface -> SDoc
pprModIfaceSimple iface =
    vcat [ ppr (mi_module iface)
         , pprDeps (mi_deps iface)
         , nest 2 export_list
         ]
  where
    export_list = vcat (map pprExport (mi_exports iface))
-- | Show a ModIface in full: header line with flags and version, the
-- various hashes, then exports, dependencies, usages, annotations,
-- fixities, declarations, instances, rules, and trust info, in the
-- order they are checked when diffing interfaces.
pprModIface :: ModIface -> SDoc
-- Show a ModIface
pprModIface iface
 = vcat [ text "interface"
           <+> ppr (mi_module iface) <+> pp_hsc_src (mi_hsc_src iface)
           <+> (if mi_orphan iface then text "[orphan module]" else Outputable.empty)
           <+> (if mi_finsts iface then text "[family instance module]" else Outputable.empty)
           <+> (if mi_hpc iface then text "[hpc]" else Outputable.empty)
           <+> integer hiVersion
        , nest 2 (text "interface hash:" <+> ppr (mi_iface_hash iface))
        , nest 2 (text "ABI hash:" <+> ppr (mi_mod_hash iface))
        , nest 2 (text "export-list hash:" <+> ppr (mi_exp_hash iface))
        , nest 2 (text "orphan hash:" <+> ppr (mi_orphan_hash iface))
        , nest 2 (text "flag hash:" <+> ppr (mi_flag_hash iface))
        , nest 2 (text "sig of:" <+> ppr (mi_sig_of iface))
        , nest 2 (text "used TH splices:" <+> ppr (mi_used_th iface))
        , nest 2 (text "where")
        , text "exports:"
        , nest 2 (vcat (map pprExport (mi_exports iface)))
        , pprDeps (mi_deps iface)
        , vcat (map pprUsage (mi_usages iface))
        , vcat (map pprIfaceAnnotation (mi_anns iface))
        , pprFixities (mi_fixities iface)
        , vcat [ppr ver $$ nest 2 (ppr decl) | (ver,decl) <- mi_decls iface]
        , vcat (map ppr (mi_insts iface))
        , vcat (map ppr (mi_fam_insts iface))
        , vcat (map ppr (mi_rules iface))
        , pprVectInfo (mi_vect_info iface)
        , ppr (mi_warns iface)
        , pprTrustInfo (mi_trust iface)
        , pprTrustPkg (mi_trust_pkg iface)
        ]
  where
    -- Tag the header with the kind of source file this interface came from.
    pp_hsc_src HsBootFile = text "[boot]"
    pp_hsc_src HsigFile = text "[hsig]"
    pp_hsc_src HsSrcFile = Outputable.empty
{-
When printing export lists, we print like this:
Avail f f
AvailTC C [C, x, y] C(x,y)
AvailTC C [x, y] C!(x,y) -- Exporting x, y but not C
-}
-- | Print one export item, e.g. @C(x,y)@ for a type with children, or
-- @C|{x,y}@ when the parent itself is not exported.
pprExport :: IfaceExport -> SDoc
pprExport avail =
    case avail of
      Avail n         -> ppr n
      AvailTC _ [] [] -> Outputable.empty
      AvailTC n ns0 fs
        | n' : ns <- ns0, n == n' -> ppr n <> children ns fs
        | otherwise               -> ppr n <> vbar <> children ns0 fs
  where
    children [] []    = Outputable.empty
    children names fs =
        braces (hsep (map ppr names ++ map (ppr . flLabel) fs))
-- | Print one recorded usage from an interface file.
pprUsage :: Usage -> SDoc
pprUsage usage = case usage of
    UsagePackageModule{} ->
        pprUsageImport usage usg_mod
    UsageHomeModule{} ->
        -- Home modules also record the export-list hash and the hashes
        -- of the individual entities that were used.
        pprUsageImport usage usg_mod_name $$
        nest 2 (
            maybe Outputable.empty (\v -> text "exports: " <> ppr v)
                  (usg_exports usage) $$
            vcat [ ppr n <+> ppr v | (n, v) <- usg_entities usage ]
        )
    UsageFile{} ->
        hsep [ text "addDependentFile"
             , doubleQuotes (text (usg_file_path usage)) ]
    UsageMergedRequirement{} ->
        hsep [ text "merged", ppr (usg_mod usage), ppr (usg_mod_hash usage) ]
-- | Shared layout for an import-style usage line:
-- @import [safe] \<module\> \<hash\>@.
pprUsageImport :: Outputable a => Usage -> (Usage -> a) -> SDoc
pprUsageImport usage get_mod =
    hsep [ text "import"
         , safety
         , ppr (get_mod usage)
         , ppr (usg_mod_hash usage)
         ]
  where
    safety = if usg_safe usage then text "safe" else text " -/ "
-- | Print the dependency summary recorded in an interface.
pprDeps :: Dependencies -> SDoc
pprDeps deps =
    vcat [ text "module dependencies:" <+> fsep (map ppr_mod (dep_mods deps))
         , text "package dependencies:" <+> fsep (map ppr_pkg (dep_pkgs deps))
         , text "orphans:" <+> fsep (map ppr (dep_orphs deps))
         , text "family instance modules:" <+> fsep (map ppr (dep_finsts deps))
         ]
  where
    -- A boot dependency gets a "[boot]" marker after its name.
    ppr_mod (mod_name, is_boot) =
        ppr mod_name <+> (if is_boot then text "[boot]" else Outputable.empty)
    -- A package whose trust is required gets a trailing "*".
    ppr_pkg (pkg, trust_req) =
        ppr pkg <> (if trust_req then text "*" else Outputable.empty)
-- | Print the fixity declarations recorded in an interface, if any.
pprFixities :: [(OccName, Fixity)] -> SDoc
pprFixities fixes
  | null fixes = Outputable.empty
  | otherwise  = text "fixities" <+> pprWithCommas ppr_one fixes
  where
    ppr_one (occ, fix) = ppr fix <+> ppr occ
-- | Print the vectorisation info recorded in an interface.
pprVectInfo :: IfaceVectInfo -> SDoc
pprVectInfo info = vcat
    [ row "vectorised variables:"     (ifaceVectInfoVar info)
    , row "vectorised tycons:"        (ifaceVectInfoTyCon info)
    , row "vectorised reused tycons:" (ifaceVectInfoTyConReuse info)
    , row "parallel variables:"       (ifaceVectInfoParallelVars info)
    , row "parallel tycons:"          (ifaceVectInfoParallelTyCons info)
    ]
  where
    -- One labelled, space-separated row of entries.
    row lbl entries = text lbl <+> hsep (map ppr entries)
-- | Print the module's Safe Haskell trust property.
pprTrustInfo :: IfaceTrustInfo -> SDoc
pprTrustInfo trust = hsep [text "trusted:", ppr trust]
-- | Print whether the module requires its own package to be trusted.
pprTrustPkg :: Bool -> SDoc
pprTrustPkg tpkg = hsep [text "require own pkg trusted:", ppr tpkg]
-- Render 'Warnings' via 'pprWarns' when they show up in dumped interfaces.
instance Outputable Warnings where
    ppr = pprWarns
-- | Print the deprecation/warning annotations stored in an interface.
pprWarns :: Warnings -> SDoc
pprWarns warns = case warns of
    NoWarnings   -> Outputable.empty
    WarnAll txt  -> text "Warn all" <+> ppr txt
    WarnSome prs -> text "Warnings" <+> vcat (map one prs)
  where
    one (name, txt) = ppr name <+> ppr txt
-- | Print one ANN pragma payload recorded in the interface.
pprIfaceAnnotation :: IfaceAnnotation -> SDoc
pprIfaceAnnotation ann =
    hsep [ ppr (ifAnnotatedTarget ann)
         , text "annotated by"
         , ppr (ifAnnotatedValue ann)
         ]
{-
*********************************************************
* *
\subsection{Errors}
* *
*********************************************************
-}
-- | Error document for an interface file that could not be parsed.
badIfaceFile :: String -> SDoc -> SDoc
badIfaceFile path reason =
    text "Bad interface file:" <+> text path $$ nest 4 reason
-- | Warning for an interface file whose module name does not match the
-- module we asked for.
--
-- Fix: the first literal used to end in a trailing space, which
-- combined with 'hsep''s separator to print a double space before the
-- requested module name.
hiModuleNameMismatchWarn :: InstalledModule -> Module -> MsgDoc
hiModuleNameMismatchWarn requested_mod read_mod =
  -- ToDo: This will fail to have enough qualification when the package IDs
  -- are the same
  withPprStyle (mkUserStyle alwaysQualify AllTheWay) $
    -- we want the Modules below to be qualified with package names,
    -- so reset the PrintUnqualified setting.
    hsep [ text "Something is amiss; requested module"
         , ppr requested_mod
         , text "differs from name found in the interface file"
         , ppr read_mod
         ]
-- | Error for an interface file found on disk that contains a different
-- module from the one we were looking for.
wrongIfaceModErr :: ModIface -> InstalledModule -> String -> SDoc
wrongIfaceModErr iface expected_mod file_path =
    sep [ text "Interface file" <+> quoted_file
        , text "contains module" <+> quotes (ppr (mi_module iface)) <> comma
        , text "but we were expecting module" <+> quotes (ppr expected_mod)
        , sep [ text "Probable cause: the source code which generated"
              , nest 2 quoted_file
              , text "has an incompatible module name"
              ]
        ]
  where
    quoted_file = doubleQuotes (text file_path)
-- | Error for trying to use a home-package module that is not in the
-- home package table.  See Note [Home module load error]
homeModError :: InstalledModule -> ModLocation -> SDoc
homeModError mod location =
    text "attempting to use module " <> quotes (ppr mod)
      <> src_file
      <+> text "which is not loaded"
  where
    -- Mention the source file when the finder knows it.
    src_file = maybe Outputable.empty
                     (\f -> space <> parens (text f))
                     (ml_hs_file location)
| snoyberg/ghc | compiler/iface/LoadIface.hs | bsd-3-clause | 50,740 | 363 | 22 | 16,183 | 7,780 | 4,149 | 3,631 | -1 | -1 |
module Catching where
import Control.Exception
import Data.Typeable
-- | Report a caught exception: its concrete type, then its rendering,
-- then drop a marker file so the failure is visible on disk.
handler :: SomeException -> IO ()
handler (SomeException e) = do
    putStrLn (show (typeOf e))
    putStrLn ("We errored! It was: " ++ show e)
    writeFile "bbb" "hi"
-- | Write a file, reporting (via 'handler') rather than crashing on any
-- exception raised while doing so.
--
-- Fix: add the missing top-level type signature for 'main'.
main :: IO ()
main =
    writeFile "zzz" "hi"
      `catch` handler
{-# LANGUAGE NoImplicitPrelude #-}
module Test where
import Data.List.Compat
| beni55/base-compat | check/check-hs/Data.List.Compat.check.hs | mit | 77 | 0 | 4 | 9 | 11 | 8 | 3 | 3 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.